| author | Shubham Saini <shubham6405@gmail.com> | 2018-12-11 10:01:23 +0000 |
|---|---|---|
| committer | Shubham Saini <shubham6405@gmail.com> | 2018-12-11 10:01:23 +0000 |
| commit | 68df54d6629ec019142eb149dd037774f2d11e7c (patch) | |
| tree | 345bc22d46b4e01a4ba8303b94278952a4ed2b9e /venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip | |
First commit
Diffstat (limited to 'venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip')
288 files changed, 100545 insertions, 0 deletions
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/__init__.py
new file mode 100644
index 0000000..0a3b850
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/__init__.py
| @@ -0,0 +1 @@ | |||
| __version__ = "10.0.1" | |||
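The top-level `pip/__init__.py` carries nothing but the version string; all machinery lives under `pip._internal` in the files below. A trivial check, assuming the egg shown in this diff is importable:

```python
import pip

# The top-level package only defines a version string.
print(pip.__version__)  # "10.0.1"
```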
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/__main__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/__main__.py
new file mode 100644
index 0000000..a128ee3
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/__main__.py
| @@ -0,0 +1,19 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | |||
| 3 | import os | ||
| 4 | import sys | ||
| 5 | |||
| 6 | # If we are running from a wheel, add the wheel to sys.path | ||
| 7 | # This allows the usage: python pip-*.whl/pip install pip-*.whl | ||
| 8 | if __package__ == '': | ||
| 9 | # __file__ is pip-*.whl/pip/__main__.py | ||
| 10 | # first dirname call strips off '/__main__.py', second strips off '/pip' | ||
| 11 | # Resulting path is the name of the wheel itself | ||
| 12 | # Add that to sys.path so we can import pip | ||
| 13 | path = os.path.dirname(os.path.dirname(__file__)) | ||
| 14 | sys.path.insert(0, path) | ||
| 15 | |||
| 16 | from pip._internal import main as _main # noqa | ||
| 17 | |||
| 18 | if __name__ == '__main__': | ||
| 19 | sys.exit(_main()) | ||
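Because of this `__main__.py`, pip can always be invoked as a module, which is also the recommended way to modify pip itself on Windows (see `basecommand.py` below). A minimal sketch:

```python
import subprocess
import sys

# Running pip as a module executes pip/__main__.py above; sys.executable
# guarantees the pip that runs belongs to this exact interpreter.
subprocess.run([sys.executable, "-m", "pip", "--version"], check=True)
```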
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/__init__.py
new file mode 100644
index 0000000..d713b0d
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/__init__.py
| @@ -0,0 +1,246 @@ | |||
| 1 | #!/usr/bin/env python | ||
| 2 | from __future__ import absolute_import | ||
| 3 | |||
| 4 | import locale | ||
| 5 | import logging | ||
| 6 | import os | ||
| 7 | import optparse | ||
| 8 | import warnings | ||
| 9 | |||
| 10 | import sys | ||
| 11 | |||
| 12 | # 2016-06-17 barry@debian.org: urllib3 1.14 added optional support for socks, | ||
| 13 | # but if invoked (i.e. imported), it will issue a warning to stderr if socks | ||
| 14 | # isn't available. requests unconditionally imports urllib3's socks contrib | ||
| 15 | # module, triggering this warning. The warning breaks DEP-8 tests (because of | ||
| 16 | # the stderr output) and is just plain annoying in normal usage. I don't want | ||
| 17 | # to add socks as yet another dependency for pip, nor do I want to allow-stderr | ||
| 18 | # in the DEP-8 tests, so just suppress the warning. pdb tells me this has to | ||
| 19 | # be done before the import of pip.vcs. | ||
| 20 | from pip._vendor.urllib3.exceptions import DependencyWarning | ||
| 21 | warnings.filterwarnings("ignore", category=DependencyWarning) # noqa | ||
| 22 | |||
| 23 | # We want to inject the use of SecureTransport as early as possible so that | ||
| 24 | # any references or sessions created later are guaranteed to have it. However, | ||
| 25 | # we only want to do this when we're running on macOS and the linked | ||
| 26 | # OpenSSL is too old to handle TLSv1.2. | ||
| 27 | try: | ||
| 28 | import ssl | ||
| 29 | except ImportError: | ||
| 30 | pass | ||
| 31 | else: | ||
| 32 | # Check for OpenSSL older than 1.0.1 on macOS | ||
| 33 | if sys.platform == "darwin" and ssl.OPENSSL_VERSION_NUMBER < 0x1000100f: | ||
| 34 | try: | ||
| 35 | from pip._vendor.urllib3.contrib import securetransport | ||
| 36 | except (ImportError, OSError): | ||
| 37 | pass | ||
| 38 | else: | ||
| 39 | securetransport.inject_into_urllib3() | ||
| 40 | |||
| 41 | from pip import __version__ | ||
| 42 | from pip._internal import cmdoptions | ||
| 43 | from pip._internal.exceptions import CommandError, PipError | ||
| 44 | from pip._internal.utils.misc import get_installed_distributions, get_prog | ||
| 45 | from pip._internal.utils import deprecation | ||
| 46 | from pip._internal.vcs import git, mercurial, subversion, bazaar # noqa | ||
| 47 | from pip._internal.baseparser import ( | ||
| 48 | ConfigOptionParser, UpdatingDefaultsHelpFormatter, | ||
| 49 | ) | ||
| 50 | from pip._internal.commands import get_summaries, get_similar_commands | ||
| 51 | from pip._internal.commands import commands_dict | ||
| 52 | from pip._vendor.urllib3.exceptions import InsecureRequestWarning | ||
| 53 | |||
| 54 | logger = logging.getLogger(__name__) | ||
| 55 | |||
| 56 | # Hide the InsecureRequestWarning from urllib3 | ||
| 57 | warnings.filterwarnings("ignore", category=InsecureRequestWarning) | ||
| 58 | |||
| 59 | |||
| 60 | def autocomplete(): | ||
| 61 | """Command and option completion for the main option parser (and options) | ||
| 62 | and its subcommands (and options). | ||
| 63 | |||
| 64 | Enable by sourcing one of the completion shell scripts (bash, zsh or fish). | ||
| 65 | """ | ||
| 66 | # Don't complete if user hasn't sourced bash_completion file. | ||
| 67 | if 'PIP_AUTO_COMPLETE' not in os.environ: | ||
| 68 | return | ||
| 69 | cwords = os.environ['COMP_WORDS'].split()[1:] | ||
| 70 | cword = int(os.environ['COMP_CWORD']) | ||
| 71 | try: | ||
| 72 | current = cwords[cword - 1] | ||
| 73 | except IndexError: | ||
| 74 | current = '' | ||
| 75 | |||
| 76 | subcommands = [cmd for cmd, summary in get_summaries()] | ||
| 77 | options = [] | ||
| 78 | # subcommand | ||
| 79 | try: | ||
| 80 | subcommand_name = [w for w in cwords if w in subcommands][0] | ||
| 81 | except IndexError: | ||
| 82 | subcommand_name = None | ||
| 83 | |||
| 84 | parser = create_main_parser() | ||
| 85 | # subcommand options | ||
| 86 | if subcommand_name: | ||
| 87 | # special case: 'help' subcommand has no options | ||
| 88 | if subcommand_name == 'help': | ||
| 89 | sys.exit(1) | ||
| 90 | # special case: list locally installed dists for show and uninstall | ||
| 91 | should_list_installed = ( | ||
| 92 | subcommand_name in ['show', 'uninstall'] and | ||
| 93 | not current.startswith('-') | ||
| 94 | ) | ||
| 95 | if should_list_installed: | ||
| 96 | installed = [] | ||
| 97 | lc = current.lower() | ||
| 98 | for dist in get_installed_distributions(local_only=True): | ||
| 99 | if dist.key.startswith(lc) and dist.key not in cwords[1:]: | ||
| 100 | installed.append(dist.key) | ||
| 101 | # if there are no dists installed, fall back to option completion | ||
| 102 | if installed: | ||
| 103 | for dist in installed: | ||
| 104 | print(dist) | ||
| 105 | sys.exit(1) | ||
| 106 | |||
| 107 | subcommand = commands_dict[subcommand_name]() | ||
| 108 | |||
| 109 | for opt in subcommand.parser.option_list_all: | ||
| 110 | if opt.help != optparse.SUPPRESS_HELP: | ||
| 111 | for opt_str in opt._long_opts + opt._short_opts: | ||
| 112 | options.append((opt_str, opt.nargs)) | ||
| 113 | |||
| 114 | # filter out previously specified options from available options | ||
| 115 | prev_opts = [x.split('=')[0] for x in cwords[1:cword - 1]] | ||
| 116 | options = [(x, v) for (x, v) in options if x not in prev_opts] | ||
| 117 | # filter options by current input | ||
| 118 | options = [(k, v) for k, v in options if k.startswith(current)] | ||
| 119 | for option in options: | ||
| 120 | opt_label = option[0] | ||
| 121 | # append '=' to options which require args | ||
| 122 | if option[1] and option[0][:2] == "--": | ||
| 123 | opt_label += '=' | ||
| 124 | print(opt_label) | ||
| 125 | else: | ||
| 126 | # show main parser options only when necessary | ||
| 127 | if current.startswith('-') or current.startswith('--'): | ||
| 128 | opts = [i.option_list for i in parser.option_groups] | ||
| 129 | opts.append(parser.option_list) | ||
| 130 | opts = (o for it in opts for o in it) | ||
| 131 | |||
| 132 | for opt in opts: | ||
| 133 | if opt.help != optparse.SUPPRESS_HELP: | ||
| 134 | subcommands += opt._long_opts + opt._short_opts | ||
| 135 | |||
| 136 | print(' '.join([x for x in subcommands if x.startswith(current)])) | ||
| 137 | sys.exit(1) | ||
| 138 | |||
| 139 | |||
| 140 | def create_main_parser(): | ||
| 141 | parser_kw = { | ||
| 142 | 'usage': '\n%prog <command> [options]', | ||
| 143 | 'add_help_option': False, | ||
| 144 | 'formatter': UpdatingDefaultsHelpFormatter(), | ||
| 145 | 'name': 'global', | ||
| 146 | 'prog': get_prog(), | ||
| 147 | } | ||
| 148 | |||
| 149 | parser = ConfigOptionParser(**parser_kw) | ||
| 150 | parser.disable_interspersed_args() | ||
| 151 | |||
| 152 | pip_pkg_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) | ||
| 153 | parser.version = 'pip %s from %s (python %s)' % ( | ||
| 154 | __version__, pip_pkg_dir, sys.version[:3], | ||
| 155 | ) | ||
| 156 | |||
| 157 | # add the general options | ||
| 158 | gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser) | ||
| 159 | parser.add_option_group(gen_opts) | ||
| 160 | |||
| 161 | parser.main = True # so the help formatter knows | ||
| 162 | |||
| 163 | # create command listing for description | ||
| 164 | command_summaries = get_summaries() | ||
| 165 | description = [''] + ['%-27s %s' % (i, j) for i, j in command_summaries] | ||
| 166 | parser.description = '\n'.join(description) | ||
| 167 | |||
| 168 | return parser | ||
| 169 | |||
| 170 | |||
| 171 | def parseopts(args): | ||
| 172 | parser = create_main_parser() | ||
| 173 | |||
| 174 | # Note: parser calls disable_interspersed_args(), so the result of this | ||
| 175 | # call is to split the initial args into the general options before the | ||
| 176 | # subcommand and everything else. | ||
| 177 | # For example: | ||
| 178 | # args: ['--timeout=5', 'install', '--user', 'INITools'] | ||
| 179 | # general_options: ['--timeout=5'] | ||
| 180 | # args_else: ['install', '--user', 'INITools'] | ||
| 181 | general_options, args_else = parser.parse_args(args) | ||
| 182 | |||
| 183 | # --version | ||
| 184 | if general_options.version: | ||
| 185 | sys.stdout.write(parser.version) | ||
| 186 | sys.stdout.write(os.linesep) | ||
| 187 | sys.exit() | ||
| 188 | |||
| 189 | # pip || pip help -> print_help() | ||
| 190 | if not args_else or (args_else[0] == 'help' and len(args_else) == 1): | ||
| 191 | parser.print_help() | ||
| 192 | sys.exit() | ||
| 193 | |||
| 194 | # the subcommand name | ||
| 195 | cmd_name = args_else[0] | ||
| 196 | |||
| 197 | if cmd_name not in commands_dict: | ||
| 198 | guess = get_similar_commands(cmd_name) | ||
| 199 | |||
| 200 | msg = ['unknown command "%s"' % cmd_name] | ||
| 201 | if guess: | ||
| 202 | msg.append('maybe you meant "%s"' % guess) | ||
| 203 | |||
| 204 | raise CommandError(' - '.join(msg)) | ||
| 205 | |||
| 206 | # all the args without the subcommand | ||
| 207 | cmd_args = args[:] | ||
| 208 | cmd_args.remove(cmd_name) | ||
| 209 | |||
| 210 | return cmd_name, cmd_args | ||
| 211 | |||
| 212 | |||
| 213 | def check_isolated(args): | ||
| 214 | isolated = False | ||
| 215 | |||
| 216 | if "--isolated" in args: | ||
| 217 | isolated = True | ||
| 218 | |||
| 219 | return isolated | ||
| 220 | |||
| 221 | |||
| 222 | def main(args=None): | ||
| 223 | if args is None: | ||
| 224 | args = sys.argv[1:] | ||
| 225 | |||
| 226 | # Configure our deprecation warnings to be sent through loggers | ||
| 227 | deprecation.install_warning_logger() | ||
| 228 | |||
| 229 | autocomplete() | ||
| 230 | |||
| 231 | try: | ||
| 232 | cmd_name, cmd_args = parseopts(args) | ||
| 233 | except PipError as exc: | ||
| 234 | sys.stderr.write("ERROR: %s" % exc) | ||
| 235 | sys.stderr.write(os.linesep) | ||
| 236 | sys.exit(1) | ||
| 237 | |||
| 238 | # Needed for locale.getpreferredencoding(False) to work | ||
| 239 | # in pip._internal.utils.encoding.auto_decode | ||
| 240 | try: | ||
| 241 | locale.setlocale(locale.LC_ALL, '') | ||
| 242 | except locale.Error as e: | ||
| 243 | # setlocale can apparently crash if locales are uninitialized | ||
| 244 | logger.debug("Ignoring error %s when setting locale", e) | ||
| 245 | command = commands_dict[cmd_name](isolated=check_isolated(cmd_args)) | ||
| 246 | return command.main(cmd_args) | ||
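The splitting described in the `parseopts` comment can be reproduced directly; a small sketch, assuming this vendored pip 10 is importable:

```python
from pip._internal import parseopts

# General options may precede the subcommand; parseopts returns the
# subcommand name plus every other argument (only the name is removed).
cmd_name, cmd_args = parseopts(['--timeout=5', 'install', '--user', 'INITools'])
assert cmd_name == 'install'
assert cmd_args == ['--timeout=5', '--user', 'INITools']
```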
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/basecommand.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/basecommand.py
new file mode 100644
index 0000000..e900928
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/basecommand.py
| @@ -0,0 +1,373 @@ | |||
| 1 | """Base Command class, and related routines""" | ||
| 2 | from __future__ import absolute_import | ||
| 3 | |||
| 4 | import logging | ||
| 5 | import logging.config | ||
| 6 | import optparse | ||
| 7 | import os | ||
| 8 | import sys | ||
| 9 | import warnings | ||
| 10 | |||
| 11 | from pip._internal import cmdoptions | ||
| 12 | from pip._internal.baseparser import ( | ||
| 13 | ConfigOptionParser, UpdatingDefaultsHelpFormatter, | ||
| 14 | ) | ||
| 15 | from pip._internal.compat import WINDOWS | ||
| 16 | from pip._internal.download import PipSession | ||
| 17 | from pip._internal.exceptions import ( | ||
| 18 | BadCommand, CommandError, InstallationError, PreviousBuildDirError, | ||
| 19 | UninstallationError, | ||
| 20 | ) | ||
| 21 | from pip._internal.index import PackageFinder | ||
| 22 | from pip._internal.locations import running_under_virtualenv | ||
| 23 | from pip._internal.req.req_file import parse_requirements | ||
| 24 | from pip._internal.req.req_install import InstallRequirement | ||
| 25 | from pip._internal.status_codes import ( | ||
| 26 | ERROR, PREVIOUS_BUILD_DIR_ERROR, SUCCESS, UNKNOWN_ERROR, | ||
| 27 | VIRTUALENV_NOT_FOUND, | ||
| 28 | ) | ||
| 29 | from pip._internal.utils import deprecation | ||
| 30 | from pip._internal.utils.logging import IndentingFormatter | ||
| 31 | from pip._internal.utils.misc import get_prog, normalize_path | ||
| 32 | from pip._internal.utils.outdated import pip_version_check | ||
| 33 | from pip._internal.utils.typing import MYPY_CHECK_RUNNING | ||
| 34 | |||
| 35 | if MYPY_CHECK_RUNNING: | ||
| 36 | from typing import Optional | ||
| 37 | |||
| 38 | __all__ = ['Command'] | ||
| 39 | |||
| 40 | logger = logging.getLogger(__name__) | ||
| 41 | |||
| 42 | |||
| 43 | class Command(object): | ||
| 44 | name = None # type: Optional[str] | ||
| 45 | usage = None # type: Optional[str] | ||
| 46 | hidden = False # type: bool | ||
| 47 | ignore_require_venv = False # type: bool | ||
| 48 | log_streams = ("ext://sys.stdout", "ext://sys.stderr") | ||
| 49 | |||
| 50 | def __init__(self, isolated=False): | ||
| 51 | parser_kw = { | ||
| 52 | 'usage': self.usage, | ||
| 53 | 'prog': '%s %s' % (get_prog(), self.name), | ||
| 54 | 'formatter': UpdatingDefaultsHelpFormatter(), | ||
| 55 | 'add_help_option': False, | ||
| 56 | 'name': self.name, | ||
| 57 | 'description': self.__doc__, | ||
| 58 | 'isolated': isolated, | ||
| 59 | } | ||
| 60 | |||
| 61 | self.parser = ConfigOptionParser(**parser_kw) | ||
| 62 | |||
| 63 | # Commands should add options to this option group | ||
| 64 | optgroup_name = '%s Options' % self.name.capitalize() | ||
| 65 | self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name) | ||
| 66 | |||
| 67 | # Add the general options | ||
| 68 | gen_opts = cmdoptions.make_option_group( | ||
| 69 | cmdoptions.general_group, | ||
| 70 | self.parser, | ||
| 71 | ) | ||
| 72 | self.parser.add_option_group(gen_opts) | ||
| 73 | |||
| 74 | def _build_session(self, options, retries=None, timeout=None): | ||
| 75 | session = PipSession( | ||
| 76 | cache=( | ||
| 77 | normalize_path(os.path.join(options.cache_dir, "http")) | ||
| 78 | if options.cache_dir else None | ||
| 79 | ), | ||
| 80 | retries=retries if retries is not None else options.retries, | ||
| 81 | insecure_hosts=options.trusted_hosts, | ||
| 82 | ) | ||
| 83 | |||
| 84 | # Handle custom ca-bundles from the user | ||
| 85 | if options.cert: | ||
| 86 | session.verify = options.cert | ||
| 87 | |||
| 88 | # Handle SSL client certificate | ||
| 89 | if options.client_cert: | ||
| 90 | session.cert = options.client_cert | ||
| 91 | |||
| 92 | # Handle timeouts | ||
| 93 | if options.timeout or timeout: | ||
| 94 | session.timeout = ( | ||
| 95 | timeout if timeout is not None else options.timeout | ||
| 96 | ) | ||
| 97 | |||
| 98 | # Handle configured proxies | ||
| 99 | if options.proxy: | ||
| 100 | session.proxies = { | ||
| 101 | "http": options.proxy, | ||
| 102 | "https": options.proxy, | ||
| 103 | } | ||
| 104 | |||
| 105 | # Determine if we can prompt the user for authentication or not | ||
| 106 | session.auth.prompting = not options.no_input | ||
| 107 | |||
| 108 | return session | ||
| 109 | |||
| 110 | def parse_args(self, args): | ||
| 111 | # factored out for testability | ||
| 112 | return self.parser.parse_args(args) | ||
| 113 | |||
| 114 | def main(self, args): | ||
| 115 | options, args = self.parse_args(args) | ||
| 116 | |||
| 117 | # Set verbosity so that it can be used elsewhere. | ||
| 118 | self.verbosity = options.verbose - options.quiet | ||
| 119 | |||
| 120 | if self.verbosity >= 1: | ||
| 121 | level = "DEBUG" | ||
| 122 | elif self.verbosity == -1: | ||
| 123 | level = "WARNING" | ||
| 124 | elif self.verbosity == -2: | ||
| 125 | level = "ERROR" | ||
| 126 | elif self.verbosity <= -3: | ||
| 127 | level = "CRITICAL" | ||
| 128 | else: | ||
| 129 | level = "INFO" | ||
| 130 | |||
| 131 | # The root logger should match the "console" level *unless* we | ||
| 132 | # specified "--log" to send debug logs to a file. | ||
| 133 | root_level = level | ||
| 134 | if options.log: | ||
| 135 | root_level = "DEBUG" | ||
| 136 | |||
| 137 | logger_class = "pip._internal.utils.logging.ColorizedStreamHandler" | ||
| 138 | handler_class = "pip._internal.utils.logging.BetterRotatingFileHandler" | ||
| 139 | |||
| 140 | logging.config.dictConfig({ | ||
| 141 | "version": 1, | ||
| 142 | "disable_existing_loggers": False, | ||
| 143 | "filters": { | ||
| 144 | "exclude_warnings": { | ||
| 145 | "()": "pip._internal.utils.logging.MaxLevelFilter", | ||
| 146 | "level": logging.WARNING, | ||
| 147 | }, | ||
| 148 | }, | ||
| 149 | "formatters": { | ||
| 150 | "indent": { | ||
| 151 | "()": IndentingFormatter, | ||
| 152 | "format": "%(message)s", | ||
| 153 | }, | ||
| 154 | }, | ||
| 155 | "handlers": { | ||
| 156 | "console": { | ||
| 157 | "level": level, | ||
| 158 | "class": logger_class, | ||
| 159 | "no_color": options.no_color, | ||
| 160 | "stream": self.log_streams[0], | ||
| 161 | "filters": ["exclude_warnings"], | ||
| 162 | "formatter": "indent", | ||
| 163 | }, | ||
| 164 | "console_errors": { | ||
| 165 | "level": "WARNING", | ||
| 166 | "class": logger_class, | ||
| 167 | "no_color": options.no_color, | ||
| 168 | "stream": self.log_streams[1], | ||
| 169 | "formatter": "indent", | ||
| 170 | }, | ||
| 171 | "user_log": { | ||
| 172 | "level": "DEBUG", | ||
| 173 | "class": handler_class, | ||
| 174 | "filename": options.log or "/dev/null", | ||
| 175 | "delay": True, | ||
| 176 | "formatter": "indent", | ||
| 177 | }, | ||
| 178 | }, | ||
| 179 | "root": { | ||
| 180 | "level": root_level, | ||
| 181 | "handlers": list(filter(None, [ | ||
| 182 | "console", | ||
| 183 | "console_errors", | ||
| 184 | "user_log" if options.log else None, | ||
| 185 | ])), | ||
| 186 | }, | ||
| 187 | # Disable any logging besides WARNING unless we have DEBUG level | ||
| 188 | # logging enabled. These use both pip._vendor and the bare names | ||
| 189 | # for the case where someone unbundles our libraries. | ||
| 190 | "loggers": { | ||
| 191 | name: { | ||
| 192 | "level": ( | ||
| 193 | "WARNING" if level in ["INFO", "ERROR"] else "DEBUG" | ||
| 194 | ) | ||
| 195 | } for name in [ | ||
| 196 | "pip._vendor", "distlib", "requests", "urllib3" | ||
| 197 | ] | ||
| 198 | }, | ||
| 199 | }) | ||
| 200 | |||
| 201 | if sys.version_info[:2] == (3, 3): | ||
| 202 | warnings.warn( | ||
| 203 | "Python 3.3 supported has been deprecated and support for it " | ||
| 204 | "will be dropped in the future. Please upgrade your Python.", | ||
| 205 | deprecation.RemovedInPip11Warning, | ||
| 206 | ) | ||
| 207 | |||
| 208 | # TODO: try to get these passing down from the command? | ||
| 209 | # without resorting to os.environ to hold these. | ||
| 210 | |||
| 211 | if options.no_input: | ||
| 212 | os.environ['PIP_NO_INPUT'] = '1' | ||
| 213 | |||
| 214 | if options.exists_action: | ||
| 215 | os.environ['PIP_EXISTS_ACTION'] = ' '.join(options.exists_action) | ||
| 216 | |||
| 217 | if options.require_venv and not self.ignore_require_venv: | ||
| 218 | # If a venv is required, check whether it can actually be found | ||
| 219 | if not running_under_virtualenv(): | ||
| 220 | logger.critical( | ||
| 221 | 'Could not find an activated virtualenv (required).' | ||
| 222 | ) | ||
| 223 | sys.exit(VIRTUALENV_NOT_FOUND) | ||
| 224 | |||
| 225 | original_root_handlers = set(logging.root.handlers) | ||
| 226 | |||
| 227 | try: | ||
| 228 | status = self.run(options, args) | ||
| 229 | # FIXME: all commands should return an exit status | ||
| 230 | # and when it is done, isinstance is not needed anymore | ||
| 231 | if isinstance(status, int): | ||
| 232 | return status | ||
| 233 | except PreviousBuildDirError as exc: | ||
| 234 | logger.critical(str(exc)) | ||
| 235 | logger.debug('Exception information:', exc_info=True) | ||
| 236 | |||
| 237 | return PREVIOUS_BUILD_DIR_ERROR | ||
| 238 | except (InstallationError, UninstallationError, BadCommand) as exc: | ||
| 239 | logger.critical(str(exc)) | ||
| 240 | logger.debug('Exception information:', exc_info=True) | ||
| 241 | |||
| 242 | return ERROR | ||
| 243 | except CommandError as exc: | ||
| 244 | logger.critical('ERROR: %s', exc) | ||
| 245 | logger.debug('Exception information:', exc_info=True) | ||
| 246 | |||
| 247 | return ERROR | ||
| 248 | except KeyboardInterrupt: | ||
| 249 | logger.critical('Operation cancelled by user') | ||
| 250 | logger.debug('Exception information:', exc_info=True) | ||
| 251 | |||
| 252 | return ERROR | ||
| 253 | except: | ||
| 254 | logger.critical('Exception:', exc_info=True) | ||
| 255 | |||
| 256 | return UNKNOWN_ERROR | ||
| 257 | finally: | ||
| 258 | # Check if we're using the latest version of pip available | ||
| 259 | if (not options.disable_pip_version_check and not | ||
| 260 | getattr(options, "no_index", False)): | ||
| 261 | with self._build_session( | ||
| 262 | options, | ||
| 263 | retries=0, | ||
| 264 | timeout=min(5, options.timeout)) as session: | ||
| 265 | pip_version_check(session, options) | ||
| 266 | # Avoid leaking loggers | ||
| 267 | for handler in set(logging.root.handlers) - original_root_handlers: | ||
| 269 | # this method benefits from the Logger class's internal lock | ||
| 269 | logging.root.removeHandler(handler) | ||
| 270 | |||
| 271 | return SUCCESS | ||
| 272 | |||
| 273 | |||
| 274 | class RequirementCommand(Command): | ||
| 275 | |||
| 276 | @staticmethod | ||
| 277 | def populate_requirement_set(requirement_set, args, options, finder, | ||
| 278 | session, name, wheel_cache): | ||
| 279 | """ | ||
| 280 | Marshal cmd line args into a requirement set. | ||
| 281 | """ | ||
| 282 | # NOTE: As a side-effect, options.require_hashes and | ||
| 283 | # requirement_set.require_hashes may be updated | ||
| 284 | |||
| 285 | for filename in options.constraints: | ||
| 286 | for req_to_add in parse_requirements( | ||
| 287 | filename, | ||
| 288 | constraint=True, finder=finder, options=options, | ||
| 289 | session=session, wheel_cache=wheel_cache): | ||
| 290 | req_to_add.is_direct = True | ||
| 291 | requirement_set.add_requirement(req_to_add) | ||
| 292 | |||
| 293 | for req in args: | ||
| 294 | req_to_add = InstallRequirement.from_line( | ||
| 295 | req, None, isolated=options.isolated_mode, | ||
| 296 | wheel_cache=wheel_cache | ||
| 297 | ) | ||
| 298 | req_to_add.is_direct = True | ||
| 299 | requirement_set.add_requirement(req_to_add) | ||
| 300 | |||
| 301 | for req in options.editables: | ||
| 302 | req_to_add = InstallRequirement.from_editable( | ||
| 303 | req, | ||
| 304 | isolated=options.isolated_mode, | ||
| 305 | wheel_cache=wheel_cache | ||
| 306 | ) | ||
| 307 | req_to_add.is_direct = True | ||
| 308 | requirement_set.add_requirement(req_to_add) | ||
| 309 | |||
| 310 | for filename in options.requirements: | ||
| 311 | for req_to_add in parse_requirements( | ||
| 312 | filename, | ||
| 313 | finder=finder, options=options, session=session, | ||
| 314 | wheel_cache=wheel_cache): | ||
| 315 | req_to_add.is_direct = True | ||
| 316 | requirement_set.add_requirement(req_to_add) | ||
| 317 | # If --require-hashes was a line in a requirements file, tell | ||
| 318 | # RequirementSet about it: | ||
| 319 | requirement_set.require_hashes = options.require_hashes | ||
| 320 | |||
| 321 | if not (args or options.editables or options.requirements): | ||
| 322 | opts = {'name': name} | ||
| 323 | if options.find_links: | ||
| 324 | raise CommandError( | ||
| 325 | 'You must give at least one requirement to %(name)s ' | ||
| 326 | '(maybe you meant "pip %(name)s %(links)s"?)' % | ||
| 327 | dict(opts, links=' '.join(options.find_links))) | ||
| 328 | else: | ||
| 329 | raise CommandError( | ||
| 330 | 'You must give at least one requirement to %(name)s ' | ||
| 331 | '(see "pip help %(name)s")' % opts) | ||
| 332 | |||
| 333 | # On Windows, any operation modifying pip should be run as: | ||
| 334 | # python -m pip ... | ||
| 335 | # See https://github.com/pypa/pip/issues/1299 for more discussion | ||
| 336 | should_show_use_python_msg = ( | ||
| 337 | WINDOWS and | ||
| 338 | requirement_set.has_requirement("pip") and | ||
| 339 | os.path.basename(sys.argv[0]).startswith("pip") | ||
| 340 | ) | ||
| 341 | if should_show_use_python_msg: | ||
| 342 | new_command = [ | ||
| 343 | sys.executable, "-m", "pip" | ||
| 344 | ] + sys.argv[1:] | ||
| 345 | raise CommandError( | ||
| 346 | 'To modify pip, please run the following command:\n{}' | ||
| 347 | .format(" ".join(new_command)) | ||
| 348 | ) | ||
| 349 | |||
| 350 | def _build_package_finder(self, options, session, | ||
| 351 | platform=None, python_versions=None, | ||
| 352 | abi=None, implementation=None): | ||
| 353 | """ | ||
| 354 | Create a package finder appropriate to this requirement command. | ||
| 355 | """ | ||
| 356 | index_urls = [options.index_url] + options.extra_index_urls | ||
| 357 | if options.no_index: | ||
| 358 | logger.debug('Ignoring indexes: %s', ','.join(index_urls)) | ||
| 359 | index_urls = [] | ||
| 360 | |||
| 361 | return PackageFinder( | ||
| 362 | find_links=options.find_links, | ||
| 363 | format_control=options.format_control, | ||
| 364 | index_urls=index_urls, | ||
| 365 | trusted_hosts=options.trusted_hosts, | ||
| 366 | allow_all_prereleases=options.pre, | ||
| 367 | process_dependency_links=options.process_dependency_links, | ||
| 368 | session=session, | ||
| 369 | platform=platform, | ||
| 370 | versions=python_versions, | ||
| 371 | abi=abi, | ||
| 372 | implementation=implementation, | ||
| 373 | ) | ||
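`Command` is a template-method base class: `__init__` wires up the parser, `main()` handles logging configuration, error mapping, and the version self-check, and subclasses supply `run()`. A hypothetical subclass, just to illustrate the contract (`HelloCommand` is not a real pip command):

```python
from pip._internal.basecommand import Command
from pip._internal.status_codes import SUCCESS


class HelloCommand(Command):
    """Print a greeting (illustrative only)."""
    name = 'hello'
    usage = '%prog [options]'
    summary = 'Print a greeting.'

    def run(self, options, args):
        # main() above turns the returned status code into the exit status.
        print('hello from a custom command')
        return SUCCESS
```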
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/baseparser.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/baseparser.py
new file mode 100644
index 0000000..ed28a1b
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/baseparser.py
| @@ -0,0 +1,240 @@ | |||
| 1 | """Base option parser setup""" | ||
| 2 | from __future__ import absolute_import | ||
| 3 | |||
| 4 | import logging | ||
| 5 | import optparse | ||
| 6 | import sys | ||
| 7 | import textwrap | ||
| 8 | from distutils.util import strtobool | ||
| 9 | |||
| 10 | from pip._vendor.six import string_types | ||
| 11 | |||
| 12 | from pip._internal.compat import get_terminal_size | ||
| 13 | from pip._internal.configuration import Configuration, ConfigurationError | ||
| 14 | |||
| 15 | logger = logging.getLogger(__name__) | ||
| 16 | |||
| 17 | |||
| 18 | class PrettyHelpFormatter(optparse.IndentedHelpFormatter): | ||
| 19 | """A prettier/less verbose help formatter for optparse.""" | ||
| 20 | |||
| 21 | def __init__(self, *args, **kwargs): | ||
| 22 | # help position must be aligned with __init__.parseopts.description | ||
| 23 | kwargs['max_help_position'] = 30 | ||
| 24 | kwargs['indent_increment'] = 1 | ||
| 25 | kwargs['width'] = get_terminal_size()[0] - 2 | ||
| 26 | optparse.IndentedHelpFormatter.__init__(self, *args, **kwargs) | ||
| 27 | |||
| 28 | def format_option_strings(self, option): | ||
| 29 | return self._format_option_strings(option, ' <%s>', ', ') | ||
| 30 | |||
| 31 | def _format_option_strings(self, option, mvarfmt=' <%s>', optsep=', '): | ||
| 32 | """ | ||
| 33 | Return a comma-separated list of option strings and metavars. | ||
| 34 | |||
| 35 | :param option: tuple of (short opt, long opt), e.g. ('-f', '--format') | ||
| 36 | :param mvarfmt: metavar format string - evaluated as mvarfmt % metavar | ||
| 37 | :param optsep: separator | ||
| 38 | """ | ||
| 39 | opts = [] | ||
| 40 | |||
| 41 | if option._short_opts: | ||
| 42 | opts.append(option._short_opts[0]) | ||
| 43 | if option._long_opts: | ||
| 44 | opts.append(option._long_opts[0]) | ||
| 45 | if len(opts) > 1: | ||
| 46 | opts.insert(1, optsep) | ||
| 47 | |||
| 48 | if option.takes_value(): | ||
| 49 | metavar = option.metavar or option.dest.lower() | ||
| 50 | opts.append(mvarfmt % metavar.lower()) | ||
| 51 | |||
| 52 | return ''.join(opts) | ||
| 53 | |||
| 54 | def format_heading(self, heading): | ||
| 55 | if heading == 'Options': | ||
| 56 | return '' | ||
| 57 | return heading + ':\n' | ||
| 58 | |||
| 59 | def format_usage(self, usage): | ||
| 60 | """ | ||
| 61 | Ensure there is only one newline between usage and the first heading | ||
| 62 | if there is no description. | ||
| 63 | """ | ||
| 64 | msg = '\nUsage: %s\n' % self.indent_lines(textwrap.dedent(usage), " ") | ||
| 65 | return msg | ||
| 66 | |||
| 67 | def format_description(self, description): | ||
| 68 | # leave full control over description to us | ||
| 69 | if description: | ||
| 70 | if hasattr(self.parser, 'main'): | ||
| 71 | label = 'Commands' | ||
| 72 | else: | ||
| 73 | label = 'Description' | ||
| 74 | # some doc strings have initial newlines, some don't | ||
| 75 | description = description.lstrip('\n') | ||
| 76 | # some doc strings have final newlines and spaces, some don't | ||
| 77 | description = description.rstrip() | ||
| 78 | # dedent, then reindent | ||
| 79 | description = self.indent_lines(textwrap.dedent(description), " ") | ||
| 80 | description = '%s:\n%s\n' % (label, description) | ||
| 81 | return description | ||
| 82 | else: | ||
| 83 | return '' | ||
| 84 | |||
| 85 | def format_epilog(self, epilog): | ||
| 86 | # leave full control over epilog to us | ||
| 87 | if epilog: | ||
| 88 | return epilog | ||
| 89 | else: | ||
| 90 | return '' | ||
| 91 | |||
| 92 | def indent_lines(self, text, indent): | ||
| 93 | new_lines = [indent + line for line in text.split('\n')] | ||
| 94 | return "\n".join(new_lines) | ||
| 95 | |||
| 96 | |||
| 97 | class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter): | ||
| 98 | """Custom help formatter for use in ConfigOptionParser. | ||
| 99 | |||
| 100 | This updates the defaults before expanding them, allowing | ||
| 101 | them to show up correctly in the help listing. | ||
| 102 | """ | ||
| 103 | |||
| 104 | def expand_default(self, option): | ||
| 105 | if self.parser is not None: | ||
| 106 | self.parser._update_defaults(self.parser.defaults) | ||
| 107 | return optparse.IndentedHelpFormatter.expand_default(self, option) | ||
| 108 | |||
| 109 | |||
| 110 | class CustomOptionParser(optparse.OptionParser): | ||
| 111 | |||
| 112 | def insert_option_group(self, idx, *args, **kwargs): | ||
| 113 | """Insert an OptionGroup at a given position.""" | ||
| 114 | group = self.add_option_group(*args, **kwargs) | ||
| 115 | |||
| 116 | self.option_groups.pop() | ||
| 117 | self.option_groups.insert(idx, group) | ||
| 118 | |||
| 119 | return group | ||
| 120 | |||
| 121 | @property | ||
| 122 | def option_list_all(self): | ||
| 123 | """Get a list of all options, including those in option groups.""" | ||
| 124 | res = self.option_list[:] | ||
| 125 | for i in self.option_groups: | ||
| 126 | res.extend(i.option_list) | ||
| 127 | |||
| 128 | return res | ||
| 129 | |||
| 130 | |||
| 131 | class ConfigOptionParser(CustomOptionParser): | ||
| 132 | """Custom option parser which updates its defaults by checking the | ||
| 133 | configuration files and environment variables""" | ||
| 134 | |||
| 135 | def __init__(self, *args, **kwargs): | ||
| 136 | self.name = kwargs.pop('name') | ||
| 137 | |||
| 138 | isolated = kwargs.pop("isolated", False) | ||
| 139 | self.config = Configuration(isolated) | ||
| 140 | |||
| 141 | assert self.name | ||
| 142 | optparse.OptionParser.__init__(self, *args, **kwargs) | ||
| 143 | |||
| 144 | def check_default(self, option, key, val): | ||
| 145 | try: | ||
| 146 | return option.check_value(key, val) | ||
| 147 | except optparse.OptionValueError as exc: | ||
| 148 | print("An error occurred during configuration: %s" % exc) | ||
| 149 | sys.exit(3) | ||
| 150 | |||
| 151 | def _get_ordered_configuration_items(self): | ||
| 152 | # Configuration gives keys in an unordered manner. Order them. | ||
| 153 | override_order = ["global", self.name, ":env:"] | ||
| 154 | |||
| 155 | # Pool the options into different groups | ||
| 156 | section_items = {name: [] for name in override_order} | ||
| 157 | for section_key, val in self.config.items(): | ||
| 158 | # ignore empty values | ||
| 159 | if not val: | ||
| 160 | logger.debug( | ||
| 161 | "Ignoring configuration key '%s' as it's value is empty.", | ||
| 162 | section_key | ||
| 163 | ) | ||
| 164 | continue | ||
| 165 | |||
| 166 | section, key = section_key.split(".", 1) | ||
| 167 | if section in override_order: | ||
| 168 | section_items[section].append((key, val)) | ||
| 169 | |||
| 170 | # Yield each group in their override order | ||
| 171 | for section in override_order: | ||
| 172 | for key, val in section_items[section]: | ||
| 173 | yield key, val | ||
| 174 | |||
| 175 | def _update_defaults(self, defaults): | ||
| 176 | """Updates the given defaults with values from the config files and | ||
| 177 | the environ. Does a little special handling for certain types of | ||
| 178 | options (lists).""" | ||
| 179 | |||
| 180 | # Accumulate complex default state. | ||
| 181 | self.values = optparse.Values(self.defaults) | ||
| 182 | late_eval = set() | ||
| 183 | # Then set the options with those values | ||
| 184 | for key, val in self._get_ordered_configuration_items(): | ||
| 185 | # '--' because configuration supports only long names | ||
| 186 | option = self.get_option('--' + key) | ||
| 187 | |||
| 188 | # Ignore options not present in this parser. E.g. non-globals put | ||
| 189 | # in [global] by users that want them to apply to all applicable | ||
| 190 | # commands. | ||
| 191 | if option is None: | ||
| 192 | continue | ||
| 193 | |||
| 194 | if option.action in ('store_true', 'store_false', 'count'): | ||
| 195 | val = strtobool(val) | ||
| 196 | elif option.action == 'append': | ||
| 197 | val = val.split() | ||
| 198 | val = [self.check_default(option, key, v) for v in val] | ||
| 199 | elif option.action == 'callback': | ||
| 200 | late_eval.add(option.dest) | ||
| 201 | opt_str = option.get_opt_string() | ||
| 202 | val = option.convert_value(opt_str, val) | ||
| 203 | # From take_action | ||
| 204 | args = option.callback_args or () | ||
| 205 | kwargs = option.callback_kwargs or {} | ||
| 206 | option.callback(option, opt_str, val, self, *args, **kwargs) | ||
| 207 | else: | ||
| 208 | val = self.check_default(option, key, val) | ||
| 209 | |||
| 210 | defaults[option.dest] = val | ||
| 211 | |||
| 212 | for key in late_eval: | ||
| 213 | defaults[key] = getattr(self.values, key) | ||
| 214 | self.values = None | ||
| 215 | return defaults | ||
| 216 | |||
| 217 | def get_default_values(self): | ||
| 218 | """Overriding to make updating the defaults after instantiation of | ||
| 219 | the option parser possible; _update_defaults() does the dirty work.""" | ||
| 220 | if not self.process_default_values: | ||
| 221 | # Old, pre-Optik 1.5 behaviour. | ||
| 222 | return optparse.Values(self.defaults) | ||
| 223 | |||
| 224 | # Load the configuration, or error out in case of an error | ||
| 225 | try: | ||
| 226 | self.config.load() | ||
| 227 | except ConfigurationError as err: | ||
| 228 | self.exit(2, err.args[0]) | ||
| 229 | |||
| 230 | defaults = self._update_defaults(self.defaults.copy()) # ours | ||
| 231 | for option in self._get_all_options(): | ||
| 232 | default = defaults.get(option.dest) | ||
| 233 | if isinstance(default, string_types): | ||
| 234 | opt_str = option.get_opt_string() | ||
| 235 | defaults[option.dest] = option.check_value(opt_str, default) | ||
| 236 | return optparse.Values(defaults) | ||
| 237 | |||
| 238 | def error(self, msg): | ||
| 239 | self.print_usage(sys.stderr) | ||
| 240 | self.exit(2, "%s\n" % msg) | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/build_env.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/build_env.py
new file mode 100644
index 0000000..8ad7735
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/build_env.py
| @@ -0,0 +1,92 @@ | |||
| 1 | """Build Environment used for isolation during sdist building | ||
| 2 | """ | ||
| 3 | |||
| 4 | import os | ||
| 5 | from distutils.sysconfig import get_python_lib | ||
| 6 | from sysconfig import get_paths | ||
| 7 | |||
| 8 | from pip._internal.utils.temp_dir import TempDirectory | ||
| 9 | |||
| 10 | |||
| 11 | class BuildEnvironment(object): | ||
| 12 | """Creates and manages an isolated environment to install build deps | ||
| 13 | """ | ||
| 14 | |||
| 15 | def __init__(self, no_clean): | ||
| 16 | self._temp_dir = TempDirectory(kind="build-env") | ||
| 17 | self._no_clean = no_clean | ||
| 18 | |||
| 19 | @property | ||
| 20 | def path(self): | ||
| 21 | return self._temp_dir.path | ||
| 22 | |||
| 23 | def __enter__(self): | ||
| 24 | self._temp_dir.create() | ||
| 25 | |||
| 26 | self.save_path = os.environ.get('PATH', None) | ||
| 27 | self.save_pythonpath = os.environ.get('PYTHONPATH', None) | ||
| 28 | self.save_nousersite = os.environ.get('PYTHONNOUSERSITE', None) | ||
| 29 | |||
| 30 | install_scheme = 'nt' if (os.name == 'nt') else 'posix_prefix' | ||
| 31 | install_dirs = get_paths(install_scheme, vars={ | ||
| 32 | 'base': self.path, | ||
| 33 | 'platbase': self.path, | ||
| 34 | }) | ||
| 35 | |||
| 36 | scripts = install_dirs['scripts'] | ||
| 37 | if self.save_path: | ||
| 38 | os.environ['PATH'] = scripts + os.pathsep + self.save_path | ||
| 39 | else: | ||
| 40 | os.environ['PATH'] = scripts + os.pathsep + os.defpath | ||
| 41 | |||
| 42 | # Note: prefer distutils' sysconfig to get the | ||
| 43 | # library paths so PyPy is correctly supported. | ||
| 44 | purelib = get_python_lib(plat_specific=0, prefix=self.path) | ||
| 45 | platlib = get_python_lib(plat_specific=1, prefix=self.path) | ||
| 46 | if purelib == platlib: | ||
| 47 | lib_dirs = purelib | ||
| 48 | else: | ||
| 49 | lib_dirs = purelib + os.pathsep + platlib | ||
| 50 | if self.save_pythonpath: | ||
| 51 | os.environ['PYTHONPATH'] = lib_dirs + os.pathsep + \ | ||
| 52 | self.save_pythonpath | ||
| 53 | else: | ||
| 54 | os.environ['PYTHONPATH'] = lib_dirs | ||
| 55 | |||
| 56 | os.environ['PYTHONNOUSERSITE'] = '1' | ||
| 57 | |||
| 58 | return self.path | ||
| 59 | |||
| 60 | def __exit__(self, exc_type, exc_val, exc_tb): | ||
| 61 | if not self._no_clean: | ||
| 62 | self._temp_dir.cleanup() | ||
| 63 | |||
| 64 | def restore_var(varname, old_value): | ||
| 65 | if old_value is None: | ||
| 66 | os.environ.pop(varname, None) | ||
| 67 | else: | ||
| 68 | os.environ[varname] = old_value | ||
| 69 | |||
| 70 | restore_var('PATH', self.save_path) | ||
| 71 | restore_var('PYTHONPATH', self.save_pythonpath) | ||
| 72 | restore_var('PYTHONNOUSERSITE', self.save_nousersite) | ||
| 73 | |||
| 74 | def cleanup(self): | ||
| 75 | self._temp_dir.cleanup() | ||
| 76 | |||
| 77 | |||
| 78 | class NoOpBuildEnvironment(BuildEnvironment): | ||
| 79 | """A no-op drop-in replacement for BuildEnvironment | ||
| 80 | """ | ||
| 81 | |||
| 82 | def __init__(self, no_clean): | ||
| 83 | pass | ||
| 84 | |||
| 85 | def __enter__(self): | ||
| 86 | pass | ||
| 87 | |||
| 88 | def __exit__(self, exc_type, exc_val, exc_tb): | ||
| 89 | pass | ||
| 90 | |||
| 91 | def cleanup(self): | ||
| 92 | pass | ||
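`BuildEnvironment` is meant to be used as a context manager: entering creates the temporary prefix and points `PATH`/`PYTHONPATH` at it (with user site-packages disabled via `PYTHONNOUSERSITE`); exiting restores the saved variables and, unless `no_clean` was given, removes the directory. A minimal usage sketch:

```python
from pip._internal.build_env import BuildEnvironment

with BuildEnvironment(no_clean=False) as prefix:
    # Inside the block, child processes resolve scripts and libraries
    # from the isolated prefix first, ignoring user site-packages.
    print('installing build deps into', prefix)
# PATH/PYTHONPATH/PYTHONNOUSERSITE are restored and the dir removed here.
```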
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/cache.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/cache.py
new file mode 100644
index 0000000..5547d73
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/cache.py
| @@ -0,0 +1,202 @@ | |||
| 1 | """Cache Management | ||
| 2 | """ | ||
| 3 | |||
| 4 | import errno | ||
| 5 | import hashlib | ||
| 6 | import logging | ||
| 7 | import os | ||
| 8 | |||
| 9 | from pip._vendor.packaging.utils import canonicalize_name | ||
| 10 | |||
| 11 | from pip._internal import index | ||
| 12 | from pip._internal.compat import expanduser | ||
| 13 | from pip._internal.download import path_to_url | ||
| 14 | from pip._internal.utils.temp_dir import TempDirectory | ||
| 15 | from pip._internal.wheel import InvalidWheelFilename, Wheel | ||
| 16 | |||
| 17 | logger = logging.getLogger(__name__) | ||
| 18 | |||
| 19 | |||
| 20 | class Cache(object): | ||
| 21 | """An abstract class - provides cache directories for data from links | ||
| 22 | |||
| 23 | |||
| 24 | :param cache_dir: The root of the cache. | ||
| 25 | :param format_control: A pip.index.FormatControl object to limit | ||
| 26 | binaries being read from the cache. | ||
| 27 | :param allowed_formats: which formats of files the cache should store. | ||
| 28 | ('binary' and 'source' are the only allowed values) | ||
| 29 | """ | ||
| 30 | |||
| 31 | def __init__(self, cache_dir, format_control, allowed_formats): | ||
| 32 | super(Cache, self).__init__() | ||
| 33 | self.cache_dir = expanduser(cache_dir) if cache_dir else None | ||
| 34 | self.format_control = format_control | ||
| 35 | self.allowed_formats = allowed_formats | ||
| 36 | |||
| 37 | _valid_formats = {"source", "binary"} | ||
| 38 | assert self.allowed_formats.union(_valid_formats) == _valid_formats | ||
| 39 | |||
| 40 | def _get_cache_path_parts(self, link): | ||
| 41 | """Get parts of part that must be os.path.joined with cache_dir | ||
| 42 | """ | ||
| 43 | |||
| 44 | # We want to generate a url to use as our cache key; we don't want to | ||
| 45 | # just re-use the URL because it might have other items in the fragment | ||
| 46 | # and we don't care about those. | ||
| 47 | key_parts = [link.url_without_fragment] | ||
| 48 | if link.hash_name is not None and link.hash is not None: | ||
| 49 | key_parts.append("=".join([link.hash_name, link.hash])) | ||
| 50 | key_url = "#".join(key_parts) | ||
| 51 | |||
| 52 | # Encode our key url with sha224; we use sha224 because it has similar | ||
| 53 | # security properties to sha256, but a shorter total output (and is | ||
| 54 | # thus less secure). However, the difference doesn't matter much for | ||
| 55 | # our use case here. | ||
| 56 | hashed = hashlib.sha224(key_url.encode()).hexdigest() | ||
| 57 | |||
| 58 | # We want to nest the directories to avoid creating a huge number of | ||
| 59 | # top-level directories, since some filesystems limit how many entries | ||
| 60 | # a single directory can hold. | ||
| 61 | parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]] | ||
| 62 | |||
| 63 | return parts | ||
| 64 | |||
| 65 | def _get_candidates(self, link, package_name): | ||
| 66 | can_not_cache = ( | ||
| 67 | not self.cache_dir or | ||
| 68 | not package_name or | ||
| 69 | not link | ||
| 70 | ) | ||
| 71 | if can_not_cache: | ||
| 72 | return [] | ||
| 73 | |||
| 74 | canonical_name = canonicalize_name(package_name) | ||
| 75 | formats = index.fmt_ctl_formats( | ||
| 76 | self.format_control, canonical_name | ||
| 77 | ) | ||
| 78 | if not self.allowed_formats.intersection(formats): | ||
| 79 | return [] | ||
| 80 | |||
| 81 | root = self.get_path_for_link(link) | ||
| 82 | try: | ||
| 83 | return os.listdir(root) | ||
| 84 | except OSError as err: | ||
| 85 | if err.errno in {errno.ENOENT, errno.ENOTDIR}: | ||
| 86 | return [] | ||
| 87 | raise | ||
| 88 | |||
| 89 | def get_path_for_link(self, link): | ||
| 90 | """Return a directory to store cached items in for link. | ||
| 91 | """ | ||
| 92 | raise NotImplementedError() | ||
| 93 | |||
| 94 | def get(self, link, package_name): | ||
| 95 | """Returns a link to a cached item if it exists, otherwise returns the | ||
| 96 | passed link. | ||
| 97 | """ | ||
| 98 | raise NotImplementedError() | ||
| 99 | |||
| 100 | def _link_for_candidate(self, link, candidate): | ||
| 101 | root = self.get_path_for_link(link) | ||
| 102 | path = os.path.join(root, candidate) | ||
| 103 | |||
| 104 | return index.Link(path_to_url(path)) | ||
| 105 | |||
| 106 | def cleanup(self): | ||
| 107 | pass | ||
| 108 | |||
| 109 | |||
| 110 | class SimpleWheelCache(Cache): | ||
| 111 | """A cache of wheels for future installs. | ||
| 112 | """ | ||
| 113 | |||
| 114 | def __init__(self, cache_dir, format_control): | ||
| 115 | super(SimpleWheelCache, self).__init__( | ||
| 116 | cache_dir, format_control, {"binary"} | ||
| 117 | ) | ||
| 118 | |||
| 119 | def get_path_for_link(self, link): | ||
| 120 | """Return a directory to store cached wheels for link | ||
| 121 | |||
| 122 | Because there are M wheels for any one sdist, we provide a directory | ||
| 123 | to cache them in, and then consult that directory when looking up | ||
| 124 | cache hits. | ||
| 125 | |||
| 126 | We only insert things into the cache if they have plausible version | ||
| 127 | numbers, so that we don't contaminate the cache with things that were | ||
| 128 | not unique. E.g. ./package might have dozens of installs done for it | ||
| 129 | and build a version of 0.0...and if we built and cached a wheel, we'd | ||
| 130 | end up using the same wheel even if the source has been edited. | ||
| 131 | |||
| 132 | :param link: The link of the sdist for which this will cache wheels. | ||
| 133 | """ | ||
| 134 | parts = self._get_cache_path_parts(link) | ||
| 135 | |||
| 136 | # Store wheels within the root cache_dir | ||
| 137 | return os.path.join(self.cache_dir, "wheels", *parts) | ||
| 138 | |||
| 139 | def get(self, link, package_name): | ||
| 140 | candidates = [] | ||
| 141 | |||
| 142 | for wheel_name in self._get_candidates(link, package_name): | ||
| 143 | try: | ||
| 144 | wheel = Wheel(wheel_name) | ||
| 145 | except InvalidWheelFilename: | ||
| 146 | continue | ||
| 147 | if not wheel.supported(): | ||
| 148 | # Built for a different python/arch/etc | ||
| 149 | continue | ||
| 150 | candidates.append((wheel.support_index_min(), wheel_name)) | ||
| 151 | |||
| 152 | if not candidates: | ||
| 153 | return link | ||
| 154 | |||
| 155 | return self._link_for_candidate(link, min(candidates)[1]) | ||
| 156 | |||
| 157 | |||
| 158 | class EphemWheelCache(SimpleWheelCache): | ||
| 159 | """A SimpleWheelCache that creates it's own temporary cache directory | ||
| 160 | """ | ||
| 161 | |||
| 162 | def __init__(self, format_control): | ||
| 163 | self._temp_dir = TempDirectory(kind="ephem-wheel-cache") | ||
| 164 | self._temp_dir.create() | ||
| 165 | |||
| 166 | super(EphemWheelCache, self).__init__( | ||
| 167 | self._temp_dir.path, format_control | ||
| 168 | ) | ||
| 169 | |||
| 170 | def cleanup(self): | ||
| 171 | self._temp_dir.cleanup() | ||
| 172 | |||
| 173 | |||
| 174 | class WheelCache(Cache): | ||
| 175 | """Wraps EphemWheelCache and SimpleWheelCache into a single Cache | ||
| 176 | |||
| 177 | This Cache allows for graceful degradation, falling back to the ephem | ||
| 178 | wheel cache when a link is not found in the simple wheel cache. | ||
| 179 | """ | ||
| 180 | |||
| 181 | def __init__(self, cache_dir, format_control): | ||
| 182 | super(WheelCache, self).__init__( | ||
| 183 | cache_dir, format_control, {'binary'} | ||
| 184 | ) | ||
| 185 | self._wheel_cache = SimpleWheelCache(cache_dir, format_control) | ||
| 186 | self._ephem_cache = EphemWheelCache(format_control) | ||
| 187 | |||
| 188 | def get_path_for_link(self, link): | ||
| 189 | return self._wheel_cache.get_path_for_link(link) | ||
| 190 | |||
| 191 | def get_ephem_path_for_link(self, link): | ||
| 192 | return self._ephem_cache.get_path_for_link(link) | ||
| 193 | |||
| 194 | def get(self, link, package_name): | ||
| 195 | retval = self._wheel_cache.get(link, package_name) | ||
| 196 | if retval is link: | ||
| 197 | retval = self._ephem_cache.get(link, package_name) | ||
| 198 | return retval | ||
| 199 | |||
| 200 | def cleanup(self): | ||
| 201 | self._wheel_cache.cleanup() | ||
| 202 | self._ephem_cache.cleanup() | ||
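The cache key is the fragment-stripped URL (plus any pinned hash), digested with SHA-224 and split into nested two-character directories so no single directory grows too large. The bucketing is easy to reproduce standalone (the URL below is just an example):

```python
import hashlib
import os

key_url = "https://files.example.org/packages/demo-1.0.tar.gz"  # example only
hashed = hashlib.sha224(key_url.encode()).hexdigest()
parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]

# SimpleWheelCache stores built wheels under <cache_dir>/wheels/aa/bb/cc/<rest>
print(os.path.join("~/.cache/pip", "wheels", *parts))
```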
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/cmdoptions.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/cmdoptions.py
new file mode 100644
index 0000000..58854e3
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/cmdoptions.py
| @@ -0,0 +1,609 @@ | |||
| 1 | """ | ||
| 2 | shared options and groups | ||
| 3 | |||
| 4 | The principle here is to define options once, but *not* instantiate them | ||
| 5 | globally. One reason being that options with action='append' can carry state | ||
| 6 | between parses. pip parses general options twice internally, and shouldn't | ||
| 7 | pass on state. To be consistent, all options will follow this design. | ||
| 8 | |||
| 9 | """ | ||
| 10 | from __future__ import absolute_import | ||
| 11 | |||
| 12 | import warnings | ||
| 13 | from functools import partial | ||
| 14 | from optparse import SUPPRESS_HELP, Option, OptionGroup | ||
| 15 | |||
| 16 | from pip._internal.index import ( | ||
| 17 | FormatControl, fmt_ctl_handle_mutual_exclude, fmt_ctl_no_binary, | ||
| 18 | ) | ||
| 19 | from pip._internal.locations import USER_CACHE_DIR, src_prefix | ||
| 20 | from pip._internal.models import PyPI | ||
| 21 | from pip._internal.utils.hashes import STRONG_HASHES | ||
| 22 | from pip._internal.utils.typing import MYPY_CHECK_RUNNING | ||
| 23 | from pip._internal.utils.ui import BAR_TYPES | ||
| 24 | |||
| 25 | if MYPY_CHECK_RUNNING: | ||
| 26 | from typing import Any | ||
| 27 | |||
| 28 | |||
| 29 | def make_option_group(group, parser): | ||
| 30 | """ | ||
| 31 | Return an OptionGroup object | ||
| 32 | group -- assumed to be dict with 'name' and 'options' keys | ||
| 33 | parser -- an optparse Parser | ||
| 34 | """ | ||
| 35 | option_group = OptionGroup(parser, group['name']) | ||
| 36 | for option in group['options']: | ||
| 37 | option_group.add_option(option()) | ||
| 38 | return option_group | ||
| 39 | |||
| 40 | |||
| 41 | def check_install_build_global(options, check_options=None): | ||
| 42 | """Disable wheels if per-setup.py call options are set. | ||
| 43 | |||
| 44 | :param options: The OptionParser options to update. | ||
| 45 | :param check_options: The options to check, if not supplied defaults to | ||
| 46 | options. | ||
| 47 | """ | ||
| 48 | if check_options is None: | ||
| 49 | check_options = options | ||
| 50 | |||
| 51 | def getname(n): | ||
| 52 | return getattr(check_options, n, None) | ||
| 53 | names = ["build_options", "global_options", "install_options"] | ||
| 54 | if any(map(getname, names)): | ||
| 55 | control = options.format_control | ||
| 56 | fmt_ctl_no_binary(control) | ||
| 57 | warnings.warn( | ||
| 58 | 'Disabling all use of wheels due to the use of --build-option ' | ||
| 59 | '/ --global-option / --install-option.', stacklevel=2, | ||
| 60 | ) | ||
| 61 | |||
| 62 | |||
| 63 | ########### | ||
| 64 | # options # | ||
| 65 | ########### | ||
| 66 | |||
| 67 | help_ = partial( | ||
| 68 | Option, | ||
| 69 | '-h', '--help', | ||
| 70 | dest='help', | ||
| 71 | action='help', | ||
| 72 | help='Show help.', | ||
| 73 | ) # type: Any | ||
| 74 | |||
| 75 | isolated_mode = partial( | ||
| 76 | Option, | ||
| 77 | "--isolated", | ||
| 78 | dest="isolated_mode", | ||
| 79 | action="store_true", | ||
| 80 | default=False, | ||
| 81 | help=( | ||
| 82 | "Run pip in an isolated mode, ignoring environment variables and user " | ||
| 83 | "configuration." | ||
| 84 | ), | ||
| 85 | ) | ||
| 86 | |||
| 87 | require_virtualenv = partial( | ||
| 88 | Option, | ||
| 89 | # Run only if inside a virtualenv, bail if not. | ||
| 90 | '--require-virtualenv', '--require-venv', | ||
| 91 | dest='require_venv', | ||
| 92 | action='store_true', | ||
| 93 | default=False, | ||
| 94 | help=SUPPRESS_HELP | ||
| 95 | ) # type: Any | ||
| 96 | |||
| 97 | verbose = partial( | ||
| 98 | Option, | ||
| 99 | '-v', '--verbose', | ||
| 100 | dest='verbose', | ||
| 101 | action='count', | ||
| 102 | default=0, | ||
| 103 | help='Give more output. Option is additive, and can be used up to 3 times.' | ||
| 104 | ) | ||
| 105 | |||
| 106 | no_color = partial( | ||
| 107 | Option, | ||
| 108 | '--no-color', | ||
| 109 | dest='no_color', | ||
| 110 | action='store_true', | ||
| 111 | default=False, | ||
| 112 | help="Suppress colored output", | ||
| 113 | ) | ||
| 114 | |||
| 115 | version = partial( | ||
| 116 | Option, | ||
| 117 | '-V', '--version', | ||
| 118 | dest='version', | ||
| 119 | action='store_true', | ||
| 120 | help='Show version and exit.', | ||
| 121 | ) # type: Any | ||
| 122 | |||
| 123 | quiet = partial( | ||
| 124 | Option, | ||
| 125 | '-q', '--quiet', | ||
| 126 | dest='quiet', | ||
| 127 | action='count', | ||
| 128 | default=0, | ||
| 129 | help=( | ||
| 130 | 'Give less output. Option is additive, and can be used up to 3' | ||
| 131 | ' times (corresponding to WARNING, ERROR, and CRITICAL logging' | ||
| 132 | ' levels).' | ||
| 133 | ), | ||
| 134 | ) # type: Any | ||
| 135 | |||
| 136 | progress_bar = partial( | ||
| 137 | Option, | ||
| 138 | '--progress-bar', | ||
| 139 | dest='progress_bar', | ||
| 140 | type='choice', | ||
| 141 | choices=list(BAR_TYPES.keys()), | ||
| 142 | default='on', | ||
| 143 | help=( | ||
| 144 | 'Specify type of progress to be displayed [' + | ||
| 145 | '|'.join(BAR_TYPES.keys()) + '] (default: %default)' | ||
| 146 | ), | ||
| 147 | ) # type: Any | ||
| 148 | |||
| 149 | log = partial( | ||
| 150 | Option, | ||
| 151 | "--log", "--log-file", "--local-log", | ||
| 152 | dest="log", | ||
| 153 | metavar="path", | ||
| 154 | help="Path to a verbose appending log." | ||
| 155 | ) # type: Any | ||
| 156 | |||
| 157 | no_input = partial( | ||
| 158 | Option, | ||
| 159 | # Don't ask for input | ||
| 160 | '--no-input', | ||
| 161 | dest='no_input', | ||
| 162 | action='store_true', | ||
| 163 | default=False, | ||
| 164 | help=SUPPRESS_HELP | ||
| 165 | ) # type: Any | ||
| 166 | |||
| 167 | proxy = partial( | ||
| 168 | Option, | ||
| 169 | '--proxy', | ||
| 170 | dest='proxy', | ||
| 171 | type='str', | ||
| 172 | default='', | ||
| 173 | help="Specify a proxy in the form [user:passwd@]proxy.server:port." | ||
| 174 | ) # type: Any | ||
| 175 | |||
| 176 | retries = partial( | ||
| 177 | Option, | ||
| 178 | '--retries', | ||
| 179 | dest='retries', | ||
| 180 | type='int', | ||
| 181 | default=5, | ||
| 182 | help="Maximum number of retries each connection should attempt " | ||
| 183 | "(default %default times).", | ||
| 184 | ) # type: Any | ||
| 185 | |||
| 186 | timeout = partial( | ||
| 187 | Option, | ||
| 188 | '--timeout', '--default-timeout', | ||
| 189 | metavar='sec', | ||
| 190 | dest='timeout', | ||
| 191 | type='float', | ||
| 192 | default=15, | ||
| 193 | help='Set the socket timeout (default %default seconds).', | ||
| 194 | ) # type: Any | ||
| 195 | |||
| 196 | skip_requirements_regex = partial( | ||
| 197 | Option, | ||
| 198 | # A regex to be used to skip requirements | ||
| 199 | '--skip-requirements-regex', | ||
| 200 | dest='skip_requirements_regex', | ||
| 201 | type='str', | ||
| 202 | default='', | ||
| 203 | help=SUPPRESS_HELP, | ||
| 204 | ) # type: Any | ||
| 205 | |||
| 206 | |||
| 207 | def exists_action(): | ||
| 208 | return Option( | ||
| 209 | # Option when path already exists | ||
| 210 | '--exists-action', | ||
| 211 | dest='exists_action', | ||
| 212 | type='choice', | ||
| 213 | choices=['s', 'i', 'w', 'b', 'a'], | ||
| 214 | default=[], | ||
| 215 | action='append', | ||
| 216 | metavar='action', | ||
| 217 | help="Default action when a path already exists: " | ||
| 218 | "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort.", | ||
| 219 | ) | ||
| 220 | |||
| 221 | |||
| 222 | cert = partial( | ||
| 223 | Option, | ||
| 224 | '--cert', | ||
| 225 | dest='cert', | ||
| 226 | type='str', | ||
| 227 | metavar='path', | ||
| 228 | help="Path to alternate CA bundle.", | ||
| 229 | ) # type: Any | ||
| 230 | |||
| 231 | client_cert = partial( | ||
| 232 | Option, | ||
| 233 | '--client-cert', | ||
| 234 | dest='client_cert', | ||
| 235 | type='str', | ||
| 236 | default=None, | ||
| 237 | metavar='path', | ||
| 238 | help="Path to SSL client certificate, a single file containing the " | ||
| 239 | "private key and the certificate in PEM format.", | ||
| 240 | ) # type: Any | ||
| 241 | |||
| 242 | index_url = partial( | ||
| 243 | Option, | ||
| 244 | '-i', '--index-url', '--pypi-url', | ||
| 245 | dest='index_url', | ||
| 246 | metavar='URL', | ||
| 247 | default=PyPI.simple_url, | ||
| 248 | help="Base URL of Python Package Index (default %default). " | ||
| 249 | "This should point to a repository compliant with PEP 503 " | ||
| 250 | "(the simple repository API) or a local directory laid out " | ||
| 251 | "in the same format.", | ||
| 252 | ) # type: Any | ||
| 253 | |||
| 254 | |||
| 255 | def extra_index_url(): | ||
| 256 | return Option( | ||
| 257 | '--extra-index-url', | ||
| 258 | dest='extra_index_urls', | ||
| 259 | metavar='URL', | ||
| 260 | action='append', | ||
| 261 | default=[], | ||
| 262 | help="Extra URLs of package indexes to use in addition to " | ||
| 263 | "--index-url. Should follow the same rules as " | ||
| 264 | "--index-url.", | ||
| 265 | ) | ||
| 266 | |||
| 267 | |||
| 268 | no_index = partial( | ||
| 269 | Option, | ||
| 270 | '--no-index', | ||
| 271 | dest='no_index', | ||
| 272 | action='store_true', | ||
| 273 | default=False, | ||
| 274 | help='Ignore package index (only looking at --find-links URLs instead).', | ||
| 275 | ) # type: Any | ||
| 276 | |||
| 277 | |||
| 278 | def find_links(): | ||
| 279 | return Option( | ||
| 280 | '-f', '--find-links', | ||
| 281 | dest='find_links', | ||
| 282 | action='append', | ||
| 283 | default=[], | ||
| 284 | metavar='url', | ||
| 285 | help="If a url or path to an html file, then parse for links to " | ||
| 286 | "archives. If a local path or file:// url that's a directory, " | ||
| 287 | "then look for archives in the directory listing.", | ||
| 288 | ) | ||
| 289 | |||
| 290 | |||
| 291 | def trusted_host(): | ||
| 292 | return Option( | ||
| 293 | "--trusted-host", | ||
| 294 | dest="trusted_hosts", | ||
| 295 | action="append", | ||
| 296 | metavar="HOSTNAME", | ||
| 297 | default=[], | ||
| 298 | help="Mark this host as trusted, even though it does not have valid " | ||
| 299 | "HTTPS, or has no HTTPS at all.", | ||
| 300 | ) | ||
| 301 | |||
| 302 | |||
| 303 | # Remove after 1.5 | ||
| 304 | process_dependency_links = partial( | ||
| 305 | Option, | ||
| 306 | "--process-dependency-links", | ||
| 307 | dest="process_dependency_links", | ||
| 308 | action="store_true", | ||
| 309 | default=False, | ||
| 310 | help="Enable the processing of dependency links.", | ||
| 311 | ) # type: Any | ||
| 312 | |||
| 313 | |||
| 314 | def constraints(): | ||
| 315 | return Option( | ||
| 316 | '-c', '--constraint', | ||
| 317 | dest='constraints', | ||
| 318 | action='append', | ||
| 319 | default=[], | ||
| 320 | metavar='file', | ||
| 321 | help='Constrain versions using the given constraints file. ' | ||
| 322 | 'This option can be used multiple times.' | ||
| 323 | ) | ||
| 324 | |||
| 325 | |||
| 326 | def requirements(): | ||
| 327 | return Option( | ||
| 328 | '-r', '--requirement', | ||
| 329 | dest='requirements', | ||
| 330 | action='append', | ||
| 331 | default=[], | ||
| 332 | metavar='file', | ||
| 333 | help='Install from the given requirements file. ' | ||
| 334 | 'This option can be used multiple times.' | ||
| 335 | ) | ||
| 336 | |||
| 337 | |||
| 338 | def editable(): | ||
| 339 | return Option( | ||
| 340 | '-e', '--editable', | ||
| 341 | dest='editables', | ||
| 342 | action='append', | ||
| 343 | default=[], | ||
| 344 | metavar='path/url', | ||
| 345 | help=('Install a project in editable mode (i.e. setuptools ' | ||
| 346 | '"develop mode") from a local project path or a VCS url.'), | ||
| 347 | ) | ||
| 348 | |||
| 349 | |||
| 350 | src = partial( | ||
| 351 | Option, | ||
| 352 | '--src', '--source', '--source-dir', '--source-directory', | ||
| 353 | dest='src_dir', | ||
| 354 | metavar='dir', | ||
| 355 | default=src_prefix, | ||
| 356 | help='Directory to check out editable projects into. ' | ||
| 357 | 'The default in a virtualenv is "<venv path>/src". ' | ||
| 358 | 'The default for global installs is "<current dir>/src".' | ||
| 359 | ) # type: Any | ||
| 360 | |||
| 361 | |||
| 362 | def _get_format_control(values, option): | ||
| 363 | """Get a format_control object.""" | ||
| 364 | return getattr(values, option.dest) | ||
| 365 | |||
| 366 | |||
| 367 | def _handle_no_binary(option, opt_str, value, parser): | ||
| 368 | existing = getattr(parser.values, option.dest) | ||
| 369 | fmt_ctl_handle_mutual_exclude( | ||
| 370 | value, existing.no_binary, existing.only_binary, | ||
| 371 | ) | ||
| 372 | |||
| 373 | |||
| 374 | def _handle_only_binary(option, opt_str, value, parser): | ||
| 375 | existing = getattr(parser.values, option.dest) | ||
| 376 | fmt_ctl_handle_mutual_exclude( | ||
| 377 | value, existing.only_binary, existing.no_binary, | ||
| 378 | ) | ||
| 379 | |||
| 380 | |||
| 381 | def no_binary(): | ||
| 382 | return Option( | ||
| 383 | "--no-binary", dest="format_control", action="callback", | ||
| 384 | callback=_handle_no_binary, type="str", | ||
| 385 | default=FormatControl(set(), set()), | ||
| 386 | help="Do not use binary packages. Can be supplied multiple times, and " | ||
| 387 | "each time adds to the existing value. Accepts either :all: to " | ||
| 388 | "disable all binary packages, :none: to empty the set, or one or " | ||
| 389 | "more package names with commas between them. Note that some " | ||
| 390 | "packages are tricky to compile and may fail to install when " | ||
| 391 | "this option is used on them.", | ||
| 392 | ) | ||
| 393 | |||
| 394 | |||
| 395 | def only_binary(): | ||
| 396 | return Option( | ||
| 397 | "--only-binary", dest="format_control", action="callback", | ||
| 398 | callback=_handle_only_binary, type="str", | ||
| 399 | default=FormatControl(set(), set()), | ||
| 400 | help="Do not use source packages. Can be supplied multiple times, and " | ||
| 401 | "each time adds to the existing value. Accepts either :all: to " | ||
| 402 | "disable all source packages, :none: to empty the set, or one or " | ||
| 403 | "more package names with commas between them. Packages without " | ||
| 404 | "binary distributions will fail to install when this option is " | ||
| 405 | "used on them.", | ||
| 406 | ) | ||
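Both --no-binary and --only-binary write into the same FormatControl value, and the paired callbacks above keep its two sets mutually exclusive. Below is a simplified, self-contained sketch of that behaviour; the names are illustrative, and the real fmt_ctl_handle_mutual_exclude has extra handling for entries that follow ':all:'.

```python
def handle_mutual_exclude(value, target, other):
    # Each comma-separated entry moves a name from one set to the other;
    # ':all:' claims everything for `target`, ':none:' empties it.
    for name in value.split(','):
        if name == ':all:':
            other.clear()
            target.clear()
            target.add(':all:')
        elif name == ':none:':
            target.clear()
        else:
            other.discard(name)
            target.add(name)

no_binary, only_binary = set(), set()
handle_mutual_exclude('numpy,scipy', no_binary, only_binary)  # --no-binary
handle_mutual_exclude('numpy', only_binary, no_binary)        # --only-binary
print(no_binary, only_binary)  # {'scipy'} {'numpy'}
```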
| 407 | |||
| 408 | |||
| 409 | cache_dir = partial( | ||
| 410 | Option, | ||
| 411 | "--cache-dir", | ||
| 412 | dest="cache_dir", | ||
| 413 | default=USER_CACHE_DIR, | ||
| 414 | metavar="dir", | ||
| 415 | help="Store the cache data in <dir>." | ||
| 416 | ) | ||
| 417 | |||
| 418 | no_cache = partial( | ||
| 419 | Option, | ||
| 420 | "--no-cache-dir", | ||
| 421 | dest="cache_dir", | ||
| 422 | action="store_false", | ||
| 423 | help="Disable the cache.", | ||
| 424 | ) | ||
| 425 | |||
| 426 | no_deps = partial( | ||
| 427 | Option, | ||
| 428 | '--no-deps', '--no-dependencies', | ||
| 429 | dest='ignore_dependencies', | ||
| 430 | action='store_true', | ||
| 431 | default=False, | ||
| 432 | help="Don't install package dependencies.", | ||
| 433 | ) # type: Any | ||
| 434 | |||
| 435 | build_dir = partial( | ||
| 436 | Option, | ||
| 437 | '-b', '--build', '--build-dir', '--build-directory', | ||
| 438 | dest='build_dir', | ||
| 439 | metavar='dir', | ||
| 440 | help='Directory to unpack packages into and build in. Note that ' | ||
| 441 | 'an initial build still takes place in a temporary directory. ' | ||
| 442 | 'The location of temporary directories can be controlled by setting ' | ||
| 443 | 'the TMPDIR environment variable (TEMP on Windows) appropriately. ' | ||
| 444 | 'When passed, build directories are not cleaned in case of failures.' | ||
| 445 | ) # type: Any | ||
| 446 | |||
| 447 | ignore_requires_python = partial( | ||
| 448 | Option, | ||
| 449 | '--ignore-requires-python', | ||
| 450 | dest='ignore_requires_python', | ||
| 451 | action='store_true', | ||
| 452 | help='Ignore the Requires-Python information.' | ||
| 453 | ) # type: Any | ||
| 454 | |||
| 455 | no_build_isolation = partial( | ||
| 456 | Option, | ||
| 457 | '--no-build-isolation', | ||
| 458 | dest='build_isolation', | ||
| 459 | action='store_false', | ||
| 460 | default=True, | ||
| 461 | help='Disable isolation when building a modern source distribution. ' | ||
| 462 | 'Build dependencies specified by PEP 518 must be already installed ' | ||
| 463 | 'if this option is used.' | ||
| 464 | ) # type: Any | ||
| 465 | |||
| 466 | install_options = partial( | ||
| 467 | Option, | ||
| 468 | '--install-option', | ||
| 469 | dest='install_options', | ||
| 470 | action='append', | ||
| 471 | metavar='options', | ||
| 472 | help="Extra arguments to be supplied to the setup.py install " | ||
| 473 | "command (use like --install-option=\"--install-scripts=/usr/local/" | ||
| 474 | "bin\"). Use multiple --install-option options to pass multiple " | ||
| 475 | "options to setup.py install. If you are using an option with a " | ||
| 476 | "directory path, be sure to use an absolute path.", | ||
| 477 | ) # type: Any | ||
| 478 | |||
| 479 | global_options = partial( | ||
| 480 | Option, | ||
| 481 | '--global-option', | ||
| 482 | dest='global_options', | ||
| 483 | action='append', | ||
| 484 | metavar='options', | ||
| 485 | help="Extra global options to be supplied to the setup.py " | ||
| 486 | "call before the install command.", | ||
| 487 | ) # type: Any | ||
| 488 | |||
| 489 | no_clean = partial( | ||
| 490 | Option, | ||
| 491 | '--no-clean', | ||
| 492 | action='store_true', | ||
| 493 | default=False, | ||
| 494 | help="Don't clean up build directories." | ||
| 495 | ) # type: Any | ||
| 496 | |||
| 497 | pre = partial( | ||
| 498 | Option, | ||
| 499 | '--pre', | ||
| 500 | action='store_true', | ||
| 501 | default=False, | ||
| 502 | help="Include pre-release and development versions. By default, " | ||
| 503 | "pip only finds stable versions.", | ||
| 504 | ) # type: Any | ||
| 505 | |||
| 506 | disable_pip_version_check = partial( | ||
| 507 | Option, | ||
| 508 | "--disable-pip-version-check", | ||
| 509 | dest="disable_pip_version_check", | ||
| 510 | action="store_true", | ||
| 511 | default=False, | ||
| 512 | help="Don't periodically check PyPI to determine whether a new version " | ||
| 513 | "of pip is available for download. Implied with --no-index.", | ||
| 514 | ) # type: Any | ||
| 515 | |||
| 516 | |||
| 517 | # Deprecated, Remove later | ||
| 518 | always_unzip = partial( | ||
| 519 | Option, | ||
| 520 | '-Z', '--always-unzip', | ||
| 521 | dest='always_unzip', | ||
| 522 | action='store_true', | ||
| 523 | help=SUPPRESS_HELP, | ||
| 524 | ) # type: Any | ||
| 525 | |||
| 526 | |||
| 527 | def _merge_hash(option, opt_str, value, parser): | ||
| 528 | """Given a value spelled "algo:digest", append the digest to a list | ||
| 529 | pointed to in a dict by the algo name.""" | ||
| 530 | if not parser.values.hashes: | ||
| 531 | parser.values.hashes = {} | ||
| 532 | try: | ||
| 533 | algo, digest = value.split(':', 1) | ||
| 534 | except ValueError: | ||
| 535 | parser.error('Arguments to %s must be a hash name ' | ||
| 536 | 'followed by a value, like --hash=sha256:abcde...' % | ||
| 537 | opt_str) | ||
| 538 | if algo not in STRONG_HASHES: | ||
| 539 | parser.error('Allowed hash algorithms for %s are %s.' % | ||
| 540 | (opt_str, ', '.join(STRONG_HASHES))) | ||
| 541 | parser.values.hashes.setdefault(algo, []).append(digest) | ||
| 542 | |||
| 543 | |||
| 544 | hash = partial( | ||
| 545 | Option, | ||
| 546 | '--hash', | ||
| 547 | # Hash values eventually end up in InstallRequirement.hashes due to | ||
| 548 | # __dict__ copying in process_line(). | ||
| 549 | dest='hashes', | ||
| 550 | action='callback', | ||
| 551 | callback=_merge_hash, | ||
| 552 | type='string', | ||
| 553 | help="Verify that the package's archive matches this " | ||
| 554 | 'hash before installing. Example: --hash=sha256:abcdef...', | ||
| 555 | ) # type: Any | ||
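The --hash option above relies on optparse's 'callback' action to accumulate repeated values. A minimal standalone demonstration of the same idiom, without _merge_hash's validation against STRONG_HASHES:

```python
from optparse import OptionParser

def merge_hash(option, opt_str, value, parser):
    # Accumulate "algo:digest" pairs into a dict on parser.values.
    if parser.values.hashes is None:
        parser.values.hashes = {}
    algo, digest = value.split(':', 1)
    parser.values.hashes.setdefault(algo, []).append(digest)

parser = OptionParser()
parser.add_option('--hash', dest='hashes', action='callback',
                  callback=merge_hash, type='string', default=None)
opts, _ = parser.parse_args(['--hash=sha256:abc', '--hash=sha256:def'])
print(opts.hashes)  # {'sha256': ['abc', 'def']}
```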
| 556 | |||
| 557 | |||
| 558 | require_hashes = partial( | ||
| 559 | Option, | ||
| 560 | '--require-hashes', | ||
| 561 | dest='require_hashes', | ||
| 562 | action='store_true', | ||
| 563 | default=False, | ||
| 564 | help='Require a hash to check each requirement against, for ' | ||
| 565 | 'repeatable installs. This option is implied when any package in a ' | ||
| 566 | 'requirements file has a --hash option.', | ||
| 567 | ) # type: Any | ||
| 568 | |||
| 569 | |||
| 570 | ########## | ||
| 571 | # groups # | ||
| 572 | ########## | ||
| 573 | |||
| 574 | general_group = { | ||
| 575 | 'name': 'General Options', | ||
| 576 | 'options': [ | ||
| 577 | help_, | ||
| 578 | isolated_mode, | ||
| 579 | require_virtualenv, | ||
| 580 | verbose, | ||
| 581 | version, | ||
| 582 | quiet, | ||
| 583 | log, | ||
| 584 | no_input, | ||
| 585 | proxy, | ||
| 586 | retries, | ||
| 587 | timeout, | ||
| 588 | skip_requirements_regex, | ||
| 589 | exists_action, | ||
| 590 | trusted_host, | ||
| 591 | cert, | ||
| 592 | client_cert, | ||
| 593 | cache_dir, | ||
| 594 | no_cache, | ||
| 595 | disable_pip_version_check, | ||
| 596 | no_color, | ||
| 597 | ] | ||
| 598 | } | ||
| 599 | |||
| 600 | index_group = { | ||
| 601 | 'name': 'Package Index Options', | ||
| 602 | 'options': [ | ||
| 603 | index_url, | ||
| 604 | extra_index_url, | ||
| 605 | no_index, | ||
| 606 | find_links, | ||
| 607 | process_dependency_links, | ||
| 608 | ] | ||
| 609 | } | ||
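A note on the pattern running through this module: every option is a functools.partial around optparse.Option, so each command instantiates fresh, unshared Option objects, and the group dicts above are consumed by a helper that turns them into an OptionGroup. A compact sketch of the idea (make_option_group here mirrors the helper pip defines alongside these groups):

```python
from functools import partial
from optparse import Option, OptionGroup, OptionParser

verbose = partial(Option, '-v', '--verbose', dest='verbose',
                  action='count', default=0, help='Give more output.')

no_color = partial(Option, '--no-color', dest='no_color',
                   action='store_true', default=False,
                   help='Suppress colored output')

general_group = {'name': 'General Options', 'options': [verbose, no_color]}

def make_option_group(group, parser):
    # Call each factory so every parser gets its own Option instances.
    option_group = OptionGroup(parser, group['name'])
    for option in group['options']:
        option_group.add_option(option())
    return option_group

parser = OptionParser()
parser.add_option_group(make_option_group(general_group, parser))
opts, _ = parser.parse_args(['-vv', '--no-color'])
print(opts.verbose, opts.no_color)  # 2 True
```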
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/__init__.py new file mode 100644 index 0000000..d44e6f1 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/__init__.py | |||
| @@ -0,0 +1,79 @@ | |||
| 1 | """ | ||
| 2 | Package containing all pip commands | ||
| 3 | """ | ||
| 4 | from __future__ import absolute_import | ||
| 5 | |||
| 6 | from pip._internal.commands.completion import CompletionCommand | ||
| 7 | from pip._internal.commands.configuration import ConfigurationCommand | ||
| 8 | from pip._internal.commands.download import DownloadCommand | ||
| 9 | from pip._internal.commands.freeze import FreezeCommand | ||
| 10 | from pip._internal.commands.hash import HashCommand | ||
| 11 | from pip._internal.commands.help import HelpCommand | ||
| 12 | from pip._internal.commands.list import ListCommand | ||
| 13 | from pip._internal.commands.check import CheckCommand | ||
| 14 | from pip._internal.commands.search import SearchCommand | ||
| 15 | from pip._internal.commands.show import ShowCommand | ||
| 16 | from pip._internal.commands.install import InstallCommand | ||
| 17 | from pip._internal.commands.uninstall import UninstallCommand | ||
| 18 | from pip._internal.commands.wheel import WheelCommand | ||
| 19 | |||
| 20 | from pip._internal.utils.typing import MYPY_CHECK_RUNNING | ||
| 21 | |||
| 22 | if MYPY_CHECK_RUNNING: | ||
| 23 | from typing import List, Type | ||
| 24 | from pip._internal.basecommand import Command | ||
| 25 | |||
| 26 | commands_order = [ | ||
| 27 | InstallCommand, | ||
| 28 | DownloadCommand, | ||
| 29 | UninstallCommand, | ||
| 30 | FreezeCommand, | ||
| 31 | ListCommand, | ||
| 32 | ShowCommand, | ||
| 33 | CheckCommand, | ||
| 34 | ConfigurationCommand, | ||
| 35 | SearchCommand, | ||
| 36 | WheelCommand, | ||
| 37 | HashCommand, | ||
| 38 | CompletionCommand, | ||
| 39 | HelpCommand, | ||
| 40 | ] # type: List[Type[Command]] | ||
| 41 | |||
| 42 | commands_dict = {c.name: c for c in commands_order} | ||
| 43 | |||
| 44 | |||
| 45 | def get_summaries(ordered=True): | ||
| 46 | """Yields sorted (command name, command summary) tuples.""" | ||
| 47 | |||
| 48 | if ordered: | ||
| 49 | cmditems = _sort_commands(commands_dict, commands_order) | ||
| 50 | else: | ||
| 51 | cmditems = commands_dict.items() | ||
| 52 | |||
| 53 | for name, command_class in cmditems: | ||
| 54 | yield (name, command_class.summary) | ||
| 55 | |||
| 56 | |||
| 57 | def get_similar_commands(name): | ||
| 58 | """Command name auto-correct.""" | ||
| 59 | from difflib import get_close_matches | ||
| 60 | |||
| 61 | name = name.lower() | ||
| 62 | |||
| 63 | close_commands = get_close_matches(name, commands_dict.keys()) | ||
| 64 | |||
| 65 | if close_commands: | ||
| 66 | return close_commands[0] | ||
| 67 | else: | ||
| 68 | return False | ||
| 69 | |||
| 70 | |||
| 71 | def _sort_commands(cmddict, order): | ||
| 72 | def keyfn(key): | ||
| 73 | try: | ||
| 74 | return order.index(key[1]) | ||
| 75 | except ValueError: | ||
| 76 | # unordered items should come last | ||
| 77 | return 0xff | ||
| 78 | |||
| 79 | return sorted(cmddict.items(), key=keyfn) | ||
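get_similar_commands above is a thin wrapper over difflib.get_close_matches, which ranks candidates by similarity ratio and applies a cutoff (0.6 by default). A standalone illustration using the real command names:

```python
from difflib import get_close_matches

commands = ['install', 'download', 'uninstall', 'freeze', 'list', 'show']

def suggest(name):
    matches = get_close_matches(name.lower(), commands)
    return matches[0] if matches else None

print(suggest('Instal'))  # -> 'install'
print(suggest('xyz'))     # -> None (below the similarity cutoff)
```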
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/check.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/check.py new file mode 100644 index 0000000..b1bf38a --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/check.py | |||
| @@ -0,0 +1,42 @@ | |||
| 1 | import logging | ||
| 2 | |||
| 3 | from pip._internal.basecommand import Command | ||
| 4 | from pip._internal.operations.check import ( | ||
| 5 | check_package_set, create_package_set_from_installed, | ||
| 6 | ) | ||
| 7 | from pip._internal.utils.misc import get_installed_distributions | ||
| 8 | |||
| 9 | logger = logging.getLogger(__name__) | ||
| 10 | |||
| 11 | |||
| 12 | class CheckCommand(Command): | ||
| 13 | """Verify installed packages have compatible dependencies.""" | ||
| 14 | name = 'check' | ||
| 15 | usage = """ | ||
| 16 | %prog [options]""" | ||
| 17 | summary = 'Verify installed packages have compatible dependencies.' | ||
| 18 | |||
| 19 | def run(self, options, args): | ||
| 20 | package_set = create_package_set_from_installed() | ||
| 21 | missing, conflicting = check_package_set(package_set) | ||
| 22 | |||
| 23 | for project_name in missing: | ||
| 24 | version = package_set[project_name].version | ||
| 25 | for dependency in missing[project_name]: | ||
| 26 | logger.info( | ||
| 27 | "%s %s requires %s, which is not installed.", | ||
| 28 | project_name, version, dependency[0], | ||
| 29 | ) | ||
| 30 | |||
| 31 | for project_name in conflicting: | ||
| 32 | version = package_set[project_name].version | ||
| 33 | for dep_name, dep_version, req in conflicting[project_name]: | ||
| 34 | logger.info( | ||
| 35 | "%s %s has requirement %s, but you have %s %s.", | ||
| 36 | project_name, version, req, dep_name, dep_version, | ||
| 37 | ) | ||
| 38 | |||
| 39 | if missing or conflicting: | ||
| 40 | return 1 | ||
| 41 | else: | ||
| 42 | logger.info("No broken requirements found.") | ||
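CheckCommand.run reports problems through the logger and signals them via its return value: 1 for a broken environment, success otherwise. A toy restatement of the missing-dependency loop over hand-built data:

```python
# Hypothetical data: name -> (version, [missing deps]); the real command
# builds this from the installed distribution set.
package_set = {'pkg-a': ('1.0', ['pkg-b'])}

problems = False
for name, (version, missing) in sorted(package_set.items()):
    for dep in missing:
        problems = True
        print("%s %s requires %s, which is not installed." % (name, version, dep))

print(1 if problems else 0)  # the command's exit status
```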
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/completion.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/completion.py new file mode 100644 index 0000000..8da1e83 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/completion.py | |||
| @@ -0,0 +1,94 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | |||
| 3 | import sys | ||
| 4 | import textwrap | ||
| 5 | |||
| 6 | from pip._internal.basecommand import Command | ||
| 7 | from pip._internal.utils.misc import get_prog | ||
| 8 | |||
| 9 | BASE_COMPLETION = """ | ||
| 10 | # pip %(shell)s completion start%(script)s# pip %(shell)s completion end | ||
| 11 | """ | ||
| 12 | |||
| 13 | COMPLETION_SCRIPTS = { | ||
| 14 | 'bash': """ | ||
| 15 | _pip_completion() | ||
| 16 | { | ||
| 17 | COMPREPLY=( $( COMP_WORDS="${COMP_WORDS[*]}" \\ | ||
| 18 | COMP_CWORD=$COMP_CWORD \\ | ||
| 19 | PIP_AUTO_COMPLETE=1 $1 ) ) | ||
| 20 | } | ||
| 21 | complete -o default -F _pip_completion %(prog)s | ||
| 22 | """, | ||
| 23 | 'zsh': """ | ||
| 24 | function _pip_completion { | ||
| 25 | local words cword | ||
| 26 | read -Ac words | ||
| 27 | read -cn cword | ||
| 28 | reply=( $( COMP_WORDS="$words[*]" \\ | ||
| 29 | COMP_CWORD=$(( cword-1 )) \\ | ||
| 30 | PIP_AUTO_COMPLETE=1 $words[1] ) ) | ||
| 31 | } | ||
| 32 | compctl -K _pip_completion %(prog)s | ||
| 33 | """, | ||
| 34 | 'fish': """ | ||
| 35 | function __fish_complete_pip | ||
| 36 | set -lx COMP_WORDS (commandline -o) "" | ||
| 37 | set -lx COMP_CWORD ( \\ | ||
| 38 | math (contains -i -- (commandline -t) $COMP_WORDS)-1 \\ | ||
| 39 | ) | ||
| 40 | set -lx PIP_AUTO_COMPLETE 1 | ||
| 41 | string split \\ -- (eval $COMP_WORDS[1]) | ||
| 42 | end | ||
| 43 | complete -fa "(__fish_complete_pip)" -c %(prog)s | ||
| 44 | """, | ||
| 45 | } | ||
| 46 | |||
| 47 | |||
| 48 | class CompletionCommand(Command): | ||
| 49 | """A helper command to be used for command completion.""" | ||
| 50 | name = 'completion' | ||
| 51 | summary = 'A helper command used for command completion.' | ||
| 52 | ignore_require_venv = True | ||
| 53 | |||
| 54 | def __init__(self, *args, **kw): | ||
| 55 | super(CompletionCommand, self).__init__(*args, **kw) | ||
| 56 | |||
| 57 | cmd_opts = self.cmd_opts | ||
| 58 | |||
| 59 | cmd_opts.add_option( | ||
| 60 | '--bash', '-b', | ||
| 61 | action='store_const', | ||
| 62 | const='bash', | ||
| 63 | dest='shell', | ||
| 64 | help='Emit completion code for bash') | ||
| 65 | cmd_opts.add_option( | ||
| 66 | '--zsh', '-z', | ||
| 67 | action='store_const', | ||
| 68 | const='zsh', | ||
| 69 | dest='shell', | ||
| 70 | help='Emit completion code for zsh') | ||
| 71 | cmd_opts.add_option( | ||
| 72 | '--fish', '-f', | ||
| 73 | action='store_const', | ||
| 74 | const='fish', | ||
| 75 | dest='shell', | ||
| 76 | help='Emit completion code for fish') | ||
| 77 | |||
| 78 | self.parser.insert_option_group(0, cmd_opts) | ||
| 79 | |||
| 80 | def run(self, options, args): | ||
| 81 | """Prints the completion code of the given shell""" | ||
| 82 | shells = COMPLETION_SCRIPTS.keys() | ||
| 83 | shell_options = ['--' + shell for shell in sorted(shells)] | ||
| 84 | if options.shell in shells: | ||
| 85 | script = textwrap.dedent( | ||
| 86 | COMPLETION_SCRIPTS.get(options.shell, '') % { | ||
| 87 | 'prog': get_prog(), | ||
| 88 | } | ||
| 89 | ) | ||
| 90 | print(BASE_COMPLETION % {'script': script, 'shell': options.shell}) | ||
| 91 | else: | ||
| 92 | sys.stderr.write( | ||
| 93 | 'ERROR: You must pass %s\n' % ' or '.join(shell_options) | ||
| 94 | ) | ||
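The generated scripts above all follow one protocol: the shell exports COMP_WORDS, COMP_CWORD, and PIP_AUTO_COMPLETE=1, re-invokes the program, and splits its stdout into completion candidates. A sketch of the consuming side (not pip's actual autocomplete(); the candidate list and exit code are placeholders):

```python
import os
import sys

def autocomplete(candidates):
    # Only act when invoked by the completion scripts above.
    if os.environ.get('PIP_AUTO_COMPLETE') != '1':
        return
    # COMP_WORDS includes the program name at index 0; drop it.
    words = os.environ.get('COMP_WORDS', '').split()[1:]
    cword = int(os.environ.get('COMP_CWORD', '1'))
    try:
        current = words[cword - 1]
    except IndexError:
        current = ''
    print(' '.join(w for w in candidates if w.startswith(current)))
    sys.exit(0)

autocomplete(['install', 'download', 'freeze', 'list'])
```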
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/configuration.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/configuration.py new file mode 100644 index 0000000..e10d9a9 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/configuration.py | |||
| @@ -0,0 +1,227 @@ | |||
| 1 | import logging | ||
| 2 | import os | ||
| 3 | import subprocess | ||
| 4 | |||
| 5 | from pip._internal.basecommand import Command | ||
| 6 | from pip._internal.configuration import Configuration, kinds | ||
| 7 | from pip._internal.exceptions import PipError | ||
| 8 | from pip._internal.locations import venv_config_file | ||
| 9 | from pip._internal.status_codes import ERROR, SUCCESS | ||
| 10 | from pip._internal.utils.misc import get_prog | ||
| 11 | |||
| 12 | logger = logging.getLogger(__name__) | ||
| 13 | |||
| 14 | |||
| 15 | class ConfigurationCommand(Command): | ||
| 16 | """Manage local and global configuration. | ||
| 17 | |||
| 18 | Subcommands: | ||
| 19 | |||
| 20 | list: List the active configuration (or from the file specified) | ||
| 21 | edit: Edit the configuration file in an editor | ||
| 22 | get: Get the value associated with name | ||
| 23 | set: Set the name=value | ||
| 24 | unset: Unset the value associated with name | ||
| 25 | |||
| 26 | If none of --user, --global and --venv are passed, a virtual | ||
| 27 | environment configuration file is used if one is active and the file | ||
| 28 | exists. Otherwise, all modifications happen to the user file by | ||
| 29 | default. | ||
| 30 | """ | ||
| 31 | |||
| 32 | name = 'config' | ||
| 33 | usage = """ | ||
| 34 | %prog [<file-option>] list | ||
| 35 | %prog [<file-option>] [--editor <editor-path>] edit | ||
| 36 | |||
| 37 | %prog [<file-option>] get name | ||
| 38 | %prog [<file-option>] set name value | ||
| 39 | %prog [<file-option>] unset name | ||
| 40 | """ | ||
| 41 | |||
| 42 | summary = "Manage local and global configuration." | ||
| 43 | |||
| 44 | def __init__(self, *args, **kwargs): | ||
| 45 | super(ConfigurationCommand, self).__init__(*args, **kwargs) | ||
| 46 | |||
| 47 | self.configuration = None | ||
| 48 | |||
| 49 | self.cmd_opts.add_option( | ||
| 50 | '--editor', | ||
| 51 | dest='editor', | ||
| 52 | action='store', | ||
| 53 | default=None, | ||
| 54 | help=( | ||
| 55 | 'Editor to use to edit the file. Uses VISUAL or EDITOR ' | ||
| 56 | 'environment variables if not provided.' | ||
| 57 | ) | ||
| 58 | ) | ||
| 59 | |||
| 60 | self.cmd_opts.add_option( | ||
| 61 | '--global', | ||
| 62 | dest='global_file', | ||
| 63 | action='store_true', | ||
| 64 | default=False, | ||
| 65 | help='Use the system-wide configuration file only' | ||
| 66 | ) | ||
| 67 | |||
| 68 | self.cmd_opts.add_option( | ||
| 69 | '--user', | ||
| 70 | dest='user_file', | ||
| 71 | action='store_true', | ||
| 72 | default=False, | ||
| 73 | help='Use the user configuration file only' | ||
| 74 | ) | ||
| 75 | |||
| 76 | self.cmd_opts.add_option( | ||
| 77 | '--venv', | ||
| 78 | dest='venv_file', | ||
| 79 | action='store_true', | ||
| 80 | default=False, | ||
| 81 | help='Use the virtualenv configuration file only' | ||
| 82 | ) | ||
| 83 | |||
| 84 | self.parser.insert_option_group(0, self.cmd_opts) | ||
| 85 | |||
| 86 | def run(self, options, args): | ||
| 87 | handlers = { | ||
| 88 | "list": self.list_values, | ||
| 89 | "edit": self.open_in_editor, | ||
| 90 | "get": self.get_name, | ||
| 91 | "set": self.set_name_value, | ||
| 92 | "unset": self.unset_name | ||
| 93 | } | ||
| 94 | |||
| 95 | # Determine action | ||
| 96 | if not args or args[0] not in handlers: | ||
| 97 | logger.error("Need an action ({}) to perform.".format( | ||
| 98 | ", ".join(sorted(handlers))) | ||
| 99 | ) | ||
| 100 | return ERROR | ||
| 101 | |||
| 102 | action = args[0] | ||
| 103 | |||
| 104 | # Determine which configuration files are to be loaded | ||
| 105 | # Depends on whether the command is modifying. | ||
| 106 | try: | ||
| 107 | load_only = self._determine_file( | ||
| 108 | options, need_value=(action in ["get", "set", "unset", "edit"]) | ||
| 109 | ) | ||
| 110 | except PipError as e: | ||
| 111 | logger.error(e.args[0]) | ||
| 112 | return ERROR | ||
| 113 | |||
| 114 | # Load a new configuration | ||
| 115 | self.configuration = Configuration( | ||
| 116 | isolated=options.isolated_mode, load_only=load_only | ||
| 117 | ) | ||
| 118 | self.configuration.load() | ||
| 119 | |||
| 120 | # Error handling happens here, not in the action-handlers. | ||
| 121 | try: | ||
| 122 | handlers[action](options, args[1:]) | ||
| 123 | except PipError as e: | ||
| 124 | logger.error(e.args[0]) | ||
| 125 | return ERROR | ||
| 126 | |||
| 127 | return SUCCESS | ||
| 128 | |||
| 129 | def _determine_file(self, options, need_value): | ||
| 130 | file_options = { | ||
| 131 | kinds.USER: options.user_file, | ||
| 132 | kinds.GLOBAL: options.global_file, | ||
| 133 | kinds.VENV: options.venv_file | ||
| 134 | } | ||
| 135 | |||
| 136 | if sum(file_options.values()) == 0: | ||
| 137 | if not need_value: | ||
| 138 | return None | ||
| 139 | # Default to user, unless there's a virtualenv file. | ||
| 140 | elif os.path.exists(venv_config_file): | ||
| 141 | return kinds.VENV | ||
| 142 | else: | ||
| 143 | return kinds.USER | ||
| 144 | elif sum(file_options.values()) == 1: | ||
| 145 | # There's probably a better expression for this. | ||
| 146 | return [key for key in file_options if file_options[key]][0] | ||
| 147 | |||
| 148 | raise PipError( | ||
| 149 | "Need exactly one file to operate upon " | ||
| 150 | "(--user, --venv, or --global)." | ||
| 151 | ) | ||
| 152 | |||
| 153 | def list_values(self, options, args): | ||
| 154 | self._get_n_args(args, "list", n=0) | ||
| 155 | |||
| 156 | for key, value in sorted(self.configuration.items()): | ||
| 157 | logger.info("%s=%r", key, value) | ||
| 158 | |||
| 159 | def get_name(self, options, args): | ||
| 160 | key = self._get_n_args(args, "get [name]", n=1) | ||
| 161 | value = self.configuration.get_value(key) | ||
| 162 | |||
| 163 | logger.info("%s", value) | ||
| 164 | |||
| 165 | def set_name_value(self, options, args): | ||
| 166 | key, value = self._get_n_args(args, "set [name] [value]", n=2) | ||
| 167 | self.configuration.set_value(key, value) | ||
| 168 | |||
| 169 | self._save_configuration() | ||
| 170 | |||
| 171 | def unset_name(self, options, args): | ||
| 172 | key = self._get_n_args(args, "unset [name]", n=1) | ||
| 173 | self.configuration.unset_value(key) | ||
| 174 | |||
| 175 | self._save_configuration() | ||
| 176 | |||
| 177 | def open_in_editor(self, options, args): | ||
| 178 | editor = self._determine_editor(options) | ||
| 179 | |||
| 180 | fname = self.configuration.get_file_to_edit() | ||
| 181 | if fname is None: | ||
| 182 | raise PipError("Could not determine appropriate file.") | ||
| 183 | |||
| 184 | try: | ||
| 185 | subprocess.check_call([editor, fname]) | ||
| 186 | except subprocess.CalledProcessError as e: | ||
| 187 | raise PipError( | ||
| 188 | "Editor Subprocess exited with exit code {}" | ||
| 189 | .format(e.returncode) | ||
| 190 | ) | ||
| 191 | |||
| 192 | def _get_n_args(self, args, example, n): | ||
| 193 | """Helper to make sure the command got the right number of arguments | ||
| 194 | """ | ||
| 195 | if len(args) != n: | ||
| 196 | msg = ( | ||
| 197 | 'Got unexpected number of arguments, expected {}. ' | ||
| 198 | '(example: "{} config {}")' | ||
| 199 | ).format(n, get_prog(), example) | ||
| 200 | raise PipError(msg) | ||
| 201 | |||
| 202 | if n == 1: | ||
| 203 | return args[0] | ||
| 204 | else: | ||
| 205 | return args | ||
| 206 | |||
| 207 | def _save_configuration(self): | ||
| 208 | # We successfully ran a modifying command. Need to save the | ||
| 209 | # configuration. | ||
| 210 | try: | ||
| 211 | self.configuration.save() | ||
| 212 | except Exception: | ||
| 213 | logger.error( | ||
| 214 | "Unable to save configuration. Please report this as a bug.", | ||
| 215 | exc_info=1 | ||
| 216 | ) | ||
| 217 | raise PipError("Internal Error.") | ||
| 218 | |||
| 219 | def _determine_editor(self, options): | ||
| 220 | if options.editor is not None: | ||
| 221 | return options.editor | ||
| 222 | elif "VISUAL" in os.environ: | ||
| 223 | return os.environ["VISUAL"] | ||
| 224 | elif "EDITOR" in os.environ: | ||
| 225 | return os.environ["EDITOR"] | ||
| 226 | else: | ||
| 227 | raise PipError("Could not determine editor to use.") | ||
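ConfigurationCommand.run is essentially a dispatch table: the first positional argument selects a handler, and validation failures map onto pip's numeric status codes. A stripped-down sketch of that shape (SUCCESS/ERROR mirror pip._internal.status_codes):

```python
SUCCESS, ERROR = 0, 1

def run(args, handlers):
    # Validate the action name, then dispatch with the remaining arguments.
    if not args or args[0] not in handlers:
        print("Need an action (%s) to perform." % ", ".join(sorted(handlers)))
        return ERROR
    handlers[args[0]](args[1:])
    return SUCCESS

handlers = {
    'get': lambda rest: print('get %s' % rest),
    'set': lambda rest: print('set %s' % rest),
}
print(run(['set', 'index-url', 'https://example.org/simple'], handlers))
```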
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/download.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/download.py new file mode 100644 index 0000000..916a470 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/download.py | |||
| @@ -0,0 +1,233 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | |||
| 3 | import logging | ||
| 4 | import os | ||
| 5 | |||
| 6 | from pip._internal import cmdoptions | ||
| 7 | from pip._internal.basecommand import RequirementCommand | ||
| 8 | from pip._internal.exceptions import CommandError | ||
| 9 | from pip._internal.index import FormatControl | ||
| 10 | from pip._internal.operations.prepare import RequirementPreparer | ||
| 11 | from pip._internal.req import RequirementSet | ||
| 12 | from pip._internal.resolve import Resolver | ||
| 13 | from pip._internal.utils.filesystem import check_path_owner | ||
| 14 | from pip._internal.utils.misc import ensure_dir, normalize_path | ||
| 15 | from pip._internal.utils.temp_dir import TempDirectory | ||
| 16 | |||
| 17 | logger = logging.getLogger(__name__) | ||
| 18 | |||
| 19 | |||
| 20 | class DownloadCommand(RequirementCommand): | ||
| 21 | """ | ||
| 22 | Download packages from: | ||
| 23 | |||
| 24 | - PyPI (and other indexes) using requirement specifiers. | ||
| 25 | - VCS project urls. | ||
| 26 | - Local project directories. | ||
| 27 | - Local or remote source archives. | ||
| 28 | |||
| 29 | pip also supports downloading from "requirements files", which provide | ||
| 30 | an easy way to specify a whole environment to be downloaded. | ||
| 31 | """ | ||
| 32 | name = 'download' | ||
| 33 | |||
| 34 | usage = """ | ||
| 35 | %prog [options] <requirement specifier> [package-index-options] ... | ||
| 36 | %prog [options] -r <requirements file> [package-index-options] ... | ||
| 37 | %prog [options] <vcs project url> ... | ||
| 38 | %prog [options] <local project path> ... | ||
| 39 | %prog [options] <archive url/path> ...""" | ||
| 40 | |||
| 41 | summary = 'Download packages.' | ||
| 42 | |||
| 43 | def __init__(self, *args, **kw): | ||
| 44 | super(DownloadCommand, self).__init__(*args, **kw) | ||
| 45 | |||
| 46 | cmd_opts = self.cmd_opts | ||
| 47 | |||
| 48 | cmd_opts.add_option(cmdoptions.constraints()) | ||
| 49 | cmd_opts.add_option(cmdoptions.requirements()) | ||
| 50 | cmd_opts.add_option(cmdoptions.build_dir()) | ||
| 51 | cmd_opts.add_option(cmdoptions.no_deps()) | ||
| 52 | cmd_opts.add_option(cmdoptions.global_options()) | ||
| 53 | cmd_opts.add_option(cmdoptions.no_binary()) | ||
| 54 | cmd_opts.add_option(cmdoptions.only_binary()) | ||
| 55 | cmd_opts.add_option(cmdoptions.src()) | ||
| 56 | cmd_opts.add_option(cmdoptions.pre()) | ||
| 57 | cmd_opts.add_option(cmdoptions.no_clean()) | ||
| 58 | cmd_opts.add_option(cmdoptions.require_hashes()) | ||
| 59 | cmd_opts.add_option(cmdoptions.progress_bar()) | ||
| 60 | cmd_opts.add_option(cmdoptions.no_build_isolation()) | ||
| 61 | |||
| 62 | cmd_opts.add_option( | ||
| 63 | '-d', '--dest', '--destination-dir', '--destination-directory', | ||
| 64 | dest='download_dir', | ||
| 65 | metavar='dir', | ||
| 66 | default=os.curdir, | ||
| 67 | help=("Download packages into <dir>."), | ||
| 68 | ) | ||
| 69 | |||
| 70 | cmd_opts.add_option( | ||
| 71 | '--platform', | ||
| 72 | dest='platform', | ||
| 73 | metavar='platform', | ||
| 74 | default=None, | ||
| 75 | help=("Only download wheels compatible with <platform>. " | ||
| 76 | "Defaults to the platform of the running system."), | ||
| 77 | ) | ||
| 78 | |||
| 79 | cmd_opts.add_option( | ||
| 80 | '--python-version', | ||
| 81 | dest='python_version', | ||
| 82 | metavar='python_version', | ||
| 83 | default=None, | ||
| 84 | help=("Only download wheels compatible with Python " | ||
| 85 | "interpreter version <version>. If not specified, then the " | ||
| 86 | "current system interpreter minor version is used. A major " | ||
| 87 | "version (e.g. '2') can be specified to match all " | ||
| 88 | "minor revs of that major version. A minor version " | ||
| 89 | "(e.g. '34') can also be specified."), | ||
| 90 | ) | ||
| 91 | |||
| 92 | cmd_opts.add_option( | ||
| 93 | '--implementation', | ||
| 94 | dest='implementation', | ||
| 95 | metavar='implementation', | ||
| 96 | default=None, | ||
| 97 | help=("Only download wheels compatible with Python " | ||
| 98 | "implementation <implementation>, e.g. 'pp', 'jy', 'cp', " | ||
| 99 | "or 'ip'. If not specified, then the current " | ||
| 100 | "interpreter implementation is used. Use 'py' to force " | ||
| 101 | "implementation-agnostic wheels."), | ||
| 102 | ) | ||
| 103 | |||
| 104 | cmd_opts.add_option( | ||
| 105 | '--abi', | ||
| 106 | dest='abi', | ||
| 107 | metavar='abi', | ||
| 108 | default=None, | ||
| 109 | help=("Only download wheels compatible with Python " | ||
| 110 | "abi <abi>, e.g. 'pypy_41'. If not specified, then the " | ||
| 111 | "current interpreter abi tag is used. Generally " | ||
| 112 | "you will need to specify --implementation, " | ||
| 113 | "--platform, and --python-version when using " | ||
| 114 | "this option."), | ||
| 115 | ) | ||
| 116 | |||
| 117 | index_opts = cmdoptions.make_option_group( | ||
| 118 | cmdoptions.index_group, | ||
| 119 | self.parser, | ||
| 120 | ) | ||
| 121 | |||
| 122 | self.parser.insert_option_group(0, index_opts) | ||
| 123 | self.parser.insert_option_group(0, cmd_opts) | ||
| 124 | |||
| 125 | def run(self, options, args): | ||
| 126 | options.ignore_installed = True | ||
| 127 | # editable doesn't really make sense for `pip download`, but the bowels | ||
| 128 | # of the RequirementSet code require that property. | ||
| 129 | options.editables = [] | ||
| 130 | |||
| 131 | if options.python_version: | ||
| 132 | python_versions = [options.python_version] | ||
| 133 | else: | ||
| 134 | python_versions = None | ||
| 135 | |||
| 136 | dist_restriction_set = any([ | ||
| 137 | options.python_version, | ||
| 138 | options.platform, | ||
| 139 | options.abi, | ||
| 140 | options.implementation, | ||
| 141 | ]) | ||
| 142 | binary_only = FormatControl(set(), {':all:'}) | ||
| 143 | no_sdist_dependencies = ( | ||
| 144 | options.format_control != binary_only and | ||
| 145 | not options.ignore_dependencies | ||
| 146 | ) | ||
| 147 | if dist_restriction_set and no_sdist_dependencies: | ||
| 148 | raise CommandError( | ||
| 149 | "When restricting platform and interpreter constraints using " | ||
| 150 | "--python-version, --platform, --abi, or --implementation, " | ||
| 151 | "either --no-deps must be set, or --only-binary=:all: must be " | ||
| 152 | "set and --no-binary must not be set (or must be set to " | ||
| 153 | ":none:)." | ||
| 154 | ) | ||
| 155 | |||
| 156 | options.src_dir = os.path.abspath(options.src_dir) | ||
| 157 | options.download_dir = normalize_path(options.download_dir) | ||
| 158 | |||
| 159 | ensure_dir(options.download_dir) | ||
| 160 | |||
| 161 | with self._build_session(options) as session: | ||
| 162 | finder = self._build_package_finder( | ||
| 163 | options=options, | ||
| 164 | session=session, | ||
| 165 | platform=options.platform, | ||
| 166 | python_versions=python_versions, | ||
| 167 | abi=options.abi, | ||
| 168 | implementation=options.implementation, | ||
| 169 | ) | ||
| 170 | build_delete = (not (options.no_clean or options.build_dir)) | ||
| 171 | if options.cache_dir and not check_path_owner(options.cache_dir): | ||
| 172 | logger.warning( | ||
| 173 | "The directory '%s' or its parent directory is not owned " | ||
| 174 | "by the current user and caching wheels has been " | ||
| 175 | "disabled. Check the permissions and owner of that " | ||
| 176 | "directory. If executing pip with sudo, you may want " | ||
| 177 | "sudo's -H flag.", | ||
| 178 | options.cache_dir, | ||
| 179 | ) | ||
| 180 | options.cache_dir = None | ||
| 181 | |||
| 182 | with TempDirectory( | ||
| 183 | options.build_dir, delete=build_delete, kind="download" | ||
| 184 | ) as directory: | ||
| 185 | |||
| 186 | requirement_set = RequirementSet( | ||
| 187 | require_hashes=options.require_hashes, | ||
| 188 | ) | ||
| 189 | self.populate_requirement_set( | ||
| 190 | requirement_set, | ||
| 191 | args, | ||
| 192 | options, | ||
| 193 | finder, | ||
| 194 | session, | ||
| 195 | self.name, | ||
| 196 | None | ||
| 197 | ) | ||
| 198 | |||
| 199 | preparer = RequirementPreparer( | ||
| 200 | build_dir=directory.path, | ||
| 201 | src_dir=options.src_dir, | ||
| 202 | download_dir=options.download_dir, | ||
| 203 | wheel_download_dir=None, | ||
| 204 | progress_bar=options.progress_bar, | ||
| 205 | build_isolation=options.build_isolation, | ||
| 206 | ) | ||
| 207 | |||
| 208 | resolver = Resolver( | ||
| 209 | preparer=preparer, | ||
| 210 | finder=finder, | ||
| 211 | session=session, | ||
| 212 | wheel_cache=None, | ||
| 213 | use_user_site=False, | ||
| 214 | upgrade_strategy="to-satisfy-only", | ||
| 215 | force_reinstall=False, | ||
| 216 | ignore_dependencies=options.ignore_dependencies, | ||
| 217 | ignore_requires_python=False, | ||
| 218 | ignore_installed=True, | ||
| 219 | isolated=options.isolated_mode, | ||
| 220 | ) | ||
| 221 | resolver.resolve(requirement_set) | ||
| 222 | |||
| 223 | downloaded = ' '.join([ | ||
| 224 | req.name for req in requirement_set.successfully_downloaded | ||
| 225 | ]) | ||
| 226 | if downloaded: | ||
| 227 | logger.info('Successfully downloaded %s', downloaded) | ||
| 228 | |||
| 229 | # Clean up | ||
| 230 | if not options.no_clean: | ||
| 231 | requirement_set.cleanup_files() | ||
| 232 | |||
| 233 | return requirement_set | ||
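The CommandError raised above encodes one invariant: cross-platform downloads (--python-version, --platform, --abi, --implementation) are only safe when dependency resolution cannot pull in source distributions. Restated as a standalone guard (a hypothetical helper; the argument names mirror the options):

```python
def check_dist_restriction(python_version, platform, abi, implementation,
                           only_binary_all, no_deps):
    restricted = any([python_version, platform, abi, implementation])
    if restricted and not (only_binary_all or no_deps):
        raise ValueError(
            "--python-version/--platform/--abi/--implementation require "
            "--no-deps, or --only-binary=:all: without --no-binary"
        )

# Passes: wheels only, so dependencies can also be restricted safely.
check_dist_restriction('34', 'win_amd64', None, 'cp',
                       only_binary_all=True, no_deps=False)
```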
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/freeze.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/freeze.py new file mode 100644 index 0000000..ac562d7 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/freeze.py | |||
| @@ -0,0 +1,96 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | |||
| 3 | import sys | ||
| 4 | |||
| 5 | from pip._internal import index | ||
| 6 | from pip._internal.basecommand import Command | ||
| 7 | from pip._internal.cache import WheelCache | ||
| 8 | from pip._internal.compat import stdlib_pkgs | ||
| 9 | from pip._internal.operations.freeze import freeze | ||
| 10 | |||
| 11 | DEV_PKGS = {'pip', 'setuptools', 'distribute', 'wheel'} | ||
| 12 | |||
| 13 | |||
| 14 | class FreezeCommand(Command): | ||
| 15 | """ | ||
| 16 | Output installed packages in requirements format. | ||
| 17 | |||
| 18 | Packages are listed in case-insensitive sorted order. | ||
| 19 | """ | ||
| 20 | name = 'freeze' | ||
| 21 | usage = """ | ||
| 22 | %prog [options]""" | ||
| 23 | summary = 'Output installed packages in requirements format.' | ||
| 24 | log_streams = ("ext://sys.stderr", "ext://sys.stderr") | ||
| 25 | |||
| 26 | def __init__(self, *args, **kw): | ||
| 27 | super(FreezeCommand, self).__init__(*args, **kw) | ||
| 28 | |||
| 29 | self.cmd_opts.add_option( | ||
| 30 | '-r', '--requirement', | ||
| 31 | dest='requirements', | ||
| 32 | action='append', | ||
| 33 | default=[], | ||
| 34 | metavar='file', | ||
| 35 | help="Use the order in the given requirements file and its " | ||
| 36 | "comments when generating output. This option can be " | ||
| 37 | "used multiple times.") | ||
| 38 | self.cmd_opts.add_option( | ||
| 39 | '-f', '--find-links', | ||
| 40 | dest='find_links', | ||
| 41 | action='append', | ||
| 42 | default=[], | ||
| 43 | metavar='URL', | ||
| 44 | help='URL for finding packages, which will be added to the ' | ||
| 45 | 'output.') | ||
| 46 | self.cmd_opts.add_option( | ||
| 47 | '-l', '--local', | ||
| 48 | dest='local', | ||
| 49 | action='store_true', | ||
| 50 | default=False, | ||
| 51 | help='If in a virtualenv that has global access, do not output ' | ||
| 52 | 'globally-installed packages.') | ||
| 53 | self.cmd_opts.add_option( | ||
| 54 | '--user', | ||
| 55 | dest='user', | ||
| 56 | action='store_true', | ||
| 57 | default=False, | ||
| 58 | help='Only output packages installed in user-site.') | ||
| 59 | self.cmd_opts.add_option( | ||
| 60 | '--all', | ||
| 61 | dest='freeze_all', | ||
| 62 | action='store_true', | ||
| 63 | help='Do not skip these packages in the output:' | ||
| 64 | ' %s' % ', '.join(DEV_PKGS)) | ||
| 65 | self.cmd_opts.add_option( | ||
| 66 | '--exclude-editable', | ||
| 67 | dest='exclude_editable', | ||
| 68 | action='store_true', | ||
| 69 | help='Exclude editable packages from output.') | ||
| 70 | |||
| 71 | self.parser.insert_option_group(0, self.cmd_opts) | ||
| 72 | |||
| 73 | def run(self, options, args): | ||
| 74 | format_control = index.FormatControl(set(), set()) | ||
| 75 | wheel_cache = WheelCache(options.cache_dir, format_control) | ||
| 76 | skip = set(stdlib_pkgs) | ||
| 77 | if not options.freeze_all: | ||
| 78 | skip.update(DEV_PKGS) | ||
| 79 | |||
| 80 | freeze_kwargs = dict( | ||
| 81 | requirement=options.requirements, | ||
| 82 | find_links=options.find_links, | ||
| 83 | local_only=options.local, | ||
| 84 | user_only=options.user, | ||
| 85 | skip_regex=options.skip_requirements_regex, | ||
| 86 | isolated=options.isolated_mode, | ||
| 87 | wheel_cache=wheel_cache, | ||
| 88 | skip=skip, | ||
| 89 | exclude_editable=options.exclude_editable, | ||
| 90 | ) | ||
| 91 | |||
| 92 | try: | ||
| 93 | for line in freeze(**freeze_kwargs): | ||
| 94 | sys.stdout.write(line + '\n') | ||
| 95 | finally: | ||
| 96 | wheel_cache.cleanup() | ||
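At its core, the freeze operation walks the installed distributions and emits name==version lines in case-insensitive order, skipping DEV_PKGS. A rough approximation using setuptools' pkg_resources, ignoring editables, --find-links, and requirements-file ordering:

```python
import pkg_resources  # provided by setuptools

SKIP = {'pip', 'setuptools', 'distribute', 'wheel'}  # DEV_PKGS above

lines = (
    '%s==%s' % (dist.project_name, dist.version)
    for dist in pkg_resources.working_set
    if dist.project_name.lower() not in SKIP
)
for line in sorted(lines, key=str.lower):
    print(line)
```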
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/hash.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/hash.py new file mode 100644 index 0000000..0ce1419 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/hash.py | |||
| @@ -0,0 +1,57 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | |||
| 3 | import hashlib | ||
| 4 | import logging | ||
| 5 | import sys | ||
| 6 | |||
| 7 | from pip._internal.basecommand import Command | ||
| 8 | from pip._internal.status_codes import ERROR | ||
| 9 | from pip._internal.utils.hashes import FAVORITE_HASH, STRONG_HASHES | ||
| 10 | from pip._internal.utils.misc import read_chunks | ||
| 11 | |||
| 12 | logger = logging.getLogger(__name__) | ||
| 13 | |||
| 14 | |||
| 15 | class HashCommand(Command): | ||
| 16 | """ | ||
| 17 | Compute a hash of a local package archive. | ||
| 18 | |||
| 19 | These can be used with --hash in a requirements file to do repeatable | ||
| 20 | installs. | ||
| 21 | |||
| 22 | """ | ||
| 23 | name = 'hash' | ||
| 24 | usage = '%prog [options] <file> ...' | ||
| 25 | summary = 'Compute hashes of package archives.' | ||
| 26 | ignore_require_venv = True | ||
| 27 | |||
| 28 | def __init__(self, *args, **kw): | ||
| 29 | super(HashCommand, self).__init__(*args, **kw) | ||
| 30 | self.cmd_opts.add_option( | ||
| 31 | '-a', '--algorithm', | ||
| 32 | dest='algorithm', | ||
| 33 | choices=STRONG_HASHES, | ||
| 34 | action='store', | ||
| 35 | default=FAVORITE_HASH, | ||
| 36 | help='The hash algorithm to use: one of %s' % | ||
| 37 | ', '.join(STRONG_HASHES)) | ||
| 38 | self.parser.insert_option_group(0, self.cmd_opts) | ||
| 39 | |||
| 40 | def run(self, options, args): | ||
| 41 | if not args: | ||
| 42 | self.parser.print_usage(sys.stderr) | ||
| 43 | return ERROR | ||
| 44 | |||
| 45 | algorithm = options.algorithm | ||
| 46 | for path in args: | ||
| 47 | logger.info('%s:\n--hash=%s:%s', | ||
| 48 | path, algorithm, _hash_of_file(path, algorithm)) | ||
| 49 | |||
| 50 | |||
| 51 | def _hash_of_file(path, algorithm): | ||
| 52 | """Return the hash digest of a file.""" | ||
| 53 | with open(path, 'rb') as archive: | ||
| 54 | hash = hashlib.new(algorithm) | ||
| 55 | for chunk in read_chunks(archive): | ||
| 56 | hash.update(chunk) | ||
| 57 | return hash.hexdigest() | ||
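_hash_of_file above uses the standard chunked-digest idiom so large archives never need to fit in memory. An equivalent self-contained version (8 KiB is an arbitrary chunk size here, not necessarily read_chunks' default); its output slots directly into the `--hash=<algorithm>:<hexdigest>` form the command prints:

```python
import hashlib

def hash_of_file(path, algorithm='sha256'):
    # Feed the digest in fixed-size chunks until read() returns b''.
    digest = hashlib.new(algorithm)
    with open(path, 'rb') as archive:
        for chunk in iter(lambda: archive.read(8192), b''):
            digest.update(chunk)
    return digest.hexdigest()
```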
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/help.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/help.py new file mode 100644 index 0000000..f4a0e40 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/help.py | |||
| @@ -0,0 +1,36 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | |||
| 3 | from pip._internal.basecommand import SUCCESS, Command | ||
| 4 | from pip._internal.exceptions import CommandError | ||
| 5 | |||
| 6 | |||
| 7 | class HelpCommand(Command): | ||
| 8 | """Show help for commands""" | ||
| 9 | name = 'help' | ||
| 10 | usage = """ | ||
| 11 | %prog <command>""" | ||
| 12 | summary = 'Show help for commands.' | ||
| 13 | ignore_require_venv = True | ||
| 14 | |||
| 15 | def run(self, options, args): | ||
| 16 | from pip._internal.commands import commands_dict, get_similar_commands | ||
| 17 | |||
| 18 | try: | ||
| 19 | # 'pip help' with no args is handled by pip.__init__.parseopt() | ||
| 20 | cmd_name = args[0] # the command we need help for | ||
| 21 | except IndexError: | ||
| 22 | return SUCCESS | ||
| 23 | |||
| 24 | if cmd_name not in commands_dict: | ||
| 25 | guess = get_similar_commands(cmd_name) | ||
| 26 | |||
| 27 | msg = ['unknown command "%s"' % cmd_name] | ||
| 28 | if guess: | ||
| 29 | msg.append('maybe you meant "%s"' % guess) | ||
| 30 | |||
| 31 | raise CommandError(' - '.join(msg)) | ||
| 32 | |||
| 33 | command = commands_dict[cmd_name]() | ||
| 34 | command.parser.print_help() | ||
| 35 | |||
| 36 | return SUCCESS | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/install.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/install.py new file mode 100644 index 0000000..057a64e --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/install.py | |||
| @@ -0,0 +1,502 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | |||
| 3 | import errno | ||
| 4 | import logging | ||
| 5 | import operator | ||
| 6 | import os | ||
| 7 | import shutil | ||
| 8 | from optparse import SUPPRESS_HELP | ||
| 9 | |||
| 10 | from pip._internal import cmdoptions | ||
| 11 | from pip._internal.basecommand import RequirementCommand | ||
| 12 | from pip._internal.cache import WheelCache | ||
| 13 | from pip._internal.exceptions import ( | ||
| 14 | CommandError, InstallationError, PreviousBuildDirError, | ||
| 15 | ) | ||
| 16 | from pip._internal.locations import distutils_scheme, virtualenv_no_global | ||
| 17 | from pip._internal.operations.check import check_install_conflicts | ||
| 18 | from pip._internal.operations.prepare import RequirementPreparer | ||
| 19 | from pip._internal.req import RequirementSet, install_given_reqs | ||
| 20 | from pip._internal.resolve import Resolver | ||
| 21 | from pip._internal.status_codes import ERROR | ||
| 22 | from pip._internal.utils.filesystem import check_path_owner | ||
| 23 | from pip._internal.utils.misc import ensure_dir, get_installed_version | ||
| 24 | from pip._internal.utils.temp_dir import TempDirectory | ||
| 25 | from pip._internal.wheel import WheelBuilder | ||
| 26 | |||
| 27 | try: | ||
| 28 | import wheel | ||
| 29 | except ImportError: | ||
| 30 | wheel = None | ||
| 31 | |||
| 32 | |||
| 33 | logger = logging.getLogger(__name__) | ||
| 34 | |||
| 35 | |||
| 36 | class InstallCommand(RequirementCommand): | ||
| 37 | """ | ||
| 38 | Install packages from: | ||
| 39 | |||
| 40 | - PyPI (and other indexes) using requirement specifiers. | ||
| 41 | - VCS project urls. | ||
| 42 | - Local project directories. | ||
| 43 | - Local or remote source archives. | ||
| 44 | |||
| 45 | pip also supports installing from "requirements files", which provide | ||
| 46 | an easy way to specify a whole environment to be installed. | ||
| 47 | """ | ||
| 48 | name = 'install' | ||
| 49 | |||
| 50 | usage = """ | ||
| 51 | %prog [options] <requirement specifier> [package-index-options] ... | ||
| 52 | %prog [options] -r <requirements file> [package-index-options] ... | ||
| 53 | %prog [options] [-e] <vcs project url> ... | ||
| 54 | %prog [options] [-e] <local project path> ... | ||
| 55 | %prog [options] <archive url/path> ...""" | ||
| 56 | |||
| 57 | summary = 'Install packages.' | ||
| 58 | |||
| 59 | def __init__(self, *args, **kw): | ||
| 60 | super(InstallCommand, self).__init__(*args, **kw) | ||
| 61 | |||
| 62 | cmd_opts = self.cmd_opts | ||
| 63 | |||
| 64 | cmd_opts.add_option(cmdoptions.requirements()) | ||
| 65 | cmd_opts.add_option(cmdoptions.constraints()) | ||
| 66 | cmd_opts.add_option(cmdoptions.no_deps()) | ||
| 67 | cmd_opts.add_option(cmdoptions.pre()) | ||
| 68 | |||
| 69 | cmd_opts.add_option(cmdoptions.editable()) | ||
| 70 | cmd_opts.add_option( | ||
| 71 | '-t', '--target', | ||
| 72 | dest='target_dir', | ||
| 73 | metavar='dir', | ||
| 74 | default=None, | ||
| 75 | help='Install packages into <dir>. ' | ||
| 76 | 'By default this will not replace existing files/folders in ' | ||
| 77 | '<dir>. Use --upgrade to replace existing packages in <dir> ' | ||
| 78 | 'with new versions.' | ||
| 79 | ) | ||
| 80 | cmd_opts.add_option( | ||
| 81 | '--user', | ||
| 82 | dest='use_user_site', | ||
| 83 | action='store_true', | ||
| 84 | help="Install to the Python user install directory for your " | ||
| 85 | "platform. Typically ~/.local/, or %APPDATA%\\Python on " | ||
| 86 | "Windows. (See the Python documentation for site.USER_BASE " | ||
| 87 | "for full details.)") | ||
| 88 | cmd_opts.add_option( | ||
| 89 | '--no-user', | ||
| 90 | dest='use_user_site', | ||
| 91 | action='store_false', | ||
| 92 | help=SUPPRESS_HELP) | ||
| 93 | cmd_opts.add_option( | ||
| 94 | '--root', | ||
| 95 | dest='root_path', | ||
| 96 | metavar='dir', | ||
| 97 | default=None, | ||
| 98 | help="Install everything relative to this alternate root " | ||
| 99 | "directory.") | ||
| 100 | cmd_opts.add_option( | ||
| 101 | '--prefix', | ||
| 102 | dest='prefix_path', | ||
| 103 | metavar='dir', | ||
| 104 | default=None, | ||
| 105 | help="Installation prefix where lib, bin and other top-level " | ||
| 106 | "folders are placed") | ||
| 107 | |||
| 108 | cmd_opts.add_option(cmdoptions.build_dir()) | ||
| 109 | |||
| 110 | cmd_opts.add_option(cmdoptions.src()) | ||
| 111 | |||
| 112 | cmd_opts.add_option( | ||
| 113 | '-U', '--upgrade', | ||
| 114 | dest='upgrade', | ||
| 115 | action='store_true', | ||
| 116 | help='Upgrade all specified packages to the newest available ' | ||
| 117 | 'version. The handling of dependencies depends on the ' | ||
| 118 | 'upgrade-strategy used.' | ||
| 119 | ) | ||
| 120 | |||
| 121 | cmd_opts.add_option( | ||
| 122 | '--upgrade-strategy', | ||
| 123 | dest='upgrade_strategy', | ||
| 124 | default='only-if-needed', | ||
| 125 | choices=['only-if-needed', 'eager'], | ||
| 126 | help='Determines how dependency upgrading should be handled ' | ||
| 127 | '[default: %default]. ' | ||
| 128 | '"eager" - dependencies are upgraded regardless of ' | ||
| 129 | 'whether the currently installed version satisfies the ' | ||
| 130 | 'requirements of the upgraded package(s). ' | ||
| 131 | '"only-if-needed" - dependencies are upgraded only when they do not ' | ||
| 132 | 'satisfy the requirements of the upgraded package(s).' | ||
| 133 | ) | ||
| 134 | |||
| 135 | cmd_opts.add_option( | ||
| 136 | '--force-reinstall', | ||
| 137 | dest='force_reinstall', | ||
| 138 | action='store_true', | ||
| 139 | help='Reinstall all packages even if they are already ' | ||
| 140 | 'up-to-date.') | ||
| 141 | |||
| 142 | cmd_opts.add_option( | ||
| 143 | '-I', '--ignore-installed', | ||
| 144 | dest='ignore_installed', | ||
| 145 | action='store_true', | ||
| 146 | help='Ignore the installed packages (reinstalling instead).') | ||
| 147 | |||
| 148 | cmd_opts.add_option(cmdoptions.ignore_requires_python()) | ||
| 149 | cmd_opts.add_option(cmdoptions.no_build_isolation()) | ||
| 150 | |||
| 151 | cmd_opts.add_option(cmdoptions.install_options()) | ||
| 152 | cmd_opts.add_option(cmdoptions.global_options()) | ||
| 153 | |||
| 154 | cmd_opts.add_option( | ||
| 155 | "--compile", | ||
| 156 | action="store_true", | ||
| 157 | dest="compile", | ||
| 158 | default=True, | ||
| 159 | help="Compile Python source files to bytecode", | ||
| 160 | ) | ||
| 161 | |||
| 162 | cmd_opts.add_option( | ||
| 163 | "--no-compile", | ||
| 164 | action="store_false", | ||
| 165 | dest="compile", | ||
| 166 | help="Do not compile Python source files to bytecode", | ||
| 167 | ) | ||
| 168 | |||
| 169 | cmd_opts.add_option( | ||
| 170 | "--no-warn-script-location", | ||
| 171 | action="store_false", | ||
| 172 | dest="warn_script_location", | ||
| 173 | default=True, | ||
| 174 | help="Do not warn when installing scripts outside PATH", | ||
| 175 | ) | ||
| 176 | cmd_opts.add_option( | ||
| 177 | "--no-warn-conflicts", | ||
| 178 | action="store_false", | ||
| 179 | dest="warn_about_conflicts", | ||
| 180 | default=True, | ||
| 181 | help="Do not warn about broken dependencies", | ||
| 182 | ) | ||
| 183 | |||
| 184 | cmd_opts.add_option(cmdoptions.no_binary()) | ||
| 185 | cmd_opts.add_option(cmdoptions.only_binary()) | ||
| 186 | cmd_opts.add_option(cmdoptions.no_clean()) | ||
| 187 | cmd_opts.add_option(cmdoptions.require_hashes()) | ||
| 188 | cmd_opts.add_option(cmdoptions.progress_bar()) | ||
| 189 | |||
| 190 | index_opts = cmdoptions.make_option_group( | ||
| 191 | cmdoptions.index_group, | ||
| 192 | self.parser, | ||
| 193 | ) | ||
| 194 | |||
| 195 | self.parser.insert_option_group(0, index_opts) | ||
| 196 | self.parser.insert_option_group(0, cmd_opts) | ||
| 197 | |||
| 198 | def run(self, options, args): | ||
| 199 | cmdoptions.check_install_build_global(options) | ||
| 200 | |||
| 201 | upgrade_strategy = "to-satisfy-only" | ||
| 202 | if options.upgrade: | ||
| 203 | upgrade_strategy = options.upgrade_strategy | ||
| 204 | |||
| 205 | if options.build_dir: | ||
| 206 | options.build_dir = os.path.abspath(options.build_dir) | ||
| 207 | |||
| 208 | options.src_dir = os.path.abspath(options.src_dir) | ||
| 209 | install_options = options.install_options or [] | ||
| 210 | if options.use_user_site: | ||
| 211 | if options.prefix_path: | ||
| 212 | raise CommandError( | ||
| 213 | "Can not combine '--user' and '--prefix' as they imply " | ||
| 214 | "different installation locations" | ||
| 215 | ) | ||
| 216 | if virtualenv_no_global(): | ||
| 217 | raise InstallationError( | ||
| 218 | "Can not perform a '--user' install. User site-packages " | ||
| 219 | "are not visible in this virtualenv." | ||
| 220 | ) | ||
| 221 | install_options.append('--user') | ||
| 222 | install_options.append('--prefix=') | ||
| 223 | |||
| 224 | target_temp_dir = TempDirectory(kind="target") | ||
| 225 | if options.target_dir: | ||
| 226 | options.ignore_installed = True | ||
| 227 | options.target_dir = os.path.abspath(options.target_dir) | ||
| 228 | if (os.path.exists(options.target_dir) and not | ||
| 229 | os.path.isdir(options.target_dir)): | ||
| 230 | raise CommandError( | ||
| 231 | "Target path exists but is not a directory, will not " | ||
| 232 | "continue." | ||
| 233 | ) | ||
| 234 | |||
| 235 | # Create a temporary directory for use with the --target option | ||
| 236 | target_temp_dir.create() | ||
| 237 | install_options.append('--home=' + target_temp_dir.path) | ||
| 238 | |||
| 239 | global_options = options.global_options or [] | ||
| 240 | |||
| 241 | with self._build_session(options) as session: | ||
| 242 | finder = self._build_package_finder(options, session) | ||
| 243 | build_delete = (not (options.no_clean or options.build_dir)) | ||
| 244 | wheel_cache = WheelCache(options.cache_dir, options.format_control) | ||
| 245 | |||
| 246 | if options.cache_dir and not check_path_owner(options.cache_dir): | ||
| 247 | logger.warning( | ||
| 248 | "The directory '%s' or its parent directory is not owned " | ||
| 249 | "by the current user and caching wheels has been " | ||
| 250 | "disabled. check the permissions and owner of that " | ||
| 251 | "directory. If executing pip with sudo, you may want " | ||
| 252 | "sudo's -H flag.", | ||
| 253 | options.cache_dir, | ||
| 254 | ) | ||
| 255 | options.cache_dir = None | ||
| 256 | |||
| 257 | with TempDirectory( | ||
| 258 | options.build_dir, delete=build_delete, kind="install" | ||
| 259 | ) as directory: | ||
| 260 | requirement_set = RequirementSet( | ||
| 261 | require_hashes=options.require_hashes, | ||
| 262 | ) | ||
| 263 | |||
| 264 | try: | ||
| 265 | self.populate_requirement_set( | ||
| 266 | requirement_set, args, options, finder, session, | ||
| 267 | self.name, wheel_cache | ||
| 268 | ) | ||
| 269 | preparer = RequirementPreparer( | ||
| 270 | build_dir=directory.path, | ||
| 271 | src_dir=options.src_dir, | ||
| 272 | download_dir=None, | ||
| 273 | wheel_download_dir=None, | ||
| 274 | progress_bar=options.progress_bar, | ||
| 275 | build_isolation=options.build_isolation, | ||
| 276 | ) | ||
| 277 | |||
| 278 | resolver = Resolver( | ||
| 279 | preparer=preparer, | ||
| 280 | finder=finder, | ||
| 281 | session=session, | ||
| 282 | wheel_cache=wheel_cache, | ||
| 283 | use_user_site=options.use_user_site, | ||
| 284 | upgrade_strategy=upgrade_strategy, | ||
| 285 | force_reinstall=options.force_reinstall, | ||
| 286 | ignore_dependencies=options.ignore_dependencies, | ||
| 287 | ignore_requires_python=options.ignore_requires_python, | ||
| 288 | ignore_installed=options.ignore_installed, | ||
| 289 | isolated=options.isolated_mode, | ||
| 290 | ) | ||
| 291 | resolver.resolve(requirement_set) | ||
| 292 | |||
| 293 | # If caching is disabled or wheel is not installed, don't | ||
| 294 | # try to build wheels. | ||
| 295 | if wheel and options.cache_dir: | ||
| 296 | # build wheels before install. | ||
| 297 | wb = WheelBuilder( | ||
| 298 | finder, preparer, wheel_cache, | ||
| 299 | build_options=[], global_options=[], | ||
| 300 | ) | ||
| 301 | # Ignore the result: a failed wheel will be | ||
| 302 | # installed from the sdist/vcs source instead. | ||
| 303 | wb.build( | ||
| 304 | requirement_set.requirements.values(), | ||
| 305 | session=session, autobuilding=True | ||
| 306 | ) | ||
| 307 | |||
| 308 | to_install = resolver.get_installation_order( | ||
| 309 | requirement_set | ||
| 310 | ) | ||
| 311 | |||
| 312 | # Consistency checking of the package set we're installing. | ||
| 313 | should_warn_about_conflicts = ( | ||
| 314 | not options.ignore_dependencies and | ||
| 315 | options.warn_about_conflicts | ||
| 316 | ) | ||
| 317 | if should_warn_about_conflicts: | ||
| 318 | self._warn_about_conflicts(to_install) | ||
| 319 | |||
| 320 | # Don't warn about script install locations if | ||
| 321 | # --target has been specified | ||
| 322 | warn_script_location = options.warn_script_location | ||
| 323 | if options.target_dir: | ||
| 324 | warn_script_location = False | ||
| 325 | |||
| 326 | installed = install_given_reqs( | ||
| 327 | to_install, | ||
| 328 | install_options, | ||
| 329 | global_options, | ||
| 330 | root=options.root_path, | ||
| 331 | home=target_temp_dir.path, | ||
| 332 | prefix=options.prefix_path, | ||
| 333 | pycompile=options.compile, | ||
| 334 | warn_script_location=warn_script_location, | ||
| 335 | use_user_site=options.use_user_site, | ||
| 336 | ) | ||
| 337 | |||
| 338 | possible_lib_locations = get_lib_location_guesses( | ||
| 339 | user=options.use_user_site, | ||
| 340 | home=target_temp_dir.path, | ||
| 341 | root=options.root_path, | ||
| 342 | prefix=options.prefix_path, | ||
| 343 | isolated=options.isolated_mode, | ||
| 344 | ) | ||
| 345 | reqs = sorted(installed, key=operator.attrgetter('name')) | ||
| 346 | items = [] | ||
| 347 | for req in reqs: | ||
| 348 | item = req.name | ||
| 349 | try: | ||
| 350 | installed_version = get_installed_version( | ||
| 351 | req.name, possible_lib_locations | ||
| 352 | ) | ||
| 353 | if installed_version: | ||
| 354 | item += '-' + installed_version | ||
| 355 | except Exception: | ||
| 356 | pass | ||
| 357 | items.append(item) | ||
| 358 | installed = ' '.join(items) | ||
| 359 | if installed: | ||
| 360 | logger.info('Successfully installed %s', installed) | ||
| 361 | except EnvironmentError as error: | ||
| 362 | show_traceback = (self.verbosity >= 1) | ||
| 363 | |||
| 364 | message = create_env_error_message( | ||
| 365 | error, show_traceback, options.use_user_site, | ||
| 366 | ) | ||
| 367 | logger.error(message, exc_info=show_traceback) | ||
| 368 | |||
| 369 | return ERROR | ||
| 370 | except PreviousBuildDirError: | ||
| 371 | options.no_clean = True | ||
| 372 | raise | ||
| 373 | finally: | ||
| 374 | # Clean up | ||
| 375 | if not options.no_clean: | ||
| 376 | requirement_set.cleanup_files() | ||
| 377 | wheel_cache.cleanup() | ||
| 378 | |||
| 379 | if options.target_dir: | ||
| 380 | self._handle_target_dir( | ||
| 381 | options.target_dir, target_temp_dir, options.upgrade | ||
| 382 | ) | ||
| 383 | return requirement_set | ||
| 384 | |||
| 385 | def _handle_target_dir(self, target_dir, target_temp_dir, upgrade): | ||
| 386 | ensure_dir(target_dir) | ||
| 387 | |||
| 390 | lib_dir_list = [] | ||
| 391 | |||
| 392 | with target_temp_dir: | ||
| 393 | # Checking both purelib and platlib directories for installed | ||
| 394 | # packages to be moved to target directory | ||
| 395 | scheme = distutils_scheme('', home=target_temp_dir.path) | ||
| 396 | purelib_dir = scheme['purelib'] | ||
| 397 | platlib_dir = scheme['platlib'] | ||
| 398 | data_dir = scheme['data'] | ||
| 399 | |||
| 400 | if os.path.exists(purelib_dir): | ||
| 401 | lib_dir_list.append(purelib_dir) | ||
| 402 | if os.path.exists(platlib_dir) and platlib_dir != purelib_dir: | ||
| 403 | lib_dir_list.append(platlib_dir) | ||
| 404 | if os.path.exists(data_dir): | ||
| 405 | lib_dir_list.append(data_dir) | ||
| 406 | |||
| 407 | for lib_dir in lib_dir_list: | ||
| 408 | for item in os.listdir(lib_dir): | ||
| 409 | if lib_dir == data_dir: | ||
| 410 | ddir = os.path.join(data_dir, item) | ||
| 411 | if any(s.startswith(ddir) for s in lib_dir_list[:-1]): | ||
| 412 | continue | ||
| 413 | target_item_dir = os.path.join(target_dir, item) | ||
| 414 | if os.path.exists(target_item_dir): | ||
| 415 | if not upgrade: | ||
| 416 | logger.warning( | ||
| 417 | 'Target directory %s already exists. Specify ' | ||
| 418 | '--upgrade to force replacement.', | ||
| 419 | target_item_dir | ||
| 420 | ) | ||
| 421 | continue | ||
| 422 | if os.path.islink(target_item_dir): | ||
| 423 | logger.warning( | ||
| 424 | 'Target directory %s already exists and is ' | ||
| 425 | 'a link. Pip will not automatically replace ' | ||
| 426 | 'links. Please remove it if replacement is ' | ||
| 427 | 'desired.', | ||
| 428 | target_item_dir | ||
| 429 | ) | ||
| 430 | continue | ||
| 431 | if os.path.isdir(target_item_dir): | ||
| 432 | shutil.rmtree(target_item_dir) | ||
| 433 | else: | ||
| 434 | os.remove(target_item_dir) | ||
| 435 | |||
| 436 | shutil.move( | ||
| 437 | os.path.join(lib_dir, item), | ||
| 438 | target_item_dir | ||
| 439 | ) | ||
| 440 | |||
| 441 | def _warn_about_conflicts(self, to_install): | ||
| 442 | package_set, _dep_info = check_install_conflicts(to_install) | ||
| 443 | missing, conflicting = _dep_info | ||
| 444 | |||
| 445 | # NOTE: There is some duplication here from pip check | ||
| 446 | for project_name in missing: | ||
| 447 | version = package_set[project_name][0] | ||
| 448 | for dependency in missing[project_name]: | ||
| 449 | logger.critical( | ||
| 450 | "%s %s requires %s, which is not installed.", | ||
| 451 | project_name, version, dependency[1], | ||
| 452 | ) | ||
| 453 | |||
| 454 | for project_name in conflicting: | ||
| 455 | version = package_set[project_name][0] | ||
| 456 | for dep_name, dep_version, req in conflicting[project_name]: | ||
| 457 | logger.critical( | ||
| 458 | "%s %s has requirement %s, but you'll have %s %s which is " | ||
| 459 | "incompatible.", | ||
| 460 | project_name, version, req, dep_name, dep_version, | ||
| 461 | ) | ||
| 462 | |||
| 463 | |||
| 464 | def get_lib_location_guesses(*args, **kwargs): | ||
| 465 | scheme = distutils_scheme('', *args, **kwargs) | ||
| 466 | return [scheme['purelib'], scheme['platlib']] | ||
| 467 | |||
| 468 | |||
| 469 | def create_env_error_message(error, show_traceback, using_user_site): | ||
| 470 | """Format an error message for an EnvironmentError | ||
| 471 | |||
| 472 | It may occur anytime during the execution of the install command. | ||
| 473 | """ | ||
| 474 | parts = [] | ||
| 475 | |||
| 476 | # Mention the error if we are not going to show a traceback | ||
| 477 | parts.append("Could not install packages due to an EnvironmentError") | ||
| 478 | if not show_traceback: | ||
| 479 | parts.append(": ") | ||
| 480 | parts.append(str(error)) | ||
| 481 | else: | ||
| 482 | parts.append(".") | ||
| 483 | |||
| 484 | # Split the error indication from a helper message (if any) | ||
| 485 | parts[-1] += "\n" | ||
| 486 | |||
| 487 | # Suggest useful actions to the user: | ||
| 488 | # (1) using user site-packages or (2) verifying the permissions | ||
| 489 | if error.errno == errno.EACCES: | ||
| 490 | user_option_part = "Consider using the `--user` option" | ||
| 491 | permissions_part = "Check the permissions" | ||
| 492 | |||
| 493 | if not using_user_site: | ||
| 494 | parts.extend([ | ||
| 495 | user_option_part, " or ", | ||
| 496 | permissions_part.lower(), | ||
| 497 | ]) | ||
| 498 | else: | ||
| 499 | parts.append(permissions_part) | ||
| 500 | parts.append(".\n") | ||
| 501 | |||
| 502 | return "".join(parts).strip() + "\n" | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/list.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/list.py new file mode 100644 index 0000000..1b46c6f --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/list.py | |||
| @@ -0,0 +1,343 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | |||
| 3 | import json | ||
| 4 | import logging | ||
| 5 | import warnings | ||
| 6 | |||
| 7 | from pip._vendor import six | ||
| 8 | from pip._vendor.six.moves import zip_longest | ||
| 9 | |||
| 10 | from pip._internal.basecommand import Command | ||
| 11 | from pip._internal.cmdoptions import index_group, make_option_group | ||
| 12 | from pip._internal.exceptions import CommandError | ||
| 13 | from pip._internal.index import PackageFinder | ||
| 14 | from pip._internal.utils.deprecation import RemovedInPip11Warning | ||
| 15 | from pip._internal.utils.misc import ( | ||
| 16 | dist_is_editable, get_installed_distributions, | ||
| 17 | ) | ||
| 18 | from pip._internal.utils.packaging import get_installer | ||
| 19 | |||
| 20 | logger = logging.getLogger(__name__) | ||
| 21 | |||
| 22 | |||
| 23 | class ListCommand(Command): | ||
| 24 | """ | ||
| 25 | List installed packages, including editables. | ||
| 26 | |||
| 27 | Packages are listed in a case-insensitive sorted order. | ||
| 28 | """ | ||
| 29 | name = 'list' | ||
| 30 | usage = """ | ||
| 31 | %prog [options]""" | ||
| 32 | summary = 'List installed packages.' | ||
| 33 | |||
| 34 | def __init__(self, *args, **kw): | ||
| 35 | super(ListCommand, self).__init__(*args, **kw) | ||
| 36 | |||
| 37 | cmd_opts = self.cmd_opts | ||
| 38 | |||
| 39 | cmd_opts.add_option( | ||
| 40 | '-o', '--outdated', | ||
| 41 | action='store_true', | ||
| 42 | default=False, | ||
| 43 | help='List outdated packages') | ||
| 44 | cmd_opts.add_option( | ||
| 45 | '-u', '--uptodate', | ||
| 46 | action='store_true', | ||
| 47 | default=False, | ||
| 48 | help='List up-to-date packages') | ||
| 49 | cmd_opts.add_option( | ||
| 50 | '-e', '--editable', | ||
| 51 | action='store_true', | ||
| 52 | default=False, | ||
| 53 | help='List editable projects.') | ||
| 54 | cmd_opts.add_option( | ||
| 55 | '-l', '--local', | ||
| 56 | action='store_true', | ||
| 57 | default=False, | ||
| 58 | help=('If in a virtualenv that has global access, do not list ' | ||
| 59 | 'globally-installed packages.'), | ||
| 60 | ) | ||
| 61 | self.cmd_opts.add_option( | ||
| 62 | '--user', | ||
| 63 | dest='user', | ||
| 64 | action='store_true', | ||
| 65 | default=False, | ||
| 66 | help='Only output packages installed in user-site.') | ||
| 67 | |||
| 68 | cmd_opts.add_option( | ||
| 69 | '--pre', | ||
| 70 | action='store_true', | ||
| 71 | default=False, | ||
| 72 | help=("Include pre-release and development versions. By default, " | ||
| 73 | "pip only finds stable versions."), | ||
| 74 | ) | ||
| 75 | |||
| 76 | cmd_opts.add_option( | ||
| 77 | '--format', | ||
| 78 | action='store', | ||
| 79 | dest='list_format', | ||
| 80 | default="columns", | ||
| 81 | choices=('legacy', 'columns', 'freeze', 'json'), | ||
| 82 | help="Select the output format among: columns (default), freeze, " | ||
| 83 | "json, or legacy.", | ||
| 84 | ) | ||
| 85 | |||
| 86 | cmd_opts.add_option( | ||
| 87 | '--not-required', | ||
| 88 | action='store_true', | ||
| 89 | dest='not_required', | ||
| 90 | help="List packages that are not dependencies of " | ||
| 91 | "installed packages.", | ||
| 92 | ) | ||
| 93 | |||
| 94 | cmd_opts.add_option( | ||
| 95 | '--exclude-editable', | ||
| 96 | action='store_false', | ||
| 97 | dest='include_editable', | ||
| 98 | help='Exclude editable packages from output.', | ||
| 99 | ) | ||
| 100 | cmd_opts.add_option( | ||
| 101 | '--include-editable', | ||
| 102 | action='store_true', | ||
| 103 | dest='include_editable', | ||
| 104 | help='Include editable packages in output.', | ||
| 105 | default=True, | ||
| 106 | ) | ||
| 107 | index_opts = make_option_group(index_group, self.parser) | ||
| 108 | |||
| 109 | self.parser.insert_option_group(0, index_opts) | ||
| 110 | self.parser.insert_option_group(0, cmd_opts) | ||
| 111 | |||
| 112 | def _build_package_finder(self, options, index_urls, session): | ||
| 113 | """ | ||
| 114 | Create a package finder appropriate to this list command. | ||
| 115 | """ | ||
| 116 | return PackageFinder( | ||
| 117 | find_links=options.find_links, | ||
| 118 | index_urls=index_urls, | ||
| 119 | allow_all_prereleases=options.pre, | ||
| 120 | trusted_hosts=options.trusted_hosts, | ||
| 121 | process_dependency_links=options.process_dependency_links, | ||
| 122 | session=session, | ||
| 123 | ) | ||
| 124 | |||
| 125 | def run(self, options, args): | ||
| 126 | if options.list_format == "legacy": | ||
| 127 | warnings.warn( | ||
| 128 | "The legacy format has been deprecated and will be removed " | ||
| 129 | "in the future.", | ||
| 130 | RemovedInPip11Warning, | ||
| 131 | ) | ||
| 132 | |||
| 133 | if options.outdated and options.uptodate: | ||
| 134 | raise CommandError( | ||
| 135 | "Options --outdated and --uptodate cannot be combined.") | ||
| 136 | |||
| 137 | packages = get_installed_distributions( | ||
| 138 | local_only=options.local, | ||
| 139 | user_only=options.user, | ||
| 140 | editables_only=options.editable, | ||
| 141 | include_editables=options.include_editable, | ||
| 142 | ) | ||
| 143 | |||
| 144 | if options.outdated: | ||
| 145 | packages = self.get_outdated(packages, options) | ||
| 146 | elif options.uptodate: | ||
| 147 | packages = self.get_uptodate(packages, options) | ||
| 148 | |||
| 149 | if options.not_required: | ||
| 150 | packages = self.get_not_required(packages, options) | ||
| 151 | |||
| 152 | self.output_package_listing(packages, options) | ||
| 153 | |||
| 154 | def get_outdated(self, packages, options): | ||
| 155 | return [ | ||
| 156 | dist for dist in self.iter_packages_latest_infos(packages, options) | ||
| 157 | if dist.latest_version > dist.parsed_version | ||
| 158 | ] | ||
| 159 | |||
| 160 | def get_uptodate(self, packages, options): | ||
| 161 | return [ | ||
| 162 | dist for dist in self.iter_packages_latest_infos(packages, options) | ||
| 163 | if dist.latest_version == dist.parsed_version | ||
| 164 | ] | ||
| 165 | |||
| 166 | def get_not_required(self, packages, options): | ||
| 167 | dep_keys = set() | ||
| 168 | for dist in packages: | ||
| 169 | dep_keys.update(requirement.key for requirement in dist.requires()) | ||
| 170 | return {pkg for pkg in packages if pkg.key not in dep_keys} | ||
| 171 | |||
| 172 | def iter_packages_latest_infos(self, packages, options): | ||
| 173 | index_urls = [options.index_url] + options.extra_index_urls | ||
| 174 | if options.no_index: | ||
| 175 | logger.debug('Ignoring indexes: %s', ','.join(index_urls)) | ||
| 176 | index_urls = [] | ||
| 177 | |||
| 178 | dependency_links = [] | ||
| 179 | for dist in packages: | ||
| 180 | if dist.has_metadata('dependency_links.txt'): | ||
| 181 | dependency_links.extend( | ||
| 182 | dist.get_metadata_lines('dependency_links.txt'), | ||
| 183 | ) | ||
| 184 | |||
| 185 | with self._build_session(options) as session: | ||
| 186 | finder = self._build_package_finder(options, index_urls, session) | ||
| 187 | finder.add_dependency_links(dependency_links) | ||
| 188 | |||
| 189 | for dist in packages: | ||
| 190 | typ = 'unknown' | ||
| 191 | all_candidates = finder.find_all_candidates(dist.key) | ||
| 192 | if not options.pre: | ||
| 193 | # Remove prereleases | ||
| 194 | all_candidates = [candidate for candidate in all_candidates | ||
| 195 | if not candidate.version.is_prerelease] | ||
| 196 | |||
| 197 | if not all_candidates: | ||
| 198 | continue | ||
| 199 | best_candidate = max(all_candidates, | ||
| 200 | key=finder._candidate_sort_key) | ||
| 201 | remote_version = best_candidate.version | ||
| 202 | if best_candidate.location.is_wheel: | ||
| 203 | typ = 'wheel' | ||
| 204 | else: | ||
| 205 | typ = 'sdist' | ||
| 206 | # This is dirty but makes the rest of the code much cleaner | ||
| 207 | dist.latest_version = remote_version | ||
| 208 | dist.latest_filetype = typ | ||
| 209 | yield dist | ||
| 210 | |||
| 211 | def output_legacy(self, dist, options): | ||
| 212 | if options.verbose >= 1: | ||
| 213 | return '%s (%s, %s, %s)' % ( | ||
| 214 | dist.project_name, | ||
| 215 | dist.version, | ||
| 216 | dist.location, | ||
| 217 | get_installer(dist), | ||
| 218 | ) | ||
| 219 | elif dist_is_editable(dist): | ||
| 220 | return '%s (%s, %s)' % ( | ||
| 221 | dist.project_name, | ||
| 222 | dist.version, | ||
| 223 | dist.location, | ||
| 224 | ) | ||
| 225 | else: | ||
| 226 | return '%s (%s)' % (dist.project_name, dist.version) | ||
| 227 | |||
| 228 | def output_legacy_latest(self, dist, options): | ||
| 229 | return '%s - Latest: %s [%s]' % ( | ||
| 230 | self.output_legacy(dist, options), | ||
| 231 | dist.latest_version, | ||
| 232 | dist.latest_filetype, | ||
| 233 | ) | ||
| 234 | |||
| 235 | def output_package_listing(self, packages, options): | ||
| 236 | packages = sorted( | ||
| 237 | packages, | ||
| 238 | key=lambda dist: dist.project_name.lower(), | ||
| 239 | ) | ||
| 240 | if options.list_format == 'columns' and packages: | ||
| 241 | data, header = format_for_columns(packages, options) | ||
| 242 | self.output_package_listing_columns(data, header) | ||
| 243 | elif options.list_format == 'freeze': | ||
| 244 | for dist in packages: | ||
| 245 | if options.verbose >= 1: | ||
| 246 | logger.info("%s==%s (%s)", dist.project_name, | ||
| 247 | dist.version, dist.location) | ||
| 248 | else: | ||
| 249 | logger.info("%s==%s", dist.project_name, dist.version) | ||
| 250 | elif options.list_format == 'json': | ||
| 251 | logger.info(format_for_json(packages, options)) | ||
| 252 | elif options.list_format == "legacy": | ||
| 253 | for dist in packages: | ||
| 254 | if options.outdated: | ||
| 255 | logger.info(self.output_legacy_latest(dist, options)) | ||
| 256 | else: | ||
| 257 | logger.info(self.output_legacy(dist, options)) | ||
| 258 | |||
| 259 | def output_package_listing_columns(self, data, header): | ||
| 260 | # insert the header first: we need to know the size of column names | ||
| 261 | if len(data) > 0: | ||
| 262 | data.insert(0, header) | ||
| 263 | |||
| 264 | pkg_strings, sizes = tabulate(data) | ||
| 265 | |||
| 266 | # Create and add a separator. | ||
| 267 | if len(data) > 0: | ||
| 268 | pkg_strings.insert(1, " ".join(map(lambda x: '-' * x, sizes))) | ||
| 269 | |||
| 270 | for val in pkg_strings: | ||
| 271 | logger.info(val) | ||
| 272 | |||
| 273 | |||
| 274 | def tabulate(vals): | ||
| 275 | # From pfmoore on GitHub: | ||
| 276 | # https://github.com/pypa/pip/issues/3651#issuecomment-216932564 | ||
| 277 | assert len(vals) > 0 | ||
| 278 | |||
| 279 | sizes = [0] * max(len(x) for x in vals) | ||
| 280 | for row in vals: | ||
| 281 | sizes = [max(s, len(str(c))) for s, c in zip_longest(sizes, row)] | ||
| 282 | |||
| 283 | result = [] | ||
| 284 | for row in vals: | ||
| 285 | display = " ".join([str(c).ljust(s) if c is not None else '' | ||
| 286 | for s, c in zip_longest(sizes, row)]) | ||
| 287 | result.append(display) | ||
| 288 | |||
| 289 | return result, sizes | ||
| 290 | |||
| 291 | |||
| 292 | def format_for_columns(pkgs, options): | ||
| 293 | """ | ||
| 294 | Convert the package data into something usable | ||
| 295 | by output_package_listing_columns. | ||
| 296 | """ | ||
| 297 | running_outdated = options.outdated | ||
| 298 | # Adjust the header for the `pip list --outdated` case. | ||
| 299 | if running_outdated: | ||
| 300 | header = ["Package", "Version", "Latest", "Type"] | ||
| 301 | else: | ||
| 302 | header = ["Package", "Version"] | ||
| 303 | |||
| 304 | data = [] | ||
| 305 | if options.verbose >= 1 or any(dist_is_editable(x) for x in pkgs): | ||
| 306 | header.append("Location") | ||
| 307 | if options.verbose >= 1: | ||
| 308 | header.append("Installer") | ||
| 309 | |||
| 310 | for proj in pkgs: | ||
| 311 | # if we're working on the 'outdated' list, separate out the | ||
| 312 | # latest_version and type | ||
| 313 | row = [proj.project_name, proj.version] | ||
| 314 | |||
| 315 | if running_outdated: | ||
| 316 | row.append(proj.latest_version) | ||
| 317 | row.append(proj.latest_filetype) | ||
| 318 | |||
| 319 | if options.verbose >= 1 or dist_is_editable(proj): | ||
| 320 | row.append(proj.location) | ||
| 321 | if options.verbose >= 1: | ||
| 322 | row.append(get_installer(proj)) | ||
| 323 | |||
| 324 | data.append(row) | ||
| 325 | |||
| 326 | return data, header | ||
| 327 | |||
| 328 | |||
| 329 | def format_for_json(packages, options): | ||
| 330 | data = [] | ||
| 331 | for dist in packages: | ||
| 332 | info = { | ||
| 333 | 'name': dist.project_name, | ||
| 334 | 'version': six.text_type(dist.version), | ||
| 335 | } | ||
| 336 | if options.verbose >= 1: | ||
| 337 | info['location'] = dist.location | ||
| 338 | info['installer'] = get_installer(dist) | ||
| 339 | if options.outdated: | ||
| 340 | info['latest_version'] = six.text_type(dist.latest_version) | ||
| 341 | info['latest_filetype'] = dist.latest_filetype | ||
| 342 | data.append(info) | ||
| 343 | return json.dumps(data) | ||
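The column layout used by `pip list --format=columns` comes from the module-level `tabulate` helper above. A minimal sketch of its contract, again assuming the pip 10.x internal import path:

```python
# Column widths are the maximum cell width per column; the caller inserts
# the header row and the dashed separator (see output_package_listing_columns).
from pip._internal.commands.list import tabulate

rows, sizes = tabulate([["Package", "Version"], ["requests", "2.18.4"]])
print(sizes)   # [8, 7]
for line in rows:
    print(line)
# Package  Version
# requests 2.18.4
```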
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/search.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/search.py new file mode 100644 index 0000000..83895ce --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/search.py | |||
| @@ -0,0 +1,135 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | |||
| 3 | import logging | ||
| 4 | import sys | ||
| 5 | import textwrap | ||
| 6 | from collections import OrderedDict | ||
| 7 | |||
| 8 | from pip._vendor import pkg_resources | ||
| 9 | from pip._vendor.packaging.version import parse as parse_version | ||
| 10 | # NOTE: XMLRPC Client is not annotated in typeshed as of 2017-07-17, which is | ||
| 11 | # why we ignore the type on this import | ||
| 12 | from pip._vendor.six.moves import xmlrpc_client # type: ignore | ||
| 13 | |||
| 14 | from pip._internal.basecommand import SUCCESS, Command | ||
| 15 | from pip._internal.compat import get_terminal_size | ||
| 16 | from pip._internal.download import PipXmlrpcTransport | ||
| 17 | from pip._internal.exceptions import CommandError | ||
| 18 | from pip._internal.models import PyPI | ||
| 19 | from pip._internal.status_codes import NO_MATCHES_FOUND | ||
| 20 | from pip._internal.utils.logging import indent_log | ||
| 21 | |||
| 22 | logger = logging.getLogger(__name__) | ||
| 23 | |||
| 24 | |||
| 25 | class SearchCommand(Command): | ||
| 26 | """Search for PyPI packages whose name or summary contains <query>.""" | ||
| 27 | name = 'search' | ||
| 28 | usage = """ | ||
| 29 | %prog [options] <query>""" | ||
| 30 | summary = 'Search PyPI for packages.' | ||
| 31 | ignore_require_venv = True | ||
| 32 | |||
| 33 | def __init__(self, *args, **kw): | ||
| 34 | super(SearchCommand, self).__init__(*args, **kw) | ||
| 35 | self.cmd_opts.add_option( | ||
| 36 | '-i', '--index', | ||
| 37 | dest='index', | ||
| 38 | metavar='URL', | ||
| 39 | default=PyPI.pypi_url, | ||
| 40 | help='Base URL of Python Package Index (default %default)') | ||
| 41 | |||
| 42 | self.parser.insert_option_group(0, self.cmd_opts) | ||
| 43 | |||
| 44 | def run(self, options, args): | ||
| 45 | if not args: | ||
| 46 | raise CommandError('Missing required argument (search query).') | ||
| 47 | query = args | ||
| 48 | pypi_hits = self.search(query, options) | ||
| 49 | hits = transform_hits(pypi_hits) | ||
| 50 | |||
| 51 | terminal_width = None | ||
| 52 | if sys.stdout.isatty(): | ||
| 53 | terminal_width = get_terminal_size()[0] | ||
| 54 | |||
| 55 | print_results(hits, terminal_width=terminal_width) | ||
| 56 | if pypi_hits: | ||
| 57 | return SUCCESS | ||
| 58 | return NO_MATCHES_FOUND | ||
| 59 | |||
| 60 | def search(self, query, options): | ||
| 61 | index_url = options.index | ||
| 62 | with self._build_session(options) as session: | ||
| 63 | transport = PipXmlrpcTransport(index_url, session) | ||
| 64 | pypi = xmlrpc_client.ServerProxy(index_url, transport) | ||
| 65 | hits = pypi.search({'name': query, 'summary': query}, 'or') | ||
| 66 | return hits | ||
| 67 | |||
| 68 | |||
| 69 | def transform_hits(hits): | ||
| 70 | """ | ||
| 71 | The list from PyPI is really a list of versions. We want a list of | ||
| 72 | packages with the list of versions stored inline. This converts the | ||
| 73 | list from PyPI into one we can use. | ||
| 74 | """ | ||
| 75 | packages = OrderedDict() | ||
| 76 | for hit in hits: | ||
| 77 | name = hit['name'] | ||
| 78 | summary = hit['summary'] | ||
| 79 | version = hit['version'] | ||
| 80 | |||
| 81 | if name not in packages: | ||
| 82 | packages[name] = { | ||
| 83 | 'name': name, | ||
| 84 | 'summary': summary, | ||
| 85 | 'versions': [version], | ||
| 86 | } | ||
| 87 | else: | ||
| 88 | packages[name]['versions'].append(version) | ||
| 89 | |||
| 90 | # if this is the highest version, replace the stored summary | ||
| 91 | if version == highest_version(packages[name]['versions']): | ||
| 92 | packages[name]['summary'] = summary | ||
| 93 | |||
| 94 | return list(packages.values()) | ||
| 95 | |||
| 96 | |||
| 97 | def print_results(hits, name_column_width=None, terminal_width=None): | ||
| 98 | if not hits: | ||
| 99 | return | ||
| 100 | if name_column_width is None: | ||
| 101 | name_column_width = max([ | ||
| 102 | len(hit['name']) + len(highest_version(hit.get('versions', ['-']))) | ||
| 103 | for hit in hits | ||
| 104 | ]) + 4 | ||
| 105 | |||
| 106 | installed_packages = [p.project_name for p in pkg_resources.working_set] | ||
| 107 | for hit in hits: | ||
| 108 | name = hit['name'] | ||
| 109 | summary = hit['summary'] or '' | ||
| 110 | latest = highest_version(hit.get('versions', ['-'])) | ||
| 111 | if terminal_width is not None: | ||
| 112 | target_width = terminal_width - name_column_width - 5 | ||
| 113 | if target_width > 10: | ||
| 114 | # wrap and indent summary to fit terminal | ||
| 115 | summary = textwrap.wrap(summary, target_width) | ||
| 116 | summary = ('\n' + ' ' * (name_column_width + 3)).join(summary) | ||
| 117 | |||
| 118 | line = '%-*s - %s' % (name_column_width, | ||
| 119 | '%s (%s)' % (name, latest), summary) | ||
| 120 | try: | ||
| 121 | logger.info(line) | ||
| 122 | if name in installed_packages: | ||
| 123 | dist = pkg_resources.get_distribution(name) | ||
| 124 | with indent_log(): | ||
| 125 | if dist.version == latest: | ||
| 126 | logger.info('INSTALLED: %s (latest)', dist.version) | ||
| 127 | else: | ||
| 128 | logger.info('INSTALLED: %s', dist.version) | ||
| 129 | logger.info('LATEST: %s', latest) | ||
| 130 | except UnicodeEncodeError: | ||
| 131 | pass | ||
| 132 | |||
| 133 | |||
| 134 | def highest_version(versions): | ||
| 135 | return max(versions, key=parse_version) | ||
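To see how `transform_hits` collapses PyPI's per-version hit list into one entry per package, keeping the summary of the highest version, consider this sketch (pip 10.x internal path assumed; the hit dicts mimic the XML-RPC response shape):

```python
from pip._internal.commands.search import highest_version, transform_hits

# Two hits for the same package, as the XML-RPC search endpoint returns them.
hits = [
    {'name': 'demo', 'summary': 'old summary', 'version': '1.0'},
    {'name': 'demo', 'summary': 'new summary', 'version': '2.0'},
]
packages = transform_hits(hits)
print(packages[0]['versions'])          # ['1.0', '2.0']
print(packages[0]['summary'])           # new summary (from the highest version)
print(highest_version(['1.0', '2.0']))  # 2.0
```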
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/show.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/show.py new file mode 100644 index 0000000..bad9628 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/show.py | |||
| @@ -0,0 +1,164 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | |||
| 3 | import logging | ||
| 4 | import os | ||
| 5 | from email.parser import FeedParser # type: ignore | ||
| 6 | |||
| 7 | from pip._vendor import pkg_resources | ||
| 8 | from pip._vendor.packaging.utils import canonicalize_name | ||
| 9 | |||
| 10 | from pip._internal.basecommand import Command | ||
| 11 | from pip._internal.status_codes import ERROR, SUCCESS | ||
| 12 | |||
| 13 | logger = logging.getLogger(__name__) | ||
| 14 | |||
| 15 | |||
| 16 | class ShowCommand(Command): | ||
| 17 | """Show information about one or more installed packages.""" | ||
| 18 | name = 'show' | ||
| 19 | usage = """ | ||
| 20 | %prog [options] <package> ...""" | ||
| 21 | summary = 'Show information about installed packages.' | ||
| 22 | ignore_require_venv = True | ||
| 23 | |||
| 24 | def __init__(self, *args, **kw): | ||
| 25 | super(ShowCommand, self).__init__(*args, **kw) | ||
| 26 | self.cmd_opts.add_option( | ||
| 27 | '-f', '--files', | ||
| 28 | dest='files', | ||
| 29 | action='store_true', | ||
| 30 | default=False, | ||
| 31 | help='Show the full list of installed files for each package.') | ||
| 32 | |||
| 33 | self.parser.insert_option_group(0, self.cmd_opts) | ||
| 34 | |||
| 35 | def run(self, options, args): | ||
| 36 | if not args: | ||
| 37 | logger.warning('ERROR: Please provide a package name or names.') | ||
| 38 | return ERROR | ||
| 39 | query = args | ||
| 40 | |||
| 41 | results = search_packages_info(query) | ||
| 42 | if not print_results( | ||
| 43 | results, list_files=options.files, verbose=options.verbose): | ||
| 44 | return ERROR | ||
| 45 | return SUCCESS | ||
| 46 | |||
| 47 | |||
| 48 | def search_packages_info(query): | ||
| 49 | """ | ||
| 50 | Gather details from installed distributions. Yield a dict per | ||
| 51 | distribution with its name, version, location, and installed files. | ||
| 52 | Listing installed files requires a pip-generated 'installed-files.txt' | ||
| 53 | in the distribution's '.egg-info' directory. | ||
| 54 | """ | ||
| 55 | installed = {} | ||
| 56 | for p in pkg_resources.working_set: | ||
| 57 | installed[canonicalize_name(p.project_name)] = p | ||
| 58 | |||
| 59 | query_names = [canonicalize_name(name) for name in query] | ||
| 60 | |||
| 61 | for dist in [installed[pkg] for pkg in query_names if pkg in installed]: | ||
| 62 | package = { | ||
| 63 | 'name': dist.project_name, | ||
| 64 | 'version': dist.version, | ||
| 65 | 'location': dist.location, | ||
| 66 | 'requires': [dep.project_name for dep in dist.requires()], | ||
| 67 | } | ||
| 68 | file_list = None | ||
| 69 | metadata = None | ||
| 70 | if isinstance(dist, pkg_resources.DistInfoDistribution): | ||
| 71 | # RECORD should be part of the .dist-info metadata | ||
| 72 | if dist.has_metadata('RECORD'): | ||
| 73 | lines = dist.get_metadata_lines('RECORD') | ||
| 74 | paths = [line.split(',')[0] for line in lines] | ||
| 75 | paths = [os.path.join(dist.location, p) for p in paths] | ||
| 76 | file_list = [os.path.relpath(p, dist.location) for p in paths] | ||
| 77 | |||
| 78 | if dist.has_metadata('METADATA'): | ||
| 79 | metadata = dist.get_metadata('METADATA') | ||
| 80 | else: | ||
| 81 | # Otherwise use pip's install log for .egg-info distributions | ||
| 82 | if dist.has_metadata('installed-files.txt'): | ||
| 83 | paths = dist.get_metadata_lines('installed-files.txt') | ||
| 84 | paths = [os.path.join(dist.egg_info, p) for p in paths] | ||
| 85 | file_list = [os.path.relpath(p, dist.location) for p in paths] | ||
| 86 | |||
| 87 | if dist.has_metadata('PKG-INFO'): | ||
| 88 | metadata = dist.get_metadata('PKG-INFO') | ||
| 89 | |||
| 90 | if dist.has_metadata('entry_points.txt'): | ||
| 91 | entry_points = dist.get_metadata_lines('entry_points.txt') | ||
| 92 | package['entry_points'] = entry_points | ||
| 93 | |||
| 94 | if dist.has_metadata('INSTALLER'): | ||
| 95 | for line in dist.get_metadata_lines('INSTALLER'): | ||
| 96 | if line.strip(): | ||
| 97 | package['installer'] = line.strip() | ||
| 98 | break | ||
| 99 | |||
| 100 | # @todo: Should pkg_resources.Distribution have a | ||
| 101 | # `get_pkg_info` method? | ||
| 102 | feed_parser = FeedParser() | ||
| 103 | feed_parser.feed(metadata) | ||
| 104 | pkg_info_dict = feed_parser.close() | ||
| 105 | for key in ('metadata-version', 'summary', | ||
| 106 | 'home-page', 'author', 'author-email', 'license'): | ||
| 107 | package[key] = pkg_info_dict.get(key) | ||
| 108 | |||
| 109 | # It looks like FeedParser cannot deal with repeated headers | ||
| 110 | classifiers = [] | ||
| 111 | for line in metadata.splitlines(): | ||
| 112 | if line.startswith('Classifier: '): | ||
| 113 | classifiers.append(line[len('Classifier: '):]) | ||
| 114 | package['classifiers'] = classifiers | ||
| 115 | |||
| 116 | if file_list: | ||
| 117 | package['files'] = sorted(file_list) | ||
| 118 | yield package | ||
| 119 | |||
| 120 | |||
| 121 | def print_results(distributions, list_files=False, verbose=False): | ||
| 122 | """ | ||
| 123 | Print the information for each installed distribution found. | ||
| 124 | """ | ||
| 125 | results_printed = False | ||
| 126 | for i, dist in enumerate(distributions): | ||
| 127 | results_printed = True | ||
| 128 | if i > 0: | ||
| 129 | logger.info("---") | ||
| 130 | |||
| 131 | name = dist.get('name', '') | ||
| 132 | required_by = [ | ||
| 133 | pkg.project_name for pkg in pkg_resources.working_set | ||
| 134 | if name in [required.name for required in pkg.requires()] | ||
| 135 | ] | ||
| 136 | |||
| 137 | logger.info("Name: %s", name) | ||
| 138 | logger.info("Version: %s", dist.get('version', '')) | ||
| 139 | logger.info("Summary: %s", dist.get('summary', '')) | ||
| 140 | logger.info("Home-page: %s", dist.get('home-page', '')) | ||
| 141 | logger.info("Author: %s", dist.get('author', '')) | ||
| 142 | logger.info("Author-email: %s", dist.get('author-email', '')) | ||
| 143 | logger.info("License: %s", dist.get('license', '')) | ||
| 144 | logger.info("Location: %s", dist.get('location', '')) | ||
| 145 | logger.info("Requires: %s", ', '.join(dist.get('requires', []))) | ||
| 146 | logger.info("Required-by: %s", ', '.join(required_by)) | ||
| 147 | |||
| 148 | if verbose: | ||
| 149 | logger.info("Metadata-Version: %s", | ||
| 150 | dist.get('metadata-version', '')) | ||
| 151 | logger.info("Installer: %s", dist.get('installer', '')) | ||
| 152 | logger.info("Classifiers:") | ||
| 153 | for classifier in dist.get('classifiers', []): | ||
| 154 | logger.info(" %s", classifier) | ||
| 155 | logger.info("Entry-points:") | ||
| 156 | for entry in dist.get('entry_points', []): | ||
| 157 | logger.info(" %s", entry.strip()) | ||
| 158 | if list_files: | ||
| 159 | logger.info("Files:") | ||
| 160 | for line in dist.get('files', []): | ||
| 161 | logger.info(" %s", line.strip()) | ||
| 162 | if "files" not in dist: | ||
| 163 | logger.info("Cannot locate installed-files.txt") | ||
| 164 | return results_printed | ||
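`search_packages_info` is a generator yielding one metadata dict per queried distribution; `print_results` above just renders those dicts. A usage sketch, assuming pip itself is installed and the pip 10.x internal layout:

```python
from pip._internal.commands.show import search_packages_info

for pkg in search_packages_info(['pip']):
    print(pkg['name'], pkg['version'])  # e.g. "pip 10.0.1"
    print(pkg['requires'])              # direct dependencies; may be []
```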
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/uninstall.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/uninstall.py new file mode 100644 index 0000000..3bfa07f --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/uninstall.py | |||
| @@ -0,0 +1,71 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | |||
| 3 | from pip._vendor.packaging.utils import canonicalize_name | ||
| 4 | |||
| 5 | from pip._internal.basecommand import Command | ||
| 6 | from pip._internal.exceptions import InstallationError | ||
| 7 | from pip._internal.req import InstallRequirement, parse_requirements | ||
| 8 | |||
| 9 | |||
| 10 | class UninstallCommand(Command): | ||
| 11 | """ | ||
| 12 | Uninstall packages. | ||
| 13 | |||
| 14 | pip is able to uninstall most installed packages. Known exceptions are: | ||
| 15 | |||
| 16 | - Pure distutils packages installed with ``python setup.py install``, which | ||
| 17 | leave behind no metadata to determine what files were installed. | ||
| 18 | - Script wrappers installed by ``python setup.py develop``. | ||
| 19 | """ | ||
| 20 | name = 'uninstall' | ||
| 21 | usage = """ | ||
| 22 | %prog [options] <package> ... | ||
| 23 | %prog [options] -r <requirements file> ...""" | ||
| 24 | summary = 'Uninstall packages.' | ||
| 25 | |||
| 26 | def __init__(self, *args, **kw): | ||
| 27 | super(UninstallCommand, self).__init__(*args, **kw) | ||
| 28 | self.cmd_opts.add_option( | ||
| 29 | '-r', '--requirement', | ||
| 30 | dest='requirements', | ||
| 31 | action='append', | ||
| 32 | default=[], | ||
| 33 | metavar='file', | ||
| 34 | help='Uninstall all the packages listed in the given requirements ' | ||
| 35 | 'file. This option can be used multiple times.', | ||
| 36 | ) | ||
| 37 | self.cmd_opts.add_option( | ||
| 38 | '-y', '--yes', | ||
| 39 | dest='yes', | ||
| 40 | action='store_true', | ||
| 41 | help="Don't ask for confirmation of uninstall deletions.") | ||
| 42 | |||
| 43 | self.parser.insert_option_group(0, self.cmd_opts) | ||
| 44 | |||
| 45 | def run(self, options, args): | ||
| 46 | with self._build_session(options) as session: | ||
| 47 | reqs_to_uninstall = {} | ||
| 48 | for name in args: | ||
| 49 | req = InstallRequirement.from_line( | ||
| 50 | name, isolated=options.isolated_mode, | ||
| 51 | ) | ||
| 52 | if req.name: | ||
| 53 | reqs_to_uninstall[canonicalize_name(req.name)] = req | ||
| 54 | for filename in options.requirements: | ||
| 55 | for req in parse_requirements( | ||
| 56 | filename, | ||
| 57 | options=options, | ||
| 58 | session=session): | ||
| 59 | if req.name: | ||
| 60 | reqs_to_uninstall[canonicalize_name(req.name)] = req | ||
| 61 | if not reqs_to_uninstall: | ||
| 62 | raise InstallationError( | ||
| 63 | 'You must give at least one requirement to %(name)s (see ' | ||
| 64 | '"pip help %(name)s")' % dict(name=self.name) | ||
| 65 | ) | ||
| 66 | for req in reqs_to_uninstall.values(): | ||
| 67 | uninstall_pathset = req.uninstall( | ||
| 68 | auto_confirm=options.yes, verbose=self.verbosity > 0, | ||
| 69 | ) | ||
| 70 | if uninstall_pathset: | ||
| 71 | uninstall_pathset.commit() | ||
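The dedup key used in `run` above is `canonicalize_name`, which folds case and collapses runs of dots, dashes, and underscores, so differently spelled requests for the same project map to a single uninstall. A quick check (this helper is vendored with pip, as imported above):

```python
from pip._vendor.packaging.utils import canonicalize_name

print(canonicalize_name('Foo_Bar'))                                  # foo-bar
print(canonicalize_name('foo-bar') == canonicalize_name('Foo.Bar'))  # True
```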
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/wheel.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/wheel.py new file mode 100644 index 0000000..ed8cdfc --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/wheel.py | |||
| @@ -0,0 +1,179 @@ | |||
| 1 | # -*- coding: utf-8 -*- | ||
| 2 | from __future__ import absolute_import | ||
| 3 | |||
| 4 | import logging | ||
| 5 | import os | ||
| 6 | |||
| 7 | from pip._internal import cmdoptions | ||
| 8 | from pip._internal.basecommand import RequirementCommand | ||
| 9 | from pip._internal.cache import WheelCache | ||
| 10 | from pip._internal.exceptions import CommandError, PreviousBuildDirError | ||
| 11 | from pip._internal.operations.prepare import RequirementPreparer | ||
| 12 | from pip._internal.req import RequirementSet | ||
| 13 | from pip._internal.resolve import Resolver | ||
| 14 | from pip._internal.utils.temp_dir import TempDirectory | ||
| 15 | from pip._internal.wheel import WheelBuilder | ||
| 16 | |||
| 17 | logger = logging.getLogger(__name__) | ||
| 18 | |||
| 19 | |||
| 20 | class WheelCommand(RequirementCommand): | ||
| 21 | """ | ||
| 22 | Build Wheel archives for your requirements and dependencies. | ||
| 23 | |||
| 24 | Wheel is a built-package format, and offers the advantage of not | ||
| 25 | recompiling your software during every install. For more details, see the | ||
| 26 | wheel docs: https://wheel.readthedocs.io/en/latest/ | ||
| 27 | |||
| 28 | Requirements: setuptools>=0.8, and wheel. | ||
| 29 | |||
| 30 | 'pip wheel' uses the bdist_wheel setuptools extension from the wheel | ||
| 31 | package to build individual wheels. | ||
| 32 | |||
| 33 | """ | ||
| 34 | |||
| 35 | name = 'wheel' | ||
| 36 | usage = """ | ||
| 37 | %prog [options] <requirement specifier> ... | ||
| 38 | %prog [options] -r <requirements file> ... | ||
| 39 | %prog [options] [-e] <vcs project url> ... | ||
| 40 | %prog [options] [-e] <local project path> ... | ||
| 41 | %prog [options] <archive url/path> ...""" | ||
| 42 | |||
| 43 | summary = 'Build wheels from your requirements.' | ||
| 44 | |||
| 45 | def __init__(self, *args, **kw): | ||
| 46 | super(WheelCommand, self).__init__(*args, **kw) | ||
| 47 | |||
| 48 | cmd_opts = self.cmd_opts | ||
| 49 | |||
| 50 | cmd_opts.add_option( | ||
| 51 | '-w', '--wheel-dir', | ||
| 52 | dest='wheel_dir', | ||
| 53 | metavar='dir', | ||
| 54 | default=os.curdir, | ||
| 55 | help=("Build wheels into <dir>, where the default is the " | ||
| 56 | "current working directory."), | ||
| 57 | ) | ||
| 58 | cmd_opts.add_option(cmdoptions.no_binary()) | ||
| 59 | cmd_opts.add_option(cmdoptions.only_binary()) | ||
| 60 | cmd_opts.add_option( | ||
| 61 | '--build-option', | ||
| 62 | dest='build_options', | ||
| 63 | metavar='options', | ||
| 64 | action='append', | ||
| 65 | help="Extra arguments to be supplied to 'setup.py bdist_wheel'.", | ||
| 66 | ) | ||
| 67 | cmd_opts.add_option(cmdoptions.no_build_isolation()) | ||
| 68 | cmd_opts.add_option(cmdoptions.constraints()) | ||
| 69 | cmd_opts.add_option(cmdoptions.editable()) | ||
| 70 | cmd_opts.add_option(cmdoptions.requirements()) | ||
| 71 | cmd_opts.add_option(cmdoptions.src()) | ||
| 72 | cmd_opts.add_option(cmdoptions.ignore_requires_python()) | ||
| 73 | cmd_opts.add_option(cmdoptions.no_deps()) | ||
| 74 | cmd_opts.add_option(cmdoptions.build_dir()) | ||
| 75 | cmd_opts.add_option(cmdoptions.progress_bar()) | ||
| 76 | |||
| 77 | cmd_opts.add_option( | ||
| 78 | '--global-option', | ||
| 79 | dest='global_options', | ||
| 80 | action='append', | ||
| 81 | metavar='options', | ||
| 82 | help="Extra global options to be supplied to the setup.py " | ||
| 83 | "call before the 'bdist_wheel' command.") | ||
| 84 | |||
| 85 | cmd_opts.add_option( | ||
| 86 | '--pre', | ||
| 87 | action='store_true', | ||
| 88 | default=False, | ||
| 89 | help=("Include pre-release and development versions. By default, " | ||
| 90 | "pip only finds stable versions."), | ||
| 91 | ) | ||
| 92 | |||
| 93 | cmd_opts.add_option(cmdoptions.no_clean()) | ||
| 94 | cmd_opts.add_option(cmdoptions.require_hashes()) | ||
| 95 | |||
| 96 | index_opts = cmdoptions.make_option_group( | ||
| 97 | cmdoptions.index_group, | ||
| 98 | self.parser, | ||
| 99 | ) | ||
| 100 | |||
| 101 | self.parser.insert_option_group(0, index_opts) | ||
| 102 | self.parser.insert_option_group(0, cmd_opts) | ||
| 103 | |||
| 104 | def run(self, options, args): | ||
| 105 | cmdoptions.check_install_build_global(options) | ||
| 106 | |||
| 107 | index_urls = [options.index_url] + options.extra_index_urls | ||
| 108 | if options.no_index: | ||
| 109 | logger.debug('Ignoring indexes: %s', ','.join(index_urls)) | ||
| 110 | index_urls = [] | ||
| 111 | |||
| 112 | if options.build_dir: | ||
| 113 | options.build_dir = os.path.abspath(options.build_dir) | ||
| 114 | |||
| 115 | options.src_dir = os.path.abspath(options.src_dir) | ||
| 116 | |||
| 117 | with self._build_session(options) as session: | ||
| 118 | finder = self._build_package_finder(options, session) | ||
| 119 | build_delete = (not (options.no_clean or options.build_dir)) | ||
| 120 | wheel_cache = WheelCache(options.cache_dir, options.format_control) | ||
| 121 | |||
| 122 | with TempDirectory( | ||
| 123 | options.build_dir, delete=build_delete, kind="wheel" | ||
| 124 | ) as directory: | ||
| 125 | requirement_set = RequirementSet( | ||
| 126 | require_hashes=options.require_hashes, | ||
| 127 | ) | ||
| 128 | |||
| 129 | try: | ||
| 130 | self.populate_requirement_set( | ||
| 131 | requirement_set, args, options, finder, session, | ||
| 132 | self.name, wheel_cache | ||
| 133 | ) | ||
| 134 | |||
| 135 | preparer = RequirementPreparer( | ||
| 136 | build_dir=directory.path, | ||
| 137 | src_dir=options.src_dir, | ||
| 138 | download_dir=None, | ||
| 139 | wheel_download_dir=options.wheel_dir, | ||
| 140 | progress_bar=options.progress_bar, | ||
| 141 | build_isolation=options.build_isolation, | ||
| 142 | ) | ||
| 143 | |||
| 144 | resolver = Resolver( | ||
| 145 | preparer=preparer, | ||
| 146 | finder=finder, | ||
| 147 | session=session, | ||
| 148 | wheel_cache=wheel_cache, | ||
| 149 | use_user_site=False, | ||
| 150 | upgrade_strategy="to-satisfy-only", | ||
| 151 | force_reinstall=False, | ||
| 152 | ignore_dependencies=options.ignore_dependencies, | ||
| 153 | ignore_requires_python=options.ignore_requires_python, | ||
| 154 | ignore_installed=True, | ||
| 155 | isolated=options.isolated_mode, | ||
| 156 | ) | ||
| 157 | resolver.resolve(requirement_set) | ||
| 158 | |||
| 159 | # build wheels | ||
| 160 | wb = WheelBuilder( | ||
| 161 | finder, preparer, wheel_cache, | ||
| 162 | build_options=options.build_options or [], | ||
| 163 | global_options=options.global_options or [], | ||
| 164 | no_clean=options.no_clean, | ||
| 165 | ) | ||
| 166 | wheels_built_successfully = wb.build( | ||
| 167 | requirement_set.requirements.values(), session=session, | ||
| 168 | ) | ||
| 169 | if not wheels_built_successfully: | ||
| 170 | raise CommandError( | ||
| 171 | "Failed to build one or more wheels" | ||
| 172 | ) | ||
| 173 | except PreviousBuildDirError: | ||
| 174 | options.no_clean = True | ||
| 175 | raise | ||
| 176 | finally: | ||
| 177 | if not options.no_clean: | ||
| 178 | requirement_set.cleanup_files() | ||
| 179 | wheel_cache.cleanup() | ||
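For reference, the command above is what runs for `pip wheel --wheel-dir ./wheels requests`. Driving it in-process looks roughly like the sketch below; calling pip's entry point this way is not a supported API and is shown only to illustrate what the class wires together:

```python
# Rough in-process equivalent of: pip wheel --wheel-dir ./wheels requests
from pip._internal import main as pip_main

rc = pip_main(['wheel', '--wheel-dir', './wheels', 'requests'])
print('exit status:', rc)  # 0 on success
```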
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/compat.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/compat.py new file mode 100644 index 0000000..064717d --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/compat.py | |||
| @@ -0,0 +1,235 @@ | |||
| 1 | """Stuff that differs in different Python versions and platform | ||
| 2 | distributions.""" | ||
| 3 | from __future__ import absolute_import, division | ||
| 4 | |||
| 5 | import codecs | ||
| 6 | import locale | ||
| 7 | import logging | ||
| 8 | import os | ||
| 9 | import shutil | ||
| 10 | import sys | ||
| 11 | |||
| 12 | from pip._vendor.six import text_type | ||
| 13 | |||
| 14 | try: | ||
| 15 | import ipaddress | ||
| 16 | except ImportError: | ||
| 17 | try: | ||
| 18 | from pip._vendor import ipaddress # type: ignore | ||
| 19 | except ImportError: | ||
| 20 | import ipaddr as ipaddress # type: ignore | ||
| 21 | ipaddress.ip_address = ipaddress.IPAddress | ||
| 22 | ipaddress.ip_network = ipaddress.IPNetwork | ||
| 23 | |||
| 24 | |||
| 25 | __all__ = [ | ||
| 26 | "ipaddress", "uses_pycache", "console_to_str", "native_str", | ||
| 27 | "get_path_uid", "stdlib_pkgs", "WINDOWS", "samefile", "get_terminal_size", | ||
| 28 | ] | ||
| 29 | |||
| 30 | |||
| 31 | logger = logging.getLogger(__name__) | ||
| 32 | |||
| 33 | if sys.version_info >= (3, 4): | ||
| 34 | uses_pycache = True | ||
| 35 | from importlib.util import cache_from_source | ||
| 36 | else: | ||
| 37 | import imp | ||
| 38 | |||
| 39 | try: | ||
| 40 | cache_from_source = imp.cache_from_source # type: ignore | ||
| 41 | except AttributeError: | ||
| 42 | # does not use __pycache__ | ||
| 43 | cache_from_source = None | ||
| 44 | |||
| 45 | uses_pycache = cache_from_source is not None | ||
| 46 | |||
| 47 | |||
| 48 | if sys.version_info >= (3, 5): | ||
| 49 | backslashreplace_decode = "backslashreplace" | ||
| 50 | else: | ||
| 51 | # In version 3.4 and older, backslashreplace exists | ||
| 52 | # but does not support use for decoding. | ||
| 53 | # We implement our own replace handler for this | ||
| 54 | # situation, so that we can consistently use | ||
| 55 | # backslash replacement for all versions. | ||
| 56 | def backslashreplace_decode_fn(err): | ||
| 57 | raw_bytes = (err.object[i] for i in range(err.start, err.end)) | ||
| 58 | if sys.version_info[0] == 2: | ||
| 59 | # Python 2 gave us characters - convert to numeric bytes | ||
| 60 | raw_bytes = (ord(b) for b in raw_bytes) | ||
| 61 | return u"".join(u"\\x%x" % c for c in raw_bytes), err.end | ||
| 62 | codecs.register_error( | ||
| 63 | "backslashreplace_decode", | ||
| 64 | backslashreplace_decode_fn, | ||
| 65 | ) | ||
| 66 | backslashreplace_decode = "backslashreplace_decode" | ||
| 67 | |||
| 68 | |||
| 69 | def console_to_str(data): | ||
| 70 | """Return a string, safe for output, of subprocess output. | ||
| 71 | |||
| 72 | We assume the data is in the locale preferred encoding. | ||
| 73 | If it won't decode properly, we warn the user but decode as | ||
| 74 | best we can. | ||
| 75 | |||
| 76 | We also ensure that the output can be safely written to | ||
| 77 | standard output without encoding errors. | ||
| 78 | """ | ||
| 79 | |||
| 80 | # First, get the encoding we assume. This is the preferred | ||
| 81 | # encoding for the locale, unless that is not found, or | ||
| 82 | # it is ASCII, in which case assume UTF-8 | ||
| 83 | encoding = locale.getpreferredencoding() | ||
| 84 | if (not encoding) or codecs.lookup(encoding).name == "ascii": | ||
| 85 | encoding = "utf-8" | ||
| 86 | |||
| 87 | # Now try to decode the data - if we fail, warn the user and | ||
| 88 | # decode with replacement. | ||
| 89 | try: | ||
| 90 | s = data.decode(encoding) | ||
| 91 | except UnicodeDecodeError: | ||
| 92 | logger.warning( | ||
| 93 | "Subprocess output does not appear to be encoded as %s", | ||
| 94 | encoding, | ||
| 95 | ) | ||
| 96 | s = data.decode(encoding, errors=backslashreplace_decode) | ||
| 97 | |||
| 98 | # Make sure we can print the output, by encoding it to the output | ||
| 99 | # encoding with replacement of unencodable characters, and then | ||
| 100 | # decoding again. | ||
| 101 | # We use stderr's encoding because it's less likely to be | ||
| 102 | # redirected and if we don't find an encoding we skip this | ||
| 103 | # step (on the assumption that output is wrapped by something | ||
| 104 | # that won't fail). | ||
| 105 | # The double getattr is to deal with the possibility that we're | ||
| 106 | # being called in a situation where sys.__stderr__ doesn't exist, | ||
| 107 | # or doesn't have an encoding attribute. Neither of these cases | ||
| 108 | # should occur in normal pip use, but there's no harm in checking | ||
| 109 | # in case people use pip in (unsupported) unusual situations. | ||
| 110 | output_encoding = getattr(getattr(sys, "__stderr__", None), | ||
| 111 | "encoding", None) | ||
| 112 | |||
| 113 | if output_encoding: | ||
| 114 | s = s.encode(output_encoding, errors="backslashreplace") | ||
| 115 | s = s.decode(output_encoding) | ||
| 116 | |||
| 117 | return s | ||
| 118 | |||
| 119 | |||
| 120 | if sys.version_info >= (3,): | ||
| 121 | def native_str(s, replace=False): | ||
| 122 | if isinstance(s, bytes): | ||
| 123 | return s.decode('utf-8', 'replace' if replace else 'strict') | ||
| 124 | return s | ||
| 125 | |||
| 126 | else: | ||
| 127 | def native_str(s, replace=False): | ||
| 128 | # Replace is ignored -- unicode to UTF-8 can't fail | ||
| 129 | if isinstance(s, text_type): | ||
| 130 | return s.encode('utf-8') | ||
| 131 | return s | ||
| 132 | |||
| 133 | |||
| 134 | def get_path_uid(path): | ||
| 135 | """ | ||
| 136 | Return path's uid. | ||
| 137 | |||
| 138 | Does not follow symlinks: | ||
| 139 | https://github.com/pypa/pip/pull/935#discussion_r5307003 | ||
| 140 | |||
| 141 | Placed this function in compat due to differences on AIX and | ||
| 142 | Jython, that should eventually go away. | ||
| 143 | |||
| 144 | :raises OSError: When path is a symlink or can't be read. | ||
| 145 | """ | ||
| 146 | if hasattr(os, 'O_NOFOLLOW'): | ||
| 147 | fd = os.open(path, os.O_RDONLY | os.O_NOFOLLOW) | ||
| 148 | file_uid = os.fstat(fd).st_uid | ||
| 149 | os.close(fd) | ||
| 150 | else: # AIX and Jython | ||
| 151 | # WARNING: time-of-check vulnerability, but the best we can do without O_NOFOLLOW | ||
| 152 | if not os.path.islink(path): | ||
| 153 | # older versions of Jython don't have `os.fstat` | ||
| 154 | file_uid = os.stat(path).st_uid | ||
| 155 | else: | ||
| 156 | # raise OSError for parity with os.O_NOFOLLOW above | ||
| 157 | raise OSError( | ||
| 158 | "%s is a symlink; Will not return uid for symlinks" % path | ||
| 159 | ) | ||
| 160 | return file_uid | ||
| 161 | |||
| 162 | |||
| 163 | def expanduser(path): | ||
| 164 | """ | ||
| 165 | Expand ~ and ~user constructions. | ||
| 166 | |||
| 167 | Includes a workaround for http://bugs.python.org/issue14768 | ||
| 168 | """ | ||
| 169 | expanded = os.path.expanduser(path) | ||
| 170 | if path.startswith('~/') and expanded.startswith('//'): | ||
| 171 | expanded = expanded[1:] | ||
| 172 | return expanded | ||
| 173 | |||
| 174 | |||
| 175 | # Packages in the stdlib that may have installation metadata, but should not be | ||
| 176 | # considered 'installed'. This theoretically could be determined based on | ||
| 177 | # dist.location (py27:`sysconfig.get_paths()['stdlib']`, | ||
| 178 | # py26:sysconfig.get_config_vars('LIBDEST')), but platform variation may | ||
| 179 | # make this ineffective, so the list is hard-coded instead | ||
| 180 | stdlib_pkgs = {"python", "wsgiref", "argparse"} | ||
| 181 | |||
| 182 | |||
| 183 | # windows detection, covers cpython and ironpython | ||
| 184 | WINDOWS = (sys.platform.startswith("win") or | ||
| 185 | (sys.platform == 'cli' and os.name == 'nt')) | ||
| 186 | |||
| 187 | |||
| 188 | def samefile(file1, file2): | ||
| 189 | """Provide an alternative for os.path.samefile on Windows/Python2""" | ||
| 190 | if hasattr(os.path, 'samefile'): | ||
| 191 | return os.path.samefile(file1, file2) | ||
| 192 | else: | ||
| 193 | path1 = os.path.normcase(os.path.abspath(file1)) | ||
| 194 | path2 = os.path.normcase(os.path.abspath(file2)) | ||
| 195 | return path1 == path2 | ||
| 196 | |||
| 197 | |||
| 198 | if hasattr(shutil, 'get_terminal_size'): | ||
| 199 | def get_terminal_size(): | ||
| 200 | """ | ||
| 201 | Returns a tuple (x, y) representing the width(x) and the height(y) | ||
| 202 | in characters of the terminal window. | ||
| 203 | """ | ||
| 204 | return tuple(shutil.get_terminal_size()) | ||
| 205 | else: | ||
| 206 | def get_terminal_size(): | ||
| 207 | """ | ||
| 208 | Returns a tuple (x, y) representing the width(x) and the height(y) | ||
| 209 | in characters of the terminal window. | ||
| 210 | """ | ||
| 211 | def ioctl_GWINSZ(fd): | ||
| 212 | try: | ||
| 213 | import fcntl | ||
| 214 | import termios | ||
| 215 | import struct | ||
| 216 | cr = struct.unpack_from( | ||
| 217 | 'hh', | ||
| 218 | fcntl.ioctl(fd, termios.TIOCGWINSZ, '12345678') | ||
| 219 | ) | ||
| 220 | except Exception: | ||
| 221 | return None | ||
| 222 | if cr == (0, 0): | ||
| 223 | return None | ||
| 224 | return cr | ||
| 225 | cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2) | ||
| 226 | if not cr: | ||
| 227 | try: | ||
| 228 | fd = os.open(os.ctermid(), os.O_RDONLY) | ||
| 229 | cr = ioctl_GWINSZ(fd) | ||
| 230 | os.close(fd) | ||
| 231 | except Exception: | ||
| 232 | pass | ||
| 233 | if not cr: | ||
| 234 | cr = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80)) | ||
| 235 | return int(cr[1]), int(cr[0]) | ||
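The decoding helper above implements a small but reusable pattern: prefer the locale's encoding, fall back to UTF-8 when the locale is missing or ASCII-only, and decode with escaped replacements rather than crashing. Below is a minimal Python 3 sketch of that pattern; `decode_subprocess_output` is a hypothetical name, and unlike pip's helper it skips the final re-encode against `sys.__stderr__` and the Python 2 `backslashreplace_decode` shim.

```python
# Minimal sketch (assumes Python 3.5+): decode subprocess output using the
# locale's preferred encoding, falling back to UTF-8 and to escaped bytes.
# decode_subprocess_output is a hypothetical name for illustration only.
import codecs
import locale


def decode_subprocess_output(data):
    # Prefer the locale encoding, unless it is missing or plain ASCII.
    encoding = locale.getpreferredencoding()
    if not encoding or codecs.lookup(encoding).name == "ascii":
        encoding = "utf-8"
    try:
        return data.decode(encoding)
    except UnicodeDecodeError:
        # Keep undecodable bytes visible as \xNN escapes instead of crashing.
        return data.decode(encoding, errors="backslashreplace")


# Under a UTF-8 locale:
print(decode_subprocess_output(b"caf\xc3\xa9"))  # café
print(decode_subprocess_output(b"caf\xe9"))      # caf\xe9
```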
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/configuration.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/configuration.py new file mode 100644 index 0000000..07af373 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/configuration.py | |||
| @@ -0,0 +1,378 @@ | |||
| 1 | """Configuration management setup | ||
| 2 | |||
| 3 | Some terminology: | ||
| 4 | - name | ||
| 5 | As written in config files. | ||
| 6 | - value | ||
| 7 | Value associated with a name | ||
| 8 | - key | ||
| 9 | Name combined with its section (section.name) | ||
| 10 | - variant | ||
| 11 | A single word describing where the configuration key-value pair came from | ||
| 12 | """ | ||
| 13 | |||
| 14 | import locale | ||
| 15 | import logging | ||
| 16 | import os | ||
| 17 | |||
| 18 | from pip._vendor import six | ||
| 19 | from pip._vendor.six.moves import configparser | ||
| 20 | |||
| 21 | from pip._internal.exceptions import ConfigurationError | ||
| 22 | from pip._internal.locations import ( | ||
| 23 | legacy_config_file, new_config_file, running_under_virtualenv, | ||
| 24 | site_config_files, venv_config_file, | ||
| 25 | ) | ||
| 26 | from pip._internal.utils.misc import ensure_dir, enum | ||
| 27 | from pip._internal.utils.typing import MYPY_CHECK_RUNNING | ||
| 28 | |||
| 29 | if MYPY_CHECK_RUNNING: | ||
| 30 | from typing import Any, Dict, Iterable, List, NewType, Optional, Tuple | ||
| 31 | |||
| 32 | RawConfigParser = configparser.RawConfigParser # Shorthand | ||
| 33 | Kind = NewType("Kind", str) | ||
| 34 | |||
| 35 | logger = logging.getLogger(__name__) | ||
| 36 | |||
| 37 | |||
| 38 | # NOTE: Maybe use the optionxform attribute to normalize key names. | ||
| 39 | def _normalize_name(name): | ||
| 40 | # type: (str) -> str | ||
| 41 | """Make a name consistent regardless of source (environment or file) | ||
| 42 | """ | ||
| 43 | name = name.lower().replace('_', '-') | ||
| 44 | if name.startswith('--'): | ||
| 45 | name = name[2:] # only prefer long opts | ||
| 46 | return name | ||
| 47 | |||
| 48 | |||
| 49 | def _disassemble_key(name): | ||
| 50 | # type: (str) -> List[str] | ||
| 51 | return name.split(".", 1) | ||
| 52 | |||
| 53 | |||
| 54 | # The kinds of configurations there are. | ||
| 55 | kinds = enum( | ||
| 56 | USER="user", # User Specific | ||
| 57 | GLOBAL="global", # System Wide | ||
| 58 | VENV="venv", # Virtual Environment Specific | ||
| 59 | ENV="env", # from PIP_CONFIG_FILE | ||
| 60 | ENV_VAR="env-var", # from Environment Variables | ||
| 61 | ) | ||
| 62 | |||
| 63 | |||
| 64 | class Configuration(object): | ||
| 65 | """Handles management of configuration. | ||
| 66 | |||
| 67 | Provides an interface to accessing and managing configuration files. | ||
| 68 | |||
| 69 | This class provides an API that takes "section.key-name" style | ||
| 70 | keys and stores the value associated with them as "key-name" under the | ||
| 71 | section "section". | ||
| 72 | |||
| 73 | This allows for a clean interface wherein both the section and the | ||
| 74 | key-name are preserved in an easy-to-manage form in the configuration | ||
| 75 | files, and the stored data remains tidy. | ||
| 76 | """ | ||
| 77 | |||
| 78 | def __init__(self, isolated, load_only=None): | ||
| 79 | # type: (bool, Kind) -> None | ||
| 80 | super(Configuration, self).__init__() | ||
| 81 | |||
| 82 | _valid_load_only = [kinds.USER, kinds.GLOBAL, kinds.VENV, None] | ||
| 83 | if load_only not in _valid_load_only: | ||
| 84 | raise ConfigurationError( | ||
| 85 | "Got invalid value for load_only - should be one of {}".format( | ||
| 86 | ", ".join(map(repr, _valid_load_only[:-1])) | ||
| 87 | ) | ||
| 88 | ) | ||
| 89 | self.isolated = isolated # type: bool | ||
| 90 | self.load_only = load_only # type: Optional[Kind] | ||
| 91 | |||
| 92 | # The order here determines the override order. | ||
| 93 | self._override_order = [ | ||
| 94 | kinds.GLOBAL, kinds.USER, kinds.VENV, kinds.ENV, kinds.ENV_VAR | ||
| 95 | ] | ||
| 96 | |||
| 97 | self._ignore_env_names = ["version", "help"] | ||
| 98 | |||
| 99 | # Because we keep track of where we got the data from | ||
| 100 | self._parsers = { | ||
| 101 | variant: [] for variant in self._override_order | ||
| 102 | } # type: Dict[Kind, List[Tuple[str, RawConfigParser]]] | ||
| 103 | self._config = { | ||
| 104 | variant: {} for variant in self._override_order | ||
| 105 | } # type: Dict[Kind, Dict[str, Any]] | ||
| 106 | self._modified_parsers = [] # type: List[Tuple[str, RawConfigParser]] | ||
| 107 | |||
| 108 | def load(self): | ||
| 109 | # type: () -> None | ||
| 110 | """Loads configuration from configuration files and environment | ||
| 111 | """ | ||
| 112 | self._load_config_files() | ||
| 113 | if not self.isolated: | ||
| 114 | self._load_environment_vars() | ||
| 115 | |||
| 116 | def get_file_to_edit(self): | ||
| 117 | # type: () -> Optional[str] | ||
| 118 | """Returns the file with highest priority in configuration | ||
| 119 | """ | ||
| 120 | assert self.load_only is not None, \ | ||
| 121 | "Need to be specified a file to be editing" | ||
| 122 | |||
| 123 | try: | ||
| 124 | return self._get_parser_to_modify()[0] | ||
| 125 | except IndexError: | ||
| 126 | return None | ||
| 127 | |||
| 128 | def items(self): | ||
| 129 | # type: () -> Iterable[Tuple[str, Any]] | ||
| 130 | """Returns key-value pairs like dict.items() representing the loaded | ||
| 131 | configuration | ||
| 132 | """ | ||
| 133 | return self._dictionary.items() | ||
| 134 | |||
| 135 | def get_value(self, key): | ||
| 136 | # type: (str) -> Any | ||
| 137 | """Get a value from the configuration. | ||
| 138 | """ | ||
| 139 | try: | ||
| 140 | return self._dictionary[key] | ||
| 141 | except KeyError: | ||
| 142 | raise ConfigurationError("No such key - {}".format(key)) | ||
| 143 | |||
| 144 | def set_value(self, key, value): | ||
| 145 | # type: (str, Any) -> None | ||
| 146 | """Modify a value in the configuration. | ||
| 147 | """ | ||
| 148 | self._ensure_have_load_only() | ||
| 149 | |||
| 150 | fname, parser = self._get_parser_to_modify() | ||
| 151 | |||
| 152 | if parser is not None: | ||
| 153 | section, name = _disassemble_key(key) | ||
| 154 | |||
| 155 | # Modify the parser and the configuration | ||
| 156 | if not parser.has_section(section): | ||
| 157 | parser.add_section(section) | ||
| 158 | parser.set(section, name, value) | ||
| 159 | |||
| 160 | self._config[self.load_only][key] = value | ||
| 161 | self._mark_as_modified(fname, parser) | ||
| 162 | |||
| 163 | def unset_value(self, key): | ||
| 164 | # type: (str) -> None | ||
| 165 | """Unset a value in the configuration. | ||
| 166 | """ | ||
| 167 | self._ensure_have_load_only() | ||
| 168 | |||
| 169 | if key not in self._config[self.load_only]: | ||
| 170 | raise ConfigurationError("No such key - {}".format(key)) | ||
| 171 | |||
| 172 | fname, parser = self._get_parser_to_modify() | ||
| 173 | |||
| 174 | if parser is not None: | ||
| 175 | section, name = _disassemble_key(key) | ||
| 176 | |||
| 177 | # Remove the key in the parser | ||
| 178 | modified_something = False | ||
| 179 | if parser.has_section(section): | ||
| 180 | # Returns whether the option was removed or not | ||
| 181 | modified_something = parser.remove_option(section, name) | ||
| 182 | |||
| 183 | if modified_something: | ||
| 184 | # name removed from parser, section may now be empty | ||
| 185 | section_iter = iter(parser.items(section)) | ||
| 186 | try: | ||
| 187 | val = six.next(section_iter) | ||
| 188 | except StopIteration: | ||
| 189 | val = None | ||
| 190 | |||
| 191 | if val is None: | ||
| 192 | parser.remove_section(section) | ||
| 193 | |||
| 194 | self._mark_as_modified(fname, parser) | ||
| 195 | else: | ||
| 196 | raise ConfigurationError( | ||
| 197 | "Fatal Internal error [id=1]. Please report as a bug." | ||
| 198 | ) | ||
| 199 | |||
| 200 | del self._config[self.load_only][key] | ||
| 201 | |||
| 202 | def save(self): | ||
| 203 | # type: () -> None | ||
| 204 | """Save the currentin-memory state. | ||
| 205 | """ | ||
| 206 | self._ensure_have_load_only() | ||
| 207 | |||
| 208 | for fname, parser in self._modified_parsers: | ||
| 209 | logger.info("Writing to %s", fname) | ||
| 210 | |||
| 211 | # Ensure directory exists. | ||
| 212 | ensure_dir(os.path.dirname(fname)) | ||
| 213 | |||
| 214 | with open(fname, "w") as f: | ||
| 215 | parser.write(f) # type: ignore | ||
| 216 | |||
| 217 | # | ||
| 218 | # Private routines | ||
| 219 | # | ||
| 220 | |||
| 221 | def _ensure_have_load_only(self): | ||
| 222 | # type: () -> None | ||
| 223 | if self.load_only is None: | ||
| 224 | raise ConfigurationError("Needed a specific file to be modifying.") | ||
| 225 | logger.debug("Will be working with %s variant only", self.load_only) | ||
| 226 | |||
| 227 | @property | ||
| 228 | def _dictionary(self): | ||
| 229 | # type: () -> Dict[str, Any] | ||
| 230 | """A dictionary representing the loaded configuration. | ||
| 231 | """ | ||
| 232 | # NOTE: Dictionaries are not populated if not loaded. So, conditionals | ||
| 233 | # are not needed here. | ||
| 234 | retval = {} | ||
| 235 | |||
| 236 | for variant in self._override_order: | ||
| 237 | retval.update(self._config[variant]) | ||
| 238 | |||
| 239 | return retval | ||
| 240 | |||
| 241 | def _load_config_files(self): | ||
| 242 | # type: () -> None | ||
| 243 | """Loads configuration from configuration files | ||
| 244 | """ | ||
| 245 | config_files = dict(self._iter_config_files()) | ||
| 246 | if config_files[kinds.ENV][0:1] == [os.devnull]: | ||
| 247 | logger.debug( | ||
| 248 | "Skipping loading configuration files due to " | ||
| 249 | "environment's PIP_CONFIG_FILE being os.devnull" | ||
| 250 | ) | ||
| 251 | return | ||
| 252 | |||
| 253 | for variant, files in config_files.items(): | ||
| 254 | for fname in files: | ||
| 255 | # If there's a specific variant set in `load_only`, load only | ||
| 256 | # that variant, not the others. | ||
| 257 | if self.load_only is not None and variant != self.load_only: | ||
| 258 | logger.debug( | ||
| 259 | "Skipping file '%s' (variant: %s)", fname, variant | ||
| 260 | ) | ||
| 261 | continue | ||
| 262 | |||
| 263 | parser = self._load_file(variant, fname) | ||
| 264 | |||
| 265 | # Keeping track of the parsers used | ||
| 266 | self._parsers[variant].append((fname, parser)) | ||
| 267 | |||
| 268 | def _load_file(self, variant, fname): | ||
| 269 | # type: (Kind, str) -> RawConfigParser | ||
| 270 | logger.debug("For variant '%s', will try loading '%s'", variant, fname) | ||
| 271 | parser = self._construct_parser(fname) | ||
| 272 | |||
| 273 | for section in parser.sections(): | ||
| 274 | items = parser.items(section) | ||
| 275 | self._config[variant].update(self._normalized_keys(section, items)) | ||
| 276 | |||
| 277 | return parser | ||
| 278 | |||
| 279 | def _construct_parser(self, fname): | ||
| 280 | # type: (str) -> RawConfigParser | ||
| 281 | parser = configparser.RawConfigParser() | ||
| 282 | # If there is no such file, don't bother reading it but create the | ||
| 283 | # parser anyway, to hold the data. | ||
| 284 | # Doing this is useful when modifying and saving files: we then have a | ||
| 285 | # parser to write into even if the file doesn't exist yet. | ||
| 286 | if os.path.exists(fname): | ||
| 287 | try: | ||
| 288 | parser.read(fname) | ||
| 289 | except UnicodeDecodeError: | ||
| 290 | raise ConfigurationError(( | ||
| 291 | "ERROR: " | ||
| 292 | "Configuration file contains invalid %s characters.\n" | ||
| 293 | "Please fix your configuration, located at %s\n" | ||
| 294 | ) % (locale.getpreferredencoding(False), fname)) | ||
| 295 | return parser | ||
| 296 | |||
| 297 | def _load_environment_vars(self): | ||
| 298 | # type: () -> None | ||
| 299 | """Loads configuration from environment variables | ||
| 300 | """ | ||
| 301 | self._config[kinds.ENV_VAR].update( | ||
| 302 | self._normalized_keys(":env:", self._get_environ_vars()) | ||
| 303 | ) | ||
| 304 | |||
| 305 | def _normalized_keys(self, section, items): | ||
| 306 | # type: (str, Iterable[Tuple[str, Any]]) -> Dict[str, Any] | ||
| 307 | """Normalizes items to construct a dictionary with normalized keys. | ||
| 308 | |||
| 309 | This routine is where the names become keys and are made the same | ||
| 310 | regardless of source - configuration files or environment. | ||
| 311 | """ | ||
| 312 | normalized = {} | ||
| 313 | for name, val in items: | ||
| 314 | key = section + "." + _normalize_name(name) | ||
| 315 | normalized[key] = val | ||
| 316 | return normalized | ||
| 317 | |||
| 318 | def _get_environ_vars(self): | ||
| 319 | # type: () -> Iterable[Tuple[str, str]] | ||
| 320 | """Returns a generator with all environmental vars with prefix PIP_""" | ||
| 321 | for key, val in os.environ.items(): | ||
| 322 | should_be_yielded = ( | ||
| 323 | key.startswith("PIP_") and | ||
| 324 | key[4:].lower() not in self._ignore_env_names | ||
| 325 | ) | ||
| 326 | if should_be_yielded: | ||
| 327 | yield key[4:].lower(), val | ||
| 328 | |||
| 329 | # XXX: This is patched in the tests. | ||
| 330 | def _iter_config_files(self): | ||
| 331 | # type: () -> Iterable[Tuple[Kind, List[str]]] | ||
| 332 | """Yields variant and configuration files associated with it. | ||
| 333 | |||
| 334 | This should be treated like items of a dictionary. | ||
| 335 | """ | ||
| 336 | # SMELL: Move the conditions out of this function | ||
| 337 | |||
| 338 | # environment variables have the lowest priority | ||
| 339 | config_file = os.environ.get('PIP_CONFIG_FILE', None) | ||
| 340 | if config_file is not None: | ||
| 341 | yield kinds.ENV, [config_file] | ||
| 342 | else: | ||
| 343 | yield kinds.ENV, [] | ||
| 344 | |||
| 345 | # at the base we have any global configuration | ||
| 346 | yield kinds.GLOBAL, list(site_config_files) | ||
| 347 | |||
| 348 | # per-user configuration next | ||
| 349 | should_load_user_config = not self.isolated and not ( | ||
| 350 | config_file and os.path.exists(config_file) | ||
| 351 | ) | ||
| 352 | if should_load_user_config: | ||
| 353 | # The legacy config file is overridden by the new config file | ||
| 354 | yield kinds.USER, [legacy_config_file, new_config_file] | ||
| 355 | |||
| 356 | # finally, virtualenv configuration, which trumps all the others | ||
| 357 | if running_under_virtualenv(): | ||
| 358 | yield kinds.VENV, [venv_config_file] | ||
| 359 | |||
| 360 | def _get_parser_to_modify(self): | ||
| 361 | # type: () -> Tuple[str, RawConfigParser] | ||
| 362 | # Determine which parser to modify | ||
| 363 | parsers = self._parsers[self.load_only] | ||
| 364 | if not parsers: | ||
| 365 | # This should not happen if everything works correctly. | ||
| 366 | raise ConfigurationError( | ||
| 367 | "Fatal Internal error [id=2]. Please report as a bug." | ||
| 368 | ) | ||
| 369 | |||
| 370 | # Use the highest priority parser. | ||
| 371 | return parsers[-1] | ||
| 372 | |||
| 373 | # XXX: This is patched in the tests. | ||
| 374 | def _mark_as_modified(self, fname, parser): | ||
| 375 | # type: (str, RawConfigParser) -> None | ||
| 376 | file_parser_tuple = (fname, parser) | ||
| 377 | if file_parser_tuple not in self._modified_parsers: | ||
| 378 | self._modified_parsers.append(file_parser_tuple) | ||
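To make the load/override/save flow above concrete, here is a hypothetical usage sketch of the Configuration class. This is pip's private API (note the pip._internal import path in this vendored copy), so treat it as illustrative rather than stable:

```python
# Hypothetical usage sketch of pip._internal.configuration.Configuration;
# a private API, shown only to illustrate the load/override/save flow.
from pip._internal.configuration import Configuration, kinds

config = Configuration(isolated=False, load_only=kinds.USER)
config.load()  # reads the user config files, plus PIP_* environment variables

# Keys are "section.name"; variants override in the order
# GLOBAL < USER < VENV < ENV < ENV_VAR.
for key, value in config.items():
    print(key, "=", value)

config.set_value("global.timeout", "60")  # staged in memory only
config.save()                             # writes the modified parser(s) to disk
```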
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/download.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/download.py new file mode 100644 index 0000000..e0e2d24 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/download.py | |||
| @@ -0,0 +1,922 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | |||
| 3 | import cgi | ||
| 4 | import email.utils | ||
| 5 | import getpass | ||
| 6 | import json | ||
| 7 | import logging | ||
| 8 | import mimetypes | ||
| 9 | import os | ||
| 10 | import platform | ||
| 11 | import re | ||
| 12 | import shutil | ||
| 13 | import sys | ||
| 14 | |||
| 15 | from pip._vendor import requests, six, urllib3 | ||
| 16 | from pip._vendor.cachecontrol import CacheControlAdapter | ||
| 17 | from pip._vendor.cachecontrol.caches import FileCache | ||
| 18 | from pip._vendor.lockfile import LockError | ||
| 19 | from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter | ||
| 20 | from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth | ||
| 21 | from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response | ||
| 22 | from pip._vendor.requests.structures import CaseInsensitiveDict | ||
| 23 | from pip._vendor.requests.utils import get_netrc_auth | ||
| 24 | # NOTE: XMLRPC Client is not annotated in typeshed as of 2017-07-17, which is | ||
| 25 | # why we ignore the type on this import | ||
| 26 | from pip._vendor.six.moves import xmlrpc_client # type: ignore | ||
| 27 | from pip._vendor.six.moves.urllib import parse as urllib_parse | ||
| 28 | from pip._vendor.six.moves.urllib import request as urllib_request | ||
| 29 | from pip._vendor.six.moves.urllib.parse import unquote as urllib_unquote | ||
| 30 | from pip._vendor.urllib3.util import IS_PYOPENSSL | ||
| 31 | |||
| 32 | import pip | ||
| 33 | from pip._internal.compat import WINDOWS | ||
| 34 | from pip._internal.exceptions import HashMismatch, InstallationError | ||
| 35 | from pip._internal.locations import write_delete_marker_file | ||
| 36 | from pip._internal.models import PyPI | ||
| 37 | from pip._internal.utils.encoding import auto_decode | ||
| 38 | from pip._internal.utils.filesystem import check_path_owner | ||
| 39 | from pip._internal.utils.glibc import libc_ver | ||
| 40 | from pip._internal.utils.logging import indent_log | ||
| 41 | from pip._internal.utils.misc import ( | ||
| 42 | ARCHIVE_EXTENSIONS, ask_path_exists, backup_dir, call_subprocess, consume, | ||
| 43 | display_path, format_size, get_installed_version, rmtree, splitext, | ||
| 44 | unpack_file, | ||
| 45 | ) | ||
| 46 | from pip._internal.utils.setuptools_build import SETUPTOOLS_SHIM | ||
| 47 | from pip._internal.utils.temp_dir import TempDirectory | ||
| 48 | from pip._internal.utils.ui import DownloadProgressProvider | ||
| 49 | from pip._internal.vcs import vcs | ||
| 50 | |||
| 51 | try: | ||
| 52 | import ssl # noqa | ||
| 53 | except ImportError: | ||
| 54 | ssl = None | ||
| 55 | |||
| 56 | HAS_TLS = (ssl is not None) or IS_PYOPENSSL | ||
| 57 | |||
| 58 | __all__ = ['get_file_content', | ||
| 59 | 'is_url', 'url_to_path', 'path_to_url', | ||
| 60 | 'is_archive_file', 'unpack_vcs_link', | ||
| 61 | 'unpack_file_url', 'is_vcs_url', 'is_file_url', | ||
| 62 | 'unpack_http_url', 'unpack_url'] | ||
| 63 | |||
| 64 | |||
| 65 | logger = logging.getLogger(__name__) | ||
| 66 | |||
| 67 | |||
| 68 | def user_agent(): | ||
| 69 | """ | ||
| 70 | Return a string representing the user agent. | ||
| 71 | """ | ||
| 72 | data = { | ||
| 73 | "installer": {"name": "pip", "version": pip.__version__}, | ||
| 74 | "python": platform.python_version(), | ||
| 75 | "implementation": { | ||
| 76 | "name": platform.python_implementation(), | ||
| 77 | }, | ||
| 78 | } | ||
| 79 | |||
| 80 | if data["implementation"]["name"] == 'CPython': | ||
| 81 | data["implementation"]["version"] = platform.python_version() | ||
| 82 | elif data["implementation"]["name"] == 'PyPy': | ||
| 83 | if sys.pypy_version_info.releaselevel == 'final': | ||
| 84 | pypy_version_info = sys.pypy_version_info[:3] | ||
| 85 | else: | ||
| 86 | pypy_version_info = sys.pypy_version_info | ||
| 87 | data["implementation"]["version"] = ".".join( | ||
| 88 | [str(x) for x in pypy_version_info] | ||
| 89 | ) | ||
| 90 | elif data["implementation"]["name"] == 'Jython': | ||
| 91 | # Complete Guess | ||
| 92 | data["implementation"]["version"] = platform.python_version() | ||
| 93 | elif data["implementation"]["name"] == 'IronPython': | ||
| 94 | # Complete Guess | ||
| 95 | data["implementation"]["version"] = platform.python_version() | ||
| 96 | |||
| 97 | if sys.platform.startswith("linux"): | ||
| 98 | from pip._vendor import distro | ||
| 99 | distro_infos = dict(filter( | ||
| 100 | lambda x: x[1], | ||
| 101 | zip(["name", "version", "id"], distro.linux_distribution()), | ||
| 102 | )) | ||
| 103 | libc = dict(filter( | ||
| 104 | lambda x: x[1], | ||
| 105 | zip(["lib", "version"], libc_ver()), | ||
| 106 | )) | ||
| 107 | if libc: | ||
| 108 | distro_infos["libc"] = libc | ||
| 109 | if distro_infos: | ||
| 110 | data["distro"] = distro_infos | ||
| 111 | |||
| 112 | if sys.platform.startswith("darwin") and platform.mac_ver()[0]: | ||
| 113 | data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]} | ||
| 114 | |||
| 115 | if platform.system(): | ||
| 116 | data.setdefault("system", {})["name"] = platform.system() | ||
| 117 | |||
| 118 | if platform.release(): | ||
| 119 | data.setdefault("system", {})["release"] = platform.release() | ||
| 120 | |||
| 121 | if platform.machine(): | ||
| 122 | data["cpu"] = platform.machine() | ||
| 123 | |||
| 124 | if HAS_TLS: | ||
| 125 | data["openssl_version"] = ssl.OPENSSL_VERSION | ||
| 126 | |||
| 127 | setuptools_version = get_installed_version("setuptools") | ||
| 128 | if setuptools_version is not None: | ||
| 129 | data["setuptools_version"] = setuptools_version | ||
| 130 | |||
| 131 | return "{data[installer][name]}/{data[installer][version]} {json}".format( | ||
| 132 | data=data, | ||
| 133 | json=json.dumps(data, separators=(",", ":"), sort_keys=True), | ||
| 134 | ) | ||
| 135 | |||
| 136 | |||
| 137 | class MultiDomainBasicAuth(AuthBase): | ||
| 138 | |||
| 139 | def __init__(self, prompting=True): | ||
| 140 | self.prompting = prompting | ||
| 141 | self.passwords = {} | ||
| 142 | |||
| 143 | def __call__(self, req): | ||
| 144 | parsed = urllib_parse.urlparse(req.url) | ||
| 145 | |||
| 146 | # Get the netloc without any embedded credentials | ||
| 147 | netloc = parsed.netloc.rsplit("@", 1)[-1] | ||
| 148 | |||
| 149 | # Set the url of the request to the url without any credentials | ||
| 150 | req.url = urllib_parse.urlunparse(parsed[:1] + (netloc,) + parsed[2:]) | ||
| 151 | |||
| 152 | # Use any stored credentials that we have for this netloc | ||
| 153 | username, password = self.passwords.get(netloc, (None, None)) | ||
| 154 | |||
| 155 | # Extract credentials embedded in the url if we have none stored | ||
| 156 | if username is None: | ||
| 157 | username, password = self.parse_credentials(parsed.netloc) | ||
| 158 | |||
| 159 | # Get creds from netrc if we still don't have them | ||
| 160 | if username is None and password is None: | ||
| 161 | netrc_auth = get_netrc_auth(req.url) | ||
| 162 | username, password = netrc_auth if netrc_auth else (None, None) | ||
| 163 | |||
| 164 | if username or password: | ||
| 165 | # Store the username and password | ||
| 166 | self.passwords[netloc] = (username, password) | ||
| 167 | |||
| 168 | # Send the basic auth with this request | ||
| 169 | req = HTTPBasicAuth(username or "", password or "")(req) | ||
| 170 | |||
| 171 | # Attach a hook to handle 401 responses | ||
| 172 | req.register_hook("response", self.handle_401) | ||
| 173 | |||
| 174 | return req | ||
| 175 | |||
| 176 | def handle_401(self, resp, **kwargs): | ||
| 177 | # We only care about 401 responses; anything else we just | ||
| 178 | # pass through as the actual response | ||
| 179 | if resp.status_code != 401: | ||
| 180 | return resp | ||
| 181 | |||
| 182 | # We are not able to prompt the user so simply return the response | ||
| 183 | if not self.prompting: | ||
| 184 | return resp | ||
| 185 | |||
| 186 | parsed = urllib_parse.urlparse(resp.url) | ||
| 187 | |||
| 188 | # Prompt the user for a new username and password | ||
| 189 | username = six.moves.input("User for %s: " % parsed.netloc) | ||
| 190 | password = getpass.getpass("Password: ") | ||
| 191 | |||
| 192 | # Store the new username and password to use for future requests | ||
| 193 | if username or password: | ||
| 194 | self.passwords[parsed.netloc] = (username, password) | ||
| 195 | |||
| 196 | # Consume content and release the original connection to allow our new | ||
| 197 | # request to reuse the same one. | ||
| 198 | resp.content | ||
| 199 | resp.raw.release_conn() | ||
| 200 | |||
| 201 | # Add our new username and password to the request | ||
| 202 | req = HTTPBasicAuth(username or "", password or "")(resp.request) | ||
| 203 | |||
| 204 | # Send our new request | ||
| 205 | new_resp = resp.connection.send(req, **kwargs) | ||
| 206 | new_resp.history.append(resp) | ||
| 207 | |||
| 208 | return new_resp | ||
| 209 | |||
| 210 | def parse_credentials(self, netloc): | ||
| 211 | if "@" in netloc: | ||
| 212 | userinfo = netloc.rsplit("@", 1)[0] | ||
| 213 | if ":" in userinfo: | ||
| 214 | user, pwd = userinfo.split(":", 1) | ||
| 215 | return (urllib_unquote(user), urllib_unquote(pwd)) | ||
| 216 | return urllib_unquote(userinfo), None | ||
| 217 | return None, None | ||
| 218 | |||
| 219 | |||
| 220 | class LocalFSAdapter(BaseAdapter): | ||
| 221 | |||
| 222 | def send(self, request, stream=None, timeout=None, verify=None, cert=None, | ||
| 223 | proxies=None): | ||
| 224 | pathname = url_to_path(request.url) | ||
| 225 | |||
| 226 | resp = Response() | ||
| 227 | resp.status_code = 200 | ||
| 228 | resp.url = request.url | ||
| 229 | |||
| 230 | try: | ||
| 231 | stats = os.stat(pathname) | ||
| 232 | except OSError as exc: | ||
| 233 | resp.status_code = 404 | ||
| 234 | resp.raw = exc | ||
| 235 | else: | ||
| 236 | modified = email.utils.formatdate(stats.st_mtime, usegmt=True) | ||
| 237 | content_type = mimetypes.guess_type(pathname)[0] or "text/plain" | ||
| 238 | resp.headers = CaseInsensitiveDict({ | ||
| 239 | "Content-Type": content_type, | ||
| 240 | "Content-Length": stats.st_size, | ||
| 241 | "Last-Modified": modified, | ||
| 242 | }) | ||
| 243 | |||
| 244 | resp.raw = open(pathname, "rb") | ||
| 245 | resp.close = resp.raw.close | ||
| 246 | |||
| 247 | return resp | ||
| 248 | |||
| 249 | def close(self): | ||
| 250 | pass | ||
| 251 | |||
| 252 | |||
| 253 | class SafeFileCache(FileCache): | ||
| 254 | """ | ||
| 255 | A file based cache which is safe to use even when the target directory may | ||
| 256 | not be accessible or writable. | ||
| 257 | """ | ||
| 258 | |||
| 259 | def __init__(self, *args, **kwargs): | ||
| 260 | super(SafeFileCache, self).__init__(*args, **kwargs) | ||
| 261 | |||
| 262 | # Check to ensure that the directory containing our cache directory | ||
| 263 | # is owned by the user currently executing pip. If it does not exist, | ||
| 264 | # we will check the parent directory until we find one that does exist. | ||
| 265 | # If it is not owned by the user executing pip then we will disable | ||
| 266 | # the cache and log a warning. | ||
| 267 | if not check_path_owner(self.directory): | ||
| 268 | logger.warning( | ||
| 269 | "The directory '%s' or its parent directory is not owned by " | ||
| 270 | "the current user and the cache has been disabled. Please " | ||
| 271 | "check the permissions and owner of that directory. If " | ||
| 272 | "executing pip with sudo, you may want sudo's -H flag.", | ||
| 273 | self.directory, | ||
| 274 | ) | ||
| 275 | |||
| 276 | # Set our directory to None to disable the Cache | ||
| 277 | self.directory = None | ||
| 278 | |||
| 279 | def get(self, *args, **kwargs): | ||
| 280 | # If we don't have a directory, then the cache should be a no-op. | ||
| 281 | if self.directory is None: | ||
| 282 | return | ||
| 283 | |||
| 284 | try: | ||
| 285 | return super(SafeFileCache, self).get(*args, **kwargs) | ||
| 286 | except (LockError, OSError, IOError): | ||
| 287 | # We intentionally silence this error; if we can't access the cache | ||
| 288 | # then we can just skip caching and process the request as if | ||
| 289 | # caching wasn't enabled. | ||
| 290 | pass | ||
| 291 | |||
| 292 | def set(self, *args, **kwargs): | ||
| 293 | # If we don't have a directory, then the cache should be a no-op. | ||
| 294 | if self.directory is None: | ||
| 295 | return | ||
| 296 | |||
| 297 | try: | ||
| 298 | return super(SafeFileCache, self).set(*args, **kwargs) | ||
| 299 | except (LockError, OSError, IOError): | ||
| 300 | # We intentionally silence this error; if we can't access the cache | ||
| 301 | # then we can just skip caching and process the request as if | ||
| 302 | # caching wasn't enabled. | ||
| 303 | pass | ||
| 304 | |||
| 305 | def delete(self, *args, **kwargs): | ||
| 306 | # If we don't have a directory, then the cache should be a no-op. | ||
| 307 | if self.directory is None: | ||
| 308 | return | ||
| 309 | |||
| 310 | try: | ||
| 311 | return super(SafeFileCache, self).delete(*args, **kwargs) | ||
| 312 | except (LockError, OSError, IOError): | ||
| 313 | # We intentionally silence this error; if we can't access the cache | ||
| 314 | # then we can just skip caching and process the request as if | ||
| 315 | # caching wasn't enabled. | ||
| 316 | pass | ||
| 317 | |||
| 318 | |||
| 319 | class InsecureHTTPAdapter(HTTPAdapter): | ||
| 320 | |||
| 321 | def cert_verify(self, conn, url, verify, cert): | ||
| 322 | conn.cert_reqs = 'CERT_NONE' | ||
| 323 | conn.ca_certs = None | ||
| 324 | |||
| 325 | |||
| 326 | class PipSession(requests.Session): | ||
| 327 | |||
| 328 | timeout = None | ||
| 329 | |||
| 330 | def __init__(self, *args, **kwargs): | ||
| 331 | retries = kwargs.pop("retries", 0) | ||
| 332 | cache = kwargs.pop("cache", None) | ||
| 333 | insecure_hosts = kwargs.pop("insecure_hosts", []) | ||
| 334 | |||
| 335 | super(PipSession, self).__init__(*args, **kwargs) | ||
| 336 | |||
| 337 | # Attach our User Agent to the request | ||
| 338 | self.headers["User-Agent"] = user_agent() | ||
| 339 | |||
| 340 | # Attach our Authentication handler to the session | ||
| 341 | self.auth = MultiDomainBasicAuth() | ||
| 342 | |||
| 343 | # Create our urllib3.Retry instance which will allow us to customize | ||
| 344 | # how we handle retries. | ||
| 345 | retries = urllib3.Retry( | ||
| 346 | # Set the total number of retries that a particular request can | ||
| 347 | # have. | ||
| 348 | total=retries, | ||
| 349 | |||
| 350 | # A 503 error from PyPI typically means that the Fastly -> Origin | ||
| 351 | # connection got interrupted in some way. A 503 error in general | ||
| 352 | # is typically considered a transient error so we'll go ahead and | ||
| 353 | # retry it. | ||
| 354 | # A 500 may indicate transient error in Amazon S3 | ||
| 355 | # A 520 or 527 - may indicate transient error in CloudFlare | ||
| 356 | status_forcelist=[500, 503, 520, 527], | ||
| 357 | |||
| 358 | # Add a small amount of back off between failed requests in | ||
| 359 | # order to prevent hammering the service. | ||
| 360 | backoff_factor=0.25, | ||
| 361 | ) | ||
| 362 | |||
| 363 | # We want to _only_ cache responses on securely fetched origins. We do | ||
| 364 | # this because we can't validate the response of an insecurely fetched | ||
| 365 | # origin, and we don't want someone to be able to poison the cache and | ||
| 366 | # require manual eviction from the cache to fix it. | ||
| 367 | if cache: | ||
| 368 | secure_adapter = CacheControlAdapter( | ||
| 369 | cache=SafeFileCache(cache, use_dir_lock=True), | ||
| 370 | max_retries=retries, | ||
| 371 | ) | ||
| 372 | else: | ||
| 373 | secure_adapter = HTTPAdapter(max_retries=retries) | ||
| 374 | |||
| 375 | # Our Insecure HTTPAdapter disables HTTPS validation. It does not | ||
| 376 | # support caching (see above) so we'll use it for all http:// URLs as | ||
| 377 | # well as any https:// host that we've marked as ignoring TLS errors | ||
| 378 | # for. | ||
| 379 | insecure_adapter = InsecureHTTPAdapter(max_retries=retries) | ||
| 380 | |||
| 381 | self.mount("https://", secure_adapter) | ||
| 382 | self.mount("http://", insecure_adapter) | ||
| 383 | |||
| 384 | # Enable file:// urls | ||
| 385 | self.mount("file://", LocalFSAdapter()) | ||
| 386 | |||
| 387 | # We want to use a non-validating adapter for any requests which are | ||
| 388 | # deemed insecure. | ||
| 389 | for host in insecure_hosts: | ||
| 390 | self.mount("https://{}/".format(host), insecure_adapter) | ||
| 391 | |||
| 392 | def request(self, method, url, *args, **kwargs): | ||
| 393 | # Allow setting a default timeout on a session | ||
| 394 | kwargs.setdefault("timeout", self.timeout) | ||
| 395 | |||
| 396 | # Dispatch the actual request | ||
| 397 | return super(PipSession, self).request(method, url, *args, **kwargs) | ||
| 398 | |||
| 399 | |||
| 400 | def get_file_content(url, comes_from=None, session=None): | ||
| 401 | """Gets the content of a file; it may be a filename, file: URL, or | ||
| 402 | http: URL. Returns (location, content). Content is unicode. | ||
| 403 | |||
| 404 | :param url: File path or url. | ||
| 405 | :param comes_from: Origin description of requirements. | ||
| 406 | :param session: Instance of pip._internal.download.PipSession. | ||
| 407 | """ | ||
| 408 | if session is None: | ||
| 409 | raise TypeError( | ||
| 410 | "get_file_content() missing 1 required keyword argument: 'session'" | ||
| 411 | ) | ||
| 412 | |||
| 413 | match = _scheme_re.search(url) | ||
| 414 | if match: | ||
| 415 | scheme = match.group(1).lower() | ||
| 416 | if (scheme == 'file' and comes_from and | ||
| 417 | comes_from.startswith('http')): | ||
| 418 | raise InstallationError( | ||
| 419 | 'Requirements file %s references URL %s, which is local' | ||
| 420 | % (comes_from, url)) | ||
| 421 | if scheme == 'file': | ||
| 422 | path = url.split(':', 1)[1] | ||
| 423 | path = path.replace('\\', '/') | ||
| 424 | match = _url_slash_drive_re.match(path) | ||
| 425 | if match: | ||
| 426 | path = match.group(1) + ':' + path.split('|', 1)[1] | ||
| 427 | path = urllib_parse.unquote(path) | ||
| 428 | if path.startswith('/'): | ||
| 429 | path = '/' + path.lstrip('/') | ||
| 430 | url = path | ||
| 431 | else: | ||
| 432 | # FIXME: catch some errors | ||
| 433 | resp = session.get(url) | ||
| 434 | resp.raise_for_status() | ||
| 435 | return resp.url, resp.text | ||
| 436 | try: | ||
| 437 | with open(url, 'rb') as f: | ||
| 438 | content = auto_decode(f.read()) | ||
| 439 | except IOError as exc: | ||
| 440 | raise InstallationError( | ||
| 441 | 'Could not open requirements file: %s' % str(exc) | ||
| 442 | ) | ||
| 443 | return url, content | ||
| 444 | |||
| 445 | |||
| 446 | _scheme_re = re.compile(r'^(http|https|file):', re.I) | ||
| 447 | _url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I) | ||
| 448 | |||
| 449 | |||
| 450 | def is_url(name): | ||
| 451 | """Returns true if the name looks like a URL""" | ||
| 452 | if ':' not in name: | ||
| 453 | return False | ||
| 454 | scheme = name.split(':', 1)[0].lower() | ||
| 455 | return scheme in ['http', 'https', 'file', 'ftp'] + vcs.all_schemes | ||
| 456 | |||
| 457 | |||
| 458 | def url_to_path(url): | ||
| 459 | """ | ||
| 460 | Convert a file: URL to a path. | ||
| 461 | """ | ||
| 462 | assert url.startswith('file:'), ( | ||
| 463 | "You can only turn file: urls into filenames (not %r)" % url) | ||
| 464 | |||
| 465 | _, netloc, path, _, _ = urllib_parse.urlsplit(url) | ||
| 466 | |||
| 467 | # if we have a UNC path, prepend UNC share notation | ||
| 468 | if netloc: | ||
| 469 | netloc = '\\\\' + netloc | ||
| 470 | |||
| 471 | path = urllib_request.url2pathname(netloc + path) | ||
| 472 | return path | ||
| 473 | |||
| 474 | |||
| 475 | def path_to_url(path): | ||
| 476 | """ | ||
| 477 | Convert a path to a file: URL. The path will be made absolute and have | ||
| 478 | quoted path parts. | ||
| 479 | """ | ||
| 480 | path = os.path.normpath(os.path.abspath(path)) | ||
| 481 | url = urllib_parse.urljoin('file:', urllib_request.pathname2url(path)) | ||
| 482 | return url | ||
| 483 | |||
| 484 | |||
| 485 | def is_archive_file(name): | ||
| 486 | """Return True if `name` is a considered as an archive file.""" | ||
| 487 | ext = splitext(name)[1].lower() | ||
| 488 | if ext in ARCHIVE_EXTENSIONS: | ||
| 489 | return True | ||
| 490 | return False | ||
| 491 | |||
| 492 | |||
| 493 | def unpack_vcs_link(link, location): | ||
| 494 | vcs_backend = _get_used_vcs_backend(link) | ||
| 495 | vcs_backend.unpack(location) | ||
| 496 | |||
| 497 | |||
| 498 | def _get_used_vcs_backend(link): | ||
| 499 | for backend in vcs.backends: | ||
| 500 | if link.scheme in backend.schemes: | ||
| 501 | vcs_backend = backend(link.url) | ||
| 502 | return vcs_backend | ||
| 503 | |||
| 504 | |||
| 505 | def is_vcs_url(link): | ||
| 506 | return bool(_get_used_vcs_backend(link)) | ||
| 507 | |||
| 508 | |||
| 509 | def is_file_url(link): | ||
| 510 | return link.url.lower().startswith('file:') | ||
| 511 | |||
| 512 | |||
| 513 | def is_dir_url(link): | ||
| 514 | """Return whether a file:// Link points to a directory. | ||
| 515 | |||
| 516 | ``link`` must not have any other scheme but file://. Call is_file_url() | ||
| 517 | first. | ||
| 518 | |||
| 519 | """ | ||
| 520 | link_path = url_to_path(link.url_without_fragment) | ||
| 521 | return os.path.isdir(link_path) | ||
| 522 | |||
| 523 | |||
| 524 | def _progress_indicator(iterable, *args, **kwargs): | ||
| 525 | return iterable | ||
| 526 | |||
| 527 | |||
| 528 | def _download_url(resp, link, content_file, hashes, progress_bar): | ||
| 529 | try: | ||
| 530 | total_length = int(resp.headers['content-length']) | ||
| 531 | except (ValueError, KeyError, TypeError): | ||
| 532 | total_length = 0 | ||
| 533 | |||
| 534 | cached_resp = getattr(resp, "from_cache", False) | ||
| 535 | if logger.getEffectiveLevel() > logging.INFO: | ||
| 536 | show_progress = False | ||
| 537 | elif cached_resp: | ||
| 538 | show_progress = False | ||
| 539 | elif total_length > (40 * 1000): | ||
| 540 | show_progress = True | ||
| 541 | elif not total_length: | ||
| 542 | show_progress = True | ||
| 543 | else: | ||
| 544 | show_progress = False | ||
| 545 | |||
| 546 | show_url = link.show_url | ||
| 547 | |||
| 548 | def resp_read(chunk_size): | ||
| 549 | try: | ||
| 550 | # Special case for urllib3. | ||
| 551 | for chunk in resp.raw.stream( | ||
| 552 | chunk_size, | ||
| 553 | # We use decode_content=False here because we don't | ||
| 554 | # want urllib3 to mess with the raw bytes we get | ||
| 555 | # from the server. If we decompress inside of | ||
| 556 | # urllib3 then we cannot verify the checksum | ||
| 557 | # because the checksum will be of the compressed | ||
| 558 | # file. This breakage will only occur if the | ||
| 559 | # server adds a Content-Encoding header, which | ||
| 560 | # depends on how the server was configured: | ||
| 561 | # - Some servers will notice that the file isn't a | ||
| 562 | # compressible file and will leave the file alone | ||
| 563 | # and with an empty Content-Encoding | ||
| 564 | # - Some servers will notice that the file is | ||
| 565 | # already compressed and will leave the file | ||
| 566 | # alone and will add a Content-Encoding: gzip | ||
| 567 | # header | ||
| 568 | # - Some servers won't notice anything at all and | ||
| 569 | # will take a file that's already been compressed | ||
| 570 | # and compress it again and set the | ||
| 571 | # Content-Encoding: gzip header | ||
| 572 | # | ||
| 573 | # By setting this not to decode automatically we | ||
| 574 | # hope to eliminate problems with the second case. | ||
| 575 | decode_content=False): | ||
| 576 | yield chunk | ||
| 577 | except AttributeError: | ||
| 578 | # Standard file-like object. | ||
| 579 | while True: | ||
| 580 | chunk = resp.raw.read(chunk_size) | ||
| 581 | if not chunk: | ||
| 582 | break | ||
| 583 | yield chunk | ||
| 584 | |||
| 585 | def written_chunks(chunks): | ||
| 586 | for chunk in chunks: | ||
| 587 | content_file.write(chunk) | ||
| 588 | yield chunk | ||
| 589 | |||
| 590 | progress_indicator = _progress_indicator | ||
| 591 | |||
| 592 | if link.netloc == PyPI.netloc: | ||
| 593 | url = show_url | ||
| 594 | else: | ||
| 595 | url = link.url_without_fragment | ||
| 596 | |||
| 597 | if show_progress: # We don't show progress on cached responses | ||
| 598 | progress_indicator = DownloadProgressProvider(progress_bar, | ||
| 599 | max=total_length) | ||
| 600 | if total_length: | ||
| 601 | logger.info("Downloading %s (%s)", url, format_size(total_length)) | ||
| 602 | else: | ||
| 603 | logger.info("Downloading %s", url) | ||
| 604 | elif cached_resp: | ||
| 605 | logger.info("Using cached %s", url) | ||
| 606 | else: | ||
| 607 | logger.info("Downloading %s", url) | ||
| 608 | |||
| 609 | logger.debug('Downloading from URL %s', link) | ||
| 610 | |||
| 611 | downloaded_chunks = written_chunks( | ||
| 612 | progress_indicator( | ||
| 613 | resp_read(CONTENT_CHUNK_SIZE), | ||
| 614 | CONTENT_CHUNK_SIZE | ||
| 615 | ) | ||
| 616 | ) | ||
| 617 | if hashes: | ||
| 618 | hashes.check_against_chunks(downloaded_chunks) | ||
| 619 | else: | ||
| 620 | consume(downloaded_chunks) | ||
| 621 | |||
| 622 | |||
| 623 | def _copy_file(filename, location, link): | ||
| 624 | copy = True | ||
| 625 | download_location = os.path.join(location, link.filename) | ||
| 626 | if os.path.exists(download_location): | ||
| 627 | response = ask_path_exists( | ||
| 628 | 'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)bort' % | ||
| 629 | display_path(download_location), ('i', 'w', 'b', 'a')) | ||
| 630 | if response == 'i': | ||
| 631 | copy = False | ||
| 632 | elif response == 'w': | ||
| 633 | logger.warning('Deleting %s', display_path(download_location)) | ||
| 634 | os.remove(download_location) | ||
| 635 | elif response == 'b': | ||
| 636 | dest_file = backup_dir(download_location) | ||
| 637 | logger.warning( | ||
| 638 | 'Backing up %s to %s', | ||
| 639 | display_path(download_location), | ||
| 640 | display_path(dest_file), | ||
| 641 | ) | ||
| 642 | shutil.move(download_location, dest_file) | ||
| 643 | elif response == 'a': | ||
| 644 | sys.exit(-1) | ||
| 645 | if copy: | ||
| 646 | shutil.copy(filename, download_location) | ||
| 647 | logger.info('Saved %s', display_path(download_location)) | ||
| 648 | |||
| 649 | |||
| 650 | def unpack_http_url(link, location, download_dir=None, | ||
| 651 | session=None, hashes=None, progress_bar="on"): | ||
| 652 | if session is None: | ||
| 653 | raise TypeError( | ||
| 654 | "unpack_http_url() missing 1 required keyword argument: 'session'" | ||
| 655 | ) | ||
| 656 | |||
| 657 | with TempDirectory(kind="unpack") as temp_dir: | ||
| 658 | # If a download dir is specified, is the file already downloaded there? | ||
| 659 | already_downloaded_path = None | ||
| 660 | if download_dir: | ||
| 661 | already_downloaded_path = _check_download_dir(link, | ||
| 662 | download_dir, | ||
| 663 | hashes) | ||
| 664 | |||
| 665 | if already_downloaded_path: | ||
| 666 | from_path = already_downloaded_path | ||
| 667 | content_type = mimetypes.guess_type(from_path)[0] | ||
| 668 | else: | ||
| 669 | # let's download to a tmp dir | ||
| 670 | from_path, content_type = _download_http_url(link, | ||
| 671 | session, | ||
| 672 | temp_dir.path, | ||
| 673 | hashes, | ||
| 674 | progress_bar) | ||
| 675 | |||
| 676 | # unpack the archive to the build dir location. even when only | ||
| 677 | # downloading archives, they have to be unpacked to parse dependencies | ||
| 678 | unpack_file(from_path, location, content_type, link) | ||
| 679 | |||
| 680 | # a download dir is specified; let's copy the archive there | ||
| 681 | if download_dir and not already_downloaded_path: | ||
| 682 | _copy_file(from_path, download_dir, link) | ||
| 683 | |||
| 684 | if not already_downloaded_path: | ||
| 685 | os.unlink(from_path) | ||
| 686 | |||
| 687 | |||
| 688 | def unpack_file_url(link, location, download_dir=None, hashes=None): | ||
| 689 | """Unpack link into location. | ||
| 690 | |||
| 691 | If download_dir is provided and link points to a file, make a copy | ||
| 692 | of the link file inside download_dir. | ||
| 693 | """ | ||
| 694 | link_path = url_to_path(link.url_without_fragment) | ||
| 695 | |||
| 696 | # If it's a url to a local directory | ||
| 697 | if is_dir_url(link): | ||
| 698 | if os.path.isdir(location): | ||
| 699 | rmtree(location) | ||
| 700 | shutil.copytree(link_path, location, symlinks=True) | ||
| 701 | if download_dir: | ||
| 702 | logger.info('Link is a directory, ignoring download_dir') | ||
| 703 | return | ||
| 704 | |||
| 705 | # If --require-hashes is off, `hashes` is either empty, the | ||
| 706 | # link's embedded hash, or MissingHashes; it is required to | ||
| 707 | # match. If --require-hashes is on, we are satisfied by any | ||
| 708 | # hash in `hashes` matching: a URL-based or an option-based | ||
| 709 | # one; no internet-sourced hash will be in `hashes`. | ||
| 710 | if hashes: | ||
| 711 | hashes.check_against_path(link_path) | ||
| 712 | |||
| 713 | # If a download dir is specified, is the file already there and valid? | ||
| 714 | already_downloaded_path = None | ||
| 715 | if download_dir: | ||
| 716 | already_downloaded_path = _check_download_dir(link, | ||
| 717 | download_dir, | ||
| 718 | hashes) | ||
| 719 | |||
| 720 | if already_downloaded_path: | ||
| 721 | from_path = already_downloaded_path | ||
| 722 | else: | ||
| 723 | from_path = link_path | ||
| 724 | |||
| 725 | content_type = mimetypes.guess_type(from_path)[0] | ||
| 726 | |||
| 727 | # unpack the archive to the build dir location. even when only downloading | ||
| 728 | # archives, they have to be unpacked to parse dependencies | ||
| 729 | unpack_file(from_path, location, content_type, link) | ||
| 730 | |||
| 731 | # a download dir is specified and not already downloaded | ||
| 732 | if download_dir and not already_downloaded_path: | ||
| 733 | _copy_file(from_path, download_dir, link) | ||
| 734 | |||
| 735 | |||
| 736 | def _copy_dist_from_dir(link_path, location): | ||
| 737 | """Copy distribution files in `link_path` to `location`. | ||
| 738 | |||
| 739 | Invoked when the user requests to install a local directory, e.g.: | ||
| 740 | |||
| 741 | pip install . | ||
| 742 | pip install ~/dev/git-repos/python-prompt-toolkit | ||
| 743 | |||
| 744 | """ | ||
| 745 | |||
| 746 | # Note: This is currently VERY SLOW if you have a lot of data in the | ||
| 747 | # directory, because it copies everything with `shutil.copytree`. | ||
| 748 | # What it should really do is build an sdist and install that. | ||
| 749 | # See https://github.com/pypa/pip/issues/2195 | ||
| 750 | |||
| 751 | if os.path.isdir(location): | ||
| 752 | rmtree(location) | ||
| 753 | |||
| 754 | # build an sdist | ||
| 755 | setup_py = 'setup.py' | ||
| 756 | sdist_args = [sys.executable] | ||
| 757 | sdist_args.append('-c') | ||
| 758 | sdist_args.append(SETUPTOOLS_SHIM % setup_py) | ||
| 759 | sdist_args.append('sdist') | ||
| 760 | sdist_args += ['--dist-dir', location] | ||
| 761 | logger.info('Running setup.py sdist for %s', link_path) | ||
| 762 | |||
| 763 | with indent_log(): | ||
| 764 | call_subprocess(sdist_args, cwd=link_path, show_stdout=False) | ||
| 765 | |||
| 766 | # unpack sdist into `location` | ||
| 767 | sdist = os.path.join(location, os.listdir(location)[0]) | ||
| 768 | logger.info('Unpacking sdist %s into %s', sdist, location) | ||
| 769 | unpack_file(sdist, location, content_type=None, link=None) | ||
| 770 | |||
| 771 | |||
| 772 | class PipXmlrpcTransport(xmlrpc_client.Transport): | ||
| 773 | """Provide a `xmlrpclib.Transport` implementation via a `PipSession` | ||
| 774 | object. | ||
| 775 | """ | ||
| 776 | |||
| 777 | def __init__(self, index_url, session, use_datetime=False): | ||
| 778 | xmlrpc_client.Transport.__init__(self, use_datetime) | ||
| 779 | index_parts = urllib_parse.urlparse(index_url) | ||
| 780 | self._scheme = index_parts.scheme | ||
| 781 | self._session = session | ||
| 782 | |||
| 783 | def request(self, host, handler, request_body, verbose=False): | ||
| 784 | parts = (self._scheme, host, handler, None, None, None) | ||
| 785 | url = urllib_parse.urlunparse(parts) | ||
| 786 | try: | ||
| 787 | headers = {'Content-Type': 'text/xml'} | ||
| 788 | response = self._session.post(url, data=request_body, | ||
| 789 | headers=headers, stream=True) | ||
| 790 | response.raise_for_status() | ||
| 791 | self.verbose = verbose | ||
| 792 | return self.parse_response(response.raw) | ||
| 793 | except requests.HTTPError as exc: | ||
| 794 | logger.critical( | ||
| 795 | "HTTP error %s while getting %s", | ||
| 796 | exc.response.status_code, url, | ||
| 797 | ) | ||
| 798 | raise | ||
| 799 | |||
| 800 | |||
| 801 | def unpack_url(link, location, download_dir=None, | ||
| 802 | only_download=False, session=None, hashes=None, | ||
| 803 | progress_bar="on"): | ||
| 804 | """Unpack link. | ||
| 805 | If link is a VCS link: | ||
| 806 | if only_download, export into download_dir and ignore location | ||
| 807 | else unpack into location | ||
| 808 | for other types of link: | ||
| 809 | - unpack into location | ||
| 810 | - if download_dir, copy the file into download_dir | ||
| 811 | - if only_download, mark location for deletion | ||
| 812 | |||
| 813 | :param hashes: A Hashes object, one of whose embedded hashes must match, | ||
| 814 | or HashMismatch will be raised. If the Hashes is empty, no matches are | ||
| 815 | required, and unhashable types of requirements (like VCS ones, which | ||
| 816 | would ordinarily raise HashUnsupported) are allowed. | ||
| 817 | """ | ||
| 818 | # non-editable vcs urls | ||
| 819 | if is_vcs_url(link): | ||
| 820 | unpack_vcs_link(link, location) | ||
| 821 | |||
| 822 | # file urls | ||
| 823 | elif is_file_url(link): | ||
| 824 | unpack_file_url(link, location, download_dir, hashes=hashes) | ||
| 825 | |||
| 826 | # http urls | ||
| 827 | else: | ||
| 828 | if session is None: | ||
| 829 | session = PipSession() | ||
| 830 | |||
| 831 | unpack_http_url( | ||
| 832 | link, | ||
| 833 | location, | ||
| 834 | download_dir, | ||
| 835 | session, | ||
| 836 | hashes=hashes, | ||
| 837 | progress_bar=progress_bar | ||
| 838 | ) | ||
| 839 | if only_download: | ||
| 840 | write_delete_marker_file(location) | ||
| 841 | |||
| 842 | |||
| 843 | def _download_http_url(link, session, temp_dir, hashes, progress_bar): | ||
| 844 | """Download link url into temp_dir using provided session""" | ||
| 845 | target_url = link.url.split('#', 1)[0] | ||
| 846 | try: | ||
| 847 | resp = session.get( | ||
| 848 | target_url, | ||
| 849 | # We use Accept-Encoding: identity here because requests | ||
| 850 | # defaults to accepting compressed responses. This breaks in | ||
| 851 | # a variety of ways depending on how the server is configured. | ||
| 852 | # - Some servers will notice that the file isn't a compressible | ||
| 853 | # file and will leave the file alone and with an empty | ||
| 854 | # Content-Encoding | ||
| 855 | # - Some servers will notice that the file is already | ||
| 856 | # compressed and will leave the file alone and will add a | ||
| 857 | # Content-Encoding: gzip header | ||
| 858 | # - Some servers won't notice anything at all and will take | ||
| 859 | # a file that's already been compressed and compress it again | ||
| 860 | # and set the Content-Encoding: gzip header | ||
| 861 | # By setting this to request only the identity encoding, we're | ||
| 862 | # hoping to eliminate the third case. Hopefully there does not | ||
| 863 | # exist a server which when given a file will notice it is | ||
| 864 | # already compressed and that you're not asking for a | ||
| 865 | # compressed file and will then decompress it before sending | ||
| 866 | # because if that's the case I don't think it'll ever be | ||
| 867 | # possible to make this work. | ||
| 868 | headers={"Accept-Encoding": "identity"}, | ||
| 869 | stream=True, | ||
| 870 | ) | ||
| 871 | resp.raise_for_status() | ||
| 872 | except requests.HTTPError as exc: | ||
| 873 | logger.critical( | ||
| 874 | "HTTP error %s while getting %s", exc.response.status_code, link, | ||
| 875 | ) | ||
| 876 | raise | ||
| 877 | |||
| 878 | content_type = resp.headers.get('content-type', '') | ||
| 879 | filename = link.filename # fallback | ||
| 880 | # Have a look at the Content-Disposition header for a better guess | ||
| 881 | content_disposition = resp.headers.get('content-disposition') | ||
| 882 | if content_disposition: | ||
| 883 | type, params = cgi.parse_header(content_disposition) | ||
| 884 | # We use ``or`` here because we don't want to use an "empty" value | ||
| 885 | # from the filename param. | ||
| 886 | filename = params.get('filename') or filename | ||
| 887 | ext = splitext(filename)[1] | ||
| 888 | if not ext: | ||
| 889 | ext = mimetypes.guess_extension(content_type) | ||
| 890 | if ext: | ||
| 891 | filename += ext | ||
| 892 | if not ext and link.url != resp.url: | ||
| 893 | ext = os.path.splitext(resp.url)[1] | ||
| 894 | if ext: | ||
| 895 | filename += ext | ||
| 896 | file_path = os.path.join(temp_dir, filename) | ||
| 897 | with open(file_path, 'wb') as content_file: | ||
| 898 | _download_url(resp, link, content_file, hashes, progress_bar) | ||
| 899 | return file_path, content_type | ||
| 900 | |||
| 901 | |||
| 902 | def _check_download_dir(link, download_dir, hashes): | ||
| 903 | """ Check download_dir for previously downloaded file with correct hash | ||
| 904 | If a correct file is found return its path else None | ||
| 905 | """ | ||
| 906 | download_path = os.path.join(download_dir, link.filename) | ||
| 907 | if os.path.exists(download_path): | ||
| 908 | # If already downloaded, does its hash match? | ||
| 909 | logger.info('File was already downloaded %s', download_path) | ||
| 910 | if hashes: | ||
| 911 | try: | ||
| 912 | hashes.check_against_path(download_path) | ||
| 913 | except HashMismatch: | ||
| 914 | logger.warning( | ||
| 915 | 'Previously-downloaded file %s has bad hash. ' | ||
| 916 | 'Re-downloading.', | ||
| 917 | download_path | ||
| 918 | ) | ||
| 919 | os.unlink(download_path) | ||
| 920 | return None | ||
| 921 | return download_path | ||
| 922 | return None | ||
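A rough standalone equivalent of the check above, using `hashlib` directly in place of pip's `Hashes` helper (the function name and the sha256-only policy are assumptions for the sketch):

```python
import hashlib
import os

def check_cached_download(path, expected_sha256):
    """Return path if the file exists and matches the digest, else None."""
    if not os.path.exists(path):
        return None
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(8192), b""):
            digest.update(chunk)
    if digest.hexdigest() != expected_sha256:
        os.unlink(path)  # bad hash: discard so the caller re-downloads
        return None
    return path
```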
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/exceptions.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/exceptions.py new file mode 100644 index 0000000..28705c8 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/exceptions.py | |||
| @@ -0,0 +1,249 @@ | |||
| 1 | """Exceptions used throughout package""" | ||
| 2 | from __future__ import absolute_import | ||
| 3 | |||
| 4 | from itertools import chain, groupby, repeat | ||
| 5 | |||
| 6 | from pip._vendor.six import iteritems | ||
| 7 | |||
| 8 | |||
| 9 | class PipError(Exception): | ||
| 10 | """Base pip exception""" | ||
| 11 | |||
| 12 | |||
| 13 | class ConfigurationError(PipError): | ||
| 14 | """General exception in configuration""" | ||
| 15 | |||
| 16 | |||
| 17 | class InstallationError(PipError): | ||
| 18 | """General exception during installation""" | ||
| 19 | |||
| 20 | |||
| 21 | class UninstallationError(PipError): | ||
| 22 | """General exception during uninstallation""" | ||
| 23 | |||
| 24 | |||
| 25 | class DistributionNotFound(InstallationError): | ||
| 26 | """Raised when a distribution cannot be found to satisfy a requirement""" | ||
| 27 | |||
| 28 | |||
| 29 | class RequirementsFileParseError(InstallationError): | ||
| 30 | """Raised when a general error occurs parsing a requirements file line.""" | ||
| 31 | |||
| 32 | |||
| 33 | class BestVersionAlreadyInstalled(PipError): | ||
| 34 | """Raised when the most up-to-date version of a package is already | ||
| 35 | installed.""" | ||
| 36 | |||
| 37 | |||
| 38 | class BadCommand(PipError): | ||
| 39 | """Raised when virtualenv or a command is not found""" | ||
| 40 | |||
| 41 | |||
| 42 | class CommandError(PipError): | ||
| 43 | """Raised when there is an error in command-line arguments""" | ||
| 44 | |||
| 45 | |||
| 46 | class PreviousBuildDirError(PipError): | ||
| 47 | """Raised when there's a previous conflicting build directory""" | ||
| 48 | |||
| 49 | |||
| 50 | class InvalidWheelFilename(InstallationError): | ||
| 51 | """Invalid wheel filename.""" | ||
| 52 | |||
| 53 | |||
| 54 | class UnsupportedWheel(InstallationError): | ||
| 55 | """Unsupported wheel.""" | ||
| 56 | |||
| 57 | |||
| 58 | class HashErrors(InstallationError): | ||
| 59 | """Multiple HashError instances rolled into one for reporting""" | ||
| 60 | |||
| 61 | def __init__(self): | ||
| 62 | self.errors = [] | ||
| 63 | |||
| 64 | def append(self, error): | ||
| 65 | self.errors.append(error) | ||
| 66 | |||
| 67 | def __str__(self): | ||
| 68 | lines = [] | ||
| 69 | self.errors.sort(key=lambda e: e.order) | ||
| 70 | for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__): | ||
| 71 | lines.append(cls.head) | ||
| 72 | lines.extend(e.body() for e in errors_of_cls) | ||
| 73 | if lines: | ||
| 74 | return '\n'.join(lines) | ||
| 75 | |||
| 76 | def __nonzero__(self): | ||
| 77 | return bool(self.errors) | ||
| 78 | |||
| 79 | def __bool__(self): | ||
| 80 | return self.__nonzero__() | ||
| 81 | |||
| 82 | |||
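Note why `HashErrors.__str__` sorts before grouping: `itertools.groupby` only groups *adjacent* equal keys, so the errors must first be ordered by class (via the `order` attribute described below). A toy demonstration with stand-in classes:

```python
from itertools import groupby

class VcsError(object):        # stand-ins for the HashError subclasses
    order = 0

class MismatchError(object):
    order = 4

errors = [MismatchError(), VcsError(), MismatchError()]
errors.sort(key=lambda e: e.order)
for cls, errs in groupby(errors, key=lambda e: e.__class__):
    print('%s %d' % (cls.__name__, len(list(errs))))
# VcsError 1
# MismatchError 2
```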
| 83 | class HashError(InstallationError): | ||
| 84 | """ | ||
| 85 | A failure to verify a package against known-good hashes | ||
| 86 | |||
| 87 | :cvar order: An int sorting hash exception classes by difficulty of | ||
| 88 | recovery (lower being harder), so the user doesn't bother fretting | ||
| 89 | about unpinned packages when they have deeper issues, like VCS | ||
| 90 | dependencies, to deal with. Also keeps error reports in a | ||
| 91 | deterministic order. | ||
| 92 | :cvar head: A section heading for display above potentially many | ||
| 93 | exceptions of this kind | ||
| 94 | :ivar req: The InstallRequirement that triggered this error. This is | ||
| 95 | pasted on after the exception is instantiated, because it's not | ||
| 96 | typically available earlier. | ||
| 97 | |||
| 98 | """ | ||
| 99 | req = None | ||
| 100 | head = '' | ||
| 101 | |||
| 102 | def body(self): | ||
| 103 | """Return a summary of me for display under the heading. | ||
| 104 | |||
| 105 | This default implementation simply prints a description of the | ||
| 106 | triggering requirement. | ||
| 107 | |||
| 108 | :param req: The InstallRequirement that provoked this error, with | ||
| 109 | populate_link() having already been called | ||
| 110 | |||
| 111 | """ | ||
| 112 | return ' %s' % self._requirement_name() | ||
| 113 | |||
| 114 | def __str__(self): | ||
| 115 | return '%s\n%s' % (self.head, self.body()) | ||
| 116 | |||
| 117 | def _requirement_name(self): | ||
| 118 | """Return a description of the requirement that triggered me. | ||
| 119 | |||
| 120 | This default implementation returns the long description of the req, | ||
| 121 | with line numbers. | ||
| 122 | |||
| 123 | """ | ||
| 124 | return str(self.req) if self.req else 'unknown package' | ||
| 125 | |||
| 126 | |||
| 127 | class VcsHashUnsupported(HashError): | ||
| 128 | """A hash was provided for a version-control-system-based requirement, but | ||
| 129 | we don't have a method for hashing those.""" | ||
| 130 | |||
| 131 | order = 0 | ||
| 132 | head = ("Can't verify hashes for these requirements because we don't " | ||
| 133 | "have a way to hash version control repositories:") | ||
| 134 | |||
| 135 | |||
| 136 | class DirectoryUrlHashUnsupported(HashError): | ||
| 137 | """A hash was provided for a file:// requirement that points to a | ||
| 138 | directory, but we don't have a method for hashing those.""" | ||
| 139 | |||
| 140 | order = 1 | ||
| 141 | head = ("Can't verify hashes for these file:// requirements because they " | ||
| 142 | "point to directories:") | ||
| 143 | |||
| 144 | |||
| 145 | class HashMissing(HashError): | ||
| 146 | """A hash was needed for a requirement but is absent.""" | ||
| 147 | |||
| 148 | order = 2 | ||
| 149 | head = ('Hashes are required in --require-hashes mode, but they are ' | ||
| 150 | 'missing from some requirements. Here is a list of those ' | ||
| 151 | 'requirements along with the hashes their downloaded archives ' | ||
| 152 | 'actually had. Add lines like these to your requirements files to ' | ||
| 153 | 'prevent tampering. (If you did not enable --require-hashes ' | ||
| 154 | 'manually, note that it turns on automatically when any package ' | ||
| 155 | 'has a hash.)') | ||
| 156 | |||
| 157 | def __init__(self, gotten_hash): | ||
| 158 | """ | ||
| 159 | :param gotten_hash: The hash of the (possibly malicious) archive we | ||
| 160 | just downloaded | ||
| 161 | """ | ||
| 162 | self.gotten_hash = gotten_hash | ||
| 163 | |||
| 164 | def body(self): | ||
| 165 | # Dodge circular import. | ||
| 166 | from pip._internal.utils.hashes import FAVORITE_HASH | ||
| 167 | |||
| 168 | package = None | ||
| 169 | if self.req: | ||
| 170 | # In the case of URL-based requirements, display the original URL | ||
| 171 | # seen in the requirements file rather than the package name, | ||
| 172 | # so the output can be directly copied into the requirements file. | ||
| 173 | package = (self.req.original_link if self.req.original_link | ||
| 174 | # In case someone feeds something downright stupid | ||
| 175 | # to InstallRequirement's constructor. | ||
| 176 | else getattr(self.req, 'req', None)) | ||
| 177 | return ' %s --hash=%s:%s' % (package or 'unknown package', | ||
| 178 | FAVORITE_HASH, | ||
| 179 | self.gotten_hash) | ||
| 180 | |||
| 181 | |||
| 182 | class HashUnpinned(HashError): | ||
| 183 | """A requirement had a hash specified but was not pinned to a specific | ||
| 184 | version.""" | ||
| 185 | |||
| 186 | order = 3 | ||
| 187 | head = ('In --require-hashes mode, all requirements must have their ' | ||
| 188 | 'versions pinned with ==. These do not:') | ||
| 189 | |||
| 190 | |||
| 191 | class HashMismatch(HashError): | ||
| 192 | """ | ||
| 193 | Distribution file hash values don't match. | ||
| 194 | |||
| 195 | :ivar package_name: The name of the package that triggered the hash | ||
| 196 | mismatch. Feel free to write to this after the exception is raised to | ||
| 197 | improve its error message. | ||
| 198 | |||
| 199 | """ | ||
| 200 | order = 4 | ||
| 201 | head = ('THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS ' | ||
| 202 | 'FILE. If you have updated the package versions, please update ' | ||
| 203 | 'the hashes. Otherwise, examine the package contents carefully; ' | ||
| 204 | 'someone may have tampered with them.') | ||
| 205 | |||
| 206 | def __init__(self, allowed, gots): | ||
| 207 | """ | ||
| 208 | :param allowed: A dict of algorithm names pointing to lists of allowed | ||
| 209 | hex digests | ||
| 210 | :param gots: A dict of algorithm names pointing to hashes we | ||
| 211 | actually got from the files under suspicion | ||
| 212 | """ | ||
| 213 | self.allowed = allowed | ||
| 214 | self.gots = gots | ||
| 215 | |||
| 216 | def body(self): | ||
| 217 | return ' %s:\n%s' % (self._requirement_name(), | ||
| 218 | self._hash_comparison()) | ||
| 219 | |||
| 220 | def _hash_comparison(self): | ||
| 221 | """ | ||
| 222 | Return a comparison of actual and expected hash values. | ||
| 223 | |||
| 224 | Example:: | ||
| 225 | |||
| 226 | Expected sha256 abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde | ||
| 227 | or 123451234512345123451234512345123451234512345 | ||
| 228 | Got bcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdef | ||
| 229 | |||
| 230 | """ | ||
| 231 | def hash_then_or(hash_name): | ||
| 232 | # For now, all the decent hashes have 6-char names, so we can get | ||
| 233 | # away with hard-coding space literals. | ||
| 234 | return chain([hash_name], repeat(' or')) | ||
| 235 | |||
| 236 | lines = [] | ||
| 237 | for hash_name, expecteds in iteritems(self.allowed): | ||
| 238 | prefix = hash_then_or(hash_name) | ||
| 239 | lines.extend((' Expected %s %s' % (next(prefix), e)) | ||
| 240 | for e in expecteds) | ||
| 241 | lines.append(' Got %s\n' % | ||
| 242 | self.gots[hash_name].hexdigest()) | ||
| 243 | prefix = ' or' | ||
| 244 | return '\n'.join(lines) | ||
| 245 | |||
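The `chain`/`repeat` trick in `_hash_comparison` is worth seeing in isolation: the first expected digest is prefixed with the algorithm name, and every later one with "or". A small demo with invented digests (whitespace in the real output is padded for alignment):

```python
from itertools import chain, repeat

def hash_then_or(hash_name):
    # First element is the algorithm name, every later one is "or".
    return chain([hash_name], repeat('    or'))

prefix = hash_then_or('sha256')
for digest in ['aaaa', 'bbbb']:  # made-up digests
    print('Expected %s %s' % (next(prefix), digest))
# Expected sha256 aaaa
# Expected     or bbbb
```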
| 246 | |||
| 247 | class UnsupportedPythonVersion(InstallationError): | ||
| 248 | """Unsupported python version according to Requires-Python package | ||
| 249 | metadata.""" | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/index.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/index.py new file mode 100644 index 0000000..15e0bf3 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/index.py | |||
| @@ -0,0 +1,1117 @@ | |||
| 1 | """Routines related to PyPI, indexes""" | ||
| 2 | from __future__ import absolute_import | ||
| 3 | |||
| 4 | import cgi | ||
| 5 | import itertools | ||
| 6 | import logging | ||
| 7 | import mimetypes | ||
| 8 | import os | ||
| 9 | import posixpath | ||
| 10 | import re | ||
| 11 | import sys | ||
| 12 | import warnings | ||
| 13 | from collections import namedtuple | ||
| 14 | |||
| 15 | from pip._vendor import html5lib, requests, six | ||
| 16 | from pip._vendor.distlib.compat import unescape | ||
| 17 | from pip._vendor.packaging import specifiers | ||
| 18 | from pip._vendor.packaging.utils import canonicalize_name | ||
| 19 | from pip._vendor.packaging.version import parse as parse_version | ||
| 20 | from pip._vendor.requests.exceptions import SSLError | ||
| 21 | from pip._vendor.six.moves.urllib import parse as urllib_parse | ||
| 22 | from pip._vendor.six.moves.urllib import request as urllib_request | ||
| 23 | |||
| 24 | from pip._internal.compat import ipaddress | ||
| 25 | from pip._internal.download import HAS_TLS, is_url, path_to_url, url_to_path | ||
| 26 | from pip._internal.exceptions import ( | ||
| 27 | BestVersionAlreadyInstalled, DistributionNotFound, InvalidWheelFilename, | ||
| 28 | UnsupportedWheel, | ||
| 29 | ) | ||
| 30 | from pip._internal.models import PyPI | ||
| 31 | from pip._internal.pep425tags import get_supported | ||
| 32 | from pip._internal.utils.deprecation import RemovedInPip11Warning | ||
| 33 | from pip._internal.utils.logging import indent_log | ||
| 34 | from pip._internal.utils.misc import ( | ||
| 35 | ARCHIVE_EXTENSIONS, SUPPORTED_EXTENSIONS, cached_property, normalize_path, | ||
| 36 | splitext, | ||
| 37 | ) | ||
| 38 | from pip._internal.utils.packaging import check_requires_python | ||
| 39 | from pip._internal.wheel import Wheel, wheel_ext | ||
| 40 | |||
| 41 | __all__ = ['FormatControl', 'fmt_ctl_handle_mutual_exclude', 'PackageFinder'] | ||
| 42 | |||
| 43 | |||
| 44 | SECURE_ORIGINS = [ | ||
| 45 | # protocol, hostname, port | ||
| 46 | # Taken from Chrome's list of secure origins (See: http://bit.ly/1qrySKC) | ||
| 47 | ("https", "*", "*"), | ||
| 48 | ("*", "localhost", "*"), | ||
| 49 | ("*", "127.0.0.0/8", "*"), | ||
| 50 | ("*", "::1/128", "*"), | ||
| 51 | ("file", "*", None), | ||
| 52 | # ssh is always secure. | ||
| 53 | ("ssh", "*", "*"), | ||
| 54 | ] | ||
| 55 | |||
| 56 | |||
| 57 | logger = logging.getLogger(__name__) | ||
| 58 | |||
| 59 | |||
| 60 | class InstallationCandidate(object): | ||
| 61 | |||
| 62 | def __init__(self, project, version, location): | ||
| 63 | self.project = project | ||
| 64 | self.version = parse_version(version) | ||
| 65 | self.location = location | ||
| 66 | self._key = (self.project, self.version, self.location) | ||
| 67 | |||
| 68 | def __repr__(self): | ||
| 69 | return "<InstallationCandidate({!r}, {!r}, {!r})>".format( | ||
| 70 | self.project, self.version, self.location, | ||
| 71 | ) | ||
| 72 | |||
| 73 | def __hash__(self): | ||
| 74 | return hash(self._key) | ||
| 75 | |||
| 76 | def __lt__(self, other): | ||
| 77 | return self._compare(other, lambda s, o: s < o) | ||
| 78 | |||
| 79 | def __le__(self, other): | ||
| 80 | return self._compare(other, lambda s, o: s <= o) | ||
| 81 | |||
| 82 | def __eq__(self, other): | ||
| 83 | return self._compare(other, lambda s, o: s == o) | ||
| 84 | |||
| 85 | def __ge__(self, other): | ||
| 86 | return self._compare(other, lambda s, o: s >= o) | ||
| 87 | |||
| 88 | def __gt__(self, other): | ||
| 89 | return self._compare(other, lambda s, o: s > o) | ||
| 90 | |||
| 91 | def __ne__(self, other): | ||
| 92 | return self._compare(other, lambda s, o: s != o) | ||
| 93 | |||
| 94 | def _compare(self, other, method): | ||
| 95 | if not isinstance(other, InstallationCandidate): | ||
| 96 | return NotImplemented | ||
| 97 | |||
| 98 | return method(self._key, other._key) | ||
| 99 | |||
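The `_compare` delegation above is a common pattern: every rich comparison funnels through one key tuple, which keeps the six orderings mutually consistent. A toy version (class and field names here are illustrative, not pip's, and the version is left as a plain tuple rather than a parsed `Version`):

```python
class Candidate(object):
    def __init__(self, project, version):
        self._key = (project, version)

    def _compare(self, other, method):
        if not isinstance(other, Candidate):
            return NotImplemented
        return method(self._key, other._key)

    def __lt__(self, other):
        return self._compare(other, lambda s, o: s < o)

    def __eq__(self, other):
        return self._compare(other, lambda s, o: s == o)

print(Candidate("foo", (1, 0)) < Candidate("foo", (1, 2)))  # True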
| 100 | |||
| 101 | class PackageFinder(object): | ||
| 102 | """This finds packages. | ||
| 103 | |||
| 104 | This is meant to match easy_install's technique for looking for | ||
| 105 | packages, by reading pages and looking for appropriate links. | ||
| 106 | """ | ||
| 107 | |||
| 108 | def __init__(self, find_links, index_urls, allow_all_prereleases=False, | ||
| 109 | trusted_hosts=None, process_dependency_links=False, | ||
| 110 | session=None, format_control=None, platform=None, | ||
| 111 | versions=None, abi=None, implementation=None): | ||
| 112 | """Create a PackageFinder. | ||
| 113 | |||
| 114 | :param format_control: A FormatControl object or None. Used to control | ||
| 115 | the selection of source packages / binary packages when consulting | ||
| 116 | the index and links. | ||
| 117 | :param platform: A string or None. If None, searches for packages | ||
| 118 | that are supported by the current system. Otherwise, will find | ||
| 119 | packages that can be built on the platform passed in. These | ||
| 120 | packages will only be downloaded for distribution: they will | ||
| 121 | not be built locally. | ||
| 122 | :param versions: A list of strings or None. This is passed directly | ||
| 123 | to pep425tags.py in the get_supported() method. | ||
| 124 | :param abi: A string or None. This is passed directly | ||
| 125 | to pep425tags.py in the get_supported() method. | ||
| 126 | :param implementation: A string or None. This is passed directly | ||
| 127 | to pep425tags.py in the get_supported() method. | ||
| 128 | """ | ||
| 129 | if session is None: | ||
| 130 | raise TypeError( | ||
| 131 | "PackageFinder() missing 1 required keyword argument: " | ||
| 132 | "'session'" | ||
| 133 | ) | ||
| 134 | |||
| 135 | # Build find_links. If an argument starts with ~, it may be | ||
| 136 | # a local file relative to a home directory. So try normalizing | ||
| 137 | # it and if it exists, use the normalized version. | ||
| 138 | # This is deliberately conservative - it might be fine just to | ||
| 139 | # blindly normalize anything starting with a ~... | ||
| 140 | self.find_links = [] | ||
| 141 | for link in find_links: | ||
| 142 | if link.startswith('~'): | ||
| 143 | new_link = normalize_path(link) | ||
| 144 | if os.path.exists(new_link): | ||
| 145 | link = new_link | ||
| 146 | self.find_links.append(link) | ||
| 147 | |||
| 148 | self.index_urls = index_urls | ||
| 149 | self.dependency_links = [] | ||
| 150 | |||
| 151 | # These are boring links that have already been logged somehow: | ||
| 152 | self.logged_links = set() | ||
| 153 | |||
| 154 | self.format_control = format_control or FormatControl(set(), set()) | ||
| 155 | |||
| 156 | # Domains that we won't emit warnings for when not using HTTPS | ||
| 157 | self.secure_origins = [ | ||
| 158 | ("*", host, "*") | ||
| 159 | for host in (trusted_hosts if trusted_hosts else []) | ||
| 160 | ] | ||
| 161 | |||
| 162 | # Do we want to allow _all_ pre-releases? | ||
| 163 | self.allow_all_prereleases = allow_all_prereleases | ||
| 164 | |||
| 165 | # Do we process dependency links? | ||
| 166 | self.process_dependency_links = process_dependency_links | ||
| 167 | |||
| 168 | # The Session we'll use to make requests | ||
| 169 | self.session = session | ||
| 170 | |||
| 171 | # The valid tags to check potential found wheel candidates against | ||
| 172 | self.valid_tags = get_supported( | ||
| 173 | versions=versions, | ||
| 174 | platform=platform, | ||
| 175 | abi=abi, | ||
| 176 | impl=implementation, | ||
| 177 | ) | ||
| 178 | |||
| 179 | # If we don't have TLS enabled, then WARN if anyplace we're looking | ||
| 180 | # relies on TLS. | ||
| 181 | if not HAS_TLS: | ||
| 182 | for link in itertools.chain(self.index_urls, self.find_links): | ||
| 183 | parsed = urllib_parse.urlparse(link) | ||
| 184 | if parsed.scheme == "https": | ||
| 185 | logger.warning( | ||
| 186 | "pip is configured with locations that require " | ||
| 187 | "TLS/SSL, however the ssl module in Python is not " | ||
| 188 | "available." | ||
| 189 | ) | ||
| 190 | break | ||
| 191 | |||
| 192 | def get_formatted_locations(self): | ||
| 193 | lines = [] | ||
| 194 | if self.index_urls and self.index_urls != [PyPI.simple_url]: | ||
| 195 | lines.append( | ||
| 196 | "Looking in indexes: {}".format(", ".join(self.index_urls)) | ||
| 197 | ) | ||
| 198 | if self.find_links: | ||
| 199 | lines.append( | ||
| 200 | "Looking in links: {}".format(", ".join(self.find_links)) | ||
| 201 | ) | ||
| 202 | return "\n".join(lines) | ||
| 203 | |||
| 204 | def add_dependency_links(self, links): | ||
| 205 | # # FIXME: this shouldn't be global list this, it should only | ||
| 206 | # # apply to requirements of the package that specifies the | ||
| 207 | # # dependency_links value | ||
| 208 | # # FIXME: also, we should track comes_from (i.e., use Link) | ||
| 209 | if self.process_dependency_links: | ||
| 210 | warnings.warn( | ||
| 211 | "Dependency Links processing has been deprecated and will be " | ||
| 212 | "removed in a future release.", | ||
| 213 | RemovedInPip11Warning, | ||
| 214 | ) | ||
| 215 | self.dependency_links.extend(links) | ||
| 216 | |||
| 217 | @staticmethod | ||
| 218 | def _sort_locations(locations, expand_dir=False): | ||
| 219 | """ | ||
| 220 | Sort locations into "files" (archives) and "urls", and return | ||
| 221 | a pair of lists (files, urls). | ||
| 222 | """ | ||
| 223 | files = [] | ||
| 224 | urls = [] | ||
| 225 | |||
| 226 | # puts the url for the given file path into the appropriate list | ||
| 227 | def sort_path(path): | ||
| 228 | url = path_to_url(path) | ||
| 229 | if mimetypes.guess_type(url, strict=False)[0] == 'text/html': | ||
| 230 | urls.append(url) | ||
| 231 | else: | ||
| 232 | files.append(url) | ||
| 233 | |||
| 234 | for url in locations: | ||
| 235 | |||
| 236 | is_local_path = os.path.exists(url) | ||
| 237 | is_file_url = url.startswith('file:') | ||
| 238 | |||
| 239 | if is_local_path or is_file_url: | ||
| 240 | if is_local_path: | ||
| 241 | path = url | ||
| 242 | else: | ||
| 243 | path = url_to_path(url) | ||
| 244 | if os.path.isdir(path): | ||
| 245 | if expand_dir: | ||
| 246 | path = os.path.realpath(path) | ||
| 247 | for item in os.listdir(path): | ||
| 248 | sort_path(os.path.join(path, item)) | ||
| 249 | elif is_file_url: | ||
| 250 | urls.append(url) | ||
| 251 | elif os.path.isfile(path): | ||
| 252 | sort_path(path) | ||
| 253 | else: | ||
| 254 | logger.warning( | ||
| 255 | "Url '%s' is ignored: it is neither a file " | ||
| 256 | "nor a directory.", url, | ||
| 257 | ) | ||
| 258 | elif is_url(url): | ||
| 259 | # Only add url with clear scheme | ||
| 260 | urls.append(url) | ||
| 261 | else: | ||
| 262 | logger.warning( | ||
| 263 | "Url '%s' is ignored. It is either a non-existing " | ||
| 264 | "path or lacks a specific scheme.", url, | ||
| 265 | ) | ||
| 266 | |||
| 267 | return files, urls | ||
| 268 | |||
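The files/urls split in `_sort_locations` hinges on `mimetypes.guess_type`: anything that looks like an HTML page is treated as an index to crawl, everything else as a downloadable archive. A standalone illustration with hypothetical paths:

```python
import mimetypes

for path in ["/tmp/index.html", "/tmp/pkg-1.0.tar.gz",
             "/tmp/pkg-1.0-py3-none-any.whl"]:
    kind = mimetypes.guess_type(path, strict=False)[0]
    bucket = "urls" if kind == "text/html" else "files"
    print('%s -> %s' % (path, bucket))
# /tmp/index.html -> urls
# /tmp/pkg-1.0.tar.gz -> files
# /tmp/pkg-1.0-py3-none-any.whl -> files
```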
| 269 | def _candidate_sort_key(self, candidate): | ||
| 270 | """ | ||
| 271 | Generate a sort key for an installation candidate. | ||
| 272 | The greater the return value, the more preferred the candidate. | ||
| 273 | If not finding wheels, candidates are sorted by version only. | ||
| 274 | If finding wheels, the sort order is by version, then: | ||
| 275 | 1. existing installs | ||
| 276 | 2. wheels ordered via Wheel.support_index_min(self.valid_tags) | ||
| 277 | 3. source archives | ||
| 278 | Note: it was considered to embed this logic into the Link | ||
| 279 | comparison operators, but then different sdist links | ||
| 280 | with the same version would have to be considered equal. | ||
| 281 | """ | ||
| 282 | support_num = len(self.valid_tags) | ||
| 283 | build_tag = tuple() | ||
| 284 | if candidate.location.is_wheel: | ||
| 285 | # can raise InvalidWheelFilename | ||
| 286 | wheel = Wheel(candidate.location.filename) | ||
| 287 | if not wheel.supported(self.valid_tags): | ||
| 288 | raise UnsupportedWheel( | ||
| 289 | "%s is not a supported wheel for this platform. It " | ||
| 290 | "can't be sorted." % wheel.filename | ||
| 291 | ) | ||
| 292 | pri = -(wheel.support_index_min(self.valid_tags)) | ||
| 293 | if wheel.build_tag is not None: | ||
| 294 | match = re.match(r'^(\d+)(.*)$', wheel.build_tag) | ||
| 295 | build_tag_groups = match.groups() | ||
| 296 | build_tag = (int(build_tag_groups[0]), build_tag_groups[1]) | ||
| 297 | else: # sdist | ||
| 298 | pri = -(support_num) | ||
| 299 | return (candidate.version, build_tag, pri) | ||
| 300 | |||
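Because candidates are picked with `max()` over this key, a wheel with a better tag match (a `pri` closer to 0) beats an sdist at the same version. A sketch with invented key values, using plain strings in place of parsed versions:

```python
# (version, build_tag, priority) keys as produced above
candidates = [
    ("1.0", (), -5),   # sdist: pri = -len(valid_tags)
    ("1.0", (), -1),   # well-matched wheel: pri = -support_index_min
]
print(max(candidates))  # ('1.0', (), -1): the wheel wins at equal version
```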
| 301 | def _validate_secure_origin(self, logger, location): | ||
| 302 | # Determine if this url used a secure transport mechanism | ||
| 303 | parsed = urllib_parse.urlparse(str(location)) | ||
| 304 | origin = (parsed.scheme, parsed.hostname, parsed.port) | ||
| 305 | |||
| 306 | # Determine the protocol to check against the secure-origin list. | ||
| 307 | # Don't count the repository type as part of the protocol: in | ||
| 308 | # cases such as "git+ssh", only use "ssh" (i.e., only verify against | ||
| 309 | # the last scheme). | ||
| 310 | protocol = origin[0].rsplit('+', 1)[-1] | ||
| 311 | |||
| 312 | # Determine if our origin is a secure origin by looking through our | ||
| 313 | # hardcoded list of secure origins, as well as any additional ones | ||
| 314 | # configured on this PackageFinder instance. | ||
| 315 | for secure_origin in (SECURE_ORIGINS + self.secure_origins): | ||
| 316 | if protocol != secure_origin[0] and secure_origin[0] != "*": | ||
| 317 | continue | ||
| 318 | |||
| 319 | try: | ||
| 320 | # We need to do this decode dance to ensure that we have a | ||
| 321 | # unicode object, even on Python 2.x. | ||
| 322 | addr = ipaddress.ip_address( | ||
| 323 | origin[1] | ||
| 324 | if ( | ||
| 325 | isinstance(origin[1], six.text_type) or | ||
| 326 | origin[1] is None | ||
| 327 | ) | ||
| 328 | else origin[1].decode("utf8") | ||
| 329 | ) | ||
| 330 | network = ipaddress.ip_network( | ||
| 331 | secure_origin[1] | ||
| 332 | if isinstance(secure_origin[1], six.text_type) | ||
| 333 | else secure_origin[1].decode("utf8") | ||
| 334 | ) | ||
| 335 | except ValueError: | ||
| 336 | # We don't have a valid address and network pair, so we'll | ||
| 337 | # compare this origin against hostnames instead. | ||
| 338 | if (origin[1] and | ||
| 339 | origin[1].lower() != secure_origin[1].lower() and | ||
| 340 | secure_origin[1] != "*"): | ||
| 341 | continue | ||
| 342 | else: | ||
| 343 | # We have a valid address and network, so see if the address | ||
| 344 | # is contained within the network. | ||
| 345 | if addr not in network: | ||
| 346 | continue | ||
| 347 | |||
| 348 | # Check to see if the port matches | ||
| 349 | if (origin[2] != secure_origin[2] and | ||
| 350 | secure_origin[2] != "*" and | ||
| 351 | secure_origin[2] is not None): | ||
| 352 | continue | ||
| 353 | |||
| 354 | # If we've gotten here, then this origin matches the current | ||
| 355 | # secure origin and we should return True | ||
| 356 | return True | ||
| 357 | |||
| 358 | # If we've gotten to this point, then the origin isn't secure and we | ||
| 359 | # will not accept it as a valid location to search. We will however | ||
| 360 | # log a warning that we are ignoring it. | ||
| 361 | logger.warning( | ||
| 362 | "The repository located at %s is not a trusted or secure host and " | ||
| 363 | "is being ignored. If this repository is available via HTTPS we " | ||
| 364 | "recommend you use HTTPS instead, otherwise you may silence " | ||
| 365 | "this warning and allow it anyway with '--trusted-host %s'.", | ||
| 366 | parsed.hostname, | ||
| 367 | parsed.hostname, | ||
| 368 | ) | ||
| 369 | |||
| 370 | return False | ||
| 371 | |||
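The address/network branch of the check above is just `ipaddress` membership testing, with the hostname comparison as the `ValueError` fallback. The core in two lines (stdlib `ipaddress` on Python 3; pip vendors a copy for Python 2):

```python
import ipaddress

addr = ipaddress.ip_address(u"127.0.0.1")
network = ipaddress.ip_network(u"127.0.0.0/8")
print(addr in network)  # True: loopback matches the built-in secure origin
```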
| 372 | def _get_index_urls_locations(self, project_name): | ||
| 373 | """Returns the locations found via self.index_urls | ||
| 374 | |||
| 375 | Checks the url_name on the main (first in the list) index and | ||
| 376 | uses this url_name to produce all locations. | ||
| 377 | """ | ||
| 378 | |||
| 379 | def mkurl_pypi_url(url): | ||
| 380 | loc = posixpath.join( | ||
| 381 | url, | ||
| 382 | urllib_parse.quote(canonicalize_name(project_name))) | ||
| 383 | # For maximum compatibility with easy_install, ensure the path | ||
| 384 | # ends in a trailing slash. Although this isn't in the spec | ||
| 385 | # (and PyPI can handle it without the slash) some other index | ||
| 386 | # implementations might break if they relied on easy_install's | ||
| 387 | # behavior. | ||
| 388 | if not loc.endswith('/'): | ||
| 389 | loc = loc + '/' | ||
| 390 | return loc | ||
| 391 | |||
| 392 | return [mkurl_pypi_url(url) for url in self.index_urls] | ||
| 393 | |||
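What `mkurl_pypi_url` produces for PyPI's simple index, shown standalone. Note the approximation: `canonicalize_name` is stood in for by `str.lower()`, whereas the real helper also collapses runs of `-`, `_`, and `.` into a single `-`:

```python
import posixpath
try:
    from urllib.parse import quote   # Python 3
except ImportError:
    from urllib import quote         # Python 2

loc = posixpath.join("https://pypi.org/simple", quote("Requests".lower()))
if not loc.endswith('/'):
    loc += '/'
print(loc)  # https://pypi.org/simple/requests/
```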
| 394 | def find_all_candidates(self, project_name): | ||
| 395 | """Find all available InstallationCandidate for project_name | ||
| 396 | |||
| 397 | This checks index_urls, find_links and dependency_links. | ||
| 398 | All versions found are returned as an InstallationCandidate list. | ||
| 399 | |||
| 400 | See _link_package_versions for details on which files are accepted | ||
| 401 | """ | ||
| 402 | index_locations = self._get_index_urls_locations(project_name) | ||
| 403 | index_file_loc, index_url_loc = self._sort_locations(index_locations) | ||
| 404 | fl_file_loc, fl_url_loc = self._sort_locations( | ||
| 405 | self.find_links, expand_dir=True, | ||
| 406 | ) | ||
| 407 | dep_file_loc, dep_url_loc = self._sort_locations(self.dependency_links) | ||
| 408 | |||
| 409 | file_locations = (Link(url) for url in itertools.chain( | ||
| 410 | index_file_loc, fl_file_loc, dep_file_loc, | ||
| 411 | )) | ||
| 412 | |||
| 413 | # We trust every url that the user has given us whether it was given | ||
| 414 | # via --index-url or --find-links | ||
| 415 | # We explicitly do not trust links that came from dependency_links | ||
| 416 | # We want to filter out anything which does not have a secure origin. | ||
| 417 | url_locations = [ | ||
| 418 | link for link in itertools.chain( | ||
| 419 | (Link(url) for url in index_url_loc), | ||
| 420 | (Link(url) for url in fl_url_loc), | ||
| 421 | (Link(url) for url in dep_url_loc), | ||
| 422 | ) | ||
| 423 | if self._validate_secure_origin(logger, link) | ||
| 424 | ] | ||
| 425 | |||
| 426 | logger.debug('%d location(s) to search for versions of %s:', | ||
| 427 | len(url_locations), project_name) | ||
| 428 | |||
| 429 | for location in url_locations: | ||
| 430 | logger.debug('* %s', location) | ||
| 431 | |||
| 432 | canonical_name = canonicalize_name(project_name) | ||
| 433 | formats = fmt_ctl_formats(self.format_control, canonical_name) | ||
| 434 | search = Search(project_name, canonical_name, formats) | ||
| 435 | find_links_versions = self._package_versions( | ||
| 436 | # We trust every directly linked archive in find_links | ||
| 437 | (Link(url, '-f') for url in self.find_links), | ||
| 438 | search | ||
| 439 | ) | ||
| 440 | |||
| 441 | page_versions = [] | ||
| 442 | for page in self._get_pages(url_locations, project_name): | ||
| 443 | logger.debug('Analyzing links from page %s', page.url) | ||
| 444 | with indent_log(): | ||
| 445 | page_versions.extend( | ||
| 446 | self._package_versions(page.links, search) | ||
| 447 | ) | ||
| 448 | |||
| 449 | dependency_versions = self._package_versions( | ||
| 450 | (Link(url) for url in self.dependency_links), search | ||
| 451 | ) | ||
| 452 | if dependency_versions: | ||
| 453 | logger.debug( | ||
| 454 | 'dependency_links found: %s', | ||
| 455 | ', '.join([ | ||
| 456 | version.location.url for version in dependency_versions | ||
| 457 | ]) | ||
| 458 | ) | ||
| 459 | |||
| 460 | file_versions = self._package_versions(file_locations, search) | ||
| 461 | if file_versions: | ||
| 462 | file_versions.sort(reverse=True) | ||
| 463 | logger.debug( | ||
| 464 | 'Local files found: %s', | ||
| 465 | ', '.join([ | ||
| 466 | url_to_path(candidate.location.url) | ||
| 467 | for candidate in file_versions | ||
| 468 | ]) | ||
| 469 | ) | ||
| 470 | |||
| 471 | # This is an intentional priority ordering | ||
| 472 | return ( | ||
| 473 | file_versions + find_links_versions + page_versions + | ||
| 474 | dependency_versions | ||
| 475 | ) | ||
| 476 | |||
| 477 | def find_requirement(self, req, upgrade): | ||
| 478 | """Try to find a Link matching req | ||
| 479 | |||
| 480 | Expects req, an InstallRequirement and upgrade, a boolean | ||
| 481 | Returns a Link if found, | ||
| 482 | Raises DistributionNotFound or BestVersionAlreadyInstalled otherwise | ||
| 483 | """ | ||
| 484 | all_candidates = self.find_all_candidates(req.name) | ||
| 485 | |||
| 486 | # Filter out anything which doesn't match our specifier | ||
| 487 | compatible_versions = set( | ||
| 488 | req.specifier.filter( | ||
| 489 | # We turn the version object into a str here because otherwise | ||
| 490 | # when we're debundled but setuptools isn't, Python will see | ||
| 491 | # packaging.version.Version and | ||
| 492 | # pkg_resources._vendor.packaging.version.Version as different | ||
| 493 | # types. This way we'll use a str as a common data interchange | ||
| 494 | # format. If we stop using the pkg_resources provided specifier | ||
| 495 | # and start using our own, we can drop the cast to str(). | ||
| 496 | [str(c.version) for c in all_candidates], | ||
| 497 | prereleases=( | ||
| 498 | self.allow_all_prereleases | ||
| 499 | if self.allow_all_prereleases else None | ||
| 500 | ), | ||
| 501 | ) | ||
| 502 | ) | ||
| 503 | applicable_candidates = [ | ||
| 504 | # Again, converting to str to deal with debundling. | ||
| 505 | c for c in all_candidates if str(c.version) in compatible_versions | ||
| 506 | ] | ||
| 507 | |||
| 508 | if applicable_candidates: | ||
| 509 | best_candidate = max(applicable_candidates, | ||
| 510 | key=self._candidate_sort_key) | ||
| 511 | else: | ||
| 512 | best_candidate = None | ||
| 513 | |||
| 514 | if req.satisfied_by is not None: | ||
| 515 | installed_version = parse_version(req.satisfied_by.version) | ||
| 516 | else: | ||
| 517 | installed_version = None | ||
| 518 | |||
| 519 | if installed_version is None and best_candidate is None: | ||
| 520 | logger.critical( | ||
| 521 | 'Could not find a version that satisfies the requirement %s ' | ||
| 522 | '(from versions: %s)', | ||
| 523 | req, | ||
| 524 | ', '.join( | ||
| 525 | sorted( | ||
| 526 | {str(c.version) for c in all_candidates}, | ||
| 527 | key=parse_version, | ||
| 528 | ) | ||
| 529 | ) | ||
| 530 | ) | ||
| 531 | |||
| 532 | raise DistributionNotFound( | ||
| 533 | 'No matching distribution found for %s' % req | ||
| 534 | ) | ||
| 535 | |||
| 536 | best_installed = False | ||
| 537 | if installed_version and ( | ||
| 538 | best_candidate is None or | ||
| 539 | best_candidate.version <= installed_version): | ||
| 540 | best_installed = True | ||
| 541 | |||
| 542 | if not upgrade and installed_version is not None: | ||
| 543 | if best_installed: | ||
| 544 | logger.debug( | ||
| 545 | 'Existing installed version (%s) is most up-to-date and ' | ||
| 546 | 'satisfies requirement', | ||
| 547 | installed_version, | ||
| 548 | ) | ||
| 549 | else: | ||
| 550 | logger.debug( | ||
| 551 | 'Existing installed version (%s) satisfies requirement ' | ||
| 552 | '(most up-to-date version is %s)', | ||
| 553 | installed_version, | ||
| 554 | best_candidate.version, | ||
| 555 | ) | ||
| 556 | return None | ||
| 557 | |||
| 558 | if best_installed: | ||
| 559 | # We have an existing version, and it's the best version | ||
| 560 | logger.debug( | ||
| 561 | 'Installed version (%s) is most up-to-date (past versions: ' | ||
| 562 | '%s)', | ||
| 563 | installed_version, | ||
| 564 | ', '.join(sorted(compatible_versions, key=parse_version)) or | ||
| 565 | "none", | ||
| 566 | ) | ||
| 567 | raise BestVersionAlreadyInstalled | ||
| 568 | |||
| 569 | logger.debug( | ||
| 570 | 'Using version %s (newest of versions: %s)', | ||
| 571 | best_candidate.version, | ||
| 572 | ', '.join(sorted(compatible_versions, key=parse_version)) | ||
| 573 | ) | ||
| 574 | return best_candidate.location | ||
| 575 | |||
| 576 | def _get_pages(self, locations, project_name): | ||
| 577 | """ | ||
| 578 | Yields HTMLPage objects from the given locations, skipping | ||
| 579 | locations that have errors. | ||
| 580 | """ | ||
| 581 | seen = set() | ||
| 582 | for location in locations: | ||
| 583 | if location in seen: | ||
| 584 | continue | ||
| 585 | seen.add(location) | ||
| 586 | |||
| 587 | page = self._get_page(location) | ||
| 588 | if page is None: | ||
| 589 | continue | ||
| 590 | |||
| 591 | yield page | ||
| 592 | |||
| 593 | _py_version_re = re.compile(r'-py([123]\.?[0-9]?)$') | ||
| 594 | |||
| 595 | def _sort_links(self, links): | ||
| 596 | """ | ||
| 597 | Returns elements of links in order, non-egg links first, egg links | ||
| 598 | second, while eliminating duplicates | ||
| 599 | """ | ||
| 600 | eggs, no_eggs = [], [] | ||
| 601 | seen = set() | ||
| 602 | for link in links: | ||
| 603 | if link not in seen: | ||
| 604 | seen.add(link) | ||
| 605 | if link.egg_fragment: | ||
| 606 | eggs.append(link) | ||
| 607 | else: | ||
| 608 | no_eggs.append(link) | ||
| 609 | return no_eggs + eggs | ||
| 610 | |||
| 611 | def _package_versions(self, links, search): | ||
| 612 | result = [] | ||
| 613 | for link in self._sort_links(links): | ||
| 614 | v = self._link_package_versions(link, search) | ||
| 615 | if v is not None: | ||
| 616 | result.append(v) | ||
| 617 | return result | ||
| 618 | |||
| 619 | def _log_skipped_link(self, link, reason): | ||
| 620 | if link not in self.logged_links: | ||
| 621 | logger.debug('Skipping link %s; %s', link, reason) | ||
| 622 | self.logged_links.add(link) | ||
| 623 | |||
| 624 | def _link_package_versions(self, link, search): | ||
| 625 | """Return an InstallationCandidate or None""" | ||
| 626 | version = None | ||
| 627 | if link.egg_fragment: | ||
| 628 | egg_info = link.egg_fragment | ||
| 629 | ext = link.ext | ||
| 630 | else: | ||
| 631 | egg_info, ext = link.splitext() | ||
| 632 | if not ext: | ||
| 633 | self._log_skipped_link(link, 'not a file') | ||
| 634 | return | ||
| 635 | if ext not in SUPPORTED_EXTENSIONS: | ||
| 636 | self._log_skipped_link( | ||
| 637 | link, 'unsupported archive format: %s' % ext, | ||
| 638 | ) | ||
| 639 | return | ||
| 640 | if "binary" not in search.formats and ext == wheel_ext: | ||
| 641 | self._log_skipped_link( | ||
| 642 | link, 'No binaries permitted for %s' % search.supplied, | ||
| 643 | ) | ||
| 644 | return | ||
| 645 | if "macosx10" in link.path and ext == '.zip': | ||
| 646 | self._log_skipped_link(link, 'macosx10 one') | ||
| 647 | return | ||
| 648 | if ext == wheel_ext: | ||
| 649 | try: | ||
| 650 | wheel = Wheel(link.filename) | ||
| 651 | except InvalidWheelFilename: | ||
| 652 | self._log_skipped_link(link, 'invalid wheel filename') | ||
| 653 | return | ||
| 654 | if canonicalize_name(wheel.name) != search.canonical: | ||
| 655 | self._log_skipped_link( | ||
| 656 | link, 'wrong project name (not %s)' % search.supplied) | ||
| 657 | return | ||
| 658 | |||
| 659 | if not wheel.supported(self.valid_tags): | ||
| 660 | self._log_skipped_link( | ||
| 661 | link, 'it is not compatible with this Python') | ||
| 662 | return | ||
| 663 | |||
| 664 | version = wheel.version | ||
| 665 | |||
| 666 | # This should be up by the search.ok_binary check, but see issue 2700. | ||
| 667 | if "source" not in search.formats and ext != wheel_ext: | ||
| 668 | self._log_skipped_link( | ||
| 669 | link, 'No sources permitted for %s' % search.supplied, | ||
| 670 | ) | ||
| 671 | return | ||
| 672 | |||
| 673 | if not version: | ||
| 674 | version = egg_info_matches(egg_info, search.supplied, link) | ||
| 675 | if version is None: | ||
| 676 | self._log_skipped_link( | ||
| 677 | link, 'wrong project name (not %s)' % search.supplied) | ||
| 678 | return | ||
| 679 | |||
| 680 | match = self._py_version_re.search(version) | ||
| 681 | if match: | ||
| 682 | version = version[:match.start()] | ||
| 683 | py_version = match.group(1) | ||
| 684 | if py_version != sys.version[:3]: | ||
| 685 | self._log_skipped_link( | ||
| 686 | link, 'Python version is incorrect') | ||
| 687 | return | ||
| 688 | try: | ||
| 689 | support_this_python = check_requires_python(link.requires_python) | ||
| 690 | except specifiers.InvalidSpecifier: | ||
| 691 | logger.debug("Package %s has an invalid Requires-Python entry: %s", | ||
| 692 | link.filename, link.requires_python) | ||
| 693 | support_this_python = True | ||
| 694 | |||
| 695 | if not support_this_python: | ||
| 696 | logger.debug("The package %s is incompatible with the python " | ||
| 697 | "version in use. Acceptable python versions are: %s", | ||
| 698 | link, link.requires_python) | ||
| 699 | return | ||
| 700 | logger.debug('Found link %s, version: %s', link, version) | ||
| 701 | |||
| 702 | return InstallationCandidate(search.supplied, version, link) | ||
| 703 | |||
| 704 | def _get_page(self, link): | ||
| 705 | return HTMLPage.get_page(link, session=self.session) | ||
| 706 | |||
| 707 | |||
| 708 | def egg_info_matches( | ||
| 709 | egg_info, search_name, link, | ||
| 710 | _egg_info_re=re.compile(r'([a-z0-9_.]+)-([a-z0-9_.!+-]+)', re.I)): | ||
| 711 | """Pull the version part out of a string. | ||
| 712 | |||
| 713 | :param egg_info: The string to parse. E.g. foo-2.1 | ||
| 714 | :param search_name: The name of the package this belongs to. None to | ||
| 715 | infer the name. Note that this cannot unambiguously parse strings | ||
| 716 | like foo-2-2 which might be foo, 2-2 or foo-2, 2. | ||
| 717 | :param link: The link the string came from, for logging on failure. | ||
| 718 | """ | ||
| 719 | match = _egg_info_re.search(egg_info) | ||
| 720 | if not match: | ||
| 721 | logger.debug('Could not parse version from link: %s', link) | ||
| 722 | return None | ||
| 723 | if search_name is None: | ||
| 724 | full_match = match.group(0) | ||
| 725 | return full_match[full_match.index('-'):] | ||
| 726 | name = match.group(0).lower() | ||
| 727 | # To match the "safe" name that pkg_resources creates: | ||
| 728 | name = name.replace('_', '-') | ||
| 729 | # project name and version must be separated by a dash | ||
| 730 | look_for = search_name.lower() + "-" | ||
| 731 | if name.startswith(look_for): | ||
| 732 | return match.group(0)[len(look_for):] | ||
| 733 | else: | ||
| 734 | return None | ||
| 735 | |||
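`egg_info_matches` in action, with the regex copied from above and a made-up filename stem. The docstring's ambiguity warning is real: the regex alone cannot tell where the name ends and the version begins, which is why the known `search_name` prefix is stripped instead:

```python
import re

_egg_info_re = re.compile(r'([a-z0-9_.]+)-([a-z0-9_.!+-]+)', re.I)

m = _egg_info_re.search('pip-10.0.1')
print(m.group(0))                    # pip-10.0.1
look_for = 'pip' + '-'
print(m.group(0)[len(look_for):])    # 10.0.1, the version part
```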
| 736 | |||
| 737 | class HTMLPage(object): | ||
| 738 | """Represents one page, along with its URL""" | ||
| 739 | |||
| 740 | def __init__(self, content, url, headers=None): | ||
| 741 | # Determine if we have any encoding information in our headers | ||
| 742 | encoding = None | ||
| 743 | if headers and "Content-Type" in headers: | ||
| 744 | content_type, params = cgi.parse_header(headers["Content-Type"]) | ||
| 745 | |||
| 746 | if "charset" in params: | ||
| 747 | encoding = params['charset'] | ||
| 748 | |||
| 749 | self.content = content | ||
| 750 | self.parsed = html5lib.parse( | ||
| 751 | self.content, | ||
| 752 | transport_encoding=encoding, | ||
| 753 | namespaceHTMLElements=False, | ||
| 754 | ) | ||
| 755 | self.url = url | ||
| 756 | self.headers = headers | ||
| 757 | |||
| 758 | def __str__(self): | ||
| 759 | return self.url | ||
| 760 | |||
| 761 | @classmethod | ||
| 762 | def get_page(cls, link, skip_archives=True, session=None): | ||
| 763 | if session is None: | ||
| 764 | raise TypeError( | ||
| 765 | "get_page() missing 1 required keyword argument: 'session'" | ||
| 766 | ) | ||
| 767 | |||
| 768 | url = link.url | ||
| 769 | url = url.split('#', 1)[0] | ||
| 770 | |||
| 771 | # Check for VCS schemes that do not support lookup as web pages. | ||
| 772 | from pip._internal.vcs import VcsSupport | ||
| 773 | for scheme in VcsSupport.schemes: | ||
| 774 | if url.lower().startswith(scheme) and url[len(scheme)] in '+:': | ||
| 775 | logger.debug('Cannot look at %s URL %s', scheme, link) | ||
| 776 | return None | ||
| 777 | |||
| 778 | try: | ||
| 779 | if skip_archives: | ||
| 780 | filename = link.filename | ||
| 781 | for bad_ext in ARCHIVE_EXTENSIONS: | ||
| 782 | if filename.endswith(bad_ext): | ||
| 783 | content_type = cls._get_content_type( | ||
| 784 | url, session=session, | ||
| 785 | ) | ||
| 786 | if content_type.lower().startswith('text/html'): | ||
| 787 | break | ||
| 788 | else: | ||
| 789 | logger.debug( | ||
| 790 | 'Skipping page %s because of Content-Type: %s', | ||
| 791 | link, | ||
| 792 | content_type, | ||
| 793 | ) | ||
| 794 | return | ||
| 795 | |||
| 796 | logger.debug('Getting page %s', url) | ||
| 797 | |||
| 798 | # Tack index.html onto file:// URLs that point to directories | ||
| 799 | (scheme, netloc, path, params, query, fragment) = \ | ||
| 800 | urllib_parse.urlparse(url) | ||
| 801 | if (scheme == 'file' and | ||
| 802 | os.path.isdir(urllib_request.url2pathname(path))): | ||
| 803 | # add trailing slash if not present so urljoin doesn't trim | ||
| 804 | # final segment | ||
| 805 | if not url.endswith('/'): | ||
| 806 | url += '/' | ||
| 807 | url = urllib_parse.urljoin(url, 'index.html') | ||
| 808 | logger.debug(' file: URL is directory, getting %s', url) | ||
| 809 | |||
| 810 | resp = session.get( | ||
| 811 | url, | ||
| 812 | headers={ | ||
| 813 | "Accept": "text/html", | ||
| 814 | "Cache-Control": "max-age=600", | ||
| 815 | }, | ||
| 816 | ) | ||
| 817 | resp.raise_for_status() | ||
| 818 | |||
| 819 | # The check for archives above only works if the url ends with | ||
| 820 | # something that looks like an archive. However that is not a | ||
| 821 | # requirement of a url. Unless we issue a HEAD request on every | ||
| 822 | # url we cannot know ahead of time for sure if something is HTML | ||
| 823 | # or not. However we can check after we've downloaded it. | ||
| 824 | content_type = resp.headers.get('Content-Type', 'unknown') | ||
| 825 | if not content_type.lower().startswith("text/html"): | ||
| 826 | logger.debug( | ||
| 827 | 'Skipping page %s because of Content-Type: %s', | ||
| 828 | link, | ||
| 829 | content_type, | ||
| 830 | ) | ||
| 831 | return | ||
| 832 | |||
| 833 | inst = cls(resp.content, resp.url, resp.headers) | ||
| 834 | except requests.HTTPError as exc: | ||
| 835 | cls._handle_fail(link, exc, url) | ||
| 836 | except SSLError as exc: | ||
| 837 | reason = "There was a problem confirming the ssl certificate: " | ||
| 838 | reason += str(exc) | ||
| 839 | cls._handle_fail(link, reason, url, meth=logger.info) | ||
| 840 | except requests.ConnectionError as exc: | ||
| 841 | cls._handle_fail(link, "connection error: %s" % exc, url) | ||
| 842 | except requests.Timeout: | ||
| 843 | cls._handle_fail(link, "timed out", url) | ||
| 844 | else: | ||
| 845 | return inst | ||
| 846 | |||
| 847 | @staticmethod | ||
| 848 | def _handle_fail(link, reason, url, meth=None): | ||
| 849 | if meth is None: | ||
| 850 | meth = logger.debug | ||
| 851 | |||
| 852 | meth("Could not fetch URL %s: %s - skipping", link, reason) | ||
| 853 | |||
| 854 | @staticmethod | ||
| 855 | def _get_content_type(url, session): | ||
| 856 | """Get the Content-Type of the given url, using a HEAD request""" | ||
| 857 | scheme, netloc, path, query, fragment = urllib_parse.urlsplit(url) | ||
| 858 | if scheme not in {'http', 'https'}: | ||
| 859 | # FIXME: some warning or something? | ||
| 860 | # assertion error? | ||
| 861 | return '' | ||
| 862 | |||
| 863 | resp = session.head(url, allow_redirects=True) | ||
| 864 | resp.raise_for_status() | ||
| 865 | |||
| 866 | return resp.headers.get("Content-Type", "") | ||
| 867 | |||
| 868 | @cached_property | ||
| 869 | def base_url(self): | ||
| 870 | bases = [ | ||
| 871 | x for x in self.parsed.findall(".//base") | ||
| 872 | if x.get("href") is not None | ||
| 873 | ] | ||
| 874 | if bases and bases[0].get("href"): | ||
| 875 | return bases[0].get("href") | ||
| 876 | else: | ||
| 877 | return self.url | ||
| 878 | |||
| 879 | @property | ||
| 880 | def links(self): | ||
| 881 | """Yields all links in the page""" | ||
| 882 | for anchor in self.parsed.findall(".//a"): | ||
| 883 | if anchor.get("href"): | ||
| 884 | href = anchor.get("href") | ||
| 885 | url = self.clean_link( | ||
| 886 | urllib_parse.urljoin(self.base_url, href) | ||
| 887 | ) | ||
| 888 | pyrequire = anchor.get('data-requires-python') | ||
| 889 | pyrequire = unescape(pyrequire) if pyrequire else None | ||
| 890 | yield Link(url, self, requires_python=pyrequire) | ||
| 891 | |||
| 892 | _clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I) | ||
| 893 | |||
| 894 | def clean_link(self, url): | ||
| 895 | """Makes sure a link is fully encoded. That is, if a ' ' shows up in | ||
| 896 | the link, it will be rewritten to %20 (while not over-quoting | ||
| 897 | % or other characters).""" | ||
| 898 | return self._clean_re.sub( | ||
| 899 | lambda match: '%%%2x' % ord(match.group(0)), url) | ||
| 900 | |||
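`clean_link`'s percent-encoding in isolation: a space becomes `%20` while characters already legal in a URL, including `%` itself, are left alone (the URL below is fabricated):

```python
import re

_clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I)
url = 'https://example.org/my file.tar.gz'
print(_clean_re.sub(lambda m: '%%%2x' % ord(m.group(0)), url))
# https://example.org/my%20file.tar.gz
```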
| 901 | |||
| 902 | class Link(object): | ||
| 903 | |||
| 904 | def __init__(self, url, comes_from=None, requires_python=None): | ||
| 905 | """ | ||
| 906 | Object representing a parsed link from https://pypi.org/simple/* | ||
| 907 | |||
| 908 | url: | ||
| 909 | url of the resource pointed to (href of the link) | ||
| 910 | comes_from: | ||
| 911 | instance of HTMLPage where the link was found, or string. | ||
| 912 | requires_python: | ||
| 913 | String containing the `Requires-Python` metadata field, specified | ||
| 914 | in PEP 345. This may be specified by a data-requires-python | ||
| 915 | attribute in the HTML link tag, as described in PEP 503. | ||
| 916 | """ | ||
| 917 | |||
| 918 | # url can be a UNC windows share | ||
| 919 | if url.startswith('\\\\'): | ||
| 920 | url = path_to_url(url) | ||
| 921 | |||
| 922 | self.url = url | ||
| 923 | self.comes_from = comes_from | ||
| 924 | self.requires_python = requires_python if requires_python else None | ||
| 925 | |||
| 926 | def __str__(self): | ||
| 927 | if self.requires_python: | ||
| 928 | rp = ' (requires-python:%s)' % self.requires_python | ||
| 929 | else: | ||
| 930 | rp = '' | ||
| 931 | if self.comes_from: | ||
| 932 | return '%s (from %s)%s' % (self.url, self.comes_from, rp) | ||
| 933 | else: | ||
| 934 | return str(self.url) | ||
| 935 | |||
| 936 | def __repr__(self): | ||
| 937 | return '<Link %s>' % self | ||
| 938 | |||
| 939 | def __eq__(self, other): | ||
| 940 | if not isinstance(other, Link): | ||
| 941 | return NotImplemented | ||
| 942 | return self.url == other.url | ||
| 943 | |||
| 944 | def __ne__(self, other): | ||
| 945 | if not isinstance(other, Link): | ||
| 946 | return NotImplemented | ||
| 947 | return self.url != other.url | ||
| 948 | |||
| 949 | def __lt__(self, other): | ||
| 950 | if not isinstance(other, Link): | ||
| 951 | return NotImplemented | ||
| 952 | return self.url < other.url | ||
| 953 | |||
| 954 | def __le__(self, other): | ||
| 955 | if not isinstance(other, Link): | ||
| 956 | return NotImplemented | ||
| 957 | return self.url <= other.url | ||
| 958 | |||
| 959 | def __gt__(self, other): | ||
| 960 | if not isinstance(other, Link): | ||
| 961 | return NotImplemented | ||
| 962 | return self.url > other.url | ||
| 963 | |||
| 964 | def __ge__(self, other): | ||
| 965 | if not isinstance(other, Link): | ||
| 966 | return NotImplemented | ||
| 967 | return self.url >= other.url | ||
| 968 | |||
| 969 | def __hash__(self): | ||
| 970 | return hash(self.url) | ||
| 971 | |||
| 972 | @property | ||
| 973 | def filename(self): | ||
| 974 | _, netloc, path, _, _ = urllib_parse.urlsplit(self.url) | ||
| 975 | name = posixpath.basename(path.rstrip('/')) or netloc | ||
| 976 | name = urllib_parse.unquote(name) | ||
| 977 | assert name, ('URL %r produced no filename' % self.url) | ||
| 978 | return name | ||
| 979 | |||
| 980 | @property | ||
| 981 | def scheme(self): | ||
| 982 | return urllib_parse.urlsplit(self.url)[0] | ||
| 983 | |||
| 984 | @property | ||
| 985 | def netloc(self): | ||
| 986 | return urllib_parse.urlsplit(self.url)[1] | ||
| 987 | |||
| 988 | @property | ||
| 989 | def path(self): | ||
| 990 | return urllib_parse.unquote(urllib_parse.urlsplit(self.url)[2]) | ||
| 991 | |||
| 992 | def splitext(self): | ||
| 993 | return splitext(posixpath.basename(self.path.rstrip('/'))) | ||
| 994 | |||
| 995 | @property | ||
| 996 | def ext(self): | ||
| 997 | return self.splitext()[1] | ||
| 998 | |||
| 999 | @property | ||
| 1000 | def url_without_fragment(self): | ||
| 1001 | scheme, netloc, path, query, fragment = urllib_parse.urlsplit(self.url) | ||
| 1002 | return urllib_parse.urlunsplit((scheme, netloc, path, query, None)) | ||
| 1003 | |||
| 1004 | _egg_fragment_re = re.compile(r'[#&]egg=([^&]*)') | ||
| 1005 | |||
| 1006 | @property | ||
| 1007 | def egg_fragment(self): | ||
| 1008 | match = self._egg_fragment_re.search(self.url) | ||
| 1009 | if not match: | ||
| 1010 | return None | ||
| 1011 | return match.group(1) | ||
| 1012 | |||
| 1013 | _subdirectory_fragment_re = re.compile(r'[#&]subdirectory=([^&]*)') | ||
| 1014 | |||
| 1015 | @property | ||
| 1016 | def subdirectory_fragment(self): | ||
| 1017 | match = self._subdirectory_fragment_re.search(self.url) | ||
| 1018 | if not match: | ||
| 1019 | return None | ||
| 1020 | return match.group(1) | ||
| 1021 | |||
| 1022 | _hash_re = re.compile( | ||
| 1023 | r'(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)' | ||
| 1024 | ) | ||
| 1025 | |||
| 1026 | @property | ||
| 1027 | def hash(self): | ||
| 1028 | match = self._hash_re.search(self.url) | ||
| 1029 | if match: | ||
| 1030 | return match.group(2) | ||
| 1031 | return None | ||
| 1032 | |||
| 1033 | @property | ||
| 1034 | def hash_name(self): | ||
| 1035 | match = self._hash_re.search(self.url) | ||
| 1036 | if match: | ||
| 1037 | return match.group(1) | ||
| 1038 | return None | ||
| 1039 | |||
| 1040 | @property | ||
| 1041 | def show_url(self): | ||
| 1042 | return posixpath.basename(self.url.split('#', 1)[0].split('?', 1)[0]) | ||
| 1043 | |||
| 1044 | @property | ||
| 1045 | def is_wheel(self): | ||
| 1046 | return self.ext == wheel_ext | ||
| 1047 | |||
| 1048 | @property | ||
| 1049 | def is_artifact(self): | ||
| 1050 | """ | ||
| 1051 | Determines if this points to an actual artifact (e.g. a tarball) or if | ||
| 1052 | it points to an "abstract" thing like a path or a VCS location. | ||
| 1053 | """ | ||
| 1054 | from pip._internal.vcs import vcs | ||
| 1055 | |||
| 1056 | if self.scheme in vcs.all_schemes: | ||
| 1057 | return False | ||
| 1058 | |||
| 1059 | return True | ||
| 1060 | |||
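The fragment regexes defined on `Link` applied to one representative URL (the URL and digest are fabricated for illustration):

```python
import re

_egg_fragment_re = re.compile(r'[#&]egg=([^&]*)')
_hash_re = re.compile(r'(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)')

url = 'https://example.org/pkg-1.0.tar.gz#sha256=deadbeef&egg=pkg'
print(_hash_re.search(url).group(1, 2))       # ('sha256', 'deadbeef')
print(_egg_fragment_re.search(url).group(1))  # pkg
```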
| 1061 | |||
| 1062 | FormatControl = namedtuple('FormatControl', 'no_binary only_binary') | ||
| 1063 | """This object has two fields, no_binary and only_binary. | ||
| 1064 | |||
| 1065 | If a field is falsy, it isn't set. If it is {':all:'}, it should match all | ||
| 1066 | packages except those listed in the other field. Only one field can be set | ||
| 1067 | to {':all:'} at a time. Otherwise, exact package names are listed, | ||
| 1068 | with any given package showing up in only one field at a time. | ||
| 1069 | """ | ||
| 1070 | |||
| 1071 | |||
| 1072 | def fmt_ctl_handle_mutual_exclude(value, target, other): | ||
| 1073 | new = value.split(',') | ||
| 1074 | while ':all:' in new: | ||
| 1075 | other.clear() | ||
| 1076 | target.clear() | ||
| 1077 | target.add(':all:') | ||
| 1078 | del new[:new.index(':all:') + 1] | ||
| 1079 | if ':none:' not in new: | ||
| 1080 | # Without a none, we want to discard everything as :all: covers it | ||
| 1081 | return | ||
| 1082 | for name in new: | ||
| 1083 | if name == ':none:': | ||
| 1084 | target.clear() | ||
| 1085 | continue | ||
| 1086 | name = canonicalize_name(name) | ||
| 1087 | other.discard(name) | ||
| 1088 | target.add(name) | ||
| 1089 | |||
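How the mutual-exclusion logic above plays out for a hypothetical `--no-binary :all:,:none:,foo` value, using plain sets in place of a `FormatControl` instance and omitting the `canonicalize_name` step for brevity:

```python
def handle(value, target, other):
    # Same control flow as fmt_ctl_handle_mutual_exclude above.
    new = value.split(',')
    while ':all:' in new:
        other.clear()
        target.clear()
        target.add(':all:')
        del new[:new.index(':all:') + 1]
        if ':none:' not in new:
            return
    for name in new:
        if name == ':none:':
            target.clear()
            continue
        other.discard(name)
        target.add(name)

no_binary, only_binary = set(), set()
handle(':all:,:none:,foo', no_binary, only_binary)
print(sorted(no_binary))  # ['foo']: ':none:' reset the ':all:' wildcard
```

`fmt_ctl_formats` below then consumes the two sets to decide, per package, whether "binary", "source", or both formats are acceptable.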
| 1090 | |||
| 1091 | def fmt_ctl_formats(fmt_ctl, canonical_name): | ||
| 1092 | result = {"binary", "source"} | ||
| 1093 | if canonical_name in fmt_ctl.only_binary: | ||
| 1094 | result.discard('source') | ||
| 1095 | elif canonical_name in fmt_ctl.no_binary: | ||
| 1096 | result.discard('binary') | ||
| 1097 | elif ':all:' in fmt_ctl.only_binary: | ||
| 1098 | result.discard('source') | ||
| 1099 | elif ':all:' in fmt_ctl.no_binary: | ||
| 1100 | result.discard('binary') | ||
| 1101 | return frozenset(result) | ||
| 1102 | |||
| 1103 | |||
| 1104 | def fmt_ctl_no_binary(fmt_ctl): | ||
| 1105 | fmt_ctl_handle_mutual_exclude( | ||
| 1106 | ':all:', fmt_ctl.no_binary, fmt_ctl.only_binary, | ||
| 1107 | ) | ||
| 1108 | |||
| 1109 | |||
| 1110 | Search = namedtuple('Search', 'supplied canonical formats') | ||
| 1111 | """Capture key aspects of a search. | ||
| 1112 | |||
| 1113 | :attribute supplied: The user supplied package. | ||
| 1114 | :attribute canonical: The canonical package name. | ||
| 1115 | :attribute formats: The formats allowed for this package. Should be a set | ||
| 1116 | with 'binary' or 'source' or both in it. | ||
| 1117 | """ | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/locations.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/locations.py new file mode 100644 index 0000000..ce8f7e9 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/locations.py | |||
| @@ -0,0 +1,194 @@ | |||
| 1 | """Locations where we look for configs, install stuff, etc""" | ||
| 2 | from __future__ import absolute_import | ||
| 3 | |||
| 4 | import os | ||
| 5 | import os.path | ||
| 6 | import platform | ||
| 7 | import site | ||
| 8 | import sys | ||
| 9 | import sysconfig | ||
| 10 | from distutils import sysconfig as distutils_sysconfig | ||
| 11 | from distutils.command.install import SCHEME_KEYS, install # type: ignore | ||
| 12 | |||
| 13 | from pip._internal.compat import WINDOWS, expanduser | ||
| 14 | from pip._internal.utils import appdirs | ||
| 15 | |||
| 16 | # Application Directories | ||
| 17 | USER_CACHE_DIR = appdirs.user_cache_dir("pip") | ||
| 18 | |||
| 19 | |||
| 20 | DELETE_MARKER_MESSAGE = '''\ | ||
| 21 | This file is placed here by pip to indicate the source was put | ||
| 22 | here by pip. | ||
| 23 | |||
| 24 | Once this package is successfully installed this source code will be | ||
| 25 | deleted (unless you remove this file). | ||
| 26 | ''' | ||
| 27 | PIP_DELETE_MARKER_FILENAME = 'pip-delete-this-directory.txt' | ||
| 28 | |||
| 29 | |||
| 30 | def write_delete_marker_file(directory): | ||
| 31 | """ | ||
| 32 | Write the pip delete marker file into this directory. | ||
| 33 | """ | ||
| 34 | filepath = os.path.join(directory, PIP_DELETE_MARKER_FILENAME) | ||
| 35 | with open(filepath, 'w') as marker_fp: | ||
| 36 | marker_fp.write(DELETE_MARKER_MESSAGE) | ||
| 37 | |||
| 38 | |||
| 39 | def running_under_virtualenv(): | ||
| 40 | """ | ||
| 41 | Return True if we're running inside a virtualenv, False otherwise. | ||
| 42 | |||
| 43 | """ | ||
| 44 | if hasattr(sys, 'real_prefix'): | ||
| 45 | return True | ||
| 46 | elif sys.prefix != getattr(sys, "base_prefix", sys.prefix): | ||
| 47 | return True | ||
| 48 | |||
| 49 | return False | ||
| 50 | |||
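The two branches above can be checked interactively; a minimal stdlib-only sketch:

    import sys

    # Classic virtualenv sets sys.real_prefix; PEP 405 venvs make
    # sys.prefix differ from sys.base_prefix.
    in_venv = (hasattr(sys, 'real_prefix') or
               sys.prefix != getattr(sys, 'base_prefix', sys.prefix))
    print('inside a virtualenv:', in_venv)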
| 51 | |||
| 52 | def virtualenv_no_global(): | ||
| 53 | """ | ||
| 54 | Return True if in a venv and no system site packages. | ||
| 55 | """ | ||
| 56 | # this mirrors the logic in virtualenv.py for locating the | ||
| 57 | # no-global-site-packages.txt file | ||
| 58 | site_mod_dir = os.path.dirname(os.path.abspath(site.__file__)) | ||
| 59 | no_global_file = os.path.join(site_mod_dir, 'no-global-site-packages.txt') | ||
| 60 | if running_under_virtualenv() and os.path.isfile(no_global_file): | ||
| 61 | return True | ||
| 62 | |||
| 63 | |||
| 64 | if running_under_virtualenv(): | ||
| 65 | src_prefix = os.path.join(sys.prefix, 'src') | ||
| 66 | else: | ||
| 67 | # FIXME: keep src in cwd for now (it is not a temporary folder) | ||
| 68 | try: | ||
| 69 | src_prefix = os.path.join(os.getcwd(), 'src') | ||
| 70 | except OSError: | ||
| 71 | # In case the current working directory has been renamed or deleted | ||
| 72 | sys.exit( | ||
| 73 | "The folder you are executing pip from can no longer be found." | ||
| 74 | ) | ||
| 75 | |||
| 76 | # under macOS + virtualenv sys.prefix is not properly resolved | ||
| 77 | # it is something like /path/to/python/bin/.. | ||
| 78 | # Note: using realpath due to tmp dirs on OSX being symlinks | ||
| 79 | src_prefix = os.path.abspath(src_prefix) | ||
| 80 | |||
| 81 | # FIXME doesn't account for venv linked to global site-packages | ||
| 82 | |||
| 83 | site_packages = sysconfig.get_path("purelib") | ||
| 84 | # This is because of a bug in PyPy's sysconfig module, see | ||
| 85 | # https://bitbucket.org/pypy/pypy/issues/2506/sysconfig-returns-incorrect-paths | ||
| 86 | # for more information. | ||
| 87 | if platform.python_implementation().lower() == "pypy": | ||
| 88 | site_packages = distutils_sysconfig.get_python_lib() | ||
| 89 | try: | ||
| 90 | # Use getusersitepackages if this is present, as it ensures that the | ||
| 91 | # value is initialised properly. | ||
| 92 | user_site = site.getusersitepackages() | ||
| 93 | except AttributeError: | ||
| 94 | user_site = site.USER_SITE | ||
| 95 | user_dir = expanduser('~') | ||
| 96 | if WINDOWS: | ||
| 97 | bin_py = os.path.join(sys.prefix, 'Scripts') | ||
| 98 | bin_user = os.path.join(user_site, 'Scripts') | ||
| 99 | # buildout uses 'bin' on Windows too? | ||
| 100 | if not os.path.exists(bin_py): | ||
| 101 | bin_py = os.path.join(sys.prefix, 'bin') | ||
| 102 | bin_user = os.path.join(user_site, 'bin') | ||
| 103 | |||
| 104 | config_basename = 'pip.ini' | ||
| 105 | |||
| 106 | legacy_storage_dir = os.path.join(user_dir, 'pip') | ||
| 107 | legacy_config_file = os.path.join( | ||
| 108 | legacy_storage_dir, | ||
| 109 | config_basename, | ||
| 110 | ) | ||
| 111 | else: | ||
| 112 | bin_py = os.path.join(sys.prefix, 'bin') | ||
| 113 | bin_user = os.path.join(user_site, 'bin') | ||
| 114 | |||
| 115 | config_basename = 'pip.conf' | ||
| 116 | |||
| 117 | legacy_storage_dir = os.path.join(user_dir, '.pip') | ||
| 118 | legacy_config_file = os.path.join( | ||
| 119 | legacy_storage_dir, | ||
| 120 | config_basename, | ||
| 121 | ) | ||
| 122 | # Forcing to use /usr/local/bin for standard macOS framework installs | ||
| 123 | # Also log to ~/Library/Logs/ for use with the Console.app log viewer | ||
| 124 | if sys.platform[:6] == 'darwin' and sys.prefix[:16] == '/System/Library/': | ||
| 125 | bin_py = '/usr/local/bin' | ||
| 126 | |||
| 127 | site_config_files = [ | ||
| 128 | os.path.join(path, config_basename) | ||
| 129 | for path in appdirs.site_config_dirs('pip') | ||
| 130 | ] | ||
| 131 | |||
| 132 | venv_config_file = os.path.join(sys.prefix, config_basename) | ||
| 133 | new_config_file = os.path.join(appdirs.user_config_dir("pip"), config_basename) | ||
| 134 | |||
| 135 | |||
| 136 | def distutils_scheme(dist_name, user=False, home=None, root=None, | ||
| 137 | isolated=False, prefix=None): | ||
| 138 | """ | ||
| 139 | Return a distutils install scheme | ||
| 140 | """ | ||
| 141 | from distutils.dist import Distribution | ||
| 142 | |||
| 143 | scheme = {} | ||
| 144 | |||
| 145 | if isolated: | ||
| 146 | extra_dist_args = {"script_args": ["--no-user-cfg"]} | ||
| 147 | else: | ||
| 148 | extra_dist_args = {} | ||
| 149 | dist_args = {'name': dist_name} | ||
| 150 | dist_args.update(extra_dist_args) | ||
| 151 | |||
| 152 | d = Distribution(dist_args) | ||
| 153 | d.parse_config_files() | ||
| 154 | i = d.get_command_obj('install', create=True) | ||
| 155 | # NOTE: setting user or home has the side-effect of creating the home dir | ||
| 156 | # or user base for installations during finalize_options() | ||
| 157 | # ideally, we'd prefer a scheme class that has no side-effects. | ||
| 158 | assert not (user and prefix), "user={} prefix={}".format(user, prefix) | ||
| 159 | i.user = user or i.user | ||
| 160 | if user: | ||
| 161 | i.prefix = "" | ||
| 162 | i.prefix = prefix or i.prefix | ||
| 163 | i.home = home or i.home | ||
| 164 | i.root = root or i.root | ||
| 165 | i.finalize_options() | ||
| 166 | for key in SCHEME_KEYS: | ||
| 167 | scheme[key] = getattr(i, 'install_' + key) | ||
| 168 | |||
| 169 | # install_lib specified in setup.cfg should install *everything* | ||
| 170 | # into there (i.e. it takes precedence over both purelib and | ||
| 171 | # platlib). Note, i.install_lib is *always* set after | ||
| 172 | # finalize_options(); we only want to override here if the user | ||
| 173 | # has explicitly requested it hence going back to the config | ||
| 174 | if 'install_lib' in d.get_option_dict('install'): | ||
| 175 | scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib)) | ||
| 176 | |||
| 177 | if running_under_virtualenv(): | ||
| 178 | scheme['headers'] = os.path.join( | ||
| 179 | sys.prefix, | ||
| 180 | 'include', | ||
| 181 | 'site', | ||
| 182 | 'python' + sys.version[:3], | ||
| 183 | dist_name, | ||
| 184 | ) | ||
| 185 | |||
| 186 | if root is not None: | ||
| 187 | path_no_drive = os.path.splitdrive( | ||
| 188 | os.path.abspath(scheme["headers"]))[1] | ||
| 189 | scheme["headers"] = os.path.join( | ||
| 190 | root, | ||
| 191 | path_no_drive[1:], | ||
| 192 | ) | ||
| 193 | |||
| 194 | return scheme | ||
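The core of distutils_scheme() can be reproduced with the stdlib alone; a minimal sketch (no setup.cfg parsing and no user/home/root overrides, so it only covers the default path):

    from distutils.command.install import SCHEME_KEYS
    from distutils.dist import Distribution

    d = Distribution({'name': 'example'})
    i = d.get_command_obj('install', create=True)
    i.finalize_options()
    scheme = {key: getattr(i, 'install_' + key) for key in SCHEME_KEYS}
    print(scheme['purelib'])  # e.g. /usr/lib/python3.7/site-packages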
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/models/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/models/__init__.py new file mode 100644 index 0000000..2d080a4 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/models/__init__.py | |||
| @@ -0,0 +1,4 @@ | |||
| 1 | from pip._internal.models.index import Index, PyPI | ||
| 2 | |||
| 3 | |||
| 4 | __all__ = ["Index", "PyPI"] | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/models/index.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/models/index.py new file mode 100644 index 0000000..161de50 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/models/index.py | |||
| @@ -0,0 +1,15 @@ | |||
| 1 | from pip._vendor.six.moves.urllib import parse as urllib_parse | ||
| 2 | |||
| 3 | |||
| 4 | class Index(object): | ||
| 5 | def __init__(self, url): | ||
| 6 | self.url = url | ||
| 7 | self.netloc = urllib_parse.urlsplit(url).netloc | ||
| 8 | self.simple_url = self.url_to_path('simple') | ||
| 9 | self.pypi_url = self.url_to_path('pypi') | ||
| 10 | |||
| 11 | def url_to_path(self, path): | ||
| 12 | return urllib_parse.urljoin(self.url, path) | ||
| 13 | |||
| 14 | |||
| 15 | PyPI = Index('https://pypi.org/') | ||
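What the Index model computes, shown with the stdlib equivalents of the vendored six/urllib imports:

    from urllib.parse import urljoin, urlsplit

    url = 'https://pypi.org/'
    print(urlsplit(url).netloc)    # 'pypi.org'
    print(urljoin(url, 'simple'))  # 'https://pypi.org/simple'
    print(urljoin(url, 'pypi'))    # 'https://pypi.org/pypi'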
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/__init__.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/__init__.py | |||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/check.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/check.py new file mode 100644 index 0000000..bab6b9f --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/check.py | |||
| @@ -0,0 +1,106 @@ | |||
| 1 | """Validation of dependencies of packages | ||
| 2 | """ | ||
| 3 | |||
| 4 | from collections import namedtuple | ||
| 5 | |||
| 6 | from pip._vendor.packaging.utils import canonicalize_name | ||
| 7 | |||
| 8 | from pip._internal.operations.prepare import make_abstract_dist | ||
| 9 | |||
| 10 | from pip._internal.utils.misc import get_installed_distributions | ||
| 11 | from pip._internal.utils.typing import MYPY_CHECK_RUNNING | ||
| 12 | |||
| 13 | if MYPY_CHECK_RUNNING: | ||
| 14 | from pip._internal.req.req_install import InstallRequirement | ||
| 15 | from typing import Any, Dict, Iterator, Set, Tuple, List | ||
| 16 | |||
| 17 | # Shorthands | ||
| 18 | PackageSet = Dict[str, 'PackageDetails'] | ||
| 19 | Missing = Tuple[str, Any] | ||
| 20 | Conflicting = Tuple[str, str, Any] | ||
| 21 | |||
| 22 | MissingDict = Dict[str, List[Missing]] | ||
| 23 | ConflictingDict = Dict[str, List[Conflicting]] | ||
| 24 | CheckResult = Tuple[MissingDict, ConflictingDict] | ||
| 25 | |||
| 26 | PackageDetails = namedtuple('PackageDetails', ['version', 'requires']) | ||
| 27 | |||
| 28 | |||
| 29 | def create_package_set_from_installed(**kwargs): | ||
| 30 | # type: (**Any) -> PackageSet | ||
| 31 | """Converts a list of distributions into a PackageSet. | ||
| 32 | """ | ||
| 33 | # Default to using all packages installed on the system | ||
| 34 | if kwargs == {}: | ||
| 35 | kwargs = {"local_only": False, "skip": ()} | ||
| 36 | retval = {} | ||
| 37 | for dist in get_installed_distributions(**kwargs): | ||
| 38 | name = canonicalize_name(dist.project_name) | ||
| 39 | retval[name] = PackageDetails(dist.version, dist.requires()) | ||
| 40 | return retval | ||
| 41 | |||
| 42 | |||
| 43 | def check_package_set(package_set): | ||
| 44 | # type: (PackageSet) -> CheckResult | ||
| 45 | """Check if a package set is consistent | ||
| 46 | """ | ||
| 47 | missing = dict() | ||
| 48 | conflicting = dict() | ||
| 49 | |||
| 50 | for package_name in package_set: | ||
| 51 | # Info about dependencies of package_name | ||
| 52 | missing_deps = set() # type: Set[Missing] | ||
| 53 | conflicting_deps = set() # type: Set[Conflicting] | ||
| 54 | |||
| 55 | for req in package_set[package_name].requires: | ||
| 56 | name = canonicalize_name(req.project_name) # type: str | ||
| 57 | |||
| 58 | # Check if it's missing | ||
| 59 | if name not in package_set: | ||
| 60 | missed = True | ||
| 61 | if req.marker is not None: | ||
| 62 | missed = req.marker.evaluate() | ||
| 63 | if missed: | ||
| 64 | missing_deps.add((name, req)) | ||
| 65 | continue | ||
| 66 | |||
| 67 | # Check if there's a conflict | ||
| 68 | version = package_set[name].version # type: str | ||
| 69 | if not req.specifier.contains(version, prereleases=True): | ||
| 70 | conflicting_deps.add((name, version, req)) | ||
| 71 | |||
| 72 | def str_key(x): | ||
| 73 | return str(x) | ||
| 74 | |||
| 75 | if missing_deps: | ||
| 76 | missing[package_name] = sorted(missing_deps, key=str_key) | ||
| 77 | if conflicting_deps: | ||
| 78 | conflicting[package_name] = sorted(conflicting_deps, key=str_key) | ||
| 79 | |||
| 80 | return missing, conflicting | ||
| 81 | |||
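A toy PackageSet exercising check_package_set(): one dependency is missing outright, and one is installed at a version outside the requested specifier. The import assumes pip 10's private module layout, so treat it as illustrative.

    from collections import namedtuple
    from pkg_resources import Requirement
    from pip._internal.operations.check import check_package_set

    PackageDetails = namedtuple('PackageDetails', ['version', 'requires'])
    package_set = {
        'a': PackageDetails('1.0', []),
        'b': PackageDetails('1.0', [Requirement.parse('a>=2.0')]),
        'c': PackageDetails('1.0', [Requirement.parse('nosuchdist')]),
    }
    missing, conflicting = check_package_set(package_set)
    print(missing)      # 'c' depends on a dist that is absent
    print(conflicting)  # 'b' wants a>=2.0 but only 1.0 is present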
| 82 | |||
| 83 | def check_install_conflicts(to_install): | ||
| 84 | # type: (List[InstallRequirement]) -> Tuple[PackageSet, CheckResult] | ||
| 85 | """For checking if the dependency graph would be consistent after \ | ||
| 86 | installing given requirements | ||
| 87 | """ | ||
| 88 | # Start from the current state | ||
| 89 | state = create_package_set_from_installed() | ||
| 90 | _simulate_installation_of(to_install, state) | ||
| 91 | return state, check_package_set(state) | ||
| 92 | |||
| 93 | |||
| 94 | # NOTE from @pradyunsg | ||
| 95 | # This required a minor update in dependency link handling logic over at | ||
| 96 | # operations.prepare.IsSDist.dist() to get it working | ||
| 97 | def _simulate_installation_of(to_install, state): | ||
| 98 | # type: (List[InstallRequirement], PackageSet) -> None | ||
| 99 | """Computes the version of packages after installing to_install. | ||
| 100 | """ | ||
| 101 | |||
| 102 | # Modify it as installing requirement_set would (assuming no errors) | ||
| 103 | for inst_req in to_install: | ||
| 104 | dist = make_abstract_dist(inst_req).dist(finder=None) | ||
| 105 | name = canonicalize_name(dist.key) | ||
| 106 | state[name] = PackageDetails(dist.version, dist.requires()) | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/freeze.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/freeze.py new file mode 100644 index 0000000..000102d --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/freeze.py | |||
| @@ -0,0 +1,252 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | |||
| 3 | import collections | ||
| 4 | import logging | ||
| 5 | import os | ||
| 6 | import re | ||
| 7 | import warnings | ||
| 8 | |||
| 9 | from pip._vendor import pkg_resources, six | ||
| 10 | from pip._vendor.packaging.utils import canonicalize_name | ||
| 11 | from pip._vendor.pkg_resources import RequirementParseError | ||
| 12 | |||
| 13 | from pip._internal.exceptions import InstallationError | ||
| 14 | from pip._internal.req import InstallRequirement | ||
| 15 | from pip._internal.req.req_file import COMMENT_RE | ||
| 16 | from pip._internal.utils.deprecation import RemovedInPip11Warning | ||
| 17 | from pip._internal.utils.misc import ( | ||
| 18 | dist_is_editable, get_installed_distributions, | ||
| 19 | ) | ||
| 20 | |||
| 21 | logger = logging.getLogger(__name__) | ||
| 22 | |||
| 23 | |||
| 24 | def freeze( | ||
| 25 | requirement=None, | ||
| 26 | find_links=None, local_only=None, user_only=None, skip_regex=None, | ||
| 27 | isolated=False, | ||
| 28 | wheel_cache=None, | ||
| 29 | exclude_editable=False, | ||
| 30 | skip=()): | ||
| 31 | find_links = find_links or [] | ||
| 32 | skip_match = None | ||
| 33 | |||
| 34 | if skip_regex: | ||
| 35 | skip_match = re.compile(skip_regex).search | ||
| 36 | |||
| 37 | dependency_links = [] | ||
| 38 | |||
| 39 | for dist in pkg_resources.working_set: | ||
| 40 | if dist.has_metadata('dependency_links.txt'): | ||
| 41 | dependency_links.extend( | ||
| 42 | dist.get_metadata_lines('dependency_links.txt') | ||
| 43 | ) | ||
| 44 | for link in find_links: | ||
| 45 | if '#egg=' in link: | ||
| 46 | dependency_links.append(link) | ||
| 47 | for link in find_links: | ||
| 48 | yield '-f %s' % link | ||
| 49 | installations = {} | ||
| 50 | for dist in get_installed_distributions(local_only=local_only, | ||
| 51 | skip=(), | ||
| 52 | user_only=user_only): | ||
| 53 | try: | ||
| 54 | req = FrozenRequirement.from_dist( | ||
| 55 | dist, | ||
| 56 | dependency_links | ||
| 57 | ) | ||
| 58 | except RequirementParseError: | ||
| 59 | logger.warning( | ||
| 60 | "Could not parse requirement: %s", | ||
| 61 | dist.project_name | ||
| 62 | ) | ||
| 63 | continue | ||
| 64 | if exclude_editable and req.editable: | ||
| 65 | continue | ||
| 66 | installations[req.name] = req | ||
| 67 | |||
| 68 | if requirement: | ||
| 69 | # the options that don't get turned into an InstallRequirement | ||
| 70 | # should only be emitted once, even if the same option is in multiple | ||
| 71 | # requirements files, so we need to keep track of what has been emitted | ||
| 72 | # so that we don't emit it again if it's seen again | ||
| 73 | emitted_options = set() | ||
| 74 | # keep track of which files a requirement is in so that we can | ||
| 75 | # give an accurate warning if a requirement appears multiple times. | ||
| 76 | req_files = collections.defaultdict(list) | ||
| 77 | for req_file_path in requirement: | ||
| 78 | with open(req_file_path) as req_file: | ||
| 79 | for line in req_file: | ||
| 80 | if (not line.strip() or | ||
| 81 | line.strip().startswith('#') or | ||
| 82 | (skip_match and skip_match(line)) or | ||
| 83 | line.startswith(( | ||
| 84 | '-r', '--requirement', | ||
| 85 | '-Z', '--always-unzip', | ||
| 86 | '-f', '--find-links', | ||
| 87 | '-i', '--index-url', | ||
| 88 | '--pre', | ||
| 89 | '--trusted-host', | ||
| 90 | '--process-dependency-links', | ||
| 91 | '--extra-index-url'))): | ||
| 92 | line = line.rstrip() | ||
| 93 | if line not in emitted_options: | ||
| 94 | emitted_options.add(line) | ||
| 95 | yield line | ||
| 96 | continue | ||
| 97 | |||
| 98 | if line.startswith('-e') or line.startswith('--editable'): | ||
| 99 | if line.startswith('-e'): | ||
| 100 | line = line[2:].strip() | ||
| 101 | else: | ||
| 102 | line = line[len('--editable'):].strip().lstrip('=') | ||
| 103 | line_req = InstallRequirement.from_editable( | ||
| 104 | line, | ||
| 105 | isolated=isolated, | ||
| 106 | wheel_cache=wheel_cache, | ||
| 107 | ) | ||
| 108 | else: | ||
| 109 | line_req = InstallRequirement.from_line( | ||
| 110 | COMMENT_RE.sub('', line).strip(), | ||
| 111 | isolated=isolated, | ||
| 112 | wheel_cache=wheel_cache, | ||
| 113 | ) | ||
| 114 | |||
| 115 | if not line_req.name: | ||
| 116 | logger.info( | ||
| 117 | "Skipping line in requirement file [%s] because " | ||
| 118 | "it's not clear what it would install: %s", | ||
| 119 | req_file_path, line.strip(), | ||
| 120 | ) | ||
| 121 | logger.info( | ||
| 122 | " (add #egg=PackageName to the URL to avoid" | ||
| 123 | " this warning)" | ||
| 124 | ) | ||
| 125 | elif line_req.name not in installations: | ||
| 126 | # either it's not installed, or it is installed | ||
| 127 | # but has been processed already | ||
| 128 | if not req_files[line_req.name]: | ||
| 129 | logger.warning( | ||
| 130 | "Requirement file [%s] contains %s, but that " | ||
| 131 | "package is not installed", | ||
| 132 | req_file_path, | ||
| 133 | COMMENT_RE.sub('', line).strip(), | ||
| 134 | ) | ||
| 135 | else: | ||
| 136 | req_files[line_req.name].append(req_file_path) | ||
| 137 | else: | ||
| 138 | yield str(installations[line_req.name]).rstrip() | ||
| 139 | del installations[line_req.name] | ||
| 140 | req_files[line_req.name].append(req_file_path) | ||
| 141 | |||
| 142 | # Warn about requirements that were included multiple times (in a | ||
| 143 | # single requirements file or in different requirements files). | ||
| 144 | for name, files in six.iteritems(req_files): | ||
| 145 | if len(files) > 1: | ||
| 146 | logger.warning("Requirement %s included multiple times [%s]", | ||
| 147 | name, ', '.join(sorted(set(files)))) | ||
| 148 | |||
| 149 | yield( | ||
| 150 | '## The following requirements were added by ' | ||
| 151 | 'pip freeze:' | ||
| 152 | ) | ||
| 153 | for installation in sorted( | ||
| 154 | installations.values(), key=lambda x: x.name.lower()): | ||
| 155 | if canonicalize_name(installation.name) not in skip: | ||
| 156 | yield str(installation).rstrip() | ||
| 157 | |||
| 158 | |||
| 159 | class FrozenRequirement(object): | ||
| 160 | def __init__(self, name, req, editable, comments=()): | ||
| 161 | self.name = name | ||
| 162 | self.req = req | ||
| 163 | self.editable = editable | ||
| 164 | self.comments = comments | ||
| 165 | |||
| 166 | _rev_re = re.compile(r'-r(\d+)$') | ||
| 167 | _date_re = re.compile(r'-(20\d\d\d\d\d\d)$') | ||
| 168 | |||
| 169 | @classmethod | ||
| 170 | def from_dist(cls, dist, dependency_links): | ||
| 171 | location = os.path.normcase(os.path.abspath(dist.location)) | ||
| 172 | comments = [] | ||
| 173 | from pip._internal.vcs import vcs, get_src_requirement | ||
| 174 | if dist_is_editable(dist) and vcs.get_backend_name(location): | ||
| 175 | editable = True | ||
| 176 | try: | ||
| 177 | req = get_src_requirement(dist, location) | ||
| 178 | except InstallationError as exc: | ||
| 179 | logger.warning( | ||
| 180 | "Error when trying to get requirement for VCS system %s, " | ||
| 181 | "falling back to uneditable format", exc | ||
| 182 | ) | ||
| 183 | req = None | ||
| 184 | if req is None: | ||
| 185 | logger.warning( | ||
| 186 | 'Could not determine repository location of %s', location | ||
| 187 | ) | ||
| 188 | comments.append( | ||
| 189 | '## !! Could not determine repository location' | ||
| 190 | ) | ||
| 191 | req = dist.as_requirement() | ||
| 192 | editable = False | ||
| 193 | else: | ||
| 194 | editable = False | ||
| 195 | req = dist.as_requirement() | ||
| 196 | specs = req.specs | ||
| 197 | assert len(specs) == 1 and specs[0][0] in ["==", "==="], \ | ||
| 198 | 'Expected 1 spec with == or ===; specs = %r; dist = %r' % \ | ||
| 199 | (specs, dist) | ||
| 200 | version = specs[0][1] | ||
| 201 | ver_match = cls._rev_re.search(version) | ||
| 202 | date_match = cls._date_re.search(version) | ||
| 203 | if ver_match or date_match: | ||
| 204 | svn_backend = vcs.get_backend('svn') | ||
| 205 | if svn_backend: | ||
| 206 | svn_location = svn_backend().get_location( | ||
| 207 | dist, | ||
| 208 | dependency_links, | ||
| 209 | ) | ||
| 210 | if not svn_location: | ||
| 211 | logger.warning( | ||
| 212 | 'Warning: cannot find svn location for %s', req, | ||
| 213 | ) | ||
| 214 | comments.append( | ||
| 215 | '## FIXME: could not find svn URL in dependency_links ' | ||
| 216 | 'for this package:' | ||
| 217 | ) | ||
| 218 | else: | ||
| 219 | warnings.warn( | ||
| 220 | "SVN editable detection based on dependency links " | ||
| 221 | "will be dropped in the future.", | ||
| 222 | RemovedInPip11Warning, | ||
| 223 | ) | ||
| 224 | comments.append( | ||
| 225 | '# Installing as editable to satisfy requirement %s:' % | ||
| 226 | req | ||
| 227 | ) | ||
| 228 | if ver_match: | ||
| 229 | rev = ver_match.group(1) | ||
| 230 | else: | ||
| 231 | rev = '{%s}' % date_match.group(1) | ||
| 232 | editable = True | ||
| 233 | req = '%s@%s#egg=%s' % ( | ||
| 234 | svn_location, | ||
| 235 | rev, | ||
| 236 | cls.egg_name(dist) | ||
| 237 | ) | ||
| 238 | return cls(dist.project_name, req, editable, comments) | ||
| 239 | |||
| 240 | @staticmethod | ||
| 241 | def egg_name(dist): | ||
| 242 | name = dist.egg_name() | ||
| 243 | match = re.search(r'-py\d\.\d$', name) | ||
| 244 | if match: | ||
| 245 | name = name[:match.start()] | ||
| 246 | return name | ||
| 247 | |||
| 248 | def __str__(self): | ||
| 249 | req = self.req | ||
| 250 | if self.editable: | ||
| 251 | req = '-e %s' % req | ||
| 252 | return '\n'.join(list(self.comments) + [str(req)]) + '\n' | ||
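How a FrozenRequirement renders (the import path assumes pip 10's internal layout): comments come first, then the requirement line, with a '-e ' prefix when editable.

    from pip._internal.operations.freeze import FrozenRequirement

    fr = FrozenRequirement('example', 'example==1.0', editable=False,
                           comments=['## added by hand'])
    print(fr, end='')  # __str__ already ends with a newline
    # ## added by hand
    # example==1.0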
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/prepare.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/prepare.py new file mode 100644 index 0000000..c1e8158 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/prepare.py | |||
| @@ -0,0 +1,380 @@ | |||
| 1 | """Prepares a distribution for installation | ||
| 2 | """ | ||
| 3 | |||
| 4 | import itertools | ||
| 5 | import logging | ||
| 6 | import os | ||
| 7 | import sys | ||
| 8 | from copy import copy | ||
| 9 | |||
| 10 | from pip._vendor import pkg_resources, requests | ||
| 11 | |||
| 12 | from pip._internal.build_env import NoOpBuildEnvironment | ||
| 13 | from pip._internal.compat import expanduser | ||
| 14 | from pip._internal.download import ( | ||
| 15 | is_dir_url, is_file_url, is_vcs_url, unpack_url, url_to_path, | ||
| 16 | ) | ||
| 17 | from pip._internal.exceptions import ( | ||
| 18 | DirectoryUrlHashUnsupported, HashUnpinned, InstallationError, | ||
| 19 | PreviousBuildDirError, VcsHashUnsupported, | ||
| 20 | ) | ||
| 21 | from pip._internal.index import FormatControl | ||
| 22 | from pip._internal.req.req_install import InstallRequirement | ||
| 23 | from pip._internal.utils.hashes import MissingHashes | ||
| 24 | from pip._internal.utils.logging import indent_log | ||
| 25 | from pip._internal.utils.misc import ( | ||
| 26 | call_subprocess, display_path, normalize_path, | ||
| 27 | ) | ||
| 28 | from pip._internal.utils.ui import open_spinner | ||
| 29 | from pip._internal.vcs import vcs | ||
| 30 | |||
| 31 | logger = logging.getLogger(__name__) | ||
| 32 | |||
| 33 | |||
| 34 | def make_abstract_dist(req): | ||
| 35 | """Factory to make an abstract dist object. | ||
| 36 | |||
| 37 | Preconditions: Either an editable req with a source_dir, or satisfied_by or | ||
| 38 | a wheel link, or a non-editable req with a source_dir. | ||
| 39 | |||
| 40 | :return: A concrete DistAbstraction. | ||
| 41 | """ | ||
| 42 | if req.editable: | ||
| 43 | return IsSDist(req) | ||
| 44 | elif req.link and req.link.is_wheel: | ||
| 45 | return IsWheel(req) | ||
| 46 | else: | ||
| 47 | return IsSDist(req) | ||
| 48 | |||
| 49 | |||
| 50 | def _install_build_reqs(finder, prefix, build_requirements): | ||
| 51 | # NOTE: What follows is not a very good thing. | ||
| 52 | # Eventually, this should move into the BuildEnvironment class and | ||
| 53 | # that should handle all the isolation and sub-process invocation. | ||
| 54 | finder = copy(finder) | ||
| 55 | finder.format_control = FormatControl(set(), set([":all:"])) | ||
| 56 | urls = [ | ||
| 57 | finder.find_requirement( | ||
| 58 | InstallRequirement.from_line(r), upgrade=False).url | ||
| 59 | for r in build_requirements | ||
| 60 | ] | ||
| 61 | args = [ | ||
| 62 | sys.executable, '-m', 'pip', 'install', '--ignore-installed', | ||
| 63 | '--no-user', '--prefix', prefix, | ||
| 64 | ] + list(urls) | ||
| 65 | |||
| 66 | with open_spinner("Installing build dependencies") as spinner: | ||
| 67 | call_subprocess(args, show_stdout=False, spinner=spinner) | ||
| 68 | |||
| 69 | |||
| 70 | class DistAbstraction(object): | ||
| 71 | """Abstracts out the wheel vs non-wheel Resolver.resolve() logic. | ||
| 72 | |||
| 73 | The requirements for anything installable are as follows: | ||
| 74 | - we must be able to determine the requirement name | ||
| 75 | (or we can't correctly handle the non-upgrade case). | ||
| 76 | - we must be able to generate a list of run-time dependencies | ||
| 77 | without installing any additional packages (or we would | ||
| 78 | have to either burn time by doing temporary isolated installs | ||
| 79 | or alternatively violate pips 'don't start installing unless | ||
| 80 | all requirements are available' rule - neither of which are | ||
| 81 | desirable). | ||
| 82 | - for packages with setup requirements, we must also be able | ||
| 83 | to determine their requirements without installing additional | ||
| 84 | packages (for the same reason as run-time dependencies) | ||
| 85 | - we must be able to create a Distribution object exposing the | ||
| 86 | above metadata. | ||
| 87 | """ | ||
| 88 | |||
| 89 | def __init__(self, req): | ||
| 90 | self.req = req | ||
| 91 | |||
| 92 | def dist(self, finder): | ||
| 93 | """Return a setuptools Dist object.""" | ||
| 94 | raise NotImplementedError(self.dist) | ||
| 95 | |||
| 96 | def prep_for_dist(self, finder): | ||
| 97 | """Ensure that we can get a Dist for this requirement.""" | ||
| 98 | raise NotImplementedError(self.prep_for_dist) | ||
| 99 | |||
| 100 | |||
| 101 | class IsWheel(DistAbstraction): | ||
| 102 | |||
| 103 | def dist(self, finder): | ||
| 104 | return list(pkg_resources.find_distributions( | ||
| 105 | self.req.source_dir))[0] | ||
| 106 | |||
| 107 | def prep_for_dist(self, finder, build_isolation): | ||
| 108 | # FIXME:https://github.com/pypa/pip/issues/1112 | ||
| 109 | pass | ||
| 110 | |||
| 111 | |||
| 112 | class IsSDist(DistAbstraction): | ||
| 113 | |||
| 114 | def dist(self, finder): | ||
| 115 | dist = self.req.get_dist() | ||
| 116 | # FIXME: shouldn't be globally added. | ||
| 117 | if finder and dist.has_metadata('dependency_links.txt'): | ||
| 118 | finder.add_dependency_links( | ||
| 119 | dist.get_metadata_lines('dependency_links.txt') | ||
| 120 | ) | ||
| 121 | return dist | ||
| 122 | |||
| 123 | def prep_for_dist(self, finder, build_isolation): | ||
| 124 | # Before calling "setup.py egg_info", we need to set-up the build | ||
| 125 | # environment. | ||
| 126 | build_requirements, isolate = self.req.get_pep_518_info() | ||
| 127 | should_isolate = build_isolation and isolate | ||
| 128 | |||
| 129 | minimum_requirements = ('setuptools', 'wheel') | ||
| 130 | missing_requirements = set(minimum_requirements) - set( | ||
| 131 | pkg_resources.Requirement(r).key | ||
| 132 | for r in build_requirements | ||
| 133 | ) | ||
| 134 | if missing_requirements: | ||
| 135 | def format_reqs(rs): | ||
| 136 | return ' and '.join(map(repr, sorted(rs))) | ||
| 137 | logger.warning( | ||
| 138 | "Missing build time requirements in pyproject.toml for %s: " | ||
| 139 | "%s.", self.req, format_reqs(missing_requirements) | ||
| 140 | ) | ||
| 141 | logger.warning( | ||
| 142 | "This version of pip does not implement PEP 517 so it cannot " | ||
| 143 | "build a wheel without %s.", format_reqs(minimum_requirements) | ||
| 144 | ) | ||
| 145 | |||
| 146 | if should_isolate: | ||
| 147 | with self.req.build_env: | ||
| 148 | pass | ||
| 149 | _install_build_reqs(finder, self.req.build_env.path, | ||
| 150 | build_requirements) | ||
| 151 | else: | ||
| 152 | self.req.build_env = NoOpBuildEnvironment(no_clean=False) | ||
| 153 | |||
| 154 | self.req.run_egg_info() | ||
| 155 | self.req.assert_source_matches_version() | ||
| 156 | |||
| 157 | |||
| 158 | class Installed(DistAbstraction): | ||
| 159 | |||
| 160 | def dist(self, finder): | ||
| 161 | return self.req.satisfied_by | ||
| 162 | |||
| 163 | def prep_for_dist(self, finder): | ||
| 164 | pass | ||
| 165 | |||
| 166 | |||
| 167 | class RequirementPreparer(object): | ||
| 168 | """Prepares a Requirement | ||
| 169 | """ | ||
| 170 | |||
| 171 | def __init__(self, build_dir, download_dir, src_dir, wheel_download_dir, | ||
| 172 | progress_bar, build_isolation): | ||
| 173 | super(RequirementPreparer, self).__init__() | ||
| 174 | |||
| 175 | self.src_dir = src_dir | ||
| 176 | self.build_dir = build_dir | ||
| 177 | |||
| 178 | # Where still-packed archives should be written to. If None, they are | ||
| 179 | # not saved, and are deleted immediately after unpacking. | ||
| 180 | self.download_dir = download_dir | ||
| 181 | |||
| 182 | # Where still-packed .whl files should be written to. If None, they are | ||
| 183 | # written to the download_dir parameter. Separate to download_dir to | ||
| 184 | # permit only keeping wheel archives for pip wheel. | ||
| 185 | if wheel_download_dir: | ||
| 186 | wheel_download_dir = normalize_path(wheel_download_dir) | ||
| 187 | self.wheel_download_dir = wheel_download_dir | ||
| 188 | |||
| 189 | # NOTE | ||
| 190 | # download_dir and wheel_download_dir overlap semantically and may | ||
| 191 | # be combined if we're willing to have non-wheel archives present in | ||
| 192 | # the wheelhouse output by 'pip wheel'. | ||
| 193 | |||
| 194 | self.progress_bar = progress_bar | ||
| 195 | |||
| 196 | # Is build isolation allowed? | ||
| 197 | self.build_isolation = build_isolation | ||
| 198 | |||
| 199 | @property | ||
| 200 | def _download_should_save(self): | ||
| 201 | # TODO: Modify to reduce indentation needed | ||
| 202 | if self.download_dir: | ||
| 203 | self.download_dir = expanduser(self.download_dir) | ||
| 204 | if os.path.exists(self.download_dir): | ||
| 205 | return True | ||
| 206 | else: | ||
| 207 | logger.critical('Could not find download directory') | ||
| 208 | raise InstallationError( | ||
| 209 | "Could not find or access download directory '%s'" | ||
| 210 | % display_path(self.download_dir)) | ||
| 211 | return False | ||
| 212 | |||
| 213 | def prepare_linked_requirement(self, req, session, finder, | ||
| 214 | upgrade_allowed, require_hashes): | ||
| 215 | """Prepare a requirement that would be obtained from req.link | ||
| 216 | """ | ||
| 217 | # TODO: Breakup into smaller functions | ||
| 218 | if req.link and req.link.scheme == 'file': | ||
| 219 | path = url_to_path(req.link.url) | ||
| 220 | logger.info('Processing %s', display_path(path)) | ||
| 221 | else: | ||
| 222 | logger.info('Collecting %s', req) | ||
| 223 | |||
| 224 | with indent_log(): | ||
| 225 | # @@ if filesystem packages are not marked | ||
| 226 | # editable in a req, a non deterministic error | ||
| 227 | # occurs when the script attempts to unpack the | ||
| 228 | # build directory | ||
| 229 | req.ensure_has_source_dir(self.build_dir) | ||
| 230 | # If a checkout exists, it's unwise to keep going. version | ||
| 231 | # inconsistencies are logged later, but do not fail the | ||
| 232 | # installation. | ||
| 233 | # FIXME: this won't upgrade when there's an existing | ||
| 234 | # package unpacked in `req.source_dir` | ||
| 236 | if os.path.exists(os.path.join(req.source_dir, 'setup.py')): | ||
| 237 | raise PreviousBuildDirError( | ||
| 238 | "pip can't proceed with requirements '%s' due to a" | ||
| 239 | " pre-existing build directory (%s). This is " | ||
| 240 | "likely due to a previous installation that failed" | ||
| 241 | ". pip is being responsible and not assuming it " | ||
| 242 | "can delete this. Please delete it and try again." | ||
| 243 | % (req, req.source_dir) | ||
| 244 | ) | ||
| 245 | req.populate_link(finder, upgrade_allowed, require_hashes) | ||
| 246 | |||
| 247 | # We can't hit this spot and have populate_link return None. | ||
| 248 | # req.satisfied_by is None here (because we're | ||
| 249 | # guarded) and upgrade has no impact except when satisfied_by | ||
| 250 | # is not None. | ||
| 251 | # Then inside find_requirement existing_applicable -> False | ||
| 252 | # If no new versions are found, DistributionNotFound is raised, | ||
| 253 | # otherwise a result is guaranteed. | ||
| 254 | assert req.link | ||
| 255 | link = req.link | ||
| 256 | |||
| 257 | # Now that we have the real link, we can tell what kind of | ||
| 258 | # requirements we have and raise some more informative errors | ||
| 259 | # than otherwise. (For example, we can raise VcsHashUnsupported | ||
| 260 | # for a VCS URL rather than HashMissing.) | ||
| 261 | if require_hashes: | ||
| 262 | # We could check these first 2 conditions inside | ||
| 263 | # unpack_url and save repetition of conditions, but then | ||
| 264 | # we would report less-useful error messages for | ||
| 265 | # unhashable requirements, complaining that there's no | ||
| 266 | # hash provided. | ||
| 267 | if is_vcs_url(link): | ||
| 268 | raise VcsHashUnsupported() | ||
| 269 | elif is_file_url(link) and is_dir_url(link): | ||
| 270 | raise DirectoryUrlHashUnsupported() | ||
| 271 | if not req.original_link and not req.is_pinned: | ||
| 272 | # Unpinned packages are asking for trouble when a new | ||
| 273 | # version is uploaded. This isn't a security check, but | ||
| 274 | # it saves users a surprising hash mismatch in the | ||
| 275 | # future. | ||
| 276 | # | ||
| 277 | # file:/// URLs aren't pinnable, so don't complain | ||
| 278 | # about them not being pinned. | ||
| 279 | raise HashUnpinned() | ||
| 280 | |||
| 281 | hashes = req.hashes(trust_internet=not require_hashes) | ||
| 282 | if require_hashes and not hashes: | ||
| 283 | # Known-good hashes are missing for this requirement, so | ||
| 284 | # shim it with a facade object that will provoke hash | ||
| 285 | # computation and then raise a HashMissing exception | ||
| 286 | # showing the user what the hash should be. | ||
| 287 | hashes = MissingHashes() | ||
| 288 | |||
| 289 | try: | ||
| 290 | download_dir = self.download_dir | ||
| 291 | # We always delete unpacked sdists after pip ran. | ||
| 292 | autodelete_unpacked = True | ||
| 293 | if req.link.is_wheel and self.wheel_download_dir: | ||
| 294 | # when doing 'pip wheel` we download wheels to a | ||
| 295 | # dedicated dir. | ||
| 296 | download_dir = self.wheel_download_dir | ||
| 297 | if req.link.is_wheel: | ||
| 298 | if download_dir: | ||
| 299 | # When downloading, we only unpack wheels to get | ||
| 300 | # metadata. | ||
| 301 | autodelete_unpacked = True | ||
| 302 | else: | ||
| 303 | # When installing a wheel, we use the unpacked | ||
| 304 | # wheel. | ||
| 305 | autodelete_unpacked = False | ||
| 306 | unpack_url( | ||
| 307 | req.link, req.source_dir, | ||
| 308 | download_dir, autodelete_unpacked, | ||
| 309 | session=session, hashes=hashes, | ||
| 310 | progress_bar=self.progress_bar | ||
| 311 | ) | ||
| 312 | except requests.HTTPError as exc: | ||
| 313 | logger.critical( | ||
| 314 | 'Could not install requirement %s because of error %s', | ||
| 315 | req, | ||
| 316 | exc, | ||
| 317 | ) | ||
| 318 | raise InstallationError( | ||
| 319 | 'Could not install requirement %s because of HTTP ' | ||
| 320 | 'error %s for URL %s' % | ||
| 321 | (req, exc, req.link) | ||
| 322 | ) | ||
| 323 | abstract_dist = make_abstract_dist(req) | ||
| 324 | abstract_dist.prep_for_dist(finder, self.build_isolation) | ||
| 325 | if self._download_should_save: | ||
| 326 | # Make a .zip of the source_dir we already created. | ||
| 327 | if req.link.scheme in vcs.all_schemes: | ||
| 328 | req.archive(self.download_dir) | ||
| 329 | return abstract_dist | ||
| 330 | |||
| 331 | def prepare_editable_requirement(self, req, require_hashes, use_user_site, | ||
| 332 | finder): | ||
| 333 | """Prepare an editable requirement | ||
| 334 | """ | ||
| 335 | assert req.editable, "cannot prepare a non-editable req as editable" | ||
| 336 | |||
| 337 | logger.info('Obtaining %s', req) | ||
| 338 | |||
| 339 | with indent_log(): | ||
| 340 | if require_hashes: | ||
| 341 | raise InstallationError( | ||
| 342 | 'The editable requirement %s cannot be installed when ' | ||
| 343 | 'requiring hashes, because there is no single file to ' | ||
| 344 | 'hash.' % req | ||
| 345 | ) | ||
| 346 | req.ensure_has_source_dir(self.src_dir) | ||
| 347 | req.update_editable(not self._download_should_save) | ||
| 348 | |||
| 349 | abstract_dist = make_abstract_dist(req) | ||
| 350 | abstract_dist.prep_for_dist(finder, self.build_isolation) | ||
| 351 | |||
| 352 | if self._download_should_save: | ||
| 353 | req.archive(self.download_dir) | ||
| 354 | req.check_if_exists(use_user_site) | ||
| 355 | |||
| 356 | return abstract_dist | ||
| 357 | |||
| 358 | def prepare_installed_requirement(self, req, require_hashes, skip_reason): | ||
| 359 | """Prepare an already-installed requirement | ||
| 360 | """ | ||
| 361 | assert req.satisfied_by, "req should have been satisfied but isn't" | ||
| 362 | assert skip_reason is not None, ( | ||
| 363 | "did not get skip reason skipped but req.satisfied_by " | ||
| 364 | "is set to %r" % (req.satisfied_by,) | ||
| 365 | ) | ||
| 366 | logger.info( | ||
| 367 | 'Requirement %s: %s (%s)', | ||
| 368 | skip_reason, req, req.satisfied_by.version | ||
| 369 | ) | ||
| 370 | with indent_log(): | ||
| 371 | if require_hashes: | ||
| 372 | logger.debug( | ||
| 373 | 'Since it is already installed, we are trusting this ' | ||
| 374 | 'package without checking its hash. To ensure a ' | ||
| 375 | 'completely repeatable environment, install into an ' | ||
| 376 | 'empty virtualenv.' | ||
| 377 | ) | ||
| 378 | abstract_dist = Installed(req) | ||
| 379 | |||
| 380 | return abstract_dist | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/pep425tags.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/pep425tags.py new file mode 100644 index 0000000..5d31310 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/pep425tags.py | |||
| @@ -0,0 +1,317 @@ | |||
| 1 | """Generate and work with PEP 425 Compatibility Tags.""" | ||
| 2 | from __future__ import absolute_import | ||
| 3 | |||
| 4 | import distutils.util | ||
| 5 | import logging | ||
| 6 | import platform | ||
| 7 | import re | ||
| 8 | import sys | ||
| 9 | import sysconfig | ||
| 10 | import warnings | ||
| 11 | from collections import OrderedDict | ||
| 12 | |||
| 13 | import pip._internal.utils.glibc | ||
| 14 | |||
| 15 | logger = logging.getLogger(__name__) | ||
| 16 | |||
| 17 | _osx_arch_pat = re.compile(r'(.+)_(\d+)_(\d+)_(.+)') | ||
| 18 | |||
| 19 | |||
| 20 | def get_config_var(var): | ||
| 21 | try: | ||
| 22 | return sysconfig.get_config_var(var) | ||
| 23 | except IOError as e: # Issue #1074 | ||
| 24 | warnings.warn("{}".format(e), RuntimeWarning) | ||
| 25 | return None | ||
| 26 | |||
| 27 | |||
| 28 | def get_abbr_impl(): | ||
| 29 | """Return abbreviated implementation name.""" | ||
| 30 | if hasattr(sys, 'pypy_version_info'): | ||
| 31 | pyimpl = 'pp' | ||
| 32 | elif sys.platform.startswith('java'): | ||
| 33 | pyimpl = 'jy' | ||
| 34 | elif sys.platform == 'cli': | ||
| 35 | pyimpl = 'ip' | ||
| 36 | else: | ||
| 37 | pyimpl = 'cp' | ||
| 38 | return pyimpl | ||
| 39 | |||
| 40 | |||
| 41 | def get_impl_ver(): | ||
| 42 | """Return implementation version.""" | ||
| 43 | impl_ver = get_config_var("py_version_nodot") | ||
| 44 | if not impl_ver or get_abbr_impl() == 'pp': | ||
| 45 | impl_ver = ''.join(map(str, get_impl_version_info())) | ||
| 46 | return impl_ver | ||
| 47 | |||
| 48 | |||
| 49 | def get_impl_version_info(): | ||
| 50 | """Return sys.version_info-like tuple for use in decrementing the minor | ||
| 51 | version.""" | ||
| 52 | if get_abbr_impl() == 'pp': | ||
| 53 | # as per https://github.com/pypa/pip/issues/2882 | ||
| 54 | return (sys.version_info[0], sys.pypy_version_info.major, | ||
| 55 | sys.pypy_version_info.minor) | ||
| 56 | else: | ||
| 57 | return sys.version_info[0], sys.version_info[1] | ||
| 58 | |||
| 59 | |||
| 60 | def get_impl_tag(): | ||
| 61 | """ | ||
| 62 | Returns the Tag for this specific implementation. | ||
| 63 | """ | ||
| 64 | return "{}{}".format(get_abbr_impl(), get_impl_ver()) | ||
| 65 | |||
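Roughly what the three helpers above return on a stock CPython 3.7; a simplified sketch that ignores the Jython/IronPython branches:

    import sys
    import sysconfig

    impl = 'pp' if hasattr(sys, 'pypy_version_info') else 'cp'
    ver = (sysconfig.get_config_var('py_version_nodot') or
           '%d%d' % sys.version_info[:2])
    print('%s%s' % (impl, ver))  # e.g. 'cp37'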
| 66 | |||
| 67 | def get_flag(var, fallback, expected=True, warn=True): | ||
| 68 | """Use a fallback method for determining SOABI flags if the needed config | ||
| 69 | var is unset or unavailable.""" | ||
| 70 | val = get_config_var(var) | ||
| 71 | if val is None: | ||
| 72 | if warn: | ||
| 73 | logger.debug("Config variable '%s' is unset, Python ABI tag may " | ||
| 74 | "be incorrect", var) | ||
| 75 | return fallback() | ||
| 76 | return val == expected | ||
| 77 | |||
| 78 | |||
| 79 | def get_abi_tag(): | ||
| 80 | """Return the ABI tag based on SOABI (if available) or emulate SOABI | ||
| 81 | (CPython 2, PyPy).""" | ||
| 82 | soabi = get_config_var('SOABI') | ||
| 83 | impl = get_abbr_impl() | ||
| 84 | if not soabi and impl in {'cp', 'pp'} and hasattr(sys, 'maxunicode'): | ||
| 85 | d = '' | ||
| 86 | m = '' | ||
| 87 | u = '' | ||
| 88 | if get_flag('Py_DEBUG', | ||
| 89 | lambda: hasattr(sys, 'gettotalrefcount'), | ||
| 90 | warn=(impl == 'cp')): | ||
| 91 | d = 'd' | ||
| 92 | if get_flag('WITH_PYMALLOC', | ||
| 93 | lambda: impl == 'cp', | ||
| 94 | warn=(impl == 'cp')): | ||
| 95 | m = 'm' | ||
| 96 | if get_flag('Py_UNICODE_SIZE', | ||
| 97 | lambda: sys.maxunicode == 0x10ffff, | ||
| 98 | expected=4, | ||
| 99 | warn=(impl == 'cp' and | ||
| 100 | sys.version_info < (3, 3))) \ | ||
| 101 | and sys.version_info < (3, 3): | ||
| 102 | u = 'u' | ||
| 103 | abi = '%s%s%s%s%s' % (impl, get_impl_ver(), d, m, u) | ||
| 104 | elif soabi and soabi.startswith('cpython-'): | ||
| 105 | abi = 'cp' + soabi.split('-')[1] | ||
| 106 | elif soabi: | ||
| 107 | abi = soabi.replace('.', '_').replace('-', '_') | ||
| 108 | else: | ||
| 109 | abi = None | ||
| 110 | return abi | ||
| 111 | |||
| 112 | |||
| 113 | def _is_running_32bit(): | ||
| 114 | return sys.maxsize == 2147483647 | ||
| 115 | |||
| 116 | |||
| 117 | def get_platform(): | ||
| 118 | """Return our platform name 'win32', 'linux_x86_64'""" | ||
| 119 | if sys.platform == 'darwin': | ||
| 120 | # distutils.util.get_platform() returns the release based on the value | ||
| 121 | # of MACOSX_DEPLOYMENT_TARGET on which Python was built, which may | ||
| 122 | # be significantly older than the user's current machine. | ||
| 123 | release, _, machine = platform.mac_ver() | ||
| 124 | split_ver = release.split('.') | ||
| 125 | |||
| 126 | if machine == "x86_64" and _is_running_32bit(): | ||
| 127 | machine = "i386" | ||
| 128 | elif machine == "ppc64" and _is_running_32bit(): | ||
| 129 | machine = "ppc" | ||
| 130 | |||
| 131 | return 'macosx_{}_{}_{}'.format(split_ver[0], split_ver[1], machine) | ||
| 132 | |||
| 133 | # XXX remove distutils dependency | ||
| 134 | result = distutils.util.get_platform().replace('.', '_').replace('-', '_') | ||
| 135 | if result == "linux_x86_64" and _is_running_32bit(): | ||
| 136 | # 32 bit Python program (running on a 64 bit Linux): pip should only | ||
| 137 | # install and run 32 bit compiled extensions in that case. | ||
| 138 | result = "linux_i686" | ||
| 139 | |||
| 140 | return result | ||
| 141 | |||
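The non-darwin branch of get_platform() is just a normalized distutils value:

    import distutils.util

    plat = distutils.util.get_platform().replace('.', '_').replace('-', '_')
    print(plat)  # e.g. 'linux_x86_64' or 'win_amd64'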
| 142 | |||
| 143 | def is_manylinux1_compatible(): | ||
| 144 | # Only Linux, and only x86-64 / i686 | ||
| 145 | if get_platform() not in {"linux_x86_64", "linux_i686"}: | ||
| 146 | return False | ||
| 147 | |||
| 148 | # Check for presence of _manylinux module | ||
| 149 | try: | ||
| 150 | import _manylinux | ||
| 151 | return bool(_manylinux.manylinux1_compatible) | ||
| 152 | except (ImportError, AttributeError): | ||
| 153 | # Fall through to heuristic check below | ||
| 154 | pass | ||
| 155 | |||
| 156 | # Check glibc version. CentOS 5 uses glibc 2.5. | ||
| 157 | return pip._internal.utils.glibc.have_compatible_glibc(2, 5) | ||
| 158 | |||
| 159 | |||
| 160 | def get_darwin_arches(major, minor, machine): | ||
| 161 | """Return a list of supported arches (including group arches) for | ||
| 162 | the given major, minor and machine architecture of an macOS machine. | ||
| 163 | """ | ||
| 164 | arches = [] | ||
| 165 | |||
| 166 | def _supports_arch(major, minor, arch): | ||
| 167 | # Looking at the application support for macOS versions in the chart | ||
| 168 | # provided by https://en.wikipedia.org/wiki/OS_X#Versions it appears | ||
| 169 | # our timeline looks roughly like: | ||
| 170 | # | ||
| 171 | # 10.0 - Introduces ppc support. | ||
| 172 | # 10.4 - Introduces ppc64, i386, and x86_64 support, however the ppc64 | ||
| 173 | # and x86_64 support is CLI only, and cannot be used for GUI | ||
| 174 | # applications. | ||
| 175 | # 10.5 - Extends ppc64 and x86_64 support to cover GUI applications. | ||
| 176 | # 10.6 - Drops support for ppc64 | ||
| 177 | # 10.7 - Drops support for ppc | ||
| 178 | # | ||
| 179 | # Given that we do not know if we're installing a CLI or a GUI | ||
| 180 | # application, we must be conservative and assume it might be a GUI | ||
| 181 | # application and behave as if ppc64 and x86_64 support did not occur | ||
| 182 | # until 10.5. | ||
| 183 | # | ||
| 184 | # Note: The above information is taken from the "Application support" | ||
| 185 | # column in the chart not the "Processor support" since I believe | ||
| 186 | # that we care about what instruction sets an application can use | ||
| 187 | # not which processors the OS supports. | ||
| 188 | if arch == 'ppc': | ||
| 189 | return (major, minor) <= (10, 5) | ||
| 190 | if arch == 'ppc64': | ||
| 191 | return (major, minor) == (10, 5) | ||
| 192 | if arch == 'i386': | ||
| 193 | return (major, minor) >= (10, 4) | ||
| 194 | if arch == 'x86_64': | ||
| 195 | return (major, minor) >= (10, 5) | ||
| 196 | if arch in groups: | ||
| 197 | for garch in groups[arch]: | ||
| 198 | if _supports_arch(major, minor, garch): | ||
| 199 | return True | ||
| 200 | return False | ||
| 201 | |||
| 202 | groups = OrderedDict([ | ||
| 203 | ("fat", ("i386", "ppc")), | ||
| 204 | ("intel", ("x86_64", "i386")), | ||
| 205 | ("fat64", ("x86_64", "ppc64")), | ||
| 206 | ("fat32", ("x86_64", "i386", "ppc")), | ||
| 207 | ]) | ||
| 208 | |||
| 209 | if _supports_arch(major, minor, machine): | ||
| 210 | arches.append(machine) | ||
| 211 | |||
| 212 | for garch in groups: | ||
| 213 | if machine in groups[garch] and _supports_arch(major, minor, garch): | ||
| 214 | arches.append(garch) | ||
| 215 | |||
| 216 | arches.append('universal') | ||
| 217 | |||
| 218 | return arches | ||
| 219 | |||
| 220 | |||
| 221 | def get_supported(versions=None, noarch=False, platform=None, | ||
| 222 | impl=None, abi=None): | ||
| 223 | """Return a list of supported tags for each version specified in | ||
| 224 | `versions`. | ||
| 225 | |||
| 226 | :param versions: a list of string versions, of the form ["33", "32"], | ||
| 227 | or None. The first version will be assumed to support our ABI. | ||
| 228 | :param platform: specify the exact platform you want valid | ||
| 229 | tags for, or None. If None, use the local system platform. | ||
| 230 | :param impl: specify the exact implementation you want valid | ||
| 231 | tags for, or None. If None, use the local interpreter impl. | ||
| 232 | :param abi: specify the exact abi you want valid | ||
| 233 | tags for, or None. If None, use the local interpreter abi. | ||
| 234 | """ | ||
| 235 | supported = [] | ||
| 236 | |||
| 237 | # Versions must be given with respect to the preference | ||
| 238 | if versions is None: | ||
| 239 | versions = [] | ||
| 240 | version_info = get_impl_version_info() | ||
| 241 | major = version_info[:-1] | ||
| 242 | # Support all previous minor Python versions. | ||
| 243 | for minor in range(version_info[-1], -1, -1): | ||
| 244 | versions.append(''.join(map(str, major + (minor,)))) | ||
| 245 | |||
| 246 | impl = impl or get_abbr_impl() | ||
| 247 | |||
| 248 | abis = [] | ||
| 249 | |||
| 250 | abi = abi or get_abi_tag() | ||
| 251 | if abi: | ||
| 252 | abis[0:0] = [abi] | ||
| 253 | |||
| 254 | abi3s = set() | ||
| 255 | import imp | ||
| 256 | for suffix in imp.get_suffixes(): | ||
| 257 | if suffix[0].startswith('.abi'): | ||
| 258 | abi3s.add(suffix[0].split('.', 2)[1]) | ||
| 259 | |||
| 260 | abis.extend(sorted(list(abi3s))) | ||
| 261 | |||
| 262 | abis.append('none') | ||
| 263 | |||
| 264 | if not noarch: | ||
| 265 | arch = platform or get_platform() | ||
| 266 | if arch.startswith('macosx'): | ||
| 267 | # support macosx-10.6-intel on macosx-10.9-x86_64 | ||
| 268 | match = _osx_arch_pat.match(arch) | ||
| 269 | if match: | ||
| 270 | name, major, minor, actual_arch = match.groups() | ||
| 271 | tpl = '{}_{}_%i_%s'.format(name, major) | ||
| 272 | arches = [] | ||
| 273 | for m in reversed(range(int(minor) + 1)): | ||
| 274 | for a in get_darwin_arches(int(major), m, actual_arch): | ||
| 275 | arches.append(tpl % (m, a)) | ||
| 276 | else: | ||
| 277 | # arch pattern didn't match (?!) | ||
| 278 | arches = [arch] | ||
| 279 | elif platform is None and is_manylinux1_compatible(): | ||
| 280 | arches = [arch.replace('linux', 'manylinux1'), arch] | ||
| 281 | else: | ||
| 282 | arches = [arch] | ||
| 283 | |||
| 284 | # Current version, current API (built specifically for our Python): | ||
| 285 | for abi in abis: | ||
| 286 | for arch in arches: | ||
| 287 | supported.append(('%s%s' % (impl, versions[0]), abi, arch)) | ||
| 288 | |||
| 289 | # abi3 modules compatible with older version of Python | ||
| 290 | for version in versions[1:]: | ||
| 291 | # abi3 was introduced in Python 3.2 | ||
| 292 | if version in {'31', '30'}: | ||
| 293 | break | ||
| 294 | for abi in abi3s: # empty set if not Python 3 | ||
| 295 | for arch in arches: | ||
| 296 | supported.append(("%s%s" % (impl, version), abi, arch)) | ||
| 297 | |||
| 298 | # Has binaries, does not use the Python API: | ||
| 299 | for arch in arches: | ||
| 300 | supported.append(('py%s' % (versions[0][0]), 'none', arch)) | ||
| 301 | |||
| 302 | # No abi / arch, but requires our implementation: | ||
| 303 | supported.append(('%s%s' % (impl, versions[0]), 'none', 'any')) | ||
| 304 | # Tagged specifically as being cross-version compatible | ||
| 305 | # (with just the major version specified) | ||
| 306 | supported.append(('%s%s' % (impl, versions[0][0]), 'none', 'any')) | ||
| 307 | |||
| 308 | # No abi / arch, generic Python | ||
| 309 | for i, version in enumerate(versions): | ||
| 310 | supported.append(('py%s' % (version,), 'none', 'any')) | ||
| 311 | if i == 0: | ||
| 312 | supported.append(('py%s' % (version[0]), 'none', 'any')) | ||
| 313 | |||
| 314 | return supported | ||
| 315 | |||
| 316 | |||
| 317 | implementation_tag = get_impl_tag() | ||
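Inspecting the resulting tag list (the import path assumes pip 10's private layout); tags are ordered from most to least specific, which is why the current-version, current-ABI entries come first:

    from pip._internal.pep425tags import get_supported

    for tag in get_supported()[:3]:
        print(tag)  # e.g. ('cp37', 'cp37m', 'linux_x86_64')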
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/__init__.py new file mode 100644 index 0000000..07ae607 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/__init__.py | |||
| @@ -0,0 +1,69 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | |||
| 3 | import logging | ||
| 4 | |||
| 5 | from .req_install import InstallRequirement | ||
| 6 | from .req_set import RequirementSet | ||
| 7 | from .req_file import parse_requirements | ||
| 8 | from pip._internal.utils.logging import indent_log | ||
| 9 | |||
| 10 | |||
| 11 | __all__ = [ | ||
| 12 | "RequirementSet", "InstallRequirement", | ||
| 13 | "parse_requirements", "install_given_reqs", | ||
| 14 | ] | ||
| 15 | |||
| 16 | logger = logging.getLogger(__name__) | ||
| 17 | |||
| 18 | |||
| 19 | def install_given_reqs(to_install, install_options, global_options=(), | ||
| 20 | *args, **kwargs): | ||
| 21 | """ | ||
| 22 | Install everything in the given list. | ||
| 23 | |||
| 24 | (to be called after having downloaded and unpacked the packages) | ||
| 25 | """ | ||
| 26 | |||
| 27 | if to_install: | ||
| 28 | logger.info( | ||
| 29 | 'Installing collected packages: %s', | ||
| 30 | ', '.join([req.name for req in to_install]), | ||
| 31 | ) | ||
| 32 | |||
| 33 | with indent_log(): | ||
| 34 | for requirement in to_install: | ||
| 35 | if requirement.conflicts_with: | ||
| 36 | logger.info( | ||
| 37 | 'Found existing installation: %s', | ||
| 38 | requirement.conflicts_with, | ||
| 39 | ) | ||
| 40 | with indent_log(): | ||
| 41 | uninstalled_pathset = requirement.uninstall( | ||
| 42 | auto_confirm=True | ||
| 43 | ) | ||
| 44 | try: | ||
| 45 | requirement.install( | ||
| 46 | install_options, | ||
| 47 | global_options, | ||
| 48 | *args, | ||
| 49 | **kwargs | ||
| 50 | ) | ||
| 51 | except: | ||
| 52 | should_rollback = ( | ||
| 53 | requirement.conflicts_with and | ||
| 54 | not requirement.install_succeeded | ||
| 55 | ) | ||
| 56 | # if install did not succeed, rollback previous uninstall | ||
| 57 | if should_rollback: | ||
| 58 | uninstalled_pathset.rollback() | ||
| 59 | raise | ||
| 60 | else: | ||
| 61 | should_commit = ( | ||
| 62 | requirement.conflicts_with and | ||
| 63 | requirement.install_succeeded | ||
| 64 | ) | ||
| 65 | if should_commit: | ||
| 66 | uninstalled_pathset.commit() | ||
| 67 | requirement.remove_temporary_source() | ||
| 68 | |||
| 69 | return to_install | ||
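The uninstall/rollback choreography above boils down to a common pattern; a minimal standalone sketch with hypothetical names (note that pip itself uses a bare except here, so even KeyboardInterrupt triggers the rollback):

    def replace_installation(uninstall, install):
        undo = uninstall()    # returns an object with rollback()/commit()
        try:
            install()
        except Exception:
            undo.rollback()   # install failed: restore the removed files
            raise
        else:
            undo.commit()     # success: make the removal permanent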
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/req_file.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/req_file.py new file mode 100644 index 0000000..9e6ef41 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/req_file.py | |||
| @@ -0,0 +1,338 @@ | |||
| 1 | """ | ||
| 2 | Requirements file parsing | ||
| 3 | """ | ||
| 4 | |||
| 5 | from __future__ import absolute_import | ||
| 6 | |||
| 7 | import optparse | ||
| 8 | import os | ||
| 9 | import re | ||
| 10 | import shlex | ||
| 11 | import sys | ||
| 12 | |||
| 13 | from pip._vendor.six.moves import filterfalse | ||
| 14 | from pip._vendor.six.moves.urllib import parse as urllib_parse | ||
| 15 | |||
| 16 | from pip._internal import cmdoptions | ||
| 17 | from pip._internal.download import get_file_content | ||
| 18 | from pip._internal.exceptions import RequirementsFileParseError | ||
| 19 | from pip._internal.req.req_install import InstallRequirement | ||
| 20 | |||
| 21 | __all__ = ['parse_requirements'] | ||
| 22 | |||
| 23 | SCHEME_RE = re.compile(r'^(http|https|file):', re.I) | ||
| 24 | COMMENT_RE = re.compile(r'(^|\s)+#.*$') | ||
| 25 | |||
| 26 | # Matches environment variable-style values in '${MY_VARIABLE_1}' with the | ||
| 27 | # variable name consisting of only uppercase letters, digits or the '_' | ||
| 28 | # (underscore). This follows the POSIX standard defined in IEEE Std 1003.1, | ||
| 29 | # 2013 Edition. | ||
| 30 | ENV_VAR_RE = re.compile(r'(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})') | ||
| 31 | |||
| 32 | SUPPORTED_OPTIONS = [ | ||
| 33 | cmdoptions.constraints, | ||
| 34 | cmdoptions.editable, | ||
| 35 | cmdoptions.requirements, | ||
| 36 | cmdoptions.no_index, | ||
| 37 | cmdoptions.index_url, | ||
| 38 | cmdoptions.find_links, | ||
| 39 | cmdoptions.extra_index_url, | ||
| 40 | cmdoptions.always_unzip, | ||
| 41 | cmdoptions.no_binary, | ||
| 42 | cmdoptions.only_binary, | ||
| 43 | cmdoptions.pre, | ||
| 44 | cmdoptions.process_dependency_links, | ||
| 45 | cmdoptions.trusted_host, | ||
| 46 | cmdoptions.require_hashes, | ||
| 47 | ] | ||
| 48 | |||
| 49 | # options to be passed to requirements | ||
| 50 | SUPPORTED_OPTIONS_REQ = [ | ||
| 51 | cmdoptions.install_options, | ||
| 52 | cmdoptions.global_options, | ||
| 53 | cmdoptions.hash, | ||
| 54 | ] | ||
| 55 | |||
| 56 | # the 'dest' string values | ||
| 57 | SUPPORTED_OPTIONS_REQ_DEST = [o().dest for o in SUPPORTED_OPTIONS_REQ] | ||
| 58 | |||
| 59 | |||
| 60 | def parse_requirements(filename, finder=None, comes_from=None, options=None, | ||
| 61 | session=None, constraint=False, wheel_cache=None): | ||
| 62 | """Parse a requirements file and yield InstallRequirement instances. | ||
| 63 | |||
| 64 | :param filename: Path or url of requirements file. | ||
| 65 | :param finder: Instance of pip.index.PackageFinder. | ||
| 66 | :param comes_from: Origin description of requirements. | ||
| 67 | :param options: cli options. | ||
| 68 | :param session: Instance of pip.download.PipSession. | ||
| 69 | :param constraint: If true, parsing a constraint file rather than | ||
| 70 | requirements file. | ||
| 71 | :param wheel_cache: Instance of pip.wheel.WheelCache | ||
| 72 | """ | ||
| 73 | if session is None: | ||
| 74 | raise TypeError( | ||
| 75 | "parse_requirements() missing 1 required keyword argument: " | ||
| 76 | "'session'" | ||
| 77 | ) | ||
| 78 | |||
| 79 | _, content = get_file_content( | ||
| 80 | filename, comes_from=comes_from, session=session | ||
| 81 | ) | ||
| 82 | |||
| 83 | lines_enum = preprocess(content, options) | ||
| 84 | |||
| 85 | for line_number, line in lines_enum: | ||
| 86 | req_iter = process_line(line, filename, line_number, finder, | ||
| 87 | comes_from, options, session, wheel_cache, | ||
| 88 | constraint=constraint) | ||
| 89 | for req in req_iter: | ||
| 90 | yield req | ||
| 91 | |||
| 92 | |||
| 93 | def preprocess(content, options): | ||
| 94 | """Split, filter, and join lines, and return a line iterator | ||
| 95 | |||
| 96 | :param content: the content of the requirements file | ||
| 97 | :param options: cli options | ||
| 98 | """ | ||
| 99 | lines_enum = enumerate(content.splitlines(), start=1) | ||
| 100 | lines_enum = join_lines(lines_enum) | ||
| 101 | lines_enum = ignore_comments(lines_enum) | ||
| 102 | lines_enum = skip_regex(lines_enum, options) | ||
| 103 | lines_enum = expand_env_variables(lines_enum) | ||
| 104 | return lines_enum | ||
| 105 | |||
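`preprocess()` assembles a lazy pipeline: every stage consumes and yields `(line_number, line)` pairs, so the original line numbers survive all the way into error messages. A toy reduction of the numbering and comment-stripping stages, reusing the same `COMMENT_RE` pattern defined above:

```python
import re

COMMENT_RE = re.compile(r'(^|\s)+#.*$')  # same pattern as in this module

def number(content):
    # stage 1: attach 1-based line numbers
    return enumerate(content.splitlines(), start=1)

def strip_comments(pairs):
    # stage 2: drop comments and blank lines, keeping the numbers
    for n, line in pairs:
        line = COMMENT_RE.sub('', line).strip()
        if line:
            yield n, line

content = "requests==2.18.4  # pinned\n\n# comment only\nsix\n"
print(list(strip_comments(number(content))))
# -> [(1, 'requests==2.18.4'), (4, 'six')]
```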
| 106 | |||
| 107 | def process_line(line, filename, line_number, finder=None, comes_from=None, | ||
| 108 | options=None, session=None, wheel_cache=None, | ||
| 109 | constraint=False): | ||
| 110 | """Process a single requirements line; This can result in creating/yielding | ||
| 111 | requirements, or updating the finder. | ||
| 112 | |||
| 113 | For lines that contain requirements, the only options that have an effect | ||
| 114 | are from SUPPORTED_OPTIONS_REQ, and they are scoped to the | ||
| 115 | requirement. Other options from SUPPORTED_OPTIONS may be present, but are | ||
| 116 | ignored. | ||
| 117 | |||
| 118 | For lines that do not contain requirements, the only options that have an | ||
| 119 | effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may | ||
| 120 | be present, but are ignored. These lines may contain multiple options | ||
| 121 | (although our docs imply only one is supported), and all are parsed and | ||
| 122 | affect the finder. | ||
| 123 | |||
| 124 | :param constraint: If True, parsing a constraints file. | ||
| 125 | :param options: OptionParser options that we may update | ||
| 126 | """ | ||
| 127 | parser = build_parser(line) | ||
| 128 | defaults = parser.get_default_values() | ||
| 129 | defaults.index_url = None | ||
| 130 | if finder: | ||
| 131 | # `finder.format_control` will be updated during parsing | ||
| 132 | defaults.format_control = finder.format_control | ||
| 133 | args_str, options_str = break_args_options(line) | ||
| 134 | if sys.version_info < (2, 7, 3): | ||
| 135 | # Prior to 2.7.3, shlex cannot deal with unicode entries | ||
| 136 | options_str = options_str.encode('utf8') | ||
| 137 | opts, _ = parser.parse_args(shlex.split(options_str), defaults) | ||
| 138 | |||
| 139 | # preserve for the nested code path | ||
| 140 | line_comes_from = '%s %s (line %s)' % ( | ||
| 141 | '-c' if constraint else '-r', filename, line_number, | ||
| 142 | ) | ||
| 143 | |||
| 144 | # yield a line requirement | ||
| 145 | if args_str: | ||
| 146 | isolated = options.isolated_mode if options else False | ||
| 147 | if options: | ||
| 148 | cmdoptions.check_install_build_global(options, opts) | ||
| 149 | # get the options that apply to requirements | ||
| 150 | req_options = {} | ||
| 151 | for dest in SUPPORTED_OPTIONS_REQ_DEST: | ||
| 152 | if dest in opts.__dict__ and opts.__dict__[dest]: | ||
| 153 | req_options[dest] = opts.__dict__[dest] | ||
| 154 | yield InstallRequirement.from_line( | ||
| 155 | args_str, line_comes_from, constraint=constraint, | ||
| 156 | isolated=isolated, options=req_options, wheel_cache=wheel_cache | ||
| 157 | ) | ||
| 158 | |||
| 159 | # yield an editable requirement | ||
| 160 | elif opts.editables: | ||
| 161 | isolated = options.isolated_mode if options else False | ||
| 162 | yield InstallRequirement.from_editable( | ||
| 163 | opts.editables[0], comes_from=line_comes_from, | ||
| 164 | constraint=constraint, isolated=isolated, wheel_cache=wheel_cache | ||
| 165 | ) | ||
| 166 | |||
| 167 | # parse a nested requirements file | ||
| 168 | elif opts.requirements or opts.constraints: | ||
| 169 | if opts.requirements: | ||
| 170 | req_path = opts.requirements[0] | ||
| 171 | nested_constraint = False | ||
| 172 | else: | ||
| 173 | req_path = opts.constraints[0] | ||
| 174 | nested_constraint = True | ||
| 175 | # original file is over http | ||
| 176 | if SCHEME_RE.search(filename): | ||
| 177 | # do a url join so relative paths work | ||
| 178 | req_path = urllib_parse.urljoin(filename, req_path) | ||
| 179 | # original file and nested file are paths | ||
| 180 | elif not SCHEME_RE.search(req_path): | ||
| 181 | # do a join so relative paths work | ||
| 182 | req_path = os.path.join(os.path.dirname(filename), req_path) | ||
| 183 | # TODO: Why not use `comes_from='-r {} (line {})'` here as well? | ||
| 184 | parser = parse_requirements( | ||
| 185 | req_path, finder, comes_from, options, session, | ||
| 186 | constraint=nested_constraint, wheel_cache=wheel_cache | ||
| 187 | ) | ||
| 188 | for req in parser: | ||
| 189 | yield req | ||
| 190 | |||
| 191 | # percolate hash-checking option upward | ||
| 192 | elif opts.require_hashes: | ||
| 193 | options.require_hashes = opts.require_hashes | ||
| 194 | |||
| 195 | # set finder options | ||
| 196 | elif finder: | ||
| 197 | if opts.index_url: | ||
| 198 | finder.index_urls = [opts.index_url] | ||
| 199 | if opts.no_index is True: | ||
| 200 | finder.index_urls = [] | ||
| 201 | if opts.extra_index_urls: | ||
| 202 | finder.index_urls.extend(opts.extra_index_urls) | ||
| 203 | if opts.find_links: | ||
| 204 | # FIXME: it would be nice to keep track of the source | ||
| 205 | # of the find_links: support a find-links local path | ||
| 206 | # relative to a requirements file. | ||
| 207 | value = opts.find_links[0] | ||
| 208 | req_dir = os.path.dirname(os.path.abspath(filename)) | ||
| 209 | relative_to_reqs_file = os.path.join(req_dir, value) | ||
| 210 | if os.path.exists(relative_to_reqs_file): | ||
| 211 | value = relative_to_reqs_file | ||
| 212 | finder.find_links.append(value) | ||
| 213 | if opts.pre: | ||
| 214 | finder.allow_all_prereleases = True | ||
| 215 | if opts.process_dependency_links: | ||
| 216 | finder.process_dependency_links = True | ||
| 217 | if opts.trusted_hosts: | ||
| 218 | finder.secure_origins.extend( | ||
| 219 | ("*", host, "*") for host in opts.trusted_hosts) | ||
| 220 | |||
| 221 | |||
| 222 | def break_args_options(line): | ||
| 223 | """Break up the line into an args and options string. We only want to shlex | ||
| 224 | (and then optparse) the options, not the args. args can contain markers | ||
| 225 | which are corrupted by shlex. | ||
| 226 | """ | ||
| 227 | tokens = line.split(' ') | ||
| 228 | args = [] | ||
| 229 | options = tokens[:] | ||
| 230 | for token in tokens: | ||
| 231 | if token.startswith('-'):  # a token starting with '--' also starts with '-' | ||
| 232 | break | ||
| 233 | else: | ||
| 234 | args.append(token) | ||
| 235 | options.pop(0) | ||
| 236 | return ' '.join(args), ' '.join(options) | ||
| 237 | |||
| 238 | |||
| 239 | def build_parser(line): | ||
| 240 | """ | ||
| 241 | Return a parser for parsing requirement lines | ||
| 242 | """ | ||
| 243 | parser = optparse.OptionParser(add_help_option=False) | ||
| 244 | |||
| 245 | option_factories = SUPPORTED_OPTIONS + SUPPORTED_OPTIONS_REQ | ||
| 246 | for option_factory in option_factories: | ||
| 247 | option = option_factory() | ||
| 248 | parser.add_option(option) | ||
| 249 | |||
| 250 | # By default optparse sys.exits on parsing errors. We want to wrap | ||
| 251 | # that in our own exception. | ||
| 252 | def parser_exit(self, msg): | ||
| 253 | # add offending line | ||
| 254 | msg = 'Invalid requirement: %s\n%s' % (line, msg) | ||
| 255 | raise RequirementsFileParseError(msg) | ||
| 256 | parser.exit = parser_exit | ||
| 257 | |||
| 258 | return parser | ||
| 259 | |||
| 260 | |||
| 261 | def join_lines(lines_enum): | ||
| 262 | """Joins a line ending in '\' with the previous line (except when following | ||
| 263 | comments). The joined line takes on the index of the first line. | ||
| 264 | """ | ||
| 265 | primary_line_number = None | ||
| 266 | new_line = [] | ||
| 267 | for line_number, line in lines_enum: | ||
| 268 | if not line.endswith('\\') or COMMENT_RE.match(line): | ||
| 269 | if COMMENT_RE.match(line): | ||
| 270 | # this ensures comments are always matched later | ||
| 271 | line = ' ' + line | ||
| 272 | if new_line: | ||
| 273 | new_line.append(line) | ||
| 274 | yield primary_line_number, ''.join(new_line) | ||
| 275 | new_line = [] | ||
| 276 | else: | ||
| 277 | yield line_number, line | ||
| 278 | else: | ||
| 279 | if not new_line: | ||
| 280 | primary_line_number = line_number | ||
| 281 | new_line.append(line.strip('\\')) | ||
| 282 | |||
| 283 | # last line contains \ | ||
| 284 | if new_line: | ||
| 285 | yield primary_line_number, ''.join(new_line) | ||
| 286 | |||
| 287 | # TODO: handle space after '\'. | ||
| 288 | |||
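Run on a concrete input, the continuation handling behaves as follows; the snippet is a standalone reduction of `join_lines()` (the comment special case is omitted for brevity):

```python
def join(pairs):
    # join physical lines ending in '\' into one logical line,
    # reported under the first line's number
    primary, buf = None, []
    for n, line in pairs:
        if line.endswith('\\'):
            if not buf:
                primary = n
            buf.append(line.rstrip('\\'))
        else:
            if buf:
                buf.append(line)
                yield primary, ''.join(buf)
                buf = []
            else:
                yield n, line
    if buf:  # dangling '\' on the last line
        yield primary, ''.join(buf)

pairs = enumerate(["requests \\", "  ==2.18.4", "six"], start=1)
print(list(join(pairs)))
# -> [(1, 'requests   ==2.18.4'), (3, 'six')]
```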
| 289 | |||
| 290 | def ignore_comments(lines_enum): | ||
| 291 | """ | ||
| 292 | Strips comments and filters out empty lines. | ||
| 293 | """ | ||
| 294 | for line_number, line in lines_enum: | ||
| 295 | line = COMMENT_RE.sub('', line) | ||
| 296 | line = line.strip() | ||
| 297 | if line: | ||
| 298 | yield line_number, line | ||
| 299 | |||
| 300 | |||
| 301 | def skip_regex(lines_enum, options): | ||
| 302 | """ | ||
| 303 | Skip lines that match '--skip-requirements-regex' pattern | ||
| 304 | |||
| 305 | Note: the regex pattern is only built once | ||
| 306 | """ | ||
| 307 | skip_regex = options.skip_requirements_regex if options else None | ||
| 308 | if skip_regex: | ||
| 309 | pattern = re.compile(skip_regex) | ||
| 310 | lines_enum = filterfalse(lambda e: pattern.search(e[1]), lines_enum) | ||
| 311 | return lines_enum | ||
| 312 | |||
| 313 | |||
| 314 | def expand_env_variables(lines_enum): | ||
| 315 | """Replace all environment variables that can be retrieved via `os.getenv`. | ||
| 316 | |||
| 317 | The only allowed format for environment variables defined in the | ||
| 318 | requirement file is `${MY_VARIABLE_1}` to ensure two things: | ||
| 319 | |||
| 320 | 1. Strings that contain a `$` aren't accidentally (partially) expanded. | ||
| 321 | 2. Requirement files stay consistent across platforms. | ||
| 322 | |||
| 323 | These points are the result of a discussion on the `github pull | ||
| 324 | request #3514 <https://github.com/pypa/pip/pull/3514>`_. | ||
| 325 | |||
| 326 | Valid characters in variable names follow the `POSIX standard | ||
| 327 | <http://pubs.opengroup.org/onlinepubs/9699919799/>`_ and are limited | ||
| 328 | to uppercase letters, digits, and the `_` (underscore). | ||
| 329 | """ | ||
| 330 | for line_number, line in lines_enum: | ||
| 331 | for env_var, var_name in ENV_VAR_RE.findall(line): | ||
| 332 | value = os.getenv(var_name) | ||
| 333 | if not value: | ||
| 334 | continue | ||
| 335 | |||
| 336 | line = line.replace(env_var, value) | ||
| 337 | |||
| 338 | yield line_number, line | ||
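Tried in isolation, the `${VAR}` expansion touches only the exact `${UPPERCASE_NAME}` form; bare `$VAR`, lowercase names, and unset variables pass through untouched. A self-contained check using the same regex (the variable names are demo values):

```python
import os
import re

ENV_VAR_RE = re.compile(r'(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})')

def expand(line):
    for env_var, var_name in ENV_VAR_RE.findall(line):
        value = os.getenv(var_name)
        if value:
            line = line.replace(env_var, value)
    return line

os.environ['PIP_DEMO_TOKEN'] = 's3cret'  # demo value, not a real token
print(expand('https://user:${PIP_DEMO_TOKEN}@example.com/simple'))
# -> https://user:s3cret@example.com/simple
print(expand('costs $5, ${not_upper}, ${UNSET_NAME}'))  # unchanged
```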
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/req_install.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/req_install.py new file mode 100644 index 0000000..9dd1523 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/req_install.py | |||
| @@ -0,0 +1,1115 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | |||
| 3 | import logging | ||
| 4 | import os | ||
| 5 | import re | ||
| 6 | import shutil | ||
| 7 | import sys | ||
| 8 | import sysconfig | ||
| 9 | import traceback | ||
| 10 | import warnings | ||
| 11 | import zipfile | ||
| 12 | from distutils.util import change_root | ||
| 13 | from email.parser import FeedParser # type: ignore | ||
| 14 | |||
| 15 | from pip._vendor import pkg_resources, pytoml, six | ||
| 16 | from pip._vendor.packaging import specifiers | ||
| 17 | from pip._vendor.packaging.markers import Marker | ||
| 18 | from pip._vendor.packaging.requirements import InvalidRequirement, Requirement | ||
| 19 | from pip._vendor.packaging.utils import canonicalize_name | ||
| 20 | from pip._vendor.packaging.version import parse as parse_version | ||
| 21 | from pip._vendor.packaging.version import Version | ||
| 22 | from pip._vendor.pkg_resources import RequirementParseError, parse_requirements | ||
| 23 | |||
| 24 | from pip._internal import wheel | ||
| 25 | from pip._internal.build_env import BuildEnvironment | ||
| 26 | from pip._internal.compat import native_str | ||
| 27 | from pip._internal.download import ( | ||
| 28 | is_archive_file, is_url, path_to_url, url_to_path, | ||
| 29 | ) | ||
| 30 | from pip._internal.exceptions import InstallationError, UninstallationError | ||
| 31 | from pip._internal.locations import ( | ||
| 32 | PIP_DELETE_MARKER_FILENAME, running_under_virtualenv, | ||
| 33 | ) | ||
| 34 | from pip._internal.req.req_uninstall import UninstallPathSet | ||
| 35 | from pip._internal.utils.deprecation import RemovedInPip11Warning | ||
| 36 | from pip._internal.utils.hashes import Hashes | ||
| 37 | from pip._internal.utils.logging import indent_log | ||
| 38 | from pip._internal.utils.misc import ( | ||
| 39 | _make_build_dir, ask_path_exists, backup_dir, call_subprocess, | ||
| 40 | display_path, dist_in_site_packages, dist_in_usersite, ensure_dir, | ||
| 41 | get_installed_version, is_installable_dir, read_text_file, rmtree, | ||
| 42 | ) | ||
| 43 | from pip._internal.utils.setuptools_build import SETUPTOOLS_SHIM | ||
| 44 | from pip._internal.utils.temp_dir import TempDirectory | ||
| 45 | from pip._internal.utils.ui import open_spinner | ||
| 46 | from pip._internal.vcs import vcs | ||
| 47 | from pip._internal.wheel import Wheel, move_wheel_files | ||
| 48 | |||
| 49 | logger = logging.getLogger(__name__) | ||
| 50 | |||
| 51 | operators = specifiers.Specifier._operators.keys() | ||
| 52 | |||
| 53 | |||
| 54 | def _strip_extras(path): | ||
| 55 | m = re.match(r'^(.+)(\[[^\]]+\])$', path) | ||
| 56 | extras = None | ||
| 57 | if m: | ||
| 58 | path_no_extras = m.group(1) | ||
| 59 | extras = m.group(2) | ||
| 60 | else: | ||
| 61 | path_no_extras = path | ||
| 62 | |||
| 63 | return path_no_extras, extras | ||
| 64 | |||
| 65 | |||
| 66 | class InstallRequirement(object): | ||
| 67 | """ | ||
| 68 | Represents something that may be installed later on. It may hold | ||
| 69 | information about where to fetch the relevant requirement and also | ||
| 70 | contains the logic for installing that requirement. | ||
| 71 | """ | ||
| 72 | |||
| 73 | def __init__(self, req, comes_from, source_dir=None, editable=False, | ||
| 74 | link=None, update=True, markers=None, | ||
| 75 | isolated=False, options=None, wheel_cache=None, | ||
| 76 | constraint=False, extras=()): | ||
| 77 | assert req is None or isinstance(req, Requirement), req | ||
| 78 | self.req = req | ||
| 79 | self.comes_from = comes_from | ||
| 80 | self.constraint = constraint | ||
| 81 | if source_dir is not None: | ||
| 82 | self.source_dir = os.path.normpath(os.path.abspath(source_dir)) | ||
| 83 | else: | ||
| 84 | self.source_dir = None | ||
| 85 | self.editable = editable | ||
| 86 | |||
| 87 | self._wheel_cache = wheel_cache | ||
| 88 | if link is not None: | ||
| 89 | self.link = self.original_link = link | ||
| 90 | else: | ||
| 91 | from pip._internal.index import Link | ||
| 92 | self.link = self.original_link = req and req.url and Link(req.url) | ||
| 93 | |||
| 94 | if extras: | ||
| 95 | self.extras = extras | ||
| 96 | elif req: | ||
| 97 | self.extras = { | ||
| 98 | pkg_resources.safe_extra(extra) for extra in req.extras | ||
| 99 | } | ||
| 100 | else: | ||
| 101 | self.extras = set() | ||
| 102 | if markers is not None: | ||
| 103 | self.markers = markers | ||
| 104 | else: | ||
| 105 | self.markers = req and req.marker | ||
| 106 | self._egg_info_path = None | ||
| 107 | # This holds the pkg_resources.Distribution object if this requirement | ||
| 108 | # is already available: | ||
| 109 | self.satisfied_by = None | ||
| 110 | # This holds the pkg_resources.Distribution object if this requirement | ||
| 111 | # conflicts with another installed distribution: | ||
| 112 | self.conflicts_with = None | ||
| 113 | # Temporary build location | ||
| 114 | self._temp_build_dir = TempDirectory(kind="req-build") | ||
| 115 | # Used to store the global directory where the _temp_build_dir should | ||
| 116 | # have been created. Cf _correct_build_location method. | ||
| 117 | self._ideal_build_dir = None | ||
| 118 | # True if the editable should be updated: | ||
| 119 | self.update = update | ||
| 120 | # Set to True after successful installation | ||
| 121 | self.install_succeeded = None | ||
| 122 | # UninstallPathSet of uninstalled distribution (for possible rollback) | ||
| 123 | self.uninstalled_pathset = None | ||
| 124 | self.options = options if options else {} | ||
| 125 | # Set to True after successful preparation of this requirement | ||
| 126 | self.prepared = False | ||
| 127 | self.is_direct = False | ||
| 128 | |||
| 129 | self.isolated = isolated | ||
| 130 | self.build_env = BuildEnvironment(no_clean=True) | ||
| 131 | |||
| 132 | @classmethod | ||
| 133 | def from_editable(cls, editable_req, comes_from=None, isolated=False, | ||
| 134 | options=None, wheel_cache=None, constraint=False): | ||
| 135 | from pip._internal.index import Link | ||
| 136 | |||
| 137 | name, url, extras_override = parse_editable(editable_req) | ||
| 138 | if url.startswith('file:'): | ||
| 139 | source_dir = url_to_path(url) | ||
| 140 | else: | ||
| 141 | source_dir = None | ||
| 142 | |||
| 143 | if name is not None: | ||
| 144 | try: | ||
| 145 | req = Requirement(name) | ||
| 146 | except InvalidRequirement: | ||
| 147 | raise InstallationError("Invalid requirement: '%s'" % name) | ||
| 148 | else: | ||
| 149 | req = None | ||
| 150 | return cls( | ||
| 151 | req, comes_from, source_dir=source_dir, | ||
| 152 | editable=True, | ||
| 153 | link=Link(url), | ||
| 154 | constraint=constraint, | ||
| 155 | isolated=isolated, | ||
| 156 | options=options if options else {}, | ||
| 157 | wheel_cache=wheel_cache, | ||
| 158 | extras=extras_override or (), | ||
| 159 | ) | ||
| 160 | |||
| 161 | @classmethod | ||
| 162 | def from_req(cls, req, comes_from=None, isolated=False, wheel_cache=None): | ||
| 163 | try: | ||
| 164 | req = Requirement(req) | ||
| 165 | except InvalidRequirement: | ||
| 166 | raise InstallationError("Invalid requirement: '%s'" % req) | ||
| 167 | if req.url: | ||
| 168 | raise InstallationError( | ||
| 169 | "Direct url requirement (like %s) are not allowed for " | ||
| 170 | "dependencies" % req | ||
| 171 | ) | ||
| 172 | return cls(req, comes_from, isolated=isolated, wheel_cache=wheel_cache) | ||
| 173 | |||
| 174 | @classmethod | ||
| 175 | def from_line( | ||
| 176 | cls, name, comes_from=None, isolated=False, options=None, | ||
| 177 | wheel_cache=None, constraint=False): | ||
| 178 | """Creates an InstallRequirement from a name, which might be a | ||
| 179 | requirement, directory containing 'setup.py', filename, or URL. | ||
| 180 | """ | ||
| 181 | from pip._internal.index import Link | ||
| 182 | |||
| 183 | if is_url(name): | ||
| 184 | marker_sep = '; ' | ||
| 185 | else: | ||
| 186 | marker_sep = ';' | ||
| 187 | if marker_sep in name: | ||
| 188 | name, markers = name.split(marker_sep, 1) | ||
| 189 | markers = markers.strip() | ||
| 190 | if not markers: | ||
| 191 | markers = None | ||
| 192 | else: | ||
| 193 | markers = Marker(markers) | ||
| 194 | else: | ||
| 195 | markers = None | ||
| 196 | name = name.strip() | ||
| 197 | req = None | ||
| 198 | path = os.path.normpath(os.path.abspath(name)) | ||
| 199 | link = None | ||
| 200 | extras = None | ||
| 201 | |||
| 202 | if is_url(name): | ||
| 203 | link = Link(name) | ||
| 204 | else: | ||
| 205 | p, extras = _strip_extras(path) | ||
| 206 | looks_like_dir = os.path.isdir(p) and ( | ||
| 207 | os.path.sep in name or | ||
| 208 | (os.path.altsep is not None and os.path.altsep in name) or | ||
| 209 | name.startswith('.') | ||
| 210 | ) | ||
| 211 | if looks_like_dir: | ||
| 212 | if not is_installable_dir(p): | ||
| 213 | raise InstallationError( | ||
| 214 | "Directory %r is not installable. File 'setup.py' " | ||
| 215 | "not found." % name | ||
| 216 | ) | ||
| 217 | link = Link(path_to_url(p)) | ||
| 218 | elif is_archive_file(p): | ||
| 219 | if not os.path.isfile(p): | ||
| 220 | logger.warning( | ||
| 221 | 'Requirement %r looks like a filename, but the ' | ||
| 222 | 'file does not exist', | ||
| 223 | name | ||
| 224 | ) | ||
| 225 | link = Link(path_to_url(p)) | ||
| 226 | |||
| 227 | # it's a local file, dir, or url | ||
| 228 | if link: | ||
| 229 | # Handle relative file URLs | ||
| 230 | if link.scheme == 'file' and re.search(r'\.\./', link.url): | ||
| 231 | link = Link( | ||
| 232 | path_to_url(os.path.normpath(os.path.abspath(link.path)))) | ||
| 233 | # wheel file | ||
| 234 | if link.is_wheel: | ||
| 235 | wheel = Wheel(link.filename) # can raise InvalidWheelFilename | ||
| 236 | req = "%s==%s" % (wheel.name, wheel.version) | ||
| 237 | else: | ||
| 238 | # set the req to the egg fragment. when it's not there, this | ||
| 239 | # will become an 'unnamed' requirement | ||
| 240 | req = link.egg_fragment | ||
| 241 | |||
| 242 | # a requirement specifier | ||
| 243 | else: | ||
| 244 | req = name | ||
| 245 | |||
| 246 | if extras: | ||
| 247 | extras = Requirement("placeholder" + extras.lower()).extras | ||
| 248 | else: | ||
| 249 | extras = () | ||
| 250 | if req is not None: | ||
| 251 | try: | ||
| 252 | req = Requirement(req) | ||
| 253 | except InvalidRequirement: | ||
| 254 | if os.path.sep in req: | ||
| 255 | add_msg = "It looks like a path." | ||
| 256 | add_msg += deduce_helpful_msg(req) | ||
| 257 | elif '=' in req and not any(op in req for op in operators): | ||
| 258 | add_msg = "= is not a valid operator. Did you mean == ?" | ||
| 259 | else: | ||
| 260 | add_msg = traceback.format_exc() | ||
| 261 | raise InstallationError( | ||
| 262 | "Invalid requirement: '%s'\n%s" % (req, add_msg)) | ||
| 263 | return cls( | ||
| 264 | req, comes_from, link=link, markers=markers, | ||
| 265 | isolated=isolated, | ||
| 266 | options=options if options else {}, | ||
| 267 | wheel_cache=wheel_cache, | ||
| 268 | constraint=constraint, | ||
| 269 | extras=extras, | ||
| 270 | ) | ||
| 271 | |||
| 272 | def __str__(self): | ||
| 273 | if self.req: | ||
| 274 | s = str(self.req) | ||
| 275 | if self.link: | ||
| 276 | s += ' from %s' % self.link.url | ||
| 277 | else: | ||
| 278 | s = self.link.url if self.link else None | ||
| 279 | if self.satisfied_by is not None: | ||
| 280 | s += ' in %s' % display_path(self.satisfied_by.location) | ||
| 281 | if self.comes_from: | ||
| 282 | if isinstance(self.comes_from, six.string_types): | ||
| 283 | comes_from = self.comes_from | ||
| 284 | else: | ||
| 285 | comes_from = self.comes_from.from_path() | ||
| 286 | if comes_from: | ||
| 287 | s += ' (from %s)' % comes_from | ||
| 288 | return s | ||
| 289 | |||
| 290 | def __repr__(self): | ||
| 291 | return '<%s object: %s editable=%r>' % ( | ||
| 292 | self.__class__.__name__, str(self), self.editable) | ||
| 293 | |||
| 294 | def populate_link(self, finder, upgrade, require_hashes): | ||
| 295 | """Ensure that if a link can be found for this, that it is found. | ||
| 296 | |||
| 297 | Note that self.link may still be None - if upgrade is False and the | ||
| 298 | requirement is already installed. | ||
| 299 | |||
| 300 | If require_hashes is True, don't use the wheel cache, because cached | ||
| 301 | wheels, always built locally, have different hashes than the files | ||
| 302 | downloaded from the index server and thus throw false hash mismatches. | ||
| 303 | Furthermore, cached wheels at present have nondeterministic contents due | ||
| 304 | to file modification times. | ||
| 305 | """ | ||
| 306 | if self.link is None: | ||
| 307 | self.link = finder.find_requirement(self, upgrade) | ||
| 308 | if self._wheel_cache is not None and not require_hashes: | ||
| 309 | old_link = self.link | ||
| 310 | self.link = self._wheel_cache.get(self.link, self.name) | ||
| 311 | if old_link != self.link: | ||
| 312 | logger.debug('Using cached wheel link: %s', self.link) | ||
| 313 | |||
| 314 | @property | ||
| 315 | def specifier(self): | ||
| 316 | return self.req.specifier | ||
| 317 | |||
| 318 | @property | ||
| 319 | def is_pinned(self): | ||
| 320 | """Return whether I am pinned to an exact version. | ||
| 321 | |||
| 322 | For example, some-package==1.2 is pinned; some-package>1.2 is not. | ||
| 323 | """ | ||
| 324 | specifiers = self.specifier | ||
| 325 | return (len(specifiers) == 1 and | ||
| 326 | next(iter(specifiers)).operator in {'==', '==='}) | ||
| 327 | |||
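The check reduces to one question asked of packaging's `SpecifierSet`; the standalone `packaging` distribution exposes the same API as the copy pip vendors, so it can be exercised directly:

```python
from packaging.specifiers import SpecifierSet

def is_pinned(spec):
    specifiers = SpecifierSet(spec)
    return (len(specifiers) == 1 and
            next(iter(specifiers)).operator in {'==', '==='})

print(is_pinned('==1.2'))        # True
print(is_pinned('>=1.2'))        # False
print(is_pinned('>=1.2,<2.0'))   # False: two specifiers
```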
| 328 | def from_path(self): | ||
| 329 | if self.req is None: | ||
| 330 | return None | ||
| 331 | s = str(self.req) | ||
| 332 | if self.comes_from: | ||
| 333 | if isinstance(self.comes_from, six.string_types): | ||
| 334 | comes_from = self.comes_from | ||
| 335 | else: | ||
| 336 | comes_from = self.comes_from.from_path() | ||
| 337 | if comes_from: | ||
| 338 | s += '->' + comes_from | ||
| 339 | return s | ||
| 340 | |||
| 341 | def build_location(self, build_dir): | ||
| 342 | assert build_dir is not None | ||
| 343 | if self._temp_build_dir.path is not None: | ||
| 344 | return self._temp_build_dir.path | ||
| 345 | if self.req is None: | ||
| 346 | # for requirement via a path to a directory: the name of the | ||
| 347 | # package is not available yet so we create a temp directory | ||
| 348 | # Once run_egg_info has run, we'll be able | ||
| 349 | # to fix it via _correct_build_location | ||
| 350 | # Some systems have /tmp as a symlink which confuses custom | ||
| 351 | # builds (such as numpy). Thus, we ensure that the real path | ||
| 352 | # is returned. | ||
| 353 | self._temp_build_dir.create() | ||
| 354 | self._ideal_build_dir = build_dir | ||
| 355 | |||
| 356 | return self._temp_build_dir.path | ||
| 357 | if self.editable: | ||
| 358 | name = self.name.lower() | ||
| 359 | else: | ||
| 360 | name = self.name | ||
| 361 | # FIXME: Is there a better place to create the build_dir? (hg and bzr | ||
| 362 | # need this) | ||
| 363 | if not os.path.exists(build_dir): | ||
| 364 | logger.debug('Creating directory %s', build_dir) | ||
| 365 | _make_build_dir(build_dir) | ||
| 366 | return os.path.join(build_dir, name) | ||
| 367 | |||
| 368 | def _correct_build_location(self): | ||
| 369 | """Move self._temp_build_dir to self._ideal_build_dir/self.req.name | ||
| 370 | |||
| 371 | For some requirements (e.g. a path to a directory), the name of the | ||
| 372 | package is not available until we run egg_info, so the build_location | ||
| 373 | will return a temporary directory and store the _ideal_build_dir. | ||
| 374 | |||
| 375 | This is only called by self.egg_info_path to fix the temporary build | ||
| 376 | directory. | ||
| 377 | """ | ||
| 378 | if self.source_dir is not None: | ||
| 379 | return | ||
| 380 | assert self.req is not None | ||
| 381 | assert self._temp_build_dir.path | ||
| 382 | assert self._ideal_build_dir.path | ||
| 383 | old_location = self._temp_build_dir.path | ||
| 384 | self._temp_build_dir.path = None | ||
| 385 | |||
| 386 | new_location = self.build_location(self._ideal_build_dir) | ||
| 387 | if os.path.exists(new_location): | ||
| 388 | raise InstallationError( | ||
| 389 | 'A package already exists in %s; please remove it to continue' | ||
| 390 | % display_path(new_location)) | ||
| 391 | logger.debug( | ||
| 392 | 'Moving package %s from %s to new location %s', | ||
| 393 | self, display_path(old_location), display_path(new_location), | ||
| 394 | ) | ||
| 395 | shutil.move(old_location, new_location) | ||
| 396 | self._temp_build_dir.path = new_location | ||
| 397 | self._ideal_build_dir = None | ||
| 398 | self.source_dir = os.path.normpath(os.path.abspath(new_location)) | ||
| 399 | self._egg_info_path = None | ||
| 400 | |||
| 401 | @property | ||
| 402 | def name(self): | ||
| 403 | if self.req is None: | ||
| 404 | return None | ||
| 405 | return native_str(pkg_resources.safe_name(self.req.name)) | ||
| 406 | |||
| 407 | @property | ||
| 408 | def setup_py_dir(self): | ||
| 409 | return os.path.join( | ||
| 410 | self.source_dir, | ||
| 411 | self.link and self.link.subdirectory_fragment or '') | ||
| 412 | |||
| 413 | @property | ||
| 414 | def setup_py(self): | ||
| 415 | assert self.source_dir, "No source dir for %s" % self | ||
| 416 | |||
| 417 | setup_py = os.path.join(self.setup_py_dir, 'setup.py') | ||
| 418 | |||
| 419 | # Python2 __file__ should not be unicode | ||
| 420 | if six.PY2 and isinstance(setup_py, six.text_type): | ||
| 421 | setup_py = setup_py.encode(sys.getfilesystemencoding()) | ||
| 422 | |||
| 423 | return setup_py | ||
| 424 | |||
| 425 | @property | ||
| 426 | def pyproject_toml(self): | ||
| 427 | assert self.source_dir, "No source dir for %s" % self | ||
| 428 | |||
| 429 | pp_toml = os.path.join(self.setup_py_dir, 'pyproject.toml') | ||
| 430 | |||
| 431 | # Python2 __file__ should not be unicode | ||
| 432 | if six.PY2 and isinstance(pp_toml, six.text_type): | ||
| 433 | pp_toml = pp_toml.encode(sys.getfilesystemencoding()) | ||
| 434 | |||
| 435 | return pp_toml | ||
| 436 | |||
| 437 | def get_pep_518_info(self): | ||
| 438 | """Get a list of the packages required to build the project, if any, | ||
| 439 | and a flag indicating whether pyproject.toml is present, in which | ||
| 440 | case the build should be isolated. | ||
| 441 | |||
| 442 | Build requirements can be specified in a pyproject.toml, as described | ||
| 443 | in PEP 518. If this file exists but doesn't specify build | ||
| 444 | requirements, pip will default to installing setuptools and wheel. | ||
| 445 | """ | ||
| 446 | if os.path.isfile(self.pyproject_toml): | ||
| 447 | with open(self.pyproject_toml) as f: | ||
| 448 | pp_toml = pytoml.load(f) | ||
| 449 | build_sys = pp_toml.get('build-system', {}) | ||
| 450 | return (build_sys.get('requires', ['setuptools', 'wheel']), True) | ||
| 451 | return (['setuptools', 'wheel'], False) | ||
| 452 | |||
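A project therefore opts into build isolation simply by shipping a `pyproject.toml`, falling back to `setuptools`/`wheel` when no build requirements are declared. A sketch of the same lookup using the standalone `pytoml` package (pip reads its vendored copy; the TOML content is a made-up example):

```python
import pytoml

doc = """
[build-system]
requires = ["setuptools>=38.2", "wheel"]
"""

build_sys = pytoml.loads(doc).get('build-system', {})
print(build_sys.get('requires', ['setuptools', 'wheel']))
# -> ['setuptools>=38.2', 'wheel']

# no [build-system] table at all -> the defaults
empty = pytoml.loads("").get('build-system', {})
print(empty.get('requires', ['setuptools', 'wheel']))
# -> ['setuptools', 'wheel']
```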
| 453 | def run_egg_info(self): | ||
| 454 | assert self.source_dir | ||
| 455 | if self.name: | ||
| 456 | logger.debug( | ||
| 457 | 'Running setup.py (path:%s) egg_info for package %s', | ||
| 458 | self.setup_py, self.name, | ||
| 459 | ) | ||
| 460 | else: | ||
| 461 | logger.debug( | ||
| 462 | 'Running setup.py (path:%s) egg_info for package from %s', | ||
| 463 | self.setup_py, self.link, | ||
| 464 | ) | ||
| 465 | |||
| 466 | with indent_log(): | ||
| 467 | script = SETUPTOOLS_SHIM % self.setup_py | ||
| 468 | base_cmd = [sys.executable, '-c', script] | ||
| 469 | if self.isolated: | ||
| 470 | base_cmd += ["--no-user-cfg"] | ||
| 471 | egg_info_cmd = base_cmd + ['egg_info'] | ||
| 472 | # We can't put the .egg-info files at the root, because then the | ||
| 473 | # source code will be mistaken for an installed egg, causing | ||
| 474 | # problems | ||
| 475 | if self.editable: | ||
| 476 | egg_base_option = [] | ||
| 477 | else: | ||
| 478 | egg_info_dir = os.path.join(self.setup_py_dir, 'pip-egg-info') | ||
| 479 | ensure_dir(egg_info_dir) | ||
| 480 | egg_base_option = ['--egg-base', 'pip-egg-info'] | ||
| 481 | with self.build_env: | ||
| 482 | call_subprocess( | ||
| 483 | egg_info_cmd + egg_base_option, | ||
| 484 | cwd=self.setup_py_dir, | ||
| 485 | show_stdout=False, | ||
| 486 | command_desc='python setup.py egg_info') | ||
| 487 | |||
| 488 | if not self.req: | ||
| 489 | if isinstance(parse_version(self.pkg_info()["Version"]), Version): | ||
| 490 | op = "==" | ||
| 491 | else: | ||
| 492 | op = "===" | ||
| 493 | self.req = Requirement( | ||
| 494 | "".join([ | ||
| 495 | self.pkg_info()["Name"], | ||
| 496 | op, | ||
| 497 | self.pkg_info()["Version"], | ||
| 498 | ]) | ||
| 499 | ) | ||
| 500 | self._correct_build_location() | ||
| 501 | else: | ||
| 502 | metadata_name = canonicalize_name(self.pkg_info()["Name"]) | ||
| 503 | if canonicalize_name(self.req.name) != metadata_name: | ||
| 504 | logger.warning( | ||
| 505 | 'Running setup.py (path:%s) egg_info for package %s ' | ||
| 506 | 'produced metadata for project name %s. Fix your ' | ||
| 507 | '#egg=%s fragments.', | ||
| 508 | self.setup_py, self.name, metadata_name, self.name | ||
| 509 | ) | ||
| 510 | self.req = Requirement(metadata_name) | ||
| 511 | |||
| 512 | def egg_info_data(self, filename): | ||
| 513 | if self.satisfied_by is not None: | ||
| 514 | if not self.satisfied_by.has_metadata(filename): | ||
| 515 | return None | ||
| 516 | return self.satisfied_by.get_metadata(filename) | ||
| 517 | assert self.source_dir | ||
| 518 | filename = self.egg_info_path(filename) | ||
| 519 | if not os.path.exists(filename): | ||
| 520 | return None | ||
| 521 | data = read_text_file(filename) | ||
| 522 | return data | ||
| 523 | |||
| 524 | def egg_info_path(self, filename): | ||
| 525 | if self._egg_info_path is None: | ||
| 526 | if self.editable: | ||
| 527 | base = self.source_dir | ||
| 528 | else: | ||
| 529 | base = os.path.join(self.setup_py_dir, 'pip-egg-info') | ||
| 530 | filenames = os.listdir(base) | ||
| 531 | if self.editable: | ||
| 532 | filenames = [] | ||
| 533 | for root, dirs, files in os.walk(base): | ||
| 534 | for dir in vcs.dirnames: | ||
| 535 | if dir in dirs: | ||
| 536 | dirs.remove(dir) | ||
| 537 | # Iterate over a copy of ``dirs``, since mutating | ||
| 538 | # a list while iterating over it can cause trouble. | ||
| 539 | # (See https://github.com/pypa/pip/pull/462.) | ||
| 540 | for dir in list(dirs): | ||
| 541 | # Don't search in anything that looks like a virtualenv | ||
| 542 | # environment | ||
| 543 | if ( | ||
| 544 | os.path.lexists( | ||
| 545 | os.path.join(root, dir, 'bin', 'python') | ||
| 546 | ) or | ||
| 547 | os.path.exists( | ||
| 548 | os.path.join( | ||
| 549 | root, dir, 'Scripts', 'Python.exe' | ||
| 550 | ) | ||
| 551 | )): | ||
| 552 | dirs.remove(dir) | ||
| 553 | # Also don't search through tests | ||
| 554 | elif dir == 'test' or dir == 'tests': | ||
| 555 | dirs.remove(dir) | ||
| 556 | filenames.extend([os.path.join(root, dir) | ||
| 557 | for dir in dirs]) | ||
| 558 | filenames = [f for f in filenames if f.endswith('.egg-info')] | ||
| 559 | |||
| 560 | if not filenames: | ||
| 561 | raise InstallationError( | ||
| 562 | 'No files/directories in %s (from %s)' % (base, filename) | ||
| 563 | ) | ||
| 566 | |||
| 567 | # if we have more than one match, we pick the toplevel one. This | ||
| 568 | # can easily be the case if there is a dist folder which contains | ||
| 569 | # an extracted tarball for testing purposes. | ||
| 570 | if len(filenames) > 1: | ||
| 571 | filenames.sort( | ||
| 572 | key=lambda x: x.count(os.path.sep) + | ||
| 573 | (os.path.altsep and x.count(os.path.altsep) or 0) | ||
| 574 | ) | ||
| 575 | self._egg_info_path = os.path.join(base, filenames[0]) | ||
| 576 | return os.path.join(self._egg_info_path, filename) | ||
| 577 | |||
| 578 | def pkg_info(self): | ||
| 579 | p = FeedParser() | ||
| 580 | data = self.egg_info_data('PKG-INFO') | ||
| 581 | if not data: | ||
| 582 | logger.warning( | ||
| 583 | 'No PKG-INFO file found in %s', | ||
| 584 | display_path(self.egg_info_path('PKG-INFO')), | ||
| 585 | ) | ||
| 586 | p.feed(data or '') | ||
| 587 | return p.close() | ||
| 588 | |||
| 589 | _requirements_section_re = re.compile(r'\[(.*?)\]') | ||
| 590 | |||
| 591 | @property | ||
| 592 | def installed_version(self): | ||
| 593 | return get_installed_version(self.name) | ||
| 594 | |||
| 595 | def assert_source_matches_version(self): | ||
| 596 | assert self.source_dir | ||
| 597 | version = self.pkg_info()['version'] | ||
| 598 | if self.req.specifier and version not in self.req.specifier: | ||
| 599 | logger.warning( | ||
| 600 | 'Requested %s, but installing version %s', | ||
| 601 | self, | ||
| 602 | version, | ||
| 603 | ) | ||
| 604 | else: | ||
| 605 | logger.debug( | ||
| 606 | 'Source in %s has version %s, which satisfies requirement %s', | ||
| 607 | display_path(self.source_dir), | ||
| 608 | version, | ||
| 609 | self, | ||
| 610 | ) | ||
| 611 | |||
| 612 | def update_editable(self, obtain=True): | ||
| 613 | if not self.link: | ||
| 614 | logger.debug( | ||
| 615 | "Cannot update repository at %s; repository location is " | ||
| 616 | "unknown", | ||
| 617 | self.source_dir, | ||
| 618 | ) | ||
| 619 | return | ||
| 620 | assert self.editable | ||
| 621 | assert self.source_dir | ||
| 622 | if self.link.scheme == 'file': | ||
| 623 | # Static paths don't get updated | ||
| 624 | return | ||
| 625 | assert '+' in self.link.url, "bad url: %r" % self.link.url | ||
| 626 | if not self.update: | ||
| 627 | return | ||
| 628 | vc_type, url = self.link.url.split('+', 1) | ||
| 629 | backend = vcs.get_backend(vc_type) | ||
| 630 | if backend: | ||
| 631 | vcs_backend = backend(self.link.url) | ||
| 632 | if obtain: | ||
| 633 | vcs_backend.obtain(self.source_dir) | ||
| 634 | else: | ||
| 635 | vcs_backend.export(self.source_dir) | ||
| 636 | else: | ||
| 637 | assert 0, ( | ||
| 638 | 'Unexpected version control type (in %s): %s' | ||
| 639 | % (self.link, vc_type)) | ||
| 640 | |||
| 641 | def uninstall(self, auto_confirm=False, verbose=False, | ||
| 642 | use_user_site=False): | ||
| 643 | """ | ||
| 644 | Uninstall the distribution currently satisfying this requirement. | ||
| 645 | |||
| 646 | Prompts before removing or modifying files unless | ||
| 647 | ``auto_confirm`` is True. | ||
| 648 | |||
| 649 | Refuses to delete or modify files outside of ``sys.prefix`` - | ||
| 650 | thus uninstallation within a virtual environment can only | ||
| 651 | modify that virtual environment, even if the virtualenv is | ||
| 652 | linked to global site-packages. | ||
| 653 | |||
| 654 | """ | ||
| 655 | if not self.check_if_exists(use_user_site): | ||
| 656 | logger.warning("Skipping %s as it is not installed.", self.name) | ||
| 657 | return | ||
| 658 | dist = self.satisfied_by or self.conflicts_with | ||
| 659 | |||
| 660 | uninstalled_pathset = UninstallPathSet.from_dist(dist) | ||
| 661 | uninstalled_pathset.remove(auto_confirm, verbose) | ||
| 662 | return uninstalled_pathset | ||
| 663 | |||
| 664 | def archive(self, build_dir): | ||
| 665 | assert self.source_dir | ||
| 666 | create_archive = True | ||
| 667 | archive_name = '%s-%s.zip' % (self.name, self.pkg_info()["version"]) | ||
| 668 | archive_path = os.path.join(build_dir, archive_name) | ||
| 669 | if os.path.exists(archive_path): | ||
| 670 | response = ask_path_exists( | ||
| 671 | 'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)bort ' % | ||
| 672 | display_path(archive_path), ('i', 'w', 'b', 'a')) | ||
| 673 | if response == 'i': | ||
| 674 | create_archive = False | ||
| 675 | elif response == 'w': | ||
| 676 | logger.warning('Deleting %s', display_path(archive_path)) | ||
| 677 | os.remove(archive_path) | ||
| 678 | elif response == 'b': | ||
| 679 | dest_file = backup_dir(archive_path) | ||
| 680 | logger.warning( | ||
| 681 | 'Backing up %s to %s', | ||
| 682 | display_path(archive_path), | ||
| 683 | display_path(dest_file), | ||
| 684 | ) | ||
| 685 | shutil.move(archive_path, dest_file) | ||
| 686 | elif response == 'a': | ||
| 687 | sys.exit(-1) | ||
| 688 | if create_archive: | ||
| 689 | zip = zipfile.ZipFile( | ||
| 690 | archive_path, 'w', zipfile.ZIP_DEFLATED, | ||
| 691 | allowZip64=True | ||
| 692 | ) | ||
| 693 | dir = os.path.normcase(os.path.abspath(self.setup_py_dir)) | ||
| 694 | for dirpath, dirnames, filenames in os.walk(dir): | ||
| 695 | if 'pip-egg-info' in dirnames: | ||
| 696 | dirnames.remove('pip-egg-info') | ||
| 697 | for dirname in dirnames: | ||
| 698 | dirname = os.path.join(dirpath, dirname) | ||
| 699 | name = self._clean_zip_name(dirname, dir) | ||
| 700 | zipdir = zipfile.ZipInfo(self.name + '/' + name + '/') | ||
| 701 | zipdir.external_attr = 0x1ED << 16 # 0o755 | ||
| 702 | zip.writestr(zipdir, '') | ||
| 703 | for filename in filenames: | ||
| 704 | if filename == PIP_DELETE_MARKER_FILENAME: | ||
| 705 | continue | ||
| 706 | filename = os.path.join(dirpath, filename) | ||
| 707 | name = self._clean_zip_name(filename, dir) | ||
| 708 | zip.write(filename, self.name + '/' + name) | ||
| 709 | zip.close() | ||
| 710 | logger.info('Saved %s', display_path(archive_path)) | ||
| 711 | |||
| 712 | def _clean_zip_name(self, name, prefix): | ||
| 713 | assert name.startswith(prefix + os.path.sep), ( | ||
| 714 | "name %r doesn't start with prefix %r" % (name, prefix) | ||
| 715 | ) | ||
| 716 | name = name[len(prefix) + 1:] | ||
| 717 | name = name.replace(os.path.sep, '/') | ||
| 718 | return name | ||
| 719 | |||
| 720 | def match_markers(self, extras_requested=None): | ||
| 721 | if not extras_requested: | ||
| 722 | # Provide an extra to safely evaluate the markers | ||
| 723 | # without matching any extra | ||
| 724 | extras_requested = ('',) | ||
| 725 | if self.markers is not None: | ||
| 726 | return any( | ||
| 727 | self.markers.evaluate({'extra': extra}) | ||
| 728 | for extra in extras_requested) | ||
| 729 | else: | ||
| 730 | return True | ||
| 731 | |||
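Marker evaluation itself comes from the vendored packaging library; the standalone distribution behaves the same way, including the `('',)` placeholder trick used above to evaluate markers when no extra was requested:

```python
from packaging.markers import Marker

m = Marker('python_version >= "3" and extra == "tests"')

# the ('',) placeholder: no extra requested
print(m.evaluate({'extra': ''}))       # False
# requirement pulled in via the "tests" extra
print(m.evaluate({'extra': 'tests'}))  # True (on a Python 3 interpreter)
```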
| 732 | def install(self, install_options, global_options=None, root=None, | ||
| 733 | home=None, prefix=None, warn_script_location=True, | ||
| 734 | use_user_site=False, pycompile=True): | ||
| 735 | global_options = global_options if global_options is not None else [] | ||
| 736 | if self.editable: | ||
| 737 | self.install_editable( | ||
| 738 | install_options, global_options, prefix=prefix, | ||
| 739 | ) | ||
| 740 | return | ||
| 741 | if self.is_wheel: | ||
| 742 | version = wheel.wheel_version(self.source_dir) | ||
| 743 | wheel.check_compatibility(version, self.name) | ||
| 744 | |||
| 745 | self.move_wheel_files( | ||
| 746 | self.source_dir, root=root, prefix=prefix, home=home, | ||
| 747 | warn_script_location=warn_script_location, | ||
| 748 | use_user_site=use_user_site, pycompile=pycompile, | ||
| 749 | ) | ||
| 750 | self.install_succeeded = True | ||
| 751 | return | ||
| 752 | |||
| 753 | # Extend the list of global and install options passed on to | ||
| 754 | # the setup.py call with the ones from the requirements file. | ||
| 755 | # Options specified in requirements file override those | ||
| 756 | # specified on the command line, since the last option given | ||
| 757 | # to setup.py is the one that is used. | ||
| 758 | global_options = list(global_options) + \ | ||
| 759 | self.options.get('global_options', []) | ||
| 760 | install_options = list(install_options) + \ | ||
| 761 | self.options.get('install_options', []) | ||
| 762 | |||
| 763 | if self.isolated: | ||
| 764 | global_options = global_options + ["--no-user-cfg"] | ||
| 765 | |||
| 766 | with TempDirectory(kind="record") as temp_dir: | ||
| 767 | record_filename = os.path.join(temp_dir.path, 'install-record.txt') | ||
| 768 | install_args = self.get_install_args( | ||
| 769 | global_options, record_filename, root, prefix, pycompile, | ||
| 770 | ) | ||
| 771 | msg = 'Running setup.py install for %s' % (self.name,) | ||
| 772 | with open_spinner(msg) as spinner: | ||
| 773 | with indent_log(): | ||
| 774 | with self.build_env: | ||
| 775 | call_subprocess( | ||
| 776 | install_args + install_options, | ||
| 777 | cwd=self.setup_py_dir, | ||
| 778 | show_stdout=False, | ||
| 779 | spinner=spinner, | ||
| 780 | ) | ||
| 781 | |||
| 782 | if not os.path.exists(record_filename): | ||
| 783 | logger.debug('Record file %s not found', record_filename) | ||
| 784 | return | ||
| 785 | self.install_succeeded = True | ||
| 786 | |||
| 787 | def prepend_root(path): | ||
| 788 | if root is None or not os.path.isabs(path): | ||
| 789 | return path | ||
| 790 | else: | ||
| 791 | return change_root(root, path) | ||
| 792 | |||
| 793 | with open(record_filename) as f: | ||
| 794 | for line in f: | ||
| 795 | directory = os.path.dirname(line) | ||
| 796 | if directory.endswith('.egg-info'): | ||
| 797 | egg_info_dir = prepend_root(directory) | ||
| 798 | break | ||
| 799 | else: | ||
| 800 | logger.warning( | ||
| 801 | 'Could not find .egg-info directory in install record' | ||
| 802 | ' for %s', | ||
| 803 | self, | ||
| 804 | ) | ||
| 805 | # FIXME: put the record somewhere | ||
| 806 | # FIXME: should this be an error? | ||
| 807 | return | ||
| 808 | new_lines = [] | ||
| 809 | with open(record_filename) as f: | ||
| 810 | for line in f: | ||
| 811 | filename = line.strip() | ||
| 812 | if os.path.isdir(filename): | ||
| 813 | filename += os.path.sep | ||
| 814 | new_lines.append( | ||
| 815 | os.path.relpath(prepend_root(filename), egg_info_dir) | ||
| 816 | ) | ||
| 817 | new_lines.sort() | ||
| 818 | ensure_dir(egg_info_dir) | ||
| 819 | inst_files_path = os.path.join(egg_info_dir, 'installed-files.txt') | ||
| 820 | with open(inst_files_path, 'w') as f: | ||
| 821 | f.write('\n'.join(new_lines) + '\n') | ||
| 822 | |||
| 823 | def ensure_has_source_dir(self, parent_dir): | ||
| 824 | """Ensure that a source_dir is set. | ||
| 825 | |||
| 826 | This will create a temporary build dir if the name of the requirement | ||
| 827 | isn't known yet. | ||
| 828 | |||
| 829 | :param parent_dir: The ideal pip parent_dir for the source_dir. | ||
| 830 | Generally src_dir for editables and build_dir for sdists. | ||
| 831 | :return: self.source_dir | ||
| 832 | """ | ||
| 833 | if self.source_dir is None: | ||
| 834 | self.source_dir = self.build_location(parent_dir) | ||
| 835 | return self.source_dir | ||
| 836 | |||
| 837 | def get_install_args(self, global_options, record_filename, root, prefix, | ||
| 838 | pycompile): | ||
| 839 | install_args = [sys.executable, "-u"] | ||
| 840 | install_args.append('-c') | ||
| 841 | install_args.append(SETUPTOOLS_SHIM % self.setup_py) | ||
| 842 | install_args += list(global_options) + \ | ||
| 843 | ['install', '--record', record_filename] | ||
| 844 | install_args += ['--single-version-externally-managed'] | ||
| 845 | |||
| 846 | if root is not None: | ||
| 847 | install_args += ['--root', root] | ||
| 848 | if prefix is not None: | ||
| 849 | install_args += ['--prefix', prefix] | ||
| 850 | |||
| 851 | if pycompile: | ||
| 852 | install_args += ["--compile"] | ||
| 853 | else: | ||
| 854 | install_args += ["--no-compile"] | ||
| 855 | |||
| 856 | if running_under_virtualenv(): | ||
| 857 | py_ver_str = 'python' + sysconfig.get_python_version() | ||
| 858 | install_args += ['--install-headers', | ||
| 859 | os.path.join(sys.prefix, 'include', 'site', | ||
| 860 | py_ver_str, self.name)] | ||
| 861 | |||
| 862 | return install_args | ||
| 863 | |||
| 864 | def remove_temporary_source(self): | ||
| 865 | """Remove the source files from this requirement, if they are marked | ||
| 866 | for deletion""" | ||
| 867 | if self.source_dir and os.path.exists( | ||
| 868 | os.path.join(self.source_dir, PIP_DELETE_MARKER_FILENAME)): | ||
| 869 | logger.debug('Removing source in %s', self.source_dir) | ||
| 870 | rmtree(self.source_dir) | ||
| 871 | self.source_dir = None | ||
| 872 | self._temp_build_dir.cleanup() | ||
| 873 | self.build_env.cleanup() | ||
| 874 | |||
| 875 | def install_editable(self, install_options, | ||
| 876 | global_options=(), prefix=None): | ||
| 877 | logger.info('Running setup.py develop for %s', self.name) | ||
| 878 | |||
| 879 | if self.isolated: | ||
| 880 | global_options = list(global_options) + ["--no-user-cfg"] | ||
| 881 | |||
| 882 | if prefix: | ||
| 883 | prefix_param = ['--prefix={}'.format(prefix)] | ||
| 884 | install_options = list(install_options) + prefix_param | ||
| 885 | |||
| 886 | with indent_log(): | ||
| 887 | # FIXME: should we do --install-headers here too? | ||
| 888 | with self.build_env: | ||
| 889 | call_subprocess( | ||
| 890 | [ | ||
| 891 | sys.executable, | ||
| 892 | '-c', | ||
| 893 | SETUPTOOLS_SHIM % self.setup_py | ||
| 894 | ] + | ||
| 895 | list(global_options) + | ||
| 896 | ['develop', '--no-deps'] + | ||
| 897 | list(install_options), | ||
| 898 | |||
| 899 | cwd=self.setup_py_dir, | ||
| 900 | show_stdout=False, | ||
| 901 | ) | ||
| 902 | |||
| 903 | self.install_succeeded = True | ||
| 904 | |||
| 905 | def check_if_exists(self, use_user_site): | ||
| 906 | """Find an installed distribution that satisfies or conflicts | ||
| 907 | with this requirement, and set self.satisfied_by or | ||
| 908 | self.conflicts_with appropriately. | ||
| 909 | """ | ||
| 910 | if self.req is None: | ||
| 911 | return False | ||
| 912 | try: | ||
| 913 | # get_distribution() will resolve the entire list of requirements | ||
| 914 | # anyway, and we've already determined that we need the requirement | ||
| 915 | # in question, so strip the marker so that we don't try to | ||
| 916 | # evaluate it. | ||
| 917 | no_marker = Requirement(str(self.req)) | ||
| 918 | no_marker.marker = None | ||
| 919 | self.satisfied_by = pkg_resources.get_distribution(str(no_marker)) | ||
| 920 | if self.editable and self.satisfied_by: | ||
| 921 | self.conflicts_with = self.satisfied_by | ||
| 922 | # when installing editables, nothing pre-existing should ever | ||
| 923 | # satisfy | ||
| 924 | self.satisfied_by = None | ||
| 925 | return True | ||
| 926 | except pkg_resources.DistributionNotFound: | ||
| 927 | return False | ||
| 928 | except pkg_resources.VersionConflict: | ||
| 929 | existing_dist = pkg_resources.get_distribution( | ||
| 930 | self.req.name | ||
| 931 | ) | ||
| 932 | if use_user_site: | ||
| 933 | if dist_in_usersite(existing_dist): | ||
| 934 | self.conflicts_with = existing_dist | ||
| 935 | elif (running_under_virtualenv() and | ||
| 936 | dist_in_site_packages(existing_dist)): | ||
| 937 | raise InstallationError( | ||
| 938 | "Will not install to the user site because it will " | ||
| 939 | "lack sys.path precedence to %s in %s" % | ||
| 940 | (existing_dist.project_name, existing_dist.location) | ||
| 941 | ) | ||
| 942 | else: | ||
| 943 | self.conflicts_with = existing_dist | ||
| 944 | return True | ||
| 945 | |||
| 946 | @property | ||
| 947 | def is_wheel(self): | ||
| 948 | return self.link and self.link.is_wheel | ||
| 949 | |||
| 950 | def move_wheel_files(self, wheeldir, root=None, home=None, prefix=None, | ||
| 951 | warn_script_location=True, use_user_site=False, | ||
| 952 | pycompile=True): | ||
| 953 | move_wheel_files( | ||
| 954 | self.name, self.req, wheeldir, | ||
| 955 | user=use_user_site, | ||
| 956 | home=home, | ||
| 957 | root=root, | ||
| 958 | prefix=prefix, | ||
| 959 | pycompile=pycompile, | ||
| 960 | isolated=self.isolated, | ||
| 961 | warn_script_location=warn_script_location, | ||
| 962 | ) | ||
| 963 | |||
| 964 | def get_dist(self): | ||
| 965 | """Return a pkg_resources.Distribution built from self.egg_info_path""" | ||
| 966 | egg_info = self.egg_info_path('').rstrip(os.path.sep) | ||
| 967 | base_dir = os.path.dirname(egg_info) | ||
| 968 | metadata = pkg_resources.PathMetadata(base_dir, egg_info) | ||
| 969 | dist_name = os.path.splitext(os.path.basename(egg_info))[0] | ||
| 970 | return pkg_resources.Distribution( | ||
| 971 | os.path.dirname(egg_info), | ||
| 972 | project_name=dist_name, | ||
| 973 | metadata=metadata, | ||
| 974 | ) | ||
| 975 | |||
| 976 | @property | ||
| 977 | def has_hash_options(self): | ||
| 978 | """Return whether any known-good hashes are specified as options. | ||
| 979 | |||
| 980 | These activate --require-hashes mode; hashes specified as part of a | ||
| 981 | URL do not. | ||
| 982 | |||
| 983 | """ | ||
| 984 | return bool(self.options.get('hashes', {})) | ||
| 985 | |||
| 986 | def hashes(self, trust_internet=True): | ||
| 987 | """Return a hash-comparer that considers my option- and URL-based | ||
| 988 | hashes to be known-good. | ||
| 989 | |||
| 990 | Hashes in URLs--ones embedded in the requirements file, not ones | ||
| 991 | downloaded from an index server--are almost peers with ones from | ||
| 992 | flags. They satisfy --require-hashes (whether it was implicitly or | ||
| 993 | explicitly activated) but do not activate it. md5 and sha224 are not | ||
| 994 | allowed in flags, which should nudge people toward good algos. We | ||
| 995 | always OR all hashes together, even ones from URLs. | ||
| 996 | |||
| 997 | :param trust_internet: Whether to trust URL-based (#md5=...) hashes | ||
| 998 | downloaded from the internet, as by populate_link() | ||
| 999 | |||
| 1000 | """ | ||
| 1001 | good_hashes = self.options.get('hashes', {}).copy() | ||
| 1002 | link = self.link if trust_internet else self.original_link | ||
| 1003 | if link and link.hash: | ||
| 1004 | good_hashes.setdefault(link.hash_name, []).append(link.hash) | ||
| 1005 | return Hashes(good_hashes) | ||
| 1006 | |||
| 1007 | |||
| 1008 | def _strip_postfix(req): | ||
| 1009 | """ | ||
| 1010 | Strip req postfix ( -dev, 0.2, etc ) | ||
| 1011 | """ | ||
| 1012 | # FIXME: use package_to_requirement? | ||
| 1013 | match = re.search(r'^(.*?)(?:-dev|-\d.*)$', req) | ||
| 1014 | if match: | ||
| 1015 | # Strip off -dev, -0.2, etc. | ||
| 1016 | warnings.warn( | ||
| 1017 | "#egg cleanup for editable urls will be dropped in the future", | ||
| 1018 | RemovedInPip11Warning, | ||
| 1019 | ) | ||
| 1020 | req = match.group(1) | ||
| 1021 | return req | ||
| 1022 | |||
| 1023 | |||
| 1024 | def parse_editable(editable_req): | ||
| 1025 | """Parses an editable requirement into: | ||
| 1026 | - a requirement name | ||
| 1027 | - a URL | ||
| 1028 | - extras | ||
| 1029 | - editable options | ||
| 1030 | Accepted requirements: | ||
| 1031 | svn+http://blahblah@rev#egg=Foobar[baz]&subdirectory=version_subdir | ||
| 1032 | .[some_extra] | ||
| 1033 | """ | ||
| 1034 | |||
| 1035 | from pip._internal.index import Link | ||
| 1036 | |||
| 1037 | url = editable_req | ||
| 1038 | |||
| 1039 | # If a file path is specified with extras, strip off the extras. | ||
| 1040 | url_no_extras, extras = _strip_extras(url) | ||
| 1041 | |||
| 1042 | if os.path.isdir(url_no_extras): | ||
| 1043 | if not os.path.exists(os.path.join(url_no_extras, 'setup.py')): | ||
| 1044 | raise InstallationError( | ||
| 1045 | "Directory %r is not installable. File 'setup.py' not found." % | ||
| 1046 | url_no_extras | ||
| 1047 | ) | ||
| 1048 | # Treating it as code that has already been checked out | ||
| 1049 | url_no_extras = path_to_url(url_no_extras) | ||
| 1050 | |||
| 1051 | if url_no_extras.lower().startswith('file:'): | ||
| 1052 | package_name = Link(url_no_extras).egg_fragment | ||
| 1053 | if extras: | ||
| 1054 | return ( | ||
| 1055 | package_name, | ||
| 1056 | url_no_extras, | ||
| 1057 | Requirement("placeholder" + extras.lower()).extras, | ||
| 1058 | ) | ||
| 1059 | else: | ||
| 1060 | return package_name, url_no_extras, None | ||
| 1061 | |||
| 1062 | for version_control in vcs: | ||
| 1063 | if url.lower().startswith('%s:' % version_control): | ||
| 1064 | url = '%s+%s' % (version_control, url) | ||
| 1065 | break | ||
| 1066 | |||
| 1067 | if '+' not in url: | ||
| 1068 | raise InstallationError( | ||
| 1069 | '%s should either be a path to a local project or a VCS url ' | ||
| 1070 | 'beginning with svn+, git+, hg+, or bzr+' % | ||
| 1071 | editable_req | ||
| 1072 | ) | ||
| 1073 | |||
| 1074 | vc_type = url.split('+', 1)[0].lower() | ||
| 1075 | |||
| 1076 | if not vcs.get_backend(vc_type): | ||
| 1077 | error_message = 'For --editable=%s only ' % editable_req + \ | ||
| 1078 | ', '.join([backend.name + '+URL' for backend in vcs.backends]) + \ | ||
| 1079 | ' is currently supported' | ||
| 1080 | raise InstallationError(error_message) | ||
| 1081 | |||
| 1082 | package_name = Link(url).egg_fragment | ||
| 1083 | if not package_name: | ||
| 1084 | raise InstallationError( | ||
| 1085 | "Could not detect requirement name for '%s', please specify one " | ||
| 1086 | "with #egg=your_package_name" % editable_req | ||
| 1087 | ) | ||
| 1088 | return _strip_postfix(package_name), url, None | ||
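For a typical VCS editable, the function resolves roughly as follows (a usage sketch; the printed values assume this module's Link and vcs machinery are available):

    name, url, extras = parse_editable(
        'git+https://github.com/pypa/pip.git#egg=pip'
    )
    print(name)    # 'pip'
    print(url)     # 'git+https://github.com/pypa/pip.git#egg=pip'
    print(extras)  # None (the VCS branch never returns extras)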
| 1089 | |||
| 1090 | |||
| 1091 | def deduce_helpful_msg(req): | ||
| 1092 | """Returns helpful msg in case requirements file does not exist, | ||
| 1093 | or cannot be parsed. | ||
| 1094 | |||
| 1095 | :param req: Requirements file path | ||
| 1096 | """ | ||
| 1097 | msg = "" | ||
| 1098 | if os.path.exists(req): | ||
| 1099 | msg = " It does exist." | ||
| 1100 | # Try to parse and check if it is a requirements file. | ||
| 1101 | try: | ||
| 1102 | with open(req, 'r') as fp: | ||
| 1103 | # parse first line only | ||
| 1104 | next(parse_requirements(fp.read())) | ||
| 1105 | msg += " The argument you provided " + \ | ||
| 1106 | "(%s) appears to be a" % (req) + \ | ||
| 1107 | " requirements file. If that is the" + \ | ||
| 1108 | " case, use the '-r' flag to install" + \ | ||
| 1109 | " the packages specified within it." | ||
| 1110 | except RequirementParseError: | ||
| 1111 | logger.debug("Cannot parse '%s' as requirements file" % | ||
| 1112 | (req), exc_info=1) | ||
| 1113 | else: | ||
| 1114 | msg += " File '%s' does not exist." % (req) | ||
| 1115 | return msg | ||
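This targets the common mistake of running pip install directly on a requirements file; assuming requirements.txt exists and its first line parses, the message comes out roughly as:

    print(deduce_helpful_msg("requirements.txt"))
    # " It does exist. The argument you provided (requirements.txt)
    #  appears to be a requirements file. If that is the case, use the
    #  '-r' flag to install the packages specified within it."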
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/req_set.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/req_set.py new file mode 100644 index 0000000..78b7d32 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/req_set.py | |||
| @@ -0,0 +1,164 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | |||
| 3 | import logging | ||
| 4 | from collections import OrderedDict | ||
| 5 | |||
| 6 | from pip._internal.exceptions import InstallationError | ||
| 7 | from pip._internal.utils.logging import indent_log | ||
| 8 | from pip._internal.wheel import Wheel | ||
| 9 | |||
| 10 | logger = logging.getLogger(__name__) | ||
| 11 | |||
| 12 | |||
| 13 | class RequirementSet(object): | ||
| 14 | |||
| 15 | def __init__(self, require_hashes=False): | ||
| 16 | """Create a RequirementSet. | ||
| 17 | |||
| 18 | :param wheel_cache: The pip wheel cache, for passing to | ||
| 19 | InstallRequirement. | ||
| 20 | """ | ||
| 21 | |||
| 22 | self.requirements = OrderedDict() | ||
| 23 | self.require_hashes = require_hashes | ||
| 24 | |||
| 25 | # Mapping of alias: real_name | ||
| 26 | self.requirement_aliases = {} | ||
| 27 | self.unnamed_requirements = [] | ||
| 28 | self.successfully_downloaded = [] | ||
| 29 | self.reqs_to_cleanup = [] | ||
| 30 | |||
| 31 | def __str__(self): | ||
| 32 | reqs = [req for req in self.requirements.values() | ||
| 33 | if not req.comes_from] | ||
| 34 | reqs.sort(key=lambda req: req.name.lower()) | ||
| 35 | return ' '.join([str(req.req) for req in reqs]) | ||
| 36 | |||
| 37 | def __repr__(self): | ||
| 38 | reqs = [req for req in self.requirements.values()] | ||
| 39 | reqs.sort(key=lambda req: req.name.lower()) | ||
| 40 | reqs_str = ', '.join([str(req.req) for req in reqs]) | ||
| 41 | return ('<%s object; %d requirement(s): %s>' | ||
| 42 | % (self.__class__.__name__, len(reqs), reqs_str)) | ||
| 43 | |||
| 44 | def add_requirement(self, install_req, parent_req_name=None, | ||
| 45 | extras_requested=None): | ||
| 46 | """Add install_req as a requirement to install. | ||
| 47 | |||
| 48 | :param parent_req_name: The name of the requirement that needed this | ||
| 49 | added. The name is used because when multiple unnamed requirements | ||
| 50 | resolve to the same name, we could otherwise end up with dependency | ||
| 51 | links that point outside the Requirements set. parent_req must | ||
| 52 | already be added. Note that None implies that this is a user | ||
| 53 | supplied requirement, vs an inferred one. | ||
| 54 | :param extras_requested: an iterable of extras used to evaluate the | ||
| 55 | environment markers. | ||
| 56 | :return: A two-tuple: a list of additional requirements to scan | ||
| 57 | ([] if the requirement is not applicable, else a one-element | ||
| 58 | list), and the requirement object actually added, if any. | ||
| 59 | """ | ||
| 60 | name = install_req.name | ||
| 61 | if not install_req.match_markers(extras_requested): | ||
| 62 | logger.info("Ignoring %s: markers '%s' don't match your " | ||
| 63 | "environment", install_req.name, | ||
| 64 | install_req.markers) | ||
| 65 | return [], None | ||
| 66 | |||
| 67 | # This check has to come after we filter requirements with the | ||
| 68 | # environment markers. | ||
| 69 | if install_req.link and install_req.link.is_wheel: | ||
| 70 | wheel = Wheel(install_req.link.filename) | ||
| 71 | if not wheel.supported(): | ||
| 72 | raise InstallationError( | ||
| 73 | "%s is not a supported wheel on this platform." % | ||
| 74 | wheel.filename | ||
| 75 | ) | ||
| 76 | |||
| 77 | # This next bit is really a sanity check. | ||
| 78 | assert install_req.is_direct == (parent_req_name is None), ( | ||
| 79 | "a direct req shouldn't have a parent and also, " | ||
| 80 | "a non direct req should have a parent" | ||
| 81 | ) | ||
| 82 | |||
| 83 | if not name: | ||
| 84 | # url or path requirement w/o an egg fragment | ||
| 85 | self.unnamed_requirements.append(install_req) | ||
| 86 | return [install_req], None | ||
| 87 | else: | ||
| 88 | try: | ||
| 89 | existing_req = self.get_requirement(name) | ||
| 90 | except KeyError: | ||
| 91 | existing_req = None | ||
| 92 | if (parent_req_name is None and existing_req and | ||
| 93 | not existing_req.constraint and | ||
| 94 | existing_req.extras == install_req.extras and | ||
| 95 | existing_req.req.specifier != install_req.req.specifier): | ||
| 96 | raise InstallationError( | ||
| 97 | 'Double requirement given: %s (already in %s, name=%r)' | ||
| 98 | % (install_req, existing_req, name)) | ||
| 99 | if not existing_req: | ||
| 100 | # Add requirement | ||
| 101 | self.requirements[name] = install_req | ||
| 102 | # FIXME: what about other normalizations? E.g., _ vs. -? | ||
| 103 | if name.lower() != name: | ||
| 104 | self.requirement_aliases[name.lower()] = name | ||
| 105 | result = [install_req] | ||
| 106 | else: | ||
| 107 | # Assume there's no need to scan: we've already encountered | ||
| 108 | # this requirement and scanned it. | ||
| 109 | result = [] | ||
| 110 | if not install_req.constraint and existing_req.constraint: | ||
| 111 | if (install_req.link and not (existing_req.link and | ||
| 112 | install_req.link.path == existing_req.link.path)): | ||
| 113 | self.reqs_to_cleanup.append(install_req) | ||
| 114 | raise InstallationError( | ||
| 115 | "Could not satisfy constraints for '%s': " | ||
| 116 | "installation from path or url cannot be " | ||
| 117 | "constrained to a version" % name, | ||
| 118 | ) | ||
| 119 | # If we're now installing a constraint, mark the existing | ||
| 120 | # object for real installation. | ||
| 121 | existing_req.constraint = False | ||
| 122 | existing_req.extras = tuple( | ||
| 123 | sorted(set(existing_req.extras).union( | ||
| 124 | set(install_req.extras)))) | ||
| 125 | logger.debug("Setting %s extras to: %s", | ||
| 126 | existing_req, existing_req.extras) | ||
| 127 | # And now we need to scan this. | ||
| 128 | result = [existing_req] | ||
| 129 | # Canonicalise to the already-added object for the backref | ||
| 130 | # check below. | ||
| 131 | install_req = existing_req | ||
| 132 | |||
| 133 | # We return install_req here to allow for the caller to add it to | ||
| 134 | # the dependency information for the parent package. | ||
| 135 | return result, install_req | ||
| 136 | |||
| 137 | def has_requirement(self, project_name): | ||
| 138 | name = project_name.lower() | ||
| 139 | if ((name in self.requirements and | ||
| 140 | not self.requirements[name].constraint) or | ||
| 141 | (name in self.requirement_aliases and not | ||
| 142 | self.requirements[self.requirement_aliases[name]].constraint)): | ||
| 143 | return True | ||
| 144 | return False | ||
| 145 | |||
| 146 | @property | ||
| 147 | def has_requirements(self): | ||
| 148 | return list(req for req in self.requirements.values() if not | ||
| 149 | req.constraint) or self.unnamed_requirements | ||
| 150 | |||
| 151 | def get_requirement(self, project_name): | ||
| 152 | for name in project_name, project_name.lower(): | ||
| 153 | if name in self.requirements: | ||
| 154 | return self.requirements[name] | ||
| 155 | if name in self.requirement_aliases: | ||
| 156 | return self.requirements[self.requirement_aliases[name]] | ||
| 157 | raise KeyError("No project with the name %r" % project_name) | ||
| 158 | |||
| 159 | def cleanup_files(self): | ||
| 160 | """Clean up files, remove builds.""" | ||
| 161 | logger.debug('Cleaning up...') | ||
| 162 | with indent_log(): | ||
| 163 | for req in self.reqs_to_cleanup: | ||
| 164 | req.remove_temporary_source() | ||
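A rough usage sketch of the name/alias bookkeeping above (InstallRequirement.from_line exists in this codebase, but the exact call pattern here is an assumption):

    from pip._internal.req.req_install import InstallRequirement

    rs = RequirementSet()
    req = InstallRequirement.from_line("Django==2.0")
    req.is_direct = True  # top-level reqs must be marked direct
    rs.add_requirement(req)

    print(rs.has_requirement("django"))       # True, via the alias map
    print(rs.get_requirement("DJANGO").name)  # 'Django'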
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/req_uninstall.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/req_uninstall.py new file mode 100644 index 0000000..a47520f --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/req_uninstall.py | |||
| @@ -0,0 +1,455 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | |||
| 3 | import csv | ||
| 4 | import functools | ||
| 5 | import logging | ||
| 6 | import os | ||
| 7 | import sys | ||
| 8 | import sysconfig | ||
| 9 | |||
| 10 | from pip._vendor import pkg_resources | ||
| 11 | |||
| 12 | from pip._internal.compat import WINDOWS, cache_from_source, uses_pycache | ||
| 13 | from pip._internal.exceptions import UninstallationError | ||
| 14 | from pip._internal.locations import bin_py, bin_user | ||
| 15 | from pip._internal.utils.logging import indent_log | ||
| 16 | from pip._internal.utils.misc import ( | ||
| 17 | FakeFile, ask, dist_in_usersite, dist_is_local, egg_link_path, is_local, | ||
| 18 | normalize_path, renames, | ||
| 19 | ) | ||
| 20 | from pip._internal.utils.temp_dir import TempDirectory | ||
| 21 | |||
| 22 | logger = logging.getLogger(__name__) | ||
| 23 | |||
| 24 | |||
| 25 | def _script_names(dist, script_name, is_gui): | ||
| 26 | """Create the fully qualified name of the files created by | ||
| 27 | {console,gui}_scripts for the given ``dist``. | ||
| 28 | Returns the list of file names | ||
| 29 | """ | ||
| 30 | if dist_in_usersite(dist): | ||
| 31 | bin_dir = bin_user | ||
| 32 | else: | ||
| 33 | bin_dir = bin_py | ||
| 34 | exe_name = os.path.join(bin_dir, script_name) | ||
| 35 | paths_to_remove = [exe_name] | ||
| 36 | if WINDOWS: | ||
| 37 | paths_to_remove.append(exe_name + '.exe') | ||
| 38 | paths_to_remove.append(exe_name + '.exe.manifest') | ||
| 39 | if is_gui: | ||
| 40 | paths_to_remove.append(exe_name + '-script.pyw') | ||
| 41 | else: | ||
| 42 | paths_to_remove.append(exe_name + '-script.py') | ||
| 43 | return paths_to_remove | ||
| 44 | |||
| 45 | |||
| 46 | def _unique(fn): | ||
| 47 | @functools.wraps(fn) | ||
| 48 | def unique(*args, **kw): | ||
| 49 | seen = set() | ||
| 50 | for item in fn(*args, **kw): | ||
| 51 | if item not in seen: | ||
| 52 | seen.add(item) | ||
| 53 | yield item | ||
| 54 | return unique | ||
| 55 | |||
| 56 | |||
| 57 | @_unique | ||
| 58 | def uninstallation_paths(dist): | ||
| 59 | """ | ||
| 60 | Yield all the uninstallation paths for dist based on RECORD-without-.pyc | ||
| 61 | |||
| 62 | Yield paths to all the files in RECORD. For each .py file in RECORD, add | ||
| 63 | the .pyc in the same directory. | ||
| 64 | |||
| 65 | UninstallPathSet.add() takes care of the __pycache__ .pyc. | ||
| 66 | """ | ||
| 67 | r = csv.reader(FakeFile(dist.get_metadata_lines('RECORD'))) | ||
| 68 | for row in r: | ||
| 69 | path = os.path.join(dist.location, row[0]) | ||
| 70 | yield path | ||
| 71 | if path.endswith('.py'): | ||
| 72 | dn, fn = os.path.split(path) | ||
| 73 | base = fn[:-3] | ||
| 74 | path = os.path.join(dn, base + '.pyc') | ||
| 75 | yield path | ||
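Stubbing the distribution object shows how RECORD rows expand into uninstall paths (FakeDist below is purely illustrative):

    class FakeDist(object):
        location = "/site-packages"

        def get_metadata_lines(self, name):
            # RECORD-style rows: path, hash, size
            return ["demo/__init__.py,sha256=abc,123",
                    "demo-1.0.dist-info/RECORD,,"]

    for p in uninstallation_paths(FakeDist()):
        print(p)
    # /site-packages/demo/__init__.py
    # /site-packages/demo/__init__.pyc
    # /site-packages/demo-1.0.dist-info/RECORD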
| 76 | |||
| 77 | |||
| 78 | def compact(paths): | ||
| 79 | """Compact a path set to contain the minimal number of paths | ||
| 80 | necessary to contain all paths in the set. If /a/path/ and | ||
| 81 | /a/path/to/a/file.txt are both in the set, leave only the | ||
| 82 | shorter path.""" | ||
| 83 | |||
| 84 | sep = os.path.sep | ||
| 85 | short_paths = set() | ||
| 86 | for path in sorted(paths, key=len): | ||
| 87 | should_add = any( | ||
| 88 | path.startswith(shortpath.rstrip("*")) and | ||
| 89 | path[len(shortpath.rstrip("*").rstrip(sep))] == sep | ||
| 90 | for shortpath in short_paths | ||
| 91 | ) | ||
| 92 | if not should_add: | ||
| 93 | short_paths.add(path) | ||
| 94 | return short_paths | ||
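The docstring's example can be verified directly (on a POSIX system, where os.path.sep is '/'):

    paths = {"/a/path", "/a/path/to/a/file.txt", "/other/file.txt"}
    print(sorted(compact(paths)))
    # ['/a/path', '/other/file.txt']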
| 95 | |||
| 96 | |||
| 97 | def compress_for_output_listing(paths): | ||
| 98 | """Returns a tuple of 2 sets of which paths to display to user | ||
| 99 | |||
| 100 | The first set contains paths that would be deleted. Files of a package | ||
| 101 | are not added and the top-level directory of the package has a '*' added | ||
| 102 | at the end - to signify that all of its contents are removed. | ||
| 103 | |||
| 104 | The second set contains files that would have been skipped in the above | ||
| 105 | folders. | ||
| 106 | """ | ||
| 107 | |||
| 108 | will_remove = list(paths) | ||
| 109 | will_skip = set() | ||
| 110 | |||
| 111 | # Determine folders and files | ||
| 112 | folders = set() | ||
| 113 | files = set() | ||
| 114 | for path in will_remove: | ||
| 115 | if path.endswith(".pyc"): | ||
| 116 | continue | ||
| 117 | if path.endswith("__init__.py") or ".dist-info" in path: | ||
| 118 | folders.add(os.path.dirname(path)) | ||
| 119 | files.add(path) | ||
| 120 | |||
| 121 | folders = compact(folders) | ||
| 122 | |||
| 123 | # This walks the tree using os.walk to not miss extra folders | ||
| 124 | # that might get added. | ||
| 125 | for folder in folders: | ||
| 126 | for dirpath, _, dirfiles in os.walk(folder): | ||
| 127 | for fname in dirfiles: | ||
| 128 | if fname.endswith(".pyc"): | ||
| 129 | continue | ||
| 130 | |||
| 131 | file_ = os.path.normcase(os.path.join(dirpath, fname)) | ||
| 132 | if os.path.isfile(file_) and file_ not in files: | ||
| 133 | # We are skipping this file. Add it to the set. | ||
| 134 | will_skip.add(file_) | ||
| 135 | |||
| 136 | will_remove = files | { | ||
| 137 | os.path.join(folder, "*") for folder in folders | ||
| 138 | } | ||
| 139 | |||
| 140 | return will_remove, will_skip | ||
| 141 | |||
| 142 | |||
| 143 | class UninstallPathSet(object): | ||
| 144 | """A set of file paths to be removed in the uninstallation of a | ||
| 145 | requirement.""" | ||
| 146 | def __init__(self, dist): | ||
| 147 | self.paths = set() | ||
| 148 | self._refuse = set() | ||
| 149 | self.pth = {} | ||
| 150 | self.dist = dist | ||
| 151 | self.save_dir = TempDirectory(kind="uninstall") | ||
| 152 | self._moved_paths = [] | ||
| 153 | |||
| 154 | def _permitted(self, path): | ||
| 155 | """ | ||
| 156 | Return True if the given path is one we are permitted to | ||
| 157 | remove/modify, False otherwise. | ||
| 158 | |||
| 159 | """ | ||
| 160 | return is_local(path) | ||
| 161 | |||
| 162 | def add(self, path): | ||
| 163 | head, tail = os.path.split(path) | ||
| 164 | |||
| 165 | # we normalize the head to resolve parent directory symlinks, but not | ||
| 166 | # the tail, since we only want to uninstall symlinks, not their targets | ||
| 167 | path = os.path.join(normalize_path(head), os.path.normcase(tail)) | ||
| 168 | |||
| 169 | if not os.path.exists(path): | ||
| 170 | return | ||
| 171 | if self._permitted(path): | ||
| 172 | self.paths.add(path) | ||
| 173 | else: | ||
| 174 | self._refuse.add(path) | ||
| 175 | |||
| 176 | # __pycache__ files can show up after 'installed-files.txt' is created, | ||
| 177 | # due to imports | ||
| 178 | if os.path.splitext(path)[1] == '.py' and uses_pycache: | ||
| 179 | self.add(cache_from_source(path)) | ||
| 180 | |||
| 181 | def add_pth(self, pth_file, entry): | ||
| 182 | pth_file = normalize_path(pth_file) | ||
| 183 | if self._permitted(pth_file): | ||
| 184 | if pth_file not in self.pth: | ||
| 185 | self.pth[pth_file] = UninstallPthEntries(pth_file) | ||
| 186 | self.pth[pth_file].add(entry) | ||
| 187 | else: | ||
| 188 | self._refuse.add(pth_file) | ||
| 189 | |||
| 190 | def _stash(self, path): | ||
| 191 | return os.path.join( | ||
| 192 | self.save_dir.path, os.path.splitdrive(path)[1].lstrip(os.path.sep) | ||
| 193 | ) | ||
| 194 | |||
| 195 | def remove(self, auto_confirm=False, verbose=False): | ||
| 196 | """Remove paths in ``self.paths`` with confirmation (unless | ||
| 197 | ``auto_confirm`` is True).""" | ||
| 198 | |||
| 199 | if not self.paths: | ||
| 200 | logger.info( | ||
| 201 | "Can't uninstall '%s'. No files were found to uninstall.", | ||
| 202 | self.dist.project_name, | ||
| 203 | ) | ||
| 204 | return | ||
| 205 | |||
| 206 | dist_name_version = ( | ||
| 207 | self.dist.project_name + "-" + self.dist.version | ||
| 208 | ) | ||
| 209 | logger.info('Uninstalling %s:', dist_name_version) | ||
| 210 | |||
| 211 | with indent_log(): | ||
| 212 | if auto_confirm or self._allowed_to_proceed(verbose): | ||
| 213 | self.save_dir.create() | ||
| 214 | |||
| 215 | for path in sorted(compact(self.paths)): | ||
| 216 | new_path = self._stash(path) | ||
| 217 | logger.debug('Removing file or directory %s', path) | ||
| 218 | self._moved_paths.append(path) | ||
| 219 | renames(path, new_path) | ||
| 220 | for pth in self.pth.values(): | ||
| 221 | pth.remove() | ||
| 222 | |||
| 223 | logger.info('Successfully uninstalled %s', dist_name_version) | ||
| 224 | |||
| 225 | def _allowed_to_proceed(self, verbose): | ||
| 226 | """Display which files would be deleted and prompt for confirmation | ||
| 227 | """ | ||
| 228 | |||
| 229 | def _display(msg, paths): | ||
| 230 | if not paths: | ||
| 231 | return | ||
| 232 | |||
| 233 | logger.info(msg) | ||
| 234 | with indent_log(): | ||
| 235 | for path in sorted(compact(paths)): | ||
| 236 | logger.info(path) | ||
| 237 | |||
| 238 | if not verbose: | ||
| 239 | will_remove, will_skip = compress_for_output_listing(self.paths) | ||
| 240 | else: | ||
| 241 | # In verbose mode, display all the files that are going to be | ||
| 242 | # deleted. | ||
| 243 | will_remove = list(self.paths) | ||
| 244 | will_skip = set() | ||
| 245 | |||
| 246 | _display('Would remove:', will_remove) | ||
| 247 | _display('Would not remove (might be manually added):', will_skip) | ||
| 248 | _display('Would not remove (outside of prefix):', self._refuse) | ||
| 249 | |||
| 250 | return ask('Proceed (y/n)? ', ('y', 'n')) == 'y' | ||
| 251 | |||
| 252 | def rollback(self): | ||
| 253 | """Rollback the changes previously made by remove().""" | ||
| 254 | if self.save_dir.path is None: | ||
| 255 | logger.error( | ||
| 256 | "Can't roll back %s; was not uninstalled", | ||
| 257 | self.dist.project_name, | ||
| 258 | ) | ||
| 259 | return False | ||
| 260 | logger.info('Rolling back uninstall of %s', self.dist.project_name) | ||
| 261 | for path in self._moved_paths: | ||
| 262 | tmp_path = self._stash(path) | ||
| 263 | logger.debug('Replacing %s', path) | ||
| 264 | renames(tmp_path, path) | ||
| 265 | for pth in self.pth.values(): | ||
| 266 | pth.rollback() | ||
| 267 | |||
| 268 | def commit(self): | ||
| 269 | """Remove temporary save dir: rollback will no longer be possible.""" | ||
| 270 | self.save_dir.cleanup() | ||
| 271 | self._moved_paths = [] | ||
| 272 | |||
| 273 | @classmethod | ||
| 274 | def from_dist(cls, dist): | ||
| 275 | dist_path = normalize_path(dist.location) | ||
| 276 | if not dist_is_local(dist): | ||
| 277 | logger.info( | ||
| 278 | "Not uninstalling %s at %s, outside environment %s", | ||
| 279 | dist.key, | ||
| 280 | dist_path, | ||
| 281 | sys.prefix, | ||
| 282 | ) | ||
| 283 | return cls(dist) | ||
| 284 | |||
| 285 | if dist_path in {p for p in {sysconfig.get_path("stdlib"), | ||
| 286 | sysconfig.get_path("platstdlib")} | ||
| 287 | if p}: | ||
| 288 | logger.info( | ||
| 289 | "Not uninstalling %s at %s, as it is in the standard library.", | ||
| 290 | dist.key, | ||
| 291 | dist_path, | ||
| 292 | ) | ||
| 293 | return cls(dist) | ||
| 294 | |||
| 295 | paths_to_remove = cls(dist) | ||
| 296 | develop_egg_link = egg_link_path(dist) | ||
| 297 | develop_egg_link_egg_info = '{}.egg-info'.format( | ||
| 298 | pkg_resources.to_filename(dist.project_name)) | ||
| 299 | egg_info_exists = dist.egg_info and os.path.exists(dist.egg_info) | ||
| 300 | # Special case for distutils installed package | ||
| 301 | distutils_egg_info = getattr(dist._provider, 'path', None) | ||
| 302 | |||
| 303 | # The order of these uninstall cases matters: with two installs of | ||
| 304 | # the same package, pip needs to uninstall the currently detected version | ||
| 305 | if (egg_info_exists and dist.egg_info.endswith('.egg-info') and | ||
| 306 | not dist.egg_info.endswith(develop_egg_link_egg_info)): | ||
| 307 | # if dist.egg_info.endswith(develop_egg_link_egg_info), we | ||
| 308 | # are in fact in the develop_egg_link case | ||
| 309 | paths_to_remove.add(dist.egg_info) | ||
| 310 | if dist.has_metadata('installed-files.txt'): | ||
| 311 | for installed_file in dist.get_metadata( | ||
| 312 | 'installed-files.txt').splitlines(): | ||
| 313 | path = os.path.normpath( | ||
| 314 | os.path.join(dist.egg_info, installed_file) | ||
| 315 | ) | ||
| 316 | paths_to_remove.add(path) | ||
| 317 | # FIXME: need a test for this elif block | ||
| 318 | # occurs with --single-version-externally-managed/--record outside | ||
| 319 | # of pip | ||
| 320 | elif dist.has_metadata('top_level.txt'): | ||
| 321 | if dist.has_metadata('namespace_packages.txt'): | ||
| 322 | namespaces = dist.get_metadata('namespace_packages.txt') | ||
| 323 | else: | ||
| 324 | namespaces = [] | ||
| 325 | for top_level_pkg in [ | ||
| 326 | p for p | ||
| 327 | in dist.get_metadata('top_level.txt').splitlines() | ||
| 328 | if p and p not in namespaces]: | ||
| 329 | path = os.path.join(dist.location, top_level_pkg) | ||
| 330 | paths_to_remove.add(path) | ||
| 331 | paths_to_remove.add(path + '.py') | ||
| 332 | paths_to_remove.add(path + '.pyc') | ||
| 333 | paths_to_remove.add(path + '.pyo') | ||
| 334 | |||
| 335 | elif distutils_egg_info: | ||
| 336 | raise UninstallationError( | ||
| 337 | "Cannot uninstall {!r}. It is a distutils installed project " | ||
| 338 | "and thus we cannot accurately determine which files belong " | ||
| 339 | "to it which would lead to only a partial uninstall.".format( | ||
| 340 | dist.project_name, | ||
| 341 | ) | ||
| 342 | ) | ||
| 343 | |||
| 344 | elif dist.location.endswith('.egg'): | ||
| 345 | # package installed by easy_install | ||
| 346 | # We cannot match on dist.egg_name because it can slightly vary | ||
| 347 | # i.e. setuptools-0.6c11-py2.6.egg vs setuptools-0.6rc11-py2.6.egg | ||
| 348 | paths_to_remove.add(dist.location) | ||
| 349 | easy_install_egg = os.path.split(dist.location)[1] | ||
| 350 | easy_install_pth = os.path.join(os.path.dirname(dist.location), | ||
| 351 | 'easy-install.pth') | ||
| 352 | paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg) | ||
| 353 | |||
| 354 | elif egg_info_exists and dist.egg_info.endswith('.dist-info'): | ||
| 355 | for path in uninstallation_paths(dist): | ||
| 356 | paths_to_remove.add(path) | ||
| 357 | |||
| 358 | elif develop_egg_link: | ||
| 359 | # develop egg | ||
| 360 | with open(develop_egg_link, 'r') as fh: | ||
| 361 | link_pointer = os.path.normcase(fh.readline().strip()) | ||
| 362 | assert (link_pointer == dist.location), ( | ||
| 363 | 'Egg-link %s does not match installed location of %s ' | ||
| 364 | '(at %s)' % (link_pointer, dist.project_name, dist.location) | ||
| 365 | ) | ||
| 366 | paths_to_remove.add(develop_egg_link) | ||
| 367 | easy_install_pth = os.path.join(os.path.dirname(develop_egg_link), | ||
| 368 | 'easy-install.pth') | ||
| 369 | paths_to_remove.add_pth(easy_install_pth, dist.location) | ||
| 370 | |||
| 371 | else: | ||
| 372 | logger.debug( | ||
| 373 | 'Not sure how to uninstall: %s - Check: %s', | ||
| 374 | dist, dist.location, | ||
| 375 | ) | ||
| 376 | |||
| 377 | # find scripts installed via distutils' scripts= keyword | ||
| 378 | if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'): | ||
| 379 | for script in dist.metadata_listdir('scripts'): | ||
| 380 | if dist_in_usersite(dist): | ||
| 381 | bin_dir = bin_user | ||
| 382 | else: | ||
| 383 | bin_dir = bin_py | ||
| 384 | paths_to_remove.add(os.path.join(bin_dir, script)) | ||
| 385 | if WINDOWS: | ||
| 386 | paths_to_remove.add(os.path.join(bin_dir, script) + '.bat') | ||
| 387 | |||
| 388 | # find console_scripts | ||
| 389 | _scripts_to_remove = [] | ||
| 390 | console_scripts = dist.get_entry_map(group='console_scripts') | ||
| 391 | for name in console_scripts.keys(): | ||
| 392 | _scripts_to_remove.extend(_script_names(dist, name, False)) | ||
| 393 | # find gui_scripts | ||
| 394 | gui_scripts = dist.get_entry_map(group='gui_scripts') | ||
| 395 | for name in gui_scripts.keys(): | ||
| 396 | _scripts_to_remove.extend(_script_names(dist, name, True)) | ||
| 397 | |||
| 398 | for s in _scripts_to_remove: | ||
| 399 | paths_to_remove.add(s) | ||
| 400 | |||
| 401 | return paths_to_remove | ||
| 402 | |||
| 403 | |||
| 404 | class UninstallPthEntries(object): | ||
| 405 | def __init__(self, pth_file): | ||
| 406 | if not os.path.isfile(pth_file): | ||
| 407 | raise UninstallationError( | ||
| 408 | "Cannot remove entries from nonexistent file %s" % pth_file | ||
| 409 | ) | ||
| 410 | self.file = pth_file | ||
| 411 | self.entries = set() | ||
| 412 | self._saved_lines = None | ||
| 413 | |||
| 414 | def add(self, entry): | ||
| 415 | entry = os.path.normcase(entry) | ||
| 416 | # On Windows, os.path.normcase converts the entry to use | ||
| 417 | # backslashes. This is correct for entries that describe absolute | ||
| 418 | # paths outside of site-packages, but all the others use forward | ||
| 419 | # slashes. | ||
| 420 | if WINDOWS and not os.path.splitdrive(entry)[0]: | ||
| 421 | entry = entry.replace('\\', '/') | ||
| 422 | self.entries.add(entry) | ||
| 423 | |||
| 424 | def remove(self): | ||
| 425 | logger.debug('Removing pth entries from %s:', self.file) | ||
| 426 | with open(self.file, 'rb') as fh: | ||
| 427 | # Windows uses '\r\n' line endings under Python 3, but '\n' under Python 2 | ||
| 428 | lines = fh.readlines() | ||
| 429 | self._saved_lines = lines | ||
| 430 | if any(b'\r\n' in line for line in lines): | ||
| 431 | endline = '\r\n' | ||
| 432 | else: | ||
| 433 | endline = '\n' | ||
| 434 | # handle missing trailing newline | ||
| 435 | if lines and not lines[-1].endswith(endline.encode("utf-8")): | ||
| 436 | lines[-1] = lines[-1] + endline.encode("utf-8") | ||
| 437 | for entry in self.entries: | ||
| 438 | try: | ||
| 439 | logger.debug('Removing entry: %s', entry) | ||
| 440 | lines.remove((entry + endline).encode("utf-8")) | ||
| 441 | except ValueError: | ||
| 442 | pass | ||
| 443 | with open(self.file, 'wb') as fh: | ||
| 444 | fh.writelines(lines) | ||
| 445 | |||
| 446 | def rollback(self): | ||
| 447 | if self._saved_lines is None: | ||
| 448 | logger.error( | ||
| 449 | 'Cannot roll back changes to %s, none were made', self.file | ||
| 450 | ) | ||
| 451 | return False | ||
| 452 | logger.debug('Rolling %s back to previous state', self.file) | ||
| 453 | with open(self.file, 'wb') as fh: | ||
| 454 | fh.writelines(self._saved_lines) | ||
| 455 | return True | ||
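A round-trip of the pth helper on a throwaway file (illustrative; the paths are made up):

    import os
    import tempfile

    pth = os.path.join(tempfile.mkdtemp(), "easy-install.pth")
    with open(pth, "w") as fh:
        fh.write("./demo-1.0-py3.7.egg\n/keep/this/path\n")

    entries = UninstallPthEntries(pth)
    entries.add("./demo-1.0-py3.7.egg")
    entries.remove()         # strips the matching line, saving a copy
    print(open(pth).read())  # '/keep/this/path\n'
    entries.rollback()       # writes the saved lines back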
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/resolve.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/resolve.py new file mode 100644 index 0000000..189827e --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/resolve.py | |||
| @@ -0,0 +1,354 @@ | |||
| 1 | """Dependency Resolution | ||
| 2 | |||
| 3 | The dependency resolution in pip is performed as follows: | ||
| 4 | |||
| 5 | for top-level requirements: | ||
| 6 | a. only one spec is allowed per project, whether or not it conflicts; | ||
| 7 | otherwise a "double requirement" exception is raised | ||
| 8 | b. they override sub-dependency requirements. | ||
| 9 | for sub-dependencies | ||
| 10 | a. "first found, wins" (where the order is breadth first) | ||
| 11 | """ | ||
| 12 | |||
| 13 | import logging | ||
| 14 | from collections import defaultdict | ||
| 15 | from itertools import chain | ||
| 16 | |||
| 17 | from pip._internal.exceptions import ( | ||
| 18 | BestVersionAlreadyInstalled, DistributionNotFound, HashError, HashErrors, | ||
| 19 | UnsupportedPythonVersion, | ||
| 20 | ) | ||
| 21 | |||
| 22 | from pip._internal.req.req_install import InstallRequirement | ||
| 23 | from pip._internal.utils.logging import indent_log | ||
| 24 | from pip._internal.utils.misc import dist_in_usersite, ensure_dir | ||
| 25 | from pip._internal.utils.packaging import check_dist_requires_python | ||
| 26 | |||
| 27 | logger = logging.getLogger(__name__) | ||
| 28 | |||
| 29 | |||
| 30 | class Resolver(object): | ||
| 31 | """Resolves which packages need to be installed/uninstalled to perform \ | ||
| 32 | the requested operation without breaking the requirements of any package. | ||
| 33 | """ | ||
| 34 | |||
| 35 | _allowed_strategies = {"eager", "only-if-needed", "to-satisfy-only"} | ||
| 36 | |||
| 37 | def __init__(self, preparer, session, finder, wheel_cache, use_user_site, | ||
| 38 | ignore_dependencies, ignore_installed, ignore_requires_python, | ||
| 39 | force_reinstall, isolated, upgrade_strategy): | ||
| 40 | super(Resolver, self).__init__() | ||
| 41 | assert upgrade_strategy in self._allowed_strategies | ||
| 42 | |||
| 43 | self.preparer = preparer | ||
| 44 | self.finder = finder | ||
| 45 | self.session = session | ||
| 46 | |||
| 47 | # NOTE: This would eventually be replaced with a cache that can give | ||
| 48 | # information about both sdist and wheels transparently. | ||
| 49 | self.wheel_cache = wheel_cache | ||
| 50 | |||
| 51 | self.require_hashes = None # This is set in resolve | ||
| 52 | |||
| 53 | self.upgrade_strategy = upgrade_strategy | ||
| 54 | self.force_reinstall = force_reinstall | ||
| 55 | self.isolated = isolated | ||
| 56 | self.ignore_dependencies = ignore_dependencies | ||
| 57 | self.ignore_installed = ignore_installed | ||
| 58 | self.ignore_requires_python = ignore_requires_python | ||
| 59 | self.use_user_site = use_user_site | ||
| 60 | |||
| 61 | self._discovered_dependencies = defaultdict(list) | ||
| 62 | |||
| 63 | def resolve(self, requirement_set): | ||
| 64 | """Resolve what operations need to be done | ||
| 65 | |||
| 66 | As a side-effect of this method, the packages (and their dependencies) | ||
| 67 | are downloaded, unpacked and prepared for installation. This | ||
| 68 | preparation is done by ``pip.operations.prepare``. | ||
| 69 | |||
| 70 | Once PyPI has static dependency metadata available, it would be | ||
| 71 | possible to move the preparation to become a step separated from | ||
| 72 | dependency resolution. | ||
| 73 | """ | ||
| 74 | # make the wheelhouse | ||
| 75 | if self.preparer.wheel_download_dir: | ||
| 76 | ensure_dir(self.preparer.wheel_download_dir) | ||
| 77 | |||
| 78 | # If any top-level requirement has a hash specified, enter | ||
| 79 | # hash-checking mode, which requires hashes from all. | ||
| 80 | root_reqs = ( | ||
| 81 | requirement_set.unnamed_requirements + | ||
| 82 | list(requirement_set.requirements.values()) | ||
| 83 | ) | ||
| 84 | self.require_hashes = ( | ||
| 85 | requirement_set.require_hashes or | ||
| 86 | any(req.has_hash_options for req in root_reqs) | ||
| 87 | ) | ||
| 88 | |||
| 89 | # Display where finder is looking for packages | ||
| 90 | locations = self.finder.get_formatted_locations() | ||
| 91 | if locations: | ||
| 92 | logger.info(locations) | ||
| 93 | |||
| 94 | # Actually prepare the files, and collect any exceptions. Most hash | ||
| 95 | # exceptions cannot be checked ahead of time, because | ||
| 96 | # req.populate_link() needs to be called before we can make decisions | ||
| 97 | # based on link type. | ||
| 98 | discovered_reqs = [] | ||
| 99 | hash_errors = HashErrors() | ||
| 100 | for req in chain(root_reqs, discovered_reqs): | ||
| 101 | try: | ||
| 102 | discovered_reqs.extend( | ||
| 103 | self._resolve_one(requirement_set, req) | ||
| 104 | ) | ||
| 105 | except HashError as exc: | ||
| 106 | exc.req = req | ||
| 107 | hash_errors.append(exc) | ||
| 108 | |||
| 109 | if hash_errors: | ||
| 110 | raise hash_errors | ||
| 111 | |||
| 112 | def _is_upgrade_allowed(self, req): | ||
| 113 | if self.upgrade_strategy == "to-satisfy-only": | ||
| 114 | return False | ||
| 115 | elif self.upgrade_strategy == "eager": | ||
| 116 | return True | ||
| 117 | else: | ||
| 118 | assert self.upgrade_strategy == "only-if-needed" | ||
| 119 | return req.is_direct | ||
| 120 | |||
| 121 | def _set_req_to_reinstall(self, req): | ||
| 122 | """ | ||
| 123 | Set a requirement to be installed. | ||
| 124 | """ | ||
| 125 | # Don't uninstall the conflict if doing a user install and the | ||
| 126 | # conflict is not a user install. | ||
| 127 | if not self.use_user_site or dist_in_usersite(req.satisfied_by): | ||
| 128 | req.conflicts_with = req.satisfied_by | ||
| 129 | req.satisfied_by = None | ||
| 130 | |||
| 131 | # XXX: Stop passing requirement_set for options | ||
| 132 | def _check_skip_installed(self, req_to_install): | ||
| 133 | """Check if req_to_install should be skipped. | ||
| 134 | |||
| 135 | This will check if the req is installed, and whether we should upgrade | ||
| 136 | or reinstall it, taking into account all the relevant user options. | ||
| 137 | |||
| 138 | After calling this req_to_install will only have satisfied_by set to | ||
| 139 | None if the req_to_install is to be upgraded/reinstalled etc. Any | ||
| 140 | other value will be a dist recording the current thing installed that | ||
| 141 | satisfies the requirement. | ||
| 142 | |||
| 143 | Note that for vcs urls and the like we can't assess skipping in this | ||
| 144 | routine - we simply identify that we need to pull the thing down, | ||
| 145 | then later on it is pulled down and introspected to assess upgrade/ | ||
| 146 | reinstalls etc. | ||
| 147 | |||
| 148 | :return: A text reason for why it was skipped, or None. | ||
| 149 | """ | ||
| 150 | if self.ignore_installed: | ||
| 151 | return None | ||
| 152 | |||
| 153 | req_to_install.check_if_exists(self.use_user_site) | ||
| 154 | if not req_to_install.satisfied_by: | ||
| 155 | return None | ||
| 156 | |||
| 157 | if self.force_reinstall: | ||
| 158 | self._set_req_to_reinstall(req_to_install) | ||
| 159 | return None | ||
| 160 | |||
| 161 | if not self._is_upgrade_allowed(req_to_install): | ||
| 162 | if self.upgrade_strategy == "only-if-needed": | ||
| 163 | return 'not upgraded as not directly required' | ||
| 164 | return 'already satisfied' | ||
| 165 | |||
| 166 | # Check for the possibility of an upgrade. For link-based | ||
| 167 | # requirements we have to pull the tree down and inspect to assess | ||
| 168 | # the version #, so it's handled way down. | ||
| 169 | if not req_to_install.link: | ||
| 170 | try: | ||
| 171 | self.finder.find_requirement(req_to_install, upgrade=True) | ||
| 172 | except BestVersionAlreadyInstalled: | ||
| 173 | # Then the best version is installed. | ||
| 174 | return 'already up-to-date' | ||
| 175 | except DistributionNotFound: | ||
| 176 | # No distribution found, so we squash the error. It will | ||
| 177 | # be raised again later, when we retry the install. | ||
| 178 | # Why don't we just raise here? | ||
| 179 | pass | ||
| 180 | |||
| 181 | self._set_req_to_reinstall(req_to_install) | ||
| 182 | return None | ||
| 183 | |||
| 184 | def _get_abstract_dist_for(self, req): | ||
| 185 | """Takes a InstallRequirement and returns a single AbstractDist \ | ||
| 186 | representing a prepared variant of the same. | ||
| 187 | """ | ||
| 188 | assert self.require_hashes is not None, ( | ||
| 189 | "require_hashes should have been set in Resolver.resolve()" | ||
| 190 | ) | ||
| 191 | |||
| 192 | if req.editable: | ||
| 193 | return self.preparer.prepare_editable_requirement( | ||
| 194 | req, self.require_hashes, self.use_user_site, self.finder, | ||
| 195 | ) | ||
| 196 | |||
| 197 | # satisfied_by is only evaluated by calling _check_skip_installed, | ||
| 198 | # so it must be None here. | ||
| 199 | assert req.satisfied_by is None | ||
| 200 | skip_reason = self._check_skip_installed(req) | ||
| 201 | |||
| 202 | if req.satisfied_by: | ||
| 203 | return self.preparer.prepare_installed_requirement( | ||
| 204 | req, self.require_hashes, skip_reason | ||
| 205 | ) | ||
| 206 | |||
| 207 | upgrade_allowed = self._is_upgrade_allowed(req) | ||
| 208 | abstract_dist = self.preparer.prepare_linked_requirement( | ||
| 209 | req, self.session, self.finder, upgrade_allowed, | ||
| 210 | self.require_hashes | ||
| 211 | ) | ||
| 212 | |||
| 213 | # NOTE | ||
| 214 | # The following portion is for determining if a certain package is | ||
| 215 | # going to be re-installed/upgraded or not and reporting to the user. | ||
| 216 | # This should probably get cleaned up in a future refactor. | ||
| 217 | |||
| 218 | # req.req is only available after unpack for URL packages, so | ||
| 219 | # repeat check_if_exists to support uninstall-on-upgrade | ||
| 220 | # (#14) | ||
| 221 | if not self.ignore_installed: | ||
| 222 | req.check_if_exists(self.use_user_site) | ||
| 223 | |||
| 224 | if req.satisfied_by: | ||
| 225 | should_modify = ( | ||
| 226 | self.upgrade_strategy != "to-satisfy-only" or | ||
| 227 | self.force_reinstall or | ||
| 228 | self.ignore_installed or | ||
| 229 | req.link.scheme == 'file' | ||
| 230 | ) | ||
| 231 | if should_modify: | ||
| 232 | self._set_req_to_reinstall(req) | ||
| 233 | else: | ||
| 234 | logger.info( | ||
| 235 | 'Requirement already satisfied (use --upgrade to upgrade):' | ||
| 236 | ' %s', req, | ||
| 237 | ) | ||
| 238 | |||
| 239 | return abstract_dist | ||
| 240 | |||
| 241 | def _resolve_one(self, requirement_set, req_to_install): | ||
| 242 | """Prepare a single requirements file. | ||
| 243 | |||
| 244 | :return: A list of additional InstallRequirements to also install. | ||
| 245 | """ | ||
| 246 | # Tell user what we are doing for this requirement: | ||
| 247 | # obtain (editable), skipping, processing (local url), collecting | ||
| 248 | # (remote url or package name) | ||
| 249 | if req_to_install.constraint or req_to_install.prepared: | ||
| 250 | return [] | ||
| 251 | |||
| 252 | req_to_install.prepared = True | ||
| 253 | |||
| 254 | # register tmp src for cleanup in case something goes wrong | ||
| 255 | requirement_set.reqs_to_cleanup.append(req_to_install) | ||
| 256 | |||
| 257 | abstract_dist = self._get_abstract_dist_for(req_to_install) | ||
| 258 | |||
| 259 | # Parse and return dependencies | ||
| 260 | dist = abstract_dist.dist(self.finder) | ||
| 261 | try: | ||
| 262 | check_dist_requires_python(dist) | ||
| 263 | except UnsupportedPythonVersion as err: | ||
| 264 | if self.ignore_requires_python: | ||
| 265 | logger.warning(err.args[0]) | ||
| 266 | else: | ||
| 267 | raise | ||
| 268 | |||
| 269 | more_reqs = [] | ||
| 270 | |||
| 271 | def add_req(subreq, extras_requested): | ||
| 272 | sub_install_req = InstallRequirement.from_req( | ||
| 273 | str(subreq), | ||
| 274 | req_to_install, | ||
| 275 | isolated=self.isolated, | ||
| 276 | wheel_cache=self.wheel_cache, | ||
| 277 | ) | ||
| 278 | parent_req_name = req_to_install.name | ||
| 279 | to_scan_again, add_to_parent = requirement_set.add_requirement( | ||
| 280 | sub_install_req, | ||
| 281 | parent_req_name=parent_req_name, | ||
| 282 | extras_requested=extras_requested, | ||
| 283 | ) | ||
| 284 | if parent_req_name and add_to_parent: | ||
| 285 | self._discovered_dependencies[parent_req_name].append( | ||
| 286 | add_to_parent | ||
| 287 | ) | ||
| 288 | more_reqs.extend(to_scan_again) | ||
| 289 | |||
| 290 | with indent_log(): | ||
| 291 | # We add req_to_install before its dependencies, so that we | ||
| 292 | # can refer to it when adding dependencies. | ||
| 293 | if not requirement_set.has_requirement(req_to_install.name): | ||
| 294 | # 'unnamed' requirements will get added here | ||
| 295 | req_to_install.is_direct = True | ||
| 296 | requirement_set.add_requirement( | ||
| 297 | req_to_install, parent_req_name=None, | ||
| 298 | ) | ||
| 299 | |||
| 300 | if not self.ignore_dependencies: | ||
| 301 | if req_to_install.extras: | ||
| 302 | logger.debug( | ||
| 303 | "Installing extra requirements: %r", | ||
| 304 | ','.join(req_to_install.extras), | ||
| 305 | ) | ||
| 306 | missing_requested = sorted( | ||
| 307 | set(req_to_install.extras) - set(dist.extras) | ||
| 308 | ) | ||
| 309 | for missing in missing_requested: | ||
| 310 | logger.warning( | ||
| 311 | '%s does not provide the extra \'%s\'', | ||
| 312 | dist, missing | ||
| 313 | ) | ||
| 314 | |||
| 315 | available_requested = sorted( | ||
| 316 | set(dist.extras) & set(req_to_install.extras) | ||
| 317 | ) | ||
| 318 | for subreq in dist.requires(available_requested): | ||
| 319 | add_req(subreq, extras_requested=available_requested) | ||
| 320 | |||
| 321 | if not req_to_install.editable and not req_to_install.satisfied_by: | ||
| 322 | # XXX: --no-install leads this to report 'Successfully | ||
| 323 | # downloaded' for only non-editable reqs, even though we took | ||
| 324 | # action on them. | ||
| 325 | requirement_set.successfully_downloaded.append(req_to_install) | ||
| 326 | |||
| 327 | return more_reqs | ||
| 328 | |||
| 329 | def get_installation_order(self, req_set): | ||
| 330 | """Create the installation order. | ||
| 331 | |||
| 332 | The installation order is topological - requirements are installed | ||
| 333 | before the requiring thing. We break cycles at an arbitrary point, | ||
| 334 | and make no other guarantees. | ||
| 335 | """ | ||
| 336 | # The current implementation, which we may change at any point, | ||
| 337 | # installs the user-specified things in the order given, except when | ||
| 338 | # dependencies must come earlier to achieve topological order. | ||
| 339 | order = [] | ||
| 340 | ordered_reqs = set() | ||
| 341 | |||
| 342 | def schedule(req): | ||
| 343 | if req.satisfied_by or req in ordered_reqs: | ||
| 344 | return | ||
| 345 | if req.constraint: | ||
| 346 | return | ||
| 347 | ordered_reqs.add(req) | ||
| 348 | for dep in self._discovered_dependencies[req.name]: | ||
| 349 | schedule(dep) | ||
| 350 | order.append(req) | ||
| 351 | |||
| 352 | for install_req in req_set.requirements.values(): | ||
| 353 | schedule(install_req) | ||
| 354 | return order | ||
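Stripped of pip's requirement objects, schedule() is a post-order depth-first walk: dependencies land in the list before their dependents (standalone sketch, not pip's API):

    discovered = {
        "flask": ["werkzeug", "jinja2"],
        "jinja2": ["markupsafe"],
    }
    order, seen = [], set()

    def schedule(name):
        if name in seen:
            return
        seen.add(name)
        for dep in discovered.get(name, []):
            schedule(dep)
        order.append(name)  # appended only after all its dependencies

    schedule("flask")
    print(order)  # ['werkzeug', 'markupsafe', 'jinja2', 'flask']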
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/status_codes.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/status_codes.py new file mode 100644 index 0000000..2b56931 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/status_codes.py | |||
| @@ -0,0 +1,8 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | |||
| 3 | SUCCESS = 0 | ||
| 4 | ERROR = 1 | ||
| 5 | UNKNOWN_ERROR = 2 | ||
| 6 | VIRTUALENV_NOT_FOUND = 3 | ||
| 7 | PREVIOUS_BUILD_DIR_ERROR = 4 | ||
| 8 | NO_MATCHES_FOUND = 23 | ||
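These constants are the process exit codes pip's commands hand back to the shell; schematically (run_command is a stand-in, not pip's API):

    import sys

    def run_command():
        # a real command body would return ERROR on failure, etc.
        return SUCCESS

    sys.exit(run_command())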
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/__init__.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/__init__.py | |||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/appdirs.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/appdirs.py new file mode 100644 index 0000000..0eb87ca --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/appdirs.py | |||
| @@ -0,0 +1,258 @@ | |||
| 1 | """ | ||
| 2 | This code was taken from https://github.com/ActiveState/appdirs and modified | ||
| 3 | to suit our purposes. | ||
| 4 | """ | ||
| 5 | from __future__ import absolute_import | ||
| 6 | |||
| 7 | import os | ||
| 8 | import sys | ||
| 9 | |||
| 10 | from pip._vendor.six import PY2, text_type | ||
| 11 | |||
| 12 | from pip._internal.compat import WINDOWS, expanduser | ||
| 13 | |||
| 14 | |||
| 15 | def user_cache_dir(appname): | ||
| 16 | r""" | ||
| 17 | Return full path to the user-specific cache dir for this application. | ||
| 18 | |||
| 19 | "appname" is the name of application. | ||
| 20 | |||
| 21 | Typical user cache directories are: | ||
| 22 | macOS: ~/Library/Caches/<AppName> | ||
| 23 | Unix: ~/.cache/<AppName> (XDG default) | ||
| 24 | Windows: C:\Users\<username>\AppData\Local\<AppName>\Cache | ||
| 25 | |||
| 26 | On Windows the only suggestion in the MSDN docs is that local settings go | ||
| 27 | in the `CSIDL_LOCAL_APPDATA` directory. This is identical to the | ||
| 28 | non-roaming app data dir (the default returned by `user_data_dir`). Apps | ||
| 29 | typically put cache data somewhere *under* the given dir here. Some | ||
| 30 | examples: | ||
| 31 | ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache | ||
| 32 | ...\Acme\SuperApp\Cache\1.0 | ||
| 33 | |||
| 34 | OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value. | ||
| 35 | """ | ||
| 36 | if WINDOWS: | ||
| 37 | # Get the base path | ||
| 38 | path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA")) | ||
| 39 | |||
| 40 | # When using Python 2, return paths as bytes on Windows like we do on | ||
| 41 | # other operating systems. See helper function docs for more details. | ||
| 42 | if PY2 and isinstance(path, text_type): | ||
| 43 | path = _win_path_to_bytes(path) | ||
| 44 | |||
| 45 | # Add our app name and Cache directory to it | ||
| 46 | path = os.path.join(path, appname, "Cache") | ||
| 47 | elif sys.platform == "darwin": | ||
| 48 | # Get the base path | ||
| 49 | path = expanduser("~/Library/Caches") | ||
| 50 | |||
| 51 | # Add our app name to it | ||
| 52 | path = os.path.join(path, appname) | ||
| 53 | else: | ||
| 54 | # Get the base path | ||
| 55 | path = os.getenv("XDG_CACHE_HOME", expanduser("~/.cache")) | ||
| 56 | |||
| 57 | # Add our app name to it | ||
| 58 | path = os.path.join(path, appname) | ||
| 59 | |||
| 60 | return path | ||
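For pip itself the call is user_cache_dir("pip"); typical results per platform (usernames illustrative):

    print(user_cache_dir("pip"))
    # Linux (no XDG_CACHE_HOME): /home/alice/.cache/pip
    # macOS:                     /Users/alice/Library/Caches/pip
    # Windows:                   C:\Users\alice\AppData\Local\pip\Cache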
| 61 | |||
| 62 | |||
| 63 | def user_data_dir(appname, roaming=False): | ||
| 64 | r""" | ||
| 65 | Return full path to the user-specific data dir for this application. | ||
| 66 | |||
| 67 | "appname" is the name of application. | ||
| 68 | If None, just the system directory is returned. | ||
| 69 | "roaming" (boolean, default False) can be set True to use the Windows | ||
| 70 | roaming appdata directory. That means that for users on a Windows | ||
| 71 | network setup for roaming profiles, this user data will be | ||
| 72 | sync'd on login. See | ||
| 73 | <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx> | ||
| 74 | for a discussion of issues. | ||
| 75 | |||
| 76 | Typical user data directories are: | ||
| 77 | macOS: ~/Library/Application Support/<AppName> | ||
| 78 | if it exists, else ~/.config/<AppName> | ||
| 79 | Unix: ~/.local/share/<AppName> # or in | ||
| 80 | $XDG_DATA_HOME, if defined | ||
| 81 | Win XP (not roaming): C:\Documents and Settings\<username>\Local ... | ||
| 82 | ...Settings\Application Data\<AppName> | ||
| 83 | Win XP (roaming): C:\Documents and Settings\<username>\ ... | ||
| 84 | ...Application Data\<AppName> | ||
| 85 | Win 7 (not roaming): C:\\Users\<username>\AppData\Local\<AppName> | ||
| 86 | Win 7 (roaming): C:\\Users\<username>\AppData\Roaming\<AppName> | ||
| 87 | |||
| 88 | For Unix, we follow the XDG spec and support $XDG_DATA_HOME. | ||
| 89 | That means, by default "~/.local/share/<AppName>". | ||
| 90 | """ | ||
| 91 | if WINDOWS: | ||
| 92 | const = "CSIDL_APPDATA" if roaming else "CSIDL_LOCAL_APPDATA" | ||
| 93 | path = os.path.join(os.path.normpath(_get_win_folder(const)), appname) | ||
| 94 | elif sys.platform == "darwin": | ||
| 95 | # Prefer ~/Library/Application Support if it already exists, | ||
| 96 | # otherwise fall back to ~/.config (same logic as before). | ||
| 97 | path = os.path.join( | ||
| 98 | expanduser('~/Library/Application Support/'), | ||
| 99 | appname, | ||
| 100 | ) | ||
| 101 | if not os.path.isdir(path): | ||
| 102 | path = os.path.join( | ||
| 103 | expanduser('~/.config/'), | ||
| 104 | appname, | ||
| 105 | ) | ||
| 106 | else: | ||
| 107 | path = os.path.join( | ||
| 108 | os.getenv('XDG_DATA_HOME', expanduser("~/.local/share")), | ||
| 109 | appname, | ||
| 110 | ) | ||
| 111 | |||
| 112 | return path | ||
| 113 | |||
| 114 | |||
| 115 | def user_config_dir(appname, roaming=True): | ||
| 116 | """Return full path to the user-specific config dir for this application. | ||
| 117 | |||
| 118 | "appname" is the name of application. | ||
| 119 | If None, just the system directory is returned. | ||
| 120 | "roaming" (boolean, default True) can be set False to not use the | ||
| 121 | Windows roaming appdata directory. That means that for users on a | ||
| 122 | Windows network setup for roaming profiles, this user data will be | ||
| 123 | sync'd on login. See | ||
| 124 | <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx> | ||
| 125 | for a discussion of issues. | ||
| 126 | |||
| 127 | Typical user config directories are: | ||
| 128 | macOS: same as user_data_dir | ||
| 129 | Unix: ~/.config/<AppName> | ||
| 130 | Win *: same as user_data_dir | ||
| 131 | |||
| 132 | For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME. | ||
| 133 | That means, by default "~/.config/<AppName>". | ||
| 134 | """ | ||
| 135 | if WINDOWS: | ||
| 136 | path = user_data_dir(appname, roaming=roaming) | ||
| 137 | elif sys.platform == "darwin": | ||
| 138 | path = user_data_dir(appname) | ||
| 139 | else: | ||
| 140 | path = os.getenv('XDG_CONFIG_HOME', expanduser("~/.config")) | ||
| 141 | path = os.path.join(path, appname) | ||
| 142 | |||
| 143 | return path | ||
| 144 | |||
| 145 | |||
| 146 | # for the discussion regarding site_config_dirs locations | ||
| 147 | # see <https://github.com/pypa/pip/issues/1733> | ||
| 148 | def site_config_dirs(appname): | ||
| 149 | r"""Return a list of potential user-shared config dirs for this application. | ||
| 150 | |||
| 151 | "appname" is the name of application. | ||
| 152 | |||
| 153 | Typical user config directories are: | ||
| 154 | macOS: /Library/Application Support/<AppName>/ | ||
| 155 | Unix: /etc or $XDG_CONFIG_DIRS[i]/<AppName>/ for each value in | ||
| 156 | $XDG_CONFIG_DIRS | ||
| 157 | Win XP: C:\Documents and Settings\All Users\Application ... | ||
| 158 | ...Data\<AppName>\ | ||
| 159 | Vista: (Fail! "C:\ProgramData" is a hidden *system* directory | ||
| 160 | on Vista.) | ||
| 161 | Win 7: Hidden, but writeable on Win 7: | ||
| 162 | C:\ProgramData\<AppName>\ | ||
| 163 | """ | ||
| 164 | if WINDOWS: | ||
| 165 | path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA")) | ||
| 166 | pathlist = [os.path.join(path, appname)] | ||
| 167 | elif sys.platform == 'darwin': | ||
| 168 | pathlist = [os.path.join('/Library/Application Support', appname)] | ||
| 169 | else: | ||
| 170 | # try looking in $XDG_CONFIG_DIRS | ||
| 171 | xdg_config_dirs = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg') | ||
| 172 | if xdg_config_dirs: | ||
| 173 | pathlist = [ | ||
| 174 | os.path.join(expanduser(x), appname) | ||
| 175 | for x in xdg_config_dirs.split(os.pathsep) | ||
| 176 | ] | ||
| 177 | else: | ||
| 178 | pathlist = [] | ||
| 179 | |||
| 180 | # always look in /etc directly as well | ||
| 181 | pathlist.append('/etc') | ||
| 182 | |||
| 183 | return pathlist | ||
| 184 | |||
| 185 | |||
| 186 | # -- Windows support functions -- | ||
| 187 | |||
| 188 | def _get_win_folder_from_registry(csidl_name): | ||
| 189 | """ | ||
| 190 | This is a fallback technique at best. I'm not sure if using the | ||
| 191 | registry for this guarantees us the correct answer for all CSIDL_* | ||
| 192 | names. | ||
| 193 | """ | ||
| 194 | import _winreg | ||
| 195 | |||
| 196 | shell_folder_name = { | ||
| 197 | "CSIDL_APPDATA": "AppData", | ||
| 198 | "CSIDL_COMMON_APPDATA": "Common AppData", | ||
| 199 | "CSIDL_LOCAL_APPDATA": "Local AppData", | ||
| 200 | }[csidl_name] | ||
| 201 | |||
| 202 | key = _winreg.OpenKey( | ||
| 203 | _winreg.HKEY_CURRENT_USER, | ||
| 204 | r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders" | ||
| 205 | ) | ||
| 206 | directory, _type = _winreg.QueryValueEx(key, shell_folder_name) | ||
| 207 | return directory | ||
| 208 | |||
| 209 | |||
| 210 | def _get_win_folder_with_ctypes(csidl_name): | ||
| 211 | csidl_const = { | ||
| 212 | "CSIDL_APPDATA": 26, | ||
| 213 | "CSIDL_COMMON_APPDATA": 35, | ||
| 214 | "CSIDL_LOCAL_APPDATA": 28, | ||
| 215 | }[csidl_name] | ||
| 216 | |||
| 217 | buf = ctypes.create_unicode_buffer(1024) | ||
| 218 | ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf) | ||
| 219 | |||
| 220 | # Downgrade to short path name if it has high-bit chars. See | ||
| 221 | # <http://bugs.activestate.com/show_bug.cgi?id=85099>. | ||
| 222 | has_high_char = False | ||
| 223 | for c in buf: | ||
| 224 | if ord(c) > 255: | ||
| 225 | has_high_char = True | ||
| 226 | break | ||
| 227 | if has_high_char: | ||
| 228 | buf2 = ctypes.create_unicode_buffer(1024) | ||
| 229 | if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024): | ||
| 230 | buf = buf2 | ||
| 231 | |||
| 232 | return buf.value | ||
| 233 | |||
| 234 | |||
| 235 | if WINDOWS: | ||
| 236 | try: | ||
| 237 | import ctypes | ||
| 238 | _get_win_folder = _get_win_folder_with_ctypes | ||
| 239 | except ImportError: | ||
| 240 | _get_win_folder = _get_win_folder_from_registry | ||
| 241 | |||
| 242 | |||
| 243 | def _win_path_to_bytes(path): | ||
| 244 | """Encode Windows paths to bytes. Only used on Python 2. | ||
| 245 | |||
| 246 | Motivation is to be consistent with other operating systems where paths | ||
| 247 | are also returned as bytes. This avoids problems mixing bytes and Unicode | ||
| 248 | elsewhere in the codebase. For more details and discussion see | ||
| 249 | <https://github.com/pypa/pip/issues/3463>. | ||
| 250 | |||
| 251 | If encoding using ASCII and MBCS fails, return the original Unicode path. | ||
| 252 | """ | ||
| 253 | for encoding in ('ASCII', 'MBCS'): | ||
| 254 | try: | ||
| 255 | return path.encode(encoding) | ||
| 256 | except (UnicodeEncodeError, LookupError): | ||
| 257 | pass | ||
| 258 | return path | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/deprecation.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/deprecation.py new file mode 100644 index 0000000..c0e3884 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/deprecation.py | |||
| @@ -0,0 +1,77 @@ | |||
| 1 | """ | ||
| 2 | A module that implements tooling to enable easy warnings about deprecations. | ||
| 3 | """ | ||
| 4 | from __future__ import absolute_import | ||
| 5 | |||
| 6 | import logging | ||
| 7 | import warnings | ||
| 8 | |||
| 9 | from pip._internal.utils.typing import MYPY_CHECK_RUNNING | ||
| 10 | |||
| 11 | if MYPY_CHECK_RUNNING: | ||
| 12 | from typing import Any | ||
| 13 | |||
| 14 | |||
| 15 | class PipDeprecationWarning(Warning): | ||
| 16 | pass | ||
| 17 | |||
| 18 | |||
| 19 | class Pending(object): | ||
| 20 | pass | ||
| 21 | |||
| 22 | |||
| 23 | class RemovedInPip11Warning(PipDeprecationWarning): | ||
| 24 | pass | ||
| 25 | |||
| 26 | |||
| 27 | class RemovedInPip12Warning(PipDeprecationWarning, Pending): | ||
| 28 | pass | ||
| 29 | |||
| 30 | |||
| 31 | # Warnings <-> Logging Integration | ||
| 32 | |||
| 33 | |||
| 34 | _warnings_showwarning = None # type: Any | ||
| 35 | |||
| 36 | |||
| 37 | def _showwarning(message, category, filename, lineno, file=None, line=None): | ||
| 38 | if file is not None: | ||
| 39 | if _warnings_showwarning is not None: | ||
| 40 | _warnings_showwarning( | ||
| 41 | message, category, filename, lineno, file, line, | ||
| 42 | ) | ||
| 43 | else: | ||
| 44 | if issubclass(category, PipDeprecationWarning): | ||
| 45 | # We use a specially named logger which will handle all of the | ||
| 46 | # deprecation messages for pip. | ||
| 47 | logger = logging.getLogger("pip._internal.deprecations") | ||
| 48 | |||
| 49 | # This is purposely using the % formatter here instead of letting | ||
| 50 | # the logging module handle the interpolation. This is because we | ||
| 51 | # want it to appear as if someone typed this entire message out. | ||
| 52 | log_message = "DEPRECATION: %s" % message | ||
| 53 | |||
| 54 | # PipDeprecationWarnings that are Pending still have at least 2 | ||
| 55 | # versions to go until they are removed so they can just be | ||
| 56 | # warnings. Otherwise, they will be removed in the very next | ||
| 57 | # version of pip. We want these to be more obvious so we use the | ||
| 58 | # ERROR logging level. | ||
| 59 | if issubclass(category, Pending): | ||
| 60 | logger.warning(log_message) | ||
| 61 | else: | ||
| 62 | logger.error(log_message) | ||
| 63 | else: | ||
| 64 | _warnings_showwarning( | ||
| 65 | message, category, filename, lineno, file, line, | ||
| 66 | ) | ||
| 67 | |||
| 68 | |||
| 69 | def install_warning_logger(): | ||
| 70 | # Enable our Deprecation Warnings | ||
| 71 | warnings.simplefilter("default", PipDeprecationWarning, append=True) | ||
| 72 | |||
| 73 | global _warnings_showwarning | ||
| 74 | |||
| 75 | if _warnings_showwarning is None: | ||
| 76 | _warnings_showwarning = warnings.showwarning | ||
| 77 | warnings.showwarning = _showwarning | ||
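To make the warnings-to-logging bridge concrete, here is a self-contained sketch of the same pattern (the name MyDeprecationWarning is illustrative, not from the diff): swapping warnings.showwarning means the chosen deprecation class is emitted through logging instead of written to stderr, exactly as install_warning_logger() arranges for PipDeprecationWarning.

```python
import logging
import warnings

logging.basicConfig(level=logging.DEBUG, format="%(levelname)s %(message)s")


class MyDeprecationWarning(Warning):
    pass


_orig_showwarning = warnings.showwarning


def _showwarning(message, category, filename, lineno, file=None, line=None):
    # Route our own deprecation class through logging; defer everything else
    # (and anything aimed at an explicit file) to the stock implementation.
    if file is None and issubclass(category, MyDeprecationWarning):
        logging.getLogger("deprecations").error("DEPRECATION: %s" % message)
    else:
        _orig_showwarning(message, category, filename, lineno, file, line)


warnings.simplefilter("default", MyDeprecationWarning, append=True)
warnings.showwarning = _showwarning

warnings.warn("old flag will be removed", MyDeprecationWarning)
# -> "ERROR DEPRECATION: old flag will be removed" via logging, not stderr
```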
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/encoding.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/encoding.py new file mode 100644 index 0000000..831f3f6 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/encoding.py | |||
| @@ -0,0 +1,33 @@ | |||
| 1 | import codecs | ||
| 2 | import locale | ||
| 3 | import re | ||
| 4 | import sys | ||
| 5 | |||
| 6 | BOMS = [ | ||
| 7 | (codecs.BOM_UTF8, 'utf8'), | ||
| 8 | (codecs.BOM_UTF16, 'utf16'), | ||
| 9 | (codecs.BOM_UTF16_BE, 'utf16-be'), | ||
| 10 | (codecs.BOM_UTF16_LE, 'utf16-le'), | ||
| 11 | (codecs.BOM_UTF32, 'utf32'), | ||
| 12 | (codecs.BOM_UTF32_BE, 'utf32-be'), | ||
| 13 | (codecs.BOM_UTF32_LE, 'utf32-le'), | ||
| 14 | ] | ||
| 15 | |||
| 16 | ENCODING_RE = re.compile(br'coding[:=]\s*([-\w.]+)') | ||
| 17 | |||
| 18 | |||
| 19 | def auto_decode(data): | ||
| 20 | """Check a bytes string for a BOM to correctly detect the encoding | ||
| 21 | |||
| 22 | Fall back to locale.getpreferredencoding(False), as open() does on Python 3.""" | ||

| 23 | for bom, encoding in BOMS: | ||
| 24 | if data.startswith(bom): | ||
| 25 | return data[len(bom):].decode(encoding) | ||
| 26 | # Let's check the first two lines, as in PEP 263 | ||
| 27 | for line in data.split(b'\n')[:2]: | ||
| 28 | if line[0:1] == b'#' and ENCODING_RE.search(line): | ||
| 29 | encoding = ENCODING_RE.search(line).groups()[0].decode('ascii') | ||
| 30 | return data.decode(encoding) | ||
| 31 | return data.decode( | ||
| 32 | locale.getpreferredencoding(False) or sys.getdefaultencoding(), | ||
| 33 | ) | ||
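A quick demonstration of the two detection paths above, BOM first, then a PEP 263 coding comment in the first two lines; the snippet is standalone and reuses the same regex:

```python
import codecs
import re

ENCODING_RE = re.compile(br'coding[:=]\s*([-\w.]+)')

# BOM path: strip the BOM and decode with the matching codec.
data = codecs.BOM_UTF8 + 'h\xe9llo'.encode('utf8')
print(data[len(codecs.BOM_UTF8):].decode('utf8'))   # hello with an accented e

# PEP 263 path: a coding comment in the first two lines wins.
src = b"# -*- coding: latin-1 -*-\nname = '\xe9'\n"
encoding = ENCODING_RE.search(src.split(b'\n')[0]).groups()[0].decode('ascii')
print(encoding)                                     # latin-1
print(src.decode(encoding))
```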
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/filesystem.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/filesystem.py new file mode 100644 index 0000000..94fa2c6 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/filesystem.py | |||
| @@ -0,0 +1,28 @@ | |||
| 1 | import os | ||
| 2 | import os.path | ||
| 3 | |||
| 4 | from pip._internal.compat import get_path_uid | ||
| 5 | |||
| 6 | |||
| 7 | def check_path_owner(path): | ||
| 8 | # If we don't have a way to check the effective uid of this process, then | ||
| 9 | # we'll just assume that we own the directory. | ||
| 10 | if not hasattr(os, "geteuid"): | ||
| 11 | return True | ||
| 12 | |||
| 13 | previous = None | ||
| 14 | while path != previous: | ||
| 15 | if os.path.lexists(path): | ||
| 16 | # Check if path is writable by current user. | ||
| 17 | if os.geteuid() == 0: | ||
| 18 | # Special handling for root user in order to handle properly | ||
| 19 | # cases where users use sudo without -H flag. | ||
| 20 | try: | ||
| 21 | path_uid = get_path_uid(path) | ||
| 22 | except OSError: | ||
| 23 | return False | ||
| 24 | return path_uid == 0 | ||
| 25 | else: | ||
| 26 | return os.access(path, os.W_OK) | ||
| 27 | else: | ||
| 28 | previous, path = path, os.path.dirname(path) | ||
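The subtle part of check_path_owner() is the walk toward the filesystem root until an existing ancestor is found; this hypothetical helper isolates just that loop:

```python
import os

def first_existing_ancestor(path):
    # Walk upward until we hit a path component that actually exists,
    # exactly as check_path_owner() does before testing ownership.
    previous = None
    while path != previous and not os.path.lexists(path):
        previous, path = path, os.path.dirname(path)
    return path

print(first_existing_ancestor('/tmp/definitely/missing/dir'))  # /tmp (on most systems)
```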
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/glibc.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/glibc.py new file mode 100644 index 0000000..5900a10 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/glibc.py | |||
| @@ -0,0 +1,84 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | |||
| 3 | import ctypes | ||
| 4 | import re | ||
| 5 | import warnings | ||
| 6 | |||
| 7 | |||
| 8 | def glibc_version_string(): | ||
| 9 | "Returns glibc version string, or None if not using glibc." | ||
| 10 | |||
| 11 | # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen | ||
| 12 | # manpage says, "If filename is NULL, then the returned handle is for the | ||
| 13 | # main program". This way we can let the linker do the work to figure out | ||
| 14 | # which libc our process is actually using. | ||
| 15 | process_namespace = ctypes.CDLL(None) | ||
| 16 | try: | ||
| 17 | gnu_get_libc_version = process_namespace.gnu_get_libc_version | ||
| 18 | except AttributeError: | ||
| 19 | # Symbol doesn't exist -> therefore, we are not linked to | ||
| 20 | # glibc. | ||
| 21 | return None | ||
| 22 | |||
| 23 | # Call gnu_get_libc_version, which returns a string like "2.5" | ||
| 24 | gnu_get_libc_version.restype = ctypes.c_char_p | ||
| 25 | version_str = gnu_get_libc_version() | ||
| 26 | # py2 / py3 compatibility: | ||
| 27 | if not isinstance(version_str, str): | ||
| 28 | version_str = version_str.decode("ascii") | ||
| 29 | |||
| 30 | return version_str | ||
| 31 | |||
| 32 | |||
| 33 | # Separated out from have_compatible_glibc for easier unit testing | ||
| 34 | def check_glibc_version(version_str, required_major, minimum_minor): | ||
| 35 | # Parse string and check against requested version. | ||
| 36 | # | ||
| 37 | # We use a regexp instead of str.split because we want to discard any | ||
| 38 | # random junk that might come after the minor version -- this might happen | ||
| 39 | # in patched/forked versions of glibc (e.g. Linaro's version of glibc | ||
| 40 | # uses version strings like "2.20-2014.11"). See gh-3588. | ||
| 41 | m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str) | ||
| 42 | if not m: | ||
| 43 | warnings.warn("Expected glibc version with 2 components major.minor," | ||
| 44 | " got: %s" % version_str, RuntimeWarning) | ||
| 45 | return False | ||
| 46 | return (int(m.group("major")) == required_major and | ||
| 47 | int(m.group("minor")) >= minimum_minor) | ||
| 48 | |||
| 49 | |||
| 50 | def have_compatible_glibc(required_major, minimum_minor): | ||
| 51 | version_str = glibc_version_string() | ||
| 52 | if version_str is None: | ||
| 53 | return False | ||
| 54 | return check_glibc_version(version_str, required_major, minimum_minor) | ||
| 55 | |||
| 56 | |||
| 57 | # platform.libc_ver regularly returns completely nonsensical glibc | ||
| 58 | # versions. E.g. on my computer, platform says: | ||
| 59 | # | ||
| 60 | # ~$ python2.7 -c 'import platform; print(platform.libc_ver())' | ||
| 61 | # ('glibc', '2.7') | ||
| 62 | # ~$ python3.5 -c 'import platform; print(platform.libc_ver())' | ||
| 63 | # ('glibc', '2.9') | ||
| 64 | # | ||
| 65 | # But the truth is: | ||
| 66 | # | ||
| 67 | # ~$ ldd --version | ||
| 68 | # ldd (Debian GLIBC 2.22-11) 2.22 | ||
| 69 | # | ||
| 70 | # This is unfortunate, because it means that the linehaul data on libc | ||
| 71 | # versions that was generated by pip 8.1.2 and earlier is useless and | ||
| 72 | # misleading. Solution: instead of using platform, use our code that actually | ||
| 73 | # works. | ||
| 74 | def libc_ver(): | ||
| 75 | """Try to determine the glibc version | ||
| 76 | |||
| 77 | Returns a tuple of strings (lib, version) which default to empty strings | ||
| 78 | in case the lookup fails. | ||
| 79 | """ | ||
| 80 | glibc_version = glibc_version_string() | ||
| 81 | if glibc_version is None: | ||
| 82 | return ("", "") | ||
| 83 | else: | ||
| 84 | return ("glibc", glibc_version) | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/hashes.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/hashes.py new file mode 100644 index 0000000..8cf6367 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/hashes.py | |||
| @@ -0,0 +1,94 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | |||
| 3 | import hashlib | ||
| 4 | |||
| 5 | from pip._vendor.six import iteritems, iterkeys, itervalues | ||
| 6 | |||
| 7 | from pip._internal.exceptions import ( | ||
| 8 | HashMismatch, HashMissing, InstallationError, | ||
| 9 | ) | ||
| 10 | from pip._internal.utils.misc import read_chunks | ||
| 11 | |||
| 12 | # The recommended hash algo of the moment. Change this whenever the state of | ||
| 13 | # the art changes; it won't hurt backward compatibility. | ||
| 14 | FAVORITE_HASH = 'sha256' | ||
| 15 | |||
| 16 | |||
| 17 | # Names of hashlib algorithms allowed by the --hash option and ``pip hash`` | ||
| 18 | # Currently, those are the ones at least as collision-resistant as sha256. | ||
| 19 | STRONG_HASHES = ['sha256', 'sha384', 'sha512'] | ||
| 20 | |||
| 21 | |||
| 22 | class Hashes(object): | ||
| 23 | """A wrapper that builds multiple hashes at once and checks them against | ||
| 24 | known-good values | ||
| 25 | |||
| 26 | """ | ||
| 27 | def __init__(self, hashes=None): | ||
| 28 | """ | ||
| 29 | :param hashes: A dict of algorithm names pointing to lists of allowed | ||
| 30 | hex digests | ||
| 31 | """ | ||
| 32 | self._allowed = {} if hashes is None else hashes | ||
| 33 | |||
| 34 | def check_against_chunks(self, chunks): | ||
| 35 | """Check good hashes against ones built from iterable of chunks of | ||
| 36 | data. | ||
| 37 | |||
| 38 | Raise HashMismatch if none match. | ||
| 39 | |||
| 40 | """ | ||
| 41 | gots = {} | ||
| 42 | for hash_name in iterkeys(self._allowed): | ||
| 43 | try: | ||
| 44 | gots[hash_name] = hashlib.new(hash_name) | ||
| 45 | except (ValueError, TypeError): | ||
| 46 | raise InstallationError('Unknown hash name: %s' % hash_name) | ||
| 47 | |||
| 48 | for chunk in chunks: | ||
| 49 | for hash in itervalues(gots): | ||
| 50 | hash.update(chunk) | ||
| 51 | |||
| 52 | for hash_name, got in iteritems(gots): | ||
| 53 | if got.hexdigest() in self._allowed[hash_name]: | ||
| 54 | return | ||
| 55 | self._raise(gots) | ||
| 56 | |||
| 57 | def _raise(self, gots): | ||
| 58 | raise HashMismatch(self._allowed, gots) | ||
| 59 | |||
| 60 | def check_against_file(self, file): | ||
| 61 | """Check good hashes against a file-like object | ||
| 62 | |||
| 63 | Raise HashMismatch if none match. | ||
| 64 | |||
| 65 | """ | ||
| 66 | return self.check_against_chunks(read_chunks(file)) | ||
| 67 | |||
| 68 | def check_against_path(self, path): | ||
| 69 | with open(path, 'rb') as file: | ||
| 70 | return self.check_against_file(file) | ||
| 71 | |||
| 72 | def __nonzero__(self): | ||
| 73 | """Return whether I know any known-good hashes.""" | ||
| 74 | return bool(self._allowed) | ||
| 75 | |||
| 76 | def __bool__(self): | ||
| 77 | return self.__nonzero__() | ||
| 78 | |||
| 79 | |||
| 80 | class MissingHashes(Hashes): | ||
| 81 | """A workalike for Hashes used when we're missing a hash for a requirement | ||
| 82 | |||
| 83 | It computes the actual hash of the requirement and raises a HashMissing | ||
| 84 | exception showing it to the user. | ||
| 85 | |||
| 86 | """ | ||
| 87 | def __init__(self): | ||
| 88 | """Don't offer the ``hashes`` kwarg.""" | ||
| 89 | # Pass our favorite hash in to generate a "gotten hash". With the | ||
| 90 | # empty list, it will never match, so an error will always raise. | ||
| 91 | super(MissingHashes, self).__init__(hashes={FAVORITE_HASH: []}) | ||
| 92 | |||
| 93 | def _raise(self, gots): | ||
| 94 | raise HashMissing(gots[FAVORITE_HASH].hexdigest()) | ||
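The verification in check_against_chunks() boils down to feeding every chunk into every candidate hash and comparing hexdigests against the allowlist; a minimal standalone rendition, assuming a sha256 allowlist:

```python
import hashlib

payload = b"some wheel bytes"
good = hashlib.sha256(payload).hexdigest()

# Hashes({'sha256': [good]}) would accept this payload; with a wrong digest,
# check_against_chunks() raises HashMismatch. The core comparison is just:
gots = {'sha256': hashlib.new('sha256')}
for chunk in (payload[:5], payload[5:]):   # any chunking produces the same digest
    for h in gots.values():
        h.update(chunk)
print(gots['sha256'].hexdigest() == good)  # True
```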
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/logging.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/logging.py new file mode 100644 index 0000000..1fb3e8a --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/logging.py | |||
| @@ -0,0 +1,132 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | |||
| 3 | import contextlib | ||
| 4 | import logging | ||
| 5 | import logging.handlers | ||
| 6 | import os | ||
| 7 | |||
| 8 | from pip._internal.compat import WINDOWS | ||
| 9 | from pip._internal.utils.misc import ensure_dir | ||
| 10 | |||
| 11 | try: | ||
| 12 | import threading | ||
| 13 | except ImportError: | ||
| 14 | import dummy_threading as threading # type: ignore | ||
| 15 | |||
| 16 | |||
| 17 | try: | ||
| 18 | from pip._vendor import colorama | ||
| 19 | # Lots of different errors can come from this, including SystemError and | ||
| 20 | # ImportError. | ||
| 21 | except Exception: | ||
| 22 | colorama = None | ||
| 23 | |||
| 24 | |||
| 25 | _log_state = threading.local() | ||
| 26 | _log_state.indentation = 0 | ||
| 27 | |||
| 28 | |||
| 29 | @contextlib.contextmanager | ||
| 30 | def indent_log(num=2): | ||
| 31 | """ | ||
| 32 | A context manager which will cause the log output to be indented for any | ||
| 33 | log messages emitted inside it. | ||
| 34 | """ | ||
| 35 | _log_state.indentation += num | ||
| 36 | try: | ||
| 37 | yield | ||
| 38 | finally: | ||
| 39 | _log_state.indentation -= num | ||
| 40 | |||
| 41 | |||
| 42 | def get_indentation(): | ||
| 43 | return getattr(_log_state, 'indentation', 0) | ||
| 44 | |||
| 45 | |||
| 46 | class IndentingFormatter(logging.Formatter): | ||
| 47 | |||
| 48 | def format(self, record): | ||
| 49 | """ | ||
| 50 | Calls the standard formatter, but will indent all of the log messages | ||
| 51 | by our current indentation level. | ||
| 52 | """ | ||
| 53 | formatted = logging.Formatter.format(self, record) | ||
| 54 | formatted = "".join([ | ||
| 55 | (" " * get_indentation()) + line | ||
| 56 | for line in formatted.splitlines(True) | ||
| 57 | ]) | ||
| 58 | return formatted | ||
| 59 | |||
| 60 | |||
| 61 | def _color_wrap(*colors): | ||
| 62 | def wrapped(inp): | ||
| 63 | return "".join(list(colors) + [inp, colorama.Style.RESET_ALL]) | ||
| 64 | return wrapped | ||
| 65 | |||
| 66 | |||
| 67 | class ColorizedStreamHandler(logging.StreamHandler): | ||
| 68 | |||
| 69 | # Don't build up a list of colors if we don't have colorama | ||
| 70 | if colorama: | ||
| 71 | COLORS = [ | ||
| 72 | # This needs to be in order from highest logging level to lowest. | ||
| 73 | (logging.ERROR, _color_wrap(colorama.Fore.RED)), | ||
| 74 | (logging.WARNING, _color_wrap(colorama.Fore.YELLOW)), | ||
| 75 | ] | ||
| 76 | else: | ||
| 77 | COLORS = [] | ||
| 78 | |||
| 79 | def __init__(self, stream=None, no_color=None): | ||
| 80 | logging.StreamHandler.__init__(self, stream) | ||
| 81 | self._no_color = no_color | ||
| 82 | |||
| 83 | if WINDOWS and colorama: | ||
| 84 | self.stream = colorama.AnsiToWin32(self.stream) | ||
| 85 | |||
| 86 | def should_color(self): | ||
| 87 | # Don't colorize things if we do not have colorama or if told not to | ||
| 88 | if not colorama or self._no_color: | ||
| 89 | return False | ||
| 90 | |||
| 91 | real_stream = ( | ||
| 92 | self.stream if not isinstance(self.stream, colorama.AnsiToWin32) | ||
| 93 | else self.stream.wrapped | ||
| 94 | ) | ||
| 95 | |||
| 96 | # If the stream is a tty we should color it | ||
| 97 | if hasattr(real_stream, "isatty") and real_stream.isatty(): | ||
| 98 | return True | ||
| 99 | |||
| 100 | # If we have an ANSI term we should color it | ||
| 101 | if os.environ.get("TERM") == "ANSI": | ||
| 102 | return True | ||
| 103 | |||
| 104 | # If anything else we should not color it | ||
| 105 | return False | ||
| 106 | |||
| 107 | def format(self, record): | ||
| 108 | msg = logging.StreamHandler.format(self, record) | ||
| 109 | |||
| 110 | if self.should_color(): | ||
| 111 | for level, color in self.COLORS: | ||
| 112 | if record.levelno >= level: | ||
| 113 | msg = color(msg) | ||
| 114 | break | ||
| 115 | |||
| 116 | return msg | ||
| 117 | |||
| 118 | |||
| 119 | class BetterRotatingFileHandler(logging.handlers.RotatingFileHandler): | ||
| 120 | |||
| 121 | def _open(self): | ||
| 122 | ensure_dir(os.path.dirname(self.baseFilename)) | ||
| 123 | return logging.handlers.RotatingFileHandler._open(self) | ||
| 124 | |||
| 125 | |||
| 126 | class MaxLevelFilter(logging.Filter): | ||
| 127 | |||
| 128 | def __init__(self, level): | ||
| 129 | self.level = level | ||
| 130 | |||
| 131 | def filter(self, record): | ||
| 132 | return record.levelno < self.level | ||
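A runnable miniature of the indentation machinery above (same names, trimmed to the essentials): messages logged inside indent_log() come out shifted right, which is how pip nests its 'Collecting'/'Downloading' output.

```python
import contextlib
import logging
import threading

_log_state = threading.local()
_log_state.indentation = 0

@contextlib.contextmanager
def indent_log(num=2):
    _log_state.indentation += num
    try:
        yield
    finally:
        _log_state.indentation -= num

class IndentingFormatter(logging.Formatter):
    def format(self, record):
        # Prefix every line of the formatted record with the current indent.
        formatted = logging.Formatter.format(self, record)
        indent = " " * getattr(_log_state, 'indentation', 0)
        return "".join(indent + line for line in formatted.splitlines(True))

handler = logging.StreamHandler()
handler.setFormatter(IndentingFormatter("%(message)s"))
log = logging.getLogger("demo")
log.addHandler(handler)
log.setLevel(logging.INFO)

log.info("Collecting example")
with indent_log():
    log.info("Downloading example-1.0.tar.gz")   # printed two spaces deep
```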
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/misc.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/misc.py new file mode 100644 index 0000000..db84a7c --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/misc.py | |||
| @@ -0,0 +1,851 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | |||
| 3 | import contextlib | ||
| 4 | import errno | ||
| 5 | import io | ||
| 6 | import locale | ||
| 7 | # we have a submodule named 'logging' which would shadow this if we used the | ||
| 8 | # regular name: | ||
| 9 | import logging as std_logging | ||
| 10 | import os | ||
| 11 | import posixpath | ||
| 12 | import re | ||
| 13 | import shutil | ||
| 14 | import stat | ||
| 15 | import subprocess | ||
| 16 | import sys | ||
| 17 | import tarfile | ||
| 18 | import zipfile | ||
| 19 | from collections import deque | ||
| 20 | |||
| 21 | from pip._vendor import pkg_resources | ||
| 22 | # NOTE: retrying is not annotated in typeshed as of 2017-07-17, which is | ||
| 23 | # why we ignore the type on this import. | ||
| 24 | from pip._vendor.retrying import retry # type: ignore | ||
| 25 | from pip._vendor.six import PY2 | ||
| 26 | from pip._vendor.six.moves import input | ||
| 27 | |||
| 28 | from pip._internal.compat import console_to_str, expanduser, stdlib_pkgs | ||
| 29 | from pip._internal.exceptions import InstallationError | ||
| 30 | from pip._internal.locations import ( | ||
| 31 | running_under_virtualenv, site_packages, user_site, virtualenv_no_global, | ||
| 32 | write_delete_marker_file, | ||
| 33 | ) | ||
| 34 | |||
| 35 | if PY2: | ||
| 36 | from io import BytesIO as StringIO | ||
| 37 | else: | ||
| 38 | from io import StringIO | ||
| 39 | |||
| 40 | __all__ = ['rmtree', 'display_path', 'backup_dir', | ||
| 41 | 'ask', 'splitext', | ||
| 42 | 'format_size', 'is_installable_dir', | ||
| 43 | 'is_svn_page', 'file_contents', | ||
| 44 | 'split_leading_dir', 'has_leading_dir', | ||
| 45 | 'normalize_path', | ||
| 46 | 'renames', 'get_prog', | ||
| 47 | 'unzip_file', 'untar_file', 'unpack_file', 'call_subprocess', | ||
| 48 | 'captured_stdout', 'ensure_dir', | ||
| 49 | 'ARCHIVE_EXTENSIONS', 'SUPPORTED_EXTENSIONS', | ||
| 50 | 'get_installed_version'] | ||
| 51 | |||
| 52 | |||
| 53 | logger = std_logging.getLogger(__name__) | ||
| 54 | |||
| 55 | BZ2_EXTENSIONS = ('.tar.bz2', '.tbz') | ||
| 56 | XZ_EXTENSIONS = ('.tar.xz', '.txz', '.tlz', '.tar.lz', '.tar.lzma') | ||
| 57 | ZIP_EXTENSIONS = ('.zip', '.whl') | ||
| 58 | TAR_EXTENSIONS = ('.tar.gz', '.tgz', '.tar') | ||
| 59 | ARCHIVE_EXTENSIONS = ( | ||
| 60 | ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS) | ||
| 61 | SUPPORTED_EXTENSIONS = ZIP_EXTENSIONS + TAR_EXTENSIONS | ||
| 62 | try: | ||
| 63 | import bz2 # noqa | ||
| 64 | SUPPORTED_EXTENSIONS += BZ2_EXTENSIONS | ||
| 65 | except ImportError: | ||
| 66 | logger.debug('bz2 module is not available') | ||
| 67 | |||
| 68 | try: | ||
| 69 | # Only for Python 3.3+ | ||
| 70 | import lzma # noqa | ||
| 71 | SUPPORTED_EXTENSIONS += XZ_EXTENSIONS | ||
| 72 | except ImportError: | ||
| 73 | logger.debug('lzma module is not available') | ||
| 74 | |||
| 75 | |||
| 76 | def import_or_raise(pkg_or_module_string, ExceptionType, *args, **kwargs): | ||
| 77 | try: | ||
| 78 | return __import__(pkg_or_module_string) | ||
| 79 | except ImportError: | ||
| 80 | raise ExceptionType(*args, **kwargs) | ||
| 81 | |||
| 82 | |||
| 83 | def ensure_dir(path): | ||
| 84 | """os.path.makedirs without EEXIST.""" | ||
| 85 | try: | ||
| 86 | os.makedirs(path) | ||
| 87 | except OSError as e: | ||
| 88 | if e.errno != errno.EEXIST: | ||
| 89 | raise | ||
| 90 | |||
| 91 | |||
| 92 | def get_prog(): | ||
| 93 | try: | ||
| 94 | prog = os.path.basename(sys.argv[0]) | ||
| 95 | if prog in ('__main__.py', '-c'): | ||
| 96 | return "%s -m pip" % sys.executable | ||
| 97 | else: | ||
| 98 | return prog | ||
| 99 | except (AttributeError, TypeError, IndexError): | ||
| 100 | pass | ||
| 101 | return 'pip' | ||
| 102 | |||
| 103 | |||
| 104 | # Retry every half second for up to 3 seconds | ||
| 105 | @retry(stop_max_delay=3000, wait_fixed=500) | ||
| 106 | def rmtree(dir, ignore_errors=False): | ||
| 107 | shutil.rmtree(dir, ignore_errors=ignore_errors, | ||
| 108 | onerror=rmtree_errorhandler) | ||
| 109 | |||
| 110 | |||
| 111 | def rmtree_errorhandler(func, path, exc_info): | ||
| 112 | """On Windows, the files in .svn are read-only, so when rmtree() tries to | ||
| 113 | remove them, an exception is thrown. We catch that here, remove the | ||
| 114 | read-only attribute, and hopefully continue without problems.""" | ||
| 115 | # if file type currently read only | ||
| 116 | if os.stat(path).st_mode & stat.S_IREAD: | ||
| 117 | # convert to read/write | ||
| 118 | os.chmod(path, stat.S_IWRITE) | ||
| 119 | # use the original function to repeat the operation | ||
| 120 | func(path) | ||
| 121 | return | ||
| 122 | else: | ||
| 123 | raise | ||
| 124 | |||
| 125 | |||
| 126 | def display_path(path): | ||
| 127 | """Gives the display value for a given path, making it relative to cwd | ||
| 128 | if possible.""" | ||
| 129 | path = os.path.normcase(os.path.abspath(path)) | ||
| 130 | if sys.version_info[0] == 2: | ||
| 131 | path = path.decode(sys.getfilesystemencoding(), 'replace') | ||
| 132 | path = path.encode(sys.getdefaultencoding(), 'replace') | ||
| 133 | if path.startswith(os.getcwd() + os.path.sep): | ||
| 134 | path = '.' + path[len(os.getcwd()):] | ||
| 135 | return path | ||
| 136 | |||
| 137 | |||
| 138 | def backup_dir(dir, ext='.bak'): | ||
| 139 | """Figure out the name of a directory to back up the given dir to | ||
| 140 | (adding .bak, .bak2, etc)""" | ||
| 141 | n = 1 | ||
| 142 | extension = ext | ||
| 143 | while os.path.exists(dir + extension): | ||
| 144 | n += 1 | ||
| 145 | extension = ext + str(n) | ||
| 146 | return dir + extension | ||
| 147 | |||
| 148 | |||
| 149 | def ask_path_exists(message, options): | ||
| 150 | for action in os.environ.get('PIP_EXISTS_ACTION', '').split(): | ||
| 151 | if action in options: | ||
| 152 | return action | ||
| 153 | return ask(message, options) | ||
| 154 | |||
| 155 | |||
| 156 | def ask(message, options): | ||
| 157 | """Ask the message interactively, with the given possible responses""" | ||
| 158 | while 1: | ||
| 159 | if os.environ.get('PIP_NO_INPUT'): | ||
| 160 | raise Exception( | ||
| 161 | 'No input was expected ($PIP_NO_INPUT set); question: %s' % | ||
| 162 | message | ||
| 163 | ) | ||
| 164 | response = input(message) | ||
| 165 | response = response.strip().lower() | ||
| 166 | if response not in options: | ||
| 167 | print( | ||
| 168 | 'Your response (%r) was not one of the expected responses: ' | ||
| 169 | '%s' % (response, ', '.join(options)) | ||
| 170 | ) | ||
| 171 | else: | ||
| 172 | return response | ||
| 173 | |||
| 174 | |||
| 175 | def format_size(bytes): | ||
| 176 | if bytes > 1000 * 1000: | ||
| 177 | return '%.1fMB' % (bytes / 1000.0 / 1000) | ||
| 178 | elif bytes > 10 * 1000: | ||
| 179 | return '%ikB' % (bytes / 1000) | ||
| 180 | elif bytes > 1000: | ||
| 181 | return '%.1fkB' % (bytes / 1000.0) | ||
| 182 | else: | ||
| 183 | return '%ibytes' % bytes | ||
| 184 | |||
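Worth noting: format_size() above uses decimal (SI) thresholds, so 1 MB is 1,000,000 bytes and anything over 10 kB is printed as whole kilobytes. A standalone copy shows the boundaries:

```python
def format_size(nbytes):
    # Same decimal (SI) thresholds as above; only the parameter is renamed
    # to avoid shadowing the builtin.
    if nbytes > 1000 * 1000:
        return '%.1fMB' % (nbytes / 1000.0 / 1000)
    elif nbytes > 10 * 1000:
        return '%ikB' % (nbytes / 1000)
    elif nbytes > 1000:
        return '%.1fkB' % (nbytes / 1000.0)
    return '%ibytes' % nbytes

for n in (999, 1500, 25000, 2500000):
    print(n, '->', format_size(n))
# 999 -> 999bytes, 1500 -> 1.5kB, 25000 -> 25kB, 2500000 -> 2.5MB
```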
| 185 | |||
| 186 | def is_installable_dir(path): | ||
| 187 | """Return True if `path` is a directory containing a setup.py file.""" | ||
| 188 | if not os.path.isdir(path): | ||
| 189 | return False | ||
| 190 | setup_py = os.path.join(path, 'setup.py') | ||
| 191 | if os.path.isfile(setup_py): | ||
| 192 | return True | ||
| 193 | return False | ||
| 194 | |||
| 195 | |||
| 196 | def is_svn_page(html): | ||
| 197 | """ | ||
| 198 | Returns true if the page appears to be the index page of an svn repository | ||
| 199 | """ | ||
| 200 | return (re.search(r'<title>[^<]*Revision \d+:', html) and | ||
| 201 | re.search(r'Powered by (?:<a[^>]*?>)?Subversion', html, re.I)) | ||
| 202 | |||
| 203 | |||
| 204 | def file_contents(filename): | ||
| 205 | with open(filename, 'rb') as fp: | ||
| 206 | return fp.read().decode('utf-8') | ||
| 207 | |||
| 208 | |||
| 209 | def read_chunks(file, size=io.DEFAULT_BUFFER_SIZE): | ||
| 210 | """Yield pieces of data from a file-like object until EOF.""" | ||
| 211 | while True: | ||
| 212 | chunk = file.read(size) | ||
| 213 | if not chunk: | ||
| 214 | break | ||
| 215 | yield chunk | ||
| 216 | |||
| 217 | |||
| 218 | def split_leading_dir(path): | ||
| 219 | path = path.lstrip('/').lstrip('\\') | ||
| 220 | if '/' in path and (('\\' in path and path.find('/') < path.find('\\')) or | ||
| 221 | '\\' not in path): | ||
| 222 | return path.split('/', 1) | ||
| 223 | elif '\\' in path: | ||
| 224 | return path.split('\\', 1) | ||
| 225 | else: | ||
| 226 | return path, '' | ||
| 227 | |||
| 228 | |||
| 229 | def has_leading_dir(paths): | ||
| 230 | """Returns true if all the paths have the same leading path name | ||
| 231 | (i.e., everything is in one subdirectory in an archive)""" | ||
| 232 | common_prefix = None | ||
| 233 | for path in paths: | ||
| 234 | prefix, rest = split_leading_dir(path) | ||
| 235 | if not prefix: | ||
| 236 | return False | ||
| 237 | elif common_prefix is None: | ||
| 238 | common_prefix = prefix | ||
| 239 | elif prefix != common_prefix: | ||
| 240 | return False | ||
| 241 | return True | ||
| 242 | |||
| 243 | |||
| 244 | def normalize_path(path, resolve_symlinks=True): | ||
| 245 | """ | ||
| 246 | Convert a path to its canonical, case-normalized, absolute version. | ||
| 247 | |||
| 248 | """ | ||
| 249 | path = expanduser(path) | ||
| 250 | if resolve_symlinks: | ||
| 251 | path = os.path.realpath(path) | ||
| 252 | else: | ||
| 253 | path = os.path.abspath(path) | ||
| 254 | return os.path.normcase(path) | ||
| 255 | |||
| 256 | |||
| 257 | def splitext(path): | ||
| 258 | """Like os.path.splitext, but take off .tar too""" | ||
| 259 | base, ext = posixpath.splitext(path) | ||
| 260 | if base.lower().endswith('.tar'): | ||
| 261 | ext = base[-4:] + ext | ||
| 262 | base = base[:-4] | ||
| 263 | return base, ext | ||
| 264 | |||
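The splitext() variant above exists so that compound archive suffixes stay together; a standalone copy makes the difference from os.path.splitext visible:

```python
import posixpath

def splitext(path):
    # Like os.path.splitext, but keeps '.tar' attached to the compression suffix.
    base, ext = posixpath.splitext(path)
    if base.lower().endswith('.tar'):
        ext = base[-4:] + ext
        base = base[:-4]
    return base, ext

print(splitext('pkg-1.0.tar.gz'))   # ('pkg-1.0', '.tar.gz')
print(splitext('pkg-1.0.zip'))      # ('pkg-1.0', '.zip')
```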
| 265 | |||
| 266 | def renames(old, new): | ||
| 267 | """Like os.renames(), but handles renaming across devices.""" | ||
| 268 | # Implementation borrowed from os.renames(). | ||
| 269 | head, tail = os.path.split(new) | ||
| 270 | if head and tail and not os.path.exists(head): | ||
| 271 | os.makedirs(head) | ||
| 272 | |||
| 273 | shutil.move(old, new) | ||
| 274 | |||
| 275 | head, tail = os.path.split(old) | ||
| 276 | if head and tail: | ||
| 277 | try: | ||
| 278 | os.removedirs(head) | ||
| 279 | except OSError: | ||
| 280 | pass | ||
| 281 | |||
| 282 | |||
| 283 | def is_local(path): | ||
| 284 | """ | ||
| 285 | Return True if path is within sys.prefix, if we're running in a virtualenv. | ||
| 286 | |||
| 287 | If we're not in a virtualenv, all paths are considered "local." | ||
| 288 | |||
| 289 | """ | ||
| 290 | if not running_under_virtualenv(): | ||
| 291 | return True | ||
| 292 | return normalize_path(path).startswith(normalize_path(sys.prefix)) | ||
| 293 | |||
| 294 | |||
| 295 | def dist_is_local(dist): | ||
| 296 | """ | ||
| 297 | Return True if given Distribution object is installed locally | ||
| 298 | (i.e. within current virtualenv). | ||
| 299 | |||
| 300 | Always True if we're not in a virtualenv. | ||
| 301 | |||
| 302 | """ | ||
| 303 | return is_local(dist_location(dist)) | ||
| 304 | |||
| 305 | |||
| 306 | def dist_in_usersite(dist): | ||
| 307 | """ | ||
| 308 | Return True if given Distribution is installed in user site. | ||
| 309 | """ | ||
| 310 | norm_path = normalize_path(dist_location(dist)) | ||
| 311 | return norm_path.startswith(normalize_path(user_site)) | ||
| 312 | |||
| 313 | |||
| 314 | def dist_in_site_packages(dist): | ||
| 315 | """ | ||
| 316 | Return True if given Distribution is installed in | ||
| 317 | sysconfig.get_python_lib(). | ||
| 318 | """ | ||
| 319 | return normalize_path( | ||
| 320 | dist_location(dist) | ||
| 321 | ).startswith(normalize_path(site_packages)) | ||
| 322 | |||
| 323 | |||
| 324 | def dist_is_editable(dist): | ||
| 325 | """Is distribution an editable install?""" | ||
| 326 | for path_item in sys.path: | ||
| 327 | egg_link = os.path.join(path_item, dist.project_name + '.egg-link') | ||
| 328 | if os.path.isfile(egg_link): | ||
| 329 | return True | ||
| 330 | return False | ||
| 331 | |||
| 332 | |||
| 333 | def get_installed_distributions(local_only=True, | ||
| 334 | skip=stdlib_pkgs, | ||
| 335 | include_editables=True, | ||
| 336 | editables_only=False, | ||
| 337 | user_only=False): | ||
| 338 | """ | ||
| 339 | Return a list of installed Distribution objects. | ||
| 340 | |||
| 341 | If ``local_only`` is True (default), only return installations | ||
| 342 | local to the current virtualenv, if in a virtualenv. | ||
| 343 | |||
| 344 | ``skip`` argument is an iterable of lower-case project names to | ||
| 345 | ignore; defaults to stdlib_pkgs | ||
| 346 | |||
| 347 | If ``include_editables`` is False, don't report editables. | ||
| 348 | |||
| 349 | If ``editables_only`` is True, only report editables. | ||
| 350 | |||
| 351 | If ``user_only`` is True, only report installations in the user | ||
| 352 | site directory. | ||
| 353 | |||
| 354 | """ | ||
| 355 | if local_only: | ||
| 356 | local_test = dist_is_local | ||
| 357 | else: | ||
| 358 | def local_test(d): | ||
| 359 | return True | ||
| 360 | |||
| 361 | if include_editables: | ||
| 362 | def editable_test(d): | ||
| 363 | return True | ||
| 364 | else: | ||
| 365 | def editable_test(d): | ||
| 366 | return not dist_is_editable(d) | ||
| 367 | |||
| 368 | if editables_only: | ||
| 369 | def editables_only_test(d): | ||
| 370 | return dist_is_editable(d) | ||
| 371 | else: | ||
| 372 | def editables_only_test(d): | ||
| 373 | return True | ||
| 374 | |||
| 375 | if user_only: | ||
| 376 | user_test = dist_in_usersite | ||
| 377 | else: | ||
| 378 | def user_test(d): | ||
| 379 | return True | ||
| 380 | |||
| 381 | return [d for d in pkg_resources.working_set | ||
| 382 | if local_test(d) and | ||
| 383 | d.key not in skip and | ||
| 384 | editable_test(d) and | ||
| 385 | editables_only_test(d) and | ||
| 386 | user_test(d) | ||
| 387 | ] | ||
| 388 | |||
| 389 | |||
| 390 | def egg_link_path(dist): | ||
| 391 | """ | ||
| 392 | Return the path for the .egg-link file if it exists, otherwise, None. | ||
| 393 | |||
| 394 | There are 3 scenarios: | ||
| 395 | 1) not in a virtualenv | ||
| 396 | try to find in site.USER_SITE, then site_packages | ||
| 397 | 2) in a no-global virtualenv | ||
| 398 | try to find in site_packages | ||
| 399 | 3) in a yes-global virtualenv | ||
| 400 | try to find in site_packages, then site.USER_SITE | ||
| 401 | (don't look in global location) | ||
| 402 | |||
| 403 | For #1 and #3, there could be odd cases where there's an egg-link in 2 | ||
| 404 | locations. | ||
| 405 | |||
| 406 | This method will just return the first one found. | ||
| 407 | """ | ||
| 408 | sites = [] | ||
| 409 | if running_under_virtualenv(): | ||
| 410 | if virtualenv_no_global(): | ||
| 411 | sites.append(site_packages) | ||
| 412 | else: | ||
| 413 | sites.append(site_packages) | ||
| 414 | if user_site: | ||
| 415 | sites.append(user_site) | ||
| 416 | else: | ||
| 417 | if user_site: | ||
| 418 | sites.append(user_site) | ||
| 419 | sites.append(site_packages) | ||
| 420 | |||
| 421 | for site in sites: | ||
| 422 | egglink = os.path.join(site, dist.project_name) + '.egg-link' | ||
| 423 | if os.path.isfile(egglink): | ||
| 424 | return egglink | ||
| 425 | |||
| 426 | |||
| 427 | def dist_location(dist): | ||
| 428 | """ | ||
| 429 | Get the site-packages location of this distribution. Generally | ||
| 430 | this is dist.location, except in the case of develop-installed | ||
| 431 | packages, where dist.location is the source code location, and we | ||
| 432 | want to know where the egg-link file is. | ||
| 433 | |||
| 434 | """ | ||
| 435 | egg_link = egg_link_path(dist) | ||
| 436 | if egg_link: | ||
| 437 | return egg_link | ||
| 438 | return dist.location | ||
| 439 | |||
| 440 | |||
| 441 | def current_umask(): | ||
| 442 | """Get the current umask which involves having to set it temporarily.""" | ||
| 443 | mask = os.umask(0) | ||
| 444 | os.umask(mask) | ||
| 445 | return mask | ||
| 446 | |||
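The chmod expression used by the unpack helpers below depends on operator precedence: subtraction binds tighter than |, so it reads (0o777 - umask) | 0o111, i.e. the default permissions with every execute bit forced back on. For example:

```python
# With a typical umask of 022 the result is already executable; with a
# restrictive 077 the '| 0o111' is what re-adds the execute bits.
umask = 0o022
print(oct((0o777 - umask) | 0o111))   # 0o755
umask = 0o077
print(oct((0o777 - umask) | 0o111))   # 0o711: exec bits forced on
```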
| 447 | |||
| 448 | def unzip_file(filename, location, flatten=True): | ||
| 449 | """ | ||
| 450 | Unzip the file (with path `filename`) to the destination `location`. All | ||
| 451 | files are written based on system defaults and umask (i.e. permissions are | ||
| 452 | not preserved), except that regular file members with any execute | ||
| 453 | permissions (user, group, or world) have "chmod +x" applied after being | ||
| 454 | written. Note that for windows, any execute changes using os.chmod are | ||
| 455 | no-ops per the python docs. | ||
| 456 | """ | ||
| 457 | ensure_dir(location) | ||
| 458 | zipfp = open(filename, 'rb') | ||
| 459 | try: | ||
| 460 | zip = zipfile.ZipFile(zipfp, allowZip64=True) | ||
| 461 | leading = has_leading_dir(zip.namelist()) and flatten | ||
| 462 | for info in zip.infolist(): | ||
| 463 | name = info.filename | ||
| 464 | data = zip.read(name) | ||
| 465 | fn = name | ||
| 466 | if leading: | ||
| 467 | fn = split_leading_dir(name)[1] | ||
| 468 | fn = os.path.join(location, fn) | ||
| 469 | dir = os.path.dirname(fn) | ||
| 470 | if fn.endswith('/') or fn.endswith('\\'): | ||
| 471 | # A directory | ||
| 472 | ensure_dir(fn) | ||
| 473 | else: | ||
| 474 | ensure_dir(dir) | ||
| 475 | fp = open(fn, 'wb') | ||
| 476 | try: | ||
| 477 | fp.write(data) | ||
| 478 | finally: | ||
| 479 | fp.close() | ||
| 480 | mode = info.external_attr >> 16 | ||
| 481 | # is the mode set, is it a regular file, and does it have any | ||
| 482 | # execute permission for user/group/world? | ||
| 483 | if mode and stat.S_ISREG(mode) and mode & 0o111: | ||
| 484 | # make dest file have execute for user/group/world | ||
| 485 | # (chmod +x) no-op on windows per python docs | ||
| 486 | os.chmod(fn, (0o777 - current_umask() | 0o111)) | ||
| 487 | finally: | ||
| 488 | zipfp.close() | ||
| 489 | |||
| 490 | |||
| 491 | def untar_file(filename, location): | ||
| 492 | """ | ||
| 493 | Untar the file (with path `filename`) to the destination `location`. | ||
| 494 | All files are written based on system defaults and umask (i.e. permissions | ||
| 495 | are not preserved), except that regular file members with any execute | ||
| 496 | permissions (user, group, or world) have "chmod +x" applied after being | ||
| 497 | written. Note that for Windows, any execute changes using os.chmod are | ||
| 498 | no-ops per the Python docs. | ||
| 499 | """ | ||
| 500 | ensure_dir(location) | ||
| 501 | if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'): | ||
| 502 | mode = 'r:gz' | ||
| 503 | elif filename.lower().endswith(BZ2_EXTENSIONS): | ||
| 504 | mode = 'r:bz2' | ||
| 505 | elif filename.lower().endswith(XZ_EXTENSIONS): | ||
| 506 | mode = 'r:xz' | ||
| 507 | elif filename.lower().endswith('.tar'): | ||
| 508 | mode = 'r' | ||
| 509 | else: | ||
| 510 | logger.warning( | ||
| 511 | 'Cannot determine compression type for file %s', filename, | ||
| 512 | ) | ||
| 513 | mode = 'r:*' | ||
| 514 | tar = tarfile.open(filename, mode) | ||
| 515 | try: | ||
| 516 | # note: python<=2.5 doesn't seem to know about pax headers, filter them | ||
| 517 | leading = has_leading_dir([ | ||
| 518 | member.name for member in tar.getmembers() | ||
| 519 | if member.name != 'pax_global_header' | ||
| 520 | ]) | ||
| 521 | for member in tar.getmembers(): | ||
| 522 | fn = member.name | ||
| 523 | if fn == 'pax_global_header': | ||
| 524 | continue | ||
| 525 | if leading: | ||
| 526 | fn = split_leading_dir(fn)[1] | ||
| 527 | path = os.path.join(location, fn) | ||
| 528 | if member.isdir(): | ||
| 529 | ensure_dir(path) | ||
| 530 | elif member.issym(): | ||
| 531 | try: | ||
| 532 | tar._extract_member(member, path) | ||
| 533 | except Exception as exc: | ||
| 534 | # Some corrupt tar files seem to produce this | ||
| 535 | # (specifically bad symlinks) | ||
| 536 | logger.warning( | ||
| 537 | 'In the tar file %s the member %s is invalid: %s', | ||
| 538 | filename, member.name, exc, | ||
| 539 | ) | ||
| 540 | continue | ||
| 541 | else: | ||
| 542 | try: | ||
| 543 | fp = tar.extractfile(member) | ||
| 544 | except (KeyError, AttributeError) as exc: | ||
| 545 | # Some corrupt tar files seem to produce this | ||
| 546 | # (specifically bad symlinks) | ||
| 547 | logger.warning( | ||
| 548 | 'In the tar file %s the member %s is invalid: %s', | ||
| 549 | filename, member.name, exc, | ||
| 550 | ) | ||
| 551 | continue | ||
| 552 | ensure_dir(os.path.dirname(path)) | ||
| 553 | with open(path, 'wb') as destfp: | ||
| 554 | shutil.copyfileobj(fp, destfp) | ||
| 555 | fp.close() | ||
| 556 | # Update the timestamp (useful for cython compiled files) | ||
| 557 | tar.utime(member, path) | ||
| 558 | # does the member have any execute permissions for user/group/world? | ||
| 559 | if member.mode & 0o111: | ||
| 560 | # make dest file have execute for user/group/world | ||
| 561 | # no-op on windows per python docs | ||
| 562 | os.chmod(path, (0o777 - current_umask() | 0o111)) | ||
| 563 | finally: | ||
| 564 | tar.close() | ||
| 565 | |||
| 566 | |||
| 567 | def unpack_file(filename, location, content_type, link): | ||
| 568 | filename = os.path.realpath(filename) | ||
| 569 | if (content_type == 'application/zip' or | ||
| 570 | filename.lower().endswith(ZIP_EXTENSIONS) or | ||
| 571 | zipfile.is_zipfile(filename)): | ||
| 572 | unzip_file( | ||
| 573 | filename, | ||
| 574 | location, | ||
| 575 | flatten=not filename.endswith('.whl') | ||
| 576 | ) | ||
| 577 | elif (content_type == 'application/x-gzip' or | ||
| 578 | tarfile.is_tarfile(filename) or | ||
| 579 | filename.lower().endswith( | ||
| 580 | TAR_EXTENSIONS + BZ2_EXTENSIONS + XZ_EXTENSIONS)): | ||
| 581 | untar_file(filename, location) | ||
| 582 | elif (content_type and content_type.startswith('text/html') and | ||
| 583 | is_svn_page(file_contents(filename))): | ||
| 584 | # We don't really care about this | ||
| 585 | from pip._internal.vcs.subversion import Subversion | ||
| 586 | Subversion('svn+' + link.url).unpack(location) | ||
| 587 | else: | ||
| 588 | # FIXME: handle? | ||
| 589 | # FIXME: magic signatures? | ||
| 590 | logger.critical( | ||
| 591 | 'Cannot unpack file %s (downloaded from %s, content-type: %s); ' | ||
| 592 | 'cannot detect archive format', | ||
| 593 | filename, location, content_type, | ||
| 594 | ) | ||
| 595 | raise InstallationError( | ||
| 596 | 'Cannot determine archive format of %s' % location | ||
| 597 | ) | ||
| 598 | |||
| 599 | |||
| 600 | def call_subprocess(cmd, show_stdout=True, cwd=None, | ||
| 601 | on_returncode='raise', | ||
| 602 | command_desc=None, | ||
| 603 | extra_environ=None, unset_environ=None, spinner=None): | ||
| 604 | """ | ||
| 605 | Args: | ||
| 606 | unset_environ: an iterable of environment variable names to unset | ||
| 607 | prior to calling subprocess.Popen(). | ||
| 608 | """ | ||
| 609 | if unset_environ is None: | ||
| 610 | unset_environ = [] | ||
| 611 | # This function's handling of subprocess output is confusing and I | ||
| 612 | # previously broke it terribly, so as penance I will write a long comment | ||
| 613 | # explaining things. | ||
| 614 | # | ||
| 615 | # The obvious thing that affects output is the show_stdout= | ||
| 616 | # kwarg. show_stdout=True means, let the subprocess write directly to our | ||
| 617 | # stdout. Even though it is nominally the default, it is almost never used | ||
| 618 | # inside pip (and should not be used in new code without a very good | ||
| 619 | # reason); as of 2016-02-22 it is only used in a few places inside the VCS | ||
| 620 | # wrapper code. Ideally we should get rid of it entirely, because it | ||
| 621 | # creates a lot of complexity here for a rarely used feature. | ||
| 622 | # | ||
| 623 | # Most places in pip set show_stdout=False. What this means is: | ||
| 624 | # - We connect the child stdout to a pipe, which we read. | ||
| 625 | # - By default, we hide the output but show a spinner -- unless the | ||
| 626 | # subprocess exits with an error, in which case we show the output. | ||
| 627 | # - If the --verbose option was passed (= loglevel is DEBUG), then we show | ||
| 628 | # the output unconditionally. (But in this case we don't want to show | ||
| 629 | # the output a second time if it turns out that there was an error.) | ||
| 630 | # | ||
| 631 | # stderr is always merged with stdout (even if show_stdout=True). | ||
| 632 | if show_stdout: | ||
| 633 | stdout = None | ||
| 634 | else: | ||
| 635 | stdout = subprocess.PIPE | ||
| 636 | if command_desc is None: | ||
| 637 | cmd_parts = [] | ||
| 638 | for part in cmd: | ||
| 639 | if ' ' in part or '\n' in part or '"' in part or "'" in part: | ||
| 640 | part = '"%s"' % part.replace('"', '\\"') | ||
| 641 | cmd_parts.append(part) | ||
| 642 | command_desc = ' '.join(cmd_parts) | ||
| 643 | logger.debug("Running command %s", command_desc) | ||
| 644 | env = os.environ.copy() | ||
| 645 | if extra_environ: | ||
| 646 | env.update(extra_environ) | ||
| 647 | for name in unset_environ: | ||
| 648 | env.pop(name, None) | ||
| 649 | try: | ||
| 650 | proc = subprocess.Popen( | ||
| 651 | cmd, stderr=subprocess.STDOUT, stdin=subprocess.PIPE, | ||
| 652 | stdout=stdout, cwd=cwd, env=env, | ||
| 653 | ) | ||
| 654 | proc.stdin.close() | ||
| 655 | except Exception as exc: | ||
| 656 | logger.critical( | ||
| 657 | "Error %s while executing command %s", exc, command_desc, | ||
| 658 | ) | ||
| 659 | raise | ||
| 660 | all_output = [] | ||
| 661 | if stdout is not None: | ||
| 662 | while True: | ||
| 663 | line = console_to_str(proc.stdout.readline()) | ||
| 664 | if not line: | ||
| 665 | break | ||
| 666 | line = line.rstrip() | ||
| 667 | all_output.append(line + '\n') | ||
| 668 | if logger.getEffectiveLevel() <= std_logging.DEBUG: | ||
| 669 | # Show the line immediately | ||
| 670 | logger.debug(line) | ||
| 671 | else: | ||
| 672 | # Update the spinner | ||
| 673 | if spinner is not None: | ||
| 674 | spinner.spin() | ||
| 675 | try: | ||
| 676 | proc.wait() | ||
| 677 | finally: | ||
| 678 | if proc.stdout: | ||
| 679 | proc.stdout.close() | ||
| 680 | if spinner is not None: | ||
| 681 | if proc.returncode: | ||
| 682 | spinner.finish("error") | ||
| 683 | else: | ||
| 684 | spinner.finish("done") | ||
| 685 | if proc.returncode: | ||
| 686 | if on_returncode == 'raise': | ||
| 687 | if (logger.getEffectiveLevel() > std_logging.DEBUG and | ||
| 688 | not show_stdout): | ||
| 689 | logger.info( | ||
| 690 | 'Complete output from command %s:', command_desc, | ||
| 691 | ) | ||
| 692 | logger.info( | ||
| 693 | ''.join(all_output) + | ||
| 694 | '\n----------------------------------------' | ||
| 695 | ) | ||
| 696 | raise InstallationError( | ||
| 697 | 'Command "%s" failed with error code %s in %s' | ||
| 698 | % (command_desc, proc.returncode, cwd)) | ||
| 699 | elif on_returncode == 'warn': | ||
| 700 | logger.warning( | ||
| 701 | 'Command "%s" had error code %s in %s', | ||
| 702 | command_desc, proc.returncode, cwd, | ||
| 703 | ) | ||
| 704 | elif on_returncode == 'ignore': | ||
| 705 | pass | ||
| 706 | else: | ||
| 707 | raise ValueError('Invalid value: on_returncode=%s' % | ||
| 708 | repr(on_returncode)) | ||
| 709 | if not show_stdout: | ||
| 710 | return ''.join(all_output) | ||
| 711 | |||
| 712 | |||
| 713 | def read_text_file(filename): | ||
| 714 | """Return the contents of *filename*. | ||
| 715 | |||
| 716 | Try to decode the file contents with utf-8, the preferred system encoding | ||
| 717 | (e.g., cp1252 on some Windows machines), and latin1, in that order. | ||
| 718 | Decoding a byte string with latin1 will never raise an error. In the worst | ||
| 719 | case, the returned string will contain some garbage characters. | ||
| 720 | |||
| 721 | """ | ||
| 722 | with open(filename, 'rb') as fp: | ||
| 723 | data = fp.read() | ||
| 724 | |||
| 725 | encodings = ['utf-8', locale.getpreferredencoding(False), 'latin1'] | ||
| 726 | for enc in encodings: | ||
| 727 | try: | ||
| 728 | data = data.decode(enc) | ||
| 729 | except UnicodeDecodeError: | ||
| 730 | continue | ||
| 731 | break | ||
| 732 | |||
| 733 | assert type(data) != bytes # Latin1 should have worked. | ||
| 734 | return data | ||
| 735 | |||
| 736 | |||
| 737 | def _make_build_dir(build_dir): | ||
| 738 | os.makedirs(build_dir) | ||
| 739 | write_delete_marker_file(build_dir) | ||
| 740 | |||
| 741 | |||
| 742 | class FakeFile(object): | ||
| 743 | """Wrap a list of lines in an object with readline() to make | ||
| 744 | ConfigParser happy.""" | ||
| 745 | def __init__(self, lines): | ||
| 746 | self._gen = (l for l in lines) | ||
| 747 | |||
| 748 | def readline(self): | ||
| 749 | try: | ||
| 750 | try: | ||
| 751 | return next(self._gen) | ||
| 752 | except NameError: | ||
| 753 | return self._gen.next() | ||
| 754 | except StopIteration: | ||
| 755 | return '' | ||
| 756 | |||
| 757 | def __iter__(self): | ||
| 758 | return self._gen | ||
| 759 | |||
| 760 | |||
| 761 | class StreamWrapper(StringIO): | ||
| 762 | |||
| 763 | @classmethod | ||
| 764 | def from_stream(cls, orig_stream): | ||
| 765 | cls.orig_stream = orig_stream | ||
| 766 | return cls() | ||
| 767 | |||
| 768 | # compileall.compile_dir() needs stdout.encoding to print to stdout | ||
| 769 | @property | ||
| 770 | def encoding(self): | ||
| 771 | return self.orig_stream.encoding | ||
| 772 | |||
| 773 | |||
| 774 | @contextlib.contextmanager | ||
| 775 | def captured_output(stream_name): | ||
| 776 | """Return a context manager used by captured_stdout/stdin/stderr | ||
| 777 | that temporarily replaces the sys stream *stream_name* with a StringIO. | ||
| 778 | |||
| 779 | Taken from Lib/support/__init__.py in the CPython repo. | ||
| 780 | """ | ||
| 781 | orig_stdout = getattr(sys, stream_name) | ||
| 782 | setattr(sys, stream_name, StreamWrapper.from_stream(orig_stdout)) | ||
| 783 | try: | ||
| 784 | yield getattr(sys, stream_name) | ||
| 785 | finally: | ||
| 786 | setattr(sys, stream_name, orig_stdout) | ||
| 787 | |||
| 788 | |||
| 789 | def captured_stdout(): | ||
| 790 | """Capture the output of sys.stdout: | ||
| 791 | |||
| 792 | with captured_stdout() as stdout: | ||
| 793 | print('hello') | ||
| 794 | self.assertEqual(stdout.getvalue(), 'hello\n') | ||
| 795 | |||
| 796 | Taken from Lib/support/__init__.py in the CPython repo. | ||
| 797 | """ | ||
| 798 | return captured_output('stdout') | ||
| 799 | |||
| 800 | |||
| 801 | class cached_property(object): | ||
| 802 | """A property that is only computed once per instance and then replaces | ||
| 803 | itself with an ordinary attribute. Deleting the attribute resets the | ||
| 804 | property. | ||
| 805 | |||
| 806 | Source: https://github.com/bottlepy/bottle/blob/0.11.5/bottle.py#L175 | ||
| 807 | """ | ||
| 808 | |||
| 809 | def __init__(self, func): | ||
| 810 | self.__doc__ = getattr(func, '__doc__') | ||
| 811 | self.func = func | ||
| 812 | |||
| 813 | def __get__(self, obj, cls): | ||
| 814 | if obj is None: | ||
| 815 | # We're being accessed from the class itself, not from an object | ||
| 816 | return self | ||
| 817 | value = obj.__dict__[self.func.__name__] = self.func(obj) | ||
| 818 | return value | ||
| 819 | |||
| 820 | |||
| 821 | def get_installed_version(dist_name, lookup_dirs=None): | ||
| 822 | """Get the installed version of dist_name avoiding pkg_resources cache""" | ||
| 823 | # Create a requirement that we'll look for inside of setuptools. | ||
| 824 | req = pkg_resources.Requirement.parse(dist_name) | ||
| 825 | |||
| 826 | # We want to avoid having this cached, so we need to construct a new | ||
| 827 | # working set each time. | ||
| 828 | if lookup_dirs is None: | ||
| 829 | working_set = pkg_resources.WorkingSet() | ||
| 830 | else: | ||
| 831 | working_set = pkg_resources.WorkingSet(lookup_dirs) | ||
| 832 | |||
| 833 | # Get the installed distribution from our working set | ||
| 834 | dist = working_set.find(req) | ||
| 835 | |||
| 836 | # Check to see if we got an installed distribution or not; if we did, | ||
| 837 | # we want to return its version. | ||
| 838 | return dist.version if dist else None | ||
| 839 | |||
| 840 | |||
| 841 | def consume(iterator): | ||
| 842 | """Consume an iterable at C speed.""" | ||
| 843 | deque(iterator, maxlen=0) | ||
| 844 | |||
| 845 | |||
| 846 | # Simulates an enum | ||
| 847 | def enum(*sequential, **named): | ||
| 848 | enums = dict(zip(sequential, range(len(sequential))), **named) | ||
| 849 | reverse = {value: key for key, value in enums.items()} | ||
| 850 | enums['reverse_mapping'] = reverse | ||
| 851 | return type('Enum', (), enums) | ||
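The enum() simulation at the end of the file builds a throwaway class whose attributes are the sequential names mapped to 0..n plus any explicitly named values, with a reverse_mapping for lookups; for instance:

```python
def enum(*sequential, **named):
    # Same construction as above: a dict of names to values, turned into a class.
    enums = dict(zip(sequential, range(len(sequential))), **named)
    reverse = {value: key for key, value in enums.items()}
    enums['reverse_mapping'] = reverse
    return type('Enum', (), enums)

Color = enum('RED', 'GREEN', BLUE=42)
print(Color.RED, Color.GREEN, Color.BLUE)   # 0 1 42
print(Color.reverse_mapping[42])            # 'BLUE'
```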
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/outdated.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/outdated.py new file mode 100644 index 0000000..f4572ab --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/outdated.py | |||
| @@ -0,0 +1,163 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | |||
| 3 | import datetime | ||
| 4 | import json | ||
| 5 | import logging | ||
| 6 | import os.path | ||
| 7 | import sys | ||
| 8 | |||
| 9 | from pip._vendor import lockfile | ||
| 10 | from pip._vendor.packaging import version as packaging_version | ||
| 11 | |||
| 12 | from pip._internal.compat import WINDOWS | ||
| 13 | from pip._internal.index import PackageFinder | ||
| 14 | from pip._internal.locations import USER_CACHE_DIR, running_under_virtualenv | ||
| 15 | from pip._internal.utils.filesystem import check_path_owner | ||
| 16 | from pip._internal.utils.misc import ensure_dir, get_installed_version | ||
| 17 | |||
| 18 | SELFCHECK_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ" | ||
| 19 | |||
| 20 | |||
| 21 | logger = logging.getLogger(__name__) | ||
| 22 | |||
| 23 | |||
| 24 | class VirtualenvSelfCheckState(object): | ||
| 25 | def __init__(self): | ||
| 26 | self.statefile_path = os.path.join(sys.prefix, "pip-selfcheck.json") | ||
| 27 | |||
| 28 | # Load the existing state | ||
| 29 | try: | ||
| 30 | with open(self.statefile_path) as statefile: | ||
| 31 | self.state = json.load(statefile) | ||
| 32 | except (IOError, ValueError): | ||
| 33 | self.state = {} | ||
| 34 | |||
| 35 | def save(self, pypi_version, current_time): | ||
| 36 | # Attempt to write out our version check file | ||
| 37 | with open(self.statefile_path, "w") as statefile: | ||
| 38 | json.dump( | ||
| 39 | { | ||
| 40 | "last_check": current_time.strftime(SELFCHECK_DATE_FMT), | ||
| 41 | "pypi_version": pypi_version, | ||
| 42 | }, | ||
| 43 | statefile, | ||
| 44 | sort_keys=True, | ||
| 45 | separators=(",", ":") | ||
| 46 | ) | ||
| 47 | |||
| 48 | |||
| 49 | class GlobalSelfCheckState(object): | ||
| 50 | def __init__(self): | ||
| 51 | self.statefile_path = os.path.join(USER_CACHE_DIR, "selfcheck.json") | ||
| 52 | |||
| 53 | # Load the existing state | ||
| 54 | try: | ||
| 55 | with open(self.statefile_path) as statefile: | ||
| 56 | self.state = json.load(statefile)[sys.prefix] | ||
| 57 | except (IOError, ValueError, KeyError): | ||
| 58 | self.state = {} | ||
| 59 | |||
| 60 | def save(self, pypi_version, current_time): | ||
| 61 | # Check to make sure that we own the directory | ||
| 62 | if not check_path_owner(os.path.dirname(self.statefile_path)): | ||
| 63 | return | ||
| 64 | |||
| 65 | # Now that we've ensured the directory is owned by this user, we'll go | ||
| 66 | # ahead and make sure that all our directories are created. | ||
| 67 | ensure_dir(os.path.dirname(self.statefile_path)) | ||
| 68 | |||
| 69 | # Attempt to write out our version check file | ||
| 70 | with lockfile.LockFile(self.statefile_path): | ||
| 71 | if os.path.exists(self.statefile_path): | ||
| 72 | with open(self.statefile_path) as statefile: | ||
| 73 | state = json.load(statefile) | ||
| 74 | else: | ||
| 75 | state = {} | ||
| 76 | |||
| 77 | state[sys.prefix] = { | ||
| 78 | "last_check": current_time.strftime(SELFCHECK_DATE_FMT), | ||
| 79 | "pypi_version": pypi_version, | ||
| 80 | } | ||
| 81 | |||
| 82 | with open(self.statefile_path, "w") as statefile: | ||
| 83 | json.dump(state, statefile, sort_keys=True, | ||
| 84 | separators=(",", ":")) | ||
| 85 | |||
| 86 | |||
| 87 | def load_selfcheck_statefile(): | ||
| 88 | if running_under_virtualenv(): | ||
| 89 | return VirtualenvSelfCheckState() | ||
| 90 | else: | ||
| 91 | return GlobalSelfCheckState() | ||
| 92 | |||
| 93 | |||
| 94 | def pip_version_check(session, options): | ||
| 95 | """Check for an update for pip. | ||
| 96 | |||
| 97 | Limit the frequency of checks to once per week. State is stored either in | ||
| 98 | the active virtualenv or in the user's USER_CACHE_DIR keyed off the prefix | ||
| 99 | of the pip script path. | ||
| 100 | """ | ||
| 101 | installed_version = get_installed_version("pip") | ||
| 102 | if not installed_version: | ||
| 103 | return | ||
| 104 | |||
| 105 | pip_version = packaging_version.parse(installed_version) | ||
| 106 | pypi_version = None | ||
| 107 | |||
| 108 | try: | ||
| 109 | state = load_selfcheck_statefile() | ||
| 110 | |||
| 111 | current_time = datetime.datetime.utcnow() | ||
| 112 | # Determine if we need to refresh the state | ||
| 113 | if "last_check" in state.state and "pypi_version" in state.state: | ||
| 114 | last_check = datetime.datetime.strptime( | ||
| 115 | state.state["last_check"], | ||
| 116 | SELFCHECK_DATE_FMT | ||
| 117 | ) | ||
| 118 | if (current_time - last_check).total_seconds() < 7 * 24 * 60 * 60: | ||
| 119 | pypi_version = state.state["pypi_version"] | ||
| 120 | |||
| 121 | # Refresh the version if we need to or just see if we need to warn | ||
| 122 | if pypi_version is None: | ||
| 123 | # Let's use PackageFinder to see what the latest pip version is | ||
| 124 | finder = PackageFinder( | ||
| 125 | find_links=options.find_links, | ||
| 126 | index_urls=[options.index_url] + options.extra_index_urls, | ||
| 127 | allow_all_prereleases=False, # Explicitly set to False | ||
| 128 | trusted_hosts=options.trusted_hosts, | ||
| 129 | process_dependency_links=options.process_dependency_links, | ||
| 130 | session=session, | ||
| 131 | ) | ||
| 132 | all_candidates = finder.find_all_candidates("pip") | ||
| 133 | if not all_candidates: | ||
| 134 | return | ||
| 135 | pypi_version = str( | ||
| 136 | max(all_candidates, key=lambda c: c.version).version | ||
| 137 | ) | ||
| 138 | |||
| 139 | # save that we've performed a check | ||
| 140 | state.save(pypi_version, current_time) | ||
| 141 | |||
| 142 | remote_version = packaging_version.parse(pypi_version) | ||
| 143 | |||
| 144 | # Determine if the installed version is older than the PyPI version | ||
| 145 | if (pip_version < remote_version and | ||
| 146 | pip_version.base_version != remote_version.base_version): | ||
| 147 | # Advise "python -m pip" on Windows to avoid issues | ||
| 148 | # with overwriting pip.exe. | ||
| 149 | if WINDOWS: | ||
| 150 | pip_cmd = "python -m pip" | ||
| 151 | else: | ||
| 152 | pip_cmd = "pip" | ||
| 153 | logger.warning( | ||
| 154 | "You are using pip version %s, however version %s is " | ||
| 155 | "available.\nYou should consider upgrading via the " | ||
| 156 | "'%s install --upgrade pip' command.", | ||
| 157 | pip_version, pypi_version, pip_cmd | ||
| 158 | ) | ||
| 159 | except Exception: | ||
| 160 | logger.debug( | ||
| 161 | "There was an error checking the latest version of pip", | ||
| 162 | exc_info=True, | ||
| 163 | ) | ||
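The two state classes above share one on-disk format: a compact JSON document holding `last_check` and `pypi_version`, keyed by `sys.prefix` in the global case. A minimal sketch of the weekly gate in `pip_version_check`, assuming the ISO-style `SELFCHECK_DATE_FMT` defined earlier in this module and a made-up statefile payload:

    import datetime
    import json

    SELFCHECK_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ"  # assumed to match the module constant

    raw = '{"/usr/local": {"last_check": "2018-12-04T10:01:23Z", "pypi_version": "18.1"}}'
    state = json.loads(raw)["/usr/local"]  # the global statefile is keyed by sys.prefix

    last_check = datetime.datetime.strptime(state["last_check"], SELFCHECK_DATE_FMT)
    age_seconds = (datetime.datetime.utcnow() - last_check).total_seconds()
    pypi_version = state["pypi_version"] if age_seconds < 7 * 24 * 60 * 60 else None
    # None means the cached answer is stale and PackageFinder must query the index.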
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/packaging.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/packaging.py new file mode 100644 index 0000000..d523953 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/packaging.py | |||
| @@ -0,0 +1,70 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | |||
| 3 | import logging | ||
| 4 | import sys | ||
| 5 | from email.parser import FeedParser # type: ignore | ||
| 6 | |||
| 7 | from pip._vendor import pkg_resources | ||
| 8 | from pip._vendor.packaging import specifiers, version | ||
| 9 | |||
| 10 | from pip._internal import exceptions | ||
| 11 | |||
| 12 | logger = logging.getLogger(__name__) | ||
| 13 | |||
| 14 | |||
| 15 | def check_requires_python(requires_python): | ||
| 16 | """ | ||
| 17 | Check if the python version in use matches the `requires_python` specifier. | ||
| 18 | |||
| 19 | Returns `True` if the version of python in use matches the requirement. | ||
| 20 | Returns `False` if the version of python in use does not match the | ||
| 21 | requirement. | ||
| 22 | |||
| 23 | Raises an InvalidSpecifier if `requires_python` has an invalid format. | ||
| 24 | """ | ||
| 25 | if requires_python is None: | ||
| 26 | # The package provides no information | ||
| 27 | return True | ||
| 28 | requires_python_specifier = specifiers.SpecifierSet(requires_python) | ||
| 29 | |||
| 30 | # We only use major.minor.micro | ||
| 31 | python_version = version.parse('.'.join(map(str, sys.version_info[:3]))) | ||
| 32 | return python_version in requires_python_specifier | ||
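For instance, a minimal sketch of the same membership test, using the vendored `specifiers` module directly (the `'>=3.4'` value is an illustrative `Requires-Python` entry, not taken from any real package):

    import sys
    from pip._vendor.packaging import specifiers, version

    spec = specifiers.SpecifierSet(">=3.4")  # e.g. metadata line "Requires-Python: >=3.4"
    running = version.parse(".".join(map(str, sys.version_info[:3])))
    print(running in spec)  # True on any Python at or above 3.4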
| 33 | |||
| 34 | |||
| 35 | def get_metadata(dist): | ||
| 36 | if (isinstance(dist, pkg_resources.DistInfoDistribution) and | ||
| 37 | dist.has_metadata('METADATA')): | ||
| 38 | return dist.get_metadata('METADATA') | ||
| 39 | elif dist.has_metadata('PKG-INFO'): | ||
| 40 | return dist.get_metadata('PKG-INFO') | ||
| 41 | |||
| 42 | |||
| 43 | def check_dist_requires_python(dist): | ||
| 44 | metadata = get_metadata(dist) | ||
| 45 | feed_parser = FeedParser() | ||
| 46 | feed_parser.feed(metadata) | ||
| 47 | pkg_info_dict = feed_parser.close() | ||
| 48 | requires_python = pkg_info_dict.get('Requires-Python') | ||
| 49 | try: | ||
| 50 | if not check_requires_python(requires_python): | ||
| 51 | raise exceptions.UnsupportedPythonVersion( | ||
| 52 | "%s requires Python '%s' but the running Python is %s" % ( | ||
| 53 | dist.project_name, | ||
| 54 | requires_python, | ||
| 55 | '.'.join(map(str, sys.version_info[:3])),) | ||
| 56 | ) | ||
| 57 | except specifiers.InvalidSpecifier as e: | ||
| 58 | logger.warning( | ||
| 59 | "Package %s has an invalid Requires-Python entry %s - %s", | ||
| 60 | dist.project_name, requires_python, e, | ||
| 61 | ) | ||
| 62 | return | ||
| 63 | |||
| 64 | |||
| 65 | def get_installer(dist): | ||
| 66 | if dist.has_metadata('INSTALLER'): | ||
| 67 | for line in dist.get_metadata_lines('INSTALLER'): | ||
| 68 | if line.strip(): | ||
| 69 | return line.strip() | ||
| 70 | return '' | ||
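A usage sketch tying these helpers together for an already-installed distribution (choosing `pip` itself as the example dist is arbitrary):

    from pip._vendor import pkg_resources
    from pip._internal.utils.packaging import (
        check_dist_requires_python, get_installer,
    )

    dist = pkg_resources.get_distribution("pip")
    check_dist_requires_python(dist)   # raises UnsupportedPythonVersion on mismatch
    print(get_installer(dist))         # e.g. 'pip', or '' if INSTALLER is absent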
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/setuptools_build.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/setuptools_build.py new file mode 100644 index 0000000..9d32174 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/setuptools_build.py | |||
| @@ -0,0 +1,8 @@ | |||
| 1 | # Shim to wrap setup.py invocation with setuptools | ||
| 2 | SETUPTOOLS_SHIM = ( | ||
| 3 | "import setuptools, tokenize;__file__=%r;" | ||
| 4 | "f=getattr(tokenize, 'open', open)(__file__);" | ||
| 5 | "code=f.read().replace('\\r\\n', '\\n');" | ||
| 6 | "f.close();" | ||
| 7 | "exec(compile(code, __file__, 'exec'))" | ||
| 8 | ) | ||
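The shim forces `setuptools` to be imported before `setup.py` executes, so even distutils-only setup scripts expose setuptools commands such as `bdist_wheel`. A sketch of the call form, with a hypothetical path and command (the real argument list is assembled elsewhere in pip):

    import sys
    from pip._internal.utils.setuptools_build import SETUPTOOLS_SHIM

    setup_py = "/tmp/somepkg/setup.py"                       # hypothetical checkout
    args = [sys.executable, "-c", SETUPTOOLS_SHIM % setup_py, "bdist_wheel"]
    # call_subprocess(args, cwd="/tmp/somepkg")              # roughly how pip runs it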
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/temp_dir.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/temp_dir.py new file mode 100644 index 0000000..25bc0d9 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/temp_dir.py | |||
| @@ -0,0 +1,82 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | |||
| 3 | import logging | ||
| 4 | import os.path | ||
| 5 | import tempfile | ||
| 6 | |||
| 7 | from pip._internal.utils.misc import rmtree | ||
| 8 | |||
| 9 | logger = logging.getLogger(__name__) | ||
| 10 | |||
| 11 | |||
| 12 | class TempDirectory(object): | ||
| 13 | """Helper class that owns and cleans up a temporary directory. | ||
| 14 | |||
| 15 | This class can be used as a context manager or as an OO representation of a | ||
| 16 | temporary directory. | ||
| 17 | |||
| 18 | Attributes: | ||
| 19 | path | ||
| 20 | Location of the created temporary directory, or None | ||
| 21 | delete | ||
| 22 | Whether the directory should be deleted when exiting | ||
| 23 | (when used as a context manager) | ||
| 24 | |||
| 25 | Methods: | ||
| 26 | create() | ||
| 27 | Creates a temporary directory and stores its path in the path | ||
| 28 | attribute. | ||
| 29 | cleanup() | ||
| 30 | Deletes the temporary directory and sets path attribute to None | ||
| 31 | |||
| 32 | When used as a context manager, a temporary directory is created on | ||
| 33 | entering the context and, if the delete attribute is True, on exiting the | ||
| 34 | context the created directory is deleted. | ||
| 35 | """ | ||
| 36 | |||
| 37 | def __init__(self, path=None, delete=None, kind="temp"): | ||
| 38 | super(TempDirectory, self).__init__() | ||
| 39 | |||
| 40 | if path is None and delete is None: | ||
| 41 | # If we were not given an explicit directory, and we were not given | ||
| 42 | # an explicit delete option, then we'll default to deleting. | ||
| 43 | delete = True | ||
| 44 | |||
| 45 | self.path = path | ||
| 46 | self.delete = delete | ||
| 47 | self.kind = kind | ||
| 48 | |||
| 49 | def __repr__(self): | ||
| 50 | return "<{} {!r}>".format(self.__class__.__name__, self.path) | ||
| 51 | |||
| 52 | def __enter__(self): | ||
| 53 | self.create() | ||
| 54 | return self | ||
| 55 | |||
| 56 | def __exit__(self, exc, value, tb): | ||
| 57 | if self.delete: | ||
| 58 | self.cleanup() | ||
| 59 | |||
| 60 | def create(self): | ||
| 61 | """Create a temporary directory and store it's path in self.path | ||
| 62 | """ | ||
| 63 | if self.path is not None: | ||
| 64 | logger.debug( | ||
| 65 | "Skipped creation of temporary directory: {}".format(self.path) | ||
| 66 | ) | ||
| 67 | return | ||
| 68 | # We realpath here because some systems have their default tmpdir | ||
| 69 | # symlinked to another directory. This tends to confuse build | ||
| 70 | # scripts, so we canonicalize the path by traversing potential | ||
| 71 | # symlinks here. | ||
| 72 | self.path = os.path.realpath( | ||
| 73 | tempfile.mkdtemp(prefix="pip-{}-".format(self.kind)) | ||
| 74 | ) | ||
| 75 | logger.debug("Created temporary directory: {}".format(self.path)) | ||
| 76 | |||
| 77 | def cleanup(self): | ||
| 78 | """Remove the temporary directory created and reset state | ||
| 79 | """ | ||
| 80 | if self.path is not None and os.path.exists(self.path): | ||
| 81 | rmtree(self.path) | ||
| 82 | self.path = None | ||
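A usage sketch for the context-manager form; with no explicit `path` or `delete`, deletion on exit is the default (the `kind` and file name here are arbitrary):

    import os
    from pip._internal.utils.temp_dir import TempDirectory

    with TempDirectory(kind="example") as tmp:
        scratch = os.path.join(tmp.path, "scratch.txt")
        with open(scratch, "w") as f:
            f.write("temporary data")
    print(tmp.path)  # None: cleanup() ran on exit and reset the attribute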
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/typing.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/typing.py new file mode 100644 index 0000000..4e25ae6 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/typing.py | |||
| @@ -0,0 +1,29 @@ | |||
| 1 | """For neatly implementing static typing in pip. | ||
| 2 | |||
| 3 | `mypy` - the static type analysis tool we use - uses the `typing` module, which | ||
| 4 | provides core functionality fundamental to mypy's functioning. | ||
| 5 | |||
| 6 | Generally, `typing` would be imported at runtime and used in that fashion - | ||
| 7 | it acts as a no-op at runtime and does not have any run-time overhead by | ||
| 8 | design. | ||
| 9 | |||
| 10 | As it turns out, `typing` is not vendorable - it uses separate sources for | ||
| 11 | Python 2/Python 3. Thus, this codebase cannot expect it to be present. | ||
| 12 | To work around this, mypy allows the typing import to be behind a False-y | ||
| 13 | optional to prevent it from running at runtime and type-comments can be used | ||
| 14 | to remove the need for the types to be accessible directly during runtime. | ||
| 15 | |||
| 16 | This module provides the False-y guard in a nicely named fashion so that a | ||
| 17 | curious maintainer can reach here to read this. | ||
| 18 | |||
| 19 | In pip, all static-typing related imports should be guarded as follows: | ||
| 20 | |||
| 21 | from pip._internal.utils.typing import MYPY_CHECK_RUNNING | ||
| 22 | |||
| 23 | if MYPY_CHECK_RUNNING: | ||
| 24 | from typing import ... | ||
| 25 | |||
| 26 | Ref: https://github.com/python/mypy/issues/3216 | ||
| 27 | """ | ||
| 28 | |||
| 29 | MYPY_CHECK_RUNNING = False | ||
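A sketch of the convention in practice: the import is guarded, and the annotation lives in a type comment so nothing from `typing` executes at runtime (`split_words` is an invented example, not a pip function):

    from pip._internal.utils.typing import MYPY_CHECK_RUNNING

    if MYPY_CHECK_RUNNING:
        from typing import List  # only evaluated by mypy, never at runtime

    def split_words(text):
        # type: (str) -> List[str]
        return text.split()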
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/ui.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/ui.py new file mode 100644 index 0000000..d97ea36 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/ui.py | |||
| @@ -0,0 +1,421 @@ | |||
| 1 | from __future__ import absolute_import, division | ||
| 2 | |||
| 3 | import contextlib | ||
| 4 | import itertools | ||
| 5 | import logging | ||
| 6 | import sys | ||
| 7 | import time | ||
| 8 | from signal import SIGINT, default_int_handler, signal | ||
| 9 | |||
| 10 | from pip._vendor import six | ||
| 11 | from pip._vendor.progress.bar import ( | ||
| 12 | Bar, ChargingBar, FillingCirclesBar, FillingSquaresBar, IncrementalBar, | ||
| 13 | ShadyBar, | ||
| 14 | ) | ||
| 15 | from pip._vendor.progress.helpers import HIDE_CURSOR, SHOW_CURSOR, WritelnMixin | ||
| 16 | from pip._vendor.progress.spinner import Spinner | ||
| 17 | |||
| 18 | from pip._internal.compat import WINDOWS | ||
| 19 | from pip._internal.utils.logging import get_indentation | ||
| 20 | from pip._internal.utils.misc import format_size | ||
| 21 | from pip._internal.utils.typing import MYPY_CHECK_RUNNING | ||
| 22 | |||
| 23 | if MYPY_CHECK_RUNNING: | ||
| 24 | from typing import Any | ||
| 25 | |||
| 26 | try: | ||
| 27 | from pip._vendor import colorama | ||
| 28 | # Lots of different errors can come from this, including SystemError and | ||
| 29 | # ImportError. | ||
| 30 | except Exception: | ||
| 31 | colorama = None | ||
| 32 | |||
| 33 | logger = logging.getLogger(__name__) | ||
| 34 | |||
| 35 | |||
| 36 | def _select_progress_class(preferred, fallback): | ||
| 37 | encoding = getattr(preferred.file, "encoding", None) | ||
| 38 | |||
| 39 | # If we don't know what encoding this file is in, then we'll just assume | ||
| 40 | # that it doesn't support unicode and use the ASCII bar. | ||
| 41 | if not encoding: | ||
| 42 | return fallback | ||
| 43 | |||
| 44 | # Collect all of the possible characters we want to use with the preferred | ||
| 45 | # bar. | ||
| 46 | characters = [ | ||
| 47 | getattr(preferred, "empty_fill", six.text_type()), | ||
| 48 | getattr(preferred, "fill", six.text_type()), | ||
| 49 | ] | ||
| 50 | characters += list(getattr(preferred, "phases", [])) | ||
| 51 | |||
| 52 | # Try to decode the characters we're using for the bar using the encoding | ||
| 53 | # of the given file, if this works then we'll assume that we can use the | ||
| 54 | # fancier bar and if not we'll fall back to the plaintext bar. | ||
| 55 | try: | ||
| 56 | six.text_type().join(characters).encode(encoding) | ||
| 57 | except UnicodeEncodeError: | ||
| 58 | return fallback | ||
| 59 | else: | ||
| 60 | return preferred | ||
| 61 | |||
| 62 | |||
| 63 | _BaseBar = _select_progress_class(IncrementalBar, Bar) # type: Any | ||
| 64 | |||
| 65 | |||
| 66 | class InterruptibleMixin(object): | ||
| 67 | """ | ||
| 68 | Helper to ensure that self.finish() gets called on keyboard interrupt. | ||
| 69 | |||
| 70 | This allows downloads to be interrupted without leaving temporary state | ||
| 71 | (like hidden cursors) behind. | ||
| 72 | |||
| 73 | This class is similar to the progress library's existing SigIntMixin | ||
| 74 | helper, but as of version 1.2, that helper has the following problems: | ||
| 75 | |||
| 76 | 1. It calls sys.exit(). | ||
| 77 | 2. It discards the existing SIGINT handler completely. | ||
| 78 | 3. It leaves its own handler in place even after an uninterrupted finish, | ||
| 79 | which will have unexpected delayed effects if the user triggers an | ||
| 80 | unrelated keyboard interrupt some time after a progress-displaying | ||
| 81 | download has already completed, for example. | ||
| 82 | """ | ||
| 83 | |||
| 84 | def __init__(self, *args, **kwargs): | ||
| 85 | """ | ||
| 86 | Save the original SIGINT handler for later. | ||
| 87 | """ | ||
| 88 | super(InterruptibleMixin, self).__init__(*args, **kwargs) | ||
| 89 | |||
| 90 | self.original_handler = signal(SIGINT, self.handle_sigint) | ||
| 91 | |||
| 92 | # If signal() returns None, the previous handler was not installed from | ||
| 93 | # Python, and we cannot restore it. This probably should not happen, | ||
| 94 | # but if it does, we must restore something sensible instead, at least. | ||
| 95 | # The least bad option should be Python's default SIGINT handler, which | ||
| 96 | # just raises KeyboardInterrupt. | ||
| 97 | if self.original_handler is None: | ||
| 98 | self.original_handler = default_int_handler | ||
| 99 | |||
| 100 | def finish(self): | ||
| 101 | """ | ||
| 102 | Restore the original SIGINT handler after finishing. | ||
| 103 | |||
| 104 | This should happen regardless of whether the progress display finishes | ||
| 105 | normally, or gets interrupted. | ||
| 106 | """ | ||
| 107 | super(InterruptibleMixin, self).finish() | ||
| 108 | signal(SIGINT, self.original_handler) | ||
| 109 | |||
| 110 | def handle_sigint(self, signum, frame): | ||
| 111 | """ | ||
| 112 | Call self.finish() before delegating to the original SIGINT handler. | ||
| 113 | |||
| 114 | This handler should only be in place while the progress display is | ||
| 115 | active. | ||
| 116 | """ | ||
| 117 | self.finish() | ||
| 118 | self.original_handler(signum, frame) | ||
| 119 | |||
| 120 | |||
| 121 | class SilentBar(Bar): | ||
| 122 | |||
| 123 | def update(self): | ||
| 124 | pass | ||
| 125 | |||
| 126 | |||
| 127 | class BlueEmojiBar(IncrementalBar): | ||
| 128 | |||
| 129 | suffix = "%(percent)d%%" | ||
| 130 | bar_prefix = " " | ||
| 131 | bar_suffix = " " | ||
| 132 | phases = (u"\U0001F539", u"\U0001F537", u"\U0001F535") # type: Any | ||
| 133 | |||
| 134 | |||
| 135 | class DownloadProgressMixin(object): | ||
| 136 | |||
| 137 | def __init__(self, *args, **kwargs): | ||
| 138 | super(DownloadProgressMixin, self).__init__(*args, **kwargs) | ||
| 139 | self.message = (" " * (get_indentation() + 2)) + self.message | ||
| 140 | |||
| 141 | @property | ||
| 142 | def downloaded(self): | ||
| 143 | return format_size(self.index) | ||
| 144 | |||
| 145 | @property | ||
| 146 | def download_speed(self): | ||
| 147 | # Avoid zero division errors... | ||
| 148 | if self.avg == 0.0: | ||
| 149 | return "..." | ||
| 150 | return format_size(1 / self.avg) + "/s" | ||
| 151 | |||
| 152 | @property | ||
| 153 | def pretty_eta(self): | ||
| 154 | if self.eta: | ||
| 155 | return "eta %s" % self.eta_td | ||
| 156 | return "" | ||
| 157 | |||
| 158 | def iter(self, it, n=1): | ||
| 159 | for x in it: | ||
| 160 | yield x | ||
| 161 | self.next(n) | ||
| 162 | self.finish() | ||
| 163 | |||
| 164 | |||
| 165 | class WindowsMixin(object): | ||
| 166 | |||
| 167 | def __init__(self, *args, **kwargs): | ||
| 168 | # The Windows terminal does not support the hide/show cursor ANSI codes | ||
| 169 | # even with colorama. So we'll ensure that hide_cursor is False on | ||
| 170 | # Windows. | ||
| 171 | # This call needs to go before the super() call, so that hide_cursor | ||
| 172 | # is set in time. The base progress bar class writes the "hide cursor" | ||
| 173 | # code to the terminal in its init, so if we don't set this soon | ||
| 174 | # enough, we get a "hide" with no corresponding "show"... | ||
| 175 | if WINDOWS and self.hide_cursor: | ||
| 176 | self.hide_cursor = False | ||
| 177 | |||
| 178 | super(WindowsMixin, self).__init__(*args, **kwargs) | ||
| 179 | |||
| 180 | # Check if we are running on Windows and we have the colorama module, | ||
| 181 | # if we do then wrap our file with it. | ||
| 182 | if WINDOWS and colorama: | ||
| 183 | self.file = colorama.AnsiToWin32(self.file) | ||
| 184 | # The progress code expects to be able to call self.file.isatty() | ||
| 185 | # but the colorama.AnsiToWin32() object doesn't have that, so we'll | ||
| 186 | # add it. | ||
| 187 | self.file.isatty = lambda: self.file.wrapped.isatty() | ||
| 188 | # The progress code expects to be able to call self.file.flush() | ||
| 189 | # but the colorama.AnsiToWin32() object doesn't have that, so we'll | ||
| 190 | # add it. | ||
| 191 | self.file.flush = lambda: self.file.wrapped.flush() | ||
| 192 | |||
| 193 | |||
| 194 | class BaseDownloadProgressBar(WindowsMixin, InterruptibleMixin, | ||
| 195 | DownloadProgressMixin): | ||
| 196 | |||
| 197 | file = sys.stdout | ||
| 198 | message = "%(percent)d%%" | ||
| 199 | suffix = "%(downloaded)s %(download_speed)s %(pretty_eta)s" | ||
| 200 | |||
| 201 | # NOTE: The "type: ignore" comments on the following classes are there to | ||
| 202 | # work around https://github.com/python/typing/issues/241 | ||
| 203 | |||
| 204 | |||
| 205 | class DefaultDownloadProgressBar(BaseDownloadProgressBar, | ||
| 206 | _BaseBar): # type: ignore | ||
| 207 | pass | ||
| 208 | |||
| 209 | |||
| 210 | class DownloadSilentBar(BaseDownloadProgressBar, SilentBar): # type: ignore | ||
| 211 | pass | ||
| 212 | |||
| 213 | |||
| 214 | class DownloadIncrementalBar(BaseDownloadProgressBar, # type: ignore | ||
| 215 | IncrementalBar): | ||
| 216 | pass | ||
| 217 | |||
| 218 | |||
| 219 | class DownloadChargingBar(BaseDownloadProgressBar, # type: ignore | ||
| 220 | ChargingBar): | ||
| 221 | pass | ||
| 222 | |||
| 223 | |||
| 224 | class DownloadShadyBar(BaseDownloadProgressBar, ShadyBar): # type: ignore | ||
| 225 | pass | ||
| 226 | |||
| 227 | |||
| 228 | class DownloadFillingSquaresBar(BaseDownloadProgressBar, # type: ignore | ||
| 229 | FillingSquaresBar): | ||
| 230 | pass | ||
| 231 | |||
| 232 | |||
| 233 | class DownloadFillingCirclesBar(BaseDownloadProgressBar, # type: ignore | ||
| 234 | FillingCirclesBar): | ||
| 235 | pass | ||
| 236 | |||
| 237 | |||
| 238 | class DownloadBlueEmojiProgressBar(BaseDownloadProgressBar, # type: ignore | ||
| 239 | BlueEmojiBar): | ||
| 240 | pass | ||
| 241 | |||
| 242 | |||
| 243 | class DownloadProgressSpinner(WindowsMixin, InterruptibleMixin, | ||
| 244 | DownloadProgressMixin, WritelnMixin, Spinner): | ||
| 245 | |||
| 246 | file = sys.stdout | ||
| 247 | suffix = "%(downloaded)s %(download_speed)s" | ||
| 248 | |||
| 249 | def next_phase(self): | ||
| 250 | if not hasattr(self, "_phaser"): | ||
| 251 | self._phaser = itertools.cycle(self.phases) | ||
| 252 | return next(self._phaser) | ||
| 253 | |||
| 254 | def update(self): | ||
| 255 | message = self.message % self | ||
| 256 | phase = self.next_phase() | ||
| 257 | suffix = self.suffix % self | ||
| 258 | line = ''.join([ | ||
| 259 | message, | ||
| 260 | " " if message else "", | ||
| 261 | phase, | ||
| 262 | " " if suffix else "", | ||
| 263 | suffix, | ||
| 264 | ]) | ||
| 265 | |||
| 266 | self.writeln(line) | ||
| 267 | |||
| 268 | |||
| 269 | BAR_TYPES = { | ||
| 270 | "off": (DownloadSilentBar, DownloadSilentBar), | ||
| 271 | "on": (DefaultDownloadProgressBar, DownloadProgressSpinner), | ||
| 272 | "ascii": (DownloadIncrementalBar, DownloadProgressSpinner), | ||
| 273 | "pretty": (DownloadFillingCirclesBar, DownloadProgressSpinner), | ||
| 274 | "emoji": (DownloadBlueEmojiProgressBar, DownloadProgressSpinner) | ||
| 275 | } | ||
| 276 | |||
| 277 | |||
| 278 | def DownloadProgressProvider(progress_bar, max=None): | ||
| 279 | if max is None or max == 0: | ||
| 280 | return BAR_TYPES[progress_bar][1]().iter | ||
| 281 | else: | ||
| 282 | return BAR_TYPES[progress_bar][0](max=max).iter | ||
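A usage sketch (the byte counts are made up): `DownloadProgressProvider` returns the bound `iter` method of the selected bar, or of the spinner when the total size is unknown, so it can wrap any iterable of chunks.

    from pip._internal.utils.ui import DownloadProgressProvider

    chunks = (b"\x00" * 1024 for _ in range(64))            # stand-in response body
    progress = DownloadProgressProvider("on", max=64 * 1024)
    for chunk in progress(chunks, 1024):                    # n=1024 bytes per chunk
        pass  # a real caller would write each chunk to disk here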
| 283 | |||
| 284 | |||
| 285 | ################################################################ | ||
| 286 | # Generic "something is happening" spinners | ||
| 287 | # | ||
| 288 | # We don't even try using progress.spinner.Spinner here because it's actually | ||
| 289 | # simpler to reimplement from scratch than to coerce their code into doing | ||
| 290 | # what we need. | ||
| 291 | ################################################################ | ||
| 292 | |||
| 293 | @contextlib.contextmanager | ||
| 294 | def hidden_cursor(file): | ||
| 295 | # The Windows terminal does not support the hide/show cursor ANSI codes, | ||
| 296 | # even via colorama. So don't even try. | ||
| 297 | if WINDOWS: | ||
| 298 | yield | ||
| 299 | # We don't want to clutter the output with control characters if we're | ||
| 300 | # writing to a file, or if the user is running with --quiet. | ||
| 301 | # See https://github.com/pypa/pip/issues/3418 | ||
| 302 | elif not file.isatty() or logger.getEffectiveLevel() > logging.INFO: | ||
| 303 | yield | ||
| 304 | else: | ||
| 305 | file.write(HIDE_CURSOR) | ||
| 306 | try: | ||
| 307 | yield | ||
| 308 | finally: | ||
| 309 | file.write(SHOW_CURSOR) | ||
| 310 | |||
| 311 | |||
| 312 | class RateLimiter(object): | ||
| 313 | def __init__(self, min_update_interval_seconds): | ||
| 314 | self._min_update_interval_seconds = min_update_interval_seconds | ||
| 315 | self._last_update = 0 | ||
| 316 | |||
| 317 | def ready(self): | ||
| 318 | now = time.time() | ||
| 319 | delta = now - self._last_update | ||
| 320 | return delta >= self._min_update_interval_seconds | ||
| 321 | |||
| 322 | def reset(self): | ||
| 323 | self._last_update = time.time() | ||
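A small sketch of the limiter's contract: `ready()` reports whether enough time has passed, and the caller must `reset()` right after doing the rate-limited work (the interval and loop here are arbitrary):

    import time
    from pip._internal.utils.ui import RateLimiter

    limiter = RateLimiter(0.5)          # at most one update every half second
    updates = 0
    for _ in range(10):
        if limiter.ready():
            updates += 1
            limiter.reset()             # record that an update just happened
        time.sleep(0.1)
    print(updates)                      # 2 or 3, depending on scheduling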
| 324 | |||
| 325 | |||
| 326 | class InteractiveSpinner(object): | ||
| 327 | def __init__(self, message, file=None, spin_chars="-\\|/", | ||
| 328 | # Empirically, 8 updates/second looks nice | ||
| 329 | min_update_interval_seconds=0.125): | ||
| 330 | self._message = message | ||
| 331 | if file is None: | ||
| 332 | file = sys.stdout | ||
| 333 | self._file = file | ||
| 334 | self._rate_limiter = RateLimiter(min_update_interval_seconds) | ||
| 335 | self._finished = False | ||
| 336 | |||
| 337 | self._spin_cycle = itertools.cycle(spin_chars) | ||
| 338 | |||
| 339 | self._file.write(" " * get_indentation() + self._message + " ... ") | ||
| 340 | self._width = 0 | ||
| 341 | |||
| 342 | def _write(self, status): | ||
| 343 | assert not self._finished | ||
| 344 | # Erase what we wrote before by backspacing to the beginning, writing | ||
| 345 | # spaces to overwrite the old text, and then backspacing again | ||
| 346 | backup = "\b" * self._width | ||
| 347 | self._file.write(backup + " " * self._width + backup) | ||
| 348 | # Now we have a blank slate to add our status | ||
| 349 | self._file.write(status) | ||
| 350 | self._width = len(status) | ||
| 351 | self._file.flush() | ||
| 352 | self._rate_limiter.reset() | ||
| 353 | |||
| 354 | def spin(self): | ||
| 355 | if self._finished: | ||
| 356 | return | ||
| 357 | if not self._rate_limiter.ready(): | ||
| 358 | return | ||
| 359 | self._write(next(self._spin_cycle)) | ||
| 360 | |||
| 361 | def finish(self, final_status): | ||
| 362 | if self._finished: | ||
| 363 | return | ||
| 364 | self._write(final_status) | ||
| 365 | self._file.write("\n") | ||
| 366 | self._file.flush() | ||
| 367 | self._finished = True | ||
| 368 | |||
| 369 | |||
| 370 | # Used for dumb terminals, non-interactive installs (no tty), etc. | ||
| 371 | # We still print updates occasionally (once every 60 seconds by default) to | ||
| 372 | # act as a keep-alive for systems like Travis-CI that take lack-of-output as | ||
| 373 | # an indication that a task has frozen. | ||
| 374 | class NonInteractiveSpinner(object): | ||
| 375 | def __init__(self, message, min_update_interval_seconds=60): | ||
| 376 | self._message = message | ||
| 377 | self._finished = False | ||
| 378 | self._rate_limiter = RateLimiter(min_update_interval_seconds) | ||
| 379 | self._update("started") | ||
| 380 | |||
| 381 | def _update(self, status): | ||
| 382 | assert not self._finished | ||
| 383 | self._rate_limiter.reset() | ||
| 384 | logger.info("%s: %s", self._message, status) | ||
| 385 | |||
| 386 | def spin(self): | ||
| 387 | if self._finished: | ||
| 388 | return | ||
| 389 | if not self._rate_limiter.ready(): | ||
| 390 | return | ||
| 391 | self._update("still running...") | ||
| 392 | |||
| 393 | def finish(self, final_status): | ||
| 394 | if self._finished: | ||
| 395 | return | ||
| 396 | self._update("finished with status '%s'" % (final_status,)) | ||
| 397 | self._finished = True | ||
| 398 | |||
| 399 | |||
| 400 | @contextlib.contextmanager | ||
| 401 | def open_spinner(message): | ||
| 402 | # Interactive spinner goes directly to sys.stdout rather than being routed | ||
| 403 | # through the logging system, but it acts like it has level INFO, | ||
| 404 | # i.e. it's only displayed if we're at level INFO or better. | ||
| 405 | # Non-interactive spinner goes through the logging system, so it is always | ||
| 406 | # in sync with logging configuration. | ||
| 407 | if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO: | ||
| 408 | spinner = InteractiveSpinner(message) | ||
| 409 | else: | ||
| 410 | spinner = NonInteractiveSpinner(message) | ||
| 411 | try: | ||
| 412 | with hidden_cursor(sys.stdout): | ||
| 413 | yield spinner | ||
| 414 | except KeyboardInterrupt: | ||
| 415 | spinner.finish("canceled") | ||
| 416 | raise | ||
| 417 | except Exception: | ||
| 418 | spinner.finish("error") | ||
| 419 | raise | ||
| 420 | else: | ||
| 421 | spinner.finish("done") | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/vcs/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/vcs/__init__.py new file mode 100644 index 0000000..bff94fa --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/vcs/__init__.py | |||
| @@ -0,0 +1,471 @@ | |||
| 1 | """Handles all VCS (version control) support""" | ||
| 2 | from __future__ import absolute_import | ||
| 3 | |||
| 4 | import copy | ||
| 5 | import errno | ||
| 6 | import logging | ||
| 7 | import os | ||
| 8 | import shutil | ||
| 9 | import sys | ||
| 10 | |||
| 11 | from pip._vendor.six.moves.urllib import parse as urllib_parse | ||
| 12 | |||
| 13 | from pip._internal.exceptions import BadCommand | ||
| 14 | from pip._internal.utils.misc import ( | ||
| 15 | display_path, backup_dir, call_subprocess, rmtree, ask_path_exists, | ||
| 16 | ) | ||
| 17 | from pip._internal.utils.typing import MYPY_CHECK_RUNNING | ||
| 18 | |||
| 19 | if MYPY_CHECK_RUNNING: | ||
| 20 | from typing import Dict, Optional, Tuple | ||
| 21 | from pip._internal.basecommand import Command | ||
| 22 | |||
| 23 | __all__ = ['vcs', 'get_src_requirement'] | ||
| 24 | |||
| 25 | |||
| 26 | logger = logging.getLogger(__name__) | ||
| 27 | |||
| 28 | |||
| 29 | class RevOptions(object): | ||
| 30 | |||
| 31 | """ | ||
| 32 | Encapsulates a VCS-specific revision to install, along with any VCS | ||
| 33 | install options. | ||
| 34 | |||
| 35 | Instances of this class should be treated as if immutable. | ||
| 36 | """ | ||
| 37 | |||
| 38 | def __init__(self, vcs, rev=None, extra_args=None): | ||
| 39 | """ | ||
| 40 | Args: | ||
| 41 | vcs: a VersionControl object. | ||
| 42 | rev: the name of the revision to install. | ||
| 43 | extra_args: a list of extra options. | ||
| 44 | """ | ||
| 45 | if extra_args is None: | ||
| 46 | extra_args = [] | ||
| 47 | |||
| 48 | self.extra_args = extra_args | ||
| 49 | self.rev = rev | ||
| 50 | self.vcs = vcs | ||
| 51 | |||
| 52 | def __repr__(self): | ||
| 53 | return '<RevOptions {}: rev={!r}>'.format(self.vcs.name, self.rev) | ||
| 54 | |||
| 55 | @property | ||
| 56 | def arg_rev(self): | ||
| 57 | if self.rev is None: | ||
| 58 | return self.vcs.default_arg_rev | ||
| 59 | |||
| 60 | return self.rev | ||
| 61 | |||
| 62 | def to_args(self): | ||
| 63 | """ | ||
| 64 | Return the VCS-specific command arguments. | ||
| 65 | """ | ||
| 66 | args = [] | ||
| 67 | rev = self.arg_rev | ||
| 68 | if rev is not None: | ||
| 69 | args += self.vcs.get_base_rev_args(rev) | ||
| 70 | args += self.extra_args | ||
| 71 | |||
| 72 | return args | ||
| 73 | |||
| 74 | def to_display(self): | ||
| 75 | if not self.rev: | ||
| 76 | return '' | ||
| 77 | |||
| 78 | return ' (to revision {})'.format(self.rev) | ||
| 79 | |||
| 80 | def make_new(self, rev): | ||
| 81 | """ | ||
| 82 | Make a copy of the current instance, but with a new rev. | ||
| 83 | |||
| 84 | Args: | ||
| 85 | rev: the name of the revision for the new object. | ||
| 86 | """ | ||
| 87 | return self.vcs.make_rev_options(rev, extra_args=self.extra_args) | ||
| 88 | |||
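For example, a sketch of `to_args()` and `to_display()` with the git backend and hypothetical revision and extra arguments (importing the backend module is what registers it with `vcs`):

    import pip._internal.vcs.git                    # registers the Git backend
    from pip._internal.vcs import vcs

    git = vcs.get_backend("git")()                  # a VersionControl instance
    opts = git.make_rev_options("v1.0", extra_args=["--depth", "1"])
    print(opts.to_args())      # ['v1.0', '--depth', '1']
    print(opts.to_display())   # ' (to revision v1.0)'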
| 89 | |||
| 90 | class VcsSupport(object): | ||
| 91 | _registry = {} # type: Dict[str, Command] | ||
| 92 | schemes = ['ssh', 'git', 'hg', 'bzr', 'sftp', 'svn'] | ||
| 93 | |||
| 94 | def __init__(self): | ||
| 95 | # Register more schemes with urlparse for various version control | ||
| 96 | # systems | ||
| 97 | urllib_parse.uses_netloc.extend(self.schemes) | ||
| 98 | # Python >= 2.7.4, 3.3 doesn't have uses_fragment | ||
| 99 | if getattr(urllib_parse, 'uses_fragment', None): | ||
| 100 | urllib_parse.uses_fragment.extend(self.schemes) | ||
| 101 | super(VcsSupport, self).__init__() | ||
| 102 | |||
| 103 | def __iter__(self): | ||
| 104 | return self._registry.__iter__() | ||
| 105 | |||
| 106 | @property | ||
| 107 | def backends(self): | ||
| 108 | return list(self._registry.values()) | ||
| 109 | |||
| 110 | @property | ||
| 111 | def dirnames(self): | ||
| 112 | return [backend.dirname for backend in self.backends] | ||
| 113 | |||
| 114 | @property | ||
| 115 | def all_schemes(self): | ||
| 116 | schemes = [] | ||
| 117 | for backend in self.backends: | ||
| 118 | schemes.extend(backend.schemes) | ||
| 119 | return schemes | ||
| 120 | |||
| 121 | def register(self, cls): | ||
| 122 | if not hasattr(cls, 'name'): | ||
| 123 | logger.warning('Cannot register VCS %s', cls.__name__) | ||
| 124 | return | ||
| 125 | if cls.name not in self._registry: | ||
| 126 | self._registry[cls.name] = cls | ||
| 127 | logger.debug('Registered VCS backend: %s', cls.name) | ||
| 128 | |||
| 129 | def unregister(self, cls=None, name=None): | ||
| 130 | if name in self._registry: | ||
| 131 | del self._registry[name] | ||
| 132 | elif cls in self._registry.values(): | ||
| 133 | del self._registry[cls.name] | ||
| 134 | else: | ||
| 135 | logger.warning('Cannot unregister because no class or name given') | ||
| 136 | |||
| 137 | def get_backend_name(self, location): | ||
| 138 | """ | ||
| 139 | Return the name of the version control backend if found at given | ||
| 140 | location, e.g. vcs.get_backend_name('/path/to/vcs/checkout') | ||
| 141 | """ | ||
| 142 | for vc_type in self._registry.values(): | ||
| 143 | if vc_type.controls_location(location): | ||
| 144 | logger.debug('Determined that %s uses VCS: %s', | ||
| 145 | location, vc_type.name) | ||
| 146 | return vc_type.name | ||
| 147 | return None | ||
| 148 | |||
| 149 | def get_backend(self, name): | ||
| 150 | name = name.lower() | ||
| 151 | if name in self._registry: | ||
| 152 | return self._registry[name] | ||
| 153 | |||
| 154 | def get_backend_from_location(self, location): | ||
| 155 | vc_type = self.get_backend_name(location) | ||
| 156 | if vc_type: | ||
| 157 | return self.get_backend(vc_type) | ||
| 158 | return None | ||
| 159 | |||
| 160 | |||
| 161 | vcs = VcsSupport() | ||
| 162 | |||
| 163 | |||
| 164 | class VersionControl(object): | ||
| 165 | name = '' | ||
| 166 | dirname = '' | ||
| 167 | # List of supported schemes for this Version Control | ||
| 168 | schemes = () # type: Tuple[str, ...] | ||
| 169 | # Iterable of environment variable names to pass to call_subprocess(). | ||
| 170 | unset_environ = () # type: Tuple[str, ...] | ||
| 171 | default_arg_rev = None # type: Optional[str] | ||
| 172 | |||
| 173 | def __init__(self, url=None, *args, **kwargs): | ||
| 174 | self.url = url | ||
| 175 | super(VersionControl, self).__init__(*args, **kwargs) | ||
| 176 | |||
| 177 | def get_base_rev_args(self, rev): | ||
| 178 | """ | ||
| 179 | Return the base revision arguments for a vcs command. | ||
| 180 | |||
| 181 | Args: | ||
| 182 | rev: the name of a revision to install. Cannot be None. | ||
| 183 | """ | ||
| 184 | raise NotImplementedError | ||
| 185 | |||
| 186 | def make_rev_options(self, rev=None, extra_args=None): | ||
| 187 | """ | ||
| 188 | Return a RevOptions object. | ||
| 189 | |||
| 190 | Args: | ||
| 191 | rev: the name of a revision to install. | ||
| 192 | extra_args: a list of extra options. | ||
| 193 | """ | ||
| 194 | return RevOptions(self, rev, extra_args=extra_args) | ||
| 195 | |||
| 196 | def _is_local_repository(self, repo): | ||
| 197 | """ | ||
| 198 | Posix absolute paths start with os.path.sep; | ||
| 199 | win32 ones start with a drive (like c:\\folder) | ||
| 200 | """ | ||
| 201 | drive, tail = os.path.splitdrive(repo) | ||
| 202 | return repo.startswith(os.path.sep) or drive | ||
| 203 | |||
| 204 | # See issue #1083 for why this method was introduced: | ||
| 205 | # https://github.com/pypa/pip/issues/1083 | ||
| 206 | def translate_egg_surname(self, surname): | ||
| 207 | # For example, Django has branches of the form "stable/1.7.x". | ||
| 208 | return surname.replace('/', '_') | ||
| 209 | |||
| 210 | def export(self, location): | ||
| 211 | """ | ||
| 212 | Export the repository at the url to the destination location | ||
| 213 | i.e. only download the files, without the vcs information | ||
| 214 | """ | ||
| 215 | raise NotImplementedError | ||
| 216 | |||
| 217 | def get_url_rev(self): | ||
| 218 | """ | ||
| 219 | Returns the correct repository URL and revision by parsing the given | ||
| 220 | repository URL | ||
| 221 | """ | ||
| 222 | error_message = ( | ||
| 223 | "Sorry, '%s' is a malformed VCS url. " | ||
| 224 | "The format is <vcs>+<protocol>://<url>, " | ||
| 225 | "e.g. svn+http://myrepo/svn/MyApp#egg=MyApp" | ||
| 226 | ) | ||
| 227 | assert '+' in self.url, error_message % self.url | ||
| 228 | url = self.url.split('+', 1)[1] | ||
| 229 | scheme, netloc, path, query, frag = urllib_parse.urlsplit(url) | ||
| 230 | rev = None | ||
| 231 | if '@' in path: | ||
| 232 | path, rev = path.rsplit('@', 1) | ||
| 233 | url = urllib_parse.urlunsplit((scheme, netloc, path, query, '')) | ||
| 234 | return url, rev | ||
| 235 | |||
| 236 | def get_info(self, location): | ||
| 237 | """ | ||
| 238 | Returns (url, revision), where both are strings | ||
| 239 | """ | ||
| 240 | assert not location.rstrip('/').endswith(self.dirname), \ | ||
| 241 | 'Bad directory: %s' % location | ||
| 242 | return self.get_url(location), self.get_revision(location) | ||
| 243 | |||
| 244 | def normalize_url(self, url): | ||
| 245 | """ | ||
| 246 | Normalize a URL for comparison by unquoting it and removing any | ||
| 247 | trailing slash. | ||
| 248 | """ | ||
| 249 | return urllib_parse.unquote(url).rstrip('/') | ||
| 250 | |||
| 251 | def compare_urls(self, url1, url2): | ||
| 252 | """ | ||
| 253 | Compare two repo URLs for identity, ignoring incidental differences. | ||
| 254 | """ | ||
| 255 | return (self.normalize_url(url1) == self.normalize_url(url2)) | ||
| 256 | |||
| 257 | def obtain(self, dest): | ||
| 258 | """ | ||
| 259 | Called when installing or updating an editable package, takes the | ||
| 260 | source path of the checkout. | ||
| 261 | """ | ||
| 262 | raise NotImplementedError | ||
| 263 | |||
| 264 | def switch(self, dest, url, rev_options): | ||
| 265 | """ | ||
| 266 | Switch the repo at ``dest`` to point to ``URL``. | ||
| 267 | |||
| 268 | Args: | ||
| 269 | rev_options: a RevOptions object. | ||
| 270 | """ | ||
| 271 | raise NotImplementedError | ||
| 272 | |||
| 273 | def update(self, dest, rev_options): | ||
| 274 | """ | ||
| 275 | Update an already-existing repo to the given ``rev_options``. | ||
| 276 | |||
| 277 | Args: | ||
| 278 | rev_options: a RevOptions object. | ||
| 279 | """ | ||
| 280 | raise NotImplementedError | ||
| 281 | |||
| 282 | def is_commit_id_equal(self, dest, name): | ||
| 283 | """ | ||
| 284 | Return whether the id of the current commit equals the given name. | ||
| 285 | |||
| 286 | Args: | ||
| 287 | dest: the repository directory. | ||
| 288 | name: a string name. | ||
| 289 | """ | ||
| 290 | raise NotImplementedError | ||
| 291 | |||
| 292 | def check_destination(self, dest, url, rev_options): | ||
| 293 | """ | ||
| 294 | Prepare a location to receive a checkout/clone. | ||
| 295 | |||
| 296 | Return True if the location is ready for (and requires) a | ||
| 297 | checkout/clone, False otherwise. | ||
| 298 | |||
| 299 | Args: | ||
| 300 | rev_options: a RevOptions object. | ||
| 301 | """ | ||
| 302 | checkout = True | ||
| 303 | prompt = False | ||
| 304 | rev_display = rev_options.to_display() | ||
| 305 | if os.path.exists(dest): | ||
| 306 | checkout = False | ||
| 307 | if os.path.exists(os.path.join(dest, self.dirname)): | ||
| 308 | existing_url = self.get_url(dest) | ||
| 309 | if self.compare_urls(existing_url, url): | ||
| 310 | logger.debug( | ||
| 311 | '%s in %s exists, and has correct URL (%s)', | ||
| 312 | self.repo_name.title(), | ||
| 313 | display_path(dest), | ||
| 314 | url, | ||
| 315 | ) | ||
| 316 | if not self.is_commit_id_equal(dest, rev_options.rev): | ||
| 317 | logger.info( | ||
| 318 | 'Updating %s %s%s', | ||
| 319 | display_path(dest), | ||
| 320 | self.repo_name, | ||
| 321 | rev_display, | ||
| 322 | ) | ||
| 323 | self.update(dest, rev_options) | ||
| 324 | else: | ||
| 325 | logger.info( | ||
| 326 | 'Skipping because already up-to-date.') | ||
| 327 | else: | ||
| 328 | logger.warning( | ||
| 329 | '%s %s in %s exists with URL %s', | ||
| 330 | self.name, | ||
| 331 | self.repo_name, | ||
| 332 | display_path(dest), | ||
| 333 | existing_url, | ||
| 334 | ) | ||
| 335 | prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort ', | ||
| 336 | ('s', 'i', 'w', 'b', 'a')) | ||
| 337 | else: | ||
| 338 | logger.warning( | ||
| 339 | 'Directory %s already exists, and is not a %s %s.', | ||
| 340 | dest, | ||
| 341 | self.name, | ||
| 342 | self.repo_name, | ||
| 343 | ) | ||
| 344 | prompt = ('(i)gnore, (w)ipe, (b)ackup, (a)bort ', ('i', 'w', 'b', 'a')) | ||
| 345 | if prompt: | ||
| 346 | logger.warning( | ||
| 347 | 'The plan is to install the %s repository %s', | ||
| 348 | self.name, | ||
| 349 | url, | ||
| 350 | ) | ||
| 351 | response = ask_path_exists('What to do? %s' % prompt[0], | ||
| 352 | prompt[1]) | ||
| 353 | |||
| 354 | if response == 's': | ||
| 355 | logger.info( | ||
| 356 | 'Switching %s %s to %s%s', | ||
| 357 | self.repo_name, | ||
| 358 | display_path(dest), | ||
| 359 | url, | ||
| 360 | rev_display, | ||
| 361 | ) | ||
| 362 | self.switch(dest, url, rev_options) | ||
| 363 | elif response == 'i': | ||
| 364 | # do nothing | ||
| 365 | pass | ||
| 366 | elif response == 'w': | ||
| 367 | logger.warning('Deleting %s', display_path(dest)) | ||
| 368 | rmtree(dest) | ||
| 369 | checkout = True | ||
| 370 | elif response == 'b': | ||
| 371 | dest_dir = backup_dir(dest) | ||
| 372 | logger.warning( | ||
| 373 | 'Backing up %s to %s', display_path(dest), dest_dir, | ||
| 374 | ) | ||
| 375 | shutil.move(dest, dest_dir) | ||
| 376 | checkout = True | ||
| 377 | elif response == 'a': | ||
| 378 | sys.exit(-1) | ||
| 379 | return checkout | ||
| 380 | |||
| 381 | def unpack(self, location): | ||
| 382 | """ | ||
| 383 | Clean up current location and download the url repository | ||
| 384 | (and vcs info) into location | ||
| 385 | """ | ||
| 386 | if os.path.exists(location): | ||
| 387 | rmtree(location) | ||
| 388 | self.obtain(location) | ||
| 389 | |||
| 390 | def get_src_requirement(self, dist, location): | ||
| 391 | """ | ||
| 392 | Return a string representing the requirement needed to | ||
| 393 | redownload the files currently present in location, something | ||
| 394 | like: | ||
| 395 | {repository_url}@{revision}#egg={project_name}-{version_identifier} | ||
| 396 | """ | ||
| 397 | raise NotImplementedError | ||
| 398 | |||
| 399 | def get_url(self, location): | ||
| 400 | """ | ||
| 401 | Return the url used at location | ||
| 402 | Used in get_info or check_destination | ||
| 403 | """ | ||
| 404 | raise NotImplementedError | ||
| 405 | |||
| 406 | def get_revision(self, location): | ||
| 407 | """ | ||
| 408 | Return the current commit id of the files at the given location. | ||
| 409 | """ | ||
| 410 | raise NotImplementedError | ||
| 411 | |||
| 412 | def run_command(self, cmd, show_stdout=True, cwd=None, | ||
| 413 | on_returncode='raise', | ||
| 414 | command_desc=None, | ||
| 415 | extra_environ=None, spinner=None): | ||
| 416 | """ | ||
| 417 | Run a VCS subcommand | ||
| 418 | This is simply a wrapper around call_subprocess that adds the VCS | ||
| 419 | command name, and checks that the VCS is available | ||
| 420 | """ | ||
| 421 | cmd = [self.name] + cmd | ||
| 422 | try: | ||
| 423 | return call_subprocess(cmd, show_stdout, cwd, | ||
| 424 | on_returncode, | ||
| 425 | command_desc, extra_environ, | ||
| 426 | unset_environ=self.unset_environ, | ||
| 427 | spinner=spinner) | ||
| 428 | except OSError as e: | ||
| 429 | # errno.ENOENT = no such file or directory | ||
| 430 | # In other words, the VCS executable isn't available | ||
| 431 | if e.errno == errno.ENOENT: | ||
| 432 | raise BadCommand( | ||
| 433 | 'Cannot find command %r - do you have ' | ||
| 434 | '%r installed and in your ' | ||
| 435 | 'PATH?' % (self.name, self.name)) | ||
| 436 | else: | ||
| 437 | raise # re-raise exception if a different error occurred | ||
| 438 | |||
| 439 | @classmethod | ||
| 440 | def controls_location(cls, location): | ||
| 441 | """ | ||
| 442 | Check if a location is controlled by the vcs. | ||
| 443 | It is meant to be overridden to implement smarter detection | ||
| 444 | mechanisms for specific vcs. | ||
| 445 | """ | ||
| 446 | logger.debug('Checking in %s for %s (%s)...', | ||
| 447 | location, cls.dirname, cls.name) | ||
| 448 | path = os.path.join(location, cls.dirname) | ||
| 449 | return os.path.exists(path) | ||
| 450 | |||
| 451 | |||
| 452 | def get_src_requirement(dist, location): | ||
| 453 | version_control = vcs.get_backend_from_location(location) | ||
| 454 | if version_control: | ||
| 455 | try: | ||
| 456 | return version_control().get_src_requirement(dist, | ||
| 457 | location) | ||
| 458 | except BadCommand: | ||
| 459 | logger.warning( | ||
| 460 | 'cannot determine version of editable source in %s ' | ||
| 461 | '(%s command not found in path)', | ||
| 462 | location, | ||
| 463 | version_control.name, | ||
| 464 | ) | ||
| 465 | return dist.as_requirement() | ||
| 466 | logger.warning( | ||
| 467 | 'cannot determine version of editable source in %s (is not SVN ' | ||
| 468 | 'checkout, Git clone, Mercurial clone or Bazaar branch)', | ||
| 469 | location, | ||
| 470 | ) | ||
| 471 | return dist.as_requirement() | ||
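A sketch of how the registry and `get_url_rev` fit together for a hypothetical git URL (the repository address is invented; importing the backend module registers it with `vcs`):

    import pip._internal.vcs.git                 # registers the Git backend
    from pip._internal.vcs import vcs

    GitBackend = vcs.get_backend("git")
    backend = GitBackend(url="git+https://example.com/repo.git@v1.0")
    url, rev = backend.get_url_rev()
    print(url, rev)   # https://example.com/repo.git v1.0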
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/vcs/bazaar.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/vcs/bazaar.py new file mode 100644 index 0000000..6ed629a --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/vcs/bazaar.py | |||
| @@ -0,0 +1,113 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | |||
| 3 | import logging | ||
| 4 | import os | ||
| 5 | |||
| 6 | from pip._vendor.six.moves.urllib import parse as urllib_parse | ||
| 7 | |||
| 8 | from pip._internal.download import path_to_url | ||
| 9 | from pip._internal.utils.misc import display_path, rmtree | ||
| 10 | from pip._internal.utils.temp_dir import TempDirectory | ||
| 11 | from pip._internal.vcs import VersionControl, vcs | ||
| 12 | |||
| 13 | logger = logging.getLogger(__name__) | ||
| 14 | |||
| 15 | |||
| 16 | class Bazaar(VersionControl): | ||
| 17 | name = 'bzr' | ||
| 18 | dirname = '.bzr' | ||
| 19 | repo_name = 'branch' | ||
| 20 | schemes = ( | ||
| 21 | 'bzr', 'bzr+http', 'bzr+https', 'bzr+ssh', 'bzr+sftp', 'bzr+ftp', | ||
| 22 | 'bzr+lp', | ||
| 23 | ) | ||
| 24 | |||
| 25 | def __init__(self, url=None, *args, **kwargs): | ||
| 26 | super(Bazaar, self).__init__(url, *args, **kwargs) | ||
| 27 | # This is only needed for python <2.7.5 | ||
| 28 | # Register lp but do not expose as a scheme to support bzr+lp. | ||
| 29 | if getattr(urllib_parse, 'uses_fragment', None): | ||
| 30 | urllib_parse.uses_fragment.extend(['lp']) | ||
| 31 | |||
| 32 | def get_base_rev_args(self, rev): | ||
| 33 | return ['-r', rev] | ||
| 34 | |||
| 35 | def export(self, location): | ||
| 36 | """ | ||
| 37 | Export the Bazaar repository at the url to the destination location | ||
| 38 | """ | ||
| 39 | # Remove the location to make sure Bazaar can export it correctly | ||
| 40 | if os.path.exists(location): | ||
| 41 | rmtree(location) | ||
| 42 | |||
| 43 | with TempDirectory(kind="export") as temp_dir: | ||
| 44 | self.unpack(temp_dir.path) | ||
| 45 | |||
| 46 | self.run_command( | ||
| 47 | ['export', location], | ||
| 48 | cwd=temp_dir.path, show_stdout=False, | ||
| 49 | ) | ||
| 50 | |||
| 51 | def switch(self, dest, url, rev_options): | ||
| 52 | self.run_command(['switch', url], cwd=dest) | ||
| 53 | |||
| 54 | def update(self, dest, rev_options): | ||
| 55 | cmd_args = ['pull', '-q'] + rev_options.to_args() | ||
| 56 | self.run_command(cmd_args, cwd=dest) | ||
| 57 | |||
| 58 | def obtain(self, dest): | ||
| 59 | url, rev = self.get_url_rev() | ||
| 60 | rev_options = self.make_rev_options(rev) | ||
| 61 | if self.check_destination(dest, url, rev_options): | ||
| 62 | rev_display = rev_options.to_display() | ||
| 63 | logger.info( | ||
| 64 | 'Checking out %s%s to %s', | ||
| 65 | url, | ||
| 66 | rev_display, | ||
| 67 | display_path(dest), | ||
| 68 | ) | ||
| 69 | cmd_args = ['branch', '-q'] + rev_options.to_args() + [url, dest] | ||
| 70 | self.run_command(cmd_args) | ||
| 71 | |||
| 72 | def get_url_rev(self): | ||
| 73 | # hotfix the URL scheme: after removing bzr+ from bzr+ssh://, re-add it | ||
| 74 | url, rev = super(Bazaar, self).get_url_rev() | ||
| 75 | if url.startswith('ssh://'): | ||
| 76 | url = 'bzr+' + url | ||
| 77 | return url, rev | ||
| 78 | |||
| 79 | def get_url(self, location): | ||
| 80 | urls = self.run_command(['info'], show_stdout=False, cwd=location) | ||
| 81 | for line in urls.splitlines(): | ||
| 82 | line = line.strip() | ||
| 83 | for x in ('checkout of branch: ', | ||
| 84 | 'parent branch: '): | ||
| 85 | if line.startswith(x): | ||
| 86 | repo = line.split(x)[1] | ||
| 87 | if self._is_local_repository(repo): | ||
| 88 | return path_to_url(repo) | ||
| 89 | return repo | ||
| 90 | return None | ||
| 91 | |||
| 92 | def get_revision(self, location): | ||
| 93 | revision = self.run_command( | ||
| 94 | ['revno'], show_stdout=False, cwd=location, | ||
| 95 | ) | ||
| 96 | return revision.splitlines()[-1] | ||
| 97 | |||
| 98 | def get_src_requirement(self, dist, location): | ||
| 99 | repo = self.get_url(location) | ||
| 100 | if not repo: | ||
| 101 | return None | ||
| 102 | if not repo.lower().startswith('bzr:'): | ||
| 103 | repo = 'bzr+' + repo | ||
| 104 | egg_project_name = dist.egg_name().split('-', 1)[0] | ||
| 105 | current_rev = self.get_revision(location) | ||
| 106 | return '%s@%s#egg=%s' % (repo, current_rev, egg_project_name) | ||
| 107 | |||
| 108 | def is_commit_id_equal(self, dest, name): | ||
| 109 | """Always assume the versions don't match""" | ||
| 110 | return False | ||
| 111 | |||
| 112 | |||
| 113 | vcs.register(Bazaar) | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/vcs/git.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/vcs/git.py new file mode 100644 index 0000000..7a63dfa --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/vcs/git.py | |||
| @@ -0,0 +1,311 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | |||
| 3 | import logging | ||
| 4 | import os.path | ||
| 5 | import re | ||
| 6 | |||
| 7 | from pip._vendor.packaging.version import parse as parse_version | ||
| 8 | from pip._vendor.six.moves.urllib import parse as urllib_parse | ||
| 9 | from pip._vendor.six.moves.urllib import request as urllib_request | ||
| 10 | |||
| 11 | from pip._internal.compat import samefile | ||
| 12 | from pip._internal.exceptions import BadCommand | ||
| 13 | from pip._internal.utils.misc import display_path | ||
| 14 | from pip._internal.utils.temp_dir import TempDirectory | ||
| 15 | from pip._internal.vcs import VersionControl, vcs | ||
| 16 | |||
| 17 | urlsplit = urllib_parse.urlsplit | ||
| 18 | urlunsplit = urllib_parse.urlunsplit | ||
| 19 | |||
| 20 | |||
| 21 | logger = logging.getLogger(__name__) | ||
| 22 | |||
| 23 | |||
| 24 | HASH_REGEX = re.compile('[a-fA-F0-9]{40}') | ||
| 25 | |||
| 26 | |||
| 27 | def looks_like_hash(sha): | ||
| 28 | return bool(HASH_REGEX.match(sha)) | ||
| 29 | |||
| 30 | |||
| 31 | class Git(VersionControl): | ||
| 32 | name = 'git' | ||
| 33 | dirname = '.git' | ||
| 34 | repo_name = 'clone' | ||
| 35 | schemes = ( | ||
| 36 | 'git', 'git+http', 'git+https', 'git+ssh', 'git+git', 'git+file', | ||
| 37 | ) | ||
| 38 | # Prevent the user's environment variables from interfering with pip: | ||
| 39 | # https://github.com/pypa/pip/issues/1130 | ||
| 40 | unset_environ = ('GIT_DIR', 'GIT_WORK_TREE') | ||
| 41 | default_arg_rev = 'HEAD' | ||
| 42 | |||
| 43 | def __init__(self, url=None, *args, **kwargs): | ||
| 44 | |||
| 45 | # Works around an apparent Git bug | ||
| 46 | # (see http://article.gmane.org/gmane.comp.version-control.git/146500) | ||
| 47 | if url: | ||
| 48 | scheme, netloc, path, query, fragment = urlsplit(url) | ||
| 49 | if scheme.endswith('file'): | ||
| 50 | initial_slashes = path[:-len(path.lstrip('/'))] | ||
| 51 | newpath = ( | ||
| 52 | initial_slashes + | ||
| 53 | urllib_request.url2pathname(path) | ||
| 54 | .replace('\\', '/').lstrip('/') | ||
| 55 | ) | ||
| 56 | url = urlunsplit((scheme, netloc, newpath, query, fragment)) | ||
| 57 | after_plus = scheme.find('+') + 1 | ||
| 58 | url = scheme[:after_plus] + urlunsplit( | ||
| 59 | (scheme[after_plus:], netloc, newpath, query, fragment), | ||
| 60 | ) | ||
| 61 | |||
| 62 | super(Git, self).__init__(url, *args, **kwargs) | ||
| 63 | |||
| 64 | def get_base_rev_args(self, rev): | ||
| 65 | return [rev] | ||
| 66 | |||
| 67 | def get_git_version(self): | ||
| 68 | VERSION_PFX = 'git version ' | ||
| 69 | version = self.run_command(['version'], show_stdout=False) | ||
| 70 | if version.startswith(VERSION_PFX): | ||
| 71 | version = version[len(VERSION_PFX):].split()[0] | ||
| 72 | else: | ||
| 73 | version = '' | ||
| 74 | # Get the first 3 positions of the git version because | ||
| 75 | # on Windows it is x.y.z.windows.t, and this parses as | ||
| 76 | # LegacyVersion, which is always smaller than a Version. | ||
| 77 | version = '.'.join(version.split('.')[:3]) | ||
| 78 | return parse_version(version) | ||
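A sketch of the truncation the comment describes, with a made-up Windows version string:

    from pip._vendor.packaging.version import parse as parse_version

    raw = "git version 2.7.4.windows.1"
    version = raw[len("git version "):].split()[0]   # '2.7.4.windows.1'
    version = ".".join(version.split(".")[:3])       # '2.7.4'
    print(parse_version(version) >= parse_version("1.9.0"))  # True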
| 79 | |||
| 80 | def export(self, location): | ||
| 81 | """Export the Git repository at the url to the destination location""" | ||
| 82 | if not location.endswith('/'): | ||
| 83 | location = location + '/' | ||
| 84 | |||
| 85 | with TempDirectory(kind="export") as temp_dir: | ||
| 86 | self.unpack(temp_dir.path) | ||
| 87 | self.run_command( | ||
| 88 | ['checkout-index', '-a', '-f', '--prefix', location], | ||
| 89 | show_stdout=False, cwd=temp_dir.path | ||
| 90 | ) | ||
| 91 | |||
| 92 | def get_revision_sha(self, dest, rev): | ||
| 93 | """ | ||
| 94 | Return a commit hash for the given revision if it names a remote | ||
| 95 | branch or tag. Otherwise, return None. | ||
| 96 | |||
| 97 | Args: | ||
| 98 | dest: the repository directory. | ||
| 99 | rev: the revision name. | ||
| 100 | """ | ||
| 101 | # Pass rev to pre-filter the list. | ||
| 102 | output = self.run_command(['show-ref', rev], cwd=dest, | ||
| 103 | show_stdout=False, on_returncode='ignore') | ||
| 104 | refs = {} | ||
| 105 | for line in output.strip().splitlines(): | ||
| 106 | try: | ||
| 107 | sha, ref = line.split() | ||
| 108 | except ValueError: | ||
| 109 | # Include the offending line to simplify troubleshooting if | ||
| 110 | # this error ever occurs. | ||
| 111 | raise ValueError('unexpected show-ref line: {!r}'.format(line)) | ||
| 112 | |||
| 113 | refs[ref] = sha | ||
| 114 | |||
| 115 | branch_ref = 'refs/remotes/origin/{}'.format(rev) | ||
| 116 | tag_ref = 'refs/tags/{}'.format(rev) | ||
| 117 | |||
| 118 | return refs.get(branch_ref) or refs.get(tag_ref) | ||
| 119 | |||
| 120 | def check_rev_options(self, dest, rev_options): | ||
| 121 | """Check the revision options before checkout. | ||
| 122 | |||
| 123 | Returns a new RevOptions object for the SHA1 of the branch or tag | ||
| 124 | if found. | ||
| 125 | |||
| 126 | Args: | ||
| 127 | rev_options: a RevOptions object. | ||
| 128 | """ | ||
| 129 | rev = rev_options.arg_rev | ||
| 130 | sha = self.get_revision_sha(dest, rev) | ||
| 131 | |||
| 132 | if sha is not None: | ||
| 133 | return rev_options.make_new(sha) | ||
| 134 | |||
| 135 | # Do not show a warning for the common case of something that has | ||
| 136 | # the form of a Git commit hash. | ||
| 137 | if not looks_like_hash(rev): | ||
| 138 | logger.warning( | ||
| 139 | "Did not find branch or tag '%s', assuming revision or ref.", | ||
| 140 | rev, | ||
| 141 | ) | ||
| 142 | return rev_options | ||
| 143 | |||
| 144 | def is_commit_id_equal(self, dest, name): | ||
| 145 | """ | ||
| 146 | Return whether the current commit hash equals the given name. | ||
| 147 | |||
| 148 | Args: | ||
| 149 | dest: the repository directory. | ||
| 150 | name: a string name. | ||
| 151 | """ | ||
| 152 | if not name: | ||
| 153 | # Then avoid an unnecessary subprocess call. | ||
| 154 | return False | ||
| 155 | |||
| 156 | return self.get_revision(dest) == name | ||
| 157 | |||
| 158 | def switch(self, dest, url, rev_options): | ||
| 159 | self.run_command(['config', 'remote.origin.url', url], cwd=dest) | ||
| 160 | cmd_args = ['checkout', '-q'] + rev_options.to_args() | ||
| 161 | self.run_command(cmd_args, cwd=dest) | ||
| 162 | |||
| 163 | self.update_submodules(dest) | ||
| 164 | |||
| 165 | def update(self, dest, rev_options): | ||
| 166 | # First fetch changes from the default remote | ||
| 167 | if self.get_git_version() >= parse_version('1.9.0'): | ||
| 168 | # fetch tags in addition to everything else | ||
| 169 | self.run_command(['fetch', '-q', '--tags'], cwd=dest) | ||
| 170 | else: | ||
| 171 | self.run_command(['fetch', '-q'], cwd=dest) | ||
| 172 | # Then reset to wanted revision (maybe even origin/master) | ||
| 173 | rev_options = self.check_rev_options(dest, rev_options) | ||
| 174 | cmd_args = ['reset', '--hard', '-q'] + rev_options.to_args() | ||
| 175 | self.run_command(cmd_args, cwd=dest) | ||
| 176 | # update submodules | ||
| 177 | self.update_submodules(dest) | ||
| 178 | |||
| 179 | def obtain(self, dest): | ||
| 180 | url, rev = self.get_url_rev() | ||
| 181 | rev_options = self.make_rev_options(rev) | ||
| 182 | if self.check_destination(dest, url, rev_options): | ||
| 183 | rev_display = rev_options.to_display() | ||
| 184 | logger.info( | ||
| 185 | 'Cloning %s%s to %s', url, rev_display, display_path(dest), | ||
| 186 | ) | ||
| 187 | self.run_command(['clone', '-q', url, dest]) | ||
| 188 | |||
| 189 | if rev: | ||
| 190 | rev_options = self.check_rev_options(dest, rev_options) | ||
| 191 | # Only do a checkout if the current commit id doesn't match | ||
| 192 | # the requested revision. | ||
| 193 | if not self.is_commit_id_equal(dest, rev_options.rev): | ||
| 194 | rev = rev_options.rev | ||
| 195 | # Only fetch the revision if it's a ref | ||
| 196 | if rev.startswith('refs/'): | ||
| 197 | self.run_command( | ||
| 198 | ['fetch', '-q', url] + rev_options.to_args(), | ||
| 199 | cwd=dest, | ||
| 200 | ) | ||
| 201 | # Change the revision to the SHA of the ref we fetched | ||
| 202 | rev = 'FETCH_HEAD' | ||
| 203 | self.run_command(['checkout', '-q', rev], cwd=dest) | ||
| 204 | |||
| 205 | # repo may contain submodules | ||
| 206 | self.update_submodules(dest) | ||
| 207 | |||
| 208 | def get_url(self, location): | ||
| 209 | """Return URL of the first remote encountered.""" | ||
| 210 | remotes = self.run_command( | ||
| 211 | ['config', '--get-regexp', r'remote\..*\.url'], | ||
| 212 | show_stdout=False, cwd=location, | ||
| 213 | ) | ||
| 214 | remotes = remotes.splitlines() | ||
| 215 | found_remote = remotes[0] | ||
| 216 | for remote in remotes: | ||
| 217 | if remote.startswith('remote.origin.url '): | ||
| 218 | found_remote = remote | ||
| 219 | break | ||
| 220 | url = found_remote.split(' ')[1] | ||
| 221 | return url.strip() | ||
| 222 | |||
| 223 | def get_revision(self, location): | ||
| 224 | current_rev = self.run_command( | ||
| 225 | ['rev-parse', 'HEAD'], show_stdout=False, cwd=location, | ||
| 226 | ) | ||
| 227 | return current_rev.strip() | ||
| 228 | |||
| 229 | def _get_subdirectory(self, location): | ||
| 230 | """Return the relative path of setup.py to the git repo root.""" | ||
| 231 | # find the repo root | ||
| 232 | git_dir = self.run_command(['rev-parse', '--git-dir'], | ||
| 233 | show_stdout=False, cwd=location).strip() | ||
| 234 | if not os.path.isabs(git_dir): | ||
| 235 | git_dir = os.path.join(location, git_dir) | ||
| 236 | root_dir = os.path.join(git_dir, '..') | ||
| 237 | # find setup.py | ||
| 238 | orig_location = location | ||
| 239 | while not os.path.exists(os.path.join(location, 'setup.py')): | ||
| 240 | last_location = location | ||
| 241 | location = os.path.dirname(location) | ||
| 242 | if location == last_location: | ||
| 243 | # We've traversed up to the root of the filesystem without | ||
| 244 | # finding setup.py | ||
| 245 | logger.warning( | ||
| 246 | "Could not find setup.py for directory %s (tried all " | ||
| 247 | "parent directories)", | ||
| 248 | orig_location, | ||
| 249 | ) | ||
| 250 | return None | ||
| 251 | # relative path of setup.py to repo root | ||
| 252 | if samefile(root_dir, location): | ||
| 253 | return None | ||
| 254 | return os.path.relpath(location, root_dir) | ||
| 255 | |||
| 256 | def get_src_requirement(self, dist, location): | ||
| 257 | repo = self.get_url(location) | ||
| 258 | if not repo: | ||
| 259 | return None | ||
| 260 | if not repo.lower().startswith('git:'): | ||
| 261 | repo = 'git+' + repo | ||
| 262 | egg_project_name = dist.egg_name().split('-', 1)[0] | ||
| 263 | current_rev = self.get_revision(location) | ||
| 264 | req = '%s@%s#egg=%s' % (repo, current_rev, egg_project_name) | ||
| 265 | subdirectory = self._get_subdirectory(location) | ||
| 266 | if subdirectory: | ||
| 267 | req += '&subdirectory=' + subdirectory | ||
| 268 | return req | ||
| 269 | |||
| 270 | def get_url_rev(self): | ||
| 271 | """ | ||
| 272 | Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'. | ||
| 273 | That's required because although they use SSH they sometimes don't | ||
| 274 | work with an ssh:// scheme (e.g. GitHub). But we need a scheme for | ||
| 275 | parsing. Hence we remove it again afterwards and return it as a stub. | ||
| 276 | """ | ||
| 277 | if '://' not in self.url: | ||
| 278 | assert 'file:' not in self.url | ||
| 279 | self.url = self.url.replace('git+', 'git+ssh://') | ||
| 280 | url, rev = super(Git, self).get_url_rev() | ||
| 281 | url = url.replace('ssh://', '') | ||
| 282 | else: | ||
| 283 | url, rev = super(Git, self).get_url_rev() | ||
| 284 | |||
| 285 | return url, rev | ||
| 286 | |||
| 287 | def update_submodules(self, location): | ||
| 288 | if not os.path.exists(os.path.join(location, '.gitmodules')): | ||
| 289 | return | ||
| 290 | self.run_command( | ||
| 291 | ['submodule', 'update', '--init', '--recursive', '-q'], | ||
| 292 | cwd=location, | ||
| 293 | ) | ||
| 294 | |||
| 295 | @classmethod | ||
| 296 | def controls_location(cls, location): | ||
| 297 | if super(Git, cls).controls_location(location): | ||
| 298 | return True | ||
| 299 | try: | ||
| 300 | r = cls().run_command(['rev-parse'], | ||
| 301 | cwd=location, | ||
| 302 | show_stdout=False, | ||
| 303 | on_returncode='ignore') | ||
| 304 | return not r | ||
| 305 | except BadCommand: | ||
| 306 | logger.debug("could not determine if %s is under git control " | ||
| 307 | "because git is not available", location) | ||
| 308 | return False | ||
| 309 | |||
| 310 | |||
| 311 | vcs.register(Git) | ||
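A quick way to see what `get_revision_sha` above is doing: it maps `git show-ref` output lines to a `{ref: sha}` dict and prefers a remote branch over a tag. The sketch below is not pip's API; `resolve_rev` and the sample output (with made-up SHAs) are hypothetical stand-ins for illustration.

```python
# Standalone sketch of the ref resolution in Git.get_revision_sha above.
# The sample string imitates `git show-ref <rev>` output; the SHAs are fake.
SHOW_REF_OUTPUT = """\
1f2e3d4c5b6a79880990a0b1c2d3e4f5a6b7c8d9 refs/remotes/origin/master
aabbccddeeff00112233445566778899aabbccdd refs/tags/v1.0
"""

def resolve_rev(output, rev):
    refs = {}
    for line in output.strip().splitlines():
        sha, ref = line.split()  # same two-field parsing as the method
        refs[ref] = sha
    # Remote branches win over tags, mirroring the lookup order above.
    branch_ref = 'refs/remotes/origin/{}'.format(rev)
    tag_ref = 'refs/tags/{}'.format(rev)
    return refs.get(branch_ref) or refs.get(tag_ref)

print(resolve_rev(SHOW_REF_OUTPUT, 'master'))  # branch SHA
print(resolve_rev(SHOW_REF_OUTPUT, 'v1.0'))    # tag SHA
print(resolve_rev(SHOW_REF_OUTPUT, 'nope'))    # None -> caller falls back
```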
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/vcs/mercurial.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/vcs/mercurial.py new file mode 100644 index 0000000..3936473 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/vcs/mercurial.py | |||
| @@ -0,0 +1,105 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | |||
| 3 | import logging | ||
| 4 | import os | ||
| 5 | |||
| 6 | from pip._vendor.six.moves import configparser | ||
| 7 | |||
| 8 | from pip._internal.download import path_to_url | ||
| 9 | from pip._internal.utils.misc import display_path | ||
| 10 | from pip._internal.utils.temp_dir import TempDirectory | ||
| 11 | from pip._internal.vcs import VersionControl, vcs | ||
| 12 | |||
| 13 | logger = logging.getLogger(__name__) | ||
| 14 | |||
| 15 | |||
| 16 | class Mercurial(VersionControl): | ||
| 17 | name = 'hg' | ||
| 18 | dirname = '.hg' | ||
| 19 | repo_name = 'clone' | ||
| 20 | schemes = ('hg', 'hg+http', 'hg+https', 'hg+ssh', 'hg+static-http') | ||
| 21 | |||
| 22 | def get_base_rev_args(self, rev): | ||
| 23 | return [rev] | ||
| 24 | |||
| 25 | def export(self, location): | ||
| 26 | """Export the Hg repository at the url to the destination location""" | ||
| 27 | with TempDirectory(kind="export") as temp_dir: | ||
| 28 | self.unpack(temp_dir.path) | ||
| 29 | |||
| 30 | self.run_command( | ||
| 31 | ['archive', location], show_stdout=False, cwd=temp_dir.path | ||
| 32 | ) | ||
| 33 | |||
| 34 | def switch(self, dest, url, rev_options): | ||
| 35 | repo_config = os.path.join(dest, self.dirname, 'hgrc') | ||
| 36 | config = configparser.SafeConfigParser() | ||
| 37 | try: | ||
| 38 | config.read(repo_config) | ||
| 39 | config.set('paths', 'default', url) | ||
| 40 | with open(repo_config, 'w') as config_file: | ||
| 41 | config.write(config_file) | ||
| 42 | except (OSError, configparser.NoSectionError) as exc: | ||
| 43 | logger.warning( | ||
| 44 | 'Could not switch Mercurial repository to %s: %s', url, exc, | ||
| 45 | ) | ||
| 46 | else: | ||
| 47 | cmd_args = ['update', '-q'] + rev_options.to_args() | ||
| 48 | self.run_command(cmd_args, cwd=dest) | ||
| 49 | |||
| 50 | def update(self, dest, rev_options): | ||
| 51 | self.run_command(['pull', '-q'], cwd=dest) | ||
| 52 | cmd_args = ['update', '-q'] + rev_options.to_args() | ||
| 53 | self.run_command(cmd_args, cwd=dest) | ||
| 54 | |||
| 55 | def obtain(self, dest): | ||
| 56 | url, rev = self.get_url_rev() | ||
| 57 | rev_options = self.make_rev_options(rev) | ||
| 58 | if self.check_destination(dest, url, rev_options): | ||
| 59 | rev_display = rev_options.to_display() | ||
| 60 | logger.info( | ||
| 61 | 'Cloning hg %s%s to %s', | ||
| 62 | url, | ||
| 63 | rev_display, | ||
| 64 | display_path(dest), | ||
| 65 | ) | ||
| 66 | self.run_command(['clone', '--noupdate', '-q', url, dest]) | ||
| 67 | cmd_args = ['update', '-q'] + rev_options.to_args() | ||
| 68 | self.run_command(cmd_args, cwd=dest) | ||
| 69 | |||
| 70 | def get_url(self, location): | ||
| 71 | url = self.run_command( | ||
| 72 | ['showconfig', 'paths.default'], | ||
| 73 | show_stdout=False, cwd=location).strip() | ||
| 74 | if self._is_local_repository(url): | ||
| 75 | url = path_to_url(url) | ||
| 76 | return url.strip() | ||
| 77 | |||
| 78 | def get_revision(self, location): | ||
| 79 | current_revision = self.run_command( | ||
| 80 | ['parents', '--template={rev}'], | ||
| 81 | show_stdout=False, cwd=location).strip() | ||
| 82 | return current_revision | ||
| 83 | |||
| 84 | def get_revision_hash(self, location): | ||
| 85 | current_rev_hash = self.run_command( | ||
| 86 | ['parents', '--template={node}'], | ||
| 87 | show_stdout=False, cwd=location).strip() | ||
| 88 | return current_rev_hash | ||
| 89 | |||
| 90 | def get_src_requirement(self, dist, location): | ||
| 91 | repo = self.get_url(location) | ||
| 92 | if not repo: | ||
| 93 | return None | ||
| 94 | if not repo.lower().startswith('hg:'): | ||
| 95 | repo = 'hg+' + repo | ||
| 96 | egg_project_name = dist.egg_name().split('-', 1)[0] | ||
| 97 | current_rev_hash = self.get_revision_hash(location) | ||
| 98 | return '%s@%s#egg=%s' % (repo, current_rev_hash, egg_project_name) | ||
| 99 | |||
| 100 | def is_commit_id_equal(self, dest, name): | ||
| 101 | """Always assume the versions don't match""" | ||
| 102 | return False | ||
| 103 | |||
| 104 | |||
| 105 | vcs.register(Mercurial) | ||
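The interesting part of `Mercurial.switch` above is a plain configparser round-trip over `.hg/hgrc`. A minimal sketch, assuming Python 3's stdlib `configparser` in place of the vendored `six.moves` shim, with a made-up repository URL:

```python
import configparser
import io

# Read a minimal hgrc, repoint paths.default, and serialize it back,
# which is what switch() does against <repo>/.hg/hgrc on disk.
config = configparser.ConfigParser()
config.read_string("[paths]\ndefault = https://hg.example.com/old-repo\n")
config.set('paths', 'default', 'https://hg.example.com/new-repo')

buf = io.StringIO()
config.write(buf)
print(buf.getvalue())
# [paths]
# default = https://hg.example.com/new-repo
```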
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/vcs/subversion.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/vcs/subversion.py new file mode 100644 index 0000000..95e5440 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/vcs/subversion.py | |||
| @@ -0,0 +1,271 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | |||
| 3 | import logging | ||
| 4 | import os | ||
| 5 | import re | ||
| 6 | |||
| 7 | from pip._vendor.six.moves.urllib import parse as urllib_parse | ||
| 8 | |||
| 9 | from pip._internal.index import Link | ||
| 10 | from pip._internal.utils.logging import indent_log | ||
| 11 | from pip._internal.utils.misc import display_path, rmtree | ||
| 12 | from pip._internal.vcs import VersionControl, vcs | ||
| 13 | |||
| 14 | _svn_xml_url_re = re.compile('url="([^"]+)"') | ||
| 15 | _svn_rev_re = re.compile(r'committed-rev="(\d+)"') | ||
| 16 | _svn_url_re = re.compile(r'URL: (.+)') | ||
| 17 | _svn_revision_re = re.compile(r'Revision: (.+)') | ||
| 18 | _svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"') | ||
| 19 | _svn_info_xml_url_re = re.compile(r'<url>(.*)</url>') | ||
| 20 | |||
| 21 | |||
| 22 | logger = logging.getLogger(__name__) | ||
| 23 | |||
| 24 | |||
| 25 | class Subversion(VersionControl): | ||
| 26 | name = 'svn' | ||
| 27 | dirname = '.svn' | ||
| 28 | repo_name = 'checkout' | ||
| 29 | schemes = ('svn', 'svn+ssh', 'svn+http', 'svn+https', 'svn+svn') | ||
| 30 | |||
| 31 | def get_base_rev_args(self, rev): | ||
| 32 | return ['-r', rev] | ||
| 33 | |||
| 34 | def get_info(self, location): | ||
| 35 | """Returns (url, revision), where both are strings""" | ||
| 36 | assert not location.rstrip('/').endswith(self.dirname), \ | ||
| 37 | 'Bad directory: %s' % location | ||
| 38 | output = self.run_command( | ||
| 39 | ['info', location], | ||
| 40 | show_stdout=False, | ||
| 41 | extra_environ={'LANG': 'C'}, | ||
| 42 | ) | ||
| 43 | match = _svn_url_re.search(output) | ||
| 44 | if not match: | ||
| 45 | logger.warning( | ||
| 46 | 'Cannot determine URL of svn checkout %s', | ||
| 47 | display_path(location), | ||
| 48 | ) | ||
| 49 | logger.debug('Output that cannot be parsed: \n%s', output) | ||
| 50 | return None, None | ||
| 51 | url = match.group(1).strip() | ||
| 52 | match = _svn_revision_re.search(output) | ||
| 53 | if not match: | ||
| 54 | logger.warning( | ||
| 55 | 'Cannot determine revision of svn checkout %s', | ||
| 56 | display_path(location), | ||
| 57 | ) | ||
| 58 | logger.debug('Output that cannot be parsed: \n%s', output) | ||
| 59 | return url, None | ||
| 60 | return url, match.group(1) | ||
| 61 | |||
| 62 | def export(self, location): | ||
| 63 | """Export the svn repository at the url to the destination location""" | ||
| 64 | url, rev = self.get_url_rev() | ||
| 65 | rev_options = get_rev_options(self, url, rev) | ||
| 66 | url = self.remove_auth_from_url(url) | ||
| 67 | logger.info('Exporting svn repository %s to %s', url, location) | ||
| 68 | with indent_log(): | ||
| 69 | if os.path.exists(location): | ||
| 70 | # Subversion doesn't like to check out over an existing | ||
| 71 | # directory; --force fixes this, but was only added in svn 1.5 | ||
| 72 | rmtree(location) | ||
| 73 | cmd_args = ['export'] + rev_options.to_args() + [url, location] | ||
| 74 | self.run_command(cmd_args, show_stdout=False) | ||
| 75 | |||
| 76 | def switch(self, dest, url, rev_options): | ||
| 77 | cmd_args = ['switch'] + rev_options.to_args() + [url, dest] | ||
| 78 | self.run_command(cmd_args) | ||
| 79 | |||
| 80 | def update(self, dest, rev_options): | ||
| 81 | cmd_args = ['update'] + rev_options.to_args() + [dest] | ||
| 82 | self.run_command(cmd_args) | ||
| 83 | |||
| 84 | def obtain(self, dest): | ||
| 85 | url, rev = self.get_url_rev() | ||
| 86 | rev_options = get_rev_options(self, url, rev) | ||
| 87 | url = self.remove_auth_from_url(url) | ||
| 88 | if self.check_destination(dest, url, rev_options): | ||
| 89 | rev_display = rev_options.to_display() | ||
| 90 | logger.info( | ||
| 91 | 'Checking out %s%s to %s', | ||
| 92 | url, | ||
| 93 | rev_display, | ||
| 94 | display_path(dest), | ||
| 95 | ) | ||
| 96 | cmd_args = ['checkout', '-q'] + rev_options.to_args() + [url, dest] | ||
| 97 | self.run_command(cmd_args) | ||
| 98 | |||
| 99 | def get_location(self, dist, dependency_links): | ||
| 100 | for url in dependency_links: | ||
| 101 | egg_fragment = Link(url).egg_fragment | ||
| 102 | if not egg_fragment: | ||
| 103 | continue | ||
| 104 | if '-' in egg_fragment: | ||
| 105 | # FIXME: will this work when a package has - in the name? | ||
| 106 | key = '-'.join(egg_fragment.split('-')[:-1]).lower() | ||
| 107 | else: | ||
| 108 | key = egg_fragment | ||
| 109 | if key == dist.key: | ||
| 110 | return url.split('#', 1)[0] | ||
| 111 | return None | ||
| 112 | |||
| 113 | def get_revision(self, location): | ||
| 114 | """ | ||
| 115 | Return the maximum revision for all files under a given location | ||
| 116 | """ | ||
| 117 | # Note: taken from setuptools.command.egg_info | ||
| 118 | revision = 0 | ||
| 119 | |||
| 120 | for base, dirs, files in os.walk(location): | ||
| 121 | if self.dirname not in dirs: | ||
| 122 | dirs[:] = [] | ||
| 123 | continue # no sense walking uncontrolled subdirs | ||
| 124 | dirs.remove(self.dirname) | ||
| 125 | entries_fn = os.path.join(base, self.dirname, 'entries') | ||
| 126 | if not os.path.exists(entries_fn): | ||
| 127 | # FIXME: should we warn? | ||
| 128 | continue | ||
| 129 | |||
| 130 | dirurl, localrev = self._get_svn_url_rev(base) | ||
| 131 | |||
| 132 | if base == location: | ||
| 133 | base = dirurl + '/' # save the root url | ||
| 134 | elif not dirurl or not dirurl.startswith(base): | ||
| 135 | dirs[:] = [] | ||
| 136 | continue # not part of the same svn tree, skip it | ||
| 137 | revision = max(revision, localrev) | ||
| 138 | return revision | ||
| 139 | |||
| 140 | def get_url_rev(self): | ||
| 141 | # hotfix: the superclass strips 'svn+' from 'svn+ssh://'; re-add it | ||
| 142 | url, rev = super(Subversion, self).get_url_rev() | ||
| 143 | if url.startswith('ssh://'): | ||
| 144 | url = 'svn+' + url | ||
| 145 | return url, rev | ||
| 146 | |||
| 147 | def get_url(self, location): | ||
| 148 | # In cases where the source is in a subdirectory rather than alongside | ||
| 149 | # setup.py, we have to walk up the directory tree until we find a real | ||
| 150 | # setup.py | ||
| 151 | orig_location = location | ||
| 152 | while not os.path.exists(os.path.join(location, 'setup.py')): | ||
| 153 | last_location = location | ||
| 154 | location = os.path.dirname(location) | ||
| 155 | if location == last_location: | ||
| 156 | # We've traversed up to the root of the filesystem without | ||
| 157 | # finding setup.py | ||
| 158 | logger.warning( | ||
| 159 | "Could not find setup.py for directory %s (tried all " | ||
| 160 | "parent directories)", | ||
| 161 | orig_location, | ||
| 162 | ) | ||
| 163 | return None | ||
| 164 | |||
| 165 | return self._get_svn_url_rev(location)[0] | ||
| 166 | |||
| 167 | def _get_svn_url_rev(self, location): | ||
| 168 | from pip._internal.exceptions import InstallationError | ||
| 169 | |||
| 170 | entries_path = os.path.join(location, self.dirname, 'entries') | ||
| 171 | if os.path.exists(entries_path): | ||
| 172 | with open(entries_path) as f: | ||
| 173 | data = f.read() | ||
| 174 | else: # subversion >= 1.7 does not have the 'entries' file | ||
| 175 | data = '' | ||
| 176 | |||
| 177 | if (data.startswith('8') or | ||
| 178 | data.startswith('9') or | ||
| 179 | data.startswith('10')): | ||
| 180 | data = list(map(str.splitlines, data.split('\n\x0c\n'))) | ||
| 181 | del data[0][0] # get rid of the '8' | ||
| 182 | url = data[0][3] | ||
| 183 | revs = [int(d[9]) for d in data if len(d) > 9 and d[9]] + [0] | ||
| 184 | elif data.startswith('<?xml'): | ||
| 185 | match = _svn_xml_url_re.search(data) | ||
| 186 | if not match: | ||
| 187 | raise ValueError('Badly formatted data: %r' % data) | ||
| 188 | url = match.group(1) # get repository URL | ||
| 189 | revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)] + [0] | ||
| 190 | else: | ||
| 191 | try: | ||
| 192 | # subversion >= 1.7 | ||
| 193 | xml = self.run_command( | ||
| 194 | ['info', '--xml', location], | ||
| 195 | show_stdout=False, | ||
| 196 | ) | ||
| 197 | url = _svn_info_xml_url_re.search(xml).group(1) | ||
| 198 | revs = [ | ||
| 199 | int(m.group(1)) for m in _svn_info_xml_rev_re.finditer(xml) | ||
| 200 | ] | ||
| 201 | except InstallationError: | ||
| 202 | url, revs = None, [] | ||
| 203 | |||
| 204 | if revs: | ||
| 205 | rev = max(revs) | ||
| 206 | else: | ||
| 207 | rev = 0 | ||
| 208 | |||
| 209 | return url, rev | ||
| 210 | |||
| 211 | def get_src_requirement(self, dist, location): | ||
| 212 | repo = self.get_url(location) | ||
| 213 | if repo is None: | ||
| 214 | return None | ||
| 215 | # FIXME: why not project name? | ||
| 216 | egg_project_name = dist.egg_name().split('-', 1)[0] | ||
| 217 | rev = self.get_revision(location) | ||
| 218 | return 'svn+%s@%s#egg=%s' % (repo, rev, egg_project_name) | ||
| 219 | |||
| 220 | def is_commit_id_equal(self, dest, name): | ||
| 221 | """Always assume the versions don't match""" | ||
| 222 | return False | ||
| 223 | |||
| 224 | @staticmethod | ||
| 225 | def remove_auth_from_url(url): | ||
| 226 | # Return a copy of url with 'username:password@' removed. | ||
| 227 | # username/pass params are passed to subversion through flags | ||
| 228 | # and are not recognized in the url. | ||
| 229 | |||
| 230 | # parsed url | ||
| 231 | purl = urllib_parse.urlsplit(url) | ||
| 232 | stripped_netloc = \ | ||
| 233 | purl.netloc.split('@')[-1] | ||
| 234 | |||
| 235 | # stripped url | ||
| 236 | url_pieces = ( | ||
| 237 | purl.scheme, stripped_netloc, purl.path, purl.query, purl.fragment | ||
| 238 | ) | ||
| 239 | surl = urllib_parse.urlunsplit(url_pieces) | ||
| 240 | return surl | ||
| 241 | |||
| 242 | |||
| 243 | def get_rev_options(vcs, url, rev): | ||
| 244 | """ | ||
| 245 | Return a RevOptions object. | ||
| 246 | """ | ||
| 247 | r = urllib_parse.urlsplit(url) | ||
| 248 | if hasattr(r, 'username'): | ||
| 249 | # >= Python-2.5 | ||
| 250 | username, password = r.username, r.password | ||
| 251 | else: | ||
| 252 | netloc = r[1] | ||
| 253 | if '@' in netloc: | ||
| 254 | auth = netloc.split('@')[0] | ||
| 255 | if ':' in auth: | ||
| 256 | username, password = auth.split(':', 1) | ||
| 257 | else: | ||
| 258 | username, password = auth, None | ||
| 259 | else: | ||
| 260 | username, password = None, None | ||
| 261 | |||
| 262 | extra_args = [] | ||
| 263 | if username: | ||
| 264 | extra_args += ['--username', username] | ||
| 265 | if password: | ||
| 266 | extra_args += ['--password', password] | ||
| 267 | |||
| 268 | return vcs.make_rev_options(rev, extra_args=extra_args) | ||
| 269 | |||
| 270 | |||
| 271 | vcs.register(Subversion) | ||
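`remove_auth_from_url` above is self-contained enough to demonstrate directly. A sketch using the Python 3 stdlib `urllib.parse` (the vendored `six.moves` shim resolves to the same functions on Python 3); the URL and credentials are made up:

```python
from urllib.parse import urlsplit, urlunsplit

def remove_auth_from_url(url):
    # Keep only the host[:port] part of the netloc, dropping 'user:pass@'.
    purl = urlsplit(url)
    stripped_netloc = purl.netloc.split('@')[-1]
    return urlunsplit(
        (purl.scheme, stripped_netloc, purl.path, purl.query, purl.fragment)
    )

print(remove_auth_from_url('svn+https://user:secret@svn.example.com/repo/trunk'))
# svn+https://svn.example.com/repo/trunk
```

The credentials are not lost: `get_rev_options` extracts them from the same URL and passes them to svn via `--username`/`--password` flags instead.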
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/wheel.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/wheel.py new file mode 100644 index 0000000..36459dd --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/wheel.py | |||
| @@ -0,0 +1,817 @@ | |||
| 1 | """ | ||
| 2 | Support for installing and building the "wheel" binary package format. | ||
| 3 | """ | ||
| 4 | from __future__ import absolute_import | ||
| 5 | |||
| 6 | import collections | ||
| 7 | import compileall | ||
| 8 | import copy | ||
| 9 | import csv | ||
| 10 | import hashlib | ||
| 11 | import logging | ||
| 12 | import os.path | ||
| 13 | import re | ||
| 14 | import shutil | ||
| 15 | import stat | ||
| 16 | import sys | ||
| 17 | import warnings | ||
| 18 | from base64 import urlsafe_b64encode | ||
| 19 | from email.parser import Parser | ||
| 20 | |||
| 21 | from pip._vendor import pkg_resources | ||
| 22 | from pip._vendor.distlib.scripts import ScriptMaker | ||
| 23 | from pip._vendor.packaging.utils import canonicalize_name | ||
| 24 | from pip._vendor.six import StringIO | ||
| 25 | |||
| 26 | from pip._internal import pep425tags | ||
| 27 | from pip._internal.build_env import BuildEnvironment | ||
| 28 | from pip._internal.download import path_to_url, unpack_url | ||
| 29 | from pip._internal.exceptions import ( | ||
| 30 | InstallationError, InvalidWheelFilename, UnsupportedWheel, | ||
| 31 | ) | ||
| 32 | from pip._internal.locations import ( | ||
| 33 | PIP_DELETE_MARKER_FILENAME, distutils_scheme, | ||
| 34 | ) | ||
| 35 | from pip._internal.utils.logging import indent_log | ||
| 36 | from pip._internal.utils.misc import ( | ||
| 37 | call_subprocess, captured_stdout, ensure_dir, read_chunks, | ||
| 38 | ) | ||
| 39 | from pip._internal.utils.setuptools_build import SETUPTOOLS_SHIM | ||
| 40 | from pip._internal.utils.temp_dir import TempDirectory | ||
| 41 | from pip._internal.utils.typing import MYPY_CHECK_RUNNING | ||
| 42 | from pip._internal.utils.ui import open_spinner | ||
| 43 | |||
| 44 | if MYPY_CHECK_RUNNING: | ||
| 45 | from typing import Dict, List, Optional | ||
| 46 | |||
| 47 | wheel_ext = '.whl' | ||
| 48 | |||
| 49 | VERSION_COMPATIBLE = (1, 0) | ||
| 50 | |||
| 51 | |||
| 52 | logger = logging.getLogger(__name__) | ||
| 53 | |||
| 54 | |||
| 55 | def rehash(path, algo='sha256', blocksize=1 << 20): | ||
| 56 | """Return (hash, length) for path using hashlib.new(algo)""" | ||
| 57 | h = hashlib.new(algo) | ||
| 58 | length = 0 | ||
| 59 | with open(path, 'rb') as f: | ||
| 60 | for block in read_chunks(f, size=blocksize): | ||
| 61 | length += len(block) | ||
| 62 | h.update(block) | ||
| 63 | digest = 'sha256=' + urlsafe_b64encode( | ||
| 64 | h.digest() | ||
| 65 | ).decode('latin1').rstrip('=') | ||
| 66 | return (digest, length) | ||
| 67 | |||
| 68 | |||
| 69 | def open_for_csv(name, mode): | ||
| 70 | if sys.version_info[0] < 3: | ||
| 71 | nl = {} | ||
| 72 | bin = 'b' | ||
| 73 | else: | ||
| 74 | nl = {'newline': ''} | ||
| 75 | bin = '' | ||
| 76 | return open(name, mode + bin, **nl) | ||
| 77 | |||
| 78 | |||
| 79 | def fix_script(path): | ||
| 80 | """Replace #!python with #!/path/to/python | ||
| 81 | Return True if file was changed.""" | ||
| 82 | # XXX RECORD hashes will need to be updated | ||
| 83 | if os.path.isfile(path): | ||
| 84 | with open(path, 'rb') as script: | ||
| 85 | firstline = script.readline() | ||
| 86 | if not firstline.startswith(b'#!python'): | ||
| 87 | return False | ||
| 88 | exename = sys.executable.encode(sys.getfilesystemencoding()) | ||
| 89 | firstline = b'#!' + exename + os.linesep.encode("ascii") | ||
| 90 | rest = script.read() | ||
| 91 | with open(path, 'wb') as script: | ||
| 92 | script.write(firstline) | ||
| 93 | script.write(rest) | ||
| 94 | return True | ||
| 95 | |||
| 96 | |||
| 97 | dist_info_re = re.compile(r"""^(?P<namever>(?P<name>.+?)(-(?P<ver>.+?))?) | ||
| 98 | \.dist-info$""", re.VERBOSE) | ||
| 99 | |||
| 100 | |||
| 101 | def root_is_purelib(name, wheeldir): | ||
| 102 | """ | ||
| 103 | Return True if the extracted wheel in wheeldir should go into purelib. | ||
| 104 | """ | ||
| 105 | name_folded = name.replace("-", "_") | ||
| 106 | for item in os.listdir(wheeldir): | ||
| 107 | match = dist_info_re.match(item) | ||
| 108 | if match and match.group('name') == name_folded: | ||
| 109 | with open(os.path.join(wheeldir, item, 'WHEEL')) as wheel: | ||
| 110 | for line in wheel: | ||
| 111 | line = line.lower().rstrip() | ||
| 112 | if line == "root-is-purelib: true": | ||
| 113 | return True | ||
| 114 | return False | ||
| 115 | |||
| 116 | |||
| 117 | def get_entrypoints(filename): | ||
| 118 | if not os.path.exists(filename): | ||
| 119 | return {}, {} | ||
| 120 | |||
| 121 | # This is done because you can pass a string to entry_points wrappers which | ||
| 122 | # means that they may or may not be valid INI files. The attempt here is to | ||
| 123 | # strip leading and trailing whitespace in order to make them valid INI | ||
| 124 | # files. | ||
| 125 | with open(filename) as fp: | ||
| 126 | data = StringIO() | ||
| 127 | for line in fp: | ||
| 128 | data.write(line.strip()) | ||
| 129 | data.write("\n") | ||
| 130 | data.seek(0) | ||
| 131 | |||
| 132 | # get the entry points and then the script names | ||
| 133 | entry_points = pkg_resources.EntryPoint.parse_map(data) | ||
| 134 | console = entry_points.get('console_scripts', {}) | ||
| 135 | gui = entry_points.get('gui_scripts', {}) | ||
| 136 | |||
| 137 | def _split_ep(s): | ||
| 138 | """get the string representation of EntryPoint, remove space and split | ||
| 139 | on '='""" | ||
| 140 | return str(s).replace(" ", "").split("=") | ||
| 141 | |||
| 142 | # convert the EntryPoint objects into strings with module:function | ||
| 143 | console = dict(_split_ep(v) for v in console.values()) | ||
| 144 | gui = dict(_split_ep(v) for v in gui.values()) | ||
| 145 | return console, gui | ||
| 146 | |||
| 147 | |||
| 148 | def message_about_scripts_not_on_PATH(scripts): | ||
| 149 | # type: (List[str]) -> Optional[str] | ||
| 150 | """Determine if any scripts are not on PATH and format a warning. | ||
| 151 | |||
| 152 | Returns a warning message if one or more scripts are not on PATH, | ||
| 153 | otherwise None. | ||
| 154 | """ | ||
| 155 | if not scripts: | ||
| 156 | return None | ||
| 157 | |||
| 158 | # Group scripts by the path they were installed in | ||
| 159 | grouped_by_dir = collections.defaultdict(set) # type: Dict[str, set] | ||
| 160 | for destfile in scripts: | ||
| 161 | parent_dir = os.path.dirname(destfile) | ||
| 162 | script_name = os.path.basename(destfile) | ||
| 163 | grouped_by_dir[parent_dir].add(script_name) | ||
| 164 | |||
| 165 | # We don't want to warn for directories that are on PATH. | ||
| 166 | not_warn_dirs = [ | ||
| 167 | os.path.normcase(i) for i in os.environ["PATH"].split(os.pathsep) | ||
| 168 | ] | ||
| 169 | # If an executable sits with sys.executable, we don't warn for it. | ||
| 170 | # This covers the case of venv invocations without activating the venv. | ||
| 171 | not_warn_dirs.append(os.path.normcase(os.path.dirname(sys.executable))) | ||
| 172 | warn_for = { | ||
| 173 | parent_dir: scripts for parent_dir, scripts in grouped_by_dir.items() | ||
| 174 | if os.path.normcase(parent_dir) not in not_warn_dirs | ||
| 175 | } | ||
| 176 | if not warn_for: | ||
| 177 | return None | ||
| 178 | |||
| 179 | # Format a message | ||
| 180 | msg_lines = [] | ||
| 181 | for parent_dir, scripts in warn_for.items(): | ||
| 182 | scripts = sorted(scripts) | ||
| 183 | if len(scripts) == 1: | ||
| 184 | start_text = "script {} is".format(scripts[0]) | ||
| 185 | else: | ||
| 186 | start_text = "scripts {} are".format( | ||
| 187 | ", ".join(scripts[:-1]) + " and " + scripts[-1] | ||
| 188 | ) | ||
| 189 | |||
| 190 | msg_lines.append( | ||
| 191 | "The {} installed in '{}' which is not on PATH." | ||
| 192 | .format(start_text, parent_dir) | ||
| 193 | ) | ||
| 194 | |||
| 195 | last_line_fmt = ( | ||
| 196 | "Consider adding {} to PATH or, if you prefer " | ||
| 197 | "to suppress this warning, use --no-warn-script-location." | ||
| 198 | ) | ||
| 199 | if len(msg_lines) == 1: | ||
| 200 | msg_lines.append(last_line_fmt.format("this directory")) | ||
| 201 | else: | ||
| 202 | msg_lines.append(last_line_fmt.format("these directories")) | ||
| 203 | |||
| 204 | # Returns the formatted multiline message | ||
| 205 | return "\n".join(msg_lines) | ||
| 206 | |||
| 207 | |||
| 208 | def move_wheel_files(name, req, wheeldir, user=False, home=None, root=None, | ||
| 209 | pycompile=True, scheme=None, isolated=False, prefix=None, | ||
| 210 | warn_script_location=True): | ||
| 211 | """Install a wheel""" | ||
| 212 | |||
| 213 | if not scheme: | ||
| 214 | scheme = distutils_scheme( | ||
| 215 | name, user=user, home=home, root=root, isolated=isolated, | ||
| 216 | prefix=prefix, | ||
| 217 | ) | ||
| 218 | |||
| 219 | if root_is_purelib(name, wheeldir): | ||
| 220 | lib_dir = scheme['purelib'] | ||
| 221 | else: | ||
| 222 | lib_dir = scheme['platlib'] | ||
| 223 | |||
| 224 | info_dir = [] | ||
| 225 | data_dirs = [] | ||
| 226 | source = wheeldir.rstrip(os.path.sep) + os.path.sep | ||
| 227 | |||
| 228 | # Record details of the files moved | ||
| 229 | # installed = files copied from the wheel to the destination | ||
| 230 | # changed = files changed while installing (scripts #! line typically) | ||
| 231 | # generated = files newly generated during the install (script wrappers) | ||
| 232 | installed = {} | ||
| 233 | changed = set() | ||
| 234 | generated = [] | ||
| 235 | |||
| 236 | # Compile all of the pyc files that we're going to be installing | ||
| 237 | if pycompile: | ||
| 238 | with captured_stdout() as stdout: | ||
| 239 | with warnings.catch_warnings(): | ||
| 240 | warnings.filterwarnings('ignore') | ||
| 241 | compileall.compile_dir(source, force=True, quiet=True) | ||
| 242 | logger.debug(stdout.getvalue()) | ||
| 243 | |||
| 244 | def normpath(src, p): | ||
| 245 | return os.path.relpath(src, p).replace(os.path.sep, '/') | ||
| 246 | |||
| 247 | def record_installed(srcfile, destfile, modified=False): | ||
| 248 | """Map archive RECORD paths to installation RECORD paths.""" | ||
| 249 | oldpath = normpath(srcfile, wheeldir) | ||
| 250 | newpath = normpath(destfile, lib_dir) | ||
| 251 | installed[oldpath] = newpath | ||
| 252 | if modified: | ||
| 253 | changed.add(destfile) | ||
| 254 | |||
| 255 | def clobber(source, dest, is_base, fixer=None, filter=None): | ||
| 256 | ensure_dir(dest) # common for the 'include' path | ||
| 257 | |||
| 258 | for dir, subdirs, files in os.walk(source): | ||
| 259 | basedir = dir[len(source):].lstrip(os.path.sep) | ||
| 260 | destdir = os.path.join(dest, basedir) | ||
| 261 | if is_base and basedir.split(os.path.sep, 1)[0].endswith('.data'): | ||
| 262 | continue | ||
| 263 | for s in subdirs: | ||
| 264 | destsubdir = os.path.join(dest, basedir, s) | ||
| 265 | if is_base and basedir == '' and destsubdir.endswith('.data'): | ||
| 266 | data_dirs.append(s) | ||
| 267 | continue | ||
| 268 | elif (is_base and | ||
| 269 | s.endswith('.dist-info') and | ||
| 270 | canonicalize_name(s).startswith( | ||
| 271 | canonicalize_name(req.name))): | ||
| 272 | assert not info_dir, ('Multiple .dist-info directories: ' + | ||
| 273 | destsubdir + ', ' + | ||
| 274 | ', '.join(info_dir)) | ||
| 275 | info_dir.append(destsubdir) | ||
| 276 | for f in files: | ||
| 277 | # Skip unwanted files | ||
| 278 | if filter and filter(f): | ||
| 279 | continue | ||
| 280 | srcfile = os.path.join(dir, f) | ||
| 281 | destfile = os.path.join(dest, basedir, f) | ||
| 282 | # directory creation is lazy and after the file filtering above | ||
| 283 | # to ensure we don't install empty dirs; empty dirs can't be | ||
| 284 | # uninstalled. | ||
| 285 | ensure_dir(destdir) | ||
| 286 | |||
| 287 | # We use copyfile (not move, copy, or copy2) to be extra sure | ||
| 288 | # that we are not moving directories over (copyfile fails for | ||
| 289 | # directories) as well as to ensure that we are not copying | ||
| 290 | # over any metadata because we want more control over what | ||
| 291 | # metadata we actually copy over. | ||
| 292 | shutil.copyfile(srcfile, destfile) | ||
| 293 | |||
| 294 | # Copy over the metadata for the file, currently this only | ||
| 295 | # includes the atime and mtime. | ||
| 296 | st = os.stat(srcfile) | ||
| 297 | if hasattr(os, "utime"): | ||
| 298 | os.utime(destfile, (st.st_atime, st.st_mtime)) | ||
| 299 | |||
| 300 | # If our file is executable, then make our destination file | ||
| 301 | # executable. | ||
| 302 | if os.access(srcfile, os.X_OK): | ||
| 303 | st = os.stat(srcfile) | ||
| 304 | permissions = ( | ||
| 305 | st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH | ||
| 306 | ) | ||
| 307 | os.chmod(destfile, permissions) | ||
| 308 | |||
| 309 | changed = False | ||
| 310 | if fixer: | ||
| 311 | changed = fixer(destfile) | ||
| 312 | record_installed(srcfile, destfile, changed) | ||
| 313 | |||
| 314 | clobber(source, lib_dir, True) | ||
| 315 | |||
| 316 | assert info_dir, "%s .dist-info directory not found" % req | ||
| 317 | |||
| 318 | # Get the defined entry points | ||
| 319 | ep_file = os.path.join(info_dir[0], 'entry_points.txt') | ||
| 320 | console, gui = get_entrypoints(ep_file) | ||
| 321 | |||
| 322 | def is_entrypoint_wrapper(name): | ||
| 323 | # EP, EP.exe and EP-script.py are scripts generated for | ||
| 324 | # entry point EP by setuptools | ||
| 325 | if name.lower().endswith('.exe'): | ||
| 326 | matchname = name[:-4] | ||
| 327 | elif name.lower().endswith('-script.py'): | ||
| 328 | matchname = name[:-10] | ||
| 329 | elif name.lower().endswith(".pya"): | ||
| 330 | matchname = name[:-4] | ||
| 331 | else: | ||
| 332 | matchname = name | ||
| 333 | # Ignore setuptools-generated scripts | ||
| 334 | return (matchname in console or matchname in gui) | ||
| 335 | |||
| 336 | for datadir in data_dirs: | ||
| 337 | fixer = None | ||
| 338 | filter = None | ||
| 339 | for subdir in os.listdir(os.path.join(wheeldir, datadir)): | ||
| 340 | fixer = None | ||
| 341 | if subdir == 'scripts': | ||
| 342 | fixer = fix_script | ||
| 343 | filter = is_entrypoint_wrapper | ||
| 344 | source = os.path.join(wheeldir, datadir, subdir) | ||
| 345 | dest = scheme[subdir] | ||
| 346 | clobber(source, dest, False, fixer=fixer, filter=filter) | ||
| 347 | |||
| 348 | maker = ScriptMaker(None, scheme['scripts']) | ||
| 349 | |||
| 350 | # Ensure old scripts are overwritten. | ||
| 351 | # See https://github.com/pypa/pip/issues/1800 | ||
| 352 | maker.clobber = True | ||
| 353 | |||
| 354 | # Ensure we don't generate any variants for scripts because this is almost | ||
| 355 | # never what somebody wants. | ||
| 356 | # See https://bitbucket.org/pypa/distlib/issue/35/ | ||
| 357 | maker.variants = {''} | ||
| 358 | |||
| 359 | # This is required because otherwise distlib creates scripts that are not | ||
| 360 | # executable. | ||
| 361 | # See https://bitbucket.org/pypa/distlib/issue/32/ | ||
| 362 | maker.set_mode = True | ||
| 363 | |||
| 364 | # Simplify the script and fix the fact that the default script swallows | ||
| 365 | # every single stack trace. | ||
| 366 | # See https://bitbucket.org/pypa/distlib/issue/34/ | ||
| 367 | # See https://bitbucket.org/pypa/distlib/issue/33/ | ||
| 368 | def _get_script_text(entry): | ||
| 369 | if entry.suffix is None: | ||
| 370 | raise InstallationError( | ||
| 371 | "Invalid script entry point: %s for req: %s - A callable " | ||
| 372 | "suffix is required. Cf https://packaging.python.org/en/" | ||
| 373 | "latest/distributing.html#console-scripts for more " | ||
| 374 | "information." % (entry, req) | ||
| 375 | ) | ||
| 376 | return maker.script_template % { | ||
| 377 | "module": entry.prefix, | ||
| 378 | "import_name": entry.suffix.split(".")[0], | ||
| 379 | "func": entry.suffix, | ||
| 380 | } | ||
| 381 | |||
| 382 | maker._get_script_text = _get_script_text | ||
| 383 | maker.script_template = r"""# -*- coding: utf-8 -*- | ||
| 384 | import re | ||
| 385 | import sys | ||
| 386 | |||
| 387 | from %(module)s import %(import_name)s | ||
| 388 | |||
| 389 | if __name__ == '__main__': | ||
| 390 | sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) | ||
| 391 | sys.exit(%(func)s()) | ||
| 392 | """ | ||
| 393 | |||
| 394 | # Special case pip and setuptools to generate versioned wrappers | ||
| 395 | # | ||
| 396 | # The issue is that some projects (specifically, pip and setuptools) use | ||
| 397 | # code in setup.py to create "versioned" entry points - pip2.7 on Python | ||
| 398 | # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into | ||
| 399 | # the wheel metadata at build time, and so if the wheel is installed with | ||
| 400 | # a *different* version of Python the entry points will be wrong. The | ||
| 401 | # correct fix for this is to enhance the metadata to be able to describe | ||
| 402 | # such versioned entry points, but that won't happen till Metadata 2.0 is | ||
| 403 | # available. | ||
| 404 | # In the meantime, projects using versioned entry points will either have | ||
| 405 | # incorrect versioned entry points, or they will not be able to distribute | ||
| 406 | # "universal" wheels (i.e., they will need a wheel per Python version). | ||
| 407 | # | ||
| 408 | # Because setuptools and pip are bundled with _ensurepip and virtualenv, | ||
| 409 | # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we | ||
| 410 | # override the versioned entry points in the wheel and generate the | ||
| 411 | # correct ones. This code is purely a short-term measure until Metadata 2.0 | ||
| 412 | # is available. | ||
| 413 | # | ||
| 414 | # Adding to the level of hack in this section of code: in order to | ||
| 415 | # support ensurepip, this code will look for an ``ENSUREPIP_OPTIONS`` | ||
| 416 | # environment variable which controls which versioned scripts get installed. | ||
| 417 | # | ||
| 418 | # ENSUREPIP_OPTIONS=altinstall | ||
| 419 | # - Only pipX.Y and easy_install-X.Y will be generated and installed | ||
| 420 | # ENSUREPIP_OPTIONS=install | ||
| 421 | # - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note | ||
| 422 | # that this behavior applies whenever ENSUREPIP_OPTIONS is set | ||
| 423 | # to anything other than altinstall | ||
| 424 | # DEFAULT | ||
| 425 | # - The default behavior is to install pip, pipX, pipX.Y, easy_install | ||
| 426 | # and easy_install-X.Y. | ||
| 427 | pip_script = console.pop('pip', None) | ||
| 428 | if pip_script: | ||
| 429 | if "ENSUREPIP_OPTIONS" not in os.environ: | ||
| 430 | spec = 'pip = ' + pip_script | ||
| 431 | generated.extend(maker.make(spec)) | ||
| 432 | |||
| 433 | if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall": | ||
| 434 | spec = 'pip%s = %s' % (sys.version[:1], pip_script) | ||
| 435 | generated.extend(maker.make(spec)) | ||
| 436 | |||
| 437 | spec = 'pip%s = %s' % (sys.version[:3], pip_script) | ||
| 438 | generated.extend(maker.make(spec)) | ||
| 439 | # Delete any other versioned pip entry points | ||
| 440 | pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)] | ||
| 441 | for k in pip_ep: | ||
| 442 | del console[k] | ||
| 443 | easy_install_script = console.pop('easy_install', None) | ||
| 444 | if easy_install_script: | ||
| 445 | if "ENSUREPIP_OPTIONS" not in os.environ: | ||
| 446 | spec = 'easy_install = ' + easy_install_script | ||
| 447 | generated.extend(maker.make(spec)) | ||
| 448 | |||
| 449 | spec = 'easy_install-%s = %s' % (sys.version[:3], easy_install_script) | ||
| 450 | generated.extend(maker.make(spec)) | ||
| 451 | # Delete any other versioned easy_install entry points | ||
| 452 | easy_install_ep = [ | ||
| 453 | k for k in console if re.match(r'easy_install(-\d\.\d)?$', k) | ||
| 454 | ] | ||
| 455 | for k in easy_install_ep: | ||
| 456 | del console[k] | ||
| 457 | |||
| 458 | # Generate the console and GUI entry points specified in the wheel | ||
| 459 | if len(console) > 0: | ||
| 460 | generated_console_scripts = maker.make_multiple( | ||
| 461 | ['%s = %s' % kv for kv in console.items()] | ||
| 462 | ) | ||
| 463 | generated.extend(generated_console_scripts) | ||
| 464 | |||
| 465 | if warn_script_location: | ||
| 466 | msg = message_about_scripts_not_on_PATH(generated_console_scripts) | ||
| 467 | if msg is not None: | ||
| 468 | logger.warning(msg) | ||
| 469 | |||
| 470 | if len(gui) > 0: | ||
| 471 | generated.extend( | ||
| 472 | maker.make_multiple( | ||
| 473 | ['%s = %s' % kv for kv in gui.items()], | ||
| 474 | {'gui': True} | ||
| 475 | ) | ||
| 476 | ) | ||
| 477 | |||
| 478 | # Record pip as the installer | ||
| 479 | installer = os.path.join(info_dir[0], 'INSTALLER') | ||
| 480 | temp_installer = os.path.join(info_dir[0], 'INSTALLER.pip') | ||
| 481 | with open(temp_installer, 'wb') as installer_file: | ||
| 482 | installer_file.write(b'pip\n') | ||
| 483 | shutil.move(temp_installer, installer) | ||
| 484 | generated.append(installer) | ||
| 485 | |||
| 486 | # Record details of all files installed | ||
| 487 | record = os.path.join(info_dir[0], 'RECORD') | ||
| 488 | temp_record = os.path.join(info_dir[0], 'RECORD.pip') | ||
| 489 | with open_for_csv(record, 'r') as record_in: | ||
| 490 | with open_for_csv(temp_record, 'w+') as record_out: | ||
| 491 | reader = csv.reader(record_in) | ||
| 492 | writer = csv.writer(record_out) | ||
| 493 | for row in reader: | ||
| 494 | row[0] = installed.pop(row[0], row[0]) | ||
| 495 | if row[0] in changed: | ||
| 496 | row[1], row[2] = rehash(row[0]) | ||
| 497 | writer.writerow(row) | ||
| 498 | for f in generated: | ||
| 499 | h, l = rehash(f) | ||
| 500 | writer.writerow((normpath(f, lib_dir), h, l)) | ||
| 501 | for f in installed: | ||
| 502 | writer.writerow((installed[f], '', '')) | ||
| 503 | shutil.move(temp_record, record) | ||
| 504 | |||
| 505 | |||
| 506 | def wheel_version(source_dir): | ||
| 507 | """ | ||
| 508 | Return the Wheel-Version of an extracted wheel, if possible. | ||
| 509 | |||
| 510 | Otherwise, return False if we couldn't parse / extract it. | ||
| 511 | """ | ||
| 512 | try: | ||
| 513 | dist = [d for d in pkg_resources.find_on_path(None, source_dir)][0] | ||
| 514 | |||
| 515 | wheel_data = dist.get_metadata('WHEEL') | ||
| 516 | wheel_data = Parser().parsestr(wheel_data) | ||
| 517 | |||
| 518 | version = wheel_data['Wheel-Version'].strip() | ||
| 519 | version = tuple(map(int, version.split('.'))) | ||
| 520 | return version | ||
| 521 | except Exception: | ||
| 522 | return False | ||
| 523 | |||
| 524 | |||
| 525 | def check_compatibility(version, name): | ||
| 526 | """ | ||
| 527 | Raises errors or warns if called with an incompatible Wheel-Version. | ||
| 528 | |||
| 529 | Pip should refuse to install a Wheel-Version that's a major series | ||
| 530 | ahead of what it's compatible with (e.g 2.0 > 1.1); and warn when | ||
| 531 | installing a version only minor version ahead (e.g 1.2 > 1.1). | ||
| 532 | |||
| 533 | version: a 2-tuple representing a Wheel-Version (Major, Minor) | ||
| 534 | name: name of wheel or package to raise exception about | ||
| 535 | |||
| 536 | :raises UnsupportedWheel: when an incompatible Wheel-Version is given | ||
| 537 | """ | ||
| 538 | if not version: | ||
| 539 | raise UnsupportedWheel( | ||
| 540 | "%s is in an unsupported or invalid wheel" % name | ||
| 541 | ) | ||
| 542 | if version[0] > VERSION_COMPATIBLE[0]: | ||
| 543 | raise UnsupportedWheel( | ||
| 544 | "%s's Wheel-Version (%s) is not compatible with this version " | ||
| 545 | "of pip" % (name, '.'.join(map(str, version))) | ||
| 546 | ) | ||
| 547 | elif version > VERSION_COMPATIBLE: | ||
| 548 | logger.warning( | ||
| 549 | 'Installing from a newer Wheel-Version (%s)', | ||
| 550 | '.'.join(map(str, version)), | ||
| 551 | ) | ||
| 552 | |||
| 553 | |||
| 554 | class Wheel(object): | ||
| 555 | """A wheel file""" | ||
| 556 | |||
| 557 | # TODO: maybe move the install code into this class | ||
| 558 | |||
| 559 | wheel_file_re = re.compile( | ||
| 560 | r"""^(?P<namever>(?P<name>.+?)-(?P<ver>.*?)) | ||
| 561 | ((-(?P<build>\d[^-]*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?) | ||
| 562 | \.whl|\.dist-info)$""", | ||
| 563 | re.VERBOSE | ||
| 564 | ) | ||
| 565 | |||
| 566 | def __init__(self, filename): | ||
| 567 | """ | ||
| 568 | :raises InvalidWheelFilename: when the filename is invalid for a wheel | ||
| 569 | """ | ||
| 570 | wheel_info = self.wheel_file_re.match(filename) | ||
| 571 | if not wheel_info: | ||
| 572 | raise InvalidWheelFilename( | ||
| 573 | "%s is not a valid wheel filename." % filename | ||
| 574 | ) | ||
| 575 | self.filename = filename | ||
| 576 | self.name = wheel_info.group('name').replace('_', '-') | ||
| 577 | # we'll assume "_" means "-" due to wheel naming scheme | ||
| 578 | # (https://github.com/pypa/pip/issues/1150) | ||
| 579 | self.version = wheel_info.group('ver').replace('_', '-') | ||
| 580 | self.build_tag = wheel_info.group('build') | ||
| 581 | self.pyversions = wheel_info.group('pyver').split('.') | ||
| 582 | self.abis = wheel_info.group('abi').split('.') | ||
| 583 | self.plats = wheel_info.group('plat').split('.') | ||
| 584 | |||
| 585 | # All the tag combinations from this file | ||
| 586 | self.file_tags = { | ||
| 587 | (x, y, z) for x in self.pyversions | ||
| 588 | for y in self.abis for z in self.plats | ||
| 589 | } | ||
| 590 | |||
| 591 | def support_index_min(self, tags=None): | ||
| 592 | """ | ||
| 593 | Return the lowest index that one of the wheel's file_tag combinations | ||
| 594 | achieves in the supported_tags list e.g. if there are 8 supported tags, | ||
| 595 | and one of the file tags is first in the list, then return 0. Returns | ||
| 596 | None if the wheel is not supported. | ||
| 597 | """ | ||
| 598 | if tags is None: # for mock | ||
| 599 | tags = pep425tags.get_supported() | ||
| 600 | indexes = [tags.index(c) for c in self.file_tags if c in tags] | ||
| 601 | return min(indexes) if indexes else None | ||
| 602 | |||
| 603 | def supported(self, tags=None): | ||
| 604 | """Is this wheel supported on this system?""" | ||
| 605 | if tags is None: # for mock | ||
| 606 | tags = pep425tags.get_supported() | ||
| 607 | return bool(set(tags).intersection(self.file_tags)) | ||
| 608 | |||
| 609 | |||
| 610 | class WheelBuilder(object): | ||
| 611 | """Build wheels from a RequirementSet.""" | ||
| 612 | |||
| 613 | def __init__(self, finder, preparer, wheel_cache, | ||
| 614 | build_options=None, global_options=None, no_clean=False): | ||
| 615 | self.finder = finder | ||
| 616 | self.preparer = preparer | ||
| 617 | self.wheel_cache = wheel_cache | ||
| 618 | |||
| 619 | self._wheel_dir = preparer.wheel_download_dir | ||
| 620 | |||
| 621 | self.build_options = build_options or [] | ||
| 622 | self.global_options = global_options or [] | ||
| 623 | self.no_clean = no_clean | ||
| 624 | |||
| 625 | def _build_one(self, req, output_dir, python_tag=None): | ||
| 626 | """Build one wheel. | ||
| 627 | |||
| 628 | :return: The filename of the built wheel, or None if the build failed. | ||
| 629 | """ | ||
| 630 | # Install build deps into temporary directory (PEP 518) | ||
| 631 | with req.build_env: | ||
| 632 | return self._build_one_inside_env(req, output_dir, | ||
| 633 | python_tag=python_tag) | ||
| 634 | |||
| 635 | def _build_one_inside_env(self, req, output_dir, python_tag=None): | ||
| 636 | with TempDirectory(kind="wheel") as temp_dir: | ||
| 637 | if self.__build_one(req, temp_dir.path, python_tag=python_tag): | ||
| 638 | try: | ||
| 639 | wheel_name = os.listdir(temp_dir.path)[0] | ||
| 640 | wheel_path = os.path.join(output_dir, wheel_name) | ||
| 641 | shutil.move( | ||
| 642 | os.path.join(temp_dir.path, wheel_name), wheel_path | ||
| 643 | ) | ||
| 644 | logger.info('Stored in directory: %s', output_dir) | ||
| 645 | return wheel_path | ||
| 646 | except Exception: | ||
| 647 | pass | ||
| 648 | # Ignore return, we can't do anything else useful. | ||
| 649 | self._clean_one(req) | ||
| 650 | return None | ||
| 651 | |||
| 652 | def _base_setup_args(self, req): | ||
| 653 | # NOTE: Eventually, we'd also want to pass -S in the flags here, when we're | ||
| 654 | # isolating. Currently, it breaks Python in virtualenvs, because it | ||
| 655 | # relies on site.py to find parts of the standard library outside the | ||
| 656 | # virtualenv. | ||
| 657 | return [ | ||
| 658 | sys.executable, '-u', '-c', | ||
| 659 | SETUPTOOLS_SHIM % req.setup_py | ||
| 660 | ] + list(self.global_options) | ||
| 661 | |||
| 662 | def __build_one(self, req, tempd, python_tag=None): | ||
| 663 | base_args = self._base_setup_args(req) | ||
| 664 | |||
| 665 | spin_message = 'Running setup.py bdist_wheel for %s' % (req.name,) | ||
| 666 | with open_spinner(spin_message) as spinner: | ||
| 667 | logger.debug('Destination directory: %s', tempd) | ||
| 668 | wheel_args = base_args + ['bdist_wheel', '-d', tempd] \ | ||
| 669 | + self.build_options | ||
| 670 | |||
| 671 | if python_tag is not None: | ||
| 672 | wheel_args += ["--python-tag", python_tag] | ||
| 673 | |||
| 674 | try: | ||
| 675 | call_subprocess(wheel_args, cwd=req.setup_py_dir, | ||
| 676 | show_stdout=False, spinner=spinner) | ||
| 677 | return True | ||
| 678 | except Exception: | ||
| 679 | spinner.finish("error") | ||
| 680 | logger.error('Failed building wheel for %s', req.name) | ||
| 681 | return False | ||
| 682 | |||
| 683 | def _clean_one(self, req): | ||
| 684 | base_args = self._base_setup_args(req) | ||
| 685 | |||
| 686 | logger.info('Running setup.py clean for %s', req.name) | ||
| 687 | clean_args = base_args + ['clean', '--all'] | ||
| 688 | try: | ||
| 689 | call_subprocess(clean_args, cwd=req.source_dir, show_stdout=False) | ||
| 690 | return True | ||
| 691 | except Exception: | ||
| 692 | logger.error('Failed cleaning build dir for %s', req.name) | ||
| 693 | return False | ||
| 694 | |||
| 695 | def build(self, requirements, session, autobuilding=False): | ||
| 696 | """Build wheels. | ||
| 697 | |||
| 698 | :param autobuilding: If True, replace the sdist we built from with the | ||
| 699 | newly built wheel, in preparation for installation. | ||
| 700 | :return: True if all the wheels built correctly. | ||
| 701 | """ | ||
| 702 | from pip._internal import index | ||
| 703 | |||
| 704 | building_is_possible = self._wheel_dir or ( | ||
| 705 | autobuilding and self.wheel_cache.cache_dir | ||
| 706 | ) | ||
| 707 | assert building_is_possible | ||
| 708 | |||
| 709 | buildset = [] | ||
| 710 | for req in requirements: | ||
| 711 | if req.constraint: | ||
| 712 | continue | ||
| 713 | if req.is_wheel: | ||
| 714 | if not autobuilding: | ||
| 715 | logger.info( | ||
| 716 | 'Skipping %s, since it is already a wheel.', req.name, | ||
| 717 | ) | ||
| 718 | elif autobuilding and req.editable: | ||
| 719 | pass | ||
| 720 | elif autobuilding and not req.source_dir: | ||
| 721 | pass | ||
| 722 | elif autobuilding and req.link and not req.link.is_artifact: | ||
| 723 | # VCS checkout. Build wheel just for this run. | ||
| 724 | buildset.append((req, True)) | ||
| 725 | else: | ||
| 726 | ephem_cache = False | ||
| 727 | if autobuilding: | ||
| 728 | link = req.link | ||
| 729 | base, ext = link.splitext() | ||
| 730 | if index.egg_info_matches(base, None, link) is None: | ||
| 731 | # E.g. local directory. Build wheel just for this run. | ||
| 732 | ephem_cache = True | ||
| 733 | if "binary" not in index.fmt_ctl_formats( | ||
| 734 | self.finder.format_control, | ||
| 735 | canonicalize_name(req.name)): | ||
| 736 | logger.info( | ||
| 737 | "Skipping bdist_wheel for %s, due to binaries " | ||
| 738 | "being disabled for it.", req.name, | ||
| 739 | ) | ||
| 740 | continue | ||
| 741 | buildset.append((req, ephem_cache)) | ||
| 742 | |||
| 743 | if not buildset: | ||
| 744 | return True | ||
| 745 | |||
| 746 | # Build the wheels. | ||
| 747 | logger.info( | ||
| 748 | 'Building wheels for collected packages: %s', | ||
| 749 | ', '.join([req.name for (req, _) in buildset]), | ||
| 750 | ) | ||
| 751 | _cache = self.wheel_cache # shorter name | ||
| 752 | with indent_log(): | ||
| 753 | build_success, build_failure = [], [] | ||
| 754 | for req, ephem in buildset: | ||
| 755 | python_tag = None | ||
| 756 | if autobuilding: | ||
| 757 | python_tag = pep425tags.implementation_tag | ||
| 758 | if ephem: | ||
| 759 | output_dir = _cache.get_ephem_path_for_link(req.link) | ||
| 760 | else: | ||
| 761 | output_dir = _cache.get_path_for_link(req.link) | ||
| 762 | try: | ||
| 763 | ensure_dir(output_dir) | ||
| 764 | except OSError as e: | ||
| 765 | logger.warning("Building wheel for %s failed: %s", | ||
| 766 | req.name, e) | ||
| 767 | build_failure.append(req) | ||
| 768 | continue | ||
| 769 | else: | ||
| 770 | output_dir = self._wheel_dir | ||
| 771 | wheel_file = self._build_one( | ||
| 772 | req, output_dir, | ||
| 773 | python_tag=python_tag, | ||
| 774 | ) | ||
| 775 | if wheel_file: | ||
| 776 | build_success.append(req) | ||
| 777 | if autobuilding: | ||
| 778 | # XXX: This is mildly duplicative with prepare_files, | ||
| 779 | # but not close enough to pull out to a single common | ||
| 780 | # method. | ||
| 781 | # The code below assumes temporary source dirs - | ||
| 782 | # prevent it doing bad things. | ||
| 783 | if req.source_dir and not os.path.exists(os.path.join( | ||
| 784 | req.source_dir, PIP_DELETE_MARKER_FILENAME)): | ||
| 785 | raise AssertionError( | ||
| 786 | "bad source dir - missing marker") | ||
| 787 | # Delete the source we built the wheel from | ||
| 788 | req.remove_temporary_source() | ||
| 789 | # set the build directory again - name is known from | ||
| 790 | # the work prepare_files did. | ||
| 791 | req.source_dir = req.build_location( | ||
| 792 | self.preparer.build_dir | ||
| 793 | ) | ||
| 794 | # Update the link for this. | ||
| 795 | req.link = index.Link(path_to_url(wheel_file)) | ||
| 796 | assert req.link.is_wheel | ||
| 797 | # extract the wheel into the dir | ||
| 798 | unpack_url( | ||
| 799 | req.link, req.source_dir, None, False, | ||
| 800 | session=session, | ||
| 801 | ) | ||
| 802 | else: | ||
| 803 | build_failure.append(req) | ||
| 804 | |||
| 805 | # notify success/failure | ||
| 806 | if build_success: | ||
| 807 | logger.info( | ||
| 808 | 'Successfully built %s', | ||
| 809 | ' '.join([req.name for req in build_success]), | ||
| 810 | ) | ||
| 811 | if build_failure: | ||
| 812 | logger.info( | ||
| 813 | 'Failed to build %s', | ||
| 814 | ' '.join([req.name for req in build_failure]), | ||
| 815 | ) | ||
| 816 | # Return True if all builds were successful | ||
| 817 | return len(build_failure) == 0 | ||
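The `Wheel` class above does all of its filename parsing with `wheel_file_re`. Reusing that exact regular expression on a real PEP 427 filename shows which groups feed which attributes (the demo itself is illustrative, not part of pip):

```python
import re

# Same pattern as Wheel.wheel_file_re above.
wheel_file_re = re.compile(
    r"""^(?P<namever>(?P<name>.+?)-(?P<ver>.*?))
    ((-(?P<build>\d[^-]*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?)
    \.whl|\.dist-info)$""",
    re.VERBOSE
)

m = wheel_file_re.match('pip-10.0.1-py2.py3-none-any.whl')
print(m.group('name'), m.group('ver'))   # pip 10.0.1
print(m.group('pyver').split('.'))       # ['py2', 'py3'] -> self.pyversions
print(m.group('abi'), m.group('plat'))   # none any -> self.abis / self.plats
```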
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/__init__.py new file mode 100644 index 0000000..607757f --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/__init__.py | |||
| @@ -0,0 +1,109 @@ | |||
| 1 | """ | ||
| 2 | pip._vendor is for vendoring dependencies of pip to prevent needing pip to | ||
| 3 | depend on something external. | ||
| 4 | |||
| 5 | Files inside of pip._vendor should be considered immutable and should only be | ||
| 6 | updated to versions from upstream. | ||
| 7 | """ | ||
| 8 | from __future__ import absolute_import | ||
| 9 | |||
| 10 | import glob | ||
| 11 | import os.path | ||
| 12 | import sys | ||
| 13 | |||
| 14 | # Downstream redistributors which have debundled our dependencies should also | ||
| 15 | # patch this value to be true. This will trigger the additional patching | ||
| 16 | # to cause things like "six" to be available as pip._vendor.six. | ||
| 17 | DEBUNDLED = False | ||
| 18 | |||
| 19 | # By default, look in this directory for a bunch of .whl files which we will | ||
| 20 | # add to the beginning of sys.path before attempting to import anything. This | ||
| 21 | # is done to support downstream re-distributors like Debian and Fedora who | ||
| 22 | # wish to create their own Wheels for our dependencies to aid in debundling. | ||
| 23 | WHEEL_DIR = os.path.abspath(os.path.dirname(__file__)) | ||
| 24 | |||
| 25 | |||
| 26 | # Define a small helper function to alias our vendored modules to the real ones | ||
| 27 | # if the vendored ones do not exist. The idea for this was taken from | ||
| 28 | # https://github.com/kennethreitz/requests/pull/2567. | ||
| 29 | def vendored(modulename): | ||
| 30 | vendored_name = "{0}.{1}".format(__name__, modulename) | ||
| 31 | |||
| 32 | try: | ||
| 33 | __import__(vendored_name, globals(), locals(), level=0) | ||
| 34 | except ImportError: | ||
| 35 | try: | ||
| 36 | __import__(modulename, globals(), locals(), level=0) | ||
| 37 | except ImportError: | ||
| 38 | # We can just silently allow import failures to pass here. If we | ||
| 39 | # got to this point it means that ``import pip._vendor.whatever`` | ||
| 40 | # failed and so did ``import whatever``. Since we're importing this | ||
| 41 | # upfront in an attempt to alias imports, not erroring here will | ||
| 42 | # just mean we get a regular import error whenever pip *actually* | ||
| 43 | # tries to import one of these modules to use it, which | ||
| 44 | # gives us a better error message than we would have otherwise | ||
| 45 | # gotten. | ||
| 46 | pass | ||
| 47 | else: | ||
| 48 | sys.modules[vendored_name] = sys.modules[modulename] | ||
| 49 | base, head = vendored_name.rsplit(".", 1) | ||
| 50 | setattr(sys.modules[base], head, sys.modules[modulename]) | ||
| 51 | |||
| 52 | |||
| 53 | # If we're operating in a debundled setup, then we want to go ahead and trigger | ||
| 54 | # the aliasing of our vendored libraries as well as looking for wheels to add | ||
| 55 | # to our sys.path. This will cause all of this code to be a no-op typically | ||
| 56 | # however downstream redistributors can enable it in a consistent way across | ||
| 57 | # all platforms. | ||
| 58 | if DEBUNDLED: | ||
| 59 | # Actually look inside of WHEEL_DIR to find .whl files and add them to the | ||
| 60 | # front of our sys.path. | ||
| 61 | sys.path[:] = glob.glob(os.path.join(WHEEL_DIR, "*.whl")) + sys.path | ||
| 62 | |||
| 63 | # Actually alias all of our vendored dependencies. | ||
| 64 | vendored("cachecontrol") | ||
| 65 | vendored("colorama") | ||
| 66 | vendored("distlib") | ||
| 67 | vendored("distro") | ||
| 68 | vendored("html5lib") | ||
| 69 | vendored("lockfile") | ||
| 70 | vendored("six") | ||
| 71 | vendored("six.moves") | ||
| 72 | vendored("six.moves.urllib") | ||
| 73 | vendored("six.moves.urllib.parse") | ||
| 74 | vendored("packaging") | ||
| 75 | vendored("packaging.version") | ||
| 76 | vendored("packaging.specifiers") | ||
| 77 | vendored("pkg_resources") | ||
| 78 | vendored("progress") | ||
| 79 | vendored("pytoml") | ||
| 80 | vendored("retrying") | ||
| 81 | vendored("requests") | ||
| 82 | vendored("requests.packages") | ||
| 83 | vendored("requests.packages.urllib3") | ||
| 84 | vendored("requests.packages.urllib3._collections") | ||
| 85 | vendored("requests.packages.urllib3.connection") | ||
| 86 | vendored("requests.packages.urllib3.connectionpool") | ||
| 87 | vendored("requests.packages.urllib3.contrib") | ||
| 88 | vendored("requests.packages.urllib3.contrib.ntlmpool") | ||
| 89 | vendored("requests.packages.urllib3.contrib.pyopenssl") | ||
| 90 | vendored("requests.packages.urllib3.exceptions") | ||
| 91 | vendored("requests.packages.urllib3.fields") | ||
| 92 | vendored("requests.packages.urllib3.filepost") | ||
| 93 | vendored("requests.packages.urllib3.packages") | ||
| 94 | vendored("requests.packages.urllib3.packages.ordered_dict") | ||
| 95 | vendored("requests.packages.urllib3.packages.six") | ||
| 96 | vendored("requests.packages.urllib3.packages.ssl_match_hostname") | ||
| 97 | vendored("requests.packages.urllib3.packages.ssl_match_hostname." | ||
| 98 | "_implementation") | ||
| 99 | vendored("requests.packages.urllib3.poolmanager") | ||
| 100 | vendored("requests.packages.urllib3.request") | ||
| 101 | vendored("requests.packages.urllib3.response") | ||
| 102 | vendored("requests.packages.urllib3.util") | ||
| 103 | vendored("requests.packages.urllib3.util.connection") | ||
| 104 | vendored("requests.packages.urllib3.util.request") | ||
| 105 | vendored("requests.packages.urllib3.util.response") | ||
| 106 | vendored("requests.packages.urllib3.util.retry") | ||
| 107 | vendored("requests.packages.urllib3.util.ssl_") | ||
| 108 | vendored("requests.packages.urllib3.util.timeout") | ||
| 109 | vendored("requests.packages.urllib3.util.url") | ||
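A simplified, standalone sketch of the sys.modules aliasing that vendored() performs (the alias name below is made up, and json merely stands in for a real dependency):

    import sys
    import json

    # Registering an existing module object under a second name...
    sys.modules["my_vendored_json"] = json

    # ...makes an import of that name bind the very same object.
    import my_vendored_json
    assert my_vendored_json is json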
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/appdirs.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/appdirs.py new file mode 100644 index 0000000..7ff6a07 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/appdirs.py | |||
| @@ -0,0 +1,604 @@ | |||
| 1 | #!/usr/bin/env python | ||
| 2 | # -*- coding: utf-8 -*- | ||
| 3 | # Copyright (c) 2005-2010 ActiveState Software Inc. | ||
| 4 | # Copyright (c) 2013 Eddy Petrișor | ||
| 5 | |||
| 6 | """Utilities for determining application-specific dirs. | ||
| 7 | |||
| 8 | See <http://github.com/ActiveState/appdirs> for details and usage. | ||
| 9 | """ | ||
| 10 | # Dev Notes: | ||
| 11 | # - MSDN on where to store app data files: | ||
| 12 | # http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120 | ||
| 13 | # - Mac OS X: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html | ||
| 14 | # - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html | ||
| 15 | |||
| 16 | __version_info__ = (1, 4, 3) | ||
| 17 | __version__ = '.'.join(map(str, __version_info__)) | ||
| 18 | |||
| 19 | |||
| 20 | import sys | ||
| 21 | import os | ||
| 22 | |||
| 23 | PY3 = sys.version_info[0] == 3 | ||
| 24 | |||
| 25 | if PY3: | ||
| 26 | unicode = str | ||
| 27 | |||
| 28 | if sys.platform.startswith('java'): | ||
| 29 | import platform | ||
| 30 | os_name = platform.java_ver()[3][0] | ||
| 31 | if os_name.startswith('Windows'): # "Windows XP", "Windows 7", etc. | ||
| 32 | system = 'win32' | ||
| 33 | elif os_name.startswith('Mac'): # "Mac OS X", etc. | ||
| 34 | system = 'darwin' | ||
| 35 | else: # "Linux", "SunOS", "FreeBSD", etc. | ||
| 36 | # Setting this to "linux2" is not ideal, but only Windows or Mac | ||
| 37 | # are actually checked for and the rest of the module expects | ||
| 38 | # *sys.platform* style strings. | ||
| 39 | system = 'linux2' | ||
| 40 | else: | ||
| 41 | system = sys.platform | ||
| 42 | |||
| 43 | |||
| 44 | |||
| 45 | def user_data_dir(appname=None, appauthor=None, version=None, roaming=False): | ||
| 46 | r"""Return full path to the user-specific data dir for this application. | ||
| 47 | |||
| 48 | "appname" is the name of application. | ||
| 49 | If None, just the system directory is returned. | ||
| 50 | "appauthor" (only used on Windows) is the name of the | ||
| 51 | appauthor or distributing body for this application. Typically | ||
| 52 | it is the owning company name. This falls back to appname. You may | ||
| 53 | pass False to disable it. | ||
| 54 | "version" is an optional version path element to append to the | ||
| 55 | path. You might want to use this if you want multiple versions | ||
| 56 | of your app to be able to run independently. If used, this | ||
| 57 | would typically be "<major>.<minor>". | ||
| 58 | Only applied when appname is present. | ||
| 59 | "roaming" (boolean, default False) can be set True to use the Windows | ||
| 60 | roaming appdata directory. That means that for users on a Windows | ||
| 61 | network setup for roaming profiles, this user data will be | ||
| 62 | sync'd on login. See | ||
| 63 | <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx> | ||
| 64 | for a discussion of issues. | ||
| 65 | |||
| 66 | Typical user data directories are: | ||
| 67 | Mac OS X: ~/Library/Application Support/<AppName> | ||
| 68 | Unix: ~/.local/share/<AppName> # or in $XDG_DATA_HOME, if defined | ||
| 69 | Win XP (not roaming): C:\Documents and Settings\<username>\Application Data\<AppAuthor>\<AppName> | ||
| 70 | Win XP (roaming): C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName> | ||
| 71 | Win 7 (not roaming): C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName> | ||
| 72 | Win 7 (roaming): C:\Users\<username>\AppData\Roaming\<AppAuthor>\<AppName> | ||
| 73 | |||
| 74 | For Unix, we follow the XDG spec and support $XDG_DATA_HOME. | ||
| 75 | That means, by default "~/.local/share/<AppName>". | ||
| 76 | """ | ||
| 77 | if system == "win32": | ||
| 78 | if appauthor is None: | ||
| 79 | appauthor = appname | ||
| 80 | const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA" | ||
| 81 | path = os.path.normpath(_get_win_folder(const)) | ||
| 82 | if appname: | ||
| 83 | if appauthor is not False: | ||
| 84 | path = os.path.join(path, appauthor, appname) | ||
| 85 | else: | ||
| 86 | path = os.path.join(path, appname) | ||
| 87 | elif system == 'darwin': | ||
| 88 | path = os.path.expanduser('~/Library/Application Support/') | ||
| 89 | if appname: | ||
| 90 | path = os.path.join(path, appname) | ||
| 91 | else: | ||
| 92 | path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share")) | ||
| 93 | if appname: | ||
| 94 | path = os.path.join(path, appname) | ||
| 95 | if appname and version: | ||
| 96 | path = os.path.join(path, version) | ||
| 97 | return path | ||
| 98 | |||
| 99 | |||
| 100 | def site_data_dir(appname=None, appauthor=None, version=None, multipath=False): | ||
| 101 | r"""Return full path to the user-shared data dir for this application. | ||
| 102 | |||
| 103 | "appname" is the name of application. | ||
| 104 | If None, just the system directory is returned. | ||
| 105 | "appauthor" (only used on Windows) is the name of the | ||
| 106 | appauthor or distributing body for this application. Typically | ||
| 107 | it is the owning company name. This falls back to appname. You may | ||
| 108 | pass False to disable it. | ||
| 109 | "version" is an optional version path element to append to the | ||
| 110 | path. You might want to use this if you want multiple versions | ||
| 111 | of your app to be able to run independently. If used, this | ||
| 112 | would typically be "<major>.<minor>". | ||
| 113 | Only applied when appname is present. | ||
| 114 | "multipath" is an optional parameter only applicable to *nix | ||
| 115 | which indicates that the entire list of data dirs should be | ||
| 116 | returned. By default, the first item from XDG_DATA_DIRS is | ||
| 117 | returned, or '/usr/local/share/<AppName>', | ||
| 118 | if XDG_DATA_DIRS is not set | ||
| 119 | |||
| 120 | Typical site data directories are: | ||
| 121 | Mac OS X: /Library/Application Support/<AppName> | ||
| 122 | Unix: /usr/local/share/<AppName> or /usr/share/<AppName> | ||
| 123 | Win XP: C:\Documents and Settings\All Users\Application Data\<AppAuthor>\<AppName> | ||
| 124 | Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.) | ||
| 125 | Win 7: C:\ProgramData\<AppAuthor>\<AppName> # Hidden, but writeable on Win 7. | ||
| 126 | |||
| 127 | For Unix, this is using the $XDG_DATA_DIRS[0] default. | ||
| 128 | |||
| 129 | WARNING: Do not use this on Windows. See the Vista-Fail note above for why. | ||
| 130 | """ | ||
| 131 | if system == "win32": | ||
| 132 | if appauthor is None: | ||
| 133 | appauthor = appname | ||
| 134 | path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA")) | ||
| 135 | if appname: | ||
| 136 | if appauthor is not False: | ||
| 137 | path = os.path.join(path, appauthor, appname) | ||
| 138 | else: | ||
| 139 | path = os.path.join(path, appname) | ||
| 140 | elif system == 'darwin': | ||
| 141 | path = os.path.expanduser('/Library/Application Support') | ||
| 142 | if appname: | ||
| 143 | path = os.path.join(path, appname) | ||
| 144 | else: | ||
| 145 | # XDG default for $XDG_DATA_DIRS | ||
| 146 | # only first, if multipath is False | ||
| 147 | path = os.getenv('XDG_DATA_DIRS', | ||
| 148 | os.pathsep.join(['/usr/local/share', '/usr/share'])) | ||
| 149 | pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)] | ||
| 150 | if appname: | ||
| 151 | if version: | ||
| 152 | appname = os.path.join(appname, version) | ||
| 153 | pathlist = [os.sep.join([x, appname]) for x in pathlist] | ||
| 154 | |||
| 155 | if multipath: | ||
| 156 | path = os.pathsep.join(pathlist) | ||
| 157 | else: | ||
| 158 | path = pathlist[0] | ||
| 159 | return path | ||
| 160 | |||
| 161 | if appname and version: | ||
| 162 | path = os.path.join(path, version) | ||
| 163 | return path | ||
| 164 | |||
| 165 | |||
| 166 | def user_config_dir(appname=None, appauthor=None, version=None, roaming=False): | ||
| 167 | r"""Return full path to the user-specific config dir for this application. | ||
| 168 | |||
| 169 | "appname" is the name of application. | ||
| 170 | If None, just the system directory is returned. | ||
| 171 | "appauthor" (only used on Windows) is the name of the | ||
| 172 | appauthor or distributing body for this application. Typically | ||
| 173 | it is the owning company name. This falls back to appname. You may | ||
| 174 | pass False to disable it. | ||
| 175 | "version" is an optional version path element to append to the | ||
| 176 | path. You might want to use this if you want multiple versions | ||
| 177 | of your app to be able to run independently. If used, this | ||
| 178 | would typically be "<major>.<minor>". | ||
| 179 | Only applied when appname is present. | ||
| 180 | "roaming" (boolean, default False) can be set True to use the Windows | ||
| 181 | roaming appdata directory. That means that for users on a Windows | ||
| 182 | network setup for roaming profiles, this user data will be | ||
| 183 | sync'd on login. See | ||
| 184 | <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx> | ||
| 185 | for a discussion of issues. | ||
| 186 | |||
| 187 | Typical user config directories are: | ||
| 188 | Mac OS X: same as user_data_dir | ||
| 189 | Unix: ~/.config/<AppName> # or in $XDG_CONFIG_HOME, if defined | ||
| 190 | Win *: same as user_data_dir | ||
| 191 | |||
| 192 | For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME. | ||
| 193 | That means, by default "~/.config/<AppName>". | ||
| 194 | """ | ||
| 195 | if system in ["win32", "darwin"]: | ||
| 196 | path = user_data_dir(appname, appauthor, None, roaming) | ||
| 197 | else: | ||
| 198 | path = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config")) | ||
| 199 | if appname: | ||
| 200 | path = os.path.join(path, appname) | ||
| 201 | if appname and version: | ||
| 202 | path = os.path.join(path, version) | ||
| 203 | return path | ||
| 204 | |||
| 205 | |||
| 206 | def site_config_dir(appname=None, appauthor=None, version=None, multipath=False): | ||
| 207 | r"""Return full path to the user-shared data dir for this application. | ||
| 208 | |||
| 209 | "appname" is the name of application. | ||
| 210 | If None, just the system directory is returned. | ||
| 211 | "appauthor" (only used on Windows) is the name of the | ||
| 212 | appauthor or distributing body for this application. Typically | ||
| 213 | it is the owning company name. This falls back to appname. You may | ||
| 214 | pass False to disable it. | ||
| 215 | "version" is an optional version path element to append to the | ||
| 216 | path. You might want to use this if you want multiple versions | ||
| 217 | of your app to be able to run independently. If used, this | ||
| 218 | would typically be "<major>.<minor>". | ||
| 219 | Only applied when appname is present. | ||
| 220 | "multipath" is an optional parameter only applicable to *nix | ||
| 221 | which indicates that the entire list of config dirs should be | ||
| 222 | returned. By default, the first item from XDG_CONFIG_DIRS is | ||
| 223 | returned, or '/etc/xdg/<AppName>', if XDG_CONFIG_DIRS is not set | ||
| 224 | |||
| 225 | Typical site config directories are: | ||
| 226 | Mac OS X: same as site_data_dir | ||
| 227 | Unix: /etc/xdg/<AppName> or $XDG_CONFIG_DIRS[i]/<AppName> for each value in | ||
| 228 | $XDG_CONFIG_DIRS | ||
| 229 | Win *: same as site_data_dir | ||
| 230 | Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.) | ||
| 231 | |||
| 232 | For Unix, this is using the $XDG_CONFIG_DIRS[0] default, if multipath=False | ||
| 233 | |||
| 234 | WARNING: Do not use this on Windows. See the Vista-Fail note above for why. | ||
| 235 | """ | ||
| 236 | if system in ["win32", "darwin"]: | ||
| 237 | path = site_data_dir(appname, appauthor) | ||
| 238 | if appname and version: | ||
| 239 | path = os.path.join(path, version) | ||
| 240 | else: | ||
| 241 | # XDG default for $XDG_CONFIG_DIRS | ||
| 242 | # only first, if multipath is False | ||
| 243 | path = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg') | ||
| 244 | pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)] | ||
| 245 | if appname: | ||
| 246 | if version: | ||
| 247 | appname = os.path.join(appname, version) | ||
| 248 | pathlist = [os.sep.join([x, appname]) for x in pathlist] | ||
| 249 | |||
| 250 | if multipath: | ||
| 251 | path = os.pathsep.join(pathlist) | ||
| 252 | else: | ||
| 253 | path = pathlist[0] | ||
| 254 | return path | ||
| 255 | |||
| 256 | |||
| 257 | def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True): | ||
| 258 | r"""Return full path to the user-specific cache dir for this application. | ||
| 259 | |||
| 260 | "appname" is the name of application. | ||
| 261 | If None, just the system directory is returned. | ||
| 262 | "appauthor" (only used on Windows) is the name of the | ||
| 263 | appauthor or distributing body for this application. Typically | ||
| 264 | it is the owning company name. This falls back to appname. You may | ||
| 265 | pass False to disable it. | ||
| 266 | "version" is an optional version path element to append to the | ||
| 267 | path. You might want to use this if you want multiple versions | ||
| 268 | of your app to be able to run independently. If used, this | ||
| 269 | would typically be "<major>.<minor>". | ||
| 270 | Only applied when appname is present. | ||
| 271 | "opinion" (boolean) can be False to disable the appending of | ||
| 272 | "Cache" to the base app data dir for Windows. See | ||
| 273 | discussion below. | ||
| 274 | |||
| 275 | Typical user cache directories are: | ||
| 276 | Mac OS X: ~/Library/Caches/<AppName> | ||
| 277 | Unix: ~/.cache/<AppName> (XDG default) | ||
| 278 | Win XP: C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Cache | ||
| 279 | Vista: C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Cache | ||
| 280 | |||
| 281 | On Windows the only suggestion in the MSDN docs is that local settings go in | ||
| 282 | the `CSIDL_LOCAL_APPDATA` directory. This is identical to the non-roaming | ||
| 283 | app data dir (the default returned by `user_data_dir` above). Apps typically | ||
| 284 | put cache data somewhere *under* the given dir here. Some examples: | ||
| 285 | ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache | ||
| 286 | ...\Acme\SuperApp\Cache\1.0 | ||
| 287 | OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value. | ||
| 288 | This can be disabled with the `opinion=False` option. | ||
| 289 | """ | ||
| 290 | if system == "win32": | ||
| 291 | if appauthor is None: | ||
| 292 | appauthor = appname | ||
| 293 | path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA")) | ||
| 294 | if appname: | ||
| 295 | if appauthor is not False: | ||
| 296 | path = os.path.join(path, appauthor, appname) | ||
| 297 | else: | ||
| 298 | path = os.path.join(path, appname) | ||
| 299 | if opinion: | ||
| 300 | path = os.path.join(path, "Cache") | ||
| 301 | elif system == 'darwin': | ||
| 302 | path = os.path.expanduser('~/Library/Caches') | ||
| 303 | if appname: | ||
| 304 | path = os.path.join(path, appname) | ||
| 305 | else: | ||
| 306 | path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache')) | ||
| 307 | if appname: | ||
| 308 | path = os.path.join(path, appname) | ||
| 309 | if appname and version: | ||
| 310 | path = os.path.join(path, version) | ||
| 311 | return path | ||
| 312 | |||
| 313 | |||
| 314 | def user_state_dir(appname=None, appauthor=None, version=None, roaming=False): | ||
| 315 | r"""Return full path to the user-specific state dir for this application. | ||
| 316 | |||
| 317 | "appname" is the name of application. | ||
| 318 | If None, just the system directory is returned. | ||
| 319 | "appauthor" (only used on Windows) is the name of the | ||
| 320 | appauthor or distributing body for this application. Typically | ||
| 321 | it is the owning company name. This falls back to appname. You may | ||
| 322 | pass False to disable it. | ||
| 323 | "version" is an optional version path element to append to the | ||
| 324 | path. You might want to use this if you want multiple versions | ||
| 325 | of your app to be able to run independently. If used, this | ||
| 326 | would typically be "<major>.<minor>". | ||
| 327 | Only applied when appname is present. | ||
| 328 | "roaming" (boolean, default False) can be set True to use the Windows | ||
| 329 | roaming appdata directory. That means that for users on a Windows | ||
| 330 | network setup for roaming profiles, this user data will be | ||
| 331 | sync'd on login. See | ||
| 332 | <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx> | ||
| 333 | for a discussion of issues. | ||
| 334 | |||
| 335 | Typical user state directories are: | ||
| 336 | Mac OS X: same as user_data_dir | ||
| 337 | Unix: ~/.local/state/<AppName> # or in $XDG_STATE_HOME, if defined | ||
| 338 | Win *: same as user_data_dir | ||
| 339 | |||
| 340 | For Unix, we follow this Debian proposal <https://wiki.debian.org/XDGBaseDirectorySpecification#state> | ||
| 341 | to extend the XDG spec and support $XDG_STATE_HOME. | ||
| 342 | |||
| 343 | That means, by default "~/.local/state/<AppName>". | ||
| 344 | """ | ||
| 345 | if system in ["win32", "darwin"]: | ||
| 346 | path = user_data_dir(appname, appauthor, None, roaming) | ||
| 347 | else: | ||
| 348 | path = os.getenv('XDG_STATE_HOME', os.path.expanduser("~/.local/state")) | ||
| 349 | if appname: | ||
| 350 | path = os.path.join(path, appname) | ||
| 351 | if appname and version: | ||
| 352 | path = os.path.join(path, version) | ||
| 353 | return path | ||
| 354 | |||
| 355 | |||
| 356 | def user_log_dir(appname=None, appauthor=None, version=None, opinion=True): | ||
| 357 | r"""Return full path to the user-specific log dir for this application. | ||
| 358 | |||
| 359 | "appname" is the name of application. | ||
| 360 | If None, just the system directory is returned. | ||
| 361 | "appauthor" (only used on Windows) is the name of the | ||
| 362 | appauthor or distributing body for this application. Typically | ||
| 363 | it is the owning company name. This falls back to appname. You may | ||
| 364 | pass False to disable it. | ||
| 365 | "version" is an optional version path element to append to the | ||
| 366 | path. You might want to use this if you want multiple versions | ||
| 367 | of your app to be able to run independently. If used, this | ||
| 368 | would typically be "<major>.<minor>". | ||
| 369 | Only applied when appname is present. | ||
| 370 | "opinion" (boolean) can be False to disable the appending of | ||
| 371 | "Logs" to the base app data dir for Windows, and "log" to the | ||
| 372 | base cache dir for Unix. See discussion below. | ||
| 373 | |||
| 374 | Typical user log directories are: | ||
| 375 | Mac OS X: ~/Library/Logs/<AppName> | ||
| 376 | Unix: ~/.cache/<AppName>/log # or under $XDG_CACHE_HOME if defined | ||
| 377 | Win XP: C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Logs | ||
| 378 | Vista: C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Logs | ||
| 379 | |||
| 380 | On Windows the only suggestion in the MSDN docs is that local settings | ||
| 381 | go in the `CSIDL_LOCAL_APPDATA` directory. (Note: I'm interested in | ||
| 382 | examples of what some windows apps use for a logs dir.) | ||
| 383 | |||
| 384 | OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA` | ||
| 385 | value for Windows and appends "log" to the user cache dir for Unix. | ||
| 386 | This can be disabled with the `opinion=False` option. | ||
| 387 | """ | ||
| 388 | if system == "darwin": | ||
| 389 | path = os.path.join( | ||
| 390 | os.path.expanduser('~/Library/Logs'), | ||
| 391 | appname) | ||
| 392 | elif system == "win32": | ||
| 393 | path = user_data_dir(appname, appauthor, version) | ||
| 394 | version = False | ||
| 395 | if opinion: | ||
| 396 | path = os.path.join(path, "Logs") | ||
| 397 | else: | ||
| 398 | path = user_cache_dir(appname, appauthor, version) | ||
| 399 | version = False | ||
| 400 | if opinion: | ||
| 401 | path = os.path.join(path, "log") | ||
| 402 | if appname and version: | ||
| 403 | path = os.path.join(path, version) | ||
| 404 | return path | ||
| 405 | |||
| 406 | |||
| 407 | class AppDirs(object): | ||
| 408 | """Convenience wrapper for getting application dirs.""" | ||
| 409 | def __init__(self, appname=None, appauthor=None, version=None, | ||
| 410 | roaming=False, multipath=False): | ||
| 411 | self.appname = appname | ||
| 412 | self.appauthor = appauthor | ||
| 413 | self.version = version | ||
| 414 | self.roaming = roaming | ||
| 415 | self.multipath = multipath | ||
| 416 | |||
| 417 | @property | ||
| 418 | def user_data_dir(self): | ||
| 419 | return user_data_dir(self.appname, self.appauthor, | ||
| 420 | version=self.version, roaming=self.roaming) | ||
| 421 | |||
| 422 | @property | ||
| 423 | def site_data_dir(self): | ||
| 424 | return site_data_dir(self.appname, self.appauthor, | ||
| 425 | version=self.version, multipath=self.multipath) | ||
| 426 | |||
| 427 | @property | ||
| 428 | def user_config_dir(self): | ||
| 429 | return user_config_dir(self.appname, self.appauthor, | ||
| 430 | version=self.version, roaming=self.roaming) | ||
| 431 | |||
| 432 | @property | ||
| 433 | def site_config_dir(self): | ||
| 434 | return site_config_dir(self.appname, self.appauthor, | ||
| 435 | version=self.version, multipath=self.multipath) | ||
| 436 | |||
| 437 | @property | ||
| 438 | def user_cache_dir(self): | ||
| 439 | return user_cache_dir(self.appname, self.appauthor, | ||
| 440 | version=self.version) | ||
| 441 | |||
| 442 | @property | ||
| 443 | def user_state_dir(self): | ||
| 444 | return user_state_dir(self.appname, self.appauthor, | ||
| 445 | version=self.version) | ||
| 446 | |||
| 447 | @property | ||
| 448 | def user_log_dir(self): | ||
| 449 | return user_log_dir(self.appname, self.appauthor, | ||
| 450 | version=self.version) | ||
| 451 | |||
| 452 | |||
| 453 | #---- internal support stuff | ||
| 454 | |||
| 455 | def _get_win_folder_from_registry(csidl_name): | ||
| 456 | """This is a fallback technique at best. I'm not sure if using the | ||
| 457 | registry for this guarantees us the correct answer for all CSIDL_* | ||
| 458 | names. | ||
| 459 | """ | ||
| 460 | if PY3: | ||
| 461 | import winreg as _winreg | ||
| 462 | else: | ||
| 463 | import _winreg | ||
| 464 | |||
| 465 | shell_folder_name = { | ||
| 466 | "CSIDL_APPDATA": "AppData", | ||
| 467 | "CSIDL_COMMON_APPDATA": "Common AppData", | ||
| 468 | "CSIDL_LOCAL_APPDATA": "Local AppData", | ||
| 469 | }[csidl_name] | ||
| 470 | |||
| 471 | key = _winreg.OpenKey( | ||
| 472 | _winreg.HKEY_CURRENT_USER, | ||
| 473 | r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders" | ||
| 474 | ) | ||
| 475 | dir, type = _winreg.QueryValueEx(key, shell_folder_name) | ||
| 476 | return dir | ||
| 477 | |||
| 478 | |||
| 479 | def _get_win_folder_with_pywin32(csidl_name): | ||
| 480 | from win32com.shell import shellcon, shell | ||
| 481 | dir = shell.SHGetFolderPath(0, getattr(shellcon, csidl_name), 0, 0) | ||
| 482 | # Try to make this a unicode path because SHGetFolderPath does | ||
| 483 | # not return unicode strings when there is unicode data in the | ||
| 484 | # path. | ||
| 485 | try: | ||
| 486 | dir = unicode(dir) | ||
| 487 | |||
| 488 | # Downgrade to short path name if have highbit chars. See | ||
| 489 | # <http://bugs.activestate.com/show_bug.cgi?id=85099>. | ||
| 490 | has_high_char = False | ||
| 491 | for c in dir: | ||
| 492 | if ord(c) > 255: | ||
| 493 | has_high_char = True | ||
| 494 | break | ||
| 495 | if has_high_char: | ||
| 496 | try: | ||
| 497 | import win32api | ||
| 498 | dir = win32api.GetShortPathName(dir) | ||
| 499 | except ImportError: | ||
| 500 | pass | ||
| 501 | except UnicodeError: | ||
| 502 | pass | ||
| 503 | return dir | ||
| 504 | |||
| 505 | |||
| 506 | def _get_win_folder_with_ctypes(csidl_name): | ||
| 507 | import ctypes | ||
| 508 | |||
| 509 | csidl_const = { | ||
| 510 | "CSIDL_APPDATA": 26, | ||
| 511 | "CSIDL_COMMON_APPDATA": 35, | ||
| 512 | "CSIDL_LOCAL_APPDATA": 28, | ||
| 513 | }[csidl_name] | ||
| 514 | |||
| 515 | buf = ctypes.create_unicode_buffer(1024) | ||
| 516 | ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf) | ||
| 517 | |||
| 518 | # Downgrade to short path name if have highbit chars. See | ||
| 519 | # <http://bugs.activestate.com/show_bug.cgi?id=85099>. | ||
| 520 | has_high_char = False | ||
| 521 | for c in buf: | ||
| 522 | if ord(c) > 255: | ||
| 523 | has_high_char = True | ||
| 524 | break | ||
| 525 | if has_high_char: | ||
| 526 | buf2 = ctypes.create_unicode_buffer(1024) | ||
| 527 | if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024): | ||
| 528 | buf = buf2 | ||
| 529 | |||
| 530 | return buf.value | ||
| 531 | |||
| 532 | def _get_win_folder_with_jna(csidl_name): | ||
| 533 | import array | ||
| 534 | from com.sun import jna | ||
| 535 | from com.sun.jna.platform import win32 | ||
| 536 | |||
| 537 | buf_size = win32.WinDef.MAX_PATH * 2 | ||
| 538 | buf = array.zeros('c', buf_size) | ||
| 539 | shell = win32.Shell32.INSTANCE | ||
| 540 | shell.SHGetFolderPath(None, getattr(win32.ShlObj, csidl_name), None, win32.ShlObj.SHGFP_TYPE_CURRENT, buf) | ||
| 541 | dir = jna.Native.toString(buf.tostring()).rstrip("\0") | ||
| 542 | |||
| 543 | # Downgrade to short path name if have highbit chars. See | ||
| 544 | # <http://bugs.activestate.com/show_bug.cgi?id=85099>. | ||
| 545 | has_high_char = False | ||
| 546 | for c in dir: | ||
| 547 | if ord(c) > 255: | ||
| 548 | has_high_char = True | ||
| 549 | break | ||
| 550 | if has_high_char: | ||
| 551 | buf = array.zeros('c', buf_size) | ||
| 552 | kernel = win32.Kernel32.INSTANCE | ||
| 553 | if kernel.GetShortPathName(dir, buf, buf_size): | ||
| 554 | dir = jna.Native.toString(buf.tostring()).rstrip("\0") | ||
| 555 | |||
| 556 | return dir | ||
| 557 | |||
| 558 | if system == "win32": | ||
| 559 | try: | ||
| 560 | from ctypes import windll | ||
| 561 | _get_win_folder = _get_win_folder_with_ctypes | ||
| 562 | except ImportError: | ||
| 563 | try: | ||
| 564 | import com.sun.jna | ||
| 565 | _get_win_folder = _get_win_folder_with_jna | ||
| 566 | except ImportError: | ||
| 567 | _get_win_folder = _get_win_folder_from_registry | ||
| 568 | |||
| 569 | |||
| 570 | #---- self test code | ||
| 571 | |||
| 572 | if __name__ == "__main__": | ||
| 573 | appname = "MyApp" | ||
| 574 | appauthor = "MyCompany" | ||
| 575 | |||
| 576 | props = ("user_data_dir", | ||
| 577 | "user_config_dir", | ||
| 578 | "user_cache_dir", | ||
| 579 | "user_state_dir", | ||
| 580 | "user_log_dir", | ||
| 581 | "site_data_dir", | ||
| 582 | "site_config_dir") | ||
| 583 | |||
| 584 | print("-- app dirs %s --" % __version__) | ||
| 585 | |||
| 586 | print("-- app dirs (with optional 'version')") | ||
| 587 | dirs = AppDirs(appname, appauthor, version="1.0") | ||
| 588 | for prop in props: | ||
| 589 | print("%s: %s" % (prop, getattr(dirs, prop))) | ||
| 590 | |||
| 591 | print("\n-- app dirs (without optional 'version')") | ||
| 592 | dirs = AppDirs(appname, appauthor) | ||
| 593 | for prop in props: | ||
| 594 | print("%s: %s" % (prop, getattr(dirs, prop))) | ||
| 595 | |||
| 596 | print("\n-- app dirs (without optional 'appauthor')") | ||
| 597 | dirs = AppDirs(appname) | ||
| 598 | for prop in props: | ||
| 599 | print("%s: %s" % (prop, getattr(dirs, prop))) | ||
| 600 | |||
| 601 | print("\n-- app dirs (with disabled 'appauthor')") | ||
| 602 | dirs = AppDirs(appname, appauthor=False) | ||
| 603 | for prop in props: | ||
| 604 | print("%s: %s" % (prop, getattr(dirs, prop))) | ||
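A short usage sketch, separate from the vendored file itself: on a typical Linux host these calls resolve to the XDG defaults described in the docstrings above.

    from pip._vendor.appdirs import user_cache_dir, user_config_dir

    print(user_cache_dir("pip"))    # e.g. /home/<user>/.cache/pip
    print(user_config_dir("pip"))   # e.g. /home/<user>/.config/pip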
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/__init__.py new file mode 100644 index 0000000..ced6d94 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/__init__.py | |||
| @@ -0,0 +1,11 @@ | |||
| 1 | """CacheControl import Interface. | ||
| 2 | |||
| 3 | Make it easy to import from cachecontrol without long namespaces. | ||
| 4 | """ | ||
| 5 | __author__ = 'Eric Larson' | ||
| 6 | __email__ = 'eric@ionrock.org' | ||
| 7 | __version__ = '0.12.4' | ||
| 8 | |||
| 9 | from .wrapper import CacheControl | ||
| 10 | from .adapter import CacheControlAdapter | ||
| 11 | from .controller import CacheController | ||
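A hedged usage sketch of the package entry point: CacheControl() wraps a requests session with the caching adapter, defaulting to the in-memory DictCache (the URL is a placeholder):

    from pip._vendor import requests
    from pip._vendor.cachecontrol import CacheControl

    sess = CacheControl(requests.Session())
    sess.get("https://example.com")  # fetched from the network
    sess.get("https://example.com")  # may now be served from the cache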
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/_cmd.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/_cmd.py new file mode 100644 index 0000000..10bc01e --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/_cmd.py | |||
| @@ -0,0 +1,60 @@ | |||
| 1 | import logging | ||
| 2 | |||
| 3 | from pip._vendor import requests | ||
| 4 | |||
| 5 | from pip._vendor.cachecontrol.adapter import CacheControlAdapter | ||
| 6 | from pip._vendor.cachecontrol.cache import DictCache | ||
| 7 | from pip._vendor.cachecontrol.controller import logger | ||
| 8 | |||
| 9 | from argparse import ArgumentParser | ||
| 10 | |||
| 11 | |||
| 12 | def setup_logging(): | ||
| 13 | logger.setLevel(logging.DEBUG) | ||
| 14 | handler = logging.StreamHandler() | ||
| 15 | logger.addHandler(handler) | ||
| 16 | |||
| 17 | |||
| 18 | def get_session(): | ||
| 19 | adapter = CacheControlAdapter( | ||
| 20 | DictCache(), | ||
| 21 | cache_etags=True, | ||
| 22 | serializer=None, | ||
| 23 | heuristic=None, | ||
| 24 | ) | ||
| 25 | sess = requests.Session() | ||
| 26 | sess.mount('http://', adapter) | ||
| 27 | sess.mount('https://', adapter) | ||
| 28 | |||
| 29 | sess.cache_controller = adapter.controller | ||
| 30 | return sess | ||
| 31 | |||
| 32 | |||
| 33 | def get_args(): | ||
| 34 | parser = ArgumentParser() | ||
| 35 | parser.add_argument('url', help='The URL to try and cache') | ||
| 36 | return parser.parse_args() | ||
| 37 | |||
| 38 | |||
| 39 | def main(args=None): | ||
| 40 | args = args or get_args() | ||
| 41 | sess = get_session() | ||
| 42 | |||
| 43 | # Make a request to get a response | ||
| 44 | resp = sess.get(args.url) | ||
| 45 | |||
| 46 | # Turn on logging | ||
| 47 | setup_logging() | ||
| 48 | |||
| 49 | # try setting the cache | ||
| 50 | sess.cache_controller.cache_response(resp.request, resp.raw) | ||
| 51 | |||
| 52 | # Now try to get it | ||
| 53 | if sess.cache_controller.cached_request(resp.request): | ||
| 54 | print('Cached!') | ||
| 55 | else: | ||
| 56 | print('Not cached :(') | ||
| 57 | |||
| 58 | |||
| 59 | if __name__ == '__main__': | ||
| 60 | main() | ||
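The module is a small end-to-end smoke test; the same flow can be driven programmatically, as in this sketch (the URL is a placeholder):

    from pip._vendor.cachecontrol._cmd import get_session, setup_logging

    setup_logging()
    sess = get_session()
    resp = sess.get("https://example.com")
    sess.cache_controller.cache_response(resp.request, resp.raw)
    print(bool(sess.cache_controller.cached_request(resp.request)))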
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/adapter.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/adapter.py new file mode 100644 index 0000000..03c95c9 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/adapter.py | |||
| @@ -0,0 +1,134 @@ | |||
| 1 | import types | ||
| 2 | import functools | ||
| 3 | import zlib | ||
| 4 | |||
| 5 | from pip._vendor.requests.adapters import HTTPAdapter | ||
| 6 | |||
| 7 | from .controller import CacheController | ||
| 8 | from .cache import DictCache | ||
| 9 | from .filewrapper import CallbackFileWrapper | ||
| 10 | |||
| 11 | |||
| 12 | class CacheControlAdapter(HTTPAdapter): | ||
| 13 | invalidating_methods = set(['PUT', 'DELETE']) | ||
| 14 | |||
| 15 | def __init__(self, cache=None, | ||
| 16 | cache_etags=True, | ||
| 17 | controller_class=None, | ||
| 18 | serializer=None, | ||
| 19 | heuristic=None, | ||
| 20 | cacheable_methods=None, | ||
| 21 | *args, **kw): | ||
| 22 | super(CacheControlAdapter, self).__init__(*args, **kw) | ||
| 23 | self.cache = cache or DictCache() | ||
| 24 | self.heuristic = heuristic | ||
| 25 | self.cacheable_methods = cacheable_methods or ('GET',) | ||
| 26 | |||
| 27 | controller_factory = controller_class or CacheController | ||
| 28 | self.controller = controller_factory( | ||
| 29 | self.cache, | ||
| 30 | cache_etags=cache_etags, | ||
| 31 | serializer=serializer, | ||
| 32 | ) | ||
| 33 | |||
| 34 | def send(self, request, cacheable_methods=None, **kw): | ||
| 35 | """ | ||
| 36 | Send a request. Use the request information to see if it | ||
| 37 | exists in the cache and cache the response if we need to and can. | ||
| 38 | """ | ||
| 39 | cacheable = cacheable_methods or self.cacheable_methods | ||
| 40 | if request.method in cacheable: | ||
| 41 | try: | ||
| 42 | cached_response = self.controller.cached_request(request) | ||
| 43 | except zlib.error: | ||
| 44 | cached_response = None | ||
| 45 | if cached_response: | ||
| 46 | return self.build_response(request, cached_response, | ||
| 47 | from_cache=True) | ||
| 48 | |||
| 49 | # check for etags and add headers if appropriate | ||
| 50 | request.headers.update( | ||
| 51 | self.controller.conditional_headers(request) | ||
| 52 | ) | ||
| 53 | |||
| 54 | resp = super(CacheControlAdapter, self).send(request, **kw) | ||
| 55 | |||
| 56 | return resp | ||
| 57 | |||
| 58 | def build_response(self, request, response, from_cache=False, | ||
| 59 | cacheable_methods=None): | ||
| 60 | """ | ||
| 61 | Build a response by making a request or using the cache. | ||
| 62 | |||
| 63 | This will end up calling send and returning a potentially | ||
| 64 | cached response | ||
| 65 | """ | ||
| 66 | cacheable = cacheable_methods or self.cacheable_methods | ||
| 67 | if not from_cache and request.method in cacheable: | ||
| 68 | # Check for any heuristics that might update headers | ||
| 69 | # before trying to cache. | ||
| 70 | if self.heuristic: | ||
| 71 | response = self.heuristic.apply(response) | ||
| 72 | |||
| 73 | # apply any expiration heuristics | ||
| 74 | if response.status == 304: | ||
| 75 | # We must have sent an ETag request. This could mean | ||
| 76 | # that our cached entry has already expired or that we | ||
| 77 | # simply have an etag. In either case, we want to try | ||
| 78 | # and update the cache. | ||
| 79 | cached_response = self.controller.update_cached_response( | ||
| 80 | request, response | ||
| 81 | ) | ||
| 82 | |||
| 83 | if cached_response is not response: | ||
| 84 | from_cache = True | ||
| 85 | |||
| 86 | # We are done with the server response, read a | ||
| 87 | # possible response body (compliant servers will | ||
| 88 | # not return one, but we cannot be 100% sure) and | ||
| 89 | # release the connection back to the pool. | ||
| 90 | response.read(decode_content=False) | ||
| 91 | response.release_conn() | ||
| 92 | |||
| 93 | response = cached_response | ||
| 94 | |||
| 95 | # We always cache the 301 responses | ||
| 96 | elif response.status == 301: | ||
| 97 | self.controller.cache_response(request, response) | ||
| 98 | else: | ||
| 99 | # Wrap the response file with a wrapper that will cache the | ||
| 100 | # response when the stream has been consumed. | ||
| 101 | response._fp = CallbackFileWrapper( | ||
| 102 | response._fp, | ||
| 103 | functools.partial( | ||
| 104 | self.controller.cache_response, | ||
| 105 | request, | ||
| 106 | response, | ||
| 107 | ) | ||
| 108 | ) | ||
| 109 | if response.chunked: | ||
| 110 | super_update_chunk_length = response._update_chunk_length | ||
| 111 | |||
| 112 | def _update_chunk_length(self): | ||
| 113 | super_update_chunk_length() | ||
| 114 | if self.chunk_left == 0: | ||
| 115 | self._fp._close() | ||
| 116 | response._update_chunk_length = types.MethodType(_update_chunk_length, response) | ||
| 117 | |||
| 118 | resp = super(CacheControlAdapter, self).build_response( | ||
| 119 | request, response | ||
| 120 | ) | ||
| 121 | |||
| 122 | # See if we should invalidate the cache. | ||
| 123 | if request.method in self.invalidating_methods and resp.ok: | ||
| 124 | cache_url = self.controller.cache_url(request.url) | ||
| 125 | self.cache.delete(cache_url) | ||
| 126 | |||
| 127 | # Give the request a from_cache attr to let people use it | ||
| 128 | resp.from_cache = from_cache | ||
| 129 | |||
| 130 | return resp | ||
| 131 | |||
| 132 | def close(self): | ||
| 133 | self.cache.close() | ||
| 134 | super(CacheControlAdapter, self).close() | ||
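A hedged sketch of using the adapter directly: mount it on a session and inspect the from_cache attribute that build_response() attaches to every response (the URL is a placeholder):

    from pip._vendor import requests
    from pip._vendor.cachecontrol import CacheControlAdapter

    sess = requests.Session()
    sess.mount("https://", CacheControlAdapter())

    first = sess.get("https://example.com")
    second = sess.get("https://example.com")
    print(first.from_cache, second.from_cache)  # typically False, then True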
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/cache.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/cache.py new file mode 100644 index 0000000..04d1488 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/cache.py | |||
| @@ -0,0 +1,39 @@ | |||
| 1 | """ | ||
| 2 | The cache object API for implementing caches. The default is a thread | ||
| 3 | safe in-memory dictionary. | ||
| 4 | """ | ||
| 5 | from threading import Lock | ||
| 6 | |||
| 7 | |||
| 8 | class BaseCache(object): | ||
| 9 | |||
| 10 | def get(self, key): | ||
| 11 | raise NotImplementedError() | ||
| 12 | |||
| 13 | def set(self, key, value): | ||
| 14 | raise NotImplementedError() | ||
| 15 | |||
| 16 | def delete(self, key): | ||
| 17 | raise NotImplementedError() | ||
| 18 | |||
| 19 | def close(self): | ||
| 20 | pass | ||
| 21 | |||
| 22 | |||
| 23 | class DictCache(BaseCache): | ||
| 24 | |||
| 25 | def __init__(self, init_dict=None): | ||
| 26 | self.lock = Lock() | ||
| 27 | self.data = init_dict or {} | ||
| 28 | |||
| 29 | def get(self, key): | ||
| 30 | return self.data.get(key, None) | ||
| 31 | |||
| 32 | def set(self, key, value): | ||
| 33 | with self.lock: | ||
| 34 | self.data.update({key: value}) | ||
| 35 | |||
| 36 | def delete(self, key): | ||
| 37 | with self.lock: | ||
| 38 | if key in self.data: | ||
| 39 | self.data.pop(key) | ||
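Since the backend contract is just get/set/delete plus an optional close, custom caches stay small. A hedged sketch that counts cache hits on top of DictCache (the class is illustrative, not part of the library):

    from pip._vendor.cachecontrol.cache import DictCache

    class CountingCache(DictCache):
        """DictCache variant that tracks how often a key was found."""

        def __init__(self, init_dict=None):
            super(CountingCache, self).__init__(init_dict)
            self.hits = 0

        def get(self, key):
            value = super(CountingCache, self).get(key)
            if value is not None:
                self.hits += 1
            return value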
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/caches/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/caches/__init__.py new file mode 100644 index 0000000..1193f26 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/caches/__init__.py | |||
| @@ -0,0 +1,2 @@ | |||
| 1 | from .file_cache import FileCache # noqa | ||
| 2 | from .redis_cache import RedisCache # noqa | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/caches/file_cache.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/caches/file_cache.py new file mode 100644 index 0000000..f7eb890 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/caches/file_cache.py | |||
| @@ -0,0 +1,133 @@ | |||
| 1 | import hashlib | ||
| 2 | import os | ||
| 3 | from textwrap import dedent | ||
| 4 | |||
| 5 | from ..cache import BaseCache | ||
| 6 | from ..controller import CacheController | ||
| 7 | |||
| 8 | try: | ||
| 9 | FileNotFoundError | ||
| 10 | except NameError: | ||
| 11 | # py2.X | ||
| 12 | FileNotFoundError = OSError | ||
| 13 | |||
| 14 | |||
| 15 | def _secure_open_write(filename, fmode): | ||
| 16 | # We only want to write to this file, so open it in write only mode | ||
| 17 | flags = os.O_WRONLY | ||
| 18 | |||
| 19 | # os.O_CREAT | os.O_EXCL will fail if the file already exists, so we will | ||
| 20 | # only open *new* files. | ||
| 21 | # We specify this because we want to ensure that the mode we pass is the | ||
| 22 | # mode of the file. | ||
| 23 | flags |= os.O_CREAT | os.O_EXCL | ||
| 24 | |||
| 25 | # Do not follow symlinks to prevent someone from making a symlink that | ||
| 26 | # we follow and insecurely open a cache file. | ||
| 27 | if hasattr(os, "O_NOFOLLOW"): | ||
| 28 | flags |= os.O_NOFOLLOW | ||
| 29 | |||
| 30 | # On Windows we'll mark this file as binary | ||
| 31 | if hasattr(os, "O_BINARY"): | ||
| 32 | flags |= os.O_BINARY | ||
| 33 | |||
| 34 | # Before we open our file, we want to delete any existing file that is | ||
| 35 | # there | ||
| 36 | try: | ||
| 37 | os.remove(filename) | ||
| 38 | except (IOError, OSError): | ||
| 39 | # The file must not exist already, so we can just skip ahead to opening | ||
| 40 | pass | ||
| 41 | |||
| 42 | # Open our file, the use of os.O_CREAT | os.O_EXCL will ensure that if a | ||
| 43 | # race condition happens between the os.remove and this line, that an | ||
| 44 | # error will be raised. Because we utilize a lockfile this should only | ||
| 45 | # happen if someone is attempting to attack us. | ||
| 46 | fd = os.open(filename, flags, fmode) | ||
| 47 | try: | ||
| 48 | return os.fdopen(fd, "wb") | ||
| 49 | except BaseException: | ||
| 50 | # An error occurred wrapping our FD in a file object | ||
| 51 | os.close(fd) | ||
| 52 | raise | ||
| 53 | |||
| 54 | |||
| 55 | class FileCache(BaseCache): | ||
| 56 | def __init__(self, directory, forever=False, filemode=0o0600, | ||
| 57 | dirmode=0o0700, use_dir_lock=None, lock_class=None): | ||
| 58 | |||
| 59 | if use_dir_lock is not None and lock_class is not None: | ||
| 60 | raise ValueError("Cannot use use_dir_lock and lock_class together") | ||
| 61 | |||
| 62 | try: | ||
| 63 | from pip._vendor.lockfile import LockFile | ||
| 64 | from pip._vendor.lockfile.mkdirlockfile import MkdirLockFile | ||
| 65 | except ImportError: | ||
| 66 | notice = dedent(""" | ||
| 67 | NOTE: In order to use the FileCache you must have | ||
| 68 | lockfile installed. You can install it via pip: | ||
| 69 | pip install lockfile | ||
| 70 | """) | ||
| 71 | raise ImportError(notice) | ||
| 72 | else: | ||
| 73 | if use_dir_lock: | ||
| 74 | lock_class = MkdirLockFile | ||
| 75 | |||
| 76 | elif lock_class is None: | ||
| 77 | lock_class = LockFile | ||
| 78 | |||
| 79 | self.directory = directory | ||
| 80 | self.forever = forever | ||
| 81 | self.filemode = filemode | ||
| 82 | self.dirmode = dirmode | ||
| 83 | self.lock_class = lock_class | ||
| 84 | |||
| 85 | @staticmethod | ||
| 86 | def encode(x): | ||
| 87 | return hashlib.sha224(x.encode()).hexdigest() | ||
| 88 | |||
| 89 | def _fn(self, name): | ||
| 90 | # NOTE: This method should not change as some may depend on it. | ||
| 91 | # See: https://github.com/ionrock/cachecontrol/issues/63 | ||
| 92 | hashed = self.encode(name) | ||
| 93 | parts = list(hashed[:5]) + [hashed] | ||
| 94 | return os.path.join(self.directory, *parts) | ||
| 95 | |||
| 96 | def get(self, key): | ||
| 97 | name = self._fn(key) | ||
| 98 | if not os.path.exists(name): | ||
| 99 | return None | ||
| 100 | |||
| 101 | with open(name, 'rb') as fh: | ||
| 102 | return fh.read() | ||
| 103 | |||
| 104 | def set(self, key, value): | ||
| 105 | name = self._fn(key) | ||
| 106 | |||
| 107 | # Make sure the directory exists | ||
| 108 | try: | ||
| 109 | os.makedirs(os.path.dirname(name), self.dirmode) | ||
| 110 | except (IOError, OSError): | ||
| 111 | pass | ||
| 112 | |||
| 113 | with self.lock_class(name) as lock: | ||
| 114 | # Write our actual file | ||
| 115 | with _secure_open_write(lock.path, self.filemode) as fh: | ||
| 116 | fh.write(value) | ||
| 117 | |||
| 118 | def delete(self, key): | ||
| 119 | name = self._fn(key) | ||
| 120 | if not self.forever: | ||
| 121 | try: | ||
| 122 | os.remove(name) | ||
| 123 | except FileNotFoundError: | ||
| 124 | pass | ||
| 125 | |||
| 126 | |||
| 127 | def url_to_file_path(url, filecache): | ||
| 128 | """Return the file cache path based on the URL. | ||
| 129 | |||
| 130 | This does not ensure the file exists! | ||
| 131 | """ | ||
| 132 | key = CacheController.cache_url(url) | ||
| 133 | return filecache._fn(key) | ||
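A hedged usage sketch: FileCache persists entries on disk under the sharded layout produced by _fn(), and url_to_file_path() recovers that path for a given URL (the directory name is arbitrary):

    from pip._vendor import requests
    from pip._vendor.cachecontrol import CacheControl
    from pip._vendor.cachecontrol.caches import FileCache
    from pip._vendor.cachecontrol.caches.file_cache import url_to_file_path

    cache = FileCache(".webcache")
    sess = CacheControl(requests.Session(), cache=cache)
    sess.get("https://example.com")
    print(url_to_file_path("https://example.com", cache))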
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/caches/redis_cache.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/caches/redis_cache.py new file mode 100644 index 0000000..db1e09d --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/caches/redis_cache.py | |||
| @@ -0,0 +1,43 @@ | |||
| 1 | from __future__ import division | ||
| 2 | |||
| 3 | from datetime import datetime | ||
| 4 | from pip._vendor.cachecontrol.cache import BaseCache | ||
| 5 | |||
| 6 | |||
| 7 | def total_seconds(td): | ||
| 8 | """Python 2.6 compatability""" | ||
| 9 | if hasattr(td, 'total_seconds'): | ||
| 10 | return int(td.total_seconds()) | ||
| 11 | |||
| 12 | ms = td.microseconds | ||
| 13 | secs = (td.seconds + td.days * 24 * 3600) | ||
| 14 | return int((ms + secs * 10**6) / 10**6) | ||
| 15 | |||
| 16 | |||
| 17 | class RedisCache(BaseCache): | ||
| 18 | |||
| 19 | def __init__(self, conn): | ||
| 20 | self.conn = conn | ||
| 21 | |||
| 22 | def get(self, key): | ||
| 23 | return self.conn.get(key) | ||
| 24 | |||
| 25 | def set(self, key, value, expires=None): | ||
| 26 | if not expires: | ||
| 27 | self.conn.set(key, value) | ||
| 28 | else: | ||
| 29 | expires = expires - datetime.utcnow() | ||
| 30 | self.conn.setex(key, total_seconds(expires), value) | ||
| 31 | |||
| 32 | def delete(self, key): | ||
| 33 | self.conn.delete(key) | ||
| 34 | |||
| 35 | def clear(self): | ||
| 36 | """Helper for clearing all the keys in a database. Use with | ||
| 37 | caution!""" | ||
| 38 | for key in self.conn.keys(): | ||
| 39 | self.conn.delete(key) | ||
| 40 | |||
| 41 | def close(self): | ||
| 42 | """Redis uses connection pooling, no need to close the connection.""" | ||
| 43 | pass | ||
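A hedged sketch: this backend needs the external redis-py package and a reachable server, neither of which is vendored here (host and port are assumptions):

    import redis  # external dependency, not vendored with pip

    from pip._vendor import requests
    from pip._vendor.cachecontrol import CacheControl
    from pip._vendor.cachecontrol.caches import RedisCache

    conn = redis.Redis(host="localhost", port=6379)
    sess = CacheControl(requests.Session(), cache=RedisCache(conn))
    sess.get("https://example.com")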
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/compat.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/compat.py new file mode 100644 index 0000000..e3f3243 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/compat.py | |||
| @@ -0,0 +1,29 @@ | |||
| 1 | try: | ||
| 2 | from urllib.parse import urljoin | ||
| 3 | except ImportError: | ||
| 4 | from urlparse import urljoin | ||
| 5 | |||
| 6 | |||
| 7 | try: | ||
| 8 | import cPickle as pickle | ||
| 9 | except ImportError: | ||
| 10 | import pickle | ||
| 11 | |||
| 12 | |||
| 13 | # Handle the case where the requests module has been patched to not have | ||
| 14 | # urllib3 bundled as part of its source. | ||
| 15 | try: | ||
| 16 | from pip._vendor.requests.packages.urllib3.response import HTTPResponse | ||
| 17 | except ImportError: | ||
| 18 | from pip._vendor.urllib3.response import HTTPResponse | ||
| 19 | |||
| 20 | try: | ||
| 21 | from pip._vendor.requests.packages.urllib3.util import is_fp_closed | ||
| 22 | except ImportError: | ||
| 23 | from pip._vendor.urllib3.util import is_fp_closed | ||
| 24 | |||
| 25 | # Replicate some six behaviour | ||
| 26 | try: | ||
| 27 | text_type = unicode | ||
| 28 | except NameError: | ||
| 29 | text_type = str | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/controller.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/controller.py new file mode 100644 index 0000000..bf4cc7f --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/controller.py | |||
| @@ -0,0 +1,373 @@ | |||
| 1 | """ | ||
| 2 | The httplib2 algorithms ported for use with requests. | ||
| 3 | """ | ||
| 4 | import logging | ||
| 5 | import re | ||
| 6 | import calendar | ||
| 7 | import time | ||
| 8 | from email.utils import parsedate_tz | ||
| 9 | |||
| 10 | from pip._vendor.requests.structures import CaseInsensitiveDict | ||
| 11 | |||
| 12 | from .cache import DictCache | ||
| 13 | from .serialize import Serializer | ||
| 14 | |||
| 15 | |||
| 16 | logger = logging.getLogger(__name__) | ||
| 17 | |||
| 18 | URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?") | ||
| 19 | |||
| 20 | |||
| 21 | def parse_uri(uri): | ||
| 22 | """Parses a URI using the regex given in Appendix B of RFC 3986. | ||
| 23 | |||
| 24 | (scheme, authority, path, query, fragment) = parse_uri(uri) | ||
| 25 | """ | ||
| 26 | groups = URI.match(uri).groups() | ||
| 27 | return (groups[1], groups[3], groups[4], groups[6], groups[8]) | ||
| 28 | |||
| 29 | |||
| 30 | class CacheController(object): | ||
| 31 | """An interface to see if request should cached or not. | ||
| 32 | """ | ||
| 33 | def __init__(self, cache=None, cache_etags=True, serializer=None, | ||
| 34 | status_codes=None): | ||
| 35 | self.cache = cache or DictCache() | ||
| 36 | self.cache_etags = cache_etags | ||
| 37 | self.serializer = serializer or Serializer() | ||
| 38 | self.cacheable_status_codes = status_codes or (200, 203, 300, 301) | ||
| 39 | |||
| 40 | @classmethod | ||
| 41 | def _urlnorm(cls, uri): | ||
| 42 | """Normalize the URL to create a safe key for the cache""" | ||
| 43 | (scheme, authority, path, query, fragment) = parse_uri(uri) | ||
| 44 | if not scheme or not authority: | ||
| 45 | raise Exception("Only absolute URIs are allowed. uri = %s" % uri) | ||
| 46 | |||
| 47 | scheme = scheme.lower() | ||
| 48 | authority = authority.lower() | ||
| 49 | |||
| 50 | if not path: | ||
| 51 | path = "/" | ||
| 52 | |||
| 53 | # Could do syntax based normalization of the URI before | ||
| 54 | # computing the digest. See Section 6.2.2 of Std 66. | ||
| 55 | request_uri = query and "?".join([path, query]) or path | ||
| 56 | defrag_uri = scheme + "://" + authority + request_uri | ||
| 57 | |||
| 58 | return defrag_uri | ||
| 59 | |||
| 60 | @classmethod | ||
| 61 | def cache_url(cls, uri): | ||
| 62 | return cls._urlnorm(uri) | ||
| 63 | |||
| 64 | def parse_cache_control(self, headers): | ||
| 65 | known_directives = { | ||
| 66 | # https://tools.ietf.org/html/rfc7234#section-5.2 | ||
| 67 | 'max-age': (int, True,), | ||
| 68 | 'max-stale': (int, False,), | ||
| 69 | 'min-fresh': (int, True,), | ||
| 70 | 'no-cache': (None, False,), | ||
| 71 | 'no-store': (None, False,), | ||
| 72 | 'no-transform': (None, False,), | ||
| 73 | 'only-if-cached': (None, False,), | ||
| 74 | 'must-revalidate': (None, False,), | ||
| 75 | 'public': (None, False,), | ||
| 76 | 'private': (None, False,), | ||
| 77 | 'proxy-revalidate': (None, False,), | ||
| 78 | 's-maxage': (int, True,) | ||
| 79 | } | ||
| 80 | |||
| 81 | cc_headers = headers.get('cache-control', | ||
| 82 | headers.get('Cache-Control', '')) | ||
| 83 | |||
| 84 | retval = {} | ||
| 85 | |||
| 86 | for cc_directive in cc_headers.split(','): | ||
| 87 | parts = cc_directive.split('=', 1) | ||
| 88 | directive = parts[0].strip() | ||
| 89 | |||
| 90 | try: | ||
| 91 | typ, required = known_directives[directive] | ||
| 92 | except KeyError: | ||
| 93 | logger.debug('Ignoring unknown cache-control directive: %s', | ||
| 94 | directive) | ||
| 95 | continue | ||
| 96 | |||
| 97 | if not typ or not required: | ||
| 98 | retval[directive] = None | ||
| 99 | if typ: | ||
| 100 | try: | ||
| 101 | retval[directive] = typ(parts[1].strip()) | ||
| 102 | except IndexError: | ||
| 103 | if required: | ||
| 104 | logger.debug('Missing value for cache-control ' | ||
| 105 | 'directive: %s', directive) | ||
| 106 | except ValueError: | ||
| 107 | logger.debug('Invalid value for cache-control directive ' | ||
| 108 | '%s, must be %s', directive, typ.__name__) | ||
| 109 | |||
| 110 | return retval | ||
| 111 | |||
| 112 | def cached_request(self, request): | ||
| 113 | """ | ||
| 114 | Return a cached response if it exists in the cache, otherwise | ||
| 115 | return False. | ||
| 116 | """ | ||
| 117 | cache_url = self.cache_url(request.url) | ||
| 118 | logger.debug('Looking up "%s" in the cache', cache_url) | ||
| 119 | cc = self.parse_cache_control(request.headers) | ||
| 120 | |||
| 121 | # Bail out if the request insists on fresh data | ||
| 122 | if 'no-cache' in cc: | ||
| 123 | logger.debug('Request header has "no-cache", cache bypassed') | ||
| 124 | return False | ||
| 125 | |||
| 126 | if 'max-age' in cc and cc['max-age'] == 0: | ||
| 127 | logger.debug('Request header has "max-age" as 0, cache bypassed') | ||
| 128 | return False | ||
| 129 | |||
| 130 | # Request allows serving from the cache, let's see if we find something | ||
| 131 | cache_data = self.cache.get(cache_url) | ||
| 132 | if cache_data is None: | ||
| 133 | logger.debug('No cache entry available') | ||
| 134 | return False | ||
| 135 | |||
| 136 | # Check whether it can be deserialized | ||
| 137 | resp = self.serializer.loads(request, cache_data) | ||
| 138 | if not resp: | ||
| 139 | logger.warning('Cache entry deserialization failed, entry ignored') | ||
| 140 | return False | ||
| 141 | |||
| 142 | # If we have a cached 301, return it immediately. We don't | ||
| 143 | # need to test our response for other headers b/c it is | ||
| 144 | # intrinsically "cacheable" as it is Permanent. | ||
| 145 | # See: | ||
| 146 | # https://tools.ietf.org/html/rfc7231#section-6.4.2 | ||
| 147 | # | ||
| 148 | # Client can try to refresh the value by repeating the request | ||
| 149 | # with cache-busting headers as usual (i.e. no-cache). | ||
| 150 | if resp.status == 301: | ||
| 151 | msg = ('Returning cached "301 Moved Permanently" response ' | ||
| 152 | '(ignoring date and etag information)') | ||
| 153 | logger.debug(msg) | ||
| 154 | return resp | ||
| 155 | |||
| 156 | headers = CaseInsensitiveDict(resp.headers) | ||
| 157 | if not headers or 'date' not in headers: | ||
| 158 | if 'etag' not in headers: | ||
| 159 | # Without date or etag, the cached response can never be used | ||
| 160 | # and should be deleted. | ||
| 161 | logger.debug('Purging cached response: no date or etag') | ||
| 162 | self.cache.delete(cache_url) | ||
| 163 | logger.debug('Ignoring cached response: no date') | ||
| 164 | return False | ||
| 165 | |||
| 166 | now = time.time() | ||
| 167 | date = calendar.timegm( | ||
| 168 | parsedate_tz(headers['date']) | ||
| 169 | ) | ||
| 170 | current_age = max(0, now - date) | ||
| 171 | logger.debug('Current age based on date: %i', current_age) | ||
| 172 | |||
| 173 | # TODO: There is an assumption that the result will be a | ||
| 174 | # urllib3 response object. This may not be best since we | ||
| 175 | # could probably avoid instantiating or constructing the | ||
| 176 | # response until we know we need it. | ||
| 177 | resp_cc = self.parse_cache_control(headers) | ||
| 178 | |||
| 179 | # determine freshness | ||
| 180 | freshness_lifetime = 0 | ||
| 181 | |||
| 182 | # Check the max-age pragma in the cache control header | ||
| 183 | if 'max-age' in resp_cc: | ||
| 184 | freshness_lifetime = resp_cc['max-age'] | ||
| 185 | logger.debug('Freshness lifetime from max-age: %i', | ||
| 186 | freshness_lifetime) | ||
| 187 | |||
| 188 | # If there isn't a max-age, check for an expires header | ||
| 189 | elif 'expires' in headers: | ||
| 190 | expires = parsedate_tz(headers['expires']) | ||
| 191 | if expires is not None: | ||
| 192 | expire_time = calendar.timegm(expires) - date | ||
| 193 | freshness_lifetime = max(0, expire_time) | ||
| 194 | logger.debug("Freshness lifetime from expires: %i", | ||
| 195 | freshness_lifetime) | ||
| 196 | |||
| 197 | # Determine if we are setting freshness limit in the | ||
| 198 | # request. Note, this overrides what was in the response. | ||
| 199 | if 'max-age' in cc: | ||
| 200 | freshness_lifetime = cc['max-age'] | ||
| 201 | logger.debug('Freshness lifetime from request max-age: %i', | ||
| 202 | freshness_lifetime) | ||
| 203 | |||
| 204 | if 'min-fresh' in cc: | ||
| 205 | min_fresh = cc['min-fresh'] | ||
| 206 | # adjust our current age by our min fresh | ||
| 207 | current_age += min_fresh | ||
| 208 | logger.debug('Adjusted current age from min-fresh: %i', | ||
| 209 | current_age) | ||
| 210 | |||
| 211 | # Return entry if it is fresh enough | ||
| 212 | if freshness_lifetime > current_age: | ||
| 213 | logger.debug('The response is "fresh", returning cached response') | ||
| 214 | logger.debug('%i > %i', freshness_lifetime, current_age) | ||
| 215 | return resp | ||
| 216 | |||
| 217 | # we're not fresh. If we don't have an Etag, clear it out | ||
| 218 | if 'etag' not in headers: | ||
| 219 | logger.debug( | ||
| 220 | 'The cached response is "stale" with no etag, purging' | ||
| 221 | ) | ||
| 222 | self.cache.delete(cache_url) | ||
| 223 | |||
| 224 | # no usable cached response; the caller should issue the request | ||
| 225 | return False | ||
| 226 | |||
| 227 | def conditional_headers(self, request): | ||
| 228 | cache_url = self.cache_url(request.url) | ||
| 229 | resp = self.serializer.loads(request, self.cache.get(cache_url)) | ||
| 230 | new_headers = {} | ||
| 231 | |||
| 232 | if resp: | ||
| 233 | headers = CaseInsensitiveDict(resp.headers) | ||
| 234 | |||
| 235 | if 'etag' in headers: | ||
| 236 | new_headers['If-None-Match'] = headers['ETag'] | ||
| 237 | |||
| 238 | if 'last-modified' in headers: | ||
| 239 | new_headers['If-Modified-Since'] = headers['Last-Modified'] | ||
| 240 | |||
| 241 | return new_headers | ||
| 242 | |||
| 243 | def cache_response(self, request, response, body=None, | ||
| 244 | status_codes=None): | ||
| 245 | """ | ||
| 246 | Algorithm for caching requests. | ||
| 247 | |||
| 248 | This assumes a requests Response object. | ||
| 249 | """ | ||
| 250 | # From httplib2: Don't cache 206's since we aren't going to | ||
| 251 | # handle byte range requests | ||
| 252 | cacheable_status_codes = status_codes or self.cacheable_status_codes | ||
| 253 | if response.status not in cacheable_status_codes: | ||
| 254 | logger.debug( | ||
| 255 | 'Status code %s not in %s', | ||
| 256 | response.status, | ||
| 257 | cacheable_status_codes | ||
| 258 | ) | ||
| 259 | return | ||
| 260 | |||
| 261 | response_headers = CaseInsensitiveDict(response.headers) | ||
| 262 | |||
| 263 | # If we've been given a body, our response has a Content-Length, and | ||
| 264 | # that Content-Length is valid, then we can check whether the body | ||
| 265 | # we've been given matches the expected size; if it doesn't, we'll | ||
| 266 | # just skip trying to cache it. | ||
| 267 | if (body is not None and | ||
| 268 | "content-length" in response_headers and | ||
| 269 | response_headers["content-length"].isdigit() and | ||
| 270 | int(response_headers["content-length"]) != len(body)): | ||
| 271 | return | ||
| 272 | |||
| 273 | cc_req = self.parse_cache_control(request.headers) | ||
| 274 | cc = self.parse_cache_control(response_headers) | ||
| 275 | |||
| 276 | cache_url = self.cache_url(request.url) | ||
| 277 | logger.debug('Updating cache with response from "%s"', cache_url) | ||
| 278 | |||
| 279 | # Delete it from the cache if we happen to have it stored there | ||
| 280 | no_store = False | ||
| 281 | if 'no-store' in cc: | ||
| 282 | no_store = True | ||
| 283 | logger.debug('Response header has "no-store"') | ||
| 284 | if 'no-store' in cc_req: | ||
| 285 | no_store = True | ||
| 286 | logger.debug('Request header has "no-store"') | ||
| 287 | if no_store and self.cache.get(cache_url): | ||
| 288 | logger.debug('Purging existing cache entry to honor "no-store"') | ||
| 289 | self.cache.delete(cache_url) | ||
| 290 | |||
| 291 | # If we've been given an etag, then keep the response | ||
| 292 | if self.cache_etags and 'etag' in response_headers: | ||
| 293 | logger.debug('Caching due to etag') | ||
| 294 | self.cache.set( | ||
| 295 | cache_url, | ||
| 296 | self.serializer.dumps(request, response, body=body), | ||
| 297 | ) | ||
| 298 | |||
| 299 | # Add any 301s to the cache. We do this before looking at | ||
| 300 | # the Date headers. | ||
| 301 | elif response.status == 301: | ||
| 302 | logger.debug('Caching permanent redirect') | ||
| 303 | self.cache.set( | ||
| 304 | cache_url, | ||
| 305 | self.serializer.dumps(request, response) | ||
| 306 | ) | ||
| 307 | |||
| 308 | # Add to the cache if the response headers demand it. If there | ||
| 309 | # is no date header then we can't do anything about expiring | ||
| 310 | # the cache. | ||
| 311 | elif 'date' in response_headers: | ||
| 312 | # cache when there is a max-age > 0 | ||
| 313 | if 'max-age' in cc and cc['max-age'] > 0: | ||
| 314 | logger.debug('Caching b/c date exists and max-age > 0') | ||
| 315 | self.cache.set( | ||
| 316 | cache_url, | ||
| 317 | self.serializer.dumps(request, response, body=body), | ||
| 318 | ) | ||
| 319 | |||
| 320 | # If the response can expire, it means we should cache it | ||
| 321 | # in the meantime. | ||
| 322 | elif 'expires' in response_headers: | ||
| 323 | if response_headers['expires']: | ||
| 324 | logger.debug('Caching b/c of expires header') | ||
| 325 | self.cache.set( | ||
| 326 | cache_url, | ||
| 327 | self.serializer.dumps(request, response, body=body), | ||
| 328 | ) | ||
| 329 | |||
| 330 | def update_cached_response(self, request, response): | ||
| 331 | """On a 304 we will get a new set of headers that we want to | ||
| 332 | update our cached value with, assuming we have one. | ||
| 333 | |||
| 334 | This should only ever be called when we've sent an ETag and | ||
| 335 | gotten a 304 as the response. | ||
| 336 | """ | ||
| 337 | cache_url = self.cache_url(request.url) | ||
| 338 | |||
| 339 | cached_response = self.serializer.loads( | ||
| 340 | request, | ||
| 341 | self.cache.get(cache_url) | ||
| 342 | ) | ||
| 343 | |||
| 344 | if not cached_response: | ||
| 345 | # we didn't have a cached response | ||
| 346 | return response | ||
| 347 | |||
| 348 | # Let's update our headers with the headers from the new response: | ||
| 349 | # http://tools.ietf.org/html/draft-ietf-httpbis-p4-conditional-26#section-4.1 | ||
| 350 | # | ||
| 351 | # The server isn't supposed to send headers that would make | ||
| 352 | # the cached body invalid. But... just in case, we'll be sure | ||
| 353 | # to strip out ones we know might be problematic due to | ||
| 354 | # typical assumptions. | ||
| 355 | excluded_headers = [ | ||
| 356 | "content-length", | ||
| 357 | ] | ||
| 358 | |||
| 359 | cached_response.headers.update( | ||
| 360 | dict((k, v) for k, v in response.headers.items() | ||
| 361 | if k.lower() not in excluded_headers) | ||
| 362 | ) | ||
| 363 | |||
| 364 | # we want a 200 b/c we have content via the cache | ||
| 365 | cached_response.status = 200 | ||
| 366 | |||
| 367 | # update our cache | ||
| 368 | self.cache.set( | ||
| 369 | cache_url, | ||
| 370 | self.serializer.dumps(request, cached_response), | ||
| 371 | ) | ||
| 372 | |||
| 373 | return cached_response | ||
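As a quick illustration of the directive parsing above (a minimal sketch; the vendored import path and the DictCache default are the only assumptions):

    from pip._vendor.cachecontrol.controller import CacheController

    cc = CacheController()
    # Unknown directives are logged and dropped; valued ones are coerced.
    print(cc.parse_cache_control({'cache-control': 'max-age=3600, no-store, bogus'}))
    # -> {'max-age': 3600, 'no-store': None}

The freshness check in cached_request then compares this max-age (the freshness lifetime) against the entry's current age derived from its Date header.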
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/filewrapper.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/filewrapper.py new file mode 100644 index 0000000..83ce912 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/filewrapper.py | |||
| @@ -0,0 +1,78 @@ | |||
| 1 | from io import BytesIO | ||
| 2 | |||
| 3 | |||
| 4 | class CallbackFileWrapper(object): | ||
| 5 | """ | ||
| 6 | Small wrapper around an fp object which will tee everything read into a | ||
| 7 | buffer, and when that file is closed it will execute a callback with the | ||
| 8 | contents of that buffer. | ||
| 9 | |||
| 10 | All attributes are proxied to the underlying file object. | ||
| 11 | |||
| 12 | This class uses members with a leading double-underscore (__) prefix so as | ||
| 13 | not to accidentally shadow an attribute. | ||
| 14 | """ | ||
| 15 | |||
| 16 | def __init__(self, fp, callback): | ||
| 17 | self.__buf = BytesIO() | ||
| 18 | self.__fp = fp | ||
| 19 | self.__callback = callback | ||
| 20 | |||
| 21 | def __getattr__(self, name): | ||
| 22 | # The vagaries of garbage collection mean that self.__fp is | ||
| 23 | # not always set. Using __getattribute__ with the mangled | ||
| 24 | # private name [0] lets us look up the attribute value and | ||
| 25 | # raise an AttributeError when it doesn't exist. This stops | ||
| 26 | # things from infinitely recursing into getattr in the case | ||
| 27 | # where self.__fp hasn't been set. | ||
| 28 | # | ||
| 29 | # [0] https://docs.python.org/2/reference/expressions.html#atom-identifiers | ||
| 30 | fp = self.__getattribute__('_CallbackFileWrapper__fp') | ||
| 31 | return getattr(fp, name) | ||
| 32 | |||
| 33 | def __is_fp_closed(self): | ||
| 34 | try: | ||
| 35 | return self.__fp.fp is None | ||
| 36 | except AttributeError: | ||
| 37 | pass | ||
| 38 | |||
| 39 | try: | ||
| 40 | return self.__fp.closed | ||
| 41 | except AttributeError: | ||
| 42 | pass | ||
| 43 | |||
| 44 | # We just don't cache it then. | ||
| 45 | # TODO: Add some logging here... | ||
| 46 | return False | ||
| 47 | |||
| 48 | def _close(self): | ||
| 49 | if self.__callback: | ||
| 50 | self.__callback(self.__buf.getvalue()) | ||
| 51 | |||
| 52 | # We assign this to None here, because otherwise we can get into | ||
| 53 | # really tricky problems where the CPython interpreter deadlocks | ||
| 54 | # because the callback is holding a reference to something which | ||
| 55 | # has a __del__ method. Setting this to None breaks the cycle | ||
| 56 | # and allows the garbage collector to do its thing normally. | ||
| 57 | self.__callback = None | ||
| 58 | |||
| 59 | def read(self, amt=None): | ||
| 60 | data = self.__fp.read(amt) | ||
| 61 | self.__buf.write(data) | ||
| 62 | if self.__is_fp_closed(): | ||
| 63 | self._close() | ||
| 64 | |||
| 65 | return data | ||
| 66 | |||
| 67 | def _safe_read(self, amt): | ||
| 68 | data = self.__fp._safe_read(amt) | ||
| 69 | if amt == 2 and data == b'\r\n': | ||
| 70 | # urllib executes this read to toss the CRLF at the end | ||
| 71 | # of the chunk. | ||
| 72 | return data | ||
| 73 | |||
| 74 | self.__buf.write(data) | ||
| 75 | if self.__is_fp_closed(): | ||
| 76 | self._close() | ||
| 77 | |||
| 78 | return data | ||
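The tee-and-callback behaviour is easiest to see with a toy file object (ToyFP below is a hypothetical stand-in for the urllib3 fp the wrapper normally sees; its .closed flag flips at end-of-stream):

    from pip._vendor.cachecontrol.filewrapper import CallbackFileWrapper

    class ToyFP(object):
        # Hypothetical fp whose .closed flips once it is exhausted.
        def __init__(self, data):
            self._data, self.closed = data, False

        def read(self, amt=None):
            chunk, self._data = self._data, b""
            self.closed = True
            return chunk

    captured = []
    wrapped = CallbackFileWrapper(ToyFP(b"hello"), captured.append)
    assert wrapped.read() == b"hello"
    assert captured == [b"hello"]  # callback fired with the buffered bytes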
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/heuristics.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/heuristics.py new file mode 100644 index 0000000..aad333d --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/heuristics.py | |||
| @@ -0,0 +1,138 @@ | |||
| 1 | import calendar | ||
| 2 | import time | ||
| 3 | |||
| 4 | from email.utils import formatdate, parsedate, parsedate_tz | ||
| 5 | |||
| 6 | from datetime import datetime, timedelta | ||
| 7 | |||
| 8 | TIME_FMT = "%a, %d %b %Y %H:%M:%S GMT" | ||
| 9 | |||
| 10 | |||
| 11 | def expire_after(delta, date=None): | ||
| 12 | date = date or datetime.utcnow() | ||
| 13 | return date + delta | ||
| 14 | |||
| 15 | |||
| 16 | def datetime_to_header(dt): | ||
| 17 | return formatdate(calendar.timegm(dt.timetuple())) | ||
| 18 | |||
| 19 | |||
| 20 | class BaseHeuristic(object): | ||
| 21 | |||
| 22 | def warning(self, response): | ||
| 23 | """ | ||
| 24 | Return a valid 1xx warning header value describing the cache | ||
| 25 | adjustments. | ||
| 26 | |||
| 27 | The response is provided to allow warnings like 113 | ||
| 28 | http://tools.ietf.org/html/rfc7234#section-5.5.4 where we need | ||
| 29 | to explicitly say the response is over 24 hours old. | ||
| 30 | """ | ||
| 31 | return '110 - "Response is Stale"' | ||
| 32 | |||
| 33 | def update_headers(self, response): | ||
| 34 | """Update the response headers with any new headers. | ||
| 35 | |||
| 36 | NOTE: This SHOULD always include some Warning header to | ||
| 37 | signify that the response was cached by the client, not | ||
| 38 | by way of the provided headers. | ||
| 39 | """ | ||
| 40 | return {} | ||
| 41 | |||
| 42 | def apply(self, response): | ||
| 43 | updated_headers = self.update_headers(response) | ||
| 44 | |||
| 45 | if updated_headers: | ||
| 46 | response.headers.update(updated_headers) | ||
| 47 | warning_header_value = self.warning(response) | ||
| 48 | if warning_header_value is not None: | ||
| 49 | response.headers.update({'Warning': warning_header_value}) | ||
| 50 | |||
| 51 | return response | ||
| 52 | |||
| 53 | |||
| 54 | class OneDayCache(BaseHeuristic): | ||
| 55 | """ | ||
| 56 | Cache the response by providing an Expires header 1 day in | ||
| 57 | the future. | ||
| 58 | """ | ||
| 59 | def update_headers(self, response): | ||
| 60 | headers = {} | ||
| 61 | |||
| 62 | if 'expires' not in response.headers: | ||
| 63 | date = parsedate(response.headers['date']) | ||
| 64 | expires = expire_after(timedelta(days=1), | ||
| 65 | date=datetime(*date[:6])) | ||
| 66 | headers['expires'] = datetime_to_header(expires) | ||
| 67 | headers['cache-control'] = 'public' | ||
| 68 | return headers | ||
| 69 | |||
| 70 | |||
| 71 | class ExpiresAfter(BaseHeuristic): | ||
| 72 | """ | ||
| 73 | Cache **all** requests for a defined time period. | ||
| 74 | """ | ||
| 75 | |||
| 76 | def __init__(self, **kw): | ||
| 77 | self.delta = timedelta(**kw) | ||
| 78 | |||
| 79 | def update_headers(self, response): | ||
| 80 | expires = expire_after(self.delta) | ||
| 81 | return { | ||
| 82 | 'expires': datetime_to_header(expires), | ||
| 83 | 'cache-control': 'public', | ||
| 84 | } | ||
| 85 | |||
| 86 | def warning(self, response): | ||
| 87 | tmpl = '110 - Automatically cached for %s. Response might be stale' | ||
| 88 | return tmpl % self.delta | ||
| 89 | |||
| 90 | |||
| 91 | class LastModified(BaseHeuristic): | ||
| 92 | """ | ||
| 93 | If there is no Expires header already, fall back on Last-Modified | ||
| 94 | using the heuristic from | ||
| 95 | http://tools.ietf.org/html/rfc7234#section-4.2.2 | ||
| 96 | to calculate a reasonable value. | ||
| 97 | |||
| 98 | Firefox also does something like this per | ||
| 99 | https://developer.mozilla.org/en-US/docs/Web/HTTP/Caching_FAQ | ||
| 100 | http://lxr.mozilla.org/mozilla-release/source/netwerk/protocol/http/nsHttpResponseHead.cpp#397 | ||
| 101 | Unlike Mozilla, we limit this to 24 hours. | ||
| 102 | """ | ||
| 103 | cacheable_by_default_statuses = set([ | ||
| 104 | 200, 203, 204, 206, 300, 301, 404, 405, 410, 414, 501 | ||
| 105 | ]) | ||
| 106 | |||
| 107 | def update_headers(self, resp): | ||
| 108 | headers = resp.headers | ||
| 109 | |||
| 110 | if 'expires' in headers: | ||
| 111 | return {} | ||
| 112 | |||
| 113 | if 'cache-control' in headers and headers['cache-control'] != 'public': | ||
| 114 | return {} | ||
| 115 | |||
| 116 | if resp.status not in self.cacheable_by_default_statuses: | ||
| 117 | return {} | ||
| 118 | |||
| 119 | if 'date' not in headers or 'last-modified' not in headers: | ||
| 120 | return {} | ||
| 121 | |||
| 122 | date = parsedate_tz(headers['date']) | ||
| 123 | last_modified = parsedate(headers['last-modified']) | ||
| 124 | if date is None or last_modified is None: | ||
| 125 | return {} | ||
| 126 | date = calendar.timegm(date) | ||
| 127 | now = time.time() | ||
| 128 | current_age = max(0, now - date) | ||
| 129 | delta = date - calendar.timegm(last_modified) | ||
| 130 | freshness_lifetime = max(0, min(delta / 10, 24 * 3600)) | ||
| 131 | if freshness_lifetime <= current_age: | ||
| 132 | return {} | ||
| 133 | |||
| 134 | expires = date + freshness_lifetime | ||
| 135 | return {'expires': time.strftime(TIME_FMT, time.gmtime(expires))} | ||
| 136 | |||
| 137 | def warning(self, resp): | ||
| 138 | return None | ||
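For example, ExpiresAfter stamps every response with a synthetic Expires header (a minimal sketch; this heuristic never reads its response argument, so None is passed purely for illustration):

    from pip._vendor.cachecontrol.heuristics import ExpiresAfter

    h = ExpiresAfter(days=1)
    print(h.update_headers(None))
    # e.g. {'expires': 'Wed, 12 Dec 2018 10:01:23 GMT', 'cache-control': 'public'}

LastModified, by contrast, grants freshness equal to one tenth of (Date - Last-Modified), capped at 24 hours: a document last modified 5 days before it was served gets 12 hours.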
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/serialize.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/serialize.py new file mode 100644 index 0000000..cd21cae --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/serialize.py | |||
| @@ -0,0 +1,194 @@ | |||
| 1 | import base64 | ||
| 2 | import io | ||
| 3 | import json | ||
| 4 | import zlib | ||
| 5 | |||
| 6 | from pip._vendor import msgpack | ||
| 7 | from pip._vendor.requests.structures import CaseInsensitiveDict | ||
| 8 | |||
| 9 | from .compat import HTTPResponse, pickle, text_type | ||
| 10 | |||
| 11 | |||
| 12 | def _b64_decode_bytes(b): | ||
| 13 | return base64.b64decode(b.encode("ascii")) | ||
| 14 | |||
| 15 | |||
| 16 | def _b64_decode_str(s): | ||
| 17 | return _b64_decode_bytes(s).decode("utf8") | ||
| 18 | |||
| 19 | |||
| 20 | class Serializer(object): | ||
| 21 | |||
| 22 | def dumps(self, request, response, body=None): | ||
| 23 | response_headers = CaseInsensitiveDict(response.headers) | ||
| 24 | |||
| 25 | if body is None: | ||
| 26 | body = response.read(decode_content=False) | ||
| 27 | |||
| 28 | # NOTE: 99% sure this is dead code. I'm only leaving it | ||
| 29 | # here b/c I don't have a test yet to prove | ||
| 30 | # it. Basically, before using | ||
| 31 | # `cachecontrol.filewrapper.CallbackFileWrapper`, | ||
| 32 | # this made an effort to reset the file handle. The | ||
| 33 | # `CallbackFileWrapper` short-circuits this code by | ||
| 34 | # setting the body as the content is consumed, with the | ||
| 35 | # result that a `body` argument is *always* passed | ||
| 36 | # into cache_response, and in turn, | ||
| 37 | # `Serializer.dumps`. | ||
| 38 | response._fp = io.BytesIO(body) | ||
| 39 | |||
| 40 | # NOTE: This is all a bit weird, but it's really important that on | ||
| 41 | # Python 2.x these objects are unicode and not str, even when | ||
| 42 | # they contain only ascii. The problem here is that msgpack | ||
| 43 | # understands the difference between unicode and bytes and we | ||
| 44 | # have it set to differentiate between them, however Python 2 | ||
| 45 | # doesn't know the difference. Forcing these to unicode will be | ||
| 46 | # enough to have msgpack know the difference. | ||
| 47 | data = { | ||
| 48 | u"response": { | ||
| 49 | u"body": body, | ||
| 50 | u"headers": dict( | ||
| 51 | (text_type(k), text_type(v)) | ||
| 52 | for k, v in response.headers.items() | ||
| 53 | ), | ||
| 54 | u"status": response.status, | ||
| 55 | u"version": response.version, | ||
| 56 | u"reason": text_type(response.reason), | ||
| 57 | u"strict": response.strict, | ||
| 58 | u"decode_content": response.decode_content, | ||
| 59 | }, | ||
| 60 | } | ||
| 61 | |||
| 62 | # Construct our vary headers | ||
| 63 | data[u"vary"] = {} | ||
| 64 | if u"vary" in response_headers: | ||
| 65 | varied_headers = response_headers[u'vary'].split(',') | ||
| 66 | for header in varied_headers: | ||
| 67 | header = header.strip() | ||
| 68 | header_value = request.headers.get(header, None) | ||
| 69 | if header_value is not None: | ||
| 70 | header_value = text_type(header_value) | ||
| 71 | data[u"vary"][header] = header_value | ||
| 72 | |||
| 73 | return b",".join([b"cc=4", msgpack.dumps(data, use_bin_type=True)]) | ||
| 74 | |||
| 75 | def loads(self, request, data): | ||
| 76 | # Short circuit if we've been given an empty set of data | ||
| 77 | if not data: | ||
| 78 | return | ||
| 79 | |||
| 80 | # Determine what version of the serializer the data was serialized | ||
| 81 | # with | ||
| 82 | try: | ||
| 83 | ver, data = data.split(b",", 1) | ||
| 84 | except ValueError: | ||
| 85 | ver = b"cc=0" | ||
| 86 | |||
| 87 | # Make sure that our "ver" is actually a version and isn't a false | ||
| 88 | # positive from a , being in the data stream. | ||
| 89 | if ver[:3] != b"cc=": | ||
| 90 | data = ver + data | ||
| 91 | ver = b"cc=0" | ||
| 92 | |||
| 93 | # Get the version number out of the cc=N | ||
| 94 | ver = ver.split(b"=", 1)[-1].decode("ascii") | ||
| 95 | |||
| 96 | # Dispatch to the actual load method for the given version | ||
| 97 | try: | ||
| 98 | return getattr(self, "_loads_v{0}".format(ver))(request, data) | ||
| 99 | except AttributeError: | ||
| 100 | # This is a version we don't have a loads function for, so we'll | ||
| 101 | # just treat it as a miss and return None | ||
| 102 | return | ||
| 103 | |||
| 104 | def prepare_response(self, request, cached): | ||
| 105 | """Verify our vary headers match and construct a real urllib3 | ||
| 106 | HTTPResponse object. | ||
| 107 | """ | ||
| 108 | # Special case the '*' Vary value as it means we cannot actually | ||
| 109 | # determine if the cached response is suitable for this request. | ||
| 110 | if "*" in cached.get("vary", {}): | ||
| 111 | return | ||
| 112 | |||
| 113 | # Ensure that the Vary headers for the cached response match our | ||
| 114 | # request | ||
| 115 | for header, value in cached.get("vary", {}).items(): | ||
| 116 | if request.headers.get(header, None) != value: | ||
| 117 | return | ||
| 118 | |||
| 119 | body_raw = cached["response"].pop("body") | ||
| 120 | |||
| 121 | headers = CaseInsensitiveDict(data=cached['response']['headers']) | ||
| 122 | if headers.get('transfer-encoding', '') == 'chunked': | ||
| 123 | headers.pop('transfer-encoding') | ||
| 124 | |||
| 125 | cached['response']['headers'] = headers | ||
| 126 | |||
| 127 | try: | ||
| 128 | body = io.BytesIO(body_raw) | ||
| 129 | except TypeError: | ||
| 130 | # This can happen if cachecontrol serialized to v1 format (pickle) | ||
| 131 | # using Python 2. A Python 2 str(byte string) will be unpickled as | ||
| 132 | # a Python 3 str (unicode string), which will cause the above to | ||
| 133 | # fail with: | ||
| 134 | # | ||
| 135 | # TypeError: 'str' does not support the buffer interface | ||
| 136 | body = io.BytesIO(body_raw.encode('utf8')) | ||
| 137 | |||
| 138 | return HTTPResponse( | ||
| 139 | body=body, | ||
| 140 | preload_content=False, | ||
| 141 | **cached["response"] | ||
| 142 | ) | ||
| 143 | |||
| 144 | def _loads_v0(self, request, data): | ||
| 145 | # The original legacy cache data. This doesn't contain enough | ||
| 146 | # information to construct everything we need, so we'll treat this as | ||
| 147 | # a miss. | ||
| 148 | return | ||
| 149 | |||
| 150 | def _loads_v1(self, request, data): | ||
| 151 | try: | ||
| 152 | cached = pickle.loads(data) | ||
| 153 | except ValueError: | ||
| 154 | return | ||
| 155 | |||
| 156 | return self.prepare_response(request, cached) | ||
| 157 | |||
| 158 | def _loads_v2(self, request, data): | ||
| 159 | try: | ||
| 160 | cached = json.loads(zlib.decompress(data).decode("utf8")) | ||
| 161 | except (ValueError, zlib.error): | ||
| 162 | return | ||
| 163 | |||
| 164 | # We need to decode the items that we've base64 encoded | ||
| 165 | cached["response"]["body"] = _b64_decode_bytes( | ||
| 166 | cached["response"]["body"] | ||
| 167 | ) | ||
| 168 | cached["response"]["headers"] = dict( | ||
| 169 | (_b64_decode_str(k), _b64_decode_str(v)) | ||
| 170 | for k, v in cached["response"]["headers"].items() | ||
| 171 | ) | ||
| 172 | cached["response"]["reason"] = _b64_decode_str( | ||
| 173 | cached["response"]["reason"], | ||
| 174 | ) | ||
| 175 | cached["vary"] = dict( | ||
| 176 | (_b64_decode_str(k), _b64_decode_str(v) if v is not None else v) | ||
| 177 | for k, v in cached["vary"].items() | ||
| 178 | ) | ||
| 179 | |||
| 180 | return self.prepare_response(request, cached) | ||
| 181 | |||
| 182 | def _loads_v3(self, request, data): | ||
| 183 | # Due to Python 2 encoding issues, it's impossible to know for sure | ||
| 184 | # exactly how to load v3 entries, thus we'll treat these as a miss so | ||
| 185 | # that they get rewritten out as v4 entries. | ||
| 186 | return | ||
| 187 | |||
| 188 | def _loads_v4(self, request, data): | ||
| 189 | try: | ||
| 190 | cached = msgpack.loads(data, encoding='utf-8') | ||
| 191 | except ValueError: | ||
| 192 | return | ||
| 193 | |||
| 194 | return self.prepare_response(request, cached) | ||
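The cc=N framing that loads dispatches on is simple enough to sketch in isolation (the payload bytes below are a placeholder, not real msgpack data):

    data = b"cc=4," + b"<msgpack payload>"
    ver, body = data.split(b",", 1)               # ver == b"cc=4"
    ver = ver.split(b"=", 1)[-1].decode("ascii")  # "4"
    print("_loads_v{0}".format(ver))              # the method dispatched to

Anything without a recognized cc= prefix falls back to version 0, which is always treated as a cache miss.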
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/wrapper.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/wrapper.py new file mode 100644 index 0000000..2ceac99 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/wrapper.py | |||
| @@ -0,0 +1,27 @@ | |||
| 1 | from .adapter import CacheControlAdapter | ||
| 2 | from .cache import DictCache | ||
| 3 | |||
| 4 | |||
| 5 | def CacheControl(sess, | ||
| 6 | cache=None, | ||
| 7 | cache_etags=True, | ||
| 8 | serializer=None, | ||
| 9 | heuristic=None, | ||
| 10 | controller_class=None, | ||
| 11 | adapter_class=None, | ||
| 12 | cacheable_methods=None): | ||
| 13 | |||
| 14 | cache = cache or DictCache() | ||
| 15 | adapter_class = adapter_class or CacheControlAdapter | ||
| 16 | adapter = adapter_class( | ||
| 17 | cache, | ||
| 18 | cache_etags=cache_etags, | ||
| 19 | serializer=serializer, | ||
| 20 | heuristic=heuristic, | ||
| 21 | controller_class=controller_class, | ||
| 22 | cacheable_methods=cacheable_methods | ||
| 23 | ) | ||
| 24 | sess.mount('http://', adapter) | ||
| 25 | sess.mount('https://', adapter) | ||
| 26 | |||
| 27 | return sess | ||
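Typical use, assuming the vendored requests and the package-level CacheControl re-export are available (a minimal sketch):

    from pip._vendor import requests
    from pip._vendor.cachecontrol import CacheControl

    sess = CacheControl(requests.Session())
    # Both http:// and https:// traffic now flows through the caching adapter.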
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/certifi/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/certifi/__init__.py new file mode 100644 index 0000000..3d73ece --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/certifi/__init__.py | |||
| @@ -0,0 +1,3 @@ | |||
| 1 | from .core import where, old_where | ||
| 2 | |||
| 3 | __version__ = "2018.01.18" | ||
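certifi's whole job is to point at the bundled cacert.pem that follows; within pip's vendored copy that looks like (a minimal sketch):

    from pip._vendor.certifi import where

    print(where())  # absolute path to the bundled cacert.pem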
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/certifi/__main__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/certifi/__main__.py new file mode 100644 index 0000000..e30b50d --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/certifi/__main__.py | |||
| @@ -0,0 +1,2 @@ | |||
| 1 | from certifi import where | ||
| 2 | print(where()) | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/certifi/cacert.pem b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/certifi/cacert.pem new file mode 100644 index 0000000..101ac98 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/certifi/cacert.pem | |||
| @@ -0,0 +1,4433 @@ | |||
| 1 | |||
| 2 | # Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA | ||
| 3 | # Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA | ||
| 4 | # Label: "GlobalSign Root CA" | ||
| 5 | # Serial: 4835703278459707669005204 | ||
| 6 | # MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a | ||
| 7 | # SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c | ||
| 8 | # SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99 | ||
| 9 | -----BEGIN CERTIFICATE----- | ||
| 10 | MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG | ||
| 11 | A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv | ||
| 12 | b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw | ||
| 13 | MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i | ||
| 14 | YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT | ||
| 15 | aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ | ||
| 16 | jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp | ||
| 17 | xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp | ||
| 18 | 1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG | ||
| 19 | snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ | ||
| 20 | U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8 | ||
| 21 | 9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E | ||
| 22 | BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B | ||
| 23 | AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz | ||
| 24 | yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE | ||
| 25 | 38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP | ||
| 26 | AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad | ||
| 27 | DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME | ||
| 28 | HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A== | ||
| 29 | -----END CERTIFICATE----- | ||
| 30 | |||
| 31 | # Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 | ||
| 32 | # Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 | ||
| 33 | # Label: "GlobalSign Root CA - R2" | ||
| 34 | # Serial: 4835703278459682885658125 | ||
| 35 | # MD5 Fingerprint: 94:14:77:7e:3e:5e:fd:8f:30:bd:41:b0:cf:e7:d0:30 | ||
| 36 | # SHA1 Fingerprint: 75:e0:ab:b6:13:85:12:27:1c:04:f8:5f:dd:de:38:e4:b7:24:2e:fe | ||
| 37 | # SHA256 Fingerprint: ca:42:dd:41:74:5f:d0:b8:1e:b9:02:36:2c:f9:d8:bf:71:9d:a1:bd:1b:1e:fc:94:6f:5b:4c:99:f4:2c:1b:9e | ||
| 38 | -----BEGIN CERTIFICATE----- | ||
| 39 | MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4G | ||
| 40 | A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNp | ||
| 41 | Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1 | ||
| 42 | MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMjETMBEG | ||
| 43 | A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI | ||
| 44 | hvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6ErPL | ||
| 45 | v4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8 | ||
| 46 | eoLrvozps6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklq | ||
| 47 | tTleiDTsvHgMCJiEbKjNS7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzd | ||
| 48 | C9XZzPnqJworc5HGnRusyMvo4KD0L5CLTfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pa | ||
| 49 | zq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6CygPCm48CAwEAAaOBnDCB | ||
| 50 | mTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm+IH | ||
| 51 | V2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5n | ||
| 52 | bG9iYWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG | ||
| 53 | 3lm0mi3f3BmGLjANBgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4Gs | ||
| 54 | J0/WwbgcQ3izDJr86iw8bmEbTUsp9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO | ||
| 55 | 291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu01yiPqFbQfXf5WRDLenVOavS | ||
| 56 | ot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG79G+dwfCMNYxd | ||
| 57 | AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7 | ||
| 58 | TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg== | ||
| 59 | -----END CERTIFICATE----- | ||
| 60 | |||
| 61 | # Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only | ||
| 62 | # Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only | ||
| 63 | # Label: "Verisign Class 3 Public Primary Certification Authority - G3" | ||
| 64 | # Serial: 206684696279472310254277870180966723415 | ||
| 65 | # MD5 Fingerprint: cd:68:b6:a7:c7:c4:ce:75:e0:1d:4f:57:44:61:92:09 | ||
| 66 | # SHA1 Fingerprint: 13:2d:0d:45:53:4b:69:97:cd:b2:d5:c3:39:e2:55:76:60:9b:5c:c6 | ||
| 67 | # SHA256 Fingerprint: eb:04:cf:5e:b1:f3:9a:fa:76:2f:2b:b1:20:f2:96:cb:a5:20:c1:b9:7d:b1:58:95:65:b8:1c:b9:a1:7b:72:44 | ||
| 68 | -----BEGIN CERTIFICATE----- | ||
| 69 | MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw | ||
| 70 | CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl | ||
| 71 | cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu | ||
| 72 | LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT | ||
| 73 | aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp | ||
| 74 | dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD | ||
| 75 | VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT | ||
| 76 | aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ | ||
| 77 | bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu | ||
| 78 | IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg | ||
| 79 | LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b | ||
| 80 | N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t | ||
| 81 | KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu | ||
| 82 | kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm | ||
| 83 | CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ | ||
| 84 | Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu | ||
| 85 | imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te | ||
| 86 | 2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe | ||
| 87 | DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC | ||
| 88 | /Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p | ||
| 89 | F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt | ||
| 90 | TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ== | ||
| 91 | -----END CERTIFICATE----- | ||
| 92 | |||
| 93 | # Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited | ||
| 94 | # Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited | ||
| 95 | # Label: "Entrust.net Premium 2048 Secure Server CA" | ||
| 96 | # Serial: 946069240 | ||
| 97 | # MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90 | ||
| 98 | # SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31 | ||
| 99 | # SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77 | ||
| 100 | -----BEGIN CERTIFICATE----- | ||
| 101 | MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML | ||
| 102 | RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp | ||
| 103 | bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5 | ||
| 104 | IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp | ||
| 105 | ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3 | ||
| 106 | MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3 | ||
| 107 | LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp | ||
| 108 | YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG | ||
| 109 | A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp | ||
| 110 | MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq | ||
| 111 | K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe | ||
| 112 | sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX | ||
| 113 | MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT | ||
| 114 | XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/ | ||
| 115 | HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH | ||
| 116 | 4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV | ||
| 117 | HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub | ||
| 118 | j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo | ||
| 119 | U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf | ||
| 120 | zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b | ||
| 121 | u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+ | ||
| 122 | bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er | ||
| 123 | fF6adulZkMV8gzURZVE= | ||
| 124 | -----END CERTIFICATE----- | ||
| 125 | |||
| 126 | # Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust | ||
| 127 | # Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust | ||
| 128 | # Label: "Baltimore CyberTrust Root" | ||
| 129 | # Serial: 33554617 | ||
| 130 | # MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4 | ||
| 131 | # SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74 | ||
| 132 | # SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb | ||
| 133 | -----BEGIN CERTIFICATE----- | ||
| 134 | MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ | ||
| 135 | RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD | ||
| 136 | VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX | ||
| 137 | DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y | ||
| 138 | ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy | ||
| 139 | VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr | ||
| 140 | mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr | ||
| 141 | IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK | ||
| 142 | mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu | ||
| 143 | XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy | ||
| 144 | dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye | ||
| 145 | jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1 | ||
| 146 | BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3 | ||
| 147 | DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92 | ||
| 148 | 9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx | ||
| 149 | jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0 | ||
| 150 | Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz | ||
| 151 | ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS | ||
| 152 | R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp | ||
| 153 | -----END CERTIFICATE----- | ||
| 154 | |||
| 155 | # Issuer: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network | ||
| 156 | # Subject: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network | ||
| 157 | # Label: "AddTrust External Root" | ||
| 158 | # Serial: 1 | ||
| 159 | # MD5 Fingerprint: 1d:35:54:04:85:78:b0:3f:42:42:4d:bf:20:73:0a:3f | ||
| 160 | # SHA1 Fingerprint: 02:fa:f3:e2:91:43:54:68:60:78:57:69:4d:f5:e4:5b:68:85:18:68 | ||
| 161 | # SHA256 Fingerprint: 68:7f:a4:51:38:22:78:ff:f0:c8:b1:1f:8d:43:d5:76:67:1c:6e:b2:bc:ea:b4:13:fb:83:d9:65:d0:6d:2f:f2 | ||
| 162 | -----BEGIN CERTIFICATE----- | ||
| 163 | MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJTRTEU | ||
| 164 | MBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0IEV4dGVybmFs | ||
| 165 | IFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBFeHRlcm5hbCBDQSBSb290 | ||
| 166 | MB4XDTAwMDUzMDEwNDgzOFoXDTIwMDUzMDEwNDgzOFowbzELMAkGA1UEBhMCU0Ux | ||
| 167 | FDASBgNVBAoTC0FkZFRydXN0IEFCMSYwJAYDVQQLEx1BZGRUcnVzdCBFeHRlcm5h | ||
| 168 | bCBUVFAgTmV0d29yazEiMCAGA1UEAxMZQWRkVHJ1c3QgRXh0ZXJuYWwgQ0EgUm9v | ||
| 169 | dDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALf3GjPm8gAELTngTlvt | ||
| 170 | H7xsD821+iO2zt6bETOXpClMfZOfvUq8k+0DGuOPz+VtUFrWlymUWoCwSXrbLpX9 | ||
| 171 | uMq/NzgtHj6RQa1wVsfwTz/oMp50ysiQVOnGXw94nZpAPA6sYapeFI+eh6FqUNzX | ||
| 172 | mk6vBbOmcZSccbNQYArHE504B4YCqOmoaSYYkKtMsE8jqzpPhNjfzp/haW+710LX | ||
| 173 | a0Tkx63ubUFfclpxCDezeWWkWaCUN/cALw3CknLa0Dhy2xSoRcRdKn23tNbE7qzN | ||
| 174 | E0S3ySvdQwAl+mG5aWpYIxG3pzOPVnVZ9c0p10a3CitlttNCbxWyuHv77+ldU9U0 | ||
| 175 | WicCAwEAAaOB3DCB2TAdBgNVHQ4EFgQUrb2YejS0Jvf6xCZU7wO94CTLVBowCwYD | ||
| 176 | VR0PBAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wgZkGA1UdIwSBkTCBjoAUrb2YejS0 | ||
| 177 | Jvf6xCZU7wO94CTLVBqhc6RxMG8xCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRU | ||
| 178 | cnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0ZXJuYWwgVFRQIE5ldHdvcmsx | ||
| 179 | IjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENBIFJvb3SCAQEwDQYJKoZIhvcN | ||
| 180 | AQEFBQADggEBALCb4IUlwtYj4g+WBpKdQZic2YR5gdkeWxQHIzZlj7DYd7usQWxH | ||
| 181 | YINRsPkyPef89iYTx4AWpb9a/IfPeHmJIZriTAcKhjW88t5RxNKWt9x+Tu5w/Rw5 | ||
| 182 | 6wwCURQtjr0W4MHfRnXnJK3s9EK0hZNwEGe6nQY1ShjTK3rMUUKhemPR5ruhxSvC | ||
| 183 | Nr4TDea9Y355e6cJDUCrat2PisP29owaQgVR1EX1n6diIWgVIEM8med8vSTYqZEX | ||
| 184 | c4g/VhsxOBi0cQ+azcgOno4uG+GMmIPLHzHxREzGBHNJdmAPx/i9F4BrLunMTA5a | ||
| 185 | mnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ= | ||
| 186 | -----END CERTIFICATE----- | ||
| 187 | |||
| 188 | # Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. | ||
| 189 | # Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. | ||
| 190 | # Label: "Entrust Root Certification Authority" | ||
| 191 | # Serial: 1164660820 | ||
| 192 | # MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4 | ||
| 193 | # SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9 | ||
| 194 | # SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c | ||
| 195 | -----BEGIN CERTIFICATE----- | ||
| 196 | MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC | ||
| 197 | VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0 | ||
| 198 | Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW | ||
| 199 | KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl | ||
| 200 | cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw | ||
| 201 | NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw | ||
| 202 | NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy | ||
| 203 | ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV | ||
| 204 | BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ | ||
| 205 | KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo | ||
| 206 | Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4 | ||
| 207 | 4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9 | ||
| 208 | KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI | ||
| 209 | rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi | ||
| 210 | 94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB | ||
| 211 | sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi | ||
| 212 | gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo | ||
| 213 | kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE | ||
| 214 | vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA | ||
| 215 | A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t | ||
| 216 | O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua | ||
| 217 | AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP | ||
| 218 | 9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/ | ||
| 219 | eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m | ||
| 220 | 0vdXcDazv/wor3ElhVsT/h5/WrQ8 | ||
| 221 | -----END CERTIFICATE----- | ||
| 222 | |||
| 223 | # Issuer: CN=GeoTrust Global CA O=GeoTrust Inc. | ||
| 224 | # Subject: CN=GeoTrust Global CA O=GeoTrust Inc. | ||
| 225 | # Label: "GeoTrust Global CA" | ||
| 226 | # Serial: 144470 | ||
| 227 | # MD5 Fingerprint: f7:75:ab:29:fb:51:4e:b7:77:5e:ff:05:3c:99:8e:f5 | ||
| 228 | # SHA1 Fingerprint: de:28:f4:a4:ff:e5:b9:2f:a3:c5:03:d1:a3:49:a7:f9:96:2a:82:12 | ||
| 229 | # SHA256 Fingerprint: ff:85:6a:2d:25:1d:cd:88:d3:66:56:f4:50:12:67:98:cf:ab:aa:de:40:79:9c:72:2d:e4:d2:b5:db:36:a7:3a | ||
| 230 | -----BEGIN CERTIFICATE----- | ||
| 231 | MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT | ||
| 232 | MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i | ||
| 233 | YWwgQ0EwHhcNMDIwNTIxMDQwMDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQG | ||
| 234 | EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UEAxMSR2VvVHJ1c3Qg | ||
| 235 | R2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2swYYzD9 | ||
| 236 | 9BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjoBbdq | ||
| 237 | fnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDv | ||
| 238 | iS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU | ||
| 239 | 1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+ | ||
| 240 | bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5aszPeE4uwc2hGKceeoW | ||
| 241 | MPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTA | ||
| 242 | ephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVkDBF9qn1l | ||
| 243 | uMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKIn | ||
| 244 | Z57QzxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfS | ||
| 245 | tQWVYrmm3ok9Nns4d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcF | ||
| 246 | PseKUgzbFbS9bZvlxrFUaKnjaZC2mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Un | ||
| 247 | hw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV | ||
| 248 | 5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvmMw== | ||
| 249 | -----END CERTIFICATE----- | ||
| 250 | |||
| 251 | # Issuer: CN=GeoTrust Universal CA O=GeoTrust Inc. | ||
| 252 | # Subject: CN=GeoTrust Universal CA O=GeoTrust Inc. | ||
| 253 | # Label: "GeoTrust Universal CA" | ||
| 254 | # Serial: 1 | ||
| 255 | # MD5 Fingerprint: 92:65:58:8b:a2:1a:31:72:73:68:5c:b4:a5:7a:07:48 | ||
| 256 | # SHA1 Fingerprint: e6:21:f3:35:43:79:05:9a:4b:68:30:9d:8a:2f:74:22:15:87:ec:79 | ||
| 257 | # SHA256 Fingerprint: a0:45:9b:9f:63:b2:25:59:f5:fa:5d:4c:6d:b3:f9:f7:2f:f1:93:42:03:35:78:f0:73:bf:1d:1b:46:cb:b9:12 | ||
| 258 | -----BEGIN CERTIFICATE----- | ||
| 259 | MIIFaDCCA1CgAwIBAgIBATANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJVUzEW | ||
| 260 | MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEeMBwGA1UEAxMVR2VvVHJ1c3QgVW5pdmVy | ||
| 261 | c2FsIENBMB4XDTA0MDMwNDA1MDAwMFoXDTI5MDMwNDA1MDAwMFowRTELMAkGA1UE | ||
| 262 | BhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xHjAcBgNVBAMTFUdlb1RydXN0 | ||
| 263 | IFVuaXZlcnNhbCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAKYV | ||
| 264 | VaCjxuAfjJ0hUNfBvitbtaSeodlyWL0AG0y/YckUHUWCq8YdgNY96xCcOq9tJPi8 | ||
| 265 | cQGeBvV8Xx7BDlXKg5pZMK4ZyzBIle0iN430SppyZj6tlcDgFgDgEB8rMQ7XlFTT | ||
| 266 | QjOgNB0eRXbdT8oYN+yFFXoZCPzVx5zw8qkuEKmS5j1YPakWaDwvdSEYfyh3peFh | ||
| 267 | F7em6fgemdtzbvQKoiFs7tqqhZJmr/Z6a4LauiIINQ/PQvE1+mrufislzDoR5G2v | ||
| 268 | c7J2Ha3QsnhnGqQ5HFELZ1aD/ThdDc7d8Lsrlh/eezJS/R27tQahsiFepdaVaH/w | ||
| 269 | mZ7cRQg+59IJDTWU3YBOU5fXtQlEIGQWFwMCTFMNaN7VqnJNk22CDtucvc+081xd | ||
| 270 | VHppCZbW2xHBjXWotM85yM48vCR85mLK4b19p71XZQvk/iXttmkQ3CgaRr0BHdCX | ||
| 271 | teGYO8A3ZNY9lO4L4fUorgtWv3GLIylBjobFS1J72HGrH4oVpjuDWtdYAVHGTEHZ | ||
| 272 | f9hBZ3KiKN9gg6meyHv8U3NyWfWTehd2Ds735VzZC1U0oqpbtWpU5xPKV+yXbfRe | ||
| 273 | Bi9Fi1jUIxaS5BZuKGNZMN9QAZxjiRqf2xeUgnA3wySemkfWWspOqGmJch+RbNt+ | ||
| 274 | nhutxx9z3SxPGWX9f5NAEC7S8O08ni4oPmkmM8V7AgMBAAGjYzBhMA8GA1UdEwEB | ||
| 275 | /wQFMAMBAf8wHQYDVR0OBBYEFNq7LqqwDLiIJlF0XG0D08DYj3rWMB8GA1UdIwQY | ||
| 276 | MBaAFNq7LqqwDLiIJlF0XG0D08DYj3rWMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG | ||
| 277 | 9w0BAQUFAAOCAgEAMXjmx7XfuJRAyXHEqDXsRh3ChfMoWIawC/yOsjmPRFWrZIRc | ||
| 278 | aanQmjg8+uUfNeVE44B5lGiku8SfPeE0zTBGi1QrlaXv9z+ZhP015s8xxtxqv6fX | ||
| 279 | IwjhmF7DWgh2qaavdy+3YL1ERmrvl/9zlcGO6JP7/TG37FcREUWbMPEaiDnBTzyn | ||
| 280 | ANXH/KttgCJwpQzgXQQpAvvLoJHRfNbDflDVnVi+QTjruXU8FdmbyUqDWcDaU/0z | ||
| 281 | uzYYm4UPFd3uLax2k7nZAY1IEKj79TiG8dsKxr2EoyNB3tZ3b4XUhRxQ4K5RirqN | ||
| 282 | Pnbiucon8l+f725ZDQbYKxek0nxru18UGkiPGkzns0ccjkxFKyDuSN/n3QmOGKja | ||
| 283 | QI2SJhFTYXNd673nxE0pN2HrrDktZy4W1vUAg4WhzH92xH3kt0tm7wNFYGm2DFKW | ||
| 284 | koRepqO1pD4r2czYG0eq8kTaT/kD6PAUyz/zg97QwVTjt+gKN02LIFkDMBmhLMi9 | ||
| 285 | ER/frslKxfMnZmaGrGiR/9nmUxwPi1xpZQomyB40w11Re9epnAahNt3ViZS82eQt | ||
| 286 | DF4JbAiXfKM9fJP/P6EUp8+1Xevb2xzEdt+Iub1FBZUbrvxGakyvSOPOrg/Sfuvm | ||
| 287 | bJxPgWp6ZKy7PtXny3YuxadIwVyQD8vIP/rmMuGNG2+k5o7Y+SlIis5z/iw= | ||
| 288 | -----END CERTIFICATE----- | ||
| 289 | |||
| 290 | # Issuer: CN=GeoTrust Universal CA 2 O=GeoTrust Inc. | ||
| 291 | # Subject: CN=GeoTrust Universal CA 2 O=GeoTrust Inc. | ||
| 292 | # Label: "GeoTrust Universal CA 2" | ||
| 293 | # Serial: 1 | ||
| 294 | # MD5 Fingerprint: 34:fc:b8:d0:36:db:9e:14:b3:c2:f2:db:8f:e4:94:c7 | ||
| 295 | # SHA1 Fingerprint: 37:9a:19:7b:41:85:45:35:0c:a6:03:69:f3:3c:2e:af:47:4f:20:79 | ||
| 296 | # SHA256 Fingerprint: a0:23:4f:3b:c8:52:7c:a5:62:8e:ec:81:ad:5d:69:89:5d:a5:68:0d:c9:1d:1c:b8:47:7f:33:f8:78:b9:5b:0b | ||
| 297 | -----BEGIN CERTIFICATE----- | ||
| 298 | MIIFbDCCA1SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBHMQswCQYDVQQGEwJVUzEW | ||
| 299 | MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1c3QgVW5pdmVy | ||
| 300 | c2FsIENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMjkwMzA0MDUwMDAwWjBHMQswCQYD | ||
| 301 | VQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1 | ||
| 302 | c3QgVW5pdmVyc2FsIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC | ||
| 303 | AQCzVFLByT7y2dyxUxpZKeexw0Uo5dfR7cXFS6GqdHtXr0om/Nj1XqduGdt0DE81 | ||
| 304 | WzILAePb63p3NeqqWuDW6KFXlPCQo3RWlEQwAx5cTiuFJnSCegx2oG9NzkEtoBUG | ||
| 305 | FF+3Qs17j1hhNNwqCPkuwwGmIkQcTAeC5lvO0Ep8BNMZcyfwqph/Lq9O64ceJHdq | ||
| 306 | XbboW0W63MOhBW9Wjo8QJqVJwy7XQYci4E+GymC16qFjwAGXEHm9ADwSbSsVsaxL | ||
| 307 | se4YuU6W3Nx2/zu+z18DwPw76L5GG//aQMJS9/7jOvdqdzXQ2o3rXhhqMcceujwb | ||
| 308 | KNZrVMaqW9eiLBsZzKIC9ptZvTdrhrVtgrrY6slWvKk2WP0+GfPtDCapkzj4T8Fd | ||
| 309 | IgbQl+rhrcZV4IErKIM6+vR7IVEAvlI4zs1meaj0gVbi0IMJR1FbUGrP20gaXT73 | ||
| 310 | y/Zl92zxlfgCOzJWgjl6W70viRu/obTo/3+NjN8D8WBOWBFM66M/ECuDmgFz2ZRt | ||
| 311 | hAAnZqzwcEAJQpKtT5MNYQlRJNiS1QuUYbKHsu3/mjX/hVTK7URDrBs8FmtISgoc | ||
| 312 | QIgfksILAAX/8sgCSqSqqcyZlpwvWOB94b67B9xfBHJcMTTD7F8t4D1kkCLm0ey4 | ||
| 313 | Lt1ZrtmhN79UNdxzMk+MBB4zsslG8dhcyFVQyWi9qLo2CQIDAQABo2MwYTAPBgNV | ||
| 314 | HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAfBgNV | ||
| 315 | HSMEGDAWgBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAOBgNVHQ8BAf8EBAMCAYYwDQYJ | ||
| 316 | KoZIhvcNAQEFBQADggIBAGbBxiPz2eAubl/oz66wsCVNK/g7WJtAJDday6sWSf+z | ||
| 317 | dXkzoS9tcBc0kf5nfo/sm+VegqlVHy/c1FEHEv6sFj4sNcZj/NwQ6w2jqtB8zNHQ | ||
| 318 | L1EuxBRa3ugZ4T7GzKQp5y6EqgYweHZUcyiYWTjgAA1i00J9IZ+uPTqM1fp3DRgr | ||
| 319 | Fg5fNuH8KrUwJM/gYwx7WBr+mbpCErGR9Hxo4sjoryzqyX6uuyo9DRXcNJW2GHSo | ||
| 320 | ag/HtPQTxORb7QrSpJdMKu0vbBKJPfEncKpqA1Ihn0CoZ1Dy81of398j9tx4TuaY | ||
| 321 | T1U6U+Pv8vSfx3zYWK8pIpe44L2RLrB27FcRz+8pRPPphXpgY+RdM4kX2TGq2tbz | ||
| 322 | GDVyz4crL2MjhF2EjD9XoIj8mZEoJmmZ1I+XRL6O1UixpCgp8RW04eWe3fiPpm8m | ||
| 323 | 1wk8OhwRDqZsN/etRIcsKMfYdIKz0G9KV7s1KSegi+ghp4dkNl3M2Basx7InQJJV | ||
| 324 | OCiNUW7dFGdTbHFcJoRNdVq2fmBWqU2t+5sel/MN2dKXVHfaPRK34B7vCAas+YWH | ||
| 325 | 6aLcr34YEoP9VhdBLtUpgn2Z9DH2canPLAEnpQW5qrJITirvn5NSUZU8UnOOVkwX | ||
| 326 | QMAJKOSLakhT2+zNVVXxxvjpoixMptEmX36vWkzaH6byHCx+rgIW0lbQL1dTR+iS | ||
| 327 | -----END CERTIFICATE----- | ||
| 328 | |||
| 329 | # Issuer: CN=Visa eCommerce Root O=VISA OU=Visa International Service Association | ||
| 330 | # Subject: CN=Visa eCommerce Root O=VISA OU=Visa International Service Association | ||
| 331 | # Label: "Visa eCommerce Root" | ||
| 332 | # Serial: 25952180776285836048024890241505565794 | ||
| 333 | # MD5 Fingerprint: fc:11:b8:d8:08:93:30:00:6d:23:f9:7e:eb:52:1e:02 | ||
| 334 | # SHA1 Fingerprint: 70:17:9b:86:8c:00:a4:fa:60:91:52:22:3f:9f:3e:32:bd:e0:05:62 | ||
| 335 | # SHA256 Fingerprint: 69:fa:c9:bd:55:fb:0a:c7:8d:53:bb:ee:5c:f1:d5:97:98:9f:d0:aa:ab:20:a2:51:51:bd:f1:73:3e:e7:d1:22 | ||
| 336 | -----BEGIN CERTIFICATE----- | ||
| 337 | MIIDojCCAoqgAwIBAgIQE4Y1TR0/BvLB+WUF1ZAcYjANBgkqhkiG9w0BAQUFADBr | ||
| 338 | MQswCQYDVQQGEwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMmVmlzYSBJbnRl | ||
| 339 | cm5hdGlvbmFsIFNlcnZpY2UgQXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNv | ||
| 340 | bW1lcmNlIFJvb3QwHhcNMDIwNjI2MDIxODM2WhcNMjIwNjI0MDAxNjEyWjBrMQsw | ||
| 341 | CQYDVQQGEwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMmVmlzYSBJbnRlcm5h | ||
| 342 | dGlvbmFsIFNlcnZpY2UgQXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNvbW1l | ||
| 343 | cmNlIFJvb3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvV95WHm6h | ||
| 344 | 2mCxlCfLF9sHP4CFT8icttD0b0/Pmdjh28JIXDqsOTPHH2qLJj0rNfVIsZHBAk4E | ||
| 345 | lpF7sDPwsRROEW+1QK8bRaVK7362rPKgH1g/EkZgPI2h4H3PVz4zHvtH8aoVlwdV | ||
| 346 | ZqW1LS7YgFmypw23RuwhY/81q6UCzyr0TP579ZRdhE2o8mCP2w4lPJ9zcc+U30rq | ||
| 347 | 299yOIzzlr3xF7zSujtFWsan9sYXiwGd/BmoKoMWuDpI/k4+oKsGGelT84ATB+0t | ||
| 348 | vz8KPFUgOSwsAGl0lUq8ILKpeeUYiZGo3BxN77t+Nwtd/jmliFKMAGzsGHxBvfaL | ||
| 349 | dXe6YJ2E5/4tAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD | ||
| 350 | AgEGMB0GA1UdDgQWBBQVOIMPPyw/cDMezUb+B4wg4NfDtzANBgkqhkiG9w0BAQUF | ||
| 351 | AAOCAQEAX/FBfXxcCLkr4NWSR/pnXKUTwwMhmytMiUbPWU3J/qVAtmPN3XEolWcR | ||
| 352 | zCSs00Rsca4BIGsDoo8Ytyk6feUWYFN4PMCvFYP3j1IzJL1kk5fui/fbGKhtcbP3 | ||
| 353 | LBfQdCVp9/5rPJS+TUtBjE7ic9DjkCJzQ83z7+pzzkWKsKZJ/0x9nXGIxHYdkFsd | ||
| 354 | 7v3M9+79YKWxehZx0RbQfBI8bGmX265fOZpwLwU8GUYEmSA20GBuYQa7FkKMcPcw | ||
| 355 | ++DbZqMAAb3mLNqRX6BGi01qnD093QVG/na/oAo85ADmJ7f/hC3euiInlhBx6yLt | ||
| 356 | 398znM/jra6O1I7mT1GvFpLgXPYHDw== | ||
| 357 | -----END CERTIFICATE----- | ||
| 358 | |||
| 359 | # Issuer: CN=AAA Certificate Services O=Comodo CA Limited | ||
| 360 | # Subject: CN=AAA Certificate Services O=Comodo CA Limited | ||
| 361 | # Label: "Comodo AAA Services root" | ||
| 362 | # Serial: 1 | ||
| 363 | # MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0 | ||
| 364 | # SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49 | ||
| 365 | # SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4 | ||
| 366 | -----BEGIN CERTIFICATE----- | ||
| 367 | MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb | ||
| 368 | MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow | ||
| 369 | GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj | ||
| 370 | YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL | ||
| 371 | MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE | ||
| 372 | BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM | ||
| 373 | GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP | ||
| 374 | ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua | ||
| 375 | BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe | ||
| 376 | 3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4 | ||
| 377 | YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR | ||
| 378 | rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm | ||
| 379 | ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU | ||
| 380 | oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF | ||
| 381 | MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v | ||
| 382 | QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t | ||
| 383 | b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF | ||
| 384 | AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q | ||
| 385 | GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz | ||
| 386 | Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2 | ||
| 387 | G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi | ||
| 388 | l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3 | ||
| 389 | smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg== | ||
| 390 | -----END CERTIFICATE----- | ||
| 391 | |||
| 392 | # Issuer: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority | ||
| 393 | # Subject: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority | ||
| 394 | # Label: "QuoVadis Root CA" | ||
| 395 | # Serial: 985026699 | ||
| 396 | # MD5 Fingerprint: 27:de:36:fe:72:b7:00:03:00:9d:f4:f0:1e:6c:04:24 | ||
| 397 | # SHA1 Fingerprint: de:3f:40:bd:50:93:d3:9b:6c:60:f6:da:bc:07:62:01:00:89:76:c9 | ||
| 398 | # SHA256 Fingerprint: a4:5e:de:3b:bb:f0:9c:8a:e1:5c:72:ef:c0:72:68:d6:93:a2:1c:99:6f:d5:1e:67:ca:07:94:60:fd:6d:88:73 | ||
| 399 | -----BEGIN CERTIFICATE----- | ||
| 400 | MIIF0DCCBLigAwIBAgIEOrZQizANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJC | ||
| 401 | TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDElMCMGA1UECxMcUm9vdCBDZXJ0 | ||
| 402 | aWZpY2F0aW9uIEF1dGhvcml0eTEuMCwGA1UEAxMlUXVvVmFkaXMgUm9vdCBDZXJ0 | ||
| 403 | aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wMTAzMTkxODMzMzNaFw0yMTAzMTcxODMz | ||
| 404 | MzNaMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMSUw | ||
| 405 | IwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYDVQQDEyVR | ||
| 406 | dW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG | ||
| 407 | 9w0BAQEFAAOCAQ8AMIIBCgKCAQEAv2G1lVO6V/z68mcLOhrfEYBklbTRvM16z/Yp | ||
| 408 | li4kVEAkOPcahdxYTMukJ0KX0J+DisPkBgNbAKVRHnAEdOLB1Dqr1607BxgFjv2D | ||
| 409 | rOpm2RgbaIr1VxqYuvXtdj182d6UajtLF8HVj71lODqV0D1VNk7feVcxKh7YWWVJ | ||
| 410 | WCCYfqtffp/p1k3sg3Spx2zY7ilKhSoGFPlU5tPaZQeLYzcS19Dsw3sgQUSj7cug | ||
| 411 | F+FxZc4dZjH3dgEZyH0DWLaVSR2mEiboxgx24ONmy+pdpibu5cxfvWenAScOospU | ||
| 412 | xbF6lR1xHkopigPcakXBpBlebzbNw6Kwt/5cOOJSvPhEQ+aQuwIDAQABo4ICUjCC | ||
| 413 | Ak4wPQYIKwYBBQUHAQEEMTAvMC0GCCsGAQUFBzABhiFodHRwczovL29jc3AucXVv | ||
| 414 | dmFkaXNvZmZzaG9yZS5jb20wDwYDVR0TAQH/BAUwAwEB/zCCARoGA1UdIASCAREw | ||
| 415 | ggENMIIBCQYJKwYBBAG+WAABMIH7MIHUBggrBgEFBQcCAjCBxxqBxFJlbGlhbmNl | ||
| 416 | IG9uIHRoZSBRdW9WYWRpcyBSb290IENlcnRpZmljYXRlIGJ5IGFueSBwYXJ0eSBh | ||
| 417 | c3N1bWVzIGFjY2VwdGFuY2Ugb2YgdGhlIHRoZW4gYXBwbGljYWJsZSBzdGFuZGFy | ||
| 418 | ZCB0ZXJtcyBhbmQgY29uZGl0aW9ucyBvZiB1c2UsIGNlcnRpZmljYXRpb24gcHJh | ||
| 419 | Y3RpY2VzLCBhbmQgdGhlIFF1b1ZhZGlzIENlcnRpZmljYXRlIFBvbGljeS4wIgYI | ||
| 420 | KwYBBQUHAgEWFmh0dHA6Ly93d3cucXVvdmFkaXMuYm0wHQYDVR0OBBYEFItLbe3T | ||
| 421 | KbkGGew5Oanwl4Rqy+/fMIGuBgNVHSMEgaYwgaOAFItLbe3TKbkGGew5Oanwl4Rq | ||
| 422 | y+/foYGEpIGBMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1p | ||
| 423 | dGVkMSUwIwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYD | ||
| 424 | VQQDEyVRdW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggQ6tlCL | ||
| 425 | MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOCAQEAitQUtf70mpKnGdSk | ||
| 426 | fnIYj9lofFIk3WdvOXrEql494liwTXCYhGHoG+NpGA7O+0dQoE7/8CQfvbLO9Sf8 | ||
| 427 | 7C9TqnN7Az10buYWnuulLsS/VidQK2K6vkscPFVcQR0kvoIgR13VRH56FmjffU1R | ||
| 428 | cHhXHTMe/QKZnAzNCgVPx7uOpHX6Sm2xgI4JVrmcGmD+XcHXetwReNDWXcG31a0y | ||
| 429 | mQM6isxUJTkxgXsTIlG6Rmyhu576BGxJJnSP0nPrzDCi5upZIof4l/UO/erMkqQW | ||
| 430 | xFIY6iHOsfHmhIHluqmGKPJDWl0Snawe2ajlCmqnf6CHKc/yiU3U7MXi5nrQNiOK | ||
| 431 | SnQ2+Q== | ||
| 432 | -----END CERTIFICATE----- | ||
| 433 | |||
| 434 | # Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited | ||
| 435 | # Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited | ||
| 436 | # Label: "QuoVadis Root CA 2" | ||
| 437 | # Serial: 1289 | ||
| 438 | # MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b | ||
| 439 | # SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7 | ||
| 440 | # SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86 | ||
| 441 | -----BEGIN CERTIFICATE----- | ||
| 442 | MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x | ||
| 443 | GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv | ||
| 444 | b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV | ||
| 445 | BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W | ||
| 446 | YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa | ||
| 447 | GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg | ||
| 448 | Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J | ||
| 449 | WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB | ||
| 450 | rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp | ||
| 451 | +ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1 | ||
| 452 | ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i | ||
| 453 | Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz | ||
| 454 | PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og | ||
| 455 | /zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH | ||
| 456 | oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI | ||
| 457 | yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud | ||
| 458 | EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2 | ||
| 459 | A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL | ||
| 460 | MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT | ||
| 461 | ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f | ||
| 462 | BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn | ||
| 463 | g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl | ||
| 464 | fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K | ||
| 465 | WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha | ||
| 466 | B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc | ||
| 467 | hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR | ||
| 468 | TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD | ||
| 469 | mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z | ||
| 470 | ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y | ||
| 471 | 4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza | ||
| 472 | 8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u | ||
| 473 | -----END CERTIFICATE----- | ||
| 474 | |||
| 475 | # Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited | ||
| 476 | # Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited | ||
| 477 | # Label: "QuoVadis Root CA 3" | ||
| 478 | # Serial: 1478 | ||
| 479 | # MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf | ||
| 480 | # SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85 | ||
| 481 | # SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35 | ||
| 482 | -----BEGIN CERTIFICATE----- | ||
| 483 | MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x | ||
| 484 | GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv | ||
| 485 | b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV | ||
| 486 | BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W | ||
| 487 | YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM | ||
| 488 | V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB | ||
| 489 | 4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr | ||
| 490 | H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd | ||
| 491 | 8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv | ||
| 492 | vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT | ||
| 493 | mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe | ||
| 494 | btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc | ||
| 495 | T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt | ||
| 496 | WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ | ||
| 497 | c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A | ||
| 498 | 4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD | ||
| 499 | VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG | ||
| 500 | CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0 | ||
| 501 | aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0 | ||
| 502 | aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu | ||
| 503 | dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw | ||
| 504 | czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G | ||
| 505 | A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC | ||
| 506 | TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg | ||
| 507 | Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0 | ||
| 508 | 7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem | ||
| 509 | d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd | ||
| 510 | +LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B | ||
| 511 | 4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN | ||
| 512 | t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x | ||
| 513 | DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57 | ||
| 514 | k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s | ||
| 515 | zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j | ||
| 516 | Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT | ||
| 517 | mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK | ||
| 518 | 4SVhM7JZG+Ju1zdXtg2pEto= | ||
| 519 | -----END CERTIFICATE----- | ||
| 520 | |||
| 521 | # Issuer: O=SECOM Trust.net OU=Security Communication RootCA1 | ||
| 522 | # Subject: O=SECOM Trust.net OU=Security Communication RootCA1 | ||
| 523 | # Label: "Security Communication Root CA" | ||
| 524 | # Serial: 0 | ||
| 525 | # MD5 Fingerprint: f1:bc:63:6a:54:e0:b5:27:f5:cd:e7:1a:e3:4d:6e:4a | ||
| 526 | # SHA1 Fingerprint: 36:b1:2b:49:f9:81:9e:d7:4c:9e:bc:38:0f:c6:56:8f:5d:ac:b2:f7 | ||
| 527 | # SHA256 Fingerprint: e7:5e:72:ed:9f:56:0e:ec:6e:b4:80:00:73:a4:3f:c3:ad:19:19:5a:39:22:82:01:78:95:97:4a:99:02:6b:6c | ||
| 528 | -----BEGIN CERTIFICATE----- | ||
| 529 | MIIDWjCCAkKgAwIBAgIBADANBgkqhkiG9w0BAQUFADBQMQswCQYDVQQGEwJKUDEY | ||
| 530 | MBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21t | ||
| 531 | dW5pY2F0aW9uIFJvb3RDQTEwHhcNMDMwOTMwMDQyMDQ5WhcNMjMwOTMwMDQyMDQ5 | ||
| 532 | WjBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYD | ||
| 533 | VQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEwggEiMA0GCSqGSIb3 | ||
| 534 | DQEBAQUAA4IBDwAwggEKAoIBAQCzs/5/022x7xZ8V6UMbXaKL0u/ZPtM7orw8yl8 | ||
| 535 | 9f/uKuDp6bpbZCKamm8sOiZpUQWZJtzVHGpxxpp9Hp3dfGzGjGdnSj74cbAZJ6kJ | ||
| 536 | DKaVv0uMDPpVmDvY6CKhS3E4eayXkmmziX7qIWgGmBSWh9JhNrxtJ1aeV+7AwFb9 | ||
| 537 | Ms+k2Y7CI9eNqPPYJayX5HA49LY6tJ07lyZDo6G8SVlyTCMwhwFY9k6+HGhWZq/N | ||
| 538 | QV3Is00qVUarH9oe4kA92819uZKAnDfdDJZkndwi92SL32HeFZRSFaB9UslLqCHJ | ||
| 539 | xrHty8OVYNEP8Ktw+N/LTX7s1vqr2b1/VPKl6Xn62dZ2JChzAgMBAAGjPzA9MB0G | ||
| 540 | A1UdDgQWBBSgc0mZaNyFW2XjmygvV5+9M7wHSDALBgNVHQ8EBAMCAQYwDwYDVR0T | ||
| 541 | AQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAaECpqLvkT115swW1F7NgE+vG | ||
| 542 | kl3g0dNq/vu+m22/xwVtWSDEHPC32oRYAmP6SBbvT6UL90qY8j+eG61Ha2POCEfr | ||
| 543 | Uj94nK9NrvjVT8+amCoQQTlSxN3Zmw7vkwGusi7KaEIkQmywszo+zenaSMQVy+n5 | ||
| 544 | Bw+SUEmK3TGXX8npN6o7WWWXlDLJs58+OmJYxUmtYg5xpTKqL8aJdkNAExNnPaJU | ||
| 545 | JRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ6rBK+1YWc26sTfcioU+tHXot | ||
| 546 | RSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAiFL39vmwLAw== | ||
| 547 | -----END CERTIFICATE----- | ||
| 548 | |||
| 549 | # Issuer: CN=Sonera Class2 CA O=Sonera | ||
| 550 | # Subject: CN=Sonera Class2 CA O=Sonera | ||
| 551 | # Label: "Sonera Class 2 Root CA" | ||
| 552 | # Serial: 29 | ||
| 553 | # MD5 Fingerprint: a3:ec:75:0f:2e:88:df:fa:48:01:4e:0b:5c:48:6f:fb | ||
| 554 | # SHA1 Fingerprint: 37:f7:6d:e6:07:7c:90:c5:b1:3e:93:1a:b7:41:10:b4:f2:e4:9a:27 | ||
| 555 | # SHA256 Fingerprint: 79:08:b4:03:14:c1:38:10:0b:51:8d:07:35:80:7f:fb:fc:f8:51:8a:00:95:33:71:05:ba:38:6b:15:3d:d9:27 | ||
| 556 | -----BEGIN CERTIFICATE----- | ||
| 557 | MIIDIDCCAgigAwIBAgIBHTANBgkqhkiG9w0BAQUFADA5MQswCQYDVQQGEwJGSTEP | ||
| 558 | MA0GA1UEChMGU29uZXJhMRkwFwYDVQQDExBTb25lcmEgQ2xhc3MyIENBMB4XDTAx | ||
| 559 | MDQwNjA3Mjk0MFoXDTIxMDQwNjA3Mjk0MFowOTELMAkGA1UEBhMCRkkxDzANBgNV | ||
| 560 | BAoTBlNvbmVyYTEZMBcGA1UEAxMQU29uZXJhIENsYXNzMiBDQTCCASIwDQYJKoZI | ||
| 561 | hvcNAQEBBQADggEPADCCAQoCggEBAJAXSjWdyvANlsdE+hY3/Ei9vX+ALTU74W+o | ||
| 562 | Z6m/AxxNjG8yR9VBaKQTBME1DJqEQ/xcHf+Js+gXGM2RX/uJ4+q/Tl18GybTdXnt | ||
| 563 | 5oTjV+WtKcT0OijnpXuENmmz/V52vaMtmdOQTiMofRhj8VQ7Jp12W5dCsv+u8E7s | ||
| 564 | 3TmVToMGf+dJQMjFAbJUWmYdPfz56TwKnoG4cPABi+QjVHzIrviQHgCWctRUz2Ej | ||
| 565 | vOr7nQKV0ba5cTppCD8PtOFCx4j1P5iop7oc4HFx71hXgVB6XGt0Rg6DA5jDjqhu | ||
| 566 | 8nYybieDwnPz3BjotJPqdURrBGAgcVeHnfO+oJAjPYok4doh28MCAwEAAaMzMDEw | ||
| 567 | DwYDVR0TAQH/BAUwAwEB/zARBgNVHQ4ECgQISqCqWITTXjwwCwYDVR0PBAQDAgEG | ||
| 568 | MA0GCSqGSIb3DQEBBQUAA4IBAQBazof5FnIVV0sd2ZvnoiYw7JNn39Yt0jSv9zil | ||
| 569 | zqsWuasvfDXLrNAPtEwr/IDva4yRXzZ299uzGxnq9LIR/WFxRL8oszodv7ND6J+/ | ||
| 570 | 3DEIcbCdjdY0RzKQxmUk96BKfARzjzlvF4xytb1LyHr4e4PDKE6cCepnP7JnBBvD | ||
| 571 | FNr450kkkdAdavphOe9r5yF1BgfYErQhIHBCcYHaPJo2vqZbDWpsmh+Re/n570K6 | ||
| 572 | Tk6ezAyNlNzZRZxe7EJQY670XcSxEtzKO6gunRRaBXW37Ndj4ro1tgQIkejanZz2 | ||
| 573 | ZrUYrAqmVCY0M9IbwdR/GjqOC6oybtv8TyWf2TLHllpwrN9M | ||
| 574 | -----END CERTIFICATE----- | ||
| 575 | |||
| 576 | # Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com | ||
| 577 | # Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com | ||
| 578 | # Label: "XRamp Global CA Root" | ||
| 579 | # Serial: 107108908803651509692980124233745014957 | ||
| 580 | # MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1 | ||
| 581 | # SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6 | ||
| 582 | # SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2 | ||
| 583 | -----BEGIN CERTIFICATE----- | ||
| 584 | MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB | ||
| 585 | gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk | ||
| 586 | MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY | ||
| 587 | UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx | ||
| 588 | NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3 | ||
| 589 | dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy | ||
| 590 | dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB | ||
| 591 | dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6 | ||
| 592 | 38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP | ||
| 593 | KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q | ||
| 594 | DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4 | ||
| 595 | qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa | ||
| 596 | JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi | ||
| 597 | PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P | ||
| 598 | BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs | ||
| 599 | jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0 | ||
| 600 | eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD | ||
| 601 | ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR | ||
| 602 | vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt | ||
| 603 | qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa | ||
| 604 | IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy | ||
| 605 | i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ | ||
| 606 | O+7ETPTsJ3xCwnR8gooJybQDJbw= | ||
| 607 | -----END CERTIFICATE----- | ||
| 608 | |||
| 609 | # Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority | ||
| 610 | # Subject: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority | ||
| 611 | # Label: "Go Daddy Class 2 CA" | ||
| 612 | # Serial: 0 | ||
| 613 | # MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67 | ||
| 614 | # SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4 | ||
| 615 | # SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4 | ||
| 616 | -----BEGIN CERTIFICATE----- | ||
| 617 | MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh | ||
| 618 | MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE | ||
| 619 | YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3 | ||
| 620 | MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo | ||
| 621 | ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg | ||
| 622 | MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN | ||
| 623 | ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA | ||
| 624 | PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w | ||
| 625 | wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi | ||
| 626 | EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY | ||
| 627 | avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+ | ||
| 628 | YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE | ||
| 629 | sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h | ||
| 630 | /t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5 | ||
| 631 | IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj | ||
| 632 | YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD | ||
| 633 | ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy | ||
| 634 | OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P | ||
| 635 | TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ | ||
| 636 | HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER | ||
| 637 | dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf | ||
| 638 | ReYNnyicsbkqWletNw+vHX/bvZ8= | ||
| 639 | -----END CERTIFICATE----- | ||
| 640 | |||
| 641 | # Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority | ||
| 642 | # Subject: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority | ||
| 643 | # Label: "Starfield Class 2 CA" | ||
| 644 | # Serial: 0 | ||
| 645 | # MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24 | ||
| 646 | # SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a | ||
| 647 | # SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58 | ||
| 648 | -----BEGIN CERTIFICATE----- | ||
| 649 | MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl | ||
| 650 | MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp | ||
| 651 | U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw | ||
| 652 | NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE | ||
| 653 | ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp | ||
| 654 | ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3 | ||
| 655 | DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf | ||
| 656 | 8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN | ||
| 657 | +lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0 | ||
| 658 | X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa | ||
| 659 | K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA | ||
| 660 | 1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G | ||
| 661 | A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR | ||
| 662 | zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0 | ||
| 663 | YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD | ||
| 664 | bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w | ||
| 665 | DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3 | ||
| 666 | L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D | ||
| 667 | eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl | ||
| 668 | xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp | ||
| 669 | VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY | ||
| 670 | WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q= | ||
| 671 | -----END CERTIFICATE----- | ||
| 672 | |||
| 673 | # Issuer: O=Government Root Certification Authority | ||
| 674 | # Subject: O=Government Root Certification Authority | ||
| 675 | # Label: "Taiwan GRCA" | ||
| 676 | # Serial: 42023070807708724159991140556527066870 | ||
| 677 | # MD5 Fingerprint: 37:85:44:53:32:45:1f:20:f0:f3:95:e1:25:c4:43:4e | ||
| 678 | # SHA1 Fingerprint: f4:8b:11:bf:de:ab:be:94:54:20:71:e6:41:de:6b:be:88:2b:40:b9 | ||
| 679 | # SHA256 Fingerprint: 76:00:29:5e:ef:e8:5b:9e:1f:d6:24:db:76:06:2a:aa:ae:59:81:8a:54:d2:77:4c:d4:c0:b2:c0:11:31:e1:b3 | ||
| 680 | -----BEGIN CERTIFICATE----- | ||
| 681 | MIIFcjCCA1qgAwIBAgIQH51ZWtcvwgZEpYAIaeNe9jANBgkqhkiG9w0BAQUFADA/ | ||
| 682 | MQswCQYDVQQGEwJUVzEwMC4GA1UECgwnR292ZXJubWVudCBSb290IENlcnRpZmlj | ||
| 683 | YXRpb24gQXV0aG9yaXR5MB4XDTAyMTIwNTEzMjMzM1oXDTMyMTIwNTEzMjMzM1ow | ||
| 684 | PzELMAkGA1UEBhMCVFcxMDAuBgNVBAoMJ0dvdmVybm1lbnQgUm9vdCBDZXJ0aWZp | ||
| 685 | Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB | ||
| 686 | AJoluOzMonWoe/fOW1mKydGGEghU7Jzy50b2iPN86aXfTEc2pBsBHH8eV4qNw8XR | ||
| 687 | IePaJD9IK/ufLqGU5ywck9G/GwGHU5nOp/UKIXZ3/6m3xnOUT0b3EEk3+qhZSV1q | ||
| 688 | gQdW8or5BtD3cCJNtLdBuTK4sfCxw5w/cP1T3YGq2GN49thTbqGsaoQkclSGxtKy | ||
| 689 | yhwOeYHWtXBiCAEuTk8O1RGvqa/lmr/czIdtJuTJV6L7lvnM4T9TjGxMfptTCAts | ||
| 690 | F/tnyMKtsc2AtJfcdgEWFelq16TheEfOhtX7MfP6Mb40qij7cEwdScevLJ1tZqa2 | ||
| 691 | jWR+tSBqnTuBto9AAGdLiYa4zGX+FVPpBMHWXx1E1wovJ5pGfaENda1UhhXcSTvx | ||
| 692 | ls4Pm6Dso3pdvtUqdULle96ltqqvKKyskKw4t9VoNSZ63Pc78/1Fm9G7Q3hub/FC | ||
| 693 | VGqY8A2tl+lSXunVanLeavcbYBT0peS2cWeqH+riTcFCQP5nRhc4L0c/cZyu5SHK | ||
| 694 | YS1tB6iEfC3uUSXxY5Ce/eFXiGvviiNtsea9P63RPZYLhY3Naye7twWb7LuRqQoH | ||
| 695 | EgKXTiCQ8P8NHuJBO9NAOueNXdpm5AKwB1KYXA6OM5zCppX7VRluTI6uSw+9wThN | ||
| 696 | Xo+EHWbNxWCWtFJaBYmOlXqYwZE8lSOyDvR5tMl8wUohAgMBAAGjajBoMB0GA1Ud | ||
| 697 | DgQWBBTMzO/MKWCkO7GStjz6MmKPrCUVOzAMBgNVHRMEBTADAQH/MDkGBGcqBwAE | ||
| 698 | MTAvMC0CAQAwCQYFKw4DAhoFADAHBgVnKgMAAAQUA5vwIhP/lSg209yewDL7MTqK | ||
| 699 | UWUwDQYJKoZIhvcNAQEFBQADggIBAECASvomyc5eMN1PhnR2WPWus4MzeKR6dBcZ | ||
| 700 | TulStbngCnRiqmjKeKBMmo4sIy7VahIkv9Ro04rQ2JyftB8M3jh+Vzj8jeJPXgyf | ||
| 701 | qzvS/3WXy6TjZwj/5cAWtUgBfen5Cv8b5Wppv3ghqMKnI6mGq3ZW6A4M9hPdKmaK | ||
| 702 | ZEk9GhiHkASfQlK3T8v+R0F2Ne//AHY2RTKbxkaFXeIksB7jSJaYV0eUVXoPQbFE | ||
| 703 | JPPB/hprv4j9wabak2BegUqZIJxIZhm1AHlUD7gsL0u8qV1bYH+Mh6XgUmMqvtg7 | ||
| 704 | hUAV/h62ZT/FS9p+tXo1KaMuephgIqP0fSdOLeq0dDzpD6QzDxARvBMB1uUO07+1 | ||
| 705 | EqLhRSPAzAhuYbeJq4PjJB7mXQfnHyA+z2fI56wwbSdLaG5LKlwCCDTb+HbkZ6Mm | ||
| 706 | nD+iMsJKxYEYMRBWqoTvLQr/uB930r+lWKBi5NdLkXWNiYCYfm3LU05er/ayl4WX | ||
| 707 | udpVBrkk7tfGOB5jGxI7leFYrPLfhNVfmS8NVVvmONsuP3LpSIXLuykTjx44Vbnz | ||
| 708 | ssQwmSNOXfJIoRIM3BKQCZBUkQM8R+XVyWXgt0t97EfTsws+rZ7QdAAO671RrcDe | ||
| 709 | LMDDav7v3Aun+kbfYNucpllQdSNpc5Oy+fwC00fmcc4QAu4njIT/rEUNE1yDMuAl | ||
| 710 | pYYsfPQS | ||
| 711 | -----END CERTIFICATE----- | ||
| 712 | |||
| 713 | # Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com | ||
| 714 | # Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com | ||
| 715 | # Label: "DigiCert Assured ID Root CA" | ||
| 716 | # Serial: 17154717934120587862167794914071425081 | ||
| 717 | # MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72 | ||
| 718 | # SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43 | ||
| 719 | # SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c | ||
| 720 | -----BEGIN CERTIFICATE----- | ||
| 721 | MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl | ||
| 722 | MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 | ||
| 723 | d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv | ||
| 724 | b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG | ||
| 725 | EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl | ||
| 726 | cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi | ||
| 727 | MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c | ||
| 728 | JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP | ||
| 729 | mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+ | ||
| 730 | wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4 | ||
| 731 | VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/ | ||
| 732 | AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB | ||
| 733 | AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW | ||
| 734 | BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun | ||
| 735 | pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC | ||
| 736 | dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf | ||
| 737 | fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm | ||
| 738 | NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx | ||
| 739 | H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe | ||
| 740 | +o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g== | ||
| 741 | -----END CERTIFICATE----- | ||
| 742 | |||
| 743 | # Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com | ||
| 744 | # Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com | ||
| 745 | # Label: "DigiCert Global Root CA" | ||
| 746 | # Serial: 10944719598952040374951832963794454346 | ||
| 747 | # MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e | ||
| 748 | # SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36 | ||
| 749 | # SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61 | ||
| 750 | -----BEGIN CERTIFICATE----- | ||
| 751 | MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh | ||
| 752 | MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 | ||
| 753 | d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD | ||
| 754 | QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT | ||
| 755 | MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j | ||
| 756 | b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG | ||
| 757 | 9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB | ||
| 758 | CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97 | ||
| 759 | nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt | ||
| 760 | 43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P | ||
| 761 | T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4 | ||
| 762 | gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO | ||
| 763 | BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR | ||
| 764 | TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw | ||
| 765 | DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr | ||
| 766 | hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg | ||
| 767 | 06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF | ||
| 768 | PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls | ||
| 769 | YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk | ||
| 770 | CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4= | ||
| 771 | -----END CERTIFICATE----- | ||
| 772 | |||
| 773 | # Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com | ||
| 774 | # Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com | ||
| 775 | # Label: "DigiCert High Assurance EV Root CA" | ||
| 776 | # Serial: 3553400076410547919724730734378100087 | ||
| 777 | # MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a | ||
| 778 | # SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25 | ||
| 779 | # SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf | ||
| 780 | -----BEGIN CERTIFICATE----- | ||
| 781 | MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs | ||
| 782 | MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 | ||
| 783 | d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j | ||
| 784 | ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL | ||
| 785 | MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3 | ||
| 786 | LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug | ||
| 787 | RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm | ||
| 788 | +9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW | ||
| 789 | PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM | ||
| 790 | xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB | ||
| 791 | Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3 | ||
| 792 | hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg | ||
| 793 | EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF | ||
| 794 | MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA | ||
| 795 | FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec | ||
| 796 | nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z | ||
| 797 | eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF | ||
| 798 | hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2 | ||
| 799 | Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe | ||
| 800 | vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep | ||
| 801 | +OkuE6N36B9K | ||
| 802 | -----END CERTIFICATE----- | ||
| 803 | |||
| 804 | # Issuer: CN=Class 2 Primary CA O=Certplus | ||
| 805 | # Subject: CN=Class 2 Primary CA O=Certplus | ||
| 806 | # Label: "Certplus Class 2 Primary CA" | ||
| 807 | # Serial: 177770208045934040241468760488327595043 | ||
| 808 | # MD5 Fingerprint: 88:2c:8c:52:b8:a2:3c:f3:f7:bb:03:ea:ae:ac:42:0b | ||
| 809 | # SHA1 Fingerprint: 74:20:74:41:72:9c:dd:92:ec:79:31:d8:23:10:8d:c2:81:92:e2:bb | ||
| 810 | # SHA256 Fingerprint: 0f:99:3c:8a:ef:97:ba:af:56:87:14:0e:d5:9a:d1:82:1b:b4:af:ac:f0:aa:9a:58:b5:d5:7a:33:8a:3a:fb:cb | ||
| 811 | -----BEGIN CERTIFICATE----- | ||
| 812 | MIIDkjCCAnqgAwIBAgIRAIW9S/PY2uNp9pTXX8OlRCMwDQYJKoZIhvcNAQEFBQAw | ||
| 813 | PTELMAkGA1UEBhMCRlIxETAPBgNVBAoTCENlcnRwbHVzMRswGQYDVQQDExJDbGFz | ||
| 814 | cyAyIFByaW1hcnkgQ0EwHhcNOTkwNzA3MTcwNTAwWhcNMTkwNzA2MjM1OTU5WjA9 | ||
| 815 | MQswCQYDVQQGEwJGUjERMA8GA1UEChMIQ2VydHBsdXMxGzAZBgNVBAMTEkNsYXNz | ||
| 816 | IDIgUHJpbWFyeSBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANxQ | ||
| 817 | ltAS+DXSCHh6tlJw/W/uz7kRy1134ezpfgSN1sxvc0NXYKwzCkTsA18cgCSR5aiR | ||
| 818 | VhKC9+Ar9NuuYS6JEI1rbLqzAr3VNsVINyPi8Fo3UjMXEuLRYE2+L0ER4/YXJQyL | ||
| 819 | kcAbmXuZVg2v7tK8R1fjeUl7NIknJITesezpWE7+Tt9avkGtrAjFGA7v0lPubNCd | ||
| 820 | EgETjdyAYveVqUSISnFOYFWe2yMZeVYHDD9jC1yw4r5+FfyUM1hBOHTE4Y+L3yas | ||
| 821 | H7WLO7dDWWuwJKZtkIvEcupdM5i3y95ee++U8Rs+yskhwcWYAqqi9lt3m/V+llU0 | ||
| 822 | HGdpwPFC40es/CgcZlUCAwEAAaOBjDCBiTAPBgNVHRMECDAGAQH/AgEKMAsGA1Ud | ||
| 823 | DwQEAwIBBjAdBgNVHQ4EFgQU43Mt38sOKAze3bOkynm4jrvoMIkwEQYJYIZIAYb4 | ||
| 824 | QgEBBAQDAgEGMDcGA1UdHwQwMC4wLKAqoCiGJmh0dHA6Ly93d3cuY2VydHBsdXMu | ||
| 825 | Y29tL0NSTC9jbGFzczIuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQCnVM+IRBnL39R/ | ||
| 826 | AN9WM2K191EBkOvDP9GIROkkXe/nFL0gt5o8AP5tn9uQ3Nf0YtaLcF3n5QRIqWh8 | ||
| 827 | yfFC82x/xXp8HVGIutIKPidd3i1RTtMTZGnkLuPT55sJmabglZvOGtd/vjzOUrMR | ||
| 828 | FcEPF80Du5wlFbqidon8BvEY0JNLDnyCt6X09l/+7UCmnYR0ObncHoUW2ikbhiMA | ||
| 829 | ybuJfm6AiB4vFLQDJKgybwOaRywwvlbGp0ICcBvqQNi6BQNwB6SW//1IMwrh3KWB | ||
| 830 | kJtN3X3n57LNXMhqlfil9o3EXXgIvnsG1knPGTZQIy4I5p4FTUcY1Rbpsda2ENW7 | ||
| 831 | l7+ijrRU | ||
| 832 | -----END CERTIFICATE----- | ||
| 833 | |||
| 834 | # Issuer: CN=DST Root CA X3 O=Digital Signature Trust Co. | ||
| 835 | # Subject: CN=DST Root CA X3 O=Digital Signature Trust Co. | ||
| 836 | # Label: "DST Root CA X3" | ||
| 837 | # Serial: 91299735575339953335919266965803778155 | ||
| 838 | # MD5 Fingerprint: 41:03:52:dc:0f:f7:50:1b:16:f0:02:8e:ba:6f:45:c5 | ||
| 839 | # SHA1 Fingerprint: da:c9:02:4f:54:d8:f6:df:94:93:5f:b1:73:26:38:ca:6a:d7:7c:13 | ||
| 840 | # SHA256 Fingerprint: 06:87:26:03:31:a7:24:03:d9:09:f1:05:e6:9b:cf:0d:32:e1:bd:24:93:ff:c6:d9:20:6d:11:bc:d6:77:07:39 | ||
| 841 | -----BEGIN CERTIFICATE----- | ||
| 842 | MIIDSjCCAjKgAwIBAgIQRK+wgNajJ7qJMDmGLvhAazANBgkqhkiG9w0BAQUFADA/ | ||
| 843 | MSQwIgYDVQQKExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4xFzAVBgNVBAMT | ||
| 844 | DkRTVCBSb290IENBIFgzMB4XDTAwMDkzMDIxMTIxOVoXDTIxMDkzMDE0MDExNVow | ||
| 845 | PzEkMCIGA1UEChMbRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QgQ28uMRcwFQYDVQQD | ||
| 846 | Ew5EU1QgUm9vdCBDQSBYMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB | ||
| 847 | AN+v6ZdQCINXtMxiZfaQguzH0yxrMMpb7NnDfcdAwRgUi+DoM3ZJKuM/IUmTrE4O | ||
| 848 | rz5Iy2Xu/NMhD2XSKtkyj4zl93ewEnu1lcCJo6m67XMuegwGMoOifooUMM0RoOEq | ||
| 849 | OLl5CjH9UL2AZd+3UWODyOKIYepLYYHsUmu5ouJLGiifSKOeDNoJjj4XLh7dIN9b | ||
| 850 | xiqKqy69cK3FCxolkHRyxXtqqzTWMIn/5WgTe1QLyNau7Fqckh49ZLOMxt+/yUFw | ||
| 851 | 7BZy1SbsOFU5Q9D8/RhcQPGX69Wam40dutolucbY38EVAjqr2m7xPi71XAicPNaD | ||
| 852 | aeQQmxkqtilX4+U9m5/wAl0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNV | ||
| 853 | HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFMSnsaR7LHH62+FLkHX/xBVghYkQMA0GCSqG | ||
| 854 | SIb3DQEBBQUAA4IBAQCjGiybFwBcqR7uKGY3Or+Dxz9LwwmglSBd49lZRNI+DT69 | ||
| 855 | ikugdB/OEIKcdBodfpga3csTS7MgROSR6cz8faXbauX+5v3gTt23ADq1cEmv8uXr | ||
| 856 | AvHRAosZy5Q6XkjEGB5YGV8eAlrwDPGxrancWYaLbumR9YbK+rlmM6pZW87ipxZz | ||
| 857 | R8srzJmwN0jP41ZL9c8PDHIyh8bwRLtTcm1D9SZImlJnt1ir/md2cXjbDaJWFBM5 | ||
| 858 | JDGFoqgCWjBH4d1QB7wCCZAA62RjYJsWvIjJEubSfZGL+T0yjWW06XyxV3bqxbYo | ||
| 859 | Ob8VZRzI9neWagqNdwvYkQsEjgfbKbYK7p2CNTUQ | ||
| 860 | -----END CERTIFICATE----- | ||
| 861 | |||
| 862 | # Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG | ||
| 863 | # Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG | ||
| 864 | # Label: "SwissSign Gold CA - G2" | ||
| 865 | # Serial: 13492815561806991280 | ||
| 866 | # MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93 | ||
| 867 | # SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61 | ||
| 868 | # SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95 | ||
| 869 | -----BEGIN CERTIFICATE----- | ||
| 870 | MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV | ||
| 871 | BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln | ||
| 872 | biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF | ||
| 873 | MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT | ||
| 874 | d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC | ||
| 875 | CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8 | ||
| 876 | 76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+ | ||
| 877 | bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c | ||
| 878 | 6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE | ||
| 879 | emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd | ||
| 880 | MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt | ||
| 881 | MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y | ||
| 882 | MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y | ||
| 883 | FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi | ||
| 884 | aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM | ||
| 885 | gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB | ||
| 886 | qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7 | ||
| 887 | lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn | ||
| 888 | 8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov | ||
| 889 | L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6 | ||
| 890 | 45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO | ||
| 891 | UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5 | ||
| 892 | O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC | ||
| 893 | bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv | ||
| 894 | GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a | ||
| 895 | 77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC | ||
| 896 | hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3 | ||
| 897 | 92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp | ||
| 898 | Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w | ||
| 899 | ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt | ||
| 900 | Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ | ||
| 901 | -----END CERTIFICATE----- | ||
| 902 | |||
| 903 | # Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG | ||
| 904 | # Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG | ||
| 905 | # Label: "SwissSign Silver CA - G2" | ||
| 906 | # Serial: 5700383053117599563 | ||
| 907 | # MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13 | ||
| 908 | # SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb | ||
| 909 | # SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5 | ||
| 910 | -----BEGIN CERTIFICATE----- | ||
| 911 | MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE | ||
| 912 | BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu | ||
| 913 | IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow | ||
| 914 | RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY | ||
| 915 | U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A | ||
| 916 | MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv | ||
| 917 | Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br | ||
| 918 | YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF | ||
| 919 | nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH | ||
| 920 | 6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt | ||
| 921 | eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/ | ||
| 922 | c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ | ||
| 923 | MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH | ||
| 924 | HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf | ||
| 925 | jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6 | ||
| 926 | 5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB | ||
| 927 | rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU | ||
| 928 | F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c | ||
| 929 | wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0 | ||
| 930 | cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB | ||
| 931 | AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp | ||
| 932 | WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9 | ||
| 933 | xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ | ||
| 934 | 2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ | ||
| 935 | IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8 | ||
| 936 | aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X | ||
| 937 | em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR | ||
| 938 | dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/ | ||
| 939 | OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+ | ||
| 940 | hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy | ||
| 941 | tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u | ||
| 942 | -----END CERTIFICATE----- | ||
| 943 | |||
| 944 | # Issuer: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc. | ||
| 945 | # Subject: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc. | ||
| 946 | # Label: "GeoTrust Primary Certification Authority" | ||
| 947 | # Serial: 32798226551256963324313806436981982369 | ||
| 948 | # MD5 Fingerprint: 02:26:c3:01:5e:08:30:37:43:a9:d0:7d:cf:37:e6:bf | ||
| 949 | # SHA1 Fingerprint: 32:3c:11:8e:1b:f7:b8:b6:52:54:e2:e2:10:0d:d6:02:90:37:f0:96 | ||
| 950 | # SHA256 Fingerprint: 37:d5:10:06:c5:12:ea:ab:62:64:21:f1:ec:8c:92:01:3f:c5:f8:2a:e9:8e:e5:33:eb:46:19:b8:de:b4:d0:6c | ||
| 951 | -----BEGIN CERTIFICATE----- | ||
| 952 | MIIDfDCCAmSgAwIBAgIQGKy1av1pthU6Y2yv2vrEoTANBgkqhkiG9w0BAQUFADBY | ||
| 953 | MQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjExMC8GA1UEAxMo | ||
| 954 | R2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEx | ||
| 955 | MjcwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMFgxCzAJBgNVBAYTAlVTMRYwFAYDVQQK | ||
| 956 | Ew1HZW9UcnVzdCBJbmMuMTEwLwYDVQQDEyhHZW9UcnVzdCBQcmltYXJ5IENlcnRp | ||
| 957 | ZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC | ||
| 958 | AQEAvrgVe//UfH1nrYNke8hCUy3f9oQIIGHWAVlqnEQRr+92/ZV+zmEwu3qDXwK9 | ||
| 959 | AWbK7hWNb6EwnL2hhZ6UOvNWiAAxz9juapYC2e0DjPt1befquFUWBRaa9OBesYjA | ||
| 960 | ZIVcFU2Ix7e64HXprQU9nceJSOC7KMgD4TCTZF5SwFlwIjVXiIrxlQqD17wxcwE0 | ||
| 961 | 7e9GceBrAqg1cmuXm2bgyxx5X9gaBGgeRwLmnWDiNpcB3841kt++Z8dtd1k7j53W | ||
| 962 | kBWUvEI0EME5+bEnPn7WinXFsq+W06Lem+SYvn3h6YGttm/81w7a4DSwDRp35+MI | ||
| 963 | mO9Y+pyEtzavwt+s0vQQBnBxNQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G | ||
| 964 | A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQULNVQQZcVi/CPNmFbSvtr2ZnJM5IwDQYJ | ||
| 965 | KoZIhvcNAQEFBQADggEBAFpwfyzdtzRP9YZRqSa+S7iq8XEN3GHHoOo0Hnp3DwQ1 | ||
| 966 | 6CePbJC/kRYkRj5KTs4rFtULUh38H2eiAkUxT87z+gOneZ1TatnaYzr4gNfTmeGl | ||
| 967 | 4b7UVXGYNTq+k+qurUKykG/g/CFNNWMziUnWm07Kx+dOCQD32sfvmWKZd7aVIl6K | ||
| 968 | oKv0uHiYyjgZmclynnjNS6yvGaBzEi38wkG6gZHaFloxt/m0cYASSJlyc1pZU8Fj | ||
| 969 | UjPtp8nSOQJw+uCxQmYpqptR7TBUIhRf2asdweSU8Pj1K/fqynhG1riR/aYNKxoU | ||
| 970 | AT6A8EKglQdebc3MS6RFjasS6LPeWuWgfOgPIh1a6Vk= | ||
| 971 | -----END CERTIFICATE----- | ||
| 972 | |||
| 973 | # Issuer: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only | ||
| 974 | # Subject: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only | ||
| 975 | # Label: "thawte Primary Root CA" | ||
| 976 | # Serial: 69529181992039203566298953787712940909 | ||
| 977 | # MD5 Fingerprint: 8c:ca:dc:0b:22:ce:f5:be:72:ac:41:1a:11:a8:d8:12 | ||
| 978 | # SHA1 Fingerprint: 91:c6:d6:ee:3e:8a:c8:63:84:e5:48:c2:99:29:5c:75:6c:81:7b:81 | ||
| 979 | # SHA256 Fingerprint: 8d:72:2f:81:a9:c1:13:c0:79:1d:f1:36:a2:96:6d:b2:6c:95:0a:97:1d:b4:6b:41:99:f4:ea:54:b7:8b:fb:9f | ||
| 980 | -----BEGIN CERTIFICATE----- | ||
| 981 | MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB | ||
| 982 | qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf | ||
| 983 | Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw | ||
| 984 | MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV | ||
| 985 | BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw | ||
| 986 | NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j | ||
| 987 | LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG | ||
| 988 | A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl | ||
| 989 | IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG | ||
| 990 | SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs | ||
| 991 | W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta | ||
| 992 | 3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk | ||
| 993 | 6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6 | ||
| 994 | Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J | ||
| 995 | NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA | ||
| 996 | MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP | ||
| 997 | r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU | ||
| 998 | DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz | ||
| 999 | YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX | ||
| 1000 | xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2 | ||
| 1001 | /qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/ | ||
| 1002 | LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7 | ||
| 1003 | jVaMaA== | ||
| 1004 | -----END CERTIFICATE----- | ||
| 1005 | |||
| 1006 | # Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only | ||
| 1007 | # Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only | ||
| 1008 | # Label: "VeriSign Class 3 Public Primary Certification Authority - G5" | ||
| 1009 | # Serial: 33037644167568058970164719475676101450 | ||
| 1010 | # MD5 Fingerprint: cb:17:e4:31:67:3e:e2:09:fe:45:57:93:f3:0a:fa:1c | ||
| 1011 | # SHA1 Fingerprint: 4e:b6:d5:78:49:9b:1c:cf:5f:58:1e:ad:56:be:3d:9b:67:44:a5:e5 | ||
| 1012 | # SHA256 Fingerprint: 9a:cf:ab:7e:43:c8:d8:80:d0:6b:26:2a:94:de:ee:e4:b4:65:99:89:c3:d0:ca:f1:9b:af:64:05:e4:1a:b7:df | ||
| 1013 | -----BEGIN CERTIFICATE----- | ||
| 1014 | MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCB | ||
| 1015 | yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL | ||
| 1016 | ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp | ||
| 1017 | U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW | ||
| 1018 | ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0 | ||
| 1019 | aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjEL | ||
| 1020 | MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW | ||
| 1021 | ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2ln | ||
| 1022 | biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp | ||
| 1023 | U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y | ||
| 1024 | aXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1 | ||
| 1025 | nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbex | ||
| 1026 | t0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIz | ||
| 1027 | SdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQG | ||
| 1028 | BO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+ | ||
| 1029 | rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/ | ||
| 1030 | NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E | ||
| 1031 | BAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAH | ||
| 1032 | BgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy | ||
| 1033 | aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKv | ||
| 1034 | MzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzE | ||
| 1035 | p6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y | ||
| 1036 | 5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlK | ||
| 1037 | WE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ | ||
| 1038 | 4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8N | ||
| 1039 | hnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq | ||
| 1040 | -----END CERTIFICATE----- | ||
| 1041 | |||
| 1042 | # Issuer: CN=SecureTrust CA O=SecureTrust Corporation | ||
| 1043 | # Subject: CN=SecureTrust CA O=SecureTrust Corporation | ||
| 1044 | # Label: "SecureTrust CA" | ||
| 1045 | # Serial: 17199774589125277788362757014266862032 | ||
| 1046 | # MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1 | ||
| 1047 | # SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11 | ||
| 1048 | # SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73 | ||
| 1049 | -----BEGIN CERTIFICATE----- | ||
| 1050 | MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI | ||
| 1051 | MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x | ||
| 1052 | FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz | ||
| 1053 | MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv | ||
| 1054 | cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN | ||
| 1055 | AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz | ||
| 1056 | Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO | ||
| 1057 | 0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao | ||
| 1058 | wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj | ||
| 1059 | 7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS | ||
| 1060 | 8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT | ||
| 1061 | BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB | ||
| 1062 | /zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg | ||
| 1063 | JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC | ||
| 1064 | NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3 | ||
| 1065 | 6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/ | ||
| 1066 | 3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm | ||
| 1067 | D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS | ||
| 1068 | CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR | ||
| 1069 | 3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE= | ||
| 1070 | -----END CERTIFICATE----- | ||
| 1071 | |||
| 1072 | # Issuer: CN=Secure Global CA O=SecureTrust Corporation | ||
| 1073 | # Subject: CN=Secure Global CA O=SecureTrust Corporation | ||
| 1074 | # Label: "Secure Global CA" | ||
| 1075 | # Serial: 9751836167731051554232119481456978597 | ||
| 1076 | # MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de | ||
| 1077 | # SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b | ||
| 1078 | # SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69 | ||
| 1079 | -----BEGIN CERTIFICATE----- | ||
| 1080 | MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK | ||
| 1081 | MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x | ||
| 1082 | GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx | ||
| 1083 | MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg | ||
| 1084 | Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG | ||
| 1085 | SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ | ||
| 1086 | iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa | ||
| 1087 | /FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ | ||
| 1088 | jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI | ||
| 1089 | HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7 | ||
| 1090 | sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w | ||
| 1091 | gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF | ||
| 1092 | MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw | ||
| 1093 | KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG | ||
| 1094 | AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L | ||
| 1095 | URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO | ||
| 1096 | H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm | ||
| 1097 | I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY | ||
| 1098 | iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc | ||
| 1099 | f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW | ||
| 1100 | -----END CERTIFICATE----- | ||
| 1101 | |||
| 1102 | # Issuer: CN=COMODO Certification Authority O=COMODO CA Limited | ||
| 1103 | # Subject: CN=COMODO Certification Authority O=COMODO CA Limited | ||
| 1104 | # Label: "COMODO Certification Authority" | ||
| 1105 | # Serial: 104350513648249232941998508985834464573 | ||
| 1106 | # MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75 | ||
| 1107 | # SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b | ||
| 1108 | # SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66 | ||
| 1109 | -----BEGIN CERTIFICATE----- | ||
| 1110 | MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB | ||
| 1111 | gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G | ||
| 1112 | A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV | ||
| 1113 | BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw | ||
| 1114 | MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl | ||
| 1115 | YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P | ||
| 1116 | RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0 | ||
| 1117 | aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3 | ||
| 1118 | UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI | ||
| 1119 | 2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8 | ||
| 1120 | Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp | ||
| 1121 | +2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+ | ||
| 1122 | DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O | ||
| 1123 | nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW | ||
| 1124 | /zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g | ||
| 1125 | PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u | ||
| 1126 | QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY | ||
| 1127 | SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv | ||
| 1128 | IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/ | ||
| 1129 | RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4 | ||
| 1130 | zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd | ||
| 1131 | BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB | ||
| 1132 | ZQ== | ||
| 1133 | -----END CERTIFICATE----- | ||
| 1134 | |||
| 1135 | # Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. | ||
| 1136 | # Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. | ||
| 1137 | # Label: "Network Solutions Certificate Authority" | ||
| 1138 | # Serial: 116697915152937497490437556386812487904 | ||
| 1139 | # MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e | ||
| 1140 | # SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce | ||
| 1141 | # SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c | ||
| 1142 | -----BEGIN CERTIFICATE----- | ||
| 1143 | MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi | ||
| 1144 | MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu | ||
| 1145 | MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp | ||
| 1146 | dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV | ||
| 1147 | UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO | ||
| 1148 | ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG | ||
| 1149 | SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz | ||
| 1150 | c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP | ||
| 1151 | OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl | ||
| 1152 | mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF | ||
| 1153 | BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4 | ||
| 1154 | qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw | ||
| 1155 | gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB | ||
| 1156 | BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu | ||
| 1157 | bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp | ||
| 1158 | dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8 | ||
| 1159 | 6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/ | ||
| 1160 | h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH | ||
| 1161 | /nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv | ||
| 1162 | wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN | ||
| 1163 | pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey | ||
| 1164 | -----END CERTIFICATE----- | ||
| 1165 | |||
| 1166 | # Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited | ||
| 1167 | # Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited | ||
| 1168 | # Label: "COMODO ECC Certification Authority" | ||
| 1169 | # Serial: 41578283867086692638256921589707938090 | ||
| 1170 | # MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23 | ||
| 1171 | # SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11 | ||
| 1172 | # SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7 | ||
| 1173 | -----BEGIN CERTIFICATE----- | ||
| 1174 | MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL | ||
| 1175 | MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE | ||
| 1176 | BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT | ||
| 1177 | IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw | ||
| 1178 | MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy | ||
| 1179 | ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N | ||
| 1180 | T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv | ||
| 1181 | biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR | ||
| 1182 | FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J | ||
| 1183 | cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW | ||
| 1184 | BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ | ||
| 1185 | BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm | ||
| 1186 | fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv | ||
| 1187 | GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= | ||
| 1188 | -----END CERTIFICATE----- | ||
| 1189 | |||
| 1190 | # Issuer: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed | ||
| 1191 | # Subject: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed | ||
| 1192 | # Label: "OISTE WISeKey Global Root GA CA" | ||
| 1193 | # Serial: 86718877871133159090080555911823548314 | ||
| 1194 | # MD5 Fingerprint: bc:6c:51:33:a7:e9:d3:66:63:54:15:72:1b:21:92:93 | ||
| 1195 | # SHA1 Fingerprint: 59:22:a1:e1:5a:ea:16:35:21:f8:98:39:6a:46:46:b0:44:1b:0f:a9 | ||
| 1196 | # SHA256 Fingerprint: 41:c9:23:86:6a:b4:ca:d6:b7:ad:57:80:81:58:2e:02:07:97:a6:cb:df:4f:ff:78:ce:83:96:b3:89:37:d7:f5 | ||
| 1197 | -----BEGIN CERTIFICATE----- | ||
| 1198 | MIID8TCCAtmgAwIBAgIQQT1yx/RrH4FDffHSKFTfmjANBgkqhkiG9w0BAQUFADCB | ||
| 1199 | ijELMAkGA1UEBhMCQ0gxEDAOBgNVBAoTB1dJU2VLZXkxGzAZBgNVBAsTEkNvcHly | ||
| 1200 | aWdodCAoYykgMjAwNTEiMCAGA1UECxMZT0lTVEUgRm91bmRhdGlvbiBFbmRvcnNl | ||
| 1201 | ZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwgUm9vdCBHQSBDQTAeFw0w | ||
| 1202 | NTEyMTExNjAzNDRaFw0zNzEyMTExNjA5NTFaMIGKMQswCQYDVQQGEwJDSDEQMA4G | ||
| 1203 | A1UEChMHV0lTZUtleTEbMBkGA1UECxMSQ29weXJpZ2h0IChjKSAyMDA1MSIwIAYD | ||
| 1204 | VQQLExlPSVNURSBGb3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBX | ||
| 1205 | SVNlS2V5IEdsb2JhbCBSb290IEdBIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A | ||
| 1206 | MIIBCgKCAQEAy0+zAJs9Nt350UlqaxBJH+zYK7LG+DKBKUOVTJoZIyEVRd7jyBxR | ||
| 1207 | VVuuk+g3/ytr6dTqvirdqFEr12bDYVxgAsj1znJ7O7jyTmUIms2kahnBAbtzptf2 | ||
| 1208 | w93NvKSLtZlhuAGio9RN1AU9ka34tAhxZK9w8RxrfvbDd50kc3vkDIzh2TbhmYsF | ||
| 1209 | mQvtRTEJysIA2/dyoJaqlYfQjse2YXMNdmaM3Bu0Y6Kff5MTMPGhJ9vZ/yxViJGg | ||
| 1210 | 4E8HsChWjBgbl0SOid3gF27nKu+POQoxhILYQBRJLnpB5Kf+42TMwVlxSywhp1t9 | ||
| 1211 | 4B3RLoGbw9ho972WG6xwsRYUC9tguSYBBQIDAQABo1EwTzALBgNVHQ8EBAMCAYYw | ||
| 1212 | DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUswN+rja8sHnR3JQmthG+IbJphpQw | ||
| 1213 | EAYJKwYBBAGCNxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBAEuh/wuHbrP5wUOx | ||
| 1214 | SPMowB0uyQlB+pQAHKSkq0lPjz0e701vvbyk9vImMMkQyh2I+3QZH4VFvbBsUfk2 | ||
| 1215 | ftv1TDI6QU9bR8/oCy22xBmddMVHxjtqD6wU2zz0c5ypBd8A3HR4+vg1YFkCExh8 | ||
| 1216 | vPtNsCBtQ7tgMHpnM1zFmdH4LTlSc/uMqpclXHLZCB6rTjzjgTGfA6b7wP4piFXa | ||
| 1217 | hNVQA7bihKOmNqoROgHhGEvWRGizPflTdISzRpFGlgC3gCy24eMQ4tui5yiPAZZi | ||
| 1218 | Fj4A4xylNoEYokxSdsARo27mHbrjWr42U8U+dY+GaSlYU7Wcu2+fXMUY7N0v4ZjJ | ||
| 1219 | /L7fCg0= | ||
| 1220 | -----END CERTIFICATE----- | ||
| 1221 | |||
| 1222 | # Issuer: CN=Certigna O=Dhimyotis | ||
| 1223 | # Subject: CN=Certigna O=Dhimyotis | ||
| 1224 | # Label: "Certigna" | ||
| 1225 | # Serial: 18364802974209362175 | ||
| 1226 | # MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff | ||
| 1227 | # SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97 | ||
| 1228 | # SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d | ||
| 1229 | -----BEGIN CERTIFICATE----- | ||
| 1230 | MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV | ||
| 1231 | BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X | ||
| 1232 | DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ | ||
| 1233 | BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3 | ||
| 1234 | DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4 | ||
| 1235 | QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny | ||
| 1236 | gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw | ||
| 1237 | zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q | ||
| 1238 | 130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2 | ||
| 1239 | JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw | ||
| 1240 | DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw | ||
| 1241 | ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT | ||
| 1242 | AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj | ||
| 1243 | AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG | ||
| 1244 | 9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h | ||
| 1245 | bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc | ||
| 1246 | fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu | ||
| 1247 | HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w | ||
| 1248 | t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw | ||
| 1249 | WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg== | ||
| 1250 | -----END CERTIFICATE----- | ||
| 1251 | |||
| 1252 | # Issuer: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center | ||
| 1253 | # Subject: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center | ||
| 1254 | # Label: "Deutsche Telekom Root CA 2" | ||
| 1255 | # Serial: 38 | ||
| 1256 | # MD5 Fingerprint: 74:01:4a:91:b1:08:c4:58:ce:47:cd:f0:dd:11:53:08 | ||
| 1257 | # SHA1 Fingerprint: 85:a4:08:c0:9c:19:3e:5d:51:58:7d:cd:d6:13:30:fd:8c:de:37:bf | ||
| 1258 | # SHA256 Fingerprint: b6:19:1a:50:d0:c3:97:7f:7d:a9:9b:cd:aa:c8:6a:22:7d:ae:b9:67:9e:c7:0b:a3:b0:c9:d9:22:71:c1:70:d3 | ||
| 1259 | -----BEGIN CERTIFICATE----- | ||
| 1260 | MIIDnzCCAoegAwIBAgIBJjANBgkqhkiG9w0BAQUFADBxMQswCQYDVQQGEwJERTEc | ||
| 1261 | MBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxlU2Vj | ||
| 1262 | IFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290IENB | ||
| 1263 | IDIwHhcNOTkwNzA5MTIxMTAwWhcNMTkwNzA5MjM1OTAwWjBxMQswCQYDVQQGEwJE | ||
| 1264 | RTEcMBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxl | ||
| 1265 | U2VjIFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290 | ||
| 1266 | IENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCrC6M14IspFLEU | ||
| 1267 | ha88EOQ5bzVdSq7d6mGNlUn0b2SjGmBmpKlAIoTZ1KXleJMOaAGtuU1cOs7TuKhC | ||
| 1268 | QN/Po7qCWWqSG6wcmtoIKyUn+WkjR/Hg6yx6m/UTAtB+NHzCnjwAWav12gz1Mjwr | ||
| 1269 | rFDa1sPeg5TKqAyZMg4ISFZbavva4VhYAUlfckE8FQYBjl2tqriTtM2e66foai1S | ||
| 1270 | NNs671x1Udrb8zH57nGYMsRUFUQM+ZtV7a3fGAigo4aKSe5TBY8ZTNXeWHmb0moc | ||
| 1271 | QqvF1afPaA+W5OFhmHZhyJF81j4A4pFQh+GdCuatl9Idxjp9y7zaAzTVjlsB9WoH | ||
| 1272 | txa2bkp/AgMBAAGjQjBAMB0GA1UdDgQWBBQxw3kbuvVT1xfgiXotF2wKsyudMzAP | ||
| 1273 | BgNVHRMECDAGAQH/AgEFMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOC | ||
| 1274 | AQEAlGRZrTlk5ynrE/5aw4sTV8gEJPB0d8Bg42f76Ymmg7+Wgnxu1MM9756Abrsp | ||
| 1275 | tJh6sTtU6zkXR34ajgv8HzFZMQSyzhfzLMdiNlXiItiJVbSYSKpk+tYcNthEeFpa | ||
| 1276 | IzpXl/V6ME+un2pMSyuOoAPjPuCp1NJ70rOo4nI8rZ7/gFnkm0W09juwzTkZmDLl | ||
| 1277 | 6iFhkOQxIY40sfcvNUqFENrnijchvllj4PKFiDFT1FQUhXB59C4Gdyd1Lx+4ivn+ | ||
| 1278 | xbrYNuSD7Odlt79jWvNGr4GUN9RBjNYj1h7P9WgbRGOiWrqnNVmh5XAFmw4jV5mU | ||
| 1279 | Cm26OWMohpLzGITY+9HPBVZkVw== | ||
| 1280 | -----END CERTIFICATE----- | ||
| 1281 | |||
| 1282 | # Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc | ||
| 1283 | # Subject: CN=Cybertrust Global Root O=Cybertrust, Inc | ||
| 1284 | # Label: "Cybertrust Global Root" | ||
| 1285 | # Serial: 4835703278459682877484360 | ||
| 1286 | # MD5 Fingerprint: 72:e4:4a:87:e3:69:40:80:77:ea:bc:e3:f4:ff:f0:e1 | ||
| 1287 | # SHA1 Fingerprint: 5f:43:e5:b1:bf:f8:78:8c:ac:1c:c7:ca:4a:9a:c6:22:2b:cc:34:c6 | ||
| 1288 | # SHA256 Fingerprint: 96:0a:df:00:63:e9:63:56:75:0c:29:65:dd:0a:08:67:da:0b:9c:bd:6e:77:71:4a:ea:fb:23:49:ab:39:3d:a3 | ||
| 1289 | -----BEGIN CERTIFICATE----- | ||
| 1290 | MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYG | ||
| 1291 | A1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2Jh | ||
| 1292 | bCBSb290MB4XDTA2MTIxNTA4MDAwMFoXDTIxMTIxNTA4MDAwMFowOzEYMBYGA1UE | ||
| 1293 | ChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBS | ||
| 1294 | b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+Mi8vRRQZhP/8NN5 | ||
| 1295 | 7CPytxrHjoXxEnOmGaoQ25yiZXRadz5RfVb23CO21O1fWLE3TdVJDm71aofW0ozS | ||
| 1296 | J8bi/zafmGWgE07GKmSb1ZASzxQG9Dvj1Ci+6A74q05IlG2OlTEQXO2iLb3VOm2y | ||
| 1297 | HLtgwEZLAfVJrn5GitB0jaEMAs7u/OePuGtm839EAL9mJRQr3RAwHQeWP032a7iP | ||
| 1298 | t3sMpTjr3kfb1V05/Iin89cqdPHoWqI7n1C6poxFNcJQZZXcY4Lv3b93TZxiyWNz | ||
| 1299 | FtApD0mpSPCzqrdsxacwOUBdrsTiXSZT8M4cIwhhqJQZugRiQOwfOHB3EgZxpzAY | ||
| 1300 | XSUnpQIDAQABo4GlMIGiMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/ | ||
| 1301 | MB0GA1UdDgQWBBS2CHsNesysIEyGVjJez6tuhS1wVzA/BgNVHR8EODA2MDSgMqAw | ||
| 1302 | hi5odHRwOi8vd3d3Mi5wdWJsaWMtdHJ1c3QuY29tL2NybC9jdC9jdHJvb3QuY3Js | ||
| 1303 | MB8GA1UdIwQYMBaAFLYIew16zKwgTIZWMl7Pq26FLXBXMA0GCSqGSIb3DQEBBQUA | ||
| 1304 | A4IBAQBW7wojoFROlZfJ+InaRcHUowAl9B8Tq7ejhVhpwjCt2BWKLePJzYFa+HMj | ||
| 1305 | Wqd8BfP9IjsO0QbE2zZMcwSO5bAi5MXzLqXZI+O4Tkogp24CJJ8iYGd7ix1yCcUx | ||
| 1306 | XOl5n4BHPa2hCwcUPUf/A2kaDAtE52Mlp3+yybh2hO0j9n0Hq0V+09+zv+mKts2o | ||
| 1307 | omcrUtW3ZfA5TGOgkXmTUg9U3YO7n9GPp1Nzw8v/MOx8BLjYRB+TX3EJIrduPuoc | ||
| 1308 | A06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jW | ||
| 1309 | WL1WMRJOEcgh4LMRkWXbtKaIOM5V | ||
| 1310 | -----END CERTIFICATE----- | ||
| 1311 | |||
| 1312 | # Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority | ||
| 1313 | # Subject: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority | ||
| 1314 | # Label: "ePKI Root Certification Authority" | ||
| 1315 | # Serial: 28956088682735189655030529057352760477 | ||
| 1316 | # MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3 | ||
| 1317 | # SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0 | ||
| 1318 | # SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5 | ||
| 1319 | -----BEGIN CERTIFICATE----- | ||
| 1320 | MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe | ||
| 1321 | MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 | ||
| 1322 | ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe | ||
| 1323 | Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw | ||
| 1324 | IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL | ||
| 1325 | SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF | ||
| 1326 | AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH | ||
| 1327 | SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh | ||
| 1328 | ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X | ||
| 1329 | DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1 | ||
| 1330 | TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ | ||
| 1331 | fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA | ||
| 1332 | sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU | ||
| 1333 | WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS | ||
| 1334 | nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH | ||
| 1335 | dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip | ||
| 1336 | NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC | ||
| 1337 | AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF | ||
| 1338 | MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH | ||
| 1339 | ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB | ||
| 1340 | uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl | ||
| 1341 | PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP | ||
| 1342 | JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/ | ||
| 1343 | gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2 | ||
| 1344 | j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6 | ||
| 1345 | 5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB | ||
| 1346 | o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS | ||
| 1347 | /jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z | ||
| 1348 | Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE | ||
| 1349 | W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D | ||
| 1350 | hNQ+IIX3Sj0rnP0qCglN6oH4EZw= | ||
| 1351 | -----END CERTIFICATE----- | ||
| 1352 | |||
| 1353 | # Issuer: O=certSIGN OU=certSIGN ROOT CA | ||
| 1354 | # Subject: O=certSIGN OU=certSIGN ROOT CA | ||
| 1355 | # Label: "certSIGN ROOT CA" | ||
| 1356 | # Serial: 35210227249154 | ||
| 1357 | # MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17 | ||
| 1358 | # SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b | ||
| 1359 | # SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb | ||
| 1360 | -----BEGIN CERTIFICATE----- | ||
| 1361 | MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT | ||
| 1362 | AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD | ||
| 1363 | QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP | ||
| 1364 | MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC | ||
| 1365 | ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do | ||
| 1366 | 0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ | ||
| 1367 | UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d | ||
| 1368 | RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ | ||
| 1369 | OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv | ||
| 1370 | JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C | ||
| 1371 | AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O | ||
| 1372 | BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ | ||
| 1373 | LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY | ||
| 1374 | MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ | ||
| 1375 | 44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I | ||
| 1376 | Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw | ||
| 1377 | i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN | ||
| 1378 | 9u6wWk5JRFRYX0KD | ||
| 1379 | -----END CERTIFICATE----- | ||
| 1380 | |||
| 1381 | # Issuer: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only | ||
| 1382 | # Subject: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only | ||
| 1383 | # Label: "GeoTrust Primary Certification Authority - G3" | ||
| 1384 | # Serial: 28809105769928564313984085209975885599 | ||
| 1385 | # MD5 Fingerprint: b5:e8:34:36:c9:10:44:58:48:70:6d:2e:83:d4:b8:05 | ||
| 1386 | # SHA1 Fingerprint: 03:9e:ed:b8:0b:e7:a0:3c:69:53:89:3b:20:d2:d9:32:3a:4c:2a:fd | ||
| 1387 | # SHA256 Fingerprint: b4:78:b8:12:25:0d:f8:78:63:5c:2a:a7:ec:7d:15:5e:aa:62:5e:e8:29:16:e2:cd:29:43:61:88:6c:d1:fb:d4 | ||
| 1388 | -----BEGIN CERTIFICATE----- | ||
| 1389 | MIID/jCCAuagAwIBAgIQFaxulBmyeUtB9iepwxgPHzANBgkqhkiG9w0BAQsFADCB | ||
| 1390 | mDELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsT | ||
| 1391 | MChjKSAyMDA4IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25s | ||
| 1392 | eTE2MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhv | ||
| 1393 | cml0eSAtIEczMB4XDTA4MDQwMjAwMDAwMFoXDTM3MTIwMTIzNTk1OVowgZgxCzAJ | ||
| 1394 | BgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykg | ||
| 1395 | MjAwOCBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0 | ||
| 1396 | BgNVBAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg | ||
| 1397 | LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANziXmJYHTNXOTIz | ||
| 1398 | +uvLh4yn1ErdBojqZI4xmKU4kB6Yzy5jK/BGvESyiaHAKAxJcCGVn2TAppMSAmUm | ||
| 1399 | hsalifD614SgcK9PGpc/BkTVyetyEH3kMSj7HGHmKAdEc5IiaacDiGydY8hS2pgn | ||
| 1400 | 5whMcD60yRLBxWeDXTPzAxHsatBT4tG6NmCUgLthY2xbF37fQJQeqw3CIShwiP/W | ||
| 1401 | JmxsYAQlTlV+fe+/lEjetx3dcI0FX4ilm/LC7urRQEFtYjgdVgbFA0dRIBn8exAL | ||
| 1402 | DmKudlW/X3e+PkkBUz2YJQN2JFodtNuJ6nnltrM7P7pMKEF/BqxqjsHQ9gUdfeZC | ||
| 1403 | huOl1UcCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYw | ||
| 1404 | HQYDVR0OBBYEFMR5yo6hTgMdHNxr2zFblD4/MH8tMA0GCSqGSIb3DQEBCwUAA4IB | ||
| 1405 | AQAtxRPPVoB7eni9n64smefv2t+UXglpp+duaIy9cr5HqQ6XErhK8WTTOd8lNNTB | ||
| 1406 | zU6B8A8ExCSzNJbGpqow32hhc9f5joWJ7w5elShKKiePEI4ufIbEAp7aDHdlDkQN | ||
| 1407 | kv39sxY2+hENHYwOB4lqKVb3cvTdFZx3NWZXqxNT2I7BQMXXExZacse3aQHEerGD | ||
| 1408 | AWh9jUGhlBjBJVz88P6DAod8DQ3PLghcSkANPuyBYeYk28rgDi0Hsj5W3I31QYUH | ||
| 1409 | SJsMC8tJP33st/3LjWeJGqvtux6jAAgIFyqCXDFdRootD4abdNlF+9RAsXqqaC2G | ||
| 1410 | spki4cErx5z481+oghLrGREt | ||
| 1411 | -----END CERTIFICATE----- | ||
| 1412 | |||
| 1413 | # Issuer: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only | ||
| 1414 | # Subject: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only | ||
| 1415 | # Label: "thawte Primary Root CA - G2" | ||
| 1416 | # Serial: 71758320672825410020661621085256472406 | ||
| 1417 | # MD5 Fingerprint: 74:9d:ea:60:24:c4:fd:22:53:3e:cc:3a:72:d9:29:4f | ||
| 1418 | # SHA1 Fingerprint: aa:db:bc:22:23:8f:c4:01:a1:27:bb:38:dd:f4:1d:db:08:9e:f0:12 | ||
| 1419 | # SHA256 Fingerprint: a4:31:0d:50:af:18:a6:44:71:90:37:2a:86:af:af:8b:95:1f:fb:43:1d:83:7f:1e:56:88:b4:59:71:ed:15:57 | ||
| 1420 | -----BEGIN CERTIFICATE----- | ||
| 1421 | MIICiDCCAg2gAwIBAgIQNfwmXNmET8k9Jj1Xm67XVjAKBggqhkjOPQQDAzCBhDEL | ||
| 1422 | MAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjE4MDYGA1UECxMvKGMp | ||
| 1423 | IDIwMDcgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAi | ||
| 1424 | BgNVBAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMjAeFw0wNzExMDUwMDAw | ||
| 1425 | MDBaFw0zODAxMTgyMzU5NTlaMIGEMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhh | ||
| 1426 | d3RlLCBJbmMuMTgwNgYDVQQLEy8oYykgMjAwNyB0aGF3dGUsIEluYy4gLSBGb3Ig | ||
| 1427 | YXV0aG9yaXplZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9v | ||
| 1428 | dCBDQSAtIEcyMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEotWcgnuVnfFSeIf+iha/ | ||
| 1429 | BebfowJPDQfGAFG6DAJSLSKkQjnE/o/qycG+1E3/n3qe4rF8mq2nhglzh9HnmuN6 | ||
| 1430 | papu+7qzcMBniKI11KOasf2twu8x+qi58/sIxpHR+ymVo0IwQDAPBgNVHRMBAf8E | ||
| 1431 | BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUmtgAMADna3+FGO6Lts6K | ||
| 1432 | DPgR4bswCgYIKoZIzj0EAwMDaQAwZgIxAN344FdHW6fmCsO99YCKlzUNG4k8VIZ3 | ||
| 1433 | KMqh9HneteY4sPBlcIx/AlTCv//YoT7ZzwIxAMSNlPzcU9LcnXgWHxUzI1NS41ox | ||
| 1434 | XZ3Krr0TKUQNJ1uo52icEvdYPy5yAlejj6EULg== | ||
| 1435 | -----END CERTIFICATE----- | ||
| 1436 | |||
| 1437 | # Issuer: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only | ||
| 1438 | # Subject: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only | ||
| 1439 | # Label: "thawte Primary Root CA - G3" | ||
| 1440 | # Serial: 127614157056681299805556476275995414779 | ||
| 1441 | # MD5 Fingerprint: fb:1b:5d:43:8a:94:cd:44:c6:76:f2:43:4b:47:e7:31 | ||
| 1442 | # SHA1 Fingerprint: f1:8b:53:8d:1b:e9:03:b6:a6:f0:56:43:5b:17:15:89:ca:f3:6b:f2 | ||
| 1443 | # SHA256 Fingerprint: 4b:03:f4:58:07:ad:70:f2:1b:fc:2c:ae:71:c9:fd:e4:60:4c:06:4c:f5:ff:b6:86:ba:e5:db:aa:d7:fd:d3:4c | ||
| 1444 | -----BEGIN CERTIFICATE----- | ||
| 1445 | MIIEKjCCAxKgAwIBAgIQYAGXt0an6rS0mtZLL/eQ+zANBgkqhkiG9w0BAQsFADCB | ||
| 1446 | rjELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf | ||
| 1447 | Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw | ||
| 1448 | MDggdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAiBgNV | ||
| 1449 | BAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMzAeFw0wODA0MDIwMDAwMDBa | ||
| 1450 | Fw0zNzEyMDEyMzU5NTlaMIGuMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhhd3Rl | ||
| 1451 | LCBJbmMuMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9uIFNlcnZpY2VzIERpdmlzaW9u | ||
| 1452 | MTgwNgYDVQQLEy8oYykgMjAwOCB0aGF3dGUsIEluYy4gLSBGb3IgYXV0aG9yaXpl | ||
| 1453 | ZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9vdCBDQSAtIEcz | ||
| 1454 | MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsr8nLPvb2FvdeHsbnndm | ||
| 1455 | gcs+vHyu86YnmjSjaDFxODNi5PNxZnmxqWWjpYvVj2AtP0LMqmsywCPLLEHd5N/8 | ||
| 1456 | YZzic7IilRFDGF/Eth9XbAoFWCLINkw6fKXRz4aviKdEAhN0cXMKQlkC+BsUa0Lf | ||
| 1457 | b1+6a4KinVvnSr0eAXLbS3ToO39/fR8EtCab4LRarEc9VbjXsCZSKAExQGbY2SS9 | ||
| 1458 | 9irY7CFJXJv2eul/VTV+lmuNk5Mny5K76qxAwJ/C+IDPXfRa3M50hqY+bAtTyr2S | ||
| 1459 | zhkGcuYMXDhpxwTWvGzOW/b3aJzcJRVIiKHpqfiYnODz1TEoYRFsZ5aNOZnLwkUk | ||
| 1460 | OQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNV | ||
| 1461 | HQ4EFgQUrWyqlGCc7eT/+j4KdCtjA/e2Wb8wDQYJKoZIhvcNAQELBQADggEBABpA | ||
| 1462 | 2JVlrAmSicY59BDlqQ5mU1143vokkbvnRFHfxhY0Cu9qRFHqKweKA3rD6z8KLFIW | ||
| 1463 | oCtDuSWQP3CpMyVtRRooOyfPqsMpQhvfO0zAMzRbQYi/aytlryjvsvXDqmbOe1bu | ||
| 1464 | t8jLZ8HJnBoYuMTDSQPxYA5QzUbF83d597YV4Djbxy8ooAw/dyZ02SUS2jHaGh7c | ||
| 1465 | KUGRIjxpp7sC8rZcJwOJ9Abqm+RyguOhCcHpABnTPtRwa7pxpqpYrvS76Wy274fM | ||
| 1466 | m7v/OeZWYdMKp8RcTGB7BXcmer/YB1IsYvdwY9k5vG8cwnncdimvzsUsZAReiDZu | ||
| 1467 | MdRAGmI0Nj81Aa6sY6A= | ||
| 1468 | -----END CERTIFICATE----- | ||
| 1469 | |||
| 1470 | # Issuer: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only | ||
| 1471 | # Subject: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only | ||
| 1472 | # Label: "GeoTrust Primary Certification Authority - G2" | ||
| 1473 | # Serial: 80682863203381065782177908751794619243 | ||
| 1474 | # MD5 Fingerprint: 01:5e:d8:6b:bd:6f:3d:8e:a1:31:f8:12:e0:98:73:6a | ||
| 1475 | # SHA1 Fingerprint: 8d:17:84:d5:37:f3:03:7d:ec:70:fe:57:8b:51:9a:99:e6:10:d7:b0 | ||
| 1476 | # SHA256 Fingerprint: 5e:db:7a:c4:3b:82:a0:6a:87:61:e8:d7:be:49:79:eb:f2:61:1f:7d:d7:9b:f9:1c:1c:6b:56:6a:21:9e:d7:66 | ||
| 1477 | -----BEGIN CERTIFICATE----- | ||
| 1478 | MIICrjCCAjWgAwIBAgIQPLL0SAoA4v7rJDteYD7DazAKBggqhkjOPQQDAzCBmDEL | ||
| 1479 | MAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChj | ||
| 1480 | KSAyMDA3IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTE2 | ||
| 1481 | MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0 | ||
| 1482 | eSAtIEcyMB4XDTA3MTEwNTAwMDAwMFoXDTM4MDExODIzNTk1OVowgZgxCzAJBgNV | ||
| 1483 | BAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykgMjAw | ||
| 1484 | NyBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0BgNV | ||
| 1485 | BAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBH | ||
| 1486 | MjB2MBAGByqGSM49AgEGBSuBBAAiA2IABBWx6P0DFUPlrOuHNxFi79KDNlJ9RVcL | ||
| 1487 | So17VDs6bl8VAsBQps8lL33KSLjHUGMcKiEIfJo22Av+0SbFWDEwKCXzXV2juLal | ||
| 1488 | tJLtbCyf691DiaI8S0iRHVDsJt/WYC69IaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO | ||
| 1489 | BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBVfNVdRVfslsq0DafwBo/q+EVXVMAoG | ||
| 1490 | CCqGSM49BAMDA2cAMGQCMGSWWaboCd6LuvpaiIjwH5HTRqjySkwCY/tsXzjbLkGT | ||
| 1491 | qQ7mndwxHLKgpxgceeHHNgIwOlavmnRs9vuD4DPTCF+hnMJbn0bWtsuRBmOiBucz | ||
| 1492 | rD6ogRLQy7rQkgu2npaqBA+K | ||
| 1493 | -----END CERTIFICATE----- | ||
| 1494 | |||
| 1495 | # Issuer: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only | ||
| 1496 | # Subject: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only | ||
| 1497 | # Label: "VeriSign Universal Root Certification Authority" | ||
| 1498 | # Serial: 85209574734084581917763752644031726877 | ||
| 1499 | # MD5 Fingerprint: 8e:ad:b5:01:aa:4d:81:e4:8c:1d:d1:e1:14:00:95:19 | ||
| 1500 | # SHA1 Fingerprint: 36:79:ca:35:66:87:72:30:4d:30:a5:fb:87:3b:0f:a7:7b:b7:0d:54 | ||
| 1501 | # SHA256 Fingerprint: 23:99:56:11:27:a5:71:25:de:8c:ef:ea:61:0d:df:2f:a0:78:b5:c8:06:7f:4e:82:82:90:bf:b8:60:e8:4b:3c | ||
| 1502 | -----BEGIN CERTIFICATE----- | ||
| 1503 | MIIEuTCCA6GgAwIBAgIQQBrEZCGzEyEDDrvkEhrFHTANBgkqhkiG9w0BAQsFADCB | ||
| 1504 | vTELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL | ||
| 1505 | ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwOCBWZXJp | ||
| 1506 | U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MTgwNgYDVQQDEy9W | ||
| 1507 | ZXJpU2lnbiBVbml2ZXJzYWwgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe | ||
| 1508 | Fw0wODA0MDIwMDAwMDBaFw0zNzEyMDEyMzU5NTlaMIG9MQswCQYDVQQGEwJVUzEX | ||
| 1509 | MBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0 | ||
| 1510 | IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAyMDA4IFZlcmlTaWduLCBJbmMuIC0gRm9y | ||
| 1511 | IGF1dGhvcml6ZWQgdXNlIG9ubHkxODA2BgNVBAMTL1ZlcmlTaWduIFVuaXZlcnNh | ||
| 1512 | bCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEF | ||
| 1513 | AAOCAQ8AMIIBCgKCAQEAx2E3XrEBNNti1xWb/1hajCMj1mCOkdeQmIN65lgZOIzF | ||
| 1514 | 9uVkhbSicfvtvbnazU0AtMgtc6XHaXGVHzk8skQHnOgO+k1KxCHfKWGPMiJhgsWH | ||
| 1515 | H26MfF8WIFFE0XBPV+rjHOPMee5Y2A7Cs0WTwCznmhcrewA3ekEzeOEz4vMQGn+H | ||
| 1516 | LL729fdC4uW/h2KJXwBL38Xd5HVEMkE6HnFuacsLdUYI0crSK5XQz/u5QGtkjFdN | ||
| 1517 | /BMReYTtXlT2NJ8IAfMQJQYXStrxHXpma5hgZqTZ79IugvHw7wnqRMkVauIDbjPT | ||
| 1518 | rJ9VAMf2CGqUuV/c4DPxhGD5WycRtPwW8rtWaoAljQIDAQABo4GyMIGvMA8GA1Ud | ||
| 1519 | EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMG0GCCsGAQUFBwEMBGEwX6FdoFsw | ||
| 1520 | WTBXMFUWCWltYWdlL2dpZjAhMB8wBwYFKw4DAhoEFI/l0xqGrI2Oa8PPgGrUSBgs | ||
| 1521 | exkuMCUWI2h0dHA6Ly9sb2dvLnZlcmlzaWduLmNvbS92c2xvZ28uZ2lmMB0GA1Ud | ||
| 1522 | DgQWBBS2d/ppSEefUxLVwuoHMnYH0ZcHGTANBgkqhkiG9w0BAQsFAAOCAQEASvj4 | ||
| 1523 | sAPmLGd75JR3Y8xuTPl9Dg3cyLk1uXBPY/ok+myDjEedO2Pzmvl2MpWRsXe8rJq+ | ||
| 1524 | seQxIcaBlVZaDrHC1LGmWazxY8u4TB1ZkErvkBYoH1quEPuBUDgMbMzxPcP1Y+Oz | ||
| 1525 | 4yHJJDnp/RVmRvQbEdBNc6N9Rvk97ahfYtTxP/jgdFcrGJ2BtMQo2pSXpXDrrB2+ | ||
| 1526 | BxHw1dvd5Yzw1TKwg+ZX4o+/vqGqvz0dtdQ46tewXDpPaj+PwGZsY6rp2aQW9IHR | ||
| 1527 | lRQOfc2VNNnSj3BzgXucfr2YYdhFh5iQxeuGMMY1v/D/w1WIg0vvBZIGcfK4mJO3 | ||
| 1528 | 7M2CYfE45k+XmCpajQ== | ||
| 1529 | -----END CERTIFICATE----- | ||
| 1530 | |||
| 1531 | # Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only | ||
| 1532 | # Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only | ||
| 1533 | # Label: "VeriSign Class 3 Public Primary Certification Authority - G4" | ||
| 1534 | # Serial: 63143484348153506665311985501458640051 | ||
| 1535 | # MD5 Fingerprint: 3a:52:e1:e7:fd:6f:3a:e3:6f:f3:6f:99:1b:f9:22:41 | ||
| 1536 | # SHA1 Fingerprint: 22:d5:d8:df:8f:02:31:d1:8d:f7:9d:b7:cf:8a:2d:64:c9:3f:6c:3a | ||
| 1537 | # SHA256 Fingerprint: 69:dd:d7:ea:90:bb:57:c9:3e:13:5d:c8:5e:a6:fc:d5:48:0b:60:32:39:bd:c4:54:fc:75:8b:2a:26:cf:7f:79 | ||
| 1538 | -----BEGIN CERTIFICATE----- | ||
| 1539 | MIIDhDCCAwqgAwIBAgIQL4D+I4wOIg9IZxIokYesszAKBggqhkjOPQQDAzCByjEL | ||
| 1540 | MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW | ||
| 1541 | ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2ln | ||
| 1542 | biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp | ||
| 1543 | U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y | ||
| 1544 | aXR5IC0gRzQwHhcNMDcxMTA1MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCByjELMAkG | ||
| 1545 | A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJp | ||
| 1546 | U2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2lnbiwg | ||
| 1547 | SW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2ln | ||
| 1548 | biBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5 | ||
| 1549 | IC0gRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASnVnp8Utpkmw4tXNherJI9/gHm | ||
| 1550 | GUo9FANL+mAnINmDiWn6VMaaGF5VKmTeBvaNSjutEDxlPZCIBIngMGGzrl0Bp3ve | ||
| 1551 | fLK+ymVhAIau2o970ImtTR1ZmkGxvEeA3J5iw/mjgbIwga8wDwYDVR0TAQH/BAUw | ||
| 1552 | AwEB/zAOBgNVHQ8BAf8EBAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJ | ||
| 1553 | aW1hZ2UvZ2lmMCEwHzAHBgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYj | ||
| 1554 | aHR0cDovL2xvZ28udmVyaXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFLMW | ||
| 1555 | kf3upm7ktS5Jj4d4gYDs5bG1MAoGCCqGSM49BAMDA2gAMGUCMGYhDBgmYFo4e1ZC | ||
| 1556 | 4Kf8NoRRkSAsdk1DPcQdhCPQrNZ8NQbOzWm9kA3bbEhCHQ6qQgIxAJw9SDkjOVga | ||
| 1557 | FRJZap7v1VmyHVIsmXHNxynfGyphe3HR3vPA5Q06Sqotp9iGKt0uEA== | ||
| 1558 | -----END CERTIFICATE----- | ||
| 1559 | |||
| 1560 | # Issuer: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) | ||
| 1561 | # Subject: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) | ||
| 1562 | # Label: "NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny" | ||
| 1563 | # Serial: 80544274841616 | ||
| 1564 | # MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88 | ||
| 1565 | # SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91 | ||
| 1566 | # SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98 | ||
| 1567 | -----BEGIN CERTIFICATE----- | ||
| 1568 | MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG | ||
| 1569 | EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3 | ||
| 1570 | MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl | ||
| 1571 | cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR | ||
| 1572 | dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB | ||
| 1573 | pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM | ||
| 1574 | b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm | ||
| 1575 | aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz | ||
| 1576 | IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A | ||
| 1577 | MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT | ||
| 1578 | lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz | ||
| 1579 | AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5 | ||
| 1580 | VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG | ||
| 1581 | ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2 | ||
| 1582 | BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG | ||
| 1583 | AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M | ||
| 1584 | U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh | ||
| 1585 | bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C | ||
| 1586 | +C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC | ||
| 1587 | bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F | ||
| 1588 | uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 | ||
| 1589 | XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= | ||
| 1590 | -----END CERTIFICATE----- | ||
| 1591 | |||
| 1592 | # Issuer: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden | ||
| 1593 | # Subject: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden | ||
| 1594 | # Label: "Staat der Nederlanden Root CA - G2" | ||
| 1595 | # Serial: 10000012 | ||
| 1596 | # MD5 Fingerprint: 7c:a5:0f:f8:5b:9a:7d:6d:30:ae:54:5a:e3:42:a2:8a | ||
| 1597 | # SHA1 Fingerprint: 59:af:82:79:91:86:c7:b4:75:07:cb:cf:03:57:46:eb:04:dd:b7:16 | ||
| 1598 | # SHA256 Fingerprint: 66:8c:83:94:7d:a6:3b:72:4b:ec:e1:74:3c:31:a0:e6:ae:d0:db:8e:c5:b3:1b:e3:77:bb:78:4f:91:b6:71:6f | ||
| 1599 | -----BEGIN CERTIFICATE----- | ||
| 1600 | MIIFyjCCA7KgAwIBAgIEAJiWjDANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO | ||
| 1601 | TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh | ||
| 1602 | dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEcyMB4XDTA4MDMyNjExMTgxN1oX | ||
| 1603 | DTIwMDMyNTExMDMxMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl | ||
| 1604 | ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv | ||
| 1605 | b3QgQ0EgLSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMVZ5291 | ||
| 1606 | qj5LnLW4rJ4L5PnZyqtdj7U5EILXr1HgO+EASGrP2uEGQxGZqhQlEq0i6ABtQ8Sp | ||
| 1607 | uOUfiUtnvWFI7/3S4GCI5bkYYCjDdyutsDeqN95kWSpGV+RLufg3fNU254DBtvPU | ||
| 1608 | Z5uW6M7XxgpT0GtJlvOjCwV3SPcl5XCsMBQgJeN/dVrlSPhOewMHBPqCYYdu8DvE | ||
| 1609 | pMfQ9XQ+pV0aCPKbJdL2rAQmPlU6Yiile7Iwr/g3wtG61jj99O9JMDeZJiFIhQGp | ||
| 1610 | 5Rbn3JBV3w/oOM2ZNyFPXfUib2rFEhZgF1XyZWampzCROME4HYYEhLoaJXhena/M | ||
| 1611 | UGDWE4dS7WMfbWV9whUYdMrhfmQpjHLYFhN9C0lK8SgbIHRrxT3dsKpICT0ugpTN | ||
| 1612 | GmXZK4iambwYfp/ufWZ8Pr2UuIHOzZgweMFvZ9C+X+Bo7d7iscksWXiSqt8rYGPy | ||
| 1613 | 5V6548r6f1CGPqI0GAwJaCgRHOThuVw+R7oyPxjMW4T182t0xHJ04eOLoEq9jWYv | ||
| 1614 | 6q012iDTiIJh8BIitrzQ1aTsr1SIJSQ8p22xcik/Plemf1WvbibG/ufMQFxRRIEK | ||
| 1615 | eN5KzlW/HdXZt1bv8Hb/C3m1r737qWmRRpdogBQ2HbN/uymYNqUg+oJgYjOk7Na6 | ||
| 1616 | B6duxc8UpufWkjTYgfX8HV2qXB72o007uPc5AgMBAAGjgZcwgZQwDwYDVR0TAQH/ | ||
| 1617 | BAUwAwEB/zBSBgNVHSAESzBJMEcGBFUdIAAwPzA9BggrBgEFBQcCARYxaHR0cDov | ||
| 1618 | L3d3dy5wa2lvdmVyaGVpZC5ubC9wb2xpY2llcy9yb290LXBvbGljeS1HMjAOBgNV | ||
| 1619 | HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJFoMocVHYnitfGsNig0jQt8YojrMA0GCSqG | ||
| 1620 | SIb3DQEBCwUAA4ICAQCoQUpnKpKBglBu4dfYszk78wIVCVBR7y29JHuIhjv5tLyS | ||
| 1621 | CZa59sCrI2AGeYwRTlHSeYAz+51IvuxBQ4EffkdAHOV6CMqqi3WtFMTC6GY8ggen | ||
| 1622 | 5ieCWxjmD27ZUD6KQhgpxrRW/FYQoAUXvQwjf/ST7ZwaUb7dRUG/kSS0H4zpX897 | ||
| 1623 | IZmflZ85OkYcbPnNe5yQzSipx6lVu6xiNGI1E0sUOlWDuYaNkqbG9AclVMwWVxJK | ||
| 1624 | gnjIFNkXgiYtXSAfea7+1HAWFpWD2DU5/1JddRwWxRNVz0fMdWVSSt7wsKfkCpYL | ||
| 1625 | +63C4iWEst3kvX5ZbJvw8NjnyvLplzh+ib7M+zkXYT9y2zqR2GUBGR2tUKRXCnxL | ||
| 1626 | vJxxcypFURmFzI79R6d0lR2o0a9OF7FpJsKqeFdbxU2n5Z4FF5TKsl+gSRiNNOkm | ||
| 1627 | bEgeqmiSBeGCc1qb3AdbCG19ndeNIdn8FCCqwkXfP+cAslHkwvgFuXkajDTznlvk | ||
| 1628 | N1trSt8sV4pAWja63XVECDdCcAz+3F4hoKOKwJCcaNpQ5kUQR3i2TtJlycM33+FC | ||
| 1629 | Y7BXN0Ute4qcvwXqZVUz9zkQxSgqIXobisQk+T8VyJoVIPVVYpbtbZNQvOSqeK3Z | ||
| 1630 | ywplh6ZmwcSBo3c6WB4L7oOLnR7SUqTMHW+wmG2UMbX4cQrcufx9MmDm66+KAQ== | ||
| 1631 | -----END CERTIFICATE----- | ||
| 1632 | |||
| 1633 | # Issuer: CN=Hongkong Post Root CA 1 O=Hongkong Post | ||
| 1634 | # Subject: CN=Hongkong Post Root CA 1 O=Hongkong Post | ||
| 1635 | # Label: "Hongkong Post Root CA 1" | ||
| 1636 | # Serial: 1000 | ||
| 1637 | # MD5 Fingerprint: a8:0d:6f:39:78:b9:43:6d:77:42:6d:98:5a:cc:23:ca | ||
| 1638 | # SHA1 Fingerprint: d6:da:a8:20:8d:09:d2:15:4d:24:b5:2f:cb:34:6e:b2:58:b2:8a:58 | ||
| 1639 | # SHA256 Fingerprint: f9:e6:7d:33:6c:51:00:2a:c0:54:c6:32:02:2d:66:dd:a2:e7:e3:ff:f1:0a:d0:61:ed:31:d8:bb:b4:10:cf:b2 | ||
| 1640 | -----BEGIN CERTIFICATE----- | ||
| 1641 | MIIDMDCCAhigAwIBAgICA+gwDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UEBhMCSEsx | ||
| 1642 | FjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdrb25nIFBvc3Qg | ||
| 1643 | Um9vdCBDQSAxMB4XDTAzMDUxNTA1MTMxNFoXDTIzMDUxNTA0NTIyOVowRzELMAkG | ||
| 1644 | A1UEBhMCSEsxFjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdr | ||
| 1645 | b25nIFBvc3QgUm9vdCBDQSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC | ||
| 1646 | AQEArP84tulmAknjorThkPlAj3n54r15/gK97iSSHSL22oVyaf7XPwnU3ZG1ApzQ | ||
| 1647 | jVrhVcNQhrkpJsLj2aDxaQMoIIBFIi1WpztUlVYiWR8o3x8gPW2iNr4joLFutbEn | ||
| 1648 | PzlTCeqrauh0ssJlXI6/fMN4hM2eFvz1Lk8gKgifd/PFHsSaUmYeSF7jEAaPIpjh | ||
| 1649 | ZY4bXSNmO7ilMlHIhqqhqZ5/dpTCpmy3QfDVyAY45tQM4vM7TG1QjMSDJ8EThFk9 | ||
| 1650 | nnV0ttgCXjqQesBCNnLsak3c78QA3xMYV18meMjWCnl3v/evt3a5pQuEF10Q6m/h | ||
| 1651 | q5URX208o1xNg1vysxmKgIsLhwIDAQABoyYwJDASBgNVHRMBAf8ECDAGAQH/AgED | ||
| 1652 | MA4GA1UdDwEB/wQEAwIBxjANBgkqhkiG9w0BAQUFAAOCAQEADkbVPK7ih9legYsC | ||
| 1653 | mEEIjEy82tvuJxuC52pF7BaLT4Wg87JwvVqWuspube5Gi27nKi6Wsxkz67SfqLI3 | ||
| 1654 | 7piol7Yutmcn1KZJ/RyTZXaeQi/cImyaT/JaFTmxcdcrUehtHJjA2Sr0oYJ71clB | ||
| 1655 | oiMBdDhViw+5LmeiIAQ32pwL0xch4I+XeTRvhEgCIDMb5jREn5Fw9IBehEPCKdJs | ||
| 1656 | EhTkYY2sEJCehFC78JZvRZ+K88psT/oROhUVRsPNH4NbLUES7VBnQRM9IauUiqpO | ||
| 1657 | fMGx+6fWtScvl6tu4B3i0RwsH0Ti/L6RoZz71ilTc4afU9hDDl3WY4JxHYB0yvbi | ||
| 1658 | AmvZWg== | ||
| 1659 | -----END CERTIFICATE----- | ||
| 1660 | |||
| 1661 | # Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. | ||
| 1662 | # Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. | ||
| 1663 | # Label: "SecureSign RootCA11" | ||
| 1664 | # Serial: 1 | ||
| 1665 | # MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26 | ||
| 1666 | # SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3 | ||
| 1667 | # SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12 | ||
| 1668 | -----BEGIN CERTIFICATE----- | ||
| 1669 | MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr | ||
| 1670 | MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG | ||
| 1671 | A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0 | ||
| 1672 | MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp | ||
| 1673 | Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD | ||
| 1674 | QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz | ||
| 1675 | i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8 | ||
| 1676 | h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV | ||
| 1677 | MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9 | ||
| 1678 | UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni | ||
| 1679 | 8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC | ||
| 1680 | h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD | ||
| 1681 | VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB | ||
| 1682 | AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm | ||
| 1683 | KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ | ||
| 1684 | X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr | ||
| 1685 | QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5 | ||
| 1686 | pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN | ||
| 1687 | QSdJQO7e5iNEOdyhIta6A/I= | ||
| 1688 | -----END CERTIFICATE----- | ||
| 1689 | |||
| 1690 | # Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. | ||
| 1691 | # Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. | ||
| 1692 | # Label: "Microsec e-Szigno Root CA 2009" | ||
| 1693 | # Serial: 14014712776195784473 | ||
| 1694 | # MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1 | ||
| 1695 | # SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e | ||
| 1696 | # SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78 | ||
| 1697 | -----BEGIN CERTIFICATE----- | ||
| 1698 | MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD | ||
| 1699 | VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0 | ||
| 1700 | ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G | ||
| 1701 | CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y | ||
| 1702 | OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx | ||
| 1703 | FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp | ||
| 1704 | Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o | ||
| 1705 | dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP | ||
| 1706 | kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc | ||
| 1707 | cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U | ||
| 1708 | fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7 | ||
| 1709 | N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC | ||
| 1710 | xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1 | ||
| 1711 | +rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G | ||
| 1712 | A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM | ||
| 1713 | Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG | ||
| 1714 | SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h | ||
| 1715 | mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk | ||
| 1716 | ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775 | ||
| 1717 | tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c | ||
| 1718 | 2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t | ||
| 1719 | HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW | ||
| 1720 | -----END CERTIFICATE----- | ||
| 1721 | |||
| 1722 | # Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 | ||
| 1723 | # Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 | ||
| 1724 | # Label: "GlobalSign Root CA - R3" | ||
| 1725 | # Serial: 4835703278459759426209954 | ||
| 1726 | # MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28 | ||
| 1727 | # SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad | ||
| 1728 | # SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b | ||
| 1729 | -----BEGIN CERTIFICATE----- | ||
| 1730 | MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G | ||
| 1731 | A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp | ||
| 1732 | Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4 | ||
| 1733 | MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG | ||
| 1734 | A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI | ||
| 1735 | hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8 | ||
| 1736 | RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT | ||
| 1737 | gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm | ||
| 1738 | KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd | ||
| 1739 | QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ | ||
| 1740 | XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw | ||
| 1741 | DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o | ||
| 1742 | LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU | ||
| 1743 | RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp | ||
| 1744 | jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK | ||
| 1745 | 6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX | ||
| 1746 | mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs | ||
| 1747 | Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH | ||
| 1748 | WD9f | ||
| 1749 | -----END CERTIFICATE----- | ||
| 1750 | |||
| 1751 | # Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 | ||
| 1752 | # Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 | ||
| 1753 | # Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068" | ||
| 1754 | # Serial: 6047274297262753887 | ||
| 1755 | # MD5 Fingerprint: 73:3a:74:7a:ec:bb:a3:96:a6:c2:e4:e2:c8:9b:c0:c3 | ||
| 1756 | # SHA1 Fingerprint: ae:c5:fb:3f:c8:e1:bf:c4:e5:4f:03:07:5a:9a:e8:00:b7:f7:b6:fa | ||
| 1757 | # SHA256 Fingerprint: 04:04:80:28:bf:1f:28:64:d4:8f:9a:d4:d8:32:94:36:6a:82:88:56:55:3f:3b:14:30:3f:90:14:7f:5d:40:ef | ||
| 1758 | -----BEGIN CERTIFICATE----- | ||
| 1759 | MIIGFDCCA/ygAwIBAgIIU+w77vuySF8wDQYJKoZIhvcNAQEFBQAwUTELMAkGA1UE | ||
| 1760 | BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h | ||
| 1761 | cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0wOTA1MjAwODM4MTVaFw0zMDEy | ||
| 1762 | MzEwODM4MTVaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg | ||
| 1763 | Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi | ||
| 1764 | MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9 | ||
| 1765 | thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM | ||
| 1766 | cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG | ||
| 1767 | L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i | ||
| 1768 | NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h | ||
| 1769 | X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b | ||
| 1770 | m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy | ||
| 1771 | Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja | ||
| 1772 | EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T | ||
| 1773 | KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF | ||
| 1774 | 6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh | ||
| 1775 | OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMBIGA1UdEwEB/wQIMAYBAf8CAQEwDgYD | ||
| 1776 | VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRlzeurNR4APn7VdMActHNHDhpkLzCBpgYD | ||
| 1777 | VR0gBIGeMIGbMIGYBgRVHSAAMIGPMC8GCCsGAQUFBwIBFiNodHRwOi8vd3d3LmZp | ||
| 1778 | cm1hcHJvZmVzaW9uYWwuY29tL2NwczBcBggrBgEFBQcCAjBQHk4AUABhAHMAZQBv | ||
| 1779 | ACAAZABlACAAbABhACAAQgBvAG4AYQBuAG8AdgBhACAANAA3ACAAQgBhAHIAYwBl | ||
| 1780 | AGwAbwBuAGEAIAAwADgAMAAxADcwDQYJKoZIhvcNAQEFBQADggIBABd9oPm03cXF | ||
| 1781 | 661LJLWhAqvdpYhKsg9VSytXjDvlMd3+xDLx51tkljYyGOylMnfX40S2wBEqgLk9 | ||
| 1782 | am58m9Ot/MPWo+ZkKXzR4Tgegiv/J2Wv+xYVxC5xhOW1//qkR71kMrv2JYSiJ0L1 | ||
| 1783 | ILDCExARzRAVukKQKtJE4ZYm6zFIEv0q2skGz3QeqUvVhyj5eTSSPi5E6PaPT481 | ||
| 1784 | PyWzOdxjKpBrIF/EUhJOlywqrJ2X3kjyo2bbwtKDlaZmp54lD+kLM5FlClrD2VQS | ||
| 1785 | 3a/DTg4fJl4N3LON7NWBcN7STyQF82xO9UxJZo3R/9ILJUFI/lGExkKvgATP0H5k | ||
| 1786 | SeTy36LssUzAKh3ntLFlosS88Zj0qnAHY7S42jtM+kAiMFsRpvAFDsYCA0irhpuF | ||
| 1787 | 3dvd6qJ2gHN99ZwExEWN57kci57q13XRcrHedUTnQn3iV2t93Jm8PYMo6oCTjcVM | ||
| 1788 | ZcFwgbg4/EMxsvYDNEeyrPsiBsse3RdHHF9mudMaotoRsaS8I8nkvof/uZS2+F0g | ||
| 1789 | StRf571oe2XyFR7SOqkt6dhrJKyXWERHrVkY8SFlcN7ONGCoQPHzPKTDKCOM/icz | ||
| 1790 | Q0CgFzzr6juwcqajuUpLXhZI9LK8yIySxZ2frHI2vDSANGupi5LAuBft7HZT9SQB | ||
| 1791 | jLMi6Et8Vcad+qMUu2WFbm5PEn4KPJ2V | ||
| 1792 | -----END CERTIFICATE----- | ||
| 1793 | |||
| 1794 | # Issuer: CN=Izenpe.com O=IZENPE S.A. | ||
| 1795 | # Subject: CN=Izenpe.com O=IZENPE S.A. | ||
| 1796 | # Label: "Izenpe.com" | ||
| 1797 | # Serial: 917563065490389241595536686991402621 | ||
| 1798 | # MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73 | ||
| 1799 | # SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19 | ||
| 1800 | # SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f | ||
| 1801 | -----BEGIN CERTIFICATE----- | ||
| 1802 | MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4 | ||
| 1803 | MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6 | ||
| 1804 | ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD | ||
| 1805 | VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j | ||
| 1806 | b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq | ||
| 1807 | scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO | ||
| 1808 | xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H | ||
| 1809 | LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX | ||
| 1810 | uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD | ||
| 1811 | yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+ | ||
| 1812 | JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q | ||
| 1813 | rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN | ||
| 1814 | BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L | ||
| 1815 | hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB | ||
| 1816 | QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+ | ||
| 1817 | HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu | ||
| 1818 | Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg | ||
| 1819 | QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB | ||
| 1820 | BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx | ||
| 1821 | MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC | ||
| 1822 | AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA | ||
| 1823 | A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb | ||
| 1824 | laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56 | ||
| 1825 | awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo | ||
| 1826 | JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw | ||
| 1827 | LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT | ||
| 1828 | VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk | ||
| 1829 | LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb | ||
| 1830 | UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/ | ||
| 1831 | QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+ | ||
| 1832 | naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls | ||
| 1833 | QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw== | ||
| 1834 | -----END CERTIFICATE----- | ||
| 1835 | |||
| 1836 | # Issuer: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A. | ||
| 1837 | # Subject: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A. | ||
| 1838 | # Label: "Chambers of Commerce Root - 2008" | ||
| 1839 | # Serial: 11806822484801597146 | ||
| 1840 | # MD5 Fingerprint: 5e:80:9e:84:5a:0e:65:0b:17:02:f3:55:18:2a:3e:d7 | ||
| 1841 | # SHA1 Fingerprint: 78:6a:74:ac:76:ab:14:7f:9c:6a:30:50:ba:9e:a8:7e:fe:9a:ce:3c | ||
| 1842 | # SHA256 Fingerprint: 06:3e:4a:fa:c4:91:df:d3:32:f3:08:9b:85:42:e9:46:17:d8:93:d7:fe:94:4e:10:a7:93:7e:e2:9d:96:93:c0 | ||
| 1843 | -----BEGIN CERTIFICATE----- | ||
| 1844 | MIIHTzCCBTegAwIBAgIJAKPaQn6ksa7aMA0GCSqGSIb3DQEBBQUAMIGuMQswCQYD | ||
| 1845 | VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0 | ||
| 1846 | IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3 | ||
| 1847 | MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xKTAnBgNVBAMTIENoYW1iZXJz | ||
| 1848 | IG9mIENvbW1lcmNlIFJvb3QgLSAyMDA4MB4XDTA4MDgwMTEyMjk1MFoXDTM4MDcz | ||
| 1849 | MTEyMjk1MFowga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpNYWRyaWQgKHNlZSBj | ||
| 1850 | dXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29tL2FkZHJlc3MpMRIw | ||
| 1851 | EAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVyZmlybWEgUy5BLjEp | ||
| 1852 | MCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAtIDIwMDgwggIiMA0G | ||
| 1853 | CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCvAMtwNyuAWko6bHiUfaN/Gh/2NdW9 | ||
| 1854 | 28sNRHI+JrKQUrpjOyhYb6WzbZSm891kDFX29ufyIiKAXuFixrYp4YFs8r/lfTJq | ||
| 1855 | VKAyGVn+H4vXPWCGhSRv4xGzdz4gljUha7MI2XAuZPeEklPWDrCQiorjh40G072Q | ||
| 1856 | DuKZoRuGDtqaCrsLYVAGUvGef3bsyw/QHg3PmTA9HMRFEFis1tPo1+XqxQEHd9ZR | ||
| 1857 | 5gN/ikilTWh1uem8nk4ZcfUyS5xtYBkL+8ydddy/Js2Pk3g5eXNeJQ7KXOt3EgfL | ||
| 1858 | ZEFHcpOrUMPrCXZkNNI5t3YRCQ12RcSprj1qr7V9ZS+UWBDsXHyvfuK2GNnQm05a | ||
| 1859 | Sd+pZgvMPMZ4fKecHePOjlO+Bd5gD2vlGts/4+EhySnB8esHnFIbAURRPHsl18Tl | ||
| 1860 | UlRdJQfKFiC4reRB7noI/plvg6aRArBsNlVq5331lubKgdaX8ZSD6e2wsWsSaR6s | ||
| 1861 | +12pxZjptFtYer49okQ6Y1nUCyXeG0+95QGezdIp1Z8XGQpvvwyQ0wlf2eOKNcx5 | ||
| 1862 | Wk0ZN5K3xMGtr/R5JJqyAQuxr1yW84Ay+1w9mPGgP0revq+ULtlVmhduYJ1jbLhj | ||
| 1863 | ya6BXBg14JC7vjxPNyK5fuvPnnchpj04gftI2jE9K+OJ9dC1vX7gUMQSibMjmhAx | ||
| 1864 | hduub+84Mxh2EQIDAQABo4IBbDCCAWgwEgYDVR0TAQH/BAgwBgEB/wIBDDAdBgNV | ||
| 1865 | HQ4EFgQU+SSsD7K1+HnA+mCIG8TZTQKeFxkwgeMGA1UdIwSB2zCB2IAU+SSsD7K1 | ||
| 1866 | +HnA+mCIG8TZTQKeFxmhgbSkgbEwga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpN | ||
| 1867 | YWRyaWQgKHNlZSBjdXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29t | ||
| 1868 | L2FkZHJlc3MpMRIwEAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVy | ||
| 1869 | ZmlybWEgUy5BLjEpMCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAt | ||
| 1870 | IDIwMDiCCQCj2kJ+pLGu2jAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRV | ||
| 1871 | HSAAMCowKAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20w | ||
| 1872 | DQYJKoZIhvcNAQEFBQADggIBAJASryI1wqM58C7e6bXpeHxIvj99RZJe6dqxGfwW | ||
| 1873 | PJ+0W2aeaufDuV2I6A+tzyMP3iU6XsxPpcG1Lawk0lgH3qLPaYRgM+gQDROpI9CF | ||
| 1874 | 5Y57pp49chNyM/WqfcZjHwj0/gF/JM8rLFQJ3uIrbZLGOU8W6jx+ekbURWpGqOt1 | ||
| 1875 | glanq6B8aBMz9p0w8G8nOSQjKpD9kCk18pPfNKXG9/jvjA9iSnyu0/VU+I22mlaH | ||
| 1876 | FoI6M6taIgj3grrqLuBHmrS1RaMFO9ncLkVAO+rcf+g769HsJtg1pDDFOqxXnrN2 | ||
| 1877 | pSB7+R5KBWIBpih1YJeSDW4+TTdDDZIVnBgizVGZoCkaPF+KMjNbMMeJL0eYD6MD | ||
| 1878 | xvbxrN8y8NmBGuScvfaAFPDRLLmF9dijscilIeUcE5fuDr3fKanvNFNb0+RqE4QG | ||
| 1879 | tjICxFKuItLcsiFCGtpA8CnJ7AoMXOLQusxI0zcKzBIKinmwPQN/aUv0NCB9szTq | ||
| 1880 | jktk9T79syNnFQ0EuPAtwQlRPLJsFfClI9eDdOTlLsn+mCdCxqvGnrDQWzilm1De | ||
| 1881 | fhiYtUU79nm06PcaewaD+9CL2rvHvRirCG88gGtAPxkZumWK5r7VXNM21+9AUiRg | ||
| 1882 | OGcEMeyP84LG3rlV8zsxkVrctQgVrXYlCg17LofiDKYGvCYQbTed7N14jHyAxfDZ | ||
| 1883 | d0jQ | ||
| 1884 | -----END CERTIFICATE----- | ||
| 1885 | |||
| 1886 | # Issuer: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A. | ||
| 1887 | # Subject: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A. | ||
| 1888 | # Label: "Global Chambersign Root - 2008" | ||
| 1889 | # Serial: 14541511773111788494 | ||
| 1890 | # MD5 Fingerprint: 9e:80:ff:78:01:0c:2e:c1:36:bd:fe:96:90:6e:08:f3 | ||
| 1891 | # SHA1 Fingerprint: 4a:bd:ee:ec:95:0d:35:9c:89:ae:c7:52:a1:2c:5b:29:f6:d6:aa:0c | ||
| 1892 | # SHA256 Fingerprint: 13:63:35:43:93:34:a7:69:80:16:a0:d3:24:de:72:28:4e:07:9d:7b:52:20:bb:8f:bd:74:78:16:ee:be:ba:ca | ||
| 1893 | -----BEGIN CERTIFICATE----- | ||
| 1894 | MIIHSTCCBTGgAwIBAgIJAMnN0+nVfSPOMA0GCSqGSIb3DQEBBQUAMIGsMQswCQYD | ||
| 1895 | VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0 | ||
| 1896 | IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3 | ||
| 1897 | MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAlBgNVBAMTHkdsb2JhbCBD | ||
| 1898 | aGFtYmVyc2lnbiBSb290IC0gMjAwODAeFw0wODA4MDExMjMxNDBaFw0zODA3MzEx | ||
| 1899 | MjMxNDBaMIGsMQswCQYDVQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3Vy | ||
| 1900 | cmVudCBhZGRyZXNzIGF0IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAG | ||
| 1901 | A1UEBRMJQTgyNzQzMjg3MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAl | ||
| 1902 | BgNVBAMTHkdsb2JhbCBDaGFtYmVyc2lnbiBSb290IC0gMjAwODCCAiIwDQYJKoZI | ||
| 1903 | hvcNAQEBBQADggIPADCCAgoCggIBAMDfVtPkOpt2RbQT2//BthmLN0EYlVJH6xed | ||
| 1904 | KYiONWwGMi5HYvNJBL99RDaxccy9Wglz1dmFRP+RVyXfXjaOcNFccUMd2drvXNL7 | ||
| 1905 | G706tcuto8xEpw2uIRU/uXpbknXYpBI4iRmKt4DS4jJvVpyR1ogQC7N0ZJJ0YPP2 | ||
| 1906 | zxhPYLIj0Mc7zmFLmY/CDNBAspjcDahOo7kKrmCgrUVSY7pmvWjg+b4aqIG7HkF4 | ||
| 1907 | ddPB/gBVsIdU6CeQNR1MM62X/JcumIS/LMmjv9GYERTtY/jKmIhYF5ntRQOXfjyG | ||
| 1908 | HoiMvvKRhI9lNNgATH23MRdaKXoKGCQwoze1eqkBfSbW+Q6OWfH9GzO1KTsXO0G2 | ||
| 1909 | Id3UwD2ln58fQ1DJu7xsepeY7s2MH/ucUa6LcL0nn3HAa6x9kGbo1106DbDVwo3V | ||
| 1910 | yJ2dwW3Q0L9R5OP4wzg2rtandeavhENdk5IMagfeOx2YItaswTXbo6Al/3K1dh3e | ||
| 1911 | beksZixShNBFks4c5eUzHdwHU1SjqoI7mjcv3N2gZOnm3b2u/GSFHTynyQbehP9r | ||
| 1912 | 6GsaPMWis0L7iwk+XwhSx2LE1AVxv8Rk5Pihg+g+EpuoHtQ2TS9x9o0o9oOpE9Jh | ||
| 1913 | wZG7SMA0j0GMS0zbaRL/UJScIINZc+18ofLx/d33SdNDWKBWY8o9PeU1VlnpDsog | ||
| 1914 | zCtLkykPAgMBAAGjggFqMIIBZjASBgNVHRMBAf8ECDAGAQH/AgEMMB0GA1UdDgQW | ||
| 1915 | BBS5CcqcHtvTbDprru1U8VuTBjUuXjCB4QYDVR0jBIHZMIHWgBS5CcqcHtvTbDpr | ||
| 1916 | ru1U8VuTBjUuXqGBsqSBrzCBrDELMAkGA1UEBhMCRVUxQzBBBgNVBAcTOk1hZHJp | ||
| 1917 | ZCAoc2VlIGN1cnJlbnQgYWRkcmVzcyBhdCB3d3cuY2FtZXJmaXJtYS5jb20vYWRk | ||
| 1918 | cmVzcykxEjAQBgNVBAUTCUE4Mjc0MzI4NzEbMBkGA1UEChMSQUMgQ2FtZXJmaXJt | ||
| 1919 | YSBTLkEuMScwJQYDVQQDEx5HbG9iYWwgQ2hhbWJlcnNpZ24gUm9vdCAtIDIwMDiC | ||
| 1920 | CQDJzdPp1X0jzjAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRVHSAAMCow | ||
| 1921 | KAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20wDQYJKoZI | ||
| 1922 | hvcNAQEFBQADggIBAICIf3DekijZBZRG/5BXqfEv3xoNa/p8DhxJJHkn2EaqbylZ | ||
| 1923 | UohwEurdPfWbU1Rv4WCiqAm57OtZfMY18dwY6fFn5a+6ReAJ3spED8IXDneRRXoz | ||
| 1924 | X1+WLGiLwUePmJs9wOzL9dWCkoQ10b42OFZyMVtHLaoXpGNR6woBrX/sdZ7LoR/x | ||
| 1925 | fxKxueRkf2fWIyr0uDldmOghp+G9PUIadJpwr2hsUF1Jz//7Dl3mLEfXgTpZALVz | ||
| 1926 | a2Mg9jFFCDkO9HB+QHBaP9BrQql0PSgvAm11cpUJjUhjxsYjV5KTXjXBjfkK9yyd | ||
| 1927 | Yhz2rXzdpjEetrHHfoUm+qRqtdpjMNHvkzeyZi99Bffnt0uYlDXA2TopwZ2yUDMd | ||
| 1928 | SqlapskD7+3056huirRXhOukP9DuqqqHW2Pok+JrqNS4cnhrG+055F3Lm6qH1U9O | ||
| 1929 | AP7Zap88MQ8oAgF9mOinsKJknnn4SPIVqczmyETrP3iZ8ntxPjzxmKfFGBI/5rso | ||
| 1930 | M0LpRQp8bfKGeS/Fghl9CYl8slR2iK7ewfPM4W7bMdaTrpmg7yVqc5iJWzouE4ge | ||
| 1931 | v8CSlDQb4ye3ix5vQv/n6TebUB0tovkC7stYWDpxvGjjqsGvHCgfotwjZT+B6q6Z | ||
| 1932 | 09gwzxMNTxXJhLynSC34MCN32EZLeW32jO06f2ARePTpm67VVMB0gNELQp/B | ||
| 1933 | -----END CERTIFICATE----- | ||
| 1934 | |||
| 1935 | # Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. | ||
| 1936 | # Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. | ||
| 1937 | # Label: "Go Daddy Root Certificate Authority - G2" | ||
| 1938 | # Serial: 0 | ||
| 1939 | # MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01 | ||
| 1940 | # SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b | ||
| 1941 | # SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da | ||
| 1942 | -----BEGIN CERTIFICATE----- | ||
| 1943 | MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx | ||
| 1944 | EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT | ||
| 1945 | EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp | ||
| 1946 | ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz | ||
| 1947 | NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH | ||
| 1948 | EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE | ||
| 1949 | AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw | ||
| 1950 | DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD | ||
| 1951 | E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH | ||
| 1952 | /PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy | ||
| 1953 | DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh | ||
| 1954 | GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR | ||
| 1955 | tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA | ||
| 1956 | AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE | ||
| 1957 | FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX | ||
| 1958 | WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu | ||
| 1959 | 9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr | ||
| 1960 | gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo | ||
| 1961 | 2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO | ||
| 1962 | LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI | ||
| 1963 | 4uJEvlz36hz1 | ||
| 1964 | -----END CERTIFICATE----- | ||
| 1965 | |||
| 1966 | # Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. | ||
| 1967 | # Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. | ||
| 1968 | # Label: "Starfield Root Certificate Authority - G2" | ||
| 1969 | # Serial: 0 | ||
| 1970 | # MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96 | ||
| 1971 | # SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e | ||
| 1972 | # SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5 | ||
| 1973 | -----BEGIN CERTIFICATE----- | ||
| 1974 | MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx | ||
| 1975 | EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT | ||
| 1976 | HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs | ||
| 1977 | ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw | ||
| 1978 | MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6 | ||
| 1979 | b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj | ||
| 1980 | aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp | ||
| 1981 | Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC | ||
| 1982 | ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg | ||
| 1983 | nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1 | ||
| 1984 | HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N | ||
| 1985 | Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN | ||
| 1986 | dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0 | ||
| 1987 | HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO | ||
| 1988 | BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G | ||
| 1989 | CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU | ||
| 1990 | sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3 | ||
| 1991 | 4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg | ||
| 1992 | 8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K | ||
| 1993 | pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1 | ||
| 1994 | mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0 | ||
| 1995 | -----END CERTIFICATE----- | ||
| 1996 | |||
| 1997 | # Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. | ||
| 1998 | # Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. | ||
| 1999 | # Label: "Starfield Services Root Certificate Authority - G2" | ||
| 2000 | # Serial: 0 | ||
| 2001 | # MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2 | ||
| 2002 | # SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f | ||
| 2003 | # SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5 | ||
| 2004 | -----BEGIN CERTIFICATE----- | ||
| 2005 | MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx | ||
| 2006 | EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT | ||
| 2007 | HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs | ||
| 2008 | ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5 | ||
| 2009 | MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD | ||
| 2010 | VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy | ||
| 2011 | ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy | ||
| 2012 | dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI | ||
| 2013 | hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p | ||
| 2014 | OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2 | ||
| 2015 | 8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K | ||
| 2016 | Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe | ||
| 2017 | hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk | ||
| 2018 | 6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw | ||
| 2019 | DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q | ||
| 2020 | AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI | ||
| 2021 | bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB | ||
| 2022 | ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z | ||
| 2023 | qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd | ||
| 2024 | iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn | ||
| 2025 | 0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN | ||
| 2026 | sSi6 | ||
| 2027 | -----END CERTIFICATE----- | ||
| 2028 | |||
| 2029 | # Issuer: CN=AffirmTrust Commercial O=AffirmTrust | ||
| 2030 | # Subject: CN=AffirmTrust Commercial O=AffirmTrust | ||
| 2031 | # Label: "AffirmTrust Commercial" | ||
| 2032 | # Serial: 8608355977964138876 | ||
| 2033 | # MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7 | ||
| 2034 | # SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7 | ||
| 2035 | # SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7 | ||
| 2036 | -----BEGIN CERTIFICATE----- | ||
| 2037 | MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE | ||
| 2038 | BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz | ||
| 2039 | dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL | ||
| 2040 | MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp | ||
| 2041 | cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC | ||
| 2042 | AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP | ||
| 2043 | Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr | ||
| 2044 | ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL | ||
| 2045 | MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1 | ||
| 2046 | yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr | ||
| 2047 | VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/ | ||
| 2048 | nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ | ||
| 2049 | KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG | ||
| 2050 | XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj | ||
| 2051 | vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt | ||
| 2052 | Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g | ||
| 2053 | N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC | ||
| 2054 | nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8= | ||
| 2055 | -----END CERTIFICATE----- | ||
| 2056 | |||
| 2057 | # Issuer: CN=AffirmTrust Networking O=AffirmTrust | ||
| 2058 | # Subject: CN=AffirmTrust Networking O=AffirmTrust | ||
| 2059 | # Label: "AffirmTrust Networking" | ||
| 2060 | # Serial: 8957382827206547757 | ||
| 2061 | # MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f | ||
| 2062 | # SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f | ||
| 2063 | # SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b | ||
| 2064 | -----BEGIN CERTIFICATE----- | ||
| 2065 | MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE | ||
| 2066 | BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz | ||
| 2067 | dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL | ||
| 2068 | MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp | ||
| 2069 | cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC | ||
| 2070 | AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y | ||
| 2071 | YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua | ||
| 2072 | kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL | ||
| 2073 | QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp | ||
| 2074 | 6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG | ||
| 2075 | yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i | ||
| 2076 | QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ | ||
| 2077 | KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO | ||
| 2078 | tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu | ||
| 2079 | QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ | ||
| 2080 | Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u | ||
| 2081 | olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48 | ||
| 2082 | x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s= | ||
| 2083 | -----END CERTIFICATE----- | ||
| 2084 | |||
| 2085 | # Issuer: CN=AffirmTrust Premium O=AffirmTrust | ||
| 2086 | # Subject: CN=AffirmTrust Premium O=AffirmTrust | ||
| 2087 | # Label: "AffirmTrust Premium" | ||
| 2088 | # Serial: 7893706540734352110 | ||
| 2089 | # MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57 | ||
| 2090 | # SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27 | ||
| 2091 | # SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a | ||
| 2092 | -----BEGIN CERTIFICATE----- | ||
| 2093 | MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE | ||
| 2094 | BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz | ||
| 2095 | dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG | ||
| 2096 | A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U | ||
| 2097 | cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf | ||
| 2098 | qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ | ||
| 2099 | JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ | ||
| 2100 | +jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS | ||
| 2101 | s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5 | ||
| 2102 | HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7 | ||
| 2103 | 70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG | ||
| 2104 | V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S | ||
| 2105 | qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S | ||
| 2106 | 5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia | ||
| 2107 | C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX | ||
| 2108 | OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE | ||
| 2109 | FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ | ||
| 2110 | BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2 | ||
| 2111 | KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg | ||
| 2112 | Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B | ||
| 2113 | 8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ | ||
| 2114 | MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc | ||
| 2115 | 0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ | ||
| 2116 | u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF | ||
| 2117 | u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH | ||
| 2118 | YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8 | ||
| 2119 | GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO | ||
| 2120 | RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e | ||
| 2121 | KeC2uAloGRwYQw== | ||
| 2122 | -----END CERTIFICATE----- | ||
| 2123 | |||
| 2124 | # Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust | ||
| 2125 | # Subject: CN=AffirmTrust Premium ECC O=AffirmTrust | ||
| 2126 | # Label: "AffirmTrust Premium ECC" | ||
| 2127 | # Serial: 8401224907861490260 | ||
| 2128 | # MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d | ||
| 2129 | # SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb | ||
| 2130 | # SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23 | ||
| 2131 | -----BEGIN CERTIFICATE----- | ||
| 2132 | MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC | ||
| 2133 | VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ | ||
| 2134 | cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ | ||
| 2135 | BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt | ||
| 2136 | VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D | ||
| 2137 | 0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9 | ||
| 2138 | ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G | ||
| 2139 | A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G | ||
| 2140 | A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs | ||
| 2141 | aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I | ||
| 2142 | flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ== | ||
| 2143 | -----END CERTIFICATE----- | ||
| 2144 | |||
| 2145 | # Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority | ||
| 2146 | # Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority | ||
| 2147 | # Label: "Certum Trusted Network CA" | ||
| 2148 | # Serial: 279744 | ||
| 2149 | # MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78 | ||
| 2150 | # SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e | ||
| 2151 | # SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e | ||
| 2152 | -----BEGIN CERTIFICATE----- | ||
| 2153 | MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM | ||
| 2154 | MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D | ||
| 2155 | ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU | ||
| 2156 | cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3 | ||
| 2157 | WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg | ||
| 2158 | Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw | ||
| 2159 | IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B | ||
| 2160 | AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH | ||
| 2161 | UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM | ||
| 2162 | TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU | ||
| 2163 | BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM | ||
| 2164 | kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x | ||
| 2165 | AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV | ||
| 2166 | HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV | ||
| 2167 | HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y | ||
| 2168 | sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL | ||
| 2169 | I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8 | ||
| 2170 | J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY | ||
| 2171 | VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI | ||
| 2172 | 03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw= | ||
| 2173 | -----END CERTIFICATE----- | ||
| 2174 | |||
| 2175 | # Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA | ||
| 2176 | # Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA | ||
| 2177 | # Label: "TWCA Root Certification Authority" | ||
| 2178 | # Serial: 1 | ||
| 2179 | # MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79 | ||
| 2180 | # SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48 | ||
| 2181 | # SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44 | ||
| 2182 | -----BEGIN CERTIFICATE----- | ||
| 2183 | MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES | ||
| 2184 | MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU | ||
| 2185 | V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz | ||
| 2186 | WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO | ||
| 2187 | LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm | ||
| 2188 | aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB | ||
| 2189 | AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE | ||
| 2190 | AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH | ||
| 2191 | K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX | ||
| 2192 | RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z | ||
| 2193 | rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx | ||
| 2194 | 3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV | ||
| 2195 | HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq | ||
| 2196 | hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC | ||
| 2197 | MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls | ||
| 2198 | XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D | ||
| 2199 | lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn | ||
| 2200 | aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ | ||
| 2201 | YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw== | ||
| 2202 | -----END CERTIFICATE----- | ||
| 2203 | |||
| 2204 | # Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2 | ||
| 2205 | # Subject: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2 | ||
| 2206 | # Label: "Security Communication RootCA2" | ||
| 2207 | # Serial: 0 | ||
| 2208 | # MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43 | ||
| 2209 | # SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74 | ||
| 2210 | # SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6 | ||
| 2211 | -----BEGIN CERTIFICATE----- | ||
| 2212 | MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl | ||
| 2213 | MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe | ||
| 2214 | U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX | ||
| 2215 | DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy | ||
| 2216 | dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj | ||
| 2217 | YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV | ||
| 2218 | OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr | ||
| 2219 | zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM | ||
| 2220 | VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ | ||
| 2221 | hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO | ||
| 2222 | ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw | ||
| 2223 | awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs | ||
| 2224 | OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3 | ||
| 2225 | DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF | ||
| 2226 | coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc | ||
| 2227 | okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8 | ||
| 2228 | t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy | ||
| 2229 | 1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/ | ||
| 2230 | SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03 | ||
| 2231 | -----END CERTIFICATE----- | ||
| 2232 | |||
| 2233 | # Issuer: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority | ||
| 2234 | # Subject: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority | ||
| 2235 | # Label: "Hellenic Academic and Research Institutions RootCA 2011" | ||
| 2236 | # Serial: 0 | ||
| 2237 | # MD5 Fingerprint: 73:9f:4c:4b:73:5b:79:e9:fa:ba:1c:ef:6e:cb:d5:c9 | ||
| 2238 | # SHA1 Fingerprint: fe:45:65:9b:79:03:5b:98:a1:61:b5:51:2e:ac:da:58:09:48:22:4d | ||
| 2239 | # SHA256 Fingerprint: bc:10:4f:15:a4:8b:e7:09:dc:a5:42:a7:e1:d4:b9:df:6f:05:45:27:e8:02:ea:a9:2d:59:54:44:25:8a:fe:71 | ||
| 2240 | -----BEGIN CERTIFICATE----- | ||
| 2241 | MIIEMTCCAxmgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBlTELMAkGA1UEBhMCR1Ix | ||
| 2242 | RDBCBgNVBAoTO0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 | ||
| 2243 | dGlvbnMgQ2VydC4gQXV0aG9yaXR5MUAwPgYDVQQDEzdIZWxsZW5pYyBBY2FkZW1p | ||
| 2244 | YyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIFJvb3RDQSAyMDExMB4XDTExMTIw | ||
| 2245 | NjEzNDk1MloXDTMxMTIwMTEzNDk1MlowgZUxCzAJBgNVBAYTAkdSMUQwQgYDVQQK | ||
| 2246 | EztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIENl | ||
| 2247 | cnQuIEF1dGhvcml0eTFAMD4GA1UEAxM3SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl | ||
| 2248 | c2VhcmNoIEluc3RpdHV0aW9ucyBSb290Q0EgMjAxMTCCASIwDQYJKoZIhvcNAQEB | ||
| 2249 | BQADggEPADCCAQoCggEBAKlTAOMupvaO+mDYLZU++CwqVE7NuYRhlFhPjz2L5EPz | ||
| 2250 | dYmNUeTDN9KKiE15HrcS3UN4SoqS5tdI1Q+kOilENbgH9mgdVc04UfCMJDGFr4PJ | ||
| 2251 | fel3r+0ae50X+bOdOFAPplp5kYCvN66m0zH7tSYJnTxa71HFK9+WXesyHgLacEns | ||
| 2252 | bgzImjeN9/E2YEsmLIKe0HjzDQ9jpFEw4fkrJxIH2Oq9GGKYsFk3fb7u8yBRQlqD | ||
| 2253 | 75O6aRXxYp2fmTmCobd0LovUxQt7L/DICto9eQqakxylKHJzkUOap9FNhYS5qXSP | ||
| 2254 | FEDH3N6sQWRstBmbAmNtJGSPRLIl6s5ddAxjMlyNh+UCAwEAAaOBiTCBhjAPBgNV | ||
| 2255 | HRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBBjAdBgNVHQ4EFgQUppFC/RNhSiOeCKQp | ||
| 2256 | 5dgTBCPuQSUwRwYDVR0eBEAwPqA8MAWCAy5ncjAFggMuZXUwBoIELmVkdTAGggQu | ||
| 2257 | b3JnMAWBAy5ncjAFgQMuZXUwBoEELmVkdTAGgQQub3JnMA0GCSqGSIb3DQEBBQUA | ||
| 2258 | A4IBAQAf73lB4XtuP7KMhjdCSk4cNx6NZrokgclPEg8hwAOXhiVtXdMiKahsog2p | ||
| 2259 | 6z0GW5k6x8zDmjR/qw7IThzh+uTczQ2+vyT+bOdrwg3IBp5OjWEopmr95fZi6hg8 | ||
| 2260 | TqBTnbI6nOulnJEWtk2C4AwFSKls9cz4y51JtPACpf1wA+2KIaWuE4ZJwzNzvoc7 | ||
| 2261 | dIsXRSZMFpGD/md9zU1jZ/rzAxKWeAaNsWftjj++n08C9bMJL/NMh98qy5V8Acys | ||
| 2262 | Nnq/onN694/BtZqhFLKPM58N7yLcZnuEvUUXBj08yrl3NI/K6s8/MT7jiOOASSXI | ||
| 2263 | l7WdmplNsDz4SgCbZN2fOUvRJ9e4 | ||
| 2264 | -----END CERTIFICATE----- | ||
| 2265 | |||
| 2266 | # Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 | ||
| 2267 | # Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 | ||
| 2268 | # Label: "Actalis Authentication Root CA" | ||
| 2269 | # Serial: 6271844772424770508 | ||
| 2270 | # MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6 | ||
| 2271 | # SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac | ||
| 2272 | # SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66 | ||
| 2273 | -----BEGIN CERTIFICATE----- | ||
| 2274 | MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE | ||
| 2275 | BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w | ||
| 2276 | MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290 | ||
| 2277 | IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC | ||
| 2278 | SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1 | ||
| 2279 | ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB | ||
| 2280 | MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv | ||
| 2281 | UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX | ||
| 2282 | 4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9 | ||
| 2283 | KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/ | ||
| 2284 | gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb | ||
| 2285 | rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ | ||
| 2286 | 51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F | ||
| 2287 | be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe | ||
| 2288 | KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F | ||
| 2289 | v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn | ||
| 2290 | fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7 | ||
| 2291 | jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz | ||
| 2292 | ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt | ||
| 2293 | ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL | ||
| 2294 | e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70 | ||
| 2295 | jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz | ||
| 2296 | WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V | ||
| 2297 | SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j | ||
| 2298 | pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX | ||
| 2299 | X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok | ||
| 2300 | fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R | ||
| 2301 | K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU | ||
| 2302 | ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU | ||
| 2303 | LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT | ||
| 2304 | LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg== | ||
| 2305 | -----END CERTIFICATE----- | ||
| 2306 | |||
| 2307 | # Issuer: O=Trustis Limited OU=Trustis FPS Root CA | ||
| 2308 | # Subject: O=Trustis Limited OU=Trustis FPS Root CA | ||
| 2309 | # Label: "Trustis FPS Root CA" | ||
| 2310 | # Serial: 36053640375399034304724988975563710553 | ||
| 2311 | # MD5 Fingerprint: 30:c9:e7:1e:6b:e6:14:eb:65:b2:16:69:20:31:67:4d | ||
| 2312 | # SHA1 Fingerprint: 3b:c0:38:0b:33:c3:f6:a6:0c:86:15:22:93:d9:df:f5:4b:81:c0:04 | ||
| 2313 | # SHA256 Fingerprint: c1:b4:82:99:ab:a5:20:8f:e9:63:0a:ce:55:ca:68:a0:3e:da:5a:51:9c:88:02:a0:d3:a6:73:be:8f:8e:55:7d | ||
| 2314 | -----BEGIN CERTIFICATE----- | ||
| 2315 | MIIDZzCCAk+gAwIBAgIQGx+ttiD5JNM2a/fH8YygWTANBgkqhkiG9w0BAQUFADBF | ||
| 2316 | MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPVHJ1c3RpcyBMaW1pdGVkMRwwGgYDVQQL | ||
| 2317 | ExNUcnVzdGlzIEZQUyBSb290IENBMB4XDTAzMTIyMzEyMTQwNloXDTI0MDEyMTEx | ||
| 2318 | MzY1NFowRTELMAkGA1UEBhMCR0IxGDAWBgNVBAoTD1RydXN0aXMgTGltaXRlZDEc | ||
| 2319 | MBoGA1UECxMTVHJ1c3RpcyBGUFMgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQAD | ||
| 2320 | ggEPADCCAQoCggEBAMVQe547NdDfxIzNjpvto8A2mfRC6qc+gIMPpqdZh8mQRUN+ | ||
| 2321 | AOqGeSoDvT03mYlmt+WKVoaTnGhLaASMk5MCPjDSNzoiYYkchU59j9WvezX2fihH | ||
| 2322 | iTHcDnlkH5nSW7r+f2C/revnPDgpai/lkQtV/+xvWNUtyd5MZnGPDNcE2gfmHhjj | ||
| 2323 | vSkCqPoc4Vu5g6hBSLwacY3nYuUtsuvffM/bq1rKMfFMIvMFE/eC+XN5DL7XSxzA | ||
| 2324 | 0RU8k0Fk0ea+IxciAIleH2ulrG6nS4zto3Lmr2NNL4XSFDWaLk6M6jKYKIahkQlB | ||
| 2325 | OrTh4/L68MkKokHdqeMDx4gVOxzUGpTXn2RZEm0CAwEAAaNTMFEwDwYDVR0TAQH/ | ||
| 2326 | BAUwAwEB/zAfBgNVHSMEGDAWgBS6+nEleYtXQSUhhgtx67JkDoshZzAdBgNVHQ4E | ||
| 2327 | FgQUuvpxJXmLV0ElIYYLceuyZA6LIWcwDQYJKoZIhvcNAQEFBQADggEBAH5Y//01 | ||
| 2328 | GX2cGE+esCu8jowU/yyg2kdbw++BLa8F6nRIW/M+TgfHbcWzk88iNVy2P3UnXwmW | ||
| 2329 | zaD+vkAMXBJV+JOCyinpXj9WV4s4NvdFGkwozZ5BuO1WTISkQMi4sKUraXAEasP4 | ||
| 2330 | 1BIy+Q7DsdwyhEQsb8tGD+pmQQ9P8Vilpg0ND2HepZ5dfWWhPBfnqFVO76DH7cZE | ||
| 2331 | f1T1o+CP8HxVIo8ptoGj4W1OLBuAZ+ytIJ8MYmHVl/9D7S3B2l0pKoU/rGXuhg8F | ||
| 2332 | jZBf3+6f9L/uHfuY5H+QK4R4EA5sSVPvFVtlRkpdr7r7OnIdzfYliB6XzCGcKQEN | ||
| 2333 | ZetX2fNXlrtIzYE= | ||
| 2334 | -----END CERTIFICATE----- | ||
| 2335 | |||
| 2336 | # Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 | ||
| 2337 | # Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 | ||
| 2338 | # Label: "Buypass Class 2 Root CA" | ||
| 2339 | # Serial: 2 | ||
| 2340 | # MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29 | ||
| 2341 | # SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99 | ||
| 2342 | # SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48 | ||
| 2343 | -----BEGIN CERTIFICATE----- | ||
| 2344 | MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd | ||
| 2345 | MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg | ||
| 2346 | Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow | ||
| 2347 | TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw | ||
| 2348 | HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB | ||
| 2349 | BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr | ||
| 2350 | 6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV | ||
| 2351 | L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91 | ||
| 2352 | 1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx | ||
| 2353 | MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ | ||
| 2354 | QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB | ||
| 2355 | arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr | ||
| 2356 | Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi | ||
| 2357 | FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS | ||
| 2358 | P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN | ||
| 2359 | 9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP | ||
| 2360 | AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz | ||
| 2361 | uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h | ||
| 2362 | 9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s | ||
| 2363 | A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t | ||
| 2364 | OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo | ||
| 2365 | +fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7 | ||
| 2366 | KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2 | ||
| 2367 | DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us | ||
| 2368 | H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ | ||
| 2369 | I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7 | ||
| 2370 | 5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h | ||
| 2371 | 3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz | ||
| 2372 | Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA= | ||
| 2373 | -----END CERTIFICATE----- | ||
| 2374 | |||
| 2375 | # Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 | ||
| 2376 | # Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 | ||
| 2377 | # Label: "Buypass Class 3 Root CA" | ||
| 2378 | # Serial: 2 | ||
| 2379 | # MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec | ||
| 2380 | # SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57 | ||
| 2381 | # SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d | ||
| 2382 | -----BEGIN CERTIFICATE----- | ||
| 2383 | MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd | ||
| 2384 | MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg | ||
| 2385 | Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow | ||
| 2386 | TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw | ||
| 2387 | HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB | ||
| 2388 | BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y | ||
| 2389 | ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E | ||
| 2390 | N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9 | ||
| 2391 | tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX | ||
| 2392 | 0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c | ||
| 2393 | /3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X | ||
| 2394 | KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY | ||
| 2395 | zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS | ||
| 2396 | O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D | ||
| 2397 | 34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP | ||
| 2398 | K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3 | ||
| 2399 | AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv | ||
| 2400 | Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj | ||
| 2401 | QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV | ||
| 2402 | cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS | ||
| 2403 | IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2 | ||
| 2404 | HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa | ||
| 2405 | O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv | ||
| 2406 | 033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u | ||
| 2407 | dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE | ||
| 2408 | kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41 | ||
| 2409 | 3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD | ||
| 2410 | u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq | ||
| 2411 | 4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc= | ||
| 2412 | -----END CERTIFICATE----- | ||
| 2413 | |||
| 2414 | # Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center | ||
| 2415 | # Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center | ||
| 2416 | # Label: "T-TeleSec GlobalRoot Class 3" | ||
| 2417 | # Serial: 1 | ||
| 2418 | # MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef | ||
| 2419 | # SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1 | ||
| 2420 | # SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd | ||
| 2421 | -----BEGIN CERTIFICATE----- | ||
| 2422 | MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx | ||
| 2423 | KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd | ||
| 2424 | BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl | ||
| 2425 | YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1 | ||
| 2426 | OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy | ||
| 2427 | aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 | ||
| 2428 | ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G | ||
| 2429 | CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN | ||
| 2430 | 8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/ | ||
| 2431 | RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4 | ||
| 2432 | hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5 | ||
| 2433 | ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM | ||
| 2434 | EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj | ||
| 2435 | QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1 | ||
| 2436 | A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy | ||
| 2437 | WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ | ||
| 2438 | 1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30 | ||
| 2439 | 6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT | ||
| 2440 | 91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml | ||
| 2441 | e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p | ||
| 2442 | TpPDpFQUWw== | ||
| 2443 | -----END CERTIFICATE----- | ||
| 2444 | |||
| 2445 | # Issuer: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus | ||
| 2446 | # Subject: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus | ||
| 2447 | # Label: "EE Certification Centre Root CA" | ||
| 2448 | # Serial: 112324828676200291871926431888494945866 | ||
| 2449 | # MD5 Fingerprint: 43:5e:88:d4:7d:1a:4a:7e:fd:84:2e:52:eb:01:d4:6f | ||
| 2450 | # SHA1 Fingerprint: c9:a8:b9:e7:55:80:5e:58:e3:53:77:a7:25:eb:af:c3:7b:27:cc:d7 | ||
| 2451 | # SHA256 Fingerprint: 3e:84:ba:43:42:90:85:16:e7:75:73:c0:99:2f:09:79:ca:08:4e:46:85:68:1f:f1:95:cc:ba:8a:22:9b:8a:76 | ||
| 2452 | -----BEGIN CERTIFICATE----- | ||
| 2453 | MIIEAzCCAuugAwIBAgIQVID5oHPtPwBMyonY43HmSjANBgkqhkiG9w0BAQUFADB1 | ||
| 2454 | MQswCQYDVQQGEwJFRTEiMCAGA1UECgwZQVMgU2VydGlmaXRzZWVyaW1pc2tlc2t1 | ||
| 2455 | czEoMCYGA1UEAwwfRUUgQ2VydGlmaWNhdGlvbiBDZW50cmUgUm9vdCBDQTEYMBYG | ||
| 2456 | CSqGSIb3DQEJARYJcGtpQHNrLmVlMCIYDzIwMTAxMDMwMTAxMDMwWhgPMjAzMDEy | ||
| 2457 | MTcyMzU5NTlaMHUxCzAJBgNVBAYTAkVFMSIwIAYDVQQKDBlBUyBTZXJ0aWZpdHNl | ||
| 2458 | ZXJpbWlza2Vza3VzMSgwJgYDVQQDDB9FRSBDZXJ0aWZpY2F0aW9uIENlbnRyZSBS | ||
| 2459 | b290IENBMRgwFgYJKoZIhvcNAQkBFglwa2lAc2suZWUwggEiMA0GCSqGSIb3DQEB | ||
| 2460 | AQUAA4IBDwAwggEKAoIBAQDIIMDs4MVLqwd4lfNE7vsLDP90jmG7sWLqI9iroWUy | ||
| 2461 | euuOF0+W2Ap7kaJjbMeMTC55v6kF/GlclY1i+blw7cNRfdCT5mzrMEvhvH2/UpvO | ||
| 2462 | bntl8jixwKIy72KyaOBhU8E2lf/slLo2rpwcpzIP5Xy0xm90/XsY6KxX7QYgSzIw | ||
| 2463 | WFv9zajmofxwvI6Sc9uXp3whrj3B9UiHbCe9nyV0gVWw93X2PaRka9ZP585ArQ/d | ||
| 2464 | MtO8ihJTmMmJ+xAdTX7Nfh9WDSFwhfYggx/2uh8Ej+p3iDXE/+pOoYtNP2MbRMNE | ||
| 2465 | 1CV2yreN1x5KZmTNXMWcg+HCCIia7E6j8T4cLNlsHaFLAgMBAAGjgYowgYcwDwYD | ||
| 2466 | VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBLyWj7qVhy/ | ||
| 2467 | zQas8fElyalL1BSZMEUGA1UdJQQ+MDwGCCsGAQUFBwMCBggrBgEFBQcDAQYIKwYB | ||
| 2468 | BQUHAwMGCCsGAQUFBwMEBggrBgEFBQcDCAYIKwYBBQUHAwkwDQYJKoZIhvcNAQEF | ||
| 2469 | BQADggEBAHv25MANqhlHt01Xo/6tu7Fq1Q+e2+RjxY6hUFaTlrg4wCQiZrxTFGGV | ||
| 2470 | v9DHKpY5P30osxBAIWrEr7BSdxjhlthWXePdNl4dp1BUoMUq5KqMlIpPnTX/dqQG | ||
| 2471 | E5Gion0ARD9V04I8GtVbvFZMIi5GQ4okQC3zErg7cBqklrkar4dBGmoYDQZPxz5u | ||
| 2472 | uSlNDUmJEYcyW+ZLBMjkXOZ0c5RdFpgTlf7727FE5TpwrDdr5rMzcijJs1eg9gIW | ||
| 2473 | iAYLtqZLICjU3j2LrTcFU3T+bsy8QxdxXvnFzBqpYe73dgzzcvRyrc9yAjYHR8/v | ||
| 2474 | GVCJYMzpJJUPwssd8m92kMfMdcGWxZ0= | ||
| 2475 | -----END CERTIFICATE----- | ||
| 2476 | |||
| 2477 | # Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH | ||
| 2478 | # Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH | ||
| 2479 | # Label: "D-TRUST Root Class 3 CA 2 2009" | ||
| 2480 | # Serial: 623603 | ||
| 2481 | # MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f | ||
| 2482 | # SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0 | ||
| 2483 | # SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1 | ||
| 2484 | -----BEGIN CERTIFICATE----- | ||
| 2485 | MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF | ||
| 2486 | MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD | ||
| 2487 | bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha | ||
| 2488 | ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM | ||
| 2489 | HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB | ||
| 2490 | BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03 | ||
| 2491 | UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42 | ||
| 2492 | tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R | ||
| 2493 | ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM | ||
| 2494 | lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp | ||
| 2495 | /hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G | ||
| 2496 | A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G | ||
| 2497 | A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj | ||
| 2498 | dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy | ||
| 2499 | MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl | ||
| 2500 | cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js | ||
| 2501 | L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL | ||
| 2502 | BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni | ||
| 2503 | acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0 | ||
| 2504 | o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K | ||
| 2505 | zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8 | ||
| 2506 | PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y | ||
| 2507 | Johw1+qRzT65ysCQblrGXnRl11z+o+I= | ||
| 2508 | -----END CERTIFICATE----- | ||
| 2509 | |||
| 2510 | # Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH | ||
| 2511 | # Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH | ||
| 2512 | # Label: "D-TRUST Root Class 3 CA 2 EV 2009" | ||
| 2513 | # Serial: 623604 | ||
| 2514 | # MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6 | ||
| 2515 | # SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83 | ||
| 2516 | # SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81 | ||
| 2517 | -----BEGIN CERTIFICATE----- | ||
| 2518 | MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF | ||
| 2519 | MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD | ||
| 2520 | bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw | ||
| 2521 | NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV | ||
| 2522 | BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI | ||
| 2523 | hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn | ||
| 2524 | ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0 | ||
| 2525 | 3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z | ||
| 2526 | qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR | ||
| 2527 | p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8 | ||
| 2528 | HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw | ||
| 2529 | ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea | ||
| 2530 | HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw | ||
| 2531 | Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh | ||
| 2532 | c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E | ||
| 2533 | RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt | ||
| 2534 | dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku | ||
| 2535 | Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp | ||
| 2536 | 3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05 | ||
| 2537 | nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF | ||
| 2538 | CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na | ||
| 2539 | xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX | ||
| 2540 | KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1 | ||
| 2541 | -----END CERTIFICATE----- | ||
| 2542 | |||
| 2543 | # Issuer: CN=CA Disig Root R2 O=Disig a.s. | ||
| 2544 | # Subject: CN=CA Disig Root R2 O=Disig a.s. | ||
| 2545 | # Label: "CA Disig Root R2" | ||
| 2546 | # Serial: 10572350602393338211 | ||
| 2547 | # MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03 | ||
| 2548 | # SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71 | ||
| 2549 | # SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03 | ||
| 2550 | -----BEGIN CERTIFICATE----- | ||
| 2551 | MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV | ||
| 2552 | BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu | ||
| 2553 | MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy | ||
| 2554 | MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx | ||
| 2555 | EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw | ||
| 2556 | ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe | ||
| 2557 | NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH | ||
| 2558 | PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I | ||
| 2559 | x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe | ||
| 2560 | QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR | ||
| 2561 | yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO | ||
| 2562 | QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912 | ||
| 2563 | H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ | ||
| 2564 | QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD | ||
| 2565 | i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs | ||
| 2566 | nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1 | ||
| 2567 | rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud | ||
| 2568 | DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI | ||
| 2569 | hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM | ||
| 2570 | tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf | ||
| 2571 | GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb | ||
| 2572 | lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka | ||
| 2573 | +elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal | ||
| 2574 | TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i | ||
| 2575 | nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3 | ||
| 2576 | gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr | ||
| 2577 | G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os | ||
| 2578 | zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x | ||
| 2579 | L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL | ||
| 2580 | -----END CERTIFICATE----- | ||
| 2581 | |||
| 2582 | # Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV | ||
| 2583 | # Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV | ||
| 2584 | # Label: "ACCVRAIZ1" | ||
| 2585 | # Serial: 6828503384748696800 | ||
| 2586 | # MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02 | ||
| 2587 | # SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17 | ||
| 2588 | # SHA256 Fingerprint: 9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13 | ||
| 2589 | -----BEGIN CERTIFICATE----- | ||
| 2590 | MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE | ||
| 2591 | AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw | ||
| 2592 | CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ | ||
| 2593 | BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND | ||
| 2594 | VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb | ||
| 2595 | qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY | ||
| 2596 | HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo | ||
| 2597 | G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA | ||
| 2598 | lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr | ||
| 2599 | IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/ | ||
| 2600 | 0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH | ||
| 2601 | k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47 | ||
| 2602 | 4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO | ||
| 2603 | m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa | ||
| 2604 | cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl | ||
| 2605 | uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI | ||
| 2606 | KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls | ||
| 2607 | ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG | ||
| 2608 | AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2 | ||
| 2609 | VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT | ||
| 2610 | VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG | ||
| 2611 | CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA | ||
| 2612 | cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA | ||
| 2613 | QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA | ||
| 2614 | 7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA | ||
| 2615 | cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA | ||
| 2616 | QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA | ||
| 2617 | czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu | ||
| 2618 | aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt | ||
| 2619 | aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud | ||
| 2620 | DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF | ||
| 2621 | BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp | ||
| 2622 | D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU | ||
| 2623 | JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m | ||
| 2624 | AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD | ||
| 2625 | vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms | ||
| 2626 | tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH | ||
| 2627 | 7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h | ||
| 2628 | I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA | ||
| 2629 | h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF | ||
| 2630 | d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H | ||
| 2631 | pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7 | ||
| 2632 | -----END CERTIFICATE----- | ||
| 2633 | |||
| 2634 | # Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA | ||
| 2635 | # Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA | ||
| 2636 | # Label: "TWCA Global Root CA" | ||
| 2637 | # Serial: 3262 | ||
| 2638 | # MD5 Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96 | ||
| 2639 | # SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65 | ||
| 2640 | # SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b | ||
| 2641 | -----BEGIN CERTIFICATE----- | ||
| 2642 | MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx | ||
| 2643 | EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT | ||
| 2644 | VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5 | ||
| 2645 | NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT | ||
| 2646 | B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG | ||
| 2647 | SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF | ||
| 2648 | 10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz | ||
| 2649 | 0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh | ||
| 2650 | MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH | ||
| 2651 | zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc | ||
| 2652 | 46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2 | ||
| 2653 | yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi | ||
| 2654 | laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP | ||
| 2655 | oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA | ||
| 2656 | BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE | ||
| 2657 | qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm | ||
| 2658 | 4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB | ||
| 2659 | /zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL | ||
| 2660 | 1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn | ||
| 2661 | LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF | ||
| 2662 | H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo | ||
| 2663 | RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+ | ||
| 2664 | nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh | ||
| 2665 | 15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW | ||
| 2666 | 6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW | ||
| 2667 | nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j | ||
| 2668 | wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz | ||
| 2669 | aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy | ||
| 2670 | KwbQBM0= | ||
| 2671 | -----END CERTIFICATE----- | ||
| 2672 | |||
| 2673 | # Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera | ||
| 2674 | # Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera | ||
| 2675 | # Label: "TeliaSonera Root CA v1" | ||
| 2676 | # Serial: 199041966741090107964904287217786801558 | ||
| 2677 | # MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c | ||
| 2678 | # SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37 | ||
| 2679 | # SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89 | ||
| 2680 | -----BEGIN CERTIFICATE----- | ||
| 2681 | MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw | ||
| 2682 | NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv | ||
| 2683 | b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD | ||
| 2684 | VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2 | ||
| 2685 | MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F | ||
| 2686 | VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1 | ||
| 2687 | 7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X | ||
| 2688 | Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+ | ||
| 2689 | /jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs | ||
| 2690 | 81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm | ||
| 2691 | dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe | ||
| 2692 | Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu | ||
| 2693 | sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4 | ||
| 2694 | pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs | ||
| 2695 | slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ | ||
| 2696 | arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD | ||
| 2697 | VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG | ||
| 2698 | 9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl | ||
| 2699 | dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx | ||
| 2700 | 0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj | ||
| 2701 | TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed | ||
| 2702 | Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7 | ||
| 2703 | Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI | ||
| 2704 | OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7 | ||
| 2705 | vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW | ||
| 2706 | t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn | ||
| 2707 | HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx | ||
| 2708 | SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY= | ||
| 2709 | -----END CERTIFICATE----- | ||
| 2710 | |||
| 2711 | # Issuer: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. OU=E-Tugra Sertifikasyon Merkezi | ||
| 2712 | # Subject: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. OU=E-Tugra Sertifikasyon Merkezi | ||
| 2713 | # Label: "E-Tugra Certification Authority" | ||
| 2714 | # Serial: 7667447206703254355 | ||
| 2715 | # MD5 Fingerprint: b8:a1:03:63:b0:bd:21:71:70:8a:6f:13:3a:bb:79:49 | ||
| 2716 | # SHA1 Fingerprint: 51:c6:e7:08:49:06:6e:f3:92:d4:5c:a0:0d:6d:a3:62:8f:c3:52:39 | ||
| 2717 | # SHA256 Fingerprint: b0:bf:d5:2b:b0:d7:d9:bd:92:bf:5d:4d:c1:3d:a2:55:c0:2c:54:2f:37:83:65:ea:89:39:11:f5:5e:55:f2:3c | ||
| 2718 | -----BEGIN CERTIFICATE----- | ||
| 2719 | MIIGSzCCBDOgAwIBAgIIamg+nFGby1MwDQYJKoZIhvcNAQELBQAwgbIxCzAJBgNV | ||
| 2720 | BAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+BgNVBAoMN0UtVHXEn3JhIEVCRyBC | ||
| 2721 | aWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhpem1ldGxlcmkgQS7Fni4xJjAkBgNV | ||
| 2722 | BAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBNZXJrZXppMSgwJgYDVQQDDB9FLVR1 | ||
| 2723 | Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTEzMDMwNTEyMDk0OFoXDTIz | ||
| 2724 | MDMwMzEyMDk0OFowgbIxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+ | ||
| 2725 | BgNVBAoMN0UtVHXEn3JhIEVCRyBCaWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhp | ||
| 2726 | em1ldGxlcmkgQS7Fni4xJjAkBgNVBAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBN | ||
| 2727 | ZXJrZXppMSgwJgYDVQQDDB9FLVR1Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5 | ||
| 2728 | MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA4vU/kwVRHoViVF56C/UY | ||
| 2729 | B4Oufq9899SKa6VjQzm5S/fDxmSJPZQuVIBSOTkHS0vdhQd2h8y/L5VMzH2nPbxH | ||
| 2730 | D5hw+IyFHnSOkm0bQNGZDbt1bsipa5rAhDGvykPL6ys06I+XawGb1Q5KCKpbknSF | ||
| 2731 | Q9OArqGIW66z6l7LFpp3RMih9lRozt6Plyu6W0ACDGQXwLWTzeHxE2bODHnv0ZEo | ||
| 2732 | q1+gElIwcxmOj+GMB6LDu0rw6h8VqO4lzKRG+Bsi77MOQ7osJLjFLFzUHPhdZL3D | ||
| 2733 | k14opz8n8Y4e0ypQBaNV2cvnOVPAmJ6MVGKLJrD3fY185MaeZkJVgkfnsliNZvcH | ||
| 2734 | fC425lAcP9tDJMW/hkd5s3kc91r0E+xs+D/iWR+V7kI+ua2oMoVJl0b+SzGPWsut | ||
| 2735 | dEcf6ZG33ygEIqDUD13ieU/qbIWGvaimzuT6w+Gzrt48Ue7LE3wBf4QOXVGUnhMM | ||
| 2736 | ti6lTPk5cDZvlsouDERVxcr6XQKj39ZkjFqzAQqptQpHF//vkUAqjqFGOjGY5RH8 | ||
| 2737 | zLtJVor8udBhmm9lbObDyz51Sf6Pp+KJxWfXnUYTTjF2OySznhFlhqt/7x3U+Lzn | ||
| 2738 | rFpct1pHXFXOVbQicVtbC/DP3KBhZOqp12gKY6fgDT+gr9Oq0n7vUaDmUStVkhUX | ||
| 2739 | U8u3Zg5mTPj5dUyQ5xJwx0UCAwEAAaNjMGEwHQYDVR0OBBYEFC7j27JJ0JxUeVz6 | ||
| 2740 | Jyr+zE7S6E5UMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAULuPbsknQnFR5 | ||
| 2741 | XPonKv7MTtLoTlQwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAF | ||
| 2742 | Nzr0TbdF4kV1JI+2d1LoHNgQk2Xz8lkGpD4eKexd0dCrfOAKkEh47U6YA5n+KGCR | ||
| 2743 | HTAduGN8qOY1tfrTYXbm1gdLymmasoR6d5NFFxWfJNCYExL/u6Au/U5Mh/jOXKqY | ||
| 2744 | GwXgAEZKgoClM4so3O0409/lPun++1ndYYRP0lSWE2ETPo+Aab6TR7U1Q9Jauz1c | ||
| 2745 | 77NCR807VRMGsAnb/WP2OogKmW9+4c4bU2pEZiNRCHu8W1Ki/QY3OEBhj0qWuJA3 | ||
| 2746 | +GbHeJAAFS6LrVE1Uweoa2iu+U48BybNCAVwzDk/dr2l02cmAYamU9JgO3xDf1WK | ||
| 2747 | vJUawSg5TB9D0pH0clmKuVb8P7Sd2nCcdlqMQ1DujjByTd//SffGqWfZbawCEeI6 | ||
| 2748 | FiWnWAjLb1NBnEg4R2gz0dfHj9R0IdTDBZB6/86WiLEVKV0jq9BgoRJP3vQXzTLl | ||
| 2749 | yb/IQ639Lo7xr+L0mPoSHyDYwKcMhcWQ9DstliaxLL5Mq+ux0orJ23gTDx4JnW2P | ||
| 2750 | AJ8C2sH6H3p6CcRK5ogql5+Ji/03X186zjhZhkuvcQu02PJwT58yE+Owp1fl2tpD | ||
| 2751 | y4Q08ijE6m30Ku/Ba3ba+367hTzSU8JNvnHhRdH9I2cNE3X7z2VnIp2usAnRCf8d | ||
| 2752 | NL/+I5c30jn6PQ0GC7TbO6Orb1wdtn7os4I07QZcJA== | ||
| 2753 | -----END CERTIFICATE----- | ||
| 2754 | |||
| 2755 | # Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center | ||
| 2756 | # Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center | ||
| 2757 | # Label: "T-TeleSec GlobalRoot Class 2" | ||
| 2758 | # Serial: 1 | ||
| 2759 | # MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a | ||
| 2760 | # SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9 | ||
| 2761 | # SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52 | ||
| 2762 | -----BEGIN CERTIFICATE----- | ||
| 2763 | MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx | ||
| 2764 | KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd | ||
| 2765 | BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl | ||
| 2766 | YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1 | ||
| 2767 | OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy | ||
| 2768 | aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 | ||
| 2769 | ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G | ||
| 2770 | CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd | ||
| 2771 | AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC | ||
| 2772 | FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi | ||
| 2773 | 1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq | ||
| 2774 | jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ | ||
| 2775 | wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj | ||
| 2776 | QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/ | ||
| 2777 | WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy | ||
| 2778 | NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC | ||
| 2779 | uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw | ||
| 2780 | IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6 | ||
| 2781 | g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN | ||
| 2782 | 9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP | ||
| 2783 | BSeOE6Fuwg== | ||
| 2784 | -----END CERTIFICATE----- | ||
| 2785 | |||
| 2786 | # Issuer: CN=Atos TrustedRoot 2011 O=Atos | ||
| 2787 | # Subject: CN=Atos TrustedRoot 2011 O=Atos | ||
| 2788 | # Label: "Atos TrustedRoot 2011" | ||
| 2789 | # Serial: 6643877497813316402 | ||
| 2790 | # MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56 | ||
| 2791 | # SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21 | ||
| 2792 | # SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74 | ||
| 2793 | -----BEGIN CERTIFICATE----- | ||
| 2794 | MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE | ||
| 2795 | AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG | ||
| 2796 | EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM | ||
| 2797 | FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC | ||
| 2798 | REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp | ||
| 2799 | Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM | ||
| 2800 | VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+ | ||
| 2801 | SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ | ||
| 2802 | 4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L | ||
| 2803 | cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi | ||
| 2804 | eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV | ||
| 2805 | HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG | ||
| 2806 | A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3 | ||
| 2807 | DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j | ||
| 2808 | vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP | ||
| 2809 | DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc | ||
| 2810 | maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D | ||
| 2811 | lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv | ||
| 2812 | KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed | ||
| 2813 | -----END CERTIFICATE----- | ||
| 2814 | |||
| 2815 | # Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited | ||
| 2816 | # Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited | ||
| 2817 | # Label: "QuoVadis Root CA 1 G3" | ||
| 2818 | # Serial: 687049649626669250736271037606554624078720034195 | ||
| 2819 | # MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab | ||
| 2820 | # SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67 | ||
| 2821 | # SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74 | ||
| 2822 | -----BEGIN CERTIFICATE----- | ||
| 2823 | MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL | ||
| 2824 | BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc | ||
| 2825 | BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00 | ||
| 2826 | MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM | ||
| 2827 | aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG | ||
| 2828 | SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV | ||
| 2829 | wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe | ||
| 2830 | rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341 | ||
| 2831 | 68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh | ||
| 2832 | 4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp | ||
| 2833 | UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o | ||
| 2834 | abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc | ||
| 2835 | 3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G | ||
| 2836 | KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt | ||
| 2837 | hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO | ||
| 2838 | Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt | ||
| 2839 | zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB | ||
| 2840 | BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD | ||
| 2841 | ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC | ||
| 2842 | MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2 | ||
| 2843 | cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN | ||
| 2844 | qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5 | ||
| 2845 | YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv | ||
| 2846 | b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2 | ||
| 2847 | 8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k | ||
| 2848 | NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj | ||
| 2849 | ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp | ||
| 2850 | q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt | ||
| 2851 | nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD | ||
| 2852 | -----END CERTIFICATE----- | ||
| 2853 | |||
| 2854 | # Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited | ||
| 2855 | # Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited | ||
| 2856 | # Label: "QuoVadis Root CA 2 G3" | ||
| 2857 | # Serial: 390156079458959257446133169266079962026824725800 | ||
| 2858 | # MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06 | ||
| 2859 | # SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36 | ||
| 2860 | # SHA256 Fingerprint: 8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40 | ||
| 2861 | -----BEGIN CERTIFICATE----- | ||
| 2862 | MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL | ||
| 2863 | BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc | ||
| 2864 | BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00 | ||
| 2865 | MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM | ||
| 2866 | aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG | ||
| 2867 | SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf | ||
| 2868 | qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW | ||
| 2869 | n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym | ||
| 2870 | c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+ | ||
| 2871 | O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1 | ||
| 2872 | o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j | ||
| 2873 | IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq | ||
| 2874 | IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz | ||
| 2875 | 8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh | ||
| 2876 | vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l | ||
| 2877 | 7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG | ||
| 2878 | cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB | ||
| 2879 | BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD | ||
| 2880 | ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66 | ||
| 2881 | AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC | ||
| 2882 | roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga | ||
| 2883 | W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n | ||
| 2884 | lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE | ||
| 2885 | +V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV | ||
| 2886 | csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd | ||
| 2887 | dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg | ||
| 2888 | KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM | ||
| 2889 | HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4 | ||
| 2890 | WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M | ||
| 2891 | -----END CERTIFICATE----- | ||
| 2892 | |||
| 2893 | # Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited | ||
| 2894 | # Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited | ||
| 2895 | # Label: "QuoVadis Root CA 3 G3" | ||
| 2896 | # Serial: 268090761170461462463995952157327242137089239581 | ||
| 2897 | # MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7 | ||
| 2898 | # SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d | ||
| 2899 | # SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46 | ||
| 2900 | -----BEGIN CERTIFICATE----- | ||
| 2901 | MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL | ||
| 2902 | BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc | ||
| 2903 | BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00 | ||
| 2904 | MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM | ||
| 2905 | aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG | ||
| 2906 | SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR | ||
| 2907 | /xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu | ||
| 2908 | FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR | ||
| 2909 | U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c | ||
| 2910 | ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR | ||
| 2911 | FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k | ||
| 2912 | A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw | ||
| 2913 | eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl | ||
| 2914 | sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp | ||
| 2915 | VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q | ||
| 2916 | A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+ | ||
| 2917 | ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB | ||
| 2918 | BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD | ||
| 2919 | ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px | ||
| 2920 | KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI | ||
| 2921 | FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv | ||
| 2922 | oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg | ||
| 2923 | u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP | ||
| 2924 | 0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf | ||
| 2925 | 3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl | ||
| 2926 | 8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+ | ||
| 2927 | DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN | ||
| 2928 | PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/ | ||
| 2929 | ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0 | ||
| 2930 | -----END CERTIFICATE----- | ||
| 2931 | |||
| 2932 | # Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com | ||
| 2933 | # Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com | ||
| 2934 | # Label: "DigiCert Assured ID Root G2" | ||
| 2935 | # Serial: 15385348160840213938643033620894905419 | ||
| 2936 | # MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d | ||
| 2937 | # SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f | ||
| 2938 | # SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85 | ||
| 2939 | -----BEGIN CERTIFICATE----- | ||
| 2940 | MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl | ||
| 2941 | MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 | ||
| 2942 | d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv | ||
| 2943 | b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG | ||
| 2944 | EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl | ||
| 2945 | cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi | ||
| 2946 | MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA | ||
| 2947 | n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc | ||
| 2948 | biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp | ||
| 2949 | EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA | ||
| 2950 | bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu | ||
| 2951 | YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB | ||
| 2952 | AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW | ||
| 2953 | BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI | ||
| 2954 | QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I | ||
| 2955 | 0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni | ||
| 2956 | lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9 | ||
| 2957 | B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv | ||
| 2958 | ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo | ||
| 2959 | IhNzbM8m9Yop5w== | ||
| 2960 | -----END CERTIFICATE----- | ||
| 2961 | |||
| 2962 | # Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com | ||
| 2963 | # Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com | ||
| 2964 | # Label: "DigiCert Assured ID Root G3" | ||
| 2965 | # Serial: 15459312981008553731928384953135426796 | ||
| 2966 | # MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb | ||
| 2967 | # SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89 | ||
| 2968 | # SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2 | ||
| 2969 | -----BEGIN CERTIFICATE----- | ||
| 2970 | MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw | ||
| 2971 | CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu | ||
| 2972 | ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg | ||
| 2973 | RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV | ||
| 2974 | UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu | ||
| 2975 | Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq | ||
| 2976 | hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf | ||
| 2977 | Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q | ||
| 2978 | RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ | ||
| 2979 | BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD | ||
| 2980 | AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY | ||
| 2981 | JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv | ||
| 2982 | 6pZjamVFkpUBtA== | ||
| 2983 | -----END CERTIFICATE----- | ||
| 2984 | |||
| 2985 | # Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com | ||
| 2986 | # Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com | ||
| 2987 | # Label: "DigiCert Global Root G2" | ||
| 2988 | # Serial: 4293743540046975378534879503202253541 | ||
| 2989 | # MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44 | ||
| 2990 | # SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4 | ||
| 2991 | # SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f | ||
| 2992 | -----BEGIN CERTIFICATE----- | ||
| 2993 | MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh | ||
| 2994 | MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 | ||
| 2995 | d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH | ||
| 2996 | MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT | ||
| 2997 | MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j | ||
| 2998 | b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG | ||
| 2999 | 9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI | ||
| 3000 | 2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx | ||
| 3001 | 1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ | ||
| 3002 | q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz | ||
| 3003 | tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ | ||
| 3004 | vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP | ||
| 3005 | BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV | ||
| 3006 | 5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY | ||
| 3007 | 1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4 | ||
| 3008 | NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG | ||
| 3009 | Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91 | ||
| 3010 | 8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe | ||
| 3011 | pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl | ||
| 3012 | MrY= | ||
| 3013 | -----END CERTIFICATE----- | ||
| 3014 | |||
| 3015 | # Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com | ||
| 3016 | # Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com | ||
| 3017 | # Label: "DigiCert Global Root G3" | ||
| 3018 | # Serial: 7089244469030293291760083333884364146 | ||
| 3019 | # MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca | ||
| 3020 | # SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e | ||
| 3021 | # SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0 | ||
| 3022 | -----BEGIN CERTIFICATE----- | ||
| 3023 | MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw | ||
| 3024 | CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu | ||
| 3025 | ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe | ||
| 3026 | Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw | ||
| 3027 | EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x | ||
| 3028 | IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF | ||
| 3029 | K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG | ||
| 3030 | fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO | ||
| 3031 | Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd | ||
| 3032 | BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx | ||
| 3033 | AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/ | ||
| 3034 | oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8 | ||
| 3035 | sycX | ||
| 3036 | -----END CERTIFICATE----- | ||
| 3037 | |||
| 3038 | # Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com | ||
| 3039 | # Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com | ||
| 3040 | # Label: "DigiCert Trusted Root G4" | ||
| 3041 | # Serial: 7451500558977370777930084869016614236 | ||
| 3042 | # MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49 | ||
| 3043 | # SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4 | ||
| 3044 | # SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88 | ||
| 3045 | -----BEGIN CERTIFICATE----- | ||
| 3046 | MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi | ||
| 3047 | MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 | ||
| 3048 | d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg | ||
| 3049 | RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV | ||
| 3050 | UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu | ||
| 3051 | Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG | ||
| 3052 | SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y | ||
| 3053 | ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If | ||
| 3054 | xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV | ||
| 3055 | ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO | ||
| 3056 | DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ | ||
| 3057 | jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/ | ||
| 3058 | CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi | ||
| 3059 | EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM | ||
| 3060 | fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY | ||
| 3061 | uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK | ||
| 3062 | chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t | ||
| 3063 | 9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB | ||
| 3064 | hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD | ||
| 3065 | ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2 | ||
| 3066 | SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd | ||
| 3067 | +SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc | ||
| 3068 | fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa | ||
| 3069 | sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N | ||
| 3070 | cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N | ||
| 3071 | 0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie | ||
| 3072 | 4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI | ||
| 3073 | r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1 | ||
| 3074 | /YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm | ||
| 3075 | gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+ | ||
| 3076 | -----END CERTIFICATE----- | ||
| 3077 | |||
| 3078 | # Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited | ||
| 3079 | # Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited | ||
| 3080 | # Label: "COMODO RSA Certification Authority" | ||
| 3081 | # Serial: 101909084537582093308941363524873193117 | ||
| 3082 | # MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18 | ||
| 3083 | # SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4 | ||
| 3084 | # SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34 | ||
| 3085 | -----BEGIN CERTIFICATE----- | ||
| 3086 | MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB | ||
| 3087 | hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G | ||
| 3088 | A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV | ||
| 3089 | BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5 | ||
| 3090 | MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT | ||
| 3091 | EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR | ||
| 3092 | Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh | ||
| 3093 | dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR | ||
| 3094 | 6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X | ||
| 3095 | pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC | ||
| 3096 | 9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV | ||
| 3097 | /erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf | ||
| 3098 | Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z | ||
| 3099 | +pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w | ||
| 3100 | qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah | ||
| 3101 | SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC | ||
| 3102 | u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf | ||
| 3103 | Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq | ||
| 3104 | crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E | ||
| 3105 | FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB | ||
| 3106 | /wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl | ||
| 3107 | wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM | ||
| 3108 | 4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV | ||
| 3109 | 2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna | ||
| 3110 | FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ | ||
| 3111 | CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK | ||
| 3112 | boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke | ||
| 3113 | jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL | ||
| 3114 | S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb | ||
| 3115 | QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl | ||
| 3116 | 0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB | ||
| 3117 | NVOFBkpdn627G190 | ||
| 3118 | -----END CERTIFICATE----- | ||
| 3119 | |||
| 3120 | # Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network | ||
| 3121 | # Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network | ||
| 3122 | # Label: "USERTrust RSA Certification Authority" | ||
| 3123 | # Serial: 2645093764781058787591871645665788717 | ||
| 3124 | # MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5 | ||
| 3125 | # SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e | ||
| 3126 | # SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2 | ||
| 3127 | -----BEGIN CERTIFICATE----- | ||
| 3128 | MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB | ||
| 3129 | iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl | ||
| 3130 | cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV | ||
| 3131 | BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw | ||
| 3132 | MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV | ||
| 3133 | BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU | ||
| 3134 | aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy | ||
| 3135 | dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK | ||
| 3136 | AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B | ||
| 3137 | 3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY | ||
| 3138 | tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/ | ||
| 3139 | Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2 | ||
| 3140 | VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT | ||
| 3141 | 79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6 | ||
| 3142 | c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT | ||
| 3143 | Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l | ||
| 3144 | c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee | ||
| 3145 | UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE | ||
| 3146 | Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd | ||
| 3147 | BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G | ||
| 3148 | A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF | ||
| 3149 | Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO | ||
| 3150 | VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3 | ||
| 3151 | ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs | ||
| 3152 | 8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR | ||
| 3153 | iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze | ||
| 3154 | Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ | ||
| 3155 | XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/ | ||
| 3156 | qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB | ||
| 3157 | VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB | ||
| 3158 | L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG | ||
| 3159 | jjxDah2nGN59PRbxYvnKkKj9 | ||
| 3160 | -----END CERTIFICATE----- | ||
| 3161 | |||
| 3162 | # Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network | ||
| 3163 | # Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network | ||
| 3164 | # Label: "USERTrust ECC Certification Authority" | ||
| 3165 | # Serial: 123013823720199481456569720443997572134 | ||
| 3166 | # MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1 | ||
| 3167 | # SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0 | ||
| 3168 | # SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a | ||
| 3169 | -----BEGIN CERTIFICATE----- | ||
| 3170 | MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL | ||
| 3171 | MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl | ||
| 3172 | eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT | ||
| 3173 | JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx | ||
| 3174 | MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT | ||
| 3175 | Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg | ||
| 3176 | VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm | ||
| 3177 | aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo | ||
| 3178 | I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng | ||
| 3179 | o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G | ||
| 3180 | A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD | ||
| 3181 | VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB | ||
| 3182 | zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW | ||
| 3183 | RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg= | ||
| 3184 | -----END CERTIFICATE----- | ||
| 3185 | |||
| 3186 | # Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 | ||
| 3187 | # Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 | ||
| 3188 | # Label: "GlobalSign ECC Root CA - R4" | ||
| 3189 | # Serial: 14367148294922964480859022125800977897474 | ||
| 3190 | # MD5 Fingerprint: 20:f0:27:68:d1:7e:a0:9d:0e:e6:2a:ca:df:5c:89:8e | ||
| 3191 | # SHA1 Fingerprint: 69:69:56:2e:40:80:f4:24:a1:e7:19:9f:14:ba:f3:ee:58:ab:6a:bb | ||
| 3192 | # SHA256 Fingerprint: be:c9:49:11:c2:95:56:76:db:6c:0a:55:09:86:d7:6e:3b:a0:05:66:7c:44:2c:97:62:b4:fb:b7:73:de:22:8c | ||
| 3193 | -----BEGIN CERTIFICATE----- | ||
| 3194 | MIIB4TCCAYegAwIBAgIRKjikHJYKBN5CsiilC+g0mAIwCgYIKoZIzj0EAwIwUDEk | ||
| 3195 | MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI0MRMwEQYDVQQKEwpH | ||
| 3196 | bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX | ||
| 3197 | DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD | ||
| 3198 | QSAtIFI0MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu | ||
| 3199 | MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEuMZ5049sJQ6fLjkZHAOkrprlOQcJ | ||
| 3200 | FspjsbmG+IpXwVfOQvpzofdlQv8ewQCybnMO/8ch5RikqtlxP6jUuc6MHaNCMEAw | ||
| 3201 | DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFFSwe61F | ||
| 3202 | uOJAf/sKbvu+M8k8o4TVMAoGCCqGSM49BAMCA0gAMEUCIQDckqGgE6bPA7DmxCGX | ||
| 3203 | kPoUVy0D7O48027KqGx2vKLeuwIgJ6iFJzWbVsaj8kfSt24bAgAXqmemFZHe+pTs | ||
| 3204 | ewv4n4Q= | ||
| 3205 | -----END CERTIFICATE----- | ||
| 3206 | |||
| 3207 | # Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 | ||
| 3208 | # Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 | ||
| 3209 | # Label: "GlobalSign ECC Root CA - R5" | ||
| 3210 | # Serial: 32785792099990507226680698011560947931244 | ||
| 3211 | # MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08 | ||
| 3212 | # SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa | ||
| 3213 | # SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24 | ||
| 3214 | -----BEGIN CERTIFICATE----- | ||
| 3215 | MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk | ||
| 3216 | MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH | ||
| 3217 | bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX | ||
| 3218 | DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD | ||
| 3219 | QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu | ||
| 3220 | MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc | ||
| 3221 | 8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke | ||
| 3222 | hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD | ||
| 3223 | VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI | ||
| 3224 | KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg | ||
| 3225 | 515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO | ||
| 3226 | xwy8p2Fp8fc74SrL+SvzZpA3 | ||
| 3227 | -----END CERTIFICATE----- | ||
| 3228 | |||
| 3229 | # Issuer: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden | ||
| 3230 | # Subject: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden | ||
| 3231 | # Label: "Staat der Nederlanden Root CA - G3" | ||
| 3232 | # Serial: 10003001 | ||
| 3233 | # MD5 Fingerprint: 0b:46:67:07:db:10:2f:19:8c:35:50:60:d1:0b:f4:37 | ||
| 3234 | # SHA1 Fingerprint: d8:eb:6b:41:51:92:59:e0:f3:e7:85:00:c0:3d:b6:88:97:c9:ee:fc | ||
| 3235 | # SHA256 Fingerprint: 3c:4f:b0:b9:5a:b8:b3:00:32:f4:32:b8:6f:53:5f:e1:72:c1:85:d0:fd:39:86:58:37:cf:36:18:7f:a6:f4:28 | ||
| 3236 | -----BEGIN CERTIFICATE----- | ||
| 3237 | MIIFdDCCA1ygAwIBAgIEAJiiOTANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO | ||
| 3238 | TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh | ||
| 3239 | dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEczMB4XDTEzMTExNDExMjg0MloX | ||
| 3240 | DTI4MTExMzIzMDAwMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl | ||
| 3241 | ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv | ||
| 3242 | b3QgQ0EgLSBHMzCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAL4yolQP | ||
| 3243 | cPssXFnrbMSkUeiFKrPMSjTysF/zDsccPVMeiAho2G89rcKezIJnByeHaHE6n3WW | ||
| 3244 | IkYFsO2tx1ueKt6c/DrGlaf1F2cY5y9JCAxcz+bMNO14+1Cx3Gsy8KL+tjzk7FqX | ||
| 3245 | xz8ecAgwoNzFs21v0IJyEavSgWhZghe3eJJg+szeP4TrjTgzkApyI/o1zCZxMdFy | ||
| 3246 | KJLZWyNtZrVtB0LrpjPOktvA9mxjeM3KTj215VKb8b475lRgsGYeCasH/lSJEULR | ||
| 3247 | 9yS6YHgamPfJEf0WwTUaVHXvQ9Plrk7O53vDxk5hUUurmkVLoR9BvUhTFXFkC4az | ||
| 3248 | 5S6+zqQbwSmEorXLCCN2QyIkHxcE1G6cxvx/K2Ya7Irl1s9N9WMJtxU51nus6+N8 | ||
| 3249 | 6U78dULI7ViVDAZCopz35HCz33JvWjdAidiFpNfxC95DGdRKWCyMijmev4SH8RY7 | ||
| 3250 | Ngzp07TKbBlBUgmhHbBqv4LvcFEhMtwFdozL92TkA1CvjJFnq8Xy7ljY3r735zHP | ||
| 3251 | bMk7ccHViLVlvMDoFxcHErVc0qsgk7TmgoNwNsXNo42ti+yjwUOH5kPiNL6VizXt | ||
| 3252 | BznaqB16nzaeErAMZRKQFWDZJkBE41ZgpRDUajz9QdwOWke275dhdU/Z/seyHdTt | ||
| 3253 | XUmzqWrLZoQT1Vyg3N9udwbRcXXIV2+vD3dbAgMBAAGjQjBAMA8GA1UdEwEB/wQF | ||
| 3254 | MAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRUrfrHkleuyjWcLhL75Lpd | ||
| 3255 | INyUVzANBgkqhkiG9w0BAQsFAAOCAgEAMJmdBTLIXg47mAE6iqTnB/d6+Oea31BD | ||
| 3256 | U5cqPco8R5gu4RV78ZLzYdqQJRZlwJ9UXQ4DO1t3ApyEtg2YXzTdO2PCwyiBwpwp | ||
| 3257 | LiniyMMB8jPqKqrMCQj3ZWfGzd/TtiunvczRDnBfuCPRy5FOCvTIeuXZYzbB1N/8 | ||
| 3258 | Ipf3YF3qKS9Ysr1YvY2WTxB1v0h7PVGHoTx0IsL8B3+A3MSs/mrBcDCw6Y5p4ixp | ||
| 3259 | gZQJut3+TcCDjJRYwEYgr5wfAvg1VUkvRtTA8KCWAg8zxXHzniN9lLf9OtMJgwYh | ||
| 3260 | /WA9rjLA0u6NpvDntIJ8CsxwyXmA+P5M9zWEGYox+wrZ13+b8KKaa8MFSu1BYBQw | ||
| 3261 | 0aoRQm7TIwIEC8Zl3d1Sd9qBa7Ko+gE4uZbqKmxnl4mUnrzhVNXkanjvSr0rmj1A | ||
| 3262 | fsbAddJu+2gw7OyLnflJNZoaLNmzlTnVHpL3prllL+U9bTpITAjc5CgSKL59NVzq | ||
| 3263 | 4BZ+Extq1z7XnvwtdbLBFNUjA9tbbws+eC8N3jONFrdI54OagQ97wUNNVQQXOEpR | ||
| 3264 | 1VmiiXTTn74eS9fGbbeIJG9gkaSChVtWQbzQRKtqE77RLFi3EjNYsjdj3BP1lB0/ | ||
| 3265 | QFH1T/U67cjF68IeHRaVesd+QnGTbksVtzDfqu1XhUisHWrdOWnk4Xl4vs4Fv6EM | ||
| 3266 | 94B7IWcnMFk= | ||
| 3267 | -----END CERTIFICATE----- | ||
| 3268 | |||
| 3269 | # Issuer: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden | ||
| 3270 | # Subject: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden | ||
| 3271 | # Label: "Staat der Nederlanden EV Root CA" | ||
| 3272 | # Serial: 10000013 | ||
| 3273 | # MD5 Fingerprint: fc:06:af:7b:e8:1a:f1:9a:b4:e8:d2:70:1f:c0:f5:ba | ||
| 3274 | # SHA1 Fingerprint: 76:e2:7e:c1:4f:db:82:c1:c0:a6:75:b5:05:be:3d:29:b4:ed:db:bb | ||
| 3275 | # SHA256 Fingerprint: 4d:24:91:41:4c:fe:95:67:46:ec:4c:ef:a6:cf:6f:72:e2:8a:13:29:43:2f:9d:8a:90:7a:c4:cb:5d:ad:c1:5a | ||
| 3276 | -----BEGIN CERTIFICATE----- | ||
| 3277 | MIIFcDCCA1igAwIBAgIEAJiWjTANBgkqhkiG9w0BAQsFADBYMQswCQYDVQQGEwJO | ||
| 3278 | TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSkwJwYDVQQDDCBTdGFh | ||
| 3279 | dCBkZXIgTmVkZXJsYW5kZW4gRVYgUm9vdCBDQTAeFw0xMDEyMDgxMTE5MjlaFw0y | ||
| 3280 | MjEyMDgxMTEwMjhaMFgxCzAJBgNVBAYTAk5MMR4wHAYDVQQKDBVTdGFhdCBkZXIg | ||
| 3281 | TmVkZXJsYW5kZW4xKTAnBgNVBAMMIFN0YWF0IGRlciBOZWRlcmxhbmRlbiBFViBS | ||
| 3282 | b290IENBMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA48d+ifkkSzrS | ||
| 3283 | M4M1LGns3Amk41GoJSt5uAg94JG6hIXGhaTK5skuU6TJJB79VWZxXSzFYGgEt9nC | ||
| 3284 | UiY4iKTWO0Cmws0/zZiTs1QUWJZV1VD+hq2kY39ch/aO5ieSZxeSAgMs3NZmdO3d | ||
| 3285 | Z//BYY1jTw+bbRcwJu+r0h8QoPnFfxZpgQNH7R5ojXKhTbImxrpsX23Wr9GxE46p | ||
| 3286 | rfNeaXUmGD5BKyF/7otdBwadQ8QpCiv8Kj6GyzyDOvnJDdrFmeK8eEEzduG/L13l | ||
| 3287 | pJhQDBXd4Pqcfzho0LKmeqfRMb1+ilgnQ7O6M5HTp5gVXJrm0w912fxBmJc+qiXb | ||
| 3288 | j5IusHsMX/FjqTf5m3VpTCgmJdrV8hJwRVXj33NeN/UhbJCONVrJ0yPr08C+eKxC | ||
| 3289 | KFhmpUZtcALXEPlLVPxdhkqHz3/KRawRWrUgUY0viEeXOcDPusBCAUCZSCELa6fS | ||
| 3290 | /ZbV0b5GnUngC6agIk440ME8MLxwjyx1zNDFjFE7PZQIZCZhfbnDZY8UnCHQqv0X | ||
| 3291 | cgOPvZuM5l5Tnrmd74K74bzickFbIZTTRTeU0d8JOV3nI6qaHcptqAqGhYqCvkIH | ||
| 3292 | 1vI4gnPah1vlPNOePqc7nvQDs/nxfRN0Av+7oeX6AHkcpmZBiFxgV6YuCcS6/ZrP | ||
| 3293 | px9Aw7vMWgpVSzs4dlG4Y4uElBbmVvMCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB | ||
| 3294 | /zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFP6rAJCYniT8qcwaivsnuL8wbqg7 | ||
| 3295 | MA0GCSqGSIb3DQEBCwUAA4ICAQDPdyxuVr5Os7aEAJSrR8kN0nbHhp8dB9O2tLsI | ||
| 3296 | eK9p0gtJ3jPFrK3CiAJ9Brc1AsFgyb/E6JTe1NOpEyVa/m6irn0F3H3zbPB+po3u | ||
| 3297 | 2dfOWBfoqSmuc0iH55vKbimhZF8ZE/euBhD/UcabTVUlT5OZEAFTdfETzsemQUHS | ||
| 3298 | v4ilf0X8rLiltTMMgsT7B/Zq5SWEXwbKwYY5EdtYzXc7LMJMD16a4/CrPmEbUCTC | ||
| 3299 | wPTxGfARKbalGAKb12NMcIxHowNDXLldRqANb/9Zjr7dn3LDWyvfjFvO5QxGbJKy | ||
| 3300 | CqNMVEIYFRIYvdr8unRu/8G2oGTYqV9Vrp9canaW2HNnh/tNf1zuacpzEPuKqf2e | ||
| 3301 | vTY4SUmH9A4U8OmHuD+nT3pajnnUk+S7aFKErGzp85hwVXIy+TSrK0m1zSBi5Dp6 | ||
| 3302 | Z2Orltxtrpfs/J92VoguZs9btsmksNcFuuEnL5O7Jiqik7Ab846+HUCjuTaPPoIa | ||
| 3303 | Gl6I6lD4WeKDRikL40Rc4ZW2aZCaFG+XroHPaO+Zmr615+F/+PoTRxZMzG0IQOeL | ||
| 3304 | eG9QgkRQP2YGiqtDhFZKDyAthg710tvSeopLzaXoTvFeJiUBWSOgftL2fiFX1ye8 | ||
| 3305 | FVdMpEbB4IMeDExNH08GGeL5qPQ6gqGyeUN51q1veieQA6TqJIc/2b3Z6fJfUEkc | ||
| 3306 | 7uzXLg== | ||
| 3307 | -----END CERTIFICATE----- | ||
| 3308 | |||
| 3309 | # Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust | ||
| 3310 | # Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust | ||
| 3311 | # Label: "IdenTrust Commercial Root CA 1" | ||
| 3312 | # Serial: 13298821034946342390520003877796839426 | ||
| 3313 | # MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7 | ||
| 3314 | # SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25 | ||
| 3315 | # SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae | ||
| 3316 | -----BEGIN CERTIFICATE----- | ||
| 3317 | MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK | ||
| 3318 | MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu | ||
| 3319 | VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw | ||
| 3320 | MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw | ||
| 3321 | JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG | ||
| 3322 | SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT | ||
| 3323 | 3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU | ||
| 3324 | +ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp | ||
| 3325 | S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1 | ||
| 3326 | bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi | ||
| 3327 | T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL | ||
| 3328 | vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK | ||
| 3329 | Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK | ||
| 3330 | dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT | ||
| 3331 | c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv | ||
| 3332 | l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N | ||
| 3333 | iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB | ||
| 3334 | /zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD | ||
| 3335 | ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH | ||
| 3336 | 6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt | ||
| 3337 | LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93 | ||
| 3338 | nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3 | ||
| 3339 | +wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK | ||
| 3340 | W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT | ||
| 3341 | AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq | ||
| 3342 | l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG | ||
| 3343 | 4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ | ||
| 3344 | mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A | ||
| 3345 | 7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H | ||
| 3346 | -----END CERTIFICATE----- | ||
| 3347 | |||
| 3348 | # Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust | ||
| 3349 | # Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust | ||
| 3350 | # Label: "IdenTrust Public Sector Root CA 1" | ||
| 3351 | # Serial: 13298821034946342390521976156843933698 | ||
| 3352 | # MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba | ||
| 3353 | # SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd | ||
| 3354 | # SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f | ||
| 3355 | -----BEGIN CERTIFICATE----- | ||
| 3356 | MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN | ||
| 3357 | MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu | ||
| 3358 | VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN | ||
| 3359 | MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0 | ||
| 3360 | MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi | ||
| 3361 | MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7 | ||
| 3362 | ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy | ||
| 3363 | RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS | ||
| 3364 | bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF | ||
| 3365 | /YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R | ||
| 3366 | 3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw | ||
| 3367 | EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy | ||
| 3368 | 9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V | ||
| 3369 | GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ | ||
| 3370 | 2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV | ||
| 3371 | WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD | ||
| 3372 | W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ | ||
| 3373 | BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN | ||
| 3374 | AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj | ||
| 3375 | t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV | ||
| 3376 | DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9 | ||
| 3377 | TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G | ||
| 3378 | lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW | ||
| 3379 | mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df | ||
| 3380 | WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5 | ||
| 3381 | +bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ | ||
| 3382 | tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA | ||
| 3383 | GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv | ||
| 3384 | 8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c | ||
| 3385 | -----END CERTIFICATE----- | ||
| 3386 | |||
| 3387 | # Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only | ||
| 3388 | # Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only | ||
| 3389 | # Label: "Entrust Root Certification Authority - G2" | ||
| 3390 | # Serial: 1246989352 | ||
| 3391 | # MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2 | ||
| 3392 | # SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4 | ||
| 3393 | # SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39 | ||
| 3394 | -----BEGIN CERTIFICATE----- | ||
| 3395 | MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC | ||
| 3396 | VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50 | ||
| 3397 | cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs | ||
| 3398 | IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz | ||
| 3399 | dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy | ||
| 3400 | NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu | ||
| 3401 | dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt | ||
| 3402 | dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0 | ||
| 3403 | aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj | ||
| 3404 | YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK | ||
| 3405 | AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T | ||
| 3406 | RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN | ||
| 3407 | cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW | ||
| 3408 | wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1 | ||
| 3409 | U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0 | ||
| 3410 | jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP | ||
| 3411 | BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN | ||
| 3412 | BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/ | ||
| 3413 | jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ | ||
| 3414 | Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v | ||
| 3415 | 1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R | ||
| 3416 | nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH | ||
| 3417 | VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g== | ||
| 3418 | -----END CERTIFICATE----- | ||
| 3419 | |||
| 3420 | # Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only | ||
| 3421 | # Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only | ||
| 3422 | # Label: "Entrust Root Certification Authority - EC1" | ||
| 3423 | # Serial: 51543124481930649114116133369 | ||
| 3424 | # MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc | ||
| 3425 | # SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47 | ||
| 3426 | # SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5 | ||
| 3427 | -----BEGIN CERTIFICATE----- | ||
| 3428 | MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG | ||
| 3429 | A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3 | ||
| 3430 | d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu | ||
| 3431 | dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq | ||
| 3432 | RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy | ||
| 3433 | MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD | ||
| 3434 | VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0 | ||
| 3435 | L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g | ||
| 3436 | Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD | ||
| 3437 | ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi | ||
| 3438 | A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt | ||
| 3439 | ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH | ||
| 3440 | Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O | ||
| 3441 | BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC | ||
| 3442 | R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX | ||
| 3443 | hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G | ||
| 3444 | -----END CERTIFICATE----- | ||
| 3445 | |||
| 3446 | # Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority | ||
| 3447 | # Subject: CN=CFCA EV ROOT O=China Financial Certification Authority | ||
| 3448 | # Label: "CFCA EV ROOT" | ||
| 3449 | # Serial: 407555286 | ||
| 3450 | # MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30 | ||
| 3451 | # SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83 | ||
| 3452 | # SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd | ||
| 3453 | -----BEGIN CERTIFICATE----- | ||
| 3454 | MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD | ||
| 3455 | TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y | ||
| 3456 | aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx | ||
| 3457 | MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j | ||
| 3458 | aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP | ||
| 3459 | T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03 | ||
| 3460 | sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL | ||
| 3461 | TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5 | ||
| 3462 | /ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp | ||
| 3463 | 7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz | ||
| 3464 | EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt | ||
| 3465 | hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP | ||
| 3466 | a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot | ||
| 3467 | aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg | ||
| 3468 | TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV | ||
| 3469 | PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv | ||
| 3470 | cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL | ||
| 3471 | tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd | ||
| 3472 | BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB | ||
| 3473 | ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT | ||
| 3474 | ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL | ||
| 3475 | jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS | ||
| 3476 | ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy | ||
| 3477 | P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19 | ||
| 3478 | xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d | ||
| 3479 | Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN | ||
| 3480 | 5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe | ||
| 3481 | /v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z | ||
| 3482 | AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ | ||
| 3483 | 5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su | ||
| 3484 | -----END CERTIFICATE----- | ||
| 3485 | |||
| 3486 | # Issuer: CN=T\xdcRKTRUST Elektronik Sertifika Hizmet Sa\u011flay\u0131c\u0131s\u0131 H5 O=T\xdcRKTRUST Bilgi \u0130leti\u015fim ve Bili\u015fim G\xfcvenli\u011fi Hizmetleri A.\u015e. | ||
| 3487 | # Subject: CN=T\xdcRKTRUST Elektronik Sertifika Hizmet Sa\u011flay\u0131c\u0131s\u0131 H5 O=T\xdcRKTRUST Bilgi \u0130leti\u015fim ve Bili\u015fim G\xfcvenli\u011fi Hizmetleri A.\u015e. | ||
| 3488 | # Label: "T\xdcRKTRUST Elektronik Sertifika Hizmet Sa\u011flay\u0131c\u0131s\u0131 H5" | ||
| 3489 | # Serial: 156233699172481 | ||
| 3490 | # MD5 Fingerprint: da:70:8e:f0:22:df:93:26:f6:5f:9f:d3:15:06:52:4e | ||
| 3491 | # SHA1 Fingerprint: c4:18:f6:4d:46:d1:df:00:3d:27:30:13:72:43:a9:12:11:c6:75:fb | ||
| 3492 | # SHA256 Fingerprint: 49:35:1b:90:34:44:c1:85:cc:dc:5c:69:3d:24:d8:55:5c:b2:08:d6:a8:14:13:07:69:9f:4a:f0:63:19:9d:78 | ||
| 3493 | -----BEGIN CERTIFICATE----- | ||
| 3494 | MIIEJzCCAw+gAwIBAgIHAI4X/iQggTANBgkqhkiG9w0BAQsFADCBsTELMAkGA1UE | ||
| 3495 | BhMCVFIxDzANBgNVBAcMBkFua2FyYTFNMEsGA1UECgxEVMOcUktUUlVTVCBCaWxn | ||
| 3496 | aSDEsGxldGnFn2ltIHZlIEJpbGnFn2ltIEfDvHZlbmxpxJ9pIEhpem1ldGxlcmkg | ||
| 3497 | QS7Fni4xQjBABgNVBAMMOVTDnFJLVFJVU1QgRWxla3Ryb25payBTZXJ0aWZpa2Eg | ||
| 3498 | SGl6bWV0IFNhxJ9sYXnEsWPEsXPEsSBINTAeFw0xMzA0MzAwODA3MDFaFw0yMzA0 | ||
| 3499 | MjgwODA3MDFaMIGxMQswCQYDVQQGEwJUUjEPMA0GA1UEBwwGQW5rYXJhMU0wSwYD | ||
| 3500 | VQQKDERUw5xSS1RSVVNUIEJpbGdpIMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8 | ||
| 3501 | dmVubGnEn2kgSGl6bWV0bGVyaSBBLsWeLjFCMEAGA1UEAww5VMOcUktUUlVTVCBF | ||
| 3502 | bGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxIEg1MIIB | ||
| 3503 | IjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEApCUZ4WWe60ghUEoI5RHwWrom | ||
| 3504 | /4NZzkQqL/7hzmAD/I0Dpe3/a6i6zDQGn1k19uwsu537jVJp45wnEFPzpALFp/kR | ||
| 3505 | Gml1bsMdi9GYjZOHp3GXDSHHmflS0yxjXVW86B8BSLlg/kJK9siArs1mep5Fimh3 | ||
| 3506 | 4khon6La8eHBEJ/rPCmBp+EyCNSgBbGM+42WAA4+Jd9ThiI7/PS98wl+d+yG6w8z | ||
| 3507 | 5UNP9FR1bSmZLmZaQ9/LXMrI5Tjxfjs1nQ/0xVqhzPMggCTTV+wVunUlm+hkS7M0 | ||
| 3508 | hO8EuPbJbKoCPrZV4jI3X/xml1/N1p7HIL9Nxqw/dV8c7TKcfGkAaZHjIxhT6QID | ||
| 3509 | AQABo0IwQDAdBgNVHQ4EFgQUVpkHHtOsDGlktAxQR95DLL4gwPswDgYDVR0PAQH/ | ||
| 3510 | BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAJ5FdnsX | ||
| 3511 | SDLyOIspve6WSk6BGLFRRyDN0GSxDsnZAdkJzsiZ3GglE9Rc8qPoBP5yCccLqh0l | ||
| 3512 | VX6Wmle3usURehnmp349hQ71+S4pL+f5bFgWV1Al9j4uPqrtd3GqqpmWRgqujuwq | ||
| 3513 | URawXs3qZwQcWDD1YIq9pr1N5Za0/EKJAWv2cMhQOQwt1WbZyNKzMrcbGW3LM/nf | ||
| 3514 | peYVhDfwwvJllpKQd/Ct9JDpEXjXk4nAPQu6KfTomZ1yju2dL+6SfaHx/126M2CF | ||
| 3515 | Yv4HAqGEVka+lgqaE9chTLd8B59OTj+RdPsnnRHM3eaxynFNExc5JsUpISuTKWqW | ||
| 3516 | +qtB4Uu2NQvAmxU= | ||
| 3517 | -----END CERTIFICATE----- | ||
| 3518 | |||
| 3519 | # Issuer: CN=Certinomis - Root CA O=Certinomis OU=0002 433998903 | ||
| 3520 | # Subject: CN=Certinomis - Root CA O=Certinomis OU=0002 433998903 | ||
| 3521 | # Label: "Certinomis - Root CA" | ||
| 3522 | # Serial: 1 | ||
| 3523 | # MD5 Fingerprint: 14:0a:fd:8d:a8:28:b5:38:69:db:56:7e:61:22:03:3f | ||
| 3524 | # SHA1 Fingerprint: 9d:70:bb:01:a5:a4:a0:18:11:2e:f7:1c:01:b9:32:c5:34:e7:88:a8 | ||
| 3525 | # SHA256 Fingerprint: 2a:99:f5:bc:11:74:b7:3c:bb:1d:62:08:84:e0:1c:34:e5:1c:cb:39:78:da:12:5f:0e:33:26:88:83:bf:41:58 | ||
| 3526 | -----BEGIN CERTIFICATE----- | ||
| 3527 | MIIFkjCCA3qgAwIBAgIBATANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJGUjET | ||
| 3528 | MBEGA1UEChMKQ2VydGlub21pczEXMBUGA1UECxMOMDAwMiA0MzM5OTg5MDMxHTAb | ||
| 3529 | BgNVBAMTFENlcnRpbm9taXMgLSBSb290IENBMB4XDTEzMTAyMTA5MTcxOFoXDTMz | ||
| 3530 | MTAyMTA5MTcxOFowWjELMAkGA1UEBhMCRlIxEzARBgNVBAoTCkNlcnRpbm9taXMx | ||
| 3531 | FzAVBgNVBAsTDjAwMDIgNDMzOTk4OTAzMR0wGwYDVQQDExRDZXJ0aW5vbWlzIC0g | ||
| 3532 | Um9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANTMCQosP5L2 | ||
| 3533 | fxSeC5yaah1AMGT9qt8OHgZbn1CF6s2Nq0Nn3rD6foCWnoR4kkjW4znuzuRZWJfl | ||
| 3534 | LieY6pOod5tK8O90gC3rMB+12ceAnGInkYjwSond3IjmFPnVAy//ldu9n+ws+hQV | ||
| 3535 | WZUKxkd8aRi5pwP5ynapz8dvtF4F/u7BUrJ1Mofs7SlmO/NKFoL21prbcpjp3vDF | ||
| 3536 | TKWrteoB4owuZH9kb/2jJZOLyKIOSY008B/sWEUuNKqEUL3nskoTuLAPrjhdsKkb | ||
| 3537 | 5nPJWqHZZkCqqU2mNAKthH6yI8H7KsZn9DS2sJVqM09xRLWtwHkziOC/7aOgFLSc | ||
| 3538 | CbAK42C++PhmiM1b8XcF4LVzbsF9Ri6OSyemzTUK/eVNfaoqoynHWmgE6OXWk6Ri | ||
| 3539 | wsXm9E/G+Z8ajYJJGYrKWUM66A0ywfRMEwNvbqY/kXPLynNvEiCL7sCCeN5LLsJJ | ||
| 3540 | wx3tFvYk9CcbXFcx3FXuqB5vbKziRcxXV4p1VxngtViZSTYxPDMBbRZKzbgqg4SG | ||
| 3541 | m/lg0h9tkQPTYKbVPZrdd5A9NaSfD171UkRpucC63M9933zZxKyGIjK8e2uR73r4 | ||
| 3542 | F2iw4lNVYC2vPsKD2NkJK/DAZNuHi5HMkesE/Xa0lZrmFAYb1TQdvtj/dBxThZng | ||
| 3543 | WVJKYe2InmtJiUZ+IFrZ50rlau7SZRFDAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIB | ||
| 3544 | BjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTvkUz1pcMw6C8I6tNxIqSSaHh0 | ||
| 3545 | 2TAfBgNVHSMEGDAWgBTvkUz1pcMw6C8I6tNxIqSSaHh02TANBgkqhkiG9w0BAQsF | ||
| 3546 | AAOCAgEAfj1U2iJdGlg+O1QnurrMyOMaauo++RLrVl89UM7g6kgmJs95Vn6RHJk/ | ||
| 3547 | 0KGRHCwPT5iVWVO90CLYiF2cN/z7ZMF4jIuaYAnq1fohX9B0ZedQxb8uuQsLrbWw | ||
| 3548 | F6YSjNRieOpWauwK0kDDPAUwPk2Ut59KA9N9J0u2/kTO+hkzGm2kQtHdzMjI1xZS | ||
| 3549 | g081lLMSVX3l4kLr5JyTCcBMWwerx20RoFAXlCOotQqSD7J6wWAsOMwaplv/8gzj | ||
| 3550 | qh8c3LigkyfeY+N/IZ865Z764BNqdeuWXGKRlI5nU7aJ+BIJy29SWwNyhlCVCNSN | ||
| 3551 | h4YVH5Uk2KRvms6knZtt0rJ2BobGVgjF6wnaNsIbW0G+YSrjcOa4pvi2WsS9Iff/ | ||
| 3552 | ql+hbHY5ZtbqTFXhADObE5hjyW/QASAJN1LnDE8+zbz1X5YnpyACleAu6AdBBR8V | ||
| 3553 | btaw5BngDwKTACdyxYvRVB9dSsNAl35VpnzBMwQUAR1JIGkLGZOdblgi90AMRgwj | ||
| 3554 | Y/M50n92Uaf0yKHxDHYiI0ZSKS3io0EHVmmY0gUJvGnHWmHNj4FgFU2A3ZDifcRQ | ||
| 3555 | 8ow7bkrHxuaAKzyBvBGAFhAn1/DNP3nMcyrDflOR1m749fPH0FFNjkulW+YZFzvW | ||
| 3556 | gQncItzujrnEj1PhZ7szuIgVRs/taTX/dQ1G885x4cVrhkIGuUE= | ||
| 3557 | -----END CERTIFICATE----- | ||
| 3558 | |||
| 3559 | # Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed | ||
| 3560 | # Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed | ||
| 3561 | # Label: "OISTE WISeKey Global Root GB CA" | ||
| 3562 | # Serial: 157768595616588414422159278966750757568 | ||
| 3563 | # MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d | ||
| 3564 | # SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed | ||
| 3565 | # SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6 | ||
| 3566 | -----BEGIN CERTIFICATE----- | ||
| 3567 | MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt | ||
| 3568 | MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg | ||
| 3569 | Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i | ||
| 3570 | YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x | ||
| 3571 | CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG | ||
| 3572 | b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh | ||
| 3573 | bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3 | ||
| 3574 | HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx | ||
| 3575 | WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX | ||
| 3576 | 1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk | ||
| 3577 | u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P | ||
| 3578 | 99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r | ||
| 3579 | M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw | ||
| 3580 | AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB | ||
| 3581 | BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh | ||
| 3582 | cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5 | ||
| 3583 | gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO | ||
| 3584 | ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf | ||
| 3585 | aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic | ||
| 3586 | Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM= | ||
| 3587 | -----END CERTIFICATE----- | ||
| 3588 | |||
| 3589 | # Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. | ||
| 3590 | # Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. | ||
| 3591 | # Label: "SZAFIR ROOT CA2" | ||
| 3592 | # Serial: 357043034767186914217277344587386743377558296292 | ||
| 3593 | # MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99 | ||
| 3594 | # SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de | ||
| 3595 | # SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe | ||
| 3596 | -----BEGIN CERTIFICATE----- | ||
| 3597 | MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL | ||
| 3598 | BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6 | ||
| 3599 | ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw | ||
| 3600 | NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L | ||
| 3601 | cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg | ||
| 3602 | Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN | ||
| 3603 | QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT | ||
| 3604 | 3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw | ||
| 3605 | 3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6 | ||
| 3606 | 3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5 | ||
| 3607 | BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN | ||
| 3608 | XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD | ||
| 3609 | AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF | ||
| 3610 | AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw | ||
| 3611 | 8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG | ||
| 3612 | nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP | ||
| 3613 | oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy | ||
| 3614 | d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg | ||
| 3615 | LvWpCz/UXeHPhJ/iGcJfitYgHuNztw== | ||
| 3616 | -----END CERTIFICATE----- | ||
| 3617 | |||
| 3618 | # Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority | ||
| 3619 | # Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority | ||
| 3620 | # Label: "Certum Trusted Network CA 2" | ||
| 3621 | # Serial: 44979900017204383099463764357512596969 | ||
| 3622 | # MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2 | ||
| 3623 | # SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92 | ||
| 3624 | # SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04 | ||
| 3625 | -----BEGIN CERTIFICATE----- | ||
| 3626 | MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB | ||
| 3627 | gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu | ||
| 3628 | QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG | ||
| 3629 | A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz | ||
| 3630 | OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ | ||
| 3631 | VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp | ||
| 3632 | ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3 | ||
| 3633 | b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA | ||
| 3634 | DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn | ||
| 3635 | 0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB | ||
| 3636 | OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE | ||
| 3637 | fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E | ||
| 3638 | Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m | ||
| 3639 | o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i | ||
| 3640 | sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW | ||
| 3641 | OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez | ||
| 3642 | Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS | ||
| 3643 | adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n | ||
| 3644 | 3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD | ||
| 3645 | AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC | ||
| 3646 | AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ | ||
| 3647 | F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf | ||
| 3648 | CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29 | ||
| 3649 | XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm | ||
| 3650 | djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/ | ||
| 3651 | WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb | ||
| 3652 | AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq | ||
| 3653 | P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko | ||
| 3654 | b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj | ||
| 3655 | XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P | ||
| 3656 | 5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi | ||
| 3657 | DrW5viSP | ||
| 3658 | -----END CERTIFICATE----- | ||
| 3659 | |||
| 3660 | # Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority | ||
| 3661 | # Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority | ||
| 3662 | # Label: "Hellenic Academic and Research Institutions RootCA 2015" | ||
| 3663 | # Serial: 0 | ||
| 3664 | # MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce | ||
| 3665 | # SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6 | ||
| 3666 | # SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36 | ||
| 3667 | -----BEGIN CERTIFICATE----- | ||
| 3668 | MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix | ||
| 3669 | DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k | ||
| 3670 | IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT | ||
| 3671 | N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v | ||
| 3672 | dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG | ||
| 3673 | A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh | ||
| 3674 | ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx | ||
| 3675 | QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 | ||
| 3676 | dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC | ||
| 3677 | AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA | ||
| 3678 | 4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0 | ||
| 3679 | AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10 | ||
| 3680 | 4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C | ||
| 3681 | ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV | ||
| 3682 | 9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD | ||
| 3683 | gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6 | ||
| 3684 | Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq | ||
| 3685 | NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko | ||
| 3686 | LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc | ||
| 3687 | Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV | ||
| 3688 | HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd | ||
| 3689 | ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I | ||
| 3690 | XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI | ||
| 3691 | M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot | ||
| 3692 | 9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V | ||
| 3693 | Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea | ||
| 3694 | j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh | ||
| 3695 | X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ | ||
| 3696 | l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf | ||
| 3697 | bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4 | ||
| 3698 | pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK | ||
| 3699 | e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0 | ||
| 3700 | vm9qp/UsQu0yrbYhnr68 | ||
| 3701 | -----END CERTIFICATE----- | ||
| 3702 | |||
| 3703 | # Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority | ||
| 3704 | # Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority | ||
| 3705 | # Label: "Hellenic Academic and Research Institutions ECC RootCA 2015" | ||
| 3706 | # Serial: 0 | ||
| 3707 | # MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef | ||
| 3708 | # SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66 | ||
| 3709 | # SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33 | ||
| 3710 | -----BEGIN CERTIFICATE----- | ||
| 3711 | MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN | ||
| 3712 | BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl | ||
| 3713 | c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl | ||
| 3714 | bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv | ||
| 3715 | b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ | ||
| 3716 | BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj | ||
| 3717 | YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5 | ||
| 3718 | MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0 | ||
| 3719 | dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg | ||
| 3720 | QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa | ||
| 3721 | jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC | ||
| 3722 | MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi | ||
| 3723 | C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep | ||
| 3724 | lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof | ||
| 3725 | TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR | ||
| 3726 | -----END CERTIFICATE----- | ||
| 3727 | |||
| 3728 | # Issuer: CN=Certplus Root CA G1 O=Certplus | ||
| 3729 | # Subject: CN=Certplus Root CA G1 O=Certplus | ||
| 3730 | # Label: "Certplus Root CA G1" | ||
| 3731 | # Serial: 1491911565779898356709731176965615564637713 | ||
| 3732 | # MD5 Fingerprint: 7f:09:9c:f7:d9:b9:5c:69:69:56:d5:37:3e:14:0d:42 | ||
| 3733 | # SHA1 Fingerprint: 22:fd:d0:b7:fd:a2:4e:0d:ac:49:2c:a0:ac:a6:7b:6a:1f:e3:f7:66 | ||
| 3734 | # SHA256 Fingerprint: 15:2a:40:2b:fc:df:2c:d5:48:05:4d:22:75:b3:9c:7f:ca:3e:c0:97:80:78:b0:f0:ea:76:e5:61:a6:c7:43:3e | ||
| 3735 | -----BEGIN CERTIFICATE----- | ||
| 3736 | MIIFazCCA1OgAwIBAgISESBVg+QtPlRWhS2DN7cs3EYRMA0GCSqGSIb3DQEBDQUA | ||
| 3737 | MD4xCzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2Vy | ||
| 3738 | dHBsdXMgUm9vdCBDQSBHMTAeFw0xNDA1MjYwMDAwMDBaFw0zODAxMTUwMDAwMDBa | ||
| 3739 | MD4xCzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2Vy | ||
| 3740 | dHBsdXMgUm9vdCBDQSBHMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB | ||
| 3741 | ANpQh7bauKk+nWT6VjOaVj0W5QOVsjQcmm1iBdTYj+eJZJ+622SLZOZ5KmHNr49a | ||
| 3742 | iZFluVj8tANfkT8tEBXgfs+8/H9DZ6itXjYj2JizTfNDnjl8KvzsiNWI7nC9hRYt | ||
| 3743 | 6kuJPKNxQv4c/dMcLRC4hlTqQ7jbxofaqK6AJc96Jh2qkbBIb6613p7Y1/oA/caP | ||
| 3744 | 0FG7Yn2ksYyy/yARujVjBYZHYEMzkPZHogNPlk2dT8Hq6pyi/jQu3rfKG3akt62f | ||
| 3745 | 6ajUeD94/vI4CTYd0hYCyOwqaK/1jpTvLRN6HkJKHRUxrgwEV/xhc/MxVoYxgKDE | ||
| 3746 | EW4wduOU8F8ExKyHcomYxZ3MVwia9Az8fXoFOvpHgDm2z4QTd28n6v+WZxcIbekN | ||
| 3747 | 1iNQMLAVdBM+5S//Ds3EC0pd8NgAM0lm66EYfFkuPSi5YXHLtaW6uOrc4nBvCGrc | ||
| 3748 | h2c0798wct3zyT8j/zXhviEpIDCB5BmlIOklynMxdCm+4kLV87ImZsdo/Rmz5yCT | ||
| 3749 | mehd4F6H50boJZwKKSTUzViGUkAksnsPmBIgJPaQbEfIDbsYIC7Z/fyL8inqh3SV | ||
| 3750 | 4EJQeIQEQWGw9CEjjy3LKCHyamz0GqbFFLQ3ZU+V/YDI+HLlJWvEYLF7bY5KinPO | ||
| 3751 | WftwenMGE9nTdDckQQoRb5fc5+R+ob0V8rqHDz1oihYHAgMBAAGjYzBhMA4GA1Ud | ||
| 3752 | DwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBSowcCbkahDFXxd | ||
| 3753 | Bie0KlHYlwuBsTAfBgNVHSMEGDAWgBSowcCbkahDFXxdBie0KlHYlwuBsTANBgkq | ||
| 3754 | hkiG9w0BAQ0FAAOCAgEAnFZvAX7RvUz1isbwJh/k4DgYzDLDKTudQSk0YcbX8ACh | ||
| 3755 | 66Ryj5QXvBMsdbRX7gp8CXrc1cqh0DQT+Hern+X+2B50ioUHj3/MeXrKls3N/U/7 | ||
| 3756 | /SMNkPX0XtPGYX2eEeAC7gkE2Qfdpoq3DIMku4NQkv5gdRE+2J2winq14J2by5BS | ||
| 3757 | S7CTKtQ+FjPlnsZlFT5kOwQ/2wyPX1wdaR+v8+khjPPvl/aatxm2hHSco1S1cE5j | ||
| 3758 | 2FddUyGbQJJD+tZ3VTNPZNX70Cxqjm0lpu+F6ALEUz65noe8zDUa3qHpimOHZR4R | ||
| 3759 | Kttjd5cUvpoUmRGywO6wT/gUITJDT5+rosuoD6o7BlXGEilXCNQ314cnrUlZp5Gr | ||
| 3760 | RHpejXDbl85IULFzk/bwg2D5zfHhMf1bfHEhYxQUqq/F3pN+aLHsIqKqkHWetUNy | ||
| 3761 | 6mSjhEv9DKgma3GX7lZjZuhCVPnHHd/Qj1vfyDBviP4NxDMcU6ij/UgQ8uQKTuEV | ||
| 3762 | V/xuZDDCVRHc6qnNSlSsKWNEz0pAoNZoWRsz+e86i9sgktxChL8Bq4fA1SCC28a5 | ||
| 3763 | g4VCXA9DO2pJNdWY9BW/+mGBDAkgGNLQFwzLSABQ6XaCjGTXOqAHVcweMcDvOrRl | ||
| 3764 | ++O/QmueD6i9a5jc2NvLi6Td11n0bt3+qsOR0C5CB8AMTVPNJLFMWx5R9N/pkvo= | ||
| 3765 | -----END CERTIFICATE----- | ||
| 3766 | |||
| 3767 | # Issuer: CN=Certplus Root CA G2 O=Certplus | ||
| 3768 | # Subject: CN=Certplus Root CA G2 O=Certplus | ||
| 3769 | # Label: "Certplus Root CA G2" | ||
| 3770 | # Serial: 1492087096131536844209563509228951875861589 | ||
| 3771 | # MD5 Fingerprint: a7:ee:c4:78:2d:1b:ee:2d:b9:29:ce:d6:a7:96:32:31 | ||
| 3772 | # SHA1 Fingerprint: 4f:65:8e:1f:e9:06:d8:28:02:e9:54:47:41:c9:54:25:5d:69:cc:1a | ||
| 3773 | # SHA256 Fingerprint: 6c:c0:50:41:e6:44:5e:74:69:6c:4c:fb:c9:f8:0f:54:3b:7e:ab:bb:44:b4:ce:6f:78:7c:6a:99:71:c4:2f:17 | ||
| 3774 | -----BEGIN CERTIFICATE----- | ||
| 3775 | MIICHDCCAaKgAwIBAgISESDZkc6uo+jF5//pAq/Pc7xVMAoGCCqGSM49BAMDMD4x | ||
| 3776 | CzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2VydHBs | ||
| 3777 | dXMgUm9vdCBDQSBHMjAeFw0xNDA1MjYwMDAwMDBaFw0zODAxMTUwMDAwMDBaMD4x | ||
| 3778 | CzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2VydHBs | ||
| 3779 | dXMgUm9vdCBDQSBHMjB2MBAGByqGSM49AgEGBSuBBAAiA2IABM0PW1aC3/BFGtat | ||
| 3780 | 93nwHcmsltaeTpwftEIRyoa/bfuFo8XlGVzX7qY/aWfYeOKmycTbLXku54uNAm8x | ||
| 3781 | Ik0G42ByRZ0OQneezs/lf4WbGOT8zC5y0xaTTsqZY1yhBSpsBqNjMGEwDgYDVR0P | ||
| 3782 | AQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNqDYwJ5jtpMxjwj | ||
| 3783 | FNiPwyCrKGBZMB8GA1UdIwQYMBaAFNqDYwJ5jtpMxjwjFNiPwyCrKGBZMAoGCCqG | ||
| 3784 | SM49BAMDA2gAMGUCMHD+sAvZ94OX7PNVHdTcswYO/jOYnYs5kGuUIe22113WTNch | ||
| 3785 | p+e/IQ8rzfcq3IUHnQIxAIYUFuXcsGXCwI4Un78kFmjlvPl5adytRSv3tjFzzAal | ||
| 3786 | U5ORGpOucGpnutee5WEaXw== | ||
| 3787 | -----END CERTIFICATE----- | ||
| 3788 | |||
| 3789 | # Issuer: CN=OpenTrust Root CA G1 O=OpenTrust | ||
| 3790 | # Subject: CN=OpenTrust Root CA G1 O=OpenTrust | ||
| 3791 | # Label: "OpenTrust Root CA G1" | ||
| 3792 | # Serial: 1492036577811947013770400127034825178844775 | ||
| 3793 | # MD5 Fingerprint: 76:00:cc:81:29:cd:55:5e:88:6a:7a:2e:f7:4d:39:da | ||
| 3794 | # SHA1 Fingerprint: 79:91:e8:34:f7:e2:ee:dd:08:95:01:52:e9:55:2d:14:e9:58:d5:7e | ||
| 3795 | # SHA256 Fingerprint: 56:c7:71:28:d9:8c:18:d9:1b:4c:fd:ff:bc:25:ee:91:03:d4:75:8e:a2:ab:ad:82:6a:90:f3:45:7d:46:0e:b4 | ||
| 3796 | -----BEGIN CERTIFICATE----- | ||
| 3797 | MIIFbzCCA1egAwIBAgISESCzkFU5fX82bWTCp59rY45nMA0GCSqGSIb3DQEBCwUA | ||
| 3798 | MEAxCzAJBgNVBAYTAkZSMRIwEAYDVQQKDAlPcGVuVHJ1c3QxHTAbBgNVBAMMFE9w | ||
| 3799 | ZW5UcnVzdCBSb290IENBIEcxMB4XDTE0MDUyNjA4NDU1MFoXDTM4MDExNTAwMDAw | ||
| 3800 | MFowQDELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCU9wZW5UcnVzdDEdMBsGA1UEAwwU | ||
| 3801 | T3BlblRydXN0IFJvb3QgQ0EgRzEwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK | ||
| 3802 | AoICAQD4eUbalsUwXopxAy1wpLuwxQjczeY1wICkES3d5oeuXT2R0odsN7faYp6b | ||
| 3803 | wiTXj/HbpqbfRm9RpnHLPhsxZ2L3EVs0J9V5ToybWL0iEA1cJwzdMOWo010hOHQX | ||
| 3804 | /uMftk87ay3bfWAfjH1MBcLrARYVmBSO0ZB3Ij/swjm4eTrwSSTilZHcYTSSjFR0 | ||
| 3805 | 77F9jAHiOH3BX2pfJLKOYheteSCtqx234LSWSE9mQxAGFiQD4eCcjsZGT44ameGP | ||
| 3806 | uY4zbGneWK2gDqdkVBFpRGZPTBKnjix9xNRbxQA0MMHZmf4yzgeEtE7NCv82TWLx | ||
| 3807 | p2NX5Ntqp66/K7nJ5rInieV+mhxNaMbBGN4zK1FGSxyO9z0M+Yo0FMT7MzUj8czx | ||
| 3808 | Kselu7Cizv5Ta01BG2Yospb6p64KTrk5M0ScdMGTHPjgniQlQ/GbI4Kq3ywgsNw2 | ||
| 3809 | TgOzfALU5nsaqocTvz6hdLubDuHAk5/XpGbKuxs74zD0M1mKB3IDVedzagMxbm+W | ||
| 3810 | G+Oin6+Sx+31QrclTDsTBM8clq8cIqPQqwWyTBIjUtz9GVsnnB47ev1CI9sjgBPw | ||
| 3811 | vFEVVJSmdz7QdFG9URQIOTfLHzSpMJ1ShC5VkLG631UAC9hWLbFJSXKAqWLXwPYY | ||
| 3812 | EQRVzXR7z2FwefR7LFxckvzluFqrTJOVoSfupb7PcSNCupt2LQIDAQABo2MwYTAO | ||
| 3813 | BgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUl0YhVyE1 | ||
| 3814 | 2jZVx/PxN3DlCPaTKbYwHwYDVR0jBBgwFoAUl0YhVyE12jZVx/PxN3DlCPaTKbYw | ||
| 3815 | DQYJKoZIhvcNAQELBQADggIBAB3dAmB84DWn5ph76kTOZ0BP8pNuZtQ5iSas000E | ||
| 3816 | PLuHIT839HEl2ku6q5aCgZG27dmxpGWX4m9kWaSW7mDKHyP7Rbr/jyTwyqkxf3kf | ||
| 3817 | gLMtMrpkZ2CvuVnN35pJ06iCsfmYlIrM4LvgBBuZYLFGZdwIorJGnkSI6pN+VxbS | ||
| 3818 | FXJfLkur1J1juONI5f6ELlgKn0Md/rcYkoZDSw6cMoYsYPXpSOqV7XAp8dUv/TW0 | ||
| 3819 | V8/bhUiZucJvbI/NeJWsZCj9VrDDb8O+WVLhX4SPgPL0DTatdrOjteFkdjpY3H1P | ||
| 3820 | XlZs5VVZV6Xf8YpmMIzUUmI4d7S+KNfKNsSbBfD4Fdvb8e80nR14SohWZ25g/4/I | ||
| 3821 | i+GOvUKpMwpZQhISKvqxnUOOBZuZ2mKtVzazHbYNeS2WuOvyDEsMpZTGMKcmGS3t | ||
| 3822 | TAZQMPH9WD25SxdfGbRqhFS0OE85og2WaMMolP3tLR9Ka0OWLpABEPs4poEL0L91 | ||
| 3823 | 09S5zvE/bw4cHjdx5RiHdRk/ULlepEU0rbDK5uUTdg8xFKmOLZTW1YVNcxVPS/Ky | ||
| 3824 | Pu1svf0OnWZzsD2097+o4BGkxK51CUpjAEggpsadCwmKtODmzj7HPiY46SvepghJ | ||
| 3825 | AwSQiumPv+i2tCqjI40cHLI5kqiPAlxAOXXUc0ECd97N4EOH1uS6SsNsEn/+KuYj | ||
| 3826 | 1oxx | ||
| 3827 | -----END CERTIFICATE----- | ||
| 3828 | |||
| 3829 | # Issuer: CN=OpenTrust Root CA G2 O=OpenTrust | ||
| 3830 | # Subject: CN=OpenTrust Root CA G2 O=OpenTrust | ||
| 3831 | # Label: "OpenTrust Root CA G2" | ||
| 3832 | # Serial: 1492012448042702096986875987676935573415441 | ||
| 3833 | # MD5 Fingerprint: 57:24:b6:59:24:6b:ae:c8:fe:1c:0c:20:f2:c0:4e:eb | ||
| 3834 | # SHA1 Fingerprint: 79:5f:88:60:c5:ab:7c:3d:92:e6:cb:f4:8d:e1:45:cd:11:ef:60:0b | ||
| 3835 | # SHA256 Fingerprint: 27:99:58:29:fe:6a:75:15:c1:bf:e8:48:f9:c4:76:1d:b1:6c:22:59:29:25:7b:f4:0d:08:94:f2:9e:a8:ba:f2 | ||
| 3836 | -----BEGIN CERTIFICATE----- | ||
| 3837 | MIIFbzCCA1egAwIBAgISESChaRu/vbm9UpaPI+hIvyYRMA0GCSqGSIb3DQEBDQUA | ||
| 3838 | MEAxCzAJBgNVBAYTAkZSMRIwEAYDVQQKDAlPcGVuVHJ1c3QxHTAbBgNVBAMMFE9w | ||
| 3839 | ZW5UcnVzdCBSb290IENBIEcyMB4XDTE0MDUyNjAwMDAwMFoXDTM4MDExNTAwMDAw | ||
| 3840 | MFowQDELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCU9wZW5UcnVzdDEdMBsGA1UEAwwU | ||
| 3841 | T3BlblRydXN0IFJvb3QgQ0EgRzIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK | ||
| 3842 | AoICAQDMtlelM5QQgTJT32F+D3Y5z1zCU3UdSXqWON2ic2rxb95eolq5cSG+Ntmh | ||
| 3843 | /LzubKh8NBpxGuga2F8ORAbtp+Dz0mEL4DKiltE48MLaARf85KxP6O6JHnSrT78e | ||
| 3844 | CbY2albz4e6WiWYkBuTNQjpK3eCasMSCRbP+yatcfD7J6xcvDH1urqWPyKwlCm/6 | ||
| 3845 | 1UWY0jUJ9gNDlP7ZvyCVeYCYitmJNbtRG6Q3ffyZO6v/v6wNj0OxmXsWEH4db0fE | ||
| 3846 | FY8ElggGQgT4hNYdvJGmQr5J1WqIP7wtUdGejeBSzFfdNTVY27SPJIjki9/ca1TS | ||
| 3847 | gSuyzpJLHB9G+h3Ykst2Z7UJmQnlrBcUVXDGPKBWCgOz3GIZ38i1MH/1PCZ1Eb3X | ||
| 3848 | G7OHngevZXHloM8apwkQHZOJZlvoPGIytbU6bumFAYueQ4xncyhZW+vj3CzMpSZy | ||
| 3849 | YhK05pyDRPZRpOLAeiRXyg6lPzq1O4vldu5w5pLeFlwoW5cZJ5L+epJUzpM5ChaH | ||
| 3850 | vGOz9bGTXOBut9Dq+WIyiET7vycotjCVXRIouZW+j1MY5aIYFuJWpLIsEPUdN6b4 | ||
| 3851 | t/bQWVyJ98LVtZR00dX+G7bw5tYee9I8y6jj9RjzIR9u701oBnstXW5DiabA+aC/ | ||
| 3852 | gh7PU3+06yzbXfZqfUAkBXKJOAGTy3HCOV0GEfZvePg3DTmEJwIDAQABo2MwYTAO | ||
| 3853 | BgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUajn6QiL3 | ||
| 3854 | 5okATV59M4PLuG53hq8wHwYDVR0jBBgwFoAUajn6QiL35okATV59M4PLuG53hq8w | ||
| 3855 | DQYJKoZIhvcNAQENBQADggIBAJjLq0A85TMCl38th6aP1F5Kr7ge57tx+4BkJamz | ||
| 3856 | Gj5oXScmp7oq4fBXgwpkTx4idBvpkF/wrM//T2h6OKQQbA2xx6R3gBi2oihEdqc0 | ||
| 3857 | nXGEL8pZ0keImUEiyTCYYW49qKgFbdEfwFFEVn8nNQLdXpgKQuswv42hm1GqO+qT | ||
| 3858 | RmTFAHneIWv2V6CG1wZy7HBGS4tz3aAhdT7cHcCP009zHIXZ/n9iyJVvttN7jLpT | ||
| 3859 | wm+bREx50B1ws9efAvSyB7DH5fitIw6mVskpEndI2S9G/Tvw/HRwkqWOOAgfZDC2 | ||
| 3860 | t0v7NqwQjqBSM2OdAzVWxWm9xiNaJ5T2pBL4LTM8oValX9YZ6e18CL13zSdkzJTa | ||
| 3861 | TkZQh+D5wVOAHrut+0dSixv9ovneDiK3PTNZbNTe9ZUGMg1RGUFcPk8G97krgCf2 | ||
| 3862 | o6p6fAbhQ8MTOWIaNr3gKC6UAuQpLmBVrkA9sHSSXvAgZJY/X0VdiLWK2gKgW0VU | ||
| 3863 | 3jg9CcCoSmVGFvyqv1ROTVu+OEO3KMqLM6oaJbolXCkvW0pujOotnCr2BXbgd5eA | ||
| 3864 | iN1nE28daCSLT7d0geX0YJ96Vdc+N9oWaz53rK4YcJUIeSkDiv7BO7M/Gg+kO14f | ||
| 3865 | WKGVyasvc0rQLW6aWQ9VGHgtPFGml4vmu7JwqkwR3v98KzfUetF3NI/n+UL3PIEM | ||
| 3866 | S1IK | ||
| 3867 | -----END CERTIFICATE----- | ||
| 3868 | |||
| 3869 | # Issuer: CN=OpenTrust Root CA G3 O=OpenTrust | ||
| 3870 | # Subject: CN=OpenTrust Root CA G3 O=OpenTrust | ||
| 3871 | # Label: "OpenTrust Root CA G3" | ||
| 3872 | # Serial: 1492104908271485653071219941864171170455615 | ||
| 3873 | # MD5 Fingerprint: 21:37:b4:17:16:92:7b:67:46:70:a9:96:d7:a8:13:24 | ||
| 3874 | # SHA1 Fingerprint: 6e:26:64:f3:56:bf:34:55:bf:d1:93:3f:7c:01:de:d8:13:da:8a:a6 | ||
| 3875 | # SHA256 Fingerprint: b7:c3:62:31:70:6e:81:07:8c:36:7c:b8:96:19:8f:1e:32:08:dd:92:69:49:dd:8f:57:09:a4:10:f7:5b:62:92 | ||
| 3876 | -----BEGIN CERTIFICATE----- | ||
| 3877 | MIICITCCAaagAwIBAgISESDm+Ez8JLC+BUCs2oMbNGA/MAoGCCqGSM49BAMDMEAx | ||
| 3878 | CzAJBgNVBAYTAkZSMRIwEAYDVQQKDAlPcGVuVHJ1c3QxHTAbBgNVBAMMFE9wZW5U | ||
| 3879 | cnVzdCBSb290IENBIEczMB4XDTE0MDUyNjAwMDAwMFoXDTM4MDExNTAwMDAwMFow | ||
| 3880 | QDELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCU9wZW5UcnVzdDEdMBsGA1UEAwwUT3Bl | ||
| 3881 | blRydXN0IFJvb3QgQ0EgRzMwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAARK7liuTcpm | ||
| 3882 | 3gY6oxH84Bjwbhy6LTAMidnW7ptzg6kjFYwvWYpa3RTqnVkrQ7cG7DK2uu5Bta1d | ||
| 3883 | oYXM6h0UZqNnfkbilPPntlahFVmhTzeXuSIevRHr9LIfXsMUmuXZl5mjYzBhMA4G | ||
| 3884 | A1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRHd8MUi2I5 | ||
| 3885 | DMlv4VBN0BBY3JWIbTAfBgNVHSMEGDAWgBRHd8MUi2I5DMlv4VBN0BBY3JWIbTAK | ||
| 3886 | BggqhkjOPQQDAwNpADBmAjEAj6jcnboMBBf6Fek9LykBl7+BFjNAk2z8+e2AcG+q | ||
| 3887 | j9uEwov1NcoG3GRvaBbhj5G5AjEA2Euly8LQCGzpGPta3U1fJAuwACEl74+nBCZx | ||
| 3888 | 4nxp5V2a+EEfOzmTk51V6s2N8fvB | ||
| 3889 | -----END CERTIFICATE----- | ||
| 3890 | |||
| 3891 | # Issuer: CN=ISRG Root X1 O=Internet Security Research Group | ||
| 3892 | # Subject: CN=ISRG Root X1 O=Internet Security Research Group | ||
| 3893 | # Label: "ISRG Root X1" | ||
| 3894 | # Serial: 172886928669790476064670243504169061120 | ||
| 3895 | # MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e | ||
| 3896 | # SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8 | ||
| 3897 | # SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6 | ||
| 3898 | -----BEGIN CERTIFICATE----- | ||
| 3899 | MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw | ||
| 3900 | TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh | ||
| 3901 | cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4 | ||
| 3902 | WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu | ||
| 3903 | ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY | ||
| 3904 | MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc | ||
| 3905 | h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+ | ||
| 3906 | 0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U | ||
| 3907 | A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW | ||
| 3908 | T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH | ||
| 3909 | B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC | ||
| 3910 | B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv | ||
| 3911 | KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn | ||
| 3912 | OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn | ||
| 3913 | jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw | ||
| 3914 | qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI | ||
| 3915 | rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV | ||
| 3916 | HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq | ||
| 3917 | hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL | ||
| 3918 | ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ | ||
| 3919 | 3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK | ||
| 3920 | NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5 | ||
| 3921 | ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur | ||
| 3922 | TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC | ||
| 3923 | jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc | ||
| 3924 | oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq | ||
| 3925 | 4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA | ||
| 3926 | mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d | ||
| 3927 | emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc= | ||
| 3928 | -----END CERTIFICATE----- | ||
| 3929 | |||
| 3930 | # Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM | ||
| 3931 | # Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM | ||
| 3932 | # Label: "AC RAIZ FNMT-RCM" | ||
| 3933 | # Serial: 485876308206448804701554682760554759 | ||
| 3934 | # MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d | ||
| 3935 | # SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20 | ||
| 3936 | # SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa | ||
| 3937 | -----BEGIN CERTIFICATE----- | ||
| 3938 | MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx | ||
| 3939 | CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ | ||
| 3940 | WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ | ||
| 3941 | BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG | ||
| 3942 | Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/ | ||
| 3943 | yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf | ||
| 3944 | BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz | ||
| 3945 | WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF | ||
| 3946 | tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z | ||
| 3947 | 374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC | ||
| 3948 | IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL | ||
| 3949 | mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7 | ||
| 3950 | wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS | ||
| 3951 | MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2 | ||
| 3952 | ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet | ||
| 3953 | UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw | ||
| 3954 | AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H | ||
| 3955 | YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3 | ||
| 3956 | LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD | ||
| 3957 | nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1 | ||
| 3958 | RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM | ||
| 3959 | LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf | ||
| 3960 | 77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N | ||
| 3961 | JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm | ||
| 3962 | fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp | ||
| 3963 | 6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp | ||
| 3964 | 1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B | ||
| 3965 | 9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok | ||
| 3966 | RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv | ||
| 3967 | uu8wd+RU4riEmViAqhOLUTpPSPaLtrM= | ||
| 3968 | -----END CERTIFICATE----- | ||
| 3969 | |||
| 3970 | # Issuer: CN=Amazon Root CA 1 O=Amazon | ||
| 3971 | # Subject: CN=Amazon Root CA 1 O=Amazon | ||
| 3972 | # Label: "Amazon Root CA 1" | ||
| 3973 | # Serial: 143266978916655856878034712317230054538369994 | ||
| 3974 | # MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6 | ||
| 3975 | # SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16 | ||
| 3976 | # SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e | ||
| 3977 | -----BEGIN CERTIFICATE----- | ||
| 3978 | MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF | ||
| 3979 | ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 | ||
| 3980 | b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL | ||
| 3981 | MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv | ||
| 3982 | b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj | ||
| 3983 | ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM | ||
| 3984 | 9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw | ||
| 3985 | IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6 | ||
| 3986 | VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L | ||
| 3987 | 93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm | ||
| 3988 | jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC | ||
| 3989 | AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA | ||
| 3990 | A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI | ||
| 3991 | U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs | ||
| 3992 | N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv | ||
| 3993 | o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU | ||
| 3994 | 5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy | ||
| 3995 | rqXRfboQnoZsG4q5WTP468SQvvG5 | ||
| 3996 | -----END CERTIFICATE----- | ||
| 3997 | |||
| 3998 | # Issuer: CN=Amazon Root CA 2 O=Amazon | ||
| 3999 | # Subject: CN=Amazon Root CA 2 O=Amazon | ||
| 4000 | # Label: "Amazon Root CA 2" | ||
| 4001 | # Serial: 143266982885963551818349160658925006970653239 | ||
| 4002 | # MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66 | ||
| 4003 | # SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a | ||
| 4004 | # SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4 | ||
| 4005 | -----BEGIN CERTIFICATE----- | ||
| 4006 | MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF | ||
| 4007 | ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 | ||
| 4008 | b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL | ||
| 4009 | MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv | ||
| 4010 | b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK | ||
| 4011 | gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ | ||
| 4012 | W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg | ||
| 4013 | 1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K | ||
| 4014 | 8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r | ||
| 4015 | 2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me | ||
| 4016 | z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR | ||
| 4017 | 8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj | ||
| 4018 | mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz | ||
| 4019 | 7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6 | ||
| 4020 | +XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI | ||
| 4021 | 0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB | ||
| 4022 | Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm | ||
| 4023 | UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2 | ||
| 4024 | LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY | ||
| 4025 | +gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS | ||
| 4026 | k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl | ||
| 4027 | 7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm | ||
| 4028 | btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl | ||
| 4029 | urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+ | ||
| 4030 | fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63 | ||
| 4031 | n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE | ||
| 4032 | 76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H | ||
| 4033 | 9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT | ||
| 4034 | 4PsJYGw= | ||
| 4035 | -----END CERTIFICATE----- | ||
| 4036 | |||
| 4037 | # Issuer: CN=Amazon Root CA 3 O=Amazon | ||
| 4038 | # Subject: CN=Amazon Root CA 3 O=Amazon | ||
| 4039 | # Label: "Amazon Root CA 3" | ||
| 4040 | # Serial: 143266986699090766294700635381230934788665930 | ||
| 4041 | # MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87 | ||
| 4042 | # SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e | ||
| 4043 | # SHA256 Fingerprint: 18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4 | ||
| 4044 | -----BEGIN CERTIFICATE----- | ||
| 4045 | MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5 | ||
| 4046 | MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g | ||
| 4047 | Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG | ||
| 4048 | A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg | ||
| 4049 | Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl | ||
| 4050 | ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j | ||
| 4051 | QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr | ||
| 4052 | ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr | ||
| 4053 | BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM | ||
| 4054 | YyRIHN8wfdVoOw== | ||
| 4055 | -----END CERTIFICATE----- | ||
| 4056 | |||
| 4057 | # Issuer: CN=Amazon Root CA 4 O=Amazon | ||
| 4058 | # Subject: CN=Amazon Root CA 4 O=Amazon | ||
| 4059 | # Label: "Amazon Root CA 4" | ||
| 4060 | # Serial: 143266989758080763974105200630763877849284878 | ||
| 4061 | # MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd | ||
| 4062 | # SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be | ||
| 4063 | # SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92 | ||
| 4064 | -----BEGIN CERTIFICATE----- | ||
| 4065 | MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5 | ||
| 4066 | MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g | ||
| 4067 | Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG | ||
| 4068 | A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg | ||
| 4069 | Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi | ||
| 4070 | 9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk | ||
| 4071 | M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB | ||
| 4072 | /zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB | ||
| 4073 | MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw | ||
| 4074 | CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW | ||
| 4075 | 1KyLa2tJElMzrdfkviT8tQp21KW8EA== | ||
| 4076 | -----END CERTIFICATE----- | ||
| 4077 | |||
| 4078 | # Issuer: CN=LuxTrust Global Root 2 O=LuxTrust S.A. | ||
| 4079 | # Subject: CN=LuxTrust Global Root 2 O=LuxTrust S.A. | ||
| 4080 | # Label: "LuxTrust Global Root 2" | ||
| 4081 | # Serial: 59914338225734147123941058376788110305822489521 | ||
| 4082 | # MD5 Fingerprint: b2:e1:09:00:61:af:f7:f1:91:6f:c4:ad:8d:5e:3b:7c | ||
| 4083 | # SHA1 Fingerprint: 1e:0e:56:19:0a:d1:8b:25:98:b2:04:44:ff:66:8a:04:17:99:5f:3f | ||
| 4084 | # SHA256 Fingerprint: 54:45:5f:71:29:c2:0b:14:47:c4:18:f9:97:16:8f:24:c5:8f:c5:02:3b:f5:da:5b:e2:eb:6e:1d:d8:90:2e:d5 | ||
| 4085 | -----BEGIN CERTIFICATE----- | ||
| 4086 | MIIFwzCCA6ugAwIBAgIUCn6m30tEntpqJIWe5rgV0xZ/u7EwDQYJKoZIhvcNAQEL | ||
| 4087 | BQAwRjELMAkGA1UEBhMCTFUxFjAUBgNVBAoMDUx1eFRydXN0IFMuQS4xHzAdBgNV | ||
| 4088 | BAMMFkx1eFRydXN0IEdsb2JhbCBSb290IDIwHhcNMTUwMzA1MTMyMTU3WhcNMzUw | ||
| 4089 | MzA1MTMyMTU3WjBGMQswCQYDVQQGEwJMVTEWMBQGA1UECgwNTHV4VHJ1c3QgUy5B | ||
| 4090 | LjEfMB0GA1UEAwwWTHV4VHJ1c3QgR2xvYmFsIFJvb3QgMjCCAiIwDQYJKoZIhvcN | ||
| 4091 | AQEBBQADggIPADCCAgoCggIBANeFl78RmOnwYoNMPIf5U2o3C/IPPIfOb9wmKb3F | ||
| 4092 | ibrJgz337spbxm1Jc7TJRqMbNBM/wYlFV/TZsfs2ZUv7COJIcRHIbjuend+JZTem | ||
| 4093 | hfY7RBi2xjcwYkSSl2l9QjAk5A0MiWtj3sXh306pFGxT4GHO9hcvHTy95iJMHZP1 | ||
| 4094 | EMShduxq3sVs35a0VkBCwGKSMKEtFZSg0iAGCW5qbeXrt77U8PEVfIvmTroTzEsn | ||
| 4095 | Xpk8F12PgX8zPU/TPxvsXD/wPEx1bvKm1Z3aLQdjAsZy6ZS8TEmVT4hSyNvoaYL4 | ||
| 4096 | zDRbIvCGp4m9SAptZoFtyMhk+wHh9OHe2Z7d21vUKpkmFRseTJIpgp7VkoGSQXAZ | ||
| 4097 | 96Tlk0u8d2cx3Rz9MXANF5kM+Qw5GSoXtTBxVdUPrljhPS80m8+f9niFwpN6cj5m | ||
| 4098 | j5wWEWCPnolvZ77gR1o7DJpni89Gxq44o/KnvObWhWszJHAiS8sIm7vI+AIpHb4g | ||
| 4099 | DEa/a4ebsypmQjVGbKq6rfmYe+lQVRQxv7HaLe2ArWgk+2mr2HETMOZns4dA/Yl+ | ||
| 4100 | 8kPREd8vZS9kzl8UubG/Mb2HeFpZZYiq/FkySIbWTLkpS5XTdvN3JW1CHDiDTf2j | ||
| 4101 | X5t/Lax5Gw5CMZdjpPuKadUiDTSQMC6otOBttpSsvItO13D8xTiOZCXhTTmQzsmH | ||
| 4102 | hFhxAgMBAAGjgagwgaUwDwYDVR0TAQH/BAUwAwEB/zBCBgNVHSAEOzA5MDcGByuB | ||
| 4103 | KwEBAQowLDAqBggrBgEFBQcCARYeaHR0cHM6Ly9yZXBvc2l0b3J5Lmx1eHRydXN0 | ||
| 4104 | Lmx1MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBT/GCh2+UgFLKGu8SsbK7JT | ||
| 4105 | +Et8szAdBgNVHQ4EFgQU/xgodvlIBSyhrvErGyuyU/hLfLMwDQYJKoZIhvcNAQEL | ||
| 4106 | BQADggIBAGoZFO1uecEsh9QNcH7X9njJCwROxLHOk3D+sFTAMs2ZMGQXvw/l4jP9 | ||
| 4107 | BzZAcg4atmpZ1gDlaCDdLnINH2pkMSCEfUmmWjfrRcmF9dTHF5kH5ptV5AzoqbTO | ||
| 4108 | jFu1EVzPig4N1qx3gf4ynCSecs5U89BvolbW7MM3LGVYvlcAGvI1+ut7MV3CwRI9 | ||
| 4109 | loGIlonBWVx65n9wNOeD4rHh4bhY79SV5GCc8JaXcozrhAIuZY+kt9J/Z93I055c | ||
| 4110 | qqmkoCUUBpvsT34tC38ddfEz2O3OuHVtPlu5mB0xDVbYQw8wkbIEa91WvpWAVWe+ | ||
| 4111 | 2M2D2RjuLg+GLZKecBPs3lHJQ3gCpU3I+V/EkVhGFndadKpAvAefMLmx9xIX3eP/ | ||
| 4112 | JEAdemrRTxgKqpAd60Ae36EeRJIQmvKN4dFLRp7oRUKX6kWZ8+xm1QL68qZKJKre | ||
| 4113 | zrnK+T+Tb/mjuuqlPpmt/f97mfVl7vBZKGfXkJWkE4SphMHozs51k2MavDzq1WQf | ||
| 4114 | LSoSOcbDWjLtR5EWDrw4wVDej8oqkDQc7kGUnF4ZLvhFSZl0kbAEb+MEWrGrKqv+ | ||
| 4115 | x9CWttrhSmQGbmBNvUJO/3jaJMobtNeWOWyu8Q6qp31IiyBMz2TWuJdGsE7RKlY6 | ||
| 4116 | oJO9r4Ak4Ap+58rVyuiFVdw2KuGUaJPHZnJED4AhMmwlxyOAgwrr | ||
| 4117 | -----END CERTIFICATE----- | ||
| 4118 | |||
| 4119 | # Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM | ||
| 4120 | # Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM | ||
| 4121 | # Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1" | ||
| 4122 | # Serial: 1 | ||
| 4123 | # MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49 | ||
| 4124 | # SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca | ||
| 4125 | # SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16 | ||
| 4126 | -----BEGIN CERTIFICATE----- | ||
| 4127 | MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx | ||
| 4128 | GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp | ||
| 4129 | bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w | ||
| 4130 | KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0 | ||
| 4131 | BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy | ||
| 4132 | dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG | ||
| 4133 | EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll | ||
| 4134 | IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU | ||
| 4135 | QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT | ||
| 4136 | TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg | ||
| 4137 | LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7 | ||
| 4138 | a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr | ||
| 4139 | LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr | ||
| 4140 | N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X | ||
| 4141 | YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/ | ||
| 4142 | iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f | ||
| 4143 | AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH | ||
| 4144 | V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL | ||
| 4145 | BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh | ||
| 4146 | AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf | ||
| 4147 | IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4 | ||
| 4148 | lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c | ||
| 4149 | 8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf | ||
| 4150 | lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM= | ||
| 4151 | -----END CERTIFICATE----- | ||
| 4152 | |||
| 4153 | # Issuer: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. | ||
| 4154 | # Subject: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. | ||
| 4155 | # Label: "GDCA TrustAUTH R5 ROOT" | ||
| 4156 | # Serial: 9009899650740120186 | ||
| 4157 | # MD5 Fingerprint: 63:cc:d9:3d:34:35:5c:6f:53:a3:e2:08:70:48:1f:b4 | ||
| 4158 | # SHA1 Fingerprint: 0f:36:38:5b:81:1a:25:c3:9b:31:4e:83:ca:e9:34:66:70:cc:74:b4 | ||
| 4159 | # SHA256 Fingerprint: bf:ff:8f:d0:44:33:48:7d:6a:8a:a6:0c:1a:29:76:7a:9f:c2:bb:b0:5e:42:0f:71:3a:13:b9:92:89:1d:38:93 | ||
| 4160 | -----BEGIN CERTIFICATE----- | ||
| 4161 | MIIFiDCCA3CgAwIBAgIIfQmX/vBH6nowDQYJKoZIhvcNAQELBQAwYjELMAkGA1UE | ||
| 4162 | BhMCQ04xMjAwBgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZ | ||
| 4163 | IENPLixMVEQuMR8wHQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMB4XDTE0 | ||
| 4164 | MTEyNjA1MTMxNVoXDTQwMTIzMTE1NTk1OVowYjELMAkGA1UEBhMCQ04xMjAwBgNV | ||
| 4165 | BAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZIENPLixMVEQuMR8w | ||
| 4166 | HQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMIICIjANBgkqhkiG9w0BAQEF | ||
| 4167 | AAOCAg8AMIICCgKCAgEA2aMW8Mh0dHeb7zMNOwZ+Vfy1YI92hhJCfVZmPoiC7XJj | ||
| 4168 | Dp6L3TQsAlFRwxn9WVSEyfFrs0yw6ehGXTjGoqcuEVe6ghWinI9tsJlKCvLriXBj | ||
| 4169 | TnnEt1u9ol2x8kECK62pOqPseQrsXzrj/e+APK00mxqriCZ7VqKChh/rNYmDf1+u | ||
| 4170 | KU49tm7srsHwJ5uu4/Ts765/94Y9cnrrpftZTqfrlYwiOXnhLQiPzLyRuEH3FMEj | ||
| 4171 | qcOtmkVEs7LXLM3GKeJQEK5cy4KOFxg2fZfmiJqwTTQJ9Cy5WmYqsBebnh52nUpm | ||
| 4172 | MUHfP/vFBu8btn4aRjb3ZGM74zkYI+dndRTVdVeSN72+ahsmUPI2JgaQxXABZG12 | ||
| 4173 | ZuGR224HwGGALrIuL4xwp9E7PLOR5G62xDtw8mySlwnNR30YwPO7ng/Wi64HtloP | ||
| 4174 | zgsMR6flPri9fcebNaBhlzpBdRfMK5Z3KpIhHtmVdiBnaM8Nvd/WHwlqmuLMc3Gk | ||
| 4175 | L30SgLdTMEZeS1SZD2fJpcjyIMGC7J0R38IC+xo70e0gmu9lZJIQDSri3nDxGGeC | ||
| 4176 | jGHeuLzRL5z7D9Ar7Rt2ueQ5Vfj4oR24qoAATILnsn8JuLwwoC8N9VKejveSswoA | ||
| 4177 | HQBUlwbgsQfZxw9cZX08bVlX5O2ljelAU58VS6Bx9hoh49pwBiFYFIeFd3mqgnkC | ||
| 4178 | AwEAAaNCMEAwHQYDVR0OBBYEFOLJQJ9NzuiaoXzPDj9lxSmIahlRMA8GA1UdEwEB | ||
| 4179 | /wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQDRSVfg | ||
| 4180 | p8xoWLoBDysZzY2wYUWsEe1jUGn4H3++Fo/9nesLqjJHdtJnJO29fDMylyrHBYZm | ||
| 4181 | DRd9FBUb1Ov9H5r2XpdptxolpAqzkT9fNqyL7FeoPueBihhXOYV0GkLH6VsTX4/5 | ||
| 4182 | COmSdI31R9KrO9b7eGZONn356ZLpBN79SWP8bfsUcZNnL0dKt7n/HipzcEYwv1ry | ||
| 4183 | L3ml4Y0M2fmyYzeMN2WFcGpcWwlyua1jPLHd+PwyvzeG5LuOmCd+uh8W4XAR8gPf | ||
| 4184 | JWIyJyYYMoSf/wA6E7qaTfRPuBRwIrHKK5DOKcFw9C+df/KQHtZa37dG/OaG+svg | ||
| 4185 | IHZ6uqbL9XzeYqWxi+7egmaKTjowHz+Ay60nugxe19CxVsp3cbK1daFQqUBDF8Io | ||
| 4186 | 2c9Si1vIY9RCPqAzekYu9wogRlR+ak8x8YF+QnQ4ZXMn7sZ8uI7XpTrXmKGcjBBV | ||
| 4187 | 09tL7ECQ8s1uV9JiDnxXk7Gnbc2dg7sq5+W2O3FYrf3RRbxake5TFW/TRQl1brqQ | ||
| 4188 | XR4EzzffHqhmsYzmIGrv/EhOdJhCrylvLmrH+33RZjEizIYAfmaDDEL0vTSSwxrq | ||
| 4189 | T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe | ||
| 4190 | MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g== | ||
| 4191 | -----END CERTIFICATE----- | ||
| 4192 | |||
| 4193 | # Issuer: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority | ||
| 4194 | # Subject: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority | ||
| 4195 | # Label: "TrustCor RootCert CA-1" | ||
| 4196 | # Serial: 15752444095811006489 | ||
| 4197 | # MD5 Fingerprint: 6e:85:f1:dc:1a:00:d3:22:d5:b2:b2:ac:6b:37:05:45 | ||
| 4198 | # SHA1 Fingerprint: ff:bd:cd:e7:82:c8:43:5e:3c:6f:26:86:5c:ca:a8:3a:45:5b:c3:0a | ||
| 4199 | # SHA256 Fingerprint: d4:0e:9c:86:cd:8f:e4:68:c1:77:69:59:f4:9e:a7:74:fa:54:86:84:b6:c4:06:f3:90:92:61:f4:dc:e2:57:5c | ||
| 4200 | -----BEGIN CERTIFICATE----- | ||
| 4201 | MIIEMDCCAxigAwIBAgIJANqb7HHzA7AZMA0GCSqGSIb3DQEBCwUAMIGkMQswCQYD | ||
| 4202 | VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk | ||
| 4203 | MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U | ||
| 4204 | cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRydXN0Q29y | ||
| 4205 | IFJvb3RDZXJ0IENBLTEwHhcNMTYwMjA0MTIzMjE2WhcNMjkxMjMxMTcyMzE2WjCB | ||
| 4206 | pDELMAkGA1UEBhMCUEExDzANBgNVBAgMBlBhbmFtYTEUMBIGA1UEBwwLUGFuYW1h | ||
| 4207 | IENpdHkxJDAiBgNVBAoMG1RydXN0Q29yIFN5c3RlbXMgUy4gZGUgUi5MLjEnMCUG | ||
| 4208 | A1UECwweVHJ1c3RDb3IgQ2VydGlmaWNhdGUgQXV0aG9yaXR5MR8wHQYDVQQDDBZU | ||
| 4209 | cnVzdENvciBSb290Q2VydCBDQS0xMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB | ||
| 4210 | CgKCAQEAv463leLCJhJrMxnHQFgKq1mqjQCj/IDHUHuO1CAmujIS2CNUSSUQIpid | ||
| 4211 | RtLByZ5OGy4sDjjzGiVoHKZaBeYei0i/mJZ0PmnK6bV4pQa81QBeCQryJ3pS/C3V | ||
| 4212 | seq0iWEk8xoT26nPUu0MJLq5nux+AHT6k61sKZKuUbS701e/s/OojZz0JEsq1pme | ||
| 4213 | 9J7+wH5COucLlVPat2gOkEz7cD+PSiyU8ybdY2mplNgQTsVHCJCZGxdNuWxu72CV | ||
| 4214 | EY4hgLW9oHPY0LJ3xEXqWib7ZnZ2+AYfYW0PVcWDtxBWcgYHpfOxGgMFZA6dWorW | ||
| 4215 | hnAbJN7+KIor0Gqw/Hqi3LJ5DotlDwIDAQABo2MwYTAdBgNVHQ4EFgQU7mtJPHo/ | ||
| 4216 | DeOxCbeKyKsZn3MzUOcwHwYDVR0jBBgwFoAU7mtJPHo/DeOxCbeKyKsZn3MzUOcw | ||
| 4217 | DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQAD | ||
| 4218 | ggEBACUY1JGPE+6PHh0RU9otRCkZoB5rMZ5NDp6tPVxBb5UrJKF5mDo4Nvu7Zp5I | ||
| 4219 | /5CQ7z3UuJu0h3U/IJvOcs+hVcFNZKIZBqEHMwwLKeXx6quj7LUKdJDHfXLy11yf | ||
| 4220 | ke+Ri7fc7Waiz45mO7yfOgLgJ90WmMCV1Aqk5IGadZQ1nJBfiDcGrVmVCrDRZ9MZ | ||
| 4221 | yonnMlo2HD6CqFqTvsbQZJG2z9m2GM/bftJlo6bEjhcxwft+dtvTheNYsnd6djts | ||
| 4222 | L1Ac59v2Z3kf9YKVmgenFK+P3CghZwnS1k1aHBkcjndcw5QkPTJrS37UeJSDvjdN | ||
| 4223 | zl/HHk484IkzlQsPpTLWPFp5LBk= | ||
| 4224 | -----END CERTIFICATE----- | ||
| 4225 | |||
| 4226 | # Issuer: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority | ||
| 4227 | # Subject: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority | ||
| 4228 | # Label: "TrustCor RootCert CA-2" | ||
| 4229 | # Serial: 2711694510199101698 | ||
| 4230 | # MD5 Fingerprint: a2:e1:f8:18:0b:ba:45:d5:c7:41:2a:bb:37:52:45:64 | ||
| 4231 | # SHA1 Fingerprint: b8:be:6d:cb:56:f1:55:b9:63:d4:12:ca:4e:06:34:c7:94:b2:1c:c0 | ||
| 4232 | # SHA256 Fingerprint: 07:53:e9:40:37:8c:1b:d5:e3:83:6e:39:5d:ae:a5:cb:83:9e:50:46:f1:bd:0e:ae:19:51:cf:10:fe:c7:c9:65 | ||
| 4233 | -----BEGIN CERTIFICATE----- | ||
| 4234 | MIIGLzCCBBegAwIBAgIIJaHfyjPLWQIwDQYJKoZIhvcNAQELBQAwgaQxCzAJBgNV | ||
| 4235 | BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw | ||
| 4236 | IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy | ||
| 4237 | dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEfMB0GA1UEAwwWVHJ1c3RDb3Ig | ||
| 4238 | Um9vdENlcnQgQ0EtMjAeFw0xNjAyMDQxMjMyMjNaFw0zNDEyMzExNzI2MzlaMIGk | ||
| 4239 | MQswCQYDVQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEg | ||
| 4240 | Q2l0eTEkMCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYD | ||
| 4241 | VQQLDB5UcnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRy | ||
| 4242 | dXN0Q29yIFJvb3RDZXJ0IENBLTIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK | ||
| 4243 | AoICAQCnIG7CKqJiJJWQdsg4foDSq8GbZQWU9MEKENUCrO2fk8eHyLAnK0IMPQo+ | ||
| 4244 | QVqedd2NyuCb7GgypGmSaIwLgQ5WoD4a3SwlFIIvl9NkRvRUqdw6VC0xK5mC8tkq | ||
| 4245 | 1+9xALgxpL56JAfDQiDyitSSBBtlVkxs1Pu2YVpHI7TYabS3OtB0PAx1oYxOdqHp | ||
| 4246 | 2yqlO/rOsP9+aij9JxzIsekp8VduZLTQwRVtDr4uDkbIXvRR/u8OYzo7cbrPb1nK | ||
| 4247 | DOObXUm4TOJXsZiKQlecdu/vvdFoqNL0Cbt3Nb4lggjEFixEIFapRBF37120Hape | ||
| 4248 | az6LMvYHL1cEksr1/p3C6eizjkxLAjHZ5DxIgif3GIJ2SDpxsROhOdUuxTTCHWKF | ||
| 4249 | 3wP+TfSvPd9cW436cOGlfifHhi5qjxLGhF5DUVCcGZt45vz27Ud+ez1m7xMTiF88 | ||
| 4250 | oWP7+ayHNZ/zgp6kPwqcMWmLmaSISo5uZk3vFsQPeSghYA2FFn3XVDjxklb9tTNM | ||
| 4251 | g9zXEJ9L/cb4Qr26fHMC4P99zVvh1Kxhe1fVSntb1IVYJ12/+CtgrKAmrhQhJ8Z3 | ||
| 4252 | mjOAPF5GP/fDsaOGM8boXg25NSyqRsGFAnWAoOsk+xWq5Gd/bnc/9ASKL3x74xdh | ||
| 4253 | 8N0JqSDIvgmk0H5Ew7IwSjiqqewYmgeCK9u4nBit2uBGF6zPXQIDAQABo2MwYTAd | ||
| 4254 | BgNVHQ4EFgQU2f4hQG6UnrybPZx9mCAZ5YwwYrIwHwYDVR0jBBgwFoAU2f4hQG6U | ||
| 4255 | nrybPZx9mCAZ5YwwYrIwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYw | ||
| 4256 | DQYJKoZIhvcNAQELBQADggIBAJ5Fngw7tu/hOsh80QA9z+LqBrWyOrsGS2h60COX | ||
| 4257 | dKcs8AjYeVrXWoSK2BKaG9l9XE1wxaX5q+WjiYndAfrs3fnpkpfbsEZC89NiqpX+ | ||
| 4258 | MWcUaViQCqoL7jcjx1BRtPV+nuN79+TMQjItSQzL/0kMmx40/W5ulop5A7Zv2wnL | ||
| 4259 | /V9lFDfhOPXzYRZY5LVtDQsEGz9QLX+zx3oaFoBg+Iof6Rsqxvm6ARppv9JYx1RX | ||
| 4260 | CI/hOWB3S6xZhBqI8d3LT3jX5+EzLfzuQfogsL7L9ziUwOHQhQ+77Sxzq+3+knYa | ||
| 4261 | ZH9bDTMJBzN7Bj8RpFxwPIXAz+OQqIN3+tvmxYxoZxBnpVIt8MSZj3+/0WvitUfW | ||
| 4262 | 2dCFmU2Umw9Lje4AWkcdEQOsQRivh7dvDDqPys/cA8GiCcjl/YBeyGBCARsaU1q7 | ||
| 4263 | N6a3vLqE6R5sGtRk2tRD/pOLS/IseRYQ1JMLiI+h2IYURpFHmygk71dSTlxCnKr3 | ||
| 4264 | Sewn6EAes6aJInKc9Q0ztFijMDvd1GpUk74aTfOTlPf8hAs/hCBcNANExdqtvArB | ||
| 4265 | As8e5ZTZ845b2EzwnexhF7sUMlQMAimTHpKG9n/v55IFDlndmQguLvqcAFLTxWYp | ||
| 4266 | 5KeXRKQOKIETNcX2b2TmQcTVL8w0RSXPQQCWPUouwpaYT05KnJe32x+SMsj/D1Fu | ||
| 4267 | 1uwJ | ||
| 4268 | -----END CERTIFICATE----- | ||
| 4269 | |||
| 4270 | # Issuer: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority | ||
| 4271 | # Subject: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority | ||
| 4272 | # Label: "TrustCor ECA-1" | ||
| 4273 | # Serial: 9548242946988625984 | ||
| 4274 | # MD5 Fingerprint: 27:92:23:1d:0a:f5:40:7c:e9:e6:6b:9d:d8:f5:e7:6c | ||
| 4275 | # SHA1 Fingerprint: 58:d1:df:95:95:67:6b:63:c0:f0:5b:1c:17:4d:8b:84:0b:c8:78:bd | ||
| 4276 | # SHA256 Fingerprint: 5a:88:5d:b1:9c:01:d9:12:c5:75:93:88:93:8c:af:bb:df:03:1a:b2:d4:8e:91:ee:15:58:9b:42:97:1d:03:9c | ||
| 4277 | -----BEGIN CERTIFICATE----- | ||
| 4278 | MIIEIDCCAwigAwIBAgIJAISCLF8cYtBAMA0GCSqGSIb3DQEBCwUAMIGcMQswCQYD | ||
| 4279 | VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk | ||
| 4280 | MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U | ||
| 4281 | cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxFzAVBgNVBAMMDlRydXN0Q29y | ||
| 4282 | IEVDQS0xMB4XDTE2MDIwNDEyMzIzM1oXDTI5MTIzMTE3MjgwN1owgZwxCzAJBgNV | ||
| 4283 | BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw | ||
| 4284 | IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy | ||
| 4285 | dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEXMBUGA1UEAwwOVHJ1c3RDb3Ig | ||
| 4286 | RUNBLTEwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDPj+ARtZ+odnbb | ||
| 4287 | 3w9U73NjKYKtR8aja+3+XzP4Q1HpGjORMRegdMTUpwHmspI+ap3tDvl0mEDTPwOA | ||
| 4288 | BoJA6LHip1GnHYMma6ve+heRK9jGrB6xnhkB1Zem6g23xFUfJ3zSCNV2HykVh0A5 | ||
| 4289 | 3ThFEXXQmqc04L/NyFIduUd+Dbi7xgz2c1cWWn5DkR9VOsZtRASqnKmcp0yJF4Ou | ||
| 4290 | owReUoCLHhIlERnXDH19MURB6tuvsBzvgdAsxZohmz3tQjtQJvLsznFhBmIhVE5/ | ||
| 4291 | wZ0+fyCMgMsq2JdiyIMzkX2woloPV+g7zPIlstR8L+xNxqE6FXrntl019fZISjZF | ||
| 4292 | ZtS6mFjBAgMBAAGjYzBhMB0GA1UdDgQWBBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAf | ||
| 4293 | BgNVHSMEGDAWgBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAPBgNVHRMBAf8EBTADAQH/ | ||
| 4294 | MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAQEABT41XBVwm8nHc2Fv | ||
| 4295 | civUwo/yQ10CzsSUuZQRg2dd4mdsdXa/uwyqNsatR5Nj3B5+1t4u/ukZMjgDfxT2 | ||
| 4296 | AHMsWbEhBuH7rBiVDKP/mZb3Kyeb1STMHd3BOuCYRLDE5D53sXOpZCz2HAF8P11F | ||
| 4297 | hcCF5yWPldwX8zyfGm6wyuMdKulMY/okYWLW2n62HGz1Ah3UKt1VkOsqEUc8Ll50 | ||
| 4298 | soIipX1TH0XsJ5F95yIW6MBoNtjG8U+ARDL54dHRHareqKucBK+tIA5kmE2la8BI | ||
| 4299 | WJZpTdwHjFGTot+fDz2LYLSCjaoITmJF4PkL0uDgPFveXHEnJcLmA4GLEFPjx1Wi | ||
| 4300 | tJ/X5g== | ||
| 4301 | -----END CERTIFICATE----- | ||
| 4302 | |||
| 4303 | # Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation | ||
| 4304 | # Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation | ||
| 4305 | # Label: "SSL.com Root Certification Authority RSA" | ||
| 4306 | # Serial: 8875640296558310041 | ||
| 4307 | # MD5 Fingerprint: 86:69:12:c0:70:f1:ec:ac:ac:c2:d5:bc:a5:5b:a1:29 | ||
| 4308 | # SHA1 Fingerprint: b7:ab:33:08:d1:ea:44:77:ba:14:80:12:5a:6f:bd:a9:36:49:0c:bb | ||
| 4309 | # SHA256 Fingerprint: 85:66:6a:56:2e:e0:be:5c:e9:25:c1:d8:89:0a:6f:76:a8:7e:c1:6d:4d:7d:5f:29:ea:74:19:cf:20:12:3b:69 | ||
| 4310 | -----BEGIN CERTIFICATE----- | ||
| 4311 | MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UE | ||
| 4312 | BhMCVVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQK | ||
| 4313 | DA9TU0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZp | ||
| 4314 | Y2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYwMjEyMTczOTM5WhcNNDEwMjEyMTcz | ||
| 4315 | OTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv | ||
| 4316 | dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv | ||
| 4317 | bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcN | ||
| 4318 | AQEBBQADggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2R | ||
| 4319 | xFdHaxh3a3by/ZPkPQ/CFp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aX | ||
| 4320 | qhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcC | ||
| 4321 | C52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/geoeOy3ZExqysdBP+lSgQ3 | ||
| 4322 | 6YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkpk8zruFvh | ||
| 4323 | /l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrF | ||
| 4324 | YD3ZfBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93E | ||
| 4325 | JNyAKoFBbZQ+yODJgUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVc | ||
| 4326 | US4cK38acijnALXRdMbX5J+tB5O2UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8 | ||
| 4327 | ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi81xtZPCvM8hnIk2snYxnP/Okm | ||
| 4328 | +Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4sbE6x/c+cCbqi | ||
| 4329 | M+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV | ||
| 4330 | HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4G | ||
| 4331 | A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGV | ||
| 4332 | cpNxJK1ok1iOMq8bs3AD/CUrdIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBc | ||
| 4333 | Hadm47GUBwwyOabqG7B52B2ccETjit3E+ZUfijhDPwGFpUenPUayvOUiaPd7nNgs | ||
| 4334 | PgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAslu1OJD7OAUN5F7kR/ | ||
| 4335 | q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjqerQ0 | ||
| 4336 | cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jr | ||
| 4337 | a6x+3uxjMxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90I | ||
| 4338 | H37hVZkLId6Tngr75qNJvTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/Y | ||
| 4339 | K9f1JmzJBjSWFupwWRoyeXkLtoh/D1JIPb9s2KJELtFOt3JY04kTlf5Eq/jXixtu | ||
| 4340 | nLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406ywKBjYZC6VWg3dGq2ktuf | ||
| 4341 | oYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NIWuuA8ShY | ||
| 4342 | Ic2wBlX7Jz9TkHCpBB5XJ7k= | ||
| 4343 | -----END CERTIFICATE----- | ||
| 4344 | |||
| 4345 | # Issuer: CN=SSL.com Root Certification Authority ECC O=SSL Corporation | ||
| 4346 | # Subject: CN=SSL.com Root Certification Authority ECC O=SSL Corporation | ||
| 4347 | # Label: "SSL.com Root Certification Authority ECC" | ||
| 4348 | # Serial: 8495723813297216424 | ||
| 4349 | # MD5 Fingerprint: 2e:da:e4:39:7f:9c:8f:37:d1:70:9f:26:17:51:3a:8e | ||
| 4350 | # SHA1 Fingerprint: c3:19:7c:39:24:e6:54:af:1b:c4:ab:20:95:7a:e2:c3:0e:13:02:6a | ||
| 4351 | # SHA256 Fingerprint: 34:17:bb:06:cc:60:07:da:1b:96:1c:92:0b:8a:b4:ce:3f:ad:82:0e:4a:a3:0b:9a:cb:c4:a7:4e:bd:ce:bc:65 | ||
| 4352 | -----BEGIN CERTIFICATE----- | ||
| 4353 | MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMC | ||
| 4354 | VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T | ||
| 4355 | U0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0 | ||
| 4356 | aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNDAzWhcNNDEwMjEyMTgxNDAz | ||
| 4357 | WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0 | ||
| 4358 | b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNvbSBS | ||
| 4359 | b290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB | ||
| 4360 | BAAiA2IABEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI | ||
| 4361 | 7Z4INcgn64mMU1jrYor+8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPg | ||
| 4362 | CemB+vNH06NjMGEwHQYDVR0OBBYEFILRhXMw5zUE044CkvvlpNHEIejNMA8GA1Ud | ||
| 4363 | EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTTjgKS++Wk0cQh6M0wDgYD | ||
| 4364 | VR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCWe+0F+S8T | ||
| 4365 | kdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+ | ||
| 4366 | gA0z5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl | ||
| 4367 | -----END CERTIFICATE----- | ||
| 4368 | |||
| 4369 | # Issuer: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation | ||
| 4370 | # Subject: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation | ||
| 4371 | # Label: "SSL.com EV Root Certification Authority RSA R2" | ||
| 4372 | # Serial: 6248227494352943350 | ||
| 4373 | # MD5 Fingerprint: e1:1e:31:58:1a:ae:54:53:02:f6:17:6a:11:7b:4d:95 | ||
| 4374 | # SHA1 Fingerprint: 74:3a:f0:52:9b:d0:32:a0:f4:4a:83:cd:d4:ba:a9:7b:7c:2e:c4:9a | ||
| 4375 | # SHA256 Fingerprint: 2e:7b:f1:6c:c2:24:85:a7:bb:e2:aa:86:96:75:07:61:b0:ae:39:be:3b:2f:e9:d0:cc:6d:4e:f7:34:91:42:5c | ||
| 4376 | -----BEGIN CERTIFICATE----- | ||
| 4377 | MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNV | ||
| 4378 | BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UE | ||
| 4379 | CgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2Vy | ||
| 4380 | dGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMB4XDTE3MDUzMTE4MTQzN1oXDTQy | ||
| 4381 | MDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4G | ||
| 4382 | A1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQD | ||
| 4383 | DC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy | ||
| 4384 | MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvq | ||
| 4385 | M0fNTPl9fb69LT3w23jhhqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssuf | ||
| 4386 | OePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7wcXHswxzpY6IXFJ3vG2fThVUCAtZJycxa | ||
| 4387 | 4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTOZw+oz12WGQvE43LrrdF9 | ||
| 4388 | HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+B6KjBSYR | ||
| 4389 | aZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcA | ||
| 4390 | b9ZhCBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQ | ||
| 4391 | Gp8hLH94t2S42Oim9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQV | ||
| 4392 | PWKchjgGAGYS5Fl2WlPAApiiECtoRHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMO | ||
| 4393 | pgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+SlmJuwgUHfbSguPvuUCYHBBXtSu | ||
| 4394 | UDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48+qvWBkofZ6aY | ||
| 4395 | MBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV | ||
| 4396 | HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa4 | ||
| 4397 | 9QaAJadz20ZpqJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBW | ||
| 4398 | s47LCp1Jjr+kxJG7ZhcFUZh1++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5 | ||
| 4399 | Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nxY/hoLVUE0fKNsKTPvDxeH3jnpaAg | ||
| 4400 | cLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2GguDKBAdRUNf/ktUM | ||
| 4401 | 79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDzOFSz | ||
| 4402 | /bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXt | ||
| 4403 | ll9ldDz7CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEm | ||
| 4404 | Kf7GUmG6sXP/wwyc5WxqlD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKK | ||
| 4405 | QbNmC1r7fSOl8hqw/96bg5Qu0T/fkreRrwU7ZcegbLHNYhLDkBvjJc40vG93drEQ | ||
| 4406 | w/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1hlMYegouCRw2n5H9gooi | ||
| 4407 | S9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX9hwJ1C07 | ||
| 4408 | mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w== | ||
| 4409 | -----END CERTIFICATE----- | ||
| 4410 | |||
| 4411 | # Issuer: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation | ||
| 4412 | # Subject: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation | ||
| 4413 | # Label: "SSL.com EV Root Certification Authority ECC" | ||
| 4414 | # Serial: 3182246526754555285 | ||
| 4415 | # MD5 Fingerprint: 59:53:22:65:83:42:01:54:c0:ce:42:b9:5a:7c:f2:90 | ||
| 4416 | # SHA1 Fingerprint: 4c:dd:51:a3:d1:f5:20:32:14:b0:c6:c5:32:23:03:91:c7:46:42:6d | ||
| 4417 | # SHA256 Fingerprint: 22:a2:c1:f7:bd:ed:70:4c:c1:e7:01:b5:f4:08:c3:10:88:0f:e9:56:b5:de:2a:4a:44:f9:9c:87:3a:25:a7:c8 | ||
| 4418 | -----BEGIN CERTIFICATE----- | ||
| 4419 | MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMC | ||
| 4420 | VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T | ||
| 4421 | U0wgQ29ycG9yYXRpb24xNDAyBgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZp | ||
| 4422 | Y2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNTIzWhcNNDEwMjEyMTgx | ||
| 4423 | NTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv | ||
| 4424 | dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NMLmNv | ||
| 4425 | bSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49 | ||
| 4426 | AgEGBSuBBAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMA | ||
| 4427 | VIbc/R/fALhBYlzccBYy3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1Kthku | ||
| 4428 | WnBaBu2+8KGwytAJKaNjMGEwHQYDVR0OBBYEFFvKXuXe0oGqzagtZFG22XKbl+ZP | ||
| 4429 | MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX | ||
| 4430 | 5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJN+vp1RPZ | ||
| 4431 | ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg | ||
| 4432 | h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg== | ||
| 4433 | -----END CERTIFICATE----- | ||
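Each `# SHA256 Fingerprint:` header in the bundle above is the SHA-256 digest of that certificate's DER encoding, i.e. the decoded base64 body between the BEGIN/END markers. A minimal sketch of recomputing one fingerprint from a PEM block, using only the standard library; the helper name sha256_fingerprint is illustrative and not part of the bundle:

import base64
import hashlib
import re

def sha256_fingerprint(pem_text):
    # Pull the base64 body of the first certificate out of the PEM text
    # and decode it back to the raw DER bytes the fingerprint is taken over.
    body = re.search(r"-----BEGIN CERTIFICATE-----(.+?)-----END CERTIFICATE-----",
                     pem_text, re.S).group(1)
    der = base64.b64decode("".join(body.split()))
    # Fingerprints are conventionally rendered as colon-separated hex pairs.
    digest = hashlib.sha256(der).hexdigest()
    return ":".join(digest[i:i + 2] for i in range(0, len(digest), 2))

Feeding it the "SSL.com EV Root Certification Authority ECC" block above should reproduce the 22:a2:c1:... fingerprint listed in its header.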
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/certifi/core.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/certifi/core.py new file mode 100644 index 0000000..0ac5ee1 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/certifi/core.py | |||
| @@ -0,0 +1,37 @@ | |||
| 1 | #!/usr/bin/env python | ||
| 2 | # -*- coding: utf-8 -*- | ||
| 3 | |||
| 4 | """ | ||
| 5 | certifi/core.py | ||
| 6 | ~~~~~~~~~~~~~~~ | ||
| 7 | |||
| 8 | This module returns the installation location of cacert.pem. | ||
| 9 | """ | ||
| 10 | import os | ||
| 11 | import warnings | ||
| 12 | |||
| 13 | |||
| 14 | class DeprecatedBundleWarning(DeprecationWarning): | ||
| 15 | """ | ||
| 16 | The weak security bundle is being deprecated. Please ask your service | ||
| 17 | provider to stop using cross-signed roots. | ||
| 18 | """ | ||
| 19 | |||
| 20 | |||
| 21 | def where(): | ||
| 22 | f = os.path.dirname(__file__) | ||
| 23 | |||
| 24 | return os.path.join(f, 'cacert.pem') | ||
| 25 | |||
| 26 | |||
| 27 | def old_where(): | ||
| 28 | warnings.warn( | ||
| 29 | "The weak security bundle has been removed. certifi.old_where() is now an alias " | ||
| 30 | "of certifi.where(). Please update your code to use certifi.where() instead. " | ||
| 31 | "certifi.old_where() will be removed in 2018.", | ||
| 32 | DeprecatedBundleWarning | ||
| 33 | ) | ||
| 34 | return where() | ||
| 35 | |||
| 36 | if __name__ == '__main__': | ||
| 37 | print(where()) | ||
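where() simply resolves cacert.pem next to the module, so callers can hand the bundle to any TLS API that accepts a CA file path. A minimal usage sketch against the vendored copy (the ssl-context part is illustrative, not something this module does itself):

import ssl

from pip._vendor.certifi.core import where

bundle = where()  # .../pip/_vendor/certifi/cacert.pem
ctx = ssl.create_default_context(cafile=bundle)  # trust only this bundle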
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/__init__.py new file mode 100644 index 0000000..45bf7e6 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/__init__.py | |||
| @@ -0,0 +1,39 @@ | |||
| 1 | ######################## BEGIN LICENSE BLOCK ######################## | ||
| 2 | # This library is free software; you can redistribute it and/or | ||
| 3 | # modify it under the terms of the GNU Lesser General Public | ||
| 4 | # License as published by the Free Software Foundation; either | ||
| 5 | # version 2.1 of the License, or (at your option) any later version. | ||
| 6 | # | ||
| 7 | # This library is distributed in the hope that it will be useful, | ||
| 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
| 10 | # Lesser General Public License for more details. | ||
| 11 | # | ||
| 12 | # You should have received a copy of the GNU Lesser General Public | ||
| 13 | # License along with this library; if not, write to the Free Software | ||
| 14 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA | ||
| 15 | # 02110-1301 USA | ||
| 16 | ######################### END LICENSE BLOCK ######################### | ||
| 17 | |||
| 18 | |||
| 19 | from .compat import PY2, PY3 | ||
| 20 | from .universaldetector import UniversalDetector | ||
| 21 | from .version import __version__, VERSION | ||
| 22 | |||
| 23 | |||
| 24 | def detect(byte_str): | ||
| 25 | """ | ||
| 26 | Detect the encoding of the given byte string. | ||
| 27 | |||
| 28 | :param byte_str: The byte sequence to examine. | ||
| 29 | :type byte_str: ``bytes`` or ``bytearray`` | ||
| 30 | """ | ||
| 31 | if not isinstance(byte_str, bytearray): | ||
| 32 | if not isinstance(byte_str, bytes): | ||
| 33 | raise TypeError('Expected object of type bytes or bytearray, got: ' | ||
| 34 | '{0}'.format(type(byte_str))) | ||
| 35 | else: | ||
| 36 | byte_str = bytearray(byte_str) | ||
| 37 | detector = UniversalDetector() | ||
| 38 | detector.feed(byte_str) | ||
| 39 | return detector.close() | ||
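detect() feeds the whole byte string to a fresh UniversalDetector and returns its result dict, which in this chardet release carries 'encoding', 'confidence' and 'language' keys. A minimal usage sketch (the sample text is illustrative, and the exact guess is not guaranteed):

from pip._vendor.chardet import detect

data = 'Übergrößenträger außer Betrieb'.encode('latin-1')
result = detect(data)
# e.g. {'encoding': 'ISO-8859-1', 'confidence': 0.73, 'language': ''}
print(result['encoding'], result['confidence'])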
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/big5freq.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/big5freq.py new file mode 100644 index 0000000..88023ae --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/big5freq.py | |||
| @@ -0,0 +1,386 @@ | |||
| 1 | ######################## BEGIN LICENSE BLOCK ######################## | ||
| 2 | # The Original Code is Mozilla Communicator client code. | ||
| 3 | # | ||
| 4 | # The Initial Developer of the Original Code is | ||
| 5 | # Netscape Communications Corporation. | ||
| 6 | # Portions created by the Initial Developer are Copyright (C) 1998 | ||
| 7 | # the Initial Developer. All Rights Reserved. | ||
| 8 | # | ||
| 9 | # Contributor(s): | ||
| 10 | # Mark Pilgrim - port to Python | ||
| 11 | # | ||
| 12 | # This library is free software; you can redistribute it and/or | ||
| 13 | # modify it under the terms of the GNU Lesser General Public | ||
| 14 | # License as published by the Free Software Foundation; either | ||
| 15 | # version 2.1 of the License, or (at your option) any later version. | ||
| 16 | # | ||
| 17 | # This library is distributed in the hope that it will be useful, | ||
| 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
| 20 | # Lesser General Public License for more details. | ||
| 21 | # | ||
| 22 | # You should have received a copy of the GNU Lesser General Public | ||
| 23 | # License along with this library; if not, write to the Free Software | ||
| 24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA | ||
| 25 | # 02110-1301 USA | ||
| 26 | ######################### END LICENSE BLOCK ######################### | ||
| 27 | |||
| 28 | # Big5 frequency table | ||
| 29 | # by Taiwan's Mandarin Promotion Council | ||
| 30 | # <http://www.edu.tw:81/mandr/> | ||
| 31 | # | ||
| 32 | # 128 --> 0.42261 | ||
| 33 | # 256 --> 0.57851 | ||
| 34 | # 512 --> 0.74851 | ||
| 35 | # 1024 --> 0.89384 | ||
| 36 | # 2048 --> 0.97583 | ||
| 37 | # | ||
| 38 | # Ideal Distribution Ratio = 0.74851 / (1 - 0.74851) = 2.98 | ||
| 39 | # Random Distribution Ratio = 512 / (5401 - 512) = 0.105 | ||
| 40 | # | ||
| 41 | # Typical Distribution Ratio = ~25% of the ideal one (0.25 * 2.98 = 0.75, used below), still much higher than the RDR | ||
| 42 | |||
| 43 | BIG5_TYPICAL_DISTRIBUTION_RATIO = 0.75 | ||
| 44 | |||
| 45 | # Char to FreqOrder table | ||
| 46 | BIG5_TABLE_SIZE = 5376 | ||
| 47 | |||
| 48 | BIG5_CHAR_TO_FREQ_ORDER = ( | ||
| 49 | 1,1801,1506, 255,1431, 198, 9, 82, 6,5008, 177, 202,3681,1256,2821, 110, # 16 | ||
| 50 | 3814, 33,3274, 261, 76, 44,2114, 16,2946,2187,1176, 659,3971, 26,3451,2653, # 32 | ||
| 51 | 1198,3972,3350,4202, 410,2215, 302, 590, 361,1964, 8, 204, 58,4510,5009,1932, # 48 | ||
| 52 | 63,5010,5011, 317,1614, 75, 222, 159,4203,2417,1480,5012,3555,3091, 224,2822, # 64 | ||
| 53 | 3682, 3, 10,3973,1471, 29,2787,1135,2866,1940, 873, 130,3275,1123, 312,5013, # 80 | ||
| 54 | 4511,2052, 507, 252, 682,5014, 142,1915, 124, 206,2947, 34,3556,3204, 64, 604, # 96 | ||
| 55 | 5015,2501,1977,1978, 155,1991, 645, 641,1606,5016,3452, 337, 72, 406,5017, 80, # 112 | ||
| 56 | 630, 238,3205,1509, 263, 939,1092,2654, 756,1440,1094,3453, 449, 69,2987, 591, # 128 | ||
| 57 | 179,2096, 471, 115,2035,1844, 60, 50,2988, 134, 806,1869, 734,2036,3454, 180, # 144 | ||
| 58 | 995,1607, 156, 537,2907, 688,5018, 319,1305, 779,2145, 514,2379, 298,4512, 359, # 160 | ||
| 59 | 2502, 90,2716,1338, 663, 11, 906,1099,2553, 20,2441, 182, 532,1716,5019, 732, # 176 | ||
| 60 | 1376,4204,1311,1420,3206, 25,2317,1056, 113, 399, 382,1950, 242,3455,2474, 529, # 192 | ||
| 61 | 3276, 475,1447,3683,5020, 117, 21, 656, 810,1297,2300,2334,3557,5021, 126,4205, # 208 | ||
| 62 | 706, 456, 150, 613,4513, 71,1118,2037,4206, 145,3092, 85, 835, 486,2115,1246, # 224 | ||
| 63 | 1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,5022,2128,2359, 347,3815, 221, # 240 | ||
| 64 | 3558,3135,5023,1956,1153,4207, 83, 296,1199,3093, 192, 624, 93,5024, 822,1898, # 256 | ||
| 65 | 2823,3136, 795,2065, 991,1554,1542,1592, 27, 43,2867, 859, 139,1456, 860,4514, # 272 | ||
| 66 | 437, 712,3974, 164,2397,3137, 695, 211,3037,2097, 195,3975,1608,3559,3560,3684, # 288 | ||
| 67 | 3976, 234, 811,2989,2098,3977,2233,1441,3561,1615,2380, 668,2077,1638, 305, 228, # 304 | ||
| 68 | 1664,4515, 467, 415,5025, 262,2099,1593, 239, 108, 300, 200,1033, 512,1247,2078, # 320 | ||
| 69 | 5026,5027,2176,3207,3685,2682, 593, 845,1062,3277, 88,1723,2038,3978,1951, 212, # 336 | ||
| 70 | 266, 152, 149, 468,1899,4208,4516, 77, 187,5028,3038, 37, 5,2990,5029,3979, # 352 | ||
| 71 | 5030,5031, 39,2524,4517,2908,3208,2079, 55, 148, 74,4518, 545, 483,1474,1029, # 368 | ||
| 72 | 1665, 217,1870,1531,3138,1104,2655,4209, 24, 172,3562, 900,3980,3563,3564,4519, # 384 | ||
| 73 | 32,1408,2824,1312, 329, 487,2360,2251,2717, 784,2683, 4,3039,3351,1427,1789, # 400 | ||
| 74 | 188, 109, 499,5032,3686,1717,1790, 888,1217,3040,4520,5033,3565,5034,3352,1520, # 416 | ||
| 75 | 3687,3981, 196,1034, 775,5035,5036, 929,1816, 249, 439, 38,5037,1063,5038, 794, # 432 | ||
| 76 | 3982,1435,2301, 46, 178,3278,2066,5039,2381,5040, 214,1709,4521, 804, 35, 707, # 448 | ||
| 77 | 324,3688,1601,2554, 140, 459,4210,5041,5042,1365, 839, 272, 978,2262,2580,3456, # 464 | ||
| 78 | 2129,1363,3689,1423, 697, 100,3094, 48, 70,1231, 495,3139,2196,5043,1294,5044, # 480 | ||
| 79 | 2080, 462, 586,1042,3279, 853, 256, 988, 185,2382,3457,1698, 434,1084,5045,3458, # 496 | ||
| 80 | 314,2625,2788,4522,2335,2336, 569,2285, 637,1817,2525, 757,1162,1879,1616,3459, # 512 | ||
| 81 | 287,1577,2116, 768,4523,1671,2868,3566,2526,1321,3816, 909,2418,5046,4211, 933, # 528 | ||
| 82 | 3817,4212,2053,2361,1222,4524, 765,2419,1322, 786,4525,5047,1920,1462,1677,2909, # 544 | ||
| 83 | 1699,5048,4526,1424,2442,3140,3690,2600,3353,1775,1941,3460,3983,4213, 309,1369, # 560 | ||
| 84 | 1130,2825, 364,2234,1653,1299,3984,3567,3985,3986,2656, 525,1085,3041, 902,2001, # 576 | ||
| 85 | 1475, 964,4527, 421,1845,1415,1057,2286, 940,1364,3141, 376,4528,4529,1381, 7, # 592 | ||
| 86 | 2527, 983,2383, 336,1710,2684,1846, 321,3461, 559,1131,3042,2752,1809,1132,1313, # 608 | ||
| 87 | 265,1481,1858,5049, 352,1203,2826,3280, 167,1089, 420,2827, 776, 792,1724,3568, # 624 | ||
| 88 | 4214,2443,3281,5050,4215,5051, 446, 229, 333,2753, 901,3818,1200,1557,4530,2657, # 640 | ||
| 89 | 1921, 395,2754,2685,3819,4216,1836, 125, 916,3209,2626,4531,5052,5053,3820,5054, # 656 | ||
| 90 | 5055,5056,4532,3142,3691,1133,2555,1757,3462,1510,2318,1409,3569,5057,2146, 438, # 672 | ||
| 91 | 2601,2910,2384,3354,1068, 958,3043, 461, 311,2869,2686,4217,1916,3210,4218,1979, # 688 | ||
| 92 | 383, 750,2755,2627,4219, 274, 539, 385,1278,1442,5058,1154,1965, 384, 561, 210, # 704 | ||
| 93 | 98,1295,2556,3570,5059,1711,2420,1482,3463,3987,2911,1257, 129,5060,3821, 642, # 720 | ||
| 94 | 523,2789,2790,2658,5061, 141,2235,1333, 68, 176, 441, 876, 907,4220, 603,2602, # 736 | ||
| 95 | 710, 171,3464, 404, 549, 18,3143,2398,1410,3692,1666,5062,3571,4533,2912,4534, # 752 | ||
| 96 | 5063,2991, 368,5064, 146, 366, 99, 871,3693,1543, 748, 807,1586,1185, 22,2263, # 768 | ||
| 97 | 379,3822,3211,5065,3212, 505,1942,2628,1992,1382,2319,5066, 380,2362, 218, 702, # 784 | ||
| 98 | 1818,1248,3465,3044,3572,3355,3282,5067,2992,3694, 930,3283,3823,5068, 59,5069, # 800 | ||
| 99 | 585, 601,4221, 497,3466,1112,1314,4535,1802,5070,1223,1472,2177,5071, 749,1837, # 816 | ||
| 100 | 690,1900,3824,1773,3988,1476, 429,1043,1791,2236,2117, 917,4222, 447,1086,1629, # 832 | ||
| 101 | 5072, 556,5073,5074,2021,1654, 844,1090, 105, 550, 966,1758,2828,1008,1783, 686, # 848 | ||
| 102 | 1095,5075,2287, 793,1602,5076,3573,2603,4536,4223,2948,2302,4537,3825, 980,2503, # 864 | ||
| 103 | 544, 353, 527,4538, 908,2687,2913,5077, 381,2629,1943,1348,5078,1341,1252, 560, # 880 | ||
| 104 | 3095,5079,3467,2870,5080,2054, 973, 886,2081, 143,4539,5081,5082, 157,3989, 496, # 896 | ||
| 105 | 4224, 57, 840, 540,2039,4540,4541,3468,2118,1445, 970,2264,1748,1966,2082,4225, # 912 | ||
| 106 | 3144,1234,1776,3284,2829,3695, 773,1206,2130,1066,2040,1326,3990,1738,1725,4226, # 928 | ||
| 107 | 279,3145, 51,1544,2604, 423,1578,2131,2067, 173,4542,1880,5083,5084,1583, 264, # 944 | ||
| 108 | 610,3696,4543,2444, 280, 154,5085,5086,5087,1739, 338,1282,3096, 693,2871,1411, # 960 | ||
| 109 | 1074,3826,2445,5088,4544,5089,5090,1240, 952,2399,5091,2914,1538,2688, 685,1483, # 976 | ||
| 110 | 4227,2475,1436, 953,4228,2055,4545, 671,2400, 79,4229,2446,3285, 608, 567,2689, # 992 | ||
| 111 | 3469,4230,4231,1691, 393,1261,1792,2401,5092,4546,5093,5094,5095,5096,1383,1672, # 1008 | ||
| 112 | 3827,3213,1464, 522,1119, 661,1150, 216, 675,4547,3991,1432,3574, 609,4548,2690, # 1024 | ||
| 113 | 2402,5097,5098,5099,4232,3045, 0,5100,2476, 315, 231,2447, 301,3356,4549,2385, # 1040 | ||
| 114 | 5101, 233,4233,3697,1819,4550,4551,5102, 96,1777,1315,2083,5103, 257,5104,1810, # 1056 | ||
| 115 | 3698,2718,1139,1820,4234,2022,1124,2164,2791,1778,2659,5105,3097, 363,1655,3214, # 1072 | ||
| 116 | 5106,2993,5107,5108,5109,3992,1567,3993, 718, 103,3215, 849,1443, 341,3357,2949, # 1088 | ||
| 117 | 1484,5110,1712, 127, 67, 339,4235,2403, 679,1412, 821,5111,5112, 834, 738, 351, # 1104 | ||
| 118 | 2994,2147, 846, 235,1497,1881, 418,1993,3828,2719, 186,1100,2148,2756,3575,1545, # 1120 | ||
| 119 | 1355,2950,2872,1377, 583,3994,4236,2581,2995,5113,1298,3699,1078,2557,3700,2363, # 1136 | ||
| 120 | 78,3829,3830, 267,1289,2100,2002,1594,4237, 348, 369,1274,2197,2178,1838,4552, # 1152 | ||
| 121 | 1821,2830,3701,2757,2288,2003,4553,2951,2758, 144,3358, 882,4554,3995,2759,3470, # 1168 | ||
| 122 | 4555,2915,5114,4238,1726, 320,5115,3996,3046, 788,2996,5116,2831,1774,1327,2873, # 1184 | ||
| 123 | 3997,2832,5117,1306,4556,2004,1700,3831,3576,2364,2660, 787,2023, 506, 824,3702, # 1200 | ||
| 124 | 534, 323,4557,1044,3359,2024,1901, 946,3471,5118,1779,1500,1678,5119,1882,4558, # 1216 | ||
| 125 | 165, 243,4559,3703,2528, 123, 683,4239, 764,4560, 36,3998,1793, 589,2916, 816, # 1232 | ||
| 126 | 626,1667,3047,2237,1639,1555,1622,3832,3999,5120,4000,2874,1370,1228,1933, 891, # 1248 | ||
| 127 | 2084,2917, 304,4240,5121, 292,2997,2720,3577, 691,2101,4241,1115,4561, 118, 662, # 1264 | ||
| 128 | 5122, 611,1156, 854,2386,1316,2875, 2, 386, 515,2918,5123,5124,3286, 868,2238, # 1280 | ||
| 129 | 1486, 855,2661, 785,2216,3048,5125,1040,3216,3578,5126,3146, 448,5127,1525,5128, # 1296 | ||
| 130 | 2165,4562,5129,3833,5130,4242,2833,3579,3147, 503, 818,4001,3148,1568, 814, 676, # 1312 | ||
| 131 | 1444, 306,1749,5131,3834,1416,1030, 197,1428, 805,2834,1501,4563,5132,5133,5134, # 1328 | ||
| 132 | 1994,5135,4564,5136,5137,2198, 13,2792,3704,2998,3149,1229,1917,5138,3835,2132, # 1344 | ||
| 133 | 5139,4243,4565,2404,3580,5140,2217,1511,1727,1120,5141,5142, 646,3836,2448, 307, # 1360 | ||
| 134 | 5143,5144,1595,3217,5145,5146,5147,3705,1113,1356,4002,1465,2529,2530,5148, 519, # 1376 | ||
| 135 | 5149, 128,2133, 92,2289,1980,5150,4003,1512, 342,3150,2199,5151,2793,2218,1981, # 1392 | ||
| 136 | 3360,4244, 290,1656,1317, 789, 827,2365,5152,3837,4566, 562, 581,4004,5153, 401, # 1408 | ||
| 137 | 4567,2252, 94,4568,5154,1399,2794,5155,1463,2025,4569,3218,1944,5156, 828,1105, # 1424 | ||
| 138 | 4245,1262,1394,5157,4246, 605,4570,5158,1784,2876,5159,2835, 819,2102, 578,2200, # 1440 | ||
| 139 | 2952,5160,1502, 436,3287,4247,3288,2836,4005,2919,3472,3473,5161,2721,2320,5162, # 1456 | ||
| 140 | 5163,2337,2068, 23,4571, 193, 826,3838,2103, 699,1630,4248,3098, 390,1794,1064, # 1472 | ||
| 141 | 3581,5164,1579,3099,3100,1400,5165,4249,1839,1640,2877,5166,4572,4573, 137,4250, # 1488 | ||
| 142 | 598,3101,1967, 780, 104, 974,2953,5167, 278, 899, 253, 402, 572, 504, 493,1339, # 1504 | ||
| 143 | 5168,4006,1275,4574,2582,2558,5169,3706,3049,3102,2253, 565,1334,2722, 863, 41, # 1520 | ||
| 144 | 5170,5171,4575,5172,1657,2338, 19, 463,2760,4251, 606,5173,2999,3289,1087,2085, # 1536 | ||
| 145 | 1323,2662,3000,5174,1631,1623,1750,4252,2691,5175,2878, 791,2723,2663,2339, 232, # 1552 | ||
| 146 | 2421,5176,3001,1498,5177,2664,2630, 755,1366,3707,3290,3151,2026,1609, 119,1918, # 1568 | ||
| 147 | 3474, 862,1026,4253,5178,4007,3839,4576,4008,4577,2265,1952,2477,5179,1125, 817, # 1584 | ||
| 148 | 4254,4255,4009,1513,1766,2041,1487,4256,3050,3291,2837,3840,3152,5180,5181,1507, # 1600 | ||
| 149 | 5182,2692, 733, 40,1632,1106,2879, 345,4257, 841,2531, 230,4578,3002,1847,3292, # 1616 | ||
| 150 | 3475,5183,1263, 986,3476,5184, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562, # 1632 | ||
| 151 | 4010,4011,2954, 967,2761,2665,1349, 592,2134,1692,3361,3003,1995,4258,1679,4012, # 1648 | ||
| 152 | 1902,2188,5185, 739,3708,2724,1296,1290,5186,4259,2201,2202,1922,1563,2605,2559, # 1664 | ||
| 153 | 1871,2762,3004,5187, 435,5188, 343,1108, 596, 17,1751,4579,2239,3477,3709,5189, # 1680 | ||
| 154 | 4580, 294,3582,2955,1693, 477, 979, 281,2042,3583, 643,2043,3710,2631,2795,2266, # 1696 | ||
| 155 | 1031,2340,2135,2303,3584,4581, 367,1249,2560,5190,3585,5191,4582,1283,3362,2005, # 1712 | ||
| 156 | 240,1762,3363,4583,4584, 836,1069,3153, 474,5192,2149,2532, 268,3586,5193,3219, # 1728 | ||
| 157 | 1521,1284,5194,1658,1546,4260,5195,3587,3588,5196,4261,3364,2693,1685,4262, 961, # 1744 | ||
| 158 | 1673,2632, 190,2006,2203,3841,4585,4586,5197, 570,2504,3711,1490,5198,4587,2633, # 1760 | ||
| 159 | 3293,1957,4588, 584,1514, 396,1045,1945,5199,4589,1968,2449,5200,5201,4590,4013, # 1776 | ||
| 160 | 619,5202,3154,3294, 215,2007,2796,2561,3220,4591,3221,4592, 763,4263,3842,4593, # 1792 | ||
| 161 | 5203,5204,1958,1767,2956,3365,3712,1174, 452,1477,4594,3366,3155,5205,2838,1253, # 1808 | ||
| 162 | 2387,2189,1091,2290,4264, 492,5206, 638,1169,1825,2136,1752,4014, 648, 926,1021, # 1824 | ||
| 163 | 1324,4595, 520,4596, 997, 847,1007, 892,4597,3843,2267,1872,3713,2405,1785,4598, # 1840 | ||
| 164 | 1953,2957,3103,3222,1728,4265,2044,3714,4599,2008,1701,3156,1551, 30,2268,4266, # 1856 | ||
| 165 | 5207,2027,4600,3589,5208, 501,5209,4267, 594,3478,2166,1822,3590,3479,3591,3223, # 1872 | ||
| 166 | 829,2839,4268,5210,1680,3157,1225,4269,5211,3295,4601,4270,3158,2341,5212,4602, # 1888 | ||
| 167 | 4271,5213,4015,4016,5214,1848,2388,2606,3367,5215,4603, 374,4017, 652,4272,4273, # 1904 | ||
| 168 | 375,1140, 798,5216,5217,5218,2366,4604,2269, 546,1659, 138,3051,2450,4605,5219, # 1920 | ||
| 169 | 2254, 612,1849, 910, 796,3844,1740,1371, 825,3845,3846,5220,2920,2562,5221, 692, # 1936 | ||
| 170 | 444,3052,2634, 801,4606,4274,5222,1491, 244,1053,3053,4275,4276, 340,5223,4018, # 1952 | ||
| 171 | 1041,3005, 293,1168, 87,1357,5224,1539, 959,5225,2240, 721, 694,4277,3847, 219, # 1968 | ||
| 172 | 1478, 644,1417,3368,2666,1413,1401,1335,1389,4019,5226,5227,3006,2367,3159,1826, # 1984 | ||
| 173 | 730,1515, 184,2840, 66,4607,5228,1660,2958, 246,3369, 378,1457, 226,3480, 975, # 2000 | ||
| 174 | 4020,2959,1264,3592, 674, 696,5229, 163,5230,1141,2422,2167, 713,3593,3370,4608, # 2016 | ||
| 175 | 4021,5231,5232,1186, 15,5233,1079,1070,5234,1522,3224,3594, 276,1050,2725, 758, # 2032 | ||
| 176 | 1126, 653,2960,3296,5235,2342, 889,3595,4022,3104,3007, 903,1250,4609,4023,3481, # 2048 | ||
| 177 | 3596,1342,1681,1718, 766,3297, 286, 89,2961,3715,5236,1713,5237,2607,3371,3008, # 2064 | ||
| 178 | 5238,2962,2219,3225,2880,5239,4610,2505,2533, 181, 387,1075,4024, 731,2190,3372, # 2080 | ||
| 179 | 5240,3298, 310, 313,3482,2304, 770,4278, 54,3054, 189,4611,3105,3848,4025,5241, # 2096 | ||
| 180 | 1230,1617,1850, 355,3597,4279,4612,3373, 111,4280,3716,1350,3160,3483,3055,4281, # 2112 | ||
| 181 | 2150,3299,3598,5242,2797,4026,4027,3009, 722,2009,5243,1071, 247,1207,2343,2478, # 2128 | ||
| 182 | 1378,4613,2010, 864,1437,1214,4614, 373,3849,1142,2220, 667,4615, 442,2763,2563, # 2144 | ||
| 183 | 3850,4028,1969,4282,3300,1840, 837, 170,1107, 934,1336,1883,5244,5245,2119,4283, # 2160 | ||
| 184 | 2841, 743,1569,5246,4616,4284, 582,2389,1418,3484,5247,1803,5248, 357,1395,1729, # 2176 | ||
| 185 | 3717,3301,2423,1564,2241,5249,3106,3851,1633,4617,1114,2086,4285,1532,5250, 482, # 2192 | ||
| 186 | 2451,4618,5251,5252,1492, 833,1466,5253,2726,3599,1641,2842,5254,1526,1272,3718, # 2208 | ||
| 187 | 4286,1686,1795, 416,2564,1903,1954,1804,5255,3852,2798,3853,1159,2321,5256,2881, # 2224 | ||
| 188 | 4619,1610,1584,3056,2424,2764, 443,3302,1163,3161,5257,5258,4029,5259,4287,2506, # 2240 | ||
| 189 | 3057,4620,4030,3162,2104,1647,3600,2011,1873,4288,5260,4289, 431,3485,5261, 250, # 2256 | ||
| 190 | 97, 81,4290,5262,1648,1851,1558, 160, 848,5263, 866, 740,1694,5264,2204,2843, # 2272 | ||
| 191 | 3226,4291,4621,3719,1687, 950,2479, 426, 469,3227,3720,3721,4031,5265,5266,1188, # 2288 | ||
| 192 | 424,1996, 861,3601,4292,3854,2205,2694, 168,1235,3602,4293,5267,2087,1674,4622, # 2304 | ||
| 193 | 3374,3303, 220,2565,1009,5268,3855, 670,3010, 332,1208, 717,5269,5270,3603,2452, # 2320 | ||
| 194 | 4032,3375,5271, 513,5272,1209,2882,3376,3163,4623,1080,5273,5274,5275,5276,2534, # 2336 | ||
| 195 | 3722,3604, 815,1587,4033,4034,5277,3605,3486,3856,1254,4624,1328,3058,1390,4035, # 2352 | ||
| 196 | 1741,4036,3857,4037,5278, 236,3858,2453,3304,5279,5280,3723,3859,1273,3860,4625, # 2368 | ||
| 197 | 5281, 308,5282,4626, 245,4627,1852,2480,1307,2583, 430, 715,2137,2454,5283, 270, # 2384 | ||
| 198 | 199,2883,4038,5284,3606,2727,1753, 761,1754, 725,1661,1841,4628,3487,3724,5285, # 2400 | ||
| 199 | 5286, 587, 14,3305, 227,2608, 326, 480,2270, 943,2765,3607, 291, 650,1884,5287, # 2416 | ||
| 200 | 1702,1226, 102,1547, 62,3488, 904,4629,3489,1164,4294,5288,5289,1224,1548,2766, # 2432 | ||
| 201 | 391, 498,1493,5290,1386,1419,5291,2056,1177,4630, 813, 880,1081,2368, 566,1145, # 2448 | ||
| 202 | 4631,2291,1001,1035,2566,2609,2242, 394,1286,5292,5293,2069,5294, 86,1494,1730, # 2464 | ||
| 203 | 4039, 491,1588, 745, 897,2963, 843,3377,4040,2767,2884,3306,1768, 998,2221,2070, # 2480 | ||
| 204 | 397,1827,1195,1970,3725,3011,3378, 284,5295,3861,2507,2138,2120,1904,5296,4041, # 2496 | ||
| 205 | 2151,4042,4295,1036,3490,1905, 114,2567,4296, 209,1527,5297,5298,2964,2844,2635, # 2512 | ||
| 206 | 2390,2728,3164, 812,2568,5299,3307,5300,1559, 737,1885,3726,1210, 885, 28,2695, # 2528 | ||
| 207 | 3608,3862,5301,4297,1004,1780,4632,5302, 346,1982,2222,2696,4633,3863,1742, 797, # 2544 | ||
| 208 | 1642,4043,1934,1072,1384,2152, 896,4044,3308,3727,3228,2885,3609,5303,2569,1959, # 2560 | ||
| 209 | 4634,2455,1786,5304,5305,5306,4045,4298,1005,1308,3728,4299,2729,4635,4636,1528, # 2576 | ||
| 210 | 2610, 161,1178,4300,1983, 987,4637,1101,4301, 631,4046,1157,3229,2425,1343,1241, # 2592 | ||
| 211 | 1016,2243,2570, 372, 877,2344,2508,1160, 555,1935, 911,4047,5307, 466,1170, 169, # 2608 | ||
| 212 | 1051,2921,2697,3729,2481,3012,1182,2012,2571,1251,2636,5308, 992,2345,3491,1540, # 2624 | ||
| 213 | 2730,1201,2071,2406,1997,2482,5309,4638, 528,1923,2191,1503,1874,1570,2369,3379, # 2640 | ||
| 214 | 3309,5310, 557,1073,5311,1828,3492,2088,2271,3165,3059,3107, 767,3108,2799,4639, # 2656 | ||
| 215 | 1006,4302,4640,2346,1267,2179,3730,3230, 778,4048,3231,2731,1597,2667,5312,4641, # 2672 | ||
| 216 | 5313,3493,5314,5315,5316,3310,2698,1433,3311, 131, 95,1504,4049, 723,4303,3166, # 2688 | ||
| 217 | 1842,3610,2768,2192,4050,2028,2105,3731,5317,3013,4051,1218,5318,3380,3232,4052, # 2704 | ||
| 218 | 4304,2584, 248,1634,3864, 912,5319,2845,3732,3060,3865, 654, 53,5320,3014,5321, # 2720 | ||
| 219 | 1688,4642, 777,3494,1032,4053,1425,5322, 191, 820,2121,2846, 971,4643, 931,3233, # 2736 | ||
| 220 | 135, 664, 783,3866,1998, 772,2922,1936,4054,3867,4644,2923,3234, 282,2732, 640, # 2752 | ||
| 221 | 1372,3495,1127, 922, 325,3381,5323,5324, 711,2045,5325,5326,4055,2223,2800,1937, # 2768 | ||
| 222 | 4056,3382,2224,2255,3868,2305,5327,4645,3869,1258,3312,4057,3235,2139,2965,4058, # 2784 | ||
| 223 | 4059,5328,2225, 258,3236,4646, 101,1227,5329,3313,1755,5330,1391,3314,5331,2924, # 2800 | ||
| 224 | 2057, 893,5332,5333,5334,1402,4305,2347,5335,5336,3237,3611,5337,5338, 878,1325, # 2816 | ||
| 225 | 1781,2801,4647, 259,1385,2585, 744,1183,2272,4648,5339,4060,2509,5340, 684,1024, # 2832 | ||
| 226 | 4306,5341, 472,3612,3496,1165,3315,4061,4062, 322,2153, 881, 455,1695,1152,1340, # 2848 | ||
| 227 | 660, 554,2154,4649,1058,4650,4307, 830,1065,3383,4063,4651,1924,5342,1703,1919, # 2864 | ||
| 228 | 5343, 932,2273, 122,5344,4652, 947, 677,5345,3870,2637, 297,1906,1925,2274,4653, # 2880 | ||
| 229 | 2322,3316,5346,5347,4308,5348,4309, 84,4310, 112, 989,5349, 547,1059,4064, 701, # 2896 | ||
| 230 | 3613,1019,5350,4311,5351,3497, 942, 639, 457,2306,2456, 993,2966, 407, 851, 494, # 2912 | ||
| 231 | 4654,3384, 927,5352,1237,5353,2426,3385, 573,4312, 680, 921,2925,1279,1875, 285, # 2928 | ||
| 232 | 790,1448,1984, 719,2168,5354,5355,4655,4065,4066,1649,5356,1541, 563,5357,1077, # 2944 | ||
| 233 | 5358,3386,3061,3498, 511,3015,4067,4068,3733,4069,1268,2572,3387,3238,4656,4657, # 2960 | ||
| 234 | 5359, 535,1048,1276,1189,2926,2029,3167,1438,1373,2847,2967,1134,2013,5360,4313, # 2976 | ||
| 235 | 1238,2586,3109,1259,5361, 700,5362,2968,3168,3734,4314,5363,4315,1146,1876,1907, # 2992 | ||
| 236 | 4658,2611,4070, 781,2427, 132,1589, 203, 147, 273,2802,2407, 898,1787,2155,4071, # 3008 | ||
| 237 | 4072,5364,3871,2803,5365,5366,4659,4660,5367,3239,5368,1635,3872, 965,5369,1805, # 3024 | ||
| 238 | 2699,1516,3614,1121,1082,1329,3317,4073,1449,3873, 65,1128,2848,2927,2769,1590, # 3040 | ||
| 239 | 3874,5370,5371, 12,2668, 45, 976,2587,3169,4661, 517,2535,1013,1037,3240,5372, # 3056 | ||
| 240 | 3875,2849,5373,3876,5374,3499,5375,2612, 614,1999,2323,3877,3110,2733,2638,5376, # 3072 | ||
| 241 | 2588,4316, 599,1269,5377,1811,3735,5378,2700,3111, 759,1060, 489,1806,3388,3318, # 3088 | ||
| 242 | 1358,5379,5380,2391,1387,1215,2639,2256, 490,5381,5382,4317,1759,2392,2348,5383, # 3104 | ||
| 243 | 4662,3878,1908,4074,2640,1807,3241,4663,3500,3319,2770,2349, 874,5384,5385,3501, # 3120 | ||
| 244 | 3736,1859, 91,2928,3737,3062,3879,4664,5386,3170,4075,2669,5387,3502,1202,1403, # 3136 | ||
| 245 | 3880,2969,2536,1517,2510,4665,3503,2511,5388,4666,5389,2701,1886,1495,1731,4076, # 3152 | ||
| 246 | 2370,4667,5390,2030,5391,5392,4077,2702,1216, 237,2589,4318,2324,4078,3881,4668, # 3168 | ||
| 247 | 4669,2703,3615,3504, 445,4670,5393,5394,5395,5396,2771, 61,4079,3738,1823,4080, # 3184 | ||
| 248 | 5397, 687,2046, 935, 925, 405,2670, 703,1096,1860,2734,4671,4081,1877,1367,2704, # 3200 | ||
| 249 | 3389, 918,2106,1782,2483, 334,3320,1611,1093,4672, 564,3171,3505,3739,3390, 945, # 3216 | ||
| 250 | 2641,2058,4673,5398,1926, 872,4319,5399,3506,2705,3112, 349,4320,3740,4082,4674, # 3232 | ||
| 251 | 3882,4321,3741,2156,4083,4675,4676,4322,4677,2408,2047, 782,4084, 400, 251,4323, # 3248 | ||
| 252 | 1624,5400,5401, 277,3742, 299,1265, 476,1191,3883,2122,4324,4325,1109, 205,5402, # 3264 | ||
| 253 | 2590,1000,2157,3616,1861,5403,5404,5405,4678,5406,4679,2573, 107,2484,2158,4085, # 3280 | ||
| 254 | 3507,3172,5407,1533, 541,1301, 158, 753,4326,2886,3617,5408,1696, 370,1088,4327, # 3296 | ||
| 255 | 4680,3618, 579, 327, 440, 162,2244, 269,1938,1374,3508, 968,3063, 56,1396,3113, # 3312 | ||
| 256 | 2107,3321,3391,5409,1927,2159,4681,3016,5410,3619,5411,5412,3743,4682,2485,5413, # 3328 | ||
| 257 | 2804,5414,1650,4683,5415,2613,5416,5417,4086,2671,3392,1149,3393,4087,3884,4088, # 3344 | ||
| 258 | 5418,1076, 49,5419, 951,3242,3322,3323, 450,2850, 920,5420,1812,2805,2371,4328, # 3360 | ||
| 259 | 1909,1138,2372,3885,3509,5421,3243,4684,1910,1147,1518,2428,4685,3886,5422,4686, # 3376 | ||
| 260 | 2393,2614, 260,1796,3244,5423,5424,3887,3324, 708,5425,3620,1704,5426,3621,1351, # 3392 | ||
| 261 | 1618,3394,3017,1887, 944,4329,3395,4330,3064,3396,4331,5427,3744, 422, 413,1714, # 3408 | ||
| 262 | 3325, 500,2059,2350,4332,2486,5428,1344,1911, 954,5429,1668,5430,5431,4089,2409, # 3424 | ||
| 263 | 4333,3622,3888,4334,5432,2307,1318,2512,3114, 133,3115,2887,4687, 629, 31,2851, # 3440 | ||
| 264 | 2706,3889,4688, 850, 949,4689,4090,2970,1732,2089,4335,1496,1853,5433,4091, 620, # 3456 | ||
| 265 | 3245, 981,1242,3745,3397,1619,3746,1643,3326,2140,2457,1971,1719,3510,2169,5434, # 3472 | ||
| 266 | 3246,5435,5436,3398,1829,5437,1277,4690,1565,2048,5438,1636,3623,3116,5439, 869, # 3488 | ||
| 267 | 2852, 655,3890,3891,3117,4092,3018,3892,1310,3624,4691,5440,5441,5442,1733, 558, # 3504 | ||
| 268 | 4692,3747, 335,1549,3065,1756,4336,3748,1946,3511,1830,1291,1192, 470,2735,2108, # 3520 | ||
| 269 | 2806, 913,1054,4093,5443,1027,5444,3066,4094,4693, 982,2672,3399,3173,3512,3247, # 3536 | ||
| 270 | 3248,1947,2807,5445, 571,4694,5446,1831,5447,3625,2591,1523,2429,5448,2090, 984, # 3552 | ||
| 271 | 4695,3749,1960,5449,3750, 852, 923,2808,3513,3751, 969,1519, 999,2049,2325,1705, # 3568 | ||
| 272 | 5450,3118, 615,1662, 151, 597,4095,2410,2326,1049, 275,4696,3752,4337, 568,3753, # 3584 | ||
| 273 | 3626,2487,4338,3754,5451,2430,2275, 409,3249,5452,1566,2888,3514,1002, 769,2853, # 3600 | ||
| 274 | 194,2091,3174,3755,2226,3327,4339, 628,1505,5453,5454,1763,2180,3019,4096, 521, # 3616 | ||
| 275 | 1161,2592,1788,2206,2411,4697,4097,1625,4340,4341, 412, 42,3119, 464,5455,2642, # 3632 | ||
| 276 | 4698,3400,1760,1571,2889,3515,2537,1219,2207,3893,2643,2141,2373,4699,4700,3328, # 3648 | ||
| 277 | 1651,3401,3627,5456,5457,3628,2488,3516,5458,3756,5459,5460,2276,2092, 460,5461, # 3664 | ||
| 278 | 4701,5462,3020, 962, 588,3629, 289,3250,2644,1116, 52,5463,3067,1797,5464,5465, # 3680 | ||
| 279 | 5466,1467,5467,1598,1143,3757,4342,1985,1734,1067,4702,1280,3402, 465,4703,1572, # 3696 | ||
| 280 | 510,5468,1928,2245,1813,1644,3630,5469,4704,3758,5470,5471,2673,1573,1534,5472, # 3712 | ||
| 281 | 5473, 536,1808,1761,3517,3894,3175,2645,5474,5475,5476,4705,3518,2929,1912,2809, # 3728 | ||
| 282 | 5477,3329,1122, 377,3251,5478, 360,5479,5480,4343,1529, 551,5481,2060,3759,1769, # 3744 | ||
| 283 | 2431,5482,2930,4344,3330,3120,2327,2109,2031,4706,1404, 136,1468,1479, 672,1171, # 3760 | ||
| 284 | 3252,2308, 271,3176,5483,2772,5484,2050, 678,2736, 865,1948,4707,5485,2014,4098, # 3776 | ||
| 285 | 2971,5486,2737,2227,1397,3068,3760,4708,4709,1735,2931,3403,3631,5487,3895, 509, # 3792 | ||
| 286 | 2854,2458,2890,3896,5488,5489,3177,3178,4710,4345,2538,4711,2309,1166,1010, 552, # 3808 | ||
| 287 | 681,1888,5490,5491,2972,2973,4099,1287,1596,1862,3179, 358, 453, 736, 175, 478, # 3824 | ||
| 288 | 1117, 905,1167,1097,5492,1854,1530,5493,1706,5494,2181,3519,2292,3761,3520,3632, # 3840 | ||
| 289 | 4346,2093,4347,5495,3404,1193,2489,4348,1458,2193,2208,1863,1889,1421,3331,2932, # 3856 | ||
| 290 | 3069,2182,3521, 595,2123,5496,4100,5497,5498,4349,1707,2646, 223,3762,1359, 751, # 3872 | ||
| 291 | 3121, 183,3522,5499,2810,3021, 419,2374, 633, 704,3897,2394, 241,5500,5501,5502, # 3888 | ||
| 292 | 838,3022,3763,2277,2773,2459,3898,1939,2051,4101,1309,3122,2246,1181,5503,1136, # 3904 | ||
| 293 | 2209,3899,2375,1446,4350,2310,4712,5504,5505,4351,1055,2615, 484,3764,5506,4102, # 3920 | ||
| 294 | 625,4352,2278,3405,1499,4353,4103,5507,4104,4354,3253,2279,2280,3523,5508,5509, # 3936 | ||
| 295 | 2774, 808,2616,3765,3406,4105,4355,3123,2539, 526,3407,3900,4356, 955,5510,1620, # 3952 | ||
| 296 | 4357,2647,2432,5511,1429,3766,1669,1832, 994, 928,5512,3633,1260,5513,5514,5515, # 3968 | ||
| 297 | 1949,2293, 741,2933,1626,4358,2738,2460, 867,1184, 362,3408,1392,5516,5517,4106, # 3984 | ||
| 298 | 4359,1770,1736,3254,2934,4713,4714,1929,2707,1459,1158,5518,3070,3409,2891,1292, # 4000 | ||
| 299 | 1930,2513,2855,3767,1986,1187,2072,2015,2617,4360,5519,2574,2514,2170,3768,2490, # 4016 | ||
| 300 | 3332,5520,3769,4715,5521,5522, 666,1003,3023,1022,3634,4361,5523,4716,1814,2257, # 4032 | ||
| 301 | 574,3901,1603, 295,1535, 705,3902,4362, 283, 858, 417,5524,5525,3255,4717,4718, # 4048 | ||
| 302 | 3071,1220,1890,1046,2281,2461,4107,1393,1599, 689,2575, 388,4363,5526,2491, 802, # 4064 | ||
| 303 | 5527,2811,3903,2061,1405,2258,5528,4719,3904,2110,1052,1345,3256,1585,5529, 809, # 4080 | ||
| 304 | 5530,5531,5532, 575,2739,3524, 956,1552,1469,1144,2328,5533,2329,1560,2462,3635, # 4096 | ||
| 305 | 3257,4108, 616,2210,4364,3180,2183,2294,5534,1833,5535,3525,4720,5536,1319,3770, # 4112 | ||
| 306 | 3771,1211,3636,1023,3258,1293,2812,5537,5538,5539,3905, 607,2311,3906, 762,2892, # 4128 | ||
| 307 | 1439,4365,1360,4721,1485,3072,5540,4722,1038,4366,1450,2062,2648,4367,1379,4723, # 4144 | ||
| 308 | 2593,5541,5542,4368,1352,1414,2330,2935,1172,5543,5544,3907,3908,4724,1798,1451, # 4160 | ||
| 309 | 5545,5546,5547,5548,2936,4109,4110,2492,2351, 411,4111,4112,3637,3333,3124,4725, # 4176 | ||
| 310 | 1561,2674,1452,4113,1375,5549,5550, 47,2974, 316,5551,1406,1591,2937,3181,5552, # 4192 | ||
| 311 | 1025,2142,3125,3182, 354,2740, 884,2228,4369,2412, 508,3772, 726,3638, 996,2433, # 4208 | ||
| 312 | 3639, 729,5553, 392,2194,1453,4114,4726,3773,5554,5555,2463,3640,2618,1675,2813, # 4224 | ||
| 313 | 919,2352,2975,2353,1270,4727,4115, 73,5556,5557, 647,5558,3259,2856,2259,1550, # 4240 | ||
| 314 | 1346,3024,5559,1332, 883,3526,5560,5561,5562,5563,3334,2775,5564,1212, 831,1347, # 4256 | ||
| 315 | 4370,4728,2331,3909,1864,3073, 720,3910,4729,4730,3911,5565,4371,5566,5567,4731, # 4272 | ||
| 316 | 5568,5569,1799,4732,3774,2619,4733,3641,1645,2376,4734,5570,2938, 669,2211,2675, # 4288 | ||
| 317 | 2434,5571,2893,5572,5573,1028,3260,5574,4372,2413,5575,2260,1353,5576,5577,4735, # 4304 | ||
| 318 | 3183, 518,5578,4116,5579,4373,1961,5580,2143,4374,5581,5582,3025,2354,2355,3912, # 4320 | ||
| 319 | 516,1834,1454,4117,2708,4375,4736,2229,2620,1972,1129,3642,5583,2776,5584,2976, # 4336 | ||
| 320 | 1422, 577,1470,3026,1524,3410,5585,5586, 432,4376,3074,3527,5587,2594,1455,2515, # 4352 | ||
| 321 | 2230,1973,1175,5588,1020,2741,4118,3528,4737,5589,2742,5590,1743,1361,3075,3529, # 4368 | ||
| 322 | 2649,4119,4377,4738,2295, 895, 924,4378,2171, 331,2247,3076, 166,1627,3077,1098, # 4384 | ||
| 323 | 5591,1232,2894,2231,3411,4739, 657, 403,1196,2377, 542,3775,3412,1600,4379,3530, # 4400 | ||
| 324 | 5592,4740,2777,3261, 576, 530,1362,4741,4742,2540,2676,3776,4120,5593, 842,3913, # 4416 | ||
| 325 | 5594,2814,2032,1014,4121, 213,2709,3413, 665, 621,4380,5595,3777,2939,2435,5596, # 4432 | ||
| 326 | 2436,3335,3643,3414,4743,4381,2541,4382,4744,3644,1682,4383,3531,1380,5597, 724, # 4448 | ||
| 327 | 2282, 600,1670,5598,1337,1233,4745,3126,2248,5599,1621,4746,5600, 651,4384,5601, # 4464 | ||
| 328 | 1612,4385,2621,5602,2857,5603,2743,2312,3078,5604, 716,2464,3079, 174,1255,2710, # 4480 | ||
| 329 | 4122,3645, 548,1320,1398, 728,4123,1574,5605,1891,1197,3080,4124,5606,3081,3082, # 4496 | ||
| 330 | 3778,3646,3779, 747,5607, 635,4386,4747,5608,5609,5610,4387,5611,5612,4748,5613, # 4512 | ||
| 331 | 3415,4749,2437, 451,5614,3780,2542,2073,4388,2744,4389,4125,5615,1764,4750,5616, # 4528 | ||
| 332 | 4390, 350,4751,2283,2395,2493,5617,4391,4126,2249,1434,4127, 488,4752, 458,4392, # 4544 | ||
| 333 | 4128,3781, 771,1330,2396,3914,2576,3184,2160,2414,1553,2677,3185,4393,5618,2494, # 4560 | ||
| 334 | 2895,2622,1720,2711,4394,3416,4753,5619,2543,4395,5620,3262,4396,2778,5621,2016, # 4576 | ||
| 335 | 2745,5622,1155,1017,3782,3915,5623,3336,2313, 201,1865,4397,1430,5624,4129,5625, # 4592 | ||
| 336 | 5626,5627,5628,5629,4398,1604,5630, 414,1866, 371,2595,4754,4755,3532,2017,3127, # 4608 | ||
| 337 | 4756,1708, 960,4399, 887, 389,2172,1536,1663,1721,5631,2232,4130,2356,2940,1580, # 4624 | ||
| 338 | 5632,5633,1744,4757,2544,4758,4759,5634,4760,5635,2074,5636,4761,3647,3417,2896, # 4640 | ||
| 339 | 4400,5637,4401,2650,3418,2815, 673,2712,2465, 709,3533,4131,3648,4402,5638,1148, # 4656 | ||
| 340 | 502, 634,5639,5640,1204,4762,3649,1575,4763,2623,3783,5641,3784,3128, 948,3263, # 4672 | ||
| 341 | 121,1745,3916,1110,5642,4403,3083,2516,3027,4132,3785,1151,1771,3917,1488,4133, # 4688 | ||
| 342 | 1987,5643,2438,3534,5644,5645,2094,5646,4404,3918,1213,1407,2816, 531,2746,2545, # 4704 | ||
| 343 | 3264,1011,1537,4764,2779,4405,3129,1061,5647,3786,3787,1867,2897,5648,2018, 120, # 4720 | ||
| 344 | 4406,4407,2063,3650,3265,2314,3919,2678,3419,1955,4765,4134,5649,3535,1047,2713, # 4736 | ||
| 345 | 1266,5650,1368,4766,2858, 649,3420,3920,2546,2747,1102,2859,2679,5651,5652,2000, # 4752 | ||
| 346 | 5653,1111,3651,2977,5654,2495,3921,3652,2817,1855,3421,3788,5655,5656,3422,2415, # 4768 | ||
| 347 | 2898,3337,3266,3653,5657,2577,5658,3654,2818,4135,1460, 856,5659,3655,5660,2899, # 4784 | ||
| 348 | 2978,5661,2900,3922,5662,4408, 632,2517, 875,3923,1697,3924,2296,5663,5664,4767, # 4800 | ||
| 349 | 3028,1239, 580,4768,4409,5665, 914, 936,2075,1190,4136,1039,2124,5666,5667,5668, # 4816 | ||
| 350 | 5669,3423,1473,5670,1354,4410,3925,4769,2173,3084,4137, 915,3338,4411,4412,3339, # 4832 | ||
| 351 | 1605,1835,5671,2748, 398,3656,4413,3926,4138, 328,1913,2860,4139,3927,1331,4414, # 4848 | ||
| 352 | 3029, 937,4415,5672,3657,4140,4141,3424,2161,4770,3425, 524, 742, 538,3085,1012, # 4864 | ||
| 353 | 5673,5674,3928,2466,5675, 658,1103, 225,3929,5676,5677,4771,5678,4772,5679,3267, # 4880 | ||
| 354 | 1243,5680,4142, 963,2250,4773,5681,2714,3658,3186,5682,5683,2596,2332,5684,4774, # 4896 | ||
| 355 | 5685,5686,5687,3536, 957,3426,2547,2033,1931,2941,2467, 870,2019,3659,1746,2780, # 4912 | ||
| 356 | 2781,2439,2468,5688,3930,5689,3789,3130,3790,3537,3427,3791,5690,1179,3086,5691, # 4928 | ||
| 357 | 3187,2378,4416,3792,2548,3188,3131,2749,4143,5692,3428,1556,2549,2297, 977,2901, # 4944 | ||
| 358 | 2034,4144,1205,3429,5693,1765,3430,3189,2125,1271, 714,1689,4775,3538,5694,2333, # 4960 | ||
| 359 | 3931, 533,4417,3660,2184, 617,5695,2469,3340,3539,2315,5696,5697,3190,5698,5699, # 4976 | ||
| 360 | 3932,1988, 618, 427,2651,3540,3431,5700,5701,1244,1690,5702,2819,4418,4776,5703, # 4992 | ||
| 361 | 3541,4777,5704,2284,1576, 473,3661,4419,3432, 972,5705,3662,5706,3087,5707,5708, # 5008 | ||
| 362 | 4778,4779,5709,3793,4145,4146,5710, 153,4780, 356,5711,1892,2902,4420,2144, 408, # 5024 | ||
| 363 | 803,2357,5712,3933,5713,4421,1646,2578,2518,4781,4782,3934,5714,3935,4422,5715, # 5040 | ||
| 364 | 2416,3433, 752,5716,5717,1962,3341,2979,5718, 746,3030,2470,4783,4423,3794, 698, # 5056 | ||
| 365 | 4784,1893,4424,3663,2550,4785,3664,3936,5719,3191,3434,5720,1824,1302,4147,2715, # 5072 | ||
| 366 | 3937,1974,4425,5721,4426,3192, 823,1303,1288,1236,2861,3542,4148,3435, 774,3938, # 5088 | ||
| 367 | 5722,1581,4786,1304,2862,3939,4787,5723,2440,2162,1083,3268,4427,4149,4428, 344, # 5104 | ||
| 368 | 1173, 288,2316, 454,1683,5724,5725,1461,4788,4150,2597,5726,5727,4789, 985, 894, # 5120 | ||
| 369 | 5728,3436,3193,5729,1914,2942,3795,1989,5730,2111,1975,5731,4151,5732,2579,1194, # 5136 | ||
| 370 | 425,5733,4790,3194,1245,3796,4429,5734,5735,2863,5736, 636,4791,1856,3940, 760, # 5152 | ||
| 371 | 1800,5737,4430,2212,1508,4792,4152,1894,1684,2298,5738,5739,4793,4431,4432,2213, # 5168 | ||
| 372 | 479,5740,5741, 832,5742,4153,2496,5743,2980,2497,3797, 990,3132, 627,1815,2652, # 5184 | ||
| 373 | 4433,1582,4434,2126,2112,3543,4794,5744, 799,4435,3195,5745,4795,2113,1737,3031, # 5200 | ||
| 374 | 1018, 543, 754,4436,3342,1676,4796,4797,4154,4798,1489,5746,3544,5747,2624,2903, # 5216 | ||
| 375 | 4155,5748,5749,2981,5750,5751,5752,5753,3196,4799,4800,2185,1722,5754,3269,3270, # 5232 | ||
| 376 | 1843,3665,1715, 481, 365,1976,1857,5755,5756,1963,2498,4801,5757,2127,3666,3271, # 5248 | ||
| 377 | 433,1895,2064,2076,5758, 602,2750,5759,5760,5761,5762,5763,3032,1628,3437,5764, # 5264 | ||
| 378 | 3197,4802,4156,2904,4803,2519,5765,2551,2782,5766,5767,5768,3343,4804,2905,5769, # 5280 | ||
| 379 | 4805,5770,2864,4806,4807,1221,2982,4157,2520,5771,5772,5773,1868,1990,5774,5775, # 5296 | ||
| 380 | 5776,1896,5777,5778,4808,1897,4158, 318,5779,2095,4159,4437,5780,5781, 485,5782, # 5312 | ||
| 381 | 938,3941, 553,2680, 116,5783,3942,3667,5784,3545,2681,2783,3438,3344,2820,5785, # 5328 | ||
| 382 | 3668,2943,4160,1747,2944,2983,5786,5787, 207,5788,4809,5789,4810,2521,5790,3033, # 5344 | ||
| 383 | 890,3669,3943,5791,1878,3798,3439,5792,2186,2358,3440,1652,5793,5794,5795, 941, # 5360 | ||
| 384 | 2299, 208,3546,4161,2020, 330,4438,3944,2906,2499,3799,4439,4811,5796,5797,5798, # 5376 | ||
| 385 | ) | ||
| 386 | |||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/big5prober.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/big5prober.py new file mode 100644 index 0000000..5b1227a --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/big5prober.py | |||
| @@ -0,0 +1,47 @@ | |||
| 1 | ######################## BEGIN LICENSE BLOCK ######################## | ||
| 2 | # The Original Code is Mozilla Communicator client code. | ||
| 3 | # | ||
| 4 | # The Initial Developer of the Original Code is | ||
| 5 | # Netscape Communications Corporation. | ||
| 6 | # Portions created by the Initial Developer are Copyright (C) 1998 | ||
| 7 | # the Initial Developer. All Rights Reserved. | ||
| 8 | # | ||
| 9 | # Contributor(s): | ||
| 10 | # Mark Pilgrim - port to Python | ||
| 11 | # | ||
| 12 | # This library is free software; you can redistribute it and/or | ||
| 13 | # modify it under the terms of the GNU Lesser General Public | ||
| 14 | # License as published by the Free Software Foundation; either | ||
| 15 | # version 2.1 of the License, or (at your option) any later version. | ||
| 16 | # | ||
| 17 | # This library is distributed in the hope that it will be useful, | ||
| 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
| 20 | # Lesser General Public License for more details. | ||
| 21 | # | ||
| 22 | # You should have received a copy of the GNU Lesser General Public | ||
| 23 | # License along with this library; if not, write to the Free Software | ||
| 24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA | ||
| 25 | # 02110-1301 USA | ||
| 26 | ######################### END LICENSE BLOCK ######################### | ||
| 27 | |||
| 28 | from .mbcharsetprober import MultiByteCharSetProber | ||
| 29 | from .codingstatemachine import CodingStateMachine | ||
| 30 | from .chardistribution import Big5DistributionAnalysis | ||
| 31 | from .mbcssm import BIG5_SM_MODEL | ||
| 32 | |||
| 33 | |||
| 34 | class Big5Prober(MultiByteCharSetProber): | ||
| 35 | def __init__(self): | ||
| 36 | super(Big5Prober, self).__init__() | ||
| 37 | self.coding_sm = CodingStateMachine(BIG5_SM_MODEL) | ||
| 38 | self.distribution_analyzer = Big5DistributionAnalysis() | ||
| 39 | self.reset() | ||
| 40 | |||
| 41 | @property | ||
| 42 | def charset_name(self): | ||
| 43 | return "Big5" | ||
| 44 | |||
| 45 | @property | ||
| 46 | def language(self): | ||
| 47 | return "Chinese" | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/chardistribution.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/chardistribution.py new file mode 100644 index 0000000..e5509a0 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/chardistribution.py | |||
| @@ -0,0 +1,233 @@ | |||
| 1 | ######################## BEGIN LICENSE BLOCK ######################## | ||
| 2 | # The Original Code is Mozilla Communicator client code. | ||
| 3 | # | ||
| 4 | # The Initial Developer of the Original Code is | ||
| 5 | # Netscape Communications Corporation. | ||
| 6 | # Portions created by the Initial Developer are Copyright (C) 1998 | ||
| 7 | # the Initial Developer. All Rights Reserved. | ||
| 8 | # | ||
| 9 | # Contributor(s): | ||
| 10 | # Mark Pilgrim - port to Python | ||
| 11 | # | ||
| 12 | # This library is free software; you can redistribute it and/or | ||
| 13 | # modify it under the terms of the GNU Lesser General Public | ||
| 14 | # License as published by the Free Software Foundation; either | ||
| 15 | # version 2.1 of the License, or (at your option) any later version. | ||
| 16 | # | ||
| 17 | # This library is distributed in the hope that it will be useful, | ||
| 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
| 20 | # Lesser General Public License for more details. | ||
| 21 | # | ||
| 22 | # You should have received a copy of the GNU Lesser General Public | ||
| 23 | # License along with this library; if not, write to the Free Software | ||
| 24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA | ||
| 25 | # 02110-1301 USA | ||
| 26 | ######################### END LICENSE BLOCK ######################### | ||
| 27 | |||
| 28 | from .euctwfreq import (EUCTW_CHAR_TO_FREQ_ORDER, EUCTW_TABLE_SIZE, | ||
| 29 | EUCTW_TYPICAL_DISTRIBUTION_RATIO) | ||
| 30 | from .euckrfreq import (EUCKR_CHAR_TO_FREQ_ORDER, EUCKR_TABLE_SIZE, | ||
| 31 | EUCKR_TYPICAL_DISTRIBUTION_RATIO) | ||
| 32 | from .gb2312freq import (GB2312_CHAR_TO_FREQ_ORDER, GB2312_TABLE_SIZE, | ||
| 33 | GB2312_TYPICAL_DISTRIBUTION_RATIO) | ||
| 34 | from .big5freq import (BIG5_CHAR_TO_FREQ_ORDER, BIG5_TABLE_SIZE, | ||
| 35 | BIG5_TYPICAL_DISTRIBUTION_RATIO) | ||
| 36 | from .jisfreq import (JIS_CHAR_TO_FREQ_ORDER, JIS_TABLE_SIZE, | ||
| 37 | JIS_TYPICAL_DISTRIBUTION_RATIO) | ||
| 38 | |||
| 39 | |||
| 40 | class CharDistributionAnalysis(object): | ||
| 41 | ENOUGH_DATA_THRESHOLD = 1024 | ||
| 42 | SURE_YES = 0.99 | ||
| 43 | SURE_NO = 0.01 | ||
| 44 | MINIMUM_DATA_THRESHOLD = 3 | ||
| 45 | |||
| 46 | def __init__(self): | ||
| 47 | # Mapping table to get frequency order from char order (get from | ||
| 48 | # GetOrder()) | ||
| 49 | self._char_to_freq_order = None | ||
| 50 | self._table_size = None # Size of above table | ||
| 51 | # This is a constant value which varies from language to language, | ||
| 52 | # used in calculating confidence. See | ||
| 53 | # http://www.mozilla.org/projects/intl/UniversalCharsetDetection.html | ||
| 54 | # for further detail. | ||
| 55 | self.typical_distribution_ratio = None | ||
| 56 | self._done = None | ||
| 57 | self._total_chars = None | ||
| 58 | self._freq_chars = None | ||
| 59 | self.reset() | ||
| 60 | |||
| 61 | def reset(self): | ||
| 62 | """reset analyser, clear any state""" | ||
| 63 | # If this flag is set to True, detection is done and conclusion has | ||
| 64 | # been made | ||
| 65 | self._done = False | ||
| 66 | self._total_chars = 0 # Total characters encountered | ||
| 67 | # The number of characters whose frequency order is less than 512 | ||
| 68 | self._freq_chars = 0 | ||
| 69 | |||
| 70 | def feed(self, char, char_len): | ||
| 71 | """feed a character with known length""" | ||
| 72 | if char_len == 2: | ||
| 73 | # we only care about 2-byte characters in our distribution analysis | ||
| 74 | order = self.get_order(char) | ||
| 75 | else: | ||
| 76 | order = -1 | ||
| 77 | if order >= 0: | ||
| 78 | self._total_chars += 1 | ||
| 79 | # order is valid | ||
| 80 | if order < self._table_size: | ||
| 81 | if self._char_to_freq_order[order] < 512: | ||
| 82 | self._freq_chars += 1 | ||
| 83 | |||
| 84 | def get_confidence(self): | ||
| 85 | """return confidence based on existing data""" | ||
| 86 | # if we didn't receive any character in our consideration range, | ||
| 87 | # return negative answer | ||
| 88 | if self._total_chars <= 0 or self._freq_chars <= self.MINIMUM_DATA_THRESHOLD: | ||
| 89 | return self.SURE_NO | ||
| 90 | |||
| 91 | if self._total_chars != self._freq_chars: | ||
| 92 | r = (self._freq_chars / ((self._total_chars - self._freq_chars) | ||
| 93 | * self.typical_distribution_ratio)) | ||
| 94 | if r < self.SURE_YES: | ||
| 95 | return r | ||
| 96 | |||
| 97 | # normalize confidence (we don't want to be 100% sure) | ||
| 98 | return self.SURE_YES | ||
| 99 | |||
| 100 | def got_enough_data(self): | ||
| 101 | # It is not necessary to receive all data to draw a conclusion. | ||
| 102 | # For charset detection, a certain amount of data is enough | ||
| 103 | return self._total_chars > self.ENOUGH_DATA_THRESHOLD | ||
| 104 | |||
| 105 | def get_order(self, byte_str): | ||
| 106 | # We do not handle characters based on the original encoding string, | ||
| 107 | # but convert this encoding string to a number, here called order. | ||
| 108 | # This allows multiple encodings of a language to share one frequency | ||
| 109 | # table. | ||
| 110 | return -1 | ||
| 111 | |||
| 112 | |||
| 113 | class EUCTWDistributionAnalysis(CharDistributionAnalysis): | ||
| 114 | def __init__(self): | ||
| 115 | super(EUCTWDistributionAnalysis, self).__init__() | ||
| 116 | self._char_to_freq_order = EUCTW_CHAR_TO_FREQ_ORDER | ||
| 117 | self._table_size = EUCTW_TABLE_SIZE | ||
| 118 | self.typical_distribution_ratio = EUCTW_TYPICAL_DISTRIBUTION_RATIO | ||
| 119 | |||
| 120 | def get_order(self, byte_str): | ||
| 121 | # for euc-TW encoding, we are interested | ||
| 122 | # first byte range: 0xc4 -- 0xfe | ||
| 123 | # second byte range: 0xa1 -- 0xfe | ||
| 124 | # no validation needed here. State machine has done that | ||
| 125 | first_char = byte_str[0] | ||
| 126 | if first_char >= 0xC4: | ||
| 127 | return 94 * (first_char - 0xC4) + byte_str[1] - 0xA1 | ||
| 128 | else: | ||
| 129 | return -1 | ||
| 130 | |||
| 131 | |||
| 132 | class EUCKRDistributionAnalysis(CharDistributionAnalysis): | ||
| 133 | def __init__(self): | ||
| 134 | super(EUCKRDistributionAnalysis, self).__init__() | ||
| 135 | self._char_to_freq_order = EUCKR_CHAR_TO_FREQ_ORDER | ||
| 136 | self._table_size = EUCKR_TABLE_SIZE | ||
| 137 | self.typical_distribution_ratio = EUCKR_TYPICAL_DISTRIBUTION_RATIO | ||
| 138 | |||
| 139 | def get_order(self, byte_str): | ||
| 140 | # for euc-KR encoding, we are interested | ||
| 141 | # first byte range: 0xb0 -- 0xfe | ||
| 142 | # second byte range: 0xa1 -- 0xfe | ||
| 143 | # no validation needed here. State machine has done that | ||
| 144 | first_char = byte_str[0] | ||
| 145 | if first_char >= 0xB0: | ||
| 146 | return 94 * (first_char - 0xB0) + byte_str[1] - 0xA1 | ||
| 147 | else: | ||
| 148 | return -1 | ||
| 149 | |||
| 150 | |||
| 151 | class GB2312DistributionAnalysis(CharDistributionAnalysis): | ||
| 152 | def __init__(self): | ||
| 153 | super(GB2312DistributionAnalysis, self).__init__() | ||
| 154 | self._char_to_freq_order = GB2312_CHAR_TO_FREQ_ORDER | ||
| 155 | self._table_size = GB2312_TABLE_SIZE | ||
| 156 | self.typical_distribution_ratio = GB2312_TYPICAL_DISTRIBUTION_RATIO | ||
| 157 | |||
| 158 | def get_order(self, byte_str): | ||
| 159 | # for GB2312 encoding, we are interested | ||
| 160 | # first byte range: 0xb0 -- 0xfe | ||
| 161 | # second byte range: 0xa1 -- 0xfe | ||
| 162 | # no validation needed here. State machine has done that | ||
| 163 | first_char, second_char = byte_str[0], byte_str[1] | ||
| 164 | if (first_char >= 0xB0) and (second_char >= 0xA1): | ||
| 165 | return 94 * (first_char - 0xB0) + second_char - 0xA1 | ||
| 166 | else: | ||
| 167 | return -1 | ||
| 168 | |||
| 169 | |||
| 170 | class Big5DistributionAnalysis(CharDistributionAnalysis): | ||
| 171 | def __init__(self): | ||
| 172 | super(Big5DistributionAnalysis, self).__init__() | ||
| 173 | self._char_to_freq_order = BIG5_CHAR_TO_FREQ_ORDER | ||
| 174 | self._table_size = BIG5_TABLE_SIZE | ||
| 175 | self.typical_distribution_ratio = BIG5_TYPICAL_DISTRIBUTION_RATIO | ||
| 176 | |||
| 177 | def get_order(self, byte_str): | ||
| 178 | # for big5 encoding, we are interested | ||
| 179 | # first byte range: 0xa4 -- 0xfe | ||
| 180 | # second byte range: 0x40 -- 0x7e , 0xa1 -- 0xfe | ||
| 181 | # no validation needed here. State machine has done that | ||
| 182 | first_char, second_char = byte_str[0], byte_str[1] | ||
| 183 | if first_char >= 0xA4: | ||
| 184 | if second_char >= 0xA1: | ||
| 185 | return 157 * (first_char - 0xA4) + second_char - 0xA1 + 63 | ||
| 186 | else: | ||
| 187 | return 157 * (first_char - 0xA4) + second_char - 0x40 | ||
| 188 | else: | ||
| 189 | return -1 | ||
| 190 | |||
| 191 | |||
| 192 | class SJISDistributionAnalysis(CharDistributionAnalysis): | ||
| 193 | def __init__(self): | ||
| 194 | super(SJISDistributionAnalysis, self).__init__() | ||
| 195 | self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER | ||
| 196 | self._table_size = JIS_TABLE_SIZE | ||
| 197 | self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO | ||
| 198 | |||
| 199 | def get_order(self, byte_str): | ||
| 200 | # for sjis encoding, we are interested | ||
| 201 | # first byte range: 0x81 -- 0x9f , 0xe0 -- 0xfe | ||
| 202 | # second byte range: 0x40 -- 0x7e, 0x81 -- 0xfe | ||
| 203 | # no validation needed here. State machine has done that | ||
| 204 | first_char, second_char = byte_str[0], byte_str[1] | ||
| 205 | if (first_char >= 0x81) and (first_char <= 0x9F): | ||
| 206 | order = 188 * (first_char - 0x81) | ||
| 207 | elif (first_char >= 0xE0) and (first_char <= 0xEF): | ||
| 208 | order = 188 * (first_char - 0xE0 + 31) | ||
| 209 | else: | ||
| 210 | return -1 | ||
| 211 | order = order + second_char - 0x40 | ||
| 212 | if second_char > 0x7F: | ||
| 213 | order = -1 | ||
| 214 | return order | ||
| 215 | |||
| 216 | |||
| 217 | class EUCJPDistributionAnalysis(CharDistributionAnalysis): | ||
| 218 | def __init__(self): | ||
| 219 | super(EUCJPDistributionAnalysis, self).__init__() | ||
| 220 | self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER | ||
| 221 | self._table_size = JIS_TABLE_SIZE | ||
| 222 | self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO | ||
| 223 | |||
| 224 | def get_order(self, byte_str): | ||
| 225 | # for euc-JP encoding, we are interested | ||
| 226 | # first byte range: 0xa0 -- 0xfe | ||
| 227 | # second byte range: 0xa1 -- 0xfe | ||
| 228 | # no validation needed here. State machine has done that | ||
| 229 | char = byte_str[0] | ||
| 230 | if char >= 0xA0: | ||
| 231 | return 94 * (char - 0xA1) + byte_str[1] - 0xA1 | ||
| 232 | else: | ||
| 233 | return -1 | ||
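`get_confidence()` above reduces to one ratio: the count of "frequent" characters (frequency order below 512) divided by the remaining characters scaled by the language's typical distribution ratio, clamped to the 0.01-0.99 band. A standalone sketch of that arithmetic (illustrative only; the 6.0 ratio is an assumption loosely based on the vendored EUC-KR table):

```python
def distribution_confidence(freq_chars, total_chars, typical_ratio,
                            minimum_data=3, sure_yes=0.99, sure_no=0.01):
    """Mirrors CharDistributionAnalysis.get_confidence() above."""
    if total_chars <= 0 or freq_chars <= minimum_data:
        return sure_no
    if total_chars != freq_chars:
        r = freq_chars / ((total_chars - freq_chars) * typical_ratio)
        if r < sure_yes:
            return r
    return sure_yes

# 300 frequent characters out of 320 sampled: 300 / (20 * 6.0) = 2.5,
# which is above the cap, so the confidence is clamped to 0.99.
print(distribution_confidence(300, 320, 6.0))
```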
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/charsetgroupprober.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/charsetgroupprober.py new file mode 100644 index 0000000..1720ddc --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/charsetgroupprober.py | |||
| @@ -0,0 +1,106 @@ | |||
| 1 | ######################## BEGIN LICENSE BLOCK ######################## | ||
| 2 | # The Original Code is Mozilla Communicator client code. | ||
| 3 | # | ||
| 4 | # The Initial Developer of the Original Code is | ||
| 5 | # Netscape Communications Corporation. | ||
| 6 | # Portions created by the Initial Developer are Copyright (C) 1998 | ||
| 7 | # the Initial Developer. All Rights Reserved. | ||
| 8 | # | ||
| 9 | # Contributor(s): | ||
| 10 | # Mark Pilgrim - port to Python | ||
| 11 | # | ||
| 12 | # This library is free software; you can redistribute it and/or | ||
| 13 | # modify it under the terms of the GNU Lesser General Public | ||
| 14 | # License as published by the Free Software Foundation; either | ||
| 15 | # version 2.1 of the License, or (at your option) any later version. | ||
| 16 | # | ||
| 17 | # This library is distributed in the hope that it will be useful, | ||
| 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
| 20 | # Lesser General Public License for more details. | ||
| 21 | # | ||
| 22 | # You should have received a copy of the GNU Lesser General Public | ||
| 23 | # License along with this library; if not, write to the Free Software | ||
| 24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA | ||
| 25 | # 02110-1301 USA | ||
| 26 | ######################### END LICENSE BLOCK ######################### | ||
| 27 | |||
| 28 | from .enums import ProbingState | ||
| 29 | from .charsetprober import CharSetProber | ||
| 30 | |||
| 31 | |||
| 32 | class CharSetGroupProber(CharSetProber): | ||
| 33 | def __init__(self, lang_filter=None): | ||
| 34 | super(CharSetGroupProber, self).__init__(lang_filter=lang_filter) | ||
| 35 | self._active_num = 0 | ||
| 36 | self.probers = [] | ||
| 37 | self._best_guess_prober = None | ||
| 38 | |||
| 39 | def reset(self): | ||
| 40 | super(CharSetGroupProber, self).reset() | ||
| 41 | self._active_num = 0 | ||
| 42 | for prober in self.probers: | ||
| 43 | if prober: | ||
| 44 | prober.reset() | ||
| 45 | prober.active = True | ||
| 46 | self._active_num += 1 | ||
| 47 | self._best_guess_prober = None | ||
| 48 | |||
| 49 | @property | ||
| 50 | def charset_name(self): | ||
| 51 | if not self._best_guess_prober: | ||
| 52 | self.get_confidence() | ||
| 53 | if not self._best_guess_prober: | ||
| 54 | return None | ||
| 55 | return self._best_guess_prober.charset_name | ||
| 56 | |||
| 57 | @property | ||
| 58 | def language(self): | ||
| 59 | if not self._best_guess_prober: | ||
| 60 | self.get_confidence() | ||
| 61 | if not self._best_guess_prober: | ||
| 62 | return None | ||
| 63 | return self._best_guess_prober.language | ||
| 64 | |||
| 65 | def feed(self, byte_str): | ||
| 66 | for prober in self.probers: | ||
| 67 | if not prober: | ||
| 68 | continue | ||
| 69 | if not prober.active: | ||
| 70 | continue | ||
| 71 | state = prober.feed(byte_str) | ||
| 72 | if not state: | ||
| 73 | continue | ||
| 74 | if state == ProbingState.FOUND_IT: | ||
| 75 | self._best_guess_prober = prober | ||
| 76 | return self.state | ||
| 77 | elif state == ProbingState.NOT_ME: | ||
| 78 | prober.active = False | ||
| 79 | self._active_num -= 1 | ||
| 80 | if self._active_num <= 0: | ||
| 81 | self._state = ProbingState.NOT_ME | ||
| 82 | return self.state | ||
| 83 | return self.state | ||
| 84 | |||
| 85 | def get_confidence(self): | ||
| 86 | state = self.state | ||
| 87 | if state == ProbingState.FOUND_IT: | ||
| 88 | return 0.99 | ||
| 89 | elif state == ProbingState.NOT_ME: | ||
| 90 | return 0.01 | ||
| 91 | best_conf = 0.0 | ||
| 92 | self._best_guess_prober = None | ||
| 93 | for prober in self.probers: | ||
| 94 | if not prober: | ||
| 95 | continue | ||
| 96 | if not prober.active: | ||
| 97 | self.logger.debug('%s not active', prober.charset_name) | ||
| 98 | continue | ||
| 99 | conf = prober.get_confidence() | ||
| 100 | self.logger.debug('%s %s confidence = %s', prober.charset_name, prober.language, conf) | ||
| 101 | if best_conf < conf: | ||
| 102 | best_conf = conf | ||
| 103 | self._best_guess_prober = prober | ||
| 104 | if not self._best_guess_prober: | ||
| 105 | return 0.0 | ||
| 106 | return best_conf | ||
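The best-guess selection in `get_confidence()` above is a plain argmax over the confidences of the still-active child probers. A toy reduction of that loop (mock objects, not the vendored classes):

```python
class MockProber(object):
    """Stand-in exposing just the attributes the selection loop reads."""
    def __init__(self, charset_name, confidence):
        self.charset_name = charset_name
        self.active = True
        self._confidence = confidence

    def get_confidence(self):
        return self._confidence


probers = [MockProber("Big5", 0.41), MockProber("GB2312", 0.73),
           MockProber("EUC-TW", 0.12)]
best = max((p for p in probers if p.active), key=lambda p: p.get_confidence())
print(best.charset_name)  # GB2312
```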
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/charsetprober.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/charsetprober.py new file mode 100644 index 0000000..1fc2746 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/charsetprober.py | |||
| @@ -0,0 +1,145 @@ | |||
| 1 | ######################## BEGIN LICENSE BLOCK ######################## | ||
| 2 | # The Original Code is Mozilla Universal charset detector code. | ||
| 3 | # | ||
| 4 | # The Initial Developer of the Original Code is | ||
| 5 | # Netscape Communications Corporation. | ||
| 6 | # Portions created by the Initial Developer are Copyright (C) 2001 | ||
| 7 | # the Initial Developer. All Rights Reserved. | ||
| 8 | # | ||
| 9 | # Contributor(s): | ||
| 10 | # Mark Pilgrim - port to Python | ||
| 11 | # Shy Shalom - original C code | ||
| 12 | # | ||
| 13 | # This library is free software; you can redistribute it and/or | ||
| 14 | # modify it under the terms of the GNU Lesser General Public | ||
| 15 | # License as published by the Free Software Foundation; either | ||
| 16 | # version 2.1 of the License, or (at your option) any later version. | ||
| 17 | # | ||
| 18 | # This library is distributed in the hope that it will be useful, | ||
| 19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
| 21 | # Lesser General Public License for more details. | ||
| 22 | # | ||
| 23 | # You should have received a copy of the GNU Lesser General Public | ||
| 24 | # License along with this library; if not, write to the Free Software | ||
| 25 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA | ||
| 26 | # 02110-1301 USA | ||
| 27 | ######################### END LICENSE BLOCK ######################### | ||
| 28 | |||
| 29 | import logging | ||
| 30 | import re | ||
| 31 | |||
| 32 | from .enums import ProbingState | ||
| 33 | |||
| 34 | |||
| 35 | class CharSetProber(object): | ||
| 36 | |||
| 37 | SHORTCUT_THRESHOLD = 0.95 | ||
| 38 | |||
| 39 | def __init__(self, lang_filter=None): | ||
| 40 | self._state = None | ||
| 41 | self.lang_filter = lang_filter | ||
| 42 | self.logger = logging.getLogger(__name__) | ||
| 43 | |||
| 44 | def reset(self): | ||
| 45 | self._state = ProbingState.DETECTING | ||
| 46 | |||
| 47 | @property | ||
| 48 | def charset_name(self): | ||
| 49 | return None | ||
| 50 | |||
| 51 | def feed(self, buf): | ||
| 52 | pass | ||
| 53 | |||
| 54 | @property | ||
| 55 | def state(self): | ||
| 56 | return self._state | ||
| 57 | |||
| 58 | def get_confidence(self): | ||
| 59 | return 0.0 | ||
| 60 | |||
| 61 | @staticmethod | ||
| 62 | def filter_high_byte_only(buf): | ||
| 63 | buf = re.sub(b'([\x00-\x7F])+', b' ', buf) | ||
| 64 | return buf | ||
| 65 | |||
| 66 | @staticmethod | ||
| 67 | def filter_international_words(buf): | ||
| 68 | """ | ||
| 69 | We define three types of bytes: | ||
| 70 | alphabet: English letters [a-zA-Z] | ||
| 71 | international: international characters [\x80-\xFF] | ||
| 72 | marker: everything else [^a-zA-Z\x80-\xFF] | ||
| 73 | |||
| 74 | The input buffer can be thought of as a series of words delimited | ||
| 75 | by markers. This function keeps only the words that contain at | ||
| 76 | least one international character. All contiguous sequences of markers | ||
| 77 | are replaced by a single space ascii character. | ||
| 78 | |||
| 79 | This filter applies to all scripts which do not use English characters. | ||
| 80 | """ | ||
| 81 | filtered = bytearray() | ||
| 82 | |||
| 83 | # This regex matches only words that have at least one | ||
| 84 | # international character. The word may include one marker character at | ||
| 85 | # the end. | ||
| 86 | words = re.findall(b'[a-zA-Z]*[\x80-\xFF]+[a-zA-Z]*[^a-zA-Z\x80-\xFF]?', | ||
| 87 | buf) | ||
| 88 | |||
| 89 | for word in words: | ||
| 90 | filtered.extend(word[:-1]) | ||
| 91 | |||
| 92 | # If the last character in the word is a marker, replace it with a | ||
| 93 | # space as markers shouldn't affect our analysis (they are used | ||
| 94 | # similarly across all languages and may thus have similar | ||
| 95 | # frequencies). | ||
| 96 | last_char = word[-1:] | ||
| 97 | if not last_char.isalpha() and last_char < b'\x80': | ||
| 98 | last_char = b' ' | ||
| 99 | filtered.extend(last_char) | ||
| 100 | |||
| 101 | return filtered | ||
| 102 | |||
| 103 | @staticmethod | ||
| 104 | def filter_with_english_letters(buf): | ||
| 105 | """ | ||
| 106 | Returns a copy of ``buf`` that retains only the sequences of English | ||
| 107 | alphabet and high byte characters that are not between <> characters. | ||
| 108 | Also retains English alphabet and high byte characters immediately | ||
| 109 | before occurrences of >. | ||
| 110 | |||
| 111 | This filter can be applied to all scripts which contain both English | ||
| 112 | characters and extended ASCII characters, but is currently only used by | ||
| 113 | ``Latin1Prober``. | ||
| 114 | """ | ||
| 115 | filtered = bytearray() | ||
| 116 | in_tag = False | ||
| 117 | prev = 0 | ||
| 118 | |||
| 119 | for curr in range(len(buf)): | ||
| 120 | # Slice here to get bytes instead of an int with Python 3 | ||
| 121 | buf_char = buf[curr:curr + 1] | ||
| 122 | # Check if we're coming out of or entering an HTML tag | ||
| 123 | if buf_char == b'>': | ||
| 124 | in_tag = False | ||
| 125 | elif buf_char == b'<': | ||
| 126 | in_tag = True | ||
| 127 | |||
| 128 | # If current character is not extended-ASCII and not alphabetic... | ||
| 129 | if buf_char < b'\x80' and not buf_char.isalpha(): | ||
| 130 | # ...and we're not in a tag | ||
| 131 | if curr > prev and not in_tag: | ||
| 132 | # Keep everything after last non-extended-ASCII, | ||
| 133 | # non-alphabetic character | ||
| 134 | filtered.extend(buf[prev:curr]) | ||
| 135 | # Output a space to delimit stretch we kept | ||
| 136 | filtered.extend(b' ') | ||
| 137 | prev = curr + 1 | ||
| 138 | |||
| 139 | # If we're not in a tag... | ||
| 140 | if not in_tag: | ||
| 141 | # Keep everything after last non-extended-ASCII, non-alphabetic | ||
| 142 | # character | ||
| 143 | filtered.extend(buf[prev:]) | ||
| 144 | |||
| 145 | return filtered | ||
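Both static filters above are preprocessing passes that drop bytes carrying no signal for the charset models. The high-byte filter is a single substitution; a quick demonstration of its effect (the Latin-1 sample is an arbitrary choice):

```python
import re


def filter_high_byte_only(buf):
    # Same substitution as CharSetProber.filter_high_byte_only() above:
    # every run of ASCII bytes collapses to a single space.
    return re.sub(b'([\x00-\x7F])+', b' ', buf)


sample = "caf\xe9 au lait".encode("latin-1")
print(filter_high_byte_only(sample))  # b' \xe9 '
```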
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/cli/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/cli/__init__.py new file mode 100644 index 0000000..d3f5a12 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/cli/__init__.py | |||
| @@ -0,0 +1 @@ | |||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/cli/chardetect.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/cli/chardetect.py new file mode 100644 index 0000000..daabecd --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/cli/chardetect.py | |||
| @@ -0,0 +1,85 @@ | |||
| 1 | #!/usr/bin/env python | ||
| 2 | """ | ||
| 3 | Script which takes one or more file paths and reports on their detected | ||
| 4 | encodings | ||
| 5 | |||
| 6 | Example:: | ||
| 7 | |||
| 8 | % chardetect somefile someotherfile | ||
| 9 | somefile: windows-1252 with confidence 0.5 | ||
| 10 | someotherfile: ascii with confidence 1.0 | ||
| 11 | |||
| 12 | If no paths are provided, it takes its input from stdin. | ||
| 13 | |||
| 14 | """ | ||
| 15 | |||
| 16 | from __future__ import absolute_import, print_function, unicode_literals | ||
| 17 | |||
| 18 | import argparse | ||
| 19 | import sys | ||
| 20 | |||
| 21 | from pip._vendor.chardet import __version__ | ||
| 22 | from pip._vendor.chardet.compat import PY2 | ||
| 23 | from pip._vendor.chardet.universaldetector import UniversalDetector | ||
| 24 | |||
| 25 | |||
| 26 | def description_of(lines, name='stdin'): | ||
| 27 | """ | ||
| 28 | Return a string describing the probable encoding of a file or | ||
| 29 | list of strings. | ||
| 30 | |||
| 31 | :param lines: The lines to get the encoding of. | ||
| 32 | :type lines: Iterable of bytes | ||
| 33 | :param name: Name of file or collection of lines | ||
| 34 | :type name: str | ||
| 35 | """ | ||
| 36 | u = UniversalDetector() | ||
| 37 | for line in lines: | ||
| 38 | line = bytearray(line) | ||
| 39 | u.feed(line) | ||
| 40 | # shortcut out of the loop to save reading further - particularly useful if we read a BOM. | ||
| 41 | if u.done: | ||
| 42 | break | ||
| 43 | u.close() | ||
| 44 | result = u.result | ||
| 45 | if PY2: | ||
| 46 | name = name.decode(sys.getfilesystemencoding(), 'ignore') | ||
| 47 | if result['encoding']: | ||
| 48 | return '{0}: {1} with confidence {2}'.format(name, result['encoding'], | ||
| 49 | result['confidence']) | ||
| 50 | else: | ||
| 51 | return '{0}: no result'.format(name) | ||
| 52 | |||
| 53 | |||
| 54 | def main(argv=None): | ||
| 55 | """ | ||
| 56 | Handles command line arguments and gets things started. | ||
| 57 | |||
| 58 | :param argv: List of arguments, as if specified on the command-line. | ||
| 59 | If None, ``sys.argv[1:]`` is used instead. | ||
| 60 | :type argv: list of str | ||
| 61 | """ | ||
| 62 | # Get command line arguments | ||
| 63 | parser = argparse.ArgumentParser( | ||
| 64 | description="Takes one or more file paths and reports their detected \ | ||
| 65 | encodings") | ||
| 66 | parser.add_argument('input', | ||
| 67 | help='File whose encoding we would like to determine. \ | ||
| 68 | (default: stdin)', | ||
| 69 | type=argparse.FileType('rb'), nargs='*', | ||
| 70 | default=[sys.stdin if PY2 else sys.stdin.buffer]) | ||
| 71 | parser.add_argument('--version', action='version', | ||
| 72 | version='%(prog)s {0}'.format(__version__)) | ||
| 73 | args = parser.parse_args(argv) | ||
| 74 | |||
| 75 | for f in args.input: | ||
| 76 | if f.isatty(): | ||
| 77 | print("You are running chardetect interactively. Press " + | ||
| 78 | "CTRL-D twice at the start of a blank line to signal the " + | ||
| 79 | "end of your input. If you want help, run chardetect " + | ||
| 80 | "--help\n", file=sys.stderr) | ||
| 81 | print(description_of(f, f.name)) | ||
| 82 | |||
| 83 | |||
| 84 | if __name__ == '__main__': | ||
| 85 | main() | ||
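Besides the `chardetect` console entry point, `description_of()` can be called directly with any iterable of byte lines; a small sketch (the file name is hypothetical):

```python
from pip._vendor.chardet.cli.chardetect import description_of

lines = [b"hello world\n", b"plain ascii only\n"]
print(description_of(lines, name="example.txt"))
# typically: example.txt: ascii with confidence 1.0
```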
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/codingstatemachine.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/codingstatemachine.py new file mode 100644 index 0000000..c562e1d --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/codingstatemachine.py | |||
| @@ -0,0 +1,88 @@ | |||
| 1 | ######################## BEGIN LICENSE BLOCK ######################## | ||
| 2 | # The Original Code is mozilla.org code. | ||
| 3 | # | ||
| 4 | # The Initial Developer of the Original Code is | ||
| 5 | # Netscape Communications Corporation. | ||
| 6 | # Portions created by the Initial Developer are Copyright (C) 1998 | ||
| 7 | # the Initial Developer. All Rights Reserved. | ||
| 8 | # | ||
| 9 | # Contributor(s): | ||
| 10 | # Mark Pilgrim - port to Python | ||
| 11 | # | ||
| 12 | # This library is free software; you can redistribute it and/or | ||
| 13 | # modify it under the terms of the GNU Lesser General Public | ||
| 14 | # License as published by the Free Software Foundation; either | ||
| 15 | # version 2.1 of the License, or (at your option) any later version. | ||
| 16 | # | ||
| 17 | # This library is distributed in the hope that it will be useful, | ||
| 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
| 20 | # Lesser General Public License for more details. | ||
| 21 | # | ||
| 22 | # You should have received a copy of the GNU Lesser General Public | ||
| 23 | # License along with this library; if not, write to the Free Software | ||
| 24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA | ||
| 25 | # 02110-1301 USA | ||
| 26 | ######################### END LICENSE BLOCK ######################### | ||
| 27 | |||
| 28 | import logging | ||
| 29 | |||
| 30 | from .enums import MachineState | ||
| 31 | |||
| 32 | |||
| 33 | class CodingStateMachine(object): | ||
| 34 | """ | ||
| 35 | A state machine to verify a byte sequence for a particular encoding. For | ||
| 36 | each byte the detector receives, it will feed that byte to every active | ||
| 37 | state machine available, one byte at a time. The state machine changes its | ||
| 38 | state based on its previous state and the byte it receives. There are 3 | ||
| 39 | states in a state machine that are of interest to an auto-detector: | ||
| 40 | |||
| 41 | START state: This is the state to start with, or the state in which a legal | ||
| 42 | byte sequence (i.e. a valid code point) for a character has been identified. | ||
| 43 | |||
| 44 | ME state: This indicates that the state machine identified a byte sequence | ||
| 45 | that is specific to the charset it is designed for and that | ||
| 46 | there is no other possible encoding which can contain this byte | ||
| 47 | sequence. This will lead to an immediate positive answer for | ||
| 48 | the detector. | ||
| 49 | |||
| 50 | ERROR state: This indicates the state machine identified an illegal byte | ||
| 51 | sequence for that encoding. This will lead to an immediate | ||
| 52 | negative answer for this encoding. The detector will exclude this | ||
| 53 | encoding from consideration from here on. | ||
| 54 | """ | ||
| 55 | def __init__(self, sm): | ||
| 56 | self._model = sm | ||
| 57 | self._curr_byte_pos = 0 | ||
| 58 | self._curr_char_len = 0 | ||
| 59 | self._curr_state = None | ||
| 60 | self.logger = logging.getLogger(__name__) | ||
| 61 | self.reset() | ||
| 62 | |||
| 63 | def reset(self): | ||
| 64 | self._curr_state = MachineState.START | ||
| 65 | |||
| 66 | def next_state(self, c): | ||
| 67 | # for each byte we get its class | ||
| 68 | # if it is the first byte, we also get the byte length | ||
| 69 | byte_class = self._model['class_table'][c] | ||
| 70 | if self._curr_state == MachineState.START: | ||
| 71 | self._curr_byte_pos = 0 | ||
| 72 | self._curr_char_len = self._model['char_len_table'][byte_class] | ||
| 73 | # from byte's class and state_table, we get its next state | ||
| 74 | curr_state = (self._curr_state * self._model['class_factor'] | ||
| 75 | + byte_class) | ||
| 76 | self._curr_state = self._model['state_table'][curr_state] | ||
| 77 | self._curr_byte_pos += 1 | ||
| 78 | return self._curr_state | ||
| 79 | |||
| 80 | def get_current_charlen(self): | ||
| 81 | return self._curr_char_len | ||
| 82 | |||
| 83 | def get_coding_state_machine(self): | ||
| 84 | return self._model['name'] | ||
| 85 | |||
| 86 | @property | ||
| 87 | def language(self): | ||
| 88 | return self._model['language'] | ||
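To see those three states in action, the machine can be stepped by hand with one of the models from `escsm.py` below. Feeding a complete HZ-GB-2312 escape chunk drives the HZ machine into `ITS_ME`; a sketch, assuming Python 3 and the vendored modules on the import path:

```python
from pip._vendor.chardet.codingstatemachine import CodingStateMachine
from pip._vendor.chardet.enums import MachineState
from pip._vendor.chardet.escsm import HZ_SM_MODEL  # defined later in this commit

sm = CodingStateMachine(HZ_SM_MODEL)
state = None
# '~{' opens a GB chunk, 'zz' is one two-byte pair, '~}' closes the chunk
for byte in b"~{zz~}":
    state = sm.next_state(byte)
print(state == MachineState.ITS_ME)  # True: this sequence is unique to HZ
```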
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/compat.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/compat.py new file mode 100644 index 0000000..fa100a3 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/compat.py | |||
| @@ -0,0 +1,34 @@ | |||
| 1 | ######################## BEGIN LICENSE BLOCK ######################## | ||
| 2 | # Contributor(s): | ||
| 3 | # Dan Blanchard | ||
| 4 | # Ian Cordasco | ||
| 5 | # | ||
| 6 | # This library is free software; you can redistribute it and/or | ||
| 7 | # modify it under the terms of the GNU Lesser General Public | ||
| 8 | # License as published by the Free Software Foundation; either | ||
| 9 | # version 2.1 of the License, or (at your option) any later version. | ||
| 10 | # | ||
| 11 | # This library is distributed in the hope that it will be useful, | ||
| 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
| 14 | # Lesser General Public License for more details. | ||
| 15 | # | ||
| 16 | # You should have received a copy of the GNU Lesser General Public | ||
| 17 | # License along with this library; if not, write to the Free Software | ||
| 18 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA | ||
| 19 | # 02110-1301 USA | ||
| 20 | ######################### END LICENSE BLOCK ######################### | ||
| 21 | |||
| 22 | import sys | ||
| 23 | |||
| 24 | |||
| 25 | if sys.version_info < (3, 0): | ||
| 26 | PY2 = True | ||
| 27 | PY3 = False | ||
| 28 | base_str = (str, unicode) | ||
| 29 | text_type = unicode | ||
| 30 | else: | ||
| 31 | PY2 = False | ||
| 32 | PY3 = True | ||
| 33 | base_str = (bytes, str) | ||
| 34 | text_type = str | ||
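These aliases let the rest of the package do version-independent type checks; for example, `base_str` matches both textual and byte strings on either major version:

```python
from pip._vendor.chardet.compat import base_str

# Both hold on Python 2 and Python 3.
assert isinstance("text", base_str)
assert isinstance(b"raw", base_str)
```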
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/cp949prober.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/cp949prober.py new file mode 100644 index 0000000..de0ceab --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/cp949prober.py | |||
| @@ -0,0 +1,49 @@ | |||
| 1 | ######################## BEGIN LICENSE BLOCK ######################## | ||
| 2 | # The Original Code is mozilla.org code. | ||
| 3 | # | ||
| 4 | # The Initial Developer of the Original Code is | ||
| 5 | # Netscape Communications Corporation. | ||
| 6 | # Portions created by the Initial Developer are Copyright (C) 1998 | ||
| 7 | # the Initial Developer. All Rights Reserved. | ||
| 8 | # | ||
| 9 | # Contributor(s): | ||
| 10 | # Mark Pilgrim - port to Python | ||
| 11 | # | ||
| 12 | # This library is free software; you can redistribute it and/or | ||
| 13 | # modify it under the terms of the GNU Lesser General Public | ||
| 14 | # License as published by the Free Software Foundation; either | ||
| 15 | # version 2.1 of the License, or (at your option) any later version. | ||
| 16 | # | ||
| 17 | # This library is distributed in the hope that it will be useful, | ||
| 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
| 20 | # Lesser General Public License for more details. | ||
| 21 | # | ||
| 22 | # You should have received a copy of the GNU Lesser General Public | ||
| 23 | # License along with this library; if not, write to the Free Software | ||
| 24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA | ||
| 25 | # 02110-1301 USA | ||
| 26 | ######################### END LICENSE BLOCK ######################### | ||
| 27 | |||
| 28 | from .chardistribution import EUCKRDistributionAnalysis | ||
| 29 | from .codingstatemachine import CodingStateMachine | ||
| 30 | from .mbcharsetprober import MultiByteCharSetProber | ||
| 31 | from .mbcssm import CP949_SM_MODEL | ||
| 32 | |||
| 33 | |||
| 34 | class CP949Prober(MultiByteCharSetProber): | ||
| 35 | def __init__(self): | ||
| 36 | super(CP949Prober, self).__init__() | ||
| 37 | self.coding_sm = CodingStateMachine(CP949_SM_MODEL) | ||
| 38 | # NOTE: CP949 is a superset of EUC-KR, so the distribution should | ||
| 39 | # not be different. | ||
| 40 | self.distribution_analyzer = EUCKRDistributionAnalysis() | ||
| 41 | self.reset() | ||
| 42 | |||
| 43 | @property | ||
| 44 | def charset_name(self): | ||
| 45 | return "CP949" | ||
| 46 | |||
| 47 | @property | ||
| 48 | def language(self): | ||
| 49 | return "Korean" | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/enums.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/enums.py new file mode 100644 index 0000000..c8e6001 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/enums.py | |||
| @@ -0,0 +1,76 @@ | |||
| 1 | """ | ||
| 2 | All of the Enums that are used throughout the chardet package. | ||
| 3 | |||
| 4 | :author: Dan Blanchard (dan.blanchard@gmail.com) | ||
| 5 | """ | ||
| 6 | |||
| 7 | |||
| 8 | class InputState(object): | ||
| 9 | """ | ||
| 10 | This enum represents the different states a universal detector can be in. | ||
| 11 | """ | ||
| 12 | PURE_ASCII = 0 | ||
| 13 | ESC_ASCII = 1 | ||
| 14 | HIGH_BYTE = 2 | ||
| 15 | |||
| 16 | |||
| 17 | class LanguageFilter(object): | ||
| 18 | """ | ||
| 19 | This enum represents the different language filters we can apply to a | ||
| 20 | ``UniversalDetector``. | ||
| 21 | """ | ||
| 22 | CHINESE_SIMPLIFIED = 0x01 | ||
| 23 | CHINESE_TRADITIONAL = 0x02 | ||
| 24 | JAPANESE = 0x04 | ||
| 25 | KOREAN = 0x08 | ||
| 26 | NON_CJK = 0x10 | ||
| 27 | ALL = 0x1F | ||
| 28 | CHINESE = CHINESE_SIMPLIFIED | CHINESE_TRADITIONAL | ||
| 29 | CJK = CHINESE | JAPANESE | KOREAN | ||
| 30 | |||
| 31 | |||
| 32 | class ProbingState(object): | ||
| 33 | """ | ||
| 34 | This enum represents the different states a prober can be in. | ||
| 35 | """ | ||
| 36 | DETECTING = 0 | ||
| 37 | FOUND_IT = 1 | ||
| 38 | NOT_ME = 2 | ||
| 39 | |||
| 40 | |||
| 41 | class MachineState(object): | ||
| 42 | """ | ||
| 43 | This enum represents the different states a state machine can be in. | ||
| 44 | """ | ||
| 45 | START = 0 | ||
| 46 | ERROR = 1 | ||
| 47 | ITS_ME = 2 | ||
| 48 | |||
| 49 | |||
| 50 | class SequenceLikelihood(object): | ||
| 51 | """ | ||
| 52 | This enum represents the likelihood of a character following the previous one. | ||
| 53 | """ | ||
| 54 | NEGATIVE = 0 | ||
| 55 | UNLIKELY = 1 | ||
| 56 | LIKELY = 2 | ||
| 57 | POSITIVE = 3 | ||
| 58 | |||
| 59 | @classmethod | ||
| 60 | def get_num_categories(cls): | ||
| 61 | """:returns: The number of likelihood categories in the enum.""" | ||
| 62 | return 4 | ||
| 63 | |||
| 64 | |||
| 65 | class CharacterCategory(object): | ||
| 66 | """ | ||
| 67 | This enum represents the different categories that language models for | ||
| 68 | ``SingleByteCharsetProber`` put characters into. | ||
| 69 | |||
| 70 | Anything less than CONTROL is considered a letter. | ||
| 71 | """ | ||
| 72 | UNDEFINED = 255 | ||
| 73 | LINE_BREAK = 254 | ||
| 74 | SYMBOL = 253 | ||
| 75 | DIGIT = 252 | ||
| 76 | CONTROL = 251 | ||
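The `LanguageFilter` values are bit flags, so filters compose with bitwise OR and are tested with bitwise AND, exactly as the `EscCharSetProber` constructor below does:

```python
from pip._vendor.chardet.enums import LanguageFilter

cjk = LanguageFilter.CHINESE | LanguageFilter.JAPANESE | LanguageFilter.KOREAN
assert cjk == LanguageFilter.CJK
assert cjk & LanguageFilter.JAPANESE         # Japanese probers stay enabled
assert not (cjk & LanguageFilter.NON_CJK)    # non-CJK probers are filtered out
```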
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/escprober.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/escprober.py new file mode 100644 index 0000000..c52060d --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/escprober.py | |||
| @@ -0,0 +1,101 @@ | |||
| 1 | ######################## BEGIN LICENSE BLOCK ######################## | ||
| 2 | # The Original Code is mozilla.org code. | ||
| 3 | # | ||
| 4 | # The Initial Developer of the Original Code is | ||
| 5 | # Netscape Communications Corporation. | ||
| 6 | # Portions created by the Initial Developer are Copyright (C) 1998 | ||
| 7 | # the Initial Developer. All Rights Reserved. | ||
| 8 | # | ||
| 9 | # Contributor(s): | ||
| 10 | # Mark Pilgrim - port to Python | ||
| 11 | # | ||
| 12 | # This library is free software; you can redistribute it and/or | ||
| 13 | # modify it under the terms of the GNU Lesser General Public | ||
| 14 | # License as published by the Free Software Foundation; either | ||
| 15 | # version 2.1 of the License, or (at your option) any later version. | ||
| 16 | # | ||
| 17 | # This library is distributed in the hope that it will be useful, | ||
| 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
| 20 | # Lesser General Public License for more details. | ||
| 21 | # | ||
| 22 | # You should have received a copy of the GNU Lesser General Public | ||
| 23 | # License along with this library; if not, write to the Free Software | ||
| 24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA | ||
| 25 | # 02110-1301 USA | ||
| 26 | ######################### END LICENSE BLOCK ######################### | ||
| 27 | |||
| 28 | from .charsetprober import CharSetProber | ||
| 29 | from .codingstatemachine import CodingStateMachine | ||
| 30 | from .enums import LanguageFilter, ProbingState, MachineState | ||
| 31 | from .escsm import (HZ_SM_MODEL, ISO2022CN_SM_MODEL, ISO2022JP_SM_MODEL, | ||
| 32 | ISO2022KR_SM_MODEL) | ||
| 33 | |||
| 34 | |||
| 35 | class EscCharSetProber(CharSetProber): | ||
| 36 | """ | ||
| 37 | This CharSetProber uses a "code scheme" approach for detecting encodings, | ||
| 38 | whereby easily recognizable escape or shift sequences are relied on to | ||
| 39 | identify these encodings. | ||
| 40 | """ | ||
| 41 | |||
| 42 | def __init__(self, lang_filter=None): | ||
| 43 | super(EscCharSetProber, self).__init__(lang_filter=lang_filter) | ||
| 44 | self.coding_sm = [] | ||
| 45 | if self.lang_filter & LanguageFilter.CHINESE_SIMPLIFIED: | ||
| 46 | self.coding_sm.append(CodingStateMachine(HZ_SM_MODEL)) | ||
| 47 | self.coding_sm.append(CodingStateMachine(ISO2022CN_SM_MODEL)) | ||
| 48 | if self.lang_filter & LanguageFilter.JAPANESE: | ||
| 49 | self.coding_sm.append(CodingStateMachine(ISO2022JP_SM_MODEL)) | ||
| 50 | if self.lang_filter & LanguageFilter.KOREAN: | ||
| 51 | self.coding_sm.append(CodingStateMachine(ISO2022KR_SM_MODEL)) | ||
| 52 | self.active_sm_count = None | ||
| 53 | self._detected_charset = None | ||
| 54 | self._detected_language = None | ||
| 55 | self._state = None | ||
| 56 | self.reset() | ||
| 57 | |||
| 58 | def reset(self): | ||
| 59 | super(EscCharSetProber, self).reset() | ||
| 60 | for coding_sm in self.coding_sm: | ||
| 61 | if not coding_sm: | ||
| 62 | continue | ||
| 63 | coding_sm.active = True | ||
| 64 | coding_sm.reset() | ||
| 65 | self.active_sm_count = len(self.coding_sm) | ||
| 66 | self._detected_charset = None | ||
| 67 | self._detected_language = None | ||
| 68 | |||
| 69 | @property | ||
| 70 | def charset_name(self): | ||
| 71 | return self._detected_charset | ||
| 72 | |||
| 73 | @property | ||
| 74 | def language(self): | ||
| 75 | return self._detected_language | ||
| 76 | |||
| 77 | def get_confidence(self): | ||
| 78 | if self._detected_charset: | ||
| 79 | return 0.99 | ||
| 80 | else: | ||
| 81 | return 0.00 | ||
| 82 | |||
| 83 | def feed(self, byte_str): | ||
| 84 | for c in byte_str: | ||
| 85 | for coding_sm in self.coding_sm: | ||
| 86 | if not coding_sm or not coding_sm.active: | ||
| 87 | continue | ||
| 88 | coding_state = coding_sm.next_state(c) | ||
| 89 | if coding_state == MachineState.ERROR: | ||
| 90 | coding_sm.active = False | ||
| 91 | self.active_sm_count -= 1 | ||
| 92 | if self.active_sm_count <= 0: | ||
| 93 | self._state = ProbingState.NOT_ME | ||
| 94 | return self.state | ||
| 95 | elif coding_state == MachineState.ITS_ME: | ||
| 96 | self._state = ProbingState.FOUND_IT | ||
| 97 | self._detected_charset = coding_sm.get_coding_state_machine() | ||
| 98 | self._detected_language = coding_sm.language | ||
| 99 | return self.state | ||
| 100 | |||
| 101 | return self.state | ||
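Because escape sequences are unambiguous, this prober can reach `FOUND_IT` after only a few bytes; the three-byte ISO-2022-JP designation `ESC $ B` is enough. A sketch against the vendored modules (note that `lang_filter` must be given, since the constructor ANDs against it):

```python
from pip._vendor.chardet.escprober import EscCharSetProber
from pip._vendor.chardet.enums import LanguageFilter, ProbingState

prober = EscCharSetProber(lang_filter=LanguageFilter.ALL)
state = prober.feed(b"\x1b$B")  # escape into JIS X 0208
print(state == ProbingState.FOUND_IT)        # True
print(prober.charset_name, prober.language)  # ISO-2022-JP Japanese
```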
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/escsm.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/escsm.py new file mode 100644 index 0000000..b837704 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/escsm.py | |||
| @@ -0,0 +1,246 @@ | |||
| 1 | ######################## BEGIN LICENSE BLOCK ######################## | ||
| 2 | # The Original Code is mozilla.org code. | ||
| 3 | # | ||
| 4 | # The Initial Developer of the Original Code is | ||
| 5 | # Netscape Communications Corporation. | ||
| 6 | # Portions created by the Initial Developer are Copyright (C) 1998 | ||
| 7 | # the Initial Developer. All Rights Reserved. | ||
| 8 | # | ||
| 9 | # Contributor(s): | ||
| 10 | # Mark Pilgrim - port to Python | ||
| 11 | # | ||
| 12 | # This library is free software; you can redistribute it and/or | ||
| 13 | # modify it under the terms of the GNU Lesser General Public | ||
| 14 | # License as published by the Free Software Foundation; either | ||
| 15 | # version 2.1 of the License, or (at your option) any later version. | ||
| 16 | # | ||
| 17 | # This library is distributed in the hope that it will be useful, | ||
| 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
| 20 | # Lesser General Public License for more details. | ||
| 21 | # | ||
| 22 | # You should have received a copy of the GNU Lesser General Public | ||
| 23 | # License along with this library; if not, write to the Free Software | ||
| 24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA | ||
| 25 | # 02110-1301 USA | ||
| 26 | ######################### END LICENSE BLOCK ######################### | ||
| 27 | |||
| 28 | from .enums import MachineState | ||
| 29 | |||
| 30 | HZ_CLS = ( | ||
| 31 | 1,0,0,0,0,0,0,0, # 00 - 07 | ||
| 32 | 0,0,0,0,0,0,0,0, # 08 - 0f | ||
| 33 | 0,0,0,0,0,0,0,0, # 10 - 17 | ||
| 34 | 0,0,0,1,0,0,0,0, # 18 - 1f | ||
| 35 | 0,0,0,0,0,0,0,0, # 20 - 27 | ||
| 36 | 0,0,0,0,0,0,0,0, # 28 - 2f | ||
| 37 | 0,0,0,0,0,0,0,0, # 30 - 37 | ||
| 38 | 0,0,0,0,0,0,0,0, # 38 - 3f | ||
| 39 | 0,0,0,0,0,0,0,0, # 40 - 47 | ||
| 40 | 0,0,0,0,0,0,0,0, # 48 - 4f | ||
| 41 | 0,0,0,0,0,0,0,0, # 50 - 57 | ||
| 42 | 0,0,0,0,0,0,0,0, # 58 - 5f | ||
| 43 | 0,0,0,0,0,0,0,0, # 60 - 67 | ||
| 44 | 0,0,0,0,0,0,0,0, # 68 - 6f | ||
| 45 | 0,0,0,0,0,0,0,0, # 70 - 77 | ||
| 46 | 0,0,0,4,0,5,2,0, # 78 - 7f | ||
| 47 | 1,1,1,1,1,1,1,1, # 80 - 87 | ||
| 48 | 1,1,1,1,1,1,1,1, # 88 - 8f | ||
| 49 | 1,1,1,1,1,1,1,1, # 90 - 97 | ||
| 50 | 1,1,1,1,1,1,1,1, # 98 - 9f | ||
| 51 | 1,1,1,1,1,1,1,1, # a0 - a7 | ||
| 52 | 1,1,1,1,1,1,1,1, # a8 - af | ||
| 53 | 1,1,1,1,1,1,1,1, # b0 - b7 | ||
| 54 | 1,1,1,1,1,1,1,1, # b8 - bf | ||
| 55 | 1,1,1,1,1,1,1,1, # c0 - c7 | ||
| 56 | 1,1,1,1,1,1,1,1, # c8 - cf | ||
| 57 | 1,1,1,1,1,1,1,1, # d0 - d7 | ||
| 58 | 1,1,1,1,1,1,1,1, # d8 - df | ||
| 59 | 1,1,1,1,1,1,1,1, # e0 - e7 | ||
| 60 | 1,1,1,1,1,1,1,1, # e8 - ef | ||
| 61 | 1,1,1,1,1,1,1,1, # f0 - f7 | ||
| 62 | 1,1,1,1,1,1,1,1, # f8 - ff | ||
| 63 | ) | ||
| 64 | |||
| 65 | HZ_ST = ( | ||
| 66 | MachineState.START,MachineState.ERROR, 3,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,# 00-07 | ||
| 67 | MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 08-0f | ||
| 68 | MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START, 4,MachineState.ERROR,# 10-17 | ||
| 69 | 5,MachineState.ERROR, 6,MachineState.ERROR, 5, 5, 4,MachineState.ERROR,# 18-1f | ||
| 70 | 4,MachineState.ERROR, 4, 4, 4,MachineState.ERROR, 4,MachineState.ERROR,# 20-27 | ||
| 71 | 4,MachineState.ITS_ME,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 28-2f | ||
| 72 | ) | ||
| 73 | |||
| 74 | HZ_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0) | ||
| 75 | |||
| 76 | HZ_SM_MODEL = {'class_table': HZ_CLS, | ||
| 77 | 'class_factor': 6, | ||
| 78 | 'state_table': HZ_ST, | ||
| 79 | 'char_len_table': HZ_CHAR_LEN_TABLE, | ||
| 80 | 'name': "HZ-GB-2312", | ||
| 81 | 'language': 'Chinese'} | ||
| 82 | |||
| 83 | ISO2022CN_CLS = ( | ||
| 84 | 2,0,0,0,0,0,0,0, # 00 - 07 | ||
| 85 | 0,0,0,0,0,0,0,0, # 08 - 0f | ||
| 86 | 0,0,0,0,0,0,0,0, # 10 - 17 | ||
| 87 | 0,0,0,1,0,0,0,0, # 18 - 1f | ||
| 88 | 0,0,0,0,0,0,0,0, # 20 - 27 | ||
| 89 | 0,3,0,0,0,0,0,0, # 28 - 2f | ||
| 90 | 0,0,0,0,0,0,0,0, # 30 - 37 | ||
| 91 | 0,0,0,0,0,0,0,0, # 38 - 3f | ||
| 92 | 0,0,0,4,0,0,0,0, # 40 - 47 | ||
| 93 | 0,0,0,0,0,0,0,0, # 48 - 4f | ||
| 94 | 0,0,0,0,0,0,0,0, # 50 - 57 | ||
| 95 | 0,0,0,0,0,0,0,0, # 58 - 5f | ||
| 96 | 0,0,0,0,0,0,0,0, # 60 - 67 | ||
| 97 | 0,0,0,0,0,0,0,0, # 68 - 6f | ||
| 98 | 0,0,0,0,0,0,0,0, # 70 - 77 | ||
| 99 | 0,0,0,0,0,0,0,0, # 78 - 7f | ||
| 100 | 2,2,2,2,2,2,2,2, # 80 - 87 | ||
| 101 | 2,2,2,2,2,2,2,2, # 88 - 8f | ||
| 102 | 2,2,2,2,2,2,2,2, # 90 - 97 | ||
| 103 | 2,2,2,2,2,2,2,2, # 98 - 9f | ||
| 104 | 2,2,2,2,2,2,2,2, # a0 - a7 | ||
| 105 | 2,2,2,2,2,2,2,2, # a8 - af | ||
| 106 | 2,2,2,2,2,2,2,2, # b0 - b7 | ||
| 107 | 2,2,2,2,2,2,2,2, # b8 - bf | ||
| 108 | 2,2,2,2,2,2,2,2, # c0 - c7 | ||
| 109 | 2,2,2,2,2,2,2,2, # c8 - cf | ||
| 110 | 2,2,2,2,2,2,2,2, # d0 - d7 | ||
| 111 | 2,2,2,2,2,2,2,2, # d8 - df | ||
| 112 | 2,2,2,2,2,2,2,2, # e0 - e7 | ||
| 113 | 2,2,2,2,2,2,2,2, # e8 - ef | ||
| 114 | 2,2,2,2,2,2,2,2, # f0 - f7 | ||
| 115 | 2,2,2,2,2,2,2,2, # f8 - ff | ||
| 116 | ) | ||
| 117 | |||
| 118 | ISO2022CN_ST = ( | ||
| 119 | MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 00-07 | ||
| 120 | MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 08-0f | ||
| 121 | MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 10-17 | ||
| 122 | MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,# 18-1f | ||
| 123 | MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 20-27 | ||
| 124 | 5, 6,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 28-2f | ||
| 125 | MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 30-37 | ||
| 126 | MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,# 38-3f | ||
| 127 | ) | ||
| 128 | |||
| 129 | ISO2022CN_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0) | ||
| 130 | |||
| 131 | ISO2022CN_SM_MODEL = {'class_table': ISO2022CN_CLS, | ||
| 132 | 'class_factor': 9, | ||
| 133 | 'state_table': ISO2022CN_ST, | ||
| 134 | 'char_len_table': ISO2022CN_CHAR_LEN_TABLE, | ||
| 135 | 'name': "ISO-2022-CN", | ||
| 136 | 'language': 'Chinese'} | ||
| 137 | |||
| 138 | ISO2022JP_CLS = ( | ||
| 139 | 2,0,0,0,0,0,0,0, # 00 - 07 | ||
| 140 | 0,0,0,0,0,0,2,2, # 08 - 0f | ||
| 141 | 0,0,0,0,0,0,0,0, # 10 - 17 | ||
| 142 | 0,0,0,1,0,0,0,0, # 18 - 1f | ||
| 143 | 0,0,0,0,7,0,0,0, # 20 - 27 | ||
| 144 | 3,0,0,0,0,0,0,0, # 28 - 2f | ||
| 145 | 0,0,0,0,0,0,0,0, # 30 - 37 | ||
| 146 | 0,0,0,0,0,0,0,0, # 38 - 3f | ||
| 147 | 6,0,4,0,8,0,0,0, # 40 - 47 | ||
| 148 | 0,9,5,0,0,0,0,0, # 48 - 4f | ||
| 149 | 0,0,0,0,0,0,0,0, # 50 - 57 | ||
| 150 | 0,0,0,0,0,0,0,0, # 58 - 5f | ||
| 151 | 0,0,0,0,0,0,0,0, # 60 - 67 | ||
| 152 | 0,0,0,0,0,0,0,0, # 68 - 6f | ||
| 153 | 0,0,0,0,0,0,0,0, # 70 - 77 | ||
| 154 | 0,0,0,0,0,0,0,0, # 78 - 7f | ||
| 155 | 2,2,2,2,2,2,2,2, # 80 - 87 | ||
| 156 | 2,2,2,2,2,2,2,2, # 88 - 8f | ||
| 157 | 2,2,2,2,2,2,2,2, # 90 - 97 | ||
| 158 | 2,2,2,2,2,2,2,2, # 98 - 9f | ||
| 159 | 2,2,2,2,2,2,2,2, # a0 - a7 | ||
| 160 | 2,2,2,2,2,2,2,2, # a8 - af | ||
| 161 | 2,2,2,2,2,2,2,2, # b0 - b7 | ||
| 162 | 2,2,2,2,2,2,2,2, # b8 - bf | ||
| 163 | 2,2,2,2,2,2,2,2, # c0 - c7 | ||
| 164 | 2,2,2,2,2,2,2,2, # c8 - cf | ||
| 165 | 2,2,2,2,2,2,2,2, # d0 - d7 | ||
| 166 | 2,2,2,2,2,2,2,2, # d8 - df | ||
| 167 | 2,2,2,2,2,2,2,2, # e0 - e7 | ||
| 168 | 2,2,2,2,2,2,2,2, # e8 - ef | ||
| 169 | 2,2,2,2,2,2,2,2, # f0 - f7 | ||
| 170 | 2,2,2,2,2,2,2,2, # f8 - ff | ||
| 171 | ) | ||
| 172 | |||
| 173 | ISO2022JP_ST = ( | ||
| 174 | MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 00-07 | ||
| 175 | MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 08-0f | ||
| 176 | MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 10-17 | ||
| 177 | MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,# 18-1f | ||
| 178 | MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,MachineState.ERROR,# 20-27 | ||
| 179 | MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 6,MachineState.ITS_ME,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,# 28-2f | ||
| 180 | MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,# 30-37 | ||
| 181 | MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 38-3f | ||
| 182 | MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,MachineState.START,# 40-47 | ||
| 183 | ) | ||
| 184 | |||
| 185 | ISO2022JP_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0, 0) | ||
| 186 | |||
| 187 | ISO2022JP_SM_MODEL = {'class_table': ISO2022JP_CLS, | ||
| 188 | 'class_factor': 10, | ||
| 189 | 'state_table': ISO2022JP_ST, | ||
| 190 | 'char_len_table': ISO2022JP_CHAR_LEN_TABLE, | ||
| 191 | 'name': "ISO-2022-JP", | ||
| 192 | 'language': 'Japanese'} | ||
| 193 | |||
| 194 | ISO2022KR_CLS = ( | ||
| 195 | 2,0,0,0,0,0,0,0, # 00 - 07 | ||
| 196 | 0,0,0,0,0,0,0,0, # 08 - 0f | ||
| 197 | 0,0,0,0,0,0,0,0, # 10 - 17 | ||
| 198 | 0,0,0,1,0,0,0,0, # 18 - 1f | ||
| 199 | 0,0,0,0,3,0,0,0, # 20 - 27 | ||
| 200 | 0,4,0,0,0,0,0,0, # 28 - 2f | ||
| 201 | 0,0,0,0,0,0,0,0, # 30 - 37 | ||
| 202 | 0,0,0,0,0,0,0,0, # 38 - 3f | ||
| 203 | 0,0,0,5,0,0,0,0, # 40 - 47 | ||
| 204 | 0,0,0,0,0,0,0,0, # 48 - 4f | ||
| 205 | 0,0,0,0,0,0,0,0, # 50 - 57 | ||
| 206 | 0,0,0,0,0,0,0,0, # 58 - 5f | ||
| 207 | 0,0,0,0,0,0,0,0, # 60 - 67 | ||
| 208 | 0,0,0,0,0,0,0,0, # 68 - 6f | ||
| 209 | 0,0,0,0,0,0,0,0, # 70 - 77 | ||
| 210 | 0,0,0,0,0,0,0,0, # 78 - 7f | ||
| 211 | 2,2,2,2,2,2,2,2, # 80 - 87 | ||
| 212 | 2,2,2,2,2,2,2,2, # 88 - 8f | ||
| 213 | 2,2,2,2,2,2,2,2, # 90 - 97 | ||
| 214 | 2,2,2,2,2,2,2,2, # 98 - 9f | ||
| 215 | 2,2,2,2,2,2,2,2, # a0 - a7 | ||
| 216 | 2,2,2,2,2,2,2,2, # a8 - af | ||
| 217 | 2,2,2,2,2,2,2,2, # b0 - b7 | ||
| 218 | 2,2,2,2,2,2,2,2, # b8 - bf | ||
| 219 | 2,2,2,2,2,2,2,2, # c0 - c7 | ||
| 220 | 2,2,2,2,2,2,2,2, # c8 - cf | ||
| 221 | 2,2,2,2,2,2,2,2, # d0 - d7 | ||
| 222 | 2,2,2,2,2,2,2,2, # d8 - df | ||
| 223 | 2,2,2,2,2,2,2,2, # e0 - e7 | ||
| 224 | 2,2,2,2,2,2,2,2, # e8 - ef | ||
| 225 | 2,2,2,2,2,2,2,2, # f0 - f7 | ||
| 226 | 2,2,2,2,2,2,2,2, # f8 - ff | ||
| 227 | ) | ||
| 228 | |||
| 229 | ISO2022KR_ST = ( | ||
| 230 | MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,# 00-07 | ||
| 231 | MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 08-0f | ||
| 232 | MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,MachineState.ERROR,# 10-17 | ||
| 233 | MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 18-1f | ||
| 234 | MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 20-27 | ||
| 235 | ) | ||
| 236 | |||
| 237 | ISO2022KR_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0) | ||
| 238 | |||
| 239 | ISO2022KR_SM_MODEL = {'class_table': ISO2022KR_CLS, | ||
| 240 | 'class_factor': 6, | ||
| 241 | 'state_table': ISO2022KR_ST, | ||
| 242 | 'char_len_table': ISO2022KR_CHAR_LEN_TABLE, | ||
| 243 | 'name': "ISO-2022-KR", | ||
| 244 | 'language': 'Korean'} | ||
| 245 | |||
| 246 | |||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/eucjpprober.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/eucjpprober.py new file mode 100644 index 0000000..a81ee1e --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/eucjpprober.py | |||
| @@ -0,0 +1,92 @@ | |||
| 1 | ######################## BEGIN LICENSE BLOCK ######################## | ||
| 2 | # The Original Code is mozilla.org code. | ||
| 3 | # | ||
| 4 | # The Initial Developer of the Original Code is | ||
| 5 | # Netscape Communications Corporation. | ||
| 6 | # Portions created by the Initial Developer are Copyright (C) 1998 | ||
| 7 | # the Initial Developer. All Rights Reserved. | ||
| 8 | # | ||
| 9 | # Contributor(s): | ||
| 10 | # Mark Pilgrim - port to Python | ||
| 11 | # | ||
| 12 | # This library is free software; you can redistribute it and/or | ||
| 13 | # modify it under the terms of the GNU Lesser General Public | ||
| 14 | # License as published by the Free Software Foundation; either | ||
| 15 | # version 2.1 of the License, or (at your option) any later version. | ||
| 16 | # | ||
| 17 | # This library is distributed in the hope that it will be useful, | ||
| 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
| 20 | # Lesser General Public License for more details. | ||
| 21 | # | ||
| 22 | # You should have received a copy of the GNU Lesser General Public | ||
| 23 | # License along with this library; if not, write to the Free Software | ||
| 24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA | ||
| 25 | # 02110-1301 USA | ||
| 26 | ######################### END LICENSE BLOCK ######################### | ||
| 27 | |||
| 28 | from .enums import ProbingState, MachineState | ||
| 29 | from .mbcharsetprober import MultiByteCharSetProber | ||
| 30 | from .codingstatemachine import CodingStateMachine | ||
| 31 | from .chardistribution import EUCJPDistributionAnalysis | ||
| 32 | from .jpcntx import EUCJPContextAnalysis | ||
| 33 | from .mbcssm import EUCJP_SM_MODEL | ||
| 34 | |||
| 35 | |||
| 36 | class EUCJPProber(MultiByteCharSetProber): | ||
| 37 | def __init__(self): | ||
| 38 | super(EUCJPProber, self).__init__() | ||
| 39 | self.coding_sm = CodingStateMachine(EUCJP_SM_MODEL) | ||
| 40 | self.distribution_analyzer = EUCJPDistributionAnalysis() | ||
| 41 | self.context_analyzer = EUCJPContextAnalysis() | ||
| 42 | self.reset() | ||
| 43 | |||
| 44 | def reset(self): | ||
| 45 | super(EUCJPProber, self).reset() | ||
| 46 | self.context_analyzer.reset() | ||
| 47 | |||
| 48 | @property | ||
| 49 | def charset_name(self): | ||
| 50 | return "EUC-JP" | ||
| 51 | |||
| 52 | @property | ||
| 53 | def language(self): | ||
| 54 | return "Japanese" | ||
| 55 | |||
| 56 | def feed(self, byte_str): | ||
| 57 | for i in range(len(byte_str)): | ||
| 58 | # PY3K: byte_str is a byte array, so byte_str[i] is an int, not a byte | ||
| 59 | coding_state = self.coding_sm.next_state(byte_str[i]) | ||
| 60 | if coding_state == MachineState.ERROR: | ||
| 61 | self.logger.debug('%s %s prober hit error at byte %s', | ||
| 62 | self.charset_name, self.language, i) | ||
| 63 | self._state = ProbingState.NOT_ME | ||
| 64 | break | ||
| 65 | elif coding_state == MachineState.ITS_ME: | ||
| 66 | self._state = ProbingState.FOUND_IT | ||
| 67 | break | ||
| 68 | elif coding_state == MachineState.START: | ||
| 69 | char_len = self.coding_sm.get_current_charlen() | ||
| 70 | if i == 0: | ||
| 71 | self._last_char[1] = byte_str[0] | ||
| 72 | self.context_analyzer.feed(self._last_char, char_len) | ||
| 73 | self.distribution_analyzer.feed(self._last_char, char_len) | ||
| 74 | else: | ||
| 75 | self.context_analyzer.feed(byte_str[i - 1:i + 1], | ||
| 76 | char_len) | ||
| 77 | self.distribution_analyzer.feed(byte_str[i - 1:i + 1], | ||
| 78 | char_len) | ||
| 79 | |||
| 80 | self._last_char[0] = byte_str[-1] | ||
| 81 | |||
| 82 | if self.state == ProbingState.DETECTING: | ||
| 83 | if (self.context_analyzer.got_enough_data() and | ||
| 84 | (self.get_confidence() > self.SHORTCUT_THRESHOLD)): | ||
| 85 | self._state = ProbingState.FOUND_IT | ||
| 86 | |||
| 87 | return self.state | ||
| 88 | |||
| 89 | def get_confidence(self): | ||
| 90 | context_conf = self.context_analyzer.get_confidence() | ||
| 91 | distrib_conf = self.distribution_analyzer.get_confidence() | ||
| 92 | return max(context_conf, distrib_conf) | ||
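For orientation, the prober above combines three signals: the state machine rejects illegal byte sequences, while the context and distribution analyzers score plausibility, with `get_confidence()` taking the max of the two. A quick usage sketch (assuming the standalone `chardet` package is importable; pip only uses this vendored copy internally):

```python
# Usage sketch: drive EUCJPProber directly on EUC-JP bytes.
from chardet.eucjpprober import EUCJPProber

prober = EUCJPProber()
data = "こんにちは、世界".encode("euc_jp")   # sample EUC-JP input
prober.feed(data)
print(prober.charset_name, prober.language, prober.get_confidence())
```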
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/euckrfreq.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/euckrfreq.py new file mode 100644 index 0000000..ae25c1b --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/euckrfreq.py | |||
| @@ -0,0 +1,195 @@ | |||
| 1 | ######################## BEGIN LICENSE BLOCK ######################## | ||
| 2 | # The Original Code is Mozilla Communicator client code. | ||
| 3 | # | ||
| 4 | # The Initial Developer of the Original Code is | ||
| 5 | # Netscape Communications Corporation. | ||
| 6 | # Portions created by the Initial Developer are Copyright (C) 1998 | ||
| 7 | # the Initial Developer. All Rights Reserved. | ||
| 8 | # | ||
| 9 | # Contributor(s): | ||
| 10 | # Mark Pilgrim - port to Python | ||
| 11 | # | ||
| 12 | # This library is free software; you can redistribute it and/or | ||
| 13 | # modify it under the terms of the GNU Lesser General Public | ||
| 14 | # License as published by the Free Software Foundation; either | ||
| 15 | # version 2.1 of the License, or (at your option) any later version. | ||
| 16 | # | ||
| 17 | # This library is distributed in the hope that it will be useful, | ||
| 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
| 20 | # Lesser General Public License for more details. | ||
| 21 | # | ||
| 22 | # You should have received a copy of the GNU Lesser General Public | ||
| 23 | # License along with this library; if not, write to the Free Software | ||
| 24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA | ||
| 25 | # 02110-1301 USA | ||
| 26 | ######################### END LICENSE BLOCK ######################### | ||
| 27 | |||
| 28 | # Sampled from about 20M of text material, including literature and computer technology | ||
| 29 | |||
| 30 | # 128 --> 0.79 | ||
| 31 | # 256 --> 0.92 | ||
| 32 | # 512 --> 0.986 | ||
| 33 | # 1024 --> 0.99944 | ||
| 34 | # 2048 --> 0.99999 | ||
| 35 | # | ||
| 36 | # Ideal Distribution Ratio = 0.98653 / (1-0.98653) = 73.24 | ||
| 37 | # Random Distribution Ratio = 512 / (2350-512) = 0.279 | ||
| 38 | # | ||
| 39 | # Typical Distribution Ratio | ||
| 40 | |||
| 41 | EUCKR_TYPICAL_DISTRIBUTION_RATIO = 6.0 | ||
| 42 | |||
| 43 | EUCKR_TABLE_SIZE = 2352 | ||
| 44 | |||
| 45 | # Char to FreqOrder table | ||
| 46 | EUCKR_CHAR_TO_FREQ_ORDER = ( | ||
| 47 | 13, 130, 120,1396, 481,1719,1720, 328, 609, 212,1721, 707, 400, 299,1722, 87, | ||
| 48 | 1397,1723, 104, 536,1117,1203,1724,1267, 685,1268, 508,1725,1726,1727,1728,1398, | ||
| 49 | 1399,1729,1730,1731, 141, 621, 326,1057, 368,1732, 267, 488, 20,1733,1269,1734, | ||
| 50 | 945,1400,1735, 47, 904,1270,1736,1737, 773, 248,1738, 409, 313, 786, 429,1739, | ||
| 51 | 116, 987, 813,1401, 683, 75,1204, 145,1740,1741,1742,1743, 16, 847, 667, 622, | ||
| 52 | 708,1744,1745,1746, 966, 787, 304, 129,1747, 60, 820, 123, 676,1748,1749,1750, | ||
| 53 | 1751, 617,1752, 626,1753,1754,1755,1756, 653,1757,1758,1759,1760,1761,1762, 856, | ||
| 54 | 344,1763,1764,1765,1766, 89, 401, 418, 806, 905, 848,1767,1768,1769, 946,1205, | ||
| 55 | 709,1770,1118,1771, 241,1772,1773,1774,1271,1775, 569,1776, 999,1777,1778,1779, | ||
| 56 | 1780, 337, 751,1058, 28, 628, 254,1781, 177, 906, 270, 349, 891,1079,1782, 19, | ||
| 57 | 1783, 379,1784, 315,1785, 629, 754,1402, 559,1786, 636, 203,1206,1787, 710, 567, | ||
| 58 | 1788, 935, 814,1789,1790,1207, 766, 528,1791,1792,1208,1793,1794,1795,1796,1797, | ||
| 59 | 1403,1798,1799, 533,1059,1404,1405,1156,1406, 936, 884,1080,1800, 351,1801,1802, | ||
| 60 | 1803,1804,1805, 801,1806,1807,1808,1119,1809,1157, 714, 474,1407,1810, 298, 899, | ||
| 61 | 885,1811,1120, 802,1158,1812, 892,1813,1814,1408, 659,1815,1816,1121,1817,1818, | ||
| 62 | 1819,1820,1821,1822, 319,1823, 594, 545,1824, 815, 937,1209,1825,1826, 573,1409, | ||
| 63 | 1022,1827,1210,1828,1829,1830,1831,1832,1833, 556, 722, 807,1122,1060,1834, 697, | ||
| 64 | 1835, 900, 557, 715,1836,1410, 540,1411, 752,1159, 294, 597,1211, 976, 803, 770, | ||
| 65 | 1412,1837,1838, 39, 794,1413, 358,1839, 371, 925,1840, 453, 661, 788, 531, 723, | ||
| 66 | 544,1023,1081, 869, 91,1841, 392, 430, 790, 602,1414, 677,1082, 457,1415,1416, | ||
| 67 | 1842,1843, 475, 327,1024,1417, 795, 121,1844, 733, 403,1418,1845,1846,1847, 300, | ||
| 68 | 119, 711,1212, 627,1848,1272, 207,1849,1850, 796,1213, 382,1851, 519,1852,1083, | ||
| 69 | 893,1853,1854,1855, 367, 809, 487, 671,1856, 663,1857,1858, 956, 471, 306, 857, | ||
| 70 | 1859,1860,1160,1084,1861,1862,1863,1864,1865,1061,1866,1867,1868,1869,1870,1871, | ||
| 71 | 282, 96, 574,1872, 502,1085,1873,1214,1874, 907,1875,1876, 827, 977,1419,1420, | ||
| 72 | 1421, 268,1877,1422,1878,1879,1880, 308,1881, 2, 537,1882,1883,1215,1884,1885, | ||
| 73 | 127, 791,1886,1273,1423,1887, 34, 336, 404, 643,1888, 571, 654, 894, 840,1889, | ||
| 74 | 0, 886,1274, 122, 575, 260, 908, 938,1890,1275, 410, 316,1891,1892, 100,1893, | ||
| 75 | 1894,1123, 48,1161,1124,1025,1895, 633, 901,1276,1896,1897, 115, 816,1898, 317, | ||
| 76 | 1899, 694,1900, 909, 734,1424, 572, 866,1425, 691, 85, 524,1010, 543, 394, 841, | ||
| 77 | 1901,1902,1903,1026,1904,1905,1906,1907,1908,1909, 30, 451, 651, 988, 310,1910, | ||
| 78 | 1911,1426, 810,1216, 93,1912,1913,1277,1217,1914, 858, 759, 45, 58, 181, 610, | ||
| 79 | 269,1915,1916, 131,1062, 551, 443,1000, 821,1427, 957, 895,1086,1917,1918, 375, | ||
| 80 | 1919, 359,1920, 687,1921, 822,1922, 293,1923,1924, 40, 662, 118, 692, 29, 939, | ||
| 81 | 887, 640, 482, 174,1925, 69,1162, 728,1428, 910,1926,1278,1218,1279, 386, 870, | ||
| 82 | 217, 854,1163, 823,1927,1928,1929,1930, 834,1931, 78,1932, 859,1933,1063,1934, | ||
| 83 | 1935,1936,1937, 438,1164, 208, 595,1938,1939,1940,1941,1219,1125,1942, 280, 888, | ||
| 84 | 1429,1430,1220,1431,1943,1944,1945,1946,1947,1280, 150, 510,1432,1948,1949,1950, | ||
| 85 | 1951,1952,1953,1954,1011,1087,1955,1433,1043,1956, 881,1957, 614, 958,1064,1065, | ||
| 86 | 1221,1958, 638,1001, 860, 967, 896,1434, 989, 492, 553,1281,1165,1959,1282,1002, | ||
| 87 | 1283,1222,1960,1961,1962,1963, 36, 383, 228, 753, 247, 454,1964, 876, 678,1965, | ||
| 88 | 1966,1284, 126, 464, 490, 835, 136, 672, 529, 940,1088,1435, 473,1967,1968, 467, | ||
| 89 | 50, 390, 227, 587, 279, 378, 598, 792, 968, 240, 151, 160, 849, 882,1126,1285, | ||
| 90 | 639,1044, 133, 140, 288, 360, 811, 563,1027, 561, 142, 523,1969,1970,1971, 7, | ||
| 91 | 103, 296, 439, 407, 506, 634, 990,1972,1973,1974,1975, 645,1976,1977,1978,1979, | ||
| 92 | 1980,1981, 236,1982,1436,1983,1984,1089, 192, 828, 618, 518,1166, 333,1127,1985, | ||
| 93 | 818,1223,1986,1987,1988,1989,1990,1991,1992,1993, 342,1128,1286, 746, 842,1994, | ||
| 94 | 1995, 560, 223,1287, 98, 8, 189, 650, 978,1288,1996,1437,1997, 17, 345, 250, | ||
| 95 | 423, 277, 234, 512, 226, 97, 289, 42, 167,1998, 201,1999,2000, 843, 836, 824, | ||
| 96 | 532, 338, 783,1090, 182, 576, 436,1438,1439, 527, 500,2001, 947, 889,2002,2003, | ||
| 97 | 2004,2005, 262, 600, 314, 447,2006, 547,2007, 693, 738,1129,2008, 71,1440, 745, | ||
| 98 | 619, 688,2009, 829,2010,2011, 147,2012, 33, 948,2013,2014, 74, 224,2015, 61, | ||
| 99 | 191, 918, 399, 637,2016,1028,1130, 257, 902,2017,2018,2019,2020,2021,2022,2023, | ||
| 100 | 2024,2025,2026, 837,2027,2028,2029,2030, 179, 874, 591, 52, 724, 246,2031,2032, | ||
| 101 | 2033,2034,1167, 969,2035,1289, 630, 605, 911,1091,1168,2036,2037,2038,1441, 912, | ||
| 102 | 2039, 623,2040,2041, 253,1169,1290,2042,1442, 146, 620, 611, 577, 433,2043,1224, | ||
| 103 | 719,1170, 959, 440, 437, 534, 84, 388, 480,1131, 159, 220, 198, 679,2044,1012, | ||
| 104 | 819,1066,1443, 113,1225, 194, 318,1003,1029,2045,2046,2047,2048,1067,2049,2050, | ||
| 105 | 2051,2052,2053, 59, 913, 112,2054, 632,2055, 455, 144, 739,1291,2056, 273, 681, | ||
| 106 | 499,2057, 448,2058,2059, 760,2060,2061, 970, 384, 169, 245,1132,2062,2063, 414, | ||
| 107 | 1444,2064,2065, 41, 235,2066, 157, 252, 877, 568, 919, 789, 580,2067, 725,2068, | ||
| 108 | 2069,1292,2070,2071,1445,2072,1446,2073,2074, 55, 588, 66,1447, 271,1092,2075, | ||
| 109 | 1226,2076, 960,1013, 372,2077,2078,2079,2080,2081,1293,2082,2083,2084,2085, 850, | ||
| 110 | 2086,2087,2088,2089,2090, 186,2091,1068, 180,2092,2093,2094, 109,1227, 522, 606, | ||
| 111 | 2095, 867,1448,1093, 991,1171, 926, 353,1133,2096, 581,2097,2098,2099,1294,1449, | ||
| 112 | 1450,2100, 596,1172,1014,1228,2101,1451,1295,1173,1229,2102,2103,1296,1134,1452, | ||
| 113 | 949,1135,2104,2105,1094,1453,1454,1455,2106,1095,2107,2108,2109,2110,2111,2112, | ||
| 114 | 2113,2114,2115,2116,2117, 804,2118,2119,1230,1231, 805,1456, 405,1136,2120,2121, | ||
| 115 | 2122,2123,2124, 720, 701,1297, 992,1457, 927,1004,2125,2126,2127,2128,2129,2130, | ||
| 116 | 22, 417,2131, 303,2132, 385,2133, 971, 520, 513,2134,1174, 73,1096, 231, 274, | ||
| 117 | 962,1458, 673,2135,1459,2136, 152,1137,2137,2138,2139,2140,1005,1138,1460,1139, | ||
| 118 | 2141,2142,2143,2144, 11, 374, 844,2145, 154,1232, 46,1461,2146, 838, 830, 721, | ||
| 119 | 1233, 106,2147, 90, 428, 462, 578, 566,1175, 352,2148,2149, 538,1234, 124,1298, | ||
| 120 | 2150,1462, 761, 565,2151, 686,2152, 649,2153, 72, 173,2154, 460, 415,2155,1463, | ||
| 121 | 2156,1235, 305,2157,2158,2159,2160,2161,2162, 579,2163,2164,2165,2166,2167, 747, | ||
| 122 | 2168,2169,2170,2171,1464, 669,2172,2173,2174,2175,2176,1465,2177, 23, 530, 285, | ||
| 123 | 2178, 335, 729,2179, 397,2180,2181,2182,1030,2183,2184, 698,2185,2186, 325,2187, | ||
| 124 | 2188, 369,2189, 799,1097,1015, 348,2190,1069, 680,2191, 851,1466,2192,2193, 10, | ||
| 125 | 2194, 613, 424,2195, 979, 108, 449, 589, 27, 172, 81,1031, 80, 774, 281, 350, | ||
| 126 | 1032, 525, 301, 582,1176,2196, 674,1045,2197,2198,1467, 730, 762,2199,2200,2201, | ||
| 127 | 2202,1468,2203, 993,2204,2205, 266,1070, 963,1140,2206,2207,2208, 664,1098, 972, | ||
| 128 | 2209,2210,2211,1177,1469,1470, 871,2212,2213,2214,2215,2216,1471,2217,2218,2219, | ||
| 129 | 2220,2221,2222,2223,2224,2225,2226,2227,1472,1236,2228,2229,2230,2231,2232,2233, | ||
| 130 | 2234,2235,1299,2236,2237, 200,2238, 477, 373,2239,2240, 731, 825, 777,2241,2242, | ||
| 131 | 2243, 521, 486, 548,2244,2245,2246,1473,1300, 53, 549, 137, 875, 76, 158,2247, | ||
| 132 | 1301,1474, 469, 396,1016, 278, 712,2248, 321, 442, 503, 767, 744, 941,1237,1178, | ||
| 133 | 1475,2249, 82, 178,1141,1179, 973,2250,1302,2251, 297,2252,2253, 570,2254,2255, | ||
| 134 | 2256, 18, 450, 206,2257, 290, 292,1142,2258, 511, 162, 99, 346, 164, 735,2259, | ||
| 135 | 1476,1477, 4, 554, 343, 798,1099,2260,1100,2261, 43, 171,1303, 139, 215,2262, | ||
| 136 | 2263, 717, 775,2264,1033, 322, 216,2265, 831,2266, 149,2267,1304,2268,2269, 702, | ||
| 137 | 1238, 135, 845, 347, 309,2270, 484,2271, 878, 655, 238,1006,1478,2272, 67,2273, | ||
| 138 | 295,2274,2275, 461,2276, 478, 942, 412,2277,1034,2278,2279,2280, 265,2281, 541, | ||
| 139 | 2282,2283,2284,2285,2286, 70, 852,1071,2287,2288,2289,2290, 21, 56, 509, 117, | ||
| 140 | 432,2291,2292, 331, 980, 552,1101, 148, 284, 105, 393,1180,1239, 755,2293, 187, | ||
| 141 | 2294,1046,1479,2295, 340,2296, 63,1047, 230,2297,2298,1305, 763,1306, 101, 800, | ||
| 142 | 808, 494,2299,2300,2301, 903,2302, 37,1072, 14, 5,2303, 79, 675,2304, 312, | ||
| 143 | 2305,2306,2307,2308,2309,1480, 6,1307,2310,2311,2312, 1, 470, 35, 24, 229, | ||
| 144 | 2313, 695, 210, 86, 778, 15, 784, 592, 779, 32, 77, 855, 964,2314, 259,2315, | ||
| 145 | 501, 380,2316,2317, 83, 981, 153, 689,1308,1481,1482,1483,2318,2319, 716,1484, | ||
| 146 | 2320,2321,2322,2323,2324,2325,1485,2326,2327, 128, 57, 68, 261,1048, 211, 170, | ||
| 147 | 1240, 31,2328, 51, 435, 742,2329,2330,2331, 635,2332, 264, 456,2333,2334,2335, | ||
| 148 | 425,2336,1486, 143, 507, 263, 943,2337, 363, 920,1487, 256,1488,1102, 243, 601, | ||
| 149 | 1489,2338,2339,2340,2341,2342,2343,2344, 861,2345,2346,2347,2348,2349,2350, 395, | ||
| 150 | 2351,1490,1491, 62, 535, 166, 225,2352,2353, 668, 419,1241, 138, 604, 928,2354, | ||
| 151 | 1181,2355,1492,1493,2356,2357,2358,1143,2359, 696,2360, 387, 307,1309, 682, 476, | ||
| 152 | 2361,2362, 332, 12, 222, 156,2363, 232,2364, 641, 276, 656, 517,1494,1495,1035, | ||
| 153 | 416, 736,1496,2365,1017, 586,2366,2367,2368,1497,2369, 242,2370,2371,2372,1498, | ||
| 154 | 2373, 965, 713,2374,2375,2376,2377, 740, 982,1499, 944,1500,1007,2378,2379,1310, | ||
| 155 | 1501,2380,2381,2382, 785, 329,2383,2384,1502,2385,2386,2387, 932,2388,1503,2389, | ||
| 156 | 2390,2391,2392,1242,2393,2394,2395,2396,2397, 994, 950,2398,2399,2400,2401,1504, | ||
| 157 | 1311,2402,2403,2404,2405,1049, 749,2406,2407, 853, 718,1144,1312,2408,1182,1505, | ||
| 158 | 2409,2410, 255, 516, 479, 564, 550, 214,1506,1507,1313, 413, 239, 444, 339,1145, | ||
| 159 | 1036,1508,1509,1314,1037,1510,1315,2411,1511,2412,2413,2414, 176, 703, 497, 624, | ||
| 160 | 593, 921, 302,2415, 341, 165,1103,1512,2416,1513,2417,2418,2419, 376,2420, 700, | ||
| 161 | 2421,2422,2423, 258, 768,1316,2424,1183,2425, 995, 608,2426,2427,2428,2429, 221, | ||
| 162 | 2430,2431,2432,2433,2434,2435,2436,2437, 195, 323, 726, 188, 897, 983,1317, 377, | ||
| 163 | 644,1050, 879,2438, 452,2439,2440,2441,2442,2443,2444, 914,2445,2446,2447,2448, | ||
| 164 | 915, 489,2449,1514,1184,2450,2451, 515, 64, 427, 495,2452, 583,2453, 483, 485, | ||
| 165 | 1038, 562, 213,1515, 748, 666,2454,2455,2456,2457, 334,2458, 780, 996,1008, 705, | ||
| 166 | 1243,2459,2460,2461,2462,2463, 114,2464, 493,1146, 366, 163,1516, 961,1104,2465, | ||
| 167 | 291,2466,1318,1105,2467,1517, 365,2468, 355, 951,1244,2469,1319,2470, 631,2471, | ||
| 168 | 2472, 218,1320, 364, 320, 756,1518,1519,1321,1520,1322,2473,2474,2475,2476, 997, | ||
| 169 | 2477,2478,2479,2480, 665,1185,2481, 916,1521,2482,2483,2484, 584, 684,2485,2486, | ||
| 170 | 797,2487,1051,1186,2488,2489,2490,1522,2491,2492, 370,2493,1039,1187, 65,2494, | ||
| 171 | 434, 205, 463,1188,2495, 125, 812, 391, 402, 826, 699, 286, 398, 155, 781, 771, | ||
| 172 | 585,2496, 590, 505,1073,2497, 599, 244, 219, 917,1018, 952, 646,1523,2498,1323, | ||
| 173 | 2499,2500, 49, 984, 354, 741,2501, 625,2502,1324,2503,1019, 190, 357, 757, 491, | ||
| 174 | 95, 782, 868,2504,2505,2506,2507,2508,2509, 134,1524,1074, 422,1525, 898,2510, | ||
| 175 | 161,2511,2512,2513,2514, 769,2515,1526,2516,2517, 411,1325,2518, 472,1527,2519, | ||
| 176 | 2520,2521,2522,2523,2524, 985,2525,2526,2527,2528,2529,2530, 764,2531,1245,2532, | ||
| 177 | 2533, 25, 204, 311,2534, 496,2535,1052,2536,2537,2538,2539,2540,2541,2542, 199, | ||
| 178 | 704, 504, 468, 758, 657,1528, 196, 44, 839,1246, 272, 750,2543, 765, 862,2544, | ||
| 179 | 2545,1326,2546, 132, 615, 933,2547, 732,2548,2549,2550,1189,1529,2551, 283,1247, | ||
| 180 | 1053, 607, 929,2552,2553,2554, 930, 183, 872, 616,1040,1147,2555,1148,1020, 441, | ||
| 181 | 249,1075,2556,2557,2558, 466, 743,2559,2560,2561, 92, 514, 426, 420, 526,2562, | ||
| 182 | 2563,2564,2565,2566,2567,2568, 185,2569,2570,2571,2572, 776,1530, 658,2573, 362, | ||
| 183 | 2574, 361, 922,1076, 793,2575,2576,2577,2578,2579,2580,1531, 251,2581,2582,2583, | ||
| 184 | 2584,1532, 54, 612, 237,1327,2585,2586, 275, 408, 647, 111,2587,1533,1106, 465, | ||
| 185 | 3, 458, 9, 38,2588, 107, 110, 890, 209, 26, 737, 498,2589,1534,2590, 431, | ||
| 186 | 202, 88,1535, 356, 287,1107, 660,1149,2591, 381,1536, 986,1150, 445,1248,1151, | ||
| 187 | 974,2592,2593, 846,2594, 446, 953, 184,1249,1250, 727,2595, 923, 193, 883,2596, | ||
| 188 | 2597,2598, 102, 324, 539, 817,2599, 421,1041,2600, 832,2601, 94, 175, 197, 406, | ||
| 189 | 2602, 459,2603,2604,2605,2606,2607, 330, 555,2608,2609,2610, 706,1108, 389,2611, | ||
| 190 | 2612,2613,2614, 233,2615, 833, 558, 931, 954,1251,2616,2617,1537, 546,2618,2619, | ||
| 191 | 1009,2620,2621,2622,1538, 690,1328,2623, 955,2624,1539,2625,2626, 772,2627,2628, | ||
| 192 | 2629,2630,2631, 924, 648, 863, 603,2632,2633, 934,1540, 864, 865,2634, 642,1042, | ||
| 193 | 670,1190,2635,2636,2637,2638, 168,2639, 652, 873, 542,1054,1541,2640,2641,2642, # 512, 256 | ||
| 194 | ) | ||
| 195 | |||
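The header comments above are the point of this table: ranking characters by frequency so that the top-512 bucket covers ~98.6% of real EUC-KR text, versus ~27.9% for random character pairs. That gap is what the typical-distribution ratio turns into a confidence score. Below is a sketch mirroring the shape of chardet's `CharDistributionAnalysis.get_confidence`; the 0.01/0.99 floor/ceiling and the minimum-data threshold of 3 are recalled from chardet and should be treated as assumptions:

```python
# Sketch of the confidence computation this table and
# EUCKR_TYPICAL_DISTRIBUTION_RATIO feed into (illustrative numbers).
def confidence(freq_chars, total_chars, typical_ratio=6.0):
    # freq_chars: multi-byte chars whose FreqOrder falls in the top bucket
    # total_chars: all multi-byte chars seen so far
    if total_chars <= 0 or freq_chars <= 3:       # not enough data yet
        return 0.01                                # SURE_NO
    if total_chars != freq_chars:
        r = freq_chars / ((total_chars - freq_chars) * typical_ratio)
        if r < 0.99:
            return r
    return 0.99                                    # SURE_YES

print(confidence(400, 500))   # 400 / (100 * 6.0) ≈ 0.667
```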
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/euckrprober.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/euckrprober.py new file mode 100644 index 0000000..99d5b15 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/euckrprober.py | |||
| @@ -0,0 +1,47 @@ | |||
| 1 | ######################## BEGIN LICENSE BLOCK ######################## | ||
| 2 | # The Original Code is mozilla.org code. | ||
| 3 | # | ||
| 4 | # The Initial Developer of the Original Code is | ||
| 5 | # Netscape Communications Corporation. | ||
| 6 | # Portions created by the Initial Developer are Copyright (C) 1998 | ||
| 7 | # the Initial Developer. All Rights Reserved. | ||
| 8 | # | ||
| 9 | # Contributor(s): | ||
| 10 | # Mark Pilgrim - port to Python | ||
| 11 | # | ||
| 12 | # This library is free software; you can redistribute it and/or | ||
| 13 | # modify it under the terms of the GNU Lesser General Public | ||
| 14 | # License as published by the Free Software Foundation; either | ||
| 15 | # version 2.1 of the License, or (at your option) any later version. | ||
| 16 | # | ||
| 17 | # This library is distributed in the hope that it will be useful, | ||
| 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
| 20 | # Lesser General Public License for more details. | ||
| 21 | # | ||
| 22 | # You should have received a copy of the GNU Lesser General Public | ||
| 23 | # License along with this library; if not, write to the Free Software | ||
| 24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA | ||
| 25 | # 02110-1301 USA | ||
| 26 | ######################### END LICENSE BLOCK ######################### | ||
| 27 | |||
| 28 | from .mbcharsetprober import MultiByteCharSetProber | ||
| 29 | from .codingstatemachine import CodingStateMachine | ||
| 30 | from .chardistribution import EUCKRDistributionAnalysis | ||
| 31 | from .mbcssm import EUCKR_SM_MODEL | ||
| 32 | |||
| 33 | |||
| 34 | class EUCKRProber(MultiByteCharSetProber): | ||
| 35 | def __init__(self): | ||
| 36 | super(EUCKRProber, self).__init__() | ||
| 37 | self.coding_sm = CodingStateMachine(EUCKR_SM_MODEL) | ||
| 38 | self.distribution_analyzer = EUCKRDistributionAnalysis() | ||
| 39 | self.reset() | ||
| 40 | |||
| 41 | @property | ||
| 42 | def charset_name(self): | ||
| 43 | return "EUC-KR" | ||
| 44 | |||
| 45 | @property | ||
| 46 | def language(self): | ||
| 47 | return "Korean" | ||
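Unlike `EUCJPProber` above, this prober has no context analyzer and inherits `feed()` unchanged from `MultiByteCharSetProber`. In practice it is reached through the top-level API; a quick end-to-end sketch (again assuming the standalone `chardet` package rather than the vendored copy):

```python
# End-to-end sketch via chardet's public API.
import chardet

result = chardet.detect("안녕하세요".encode("euc_kr"))
print(result)  # expected shape: {'encoding': 'EUC-KR', 'confidence': ..., 'language': 'Korean'}
```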
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/euctwfreq.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/euctwfreq.py new file mode 100644 index 0000000..5195275 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/euctwfreq.py | |||
| @@ -0,0 +1,387 @@ | |||
| 1 | ######################## BEGIN LICENSE BLOCK ######################## | ||
| 2 | # The Original Code is Mozilla Communicator client code. | ||
| 3 | # | ||
| 4 | # The Initial Developer of the Original Code is | ||
| 5 | # Netscape Communications Corporation. | ||
| 6 | # Portions created by the Initial Developer are Copyright (C) 1998 | ||
| 7 | # the Initial Developer. All Rights Reserved. | ||
| 8 | # | ||
| 9 | # Contributor(s): | ||
| 10 | # Mark Pilgrim - port to Python | ||
| 11 | # | ||
| 12 | # This library is free software; you can redistribute it and/or | ||
| 13 | # modify it under the terms of the GNU Lesser General Public | ||
| 14 | # License as published by the Free Software Foundation; either | ||
| 15 | # version 2.1 of the License, or (at your option) any later version. | ||
| 16 | # | ||
| 17 | # This library is distributed in the hope that it will be useful, | ||
| 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
| 20 | # Lesser General Public License for more details. | ||
| 21 | # | ||
| 22 | # You should have received a copy of the GNU Lesser General Public | ||
| 23 | # License along with this library; if not, write to the Free Software | ||
| 24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA | ||
| 25 | # 02110-1301 USA | ||
| 26 | ######################### END LICENSE BLOCK ######################### | ||
| 27 | |||
| 28 | # EUCTW frequency table | ||
| 29 | # Converted from big5 work | ||
| 30 | # by Taiwan's Mandarin Promotion Council | ||
| 31 | # <http://www.edu.tw:81/mandr/> | ||
| 32 | |||
| 33 | # 128 --> 0.42261 | ||
| 34 | # 256 --> 0.57851 | ||
| 35 | # 512 --> 0.74851 | ||
| 36 | # 1024 --> 0.89384 | ||
| 37 | # 2048 --> 0.97583 | ||
| 38 | # | ||
| 39 | # Ideal Distribution Ratio = 0.74851 / (1-0.74851) = 2.98 | ||
| 40 | # Random Distribution Ratio = 512 / (5401-512) = 0.105 | ||
| 41 | # | ||
| 42 | # The typical distribution ratio is about 25% of the ideal one, still much higher than RDR | ||
| 43 | |||
| 44 | EUCTW_TYPICAL_DISTRIBUTION_RATIO = 0.75 | ||
| 45 | |||
| 46 | # Char to FreqOrder table | ||
| 47 | EUCTW_TABLE_SIZE = 5376 | ||
| 48 | |||
| 49 | EUCTW_CHAR_TO_FREQ_ORDER = ( | ||
| 50 | 1,1800,1506, 255,1431, 198, 9, 82, 6,7310, 177, 202,3615,1256,2808, 110, # 2742 | ||
| 51 | 3735, 33,3241, 261, 76, 44,2113, 16,2931,2184,1176, 659,3868, 26,3404,2643, # 2758 | ||
| 52 | 1198,3869,3313,4060, 410,2211, 302, 590, 361,1963, 8, 204, 58,4296,7311,1931, # 2774 | ||
| 53 | 63,7312,7313, 317,1614, 75, 222, 159,4061,2412,1480,7314,3500,3068, 224,2809, # 2790 | ||
| 54 | 3616, 3, 10,3870,1471, 29,2774,1135,2852,1939, 873, 130,3242,1123, 312,7315, # 2806 | ||
| 55 | 4297,2051, 507, 252, 682,7316, 142,1914, 124, 206,2932, 34,3501,3173, 64, 604, # 2822 | ||
| 56 | 7317,2494,1976,1977, 155,1990, 645, 641,1606,7318,3405, 337, 72, 406,7319, 80, # 2838 | ||
| 57 | 630, 238,3174,1509, 263, 939,1092,2644, 756,1440,1094,3406, 449, 69,2969, 591, # 2854 | ||
| 58 | 179,2095, 471, 115,2034,1843, 60, 50,2970, 134, 806,1868, 734,2035,3407, 180, # 2870 | ||
| 59 | 995,1607, 156, 537,2893, 688,7320, 319,1305, 779,2144, 514,2374, 298,4298, 359, # 2886 | ||
| 60 | 2495, 90,2707,1338, 663, 11, 906,1099,2545, 20,2436, 182, 532,1716,7321, 732, # 2902 | ||
| 61 | 1376,4062,1311,1420,3175, 25,2312,1056, 113, 399, 382,1949, 242,3408,2467, 529, # 2918 | ||
| 62 | 3243, 475,1447,3617,7322, 117, 21, 656, 810,1297,2295,2329,3502,7323, 126,4063, # 2934 | ||
| 63 | 706, 456, 150, 613,4299, 71,1118,2036,4064, 145,3069, 85, 835, 486,2114,1246, # 2950 | ||
| 64 | 1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,7324,2127,2354, 347,3736, 221, # 2966 | ||
| 65 | 3503,3110,7325,1955,1153,4065, 83, 296,1199,3070, 192, 624, 93,7326, 822,1897, # 2982 | ||
| 66 | 2810,3111, 795,2064, 991,1554,1542,1592, 27, 43,2853, 859, 139,1456, 860,4300, # 2998 | ||
| 67 | 437, 712,3871, 164,2392,3112, 695, 211,3017,2096, 195,3872,1608,3504,3505,3618, # 3014 | ||
| 68 | 3873, 234, 811,2971,2097,3874,2229,1441,3506,1615,2375, 668,2076,1638, 305, 228, # 3030 | ||
| 69 | 1664,4301, 467, 415,7327, 262,2098,1593, 239, 108, 300, 200,1033, 512,1247,2077, # 3046 | ||
| 70 | 7328,7329,2173,3176,3619,2673, 593, 845,1062,3244, 88,1723,2037,3875,1950, 212, # 3062 | ||
| 71 | 266, 152, 149, 468,1898,4066,4302, 77, 187,7330,3018, 37, 5,2972,7331,3876, # 3078 | ||
| 72 | 7332,7333, 39,2517,4303,2894,3177,2078, 55, 148, 74,4304, 545, 483,1474,1029, # 3094 | ||
| 73 | 1665, 217,1869,1531,3113,1104,2645,4067, 24, 172,3507, 900,3877,3508,3509,4305, # 3110 | ||
| 74 | 32,1408,2811,1312, 329, 487,2355,2247,2708, 784,2674, 4,3019,3314,1427,1788, # 3126 | ||
| 75 | 188, 109, 499,7334,3620,1717,1789, 888,1217,3020,4306,7335,3510,7336,3315,1520, # 3142 | ||
| 76 | 3621,3878, 196,1034, 775,7337,7338, 929,1815, 249, 439, 38,7339,1063,7340, 794, # 3158 | ||
| 77 | 3879,1435,2296, 46, 178,3245,2065,7341,2376,7342, 214,1709,4307, 804, 35, 707, # 3174 | ||
| 78 | 324,3622,1601,2546, 140, 459,4068,7343,7344,1365, 839, 272, 978,2257,2572,3409, # 3190 | ||
| 79 | 2128,1363,3623,1423, 697, 100,3071, 48, 70,1231, 495,3114,2193,7345,1294,7346, # 3206 | ||
| 80 | 2079, 462, 586,1042,3246, 853, 256, 988, 185,2377,3410,1698, 434,1084,7347,3411, # 3222 | ||
| 81 | 314,2615,2775,4308,2330,2331, 569,2280, 637,1816,2518, 757,1162,1878,1616,3412, # 3238 | ||
| 82 | 287,1577,2115, 768,4309,1671,2854,3511,2519,1321,3737, 909,2413,7348,4069, 933, # 3254 | ||
| 83 | 3738,7349,2052,2356,1222,4310, 765,2414,1322, 786,4311,7350,1919,1462,1677,2895, # 3270 | ||
| 84 | 1699,7351,4312,1424,2437,3115,3624,2590,3316,1774,1940,3413,3880,4070, 309,1369, # 3286 | ||
| 85 | 1130,2812, 364,2230,1653,1299,3881,3512,3882,3883,2646, 525,1085,3021, 902,2000, # 3302 | ||
| 86 | 1475, 964,4313, 421,1844,1415,1057,2281, 940,1364,3116, 376,4314,4315,1381, 7, # 3318 | ||
| 87 | 2520, 983,2378, 336,1710,2675,1845, 321,3414, 559,1131,3022,2742,1808,1132,1313, # 3334 | ||
| 88 | 265,1481,1857,7352, 352,1203,2813,3247, 167,1089, 420,2814, 776, 792,1724,3513, # 3350 | ||
| 89 | 4071,2438,3248,7353,4072,7354, 446, 229, 333,2743, 901,3739,1200,1557,4316,2647, # 3366 | ||
| 90 | 1920, 395,2744,2676,3740,4073,1835, 125, 916,3178,2616,4317,7355,7356,3741,7357, # 3382 | ||
| 91 | 7358,7359,4318,3117,3625,1133,2547,1757,3415,1510,2313,1409,3514,7360,2145, 438, # 3398 | ||
| 92 | 2591,2896,2379,3317,1068, 958,3023, 461, 311,2855,2677,4074,1915,3179,4075,1978, # 3414 | ||
| 93 | 383, 750,2745,2617,4076, 274, 539, 385,1278,1442,7361,1154,1964, 384, 561, 210, # 3430 | ||
| 94 | 98,1295,2548,3515,7362,1711,2415,1482,3416,3884,2897,1257, 129,7363,3742, 642, # 3446 | ||
| 95 | 523,2776,2777,2648,7364, 141,2231,1333, 68, 176, 441, 876, 907,4077, 603,2592, # 3462 | ||
| 96 | 710, 171,3417, 404, 549, 18,3118,2393,1410,3626,1666,7365,3516,4319,2898,4320, # 3478 | ||
| 97 | 7366,2973, 368,7367, 146, 366, 99, 871,3627,1543, 748, 807,1586,1185, 22,2258, # 3494 | ||
| 98 | 379,3743,3180,7368,3181, 505,1941,2618,1991,1382,2314,7369, 380,2357, 218, 702, # 3510 | ||
| 99 | 1817,1248,3418,3024,3517,3318,3249,7370,2974,3628, 930,3250,3744,7371, 59,7372, # 3526 | ||
| 100 | 585, 601,4078, 497,3419,1112,1314,4321,1801,7373,1223,1472,2174,7374, 749,1836, # 3542 | ||
| 101 | 690,1899,3745,1772,3885,1476, 429,1043,1790,2232,2116, 917,4079, 447,1086,1629, # 3558 | ||
| 102 | 7375, 556,7376,7377,2020,1654, 844,1090, 105, 550, 966,1758,2815,1008,1782, 686, # 3574 | ||
| 103 | 1095,7378,2282, 793,1602,7379,3518,2593,4322,4080,2933,2297,4323,3746, 980,2496, # 3590 | ||
| 104 | 544, 353, 527,4324, 908,2678,2899,7380, 381,2619,1942,1348,7381,1341,1252, 560, # 3606 | ||
| 105 | 3072,7382,3420,2856,7383,2053, 973, 886,2080, 143,4325,7384,7385, 157,3886, 496, # 3622 | ||
| 106 | 4081, 57, 840, 540,2038,4326,4327,3421,2117,1445, 970,2259,1748,1965,2081,4082, # 3638 | ||
| 107 | 3119,1234,1775,3251,2816,3629, 773,1206,2129,1066,2039,1326,3887,1738,1725,4083, # 3654 | ||
| 108 | 279,3120, 51,1544,2594, 423,1578,2130,2066, 173,4328,1879,7386,7387,1583, 264, # 3670 | ||
| 109 | 610,3630,4329,2439, 280, 154,7388,7389,7390,1739, 338,1282,3073, 693,2857,1411, # 3686 | ||
| 110 | 1074,3747,2440,7391,4330,7392,7393,1240, 952,2394,7394,2900,1538,2679, 685,1483, # 3702 | ||
| 111 | 4084,2468,1436, 953,4085,2054,4331, 671,2395, 79,4086,2441,3252, 608, 567,2680, # 3718 | ||
| 112 | 3422,4087,4088,1691, 393,1261,1791,2396,7395,4332,7396,7397,7398,7399,1383,1672, # 3734 | ||
| 113 | 3748,3182,1464, 522,1119, 661,1150, 216, 675,4333,3888,1432,3519, 609,4334,2681, # 3750 | ||
| 114 | 2397,7400,7401,7402,4089,3025, 0,7403,2469, 315, 231,2442, 301,3319,4335,2380, # 3766 | ||
| 115 | 7404, 233,4090,3631,1818,4336,4337,7405, 96,1776,1315,2082,7406, 257,7407,1809, # 3782 | ||
| 116 | 3632,2709,1139,1819,4091,2021,1124,2163,2778,1777,2649,7408,3074, 363,1655,3183, # 3798 | ||
| 117 | 7409,2975,7410,7411,7412,3889,1567,3890, 718, 103,3184, 849,1443, 341,3320,2934, # 3814 | ||
| 118 | 1484,7413,1712, 127, 67, 339,4092,2398, 679,1412, 821,7414,7415, 834, 738, 351, # 3830 | ||
| 119 | 2976,2146, 846, 235,1497,1880, 418,1992,3749,2710, 186,1100,2147,2746,3520,1545, # 3846 | ||
| 120 | 1355,2935,2858,1377, 583,3891,4093,2573,2977,7416,1298,3633,1078,2549,3634,2358, # 3862 | ||
| 121 | 78,3750,3751, 267,1289,2099,2001,1594,4094, 348, 369,1274,2194,2175,1837,4338, # 3878 | ||
| 122 | 1820,2817,3635,2747,2283,2002,4339,2936,2748, 144,3321, 882,4340,3892,2749,3423, # 3894 | ||
| 123 | 4341,2901,7417,4095,1726, 320,7418,3893,3026, 788,2978,7419,2818,1773,1327,2859, # 3910 | ||
| 124 | 3894,2819,7420,1306,4342,2003,1700,3752,3521,2359,2650, 787,2022, 506, 824,3636, # 3926 | ||
| 125 | 534, 323,4343,1044,3322,2023,1900, 946,3424,7421,1778,1500,1678,7422,1881,4344, # 3942 | ||
| 126 | 165, 243,4345,3637,2521, 123, 683,4096, 764,4346, 36,3895,1792, 589,2902, 816, # 3958 | ||
| 127 | 626,1667,3027,2233,1639,1555,1622,3753,3896,7423,3897,2860,1370,1228,1932, 891, # 3974 | ||
| 128 | 2083,2903, 304,4097,7424, 292,2979,2711,3522, 691,2100,4098,1115,4347, 118, 662, # 3990 | ||
| 129 | 7425, 611,1156, 854,2381,1316,2861, 2, 386, 515,2904,7426,7427,3253, 868,2234, # 4006 | ||
| 130 | 1486, 855,2651, 785,2212,3028,7428,1040,3185,3523,7429,3121, 448,7430,1525,7431, # 4022 | ||
| 131 | 2164,4348,7432,3754,7433,4099,2820,3524,3122, 503, 818,3898,3123,1568, 814, 676, # 4038 | ||
| 132 | 1444, 306,1749,7434,3755,1416,1030, 197,1428, 805,2821,1501,4349,7435,7436,7437, # 4054 | ||
| 133 | 1993,7438,4350,7439,7440,2195, 13,2779,3638,2980,3124,1229,1916,7441,3756,2131, # 4070 | ||
| 134 | 7442,4100,4351,2399,3525,7443,2213,1511,1727,1120,7444,7445, 646,3757,2443, 307, # 4086 | ||
| 135 | 7446,7447,1595,3186,7448,7449,7450,3639,1113,1356,3899,1465,2522,2523,7451, 519, # 4102 | ||
| 136 | 7452, 128,2132, 92,2284,1979,7453,3900,1512, 342,3125,2196,7454,2780,2214,1980, # 4118 | ||
| 137 | 3323,7455, 290,1656,1317, 789, 827,2360,7456,3758,4352, 562, 581,3901,7457, 401, # 4134 | ||
| 138 | 4353,2248, 94,4354,1399,2781,7458,1463,2024,4355,3187,1943,7459, 828,1105,4101, # 4150 | ||
| 139 | 1262,1394,7460,4102, 605,4356,7461,1783,2862,7462,2822, 819,2101, 578,2197,2937, # 4166 | ||
| 140 | 7463,1502, 436,3254,4103,3255,2823,3902,2905,3425,3426,7464,2712,2315,7465,7466, # 4182 | ||
| 141 | 2332,2067, 23,4357, 193, 826,3759,2102, 699,1630,4104,3075, 390,1793,1064,3526, # 4198 | ||
| 142 | 7467,1579,3076,3077,1400,7468,4105,1838,1640,2863,7469,4358,4359, 137,4106, 598, # 4214 | ||
| 143 | 3078,1966, 780, 104, 974,2938,7470, 278, 899, 253, 402, 572, 504, 493,1339,7471, # 4230 | ||
| 144 | 3903,1275,4360,2574,2550,7472,3640,3029,3079,2249, 565,1334,2713, 863, 41,7473, # 4246 | ||
| 145 | 7474,4361,7475,1657,2333, 19, 463,2750,4107, 606,7476,2981,3256,1087,2084,1323, # 4262 | ||
| 146 | 2652,2982,7477,1631,1623,1750,4108,2682,7478,2864, 791,2714,2653,2334, 232,2416, # 4278 | ||
| 147 | 7479,2983,1498,7480,2654,2620, 755,1366,3641,3257,3126,2025,1609, 119,1917,3427, # 4294 | ||
| 148 | 862,1026,4109,7481,3904,3760,4362,3905,4363,2260,1951,2470,7482,1125, 817,4110, # 4310 | ||
| 149 | 4111,3906,1513,1766,2040,1487,4112,3030,3258,2824,3761,3127,7483,7484,1507,7485, # 4326 | ||
| 150 | 2683, 733, 40,1632,1106,2865, 345,4113, 841,2524, 230,4364,2984,1846,3259,3428, # 4342 | ||
| 151 | 7486,1263, 986,3429,7487, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562,3907, # 4358 | ||
| 152 | 3908,2939, 967,2751,2655,1349, 592,2133,1692,3324,2985,1994,4114,1679,3909,1901, # 4374 | ||
| 153 | 2185,7488, 739,3642,2715,1296,1290,7489,4115,2198,2199,1921,1563,2595,2551,1870, # 4390 | ||
| 154 | 2752,2986,7490, 435,7491, 343,1108, 596, 17,1751,4365,2235,3430,3643,7492,4366, # 4406 | ||
| 155 | 294,3527,2940,1693, 477, 979, 281,2041,3528, 643,2042,3644,2621,2782,2261,1031, # 4422 | ||
| 156 | 2335,2134,2298,3529,4367, 367,1249,2552,7493,3530,7494,4368,1283,3325,2004, 240, # 4438 | ||
| 157 | 1762,3326,4369,4370, 836,1069,3128, 474,7495,2148,2525, 268,3531,7496,3188,1521, # 4454 | ||
| 158 | 1284,7497,1658,1546,4116,7498,3532,3533,7499,4117,3327,2684,1685,4118, 961,1673, # 4470 | ||
| 159 | 2622, 190,2005,2200,3762,4371,4372,7500, 570,2497,3645,1490,7501,4373,2623,3260, # 4486 | ||
| 160 | 1956,4374, 584,1514, 396,1045,1944,7502,4375,1967,2444,7503,7504,4376,3910, 619, # 4502 | ||
| 161 | 7505,3129,3261, 215,2006,2783,2553,3189,4377,3190,4378, 763,4119,3763,4379,7506, # 4518 | ||
| 162 | 7507,1957,1767,2941,3328,3646,1174, 452,1477,4380,3329,3130,7508,2825,1253,2382, # 4534 | ||
| 163 | 2186,1091,2285,4120, 492,7509, 638,1169,1824,2135,1752,3911, 648, 926,1021,1324, # 4550 | ||
| 164 | 4381, 520,4382, 997, 847,1007, 892,4383,3764,2262,1871,3647,7510,2400,1784,4384, # 4566 | ||
| 165 | 1952,2942,3080,3191,1728,4121,2043,3648,4385,2007,1701,3131,1551, 30,2263,4122, # 4582 | ||
| 166 | 7511,2026,4386,3534,7512, 501,7513,4123, 594,3431,2165,1821,3535,3432,3536,3192, # 4598 | ||
| 167 | 829,2826,4124,7514,1680,3132,1225,4125,7515,3262,4387,4126,3133,2336,7516,4388, # 4614 | ||
| 168 | 4127,7517,3912,3913,7518,1847,2383,2596,3330,7519,4389, 374,3914, 652,4128,4129, # 4630 | ||
| 169 | 375,1140, 798,7520,7521,7522,2361,4390,2264, 546,1659, 138,3031,2445,4391,7523, # 4646 | ||
| 170 | 2250, 612,1848, 910, 796,3765,1740,1371, 825,3766,3767,7524,2906,2554,7525, 692, # 4662 | ||
| 171 | 444,3032,2624, 801,4392,4130,7526,1491, 244,1053,3033,4131,4132, 340,7527,3915, # 4678 | ||
| 172 | 1041,2987, 293,1168, 87,1357,7528,1539, 959,7529,2236, 721, 694,4133,3768, 219, # 4694 | ||
| 173 | 1478, 644,1417,3331,2656,1413,1401,1335,1389,3916,7530,7531,2988,2362,3134,1825, # 4710 | ||
| 174 | 730,1515, 184,2827, 66,4393,7532,1660,2943, 246,3332, 378,1457, 226,3433, 975, # 4726 | ||
| 175 | 3917,2944,1264,3537, 674, 696,7533, 163,7534,1141,2417,2166, 713,3538,3333,4394, # 4742 | ||
| 176 | 3918,7535,7536,1186, 15,7537,1079,1070,7538,1522,3193,3539, 276,1050,2716, 758, # 4758 | ||
| 177 | 1126, 653,2945,3263,7539,2337, 889,3540,3919,3081,2989, 903,1250,4395,3920,3434, # 4774 | ||
| 178 | 3541,1342,1681,1718, 766,3264, 286, 89,2946,3649,7540,1713,7541,2597,3334,2990, # 4790 | ||
| 179 | 7542,2947,2215,3194,2866,7543,4396,2498,2526, 181, 387,1075,3921, 731,2187,3335, # 4806 | ||
| 180 | 7544,3265, 310, 313,3435,2299, 770,4134, 54,3034, 189,4397,3082,3769,3922,7545, # 4822 | ||
| 181 | 1230,1617,1849, 355,3542,4135,4398,3336, 111,4136,3650,1350,3135,3436,3035,4137, # 4838 | ||
| 182 | 2149,3266,3543,7546,2784,3923,3924,2991, 722,2008,7547,1071, 247,1207,2338,2471, # 4854 | ||
| 183 | 1378,4399,2009, 864,1437,1214,4400, 373,3770,1142,2216, 667,4401, 442,2753,2555, # 4870 | ||
| 184 | 3771,3925,1968,4138,3267,1839, 837, 170,1107, 934,1336,1882,7548,7549,2118,4139, # 4886 | ||
| 185 | 2828, 743,1569,7550,4402,4140, 582,2384,1418,3437,7551,1802,7552, 357,1395,1729, # 4902 | ||
| 186 | 3651,3268,2418,1564,2237,7553,3083,3772,1633,4403,1114,2085,4141,1532,7554, 482, # 4918 | ||
| 187 | 2446,4404,7555,7556,1492, 833,1466,7557,2717,3544,1641,2829,7558,1526,1272,3652, # 4934 | ||
| 188 | 4142,1686,1794, 416,2556,1902,1953,1803,7559,3773,2785,3774,1159,2316,7560,2867, # 4950 | ||
| 189 | 4405,1610,1584,3036,2419,2754, 443,3269,1163,3136,7561,7562,3926,7563,4143,2499, # 4966 | ||
| 190 | 3037,4406,3927,3137,2103,1647,3545,2010,1872,4144,7564,4145, 431,3438,7565, 250, # 4982 | ||
| 191 | 97, 81,4146,7566,1648,1850,1558, 160, 848,7567, 866, 740,1694,7568,2201,2830, # 4998 | ||
| 192 | 3195,4147,4407,3653,1687, 950,2472, 426, 469,3196,3654,3655,3928,7569,7570,1188, # 5014 | ||
| 193 | 424,1995, 861,3546,4148,3775,2202,2685, 168,1235,3547,4149,7571,2086,1674,4408, # 5030 | ||
| 194 | 3337,3270, 220,2557,1009,7572,3776, 670,2992, 332,1208, 717,7573,7574,3548,2447, # 5046 | ||
| 195 | 3929,3338,7575, 513,7576,1209,2868,3339,3138,4409,1080,7577,7578,7579,7580,2527, # 5062 | ||
| 196 | 3656,3549, 815,1587,3930,3931,7581,3550,3439,3777,1254,4410,1328,3038,1390,3932, # 5078 | ||
| 197 | 1741,3933,3778,3934,7582, 236,3779,2448,3271,7583,7584,3657,3780,1273,3781,4411, # 5094 | ||
| 198 | 7585, 308,7586,4412, 245,4413,1851,2473,1307,2575, 430, 715,2136,2449,7587, 270, # 5110 | ||
| 199 | 199,2869,3935,7588,3551,2718,1753, 761,1754, 725,1661,1840,4414,3440,3658,7589, # 5126 | ||
| 200 | 7590, 587, 14,3272, 227,2598, 326, 480,2265, 943,2755,3552, 291, 650,1883,7591, # 5142 | ||
| 201 | 1702,1226, 102,1547, 62,3441, 904,4415,3442,1164,4150,7592,7593,1224,1548,2756, # 5158 | ||
| 202 | 391, 498,1493,7594,1386,1419,7595,2055,1177,4416, 813, 880,1081,2363, 566,1145, # 5174 | ||
| 203 | 4417,2286,1001,1035,2558,2599,2238, 394,1286,7596,7597,2068,7598, 86,1494,1730, # 5190 | ||
| 204 | 3936, 491,1588, 745, 897,2948, 843,3340,3937,2757,2870,3273,1768, 998,2217,2069, # 5206 | ||
| 205 | 397,1826,1195,1969,3659,2993,3341, 284,7599,3782,2500,2137,2119,1903,7600,3938, # 5222 | ||
| 206 | 2150,3939,4151,1036,3443,1904, 114,2559,4152, 209,1527,7601,7602,2949,2831,2625, # 5238 | ||
| 207 | 2385,2719,3139, 812,2560,7603,3274,7604,1559, 737,1884,3660,1210, 885, 28,2686, # 5254 | ||
| 208 | 3553,3783,7605,4153,1004,1779,4418,7606, 346,1981,2218,2687,4419,3784,1742, 797, # 5270 | ||
| 209 | 1642,3940,1933,1072,1384,2151, 896,3941,3275,3661,3197,2871,3554,7607,2561,1958, # 5286 | ||
| 210 | 4420,2450,1785,7608,7609,7610,3942,4154,1005,1308,3662,4155,2720,4421,4422,1528, # 5302 | ||
| 211 | 2600, 161,1178,4156,1982, 987,4423,1101,4157, 631,3943,1157,3198,2420,1343,1241, # 5318 | ||
| 212 | 1016,2239,2562, 372, 877,2339,2501,1160, 555,1934, 911,3944,7611, 466,1170, 169, # 5334 | ||
| 213 | 1051,2907,2688,3663,2474,2994,1182,2011,2563,1251,2626,7612, 992,2340,3444,1540, # 5350 | ||
| 214 | 2721,1201,2070,2401,1996,2475,7613,4424, 528,1922,2188,1503,1873,1570,2364,3342, # 5366 | ||
| 215 | 3276,7614, 557,1073,7615,1827,3445,2087,2266,3140,3039,3084, 767,3085,2786,4425, # 5382 | ||
| 216 | 1006,4158,4426,2341,1267,2176,3664,3199, 778,3945,3200,2722,1597,2657,7616,4427, # 5398 | ||
| 217 | 7617,3446,7618,7619,7620,3277,2689,1433,3278, 131, 95,1504,3946, 723,4159,3141, # 5414 | ||
| 218 | 1841,3555,2758,2189,3947,2027,2104,3665,7621,2995,3948,1218,7622,3343,3201,3949, # 5430 | ||
| 219 | 4160,2576, 248,1634,3785, 912,7623,2832,3666,3040,3786, 654, 53,7624,2996,7625, # 5446 | ||
| 220 | 1688,4428, 777,3447,1032,3950,1425,7626, 191, 820,2120,2833, 971,4429, 931,3202, # 5462 | ||
| 221 | 135, 664, 783,3787,1997, 772,2908,1935,3951,3788,4430,2909,3203, 282,2723, 640, # 5478 | ||
| 222 | 1372,3448,1127, 922, 325,3344,7627,7628, 711,2044,7629,7630,3952,2219,2787,1936, # 5494 | ||
| 223 | 3953,3345,2220,2251,3789,2300,7631,4431,3790,1258,3279,3954,3204,2138,2950,3955, # 5510 | ||
| 224 | 3956,7632,2221, 258,3205,4432, 101,1227,7633,3280,1755,7634,1391,3281,7635,2910, # 5526 | ||
| 225 | 2056, 893,7636,7637,7638,1402,4161,2342,7639,7640,3206,3556,7641,7642, 878,1325, # 5542 | ||
| 226 | 1780,2788,4433, 259,1385,2577, 744,1183,2267,4434,7643,3957,2502,7644, 684,1024, # 5558 | ||
| 227 | 4162,7645, 472,3557,3449,1165,3282,3958,3959, 322,2152, 881, 455,1695,1152,1340, # 5574 | ||
| 228 | 660, 554,2153,4435,1058,4436,4163, 830,1065,3346,3960,4437,1923,7646,1703,1918, # 5590 | ||
| 229 | 7647, 932,2268, 122,7648,4438, 947, 677,7649,3791,2627, 297,1905,1924,2269,4439, # 5606 | ||
| 230 | 2317,3283,7650,7651,4164,7652,4165, 84,4166, 112, 989,7653, 547,1059,3961, 701, # 5622 | ||
| 231 | 3558,1019,7654,4167,7655,3450, 942, 639, 457,2301,2451, 993,2951, 407, 851, 494, # 5638 | ||
| 232 | 4440,3347, 927,7656,1237,7657,2421,3348, 573,4168, 680, 921,2911,1279,1874, 285, # 5654 | ||
| 233 | 790,1448,1983, 719,2167,7658,7659,4441,3962,3963,1649,7660,1541, 563,7661,1077, # 5670 | ||
| 234 | 7662,3349,3041,3451, 511,2997,3964,3965,3667,3966,1268,2564,3350,3207,4442,4443, # 5686 | ||
| 235 | 7663, 535,1048,1276,1189,2912,2028,3142,1438,1373,2834,2952,1134,2012,7664,4169, # 5702 | ||
| 236 | 1238,2578,3086,1259,7665, 700,7666,2953,3143,3668,4170,7667,4171,1146,1875,1906, # 5718 | ||
| 237 | 4444,2601,3967, 781,2422, 132,1589, 203, 147, 273,2789,2402, 898,1786,2154,3968, # 5734 | ||
| 238 | 3969,7668,3792,2790,7669,7670,4445,4446,7671,3208,7672,1635,3793, 965,7673,1804, # 5750 | ||
| 239 | 2690,1516,3559,1121,1082,1329,3284,3970,1449,3794, 65,1128,2835,2913,2759,1590, # 5766 | ||
| 240 | 3795,7674,7675, 12,2658, 45, 976,2579,3144,4447, 517,2528,1013,1037,3209,7676, # 5782 | ||
| 241 | 3796,2836,7677,3797,7678,3452,7679,2602, 614,1998,2318,3798,3087,2724,2628,7680, # 5798 | ||
| 242 | 2580,4172, 599,1269,7681,1810,3669,7682,2691,3088, 759,1060, 489,1805,3351,3285, # 5814 | ||
| 243 | 1358,7683,7684,2386,1387,1215,2629,2252, 490,7685,7686,4173,1759,2387,2343,7687, # 5830 | ||
| 244 | 4448,3799,1907,3971,2630,1806,3210,4449,3453,3286,2760,2344, 874,7688,7689,3454, # 5846 | ||
| 245 | 3670,1858, 91,2914,3671,3042,3800,4450,7690,3145,3972,2659,7691,3455,1202,1403, # 5862 | ||
| 246 | 3801,2954,2529,1517,2503,4451,3456,2504,7692,4452,7693,2692,1885,1495,1731,3973, # 5878 | ||
| 247 | 2365,4453,7694,2029,7695,7696,3974,2693,1216, 237,2581,4174,2319,3975,3802,4454, # 5894 | ||
| 248 | 4455,2694,3560,3457, 445,4456,7697,7698,7699,7700,2761, 61,3976,3672,1822,3977, # 5910 | ||
| 249 | 7701, 687,2045, 935, 925, 405,2660, 703,1096,1859,2725,4457,3978,1876,1367,2695, # 5926 | ||
| 250 | 3352, 918,2105,1781,2476, 334,3287,1611,1093,4458, 564,3146,3458,3673,3353, 945, # 5942 | ||
| 251 | 2631,2057,4459,7702,1925, 872,4175,7703,3459,2696,3089, 349,4176,3674,3979,4460, # 5958 | ||
| 252 | 3803,4177,3675,2155,3980,4461,4462,4178,4463,2403,2046, 782,3981, 400, 251,4179, # 5974 | ||
| 253 | 1624,7704,7705, 277,3676, 299,1265, 476,1191,3804,2121,4180,4181,1109, 205,7706, # 5990 | ||
| 254 | 2582,1000,2156,3561,1860,7707,7708,7709,4464,7710,4465,2565, 107,2477,2157,3982, # 6006 | ||
| 255 | 3460,3147,7711,1533, 541,1301, 158, 753,4182,2872,3562,7712,1696, 370,1088,4183, # 6022 | ||
| 256 | 4466,3563, 579, 327, 440, 162,2240, 269,1937,1374,3461, 968,3043, 56,1396,3090, # 6038 | ||
| 257 | 2106,3288,3354,7713,1926,2158,4467,2998,7714,3564,7715,7716,3677,4468,2478,7717, # 6054 | ||
| 258 | 2791,7718,1650,4469,7719,2603,7720,7721,3983,2661,3355,1149,3356,3984,3805,3985, # 6070 | ||
| 259 | 7722,1076, 49,7723, 951,3211,3289,3290, 450,2837, 920,7724,1811,2792,2366,4184, # 6086 | ||
| 260 | 1908,1138,2367,3806,3462,7725,3212,4470,1909,1147,1518,2423,4471,3807,7726,4472, # 6102 | ||
| 261 | 2388,2604, 260,1795,3213,7727,7728,3808,3291, 708,7729,3565,1704,7730,3566,1351, # 6118 | ||
| 262 | 1618,3357,2999,1886, 944,4185,3358,4186,3044,3359,4187,7731,3678, 422, 413,1714, # 6134 | ||
| 263 | 3292, 500,2058,2345,4188,2479,7732,1344,1910, 954,7733,1668,7734,7735,3986,2404, # 6150 | ||
| 264 | 4189,3567,3809,4190,7736,2302,1318,2505,3091, 133,3092,2873,4473, 629, 31,2838, # 6166 | ||
| 265 | 2697,3810,4474, 850, 949,4475,3987,2955,1732,2088,4191,1496,1852,7737,3988, 620, # 6182 | ||
| 266 | 3214, 981,1242,3679,3360,1619,3680,1643,3293,2139,2452,1970,1719,3463,2168,7738, # 6198 | ||
| 267 | 3215,7739,7740,3361,1828,7741,1277,4476,1565,2047,7742,1636,3568,3093,7743, 869, # 6214 | ||
| 268 | 2839, 655,3811,3812,3094,3989,3000,3813,1310,3569,4477,7744,7745,7746,1733, 558, # 6230 | ||
| 269 | 4478,3681, 335,1549,3045,1756,4192,3682,1945,3464,1829,1291,1192, 470,2726,2107, # 6246 | ||
| 270 | 2793, 913,1054,3990,7747,1027,7748,3046,3991,4479, 982,2662,3362,3148,3465,3216, # 6262 | ||
| 271 | 3217,1946,2794,7749, 571,4480,7750,1830,7751,3570,2583,1523,2424,7752,2089, 984, # 6278 | ||
| 272 | 4481,3683,1959,7753,3684, 852, 923,2795,3466,3685, 969,1519, 999,2048,2320,1705, # 6294 | ||
| 273 | 7754,3095, 615,1662, 151, 597,3992,2405,2321,1049, 275,4482,3686,4193, 568,3687, # 6310 | ||
| 274 | 3571,2480,4194,3688,7755,2425,2270, 409,3218,7756,1566,2874,3467,1002, 769,2840, # 6326 | ||
| 275 | 194,2090,3149,3689,2222,3294,4195, 628,1505,7757,7758,1763,2177,3001,3993, 521, # 6342 | ||
| 276 | 1161,2584,1787,2203,2406,4483,3994,1625,4196,4197, 412, 42,3096, 464,7759,2632, # 6358 | ||
| 277 | 4484,3363,1760,1571,2875,3468,2530,1219,2204,3814,2633,2140,2368,4485,4486,3295, # 6374 | ||
| 278 | 1651,3364,3572,7760,7761,3573,2481,3469,7762,3690,7763,7764,2271,2091, 460,7765, # 6390 | ||
| 279 | 4487,7766,3002, 962, 588,3574, 289,3219,2634,1116, 52,7767,3047,1796,7768,7769, # 6406 | ||
| 280 | 7770,1467,7771,1598,1143,3691,4198,1984,1734,1067,4488,1280,3365, 465,4489,1572, # 6422 | ||
| 281 | 510,7772,1927,2241,1812,1644,3575,7773,4490,3692,7774,7775,2663,1573,1534,7776, # 6438 | ||
| 282 | 7777,4199, 536,1807,1761,3470,3815,3150,2635,7778,7779,7780,4491,3471,2915,1911, # 6454 | ||
| 283 | 2796,7781,3296,1122, 377,3220,7782, 360,7783,7784,4200,1529, 551,7785,2059,3693, # 6470 | ||
| 284 | 1769,2426,7786,2916,4201,3297,3097,2322,2108,2030,4492,1404, 136,1468,1479, 672, # 6486 | ||
| 285 | 1171,3221,2303, 271,3151,7787,2762,7788,2049, 678,2727, 865,1947,4493,7789,2013, # 6502 | ||
| 286 | 3995,2956,7790,2728,2223,1397,3048,3694,4494,4495,1735,2917,3366,3576,7791,3816, # 6518 | ||
| 287 | 509,2841,2453,2876,3817,7792,7793,3152,3153,4496,4202,2531,4497,2304,1166,1010, # 6534 | ||
| 288 | 552, 681,1887,7794,7795,2957,2958,3996,1287,1596,1861,3154, 358, 453, 736, 175, # 6550 | ||
| 289 | 478,1117, 905,1167,1097,7796,1853,1530,7797,1706,7798,2178,3472,2287,3695,3473, # 6566 | ||
| 290 | 3577,4203,2092,4204,7799,3367,1193,2482,4205,1458,2190,2205,1862,1888,1421,3298, # 6582 | ||
| 291 | 2918,3049,2179,3474, 595,2122,7800,3997,7801,7802,4206,1707,2636, 223,3696,1359, # 6598 | ||
| 292 | 751,3098, 183,3475,7803,2797,3003, 419,2369, 633, 704,3818,2389, 241,7804,7805, # 6614 | ||
| 293 | 7806, 838,3004,3697,2272,2763,2454,3819,1938,2050,3998,1309,3099,2242,1181,7807, # 6630 | ||
| 294 | 1136,2206,3820,2370,1446,4207,2305,4498,7808,7809,4208,1055,2605, 484,3698,7810, # 6646 | ||
| 295 | 3999, 625,4209,2273,3368,1499,4210,4000,7811,4001,4211,3222,2274,2275,3476,7812, # 6662 | ||
| 296 | 7813,2764, 808,2606,3699,3369,4002,4212,3100,2532, 526,3370,3821,4213, 955,7814, # 6678 | ||
| 297 | 1620,4214,2637,2427,7815,1429,3700,1669,1831, 994, 928,7816,3578,1260,7817,7818, # 6694 | ||
| 298 | 7819,1948,2288, 741,2919,1626,4215,2729,2455, 867,1184, 362,3371,1392,7820,7821, # 6710 | ||
| 299 | 4003,4216,1770,1736,3223,2920,4499,4500,1928,2698,1459,1158,7822,3050,3372,2877, # 6726 | ||
| 300 | 1292,1929,2506,2842,3701,1985,1187,2071,2014,2607,4217,7823,2566,2507,2169,3702, # 6742 | ||
| 301 | 2483,3299,7824,3703,4501,7825,7826, 666,1003,3005,1022,3579,4218,7827,4502,1813, # 6758 | ||
| 302 | 2253, 574,3822,1603, 295,1535, 705,3823,4219, 283, 858, 417,7828,7829,3224,4503, # 6774 | ||
| 303 | 4504,3051,1220,1889,1046,2276,2456,4004,1393,1599, 689,2567, 388,4220,7830,2484, # 6790 | ||
| 304 | 802,7831,2798,3824,2060,1405,2254,7832,4505,3825,2109,1052,1345,3225,1585,7833, # 6806 | ||
| 305 | 809,7834,7835,7836, 575,2730,3477, 956,1552,1469,1144,2323,7837,2324,1560,2457, # 6822 | ||
| 306 | 3580,3226,4005, 616,2207,3155,2180,2289,7838,1832,7839,3478,4506,7840,1319,3704, # 6838 | ||
| 307 | 3705,1211,3581,1023,3227,1293,2799,7841,7842,7843,3826, 607,2306,3827, 762,2878, # 6854 | ||
| 308 | 1439,4221,1360,7844,1485,3052,7845,4507,1038,4222,1450,2061,2638,4223,1379,4508, # 6870 | ||
| 309 | 2585,7846,7847,4224,1352,1414,2325,2921,1172,7848,7849,3828,3829,7850,1797,1451, # 6886 | ||
| 310 | 7851,7852,7853,7854,2922,4006,4007,2485,2346, 411,4008,4009,3582,3300,3101,4509, # 6902 | ||
| 311 | 1561,2664,1452,4010,1375,7855,7856, 47,2959, 316,7857,1406,1591,2923,3156,7858, # 6918 | ||
| 312 | 1025,2141,3102,3157, 354,2731, 884,2224,4225,2407, 508,3706, 726,3583, 996,2428, # 6934 | ||
| 313 | 3584, 729,7859, 392,2191,1453,4011,4510,3707,7860,7861,2458,3585,2608,1675,2800, # 6950 | ||
| 314 | 919,2347,2960,2348,1270,4511,4012, 73,7862,7863, 647,7864,3228,2843,2255,1550, # 6966 | ||
| 315 | 1346,3006,7865,1332, 883,3479,7866,7867,7868,7869,3301,2765,7870,1212, 831,1347, # 6982 | ||
| 316 | 4226,4512,2326,3830,1863,3053, 720,3831,4513,4514,3832,7871,4227,7872,7873,4515, # 6998 | ||
| 317 | 7874,7875,1798,4516,3708,2609,4517,3586,1645,2371,7876,7877,2924, 669,2208,2665, # 7014 | ||
| 318 | 2429,7878,2879,7879,7880,1028,3229,7881,4228,2408,7882,2256,1353,7883,7884,4518, # 7030 | ||
| 319 | 3158, 518,7885,4013,7886,4229,1960,7887,2142,4230,7888,7889,3007,2349,2350,3833, # 7046 | ||
| 320 | 516,1833,1454,4014,2699,4231,4519,2225,2610,1971,1129,3587,7890,2766,7891,2961, # 7062 | ||
| 321 | 1422, 577,1470,3008,1524,3373,7892,7893, 432,4232,3054,3480,7894,2586,1455,2508, # 7078 | ||
| 322 | 2226,1972,1175,7895,1020,2732,4015,3481,4520,7896,2733,7897,1743,1361,3055,3482, # 7094 | ||
| 323 | 2639,4016,4233,4521,2290, 895, 924,4234,2170, 331,2243,3056, 166,1627,3057,1098, # 7110 | ||
| 324 | 7898,1232,2880,2227,3374,4522, 657, 403,1196,2372, 542,3709,3375,1600,4235,3483, # 7126 | ||
| 325 | 7899,4523,2767,3230, 576, 530,1362,7900,4524,2533,2666,3710,4017,7901, 842,3834, # 7142 | ||
| 326 | 7902,2801,2031,1014,4018, 213,2700,3376, 665, 621,4236,7903,3711,2925,2430,7904, # 7158 | ||
| 327 | 2431,3302,3588,3377,7905,4237,2534,4238,4525,3589,1682,4239,3484,1380,7906, 724, # 7174 | ||
| 328 | 2277, 600,1670,7907,1337,1233,4526,3103,2244,7908,1621,4527,7909, 651,4240,7910, # 7190 | ||
| 329 | 1612,4241,2611,7911,2844,7912,2734,2307,3058,7913, 716,2459,3059, 174,1255,2701, # 7206 | ||
| 330 | 4019,3590, 548,1320,1398, 728,4020,1574,7914,1890,1197,3060,4021,7915,3061,3062, # 7222 | ||
| 331 | 3712,3591,3713, 747,7916, 635,4242,4528,7917,7918,7919,4243,7920,7921,4529,7922, # 7238 | ||
| 332 | 3378,4530,2432, 451,7923,3714,2535,2072,4244,2735,4245,4022,7924,1764,4531,7925, # 7254 | ||
| 333 | 4246, 350,7926,2278,2390,2486,7927,4247,4023,2245,1434,4024, 488,4532, 458,4248, # 7270 | ||
| 334 | 4025,3715, 771,1330,2391,3835,2568,3159,2159,2409,1553,2667,3160,4249,7928,2487, # 7286 | ||
| 335 | 2881,2612,1720,2702,4250,3379,4533,7929,2536,4251,7930,3231,4252,2768,7931,2015, # 7302 | ||
| 336 | 2736,7932,1155,1017,3716,3836,7933,3303,2308, 201,1864,4253,1430,7934,4026,7935, # 7318 | ||
| 337 | 7936,7937,7938,7939,4254,1604,7940, 414,1865, 371,2587,4534,4535,3485,2016,3104, # 7334 | ||
| 338 | 4536,1708, 960,4255, 887, 389,2171,1536,1663,1721,7941,2228,4027,2351,2926,1580, # 7350 | ||
| 339 | 7942,7943,7944,1744,7945,2537,4537,4538,7946,4539,7947,2073,7948,7949,3592,3380, # 7366 | ||
| 340 | 2882,4256,7950,4257,2640,3381,2802, 673,2703,2460, 709,3486,4028,3593,4258,7951, # 7382 | ||
| 341 | 1148, 502, 634,7952,7953,1204,4540,3594,1575,4541,2613,3717,7954,3718,3105, 948, # 7398 | ||
| 342 | 3232, 121,1745,3837,1110,7955,4259,3063,2509,3009,4029,3719,1151,1771,3838,1488, # 7414 | ||
| 343 | 4030,1986,7956,2433,3487,7957,7958,2093,7959,4260,3839,1213,1407,2803, 531,2737, # 7430 | ||
| 344 | 2538,3233,1011,1537,7960,2769,4261,3106,1061,7961,3720,3721,1866,2883,7962,2017, # 7446 | ||
| 345 | 120,4262,4263,2062,3595,3234,2309,3840,2668,3382,1954,4542,7963,7964,3488,1047, # 7462 | ||
| 346 | 2704,1266,7965,1368,4543,2845, 649,3383,3841,2539,2738,1102,2846,2669,7966,7967, # 7478 | ||
| 347 | 1999,7968,1111,3596,2962,7969,2488,3842,3597,2804,1854,3384,3722,7970,7971,3385, # 7494 | ||
| 348 | 2410,2884,3304,3235,3598,7972,2569,7973,3599,2805,4031,1460, 856,7974,3600,7975, # 7510 | ||
| 349 | 2885,2963,7976,2886,3843,7977,4264, 632,2510, 875,3844,1697,3845,2291,7978,7979, # 7526 | ||
| 350 | 4544,3010,1239, 580,4545,4265,7980, 914, 936,2074,1190,4032,1039,2123,7981,7982, # 7542 | ||
| 351 | 7983,3386,1473,7984,1354,4266,3846,7985,2172,3064,4033, 915,3305,4267,4268,3306, # 7558 | ||
| 352 | 1605,1834,7986,2739, 398,3601,4269,3847,4034, 328,1912,2847,4035,3848,1331,4270, # 7574 | ||
| 353 | 3011, 937,4271,7987,3602,4036,4037,3387,2160,4546,3388, 524, 742, 538,3065,1012, # 7590 | ||
| 354 | 7988,7989,3849,2461,7990, 658,1103, 225,3850,7991,7992,4547,7993,4548,7994,3236, # 7606 | ||
| 355 | 1243,7995,4038, 963,2246,4549,7996,2705,3603,3161,7997,7998,2588,2327,7999,4550, # 7622 | ||
| 356 | 8000,8001,8002,3489,3307, 957,3389,2540,2032,1930,2927,2462, 870,2018,3604,1746, # 7638 | ||
| 357 | 2770,2771,2434,2463,8003,3851,8004,3723,3107,3724,3490,3390,3725,8005,1179,3066, # 7654 | ||
| 358 | 8006,3162,2373,4272,3726,2541,3163,3108,2740,4039,8007,3391,1556,2542,2292, 977, # 7670 | ||
| 359 | 2887,2033,4040,1205,3392,8008,1765,3393,3164,2124,1271,1689, 714,4551,3491,8009, # 7686 | ||
| 360 | 2328,3852, 533,4273,3605,2181, 617,8010,2464,3308,3492,2310,8011,8012,3165,8013, # 7702 | ||
| 361 | 8014,3853,1987, 618, 427,2641,3493,3394,8015,8016,1244,1690,8017,2806,4274,4552, # 7718 | ||
| 362 | 8018,3494,8019,8020,2279,1576, 473,3606,4275,3395, 972,8021,3607,8022,3067,8023, # 7734 | ||
| 363 | 8024,4553,4554,8025,3727,4041,4042,8026, 153,4555, 356,8027,1891,2888,4276,2143, # 7750 | ||
| 364 | 408, 803,2352,8028,3854,8029,4277,1646,2570,2511,4556,4557,3855,8030,3856,4278, # 7766 | ||
| 365 | 8031,2411,3396, 752,8032,8033,1961,2964,8034, 746,3012,2465,8035,4279,3728, 698, # 7782 | ||
| 366 | 4558,1892,4280,3608,2543,4559,3609,3857,8036,3166,3397,8037,1823,1302,4043,2706, # 7798 | ||
| 367 | 3858,1973,4281,8038,4282,3167, 823,1303,1288,1236,2848,3495,4044,3398, 774,3859, # 7814 | ||
| 368 | 8039,1581,4560,1304,2849,3860,4561,8040,2435,2161,1083,3237,4283,4045,4284, 344, # 7830 | ||
| 369 | 1173, 288,2311, 454,1683,8041,8042,1461,4562,4046,2589,8043,8044,4563, 985, 894, # 7846 | ||
| 370 | 8045,3399,3168,8046,1913,2928,3729,1988,8047,2110,1974,8048,4047,8049,2571,1194, # 7862 | ||
| 371 | 425,8050,4564,3169,1245,3730,4285,8051,8052,2850,8053, 636,4565,1855,3861, 760, # 7878 | ||
| 372 | 1799,8054,4286,2209,1508,4566,4048,1893,1684,2293,8055,8056,8057,4287,4288,2210, # 7894 | ||
| 373 | 479,8058,8059, 832,8060,4049,2489,8061,2965,2490,3731, 990,3109, 627,1814,2642, # 7910 | ||
| 374 | 4289,1582,4290,2125,2111,3496,4567,8062, 799,4291,3170,8063,4568,2112,1737,3013, # 7926 | ||
| 375 | 1018, 543, 754,4292,3309,1676,4569,4570,4050,8064,1489,8065,3497,8066,2614,2889, # 7942 | ||
| 376 | 4051,8067,8068,2966,8069,8070,8071,8072,3171,4571,4572,2182,1722,8073,3238,3239, # 7958 | ||
| 377 | 1842,3610,1715, 481, 365,1975,1856,8074,8075,1962,2491,4573,8076,2126,3611,3240, # 7974 | ||
| 378 | 433,1894,2063,2075,8077, 602,2741,8078,8079,8080,8081,8082,3014,1628,3400,8083, # 7990 | ||
| 379 | 3172,4574,4052,2890,4575,2512,8084,2544,2772,8085,8086,8087,3310,4576,2891,8088, # 8006 | ||
| 380 | 4577,8089,2851,4578,4579,1221,2967,4053,2513,8090,8091,8092,1867,1989,8093,8094, # 8022 | ||
| 381 | 8095,1895,8096,8097,4580,1896,4054, 318,8098,2094,4055,4293,8099,8100, 485,8101, # 8038 | ||
| 382 | 938,3862, 553,2670, 116,8102,3863,3612,8103,3498,2671,2773,3401,3311,2807,8104, # 8054 | ||
| 383 | 3613,2929,4056,1747,2930,2968,8105,8106, 207,8107,8108,2672,4581,2514,8109,3015, # 8070 | ||
| 384 | 890,3614,3864,8110,1877,3732,3402,8111,2183,2353,3403,1652,8112,8113,8114, 941, # 8086 | ||
| 385 | 2294, 208,3499,4057,2019, 330,4294,3865,2892,2492,3733,4295,8115,8116,8117,8118, # 8102 | ||
| 386 | ) | ||
| 387 | |||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/euctwprober.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/euctwprober.py new file mode 100644 index 0000000..7dbc136 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/euctwprober.py | |||
| @@ -0,0 +1,46 @@ | |||
| 1 | ######################## BEGIN LICENSE BLOCK ######################## | ||
| 2 | # The Original Code is mozilla.org code. | ||
| 3 | # | ||
| 4 | # The Initial Developer of the Original Code is | ||
| 5 | # Netscape Communications Corporation. | ||
| 6 | # Portions created by the Initial Developer are Copyright (C) 1998 | ||
| 7 | # the Initial Developer. All Rights Reserved. | ||
| 8 | # | ||
| 9 | # Contributor(s): | ||
| 10 | # Mark Pilgrim - port to Python | ||
| 11 | # | ||
| 12 | # This library is free software; you can redistribute it and/or | ||
| 13 | # modify it under the terms of the GNU Lesser General Public | ||
| 14 | # License as published by the Free Software Foundation; either | ||
| 15 | # version 2.1 of the License, or (at your option) any later version. | ||
| 16 | # | ||
| 17 | # This library is distributed in the hope that it will be useful, | ||
| 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
| 20 | # Lesser General Public License for more details. | ||
| 21 | # | ||
| 22 | # You should have received a copy of the GNU Lesser General Public | ||
| 23 | # License along with this library; if not, write to the Free Software | ||
| 24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA | ||
| 25 | # 02110-1301 USA | ||
| 26 | ######################### END LICENSE BLOCK ######################### | ||
| 27 | |||
| 28 | from .mbcharsetprober import MultiByteCharSetProber | ||
| 29 | from .codingstatemachine import CodingStateMachine | ||
| 30 | from .chardistribution import EUCTWDistributionAnalysis | ||
| 31 | from .mbcssm import EUCTW_SM_MODEL | ||
| 32 | |||
| 33 | class EUCTWProber(MultiByteCharSetProber): | ||
| 34 | def __init__(self): | ||
| 35 | super(EUCTWProber, self).__init__() | ||
| 36 | self.coding_sm = CodingStateMachine(EUCTW_SM_MODEL) | ||
| 37 | self.distribution_analyzer = EUCTWDistributionAnalysis() | ||
| 38 | self.reset() | ||
| 39 | |||
| 40 | @property | ||
| 41 | def charset_name(self): | ||
| 42 | return "EUC-TW" | ||
| 43 | |||
| 44 | @property | ||
| 45 | def language(self): | ||
| 46 | return "Taiwan" | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/gb2312freq.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/gb2312freq.py new file mode 100644 index 0000000..a0167b3 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/gb2312freq.py | |||
| @@ -0,0 +1,283 @@ | |||
| 1 | ######################## BEGIN LICENSE BLOCK ######################## | ||
| 2 | # The Original Code is Mozilla Communicator client code. | ||
| 3 | # | ||
| 4 | # The Initial Developer of the Original Code is | ||
| 5 | # Netscape Communications Corporation. | ||
| 6 | # Portions created by the Initial Developer are Copyright (C) 1998 | ||
| 7 | # the Initial Developer. All Rights Reserved. | ||
| 8 | # | ||
| 9 | # Contributor(s): | ||
| 10 | # Mark Pilgrim - port to Python | ||
| 11 | # | ||
| 12 | # This library is free software; you can redistribute it and/or | ||
| 13 | # modify it under the terms of the GNU Lesser General Public | ||
| 14 | # License as published by the Free Software Foundation; either | ||
| 15 | # version 2.1 of the License, or (at your option) any later version. | ||
| 16 | # | ||
| 17 | # This library is distributed in the hope that it will be useful, | ||
| 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
| 20 | # Lesser General Public License for more details. | ||
| 21 | # | ||
| 22 | # You should have received a copy of the GNU Lesser General Public | ||
| 23 | # License along with this library; if not, write to the Free Software | ||
| 24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA | ||
| 25 | # 02110-1301 USA | ||
| 26 | ######################### END LICENSE BLOCK ######################### | ||
| 27 | |||
| 28 | # GB2312 most frequently used character table | ||
| 29 | # | ||
| 30 | # Char to FreqOrder table, from hz6763 | ||
| 31 | |||
| 32 | # 512 --> 0.79 -- 0.79 | ||
| 33 | # 1024 --> 0.92 -- 0.13 | ||
| 34 | # 2048 --> 0.98 -- 0.06 | ||
| 35 | # 6768 --> 1.00 -- 0.02 | ||
| 36 | # | ||
| 37 | # Ideal Distribution Ratio = 0.79135/(1-0.79135) = 3.79 | ||
| 38 | # Random Distribution Ratio = 512 / (3755 - 512) = 0.157 | ||
| 39 | # | ||
| 40 | # Typical Distribution Ratio is about 25% of the Ideal one, still much higher than the RDR | ||
| 41 | |||
| 42 | GB2312_TYPICAL_DISTRIBUTION_RATIO = 0.9 | ||
| 43 | |||
| 44 | GB2312_TABLE_SIZE = 3760 | ||
| 45 | |||
| 46 | GB2312_CHAR_TO_FREQ_ORDER = ( | ||
| 47 | 1671, 749,1443,2364,3924,3807,2330,3921,1704,3463,2691,1511,1515, 572,3191,2205, | ||
| 48 | 2361, 224,2558, 479,1711, 963,3162, 440,4060,1905,2966,2947,3580,2647,3961,3842, | ||
| 49 | 2204, 869,4207, 970,2678,5626,2944,2956,1479,4048, 514,3595, 588,1346,2820,3409, | ||
| 50 | 249,4088,1746,1873,2047,1774, 581,1813, 358,1174,3590,1014,1561,4844,2245, 670, | ||
| 51 | 1636,3112, 889,1286, 953, 556,2327,3060,1290,3141, 613, 185,3477,1367, 850,3820, | ||
| 52 | 1715,2428,2642,2303,2732,3041,2562,2648,3566,3946,1349, 388,3098,2091,1360,3585, | ||
| 53 | 152,1687,1539, 738,1559, 59,1232,2925,2267,1388,1249,1741,1679,2960, 151,1566, | ||
| 54 | 1125,1352,4271, 924,4296, 385,3166,4459, 310,1245,2850, 70,3285,2729,3534,3575, | ||
| 55 | 2398,3298,3466,1960,2265, 217,3647, 864,1909,2084,4401,2773,1010,3269,5152, 853, | ||
| 56 | 3051,3121,1244,4251,1895, 364,1499,1540,2313,1180,3655,2268, 562, 715,2417,3061, | ||
| 57 | 544, 336,3768,2380,1752,4075, 950, 280,2425,4382, 183,2759,3272, 333,4297,2155, | ||
| 58 | 1688,2356,1444,1039,4540, 736,1177,3349,2443,2368,2144,2225, 565, 196,1482,3406, | ||
| 59 | 927,1335,4147, 692, 878,1311,1653,3911,3622,1378,4200,1840,2969,3149,2126,1816, | ||
| 60 | 2534,1546,2393,2760, 737,2494, 13, 447, 245,2747, 38,2765,2129,2589,1079, 606, | ||
| 61 | 360, 471,3755,2890, 404, 848, 699,1785,1236, 370,2221,1023,3746,2074,2026,2023, | ||
| 62 | 2388,1581,2119, 812,1141,3091,2536,1519, 804,2053, 406,1596,1090, 784, 548,4414, | ||
| 63 | 1806,2264,2936,1100, 343,4114,5096, 622,3358, 743,3668,1510,1626,5020,3567,2513, | ||
| 64 | 3195,4115,5627,2489,2991, 24,2065,2697,1087,2719, 48,1634, 315, 68, 985,2052, | ||
| 65 | 198,2239,1347,1107,1439, 597,2366,2172, 871,3307, 919,2487,2790,1867, 236,2570, | ||
| 66 | 1413,3794, 906,3365,3381,1701,1982,1818,1524,2924,1205, 616,2586,2072,2004, 575, | ||
| 67 | 253,3099, 32,1365,1182, 197,1714,2454,1201, 554,3388,3224,2748, 756,2587, 250, | ||
| 68 | 2567,1507,1517,3529,1922,2761,2337,3416,1961,1677,2452,2238,3153, 615, 911,1506, | ||
| 69 | 1474,2495,1265,1906,2749,3756,3280,2161, 898,2714,1759,3450,2243,2444, 563, 26, | ||
| 70 | 3286,2266,3769,3344,2707,3677, 611,1402, 531,1028,2871,4548,1375, 261,2948, 835, | ||
| 71 | 1190,4134, 353, 840,2684,1900,3082,1435,2109,1207,1674, 329,1872,2781,4055,2686, | ||
| 72 | 2104, 608,3318,2423,2957,2768,1108,3739,3512,3271,3985,2203,1771,3520,1418,2054, | ||
| 73 | 1681,1153, 225,1627,2929, 162,2050,2511,3687,1954, 124,1859,2431,1684,3032,2894, | ||
| 74 | 585,4805,3969,2869,2704,2088,2032,2095,3656,2635,4362,2209, 256, 518,2042,2105, | ||
| 75 | 3777,3657, 643,2298,1148,1779, 190, 989,3544, 414, 11,2135,2063,2979,1471, 403, | ||
| 76 | 3678, 126, 770,1563, 671,2499,3216,2877, 600,1179, 307,2805,4937,1268,1297,2694, | ||
| 77 | 252,4032,1448,1494,1331,1394, 127,2256, 222,1647,1035,1481,3056,1915,1048, 873, | ||
| 78 | 3651, 210, 33,1608,2516, 200,1520, 415, 102, 0,3389,1287, 817, 91,3299,2940, | ||
| 79 | 836,1814, 549,2197,1396,1669,2987,3582,2297,2848,4528,1070, 687, 20,1819, 121, | ||
| 80 | 1552,1364,1461,1968,2617,3540,2824,2083, 177, 948,4938,2291, 110,4549,2066, 648, | ||
| 81 | 3359,1755,2110,2114,4642,4845,1693,3937,3308,1257,1869,2123, 208,1804,3159,2992, | ||
| 82 | 2531,2549,3361,2418,1350,2347,2800,2568,1291,2036,2680, 72, 842,1990, 212,1233, | ||
| 83 | 1154,1586, 75,2027,3410,4900,1823,1337,2710,2676, 728,2810,1522,3026,4995, 157, | ||
| 84 | 755,1050,4022, 710, 785,1936,2194,2085,1406,2777,2400, 150,1250,4049,1206, 807, | ||
| 85 | 1910, 534, 529,3309,1721,1660, 274, 39,2827, 661,2670,1578, 925,3248,3815,1094, | ||
| 86 | 4278,4901,4252, 41,1150,3747,2572,2227,4501,3658,4902,3813,3357,3617,2884,2258, | ||
| 87 | 887, 538,4187,3199,1294,2439,3042,2329,2343,2497,1255, 107, 543,1527, 521,3478, | ||
| 88 | 3568, 194,5062, 15, 961,3870,1241,1192,2664, 66,5215,3260,2111,1295,1127,2152, | ||
| 89 | 3805,4135, 901,1164,1976, 398,1278, 530,1460, 748, 904,1054,1966,1426, 53,2909, | ||
| 90 | 509, 523,2279,1534, 536,1019, 239,1685, 460,2353, 673,1065,2401,3600,4298,2272, | ||
| 91 | 1272,2363, 284,1753,3679,4064,1695, 81, 815,2677,2757,2731,1386, 859, 500,4221, | ||
| 92 | 2190,2566, 757,1006,2519,2068,1166,1455, 337,2654,3203,1863,1682,1914,3025,1252, | ||
| 93 | 1409,1366, 847, 714,2834,2038,3209, 964,2970,1901, 885,2553,1078,1756,3049, 301, | ||
| 94 | 1572,3326, 688,2130,1996,2429,1805,1648,2930,3421,2750,3652,3088, 262,1158,1254, | ||
| 95 | 389,1641,1812, 526,1719, 923,2073,1073,1902, 468, 489,4625,1140, 857,2375,3070, | ||
| 96 | 3319,2863, 380, 116,1328,2693,1161,2244, 273,1212,1884,2769,3011,1775,1142, 461, | ||
| 97 | 3066,1200,2147,2212, 790, 702,2695,4222,1601,1058, 434,2338,5153,3640, 67,2360, | ||
| 98 | 4099,2502, 618,3472,1329, 416,1132, 830,2782,1807,2653,3211,3510,1662, 192,2124, | ||
| 99 | 296,3979,1739,1611,3684, 23, 118, 324, 446,1239,1225, 293,2520,3814,3795,2535, | ||
| 100 | 3116, 17,1074, 467,2692,2201, 387,2922, 45,1326,3055,1645,3659,2817, 958, 243, | ||
| 101 | 1903,2320,1339,2825,1784,3289, 356, 576, 865,2315,2381,3377,3916,1088,3122,1713, | ||
| 102 | 1655, 935, 628,4689,1034,1327, 441, 800, 720, 894,1979,2183,1528,5289,2702,1071, | ||
| 103 | 4046,3572,2399,1571,3281, 79, 761,1103, 327, 134, 758,1899,1371,1615, 879, 442, | ||
| 104 | 215,2605,2579, 173,2048,2485,1057,2975,3317,1097,2253,3801,4263,1403,1650,2946, | ||
| 105 | 814,4968,3487,1548,2644,1567,1285, 2, 295,2636, 97, 946,3576, 832, 141,4257, | ||
| 106 | 3273, 760,3821,3521,3156,2607, 949,1024,1733,1516,1803,1920,2125,2283,2665,3180, | ||
| 107 | 1501,2064,3560,2171,1592, 803,3518,1416, 732,3897,4258,1363,1362,2458, 119,1427, | ||
| 108 | 602,1525,2608,1605,1639,3175, 694,3064, 10, 465, 76,2000,4846,4208, 444,3781, | ||
| 109 | 1619,3353,2206,1273,3796, 740,2483, 320,1723,2377,3660,2619,1359,1137,1762,1724, | ||
| 110 | 2345,2842,1850,1862, 912, 821,1866, 612,2625,1735,2573,3369,1093, 844, 89, 937, | ||
| 111 | 930,1424,3564,2413,2972,1004,3046,3019,2011, 711,3171,1452,4178, 428, 801,1943, | ||
| 112 | 432, 445,2811, 206,4136,1472, 730, 349, 73, 397,2802,2547, 998,1637,1167, 789, | ||
| 113 | 396,3217, 154,1218, 716,1120,1780,2819,4826,1931,3334,3762,2139,1215,2627, 552, | ||
| 114 | 3664,3628,3232,1405,2383,3111,1356,2652,3577,3320,3101,1703, 640,1045,1370,1246, | ||
| 115 | 4996, 371,1575,2436,1621,2210, 984,4033,1734,2638, 16,4529, 663,2755,3255,1451, | ||
| 116 | 3917,2257,1253,1955,2234,1263,2951, 214,1229, 617, 485, 359,1831,1969, 473,2310, | ||
| 117 | 750,2058, 165, 80,2864,2419, 361,4344,2416,2479,1134, 796,3726,1266,2943, 860, | ||
| 118 | 2715, 938, 390,2734,1313,1384, 248, 202, 877,1064,2854, 522,3907, 279,1602, 297, | ||
| 119 | 2357, 395,3740, 137,2075, 944,4089,2584,1267,3802, 62,1533,2285, 178, 176, 780, | ||
| 120 | 2440, 201,3707, 590, 478,1560,4354,2117,1075, 30, 74,4643,4004,1635,1441,2745, | ||
| 121 | 776,2596, 238,1077,1692,1912,2844, 605, 499,1742,3947, 241,3053, 980,1749, 936, | ||
| 122 | 2640,4511,2582, 515,1543,2162,5322,2892,2993, 890,2148,1924, 665,1827,3581,1032, | ||
| 123 | 968,3163, 339,1044,1896, 270, 583,1791,1720,4367,1194,3488,3669, 43,2523,1657, | ||
| 124 | 163,2167, 290,1209,1622,3378, 550, 634,2508,2510, 695,2634,2384,2512,1476,1414, | ||
| 125 | 220,1469,2341,2138,2852,3183,2900,4939,2865,3502,1211,3680, 854,3227,1299,2976, | ||
| 126 | 3172, 186,2998,1459, 443,1067,3251,1495, 321,1932,3054, 909, 753,1410,1828, 436, | ||
| 127 | 2441,1119,1587,3164,2186,1258, 227, 231,1425,1890,3200,3942, 247, 959, 725,5254, | ||
| 128 | 2741, 577,2158,2079, 929, 120, 174, 838,2813, 591,1115, 417,2024, 40,3240,1536, | ||
| 129 | 1037, 291,4151,2354, 632,1298,2406,2500,3535,1825,1846,3451, 205,1171, 345,4238, | ||
| 130 | 18,1163, 811, 685,2208,1217, 425,1312,1508,1175,4308,2552,1033, 587,1381,3059, | ||
| 131 | 2984,3482, 340,1316,4023,3972, 792,3176, 519, 777,4690, 918, 933,4130,2981,3741, | ||
| 132 | 90,3360,2911,2200,5184,4550, 609,3079,2030, 272,3379,2736, 363,3881,1130,1447, | ||
| 133 | 286, 779, 357,1169,3350,3137,1630,1220,2687,2391, 747,1277,3688,2618,2682,2601, | ||
| 134 | 1156,3196,5290,4034,3102,1689,3596,3128, 874, 219,2783, 798, 508,1843,2461, 269, | ||
| 135 | 1658,1776,1392,1913,2983,3287,2866,2159,2372, 829,4076, 46,4253,2873,1889,1894, | ||
| 136 | 915,1834,1631,2181,2318, 298, 664,2818,3555,2735, 954,3228,3117, 527,3511,2173, | ||
| 137 | 681,2712,3033,2247,2346,3467,1652, 155,2164,3382, 113,1994, 450, 899, 494, 994, | ||
| 138 | 1237,2958,1875,2336,1926,3727, 545,1577,1550, 633,3473, 204,1305,3072,2410,1956, | ||
| 139 | 2471, 707,2134, 841,2195,2196,2663,3843,1026,4940, 990,3252,4997, 368,1092, 437, | ||
| 140 | 3212,3258,1933,1829, 675,2977,2893, 412, 943,3723,4644,3294,3283,2230,2373,5154, | ||
| 141 | 2389,2241,2661,2323,1404,2524, 593, 787, 677,3008,1275,2059, 438,2709,2609,2240, | ||
| 142 | 2269,2246,1446, 36,1568,1373,3892,1574,2301,1456,3962, 693,2276,5216,2035,1143, | ||
| 143 | 2720,1919,1797,1811,2763,4137,2597,1830,1699,1488,1198,2090, 424,1694, 312,3634, | ||
| 144 | 3390,4179,3335,2252,1214, 561,1059,3243,2295,2561, 975,5155,2321,2751,3772, 472, | ||
| 145 | 1537,3282,3398,1047,2077,2348,2878,1323,3340,3076, 690,2906, 51, 369, 170,3541, | ||
| 146 | 1060,2187,2688,3670,2541,1083,1683, 928,3918, 459, 109,4427, 599,3744,4286, 143, | ||
| 147 | 2101,2730,2490, 82,1588,3036,2121, 281,1860, 477,4035,1238,2812,3020,2716,3312, | ||
| 148 | 1530,2188,2055,1317, 843, 636,1808,1173,3495, 649, 181,1002, 147,3641,1159,2414, | ||
| 149 | 3750,2289,2795, 813,3123,2610,1136,4368, 5,3391,4541,2174, 420, 429,1728, 754, | ||
| 150 | 1228,2115,2219, 347,2223,2733, 735,1518,3003,2355,3134,1764,3948,3329,1888,2424, | ||
| 151 | 1001,1234,1972,3321,3363,1672,1021,1450,1584, 226, 765, 655,2526,3404,3244,2302, | ||
| 152 | 3665, 731, 594,2184, 319,1576, 621, 658,2656,4299,2099,3864,1279,2071,2598,2739, | ||
| 153 | 795,3086,3699,3908,1707,2352,2402,1382,3136,2475,1465,4847,3496,3865,1085,3004, | ||
| 154 | 2591,1084, 213,2287,1963,3565,2250, 822, 793,4574,3187,1772,1789,3050, 595,1484, | ||
| 155 | 1959,2770,1080,2650, 456, 422,2996, 940,3322,4328,4345,3092,2742, 965,2784, 739, | ||
| 156 | 4124, 952,1358,2498,2949,2565, 332,2698,2378, 660,2260,2473,4194,3856,2919, 535, | ||
| 157 | 1260,2651,1208,1428,1300,1949,1303,2942, 433,2455,2450,1251,1946, 614,1269, 641, | ||
| 158 | 1306,1810,2737,3078,2912, 564,2365,1419,1415,1497,4460,2367,2185,1379,3005,1307, | ||
| 159 | 3218,2175,1897,3063, 682,1157,4040,4005,1712,1160,1941,1399, 394, 402,2952,1573, | ||
| 160 | 1151,2986,2404, 862, 299,2033,1489,3006, 346, 171,2886,3401,1726,2932, 168,2533, | ||
| 161 | 47,2507,1030,3735,1145,3370,1395,1318,1579,3609,4560,2857,4116,1457,2529,1965, | ||
| 162 | 504,1036,2690,2988,2405, 745,5871, 849,2397,2056,3081, 863,2359,3857,2096, 99, | ||
| 163 | 1397,1769,2300,4428,1643,3455,1978,1757,3718,1440, 35,4879,3742,1296,4228,2280, | ||
| 164 | 160,5063,1599,2013, 166, 520,3479,1646,3345,3012, 490,1937,1545,1264,2182,2505, | ||
| 165 | 1096,1188,1369,1436,2421,1667,2792,2460,1270,2122, 727,3167,2143, 806,1706,1012, | ||
| 166 | 1800,3037, 960,2218,1882, 805, 139,2456,1139,1521, 851,1052,3093,3089, 342,2039, | ||
| 167 | 744,5097,1468,1502,1585,2087, 223, 939, 326,2140,2577, 892,2481,1623,4077, 982, | ||
| 168 | 3708, 135,2131, 87,2503,3114,2326,1106, 876,1616, 547,2997,2831,2093,3441,4530, | ||
| 169 | 4314, 9,3256,4229,4148, 659,1462,1986,1710,2046,2913,2231,4090,4880,5255,3392, | ||
| 170 | 3274,1368,3689,4645,1477, 705,3384,3635,1068,1529,2941,1458,3782,1509, 100,1656, | ||
| 171 | 2548, 718,2339, 408,1590,2780,3548,1838,4117,3719,1345,3530, 717,3442,2778,3220, | ||
| 172 | 2898,1892,4590,3614,3371,2043,1998,1224,3483, 891, 635, 584,2559,3355, 733,1766, | ||
| 173 | 1729,1172,3789,1891,2307, 781,2982,2271,1957,1580,5773,2633,2005,4195,3097,1535, | ||
| 174 | 3213,1189,1934,5693,3262, 586,3118,1324,1598, 517,1564,2217,1868,1893,4445,3728, | ||
| 175 | 2703,3139,1526,1787,1992,3882,2875,1549,1199,1056,2224,1904,2711,5098,4287, 338, | ||
| 176 | 1993,3129,3489,2689,1809,2815,1997, 957,1855,3898,2550,3275,3057,1105,1319, 627, | ||
| 177 | 1505,1911,1883,3526, 698,3629,3456,1833,1431, 746, 77,1261,2017,2296,1977,1885, | ||
| 178 | 125,1334,1600, 525,1798,1109,2222,1470,1945, 559,2236,1186,3443,2476,1929,1411, | ||
| 179 | 2411,3135,1777,3372,2621,1841,1613,3229, 668,1430,1839,2643,2916, 195,1989,2671, | ||
| 180 | 2358,1387, 629,3205,2293,5256,4439, 123,1310, 888,1879,4300,3021,3605,1003,1162, | ||
| 181 | 3192,2910,2010, 140,2395,2859, 55,1082,2012,2901, 662, 419,2081,1438, 680,2774, | ||
| 182 | 4654,3912,1620,1731,1625,5035,4065,2328, 512,1344, 802,5443,2163,2311,2537, 524, | ||
| 183 | 3399, 98,1155,2103,1918,2606,3925,2816,1393,2465,1504,3773,2177,3963,1478,4346, | ||
| 184 | 180,1113,4655,3461,2028,1698, 833,2696,1235,1322,1594,4408,3623,3013,3225,2040, | ||
| 185 | 3022, 541,2881, 607,3632,2029,1665,1219, 639,1385,1686,1099,2803,3231,1938,3188, | ||
| 186 | 2858, 427, 676,2772,1168,2025, 454,3253,2486,3556, 230,1950, 580, 791,1991,1280, | ||
| 187 | 1086,1974,2034, 630, 257,3338,2788,4903,1017, 86,4790, 966,2789,1995,1696,1131, | ||
| 188 | 259,3095,4188,1308, 179,1463,5257, 289,4107,1248, 42,3413,1725,2288, 896,1947, | ||
| 189 | 774,4474,4254, 604,3430,4264, 392,2514,2588, 452, 237,1408,3018, 988,4531,1970, | ||
| 190 | 3034,3310, 540,2370,1562,1288,2990, 502,4765,1147, 4,1853,2708, 207, 294,2814, | ||
| 191 | 4078,2902,2509, 684, 34,3105,3532,2551, 644, 709,2801,2344, 573,1727,3573,3557, | ||
| 192 | 2021,1081,3100,4315,2100,3681, 199,2263,1837,2385, 146,3484,1195,2776,3949, 997, | ||
| 193 | 1939,3973,1008,1091,1202,1962,1847,1149,4209,5444,1076, 493, 117,5400,2521, 972, | ||
| 194 | 1490,2934,1796,4542,2374,1512,2933,2657, 413,2888,1135,2762,2314,2156,1355,2369, | ||
| 195 | 766,2007,2527,2170,3124,2491,2593,2632,4757,2437, 234,3125,3591,1898,1750,1376, | ||
| 196 | 1942,3468,3138, 570,2127,2145,3276,4131, 962, 132,1445,4196, 19, 941,3624,3480, | ||
| 197 | 3366,1973,1374,4461,3431,2629, 283,2415,2275, 808,2887,3620,2112,2563,1353,3610, | ||
| 198 | 955,1089,3103,1053, 96, 88,4097, 823,3808,1583, 399, 292,4091,3313, 421,1128, | ||
| 199 | 642,4006, 903,2539,1877,2082, 596, 29,4066,1790, 722,2157, 130, 995,1569, 769, | ||
| 200 | 1485, 464, 513,2213, 288,1923,1101,2453,4316, 133, 486,2445, 50, 625, 487,2207, | ||
| 201 | 57, 423, 481,2962, 159,3729,1558, 491, 303, 482, 501, 240,2837, 112,3648,2392, | ||
| 202 | 1783, 362, 8,3433,3422, 610,2793,3277,1390,1284,1654, 21,3823, 734, 367, 623, | ||
| 203 | 193, 287, 374,1009,1483, 816, 476, 313,2255,2340,1262,2150,2899,1146,2581, 782, | ||
| 204 | 2116,1659,2018,1880, 255,3586,3314,1110,2867,2137,2564, 986,2767,5185,2006, 650, | ||
| 205 | 158, 926, 762, 881,3157,2717,2362,3587, 306,3690,3245,1542,3077,2427,1691,2478, | ||
| 206 | 2118,2985,3490,2438, 539,2305, 983, 129,1754, 355,4201,2386, 827,2923, 104,1773, | ||
| 207 | 2838,2771, 411,2905,3919, 376, 767, 122,1114, 828,2422,1817,3506, 266,3460,1007, | ||
| 208 | 1609,4998, 945,2612,4429,2274, 726,1247,1964,2914,2199,2070,4002,4108, 657,3323, | ||
| 209 | 1422, 579, 455,2764,4737,1222,2895,1670, 824,1223,1487,2525, 558, 861,3080, 598, | ||
| 210 | 2659,2515,1967, 752,2583,2376,2214,4180, 977, 704,2464,4999,2622,4109,1210,2961, | ||
| 211 | 819,1541, 142,2284, 44, 418, 457,1126,3730,4347,4626,1644,1876,3671,1864, 302, | ||
| 212 | 1063,5694, 624, 723,1984,3745,1314,1676,2488,1610,1449,3558,3569,2166,2098, 409, | ||
| 213 | 1011,2325,3704,2306, 818,1732,1383,1824,1844,3757, 999,2705,3497,1216,1423,2683, | ||
| 214 | 2426,2954,2501,2726,2229,1475,2554,5064,1971,1794,1666,2014,1343, 783, 724, 191, | ||
| 215 | 2434,1354,2220,5065,1763,2752,2472,4152, 131, 175,2885,3434, 92,1466,4920,2616, | ||
| 216 | 3871,3872,3866, 128,1551,1632, 669,1854,3682,4691,4125,1230, 188,2973,3290,1302, | ||
| 217 | 1213, 560,3266, 917, 763,3909,3249,1760, 868,1958, 764,1782,2097, 145,2277,3774, | ||
| 218 | 4462, 64,1491,3062, 971,2132,3606,2442, 221,1226,1617, 218, 323,1185,3207,3147, | ||
| 219 | 571, 619,1473,1005,1744,2281, 449,1887,2396,3685, 275, 375,3816,1743,3844,3731, | ||
| 220 | 845,1983,2350,4210,1377, 773, 967,3499,3052,3743,2725,4007,1697,1022,3943,1464, | ||
| 221 | 3264,2855,2722,1952,1029,2839,2467, 84,4383,2215, 820,1391,2015,2448,3672, 377, | ||
| 222 | 1948,2168, 797,2545,3536,2578,2645, 94,2874,1678, 405,1259,3071, 771, 546,1315, | ||
| 223 | 470,1243,3083, 895,2468, 981, 969,2037, 846,4181, 653,1276,2928, 14,2594, 557, | ||
| 224 | 3007,2474, 156, 902,1338,1740,2574, 537,2518, 973,2282,2216,2433,1928, 138,2903, | ||
| 225 | 1293,2631,1612, 646,3457, 839,2935, 111, 496,2191,2847, 589,3186, 149,3994,2060, | ||
| 226 | 4031,2641,4067,3145,1870, 37,3597,2136,1025,2051,3009,3383,3549,1121,1016,3261, | ||
| 227 | 1301, 251,2446,2599,2153, 872,3246, 637, 334,3705, 831, 884, 921,3065,3140,4092, | ||
| 228 | 2198,1944, 246,2964, 108,2045,1152,1921,2308,1031, 203,3173,4170,1907,3890, 810, | ||
| 229 | 1401,2003,1690, 506, 647,1242,2828,1761,1649,3208,2249,1589,3709,2931,5156,1708, | ||
| 230 | 498, 666,2613, 834,3817,1231, 184,2851,1124, 883,3197,2261,3710,1765,1553,2658, | ||
| 231 | 1178,2639,2351, 93,1193, 942,2538,2141,4402, 235,1821, 870,1591,2192,1709,1871, | ||
| 232 | 3341,1618,4126,2595,2334, 603, 651, 69, 701, 268,2662,3411,2555,1380,1606, 503, | ||
| 233 | 448, 254,2371,2646, 574,1187,2309,1770, 322,2235,1292,1801, 305, 566,1133, 229, | ||
| 234 | 2067,2057, 706, 167, 483,2002,2672,3295,1820,3561,3067, 316, 378,2746,3452,1112, | ||
| 235 | 136,1981, 507,1651,2917,1117, 285,4591, 182,2580,3522,1304, 335,3303,1835,2504, | ||
| 236 | 1795,1792,2248, 674,1018,2106,2449,1857,2292,2845, 976,3047,1781,2600,2727,1389, | ||
| 237 | 1281, 52,3152, 153, 265,3950, 672,3485,3951,4463, 430,1183, 365, 278,2169, 27, | ||
| 238 | 1407,1336,2304, 209,1340,1730,2202,1852,2403,2883, 979,1737,1062, 631,2829,2542, | ||
| 239 | 3876,2592, 825,2086,2226,3048,3625, 352,1417,3724, 542, 991, 431,1351,3938,1861, | ||
| 240 | 2294, 826,1361,2927,3142,3503,1738, 463,2462,2723, 582,1916,1595,2808, 400,3845, | ||
| 241 | 3891,2868,3621,2254, 58,2492,1123, 910,2160,2614,1372,1603,1196,1072,3385,1700, | ||
| 242 | 3267,1980, 696, 480,2430, 920, 799,1570,2920,1951,2041,4047,2540,1321,4223,2469, | ||
| 243 | 3562,2228,1271,2602, 401,2833,3351,2575,5157, 907,2312,1256, 410, 263,3507,1582, | ||
| 244 | 996, 678,1849,2316,1480, 908,3545,2237, 703,2322, 667,1826,2849,1531,2604,2999, | ||
| 245 | 2407,3146,2151,2630,1786,3711, 469,3542, 497,3899,2409, 858, 837,4446,3393,1274, | ||
| 246 | 786, 620,1845,2001,3311, 484, 308,3367,1204,1815,3691,2332,1532,2557,1842,2020, | ||
| 247 | 2724,1927,2333,4440, 567, 22,1673,2728,4475,1987,1858,1144,1597, 101,1832,3601, | ||
| 248 | 12, 974,3783,4391, 951,1412, 1,3720, 453,4608,4041, 528,1041,1027,3230,2628, | ||
| 249 | 1129, 875,1051,3291,1203,2262,1069,2860,2799,2149,2615,3278, 144,1758,3040, 31, | ||
| 250 | 475,1680, 366,2685,3184, 311,1642,4008,2466,5036,1593,1493,2809, 216,1420,1668, | ||
| 251 | 233, 304,2128,3284, 232,1429,1768,1040,2008,3407,2740,2967,2543, 242,2133, 778, | ||
| 252 | 1565,2022,2620, 505,2189,2756,1098,2273, 372,1614, 708, 553,2846,2094,2278, 169, | ||
| 253 | 3626,2835,4161, 228,2674,3165, 809,1454,1309, 466,1705,1095, 900,3423, 880,2667, | ||
| 254 | 3751,5258,2317,3109,2571,4317,2766,1503,1342, 866,4447,1118, 63,2076, 314,1881, | ||
| 255 | 1348,1061, 172, 978,3515,1747, 532, 511,3970, 6, 601, 905,2699,3300,1751, 276, | ||
| 256 | 1467,3725,2668, 65,4239,2544,2779,2556,1604, 578,2451,1802, 992,2331,2624,1320, | ||
| 257 | 3446, 713,1513,1013, 103,2786,2447,1661, 886,1702, 916, 654,3574,2031,1556, 751, | ||
| 258 | 2178,2821,2179,1498,1538,2176, 271, 914,2251,2080,1325, 638,1953,2937,3877,2432, | ||
| 259 | 2754, 95,3265,1716, 260,1227,4083, 775, 106,1357,3254, 426,1607, 555,2480, 772, | ||
| 260 | 1985, 244,2546, 474, 495,1046,2611,1851,2061, 71,2089,1675,2590, 742,3758,2843, | ||
| 261 | 3222,1433, 267,2180,2576,2826,2233,2092,3913,2435, 956,1745,3075, 856,2113,1116, | ||
| 262 | 451, 3,1988,2896,1398, 993,2463,1878,2049,1341,2718,2721,2870,2108, 712,2904, | ||
| 263 | 4363,2753,2324, 277,2872,2349,2649, 384, 987, 435, 691,3000, 922, 164,3939, 652, | ||
| 264 | 1500,1184,4153,2482,3373,2165,4848,2335,3775,3508,3154,2806,2830,1554,2102,1664, | ||
| 265 | 2530,1434,2408, 893,1547,2623,3447,2832,2242,2532,3169,2856,3223,2078, 49,3770, | ||
| 266 | 3469, 462, 318, 656,2259,3250,3069, 679,1629,2758, 344,1138,1104,3120,1836,1283, | ||
| 267 | 3115,2154,1437,4448, 934, 759,1999, 794,2862,1038, 533,2560,1722,2342, 855,2626, | ||
| 268 | 1197,1663,4476,3127, 85,4240,2528, 25,1111,1181,3673, 407,3470,4561,2679,2713, | ||
| 269 | 768,1925,2841,3986,1544,1165, 932, 373,1240,2146,1930,2673, 721,4766, 354,4333, | ||
| 270 | 391,2963, 187, 61,3364,1442,1102, 330,1940,1767, 341,3809,4118, 393,2496,2062, | ||
| 271 | 2211, 105, 331, 300, 439, 913,1332, 626, 379,3304,1557, 328, 689,3952, 309,1555, | ||
| 272 | 931, 317,2517,3027, 325, 569, 686,2107,3084, 60,1042,1333,2794, 264,3177,4014, | ||
| 273 | 1628, 258,3712, 7,4464,1176,1043,1778, 683, 114,1975, 78,1492, 383,1886, 510, | ||
| 274 | 386, 645,5291,2891,2069,3305,4138,3867,2939,2603,2493,1935,1066,1848,3588,1015, | ||
| 275 | 1282,1289,4609, 697,1453,3044,2666,3611,1856,2412, 54, 719,1330, 568,3778,2459, | ||
| 276 | 1748, 788, 492, 551,1191,1000, 488,3394,3763, 282,1799, 348,2016,1523,3155,2390, | ||
| 277 | 1049, 382,2019,1788,1170, 729,2968,3523, 897,3926,2785,2938,3292, 350,2319,3238, | ||
| 278 | 1718,1717,2655,3453,3143,4465, 161,2889,2980,2009,1421, 56,1908,1640,2387,2232, | ||
| 279 | 1917,1874,2477,4921, 148, 83,3438, 592,4245,2882,1822,1055, 741, 115,1496,1624, | ||
| 280 | 381,1638,4592,1020, 516,3214, 458, 947,4575,1432, 211,1514,2926,1865,2142, 189, | ||
| 281 | 852,1221,1400,1486, 882,2299,4036, 351, 28,1122, 700,6479,6480,6481,6482,6483, #last 512 | ||
| 282 | ) | ||
| 283 | |||
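The header comments above sketch how this table is consumed: look up each observed character's FreqOrder, count how many fall in the "frequent" band, and compare the frequent-to-infrequent ratio against the language-typical ratio. Below is a simplified sketch of that idea; the real logic is `CharDistributionAnalysis` in `chardistribution.py`, and the 512 threshold mirrors the one used there:

```python
# Simplified sketch of the distribution test these tables feed; the real
# implementation is CharDistributionAnalysis in chardistribution.py.
FREQ_THRESHOLD = 512  # FreqOrder values below this count as "frequent"

def distribution_confidence(freq_orders, typical_ratio):
    """freq_orders: FreqOrder values looked up for each observed 2-byte char."""
    freq_chars = sum(1 for order in freq_orders if 0 <= order < FREQ_THRESHOLD)
    total_chars = len(freq_orders)
    if total_chars == 0:
        return 0.0
    if total_chars == freq_chars:
        return 0.99
    # Frequent-to-infrequent ratio, normalized by the language-typical ratio
    # (e.g. GB2312_TYPICAL_DISTRIBUTION_RATIO above).
    r = freq_chars / ((total_chars - freq_chars) * typical_ratio)
    return min(r, 0.99)
```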
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/gb2312prober.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/gb2312prober.py new file mode 100644 index 0000000..7cae6b5 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/gb2312prober.py | |||
| @@ -0,0 +1,46 @@ | |||
| 1 | ######################## BEGIN LICENSE BLOCK ######################## | ||
| 2 | # The Original Code is mozilla.org code. | ||
| 3 | # | ||
| 4 | # The Initial Developer of the Original Code is | ||
| 5 | # Netscape Communications Corporation. | ||
| 6 | # Portions created by the Initial Developer are Copyright (C) 1998 | ||
| 7 | # the Initial Developer. All Rights Reserved. | ||
| 8 | # | ||
| 9 | # Contributor(s): | ||
| 10 | # Mark Pilgrim - port to Python | ||
| 11 | # | ||
| 12 | # This library is free software; you can redistribute it and/or | ||
| 13 | # modify it under the terms of the GNU Lesser General Public | ||
| 14 | # License as published by the Free Software Foundation; either | ||
| 15 | # version 2.1 of the License, or (at your option) any later version. | ||
| 16 | # | ||
| 17 | # This library is distributed in the hope that it will be useful, | ||
| 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
| 20 | # Lesser General Public License for more details. | ||
| 21 | # | ||
| 22 | # You should have received a copy of the GNU Lesser General Public | ||
| 23 | # License along with this library; if not, write to the Free Software | ||
| 24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA | ||
| 25 | # 02110-1301 USA | ||
| 26 | ######################### END LICENSE BLOCK ######################### | ||
| 27 | |||
| 28 | from .mbcharsetprober import MultiByteCharSetProber | ||
| 29 | from .codingstatemachine import CodingStateMachine | ||
| 30 | from .chardistribution import GB2312DistributionAnalysis | ||
| 31 | from .mbcssm import GB2312_SM_MODEL | ||
| 32 | |||
| 33 | class GB2312Prober(MultiByteCharSetProber): | ||
| 34 | def __init__(self): | ||
| 35 | super(GB2312Prober, self).__init__() | ||
| 36 | self.coding_sm = CodingStateMachine(GB2312_SM_MODEL) | ||
| 37 | self.distribution_analyzer = GB2312DistributionAnalysis() | ||
| 38 | self.reset() | ||
| 39 | |||
| 40 | @property | ||
| 41 | def charset_name(self): | ||
| 42 | return "GB2312" | ||
| 43 | |||
| 44 | @property | ||
| 45 | def language(self): | ||
| 46 | return "Chinese" | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/hebrewprober.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/hebrewprober.py new file mode 100644 index 0000000..10b8122 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/hebrewprober.py | |||
| @@ -0,0 +1,292 @@ | |||
| 1 | ######################## BEGIN LICENSE BLOCK ######################## | ||
| 2 | # The Original Code is Mozilla Universal charset detector code. | ||
| 3 | # | ||
| 4 | # The Initial Developer of the Original Code is | ||
| 5 | # Shy Shalom | ||
| 6 | # Portions created by the Initial Developer are Copyright (C) 2005 | ||
| 7 | # the Initial Developer. All Rights Reserved. | ||
| 8 | # | ||
| 9 | # Contributor(s): | ||
| 10 | # Mark Pilgrim - port to Python | ||
| 11 | # | ||
| 12 | # This library is free software; you can redistribute it and/or | ||
| 13 | # modify it under the terms of the GNU Lesser General Public | ||
| 14 | # License as published by the Free Software Foundation; either | ||
| 15 | # version 2.1 of the License, or (at your option) any later version. | ||
| 16 | # | ||
| 17 | # This library is distributed in the hope that it will be useful, | ||
| 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
| 20 | # Lesser General Public License for more details. | ||
| 21 | # | ||
| 22 | # You should have received a copy of the GNU Lesser General Public | ||
| 23 | # License along with this library; if not, write to the Free Software | ||
| 24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA | ||
| 25 | # 02110-1301 USA | ||
| 26 | ######################### END LICENSE BLOCK ######################### | ||
| 27 | |||
| 28 | from .charsetprober import CharSetProber | ||
| 29 | from .enums import ProbingState | ||
| 30 | |||
| 31 | # This prober doesn't actually recognize a language or a charset. | ||
| 32 | # It is a helper prober for the use of the Hebrew model probers | ||
| 33 | |||
| 34 | ### General ideas of the Hebrew charset recognition ### | ||
| 35 | # | ||
| 36 | # Four main charsets exist in Hebrew: | ||
| 37 | # "ISO-8859-8" - Visual Hebrew | ||
| 38 | # "windows-1255" - Logical Hebrew | ||
| 39 | # "ISO-8859-8-I" - Logical Hebrew | ||
| 40 | # "x-mac-hebrew" - ?? Logical Hebrew ?? | ||
| 41 | # | ||
| 42 | # Both "ISO" charsets use a completely identical set of code points, whereas | ||
| 43 | # "windows-1255" and "x-mac-hebrew" are two different proper supersets of | ||
| 44 | # these code points. windows-1255 defines additional characters in the range | ||
| 45 | # 0x80-0x9F as some misc punctuation marks as well as some Hebrew-specific | ||
| 46 | # diacritics and additional 'Yiddish' ligature letters in the range 0xc0-0xd6. | ||
| 47 | # x-mac-hebrew defines similar additional code points but with a different | ||
| 48 | # mapping. | ||
| 49 | # | ||
| 50 | # As far as an average Hebrew text with no diacritics is concerned, all four | ||
| 51 | # charsets are identical with respect to code points. Meaning that for the | ||
| 52 | # main Hebrew alphabet, all four map the same values to all 27 Hebrew letters | ||
| 53 | # (including final letters). | ||
| 54 | # | ||
| 55 | # The dominant difference between these charsets is their directionality. | ||
| 56 | # "Visual" directionality means that the text is ordered as if the renderer is | ||
| 57 | # not aware of a BIDI rendering algorithm. The renderer sees the text and | ||
| 58 | # draws it from left to right. The text itself when ordered naturally is read | ||
| 59 | # backwards. A buffer of Visual Hebrew generally looks like so: | ||
| 60 | # "[last word of first line spelled backwards] [whole line ordered backwards | ||
| 61 | # and spelled backwards] [first word of first line spelled backwards] | ||
| 62 | # [end of line] [last word of second line] ... etc' " | ||
| 63 | # Adding punctuation marks, numbers and English text to visual text is | ||
| 64 | # naturally also "visual", reading from left to right. | ||
| 65 | # | ||
| 66 | # "Logical" directionality means the text is ordered "naturally" according to | ||
| 67 | # the order it is read. It is the responsibility of the renderer to display | ||
| 68 | # the text from right to left. A BIDI algorithm is used to place general | ||
| 69 | # punctuation marks, numbers and English text in the text. | ||
| 70 | # | ||
| 71 | # Texts in x-mac-hebrew are almost impossible to find on the Internet. From | ||
| 72 | # what little evidence I could find, it seems that its general directionality | ||
| 73 | # is Logical. | ||
| 74 | # | ||
| 75 | # To sum up all of the above, the Hebrew probing mechanism knows about two | ||
| 76 | # charsets: | ||
| 77 | # Visual Hebrew - "ISO-8859-8" - backwards text - Words and sentences are | ||
| 78 | # backwards while line order is natural. For charset recognition purposes | ||
| 79 | # the line order is unimportant (in fact, for this implementation, even | ||
| 80 | # word order is unimportant). | ||
| 81 | # Logical Hebrew - "windows-1255" - normal, naturally ordered text. | ||
| 82 | # | ||
| 83 | # "ISO-8859-8-I" is a subset of windows-1255 and doesn't need to be | ||
| 84 | # specifically identified. | ||
| 85 | # "x-mac-hebrew" is also identified as windows-1255. A text in x-mac-hebrew | ||
| 86 | # that contains special punctuation marks or diacritics is displayed with | ||
| 87 | # some unconverted characters showing as question marks. This problem might | ||
| 88 | # be corrected using another model prober for x-mac-hebrew. Due to the fact | ||
| 89 | # that x-mac-hebrew texts are so rare, writing another model prober isn't | ||
| 90 | # worth the effort and performance hit. | ||
| 91 | # | ||
| 92 | #### The Prober #### | ||
| 93 | # | ||
| 94 | # The prober is divided between two SBCharSetProbers and a HebrewProber, | ||
| 95 | # all of which are managed, created, fed data, inquired and deleted by the | ||
| 96 | # SBCSGroupProber. The two SBCharSetProbers identify that the text is in | ||
| 97 | # fact some kind of Hebrew, Logical or Visual. The final decision about which | ||
| 98 | # one it is rests with the HebrewProber, which combines final-letter scores | ||
| 99 | # with the scores of the two SBCharSetProbers to produce a final answer. | ||
| 100 | # | ||
| 101 | # The SBCSGroupProber is responsible for stripping the original text of HTML | ||
| 102 | # tags, English characters, numbers, low-ASCII punctuation characters, spaces | ||
| 103 | # and new lines. It reduces any sequence of such characters to a single space. | ||
| 104 | # The buffer fed to each prober in the SBCS group prober is pure text in | ||
| 105 | # high-ASCII. | ||
| 106 | # The two SBCharSetProbers (model probers) share the same language model: | ||
| 107 | # Win1255Model. | ||
| 108 | # The first SBCharSetProber uses the model normally as any other | ||
| 109 | # SBCharSetProber does, to recognize windows-1255, upon which this model was | ||
| 110 | # built. The second SBCharSetProber is told to make the pair-of-letter | ||
| 111 | # lookup in the language model backwards. This in practice exactly simulates | ||
| 112 | # a visual Hebrew model using the windows-1255 logical Hebrew model. | ||
| 113 | # | ||
| 114 | # The HebrewProber does not use any language model. All it does is look for | ||
| 115 | # final-letter evidence suggesting the text is either logical Hebrew or visual | ||
| 116 | # Hebrew. Disjointed from the model probers, the results of the HebrewProber | ||
| 117 | # alone are meaningless. HebrewProber always returns 0.00 as confidence | ||
| 118 | # since it never identifies a charset by itself. Instead, the pointer to the | ||
| 119 | # HebrewProber is passed to the model probers as a helper "Name Prober". | ||
| 120 | # When the Group prober receives a positive identification from any prober, | ||
| 121 | # it asks for the name of the charset identified. If the prober queried is a | ||
| 122 | # Hebrew model prober, the model prober forwards the call to the | ||
| 123 | # HebrewProber to make the final decision. In the HebrewProber, the | ||
| 124 | # decision is made according to the final-letter scores it maintains and both | ||
| 125 | # model probers' scores. The answer is returned in the form of the name of the | ||
| 126 | # charset identified, either "windows-1255" or "ISO-8859-8". | ||
| 127 | |||
| 128 | class HebrewProber(CharSetProber): | ||
| 129 | # windows-1255 / ISO-8859-8 code points of interest | ||
| 130 | FINAL_KAF = 0xea | ||
| 131 | NORMAL_KAF = 0xeb | ||
| 132 | FINAL_MEM = 0xed | ||
| 133 | NORMAL_MEM = 0xee | ||
| 134 | FINAL_NUN = 0xef | ||
| 135 | NORMAL_NUN = 0xf0 | ||
| 136 | FINAL_PE = 0xf3 | ||
| 137 | NORMAL_PE = 0xf4 | ||
| 138 | FINAL_TSADI = 0xf5 | ||
| 139 | NORMAL_TSADI = 0xf6 | ||
| 140 | |||
| 141 | # Minimum Visual vs Logical final letter score difference. | ||
| 142 | # If the difference is below this, don't rely solely on the final letter score | ||
| 143 | # distance. | ||
| 144 | MIN_FINAL_CHAR_DISTANCE = 5 | ||
| 145 | |||
| 146 | # Minimum Visual vs Logical model score difference. | ||
| 147 | # If the difference is below this, don't rely at all on the model score | ||
| 148 | # distance. | ||
| 149 | MIN_MODEL_DISTANCE = 0.01 | ||
| 150 | |||
| 151 | VISUAL_HEBREW_NAME = "ISO-8859-8" | ||
| 152 | LOGICAL_HEBREW_NAME = "windows-1255" | ||
| 153 | |||
| 154 | def __init__(self): | ||
| 155 | super(HebrewProber, self).__init__() | ||
| 156 | self._final_char_logical_score = None | ||
| 157 | self._final_char_visual_score = None | ||
| 158 | self._prev = None | ||
| 159 | self._before_prev = None | ||
| 160 | self._logical_prober = None | ||
| 161 | self._visual_prober = None | ||
| 162 | self.reset() | ||
| 163 | |||
| 164 | def reset(self): | ||
| 165 | self._final_char_logical_score = 0 | ||
| 166 | self._final_char_visual_score = 0 | ||
| 167 | # The last two characters seen in the previous buffer, | ||
| 168 | # self._prev and self._before_prev, are initialized to space in order to | ||
| 169 | # simulate a word delimiter at the beginning of the data | ||
| 170 | self._prev = ' ' | ||
| 171 | self._before_prev = ' ' | ||
| 172 | # These probers are owned by the group prober. | ||
| 173 | |||
| 174 | def set_model_probers(self, logicalProber, visualProber): | ||
| 175 | self._logical_prober = logicalProber | ||
| 176 | self._visual_prober = visualProber | ||
| 177 | |||
| 178 | def is_final(self, c): | ||
| 179 | return c in [self.FINAL_KAF, self.FINAL_MEM, self.FINAL_NUN, | ||
| 180 | self.FINAL_PE, self.FINAL_TSADI] | ||
| 181 | |||
| 182 | def is_non_final(self, c): | ||
| 183 | # The normal Tsadi is not a good Non-Final letter due to words like | ||
| 184 | # 'lechotet' (to chat) containing an apostrophe after the tsadi. This | ||
| 185 | # apostrophe is converted to a space in FilterWithoutEnglishLetters | ||
| 186 | # causing the Non-Final tsadi to appear at an end of a word even | ||
| 187 | # though this is not the case in the original text. | ||
| 188 | # The letters Pe and Kaf occasionally show a similar problem: loanwords | ||
| 189 | # like 'Pop', 'Winamp' and 'Mubarak' legitimately end with a Non-Final | ||
| 190 | # Pe or Kaf. However, the benefit of keeping these letters as Non-Final | ||
| 191 | # indicators outweighs the damage, since such words are quite rare in | ||
| 192 | # Hebrew text. | ||
| 193 | return c in [self.NORMAL_KAF, self.NORMAL_MEM, | ||
| 194 | self.NORMAL_NUN, self.NORMAL_PE] | ||
| 195 | |||
| 196 | def feed(self, byte_str): | ||
| 197 | # Final letter analysis for logical-visual decision. | ||
| 198 | # Look for evidence that the received buffer is either logical Hebrew | ||
| 199 | # or visual Hebrew. | ||
| 200 | # The following cases are checked: | ||
| 201 | # 1) A word longer than 1 letter, ending with a final letter. This is | ||
| 202 | # an indication that the text is laid out "naturally" since the | ||
| 203 | # final letter really appears at the end. +1 for logical score. | ||
| 204 | # 2) A word longer than 1 letter, ending with a Non-Final letter. In | ||
| 205 | # normal Hebrew, words ending with Kaf, Mem, Nun, Pe or Tsadi, | ||
| 206 | # should not end with the Non-Final form of that letter. Exceptions | ||
| 207 | # to this rule are mentioned above in is_non_final(). This is an | ||
| 208 | # indication that the text is laid out backwards. +1 for visual | ||
| 209 | # score | ||
| 210 | # 3) A word longer than 1 letter, starting with a final letter. Final | ||
| 211 | # letters should not appear at the beginning of a word. This is an | ||
| 212 | # indication that the text is laid out backwards. +1 for visual | ||
| 213 | # score. | ||
| 214 | # | ||
| 215 | # The visual score and logical score are accumulated throughout the | ||
| 216 | # text and are finally checked against each other in charset_name. | ||
| 217 | # No checking for final letters in the middle of words is done since | ||
| 218 | # that case is not an indication for either Logical or Visual text. | ||
| 219 | # | ||
| 220 | # We automatically filter out all 7-bit characters (replace them with | ||
| 221 | # spaces) so the word boundary detection works properly. [MAP] | ||
| 222 | |||
| 223 | if self.state == ProbingState.NOT_ME: | ||
| 224 | # Both model probers say it's not them. No reason to continue. | ||
| 225 | return ProbingState.NOT_ME | ||
| 226 | |||
| 227 | byte_str = self.filter_high_byte_only(byte_str) | ||
| 228 | |||
| 229 | for cur in byte_str: | ||
| 230 | if cur == ' ': | ||
| 231 | # We stand on a space - a word just ended | ||
| 232 | if self._before_prev != ' ': | ||
| 233 | # next-to-last char was not a space so self._prev is not a | ||
| 234 | # 1 letter word | ||
| 235 | if self.is_final(self._prev): | ||
| 236 | # case (1) [-2:not space][-1:final letter][cur:space] | ||
| 237 | self._final_char_logical_score += 1 | ||
| 238 | elif self.is_non_final(self._prev): | ||
| 239 | # case (2) [-2:not space][-1:Non-Final letter][ | ||
| 240 | # cur:space] | ||
| 241 | self._final_char_visual_score += 1 | ||
| 242 | else: | ||
| 243 | # Not standing on a space | ||
| 244 | if ((self._before_prev == ' ') and | ||
| 245 | (self.is_final(self._prev)) and (cur != ' ')): | ||
| 246 | # case (3) [-2:space][-1:final letter][cur:not space] | ||
| 247 | self._final_char_visual_score += 1 | ||
| 248 | self._before_prev = self._prev | ||
| 249 | self._prev = cur | ||
| 250 | |||
| 251 | # Forever detecting, till the end or until both model probers return | ||
| 252 | # ProbingState.NOT_ME (handled above) | ||
| 253 | return ProbingState.DETECTING | ||
| 254 | |||
| 255 | @property | ||
| 256 | def charset_name(self): | ||
| 257 | # Make the decision: is it Logical or Visual? | ||
| 258 | # If the final letter score distance is dominant enough, rely on it. | ||
| 259 | finalsub = self._final_char_logical_score - self._final_char_visual_score | ||
| 260 | if finalsub >= self.MIN_FINAL_CHAR_DISTANCE: | ||
| 261 | return self.LOGICAL_HEBREW_NAME | ||
| 262 | if finalsub <= -self.MIN_FINAL_CHAR_DISTANCE: | ||
| 263 | return self.VISUAL_HEBREW_NAME | ||
| 264 | |||
| 265 | # It's not dominant enough, try to rely on the model scores instead. | ||
| 266 | modelsub = (self._logical_prober.get_confidence() | ||
| 267 | - self._visual_prober.get_confidence()) | ||
| 268 | if modelsub > self.MIN_MODEL_DISTANCE: | ||
| 269 | return self.LOGICAL_HEBREW_NAME | ||
| 270 | if modelsub < -self.MIN_MODEL_DISTANCE: | ||
| 271 | return self.VISUAL_HEBREW_NAME | ||
| 272 | |||
| 273 | # Still no good, back to final letter distance, maybe it'll save the | ||
| 274 | # day. | ||
| 275 | if finalsub < 0.0: | ||
| 276 | return self.VISUAL_HEBREW_NAME | ||
| 277 | |||
| 278 | # (finalsub > 0 - Logical) or (don't know what to do) default to | ||
| 279 | # Logical. | ||
| 280 | return self.LOGICAL_HEBREW_NAME | ||
| 281 | |||
| 282 | @property | ||
| 283 | def language(self): | ||
| 284 | return 'Hebrew' | ||
| 285 | |||
| 286 | @property | ||
| 287 | def state(self): | ||
| 288 | # Remain active as long as any of the model probers are active. | ||
| 289 | if (self._logical_prober.state == ProbingState.NOT_ME) and \ | ||
| 290 | (self._visual_prober.state == ProbingState.NOT_ME): | ||
| 291 | return ProbingState.NOT_ME | ||
| 292 | return ProbingState.DETECTING | ||
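The long comment above describes the wiring: the HebrewProber never decides alone; it arbitrates between two SingleByteCharSetProbers that share the windows-1255 model, one forward and one reversed. Here is a rough sketch of that wiring, paraphrasing what `SBCSGroupProber` in this same package does (names come from the vendored chardet 3.0.4 sources; the block is illustrative, not part of the diff):

```python
# Rough wiring sketch, paraphrasing SBCSGroupProber in this package.
from pip._vendor.chardet.hebrewprober import HebrewProber
from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetProber
from pip._vendor.chardet.langhebrewmodel import Win1255HebrewModel

hebrew_prober = HebrewProber()
# The same windows-1255 model twice: a forward lookup recognizes Logical
# Hebrew, a reversed pair-of-letter lookup simulates Visual Hebrew.
logical = SingleByteCharSetProber(Win1255HebrewModel, False, hebrew_prober)
visual = SingleByteCharSetProber(Win1255HebrewModel, True, hebrew_prober)
hebrew_prober.set_model_probers(logical, visual)

# After all three probers are fed the same buffer, charset_name arbitrates:
# "windows-1255" (Logical) or "ISO-8859-8" (Visual).
```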
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/jisfreq.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/jisfreq.py new file mode 100644 index 0000000..510b940 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/jisfreq.py | |||
| @@ -0,0 +1,325 @@ | |||
| 1 | ######################## BEGIN LICENSE BLOCK ######################## | ||
| 2 | # The Original Code is Mozilla Communicator client code. | ||
| 3 | # | ||
| 4 | # The Initial Developer of the Original Code is | ||
| 5 | # Netscape Communications Corporation. | ||
| 6 | # Portions created by the Initial Developer are Copyright (C) 1998 | ||
| 7 | # the Initial Developer. All Rights Reserved. | ||
| 8 | # | ||
| 9 | # Contributor(s): | ||
| 10 | # Mark Pilgrim - port to Python | ||
| 11 | # | ||
| 12 | # This library is free software; you can redistribute it and/or | ||
| 13 | # modify it under the terms of the GNU Lesser General Public | ||
| 14 | # License as published by the Free Software Foundation; either | ||
| 15 | # version 2.1 of the License, or (at your option) any later version. | ||
| 16 | # | ||
| 17 | # This library is distributed in the hope that it will be useful, | ||
| 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
| 20 | # Lesser General Public License for more details. | ||
| 21 | # | ||
| 22 | # You should have received a copy of the GNU Lesser General Public | ||
| 23 | # License along with this library; if not, write to the Free Software | ||
| 24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA | ||
| 25 | # 02110-1301 USA | ||
| 26 | ######################### END LICENSE BLOCK ######################### | ||
| 27 | |||
| 28 | # Sampled from about 20M of text material, including literature and computer technology | ||
| 29 | # | ||
| 30 | # Japanese frequency table, applied to both S-JIS and EUC-JP | ||
| 31 | # They are sorted in order. | ||
| 32 | |||
| 33 | # 128 --> 0.77094 | ||
| 34 | # 256 --> 0.85710 | ||
| 35 | # 512 --> 0.92635 | ||
| 36 | # 1024 --> 0.97130 | ||
| 37 | # 2048 --> 0.99431 | ||
| 38 | # | ||
| 39 | # Ideal Distribution Ratio = 0.92635 / (1-0.92635) = 12.58 | ||
| 40 | # Random Distribution Ratio = 512 / (2965+62+83+86-512) = 0.191 | ||
| 41 | # | ||
| 42 | # Typical Distribution Ratio: about 25% of the IDR | ||
| 43 | |||
| 44 | JIS_TYPICAL_DISTRIBUTION_RATIO = 3.0 | ||
| 45 | |||
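These ratios feed the character-distribution test used by the JIS probers: count how many decoded characters fall inside the frequent set of the JIS_CHAR_TO_FREQ_ORDER table below versus outside it, and compare that ratio against JIS_TYPICAL_DISTRIBUTION_RATIO. A minimal sketch of the confidence computation, paraphrasing the logic of chardet's companion chardistribution module; the function name and the 0.01/0.99 floor and cap are simplifying assumptions:

    def distribution_confidence(freq_chars, total_chars, typical_ratio=3.0):
        # Ratio of frequent to non-frequent characters, scaled by the
        # typical distribution ratio and capped below certainty.
        if total_chars <= 0 or freq_chars <= 0:
            return 0.01                        # not enough data to say
        if freq_chars != total_chars:
            r = freq_chars / ((total_chars - freq_chars) * typical_ratio)
            if r < 0.99:
                return r
        return 0.99

    # e.g. 90 of 100 characters inside the frequent set:
    # 90 / (10 * 3.0) = 3.0, capped to 0.99, a confident match.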
| 46 | # Char to FreqOrder table | ||
| 47 | JIS_TABLE_SIZE = 4368 | ||
| 48 | |||
| 49 | JIS_CHAR_TO_FREQ_ORDER = ( | ||
| 50 | 40, 1, 6, 182, 152, 180, 295,2127, 285, 381,3295,4304,3068,4606,3165,3510, # 16 | ||
| 51 | 3511,1822,2785,4607,1193,2226,5070,4608, 171,2996,1247, 18, 179,5071, 856,1661, # 32 | ||
| 52 | 1262,5072, 619, 127,3431,3512,3230,1899,1700, 232, 228,1294,1298, 284, 283,2041, # 48 | ||
| 53 | 2042,1061,1062, 48, 49, 44, 45, 433, 434,1040,1041, 996, 787,2997,1255,4305, # 64 | ||
| 54 | 2108,4609,1684,1648,5073,5074,5075,5076,5077,5078,3687,5079,4610,5080,3927,3928, # 80 | ||
| 55 | 5081,3296,3432, 290,2285,1471,2187,5082,2580,2825,1303,2140,1739,1445,2691,3375, # 96 | ||
| 56 | 1691,3297,4306,4307,4611, 452,3376,1182,2713,3688,3069,4308,5083,5084,5085,5086, # 112 | ||
| 57 | 5087,5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102, # 128 | ||
| 58 | 5103,5104,5105,5106,5107,5108,5109,5110,5111,5112,4097,5113,5114,5115,5116,5117, # 144 | ||
| 59 | 5118,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,5130,5131,5132,5133, # 160 | ||
| 60 | 5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148,5149, # 176 | ||
| 61 | 5150,5151,5152,4612,5153,5154,5155,5156,5157,5158,5159,5160,5161,5162,5163,5164, # 192 | ||
| 62 | 5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,1472, 598, 618, 820,1205, # 208 | ||
| 63 | 1309,1412,1858,1307,1692,5176,5177,5178,5179,5180,5181,5182,1142,1452,1234,1172, # 224 | ||
| 64 | 1875,2043,2149,1793,1382,2973, 925,2404,1067,1241, 960,1377,2935,1491, 919,1217, # 240 | ||
| 65 | 1865,2030,1406,1499,2749,4098,5183,5184,5185,5186,5187,5188,2561,4099,3117,1804, # 256 | ||
| 66 | 2049,3689,4309,3513,1663,5189,3166,3118,3298,1587,1561,3433,5190,3119,1625,2998, # 272 | ||
| 67 | 3299,4613,1766,3690,2786,4614,5191,5192,5193,5194,2161, 26,3377, 2,3929, 20, # 288 | ||
| 68 | 3691, 47,4100, 50, 17, 16, 35, 268, 27, 243, 42, 155, 24, 154, 29, 184, # 304 | ||
| 69 | 4, 91, 14, 92, 53, 396, 33, 289, 9, 37, 64, 620, 21, 39, 321, 5, # 320 | ||
| 70 | 12, 11, 52, 13, 3, 208, 138, 0, 7, 60, 526, 141, 151,1069, 181, 275, # 336 | ||
| 71 | 1591, 83, 132,1475, 126, 331, 829, 15, 69, 160, 59, 22, 157, 55,1079, 312, # 352 | ||
| 72 | 109, 38, 23, 25, 10, 19, 79,5195, 61, 382,1124, 8, 30,5196,5197,5198, # 368 | ||
| 73 | 5199,5200,5201,5202,5203,5204,5205,5206, 89, 62, 74, 34,2416, 112, 139, 196, # 384 | ||
| 74 | 271, 149, 84, 607, 131, 765, 46, 88, 153, 683, 76, 874, 101, 258, 57, 80, # 400 | ||
| 75 | 32, 364, 121,1508, 169,1547, 68, 235, 145,2999, 41, 360,3027, 70, 63, 31, # 416 | ||
| 76 | 43, 259, 262,1383, 99, 533, 194, 66, 93, 846, 217, 192, 56, 106, 58, 565, # 432 | ||
| 77 | 280, 272, 311, 256, 146, 82, 308, 71, 100, 128, 214, 655, 110, 261, 104,1140, # 448 | ||
| 78 | 54, 51, 36, 87, 67,3070, 185,2618,2936,2020, 28,1066,2390,2059,5207,5208, # 464 | ||
| 79 | 5209,5210,5211,5212,5213,5214,5215,5216,4615,5217,5218,5219,5220,5221,5222,5223, # 480 | ||
| 80 | 5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234,5235,5236,3514,5237,5238, # 496 | ||
| 81 | 5239,5240,5241,5242,5243,5244,2297,2031,4616,4310,3692,5245,3071,5246,3598,5247, # 512 | ||
| 82 | 4617,3231,3515,5248,4101,4311,4618,3808,4312,4102,5249,4103,4104,3599,5250,5251, # 528 | ||
| 83 | 5252,5253,5254,5255,5256,5257,5258,5259,5260,5261,5262,5263,5264,5265,5266,5267, # 544 | ||
| 84 | 5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278,5279,5280,5281,5282,5283, # 560 | ||
| 85 | 5284,5285,5286,5287,5288,5289,5290,5291,5292,5293,5294,5295,5296,5297,5298,5299, # 576 | ||
| 86 | 5300,5301,5302,5303,5304,5305,5306,5307,5308,5309,5310,5311,5312,5313,5314,5315, # 592 | ||
| 87 | 5316,5317,5318,5319,5320,5321,5322,5323,5324,5325,5326,5327,5328,5329,5330,5331, # 608 | ||
| 88 | 5332,5333,5334,5335,5336,5337,5338,5339,5340,5341,5342,5343,5344,5345,5346,5347, # 624 | ||
| 89 | 5348,5349,5350,5351,5352,5353,5354,5355,5356,5357,5358,5359,5360,5361,5362,5363, # 640 | ||
| 90 | 5364,5365,5366,5367,5368,5369,5370,5371,5372,5373,5374,5375,5376,5377,5378,5379, # 656 | ||
| 91 | 5380,5381, 363, 642,2787,2878,2788,2789,2316,3232,2317,3434,2011, 165,1942,3930, # 672 | ||
| 92 | 3931,3932,3933,5382,4619,5383,4620,5384,5385,5386,5387,5388,5389,5390,5391,5392, # 688 | ||
| 93 | 5393,5394,5395,5396,5397,5398,5399,5400,5401,5402,5403,5404,5405,5406,5407,5408, # 704 | ||
| 94 | 5409,5410,5411,5412,5413,5414,5415,5416,5417,5418,5419,5420,5421,5422,5423,5424, # 720 | ||
| 95 | 5425,5426,5427,5428,5429,5430,5431,5432,5433,5434,5435,5436,5437,5438,5439,5440, # 736 | ||
| 96 | 5441,5442,5443,5444,5445,5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456, # 752 | ||
| 97 | 5457,5458,5459,5460,5461,5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472, # 768 | ||
| 98 | 5473,5474,5475,5476,5477,5478,5479,5480,5481,5482,5483,5484,5485,5486,5487,5488, # 784 | ||
| 99 | 5489,5490,5491,5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504, # 800 | ||
| 100 | 5505,5506,5507,5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520, # 816 | ||
| 101 | 5521,5522,5523,5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536, # 832 | ||
| 102 | 5537,5538,5539,5540,5541,5542,5543,5544,5545,5546,5547,5548,5549,5550,5551,5552, # 848 | ||
| 103 | 5553,5554,5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568, # 864 | ||
| 104 | 5569,5570,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584, # 880 | ||
| 105 | 5585,5586,5587,5588,5589,5590,5591,5592,5593,5594,5595,5596,5597,5598,5599,5600, # 896 | ||
| 106 | 5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,5615,5616, # 912 | ||
| 107 | 5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631,5632, # 928 | ||
| 108 | 5633,5634,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646,5647,5648, # 944 | ||
| 109 | 5649,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660,5661,5662,5663,5664, # 960 | ||
| 110 | 5665,5666,5667,5668,5669,5670,5671,5672,5673,5674,5675,5676,5677,5678,5679,5680, # 976 | ||
| 111 | 5681,5682,5683,5684,5685,5686,5687,5688,5689,5690,5691,5692,5693,5694,5695,5696, # 992 | ||
| 112 | 5697,5698,5699,5700,5701,5702,5703,5704,5705,5706,5707,5708,5709,5710,5711,5712, # 1008 | ||
| 113 | 5713,5714,5715,5716,5717,5718,5719,5720,5721,5722,5723,5724,5725,5726,5727,5728, # 1024 | ||
| 114 | 5729,5730,5731,5732,5733,5734,5735,5736,5737,5738,5739,5740,5741,5742,5743,5744, # 1040 | ||
| 115 | 5745,5746,5747,5748,5749,5750,5751,5752,5753,5754,5755,5756,5757,5758,5759,5760, # 1056 | ||
| 116 | 5761,5762,5763,5764,5765,5766,5767,5768,5769,5770,5771,5772,5773,5774,5775,5776, # 1072 | ||
| 117 | 5777,5778,5779,5780,5781,5782,5783,5784,5785,5786,5787,5788,5789,5790,5791,5792, # 1088 | ||
| 118 | 5793,5794,5795,5796,5797,5798,5799,5800,5801,5802,5803,5804,5805,5806,5807,5808, # 1104 | ||
| 119 | 5809,5810,5811,5812,5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824, # 1120 | ||
| 120 | 5825,5826,5827,5828,5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840, # 1136 | ||
| 121 | 5841,5842,5843,5844,5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856, # 1152 | ||
| 122 | 5857,5858,5859,5860,5861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872, # 1168 | ||
| 123 | 5873,5874,5875,5876,5877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888, # 1184 | ||
| 124 | 5889,5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904, # 1200 | ||
| 125 | 5905,5906,5907,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920, # 1216 | ||
| 126 | 5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936, # 1232 | ||
| 127 | 5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952, # 1248 | ||
| 128 | 5953,5954,5955,5956,5957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968, # 1264 | ||
| 129 | 5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984, # 1280 | ||
| 130 | 5985,5986,5987,5988,5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000, # 1296 | ||
| 131 | 6001,6002,6003,6004,6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016, # 1312 | ||
| 132 | 6017,6018,6019,6020,6021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032, # 1328 | ||
| 133 | 6033,6034,6035,6036,6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048, # 1344 | ||
| 134 | 6049,6050,6051,6052,6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064, # 1360 | ||
| 135 | 6065,6066,6067,6068,6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080, # 1376 | ||
| 136 | 6081,6082,6083,6084,6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096, # 1392 | ||
| 137 | 6097,6098,6099,6100,6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112, # 1408 | ||
| 138 | 6113,6114,2044,2060,4621, 997,1235, 473,1186,4622, 920,3378,6115,6116, 379,1108, # 1424 | ||
| 139 | 4313,2657,2735,3934,6117,3809, 636,3233, 573,1026,3693,3435,2974,3300,2298,4105, # 1440 | ||
| 140 | 854,2937,2463, 393,2581,2417, 539, 752,1280,2750,2480, 140,1161, 440, 708,1569, # 1456 | ||
| 141 | 665,2497,1746,1291,1523,3000, 164,1603, 847,1331, 537,1997, 486, 508,1693,2418, # 1472 | ||
| 142 | 1970,2227, 878,1220, 299,1030, 969, 652,2751, 624,1137,3301,2619, 65,3302,2045, # 1488 | ||
| 143 | 1761,1859,3120,1930,3694,3516, 663,1767, 852, 835,3695, 269, 767,2826,2339,1305, # 1504 | ||
| 144 | 896,1150, 770,1616,6118, 506,1502,2075,1012,2519, 775,2520,2975,2340,2938,4314, # 1520 | ||
| 145 | 3028,2086,1224,1943,2286,6119,3072,4315,2240,1273,1987,3935,1557, 175, 597, 985, # 1536 | ||
| 146 | 3517,2419,2521,1416,3029, 585, 938,1931,1007,1052,1932,1685,6120,3379,4316,4623, # 1552 | ||
| 147 | 804, 599,3121,1333,2128,2539,1159,1554,2032,3810, 687,2033,2904, 952, 675,1467, # 1568 | ||
| 148 | 3436,6121,2241,1096,1786,2440,1543,1924, 980,1813,2228, 781,2692,1879, 728,1918, # 1584 | ||
| 149 | 3696,4624, 548,1950,4625,1809,1088,1356,3303,2522,1944, 502, 972, 373, 513,2827, # 1600 | ||
| 150 | 586,2377,2391,1003,1976,1631,6122,2464,1084, 648,1776,4626,2141, 324, 962,2012, # 1616 | ||
| 151 | 2177,2076,1384, 742,2178,1448,1173,1810, 222, 102, 301, 445, 125,2420, 662,2498, # 1632 | ||
| 152 | 277, 200,1476,1165,1068, 224,2562,1378,1446, 450,1880, 659, 791, 582,4627,2939, # 1648 | ||
| 153 | 3936,1516,1274, 555,2099,3697,1020,1389,1526,3380,1762,1723,1787,2229, 412,2114, # 1664 | ||
| 154 | 1900,2392,3518, 512,2597, 427,1925,2341,3122,1653,1686,2465,2499, 697, 330, 273, # 1680 | ||
| 155 | 380,2162, 951, 832, 780, 991,1301,3073, 965,2270,3519, 668,2523,2636,1286, 535, # 1696 | ||
| 156 | 1407, 518, 671, 957,2658,2378, 267, 611,2197,3030,6123, 248,2299, 967,1799,2356, # 1712 | ||
| 157 | 850,1418,3437,1876,1256,1480,2828,1718,6124,6125,1755,1664,2405,6126,4628,2879, # 1728 | ||
| 158 | 2829, 499,2179, 676,4629, 557,2329,2214,2090, 325,3234, 464, 811,3001, 992,2342, # 1744 | ||
| 159 | 2481,1232,1469, 303,2242, 466,1070,2163, 603,1777,2091,4630,2752,4631,2714, 322, # 1760 | ||
| 160 | 2659,1964,1768, 481,2188,1463,2330,2857,3600,2092,3031,2421,4632,2318,2070,1849, # 1776 | ||
| 161 | 2598,4633,1302,2254,1668,1701,2422,3811,2905,3032,3123,2046,4106,1763,1694,4634, # 1792 | ||
| 162 | 1604, 943,1724,1454, 917, 868,2215,1169,2940, 552,1145,1800,1228,1823,1955, 316, # 1808 | ||
| 163 | 1080,2510, 361,1807,2830,4107,2660,3381,1346,1423,1134,4108,6127, 541,1263,1229, # 1824 | ||
| 164 | 1148,2540, 545, 465,1833,2880,3438,1901,3074,2482, 816,3937, 713,1788,2500, 122, # 1840 | ||
| 165 | 1575, 195,1451,2501,1111,6128, 859, 374,1225,2243,2483,4317, 390,1033,3439,3075, # 1856 | ||
| 166 | 2524,1687, 266, 793,1440,2599, 946, 779, 802, 507, 897,1081, 528,2189,1292, 711, # 1872 | ||
| 167 | 1866,1725,1167,1640, 753, 398,2661,1053, 246, 348,4318, 137,1024,3440,1600,2077, # 1888 | ||
| 168 | 2129, 825,4319, 698, 238, 521, 187,2300,1157,2423,1641,1605,1464,1610,1097,2541, # 1904 | ||
| 169 | 1260,1436, 759,2255,1814,2150, 705,3235, 409,2563,3304, 561,3033,2005,2564, 726, # 1920 | ||
| 170 | 1956,2343,3698,4109, 949,3812,3813,3520,1669, 653,1379,2525, 881,2198, 632,2256, # 1936 | ||
| 171 | 1027, 778,1074, 733,1957, 514,1481,2466, 554,2180, 702,3938,1606,1017,1398,6129, # 1952 | ||
| 172 | 1380,3521, 921, 993,1313, 594, 449,1489,1617,1166, 768,1426,1360, 495,1794,3601, # 1968 | ||
| 173 | 1177,3602,1170,4320,2344, 476, 425,3167,4635,3168,1424, 401,2662,1171,3382,1998, # 1984 | ||
| 174 | 1089,4110, 477,3169, 474,6130,1909, 596,2831,1842, 494, 693,1051,1028,1207,3076, # 2000 | ||
| 175 | 606,2115, 727,2790,1473,1115, 743,3522, 630, 805,1532,4321,2021, 366,1057, 838, # 2016 | ||
| 176 | 684,1114,2142,4322,2050,1492,1892,1808,2271,3814,2424,1971,1447,1373,3305,1090, # 2032 | ||
| 177 | 1536,3939,3523,3306,1455,2199, 336, 369,2331,1035, 584,2393, 902, 718,2600,6131, # 2048 | ||
| 178 | 2753, 463,2151,1149,1611,2467, 715,1308,3124,1268, 343,1413,3236,1517,1347,2663, # 2064 | ||
| 179 | 2093,3940,2022,1131,1553,2100,2941,1427,3441,2942,1323,2484,6132,1980, 872,2368, # 2080 | ||
| 180 | 2441,2943, 320,2369,2116,1082, 679,1933,3941,2791,3815, 625,1143,2023, 422,2200, # 2096 | ||
| 181 | 3816,6133, 730,1695, 356,2257,1626,2301,2858,2637,1627,1778, 937, 883,2906,2693, # 2112 | ||
| 182 | 3002,1769,1086, 400,1063,1325,3307,2792,4111,3077, 456,2345,1046, 747,6134,1524, # 2128 | ||
| 183 | 884,1094,3383,1474,2164,1059, 974,1688,2181,2258,1047, 345,1665,1187, 358, 875, # 2144 | ||
| 184 | 3170, 305, 660,3524,2190,1334,1135,3171,1540,1649,2542,1527, 927, 968,2793, 885, # 2160 | ||
| 185 | 1972,1850, 482, 500,2638,1218,1109,1085,2543,1654,2034, 876, 78,2287,1482,1277, # 2176 | ||
| 186 | 861,1675,1083,1779, 724,2754, 454, 397,1132,1612,2332, 893, 672,1237, 257,2259, # 2192 | ||
| 187 | 2370, 135,3384, 337,2244, 547, 352, 340, 709,2485,1400, 788,1138,2511, 540, 772, # 2208 | ||
| 188 | 1682,2260,2272,2544,2013,1843,1902,4636,1999,1562,2288,4637,2201,1403,1533, 407, # 2224 | ||
| 189 | 576,3308,1254,2071, 978,3385, 170, 136,1201,3125,2664,3172,2394, 213, 912, 873, # 2240 | ||
| 190 | 3603,1713,2202, 699,3604,3699, 813,3442, 493, 531,1054, 468,2907,1483, 304, 281, # 2256 | ||
| 191 | 4112,1726,1252,2094, 339,2319,2130,2639, 756,1563,2944, 748, 571,2976,1588,2425, # 2272 | ||
| 192 | 2715,1851,1460,2426,1528,1392,1973,3237, 288,3309, 685,3386, 296, 892,2716,2216, # 2288 | ||
| 193 | 1570,2245, 722,1747,2217, 905,3238,1103,6135,1893,1441,1965, 251,1805,2371,3700, # 2304 | ||
| 194 | 2601,1919,1078, 75,2182,1509,1592,1270,2640,4638,2152,6136,3310,3817, 524, 706, # 2320 | ||
| 195 | 1075, 292,3818,1756,2602, 317, 98,3173,3605,3525,1844,2218,3819,2502, 814, 567, # 2336 | ||
| 196 | 385,2908,1534,6137, 534,1642,3239, 797,6138,1670,1529, 953,4323, 188,1071, 538, # 2352 | ||
| 197 | 178, 729,3240,2109,1226,1374,2000,2357,2977, 731,2468,1116,2014,2051,6139,1261, # 2368 | ||
| 198 | 1593, 803,2859,2736,3443, 556, 682, 823,1541,6140,1369,2289,1706,2794, 845, 462, # 2384 | ||
| 199 | 2603,2665,1361, 387, 162,2358,1740, 739,1770,1720,1304,1401,3241,1049, 627,1571, # 2400 | ||
| 200 | 2427,3526,1877,3942,1852,1500, 431,1910,1503, 677, 297,2795, 286,1433,1038,1198, # 2416 | ||
| 201 | 2290,1133,1596,4113,4639,2469,1510,1484,3943,6141,2442, 108, 712,4640,2372, 866, # 2432 | ||
| 202 | 3701,2755,3242,1348, 834,1945,1408,3527,2395,3243,1811, 824, 994,1179,2110,1548, # 2448 | ||
| 203 | 1453, 790,3003, 690,4324,4325,2832,2909,3820,1860,3821, 225,1748, 310, 346,1780, # 2464 | ||
| 204 | 2470, 821,1993,2717,2796, 828, 877,3528,2860,2471,1702,2165,2910,2486,1789, 453, # 2480 | ||
| 205 | 359,2291,1676, 73,1164,1461,1127,3311, 421, 604, 314,1037, 589, 116,2487, 737, # 2496 | ||
| 206 | 837,1180, 111, 244, 735,6142,2261,1861,1362, 986, 523, 418, 581,2666,3822, 103, # 2512 | ||
| 207 | 855, 503,1414,1867,2488,1091, 657,1597, 979, 605,1316,4641,1021,2443,2078,2001, # 2528 | ||
| 208 | 1209, 96, 587,2166,1032, 260,1072,2153, 173, 94, 226,3244, 819,2006,4642,4114, # 2544 | ||
| 209 | 2203, 231,1744, 782, 97,2667, 786,3387, 887, 391, 442,2219,4326,1425,6143,2694, # 2560 | ||
| 210 | 633,1544,1202, 483,2015, 592,2052,1958,2472,1655, 419, 129,4327,3444,3312,1714, # 2576 | ||
| 211 | 1257,3078,4328,1518,1098, 865,1310,1019,1885,1512,1734, 469,2444, 148, 773, 436, # 2592 | ||
| 212 | 1815,1868,1128,1055,4329,1245,2756,3445,2154,1934,1039,4643, 579,1238, 932,2320, # 2608 | ||
| 213 | 353, 205, 801, 115,2428, 944,2321,1881, 399,2565,1211, 678, 766,3944, 335,2101, # 2624 | ||
| 214 | 1459,1781,1402,3945,2737,2131,1010, 844, 981,1326,1013, 550,1816,1545,2620,1335, # 2640 | ||
| 215 | 1008, 371,2881, 936,1419,1613,3529,1456,1395,2273,1834,2604,1317,2738,2503, 416, # 2656 | ||
| 216 | 1643,4330, 806,1126, 229, 591,3946,1314,1981,1576,1837,1666, 347,1790, 977,3313, # 2672 | ||
| 217 | 764,2861,1853, 688,2429,1920,1462, 77, 595, 415,2002,3034, 798,1192,4115,6144, # 2688 | ||
| 218 | 2978,4331,3035,2695,2582,2072,2566, 430,2430,1727, 842,1396,3947,3702, 613, 377, # 2704 | ||
| 219 | 278, 236,1417,3388,3314,3174, 757,1869, 107,3530,6145,1194, 623,2262, 207,1253, # 2720 | ||
| 220 | 2167,3446,3948, 492,1117,1935, 536,1838,2757,1246,4332, 696,2095,2406,1393,1572, # 2736 | ||
| 221 | 3175,1782, 583, 190, 253,1390,2230, 830,3126,3389, 934,3245,1703,1749,2979,1870, # 2752 | ||
| 222 | 2545,1656,2204, 869,2346,4116,3176,1817, 496,1764,4644, 942,1504, 404,1903,1122, # 2768 | ||
| 223 | 1580,3606,2945,1022, 515, 372,1735, 955,2431,3036,6146,2797,1110,2302,2798, 617, # 2784 | ||
| 224 | 6147, 441, 762,1771,3447,3607,3608,1904, 840,3037, 86, 939,1385, 572,1370,2445, # 2800 | ||
| 225 | 1336, 114,3703, 898, 294, 203,3315, 703,1583,2274, 429, 961,4333,1854,1951,3390, # 2816 | ||
| 226 | 2373,3704,4334,1318,1381, 966,1911,2322,1006,1155, 309, 989, 458,2718,1795,1372, # 2832 | ||
| 227 | 1203, 252,1689,1363,3177, 517,1936, 168,1490, 562, 193,3823,1042,4117,1835, 551, # 2848 | ||
| 228 | 470,4645, 395, 489,3448,1871,1465,2583,2641, 417,1493, 279,1295, 511,1236,1119, # 2864 | ||
| 229 | 72,1231,1982,1812,3004, 871,1564, 984,3449,1667,2696,2096,4646,2347,2833,1673, # 2880 | ||
| 230 | 3609, 695,3246,2668, 807,1183,4647, 890, 388,2333,1801,1457,2911,1765,1477,1031, # 2896 | ||
| 231 | 3316,3317,1278,3391,2799,2292,2526, 163,3450,4335,2669,1404,1802,6148,2323,2407, # 2912 | ||
| 232 | 1584,1728,1494,1824,1269, 298, 909,3318,1034,1632, 375, 776,1683,2061, 291, 210, # 2928 | ||
| 233 | 1123, 809,1249,1002,2642,3038, 206,1011,2132, 144, 975, 882,1565, 342, 667, 754, # 2944 | ||
| 234 | 1442,2143,1299,2303,2062, 447, 626,2205,1221,2739,2912,1144,1214,2206,2584, 760, # 2960 | ||
| 235 | 1715, 614, 950,1281,2670,2621, 810, 577,1287,2546,4648, 242,2168, 250,2643, 691, # 2976 | ||
| 236 | 123,2644, 647, 313,1029, 689,1357,2946,1650, 216, 771,1339,1306, 808,2063, 549, # 2992 | ||
| 237 | 913,1371,2913,2914,6149,1466,1092,1174,1196,1311,2605,2396,1783,1796,3079, 406, # 3008 | ||
| 238 | 2671,2117,3949,4649, 487,1825,2220,6150,2915, 448,2348,1073,6151,2397,1707, 130, # 3024 | ||
| 239 | 900,1598, 329, 176,1959,2527,1620,6152,2275,4336,3319,1983,2191,3705,3610,2155, # 3040 | ||
| 240 | 3706,1912,1513,1614,6153,1988, 646, 392,2304,1589,3320,3039,1826,1239,1352,1340, # 3056 | ||
| 241 | 2916, 505,2567,1709,1437,2408,2547, 906,6154,2672, 384,1458,1594,1100,1329, 710, # 3072 | ||
| 242 | 423,3531,2064,2231,2622,1989,2673,1087,1882, 333, 841,3005,1296,2882,2379, 580, # 3088 | ||
| 243 | 1937,1827,1293,2585, 601, 574, 249,1772,4118,2079,1120, 645, 901,1176,1690, 795, # 3104 | ||
| 244 | 2207, 478,1434, 516,1190,1530, 761,2080, 930,1264, 355, 435,1552, 644,1791, 987, # 3120 | ||
| 245 | 220,1364,1163,1121,1538, 306,2169,1327,1222, 546,2645, 218, 241, 610,1704,3321, # 3136 | ||
| 246 | 1984,1839,1966,2528, 451,6155,2586,3707,2568, 907,3178, 254,2947, 186,1845,4650, # 3152 | ||
| 247 | 745, 432,1757, 428,1633, 888,2246,2221,2489,3611,2118,1258,1265, 956,3127,1784, # 3168 | ||
| 248 | 4337,2490, 319, 510, 119, 457,3612, 274,2035,2007,4651,1409,3128, 970,2758, 590, # 3184 | ||
| 249 | 2800, 661,2247,4652,2008,3950,1420,1549,3080,3322,3951,1651,1375,2111, 485,2491, # 3200 | ||
| 250 | 1429,1156,6156,2548,2183,1495, 831,1840,2529,2446, 501,1657, 307,1894,3247,1341, # 3216 | ||
| 251 | 666, 899,2156,1539,2549,1559, 886, 349,2208,3081,2305,1736,3824,2170,2759,1014, # 3232 | ||
| 252 | 1913,1386, 542,1397,2948, 490, 368, 716, 362, 159, 282,2569,1129,1658,1288,1750, # 3248 | ||
| 253 | 2674, 276, 649,2016, 751,1496, 658,1818,1284,1862,2209,2087,2512,3451, 622,2834, # 3264 | ||
| 254 | 376, 117,1060,2053,1208,1721,1101,1443, 247,1250,3179,1792,3952,2760,2398,3953, # 3280 | ||
| 255 | 6157,2144,3708, 446,2432,1151,2570,3452,2447,2761,2835,1210,2448,3082, 424,2222, # 3296 | ||
| 256 | 1251,2449,2119,2836, 504,1581,4338, 602, 817, 857,3825,2349,2306, 357,3826,1470, # 3312 | ||
| 257 | 1883,2883, 255, 958, 929,2917,3248, 302,4653,1050,1271,1751,2307,1952,1430,2697, # 3328 | ||
| 258 | 2719,2359, 354,3180, 777, 158,2036,4339,1659,4340,4654,2308,2949,2248,1146,2232, # 3344 | ||
| 259 | 3532,2720,1696,2623,3827,6158,3129,1550,2698,1485,1297,1428, 637, 931,2721,2145, # 3360 | ||
| 260 | 914,2550,2587, 81,2450, 612, 827,2646,1242,4655,1118,2884, 472,1855,3181,3533, # 3376 | ||
| 261 | 3534, 569,1353,2699,1244,1758,2588,4119,2009,2762,2171,3709,1312,1531,6159,1152, # 3392 | ||
| 262 | 1938, 134,1830, 471,3710,2276,1112,1535,3323,3453,3535, 982,1337,2950, 488, 826, # 3408 | ||
| 263 | 674,1058,1628,4120,2017, 522,2399, 211, 568,1367,3454, 350, 293,1872,1139,3249, # 3424 | ||
| 264 | 1399,1946,3006,1300,2360,3324, 588, 736,6160,2606, 744, 669,3536,3828,6161,1358, # 3440 | ||
| 265 | 199, 723, 848, 933, 851,1939,1505,1514,1338,1618,1831,4656,1634,3613, 443,2740, # 3456 | ||
| 266 | 3829, 717,1947, 491,1914,6162,2551,1542,4121,1025,6163,1099,1223, 198,3040,2722, # 3472 | ||
| 267 | 370, 410,1905,2589, 998,1248,3182,2380, 519,1449,4122,1710, 947, 928,1153,4341, # 3488 | ||
| 268 | 2277, 344,2624,1511, 615, 105, 161,1212,1076,1960,3130,2054,1926,1175,1906,2473, # 3504 | ||
| 269 | 414,1873,2801,6164,2309, 315,1319,3325, 318,2018,2146,2157, 963, 631, 223,4342, # 3520 | ||
| 270 | 4343,2675, 479,3711,1197,2625,3712,2676,2361,6165,4344,4123,6166,2451,3183,1886, # 3536 | ||
| 271 | 2184,1674,1330,1711,1635,1506, 799, 219,3250,3083,3954,1677,3713,3326,2081,3614, # 3552 | ||
| 272 | 1652,2073,4657,1147,3041,1752, 643,1961, 147,1974,3955,6167,1716,2037, 918,3007, # 3568 | ||
| 273 | 1994, 120,1537, 118, 609,3184,4345, 740,3455,1219, 332,1615,3830,6168,1621,2980, # 3584 | ||
| 274 | 1582, 783, 212, 553,2350,3714,1349,2433,2082,4124, 889,6169,2310,1275,1410, 973, # 3600 | ||
| 275 | 166,1320,3456,1797,1215,3185,2885,1846,2590,2763,4658, 629, 822,3008, 763, 940, # 3616 | ||
| 276 | 1990,2862, 439,2409,1566,1240,1622, 926,1282,1907,2764, 654,2210,1607, 327,1130, # 3632 | ||
| 277 | 3956,1678,1623,6170,2434,2192, 686, 608,3831,3715, 903,3957,3042,6171,2741,1522, # 3648 | ||
| 278 | 1915,1105,1555,2552,1359, 323,3251,4346,3457, 738,1354,2553,2311,2334,1828,2003, # 3664 | ||
| 279 | 3832,1753,2351,1227,6172,1887,4125,1478,6173,2410,1874,1712,1847, 520,1204,2607, # 3680 | ||
| 280 | 264,4659, 836,2677,2102, 600,4660,3833,2278,3084,6174,4347,3615,1342, 640, 532, # 3696 | ||
| 281 | 543,2608,1888,2400,2591,1009,4348,1497, 341,1737,3616,2723,1394, 529,3252,1321, # 3712 | ||
| 282 | 983,4661,1515,2120, 971,2592, 924, 287,1662,3186,4349,2700,4350,1519, 908,1948, # 3728 | ||
| 283 | 2452, 156, 796,1629,1486,2223,2055, 694,4126,1259,1036,3392,1213,2249,2742,1889, # 3744 | ||
| 284 | 1230,3958,1015, 910, 408, 559,3617,4662, 746, 725, 935,4663,3959,3009,1289, 563, # 3760 | ||
| 285 | 867,4664,3960,1567,2981,2038,2626, 988,2263,2381,4351, 143,2374, 704,1895,6175, # 3776 | ||
| 286 | 1188,3716,2088, 673,3085,2362,4352, 484,1608,1921,2765,2918, 215, 904,3618,3537, # 3792 | ||
| 287 | 894, 509, 976,3043,2701,3961,4353,2837,2982, 498,6176,6177,1102,3538,1332,3393, # 3808 | ||
| 288 | 1487,1636,1637, 233, 245,3962, 383, 650, 995,3044, 460,1520,1206,2352, 749,3327, # 3824 | ||
| 289 | 530, 700, 389,1438,1560,1773,3963,2264, 719,2951,2724,3834, 870,1832,1644,1000, # 3840 | ||
| 290 | 839,2474,3717, 197,1630,3394, 365,2886,3964,1285,2133, 734, 922, 818,1106, 732, # 3856 | ||
| 291 | 480,2083,1774,3458, 923,2279,1350, 221,3086, 85,2233,2234,3835,1585,3010,2147, # 3872 | ||
| 292 | 1387,1705,2382,1619,2475, 133, 239,2802,1991,1016,2084,2383, 411,2838,1113, 651, # 3888 | ||
| 293 | 1985,1160,3328, 990,1863,3087,1048,1276,2647, 265,2627,1599,3253,2056, 150, 638, # 3904 | ||
| 294 | 2019, 656, 853, 326,1479, 680,1439,4354,1001,1759, 413,3459,3395,2492,1431, 459, # 3920 | ||
| 295 | 4355,1125,3329,2265,1953,1450,2065,2863, 849, 351,2678,3131,3254,3255,1104,1577, # 3936 | ||
| 296 | 227,1351,1645,2453,2193,1421,2887, 812,2121, 634, 95,2435, 201,2312,4665,1646, # 3952 | ||
| 297 | 1671,2743,1601,2554,2702,2648,2280,1315,1366,2089,3132,1573,3718,3965,1729,1189, # 3968 | ||
| 298 | 328,2679,1077,1940,1136, 558,1283, 964,1195, 621,2074,1199,1743,3460,3619,1896, # 3984 | ||
| 299 | 1916,1890,3836,2952,1154,2112,1064, 862, 378,3011,2066,2113,2803,1568,2839,6178, # 4000 | ||
| 300 | 3088,2919,1941,1660,2004,1992,2194, 142, 707,1590,1708,1624,1922,1023,1836,1233, # 4016 | ||
| 301 | 1004,2313, 789, 741,3620,6179,1609,2411,1200,4127,3719,3720,4666,2057,3721, 593, # 4032 | ||
| 302 | 2840, 367,2920,1878,6180,3461,1521, 628,1168, 692,2211,2649, 300, 720,2067,2571, # 4048 | ||
| 303 | 2953,3396, 959,2504,3966,3539,3462,1977, 701,6181, 954,1043, 800, 681, 183,3722, # 4064 | ||
| 304 | 1803,1730,3540,4128,2103, 815,2314, 174, 467, 230,2454,1093,2134, 755,3541,3397, # 4080 | ||
| 305 | 1141,1162,6182,1738,2039, 270,3256,2513,1005,1647,2185,3837, 858,1679,1897,1719, # 4096 | ||
| 306 | 2954,2324,1806, 402, 670, 167,4129,1498,2158,2104, 750,6183, 915, 189,1680,1551, # 4112 | ||
| 307 | 455,4356,1501,2455, 405,1095,2955, 338,1586,1266,1819, 570, 641,1324, 237,1556, # 4128 | ||
| 308 | 2650,1388,3723,6184,1368,2384,1343,1978,3089,2436, 879,3724, 792,1191, 758,3012, # 4144 | ||
| 309 | 1411,2135,1322,4357, 240,4667,1848,3725,1574,6185, 420,3045,1546,1391, 714,4358, # 4160 | ||
| 310 | 1967, 941,1864, 863, 664, 426, 560,1731,2680,1785,2864,1949,2363, 403,3330,1415, # 4176 | ||
| 311 | 1279,2136,1697,2335, 204, 721,2097,3838, 90,6186,2085,2505, 191,3967, 124,2148, # 4192 | ||
| 312 | 1376,1798,1178,1107,1898,1405, 860,4359,1243,1272,2375,2983,1558,2456,1638, 113, # 4208 | ||
| 313 | 3621, 578,1923,2609, 880, 386,4130, 784,2186,2266,1422,2956,2172,1722, 497, 263, # 4224 | ||
| 314 | 2514,1267,2412,2610, 177,2703,3542, 774,1927,1344, 616,1432,1595,1018, 172,4360, # 4240 | ||
| 315 | 2325, 911,4361, 438,1468,3622, 794,3968,2024,2173,1681,1829,2957, 945, 895,3090, # 4256 | ||
| 316 | 575,2212,2476, 475,2401,2681, 785,2744,1745,2293,2555,1975,3133,2865, 394,4668, # 4272 | ||
| 317 | 3839, 635,4131, 639, 202,1507,2195,2766,1345,1435,2572,3726,1908,1184,1181,2457, # 4288 | ||
| 318 | 3727,3134,4362, 843,2611, 437, 916,4669, 234, 769,1884,3046,3047,3623, 833,6187, # 4304 | ||
| 319 | 1639,2250,2402,1355,1185,2010,2047, 999, 525,1732,1290,1488,2612, 948,1578,3728, # 4320 | ||
| 320 | 2413,2477,1216,2725,2159, 334,3840,1328,3624,2921,1525,4132, 564,1056, 891,4363, # 4336 | ||
| 321 | 1444,1698,2385,2251,3729,1365,2281,2235,1717,6188, 864,3841,2515, 444, 527,2767, # 4352 | ||
| 322 | 2922,3625, 544, 461,6189, 566, 209,2437,3398,2098,1065,2068,3331,3626,3257,2137, # 4368 #last 512 | ||
| 323 | ) | ||
| 324 | |||
| 325 | |||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/jpcntx.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/jpcntx.py new file mode 100644 index 0000000..624d534 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/jpcntx.py | |||
| @@ -0,0 +1,233 @@ | |||
| 1 | ######################## BEGIN LICENSE BLOCK ######################## | ||
| 2 | # The Original Code is Mozilla Communicator client code. | ||
| 3 | # | ||
| 4 | # The Initial Developer of the Original Code is | ||
| 5 | # Netscape Communications Corporation. | ||
| 6 | # Portions created by the Initial Developer are Copyright (C) 1998 | ||
| 7 | # the Initial Developer. All Rights Reserved. | ||
| 8 | # | ||
| 9 | # Contributor(s): | ||
| 10 | # Mark Pilgrim - port to Python | ||
| 11 | # | ||
| 12 | # This library is free software; you can redistribute it and/or | ||
| 13 | # modify it under the terms of the GNU Lesser General Public | ||
| 14 | # License as published by the Free Software Foundation; either | ||
| 15 | # version 2.1 of the License, or (at your option) any later version. | ||
| 16 | # | ||
| 17 | # This library is distributed in the hope that it will be useful, | ||
| 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
| 20 | # Lesser General Public License for more details. | ||
| 21 | # | ||
| 22 | # You should have received a copy of the GNU Lesser General Public | ||
| 23 | # License along with this library; if not, write to the Free Software | ||
| 24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA | ||
| 25 | # 02110-1301 USA | ||
| 26 | ######################### END LICENSE BLOCK ######################### | ||
| 27 | |||
| 28 | |||
| 29 | # This is the hiragana 2-char sequence table; the number in each cell represents the bigram's frequency category | ||
| 30 | jp2CharContext = ( | ||
| 31 | (0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1), | ||
| 32 | (2,4,0,4,0,3,0,4,0,3,4,4,4,2,4,3,3,4,3,2,3,3,4,2,3,3,3,2,4,1,4,3,3,1,5,4,3,4,3,4,3,5,3,0,3,5,4,2,0,3,1,0,3,3,0,3,3,0,1,1,0,4,3,0,3,3,0,4,0,2,0,3,5,5,5,5,4,0,4,1,0,3,4), | ||
| 33 | (0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2), | ||
| 34 | (0,4,0,5,0,5,0,4,0,4,5,4,4,3,5,3,5,1,5,3,4,3,4,4,3,4,3,3,4,3,5,4,4,3,5,5,3,5,5,5,3,5,5,3,4,5,5,3,1,3,2,0,3,4,0,4,2,0,4,2,1,5,3,2,3,5,0,4,0,2,0,5,4,4,5,4,5,0,4,0,0,4,4), | ||
| 35 | (0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), | ||
| 36 | (0,3,0,4,0,3,0,3,0,4,5,4,3,3,3,3,4,3,5,4,4,3,5,4,4,3,4,3,4,4,4,4,5,3,4,4,3,4,5,5,4,5,5,1,4,5,4,3,0,3,3,1,3,3,0,4,4,0,3,3,1,5,3,3,3,5,0,4,0,3,0,4,4,3,4,3,3,0,4,1,1,3,4), | ||
| 37 | (0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), | ||
| 38 | (0,4,0,3,0,3,0,4,0,3,4,4,3,2,2,1,2,1,3,1,3,3,3,3,3,4,3,1,3,3,5,3,3,0,4,3,0,5,4,3,3,5,4,4,3,4,4,5,0,1,2,0,1,2,0,2,2,0,1,0,0,5,2,2,1,4,0,3,0,1,0,4,4,3,5,4,3,0,2,1,0,4,3), | ||
| 39 | (0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), | ||
| 40 | (0,3,0,5,0,4,0,2,1,4,4,2,4,1,4,2,4,2,4,3,3,3,4,3,3,3,3,1,4,2,3,3,3,1,4,4,1,1,1,4,3,3,2,0,2,4,3,2,0,3,3,0,3,1,1,0,0,0,3,3,0,4,2,2,3,4,0,4,0,3,0,4,4,5,3,4,4,0,3,0,0,1,4), | ||
| 41 | (1,4,0,4,0,4,0,4,0,3,5,4,4,3,4,3,5,4,3,3,4,3,5,4,4,4,4,3,4,2,4,3,3,1,5,4,3,2,4,5,4,5,5,4,4,5,4,4,0,3,2,2,3,3,0,4,3,1,3,2,1,4,3,3,4,5,0,3,0,2,0,4,5,5,4,5,4,0,4,0,0,5,4), | ||
| 42 | (0,5,0,5,0,4,0,3,0,4,4,3,4,3,3,3,4,0,4,4,4,3,4,3,4,3,3,1,4,2,4,3,4,0,5,4,1,4,5,4,4,5,3,2,4,3,4,3,2,4,1,3,3,3,2,3,2,0,4,3,3,4,3,3,3,4,0,4,0,3,0,4,5,4,4,4,3,0,4,1,0,1,3), | ||
| 43 | (0,3,1,4,0,3,0,2,0,3,4,4,3,1,4,2,3,3,4,3,4,3,4,3,4,4,3,2,3,1,5,4,4,1,4,4,3,5,4,4,3,5,5,4,3,4,4,3,1,2,3,1,2,2,0,3,2,0,3,1,0,5,3,3,3,4,3,3,3,3,4,4,4,4,5,4,2,0,3,3,2,4,3), | ||
| 44 | (0,2,0,3,0,1,0,1,0,0,3,2,0,0,2,0,1,0,2,1,3,3,3,1,2,3,1,0,1,0,4,2,1,1,3,3,0,4,3,3,1,4,3,3,0,3,3,2,0,0,0,0,1,0,0,2,0,0,0,0,0,4,1,0,2,3,2,2,2,1,3,3,3,4,4,3,2,0,3,1,0,3,3), | ||
| 45 | (0,4,0,4,0,3,0,3,0,4,4,4,3,3,3,3,3,3,4,3,4,2,4,3,4,3,3,2,4,3,4,5,4,1,4,5,3,5,4,5,3,5,4,0,3,5,5,3,1,3,3,2,2,3,0,3,4,1,3,3,2,4,3,3,3,4,0,4,0,3,0,4,5,4,4,5,3,0,4,1,0,3,4), | ||
| 46 | (0,2,0,3,0,3,0,0,0,2,2,2,1,0,1,0,0,0,3,0,3,0,3,0,1,3,1,0,3,1,3,3,3,1,3,3,3,0,1,3,1,3,4,0,0,3,1,1,0,3,2,0,0,0,0,1,3,0,1,0,0,3,3,2,0,3,0,0,0,0,0,3,4,3,4,3,3,0,3,0,0,2,3), | ||
| 47 | (2,3,0,3,0,2,0,1,0,3,3,4,3,1,3,1,1,1,3,1,4,3,4,3,3,3,0,0,3,1,5,4,3,1,4,3,2,5,5,4,4,4,4,3,3,4,4,4,0,2,1,1,3,2,0,1,2,0,0,1,0,4,1,3,3,3,0,3,0,1,0,4,4,4,5,5,3,0,2,0,0,4,4), | ||
| 48 | (0,2,0,1,0,3,1,3,0,2,3,3,3,0,3,1,0,0,3,0,3,2,3,1,3,2,1,1,0,0,4,2,1,0,2,3,1,4,3,2,0,4,4,3,1,3,1,3,0,1,0,0,1,0,0,0,1,0,0,0,0,4,1,1,1,2,0,3,0,0,0,3,4,2,4,3,2,0,1,0,0,3,3), | ||
| 49 | (0,1,0,4,0,5,0,4,0,2,4,4,2,3,3,2,3,3,5,3,3,3,4,3,4,2,3,0,4,3,3,3,4,1,4,3,2,1,5,5,3,4,5,1,3,5,4,2,0,3,3,0,1,3,0,4,2,0,1,3,1,4,3,3,3,3,0,3,0,1,0,3,4,4,4,5,5,0,3,0,1,4,5), | ||
| 50 | (0,2,0,3,0,3,0,0,0,2,3,1,3,0,4,0,1,1,3,0,3,4,3,2,3,1,0,3,3,2,3,1,3,0,2,3,0,2,1,4,1,2,2,0,0,3,3,0,0,2,0,0,0,1,0,0,0,0,2,2,0,3,2,1,3,3,0,2,0,2,0,0,3,3,1,2,4,0,3,0,2,2,3), | ||
| 51 | (2,4,0,5,0,4,0,4,0,2,4,4,4,3,4,3,3,3,1,2,4,3,4,3,4,4,5,0,3,3,3,3,2,0,4,3,1,4,3,4,1,4,4,3,3,4,4,3,1,2,3,0,4,2,0,4,1,0,3,3,0,4,3,3,3,4,0,4,0,2,0,3,5,3,4,5,2,0,3,0,0,4,5), | ||
| 52 | (0,3,0,4,0,1,0,1,0,1,3,2,2,1,3,0,3,0,2,0,2,0,3,0,2,0,0,0,1,0,1,1,0,0,3,1,0,0,0,4,0,3,1,0,2,1,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,4,2,2,3,1,0,3,0,0,0,1,4,4,4,3,0,0,4,0,0,1,4), | ||
| 53 | (1,4,1,5,0,3,0,3,0,4,5,4,4,3,5,3,3,4,4,3,4,1,3,3,3,3,2,1,4,1,5,4,3,1,4,4,3,5,4,4,3,5,4,3,3,4,4,4,0,3,3,1,2,3,0,3,1,0,3,3,0,5,4,4,4,4,4,4,3,3,5,4,4,3,3,5,4,0,3,2,0,4,4), | ||
| 54 | (0,2,0,3,0,1,0,0,0,1,3,3,3,2,4,1,3,0,3,1,3,0,2,2,1,1,0,0,2,0,4,3,1,0,4,3,0,4,4,4,1,4,3,1,1,3,3,1,0,2,0,0,1,3,0,0,0,0,2,0,0,4,3,2,4,3,5,4,3,3,3,4,3,3,4,3,3,0,2,1,0,3,3), | ||
| 55 | (0,2,0,4,0,3,0,2,0,2,5,5,3,4,4,4,4,1,4,3,3,0,4,3,4,3,1,3,3,2,4,3,0,3,4,3,0,3,4,4,2,4,4,0,4,5,3,3,2,2,1,1,1,2,0,1,5,0,3,3,2,4,3,3,3,4,0,3,0,2,0,4,4,3,5,5,0,0,3,0,2,3,3), | ||
| 56 | (0,3,0,4,0,3,0,1,0,3,4,3,3,1,3,3,3,0,3,1,3,0,4,3,3,1,1,0,3,0,3,3,0,0,4,4,0,1,5,4,3,3,5,0,3,3,4,3,0,2,0,1,1,1,0,1,3,0,1,2,1,3,3,2,3,3,0,3,0,1,0,1,3,3,4,4,1,0,1,2,2,1,3), | ||
| 57 | (0,1,0,4,0,4,0,3,0,1,3,3,3,2,3,1,1,0,3,0,3,3,4,3,2,4,2,0,1,0,4,3,2,0,4,3,0,5,3,3,2,4,4,4,3,3,3,4,0,1,3,0,0,1,0,0,1,0,0,0,0,4,2,3,3,3,0,3,0,0,0,4,4,4,5,3,2,0,3,3,0,3,5), | ||
| 58 | (0,2,0,3,0,0,0,3,0,1,3,0,2,0,0,0,1,0,3,1,1,3,3,0,0,3,0,0,3,0,2,3,1,0,3,1,0,3,3,2,0,4,2,2,0,2,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,2,1,2,0,1,0,1,0,0,0,1,3,1,2,0,0,0,1,0,0,1,4), | ||
| 59 | (0,3,0,3,0,5,0,1,0,2,4,3,1,3,3,2,1,1,5,2,1,0,5,1,2,0,0,0,3,3,2,2,3,2,4,3,0,0,3,3,1,3,3,0,2,5,3,4,0,3,3,0,1,2,0,2,2,0,3,2,0,2,2,3,3,3,0,2,0,1,0,3,4,4,2,5,4,0,3,0,0,3,5), | ||
| 60 | (0,3,0,3,0,3,0,1,0,3,3,3,3,0,3,0,2,0,2,1,1,0,2,0,1,0,0,0,2,1,0,0,1,0,3,2,0,0,3,3,1,2,3,1,0,3,3,0,0,1,0,0,0,0,0,2,0,0,0,0,0,2,3,1,2,3,0,3,0,1,0,3,2,1,0,4,3,0,1,1,0,3,3), | ||
| 61 | (0,4,0,5,0,3,0,3,0,4,5,5,4,3,5,3,4,3,5,3,3,2,5,3,4,4,4,3,4,3,4,5,5,3,4,4,3,4,4,5,4,4,4,3,4,5,5,4,2,3,4,2,3,4,0,3,3,1,4,3,2,4,3,3,5,5,0,3,0,3,0,5,5,5,5,4,4,0,4,0,1,4,4), | ||
| 62 | (0,4,0,4,0,3,0,3,0,3,5,4,4,2,3,2,5,1,3,2,5,1,4,2,3,2,3,3,4,3,3,3,3,2,5,4,1,3,3,5,3,4,4,0,4,4,3,1,1,3,1,0,2,3,0,2,3,0,3,0,0,4,3,1,3,4,0,3,0,2,0,4,4,4,3,4,5,0,4,0,0,3,4), | ||
| 63 | (0,3,0,3,0,3,1,2,0,3,4,4,3,3,3,0,2,2,4,3,3,1,3,3,3,1,1,0,3,1,4,3,2,3,4,4,2,4,4,4,3,4,4,3,2,4,4,3,1,3,3,1,3,3,0,4,1,0,2,2,1,4,3,2,3,3,5,4,3,3,5,4,4,3,3,0,4,0,3,2,2,4,4), | ||
| 64 | (0,2,0,1,0,0,0,0,0,1,2,1,3,0,0,0,0,0,2,0,1,2,1,0,0,1,0,0,0,0,3,0,0,1,0,1,1,3,1,0,0,0,1,1,0,1,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,1,2,2,0,3,4,0,0,0,1,1,0,0,1,0,0,0,0,0,1,1), | ||
| 65 | (0,1,0,0,0,1,0,0,0,0,4,0,4,1,4,0,3,0,4,0,3,0,4,0,3,0,3,0,4,1,5,1,4,0,0,3,0,5,0,5,2,0,1,0,0,0,2,1,4,0,1,3,0,0,3,0,0,3,1,1,4,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0), | ||
| 66 | (1,4,0,5,0,3,0,2,0,3,5,4,4,3,4,3,5,3,4,3,3,0,4,3,3,3,3,3,3,2,4,4,3,1,3,4,4,5,4,4,3,4,4,1,3,5,4,3,3,3,1,2,2,3,3,1,3,1,3,3,3,5,3,3,4,5,0,3,0,3,0,3,4,3,4,4,3,0,3,0,2,4,3), | ||
| 67 | (0,1,0,4,0,0,0,0,0,1,4,0,4,1,4,2,4,0,3,0,1,0,1,0,0,0,0,0,2,0,3,1,1,1,0,3,0,0,0,1,2,1,0,0,1,1,1,1,0,1,0,0,0,1,0,0,3,0,0,0,0,3,2,0,2,2,0,1,0,0,0,2,3,2,3,3,0,0,0,0,2,1,0), | ||
| 68 | (0,5,1,5,0,3,0,3,0,5,4,4,5,1,5,3,3,0,4,3,4,3,5,3,4,3,3,2,4,3,4,3,3,0,3,3,1,4,4,3,4,4,4,3,4,5,5,3,2,3,1,1,3,3,1,3,1,1,3,3,2,4,5,3,3,5,0,4,0,3,0,4,4,3,5,3,3,0,3,4,0,4,3), | ||
| 69 | (0,5,0,5,0,3,0,2,0,4,4,3,5,2,4,3,3,3,4,4,4,3,5,3,5,3,3,1,4,0,4,3,3,0,3,3,0,4,4,4,4,5,4,3,3,5,5,3,2,3,1,2,3,2,0,1,0,0,3,2,2,4,4,3,1,5,0,4,0,3,0,4,3,1,3,2,1,0,3,3,0,3,3), | ||
| 70 | (0,4,0,5,0,5,0,4,0,4,5,5,5,3,4,3,3,2,5,4,4,3,5,3,5,3,4,0,4,3,4,4,3,2,4,4,3,4,5,4,4,5,5,0,3,5,5,4,1,3,3,2,3,3,1,3,1,0,4,3,1,4,4,3,4,5,0,4,0,2,0,4,3,4,4,3,3,0,4,0,0,5,5), | ||
| 71 | (0,4,0,4,0,5,0,1,1,3,3,4,4,3,4,1,3,0,5,1,3,0,3,1,3,1,1,0,3,0,3,3,4,0,4,3,0,4,4,4,3,4,4,0,3,5,4,1,0,3,0,0,2,3,0,3,1,0,3,1,0,3,2,1,3,5,0,3,0,1,0,3,2,3,3,4,4,0,2,2,0,4,4), | ||
| 72 | (2,4,0,5,0,4,0,3,0,4,5,5,4,3,5,3,5,3,5,3,5,2,5,3,4,3,3,4,3,4,5,3,2,1,5,4,3,2,3,4,5,3,4,1,2,5,4,3,0,3,3,0,3,2,0,2,3,0,4,1,0,3,4,3,3,5,0,3,0,1,0,4,5,5,5,4,3,0,4,2,0,3,5), | ||
| 73 | (0,5,0,4,0,4,0,2,0,5,4,3,4,3,4,3,3,3,4,3,4,2,5,3,5,3,4,1,4,3,4,4,4,0,3,5,0,4,4,4,4,5,3,1,3,4,5,3,3,3,3,3,3,3,0,2,2,0,3,3,2,4,3,3,3,5,3,4,1,3,3,5,3,2,0,0,0,0,4,3,1,3,3), | ||
| 74 | (0,1,0,3,0,3,0,1,0,1,3,3,3,2,3,3,3,0,3,0,0,0,3,1,3,0,0,0,2,2,2,3,0,0,3,2,0,1,2,4,1,3,3,0,0,3,3,3,0,1,0,0,2,1,0,0,3,0,3,1,0,3,0,0,1,3,0,2,0,1,0,3,3,1,3,3,0,0,1,1,0,3,3), | ||
| 75 | (0,2,0,3,0,2,1,4,0,2,2,3,1,1,3,1,1,0,2,0,3,1,2,3,1,3,0,0,1,0,4,3,2,3,3,3,1,4,2,3,3,3,3,1,0,3,1,4,0,1,1,0,1,2,0,1,1,0,1,1,0,3,1,3,2,2,0,1,0,0,0,2,3,3,3,1,0,0,0,0,0,2,3), | ||
| 76 | (0,5,0,4,0,5,0,2,0,4,5,5,3,3,4,3,3,1,5,4,4,2,4,4,4,3,4,2,4,3,5,5,4,3,3,4,3,3,5,5,4,5,5,1,3,4,5,3,1,4,3,1,3,3,0,3,3,1,4,3,1,4,5,3,3,5,0,4,0,3,0,5,3,3,1,4,3,0,4,0,1,5,3), | ||
| 77 | (0,5,0,5,0,4,0,2,0,4,4,3,4,3,3,3,3,3,5,4,4,4,4,4,4,5,3,3,5,2,4,4,4,3,4,4,3,3,4,4,5,5,3,3,4,3,4,3,3,4,3,3,3,3,1,2,2,1,4,3,3,5,4,4,3,4,0,4,0,3,0,4,4,4,4,4,1,0,4,2,0,2,4), | ||
| 78 | (0,4,0,4,0,3,0,1,0,3,5,2,3,0,3,0,2,1,4,2,3,3,4,1,4,3,3,2,4,1,3,3,3,0,3,3,0,0,3,3,3,5,3,3,3,3,3,2,0,2,0,0,2,0,0,2,0,0,1,0,0,3,1,2,2,3,0,3,0,2,0,4,4,3,3,4,1,0,3,0,0,2,4), | ||
| 79 | (0,0,0,4,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,1,0,2,0,1,0,0,0,0,0,3,1,3,0,3,2,0,0,0,1,0,3,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,4,0,2,0,0,0,0,0,0,2), | ||
| 80 | (0,2,1,3,0,2,0,2,0,3,3,3,3,1,3,1,3,3,3,3,3,3,4,2,2,1,2,1,4,0,4,3,1,3,3,3,2,4,3,5,4,3,3,3,3,3,3,3,0,1,3,0,2,0,0,1,0,0,1,0,0,4,2,0,2,3,0,3,3,0,3,3,4,2,3,1,4,0,1,2,0,2,3), | ||
| 81 | (0,3,0,3,0,1,0,3,0,2,3,3,3,0,3,1,2,0,3,3,2,3,3,2,3,2,3,1,3,0,4,3,2,0,3,3,1,4,3,3,2,3,4,3,1,3,3,1,1,0,1,1,0,1,0,1,0,1,0,0,0,4,1,1,0,3,0,3,1,0,2,3,3,3,3,3,1,0,0,2,0,3,3), | ||
| 82 | (0,0,0,0,0,0,0,0,0,0,3,0,2,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,3,0,3,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,2,0,2,3,0,0,0,0,0,0,0,0,3), | ||
| 83 | (0,2,0,3,1,3,0,3,0,2,3,3,3,1,3,1,3,1,3,1,3,3,3,1,3,0,2,3,1,1,4,3,3,2,3,3,1,2,2,4,1,3,3,0,1,4,2,3,0,1,3,0,3,0,0,1,3,0,2,0,0,3,3,2,1,3,0,3,0,2,0,3,4,4,4,3,1,0,3,0,0,3,3), | ||
| 84 | (0,2,0,1,0,2,0,0,0,1,3,2,2,1,3,0,1,1,3,0,3,2,3,1,2,0,2,0,1,1,3,3,3,0,3,3,1,1,2,3,2,3,3,1,2,3,2,0,0,1,0,0,0,0,0,0,3,0,1,0,0,2,1,2,1,3,0,3,0,0,0,3,4,4,4,3,2,0,2,0,0,2,4), | ||
| 85 | (0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,3,1,0,0,0,0,0,0,0,3), | ||
| 86 | (0,3,0,3,0,2,0,3,0,3,3,3,2,3,2,2,2,0,3,1,3,3,3,2,3,3,0,0,3,0,3,2,2,0,2,3,1,4,3,4,3,3,2,3,1,5,4,4,0,3,1,2,1,3,0,3,1,1,2,0,2,3,1,3,1,3,0,3,0,1,0,3,3,4,4,2,1,0,2,1,0,2,4), | ||
| 87 | (0,1,0,3,0,1,0,2,0,1,4,2,5,1,4,0,2,0,2,1,3,1,4,0,2,1,0,0,2,1,4,1,1,0,3,3,0,5,1,3,2,3,3,1,0,3,2,3,0,1,0,0,0,0,0,0,1,0,0,0,0,4,0,1,0,3,0,2,0,1,0,3,3,3,4,3,3,0,0,0,0,2,3), | ||
| 88 | (0,0,0,1,0,0,0,0,0,0,2,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,0,0,1,0,0,0,0,0,3), | ||
| 89 | (0,1,0,3,0,4,0,3,0,2,4,3,1,0,3,2,2,1,3,1,2,2,3,1,1,1,2,1,3,0,1,2,0,1,3,2,1,3,0,5,5,1,0,0,1,3,2,1,0,3,0,0,1,0,0,0,0,0,3,4,0,1,1,1,3,2,0,2,0,1,0,2,3,3,1,2,3,0,1,0,1,0,4), | ||
| 90 | (0,0,0,1,0,3,0,3,0,2,2,1,0,0,4,0,3,0,3,1,3,0,3,0,3,0,1,0,3,0,3,1,3,0,3,3,0,0,1,2,1,1,1,0,1,2,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,2,2,1,2,0,0,2,0,0,0,0,2,3,3,3,3,0,0,0,0,1,4), | ||
| 91 | (0,0,0,3,0,3,0,0,0,0,3,1,1,0,3,0,1,0,2,0,1,0,0,0,0,0,0,0,1,0,3,0,2,0,2,3,0,0,2,2,3,1,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,2,3), | ||
| 92 | (2,4,0,5,0,5,0,4,0,3,4,3,3,3,4,3,3,3,4,3,4,4,5,4,5,5,5,2,3,0,5,5,4,1,5,4,3,1,5,4,3,4,4,3,3,4,3,3,0,3,2,0,2,3,0,3,0,0,3,3,0,5,3,2,3,3,0,3,0,3,0,3,4,5,4,5,3,0,4,3,0,3,4), | ||
| 93 | (0,3,0,3,0,3,0,3,0,3,3,4,3,2,3,2,3,0,4,3,3,3,3,3,3,3,3,0,3,2,4,3,3,1,3,4,3,4,4,4,3,4,4,3,2,4,4,1,0,2,0,0,1,1,0,2,0,0,3,1,0,5,3,2,1,3,0,3,0,1,2,4,3,2,4,3,3,0,3,2,0,4,4), | ||
| 94 | (0,3,0,3,0,1,0,0,0,1,4,3,3,2,3,1,3,1,4,2,3,2,4,2,3,4,3,0,2,2,3,3,3,0,3,3,3,0,3,4,1,3,3,0,3,4,3,3,0,1,1,0,1,0,0,0,4,0,3,0,0,3,1,2,1,3,0,4,0,1,0,4,3,3,4,3,3,0,2,0,0,3,3), | ||
| 95 | (0,3,0,4,0,1,0,3,0,3,4,3,3,0,3,3,3,1,3,1,3,3,4,3,3,3,0,0,3,1,5,3,3,1,3,3,2,5,4,3,3,4,5,3,2,5,3,4,0,1,0,0,0,0,0,2,0,0,1,1,0,4,2,2,1,3,0,3,0,2,0,4,4,3,5,3,2,0,1,1,0,3,4), | ||
| 96 | (0,5,0,4,0,5,0,2,0,4,4,3,3,2,3,3,3,1,4,3,4,1,5,3,4,3,4,0,4,2,4,3,4,1,5,4,0,4,4,4,4,5,4,1,3,5,4,2,1,4,1,1,3,2,0,3,1,0,3,2,1,4,3,3,3,4,0,4,0,3,0,4,4,4,3,3,3,0,4,2,0,3,4), | ||
| 97 | (1,4,0,4,0,3,0,1,0,3,3,3,1,1,3,3,2,2,3,3,1,0,3,2,2,1,2,0,3,1,2,1,2,0,3,2,0,2,2,3,3,4,3,0,3,3,1,2,0,1,1,3,1,2,0,0,3,0,1,1,0,3,2,2,3,3,0,3,0,0,0,2,3,3,4,3,3,0,1,0,0,1,4), | ||
| 98 | (0,4,0,4,0,4,0,0,0,3,4,4,3,1,4,2,3,2,3,3,3,1,4,3,4,0,3,0,4,2,3,3,2,2,5,4,2,1,3,4,3,4,3,1,3,3,4,2,0,2,1,0,3,3,0,0,2,0,3,1,0,4,4,3,4,3,0,4,0,1,0,2,4,4,4,4,4,0,3,2,0,3,3), | ||
| 99 | (0,0,0,1,0,4,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,3,2,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2), | ||
| 100 | (0,2,0,3,0,4,0,4,0,1,3,3,3,0,4,0,2,1,2,1,1,1,2,0,3,1,1,0,1,0,3,1,0,0,3,3,2,0,1,1,0,0,0,0,0,1,0,2,0,2,2,0,3,1,0,0,1,0,1,1,0,1,2,0,3,0,0,0,0,1,0,0,3,3,4,3,1,0,1,0,3,0,2), | ||
| 101 | (0,0,0,3,0,5,0,0,0,0,1,0,2,0,3,1,0,1,3,0,0,0,2,0,0,0,1,0,0,0,1,1,0,0,4,0,0,0,2,3,0,1,4,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,1,0,0,0,0,0,0,0,2,0,0,3,0,0,0,0,0,3), | ||
| 102 | (0,2,0,5,0,5,0,1,0,2,4,3,3,2,5,1,3,2,3,3,3,0,4,1,2,0,3,0,4,0,2,2,1,1,5,3,0,0,1,4,2,3,2,0,3,3,3,2,0,2,4,1,1,2,0,1,1,0,3,1,0,1,3,1,2,3,0,2,0,0,0,1,3,5,4,4,4,0,3,0,0,1,3), | ||
| 103 | (0,4,0,5,0,4,0,4,0,4,5,4,3,3,4,3,3,3,4,3,4,4,5,3,4,5,4,2,4,2,3,4,3,1,4,4,1,3,5,4,4,5,5,4,4,5,5,5,2,3,3,1,4,3,1,3,3,0,3,3,1,4,3,4,4,4,0,3,0,4,0,3,3,4,4,5,0,0,4,3,0,4,5), | ||
| 104 | (0,4,0,4,0,3,0,3,0,3,4,4,4,3,3,2,4,3,4,3,4,3,5,3,4,3,2,1,4,2,4,4,3,1,3,4,2,4,5,5,3,4,5,4,1,5,4,3,0,3,2,2,3,2,1,3,1,0,3,3,3,5,3,3,3,5,4,4,2,3,3,4,3,3,3,2,1,0,3,2,1,4,3), | ||
| 105 | (0,4,0,5,0,4,0,3,0,3,5,5,3,2,4,3,4,0,5,4,4,1,4,4,4,3,3,3,4,3,5,5,2,3,3,4,1,2,5,5,3,5,5,2,3,5,5,4,0,3,2,0,3,3,1,1,5,1,4,1,0,4,3,2,3,5,0,4,0,3,0,5,4,3,4,3,0,0,4,1,0,4,4), | ||
| 106 | (1,3,0,4,0,2,0,2,0,2,5,5,3,3,3,3,3,0,4,2,3,4,4,4,3,4,0,0,3,4,5,4,3,3,3,3,2,5,5,4,5,5,5,4,3,5,5,5,1,3,1,0,1,0,0,3,2,0,4,2,0,5,2,3,2,4,1,3,0,3,0,4,5,4,5,4,3,0,4,2,0,5,4), | ||
| 107 | (0,3,0,4,0,5,0,3,0,3,4,4,3,2,3,2,3,3,3,3,3,2,4,3,3,2,2,0,3,3,3,3,3,1,3,3,3,0,4,4,3,4,4,1,1,4,4,2,0,3,1,0,1,1,0,4,1,0,2,3,1,3,3,1,3,4,0,3,0,1,0,3,1,3,0,0,1,0,2,0,0,4,4), | ||
| 108 | (0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), | ||
| 109 | (0,3,0,3,0,2,0,3,0,1,5,4,3,3,3,1,4,2,1,2,3,4,4,2,4,4,5,0,3,1,4,3,4,0,4,3,3,3,2,3,2,5,3,4,3,2,2,3,0,0,3,0,2,1,0,1,2,0,0,0,0,2,1,1,3,1,0,2,0,4,0,3,4,4,4,5,2,0,2,0,0,1,3), | ||
| 110 | (0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,1,0,0,1,1,0,0,0,4,2,1,1,0,1,0,3,2,0,0,3,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,1,0,0,0,2,0,0,0,1,4,0,4,2,1,0,0,0,0,0,1), | ||
| 111 | (0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,0,0,3,1,0,0,0,2,0,2,1,0,0,1,2,1,0,1,1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,1,3,1,0,0,0,0,0,1,0,0,2,1,0,0,0,0,0,0,0,0,2), | ||
| 112 | (0,4,0,4,0,4,0,3,0,4,4,3,4,2,4,3,2,0,4,4,4,3,5,3,5,3,3,2,4,2,4,3,4,3,1,4,0,2,3,4,4,4,3,3,3,4,4,4,3,4,1,3,4,3,2,1,2,1,3,3,3,4,4,3,3,5,0,4,0,3,0,4,3,3,3,2,1,0,3,0,0,3,3), | ||
| 113 | (0,4,0,3,0,3,0,3,0,3,5,5,3,3,3,3,4,3,4,3,3,3,4,4,4,3,3,3,3,4,3,5,3,3,1,3,2,4,5,5,5,5,4,3,4,5,5,3,2,2,3,3,3,3,2,3,3,1,2,3,2,4,3,3,3,4,0,4,0,2,0,4,3,2,2,1,2,0,3,0,0,4,1), | ||
| 114 | ) | ||
| 115 | |||
| 116 | class JapaneseContextAnalysis(object): | ||
| 117 | NUM_OF_CATEGORY = 6 | ||
| 118 | DONT_KNOW = -1 | ||
| 119 | ENOUGH_REL_THRESHOLD = 100 | ||
| 120 | MAX_REL_THRESHOLD = 1000 | ||
| 121 | MINIMUM_DATA_THRESHOLD = 4 | ||
| 122 | |||
| 123 | def __init__(self): | ||
| 124 | self._total_rel = None | ||
| 125 | self._rel_sample = None | ||
| 126 | self._need_to_skip_char_num = None | ||
| 127 | self._last_char_order = None | ||
| 128 | self._done = None | ||
| 129 | self.reset() | ||
| 130 | |||
| 131 | def reset(self): | ||
| 132 | self._total_rel = 0 # total sequences received | ||
| 133 | # category counters; each integer counts sequences in its category | ||
| 134 | self._rel_sample = [0] * self.NUM_OF_CATEGORY | ||
| 135 | # if the last byte in the current buffer is not the last byte of a | ||
| 136 | # character, we need to know how many bytes to skip in the next buffer | ||
| 137 | self._need_to_skip_char_num = 0 | ||
| 138 | self._last_char_order = -1 # the order of the previous char | ||
| 139 | # If this flag is set to True, detection is done and a conclusion has | ||
| 140 | # been made | ||
| 141 | self._done = False | ||
| 142 | |||
| 143 | def feed(self, byte_str, num_bytes): | ||
| 144 | if self._done: | ||
| 145 | return | ||
| 146 | |||
| 147 | # The buffer we got is byte oriented, and a character may span more than | ||
| 148 | # one buffer. If the last one or two bytes of the previous buffer were | ||
| 149 | # not a complete character, we record how many bytes are needed to | ||
| 150 | # complete it and skip those bytes here. We could instead record those | ||
| 151 | # bytes and analyse the character once it is complete, but one character | ||
| 152 | # will not make much difference, so simply skipping it simplifies our | ||
| 153 | # logic and improves performance. | ||
| 154 | i = self._need_to_skip_char_num | ||
| 155 | while i < num_bytes: | ||
| 156 | order, char_len = self.get_order(byte_str[i:i + 2]) | ||
| 157 | i += char_len | ||
| 158 | if i > num_bytes: | ||
| 159 | self._need_to_skip_char_num = i - num_bytes | ||
| 160 | self._last_char_order = -1 | ||
| 161 | else: | ||
| 162 | if (order != -1) and (self._last_char_order != -1): | ||
| 163 | self._total_rel += 1 | ||
| 164 | if self._total_rel > self.MAX_REL_THRESHOLD: | ||
| 165 | self._done = True | ||
| 166 | break | ||
| 167 | self._rel_sample[jp2CharContext[self._last_char_order][order]] += 1 | ||
| 168 | self._last_char_order = order | ||
| 169 | |||
| 170 | def got_enough_data(self): | ||
| 171 | return self._total_rel > self.ENOUGH_REL_THRESHOLD | ||
| 172 | |||
| 173 | def get_confidence(self): | ||
| 174 | # This is just one way to calculate confidence. It works well for me. | ||
| 175 | if self._total_rel > self.MINIMUM_DATA_THRESHOLD: | ||
| 176 | return (self._total_rel - self._rel_sample[0]) / self._total_rel | ||
| 177 | else: | ||
| 178 | return self.DONT_KNOW | ||
| 179 | |||
| 180 | def get_order(self, byte_str): | ||
| 181 | return -1, 1 | ||
| 182 | |||
| 183 | class SJISContextAnalysis(JapaneseContextAnalysis): | ||
| 184 | def __init__(self): | ||
| 185 | super(SJISContextAnalysis, self).__init__() | ||
| 186 | self._charset_name = "SHIFT_JIS" | ||
| 187 | |||
| 188 | @property | ||
| 189 | def charset_name(self): | ||
| 190 | return self._charset_name | ||
| 191 | |||
| 192 | def get_order(self, byte_str): | ||
| 193 | if not byte_str: | ||
| 194 | return -1, 1 | ||
| 195 | # find out current char's byte length | ||
| 196 | first_char = byte_str[0] | ||
| 197 | if (0x81 <= first_char <= 0x9F) or (0xE0 <= first_char <= 0xFC): | ||
| 198 | char_len = 2 | ||
| 199 | if (first_char == 0x87) or (0xFA <= first_char <= 0xFC): | ||
| 200 | self._charset_name = "CP932" | ||
| 201 | else: | ||
| 202 | char_len = 1 | ||
| 203 | |||
| 204 | # return its order if it is hiragana | ||
| 205 | if len(byte_str) > 1: | ||
| 206 | second_char = byte_str[1] | ||
| 207 | if (first_char == 202) and (0x9F <= second_char <= 0xF1): | ||
| 208 | return second_char - 0x9F, char_len | ||
| 209 | |||
| 210 | return -1, char_len | ||
| 211 | |||
| 212 | class EUCJPContextAnalysis(JapaneseContextAnalysis): | ||
| 213 | def get_order(self, byte_str): | ||
| 214 | if not byte_str: | ||
| 215 | return -1, 1 | ||
| 216 | # find out current char's byte length | ||
| 217 | first_char = byte_str[0] | ||
| 218 | if (first_char == 0x8E) or (0xA1 <= first_char <= 0xFE): | ||
| 219 | char_len = 2 | ||
| 220 | elif first_char == 0x8F: | ||
| 221 | char_len = 3 | ||
| 222 | else: | ||
| 223 | char_len = 1 | ||
| 224 | |||
| 225 | # return its order if it is hiragana | ||
| 226 | if len(byte_str) > 1: | ||
| 227 | second_char = byte_str[1] | ||
| 228 | if (first_char == 0xA4) and (0xA1 <= second_char <= 0xF3): | ||
| 229 | return second_char - 0xA1, char_len | ||
| 230 | |||
| 231 | return -1, char_len | ||
| 232 | |||
| 233 | |||
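Putting the pieces together: get_order maps each two-byte hiragana to an index, feed tallies each bigram's frequency category out of jp2CharContext, and get_confidence reports the fraction of observed bigrams that landed outside category 0. A short usage sketch, assuming this module is importable as jpcntx; the sample string and printed value are illustrative only:

    # Score a short EUC-JP hiragana run with the context analyser.
    from jpcntx import EUCJPContextAnalysis

    data = u"ひらがなのれんぞくです".encode("euc-jp")
    analyser = EUCJPContextAnalysis()
    analyser.feed(bytearray(data), len(data))
    # Enough bigrams were seen to clear MINIMUM_DATA_THRESHOLD, so this is
    # a real ratio; with too little data it would be DONT_KNOW (-1).
    print(analyser.get_confidence())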
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/langbulgarianmodel.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/langbulgarianmodel.py new file mode 100644 index 0000000..eb6f19a --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/langbulgarianmodel.py | |||
| @@ -0,0 +1,228 @@ | |||
| 1 | ######################## BEGIN LICENSE BLOCK ######################## | ||
| 2 | # The Original Code is Mozilla Communicator client code. | ||
| 3 | # | ||
| 4 | # The Initial Developer of the Original Code is | ||
| 5 | # Netscape Communications Corporation. | ||
| 6 | # Portions created by the Initial Developer are Copyright (C) 1998 | ||
| 7 | # the Initial Developer. All Rights Reserved. | ||
| 8 | # | ||
| 9 | # Contributor(s): | ||
| 10 | # Mark Pilgrim - port to Python | ||
| 11 | # | ||
| 12 | # This library is free software; you can redistribute it and/or | ||
| 13 | # modify it under the terms of the GNU Lesser General Public | ||
| 14 | # License as published by the Free Software Foundation; either | ||
| 15 | # version 2.1 of the License, or (at your option) any later version. | ||
| 16 | # | ||
| 17 | # This library is distributed in the hope that it will be useful, | ||
| 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
| 20 | # Lesser General Public License for more details. | ||
| 21 | # | ||
| 22 | # You should have received a copy of the GNU Lesser General Public | ||
| 23 | # License along with this library; if not, write to the Free Software | ||
| 24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA | ||
| 25 | # 02110-1301 USA | ||
| 26 | ######################### END LICENSE BLOCK ######################### | ||
| 27 | |||
| 28 | # 255: Control characters that usually do not exist in any text | ||
| 29 | # 254: Carriage Return / Line Feed | ||
| 30 | # 253: symbols (punctuation) that do not belong to a word | ||
| 31 | # 252: 0 - 9 | ||
| 32 | |||
| 33 | # Character Mapping Table: | ||
| 34 | # this table is modified based on win1251BulgarianCharToOrderMap, so | ||
| 35 | # only numbers < 64 are known to be valid | ||
| 36 | |||
| 37 | Latin5_BulgarianCharToOrderMap = ( | ||
| 38 | 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 | ||
| 39 | 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 | ||
| 40 | 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 | ||
| 41 | 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 | ||
| 42 | 253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40 | ||
| 43 | 110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50 | ||
| 44 | 253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60 | ||
| 45 | 116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70 | ||
| 46 | 194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209, # 80 | ||
| 47 | 210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225, # 90 | ||
| 48 | 81,226,227,228,229,230,105,231,232,233,234,235,236, 45,237,238, # a0 | ||
| 49 | 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # b0 | ||
| 50 | 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,239, 67,240, 60, 56, # c0 | ||
| 51 | 1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # d0 | ||
| 52 | 7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,241, 42, 16, # e0 | ||
| 53 | 62,242,243,244, 58,245, 98,246,247,248,249,250,251, 91,252,253, # f0 | ||
| 54 | ) | ||
| 55 | |||
| 56 | win1251BulgarianCharToOrderMap = ( | ||
| 57 | 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 | ||
| 58 | 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 | ||
| 59 | 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 | ||
| 60 | 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 | ||
| 61 | 253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40 | ||
| 62 | 110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50 | ||
| 63 | 253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60 | ||
| 64 | 116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70 | ||
| 65 | 206,207,208,209,210,211,212,213,120,214,215,216,217,218,219,220, # 80 | ||
| 66 | 221, 78, 64, 83,121, 98,117,105,222,223,224,225,226,227,228,229, # 90 | ||
| 67 | 88,230,231,232,233,122, 89,106,234,235,236,237,238, 45,239,240, # a0 | ||
| 68 | 73, 80,118,114,241,242,243,244,245, 62, 58,246,247,248,249,250, # b0 | ||
| 69 | 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # c0 | ||
| 70 | 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,251, 67,252, 60, 56, # d0 | ||
| 71 | 1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # e0 | ||
| 72 | 7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,253, 42, 16, # f0 | ||
| 73 | ) | ||
| 74 | |||
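Both maps follow the same contract: index the tuple with a byte value and get back either a frequency order (low numbers mark the most frequent Cyrillic letters) or one of the 252-255 sentinel codes described above. A hypothetical helper, not part of chardet's API, showing how a prober would walk a windows-1251 buffer through the map:

    def letter_orders(byte_buf, table=win1251BulgarianCharToOrderMap):
        # Yield frequency orders for letters only, skipping the sentinel
        # codes: 252 (digit), 253 (symbol), 254 (CR/LF), 255 (control).
        for byte in bytearray(byte_buf):
            order = table[byte]
            if order < 252:
                yield order

    sample = u"български".encode("windows-1251")
    print(list(letter_orders(sample)))  # small numbers = frequent letters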
| 75 | # Model Table: | ||
| 76 | # total sequences: 100% | ||
| 77 | # first 512 sequences: 96.9392% | ||
| 78 | # first 1024 sequences: 3.0618% | ||
| 79 | # rest sequences: 0.2992% | ||
| 80 | # negative sequences: 0.0020% | ||
| 81 | BulgarianLangModel = ( | ||
| 82 | 0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,3,3,3,3,3, | ||
| 83 | 3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,2,2,1,2,2, | ||
| 84 | 3,1,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,0,1, | ||
| 85 | 0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, | ||
| 86 | 3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,3,3,0,3,1,0, | ||
| 87 | 0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, | ||
| 88 | 3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0, | ||
| 89 | 0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 90 | 3,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0, | ||
| 91 | 0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 92 | 3,3,3,3,3,3,3,3,3,3,3,2,3,2,2,1,3,3,3,3,2,2,2,1,1,2,0,1,0,1,0,0, | ||
| 93 | 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, | ||
| 94 | 3,3,3,3,3,3,3,2,3,2,2,3,3,1,1,2,3,3,2,3,3,3,3,2,1,2,0,2,0,3,0,0, | ||
| 95 | 0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, | ||
| 96 | 3,3,3,3,3,3,3,1,3,3,3,3,3,2,3,2,3,3,3,3,3,2,3,3,1,3,0,3,0,2,0,0, | ||
| 97 | 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, | ||
| 98 | 3,3,3,3,3,3,3,3,1,3,3,2,3,3,3,1,3,3,2,3,2,2,2,0,0,2,0,2,0,2,0,0, | ||
| 99 | 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, | ||
| 100 | 3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,3,3,1,2,2,3,2,1,1,2,0,2,0,0,0,0, | ||
| 101 | 1,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, | ||
| 102 | 3,3,3,3,3,3,3,2,3,3,1,2,3,2,2,2,3,3,3,3,3,2,2,3,1,2,0,2,1,2,0,0, | ||
| 103 | 0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, | ||
| 104 | 3,3,3,3,3,1,3,3,3,3,3,2,3,3,3,2,3,3,2,3,2,2,2,3,1,2,0,1,0,1,0,0, | ||
| 105 | 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, | ||
| 106 | 3,3,3,3,3,3,3,3,3,3,3,1,1,1,2,2,1,3,1,3,2,2,3,0,0,1,0,1,0,1,0,0, | ||
| 107 | 0,0,0,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, | ||
| 108 | 3,3,3,3,3,2,2,3,2,2,3,1,2,1,1,1,2,3,1,3,1,2,2,0,1,1,1,1,0,1,0,0, | ||
| 109 | 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, | ||
| 110 | 3,3,3,3,3,1,3,2,2,3,3,1,2,3,1,1,3,3,3,3,1,2,2,1,1,1,0,2,0,2,0,1, | ||
| 111 | 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, | ||
| 112 | 3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,2,2,3,3,3,2,2,1,1,2,0,2,0,1,0,0, | ||
| 113 | 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, | ||
| 114 | 3,0,1,2,1,3,3,2,3,3,3,3,3,2,3,2,1,0,3,1,2,1,2,1,2,3,2,1,0,1,0,0, | ||
| 115 | 0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 116 | 1,1,1,2,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,1,3,3,2,3,3,2,2,2,0,1,0,0, | ||
| 117 | 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 118 | 2,3,3,3,3,0,3,3,3,3,3,2,1,1,2,1,3,3,0,3,1,1,1,1,3,2,0,1,0,0,0,0, | ||
| 119 | 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, | ||
| 120 | 3,3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,1,1,3,1,3,3,2,3,2,2,2,3,0,2,0,0, | ||
| 121 | 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 122 | 3,3,3,3,3,2,3,3,2,2,3,2,1,1,1,1,1,3,1,3,1,1,0,0,0,1,0,0,0,1,0,0, | ||
| 123 | 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0, | ||
| 124 | 3,3,3,3,3,2,3,2,0,3,2,0,3,0,2,0,0,2,1,3,1,0,0,1,0,0,0,1,0,0,0,0, | ||
| 125 | 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, | ||
| 126 | 3,3,3,3,2,1,1,1,1,2,1,1,2,1,1,1,2,2,1,2,1,1,1,0,1,1,0,1,0,1,0,0, | ||
| 127 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, | ||
| 128 | 3,3,3,3,2,1,3,1,1,2,1,3,2,1,1,0,1,2,3,2,1,1,1,0,0,0,0,0,0,0,0,0, | ||
| 129 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 130 | 2,3,3,3,3,2,2,1,0,1,0,0,1,0,0,0,2,1,0,3,0,0,1,0,0,0,0,0,0,0,0,0, | ||
| 131 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, | ||
| 132 | 3,3,3,2,3,2,3,3,1,3,2,1,1,1,2,1,1,2,1,3,0,1,0,0,0,1,0,0,0,0,0,0, | ||
| 133 | 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 134 | 3,1,1,2,2,3,3,2,3,2,2,2,3,1,2,2,1,1,2,1,1,2,2,0,1,1,0,1,0,2,0,0, | ||
| 135 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 136 | 3,3,3,3,2,1,3,1,0,2,2,1,3,2,1,0,0,2,0,2,0,1,0,0,0,0,0,0,0,1,0,0, | ||
| 137 | 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, | ||
| 138 | 3,3,3,3,3,3,1,2,0,2,3,1,2,3,2,0,1,3,1,2,1,1,1,0,0,1,0,0,2,2,2,3, | ||
| 139 | 2,2,2,2,1,2,1,1,2,2,1,1,2,0,1,1,1,0,0,1,1,0,0,1,1,0,0,0,1,1,0,1, | ||
| 140 | 3,3,3,3,3,2,1,2,2,1,2,0,2,0,1,0,1,2,1,2,1,1,0,0,0,1,0,1,0,0,0,0, | ||
| 141 | 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, | ||
| 142 | 3,3,2,3,3,1,1,3,1,0,3,2,1,0,0,0,1,2,0,2,0,1,0,0,0,1,0,1,2,1,2,2, | ||
| 143 | 1,1,1,1,1,1,1,2,2,2,1,1,1,1,1,1,1,0,1,2,1,1,1,0,0,0,0,0,1,1,0,0, | ||
| 144 | 3,1,0,1,0,2,3,2,2,2,3,2,2,2,2,2,1,0,2,1,2,1,1,1,0,1,2,1,2,2,2,1, | ||
| 145 | 1,1,2,2,2,2,1,2,1,1,0,1,2,1,2,2,2,1,1,1,0,1,1,1,1,2,0,1,0,0,0,0, | ||
| 146 | 2,3,2,3,3,0,0,2,1,0,2,1,0,0,0,0,2,3,0,2,0,0,0,0,0,1,0,0,2,0,1,2, | ||
| 147 | 2,1,2,1,2,2,1,1,1,2,1,1,1,0,1,2,2,1,1,1,1,1,0,1,1,1,0,0,1,2,0,0, | ||
| 148 | 3,3,2,2,3,0,2,3,1,1,2,0,0,0,1,0,0,2,0,2,0,0,0,1,0,1,0,1,2,0,2,2, | ||
| 149 | 1,1,1,1,2,1,0,1,2,2,2,1,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,1,0,0, | ||
| 150 | 2,3,2,3,3,0,0,3,0,1,1,0,1,0,0,0,2,2,1,2,0,0,0,0,0,0,0,0,2,0,1,2, | ||
| 151 | 2,2,1,1,1,1,1,2,2,2,1,0,2,0,1,0,1,0,0,1,0,1,0,0,1,0,0,0,0,1,0,0, | ||
| 152 | 3,3,3,3,2,2,2,2,2,0,2,1,1,1,1,2,1,2,1,1,0,2,0,1,0,1,0,0,2,0,1,2, | ||
| 153 | 1,1,1,1,1,1,1,2,2,1,1,0,2,0,1,0,2,0,0,1,1,1,0,0,2,0,0,0,1,1,0,0, | ||
| 154 | 2,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,0,0,0,1,2,0,1,2, | ||
| 155 | 2,2,2,1,1,2,1,1,2,2,2,1,2,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,1,1,0,0, | ||
| 156 | 2,3,3,3,3,0,2,2,0,2,1,0,0,0,1,1,1,2,0,2,0,0,0,3,0,0,0,0,2,0,2,2, | ||
| 157 | 1,1,1,2,1,2,1,1,2,2,2,1,2,0,1,1,1,0,1,1,1,1,0,2,1,0,0,0,1,1,0,0, | ||
| 158 | 2,3,3,3,3,0,2,1,0,0,2,0,0,0,0,0,1,2,0,2,0,0,0,0,0,0,0,0,2,0,1,2, | ||
| 159 | 1,1,1,2,1,1,1,1,2,2,2,0,1,0,1,1,1,0,0,1,1,1,0,0,1,0,0,0,0,1,0,0, | ||
| 160 | 3,3,2,2,3,0,1,0,1,0,0,0,0,0,0,0,1,1,0,3,0,0,0,0,0,0,0,0,1,0,2,2, | ||
| 161 | 1,1,1,1,1,2,1,1,2,2,1,2,2,1,0,1,1,1,1,1,0,1,0,0,1,0,0,0,1,1,0,0, | ||
| 162 | 3,1,0,1,0,2,2,2,2,3,2,1,1,1,2,3,0,0,1,0,2,1,1,0,1,1,1,1,2,1,1,1, | ||
| 163 | 1,2,2,1,2,1,2,2,1,1,0,1,2,1,2,2,1,1,1,0,0,1,1,1,2,1,0,1,0,0,0,0, | ||
| 164 | 2,1,0,1,0,3,1,2,2,2,2,1,2,2,1,1,1,0,2,1,2,2,1,1,2,1,1,0,2,1,1,1, | ||
| 165 | 1,2,2,2,2,2,2,2,1,2,0,1,1,0,2,1,1,1,1,1,0,0,1,1,1,1,0,1,0,0,0,0, | ||
| 166 | 2,1,1,1,1,2,2,2,2,1,2,2,2,1,2,2,1,1,2,1,2,3,2,2,1,1,1,1,0,1,0,0, | ||
| 167 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 168 | 2,2,2,3,2,0,1,2,0,1,2,1,1,0,1,0,1,2,1,2,0,0,0,1,1,0,0,0,1,0,0,2, | ||
| 169 | 1,1,0,0,1,1,0,1,1,1,1,0,2,0,1,1,1,0,0,1,1,0,0,0,0,1,0,0,0,1,0,0, | ||
| 170 | 2,0,0,0,0,1,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,2,1,1,1, | ||
| 171 | 1,2,2,2,2,1,1,2,1,2,1,1,1,0,2,1,2,1,1,1,0,2,1,1,1,1,0,1,0,0,0,0, | ||
| 172 | 3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0, | ||
| 173 | 1,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 174 | 2,2,2,3,2,0,0,0,0,1,0,0,0,0,0,0,1,1,0,2,0,0,0,0,0,0,0,0,1,0,1,2, | ||
| 175 | 1,1,1,1,1,1,0,0,2,2,2,2,2,0,1,1,0,1,1,1,1,1,0,0,1,0,0,0,1,1,0,1, | ||
| 176 | 2,3,1,2,1,0,1,1,0,2,2,2,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,1,0,1,2, | ||
| 177 | 1,1,1,1,2,1,1,1,1,1,1,1,1,0,1,1,0,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0, | ||
| 178 | 2,2,2,2,2,0,0,2,0,0,2,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,0,2,2, | ||
| 179 | 1,1,1,1,1,0,0,1,2,1,1,0,1,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, | ||
| 180 | 1,2,2,2,2,0,0,2,0,1,1,0,0,0,1,0,0,2,0,2,0,0,0,0,0,0,0,0,0,0,1,1, | ||
| 181 | 0,0,0,1,1,1,1,1,1,1,1,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, | ||
| 182 | 1,2,2,3,2,0,0,1,0,0,1,0,0,0,0,0,0,1,0,2,0,0,0,1,0,0,0,0,0,0,0,2, | ||
| 183 | 1,1,0,0,1,0,0,0,1,1,0,0,1,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, | ||
| 184 | 2,1,2,2,2,1,2,1,2,2,1,1,2,1,1,1,0,1,1,1,1,2,0,1,0,1,1,1,1,0,1,1, | ||
| 185 | 1,1,2,1,1,1,1,1,1,0,0,1,2,1,1,1,1,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0, | ||
| 186 | 1,0,0,1,3,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, | ||
| 187 | 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 188 | 2,2,2,2,1,0,0,1,0,2,0,0,0,0,0,1,1,1,0,1,0,0,0,0,0,0,0,0,2,0,0,1, | ||
| 189 | 0,2,0,1,0,0,1,1,2,0,1,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 190 | 1,2,2,2,2,0,1,1,0,2,1,0,1,1,1,0,0,1,0,2,0,1,0,0,0,0,0,0,0,0,0,1, | ||
| 191 | 0,1,0,0,1,0,0,0,1,1,0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, | ||
| 192 | 2,2,2,2,2,0,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1, | ||
| 193 | 0,1,0,1,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, | ||
| 194 | 2,0,1,0,0,1,2,1,1,1,1,1,1,2,2,1,0,0,1,0,1,0,0,0,0,1,1,1,1,0,0,0, | ||
| 195 | 1,1,2,1,1,1,1,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 196 | 2,2,1,2,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1, | ||
| 197 | 0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 198 | 3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 199 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 200 | 1,0,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0, | ||
| 201 | 0,1,1,0,1,1,1,0,0,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0, | ||
| 202 | 1,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,2,0,0,2,0,1,0,0,1,0,0,1, | ||
| 203 | 1,1,0,0,1,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, | ||
| 204 | 0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0, | ||
| 205 | 1,1,1,1,1,1,1,2,0,0,0,0,0,0,2,1,0,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0, | ||
| 206 | 2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 207 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 208 | 1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,1,1,0,1,0,0, | ||
| 209 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, | ||
| 210 | ) | ||
| 211 | |||
| 212 | Latin5BulgarianModel = { | ||
| 213 | 'char_to_order_map': Latin5_BulgarianCharToOrderMap, | ||
| 214 | 'precedence_matrix': BulgarianLangModel, | ||
| 215 | 'typical_positive_ratio': 0.969392, | ||
| 216 | 'keep_english_letter': False, | ||
| 217 | 'charset_name': "ISO-8859-5", | ||
| 218 | 'language': 'Bulgarian', | ||
| 219 | } | ||
| 220 | |||
| 221 | Win1251BulgarianModel = { | ||
| 222 | 'char_to_order_map': win1251BulgarianCharToOrderMap, | ||
| 223 | 'precedence_matrix': BulgarianLangModel, | ||
| 224 | 'typical_positive_ratio': 0.969392, | ||
| 225 | 'keep_english_letter': False, | ||
| 226 | 'charset_name': "windows-1251", | ||
| 227 | 'language': 'Bulgarian', | ||
| 228 | } | ||
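The two Bulgarian models above share BulgarianLangModel as their precedence matrix and differ only in the byte-to-order map. Judging by its shape, the matrix is a flattened 64x64 table (128 printed rows of 32 values = 4096 entries), indexed by a pair of character orders, with each entry rating a two-character sequence from 0 (negative) to 3 (very frequent). Below is a minimal sketch of how such a model dict could score raw bytes; it is an illustration under those assumptions, not chardet's actual prober API, and `score` and `SAMPLE_SIZE` are hypothetical names:

    SAMPLE_SIZE = 64  # assumed sampling threshold: orders 64 and above are skipped

    def score(raw_bytes, model):
        matrix = model['precedence_matrix']
        order_map = model['char_to_order_map']
        hits = positive = 0
        prev_order = None
        for byte in bytearray(raw_bytes):
            order = order_map[byte]
            if order < SAMPLE_SIZE:
                if prev_order is not None:
                    hits += 1
                    # Treat matrix value 3 as a "frequent pair" hit.
                    if matrix[prev_order * SAMPLE_SIZE + order] == 3:
                        positive += 1
                prev_order = order
            else:
                prev_order = None  # non-sampled byte breaks the pair chain
        return positive / hits if hits else 0.0

For genuine Bulgarian text in the right encoding, the returned ratio should sit near the model's typical_positive_ratio; a wrong encoding scrambles the order pairs and the ratio collapses, e.g. score(text.encode('windows-1251'), Win1251BulgarianModel) versus the same bytes scored against Latin5BulgarianModel.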
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/langcyrillicmodel.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/langcyrillicmodel.py new file mode 100644 index 0000000..bdbad70 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/langcyrillicmodel.py | |||
| @@ -0,0 +1,333 @@ | |||
| 1 | ######################## BEGIN LICENSE BLOCK ######################## | ||
| 2 | # The Original Code is Mozilla Communicator client code. | ||
| 3 | # | ||
| 4 | # The Initial Developer of the Original Code is | ||
| 5 | # Netscape Communications Corporation. | ||
| 6 | # Portions created by the Initial Developer are Copyright (C) 1998 | ||
| 7 | # the Initial Developer. All Rights Reserved. | ||
| 8 | # | ||
| 9 | # Contributor(s): | ||
| 10 | # Mark Pilgrim - port to Python | ||
| 11 | # | ||
| 12 | # This library is free software; you can redistribute it and/or | ||
| 13 | # modify it under the terms of the GNU Lesser General Public | ||
| 14 | # License as published by the Free Software Foundation; either | ||
| 15 | # version 2.1 of the License, or (at your option) any later version. | ||
| 16 | # | ||
| 17 | # This library is distributed in the hope that it will be useful, | ||
| 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
| 20 | # Lesser General Public License for more details. | ||
| 21 | # | ||
| 22 | # You should have received a copy of the GNU Lesser General Public | ||
| 23 | # License along with this library; if not, write to the Free Software | ||
| 24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA | ||
| 25 | # 02110-1301 USA | ||
| 26 | ######################### END LICENSE BLOCK ######################### | ||
| 27 | |||
| 28 | # KOI8-R language model | ||
| 29 | # Character Mapping Table: | ||
| 30 | KOI8R_char_to_order_map = ( | ||
| 31 | 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 | ||
| 32 | 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 | ||
| 33 | 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 | ||
| 34 | 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 | ||
| 35 | 253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 | ||
| 36 | 155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 | ||
| 37 | 253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 | ||
| 38 | 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 | ||
| 39 | 191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, # 80 | ||
| 40 | 207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, # 90 | ||
| 41 | 223,224,225, 68,226,227,228,229,230,231,232,233,234,235,236,237, # a0 | ||
| 42 | 238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253, # b0 | ||
| 43 | 27, 3, 21, 28, 13, 2, 39, 19, 26, 4, 23, 11, 8, 12, 5, 1, # c0 | ||
| 44 | 15, 16, 9, 7, 6, 14, 24, 10, 17, 18, 20, 25, 30, 29, 22, 54, # d0 | ||
| 45 | 59, 37, 44, 58, 41, 48, 53, 46, 55, 42, 60, 36, 49, 38, 31, 34, # e0 | ||
| 46 | 35, 43, 45, 32, 40, 52, 56, 33, 61, 62, 51, 57, 47, 63, 50, 70, # f0 | ||
| 47 | ) | ||
| 48 | |||
| 49 | win1251_char_to_order_map = ( | ||
| 50 | 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 | ||
| 51 | 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 | ||
| 52 | 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 | ||
| 53 | 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 | ||
| 54 | 253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 | ||
| 55 | 155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 | ||
| 56 | 253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 | ||
| 57 | 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 | ||
| 58 | 191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, | ||
| 59 | 207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, | ||
| 60 | 223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, | ||
| 61 | 239,240,241,242,243,244,245,246, 68,247,248,249,250,251,252,253, | ||
| 62 | 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, | ||
| 63 | 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, | ||
| 64 | 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, | ||
| 65 | 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, | ||
| 66 | ) | ||
| 67 | |||
| 68 | latin5_char_to_order_map = ( | ||
| 69 | 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 | ||
| 70 | 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 | ||
| 71 | 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 | ||
| 72 | 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 | ||
| 73 | 253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 | ||
| 74 | 155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 | ||
| 75 | 253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 | ||
| 76 | 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 | ||
| 77 | 191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, | ||
| 78 | 207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, | ||
| 79 | 223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, | ||
| 80 | 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, | ||
| 81 | 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, | ||
| 82 | 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, | ||
| 83 | 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, | ||
| 84 | 239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255, | ||
| 85 | ) | ||
| 86 | |||
| 87 | macCyrillic_char_to_order_map = ( | ||
| 88 | 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 | ||
| 89 | 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 | ||
| 90 | 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 | ||
| 91 | 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 | ||
| 92 | 253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 | ||
| 93 | 155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 | ||
| 94 | 253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 | ||
| 95 | 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 | ||
| 96 | 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, | ||
| 97 | 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, | ||
| 98 | 191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, | ||
| 99 | 207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, | ||
| 100 | 223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, | ||
| 101 | 239,240,241,242,243,244,245,246,247,248,249,250,251,252, 68, 16, | ||
| 102 | 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, | ||
| 103 | 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27,255, | ||
| 104 | ) | ||
| 105 | |||
| 106 | IBM855_char_to_order_map = ( | ||
| 107 | 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 | ||
| 108 | 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 | ||
| 109 | 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 | ||
| 110 | 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 | ||
| 111 | 253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 | ||
| 112 | 155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 | ||
| 113 | 253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 | ||
| 114 | 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 | ||
| 115 | 191,192,193,194, 68,195,196,197,198,199,200,201,202,203,204,205, | ||
| 116 | 206,207,208,209,210,211,212,213,214,215,216,217, 27, 59, 54, 70, | ||
| 117 | 3, 37, 21, 44, 28, 58, 13, 41, 2, 48, 39, 53, 19, 46,218,219, | ||
| 118 | 220,221,222,223,224, 26, 55, 4, 42,225,226,227,228, 23, 60,229, | ||
| 119 | 230,231,232,233,234,235, 11, 36,236,237,238,239,240,241,242,243, | ||
| 120 | 8, 49, 12, 38, 5, 31, 1, 34, 15,244,245,246,247, 35, 16,248, | ||
| 121 | 43, 9, 45, 7, 32, 6, 40, 14, 52, 24, 56, 10, 33, 17, 61,249, | ||
| 122 | 250, 18, 62, 20, 51, 25, 57, 30, 47, 29, 63, 22, 50,251,252,255, | ||
| 123 | ) | ||
| 124 | |||
| 125 | IBM866_char_to_order_map = ( | ||
| 126 | 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 | ||
| 127 | 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 | ||
| 128 | 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 | ||
| 129 | 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 | ||
| 130 | 253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 | ||
| 131 | 155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 | ||
| 132 | 253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 | ||
| 133 | 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 | ||
| 134 | 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, | ||
| 135 | 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, | ||
| 136 | 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, | ||
| 137 | 191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, | ||
| 138 | 207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, | ||
| 139 | 223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, | ||
| 140 | 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, | ||
| 141 | 239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255, | ||
| 142 | ) | ||
| 143 | |||
| 144 | # Model Table: | ||
| 145 | # total sequences: 100% | ||
| 146 | # first 512 sequences: 97.6601% | ||
| 147 | # first 1024 sequences: 2.3389% | ||
| 148 | # rest sequences: 0.1237% | ||
| 149 | # negative sequences: 0.0009% | ||
| 150 | RussianLangModel = ( | ||
| 151 | 0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,1,3,3,3,3,1,3,3,3,2,3,2,3,3, | ||
| 152 | 3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,2,2,2,2,2,0,0,2, | ||
| 153 | 3,3,3,2,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,2,3,2,0, | ||
| 154 | 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 155 | 3,3,3,2,2,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,2,3,3,1,0, | ||
| 156 | 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 157 | 3,2,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1, | ||
| 158 | 0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 159 | 3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1, | ||
| 160 | 0,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 161 | 3,3,3,3,3,3,3,3,2,2,2,3,1,3,3,1,3,3,3,3,2,2,3,0,2,2,2,3,3,2,1,0, | ||
| 162 | 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, | ||
| 163 | 3,3,3,3,3,3,2,3,3,3,3,3,2,2,3,2,3,3,3,2,1,2,2,0,1,2,2,2,2,2,2,0, | ||
| 164 | 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, | ||
| 165 | 3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,3,0,2,2,3,3,2,1,2,0, | ||
| 166 | 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, | ||
| 167 | 3,3,3,3,3,3,2,3,3,1,2,3,2,2,3,2,3,3,3,3,2,2,3,0,3,2,2,3,1,1,1,0, | ||
| 168 | 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 169 | 3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,3,3,3,3,2,2,2,0,3,3,3,2,2,2,2,0, | ||
| 170 | 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 171 | 3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,2,3,2,2,0,1,3,2,1,2,2,1,0, | ||
| 172 | 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, | ||
| 173 | 3,3,3,3,3,3,3,3,3,3,3,2,1,1,3,0,1,1,1,1,2,1,1,0,2,2,2,1,2,0,1,0, | ||
| 174 | 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 175 | 3,3,3,3,3,3,2,3,3,2,2,2,2,1,3,2,3,2,3,2,1,2,2,0,1,1,2,1,2,1,2,0, | ||
| 176 | 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 177 | 3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,2,3,3,3,2,2,2,2,0,2,2,2,2,3,1,1,0, | ||
| 178 | 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, | ||
| 179 | 3,2,3,2,2,3,3,3,3,3,3,3,3,3,1,3,2,0,0,3,3,3,3,2,3,3,3,3,2,3,2,0, | ||
| 180 | 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 181 | 2,3,3,3,3,3,2,2,3,3,0,2,1,0,3,2,3,2,3,0,0,1,2,0,0,1,0,1,2,1,1,0, | ||
| 182 | 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 183 | 3,0,3,0,2,3,3,3,3,2,3,3,3,3,1,2,2,0,0,2,3,2,2,2,3,2,3,2,2,3,0,0, | ||
| 184 | 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 185 | 3,2,3,0,2,3,2,3,0,1,2,3,3,2,0,2,3,0,0,2,3,2,2,0,1,3,1,3,2,2,1,0, | ||
| 186 | 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 187 | 3,1,3,0,2,3,3,3,3,3,3,3,3,2,1,3,2,0,0,2,2,3,3,3,2,3,3,0,2,2,0,0, | ||
| 188 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 189 | 3,3,3,3,3,3,2,2,3,3,2,2,2,3,3,0,0,1,1,1,1,1,2,0,0,1,1,1,1,0,1,0, | ||
| 190 | 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 191 | 3,3,3,3,3,3,2,2,3,3,3,3,3,3,3,0,3,2,3,3,2,3,2,0,2,1,0,1,1,0,1,0, | ||
| 192 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, | ||
| 193 | 3,3,3,3,3,3,2,3,3,3,2,2,2,2,3,1,3,2,3,1,1,2,1,0,2,2,2,2,1,3,1,0, | ||
| 194 | 0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, | ||
| 195 | 2,2,3,3,3,3,3,1,2,2,1,3,1,0,3,0,0,3,0,0,0,1,1,0,1,2,1,0,0,0,0,0, | ||
| 196 | 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 197 | 3,2,2,1,1,3,3,3,2,2,1,2,2,3,1,1,2,0,0,2,2,1,3,0,0,2,1,1,2,1,1,0, | ||
| 198 | 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 199 | 3,2,3,3,3,3,1,2,2,2,1,2,1,3,3,1,1,2,1,2,1,2,2,0,2,0,0,1,1,0,1,0, | ||
| 200 | 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 201 | 2,3,3,3,3,3,2,1,3,2,2,3,2,0,3,2,0,3,0,1,0,1,1,0,0,1,1,1,1,0,1,0, | ||
| 202 | 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 203 | 3,3,2,3,3,3,2,2,2,3,3,1,2,1,2,1,0,1,0,1,1,0,1,0,0,2,1,1,1,0,1,0, | ||
| 204 | 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, | ||
| 205 | 3,1,1,2,1,2,3,3,2,2,1,2,2,3,0,2,1,0,0,2,2,3,2,1,2,2,2,2,2,3,1,0, | ||
| 206 | 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 207 | 3,3,3,3,3,1,1,0,1,1,2,2,1,1,3,0,0,1,3,1,1,1,0,0,0,1,0,1,1,0,0,0, | ||
| 208 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 209 | 2,1,3,3,3,2,0,0,0,2,1,0,1,0,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 210 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 211 | 2,0,1,0,0,2,3,2,2,2,1,2,2,2,1,2,1,0,0,1,1,1,0,2,0,1,1,1,0,0,1,1, | ||
| 212 | 1,0,0,0,0,0,1,2,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0, | ||
| 213 | 2,3,3,3,3,0,0,0,0,1,0,0,0,0,3,0,1,2,1,0,0,0,0,0,0,0,1,1,0,0,1,1, | ||
| 214 | 1,0,1,0,1,2,0,0,1,1,2,1,0,1,1,1,1,0,1,1,1,1,0,1,0,0,1,0,0,1,1,0, | ||
| 215 | 2,2,3,2,2,2,3,1,2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,0,1,0,1,1,1,0,2,1, | ||
| 216 | 1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,0,1,1,0, | ||
| 217 | 3,3,3,2,2,2,2,3,2,2,1,1,2,2,2,2,1,1,3,1,2,1,2,0,0,1,1,0,1,0,2,1, | ||
| 218 | 1,1,1,1,1,2,1,0,1,1,1,1,0,1,0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,1,1,0, | ||
| 219 | 2,0,0,1,0,3,2,2,2,2,1,2,1,2,1,2,0,0,0,2,1,2,2,1,1,2,2,0,1,1,0,2, | ||
| 220 | 1,1,1,1,1,0,1,1,1,2,1,1,1,2,1,0,1,2,1,1,1,1,0,1,1,1,0,0,1,0,0,1, | ||
| 221 | 1,3,2,2,2,1,1,1,2,3,0,0,0,0,2,0,2,2,1,0,0,0,0,0,0,1,0,0,0,0,1,1, | ||
| 222 | 1,0,1,1,0,1,0,1,1,0,1,1,0,2,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0, | ||
| 223 | 2,3,2,3,2,1,2,2,2,2,1,0,0,0,2,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0,2,1, | ||
| 224 | 1,1,2,1,0,2,0,0,1,0,1,0,0,1,0,0,1,1,0,1,1,0,0,0,0,0,1,0,0,0,0,0, | ||
| 225 | 3,0,0,1,0,2,2,2,3,2,2,2,2,2,2,2,0,0,0,2,1,2,1,1,1,2,2,0,0,0,1,2, | ||
| 226 | 1,1,1,1,1,0,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,1,0,1,1,1,1,1,1,0,0,1, | ||
| 227 | 2,3,2,3,3,2,0,1,1,1,0,0,1,0,2,0,1,1,3,1,0,0,0,0,0,0,0,1,0,0,2,1, | ||
| 228 | 1,1,1,1,1,1,1,0,1,0,1,1,1,1,0,1,1,1,0,0,1,1,0,1,0,0,0,0,0,0,1,0, | ||
| 229 | 2,3,3,3,3,1,2,2,2,2,0,1,1,0,2,1,1,1,2,1,0,1,1,0,0,1,0,1,0,0,2,0, | ||
| 230 | 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 231 | 2,3,3,3,2,0,0,1,1,2,2,1,0,0,2,0,1,1,3,0,0,1,0,0,0,0,0,1,0,1,2,1, | ||
| 232 | 1,1,2,0,1,1,1,0,1,0,1,1,0,1,0,1,1,1,1,0,1,0,0,0,0,0,0,1,0,1,1,0, | ||
| 233 | 1,3,2,3,2,1,0,0,2,2,2,0,1,0,2,0,1,1,1,0,1,0,0,0,3,0,1,1,0,0,2,1, | ||
| 234 | 1,1,1,0,1,1,0,0,0,0,1,1,0,1,0,0,2,1,1,0,1,0,0,0,1,0,1,0,0,1,1,0, | ||
| 235 | 3,1,2,1,1,2,2,2,2,2,2,1,2,2,1,1,0,0,0,2,2,2,0,0,0,1,2,1,0,1,0,1, | ||
| 236 | 2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,2,1,1,1,0,1,0,1,1,0,1,1,1,0,0,1, | ||
| 237 | 3,0,0,0,0,2,0,1,1,1,1,1,1,1,0,1,0,0,0,1,1,1,0,1,0,1,1,0,0,1,0,1, | ||
| 238 | 1,1,0,0,1,0,0,0,1,0,1,1,0,0,1,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,1, | ||
| 239 | 1,3,3,2,2,0,0,0,2,2,0,0,0,1,2,0,1,1,2,0,0,0,0,0,0,0,0,1,0,0,2,1, | ||
| 240 | 0,1,1,0,0,1,1,0,0,0,1,1,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0, | ||
| 241 | 2,3,2,3,2,0,0,0,0,1,1,0,0,0,2,0,2,0,2,0,0,0,0,0,1,0,0,1,0,0,1,1, | ||
| 242 | 1,1,2,0,1,2,1,0,1,1,2,1,1,1,1,1,2,1,1,0,1,0,0,1,1,1,1,1,0,1,1,0, | ||
| 243 | 1,3,2,2,2,1,0,0,2,2,1,0,1,2,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,1,1, | ||
| 244 | 0,0,1,1,0,1,1,0,0,1,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, | ||
| 245 | 1,0,0,1,0,2,3,1,2,2,2,2,2,2,1,1,0,0,0,1,0,1,0,2,1,1,1,0,0,0,0,1, | ||
| 246 | 1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0, | ||
| 247 | 2,0,2,0,0,1,0,3,2,1,2,1,2,2,0,1,0,0,0,2,1,0,0,2,1,1,1,1,0,2,0,2, | ||
| 248 | 2,1,1,1,1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,0,0,0,1,1,1,1,0,1,0,0,1, | ||
| 249 | 1,2,2,2,2,1,0,0,1,0,0,0,0,0,2,0,1,1,1,1,0,0,0,0,1,0,1,2,0,0,2,0, | ||
| 250 | 1,0,1,1,1,2,1,0,1,0,1,1,0,0,1,0,1,1,1,0,1,0,0,0,1,0,0,1,0,1,1,0, | ||
| 251 | 2,1,2,2,2,0,3,0,1,1,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1, | ||
| 252 | 0,0,0,1,1,1,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0, | ||
| 253 | 1,2,2,3,2,2,0,0,1,1,2,0,1,2,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1, | ||
| 254 | 0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0, | ||
| 255 | 2,2,1,1,2,1,2,2,2,2,2,1,2,2,0,1,0,0,0,1,2,2,2,1,2,1,1,1,1,1,2,1, | ||
| 256 | 1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,0,1, | ||
| 257 | 1,2,2,2,2,0,1,0,2,2,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0, | ||
| 258 | 0,0,1,0,0,1,0,0,0,0,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, | ||
| 259 | 0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0, | ||
| 260 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 261 | 1,2,2,2,2,0,0,0,2,2,2,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1, | ||
| 262 | 0,1,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 263 | 1,2,2,2,2,0,0,0,0,1,0,0,1,1,2,0,0,0,0,1,0,1,0,0,1,0,0,2,0,0,0,1, | ||
| 264 | 0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, | ||
| 265 | 1,2,2,2,1,1,2,0,2,1,1,1,1,0,2,2,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,1, | ||
| 266 | 0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, | ||
| 267 | 1,0,2,1,2,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0, | ||
| 268 | 0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0, | ||
| 269 | 1,0,0,0,0,2,0,1,2,1,0,1,1,1,0,1,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,1, | ||
| 270 | 0,0,0,0,0,1,0,0,1,1,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1, | ||
| 271 | 2,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, | ||
| 272 | 1,0,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0, | ||
| 273 | 2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, | ||
| 274 | 1,1,1,0,1,0,1,0,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0, | ||
| 275 | 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, | ||
| 276 | 1,1,0,1,1,0,1,0,1,0,0,0,0,1,1,0,1,1,0,0,0,0,0,1,0,1,1,0,1,0,0,0, | ||
| 277 | 0,1,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 278 | 0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0, | ||
| 279 | ) | ||
| 280 | |||
| 281 | Koi8rModel = { | ||
| 282 | 'char_to_order_map': KOI8R_char_to_order_map, | ||
| 283 | 'precedence_matrix': RussianLangModel, | ||
| 284 | 'typical_positive_ratio': 0.976601, | ||
| 285 | 'keep_english_letter': False, | ||
| 286 | 'charset_name': "KOI8-R", | ||
| 287 | 'language': 'Russian', | ||
| 288 | } | ||
| 289 | |||
| 290 | Win1251CyrillicModel = { | ||
| 291 | 'char_to_order_map': win1251_char_to_order_map, | ||
| 292 | 'precedence_matrix': RussianLangModel, | ||
| 293 | 'typical_positive_ratio': 0.976601, | ||
| 294 | 'keep_english_letter': False, | ||
| 295 | 'charset_name': "windows-1251", | ||
| 296 | 'language': 'Russian', | ||
| 297 | } | ||
| 298 | |||
| 299 | Latin5CyrillicModel = { | ||
| 300 | 'char_to_order_map': latin5_char_to_order_map, | ||
| 301 | 'precedence_matrix': RussianLangModel, | ||
| 302 | 'typical_positive_ratio': 0.976601, | ||
| 303 | 'keep_english_letter': False, | ||
| 304 | 'charset_name': "ISO-8859-5", | ||
| 305 | 'language': 'Russian', | ||
| 306 | } | ||
| 307 | |||
| 308 | MacCyrillicModel = { | ||
| 309 | 'char_to_order_map': macCyrillic_char_to_order_map, | ||
| 310 | 'precedence_matrix': RussianLangModel, | ||
| 311 | 'typical_positive_ratio': 0.976601, | ||
| 312 | 'keep_english_letter': False, | ||
| 313 | 'charset_name': "MacCyrillic", | ||
| 314 | 'language': 'Russian', | ||
| 315 | } | ||
| 316 | |||
| 317 | Ibm866Model = { | ||
| 318 | 'char_to_order_map': IBM866_char_to_order_map, | ||
| 319 | 'precedence_matrix': RussianLangModel, | ||
| 320 | 'typical_positive_ratio': 0.976601, | ||
| 321 | 'keep_english_letter': False, | ||
| 322 | 'charset_name': "IBM866", | ||
| 323 | 'language': 'Russian', | ||
| 324 | } | ||
| 325 | |||
| 326 | Ibm855Model = { | ||
| 327 | 'char_to_order_map': IBM855_char_to_order_map, | ||
| 328 | 'precedence_matrix': RussianLangModel, | ||
| 329 | 'typical_positive_ratio': 0.976601, | ||
| 330 | 'keep_english_letter': False, | ||
| 331 | 'charset_name': "IBM855", | ||
| 332 | 'language': 'Russian', | ||
| 333 | } | ||
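All six Cyrillic models share the single RussianLangModel matrix and differ only in their char_to_order_map, so guessing the encoding of Russian text reduces to re-mapping the same bytes through each candidate map and keeping the best-scoring charset. A hypothetical illustration, reusing the score sketch from the Bulgarian section above:

    # All model names below come from this file; score() is the earlier sketch.
    CANDIDATES = [Koi8rModel, Win1251CyrillicModel, Latin5CyrillicModel,
                  MacCyrillicModel, Ibm866Model, Ibm855Model]

    def guess_cyrillic_charset(raw_bytes):
        best = max(CANDIDATES, key=lambda m: score(raw_bytes, m))
        return best['charset_name']

This is why the file ships one 4096-entry matrix instead of six: the language statistics are encoding-independent, and only the 256-entry byte maps vary.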
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/langgreekmodel.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/langgreekmodel.py new file mode 100644 index 0000000..73541cc --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/langgreekmodel.py | |||
| @@ -0,0 +1,225 @@ | |||
| 1 | ######################## BEGIN LICENSE BLOCK ######################## | ||
| 2 | # The Original Code is Mozilla Communicator client code. | ||
| 3 | # | ||
| 4 | # The Initial Developer of the Original Code is | ||
| 5 | # Netscape Communications Corporation. | ||
| 6 | # Portions created by the Initial Developer are Copyright (C) 1998 | ||
| 7 | # the Initial Developer. All Rights Reserved. | ||
| 8 | # | ||
| 9 | # Contributor(s): | ||
| 10 | # Mark Pilgrim - port to Python | ||
| 11 | # | ||
| 12 | # This library is free software; you can redistribute it and/or | ||
| 13 | # modify it under the terms of the GNU Lesser General Public | ||
| 14 | # License as published by the Free Software Foundation; either | ||
| 15 | # version 2.1 of the License, or (at your option) any later version. | ||
| 16 | # | ||
| 17 | # This library is distributed in the hope that it will be useful, | ||
| 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
| 20 | # Lesser General Public License for more details. | ||
| 21 | # | ||
| 22 | # You should have received a copy of the GNU Lesser General Public | ||
| 23 | # License along with this library; if not, write to the Free Software | ||
| 24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA | ||
| 25 | # 02110-1301 USA | ||
| 26 | ######################### END LICENSE BLOCK ######################### | ||
| 27 | |||
| 28 | # 255: Control characters that usually do not exist in any text | ||
| 29 | # 254: Carriage Return / Line Feed | ||
| 30 | # 253: symbols (punctuation) that do not belong to a word | ||
| 31 | # 252: 0 - 9 | ||
| 32 | |||
| 33 | # Character Mapping Table: | ||
| 34 | Latin7_char_to_order_map = ( | ||
| 35 | 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 | ||
| 36 | 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 | ||
| 37 | 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 | ||
| 38 | 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 | ||
| 39 | 253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85, # 40 | ||
| 40 | 79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253, # 50 | ||
| 41 | 253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55, # 60 | ||
| 42 | 78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253, # 70 | ||
| 43 | 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 80 | ||
| 44 | 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 90 | ||
| 45 | 253,233, 90,253,253,253,253,253,253,253,253,253,253, 74,253,253, # a0 | ||
| 46 | 253,253,253,253,247,248, 61, 36, 46, 71, 73,253, 54,253,108,123, # b0 | ||
| 47 | 110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39, # c0 | ||
| 48 | 35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15, # d0 | ||
| 49 | 124, 1, 29, 20, 21, 3, 32, 13, 25, 5, 11, 16, 10, 6, 30, 4, # e0 | ||
| 50 | 9, 8, 14, 7, 2, 12, 28, 23, 42, 24, 64, 75, 19, 26, 27,253, # f0 | ||
| 51 | ) | ||
| 52 | |||
| 53 | win1253_char_to_order_map = ( | ||
| 54 | 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 | ||
| 55 | 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 | ||
| 56 | 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 | ||
| 57 | 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 | ||
| 58 | 253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85, # 40 | ||
| 59 | 79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253, # 50 | ||
| 60 | 253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55, # 60 | ||
| 61 | 78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253, # 70 | ||
| 62 | 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 80 | ||
| 63 | 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 90 | ||
| 64 | 253,233, 61,253,253,253,253,253,253,253,253,253,253, 74,253,253, # a0 | ||
| 65 | 253,253,253,253,247,253,253, 36, 46, 71, 73,253, 54,253,108,123, # b0 | ||
| 66 | 110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39, # c0 | ||
| 67 | 35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15, # d0 | ||
| 68 | 124, 1, 29, 20, 21, 3, 32, 13, 25, 5, 11, 16, 10, 6, 30, 4, # e0 | ||
| 69 | 9, 8, 14, 7, 2, 12, 28, 23, 42, 24, 64, 75, 19, 26, 27,253, # f0 | ||
| 70 | ) | ||
| 71 | |||
| 72 | # Model Table: | ||
| 73 | # total sequences: 100% | ||
| 74 | # first 512 sequences: 98.2851% | ||
| 75 | # first 1024 sequences: 1.7001% | ||
| 76 | # rest sequences: 0.0359% | ||
| 77 | # negative sequences: 0.0148% | ||
| 78 | GreekLangModel = ( | ||
| 79 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 80 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 81 | 0,0,3,2,2,3,3,3,3,3,3,3,3,1,3,3,3,0,2,2,3,3,0,3,0,3,2,0,3,3,3,0, | ||
| 82 | 3,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 83 | 0,3,3,3,3,3,0,3,3,0,3,2,3,3,0,3,2,3,3,3,0,0,3,0,3,0,3,3,2,0,0,0, | ||
| 84 | 2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, | ||
| 85 | 0,2,3,2,2,3,3,3,3,3,3,3,3,0,3,3,3,3,0,2,3,3,0,3,3,3,3,2,3,3,3,0, | ||
| 86 | 2,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 87 | 0,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,2,1,3,3,3,3,2,3,3,2,3,3,2,0, | ||
| 88 | 0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 89 | 0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,2,3,3,0, | ||
| 90 | 2,0,1,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, | ||
| 91 | 0,3,3,3,3,3,2,3,0,0,0,0,3,3,0,3,1,3,3,3,0,3,3,0,3,3,3,3,0,0,0,0, | ||
| 92 | 2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 93 | 0,3,3,3,3,3,0,3,0,3,3,3,3,3,0,3,2,2,2,3,0,2,3,3,3,3,3,2,3,3,0,0, | ||
| 94 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 95 | 0,3,3,3,3,3,3,2,2,2,3,3,3,3,0,3,1,3,3,3,3,2,3,3,3,3,3,3,3,2,2,0, | ||
| 96 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 97 | 0,3,3,3,3,3,2,0,3,0,0,0,3,3,2,3,3,3,3,3,0,0,3,2,3,0,2,3,0,0,0,0, | ||
| 98 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 99 | 0,3,0,3,3,3,3,0,0,3,3,0,2,3,0,3,0,3,3,3,0,0,3,0,3,0,2,2,3,3,0,0, | ||
| 100 | 0,0,1,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 101 | 0,3,3,3,3,3,2,0,3,2,3,3,3,3,0,3,3,3,3,3,0,3,3,2,3,2,3,3,2,0,0,0, | ||
| 102 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 103 | 0,3,3,2,3,2,3,3,3,3,3,3,0,2,3,2,3,2,2,2,3,2,3,3,2,3,0,2,2,2,3,0, | ||
| 104 | 2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 105 | 0,0,3,0,0,0,3,3,3,2,3,3,0,0,3,0,3,0,0,0,3,2,0,3,0,3,0,0,2,0,2,0, | ||
| 106 | 0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 107 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 108 | 0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 109 | 0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,0,0,0,3,3,0,3,3,3,0,0,1,2,3,0, | ||
| 110 | 3,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 111 | 0,3,3,3,3,3,2,0,0,3,2,2,3,3,0,3,3,3,3,3,2,1,3,0,3,2,3,3,2,1,0,0, | ||
| 112 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 113 | 0,0,3,3,0,2,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,3,0,3,2,3,0,0,3,3,3,0, | ||
| 114 | 3,0,0,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 115 | 0,3,3,3,3,0,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,2,0,3,2,3,0,0,3,2,3,0, | ||
| 116 | 2,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 117 | 0,0,3,1,2,2,3,3,3,3,3,3,0,2,3,0,3,0,0,0,3,3,0,3,0,2,0,0,2,3,1,0, | ||
| 118 | 2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 119 | 0,3,0,3,3,3,3,0,3,0,3,3,2,3,0,3,3,3,3,3,3,0,3,3,3,0,2,3,0,0,3,0, | ||
| 120 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 121 | 0,3,0,3,3,3,0,0,3,0,0,0,3,3,0,3,0,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0, | ||
| 122 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 123 | 0,0,3,0,0,0,3,3,3,3,3,3,0,0,3,0,2,0,0,0,3,3,0,3,0,3,0,0,2,0,2,0, | ||
| 124 | 0,0,0,0,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 125 | 0,3,3,3,3,3,3,0,3,0,2,0,3,2,0,3,2,3,2,3,0,0,3,2,3,2,3,3,0,0,0,0, | ||
| 126 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 127 | 0,0,3,0,0,2,3,3,3,3,3,0,0,0,3,0,2,1,0,0,3,2,2,2,0,3,0,0,2,2,0,0, | ||
| 128 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 129 | 0,3,0,3,3,3,2,0,3,0,3,0,3,3,0,2,1,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0, | ||
| 130 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 131 | 0,2,3,3,3,0,3,3,3,3,3,3,0,2,3,0,3,0,0,0,2,1,0,2,2,3,0,0,2,2,2,0, | ||
| 132 | 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 133 | 0,0,3,0,0,2,3,3,3,2,3,0,0,1,3,0,2,0,0,0,0,3,0,1,0,2,0,0,1,1,1,0, | ||
| 134 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 135 | 0,3,3,3,3,3,1,0,3,0,0,0,3,2,0,3,2,3,3,3,0,0,3,0,3,2,2,2,1,0,0,0, | ||
| 136 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 137 | 0,3,0,3,3,3,0,0,3,0,0,0,0,2,0,2,3,3,2,2,2,2,3,0,2,0,2,2,0,0,0,0, | ||
| 138 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 139 | 0,3,3,3,3,2,0,0,0,0,0,0,2,3,0,2,0,2,3,2,0,0,3,0,3,0,3,1,0,0,0,0, | ||
| 140 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 141 | 0,0,0,0,0,0,3,2,3,3,2,2,3,0,2,0,3,0,0,0,2,0,0,0,0,1,2,0,2,0,2,0, | ||
| 142 | 0,2,0,2,0,2,2,0,0,1,0,2,2,2,0,2,2,2,0,2,2,2,0,0,2,0,0,1,0,0,0,0, | ||
| 143 | 0,2,0,3,3,2,0,0,0,0,0,0,1,3,0,2,0,2,2,2,0,0,2,0,3,0,0,2,0,0,0,0, | ||
| 144 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 145 | 0,3,0,2,3,2,0,2,2,0,2,0,2,2,0,2,0,2,2,2,0,0,0,0,0,0,2,3,0,0,0,2, | ||
| 146 | 0,1,2,0,0,0,0,2,2,0,0,0,2,1,0,2,2,0,0,0,0,0,0,1,0,2,0,0,0,0,0,0, | ||
| 147 | 0,0,2,1,0,2,3,2,2,3,2,3,2,0,0,3,3,3,0,0,3,2,0,0,0,1,1,0,2,0,2,2, | ||
| 148 | 0,2,0,2,0,2,2,0,0,2,0,2,2,2,0,2,2,2,2,0,0,2,0,0,0,2,0,1,0,0,0,0, | ||
| 149 | 0,3,0,3,3,2,2,0,3,0,0,0,2,2,0,2,2,2,1,2,0,0,1,2,2,0,0,3,0,0,0,2, | ||
| 150 | 0,1,2,0,0,0,1,2,0,0,0,0,0,0,0,2,2,0,1,0,0,2,0,0,0,2,0,0,0,0,0,0, | ||
| 151 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 152 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 153 | 0,2,3,3,2,2,0,0,0,2,0,2,3,3,0,2,0,0,0,0,0,0,2,2,2,0,2,2,0,2,0,2, | ||
| 154 | 0,2,2,0,0,2,2,2,2,1,0,0,2,2,0,2,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0, | ||
| 155 | 0,2,0,3,2,3,0,0,0,3,0,0,2,2,0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,0,2, | ||
| 156 | 0,0,2,2,0,0,2,2,2,0,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 157 | 0,0,2,0,0,3,2,0,2,2,2,2,2,0,0,0,2,0,0,0,0,2,0,1,0,0,2,0,1,0,0,0, | ||
| 158 | 0,2,2,2,0,2,2,0,1,2,0,2,2,2,0,2,2,2,2,1,2,2,0,0,2,0,0,0,0,0,0,0, | ||
| 159 | 0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, | ||
| 160 | 0,2,0,2,0,2,2,0,0,0,0,1,2,1,0,0,2,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0, | ||
| 161 | 0,0,0,3,2,3,0,0,2,0,0,0,2,2,0,2,0,0,0,1,0,0,2,0,2,0,2,2,0,0,0,0, | ||
| 162 | 0,0,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0, | ||
| 163 | 0,2,2,3,2,2,0,0,0,0,0,0,1,3,0,2,0,2,2,0,0,0,1,0,2,0,0,0,0,0,0,0, | ||
| 164 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 165 | 0,2,0,2,0,3,2,0,2,0,0,0,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, | ||
| 166 | 0,0,2,0,0,0,0,1,1,0,0,2,1,2,0,2,2,0,1,0,0,1,0,0,0,2,0,0,0,0,0,0, | ||
| 167 | 0,3,0,2,2,2,0,0,2,0,0,0,2,0,0,0,2,3,0,2,0,0,0,0,0,0,2,2,0,0,0,2, | ||
| 168 | 0,1,2,0,0,0,1,2,2,1,0,0,0,2,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0, | ||
| 169 | 0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 170 | 0,2,1,2,0,2,2,0,2,0,0,2,0,0,0,0,1,2,1,0,2,1,0,0,0,0,0,0,0,0,0,0, | ||
| 171 | 0,0,2,0,0,0,3,1,2,2,0,2,0,0,0,0,2,0,0,0,2,0,0,3,0,0,0,0,2,2,2,0, | ||
| 172 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 173 | 0,2,1,0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,2, | ||
| 174 | 0,2,2,0,0,2,2,2,2,2,0,1,2,0,0,0,2,2,0,1,0,2,0,0,2,2,0,0,0,0,0,0, | ||
| 175 | 0,0,0,0,1,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,0,0,0,0,2,0,2,0,0,0,0,2, | ||
| 176 | 0,1,2,0,0,0,0,2,2,1,0,1,0,1,0,2,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0, | ||
| 177 | 0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,2,0,0,2,2,0,0,0,0,1,0,0,0,0,0,0,2, | ||
| 178 | 0,2,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0, | ||
| 179 | 0,2,2,2,2,0,0,0,3,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,1, | ||
| 180 | 0,0,2,0,0,0,0,1,2,0,0,0,0,0,0,2,2,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0, | ||
| 181 | 0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,2,2,2,0,0,0,2,0,0,0,0,0,0,0,0,2, | ||
| 182 | 0,0,1,0,0,0,0,2,1,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0, | ||
| 183 | 0,3,0,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,2, | ||
| 184 | 0,0,2,0,0,0,0,2,2,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 185 | 0,2,0,2,2,1,0,0,0,0,0,0,2,0,0,2,0,2,2,2,0,0,0,0,0,0,2,0,0,0,0,2, | ||
| 186 | 0,0,2,0,0,2,0,2,2,0,0,0,0,2,0,2,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0, | ||
| 187 | 0,0,3,0,0,0,2,2,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, | ||
| 188 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 189 | 0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 190 | 0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0,0,0, | ||
| 191 | 0,2,2,2,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1, | ||
| 192 | 0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 193 | 0,0,0,0,0,0,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, | ||
| 194 | 0,2,0,0,0,2,0,0,0,0,0,1,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0, | ||
| 195 | 0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 196 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,2,0,0,0, | ||
| 197 | 0,2,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 198 | 0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 199 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 200 | 0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,2,0,2,0,0,0, | ||
| 201 | 0,0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,1,2,0,0,0,0,0,0,0,0,0,0, | ||
| 202 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 203 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 204 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 205 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 206 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 207 | ) | ||
| 208 | |||
| 209 | Latin7GreekModel = { | ||
| 210 | 'char_to_order_map': Latin7_char_to_order_map, | ||
| 211 | 'precedence_matrix': GreekLangModel, | ||
| 212 | 'typical_positive_ratio': 0.982851, | ||
| 213 | 'keep_english_letter': False, | ||
| 214 | 'charset_name': "ISO-8859-7", | ||
| 215 | 'language': 'Greek', | ||
| 216 | } | ||
| 217 | |||
| 218 | Win1253GreekModel = { | ||
| 219 | 'char_to_order_map': win1253_char_to_order_map, | ||
| 220 | 'precedence_matrix': GreekLangModel, | ||
| 221 | 'typical_positive_ratio': 0.982851, | ||
| 222 | 'keep_english_letter': False, | ||
| 223 | 'charset_name': "windows-1253", | ||
| 224 | 'language': 'Greek', | ||
| 225 | } | ||
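The typical_positive_ratio in each dict (0.982851 here, matching the 98.2851% "first 512 sequences" figure above) is the positive-pair rate measured on genuine text, which suggests a natural way to turn a raw score into a confidence. A sketch under the same assumptions as the earlier score helper:

    def confidence(raw_bytes, model):
        # Dividing by the ratio seen on real text maps "as good as genuine
        # Greek" to about 1.0; clamp to keep the result in [0, 1].
        return min(score(raw_bytes, model) / model['typical_positive_ratio'], 1.0)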
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/langhebrewmodel.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/langhebrewmodel.py new file mode 100644 index 0000000..07029b6 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/langhebrewmodel.py | |||
| @@ -0,0 +1,200 @@ | |||
| 1 | ######################## BEGIN LICENSE BLOCK ######################## | ||
| 2 | # The Original Code is Mozilla Universal charset detector code. | ||
| 3 | # | ||
| 4 | # The Initial Developer of the Original Code is | ||
| 5 | # Simon Montagu | ||
| 6 | # Portions created by the Initial Developer are Copyright (C) 2005 | ||
| 7 | # the Initial Developer. All Rights Reserved. | ||
| 8 | # | ||
| 9 | # Contributor(s): | ||
| 10 | # Mark Pilgrim - port to Python | ||
| 11 | # Shy Shalom - original C code | ||
| 12 | # Shoshannah Forbes - original C code (?) | ||
| 13 | # | ||
| 14 | # This library is free software; you can redistribute it and/or | ||
| 15 | # modify it under the terms of the GNU Lesser General Public | ||
| 16 | # License as published by the Free Software Foundation; either | ||
| 17 | # version 2.1 of the License, or (at your option) any later version. | ||
| 18 | # | ||
| 19 | # This library is distributed in the hope that it will be useful, | ||
| 20 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 21 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
| 22 | # Lesser General Public License for more details. | ||
| 23 | # | ||
| 24 | # You should have received a copy of the GNU Lesser General Public | ||
| 25 | # License along with this library; if not, write to the Free Software | ||
| 26 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA | ||
| 27 | # 02110-1301 USA | ||
| 28 | ######################### END LICENSE BLOCK ######################### | ||
| 29 | |||
| 30 | # 255: Control characters that usually do not exist in any text | ||
| 31 | # 254: Carriage Return / Line Feed | ||
| 32 | # 253: symbols (punctuation) that do not belong to a word | ||
| 33 | # 252: 0 - 9 | ||
| 34 | |||
| 35 | # Windows-1255 language model | ||
| 36 | # Character Mapping Table: | ||
| 37 | WIN1255_CHAR_TO_ORDER_MAP = ( | ||
| 38 | 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 | ||
| 39 | 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 | ||
| 40 | 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 | ||
| 41 | 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 | ||
| 42 | 253, 69, 91, 79, 80, 92, 89, 97, 90, 68,111,112, 82, 73, 95, 85, # 40 | ||
| 43 | 78,121, 86, 71, 67,102,107, 84,114,103,115,253,253,253,253,253, # 50 | ||
| 44 | 253, 50, 74, 60, 61, 42, 76, 70, 64, 53,105, 93, 56, 65, 54, 49, # 60 | ||
| 45 | 66,110, 51, 43, 44, 63, 81, 77, 98, 75,108,253,253,253,253,253, # 70 | ||
| 46 | 124,202,203,204,205, 40, 58,206,207,208,209,210,211,212,213,214, | ||
| 47 | 215, 83, 52, 47, 46, 72, 32, 94,216,113,217,109,218,219,220,221, | ||
| 48 | 34,116,222,118,100,223,224,117,119,104,125,225,226, 87, 99,227, | ||
| 49 | 106,122,123,228, 55,229,230,101,231,232,120,233, 48, 39, 57,234, | ||
| 50 | 30, 59, 41, 88, 33, 37, 36, 31, 29, 35,235, 62, 28,236,126,237, | ||
| 51 | 238, 38, 45,239,240,241,242,243,127,244,245,246,247,248,249,250, | ||
| 52 | 9, 8, 20, 16, 3, 2, 24, 14, 22, 1, 25, 15, 4, 11, 6, 23, | ||
| 53 | 12, 19, 13, 26, 18, 27, 21, 17, 7, 10, 5,251,252,128, 96,253, | ||
| 54 | ) | ||
| 55 | |||
| 56 | # Model Table: | ||
| 57 | # total sequences: 100% | ||
| 58 | # first 512 sequences: 98.4004% | ||
| 59 | # first 1024 sequences: 1.5981% | ||
| 60 | # rest sequences: 0.087% | ||
| 61 | # negative sequences: 0.0015% | ||
| 62 | HEBREW_LANG_MODEL = ( | ||
| 63 | 0,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,3,2,1,2,0,1,0,0, | ||
| 64 | 3,0,3,1,0,0,1,3,2,0,1,1,2,0,2,2,2,1,1,1,1,2,1,1,1,2,0,0,2,2,0,1, | ||
| 65 | 3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2, | ||
| 66 | 1,2,1,2,1,2,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, | ||
| 67 | 3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2, | ||
| 68 | 1,2,1,3,1,1,0,0,2,0,0,0,1,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, | ||
| 69 | 3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,1,2,2,1,3, | ||
| 70 | 1,2,1,1,2,2,0,0,2,2,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,1,1,0, | ||
| 71 | 3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,2,2,2,3,2, | ||
| 72 | 1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, | ||
| 73 | 3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,3,2,2,3,2,2,2,1,2,2,2,2, | ||
| 74 | 1,2,1,1,2,2,0,1,2,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0, | ||
| 75 | 3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,0,2,2,2,2,2, | ||
| 76 | 0,2,0,2,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, | ||
| 77 | 3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,0,2,2,2, | ||
| 78 | 0,2,1,2,2,2,0,0,2,1,0,0,0,0,1,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0, | ||
| 79 | 3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,2,1,2,3,2,2,2, | ||
| 80 | 1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,0, | ||
| 81 | 3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,2,0,2, | ||
| 82 | 0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,2,0,0,1,0, | ||
| 83 | 3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,2,2,3,2,1,2,1,1,1, | ||
| 84 | 0,1,1,1,1,1,3,0,1,0,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, | ||
| 85 | 3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,1,1,0,0,1,0,0,1,0,0,0,0, | ||
| 86 | 0,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 87 | 3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2, | ||
| 88 | 0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, | ||
| 89 | 3,3,3,3,3,3,3,3,3,2,3,3,3,2,1,2,3,3,2,3,3,3,3,2,3,2,1,2,0,2,1,2, | ||
| 90 | 0,2,0,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0, | ||
| 91 | 3,3,3,3,3,3,3,3,3,2,3,3,3,1,2,2,3,3,2,3,2,3,2,2,3,1,2,2,0,2,2,2, | ||
| 92 | 0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,1,0,0,1,0, | ||
| 93 | 3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,2,2,3,3,3,3,1,3,2,2,2, | ||
| 94 | 0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, | ||
| 95 | 3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,2,3,2,2,2,1,2,2,0,2,2,2,2, | ||
| 96 | 0,2,0,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, | ||
| 97 | 3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,1,3,2,3,3,2,3,3,2,2,1,2,2,2,2,2,2, | ||
| 98 | 0,2,1,2,1,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0, | ||
| 99 | 3,3,3,3,3,3,2,3,2,3,3,2,3,3,3,3,2,3,2,3,3,3,3,3,2,2,2,2,2,2,2,1, | ||
| 100 | 0,2,0,1,2,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, | ||
| 101 | 3,3,3,3,3,3,3,3,3,2,1,2,3,3,3,3,3,3,3,2,3,2,3,2,1,2,3,0,2,1,2,2, | ||
| 102 | 0,2,1,1,2,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,2,0, | ||
| 103 | 3,3,3,3,3,3,3,3,3,2,3,3,3,3,2,1,3,1,2,2,2,1,2,3,3,1,2,1,2,2,2,2, | ||
| 104 | 0,1,1,1,1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0, | ||
| 105 | 3,3,3,3,3,3,3,3,3,3,0,2,3,3,3,1,3,3,3,1,2,2,2,2,1,1,2,2,2,2,2,2, | ||
| 106 | 0,2,0,1,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, | ||
| 107 | 3,3,3,3,3,3,2,3,3,3,2,2,3,3,3,2,1,2,3,2,3,2,2,2,2,1,2,1,1,1,2,2, | ||
| 108 | 0,2,1,1,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, | ||
| 109 | 3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,1,0,0,0,0,0, | ||
| 110 | 1,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 111 | 3,3,3,3,3,2,3,3,2,3,1,2,2,2,2,3,2,3,1,1,2,2,1,2,2,1,1,0,2,2,2,2, | ||
| 112 | 0,1,0,1,2,2,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, | ||
| 113 | 3,0,0,1,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,2,2,0, | ||
| 114 | 0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 115 | 3,0,1,0,1,0,1,1,0,1,1,0,0,0,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,0,0,0, | ||
| 116 | 0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 117 | 3,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 118 | 0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, | ||
| 119 | 3,2,2,1,2,2,2,2,2,2,2,1,2,2,1,2,2,1,1,1,1,1,1,1,1,2,1,1,0,3,3,3, | ||
| 120 | 0,3,0,2,2,2,2,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, | ||
| 121 | 2,2,2,3,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,2,1,2,2,2,1,1,1,2,0,1, | ||
| 122 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 123 | 2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,2,2,2,2,2,2,0,2,2,0,0,0,0,0,0, | ||
| 124 | 0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 125 | 2,3,1,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,1,0,2,1,0, | ||
| 126 | 0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 127 | 3,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0, | ||
| 128 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, | ||
| 129 | 0,3,1,1,2,2,2,2,2,1,2,2,2,1,1,2,2,2,2,2,2,2,1,2,2,1,0,1,1,1,1,0, | ||
| 130 | 0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 131 | 3,2,1,1,1,1,2,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0, | ||
| 132 | 0,0,2,0,0,0,0,0,0,0,0,1,1,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0,1,0,0, | ||
| 133 | 2,1,1,2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,1,2,1,2,1,1,1,1,0,0,0,0, | ||
| 134 | 0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 135 | 1,2,1,2,2,2,2,2,2,2,2,2,2,1,2,1,2,1,1,2,1,1,1,2,1,2,1,2,0,1,0,1, | ||
| 136 | 0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 137 | 0,3,1,2,2,2,1,2,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,2,1,2,1,1,0,1,0,1, | ||
| 138 | 0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 139 | 2,1,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2, | ||
| 140 | 0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, | ||
| 141 | 3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0, | ||
| 142 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 143 | 2,1,1,1,1,1,1,1,0,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0, | ||
| 144 | 0,0,0,0,0,0,0,0,2,0,1,1,1,0,1,0,0,0,1,1,0,1,1,0,0,0,0,0,1,1,0,0, | ||
| 145 | 0,1,1,1,2,1,2,2,2,0,2,0,2,0,1,1,2,1,1,1,1,2,1,0,1,1,0,0,0,0,0,0, | ||
| 146 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 147 | 2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 148 | 1,0,1,0,0,0,0,0,1,0,1,2,2,0,1,0,0,1,1,2,2,1,2,0,2,0,0,0,1,2,0,1, | ||
| 149 | 2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 150 | 0,0,0,0,0,0,0,0,2,0,2,1,2,0,2,0,0,1,1,1,1,1,1,0,1,0,0,0,1,0,0,1, | ||
| 151 | 2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 152 | 0,0,1,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,1,2,2,0,0,1,0,0,0,1,0,0,1, | ||
| 153 | 1,1,2,1,0,1,1,1,0,1,0,1,1,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,2,1, | ||
| 154 | 0,2,0,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 155 | 2,1,0,0,1,0,1,1,1,1,0,0,0,0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0, | ||
| 156 | 0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 157 | 1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0, | ||
| 158 | 0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,1,0,0,0,1,1,0,1, | ||
| 159 | 2,0,1,0,1,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 160 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 161 | 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 162 | 0,0,0,0,0,0,0,0,1,0,1,1,1,0,1,0,0,1,1,2,1,1,2,0,1,0,0,0,1,1,0,1, | ||
| 163 | 1,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 164 | 0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,0,0,2,1,1,2,0,2,0,0,0,1,1,0,1, | ||
| 165 | 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 166 | 0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,2,2,1,2,1,1,0,1,0,0,0,1,1,0,1, | ||
| 167 | 2,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 168 | 0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,1,0,1, | ||
| 169 | 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 170 | 0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,2,1,1,1,0,2,1,1,0,0,0,2,1,0,1, | ||
| 171 | 1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 172 | 0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,0,2,1,1,0,1,0,0,0,1,1,0,1, | ||
| 173 | 2,2,1,1,1,0,1,1,0,1,1,0,1,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0, | ||
| 174 | 0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 175 | 2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 176 | 0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,0,1,2,1,0,2,0,0,0,1,1,0,1, | ||
| 177 | 2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 178 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 179 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 180 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0, | ||
| 181 | 0,1,0,0,2,0,2,1,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0, | ||
| 182 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 183 | 1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 184 | 0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,1,0,1,0,0,1,0,0,0,1,0,0,1, | ||
| 185 | 1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 186 | 1,0,0,0,0,0,0,0,1,0,1,1,0,0,1,0,0,2,1,1,1,1,1,0,1,0,0,0,0,1,0,1, | ||
| 187 | 0,1,1,1,2,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,0,0,0,0, | ||
| 188 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 189 | 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 190 | 0,0,0,0,0,0,0,0,0,0,1,2,1,0,0,0,0,0,1,1,1,1,1,0,1,0,0,0,1,1,0,0, | ||
| 191 | ) | ||
| 192 | |||
| 193 | Win1255HebrewModel = { | ||
| 194 | 'char_to_order_map': WIN1255_CHAR_TO_ORDER_MAP, | ||
| 195 | 'precedence_matrix': HEBREW_LANG_MODEL, | ||
| 196 | 'typical_positive_ratio': 0.984004, | ||
| 197 | 'keep_english_letter': False, | ||
| 198 | 'charset_name': "windows-1255", | ||
| 199 | 'language': 'Hebrew', | ||
| 200 | } | ||
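These single-byte model dicts are pure data; the scoring logic lives in the vendored sbcharsetprober module. As a rough usage sketch (the wiring follows the chardet 3.x layout vendored here, but the snippet itself is illustrative and not part of this diff):

    from pip._vendor.chardet.langhebrewmodel import Win1255HebrewModel
    from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetProber

    prober = SingleByteCharSetProber(Win1255HebrewModel)
    # "shalom" encoded as windows-1255 bytes
    prober.feed(u'\u05e9\u05dc\u05d5\u05dd'.encode('windows-1255'))
    print(prober.charset_name, prober.get_confidence())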
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/langhungarianmodel.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/langhungarianmodel.py new file mode 100644 index 0000000..6de87b7 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/langhungarianmodel.py | |||
| @@ -0,0 +1,225 @@ | |||
| 1 | ######################## BEGIN LICENSE BLOCK ######################## | ||
| 2 | # The Original Code is Mozilla Communicator client code. | ||
| 3 | # | ||
| 4 | # The Initial Developer of the Original Code is | ||
| 5 | # Netscape Communications Corporation. | ||
| 6 | # Portions created by the Initial Developer are Copyright (C) 1998 | ||
| 7 | # the Initial Developer. All Rights Reserved. | ||
| 8 | # | ||
| 9 | # Contributor(s): | ||
| 10 | # Mark Pilgrim - port to Python | ||
| 11 | # | ||
| 12 | # This library is free software; you can redistribute it and/or | ||
| 13 | # modify it under the terms of the GNU Lesser General Public | ||
| 14 | # License as published by the Free Software Foundation; either | ||
| 15 | # version 2.1 of the License, or (at your option) any later version. | ||
| 16 | # | ||
| 17 | # This library is distributed in the hope that it will be useful, | ||
| 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
| 20 | # Lesser General Public License for more details. | ||
| 21 | # | ||
| 22 | # You should have received a copy of the GNU Lesser General Public | ||
| 23 | # License along with this library; if not, write to the Free Software | ||
| 24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA | ||
| 25 | # 02110-1301 USA | ||
| 26 | ######################### END LICENSE BLOCK ######################### | ||
| 27 | |||
| 28 | # 255: Control characters that usually do not exist in any text | ||
| 29 | # 254: Carriage/Return | ||
| 30 | # 253: symbol (punctuation) that does not belong to a word | ||
| 31 | # 252: 0 - 9 | ||
| 32 | |||
| 33 | # Character Mapping Table: | ||
| 34 | Latin2_HungarianCharToOrderMap = ( | ||
| 35 | 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 | ||
| 36 | 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 | ||
| 37 | 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 | ||
| 38 | 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 | ||
| 39 | 253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47, | ||
| 40 | 46, 71, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253, | ||
| 41 | 253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8, | ||
| 42 | 23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253, | ||
| 43 | 159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174, | ||
| 44 | 175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190, | ||
| 45 | 191,192,193,194,195,196,197, 75,198,199,200,201,202,203,204,205, | ||
| 46 | 79,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220, | ||
| 47 | 221, 51, 81,222, 78,223,224,225,226, 44,227,228,229, 61,230,231, | ||
| 48 | 232,233,234, 58,235, 66, 59,236,237,238, 60, 69, 63,239,240,241, | ||
| 49 | 82, 14, 74,242, 70, 80,243, 72,244, 15, 83, 77, 84, 30, 76, 85, | ||
| 50 | 245,246,247, 25, 73, 42, 24,248,249,250, 31, 56, 29,251,252,253, | ||
| 51 | ) | ||
| 52 | |||
| 53 | win1250HungarianCharToOrderMap = ( | ||
| 54 | 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 | ||
| 55 | 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 | ||
| 56 | 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 | ||
| 57 | 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 | ||
| 58 | 253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47, | ||
| 59 | 46, 72, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253, | ||
| 60 | 253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8, | ||
| 61 | 23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253, | ||
| 62 | 161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176, | ||
| 63 | 177,178,179,180, 78,181, 69,182,183,184,185,186,187,188,189,190, | ||
| 64 | 191,192,193,194,195,196,197, 76,198,199,200,201,202,203,204,205, | ||
| 65 | 81,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220, | ||
| 66 | 221, 51, 83,222, 80,223,224,225,226, 44,227,228,229, 61,230,231, | ||
| 67 | 232,233,234, 58,235, 66, 59,236,237,238, 60, 70, 63,239,240,241, | ||
| 68 | 84, 14, 75,242, 71, 82,243, 73,244, 15, 85, 79, 86, 30, 77, 87, | ||
| 69 | 245,246,247, 25, 74, 42, 24,248,249,250, 31, 56, 29,251,252,253, | ||
| 70 | ) | ||
| 71 | |||
| 72 | # Model Table: | ||
| 73 | # total sequences: 100% | ||
| 74 | # first 512 sequences: 94.7368% | ||
| 75 | # first 1024 sequences: 5.2623% | ||
| 76 | # rest sequences: 0.8894% | ||
| 77 | # negative sequences: 0.0009% | ||
| 78 | HungarianLangModel = ( | ||
| 79 | 0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3, | ||
| 80 | 3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,2,3,3,1,1,2,2,2,2,2,1,2, | ||
| 81 | 3,2,2,3,3,3,3,3,2,3,3,3,3,3,3,1,2,3,3,3,3,2,3,3,1,1,3,3,0,1,1,1, | ||
| 82 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0, | ||
| 83 | 3,2,1,3,3,3,3,3,2,3,3,3,3,3,1,1,2,3,3,3,3,3,3,3,1,1,3,2,0,1,1,1, | ||
| 84 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, | ||
| 85 | 3,3,3,3,3,3,3,3,3,3,3,1,1,2,3,3,3,1,3,3,3,3,3,1,3,3,2,2,0,3,2,3, | ||
| 86 | 0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, | ||
| 87 | 3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,3,3,2,3,3,2,2,3,2,3,2,0,3,2,2, | ||
| 88 | 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0, | ||
| 89 | 3,3,3,3,3,3,2,3,3,3,3,3,2,3,3,3,1,2,3,2,2,3,1,2,3,3,2,2,0,3,3,3, | ||
| 90 | 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, | ||
| 91 | 3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,3,2,3,3,3,3,2,3,3,3,3,0,2,3,2, | ||
| 92 | 0,0,0,1,1,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, | ||
| 93 | 3,3,3,3,3,3,3,3,3,3,3,1,1,1,3,3,2,1,3,2,2,3,2,1,3,2,2,1,0,3,3,1, | ||
| 94 | 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, | ||
| 95 | 3,2,2,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,3,2,2,3,1,1,3,2,0,1,1,1, | ||
| 96 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, | ||
| 97 | 3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,1,3,3,3,3,3,2,2,1,3,3,3,0,1,1,2, | ||
| 98 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0, | ||
| 99 | 3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,2,0,3,2,3, | ||
| 100 | 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,1,0, | ||
| 101 | 3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,1,3,2,2,2,3,1,1,3,3,1,1,0,3,3,2, | ||
| 102 | 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, | ||
| 103 | 3,3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,2,3,3,3,3,3,1,2,3,2,2,0,2,2,2, | ||
| 104 | 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, | ||
| 105 | 3,3,3,2,2,2,3,1,3,3,2,2,1,3,3,3,1,1,3,1,2,3,2,3,2,2,2,1,0,2,2,2, | ||
| 106 | 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, | ||
| 107 | 3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,2,2,3,2,1,0,3,2,0,1,1,0, | ||
| 108 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 109 | 3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,1,0,3,3,3,3,0,2,3,0,0,2,1,0,1,0,0, | ||
| 110 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 111 | 3,3,3,3,3,3,2,2,3,3,2,2,2,2,3,3,0,1,2,3,2,3,2,2,3,2,1,2,0,2,2,2, | ||
| 112 | 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, | ||
| 113 | 3,3,3,3,3,3,1,2,3,3,3,2,1,2,3,3,2,2,2,3,2,3,3,1,3,3,1,1,0,2,3,2, | ||
| 114 | 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, | ||
| 115 | 3,3,3,1,2,2,2,2,3,3,3,1,1,1,3,3,1,1,3,1,1,3,2,1,2,3,1,1,0,2,2,2, | ||
| 116 | 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, | ||
| 117 | 3,3,3,2,1,2,1,1,3,3,1,1,1,1,3,3,1,1,2,2,1,2,1,1,2,2,1,1,0,2,2,1, | ||
| 118 | 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, | ||
| 119 | 3,3,3,1,1,2,1,1,3,3,1,0,1,1,3,3,2,0,1,1,2,3,1,0,2,2,1,0,0,1,3,2, | ||
| 120 | 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, | ||
| 121 | 3,2,1,3,3,3,3,3,1,2,3,2,3,3,2,1,1,3,2,3,2,1,2,2,0,1,2,1,0,0,1,1, | ||
| 122 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, | ||
| 123 | 3,3,3,3,2,2,2,2,3,1,2,2,1,1,3,3,0,3,2,1,2,3,2,1,3,3,1,1,0,2,1,3, | ||
| 124 | 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, | ||
| 125 | 3,3,3,2,2,2,3,2,3,3,3,2,1,1,3,3,1,1,1,2,2,3,2,3,2,2,2,1,0,2,2,1, | ||
| 126 | 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, | ||
| 127 | 1,0,0,3,3,3,3,3,0,0,3,3,2,3,0,0,0,2,3,3,1,0,1,2,0,0,1,1,0,0,0,0, | ||
| 128 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 129 | 3,1,2,3,3,3,3,3,1,2,3,3,2,2,1,1,0,3,3,2,2,1,2,2,1,0,2,2,0,1,1,1, | ||
| 130 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 131 | 3,3,2,2,1,3,1,2,3,3,2,2,1,1,2,2,1,1,1,1,3,2,1,1,1,1,2,1,0,1,2,1, | ||
| 132 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0, | ||
| 133 | 2,3,3,1,1,1,1,1,3,3,3,0,1,1,3,3,1,1,1,1,1,2,2,0,3,1,1,2,0,2,1,1, | ||
| 134 | 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, | ||
| 135 | 3,1,0,1,2,1,2,2,0,1,2,3,1,2,0,0,0,2,1,1,1,1,1,2,0,0,1,1,0,0,0,0, | ||
| 136 | 1,2,1,2,2,2,1,2,1,2,0,2,0,2,2,1,1,2,1,1,2,1,1,1,0,1,0,0,0,1,1,0, | ||
| 137 | 1,1,1,2,3,2,3,3,0,1,2,2,3,1,0,1,0,2,1,2,2,0,1,1,0,0,1,1,0,0,0,0, | ||
| 138 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 139 | 1,0,0,3,3,2,2,1,0,0,3,2,3,2,0,0,0,1,1,3,0,0,1,1,0,0,2,1,0,0,0,0, | ||
| 140 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 141 | 3,1,1,2,2,3,3,1,0,1,3,2,3,1,1,1,0,1,1,1,1,1,3,1,0,0,2,2,0,0,0,0, | ||
| 142 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 143 | 3,1,1,1,2,2,2,1,0,1,2,3,3,2,0,0,0,2,1,1,1,2,1,1,1,0,1,1,1,0,0,0, | ||
| 144 | 1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,2,1,1,1,1,1,1,0,1,1,1,0,0,1,1, | ||
| 145 | 3,2,2,1,0,0,1,1,2,2,0,3,0,1,2,1,1,0,0,1,1,1,0,1,1,1,1,0,2,1,1,1, | ||
| 146 | 2,2,1,1,1,2,1,2,1,1,1,1,1,1,1,2,1,1,1,2,3,1,1,1,1,1,1,1,1,1,0,1, | ||
| 147 | 2,3,3,0,1,0,0,0,3,3,1,0,0,1,2,2,1,0,0,0,0,2,0,0,1,1,1,0,2,1,1,1, | ||
| 148 | 2,1,1,1,1,1,1,2,1,1,0,1,1,0,1,1,1,0,1,2,1,1,0,1,1,1,1,1,1,1,0,1, | ||
| 149 | 2,3,3,0,1,0,0,0,2,2,0,0,0,0,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,1,0, | ||
| 150 | 2,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1, | ||
| 151 | 3,2,2,0,1,0,1,0,2,3,2,0,0,1,2,2,1,0,0,1,1,1,0,0,2,1,0,1,2,2,1,1, | ||
| 152 | 2,1,1,1,1,1,1,2,1,1,1,1,1,1,0,2,1,0,1,1,0,1,1,1,0,1,1,2,1,1,0,1, | ||
| 153 | 2,2,2,0,0,1,0,0,2,2,1,1,0,0,2,1,1,0,0,0,1,2,0,0,2,1,0,0,2,1,1,1, | ||
| 154 | 2,1,1,1,1,2,1,2,1,1,1,2,2,1,1,2,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1, | ||
| 155 | 1,2,3,0,0,0,1,0,3,2,1,0,0,1,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,2,1, | ||
| 156 | 1,1,0,0,0,1,0,1,1,1,1,1,2,0,0,1,0,0,0,2,0,0,1,1,1,1,1,1,1,1,0,1, | ||
| 157 | 3,0,0,2,1,2,2,1,0,0,2,1,2,2,0,0,0,2,1,1,1,0,1,1,0,0,1,1,2,0,0,0, | ||
| 158 | 1,2,1,2,2,1,1,2,1,2,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,0,0,1, | ||
| 159 | 1,3,2,0,0,0,1,0,2,2,2,0,0,0,2,2,1,0,0,0,0,3,1,1,1,1,0,0,2,1,1,1, | ||
| 160 | 2,1,0,1,1,1,0,1,1,1,1,1,1,1,0,2,1,0,0,1,0,1,1,0,1,1,1,1,1,1,0,1, | ||
| 161 | 2,3,2,0,0,0,1,0,2,2,0,0,0,0,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,1,0, | ||
| 162 | 2,1,1,1,1,2,1,2,1,2,0,1,1,1,0,2,1,1,1,2,1,1,1,1,0,1,1,1,1,1,0,1, | ||
| 163 | 3,1,1,2,2,2,3,2,1,1,2,2,1,1,0,1,0,2,2,1,1,1,1,1,0,0,1,1,0,1,1,0, | ||
| 164 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 165 | 2,2,2,0,0,0,0,0,2,2,0,0,0,0,2,2,1,0,0,0,1,1,0,0,1,2,0,0,2,1,1,1, | ||
| 166 | 2,2,1,1,1,2,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,1,1,0,1,2,1,1,1,0,1, | ||
| 167 | 1,0,0,1,2,3,2,1,0,0,2,0,1,1,0,0,0,1,1,1,1,0,1,1,0,0,1,0,0,0,0,0, | ||
| 168 | 1,2,1,2,1,2,1,1,1,2,0,2,1,1,1,0,1,2,0,0,1,1,1,0,0,0,0,0,0,0,0,0, | ||
| 169 | 2,3,2,0,0,0,0,0,1,1,2,1,0,0,1,1,1,0,0,0,0,2,0,0,1,1,0,0,2,1,1,1, | ||
| 170 | 2,1,1,1,1,1,1,2,1,0,1,1,1,1,0,2,1,1,1,1,1,1,0,1,0,1,1,1,1,1,0,1, | ||
| 171 | 1,2,2,0,1,1,1,0,2,2,2,0,0,0,3,2,1,0,0,0,1,1,0,0,1,1,0,1,1,1,0,0, | ||
| 172 | 1,1,0,1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,0,0,1,1,1,0,1,0,1, | ||
| 173 | 2,1,0,2,1,1,2,2,1,1,2,1,1,1,0,0,0,1,1,0,1,1,1,1,0,0,1,1,1,0,0,0, | ||
| 174 | 1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,1,0, | ||
| 175 | 1,2,3,0,0,0,1,0,2,2,0,0,0,0,2,2,0,0,0,0,0,1,0,0,1,0,0,0,2,0,1,0, | ||
| 176 | 2,1,1,1,1,1,0,2,0,0,0,1,2,1,1,1,1,0,1,2,0,1,0,1,0,1,1,1,0,1,0,1, | ||
| 177 | 2,2,2,0,0,0,1,0,2,1,2,0,0,0,1,1,2,0,0,0,0,1,0,0,1,1,0,0,2,1,0,1, | ||
| 178 | 2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1, | ||
| 179 | 1,2,2,0,0,0,1,0,2,2,2,0,0,0,1,1,0,0,0,0,0,1,1,0,2,0,0,1,1,1,0,1, | ||
| 180 | 1,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,0,0,1,1,0,1,0,1,1,1,1,1,0,0,0,1, | ||
| 181 | 1,0,0,1,0,1,2,1,0,0,1,1,1,2,0,0,0,1,1,0,1,0,1,1,0,0,1,0,0,0,0,0, | ||
| 182 | 0,2,1,2,1,1,1,1,1,2,0,2,0,1,1,0,1,2,1,0,1,1,1,0,0,0,0,0,0,1,0,0, | ||
| 183 | 2,1,1,0,1,2,0,0,1,1,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,2,1,0,1, | ||
| 184 | 2,2,1,1,1,1,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,0,1,0,1,1,1,1,1,0,1, | ||
| 185 | 1,2,2,0,0,0,0,0,1,1,0,0,0,0,2,1,0,0,0,0,0,2,0,0,2,2,0,0,2,0,0,1, | ||
| 186 | 2,1,1,1,1,1,1,1,0,1,1,0,1,1,0,1,0,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1, | ||
| 187 | 1,1,2,0,0,3,1,0,2,1,1,1,0,0,1,1,1,0,0,0,1,1,0,0,0,1,0,0,1,0,1,0, | ||
| 188 | 1,2,1,0,1,1,1,2,1,1,0,1,1,1,1,1,0,0,0,1,1,1,1,1,0,1,0,0,0,1,0,0, | ||
| 189 | 2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,2,0,0,0, | ||
| 190 | 2,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,1,0,1, | ||
| 191 | 2,1,1,1,2,1,1,1,0,1,1,2,1,0,0,0,0,1,1,1,1,0,1,0,0,0,0,1,0,0,0,0, | ||
| 192 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 193 | 1,1,0,1,1,1,1,1,0,0,1,1,2,1,0,0,0,1,1,0,0,0,1,1,0,0,1,0,1,0,0,0, | ||
| 194 | 1,2,1,1,1,1,1,1,1,1,0,1,0,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0, | ||
| 195 | 2,0,0,0,1,1,1,1,0,0,1,1,0,0,0,0,0,1,1,1,2,0,0,1,0,0,1,0,1,0,0,0, | ||
| 196 | 0,1,1,1,1,1,1,1,1,2,0,1,1,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, | ||
| 197 | 1,0,0,1,1,1,1,1,0,0,2,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0, | ||
| 198 | 0,1,1,1,1,1,1,0,1,1,0,1,0,1,1,0,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,0, | ||
| 199 | 1,0,0,1,1,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, | ||
| 200 | 0,1,1,1,1,1,0,0,1,1,0,1,0,1,0,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, | ||
| 201 | 0,0,0,1,0,0,0,0,0,0,1,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 202 | 0,1,1,1,0,1,0,0,1,1,0,1,0,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, | ||
| 203 | 2,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,0,1,0,0,1,0,1,0,1,1,1,0,0,1,0, | ||
| 204 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 205 | 1,0,0,1,1,1,1,0,0,0,1,1,1,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 206 | 0,1,1,1,1,1,1,0,1,1,0,1,0,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0, | ||
| 207 | ) | ||
| 208 | |||
| 209 | Latin2HungarianModel = { | ||
| 210 | 'char_to_order_map': Latin2_HungarianCharToOrderMap, | ||
| 211 | 'precedence_matrix': HungarianLangModel, | ||
| 212 | 'typical_positive_ratio': 0.947368, | ||
| 213 | 'keep_english_letter': True, | ||
| 214 | 'charset_name': "ISO-8859-2", | ||
| 215 | 'language': 'Hungarian', | ||
| 216 | } | ||
| 217 | |||
| 218 | Win1250HungarianModel = { | ||
| 219 | 'char_to_order_map': win1250HungarianCharToOrderMap, | ||
| 220 | 'precedence_matrix': HungarianLangModel, | ||
| 221 | 'typical_positive_ratio': 0.947368, | ||
| 222 | 'keep_english_letter': True, | ||
| 223 | 'charset_name': "windows-1250", | ||
| 224 | 'language': 'Hungarian', | ||
| 225 | } | ||
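The char-to-order maps and the flattened 64x64 precedence matrix work as a pair: each byte is mapped to a frequency order, and consecutive orders index the matrix to classify the bigram. A minimal sketch of that lookup (SAMPLE_SIZE = 64 matches the vendored sbcharsetprober; the helper itself is an assumption added here for illustration):

    SAMPLE_SIZE = 64  # orders >= 64 fall outside the sampled alphabet

    def pair_category(model, byte1, byte2):
        order1 = model['char_to_order_map'][byte1]
        order2 = model['char_to_order_map'][byte2]
        if order1 >= SAMPLE_SIZE or order2 >= SAMPLE_SIZE:
            return None  # pair is not scored
        # 3 = very likely, 2 = normal, 1 = unlikely, 0 = negative
        return model['precedence_matrix'][order1 * SAMPLE_SIZE + order2]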
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/langthaimodel.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/langthaimodel.py new file mode 100644 index 0000000..fdb3313 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/langthaimodel.py | |||
| @@ -0,0 +1,199 @@ | |||
| 1 | ######################## BEGIN LICENSE BLOCK ######################## | ||
| 2 | # The Original Code is Mozilla Communicator client code. | ||
| 3 | # | ||
| 4 | # The Initial Developer of the Original Code is | ||
| 5 | # Netscape Communications Corporation. | ||
| 6 | # Portions created by the Initial Developer are Copyright (C) 1998 | ||
| 7 | # the Initial Developer. All Rights Reserved. | ||
| 8 | # | ||
| 9 | # Contributor(s): | ||
| 10 | # Mark Pilgrim - port to Python | ||
| 11 | # | ||
| 12 | # This library is free software; you can redistribute it and/or | ||
| 13 | # modify it under the terms of the GNU Lesser General Public | ||
| 14 | # License as published by the Free Software Foundation; either | ||
| 15 | # version 2.1 of the License, or (at your option) any later version. | ||
| 16 | # | ||
| 17 | # This library is distributed in the hope that it will be useful, | ||
| 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
| 20 | # Lesser General Public License for more details. | ||
| 21 | # | ||
| 22 | # You should have received a copy of the GNU Lesser General Public | ||
| 23 | # License along with this library; if not, write to the Free Software | ||
| 24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA | ||
| 25 | # 02110-1301 USA | ||
| 26 | ######################### END LICENSE BLOCK ######################### | ||
| 27 | |||
| 28 | # 255: Control characters that usually do not exist in any text | ||
| 29 | # 254: Carriage/Return | ||
| 30 | # 253: symbol (punctuation) that does not belong to a word | ||
| 31 | # 252: 0 - 9 | ||
| 32 | |||
| 33 | # The following results for Thai were collected from a limited sample (1M). | ||
| 34 | |||
| 35 | # Character Mapping Table: | ||
| 36 | TIS620CharToOrderMap = ( | ||
| 37 | 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 | ||
| 38 | 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 | ||
| 39 | 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 | ||
| 40 | 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 | ||
| 41 | 253,182,106,107,100,183,184,185,101, 94,186,187,108,109,110,111, # 40 | ||
| 42 | 188,189,190, 89, 95,112,113,191,192,193,194,253,253,253,253,253, # 50 | ||
| 43 | 253, 64, 72, 73,114, 74,115,116,102, 81,201,117, 90,103, 78, 82, # 60 | ||
| 44 | 96,202, 91, 79, 84,104,105, 97, 98, 92,203,253,253,253,253,253, # 70 | ||
| 45 | 209,210,211,212,213, 88,214,215,216,217,218,219,220,118,221,222, | ||
| 46 | 223,224, 99, 85, 83,225,226,227,228,229,230,231,232,233,234,235, | ||
| 47 | 236, 5, 30,237, 24,238, 75, 8, 26, 52, 34, 51,119, 47, 58, 57, | ||
| 48 | 49, 53, 55, 43, 20, 19, 44, 14, 48, 3, 17, 25, 39, 62, 31, 54, | ||
| 49 | 45, 9, 16, 2, 61, 15,239, 12, 42, 46, 18, 21, 76, 4, 66, 63, | ||
| 50 | 22, 10, 1, 36, 23, 13, 40, 27, 32, 35, 86,240,241,242,243,244, | ||
| 51 | 11, 28, 41, 29, 33,245, 50, 37, 6, 7, 67, 77, 38, 93,246,247, | ||
| 52 | 68, 56, 59, 65, 69, 60, 70, 80, 71, 87,248,249,250,251,252,253, | ||
| 53 | ) | ||
| 54 | |||
| 55 | # Model Table: | ||
| 56 | # total sequences: 100% | ||
| 57 | # first 512 sequences: 92.6386% | ||
| 58 | # first 1024 sequences: 7.3177% | ||
| 59 | # rest sequences: 1.0230% | ||
| 60 | # negative sequences: 0.0436% | ||
| 61 | ThaiLangModel = ( | ||
| 62 | 0,1,3,3,3,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,0,0,3,3,3,0,3,3,3,3, | ||
| 63 | 0,3,3,0,0,0,1,3,0,3,3,2,3,3,0,1,2,3,3,3,3,0,2,0,2,0,0,3,2,1,2,2, | ||
| 64 | 3,0,3,3,2,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,0,3,2,3,0,2,2,2,3, | ||
| 65 | 0,2,3,0,0,0,0,1,0,1,2,3,1,1,3,2,2,0,1,1,0,0,1,0,0,0,0,0,0,0,1,1, | ||
| 66 | 3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2,3,3,2,3,2,3,3,2,2,2, | ||
| 67 | 3,1,2,3,0,3,3,2,2,1,2,3,3,1,2,0,1,3,0,1,0,0,1,0,0,0,0,0,0,0,1,1, | ||
| 68 | 3,3,2,2,3,3,3,3,1,2,3,3,3,3,3,2,2,2,2,3,3,2,2,3,3,2,2,3,2,3,2,2, | ||
| 69 | 3,3,1,2,3,1,2,2,3,3,1,0,2,1,0,0,3,1,2,1,0,0,1,0,0,0,0,0,0,1,0,1, | ||
| 70 | 3,3,3,3,3,3,2,2,3,3,3,3,2,3,2,2,3,3,2,2,3,2,2,2,2,1,1,3,1,2,1,1, | ||
| 71 | 3,2,1,0,2,1,0,1,0,1,1,0,1,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0, | ||
| 72 | 3,3,3,2,3,2,3,3,2,2,3,2,3,3,2,3,1,1,2,3,2,2,2,3,2,2,2,2,2,1,2,1, | ||
| 73 | 2,2,1,1,3,3,2,1,0,1,2,2,0,1,3,0,0,0,1,1,0,0,0,0,0,2,3,0,0,2,1,1, | ||
| 74 | 3,3,2,3,3,2,0,0,3,3,0,3,3,0,2,2,3,1,2,2,1,1,1,0,2,2,2,0,2,2,1,1, | ||
| 75 | 0,2,1,0,2,0,0,2,0,1,0,0,1,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,0, | ||
| 76 | 3,3,2,3,3,2,0,0,3,3,0,2,3,0,2,1,2,2,2,2,1,2,0,0,2,2,2,0,2,2,1,1, | ||
| 77 | 0,2,1,0,2,0,0,2,0,1,1,0,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0, | ||
| 78 | 3,3,2,3,2,3,2,0,2,2,1,3,2,1,3,2,1,2,3,2,2,3,0,2,3,2,2,1,2,2,2,2, | ||
| 79 | 1,2,2,0,0,0,0,2,0,1,2,0,1,1,1,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,1,0, | ||
| 80 | 3,3,2,3,3,2,3,2,2,2,3,2,2,3,2,2,1,2,3,2,2,3,1,3,2,2,2,3,2,2,2,3, | ||
| 81 | 3,2,1,3,0,1,1,1,0,2,1,1,1,1,1,0,1,0,1,1,0,0,0,0,0,0,0,0,0,2,0,0, | ||
| 82 | 1,0,0,3,0,3,3,3,3,3,0,0,3,0,2,2,3,3,3,3,3,0,0,0,1,1,3,0,0,0,0,2, | ||
| 83 | 0,0,1,0,0,0,0,0,0,0,2,3,0,0,0,3,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0, | ||
| 84 | 2,0,3,3,3,3,0,0,2,3,0,0,3,0,3,3,2,3,3,3,3,3,0,0,3,3,3,0,0,0,3,3, | ||
| 85 | 0,0,3,0,0,0,0,2,0,0,2,1,1,3,0,0,1,0,0,2,3,0,1,0,0,0,0,0,0,0,1,0, | ||
| 86 | 3,3,3,3,2,3,3,3,3,3,3,3,1,2,1,3,3,2,2,1,2,2,2,3,1,1,2,0,2,1,2,1, | ||
| 87 | 2,2,1,0,0,0,1,1,0,1,0,1,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0, | ||
| 88 | 3,0,2,1,2,3,3,3,0,2,0,2,2,0,2,1,3,2,2,1,2,1,0,0,2,2,1,0,2,1,2,2, | ||
| 89 | 0,1,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 90 | 3,3,3,3,2,1,3,3,1,1,3,0,2,3,1,1,3,2,1,1,2,0,2,2,3,2,1,1,1,1,1,2, | ||
| 91 | 3,0,0,1,3,1,2,1,2,0,3,0,0,0,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0, | ||
| 92 | 3,3,1,1,3,2,3,3,3,1,3,2,1,3,2,1,3,2,2,2,2,1,3,3,1,2,1,3,1,2,3,0, | ||
| 93 | 2,1,1,3,2,2,2,1,2,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2, | ||
| 94 | 3,3,2,3,2,3,3,2,3,2,3,2,3,3,2,1,0,3,2,2,2,1,2,2,2,1,2,2,1,2,1,1, | ||
| 95 | 2,2,2,3,0,1,3,1,1,1,1,0,1,1,0,2,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 96 | 3,3,3,3,2,3,2,2,1,1,3,2,3,2,3,2,0,3,2,2,1,2,0,2,2,2,1,2,2,2,2,1, | ||
| 97 | 3,2,1,2,2,1,0,2,0,1,0,0,1,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,1, | ||
| 98 | 3,3,3,3,3,2,3,1,2,3,3,2,2,3,0,1,1,2,0,3,3,2,2,3,0,1,1,3,0,0,0,0, | ||
| 99 | 3,1,0,3,3,0,2,0,2,1,0,0,3,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 100 | 3,3,3,2,3,2,3,3,0,1,3,1,1,2,1,2,1,1,3,1,1,0,2,3,1,1,1,1,1,1,1,1, | ||
| 101 | 3,1,1,2,2,2,2,1,1,1,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, | ||
| 102 | 3,2,2,1,1,2,1,3,3,2,3,2,2,3,2,2,3,1,2,2,1,2,0,3,2,1,2,2,2,2,2,1, | ||
| 103 | 3,2,1,2,2,2,1,1,1,1,0,0,1,1,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 104 | 3,3,3,3,3,3,3,3,1,3,3,0,2,1,0,3,2,0,0,3,1,0,1,1,0,1,0,0,0,0,0,1, | ||
| 105 | 1,0,0,1,0,3,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 106 | 3,0,2,2,2,3,0,0,1,3,0,3,2,0,3,2,2,3,3,3,3,3,1,0,2,2,2,0,2,2,1,2, | ||
| 107 | 0,2,3,0,0,0,0,1,0,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, | ||
| 108 | 3,0,2,3,1,3,3,2,3,3,0,3,3,0,3,2,2,3,2,3,3,3,0,0,2,2,3,0,1,1,1,3, | ||
| 109 | 0,0,3,0,0,0,2,2,0,1,3,0,1,2,2,2,3,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1, | ||
| 110 | 3,2,3,3,2,0,3,3,2,2,3,1,3,2,1,3,2,0,1,2,2,0,2,3,2,1,0,3,0,0,0,0, | ||
| 111 | 3,0,0,2,3,1,3,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 112 | 3,1,3,2,2,2,1,2,0,1,3,1,1,3,1,3,0,0,2,1,1,1,1,2,1,1,1,0,2,1,0,1, | ||
| 113 | 1,2,0,0,0,3,1,1,0,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,3,1,0,0,0,1,0, | ||
| 114 | 3,3,3,3,2,2,2,2,2,1,3,1,1,1,2,0,1,1,2,1,2,1,3,2,0,0,3,1,1,1,1,1, | ||
| 115 | 3,1,0,2,3,0,0,0,3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 116 | 0,0,0,2,3,0,3,3,0,2,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0, | ||
| 117 | 0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 118 | 0,0,2,3,1,3,0,0,1,2,0,0,2,0,3,3,2,3,3,3,2,3,0,0,2,2,2,0,0,0,2,2, | ||
| 119 | 0,0,1,0,0,0,0,3,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, | ||
| 120 | 0,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,1,2,3,1,3,3,0,0,1,0,3,0,0,0,0,0, | ||
| 121 | 0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 122 | 3,3,1,2,3,1,2,3,1,0,3,0,2,2,1,0,2,1,1,2,0,1,0,0,1,1,1,1,0,1,0,0, | ||
| 123 | 1,0,0,0,0,1,1,0,3,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 124 | 3,3,3,3,2,1,0,1,1,1,3,1,2,2,2,2,2,2,1,1,1,1,0,3,1,0,1,3,1,1,1,1, | ||
| 125 | 1,1,0,2,0,1,3,1,1,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,1, | ||
| 126 | 3,0,2,2,1,3,3,2,3,3,0,1,1,0,2,2,1,2,1,3,3,1,0,0,3,2,0,0,0,0,2,1, | ||
| 127 | 0,1,0,0,0,0,1,2,0,1,1,3,1,1,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, | ||
| 128 | 0,0,3,0,0,1,0,0,0,3,0,0,3,0,3,1,0,1,1,1,3,2,0,0,0,3,0,0,0,0,2,0, | ||
| 129 | 0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, | ||
| 130 | 3,3,1,3,2,1,3,3,1,2,2,0,1,2,1,0,1,2,0,0,0,0,0,3,0,0,0,3,0,0,0,0, | ||
| 131 | 3,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 132 | 3,0,1,2,0,3,3,3,2,2,0,1,1,0,1,3,0,0,0,2,2,0,0,0,0,3,1,0,1,0,0,0, | ||
| 133 | 0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 134 | 3,0,2,3,1,2,0,0,2,1,0,3,1,0,1,2,0,1,1,1,1,3,0,0,3,1,1,0,2,2,1,1, | ||
| 135 | 0,2,0,0,0,0,0,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 136 | 3,0,0,3,1,2,0,0,2,2,0,1,2,0,1,0,1,3,1,2,1,0,0,0,2,0,3,0,0,0,1,0, | ||
| 137 | 0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 138 | 3,0,1,1,2,2,0,0,0,2,0,2,1,0,1,1,0,1,1,1,2,1,0,0,1,1,1,0,2,1,1,1, | ||
| 139 | 0,1,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,1, | ||
| 140 | 0,0,0,2,0,1,3,1,1,1,1,0,0,0,0,3,2,0,1,0,0,0,1,2,0,0,0,1,0,0,0,0, | ||
| 141 | 0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 142 | 0,0,0,0,0,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0, | ||
| 143 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 144 | 1,0,2,3,2,2,0,0,0,1,0,0,0,0,2,3,2,1,2,2,3,0,0,0,2,3,1,0,0,0,1,1, | ||
| 145 | 0,0,1,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0, | ||
| 146 | 3,3,2,2,0,1,0,0,0,0,2,0,2,0,1,0,0,0,1,1,0,0,0,2,1,0,1,0,1,1,0,0, | ||
| 147 | 0,1,0,2,0,0,1,0,3,0,1,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 148 | 3,3,1,0,0,1,0,0,0,0,0,1,1,2,0,0,0,0,1,0,0,1,3,1,0,0,0,0,1,1,0,0, | ||
| 149 | 0,1,0,0,0,0,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0, | ||
| 150 | 3,3,1,1,1,1,2,3,0,0,2,1,1,1,1,1,0,2,1,1,0,0,0,2,1,0,1,2,1,1,0,1, | ||
| 151 | 2,1,0,3,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 152 | 1,3,1,0,0,0,0,0,0,0,3,0,0,0,3,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1, | ||
| 153 | 0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 154 | 3,3,2,0,0,0,0,0,0,1,2,1,0,1,1,0,2,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, | ||
| 155 | 0,0,0,0,0,0,2,0,0,0,1,3,0,1,0,0,0,2,0,0,0,0,0,0,0,1,2,0,0,0,0,0, | ||
| 156 | 3,3,0,0,1,1,2,0,0,1,2,1,0,1,1,1,0,1,1,0,0,2,1,1,0,1,0,0,1,1,1,0, | ||
| 157 | 0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 158 | 2,2,2,1,0,0,0,0,1,0,0,0,0,3,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0, | ||
| 159 | 2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 160 | 2,3,0,0,1,1,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 161 | 0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 162 | 3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 163 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 164 | 1,1,0,1,2,0,1,2,0,0,1,1,0,2,0,1,0,0,1,0,0,0,0,1,0,0,0,2,0,0,0,0, | ||
| 165 | 1,0,0,1,0,1,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 166 | 0,1,0,0,0,0,0,0,0,1,1,0,1,1,0,2,1,3,0,0,0,0,1,1,0,0,0,0,0,0,0,3, | ||
| 167 | 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 168 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, | ||
| 169 | 0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 170 | 2,0,1,0,1,0,0,2,0,0,2,0,0,1,1,2,0,0,1,1,0,0,0,1,0,0,0,1,1,0,0,0, | ||
| 171 | 1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, | ||
| 172 | 1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1, | ||
| 173 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 174 | 3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 175 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0, | ||
| 176 | 2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0, | ||
| 177 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 178 | 2,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0, | ||
| 179 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 180 | 2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 181 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,3,0,0,0, | ||
| 182 | 2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 183 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0, | ||
| 184 | 1,0,0,0,0,0,0,0,0,1,0,0,0,0,2,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 185 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 186 | 0,0,1,1,0,0,2,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 187 | 0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 188 | 2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 189 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 190 | ) | ||
| 191 | |||
| 192 | TIS620ThaiModel = { | ||
| 193 | 'char_to_order_map': TIS620CharToOrderMap, | ||
| 194 | 'precedence_matrix': ThaiLangModel, | ||
| 195 | 'typical_positive_ratio': 0.926386, | ||
| 196 | 'keep_english_letter': False, | ||
| 197 | 'charset_name': "TIS-620", | ||
| 198 | 'language': 'Thai', | ||
| 199 | } | ||
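End to end, these tables surface through the top-level detect() API. A hypothetical smoke test, assuming Python's standard 'tis-620' codec (the confidence actually returned depends on sample length, so treat the expected result as indicative only):

    from pip._vendor import chardet

    sample = u'\u0e20\u0e32\u0e29\u0e32\u0e44\u0e17\u0e22'.encode('tis-620')
    print(chardet.detect(sample))  # expected to report TIS-620 / Thai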
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/langturkishmodel.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/langturkishmodel.py new file mode 100644 index 0000000..64ec9bd --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/langturkishmodel.py | |||
| @@ -0,0 +1,193 @@ | |||
| 1 | # -*- coding: utf-8 -*- | ||
| 2 | ######################## BEGIN LICENSE BLOCK ######################## | ||
| 3 | # The Original Code is Mozilla Communicator client code. | ||
| 4 | # | ||
| 5 | # The Initial Developer of the Original Code is | ||
| 6 | # Netscape Communications Corporation. | ||
| 7 | # Portions created by the Initial Developer are Copyright (C) 1998 | ||
| 8 | # the Initial Developer. All Rights Reserved. | ||
| 9 | # | ||
| 10 | # Contributor(s): | ||
| 11 | # Mark Pilgrim - port to Python | ||
| 12 | # Özgür Baskın - Turkish Language Model | ||
| 13 | # | ||
| 14 | # This library is free software; you can redistribute it and/or | ||
| 15 | # modify it under the terms of the GNU Lesser General Public | ||
| 16 | # License as published by the Free Software Foundation; either | ||
| 17 | # version 2.1 of the License, or (at your option) any later version. | ||
| 18 | # | ||
| 19 | # This library is distributed in the hope that it will be useful, | ||
| 20 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 21 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
| 22 | # Lesser General Public License for more details. | ||
| 23 | # | ||
| 24 | # You should have received a copy of the GNU Lesser General Public | ||
| 25 | # License along with this library; if not, write to the Free Software | ||
| 26 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA | ||
| 27 | # 02110-1301 USA | ||
| 28 | ######################### END LICENSE BLOCK ######################### | ||
| 29 | |||
| 30 | # 255: Control characters that usually do not exist in any text | ||
| 31 | # 254: Carriage/Return | ||
| 32 | # 253: symbol (punctuation) that does not belong to a word | ||
| 33 | # 252: 0 - 9 | ||
| 34 | |||
| 35 | # Character Mapping Table: | ||
| 36 | Latin5_TurkishCharToOrderMap = ( | ||
| 37 | 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, | ||
| 38 | 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, | ||
| 39 | 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, | ||
| 40 | 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, | ||
| 41 | 255, 23, 37, 47, 39, 29, 52, 36, 45, 53, 60, 16, 49, 20, 46, 42, | ||
| 42 | 48, 69, 44, 35, 31, 51, 38, 62, 65, 43, 56,255,255,255,255,255, | ||
| 43 | 255, 1, 21, 28, 12, 2, 18, 27, 25, 3, 24, 10, 5, 13, 4, 15, | ||
| 44 | 26, 64, 7, 8, 9, 14, 32, 57, 58, 11, 22,255,255,255,255,255, | ||
| 45 | 180,179,178,177,176,175,174,173,172,171,170,169,168,167,166,165, | ||
| 46 | 164,163,162,161,160,159,101,158,157,156,155,154,153,152,151,106, | ||
| 47 | 150,149,148,147,146,145,144,100,143,142,141,140,139,138,137,136, | ||
| 48 | 94, 80, 93,135,105,134,133, 63,132,131,130,129,128,127,126,125, | ||
| 49 | 124,104, 73, 99, 79, 85,123, 54,122, 98, 92,121,120, 91,103,119, | ||
| 50 | 68,118,117, 97,116,115, 50, 90,114,113,112,111, 55, 41, 40, 86, | ||
| 51 | 89, 70, 59, 78, 71, 82, 88, 33, 77, 66, 84, 83,110, 75, 61, 96, | ||
| 52 | 30, 67,109, 74, 87,102, 34, 95, 81,108, 76, 72, 17, 6, 19,107, | ||
| 53 | ) | ||
| 54 | |||
| 55 | TurkishLangModel = ( | ||
| 56 | 3,2,3,3,3,1,3,3,3,3,3,3,3,3,2,1,1,3,3,1,3,3,0,3,3,3,3,3,0,3,1,3, | ||
| 57 | 3,2,1,0,0,1,1,0,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,2,2,0,0,1,0,0,1, | ||
| 58 | 3,2,2,3,3,0,3,3,3,3,3,3,3,2,3,1,0,3,3,1,3,3,0,3,3,3,3,3,0,3,0,3, | ||
| 59 | 3,1,1,0,1,0,1,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,2,2,0,0,0,1,0,1, | ||
| 60 | 3,3,2,3,3,0,3,3,3,3,3,3,3,2,3,1,1,3,3,0,3,3,1,2,3,3,3,3,0,3,0,3, | ||
| 61 | 3,1,1,0,0,0,1,0,0,0,0,1,1,0,1,2,1,0,0,0,1,0,0,0,0,2,0,0,0,0,0,1, | ||
| 62 | 3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,1,3,3,2,0,3,2,1,2,2,1,3,3,0,0,0,2, | ||
| 63 | 2,2,0,1,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,1,0,0,1, | ||
| 64 | 3,3,3,2,3,3,1,2,3,3,3,3,3,3,3,1,3,2,1,0,3,2,0,1,2,3,3,2,1,0,0,2, | ||
| 65 | 2,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,2,0,0,0, | ||
| 66 | 1,0,1,3,3,1,3,3,3,3,3,3,3,1,2,0,0,2,3,0,2,3,0,0,2,2,2,3,0,3,0,1, | ||
| 67 | 2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 68 | 3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,0,3,2,0,2,3,2,3,3,1,0,0,2, | ||
| 69 | 3,2,0,0,1,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,1,1,1,0,2,0,0,1, | ||
| 70 | 3,3,3,2,3,3,2,3,3,3,3,2,3,3,3,0,3,3,0,0,2,1,0,0,2,3,2,2,0,0,0,2, | ||
| 71 | 2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,0,1,0,2,0,0,1, | ||
| 72 | 3,3,3,2,3,3,3,3,3,3,3,2,3,3,3,0,3,2,0,1,3,2,1,1,3,2,3,2,1,0,0,2, | ||
| 73 | 2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0, | ||
| 74 | 3,3,3,2,3,3,3,3,3,3,3,2,3,3,3,0,3,2,2,0,2,3,0,0,2,2,2,2,0,0,0,2, | ||
| 75 | 3,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,2,0,1,0,0,0, | ||
| 76 | 3,3,3,3,3,3,3,2,2,2,2,3,2,3,3,0,3,3,1,1,2,2,0,0,2,2,3,2,0,0,1,3, | ||
| 77 | 0,3,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1, | ||
| 78 | 3,3,3,2,3,3,3,2,1,2,2,3,2,3,3,0,3,2,0,0,1,1,0,1,1,2,1,2,0,0,0,1, | ||
| 79 | 0,3,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,0,0, | ||
| 80 | 3,3,3,2,3,3,2,3,2,2,2,3,3,3,3,1,3,1,1,0,3,2,1,1,3,3,2,3,1,0,0,1, | ||
| 81 | 1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,2,0,0,1, | ||
| 82 | 3,2,2,3,3,0,3,3,3,3,3,3,3,2,2,1,0,3,3,1,3,3,0,1,3,3,2,3,0,3,0,3, | ||
| 83 | 2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0, | ||
| 84 | 2,2,2,3,3,0,3,3,3,3,3,3,3,3,3,0,0,3,2,0,3,3,0,3,2,3,3,3,0,3,1,3, | ||
| 85 | 2,0,0,0,0,0,0,0,0,0,0,1,0,1,2,0,1,0,0,0,0,0,0,0,2,2,0,0,1,0,0,1, | ||
| 86 | 3,3,3,1,2,3,3,1,0,0,1,0,0,3,3,2,3,0,0,2,0,0,2,0,2,0,0,0,2,0,2,0, | ||
| 87 | 0,3,1,0,1,0,0,0,2,2,1,0,1,1,2,1,2,2,2,0,2,1,1,0,0,0,2,0,0,0,0,0, | ||
| 88 | 1,2,1,3,3,0,3,3,3,3,3,2,3,0,0,0,0,2,3,0,2,3,1,0,2,3,1,3,0,3,0,2, | ||
| 89 | 3,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 90 | 3,3,3,1,3,3,2,2,3,2,2,0,1,2,3,0,1,2,1,0,1,0,0,0,1,0,2,2,0,0,0,1, | ||
| 91 | 1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0, | ||
| 92 | 3,3,3,1,3,3,1,1,3,3,1,1,3,3,1,0,2,1,2,0,2,1,0,0,1,1,2,1,0,0,0,2, | ||
| 93 | 2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 94 | 3,3,3,1,0,2,1,3,0,0,2,0,0,3,3,0,3,0,0,1,0,1,2,0,0,1,1,2,2,0,1,0, | ||
| 95 | 0,1,2,1,1,0,1,0,1,1,1,1,1,0,1,1,1,2,2,1,2,0,1,0,0,0,0,0,0,1,0,0, | ||
| 96 | 3,3,3,2,3,2,3,3,0,2,2,2,3,3,3,0,3,0,0,0,2,2,0,1,2,1,1,1,0,0,0,1, | ||
| 97 | 0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0, | ||
| 98 | 3,3,3,3,3,3,2,1,2,2,3,3,3,3,2,0,2,0,0,0,2,2,0,0,2,1,3,3,0,0,1,1, | ||
| 99 | 1,1,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0, | ||
| 100 | 1,1,2,3,3,0,3,3,3,3,3,3,2,2,0,2,0,2,3,2,3,2,2,2,2,2,2,2,1,3,2,3, | ||
| 101 | 2,0,2,1,2,2,2,2,1,1,2,2,1,2,2,1,2,0,0,2,1,1,0,2,1,0,0,1,0,0,0,1, | ||
| 102 | 2,3,3,1,1,1,0,1,1,1,2,3,2,1,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0, | ||
| 103 | 0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 104 | 3,3,3,2,2,2,3,2,3,2,2,1,3,3,3,0,2,1,2,0,2,1,0,0,1,1,1,1,1,0,0,1, | ||
| 105 | 2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,2,0,1,0,0,0, | ||
| 106 | 3,3,3,2,3,3,3,3,3,2,3,1,2,3,3,1,2,0,0,0,0,0,0,0,3,2,1,1,0,0,0,0, | ||
| 107 | 2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0, | ||
| 108 | 3,3,3,2,2,3,3,2,1,1,1,1,1,3,3,0,3,1,0,0,1,1,0,0,3,1,2,1,0,0,0,0, | ||
| 109 | 0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0, | ||
| 110 | 3,3,3,2,2,3,2,2,2,3,2,1,1,3,3,0,3,0,0,0,0,1,0,0,3,1,1,2,0,0,0,1, | ||
| 111 | 1,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, | ||
| 112 | 1,1,1,3,3,0,3,3,3,3,3,2,2,2,1,2,0,2,1,2,2,1,1,0,1,2,2,2,2,2,2,2, | ||
| 113 | 0,0,2,1,2,1,2,1,0,1,1,3,1,2,1,1,2,0,0,2,0,1,0,1,0,1,0,0,0,1,0,1, | ||
| 114 | 3,3,3,1,3,3,3,0,1,1,0,2,2,3,1,0,3,0,0,0,1,0,0,0,1,0,0,1,0,1,0,0, | ||
| 115 | 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 116 | 3,3,2,0,0,2,2,1,0,0,1,0,0,3,3,1,3,0,0,1,1,0,2,0,3,0,0,0,2,0,1,1, | ||
| 117 | 0,1,2,0,1,2,2,0,2,2,2,2,1,0,2,1,1,0,2,0,2,1,2,0,0,0,0,0,0,0,0,0, | ||
| 118 | 3,3,3,1,3,2,3,2,0,2,2,2,1,3,2,0,2,1,2,0,1,2,0,0,1,0,2,2,0,0,0,2, | ||
| 119 | 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0, | ||
| 120 | 3,3,3,0,3,3,1,1,2,3,1,0,3,2,3,0,3,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0, | ||
| 121 | 1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 122 | 0,0,0,3,3,0,3,3,2,3,3,2,2,0,0,0,0,1,2,0,1,3,0,0,0,3,1,1,0,3,0,2, | ||
| 123 | 2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 124 | 3,3,3,1,2,2,1,0,3,1,1,1,1,3,3,2,3,0,0,1,0,1,2,0,2,2,0,2,2,0,2,1, | ||
| 125 | 0,2,2,1,1,1,1,0,2,1,1,0,1,1,1,1,2,1,2,1,2,0,1,0,1,0,0,0,0,0,0,0, | ||
| 126 | 3,3,3,0,1,1,3,0,0,1,1,0,0,2,2,0,3,0,0,1,1,0,1,0,0,0,0,0,2,0,0,0, | ||
| 127 | 0,3,1,0,1,0,1,0,2,0,0,1,0,1,0,1,1,1,2,1,1,0,2,0,0,0,0,0,0,0,0,0, | ||
| 128 | 3,3,3,0,2,0,2,0,1,1,1,0,0,3,3,0,2,0,0,1,0,0,2,1,1,0,1,0,1,0,1,0, | ||
| 129 | 0,2,0,1,2,0,2,0,2,1,1,0,1,0,2,1,1,0,2,1,1,0,1,0,0,0,1,1,0,0,0,0, | ||
| 130 | 3,2,3,0,1,0,0,0,0,0,0,0,0,1,2,0,1,0,0,1,0,0,1,0,0,0,0,0,2,0,0,0, | ||
| 131 | 0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,2,1,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0, | ||
| 132 | 3,3,3,0,0,2,3,0,0,1,0,1,0,2,3,2,3,0,0,1,3,0,2,1,0,0,0,0,2,0,1,0, | ||
| 133 | 0,2,1,0,0,1,1,0,2,1,0,0,1,0,0,1,1,0,1,1,2,0,1,0,0,0,0,1,0,0,0,0, | ||
| 134 | 3,2,2,0,0,1,1,0,0,0,0,0,0,3,1,1,1,0,0,0,0,0,1,0,0,0,0,0,2,0,1,0, | ||
| 135 | 0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0, | ||
| 136 | 0,0,0,3,3,0,2,3,2,2,1,2,2,1,1,2,0,1,3,2,2,2,0,0,2,2,0,0,0,1,2,1, | ||
| 137 | 3,0,2,1,1,0,1,1,1,0,1,2,2,2,1,1,2,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0, | ||
| 138 | 0,1,1,2,3,0,3,3,3,2,2,2,2,1,0,1,0,1,0,1,2,2,0,0,2,2,1,3,1,1,2,1, | ||
| 139 | 0,0,1,1,2,0,1,1,0,0,1,2,0,2,1,1,2,0,0,1,0,0,0,1,0,1,0,1,0,0,0,0, | ||
| 140 | 3,3,2,0,0,3,1,0,0,0,0,0,0,3,2,1,2,0,0,1,0,0,2,0,0,0,0,0,2,0,1,0, | ||
| 141 | 0,2,1,1,0,0,1,0,1,2,0,0,1,1,0,0,2,1,1,1,1,0,2,0,0,0,0,0,0,0,0,0, | ||
| 142 | 3,3,2,0,0,1,0,0,0,0,1,0,0,3,3,2,2,0,0,1,0,0,2,0,1,0,0,0,2,0,1,0, | ||
| 143 | 0,0,1,1,0,0,2,0,2,1,0,0,1,1,2,1,2,0,2,1,2,1,1,1,0,0,1,1,0,0,0,0, | ||
| 144 | 3,3,2,0,0,2,2,0,0,0,1,1,0,2,2,1,3,1,0,1,0,1,2,0,0,0,0,0,1,0,1,0, | ||
| 145 | 0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0, | ||
| 146 | 3,3,3,2,0,0,0,1,0,0,1,0,0,2,3,1,2,0,0,1,0,0,2,0,0,0,1,0,2,0,2,0, | ||
| 147 | 0,1,1,2,2,1,2,0,2,1,1,0,0,1,1,0,1,1,1,1,2,1,1,0,0,0,0,0,0,0,0,0, | ||
| 148 | 3,3,3,0,2,1,2,1,0,0,1,1,0,3,3,1,2,0,0,1,0,0,2,0,2,0,1,1,2,0,0,0, | ||
| 149 | 0,0,1,1,1,1,2,0,1,1,0,1,1,1,1,0,0,0,1,1,1,0,1,0,0,0,1,0,0,0,0,0, | ||
| 150 | 3,3,3,0,2,2,3,2,0,0,1,0,0,2,3,1,0,0,0,0,0,0,2,0,2,0,0,0,2,0,0,0, | ||
| 151 | 0,1,1,0,0,0,1,0,0,1,0,1,1,0,1,0,1,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0, | ||
| 152 | 3,2,3,0,0,0,0,0,0,0,1,0,0,2,2,2,2,0,0,1,0,0,2,0,0,0,0,0,2,0,1,0, | ||
| 153 | 0,0,2,1,1,0,1,0,2,1,1,0,0,1,1,2,1,0,2,0,2,0,1,0,0,0,2,0,0,0,0,0, | ||
| 154 | 0,0,0,2,2,0,2,1,1,1,1,2,2,0,0,1,0,1,0,0,1,3,0,0,0,0,1,0,0,2,1,0, | ||
| 155 | 0,0,1,0,1,0,0,0,0,0,2,1,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, | ||
| 156 | 2,0,0,2,3,0,2,3,1,2,2,0,2,0,0,2,0,2,1,1,1,2,1,0,0,1,2,1,1,2,1,0, | ||
| 157 | 1,0,2,0,1,0,1,1,0,0,2,2,1,2,1,1,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 158 | 3,3,3,0,2,1,2,0,0,0,1,0,0,3,2,0,1,0,0,1,0,0,2,0,0,0,1,2,1,0,1,0, | ||
| 159 | 0,0,0,0,1,0,1,0,0,1,0,0,0,0,1,0,1,0,1,1,1,0,1,0,0,0,0,0,0,0,0,0, | ||
| 160 | 0,0,0,2,2,0,2,2,1,1,0,1,1,1,1,1,0,0,1,2,1,1,1,0,1,0,0,0,1,1,1,1, | ||
| 161 | 0,0,2,1,0,1,1,1,0,1,1,2,1,2,1,1,2,0,1,1,2,1,0,2,0,0,0,0,0,0,0,0, | ||
| 162 | 3,2,2,0,0,2,0,0,0,0,0,0,0,2,2,0,2,0,0,1,0,0,2,0,0,0,0,0,2,0,0,0, | ||
| 163 | 0,2,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0, | ||
| 164 | 0,0,0,3,2,0,2,2,0,1,1,0,1,0,0,1,0,0,0,1,0,1,0,0,0,0,0,1,0,0,0,0, | ||
| 165 | 2,0,1,0,1,0,1,1,0,0,1,2,0,1,0,1,1,0,0,1,0,1,0,2,0,0,0,0,0,0,0,0, | ||
| 166 | 2,2,2,0,1,1,0,0,0,1,0,0,0,1,2,0,1,0,0,1,0,0,1,0,0,0,0,1,2,0,1,0, | ||
| 167 | 0,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0, | ||
| 168 | 2,2,2,2,1,0,1,1,1,0,0,0,0,1,2,0,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0, | ||
| 169 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, | ||
| 170 | 1,1,2,0,1,0,0,0,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0, | ||
| 171 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2,0,0,0,0,0,1, | ||
| 172 | 0,0,1,2,2,0,2,1,2,1,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,0,0,0,1,0,0, | ||
| 173 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0, | ||
| 174 | 2,2,2,0,0,0,1,0,0,0,0,0,0,2,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, | ||
| 175 | 0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 176 | 0,0,0,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0, | ||
| 177 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 178 | 2,2,2,0,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0, | ||
| 179 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0, | ||
| 180 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 181 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 182 | 0,0,1,0,0,0,0,0,0,0,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 183 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | ||
| 184 | ) | ||
| 185 | |||
| 186 | Latin5TurkishModel = { | ||
| 187 | 'char_to_order_map': Latin5_TurkishCharToOrderMap, | ||
| 188 | 'precedence_matrix': TurkishLangModel, | ||
| 189 | 'typical_positive_ratio': 0.970290, | ||
| 190 | 'keep_english_letter': True, | ||
| 191 | 'charset_name': "ISO-8859-9", | ||
| 192 | 'language': 'Turkish', | ||
| 193 | } | ||
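The 'typical_positive_ratio' field is the share of bigrams that fell into the "very likely" category in the training corpus; the single-byte prober divides its observed positive ratio by this constant when computing confidence. A standalone approximation of that arithmetic (the exact formula lives in the vendored sbcharsetprober; this helper is illustrative only):

    def confidence(positive_seqs, total_seqs, freq_chars, total_chars,
                   typical_positive_ratio=0.970290):
        if total_seqs == 0 or total_chars == 0:
            return 0.01
        r = (positive_seqs / float(total_seqs)) / typical_positive_ratio
        r *= freq_chars / float(total_chars)
        return min(r, 0.99)

    # e.g. text where 95% of pairs are "very likely" for Turkish:
    print(confidence(950, 1000, 900, 1000))  # ~0.88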
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/latin1prober.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/latin1prober.py new file mode 100644 index 0000000..7c37520 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/latin1prober.py | |||
| @@ -0,0 +1,145 @@ | |||
| 1 | ######################## BEGIN LICENSE BLOCK ######################## | ||
| 2 | # The Original Code is Mozilla Universal charset detector code. | ||
| 3 | # | ||
| 4 | # The Initial Developer of the Original Code is | ||
| 5 | # Netscape Communications Corporation. | ||
| 6 | # Portions created by the Initial Developer are Copyright (C) 2001 | ||
| 7 | # the Initial Developer. All Rights Reserved. | ||
| 8 | # | ||
| 9 | # Contributor(s): | ||
| 10 | # Mark Pilgrim - port to Python | ||
| 11 | # Shy Shalom - original C code | ||
| 12 | # | ||
| 13 | # This library is free software; you can redistribute it and/or | ||
| 14 | # modify it under the terms of the GNU Lesser General Public | ||
| 15 | # License as published by the Free Software Foundation; either | ||
| 16 | # version 2.1 of the License, or (at your option) any later version. | ||
| 17 | # | ||
| 18 | # This library is distributed in the hope that it will be useful, | ||
| 19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
| 21 | # Lesser General Public License for more details. | ||
| 22 | # | ||
| 23 | # You should have received a copy of the GNU Lesser General Public | ||
| 24 | # License along with this library; if not, write to the Free Software | ||
| 25 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA | ||
| 26 | # 02110-1301 USA | ||
| 27 | ######################### END LICENSE BLOCK ######################### | ||
| 28 | |||
| 29 | from .charsetprober import CharSetProber | ||
| 30 | from .enums import ProbingState | ||
| 31 | |||
| 32 | FREQ_CAT_NUM = 4 | ||
| 33 | |||
| 34 | UDF = 0 # undefined | ||
| 35 | OTH = 1 # other | ||
| 36 | ASC = 2 # ascii capital letter | ||
| 37 | ASS = 3 # ascii small letter | ||
| 38 | ACV = 4 # accent capital vowel | ||
| 39 | ACO = 5 # accent capital other | ||
| 40 | ASV = 6 # accent small vowel | ||
| 41 | ASO = 7 # accent small other | ||
| 42 | CLASS_NUM = 8 # total classes | ||
| 43 | |||
| 44 | Latin1_CharToClass = ( | ||
| 45 | OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 00 - 07 | ||
| 46 | OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 08 - 0F | ||
| 47 | OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 10 - 17 | ||
| 48 | OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 18 - 1F | ||
| 49 | OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 20 - 27 | ||
| 50 | OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 28 - 2F | ||
| 51 | OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 30 - 37 | ||
| 52 | OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 38 - 3F | ||
| 53 | OTH, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 40 - 47 | ||
| 54 | ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 48 - 4F | ||
| 55 | ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 50 - 57 | ||
| 56 | ASC, ASC, ASC, OTH, OTH, OTH, OTH, OTH, # 58 - 5F | ||
| 57 | OTH, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 60 - 67 | ||
| 58 | ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 68 - 6F | ||
| 59 | ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 70 - 77 | ||
| 60 | ASS, ASS, ASS, OTH, OTH, OTH, OTH, OTH, # 78 - 7F | ||
| 61 | OTH, UDF, OTH, ASO, OTH, OTH, OTH, OTH, # 80 - 87 | ||
| 62 | OTH, OTH, ACO, OTH, ACO, UDF, ACO, UDF, # 88 - 8F | ||
| 63 | UDF, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 90 - 97 | ||
| 64 | OTH, OTH, ASO, OTH, ASO, UDF, ASO, ACO, # 98 - 9F | ||
| 65 | OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A0 - A7 | ||
| 66 | OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A8 - AF | ||
| 67 | OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B0 - B7 | ||
| 68 | OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B8 - BF | ||
| 69 | ACV, ACV, ACV, ACV, ACV, ACV, ACO, ACO, # C0 - C7 | ||
| 70 | ACV, ACV, ACV, ACV, ACV, ACV, ACV, ACV, # C8 - CF | ||
| 71 | ACO, ACO, ACV, ACV, ACV, ACV, ACV, OTH, # D0 - D7 | ||
| 72 | ACV, ACV, ACV, ACV, ACV, ACO, ACO, ACO, # D8 - DF | ||
| 73 | ASV, ASV, ASV, ASV, ASV, ASV, ASO, ASO, # E0 - E7 | ||
| 74 | ASV, ASV, ASV, ASV, ASV, ASV, ASV, ASV, # E8 - EF | ||
| 75 | ASO, ASO, ASV, ASV, ASV, ASV, ASV, OTH, # F0 - F7 | ||
| 76 | ASV, ASV, ASV, ASV, ASV, ASO, ASO, ASO, # F8 - FF | ||
| 77 | ) | ||
| 78 | |||
| 79 | # 0 : illegal | ||
| 80 | # 1 : very unlikely | ||
| 81 | # 2 : normal | ||
| 82 | # 3 : very likely | ||
| 83 | Latin1ClassModel = ( | ||
| 84 | # UDF OTH ASC ASS ACV ACO ASV ASO | ||
| 85 | 0, 0, 0, 0, 0, 0, 0, 0, # UDF | ||
| 86 | 0, 3, 3, 3, 3, 3, 3, 3, # OTH | ||
| 87 | 0, 3, 3, 3, 3, 3, 3, 3, # ASC | ||
| 88 | 0, 3, 3, 3, 1, 1, 3, 3, # ASS | ||
| 89 | 0, 3, 3, 3, 1, 2, 1, 2, # ACV | ||
| 90 | 0, 3, 3, 3, 3, 3, 3, 3, # ACO | ||
| 91 | 0, 3, 1, 3, 1, 1, 1, 3, # ASV | ||
| 92 | 0, 3, 1, 3, 1, 1, 3, 3, # ASO | ||
| 93 | ) | ||
| 94 | |||
| 95 | |||
| 96 | class Latin1Prober(CharSetProber): | ||
| 97 | def __init__(self): | ||
| 98 | super(Latin1Prober, self).__init__() | ||
| 99 | self._last_char_class = None | ||
| 100 | self._freq_counter = None | ||
| 101 | self.reset() | ||
| 102 | |||
| 103 | def reset(self): | ||
| 104 | self._last_char_class = OTH | ||
| 105 | self._freq_counter = [0] * FREQ_CAT_NUM | ||
| 106 | CharSetProber.reset(self) | ||
| 107 | |||
| 108 | @property | ||
| 109 | def charset_name(self): | ||
| 110 | return "ISO-8859-1" | ||
| 111 | |||
| 112 | @property | ||
| 113 | def language(self): | ||
| 114 | return "" | ||
| 115 | |||
| 116 | def feed(self, byte_str): | ||
| 117 | byte_str = self.filter_with_english_letters(byte_str) | ||
| 118 | for c in byte_str: | ||
| 119 | char_class = Latin1_CharToClass[c] | ||
| 120 | freq = Latin1ClassModel[(self._last_char_class * CLASS_NUM) | ||
| 121 | + char_class] | ||
| 122 | if freq == 0: | ||
| 123 | self._state = ProbingState.NOT_ME | ||
| 124 | break | ||
| 125 | self._freq_counter[freq] += 1 | ||
| 126 | self._last_char_class = char_class | ||
| 127 | |||
| 128 | return self.state | ||
| 129 | |||
| 130 | def get_confidence(self): | ||
| 131 | if self.state == ProbingState.NOT_ME: | ||
| 132 | return 0.01 | ||
| 133 | |||
| 134 | total = sum(self._freq_counter) | ||
| 135 | if total < 0.01: | ||
| 136 | confidence = 0.0 | ||
| 137 | else: | ||
| 138 | confidence = ((self._freq_counter[3] - self._freq_counter[1] * 20.0) | ||
| 139 | / total) | ||
| 140 | if confidence < 0.0: | ||
| 141 | confidence = 0.0 | ||
| 142 | # Lower the confidence of Latin-1 so that other, more accurate | ||
| 143 | # detectors can take priority. | ||
| 144 | confidence = confidence * 0.73 | ||
| 145 | return confidence | ||
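The prober can also be driven directly; a hedged sketch (in Python 3, iterating over bytes yields the ints that feed() indexes the class table with):

    from pip._vendor.chardet.latin1prober import Latin1Prober

    prober = Latin1Prober()
    prober.feed(u'caf\xe9 cr\xe8me br\xfbl\xe9e'.encode('latin-1'))
    print(prober.charset_name, round(prober.get_confidence(), 3))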
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/mbcharsetprober.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/mbcharsetprober.py new file mode 100644 index 0000000..4609154 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/mbcharsetprober.py | |||
| @@ -0,0 +1,91 @@ | |||
| 1 | ######################## BEGIN LICENSE BLOCK ######################## | ||
| 2 | # The Original Code is Mozilla Universal charset detector code. | ||
| 3 | # | ||
| 4 | # The Initial Developer of the Original Code is | ||
| 5 | # Netscape Communications Corporation. | ||
| 6 | # Portions created by the Initial Developer are Copyright (C) 2001 | ||
| 7 | # the Initial Developer. All Rights Reserved. | ||
| 8 | # | ||
| 9 | # Contributor(s): | ||
| 10 | # Mark Pilgrim - port to Python | ||
| 11 | # Shy Shalom - original C code | ||
| 12 | # Proofpoint, Inc. | ||
| 13 | # | ||
| 14 | # This library is free software; you can redistribute it and/or | ||
| 15 | # modify it under the terms of the GNU Lesser General Public | ||
| 16 | # License as published by the Free Software Foundation; either | ||
| 17 | # version 2.1 of the License, or (at your option) any later version. | ||
| 18 | # | ||
| 19 | # This library is distributed in the hope that it will be useful, | ||
| 20 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 21 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
| 22 | # Lesser General Public License for more details. | ||
| 23 | # | ||
| 24 | # You should have received a copy of the GNU Lesser General Public | ||
| 25 | # License along with this library; if not, write to the Free Software | ||
| 26 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA | ||
| 27 | # 02110-1301 USA | ||
| 28 | ######################### END LICENSE BLOCK ######################### | ||
| 29 | |||
| 30 | from .charsetprober import CharSetProber | ||
| 31 | from .enums import ProbingState, MachineState | ||
| 32 | |||
| 33 | |||
| 34 | class MultiByteCharSetProber(CharSetProber): | ||
| 35 | """ | ||
| 36 | MultiByteCharSetProber | ||
| 37 | """ | ||
| 38 | |||
| 39 | def __init__(self, lang_filter=None): | ||
| 40 | super(MultiByteCharSetProber, self).__init__(lang_filter=lang_filter) | ||
| 41 | self.distribution_analyzer = None | ||
| 42 | self.coding_sm = None | ||
| 43 | self._last_char = [0, 0] | ||
| 44 | |||
| 45 | def reset(self): | ||
| 46 | super(MultiByteCharSetProber, self).reset() | ||
| 47 | if self.coding_sm: | ||
| 48 | self.coding_sm.reset() | ||
| 49 | if self.distribution_analyzer: | ||
| 50 | self.distribution_analyzer.reset() | ||
| 51 | self._last_char = [0, 0] | ||
| 52 | |||
| 53 | @property | ||
| 54 | def charset_name(self): | ||
| 55 | raise NotImplementedError | ||
| 56 | |||
| 57 | @property | ||
| 58 | def language(self): | ||
| 59 | raise NotImplementedError | ||
| 60 | |||
| 61 | def feed(self, byte_str): | ||
| 62 | for i in range(len(byte_str)): | ||
| 63 | coding_state = self.coding_sm.next_state(byte_str[i]) | ||
| 64 | if coding_state == MachineState.ERROR: | ||
| 65 | self.logger.debug('%s %s prober hit error at byte %s', | ||
| 66 | self.charset_name, self.language, i) | ||
| 67 | self._state = ProbingState.NOT_ME | ||
| 68 | break | ||
| 69 | elif coding_state == MachineState.ITS_ME: | ||
| 70 | self._state = ProbingState.FOUND_IT | ||
| 71 | break | ||
| 72 | elif coding_state == MachineState.START: | ||
| 73 | char_len = self.coding_sm.get_current_charlen() | ||
| 74 | if i == 0: | ||
| 75 | self._last_char[1] = byte_str[0] | ||
| 76 | self.distribution_analyzer.feed(self._last_char, char_len) | ||
| 77 | else: | ||
| 78 | self.distribution_analyzer.feed(byte_str[i - 1:i + 1], | ||
| 79 | char_len) | ||
| 80 | |||
| 81 | self._last_char[0] = byte_str[-1] | ||
| 82 | |||
| 83 | if self.state == ProbingState.DETECTING: | ||
| 84 | if (self.distribution_analyzer.got_enough_data() and | ||
| 85 | (self.get_confidence() > self.SHORTCUT_THRESHOLD)): | ||
| 86 | self._state = ProbingState.FOUND_IT | ||
| 87 | |||
| 88 | return self.state | ||
| 89 | |||
| 90 | def get_confidence(self): | ||
| 91 | return self.distribution_analyzer.get_confidence() | ||
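`MultiByteCharSetProber` is effectively abstract: `feed` relies on a `coding_sm` and a `distribution_analyzer` that subclasses must supply. A hedged sketch of that wiring, modeled on the Big5 prober shipped in this same package (the real implementation lives in big5prober.py; the class name below is illustrative only):

```python
# Illustrative subclass showing how feed() gets its two collaborators.
# CodingStateMachine, Big5DistributionAnalysis, and BIG5_SM_MODEL are the
# real names from this package; DemoBig5Prober itself is hypothetical.
from chardet.mbcharsetprober import MultiByteCharSetProber
from chardet.codingstatemachine import CodingStateMachine
from chardet.chardistribution import Big5DistributionAnalysis
from chardet.mbcssm import BIG5_SM_MODEL


class DemoBig5Prober(MultiByteCharSetProber):
    def __init__(self):
        super(DemoBig5Prober, self).__init__()
        # Validates byte sequences against the Big5 state machine
        self.coding_sm = CodingStateMachine(BIG5_SM_MODEL)
        # Scores how typical the decoded characters are for Big5 text
        self.distribution_analyzer = Big5DistributionAnalysis()
        self.reset()

    @property
    def charset_name(self):
        return 'Big5'

    @property
    def language(self):
        return 'Chinese'
```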
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/mbcsgroupprober.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/mbcsgroupprober.py new file mode 100644 index 0000000..4b04929 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/mbcsgroupprober.py | |||
| @@ -0,0 +1,54 @@ | |||
| 1 | ######################## BEGIN LICENSE BLOCK ######################## | ||
| 2 | # The Original Code is Mozilla Universal charset detector code. | ||
| 3 | # | ||
| 4 | # The Initial Developer of the Original Code is | ||
| 5 | # Netscape Communications Corporation. | ||
| 6 | # Portions created by the Initial Developer are Copyright (C) 2001 | ||
| 7 | # the Initial Developer. All Rights Reserved. | ||
| 8 | # | ||
| 9 | # Contributor(s): | ||
| 10 | # Mark Pilgrim - port to Python | ||
| 11 | # Shy Shalom - original C code | ||
| 12 | # Proofpoint, Inc. | ||
| 13 | # | ||
| 14 | # This library is free software; you can redistribute it and/or | ||
| 15 | # modify it under the terms of the GNU Lesser General Public | ||
| 16 | # License as published by the Free Software Foundation; either | ||
| 17 | # version 2.1 of the License, or (at your option) any later version. | ||
| 18 | # | ||
| 19 | # This library is distributed in the hope that it will be useful, | ||
| 20 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 21 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
| 22 | # Lesser General Public License for more details. | ||
| 23 | # | ||
| 24 | # You should have received a copy of the GNU Lesser General Public | ||
| 25 | # License along with this library; if not, write to the Free Software | ||
| 26 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA | ||
| 27 | # 02110-1301 USA | ||
| 28 | ######################### END LICENSE BLOCK ######################### | ||
| 29 | |||
| 30 | from .charsetgroupprober import CharSetGroupProber | ||
| 31 | from .utf8prober import UTF8Prober | ||
| 32 | from .sjisprober import SJISProber | ||
| 33 | from .eucjpprober import EUCJPProber | ||
| 34 | from .gb2312prober import GB2312Prober | ||
| 35 | from .euckrprober import EUCKRProber | ||
| 36 | from .cp949prober import CP949Prober | ||
| 37 | from .big5prober import Big5Prober | ||
| 38 | from .euctwprober import EUCTWProber | ||
| 39 | |||
| 40 | |||
| 41 | class MBCSGroupProber(CharSetGroupProber): | ||
| 42 | def __init__(self, lang_filter=None): | ||
| 43 | super(MBCSGroupProber, self).__init__(lang_filter=lang_filter) | ||
| 44 | self.probers = [ | ||
| 45 | UTF8Prober(), | ||
| 46 | SJISProber(), | ||
| 47 | EUCJPProber(), | ||
| 48 | GB2312Prober(), | ||
| 49 | EUCKRProber(), | ||
| 50 | CP949Prober(), | ||
| 51 | Big5Prober(), | ||
| 52 | EUCTWProber() | ||
| 53 | ] | ||
| 54 | self.reset() | ||
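A short usage sketch (again assuming `chardet` is importable directly): the group prober fans the same bytes out to all eight children and reports whichever scores best.

```python
from chardet.mbcsgroupprober import MBCSGroupProber

prober = MBCSGroupProber()
prober.feed('こんにちは、世界'.encode('utf-8'))  # feed raw bytes
# charset_name and get_confidence() delegate to the best-scoring child
print(prober.charset_name, prober.get_confidence())  # expected: 'utf-8', high
```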
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/mbcssm.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/mbcssm.py new file mode 100644 index 0000000..d68f6f6 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/mbcssm.py | |||
| @@ -0,0 +1,572 @@ | |||
| 1 | ######################## BEGIN LICENSE BLOCK ######################## | ||
| 2 | # The Original Code is mozilla.org code. | ||
| 3 | # | ||
| 4 | # The Initial Developer of the Original Code is | ||
| 5 | # Netscape Communications Corporation. | ||
| 6 | # Portions created by the Initial Developer are Copyright (C) 1998 | ||
| 7 | # the Initial Developer. All Rights Reserved. | ||
| 8 | # | ||
| 9 | # Contributor(s): | ||
| 10 | # Mark Pilgrim - port to Python | ||
| 11 | # | ||
| 12 | # This library is free software; you can redistribute it and/or | ||
| 13 | # modify it under the terms of the GNU Lesser General Public | ||
| 14 | # License as published by the Free Software Foundation; either | ||
| 15 | # version 2.1 of the License, or (at your option) any later version. | ||
| 16 | # | ||
| 17 | # This library is distributed in the hope that it will be useful, | ||
| 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
| 20 | # Lesser General Public License for more details. | ||
| 21 | # | ||
| 22 | # You should have received a copy of the GNU Lesser General Public | ||
| 23 | # License along with this library; if not, write to the Free Software | ||
| 24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA | ||
| 25 | # 02110-1301 USA | ||
| 26 | ######################### END LICENSE BLOCK ######################### | ||
| 27 | |||
| 28 | from .enums import MachineState | ||
| 29 | |||
| 30 | # BIG5 | ||
| 31 | |||
| 32 | BIG5_CLS = ( | ||
| 33 | 1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as a legal value | ||
| 34 | 1,1,1,1,1,1,0,0, # 08 - 0f | ||
| 35 | 1,1,1,1,1,1,1,1, # 10 - 17 | ||
| 36 | 1,1,1,0,1,1,1,1, # 18 - 1f | ||
| 37 | 1,1,1,1,1,1,1,1, # 20 - 27 | ||
| 38 | 1,1,1,1,1,1,1,1, # 28 - 2f | ||
| 39 | 1,1,1,1,1,1,1,1, # 30 - 37 | ||
| 40 | 1,1,1,1,1,1,1,1, # 38 - 3f | ||
| 41 | 2,2,2,2,2,2,2,2, # 40 - 47 | ||
| 42 | 2,2,2,2,2,2,2,2, # 48 - 4f | ||
| 43 | 2,2,2,2,2,2,2,2, # 50 - 57 | ||
| 44 | 2,2,2,2,2,2,2,2, # 58 - 5f | ||
| 45 | 2,2,2,2,2,2,2,2, # 60 - 67 | ||
| 46 | 2,2,2,2,2,2,2,2, # 68 - 6f | ||
| 47 | 2,2,2,2,2,2,2,2, # 70 - 77 | ||
| 48 | 2,2,2,2,2,2,2,1, # 78 - 7f | ||
| 49 | 4,4,4,4,4,4,4,4, # 80 - 87 | ||
| 50 | 4,4,4,4,4,4,4,4, # 88 - 8f | ||
| 51 | 4,4,4,4,4,4,4,4, # 90 - 97 | ||
| 52 | 4,4,4,4,4,4,4,4, # 98 - 9f | ||
| 53 | 4,3,3,3,3,3,3,3, # a0 - a7 | ||
| 54 | 3,3,3,3,3,3,3,3, # a8 - af | ||
| 55 | 3,3,3,3,3,3,3,3, # b0 - b7 | ||
| 56 | 3,3,3,3,3,3,3,3, # b8 - bf | ||
| 57 | 3,3,3,3,3,3,3,3, # c0 - c7 | ||
| 58 | 3,3,3,3,3,3,3,3, # c8 - cf | ||
| 59 | 3,3,3,3,3,3,3,3, # d0 - d7 | ||
| 60 | 3,3,3,3,3,3,3,3, # d8 - df | ||
| 61 | 3,3,3,3,3,3,3,3, # e0 - e7 | ||
| 62 | 3,3,3,3,3,3,3,3, # e8 - ef | ||
| 63 | 3,3,3,3,3,3,3,3, # f0 - f7 | ||
| 64 | 3,3,3,3,3,3,3,0 # f8 - ff | ||
| 65 | ) | ||
| 66 | |||
| 67 | BIG5_ST = ( | ||
| 68 | MachineState.ERROR,MachineState.START,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 | ||
| 69 | MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,#08-0f | ||
| 70 | MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START#10-17 | ||
| 71 | ) | ||
| 72 | |||
| 73 | BIG5_CHAR_LEN_TABLE = (0, 1, 1, 2, 0) | ||
| 74 | |||
| 75 | BIG5_SM_MODEL = {'class_table': BIG5_CLS, | ||
| 76 | 'class_factor': 5, | ||
| 77 | 'state_table': BIG5_ST, | ||
| 78 | 'char_len_table': BIG5_CHAR_LEN_TABLE, | ||
| 79 | 'name': 'Big5'} | ||
| 80 | |||
| 81 | # CP949 | ||
| 82 | |||
| 83 | CP949_CLS = ( | ||
| 84 | 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,0,0, # 00 - 0f | ||
| 85 | 1,1,1,1,1,1,1,1, 1,1,1,0,1,1,1,1, # 10 - 1f | ||
| 86 | 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 20 - 2f | ||
| 87 | 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 30 - 3f | ||
| 88 | 1,4,4,4,4,4,4,4, 4,4,4,4,4,4,4,4, # 40 - 4f | ||
| 89 | 4,4,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 50 - 5f | ||
| 90 | 1,5,5,5,5,5,5,5, 5,5,5,5,5,5,5,5, # 60 - 6f | ||
| 91 | 5,5,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 70 - 7f | ||
| 92 | 0,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 80 - 8f | ||
| 93 | 6,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 90 - 9f | ||
| 94 | 6,7,7,7,7,7,7,7, 7,7,7,7,7,8,8,8, # a0 - af | ||
| 95 | 7,7,7,7,7,7,7,7, 7,7,7,7,7,7,7,7, # b0 - bf | ||
| 96 | 7,7,7,7,7,7,9,2, 2,3,2,2,2,2,2,2, # c0 - cf | ||
| 97 | 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # d0 - df | ||
| 98 | 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # e0 - ef | ||
| 99 | 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,0, # f0 - ff | ||
| 100 | ) | ||
| 101 | |||
| 102 | CP949_ST = ( | ||
| 103 | #cls= 0 1 2 3 4 5 6 7 8 9 # previous state = | ||
| 104 | MachineState.ERROR,MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START, 4, 5,MachineState.ERROR, 6, # MachineState.START | ||
| 105 | MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, # MachineState.ERROR | ||
| 106 | MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME, # MachineState.ITS_ME | ||
| 107 | MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 3 | ||
| 108 | MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 4 | ||
| 109 | MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 5 | ||
| 110 | MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 6 | ||
| 111 | ) | ||
| 112 | |||
| 113 | CP949_CHAR_LEN_TABLE = (0, 1, 2, 0, 1, 1, 2, 2, 0, 2) | ||
| 114 | |||
| 115 | CP949_SM_MODEL = {'class_table': CP949_CLS, | ||
| 116 | 'class_factor': 10, | ||
| 117 | 'state_table': CP949_ST, | ||
| 118 | 'char_len_table': CP949_CHAR_LEN_TABLE, | ||
| 119 | 'name': 'CP949'} | ||
| 120 | |||
| 121 | # EUC-JP | ||
| 122 | |||
| 123 | EUCJP_CLS = ( | ||
| 124 | 4,4,4,4,4,4,4,4, # 00 - 07 | ||
| 125 | 4,4,4,4,4,4,5,5, # 08 - 0f | ||
| 126 | 4,4,4,4,4,4,4,4, # 10 - 17 | ||
| 127 | 4,4,4,5,4,4,4,4, # 18 - 1f | ||
| 128 | 4,4,4,4,4,4,4,4, # 20 - 27 | ||
| 129 | 4,4,4,4,4,4,4,4, # 28 - 2f | ||
| 130 | 4,4,4,4,4,4,4,4, # 30 - 37 | ||
| 131 | 4,4,4,4,4,4,4,4, # 38 - 3f | ||
| 132 | 4,4,4,4,4,4,4,4, # 40 - 47 | ||
| 133 | 4,4,4,4,4,4,4,4, # 48 - 4f | ||
| 134 | 4,4,4,4,4,4,4,4, # 50 - 57 | ||
| 135 | 4,4,4,4,4,4,4,4, # 58 - 5f | ||
| 136 | 4,4,4,4,4,4,4,4, # 60 - 67 | ||
| 137 | 4,4,4,4,4,4,4,4, # 68 - 6f | ||
| 138 | 4,4,4,4,4,4,4,4, # 70 - 77 | ||
| 139 | 4,4,4,4,4,4,4,4, # 78 - 7f | ||
| 140 | 5,5,5,5,5,5,5,5, # 80 - 87 | ||
| 141 | 5,5,5,5,5,5,1,3, # 88 - 8f | ||
| 142 | 5,5,5,5,5,5,5,5, # 90 - 97 | ||
| 143 | 5,5,5,5,5,5,5,5, # 98 - 9f | ||
| 144 | 5,2,2,2,2,2,2,2, # a0 - a7 | ||
| 145 | 2,2,2,2,2,2,2,2, # a8 - af | ||
| 146 | 2,2,2,2,2,2,2,2, # b0 - b7 | ||
| 147 | 2,2,2,2,2,2,2,2, # b8 - bf | ||
| 148 | 2,2,2,2,2,2,2,2, # c0 - c7 | ||
| 149 | 2,2,2,2,2,2,2,2, # c8 - cf | ||
| 150 | 2,2,2,2,2,2,2,2, # d0 - d7 | ||
| 151 | 2,2,2,2,2,2,2,2, # d8 - df | ||
| 152 | 0,0,0,0,0,0,0,0, # e0 - e7 | ||
| 153 | 0,0,0,0,0,0,0,0, # e8 - ef | ||
| 154 | 0,0,0,0,0,0,0,0, # f0 - f7 | ||
| 155 | 0,0,0,0,0,0,0,5 # f8 - ff | ||
| 156 | ) | ||
| 157 | |||
| 158 | EUCJP_ST = ( | ||
| 159 | 3, 4, 3, 5,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 | ||
| 160 | MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f | ||
| 161 | MachineState.ITS_ME,MachineState.ITS_ME,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17 | ||
| 162 | MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 3,MachineState.ERROR,#18-1f | ||
| 163 | 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START#20-27 | ||
| 164 | ) | ||
| 165 | |||
| 166 | EUCJP_CHAR_LEN_TABLE = (2, 2, 2, 3, 1, 0) | ||
| 167 | |||
| 168 | EUCJP_SM_MODEL = {'class_table': EUCJP_CLS, | ||
| 169 | 'class_factor': 6, | ||
| 170 | 'state_table': EUCJP_ST, | ||
| 171 | 'char_len_table': EUCJP_CHAR_LEN_TABLE, | ||
| 172 | 'name': 'EUC-JP'} | ||
| 173 | |||
| 174 | # EUC-KR | ||
| 175 | |||
| 176 | EUCKR_CLS = ( | ||
| 177 | 1,1,1,1,1,1,1,1, # 00 - 07 | ||
| 178 | 1,1,1,1,1,1,0,0, # 08 - 0f | ||
| 179 | 1,1,1,1,1,1,1,1, # 10 - 17 | ||
| 180 | 1,1,1,0,1,1,1,1, # 18 - 1f | ||
| 181 | 1,1,1,1,1,1,1,1, # 20 - 27 | ||
| 182 | 1,1,1,1,1,1,1,1, # 28 - 2f | ||
| 183 | 1,1,1,1,1,1,1,1, # 30 - 37 | ||
| 184 | 1,1,1,1,1,1,1,1, # 38 - 3f | ||
| 185 | 1,1,1,1,1,1,1,1, # 40 - 47 | ||
| 186 | 1,1,1,1,1,1,1,1, # 48 - 4f | ||
| 187 | 1,1,1,1,1,1,1,1, # 50 - 57 | ||
| 188 | 1,1,1,1,1,1,1,1, # 58 - 5f | ||
| 189 | 1,1,1,1,1,1,1,1, # 60 - 67 | ||
| 190 | 1,1,1,1,1,1,1,1, # 68 - 6f | ||
| 191 | 1,1,1,1,1,1,1,1, # 70 - 77 | ||
| 192 | 1,1,1,1,1,1,1,1, # 78 - 7f | ||
| 193 | 0,0,0,0,0,0,0,0, # 80 - 87 | ||
| 194 | 0,0,0,0,0,0,0,0, # 88 - 8f | ||
| 195 | 0,0,0,0,0,0,0,0, # 90 - 97 | ||
| 196 | 0,0,0,0,0,0,0,0, # 98 - 9f | ||
| 197 | 0,2,2,2,2,2,2,2, # a0 - a7 | ||
| 198 | 2,2,2,2,2,3,3,3, # a8 - af | ||
| 199 | 2,2,2,2,2,2,2,2, # b0 - b7 | ||
| 200 | 2,2,2,2,2,2,2,2, # b8 - bf | ||
| 201 | 2,2,2,2,2,2,2,2, # c0 - c7 | ||
| 202 | 2,3,2,2,2,2,2,2, # c8 - cf | ||
| 203 | 2,2,2,2,2,2,2,2, # d0 - d7 | ||
| 204 | 2,2,2,2,2,2,2,2, # d8 - df | ||
| 205 | 2,2,2,2,2,2,2,2, # e0 - e7 | ||
| 206 | 2,2,2,2,2,2,2,2, # e8 - ef | ||
| 207 | 2,2,2,2,2,2,2,2, # f0 - f7 | ||
| 208 | 2,2,2,2,2,2,2,0 # f8 - ff | ||
| 209 | ) | ||
| 210 | |||
| 211 | EUCKR_ST = ( | ||
| 212 | MachineState.ERROR,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 | ||
| 213 | MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #08-0f | ||
| 214 | ) | ||
| 215 | |||
| 216 | EUCKR_CHAR_LEN_TABLE = (0, 1, 2, 0) | ||
| 217 | |||
| 218 | EUCKR_SM_MODEL = {'class_table': EUCKR_CLS, | ||
| 219 | 'class_factor': 4, | ||
| 220 | 'state_table': EUCKR_ST, | ||
| 221 | 'char_len_table': EUCKR_CHAR_LEN_TABLE, | ||
| 222 | 'name': 'EUC-KR'} | ||
| 223 | |||
| 224 | # EUC-TW | ||
| 225 | |||
| 226 | EUCTW_CLS = ( | ||
| 227 | 2,2,2,2,2,2,2,2, # 00 - 07 | ||
| 228 | 2,2,2,2,2,2,0,0, # 08 - 0f | ||
| 229 | 2,2,2,2,2,2,2,2, # 10 - 17 | ||
| 230 | 2,2,2,0,2,2,2,2, # 18 - 1f | ||
| 231 | 2,2,2,2,2,2,2,2, # 20 - 27 | ||
| 232 | 2,2,2,2,2,2,2,2, # 28 - 2f | ||
| 233 | 2,2,2,2,2,2,2,2, # 30 - 37 | ||
| 234 | 2,2,2,2,2,2,2,2, # 38 - 3f | ||
| 235 | 2,2,2,2,2,2,2,2, # 40 - 47 | ||
| 236 | 2,2,2,2,2,2,2,2, # 48 - 4f | ||
| 237 | 2,2,2,2,2,2,2,2, # 50 - 57 | ||
| 238 | 2,2,2,2,2,2,2,2, # 58 - 5f | ||
| 239 | 2,2,2,2,2,2,2,2, # 60 - 67 | ||
| 240 | 2,2,2,2,2,2,2,2, # 68 - 6f | ||
| 241 | 2,2,2,2,2,2,2,2, # 70 - 77 | ||
| 242 | 2,2,2,2,2,2,2,2, # 78 - 7f | ||
| 243 | 0,0,0,0,0,0,0,0, # 80 - 87 | ||
| 244 | 0,0,0,0,0,0,6,0, # 88 - 8f | ||
| 245 | 0,0,0,0,0,0,0,0, # 90 - 97 | ||
| 246 | 0,0,0,0,0,0,0,0, # 98 - 9f | ||
| 247 | 0,3,4,4,4,4,4,4, # a0 - a7 | ||
| 248 | 5,5,1,1,1,1,1,1, # a8 - af | ||
| 249 | 1,1,1,1,1,1,1,1, # b0 - b7 | ||
| 250 | 1,1,1,1,1,1,1,1, # b8 - bf | ||
| 251 | 1,1,3,1,3,3,3,3, # c0 - c7 | ||
| 252 | 3,3,3,3,3,3,3,3, # c8 - cf | ||
| 253 | 3,3,3,3,3,3,3,3, # d0 - d7 | ||
| 254 | 3,3,3,3,3,3,3,3, # d8 - df | ||
| 255 | 3,3,3,3,3,3,3,3, # e0 - e7 | ||
| 256 | 3,3,3,3,3,3,3,3, # e8 - ef | ||
| 257 | 3,3,3,3,3,3,3,3, # f0 - f7 | ||
| 258 | 3,3,3,3,3,3,3,0 # f8 - ff | ||
| 259 | ) | ||
| 260 | |||
| 261 | EUCTW_ST = ( | ||
| 262 | MachineState.ERROR,MachineState.ERROR,MachineState.START, 3, 3, 3, 4,MachineState.ERROR,#00-07 | ||
| 263 | MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f | ||
| 264 | MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,MachineState.ERROR,#10-17 | ||
| 265 | MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f | ||
| 266 | 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,#20-27 | ||
| 267 | MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f | ||
| 268 | ) | ||
| 269 | |||
| 270 | EUCTW_CHAR_LEN_TABLE = (0, 0, 1, 2, 2, 2, 3) | ||
| 271 | |||
| 272 | EUCTW_SM_MODEL = {'class_table': EUCTW_CLS, | ||
| 273 | 'class_factor': 7, | ||
| 274 | 'state_table': EUCTW_ST, | ||
| 275 | 'char_len_table': EUCTW_CHAR_LEN_TABLE, | ||
| 276 | 'name': 'x-euc-tw'} | ||
| 277 | |||
| 278 | # GB2312 | ||
| 279 | |||
| 280 | GB2312_CLS = ( | ||
| 281 | 1,1,1,1,1,1,1,1, # 00 - 07 | ||
| 282 | 1,1,1,1,1,1,0,0, # 08 - 0f | ||
| 283 | 1,1,1,1,1,1,1,1, # 10 - 17 | ||
| 284 | 1,1,1,0,1,1,1,1, # 18 - 1f | ||
| 285 | 1,1,1,1,1,1,1,1, # 20 - 27 | ||
| 286 | 1,1,1,1,1,1,1,1, # 28 - 2f | ||
| 287 | 3,3,3,3,3,3,3,3, # 30 - 37 | ||
| 288 | 3,3,1,1,1,1,1,1, # 38 - 3f | ||
| 289 | 2,2,2,2,2,2,2,2, # 40 - 47 | ||
| 290 | 2,2,2,2,2,2,2,2, # 48 - 4f | ||
| 291 | 2,2,2,2,2,2,2,2, # 50 - 57 | ||
| 292 | 2,2,2,2,2,2,2,2, # 58 - 5f | ||
| 293 | 2,2,2,2,2,2,2,2, # 60 - 67 | ||
| 294 | 2,2,2,2,2,2,2,2, # 68 - 6f | ||
| 295 | 2,2,2,2,2,2,2,2, # 70 - 77 | ||
| 296 | 2,2,2,2,2,2,2,4, # 78 - 7f | ||
| 297 | 5,6,6,6,6,6,6,6, # 80 - 87 | ||
| 298 | 6,6,6,6,6,6,6,6, # 88 - 8f | ||
| 299 | 6,6,6,6,6,6,6,6, # 90 - 97 | ||
| 300 | 6,6,6,6,6,6,6,6, # 98 - 9f | ||
| 301 | 6,6,6,6,6,6,6,6, # a0 - a7 | ||
| 302 | 6,6,6,6,6,6,6,6, # a8 - af | ||
| 303 | 6,6,6,6,6,6,6,6, # b0 - b7 | ||
| 304 | 6,6,6,6,6,6,6,6, # b8 - bf | ||
| 305 | 6,6,6,6,6,6,6,6, # c0 - c7 | ||
| 306 | 6,6,6,6,6,6,6,6, # c8 - cf | ||
| 307 | 6,6,6,6,6,6,6,6, # d0 - d7 | ||
| 308 | 6,6,6,6,6,6,6,6, # d8 - df | ||
| 309 | 6,6,6,6,6,6,6,6, # e0 - e7 | ||
| 310 | 6,6,6,6,6,6,6,6, # e8 - ef | ||
| 311 | 6,6,6,6,6,6,6,6, # f0 - f7 | ||
| 312 | 6,6,6,6,6,6,6,0 # f8 - ff | ||
| 313 | ) | ||
| 314 | |||
| 315 | GB2312_ST = ( | ||
| 316 | MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, 3,MachineState.ERROR,#00-07 | ||
| 317 | MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f | ||
| 318 | MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,#10-17 | ||
| 319 | 4,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f | ||
| 320 | MachineState.ERROR,MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#20-27 | ||
| 321 | MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f | ||
| 322 | ) | ||
| 323 | |||
| 324 | # To be accurate, the length of class 6 can be either 2 or 4. | ||
| 325 | # But it is not necessary to discriminate between the two since | ||
| 326 | # it is used for frequency analysis only, and we are validating | ||
| 327 | # each code range there as well. So it is safe to set it to | ||
| 328 | # 2 here. | ||
| 329 | GB2312_CHAR_LEN_TABLE = (0, 1, 1, 1, 1, 1, 2) | ||
| 330 | |||
| 331 | GB2312_SM_MODEL = {'class_table': GB2312_CLS, | ||
| 332 | 'class_factor': 7, | ||
| 333 | 'state_table': GB2312_ST, | ||
| 334 | 'char_len_table': GB2312_CHAR_LEN_TABLE, | ||
| 335 | 'name': 'GB2312'} | ||
| 336 | |||
| 337 | # Shift_JIS | ||
| 338 | |||
| 339 | SJIS_CLS = ( | ||
| 340 | 1,1,1,1,1,1,1,1, # 00 - 07 | ||
| 341 | 1,1,1,1,1,1,0,0, # 08 - 0f | ||
| 342 | 1,1,1,1,1,1,1,1, # 10 - 17 | ||
| 343 | 1,1,1,0,1,1,1,1, # 18 - 1f | ||
| 344 | 1,1,1,1,1,1,1,1, # 20 - 27 | ||
| 345 | 1,1,1,1,1,1,1,1, # 28 - 2f | ||
| 346 | 1,1,1,1,1,1,1,1, # 30 - 37 | ||
| 347 | 1,1,1,1,1,1,1,1, # 38 - 3f | ||
| 348 | 2,2,2,2,2,2,2,2, # 40 - 47 | ||
| 349 | 2,2,2,2,2,2,2,2, # 48 - 4f | ||
| 350 | 2,2,2,2,2,2,2,2, # 50 - 57 | ||
| 351 | 2,2,2,2,2,2,2,2, # 58 - 5f | ||
| 352 | 2,2,2,2,2,2,2,2, # 60 - 67 | ||
| 353 | 2,2,2,2,2,2,2,2, # 68 - 6f | ||
| 354 | 2,2,2,2,2,2,2,2, # 70 - 77 | ||
| 355 | 2,2,2,2,2,2,2,1, # 78 - 7f | ||
| 356 | 3,3,3,3,3,2,2,3, # 80 - 87 | ||
| 357 | 3,3,3,3,3,3,3,3, # 88 - 8f | ||
| 358 | 3,3,3,3,3,3,3,3, # 90 - 97 | ||
| 359 | 3,3,3,3,3,3,3,3, # 98 - 9f | ||
| 360 | #0xa0 is illegal in sjis encoding, but some pages do | ||
| 361 | #contain such bytes, so we need to be more forgiving of errors. | ||
| 362 | 2,2,2,2,2,2,2,2, # a0 - a7 | ||
| 363 | 2,2,2,2,2,2,2,2, # a8 - af | ||
| 364 | 2,2,2,2,2,2,2,2, # b0 - b7 | ||
| 365 | 2,2,2,2,2,2,2,2, # b8 - bf | ||
| 366 | 2,2,2,2,2,2,2,2, # c0 - c7 | ||
| 367 | 2,2,2,2,2,2,2,2, # c8 - cf | ||
| 368 | 2,2,2,2,2,2,2,2, # d0 - d7 | ||
| 369 | 2,2,2,2,2,2,2,2, # d8 - df | ||
| 370 | 3,3,3,3,3,3,3,3, # e0 - e7 | ||
| 371 | 3,3,3,3,3,4,4,4, # e8 - ef | ||
| 372 | 3,3,3,3,3,3,3,3, # f0 - f7 | ||
| 373 | 3,3,3,3,3,0,0,0) # f8 - ff | ||
| 374 | |||
| 375 | |||
| 376 | SJIS_ST = ( | ||
| 377 | MachineState.ERROR,MachineState.START,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 | ||
| 378 | MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f | ||
| 379 | MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START #10-17 | ||
| 380 | ) | ||
| 381 | |||
| 382 | SJIS_CHAR_LEN_TABLE = (0, 1, 1, 2, 0, 0) | ||
| 383 | |||
| 384 | SJIS_SM_MODEL = {'class_table': SJIS_CLS, | ||
| 385 | 'class_factor': 6, | ||
| 386 | 'state_table': SJIS_ST, | ||
| 387 | 'char_len_table': SJIS_CHAR_LEN_TABLE, | ||
| 388 | 'name': 'Shift_JIS'} | ||
| 389 | |||
| 390 | # UCS2-BE | ||
| 391 | |||
| 392 | UCS2BE_CLS = ( | ||
| 393 | 0,0,0,0,0,0,0,0, # 00 - 07 | ||
| 394 | 0,0,1,0,0,2,0,0, # 08 - 0f | ||
| 395 | 0,0,0,0,0,0,0,0, # 10 - 17 | ||
| 396 | 0,0,0,3,0,0,0,0, # 18 - 1f | ||
| 397 | 0,0,0,0,0,0,0,0, # 20 - 27 | ||
| 398 | 0,3,3,3,3,3,0,0, # 28 - 2f | ||
| 399 | 0,0,0,0,0,0,0,0, # 30 - 37 | ||
| 400 | 0,0,0,0,0,0,0,0, # 38 - 3f | ||
| 401 | 0,0,0,0,0,0,0,0, # 40 - 47 | ||
| 402 | 0,0,0,0,0,0,0,0, # 48 - 4f | ||
| 403 | 0,0,0,0,0,0,0,0, # 50 - 57 | ||
| 404 | 0,0,0,0,0,0,0,0, # 58 - 5f | ||
| 405 | 0,0,0,0,0,0,0,0, # 60 - 67 | ||
| 406 | 0,0,0,0,0,0,0,0, # 68 - 6f | ||
| 407 | 0,0,0,0,0,0,0,0, # 70 - 77 | ||
| 408 | 0,0,0,0,0,0,0,0, # 78 - 7f | ||
| 409 | 0,0,0,0,0,0,0,0, # 80 - 87 | ||
| 410 | 0,0,0,0,0,0,0,0, # 88 - 8f | ||
| 411 | 0,0,0,0,0,0,0,0, # 90 - 97 | ||
| 412 | 0,0,0,0,0,0,0,0, # 98 - 9f | ||
| 413 | 0,0,0,0,0,0,0,0, # a0 - a7 | ||
| 414 | 0,0,0,0,0,0,0,0, # a8 - af | ||
| 415 | 0,0,0,0,0,0,0,0, # b0 - b7 | ||
| 416 | 0,0,0,0,0,0,0,0, # b8 - bf | ||
| 417 | 0,0,0,0,0,0,0,0, # c0 - c7 | ||
| 418 | 0,0,0,0,0,0,0,0, # c8 - cf | ||
| 419 | 0,0,0,0,0,0,0,0, # d0 - d7 | ||
| 420 | 0,0,0,0,0,0,0,0, # d8 - df | ||
| 421 | 0,0,0,0,0,0,0,0, # e0 - e7 | ||
| 422 | 0,0,0,0,0,0,0,0, # e8 - ef | ||
| 423 | 0,0,0,0,0,0,0,0, # f0 - f7 | ||
| 424 | 0,0,0,0,0,0,4,5 # f8 - ff | ||
| 425 | ) | ||
| 426 | |||
| 427 | UCS2BE_ST = ( | ||
| 428 | 5, 7, 7,MachineState.ERROR, 4, 3,MachineState.ERROR,MachineState.ERROR,#00-07 | ||
| 429 | MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f | ||
| 430 | MachineState.ITS_ME,MachineState.ITS_ME, 6, 6, 6, 6,MachineState.ERROR,MachineState.ERROR,#10-17 | ||
| 431 | 6, 6, 6, 6, 6,MachineState.ITS_ME, 6, 6,#18-1f | ||
| 432 | 6, 6, 6, 6, 5, 7, 7,MachineState.ERROR,#20-27 | ||
| 433 | 5, 8, 6, 6,MachineState.ERROR, 6, 6, 6,#28-2f | ||
| 434 | 6, 6, 6, 6,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #30-37 | ||
| 435 | ) | ||
| 436 | |||
| 437 | UCS2BE_CHAR_LEN_TABLE = (2, 2, 2, 0, 2, 2) | ||
| 438 | |||
| 439 | UCS2BE_SM_MODEL = {'class_table': UCS2BE_CLS, | ||
| 440 | 'class_factor': 6, | ||
| 441 | 'state_table': UCS2BE_ST, | ||
| 442 | 'char_len_table': UCS2BE_CHAR_LEN_TABLE, | ||
| 443 | 'name': 'UTF-16BE'} | ||
| 444 | |||
| 445 | # UCS2-LE | ||
| 446 | |||
| 447 | UCS2LE_CLS = ( | ||
| 448 | 0,0,0,0,0,0,0,0, # 00 - 07 | ||
| 449 | 0,0,1,0,0,2,0,0, # 08 - 0f | ||
| 450 | 0,0,0,0,0,0,0,0, # 10 - 17 | ||
| 451 | 0,0,0,3,0,0,0,0, # 18 - 1f | ||
| 452 | 0,0,0,0,0,0,0,0, # 20 - 27 | ||
| 453 | 0,3,3,3,3,3,0,0, # 28 - 2f | ||
| 454 | 0,0,0,0,0,0,0,0, # 30 - 37 | ||
| 455 | 0,0,0,0,0,0,0,0, # 38 - 3f | ||
| 456 | 0,0,0,0,0,0,0,0, # 40 - 47 | ||
| 457 | 0,0,0,0,0,0,0,0, # 48 - 4f | ||
| 458 | 0,0,0,0,0,0,0,0, # 50 - 57 | ||
| 459 | 0,0,0,0,0,0,0,0, # 58 - 5f | ||
| 460 | 0,0,0,0,0,0,0,0, # 60 - 67 | ||
| 461 | 0,0,0,0,0,0,0,0, # 68 - 6f | ||
| 462 | 0,0,0,0,0,0,0,0, # 70 - 77 | ||
| 463 | 0,0,0,0,0,0,0,0, # 78 - 7f | ||
| 464 | 0,0,0,0,0,0,0,0, # 80 - 87 | ||
| 465 | 0,0,0,0,0,0,0,0, # 88 - 8f | ||
| 466 | 0,0,0,0,0,0,0,0, # 90 - 97 | ||
| 467 | 0,0,0,0,0,0,0,0, # 98 - 9f | ||
| 468 | 0,0,0,0,0,0,0,0, # a0 - a7 | ||
| 469 | 0,0,0,0,0,0,0,0, # a8 - af | ||
| 470 | 0,0,0,0,0,0,0,0, # b0 - b7 | ||
| 471 | 0,0,0,0,0,0,0,0, # b8 - bf | ||
| 472 | 0,0,0,0,0,0,0,0, # c0 - c7 | ||
| 473 | 0,0,0,0,0,0,0,0, # c8 - cf | ||
| 474 | 0,0,0,0,0,0,0,0, # d0 - d7 | ||
| 475 | 0,0,0,0,0,0,0,0, # d8 - df | ||
| 476 | 0,0,0,0,0,0,0,0, # e0 - e7 | ||
| 477 | 0,0,0,0,0,0,0,0, # e8 - ef | ||
| 478 | 0,0,0,0,0,0,0,0, # f0 - f7 | ||
| 479 | 0,0,0,0,0,0,4,5 # f8 - ff | ||
| 480 | ) | ||
| 481 | |||
| 482 | UCS2LE_ST = ( | ||
| 483 | 6, 6, 7, 6, 4, 3,MachineState.ERROR,MachineState.ERROR,#00-07 | ||
| 484 | MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f | ||
| 485 | MachineState.ITS_ME,MachineState.ITS_ME, 5, 5, 5,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#10-17 | ||
| 486 | 5, 5, 5,MachineState.ERROR, 5,MachineState.ERROR, 6, 6,#18-1f | ||
| 487 | 7, 6, 8, 8, 5, 5, 5,MachineState.ERROR,#20-27 | ||
| 488 | 5, 5, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5, 5,#28-2f | ||
| 489 | 5, 5, 5,MachineState.ERROR, 5,MachineState.ERROR,MachineState.START,MachineState.START #30-37 | ||
| 490 | ) | ||
| 491 | |||
| 492 | UCS2LE_CHAR_LEN_TABLE = (2, 2, 2, 2, 2, 2) | ||
| 493 | |||
| 494 | UCS2LE_SM_MODEL = {'class_table': UCS2LE_CLS, | ||
| 495 | 'class_factor': 6, | ||
| 496 | 'state_table': UCS2LE_ST, | ||
| 497 | 'char_len_table': UCS2LE_CHAR_LEN_TABLE, | ||
| 498 | 'name': 'UTF-16LE'} | ||
| 499 | |||
| 500 | # UTF-8 | ||
| 501 | |||
| 502 | UTF8_CLS = ( | ||
| 503 | 1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as a legal value | ||
| 504 | 1,1,1,1,1,1,0,0, # 08 - 0f | ||
| 505 | 1,1,1,1,1,1,1,1, # 10 - 17 | ||
| 506 | 1,1,1,0,1,1,1,1, # 18 - 1f | ||
| 507 | 1,1,1,1,1,1,1,1, # 20 - 27 | ||
| 508 | 1,1,1,1,1,1,1,1, # 28 - 2f | ||
| 509 | 1,1,1,1,1,1,1,1, # 30 - 37 | ||
| 510 | 1,1,1,1,1,1,1,1, # 38 - 3f | ||
| 511 | 1,1,1,1,1,1,1,1, # 40 - 47 | ||
| 512 | 1,1,1,1,1,1,1,1, # 48 - 4f | ||
| 513 | 1,1,1,1,1,1,1,1, # 50 - 57 | ||
| 514 | 1,1,1,1,1,1,1,1, # 58 - 5f | ||
| 515 | 1,1,1,1,1,1,1,1, # 60 - 67 | ||
| 516 | 1,1,1,1,1,1,1,1, # 68 - 6f | ||
| 517 | 1,1,1,1,1,1,1,1, # 70 - 77 | ||
| 518 | 1,1,1,1,1,1,1,1, # 78 - 7f | ||
| 519 | 2,2,2,2,3,3,3,3, # 80 - 87 | ||
| 520 | 4,4,4,4,4,4,4,4, # 88 - 8f | ||
| 521 | 4,4,4,4,4,4,4,4, # 90 - 97 | ||
| 522 | 4,4,4,4,4,4,4,4, # 98 - 9f | ||
| 523 | 5,5,5,5,5,5,5,5, # a0 - a7 | ||
| 524 | 5,5,5,5,5,5,5,5, # a8 - af | ||
| 525 | 5,5,5,5,5,5,5,5, # b0 - b7 | ||
| 526 | 5,5,5,5,5,5,5,5, # b8 - bf | ||
| 527 | 0,0,6,6,6,6,6,6, # c0 - c7 | ||
| 528 | 6,6,6,6,6,6,6,6, # c8 - cf | ||
| 529 | 6,6,6,6,6,6,6,6, # d0 - d7 | ||
| 530 | 6,6,6,6,6,6,6,6, # d8 - df | ||
| 531 | 7,8,8,8,8,8,8,8, # e0 - e7 | ||
| 532 | 8,8,8,8,8,9,8,8, # e8 - ef | ||
| 533 | 10,11,11,11,11,11,11,11, # f0 - f7 | ||
| 534 | 12,13,13,13,14,15,0,0 # f8 - ff | ||
| 535 | ) | ||
| 536 | |||
| 537 | UTF8_ST = ( | ||
| 538 | MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 12, 10,#00-07 | ||
| 539 | 9, 11, 8, 7, 6, 5, 4, 3,#08-0f | ||
| 540 | MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17 | ||
| 541 | MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f | ||
| 542 | MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#20-27 | ||
| 543 | MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#28-2f | ||
| 544 | MachineState.ERROR,MachineState.ERROR, 5, 5, 5, 5,MachineState.ERROR,MachineState.ERROR,#30-37 | ||
| 545 | MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#38-3f | ||
| 546 | MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5, 5, 5,MachineState.ERROR,MachineState.ERROR,#40-47 | ||
| 547 | MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#48-4f | ||
| 548 | MachineState.ERROR,MachineState.ERROR, 7, 7, 7, 7,MachineState.ERROR,MachineState.ERROR,#50-57 | ||
| 549 | MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#58-5f | ||
| 550 | MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 7, 7,MachineState.ERROR,MachineState.ERROR,#60-67 | ||
| 551 | MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#68-6f | ||
| 552 | MachineState.ERROR,MachineState.ERROR, 9, 9, 9, 9,MachineState.ERROR,MachineState.ERROR,#70-77 | ||
| 553 | MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#78-7f | ||
| 554 | MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 9,MachineState.ERROR,MachineState.ERROR,#80-87 | ||
| 555 | MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#88-8f | ||
| 556 | MachineState.ERROR,MachineState.ERROR, 12, 12, 12, 12,MachineState.ERROR,MachineState.ERROR,#90-97 | ||
| 557 | MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#98-9f | ||
| 558 | MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 12,MachineState.ERROR,MachineState.ERROR,#a0-a7 | ||
| 559 | MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#a8-af | ||
| 560 | MachineState.ERROR,MachineState.ERROR, 12, 12, 12,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b0-b7 | ||
| 561 | MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b8-bf | ||
| 562 | MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,#c0-c7 | ||
| 563 | MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR #c8-cf | ||
| 564 | ) | ||
| 565 | |||
| 566 | UTF8_CHAR_LEN_TABLE = (0, 1, 0, 0, 0, 0, 2, 3, 3, 3, 4, 4, 5, 5, 6, 6) | ||
| 567 | |||
| 568 | UTF8_SM_MODEL = {'class_table': UTF8_CLS, | ||
| 569 | 'class_factor': 16, | ||
| 570 | 'state_table': UTF8_ST, | ||
| 571 | 'char_len_table': UTF8_CHAR_LEN_TABLE, | ||
| 572 | 'name': 'UTF-8'} | ||
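Every `*_SM_MODEL` dict above is consumed by `CodingStateMachine` (codingstatemachine.py in this same package): `class_table` maps a byte to a class, `state_table[state * class_factor + class]` yields the next state, and `char_len_table` records the expected character length for the sequence being parsed. A hedged sketch of stepping the UTF-8 machine by hand:

```python
from chardet.codingstatemachine import CodingStateMachine
from chardet.enums import MachineState
from chardet.mbcssm import UTF8_SM_MODEL

sm = CodingStateMachine(UTF8_SM_MODEL)
for byte in '€'.encode('utf-8'):       # E2 82 AC, a three-byte sequence
    state = sm.next_state(byte)
    if state == MachineState.ERROR:    # byte sequence is not valid UTF-8
        print('invalid UTF-8')
        break
    if state == MachineState.START:    # a complete character was consumed
        print('char length:', sm.get_current_charlen())  # prints 3
```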
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/sbcharsetprober.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/sbcharsetprober.py new file mode 100644 index 0000000..66e0dfc --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/sbcharsetprober.py | |||
| @@ -0,0 +1,132 @@ | |||
| 1 | ######################## BEGIN LICENSE BLOCK ######################## | ||
| 2 | # The Original Code is Mozilla Universal charset detector code. | ||
| 3 | # | ||
| 4 | # The Initial Developer of the Original Code is | ||
| 5 | # Netscape Communications Corporation. | ||
| 6 | # Portions created by the Initial Developer are Copyright (C) 2001 | ||
| 7 | # the Initial Developer. All Rights Reserved. | ||
| 8 | # | ||
| 9 | # Contributor(s): | ||
| 10 | # Mark Pilgrim - port to Python | ||
| 11 | # Shy Shalom - original C code | ||
| 12 | # | ||
| 13 | # This library is free software; you can redistribute it and/or | ||
| 14 | # modify it under the terms of the GNU Lesser General Public | ||
| 15 | # License as published by the Free Software Foundation; either | ||
| 16 | # version 2.1 of the License, or (at your option) any later version. | ||
| 17 | # | ||
| 18 | # This library is distributed in the hope that it will be useful, | ||
| 19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
| 21 | # Lesser General Public License for more details. | ||
| 22 | # | ||
| 23 | # You should have received a copy of the GNU Lesser General Public | ||
| 24 | # License along with this library; if not, write to the Free Software | ||
| 25 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA | ||
| 26 | # 02110-1301 USA | ||
| 27 | ######################### END LICENSE BLOCK ######################### | ||
| 28 | |||
| 29 | from .charsetprober import CharSetProber | ||
| 30 | from .enums import CharacterCategory, ProbingState, SequenceLikelihood | ||
| 31 | |||
| 32 | |||
| 33 | class SingleByteCharSetProber(CharSetProber): | ||
| 34 | SAMPLE_SIZE = 64 | ||
| 35 | SB_ENOUGH_REL_THRESHOLD = 1024 # 0.25 * SAMPLE_SIZE^2 | ||
| 36 | POSITIVE_SHORTCUT_THRESHOLD = 0.95 | ||
| 37 | NEGATIVE_SHORTCUT_THRESHOLD = 0.05 | ||
| 38 | |||
| 39 | def __init__(self, model, reversed=False, name_prober=None): | ||
| 40 | super(SingleByteCharSetProber, self).__init__() | ||
| 41 | self._model = model | ||
| 42 | # TRUE if we need to reverse every pair in the model lookup | ||
| 43 | self._reversed = reversed | ||
| 44 | # Optional auxiliary prober for name decision | ||
| 45 | self._name_prober = name_prober | ||
| 46 | self._last_order = None | ||
| 47 | self._seq_counters = None | ||
| 48 | self._total_seqs = None | ||
| 49 | self._total_char = None | ||
| 50 | self._freq_char = None | ||
| 51 | self.reset() | ||
| 52 | |||
| 53 | def reset(self): | ||
| 54 | super(SingleByteCharSetProber, self).reset() | ||
| 55 | # char order of last character | ||
| 56 | self._last_order = 255 | ||
| 57 | self._seq_counters = [0] * SequenceLikelihood.get_num_categories() | ||
| 58 | self._total_seqs = 0 | ||
| 59 | self._total_char = 0 | ||
| 60 | # characters that fall in our sampling range | ||
| 61 | self._freq_char = 0 | ||
| 62 | |||
| 63 | @property | ||
| 64 | def charset_name(self): | ||
| 65 | if self._name_prober: | ||
| 66 | return self._name_prober.charset_name | ||
| 67 | else: | ||
| 68 | return self._model['charset_name'] | ||
| 69 | |||
| 70 | @property | ||
| 71 | def language(self): | ||
| 72 | if self._name_prober: | ||
| 73 | return self._name_prober.language | ||
| 74 | else: | ||
| 75 | return self._model.get('language') | ||
| 76 | |||
| 77 | def feed(self, byte_str): | ||
| 78 | if not self._model['keep_english_letter']: | ||
| 79 | byte_str = self.filter_international_words(byte_str) | ||
| 80 | if not byte_str: | ||
| 81 | return self.state | ||
| 82 | char_to_order_map = self._model['char_to_order_map'] | ||
| 83 | for i, c in enumerate(byte_str): | ||
| 84 | # XXX: Order is in range 1-64, so one would think we want 0-63 here, | ||
| 85 | # but that leads to 27 more test failures than before. | ||
| 86 | order = char_to_order_map[c] | ||
| 87 | # XXX: This was SYMBOL_CAT_ORDER before, with a value of 250, but | ||
| 88 | # CharacterCategory.SYMBOL is actually 253, so we use CONTROL | ||
| 89 | # to make it closer to the original intent. The only difference | ||
| 90 | # is whether or not we count digits and control characters for | ||
| 91 | # _total_char purposes. | ||
| 92 | if order < CharacterCategory.CONTROL: | ||
| 93 | self._total_char += 1 | ||
| 94 | if order < self.SAMPLE_SIZE: | ||
| 95 | self._freq_char += 1 | ||
| 96 | if self._last_order < self.SAMPLE_SIZE: | ||
| 97 | self._total_seqs += 1 | ||
| 98 | if not self._reversed: | ||
| 99 | i = (self._last_order * self.SAMPLE_SIZE) + order | ||
| 100 | model = self._model['precedence_matrix'][i] | ||
| 101 | else: # reverse the order of the letters in the lookup | ||
| 102 | i = (order * self.SAMPLE_SIZE) + self._last_order | ||
| 103 | model = self._model['precedence_matrix'][i] | ||
| 104 | self._seq_counters[model] += 1 | ||
| 105 | self._last_order = order | ||
| 106 | |||
| 107 | charset_name = self._model['charset_name'] | ||
| 108 | if self.state == ProbingState.DETECTING: | ||
| 109 | if self._total_seqs > self.SB_ENOUGH_REL_THRESHOLD: | ||
| 110 | confidence = self.get_confidence() | ||
| 111 | if confidence > self.POSITIVE_SHORTCUT_THRESHOLD: | ||
| 112 | self.logger.debug('%s confidence = %s, we have a winner', | ||
| 113 | charset_name, confidence) | ||
| 114 | self._state = ProbingState.FOUND_IT | ||
| 115 | elif confidence < self.NEGATIVE_SHORTCUT_THRESHOLD: | ||
| 116 | self.logger.debug('%s confidence = %s, below negative ' | ||
| 117 | 'shortcut threshold %s', charset_name, | ||
| 118 | confidence, | ||
| 119 | self.NEGATIVE_SHORTCUT_THRESHOLD) | ||
| 120 | self._state = ProbingState.NOT_ME | ||
| 121 | |||
| 122 | return self.state | ||
| 123 | |||
| 124 | def get_confidence(self): | ||
| 125 | r = 0.01 | ||
| 126 | if self._total_seqs > 0: | ||
| 127 | r = ((1.0 * self._seq_counters[SequenceLikelihood.POSITIVE]) / | ||
| 128 | self._total_seqs / self._model['typical_positive_ratio']) | ||
| 129 | r = r * self._freq_char / self._total_char | ||
| 130 | if r >= 1.0: | ||
| 131 | r = 0.99 | ||
| 132 | return r | ||
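A sketch of driving a single-byte prober directly with one of the language models from this package (`Win1251CyrillicModel` comes from langcyrillicmodel.py, the same module imported by the group prober just below):

```python
from chardet.sbcharsetprober import SingleByteCharSetProber
from chardet.langcyrillicmodel import Win1251CyrillicModel

prober = SingleByteCharSetProber(Win1251CyrillicModel)
prober.feed('привет, мир, это пример текста'.encode('windows-1251'))
print(prober.charset_name)      # 'windows-1251', from the model dict
print(prober.get_confidence())  # bigram-likelihood ratio, capped at 0.99
```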
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/sbcsgroupprober.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/sbcsgroupprober.py new file mode 100644 index 0000000..29bcc2a --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/sbcsgroupprober.py | |||
| @@ -0,0 +1,73 @@ | |||
| 1 | ######################## BEGIN LICENSE BLOCK ######################## | ||
| 2 | # The Original Code is Mozilla Universal charset detector code. | ||
| 3 | # | ||
| 4 | # The Initial Developer of the Original Code is | ||
| 5 | # Netscape Communications Corporation. | ||
| 6 | # Portions created by the Initial Developer are Copyright (C) 2001 | ||
| 7 | # the Initial Developer. All Rights Reserved. | ||
| 8 | # | ||
| 9 | # Contributor(s): | ||
| 10 | # Mark Pilgrim - port to Python | ||
| 11 | # Shy Shalom - original C code | ||
| 12 | # | ||
| 13 | # This library is free software; you can redistribute it and/or | ||
| 14 | # modify it under the terms of the GNU Lesser General Public | ||
| 15 | # License as published by the Free Software Foundation; either | ||
| 16 | # version 2.1 of the License, or (at your option) any later version. | ||
| 17 | # | ||
| 18 | # This library is distributed in the hope that it will be useful, | ||
| 19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
| 21 | # Lesser General Public License for more details. | ||
| 22 | # | ||
| 23 | # You should have received a copy of the GNU Lesser General Public | ||
| 24 | # License along with this library; if not, write to the Free Software | ||
| 25 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA | ||
| 26 | # 02110-1301 USA | ||
| 27 | ######################### END LICENSE BLOCK ######################### | ||
| 28 | |||
| 29 | from .charsetgroupprober import CharSetGroupProber | ||
| 30 | from .sbcharsetprober import SingleByteCharSetProber | ||
| 31 | from .langcyrillicmodel import (Win1251CyrillicModel, Koi8rModel, | ||
| 32 | Latin5CyrillicModel, MacCyrillicModel, | ||
| 33 | Ibm866Model, Ibm855Model) | ||
| 34 | from .langgreekmodel import Latin7GreekModel, Win1253GreekModel | ||
| 35 | from .langbulgarianmodel import Latin5BulgarianModel, Win1251BulgarianModel | ||
| 36 | # from .langhungarianmodel import Latin2HungarianModel, Win1250HungarianModel | ||
| 37 | from .langthaimodel import TIS620ThaiModel | ||
| 38 | from .langhebrewmodel import Win1255HebrewModel | ||
| 39 | from .hebrewprober import HebrewProber | ||
| 40 | from .langturkishmodel import Latin5TurkishModel | ||
| 41 | |||
| 42 | |||
| 43 | class SBCSGroupProber(CharSetGroupProber): | ||
| 44 | def __init__(self): | ||
| 45 | super(SBCSGroupProber, self).__init__() | ||
| 46 | self.probers = [ | ||
| 47 | SingleByteCharSetProber(Win1251CyrillicModel), | ||
| 48 | SingleByteCharSetProber(Koi8rModel), | ||
| 49 | SingleByteCharSetProber(Latin5CyrillicModel), | ||
| 50 | SingleByteCharSetProber(MacCyrillicModel), | ||
| 51 | SingleByteCharSetProber(Ibm866Model), | ||
| 52 | SingleByteCharSetProber(Ibm855Model), | ||
| 53 | SingleByteCharSetProber(Latin7GreekModel), | ||
| 54 | SingleByteCharSetProber(Win1253GreekModel), | ||
| 55 | SingleByteCharSetProber(Latin5BulgarianModel), | ||
| 56 | SingleByteCharSetProber(Win1251BulgarianModel), | ||
| 57 | # TODO: Restore Hungarian encodings (iso-8859-2 and windows-1250) | ||
| 58 | # after we retrain the model. | ||
| 59 | # SingleByteCharSetProber(Latin2HungarianModel), | ||
| 60 | # SingleByteCharSetProber(Win1250HungarianModel), | ||
| 61 | SingleByteCharSetProber(TIS620ThaiModel), | ||
| 62 | SingleByteCharSetProber(Latin5TurkishModel), | ||
| 63 | ] | ||
| 64 | hebrew_prober = HebrewProber() | ||
| 65 | logical_hebrew_prober = SingleByteCharSetProber(Win1255HebrewModel, | ||
| 66 | False, hebrew_prober) | ||
| 67 | visual_hebrew_prober = SingleByteCharSetProber(Win1255HebrewModel, True, | ||
| 68 | hebrew_prober) | ||
| 69 | hebrew_prober.set_model_probers(logical_hebrew_prober, visual_hebrew_prober) | ||
| 70 | self.probers.extend([hebrew_prober, logical_hebrew_prober, | ||
| 71 | visual_hebrew_prober]) | ||
| 72 | |||
| 73 | self.reset() | ||
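Note the Hebrew arrangement: two `SingleByteCharSetProber` instances share the same windows-1255 model, one with `reversed=True` for visual-order Hebrew, and the `HebrewProber` arbitrates between them via `set_model_probers`. Usage mirrors the multi-byte group (a sketch, assuming a direct `chardet` import):

```python
from chardet.sbcsgroupprober import SBCSGroupProber

prober = SBCSGroupProber()
prober.feed('Пример текста на русском языке'.encode('koi8-r'))
print(prober.charset_name, prober.get_confidence())  # expected: 'KOI8-R'
```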
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/sjisprober.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/sjisprober.py new file mode 100644 index 0000000..683add0 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/sjisprober.py | |||
| @@ -0,0 +1,92 @@ | |||
| 1 | ######################## BEGIN LICENSE BLOCK ######################## | ||
| 2 | # The Original Code is mozilla.org code. | ||
| 3 | # | ||
| 4 | # The Initial Developer of the Original Code is | ||
| 5 | # Netscape Communications Corporation. | ||
| 6 | # Portions created by the Initial Developer are Copyright (C) 1998 | ||
| 7 | # the Initial Developer. All Rights Reserved. | ||
| 8 | # | ||
| 9 | # Contributor(s): | ||
| 10 | # Mark Pilgrim - port to Python | ||
| 11 | # | ||
| 12 | # This library is free software; you can redistribute it and/or | ||
| 13 | # modify it under the terms of the GNU Lesser General Public | ||
| 14 | # License as published by the Free Software Foundation; either | ||
| 15 | # version 2.1 of the License, or (at your option) any later version. | ||
| 16 | # | ||
| 17 | # This library is distributed in the hope that it will be useful, | ||
| 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
| 20 | # Lesser General Public License for more details. | ||
| 21 | # | ||
| 22 | # You should have received a copy of the GNU Lesser General Public | ||
| 23 | # License along with this library; if not, write to the Free Software | ||
| 24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA | ||
| 25 | # 02110-1301 USA | ||
| 26 | ######################### END LICENSE BLOCK ######################### | ||
| 27 | |||
| 28 | from .mbcharsetprober import MultiByteCharSetProber | ||
| 29 | from .codingstatemachine import CodingStateMachine | ||
| 30 | from .chardistribution import SJISDistributionAnalysis | ||
| 31 | from .jpcntx import SJISContextAnalysis | ||
| 32 | from .mbcssm import SJIS_SM_MODEL | ||
| 33 | from .enums import ProbingState, MachineState | ||
| 34 | |||
| 35 | |||
| 36 | class SJISProber(MultiByteCharSetProber): | ||
| 37 | def __init__(self): | ||
| 38 | super(SJISProber, self).__init__() | ||
| 39 | self.coding_sm = CodingStateMachine(SJIS_SM_MODEL) | ||
| 40 | self.distribution_analyzer = SJISDistributionAnalysis() | ||
| 41 | self.context_analyzer = SJISContextAnalysis() | ||
| 42 | self.reset() | ||
| 43 | |||
| 44 | def reset(self): | ||
| 45 | super(SJISProber, self).reset() | ||
| 46 | self.context_analyzer.reset() | ||
| 47 | |||
| 48 | @property | ||
| 49 | def charset_name(self): | ||
| 50 | return self.context_analyzer.charset_name | ||
| 51 | |||
| 52 | @property | ||
| 53 | def language(self): | ||
| 54 | return "Japanese" | ||
| 55 | |||
| 56 | def feed(self, byte_str): | ||
| 57 | for i in range(len(byte_str)): | ||
| 58 | coding_state = self.coding_sm.next_state(byte_str[i]) | ||
| 59 | if coding_state == MachineState.ERROR: | ||
| 60 | self.logger.debug('%s %s prober hit error at byte %s', | ||
| 61 | self.charset_name, self.language, i) | ||
| 62 | self._state = ProbingState.NOT_ME | ||
| 63 | break | ||
| 64 | elif coding_state == MachineState.ITS_ME: | ||
| 65 | self._state = ProbingState.FOUND_IT | ||
| 66 | break | ||
| 67 | elif coding_state == MachineState.START: | ||
| 68 | char_len = self.coding_sm.get_current_charlen() | ||
| 69 | if i == 0: | ||
| 70 | self._last_char[1] = byte_str[0] | ||
| 71 | self.context_analyzer.feed(self._last_char[2 - char_len:], | ||
| 72 | char_len) | ||
| 73 | self.distribution_analyzer.feed(self._last_char, char_len) | ||
| 74 | else: | ||
| 75 | self.context_analyzer.feed(byte_str[i + 1 - char_len:i + 3 | ||
| 76 | - char_len], char_len) | ||
| 77 | self.distribution_analyzer.feed(byte_str[i - 1:i + 1], | ||
| 78 | char_len) | ||
| 79 | |||
| 80 | self._last_char[0] = byte_str[-1] | ||
| 81 | |||
| 82 | if self.state == ProbingState.DETECTING: | ||
| 83 | if (self.context_analyzer.got_enough_data() and | ||
| 84 | (self.get_confidence() > self.SHORTCUT_THRESHOLD)): | ||
| 85 | self._state = ProbingState.FOUND_IT | ||
| 86 | |||
| 87 | return self.state | ||
| 88 | |||
| 89 | def get_confidence(self): | ||
| 90 | context_conf = self.context_analyzer.get_confidence() | ||
| 91 | distrib_conf = self.distribution_analyzer.get_confidence() | ||
| 92 | return max(context_conf, distrib_conf) | ||
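A sketch of the Shift_JIS prober on its own. Its confidence is the `max` of the context and distribution analyzers above, and `charset_name` is delegated to the context analyzer, which may report CP932 when it encounters vendor extensions:

```python
from chardet.sjisprober import SJISProber

prober = SJISProber()
prober.feed('日本語のテキストです。'.encode('shift_jis'))
print(prober.charset_name)      # 'SHIFT_JIS' (or 'CP932' for extensions)
print(prober.get_confidence())
```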
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/universaldetector.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/universaldetector.py new file mode 100644 index 0000000..8a6de3b --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/universaldetector.py | |||
| @@ -0,0 +1,286 @@ | |||
| 1 | ######################## BEGIN LICENSE BLOCK ######################## | ||
| 2 | # The Original Code is Mozilla Universal charset detector code. | ||
| 3 | # | ||
| 4 | # The Initial Developer of the Original Code is | ||
| 5 | # Netscape Communications Corporation. | ||
| 6 | # Portions created by the Initial Developer are Copyright (C) 2001 | ||
| 7 | # the Initial Developer. All Rights Reserved. | ||
| 8 | # | ||
| 9 | # Contributor(s): | ||
| 10 | # Mark Pilgrim - port to Python | ||
| 11 | # Shy Shalom - original C code | ||
| 12 | # | ||
| 13 | # This library is free software; you can redistribute it and/or | ||
| 14 | # modify it under the terms of the GNU Lesser General Public | ||
| 15 | # License as published by the Free Software Foundation; either | ||
| 16 | # version 2.1 of the License, or (at your option) any later version. | ||
| 17 | # | ||
| 18 | # This library is distributed in the hope that it will be useful, | ||
| 19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
| 21 | # Lesser General Public License for more details. | ||
| 22 | # | ||
| 23 | # You should have received a copy of the GNU Lesser General Public | ||
| 24 | # License along with this library; if not, write to the Free Software | ||
| 25 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA | ||
| 26 | # 02110-1301 USA | ||
| 27 | ######################### END LICENSE BLOCK ######################### | ||
| 28 | """ | ||
| 29 | Module containing the UniversalDetector detector class, which is the primary | ||
| 30 | class a user of ``chardet`` should use. | ||
| 31 | |||
| 32 | :author: Mark Pilgrim (initial port to Python) | ||
| 33 | :author: Shy Shalom (original C code) | ||
| 34 | :author: Dan Blanchard (major refactoring for 3.0) | ||
| 35 | :author: Ian Cordasco | ||
| 36 | """ | ||
| 37 | |||
| 38 | |||
| 39 | import codecs | ||
| 40 | import logging | ||
| 41 | import re | ||
| 42 | |||
| 43 | from .charsetgroupprober import CharSetGroupProber | ||
| 44 | from .enums import InputState, LanguageFilter, ProbingState | ||
| 45 | from .escprober import EscCharSetProber | ||
| 46 | from .latin1prober import Latin1Prober | ||
| 47 | from .mbcsgroupprober import MBCSGroupProber | ||
| 48 | from .sbcsgroupprober import SBCSGroupProber | ||
| 49 | |||
| 50 | |||
| 51 | class UniversalDetector(object): | ||
| 52 | """ | ||
| 53 | The ``UniversalDetector`` class underlies the ``chardet.detect`` function | ||
| 54 | and coordinates all of the different charset probers. | ||
| 55 | |||
| 56 | To get a ``dict`` containing an encoding and its confidence, you can simply | ||
| 57 | run: | ||
| 58 | |||
| 59 | .. code:: | ||
| 60 | |||
| 61 | u = UniversalDetector() | ||
| 62 | u.feed(some_bytes) | ||
| 63 | u.close() | ||
| 64 | detected = u.result | ||
| 65 | |||
| 66 | """ | ||
| 67 | |||
| 68 | MINIMUM_THRESHOLD = 0.20 | ||
| 69 | HIGH_BYTE_DETECTOR = re.compile(b'[\x80-\xFF]') | ||
| 70 | ESC_DETECTOR = re.compile(b'(\033|~{)') | ||
| 71 | WIN_BYTE_DETECTOR = re.compile(b'[\x80-\x9F]') | ||
| 72 | ISO_WIN_MAP = {'iso-8859-1': 'Windows-1252', | ||
| 73 | 'iso-8859-2': 'Windows-1250', | ||
| 74 | 'iso-8859-5': 'Windows-1251', | ||
| 75 | 'iso-8859-6': 'Windows-1256', | ||
| 76 | 'iso-8859-7': 'Windows-1253', | ||
| 77 | 'iso-8859-8': 'Windows-1255', | ||
| 78 | 'iso-8859-9': 'Windows-1254', | ||
| 79 | 'iso-8859-13': 'Windows-1257'} | ||
| 80 | |||
| 81 | def __init__(self, lang_filter=LanguageFilter.ALL): | ||
| 82 | self._esc_charset_prober = None | ||
| 83 | self._charset_probers = [] | ||
| 84 | self.result = None | ||
| 85 | self.done = None | ||
| 86 | self._got_data = None | ||
| 87 | self._input_state = None | ||
| 88 | self._last_char = None | ||
| 89 | self.lang_filter = lang_filter | ||
| 90 | self.logger = logging.getLogger(__name__) | ||
| 91 | self._has_win_bytes = None | ||
| 92 | self.reset() | ||
| 93 | |||
| 94 | def reset(self): | ||
| 95 | """ | ||
| 96 | Reset the UniversalDetector and all of its probers back to their | ||
| 97 | initial states. This is called by ``__init__``, so you only need to | ||
| 98 | call this directly in between analyses of different documents. | ||
| 99 | """ | ||
| 100 | self.result = {'encoding': None, 'confidence': 0.0, 'language': None} | ||
| 101 | self.done = False | ||
| 102 | self._got_data = False | ||
| 103 | self._has_win_bytes = False | ||
| 104 | self._input_state = InputState.PURE_ASCII | ||
| 105 | self._last_char = b'' | ||
| 106 | if self._esc_charset_prober: | ||
| 107 | self._esc_charset_prober.reset() | ||
| 108 | for prober in self._charset_probers: | ||
| 109 | prober.reset() | ||
| 110 | |||
| 111 | def feed(self, byte_str): | ||
| 112 | """ | ||
| 113 | Takes a chunk of a document and feeds it through all of the relevant | ||
| 114 | charset probers. | ||
| 115 | |||
| 116 | After calling ``feed``, you can check the value of the ``done`` | ||
| 117 | attribute to see if you need to continue feeding the | ||
| 118 | ``UniversalDetector`` more data, or if it has made a prediction | ||
| 119 | (in the ``result`` attribute). | ||
| 120 | |||
| 121 | .. note:: | ||
| 122 | You should always call ``close`` when you're done feeding in your | ||
| 123 | document if ``done`` is not already ``True``. | ||
| 124 | """ | ||
| 125 | if self.done: | ||
| 126 | return | ||
| 127 | |||
| 128 | if not len(byte_str): | ||
| 129 | return | ||
| 130 | |||
| 131 | if not isinstance(byte_str, bytearray): | ||
| 132 | byte_str = bytearray(byte_str) | ||
| 133 | |||
| 134 | # First check for known BOMs, since these are guaranteed to be correct | ||
| 135 | if not self._got_data: | ||
| 136 | # If the data starts with BOM, we know it is UTF | ||
| 137 | if byte_str.startswith(codecs.BOM_UTF8): | ||
| 138 | # EF BB BF UTF-8 with BOM | ||
| 139 | self.result = {'encoding': "UTF-8-SIG", | ||
| 140 | 'confidence': 1.0, | ||
| 141 | 'language': ''} | ||
| 142 | elif byte_str.startswith((codecs.BOM_UTF32_LE, | ||
| 143 | codecs.BOM_UTF32_BE)): | ||
| 144 | # FF FE 00 00 UTF-32, little-endian BOM | ||
| 145 | # 00 00 FE FF UTF-32, big-endian BOM | ||
| 146 | self.result = {'encoding': "UTF-32", | ||
| 147 | 'confidence': 1.0, | ||
| 148 | 'language': ''} | ||
| 149 | elif byte_str.startswith(b'\xFE\xFF\x00\x00'): | ||
| 150 | # FE FF 00 00 UCS-4, unusual octet order BOM (3412) | ||
| 151 | self.result = {'encoding': "X-ISO-10646-UCS-4-3412", | ||
| 152 | 'confidence': 1.0, | ||
| 153 | 'language': ''} | ||
| 154 | elif byte_str.startswith(b'\x00\x00\xFF\xFE'): | ||
| 155 | # 00 00 FF FE UCS-4, unusual octet order BOM (2143) | ||
| 156 | self.result = {'encoding': "X-ISO-10646-UCS-4-2143", | ||
| 157 | 'confidence': 1.0, | ||
| 158 | 'language': ''} | ||
| 159 | elif byte_str.startswith((codecs.BOM_LE, codecs.BOM_BE)): | ||
| 160 | # FF FE UTF-16, little endian BOM | ||
| 161 | # FE FF UTF-16, big endian BOM | ||
| 162 | self.result = {'encoding': "UTF-16", | ||
| 163 | 'confidence': 1.0, | ||
| 164 | 'language': ''} | ||
| 165 | |||
| 166 | self._got_data = True | ||
| 167 | if self.result['encoding'] is not None: | ||
| 168 | self.done = True | ||
| 169 | return | ||
| 170 | |||
| 171 | # If none of those matched and we've only seen ASCII so far, check | ||
| 172 | # for high bytes and escape sequences | ||
| 173 | if self._input_state == InputState.PURE_ASCII: | ||
| 174 | if self.HIGH_BYTE_DETECTOR.search(byte_str): | ||
| 175 | self._input_state = InputState.HIGH_BYTE | ||
| 176 | elif self._input_state == InputState.PURE_ASCII and \ | ||
| 177 | self.ESC_DETECTOR.search(self._last_char + byte_str): | ||
| 178 | self._input_state = InputState.ESC_ASCII | ||
| 179 | |||
| 180 | self._last_char = byte_str[-1:] | ||
| 181 | |||
| 182 | # If we've seen escape sequences, use the EscCharSetProber, which | ||
| 183 | # uses a simple state machine to check for known escape sequences in | ||
| 184 | # HZ and ISO-2022 encodings, since those are the only encodings that | ||
| 185 | # use such sequences. | ||
| 186 | if self._input_state == InputState.ESC_ASCII: | ||
| 187 | if not self._esc_charset_prober: | ||
| 188 | self._esc_charset_prober = EscCharSetProber(self.lang_filter) | ||
| 189 | if self._esc_charset_prober.feed(byte_str) == ProbingState.FOUND_IT: | ||
| 190 | self.result = {'encoding': | ||
| 191 | self._esc_charset_prober.charset_name, | ||
| 192 | 'confidence': | ||
| 193 | self._esc_charset_prober.get_confidence(), | ||
| 194 | 'language': | ||
| 195 | self._esc_charset_prober.language} | ||
| 196 | self.done = True | ||
| 197 | # If we've seen high bytes (i.e., those with values greater than 127), | ||
| 198 | # we need to do more complicated checks using all our multi-byte and | ||
| 199 | # single-byte probers that are left. The single-byte probers | ||
| 200 | # use character bigram distributions to determine the encoding, whereas | ||
| 201 | # the multi-byte probers use a combination of character unigram and | ||
| 202 | # bigram distributions. | ||
| 203 | elif self._input_state == InputState.HIGH_BYTE: | ||
| 204 | if not self._charset_probers: | ||
| 205 | self._charset_probers = [MBCSGroupProber(self.lang_filter)] | ||
| 206 | # If we're checking non-CJK encodings, use single-byte prober | ||
| 207 | if self.lang_filter & LanguageFilter.NON_CJK: | ||
| 208 | self._charset_probers.append(SBCSGroupProber()) | ||
| 209 | self._charset_probers.append(Latin1Prober()) | ||
| 210 | for prober in self._charset_probers: | ||
| 211 | if prober.feed(byte_str) == ProbingState.FOUND_IT: | ||
| 212 | self.result = {'encoding': prober.charset_name, | ||
| 213 | 'confidence': prober.get_confidence(), | ||
| 214 | 'language': prober.language} | ||
| 215 | self.done = True | ||
| 216 | break | ||
| 217 | if self.WIN_BYTE_DETECTOR.search(byte_str): | ||
| 218 | self._has_win_bytes = True | ||
| 219 | |||
| 220 | def close(self): | ||
| 221 | """ | ||
| 222 | Stop analyzing the current document and come up with a final | ||
| 223 | prediction. | ||
| 224 | |||
| 225 | :returns: The ``result`` attribute, a ``dict`` with the keys | ||
| 226 | `encoding`, `confidence`, and `language`. | ||
| 227 | """ | ||
| 228 | # Don't bother with checks if we're already done | ||
| 229 | if self.done: | ||
| 230 | return self.result | ||
| 231 | self.done = True | ||
| 232 | |||
| 233 | if not self._got_data: | ||
| 234 | self.logger.debug('no data received!') | ||
| 235 | |||
| 236 | # Default to ASCII if it is all we've seen so far | ||
| 237 | elif self._input_state == InputState.PURE_ASCII: | ||
| 238 | self.result = {'encoding': 'ascii', | ||
| 239 | 'confidence': 1.0, | ||
| 240 | 'language': ''} | ||
| 241 | |||
| 242 | # If we have seen non-ASCII, return the best that met MINIMUM_THRESHOLD | ||
| 243 | elif self._input_state == InputState.HIGH_BYTE: | ||
| 244 | prober_confidence = None | ||
| 245 | max_prober_confidence = 0.0 | ||
| 246 | max_prober = None | ||
| 247 | for prober in self._charset_probers: | ||
| 248 | if not prober: | ||
| 249 | continue | ||
| 250 | prober_confidence = prober.get_confidence() | ||
| 251 | if prober_confidence > max_prober_confidence: | ||
| 252 | max_prober_confidence = prober_confidence | ||
| 253 | max_prober = prober | ||
| 254 | if max_prober and (max_prober_confidence > self.MINIMUM_THRESHOLD): | ||
| 255 | charset_name = max_prober.charset_name | ||
| 256 | lower_charset_name = max_prober.charset_name.lower() | ||
| 257 | confidence = max_prober.get_confidence() | ||
| 258 | # Use Windows encoding name instead of ISO-8859 if we saw any | ||
| 259 | # extra Windows-specific bytes | ||
| 260 | if lower_charset_name.startswith('iso-8859'): | ||
| 261 | if self._has_win_bytes: | ||
| 262 | charset_name = self.ISO_WIN_MAP.get(lower_charset_name, | ||
| 263 | charset_name) | ||
| 264 | self.result = {'encoding': charset_name, | ||
| 265 | 'confidence': confidence, | ||
| 266 | 'language': max_prober.language} | ||
| 267 | |||
| 268 | # Log all prober confidences if none met MINIMUM_THRESHOLD | ||
| 269 | if self.logger.getEffectiveLevel() == logging.DEBUG: | ||
| 270 | if self.result['encoding'] is None: | ||
| 271 | self.logger.debug('no probers hit minimum threshold') | ||
| 272 | for group_prober in self._charset_probers: | ||
| 273 | if not group_prober: | ||
| 274 | continue | ||
| 275 | if isinstance(group_prober, CharSetGroupProber): | ||
| 276 | for prober in group_prober.probers: | ||
| 277 | self.logger.debug('%s %s confidence = %s', | ||
| 278 | prober.charset_name, | ||
| 279 | prober.language, | ||
| 280 | prober.get_confidence()) | ||
| 281 | else: | ||
| 282 | self.logger.debug('%s %s confidence = %s', | ||
| 283 | prober.charset_name, | ||
| 284 | prober.language, | ||
| 285 | prober.get_confidence()) | ||
| 286 | return self.result | ||
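A minimal incremental-detection sketch for the class above, assuming this vendored import path (a stand-alone chardet install would import chardet.universaldetector instead) and a hypothetical input file name:

    from pip._vendor.chardet.universaldetector import UniversalDetector

    detector = UniversalDetector()
    with open('some_file.bin', 'rb') as handle:   # hypothetical file
        for chunk in iter(lambda: handle.read(4096), b''):
            detector.feed(chunk)
            if detector.done:   # a BOM or a prober already decided
                break
    result = detector.close()   # {'encoding': ..., 'confidence': ..., 'language': ...}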
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/utf8prober.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/utf8prober.py new file mode 100644 index 0000000..4573267 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/utf8prober.py | |||
| @@ -0,0 +1,82 @@ | |||
| 1 | ######################## BEGIN LICENSE BLOCK ######################## | ||
| 2 | # The Original Code is mozilla.org code. | ||
| 3 | # | ||
| 4 | # The Initial Developer of the Original Code is | ||
| 5 | # Netscape Communications Corporation. | ||
| 6 | # Portions created by the Initial Developer are Copyright (C) 1998 | ||
| 7 | # the Initial Developer. All Rights Reserved. | ||
| 8 | # | ||
| 9 | # Contributor(s): | ||
| 10 | # Mark Pilgrim - port to Python | ||
| 11 | # | ||
| 12 | # This library is free software; you can redistribute it and/or | ||
| 13 | # modify it under the terms of the GNU Lesser General Public | ||
| 14 | # License as published by the Free Software Foundation; either | ||
| 15 | # version 2.1 of the License, or (at your option) any later version. | ||
| 16 | # | ||
| 17 | # This library is distributed in the hope that it will be useful, | ||
| 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
| 20 | # Lesser General Public License for more details. | ||
| 21 | # | ||
| 22 | # You should have received a copy of the GNU Lesser General Public | ||
| 23 | # License along with this library; if not, write to the Free Software | ||
| 24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA | ||
| 25 | # 02110-1301 USA | ||
| 26 | ######################### END LICENSE BLOCK ######################### | ||
| 27 | |||
| 28 | from .charsetprober import CharSetProber | ||
| 29 | from .enums import ProbingState, MachineState | ||
| 30 | from .codingstatemachine import CodingStateMachine | ||
| 31 | from .mbcssm import UTF8_SM_MODEL | ||
| 32 | |||
| 33 | |||
| 34 | |||
| 35 | class UTF8Prober(CharSetProber): | ||
| 36 | ONE_CHAR_PROB = 0.5 | ||
| 37 | |||
| 38 | def __init__(self): | ||
| 39 | super(UTF8Prober, self).__init__() | ||
| 40 | self.coding_sm = CodingStateMachine(UTF8_SM_MODEL) | ||
| 41 | self._num_mb_chars = None | ||
| 42 | self.reset() | ||
| 43 | |||
| 44 | def reset(self): | ||
| 45 | super(UTF8Prober, self).reset() | ||
| 46 | self.coding_sm.reset() | ||
| 47 | self._num_mb_chars = 0 | ||
| 48 | |||
| 49 | @property | ||
| 50 | def charset_name(self): | ||
| 51 | return "utf-8" | ||
| 52 | |||
| 53 | @property | ||
| 54 | def language(self): | ||
| 55 | return "" | ||
| 56 | |||
| 57 | def feed(self, byte_str): | ||
| 58 | for c in byte_str: | ||
| 59 | coding_state = self.coding_sm.next_state(c) | ||
| 60 | if coding_state == MachineState.ERROR: | ||
| 61 | self._state = ProbingState.NOT_ME | ||
| 62 | break | ||
| 63 | elif coding_state == MachineState.ITS_ME: | ||
| 64 | self._state = ProbingState.FOUND_IT | ||
| 65 | break | ||
| 66 | elif coding_state == MachineState.START: | ||
| 67 | if self.coding_sm.get_current_charlen() >= 2: | ||
| 68 | self._num_mb_chars += 1 | ||
| 69 | |||
| 70 | if self.state == ProbingState.DETECTING: | ||
| 71 | if self.get_confidence() > self.SHORTCUT_THRESHOLD: | ||
| 72 | self._state = ProbingState.FOUND_IT | ||
| 73 | |||
| 74 | return self.state | ||
| 75 | |||
| 76 | def get_confidence(self): | ||
| 77 |     unlike = 0.99 | ||
| 78 |     if self._num_mb_chars < 6: | ||
| 79 |         unlike *= self.ONE_CHAR_PROB ** self._num_mb_chars | ||
| 80 |         return 1.0 - unlike | ||
| 81 |     else: | ||
| 82 |         return unlike | ||
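The confidence formula above is easy to tabulate; a small stand-alone sketch (reusing the constants shown: a base unlikelihood of 0.99, halved per multi-byte character via ONE_CHAR_PROB) illustrates how quickly confidence saturates:

    def utf8_confidence(num_mb_chars, one_char_prob=0.5):
        # mirrors UTF8Prober.get_confidence for illustration
        unlike = 0.99
        if num_mb_chars < 6:
            return 1.0 - unlike * one_char_prob ** num_mb_chars
        return unlike

    for n in range(7):
        print(n, round(utf8_confidence(n), 4))
    # 0 -> 0.01, 1 -> 0.505, 2 -> 0.7525, ..., 6 or more -> 0.99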
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/version.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/version.py new file mode 100644 index 0000000..f24d042 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/version.py | |||
| @@ -0,0 +1,9 @@ | |||
| 1 | """ | ||
| 2 | This module exists only to simplify retrieving the version number of chardet | ||
| 3 | from within setup.py and from chardet subpackages. | ||
| 4 | |||
| 5 | :author: Dan Blanchard (dan.blanchard@gmail.com) | ||
| 6 | """ | ||
| 7 | |||
| 8 | __version__ = "3.0.4" | ||
| 9 | VERSION = __version__.split('.') | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/colorama/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/colorama/__init__.py new file mode 100644 index 0000000..10c372d --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/colorama/__init__.py | |||
| @@ -0,0 +1,7 @@ | |||
| 1 | # Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. | ||
| 2 | from .initialise import init, deinit, reinit, colorama_text | ||
| 3 | from .ansi import Fore, Back, Style, Cursor | ||
| 4 | from .ansitowin32 import AnsiToWin32 | ||
| 5 | |||
| 6 | __version__ = '0.3.9' | ||
| 7 | |||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/colorama/ansi.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/colorama/ansi.py new file mode 100644 index 0000000..8530fd0 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/colorama/ansi.py | |||
| @@ -0,0 +1,102 @@ | |||
| 1 | # Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. | ||
| 2 | ''' | ||
| 3 | This module generates ANSI character codes for printing colors to terminals. | ||
| 4 | See: http://en.wikipedia.org/wiki/ANSI_escape_code | ||
| 5 | ''' | ||
| 6 | |||
| 7 | CSI = '\033[' | ||
| 8 | OSC = '\033]' | ||
| 9 | BEL = '\007' | ||
| 10 | |||
| 11 | |||
| 12 | def code_to_chars(code): | ||
| 13 | return CSI + str(code) + 'm' | ||
| 14 | |||
| 15 | def set_title(title): | ||
| 16 | return OSC + '2;' + title + BEL | ||
| 17 | |||
| 18 | def clear_screen(mode=2): | ||
| 19 | return CSI + str(mode) + 'J' | ||
| 20 | |||
| 21 | def clear_line(mode=2): | ||
| 22 | return CSI + str(mode) + 'K' | ||
| 23 | |||
| 24 | |||
| 25 | class AnsiCodes(object): | ||
| 26 | def __init__(self): | ||
| 27 | # the subclasses declare class attributes which are numbers. | ||
| 28 | # Upon instantiation we define instance attributes, which are the same | ||
| 29 | # as the class attributes but wrapped with the ANSI escape sequence | ||
| 30 | for name in dir(self): | ||
| 31 | if not name.startswith('_'): | ||
| 32 | value = getattr(self, name) | ||
| 33 | setattr(self, name, code_to_chars(value)) | ||
| 34 | |||
| 35 | |||
| 36 | class AnsiCursor(object): | ||
| 37 | def UP(self, n=1): | ||
| 38 | return CSI + str(n) + 'A' | ||
| 39 | def DOWN(self, n=1): | ||
| 40 | return CSI + str(n) + 'B' | ||
| 41 | def FORWARD(self, n=1): | ||
| 42 | return CSI + str(n) + 'C' | ||
| 43 | def BACK(self, n=1): | ||
| 44 | return CSI + str(n) + 'D' | ||
| 45 | def POS(self, x=1, y=1): | ||
| 46 | return CSI + str(y) + ';' + str(x) + 'H' | ||
| 47 | |||
| 48 | |||
| 49 | class AnsiFore(AnsiCodes): | ||
| 50 | BLACK = 30 | ||
| 51 | RED = 31 | ||
| 52 | GREEN = 32 | ||
| 53 | YELLOW = 33 | ||
| 54 | BLUE = 34 | ||
| 55 | MAGENTA = 35 | ||
| 56 | CYAN = 36 | ||
| 57 | WHITE = 37 | ||
| 58 | RESET = 39 | ||
| 59 | |||
| 60 | # These are fairly well supported, but not part of the standard. | ||
| 61 | LIGHTBLACK_EX = 90 | ||
| 62 | LIGHTRED_EX = 91 | ||
| 63 | LIGHTGREEN_EX = 92 | ||
| 64 | LIGHTYELLOW_EX = 93 | ||
| 65 | LIGHTBLUE_EX = 94 | ||
| 66 | LIGHTMAGENTA_EX = 95 | ||
| 67 | LIGHTCYAN_EX = 96 | ||
| 68 | LIGHTWHITE_EX = 97 | ||
| 69 | |||
| 70 | |||
| 71 | class AnsiBack(AnsiCodes): | ||
| 72 | BLACK = 40 | ||
| 73 | RED = 41 | ||
| 74 | GREEN = 42 | ||
| 75 | YELLOW = 43 | ||
| 76 | BLUE = 44 | ||
| 77 | MAGENTA = 45 | ||
| 78 | CYAN = 46 | ||
| 79 | WHITE = 47 | ||
| 80 | RESET = 49 | ||
| 81 | |||
| 82 | # These are fairly well supported, but not part of the standard. | ||
| 83 | LIGHTBLACK_EX = 100 | ||
| 84 | LIGHTRED_EX = 101 | ||
| 85 | LIGHTGREEN_EX = 102 | ||
| 86 | LIGHTYELLOW_EX = 103 | ||
| 87 | LIGHTBLUE_EX = 104 | ||
| 88 | LIGHTMAGENTA_EX = 105 | ||
| 89 | LIGHTCYAN_EX = 106 | ||
| 90 | LIGHTWHITE_EX = 107 | ||
| 91 | |||
| 92 | |||
| 93 | class AnsiStyle(AnsiCodes): | ||
| 94 | BRIGHT = 1 | ||
| 95 | DIM = 2 | ||
| 96 | NORMAL = 22 | ||
| 97 | RESET_ALL = 0 | ||
| 98 | |||
| 99 | Fore = AnsiFore() | ||
| 100 | Back = AnsiBack() | ||
| 101 | Style = AnsiStyle() | ||
| 102 | Cursor = AnsiCursor() | ||
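The singletons defined above are plain escape strings, so coloring is just string concatenation; a minimal sketch, assuming an ANSI-capable terminal (on Windows, init() in initialise.py below does the translation):

    from pip._vendor.colorama.ansi import Fore, Back, Style, Cursor

    print(Fore.RED + 'error:' + Style.RESET_ALL + ' plain again')
    print(Back.YELLOW + Fore.BLACK + 'highlighted' + Style.RESET_ALL)
    print(repr(Fore.GREEN))    # '\x1b[32m' -- just an escape string
    print(repr(Cursor.UP(2)))  # '\x1b[2A'  -- cursor movement code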
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/colorama/ansitowin32.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/colorama/ansitowin32.py new file mode 100644 index 0000000..0cb9efc --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/colorama/ansitowin32.py | |||
| @@ -0,0 +1,236 @@ | |||
| 1 | # Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. | ||
| 2 | import re | ||
| 3 | import sys | ||
| 4 | import os | ||
| 5 | |||
| 6 | from .ansi import AnsiFore, AnsiBack, AnsiStyle, Style | ||
| 7 | from .winterm import WinTerm, WinColor, WinStyle | ||
| 8 | from .win32 import windll, winapi_test | ||
| 9 | |||
| 10 | |||
| 11 | winterm = None | ||
| 12 | if windll is not None: | ||
| 13 | winterm = WinTerm() | ||
| 14 | |||
| 15 | |||
| 16 | def is_stream_closed(stream): | ||
| 17 | return not hasattr(stream, 'closed') or stream.closed | ||
| 18 | |||
| 19 | |||
| 20 | def is_a_tty(stream): | ||
| 21 | return hasattr(stream, 'isatty') and stream.isatty() | ||
| 22 | |||
| 23 | |||
| 24 | class StreamWrapper(object): | ||
| 25 | ''' | ||
| 26 | Wraps a stream (such as stdout), acting as a transparent proxy for all | ||
| 27 | attribute access apart from method 'write()', which is delegated to our | ||
| 28 | Converter instance. | ||
| 29 | ''' | ||
| 30 | def __init__(self, wrapped, converter): | ||
| 31 | # double-underscore everything to prevent clashes with names of | ||
| 32 | # attributes on the wrapped stream object. | ||
| 33 | self.__wrapped = wrapped | ||
| 34 | self.__convertor = converter | ||
| 35 | |||
| 36 | def __getattr__(self, name): | ||
| 37 | return getattr(self.__wrapped, name) | ||
| 38 | |||
| 39 | def write(self, text): | ||
| 40 | self.__convertor.write(text) | ||
| 41 | |||
| 42 | |||
| 43 | class AnsiToWin32(object): | ||
| 44 | ''' | ||
| 45 | Implements a 'write()' method which, on Windows, will strip ANSI character | ||
| 46 | sequences from the text, and if outputting to a tty, will convert them into | ||
| 47 | win32 function calls. | ||
| 48 | ''' | ||
| 49 | ANSI_CSI_RE = re.compile('\001?\033\\[((?:\\d|;)*)([a-zA-Z])\002?') # Control Sequence Introducer | ||
| 50 | ANSI_OSC_RE = re.compile('\001?\033\\]((?:.|;)*?)(\x07)\002?') # Operating System Command | ||
| 51 | |||
| 52 | def __init__(self, wrapped, convert=None, strip=None, autoreset=False): | ||
| 53 | # The wrapped stream (normally sys.stdout or sys.stderr) | ||
| 54 | self.wrapped = wrapped | ||
| 55 | |||
| 56 | # should we reset colors to defaults after every .write() | ||
| 57 | self.autoreset = autoreset | ||
| 58 | |||
| 59 | # create the proxy wrapping our output stream | ||
| 60 | self.stream = StreamWrapper(wrapped, self) | ||
| 61 | |||
| 62 | on_windows = os.name == 'nt' | ||
| 63 | # We test if the WinAPI works, because even if we are on Windows | ||
| 64 | # we may be using a terminal that doesn't support the WinAPI | ||
| 65 | # (e.g. Cygwin Terminal). In this case it's up to the terminal | ||
| 66 | # to support the ANSI codes. | ||
| 67 | conversion_supported = on_windows and winapi_test() | ||
| 68 | |||
| 69 | # should we strip ANSI sequences from our output? | ||
| 70 | if strip is None: | ||
| 71 | strip = conversion_supported or (not is_stream_closed(wrapped) and not is_a_tty(wrapped)) | ||
| 72 | self.strip = strip | ||
| 73 | |||
| 74 | # should we convert ANSI sequences into win32 calls? | ||
| 75 | if convert is None: | ||
| 76 | convert = conversion_supported and not is_stream_closed(wrapped) and is_a_tty(wrapped) | ||
| 77 | self.convert = convert | ||
| 78 | |||
| 79 | # dict of ansi codes to win32 functions and parameters | ||
| 80 | self.win32_calls = self.get_win32_calls() | ||
| 81 | |||
| 82 | # are we wrapping stderr? | ||
| 83 | self.on_stderr = self.wrapped is sys.stderr | ||
| 84 | |||
| 85 | def should_wrap(self): | ||
| 86 | ''' | ||
| 87 | True if this class is actually needed. If false, then the output | ||
| 88 | stream will not be affected, nor will win32 calls be issued, so | ||
| 89 | wrapping stdout is not actually required. This will generally be | ||
| 90 | False on non-Windows platforms, unless optional functionality like | ||
| 91 | autoreset has been requested using kwargs to init() | ||
| 92 | ''' | ||
| 93 | return self.convert or self.strip or self.autoreset | ||
| 94 | |||
| 95 | def get_win32_calls(self): | ||
| 96 | if self.convert and winterm: | ||
| 97 | return { | ||
| 98 | AnsiStyle.RESET_ALL: (winterm.reset_all, ), | ||
| 99 | AnsiStyle.BRIGHT: (winterm.style, WinStyle.BRIGHT), | ||
| 100 | AnsiStyle.DIM: (winterm.style, WinStyle.NORMAL), | ||
| 101 | AnsiStyle.NORMAL: (winterm.style, WinStyle.NORMAL), | ||
| 102 | AnsiFore.BLACK: (winterm.fore, WinColor.BLACK), | ||
| 103 | AnsiFore.RED: (winterm.fore, WinColor.RED), | ||
| 104 | AnsiFore.GREEN: (winterm.fore, WinColor.GREEN), | ||
| 105 | AnsiFore.YELLOW: (winterm.fore, WinColor.YELLOW), | ||
| 106 | AnsiFore.BLUE: (winterm.fore, WinColor.BLUE), | ||
| 107 | AnsiFore.MAGENTA: (winterm.fore, WinColor.MAGENTA), | ||
| 108 | AnsiFore.CYAN: (winterm.fore, WinColor.CYAN), | ||
| 109 | AnsiFore.WHITE: (winterm.fore, WinColor.GREY), | ||
| 110 | AnsiFore.RESET: (winterm.fore, ), | ||
| 111 | AnsiFore.LIGHTBLACK_EX: (winterm.fore, WinColor.BLACK, True), | ||
| 112 | AnsiFore.LIGHTRED_EX: (winterm.fore, WinColor.RED, True), | ||
| 113 | AnsiFore.LIGHTGREEN_EX: (winterm.fore, WinColor.GREEN, True), | ||
| 114 | AnsiFore.LIGHTYELLOW_EX: (winterm.fore, WinColor.YELLOW, True), | ||
| 115 | AnsiFore.LIGHTBLUE_EX: (winterm.fore, WinColor.BLUE, True), | ||
| 116 | AnsiFore.LIGHTMAGENTA_EX: (winterm.fore, WinColor.MAGENTA, True), | ||
| 117 | AnsiFore.LIGHTCYAN_EX: (winterm.fore, WinColor.CYAN, True), | ||
| 118 | AnsiFore.LIGHTWHITE_EX: (winterm.fore, WinColor.GREY, True), | ||
| 119 | AnsiBack.BLACK: (winterm.back, WinColor.BLACK), | ||
| 120 | AnsiBack.RED: (winterm.back, WinColor.RED), | ||
| 121 | AnsiBack.GREEN: (winterm.back, WinColor.GREEN), | ||
| 122 | AnsiBack.YELLOW: (winterm.back, WinColor.YELLOW), | ||
| 123 | AnsiBack.BLUE: (winterm.back, WinColor.BLUE), | ||
| 124 | AnsiBack.MAGENTA: (winterm.back, WinColor.MAGENTA), | ||
| 125 | AnsiBack.CYAN: (winterm.back, WinColor.CYAN), | ||
| 126 | AnsiBack.WHITE: (winterm.back, WinColor.GREY), | ||
| 127 | AnsiBack.RESET: (winterm.back, ), | ||
| 128 | AnsiBack.LIGHTBLACK_EX: (winterm.back, WinColor.BLACK, True), | ||
| 129 | AnsiBack.LIGHTRED_EX: (winterm.back, WinColor.RED, True), | ||
| 130 | AnsiBack.LIGHTGREEN_EX: (winterm.back, WinColor.GREEN, True), | ||
| 131 | AnsiBack.LIGHTYELLOW_EX: (winterm.back, WinColor.YELLOW, True), | ||
| 132 | AnsiBack.LIGHTBLUE_EX: (winterm.back, WinColor.BLUE, True), | ||
| 133 | AnsiBack.LIGHTMAGENTA_EX: (winterm.back, WinColor.MAGENTA, True), | ||
| 134 | AnsiBack.LIGHTCYAN_EX: (winterm.back, WinColor.CYAN, True), | ||
| 135 | AnsiBack.LIGHTWHITE_EX: (winterm.back, WinColor.GREY, True), | ||
| 136 | } | ||
| 137 | return dict() | ||
| 138 | |||
| 139 | def write(self, text): | ||
| 140 | if self.strip or self.convert: | ||
| 141 | self.write_and_convert(text) | ||
| 142 | else: | ||
| 143 | self.wrapped.write(text) | ||
| 144 | self.wrapped.flush() | ||
| 145 | if self.autoreset: | ||
| 146 | self.reset_all() | ||
| 147 | |||
| 148 | |||
| 149 | def reset_all(self): | ||
| 150 | if self.convert: | ||
| 151 | self.call_win32('m', (0,)) | ||
| 152 | elif not self.strip and not is_stream_closed(self.wrapped): | ||
| 153 | self.wrapped.write(Style.RESET_ALL) | ||
| 154 | |||
| 155 | |||
| 156 | def write_and_convert(self, text): | ||
| 157 | ''' | ||
| 158 | Write the given text to our wrapped stream, stripping any ANSI | ||
| 159 | sequences from the text, and optionally converting them into win32 | ||
| 160 | calls. | ||
| 161 | ''' | ||
| 162 | cursor = 0 | ||
| 163 | text = self.convert_osc(text) | ||
| 164 | for match in self.ANSI_CSI_RE.finditer(text): | ||
| 165 | start, end = match.span() | ||
| 166 | self.write_plain_text(text, cursor, start) | ||
| 167 | self.convert_ansi(*match.groups()) | ||
| 168 | cursor = end | ||
| 169 | self.write_plain_text(text, cursor, len(text)) | ||
| 170 | |||
| 171 | |||
| 172 | def write_plain_text(self, text, start, end): | ||
| 173 | if start < end: | ||
| 174 | self.wrapped.write(text[start:end]) | ||
| 175 | self.wrapped.flush() | ||
| 176 | |||
| 177 | |||
| 178 | def convert_ansi(self, paramstring, command): | ||
| 179 | if self.convert: | ||
| 180 | params = self.extract_params(command, paramstring) | ||
| 181 | self.call_win32(command, params) | ||
| 182 | |||
| 183 | |||
| 184 | def extract_params(self, command, paramstring): | ||
| 185 | if command in 'Hf': | ||
| 186 | params = tuple(int(p) if len(p) != 0 else 1 for p in paramstring.split(';')) | ||
| 187 | while len(params) < 2: | ||
| 188 | # defaults: | ||
| 189 | params = params + (1,) | ||
| 190 | else: | ||
| 191 | params = tuple(int(p) for p in paramstring.split(';') if len(p) != 0) | ||
| 192 | if len(params) == 0: | ||
| 193 | # defaults: | ||
| 194 | if command in 'JKm': | ||
| 195 | params = (0,) | ||
| 196 | elif command in 'ABCD': | ||
| 197 | params = (1,) | ||
| 198 | |||
| 199 | return params | ||
| 200 | |||
| 201 | |||
| 202 | def call_win32(self, command, params): | ||
| 203 | if command == 'm': | ||
| 204 | for param in params: | ||
| 205 | if param in self.win32_calls: | ||
| 206 | func_args = self.win32_calls[param] | ||
| 207 | func = func_args[0] | ||
| 208 | args = func_args[1:] | ||
| 209 | kwargs = dict(on_stderr=self.on_stderr) | ||
| 210 | func(*args, **kwargs) | ||
| 211 | elif command in 'J': | ||
| 212 | winterm.erase_screen(params[0], on_stderr=self.on_stderr) | ||
| 213 | elif command in 'K': | ||
| 214 | winterm.erase_line(params[0], on_stderr=self.on_stderr) | ||
| 215 | elif command in 'Hf': # cursor position - absolute | ||
| 216 | winterm.set_cursor_position(params, on_stderr=self.on_stderr) | ||
| 217 | elif command in 'ABCD': # cursor position - relative | ||
| 218 | n = params[0] | ||
| 219 | # A - up, B - down, C - forward, D - back | ||
| 220 | x, y = {'A': (0, -n), 'B': (0, n), 'C': (n, 0), 'D': (-n, 0)}[command] | ||
| 221 | winterm.cursor_adjust(x, y, on_stderr=self.on_stderr) | ||
| 222 | |||
| 223 | |||
| 224 | def convert_osc(self, text): | ||
| 225 | for match in self.ANSI_OSC_RE.finditer(text): | ||
| 226 | start, end = match.span() | ||
| 227 | text = text[:start] + text[end:] | ||
| 228 | paramstring, command = match.groups() | ||
| 229 | if command in '\x07': # \x07 = BEL | ||
| 230 | params = paramstring.split(";") | ||
| 231 | # 0 - change title and icon (we will only change title) | ||
| 232 | # 1 - change icon (we don't support this) | ||
| 233 | # 2 - change title | ||
| 234 | if params[0] in '02': | ||
| 235 | winterm.set_title(params[1]) | ||
| 236 | return text | ||
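A hedged sketch of driving AnsiToWin32 directly (normally init() in initialise.py wraps sys.stdout and sys.stderr for you); on a non-Windows tty, should_wrap() is typically False and text passes through untouched:

    import sys
    from pip._vendor.colorama.ansitowin32 import AnsiToWin32

    wrapper = AnsiToWin32(sys.stdout)
    stream = wrapper.stream if wrapper.should_wrap() else sys.stdout
    stream.write('\x1b[31mred where supported\x1b[0m\n')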
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/colorama/initialise.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/colorama/initialise.py new file mode 100644 index 0000000..7f03156 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/colorama/initialise.py | |||
| @@ -0,0 +1,82 @@ | |||
| 1 | # Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. | ||
| 2 | import atexit | ||
| 3 | import contextlib | ||
| 4 | import sys | ||
| 5 | |||
| 6 | from .ansitowin32 import AnsiToWin32 | ||
| 7 | |||
| 8 | |||
| 9 | orig_stdout = None | ||
| 10 | orig_stderr = None | ||
| 11 | |||
| 12 | wrapped_stdout = None | ||
| 13 | wrapped_stderr = None | ||
| 14 | |||
| 15 | atexit_done = False | ||
| 16 | |||
| 17 | |||
| 18 | def reset_all(): | ||
| 19 | if AnsiToWin32 is not None: # Issue #74: objects might become None at exit | ||
| 20 | AnsiToWin32(orig_stdout).reset_all() | ||
| 21 | |||
| 22 | |||
| 23 | def init(autoreset=False, convert=None, strip=None, wrap=True): | ||
| 24 | |||
| 25 | if not wrap and any([autoreset, convert, strip]): | ||
| 26 | raise ValueError('wrap=False conflicts with any other arg=True') | ||
| 27 | |||
| 28 | global wrapped_stdout, wrapped_stderr | ||
| 29 | global orig_stdout, orig_stderr | ||
| 30 | |||
| 31 | orig_stdout = sys.stdout | ||
| 32 | orig_stderr = sys.stderr | ||
| 33 | |||
| 34 | if sys.stdout is None: | ||
| 35 | wrapped_stdout = None | ||
| 36 | else: | ||
| 37 | sys.stdout = wrapped_stdout = \ | ||
| 38 | wrap_stream(orig_stdout, convert, strip, autoreset, wrap) | ||
| 39 | if sys.stderr is None: | ||
| 40 | wrapped_stderr = None | ||
| 41 | else: | ||
| 42 | sys.stderr = wrapped_stderr = \ | ||
| 43 | wrap_stream(orig_stderr, convert, strip, autoreset, wrap) | ||
| 44 | |||
| 45 | global atexit_done | ||
| 46 | if not atexit_done: | ||
| 47 | atexit.register(reset_all) | ||
| 48 | atexit_done = True | ||
| 49 | |||
| 50 | |||
| 51 | def deinit(): | ||
| 52 | if orig_stdout is not None: | ||
| 53 | sys.stdout = orig_stdout | ||
| 54 | if orig_stderr is not None: | ||
| 55 | sys.stderr = orig_stderr | ||
| 56 | |||
| 57 | |||
| 58 | @contextlib.contextmanager | ||
| 59 | def colorama_text(*args, **kwargs): | ||
| 60 | init(*args, **kwargs) | ||
| 61 | try: | ||
| 62 | yield | ||
| 63 | finally: | ||
| 64 | deinit() | ||
| 65 | |||
| 66 | |||
| 67 | def reinit(): | ||
| 68 | if wrapped_stdout is not None: | ||
| 69 | sys.stdout = wrapped_stdout | ||
| 70 | if wrapped_stderr is not None: | ||
| 71 | sys.stderr = wrapped_stderr | ||
| 72 | |||
| 73 | |||
| 74 | def wrap_stream(stream, convert, strip, autoreset, wrap): | ||
| 75 | if wrap: | ||
| 76 | wrapper = AnsiToWin32(stream, | ||
| 77 | convert=convert, strip=strip, autoreset=autoreset) | ||
| 78 | if wrapper.should_wrap(): | ||
| 79 | stream = wrapper.stream | ||
| 80 | return stream | ||
| 81 | |||
| 82 | |||
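A minimal usage sketch for the functions above: colorama_text() scopes the wrapping so the original streams are restored on exit, which is the usual way short-lived tools consume init()/deinit():

    from pip._vendor.colorama import colorama_text, Fore

    with colorama_text(autoreset=True):   # init() on entry, deinit() on exit
        print(Fore.CYAN + 'wrapped: colors reset after every write')
    print('back on the original, unwrapped sys.stdout')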
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/colorama/win32.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/colorama/win32.py new file mode 100644 index 0000000..1485e69 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/colorama/win32.py | |||
| @@ -0,0 +1,156 @@ | |||
| 1 | # Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. | ||
| 2 | |||
| 3 | # from winbase.h | ||
| 4 | STDOUT = -11 | ||
| 5 | STDERR = -12 | ||
| 6 | |||
| 7 | try: | ||
| 8 | import ctypes | ||
| 9 | from ctypes import LibraryLoader | ||
| 10 | windll = LibraryLoader(ctypes.WinDLL) | ||
| 11 | from ctypes import wintypes | ||
| 12 | except (AttributeError, ImportError): | ||
| 13 | windll = None | ||
| 14 | SetConsoleTextAttribute = lambda *_: None | ||
| 15 | winapi_test = lambda *_: None | ||
| 16 | else: | ||
| 17 | from ctypes import byref, Structure, c_char, POINTER | ||
| 18 | |||
| 19 | COORD = wintypes._COORD | ||
| 20 | |||
| 21 | class CONSOLE_SCREEN_BUFFER_INFO(Structure): | ||
| 22 | """struct in wincon.h.""" | ||
| 23 | _fields_ = [ | ||
| 24 | ("dwSize", COORD), | ||
| 25 | ("dwCursorPosition", COORD), | ||
| 26 | ("wAttributes", wintypes.WORD), | ||
| 27 | ("srWindow", wintypes.SMALL_RECT), | ||
| 28 | ("dwMaximumWindowSize", COORD), | ||
| 29 | ] | ||
| 30 | def __str__(self): | ||
| 31 | return '(%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d)' % ( | ||
| 32 | self.dwSize.Y, self.dwSize.X | ||
| 33 | , self.dwCursorPosition.Y, self.dwCursorPosition.X | ||
| 34 | , self.wAttributes | ||
| 35 | , self.srWindow.Top, self.srWindow.Left, self.srWindow.Bottom, self.srWindow.Right | ||
| 36 | , self.dwMaximumWindowSize.Y, self.dwMaximumWindowSize.X | ||
| 37 | ) | ||
| 38 | |||
| 39 | _GetStdHandle = windll.kernel32.GetStdHandle | ||
| 40 | _GetStdHandle.argtypes = [ | ||
| 41 | wintypes.DWORD, | ||
| 42 | ] | ||
| 43 | _GetStdHandle.restype = wintypes.HANDLE | ||
| 44 | |||
| 45 | _GetConsoleScreenBufferInfo = windll.kernel32.GetConsoleScreenBufferInfo | ||
| 46 | _GetConsoleScreenBufferInfo.argtypes = [ | ||
| 47 | wintypes.HANDLE, | ||
| 48 | POINTER(CONSOLE_SCREEN_BUFFER_INFO), | ||
| 49 | ] | ||
| 50 | _GetConsoleScreenBufferInfo.restype = wintypes.BOOL | ||
| 51 | |||
| 52 | _SetConsoleTextAttribute = windll.kernel32.SetConsoleTextAttribute | ||
| 53 | _SetConsoleTextAttribute.argtypes = [ | ||
| 54 | wintypes.HANDLE, | ||
| 55 | wintypes.WORD, | ||
| 56 | ] | ||
| 57 | _SetConsoleTextAttribute.restype = wintypes.BOOL | ||
| 58 | |||
| 59 | _SetConsoleCursorPosition = windll.kernel32.SetConsoleCursorPosition | ||
| 60 | _SetConsoleCursorPosition.argtypes = [ | ||
| 61 | wintypes.HANDLE, | ||
| 62 | COORD, | ||
| 63 | ] | ||
| 64 | _SetConsoleCursorPosition.restype = wintypes.BOOL | ||
| 65 | |||
| 66 | _FillConsoleOutputCharacterA = windll.kernel32.FillConsoleOutputCharacterA | ||
| 67 | _FillConsoleOutputCharacterA.argtypes = [ | ||
| 68 | wintypes.HANDLE, | ||
| 69 | c_char, | ||
| 70 | wintypes.DWORD, | ||
| 71 | COORD, | ||
| 72 | POINTER(wintypes.DWORD), | ||
| 73 | ] | ||
| 74 | _FillConsoleOutputCharacterA.restype = wintypes.BOOL | ||
| 75 | |||
| 76 | _FillConsoleOutputAttribute = windll.kernel32.FillConsoleOutputAttribute | ||
| 77 | _FillConsoleOutputAttribute.argtypes = [ | ||
| 78 | wintypes.HANDLE, | ||
| 79 | wintypes.WORD, | ||
| 80 | wintypes.DWORD, | ||
| 81 | COORD, | ||
| 82 | POINTER(wintypes.DWORD), | ||
| 83 | ] | ||
| 84 | _FillConsoleOutputAttribute.restype = wintypes.BOOL | ||
| 85 | |||
| 86 | _SetConsoleTitleW = windll.kernel32.SetConsoleTitleW | ||
| 87 | _SetConsoleTitleW.argtypes = [ | ||
| 88 | wintypes.LPCWSTR | ||
| 89 | ] | ||
| 90 | _SetConsoleTitleW.restype = wintypes.BOOL | ||
| 91 | |||
| 92 | handles = { | ||
| 93 | STDOUT: _GetStdHandle(STDOUT), | ||
| 94 | STDERR: _GetStdHandle(STDERR), | ||
| 95 | } | ||
| 96 | |||
| 97 | def _winapi_test(handle): | ||
| 98 | csbi = CONSOLE_SCREEN_BUFFER_INFO() | ||
| 99 | success = _GetConsoleScreenBufferInfo( | ||
| 100 | handle, byref(csbi)) | ||
| 101 | return bool(success) | ||
| 102 | |||
| 103 | def winapi_test(): | ||
| 104 | return any(_winapi_test(h) for h in handles.values()) | ||
| 105 | |||
| 106 | def GetConsoleScreenBufferInfo(stream_id=STDOUT): | ||
| 107 | handle = handles[stream_id] | ||
| 108 | csbi = CONSOLE_SCREEN_BUFFER_INFO() | ||
| 109 | success = _GetConsoleScreenBufferInfo( | ||
| 110 | handle, byref(csbi)) | ||
| 111 | return csbi | ||
| 112 | |||
| 113 | def SetConsoleTextAttribute(stream_id, attrs): | ||
| 114 | handle = handles[stream_id] | ||
| 115 | return _SetConsoleTextAttribute(handle, attrs) | ||
| 116 | |||
| 117 | def SetConsoleCursorPosition(stream_id, position, adjust=True): | ||
| 118 | position = COORD(*position) | ||
| 119 | # If the position is out of range, do nothing. | ||
| 120 | if position.Y <= 0 or position.X <= 0: | ||
| 121 | return | ||
| 122 | # Adjust for Windows' SetConsoleCursorPosition: | ||
| 123 | # 1. being 0-based, while ANSI is 1-based. | ||
| 124 | # 2. expecting (x,y), while ANSI uses (y,x). | ||
| 125 | adjusted_position = COORD(position.Y - 1, position.X - 1) | ||
| 126 | if adjust: | ||
| 127 | # Adjust for viewport's scroll position | ||
| 128 | sr = GetConsoleScreenBufferInfo(STDOUT).srWindow | ||
| 129 | adjusted_position.Y += sr.Top | ||
| 130 | adjusted_position.X += sr.Left | ||
| 131 | # Resume normal processing | ||
| 132 | handle = handles[stream_id] | ||
| 133 | return _SetConsoleCursorPosition(handle, adjusted_position) | ||
| 134 | |||
| 135 | def FillConsoleOutputCharacter(stream_id, char, length, start): | ||
| 136 | handle = handles[stream_id] | ||
| 137 | char = c_char(char.encode()) | ||
| 138 | length = wintypes.DWORD(length) | ||
| 139 | num_written = wintypes.DWORD(0) | ||
| 140 | # Note that this is hard-coded for ANSI (vs wide) bytes. | ||
| 141 | success = _FillConsoleOutputCharacterA( | ||
| 142 | handle, char, length, start, byref(num_written)) | ||
| 143 | return num_written.value | ||
| 144 | |||
| 145 | def FillConsoleOutputAttribute(stream_id, attr, length, start): | ||
| 146 | ''' FillConsoleOutputAttribute( hConsole, csbi.wAttributes, dwConSize, coordScreen, &cCharsWritten )''' | ||
| 147 | handle = handles[stream_id] | ||
| 148 | attribute = wintypes.WORD(attr) | ||
| 149 | length = wintypes.DWORD(length) | ||
| 150 | num_written = wintypes.DWORD(0) | ||
| 151 | # Note that this is hard-coded for ANSI (vs wide) bytes. | ||
| 152 | return _FillConsoleOutputAttribute( | ||
| 153 | handle, attribute, length, start, byref(num_written)) | ||
| 154 | |||
| 155 | def SetConsoleTitle(title): | ||
| 156 | return _SetConsoleTitleW(title) | ||
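A hedged sketch of gating on these bindings: windll is None off Windows, and winapi_test() reports whether the console handles actually respond, the same check AnsiToWin32 uses to decide whether conversion is possible:

    from pip._vendor.colorama import win32

    if win32.windll is None:
        print('not on Windows: ANSI handling is left to the terminal')
    elif win32.winapi_test():
        info = win32.GetConsoleScreenBufferInfo(win32.STDOUT)
        print('console buffer: %d x %d' % (info.dwSize.X, info.dwSize.Y))
    else:
        print('Windows, but no usable console (e.g. Cygwin terminal)')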
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/colorama/winterm.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/colorama/winterm.py new file mode 100644 index 0000000..385862e --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/colorama/winterm.py | |||
| @@ -0,0 +1,162 @@ | |||
| 1 | # Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. | ||
| 2 | from . import win32 | ||
| 3 | |||
| 4 | |||
| 5 | # from wincon.h | ||
| 6 | class WinColor(object): | ||
| 7 | BLACK = 0 | ||
| 8 | BLUE = 1 | ||
| 9 | GREEN = 2 | ||
| 10 | CYAN = 3 | ||
| 11 | RED = 4 | ||
| 12 | MAGENTA = 5 | ||
| 13 | YELLOW = 6 | ||
| 14 | GREY = 7 | ||
| 15 | |||
| 16 | # from wincon.h | ||
| 17 | class WinStyle(object): | ||
| 18 | NORMAL = 0x00 # dim text, dim background | ||
| 19 | BRIGHT = 0x08 # bright text, dim background | ||
| 20 | BRIGHT_BACKGROUND = 0x80 # dim text, bright background | ||
| 21 | |||
| 22 | class WinTerm(object): | ||
| 23 | |||
| 24 | def __init__(self): | ||
| 25 | self._default = win32.GetConsoleScreenBufferInfo(win32.STDOUT).wAttributes | ||
| 26 | self.set_attrs(self._default) | ||
| 27 | self._default_fore = self._fore | ||
| 28 | self._default_back = self._back | ||
| 29 | self._default_style = self._style | ||
| 30 | # In order to emulate LIGHT_EX in windows, we borrow the BRIGHT style. | ||
| 31 | # So that LIGHT_EX colors and BRIGHT style do not clobber each other, | ||
| 32 | # we track them separately, since LIGHT_EX is overwritten by Fore/Back | ||
| 33 | # and BRIGHT is overwritten by Style codes. | ||
| 34 | self._light = 0 | ||
| 35 | |||
| 36 | def get_attrs(self): | ||
| 37 | return self._fore + self._back * 16 + (self._style | self._light) | ||
| 38 | |||
| 39 | def set_attrs(self, value): | ||
| 40 | self._fore = value & 7 | ||
| 41 | self._back = (value >> 4) & 7 | ||
| 42 | self._style = value & (WinStyle.BRIGHT | WinStyle.BRIGHT_BACKGROUND) | ||
| 43 | |||
| 44 | def reset_all(self, on_stderr=None): | ||
| 45 | self.set_attrs(self._default) | ||
| 46 | self.set_console(attrs=self._default) | ||
| 47 | |||
| 48 | def fore(self, fore=None, light=False, on_stderr=False): | ||
| 49 | if fore is None: | ||
| 50 | fore = self._default_fore | ||
| 51 | self._fore = fore | ||
| 52 | # Emulate LIGHT_EX with BRIGHT Style | ||
| 53 | if light: | ||
| 54 | self._light |= WinStyle.BRIGHT | ||
| 55 | else: | ||
| 56 | self._light &= ~WinStyle.BRIGHT | ||
| 57 | self.set_console(on_stderr=on_stderr) | ||
| 58 | |||
| 59 | def back(self, back=None, light=False, on_stderr=False): | ||
| 60 | if back is None: | ||
| 61 | back = self._default_back | ||
| 62 | self._back = back | ||
| 63 | # Emulate LIGHT_EX with BRIGHT_BACKGROUND Style | ||
| 64 | if light: | ||
| 65 | self._light |= WinStyle.BRIGHT_BACKGROUND | ||
| 66 | else: | ||
| 67 | self._light &= ~WinStyle.BRIGHT_BACKGROUND | ||
| 68 | self.set_console(on_stderr=on_stderr) | ||
| 69 | |||
| 70 | def style(self, style=None, on_stderr=False): | ||
| 71 | if style is None: | ||
| 72 | style = self._default_style | ||
| 73 | self._style = style | ||
| 74 | self.set_console(on_stderr=on_stderr) | ||
| 75 | |||
| 76 | def set_console(self, attrs=None, on_stderr=False): | ||
| 77 | if attrs is None: | ||
| 78 | attrs = self.get_attrs() | ||
| 79 | handle = win32.STDOUT | ||
| 80 | if on_stderr: | ||
| 81 | handle = win32.STDERR | ||
| 82 | win32.SetConsoleTextAttribute(handle, attrs) | ||
| 83 | |||
| 84 | def get_position(self, handle): | ||
| 85 | position = win32.GetConsoleScreenBufferInfo(handle).dwCursorPosition | ||
| 86 | # Because Windows coordinates are 0-based, | ||
| 87 | # and win32.SetConsoleCursorPosition expects 1-based. | ||
| 88 | position.X += 1 | ||
| 89 | position.Y += 1 | ||
| 90 | return position | ||
| 91 | |||
| 92 | def set_cursor_position(self, position=None, on_stderr=False): | ||
| 93 | if position is None: | ||
| 94 | # I'm not currently tracking the position, so there is no default. | ||
| 95 | # position = self.get_position() | ||
| 96 | return | ||
| 97 | handle = win32.STDOUT | ||
| 98 | if on_stderr: | ||
| 99 | handle = win32.STDERR | ||
| 100 | win32.SetConsoleCursorPosition(handle, position) | ||
| 101 | |||
| 102 | def cursor_adjust(self, x, y, on_stderr=False): | ||
| 103 | handle = win32.STDOUT | ||
| 104 | if on_stderr: | ||
| 105 | handle = win32.STDERR | ||
| 106 | position = self.get_position(handle) | ||
| 107 | adjusted_position = (position.Y + y, position.X + x) | ||
| 108 | win32.SetConsoleCursorPosition(handle, adjusted_position, adjust=False) | ||
| 109 | |||
| 110 | def erase_screen(self, mode=0, on_stderr=False): | ||
| 111 | # 0 should clear from the cursor to the end of the screen. | ||
| 112 | # 1 should clear from the cursor to the beginning of the screen. | ||
| 113 | # 2 should clear the entire screen, and move cursor to (1,1) | ||
| 114 | handle = win32.STDOUT | ||
| 115 | if on_stderr: | ||
| 116 | handle = win32.STDERR | ||
| 117 | csbi = win32.GetConsoleScreenBufferInfo(handle) | ||
| 118 | # get the number of character cells in the current buffer | ||
| 119 | cells_in_screen = csbi.dwSize.X * csbi.dwSize.Y | ||
| 120 | # get number of character cells before current cursor position | ||
| 121 | cells_before_cursor = csbi.dwSize.X * csbi.dwCursorPosition.Y + csbi.dwCursorPosition.X | ||
| 122 | if mode == 0: | ||
| 123 | from_coord = csbi.dwCursorPosition | ||
| 124 | cells_to_erase = cells_in_screen - cells_before_cursor | ||
| 125 | if mode == 1: | ||
| 126 | from_coord = win32.COORD(0, 0) | ||
| 127 | cells_to_erase = cells_before_cursor | ||
| 128 | elif mode == 2: | ||
| 129 | from_coord = win32.COORD(0, 0) | ||
| 130 | cells_to_erase = cells_in_screen | ||
| 131 | # fill the entire screen with blanks | ||
| 132 | win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord) | ||
| 133 | # now set the buffer's attributes accordingly | ||
| 134 | win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord) | ||
| 135 | if mode == 2: | ||
| 136 | # put the cursor where needed | ||
| 137 | win32.SetConsoleCursorPosition(handle, (1, 1)) | ||
| 138 | |||
| 139 | def erase_line(self, mode=0, on_stderr=False): | ||
| 140 | # 0 should clear from the cursor to the end of the line. | ||
| 141 | # 1 should clear from the cursor to the beginning of the line. | ||
| 142 | # 2 should clear the entire line. | ||
| 143 | handle = win32.STDOUT | ||
| 144 | if on_stderr: | ||
| 145 | handle = win32.STDERR | ||
| 146 | csbi = win32.GetConsoleScreenBufferInfo(handle) | ||
| 147 | if mode == 0: | ||
| 148 | from_coord = csbi.dwCursorPosition | ||
| 149 | cells_to_erase = csbi.dwSize.X - csbi.dwCursorPosition.X | ||
| 150 | if mode == 1: | ||
| 151 | from_coord = win32.COORD(0, csbi.dwCursorPosition.Y) | ||
| 152 | cells_to_erase = csbi.dwCursorPosition.X | ||
| 153 | elif mode == 2: | ||
| 154 | from_coord = win32.COORD(0, csbi.dwCursorPosition.Y) | ||
| 155 | cells_to_erase = csbi.dwSize.X | ||
| 156 | # fill the entire screen with blanks | ||
| 157 | win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord) | ||
| 158 | # now set the buffer's attributes accordingly | ||
| 159 | win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord) | ||
| 160 | |||
| 161 | def set_title(self, title): | ||
| 162 | win32.SetConsoleTitle(title) | ||
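The packing in get_attrs()/set_attrs() above is plain bit arithmetic on the console attribute WORD; a small sketch with no Windows dependency, reusing the constants from this file:

    from pip._vendor.colorama.winterm import WinColor, WinStyle

    # low 3 bits: foreground; bits 4-6: background;
    # 0x08: BRIGHT text; 0x80: BRIGHT_BACKGROUND
    attrs = WinColor.RED + WinColor.BLUE * 16 + WinStyle.BRIGHT
    assert attrs & 7 == WinColor.RED          # foreground recovered
    assert (attrs >> 4) & 7 == WinColor.BLUE  # background recovered
    assert attrs & WinStyle.BRIGHT            # intensity bit set
    print(hex(attrs))                         # 0x1c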
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/__init__.py new file mode 100644 index 0000000..9430718 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/__init__.py | |||
| @@ -0,0 +1,23 @@ | |||
| 1 | # -*- coding: utf-8 -*- | ||
| 2 | # | ||
| 3 | # Copyright (C) 2012-2017 Vinay Sajip. | ||
| 4 | # Licensed to the Python Software Foundation under a contributor agreement. | ||
| 5 | # See LICENSE.txt and CONTRIBUTORS.txt. | ||
| 6 | # | ||
| 7 | import logging | ||
| 8 | |||
| 9 | __version__ = '0.2.7' | ||
| 10 | |||
| 11 | class DistlibException(Exception): | ||
| 12 | pass | ||
| 13 | |||
| 14 | try: | ||
| 15 | from logging import NullHandler | ||
| 16 | except ImportError: # pragma: no cover | ||
| 17 | class NullHandler(logging.Handler): | ||
| 18 | def handle(self, record): pass | ||
| 19 | def emit(self, record): pass | ||
| 20 | def createLock(self): self.lock = None | ||
| 21 | |||
| 22 | logger = logging.getLogger(__name__) | ||
| 23 | logger.addHandler(NullHandler()) | ||
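The NullHandler attached above is the standard "libraries stay silent until the application configures logging" pattern; a minimal sketch of opting in, assuming this vendored package path:

    import logging
    from pip._vendor import distlib   # import wires up the NullHandler

    logging.basicConfig(level=logging.DEBUG)  # application-level opt-in
    logging.getLogger('pip._vendor.distlib').debug('now visible')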
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/_backport/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/_backport/__init__.py new file mode 100644 index 0000000..e6143f1 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/_backport/__init__.py | |||
| @@ -0,0 +1,6 @@ | |||
| 1 | """Modules copied from Python 3 standard libraries, for internal use only. | ||
| 2 | |||
| 3 | Individual classes and functions are found in distlib._backport.misc. Intended | ||
| 4 | usage is to always import things missing from Python 3.1 from that module: the | ||
| 5 | built-in/stdlib objects will be used if found. | ||
| 6 | """ | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/_backport/misc.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/_backport/misc.py new file mode 100644 index 0000000..6eb7b86 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/_backport/misc.py | |||
| @@ -0,0 +1,41 @@ | |||
| 1 | # -*- coding: utf-8 -*- | ||
| 2 | # | ||
| 3 | # Copyright (C) 2012 The Python Software Foundation. | ||
| 4 | # See LICENSE.txt and CONTRIBUTORS.txt. | ||
| 5 | # | ||
| 6 | """Backports for individual classes and functions.""" | ||
| 7 | |||
| 8 | import os | ||
| 9 | import sys | ||
| 10 | |||
| 11 | __all__ = ['cache_from_source', 'callable', 'fsencode'] | ||
| 12 | |||
| 13 | |||
| 14 | try: | ||
| 15 | from imp import cache_from_source | ||
| 16 | except ImportError: | ||
| 17 | def cache_from_source(py_file, debug=__debug__): | ||
| 18 | ext = debug and 'c' or 'o' | ||
| 19 | return py_file + ext | ||
| 20 | |||
| 21 | |||
| 22 | try: | ||
| 23 | callable = callable | ||
| 24 | except NameError: | ||
| 25 | from collections import Callable | ||
| 26 | |||
| 27 | def callable(obj): | ||
| 28 | return isinstance(obj, Callable) | ||
| 29 | |||
| 30 | |||
| 31 | try: | ||
| 32 | fsencode = os.fsencode | ||
| 33 | except AttributeError: | ||
| 34 | def fsencode(filename): | ||
| 35 | if isinstance(filename, bytes): | ||
| 36 | return filename | ||
| 37 | elif isinstance(filename, str): | ||
| 38 | return filename.encode(sys.getfilesystemencoding()) | ||
| 39 | else: | ||
| 40 | raise TypeError("expect bytes or str, not %s" % | ||
| 41 | type(filename).__name__) | ||
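A short sketch of the fallback behaviour above: fsencode() passes bytes through, encodes str with the filesystem encoding, and rejects everything else:

    from pip._vendor.distlib._backport.misc import fsencode

    print(fsencode(b'already-bytes'))  # returned unchanged
    print(fsencode('caf\xe9'))         # e.g. b'caf\xc3\xa9' under UTF-8
    try:
        fsencode(42)
    except TypeError as exc:
        print('rejected:', exc)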
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/_backport/shutil.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/_backport/shutil.py new file mode 100644 index 0000000..becbfd7 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/_backport/shutil.py | |||
| @@ -0,0 +1,761 @@ | |||
| 1 | # -*- coding: utf-8 -*- | ||
| 2 | # | ||
| 3 | # Copyright (C) 2012 The Python Software Foundation. | ||
| 4 | # See LICENSE.txt and CONTRIBUTORS.txt. | ||
| 5 | # | ||
| 6 | """Utility functions for copying and archiving files and directory trees. | ||
| 7 | |||
| 8 | XXX The functions here don't copy the resource fork or other metadata on Mac. | ||
| 9 | |||
| 10 | """ | ||
| 11 | |||
| 12 | import os | ||
| 13 | import sys | ||
| 14 | import stat | ||
| 15 | from os.path import abspath | ||
| 16 | import fnmatch | ||
| 17 | import collections | ||
| 18 | import errno | ||
| 19 | from . import tarfile | ||
| 20 | |||
| 21 | try: | ||
| 22 | import bz2 | ||
| 23 | _BZ2_SUPPORTED = True | ||
| 24 | except ImportError: | ||
| 25 | _BZ2_SUPPORTED = False | ||
| 26 | |||
| 27 | try: | ||
| 28 | from pwd import getpwnam | ||
| 29 | except ImportError: | ||
| 30 | getpwnam = None | ||
| 31 | |||
| 32 | try: | ||
| 33 | from grp import getgrnam | ||
| 34 | except ImportError: | ||
| 35 | getgrnam = None | ||
| 36 | |||
| 37 | __all__ = ["copyfileobj", "copyfile", "copymode", "copystat", "copy", "copy2", | ||
| 38 | "copytree", "move", "rmtree", "Error", "SpecialFileError", | ||
| 39 | "ExecError", "make_archive", "get_archive_formats", | ||
| 40 | "register_archive_format", "unregister_archive_format", | ||
| 41 | "get_unpack_formats", "register_unpack_format", | ||
| 42 | "unregister_unpack_format", "unpack_archive", "ignore_patterns"] | ||
| 43 | |||
| 44 | class Error(EnvironmentError): | ||
| 45 | pass | ||
| 46 | |||
| 47 | class SpecialFileError(EnvironmentError): | ||
| 48 | """Raised when trying to do a kind of operation (e.g. copying) which is | ||
| 49 | not supported on a special file (e.g. a named pipe)""" | ||
| 50 | |||
| 51 | class ExecError(EnvironmentError): | ||
| 52 | """Raised when a command could not be executed""" | ||
| 53 | |||
| 54 | class ReadError(EnvironmentError): | ||
| 55 | """Raised when an archive cannot be read""" | ||
| 56 | |||
| 57 | class RegistryError(Exception): | ||
| 58 | """Raised when a registry operation with the archiving | ||
| 59 | and unpacking registries fails""" | ||
| 60 | |||
| 61 | |||
| 62 | try: | ||
| 63 | WindowsError | ||
| 64 | except NameError: | ||
| 65 | WindowsError = None | ||
| 66 | |||
| 67 | def copyfileobj(fsrc, fdst, length=16*1024): | ||
| 68 | """copy data from file-like object fsrc to file-like object fdst""" | ||
| 69 | while 1: | ||
| 70 | buf = fsrc.read(length) | ||
| 71 | if not buf: | ||
| 72 | break | ||
| 73 | fdst.write(buf) | ||
| 74 | |||
| 75 | def _samefile(src, dst): | ||
| 76 | # Macintosh, Unix. | ||
| 77 | if hasattr(os.path, 'samefile'): | ||
| 78 | try: | ||
| 79 | return os.path.samefile(src, dst) | ||
| 80 | except OSError: | ||
| 81 | return False | ||
| 82 | |||
| 83 | # All other platforms: check for same pathname. | ||
| 84 | return (os.path.normcase(os.path.abspath(src)) == | ||
| 85 | os.path.normcase(os.path.abspath(dst))) | ||
| 86 | |||
| 87 | def copyfile(src, dst): | ||
| 88 | """Copy data from src to dst""" | ||
| 89 | if _samefile(src, dst): | ||
| 90 | raise Error("`%s` and `%s` are the same file" % (src, dst)) | ||
| 91 | |||
| 92 | for fn in [src, dst]: | ||
| 93 | try: | ||
| 94 | st = os.stat(fn) | ||
| 95 | except OSError: | ||
| 96 | # File most likely does not exist | ||
| 97 | pass | ||
| 98 | else: | ||
| 99 | # XXX What about other special files? (sockets, devices...) | ||
| 100 | if stat.S_ISFIFO(st.st_mode): | ||
| 101 | raise SpecialFileError("`%s` is a named pipe" % fn) | ||
| 102 | |||
| 103 | with open(src, 'rb') as fsrc: | ||
| 104 | with open(dst, 'wb') as fdst: | ||
| 105 | copyfileobj(fsrc, fdst) | ||
| 106 | |||
| 107 | def copymode(src, dst): | ||
| 108 | """Copy mode bits from src to dst""" | ||
| 109 | if hasattr(os, 'chmod'): | ||
| 110 | st = os.stat(src) | ||
| 111 | mode = stat.S_IMODE(st.st_mode) | ||
| 112 | os.chmod(dst, mode) | ||
| 113 | |||
| 114 | def copystat(src, dst): | ||
| 115 | """Copy all stat info (mode bits, atime, mtime, flags) from src to dst""" | ||
| 116 | st = os.stat(src) | ||
| 117 | mode = stat.S_IMODE(st.st_mode) | ||
| 118 | if hasattr(os, 'utime'): | ||
| 119 | os.utime(dst, (st.st_atime, st.st_mtime)) | ||
| 120 | if hasattr(os, 'chmod'): | ||
| 121 | os.chmod(dst, mode) | ||
| 122 | if hasattr(os, 'chflags') and hasattr(st, 'st_flags'): | ||
| 123 | try: | ||
| 124 | os.chflags(dst, st.st_flags) | ||
| 125 | except OSError as why: | ||
| 126 | if (not hasattr(errno, 'EOPNOTSUPP') or | ||
| 127 | why.errno != errno.EOPNOTSUPP): | ||
| 128 | raise | ||
| 129 | |||
| 130 | def copy(src, dst): | ||
| 131 | """Copy data and mode bits ("cp src dst"). | ||
| 132 | |||
| 133 | The destination may be a directory. | ||
| 134 | |||
| 135 | """ | ||
| 136 | if os.path.isdir(dst): | ||
| 137 | dst = os.path.join(dst, os.path.basename(src)) | ||
| 138 | copyfile(src, dst) | ||
| 139 | copymode(src, dst) | ||
| 140 | |||
| 141 | def copy2(src, dst): | ||
| 142 | """Copy data and all stat info ("cp -p src dst"). | ||
| 143 | |||
| 144 | The destination may be a directory. | ||
| 145 | |||
| 146 | """ | ||
| 147 | if os.path.isdir(dst): | ||
| 148 | dst = os.path.join(dst, os.path.basename(src)) | ||
| 149 | copyfile(src, dst) | ||
| 150 | copystat(src, dst) | ||
| 151 | |||
| 152 | def ignore_patterns(*patterns): | ||
| 153 | """Function that can be used as copytree() ignore parameter. | ||
| 154 | |||
| 155 | Patterns is a sequence of glob-style patterns | ||
| 156 | that are used to exclude files""" | ||
| 157 | def _ignore_patterns(path, names): | ||
| 158 | ignored_names = [] | ||
| 159 | for pattern in patterns: | ||
| 160 | ignored_names.extend(fnmatch.filter(names, pattern)) | ||
| 161 | return set(ignored_names) | ||
| 162 | return _ignore_patterns | ||
| 163 | |||
| 164 | def copytree(src, dst, symlinks=False, ignore=None, copy_function=copy2, | ||
| 165 | ignore_dangling_symlinks=False): | ||
| 166 | """Recursively copy a directory tree. | ||
| 167 | |||
| 168 | The destination directory must not already exist. | ||
| 169 | If exception(s) occur, an Error is raised with a list of reasons. | ||
| 170 | |||
| 171 | If the optional symlinks flag is true, symbolic links in the | ||
| 172 | source tree result in symbolic links in the destination tree; if | ||
| 173 | it is false, the contents of the files pointed to by symbolic | ||
| 174 | links are copied. If the file pointed by the symlink doesn't | ||
| 175 | exist, an exception will be added in the list of errors raised in | ||
| 176 | an Error exception at the end of the copy process. | ||
| 177 | |||
| 178 | You can set the optional ignore_dangling_symlinks flag to true if you | ||
| 179 | want to silence this exception. Notice that this has no effect on | ||
| 180 | platforms that don't support os.symlink. | ||
| 181 | |||
| 182 | The optional ignore argument is a callable. If given, it | ||
| 183 | is called with the `src` parameter, which is the directory | ||
| 184 | being visited by copytree(), and `names` which is the list of | ||
| 185 | `src` contents, as returned by os.listdir(): | ||
| 186 | |||
| 187 | callable(src, names) -> ignored_names | ||
| 188 | |||
| 189 | Since copytree() is called recursively, the callable will be | ||
| 190 | called once for each directory that is copied. It returns a | ||
| 191 | list of names relative to the `src` directory that should | ||
| 192 | not be copied. | ||
| 193 | |||
| 194 | The optional copy_function argument is a callable that will be used | ||
| 195 | to copy each file. It will be called with the source path and the | ||
| 196 | destination path as arguments. By default, copy2() is used, but any | ||
| 197 | function that supports the same signature (like copy()) can be used. | ||
| 198 | |||
| 199 | """ | ||
| 200 | names = os.listdir(src) | ||
| 201 | if ignore is not None: | ||
| 202 | ignored_names = ignore(src, names) | ||
| 203 | else: | ||
| 204 | ignored_names = set() | ||
| 205 | |||
| 206 | os.makedirs(dst) | ||
| 207 | errors = [] | ||
| 208 | for name in names: | ||
| 209 | if name in ignored_names: | ||
| 210 | continue | ||
| 211 | srcname = os.path.join(src, name) | ||
| 212 | dstname = os.path.join(dst, name) | ||
| 213 | try: | ||
| 214 | if os.path.islink(srcname): | ||
| 215 | linkto = os.readlink(srcname) | ||
| 216 | if symlinks: | ||
| 217 | os.symlink(linkto, dstname) | ||
| 218 | else: | ||
| 219 | # ignore dangling symlink if the flag is on | ||
| 220 | if not os.path.exists(linkto) and ignore_dangling_symlinks: | ||
| 221 | continue | ||
| 222 | # otherwise let the copy occur. copy2 will raise an error | ||
| 223 | copy_function(srcname, dstname) | ||
| 224 | elif os.path.isdir(srcname): | ||
| 225 | copytree(srcname, dstname, symlinks, ignore, copy_function) | ||
| 226 | else: | ||
| 227 | # Will raise a SpecialFileError for unsupported file types | ||
| 228 | copy_function(srcname, dstname) | ||
| 229 | # catch the Error from the recursive copytree so that we can | ||
| 230 | # continue with other files | ||
| 231 | except Error as err: | ||
| 232 | errors.extend(err.args[0]) | ||
| 233 | except EnvironmentError as why: | ||
| 234 | errors.append((srcname, dstname, str(why))) | ||
| 235 | try: | ||
| 236 | copystat(src, dst) | ||
| 237 | except OSError as why: | ||
| 238 | if WindowsError is not None and isinstance(why, WindowsError): | ||
| 239 | # Copying file access times may fail on Windows | ||
| 240 | pass | ||
| 241 | else: | ||
| 242 | errors.append((src, dst, str(why))) | ||
| 243 | if errors: | ||
| 244 | raise Error(errors) | ||
| 245 | |||
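As a quick illustration of the ignore contract described above, here is a minimal usage sketch (temporary paths; it assumes this module's copytree and its helpers are in scope):

    import os
    import tempfile

    src = tempfile.mkdtemp()
    open(os.path.join(src, 'keep.txt'), 'w').close()
    open(os.path.join(src, 'skip.pyc'), 'w').close()

    def ignore_pyc(directory, names):
        # Matches the callable(src, names) -> ignored_names contract.
        return {n for n in names if n.endswith('.pyc')}

    dst = os.path.join(tempfile.mkdtemp(), 'copy')  # must not exist yet
    copytree(src, dst, ignore=ignore_pyc)
    assert sorted(os.listdir(dst)) == ['keep.txt']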
| 246 | def rmtree(path, ignore_errors=False, onerror=None): | ||
| 247 | """Recursively delete a directory tree. | ||
| 248 | |||
| 249 | If ignore_errors is set, errors are ignored; otherwise, if onerror | ||
| 250 | is set, it is called to handle the error with arguments (func, | ||
| 251 | path, exc_info) where func is os.listdir, os.remove, or os.rmdir; | ||
| 252 | path is the argument to that function that caused it to fail; and | ||
| 253 | exc_info is a tuple returned by sys.exc_info(). If ignore_errors | ||
| 254 | is false and onerror is None, an exception is raised. | ||
| 255 | |||
| 256 | """ | ||
| 257 | if ignore_errors: | ||
| 258 | def onerror(*args): | ||
| 259 | pass | ||
| 260 | elif onerror is None: | ||
| 261 | def onerror(*args): | ||
| 262 | raise | ||
| 263 | try: | ||
| 264 | if os.path.islink(path): | ||
| 265 | # symlinks to directories are forbidden, see bug #1669 | ||
| 266 | raise OSError("Cannot call rmtree on a symbolic link") | ||
| 267 | except OSError: | ||
| 268 | onerror(os.path.islink, path, sys.exc_info()) | ||
| 269 | # can't continue even if onerror hook returns | ||
| 270 | return | ||
| 271 | names = [] | ||
| 272 | try: | ||
| 273 | names = os.listdir(path) | ||
| 274 | except os.error: | ||
| 275 | onerror(os.listdir, path, sys.exc_info()) | ||
| 276 | for name in names: | ||
| 277 | fullname = os.path.join(path, name) | ||
| 278 | try: | ||
| 279 | mode = os.lstat(fullname).st_mode | ||
| 280 | except os.error: | ||
| 281 | mode = 0 | ||
| 282 | if stat.S_ISDIR(mode): | ||
| 283 | rmtree(fullname, ignore_errors, onerror) | ||
| 284 | else: | ||
| 285 | try: | ||
| 286 | os.remove(fullname) | ||
| 287 | except os.error: | ||
| 288 | onerror(os.remove, fullname, sys.exc_info()) | ||
| 289 | try: | ||
| 290 | os.rmdir(path) | ||
| 291 | except os.error: | ||
| 292 | onerror(os.rmdir, path, sys.exc_info()) | ||
| 293 | |||
| 294 | |||
| 295 | def _basename(path): | ||
| 296 | # A basename() variant which first strips the trailing slash, if present. | ||
| 297 | # Thus we always get the last component of the path, even for directories. | ||
| 298 | return os.path.basename(path.rstrip(os.path.sep)) | ||
| 299 | |||
| 300 | def move(src, dst): | ||
| 301 | """Recursively move a file or directory to another location. This is | ||
| 302 | similar to the Unix "mv" command. | ||
| 303 | |||
| 304 | If the destination is a directory or a symlink to a directory, the source | ||
| 305 | is moved inside the directory. The destination path must not already | ||
| 306 | exist. | ||
| 307 | |||
| 308 | If the destination already exists but is not a directory, it may be | ||
| 309 | overwritten depending on os.rename() semantics. | ||
| 310 | |||
| 311 | If the destination is on the current filesystem, then rename() is used. | ||
| 312 | Otherwise, src is copied to the destination and then removed. | ||
| 313 | A lot more could be done here... A look at mv.c shows a lot of | ||
| 314 | the issues this implementation glosses over. | ||
| 315 | |||
| 316 | """ | ||
| 317 | real_dst = dst | ||
| 318 | if os.path.isdir(dst): | ||
| 319 | if _samefile(src, dst): | ||
| 320 | # We might be on a case insensitive filesystem, | ||
| 321 | # perform the rename anyway. | ||
| 322 | os.rename(src, dst) | ||
| 323 | return | ||
| 324 | |||
| 325 | real_dst = os.path.join(dst, _basename(src)) | ||
| 326 | if os.path.exists(real_dst): | ||
| 327 | raise Error("Destination path '%s' already exists" % real_dst) | ||
| 328 | try: | ||
| 329 | os.rename(src, real_dst) | ||
| 330 | except OSError: | ||
| 331 | if os.path.isdir(src): | ||
| 332 | if _destinsrc(src, dst): | ||
| 333 | raise Error("Cannot move a directory '%s' into itself '%s'." % (src, dst)) | ||
| 334 | copytree(src, real_dst, symlinks=True) | ||
| 335 | rmtree(src) | ||
| 336 | else: | ||
| 337 | copy2(src, real_dst) | ||
| 338 | os.unlink(src) | ||
| 339 | |||
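A small sketch of the into-a-directory behaviour documented above (temporary paths, assuming move() from this module is in scope):

    import os
    import tempfile

    d = tempfile.mkdtemp()
    src = os.path.join(d, 'data.txt')
    open(src, 'w').close()
    target = os.path.join(d, 'archive')
    os.mkdir(target)

    move(src, target)  # destination is a directory, so the file moves inside it
    assert os.path.exists(os.path.join(target, 'data.txt'))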
| 340 | def _destinsrc(src, dst): | ||
| 341 | src = abspath(src) | ||
| 342 | dst = abspath(dst) | ||
| 343 | if not src.endswith(os.path.sep): | ||
| 344 | src += os.path.sep | ||
| 345 | if not dst.endswith(os.path.sep): | ||
| 346 | dst += os.path.sep | ||
| 347 | return dst.startswith(src) | ||
| 348 | |||
| 349 | def _get_gid(name): | ||
| 350 | """Returns a gid, given a group name.""" | ||
| 351 | if getgrnam is None or name is None: | ||
| 352 | return None | ||
| 353 | try: | ||
| 354 | result = getgrnam(name) | ||
| 355 | except KeyError: | ||
| 356 | result = None | ||
| 357 | if result is not None: | ||
| 358 | return result[2] | ||
| 359 | return None | ||
| 360 | |||
| 361 | def _get_uid(name): | ||
| 362 | """Returns a uid, given a user name.""" | ||
| 363 | if getpwnam is None or name is None: | ||
| 364 | return None | ||
| 365 | try: | ||
| 366 | result = getpwnam(name) | ||
| 367 | except KeyError: | ||
| 368 | result = None | ||
| 369 | if result is not None: | ||
| 370 | return result[2] | ||
| 371 | return None | ||
| 372 | |||
| 373 | def _make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0, | ||
| 374 | owner=None, group=None, logger=None): | ||
| 375 | """Create a (possibly compressed) tar file from all the files under | ||
| 376 | 'base_dir'. | ||
| 377 | |||
| 378 | 'compress' must be "gzip" (the default), "bzip2", or None. | ||
| 379 | |||
| 380 | 'owner' and 'group' can be used to define an owner and a group for the | ||
| 381 | archive that is being built. If not provided, the current owner and group | ||
| 382 | will be used. | ||
| 383 | |||
| 384 | The output tar file will be named 'base_name' + ".tar", possibly plus | ||
| 385 | the appropriate compression extension (".gz" or ".bz2"). | ||
| 386 | |||
| 387 | Returns the output filename. | ||
| 388 | """ | ||
| 389 | tar_compression = {'gzip': 'gz', None: ''} | ||
| 390 | compress_ext = {'gzip': '.gz'} | ||
| 391 | |||
| 392 | if _BZ2_SUPPORTED: | ||
| 393 | tar_compression['bzip2'] = 'bz2' | ||
| 394 | compress_ext['bzip2'] = '.bz2' | ||
| 395 | |||
| 396 | # validate the requested compression format before doing any work | ||
| 397 | if compress is not None and compress not in compress_ext: | ||
| 398 | raise ValueError("bad value for 'compress', or compression format not " | ||
| 399 | "supported: {0}".format(compress)) | ||
| 400 | |||
| 401 | archive_name = base_name + '.tar' + compress_ext.get(compress, '') | ||
| 402 | archive_dir = os.path.dirname(archive_name) | ||
| 403 | |||
| 404 | if not os.path.exists(archive_dir): | ||
| 405 | if logger is not None: | ||
| 406 | logger.info("creating %s", archive_dir) | ||
| 407 | if not dry_run: | ||
| 408 | os.makedirs(archive_dir) | ||
| 409 | |||
| 410 | # creating the tarball | ||
| 411 | if logger is not None: | ||
| 412 | logger.info('Creating tar archive') | ||
| 413 | |||
| 414 | uid = _get_uid(owner) | ||
| 415 | gid = _get_gid(group) | ||
| 416 | |||
| 417 | def _set_uid_gid(tarinfo): | ||
| 418 | if gid is not None: | ||
| 419 | tarinfo.gid = gid | ||
| 420 | tarinfo.gname = group | ||
| 421 | if uid is not None: | ||
| 422 | tarinfo.uid = uid | ||
| 423 | tarinfo.uname = owner | ||
| 424 | return tarinfo | ||
| 425 | |||
| 426 | if not dry_run: | ||
| 427 | tar = tarfile.open(archive_name, 'w|%s' % tar_compression[compress]) | ||
| 428 | try: | ||
| 429 | tar.add(base_dir, filter=_set_uid_gid) | ||
| 430 | finally: | ||
| 431 | tar.close() | ||
| 432 | |||
| 433 | return archive_name | ||
| 434 | |||
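The owner/group override above hinges on tarfile's filter= hook; the following standalone sketch shows the same idea with the stdlib tarfile module directly (hypothetical paths and ownership values):

    import os
    import tarfile
    import tempfile

    src = tempfile.mkdtemp()
    open(os.path.join(src, 'f.txt'), 'w').close()

    def chown_filter(tarinfo):
        # Rewrite ownership metadata on every member, like _set_uid_gid above.
        tarinfo.uid = tarinfo.gid = 0
        tarinfo.uname = tarinfo.gname = 'root'
        return tarinfo

    out = os.path.join(tempfile.mkdtemp(), 'example.tar.gz')
    with tarfile.open(out, 'w:gz') as tar:
        tar.add(src, arcname='data', filter=chown_filter)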
| 435 | def _call_external_zip(base_dir, zip_filename, verbose=False, dry_run=False): | ||
| 436 | # XXX see if we want to keep an external call here | ||
| 437 | if verbose: | ||
| 438 | zipoptions = "-r" | ||
| 439 | else: | ||
| 440 | zipoptions = "-rq" | ||
| 441 | from distutils.errors import DistutilsExecError | ||
| 442 | from distutils.spawn import spawn | ||
| 443 | try: | ||
| 444 | spawn(["zip", zipoptions, zip_filename, base_dir], dry_run=dry_run) | ||
| 445 | except DistutilsExecError: | ||
| 446 | # XXX really should distinguish between "couldn't find | ||
| 447 | # external 'zip' command" and "zip failed". | ||
| 448 | raise ExecError(("unable to create zip file '%s': " | ||
| 449 | "could neither import the 'zipfile' module nor " | ||
| 450 | "find a standalone zip utility") % zip_filename) | ||
| 451 | |||
| 452 | def _make_zipfile(base_name, base_dir, verbose=0, dry_run=0, logger=None): | ||
| 453 | """Create a zip file from all the files under 'base_dir'. | ||
| 454 | |||
| 455 | The output zip file will be named 'base_name' + ".zip". Uses either the | ||
| 456 | "zipfile" Python module (if available) or the InfoZIP "zip" utility | ||
| 457 | (if installed and found on the default search path). If neither tool is | ||
| 458 | available, raises ExecError. Returns the name of the output zip | ||
| 459 | file. | ||
| 460 | """ | ||
| 461 | zip_filename = base_name + ".zip" | ||
| 462 | archive_dir = os.path.dirname(base_name) | ||
| 463 | |||
| 464 | if not os.path.exists(archive_dir): | ||
| 465 | if logger is not None: | ||
| 466 | logger.info("creating %s", archive_dir) | ||
| 467 | if not dry_run: | ||
| 468 | os.makedirs(archive_dir) | ||
| 469 | |||
| 470 | # If zipfile module is not available, try spawning an external 'zip' | ||
| 471 | # command. | ||
| 472 | try: | ||
| 473 | import zipfile | ||
| 474 | except ImportError: | ||
| 475 | zipfile = None | ||
| 476 | |||
| 477 | if zipfile is None: | ||
| 478 | _call_external_zip(base_dir, zip_filename, verbose, dry_run) | ||
| 479 | else: | ||
| 480 | if logger is not None: | ||
| 481 | logger.info("creating '%s' and adding '%s' to it", | ||
| 482 | zip_filename, base_dir) | ||
| 483 | |||
| 484 | if not dry_run: | ||
| 485 | zip = zipfile.ZipFile(zip_filename, "w", | ||
| 486 | compression=zipfile.ZIP_DEFLATED) | ||
| 487 | |||
| 488 | for dirpath, dirnames, filenames in os.walk(base_dir): | ||
| 489 | for name in filenames: | ||
| 490 | path = os.path.normpath(os.path.join(dirpath, name)) | ||
| 491 | if os.path.isfile(path): | ||
| 492 | zip.write(path, path) | ||
| 493 | if logger is not None: | ||
| 494 | logger.info("adding '%s'", path) | ||
| 495 | zip.close() | ||
| 496 | |||
| 497 | return zip_filename | ||
| 498 | |||
| 499 | _ARCHIVE_FORMATS = { | ||
| 500 | 'gztar': (_make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"), | ||
| 501 | # 'bztar' is only registered below, when bz2 support is available | ||
| 502 | 'tar': (_make_tarball, [('compress', None)], "uncompressed tar file"), | ||
| 503 | 'zip': (_make_zipfile, [], "ZIP file"), | ||
| 504 | } | ||
| 505 | |||
| 506 | if _BZ2_SUPPORTED: | ||
| 507 | _ARCHIVE_FORMATS['bztar'] = (_make_tarball, [('compress', 'bzip2')], | ||
| 508 | "bzip2'ed tar-file") | ||
| 509 | |||
| 510 | def get_archive_formats(): | ||
| 511 | """Returns a list of supported formats for archiving. | ||
| 512 | |||
| 513 | Each element of the returned sequence is a tuple (name, description) | ||
| 514 | """ | ||
| 515 | formats = [(name, registry[2]) for name, registry in | ||
| 516 | _ARCHIVE_FORMATS.items()] | ||
| 517 | formats.sort() | ||
| 518 | return formats | ||
| 519 | |||
| 520 | def register_archive_format(name, function, extra_args=None, description=''): | ||
| 521 | """Registers an archive format. | ||
| 522 | |||
| 523 | name is the name of the format. function is the callable that will be | ||
| 524 | used to create archives. If provided, extra_args is a sequence of | ||
| 525 | (name, value) tuples that will be passed as arguments to the callable. | ||
| 526 | description can be provided to describe the format, and will be returned | ||
| 527 | by the get_archive_formats() function. | ||
| 528 | """ | ||
| 529 | if extra_args is None: | ||
| 530 | extra_args = [] | ||
| 531 | if not isinstance(function, collections.Callable): | ||
| 532 | raise TypeError('The %s object is not callable' % function) | ||
| 533 | if not isinstance(extra_args, (tuple, list)): | ||
| 534 | raise TypeError('extra_args needs to be a sequence') | ||
| 535 | for element in extra_args: | ||
| 536 | if not isinstance(element, (tuple, list)) or len(element) != 2: | ||
| 537 | raise TypeError('extra_args elements are: (arg_name, value)') | ||
| 538 | |||
| 539 | _ARCHIVE_FORMATS[name] = (function, extra_args, description) | ||
| 540 | |||
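A toy registration sketch: _make_listing is a hypothetical archiver that satisfies the contract above (it accepts base_name and base_dir plus the keyword arguments make_archive forwards, and returns the output filename):

    import os

    def _make_listing(base_name, base_dir, dry_run=0, logger=None, **kwargs):
        # Not a real archive: just writes a plain-text listing of base_dir.
        out = base_name + '.txt'
        if not dry_run:
            with open(out, 'w') as f:
                for root, dirs, files in os.walk(base_dir):
                    for name in files:
                        f.write(os.path.join(root, name) + '\n')
        return out

    register_archive_format('listing', _make_listing,
                            description='plain-text file listing')
    # make_archive('out', 'listing', root_dir='.') would now produce out.txt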
| 541 | def unregister_archive_format(name): | ||
| 542 | del _ARCHIVE_FORMATS[name] | ||
| 543 | |||
| 544 | def make_archive(base_name, format, root_dir=None, base_dir=None, verbose=0, | ||
| 545 | dry_run=0, owner=None, group=None, logger=None): | ||
| 546 | """Create an archive file (e.g. zip or tar). | ||
| 547 | |||
| 548 | 'base_name' is the name of the file to create, minus any format-specific | ||
| 549 | extension; 'format' is the archive format: one of "zip", "tar", "bztar" | ||
| 550 | or "gztar". | ||
| 551 | |||
| 552 | 'root_dir' is a directory that will be the root directory of the | ||
| 553 | archive; i.e. we typically chdir into 'root_dir' before creating the | ||
| 554 | archive. 'base_dir' is the directory where we start archiving from; | ||
| 555 | i.e. 'base_dir' will be the common prefix of all files and | ||
| 556 | directories in the archive. 'root_dir' and 'base_dir' both default | ||
| 557 | to the current directory. Returns the name of the archive file. | ||
| 558 | |||
| 559 | 'owner' and 'group' are used when creating a tar archive. By default, | ||
| 560 | uses the current owner and group. | ||
| 561 | """ | ||
| 562 | save_cwd = os.getcwd() | ||
| 563 | if root_dir is not None: | ||
| 564 | if logger is not None: | ||
| 565 | logger.debug("changing into '%s'", root_dir) | ||
| 566 | base_name = os.path.abspath(base_name) | ||
| 567 | if not dry_run: | ||
| 568 | os.chdir(root_dir) | ||
| 569 | |||
| 570 | if base_dir is None: | ||
| 571 | base_dir = os.curdir | ||
| 572 | |||
| 573 | kwargs = {'dry_run': dry_run, 'logger': logger} | ||
| 574 | |||
| 575 | try: | ||
| 576 | format_info = _ARCHIVE_FORMATS[format] | ||
| 577 | except KeyError: | ||
| 578 | raise ValueError("unknown archive format '%s'" % format) | ||
| 579 | |||
| 580 | func = format_info[0] | ||
| 581 | for arg, val in format_info[1]: | ||
| 582 | kwargs[arg] = val | ||
| 583 | |||
| 584 | if format != 'zip': | ||
| 585 | kwargs['owner'] = owner | ||
| 586 | kwargs['group'] = group | ||
| 587 | |||
| 588 | try: | ||
| 589 | filename = func(base_name, base_dir, **kwargs) | ||
| 590 | finally: | ||
| 591 | if root_dir is not None: | ||
| 592 | if logger is not None: | ||
| 593 | logger.debug("changing back to '%s'", save_cwd) | ||
| 594 | os.chdir(save_cwd) | ||
| 595 | |||
| 596 | return filename | ||
| 597 | |||
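A minimal make_archive() usage sketch with temporary paths (the root_dir/base_dir split controls the path prefix stored inside the archive):

    import os
    import tempfile

    root = tempfile.mkdtemp()
    os.mkdir(os.path.join(root, 'pkg'))
    open(os.path.join(root, 'pkg', 'a.txt'), 'w').close()

    # Creates <root>/pkg.tar.gz whose members are rooted at 'pkg/'.
    out = make_archive(os.path.join(root, 'pkg'), 'gztar',
                       root_dir=root, base_dir='pkg')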
| 598 | |||
| 599 | def get_unpack_formats(): | ||
| 600 | """Returns a list of supported formats for unpacking. | ||
| 601 | |||
| 602 | Each element of the returned sequence is a tuple | ||
| 603 | (name, extensions, description) | ||
| 604 | """ | ||
| 605 | formats = [(name, info[0], info[3]) for name, info in | ||
| 606 | _UNPACK_FORMATS.items()] | ||
| 607 | formats.sort() | ||
| 608 | return formats | ||
| 609 | |||
| 610 | def _check_unpack_options(extensions, function, extra_args): | ||
| 611 | """Checks what gets registered as an unpacker.""" | ||
| 612 | # first make sure no other unpacker is registered for this extension | ||
| 613 | existing_extensions = {} | ||
| 614 | for name, info in _UNPACK_FORMATS.items(): | ||
| 615 | for ext in info[0]: | ||
| 616 | existing_extensions[ext] = name | ||
| 617 | |||
| 618 | for extension in extensions: | ||
| 619 | if extension in existing_extensions: | ||
| 620 | msg = '%s is already registered for "%s"' | ||
| 621 | raise RegistryError(msg % (extension, | ||
| 622 | existing_extensions[extension])) | ||
| 623 | |||
| 624 | if not isinstance(function, collections.Callable): | ||
| 625 | raise TypeError('The registered function must be a callable') | ||
| 626 | |||
| 627 | |||
| 628 | def register_unpack_format(name, extensions, function, extra_args=None, | ||
| 629 | description=''): | ||
| 630 | """Registers an unpack format. | ||
| 631 | |||
| 632 | `name` is the name of the format. `extensions` is a list of extensions | ||
| 633 | corresponding to the format. | ||
| 634 | |||
| 635 | `function` is the callable that will be used to unpack archives. It | ||
| 636 | will be called with the path of the archive, followed by the | ||
| 637 | directory the archive must be extracted to. If it's unable to | ||
| 638 | handle an archive, it needs to raise a ReadError exception. | ||
| 639 | |||
| 640 | If provided, `extra_args` is a sequence of | ||
| 641 | (name, value) tuples that will be passed as arguments to the callable. | ||
| 642 | description can be provided to describe the format, and will be returned | ||
| 643 | by the get_unpack_formats() function. | ||
| 644 | """ | ||
| 645 | if extra_args is None: | ||
| 646 | extra_args = [] | ||
| 647 | _check_unpack_options(extensions, function, extra_args) | ||
| 648 | _UNPACK_FORMATS[name] = extensions, function, extra_args, description | ||
| 649 | |||
| 650 | def unregister_unpack_format(name): | ||
| 651 | """Removes the unpack format from the registry.""" | ||
| 652 | del _UNPACK_FORMATS[name] | ||
| 653 | |||
| 654 | def _ensure_directory(path): | ||
| 655 | """Ensure that the parent directory of `path` exists""" | ||
| 656 | dirname = os.path.dirname(path) | ||
| 657 | if not os.path.isdir(dirname): | ||
| 658 | os.makedirs(dirname) | ||
| 659 | |||
| 660 | def _unpack_zipfile(filename, extract_dir): | ||
| 661 | """Unpack zip `filename` to `extract_dir` | ||
| 662 | """ | ||
| 663 | try: | ||
| 664 | import zipfile | ||
| 665 | except ImportError: | ||
| 666 | raise ReadError('zipfile module is not available, cannot unpack this archive.') | ||
| 667 | |||
| 668 | if not zipfile.is_zipfile(filename): | ||
| 669 | raise ReadError("%s is not a zip file" % filename) | ||
| 670 | |||
| 671 | zip = zipfile.ZipFile(filename) | ||
| 672 | try: | ||
| 673 | for info in zip.infolist(): | ||
| 674 | name = info.filename | ||
| 675 | |||
| 676 | # don't extract absolute paths or ones with .. in them | ||
| 677 | if name.startswith('/') or '..' in name: | ||
| 678 | continue | ||
| 679 | |||
| 680 | target = os.path.join(extract_dir, *name.split('/')) | ||
| 681 | if not target: | ||
| 682 | continue | ||
| 683 | |||
| 684 | _ensure_directory(target) | ||
| 685 | if not name.endswith('/'): | ||
| 686 | # file | ||
| 687 | data = zip.read(info.filename) | ||
| 688 | f = open(target, 'wb') | ||
| 689 | try: | ||
| 690 | f.write(data) | ||
| 691 | finally: | ||
| 692 | f.close() | ||
| 693 | del data | ||
| 694 | finally: | ||
| 695 | zip.close() | ||
| 696 | |||
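The startswith('/') and '..' checks above are a path-traversal guard; this in-memory sketch (stdlib zipfile only, made-up member names) shows what they reject:

    import io
    import os
    import tempfile
    import zipfile

    buf = io.BytesIO()
    with zipfile.ZipFile(buf, 'w') as z:
        z.writestr('ok.txt', b'safe')
        z.writestr('../escape.txt', b'would land outside extract_dir')

    dest = tempfile.mkdtemp()
    with zipfile.ZipFile(buf) as z:
        for info in z.infolist():
            name = info.filename
            if name.startswith('/') or '..' in name:
                continue  # the same guard _unpack_zipfile applies
            with open(os.path.join(dest, name), 'wb') as f:
                f.write(z.read(info))

    assert os.listdir(dest) == ['ok.txt']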
| 697 | def _unpack_tarfile(filename, extract_dir): | ||
| 698 | """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir` | ||
| 699 | """ | ||
| 700 | try: | ||
| 701 | tarobj = tarfile.open(filename) | ||
| 702 | except tarfile.TarError: | ||
| 703 | raise ReadError( | ||
| 704 | "%s is not a compressed or uncompressed tar file" % filename) | ||
| 705 | try: | ||
| 706 | tarobj.extractall(extract_dir) | ||
| 707 | finally: | ||
| 708 | tarobj.close() | ||
| 709 | |||
| 710 | _UNPACK_FORMATS = { | ||
| 711 | 'gztar': (['.tar.gz', '.tgz'], _unpack_tarfile, [], "gzip'ed tar-file"), | ||
| 712 | 'tar': (['.tar'], _unpack_tarfile, [], "uncompressed tar file"), | ||
| 713 | 'zip': (['.zip'], _unpack_zipfile, [], "ZIP file") | ||
| 714 | } | ||
| 715 | |||
| 716 | if _BZ2_SUPPORTED: | ||
| 717 | _UNPACK_FORMATS['bztar'] = (['.bz2'], _unpack_tarfile, [], | ||
| 718 | "bzip2'ed tar-file") | ||
| 719 | |||
| 720 | def _find_unpack_format(filename): | ||
| 721 | for name, info in _UNPACK_FORMATS.items(): | ||
| 722 | for extension in info[0]: | ||
| 723 | if filename.endswith(extension): | ||
| 724 | return name | ||
| 725 | return None | ||
| 726 | |||
| 727 | def unpack_archive(filename, extract_dir=None, format=None): | ||
| 728 | """Unpack an archive. | ||
| 729 | |||
| 730 | `filename` is the name of the archive. | ||
| 731 | |||
| 732 | `extract_dir` is the name of the target directory, where the archive | ||
| 733 | is unpacked. If not provided, the current working directory is used. | ||
| 734 | |||
| 735 | `format` is the archive format: one of "zip", "tar", or "gztar", or any | ||
| 736 | other registered format. If not provided, unpack_archive will use the | ||
| 737 | filename extension and see if an unpacker was registered for that | ||
| 738 | extension. | ||
| 739 | |||
| 740 | An unknown `format` raises ValueError; an unrecognized extension raises ReadError. | ||
| 741 | """ | ||
| 742 | if extract_dir is None: | ||
| 743 | extract_dir = os.getcwd() | ||
| 744 | |||
| 745 | if format is not None: | ||
| 746 | try: | ||
| 747 | format_info = _UNPACK_FORMATS[format] | ||
| 748 | except KeyError: | ||
| 749 | raise ValueError("Unknown unpack format '{0}'".format(format)) | ||
| 750 | |||
| 751 | func = format_info[1] | ||
| 752 | func(filename, extract_dir, **dict(format_info[2])) | ||
| 753 | else: | ||
| 754 | # we need to look at the registered unpackers' supported extensions | ||
| 755 | format = _find_unpack_format(filename) | ||
| 756 | if format is None: | ||
| 757 | raise ReadError("Unknown archive format '{0}'".format(filename)) | ||
| 758 | |||
| 759 | func = _UNPACK_FORMATS[format][1] | ||
| 760 | kwargs = dict(_UNPACK_FORMATS[format][2]) | ||
| 761 | func(filename, extract_dir, **kwargs) | ||
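To round off the module, a sketch pairing make_archive() with unpack_archive() (temporary paths; assumes both functions above are in scope):

    import os
    import tempfile

    work = tempfile.mkdtemp()
    os.mkdir(os.path.join(work, 'src'))
    open(os.path.join(work, 'src', 'f.txt'), 'w').close()

    archive = make_archive(os.path.join(work, 'out'), 'zip',
                           root_dir=work, base_dir='src')
    dest = os.path.join(work, 'unpacked')
    unpack_archive(archive, dest)  # format inferred from the '.zip' extension
    assert os.path.exists(os.path.join(dest, 'src', 'f.txt'))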
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/_backport/sysconfig.cfg b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/_backport/sysconfig.cfg new file mode 100644 index 0000000..c92cd48 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/_backport/sysconfig.cfg | |||
| @@ -0,0 +1,84 @@ | |||
| 1 | [posix_prefix] | ||
| 2 | # Configuration directories. Some of these come straight out of the | ||
| 3 | # configure script. They are for implementing the other variables, not to | ||
| 4 | # be used directly in [resource_locations]. | ||
| 5 | confdir = /etc | ||
| 6 | datadir = /usr/share | ||
| 7 | libdir = /usr/lib | ||
| 8 | statedir = /var | ||
| 9 | # User resource directory | ||
| 10 | local = ~/.local/{distribution.name} | ||
| 11 | |||
| 12 | stdlib = {base}/lib/python{py_version_short} | ||
| 13 | platstdlib = {platbase}/lib/python{py_version_short} | ||
| 14 | purelib = {base}/lib/python{py_version_short}/site-packages | ||
| 15 | platlib = {platbase}/lib/python{py_version_short}/site-packages | ||
| 16 | include = {base}/include/python{py_version_short}{abiflags} | ||
| 17 | platinclude = {platbase}/include/python{py_version_short}{abiflags} | ||
| 18 | data = {base} | ||
| 19 | |||
| 20 | [posix_home] | ||
| 21 | stdlib = {base}/lib/python | ||
| 22 | platstdlib = {base}/lib/python | ||
| 23 | purelib = {base}/lib/python | ||
| 24 | platlib = {base}/lib/python | ||
| 25 | include = {base}/include/python | ||
| 26 | platinclude = {base}/include/python | ||
| 27 | scripts = {base}/bin | ||
| 28 | data = {base} | ||
| 29 | |||
| 30 | [nt] | ||
| 31 | stdlib = {base}/Lib | ||
| 32 | platstdlib = {base}/Lib | ||
| 33 | purelib = {base}/Lib/site-packages | ||
| 34 | platlib = {base}/Lib/site-packages | ||
| 35 | include = {base}/Include | ||
| 36 | platinclude = {base}/Include | ||
| 37 | scripts = {base}/Scripts | ||
| 38 | data = {base} | ||
| 39 | |||
| 40 | [os2] | ||
| 41 | stdlib = {base}/Lib | ||
| 42 | platstdlib = {base}/Lib | ||
| 43 | purelib = {base}/Lib/site-packages | ||
| 44 | platlib = {base}/Lib/site-packages | ||
| 45 | include = {base}/Include | ||
| 46 | platinclude = {base}/Include | ||
| 47 | scripts = {base}/Scripts | ||
| 48 | data = {base} | ||
| 49 | |||
| 50 | [os2_home] | ||
| 51 | stdlib = {userbase}/lib/python{py_version_short} | ||
| 52 | platstdlib = {userbase}/lib/python{py_version_short} | ||
| 53 | purelib = {userbase}/lib/python{py_version_short}/site-packages | ||
| 54 | platlib = {userbase}/lib/python{py_version_short}/site-packages | ||
| 55 | include = {userbase}/include/python{py_version_short} | ||
| 56 | scripts = {userbase}/bin | ||
| 57 | data = {userbase} | ||
| 58 | |||
| 59 | [nt_user] | ||
| 60 | stdlib = {userbase}/Python{py_version_nodot} | ||
| 61 | platstdlib = {userbase}/Python{py_version_nodot} | ||
| 62 | purelib = {userbase}/Python{py_version_nodot}/site-packages | ||
| 63 | platlib = {userbase}/Python{py_version_nodot}/site-packages | ||
| 64 | include = {userbase}/Python{py_version_nodot}/Include | ||
| 65 | scripts = {userbase}/Scripts | ||
| 66 | data = {userbase} | ||
| 67 | |||
| 68 | [posix_user] | ||
| 69 | stdlib = {userbase}/lib/python{py_version_short} | ||
| 70 | platstdlib = {userbase}/lib/python{py_version_short} | ||
| 71 | purelib = {userbase}/lib/python{py_version_short}/site-packages | ||
| 72 | platlib = {userbase}/lib/python{py_version_short}/site-packages | ||
| 73 | include = {userbase}/include/python{py_version_short} | ||
| 74 | scripts = {userbase}/bin | ||
| 75 | data = {userbase} | ||
| 76 | |||
| 77 | [osx_framework_user] | ||
| 78 | stdlib = {userbase}/lib/python | ||
| 79 | platstdlib = {userbase}/lib/python | ||
| 80 | purelib = {userbase}/lib/python/site-packages | ||
| 81 | platlib = {userbase}/lib/python/site-packages | ||
| 82 | include = {userbase}/include | ||
| 83 | scripts = {userbase}/bin | ||
| 84 | data = {userbase} | ||
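For reference, the {name} placeholders in these templates are filled by plain string substitution in the accompanying sysconfig.py; str.format stands in for that regex-based replacement in this illustrative sketch (made-up values):

    template = '{userbase}/lib/python{py_version_short}/site-packages'
    values = {'userbase': '/home/alice/.local', 'py_version_short': '3.7'}
    print(template.format(**values))
    # /home/alice/.local/lib/python3.7/site-packages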
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/_backport/sysconfig.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/_backport/sysconfig.py new file mode 100644 index 0000000..b243da3 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/_backport/sysconfig.py | |||
| @@ -0,0 +1,788 @@ | |||
| 1 | # -*- coding: utf-8 -*- | ||
| 2 | # | ||
| 3 | # Copyright (C) 2012 The Python Software Foundation. | ||
| 4 | # See LICENSE.txt and CONTRIBUTORS.txt. | ||
| 5 | # | ||
| 6 | """Access to Python's configuration information.""" | ||
| 7 | |||
| 8 | import codecs | ||
| 9 | import os | ||
| 10 | import re | ||
| 11 | import sys | ||
| 12 | from os.path import pardir, realpath | ||
| 13 | try: | ||
| 14 | import configparser | ||
| 15 | except ImportError: | ||
| 16 | import ConfigParser as configparser | ||
| 17 | |||
| 18 | |||
| 19 | __all__ = [ | ||
| 20 | 'get_config_h_filename', | ||
| 21 | 'get_config_var', | ||
| 22 | 'get_config_vars', | ||
| 23 | 'get_makefile_filename', | ||
| 24 | 'get_path', | ||
| 25 | 'get_path_names', | ||
| 26 | 'get_paths', | ||
| 27 | 'get_platform', | ||
| 28 | 'get_python_version', | ||
| 29 | 'get_scheme_names', | ||
| 30 | 'parse_config_h', | ||
| 31 | ] | ||
| 32 | |||
| 33 | |||
| 34 | def _safe_realpath(path): | ||
| 35 | try: | ||
| 36 | return realpath(path) | ||
| 37 | except OSError: | ||
| 38 | return path | ||
| 39 | |||
| 40 | |||
| 41 | if sys.executable: | ||
| 42 | _PROJECT_BASE = os.path.dirname(_safe_realpath(sys.executable)) | ||
| 43 | else: | ||
| 44 | # sys.executable can be empty if argv[0] has been changed and Python is | ||
| 45 | # unable to retrieve the real program name | ||
| 46 | _PROJECT_BASE = _safe_realpath(os.getcwd()) | ||
| 47 | |||
| 48 | if os.name == "nt" and "pcbuild" in _PROJECT_BASE[-8:].lower(): | ||
| 49 | _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir)) | ||
| 50 | # PC/VS7.1 | ||
| 51 | if os.name == "nt" and "\\pc\\v" in _PROJECT_BASE[-10:].lower(): | ||
| 52 | _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir)) | ||
| 53 | # PC/AMD64 | ||
| 54 | if os.name == "nt" and "\\pcbuild\\amd64" in _PROJECT_BASE[-14:].lower(): | ||
| 55 | _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir)) | ||
| 56 | |||
| 57 | |||
| 58 | def is_python_build(): | ||
| 59 | for fn in ("Setup.dist", "Setup.local"): | ||
| 60 | if os.path.isfile(os.path.join(_PROJECT_BASE, "Modules", fn)): | ||
| 61 | return True | ||
| 62 | return False | ||
| 63 | |||
| 64 | _PYTHON_BUILD = is_python_build() | ||
| 65 | |||
| 66 | _cfg_read = False | ||
| 67 | |||
| 68 | def _ensure_cfg_read(): | ||
| 69 | global _cfg_read | ||
| 70 | if not _cfg_read: | ||
| 71 | from ..resources import finder | ||
| 72 | backport_package = __name__.rsplit('.', 1)[0] | ||
| 73 | _finder = finder(backport_package) | ||
| 74 | _cfgfile = _finder.find('sysconfig.cfg') | ||
| 75 | assert _cfgfile, 'sysconfig.cfg not found' | ||
| 76 | with _cfgfile.as_stream() as s: | ||
| 77 | _SCHEMES.readfp(s) | ||
| 78 | if _PYTHON_BUILD: | ||
| 79 | for scheme in ('posix_prefix', 'posix_home'): | ||
| 80 | _SCHEMES.set(scheme, 'include', '{srcdir}/Include') | ||
| 81 | _SCHEMES.set(scheme, 'platinclude', '{projectbase}/.') | ||
| 82 | |||
| 83 | _cfg_read = True | ||
| 84 | |||
| 85 | |||
| 86 | _SCHEMES = configparser.RawConfigParser() | ||
| 87 | _VAR_REPL = re.compile(r'\{([^{]*?)\}') | ||
| 88 | |||
| 89 | def _expand_globals(config): | ||
| 90 | _ensure_cfg_read() | ||
| 91 | if config.has_section('globals'): | ||
| 92 | globals = config.items('globals') | ||
| 93 | else: | ||
| 94 | globals = tuple() | ||
| 95 | |||
| 96 | sections = config.sections() | ||
| 97 | for section in sections: | ||
| 98 | if section == 'globals': | ||
| 99 | continue | ||
| 100 | for option, value in globals: | ||
| 101 | if config.has_option(section, option): | ||
| 102 | continue | ||
| 103 | config.set(section, option, value) | ||
| 104 | config.remove_section('globals') | ||
| 105 | |||
| 106 | # now expanding local variables defined in the cfg file | ||
| 107 | # | ||
| 108 | for section in config.sections(): | ||
| 109 | variables = dict(config.items(section)) | ||
| 110 | |||
| 111 | def _replacer(matchobj): | ||
| 112 | name = matchobj.group(1) | ||
| 113 | if name in variables: | ||
| 114 | return variables[name] | ||
| 115 | return matchobj.group(0) | ||
| 116 | |||
| 117 | for option, value in config.items(section): | ||
| 118 | config.set(section, option, _VAR_REPL.sub(_replacer, value)) | ||
| 119 | |||
| 120 | #_expand_globals(_SCHEMES) | ||
| 121 | |||
| 122 | # sys.version's format is an implementation detail of CPython, so the | ||
| 123 | # short forms are derived from sys.version_info instead | ||
| 124 | _PY_VERSION = sys.version.split()[0] | ||
| 125 | _PY_VERSION_SHORT = '%s.%s' % sys.version_info[:2] | ||
| 126 | _PY_VERSION_SHORT_NO_DOT = '%s%s' % sys.version_info[:2] | ||
| 127 | _PREFIX = os.path.normpath(sys.prefix) | ||
| 128 | _EXEC_PREFIX = os.path.normpath(sys.exec_prefix) | ||
| 129 | _CONFIG_VARS = None | ||
| 130 | _USER_BASE = None | ||
| 131 | |||
| 132 | |||
| 133 | def _subst_vars(path, local_vars): | ||
| 134 | """In the string `path`, replace tokens like {some.thing} with the | ||
| 135 | corresponding value from the map `local_vars`. | ||
| 136 | |||
| 137 | If there is no corresponding value, leave the token unchanged. | ||
| 138 | """ | ||
| 139 | def _replacer(matchobj): | ||
| 140 | name = matchobj.group(1) | ||
| 141 | if name in local_vars: | ||
| 142 | return local_vars[name] | ||
| 143 | elif name in os.environ: | ||
| 144 | return os.environ[name] | ||
| 145 | return matchobj.group(0) | ||
| 146 | return _VAR_REPL.sub(_replacer, path) | ||
| 147 | |||
| 148 | |||
| 149 | def _extend_dict(target_dict, other_dict): | ||
| 150 | target_keys = target_dict.keys() | ||
| 151 | for key, value in other_dict.items(): | ||
| 152 | if key in target_keys: | ||
| 153 | continue | ||
| 154 | target_dict[key] = value | ||
| 155 | |||
| 156 | |||
| 157 | def _expand_vars(scheme, vars): | ||
| 158 | res = {} | ||
| 159 | if vars is None: | ||
| 160 | vars = {} | ||
| 161 | _extend_dict(vars, get_config_vars()) | ||
| 162 | |||
| 163 | for key, value in _SCHEMES.items(scheme): | ||
| 164 | if os.name in ('posix', 'nt'): | ||
| 165 | value = os.path.expanduser(value) | ||
| 166 | res[key] = os.path.normpath(_subst_vars(value, vars)) | ||
| 167 | return res | ||
| 168 | |||
| 169 | |||
| 170 | def format_value(value, vars): | ||
| 171 | def _replacer(matchobj): | ||
| 172 | name = matchobj.group(1) | ||
| 173 | if name in vars: | ||
| 174 | return vars[name] | ||
| 175 | return matchobj.group(0) | ||
| 176 | return _VAR_REPL.sub(_replacer, value) | ||
| 177 | |||
| 178 | |||
| 179 | def _get_default_scheme(): | ||
| 180 | if os.name == 'posix': | ||
| 181 | # the default scheme for posix is posix_prefix | ||
| 182 | return 'posix_prefix' | ||
| 183 | return os.name | ||
| 184 | |||
| 185 | |||
| 186 | def _getuserbase(): | ||
| 187 | env_base = os.environ.get("PYTHONUSERBASE", None) | ||
| 188 | |||
| 189 | def joinuser(*args): | ||
| 190 | return os.path.expanduser(os.path.join(*args)) | ||
| 191 | |||
| 192 | # what about 'os2emx', 'riscos' ? | ||
| 193 | if os.name == "nt": | ||
| 194 | base = os.environ.get("APPDATA") or "~" | ||
| 195 | if env_base: | ||
| 196 | return env_base | ||
| 197 | else: | ||
| 198 | return joinuser(base, "Python") | ||
| 199 | |||
| 200 | if sys.platform == "darwin": | ||
| 201 | framework = get_config_var("PYTHONFRAMEWORK") | ||
| 202 | if framework: | ||
| 203 | if env_base: | ||
| 204 | return env_base | ||
| 205 | else: | ||
| 206 | return joinuser("~", "Library", framework, "%d.%d" % | ||
| 207 | sys.version_info[:2]) | ||
| 208 | |||
| 209 | if env_base: | ||
| 210 | return env_base | ||
| 211 | else: | ||
| 212 | return joinuser("~", ".local") | ||
| 213 | |||
| 214 | |||
| 215 | def _parse_makefile(filename, vars=None): | ||
| 216 | """Parse a Makefile-style file. | ||
| 217 | |||
| 218 | A dictionary containing name/value pairs is returned. If an | ||
| 219 | optional dictionary is passed in as the second argument, it is | ||
| 220 | used instead of a new dictionary. | ||
| 221 | """ | ||
| 222 | # Regexes needed for parsing Makefile (and similar syntaxes, | ||
| 223 | # like old-style Setup files). | ||
| 224 | _variable_rx = re.compile(r"([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)") | ||
| 225 | _findvar1_rx = re.compile(r"\$\(([A-Za-z][A-Za-z0-9_]*)\)") | ||
| 226 | _findvar2_rx = re.compile(r"\${([A-Za-z][A-Za-z0-9_]*)}") | ||
| 227 | |||
| 228 | if vars is None: | ||
| 229 | vars = {} | ||
| 230 | done = {} | ||
| 231 | notdone = {} | ||
| 232 | |||
| 233 | with codecs.open(filename, encoding='utf-8', errors="surrogateescape") as f: | ||
| 234 | lines = f.readlines() | ||
| 235 | |||
| 236 | for line in lines: | ||
| 237 | if line.startswith('#') or line.strip() == '': | ||
| 238 | continue | ||
| 239 | m = _variable_rx.match(line) | ||
| 240 | if m: | ||
| 241 | n, v = m.group(1, 2) | ||
| 242 | v = v.strip() | ||
| 243 | # `$$' is a literal `$' in make | ||
| 244 | tmpv = v.replace('$$', '') | ||
| 245 | |||
| 246 | if "$" in tmpv: | ||
| 247 | notdone[n] = v | ||
| 248 | else: | ||
| 249 | try: | ||
| 250 | v = int(v) | ||
| 251 | except ValueError: | ||
| 252 | # insert literal `$' | ||
| 253 | done[n] = v.replace('$$', '$') | ||
| 254 | else: | ||
| 255 | done[n] = v | ||
| 256 | |||
| 257 | # do variable interpolation here | ||
| 258 | variables = list(notdone.keys()) | ||
| 259 | |||
| 260 | # Variables with a 'PY_' prefix in the makefile. These need to | ||
| 261 | # be made available without that prefix through sysconfig. | ||
| 262 | # Special care is needed to ensure that variable expansion works, even | ||
| 263 | # if the expansion uses the name without a prefix. | ||
| 264 | renamed_variables = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS') | ||
| 265 | |||
| 266 | while len(variables) > 0: | ||
| 267 | for name in tuple(variables): | ||
| 268 | value = notdone[name] | ||
| 269 | m = _findvar1_rx.search(value) or _findvar2_rx.search(value) | ||
| 270 | if m is not None: | ||
| 271 | n = m.group(1) | ||
| 272 | found = True | ||
| 273 | if n in done: | ||
| 274 | item = str(done[n]) | ||
| 275 | elif n in notdone: | ||
| 276 | # get it on a subsequent round | ||
| 277 | found = False | ||
| 278 | elif n in os.environ: | ||
| 279 | # do it like make: fall back to environment | ||
| 280 | item = os.environ[n] | ||
| 281 | |||
| 282 | elif n in renamed_variables: | ||
| 283 | if (name.startswith('PY_') and | ||
| 284 | name[3:] in renamed_variables): | ||
| 285 | item = "" | ||
| 286 | |||
| 287 | elif 'PY_' + n in notdone: | ||
| 288 | found = False | ||
| 289 | |||
| 290 | else: | ||
| 291 | item = str(done['PY_' + n]) | ||
| 292 | |||
| 293 | else: | ||
| 294 | done[n] = item = "" | ||
| 295 | |||
| 296 | if found: | ||
| 297 | after = value[m.end():] | ||
| 298 | value = value[:m.start()] + item + after | ||
| 299 | if "$" in after: | ||
| 300 | notdone[name] = value | ||
| 301 | else: | ||
| 302 | try: | ||
| 303 | value = int(value) | ||
| 304 | except ValueError: | ||
| 305 | done[name] = value.strip() | ||
| 306 | else: | ||
| 307 | done[name] = value | ||
| 308 | variables.remove(name) | ||
| 309 | |||
| 310 | if (name.startswith('PY_') and | ||
| 311 | name[3:] in renamed_variables): | ||
| 312 | |||
| 313 | name = name[3:] | ||
| 314 | if name not in done: | ||
| 315 | done[name] = value | ||
| 316 | |||
| 317 | else: | ||
| 318 | # bogus variable reference (e.g. "prefix=$/opt/python"); | ||
| 319 | # just drop it since we can't deal | ||
| 320 | done[name] = value | ||
| 321 | variables.remove(name) | ||
| 322 | |||
| 323 | # strip spurious spaces | ||
| 324 | for k, v in done.items(): | ||
| 325 | if isinstance(v, str): | ||
| 326 | done[k] = v.strip() | ||
| 327 | |||
| 328 | # save the results in the global dictionary | ||
| 329 | vars.update(done) | ||
| 330 | return vars | ||
| 331 | |||
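A small sketch of _parse_makefile() on a throwaway file ($(var) references are resolved, numeric values become ints):

    import tempfile

    text = 'prefix = /usr\nBINDIR = $(prefix)/bin\nVERSION = 3\n'
    with tempfile.NamedTemporaryFile('w', suffix='.mk', delete=False) as f:
        f.write(text)

    makevars = _parse_makefile(f.name)
    assert makevars['BINDIR'] == '/usr/bin'
    assert makevars['VERSION'] == 3  # numeric values are converted to int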
| 332 | |||
| 333 | def get_makefile_filename(): | ||
| 334 | """Return the path of the Makefile.""" | ||
| 335 | if _PYTHON_BUILD: | ||
| 336 | return os.path.join(_PROJECT_BASE, "Makefile") | ||
| 337 | if hasattr(sys, 'abiflags'): | ||
| 338 | config_dir_name = 'config-%s%s' % (_PY_VERSION_SHORT, sys.abiflags) | ||
| 339 | else: | ||
| 340 | config_dir_name = 'config' | ||
| 341 | return os.path.join(get_path('stdlib'), config_dir_name, 'Makefile') | ||
| 342 | |||
| 343 | |||
| 344 | def _init_posix(vars): | ||
| 345 | """Initialize the module as appropriate for POSIX systems.""" | ||
| 346 | # load the installed Makefile: | ||
| 347 | makefile = get_makefile_filename() | ||
| 348 | try: | ||
| 349 | _parse_makefile(makefile, vars) | ||
| 350 | except IOError as e: | ||
| 351 | msg = "invalid Python installation: unable to open %s" % makefile | ||
| 352 | if hasattr(e, "strerror"): | ||
| 353 | msg = msg + " (%s)" % e.strerror | ||
| 354 | raise IOError(msg) | ||
| 355 | # load the installed pyconfig.h: | ||
| 356 | config_h = get_config_h_filename() | ||
| 357 | try: | ||
| 358 | with open(config_h) as f: | ||
| 359 | parse_config_h(f, vars) | ||
| 360 | except IOError as e: | ||
| 361 | msg = "invalid Python installation: unable to open %s" % config_h | ||
| 362 | if hasattr(e, "strerror"): | ||
| 363 | msg = msg + " (%s)" % e.strerror | ||
| 364 | raise IOError(msg) | ||
| 365 | # On AIX, there are wrong paths to the linker scripts in the Makefile | ||
| 366 | # -- these paths are relative to the Python source, but when installed | ||
| 367 | # the scripts are in another directory. | ||
| 368 | if _PYTHON_BUILD: | ||
| 369 | vars['LDSHARED'] = vars['BLDSHARED'] | ||
| 370 | |||
| 371 | |||
| 372 | def _init_non_posix(vars): | ||
| 373 | """Initialize the module as appropriate for NT""" | ||
| 374 | # set basic install directories | ||
| 375 | vars['LIBDEST'] = get_path('stdlib') | ||
| 376 | vars['BINLIBDEST'] = get_path('platstdlib') | ||
| 377 | vars['INCLUDEPY'] = get_path('include') | ||
| 378 | vars['SO'] = '.pyd' | ||
| 379 | vars['EXE'] = '.exe' | ||
| 380 | vars['VERSION'] = _PY_VERSION_SHORT_NO_DOT | ||
| 381 | vars['BINDIR'] = os.path.dirname(_safe_realpath(sys.executable)) | ||
| 382 | |||
| 383 | # | ||
| 384 | # public APIs | ||
| 385 | # | ||
| 386 | |||
| 387 | |||
| 388 | def parse_config_h(fp, vars=None): | ||
| 389 | """Parse a config.h-style file. | ||
| 390 | |||
| 391 | A dictionary containing name/value pairs is returned. If an | ||
| 392 | optional dictionary is passed in as the second argument, it is | ||
| 393 | used instead of a new dictionary. | ||
| 394 | """ | ||
| 395 | if vars is None: | ||
| 396 | vars = {} | ||
| 397 | define_rx = re.compile("#define ([A-Z][A-Za-z0-9_]+) (.*)\n") | ||
| 398 | undef_rx = re.compile("/[*] #undef ([A-Z][A-Za-z0-9_]+) [*]/\n") | ||
| 399 | |||
| 400 | while True: | ||
| 401 | line = fp.readline() | ||
| 402 | if not line: | ||
| 403 | break | ||
| 404 | m = define_rx.match(line) | ||
| 405 | if m: | ||
| 406 | n, v = m.group(1, 2) | ||
| 407 | try: | ||
| 408 | v = int(v) | ||
| 409 | except ValueError: | ||
| 410 | pass | ||
| 411 | vars[n] = v | ||
| 412 | else: | ||
| 413 | m = undef_rx.match(line) | ||
| 414 | if m: | ||
| 415 | vars[m.group(1)] = 0 | ||
| 416 | return vars | ||
| 417 | |||
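A quick parse_config_h() sketch over an in-memory file-like object (Python 3's io.StringIO; made-up macro names):

    import io

    sample = io.StringIO('#define HAVE_FOO 1\n'
                         '/* #undef HAVE_BAR */\n')
    confvars = parse_config_h(sample)
    assert confvars == {'HAVE_FOO': 1, 'HAVE_BAR': 0}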
| 418 | |||
| 419 | def get_config_h_filename(): | ||
| 420 | """Return the path of pyconfig.h.""" | ||
| 421 | if _PYTHON_BUILD: | ||
| 422 | if os.name == "nt": | ||
| 423 | inc_dir = os.path.join(_PROJECT_BASE, "PC") | ||
| 424 | else: | ||
| 425 | inc_dir = _PROJECT_BASE | ||
| 426 | else: | ||
| 427 | inc_dir = get_path('platinclude') | ||
| 428 | return os.path.join(inc_dir, 'pyconfig.h') | ||
| 429 | |||
| 430 | |||
| 431 | def get_scheme_names(): | ||
| 432 | """Return a tuple containing the scheme names.""" | ||
| 433 | return tuple(sorted(_SCHEMES.sections())) | ||
| 434 | |||
| 435 | |||
| 436 | def get_path_names(): | ||
| 437 | """Return a tuple containing the path names.""" | ||
| 438 | # xxx see if we want a static list | ||
| 439 | return _SCHEMES.options('posix_prefix') | ||
| 440 | |||
| 441 | |||
| 442 | def get_paths(scheme=_get_default_scheme(), vars=None, expand=True): | ||
| 443 | """Return a mapping containing an install scheme. | ||
| 444 | |||
| 445 | ``scheme`` is the install scheme name. If not provided, it will | ||
| 446 | return the default scheme for the current platform. | ||
| 447 | """ | ||
| 448 | _ensure_cfg_read() | ||
| 449 | if expand: | ||
| 450 | return _expand_vars(scheme, vars) | ||
| 451 | else: | ||
| 452 | return dict(_SCHEMES.items(scheme)) | ||
| 453 | |||
| 454 | |||
| 455 | def get_path(name, scheme=_get_default_scheme(), vars=None, expand=True): | ||
| 456 | """Return a path corresponding to the scheme. | ||
| 457 | |||
| 458 | ``scheme`` is the install scheme name. | ||
| 459 | """ | ||
| 460 | return get_paths(scheme, vars, expand)[name] | ||
| 461 | |||
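Usage sketch for the two lookups above; the expanded output is install-dependent, so the second comment is only illustrative:

    print(get_paths('posix_prefix', expand=False)['purelib'])
    # {base}/lib/python{py_version_short}/site-packages  (raw template)
    print(get_path('purelib'))
    # e.g. /usr/lib/python3.7/site-packages on a typical POSIX install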
| 462 | |||
| 463 | def get_config_vars(*args): | ||
| 464 | """With no arguments, return a dictionary of all configuration | ||
| 465 | variables relevant for the current platform. | ||
| 466 | |||
| 467 | On Unix, this means every variable defined in Python's installed Makefile; | ||
| 468 | on Windows and Mac OS it's a much smaller set. | ||
| 469 | |||
| 470 | With arguments, return a list of values that result from looking up | ||
| 471 | each argument in the configuration variable dictionary. | ||
| 472 | """ | ||
| 473 | global _CONFIG_VARS | ||
| 474 | if _CONFIG_VARS is None: | ||
| 475 | _CONFIG_VARS = {} | ||
| 476 | # Normalized versions of prefix and exec_prefix are handy to have; | ||
| 477 | # in fact, these are the standard versions used most places in the | ||
| 478 | # distutils2 module. | ||
| 479 | _CONFIG_VARS['prefix'] = _PREFIX | ||
| 480 | _CONFIG_VARS['exec_prefix'] = _EXEC_PREFIX | ||
| 481 | _CONFIG_VARS['py_version'] = _PY_VERSION | ||
| 482 | _CONFIG_VARS['py_version_short'] = _PY_VERSION_SHORT | ||
| 483 | _CONFIG_VARS['py_version_nodot'] = _PY_VERSION_SHORT_NO_DOT | ||
| 484 | _CONFIG_VARS['base'] = _PREFIX | ||
| 485 | _CONFIG_VARS['platbase'] = _EXEC_PREFIX | ||
| 486 | _CONFIG_VARS['projectbase'] = _PROJECT_BASE | ||
| 487 | try: | ||
| 488 | _CONFIG_VARS['abiflags'] = sys.abiflags | ||
| 489 | except AttributeError: | ||
| 490 | # sys.abiflags may not be defined on all platforms. | ||
| 491 | _CONFIG_VARS['abiflags'] = '' | ||
| 492 | |||
| 493 | if os.name in ('nt', 'os2'): | ||
| 494 | _init_non_posix(_CONFIG_VARS) | ||
| 495 | if os.name == 'posix': | ||
| 496 | _init_posix(_CONFIG_VARS) | ||
| 497 | # Setting 'userbase' is done below the call to the | ||
| 498 | # init function to enable using 'get_config_var' in | ||
| 499 | # the init-function. | ||
| 500 | if sys.version_info >= (2, 6): | ||
| 501 | _CONFIG_VARS['userbase'] = _getuserbase() | ||
| 502 | |||
| 503 | if 'srcdir' not in _CONFIG_VARS: | ||
| 504 | _CONFIG_VARS['srcdir'] = _PROJECT_BASE | ||
| 505 | else: | ||
| 506 | _CONFIG_VARS['srcdir'] = _safe_realpath(_CONFIG_VARS['srcdir']) | ||
| 507 | |||
| 508 | # Convert srcdir into an absolute path if it appears necessary. | ||
| 509 | # Normally it is relative to the build directory. However, during | ||
| 510 | # testing, for example, we might be running a non-installed python | ||
| 511 | # from a different directory. | ||
| 512 | if _PYTHON_BUILD and os.name == "posix": | ||
| 513 | base = _PROJECT_BASE | ||
| 514 | try: | ||
| 515 | cwd = os.getcwd() | ||
| 516 | except OSError: | ||
| 517 | cwd = None | ||
| 518 | if (not os.path.isabs(_CONFIG_VARS['srcdir']) and | ||
| 519 | base != cwd): | ||
| 520 | # srcdir is relative and we are not in the same directory | ||
| 521 | # as the executable. Assume executable is in the build | ||
| 522 | # directory and make srcdir absolute. | ||
| 523 | srcdir = os.path.join(base, _CONFIG_VARS['srcdir']) | ||
| 524 | _CONFIG_VARS['srcdir'] = os.path.normpath(srcdir) | ||
| 525 | |||
| 526 | if sys.platform == 'darwin': | ||
| 527 | kernel_version = os.uname()[2] # Kernel version (8.4.3) | ||
| 528 | major_version = int(kernel_version.split('.')[0]) | ||
| 529 | |||
| 530 | if major_version < 8: | ||
| 531 | # On Mac OS X before 10.4, check if -arch and -isysroot | ||
| 532 | # are in CFLAGS or LDFLAGS and remove them if they are. | ||
| 533 | # This is needed when building extensions on a 10.3 system | ||
| 534 | # using a universal build of python. | ||
| 535 | for key in ('LDFLAGS', 'BASECFLAGS', | ||
| 536 | # a number of derived variables. These need to be | ||
| 537 | # patched up as well. | ||
| 538 | 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'): | ||
| 539 | flags = _CONFIG_VARS[key] | ||
| 540 | flags = re.sub(r'-arch\s+\w+\s', ' ', flags) | ||
| 541 | flags = re.sub('-isysroot [^ \t]*', ' ', flags) | ||
| 542 | _CONFIG_VARS[key] = flags | ||
| 543 | else: | ||
| 544 | # Allow the user to override the architecture flags using | ||
| 545 | # an environment variable. | ||
| 546 | # NOTE: This name was introduced by Apple in OSX 10.5 and | ||
| 547 | # is used by several scripting languages distributed with | ||
| 548 | # that OS release. | ||
| 549 | if 'ARCHFLAGS' in os.environ: | ||
| 550 | arch = os.environ['ARCHFLAGS'] | ||
| 551 | for key in ('LDFLAGS', 'BASECFLAGS', | ||
| 552 | # a number of derived variables. These need to be | ||
| 553 | # patched up as well. | ||
| 554 | 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'): | ||
| 555 | |||
| 556 | flags = _CONFIG_VARS[key] | ||
| 557 | flags = re.sub(r'-arch\s+\w+\s', ' ', flags) | ||
| 558 | flags = flags + ' ' + arch | ||
| 559 | _CONFIG_VARS[key] = flags | ||
| 560 | |||
| 561 | # If we're on OSX 10.5 or later and the user tries to | ||
| 562 | # compile an extension using an SDK that is not present | ||
| 563 | # on the current machine it is better to not use an SDK | ||
| 564 | # than to fail. | ||
| 565 | # | ||
| 566 | # The major use case for this is users running a Python.org | ||
| 567 | # binary installer on OSX 10.6: that installer uses | ||
| 568 | # the 10.4u SDK, but that SDK is not installed by default | ||
| 569 | # when you install Xcode. | ||
| 570 | # | ||
| 571 | CFLAGS = _CONFIG_VARS.get('CFLAGS', '') | ||
| 572 | m = re.search(r'-isysroot\s+(\S+)', CFLAGS) | ||
| 573 | if m is not None: | ||
| 574 | sdk = m.group(1) | ||
| 575 | if not os.path.exists(sdk): | ||
| 576 | for key in ('LDFLAGS', 'BASECFLAGS', | ||
| 577 | # a number of derived variables. These need to be | ||
| 578 | # patched up as well. | ||
| 579 | 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'): | ||
| 580 | |||
| 581 | flags = _CONFIG_VARS[key] | ||
| 582 | flags = re.sub(r'-isysroot\s+\S+(\s|$)', ' ', flags) | ||
| 583 | _CONFIG_VARS[key] = flags | ||
| 584 | |||
| 585 | if args: | ||
| 586 | vals = [] | ||
| 587 | for name in args: | ||
| 588 | vals.append(_CONFIG_VARS.get(name)) | ||
| 589 | return vals | ||
| 590 | else: | ||
| 591 | return _CONFIG_VARS | ||
| 592 | |||
| 593 | |||
| 594 | def get_config_var(name): | ||
| 595 | """Return the value of a single variable using the dictionary returned by | ||
| 596 | 'get_config_vars()'. | ||
| 597 | |||
| 598 | Equivalent to get_config_vars().get(name) | ||
| 599 | """ | ||
| 600 | return get_config_vars().get(name) | ||
| 601 | |||
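And the two query styles side by side (values shown are illustrative; they vary per interpreter):

    print(get_config_vars('prefix', 'py_version_short'))
    # e.g. ['/usr', '3.7'] -- a list when arguments are given
    print(get_config_var('abiflags'))
    # '' on platforms where sys.abiflags is undefined, per the fallback above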
| 602 | |||
| 603 | def get_platform(): | ||
| 604 | """Return a string that identifies the current platform. | ||
| 605 | |||
| 606 | This is used mainly to distinguish platform-specific build directories and | ||
| 607 | platform-specific built distributions. Typically includes the OS name | ||
| 608 | and version and the architecture (as supplied by 'os.uname()'), | ||
| 609 | although the exact information included depends on the OS; e.g. for IRIX | ||
| 610 | the architecture isn't particularly important (IRIX only runs on SGI | ||
| 611 | hardware), but for Linux the kernel version isn't particularly | ||
| 612 | important. | ||
| 613 | |||
| 614 | Examples of returned values: | ||
| 615 | linux-i586 | ||
| 616 | linux-alpha (?) | ||
| 617 | solaris-2.6-sun4u | ||
| 618 | irix-5.3 | ||
| 619 | irix64-6.2 | ||
| 620 | |||
| 621 | Windows will return one of: | ||
| 622 | win-amd64 (64bit Windows on AMD64, aka x86_64, Intel64, EM64T, etc.) | ||
| 623 | win-ia64 (64bit Windows on Itanium) | ||
| 624 | win32 (all others - specifically, sys.platform is returned) | ||
| 625 | |||
| 626 | For other non-POSIX platforms, currently just returns 'sys.platform'. | ||
| 627 | """ | ||
| 628 | if os.name == 'nt': | ||
| 629 | # sniff sys.version for architecture. | ||
| 630 | prefix = " bit (" | ||
| 631 | i = sys.version.find(prefix) | ||
| 632 | if i == -1: | ||
| 633 | return sys.platform | ||
| 634 | j = sys.version.find(")", i) | ||
| 635 | look = sys.version[i+len(prefix):j].lower() | ||
| 636 | if look == 'amd64': | ||
| 637 | return 'win-amd64' | ||
| 638 | if look == 'itanium': | ||
| 639 | return 'win-ia64' | ||
| 640 | return sys.platform | ||
| 641 | |||
| 642 | if os.name != "posix" or not hasattr(os, 'uname'): | ||
| 643 | # XXX what about the architecture? NT is Intel or Alpha, | ||
| 644 | # Mac OS is M68k or PPC, etc. | ||
| 645 | return sys.platform | ||
| 646 | |||
| 647 | # Try to distinguish various flavours of Unix | ||
| 648 | osname, host, release, version, machine = os.uname() | ||
| 649 | |||
| 650 | # Convert the OS name to lowercase, remove '/' characters | ||
| 651 | # (to accommodate BSD/OS), and translate spaces (for "Power Macintosh") | ||
| 652 | osname = osname.lower().replace('/', '') | ||
| 653 | machine = machine.replace(' ', '_') | ||
| 654 | machine = machine.replace('/', '-') | ||
| 655 | |||
| 656 | if osname[:5] == "linux": | ||
| 657 | # At least on Linux/Intel, 'machine' is the processor -- | ||
| 658 | # i386, etc. | ||
| 659 | # XXX what about Alpha, SPARC, etc? | ||
| 660 | return "%s-%s" % (osname, machine) | ||
| 661 | elif osname[:5] == "sunos": | ||
| 662 | if release[0] >= "5": # SunOS 5 == Solaris 2 | ||
| 663 | osname = "solaris" | ||
| 664 | release = "%d.%s" % (int(release[0]) - 3, release[2:]) | ||
| 665 | # fall through to standard osname-release-machine representation | ||
| 666 | elif osname[:4] == "irix": # could be "irix64"! | ||
| 667 | return "%s-%s" % (osname, release) | ||
| 668 | elif osname[:3] == "aix": | ||
| 669 | return "%s-%s.%s" % (osname, version, release) | ||
| 670 | elif osname[:6] == "cygwin": | ||
| 671 | osname = "cygwin" | ||
| 672 | rel_re = re.compile(r'[\d.]+') | ||
| 673 | m = rel_re.match(release) | ||
| 674 | if m: | ||
| 675 | release = m.group() | ||
| 676 | elif osname[:6] == "darwin": | ||
| 677 | # | ||
| 678 | # For our purposes, we'll assume that the system version from | ||
| 679 | # distutils' perspective is what MACOSX_DEPLOYMENT_TARGET is set | ||
| 680 | # to. This makes the compatibility story a bit more sane because the | ||
| 681 | # machine is going to compile and link as if it were | ||
| 682 | # MACOSX_DEPLOYMENT_TARGET. | ||
| 683 | cfgvars = get_config_vars() | ||
| 684 | macver = cfgvars.get('MACOSX_DEPLOYMENT_TARGET') | ||
| 685 | |||
| 686 | if True: | ||
| 687 | # Always calculate the release of the running machine, | ||
| 688 | # needed to determine if we can build fat binaries or not. | ||
| 689 | |||
| 690 | macrelease = macver | ||
| 691 | # Get the system version. Reading this plist is a documented | ||
| 692 | # way to get the system version (see the documentation for | ||
| 693 | # the Gestalt Manager) | ||
| 694 | try: | ||
| 695 | f = open('/System/Library/CoreServices/SystemVersion.plist') | ||
| 696 | except IOError: | ||
| 697 | # We're on a plain darwin box, fall back to the default | ||
| 698 | # behaviour. | ||
| 699 | pass | ||
| 700 | else: | ||
| 701 | try: | ||
| 702 | m = re.search(r'<key>ProductUserVisibleVersion</key>\s*' | ||
| 703 | r'<string>(.*?)</string>', f.read()) | ||
| 704 | finally: | ||
| 705 | f.close() | ||
| 706 | if m is not None: | ||
| 707 | macrelease = '.'.join(m.group(1).split('.')[:2]) | ||
| 708 | # else: fall back to the default behaviour | ||
| 709 | |||
| 710 | if not macver: | ||
| 711 | macver = macrelease | ||
| 712 | |||
| 713 | if macver: | ||
| 714 | release = macver | ||
| 715 | osname = "macosx" | ||
| 716 | |||
| 717 | if ((macrelease + '.') >= '10.4.' and | ||
| 718 | '-arch' in get_config_vars().get('CFLAGS', '').strip()): | ||
| 719 | # The universal build will build fat binaries, but not on | ||
| 720 | # systems before 10.4 | ||
| 721 | # | ||
| 722 | # Try to detect 4-way universal builds, those have machine-type | ||
| 723 | # 'universal' instead of 'fat'. | ||
| 724 | |||
| 725 | machine = 'fat' | ||
| 726 | cflags = get_config_vars().get('CFLAGS') | ||
| 727 | |||
| 728 | archs = re.findall(r'-arch\s+(\S+)', cflags) | ||
| 729 | archs = tuple(sorted(set(archs))) | ||
| 730 | |||
| 731 | if len(archs) == 1: | ||
| 732 | machine = archs[0] | ||
| 733 | elif archs == ('i386', 'ppc'): | ||
| 734 | machine = 'fat' | ||
| 735 | elif archs == ('i386', 'x86_64'): | ||
| 736 | machine = 'intel' | ||
| 737 | elif archs == ('i386', 'ppc', 'x86_64'): | ||
| 738 | machine = 'fat3' | ||
| 739 | elif archs == ('ppc64', 'x86_64'): | ||
| 740 | machine = 'fat64' | ||
| 741 | elif archs == ('i386', 'ppc', 'ppc64', 'x86_64'): | ||
| 742 | machine = 'universal' | ||
| 743 | else: | ||
| 744 | raise ValueError( | ||
| 745 | "Don't know machine value for archs=%r" % (archs,)) | ||
| 746 | |||
| 747 | elif machine == 'i386': | ||
| 748 | # On OSX the machine type returned by uname is always the | ||
| 749 | # 32-bit variant, even if the executable architecture is | ||
| 750 | # the 64-bit variant | ||
| 751 | if sys.maxsize >= 2**32: | ||
| 752 | machine = 'x86_64' | ||
| 753 | |||
| 754 | elif machine in ('PowerPC', 'Power_Macintosh'): | ||
| 755 | # Pick a sane name for the PPC architecture. | ||
| 756 | # See 'i386' case | ||
| 757 | if sys.maxsize >= 2**32: | ||
| 758 | machine = 'ppc64' | ||
| 759 | else: | ||
| 760 | machine = 'ppc' | ||
| 761 | |||
| 762 | return "%s-%s-%s" % (osname, release, machine) | ||
| 763 | |||
| 764 | |||
| 765 | def get_python_version(): | ||
| 766 | return _PY_VERSION_SHORT | ||
| 767 | |||
| 768 | |||
| 769 | def _print_dict(title, data): | ||
| 770 | for index, (key, value) in enumerate(sorted(data.items())): | ||
| 771 | if index == 0: | ||
| 772 | print('%s: ' % (title)) | ||
| 773 | print('\t%s = "%s"' % (key, value)) | ||
| 774 | |||
| 775 | |||
| 776 | def _main(): | ||
| 777 | """Display all information sysconfig contains.""" | ||
| 778 | print('Platform: "%s"' % get_platform()) | ||
| 779 | print('Python version: "%s"' % get_python_version()) | ||
| 780 | print('Current installation scheme: "%s"' % _get_default_scheme()) | ||
| 781 | print() | ||
| 782 | _print_dict('Paths', get_paths()) | ||
| 783 | print() | ||
| 784 | _print_dict('Variables', get_config_vars()) | ||
| 785 | |||
| 786 | |||
| 787 | if __name__ == '__main__': | ||
| 788 | _main() | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/_backport/tarfile.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/_backport/tarfile.py new file mode 100644 index 0000000..b0599bc --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/_backport/tarfile.py | |||
| @@ -0,0 +1,2607 @@ | |||
| 1 | #------------------------------------------------------------------- | ||
| 2 | # tarfile.py | ||
| 3 | #------------------------------------------------------------------- | ||
| 4 | # Copyright (C) 2002 Lars Gustaebel <lars@gustaebel.de> | ||
| 5 | # All rights reserved. | ||
| 6 | # | ||
| 7 | # Permission is hereby granted, free of charge, to any person | ||
| 8 | # obtaining a copy of this software and associated documentation | ||
| 9 | # files (the "Software"), to deal in the Software without | ||
| 10 | # restriction, including without limitation the rights to use, | ||
| 11 | # copy, modify, merge, publish, distribute, sublicense, and/or sell | ||
| 12 | # copies of the Software, and to permit persons to whom the | ||
| 13 | # Software is furnished to do so, subject to the following | ||
| 14 | # conditions: | ||
| 15 | # | ||
| 16 | # The above copyright notice and this permission notice shall be | ||
| 17 | # included in all copies or substantial portions of the Software. | ||
| 18 | # | ||
| 19 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, | ||
| 20 | # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES | ||
| 21 | # OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND | ||
| 22 | # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT | ||
| 23 | # HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, | ||
| 24 | # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING | ||
| 25 | # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR | ||
| 26 | # OTHER DEALINGS IN THE SOFTWARE. | ||
| 27 | # | ||
| 28 | from __future__ import print_function | ||
| 29 | |||
| 30 | """Read from and write to tar format archives. | ||
| 31 | """ | ||
| 32 | |||
| 33 | __version__ = "$Revision$" | ||
| 34 | |||
| 35 | version = "0.9.0" | ||
| 36 | __author__ = "Lars Gust\u00e4bel (lars@gustaebel.de)" | ||
| 37 | __date__ = "$Date: 2011-02-25 17:42:01 +0200 (Fri, 25 Feb 2011) $" | ||
| 38 | __cvsid__ = "$Id: tarfile.py 88586 2011-02-25 15:42:01Z marc-andre.lemburg $" | ||
| 39 | __credits__ = "Gustavo Niemeyer, Niels Gust\u00e4bel, Richard Townsend." | ||
| 40 | |||
| 41 | #--------- | ||
| 42 | # Imports | ||
| 43 | #--------- | ||
| 44 | import sys | ||
| 45 | import os | ||
| 46 | import stat | ||
| 47 | import errno | ||
| 48 | import time | ||
| 49 | import struct | ||
| 50 | import copy | ||
| 51 | import re | ||
| 52 | |||
| 53 | try: | ||
| 54 | import grp, pwd | ||
| 55 | except ImportError: | ||
| 56 | grp = pwd = None | ||
| 57 | |||
| 58 | # os.symlink on Windows versions prior to 6.0 (Vista) raises NotImplementedError | ||
| 59 | symlink_exception = (AttributeError, NotImplementedError) | ||
| 60 | try: | ||
| 61 | # WindowsError (1314) will be raised if the caller does not hold the | ||
| 62 | # SeCreateSymbolicLinkPrivilege privilege | ||
| 63 | symlink_exception += (WindowsError,) | ||
| 64 | except NameError: | ||
| 65 | pass | ||
| 66 | |||
| 67 | # from tarfile import * | ||
| 68 | __all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError"] | ||
| 69 | |||
| 70 | if sys.version_info[0] < 3: | ||
| 71 | import __builtin__ as builtins | ||
| 72 | else: | ||
| 73 | import builtins | ||
| 74 | |||
| 75 | _open = builtins.open # Since 'open' is TarFile.open | ||
| 76 | |||
| 77 | #--------------------------------------------------------- | ||
| 78 | # tar constants | ||
| 79 | #--------------------------------------------------------- | ||
| 80 | NUL = b"\0" # the null character | ||
| 81 | BLOCKSIZE = 512 # length of processing blocks | ||
| 82 | RECORDSIZE = BLOCKSIZE * 20 # length of records | ||
| 83 | GNU_MAGIC = b"ustar \0" # magic gnu tar string | ||
| 84 | POSIX_MAGIC = b"ustar\x0000" # magic posix tar string | ||
| 85 | |||
| 86 | LENGTH_NAME = 100 # maximum length of a filename | ||
| 87 | LENGTH_LINK = 100 # maximum length of a linkname | ||
| 88 | LENGTH_PREFIX = 155 # maximum length of the prefix field | ||
| 89 | |||
| 90 | REGTYPE = b"0" # regular file | ||
| 91 | AREGTYPE = b"\0" # regular file | ||
| 92 | LNKTYPE = b"1" # link (inside tarfile) | ||
| 93 | SYMTYPE = b"2" # symbolic link | ||
| 94 | CHRTYPE = b"3" # character special device | ||
| 95 | BLKTYPE = b"4" # block special device | ||
| 96 | DIRTYPE = b"5" # directory | ||
| 97 | FIFOTYPE = b"6" # fifo special device | ||
| 98 | CONTTYPE = b"7" # contiguous file | ||
| 99 | |||
| 100 | GNUTYPE_LONGNAME = b"L" # GNU tar longname | ||
| 101 | GNUTYPE_LONGLINK = b"K" # GNU tar longlink | ||
| 102 | GNUTYPE_SPARSE = b"S" # GNU tar sparse file | ||
| 103 | |||
| 104 | XHDTYPE = b"x" # POSIX.1-2001 extended header | ||
| 105 | XGLTYPE = b"g" # POSIX.1-2001 global header | ||
| 106 | SOLARIS_XHDTYPE = b"X" # Solaris extended header | ||
| 107 | |||
| 108 | USTAR_FORMAT = 0 # POSIX.1-1988 (ustar) format | ||
| 109 | GNU_FORMAT = 1 # GNU tar format | ||
| 110 | PAX_FORMAT = 2 # POSIX.1-2001 (pax) format | ||
| 111 | DEFAULT_FORMAT = GNU_FORMAT | ||
| 112 | |||
| 113 | #--------------------------------------------------------- | ||
| 114 | # tarfile constants | ||
| 115 | #--------------------------------------------------------- | ||
| 116 | # File types that tarfile supports: | ||
| 117 | SUPPORTED_TYPES = (REGTYPE, AREGTYPE, LNKTYPE, | ||
| 118 | SYMTYPE, DIRTYPE, FIFOTYPE, | ||
| 119 | CONTTYPE, CHRTYPE, BLKTYPE, | ||
| 120 | GNUTYPE_LONGNAME, GNUTYPE_LONGLINK, | ||
| 121 | GNUTYPE_SPARSE) | ||
| 122 | |||
| 123 | # File types that will be treated as a regular file. | ||
| 124 | REGULAR_TYPES = (REGTYPE, AREGTYPE, | ||
| 125 | CONTTYPE, GNUTYPE_SPARSE) | ||
| 126 | |||
| 127 | # File types that are part of the GNU tar format. | ||
| 128 | GNU_TYPES = (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK, | ||
| 129 | GNUTYPE_SPARSE) | ||
| 130 | |||
| 131 | # Fields from a pax header that override a TarInfo attribute. | ||
| 132 | PAX_FIELDS = ("path", "linkpath", "size", "mtime", | ||
| 133 | "uid", "gid", "uname", "gname") | ||
| 134 | |||
| 135 | # Fields from a pax header that are affected by hdrcharset. | ||
| 136 | PAX_NAME_FIELDS = set(("path", "linkpath", "uname", "gname")) | ||
| 137 | |||
| 138 | # Fields in a pax header that are numbers, all other fields | ||
| 139 | # are treated as strings. | ||
| 140 | PAX_NUMBER_FIELDS = { | ||
| 141 | "atime": float, | ||
| 142 | "ctime": float, | ||
| 143 | "mtime": float, | ||
| 144 | "uid": int, | ||
| 145 | "gid": int, | ||
| 146 | "size": int | ||
| 147 | } | ||
| 148 | |||
| 149 | #--------------------------------------------------------- | ||
| 150 | # Bits used in the mode field, values in octal. | ||
| 151 | #--------------------------------------------------------- | ||
| 152 | S_IFLNK = 0o120000 # symbolic link | ||
| 153 | S_IFREG = 0o100000 # regular file | ||
| 154 | S_IFBLK = 0o060000 # block device | ||
| 155 | S_IFDIR = 0o040000 # directory | ||
| 156 | S_IFCHR = 0o020000 # character device | ||
| 157 | S_IFIFO = 0o010000 # fifo | ||
| 158 | |||
| 159 | TSUID = 0o4000 # set UID on execution | ||
| 160 | TSGID = 0o2000 # set GID on execution | ||
| 161 | TSVTX = 0o1000 # reserved | ||
| 162 | |||
| 163 | TUREAD = 0o400 # read by owner | ||
| 164 | TUWRITE = 0o200 # write by owner | ||
| 165 | TUEXEC = 0o100 # execute/search by owner | ||
| 166 | TGREAD = 0o040 # read by group | ||
| 167 | TGWRITE = 0o020 # write by group | ||
| 168 | TGEXEC = 0o010 # execute/search by group | ||
| 169 | TOREAD = 0o004 # read by other | ||
| 170 | TOWRITE = 0o002 # write by other | ||
| 171 | TOEXEC = 0o001 # execute/search by other | ||
| 172 | |||
| 173 | #--------------------------------------------------------- | ||
| 174 | # initialization | ||
| 175 | #--------------------------------------------------------- | ||
| 176 | if os.name in ("nt", "ce"): | ||
| 177 | ENCODING = "utf-8" | ||
| 178 | else: | ||
| 179 | ENCODING = sys.getfilesystemencoding() | ||
| 180 | |||
| 181 | #--------------------------------------------------------- | ||
| 182 | # Some useful functions | ||
| 183 | #--------------------------------------------------------- | ||
| 184 | |||
| 185 | def stn(s, length, encoding, errors): | ||
| 186 | """Convert a string to a null-terminated bytes object. | ||
| 187 | """ | ||
| 188 | s = s.encode(encoding, errors) | ||
| 189 | return s[:length] + (length - len(s)) * NUL | ||
| 190 | |||
| 191 | def nts(s, encoding, errors): | ||
| 192 | """Convert a null-terminated bytes object to a string. | ||
| 193 | """ | ||
| 194 | p = s.find(b"\0") | ||
| 195 | if p != -1: | ||
| 196 | s = s[:p] | ||
| 197 | return s.decode(encoding, errors) | ||
| 198 | |||
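# A round-trip sketch of the fixed-width field helpers above (inline
# equivalents; "hello" is an arbitrary example name):
field = "hello".encode("utf-8")[:100].ljust(100, b"\0")     # what stn() produces
assert field[:6] == b"hello\0" and len(field) == 100
assert field.split(b"\0", 1)[0].decode("utf-8") == "hello"  # what nts() recovers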
| 199 | def nti(s): | ||
| 200 | """Convert a number field to a python number. | ||
| 201 | """ | ||
| 202 | # There are two possible encodings for a number field, see | ||
| 203 | # itn() below. | ||
| 204 | if s[0:1] != b"\x80":  # 0o200 marker byte; sliced so it works on Python 2 and 3 | ||
| 205 | try: | ||
| 206 | n = int(nts(s, "ascii", "strict") or "0", 8) | ||
| 207 | except ValueError: | ||
| 208 | raise InvalidHeaderError("invalid header") | ||
| 209 | else: | ||
| 210 | n = 0 | ||
| 211 | for i in range(len(s) - 1): | ||
| 212 | n <<= 8 | ||
| 213 | n += ord(s[i + 1:i + 2])  # slice before ord() so bytes work on Python 2 and 3 | ||
| 214 | return n | ||
| 215 | |||
| 216 | def itn(n, digits=8, format=DEFAULT_FORMAT): | ||
| 217 | """Convert a python number to a number field. | ||
| 218 | """ | ||
| 219 | # POSIX 1003.1-1988 requires numbers to be encoded as a string of | ||
| 220 | # octal digits followed by a null-byte, this allows values up to | ||
| 221 | # (8**(digits-1))-1. GNU tar allows storing numbers greater than | ||
| 222 | # that if necessary. A leading 0o200 byte indicates this particular | ||
| 223 | # encoding, the following digits-1 bytes are a big-endian | ||
| 224 | # representation. This allows values up to (256**(digits-1))-1. | ||
| 225 | if 0 <= n < 8 ** (digits - 1): | ||
| 226 | s = ("%0*o" % (digits - 1, n)).encode("ascii") + NUL | ||
| 227 | else: | ||
| 228 | if format != GNU_FORMAT or n >= 256 ** (digits - 1): | ||
| 229 | raise ValueError("overflow in number field") | ||
| 230 | |||
| 231 | if n < 0: | ||
| 232 | # XXX We mimic GNU tar's behaviour with negative numbers, | ||
| 233 | # this could raise OverflowError. | ||
| 234 | n = struct.unpack("L", struct.pack("l", n))[0] | ||
| 235 | |||
| 236 | s = bytearray() | ||
| 237 | for i in range(digits - 1): | ||
| 238 | s.insert(0, n & 0o377) | ||
| 239 | n >>= 8 | ||
| 240 | s.insert(0, 0o200) | ||
| 241 | return s | ||
| 242 | |||
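# A standalone sketch of the two number-field encodings described
# above (helper names are illustrative, not part of this module):
def encode_octal(n, digits=8):
    # POSIX: octal ASCII digits terminated by a NUL byte.
    return ("%0*o" % (digits - 1, n)).encode("ascii") + b"\0"

def encode_base256(n, digits=8):
    # GNU: a 0o200 marker byte followed by a big-endian binary value.
    out = bytearray(digits)
    out[0] = 0o200
    for i in range(digits - 1, 0, -1):
        out[i] = n & 0xFF
        n >>= 8
    return bytes(out)

assert encode_octal(511) == b"0000777\0"
assert encode_base256(512) == b"\x80\x00\x00\x00\x00\x00\x02\x00"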
| 243 | def calc_chksums(buf): | ||
| 244 | """Calculate the checksum for a member's header by summing up all | ||
| 245 | characters except for the chksum field which is treated as if | ||
| 246 | it was filled with spaces. According to the GNU tar sources, | ||
| 247 | some tars (Sun and NeXT) calculate chksum with signed char, | ||
| 248 | which will be different if there are chars in the buffer with | ||
| 249 | the high bit set. So we calculate two checksums, unsigned and | ||
| 250 | signed. | ||
| 251 | """ | ||
| 252 | unsigned_chksum = 256 + sum(struct.unpack("148B", buf[:148]) + struct.unpack("356B", buf[156:512])) | ||
| 253 | signed_chksum = 256 + sum(struct.unpack("148b", buf[:148]) + struct.unpack("356b", buf[156:512])) | ||
| 254 | return unsigned_chksum, signed_chksum | ||
| 255 | |||
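# An equivalent standalone formulation of the unsigned variant: sum
# every byte of the 512-byte block, counting the 8-byte chksum field
# (offsets 148-156) as ASCII spaces (8 * 0x20 == 256):
def header_checksum(block):
    block = bytearray(block)   # yields ints per byte on Python 2 and 3
    return sum(block[:148]) + 8 * 0x20 + sum(block[156:512])

assert header_checksum(bytearray(512)) == 256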
| 256 | def copyfileobj(src, dst, length=None): | ||
| 257 | """Copy length bytes from fileobj src to fileobj dst. | ||
| 258 | If length is None, copy the entire content. | ||
| 259 | """ | ||
| 260 | if length == 0: | ||
| 261 | return | ||
| 262 | if length is None: | ||
| 263 | while True: | ||
| 264 | buf = src.read(16*1024) | ||
| 265 | if not buf: | ||
| 266 | break | ||
| 267 | dst.write(buf) | ||
| 268 | return | ||
| 269 | |||
| 270 | BUFSIZE = 16 * 1024 | ||
| 271 | blocks, remainder = divmod(length, BUFSIZE) | ||
| 272 | for b in range(blocks): | ||
| 273 | buf = src.read(BUFSIZE) | ||
| 274 | if len(buf) < BUFSIZE: | ||
| 275 | raise IOError("end of file reached") | ||
| 276 | dst.write(buf) | ||
| 277 | |||
| 278 | if remainder != 0: | ||
| 279 | buf = src.read(remainder) | ||
| 280 | if len(buf) < remainder: | ||
| 281 | raise IOError("end of file reached") | ||
| 282 | dst.write(buf) | ||
| 283 | return | ||
| 284 | |||
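# Usage sketch (assumes the copyfileobj() defined above is in scope):
import io
src, dst = io.BytesIO(b"x" * 1024), io.BytesIO()
copyfileobj(src, dst, 512)            # copies exactly 512 bytes
assert dst.getvalue() == b"x" * 512
copyfileobj(src, dst)                 # copies the remainder up to EOF
assert len(dst.getvalue()) == 1024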
| 285 | filemode_table = ( | ||
| 286 | ((S_IFLNK, "l"), | ||
| 287 | (S_IFREG, "-"), | ||
| 288 | (S_IFBLK, "b"), | ||
| 289 | (S_IFDIR, "d"), | ||
| 290 | (S_IFCHR, "c"), | ||
| 291 | (S_IFIFO, "p")), | ||
| 292 | |||
| 293 | ((TUREAD, "r"),), | ||
| 294 | ((TUWRITE, "w"),), | ||
| 295 | ((TUEXEC|TSUID, "s"), | ||
| 296 | (TSUID, "S"), | ||
| 297 | (TUEXEC, "x")), | ||
| 298 | |||
| 299 | ((TGREAD, "r"),), | ||
| 300 | ((TGWRITE, "w"),), | ||
| 301 | ((TGEXEC|TSGID, "s"), | ||
| 302 | (TSGID, "S"), | ||
| 303 | (TGEXEC, "x")), | ||
| 304 | |||
| 305 | ((TOREAD, "r"),), | ||
| 306 | ((TOWRITE, "w"),), | ||
| 307 | ((TOEXEC|TSVTX, "t"), | ||
| 308 | (TSVTX, "T"), | ||
| 309 | (TOEXEC, "x")) | ||
| 310 | ) | ||
| 311 | |||
| 312 | def filemode(mode): | ||
| 313 | """Convert a file's mode to a string of the form | ||
| 314 | -rwxrwxrwx. | ||
| 315 | Used by TarFile.list() | ||
| 316 | """ | ||
| 317 | perm = [] | ||
| 318 | for table in filemode_table: | ||
| 319 | for bit, char in table: | ||
| 320 | if mode & bit == bit: | ||
| 321 | perm.append(char) | ||
| 322 | break | ||
| 323 | else: | ||
| 324 | perm.append("-") | ||
| 325 | return "".join(perm) | ||
| 326 | |||
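# Usage sketch (assumes the filemode() defined above is in scope):
assert filemode(0o100644) == "-rw-r--r--"   # regular file
assert filemode(0o120777) == "lrwxrwxrwx"   # symlink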
| 327 | class TarError(Exception): | ||
| 328 | """Base exception.""" | ||
| 329 | pass | ||
| 330 | class ExtractError(TarError): | ||
| 331 | """General exception for extract errors.""" | ||
| 332 | pass | ||
| 333 | class ReadError(TarError): | ||
| 334 | """Exception for unreadable tar archives.""" | ||
| 335 | pass | ||
| 336 | class CompressionError(TarError): | ||
| 337 | """Exception for unavailable compression methods.""" | ||
| 338 | pass | ||
| 339 | class StreamError(TarError): | ||
| 340 | """Exception for unsupported operations on stream-like TarFiles.""" | ||
| 341 | pass | ||
| 342 | class HeaderError(TarError): | ||
| 343 | """Base exception for header errors.""" | ||
| 344 | pass | ||
| 345 | class EmptyHeaderError(HeaderError): | ||
| 346 | """Exception for empty headers.""" | ||
| 347 | pass | ||
| 348 | class TruncatedHeaderError(HeaderError): | ||
| 349 | """Exception for truncated headers.""" | ||
| 350 | pass | ||
| 351 | class EOFHeaderError(HeaderError): | ||
| 352 | """Exception for end of file headers.""" | ||
| 353 | pass | ||
| 354 | class InvalidHeaderError(HeaderError): | ||
| 355 | """Exception for invalid headers.""" | ||
| 356 | pass | ||
| 357 | class SubsequentHeaderError(HeaderError): | ||
| 358 | """Exception for missing and invalid extended headers.""" | ||
| 359 | pass | ||
| 360 | |||
| 361 | #--------------------------- | ||
| 362 | # internal stream interface | ||
| 363 | #--------------------------- | ||
| 364 | class _LowLevelFile(object): | ||
| 365 | """Low-level file object. Supports reading and writing. | ||
| 366 | It is used instead of a regular file object for streaming | ||
| 367 | access. | ||
| 368 | """ | ||
| 369 | |||
| 370 | def __init__(self, name, mode): | ||
| 371 | mode = { | ||
| 372 | "r": os.O_RDONLY, | ||
| 373 | "w": os.O_WRONLY | os.O_CREAT | os.O_TRUNC, | ||
| 374 | }[mode] | ||
| 375 | if hasattr(os, "O_BINARY"): | ||
| 376 | mode |= os.O_BINARY | ||
| 377 | self.fd = os.open(name, mode, 0o666) | ||
| 378 | |||
| 379 | def close(self): | ||
| 380 | os.close(self.fd) | ||
| 381 | |||
| 382 | def read(self, size): | ||
| 383 | return os.read(self.fd, size) | ||
| 384 | |||
| 385 | def write(self, s): | ||
| 386 | os.write(self.fd, s) | ||
| 387 | |||
| 388 | class _Stream(object): | ||
| 389 | """Class that serves as an adapter between TarFile and | ||
| 390 | a stream-like object. The stream-like object only | ||
| 391 | needs to have a read() or write() method and is accessed | ||
| 392 | blockwise. Use of gzip or bzip2 compression is possible. | ||
| 393 | A stream-like object could be for example: sys.stdin, | ||
| 394 | sys.stdout, a socket, a tape device etc. | ||
| 395 | |||
| 396 | _Stream is intended to be used only internally. | ||
| 397 | """ | ||
| 398 | |||
| 399 | def __init__(self, name, mode, comptype, fileobj, bufsize): | ||
| 400 | """Construct a _Stream object. | ||
| 401 | """ | ||
| 402 | self._extfileobj = True | ||
| 403 | if fileobj is None: | ||
| 404 | fileobj = _LowLevelFile(name, mode) | ||
| 405 | self._extfileobj = False | ||
| 406 | |||
| 407 | if comptype == '*': | ||
| 408 | # Enable transparent compression detection for the | ||
| 409 | # stream interface | ||
| 410 | fileobj = _StreamProxy(fileobj) | ||
| 411 | comptype = fileobj.getcomptype() | ||
| 412 | |||
| 413 | self.name = name or "" | ||
| 414 | self.mode = mode | ||
| 415 | self.comptype = comptype | ||
| 416 | self.fileobj = fileobj | ||
| 417 | self.bufsize = bufsize | ||
| 418 | self.buf = b"" | ||
| 419 | self.pos = 0 | ||
| 420 | self.closed = False | ||
| 421 | |||
| 422 | try: | ||
| 423 | if comptype == "gz": | ||
| 424 | try: | ||
| 425 | import zlib | ||
| 426 | except ImportError: | ||
| 427 | raise CompressionError("zlib module is not available") | ||
| 428 | self.zlib = zlib | ||
| 429 | self.crc = zlib.crc32(b"") | ||
| 430 | if mode == "r": | ||
| 431 | self._init_read_gz() | ||
| 432 | else: | ||
| 433 | self._init_write_gz() | ||
| 434 | |||
| 435 | if comptype == "bz2": | ||
| 436 | try: | ||
| 437 | import bz2 | ||
| 438 | except ImportError: | ||
| 439 | raise CompressionError("bz2 module is not available") | ||
| 440 | if mode == "r": | ||
| 441 | self.dbuf = b"" | ||
| 442 | self.cmp = bz2.BZ2Decompressor() | ||
| 443 | else: | ||
| 444 | self.cmp = bz2.BZ2Compressor() | ||
| 445 | except: | ||
| 446 | if not self._extfileobj: | ||
| 447 | self.fileobj.close() | ||
| 448 | self.closed = True | ||
| 449 | raise | ||
| 450 | |||
| 451 | def __del__(self): | ||
| 452 | if hasattr(self, "closed") and not self.closed: | ||
| 453 | self.close() | ||
| 454 | |||
| 455 | def _init_write_gz(self): | ||
| 456 | """Initialize for writing with gzip compression. | ||
| 457 | """ | ||
| 458 | self.cmp = self.zlib.compressobj(9, self.zlib.DEFLATED, | ||
| 459 | -self.zlib.MAX_WBITS, | ||
| 460 | self.zlib.DEF_MEM_LEVEL, | ||
| 461 | 0) | ||
| 462 | timestamp = struct.pack("<L", int(time.time())) | ||
| 463 | self.__write(b"\037\213\010\010" + timestamp + b"\002\377") | ||
| 464 | if self.name.endswith(".gz"): | ||
| 465 | self.name = self.name[:-3] | ||
| 466 | # RFC1952 says we must use ISO-8859-1 for the FNAME field. | ||
| 467 | self.__write(self.name.encode("iso-8859-1", "replace") + NUL) | ||
| 468 | |||
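# The 10-byte gzip member header written above, decoded; the values
# are exactly what this writer emits (deflate, FNAME flag set, XFL=2,
# OS=255 meaning "unknown"):
import struct
hdr = b"\037\213\010\010" + struct.pack("<L", 0) + b"\002\377"
magic, method, flags, mtime, xfl, os_byte = struct.unpack("<HBBLBB", hdr)
assert (method, flags, xfl, os_byte) == (8, 8, 2, 255)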
| 469 | def write(self, s): | ||
| 470 | """Write string s to the stream. | ||
| 471 | """ | ||
| 472 | if self.comptype == "gz": | ||
| 473 | self.crc = self.zlib.crc32(s, self.crc) | ||
| 474 | self.pos += len(s) | ||
| 475 | if self.comptype != "tar": | ||
| 476 | s = self.cmp.compress(s) | ||
| 477 | self.__write(s) | ||
| 478 | |||
| 479 | def __write(self, s): | ||
| 480 | """Write string s to the stream if a whole new block | ||
| 481 | is ready to be written. | ||
| 482 | """ | ||
| 483 | self.buf += s | ||
| 484 | while len(self.buf) > self.bufsize: | ||
| 485 | self.fileobj.write(self.buf[:self.bufsize]) | ||
| 486 | self.buf = self.buf[self.bufsize:] | ||
| 487 | |||
| 488 | def close(self): | ||
| 489 | """Close the _Stream object. No operation should be | ||
| 490 | done on it afterwards. | ||
| 491 | """ | ||
| 492 | if self.closed: | ||
| 493 | return | ||
| 494 | |||
| 495 | if self.mode == "w" and self.comptype != "tar": | ||
| 496 | self.buf += self.cmp.flush() | ||
| 497 | |||
| 498 | if self.mode == "w" and self.buf: | ||
| 499 | self.fileobj.write(self.buf) | ||
| 500 | self.buf = b"" | ||
| 501 | if self.comptype == "gz": | ||
| 502 | # The native zlib crc is an unsigned 32-bit integer, but | ||
| 503 | # the Python wrapper implicitly casts that to a signed C | ||
| 504 | # long. So, on a 32-bit box self.crc may "look negative", | ||
| 505 | # while the same crc on a 64-bit box may "look positive". | ||
| 506 | # To avoid irksome warnings from the `struct` module, force | ||
| 507 | # it to look positive on all boxes. | ||
| 508 | self.fileobj.write(struct.pack("<L", self.crc & 0xffffffff)) | ||
| 509 | self.fileobj.write(struct.pack("<L", self.pos & 0xffffffff)) | ||
| 510 | |||
| 511 | if not self._extfileobj: | ||
| 512 | self.fileobj.close() | ||
| 513 | |||
| 514 | self.closed = True | ||
| 515 | |||
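# A sketch of the CRC normalization described above: masking keeps the
# value in unsigned 32-bit range, so struct.pack("<L", ...) never sees
# a negative number on platforms where crc32() returns a signed int.
import struct, zlib
crc = zlib.crc32(b"example data") & 0xffffffff
assert len(struct.pack("<L", crc)) == 4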
| 516 | def _init_read_gz(self): | ||
| 517 | """Initialize for reading a gzip compressed fileobj. | ||
| 518 | """ | ||
| 519 | self.cmp = self.zlib.decompressobj(-self.zlib.MAX_WBITS) | ||
| 520 | self.dbuf = b"" | ||
| 521 | |||
| 522 | # taken from gzip.GzipFile with some alterations | ||
| 523 | if self.__read(2) != b"\037\213": | ||
| 524 | raise ReadError("not a gzip file") | ||
| 525 | if self.__read(1) != b"\010": | ||
| 526 | raise CompressionError("unsupported compression method") | ||
| 527 | |||
| 528 | flag = ord(self.__read(1)) | ||
| 529 | self.__read(6) | ||
| 530 | |||
| 531 | if flag & 4: | ||
| 532 | xlen = ord(self.__read(1)) + 256 * ord(self.__read(1)) | ||
| 533 | self.read(xlen) | ||
| 534 | if flag & 8: | ||
| 535 | while True: | ||
| 536 | s = self.__read(1) | ||
| 537 | if not s or s == NUL: | ||
| 538 | break | ||
| 539 | if flag & 16: | ||
| 540 | while True: | ||
| 541 | s = self.__read(1) | ||
| 542 | if not s or s == NUL: | ||
| 543 | break | ||
| 544 | if flag & 2: | ||
| 545 | self.__read(2) | ||
| 546 | |||
| 547 | def tell(self): | ||
| 548 | """Return the stream's file pointer position. | ||
| 549 | """ | ||
| 550 | return self.pos | ||
| 551 | |||
| 552 | def seek(self, pos=0): | ||
| 553 | """Set the stream's file pointer to pos. Negative seeking | ||
| 554 | is forbidden. | ||
| 555 | """ | ||
| 556 | if pos - self.pos >= 0: | ||
| 557 | blocks, remainder = divmod(pos - self.pos, self.bufsize) | ||
| 558 | for i in range(blocks): | ||
| 559 | self.read(self.bufsize) | ||
| 560 | self.read(remainder) | ||
| 561 | else: | ||
| 562 | raise StreamError("seeking backwards is not allowed") | ||
| 563 | return self.pos | ||
| 564 | |||
| 565 | def read(self, size=None): | ||
| 566 | """Return the next size number of bytes from the stream. | ||
| 567 | If size is not defined, return all bytes of the stream | ||
| 568 | up to EOF. | ||
| 569 | """ | ||
| 570 | if size is None: | ||
| 571 | t = [] | ||
| 572 | while True: | ||
| 573 | buf = self._read(self.bufsize) | ||
| 574 | if not buf: | ||
| 575 | break | ||
| 576 | t.append(buf) | ||
| 577 | buf = "".join(t) | ||
| 578 | else: | ||
| 579 | buf = self._read(size) | ||
| 580 | self.pos += len(buf) | ||
| 581 | return buf | ||
| 582 | |||
| 583 | def _read(self, size): | ||
| 584 | """Return size bytes from the stream. | ||
| 585 | """ | ||
| 586 | if self.comptype == "tar": | ||
| 587 | return self.__read(size) | ||
| 588 | |||
| 589 | c = len(self.dbuf) | ||
| 590 | while c < size: | ||
| 591 | buf = self.__read(self.bufsize) | ||
| 592 | if not buf: | ||
| 593 | break | ||
| 594 | try: | ||
| 595 | buf = self.cmp.decompress(buf) | ||
| 596 | except IOError: | ||
| 597 | raise ReadError("invalid compressed data") | ||
| 598 | self.dbuf += buf | ||
| 599 | c += len(buf) | ||
| 600 | buf = self.dbuf[:size] | ||
| 601 | self.dbuf = self.dbuf[size:] | ||
| 602 | return buf | ||
| 603 | |||
| 604 | def __read(self, size): | ||
| 605 | """Return size bytes from stream. If internal buffer is empty, | ||
| 606 | read another block from the stream. | ||
| 607 | """ | ||
| 608 | c = len(self.buf) | ||
| 609 | while c < size: | ||
| 610 | buf = self.fileobj.read(self.bufsize) | ||
| 611 | if not buf: | ||
| 612 | break | ||
| 613 | self.buf += buf | ||
| 614 | c += len(buf) | ||
| 615 | buf = self.buf[:size] | ||
| 616 | self.buf = self.buf[size:] | ||
| 617 | return buf | ||
| 618 | # class _Stream | ||
| 619 | |||
| 620 | class _StreamProxy(object): | ||
| 621 | """Small proxy class that enables transparent compression | ||
| 622 | detection for the Stream interface (mode 'r|*'). | ||
| 623 | """ | ||
| 624 | |||
| 625 | def __init__(self, fileobj): | ||
| 626 | self.fileobj = fileobj | ||
| 627 | self.buf = self.fileobj.read(BLOCKSIZE) | ||
| 628 | |||
| 629 | def read(self, size): | ||
| 630 | self.read = self.fileobj.read | ||
| 631 | return self.buf | ||
| 632 | |||
| 633 | def getcomptype(self): | ||
| 634 | if self.buf.startswith(b"\037\213\010"): | ||
| 635 | return "gz" | ||
| 636 | if self.buf.startswith(b"BZh"):  # any blocksize digit, BZh1-BZh9 | ||
| 637 | return "bz2" | ||
| 638 | return "tar" | ||
| 639 | |||
| 640 | def close(self): | ||
| 641 | self.fileobj.close() | ||
| 642 | # class StreamProxy | ||
| 643 | |||
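# A standalone sketch of the same magic-byte sniffing (the helper name
# is illustrative):
def sniff_comptype(first_block):
    if first_block.startswith(b"\037\213\010"):
        return "gz"
    if first_block.startswith(b"BZh"):
        return "bz2"
    return "tar"

assert sniff_comptype(b"\037\213\010\000rest") == "gz"
assert sniff_comptype(b"BZh91AY&SYrest") == "bz2"
assert sniff_comptype(b"ustar-ish data") == "tar"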
| 644 | class _BZ2Proxy(object): | ||
| 645 | """Small proxy class that enables external file object | ||
| 646 | support for "r:bz2" and "w:bz2" modes. This is actually | ||
| 647 | a workaround for a limitation in bz2 module's BZ2File | ||
| 648 | class which (unlike gzip.GzipFile) has no support for | ||
| 649 | a file object argument. | ||
| 650 | """ | ||
| 651 | |||
| 652 | blocksize = 16 * 1024 | ||
| 653 | |||
| 654 | def __init__(self, fileobj, mode): | ||
| 655 | self.fileobj = fileobj | ||
| 656 | self.mode = mode | ||
| 657 | self.name = getattr(self.fileobj, "name", None) | ||
| 658 | self.init() | ||
| 659 | |||
| 660 | def init(self): | ||
| 661 | import bz2 | ||
| 662 | self.pos = 0 | ||
| 663 | if self.mode == "r": | ||
| 664 | self.bz2obj = bz2.BZ2Decompressor() | ||
| 665 | self.fileobj.seek(0) | ||
| 666 | self.buf = b"" | ||
| 667 | else: | ||
| 668 | self.bz2obj = bz2.BZ2Compressor() | ||
| 669 | |||
| 670 | def read(self, size): | ||
| 671 | x = len(self.buf) | ||
| 672 | while x < size: | ||
| 673 | raw = self.fileobj.read(self.blocksize) | ||
| 674 | if not raw: | ||
| 675 | break | ||
| 676 | data = self.bz2obj.decompress(raw) | ||
| 677 | self.buf += data | ||
| 678 | x += len(data) | ||
| 679 | |||
| 680 | buf = self.buf[:size] | ||
| 681 | self.buf = self.buf[size:] | ||
| 682 | self.pos += len(buf) | ||
| 683 | return buf | ||
| 684 | |||
| 685 | def seek(self, pos): | ||
| 686 | if pos < self.pos: | ||
| 687 | self.init() | ||
| 688 | self.read(pos - self.pos) | ||
| 689 | |||
| 690 | def tell(self): | ||
| 691 | return self.pos | ||
| 692 | |||
| 693 | def write(self, data): | ||
| 694 | self.pos += len(data) | ||
| 695 | raw = self.bz2obj.compress(data) | ||
| 696 | self.fileobj.write(raw) | ||
| 697 | |||
| 698 | def close(self): | ||
| 699 | if self.mode == "w": | ||
| 700 | raw = self.bz2obj.flush() | ||
| 701 | self.fileobj.write(raw) | ||
| 702 | # class _BZ2Proxy | ||
| 703 | |||
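# Usage sketch (assumes the _BZ2Proxy defined above is in scope):
import bz2, io
compressed = bz2.compress(b"payload")
proxy = _BZ2Proxy(io.BytesIO(compressed), "r")
assert proxy.read(7) == b"payload"
proxy.seek(0)                          # rewinds by re-initializing
assert proxy.read(3) == b"pay"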
| 704 | #------------------------ | ||
| 705 | # Extraction file object | ||
| 706 | #------------------------ | ||
| 707 | class _FileInFile(object): | ||
| 708 | """A thin wrapper around an existing file object that | ||
| 709 | provides a part of its data as an individual file | ||
| 710 | object. | ||
| 711 | """ | ||
| 712 | |||
| 713 | def __init__(self, fileobj, offset, size, blockinfo=None): | ||
| 714 | self.fileobj = fileobj | ||
| 715 | self.offset = offset | ||
| 716 | self.size = size | ||
| 717 | self.position = 0 | ||
| 718 | |||
| 719 | if blockinfo is None: | ||
| 720 | blockinfo = [(0, size)] | ||
| 721 | |||
| 722 | # Construct a map with data and zero blocks. | ||
| 723 | self.map_index = 0 | ||
| 724 | self.map = [] | ||
| 725 | lastpos = 0 | ||
| 726 | realpos = self.offset | ||
| 727 | for offset, size in blockinfo: | ||
| 728 | if offset > lastpos: | ||
| 729 | self.map.append((False, lastpos, offset, None)) | ||
| 730 | self.map.append((True, offset, offset + size, realpos)) | ||
| 731 | realpos += size | ||
| 732 | lastpos = offset + size | ||
| 733 | if lastpos < self.size: | ||
| 734 | self.map.append((False, lastpos, self.size, None)) | ||
| 735 | |||
| 736 | def seekable(self): | ||
| 737 | if not hasattr(self.fileobj, "seekable"): | ||
| 738 | # XXX gzip.GzipFile and bz2.BZ2File | ||
| 739 | return True | ||
| 740 | return self.fileobj.seekable() | ||
| 741 | |||
| 742 | def tell(self): | ||
| 743 | """Return the current file position. | ||
| 744 | """ | ||
| 745 | return self.position | ||
| 746 | |||
| 747 | def seek(self, position): | ||
| 748 | """Seek to a position in the file. | ||
| 749 | """ | ||
| 750 | self.position = position | ||
| 751 | |||
| 752 | def read(self, size=None): | ||
| 753 | """Read data from the file. | ||
| 754 | """ | ||
| 755 | if size is None: | ||
| 756 | size = self.size - self.position | ||
| 757 | else: | ||
| 758 | size = min(size, self.size - self.position) | ||
| 759 | |||
| 760 | buf = b"" | ||
| 761 | while size > 0: | ||
| 762 | while True: | ||
| 763 | data, start, stop, offset = self.map[self.map_index] | ||
| 764 | if start <= self.position < stop: | ||
| 765 | break | ||
| 766 | else: | ||
| 767 | self.map_index += 1 | ||
| 768 | if self.map_index == len(self.map): | ||
| 769 | self.map_index = 0 | ||
| 770 | length = min(size, stop - self.position) | ||
| 771 | if data: | ||
| 772 | self.fileobj.seek(offset + (self.position - start)) | ||
| 773 | buf += self.fileobj.read(length) | ||
| 774 | else: | ||
| 775 | buf += NUL * length | ||
| 776 | size -= length | ||
| 777 | self.position += length | ||
| 778 | return buf | ||
| 779 | #class _FileInFile | ||
| 780 | |||
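# A sketch of the map construction performed in _FileInFile.__init__
# above, for a hypothetical 100-byte member whose only data block is
# 20 bytes at member offset 40, stored at archive offset 1024:
def build_map(blockinfo, size, realpos):
    m, lastpos = [], 0
    for offset, length in blockinfo:
        if offset > lastpos:
            m.append((False, lastpos, offset, None))         # hole: zeros
        m.append((True, offset, offset + length, realpos))   # real data
        realpos += length
        lastpos = offset + length
    if lastpos < size:
        m.append((False, lastpos, size, None))
    return m

assert build_map([(40, 20)], 100, 1024) == [
    (False, 0, 40, None), (True, 40, 60, 1024), (False, 60, 100, None)]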
| 781 | |||
| 782 | class ExFileObject(object): | ||
| 783 | """File-like object for reading an archive member. | ||
| 784 | Is returned by TarFile.extractfile(). | ||
| 785 | """ | ||
| 786 | blocksize = 1024 | ||
| 787 | |||
| 788 | def __init__(self, tarfile, tarinfo): | ||
| 789 | self.fileobj = _FileInFile(tarfile.fileobj, | ||
| 790 | tarinfo.offset_data, | ||
| 791 | tarinfo.size, | ||
| 792 | tarinfo.sparse) | ||
| 793 | self.name = tarinfo.name | ||
| 794 | self.mode = "r" | ||
| 795 | self.closed = False | ||
| 796 | self.size = tarinfo.size | ||
| 797 | |||
| 798 | self.position = 0 | ||
| 799 | self.buffer = b"" | ||
| 800 | |||
| 801 | def readable(self): | ||
| 802 | return True | ||
| 803 | |||
| 804 | def writable(self): | ||
| 805 | return False | ||
| 806 | |||
| 807 | def seekable(self): | ||
| 808 | return self.fileobj.seekable() | ||
| 809 | |||
| 810 | def read(self, size=None): | ||
| 811 | """Read at most size bytes from the file. If size is not | ||
| 812 | present or None, read all data until EOF is reached. | ||
| 813 | """ | ||
| 814 | if self.closed: | ||
| 815 | raise ValueError("I/O operation on closed file") | ||
| 816 | |||
| 817 | buf = b"" | ||
| 818 | if self.buffer: | ||
| 819 | if size is None: | ||
| 820 | buf = self.buffer | ||
| 821 | self.buffer = b"" | ||
| 822 | else: | ||
| 823 | buf = self.buffer[:size] | ||
| 824 | self.buffer = self.buffer[size:] | ||
| 825 | |||
| 826 | if size is None: | ||
| 827 | buf += self.fileobj.read() | ||
| 828 | else: | ||
| 829 | buf += self.fileobj.read(size - len(buf)) | ||
| 830 | |||
| 831 | self.position += len(buf) | ||
| 832 | return buf | ||
| 833 | |||
| 834 | # XXX TextIOWrapper uses the read1() method. | ||
| 835 | read1 = read | ||
| 836 | |||
| 837 | def readline(self, size=-1): | ||
| 838 | """Read one entire line from the file. If size is present | ||
| 839 | and non-negative, return a string with at most that | ||
| 840 | size, which may be an incomplete line. | ||
| 841 | """ | ||
| 842 | if self.closed: | ||
| 843 | raise ValueError("I/O operation on closed file") | ||
| 844 | |||
| 845 | pos = self.buffer.find(b"\n") + 1 | ||
| 846 | if pos == 0: | ||
| 847 | # no newline found. | ||
| 848 | while True: | ||
| 849 | buf = self.fileobj.read(self.blocksize) | ||
| 850 | self.buffer += buf | ||
| 851 | if not buf or b"\n" in buf: | ||
| 852 | pos = self.buffer.find(b"\n") + 1 | ||
| 853 | if pos == 0: | ||
| 854 | # no newline found. | ||
| 855 | pos = len(self.buffer) | ||
| 856 | break | ||
| 857 | |||
| 858 | if size != -1: | ||
| 859 | pos = min(size, pos) | ||
| 860 | |||
| 861 | buf = self.buffer[:pos] | ||
| 862 | self.buffer = self.buffer[pos:] | ||
| 863 | self.position += len(buf) | ||
| 864 | return buf | ||
| 865 | |||
| 866 | def readlines(self): | ||
| 867 | """Return a list with all remaining lines. | ||
| 868 | """ | ||
| 869 | result = [] | ||
| 870 | while True: | ||
| 871 | line = self.readline() | ||
| 872 | if not line: break | ||
| 873 | result.append(line) | ||
| 874 | return result | ||
| 875 | |||
| 876 | def tell(self): | ||
| 877 | """Return the current file position. | ||
| 878 | """ | ||
| 879 | if self.closed: | ||
| 880 | raise ValueError("I/O operation on closed file") | ||
| 881 | |||
| 882 | return self.position | ||
| 883 | |||
| 884 | def seek(self, pos, whence=os.SEEK_SET): | ||
| 885 | """Seek to a position in the file. | ||
| 886 | """ | ||
| 887 | if self.closed: | ||
| 888 | raise ValueError("I/O operation on closed file") | ||
| 889 | |||
| 890 | if whence == os.SEEK_SET: | ||
| 891 | self.position = min(max(pos, 0), self.size) | ||
| 892 | elif whence == os.SEEK_CUR: | ||
| 893 | if pos < 0: | ||
| 894 | self.position = max(self.position + pos, 0) | ||
| 895 | else: | ||
| 896 | self.position = min(self.position + pos, self.size) | ||
| 897 | elif whence == os.SEEK_END: | ||
| 898 | self.position = max(min(self.size + pos, self.size), 0) | ||
| 899 | else: | ||
| 900 | raise ValueError("Invalid argument") | ||
| 901 | |||
| 902 | self.buffer = b"" | ||
| 903 | self.fileobj.seek(self.position) | ||
| 904 | |||
| 905 | def close(self): | ||
| 906 | """Close the file object. | ||
| 907 | """ | ||
| 908 | self.closed = True | ||
| 909 | |||
| 910 | def __iter__(self): | ||
| 911 | """Get an iterator over the file's lines. | ||
| 912 | """ | ||
| 913 | while True: | ||
| 914 | line = self.readline() | ||
| 915 | if not line: | ||
| 916 | break | ||
| 917 | yield line | ||
| 918 | #class ExFileObject | ||
| 919 | |||
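# Usage sketch with the stdlib tarfile module, whose extractfile()
# returns the same kind of file-like object:
import io, tarfile as _tarfile
archive = io.BytesIO()
with _tarfile.open(fileobj=archive, mode="w") as tf:
    info = _tarfile.TarInfo("greeting.txt")
    data = b"hello\nworld\n"
    info.size = len(data)
    tf.addfile(info, io.BytesIO(data))
archive.seek(0)
with _tarfile.open(fileobj=archive) as tf:
    member = tf.extractfile("greeting.txt")
    assert member.readline() == b"hello\n"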
| 920 | #------------------ | ||
| 921 | # Exported Classes | ||
| 922 | #------------------ | ||
| 923 | class TarInfo(object): | ||
| 924 | """Informational class which holds the details about an | ||
| 925 | archive member given by a tar header block. | ||
| 926 | TarInfo objects are returned by TarFile.getmember(), | ||
| 927 | TarFile.getmembers() and TarFile.gettarinfo() and are | ||
| 928 | usually created internally. | ||
| 929 | """ | ||
| 930 | |||
| 931 | __slots__ = ("name", "mode", "uid", "gid", "size", "mtime", | ||
| 932 | "chksum", "type", "linkname", "uname", "gname", | ||
| 933 | "devmajor", "devminor", | ||
| 934 | "offset", "offset_data", "pax_headers", "sparse", | ||
| 935 | "tarfile", "_sparse_structs", "_link_target") | ||
| 936 | |||
| 937 | def __init__(self, name=""): | ||
| 938 | """Construct a TarInfo object. name is the optional name | ||
| 939 | of the member. | ||
| 940 | """ | ||
| 941 | self.name = name # member name | ||
| 942 | self.mode = 0o644 # file permissions | ||
| 943 | self.uid = 0 # user id | ||
| 944 | self.gid = 0 # group id | ||
| 945 | self.size = 0 # file size | ||
| 946 | self.mtime = 0 # modification time | ||
| 947 | self.chksum = 0 # header checksum | ||
| 948 | self.type = REGTYPE # member type | ||
| 949 | self.linkname = "" # link name | ||
| 950 | self.uname = "" # user name | ||
| 951 | self.gname = "" # group name | ||
| 952 | self.devmajor = 0 # device major number | ||
| 953 | self.devminor = 0 # device minor number | ||
| 954 | |||
| 955 | self.offset = 0 # the tar header starts here | ||
| 956 | self.offset_data = 0 # the file's data starts here | ||
| 957 | |||
| 958 | self.sparse = None # sparse member information | ||
| 959 | self.pax_headers = {} # pax header information | ||
| 960 | |||
| 961 | # In pax headers the "name" and "linkname" field are called | ||
| 962 | # "path" and "linkpath". | ||
| 963 | def _getpath(self): | ||
| 964 | return self.name | ||
| 965 | def _setpath(self, name): | ||
| 966 | self.name = name | ||
| 967 | path = property(_getpath, _setpath) | ||
| 968 | |||
| 969 | def _getlinkpath(self): | ||
| 970 | return self.linkname | ||
| 971 | def _setlinkpath(self, linkname): | ||
| 972 | self.linkname = linkname | ||
| 973 | linkpath = property(_getlinkpath, _setlinkpath) | ||
| 974 | |||
| 975 | def __repr__(self): | ||
| 976 | return "<%s %r at %#x>" % (self.__class__.__name__,self.name,id(self)) | ||
| 977 | |||
| 978 | def get_info(self): | ||
| 979 | """Return the TarInfo's attributes as a dictionary. | ||
| 980 | """ | ||
| 981 | info = { | ||
| 982 | "name": self.name, | ||
| 983 | "mode": self.mode & 0o7777, | ||
| 984 | "uid": self.uid, | ||
| 985 | "gid": self.gid, | ||
| 986 | "size": self.size, | ||
| 987 | "mtime": self.mtime, | ||
| 988 | "chksum": self.chksum, | ||
| 989 | "type": self.type, | ||
| 990 | "linkname": self.linkname, | ||
| 991 | "uname": self.uname, | ||
| 992 | "gname": self.gname, | ||
| 993 | "devmajor": self.devmajor, | ||
| 994 | "devminor": self.devminor | ||
| 995 | } | ||
| 996 | |||
| 997 | if info["type"] == DIRTYPE and not info["name"].endswith("/"): | ||
| 998 | info["name"] += "/" | ||
| 999 | |||
| 1000 | return info | ||
| 1001 | |||
| 1002 | def tobuf(self, format=DEFAULT_FORMAT, encoding=ENCODING, errors="surrogateescape"): | ||
| 1003 | """Return a tar header as a string of 512 byte blocks. | ||
| 1004 | """ | ||
| 1005 | info = self.get_info() | ||
| 1006 | |||
| 1007 | if format == USTAR_FORMAT: | ||
| 1008 | return self.create_ustar_header(info, encoding, errors) | ||
| 1009 | elif format == GNU_FORMAT: | ||
| 1010 | return self.create_gnu_header(info, encoding, errors) | ||
| 1011 | elif format == PAX_FORMAT: | ||
| 1012 | return self.create_pax_header(info, encoding) | ||
| 1013 | else: | ||
| 1014 | raise ValueError("invalid format") | ||
| 1015 | |||
| 1016 | def create_ustar_header(self, info, encoding, errors): | ||
| 1017 | """Return the object as a ustar header block. | ||
| 1018 | """ | ||
| 1019 | info["magic"] = POSIX_MAGIC | ||
| 1020 | |||
| 1021 | if len(info["linkname"]) > LENGTH_LINK: | ||
| 1022 | raise ValueError("linkname is too long") | ||
| 1023 | |||
| 1024 | if len(info["name"]) > LENGTH_NAME: | ||
| 1025 | info["prefix"], info["name"] = self._posix_split_name(info["name"]) | ||
| 1026 | |||
| 1027 | return self._create_header(info, USTAR_FORMAT, encoding, errors) | ||
| 1028 | |||
| 1029 | def create_gnu_header(self, info, encoding, errors): | ||
| 1030 | """Return the object as a GNU header block sequence. | ||
| 1031 | """ | ||
| 1032 | info["magic"] = GNU_MAGIC | ||
| 1033 | |||
| 1034 | buf = b"" | ||
| 1035 | if len(info["linkname"]) > LENGTH_LINK: | ||
| 1036 | buf += self._create_gnu_long_header(info["linkname"], GNUTYPE_LONGLINK, encoding, errors) | ||
| 1037 | |||
| 1038 | if len(info["name"]) > LENGTH_NAME: | ||
| 1039 | buf += self._create_gnu_long_header(info["name"], GNUTYPE_LONGNAME, encoding, errors) | ||
| 1040 | |||
| 1041 | return buf + self._create_header(info, GNU_FORMAT, encoding, errors) | ||
| 1042 | |||
| 1043 | def create_pax_header(self, info, encoding): | ||
| 1044 | """Return the object as a ustar header block. If it cannot be | ||
| 1045 | represented this way, prepend a pax extended header sequence | ||
| 1046 | with supplemental information. | ||
| 1047 | """ | ||
| 1048 | info["magic"] = POSIX_MAGIC | ||
| 1049 | pax_headers = self.pax_headers.copy() | ||
| 1050 | |||
| 1051 | # Test string fields for values that exceed the field length or cannot | ||
| 1052 | # be represented in ASCII encoding. | ||
| 1053 | for name, hname, length in ( | ||
| 1054 | ("name", "path", LENGTH_NAME), ("linkname", "linkpath", LENGTH_LINK), | ||
| 1055 | ("uname", "uname", 32), ("gname", "gname", 32)): | ||
| 1056 | |||
| 1057 | if hname in pax_headers: | ||
| 1058 | # The pax header has priority. | ||
| 1059 | continue | ||
| 1060 | |||
| 1061 | # Try to encode the string as ASCII. | ||
| 1062 | try: | ||
| 1063 | info[name].encode("ascii", "strict") | ||
| 1064 | except UnicodeEncodeError: | ||
| 1065 | pax_headers[hname] = info[name] | ||
| 1066 | continue | ||
| 1067 | |||
| 1068 | if len(info[name]) > length: | ||
| 1069 | pax_headers[hname] = info[name] | ||
| 1070 | |||
| 1071 | # Test number fields for values that exceed the field limit or values | ||
| 1072 | # that need to be stored as floats. | ||
| 1073 | for name, digits in (("uid", 8), ("gid", 8), ("size", 12), ("mtime", 12)): | ||
| 1074 | if name in pax_headers: | ||
| 1075 | # The pax header has priority. Avoid overflow. | ||
| 1076 | info[name] = 0 | ||
| 1077 | continue | ||
| 1078 | |||
| 1079 | val = info[name] | ||
| 1080 | if not 0 <= val < 8 ** (digits - 1) or isinstance(val, float): | ||
| 1081 | pax_headers[name] = str(val) | ||
| 1082 | info[name] = 0 | ||
| 1083 | |||
| 1084 | # Create a pax extended header if necessary. | ||
| 1085 | if pax_headers: | ||
| 1086 | buf = self._create_pax_generic_header(pax_headers, XHDTYPE, encoding) | ||
| 1087 | else: | ||
| 1088 | buf = b"" | ||
| 1089 | |||
| 1090 | return buf + self._create_header(info, USTAR_FORMAT, "ascii", "replace") | ||
| 1091 | |||
| 1092 | @classmethod | ||
| 1093 | def create_pax_global_header(cls, pax_headers): | ||
| 1094 | """Return the object as a pax global header block sequence. | ||
| 1095 | """ | ||
| 1096 | return cls._create_pax_generic_header(pax_headers, XGLTYPE, "utf8") | ||
| 1097 | |||
| 1098 | def _posix_split_name(self, name): | ||
| 1099 | """Split a name longer than 100 chars into a prefix | ||
| 1100 | and a name part. | ||
| 1101 | """ | ||
| 1102 | prefix = name[:LENGTH_PREFIX + 1] | ||
| 1103 | while prefix and prefix[-1] != "/": | ||
| 1104 | prefix = prefix[:-1] | ||
| 1105 | |||
| 1106 | name = name[len(prefix):] | ||
| 1107 | prefix = prefix[:-1] | ||
| 1108 | |||
| 1109 | if not prefix or len(name) > LENGTH_NAME: | ||
| 1110 | raise ValueError("name is too long") | ||
| 1111 | return prefix, name | ||
| 1112 | |||
| 1113 | @staticmethod | ||
| 1114 | def _create_header(info, format, encoding, errors): | ||
| 1115 | """Return a header block. info is a dictionary with file | ||
| 1116 | information, format must be one of the *_FORMAT constants. | ||
| 1117 | """ | ||
| 1118 | parts = [ | ||
| 1119 | stn(info.get("name", ""), 100, encoding, errors), | ||
| 1120 | itn(info.get("mode", 0) & 0o7777, 8, format), | ||
| 1121 | itn(info.get("uid", 0), 8, format), | ||
| 1122 | itn(info.get("gid", 0), 8, format), | ||
| 1123 | itn(info.get("size", 0), 12, format), | ||
| 1124 | itn(info.get("mtime", 0), 12, format), | ||
| 1125 | b" ", # checksum field | ||
| 1126 | info.get("type", REGTYPE), | ||
| 1127 | stn(info.get("linkname", ""), 100, encoding, errors), | ||
| 1128 | info.get("magic", POSIX_MAGIC), | ||
| 1129 | stn(info.get("uname", ""), 32, encoding, errors), | ||
| 1130 | stn(info.get("gname", ""), 32, encoding, errors), | ||
| 1131 | itn(info.get("devmajor", 0), 8, format), | ||
| 1132 | itn(info.get("devminor", 0), 8, format), | ||
| 1133 | stn(info.get("prefix", ""), 155, encoding, errors) | ||
| 1134 | ] | ||
| 1135 | |||
| 1136 | buf = struct.pack("%ds" % BLOCKSIZE, b"".join(parts)) | ||
| 1137 | chksum = calc_chksums(buf[-BLOCKSIZE:])[0] | ||
| 1138 | buf = buf[:-364] + ("%06o\0" % chksum).encode("ascii") + buf[-357:] | ||
| 1139 | return buf | ||
| 1140 | |||
| 1141 | @staticmethod | ||
| 1142 | def _create_payload(payload): | ||
| 1143 | """Return the string payload filled with zero bytes | ||
| 1144 | up to the next 512 byte border. | ||
| 1145 | """ | ||
| 1146 | blocks, remainder = divmod(len(payload), BLOCKSIZE) | ||
| 1147 | if remainder > 0: | ||
| 1148 | payload += (BLOCKSIZE - remainder) * NUL | ||
| 1149 | return payload | ||
| 1150 | |||
| 1151 | @classmethod | ||
| 1152 | def _create_gnu_long_header(cls, name, type, encoding, errors): | ||
| 1153 | """Return a GNUTYPE_LONGNAME or GNUTYPE_LONGLINK sequence | ||
| 1154 | for name. | ||
| 1155 | """ | ||
| 1156 | name = name.encode(encoding, errors) + NUL | ||
| 1157 | |||
| 1158 | info = {} | ||
| 1159 | info["name"] = "././@LongLink" | ||
| 1160 | info["type"] = type | ||
| 1161 | info["size"] = len(name) | ||
| 1162 | info["magic"] = GNU_MAGIC | ||
| 1163 | |||
| 1164 | # create extended header + name blocks. | ||
| 1165 | return cls._create_header(info, USTAR_FORMAT, encoding, errors) + \ | ||
| 1166 | cls._create_payload(name) | ||
| 1167 | |||
| 1168 | @classmethod | ||
| 1169 | def _create_pax_generic_header(cls, pax_headers, type, encoding): | ||
| 1170 | """Return a POSIX.1-2008 extended or global header sequence | ||
| 1171 | that contains a list of keyword, value pairs. The values | ||
| 1172 | must be strings. | ||
| 1173 | """ | ||
| 1174 | # Check if one of the fields contains surrogate characters and thereby | ||
| 1175 | # forces hdrcharset=BINARY, see _proc_pax() for more information. | ||
| 1176 | binary = False | ||
| 1177 | for keyword, value in pax_headers.items(): | ||
| 1178 | try: | ||
| 1179 | value.encode("utf8", "strict") | ||
| 1180 | except UnicodeEncodeError: | ||
| 1181 | binary = True | ||
| 1182 | break | ||
| 1183 | |||
| 1184 | records = b"" | ||
| 1185 | if binary: | ||
| 1186 | # Put the hdrcharset field at the beginning of the header. | ||
| 1187 | records += b"21 hdrcharset=BINARY\n" | ||
| 1188 | |||
| 1189 | for keyword, value in pax_headers.items(): | ||
| 1190 | keyword = keyword.encode("utf8") | ||
| 1191 | if binary: | ||
| 1192 | # Try to restore the original byte representation of `value'. | ||
| 1193 | # Needless to say, the encoding must match the string. | ||
| 1194 | value = value.encode(encoding, "surrogateescape") | ||
| 1195 | else: | ||
| 1196 | value = value.encode("utf8") | ||
| 1197 | |||
| 1198 | l = len(keyword) + len(value) + 3 # ' ' + '=' + '\n' | ||
| 1199 | n = p = 0 | ||
| 1200 | while True: | ||
| 1201 | n = l + len(str(p)) | ||
| 1202 | if n == p: | ||
| 1203 | break | ||
| 1204 | p = n | ||
| 1205 | records += bytes(str(p), "ascii") + b" " + keyword + b"=" + value + b"\n" | ||
| 1206 | |||
| 1207 | # We use a hardcoded "././@PaxHeader" name like star does | ||
| 1208 | # instead of the one that POSIX recommends. | ||
| 1209 | info = {} | ||
| 1210 | info["name"] = "././@PaxHeader" | ||
| 1211 | info["type"] = type | ||
| 1212 | info["size"] = len(records) | ||
| 1213 | info["magic"] = POSIX_MAGIC | ||
| 1214 | |||
| 1215 | # Create pax header + record blocks. | ||
| 1216 | return cls._create_header(info, USTAR_FORMAT, "ascii", "replace") + \ | ||
| 1217 | cls._create_payload(records) | ||
| 1218 | |||
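# A sketch of the record-length fixed point computed above: the length
# field counts its own decimal digits, so iterate until stable:
def pax_record(keyword, value):
    body = len(keyword) + len(value) + 3        # ' ' + '=' + '\n'
    n = p = 0
    while True:
        n = body + len(str(p))
        if n == p:
            break
        p = n
    return ("%d %s=%s\n" % (p, keyword, value)).encode("utf-8")

assert pax_record("path", "x" * 90) == b"99 path=" + b"x" * 90 + b"\n"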
| 1219 | @classmethod | ||
| 1220 | def frombuf(cls, buf, encoding, errors): | ||
| 1221 | """Construct a TarInfo object from a 512 byte bytes object. | ||
| 1222 | """ | ||
| 1223 | if len(buf) == 0: | ||
| 1224 | raise EmptyHeaderError("empty header") | ||
| 1225 | if len(buf) != BLOCKSIZE: | ||
| 1226 | raise TruncatedHeaderError("truncated header") | ||
| 1227 | if buf.count(NUL) == BLOCKSIZE: | ||
| 1228 | raise EOFHeaderError("end of file header") | ||
| 1229 | |||
| 1230 | chksum = nti(buf[148:156]) | ||
| 1231 | if chksum not in calc_chksums(buf): | ||
| 1232 | raise InvalidHeaderError("bad checksum") | ||
| 1233 | |||
| 1234 | obj = cls() | ||
| 1235 | obj.name = nts(buf[0:100], encoding, errors) | ||
| 1236 | obj.mode = nti(buf[100:108]) | ||
| 1237 | obj.uid = nti(buf[108:116]) | ||
| 1238 | obj.gid = nti(buf[116:124]) | ||
| 1239 | obj.size = nti(buf[124:136]) | ||
| 1240 | obj.mtime = nti(buf[136:148]) | ||
| 1241 | obj.chksum = chksum | ||
| 1242 | obj.type = buf[156:157] | ||
| 1243 | obj.linkname = nts(buf[157:257], encoding, errors) | ||
| 1244 | obj.uname = nts(buf[265:297], encoding, errors) | ||
| 1245 | obj.gname = nts(buf[297:329], encoding, errors) | ||
| 1246 | obj.devmajor = nti(buf[329:337]) | ||
| 1247 | obj.devminor = nti(buf[337:345]) | ||
| 1248 | prefix = nts(buf[345:500], encoding, errors) | ||
| 1249 | |||
| 1250 | # Old V7 tar format represents a directory as a regular | ||
| 1251 | # file with a trailing slash. | ||
| 1252 | if obj.type == AREGTYPE and obj.name.endswith("/"): | ||
| 1253 | obj.type = DIRTYPE | ||
| 1254 | |||
| 1255 | # The old GNU sparse format occupies some of the unused | ||
| 1256 | # space in the buffer for up to 4 sparse structures. | ||
| 1257 | # Save them for later processing in _proc_sparse(). | ||
| 1258 | if obj.type == GNUTYPE_SPARSE: | ||
| 1259 | pos = 386 | ||
| 1260 | structs = [] | ||
| 1261 | for i in range(4): | ||
| 1262 | try: | ||
| 1263 | offset = nti(buf[pos:pos + 12]) | ||
| 1264 | numbytes = nti(buf[pos + 12:pos + 24]) | ||
| 1265 | except ValueError: | ||
| 1266 | break | ||
| 1267 | structs.append((offset, numbytes)) | ||
| 1268 | pos += 24 | ||
| 1269 | isextended = buf[482:483] != NUL  # nonzero flag byte; slice is 2/3-safe | ||
| 1270 | origsize = nti(buf[483:495]) | ||
| 1271 | obj._sparse_structs = (structs, isextended, origsize) | ||
| 1272 | |||
| 1273 | # Remove redundant slashes from directories. | ||
| 1274 | if obj.isdir(): | ||
| 1275 | obj.name = obj.name.rstrip("/") | ||
| 1276 | |||
| 1277 | # Reconstruct a ustar longname. | ||
| 1278 | if prefix and obj.type not in GNU_TYPES: | ||
| 1279 | obj.name = prefix + "/" + obj.name | ||
| 1280 | return obj | ||
| 1281 | |||
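# A round-trip sketch using the stdlib tarfile module, which exposes
# the same tobuf()/frombuf() pair:
import tarfile as _tarfile
ti = _tarfile.TarInfo("hello.txt")
ti.size = 11
block = ti.tobuf(format=_tarfile.USTAR_FORMAT)
assert len(block) == 512
parsed = _tarfile.TarInfo.frombuf(block, "utf-8", "surrogateescape")
assert (parsed.name, parsed.size) == ("hello.txt", 11)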
| 1282 | @classmethod | ||
| 1283 | def fromtarfile(cls, tarfile): | ||
| 1284 | """Return the next TarInfo object from TarFile object | ||
| 1285 | tarfile. | ||
| 1286 | """ | ||
| 1287 | buf = tarfile.fileobj.read(BLOCKSIZE) | ||
| 1288 | obj = cls.frombuf(buf, tarfile.encoding, tarfile.errors) | ||
| 1289 | obj.offset = tarfile.fileobj.tell() - BLOCKSIZE | ||
| 1290 | return obj._proc_member(tarfile) | ||
| 1291 | |||
| 1292 | #-------------------------------------------------------------------------- | ||
| 1293 | # The following are methods that are called depending on the type of a | ||
| 1294 | # member. The entry point is _proc_member() which can be overridden in a | ||
| 1295 | # subclass to add custom _proc_*() methods. A _proc_*() method MUST | ||
| 1296 | # implement the following | ||
| 1297 | # operations: | ||
| 1298 | # 1. Set self.offset_data to the position where the data blocks begin, | ||
| 1299 | # if there is data that follows. | ||
| 1300 | # 2. Set tarfile.offset to the position where the next member's header will | ||
| 1301 | # begin. | ||
| 1302 | # 3. Return self or another valid TarInfo object. | ||
| 1303 | def _proc_member(self, tarfile): | ||
| 1304 | """Choose the right processing method depending on | ||
| 1305 | the type and call it. | ||
| 1306 | """ | ||
| 1307 | if self.type in (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK): | ||
| 1308 | return self._proc_gnulong(tarfile) | ||
| 1309 | elif self.type == GNUTYPE_SPARSE: | ||
| 1310 | return self._proc_sparse(tarfile) | ||
| 1311 | elif self.type in (XHDTYPE, XGLTYPE, SOLARIS_XHDTYPE): | ||
| 1312 | return self._proc_pax(tarfile) | ||
| 1313 | else: | ||
| 1314 | return self._proc_builtin(tarfile) | ||
| 1315 | |||
| 1316 | def _proc_builtin(self, tarfile): | ||
| 1317 | """Process a builtin type or an unknown type which | ||
| 1318 | will be treated as a regular file. | ||
| 1319 | """ | ||
| 1320 | self.offset_data = tarfile.fileobj.tell() | ||
| 1321 | offset = self.offset_data | ||
| 1322 | if self.isreg() or self.type not in SUPPORTED_TYPES: | ||
| 1323 | # Skip the following data blocks. | ||
| 1324 | offset += self._block(self.size) | ||
| 1325 | tarfile.offset = offset | ||
| 1326 | |||
| 1327 | # Patch the TarInfo object with saved global | ||
| 1328 | # header information. | ||
| 1329 | self._apply_pax_info(tarfile.pax_headers, tarfile.encoding, tarfile.errors) | ||
| 1330 | |||
| 1331 | return self | ||
| 1332 | |||
| 1333 | def _proc_gnulong(self, tarfile): | ||
| 1334 | """Process the blocks that hold a GNU longname | ||
| 1335 | or longlink member. | ||
| 1336 | """ | ||
| 1337 | buf = tarfile.fileobj.read(self._block(self.size)) | ||
| 1338 | |||
| 1339 | # Fetch the next header and process it. | ||
| 1340 | try: | ||
| 1341 | next = self.fromtarfile(tarfile) | ||
| 1342 | except HeaderError: | ||
| 1343 | raise SubsequentHeaderError("missing or bad subsequent header") | ||
| 1344 | |||
| 1345 | # Patch the TarInfo object from the next header with | ||
| 1346 | # the longname information. | ||
| 1347 | next.offset = self.offset | ||
| 1348 | if self.type == GNUTYPE_LONGNAME: | ||
| 1349 | next.name = nts(buf, tarfile.encoding, tarfile.errors) | ||
| 1350 | elif self.type == GNUTYPE_LONGLINK: | ||
| 1351 | next.linkname = nts(buf, tarfile.encoding, tarfile.errors) | ||
| 1352 | |||
| 1353 | return next | ||
| 1354 | |||
| 1355 | def _proc_sparse(self, tarfile): | ||
| 1356 | """Process a GNU sparse header plus extra headers. | ||
| 1357 | """ | ||
| 1358 | # We already collected some sparse structures in frombuf(). | ||
| 1359 | structs, isextended, origsize = self._sparse_structs | ||
| 1360 | del self._sparse_structs | ||
| 1361 | |||
| 1362 | # Collect sparse structures from extended header blocks. | ||
| 1363 | while isextended: | ||
| 1364 | buf = tarfile.fileobj.read(BLOCKSIZE) | ||
| 1365 | pos = 0 | ||
| 1366 | for i in range(21): | ||
| 1367 | try: | ||
| 1368 | offset = nti(buf[pos:pos + 12]) | ||
| 1369 | numbytes = nti(buf[pos + 12:pos + 24]) | ||
| 1370 | except ValueError: | ||
| 1371 | break | ||
| 1372 | if offset and numbytes: | ||
| 1373 | structs.append((offset, numbytes)) | ||
| 1374 | pos += 24 | ||
| 1375 | isextended = buf[504:505] != NUL  # nonzero flag byte; slice is 2/3-safe | ||
| 1376 | self.sparse = structs | ||
| 1377 | |||
| 1378 | self.offset_data = tarfile.fileobj.tell() | ||
| 1379 | tarfile.offset = self.offset_data + self._block(self.size) | ||
| 1380 | self.size = origsize | ||
| 1381 | return self | ||
| 1382 | |||
| 1383 | def _proc_pax(self, tarfile): | ||
| 1384 | """Process an extended or global header as described in | ||
| 1385 | POSIX.1-2008. | ||
| 1386 | """ | ||
| 1387 | # Read the header information. | ||
| 1388 | buf = tarfile.fileobj.read(self._block(self.size)) | ||
| 1389 | |||
| 1390 | # A pax header stores supplemental information for either | ||
| 1391 | # the following file (extended) or all following files | ||
| 1392 | # (global). | ||
| 1393 | if self.type == XGLTYPE: | ||
| 1394 | pax_headers = tarfile.pax_headers | ||
| 1395 | else: | ||
| 1396 | pax_headers = tarfile.pax_headers.copy() | ||
| 1397 | |||
| 1398 | # Check if the pax header contains a hdrcharset field. This tells us | ||
| 1399 | # the encoding of the path, linkpath, uname and gname fields. Normally, | ||
| 1400 | # these fields are UTF-8 encoded, but since POSIX.1-2008, tar | ||
| 1401 | # implementations have been allowed to store them as raw binary | ||
| 1402 | # strings if the translation to UTF-8 fails. | ||
| 1403 | match = re.search(br"\d+ hdrcharset=([^\n]+)\n", buf) | ||
| 1404 | if match is not None: | ||
| 1405 | pax_headers["hdrcharset"] = match.group(1).decode("utf8") | ||
| 1406 | |||
| 1407 | # For the time being, we don't care about anything other than "BINARY". | ||
| 1408 | # The only other value that is currently allowed by the standard is | ||
| 1409 | # "ISO-IR 10646 2000 UTF-8" in other words UTF-8. | ||
| 1410 | hdrcharset = pax_headers.get("hdrcharset") | ||
| 1411 | if hdrcharset == "BINARY": | ||
| 1412 | encoding = tarfile.encoding | ||
| 1413 | else: | ||
| 1414 | encoding = "utf8" | ||
| 1415 | |||
| 1416 | # Parse pax header information. A record looks like this: | ||
| 1417 | # "%d %s=%s\n" % (length, keyword, value). length is the size | ||
| 1418 | # of the complete record including the length field itself and | ||
| 1419 | # the newline. keyword and value are both UTF-8 encoded strings. | ||
| 1420 | regex = re.compile(br"(\d+) ([^=]+)=") | ||
| 1421 | pos = 0 | ||
| 1422 | while True: | ||
| 1423 | match = regex.match(buf, pos) | ||
| 1424 | if not match: | ||
| 1425 | break | ||
| 1426 | |||
| 1427 | length, keyword = match.groups() | ||
| 1428 | length = int(length) | ||
| 1429 | value = buf[match.end(2) + 1:match.start(1) + length - 1] | ||
| 1430 | |||
| 1431 | # Normally, we could just use "utf8" as the encoding and "strict" | ||
| 1432 | # as the error handler, but we better not take the risk. For | ||
| 1433 | # example, GNU tar <= 1.23 is known to store filenames it cannot | ||
| 1434 | # translate to UTF-8 as raw strings (unfortunately without a | ||
| 1435 | # hdrcharset=BINARY header). | ||
| 1436 | # We first try the strict standard encoding, and if that fails we | ||
| 1437 | # fall back on the user's encoding and error handler. | ||
| 1438 | keyword = self._decode_pax_field(keyword, "utf8", "utf8", | ||
| 1439 | tarfile.errors) | ||
| 1440 | if keyword in PAX_NAME_FIELDS: | ||
| 1441 | value = self._decode_pax_field(value, encoding, tarfile.encoding, | ||
| 1442 | tarfile.errors) | ||
| 1443 | else: | ||
| 1444 | value = self._decode_pax_field(value, "utf8", "utf8", | ||
| 1445 | tarfile.errors) | ||
| 1446 | |||
| 1447 | pax_headers[keyword] = value | ||
| 1448 | pos += length | ||
| 1449 | |||
| 1450 | # Fetch the next header. | ||
| 1451 | try: | ||
| 1452 | next = self.fromtarfile(tarfile) | ||
| 1453 | except HeaderError: | ||
| 1454 | raise SubsequentHeaderError("missing or bad subsequent header") | ||
| 1455 | |||
| 1456 | # Process GNU sparse information. | ||
| 1457 | if "GNU.sparse.map" in pax_headers: | ||
| 1458 | # GNU extended sparse format version 0.1. | ||
| 1459 | self._proc_gnusparse_01(next, pax_headers) | ||
| 1460 | |||
| 1461 | elif "GNU.sparse.size" in pax_headers: | ||
| 1462 | # GNU extended sparse format version 0.0. | ||
| 1463 | self._proc_gnusparse_00(next, pax_headers, buf) | ||
| 1464 | |||
| 1465 | elif pax_headers.get("GNU.sparse.major") == "1" and pax_headers.get("GNU.sparse.minor") == "0": | ||
| 1466 | # GNU extended sparse format version 1.0. | ||
| 1467 | self._proc_gnusparse_10(next, pax_headers, tarfile) | ||
| 1468 | |||
| 1469 | if self.type in (XHDTYPE, SOLARIS_XHDTYPE): | ||
| 1470 | # Patch the TarInfo object with the extended header info. | ||
| 1471 | next._apply_pax_info(pax_headers, tarfile.encoding, tarfile.errors) | ||
| 1472 | next.offset = self.offset | ||
| 1473 | |||
| 1474 | if "size" in pax_headers: | ||
| 1475 | # If the extended header replaces the size field, | ||
| 1476 | # we need to recalculate the offset where the next | ||
| 1477 | # header starts. | ||
| 1478 | offset = next.offset_data | ||
| 1479 | if next.isreg() or next.type not in SUPPORTED_TYPES: | ||
| 1480 | offset += next._block(next.size) | ||
| 1481 | tarfile.offset = offset | ||
| 1482 | |||
| 1483 | return next | ||
| 1484 | |||
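The record layout parsed by the loop above ("%d %s=%s\n", where the length counts the entire record including the length field and the trailing newline) can be exercised in isolation. A minimal sketch on a hand-built record, using the same regex and slicing as above (the keyword and value are purely illustrative):

    import re

    buf = b"22 path=some/file.txt\n"       # 22 == length of the whole record
    regex = re.compile(br"(\d+) ([^=]+)=")
    pos = 0
    while True:
        match = regex.match(buf, pos)
        if not match:
            break
        length = int(match.group(1))
        keyword = match.group(2).decode("utf-8")
        # the value runs from just after '=' up to the trailing newline
        value = buf[match.end(2) + 1:match.start(1) + length - 1]
        print(keyword, value.decode("utf-8"))   # -> path some/file.txt
        pos += length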
| 1485 | def _proc_gnusparse_00(self, next, pax_headers, buf): | ||
| 1486 | """Process a GNU tar extended sparse header, version 0.0. | ||
| 1487 | """ | ||
| 1488 | offsets = [] | ||
| 1489 | for match in re.finditer(br"\d+ GNU.sparse.offset=(\d+)\n", buf): | ||
| 1490 | offsets.append(int(match.group(1))) | ||
| 1491 | numbytes = [] | ||
| 1492 | for match in re.finditer(br"\d+ GNU.sparse.numbytes=(\d+)\n", buf): | ||
| 1493 | numbytes.append(int(match.group(1))) | ||
| 1494 | next.sparse = list(zip(offsets, numbytes)) | ||
| 1495 | |||
| 1496 | def _proc_gnusparse_01(self, next, pax_headers): | ||
| 1497 | """Process a GNU tar extended sparse header, version 0.1. | ||
| 1498 | """ | ||
| 1499 | sparse = [int(x) for x in pax_headers["GNU.sparse.map"].split(",")] | ||
| 1500 | next.sparse = list(zip(sparse[::2], sparse[1::2])) | ||
| 1501 | |||
| 1502 | def _proc_gnusparse_10(self, next, pax_headers, tarfile): | ||
| 1503 | """Process a GNU tar extended sparse header, version 1.0. | ||
| 1504 | """ | ||
| 1505 | fields = None | ||
| 1506 | sparse = [] | ||
| 1507 | buf = tarfile.fileobj.read(BLOCKSIZE) | ||
| 1508 | fields, buf = buf.split(b"\n", 1) | ||
| 1509 | fields = int(fields) | ||
| 1510 | while len(sparse) < fields * 2: | ||
| 1511 | if b"\n" not in buf: | ||
| 1512 | buf += tarfile.fileobj.read(BLOCKSIZE) | ||
| 1513 | number, buf = buf.split(b"\n", 1) | ||
| 1514 | sparse.append(int(number)) | ||
| 1515 | next.offset_data = tarfile.fileobj.tell() | ||
| 1516 | next.sparse = list(zip(sparse[::2], sparse[1::2])) | ||
| 1517 | |||
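All three GNU sparse variants reduce to the same thing on the TarInfo: a list of (offset, numbytes) pairs. For the 0.1 format handled by _proc_gnusparse_01, the map arrives as one comma-separated string; a stand-alone sketch of the pairing, with illustrative numbers:

    sparse_map = "0,512,4096,1024"                    # offset,numbytes,...
    sparse = [int(x) for x in sparse_map.split(",")]
    pairs = list(zip(sparse[::2], sparse[1::2]))      # pair up the flat list
    print(pairs)                                      # [(0, 512), (4096, 1024)]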
| 1518 | def _apply_pax_info(self, pax_headers, encoding, errors): | ||
| 1519 | """Replace fields with supplemental information from a previous | ||
| 1520 | pax extended or global header. | ||
| 1521 | """ | ||
| 1522 | for keyword, value in pax_headers.items(): | ||
| 1523 | if keyword == "GNU.sparse.name": | ||
| 1524 | setattr(self, "path", value) | ||
| 1525 | elif keyword == "GNU.sparse.size": | ||
| 1526 | setattr(self, "size", int(value)) | ||
| 1527 | elif keyword == "GNU.sparse.realsize": | ||
| 1528 | setattr(self, "size", int(value)) | ||
| 1529 | elif keyword in PAX_FIELDS: | ||
| 1530 | if keyword in PAX_NUMBER_FIELDS: | ||
| 1531 | try: | ||
| 1532 | value = PAX_NUMBER_FIELDS[keyword](value) | ||
| 1533 | except ValueError: | ||
| 1534 | value = 0 | ||
| 1535 | if keyword == "path": | ||
| 1536 | value = value.rstrip("/") | ||
| 1537 | setattr(self, keyword, value) | ||
| 1538 | |||
| 1539 | self.pax_headers = pax_headers.copy() | ||
| 1540 | |||
| 1541 | def _decode_pax_field(self, value, encoding, fallback_encoding, fallback_errors): | ||
| 1542 | """Decode a single field from a pax record. | ||
| 1543 | """ | ||
| 1544 | try: | ||
| 1545 | return value.decode(encoding, "strict") | ||
| 1546 | except UnicodeDecodeError: | ||
| 1547 | return value.decode(fallback_encoding, fallback_errors) | ||
| 1548 | |||
| 1549 | def _block(self, count): | ||
| 1550 | """Round up a byte count by BLOCKSIZE and return it, | ||
| 1551 | e.g. _block(834) => 1024. | ||
| 1552 | """ | ||
| 1553 | blocks, remainder = divmod(count, BLOCKSIZE) | ||
| 1554 | if remainder: | ||
| 1555 | blocks += 1 | ||
| 1556 | return blocks * BLOCKSIZE | ||
| 1557 | |||
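The rounding in _block() is plain ceiling division to the 512-byte block grid; the same arithmetic stand-alone:

    BLOCKSIZE = 512

    def block(count):
        # round count up to the next multiple of BLOCKSIZE
        blocks, remainder = divmod(count, BLOCKSIZE)
        if remainder:
            blocks += 1
        return blocks * BLOCKSIZE

    assert block(834) == 1024    # needs two blocks
    assert block(512) == 512     # already aligned, no extra block
    assert block(0) == 0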
| 1558 | def isreg(self): | ||
| 1559 | return self.type in REGULAR_TYPES | ||
| 1560 | def isfile(self): | ||
| 1561 | return self.isreg() | ||
| 1562 | def isdir(self): | ||
| 1563 | return self.type == DIRTYPE | ||
| 1564 | def issym(self): | ||
| 1565 | return self.type == SYMTYPE | ||
| 1566 | def islnk(self): | ||
| 1567 | return self.type == LNKTYPE | ||
| 1568 | def ischr(self): | ||
| 1569 | return self.type == CHRTYPE | ||
| 1570 | def isblk(self): | ||
| 1571 | return self.type == BLKTYPE | ||
| 1572 | def isfifo(self): | ||
| 1573 | return self.type == FIFOTYPE | ||
| 1574 | def issparse(self): | ||
| 1575 | return self.sparse is not None | ||
| 1576 | def isdev(self): | ||
| 1577 | return self.type in (CHRTYPE, BLKTYPE, FIFOTYPE) | ||
| 1578 | # class TarInfo | ||
| 1579 | |||
| 1580 | class TarFile(object): | ||
| 1581 | """The TarFile Class provides an interface to tar archives. | ||
| 1582 | """ | ||
| 1583 | |||
| 1584 | debug = 0 # May be set from 0 (no msgs) to 3 (all msgs) | ||
| 1585 | |||
| 1586 | dereference = False # If true, add content of linked file to the | ||
| 1587 | # tar file, else the link. | ||
| 1588 | |||
| 1589 | ignore_zeros = False # If true, skips empty or invalid blocks and | ||
| 1590 | # continues processing. | ||
| 1591 | |||
| 1592 | errorlevel = 1 # If 0, fatal errors only appear in debug | ||
| 1593 | # messages (if debug >= 0). If > 0, errors | ||
| 1594 | # are passed to the caller as exceptions. | ||
| 1595 | |||
| 1596 | format = DEFAULT_FORMAT # The format to use when creating an archive. | ||
| 1597 | |||
| 1598 | encoding = ENCODING # Encoding for 8-bit character strings. | ||
| 1599 | |||
| 1600 | errors = None # Error handler for unicode conversion. | ||
| 1601 | |||
| 1602 | tarinfo = TarInfo # The default TarInfo class to use. | ||
| 1603 | |||
| 1604 | fileobject = ExFileObject # The default ExFileObject class to use. | ||
| 1605 | |||
| 1606 | def __init__(self, name=None, mode="r", fileobj=None, format=None, | ||
| 1607 | tarinfo=None, dereference=None, ignore_zeros=None, encoding=None, | ||
| 1608 | errors="surrogateescape", pax_headers=None, debug=None, errorlevel=None): | ||
| 1609 | """Open an (uncompressed) tar archive `name'. `mode' is either 'r' to | ||
| 1610 | read from an existing archive, 'a' to append data to an existing | ||
| 1611 | file or 'w' to create a new file overwriting an existing one. `mode' | ||
| 1612 | defaults to 'r'. | ||
| 1613 | If `fileobj' is given, it is used for reading or writing data. If it | ||
| 1614 | can be determined, `mode' is overridden by `fileobj's mode. | ||
| 1615 | `fileobj' is not closed when the TarFile is closed. | ||
| 1616 | """ | ||
| 1617 | if len(mode) > 1 or mode not in "raw": | ||
| 1618 | raise ValueError("mode must be 'r', 'a' or 'w'") | ||
| 1619 | self.mode = mode | ||
| 1620 | self._mode = {"r": "rb", "a": "r+b", "w": "wb"}[mode] | ||
| 1621 | |||
| 1622 | if not fileobj: | ||
| 1623 | if self.mode == "a" and not os.path.exists(name): | ||
| 1624 | # Create nonexistent files in append mode. | ||
| 1625 | self.mode = "w" | ||
| 1626 | self._mode = "wb" | ||
| 1627 | fileobj = bltn_open(name, self._mode) | ||
| 1628 | self._extfileobj = False | ||
| 1629 | else: | ||
| 1630 | if name is None and hasattr(fileobj, "name"): | ||
| 1631 | name = fileobj.name | ||
| 1632 | if hasattr(fileobj, "mode"): | ||
| 1633 | self._mode = fileobj.mode | ||
| 1634 | self._extfileobj = True | ||
| 1635 | self.name = os.path.abspath(name) if name else None | ||
| 1636 | self.fileobj = fileobj | ||
| 1637 | |||
| 1638 | # Init attributes. | ||
| 1639 | if format is not None: | ||
| 1640 | self.format = format | ||
| 1641 | if tarinfo is not None: | ||
| 1642 | self.tarinfo = tarinfo | ||
| 1643 | if dereference is not None: | ||
| 1644 | self.dereference = dereference | ||
| 1645 | if ignore_zeros is not None: | ||
| 1646 | self.ignore_zeros = ignore_zeros | ||
| 1647 | if encoding is not None: | ||
| 1648 | self.encoding = encoding | ||
| 1649 | self.errors = errors | ||
| 1650 | |||
| 1651 | if pax_headers is not None and self.format == PAX_FORMAT: | ||
| 1652 | self.pax_headers = pax_headers | ||
| 1653 | else: | ||
| 1654 | self.pax_headers = {} | ||
| 1655 | |||
| 1656 | if debug is not None: | ||
| 1657 | self.debug = debug | ||
| 1658 | if errorlevel is not None: | ||
| 1659 | self.errorlevel = errorlevel | ||
| 1660 | |||
| 1661 | # Init datastructures. | ||
| 1662 | self.closed = False | ||
| 1663 | self.members = [] # list of members as TarInfo objects | ||
| 1664 | self._loaded = False # flag if all members have been read | ||
| 1665 | self.offset = self.fileobj.tell() | ||
| 1666 | # current position in the archive file | ||
| 1667 | self.inodes = {} # dictionary caching the inodes of | ||
| 1668 | # archive members already added | ||
| 1669 | |||
| 1670 | try: | ||
| 1671 | if self.mode == "r": | ||
| 1672 | self.firstmember = None | ||
| 1673 | self.firstmember = self.next() | ||
| 1674 | |||
| 1675 | if self.mode == "a": | ||
| 1676 | # Move to the end of the archive, | ||
| 1677 | # before the first empty block. | ||
| 1678 | while True: | ||
| 1679 | self.fileobj.seek(self.offset) | ||
| 1680 | try: | ||
| 1681 | tarinfo = self.tarinfo.fromtarfile(self) | ||
| 1682 | self.members.append(tarinfo) | ||
| 1683 | except EOFHeaderError: | ||
| 1684 | self.fileobj.seek(self.offset) | ||
| 1685 | break | ||
| 1686 | except HeaderError as e: | ||
| 1687 | raise ReadError(str(e)) | ||
| 1688 | |||
| 1689 | if self.mode in "aw": | ||
| 1690 | self._loaded = True | ||
| 1691 | |||
| 1692 | if self.pax_headers: | ||
| 1693 | buf = self.tarinfo.create_pax_global_header(self.pax_headers.copy()) | ||
| 1694 | self.fileobj.write(buf) | ||
| 1695 | self.offset += len(buf) | ||
| 1696 | except: | ||
| 1697 | if not self._extfileobj: | ||
| 1698 | self.fileobj.close() | ||
| 1699 | self.closed = True | ||
| 1700 | raise | ||
| 1701 | |||
| 1702 | #-------------------------------------------------------------------------- | ||
| 1703 | # Below are the classmethods which act as alternate constructors to the | ||
| 1704 | # TarFile class. The open() method is the only one that is needed for | ||
| 1705 | # public use; it is the "super"-constructor and is able to select an | ||
| 1706 | # adequate "sub"-constructor for a particular compression using the mapping | ||
| 1707 | # from OPEN_METH. | ||
| 1708 | # | ||
| 1709 | # This concept allows one to subclass TarFile without losing the comfort of | ||
| 1710 | # the super-constructor. A sub-constructor is registered and made available | ||
| 1711 | # by adding it to the mapping in OPEN_METH. | ||
| 1712 | |||
| 1713 | @classmethod | ||
| 1714 | def open(cls, name=None, mode="r", fileobj=None, bufsize=RECORDSIZE, **kwargs): | ||
| 1715 | """Open a tar archive for reading, writing or appending. Return | ||
| 1716 | an appropriate TarFile class. | ||
| 1717 | |||
| 1718 | mode: | ||
| 1719 | 'r' or 'r:*' open for reading with transparent compression | ||
| 1720 | 'r:' open for reading exclusively uncompressed | ||
| 1721 | 'r:gz' open for reading with gzip compression | ||
| 1722 | 'r:bz2' open for reading with bzip2 compression | ||
| 1723 | 'a' or 'a:' open for appending, creating the file if necessary | ||
| 1724 | 'w' or 'w:' open for writing without compression | ||
| 1725 | 'w:gz' open for writing with gzip compression | ||
| 1726 | 'w:bz2' open for writing with bzip2 compression | ||
| 1727 | |||
| 1728 | 'r|*' open a stream of tar blocks with transparent compression | ||
| 1729 | 'r|' open an uncompressed stream of tar blocks for reading | ||
| 1730 | 'r|gz' open a gzip compressed stream of tar blocks | ||
| 1731 | 'r|bz2' open a bzip2 compressed stream of tar blocks | ||
| 1732 | 'w|' open an uncompressed stream for writing | ||
| 1733 | 'w|gz' open a gzip compressed stream for writing | ||
| 1734 | 'w|bz2' open a bzip2 compressed stream for writing | ||
| 1735 | """ | ||
| 1736 | |||
| 1737 | if not name and not fileobj: | ||
| 1738 | raise ValueError("nothing to open") | ||
| 1739 | |||
| 1740 | if mode in ("r", "r:*"): | ||
| 1741 | # Find out which *open() is appropriate for opening the file. | ||
| 1742 | for comptype in cls.OPEN_METH: | ||
| 1743 | func = getattr(cls, cls.OPEN_METH[comptype]) | ||
| 1744 | if fileobj is not None: | ||
| 1745 | saved_pos = fileobj.tell() | ||
| 1746 | try: | ||
| 1747 | return func(name, "r", fileobj, **kwargs) | ||
| 1748 | except (ReadError, CompressionError) as e: | ||
| 1749 | if fileobj is not None: | ||
| 1750 | fileobj.seek(saved_pos) | ||
| 1751 | continue | ||
| 1752 | raise ReadError("file could not be opened successfully") | ||
| 1753 | |||
| 1754 | elif ":" in mode: | ||
| 1755 | filemode, comptype = mode.split(":", 1) | ||
| 1756 | filemode = filemode or "r" | ||
| 1757 | comptype = comptype or "tar" | ||
| 1758 | |||
| 1759 | # Select the *open() function according to | ||
| 1760 | # given compression. | ||
| 1761 | if comptype in cls.OPEN_METH: | ||
| 1762 | func = getattr(cls, cls.OPEN_METH[comptype]) | ||
| 1763 | else: | ||
| 1764 | raise CompressionError("unknown compression type %r" % comptype) | ||
| 1765 | return func(name, filemode, fileobj, **kwargs) | ||
| 1766 | |||
| 1767 | elif "|" in mode: | ||
| 1768 | filemode, comptype = mode.split("|", 1) | ||
| 1769 | filemode = filemode or "r" | ||
| 1770 | comptype = comptype or "tar" | ||
| 1771 | |||
| 1772 | if filemode not in "rw": | ||
| 1773 | raise ValueError("mode must be 'r' or 'w'") | ||
| 1774 | |||
| 1775 | stream = _Stream(name, filemode, comptype, fileobj, bufsize) | ||
| 1776 | try: | ||
| 1777 | t = cls(name, filemode, stream, **kwargs) | ||
| 1778 | except: | ||
| 1779 | stream.close() | ||
| 1780 | raise | ||
| 1781 | t._extfileobj = False | ||
| 1782 | return t | ||
| 1783 | |||
| 1784 | elif mode in "aw": | ||
| 1785 | return cls.taropen(name, mode, fileobj, **kwargs) | ||
| 1786 | |||
| 1787 | raise ValueError("undiscernible mode") | ||
| 1788 | |||
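This backport keeps the same mode grammar as the standard library's tarfile, so the modes listed in the docstring above can be tried against stdlib tarfile directly. A small sketch (file names are illustrative):

    import tarfile

    with open("somefile.txt", "w") as f:                  # something to archive
        f.write("hello\n")

    with tarfile.open("example.tar.gz", "w:gz") as tf:    # write, gzip-compressed
        tf.add("somefile.txt")

    with tarfile.open("example.tar.gz", "r:*") as tf:     # read, compression sniffed
        print(tf.getnames())                              # ['somefile.txt']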
| 1789 | @classmethod | ||
| 1790 | def taropen(cls, name, mode="r", fileobj=None, **kwargs): | ||
| 1791 | """Open uncompressed tar archive name for reading or writing. | ||
| 1792 | """ | ||
| 1793 | if len(mode) > 1 or mode not in "raw": | ||
| 1794 | raise ValueError("mode must be 'r', 'a' or 'w'") | ||
| 1795 | return cls(name, mode, fileobj, **kwargs) | ||
| 1796 | |||
| 1797 | @classmethod | ||
| 1798 | def gzopen(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs): | ||
| 1799 | """Open gzip compressed tar archive name for reading or writing. | ||
| 1800 | Appending is not allowed. | ||
| 1801 | """ | ||
| 1802 | if len(mode) > 1 or mode not in "rw": | ||
| 1803 | raise ValueError("mode must be 'r' or 'w'") | ||
| 1804 | |||
| 1805 | try: | ||
| 1806 | import gzip | ||
| 1807 | gzip.GzipFile | ||
| 1808 | except (ImportError, AttributeError): | ||
| 1809 | raise CompressionError("gzip module is not available") | ||
| 1810 | |||
| 1811 | extfileobj = fileobj is not None | ||
| 1812 | try: | ||
| 1813 | fileobj = gzip.GzipFile(name, mode + "b", compresslevel, fileobj) | ||
| 1814 | t = cls.taropen(name, mode, fileobj, **kwargs) | ||
| 1815 | except IOError: | ||
| 1816 | if not extfileobj and fileobj is not None: | ||
| 1817 | fileobj.close() | ||
| 1818 | if fileobj is None: | ||
| 1819 | raise | ||
| 1820 | raise ReadError("not a gzip file") | ||
| 1821 | except: | ||
| 1822 | if not extfileobj and fileobj is not None: | ||
| 1823 | fileobj.close() | ||
| 1824 | raise | ||
| 1825 | t._extfileobj = extfileobj | ||
| 1826 | return t | ||
| 1827 | |||
| 1828 | @classmethod | ||
| 1829 | def bz2open(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs): | ||
| 1830 | """Open bzip2 compressed tar archive name for reading or writing. | ||
| 1831 | Appending is not allowed. | ||
| 1832 | """ | ||
| 1833 | if len(mode) > 1 or mode not in "rw": | ||
| 1834 | raise ValueError("mode must be 'r' or 'w'.") | ||
| 1835 | |||
| 1836 | try: | ||
| 1837 | import bz2 | ||
| 1838 | except ImportError: | ||
| 1839 | raise CompressionError("bz2 module is not available") | ||
| 1840 | |||
| 1841 | if fileobj is not None: | ||
| 1842 | fileobj = _BZ2Proxy(fileobj, mode) | ||
| 1843 | else: | ||
| 1844 | fileobj = bz2.BZ2File(name, mode, compresslevel=compresslevel) | ||
| 1845 | |||
| 1846 | try: | ||
| 1847 | t = cls.taropen(name, mode, fileobj, **kwargs) | ||
| 1848 | except (IOError, EOFError): | ||
| 1849 | fileobj.close() | ||
| 1850 | raise ReadError("not a bzip2 file") | ||
| 1851 | t._extfileobj = False | ||
| 1852 | return t | ||
| 1853 | |||
| 1854 | # All *open() methods are registered here. | ||
| 1855 | OPEN_METH = { | ||
| 1856 | "tar": "taropen", # uncompressed tar | ||
| 1857 | "gz": "gzopen", # gzip compressed tar | ||
| 1858 | "bz2": "bz2open" # bzip2 compressed tar | ||
| 1859 | } | ||
| 1860 | |||
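As the comment block before open() notes, a sub-constructor becomes available simply by adding it to OPEN_METH. A sketch of what registering an extra compression could look like, written against the stdlib tarfile that shares this API (stdlib Python 3 already ships xz support natively; this only illustrates the mechanism, and the error handling the real *open() methods do is omitted):

    import lzma
    import tarfile

    class XZTarFile(tarfile.TarFile):
        # extend the mapping so "r:xz" / "w:xz" route to xzopen()
        OPEN_METH = dict(tarfile.TarFile.OPEN_METH, xz="xzopen")

        @classmethod
        def xzopen(cls, name, mode="r", fileobj=None, **kwargs):
            if mode not in ("r", "w"):
                raise ValueError("mode must be 'r' or 'w'")
            if fileobj is None:
                fileobj = lzma.LZMAFile(name, mode + "b")
            t = cls.taropen(name, mode, fileobj, **kwargs)
            t._extfileobj = False
            return t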
| 1861 | #-------------------------------------------------------------------------- | ||
| 1862 | # The public methods which TarFile provides: | ||
| 1863 | |||
| 1864 | def close(self): | ||
| 1865 | """Close the TarFile. In write-mode, two finishing zero blocks are | ||
| 1866 | appended to the archive. | ||
| 1867 | """ | ||
| 1868 | if self.closed: | ||
| 1869 | return | ||
| 1870 | |||
| 1871 | if self.mode in "aw": | ||
| 1872 | self.fileobj.write(NUL * (BLOCKSIZE * 2)) | ||
| 1873 | self.offset += (BLOCKSIZE * 2) | ||
| 1874 | # fill up the end with zero-blocks | ||
| 1875 | # (like option -b20 for tar does) | ||
| 1876 | blocks, remainder = divmod(self.offset, RECORDSIZE) | ||
| 1877 | if remainder > 0: | ||
| 1878 | self.fileobj.write(NUL * (RECORDSIZE - remainder)) | ||
| 1879 | |||
| 1880 | if not self._extfileobj: | ||
| 1881 | self.fileobj.close() | ||
| 1882 | self.closed = True | ||
| 1883 | |||
| 1884 | def getmember(self, name): | ||
| 1885 | """Return a TarInfo object for member `name'. If `name' can not be | ||
| 1886 | found in the archive, KeyError is raised. If a member occurs more | ||
| 1887 | than once in the archive, its last occurrence is assumed to be the | ||
| 1888 | most up-to-date version. | ||
| 1889 | """ | ||
| 1890 | tarinfo = self._getmember(name) | ||
| 1891 | if tarinfo is None: | ||
| 1892 | raise KeyError("filename %r not found" % name) | ||
| 1893 | return tarinfo | ||
| 1894 | |||
| 1895 | def getmembers(self): | ||
| 1896 | """Return the members of the archive as a list of TarInfo objects. The | ||
| 1897 | list has the same order as the members in the archive. | ||
| 1898 | """ | ||
| 1899 | self._check() | ||
| 1900 | if not self._loaded: # if we want to obtain a list of | ||
| 1901 | self._load() # all members, we first have to | ||
| 1902 | # scan the whole archive. | ||
| 1903 | return self.members | ||
| 1904 | |||
| 1905 | def getnames(self): | ||
| 1906 | """Return the members of the archive as a list of their names. It has | ||
| 1907 | the same order as the list returned by getmembers(). | ||
| 1908 | """ | ||
| 1909 | return [tarinfo.name for tarinfo in self.getmembers()] | ||
| 1910 | |||
| 1911 | def gettarinfo(self, name=None, arcname=None, fileobj=None): | ||
| 1912 | """Create a TarInfo object for either the file `name' or the file | ||
| 1913 | object `fileobj' (using os.fstat on its file descriptor). You can | ||
| 1914 | modify some of the TarInfo's attributes before you add it using | ||
| 1915 | addfile(). If given, `arcname' specifies an alternative name for the | ||
| 1916 | file in the archive. | ||
| 1917 | """ | ||
| 1918 | self._check("aw") | ||
| 1919 | |||
| 1920 | # When fileobj is given, replace name by | ||
| 1921 | # fileobj's real name. | ||
| 1922 | if fileobj is not None: | ||
| 1923 | name = fileobj.name | ||
| 1924 | |||
| 1925 | # Building the name of the member in the archive. | ||
| 1926 | # Backward slashes are converted to forward slashes, and | ||
| 1927 | # absolute paths are turned into relative paths. | ||
| 1928 | if arcname is None: | ||
| 1929 | arcname = name | ||
| 1930 | drv, arcname = os.path.splitdrive(arcname) | ||
| 1931 | arcname = arcname.replace(os.sep, "/") | ||
| 1932 | arcname = arcname.lstrip("/") | ||
| 1933 | |||
| 1934 | # Now, fill the TarInfo object with | ||
| 1935 | # information specific to the file. | ||
| 1936 | tarinfo = self.tarinfo() | ||
| 1937 | tarinfo.tarfile = self | ||
| 1938 | |||
| 1939 | # Use os.stat or os.lstat, depending on platform | ||
| 1940 | # and if symlinks shall be resolved. | ||
| 1941 | if fileobj is None: | ||
| 1942 | if hasattr(os, "lstat") and not self.dereference: | ||
| 1943 | statres = os.lstat(name) | ||
| 1944 | else: | ||
| 1945 | statres = os.stat(name) | ||
| 1946 | else: | ||
| 1947 | statres = os.fstat(fileobj.fileno()) | ||
| 1948 | linkname = "" | ||
| 1949 | |||
| 1950 | stmd = statres.st_mode | ||
| 1951 | if stat.S_ISREG(stmd): | ||
| 1952 | inode = (statres.st_ino, statres.st_dev) | ||
| 1953 | if not self.dereference and statres.st_nlink > 1 and \ | ||
| 1954 | inode in self.inodes and arcname != self.inodes[inode]: | ||
| 1955 | # Is it a hardlink to an already | ||
| 1956 | # archived file? | ||
| 1957 | type = LNKTYPE | ||
| 1958 | linkname = self.inodes[inode] | ||
| 1959 | else: | ||
| 1960 | # The inode is added only if it's valid. | ||
| 1961 | # For win32 it is always 0. | ||
| 1962 | type = REGTYPE | ||
| 1963 | if inode[0]: | ||
| 1964 | self.inodes[inode] = arcname | ||
| 1965 | elif stat.S_ISDIR(stmd): | ||
| 1966 | type = DIRTYPE | ||
| 1967 | elif stat.S_ISFIFO(stmd): | ||
| 1968 | type = FIFOTYPE | ||
| 1969 | elif stat.S_ISLNK(stmd): | ||
| 1970 | type = SYMTYPE | ||
| 1971 | linkname = os.readlink(name) | ||
| 1972 | elif stat.S_ISCHR(stmd): | ||
| 1973 | type = CHRTYPE | ||
| 1974 | elif stat.S_ISBLK(stmd): | ||
| 1975 | type = BLKTYPE | ||
| 1976 | else: | ||
| 1977 | return None | ||
| 1978 | |||
| 1979 | # Fill the TarInfo object with all | ||
| 1980 | # information we can get. | ||
| 1981 | tarinfo.name = arcname | ||
| 1982 | tarinfo.mode = stmd | ||
| 1983 | tarinfo.uid = statres.st_uid | ||
| 1984 | tarinfo.gid = statres.st_gid | ||
| 1985 | if type == REGTYPE: | ||
| 1986 | tarinfo.size = statres.st_size | ||
| 1987 | else: | ||
| 1988 | tarinfo.size = 0 | ||
| 1989 | tarinfo.mtime = statres.st_mtime | ||
| 1990 | tarinfo.type = type | ||
| 1991 | tarinfo.linkname = linkname | ||
| 1992 | if pwd: | ||
| 1993 | try: | ||
| 1994 | tarinfo.uname = pwd.getpwuid(tarinfo.uid)[0] | ||
| 1995 | except KeyError: | ||
| 1996 | pass | ||
| 1997 | if grp: | ||
| 1998 | try: | ||
| 1999 | tarinfo.gname = grp.getgrgid(tarinfo.gid)[0] | ||
| 2000 | except KeyError: | ||
| 2001 | pass | ||
| 2002 | |||
| 2003 | if type in (CHRTYPE, BLKTYPE): | ||
| 2004 | if hasattr(os, "major") and hasattr(os, "minor"): | ||
| 2005 | tarinfo.devmajor = os.major(statres.st_rdev) | ||
| 2006 | tarinfo.devminor = os.minor(statres.st_rdev) | ||
| 2007 | return tarinfo | ||
| 2008 | |||
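gettarinfo() produces a TarInfo that can be adjusted before addfile() writes it, which is the intended hook for things like normalising ownership. A sketch using the stdlib API this module backports:

    import tarfile

    with open("somefile.txt", "w") as f:
        f.write("hello\n")

    with tarfile.open("out.tar", "w") as tf:
        ti = tf.gettarinfo("somefile.txt", arcname="data/somefile.txt")
        ti.uid = ti.gid = 0                   # tweak metadata before writing
        ti.uname = ti.gname = "root"
        with open("somefile.txt", "rb") as f:
            tf.addfile(ti, f)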
| 2009 | def list(self, verbose=True): | ||
| 2010 | """Print a table of contents to sys.stdout. If `verbose' is False, only | ||
| 2011 | the names of the members are printed. If it is True, an `ls -l'-like | ||
| 2012 | output is produced. | ||
| 2013 | """ | ||
| 2014 | self._check() | ||
| 2015 | |||
| 2016 | for tarinfo in self: | ||
| 2017 | if verbose: | ||
| 2018 | print(filemode(tarinfo.mode), end=' ') | ||
| 2019 | print("%s/%s" % (tarinfo.uname or tarinfo.uid, | ||
| 2020 | tarinfo.gname or tarinfo.gid), end=' ') | ||
| 2021 | if tarinfo.ischr() or tarinfo.isblk(): | ||
| 2022 | print("%10s" % ("%d,%d" \ | ||
| 2023 | % (tarinfo.devmajor, tarinfo.devminor)), end=' ') | ||
| 2024 | else: | ||
| 2025 | print("%10d" % tarinfo.size, end=' ') | ||
| 2026 | print("%d-%02d-%02d %02d:%02d:%02d" \ | ||
| 2027 | % time.localtime(tarinfo.mtime)[:6], end=' ') | ||
| 2028 | |||
| 2029 | print(tarinfo.name + ("/" if tarinfo.isdir() else ""), end=' ') | ||
| 2030 | |||
| 2031 | if verbose: | ||
| 2032 | if tarinfo.issym(): | ||
| 2033 | print("->", tarinfo.linkname, end=' ') | ||
| 2034 | if tarinfo.islnk(): | ||
| 2035 | print("link to", tarinfo.linkname, end=' ') | ||
| 2036 | print() | ||
| 2037 | |||
| 2038 | def add(self, name, arcname=None, recursive=True, exclude=None, filter=None): | ||
| 2039 | """Add the file `name' to the archive. `name' may be any type of file | ||
| 2040 | (directory, fifo, symbolic link, etc.). If given, `arcname' | ||
| 2041 | specifies an alternative name for the file in the archive. | ||
| 2042 | Directories are added recursively by default. This can be avoided by | ||
| 2043 | setting `recursive' to False. `exclude' is a function that should | ||
| 2044 | return True for each filename to be excluded. `filter' is a function | ||
| 2045 | that expects a TarInfo object argument and returns the changed | ||
| 2046 | TarInfo object, if it returns None the TarInfo object will be | ||
| 2047 | excluded from the archive. | ||
| 2048 | """ | ||
| 2049 | self._check("aw") | ||
| 2050 | |||
| 2051 | if arcname is None: | ||
| 2052 | arcname = name | ||
| 2053 | |||
| 2054 | # Exclude pathnames. | ||
| 2055 | if exclude is not None: | ||
| 2056 | import warnings | ||
| 2057 | warnings.warn("use the filter argument instead", | ||
| 2058 | DeprecationWarning, 2) | ||
| 2059 | if exclude(name): | ||
| 2060 | self._dbg(2, "tarfile: Excluded %r" % name) | ||
| 2061 | return | ||
| 2062 | |||
| 2063 | # Skip if somebody tries to archive the archive... | ||
| 2064 | if self.name is not None and os.path.abspath(name) == self.name: | ||
| 2065 | self._dbg(2, "tarfile: Skipped %r" % name) | ||
| 2066 | return | ||
| 2067 | |||
| 2068 | self._dbg(1, name) | ||
| 2069 | |||
| 2070 | # Create a TarInfo object from the file. | ||
| 2071 | tarinfo = self.gettarinfo(name, arcname) | ||
| 2072 | |||
| 2073 | if tarinfo is None: | ||
| 2074 | self._dbg(1, "tarfile: Unsupported type %r" % name) | ||
| 2075 | return | ||
| 2076 | |||
| 2077 | # Change or exclude the TarInfo object. | ||
| 2078 | if filter is not None: | ||
| 2079 | tarinfo = filter(tarinfo) | ||
| 2080 | if tarinfo is None: | ||
| 2081 | self._dbg(2, "tarfile: Excluded %r" % name) | ||
| 2082 | return | ||
| 2083 | |||
| 2084 | # Append the tar header and data to the archive. | ||
| 2085 | if tarinfo.isreg(): | ||
| 2086 | f = bltn_open(name, "rb") | ||
| 2087 | self.addfile(tarinfo, f) | ||
| 2088 | f.close() | ||
| 2089 | |||
| 2090 | elif tarinfo.isdir(): | ||
| 2091 | self.addfile(tarinfo) | ||
| 2092 | if recursive: | ||
| 2093 | for f in os.listdir(name): | ||
| 2094 | self.add(os.path.join(name, f), os.path.join(arcname, f), | ||
| 2095 | recursive, exclude, filter=filter) | ||
| 2096 | |||
| 2097 | else: | ||
| 2098 | self.addfile(tarinfo) | ||
| 2099 | |||
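The filter callback receives each TarInfo and may edit it or return None to drop the member, as the docstring above describes. A sketch on a toy tree (names are illustrative):

    import os
    import tarfile

    os.makedirs("pkg", exist_ok=True)             # build a toy tree
    with open("pkg/mod.py", "w") as f:
        f.write("x = 1\n")
    open("pkg/mod.pyc", "w").close()              # stand-in for a compiled file

    def strip_owner(ti):
        if ti.name.endswith(".pyc"):
            return None                           # returning None excludes it
        ti.uid = ti.gid = 0                       # anonymise ownership
        ti.uname = ti.gname = "root"
        return ti

    with tarfile.open("src.tar", "w") as tf:
        tf.add("pkg", filter=strip_owner)

    with tarfile.open("src.tar") as tf:
        print(tf.getnames())                      # e.g. ['pkg', 'pkg/mod.py']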
| 2100 | def addfile(self, tarinfo, fileobj=None): | ||
| 2101 | """Add the TarInfo object `tarinfo' to the archive. If `fileobj' is | ||
| 2102 | given, tarinfo.size bytes are read from it and added to the archive. | ||
| 2103 | You can create TarInfo objects using gettarinfo(). | ||
| 2104 | On Windows platforms, `fileobj' should always be opened with mode | ||
| 2105 | 'rb' to avoid problems caused by newline conversion distorting the file size. | ||
| 2106 | """ | ||
| 2107 | self._check("aw") | ||
| 2108 | |||
| 2109 | tarinfo = copy.copy(tarinfo) | ||
| 2110 | |||
| 2111 | buf = tarinfo.tobuf(self.format, self.encoding, self.errors) | ||
| 2112 | self.fileobj.write(buf) | ||
| 2113 | self.offset += len(buf) | ||
| 2114 | |||
| 2115 | # If there's data to follow, append it. | ||
| 2116 | if fileobj is not None: | ||
| 2117 | copyfileobj(fileobj, self.fileobj, tarinfo.size) | ||
| 2118 | blocks, remainder = divmod(tarinfo.size, BLOCKSIZE) | ||
| 2119 | if remainder > 0: | ||
| 2120 | self.fileobj.write(NUL * (BLOCKSIZE - remainder)) | ||
| 2121 | blocks += 1 | ||
| 2122 | self.offset += blocks * BLOCKSIZE | ||
| 2123 | |||
| 2124 | self.members.append(tarinfo) | ||
| 2125 | |||
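Because addfile() only copies tarinfo.size bytes from fileobj and then pads to the block grid, a member can be synthesised entirely in memory. A sketch:

    import io
    import tarfile

    payload = b"hello from memory\n"
    ti = tarfile.TarInfo(name="generated.txt")
    ti.size = len(payload)                    # size must match the payload

    with tarfile.open("mem.tar", "w") as tf:
        tf.addfile(ti, io.BytesIO(payload))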
| 2126 | def extractall(self, path=".", members=None): | ||
| 2127 | """Extract all members from the archive to the current working | ||
| 2128 | directory and set owner, modification time and permissions on | ||
| 2129 | directories afterwards. `path' specifies a different directory | ||
| 2130 | to extract to. `members' is optional and must be a subset of the | ||
| 2131 | list returned by getmembers(). | ||
| 2132 | """ | ||
| 2133 | directories = [] | ||
| 2134 | |||
| 2135 | if members is None: | ||
| 2136 | members = self | ||
| 2137 | |||
| 2138 | for tarinfo in members: | ||
| 2139 | if tarinfo.isdir(): | ||
| 2140 | # Extract directories with a safe mode. | ||
| 2141 | directories.append(tarinfo) | ||
| 2142 | tarinfo = copy.copy(tarinfo) | ||
| 2143 | tarinfo.mode = 0o700 | ||
| 2144 | # Do not set_attrs directories, as we will do that further down | ||
| 2145 | self.extract(tarinfo, path, set_attrs=not tarinfo.isdir()) | ||
| 2146 | |||
| 2147 | # Reverse sort directories. | ||
| 2148 | directories.sort(key=lambda a: a.name) | ||
| 2149 | directories.reverse() | ||
| 2150 | |||
| 2151 | # Set correct owner, mtime and filemode on directories. | ||
| 2152 | for tarinfo in directories: | ||
| 2153 | dirpath = os.path.join(path, tarinfo.name) | ||
| 2154 | try: | ||
| 2155 | self.chown(tarinfo, dirpath) | ||
| 2156 | self.utime(tarinfo, dirpath) | ||
| 2157 | self.chmod(tarinfo, dirpath) | ||
| 2158 | except ExtractError as e: | ||
| 2159 | if self.errorlevel > 1: | ||
| 2160 | raise | ||
| 2161 | else: | ||
| 2162 | self._dbg(1, "tarfile: %s" % e) | ||
| 2163 | |||
| 2164 | def extract(self, member, path="", set_attrs=True): | ||
| 2165 | """Extract a member from the archive to the current working directory, | ||
| 2166 | using its full name. Its file information is extracted as accurately | ||
| 2167 | as possible. `member' may be a filename or a TarInfo object. You can | ||
| 2168 | specify a different directory using `path'. File attributes (owner, | ||
| 2169 | mtime, mode) are set unless `set_attrs' is False. | ||
| 2170 | """ | ||
| 2171 | self._check("r") | ||
| 2172 | |||
| 2173 | if isinstance(member, str): | ||
| 2174 | tarinfo = self.getmember(member) | ||
| 2175 | else: | ||
| 2176 | tarinfo = member | ||
| 2177 | |||
| 2178 | # Prepare the link target for makelink(). | ||
| 2179 | if tarinfo.islnk(): | ||
| 2180 | tarinfo._link_target = os.path.join(path, tarinfo.linkname) | ||
| 2181 | |||
| 2182 | try: | ||
| 2183 | self._extract_member(tarinfo, os.path.join(path, tarinfo.name), | ||
| 2184 | set_attrs=set_attrs) | ||
| 2185 | except EnvironmentError as e: | ||
| 2186 | if self.errorlevel > 0: | ||
| 2187 | raise | ||
| 2188 | else: | ||
| 2189 | if e.filename is None: | ||
| 2190 | self._dbg(1, "tarfile: %s" % e.strerror) | ||
| 2191 | else: | ||
| 2192 | self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename)) | ||
| 2193 | except ExtractError as e: | ||
| 2194 | if self.errorlevel > 1: | ||
| 2195 | raise | ||
| 2196 | else: | ||
| 2197 | self._dbg(1, "tarfile: %s" % e) | ||
| 2198 | |||
| 2199 | def extractfile(self, member): | ||
| 2200 | """Extract a member from the archive as a file object. `member' may be | ||
| 2201 | a filename or a TarInfo object. If `member' is a regular file, a | ||
| 2202 | file-like object is returned. If `member' is a link, a file-like | ||
| 2203 | object is constructed from the link's target. If `member' is none of | ||
| 2204 | the above, None is returned. | ||
| 2205 | The file-like object is read-only and provides the following | ||
| 2206 | methods: read(), readline(), readlines(), seek() and tell() | ||
| 2207 | """ | ||
| 2208 | self._check("r") | ||
| 2209 | |||
| 2210 | if isinstance(member, str): | ||
| 2211 | tarinfo = self.getmember(member) | ||
| 2212 | else: | ||
| 2213 | tarinfo = member | ||
| 2214 | |||
| 2215 | if tarinfo.isreg(): | ||
| 2216 | return self.fileobject(self, tarinfo) | ||
| 2217 | |||
| 2218 | elif tarinfo.type not in SUPPORTED_TYPES: | ||
| 2219 | # If a member's type is unknown, it is treated as a | ||
| 2220 | # regular file. | ||
| 2221 | return self.fileobject(self, tarinfo) | ||
| 2222 | |||
| 2223 | elif tarinfo.islnk() or tarinfo.issym(): | ||
| 2224 | if isinstance(self.fileobj, _Stream): | ||
| 2225 | # A small but ugly workaround for the case that someone tries | ||
| 2226 | # to extract a (sym)link as a file-object from a non-seekable | ||
| 2227 | # stream of tar blocks. | ||
| 2228 | raise StreamError("cannot extract (sym)link as file object") | ||
| 2229 | else: | ||
| 2230 | # A (sym)link's file object is its target's file object. | ||
| 2231 | return self.extractfile(self._find_link_target(tarinfo)) | ||
| 2232 | else: | ||
| 2233 | # If there's no data associated with the member (directory, chrdev, | ||
| 2234 | # blkdev, etc.), return None instead of a file object. | ||
| 2235 | return None | ||
| 2236 | |||
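extractfile() hands back a read-only file-like object for regular members (and, for links, their targets) and None for memberless types. A sketch reusing the archive from the earlier mode example:

    import tarfile

    with tarfile.open("example.tar.gz") as tf:
        f = tf.extractfile("somefile.txt")    # regular member -> file object
        if f is not None:                     # None for dirs, devices, fifos
            print(f.read())                   # b'hello\n'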
| 2237 | def _extract_member(self, tarinfo, targetpath, set_attrs=True): | ||
| 2238 | """Extract the TarInfo object tarinfo to a physical | ||
| 2239 | file called targetpath. | ||
| 2240 | """ | ||
| 2241 | # Fetch the TarInfo object for the given name | ||
| 2242 | # and build the destination pathname, replacing | ||
| 2243 | # forward slashes to platform specific separators. | ||
| 2244 | targetpath = targetpath.rstrip("/") | ||
| 2245 | targetpath = targetpath.replace("/", os.sep) | ||
| 2246 | |||
| 2247 | # Create all upper directories. | ||
| 2248 | upperdirs = os.path.dirname(targetpath) | ||
| 2249 | if upperdirs and not os.path.exists(upperdirs): | ||
| 2250 | # Create directories that are not part of the archive with | ||
| 2251 | # default permissions. | ||
| 2252 | os.makedirs(upperdirs) | ||
| 2253 | |||
| 2254 | if tarinfo.islnk() or tarinfo.issym(): | ||
| 2255 | self._dbg(1, "%s -> %s" % (tarinfo.name, tarinfo.linkname)) | ||
| 2256 | else: | ||
| 2257 | self._dbg(1, tarinfo.name) | ||
| 2258 | |||
| 2259 | if tarinfo.isreg(): | ||
| 2260 | self.makefile(tarinfo, targetpath) | ||
| 2261 | elif tarinfo.isdir(): | ||
| 2262 | self.makedir(tarinfo, targetpath) | ||
| 2263 | elif tarinfo.isfifo(): | ||
| 2264 | self.makefifo(tarinfo, targetpath) | ||
| 2265 | elif tarinfo.ischr() or tarinfo.isblk(): | ||
| 2266 | self.makedev(tarinfo, targetpath) | ||
| 2267 | elif tarinfo.islnk() or tarinfo.issym(): | ||
| 2268 | self.makelink(tarinfo, targetpath) | ||
| 2269 | elif tarinfo.type not in SUPPORTED_TYPES: | ||
| 2270 | self.makeunknown(tarinfo, targetpath) | ||
| 2271 | else: | ||
| 2272 | self.makefile(tarinfo, targetpath) | ||
| 2273 | |||
| 2274 | if set_attrs: | ||
| 2275 | self.chown(tarinfo, targetpath) | ||
| 2276 | if not tarinfo.issym(): | ||
| 2277 | self.chmod(tarinfo, targetpath) | ||
| 2278 | self.utime(tarinfo, targetpath) | ||
| 2279 | |||
| 2280 | #-------------------------------------------------------------------------- | ||
| 2281 | # Below are the different file methods. They are called via | ||
| 2282 | # _extract_member() when extract() is called. They can be replaced in a | ||
| 2283 | # subclass to implement other functionality. | ||
| 2284 | |||
| 2285 | def makedir(self, tarinfo, targetpath): | ||
| 2286 | """Make a directory called targetpath. | ||
| 2287 | """ | ||
| 2288 | try: | ||
| 2289 | # Use a safe mode for the directory, the real mode is set | ||
| 2290 | # later in _extract_member(). | ||
| 2291 | os.mkdir(targetpath, 0o700) | ||
| 2292 | except EnvironmentError as e: | ||
| 2293 | if e.errno != errno.EEXIST: | ||
| 2294 | raise | ||
| 2295 | |||
| 2296 | def makefile(self, tarinfo, targetpath): | ||
| 2297 | """Make a file called targetpath. | ||
| 2298 | """ | ||
| 2299 | source = self.fileobj | ||
| 2300 | source.seek(tarinfo.offset_data) | ||
| 2301 | target = bltn_open(targetpath, "wb") | ||
| 2302 | if tarinfo.sparse is not None: | ||
| 2303 | for offset, size in tarinfo.sparse: | ||
| 2304 | target.seek(offset) | ||
| 2305 | copyfileobj(source, target, size) | ||
| 2306 | else: | ||
| 2307 | copyfileobj(source, target, tarinfo.size) | ||
| 2308 | target.seek(tarinfo.size) | ||
| 2309 | target.truncate() | ||
| 2310 | target.close() | ||
| 2311 | |||
| 2312 | def makeunknown(self, tarinfo, targetpath): | ||
| 2313 | """Make a file from a TarInfo object with an unknown type | ||
| 2314 | at targetpath. | ||
| 2315 | """ | ||
| 2316 | self.makefile(tarinfo, targetpath) | ||
| 2317 | self._dbg(1, "tarfile: Unknown file type %r, " \ | ||
| 2318 | "extracted as regular file." % tarinfo.type) | ||
| 2319 | |||
| 2320 | def makefifo(self, tarinfo, targetpath): | ||
| 2321 | """Make a fifo called targetpath. | ||
| 2322 | """ | ||
| 2323 | if hasattr(os, "mkfifo"): | ||
| 2324 | os.mkfifo(targetpath) | ||
| 2325 | else: | ||
| 2326 | raise ExtractError("fifo not supported by system") | ||
| 2327 | |||
| 2328 | def makedev(self, tarinfo, targetpath): | ||
| 2329 | """Make a character or block device called targetpath. | ||
| 2330 | """ | ||
| 2331 | if not hasattr(os, "mknod") or not hasattr(os, "makedev"): | ||
| 2332 | raise ExtractError("special devices not supported by system") | ||
| 2333 | |||
| 2334 | mode = tarinfo.mode | ||
| 2335 | if tarinfo.isblk(): | ||
| 2336 | mode |= stat.S_IFBLK | ||
| 2337 | else: | ||
| 2338 | mode |= stat.S_IFCHR | ||
| 2339 | |||
| 2340 | os.mknod(targetpath, mode, | ||
| 2341 | os.makedev(tarinfo.devmajor, tarinfo.devminor)) | ||
| 2342 | |||
| 2343 | def makelink(self, tarinfo, targetpath): | ||
| 2344 | """Make a (symbolic) link called targetpath. If it cannot be created | ||
| 2345 | (platform limitation), we try to make a copy of the referenced file | ||
| 2346 | instead of a link. | ||
| 2347 | """ | ||
| 2348 | try: | ||
| 2349 | # For systems that support symbolic and hard links. | ||
| 2350 | if tarinfo.issym(): | ||
| 2351 | os.symlink(tarinfo.linkname, targetpath) | ||
| 2352 | else: | ||
| 2353 | # See extract(). | ||
| 2354 | if os.path.exists(tarinfo._link_target): | ||
| 2355 | os.link(tarinfo._link_target, targetpath) | ||
| 2356 | else: | ||
| 2357 | self._extract_member(self._find_link_target(tarinfo), | ||
| 2358 | targetpath) | ||
| 2359 | except symlink_exception: | ||
| 2360 | # The platform could not create the link, so fall back to | ||
| 2361 | # extracting a copy of the referenced file instead. | ||
| 2362 | try: | ||
| 2363 | self._extract_member(self._find_link_target(tarinfo), | ||
| 2364 | targetpath) | ||
| 2365 | except KeyError: | ||
| 2366 | raise ExtractError("unable to resolve link inside archive") | ||
| 2371 | |||
| 2372 | def chown(self, tarinfo, targetpath): | ||
| 2373 | """Set owner of targetpath according to tarinfo. | ||
| 2374 | """ | ||
| 2375 | if pwd and hasattr(os, "geteuid") and os.geteuid() == 0: | ||
| 2376 | # We have to be root to do so. | ||
| 2377 | try: | ||
| 2378 | g = grp.getgrnam(tarinfo.gname)[2] | ||
| 2379 | except KeyError: | ||
| 2380 | g = tarinfo.gid | ||
| 2381 | try: | ||
| 2382 | u = pwd.getpwnam(tarinfo.uname)[2] | ||
| 2383 | except KeyError: | ||
| 2384 | u = tarinfo.uid | ||
| 2385 | try: | ||
| 2386 | if tarinfo.issym() and hasattr(os, "lchown"): | ||
| 2387 | os.lchown(targetpath, u, g) | ||
| 2388 | else: | ||
| 2389 | if sys.platform != "os2emx": | ||
| 2390 | os.chown(targetpath, u, g) | ||
| 2391 | except EnvironmentError as e: | ||
| 2392 | raise ExtractError("could not change owner") | ||
| 2393 | |||
| 2394 | def chmod(self, tarinfo, targetpath): | ||
| 2395 | """Set file permissions of targetpath according to tarinfo. | ||
| 2396 | """ | ||
| 2397 | if hasattr(os, 'chmod'): | ||
| 2398 | try: | ||
| 2399 | os.chmod(targetpath, tarinfo.mode) | ||
| 2400 | except EnvironmentError as e: | ||
| 2401 | raise ExtractError("could not change mode") | ||
| 2402 | |||
| 2403 | def utime(self, tarinfo, targetpath): | ||
| 2404 | """Set modification time of targetpath according to tarinfo. | ||
| 2405 | """ | ||
| 2406 | if not hasattr(os, 'utime'): | ||
| 2407 | return | ||
| 2408 | try: | ||
| 2409 | os.utime(targetpath, (tarinfo.mtime, tarinfo.mtime)) | ||
| 2410 | except EnvironmentError as e: | ||
| 2411 | raise ExtractError("could not change modification time") | ||
| 2412 | |||
| 2413 | #-------------------------------------------------------------------------- | ||
| 2414 | def next(self): | ||
| 2415 | """Return the next member of the archive as a TarInfo object, when | ||
| 2416 | TarFile is opened for reading. Return None if there is no more | ||
| 2417 | available. | ||
| 2418 | """ | ||
| 2419 | self._check("ra") | ||
| 2420 | if self.firstmember is not None: | ||
| 2421 | m = self.firstmember | ||
| 2422 | self.firstmember = None | ||
| 2423 | return m | ||
| 2424 | |||
| 2425 | # Read the next block. | ||
| 2426 | self.fileobj.seek(self.offset) | ||
| 2427 | tarinfo = None | ||
| 2428 | while True: | ||
| 2429 | try: | ||
| 2430 | tarinfo = self.tarinfo.fromtarfile(self) | ||
| 2431 | except EOFHeaderError as e: | ||
| 2432 | if self.ignore_zeros: | ||
| 2433 | self._dbg(2, "0x%X: %s" % (self.offset, e)) | ||
| 2434 | self.offset += BLOCKSIZE | ||
| 2435 | continue | ||
| 2436 | except InvalidHeaderError as e: | ||
| 2437 | if self.ignore_zeros: | ||
| 2438 | self._dbg(2, "0x%X: %s" % (self.offset, e)) | ||
| 2439 | self.offset += BLOCKSIZE | ||
| 2440 | continue | ||
| 2441 | elif self.offset == 0: | ||
| 2442 | raise ReadError(str(e)) | ||
| 2443 | except EmptyHeaderError: | ||
| 2444 | if self.offset == 0: | ||
| 2445 | raise ReadError("empty file") | ||
| 2446 | except TruncatedHeaderError as e: | ||
| 2447 | if self.offset == 0: | ||
| 2448 | raise ReadError(str(e)) | ||
| 2449 | except SubsequentHeaderError as e: | ||
| 2450 | raise ReadError(str(e)) | ||
| 2451 | break | ||
| 2452 | |||
| 2453 | if tarinfo is not None: | ||
| 2454 | self.members.append(tarinfo) | ||
| 2455 | else: | ||
| 2456 | self._loaded = True | ||
| 2457 | |||
| 2458 | return tarinfo | ||
| 2459 | |||
| 2460 | #-------------------------------------------------------------------------- | ||
| 2461 | # Little helper methods: | ||
| 2462 | |||
| 2463 | def _getmember(self, name, tarinfo=None, normalize=False): | ||
| 2464 | """Find an archive member by name from bottom to top. | ||
| 2465 | If tarinfo is given, it is used as the starting point. | ||
| 2466 | """ | ||
| 2467 | # Ensure that all members have been loaded. | ||
| 2468 | members = self.getmembers() | ||
| 2469 | |||
| 2470 | # Limit the member search list up to tarinfo. | ||
| 2471 | if tarinfo is not None: | ||
| 2472 | members = members[:members.index(tarinfo)] | ||
| 2473 | |||
| 2474 | if normalize: | ||
| 2475 | name = os.path.normpath(name) | ||
| 2476 | |||
| 2477 | for member in reversed(members): | ||
| 2478 | if normalize: | ||
| 2479 | member_name = os.path.normpath(member.name) | ||
| 2480 | else: | ||
| 2481 | member_name = member.name | ||
| 2482 | |||
| 2483 | if name == member_name: | ||
| 2484 | return member | ||
| 2485 | |||
| 2486 | def _load(self): | ||
| 2487 | """Read through the entire archive file and look for readable | ||
| 2488 | members. | ||
| 2489 | """ | ||
| 2490 | while True: | ||
| 2491 | tarinfo = self.next() | ||
| 2492 | if tarinfo is None: | ||
| 2493 | break | ||
| 2494 | self._loaded = True | ||
| 2495 | |||
| 2496 | def _check(self, mode=None): | ||
| 2497 | """Check if TarFile is still open, and if the operation's mode | ||
| 2498 | corresponds to TarFile's mode. | ||
| 2499 | """ | ||
| 2500 | if self.closed: | ||
| 2501 | raise IOError("%s is closed" % self.__class__.__name__) | ||
| 2502 | if mode is not None and self.mode not in mode: | ||
| 2503 | raise IOError("bad operation for mode %r" % self.mode) | ||
| 2504 | |||
| 2505 | def _find_link_target(self, tarinfo): | ||
| 2506 | """Find the target member of a symlink or hardlink member in the | ||
| 2507 | archive. | ||
| 2508 | """ | ||
| 2509 | if tarinfo.issym(): | ||
| 2510 | # Always search the entire archive. | ||
| 2511 | linkname = os.path.dirname(tarinfo.name) + "/" + tarinfo.linkname | ||
| 2512 | limit = None | ||
| 2513 | else: | ||
| 2514 | # Search the archive before the link, because a hard link is | ||
| 2515 | # just a reference to an already archived file. | ||
| 2516 | linkname = tarinfo.linkname | ||
| 2517 | limit = tarinfo | ||
| 2518 | |||
| 2519 | member = self._getmember(linkname, tarinfo=limit, normalize=True) | ||
| 2520 | if member is None: | ||
| 2521 | raise KeyError("linkname %r not found" % linkname) | ||
| 2522 | return member | ||
| 2523 | |||
| 2524 | def __iter__(self): | ||
| 2525 | """Provide an iterator object. | ||
| 2526 | """ | ||
| 2527 | if self._loaded: | ||
| 2528 | return iter(self.members) | ||
| 2529 | else: | ||
| 2530 | return TarIter(self) | ||
| 2531 | |||
| 2532 | def _dbg(self, level, msg): | ||
| 2533 | """Write debugging output to sys.stderr. | ||
| 2534 | """ | ||
| 2535 | if level <= self.debug: | ||
| 2536 | print(msg, file=sys.stderr) | ||
| 2537 | |||
| 2538 | def __enter__(self): | ||
| 2539 | self._check() | ||
| 2540 | return self | ||
| 2541 | |||
| 2542 | def __exit__(self, type, value, traceback): | ||
| 2543 | if type is None: | ||
| 2544 | self.close() | ||
| 2545 | else: | ||
| 2546 | # An exception occurred. We must not call close() because | ||
| 2547 | # it would try to write end-of-archive blocks and padding. | ||
| 2548 | if not self._extfileobj: | ||
| 2549 | self.fileobj.close() | ||
| 2550 | self.closed = True | ||
| 2551 | # class TarFile | ||
| 2552 | |||
| 2553 | class TarIter(object): | ||
| 2554 | """Iterator Class. | ||
| 2555 | |||
| 2556 | for tarinfo in TarFile(...): | ||
| 2557 | suite... | ||
| 2558 | """ | ||
| 2559 | |||
| 2560 | def __init__(self, tarfile): | ||
| 2561 | """Construct a TarIter object. | ||
| 2562 | """ | ||
| 2563 | self.tarfile = tarfile | ||
| 2564 | self.index = 0 | ||
| 2565 | def __iter__(self): | ||
| 2566 | """Return iterator object. | ||
| 2567 | """ | ||
| 2568 | return self | ||
| 2569 | |||
| 2570 | def __next__(self): | ||
| 2571 | """Return the next item using TarFile's next() method. | ||
| 2572 | When all members have been read, set TarFile as _loaded. | ||
| 2573 | """ | ||
| 2574 | # Fix for SF #1100429: Under rare circumstances it can | ||
| 2575 | # happen that getmembers() is called during iteration, | ||
| 2576 | # which will cause TarIter to stop prematurely. | ||
| 2577 | if not self.tarfile._loaded: | ||
| 2578 | tarinfo = self.tarfile.next() | ||
| 2579 | if not tarinfo: | ||
| 2580 | self.tarfile._loaded = True | ||
| 2581 | raise StopIteration | ||
| 2582 | else: | ||
| 2583 | try: | ||
| 2584 | tarinfo = self.tarfile.members[self.index] | ||
| 2585 | except IndexError: | ||
| 2586 | raise StopIteration | ||
| 2587 | self.index += 1 | ||
| 2588 | return tarinfo | ||
| 2589 | |||
| 2590 | next = __next__ # for Python 2.x | ||
| 2591 | |||
| 2592 | #-------------------- | ||
| 2593 | # exported functions | ||
| 2594 | #-------------------- | ||
| 2595 | def is_tarfile(name): | ||
| 2596 | """Return True if name points to a tar archive that we | ||
| 2597 | are able to handle, else return False. | ||
| 2598 | """ | ||
| 2599 | try: | ||
| 2600 | t = open(name) | ||
| 2601 | t.close() | ||
| 2602 | return True | ||
| 2603 | except TarError: | ||
| 2604 | return False | ||
| 2605 | |||
| 2606 | bltn_open = open | ||
| 2607 | open = TarFile.open | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/compat.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/compat.py new file mode 100644 index 0000000..09929b0 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/compat.py | |||
| @@ -0,0 +1,1120 @@ | |||
| 1 | # -*- coding: utf-8 -*- | ||
| 2 | # | ||
| 3 | # Copyright (C) 2013-2017 Vinay Sajip. | ||
| 4 | # Licensed to the Python Software Foundation under a contributor agreement. | ||
| 5 | # See LICENSE.txt and CONTRIBUTORS.txt. | ||
| 6 | # | ||
| 7 | from __future__ import absolute_import | ||
| 8 | |||
| 9 | import os | ||
| 10 | import re | ||
| 11 | import sys | ||
| 12 | |||
| 13 | try: | ||
| 14 | import ssl | ||
| 15 | except ImportError: # pragma: no cover | ||
| 16 | ssl = None | ||
| 17 | |||
| 18 | if sys.version_info[0] < 3: # pragma: no cover | ||
| 19 | from StringIO import StringIO | ||
| 20 | string_types = basestring, | ||
| 21 | text_type = unicode | ||
| 22 | from types import FileType as file_type | ||
| 23 | import __builtin__ as builtins | ||
| 24 | import ConfigParser as configparser | ||
| 25 | from ._backport import shutil | ||
| 26 | from urlparse import urlparse, urlunparse, urljoin, urlsplit, urlunsplit | ||
| 27 | from urllib import (urlretrieve, quote as _quote, unquote, url2pathname, | ||
| 28 | pathname2url, ContentTooShortError, splittype) | ||
| 29 | |||
| 30 | def quote(s): | ||
| 31 | if isinstance(s, unicode): | ||
| 32 | s = s.encode('utf-8') | ||
| 33 | return _quote(s) | ||
| 34 | |||
| 35 | import urllib2 | ||
| 36 | from urllib2 import (Request, urlopen, URLError, HTTPError, | ||
| 37 | HTTPBasicAuthHandler, HTTPPasswordMgr, | ||
| 38 | HTTPHandler, HTTPRedirectHandler, | ||
| 39 | build_opener) | ||
| 40 | if ssl: | ||
| 41 | from urllib2 import HTTPSHandler | ||
| 42 | import httplib | ||
| 43 | import xmlrpclib | ||
| 44 | import Queue as queue | ||
| 45 | from HTMLParser import HTMLParser | ||
| 46 | import htmlentitydefs | ||
| 47 | raw_input = raw_input | ||
| 48 | from itertools import ifilter as filter | ||
| 49 | from itertools import ifilterfalse as filterfalse | ||
| 50 | |||
| 51 | _userprog = None | ||
| 52 | def splituser(host): | ||
| 53 | """splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'.""" | ||
| 54 | global _userprog | ||
| 55 | if _userprog is None: | ||
| 56 | import re | ||
| 57 | _userprog = re.compile('^(.*)@(.*)$') | ||
| 58 | |||
| 59 | match = _userprog.match(host) | ||
| 60 | if match: return match.group(1, 2) | ||
| 61 | return None, host | ||
| 62 | |||
| 63 | else: # pragma: no cover | ||
| 64 | from io import StringIO | ||
| 65 | string_types = str, | ||
| 66 | text_type = str | ||
| 67 | from io import TextIOWrapper as file_type | ||
| 68 | import builtins | ||
| 69 | import configparser | ||
| 70 | import shutil | ||
| 71 | from urllib.parse import (urlparse, urlunparse, urljoin, splituser, quote, | ||
| 72 | unquote, urlsplit, urlunsplit, splittype) | ||
| 73 | from urllib.request import (urlopen, urlretrieve, Request, url2pathname, | ||
| 74 | pathname2url, | ||
| 75 | HTTPBasicAuthHandler, HTTPPasswordMgr, | ||
| 76 | HTTPHandler, HTTPRedirectHandler, | ||
| 77 | build_opener) | ||
| 78 | if ssl: | ||
| 79 | from urllib.request import HTTPSHandler | ||
| 80 | from urllib.error import HTTPError, URLError, ContentTooShortError | ||
| 81 | import http.client as httplib | ||
| 82 | import urllib.request as urllib2 | ||
| 83 | import xmlrpc.client as xmlrpclib | ||
| 84 | import queue | ||
| 85 | from html.parser import HTMLParser | ||
| 86 | import html.entities as htmlentitydefs | ||
| 87 | raw_input = input | ||
| 88 | from itertools import filterfalse | ||
| 89 | filter = filter | ||
| 90 | |||
| 91 | try: | ||
| 92 | from ssl import match_hostname, CertificateError | ||
| 93 | except ImportError: # pragma: no cover | ||
| 94 | class CertificateError(ValueError): | ||
| 95 | pass | ||
| 96 | |||
| 97 | |||
| 98 | def _dnsname_match(dn, hostname, max_wildcards=1): | ||
| 99 | """Matching according to RFC 6125, section 6.4.3 | ||
| 100 | |||
| 101 | http://tools.ietf.org/html/rfc6125#section-6.4.3 | ||
| 102 | """ | ||
| 103 | pats = [] | ||
| 104 | if not dn: | ||
| 105 | return False | ||
| 106 | |||
| 107 | parts = dn.split('.') | ||
| 108 | leftmost, remainder = parts[0], parts[1:] | ||
| 109 | |||
| 110 | wildcards = leftmost.count('*') | ||
| 111 | if wildcards > max_wildcards: | ||
| 112 | # Issue #17980: avoid denials of service by refusing more | ||
| 113 | # than one wildcard per fragment. A survey of established | ||
| 114 | # policy among SSL implementations showed it to be a | ||
| 115 | # reasonable choice. | ||
| 116 | raise CertificateError( | ||
| 117 | "too many wildcards in certificate DNS name: " + repr(dn)) | ||
| 118 | |||
| 119 | # speed up common case w/o wildcards | ||
| 120 | if not wildcards: | ||
| 121 | return dn.lower() == hostname.lower() | ||
| 122 | |||
| 123 | # RFC 6125, section 6.4.3, subitem 1. | ||
| 124 | # The client SHOULD NOT attempt to match a presented identifier in which | ||
| 125 | # the wildcard character comprises a label other than the left-most label. | ||
| 126 | if leftmost == '*': | ||
| 127 | # When '*' is a fragment by itself, it matches a non-empty dotless | ||
| 128 | # fragment. | ||
| 129 | pats.append('[^.]+') | ||
| 130 | elif leftmost.startswith('xn--') or hostname.startswith('xn--'): | ||
| 131 | # RFC 6125, section 6.4.3, subitem 3. | ||
| 132 | # The client SHOULD NOT attempt to match a presented identifier | ||
| 133 | # where the wildcard character is embedded within an A-label or | ||
| 134 | # U-label of an internationalized domain name. | ||
| 135 | pats.append(re.escape(leftmost)) | ||
| 136 | else: | ||
| 137 | # Otherwise, '*' matches any dotless string, e.g. www* | ||
| 138 | pats.append(re.escape(leftmost).replace(r'\*', '[^.]*')) | ||
| 139 | |||
| 140 | # add the remaining fragments, ignore any wildcards | ||
| 141 | for frag in remainder: | ||
| 142 | pats.append(re.escape(frag)) | ||
| 143 | |||
| 144 | pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE) | ||
| 145 | return pat.match(hostname) | ||
| 146 | |||
| 147 | |||
| 148 | def match_hostname(cert, hostname): | ||
| 149 | """Verify that *cert* (in decoded format as returned by | ||
| 150 | SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125 | ||
| 151 | rules are followed, but IP addresses are not accepted for *hostname*. | ||
| 152 | |||
| 153 | CertificateError is raised on failure. On success, the function | ||
| 154 | returns nothing. | ||
| 155 | """ | ||
| 156 | if not cert: | ||
| 157 | raise ValueError("empty or no certificate, match_hostname needs a " | ||
| 158 | "SSL socket or SSL context with either " | ||
| 159 | "CERT_OPTIONAL or CERT_REQUIRED") | ||
| 160 | dnsnames = [] | ||
| 161 | san = cert.get('subjectAltName', ()) | ||
| 162 | for key, value in san: | ||
| 163 | if key == 'DNS': | ||
| 164 | if _dnsname_match(value, hostname): | ||
| 165 | return | ||
| 166 | dnsnames.append(value) | ||
| 167 | if not dnsnames: | ||
| 168 | # The subject is only checked when there is no dNSName entry | ||
| 169 | # in subjectAltName | ||
| 170 | for sub in cert.get('subject', ()): | ||
| 171 | for key, value in sub: | ||
| 172 | # XXX according to RFC 2818, the most specific Common Name | ||
| 173 | # must be used. | ||
| 174 | if key == 'commonName': | ||
| 175 | if _dnsname_match(value, hostname): | ||
| 176 | return | ||
| 177 | dnsnames.append(value) | ||
| 178 | if len(dnsnames) > 1: | ||
| 179 | raise CertificateError("hostname %r " | ||
| 180 | "doesn't match either of %s" | ||
| 181 | % (hostname, ', '.join(map(repr, dnsnames)))) | ||
| 182 | elif len(dnsnames) == 1: | ||
| 183 | raise CertificateError("hostname %r " | ||
| 184 | "doesn't match %r" | ||
| 185 | % (hostname, dnsnames[0])) | ||
| 186 | else: | ||
| 187 | raise CertificateError("no appropriate commonName or " | ||
| 188 | "subjectAltName fields were found") | ||
| 189 | |||
| 190 | |||
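match_hostname() operates on the decoded-certificate dict shape that SSLSocket.getpeercert() returns, so a hand-built dict is enough to see the wildcard rules. A sketch against the functions defined above (or their ssl equivalents), with illustrative names:

    cert = {
        'subject': ((('commonName', 'example.com'),),),
        'subjectAltName': (('DNS', '*.example.com'), ('DNS', 'example.com')),
    }

    match_hostname(cert, 'www.example.com')   # wildcard label matches; returns None
    try:
        match_hostname(cert, 'evil.example.org')
    except CertificateError as exc:
        print(exc)                            # doesn't match either name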
| 191 | try: | ||
| 192 | from types import SimpleNamespace as Container | ||
| 193 | except ImportError: # pragma: no cover | ||
| 194 | class Container(object): | ||
| 195 | """ | ||
| 196 | A generic container for when multiple values need to be returned | ||
| 197 | """ | ||
| 198 | def __init__(self, **kwargs): | ||
| 199 | self.__dict__.update(kwargs) | ||
| 200 | |||
| 201 | |||
| 202 | try: | ||
| 203 | from shutil import which | ||
| 204 | except ImportError: # pragma: no cover | ||
| 205 | # Implementation from Python 3.3 | ||
| 206 | def which(cmd, mode=os.F_OK | os.X_OK, path=None): | ||
| 207 | """Given a command, mode, and a PATH string, return the path which | ||
| 208 | conforms to the given mode on the PATH, or None if there is no such | ||
| 209 | file. | ||
| 210 | |||
| 211 | `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result | ||
| 212 | of os.environ.get("PATH"), or can be overridden with a custom search | ||
| 213 | path. | ||
| 214 | |||
| 215 | """ | ||
| 216 | # Check that a given file can be accessed with the correct mode. | ||
| 217 | # Additionally check that `file` is not a directory, as on Windows | ||
| 218 | # directories pass the os.access check. | ||
| 219 | def _access_check(fn, mode): | ||
| 220 | return (os.path.exists(fn) and os.access(fn, mode) | ||
| 221 | and not os.path.isdir(fn)) | ||
| 222 | |||
| 223 | # If we're given a path with a directory part, look it up directly rather | ||
| 224 | # than referring to PATH directories. This includes checking relative to the | ||
| 225 | # current directory, e.g. ./script | ||
| 226 | if os.path.dirname(cmd): | ||
| 227 | if _access_check(cmd, mode): | ||
| 228 | return cmd | ||
| 229 | return None | ||
| 230 | |||
| 231 | if path is None: | ||
| 232 | path = os.environ.get("PATH", os.defpath) | ||
| 233 | if not path: | ||
| 234 | return None | ||
| 235 | path = path.split(os.pathsep) | ||
| 236 | |||
| 237 | if sys.platform == "win32": | ||
| 238 | # The current directory takes precedence on Windows. | ||
| 239 | if os.curdir not in path: | ||
| 240 | path.insert(0, os.curdir) | ||
| 241 | |||
| 242 | # PATHEXT is necessary to check on Windows. | ||
| 243 | pathext = os.environ.get("PATHEXT", "").split(os.pathsep) | ||
| 244 | # See if the given file matches any of the expected path extensions. | ||
| 245 | # This will allow us to short circuit when given "python.exe". | ||
| 246 | # If it does match, only test that one, otherwise we have to try | ||
| 247 | # others. | ||
| 248 | if any(cmd.lower().endswith(ext.lower()) for ext in pathext): | ||
| 249 | files = [cmd] | ||
| 250 | else: | ||
| 251 | files = [cmd + ext for ext in pathext] | ||
| 252 | else: | ||
| 253 | # On other platforms you don't have things like PATHEXT to tell you | ||
| 254 | # what file suffixes are executable, so just pass on cmd as-is. | ||
| 255 | files = [cmd] | ||
| 256 | |||
| 257 | seen = set() | ||
| 258 | for dir in path: | ||
| 259 | normdir = os.path.normcase(dir) | ||
| 260 | if normdir not in seen: | ||
| 261 | seen.add(normdir) | ||
| 262 | for thefile in files: | ||
| 263 | name = os.path.join(dir, thefile) | ||
| 264 | if _access_check(name, mode): | ||
| 265 | return name | ||
| 266 | return None | ||
| 267 | |||
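The `which` backport checks a command containing a directory part directly, prepends the current directory on Windows, expands `PATHEXT` suffixes there, and deduplicates search directories via `os.path.normcase`. An illustrative call (the command name is arbitrary and the result machine-dependent):

```python
# Works with this backport or, on Python >= 3.3, shutil.which.
exe = which('python')
if exe is not None:
    print('found %s' % exe)
else:
    print('python is not on PATH')
```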
| 268 | |||
| 269 | # ZipFile is a context manager in 2.7, but not in 2.6 | ||
| 270 | |||
| 271 | from zipfile import ZipFile as BaseZipFile | ||
| 272 | |||
| 273 | if hasattr(BaseZipFile, '__enter__'): # pragma: no cover | ||
| 274 | ZipFile = BaseZipFile | ||
| 275 | else: # pragma: no cover | ||
| 276 | from zipfile import ZipExtFile as BaseZipExtFile | ||
| 277 | |||
| 278 | class ZipExtFile(BaseZipExtFile): | ||
| 279 | def __init__(self, base): | ||
| 280 | self.__dict__.update(base.__dict__) | ||
| 281 | |||
| 282 | def __enter__(self): | ||
| 283 | return self | ||
| 284 | |||
| 285 | def __exit__(self, *exc_info): | ||
| 286 | self.close() | ||
| 287 | # return None, so if an exception occurred, it will propagate | ||
| 288 | |||
| 289 | class ZipFile(BaseZipFile): | ||
| 290 | def __enter__(self): | ||
| 291 | return self | ||
| 292 | |||
| 293 | def __exit__(self, *exc_info): | ||
| 294 | self.close() | ||
| 295 | # return None, so if an exception occurred, it will propagate | ||
| 296 | |||
| 297 | def open(self, *args, **kwargs): | ||
| 298 | base = BaseZipFile.open(self, *args, **kwargs) | ||
| 299 | return ZipExtFile(base) | ||
| 300 | |||
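The shim above exists only so `ZipFile` can be used in a `with` statement on Python 2.6; on anything newer it is the stdlib class unchanged. A small sketch (not from the diff) writing and re-reading an in-memory archive:

```python
import io

buf = io.BytesIO()
with ZipFile(buf, 'w') as zf:          # ZipFile as defined above
    zf.writestr('hello.txt', b'hello')

with ZipFile(io.BytesIO(buf.getvalue())) as zf:
    with zf.open('hello.txt') as f:    # the wrapped ZipExtFile also supports 'with'
        print(f.read())                # b'hello'
```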
| 301 | try: | ||
| 302 | from platform import python_implementation | ||
| 303 | except ImportError: # pragma: no cover | ||
| 304 | def python_implementation(): | ||
| 305 | """Return a string identifying the Python implementation.""" | ||
| 306 | if 'PyPy' in sys.version: | ||
| 307 | return 'PyPy' | ||
| 308 | if os.name == 'java': | ||
| 309 | return 'Jython' | ||
| 310 | if sys.version.startswith('IronPython'): | ||
| 311 | return 'IronPython' | ||
| 312 | return 'CPython' | ||
| 313 | |||
| 314 | try: | ||
| 315 | import sysconfig | ||
| 316 | except ImportError: # pragma: no cover | ||
| 317 | from ._backport import sysconfig | ||
| 318 | |||
| 319 | try: | ||
| 320 | callable = callable | ||
| 321 | except NameError: # pragma: no cover | ||
| 322 | from collections import Callable | ||
| 323 | |||
| 324 | def callable(obj): | ||
| 325 | return isinstance(obj, Callable) | ||
| 326 | |||
| 327 | |||
| 328 | try: | ||
| 329 | fsencode = os.fsencode | ||
| 330 | fsdecode = os.fsdecode | ||
| 331 | except AttributeError: # pragma: no cover | ||
| 332 | # Issue #99: on some systems (e.g. containerised), | ||
| 333 | # sys.getfilesystemencoding() returns None, and we need a real value, | ||
| 334 | # so fall back to utf-8. From the CPython 2.7 docs relating to Unix and | ||
| 335 | # sys.getfilesystemencoding(): the return value is "the user’s preference | ||
| 336 | # according to the result of nl_langinfo(CODESET), or None if the | ||
| 337 | # nl_langinfo(CODESET) failed." | ||
| 338 | _fsencoding = sys.getfilesystemencoding() or 'utf-8' | ||
| 339 | if _fsencoding == 'mbcs': | ||
| 340 | _fserrors = 'strict' | ||
| 341 | else: | ||
| 342 | _fserrors = 'surrogateescape' | ||
| 343 | |||
| 344 | def fsencode(filename): | ||
| 345 | if isinstance(filename, bytes): | ||
| 346 | return filename | ||
| 347 | elif isinstance(filename, text_type): | ||
| 348 | return filename.encode(_fsencoding, _fserrors) | ||
| 349 | else: | ||
| 350 | raise TypeError("expect bytes or str, not %s" % | ||
| 351 | type(filename).__name__) | ||
| 352 | |||
| 353 | def fsdecode(filename): | ||
| 354 | if isinstance(filename, text_type): | ||
| 355 | return filename | ||
| 356 | elif isinstance(filename, bytes): | ||
| 357 | return filename.decode(_fsencoding, _fserrors) | ||
| 358 | else: | ||
| 359 | raise TypeError("expect bytes or str, not %s" % | ||
| 360 | type(filename).__name__) | ||
| 361 | |||
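`fsencode`/`fsdecode` fall back to the filesystem encoding (defaulting to UTF-8 when `sys.getfilesystemencoding()` returns `None`) with `surrogateescape` error handling, except on `mbcs` where strict handling is required. A quick round-trip sketch, illustrative only:

```python
# Uses os.fsencode/os.fsdecode where available, the fallbacks otherwise.
raw = fsencode('data.txt')
print(raw)            # b'data.txt'
print(fsdecode(raw))  # 'data.txt'
```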
| 362 | try: | ||
| 363 | from tokenize import detect_encoding | ||
| 364 | except ImportError: # pragma: no cover | ||
| 365 | from codecs import BOM_UTF8, lookup | ||
| 366 | import re | ||
| 367 | |||
| 368 | cookie_re = re.compile(r"coding[:=]\s*([-\w.]+)") | ||
| 369 | |||
| 370 | def _get_normal_name(orig_enc): | ||
| 371 | """Imitates get_normal_name in tokenizer.c.""" | ||
| 372 | # Only care about the first 12 characters. | ||
| 373 | enc = orig_enc[:12].lower().replace("_", "-") | ||
| 374 | if enc == "utf-8" or enc.startswith("utf-8-"): | ||
| 375 | return "utf-8" | ||
| 376 | if enc in ("latin-1", "iso-8859-1", "iso-latin-1") or \ | ||
| 377 | enc.startswith(("latin-1-", "iso-8859-1-", "iso-latin-1-")): | ||
| 378 | return "iso-8859-1" | ||
| 379 | return orig_enc | ||
| 380 | |||
| 381 | def detect_encoding(readline): | ||
| 382 | """ | ||
| 383 | The detect_encoding() function is used to detect the encoding that should | ||
| 384 | be used to decode a Python source file. It requires one argument, readline, | ||
| 385 | in the same way as the tokenize() generator. | ||
| 386 | |||
| 387 | It will call readline a maximum of twice, and return the encoding used | ||
| 388 | (as a string) and a list of any lines (left as bytes) it has read in. | ||
| 389 | |||
| 390 | It detects the encoding from the presence of a utf-8 bom or an encoding | ||
| 391 | cookie as specified in pep-0263. If both a bom and a cookie are present, | ||
| 392 | but disagree, a SyntaxError will be raised. If the encoding cookie is an | ||
| 393 | invalid charset, raise a SyntaxError. Note that if a utf-8 bom is found, | ||
| 394 | 'utf-8-sig' is returned. | ||
| 395 | |||
| 396 | If no encoding is specified, then the default of 'utf-8' will be returned. | ||
| 397 | """ | ||
| 398 | try: | ||
| 399 | filename = readline.__self__.name | ||
| 400 | except AttributeError: | ||
| 401 | filename = None | ||
| 402 | bom_found = False | ||
| 403 | encoding = None | ||
| 404 | default = 'utf-8' | ||
| 405 | def read_or_stop(): | ||
| 406 | try: | ||
| 407 | return readline() | ||
| 408 | except StopIteration: | ||
| 409 | return b'' | ||
| 410 | |||
| 411 | def find_cookie(line): | ||
| 412 | try: | ||
| 413 | # Decode as UTF-8. Either the line is an encoding declaration, | ||
| 414 | # in which case it should be pure ASCII, or it must be UTF-8 | ||
| 415 | # per default encoding. | ||
| 416 | line_string = line.decode('utf-8') | ||
| 417 | except UnicodeDecodeError: | ||
| 418 | msg = "invalid or missing encoding declaration" | ||
| 419 | if filename is not None: | ||
| 420 | msg = '{} for {!r}'.format(msg, filename) | ||
| 421 | raise SyntaxError(msg) | ||
| 422 | |||
| 423 | matches = cookie_re.findall(line_string) | ||
| 424 | if not matches: | ||
| 425 | return None | ||
| 426 | encoding = _get_normal_name(matches[0]) | ||
| 427 | try: | ||
| 428 | codec = lookup(encoding) | ||
| 429 | except LookupError: | ||
| 430 | # This behaviour mimics the Python interpreter | ||
| 431 | if filename is None: | ||
| 432 | msg = "unknown encoding: " + encoding | ||
| 433 | else: | ||
| 434 | msg = "unknown encoding for {!r}: {}".format(filename, | ||
| 435 | encoding) | ||
| 436 | raise SyntaxError(msg) | ||
| 437 | |||
| 438 | if bom_found: | ||
| 439 | if codec.name != 'utf-8': | ||
| 440 | # This behaviour mimics the Python interpreter | ||
| 441 | if filename is None: | ||
| 442 | msg = 'encoding problem: utf-8' | ||
| 443 | else: | ||
| 444 | msg = 'encoding problem for {!r}: utf-8'.format(filename) | ||
| 445 | raise SyntaxError(msg) | ||
| 446 | encoding += '-sig' | ||
| 447 | return encoding | ||
| 448 | |||
| 449 | first = read_or_stop() | ||
| 450 | if first.startswith(BOM_UTF8): | ||
| 451 | bom_found = True | ||
| 452 | first = first[3:] | ||
| 453 | default = 'utf-8-sig' | ||
| 454 | if not first: | ||
| 455 | return default, [] | ||
| 456 | |||
| 457 | encoding = find_cookie(first) | ||
| 458 | if encoding: | ||
| 459 | return encoding, [first] | ||
| 460 | |||
| 461 | second = read_or_stop() | ||
| 462 | if not second: | ||
| 463 | return default, [first] | ||
| 464 | |||
| 465 | encoding = find_cookie(second) | ||
| 466 | if encoding: | ||
| 467 | return encoding, [first, second] | ||
| 468 | |||
| 469 | return default, [first, second] | ||
| 470 | |||
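The `detect_encoding` fallback reads at most two lines, honouring a UTF-8 BOM and a PEP 263 coding cookie, and raising `SyntaxError` when the two disagree. A sketch (not from the diff) against an in-memory source buffer:

```python
import io

source = b'# -*- coding: iso-8859-1 -*-\nx = 1\n'
encoding, lines = detect_encoding(io.BytesIO(source).readline)
print(encoding)  # 'iso-8859-1'
print(lines)     # the byte lines consumed while sniffing
```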
| 471 | # For converting & <-> &amp; etc. | ||
| 472 | try: | ||
| 473 | from html import escape | ||
| 474 | except ImportError: | ||
| 475 | from cgi import escape | ||
| 476 | if sys.version_info[:2] < (3, 4): | ||
| 477 | unescape = HTMLParser().unescape | ||
| 478 | else: | ||
| 479 | from html import unescape | ||
| 480 | |||
| 481 | try: | ||
| 482 | from collections import ChainMap | ||
| 483 | except ImportError: # pragma: no cover | ||
| 484 | from collections import MutableMapping | ||
| 485 | |||
| 486 | try: | ||
| 487 | from reprlib import recursive_repr as _recursive_repr | ||
| 488 | except ImportError: | ||
| 489 | def _recursive_repr(fillvalue='...'): | ||
| 490 | ''' | ||
| 491 | Decorator to make a repr function return fillvalue for a recursive | ||
| 492 | call | ||
| 493 | ''' | ||
| 494 | |||
| 495 | def decorating_function(user_function): | ||
| 496 | repr_running = set() | ||
| 497 | |||
| 498 | def wrapper(self): | ||
| 499 | key = id(self), get_ident() | ||
| 500 | if key in repr_running: | ||
| 501 | return fillvalue | ||
| 502 | repr_running.add(key) | ||
| 503 | try: | ||
| 504 | result = user_function(self) | ||
| 505 | finally: | ||
| 506 | repr_running.discard(key) | ||
| 507 | return result | ||
| 508 | |||
| 509 | # Can't use functools.wraps() here because of bootstrap issues | ||
| 510 | wrapper.__module__ = getattr(user_function, '__module__') | ||
| 511 | wrapper.__doc__ = getattr(user_function, '__doc__') | ||
| 512 | wrapper.__name__ = getattr(user_function, '__name__') | ||
| 513 | wrapper.__annotations__ = getattr(user_function, '__annotations__', {}) | ||
| 514 | return wrapper | ||
| 515 | |||
| 516 | return decorating_function | ||
| 517 | |||
| 518 | class ChainMap(MutableMapping): | ||
| 519 | ''' A ChainMap groups multiple dicts (or other mappings) together | ||
| 520 | to create a single, updateable view. | ||
| 521 | |||
| 522 | The underlying mappings are stored in a list. That list is public and can be | ||
| 523 | accessed or updated using the *maps* attribute. There is no other state. | ||
| 524 | |||
| 525 | Lookups search the underlying mappings successively until a key is found. | ||
| 526 | In contrast, writes, updates, and deletions only operate on the first | ||
| 527 | mapping. | ||
| 528 | |||
| 529 | ''' | ||
| 530 | |||
| 531 | def __init__(self, *maps): | ||
| 532 | '''Initialize a ChainMap by setting *maps* to the given mappings. | ||
| 533 | If no mappings are provided, a single empty dictionary is used. | ||
| 534 | |||
| 535 | ''' | ||
| 536 | self.maps = list(maps) or [{}] # always at least one map | ||
| 537 | |||
| 538 | def __missing__(self, key): | ||
| 539 | raise KeyError(key) | ||
| 540 | |||
| 541 | def __getitem__(self, key): | ||
| 542 | for mapping in self.maps: | ||
| 543 | try: | ||
| 544 | return mapping[key] # can't use 'key in mapping' with defaultdict | ||
| 545 | except KeyError: | ||
| 546 | pass | ||
| 547 | return self.__missing__(key) # support subclasses that define __missing__ | ||
| 548 | |||
| 549 | def get(self, key, default=None): | ||
| 550 | return self[key] if key in self else default | ||
| 551 | |||
| 552 | def __len__(self): | ||
| 553 | return len(set().union(*self.maps)) # reuses stored hash values if possible | ||
| 554 | |||
| 555 | def __iter__(self): | ||
| 556 | return iter(set().union(*self.maps)) | ||
| 557 | |||
| 558 | def __contains__(self, key): | ||
| 559 | return any(key in m for m in self.maps) | ||
| 560 | |||
| 561 | def __bool__(self): | ||
| 562 | return any(self.maps) | ||
| 563 | |||
| 564 | @_recursive_repr() | ||
| 565 | def __repr__(self): | ||
| 566 | return '{0.__class__.__name__}({1})'.format( | ||
| 567 | self, ', '.join(map(repr, self.maps))) | ||
| 568 | |||
| 569 | @classmethod | ||
| 570 | def fromkeys(cls, iterable, *args): | ||
| 571 | 'Create a ChainMap with a single dict created from the iterable.' | ||
| 572 | return cls(dict.fromkeys(iterable, *args)) | ||
| 573 | |||
| 574 | def copy(self): | ||
| 575 | 'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]' | ||
| 576 | return self.__class__(self.maps[0].copy(), *self.maps[1:]) | ||
| 577 | |||
| 578 | __copy__ = copy | ||
| 579 | |||
| 580 | def new_child(self): # like Django's Context.push() | ||
| 581 | 'New ChainMap with a new dict followed by all previous maps.' | ||
| 582 | return self.__class__({}, *self.maps) | ||
| 583 | |||
| 584 | @property | ||
| 585 | def parents(self): # like Django's Context.pop() | ||
| 586 | 'New ChainMap from maps[1:].' | ||
| 587 | return self.__class__(*self.maps[1:]) | ||
| 588 | |||
| 589 | def __setitem__(self, key, value): | ||
| 590 | self.maps[0][key] = value | ||
| 591 | |||
| 592 | def __delitem__(self, key): | ||
| 593 | try: | ||
| 594 | del self.maps[0][key] | ||
| 595 | except KeyError: | ||
| 596 | raise KeyError('Key not found in the first mapping: {!r}'.format(key)) | ||
| 597 | |||
| 598 | def popitem(self): | ||
| 599 | 'Remove and return an item pair from maps[0]. Raise KeyError if maps[0] is empty.' | ||
| 600 | try: | ||
| 601 | return self.maps[0].popitem() | ||
| 602 | except KeyError: | ||
| 603 | raise KeyError('No keys found in the first mapping.') | ||
| 604 | |||
| 605 | def pop(self, key, *args): | ||
| 606 | 'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].' | ||
| 607 | try: | ||
| 608 | return self.maps[0].pop(key, *args) | ||
| 609 | except KeyError: | ||
| 610 | raise KeyError('Key not found in the first mapping: {!r}'.format(key)) | ||
| 611 | |||
| 612 | def clear(self): | ||
| 613 | 'Clear maps[0], leaving maps[1:] intact.' | ||
| 614 | self.maps[0].clear() | ||
| 615 | |||
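The `ChainMap` backport layers several mappings into one view: lookups fall through the list in order, while writes and deletions touch only `maps[0]`. A short sketch (behaviour matches the stdlib class on Python >= 3.3):

```python
defaults = {'colour': 'red', 'user': 'guest'}
overrides = {'user': 'admin'}

cm = ChainMap(overrides, defaults)
print(cm['user'])    # 'admin' - found in the first map
print(cm['colour'])  # 'red'   - falls through to defaults

cm['colour'] = 'blue'       # writes go to the first map only
print(defaults['colour'])   # still 'red'
```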
| 616 | try: | ||
| 617 | from importlib.util import cache_from_source # Python >= 3.4 | ||
| 618 | except ImportError: # pragma: no cover | ||
| 619 | try: | ||
| 620 | from imp import cache_from_source | ||
| 621 | except ImportError: # pragma: no cover | ||
| 622 | def cache_from_source(path, debug_override=None): | ||
| 623 | assert path.endswith('.py') | ||
| 624 | if debug_override is None: | ||
| 625 | debug_override = __debug__ | ||
| 626 | if debug_override: | ||
| 627 | suffix = 'c' | ||
| 628 | else: | ||
| 629 | suffix = 'o' | ||
| 630 | return path + suffix | ||
| 631 | |||
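The last-resort `cache_from_source` simply appends `'c'` or `'o'` to the source path, mimicking the Python 2 bytecode layout; the `importlib`/`imp` versions imported above return `__pycache__` paths instead on Python 3. Illustrative calls against the fallback:

```python
# Output shown is for the fallback defined above; on Python >= 3.4 the
# importlib version would return a __pycache__ path instead.
print(cache_from_source('pkg/mod.py', debug_override=True))   # 'pkg/mod.pyc'
print(cache_from_source('pkg/mod.py', debug_override=False))  # 'pkg/mod.pyo'
```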
| 632 | try: | ||
| 633 | from collections import OrderedDict | ||
| 634 | except ImportError: # pragma: no cover | ||
| 635 | ## {{{ http://code.activestate.com/recipes/576693/ (r9) | ||
| 636 | # Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy. | ||
| 637 | # Passes Python2.7's test suite and incorporates all the latest updates. | ||
| 638 | try: | ||
| 639 | from thread import get_ident as _get_ident | ||
| 640 | except ImportError: | ||
| 641 | from dummy_thread import get_ident as _get_ident | ||
| 642 | |||
| 643 | try: | ||
| 644 | from _abcoll import KeysView, ValuesView, ItemsView | ||
| 645 | except ImportError: | ||
| 646 | pass | ||
| 647 | |||
| 648 | |||
| 649 | class OrderedDict(dict): | ||
| 650 | 'Dictionary that remembers insertion order' | ||
| 651 | # An inherited dict maps keys to values. | ||
| 652 | # The inherited dict provides __getitem__, __len__, __contains__, and get. | ||
| 653 | # The remaining methods are order-aware. | ||
| 654 | # Big-O running times for all methods are the same as for regular dictionaries. | ||
| 655 | |||
| 656 | # The internal self.__map dictionary maps keys to links in a doubly linked list. | ||
| 657 | # The circular doubly linked list starts and ends with a sentinel element. | ||
| 658 | # The sentinel element never gets deleted (this simplifies the algorithm). | ||
| 659 | # Each link is stored as a list of length three: [PREV, NEXT, KEY]. | ||
| 660 | |||
| 661 | def __init__(self, *args, **kwds): | ||
| 662 | '''Initialize an ordered dictionary. Signature is the same as for | ||
| 663 | regular dictionaries, but keyword arguments are not recommended | ||
| 664 | because their insertion order is arbitrary. | ||
| 665 | |||
| 666 | ''' | ||
| 667 | if len(args) > 1: | ||
| 668 | raise TypeError('expected at most 1 argument, got %d' % len(args)) | ||
| 669 | try: | ||
| 670 | self.__root | ||
| 671 | except AttributeError: | ||
| 672 | self.__root = root = [] # sentinel node | ||
| 673 | root[:] = [root, root, None] | ||
| 674 | self.__map = {} | ||
| 675 | self.__update(*args, **kwds) | ||
| 676 | |||
| 677 | def __setitem__(self, key, value, dict_setitem=dict.__setitem__): | ||
| 678 | 'od.__setitem__(i, y) <==> od[i]=y' | ||
| 679 | # Setting a new item creates a new link which goes at the end of the linked | ||
| 680 | # list, and the inherited dictionary is updated with the new key/value pair. | ||
| 681 | if key not in self: | ||
| 682 | root = self.__root | ||
| 683 | last = root[0] | ||
| 684 | last[1] = root[0] = self.__map[key] = [last, root, key] | ||
| 685 | dict_setitem(self, key, value) | ||
| 686 | |||
| 687 | def __delitem__(self, key, dict_delitem=dict.__delitem__): | ||
| 688 | 'od.__delitem__(y) <==> del od[y]' | ||
| 689 | # Deleting an existing item uses self.__map to find the link which is | ||
| 690 | # then removed by updating the links in the predecessor and successor nodes. | ||
| 691 | dict_delitem(self, key) | ||
| 692 | link_prev, link_next, key = self.__map.pop(key) | ||
| 693 | link_prev[1] = link_next | ||
| 694 | link_next[0] = link_prev | ||
| 695 | |||
| 696 | def __iter__(self): | ||
| 697 | 'od.__iter__() <==> iter(od)' | ||
| 698 | root = self.__root | ||
| 699 | curr = root[1] | ||
| 700 | while curr is not root: | ||
| 701 | yield curr[2] | ||
| 702 | curr = curr[1] | ||
| 703 | |||
| 704 | def __reversed__(self): | ||
| 705 | 'od.__reversed__() <==> reversed(od)' | ||
| 706 | root = self.__root | ||
| 707 | curr = root[0] | ||
| 708 | while curr is not root: | ||
| 709 | yield curr[2] | ||
| 710 | curr = curr[0] | ||
| 711 | |||
| 712 | def clear(self): | ||
| 713 | 'od.clear() -> None. Remove all items from od.' | ||
| 714 | try: | ||
| 715 | for node in self.__map.itervalues(): | ||
| 716 | del node[:] | ||
| 717 | root = self.__root | ||
| 718 | root[:] = [root, root, None] | ||
| 719 | self.__map.clear() | ||
| 720 | except AttributeError: | ||
| 721 | pass | ||
| 722 | dict.clear(self) | ||
| 723 | |||
| 724 | def popitem(self, last=True): | ||
| 725 | '''od.popitem() -> (k, v), return and remove a (key, value) pair. | ||
| 726 | Pairs are returned in LIFO order if last is true or FIFO order if false. | ||
| 727 | |||
| 728 | ''' | ||
| 729 | if not self: | ||
| 730 | raise KeyError('dictionary is empty') | ||
| 731 | root = self.__root | ||
| 732 | if last: | ||
| 733 | link = root[0] | ||
| 734 | link_prev = link[0] | ||
| 735 | link_prev[1] = root | ||
| 736 | root[0] = link_prev | ||
| 737 | else: | ||
| 738 | link = root[1] | ||
| 739 | link_next = link[1] | ||
| 740 | root[1] = link_next | ||
| 741 | link_next[0] = root | ||
| 742 | key = link[2] | ||
| 743 | del self.__map[key] | ||
| 744 | value = dict.pop(self, key) | ||
| 745 | return key, value | ||
| 746 | |||
| 747 | # -- the following methods do not depend on the internal structure -- | ||
| 748 | |||
| 749 | def keys(self): | ||
| 750 | 'od.keys() -> list of keys in od' | ||
| 751 | return list(self) | ||
| 752 | |||
| 753 | def values(self): | ||
| 754 | 'od.values() -> list of values in od' | ||
| 755 | return [self[key] for key in self] | ||
| 756 | |||
| 757 | def items(self): | ||
| 758 | 'od.items() -> list of (key, value) pairs in od' | ||
| 759 | return [(key, self[key]) for key in self] | ||
| 760 | |||
| 761 | def iterkeys(self): | ||
| 762 | 'od.iterkeys() -> an iterator over the keys in od' | ||
| 763 | return iter(self) | ||
| 764 | |||
| 765 | def itervalues(self): | ||
| 766 | 'od.itervalues() -> an iterator over the values in od' | ||
| 767 | for k in self: | ||
| 768 | yield self[k] | ||
| 769 | |||
| 770 | def iteritems(self): | ||
| 771 | 'od.iteritems() -> an iterator over the (key, value) items in od' | ||
| 772 | for k in self: | ||
| 773 | yield (k, self[k]) | ||
| 774 | |||
| 775 | def update(*args, **kwds): | ||
| 776 | '''od.update(E, **F) -> None. Update od from dict/iterable E and F. | ||
| 777 | |||
| 778 | If E is a dict instance, does: for k in E: od[k] = E[k] | ||
| 779 | If E has a .keys() method, does: for k in E.keys(): od[k] = E[k] | ||
| 780 | Or if E is an iterable of items, does: for k, v in E: od[k] = v | ||
| 781 | In either case, this is followed by: for k, v in F.items(): od[k] = v | ||
| 782 | |||
| 783 | ''' | ||
| 784 | if len(args) > 2: | ||
| 785 | raise TypeError('update() takes at most 2 positional ' | ||
| 786 | 'arguments (%d given)' % (len(args),)) | ||
| 787 | elif not args: | ||
| 788 | raise TypeError('update() takes at least 1 argument (0 given)') | ||
| 789 | self = args[0] | ||
| 790 | # Make progressively weaker assumptions about "other" | ||
| 791 | other = () | ||
| 792 | if len(args) == 2: | ||
| 793 | other = args[1] | ||
| 794 | if isinstance(other, dict): | ||
| 795 | for key in other: | ||
| 796 | self[key] = other[key] | ||
| 797 | elif hasattr(other, 'keys'): | ||
| 798 | for key in other.keys(): | ||
| 799 | self[key] = other[key] | ||
| 800 | else: | ||
| 801 | for key, value in other: | ||
| 802 | self[key] = value | ||
| 803 | for key, value in kwds.items(): | ||
| 804 | self[key] = value | ||
| 805 | |||
| 806 | __update = update # let subclasses override update without breaking __init__ | ||
| 807 | |||
| 808 | __marker = object() | ||
| 809 | |||
| 810 | def pop(self, key, default=__marker): | ||
| 811 | '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value. | ||
| 812 | If key is not found, d is returned if given, otherwise KeyError is raised. | ||
| 813 | |||
| 814 | ''' | ||
| 815 | if key in self: | ||
| 816 | result = self[key] | ||
| 817 | del self[key] | ||
| 818 | return result | ||
| 819 | if default is self.__marker: | ||
| 820 | raise KeyError(key) | ||
| 821 | return default | ||
| 822 | |||
| 823 | def setdefault(self, key, default=None): | ||
| 824 | 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' | ||
| 825 | if key in self: | ||
| 826 | return self[key] | ||
| 827 | self[key] = default | ||
| 828 | return default | ||
| 829 | |||
| 830 | def __repr__(self, _repr_running=None): | ||
| 831 | 'od.__repr__() <==> repr(od)' | ||
| 832 | if not _repr_running: _repr_running = {} | ||
| 833 | call_key = id(self), _get_ident() | ||
| 834 | if call_key in _repr_running: | ||
| 835 | return '...' | ||
| 836 | _repr_running[call_key] = 1 | ||
| 837 | try: | ||
| 838 | if not self: | ||
| 839 | return '%s()' % (self.__class__.__name__,) | ||
| 840 | return '%s(%r)' % (self.__class__.__name__, self.items()) | ||
| 841 | finally: | ||
| 842 | del _repr_running[call_key] | ||
| 843 | |||
| 844 | def __reduce__(self): | ||
| 845 | 'Return state information for pickling' | ||
| 846 | items = [[k, self[k]] for k in self] | ||
| 847 | inst_dict = vars(self).copy() | ||
| 848 | for k in vars(OrderedDict()): | ||
| 849 | inst_dict.pop(k, None) | ||
| 850 | if inst_dict: | ||
| 851 | return (self.__class__, (items,), inst_dict) | ||
| 852 | return self.__class__, (items,) | ||
| 853 | |||
| 854 | def copy(self): | ||
| 855 | 'od.copy() -> a shallow copy of od' | ||
| 856 | return self.__class__(self) | ||
| 857 | |||
| 858 | @classmethod | ||
| 859 | def fromkeys(cls, iterable, value=None): | ||
| 860 | '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S | ||
| 861 | and values equal to v (which defaults to None). | ||
| 862 | |||
| 863 | ''' | ||
| 864 | d = cls() | ||
| 865 | for key in iterable: | ||
| 866 | d[key] = value | ||
| 867 | return d | ||
| 868 | |||
| 869 | def __eq__(self, other): | ||
| 870 | '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive | ||
| 871 | while comparison to a regular mapping is order-insensitive. | ||
| 872 | |||
| 873 | ''' | ||
| 874 | if isinstance(other, OrderedDict): | ||
| 875 | return len(self)==len(other) and self.items() == other.items() | ||
| 876 | return dict.__eq__(self, other) | ||
| 877 | |||
| 878 | def __ne__(self, other): | ||
| 879 | return not self == other | ||
| 880 | |||
| 881 | # -- the following methods are only used in Python 2.7 -- | ||
| 882 | |||
| 883 | def viewkeys(self): | ||
| 884 | "od.viewkeys() -> a set-like object providing a view on od's keys" | ||
| 885 | return KeysView(self) | ||
| 886 | |||
| 887 | def viewvalues(self): | ||
| 888 | "od.viewvalues() -> an object providing a view on od's values" | ||
| 889 | return ValuesView(self) | ||
| 890 | |||
| 891 | def viewitems(self): | ||
| 892 | "od.viewitems() -> a set-like object providing a view on od's items" | ||
| 893 | return ItemsView(self) | ||
| 894 | |||
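The ActiveState `OrderedDict` recipe above keeps a circular doubly linked list of keys alongside the inherited dict, so iteration follows insertion order and `popitem` can pop from either end. A sketch (same behaviour as `collections.OrderedDict` for these calls):

```python
od = OrderedDict()
od['b'] = 1
od['a'] = 2
od['c'] = 3
print(list(od.keys()))         # ['b', 'a', 'c'] - insertion order
print(od.popitem())            # ('c', 3) - LIFO by default
print(od.popitem(last=False))  # ('b', 1) - FIFO when last is false
```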
| 895 | try: | ||
| 896 | from logging.config import BaseConfigurator, valid_ident | ||
| 897 | except ImportError: # pragma: no cover | ||
| 898 | IDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I) | ||
| 899 | |||
| 900 | |||
| 901 | def valid_ident(s): | ||
| 902 | m = IDENTIFIER.match(s) | ||
| 903 | if not m: | ||
| 904 | raise ValueError('Not a valid Python identifier: %r' % s) | ||
| 905 | return True | ||
| 906 | |||
| 907 | |||
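`valid_ident` is a permissive identifier check used below to filter which configuration keys become keyword arguments; note that it raises rather than returning `False`. Illustrative only:

```python
print(valid_ident('handler_1'))  # True
try:
    valid_ident('not-valid')
except ValueError as exc:
    print(exc)  # Not a valid Python identifier: 'not-valid'
```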
| 908 | # The ConvertingXXX classes are wrappers around standard Python containers, | ||
| 909 | # and they serve to convert any suitable values in the container. The | ||
| 910 | # conversion converts base dicts, lists and tuples to their wrapped | ||
| 911 | # equivalents, whereas strings which match a conversion format are converted | ||
| 912 | # appropriately. | ||
| 913 | # | ||
| 914 | # Each wrapper should have a configurator attribute holding the actual | ||
| 915 | # configurator to use for conversion. | ||
| 916 | |||
| 917 | class ConvertingDict(dict): | ||
| 918 | """A converting dictionary wrapper.""" | ||
| 919 | |||
| 920 | def __getitem__(self, key): | ||
| 921 | value = dict.__getitem__(self, key) | ||
| 922 | result = self.configurator.convert(value) | ||
| 923 | #If the converted value is different, save for next time | ||
| 924 | if value is not result: | ||
| 925 | self[key] = result | ||
| 926 | if type(result) in (ConvertingDict, ConvertingList, | ||
| 927 | ConvertingTuple): | ||
| 928 | result.parent = self | ||
| 929 | result.key = key | ||
| 930 | return result | ||
| 931 | |||
| 932 | def get(self, key, default=None): | ||
| 933 | value = dict.get(self, key, default) | ||
| 934 | result = self.configurator.convert(value) | ||
| 935 | #If the converted value is different, save for next time | ||
| 936 | if value is not result: | ||
| 937 | self[key] = result | ||
| 938 | if type(result) in (ConvertingDict, ConvertingList, | ||
| 939 | ConvertingTuple): | ||
| 940 | result.parent = self | ||
| 941 | result.key = key | ||
| 942 | return result | ||
| 943 | |||
| 944 | def pop(self, key, default=None): | ||
| 945 | value = dict.pop(self, key, default) | ||
| 946 | result = self.configurator.convert(value) | ||
| 947 | if value is not result: | ||
| 948 | if type(result) in (ConvertingDict, ConvertingList, | ||
| 949 | ConvertingTuple): | ||
| 950 | result.parent = self | ||
| 951 | result.key = key | ||
| 952 | return result | ||
| 953 | |||
| 954 | class ConvertingList(list): | ||
| 955 | """A converting list wrapper.""" | ||
| 956 | def __getitem__(self, key): | ||
| 957 | value = list.__getitem__(self, key) | ||
| 958 | result = self.configurator.convert(value) | ||
| 959 | #If the converted value is different, save for next time | ||
| 960 | if value is not result: | ||
| 961 | self[key] = result | ||
| 962 | if type(result) in (ConvertingDict, ConvertingList, | ||
| 963 | ConvertingTuple): | ||
| 964 | result.parent = self | ||
| 965 | result.key = key | ||
| 966 | return result | ||
| 967 | |||
| 968 | def pop(self, idx=-1): | ||
| 969 | value = list.pop(self, idx) | ||
| 970 | result = self.configurator.convert(value) | ||
| 971 | if value is not result: | ||
| 972 | if type(result) in (ConvertingDict, ConvertingList, | ||
| 973 | ConvertingTuple): | ||
| 974 | result.parent = self | ||
| 975 | return result | ||
| 976 | |||
| 977 | class ConvertingTuple(tuple): | ||
| 978 | """A converting tuple wrapper.""" | ||
| 979 | def __getitem__(self, key): | ||
| 980 | value = tuple.__getitem__(self, key) | ||
| 981 | result = self.configurator.convert(value) | ||
| 982 | if value is not result: | ||
| 983 | if type(result) in (ConvertingDict, ConvertingList, | ||
| 984 | ConvertingTuple): | ||
| 985 | result.parent = self | ||
| 986 | result.key = key | ||
| 987 | return result | ||
| 988 | |||
| 989 | class BaseConfigurator(object): | ||
| 990 | """ | ||
| 991 | The configurator base class which defines some useful defaults. | ||
| 992 | """ | ||
| 993 | |||
| 994 | CONVERT_PATTERN = re.compile(r'^(?P<prefix>[a-z]+)://(?P<suffix>.*)$') | ||
| 995 | |||
| 996 | WORD_PATTERN = re.compile(r'^\s*(\w+)\s*') | ||
| 997 | DOT_PATTERN = re.compile(r'^\.\s*(\w+)\s*') | ||
| 998 | INDEX_PATTERN = re.compile(r'^\[\s*(\w+)\s*\]\s*') | ||
| 999 | DIGIT_PATTERN = re.compile(r'^\d+$') | ||
| 1000 | |||
| 1001 | value_converters = { | ||
| 1002 | 'ext' : 'ext_convert', | ||
| 1003 | 'cfg' : 'cfg_convert', | ||
| 1004 | } | ||
| 1005 | |||
| 1006 | # We might want to use a different one, e.g. importlib | ||
| 1007 | importer = staticmethod(__import__) | ||
| 1008 | |||
| 1009 | def __init__(self, config): | ||
| 1010 | self.config = ConvertingDict(config) | ||
| 1011 | self.config.configurator = self | ||
| 1012 | |||
| 1013 | def resolve(self, s): | ||
| 1014 | """ | ||
| 1015 | Resolve strings to objects using standard import and attribute | ||
| 1016 | syntax. | ||
| 1017 | """ | ||
| 1018 | name = s.split('.') | ||
| 1019 | used = name.pop(0) | ||
| 1020 | try: | ||
| 1021 | found = self.importer(used) | ||
| 1022 | for frag in name: | ||
| 1023 | used += '.' + frag | ||
| 1024 | try: | ||
| 1025 | found = getattr(found, frag) | ||
| 1026 | except AttributeError: | ||
| 1027 | self.importer(used) | ||
| 1028 | found = getattr(found, frag) | ||
| 1029 | return found | ||
| 1030 | except ImportError: | ||
| 1031 | e, tb = sys.exc_info()[1:] | ||
| 1032 | v = ValueError('Cannot resolve %r: %s' % (s, e)) | ||
| 1033 | v.__cause__, v.__traceback__ = e, tb | ||
| 1034 | raise v | ||
| 1035 | |||
| 1036 | def ext_convert(self, value): | ||
| 1037 | """Default converter for the ext:// protocol.""" | ||
| 1038 | return self.resolve(value) | ||
| 1039 | |||
| 1040 | def cfg_convert(self, value): | ||
| 1041 | """Default converter for the cfg:// protocol.""" | ||
| 1042 | rest = value | ||
| 1043 | m = self.WORD_PATTERN.match(rest) | ||
| 1044 | if m is None: | ||
| 1045 | raise ValueError("Unable to convert %r" % value) | ||
| 1046 | else: | ||
| 1047 | rest = rest[m.end():] | ||
| 1048 | d = self.config[m.groups()[0]] | ||
| 1049 | #print d, rest | ||
| 1050 | while rest: | ||
| 1051 | m = self.DOT_PATTERN.match(rest) | ||
| 1052 | if m: | ||
| 1053 | d = d[m.groups()[0]] | ||
| 1054 | else: | ||
| 1055 | m = self.INDEX_PATTERN.match(rest) | ||
| 1056 | if m: | ||
| 1057 | idx = m.groups()[0] | ||
| 1058 | if not self.DIGIT_PATTERN.match(idx): | ||
| 1059 | d = d[idx] | ||
| 1060 | else: | ||
| 1061 | try: | ||
| 1062 | n = int(idx) # try as number first (most likely) | ||
| 1063 | d = d[n] | ||
| 1064 | except TypeError: | ||
| 1065 | d = d[idx] | ||
| 1066 | if m: | ||
| 1067 | rest = rest[m.end():] | ||
| 1068 | else: | ||
| 1069 | raise ValueError('Unable to convert ' | ||
| 1070 | '%r at %r' % (value, rest)) | ||
| 1071 | #rest should be empty | ||
| 1072 | return d | ||
| 1073 | |||
| 1074 | def convert(self, value): | ||
| 1075 | """ | ||
| 1076 | Convert values to an appropriate type. dicts, lists and tuples are | ||
| 1077 | replaced by their converting alternatives. Strings are checked to | ||
| 1078 | see if they have a conversion format and are converted if they do. | ||
| 1079 | """ | ||
| 1080 | if not isinstance(value, ConvertingDict) and isinstance(value, dict): | ||
| 1081 | value = ConvertingDict(value) | ||
| 1082 | value.configurator = self | ||
| 1083 | elif not isinstance(value, ConvertingList) and isinstance(value, list): | ||
| 1084 | value = ConvertingList(value) | ||
| 1085 | value.configurator = self | ||
| 1086 | elif not isinstance(value, ConvertingTuple) and\ | ||
| 1087 | isinstance(value, tuple): | ||
| 1088 | value = ConvertingTuple(value) | ||
| 1089 | value.configurator = self | ||
| 1090 | elif isinstance(value, string_types): | ||
| 1091 | m = self.CONVERT_PATTERN.match(value) | ||
| 1092 | if m: | ||
| 1093 | d = m.groupdict() | ||
| 1094 | prefix = d['prefix'] | ||
| 1095 | converter = self.value_converters.get(prefix, None) | ||
| 1096 | if converter: | ||
| 1097 | suffix = d['suffix'] | ||
| 1098 | converter = getattr(self, converter) | ||
| 1099 | value = converter(suffix) | ||
| 1100 | return value | ||
| 1101 | |||
| 1102 | def configure_custom(self, config): | ||
| 1103 | """Configure an object with a user-supplied factory.""" | ||
| 1104 | c = config.pop('()') | ||
| 1105 | if not callable(c): | ||
| 1106 | c = self.resolve(c) | ||
| 1107 | props = config.pop('.', None) | ||
| 1108 | # Check for valid identifiers | ||
| 1109 | kwargs = dict([(k, config[k]) for k in config if valid_ident(k)]) | ||
| 1110 | result = c(**kwargs) | ||
| 1111 | if props: | ||
| 1112 | for name, value in props.items(): | ||
| 1113 | setattr(result, name, value) | ||
| 1114 | return result | ||
| 1115 | |||
| 1116 | def as_tuple(self, value): | ||
| 1117 | """Utility function which converts lists to tuples.""" | ||
| 1118 | if isinstance(value, list): | ||
| 1119 | value = tuple(value) | ||
| 1120 | return value | ||
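That completes the configurator fallback: the Converting wrappers lazily replace nested containers on access, and `convert` dispatches `ext://` strings to the import-based `resolve` and `cfg://` strings to a dotted/indexed walk over the configuration itself. A sketch with an invented configuration (the stdlib `logging.config.BaseConfigurator`, used when the import above succeeds, behaves the same way for these calls):

```python
c = BaseConfigurator({'handlers': {'console': {'level': 'DEBUG'}}})

# cfg:// walks the configuration itself
print(c.convert('cfg://handlers.console.level'))  # 'DEBUG'

# ext:// resolves a dotted import path
join = c.convert('ext://os.path.join')
print(join('a', 'b'))  # 'a/b' on POSIX
```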
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/database.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/database.py new file mode 100644 index 0000000..54483e1 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/database.py | |||
| @@ -0,0 +1,1336 @@ | |||
| 1 | # -*- coding: utf-8 -*- | ||
| 2 | # | ||
| 3 | # Copyright (C) 2012-2017 The Python Software Foundation. | ||
| 4 | # See LICENSE.txt and CONTRIBUTORS.txt. | ||
| 5 | # | ||
| 6 | """PEP 376 implementation.""" | ||
| 7 | |||
| 8 | from __future__ import unicode_literals | ||
| 9 | |||
| 10 | import base64 | ||
| 11 | import codecs | ||
| 12 | import contextlib | ||
| 13 | import hashlib | ||
| 14 | import logging | ||
| 15 | import os | ||
| 16 | import posixpath | ||
| 17 | import sys | ||
| 18 | import zipimport | ||
| 19 | |||
| 20 | from . import DistlibException, resources | ||
| 21 | from .compat import StringIO | ||
| 22 | from .version import get_scheme, UnsupportedVersionError | ||
| 23 | from .metadata import Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME | ||
| 24 | from .util import (parse_requirement, cached_property, parse_name_and_version, | ||
| 25 | read_exports, write_exports, CSVReader, CSVWriter) | ||
| 26 | |||
| 27 | |||
| 28 | __all__ = ['Distribution', 'BaseInstalledDistribution', | ||
| 29 | 'InstalledDistribution', 'EggInfoDistribution', | ||
| 30 | 'DistributionPath'] | ||
| 31 | |||
| 32 | |||
| 33 | logger = logging.getLogger(__name__) | ||
| 34 | |||
| 35 | EXPORTS_FILENAME = 'pydist-exports.json' | ||
| 36 | COMMANDS_FILENAME = 'pydist-commands.json' | ||
| 37 | |||
| 38 | DIST_FILES = ('INSTALLER', METADATA_FILENAME, 'RECORD', 'REQUESTED', | ||
| 39 | 'RESOURCES', EXPORTS_FILENAME, 'SHARED') | ||
| 40 | |||
| 41 | DISTINFO_EXT = '.dist-info' | ||
| 42 | |||
| 43 | |||
| 44 | class _Cache(object): | ||
| 45 | """ | ||
| 46 | A simple cache mapping names and .dist-info paths to distributions | ||
| 47 | """ | ||
| 48 | def __init__(self): | ||
| 49 | """ | ||
| 50 | Initialise an instance. There is normally one for each DistributionPath. | ||
| 51 | """ | ||
| 52 | self.name = {} | ||
| 53 | self.path = {} | ||
| 54 | self.generated = False | ||
| 55 | |||
| 56 | def clear(self): | ||
| 57 | """ | ||
| 58 | Clear the cache, setting it to its initial state. | ||
| 59 | """ | ||
| 60 | self.name.clear() | ||
| 61 | self.path.clear() | ||
| 62 | self.generated = False | ||
| 63 | |||
| 64 | def add(self, dist): | ||
| 65 | """ | ||
| 66 | Add a distribution to the cache. | ||
| 67 | :param dist: The distribution to add. | ||
| 68 | """ | ||
| 69 | if dist.path not in self.path: | ||
| 70 | self.path[dist.path] = dist | ||
| 71 | self.name.setdefault(dist.key, []).append(dist) | ||
| 72 | |||
| 73 | |||
| 74 | class DistributionPath(object): | ||
| 75 | """ | ||
| 76 | Represents a set of distributions installed on a path (typically sys.path). | ||
| 77 | """ | ||
| 78 | def __init__(self, path=None, include_egg=False): | ||
| 79 | """ | ||
| 80 | Create an instance from a path, optionally including legacy (distutils/ | ||
| 81 | setuptools/distribute) distributions. | ||
| 82 | :param path: The path to use, as a list of directories. If not specified, | ||
| 83 | sys.path is used. | ||
| 84 | :param include_egg: If True, this instance will look for and return legacy | ||
| 85 | distributions as well as those based on PEP 376. | ||
| 86 | """ | ||
| 87 | if path is None: | ||
| 88 | path = sys.path | ||
| 89 | self.path = path | ||
| 90 | self._include_dist = True | ||
| 91 | self._include_egg = include_egg | ||
| 92 | |||
| 93 | self._cache = _Cache() | ||
| 94 | self._cache_egg = _Cache() | ||
| 95 | self._cache_enabled = True | ||
| 96 | self._scheme = get_scheme('default') | ||
| 97 | |||
| 98 | def _get_cache_enabled(self): | ||
| 99 | return self._cache_enabled | ||
| 100 | |||
| 101 | def _set_cache_enabled(self, value): | ||
| 102 | self._cache_enabled = value | ||
| 103 | |||
| 104 | cache_enabled = property(_get_cache_enabled, _set_cache_enabled) | ||
| 105 | |||
| 106 | def clear_cache(self): | ||
| 107 | """ | ||
| 108 | Clears the internal cache. | ||
| 109 | """ | ||
| 110 | self._cache.clear() | ||
| 111 | self._cache_egg.clear() | ||
| 112 | |||
| 113 | |||
| 114 | def _yield_distributions(self): | ||
| 115 | """ | ||
| 116 | Yield .dist-info and/or .egg(-info) distributions. | ||
| 117 | """ | ||
| 118 | # We need to check if we've seen some resources already, because on | ||
| 119 | # some Linux systems (e.g. some Debian/Ubuntu variants) there are | ||
| 120 | # symlinks which alias other files in the environment. | ||
| 121 | seen = set() | ||
| 122 | for path in self.path: | ||
| 123 | finder = resources.finder_for_path(path) | ||
| 124 | if finder is None: | ||
| 125 | continue | ||
| 126 | r = finder.find('') | ||
| 127 | if not r or not r.is_container: | ||
| 128 | continue | ||
| 129 | rset = sorted(r.resources) | ||
| 130 | for entry in rset: | ||
| 131 | r = finder.find(entry) | ||
| 132 | if not r or r.path in seen: | ||
| 133 | continue | ||
| 134 | if self._include_dist and entry.endswith(DISTINFO_EXT): | ||
| 135 | possible_filenames = [METADATA_FILENAME, WHEEL_METADATA_FILENAME] | ||
| 136 | for metadata_filename in possible_filenames: | ||
| 137 | metadata_path = posixpath.join(entry, metadata_filename) | ||
| 138 | pydist = finder.find(metadata_path) | ||
| 139 | if pydist: | ||
| 140 | break | ||
| 141 | else: | ||
| 142 | continue | ||
| 143 | |||
| 144 | with contextlib.closing(pydist.as_stream()) as stream: | ||
| 145 | metadata = Metadata(fileobj=stream, scheme='legacy') | ||
| 146 | logger.debug('Found %s', r.path) | ||
| 147 | seen.add(r.path) | ||
| 148 | yield new_dist_class(r.path, metadata=metadata, | ||
| 149 | env=self) | ||
| 150 | elif self._include_egg and entry.endswith(('.egg-info', | ||
| 151 | '.egg')): | ||
| 152 | logger.debug('Found %s', r.path) | ||
| 153 | seen.add(r.path) | ||
| 154 | yield old_dist_class(r.path, self) | ||
| 155 | |||
| 156 | def _generate_cache(self): | ||
| 157 | """ | ||
| 158 | Scan the path for distributions and populate the cache with | ||
| 159 | those that are found. | ||
| 160 | """ | ||
| 161 | gen_dist = not self._cache.generated | ||
| 162 | gen_egg = self._include_egg and not self._cache_egg.generated | ||
| 163 | if gen_dist or gen_egg: | ||
| 164 | for dist in self._yield_distributions(): | ||
| 165 | if isinstance(dist, InstalledDistribution): | ||
| 166 | self._cache.add(dist) | ||
| 167 | else: | ||
| 168 | self._cache_egg.add(dist) | ||
| 169 | |||
| 170 | if gen_dist: | ||
| 171 | self._cache.generated = True | ||
| 172 | if gen_egg: | ||
| 173 | self._cache_egg.generated = True | ||
| 174 | |||
| 175 | @classmethod | ||
| 176 | def distinfo_dirname(cls, name, version): | ||
| 177 | """ | ||
| 178 | The *name* and *version* parameters are converted into their | ||
| 179 | filename-escaped form, i.e. any ``'-'`` characters are replaced | ||
| 180 | with ``'_'`` other than the one in ``'dist-info'`` and the one | ||
| 181 | separating the name from the version number. | ||
| 182 | |||
| 183 | :parameter name: is converted to a standard distribution name by replacing | ||
| 184 | any runs of non-alphanumeric characters with a single | ||
| 185 | ``'-'``. | ||
| 186 | :type name: string | ||
| 187 | :parameter version: is converted to a standard version string. Spaces | ||
| 188 | become dots, and all other non-alphanumeric characters | ||
| 189 | (except dots) become dashes, with runs of multiple | ||
| 190 | dashes condensed to a single dash. | ||
| 191 | :type version: string | ||
| 192 | :returns: directory name | ||
| 193 | :rtype: string""" | ||
| 194 | name = name.replace('-', '_') | ||
| 195 | return '-'.join([name, version]) + DISTINFO_EXT | ||
| 196 | |||
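`distinfo_dirname` escapes `'-'` to `'_'` in the name before joining name and version, so the only dashes left are the version separator and the one in `dist-info`. For example:

```python
print(DistributionPath.distinfo_dirname('python-ldap', '2.5'))
# 'python_ldap-2.5.dist-info'
```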
| 197 | def get_distributions(self): | ||
| 198 | """ | ||
| 199 | Provides an iterator that looks for distributions and returns | ||
| 200 | :class:`InstalledDistribution` or | ||
| 201 | :class:`EggInfoDistribution` instances for each one of them. | ||
| 202 | |||
| 203 | :rtype: iterator of :class:`InstalledDistribution` and | ||
| 204 | :class:`EggInfoDistribution` instances | ||
| 205 | """ | ||
| 206 | if not self._cache_enabled: | ||
| 207 | for dist in self._yield_distributions(): | ||
| 208 | yield dist | ||
| 209 | else: | ||
| 210 | self._generate_cache() | ||
| 211 | |||
| 212 | for dist in self._cache.path.values(): | ||
| 213 | yield dist | ||
| 214 | |||
| 215 | if self._include_egg: | ||
| 216 | for dist in self._cache_egg.path.values(): | ||
| 217 | yield dist | ||
| 218 | |||
| 219 | def get_distribution(self, name): | ||
| 220 | """ | ||
| 221 | Looks for a named distribution on the path. | ||
| 222 | |||
| 223 | This function only returns the first result found, as no more than one | ||
| 224 | value is expected. If nothing is found, ``None`` is returned. | ||
| 225 | |||
| 226 | :rtype: :class:`InstalledDistribution`, :class:`EggInfoDistribution` | ||
| 227 | or ``None`` | ||
| 228 | """ | ||
| 229 | result = None | ||
| 230 | name = name.lower() | ||
| 231 | if not self._cache_enabled: | ||
| 232 | for dist in self._yield_distributions(): | ||
| 233 | if dist.key == name: | ||
| 234 | result = dist | ||
| 235 | break | ||
| 236 | else: | ||
| 237 | self._generate_cache() | ||
| 238 | |||
| 239 | if name in self._cache.name: | ||
| 240 | result = self._cache.name[name][0] | ||
| 241 | elif self._include_egg and name in self._cache_egg.name: | ||
| 242 | result = self._cache_egg.name[name][0] | ||
| 243 | return result | ||
| 244 | |||
| 245 | def provides_distribution(self, name, version=None): | ||
| 246 | """ | ||
| 247 | Iterates over all distributions to find which distributions provide *name*. | ||
| 248 | If a *version* is provided, it will be used to filter the results. | ||
| 249 | |||
| 250 | This is a generator: every matching distribution is yielded, not just | ||
| 251 | the first one found. If nothing provides *name*, nothing is yielded. | ||
| 252 | |||
| 253 | :parameter version: a version specifier that indicates the version | ||
| 254 | required, conforming to the format in ``PEP-345`` | ||
| 255 | |||
| 256 | :type name: string | ||
| 257 | :type version: string | ||
| 258 | """ | ||
| 259 | matcher = None | ||
| 260 | if version is not None: | ||
| 261 | try: | ||
| 262 | matcher = self._scheme.matcher('%s (%s)' % (name, version)) | ||
| 263 | except ValueError: | ||
| 264 | raise DistlibException('invalid name or version: %r, %r' % | ||
| 265 | (name, version)) | ||
| 266 | |||
| 267 | for dist in self.get_distributions(): | ||
| 268 | # We hit a problem on Travis where enum34 was installed and doesn't | ||
| 269 | # have a provides attribute ... | ||
| 270 | if not hasattr(dist, 'provides'): | ||
| 271 | logger.debug('No "provides": %s', dist) | ||
| 272 | else: | ||
| 273 | provided = dist.provides | ||
| 274 | |||
| 275 | for p in provided: | ||
| 276 | p_name, p_ver = parse_name_and_version(p) | ||
| 277 | if matcher is None: | ||
| 278 | if p_name == name: | ||
| 279 | yield dist | ||
| 280 | break | ||
| 281 | else: | ||
| 282 | if p_name == name and matcher.match(p_ver): | ||
| 283 | yield dist | ||
| 284 | break | ||
| 285 | |||
| 286 | def get_file_path(self, name, relative_path): | ||
| 287 | """ | ||
| 288 | Return the path to a resource file. | ||
| 289 | """ | ||
| 290 | dist = self.get_distribution(name) | ||
| 291 | if dist is None: | ||
| 292 | raise LookupError('no distribution named %r found' % name) | ||
| 293 | return dist.get_resource_path(relative_path) | ||
| 294 | |||
| 295 | def get_exported_entries(self, category, name=None): | ||
| 296 | """ | ||
| 297 | Return all of the exported entries in a particular category. | ||
| 298 | |||
| 299 | :param category: The category to search for entries. | ||
| 300 | :param name: If specified, only entries with that name are returned. | ||
| 301 | """ | ||
| 302 | for dist in self.get_distributions(): | ||
| 303 | r = dist.exports | ||
| 304 | if category in r: | ||
| 305 | d = r[category] | ||
| 306 | if name is not None: | ||
| 307 | if name in d: | ||
| 308 | yield d[name] | ||
| 309 | else: | ||
| 310 | for v in d.values(): | ||
| 311 | yield v | ||
| 312 | |||
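Taken together, `DistributionPath` scans a search path once, caches what it finds, and answers queries from the cache. A sketch of typical read-only use (results depend entirely on what is installed in the running environment):

```python
dp = DistributionPath(include_egg=True)  # also pick up legacy .egg(-info)

for dist in dp.get_distributions():
    print(dist.name, dist.version)

d = dp.get_distribution('pip')           # name lookup is case-insensitive
if d is not None:
    print('metadata for pip found at %s' % d.path)
```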
| 313 | |||
| 314 | class Distribution(object): | ||
| 315 | """ | ||
| 316 | A base class for distributions, whether installed or from indexes. | ||
| 317 | Either way, it must have some metadata, so that's all that's needed | ||
| 318 | for construction. | ||
| 319 | """ | ||
| 320 | |||
| 321 | build_time_dependency = False | ||
| 322 | """ | ||
| 323 | Set to True if it's known to be only a build-time dependency (i.e. | ||
| 324 | not needed after installation). | ||
| 325 | """ | ||
| 326 | |||
| 327 | requested = False | ||
| 328 | """A boolean that indicates whether the ``REQUESTED`` metadata file is | ||
| 329 | present (in other words, whether the package was installed by user | ||
| 330 | request or it was installed as a dependency).""" | ||
| 331 | |||
| 332 | def __init__(self, metadata): | ||
| 333 | """ | ||
| 334 | Initialise an instance. | ||
| 335 | :param metadata: The instance of :class:`Metadata` describing this | ||
| 336 | distribution. | ||
| 337 | """ | ||
| 338 | self.metadata = metadata | ||
| 339 | self.name = metadata.name | ||
| 340 | self.key = self.name.lower() # for case-insensitive comparisons | ||
| 341 | self.version = metadata.version | ||
| 342 | self.locator = None | ||
| 343 | self.digest = None | ||
| 344 | self.extras = None # additional features requested | ||
| 345 | self.context = None # environment marker overrides | ||
| 346 | self.download_urls = set() | ||
| 347 | self.digests = {} | ||
| 348 | |||
| 349 | @property | ||
| 350 | def source_url(self): | ||
| 351 | """ | ||
| 352 | The source archive download URL for this distribution. | ||
| 353 | """ | ||
| 354 | return self.metadata.source_url | ||
| 355 | |||
| 356 | download_url = source_url # Backward compatibility | ||
| 357 | |||
| 358 | @property | ||
| 359 | def name_and_version(self): | ||
| 360 | """ | ||
| 361 | A utility property which displays the name and version in parentheses. | ||
| 362 | """ | ||
| 363 | return '%s (%s)' % (self.name, self.version) | ||
| 364 | |||
| 365 | @property | ||
| 366 | def provides(self): | ||
| 367 | """ | ||
| 368 | A set of distribution names and versions provided by this distribution. | ||
| 369 | :return: A set of "name (version)" strings. | ||
| 370 | """ | ||
| 371 | plist = self.metadata.provides | ||
| 372 | s = '%s (%s)' % (self.name, self.version) | ||
| 373 | if s not in plist: | ||
| 374 | plist.append(s) | ||
| 375 | return plist | ||
| 376 | |||
| 377 | def _get_requirements(self, req_attr): | ||
| 378 | md = self.metadata | ||
| 379 | logger.debug('Getting requirements from metadata %r', md.todict()) | ||
| 380 | reqts = getattr(md, req_attr) | ||
| 381 | return set(md.get_requirements(reqts, extras=self.extras, | ||
| 382 | env=self.context)) | ||
| 383 | |||
| 384 | @property | ||
| 385 | def run_requires(self): | ||
| 386 | return self._get_requirements('run_requires') | ||
| 387 | |||
| 388 | @property | ||
| 389 | def meta_requires(self): | ||
| 390 | return self._get_requirements('meta_requires') | ||
| 391 | |||
| 392 | @property | ||
| 393 | def build_requires(self): | ||
| 394 | return self._get_requirements('build_requires') | ||
| 395 | |||
| 396 | @property | ||
| 397 | def test_requires(self): | ||
| 398 | return self._get_requirements('test_requires') | ||
| 399 | |||
| 400 | @property | ||
| 401 | def dev_requires(self): | ||
| 402 | return self._get_requirements('dev_requires') | ||
| 403 | |||
| 404 | def matches_requirement(self, req): | ||
| 405 | """ | ||
| 406 | Say if this instance matches (fulfills) a requirement. | ||
| 407 | :param req: The requirement to match. | ||
| 408 | :rtype req: str | ||
| 409 | :return: True if it matches, else False. | ||
| 410 | """ | ||
| 411 | # Requirement may contain extras - parse to lose those | ||
| 412 | # from what's passed to the matcher | ||
| 413 | r = parse_requirement(req) | ||
| 414 | scheme = get_scheme(self.metadata.scheme) | ||
| 415 | try: | ||
| 416 | matcher = scheme.matcher(r.requirement) | ||
| 417 | except UnsupportedVersionError: | ||
| 418 | # XXX compat-mode if cannot read the version | ||
| 419 | logger.warning('could not read version %r - using name only', | ||
| 420 | req) | ||
| 421 | name = req.split()[0] | ||
| 422 | matcher = scheme.matcher(name) | ||
| 423 | |||
| 424 | name = matcher.key # case-insensitive | ||
| 425 | |||
| 426 | result = False | ||
| 427 | for p in self.provides: | ||
| 428 | p_name, p_ver = parse_name_and_version(p) | ||
| 429 | if p_name != name: | ||
| 430 | continue | ||
| 431 | try: | ||
| 432 | result = matcher.match(p_ver) | ||
| 433 | break | ||
| 434 | except UnsupportedVersionError: | ||
| 435 | pass | ||
| 436 | return result | ||
| 437 | |||
| 438 | def __repr__(self): | ||
| 439 | """ | ||
| 440 | Return a textual representation of this instance, | ||
| 441 | """ | ||
| 442 | if self.source_url: | ||
| 443 | suffix = ' [%s]' % self.source_url | ||
| 444 | else: | ||
| 445 | suffix = '' | ||
| 446 | return '<Distribution %s (%s)%s>' % (self.name, self.version, suffix) | ||
| 447 | |||
| 448 | def __eq__(self, other): | ||
| 449 | """ | ||
| 450 | See if this distribution is the same as another. | ||
| 451 | :param other: The distribution to compare with. To be equal to one | ||
| 452 | another, distributions must have the same type, name, | ||
| 453 | version and source_url. | ||
| 454 | :return: True if it is the same, else False. | ||
| 455 | """ | ||
| 456 | if type(other) is not type(self): | ||
| 457 | result = False | ||
| 458 | else: | ||
| 459 | result = (self.name == other.name and | ||
| 460 | self.version == other.version and | ||
| 461 | self.source_url == other.source_url) | ||
| 462 | return result | ||
| 463 | |||
| 464 | def __hash__(self): | ||
| 465 | """ | ||
| 466 | Compute hash in a way which matches the equality test. | ||
| 467 | """ | ||
| 468 | return hash(self.name) + hash(self.version) + hash(self.source_url) | ||
| 469 | |||
| 470 | |||
| 471 | class BaseInstalledDistribution(Distribution): | ||
| 472 | """ | ||
| 473 | This is the base class for installed distributions (whether PEP 376 or | ||
| 474 | legacy). | ||
| 475 | """ | ||
| 476 | |||
| 477 | hasher = None | ||
| 478 | |||
| 479 | def __init__(self, metadata, path, env=None): | ||
| 480 | """ | ||
| 481 | Initialise an instance. | ||
| 482 | :param metadata: An instance of :class:`Metadata` which describes the | ||
| 483 | distribution. This will normally have been initialised | ||
| 484 | from a metadata file in the ``path``. | ||
| 485 | :param path: The path of the ``.dist-info`` or ``.egg-info`` | ||
| 486 | directory for the distribution. | ||
| 487 | :param env: This is normally the :class:`DistributionPath` | ||
| 488 | instance where this distribution was found. | ||
| 489 | """ | ||
| 490 | super(BaseInstalledDistribution, self).__init__(metadata) | ||
| 491 | self.path = path | ||
| 492 | self.dist_path = env | ||
| 493 | |||
| 494 | def get_hash(self, data, hasher=None): | ||
| 495 | """ | ||
| 496 | Get the hash of some data, using a particular hash algorithm, if | ||
| 497 | specified. | ||
| 498 | |||
| 499 | :param data: The data to be hashed. | ||
| 500 | :type data: bytes | ||
| 501 | :param hasher: The name of a hash implementation, supported by hashlib, | ||
| 502 | or ``None``. Examples of valid values are ``'sha1'``, | ||
| 503 | ``'sha224'``, ``'sha384'``, '``sha256'``, ``'md5'`` and | ||
| 504 | ``'sha512'``. If no hasher is specified, the ``hasher`` | ||
| 505 | attribute of the :class:`InstalledDistribution` instance | ||
| 506 | is used. If the hasher is determined to be ``None``, MD5 | ||
| 507 | is used as the hashing algorithm. | ||
| 508 | :returns: The hash of the data. If a hasher was explicitly specified, | ||
| 509 | the returned hash will be prefixed with the specified hasher | ||
| 510 | followed by '='. | ||
| 511 | :rtype: str | ||
| 512 | """ | ||
| 513 | if hasher is None: | ||
| 514 | hasher = self.hasher | ||
| 515 | if hasher is None: | ||
| 516 | hasher = hashlib.md5 | ||
| 517 | prefix = '' | ||
| 518 | else: | ||
| 519 | prefix = '%s=' % hasher # use the requested algorithm's name | ||
| 520 | hasher = getattr(hashlib, hasher) | ||
| 521 | digest = hasher(data).digest() | ||
| 522 | digest = base64.urlsafe_b64encode(digest).rstrip(b'=').decode('ascii') | ||
| 523 | return '%s%s' % (prefix, digest) | ||
| 524 | |||
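
The digest format produced by ``get_hash`` follows the PEP 376 / PEP 427 ``RECORD`` convention: a urlsafe base64 encoding of the raw digest with the trailing ``=`` padding stripped, optionally prefixed by the algorithm name. A minimal self-contained sketch of the same encoding (the ``format_hash`` helper and its inputs are illustrative, not part of distlib):

```python
import base64
import hashlib

def format_hash(data, algorithm='sha256'):
    # Mirror BaseInstalledDistribution.get_hash: hash the bytes, base64-encode
    # the raw digest with the urlsafe alphabet, strip '=' padding, and prefix
    # the algorithm name.
    digest = getattr(hashlib, algorithm)(data).digest()
    encoded = base64.urlsafe_b64encode(digest).rstrip(b'=').decode('ascii')
    return '%s=%s' % (algorithm, encoded)

print(format_hash(b'hello world'))  # 'sha256=uU0nuZNNPg...'
```
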
| 525 | |||
| 526 | class InstalledDistribution(BaseInstalledDistribution): | ||
| 527 | """ | ||
| 528 | Created with the *path* of the ``.dist-info`` directory provided to the | ||
| 529 | constructor. It reads the metadata contained in ``pydist.json`` when it is | ||
| 530 | instantiated, or uses a passed-in Metadata instance (useful when | ||
| 531 | dry-run mode is being used). | ||
| 532 | """ | ||
| 533 | |||
| 534 | hasher = 'sha256' | ||
| 535 | |||
| 536 | def __init__(self, path, metadata=None, env=None): | ||
| 537 | self.modules = [] | ||
| 538 | self.finder = finder = resources.finder_for_path(path) | ||
| 539 | if finder is None: | ||
| 540 | raise ValueError('finder unavailable for %s' % path) | ||
| 541 | if env and env._cache_enabled and path in env._cache.path: | ||
| 542 | metadata = env._cache.path[path].metadata | ||
| 543 | elif metadata is None: | ||
| 544 | r = finder.find(METADATA_FILENAME) | ||
| 545 | # Temporary - for Wheel 0.23 support | ||
| 546 | if r is None: | ||
| 547 | r = finder.find(WHEEL_METADATA_FILENAME) | ||
| 548 | # Temporary - for legacy support | ||
| 549 | if r is None: | ||
| 550 | r = finder.find('METADATA') | ||
| 551 | if r is None: | ||
| 552 | raise ValueError('no %s found in %s' % (METADATA_FILENAME, | ||
| 553 | path)) | ||
| 554 | with contextlib.closing(r.as_stream()) as stream: | ||
| 555 | metadata = Metadata(fileobj=stream, scheme='legacy') | ||
| 556 | |||
| 557 | super(InstalledDistribution, self).__init__(metadata, path, env) | ||
| 558 | |||
| 559 | if env and env._cache_enabled: | ||
| 560 | env._cache.add(self) | ||
| 561 | |||
| 562 | r = finder.find('REQUESTED') | ||
| 563 | self.requested = r is not None | ||
| 564 | p = os.path.join(path, 'top_level.txt') | ||
| 565 | if os.path.exists(p): | ||
| 566 | with open(p, 'rb') as f: | ||
| 567 | data = f.read() | ||
| 568 | self.modules = data.splitlines() | ||
| 569 | |||
| 570 | def __repr__(self): | ||
| 571 | return '<InstalledDistribution %r %s at %r>' % ( | ||
| 572 | self.name, self.version, self.path) | ||
| 573 | |||
| 574 | def __str__(self): | ||
| 575 | return "%s %s" % (self.name, self.version) | ||
| 576 | |||
| 577 | def _get_records(self): | ||
| 578 | """ | ||
| 579 | Get the list of installed files for the distribution. | ||
| 580 | :return: A list of tuples of path, hash and size. Note that hash and | ||
| 581 | size might be ``None`` for some entries. The path is exactly | ||
| 582 | as stored in the file (which is as in PEP 376). | ||
| 583 | """ | ||
| 584 | results = [] | ||
| 585 | r = self.get_distinfo_resource('RECORD') | ||
| 586 | with contextlib.closing(r.as_stream()) as stream: | ||
| 587 | with CSVReader(stream=stream) as record_reader: | ||
| 588 | # Base location is parent dir of .dist-info dir | ||
| 589 | #base_location = os.path.dirname(self.path) | ||
| 590 | #base_location = os.path.abspath(base_location) | ||
| 591 | for row in record_reader: | ||
| 592 | missing = [None for i in range(len(row), 3)] | ||
| 593 | path, checksum, size = row + missing | ||
| 594 | #if not os.path.isabs(path): | ||
| 595 | # path = path.replace('/', os.sep) | ||
| 596 | # path = os.path.join(base_location, path) | ||
| 597 | results.append((path, checksum, size)) | ||
| 598 | return results | ||
| 599 | |||
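
``RECORD`` rows may carry fewer than three fields; ``_get_records`` pads them with ``None`` so every entry unpacks to ``(path, hash, size)``. A small sketch of that padding on hypothetical ``RECORD`` content (the hash value below is a placeholder):

```python
import csv
import io

record_text = (
    'pkg/__init__.py,sha256=uU0nuZNNPgilLlLX2n2r-sSE7-N6U4DukIj3rOLvzek,12\n'
    'pkg-1.0.dist-info/INSTALLER\n'   # a short row: path only
)

for row in csv.reader(io.StringIO(record_text)):
    # Pad to exactly three fields, as _get_records does.
    row += [None for i in range(len(row), 3)]
    path, checksum, size = row
    print(path, checksum, size)
```
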
| 600 | @cached_property | ||
| 601 | def exports(self): | ||
| 602 | """ | ||
| 603 | Return the information exported by this distribution. | ||
| 604 | :return: A dictionary of exports, mapping an export category to a dict | ||
| 605 | of :class:`ExportEntry` instances describing the individual | ||
| 606 | export entries, and keyed by name. | ||
| 607 | """ | ||
| 608 | result = {} | ||
| 609 | r = self.get_distinfo_resource(EXPORTS_FILENAME) | ||
| 610 | if r: | ||
| 611 | result = self.read_exports() | ||
| 612 | return result | ||
| 613 | |||
| 614 | def read_exports(self): | ||
| 615 | """ | ||
| 616 | Read exports data from a file in .ini format. | ||
| 617 | |||
| 618 | :return: A dictionary of exports, mapping an export category to a list | ||
| 619 | of :class:`ExportEntry` instances describing the individual | ||
| 620 | export entries. | ||
| 621 | """ | ||
| 622 | result = {} | ||
| 623 | r = self.get_distinfo_resource(EXPORTS_FILENAME) | ||
| 624 | if r: | ||
| 625 | with contextlib.closing(r.as_stream()) as stream: | ||
| 626 | result = read_exports(stream) | ||
| 627 | return result | ||
| 628 | |||
| 629 | def write_exports(self, exports): | ||
| 630 | """ | ||
| 631 | Write a dictionary of exports to a file in .ini format. | ||
| 632 | :param exports: A dictionary of exports, mapping an export category to | ||
| 633 | a list of :class:`ExportEntry` instances describing the | ||
| 634 | individual export entries. | ||
| 635 | """ | ||
| 636 | rf = self.get_distinfo_file(EXPORTS_FILENAME) | ||
| 637 | with open(rf, 'w') as f: | ||
| 638 | write_exports(exports, f) | ||
| 639 | |||
| 640 | def get_resource_path(self, relative_path): | ||
| 641 | """ | ||
| 642 | NOTE: This API may change in the future. | ||
| 643 | |||
| 644 | Return the absolute path to a resource file with the given relative | ||
| 645 | path. | ||
| 646 | |||
| 647 | :param relative_path: The path, relative to .dist-info, of the resource | ||
| 648 | of interest. | ||
| 649 | :return: The absolute path where the resource is to be found. | ||
| 650 | """ | ||
| 651 | r = self.get_distinfo_resource('RESOURCES') | ||
| 652 | with contextlib.closing(r.as_stream()) as stream: | ||
| 653 | with CSVReader(stream=stream) as resources_reader: | ||
| 654 | for relative, destination in resources_reader: | ||
| 655 | if relative == relative_path: | ||
| 656 | return destination | ||
| 657 | raise KeyError('no resource file with relative path %r ' | ||
| 658 | 'is installed' % relative_path) | ||
| 659 | |||
| 660 | def list_installed_files(self): | ||
| 661 | """ | ||
| 662 | Iterates over the ``RECORD`` entries and returns a tuple | ||
| 663 | ``(path, hash, size)`` for each line. | ||
| 664 | |||
| 665 | :returns: iterator of (path, hash, size) | ||
| 666 | """ | ||
| 667 | for result in self._get_records(): | ||
| 668 | yield result | ||
| 669 | |||
| 670 | def write_installed_files(self, paths, prefix, dry_run=False): | ||
| 671 | """ | ||
| 672 | Writes the ``RECORD`` file, using the ``paths`` iterable passed in. Any | ||
| 673 | existing ``RECORD`` file is silently overwritten. | ||
| 674 | |||
| 675 | ``prefix`` is used to determine when to write absolute paths. | ||
| 676 | """ | ||
| 677 | prefix = os.path.join(prefix, '') | ||
| 678 | base = os.path.dirname(self.path) | ||
| 679 | base_under_prefix = base.startswith(prefix) | ||
| 680 | base = os.path.join(base, '') | ||
| 681 | record_path = self.get_distinfo_file('RECORD') | ||
| 682 | logger.info('creating %s', record_path) | ||
| 683 | if dry_run: | ||
| 684 | return None | ||
| 685 | with CSVWriter(record_path) as writer: | ||
| 686 | for path in paths: | ||
| 687 | if os.path.isdir(path) or path.endswith(('.pyc', '.pyo')): | ||
| 688 | # do not put size and hash, as in PEP-376 | ||
| 689 | hash_value = size = '' | ||
| 690 | else: | ||
| 691 | size = '%d' % os.path.getsize(path) | ||
| 692 | with open(path, 'rb') as fp: | ||
| 693 | hash_value = self.get_hash(fp.read()) | ||
| 694 | if path.startswith(base) or (base_under_prefix and | ||
| 695 | path.startswith(prefix)): | ||
| 696 | path = os.path.relpath(path, base) | ||
| 697 | writer.writerow((path, hash_value, size)) | ||
| 698 | |||
| 699 | # add the RECORD file itself | ||
| 700 | if record_path.startswith(base): | ||
| 701 | record_path = os.path.relpath(record_path, base) | ||
| 702 | writer.writerow((record_path, '', '')) | ||
| 703 | return record_path | ||
| 704 | |||
| 705 | def check_installed_files(self): | ||
| 706 | """ | ||
| 707 | Checks that the hashes and sizes of the files in ``RECORD`` are | ||
| 708 | matched by the files themselves. Returns a (possibly empty) list of | ||
| 709 | mismatches. Each entry in the mismatch list will be a tuple consisting | ||
| 710 | of the path, 'exists', 'size' or 'hash' according to what didn't match | ||
| 711 | (existence is checked first, then size, then hash), the expected | ||
| 712 | value and the actual value. | ||
| 713 | """ | ||
| 714 | mismatches = [] | ||
| 715 | base = os.path.dirname(self.path) | ||
| 716 | record_path = self.get_distinfo_file('RECORD') | ||
| 717 | for path, hash_value, size in self.list_installed_files(): | ||
| 718 | if not os.path.isabs(path): | ||
| 719 | path = os.path.join(base, path) | ||
| 720 | if path == record_path: | ||
| 721 | continue | ||
| 722 | if not os.path.exists(path): | ||
| 723 | mismatches.append((path, 'exists', True, False)) | ||
| 724 | elif os.path.isfile(path): | ||
| 725 | actual_size = str(os.path.getsize(path)) | ||
| 726 | if size and actual_size != size: | ||
| 727 | mismatches.append((path, 'size', size, actual_size)) | ||
| 728 | elif hash_value: | ||
| 729 | if '=' in hash_value: | ||
| 730 | hasher = hash_value.split('=', 1)[0] | ||
| 731 | else: | ||
| 732 | hasher = None | ||
| 733 | |||
| 734 | with open(path, 'rb') as f: | ||
| 735 | actual_hash = self.get_hash(f.read(), hasher) | ||
| 736 | if actual_hash != hash_value: | ||
| 737 | mismatches.append((path, 'hash', hash_value, actual_hash)) | ||
| 738 | return mismatches | ||
| 739 | |||
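
Each mismatch is a 4-tuple ``(path, check, expected, actual)`` where ``check`` is one of ``'exists'``, ``'size'`` or ``'hash'``. A sketch of consuming a hypothetical result (the paths and values are invented):

```python
# Hypothetical output of dist.check_installed_files() for a damaged install.
mismatches = [
    ('/site-packages/pkg/gone.py', 'exists', True, False),
    ('/site-packages/pkg/edited.py', 'size', '120', '131'),
]

for path, check, expected, actual in mismatches:
    if check == 'exists':
        print('%s: missing file' % path)
    else:
        print('%s: %s mismatch, expected %s, got %s'
              % (path, check, expected, actual))
```
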
| 740 | @cached_property | ||
| 741 | def shared_locations(self): | ||
| 742 | """ | ||
| 743 | A dictionary of shared locations whose keys are in the set 'prefix', | ||
| 744 | 'purelib', 'platlib', 'scripts', 'headers', 'data' and 'namespace'. | ||
| 745 | The corresponding value is the absolute path of that category for | ||
| 746 | this distribution, and takes into account any paths selected by the | ||
| 747 | user at installation time (e.g. via command-line arguments). In the | ||
| 748 | case of the 'namespace' key, this would be a list of absolute paths | ||
| 749 | for the roots of namespace packages in this distribution. | ||
| 750 | |||
| 751 | The first time this property is accessed, the relevant information is | ||
| 752 | read from the SHARED file in the .dist-info directory. | ||
| 753 | """ | ||
| 754 | result = {} | ||
| 755 | shared_path = os.path.join(self.path, 'SHARED') | ||
| 756 | if os.path.isfile(shared_path): | ||
| 757 | with codecs.open(shared_path, 'r', encoding='utf-8') as f: | ||
| 758 | lines = f.read().splitlines() | ||
| 759 | for line in lines: | ||
| 760 | key, value = line.split('=', 1) | ||
| 761 | if key == 'namespace': | ||
| 762 | result.setdefault(key, []).append(value) | ||
| 763 | else: | ||
| 764 | result[key] = value | ||
| 765 | return result | ||
| 766 | |||
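
The ``SHARED`` file is a simple ``key=value`` listing in which repeated ``namespace`` keys accumulate into a list while every other key maps to a single path. A self-contained sketch of the same parsing, run on hypothetical file contents:

```python
# Invented SHARED file contents for illustration.
shared_text = """\
prefix=/usr/local
scripts=/usr/local/bin
namespace=/usr/local/lib/python3.7/site-packages/ns_a
namespace=/usr/local/lib/python3.7/site-packages/ns_b
"""

result = {}
for line in shared_text.splitlines():
    key, value = line.split('=', 1)
    if key == 'namespace':
        result.setdefault(key, []).append(value)
    else:
        result[key] = value

print(result['scripts'])    # '/usr/local/bin'
print(result['namespace'])  # both namespace roots, in file order
```
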
| 767 | def write_shared_locations(self, paths, dry_run=False): | ||
| 768 | """ | ||
| 769 | Write shared location information to the SHARED file in .dist-info. | ||
| 770 | :param paths: A dictionary as described in the documentation for | ||
| 771 | :meth:`shared_locations`. | ||
| 772 | :param dry_run: If True, the action is logged but no file is actually | ||
| 773 | written. | ||
| 774 | :return: The path of the file written to. | ||
| 775 | """ | ||
| 776 | shared_path = os.path.join(self.path, 'SHARED') | ||
| 777 | logger.info('creating %s', shared_path) | ||
| 778 | if dry_run: | ||
| 779 | return None | ||
| 780 | lines = [] | ||
| 781 | for key in ('prefix', 'lib', 'headers', 'scripts', 'data'): | ||
| 782 | path = paths[key] | ||
| 783 | if os.path.isdir(path): | ||
| 784 | lines.append('%s=%s' % (key, path)) | ||
| 785 | for ns in paths.get('namespace', ()): | ||
| 786 | lines.append('namespace=%s' % ns) | ||
| 787 | |||
| 788 | with codecs.open(shared_path, 'w', encoding='utf-8') as f: | ||
| 789 | f.write('\n'.join(lines)) | ||
| 790 | return shared_path | ||
| 791 | |||
| 792 | def get_distinfo_resource(self, path): | ||
| 793 | if path not in DIST_FILES: | ||
| 794 | raise DistlibException('invalid path for a dist-info file: ' | ||
| 795 | '%r at %r' % (path, self.path)) | ||
| 796 | finder = resources.finder_for_path(self.path) | ||
| 797 | if finder is None: | ||
| 798 | raise DistlibException('Unable to get a finder for %s' % self.path) | ||
| 799 | return finder.find(path) | ||
| 800 | |||
| 801 | def get_distinfo_file(self, path): | ||
| 802 | """ | ||
| 803 | Returns a path located under the ``.dist-info`` directory. Returns a | ||
| 804 | string representing the path. | ||
| 805 | |||
| 806 | :parameter path: a ``'/'``-separated path relative to the | ||
| 807 | ``.dist-info`` directory or an absolute path; | ||
| 808 | If *path* is an absolute path and doesn't start | ||
| 809 | with the ``.dist-info`` directory path, | ||
| 810 | a :class:`DistlibException` is raised | ||
| 811 | :type path: str | ||
| 812 | :rtype: str | ||
| 813 | """ | ||
| 814 | # Check if it is an absolute path # XXX use relpath, add tests | ||
| 815 | if path.find(os.sep) >= 0: | ||
| 816 | # the path contains a separator, so treat it as absolute | ||
| 817 | distinfo_dirname, path = path.split(os.sep)[-2:] | ||
| 818 | if distinfo_dirname != self.path.split(os.sep)[-1]: | ||
| 819 | raise DistlibException( | ||
| 820 | 'dist-info file %r does not belong to the %r %s ' | ||
| 821 | 'distribution' % (path, self.name, self.version)) | ||
| 822 | |||
| 823 | # The file must be relative | ||
| 824 | if path not in DIST_FILES: | ||
| 825 | raise DistlibException('invalid path for a dist-info file: ' | ||
| 826 | '%r at %r' % (path, self.path)) | ||
| 827 | |||
| 828 | return os.path.join(self.path, path) | ||
| 829 | |||
| 830 | def list_distinfo_files(self): | ||
| 831 | """ | ||
| 832 | Iterates over the ``RECORD`` entries and returns paths for each line | ||
| 833 | if the path points to a file located in the ``.dist-info`` directory | ||
| 834 | or one of its subdirectories. | ||
| 835 | |||
| 836 | :returns: iterator of paths | ||
| 837 | """ | ||
| 838 | base = os.path.dirname(self.path) | ||
| 839 | for path, checksum, size in self._get_records(): | ||
| 840 | # XXX add separator or use real relpath algo | ||
| 841 | if not os.path.isabs(path): | ||
| 842 | path = os.path.join(base, path) | ||
| 843 | if path.startswith(self.path): | ||
| 844 | yield path | ||
| 845 | |||
| 846 | def __eq__(self, other): | ||
| 847 | return (isinstance(other, InstalledDistribution) and | ||
| 848 | self.path == other.path) | ||
| 849 | |||
| 850 | # See http://docs.python.org/reference/datamodel#object.__hash__ | ||
| 851 | __hash__ = object.__hash__ | ||
| 852 | |||
| 853 | |||
| 854 | class EggInfoDistribution(BaseInstalledDistribution): | ||
| 855 | """Created with the *path* of the ``.egg-info`` directory or file provided | ||
| 856 | to the constructor. It reads the metadata contained in the file itself, or | ||
| 857 | if the given path happens to be a directory, the metadata is read from the | ||
| 858 | file ``PKG-INFO`` under that directory.""" | ||
| 859 | |||
| 860 | requested = True # as we have no way of knowing, assume it was | ||
| 861 | shared_locations = {} | ||
| 862 | |||
| 863 | def __init__(self, path, env=None): | ||
| 864 | def set_name_and_version(s, n, v): | ||
| 865 | s.name = n | ||
| 866 | s.key = n.lower() # for case-insensitive comparisons | ||
| 867 | s.version = v | ||
| 868 | |||
| 869 | self.path = path | ||
| 870 | self.dist_path = env | ||
| 871 | if env and env._cache_enabled and path in env._cache_egg.path: | ||
| 872 | metadata = env._cache_egg.path[path].metadata | ||
| 873 | set_name_and_version(self, metadata.name, metadata.version) | ||
| 874 | else: | ||
| 875 | metadata = self._get_metadata(path) | ||
| 876 | |||
| 877 | # Need to be set before caching | ||
| 878 | set_name_and_version(self, metadata.name, metadata.version) | ||
| 879 | |||
| 880 | if env and env._cache_enabled: | ||
| 881 | env._cache_egg.add(self) | ||
| 882 | super(EggInfoDistribution, self).__init__(metadata, path, env) | ||
| 883 | |||
| 884 | def _get_metadata(self, path): | ||
| 885 | requires = None | ||
| 886 | |||
| 887 | def parse_requires_data(data): | ||
| 888 | """Create a list of dependencies from a requires.txt file. | ||
| 889 | |||
| 890 | *data*: the contents of a setuptools-produced requires.txt file. | ||
| 891 | """ | ||
| 892 | reqs = [] | ||
| 893 | lines = data.splitlines() | ||
| 894 | for line in lines: | ||
| 895 | line = line.strip() | ||
| 896 | if line.startswith('['): | ||
| 897 | logger.warning('Unexpected line: quitting requirement scan: %r', | ||
| 898 | line) | ||
| 899 | break | ||
| 900 | r = parse_requirement(line) | ||
| 901 | if not r: | ||
| 902 | logger.warning('Not recognised as a requirement: %r', line) | ||
| 903 | continue | ||
| 904 | if r.extras: | ||
| 905 | logger.warning('extra requirements in requires.txt are ' | ||
| 906 | 'not supported') | ||
| 907 | if not r.constraints: | ||
| 908 | reqs.append(r.name) | ||
| 909 | else: | ||
| 910 | cons = ', '.join('%s%s' % c for c in r.constraints) | ||
| 911 | reqs.append('%s (%s)' % (r.name, cons)) | ||
| 912 | return reqs | ||
| 913 | |||
| 914 | def parse_requires_path(req_path): | ||
| 915 | """Create a list of dependencies from a requires.txt file. | ||
| 916 | |||
| 917 | *req_path*: the path to a setuptools-produced requires.txt file. | ||
| 918 | """ | ||
| 919 | |||
| 920 | reqs = [] | ||
| 921 | try: | ||
| 922 | with codecs.open(req_path, 'r', 'utf-8') as fp: | ||
| 923 | reqs = parse_requires_data(fp.read()) | ||
| 924 | except IOError: | ||
| 925 | pass | ||
| 926 | return reqs | ||
| 927 | |||
| 928 | tl_path = tl_data = None | ||
| 929 | if path.endswith('.egg'): | ||
| 930 | if os.path.isdir(path): | ||
| 931 | p = os.path.join(path, 'EGG-INFO') | ||
| 932 | meta_path = os.path.join(p, 'PKG-INFO') | ||
| 933 | metadata = Metadata(path=meta_path, scheme='legacy') | ||
| 934 | req_path = os.path.join(p, 'requires.txt') | ||
| 935 | tl_path = os.path.join(p, 'top_level.txt') | ||
| 936 | requires = parse_requires_path(req_path) | ||
| 937 | else: | ||
| 938 | # FIXME handle the case where zipfile is not available | ||
| 939 | zipf = zipimport.zipimporter(path) | ||
| 940 | fileobj = StringIO( | ||
| 941 | zipf.get_data('EGG-INFO/PKG-INFO').decode('utf8')) | ||
| 942 | metadata = Metadata(fileobj=fileobj, scheme='legacy') | ||
| 943 | try: | ||
| 944 | data = zipf.get_data('EGG-INFO/requires.txt') | ||
| 945 | tl_data = zipf.get_data('EGG-INFO/top_level.txt').decode('utf-8') | ||
| 946 | requires = parse_requires_data(data.decode('utf-8')) | ||
| 947 | except IOError: | ||
| 948 | requires = None | ||
| 949 | elif path.endswith('.egg-info'): | ||
| 950 | if os.path.isdir(path): | ||
| 951 | req_path = os.path.join(path, 'requires.txt') | ||
| 952 | requires = parse_requires_path(req_path) | ||
| 953 | path = os.path.join(path, 'PKG-INFO') | ||
| 954 | tl_path = os.path.join(path, 'top_level.txt') | ||
| 955 | metadata = Metadata(path=path, scheme='legacy') | ||
| 956 | else: | ||
| 957 | raise DistlibException('path must end with .egg-info or .egg, ' | ||
| 958 | 'got %r' % path) | ||
| 959 | |||
| 960 | if requires: | ||
| 961 | metadata.add_requirements(requires) | ||
| 962 | # look for top-level modules in top_level.txt, if present | ||
| 963 | if tl_data is None: | ||
| 964 | if tl_path is not None and os.path.exists(tl_path): | ||
| 965 | with open(tl_path, 'rb') as f: | ||
| 966 | tl_data = f.read().decode('utf-8') | ||
| 967 | if not tl_data: | ||
| 968 | tl_data = [] | ||
| 969 | else: | ||
| 970 | tl_data = tl_data.splitlines() | ||
| 971 | self.modules = tl_data | ||
| 972 | return metadata | ||
| 973 | |||
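
``parse_requires_data`` rewrites setuptools ``requires.txt`` lines into the PEP 314-style ``name (constraints)`` form and stops at the first extras section. A simplified, self-contained sketch of that transformation; it uses a naive regular expression in place of distlib's ``parse_requirement``, so it is only an approximation:

```python
import re

def convert_requires(data):
    reqs = []
    for line in data.splitlines():
        line = line.strip()
        if not line:
            continue
        if line.startswith('['):
            break  # extras section reached: stop scanning
        # Split a name from any trailing version constraints.
        m = re.match(r'([A-Za-z0-9._-]+)\s*(.*)', line)
        name, cons = m.group(1), m.group(2)
        reqs.append('%s (%s)' % (name, cons) if cons else name)
    return reqs

print(convert_requires('requests>=2.0\nsix\n[security]\npyOpenSSL'))
# ['requests (>=2.0)', 'six']
```
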
| 974 | def __repr__(self): | ||
| 975 | return '<EggInfoDistribution %r %s at %r>' % ( | ||
| 976 | self.name, self.version, self.path) | ||
| 977 | |||
| 978 | def __str__(self): | ||
| 979 | return "%s %s" % (self.name, self.version) | ||
| 980 | |||
| 981 | def check_installed_files(self): | ||
| 982 | """ | ||
| 983 | Checks that the hashes and sizes of the files in ``RECORD`` are | ||
| 984 | matched by the files themselves. Returns a (possibly empty) list of | ||
| 985 | mismatches. Each entry in the mismatch list will be a tuple consisting | ||
| 986 | of the path, 'exists', 'size' or 'hash' according to what didn't match | ||
| 987 | (existence is checked first, then size, then hash), the expected | ||
| 988 | value and the actual value. | ||
| 989 | """ | ||
| 990 | mismatches = [] | ||
| 991 | record_path = os.path.join(self.path, 'installed-files.txt') | ||
| 992 | if os.path.exists(record_path): | ||
| 993 | for path, _, _ in self.list_installed_files(): | ||
| 994 | if path == record_path: | ||
| 995 | continue | ||
| 996 | if not os.path.exists(path): | ||
| 997 | mismatches.append((path, 'exists', True, False)) | ||
| 998 | return mismatches | ||
| 999 | |||
| 1000 | def list_installed_files(self): | ||
| 1001 | """ | ||
| 1002 | Iterates over the ``installed-files.txt`` entries and returns a tuple | ||
| 1003 | ``(path, hash, size)`` for each line. | ||
| 1004 | |||
| 1005 | :returns: a list of (path, hash, size) | ||
| 1006 | """ | ||
| 1007 | |||
| 1008 | def _md5(path): | ||
| 1009 | with open(path, 'rb') as f: | ||
| 1010 | content = f.read() | ||
| 1011 | return hashlib.md5(content).hexdigest() | ||
| 1015 | |||
| 1016 | def _size(path): | ||
| 1017 | return os.stat(path).st_size | ||
| 1018 | |||
| 1019 | record_path = os.path.join(self.path, 'installed-files.txt') | ||
| 1020 | result = [] | ||
| 1021 | if os.path.exists(record_path): | ||
| 1022 | with codecs.open(record_path, 'r', encoding='utf-8') as f: | ||
| 1023 | for line in f: | ||
| 1024 | line = line.strip() | ||
| 1025 | p = os.path.normpath(os.path.join(self.path, line)) | ||
| 1026 | # "./" is present as a marker between installed files | ||
| 1027 | # and installation metadata files | ||
| 1028 | if not os.path.exists(p): | ||
| 1029 | logger.warning('Non-existent file: %s', p) | ||
| 1030 | if p.endswith(('.pyc', '.pyo')): | ||
| 1031 | continue | ||
| 1032 | # otherwise fall through and fail | ||
| 1033 | if not os.path.isdir(p): | ||
| 1034 | result.append((p, _md5(p), _size(p))) | ||
| 1035 | result.append((record_path, None, None)) | ||
| 1036 | return result | ||
| 1037 | |||
| 1038 | def list_distinfo_files(self, absolute=False): | ||
| 1039 | """ | ||
| 1040 | Iterates over the ``installed-files.txt`` entries and returns paths for | ||
| 1041 | each line if the path points to a file located in the | ||
| 1042 | ``.egg-info`` directory or one of its subdirectories. | ||
| 1043 | |||
| 1044 | :parameter absolute: If *absolute* is ``True``, each returned path is | ||
| 1045 | transformed into a local absolute path. Otherwise the | ||
| 1046 | raw value from ``installed-files.txt`` is returned. | ||
| 1047 | :type absolute: boolean | ||
| 1048 | :returns: iterator of paths | ||
| 1049 | """ | ||
| 1050 | record_path = os.path.join(self.path, 'installed-files.txt') | ||
| 1051 | if os.path.exists(record_path): | ||
| 1052 | skip = True | ||
| 1053 | with codecs.open(record_path, 'r', encoding='utf-8') as f: | ||
| 1054 | for line in f: | ||
| 1055 | line = line.strip() | ||
| 1056 | if line == './': | ||
| 1057 | skip = False | ||
| 1058 | continue | ||
| 1059 | if not skip: | ||
| 1060 | p = os.path.normpath(os.path.join(self.path, line)) | ||
| 1061 | if p.startswith(self.path): | ||
| 1062 | if absolute: | ||
| 1063 | yield p | ||
| 1064 | else: | ||
| 1065 | yield line | ||
| 1066 | |||
| 1067 | def __eq__(self, other): | ||
| 1068 | return (isinstance(other, EggInfoDistribution) and | ||
| 1069 | self.path == other.path) | ||
| 1070 | |||
| 1071 | # See http://docs.python.org/reference/datamodel#object.__hash__ | ||
| 1072 | __hash__ = object.__hash__ | ||
| 1073 | |||
| 1074 | new_dist_class = InstalledDistribution | ||
| 1075 | old_dist_class = EggInfoDistribution | ||
| 1076 | |||
| 1077 | |||
| 1078 | class DependencyGraph(object): | ||
| 1079 | """ | ||
| 1080 | Represents a dependency graph between distributions. | ||
| 1081 | |||
| 1082 | The dependency relationships are stored in an ``adjacency_list`` that maps | ||
| 1083 | distributions to a list of ``(other, label)`` tuples where ``other`` | ||
| 1084 | is a distribution and the edge is labeled with ``label`` (i.e. the version | ||
| 1085 | specifier, if such was provided). Also, for more efficient traversal, for | ||
| 1086 | every distribution ``x``, a list of predecessors is kept in | ||
| 1087 | ``reverse_list[x]``. An edge from distribution ``a`` to | ||
| 1088 | distribution ``b`` means that ``a`` depends on ``b``. If any missing | ||
| 1089 | dependencies are found, they are stored in ``missing``, which is a | ||
| 1090 | dictionary that maps distributions to a list of requirements that were not | ||
| 1091 | provided by any other distributions. | ||
| 1092 | """ | ||
| 1093 | |||
| 1094 | def __init__(self): | ||
| 1095 | self.adjacency_list = {} | ||
| 1096 | self.reverse_list = {} | ||
| 1097 | self.missing = {} | ||
| 1098 | |||
| 1099 | def add_distribution(self, distribution): | ||
| 1100 | """Add the *distribution* to the graph. | ||
| 1101 | |||
| 1102 | :type distribution: :class:`distutils2.database.InstalledDistribution` | ||
| 1103 | or :class:`distutils2.database.EggInfoDistribution` | ||
| 1104 | """ | ||
| 1105 | self.adjacency_list[distribution] = [] | ||
| 1106 | self.reverse_list[distribution] = [] | ||
| 1107 | #self.missing[distribution] = [] | ||
| 1108 | |||
| 1109 | def add_edge(self, x, y, label=None): | ||
| 1110 | """Add an edge from distribution *x* to distribution *y* with the given | ||
| 1111 | *label*. | ||
| 1112 | |||
| 1113 | :type x: :class:`distutils2.database.InstalledDistribution` or | ||
| 1114 | :class:`distutils2.database.EggInfoDistribution` | ||
| 1115 | :type y: :class:`distutils2.database.InstalledDistribution` or | ||
| 1116 | :class:`distutils2.database.EggInfoDistribution` | ||
| 1117 | :type label: ``str`` or ``None`` | ||
| 1118 | """ | ||
| 1119 | self.adjacency_list[x].append((y, label)) | ||
| 1120 | # multiple edges are allowed, so be careful | ||
| 1121 | if x not in self.reverse_list[y]: | ||
| 1122 | self.reverse_list[y].append(x) | ||
| 1123 | |||
| 1124 | def add_missing(self, distribution, requirement): | ||
| 1125 | """ | ||
| 1126 | Add a missing *requirement* for the given *distribution*. | ||
| 1127 | |||
| 1128 | :type distribution: :class:`distutils2.database.InstalledDistribution` | ||
| 1129 | or :class:`distutils2.database.EggInfoDistribution` | ||
| 1130 | :type requirement: ``str`` | ||
| 1131 | """ | ||
| 1132 | logger.debug('%s missing %r', distribution, requirement) | ||
| 1133 | self.missing.setdefault(distribution, []).append(requirement) | ||
| 1134 | |||
| 1135 | def _repr_dist(self, dist): | ||
| 1136 | return '%s %s' % (dist.name, dist.version) | ||
| 1137 | |||
| 1138 | def repr_node(self, dist, level=1): | ||
| 1139 | """Return a string representation of the subgraph rooted at *dist*""" | ||
| 1140 | output = [self._repr_dist(dist)] | ||
| 1141 | for other, label in self.adjacency_list[dist]: | ||
| 1142 | node = self._repr_dist(other) | ||
| 1143 | if label is not None: | ||
| 1144 | node = '%s [%s]' % (node, label) | ||
| 1145 | output.append(' ' * level + node) | ||
| 1146 | suboutput = self.repr_node(other, level + 1) | ||
| 1147 | subs = suboutput.split('\n') | ||
| 1148 | output.extend(subs[1:]) | ||
| 1149 | return '\n'.join(output) | ||
| 1150 | |||
| 1151 | def to_dot(self, f, skip_disconnected=True): | ||
| 1152 | """Writes a DOT output for the graph to the provided file *f*. | ||
| 1153 | |||
| 1154 | If *skip_disconnected* is set to ``True``, then all distributions | ||
| 1155 | that are not dependent on any other distribution are skipped. | ||
| 1156 | |||
| 1157 | :type f: a ``file``-like object supporting ``write`` | ||
| 1158 | :type skip_disconnected: ``bool`` | ||
| 1159 | """ | ||
| 1160 | disconnected = [] | ||
| 1161 | |||
| 1162 | f.write("digraph dependencies {\n") | ||
| 1163 | for dist, adjs in self.adjacency_list.items(): | ||
| 1164 | if len(adjs) == 0 and not skip_disconnected: | ||
| 1165 | disconnected.append(dist) | ||
| 1166 | for other, label in adjs: | ||
| 1167 | if label is not None: | ||
| 1168 | f.write('"%s" -> "%s" [label="%s"]\n' % | ||
| 1169 | (dist.name, other.name, label)) | ||
| 1170 | else: | ||
| 1171 | f.write('"%s" -> "%s"\n' % (dist.name, other.name)) | ||
| 1172 | if not skip_disconnected and len(disconnected) > 0: | ||
| 1173 | f.write('subgraph disconnected {\n') | ||
| 1174 | f.write('label = "Disconnected"\n') | ||
| 1175 | f.write('bgcolor = red\n') | ||
| 1176 | |||
| 1177 | for dist in disconnected: | ||
| 1178 | f.write('"%s"' % dist.name) | ||
| 1179 | f.write('\n') | ||
| 1180 | f.write('}\n') | ||
| 1181 | f.write('}\n') | ||
| 1182 | |||
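
For illustration, here is what ``to_dot`` emits for a two-node graph built with this module's ``make_dist`` helper. The import path assumes the vendored copy inside pip; plain ``distlib.database`` would apply to a standalone install:

```python
import io

from pip._vendor.distlib.database import DependencyGraph, make_dist

a = make_dist('a', '1.0')
b = make_dist('b', '2.0')
graph = DependencyGraph()
graph.add_distribution(a)
graph.add_distribution(b)
graph.add_edge(a, b, '>=2.0')  # a depends on b

buf = io.StringIO()
graph.to_dot(buf, skip_disconnected=True)
print(buf.getvalue())
# digraph dependencies {
# "a" -> "b" [label=">=2.0"]
# }
```
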
| 1183 | def topological_sort(self): | ||
| 1184 | """ | ||
| 1185 | Perform a topological sort of the graph. | ||
| 1186 | :return: A tuple, the first element of which is a topologically sorted | ||
| 1187 | list of distributions, and the second element of which is a | ||
| 1188 | list of distributions that cannot be sorted because they have | ||
| 1189 | circular dependencies and so form a cycle. | ||
| 1190 | """ | ||
| 1191 | result = [] | ||
| 1192 | # Make a shallow copy of the adjacency list | ||
| 1193 | alist = {} | ||
| 1194 | for k, v in self.adjacency_list.items(): | ||
| 1195 | alist[k] = v[:] | ||
| 1196 | while True: | ||
| 1197 | # See what we can remove in this run | ||
| 1198 | to_remove = [] | ||
| 1199 | for k, v in list(alist.items()): | ||
| 1200 | if not v: | ||
| 1201 | to_remove.append(k) | ||
| 1202 | del alist[k] | ||
| 1203 | if not to_remove: | ||
| 1204 | # What's left in alist (if anything) is a cycle. | ||
| 1205 | break | ||
| 1206 | # Remove from the adjacency list of others | ||
| 1207 | for k, v in alist.items(): | ||
| 1208 | alist[k] = [(d, r) for d, r in v if d not in to_remove] | ||
| 1209 | logger.debug('Moving to result: %s', | ||
| 1210 | ['%s (%s)' % (d.name, d.version) for d in to_remove]) | ||
| 1211 | result.extend(to_remove) | ||
| 1212 | return result, list(alist.keys()) | ||
| 1213 | |||
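
The sort repeatedly peels off nodes with no outgoing edges, so dependencies come before their dependents; whatever remains at the end forms a cycle. A small sketch, under the same vendored import-path assumption as above:

```python
from pip._vendor.distlib.database import DependencyGraph, make_dist

app = make_dist('app', '1.0')
lib = make_dist('lib', '1.0')
graph = DependencyGraph()
graph.add_distribution(app)
graph.add_distribution(lib)
graph.add_edge(app, lib)  # app depends on lib

ordered, cyclic = graph.topological_sort()
print([d.name for d in ordered])  # ['lib', 'app'] - dependencies first
print(cyclic)                     # [] - no cycle in this graph
```
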
| 1214 | def __repr__(self): | ||
| 1215 | """Representation of the graph""" | ||
| 1216 | output = [] | ||
| 1217 | for dist, adjs in self.adjacency_list.items(): | ||
| 1218 | output.append(self.repr_node(dist)) | ||
| 1219 | return '\n'.join(output) | ||
| 1220 | |||
| 1221 | |||
| 1222 | def make_graph(dists, scheme='default'): | ||
| 1223 | """Makes a dependency graph from the given distributions. | ||
| 1224 | |||
| 1225 | :parameter dists: a list of distributions | ||
| 1226 | :type dists: list of :class:`distutils2.database.InstalledDistribution` and | ||
| 1227 | :class:`distutils2.database.EggInfoDistribution` instances | ||
| 1228 | :rtype: a :class:`DependencyGraph` instance | ||
| 1229 | """ | ||
| 1230 | scheme = get_scheme(scheme) | ||
| 1231 | graph = DependencyGraph() | ||
| 1232 | provided = {} # maps names to lists of (version, dist) tuples | ||
| 1233 | |||
| 1234 | # first, build the graph and find out what's provided | ||
| 1235 | for dist in dists: | ||
| 1236 | graph.add_distribution(dist) | ||
| 1237 | |||
| 1238 | for p in dist.provides: | ||
| 1239 | name, version = parse_name_and_version(p) | ||
| 1240 | logger.debug('Add to provided: %s, %s, %s', name, version, dist) | ||
| 1241 | provided.setdefault(name, []).append((version, dist)) | ||
| 1242 | |||
| 1243 | # now make the edges | ||
| 1244 | for dist in dists: | ||
| 1245 | requires = (dist.run_requires | dist.meta_requires | | ||
| 1246 | dist.build_requires | dist.dev_requires) | ||
| 1247 | for req in requires: | ||
| 1248 | try: | ||
| 1249 | matcher = scheme.matcher(req) | ||
| 1250 | except UnsupportedVersionError: | ||
| 1251 | # XXX compat-mode if cannot read the version | ||
| 1252 | logger.warning('could not read version %r - using name only', | ||
| 1253 | req) | ||
| 1254 | name = req.split()[0] | ||
| 1255 | matcher = scheme.matcher(name) | ||
| 1256 | |||
| 1257 | name = matcher.key # case-insensitive | ||
| 1258 | |||
| 1259 | matched = False | ||
| 1260 | if name in provided: | ||
| 1261 | for version, provider in provided[name]: | ||
| 1262 | try: | ||
| 1263 | match = matcher.match(version) | ||
| 1264 | except UnsupportedVersionError: | ||
| 1265 | match = False | ||
| 1266 | |||
| 1267 | if match: | ||
| 1268 | graph.add_edge(dist, provider, req) | ||
| 1269 | matched = True | ||
| 1270 | break | ||
| 1271 | if not matched: | ||
| 1272 | graph.add_missing(dist, req) | ||
| 1273 | return graph | ||
| 1274 | |||
| 1275 | |||
| 1276 | def get_dependent_dists(dists, dist): | ||
| 1277 | """Recursively generate a list of distributions from *dists* that are | ||
| 1278 | dependent on *dist*. | ||
| 1279 | |||
| 1280 | :param dists: a list of distributions | ||
| 1281 | :param dist: a distribution, member of *dists* for which we are interested | ||
| 1282 | """ | ||
| 1283 | if dist not in dists: | ||
| 1284 | raise DistlibException('given distribution %r is not a member ' | ||
| 1285 | 'of the list' % dist.name) | ||
| 1286 | graph = make_graph(dists) | ||
| 1287 | |||
| 1288 | dep = [dist] # dependent distributions | ||
| 1289 | todo = graph.reverse_list[dist] # list of nodes we should inspect | ||
| 1290 | |||
| 1291 | while todo: | ||
| 1292 | d = todo.pop() | ||
| 1293 | dep.append(d) | ||
| 1294 | for succ in graph.reverse_list[d]: | ||
| 1295 | if succ not in dep: | ||
| 1296 | todo.append(succ) | ||
| 1297 | |||
| 1298 | dep.pop(0) # remove dist from dep, was there to prevent infinite loops | ||
| 1299 | return dep | ||
| 1300 | |||
| 1301 | |||
| 1302 | def get_required_dists(dists, dist): | ||
| 1303 | """Recursively generate a list of distributions from *dists* that are | ||
| 1304 | required by *dist*. | ||
| 1305 | |||
| 1306 | :param dists: a list of distributions | ||
| 1307 | :param dist: a distribution, member of *dists* for which we are interested | ||
| 1308 | """ | ||
| 1309 | if dist not in dists: | ||
| 1310 | raise DistlibException('given distribution %r is not a member ' | ||
| 1311 | 'of the list' % dist.name) | ||
| 1312 | graph = make_graph(dists) | ||
| 1313 | |||
| 1314 | req = [] # required distributions | ||
| 1315 | todo = graph.adjacency_list[dist] # list of nodes we should inspect | ||
| 1316 | |||
| 1317 | while todo: | ||
| 1318 | d = todo.pop()[0] | ||
| 1319 | req.append(d) | ||
| 1320 | for pred in graph.adjacency_list[d]: | ||
| 1321 | if pred[0] not in req: # pred is a (distribution, label) tuple | ||
| 1322 | todo.append(pred) | ||
| 1323 | |||
| 1324 | return req | ||
| 1325 | |||
| 1326 | |||
| 1327 | def make_dist(name, version, **kwargs): | ||
| 1328 | """ | ||
| 1329 | A convenience method for making a dist given just a name and version. | ||
| 1330 | """ | ||
| 1331 | summary = kwargs.pop('summary', 'Placeholder for summary') | ||
| 1332 | md = Metadata(**kwargs) | ||
| 1333 | md.name = name | ||
| 1334 | md.version = version | ||
| 1335 | md.summary = summary or 'Placeholder for summary' | ||
| 1336 | return Distribution(md) | ||
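
Putting the pieces together: a sketch that builds two placeholder distributions, declares a requirement on one of them, and lets ``make_graph`` resolve the edge. It assumes ``Distribution.metadata`` exposes the underlying ``Metadata`` instance (as it does elsewhere in distlib) and that ``add_requirements`` feeds ``run_requires``; the names and versions are invented:

```python
from pip._vendor.distlib.database import make_dist, make_graph

app = make_dist('app', '1.0')
lib = make_dist('lib', '2.0')
# Declare the dependency in the PEP 314-style form used by this module.
app.metadata.add_requirements(['lib (>= 1.0)'])

graph = make_graph([app, lib])
print(graph.adjacency_list[app])  # [(<Distribution lib (2.0)>, 'lib (>= 1.0)')]
print(graph.missing)              # {} - the requirement was satisfied
```
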
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/index.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/index.py new file mode 100644 index 0000000..7197238 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/index.py | |||
| @@ -0,0 +1,516 @@ | |||
| 1 | # -*- coding: utf-8 -*- | ||
| 2 | # | ||
| 3 | # Copyright (C) 2013 Vinay Sajip. | ||
| 4 | # Licensed to the Python Software Foundation under a contributor agreement. | ||
| 5 | # See LICENSE.txt and CONTRIBUTORS.txt. | ||
| 6 | # | ||
| 7 | import hashlib | ||
| 8 | import logging | ||
| 9 | import os | ||
| 10 | import shutil | ||
| 11 | import subprocess | ||
| 12 | import tempfile | ||
| 13 | try: | ||
| 14 | from threading import Thread | ||
| 15 | except ImportError: | ||
| 16 | from dummy_threading import Thread | ||
| 17 | |||
| 18 | from . import DistlibException | ||
| 19 | from .compat import (HTTPBasicAuthHandler, Request, HTTPPasswordMgr, | ||
| 20 | urlparse, build_opener, string_types) | ||
| 21 | from .util import cached_property, zip_dir, ServerProxy | ||
| 22 | |||
| 23 | logger = logging.getLogger(__name__) | ||
| 24 | |||
| 25 | DEFAULT_INDEX = 'https://pypi.python.org/pypi' | ||
| 26 | DEFAULT_REALM = 'pypi' | ||
| 27 | |||
| 28 | class PackageIndex(object): | ||
| 29 | """ | ||
| 30 | This class represents a package index compatible with PyPI, the Python | ||
| 31 | Package Index. | ||
| 32 | """ | ||
| 33 | |||
| 34 | boundary = b'----------ThIs_Is_tHe_distlib_index_bouNdaRY_$' | ||
| 35 | |||
| 36 | def __init__(self, url=None): | ||
| 37 | """ | ||
| 38 | Initialise an instance. | ||
| 39 | |||
| 40 | :param url: The URL of the index. If not specified, the URL for PyPI is | ||
| 41 | used. | ||
| 42 | """ | ||
| 43 | self.url = url or DEFAULT_INDEX | ||
| 44 | self.read_configuration() | ||
| 45 | scheme, netloc, path, params, query, frag = urlparse(self.url) | ||
| 46 | if params or query or frag or scheme not in ('http', 'https'): | ||
| 47 | raise DistlibException('invalid repository: %s' % self.url) | ||
| 48 | self.password_handler = None | ||
| 49 | self.ssl_verifier = None | ||
| 50 | self.gpg = None | ||
| 51 | self.gpg_home = None | ||
| 52 | with open(os.devnull, 'w') as sink: | ||
| 53 | # Use gpg by default rather than gpg2, as gpg2 insists on | ||
| 54 | # prompting for passwords | ||
| 55 | for s in ('gpg', 'gpg2'): | ||
| 56 | try: | ||
| 57 | rc = subprocess.check_call([s, '--version'], stdout=sink, | ||
| 58 | stderr=sink) | ||
| 59 | if rc == 0: | ||
| 60 | self.gpg = s | ||
| 61 | break | ||
| 62 | except OSError: | ||
| 63 | pass | ||
| 64 | |||
| 65 | def _get_pypirc_command(self): | ||
| 66 | """ | ||
| 67 | Get the distutils command for interacting with PyPI configurations. | ||
| 68 | :return: the command. | ||
| 69 | """ | ||
| 70 | from distutils.core import Distribution | ||
| 71 | from distutils.config import PyPIRCCommand | ||
| 72 | d = Distribution() | ||
| 73 | return PyPIRCCommand(d) | ||
| 74 | |||
| 75 | def read_configuration(self): | ||
| 76 | """ | ||
| 77 | Read the PyPI access configuration as supported by distutils, getting | ||
| 78 | PyPI to do the actual work. This populates ``username``, ``password``, | ||
| 79 | ``realm`` and ``url`` attributes from the configuration. | ||
| 80 | """ | ||
| 81 | # get distutils to do the work | ||
| 82 | c = self._get_pypirc_command() | ||
| 83 | c.repository = self.url | ||
| 84 | cfg = c._read_pypirc() | ||
| 85 | self.username = cfg.get('username') | ||
| 86 | self.password = cfg.get('password') | ||
| 87 | self.realm = cfg.get('realm', 'pypi') | ||
| 88 | self.url = cfg.get('repository', self.url) | ||
| 89 | |||
| 90 | def save_configuration(self): | ||
| 91 | """ | ||
| 92 | Save the PyPI access configuration. You must have set ``username`` and | ||
| 93 | ``password`` attributes before calling this method. | ||
| 94 | |||
| 95 | Again, distutils is used to do the actual work. | ||
| 96 | """ | ||
| 97 | self.check_credentials() | ||
| 98 | # get distutils to do the work | ||
| 99 | c = self._get_pypirc_command() | ||
| 100 | c._store_pypirc(self.username, self.password) | ||
| 101 | |||
| 102 | def check_credentials(self): | ||
| 103 | """ | ||
| 104 | Check that ``username`` and ``password`` have been set, and raise an | ||
| 105 | exception if not. | ||
| 106 | """ | ||
| 107 | if self.username is None or self.password is None: | ||
| 108 | raise DistlibException('username and password must be set') | ||
| 109 | pm = HTTPPasswordMgr() | ||
| 110 | _, netloc, _, _, _, _ = urlparse(self.url) | ||
| 111 | pm.add_password(self.realm, netloc, self.username, self.password) | ||
| 112 | self.password_handler = HTTPBasicAuthHandler(pm) | ||
| 113 | |||
| 114 | def register(self, metadata): | ||
| 115 | """ | ||
| 116 | Register a distribution on PyPI, using the provided metadata. | ||
| 117 | |||
| 118 | :param metadata: A :class:`Metadata` instance defining at least a name | ||
| 119 | and version number for the distribution to be | ||
| 120 | registered. | ||
| 121 | :return: The HTTP response received from PyPI upon submission of the | ||
| 122 | request. | ||
| 123 | """ | ||
| 124 | self.check_credentials() | ||
| 125 | metadata.validate() | ||
| 126 | d = metadata.todict() | ||
| 127 | d[':action'] = 'verify' | ||
| 128 | request = self.encode_request(d.items(), []) | ||
| 129 | response = self.send_request(request) | ||
| 130 | d[':action'] = 'submit' | ||
| 131 | request = self.encode_request(d.items(), []) | ||
| 132 | return self.send_request(request) | ||
| 133 | |||
| 134 | def _reader(self, name, stream, outbuf): | ||
| 135 | """ | ||
| 136 | Thread runner for reading lines of output from a subprocess into a buffer. | ||
| 137 | |||
| 138 | :param name: The logical name of the stream (used for logging only). | ||
| 139 | :param stream: The stream to read from. This will typically be a pipe | ||
| 140 | connected to the output stream of a subprocess. | ||
| 141 | :param outbuf: The list to append the read lines to. | ||
| 142 | """ | ||
| 143 | while True: | ||
| 144 | s = stream.readline() | ||
| 145 | if not s: | ||
| 146 | break | ||
| 147 | s = s.decode('utf-8').rstrip() | ||
| 148 | outbuf.append(s) | ||
| 149 | logger.debug('%s: %s', name, s) | ||
| 150 | stream.close() | ||
| 151 | |||
| 152 | def get_sign_command(self, filename, signer, sign_password, | ||
| 153 | keystore=None): | ||
| 154 | """ | ||
| 155 | Return a suitable command for signing a file. | ||
| 156 | |||
| 157 | :param filename: The pathname to the file to be signed. | ||
| 158 | :param signer: The identifier of the signer of the file. | ||
| 159 | :param sign_password: The passphrase for the signer's | ||
| 160 | private key used for signing. | ||
| 161 | :param keystore: The path to a directory which contains the keys | ||
| 162 | used in verification. If not specified, the | ||
| 163 | instance's ``gpg_home`` attribute is used instead. | ||
| 164 | :return: The signing command as a list suitable to be | ||
| 165 | passed to :class:`subprocess.Popen`. | ||
| 166 | """ | ||
| 167 | cmd = [self.gpg, '--status-fd', '2', '--no-tty'] | ||
| 168 | if keystore is None: | ||
| 169 | keystore = self.gpg_home | ||
| 170 | if keystore: | ||
| 171 | cmd.extend(['--homedir', keystore]) | ||
| 172 | if sign_password is not None: | ||
| 173 | cmd.extend(['--batch', '--passphrase-fd', '0']) | ||
| 174 | td = tempfile.mkdtemp() | ||
| 175 | sf = os.path.join(td, os.path.basename(filename) + '.asc') | ||
| 176 | cmd.extend(['--detach-sign', '--armor', '--local-user', | ||
| 177 | signer, '--output', sf, filename]) | ||
| 178 | logger.debug('invoking: %s', ' '.join(cmd)) | ||
| 179 | return cmd, sf | ||
| 180 | |||
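
For illustration, the command list assembled for a passphrase-protected, detached, ASCII-armored signature would look like the following. All paths and identities here are hypothetical, and nothing is executed:

```python
gpg = 'gpg'
keystore = '/home/alice/.gnupg'           # hypothetical gpg home
signer = 'alice@example.com'              # hypothetical key identity
filename = '/tmp/pkg-1.0.tar.gz'
sig_file = '/tmp/tmpdir/pkg-1.0.tar.gz.asc'

cmd = [gpg, '--status-fd', '2', '--no-tty',
       '--homedir', keystore,
       '--batch', '--passphrase-fd', '0',   # passphrase will arrive on stdin
       '--detach-sign', '--armor', '--local-user', signer,
       '--output', sig_file, filename]
print(' '.join(cmd))
```
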
| 181 | def run_command(self, cmd, input_data=None): | ||
| 182 | """ | ||
| 183 | Run a command in a child process, passing it any input data specified. | ||
| 184 | |||
| 185 | :param cmd: The command to run. | ||
| 186 | :param input_data: If specified, this must be a byte string containing | ||
| 187 | data to be sent to the child process. | ||
| 188 | :return: A tuple consisting of the subprocess' exit code, a list of | ||
| 189 | lines read from the subprocess' ``stdout``, and a list of | ||
| 190 | lines read from the subprocess' ``stderr``. | ||
| 191 | """ | ||
| 192 | kwargs = { | ||
| 193 | 'stdout': subprocess.PIPE, | ||
| 194 | 'stderr': subprocess.PIPE, | ||
| 195 | } | ||
| 196 | if input_data is not None: | ||
| 197 | kwargs['stdin'] = subprocess.PIPE | ||
| 198 | stdout = [] | ||
| 199 | stderr = [] | ||
| 200 | p = subprocess.Popen(cmd, **kwargs) | ||
| 201 | # We don't use communicate() here because we may need to | ||
| 202 | # get clever with interacting with the command | ||
| 203 | t1 = Thread(target=self._reader, args=('stdout', p.stdout, stdout)) | ||
| 204 | t1.start() | ||
| 205 | t2 = Thread(target=self._reader, args=('stderr', p.stderr, stderr)) | ||
| 206 | t2.start() | ||
| 207 | if input_data is not None: | ||
| 208 | p.stdin.write(input_data) | ||
| 209 | p.stdin.close() | ||
| 210 | |||
| 211 | p.wait() | ||
| 212 | t1.join() | ||
| 213 | t2.join() | ||
| 214 | return p.returncode, stdout, stderr | ||
| 215 | |||
| 216 | def sign_file(self, filename, signer, sign_password, keystore=None): | ||
| 217 | """ | ||
| 218 | Sign a file. | ||
| 219 | |||
| 220 | :param filename: The pathname to the file to be signed. | ||
| 221 | :param signer: The identifier of the signer of the file. | ||
| 222 | :param sign_password: The passphrase for the signer's | ||
| 223 | private key used for signing. | ||
| 224 | :param keystore: The path to a directory which contains the keys | ||
| 225 | used in signing. If not specified, the instance's | ||
| 226 | ``gpg_home`` attribute is used instead. | ||
| 227 | :return: The absolute pathname of the file where the signature is | ||
| 228 | stored. | ||
| 229 | """ | ||
| 230 | cmd, sig_file = self.get_sign_command(filename, signer, sign_password, | ||
| 231 | keystore) | ||
| 232 | rc, stdout, stderr = self.run_command(cmd, | ||
| 233 | sign_password.encode('utf-8')) | ||
| 234 | if rc != 0: | ||
| 235 | raise DistlibException('sign command failed with error ' | ||
| 236 | 'code %s' % rc) | ||
| 237 | return sig_file | ||
| 238 | |||
| 239 | def upload_file(self, metadata, filename, signer=None, sign_password=None, | ||
| 240 | filetype='sdist', pyversion='source', keystore=None): | ||
| 241 | """ | ||
| 242 | Upload a release file to the index. | ||
| 243 | |||
| 244 | :param metadata: A :class:`Metadata` instance defining at least a name | ||
| 245 | and version number for the file to be uploaded. | ||
| 246 | :param filename: The pathname of the file to be uploaded. | ||
| 247 | :param signer: The identifier of the signer of the file. | ||
| 248 | :param sign_password: The passphrase for the signer's | ||
| 249 | private key used for signing. | ||
| 250 | :param filetype: The type of the file being uploaded. This is the | ||
| 251 | distutils command which produced that file, e.g. | ||
| 252 | ``sdist`` or ``bdist_wheel``. | ||
| 253 | :param pyversion: The version of Python which the release relates | ||
| 254 | to. For code compatible with any Python, this would | ||
| 255 | be ``source``, otherwise it would be e.g. ``3.2``. | ||
| 256 | :param keystore: The path to a directory which contains the keys | ||
| 257 | used in signing. If not specified, the instance's | ||
| 258 | ``gpg_home`` attribute is used instead. | ||
| 259 | :return: The HTTP response received from PyPI upon submission of the | ||
| 260 | request. | ||
| 261 | """ | ||
| 262 | self.check_credentials() | ||
| 263 | if not os.path.exists(filename): | ||
| 264 | raise DistlibException('not found: %s' % filename) | ||
| 265 | metadata.validate() | ||
| 266 | d = metadata.todict() | ||
| 267 | sig_file = None | ||
| 268 | if signer: | ||
| 269 | if not self.gpg: | ||
| 270 | logger.warning('no signing program available - not signed') | ||
| 271 | else: | ||
| 272 | sig_file = self.sign_file(filename, signer, sign_password, | ||
| 273 | keystore) | ||
| 274 | with open(filename, 'rb') as f: | ||
| 275 | file_data = f.read() | ||
| 276 | md5_digest = hashlib.md5(file_data).hexdigest() | ||
| 277 | sha256_digest = hashlib.sha256(file_data).hexdigest() | ||
| 278 | d.update({ | ||
| 279 | ':action': 'file_upload', | ||
| 280 | 'protocol_version': '1', | ||
| 281 | 'filetype': filetype, | ||
| 282 | 'pyversion': pyversion, | ||
| 283 | 'md5_digest': md5_digest, | ||
| 284 | 'sha256_digest': sha256_digest, | ||
| 285 | }) | ||
| 286 | files = [('content', os.path.basename(filename), file_data)] | ||
| 287 | if sig_file: | ||
| 288 | with open(sig_file, 'rb') as f: | ||
| 289 | sig_data = f.read() | ||
| 290 | files.append(('gpg_signature', os.path.basename(sig_file), | ||
| 291 | sig_data)) | ||
| 292 | shutil.rmtree(os.path.dirname(sig_file)) | ||
| 293 | request = self.encode_request(d.items(), files) | ||
| 294 | return self.send_request(request) | ||
| 295 | |||
| 296 | def upload_documentation(self, metadata, doc_dir): | ||
| 297 | """ | ||
| 298 | Upload documentation to the index. | ||
| 299 | |||
| 300 | :param metadata: A :class:`Metadata` instance defining at least a name | ||
| 301 | and version number for the documentation to be | ||
| 302 | uploaded. | ||
| 303 | :param doc_dir: The pathname of the directory which contains the | ||
| 304 | documentation. This should be the directory that | ||
| 305 | contains the ``index.html`` for the documentation. | ||
| 306 | :return: The HTTP response received from PyPI upon submission of the | ||
| 307 | request. | ||
| 308 | """ | ||
| 309 | self.check_credentials() | ||
| 310 | if not os.path.isdir(doc_dir): | ||
| 311 | raise DistlibException('not a directory: %r' % doc_dir) | ||
| 312 | fn = os.path.join(doc_dir, 'index.html') | ||
| 313 | if not os.path.exists(fn): | ||
| 314 | raise DistlibException('not found: %r' % fn) | ||
| 315 | metadata.validate() | ||
| 316 | name, version = metadata.name, metadata.version | ||
| 317 | zip_data = zip_dir(doc_dir).getvalue() | ||
| 318 | fields = [(':action', 'doc_upload'), | ||
| 319 | ('name', name), ('version', version)] | ||
| 320 | files = [('content', name, zip_data)] | ||
| 321 | request = self.encode_request(fields, files) | ||
| 322 | return self.send_request(request) | ||
| 323 | |||
| 324 | def get_verify_command(self, signature_filename, data_filename, | ||
| 325 | keystore=None): | ||
| 326 | """ | ||
| 327 | Return a suitable command for verifying a file. | ||
| 328 | |||
| 329 | :param signature_filename: The pathname to the file containing the | ||
| 330 | signature. | ||
| 331 | :param data_filename: The pathname to the file containing the | ||
| 332 | signed data. | ||
| 333 | :param keystore: The path to a directory which contains the keys | ||
| 334 | used in verification. If not specified, the | ||
| 335 | instance's ``gpg_home`` attribute is used instead. | ||
| 336 | :return: The verifying command as a list suitable to be | ||
| 337 | passed to :class:`subprocess.Popen`. | ||
| 338 | """ | ||
| 339 | cmd = [self.gpg, '--status-fd', '2', '--no-tty'] | ||
| 340 | if keystore is None: | ||
| 341 | keystore = self.gpg_home | ||
| 342 | if keystore: | ||
| 343 | cmd.extend(['--homedir', keystore]) | ||
| 344 | cmd.extend(['--verify', signature_filename, data_filename]) | ||
| 345 | logger.debug('invoking: %s', ' '.join(cmd)) | ||
| 346 | return cmd | ||
| 347 | |||
| 348 | def verify_signature(self, signature_filename, data_filename, | ||
| 349 | keystore=None): | ||
| 350 | """ | ||
| 351 | Verify a signature for a file. | ||
| 352 | |||
| 353 | :param signature_filename: The pathname to the file containing the | ||
| 354 | signature. | ||
| 355 | :param data_filename: The pathname to the file containing the | ||
| 356 | signed data. | ||
| 357 | :param keystore: The path to a directory which contains the keys | ||
| 358 | used in verification. If not specified, the | ||
| 359 | instance's ``gpg_home`` attribute is used instead. | ||
| 360 | :return: True if the signature was verified, else False. | ||
| 361 | """ | ||
| 362 | if not self.gpg: | ||
| 363 | raise DistlibException('verification unavailable because gpg ' | ||
| 364 | 'unavailable') | ||
| 365 | cmd = self.get_verify_command(signature_filename, data_filename, | ||
| 366 | keystore) | ||
| 367 | rc, stdout, stderr = self.run_command(cmd) | ||
| 368 | if rc not in (0, 1): | ||
| 369 | raise DistlibException('verify command failed with error ' | ||
| 370 | 'code %s' % rc) | ||
| 371 | return rc == 0 | ||
| 372 | |||
| 373 | def download_file(self, url, destfile, digest=None, reporthook=None): | ||
| 374 | """ | ||
| 375 | This is a convenience method for downloading a file from a URL. | ||
| 376 | Normally, this will be a file from the index, though currently | ||
| 377 | no check is made for this (i.e. a file can be downloaded from | ||
| 378 | anywhere). | ||
| 379 | |||
| 380 | The method is just like the :func:`urlretrieve` function in the | ||
| 381 | standard library, except that it allows digest computation to be | ||
| 382 | done during download and the downloaded data to be checked | ||
| 383 | against any expected value. | ||
| 384 | |||
| 385 | :param url: The URL of the file to be downloaded (assumed to be | ||
| 386 | available via an HTTP GET request). | ||
| 387 | :param destfile: The pathname where the downloaded file is to be | ||
| 388 | saved. | ||
| 389 | :param digest: If specified, this must be a (hasher, value) | ||
| 390 | tuple, where hasher is the algorithm used (e.g. | ||
| 391 | ``'md5'``) and ``value`` is the expected value. | ||
| 392 | :param reporthook: The same as for :func:`urlretrieve` in the | ||
| 393 | standard library. | ||
| 394 | """ | ||
| 395 | if digest is None: | ||
| 396 | digester = None | ||
| 397 | logger.debug('No digest specified') | ||
| 398 | else: | ||
| 399 | if isinstance(digest, (list, tuple)): | ||
| 400 | hasher, digest = digest | ||
| 401 | else: | ||
| 402 | hasher = 'md5' | ||
| 403 | digester = getattr(hashlib, hasher)() | ||
| 404 | logger.debug('Digest specified: %s', digest) | ||
| 405 | # The following code is equivalent to urlretrieve. | ||
| 406 | # We need to do it this way so that we can compute the | ||
| 407 | # digest of the file as we go. | ||
| 408 | with open(destfile, 'wb') as dfp: | ||
| 409 | # addinfourl is not a context manager on 2.x | ||
| 410 | # so we have to use try/finally | ||
| 411 | sfp = self.send_request(Request(url)) | ||
| 412 | try: | ||
| 413 | headers = sfp.info() | ||
| 414 | blocksize = 8192 | ||
| 415 | size = -1 | ||
| 416 | read = 0 | ||
| 417 | blocknum = 0 | ||
| 418 | if "content-length" in headers: | ||
| 419 | size = int(headers["Content-Length"]) | ||
| 420 | if reporthook: | ||
| 421 | reporthook(blocknum, blocksize, size) | ||
| 422 | while True: | ||
| 423 | block = sfp.read(blocksize) | ||
| 424 | if not block: | ||
| 425 | break | ||
| 426 | read += len(block) | ||
| 427 | dfp.write(block) | ||
| 428 | if digester: | ||
| 429 | digester.update(block) | ||
| 430 | blocknum += 1 | ||
| 431 | if reporthook: | ||
| 432 | reporthook(blocknum, blocksize, size) | ||
| 433 | finally: | ||
| 434 | sfp.close() | ||
| 435 | |||
| 436 | # check that we got the whole file, if we can | ||
| 437 | if size >= 0 and read < size: | ||
| 438 | raise DistlibException( | ||
| 439 | 'retrieval incomplete: got only %d out of %d bytes' | ||
| 440 | % (read, size)) | ||
| 441 | # if we have a digest, it must match. | ||
| 442 | if digester: | ||
| 443 | actual = digester.hexdigest() | ||
| 444 | if digest != actual: | ||
| 445 | raise DistlibException('%s digest mismatch for %s: expected ' | ||
| 446 | '%s, got %s' % (hasher, destfile, | ||
| 447 | digest, actual)) | ||
| 448 | logger.debug('Digest verified: %s', digest) | ||
| 449 | |||
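
A usage sketch: passing a ``(hasher, hexdigest)`` tuple makes the download verify itself as the bytes stream in, raising ``DistlibException`` on a mismatch. The URL and digest below are placeholders, and constructing ``PackageIndex`` assumes a distutils-based environment with a readable (possibly empty) ``.pypirc``:

```python
from pip._vendor.distlib.index import PackageIndex

index = PackageIndex()
index.download_file(
    'https://files.example.com/pkg-1.0.tar.gz',   # placeholder URL
    '/tmp/pkg-1.0.tar.gz',
    digest=('sha256', 'aa11bb22...'),             # placeholder hex digest
)
```
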
| 450 | def send_request(self, req): | ||
| 451 | """ | ||
| 452 | Send a standard library :class:`Request` to PyPI and return its | ||
| 453 | response. | ||
| 454 | |||
| 455 | :param req: The request to send. | ||
| 456 | :return: The HTTP response from PyPI (a standard library HTTPResponse). | ||
| 457 | """ | ||
| 458 | handlers = [] | ||
| 459 | if self.password_handler: | ||
| 460 | handlers.append(self.password_handler) | ||
| 461 | if self.ssl_verifier: | ||
| 462 | handlers.append(self.ssl_verifier) | ||
| 463 | opener = build_opener(*handlers) | ||
| 464 | return opener.open(req) | ||
| 465 | |||
| 466 | def encode_request(self, fields, files): | ||
| 467 | """ | ||
| 468 | Encode fields and files for posting to an HTTP server. | ||
| 469 | |||
| 470 | :param fields: The fields to send as a list of (fieldname, value) | ||
| 471 | tuples. | ||
| 472 | :param files: The files to send as a list of (fieldname, filename, | ||
| 473 | file_bytes) tuples. | ||
| 474 | """ | ||
| 475 | # Adapted from packaging, which in turn was adapted from | ||
| 476 | # http://code.activestate.com/recipes/146306 | ||
| 477 | |||
| 478 | parts = [] | ||
| 479 | boundary = self.boundary | ||
| 480 | for k, values in fields: | ||
| 481 | if not isinstance(values, (list, tuple)): | ||
| 482 | values = [values] | ||
| 483 | |||
| 484 | for v in values: | ||
| 485 | parts.extend(( | ||
| 486 | b'--' + boundary, | ||
| 487 | ('Content-Disposition: form-data; name="%s"' % | ||
| 488 | k).encode('utf-8'), | ||
| 489 | b'', | ||
| 490 | v.encode('utf-8'))) | ||
| 491 | for key, filename, value in files: | ||
| 492 | parts.extend(( | ||
| 493 | b'--' + boundary, | ||
| 494 | ('Content-Disposition: form-data; name="%s"; filename="%s"' % | ||
| 495 | (key, filename)).encode('utf-8'), | ||
| 496 | b'', | ||
| 497 | value)) | ||
| 498 | |||
| 499 | parts.extend((b'--' + boundary + b'--', b'')) | ||
| 500 | |||
| 501 | body = b'\r\n'.join(parts) | ||
| 502 | ct = b'multipart/form-data; boundary=' + boundary | ||
| 503 | headers = { | ||
| 504 | 'Content-type': ct, | ||
| 505 | 'Content-length': str(len(body)) | ||
| 506 | } | ||
| 507 | return Request(self.url, body, headers) | ||
| 508 | |||
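For reference, a sketch of the wire format ``encode_request`` produces — one form field plus one file part, with a made-up ``BOUNDARY`` standing in for the instance's ``self.boundary``:

    # Hypothetical illustration of the generated multipart/form-data body.
    body = b'\r\n'.join([
        b'--BOUNDARY',
        b'Content-Disposition: form-data; name="name"',
        b'',
        b'example-pkg',
        b'--BOUNDARY',
        b'Content-Disposition: form-data; name="content"; filename="example_pkg-1.0.tar.gz"',
        b'',
        b'<archive bytes>',
        b'--BOUNDARY--',
        b'',
    ])
    headers = {'Content-type': b'multipart/form-data; boundary=BOUNDARY',
               'Content-length': str(len(body))}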
| 509 | def search(self, terms, operator=None): | ||
| 510 | if isinstance(terms, string_types): | ||
| 511 | terms = {'name': terms} | ||
| 512 | rpc_proxy = ServerProxy(self.url, timeout=3.0) | ||
| 513 | try: | ||
| 514 | return rpc_proxy.search(terms, operator or 'and') | ||
| 515 | finally: | ||
| 516 | rpc_proxy('close')() | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/locators.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/locators.py new file mode 100644 index 0000000..9131b77 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/locators.py | |||
| @@ -0,0 +1,1292 @@ | |||
| 1 | # -*- coding: utf-8 -*- | ||
| 2 | # | ||
| 3 | # Copyright (C) 2012-2015 Vinay Sajip. | ||
| 4 | # Licensed to the Python Software Foundation under a contributor agreement. | ||
| 5 | # See LICENSE.txt and CONTRIBUTORS.txt. | ||
| 6 | # | ||
| 7 | |||
| 8 | import gzip | ||
| 9 | from io import BytesIO | ||
| 10 | import json | ||
| 11 | import logging | ||
| 12 | import os | ||
| 13 | import posixpath | ||
| 14 | import re | ||
| 15 | try: | ||
| 16 | import threading | ||
| 17 | except ImportError: # pragma: no cover | ||
| 18 | import dummy_threading as threading | ||
| 19 | import zlib | ||
| 20 | |||
| 21 | from . import DistlibException | ||
| 22 | from .compat import (urljoin, urlparse, urlunparse, url2pathname, pathname2url, | ||
| 23 | queue, quote, unescape, string_types, build_opener, | ||
| 24 | HTTPRedirectHandler as BaseRedirectHandler, text_type, | ||
| 25 | Request, HTTPError, URLError) | ||
| 26 | from .database import Distribution, DistributionPath, make_dist | ||
| 27 | from .metadata import Metadata, MetadataInvalidError | ||
| 28 | from .util import (cached_property, parse_credentials, ensure_slash, | ||
| 29 | split_filename, get_project_data, parse_requirement, | ||
| 30 | parse_name_and_version, ServerProxy, normalize_name) | ||
| 31 | from .version import get_scheme, UnsupportedVersionError | ||
| 32 | from .wheel import Wheel, is_compatible | ||
| 33 | |||
| 34 | logger = logging.getLogger(__name__) | ||
| 35 | |||
| 36 | HASHER_HASH = re.compile(r'^(\w+)=([a-f0-9]+)') | ||
| 37 | CHARSET = re.compile(r';\s*charset\s*=\s*(.*)\s*$', re.I) | ||
| 38 | HTML_CONTENT_TYPE = re.compile('text/html|application/x(ht)?ml') | ||
| 39 | DEFAULT_INDEX = 'https://pypi.python.org/pypi' | ||
| 40 | |||
| 41 | def get_all_distribution_names(url=None): | ||
| 42 | """ | ||
| 43 | Return all distribution names known by an index. | ||
| 44 | :param url: The URL of the index. | ||
| 45 | :return: A list of all known distribution names. | ||
| 46 | """ | ||
| 47 | if url is None: | ||
| 48 | url = DEFAULT_INDEX | ||
| 49 | client = ServerProxy(url, timeout=3.0) | ||
| 50 | try: | ||
| 51 | return client.list_packages() | ||
| 52 | finally: | ||
| 53 | client('close')() | ||
| 54 | |||
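A minimal usage sketch of the helper above. Note that it talks XML-RPC, which PyPI has since restricted, so it may not succeed against the live index:

    from pip._vendor.distlib.locators import get_all_distribution_names

    names = get_all_distribution_names()  # defaults to DEFAULT_INDEX
    print('%d distributions known to the index' % len(names))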
| 55 | class RedirectHandler(BaseRedirectHandler): | ||
| 56 | """ | ||
| 57 | A class to work around a bug in some Python 3.2.x releases. | ||
| 58 | """ | ||
| 59 | # There's a bug in the base version for some 3.2.x | ||
| 60 | # (e.g. 3.2.2 on Ubuntu Oneiric). If a Location header | ||
| 61 | # returns e.g. /abc, it bails because it says the scheme '' | ||
| 62 | # is bogus, when actually it should use the request's | ||
| 63 | # URL for the scheme. See Python issue #13696. | ||
| 64 | def http_error_302(self, req, fp, code, msg, headers): | ||
| 65 | # Some servers (incorrectly) return multiple Location headers | ||
| 66 | # (so probably same goes for URI). Use first header. | ||
| 67 | newurl = None | ||
| 68 | for key in ('location', 'uri'): | ||
| 69 | if key in headers: | ||
| 70 | newurl = headers[key] | ||
| 71 | break | ||
| 72 | if newurl is None: # pragma: no cover | ||
| 73 | return | ||
| 74 | urlparts = urlparse(newurl) | ||
| 75 | if urlparts.scheme == '': | ||
| 76 | newurl = urljoin(req.get_full_url(), newurl) | ||
| 77 | if hasattr(headers, 'replace_header'): | ||
| 78 | headers.replace_header(key, newurl) | ||
| 79 | else: | ||
| 80 | headers[key] = newurl | ||
| 81 | return BaseRedirectHandler.http_error_302(self, req, fp, code, msg, | ||
| 82 | headers) | ||
| 83 | |||
| 84 | http_error_301 = http_error_303 = http_error_307 = http_error_302 | ||
| 85 | |||
| 86 | class Locator(object): | ||
| 87 | """ | ||
| 88 | A base class for locators - things that locate distributions. | ||
| 89 | """ | ||
| 90 | source_extensions = ('.tar.gz', '.tar.bz2', '.tar', '.zip', '.tgz', '.tbz') | ||
| 91 | binary_extensions = ('.egg', '.exe', '.whl') | ||
| 92 | excluded_extensions = ('.pdf',) | ||
| 93 | |||
| 94 | # A list of tags indicating which wheels you want to match. The default | ||
| 95 | # value of None matches against the tags compatible with the running | ||
| 96 | # Python. If you want to match other values, set wheel_tags on a locator | ||
| 97 | # instance to a list of tuples (pyver, abi, arch) which you want to match. | ||
| 98 | wheel_tags = None | ||
| 99 | |||
| 100 | downloadable_extensions = source_extensions + ('.whl',) | ||
| 101 | |||
| 102 | def __init__(self, scheme='default'): | ||
| 103 | """ | ||
| 104 | Initialise an instance. | ||
| 105 | :param scheme: Because locators look for most recent versions, they | ||
| 106 | need to know the version scheme to use. This specifies | ||
| 107 | the current PEP-recommended scheme - use ``'legacy'`` | ||
| 108 | if you need to support existing distributions on PyPI. | ||
| 109 | """ | ||
| 110 | self._cache = {} | ||
| 111 | self.scheme = scheme | ||
| 112 | # Because of bugs in some of the handlers on some of the platforms, | ||
| 113 | # we use our own opener rather than just using urlopen. | ||
| 114 | self.opener = build_opener(RedirectHandler()) | ||
| 115 | # If get_project() is called from locate(), the matcher instance | ||
| 116 | # is set from the requirement passed to locate(). See issue #18 for | ||
| 117 | # why this can be useful to know. | ||
| 118 | self.matcher = None | ||
| 119 | self.errors = queue.Queue() | ||
| 120 | |||
| 121 | def get_errors(self): | ||
| 122 | """ | ||
| 123 | Return any errors which have occurred. | ||
| 124 | """ | ||
| 125 | result = [] | ||
| 126 | while not self.errors.empty(): # pragma: no cover | ||
| 127 | try: | ||
| 128 | e = self.errors.get(False) | ||
| 129 | result.append(e) | ||
| 130 | except queue.Empty: | ||
| 131 | continue | ||
| 132 | self.errors.task_done() | ||
| 133 | return result | ||
| 134 | |||
| 135 | def clear_errors(self): | ||
| 136 | """ | ||
| 137 | Clear any errors which may have been logged. | ||
| 138 | """ | ||
| 139 | # Just get the errors and throw them away | ||
| 140 | self.get_errors() | ||
| 141 | |||
| 142 | def clear_cache(self): | ||
| 143 | self._cache.clear() | ||
| 144 | |||
| 145 | def _get_scheme(self): | ||
| 146 | return self._scheme | ||
| 147 | |||
| 148 | def _set_scheme(self, value): | ||
| 149 | self._scheme = value | ||
| 150 | |||
| 151 | scheme = property(_get_scheme, _set_scheme) | ||
| 152 | |||
| 153 | def _get_project(self, name): | ||
| 154 | """ | ||
| 155 | For a given project, get a dictionary mapping available versions to Distribution | ||
| 156 | instances. | ||
| 157 | |||
| 158 | This should be implemented in subclasses. | ||
| 159 | |||
| 160 | If called from a locate() request, self.matcher will be set to a | ||
| 161 | matcher for the requirement to satisfy, otherwise it will be None. | ||
| 162 | """ | ||
| 163 | raise NotImplementedError('Please implement in the subclass') | ||
| 164 | |||
| 165 | def get_distribution_names(self): | ||
| 166 | """ | ||
| 167 | Return all the distribution names known to this locator. | ||
| 168 | """ | ||
| 169 | raise NotImplementedError('Please implement in the subclass') | ||
| 170 | |||
| 171 | def get_project(self, name): | ||
| 172 | """ | ||
| 173 | For a given project, get a dictionary mapping available versions to Distribution | ||
| 174 | instances. | ||
| 175 | |||
| 176 | This calls _get_project to do all the work, and just implements a caching layer on top. | ||
| 177 | """ | ||
| 178 | if self._cache is None: # pragma: no cover | ||
| 179 | result = self._get_project(name) | ||
| 180 | elif name in self._cache: | ||
| 181 | result = self._cache[name] | ||
| 182 | else: | ||
| 183 | self.clear_errors() | ||
| 184 | result = self._get_project(name) | ||
| 185 | self._cache[name] = result | ||
| 186 | return result | ||
| 187 | |||
| 188 | def score_url(self, url): | ||
| 189 | """ | ||
| 190 | Give a URL a score which can be used to choose preferred URLs | ||
| 191 | for a given project release. | ||
| 192 | """ | ||
| 193 | t = urlparse(url) | ||
| 194 | basename = posixpath.basename(t.path) | ||
| 195 | compatible = True | ||
| 196 | is_wheel = basename.endswith('.whl') | ||
| 197 | is_downloadable = basename.endswith(self.downloadable_extensions) | ||
| 198 | if is_wheel: | ||
| 199 | compatible = is_compatible(Wheel(basename), self.wheel_tags) | ||
| 200 | return (t.scheme == 'https', 'pypi.python.org' in t.netloc, | ||
| 201 | is_downloadable, is_wheel, compatible, basename) | ||
| 202 | |||
| 203 | def prefer_url(self, url1, url2): | ||
| 204 | """ | ||
| 205 | Choose one of two URLs where both are candidates for distribution | ||
| 206 | archives for the same version of a distribution (for example, | ||
| 207 | .tar.gz vs. zip). | ||
| 208 | |||
| 209 | The current implementation favours https:// URLs over http://, archives | ||
| 210 | from PyPI over those from other locations, wheel compatibility (if a | ||
| 211 | wheel) and then the archive name. | ||
| 212 | """ | ||
| 213 | result = url2 | ||
| 214 | if url1: | ||
| 215 | s1 = self.score_url(url1) | ||
| 216 | s2 = self.score_url(url2) | ||
| 217 | if s1 > s2: | ||
| 218 | result = url1 | ||
| 219 | if result != url2: | ||
| 220 | logger.debug('Not replacing %r with %r', url1, url2) | ||
| 221 | else: | ||
| 222 | logger.debug('Replacing %r with %r', url1, url2) | ||
| 223 | return result | ||
| 224 | |||
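Because ``score_url`` returns a tuple, ``prefer_url`` reduces to Python's lexicographic tuple comparison; a hypothetical illustration:

    # (is_https, on_pypi, is_downloadable, is_wheel, is_compatible, basename)
    s1 = (True, True, True, False, True, 'foo-1.0.tar.gz')   # https sdist on PyPI
    s2 = (False, True, True, False, True, 'foo-1.0.zip')     # http sdist on PyPI
    assert s1 > s2   # https outranks http, so prefer_url keeps the first URL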
| 225 | def split_filename(self, filename, project_name): | ||
| 226 | """ | ||
| 227 | Attempt to split a filename into project name, version and Python version. | ||
| 228 | """ | ||
| 229 | return split_filename(filename, project_name) | ||
| 230 | |||
| 231 | def convert_url_to_download_info(self, url, project_name): | ||
| 232 | """ | ||
| 233 | See if a URL is a candidate for a download URL for a project (the URL | ||
| 234 | has typically been scraped from an HTML page). | ||
| 235 | |||
| 236 | If it is, a dictionary is returned with keys "name", "version", | ||
| 237 | "filename" and "url"; otherwise, None is returned. | ||
| 238 | """ | ||
| 239 | def same_project(name1, name2): | ||
| 240 | return normalize_name(name1) == normalize_name(name2) | ||
| 241 | |||
| 242 | result = None | ||
| 243 | scheme, netloc, path, params, query, frag = urlparse(url) | ||
| 244 | if frag.lower().startswith('egg='): # pragma: no cover | ||
| 245 | logger.debug('%s: version hint in fragment: %r', | ||
| 246 | project_name, frag) | ||
| 247 | m = HASHER_HASH.match(frag) | ||
| 248 | if m: | ||
| 249 | algo, digest = m.groups() | ||
| 250 | else: | ||
| 251 | algo, digest = None, None | ||
| 252 | origpath = path | ||
| 253 | if path and path[-1] == '/': # pragma: no cover | ||
| 254 | path = path[:-1] | ||
| 255 | if path.endswith('.whl'): | ||
| 256 | try: | ||
| 257 | wheel = Wheel(path) | ||
| 258 | if is_compatible(wheel, self.wheel_tags): | ||
| 259 | if project_name is None: | ||
| 260 | include = True | ||
| 261 | else: | ||
| 262 | include = same_project(wheel.name, project_name) | ||
| 263 | if include: | ||
| 264 | result = { | ||
| 265 | 'name': wheel.name, | ||
| 266 | 'version': wheel.version, | ||
| 267 | 'filename': wheel.filename, | ||
| 268 | 'url': urlunparse((scheme, netloc, origpath, | ||
| 269 | params, query, '')), | ||
| 270 | 'python-version': ', '.join( | ||
| 271 | ['.'.join(list(v[2:])) for v in wheel.pyver]), | ||
| 272 | } | ||
| 273 | except Exception as e: # pragma: no cover | ||
| 274 | logger.warning('invalid path for wheel: %s (%s)', path, e) | ||
| 275 | elif not path.endswith(self.downloadable_extensions): # pragma: no cover | ||
| 276 | logger.debug('Not downloadable: %s', path) | ||
| 277 | else: # downloadable extension | ||
| 278 | path = filename = posixpath.basename(path) | ||
| 279 | for ext in self.downloadable_extensions: | ||
| 280 | if path.endswith(ext): | ||
| 281 | path = path[:-len(ext)] | ||
| 282 | t = self.split_filename(path, project_name) | ||
| 283 | if not t: # pragma: no cover | ||
| 284 | logger.debug('No match for project/version: %s', path) | ||
| 285 | else: | ||
| 286 | name, version, pyver = t | ||
| 287 | if not project_name or same_project(project_name, name): | ||
| 288 | result = { | ||
| 289 | 'name': name, | ||
| 290 | 'version': version, | ||
| 291 | 'filename': filename, | ||
| 292 | 'url': urlunparse((scheme, netloc, origpath, | ||
| 293 | params, query, '')), | ||
| 294 | #'packagetype': 'sdist', | ||
| 295 | } | ||
| 296 | if pyver: # pragma: no cover | ||
| 297 | result['python-version'] = pyver | ||
| 298 | break | ||
| 299 | if result and algo: | ||
| 300 | result['%s_digest' % algo] = digest | ||
| 301 | return result | ||
| 302 | |||
| 303 | def _get_digest(self, info): | ||
| 304 | """ | ||
| 305 | Get a digest from a dictionary by looking at keys of the form | ||
| 306 | 'algo_digest'. | ||
| 307 | |||
| 308 | Returns a 2-tuple (algo, digest) if found, else None. Currently | ||
| 309 | looks only for SHA256, then MD5. | ||
| 310 | """ | ||
| 311 | result = None | ||
| 312 | for algo in ('sha256', 'md5'): | ||
| 313 | key = '%s_digest' % algo | ||
| 314 | if key in info: | ||
| 315 | result = (algo, info[key]) | ||
| 316 | break | ||
| 317 | return result | ||
| 318 | |||
| 319 | def _update_version_data(self, result, info): | ||
| 320 | """ | ||
| 321 | Update a result dictionary (the final result from _get_project) with a | ||
| 322 | dictionary for a specific version, which typically holds information | ||
| 323 | gleaned from a filename or URL for an archive for the distribution. | ||
| 324 | """ | ||
| 325 | name = info.pop('name') | ||
| 326 | version = info.pop('version') | ||
| 327 | if version in result: | ||
| 328 | dist = result[version] | ||
| 329 | md = dist.metadata | ||
| 330 | else: | ||
| 331 | dist = make_dist(name, version, scheme=self.scheme) | ||
| 332 | md = dist.metadata | ||
| 333 | dist.digest = digest = self._get_digest(info) | ||
| 334 | url = info['url'] | ||
| 335 | result['digests'][url] = digest | ||
| 336 | if md.source_url != info['url']: | ||
| 337 | md.source_url = self.prefer_url(md.source_url, url) | ||
| 338 | result['urls'].setdefault(version, set()).add(url) | ||
| 339 | dist.locator = self | ||
| 340 | result[version] = dist | ||
| 341 | |||
| 342 | def locate(self, requirement, prereleases=False): | ||
| 343 | """ | ||
| 344 | Find the most recent distribution which matches the given | ||
| 345 | requirement. | ||
| 346 | |||
| 347 | :param requirement: A requirement of the form 'foo (1.0)' or perhaps | ||
| 348 | 'foo (>= 1.0, < 2.0, != 1.3)' | ||
| 349 | :param prereleases: If ``True``, allow pre-release versions | ||
| 350 | to be located. Otherwise, pre-release versions | ||
| 351 | are not returned. | ||
| 352 | :return: A :class:`Distribution` instance, or ``None`` if no such | ||
| 353 | distribution could be located. | ||
| 354 | """ | ||
| 355 | result = None | ||
| 356 | r = parse_requirement(requirement) | ||
| 357 | if r is None: # pragma: no cover | ||
| 358 | raise DistlibException('Not a valid requirement: %r' % requirement) | ||
| 359 | scheme = get_scheme(self.scheme) | ||
| 360 | self.matcher = matcher = scheme.matcher(r.requirement) | ||
| 361 | logger.debug('matcher: %s (%s)', matcher, type(matcher).__name__) | ||
| 362 | versions = self.get_project(r.name) | ||
| 363 | if len(versions) > 2: # urls and digests keys are present | ||
| 364 | # sometimes, versions are invalid | ||
| 365 | slist = [] | ||
| 366 | vcls = matcher.version_class | ||
| 367 | for k in versions: | ||
| 368 | if k in ('urls', 'digests'): | ||
| 369 | continue | ||
| 370 | try: | ||
| 371 | if not matcher.match(k): | ||
| 372 | logger.debug('%s did not match %r', matcher, k) | ||
| 373 | else: | ||
| 374 | if prereleases or not vcls(k).is_prerelease: | ||
| 375 | slist.append(k) | ||
| 376 | else: | ||
| 377 | logger.debug('skipping pre-release ' | ||
| 378 | 'version %s of %s', k, matcher.name) | ||
| 379 | except Exception: # pragma: no cover | ||
| 380 | logger.warning('error matching %s with %r', matcher, k) | ||
| 381 | pass # slist.append(k) | ||
| 382 | if len(slist) > 1: | ||
| 383 | slist = sorted(slist, key=scheme.key) | ||
| 384 | if slist: | ||
| 385 | logger.debug('sorted list: %s', slist) | ||
| 386 | version = slist[-1] | ||
| 387 | result = versions[version] | ||
| 388 | if result: | ||
| 389 | if r.extras: | ||
| 390 | result.extras = r.extras | ||
| 391 | result.download_urls = versions.get('urls', {}).get(version, set()) | ||
| 392 | d = {} | ||
| 393 | sd = versions.get('digests', {}) | ||
| 394 | for url in result.download_urls: | ||
| 395 | if url in sd: # pragma: no cover | ||
| 396 | d[url] = sd[url] | ||
| 397 | result.digests = d | ||
| 398 | self.matcher = None | ||
| 399 | return result | ||
| 400 | |||
| 401 | |||
| 402 | class PyPIRPCLocator(Locator): | ||
| 403 | """ | ||
| 404 | This locator uses XML-RPC to locate distributions. It therefore | ||
| 405 | cannot be used with simple mirrors (that only mirror file content). | ||
| 406 | """ | ||
| 407 | def __init__(self, url, **kwargs): | ||
| 408 | """ | ||
| 409 | Initialise an instance. | ||
| 410 | |||
| 411 | :param url: The URL to use for XML-RPC. | ||
| 412 | :param kwargs: Passed to the superclass constructor. | ||
| 413 | """ | ||
| 414 | super(PyPIRPCLocator, self).__init__(**kwargs) | ||
| 415 | self.base_url = url | ||
| 416 | self.client = ServerProxy(url, timeout=3.0) | ||
| 417 | |||
| 418 | def get_distribution_names(self): | ||
| 419 | """ | ||
| 420 | Return all the distribution names known to this locator. | ||
| 421 | """ | ||
| 422 | return set(self.client.list_packages()) | ||
| 423 | |||
| 424 | def _get_project(self, name): | ||
| 425 | result = {'urls': {}, 'digests': {}} | ||
| 426 | versions = self.client.package_releases(name, True) | ||
| 427 | for v in versions: | ||
| 428 | urls = self.client.release_urls(name, v) | ||
| 429 | data = self.client.release_data(name, v) | ||
| 430 | metadata = Metadata(scheme=self.scheme) | ||
| 431 | metadata.name = data['name'] | ||
| 432 | metadata.version = data['version'] | ||
| 433 | metadata.license = data.get('license') | ||
| 434 | metadata.keywords = data.get('keywords', []) | ||
| 435 | metadata.summary = data.get('summary') | ||
| 436 | dist = Distribution(metadata) | ||
| 437 | if urls: | ||
| 438 | info = urls[0] | ||
| 439 | metadata.source_url = info['url'] | ||
| 440 | dist.digest = self._get_digest(info) | ||
| 441 | dist.locator = self | ||
| 442 | result[v] = dist | ||
| 443 | for info in urls: | ||
| 444 | url = info['url'] | ||
| 445 | digest = self._get_digest(info) | ||
| 446 | result['urls'].setdefault(v, set()).add(url) | ||
| 447 | result['digests'][url] = digest | ||
| 448 | return result | ||
| 449 | |||
| 450 | class PyPIJSONLocator(Locator): | ||
| 451 | """ | ||
| 452 | This locator uses PyPI's JSON interface. It's very limited in functionality | ||
| 453 | and probably not worth using. | ||
| 454 | """ | ||
| 455 | def __init__(self, url, **kwargs): | ||
| 456 | super(PyPIJSONLocator, self).__init__(**kwargs) | ||
| 457 | self.base_url = ensure_slash(url) | ||
| 458 | |||
| 459 | def get_distribution_names(self): | ||
| 460 | """ | ||
| 461 | Return all the distribution names known to this locator. | ||
| 462 | """ | ||
| 463 | raise NotImplementedError('Not available from this locator') | ||
| 464 | |||
| 465 | def _get_project(self, name): | ||
| 466 | result = {'urls': {}, 'digests': {}} | ||
| 467 | url = urljoin(self.base_url, '%s/json' % quote(name)) | ||
| 468 | try: | ||
| 469 | resp = self.opener.open(url) | ||
| 470 | data = resp.read().decode() # for now | ||
| 471 | d = json.loads(data) | ||
| 472 | md = Metadata(scheme=self.scheme) | ||
| 473 | data = d['info'] | ||
| 474 | md.name = data['name'] | ||
| 475 | md.version = data['version'] | ||
| 476 | md.license = data.get('license') | ||
| 477 | md.keywords = data.get('keywords', []) | ||
| 478 | md.summary = data.get('summary') | ||
| 479 | dist = Distribution(md) | ||
| 480 | dist.locator = self | ||
| 481 | urls = d['urls'] | ||
| 482 | result[md.version] = dist | ||
| 483 | for info in d['urls']: | ||
| 484 | url = info['url'] | ||
| 485 | dist.download_urls.add(url) | ||
| 486 | dist.digests[url] = self._get_digest(info) | ||
| 487 | result['urls'].setdefault(md.version, set()).add(url) | ||
| 488 | result['digests'][url] = self._get_digest(info) | ||
| 489 | # Now get other releases | ||
| 490 | for version, infos in d['releases'].items(): | ||
| 491 | if version == md.version: | ||
| 492 | continue # already done | ||
| 493 | omd = Metadata(scheme=self.scheme) | ||
| 494 | omd.name = md.name | ||
| 495 | omd.version = version | ||
| 496 | odist = Distribution(omd) | ||
| 497 | odist.locator = self | ||
| 498 | result[version] = odist | ||
| 499 | for info in infos: | ||
| 500 | url = info['url'] | ||
| 501 | odist.download_urls.add(url) | ||
| 502 | odist.digests[url] = self._get_digest(info) | ||
| 503 | result['urls'].setdefault(version, set()).add(url) | ||
| 504 | result['digests'][url] = self._get_digest(info) | ||
| 505 | # for info in urls: | ||
| 506 | # md.source_url = info['url'] | ||
| 507 | # dist.digest = self._get_digest(info) | ||
| 508 | # dist.locator = self | ||
| 509 | # for info in urls: | ||
| 510 | # url = info['url'] | ||
| 511 | # result['urls'].setdefault(md.version, set()).add(url) | ||
| 512 | # result['digests'][url] = self._get_digest(info) | ||
| 513 | except Exception as e: | ||
| 514 | self.errors.put(text_type(e)) | ||
| 515 | logger.exception('JSON fetch failed: %s', e) | ||
| 516 | return result | ||
| 517 | |||
| 518 | |||
| 519 | class Page(object): | ||
| 520 | """ | ||
| 521 | This class represents a scraped HTML page. | ||
| 522 | """ | ||
| 523 | # The following slightly hairy-looking regex just looks for the contents of | ||
| 524 | # an anchor link, which has an attribute "href" either immediately preceded | ||
| 525 | # or immediately followed by a "rel" attribute. The attribute values can be | ||
| 526 | # declared with double quotes, single quotes or no quotes - which leads to | ||
| 527 | # the length of the expression. | ||
| 528 | _href = re.compile(""" | ||
| 529 | (rel\\s*=\\s*(?:"(?P<rel1>[^"]*)"|'(?P<rel2>[^']*)'|(?P<rel3>[^>\\s\n]*))\\s+)? | ||
| 530 | href\\s*=\\s*(?:"(?P<url1>[^"]*)"|'(?P<url2>[^']*)'|(?P<url3>[^>\\s\n]*)) | ||
| 531 | (\\s+rel\\s*=\\s*(?:"(?P<rel4>[^"]*)"|'(?P<rel5>[^']*)'|(?P<rel6>[^>\\s\n]*)))? | ||
| 532 | """, re.I | re.S | re.X) | ||
| 533 | _base = re.compile(r"""<base\s+href\s*=\s*['"]?([^'">]+)""", re.I | re.S) | ||
| 534 | |||
| 535 | def __init__(self, data, url): | ||
| 536 | """ | ||
| 537 | Initialise an instance with the Unicode page contents and the URL they | ||
| 538 | came from. | ||
| 539 | """ | ||
| 540 | self.data = data | ||
| 541 | self.base_url = self.url = url | ||
| 542 | m = self._base.search(self.data) | ||
| 543 | if m: | ||
| 544 | self.base_url = m.group(1) | ||
| 545 | |||
| 546 | _clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I) | ||
| 547 | |||
| 548 | @cached_property | ||
| 549 | def links(self): | ||
| 550 | """ | ||
| 551 | Return the URLs of all the links on a page together with information | ||
| 552 | about their "rel" attribute, for determining which ones to treat as | ||
| 553 | downloads and which ones to queue for further scraping. | ||
| 554 | """ | ||
| 555 | def clean(url): | ||
| 556 | "Tidy up a URL." | ||
| 557 | scheme, netloc, path, params, query, frag = urlparse(url) | ||
| 558 | return urlunparse((scheme, netloc, quote(path), | ||
| 559 | params, query, frag)) | ||
| 560 | |||
| 561 | result = set() | ||
| 562 | for match in self._href.finditer(self.data): | ||
| 563 | d = match.groupdict('') | ||
| 564 | rel = (d['rel1'] or d['rel2'] or d['rel3'] or | ||
| 565 | d['rel4'] or d['rel5'] or d['rel6']) | ||
| 566 | url = d['url1'] or d['url2'] or d['url3'] | ||
| 567 | url = urljoin(self.base_url, url) | ||
| 568 | url = unescape(url) | ||
| 569 | url = self._clean_re.sub(lambda m: '%%%2x' % ord(m.group(0)), url) | ||
| 570 | result.add((url, rel)) | ||
| 571 | # We sort the result, hoping to bring the most recent versions | ||
| 572 | # to the front | ||
| 573 | result = sorted(result, key=lambda t: t[0], reverse=True) | ||
| 574 | return result | ||
| 575 | |||
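A minimal sketch of the scraping primitives above, using a hand-written anchor tag; the host and paths are illustrative:

    from pip._vendor.distlib.locators import Page

    html = '<a rel="download" href="/packages/foo-1.0.tar.gz">foo</a>'
    page = Page(html, 'https://example.org/simple/foo/')
    for url, rel in page.links:
        print(rel, url)   # download https://example.org/packages/foo-1.0.tar.gz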
| 576 | |||
| 577 | class SimpleScrapingLocator(Locator): | ||
| 578 | """ | ||
| 579 | A locator which scrapes HTML pages to locate downloads for a distribution. | ||
| 580 | This runs multiple threads to do the I/O; performance is at least as good | ||
| 581 | as pip's PackageFinder, which works in an analogous fashion. | ||
| 582 | """ | ||
| 583 | |||
| 584 | # These are used to deal with various Content-Encoding schemes. | ||
| 585 | decoders = { | ||
| 586 | 'deflate': zlib.decompress, | ||
| 587 | 'gzip': lambda b: gzip.GzipFile(fileobj=BytesIO(b)).read(), | ||
| 588 | 'none': lambda b: b, | ||
| 589 | } | ||
| 590 | |||
| 591 | def __init__(self, url, timeout=None, num_workers=10, **kwargs): | ||
| 592 | """ | ||
| 593 | Initialise an instance. | ||
| 594 | :param url: The root URL to use for scraping. | ||
| 595 | :param timeout: The timeout, in seconds, to be applied to requests. | ||
| 596 | This defaults to ``None`` (no timeout specified). | ||
| 597 | :param num_workers: The number of worker threads to use for I/O. | ||
| 598 | This defaults to 10. | ||
| 599 | :param kwargs: Passed to the superclass. | ||
| 600 | """ | ||
| 601 | super(SimpleScrapingLocator, self).__init__(**kwargs) | ||
| 602 | self.base_url = ensure_slash(url) | ||
| 603 | self.timeout = timeout | ||
| 604 | self._page_cache = {} | ||
| 605 | self._seen = set() | ||
| 606 | self._to_fetch = queue.Queue() | ||
| 607 | self._bad_hosts = set() | ||
| 608 | self.skip_externals = False | ||
| 609 | self.num_workers = num_workers | ||
| 610 | self._lock = threading.RLock() | ||
| 611 | # See issue #45: we need to be resilient when the locator is used | ||
| 612 | # in a thread, e.g. with concurrent.futures. We can't use self._lock | ||
| 613 | # as it is for coordinating our internal threads - the ones created | ||
| 614 | # in _prepare_threads. | ||
| 615 | self._gplock = threading.RLock() | ||
| 616 | |||
| 617 | def _prepare_threads(self): | ||
| 618 | """ | ||
| 619 | Threads are created only when get_project is called, and terminate | ||
| 620 | before it returns. They are there primarily to parallelise I/O (i.e. | ||
| 621 | fetching web pages). | ||
| 622 | """ | ||
| 623 | self._threads = [] | ||
| 624 | for i in range(self.num_workers): | ||
| 625 | t = threading.Thread(target=self._fetch) | ||
| 626 | t.setDaemon(True) | ||
| 627 | t.start() | ||
| 628 | self._threads.append(t) | ||
| 629 | |||
| 630 | def _wait_threads(self): | ||
| 631 | """ | ||
| 632 | Tell all the threads to terminate (by sending a sentinel value) and | ||
| 633 | wait for them to do so. | ||
| 634 | """ | ||
| 635 | # Note that you need two loops, since you can't say which | ||
| 636 | # thread will get each sentinel | ||
| 637 | for t in self._threads: | ||
| 638 | self._to_fetch.put(None) # sentinel | ||
| 639 | for t in self._threads: | ||
| 640 | t.join() | ||
| 641 | self._threads = [] | ||
| 642 | |||
| 643 | def _get_project(self, name): | ||
| 644 | result = {'urls': {}, 'digests': {}} | ||
| 645 | with self._gplock: | ||
| 646 | self.result = result | ||
| 647 | self.project_name = name | ||
| 648 | url = urljoin(self.base_url, '%s/' % quote(name)) | ||
| 649 | self._seen.clear() | ||
| 650 | self._page_cache.clear() | ||
| 651 | self._prepare_threads() | ||
| 652 | try: | ||
| 653 | logger.debug('Queueing %s', url) | ||
| 654 | self._to_fetch.put(url) | ||
| 655 | self._to_fetch.join() | ||
| 656 | finally: | ||
| 657 | self._wait_threads() | ||
| 658 | del self.result | ||
| 659 | return result | ||
| 660 | |||
| 661 | platform_dependent = re.compile(r'\b(linux-(i\d86|x86_64|arm\w+)|' | ||
| 662 | r'win(32|-amd64)|macosx-?\d+)\b', re.I) | ||
| 663 | |||
| 664 | def _is_platform_dependent(self, url): | ||
| 665 | """ | ||
| 666 | Does a URL refer to a platform-specific download? | ||
| 667 | """ | ||
| 668 | return self.platform_dependent.search(url) | ||
| 669 | |||
| 670 | def _process_download(self, url): | ||
| 671 | """ | ||
| 672 | See if a URL is a suitable download for a project. | ||
| 673 | |||
| 674 | If it is, register information in the result dictionary (for | ||
| 675 | _get_project) about the specific version it's for. | ||
| 676 | |||
| 677 | Note that the return value isn't actually used other than as a boolean | ||
| 678 | value. | ||
| 679 | """ | ||
| 680 | if self._is_platform_dependent(url): | ||
| 681 | info = None | ||
| 682 | else: | ||
| 683 | info = self.convert_url_to_download_info(url, self.project_name) | ||
| 684 | logger.debug('process_download: %s -> %s', url, info) | ||
| 685 | if info: | ||
| 686 | with self._lock: # needed because self.result is shared | ||
| 687 | self._update_version_data(self.result, info) | ||
| 688 | return info | ||
| 689 | |||
| 690 | def _should_queue(self, link, referrer, rel): | ||
| 691 | """ | ||
| 692 | Determine whether a link URL from a referring page and with a | ||
| 693 | particular "rel" attribute should be queued for scraping. | ||
| 694 | """ | ||
| 695 | scheme, netloc, path, _, _, _ = urlparse(link) | ||
| 696 | if path.endswith(self.source_extensions + self.binary_extensions + | ||
| 697 | self.excluded_extensions): | ||
| 698 | result = False | ||
| 699 | elif self.skip_externals and not link.startswith(self.base_url): | ||
| 700 | result = False | ||
| 701 | elif not referrer.startswith(self.base_url): | ||
| 702 | result = False | ||
| 703 | elif rel not in ('homepage', 'download'): | ||
| 704 | result = False | ||
| 705 | elif scheme not in ('http', 'https', 'ftp'): | ||
| 706 | result = False | ||
| 707 | elif self._is_platform_dependent(link): | ||
| 708 | result = False | ||
| 709 | else: | ||
| 710 | host = netloc.split(':', 1)[0] | ||
| 711 | if host.lower() == 'localhost': | ||
| 712 | result = False | ||
| 713 | else: | ||
| 714 | result = True | ||
| 715 | logger.debug('should_queue: %s (%s) from %s -> %s', link, rel, | ||
| 716 | referrer, result) | ||
| 717 | return result | ||
| 718 | |||
| 719 | def _fetch(self): | ||
| 720 | """ | ||
| 721 | Get a URL to fetch from the work queue, get the HTML page, examine its | ||
| 722 | links for download candidates and candidates for further scraping. | ||
| 723 | |||
| 724 | This is a handy method to run in a thread. | ||
| 725 | """ | ||
| 726 | while True: | ||
| 727 | url = self._to_fetch.get() | ||
| 728 | try: | ||
| 729 | if url: | ||
| 730 | page = self.get_page(url) | ||
| 731 | if page is None: # e.g. after an error | ||
| 732 | continue | ||
| 733 | for link, rel in page.links: | ||
| 734 | if link not in self._seen: | ||
| 735 | try: | ||
| 736 | self._seen.add(link) | ||
| 737 | if (not self._process_download(link) and | ||
| 738 | self._should_queue(link, url, rel)): | ||
| 739 | logger.debug('Queueing %s from %s', link, url) | ||
| 740 | self._to_fetch.put(link) | ||
| 741 | except MetadataInvalidError: # e.g. invalid versions | ||
| 742 | pass | ||
| 743 | except Exception as e: # pragma: no cover | ||
| 744 | self.errors.put(text_type(e)) | ||
| 745 | finally: | ||
| 746 | # always do this, to avoid hangs :-) | ||
| 747 | self._to_fetch.task_done() | ||
| 748 | if not url: | ||
| 749 | #logger.debug('Sentinel seen, quitting.') | ||
| 750 | break | ||
| 751 | |||
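The ``_fetch``/``_wait_threads`` pair implements the classic sentinel-shutdown pattern for ``queue.Queue`` workers; a standalone sketch of the same idea (names are illustrative, not from this module):

    import queue
    import threading

    q = queue.Queue()

    def worker():
        while True:
            item = q.get()
            try:
                if item is not None:
                    pass  # process item here; may q.put() follow-up work
            finally:
                q.task_done()  # always called, so q.join() cannot hang
            if item is None:
                break          # sentinel seen: exit the thread

    threads = [threading.Thread(target=worker, daemon=True) for _ in range(4)]
    for t in threads:
        t.start()
    q.put('some work')
    q.join()                   # block until all queued work is accounted for
    for t in threads:
        q.put(None)            # one sentinel per worker...
    for t in threads:
        t.join()               # ...then wait for each to exit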
| 752 | def get_page(self, url): | ||
| 753 | """ | ||
| 754 | Get the HTML for a URL, possibly from an in-memory cache. | ||
| 755 | |||
| 756 | XXX TODO Note: this cache is never actually cleared. It's assumed that | ||
| 757 | the data won't get stale over the lifetime of a locator instance (not | ||
| 758 | necessarily true for the default_locator). | ||
| 759 | """ | ||
| 760 | # http://peak.telecommunity.com/DevCenter/EasyInstall#package-index-api | ||
| 761 | scheme, netloc, path, _, _, _ = urlparse(url) | ||
| 762 | if scheme == 'file' and os.path.isdir(url2pathname(path)): | ||
| 763 | url = urljoin(ensure_slash(url), 'index.html') | ||
| 764 | |||
| 765 | if url in self._page_cache: | ||
| 766 | result = self._page_cache[url] | ||
| 767 | logger.debug('Returning %s from cache: %s', url, result) | ||
| 768 | else: | ||
| 769 | host = netloc.split(':', 1)[0] | ||
| 770 | result = None | ||
| 771 | if host in self._bad_hosts: | ||
| 772 | logger.debug('Skipping %s due to bad host %s', url, host) | ||
| 773 | else: | ||
| 774 | req = Request(url, headers={'Accept-encoding': 'identity'}) | ||
| 775 | try: | ||
| 776 | logger.debug('Fetching %s', url) | ||
| 777 | resp = self.opener.open(req, timeout=self.timeout) | ||
| 778 | logger.debug('Fetched %s', url) | ||
| 779 | headers = resp.info() | ||
| 780 | content_type = headers.get('Content-Type', '') | ||
| 781 | if HTML_CONTENT_TYPE.match(content_type): | ||
| 782 | final_url = resp.geturl() | ||
| 783 | data = resp.read() | ||
| 784 | encoding = headers.get('Content-Encoding') | ||
| 785 | if encoding: | ||
| 786 | decoder = self.decoders[encoding] # fail if not found | ||
| 787 | data = decoder(data) | ||
| 788 | encoding = 'utf-8' | ||
| 789 | m = CHARSET.search(content_type) | ||
| 790 | if m: | ||
| 791 | encoding = m.group(1) | ||
| 792 | try: | ||
| 793 | data = data.decode(encoding) | ||
| 794 | except UnicodeError: # pragma: no cover | ||
| 795 | data = data.decode('latin-1') # fallback | ||
| 796 | result = Page(data, final_url) | ||
| 797 | self._page_cache[final_url] = result | ||
| 798 | except HTTPError as e: | ||
| 799 | if e.code != 404: | ||
| 800 | logger.exception('Fetch failed: %s: %s', url, e) | ||
| 801 | except URLError as e: # pragma: no cover | ||
| 802 | logger.exception('Fetch failed: %s: %s', url, e) | ||
| 803 | with self._lock: | ||
| 804 | self._bad_hosts.add(host) | ||
| 805 | except Exception as e: # pragma: no cover | ||
| 806 | logger.exception('Fetch failed: %s: %s', url, e) | ||
| 807 | finally: | ||
| 808 | self._page_cache[url] = result # even if None (failure) | ||
| 809 | return result | ||
| 810 | |||
| 811 | _distname_re = re.compile('<a href=[^>]*>([^<]+)<') | ||
| 812 | |||
| 813 | def get_distribution_names(self): | ||
| 814 | """ | ||
| 815 | Return all the distribution names known to this locator. | ||
| 816 | """ | ||
| 817 | result = set() | ||
| 818 | page = self.get_page(self.base_url) | ||
| 819 | if not page: | ||
| 820 | raise DistlibException('Unable to get %s' % self.base_url) | ||
| 821 | for match in self._distname_re.finditer(page.data): | ||
| 822 | result.add(match.group(1)) | ||
| 823 | return result | ||
| 824 | |||
| 825 | class DirectoryLocator(Locator): | ||
| 826 | """ | ||
| 827 | This class locates distributions in a directory tree. | ||
| 828 | """ | ||
| 829 | |||
| 830 | def __init__(self, path, **kwargs): | ||
| 831 | """ | ||
| 832 | Initialise an instance. | ||
| 833 | :param path: The root of the directory tree to search. | ||
| 834 | :param kwargs: Passed to the superclass constructor, | ||
| 835 | except for: | ||
| 836 | * recursive - if True (the default), subdirectories are | ||
| 837 | recursed into. If False, only the top-level directory | ||
| 838 | is searched. | ||
| 839 | """ | ||
| 840 | self.recursive = kwargs.pop('recursive', True) | ||
| 841 | super(DirectoryLocator, self).__init__(**kwargs) | ||
| 842 | path = os.path.abspath(path) | ||
| 843 | if not os.path.isdir(path): # pragma: no cover | ||
| 844 | raise DistlibException('Not a directory: %r' % path) | ||
| 845 | self.base_dir = path | ||
| 846 | |||
| 847 | def should_include(self, filename, parent): | ||
| 848 | """ | ||
| 849 | Should a filename be considered as a candidate for a distribution | ||
| 850 | archive? As well as the filename, the directory which contains it | ||
| 851 | is provided, though not used by the current implementation. | ||
| 852 | """ | ||
| 853 | return filename.endswith(self.downloadable_extensions) | ||
| 854 | |||
| 855 | def _get_project(self, name): | ||
| 856 | result = {'urls': {}, 'digests': {}} | ||
| 857 | for root, dirs, files in os.walk(self.base_dir): | ||
| 858 | for fn in files: | ||
| 859 | if self.should_include(fn, root): | ||
| 860 | fn = os.path.join(root, fn) | ||
| 861 | url = urlunparse(('file', '', | ||
| 862 | pathname2url(os.path.abspath(fn)), | ||
| 863 | '', '', '')) | ||
| 864 | info = self.convert_url_to_download_info(url, name) | ||
| 865 | if info: | ||
| 866 | self._update_version_data(result, info) | ||
| 867 | if not self.recursive: | ||
| 868 | break | ||
| 869 | return result | ||
| 870 | |||
| 871 | def get_distribution_names(self): | ||
| 872 | """ | ||
| 873 | Return all the distribution names known to this locator. | ||
| 874 | """ | ||
| 875 | result = set() | ||
| 876 | for root, dirs, files in os.walk(self.base_dir): | ||
| 877 | for fn in files: | ||
| 878 | if self.should_include(fn, root): | ||
| 879 | fn = os.path.join(root, fn) | ||
| 880 | url = urlunparse(('file', '', | ||
| 881 | pathname2url(os.path.abspath(fn)), | ||
| 882 | '', '', '')) | ||
| 883 | info = self.convert_url_to_download_info(url, None) | ||
| 884 | if info: | ||
| 885 | result.add(info['name']) | ||
| 886 | if not self.recursive: | ||
| 887 | break | ||
| 888 | return result | ||
| 889 | |||
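A usage sketch against a hypothetical local archive directory:

    from pip._vendor.distlib.locators import DirectoryLocator

    locator = DirectoryLocator('/srv/wheelhouse', recursive=False)  # path is illustrative
    dist = locator.locate('foo (>= 1.0)')
    if dist is not None:
        print(dist.name_and_version, dist.source_url)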
| 890 | class JSONLocator(Locator): | ||
| 891 | """ | ||
| 892 | This locator uses special extended metadata (not available on PyPI) and is | ||
| 893 | the basis of performant dependency resolution in distlib. Other locators | ||
| 894 | require archive downloads before dependencies can be determined! As you | ||
| 895 | might imagine, that can be slow. | ||
| 896 | """ | ||
| 897 | def get_distribution_names(self): | ||
| 898 | """ | ||
| 899 | Return all the distribution names known to this locator. | ||
| 900 | """ | ||
| 901 | raise NotImplementedError('Not available from this locator') | ||
| 902 | |||
| 903 | def _get_project(self, name): | ||
| 904 | result = {'urls': {}, 'digests': {}} | ||
| 905 | data = get_project_data(name) | ||
| 906 | if data: | ||
| 907 | for info in data.get('files', []): | ||
| 908 | if info['ptype'] != 'sdist' or info['pyversion'] != 'source': | ||
| 909 | continue | ||
| 910 | # We don't store summary in project metadata as it makes | ||
| 911 | # the data bigger for no benefit during dependency | ||
| 912 | # resolution | ||
| 913 | dist = make_dist(data['name'], info['version'], | ||
| 914 | summary=data.get('summary', | ||
| 915 | 'Placeholder for summary'), | ||
| 916 | scheme=self.scheme) | ||
| 917 | md = dist.metadata | ||
| 918 | md.source_url = info['url'] | ||
| 919 | # TODO SHA256 digest | ||
| 920 | if 'digest' in info and info['digest']: | ||
| 921 | dist.digest = ('md5', info['digest']) | ||
| 922 | md.dependencies = info.get('requirements', {}) | ||
| 923 | dist.exports = info.get('exports', {}) | ||
| 924 | result[dist.version] = dist | ||
| 925 | result['urls'].setdefault(dist.version, set()).add(info['url']) | ||
| 926 | return result | ||
| 927 | |||
| 928 | class DistPathLocator(Locator): | ||
| 929 | """ | ||
| 930 | This locator finds installed distributions in a path. It can be useful for | ||
| 931 | adding to an :class:`AggregatingLocator`. | ||
| 932 | """ | ||
| 933 | def __init__(self, distpath, **kwargs): | ||
| 934 | """ | ||
| 935 | Initialise an instance. | ||
| 936 | |||
| 937 | :param distpath: A :class:`DistributionPath` instance to search. | ||
| 938 | """ | ||
| 939 | super(DistPathLocator, self).__init__(**kwargs) | ||
| 940 | assert isinstance(distpath, DistributionPath) | ||
| 941 | self.distpath = distpath | ||
| 942 | |||
| 943 | def _get_project(self, name): | ||
| 944 | dist = self.distpath.get_distribution(name) | ||
| 945 | if dist is None: | ||
| 946 | result = {'urls': {}, 'digests': {}} | ||
| 947 | else: | ||
| 948 | result = { | ||
| 949 | dist.version: dist, | ||
| 950 | 'urls': {dist.version: set([dist.source_url])}, | ||
| 951 | 'digests': {dist.version: set([None])} | ||
| 952 | } | ||
| 953 | return result | ||
| 954 | |||
| 955 | |||
| 956 | class AggregatingLocator(Locator): | ||
| 957 | """ | ||
| 958 | This class allows you to chain and/or merge a list of locators. | ||
| 959 | """ | ||
| 960 | def __init__(self, *locators, **kwargs): | ||
| 961 | """ | ||
| 962 | Initialise an instance. | ||
| 963 | |||
| 964 | :param locators: The list of locators to search. | ||
| 965 | :param kwargs: Passed to the superclass constructor, | ||
| 966 | except for: | ||
| 967 | * merge - if False (the default), the first successful | ||
| 968 | search from any of the locators is returned. If True, | ||
| 969 | the results from all locators are merged (this can be | ||
| 970 | slow). | ||
| 971 | """ | ||
| 972 | self.merge = kwargs.pop('merge', False) | ||
| 973 | self.locators = locators | ||
| 974 | super(AggregatingLocator, self).__init__(**kwargs) | ||
| 975 | |||
| 976 | def clear_cache(self): | ||
| 977 | super(AggregatingLocator, self).clear_cache() | ||
| 978 | for locator in self.locators: | ||
| 979 | locator.clear_cache() | ||
| 980 | |||
| 981 | def _set_scheme(self, value): | ||
| 982 | self._scheme = value | ||
| 983 | for locator in self.locators: | ||
| 984 | locator.scheme = value | ||
| 985 | |||
| 986 | scheme = property(Locator.scheme.fget, _set_scheme) | ||
| 987 | |||
| 988 | def _get_project(self, name): | ||
| 989 | result = {} | ||
| 990 | for locator in self.locators: | ||
| 991 | d = locator.get_project(name) | ||
| 992 | if d: | ||
| 993 | if self.merge: | ||
| 994 | files = result.get('urls', {}) | ||
| 995 | digests = result.get('digests', {}) | ||
| 996 | # next line could overwrite result['urls'], result['digests'] | ||
| 997 | result.update(d) | ||
| 998 | df = result.get('urls') | ||
| 999 | if files and df: | ||
| 1000 | for k, v in files.items(): | ||
| 1001 | if k in df: | ||
| 1002 | df[k] |= v | ||
| 1003 | else: | ||
| 1004 | df[k] = v | ||
| 1005 | dd = result.get('digests') | ||
| 1006 | if digests and dd: | ||
| 1007 | dd.update(digests) | ||
| 1008 | else: | ||
| 1009 | # See issue #18. If any dists are found and we're looking | ||
| 1010 | # for specific constraints, we only return something if | ||
| 1011 | # a match is found. For example, if a DirectoryLocator | ||
| 1012 | # returns just foo (1.0) while we're looking for | ||
| 1013 | # foo (>= 2.0), we'll pretend there was nothing there so | ||
| 1014 | # that subsequent locators can be queried. Otherwise we | ||
| 1015 | # would just return foo (1.0) which would then lead to a | ||
| 1016 | # failure to find foo (>= 2.0), because other locators | ||
| 1017 | # weren't searched. Note that this only matters when | ||
| 1018 | # merge=False. | ||
| 1019 | if self.matcher is None: | ||
| 1020 | found = True | ||
| 1021 | else: | ||
| 1022 | found = False | ||
| 1023 | for k in d: | ||
| 1024 | if self.matcher.match(k): | ||
| 1025 | found = True | ||
| 1026 | break | ||
| 1027 | if found: | ||
| 1028 | result = d | ||
| 1029 | break | ||
| 1030 | return result | ||
| 1031 | |||
| 1032 | def get_distribution_names(self): | ||
| 1033 | """ | ||
| 1034 | Return all the distribution names known to this locator. | ||
| 1035 | """ | ||
| 1036 | result = set() | ||
| 1037 | for locator in self.locators: | ||
| 1038 | try: | ||
| 1039 | result |= locator.get_distribution_names() | ||
| 1040 | except NotImplementedError: | ||
| 1041 | pass | ||
| 1042 | return result | ||
| 1043 | |||
| 1044 | |||
| 1045 | # We use a legacy scheme simply because most of the dists on PyPI use legacy | ||
| 1046 | # versions which don't conform to PEP 426 / PEP 440. | ||
| 1047 | default_locator = AggregatingLocator( | ||
| 1048 | JSONLocator(), | ||
| 1049 | SimpleScrapingLocator('https://pypi.python.org/simple/', | ||
| 1050 | timeout=3.0), | ||
| 1051 | scheme='legacy') | ||
| 1052 | |||
| 1053 | locate = default_locator.locate | ||
| 1054 | |||
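A sketch of the module-level convenience API; this needs network access, and the legacy pypi.python.org endpoints baked in above have since been retired:

    from pip._vendor.distlib.locators import locate

    dist = locate('requests (>= 2.0)')   # tries JSON metadata, then scraping
    if dist is not None:
        print(dist.name_and_version)
        for url in sorted(dist.download_urls):
            print(' ', url)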
| 1055 | NAME_VERSION_RE = re.compile(r'(?P<name>[\w-]+)\s*' | ||
| 1056 | r'\(\s*(==\s*)?(?P<ver>[^)]+)\)$') | ||
| 1057 | |||
| 1058 | class DependencyFinder(object): | ||
| 1059 | """ | ||
| 1060 | Locate dependencies for distributions. | ||
| 1061 | """ | ||
| 1062 | |||
| 1063 | def __init__(self, locator=None): | ||
| 1064 | """ | ||
| 1065 | Initialise an instance, using the specified locator | ||
| 1066 | to locate distributions. | ||
| 1067 | """ | ||
| 1068 | self.locator = locator or default_locator | ||
| 1069 | self.scheme = get_scheme(self.locator.scheme) | ||
| 1070 | |||
| 1071 | def add_distribution(self, dist): | ||
| 1072 | """ | ||
| 1073 | Add a distribution to the finder. This will update internal information | ||
| 1074 | about who provides what. | ||
| 1075 | :param dist: The distribution to add. | ||
| 1076 | """ | ||
| 1077 | logger.debug('adding distribution %s', dist) | ||
| 1078 | name = dist.key | ||
| 1079 | self.dists_by_name[name] = dist | ||
| 1080 | self.dists[(name, dist.version)] = dist | ||
| 1081 | for p in dist.provides: | ||
| 1082 | name, version = parse_name_and_version(p) | ||
| 1083 | logger.debug('Add to provided: %s, %s, %s', name, version, dist) | ||
| 1084 | self.provided.setdefault(name, set()).add((version, dist)) | ||
| 1085 | |||
| 1086 | def remove_distribution(self, dist): | ||
| 1087 | """ | ||
| 1088 | Remove a distribution from the finder. This will update internal | ||
| 1089 | information about who provides what. | ||
| 1090 | :param dist: The distribution to remove. | ||
| 1091 | """ | ||
| 1092 | logger.debug('removing distribution %s', dist) | ||
| 1093 | name = dist.key | ||
| 1094 | del self.dists_by_name[name] | ||
| 1095 | del self.dists[(name, dist.version)] | ||
| 1096 | for p in dist.provides: | ||
| 1097 | name, version = parse_name_and_version(p) | ||
| 1098 | logger.debug('Remove from provided: %s, %s, %s', name, version, dist) | ||
| 1099 | s = self.provided[name] | ||
| 1100 | s.remove((version, dist)) | ||
| 1101 | if not s: | ||
| 1102 | del self.provided[name] | ||
| 1103 | |||
| 1104 | def get_matcher(self, reqt): | ||
| 1105 | """ | ||
| 1106 | Get a version matcher for a requirement. | ||
| 1107 | :param reqt: The requirement | ||
| 1108 | :type reqt: str | ||
| 1109 | :return: A version matcher (an instance of | ||
| 1110 | :class:`distlib.version.Matcher`). | ||
| 1111 | """ | ||
| 1112 | try: | ||
| 1113 | matcher = self.scheme.matcher(reqt) | ||
| 1114 | except UnsupportedVersionError: # pragma: no cover | ||
| 1115 | # XXX compat-mode if cannot read the version | ||
| 1116 | name = reqt.split()[0] | ||
| 1117 | matcher = self.scheme.matcher(name) | ||
| 1118 | return matcher | ||
| 1119 | |||
| 1120 | def find_providers(self, reqt): | ||
| 1121 | """ | ||
| 1122 | Find the distributions which can fulfill a requirement. | ||
| 1123 | |||
| 1124 | :param reqt: The requirement. | ||
| 1125 | :type reqt: str | ||
| 1126 | :return: A set of distributions which can fulfill the requirement. | ||
| 1127 | """ | ||
| 1128 | matcher = self.get_matcher(reqt) | ||
| 1129 | name = matcher.key # case-insensitive | ||
| 1130 | result = set() | ||
| 1131 | provided = self.provided | ||
| 1132 | if name in provided: | ||
| 1133 | for version, provider in provided[name]: | ||
| 1134 | try: | ||
| 1135 | match = matcher.match(version) | ||
| 1136 | except UnsupportedVersionError: | ||
| 1137 | match = False | ||
| 1138 | |||
| 1139 | if match: | ||
| 1140 | result.add(provider) | ||
| 1141 | break | ||
| 1142 | return result | ||
| 1143 | |||
| 1144 | def try_to_replace(self, provider, other, problems): | ||
| 1145 | """ | ||
| 1146 | Attempt to replace one provider with another. This is typically used | ||
| 1147 | when resolving dependencies from multiple sources, e.g. A requires | ||
| 1148 | (B >= 1.0) while C requires (B >= 1.1). | ||
| 1149 | |||
| 1150 | For successful replacement, ``provider`` must meet all the requirements | ||
| 1151 | which ``other`` fulfills. | ||
| 1152 | |||
| 1153 | :param provider: The provider we are trying to replace with. | ||
| 1154 | :param other: The provider we're trying to replace. | ||
| 1155 | :param problems: If False is returned, this will contain what | ||
| 1156 | problems prevented replacement. This is currently | ||
| 1157 | a tuple of the literal string 'cantreplace', | ||
| 1158 | ``provider``, ``other`` and the set of requirements | ||
| 1159 | that ``provider`` couldn't fulfill. | ||
| 1160 | :return: True if we can replace ``other`` with ``provider``, else | ||
| 1161 | False. | ||
| 1162 | """ | ||
| 1163 | rlist = self.reqts[other] | ||
| 1164 | unmatched = set() | ||
| 1165 | for s in rlist: | ||
| 1166 | matcher = self.get_matcher(s) | ||
| 1167 | if not matcher.match(provider.version): | ||
| 1168 | unmatched.add(s) | ||
| 1169 | if unmatched: | ||
| 1170 | # can't replace other with provider | ||
| 1171 | problems.add(('cantreplace', provider, other, | ||
| 1172 | frozenset(unmatched))) | ||
| 1173 | result = False | ||
| 1174 | else: | ||
| 1175 | # can replace other with provider | ||
| 1176 | self.remove_distribution(other) | ||
| 1177 | del self.reqts[other] | ||
| 1178 | for s in rlist: | ||
| 1179 | self.reqts.setdefault(provider, set()).add(s) | ||
| 1180 | self.add_distribution(provider) | ||
| 1181 | result = True | ||
| 1182 | return result | ||
| 1183 | |||
| 1184 | def find(self, requirement, meta_extras=None, prereleases=False): | ||
| 1185 | """ | ||
| 1186 | Find a distribution and all distributions it depends on. | ||
| 1187 | |||
| 1188 | :param requirement: The requirement specifying the distribution to | ||
| 1189 | find, or a Distribution instance. | ||
| 1190 | :param meta_extras: A list of meta extras such as :test:, :build: and | ||
| 1191 | so on. | ||
| 1192 | :param prereleases: If ``True``, allow pre-release versions to be | ||
| 1193 | returned - otherwise, don't return prereleases | ||
| 1194 | unless they're all that's available. | ||
| 1195 | |||
| 1196 | Return a set of :class:`Distribution` instances and a set of | ||
| 1197 | problems. | ||
| 1198 | |||
| 1199 | The distributions returned should be such that they have the | ||
| 1200 | :attr:`required` attribute set to ``True`` if they were | ||
| 1201 | from the ``requirement`` passed to ``find()``, and they have the | ||
| 1202 | :attr:`build_time_dependency` attribute set to ``True`` unless they | ||
| 1203 | are post-installation dependencies of the ``requirement``. | ||
| 1204 | |||
| 1205 | Each problem is a tuple consisting of the string | ||
| 1206 | ``'unsatisfied'`` and the requirement which couldn't be satisfied | ||
| 1207 | by any distribution known to the locator. | ||
| 1208 | """ | ||
| 1209 | |||
| 1210 | self.provided = {} | ||
| 1211 | self.dists = {} | ||
| 1212 | self.dists_by_name = {} | ||
| 1213 | self.reqts = {} | ||
| 1214 | |||
| 1215 | meta_extras = set(meta_extras or []) | ||
| 1216 | if ':*:' in meta_extras: | ||
| 1217 | meta_extras.remove(':*:') | ||
| 1218 | # :meta: and :run: are implicitly included | ||
| 1219 | meta_extras |= set([':test:', ':build:', ':dev:']) | ||
| 1220 | |||
| 1221 | if isinstance(requirement, Distribution): | ||
| 1222 | dist = odist = requirement | ||
| 1223 | logger.debug('passed %s as requirement', odist) | ||
| 1224 | else: | ||
| 1225 | dist = odist = self.locator.locate(requirement, | ||
| 1226 | prereleases=prereleases) | ||
| 1227 | if dist is None: | ||
| 1228 | raise DistlibException('Unable to locate %r' % requirement) | ||
| 1229 | logger.debug('located %s', odist) | ||
| 1230 | dist.requested = True | ||
| 1231 | problems = set() | ||
| 1232 | todo = set([dist]) | ||
| 1233 | install_dists = set([odist]) | ||
| 1234 | while todo: | ||
| 1235 | dist = todo.pop() | ||
| 1236 | name = dist.key # case-insensitive | ||
| 1237 | if name not in self.dists_by_name: | ||
| 1238 | self.add_distribution(dist) | ||
| 1239 | else: | ||
| 1240 | #import pdb; pdb.set_trace() | ||
| 1241 | other = self.dists_by_name[name] | ||
| 1242 | if other != dist: | ||
| 1243 | self.try_to_replace(dist, other, problems) | ||
| 1244 | |||
| 1245 | ireqts = dist.run_requires | dist.meta_requires | ||
| 1246 | sreqts = dist.build_requires | ||
| 1247 | ereqts = set() | ||
| 1248 | if meta_extras and dist in install_dists: | ||
| 1249 | for key in ('test', 'build', 'dev'): | ||
| 1250 | e = ':%s:' % key | ||
| 1251 | if e in meta_extras: | ||
| 1252 | ereqts |= getattr(dist, '%s_requires' % key) | ||
| 1253 | all_reqts = ireqts | sreqts | ereqts | ||
| 1254 | for r in all_reqts: | ||
| 1255 | providers = self.find_providers(r) | ||
| 1256 | if not providers: | ||
| 1257 | logger.debug('No providers found for %r', r) | ||
| 1258 | provider = self.locator.locate(r, prereleases=prereleases) | ||
| 1259 | # If no provider is found and we didn't consider | ||
| 1260 | # prereleases, consider them now. | ||
| 1261 | if provider is None and not prereleases: | ||
| 1262 | provider = self.locator.locate(r, prereleases=True) | ||
| 1263 | if provider is None: | ||
| 1264 | logger.debug('Cannot satisfy %r', r) | ||
| 1265 | problems.add(('unsatisfied', r)) | ||
| 1266 | else: | ||
| 1267 | n, v = provider.key, provider.version | ||
| 1268 | if (n, v) not in self.dists: | ||
| 1269 | todo.add(provider) | ||
| 1270 | providers.add(provider) | ||
| 1271 | if r in ireqts and dist in install_dists: | ||
| 1272 | install_dists.add(provider) | ||
| 1273 | logger.debug('Adding %s to install_dists', | ||
| 1274 | provider.name_and_version) | ||
| 1275 | for p in providers: | ||
| 1276 | name = p.key | ||
| 1277 | if name not in self.dists_by_name: | ||
| 1278 | self.reqts.setdefault(p, set()).add(r) | ||
| 1279 | else: | ||
| 1280 | other = self.dists_by_name[name] | ||
| 1281 | if other != p: | ||
| 1282 | # see if other can be replaced by p | ||
| 1283 | self.try_to_replace(p, other, problems) | ||
| 1284 | |||
| 1285 | dists = set(self.dists.values()) | ||
| 1286 | for dist in dists: | ||
| 1287 | dist.build_time_dependency = dist not in install_dists | ||
| 1288 | if dist.build_time_dependency: | ||
| 1289 | logger.debug('%s is a build-time dependency only.', | ||
| 1290 | dist.name_and_version) | ||
| 1291 | logger.debug('find done for %s', odist) | ||
| 1292 | return dists, problems | ||
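A sketch of driving the finder end to end; the requirement is illustrative, and resolution hits the network via ``default_locator``:

    from pip._vendor.distlib.locators import DependencyFinder

    finder = DependencyFinder()               # uses default_locator
    dists, problems = finder.find('flask (== 1.0)')
    for d in sorted(dists, key=lambda d: d.key):
        flag = ' (build-time only)' if d.build_time_dependency else ''
        print(d.name_and_version + flag)
    for problem in problems:
        print('problem:', problem)            # e.g. ('unsatisfied', <requirement>)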
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/manifest.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/manifest.py new file mode 100644 index 0000000..92688d0 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/manifest.py | |||
| @@ -0,0 +1,393 @@ | |||
| 1 | # -*- coding: utf-8 -*- | ||
| 2 | # | ||
| 3 | # Copyright (C) 2012-2013 Python Software Foundation. | ||
| 4 | # See LICENSE.txt and CONTRIBUTORS.txt. | ||
| 5 | # | ||
| 6 | """ | ||
| 7 | Class representing the list of files in a distribution. | ||
| 8 | |||
| 9 | Equivalent to distutils.filelist, but fixes some problems. | ||
| 10 | """ | ||
| 11 | import fnmatch | ||
| 12 | import logging | ||
| 13 | import os | ||
| 14 | import re | ||
| 15 | import sys | ||
| 16 | |||
| 17 | from . import DistlibException | ||
| 18 | from .compat import fsdecode | ||
| 19 | from .util import convert_path | ||
| 20 | |||
| 21 | |||
| 22 | __all__ = ['Manifest'] | ||
| 23 | |||
| 24 | logger = logging.getLogger(__name__) | ||
| 25 | |||
| 26 | # a \ followed by some spaces + EOL | ||
| 27 | _COLLAPSE_PATTERN = re.compile('\\\\[ \t]*\n', re.M) | ||
| 28 | _COMMENTED_LINE = re.compile('#.*?(?=\n)|\n(?=$)', re.M | re.S) | ||
| 29 | |||
| 30 | # | ||
| 31 | # Due to the different results returned by fnmatch.translate, we need | ||
| 32 | # to do slightly different processing for Python 2.7 and 3.2 ... this also | ||
| 33 | # needs to apply from Python 3.6 onwards, where the output changed again. | ||
| 34 | # | ||
| 35 | _PYTHON_VERSION = sys.version_info[:2] | ||
| 36 | |||
| 37 | class Manifest(object): | ||
| 38 | """A list of files built by on exploring the filesystem and filtered by | ||
| 39 | applying various patterns to what we find there. | ||
| 40 | """ | ||
| 41 | |||
| 42 | def __init__(self, base=None): | ||
| 43 | """ | ||
| 44 | Initialise an instance. | ||
| 45 | |||
| 46 | :param base: The base directory to explore under. | ||
| 47 | """ | ||
| 48 | self.base = os.path.abspath(os.path.normpath(base or os.getcwd())) | ||
| 49 | self.prefix = self.base + os.sep | ||
| 50 | self.allfiles = None | ||
| 51 | self.files = set() | ||
| 52 | |||
| 53 | # | ||
| 54 | # Public API | ||
| 55 | # | ||
| 56 | |||
| 57 | def findall(self): | ||
| 58 | """Find all files under the base and set ``allfiles`` to the absolute | ||
| 59 | pathnames of files found. | ||
| 60 | """ | ||
| 61 | from stat import S_ISREG, S_ISDIR, S_ISLNK | ||
| 62 | |||
| 63 | self.allfiles = allfiles = [] | ||
| 64 | root = self.base | ||
| 65 | stack = [root] | ||
| 66 | pop = stack.pop | ||
| 67 | push = stack.append | ||
| 68 | |||
| 69 | while stack: | ||
| 70 | root = pop() | ||
| 71 | names = os.listdir(root) | ||
| 72 | |||
| 73 | for name in names: | ||
| 74 | fullname = os.path.join(root, name) | ||
| 75 | |||
| 76 | # Avoid excess stat calls -- just one will do, thank you! | ||
| 77 | stat = os.stat(fullname) | ||
| 78 | mode = stat.st_mode | ||
| 79 | if S_ISREG(mode): | ||
| 80 | allfiles.append(fsdecode(fullname)) | ||
| 81 | elif S_ISDIR(mode) and not S_ISLNK(mode): | ||
| 82 | push(fullname) | ||
| 83 | |||
| 84 | def add(self, item): | ||
| 85 | """ | ||
| 86 | Add a file to the manifest. | ||
| 87 | |||
| 88 | :param item: The pathname to add. This can be relative to the base. | ||
| 89 | """ | ||
| 90 | if not item.startswith(self.prefix): | ||
| 91 | item = os.path.join(self.base, item) | ||
| 92 | self.files.add(os.path.normpath(item)) | ||
| 93 | |||
| 94 | def add_many(self, items): | ||
| 95 | """ | ||
| 96 | Add a list of files to the manifest. | ||
| 97 | |||
| 98 | :param items: The pathnames to add. These can be relative to the base. | ||
| 99 | """ | ||
| 100 | for item in items: | ||
| 101 | self.add(item) | ||
| 102 | |||
| 103 | def sorted(self, wantdirs=False): | ||
| 104 | """ | ||
| 105 | Return sorted files in directory order; include directories too if wantdirs is true. | ||
| 106 | """ | ||
| 107 | |||
| 108 | def add_dir(dirs, d): | ||
| 109 | dirs.add(d) | ||
| 110 | logger.debug('add_dir added %s', d) | ||
| 111 | if d != self.base: | ||
| 112 | parent, _ = os.path.split(d) | ||
| 113 | assert parent not in ('', '/') | ||
| 114 | add_dir(dirs, parent) | ||
| 115 | |||
| 116 | result = set(self.files) # make a copy! | ||
| 117 | if wantdirs: | ||
| 118 | dirs = set() | ||
| 119 | for f in result: | ||
| 120 | add_dir(dirs, os.path.dirname(f)) | ||
| 121 | result |= dirs | ||
| 122 | return [os.path.join(*path_tuple) for path_tuple in | ||
| 123 | sorted(os.path.split(path) for path in result)] | ||
| 124 | |||
| 125 | def clear(self): | ||
| 126 | """Clear all collected files.""" | ||
| 127 | self.files = set() | ||
| 128 | self.allfiles = [] | ||
| 129 | |||
| 130 | def process_directive(self, directive): | ||
| 131 | """ | ||
| 132 | Process a directive which either adds some files from ``allfiles`` to | ||
| 133 | ``files``, or removes some files from ``files``. | ||
| 134 | |||
| 135 | :param directive: The directive to process. This should be in a format | ||
| 136 | compatible with distutils ``MANIFEST.in`` files: | ||
| 137 | |||
| 138 | http://docs.python.org/distutils/sourcedist.html#commands | ||
| 139 | """ | ||
| 140 | # Parse the line: split it up, make sure the right number of words | ||
| 141 | # is there, and return the relevant words. 'action' is always | ||
| 142 | # defined: it's the first word of the line. Which of the other | ||
| 143 | # three are defined depends on the action; it'll be either | ||
| 144 | # patterns, (dir and patterns), or (dirpattern). | ||
| 145 | action, patterns, thedir, dirpattern = self._parse_directive(directive) | ||
| 146 | |||
| 147 | # OK, now we know that the action is valid and we have the | ||
| 148 | # right number of words on the line for that action -- so we | ||
| 149 | # can proceed with minimal error-checking. | ||
| 150 | if action == 'include': | ||
| 151 | for pattern in patterns: | ||
| 152 | if not self._include_pattern(pattern, anchor=True): | ||
| 153 | logger.warning('no files found matching %r', pattern) | ||
| 154 | |||
| 155 | elif action == 'exclude': | ||
| 156 | for pattern in patterns: | ||
| 157 | found = self._exclude_pattern(pattern, anchor=True) | ||
| 158 | #if not found: | ||
| 159 | # logger.warning('no previously-included files ' | ||
| 160 | # 'found matching %r', pattern) | ||
| 161 | |||
| 162 | elif action == 'global-include': | ||
| 163 | for pattern in patterns: | ||
| 164 | if not self._include_pattern(pattern, anchor=False): | ||
| 165 | logger.warning('no files found matching %r ' | ||
| 166 | 'anywhere in distribution', pattern) | ||
| 167 | |||
| 168 | elif action == 'global-exclude': | ||
| 169 | for pattern in patterns: | ||
| 170 | found = self._exclude_pattern(pattern, anchor=False) | ||
| 171 | #if not found: | ||
| 172 | # logger.warning('no previously-included files ' | ||
| 173 | # 'matching %r found anywhere in ' | ||
| 174 | # 'distribution', pattern) | ||
| 175 | |||
| 176 | elif action == 'recursive-include': | ||
| 177 | for pattern in patterns: | ||
| 178 | if not self._include_pattern(pattern, prefix=thedir): | ||
| 179 | logger.warning('no files found matching %r ' | ||
| 180 | 'under directory %r', pattern, thedir) | ||
| 181 | |||
| 182 | elif action == 'recursive-exclude': | ||
| 183 | for pattern in patterns: | ||
| 184 | found = self._exclude_pattern(pattern, prefix=thedir) | ||
| 185 | #if not found: | ||
| 186 | # logger.warning('no previously-included files ' | ||
| 187 | # 'matching %r found under directory %r', | ||
| 188 | # pattern, thedir) | ||
| 189 | |||
| 190 | elif action == 'graft': | ||
| 191 | if not self._include_pattern(None, prefix=dirpattern): | ||
| 192 | logger.warning('no directories found matching %r', | ||
| 193 | dirpattern) | ||
| 194 | |||
| 195 | elif action == 'prune': | ||
| 196 | if not self._exclude_pattern(None, prefix=dirpattern): | ||
| 197 | logger.warning('no previously-included directories found ' | ||
| 198 | 'matching %r', dirpattern) | ||
| 199 | else: # pragma: no cover | ||
| 200 | # This should never happen, as it should be caught in | ||
| 201 | # _parse_template_line | ||
| 202 | raise DistlibException( | ||
| 203 | 'invalid action %r' % action) | ||
| 204 | |||
| 205 | # | ||
| 206 | # Private API | ||
| 207 | # | ||
| 208 | |||
| 209 | def _parse_directive(self, directive): | ||
| 210 | """ | ||
| 211 | Validate a directive. | ||
| 212 | :param directive: The directive to validate. | ||
| 213 | :return: A tuple of action, patterns, thedir, dir_patterns | ||
| 214 | """ | ||
| 215 | words = directive.split() | ||
| 216 | if len(words) == 1 and words[0] not in ('include', 'exclude', | ||
| 217 | 'global-include', | ||
| 218 | 'global-exclude', | ||
| 219 | 'recursive-include', | ||
| 220 | 'recursive-exclude', | ||
| 221 | 'graft', 'prune'): | ||
| 222 | # no action given, let's use the default 'include' | ||
| 223 | words.insert(0, 'include') | ||
| 224 | |||
| 225 | action = words[0] | ||
| 226 | patterns = thedir = dir_pattern = None | ||
| 227 | |||
| 228 | if action in ('include', 'exclude', | ||
| 229 | 'global-include', 'global-exclude'): | ||
| 230 | if len(words) < 2: | ||
| 231 | raise DistlibException( | ||
| 232 | '%r expects <pattern1> <pattern2> ...' % action) | ||
| 233 | |||
| 234 | patterns = [convert_path(word) for word in words[1:]] | ||
| 235 | |||
| 236 | elif action in ('recursive-include', 'recursive-exclude'): | ||
| 237 | if len(words) < 3: | ||
| 238 | raise DistlibException( | ||
| 239 | '%r expects <dir> <pattern1> <pattern2> ...' % action) | ||
| 240 | |||
| 241 | thedir = convert_path(words[1]) | ||
| 242 | patterns = [convert_path(word) for word in words[2:]] | ||
| 243 | |||
| 244 | elif action in ('graft', 'prune'): | ||
| 245 | if len(words) != 2: | ||
| 246 | raise DistlibException( | ||
| 247 | '%r expects a single <dir_pattern>' % action) | ||
| 248 | |||
| 249 | dir_pattern = convert_path(words[1]) | ||
| 250 | |||
| 251 | else: | ||
| 252 | raise DistlibException('unknown action %r' % action) | ||
| 253 | |||
| 254 | return action, patterns, thedir, dir_pattern | ||
| 255 | |||
| 256 | def _include_pattern(self, pattern, anchor=True, prefix=None, | ||
| 257 | is_regex=False): | ||
| 258 | """Select strings (presumably filenames) from 'self.files' that | ||
| 259 | match 'pattern', a Unix-style wildcard (glob) pattern. | ||
| 260 | |||
| 261 | Patterns are not quite the same as implemented by the 'fnmatch' | ||
| 262 | module: '*' and '?' match non-special characters, where "special" | ||
| 263 | is platform-dependent: slash on Unix; colon, slash, and backslash on | ||
| 264 | DOS/Windows; and colon on classic Mac OS. | ||
| 265 | |||
| 266 | If 'anchor' is true (the default), then the pattern match is more | ||
| 267 | stringent: "*.py" will match "foo.py" but not "foo/bar.py". If | ||
| 268 | 'anchor' is false, both of these will match. | ||
| 269 | |||
| 270 | If 'prefix' is supplied, then only filenames starting with 'prefix' | ||
| 271 | (itself a pattern) and ending with 'pattern', with anything in between | ||
| 272 | them, will match. 'anchor' is ignored in this case. | ||
| 273 | |||
| 274 | If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and | ||
| 275 | 'pattern' is assumed to be either a string containing a regex or a | ||
| 276 | regex object -- no translation is done, the regex is just compiled | ||
| 277 | and used as-is. | ||
| 278 | |||
| 279 | Selected strings will be added to self.files. | ||
| 280 | |||
| 281 | Return True if files are found. | ||
| 282 | """ | ||
| 283 | # XXX docstring lying about what the special chars are? | ||
| 284 | found = False | ||
| 285 | pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex) | ||
| 286 | |||
| 287 | # delayed loading of allfiles list | ||
| 288 | if self.allfiles is None: | ||
| 289 | self.findall() | ||
| 290 | |||
| 291 | for name in self.allfiles: | ||
| 292 | if pattern_re.search(name): | ||
| 293 | self.files.add(name) | ||
| 294 | found = True | ||
| 295 | return found | ||
| 296 | |||
| 297 | def _exclude_pattern(self, pattern, anchor=True, prefix=None, | ||
| 298 | is_regex=False): | ||
| 299 | """Remove strings (presumably filenames) from 'files' that match | ||
| 300 | 'pattern'. | ||
| 301 | |||
| 302 | Other parameters are the same as for '_include_pattern()', above. | ||
| 303 | The set 'self.files' is modified in place. Return True if files are | ||
| 304 | found. | ||
| 305 | |||
| 306 | This is used to allow e.g. exclusion of SCM subdirs, for instance when | ||
| 307 | packaging source distributions. | ||
| 308 | """ | ||
| 309 | found = False | ||
| 310 | pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex) | ||
| 311 | for f in list(self.files): | ||
| 312 | if pattern_re.search(f): | ||
| 313 | self.files.remove(f) | ||
| 314 | found = True | ||
| 315 | return found | ||
| 316 | |||
| 317 | def _translate_pattern(self, pattern, anchor=True, prefix=None, | ||
| 318 | is_regex=False): | ||
| 319 | """Translate a shell-like wildcard pattern to a compiled regular | ||
| 320 | expression. | ||
| 321 | |||
| 322 | Return the compiled regex. If 'is_regex' true, | ||
| 323 | then 'pattern' is directly compiled to a regex (if it's a string) | ||
| 324 | or just returned as-is (assumes it's a regex object). | ||
| 325 | """ | ||
| 326 | if is_regex: | ||
| 327 | if isinstance(pattern, str): | ||
| 328 | return re.compile(pattern) | ||
| 329 | else: | ||
| 330 | return pattern | ||
| 331 | |||
| 332 | if _PYTHON_VERSION > (3, 2): | ||
| 333 | # ditch start and end characters | ||
| 334 | start, _, end = self._glob_to_re('_').partition('_') | ||
| 335 | |||
| 336 | if pattern: | ||
| 337 | pattern_re = self._glob_to_re(pattern) | ||
| 338 | if _PYTHON_VERSION > (3, 2): | ||
| 339 | assert pattern_re.startswith(start) and pattern_re.endswith(end) | ||
| 340 | else: | ||
| 341 | pattern_re = '' | ||
| 342 | |||
| 343 | base = re.escape(os.path.join(self.base, '')) | ||
| 344 | if prefix is not None: | ||
| 345 | # ditch end of pattern character | ||
| 346 | if _PYTHON_VERSION <= (3, 2): | ||
| 347 | empty_pattern = self._glob_to_re('') | ||
| 348 | prefix_re = self._glob_to_re(prefix)[:-len(empty_pattern)] | ||
| 349 | else: | ||
| 350 | prefix_re = self._glob_to_re(prefix) | ||
| 351 | assert prefix_re.startswith(start) and prefix_re.endswith(end) | ||
| 352 | prefix_re = prefix_re[len(start): len(prefix_re) - len(end)] | ||
| 353 | sep = os.sep | ||
| 354 | if os.sep == '\\': | ||
| 355 | sep = r'\\' | ||
| 356 | if _PYTHON_VERSION <= (3, 2): | ||
| 357 | pattern_re = '^' + base + sep.join((prefix_re, | ||
| 358 | '.*' + pattern_re)) | ||
| 359 | else: | ||
| 360 | pattern_re = pattern_re[len(start): len(pattern_re) - len(end)] | ||
| 361 | pattern_re = r'%s%s%s%s.*%s%s' % (start, base, prefix_re, sep, | ||
| 362 | pattern_re, end) | ||
| 363 | else: # no prefix -- respect anchor flag | ||
| 364 | if anchor: | ||
| 365 | if _PYTHON_VERSION <= (3, 2): | ||
| 366 | pattern_re = '^' + base + pattern_re | ||
| 367 | else: | ||
| 368 | pattern_re = r'%s%s%s' % (start, base, pattern_re[len(start):]) | ||
| 369 | |||
| 370 | return re.compile(pattern_re) | ||
| 371 | |||
| 372 | def _glob_to_re(self, pattern): | ||
| 373 | """Translate a shell-like glob pattern to a regular expression. | ||
| 374 | |||
| 375 | Return a string containing the regex. Differs from | ||
| 376 | 'fnmatch.translate()' in that '*' does not match "special characters" | ||
| 377 | (which are platform-specific). | ||
| 378 | """ | ||
| 379 | pattern_re = fnmatch.translate(pattern) | ||
| 380 | |||
| 381 | # '?' and '*' in the glob pattern become '.' and '.*' in the RE, which | ||
| 382 | # IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix, | ||
| 383 | # and by extension they shouldn't match such "special characters" under | ||
| 384 | # any OS. So change all non-escaped dots in the RE to match any | ||
| 385 | # character except the special characters (currently: just os.sep). | ||
| 386 | sep = os.sep | ||
| 387 | if os.sep == '\\': | ||
| 388 | # we're using a regex to manipulate a regex, so we need | ||
| 389 | # to escape the backslash twice | ||
| 390 | sep = r'\\\\' | ||
| 391 | escaped = r'\1[^%s]' % sep | ||
| 392 | pattern_re = re.sub(r'((?<!\\)(\\\\)*)\.', escaped, pattern_re) | ||
| 393 | return pattern_re | ||
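A hedged usage sketch for `Manifest` (assuming the standalone `distlib` import path; `my_project` is a hypothetical directory). The directives are the distutils `MANIFEST.in` forms handled by `process_directive`; note that `_include_pattern` calls `findall` lazily, so the explicit call is optional:

```python
from distlib.manifest import Manifest

manifest = Manifest('my_project')   # base directory to explore
manifest.findall()                  # walk the tree, populating allfiles

manifest.process_directive('include *.py')                  # anchored match
manifest.process_directive('recursive-include docs *.rst')  # under docs/
manifest.process_directive('global-exclude *.pyc')          # remove matches

for path in manifest.sorted(wantdirs=False):
    print(path)
```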
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/markers.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/markers.py new file mode 100644 index 0000000..82fcfb8 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/markers.py | |||
| @@ -0,0 +1,131 @@ | |||
| 1 | # -*- coding: utf-8 -*- | ||
| 2 | # | ||
| 3 | # Copyright (C) 2012-2017 Vinay Sajip. | ||
| 4 | # Licensed to the Python Software Foundation under a contributor agreement. | ||
| 5 | # See LICENSE.txt and CONTRIBUTORS.txt. | ||
| 6 | # | ||
| 7 | """ | ||
| 8 | Parser for the environment markers micro-language defined in PEP 508. | ||
| 9 | """ | ||
| 10 | |||
| 11 | # Note: In PEP 345, the micro-language was Python compatible, so the ast | ||
| 12 | # module could be used to parse it. However, PEP 508 introduced operators such | ||
| 13 | # as ~= and === which aren't in Python, necessitating a different approach. | ||
| 14 | |||
| 15 | import os | ||
| 16 | import sys | ||
| 17 | import platform | ||
| 18 | import re | ||
| 19 | |||
| 20 | from .compat import python_implementation, urlparse, string_types | ||
| 21 | from .util import in_venv, parse_marker | ||
| 22 | |||
| 23 | __all__ = ['interpret'] | ||
| 24 | |||
| 25 | def _is_literal(o): | ||
| 26 | if not isinstance(o, string_types) or not o: | ||
| 27 | return False | ||
| 28 | return o[0] in '\'"' | ||
| 29 | |||
| 30 | class Evaluator(object): | ||
| 31 | """ | ||
| 32 | This class is used to evaluate marker expressions. | ||
| 33 | """ | ||
| 34 | |||
| 35 | operations = { | ||
| 36 | '==': lambda x, y: x == y, | ||
| 37 | '===': lambda x, y: x == y, | ||
| 38 | '~=': lambda x, y: x == y or x > y, | ||
| 39 | '!=': lambda x, y: x != y, | ||
| 40 | '<': lambda x, y: x < y, | ||
| 41 | '<=': lambda x, y: x == y or x < y, | ||
| 42 | '>': lambda x, y: x > y, | ||
| 43 | '>=': lambda x, y: x == y or x > y, | ||
| 44 | 'and': lambda x, y: x and y, | ||
| 45 | 'or': lambda x, y: x or y, | ||
| 46 | 'in': lambda x, y: x in y, | ||
| 47 | 'not in': lambda x, y: x not in y, | ||
| 48 | } | ||
| 49 | |||
| 50 | def evaluate(self, expr, context): | ||
| 51 | """ | ||
| 52 | Evaluate a marker expression returned by the :func:`parse_marker` | ||
| 53 | function in the specified context. | ||
| 54 | """ | ||
| 55 | if isinstance(expr, string_types): | ||
| 56 | if expr[0] in '\'"': | ||
| 57 | result = expr[1:-1] | ||
| 58 | else: | ||
| 59 | if expr not in context: | ||
| 60 | raise SyntaxError('unknown variable: %s' % expr) | ||
| 61 | result = context[expr] | ||
| 62 | else: | ||
| 63 | assert isinstance(expr, dict) | ||
| 64 | op = expr['op'] | ||
| 65 | if op not in self.operations: | ||
| 66 | raise NotImplementedError('op not implemented: %s' % op) | ||
| 67 | elhs = expr['lhs'] | ||
| 68 | erhs = expr['rhs'] | ||
| 69 | if _is_literal(expr['lhs']) and _is_literal(expr['rhs']): | ||
| 70 | raise SyntaxError('invalid comparison: %s %s %s' % (elhs, op, erhs)) | ||
| 71 | |||
| 72 | lhs = self.evaluate(elhs, context) | ||
| 73 | rhs = self.evaluate(erhs, context) | ||
| 74 | result = self.operations[op](lhs, rhs) | ||
| 75 | return result | ||
| 76 | |||
| 77 | def default_context(): | ||
| 78 | def format_full_version(info): | ||
| 79 | version = '%s.%s.%s' % (info.major, info.minor, info.micro) | ||
| 80 | kind = info.releaselevel | ||
| 81 | if kind != 'final': | ||
| 82 | version += kind[0] + str(info.serial) | ||
| 83 | return version | ||
| 84 | |||
| 85 | if hasattr(sys, 'implementation'): | ||
| 86 | implementation_version = format_full_version(sys.implementation.version) | ||
| 87 | implementation_name = sys.implementation.name | ||
| 88 | else: | ||
| 89 | implementation_version = '0' | ||
| 90 | implementation_name = '' | ||
| 91 | |||
| 92 | result = { | ||
| 93 | 'implementation_name': implementation_name, | ||
| 94 | 'implementation_version': implementation_version, | ||
| 95 | 'os_name': os.name, | ||
| 96 | 'platform_machine': platform.machine(), | ||
| 97 | 'platform_python_implementation': platform.python_implementation(), | ||
| 98 | 'platform_release': platform.release(), | ||
| 99 | 'platform_system': platform.system(), | ||
| 100 | 'platform_version': platform.version(), | ||
| 101 | 'platform_in_venv': str(in_venv()), | ||
| 102 | 'python_full_version': platform.python_version(), | ||
| 103 | 'python_version': platform.python_version()[:3], | ||
| 104 | 'sys_platform': sys.platform, | ||
| 105 | } | ||
| 106 | return result | ||
| 107 | |||
| 108 | DEFAULT_CONTEXT = default_context() | ||
| 109 | del default_context | ||
| 110 | |||
| 111 | evaluator = Evaluator() | ||
| 112 | |||
| 113 | def interpret(marker, execution_context=None): | ||
| 114 | """ | ||
| 115 | Interpret a marker and return a result depending on environment. | ||
| 116 | |||
| 117 | :param marker: The marker to interpret. | ||
| 118 | :type marker: str | ||
| 119 | :param execution_context: The context used for name lookup. | ||
| 120 | :type execution_context: mapping | ||
| 121 | """ | ||
| 122 | try: | ||
| 123 | expr, rest = parse_marker(marker) | ||
| 124 | except Exception as e: | ||
| 125 | raise SyntaxError('Unable to interpret marker syntax: %s: %s' % (marker, e)) | ||
| 126 | if rest and rest[0] != '#': | ||
| 127 | raise SyntaxError('unexpected trailing data in marker: %s: %s' % (marker, rest)) | ||
| 128 | context = dict(DEFAULT_CONTEXT) | ||
| 129 | if execution_context: | ||
| 130 | context.update(execution_context) | ||
| 131 | return evaluator.evaluate(expr, context) | ||
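A short sketch of `interpret` in use (again assuming the standalone `distlib` import path). Since `Evaluator.operations` compares plain strings, version comparisons here are lexicographic rather than PEP 440-aware:

```python
from distlib.markers import interpret

print(interpret('sys_platform == "linux"'))
print(interpret('python_version >= "3.4" and os_name == "posix"'))

# execution_context entries override DEFAULT_CONTEXT values:
print(interpret('python_version >= "3.4"', {'python_version': '2.7'}))  # False
```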
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/metadata.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/metadata.py new file mode 100644 index 0000000..10a1fee --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/metadata.py | |||
| @@ -0,0 +1,1091 @@ | |||
| 1 | # -*- coding: utf-8 -*- | ||
| 2 | # | ||
| 3 | # Copyright (C) 2012 The Python Software Foundation. | ||
| 4 | # See LICENSE.txt and CONTRIBUTORS.txt. | ||
| 5 | # | ||
| 6 | """Implementation of the Metadata for Python packages PEPs. | ||
| 7 | |||
| 8 | Supports all metadata formats (1.0, 1.1, 1.2, 1.3/2.1 and 2.0 experimental). | ||
| 9 | """ | ||
| 10 | from __future__ import unicode_literals | ||
| 11 | |||
| 12 | import codecs | ||
| 13 | from email import message_from_file | ||
| 14 | import json | ||
| 15 | import logging | ||
| 16 | import re | ||
| 17 | |||
| 18 | |||
| 19 | from . import DistlibException, __version__ | ||
| 20 | from .compat import StringIO, string_types, text_type | ||
| 21 | from .markers import interpret | ||
| 22 | from .util import extract_by_key, get_extras | ||
| 23 | from .version import get_scheme, PEP440_VERSION_RE | ||
| 24 | |||
| 25 | logger = logging.getLogger(__name__) | ||
| 26 | |||
| 27 | |||
| 28 | class MetadataMissingError(DistlibException): | ||
| 29 | """A required metadata is missing""" | ||
| 30 | |||
| 31 | |||
| 32 | class MetadataConflictError(DistlibException): | ||
| 33 | """Attempt to read or write metadata fields that are conflictual.""" | ||
| 34 | |||
| 35 | |||
| 36 | class MetadataUnrecognizedVersionError(DistlibException): | ||
| 37 | """Unknown metadata version number.""" | ||
| 38 | |||
| 39 | |||
| 40 | class MetadataInvalidError(DistlibException): | ||
| 41 | """A metadata value is invalid""" | ||
| 42 | |||
| 43 | # public API of this module | ||
| 44 | __all__ = ['Metadata', 'PKG_INFO_ENCODING', 'PKG_INFO_PREFERRED_VERSION'] | ||
| 45 | |||
| 46 | # Encoding used for the PKG-INFO files | ||
| 47 | PKG_INFO_ENCODING = 'utf-8' | ||
| 48 | |||
| 49 | # preferred version. Hopefully will be changed | ||
| 50 | # to 1.2 once PEP 345 is supported everywhere | ||
| 51 | PKG_INFO_PREFERRED_VERSION = '1.1' | ||
| 52 | |||
| 53 | _LINE_PREFIX_1_2 = re.compile('\n \\|') | ||
| 54 | _LINE_PREFIX_PRE_1_2 = re.compile('\n ') | ||
| 55 | _241_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', | ||
| 56 | 'Summary', 'Description', | ||
| 57 | 'Keywords', 'Home-page', 'Author', 'Author-email', | ||
| 58 | 'License') | ||
| 59 | |||
| 60 | _314_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', | ||
| 61 | 'Supported-Platform', 'Summary', 'Description', | ||
| 62 | 'Keywords', 'Home-page', 'Author', 'Author-email', | ||
| 63 | 'License', 'Classifier', 'Download-URL', 'Obsoletes', | ||
| 64 | 'Provides', 'Requires') | ||
| 65 | |||
| 66 | _314_MARKERS = ('Obsoletes', 'Provides', 'Requires', 'Classifier', | ||
| 67 | 'Download-URL') | ||
| 68 | |||
| 69 | _345_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', | ||
| 70 | 'Supported-Platform', 'Summary', 'Description', | ||
| 71 | 'Keywords', 'Home-page', 'Author', 'Author-email', | ||
| 72 | 'Maintainer', 'Maintainer-email', 'License', | ||
| 73 | 'Classifier', 'Download-URL', 'Obsoletes-Dist', | ||
| 74 | 'Project-URL', 'Provides-Dist', 'Requires-Dist', | ||
| 75 | 'Requires-Python', 'Requires-External') | ||
| 76 | |||
| 77 | _345_MARKERS = ('Provides-Dist', 'Requires-Dist', 'Requires-Python', | ||
| 78 | 'Obsoletes-Dist', 'Requires-External', 'Maintainer', | ||
| 79 | 'Maintainer-email', 'Project-URL') | ||
| 80 | |||
| 81 | _426_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', | ||
| 82 | 'Supported-Platform', 'Summary', 'Description', | ||
| 83 | 'Keywords', 'Home-page', 'Author', 'Author-email', | ||
| 84 | 'Maintainer', 'Maintainer-email', 'License', | ||
| 85 | 'Classifier', 'Download-URL', 'Obsoletes-Dist', | ||
| 86 | 'Project-URL', 'Provides-Dist', 'Requires-Dist', | ||
| 87 | 'Requires-Python', 'Requires-External', 'Private-Version', | ||
| 88 | 'Obsoleted-By', 'Setup-Requires-Dist', 'Extension', | ||
| 89 | 'Provides-Extra') | ||
| 90 | |||
| 91 | _426_MARKERS = ('Private-Version', 'Provides-Extra', 'Obsoleted-By', | ||
| 92 | 'Setup-Requires-Dist', 'Extension') | ||
| 93 | |||
| 94 | _566_FIELDS = _426_FIELDS + ('Description-Content-Type',) | ||
| 95 | |||
| 96 | _566_MARKERS = ('Description-Content-Type',) | ||
| 97 | |||
| 98 | _ALL_FIELDS = set() | ||
| 99 | _ALL_FIELDS.update(_241_FIELDS) | ||
| 100 | _ALL_FIELDS.update(_314_FIELDS) | ||
| 101 | _ALL_FIELDS.update(_345_FIELDS) | ||
| 102 | _ALL_FIELDS.update(_426_FIELDS) | ||
| 103 | _ALL_FIELDS.update(_566_FIELDS) | ||
| 104 | |||
| 105 | EXTRA_RE = re.compile(r'''extra\s*==\s*("([^"]+)"|'([^']+)')''') | ||
| 106 | |||
| 107 | |||
| 108 | def _version2fieldlist(version): | ||
| 109 | if version == '1.0': | ||
| 110 | return _241_FIELDS | ||
| 111 | elif version == '1.1': | ||
| 112 | return _314_FIELDS | ||
| 113 | elif version == '1.2': | ||
| 114 | return _345_FIELDS | ||
| 115 | elif version in ('1.3', '2.1'): | ||
| 116 | return _345_FIELDS + _566_FIELDS | ||
| 117 | elif version == '2.0': | ||
| 118 | return _426_FIELDS | ||
| 119 | raise MetadataUnrecognizedVersionError(version) | ||
| 120 | |||
| 121 | |||
| 122 | def _best_version(fields): | ||
| 123 | """Detect the best version depending on the fields used.""" | ||
| 124 | def _has_marker(keys, markers): | ||
| 125 | for marker in markers: | ||
| 126 | if marker in keys: | ||
| 127 | return True | ||
| 128 | return False | ||
| 129 | |||
| 130 | keys = [] | ||
| 131 | for key, value in fields.items(): | ||
| 132 | if value in ([], 'UNKNOWN', None): | ||
| 133 | continue | ||
| 134 | keys.append(key) | ||
| 135 | |||
| 136 | possible_versions = ['1.0', '1.1', '1.2', '1.3', '2.0', '2.1'] | ||
| 137 | |||
| 138 | # first, see whether a field rules out one of the candidate versions | ||
| 139 | for key in keys: | ||
| 140 | if key not in _241_FIELDS and '1.0' in possible_versions: | ||
| 141 | possible_versions.remove('1.0') | ||
| 142 | logger.debug('Removed 1.0 due to %s', key) | ||
| 143 | if key not in _314_FIELDS and '1.1' in possible_versions: | ||
| 144 | possible_versions.remove('1.1') | ||
| 145 | logger.debug('Removed 1.1 due to %s', key) | ||
| 146 | if key not in _345_FIELDS and '1.2' in possible_versions: | ||
| 147 | possible_versions.remove('1.2') | ||
| 148 | logger.debug('Removed 1.2 due to %s', key) | ||
| 149 | if key not in _566_FIELDS and '1.3' in possible_versions: | ||
| 150 | possible_versions.remove('1.3') | ||
| 151 | logger.debug('Removed 1.3 due to %s', key) | ||
| 152 | if key not in _566_FIELDS and '2.1' in possible_versions: | ||
| 153 | if key != 'Description': # In 2.1, description allowed after headers | ||
| 154 | possible_versions.remove('2.1') | ||
| 155 | logger.debug('Removed 2.1 due to %s', key) | ||
| 156 | if key not in _426_FIELDS and '2.0' in possible_versions: | ||
| 157 | possible_versions.remove('2.0') | ||
| 158 | logger.debug('Removed 2.0 due to %s', key) | ||
| 159 | |||
| 160 | # possible_versions contains the qualifying versions | ||
| 161 | if len(possible_versions) == 1: | ||
| 162 | return possible_versions[0] # found! | ||
| 163 | elif len(possible_versions) == 0: | ||
| 164 | logger.debug('Out of options - unknown metadata set: %s', fields) | ||
| 165 | raise MetadataConflictError('Unknown metadata set') | ||
| 166 | |||
| 167 | # let's see if one unique marker is found | ||
| 168 | is_1_1 = '1.1' in possible_versions and _has_marker(keys, _314_MARKERS) | ||
| 169 | is_1_2 = '1.2' in possible_versions and _has_marker(keys, _345_MARKERS) | ||
| 170 | is_2_1 = '2.1' in possible_versions and _has_marker(keys, _566_MARKERS) | ||
| 171 | is_2_0 = '2.0' in possible_versions and _has_marker(keys, _426_MARKERS) | ||
| 172 | if int(is_1_1) + int(is_1_2) + int(is_2_1) + int(is_2_0) > 1: | ||
| 173 | raise MetadataConflictError('You used incompatible 1.1/1.2/2.0/2.1 fields') | ||
| 174 | |||
| 175 | # we have a choice among 1.0, 1.2 and 2.0: | ||
| 176 | # - 1.0 has a broken Summary field but works with all tools | ||
| 177 | # - 1.1 is best avoided | ||
| 178 | # - 1.2 fixes Summary but has little adoption | ||
| 179 | # - 2.0 adds more features and is very new | ||
| 180 | if not is_1_1 and not is_1_2 and not is_2_1 and not is_2_0: | ||
| 181 | # we couldn't find any specific marker | ||
| 182 | if PKG_INFO_PREFERRED_VERSION in possible_versions: | ||
| 183 | return PKG_INFO_PREFERRED_VERSION | ||
| 184 | if is_1_1: | ||
| 185 | return '1.1' | ||
| 186 | if is_1_2: | ||
| 187 | return '1.2' | ||
| 188 | if is_2_1: | ||
| 189 | return '2.1' | ||
| 190 | |||
| 191 | return '2.0' | ||
| 192 | |||
| 193 | _ATTR2FIELD = { | ||
| 194 | 'metadata_version': 'Metadata-Version', | ||
| 195 | 'name': 'Name', | ||
| 196 | 'version': 'Version', | ||
| 197 | 'platform': 'Platform', | ||
| 198 | 'supported_platform': 'Supported-Platform', | ||
| 199 | 'summary': 'Summary', | ||
| 200 | 'description': 'Description', | ||
| 201 | 'keywords': 'Keywords', | ||
| 202 | 'home_page': 'Home-page', | ||
| 203 | 'author': 'Author', | ||
| 204 | 'author_email': 'Author-email', | ||
| 205 | 'maintainer': 'Maintainer', | ||
| 206 | 'maintainer_email': 'Maintainer-email', | ||
| 207 | 'license': 'License', | ||
| 208 | 'classifier': 'Classifier', | ||
| 209 | 'download_url': 'Download-URL', | ||
| 210 | 'obsoletes_dist': 'Obsoletes-Dist', | ||
| 211 | 'provides_dist': 'Provides-Dist', | ||
| 212 | 'requires_dist': 'Requires-Dist', | ||
| 213 | 'setup_requires_dist': 'Setup-Requires-Dist', | ||
| 214 | 'requires_python': 'Requires-Python', | ||
| 215 | 'requires_external': 'Requires-External', | ||
| 216 | 'requires': 'Requires', | ||
| 217 | 'provides': 'Provides', | ||
| 218 | 'obsoletes': 'Obsoletes', | ||
| 219 | 'project_url': 'Project-URL', | ||
| 220 | 'private_version': 'Private-Version', | ||
| 221 | 'obsoleted_by': 'Obsoleted-By', | ||
| 222 | 'extension': 'Extension', | ||
| 223 | 'provides_extra': 'Provides-Extra', | ||
| 224 | } | ||
| 225 | |||
| 226 | _PREDICATE_FIELDS = ('Requires-Dist', 'Obsoletes-Dist', 'Provides-Dist') | ||
| 227 | _VERSIONS_FIELDS = ('Requires-Python',) | ||
| 228 | _VERSION_FIELDS = ('Version',) | ||
| 229 | _LISTFIELDS = ('Platform', 'Classifier', 'Obsoletes', | ||
| 230 | 'Requires', 'Provides', 'Obsoletes-Dist', | ||
| 231 | 'Provides-Dist', 'Requires-Dist', 'Requires-External', | ||
| 232 | 'Project-URL', 'Supported-Platform', 'Setup-Requires-Dist', | ||
| 233 | 'Provides-Extra', 'Extension') | ||
| 234 | _LISTTUPLEFIELDS = ('Project-URL',) | ||
| 235 | |||
| 236 | _ELEMENTSFIELD = ('Keywords',) | ||
| 237 | |||
| 238 | _UNICODEFIELDS = ('Author', 'Maintainer', 'Summary', 'Description') | ||
| 239 | |||
| 240 | _MISSING = object() | ||
| 241 | |||
| 242 | _FILESAFE = re.compile('[^A-Za-z0-9.]+') | ||
| 243 | |||
| 244 | |||
| 245 | def _get_name_and_version(name, version, for_filename=False): | ||
| 246 | """Return the distribution name with version. | ||
| 247 | |||
| 248 | If for_filename is true, return a filename-escaped form.""" | ||
| 249 | if for_filename: | ||
| 250 | # For both name and version any runs of non-alphanumeric or '.' | ||
| 251 | # characters are replaced with a single '-'. Additionally any | ||
| 252 | # spaces in the version string become '.' | ||
| 253 | name = _FILESAFE.sub('-', name) | ||
| 254 | version = _FILESAFE.sub('-', version.replace(' ', '.')) | ||
| 255 | return '%s-%s' % (name, version) | ||
| 256 | |||
| 257 | |||
| 258 | class LegacyMetadata(object): | ||
| 259 | """The legacy metadata of a release. | ||
| 260 | |||
| 261 | Supports versions 1.0, 1.1 and 1.2 (auto-detected). You can | ||
| 262 | instantiate the class with one of these arguments (or none): | ||
| 263 | - *path*, the path to a metadata file | ||
| 264 | - *fileobj* give a file-like object with metadata as content | ||
| 265 | - *mapping* is a dict-like object | ||
| 266 | - *scheme* is a version scheme name | ||
| 267 | """ | ||
| 268 | # TODO document the mapping API and UNKNOWN default key | ||
| 269 | |||
| 270 | def __init__(self, path=None, fileobj=None, mapping=None, | ||
| 271 | scheme='default'): | ||
| 272 | if [path, fileobj, mapping].count(None) < 2: | ||
| 273 | raise TypeError('path, fileobj and mapping are exclusive') | ||
| 274 | self._fields = {} | ||
| 275 | self.requires_files = [] | ||
| 276 | self._dependencies = None | ||
| 277 | self.scheme = scheme | ||
| 278 | if path is not None: | ||
| 279 | self.read(path) | ||
| 280 | elif fileobj is not None: | ||
| 281 | self.read_file(fileobj) | ||
| 282 | elif mapping is not None: | ||
| 283 | self.update(mapping) | ||
| 284 | self.set_metadata_version() | ||
| 285 | |||
| 286 | def set_metadata_version(self): | ||
| 287 | self._fields['Metadata-Version'] = _best_version(self._fields) | ||
| 288 | |||
| 289 | def _write_field(self, fileobj, name, value): | ||
| 290 | fileobj.write('%s: %s\n' % (name, value)) | ||
| 291 | |||
| 292 | def __getitem__(self, name): | ||
| 293 | return self.get(name) | ||
| 294 | |||
| 295 | def __setitem__(self, name, value): | ||
| 296 | return self.set(name, value) | ||
| 297 | |||
| 298 | def __delitem__(self, name): | ||
| 299 | field_name = self._convert_name(name) | ||
| 300 | try: | ||
| 301 | del self._fields[field_name] | ||
| 302 | except KeyError: | ||
| 303 | raise KeyError(name) | ||
| 304 | |||
| 305 | def __contains__(self, name): | ||
| 306 | return (name in self._fields or | ||
| 307 | self._convert_name(name) in self._fields) | ||
| 308 | |||
| 309 | def _convert_name(self, name): | ||
| 310 | if name in _ALL_FIELDS: | ||
| 311 | return name | ||
| 312 | name = name.replace('-', '_').lower() | ||
| 313 | return _ATTR2FIELD.get(name, name) | ||
| 314 | |||
| 315 | def _default_value(self, name): | ||
| 316 | if name in _LISTFIELDS or name in _ELEMENTSFIELD: | ||
| 317 | return [] | ||
| 318 | return 'UNKNOWN' | ||
| 319 | |||
| 320 | def _remove_line_prefix(self, value): | ||
| 321 | if self.metadata_version in ('1.0', '1.1'): | ||
| 322 | return _LINE_PREFIX_PRE_1_2.sub('\n', value) | ||
| 323 | else: | ||
| 324 | return _LINE_PREFIX_1_2.sub('\n', value) | ||
| 325 | |||
| 326 | def __getattr__(self, name): | ||
| 327 | if name in _ATTR2FIELD: | ||
| 328 | return self[name] | ||
| 329 | raise AttributeError(name) | ||
| 330 | |||
| 331 | # | ||
| 332 | # Public API | ||
| 333 | # | ||
| 334 | |||
| 335 | # dependencies = property(_get_dependencies, _set_dependencies) | ||
| 336 | |||
| 337 | def get_fullname(self, filesafe=False): | ||
| 338 | """Return the distribution name with version. | ||
| 339 | |||
| 340 | If filesafe is true, return a filename-escaped form.""" | ||
| 341 | return _get_name_and_version(self['Name'], self['Version'], filesafe) | ||
| 342 | |||
| 343 | def is_field(self, name): | ||
| 344 | """return True if name is a valid metadata key""" | ||
| 345 | name = self._convert_name(name) | ||
| 346 | return name in _ALL_FIELDS | ||
| 347 | |||
| 348 | def is_multi_field(self, name): | ||
| 349 | name = self._convert_name(name) | ||
| 350 | return name in _LISTFIELDS | ||
| 351 | |||
| 352 | def read(self, filepath): | ||
| 353 | """Read the metadata values from a file path.""" | ||
| 354 | fp = codecs.open(filepath, 'r', encoding='utf-8') | ||
| 355 | try: | ||
| 356 | self.read_file(fp) | ||
| 357 | finally: | ||
| 358 | fp.close() | ||
| 359 | |||
| 360 | def read_file(self, fileob): | ||
| 361 | """Read the metadata values from a file object.""" | ||
| 362 | msg = message_from_file(fileob) | ||
| 363 | self._fields['Metadata-Version'] = msg['metadata-version'] | ||
| 364 | |||
| 365 | # When reading, get all the fields we can | ||
| 366 | for field in _ALL_FIELDS: | ||
| 367 | if field not in msg: | ||
| 368 | continue | ||
| 369 | if field in _LISTFIELDS: | ||
| 370 | # we can have multiple lines | ||
| 371 | values = msg.get_all(field) | ||
| 372 | if field in _LISTTUPLEFIELDS and values is not None: | ||
| 373 | values = [tuple(value.split(',')) for value in values] | ||
| 374 | self.set(field, values) | ||
| 375 | else: | ||
| 376 | # single line | ||
| 377 | value = msg[field] | ||
| 378 | if value is not None and value != 'UNKNOWN': | ||
| 379 | self.set(field, value) | ||
| 380 | logger.debug('Attempting to set metadata for %s', self) | ||
| 381 | self.set_metadata_version() | ||
| 382 | |||
| 383 | def write(self, filepath, skip_unknown=False): | ||
| 384 | """Write the metadata fields to filepath.""" | ||
| 385 | fp = codecs.open(filepath, 'w', encoding='utf-8') | ||
| 386 | try: | ||
| 387 | self.write_file(fp, skip_unknown) | ||
| 388 | finally: | ||
| 389 | fp.close() | ||
| 390 | |||
| 391 | def write_file(self, fileobject, skip_unknown=False): | ||
| 392 | """Write the PKG-INFO format data to a file object.""" | ||
| 393 | self.set_metadata_version() | ||
| 394 | |||
| 395 | for field in _version2fieldlist(self['Metadata-Version']): | ||
| 396 | values = self.get(field) | ||
| 397 | if skip_unknown and values in ('UNKNOWN', [], ['UNKNOWN']): | ||
| 398 | continue | ||
| 399 | if field in _ELEMENTSFIELD: | ||
| 400 | self._write_field(fileobject, field, ','.join(values)) | ||
| 401 | continue | ||
| 402 | if field not in _LISTFIELDS: | ||
| 403 | if field == 'Description': | ||
| 404 | if self.metadata_version in ('1.0', '1.1'): | ||
| 405 | values = values.replace('\n', '\n ') | ||
| 406 | else: | ||
| 407 | values = values.replace('\n', '\n |') | ||
| 408 | values = [values] | ||
| 409 | |||
| 410 | if field in _LISTTUPLEFIELDS: | ||
| 411 | values = [','.join(value) for value in values] | ||
| 412 | |||
| 413 | for value in values: | ||
| 414 | self._write_field(fileobject, field, value) | ||
| 415 | |||
| 416 | def update(self, other=None, **kwargs): | ||
| 417 | """Set metadata values from the given iterable `other` and kwargs. | ||
| 418 | |||
| 419 | Behavior is like `dict.update`: If `other` has a ``keys`` method, | ||
| 420 | they are looped over and ``self[key]`` is assigned ``other[key]``. | ||
| 421 | Else, ``other`` is an iterable of ``(key, value)`` pairs. | ||
| 422 | |||
| 423 | Keys that don't match a metadata field or that have an empty value are | ||
| 424 | dropped. | ||
| 425 | """ | ||
| 426 | def _set(key, value): | ||
| 427 | if key in _ATTR2FIELD and value: | ||
| 428 | self.set(self._convert_name(key), value) | ||
| 429 | |||
| 430 | if not other: | ||
| 431 | # other is None or empty container | ||
| 432 | pass | ||
| 433 | elif hasattr(other, 'keys'): | ||
| 434 | for k in other.keys(): | ||
| 435 | _set(k, other[k]) | ||
| 436 | else: | ||
| 437 | for k, v in other: | ||
| 438 | _set(k, v) | ||
| 439 | |||
| 440 | if kwargs: | ||
| 441 | for k, v in kwargs.items(): | ||
| 442 | _set(k, v) | ||
| 443 | |||
| 444 | def set(self, name, value): | ||
| 445 | """Control then set a metadata field.""" | ||
| 446 | name = self._convert_name(name) | ||
| 447 | |||
| 448 | if ((name in _ELEMENTSFIELD or name == 'Platform') and | ||
| 449 | not isinstance(value, (list, tuple))): | ||
| 450 | if isinstance(value, string_types): | ||
| 451 | value = [v.strip() for v in value.split(',')] | ||
| 452 | else: | ||
| 453 | value = [] | ||
| 454 | elif (name in _LISTFIELDS and | ||
| 455 | not isinstance(value, (list, tuple))): | ||
| 456 | if isinstance(value, string_types): | ||
| 457 | value = [value] | ||
| 458 | else: | ||
| 459 | value = [] | ||
| 460 | |||
| 461 | # bind project_name unconditionally: the warning calls below use it | ||
| 462 | project_name = self['Name'] | ||
| 463 | |||
| 464 | scheme = get_scheme(self.scheme) | ||
| 465 | if name in _PREDICATE_FIELDS and value is not None: | ||
| 466 | for v in value: | ||
| 467 | # check that the values are valid | ||
| 468 | if not scheme.is_valid_matcher(v.split(';')[0]): | ||
| 469 | logger.warning( | ||
| 470 | "'%s': '%s' is not valid (field '%s')", | ||
| 471 | project_name, v, name) | ||
| 472 | # FIXME this rejects UNKNOWN, is that right? | ||
| 473 | elif name in _VERSIONS_FIELDS and value is not None: | ||
| 474 | if not scheme.is_valid_constraint_list(value): | ||
| 475 | logger.warning("'%s': '%s' is not a valid version (field '%s')", | ||
| 476 | project_name, value, name) | ||
| 477 | elif name in _VERSION_FIELDS and value is not None: | ||
| 478 | if not scheme.is_valid_version(value): | ||
| 479 | logger.warning("'%s': '%s' is not a valid version (field '%s')", | ||
| 480 | project_name, value, name) | ||
| 481 | |||
| 482 | if name in _UNICODEFIELDS: | ||
| 483 | if name == 'Description': | ||
| 484 | value = self._remove_line_prefix(value) | ||
| 485 | |||
| 486 | self._fields[name] = value | ||
| 487 | |||
| 488 | def get(self, name, default=_MISSING): | ||
| 489 | """Get a metadata field.""" | ||
| 490 | name = self._convert_name(name) | ||
| 491 | if name not in self._fields: | ||
| 492 | if default is _MISSING: | ||
| 493 | default = self._default_value(name) | ||
| 494 | return default | ||
| 495 | if name in _UNICODEFIELDS: | ||
| 496 | value = self._fields[name] | ||
| 497 | return value | ||
| 498 | elif name in _LISTFIELDS: | ||
| 499 | value = self._fields[name] | ||
| 500 | if value is None: | ||
| 501 | return [] | ||
| 502 | res = [] | ||
| 503 | for val in value: | ||
| 504 | if name not in _LISTTUPLEFIELDS: | ||
| 505 | res.append(val) | ||
| 506 | else: | ||
| 507 | # That's for Project-URL | ||
| 508 | res.append((val[0], val[1])) | ||
| 509 | return res | ||
| 510 | |||
| 511 | elif name in _ELEMENTSFIELD: | ||
| 512 | value = self._fields[name] | ||
| 513 | if isinstance(value, string_types): | ||
| 514 | return value.split(',') | ||
| 515 | return self._fields[name] | ||
| 516 | |||
| 517 | def check(self, strict=False): | ||
| 518 | """Check if the metadata is compliant. If strict is True then raise if | ||
| 519 | Name or Version is missing.""" | ||
| 520 | self.set_metadata_version() | ||
| 521 | |||
| 522 | # XXX should check the versions (if the file was loaded) | ||
| 523 | missing, warnings = [], [] | ||
| 524 | |||
| 525 | for attr in ('Name', 'Version'): # required by PEP 345 | ||
| 526 | if attr not in self: | ||
| 527 | missing.append(attr) | ||
| 528 | |||
| 529 | if strict and missing != []: | ||
| 530 | msg = 'missing required metadata: %s' % ', '.join(missing) | ||
| 531 | raise MetadataMissingError(msg) | ||
| 532 | |||
| 533 | for attr in ('Home-page', 'Author'): | ||
| 534 | if attr not in self: | ||
| 535 | missing.append(attr) | ||
| 536 | |||
| 537 | # checking metadata 1.2 (XXX needs to check 1.1, 1.0) | ||
| 538 | if self['Metadata-Version'] != '1.2': | ||
| 539 | return missing, warnings | ||
| 540 | |||
| 541 | scheme = get_scheme(self.scheme) | ||
| 542 | |||
| 543 | def are_valid_constraints(value): | ||
| 544 | for v in value: | ||
| 545 | if not scheme.is_valid_matcher(v.split(';')[0]): | ||
| 546 | return False | ||
| 547 | return True | ||
| 548 | |||
| 549 | for fields, controller in ((_PREDICATE_FIELDS, are_valid_constraints), | ||
| 550 | (_VERSIONS_FIELDS, | ||
| 551 | scheme.is_valid_constraint_list), | ||
| 552 | (_VERSION_FIELDS, | ||
| 553 | scheme.is_valid_version)): | ||
| 554 | for field in fields: | ||
| 555 | value = self.get(field, None) | ||
| 556 | if value is not None and not controller(value): | ||
| 557 | warnings.append("Wrong value for '%s': %s" % (field, value)) | ||
| 558 | |||
| 559 | return missing, warnings | ||
| 560 | |||
| 561 | def todict(self, skip_missing=False): | ||
| 562 | """Return fields as a dict. | ||
| 563 | |||
| 564 | Field names will be converted to use the underscore-lowercase style | ||
| 565 | instead of hyphen-mixed case (e.g. home_page instead of Home-page). | ||
| 566 | """ | ||
| 567 | self.set_metadata_version() | ||
| 568 | |||
| 569 | mapping_1_0 = ( | ||
| 570 | ('metadata_version', 'Metadata-Version'), | ||
| 571 | ('name', 'Name'), | ||
| 572 | ('version', 'Version'), | ||
| 573 | ('summary', 'Summary'), | ||
| 574 | ('home_page', 'Home-page'), | ||
| 575 | ('author', 'Author'), | ||
| 576 | ('author_email', 'Author-email'), | ||
| 577 | ('license', 'License'), | ||
| 578 | ('description', 'Description'), | ||
| 579 | ('keywords', 'Keywords'), | ||
| 580 | ('platform', 'Platform'), | ||
| 581 | ('classifiers', 'Classifier'), | ||
| 582 | ('download_url', 'Download-URL'), | ||
| 583 | ) | ||
| 584 | |||
| 585 | data = {} | ||
| 586 | for key, field_name in mapping_1_0: | ||
| 587 | if not skip_missing or field_name in self._fields: | ||
| 588 | data[key] = self[field_name] | ||
| 589 | |||
| 590 | if self['Metadata-Version'] == '1.2': | ||
| 591 | mapping_1_2 = ( | ||
| 592 | ('requires_dist', 'Requires-Dist'), | ||
| 593 | ('requires_python', 'Requires-Python'), | ||
| 594 | ('requires_external', 'Requires-External'), | ||
| 595 | ('provides_dist', 'Provides-Dist'), | ||
| 596 | ('obsoletes_dist', 'Obsoletes-Dist'), | ||
| 597 | ('project_url', 'Project-URL'), | ||
| 598 | ('maintainer', 'Maintainer'), | ||
| 599 | ('maintainer_email', 'Maintainer-email'), | ||
| 600 | ) | ||
| 601 | for key, field_name in mapping_1_2: | ||
| 602 | if not skip_missing or field_name in self._fields: | ||
| 603 | if key != 'project_url': | ||
| 604 | data[key] = self[field_name] | ||
| 605 | else: | ||
| 606 | data[key] = [','.join(u) for u in self[field_name]] | ||
| 607 | |||
| 608 | elif self['Metadata-Version'] == '1.1': | ||
| 609 | mapping_1_1 = ( | ||
| 610 | ('provides', 'Provides'), | ||
| 611 | ('requires', 'Requires'), | ||
| 612 | ('obsoletes', 'Obsoletes'), | ||
| 613 | ) | ||
| 614 | for key, field_name in mapping_1_1: | ||
| 615 | if not skip_missing or field_name in self._fields: | ||
| 616 | data[key] = self[field_name] | ||
| 617 | |||
| 618 | return data | ||
| 619 | |||
| 620 | def add_requirements(self, requirements): | ||
| 621 | if self['Metadata-Version'] == '1.1': | ||
| 622 | # we can't have 1.1 metadata *and* Setuptools requires | ||
| 623 | for field in ('Obsoletes', 'Requires', 'Provides'): | ||
| 624 | if field in self: | ||
| 625 | del self[field] | ||
| 626 | self['Requires-Dist'] += requirements | ||
| 627 | |||
| 628 | # Mapping API | ||
| 629 | # TODO could add iter* variants | ||
| 630 | |||
| 631 | def keys(self): | ||
| 632 | return list(_version2fieldlist(self['Metadata-Version'])) | ||
| 633 | |||
| 634 | def __iter__(self): | ||
| 635 | for key in self.keys(): | ||
| 636 | yield key | ||
| 637 | |||
| 638 | def values(self): | ||
| 639 | return [self[key] for key in self.keys()] | ||
| 640 | |||
| 641 | def items(self): | ||
| 642 | return [(key, self[key]) for key in self.keys()] | ||
| 643 | |||
| 644 | def __repr__(self): | ||
| 645 | return '<%s %s %s>' % (self.__class__.__name__, self.name, | ||
| 646 | self.version) | ||
| 647 | |||
| 648 | |||
| 649 | METADATA_FILENAME = 'pydist.json' | ||
| 650 | WHEEL_METADATA_FILENAME = 'metadata.json' | ||
| 651 | |||
| 652 | |||
| 653 | class Metadata(object): | ||
| 654 | """ | ||
| 655 | The metadata of a release. This implementation uses 2.0 (JSON) | ||
| 656 | metadata where possible. If not possible, it wraps a LegacyMetadata | ||
| 657 | instance which handles the key-value metadata format. | ||
| 658 | """ | ||
| 659 | |||
| 660 | METADATA_VERSION_MATCHER = re.compile(r'^\d+(\.\d+)*$') | ||
| 661 | |||
| 662 | NAME_MATCHER = re.compile('^[0-9A-Z]([0-9A-Z_.-]*[0-9A-Z])?$', re.I) | ||
| 663 | |||
| 664 | VERSION_MATCHER = PEP440_VERSION_RE | ||
| 665 | |||
| 666 | SUMMARY_MATCHER = re.compile('.{1,2047}') | ||
| 667 | |||
| 668 | METADATA_VERSION = '2.0' | ||
| 669 | |||
| 670 | GENERATOR = 'distlib (%s)' % __version__ | ||
| 671 | |||
| 672 | MANDATORY_KEYS = { | ||
| 673 | 'name': (), | ||
| 674 | 'version': (), | ||
| 675 | 'summary': ('legacy',), | ||
| 676 | } | ||
| 677 | |||
| 678 | INDEX_KEYS = ('name version license summary description author ' | ||
| 679 | 'author_email keywords platform home_page classifiers ' | ||
| 680 | 'download_url') | ||
| 681 | |||
| 682 | DEPENDENCY_KEYS = ('extras run_requires test_requires build_requires ' | ||
| 683 | 'dev_requires provides meta_requires obsoleted_by ' | ||
| 684 | 'supports_environments') | ||
| 685 | |||
| 686 | SYNTAX_VALIDATORS = { | ||
| 687 | 'metadata_version': (METADATA_VERSION_MATCHER, ()), | ||
| 688 | 'name': (NAME_MATCHER, ('legacy',)), | ||
| 689 | 'version': (VERSION_MATCHER, ('legacy',)), | ||
| 690 | 'summary': (SUMMARY_MATCHER, ('legacy',)), | ||
| 691 | } | ||
| 692 | |||
| 693 | __slots__ = ('_legacy', '_data', 'scheme') | ||
| 694 | |||
| 695 | def __init__(self, path=None, fileobj=None, mapping=None, | ||
| 696 | scheme='default'): | ||
| 697 | if [path, fileobj, mapping].count(None) < 2: | ||
| 698 | raise TypeError('path, fileobj and mapping are exclusive') | ||
| 699 | self._legacy = None | ||
| 700 | self._data = None | ||
| 701 | self.scheme = scheme | ||
| 702 | #import pdb; pdb.set_trace() | ||
| 703 | if mapping is not None: | ||
| 704 | try: | ||
| 705 | self._validate_mapping(mapping, scheme) | ||
| 706 | self._data = mapping | ||
| 707 | except MetadataUnrecognizedVersionError: | ||
| 708 | self._legacy = LegacyMetadata(mapping=mapping, scheme=scheme) | ||
| 709 | self.validate() | ||
| 710 | else: | ||
| 711 | data = None | ||
| 712 | if path: | ||
| 713 | with open(path, 'rb') as f: | ||
| 714 | data = f.read() | ||
| 715 | elif fileobj: | ||
| 716 | data = fileobj.read() | ||
| 717 | if data is None: | ||
| 718 | # Initialised with no args - fields to be added later | ||
| 719 | self._data = { | ||
| 720 | 'metadata_version': self.METADATA_VERSION, | ||
| 721 | 'generator': self.GENERATOR, | ||
| 722 | } | ||
| 723 | else: | ||
| 724 | if not isinstance(data, text_type): | ||
| 725 | data = data.decode('utf-8') | ||
| 726 | try: | ||
| 727 | self._data = json.loads(data) | ||
| 728 | self._validate_mapping(self._data, scheme) | ||
| 729 | except ValueError: | ||
| 730 | # Note: MetadataUnrecognizedVersionError does not | ||
| 731 | # inherit from ValueError (it's a DistlibException, | ||
| 732 | # which should not inherit from ValueError). | ||
| 733 | # The ValueError comes from json.loads - if that | ||
| 734 | # succeeds and we get a validation error, we want | ||
| 735 | # that to propagate | ||
| 736 | self._legacy = LegacyMetadata(fileobj=StringIO(data), | ||
| 737 | scheme=scheme) | ||
| 738 | self.validate() | ||
| 739 | |||
| 740 | common_keys = set(('name', 'version', 'license', 'keywords', 'summary')) | ||
| 741 | |||
| 742 | none_list = (None, list) | ||
| 743 | none_dict = (None, dict) | ||
| 744 | |||
| 745 | mapped_keys = { | ||
| 746 | 'run_requires': ('Requires-Dist', list), | ||
| 747 | 'build_requires': ('Setup-Requires-Dist', list), | ||
| 748 | 'dev_requires': none_list, | ||
| 749 | 'test_requires': none_list, | ||
| 750 | 'meta_requires': none_list, | ||
| 751 | 'extras': ('Provides-Extra', list), | ||
| 752 | 'modules': none_list, | ||
| 753 | 'namespaces': none_list, | ||
| 754 | 'exports': none_dict, | ||
| 755 | 'commands': none_dict, | ||
| 756 | 'classifiers': ('Classifier', list), | ||
| 757 | 'source_url': ('Download-URL', None), | ||
| 758 | 'metadata_version': ('Metadata-Version', None), | ||
| 759 | } | ||
| 760 | |||
| 761 | del none_list, none_dict | ||
| 762 | |||
| 763 | def __getattribute__(self, key): | ||
| 764 | common = object.__getattribute__(self, 'common_keys') | ||
| 765 | mapped = object.__getattribute__(self, 'mapped_keys') | ||
| 766 | if key in mapped: | ||
| 767 | lk, maker = mapped[key] | ||
| 768 | if self._legacy: | ||
| 769 | if lk is None: | ||
| 770 | result = None if maker is None else maker() | ||
| 771 | else: | ||
| 772 | result = self._legacy.get(lk) | ||
| 773 | else: | ||
| 774 | value = None if maker is None else maker() | ||
| 775 | if key not in ('commands', 'exports', 'modules', 'namespaces', | ||
| 776 | 'classifiers'): | ||
| 777 | result = self._data.get(key, value) | ||
| 778 | else: | ||
| 779 | # special cases for PEP 459 | ||
| 780 | sentinel = object() | ||
| 781 | result = sentinel | ||
| 782 | d = self._data.get('extensions') | ||
| 783 | if d: | ||
| 784 | if key == 'commands': | ||
| 785 | result = d.get('python.commands', value) | ||
| 786 | elif key == 'classifiers': | ||
| 787 | d = d.get('python.details') | ||
| 788 | if d: | ||
| 789 | result = d.get(key, value) | ||
| 790 | else: | ||
| 791 | d = d.get('python.exports') | ||
| 792 | if not d: | ||
| 793 | d = self._data.get('python.exports') | ||
| 794 | if d: | ||
| 795 | result = d.get(key, value) | ||
| 796 | if result is sentinel: | ||
| 797 | result = value | ||
| 798 | elif key not in common: | ||
| 799 | result = object.__getattribute__(self, key) | ||
| 800 | elif self._legacy: | ||
| 801 | result = self._legacy.get(key) | ||
| 802 | else: | ||
| 803 | result = self._data.get(key) | ||
| 804 | return result | ||
| 805 | |||
| 806 | def _validate_value(self, key, value, scheme=None): | ||
| 807 | if key in self.SYNTAX_VALIDATORS: | ||
| 808 | pattern, exclusions = self.SYNTAX_VALIDATORS[key] | ||
| 809 | if (scheme or self.scheme) not in exclusions: | ||
| 810 | m = pattern.match(value) | ||
| 811 | if not m: | ||
| 812 | raise MetadataInvalidError("'%s' is an invalid value for " | ||
| 813 | "the '%s' property" % (value, | ||
| 814 | key)) | ||
| 815 | |||
| 816 | def __setattr__(self, key, value): | ||
| 817 | self._validate_value(key, value) | ||
| 818 | common = object.__getattribute__(self, 'common_keys') | ||
| 819 | mapped = object.__getattribute__(self, 'mapped_keys') | ||
| 820 | if key in mapped: | ||
| 821 | lk, _ = mapped[key] | ||
| 822 | if self._legacy: | ||
| 823 | if lk is None: | ||
| 824 | raise NotImplementedError | ||
| 825 | self._legacy[lk] = value | ||
| 826 | elif key not in ('commands', 'exports', 'modules', 'namespaces', | ||
| 827 | 'classifiers'): | ||
| 828 | self._data[key] = value | ||
| 829 | else: | ||
| 830 | # special cases for PEP 459 | ||
| 831 | d = self._data.setdefault('extensions', {}) | ||
| 832 | if key == 'commands': | ||
| 833 | d['python.commands'] = value | ||
| 834 | elif key == 'classifiers': | ||
| 835 | d = d.setdefault('python.details', {}) | ||
| 836 | d[key] = value | ||
| 837 | else: | ||
| 838 | d = d.setdefault('python.exports', {}) | ||
| 839 | d[key] = value | ||
| 840 | elif key not in common: | ||
| 841 | object.__setattr__(self, key, value) | ||
| 842 | else: | ||
| 843 | if key == 'keywords': | ||
| 844 | if isinstance(value, string_types): | ||
| 845 | value = value.strip() | ||
| 846 | if value: | ||
| 847 | value = value.split() | ||
| 848 | else: | ||
| 849 | value = [] | ||
| 850 | if self._legacy: | ||
| 851 | self._legacy[key] = value | ||
| 852 | else: | ||
| 853 | self._data[key] = value | ||
| 854 | |||
| 855 | @property | ||
| 856 | def name_and_version(self): | ||
| 857 | return _get_name_and_version(self.name, self.version, True) | ||
| 858 | |||
| 859 | @property | ||
| 860 | def provides(self): | ||
| 861 | if self._legacy: | ||
| 862 | result = self._legacy['Provides-Dist'] | ||
| 863 | else: | ||
| 864 | result = self._data.setdefault('provides', []) | ||
| 865 | s = '%s (%s)' % (self.name, self.version) | ||
| 866 | if s not in result: | ||
| 867 | result.append(s) | ||
| 868 | return result | ||
| 869 | |||
| 870 | @provides.setter | ||
| 871 | def provides(self, value): | ||
| 872 | if self._legacy: | ||
| 873 | self._legacy['Provides-Dist'] = value | ||
| 874 | else: | ||
| 875 | self._data['provides'] = value | ||
| 876 | |||
| 877 | def get_requirements(self, reqts, extras=None, env=None): | ||
| 878 | """ | ||
| 879 | Base method to get dependencies, given a set of extras | ||
| 880 | to satisfy and an optional environment context. | ||
| 881 | :param reqts: A list of sometimes-wanted dependencies, | ||
| 882 | perhaps dependent on extras and environment. | ||
| 883 | :param extras: A list of optional components being requested. | ||
| 884 | :param env: An optional environment for marker evaluation. | ||
| 885 | """ | ||
| 886 | if self._legacy: | ||
| 887 | result = reqts | ||
| 888 | else: | ||
| 889 | result = [] | ||
| 890 | extras = get_extras(extras or [], self.extras) | ||
| 891 | for d in reqts: | ||
| 892 | if 'extra' not in d and 'environment' not in d: | ||
| 893 | # unconditional | ||
| 894 | include = True | ||
| 895 | else: | ||
| 896 | if 'extra' not in d: | ||
| 897 | # Not extra-dependent - only environment-dependent | ||
| 898 | include = True | ||
| 899 | else: | ||
| 900 | include = d.get('extra') in extras | ||
| 901 | if include: | ||
| 902 | # Not excluded because of extras, check environment | ||
| 903 | marker = d.get('environment') | ||
| 904 | if marker: | ||
| 905 | include = interpret(marker, env) | ||
| 906 | if include: | ||
| 907 | result.extend(d['requires']) | ||
| 908 | for key in ('build', 'dev', 'test'): | ||
| 909 | e = ':%s:' % key | ||
| 910 | if e in extras: | ||
| 911 | extras.remove(e) | ||
| 912 | # A recursive call, but it should terminate since the | ||
| 913 | # ':key:' pseudo-extra has just been removed from the extras | ||
| 914 | reqts = self._data.get('%s_requires' % key, []) | ||
| 915 | result.extend(self.get_requirements(reqts, extras=extras, | ||
| 916 | env=env)) | ||
| 917 | return result | ||
| 918 | |||
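To see the selection rules above in action, here is a simplified, self-contained sketch: select_requirements is a hypothetical helper that mirrors the extra/environment gating, with a caller-supplied stub standing in for distlib's interpret() marker evaluator:

    def select_requirements(reqts, extras=(), evaluate=lambda marker: True):
        result = []
        for d in reqts:
            # include unless gated behind an extra we were not asked for
            include = 'extra' not in d or d['extra'] in extras
            # then apply the environment marker, if any
            if include and d.get('environment'):
                include = evaluate(d['environment'])
            if include:
                result.extend(d['requires'])
        return result

    reqts = [
        {'requires': ['requests']},                        # unconditional
        {'requires': ['pyopenssl'], 'extra': 'security'},  # extra-gated
        {'requires': ['pywin32'],
         'environment': 'sys_platform == "win32"'},        # marker-gated
    ]
    print(select_requirements(reqts, extras={'security'},
                              evaluate=lambda m: False))
    # -> ['requests', 'pyopenssl']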
| 919 | @property | ||
| 920 | def dictionary(self): | ||
| 921 | if self._legacy: | ||
| 922 | return self._from_legacy() | ||
| 923 | return self._data | ||
| 924 | |||
| 925 | @property | ||
| 926 | def dependencies(self): | ||
| 927 | if self._legacy: | ||
| 928 | raise NotImplementedError | ||
| 929 | else: | ||
| 930 | return extract_by_key(self._data, self.DEPENDENCY_KEYS) | ||
| 931 | |||
| 932 | @dependencies.setter | ||
| 933 | def dependencies(self, value): | ||
| 934 | if self._legacy: | ||
| 935 | raise NotImplementedError | ||
| 936 | else: | ||
| 937 | self._data.update(value) | ||
| 938 | |||
| 939 | def _validate_mapping(self, mapping, scheme): | ||
| 940 | if mapping.get('metadata_version') != self.METADATA_VERSION: | ||
| 941 | raise MetadataUnrecognizedVersionError() | ||
| 942 | missing = [] | ||
| 943 | for key, exclusions in self.MANDATORY_KEYS.items(): | ||
| 944 | if key not in mapping: | ||
| 945 | if scheme not in exclusions: | ||
| 946 | missing.append(key) | ||
| 947 | if missing: | ||
| 948 | msg = 'Missing metadata items: %s' % ', '.join(missing) | ||
| 949 | raise MetadataMissingError(msg) | ||
| 950 | for k, v in mapping.items(): | ||
| 951 | self._validate_value(k, v, scheme) | ||
| 952 | |||
| 953 | def validate(self): | ||
| 954 | if self._legacy: | ||
| 955 | missing, warnings = self._legacy.check(True) | ||
| 956 | if missing or warnings: | ||
| 957 | logger.warning('Metadata: missing: %s, warnings: %s', | ||
| 958 | missing, warnings) | ||
| 959 | else: | ||
| 960 | self._validate_mapping(self._data, self.scheme) | ||
| 961 | |||
| 962 | def todict(self): | ||
| 963 | if self._legacy: | ||
| 964 | return self._legacy.todict(True) | ||
| 965 | else: | ||
| 966 | result = extract_by_key(self._data, self.INDEX_KEYS) | ||
| 967 | return result | ||
| 968 | |||
| 969 | def _from_legacy(self): | ||
| 970 | assert self._legacy and not self._data | ||
| 971 | result = { | ||
| 972 | 'metadata_version': self.METADATA_VERSION, | ||
| 973 | 'generator': self.GENERATOR, | ||
| 974 | } | ||
| 975 | lmd = self._legacy.todict(True) # skip missing ones | ||
| 976 | for k in ('name', 'version', 'license', 'summary', 'description', | ||
| 977 | 'classifier'): | ||
| 978 | if k in lmd: | ||
| 979 | if k == 'classifier': | ||
| 980 | nk = 'classifiers' | ||
| 981 | else: | ||
| 982 | nk = k | ||
| 983 | result[nk] = lmd[k] | ||
| 984 | kw = lmd.get('Keywords', []) | ||
| 985 | if kw == ['']: | ||
| 986 | kw = [] | ||
| 987 | result['keywords'] = kw | ||
| 988 | keys = (('requires_dist', 'run_requires'), | ||
| 989 | ('setup_requires_dist', 'build_requires')) | ||
| 990 | for ok, nk in keys: | ||
| 991 | if ok in lmd and lmd[ok]: | ||
| 992 | result[nk] = [{'requires': lmd[ok]}] | ||
| 993 | result['provides'] = self.provides | ||
| 994 | author = {}      # placeholders; contact details are not converted yet | ||
| 995 | maintainer = {} | ||
| 996 | return result | ||
| 997 | |||
| 998 | LEGACY_MAPPING = { | ||
| 999 | 'name': 'Name', | ||
| 1000 | 'version': 'Version', | ||
| 1001 | 'license': 'License', | ||
| 1002 | 'summary': 'Summary', | ||
| 1003 | 'description': 'Description', | ||
| 1004 | 'classifiers': 'Classifier', | ||
| 1005 | } | ||
| 1006 | |||
| 1007 | def _to_legacy(self): | ||
| 1008 | def process_entries(entries): | ||
| 1009 | reqts = set() | ||
| 1010 | for e in entries: | ||
| 1011 | extra = e.get('extra') | ||
| 1012 | env = e.get('environment') | ||
| 1013 | rlist = e['requires'] | ||
| 1014 | for r in rlist: | ||
| 1015 | if not env and not extra: | ||
| 1016 | reqts.add(r) | ||
| 1017 | else: | ||
| 1018 | marker = '' | ||
| 1019 | if extra: | ||
| 1020 | marker = 'extra == "%s"' % extra | ||
| 1021 | if env: | ||
| 1022 | if marker: | ||
| 1023 | marker = '(%s) and %s' % (env, marker) | ||
| 1024 | else: | ||
| 1025 | marker = env | ||
| 1026 | reqts.add(';'.join((r, marker))) | ||
| 1027 | return reqts | ||
| 1028 | |||
| 1029 | assert self._data and not self._legacy | ||
| 1030 | result = LegacyMetadata() | ||
| 1031 | nmd = self._data | ||
| 1032 | for nk, ok in self.LEGACY_MAPPING.items(): | ||
| 1033 | if nk in nmd: | ||
| 1034 | result[ok] = nmd[nk] | ||
| 1035 | r1 = process_entries(self.run_requires + self.meta_requires) | ||
| 1036 | r2 = process_entries(self.build_requires + self.dev_requires) | ||
| 1037 | if self.extras: | ||
| 1038 | result['Provides-Extra'] = sorted(self.extras) | ||
| 1039 | result['Requires-Dist'] = sorted(r1) | ||
| 1040 | result['Setup-Requires-Dist'] = sorted(r2) | ||
| 1041 | # TODO: other fields such as contacts | ||
| 1042 | return result | ||
| 1043 | |||
| 1044 | def write(self, path=None, fileobj=None, legacy=False, skip_unknown=True): | ||
| 1045 | if [path, fileobj].count(None) != 1: | ||
| 1046 | raise ValueError('Exactly one of path and fileobj is needed') | ||
| 1047 | self.validate() | ||
| 1048 | if legacy: | ||
| 1049 | if self._legacy: | ||
| 1050 | legacy_md = self._legacy | ||
| 1051 | else: | ||
| 1052 | legacy_md = self._to_legacy() | ||
| 1053 | if path: | ||
| 1054 | legacy_md.write(path, skip_unknown=skip_unknown) | ||
| 1055 | else: | ||
| 1056 | legacy_md.write_file(fileobj, skip_unknown=skip_unknown) | ||
| 1057 | else: | ||
| 1058 | if self._legacy: | ||
| 1059 | d = self._from_legacy() | ||
| 1060 | else: | ||
| 1061 | d = self._data | ||
| 1062 | if fileobj: | ||
| 1063 | json.dump(d, fileobj, ensure_ascii=True, indent=2, | ||
| 1064 | sort_keys=True) | ||
| 1065 | else: | ||
| 1066 | with codecs.open(path, 'w', 'utf-8') as f: | ||
| 1067 | json.dump(d, f, ensure_ascii=True, indent=2, | ||
| 1068 | sort_keys=True) | ||
| 1069 | |||
| 1070 | def add_requirements(self, requirements): | ||
| 1071 | if self._legacy: | ||
| 1072 | self._legacy.add_requirements(requirements) | ||
| 1073 | else: | ||
| 1074 | run_requires = self._data.setdefault('run_requires', []) | ||
| 1075 | always = None | ||
| 1076 | for entry in run_requires: | ||
| 1077 | if 'environment' not in entry and 'extra' not in entry: | ||
| 1078 | always = entry | ||
| 1079 | break | ||
| 1080 | if always is None: | ||
| 1081 | always = {'requires': requirements} | ||
| 1082 | run_requires.insert(0, always) | ||
| 1083 | else: | ||
| 1084 | rset = set(always['requires']) | set(requirements) | ||
| 1085 | always['requires'] = sorted(rset) | ||
| 1086 | |||
| 1087 | def __repr__(self): | ||
| 1088 | name = self.name or '(no name)' | ||
| 1089 | version = self.version or 'no version' | ||
| 1090 | return '<%s %s %s (%s)>' % (self.__class__.__name__, | ||
| 1091 | self.metadata_version, name, version) | ||
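A hypothetical end-to-end use of the class above, assuming the vendored distlib is importable (Metadata's constructor and the mandatory-key validation appear earlier in this file); it writes the same metadata in both the JSON and the legacy key/value formats:

    from pip._vendor.distlib.metadata import Metadata

    md = Metadata()
    md.name = 'example'
    md.version = '0.1'
    md.summary = 'An example distribution'
    md.add_requirements(['requests (>= 2.0)'])
    md.write(path='pydist.json')             # new-style JSON metadata
    md.write(path='METADATA', legacy=True)   # legacy key: value metadata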
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/resources.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/resources.py new file mode 100644 index 0000000..cd618a6 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/resources.py | |||
| @@ -0,0 +1,355 @@ | |||
| 1 | # -*- coding: utf-8 -*- | ||
| 2 | # | ||
| 3 | # Copyright (C) 2013-2017 Vinay Sajip. | ||
| 4 | # Licensed to the Python Software Foundation under a contributor agreement. | ||
| 5 | # See LICENSE.txt and CONTRIBUTORS.txt. | ||
| 6 | # | ||
| 7 | from __future__ import unicode_literals | ||
| 8 | |||
| 9 | import bisect | ||
| 10 | import io | ||
| 11 | import logging | ||
| 12 | import os | ||
| 13 | import pkgutil | ||
| 14 | import shutil | ||
| 15 | import sys | ||
| 16 | import types | ||
| 17 | import zipimport | ||
| 18 | |||
| 19 | from . import DistlibException | ||
| 20 | from .util import cached_property, get_cache_base, path_to_cache_dir, Cache | ||
| 21 | |||
| 22 | logger = logging.getLogger(__name__) | ||
| 23 | |||
| 24 | |||
| 25 | cache = None # created when needed | ||
| 26 | |||
| 27 | |||
| 28 | class ResourceCache(Cache): | ||
| 29 | def __init__(self, base=None): | ||
| 30 | if base is None: | ||
| 31 | # Use native string to avoid issues on 2.x: see Python #20140. | ||
| 32 | base = os.path.join(get_cache_base(), str('resource-cache')) | ||
| 33 | super(ResourceCache, self).__init__(base) | ||
| 34 | |||
| 35 | def is_stale(self, resource, path): | ||
| 36 | """ | ||
| 37 | Is the cache stale for the given resource? | ||
| 38 | |||
| 39 | :param resource: The :class:`Resource` being cached. | ||
| 40 | :param path: The path of the resource in the cache. | ||
| 41 | :return: True if the cache is stale. | ||
| 42 | """ | ||
| 43 | # Cache invalidation is a hard problem :-) | ||
| 44 | return True | ||
| 45 | |||
| 46 | def get(self, resource): | ||
| 47 | """ | ||
| 48 | Get a resource into the cache. | ||
| 49 | |||
| 50 | :param resource: A :class:`Resource` instance. | ||
| 51 | :return: The pathname of the resource in the cache. | ||
| 52 | """ | ||
| 53 | prefix, path = resource.finder.get_cache_info(resource) | ||
| 54 | if prefix is None: | ||
| 55 | result = path | ||
| 56 | else: | ||
| 57 | result = os.path.join(self.base, self.prefix_to_dir(prefix), path) | ||
| 58 | dirname = os.path.dirname(result) | ||
| 59 | if not os.path.isdir(dirname): | ||
| 60 | os.makedirs(dirname) | ||
| 61 | if not os.path.exists(result): | ||
| 62 | stale = True | ||
| 63 | else: | ||
| 64 | stale = self.is_stale(resource, path) | ||
| 65 | if stale: | ||
| 66 | # write the bytes of the resource to the cache location | ||
| 67 | with open(result, 'wb') as f: | ||
| 68 | f.write(resource.bytes) | ||
| 69 | return result | ||
| 70 | |||
| 71 | |||
| 72 | class ResourceBase(object): | ||
| 73 | def __init__(self, finder, name): | ||
| 74 | self.finder = finder | ||
| 75 | self.name = name | ||
| 76 | |||
| 77 | |||
| 78 | class Resource(ResourceBase): | ||
| 79 | """ | ||
| 80 | A class representing an in-package resource, such as a data file. This is | ||
| 81 | not normally instantiated by user code, but rather by a | ||
| 82 | :class:`ResourceFinder` which manages the resource. | ||
| 83 | """ | ||
| 84 | is_container = False # Backwards compatibility | ||
| 85 | |||
| 86 | def as_stream(self): | ||
| 87 | """ | ||
| 88 | Get the resource as a stream. | ||
| 89 | |||
| 90 | This is not a property to make it obvious that it returns a new stream | ||
| 91 | each time. | ||
| 92 | """ | ||
| 93 | return self.finder.get_stream(self) | ||
| 94 | |||
| 95 | @cached_property | ||
| 96 | def file_path(self): | ||
| 97 | global cache | ||
| 98 | if cache is None: | ||
| 99 | cache = ResourceCache() | ||
| 100 | return cache.get(self) | ||
| 101 | |||
| 102 | @cached_property | ||
| 103 | def bytes(self): | ||
| 104 | return self.finder.get_bytes(self) | ||
| 105 | |||
| 106 | @cached_property | ||
| 107 | def size(self): | ||
| 108 | return self.finder.get_size(self) | ||
| 109 | |||
| 110 | |||
| 111 | class ResourceContainer(ResourceBase): | ||
| 112 | is_container = True # Backwards compatibility | ||
| 113 | |||
| 114 | @cached_property | ||
| 115 | def resources(self): | ||
| 116 | return self.finder.get_resources(self) | ||
| 117 | |||
| 118 | |||
| 119 | class ResourceFinder(object): | ||
| 120 | """ | ||
| 121 | Resource finder for file system resources. | ||
| 122 | """ | ||
| 123 | |||
| 124 | if sys.platform.startswith('java'): | ||
| 125 | skipped_extensions = ('.pyc', '.pyo', '.class') | ||
| 126 | else: | ||
| 127 | skipped_extensions = ('.pyc', '.pyo') | ||
| 128 | |||
| 129 | def __init__(self, module): | ||
| 130 | self.module = module | ||
| 131 | self.loader = getattr(module, '__loader__', None) | ||
| 132 | self.base = os.path.dirname(getattr(module, '__file__', '')) | ||
| 133 | |||
| 134 | def _adjust_path(self, path): | ||
| 135 | return os.path.realpath(path) | ||
| 136 | |||
| 137 | def _make_path(self, resource_name): | ||
| 138 | # Issue #50: need to preserve type of path on Python 2.x | ||
| 139 | # like os.path._get_sep | ||
| 140 | if isinstance(resource_name, bytes): # should only happen on 2.x | ||
| 141 | sep = b'/' | ||
| 142 | else: | ||
| 143 | sep = '/' | ||
| 144 | parts = resource_name.split(sep) | ||
| 145 | parts.insert(0, self.base) | ||
| 146 | result = os.path.join(*parts) | ||
| 147 | return self._adjust_path(result) | ||
| 148 | |||
| 149 | def _find(self, path): | ||
| 150 | return os.path.exists(path) | ||
| 151 | |||
| 152 | def get_cache_info(self, resource): | ||
| 153 | return None, resource.path | ||
| 154 | |||
| 155 | def find(self, resource_name): | ||
| 156 | path = self._make_path(resource_name) | ||
| 157 | if not self._find(path): | ||
| 158 | result = None | ||
| 159 | else: | ||
| 160 | if self._is_directory(path): | ||
| 161 | result = ResourceContainer(self, resource_name) | ||
| 162 | else: | ||
| 163 | result = Resource(self, resource_name) | ||
| 164 | result.path = path | ||
| 165 | return result | ||
| 166 | |||
| 167 | def get_stream(self, resource): | ||
| 168 | return open(resource.path, 'rb') | ||
| 169 | |||
| 170 | def get_bytes(self, resource): | ||
| 171 | with open(resource.path, 'rb') as f: | ||
| 172 | return f.read() | ||
| 173 | |||
| 174 | def get_size(self, resource): | ||
| 175 | return os.path.getsize(resource.path) | ||
| 176 | |||
| 177 | def get_resources(self, resource): | ||
| 178 | def allowed(f): | ||
| 179 | return (f != '__pycache__' and not | ||
| 180 | f.endswith(self.skipped_extensions)) | ||
| 181 | return set([f for f in os.listdir(resource.path) if allowed(f)]) | ||
| 182 | |||
| 183 | def is_container(self, resource): | ||
| 184 | return self._is_directory(resource.path) | ||
| 185 | |||
| 186 | _is_directory = staticmethod(os.path.isdir) | ||
| 187 | |||
| 188 | def iterator(self, resource_name): | ||
| 189 | resource = self.find(resource_name) | ||
| 190 | if resource is not None: | ||
| 191 | todo = [resource] | ||
| 192 | while todo: | ||
| 193 | resource = todo.pop(0) | ||
| 194 | yield resource | ||
| 195 | if resource.is_container: | ||
| 196 | rname = resource.name | ||
| 197 | for name in resource.resources: | ||
| 198 | if not rname: | ||
| 199 | new_name = name | ||
| 200 | else: | ||
| 201 | new_name = '/'.join([rname, name]) | ||
| 202 | child = self.find(new_name) | ||
| 203 | if child.is_container: | ||
| 204 | todo.append(child) | ||
| 205 | else: | ||
| 206 | yield child | ||
| 207 | |||
| 208 | |||
| 209 | class ZipResourceFinder(ResourceFinder): | ||
| 210 | """ | ||
| 211 | Resource finder for resources in .zip files. | ||
| 212 | """ | ||
| 213 | def __init__(self, module): | ||
| 214 | super(ZipResourceFinder, self).__init__(module) | ||
| 215 | archive = self.loader.archive | ||
| 216 | self.prefix_len = 1 + len(archive) | ||
| 217 | # PyPy doesn't have a _files attr on zipimporter, and you can't set one | ||
| 218 | if hasattr(self.loader, '_files'): | ||
| 219 | self._files = self.loader._files | ||
| 220 | else: | ||
| 221 | self._files = zipimport._zip_directory_cache[archive] | ||
| 222 | self.index = sorted(self._files) | ||
| 223 | |||
| 224 | def _adjust_path(self, path): | ||
| 225 | return path | ||
| 226 | |||
| 227 | def _find(self, path): | ||
| 228 | path = path[self.prefix_len:] | ||
| 229 | if path in self._files: | ||
| 230 | result = True | ||
| 231 | else: | ||
| 232 | if path and path[-1] != os.sep: | ||
| 233 | path = path + os.sep | ||
| 234 | i = bisect.bisect(self.index, path) | ||
| 235 | try: | ||
| 236 | result = self.index[i].startswith(path) | ||
| 237 | except IndexError: | ||
| 238 | result = False | ||
| 239 | if not result: | ||
| 240 | logger.debug('_find failed: %r %r', path, self.loader.prefix) | ||
| 241 | else: | ||
| 242 | logger.debug('_find worked: %r %r', path, self.loader.prefix) | ||
| 243 | return result | ||
| 244 | |||
| 245 | def get_cache_info(self, resource): | ||
| 246 | prefix = self.loader.archive | ||
| 247 | path = resource.path[1 + len(prefix):] | ||
| 248 | return prefix, path | ||
| 249 | |||
| 250 | def get_bytes(self, resource): | ||
| 251 | return self.loader.get_data(resource.path) | ||
| 252 | |||
| 253 | def get_stream(self, resource): | ||
| 254 | return io.BytesIO(self.get_bytes(resource)) | ||
| 255 | |||
| 256 | def get_size(self, resource): | ||
| 257 | path = resource.path[self.prefix_len:] | ||
| 258 | return self._files[path][3] | ||
| 259 | |||
| 260 | def get_resources(self, resource): | ||
| 261 | path = resource.path[self.prefix_len:] | ||
| 262 | if path and path[-1] != os.sep: | ||
| 263 | path += os.sep | ||
| 264 | plen = len(path) | ||
| 265 | result = set() | ||
| 266 | i = bisect.bisect(self.index, path) | ||
| 267 | while i < len(self.index): | ||
| 268 | if not self.index[i].startswith(path): | ||
| 269 | break | ||
| 270 | s = self.index[i][plen:] | ||
| 271 | result.add(s.split(os.sep, 1)[0]) # only immediate children | ||
| 272 | i += 1 | ||
| 273 | return result | ||
| 274 | |||
| 275 | def _is_directory(self, path): | ||
| 276 | path = path[self.prefix_len:] | ||
| 277 | if path and path[-1] != os.sep: | ||
| 278 | path += os.sep | ||
| 279 | i = bisect.bisect(self.index, path) | ||
| 280 | try: | ||
| 281 | result = self.index[i].startswith(path) | ||
| 282 | except IndexError: | ||
| 283 | result = False | ||
| 284 | return result | ||
| 285 | |||
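The _find() and _is_directory() methods above share one trick: with the archive's member names kept in a sorted list, a name denotes a directory exactly when the entry at the insertion point of 'name/' starts with 'name/'. A standalone illustration (sample paths are made up; real zip entries always use '/'):

    import bisect

    index = sorted(['pkg/__init__.py', 'pkg/data/a.txt', 'pkg/data/b.txt'])

    def is_directory(path):
        if path and not path.endswith('/'):
            path += '/'
        i = bisect.bisect(index, path)
        # a directory has at least one entry sorted right after 'path/'
        return i < len(index) and index[i].startswith(path)

    assert is_directory('pkg/data')
    assert not is_directory('pkg/__init__.py')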
| 286 | _finder_registry = { | ||
| 287 | type(None): ResourceFinder, | ||
| 288 | zipimport.zipimporter: ZipResourceFinder | ||
| 289 | } | ||
| 290 | |||
| 291 | try: | ||
| 292 | # In Python 3.6, _frozen_importlib -> _frozen_importlib_external | ||
| 293 | try: | ||
| 294 | import _frozen_importlib_external as _fi | ||
| 295 | except ImportError: | ||
| 296 | import _frozen_importlib as _fi | ||
| 297 | _finder_registry[_fi.SourceFileLoader] = ResourceFinder | ||
| 298 | _finder_registry[_fi.FileFinder] = ResourceFinder | ||
| 299 | del _fi | ||
| 300 | except (ImportError, AttributeError): | ||
| 301 | pass | ||
| 302 | |||
| 303 | |||
| 304 | def register_finder(loader, finder_maker): | ||
| 305 | _finder_registry[type(loader)] = finder_maker | ||
| 306 | |||
| 307 | _finder_cache = {} | ||
| 308 | |||
| 309 | |||
| 310 | def finder(package): | ||
| 311 | """ | ||
| 312 | Return a resource finder for a package. | ||
| 313 | :param package: The name of the package. | ||
| 314 | :return: A :class:`ResourceFinder` instance for the package. | ||
| 315 | """ | ||
| 316 | if package in _finder_cache: | ||
| 317 | result = _finder_cache[package] | ||
| 318 | else: | ||
| 319 | if package not in sys.modules: | ||
| 320 | __import__(package) | ||
| 321 | module = sys.modules[package] | ||
| 322 | path = getattr(module, '__path__', None) | ||
| 323 | if path is None: | ||
| 324 | raise DistlibException('You cannot get a finder for a module, ' | ||
| 325 | 'only for a package') | ||
| 326 | loader = getattr(module, '__loader__', None) | ||
| 327 | finder_maker = _finder_registry.get(type(loader)) | ||
| 328 | if finder_maker is None: | ||
| 329 | raise DistlibException('Unable to locate finder for %r' % package) | ||
| 330 | result = finder_maker(module) | ||
| 331 | _finder_cache[package] = result | ||
| 332 | return result | ||
| 333 | |||
| 334 | |||
| 335 | _dummy_module = types.ModuleType(str('__dummy__')) | ||
| 336 | |||
| 337 | |||
| 338 | def finder_for_path(path): | ||
| 339 | """ | ||
| 340 | Return a resource finder for a path, which should represent a container. | ||
| 341 | |||
| 342 | :param path: The path. | ||
| 343 | :return: A :class:`ResourceFinder` instance for the path. | ||
| 344 | """ | ||
| 345 | result = None | ||
| 346 | # calls any path hooks, gets importer into cache | ||
| 347 | pkgutil.get_importer(path) | ||
| 348 | loader = sys.path_importer_cache.get(path) | ||
| 349 | finder = _finder_registry.get(type(loader)) | ||
| 350 | if finder: | ||
| 351 | module = _dummy_module | ||
| 352 | module.__file__ = os.path.join(path, '') | ||
| 353 | module.__loader__ = loader | ||
| 354 | result = finder(module) | ||
| 355 | return result | ||
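Hypothetical usage of the public API above; the same calls work whether the named package lives on the file system or inside a zip archive, because finder() picks the ResourceFinder subclass registered for the package's loader:

    from pip._vendor.distlib.resources import finder

    f = finder('pip._vendor.distlib')
    res = f.find('t32.exe')           # a Resource, or None if absent
    if res is not None:
        data = res.bytes              # whole payload as bytes
        with res.as_stream() as s:    # or as a fresh binary stream
            header = s.read(2)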
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/scripts.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/scripts.py new file mode 100644 index 0000000..440bd30 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/scripts.py | |||
| @@ -0,0 +1,415 @@ | |||
| 1 | # -*- coding: utf-8 -*- | ||
| 2 | # | ||
| 3 | # Copyright (C) 2013-2015 Vinay Sajip. | ||
| 4 | # Licensed to the Python Software Foundation under a contributor agreement. | ||
| 5 | # See LICENSE.txt and CONTRIBUTORS.txt. | ||
| 6 | # | ||
| 7 | from io import BytesIO | ||
| 8 | import logging | ||
| 9 | import os | ||
| 10 | import re | ||
| 11 | import struct | ||
| 12 | import sys | ||
| 13 | |||
| 14 | from .compat import sysconfig, detect_encoding, ZipFile | ||
| 15 | from .resources import finder | ||
| 16 | from .util import (FileOperator, get_export_entry, convert_path, | ||
| 17 | get_executable, in_venv) | ||
| 18 | |||
| 19 | logger = logging.getLogger(__name__) | ||
| 20 | |||
| 21 | _DEFAULT_MANIFEST = ''' | ||
| 22 | <?xml version="1.0" encoding="UTF-8" standalone="yes"?> | ||
| 23 | <assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0"> | ||
| 24 | <assemblyIdentity version="1.0.0.0" | ||
| 25 | processorArchitecture="X86" | ||
| 26 | name="%s" | ||
| 27 | type="win32"/> | ||
| 28 | |||
| 29 | <!-- Identify the application security requirements. --> | ||
| 30 | <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3"> | ||
| 31 | <security> | ||
| 32 | <requestedPrivileges> | ||
| 33 | <requestedExecutionLevel level="asInvoker" uiAccess="false"/> | ||
| 34 | </requestedPrivileges> | ||
| 35 | </security> | ||
| 36 | </trustInfo> | ||
| 37 | </assembly>'''.strip() | ||
| 38 | |||
| 39 | # check if Python is called on the first line with this expression | ||
| 40 | FIRST_LINE_RE = re.compile(b'^#!.*pythonw?[0-9.]*([ \t].*)?$') | ||
| 41 | SCRIPT_TEMPLATE = r'''# -*- coding: utf-8 -*- | ||
| 42 | if __name__ == '__main__': | ||
| 43 | import sys, re | ||
| 44 | |||
| 45 | def _resolve(module, func): | ||
| 46 | __import__(module) | ||
| 47 | mod = sys.modules[module] | ||
| 48 | parts = func.split('.') | ||
| 49 | result = getattr(mod, parts.pop(0)) | ||
| 50 | for p in parts: | ||
| 51 | result = getattr(result, p) | ||
| 52 | return result | ||
| 53 | |||
| 54 | try: | ||
| 55 | sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) | ||
| 56 | |||
| 57 | func = _resolve('%(module)s', '%(func)s') | ||
| 58 | rc = func() # None interpreted as 0 | ||
| 59 | except Exception as e: # only supporting Python >= 2.6 | ||
| 60 | sys.stderr.write('%%s\n' %% e) | ||
| 61 | rc = 1 | ||
| 62 | sys.exit(rc) | ||
| 63 | ''' | ||
| 64 | |||
| 65 | |||
| 66 | def _enquote_executable(executable): | ||
| 67 | if ' ' in executable: | ||
| 68 | # make sure we quote only the executable in case of env | ||
| 69 | # for example /usr/bin/env "/dir with spaces/bin/jython" | ||
| 70 | # instead of "/usr/bin/env /dir with spaces/bin/jython" | ||
| 71 | # otherwise the whole command line gets quoted and cannot run | ||
| 72 | if executable.startswith('/usr/bin/env '): | ||
| 73 | env, _executable = executable.split(' ', 1) | ||
| 74 | if ' ' in _executable and not _executable.startswith('"'): | ||
| 75 | executable = '%s "%s"' % (env, _executable) | ||
| 76 | else: | ||
| 77 | if not executable.startswith('"'): | ||
| 78 | executable = '"%s"' % executable | ||
| 79 | return executable | ||
| 80 | |||
| 81 | |||
| 82 | class ScriptMaker(object): | ||
| 83 | """ | ||
| 84 | A class to copy or create scripts from source scripts or callable | ||
| 85 | specifications. | ||
| 86 | """ | ||
| 87 | script_template = SCRIPT_TEMPLATE | ||
| 88 | |||
| 89 | executable = None # for shebangs | ||
| 90 | |||
| 91 | def __init__(self, source_dir, target_dir, add_launchers=True, | ||
| 92 | dry_run=False, fileop=None): | ||
| 93 | self.source_dir = source_dir | ||
| 94 | self.target_dir = target_dir | ||
| 95 | self.add_launchers = add_launchers | ||
| 96 | self.force = False | ||
| 97 | self.clobber = False | ||
| 98 | # It only makes sense to set mode bits on POSIX. | ||
| 99 | self.set_mode = (os.name == 'posix') or (os.name == 'java' and | ||
| 100 | os._name == 'posix') | ||
| 101 | self.variants = set(('', 'X.Y')) | ||
| 102 | self._fileop = fileop or FileOperator(dry_run) | ||
| 103 | |||
| 104 | self._is_nt = os.name == 'nt' or ( | ||
| 105 | os.name == 'java' and os._name == 'nt') | ||
| 106 | |||
| 107 | def _get_alternate_executable(self, executable, options): | ||
| 108 | if options.get('gui', False) and self._is_nt: # pragma: no cover | ||
| 109 | dn, fn = os.path.split(executable) | ||
| 110 | fn = fn.replace('python', 'pythonw') | ||
| 111 | executable = os.path.join(dn, fn) | ||
| 112 | return executable | ||
| 113 | |||
| 114 | if sys.platform.startswith('java'): # pragma: no cover | ||
| 115 | def _is_shell(self, executable): | ||
| 116 | """ | ||
| 117 | Determine if the specified executable is a script | ||
| 118 | (contains a #! line) | ||
| 119 | """ | ||
| 120 | try: | ||
| 121 | with open(executable) as fp: | ||
| 122 | return fp.read(2) == '#!' | ||
| 123 | except (OSError, IOError): | ||
| 124 | logger.warning('Failed to open %s', executable) | ||
| 125 | return False | ||
| 126 | |||
| 127 | def _fix_jython_executable(self, executable): | ||
| 128 | if self._is_shell(executable): | ||
| 129 | # The Jython workaround is not needed on Linux systems. | ||
| 130 | import java | ||
| 131 | |||
| 132 | if java.lang.System.getProperty('os.name') == 'Linux': | ||
| 133 | return executable | ||
| 134 | elif executable.lower().endswith('jython.exe'): | ||
| 135 | # Use wrapper exe for Jython on Windows | ||
| 136 | return executable | ||
| 137 | return '/usr/bin/env %s' % executable | ||
| 138 | |||
| 139 | def _build_shebang(self, executable, post_interp): | ||
| 140 | """ | ||
| 141 | Build a shebang line. In the simple case (on Windows, or a shebang line | ||
| 142 | which is not too long or contains spaces) use a simple formulation for | ||
| 143 | the shebang. Otherwise, use /bin/sh as the executable, with a contrived | ||
| 144 | shebang which allows the script to run either under Python or sh, using | ||
| 145 | suitable quoting. Thanks to Harald Nordgren for his input. | ||
| 146 | |||
| 147 | See also: http://www.in-ulm.de/~mascheck/various/shebang/#length | ||
| 148 | https://hg.mozilla.org/mozilla-central/file/tip/mach | ||
| 149 | """ | ||
| 150 | if os.name != 'posix': | ||
| 151 | simple_shebang = True | ||
| 152 | else: | ||
| 153 | # Add 3 for '#!' prefix and newline suffix. | ||
| 154 | shebang_length = len(executable) + len(post_interp) + 3 | ||
| 155 | if sys.platform == 'darwin': | ||
| 156 | max_shebang_length = 512 | ||
| 157 | else: | ||
| 158 | max_shebang_length = 127 | ||
| 159 | simple_shebang = ((b' ' not in executable) and | ||
| 160 | (shebang_length <= max_shebang_length)) | ||
| 161 | |||
| 162 | if simple_shebang: | ||
| 163 | result = b'#!' + executable + post_interp + b'\n' | ||
| 164 | else: | ||
| 165 | result = b'#!/bin/sh\n' | ||
| 166 | result += b"'''exec' " + executable + post_interp + b' "$0" "$@"\n' | ||
| 167 | result += b"' '''" | ||
| 168 | return result | ||
| 169 | |||
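The fallback branch above is easier to follow once rendered. A short interpreter path yields an ordinary '#!...' line; when the path exceeds the kernel's shebang limit, or contains a space, the header below is produced instead, and it is valid under both sh and Python (the path is an assumed example):

    # #!/bin/sh
    # '''exec' /very/long/prefix/bin/python3 "$0" "$@"
    # ' '''
    #
    # sh: adjacent quoted strings concatenate, so '''exec' reads as the
    # word exec, and the process replaces itself with the interpreter.
    # Python: line 1 is a comment and lines 2-3 form one throwaway
    # triple-quoted string, so execution falls through to the script body.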
| 170 | def _get_shebang(self, encoding, post_interp=b'', options=None): | ||
| 171 | enquote = True | ||
| 172 | if self.executable: | ||
| 173 | executable = self.executable | ||
| 174 | enquote = False # assume this will be taken care of | ||
| 175 | elif not sysconfig.is_python_build(): | ||
| 176 | executable = get_executable() | ||
| 177 | elif in_venv(): # pragma: no cover | ||
| 178 | executable = os.path.join(sysconfig.get_path('scripts'), | ||
| 179 | 'python%s' % sysconfig.get_config_var('EXE')) | ||
| 180 | else: # pragma: no cover | ||
| 181 | executable = os.path.join( | ||
| 182 | sysconfig.get_config_var('BINDIR'), | ||
| 183 | 'python%s%s' % (sysconfig.get_config_var('VERSION'), | ||
| 184 | sysconfig.get_config_var('EXE'))) | ||
| 185 | if options: | ||
| 186 | executable = self._get_alternate_executable(executable, options) | ||
| 187 | |||
| 188 | if sys.platform.startswith('java'): # pragma: no cover | ||
| 189 | executable = self._fix_jython_executable(executable) | ||
| 190 | # Normalise case for Windows | ||
| 191 | executable = os.path.normcase(executable) | ||
| 192 | # If the user didn't specify an executable, it may be necessary to | ||
| 193 | # cater for executable paths with spaces (not uncommon on Windows) | ||
| 194 | if enquote: | ||
| 195 | executable = _enquote_executable(executable) | ||
| 196 | # Issue #51: don't use fsencode, since we later try to | ||
| 197 | # check that the shebang is decodable using utf-8. | ||
| 198 | executable = executable.encode('utf-8') | ||
| 199 | # in case of IronPython, play safe and enable frames support | ||
| 200 | if (sys.platform == 'cli' and '-X:Frames' not in post_interp | ||
| 201 | and '-X:FullFrames' not in post_interp): # pragma: no cover | ||
| 202 | post_interp += b' -X:Frames' | ||
| 203 | shebang = self._build_shebang(executable, post_interp) | ||
| 204 | # Python parser starts to read a script using UTF-8 until | ||
| 205 | # it gets a #coding:xxx cookie. The shebang has to be the | ||
| 206 | # first line of a file, the #coding:xxx cookie cannot be | ||
| 207 | # written before. So the shebang has to be decodable from | ||
| 208 | # UTF-8. | ||
| 209 | try: | ||
| 210 | shebang.decode('utf-8') | ||
| 211 | except UnicodeDecodeError: # pragma: no cover | ||
| 212 | raise ValueError( | ||
| 213 | 'The shebang (%r) is not decodable from utf-8' % shebang) | ||
| 214 | # If the script is encoded to a custom encoding (use a | ||
| 215 | # #coding:xxx cookie), the shebang has to be decodable from | ||
| 216 | # the script encoding too. | ||
| 217 | if encoding != 'utf-8': | ||
| 218 | try: | ||
| 219 | shebang.decode(encoding) | ||
| 220 | except UnicodeDecodeError: # pragma: no cover | ||
| 221 | raise ValueError( | ||
| 222 | 'The shebang (%r) is not decodable ' | ||
| 223 | 'from the script encoding (%r)' % (shebang, encoding)) | ||
| 224 | return shebang | ||
| 225 | |||
| 226 | def _get_script_text(self, entry): | ||
| 227 | return self.script_template % dict(module=entry.prefix, | ||
| 228 | func=entry.suffix) | ||
| 229 | |||
| 230 | manifest = _DEFAULT_MANIFEST | ||
| 231 | |||
| 232 | def get_manifest(self, exename): | ||
| 233 | base = os.path.basename(exename) | ||
| 234 | return self.manifest % base | ||
| 235 | |||
| 236 | def _write_script(self, names, shebang, script_bytes, filenames, ext): | ||
| 237 | use_launcher = self.add_launchers and self._is_nt | ||
| 238 | linesep = os.linesep.encode('utf-8') | ||
| 239 | if not use_launcher: | ||
| 240 | script_bytes = shebang + linesep + script_bytes | ||
| 241 | else: # pragma: no cover | ||
| 242 | if ext == 'py': | ||
| 243 | launcher = self._get_launcher('t') | ||
| 244 | else: | ||
| 245 | launcher = self._get_launcher('w') | ||
| 246 | stream = BytesIO() | ||
| 247 | with ZipFile(stream, 'w') as zf: | ||
| 248 | zf.writestr('__main__.py', script_bytes) | ||
| 249 | zip_data = stream.getvalue() | ||
| 250 | script_bytes = launcher + shebang + linesep + zip_data | ||
| 251 | for name in names: | ||
| 252 | outname = os.path.join(self.target_dir, name) | ||
| 253 | if use_launcher: # pragma: no cover | ||
| 254 | n, e = os.path.splitext(outname) | ||
| 255 | if e.startswith('.py'): | ||
| 256 | outname = n | ||
| 257 | outname = '%s.exe' % outname | ||
| 258 | try: | ||
| 259 | self._fileop.write_binary_file(outname, script_bytes) | ||
| 260 | except Exception: | ||
| 261 | # Failed writing an executable - it might be in use. | ||
| 262 | logger.warning('Failed to write executable - trying to ' | ||
| 263 | 'use .deleteme logic') | ||
| 264 | dfname = '%s.deleteme' % outname | ||
| 265 | if os.path.exists(dfname): | ||
| 266 | os.remove(dfname) # Not allowed to fail here | ||
| 267 | os.rename(outname, dfname) # nor here | ||
| 268 | self._fileop.write_binary_file(outname, script_bytes) | ||
| 269 | logger.debug('Able to replace executable using ' | ||
| 270 | '.deleteme logic') | ||
| 271 | try: | ||
| 272 | os.remove(dfname) | ||
| 273 | except Exception: | ||
| 274 | pass # still in use - ignore error | ||
| 275 | else: | ||
| 276 | if self._is_nt and not outname.endswith('.' + ext): # pragma: no cover | ||
| 277 | outname = '%s.%s' % (outname, ext) | ||
| 278 | if os.path.exists(outname) and not self.clobber: | ||
| 279 | logger.warning('Skipping existing file %s', outname) | ||
| 280 | continue | ||
| 281 | self._fileop.write_binary_file(outname, script_bytes) | ||
| 282 | if self.set_mode: | ||
| 283 | self._fileop.set_executable_mode([outname]) | ||
| 284 | filenames.append(outname) | ||
| 285 | |||
| 286 | def _make_script(self, entry, filenames, options=None): | ||
| 287 | post_interp = b'' | ||
| 288 | if options: | ||
| 289 | args = options.get('interpreter_args', []) | ||
| 290 | if args: | ||
| 291 | args = ' %s' % ' '.join(args) | ||
| 292 | post_interp = args.encode('utf-8') | ||
| 293 | shebang = self._get_shebang('utf-8', post_interp, options=options) | ||
| 294 | script = self._get_script_text(entry).encode('utf-8') | ||
| 295 | name = entry.name | ||
| 296 | scriptnames = set() | ||
| 297 | if '' in self.variants: | ||
| 298 | scriptnames.add(name) | ||
| 299 | if 'X' in self.variants: | ||
| 300 | scriptnames.add('%s%s' % (name, sys.version[0])) | ||
| 301 | if 'X.Y' in self.variants: | ||
| 302 | scriptnames.add('%s-%s' % (name, sys.version[:3])) | ||
| 303 | if options and options.get('gui', False): | ||
| 304 | ext = 'pyw' | ||
| 305 | else: | ||
| 306 | ext = 'py' | ||
| 307 | self._write_script(scriptnames, shebang, script, filenames, ext) | ||
| 308 | |||
| 309 | def _copy_script(self, script, filenames): | ||
| 310 | adjust = False | ||
| 311 | script = os.path.join(self.source_dir, convert_path(script)) | ||
| 312 | outname = os.path.join(self.target_dir, os.path.basename(script)) | ||
| 313 | if not self.force and not self._fileop.newer(script, outname): | ||
| 314 | logger.debug('not copying %s (up-to-date)', script) | ||
| 315 | return | ||
| 316 | |||
| 317 | # Always open the file, but ignore failures in dry-run mode -- | ||
| 318 | # that way, we'll get accurate feedback if we can read the | ||
| 319 | # script. | ||
| 320 | try: | ||
| 321 | f = open(script, 'rb') | ||
| 322 | except IOError: # pragma: no cover | ||
| 323 | if not self.dry_run: | ||
| 324 | raise | ||
| 325 | f = None | ||
| 326 | else: | ||
| 327 | first_line = f.readline() | ||
| 328 | if not first_line: # pragma: no cover | ||
| 329 | # note: ScriptMaker defines no get_command_name(), so log just the script | ||
| 330 | logger.warning('%s is an empty file (skipping)', script) | ||
| 331 | return | ||
| 332 | |||
| 333 | match = FIRST_LINE_RE.match(first_line.replace(b'\r\n', b'\n')) | ||
| 334 | if match: | ||
| 335 | adjust = True | ||
| 336 | post_interp = match.group(1) or b'' | ||
| 337 | |||
| 338 | if not adjust: | ||
| 339 | if f: | ||
| 340 | f.close() | ||
| 341 | self._fileop.copy_file(script, outname) | ||
| 342 | if self.set_mode: | ||
| 343 | self._fileop.set_executable_mode([outname]) | ||
| 344 | filenames.append(outname) | ||
| 345 | else: | ||
| 346 | logger.info('copying and adjusting %s -> %s', script, | ||
| 347 | self.target_dir) | ||
| 348 | if not self._fileop.dry_run: | ||
| 349 | encoding, lines = detect_encoding(f.readline) | ||
| 350 | f.seek(0) | ||
| 351 | shebang = self._get_shebang(encoding, post_interp) | ||
| 352 | if b'pythonw' in first_line: # pragma: no cover | ||
| 353 | ext = 'pyw' | ||
| 354 | else: | ||
| 355 | ext = 'py' | ||
| 356 | n = os.path.basename(outname) | ||
| 357 | self._write_script([n], shebang, f.read(), filenames, ext) | ||
| 358 | if f: | ||
| 359 | f.close() | ||
| 360 | |||
| 361 | @property | ||
| 362 | def dry_run(self): | ||
| 363 | return self._fileop.dry_run | ||
| 364 | |||
| 365 | @dry_run.setter | ||
| 366 | def dry_run(self, value): | ||
| 367 | self._fileop.dry_run = value | ||
| 368 | |||
| 369 | if os.name == 'nt' or (os.name == 'java' and os._name == 'nt'): # pragma: no cover | ||
| 370 | # Executable launcher support. | ||
| 371 | # Launchers are from https://bitbucket.org/vinay.sajip/simple_launcher/ | ||
| 372 | |||
| 373 | def _get_launcher(self, kind): | ||
| 374 | if struct.calcsize('P') == 8: # 64-bit | ||
| 375 | bits = '64' | ||
| 376 | else: | ||
| 377 | bits = '32' | ||
| 378 | name = '%s%s.exe' % (kind, bits) | ||
| 379 | # Issue 31: don't hardcode an absolute package name, but | ||
| 380 | # determine it relative to the current package | ||
| 381 | distlib_package = __name__.rsplit('.', 1)[0] | ||
| 382 | result = finder(distlib_package).find(name).bytes | ||
| 383 | return result | ||
| 384 | |||
| 385 | # Public API follows | ||
| 386 | |||
| 387 | def make(self, specification, options=None): | ||
| 388 | """ | ||
| 389 | Make a script. | ||
| 390 | |||
| 391 | :param specification: The specification, which is either a valid export | ||
| 392 | entry specification (to make a script from a | ||
| 393 | callable) or a filename (to make a script by | ||
| 394 | copying from a source location). | ||
| 395 | :param options: A dictionary of options controlling script generation. | ||
| 396 | :return: A list of all absolute pathnames written to. | ||
| 397 | """ | ||
| 398 | filenames = [] | ||
| 399 | entry = get_export_entry(specification) | ||
| 400 | if entry is None: | ||
| 401 | self._copy_script(specification, filenames) | ||
| 402 | else: | ||
| 403 | self._make_script(entry, filenames, options=options) | ||
| 404 | return filenames | ||
| 405 | |||
| 406 | def make_multiple(self, specifications, options=None): | ||
| 407 | """ | ||
| 408 | Take a list of specifications and make scripts from them. | ||
| 409 | :param specifications: A list of specifications. | ||
| 410 | :return: A list of all absolute pathnames written to. | ||
| 411 | """ | ||
| 412 | filenames = [] | ||
| 413 | for specification in specifications: | ||
| 414 | filenames.extend(self.make(specification, options)) | ||
| 415 | return filenames | ||
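Hypothetical usage of the public API above: given an export-entry specification, make() generates a wrapper script (plus a .exe launcher on Windows when add_launchers is set); the module, function, and target directory below are examples only:

    from pip._vendor.distlib.scripts import ScriptMaker

    maker = ScriptMaker(source_dir=None, target_dir='/tmp/bin')
    maker.variants = {''}              # just 'greet', no 'greet-X.Y' variant
    written = maker.make('greet = mypkg.cli:main')
    print(written)                     # absolute paths of files written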
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/t32.exe b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/t32.exe new file mode 100755 index 0000000..a09d926 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/t32.exe | |||
| Binary files differ | |||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/t64.exe b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/t64.exe new file mode 100755 index 0000000..9da9b40 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/t64.exe | |||
| Binary files differ | |||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/util.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/util.py new file mode 100644 index 0000000..b1d3f90 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/util.py | |||
| @@ -0,0 +1,1755 @@ | |||
| 1 | # | ||
| 2 | # Copyright (C) 2012-2017 The Python Software Foundation. | ||
| 3 | # See LICENSE.txt and CONTRIBUTORS.txt. | ||
| 4 | # | ||
| 5 | import codecs | ||
| 6 | from collections import deque | ||
| 7 | import contextlib | ||
| 8 | import csv | ||
| 9 | from glob import iglob as std_iglob | ||
| 10 | import io | ||
| 11 | import json | ||
| 12 | import logging | ||
| 13 | import os | ||
| 14 | import py_compile | ||
| 15 | import re | ||
| 16 | import socket | ||
| 17 | try: | ||
| 18 | import ssl | ||
| 19 | except ImportError: # pragma: no cover | ||
| 20 | ssl = None | ||
| 21 | import subprocess | ||
| 22 | import sys | ||
| 23 | import tarfile | ||
| 24 | import tempfile | ||
| 25 | import textwrap | ||
| 26 | |||
| 27 | try: | ||
| 28 | import threading | ||
| 29 | except ImportError: # pragma: no cover | ||
| 30 | import dummy_threading as threading | ||
| 31 | import time | ||
| 32 | |||
| 33 | from . import DistlibException | ||
| 34 | from .compat import (string_types, text_type, shutil, raw_input, StringIO, | ||
| 35 | cache_from_source, urlopen, urljoin, httplib, xmlrpclib, | ||
| 36 | splittype, HTTPHandler, BaseConfigurator, valid_ident, | ||
| 37 | Container, configparser, URLError, ZipFile, fsdecode, | ||
| 38 | unquote, urlparse) | ||
| 39 | |||
| 40 | logger = logging.getLogger(__name__) | ||
| 41 | |||
| 42 | # | ||
| 43 | # Requirement parsing code as per PEP 508 | ||
| 44 | # | ||
| 45 | |||
| 46 | IDENTIFIER = re.compile(r'^([\w\.-]+)\s*') | ||
| 47 | VERSION_IDENTIFIER = re.compile(r'^([\w\.*+-]+)\s*') | ||
| 48 | COMPARE_OP = re.compile(r'^(<=?|>=?|={2,3}|[~!]=)\s*') | ||
| 49 | MARKER_OP = re.compile(r'^((<=?)|(>=?)|={2,3}|[~!]=|in|not\s+in)\s*') | ||
| 50 | OR = re.compile(r'^or\b\s*') | ||
| 51 | AND = re.compile(r'^and\b\s*') | ||
| 52 | NON_SPACE = re.compile(r'(\S+)\s*') | ||
| 53 | STRING_CHUNK = re.compile(r'([\s\w\.{}()*+#:;,/?!~`@$%^&=|<>\[\]-]+)') | ||
| 54 | |||
| 55 | |||
| 56 | def parse_marker(marker_string): | ||
| 57 | """ | ||
| 58 | Parse a marker string and return an (expression, remaining) tuple, where | ||
| 59 | the expression is a dictionary and remaining is any unparsed trailing text. | ||
| 60 | The dictionary will contain keys "op", "lhs" and "rhs" for non-terminals in | ||
| 61 | the expression grammar, or strings. A string contained in quotes is to be | ||
| 62 | interpreted as a literal string, and a string not contained in quotes is a | ||
| 63 | variable (such as os_name). | ||
| 64 | """ | ||
| 65 | def marker_var(remaining): | ||
| 66 | # either identifier, or literal string | ||
| 67 | m = IDENTIFIER.match(remaining) | ||
| 68 | if m: | ||
| 69 | result = m.groups()[0] | ||
| 70 | remaining = remaining[m.end():] | ||
| 71 | elif not remaining: | ||
| 72 | raise SyntaxError('unexpected end of input') | ||
| 73 | else: | ||
| 74 | q = remaining[0] | ||
| 75 | if q not in '\'"': | ||
| 76 | raise SyntaxError('invalid expression: %s' % remaining) | ||
| 77 | oq = '\'"'.replace(q, '') | ||
| 78 | remaining = remaining[1:] | ||
| 79 | parts = [q] | ||
| 80 | while remaining: | ||
| 81 | # either a string chunk, or oq, or q to terminate | ||
| 82 | if remaining[0] == q: | ||
| 83 | break | ||
| 84 | elif remaining[0] == oq: | ||
| 85 | parts.append(oq) | ||
| 86 | remaining = remaining[1:] | ||
| 87 | else: | ||
| 88 | m = STRING_CHUNK.match(remaining) | ||
| 89 | if not m: | ||
| 90 | raise SyntaxError('error in string literal: %s' % remaining) | ||
| 91 | parts.append(m.groups()[0]) | ||
| 92 | remaining = remaining[m.end():] | ||
| 93 | else: | ||
| 94 | s = ''.join(parts) | ||
| 95 | raise SyntaxError('unterminated string: %s' % s) | ||
| 96 | parts.append(q) | ||
| 97 | result = ''.join(parts) | ||
| 98 | remaining = remaining[1:].lstrip() # skip past closing quote | ||
| 99 | return result, remaining | ||
| 100 | |||
| 101 | def marker_expr(remaining): | ||
| 102 | if remaining and remaining[0] == '(': | ||
| 103 | result, remaining = marker(remaining[1:].lstrip()) | ||
| 104 | if remaining[0] != ')': | ||
| 105 | raise SyntaxError('unterminated parenthesis: %s' % remaining) | ||
| 106 | remaining = remaining[1:].lstrip() | ||
| 107 | else: | ||
| 108 | lhs, remaining = marker_var(remaining) | ||
| 109 | while remaining: | ||
| 110 | m = MARKER_OP.match(remaining) | ||
| 111 | if not m: | ||
| 112 | break | ||
| 113 | op = m.groups()[0] | ||
| 114 | remaining = remaining[m.end():] | ||
| 115 | rhs, remaining = marker_var(remaining) | ||
| 116 | lhs = {'op': op, 'lhs': lhs, 'rhs': rhs} | ||
| 117 | result = lhs | ||
| 118 | return result, remaining | ||
| 119 | |||
| 120 | def marker_and(remaining): | ||
| 121 | lhs, remaining = marker_expr(remaining) | ||
| 122 | while remaining: | ||
| 123 | m = AND.match(remaining) | ||
| 124 | if not m: | ||
| 125 | break | ||
| 126 | remaining = remaining[m.end():] | ||
| 127 | rhs, remaining = marker_expr(remaining) | ||
| 128 | lhs = {'op': 'and', 'lhs': lhs, 'rhs': rhs} | ||
| 129 | return lhs, remaining | ||
| 130 | |||
| 131 | def marker(remaining): | ||
| 132 | lhs, remaining = marker_and(remaining) | ||
| 133 | while remaining: | ||
| 134 | m = OR.match(remaining) | ||
| 135 | if not m: | ||
| 136 | break | ||
| 137 | remaining = remaining[m.end():] | ||
| 138 | rhs, remaining = marker_and(remaining) | ||
| 139 | lhs = {'op': 'or', 'lhs': lhs, 'rhs': rhs} | ||
| 140 | return lhs, remaining | ||
| 141 | |||
| 142 | return marker(marker_string) | ||
| 143 | |||
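For example, the function above turns a two-clause marker into a nested dict and returns it with any unconsumed input (quoted strings keep their quotes to mark literals):

    tree, rest = parse_marker('python_version >= "3.4" and os_name == "posix"')
    # tree == {'op': 'and',
    #          'lhs': {'op': '>=', 'lhs': 'python_version', 'rhs': '"3.4"'},
    #          'rhs': {'op': '==', 'lhs': 'os_name', 'rhs': '"posix"'}}
    # rest == ''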
| 144 | |||
| 145 | def parse_requirement(req): | ||
| 146 | """ | ||
| 147 | Parse a requirement passed in as a string. Return a Container | ||
| 148 | whose attributes contain the various parts of the requirement. | ||
| 149 | """ | ||
| 150 | remaining = req.strip() | ||
| 151 | if not remaining or remaining.startswith('#'): | ||
| 152 | return None | ||
| 153 | m = IDENTIFIER.match(remaining) | ||
| 154 | if not m: | ||
| 155 | raise SyntaxError('name expected: %s' % remaining) | ||
| 156 | distname = m.groups()[0] | ||
| 157 | remaining = remaining[m.end():] | ||
| 158 | extras = mark_expr = versions = uri = None | ||
| 159 | if remaining and remaining[0] == '[': | ||
| 160 | i = remaining.find(']', 1) | ||
| 161 | if i < 0: | ||
| 162 | raise SyntaxError('unterminated extra: %s' % remaining) | ||
| 163 | s = remaining[1:i] | ||
| 164 | remaining = remaining[i + 1:].lstrip() | ||
| 165 | extras = [] | ||
| 166 | while s: | ||
| 167 | m = IDENTIFIER.match(s) | ||
| 168 | if not m: | ||
| 169 | raise SyntaxError('malformed extra: %s' % s) | ||
| 170 | extras.append(m.groups()[0]) | ||
| 171 | s = s[m.end():] | ||
| 172 | if not s: | ||
| 173 | break | ||
| 174 | if s[0] != ',': | ||
| 175 | raise SyntaxError('comma expected in extras: %s' % s) | ||
| 176 | s = s[1:].lstrip() | ||
| 177 | if not extras: | ||
| 178 | extras = None | ||
| 179 | if remaining: | ||
| 180 | if remaining[0] == '@': | ||
| 181 | # it's a URI | ||
| 182 | remaining = remaining[1:].lstrip() | ||
| 183 | m = NON_SPACE.match(remaining) | ||
| 184 | if not m: | ||
| 185 | raise SyntaxError('invalid URI: %s' % remaining) | ||
| 186 | uri = m.groups()[0] | ||
| 187 | t = urlparse(uri) | ||
| 188 | # there are issues with Python and URL parsing, so this test | ||
| 189 | # is a bit crude. See bpo-20271, bpo-23505. Python doesn't | ||
| 190 | # always parse invalid URLs correctly - it should raise | ||
| 191 | # exceptions for malformed URLs | ||
| 192 | if not (t.scheme and t.netloc): | ||
| 193 | raise SyntaxError('Invalid URL: %s' % uri) | ||
| 194 | remaining = remaining[m.end():].lstrip() | ||
| 195 | else: | ||
| 196 | |||
| 197 | def get_versions(ver_remaining): | ||
| 198 | """ | ||
| 199 | Return a list of (operator, version) tuples if any are | ||
| 200 | specified, else None. | ||
| 201 | """ | ||
| 202 | m = COMPARE_OP.match(ver_remaining) | ||
| 203 | versions = None | ||
| 204 | if m: | ||
| 205 | versions = [] | ||
| 206 | while True: | ||
| 207 | op = m.groups()[0] | ||
| 208 | ver_remaining = ver_remaining[m.end():] | ||
| 209 | m = VERSION_IDENTIFIER.match(ver_remaining) | ||
| 210 | if not m: | ||
| 211 | raise SyntaxError('invalid version: %s' % ver_remaining) | ||
| 212 | v = m.groups()[0] | ||
| 213 | versions.append((op, v)) | ||
| 214 | ver_remaining = ver_remaining[m.end():] | ||
| 215 | if not ver_remaining or ver_remaining[0] != ',': | ||
| 216 | break | ||
| 217 | ver_remaining = ver_remaining[1:].lstrip() | ||
| 218 | m = COMPARE_OP.match(ver_remaining) | ||
| 219 | if not m: | ||
| 220 | raise SyntaxError('invalid constraint: %s' % ver_remaining) | ||
| 221 | if not versions: | ||
| 222 | versions = None | ||
| 223 | return versions, ver_remaining | ||
| 224 | |||
| 225 | if remaining[0] != '(': | ||
| 226 | versions, remaining = get_versions(remaining) | ||
| 227 | else: | ||
| 228 | i = remaining.find(')', 1) | ||
| 229 | if i < 0: | ||
| 230 | raise SyntaxError('unterminated parenthesis: %s' % remaining) | ||
| 231 | s = remaining[1:i] | ||
| 232 | remaining = remaining[i + 1:].lstrip() | ||
| 233 | # As a special deviation from PEP 508, allow a version number | ||
| 234 | # a.b.c in parentheses as a synonym for ~= a.b.c (because this | ||
| 235 | # is allowed in earlier PEPs) | ||
| 236 | if COMPARE_OP.match(s): | ||
| 237 | versions, _ = get_versions(s) | ||
| 238 | else: | ||
| 239 | m = VERSION_IDENTIFIER.match(s) | ||
| 240 | if not m: | ||
| 241 | raise SyntaxError('invalid constraint: %s' % s) | ||
| 242 | v = m.groups()[0] | ||
| 243 | s = s[m.end():].lstrip() | ||
| 244 | if s: | ||
| 245 | raise SyntaxError('invalid constraint: %s' % s) | ||
| 246 | versions = [('~=', v)] | ||
| 247 | |||
| 248 | if remaining: | ||
| 249 | if remaining[0] != ';': | ||
| 250 | raise SyntaxError('invalid requirement: %s' % remaining) | ||
| 251 | remaining = remaining[1:].lstrip() | ||
| 252 | |||
| 253 | mark_expr, remaining = parse_marker(remaining) | ||
| 254 | |||
| 255 | if remaining and remaining[0] != '#': | ||
| 256 | raise SyntaxError('unexpected trailing data: %s' % remaining) | ||
| 257 | |||
| 258 | if not versions: | ||
| 259 | rs = distname | ||
| 260 | else: | ||
| 261 | rs = '%s %s' % (distname, ', '.join(['%s %s' % con for con in versions])) | ||
| 262 | return Container(name=distname, extras=extras, constraints=versions, | ||
| 263 | marker=mark_expr, url=uri, requirement=rs) | ||
| 264 | |||
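For example, a requirement with an extra, two version constraints and a marker parses into the following Container fields:

    r = parse_requirement('foo[bar] >= 1.0, < 2.0; os_name == "posix"')
    # r.name        -> 'foo'
    # r.extras      -> ['bar']
    # r.constraints -> [('>=', '1.0'), ('<', '2.0')]
    # r.marker      -> {'op': '==', 'lhs': 'os_name', 'rhs': '"posix"'}
    # r.url         -> None
    # r.requirement -> 'foo >= 1.0, < 2.0'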
| 265 | |||
| 266 | def get_resources_dests(resources_root, rules): | ||
| 267 | """Find destinations for resources files""" | ||
| 268 | |||
| 269 | def get_rel_path(root, path): | ||
| 270 | # normalizes and returns a lstripped-/-separated path | ||
| 271 | root = root.replace(os.path.sep, '/') | ||
| 272 | path = path.replace(os.path.sep, '/') | ||
| 273 | assert path.startswith(root) | ||
| 274 | return path[len(root):].lstrip('/') | ||
| 275 | |||
| 276 | destinations = {} | ||
| 277 | for base, suffix, dest in rules: | ||
| 278 | prefix = os.path.join(resources_root, base) | ||
| 279 | for abs_base in iglob(prefix): | ||
| 280 | abs_glob = os.path.join(abs_base, suffix) | ||
| 281 | for abs_path in iglob(abs_glob): | ||
| 282 | resource_file = get_rel_path(resources_root, abs_path) | ||
| 283 | if dest is None: # remove the entry if it was here | ||
| 284 | destinations.pop(resource_file, None) | ||
| 285 | else: | ||
| 286 | rel_path = get_rel_path(abs_base, abs_path) | ||
| 287 | rel_dest = dest.replace(os.path.sep, '/').rstrip('/') | ||
| 288 | destinations[resource_file] = rel_dest + '/' + rel_path | ||
| 289 | return destinations | ||
| 290 | |||
| 291 | |||
| 292 | def in_venv(): | ||
| 293 | if hasattr(sys, 'real_prefix'): | ||
| 294 | # virtualenv venvs | ||
| 295 | result = True | ||
| 296 | else: | ||
| 297 | # PEP 405 venvs | ||
| 298 | result = sys.prefix != getattr(sys, 'base_prefix', sys.prefix) | ||
| 299 | return result | ||
| 300 | |||
| 301 | |||
| 302 | def get_executable(): | ||
| 303 | # The __PYVENV_LAUNCHER__ dance is apparently no longer needed, as | ||
| 304 | # changes to the stub launcher mean that sys.executable always points | ||
| 305 | # to the stub on OS X | ||
| 306 | # if sys.platform == 'darwin' and ('__PYVENV_LAUNCHER__' | ||
| 307 | # in os.environ): | ||
| 308 | # result = os.environ['__PYVENV_LAUNCHER__'] | ||
| 309 | # else: | ||
| 310 | # result = sys.executable | ||
| 311 | # return result | ||
| 312 | result = os.path.normcase(sys.executable) | ||
| 313 | if not isinstance(result, text_type): | ||
| 314 | result = fsdecode(result) | ||
| 315 | return result | ||
| 316 | |||
| 317 | |||
| 318 | def proceed(prompt, allowed_chars, error_prompt=None, default=None): | ||
| 319 | p = prompt | ||
| 320 | while True: | ||
| 321 | s = raw_input(p) | ||
| 322 | p = prompt | ||
| 323 | if not s and default: | ||
| 324 | s = default | ||
| 325 | if s: | ||
| 326 | c = s[0].lower() | ||
| 327 | if c in allowed_chars: | ||
| 328 | break | ||
| 329 | if error_prompt: | ||
| 330 | p = '%c: %s\n%s' % (c, error_prompt, prompt) | ||
| 331 | return c | ||
| 332 | |||
| 333 | |||
| 334 | def extract_by_key(d, keys): | ||
| 335 | if isinstance(keys, string_types): | ||
| 336 | keys = keys.split() | ||
| 337 | result = {} | ||
| 338 | for key in keys: | ||
| 339 | if key in d: | ||
| 340 | result[key] = d[key] | ||
| 341 | return result | ||
| 342 | |||
| 343 | def read_exports(stream): | ||
| 344 | if sys.version_info[0] >= 3: | ||
| 345 | # needs to be a text stream | ||
| 346 | stream = codecs.getreader('utf-8')(stream) | ||
| 347 | # Try to load as JSON, falling back on legacy format | ||
| 348 | data = stream.read() | ||
| 349 | stream = StringIO(data) | ||
| 350 | try: | ||
| 351 | jdata = json.load(stream) | ||
| 352 | result = jdata['extensions']['python.exports']['exports'] | ||
| 353 | for group, entries in result.items(): | ||
| 354 | for k, v in entries.items(): | ||
| 355 | s = '%s = %s' % (k, v) | ||
| 356 | entry = get_export_entry(s) | ||
| 357 | assert entry is not None | ||
| 358 | entries[k] = entry | ||
| 359 | return result | ||
| 360 | except Exception: | ||
| 361 | stream.seek(0, 0) | ||
| 362 | |||
| 363 | def read_stream(cp, stream): | ||
| 364 | if hasattr(cp, 'read_file'): | ||
| 365 | cp.read_file(stream) | ||
| 366 | else: | ||
| 367 | cp.readfp(stream) | ||
| 368 | |||
| 369 | cp = configparser.ConfigParser() | ||
| 370 | try: | ||
| 371 | read_stream(cp, stream) | ||
| 372 | except configparser.MissingSectionHeaderError: | ||
| 373 | stream.close() | ||
| 374 | data = textwrap.dedent(data) | ||
| 375 | stream = StringIO(data) | ||
| 376 | read_stream(cp, stream) | ||
| 377 | |||
| 378 | result = {} | ||
| 379 | for key in cp.sections(): | ||
| 380 | result[key] = entries = {} | ||
| 381 | for name, value in cp.items(key): | ||
| 382 | s = '%s = %s' % (name, value) | ||
| 383 | entry = get_export_entry(s) | ||
| 384 | assert entry is not None | ||
| 385 | #entry.dist = self | ||
| 386 | entries[name] = entry | ||
| 387 | return result | ||
| 388 | |||
| 389 | |||
| 390 | def write_exports(exports, stream): | ||
| 391 | if sys.version_info[0] >= 3: | ||
| 392 | # needs to be a text stream | ||
| 393 | stream = codecs.getwriter('utf-8')(stream) | ||
| 394 | cp = configparser.ConfigParser() | ||
| 395 | for k, v in exports.items(): | ||
| 396 | # TODO check k, v for valid values | ||
| 397 | cp.add_section(k) | ||
| 398 | for entry in v.values(): | ||
| 399 | if entry.suffix is None: | ||
| 400 | s = entry.prefix | ||
| 401 | else: | ||
| 402 | s = '%s:%s' % (entry.prefix, entry.suffix) | ||
| 403 | if entry.flags: | ||
| 404 | s = '%s [%s]' % (s, ', '.join(entry.flags)) | ||
| 405 | cp.set(k, entry.name, s) | ||
| 406 | cp.write(stream) | ||
| 407 | |||
| 408 | |||
| 409 | @contextlib.contextmanager | ||
| 410 | def tempdir(): | ||
| 411 | td = tempfile.mkdtemp() | ||
| 412 | try: | ||
| 413 | yield td | ||
| 414 | finally: | ||
| 415 | shutil.rmtree(td) | ||
| 416 | |||
| 417 | @contextlib.contextmanager | ||
| 418 | def chdir(d): | ||
| 419 | cwd = os.getcwd() | ||
| 420 | try: | ||
| 421 | os.chdir(d) | ||
| 422 | yield | ||
| 423 | finally: | ||
| 424 | os.chdir(cwd) | ||
| 425 | |||
| 426 | |||
| 427 | @contextlib.contextmanager | ||
| 428 | def socket_timeout(seconds=15): | ||
| 429 | cto = socket.getdefaulttimeout() | ||
| 430 | try: | ||
| 431 | socket.setdefaulttimeout(seconds) | ||
| 432 | yield | ||
| 433 | finally: | ||
| 434 | socket.setdefaulttimeout(cto) | ||
| 435 | |||
| 436 | |||
| 437 | class cached_property(object): | ||
| 438 | def __init__(self, func): | ||
| 439 | self.func = func | ||
| 440 | #for attr in ('__name__', '__module__', '__doc__'): | ||
| 441 | # setattr(self, attr, getattr(func, attr, None)) | ||
| 442 | |||
| 443 | def __get__(self, obj, cls=None): | ||
| 444 | if obj is None: | ||
| 445 | return self | ||
| 446 | value = self.func(obj) | ||
| 447 | object.__setattr__(obj, self.func.__name__, value) | ||
| 448 | #obj.__dict__[self.func.__name__] = value = self.func(obj) | ||
| 449 | return value | ||
| 450 | |||
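| | # Editor's sketch, not part of distlib: the descriptor computes once, then | ||
| | # shadows itself with an instance attribute, so later reads skip the call. | ||
| | def _demo_cached_property(): | ||
| |     class Thing(object): | ||
| |         calls = 0 | ||
| |         @cached_property | ||
| |         def answer(self): | ||
| |             Thing.calls += 1 | ||
| |             return 42 | ||
| |     t = Thing() | ||
| |     assert (t.answer, t.answer, Thing.calls) == (42, 42, 1) | ||
| | |||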
| 451 | def convert_path(pathname): | ||
| 452 | """Return 'pathname' as a name that will work on the native filesystem. | ||
| 453 | |||
| 454 | The path is split on '/' and put back together again using the current | ||
| 455 | directory separator. Needed because filenames in the setup script are | ||
| 456 | always supplied in Unix style, and have to be converted to the local | ||
| 457 | convention before we can actually use them in the filesystem. Raises | ||
| 458 | ValueError on non-Unix-ish systems if 'pathname' either starts or | ||
| 459 | ends with a slash. | ||
| 460 | """ | ||
| 461 | if os.sep == '/': | ||
| 462 | return pathname | ||
| 463 | if not pathname: | ||
| 464 | return pathname | ||
| 465 | if pathname[0] == '/': | ||
| 466 | raise ValueError("path '%s' cannot be absolute" % pathname) | ||
| 467 | if pathname[-1] == '/': | ||
| 468 | raise ValueError("path '%s' cannot end with '/'" % pathname) | ||
| 469 | |||
| 470 | paths = pathname.split('/') | ||
| 471 | while os.curdir in paths: | ||
| 472 | paths.remove(os.curdir) | ||
| 473 | if not paths: | ||
| 474 | return os.curdir | ||
| 475 | return os.path.join(*paths) | ||
| 476 | |||
| 477 | |||
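| | # Editor's sketch, not part of distlib: on POSIX the path is returned | ||
| | # unchanged; elsewhere it is rejoined with the native separator. | ||
| | def _demo_convert_path(): | ||
| |     assert convert_path('pkg/sub/mod.py') == os.path.join('pkg', 'sub', 'mod.py') | ||
| | |||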
| 478 | class FileOperator(object): | ||
| 479 | def __init__(self, dry_run=False): | ||
| 480 | self.dry_run = dry_run | ||
| 481 | self.ensured = set() | ||
| 482 | self._init_record() | ||
| 483 | |||
| 484 | def _init_record(self): | ||
| 485 | self.record = False | ||
| 486 | self.files_written = set() | ||
| 487 | self.dirs_created = set() | ||
| 488 | |||
| 489 | def record_as_written(self, path): | ||
| 490 | if self.record: | ||
| 491 | self.files_written.add(path) | ||
| 492 | |||
| 493 | def newer(self, source, target): | ||
| 494 | """Tell if the source is newer than the target. | ||
| 495 | |||
| 496 | Returns true if 'source' exists and is more recently modified than | ||
| 497 | 'target', or if 'source' exists and 'target' doesn't. | ||
| 498 | |||
| 499 | Returns false if both exist and 'target' is the same age or younger | ||
| 500 | than 'source'. Raises DistlibException if 'source' does not exist. | ||
| 501 | |||
| 502 | Note that this test is not very accurate: files created in the same | ||
| 503 | second will have the same "age". | ||
| 504 | """ | ||
| 505 | if not os.path.exists(source): | ||
| 506 | raise DistlibException("file '%r' does not exist" % | ||
| 507 | os.path.abspath(source)) | ||
| 508 | if not os.path.exists(target): | ||
| 509 | return True | ||
| 510 | |||
| 511 | return os.stat(source).st_mtime > os.stat(target).st_mtime | ||
| 512 | |||
| 513 | def copy_file(self, infile, outfile, check=True): | ||
| 514 | """Copy a file respecting dry-run and force flags. | ||
| 515 | """ | ||
| 516 | self.ensure_dir(os.path.dirname(outfile)) | ||
| 517 | logger.info('Copying %s to %s', infile, outfile) | ||
| 518 | if not self.dry_run: | ||
| 519 | msg = None | ||
| 520 | if check: | ||
| 521 | if os.path.islink(outfile): | ||
| 522 | msg = '%s is a symlink' % outfile | ||
| 523 | elif os.path.exists(outfile) and not os.path.isfile(outfile): | ||
| 524 | msg = '%s is a non-regular file' % outfile | ||
| 525 | if msg: | ||
| 526 | raise ValueError(msg + ' which would be overwritten') | ||
| 527 | shutil.copyfile(infile, outfile) | ||
| 528 | self.record_as_written(outfile) | ||
| 529 | |||
| 530 | def copy_stream(self, instream, outfile, encoding=None): | ||
| 531 | assert not os.path.isdir(outfile) | ||
| 532 | self.ensure_dir(os.path.dirname(outfile)) | ||
| 533 | logger.info('Copying stream %s to %s', instream, outfile) | ||
| 534 | if not self.dry_run: | ||
| 535 | if encoding is None: | ||
| 536 | outstream = open(outfile, 'wb') | ||
| 537 | else: | ||
| 538 | outstream = codecs.open(outfile, 'w', encoding=encoding) | ||
| 539 | try: | ||
| 540 | shutil.copyfileobj(instream, outstream) | ||
| 541 | finally: | ||
| 542 | outstream.close() | ||
| 543 | self.record_as_written(outfile) | ||
| 544 | |||
| 545 | def write_binary_file(self, path, data): | ||
| 546 | self.ensure_dir(os.path.dirname(path)) | ||
| 547 | if not self.dry_run: | ||
| 548 | with open(path, 'wb') as f: | ||
| 549 | f.write(data) | ||
| 550 | self.record_as_written(path) | ||
| 551 | |||
| 552 | def write_text_file(self, path, data, encoding): | ||
| 553 | self.ensure_dir(os.path.dirname(path)) | ||
| 554 | if not self.dry_run: | ||
| 555 | with open(path, 'wb') as f: | ||
| 556 | f.write(data.encode(encoding)) | ||
| 557 | self.record_as_written(path) | ||
| 558 | |||
| 559 | def set_mode(self, bits, mask, files): | ||
| 560 | if os.name == 'posix' or (os.name == 'java' and os._name == 'posix'): | ||
| 561 | # Set the executable bits (owner, group, and world) on | ||
| 562 | # all the files specified. | ||
| 563 | for f in files: | ||
| 564 | if self.dry_run: | ||
| 565 | logger.info("changing mode of %s", f) | ||
| 566 | else: | ||
| 567 | mode = (os.stat(f).st_mode | bits) & mask | ||
| 568 | logger.info("changing mode of %s to %o", f, mode) | ||
| 569 | os.chmod(f, mode) | ||
| 570 | |||
| 571 | set_executable_mode = lambda s, f: s.set_mode(0o555, 0o7777, f) | ||
| 572 | |||
| 573 | def ensure_dir(self, path): | ||
| 574 | path = os.path.abspath(path) | ||
| 575 | if path not in self.ensured and not os.path.exists(path): | ||
| 576 | self.ensured.add(path) | ||
| 577 | d, f = os.path.split(path) | ||
| 578 | self.ensure_dir(d) | ||
| 579 | logger.info('Creating %s', path) | ||
| 580 | if not self.dry_run: | ||
| 581 | os.mkdir(path) | ||
| 582 | if self.record: | ||
| 583 | self.dirs_created.add(path) | ||
| 584 | |||
| 585 | def byte_compile(self, path, optimize=False, force=False, prefix=None): | ||
| 586 | dpath = cache_from_source(path, not optimize) | ||
| 587 | logger.info('Byte-compiling %s to %s', path, dpath) | ||
| 588 | if not self.dry_run: | ||
| 589 | if force or self.newer(path, dpath): | ||
| 590 | if not prefix: | ||
| 591 | diagpath = None | ||
| 592 | else: | ||
| 593 | assert path.startswith(prefix) | ||
| 594 | diagpath = path[len(prefix):] | ||
| 595 | py_compile.compile(path, dpath, diagpath, True) # raise error | ||
| 596 | self.record_as_written(dpath) | ||
| 597 | return dpath | ||
| 598 | |||
| 599 | def ensure_removed(self, path): | ||
| 600 | if os.path.exists(path): | ||
| 601 | if os.path.isdir(path) and not os.path.islink(path): | ||
| 602 | logger.debug('Removing directory tree at %s', path) | ||
| 603 | if not self.dry_run: | ||
| 604 | shutil.rmtree(path) | ||
| 605 | if self.record: | ||
| 606 | if path in self.dirs_created: | ||
| 607 | self.dirs_created.remove(path) | ||
| 608 | else: | ||
| 609 | if os.path.islink(path): | ||
| 610 | s = 'link' | ||
| 611 | else: | ||
| 612 | s = 'file' | ||
| 613 | logger.debug('Removing %s %s', s, path) | ||
| 614 | if not self.dry_run: | ||
| 615 | os.remove(path) | ||
| 616 | if self.record: | ||
| 617 | if path in self.files_written: | ||
| 618 | self.files_written.remove(path) | ||
| 619 | |||
| 620 | def is_writable(self, path): | ||
| 621 | result = False | ||
| 622 | while not result: | ||
| 623 | if os.path.exists(path): | ||
| 624 | result = os.access(path, os.W_OK) | ||
| 625 | break | ||
| 626 | parent = os.path.dirname(path) | ||
| 627 | if parent == path: | ||
| 628 | break | ||
| 629 | path = parent | ||
| 630 | return result | ||
| 631 | |||
| 632 | def commit(self): | ||
| 633 | """ | ||
| 634 | Commit recorded changes, turn off recording, return | ||
| 635 | changes. | ||
| 636 | """ | ||
| 637 | assert self.record | ||
| 638 | result = self.files_written, self.dirs_created | ||
| 639 | self._init_record() | ||
| 640 | return result | ||
| 641 | |||
| 642 | def rollback(self): | ||
| 643 | if not self.dry_run: | ||
| 644 | for f in list(self.files_written): | ||
| 645 | if os.path.exists(f): | ||
| 646 | os.remove(f) | ||
| 647 | # dirs should all be empty now, except perhaps for | ||
| 648 | # __pycache__ subdirs | ||
| 649 | # reverse so that subdirs appear before their parents | ||
| 650 | dirs = sorted(self.dirs_created, reverse=True) | ||
| 651 | for d in dirs: | ||
| 652 | flist = os.listdir(d) | ||
| 653 | if flist: | ||
| 654 | assert flist == ['__pycache__'] | ||
| 655 | sd = os.path.join(d, flist[0]) | ||
| 656 | os.rmdir(sd) | ||
| 657 | os.rmdir(d) # should fail if non-empty | ||
| 658 | self._init_record() | ||
| 659 | |||
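| | # Editor's sketch, not part of distlib: record writes, then commit() hands | ||
| | # back the recorded sets and resets recording; rollback() would undo them. | ||
| | def _demo_file_operator(): | ||
| |     op = FileOperator() | ||
| |     op.record = True | ||
| |     with tempdir() as td: | ||
| |         op.write_text_file(os.path.join(td, 'hello.txt'), u'hi', 'utf-8') | ||
| |         files_written, dirs_created = op.commit() | ||
| |     return files_written, dirs_created | ||
| | |||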
| 660 | def resolve(module_name, dotted_path): | ||
| 661 | if module_name in sys.modules: | ||
| 662 | mod = sys.modules[module_name] | ||
| 663 | else: | ||
| 664 | mod = __import__(module_name) | ||
| 665 | if dotted_path is None: | ||
| 666 | result = mod | ||
| 667 | else: | ||
| 668 | parts = dotted_path.split('.') | ||
| 669 | result = getattr(mod, parts.pop(0)) | ||
| 670 | for p in parts: | ||
| 671 | result = getattr(result, p) | ||
| 672 | return result | ||
| 673 | |||
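| | # Editor's sketch, not part of distlib: resolve() maps a module name plus an | ||
| | # optional dotted attribute path to the object itself. Since __import__ | ||
| | # returns the top-level package, dotted module names are only reliable once | ||
| | # they are already in sys.modules. | ||
| | def _demo_resolve(): | ||
| |     import os.path | ||
| |     assert resolve('os.path', 'join') is os.path.join | ||
| |     assert resolve('os', None) is os | ||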
| 674 | |||
| 675 | class ExportEntry(object): | ||
| 676 | def __init__(self, name, prefix, suffix, flags): | ||
| 677 | self.name = name | ||
| 678 | self.prefix = prefix | ||
| 679 | self.suffix = suffix | ||
| 680 | self.flags = flags | ||
| 681 | |||
| 682 | @cached_property | ||
| 683 | def value(self): | ||
| 684 | return resolve(self.prefix, self.suffix) | ||
| 685 | |||
| 686 | def __repr__(self): # pragma: no cover | ||
| 687 | return '<ExportEntry %s = %s:%s %s>' % (self.name, self.prefix, | ||
| 688 | self.suffix, self.flags) | ||
| 689 | |||
| 690 | def __eq__(self, other): | ||
| 691 | if not isinstance(other, ExportEntry): | ||
| 692 | result = False | ||
| 693 | else: | ||
| 694 | result = (self.name == other.name and | ||
| 695 | self.prefix == other.prefix and | ||
| 696 | self.suffix == other.suffix and | ||
| 697 | self.flags == other.flags) | ||
| 698 | return result | ||
| 699 | |||
| 700 | __hash__ = object.__hash__ | ||
| 701 | |||
| 702 | |||
| 703 | ENTRY_RE = re.compile(r'''(?P<name>(\w|[-.+])+) | ||
| 704 | \s*=\s*(?P<callable>(\w+)([:\.]\w+)*) | ||
| 705 | \s*(\[\s*(?P<flags>\w+(=\w+)?(,\s*\w+(=\w+)?)*)\s*\])? | ||
| 706 | ''', re.VERBOSE) | ||
| 707 | |||
| 708 | def get_export_entry(specification): | ||
| 709 | m = ENTRY_RE.search(specification) | ||
| 710 | if not m: | ||
| 711 | result = None | ||
| 712 | if '[' in specification or ']' in specification: | ||
| 713 | raise DistlibException("Invalid specification " | ||
| 714 | "'%s'" % specification) | ||
| 715 | else: | ||
| 716 | d = m.groupdict() | ||
| 717 | name = d['name'] | ||
| 718 | path = d['callable'] | ||
| 719 | colons = path.count(':') | ||
| 720 | if colons == 0: | ||
| 721 | prefix, suffix = path, None | ||
| 722 | else: | ||
| 723 | if colons != 1: | ||
| 724 | raise DistlibException("Invalid specification " | ||
| 725 | "'%s'" % specification) | ||
| 726 | prefix, suffix = path.split(':') | ||
| 727 | flags = d['flags'] | ||
| 728 | if flags is None: | ||
| 729 | if '[' in specification or ']' in specification: | ||
| 730 | raise DistlibException("Invalid specification " | ||
| 731 | "'%s'" % specification) | ||
| 732 | flags = [] | ||
| 733 | else: | ||
| 734 | flags = [f.strip() for f in flags.split(',')] | ||
| 735 | result = ExportEntry(name, prefix, suffix, flags) | ||
| 736 | return result | ||
| 737 | |||
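| | # Editor's sketch, not part of distlib: 'foo', 'pkg.mod', 'func' and the | ||
| | # flags are hypothetical values exercising each group of ENTRY_RE above. | ||
| | def _demo_get_export_entry(): | ||
| |     e = get_export_entry('foo = pkg.mod:func [flag1, flag2=val]') | ||
| |     assert (e.name, e.prefix, e.suffix) == ('foo', 'pkg.mod', 'func') | ||
| |     assert e.flags == ['flag1', 'flag2=val'] | ||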
| 738 | |||
| 739 | def get_cache_base(suffix=None): | ||
| 740 | """ | ||
| 741 | Return the default base location for distlib caches. If the directory does | ||
| 742 | not exist, it is created. Use the suffix provided for the base directory, | ||
| 743 | and default to '.distlib' if it isn't provided. | ||
| 744 | |||
| 745 | On Windows, if LOCALAPPDATA is defined in the environment, then it is | ||
| 746 | assumed to be a directory, and will be the parent directory of the result. | ||
| 747 | On POSIX, and on Windows if LOCALAPPDATA is not defined, the user's home | ||
| 748 | directory - using os.path.expanduser('~') - will be the parent directory of | ||
| 749 | the result. | ||
| 750 | |||
| 751 | The result is just the directory '.distlib' in the parent directory as | ||
| 752 | determined above, or with the name specified with ``suffix``. | ||
| 753 | """ | ||
| 754 | if suffix is None: | ||
| 755 | suffix = '.distlib' | ||
| 756 | if os.name == 'nt' and 'LOCALAPPDATA' in os.environ: | ||
| 757 | result = os.path.expandvars('$localappdata') | ||
| 758 | else: | ||
| 759 | # Assume posix, or old Windows | ||
| 760 | result = os.path.expanduser('~') | ||
| 761 | # we use 'isdir' instead of 'exists', because we want to | ||
| 762 | # fail if there's a file with that name | ||
| 763 | if os.path.isdir(result): | ||
| 764 | usable = os.access(result, os.W_OK) | ||
| 765 | if not usable: | ||
| 766 | logger.warning('Directory exists but is not writable: %s', result) | ||
| 767 | else: | ||
| 768 | try: | ||
| 769 | os.makedirs(result) | ||
| 770 | usable = True | ||
| 771 | except OSError: | ||
| 772 | logger.warning('Unable to create %s', result, exc_info=True) | ||
| 773 | usable = False | ||
| 774 | if not usable: | ||
| 775 | result = tempfile.mkdtemp() | ||
| 776 | logger.warning('Default location unusable, using %s', result) | ||
| 777 | return os.path.join(result, suffix) | ||
| 778 | |||
| 779 | |||
| 780 | def path_to_cache_dir(path): | ||
| 781 | """ | ||
| 782 | Convert an absolute path to a directory name for use in a cache. | ||
| 783 | |||
| 784 | The algorithm used is: | ||
| 785 | |||
| 786 | #. On Windows, any ``':'`` in the drive is replaced with ``'---'``. | ||
| 787 | #. Any occurrence of ``os.sep`` is replaced with ``'--'``. | ||
| 788 | #. ``'.cache'`` is appended. | ||
| 789 | """ | ||
| 790 | d, p = os.path.splitdrive(os.path.abspath(path)) | ||
| 791 | if d: | ||
| 792 | d = d.replace(':', '---') | ||
| 793 | p = p.replace(os.sep, '--') | ||
| 794 | return d + p + '.cache' | ||
| 795 | |||
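| | # Editor's sketch, not part of distlib: a worked example of the steps above | ||
| | # for a POSIX-style path. | ||
| | def _demo_path_to_cache_dir(): | ||
| |     if os.sep == '/': | ||
| |         assert path_to_cache_dir('/home/user/lib') == '--home--user--lib.cache' | ||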
| 796 | |||
| 797 | def ensure_slash(s): | ||
| 798 | if not s.endswith('/'): | ||
| 799 | return s + '/' | ||
| 800 | return s | ||
| 801 | |||
| 802 | |||
| 803 | def parse_credentials(netloc): | ||
| 804 | username = password = None | ||
| 805 | if '@' in netloc: | ||
| 806 | prefix, netloc = netloc.split('@', 1) | ||
| 807 | if ':' not in prefix: | ||
| 808 | username = prefix | ||
| 809 | else: | ||
| 810 | username, password = prefix.split(':', 1) | ||
| 811 | return username, password, netloc | ||
| 812 | |||
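| | # Editor's sketch, not part of distlib: the credentials and netloc below are | ||
| | # hypothetical. | ||
| | def _demo_parse_credentials(): | ||
| |     assert parse_credentials('user:secret@host') == ('user', 'secret', 'host') | ||
| |     assert parse_credentials('host') == (None, None, 'host') | ||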
| 813 | |||
| 814 | def get_process_umask(): | ||
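| |     # os.umask installs the given mask and returns the previous one, so | ||
| |     # set a throwaway value, capture the old mask, then restore it | ||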
| 815 | result = os.umask(0o22) | ||
| 816 | os.umask(result) | ||
| 817 | return result | ||
| 818 | |||
| 819 | def is_string_sequence(seq): | ||
| 820 | result = True | ||
| 821 | i = None | ||
| 822 | for i, s in enumerate(seq): | ||
| 823 | if not isinstance(s, string_types): | ||
| 824 | result = False | ||
| 825 | break | ||
| 826 | assert i is not None | ||
| 827 | return result | ||
| 828 | |||
| 829 | PROJECT_NAME_AND_VERSION = re.compile('([a-z0-9_]+([.-][a-z_][a-z0-9_]*)*)-' | ||
| 830 | '([a-z0-9_.+-]+)', re.I) | ||
| 831 | PYTHON_VERSION = re.compile(r'-py(\d\.?\d?)') | ||
| 832 | |||
| 833 | |||
| 834 | def split_filename(filename, project_name=None): | ||
| 835 | """ | ||
| 836 | Extract name, version, python version from a filename (no extension) | ||
| 837 | |||
| 838 | Return a (name, version, pyver) tuple, or None if nothing matches. | ||
| 839 | """ | ||
| 840 | result = None | ||
| 841 | pyver = None | ||
| 842 | filename = unquote(filename).replace(' ', '-') | ||
| 843 | m = PYTHON_VERSION.search(filename) | ||
| 844 | if m: | ||
| 845 | pyver = m.group(1) | ||
| 846 | filename = filename[:m.start()] | ||
| 847 | if project_name and len(filename) > len(project_name) + 1: | ||
| 848 | m = re.match(re.escape(project_name) + r'\b', filename) | ||
| 849 | if m: | ||
| 850 | n = m.end() | ||
| 851 | result = filename[:n], filename[n + 1:], pyver | ||
| 852 | if result is None: | ||
| 853 | m = PROJECT_NAME_AND_VERSION.match(filename) | ||
| 854 | if m: | ||
| 855 | result = m.group(1), m.group(3), pyver | ||
| 856 | return result | ||
| 857 | |||
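| | # Editor's sketch, not part of distlib: 'foo-bar' is a hypothetical project. | ||
| | def _demo_split_filename(): | ||
| |     assert split_filename('foo-bar-1.2.3-py2.7', 'foo-bar') == ('foo-bar', '1.2.3', '2.7') | ||
| | |||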
| 858 | # Allow spaces in name because of legacy dists like "Twisted Core" | ||
| 859 | NAME_VERSION_RE = re.compile(r'(?P<name>[\w .-]+)\s*' | ||
| 860 | r'\(\s*(?P<ver>[^\s)]+)\)$') | ||
| 861 | |||
| 862 | def parse_name_and_version(p): | ||
| 863 | """ | ||
| 864 | A utility method used to get name and version from a string. | ||
| 865 | |||
| 866 | From e.g. a Provides-Dist value. | ||
| 867 | |||
| 868 | :param p: A value in a form 'foo (1.0)' | ||
| 869 | :return: The name and version as a tuple. | ||
| 870 | """ | ||
| 871 | m = NAME_VERSION_RE.match(p) | ||
| 872 | if not m: | ||
| 873 | raise DistlibException('Ill-formed name/version string: \'%s\'' % p) | ||
| 874 | d = m.groupdict() | ||
| 875 | return d['name'].strip().lower(), d['ver'] | ||
| 876 | |||
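| | # Editor's sketch, not part of distlib: spaces in the name are tolerated | ||
| | # for legacy dists such as 'Twisted Core'. | ||
| | def _demo_parse_name_and_version(): | ||
| |     assert parse_name_and_version('Twisted Core (12.3.0)') == ('twisted core', '12.3.0') | ||
| | |||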
| 877 | def get_extras(requested, available): | ||
| 878 | result = set() | ||
| 879 | requested = set(requested or []) | ||
| 880 | available = set(available or []) | ||
| 881 | if '*' in requested: | ||
| 882 | requested.remove('*') | ||
| 883 | result |= available | ||
| 884 | for r in requested: | ||
| 885 | if r == '-': | ||
| 886 | result.add(r) | ||
| 887 | elif r.startswith('-'): | ||
| 888 | unwanted = r[1:] | ||
| 889 | if unwanted not in available: | ||
| 890 | logger.warning('undeclared extra: %s', unwanted) | ||
| 891 | if unwanted in result: | ||
| 892 | result.remove(unwanted) | ||
| 893 | else: | ||
| 894 | if r not in available: | ||
| 895 | logger.warning('undeclared extra: %s', r) | ||
| 896 | result.add(r) | ||
| 897 | return result | ||
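| | |||
| | # Editor's sketch, not part of distlib: '*' pulls in every declared extra, | ||
| | # and a '-' prefix then removes one. | ||
| | def _demo_get_extras(): | ||
| |     assert get_extras(['*', '-tests'], ['docs', 'tests']) == set(['docs']) | ||
| | |||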
| 898 | # | ||
| 899 | # Extended metadata functionality | ||
| 900 | # | ||
| 901 | |||
| 902 | def _get_external_data(url): | ||
| 903 | result = {} | ||
| 904 | try: | ||
| 905 | # urlopen might fail if it runs into redirections, | ||
| 906 | # because of Python issue #13696. Fixed in locators | ||
| 907 | # using a custom redirect handler. | ||
| 908 | resp = urlopen(url) | ||
| 909 | headers = resp.info() | ||
| 910 | ct = headers.get('Content-Type') | ||
| 911 | if not ct.startswith('application/json'): | ||
| 912 | logger.debug('Unexpected response for JSON request: %s', ct) | ||
| 913 | else: | ||
| 914 | reader = codecs.getreader('utf-8')(resp) | ||
| 915 | #data = reader.read().decode('utf-8') | ||
| 916 | #result = json.loads(data) | ||
| 917 | result = json.load(reader) | ||
| 918 | except Exception as e: | ||
| 919 | logger.exception('Failed to get external data for %s: %s', url, e) | ||
| 920 | return result | ||
| 921 | |||
| 922 | _external_data_base_url = 'https://www.red-dove.com/pypi/projects/' | ||
| 923 | |||
| 924 | def get_project_data(name): | ||
| 925 | url = '%s/%s/project.json' % (name[0].upper(), name) | ||
| 926 | url = urljoin(_external_data_base_url, url) | ||
| 927 | result = _get_external_data(url) | ||
| 928 | return result | ||
| 929 | |||
| 930 | def get_package_data(name, version): | ||
| 931 | url = '%s/%s/package-%s.json' % (name[0].upper(), name, version) | ||
| 932 | url = urljoin(_external_data_base_url, url) | ||
| 933 | return _get_external_data(url) | ||
| 934 | |||
| 935 | |||
| 936 | class Cache(object): | ||
| 937 | """ | ||
| 938 | A class implementing a cache for resources that need to live in the file system | ||
| 939 | e.g. shared libraries. This class was moved here from resources because it | ||
| 940 | could be used by other modules, e.g. the wheel module. | ||
| 941 | """ | ||
| 942 | |||
| 943 | def __init__(self, base): | ||
| 944 | """ | ||
| 945 | Initialise an instance. | ||
| 946 | |||
| 947 | :param base: The base directory where the cache should be located. | ||
| 948 | """ | ||
| 949 | # we use 'isdir' instead of 'exists', because we want to | ||
| 950 | # fail if there's a file with that name | ||
| 951 | if not os.path.isdir(base): # pragma: no cover | ||
| 952 | os.makedirs(base) | ||
| 953 | if (os.stat(base).st_mode & 0o77) != 0: | ||
| 954 | logger.warning('Directory \'%s\' is not private', base) | ||
| 955 | self.base = os.path.abspath(os.path.normpath(base)) | ||
| 956 | |||
| 957 | def prefix_to_dir(self, prefix): | ||
| 958 | """ | ||
| 959 | Converts a resource prefix to a directory name in the cache. | ||
| 960 | """ | ||
| 961 | return path_to_cache_dir(prefix) | ||
| 962 | |||
| 963 | def clear(self): | ||
| 964 | """ | ||
| 965 | Clear the cache. | ||
| 966 | """ | ||
| 967 | not_removed = [] | ||
| 968 | for fn in os.listdir(self.base): | ||
| 969 | fn = os.path.join(self.base, fn) | ||
| 970 | try: | ||
| 971 | if os.path.islink(fn) or os.path.isfile(fn): | ||
| 972 | os.remove(fn) | ||
| 973 | elif os.path.isdir(fn): | ||
| 974 | shutil.rmtree(fn) | ||
| 975 | except Exception: | ||
| 976 | not_removed.append(fn) | ||
| 977 | return not_removed | ||
| 978 | |||
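| | # Editor's sketch, not part of distlib: a Cache rooted under a throwaway | ||
| | # base; prefix_to_dir reuses path_to_cache_dir above. | ||
| | def _demo_cache(): | ||
| |     with tempdir() as td: | ||
| |         cache = Cache(os.path.join(td, 'cache')) | ||
| |         if os.sep == '/': | ||
| |             assert cache.prefix_to_dir('/some/prefix') == '--some--prefix.cache' | ||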
| 979 | |||
| 980 | class EventMixin(object): | ||
| 981 | """ | ||
| 982 | A very simple publish/subscribe system. | ||
| 983 | """ | ||
| 984 | def __init__(self): | ||
| 985 | self._subscribers = {} | ||
| 986 | |||
| 987 | def add(self, event, subscriber, append=True): | ||
| 988 | """ | ||
| 989 | Add a subscriber for an event. | ||
| 990 | |||
| 991 | :param event: The name of an event. | ||
| 992 | :param subscriber: The subscriber to be added (and called when the | ||
| 993 | event is published). | ||
| 994 | :param append: Whether to append or prepend the subscriber to an | ||
| 995 | existing subscriber list for the event. | ||
| 996 | """ | ||
| 997 | subs = self._subscribers | ||
| 998 | if event not in subs: | ||
| 999 | subs[event] = deque([subscriber]) | ||
| 1000 | else: | ||
| 1001 | sq = subs[event] | ||
| 1002 | if append: | ||
| 1003 | sq.append(subscriber) | ||
| 1004 | else: | ||
| 1005 | sq.appendleft(subscriber) | ||
| 1006 | |||
| 1007 | def remove(self, event, subscriber): | ||
| 1008 | """ | ||
| 1009 | Remove a subscriber for an event. | ||
| 1010 | |||
| 1011 | :param event: The name of an event. | ||
| 1012 | :param subscriber: The subscriber to be removed. | ||
| 1013 | """ | ||
| 1014 | subs = self._subscribers | ||
| 1015 | if event not in subs: | ||
| 1016 | raise ValueError('No subscribers: %r' % event) | ||
| 1017 | subs[event].remove(subscriber) | ||
| 1018 | |||
| 1019 | def get_subscribers(self, event): | ||
| 1020 | """ | ||
| 1021 | Return an iterator for the subscribers for an event. | ||
| 1022 | :param event: The event to return subscribers for. | ||
| 1023 | """ | ||
| 1024 | return iter(self._subscribers.get(event, ())) | ||
| 1025 | |||
| 1026 | def publish(self, event, *args, **kwargs): | ||
| 1027 | """ | ||
| 1028 | Publish an event and return a list of values returned by its | ||
| 1029 | subscribers. | ||
| 1030 | |||
| 1031 | :param event: The event to publish. | ||
| 1032 | :param args: The positional arguments to pass to the event's | ||
| 1033 | subscribers. | ||
| 1034 | :param kwargs: The keyword arguments to pass to the event's | ||
| 1035 | subscribers. | ||
| 1036 | """ | ||
| 1037 | result = [] | ||
| 1038 | for subscriber in self.get_subscribers(event): | ||
| 1039 | try: | ||
| 1040 | value = subscriber(event, *args, **kwargs) | ||
| 1041 | except Exception: | ||
| 1042 | logger.exception('Exception during event publication') | ||
| 1043 | value = None | ||
| 1044 | result.append(value) | ||
| 1045 | logger.debug('publish %s: args = %s, kwargs = %s, result = %s', | ||
| 1046 | event, args, kwargs, result) | ||
| 1047 | return result | ||
| 1048 | |||
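| | # Editor's sketch, not part of distlib: subscribers are called as | ||
| | # subscriber(event, *args, **kwargs) and their return values collected. | ||
| | def _demo_event_mixin(): | ||
| |     bus = EventMixin() | ||
| |     bus.add('greet', lambda event, name: 'hello %s' % name) | ||
| |     assert bus.publish('greet', 'world') == ['hello world'] | ||
| | |||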
| 1049 | # | ||
| 1050 | # Simple sequencing | ||
| 1051 | # | ||
| 1052 | class Sequencer(object): | ||
| 1053 | def __init__(self): | ||
| 1054 | self._preds = {} | ||
| 1055 | self._succs = {} | ||
| 1056 | self._nodes = set() # nodes with no preds/succs | ||
| 1057 | |||
| 1058 | def add_node(self, node): | ||
| 1059 | self._nodes.add(node) | ||
| 1060 | |||
| 1061 | def remove_node(self, node, edges=False): | ||
| 1062 | if node in self._nodes: | ||
| 1063 | self._nodes.remove(node) | ||
| 1064 | if edges: | ||
| 1065 | for p in set(self._preds.get(node, ())): | ||
| 1066 | self.remove(p, node) | ||
| 1067 | for s in set(self._succs.get(node, ())): | ||
| 1068 | self.remove(node, s) | ||
| 1069 | # Remove empties | ||
| 1070 | for k, v in list(self._preds.items()): | ||
| 1071 | if not v: | ||
| 1072 | del self._preds[k] | ||
| 1073 | for k, v in list(self._succs.items()): | ||
| 1074 | if not v: | ||
| 1075 | del self._succs[k] | ||
| 1076 | |||
| 1077 | def add(self, pred, succ): | ||
| 1078 | assert pred != succ | ||
| 1079 | self._preds.setdefault(succ, set()).add(pred) | ||
| 1080 | self._succs.setdefault(pred, set()).add(succ) | ||
| 1081 | |||
| 1082 | def remove(self, pred, succ): | ||
| 1083 | assert pred != succ | ||
| 1084 | try: | ||
| 1085 | preds = self._preds[succ] | ||
| 1086 | succs = self._succs[pred] | ||
| 1087 | except KeyError: # pragma: no cover | ||
| 1088 | raise ValueError('%r not a successor of anything' % succ) | ||
| 1089 | try: | ||
| 1090 | preds.remove(pred) | ||
| 1091 | succs.remove(succ) | ||
| 1092 | except KeyError: # pragma: no cover | ||
| 1093 | raise ValueError('%r not a successor of %r' % (succ, pred)) | ||
| 1094 | |||
| 1095 | def is_step(self, step): | ||
| 1096 | return (step in self._preds or step in self._succs or | ||
| 1097 | step in self._nodes) | ||
| 1098 | |||
| 1099 | def get_steps(self, final): | ||
| 1100 | if not self.is_step(final): | ||
| 1101 | raise ValueError('Unknown: %r' % final) | ||
| 1102 | result = [] | ||
| 1103 | todo = [] | ||
| 1104 | seen = set() | ||
| 1105 | todo.append(final) | ||
| 1106 | while todo: | ||
| 1107 | step = todo.pop(0) | ||
| 1108 | if step in seen: | ||
| 1109 | # if a step was already seen, | ||
| 1110 | # move it to the end (so it will appear earlier | ||
| 1111 | # when reversed on return) ... but not for the | ||
| 1112 | # final step, as that would be confusing for | ||
| 1113 | # users | ||
| 1114 | if step != final: | ||
| 1115 | result.remove(step) | ||
| 1116 | result.append(step) | ||
| 1117 | else: | ||
| 1118 | seen.add(step) | ||
| 1119 | result.append(step) | ||
| 1120 | preds = self._preds.get(step, ()) | ||
| 1121 | todo.extend(preds) | ||
| 1122 | return reversed(result) | ||
| 1123 | |||
| 1124 | @property | ||
| 1125 | def strong_connections(self): | ||
| 1126 | #http://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm | ||
| 1127 | index_counter = [0] | ||
| 1128 | stack = [] | ||
| 1129 | lowlinks = {} | ||
| 1130 | index = {} | ||
| 1131 | result = [] | ||
| 1132 | |||
| 1133 | graph = self._succs | ||
| 1134 | |||
| 1135 | def strongconnect(node): | ||
| 1136 | # set the depth index for this node to the smallest unused index | ||
| 1137 | index[node] = index_counter[0] | ||
| 1138 | lowlinks[node] = index_counter[0] | ||
| 1139 | index_counter[0] += 1 | ||
| 1140 | stack.append(node) | ||
| 1141 | |||
| 1142 | # Consider successors | ||
| 1143 | try: | ||
| 1144 | successors = graph[node] | ||
| 1145 | except Exception: | ||
| 1146 | successors = [] | ||
| 1147 | for successor in successors: | ||
| 1148 | if successor not in lowlinks: | ||
| 1149 | # Successor has not yet been visited | ||
| 1150 | strongconnect(successor) | ||
| 1151 | lowlinks[node] = min(lowlinks[node],lowlinks[successor]) | ||
| 1152 | elif successor in stack: | ||
| 1153 | # the successor is in the stack and hence in the current | ||
| 1154 | # strongly connected component (SCC) | ||
| 1155 | lowlinks[node] = min(lowlinks[node],index[successor]) | ||
| 1156 | |||
| 1157 | # If `node` is a root node, pop the stack and generate an SCC | ||
| 1158 | if lowlinks[node] == index[node]: | ||
| 1159 | connected_component = [] | ||
| 1160 | |||
| 1161 | while True: | ||
| 1162 | successor = stack.pop() | ||
| 1163 | connected_component.append(successor) | ||
| 1164 | if successor == node: break | ||
| 1165 | component = tuple(connected_component) | ||
| 1166 | # storing the result | ||
| 1167 | result.append(component) | ||
| 1168 | |||
| 1169 | for node in graph: | ||
| 1170 | if node not in lowlinks: | ||
| 1171 | strongconnect(node) | ||
| 1172 | |||
| 1173 | return result | ||
| 1174 | |||
| 1175 | @property | ||
| 1176 | def dot(self): | ||
| 1177 | result = ['digraph G {'] | ||
| 1178 | for succ in self._preds: | ||
| 1179 | preds = self._preds[succ] | ||
| 1180 | for pred in preds: | ||
| 1181 | result.append(' %s -> %s;' % (pred, succ)) | ||
| 1182 | for node in self._nodes: | ||
| 1183 | result.append(' %s;' % node) | ||
| 1184 | result.append('}') | ||
| 1185 | return '\n'.join(result) | ||
| 1186 | |||
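| | # Editor's sketch, not part of distlib: get_steps returns the steps so that | ||
| | # every predecessor precedes its successor. | ||
| | def _demo_sequencer(): | ||
| |     seq = Sequencer() | ||
| |     seq.add('A', 'B') | ||
| |     seq.add('B', 'C') | ||
| |     assert list(seq.get_steps('C')) == ['A', 'B', 'C'] | ||
| | |||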
| 1187 | # | ||
| 1188 | # Unarchiving functionality for zip, tar, tgz, tbz, whl | ||
| 1189 | # | ||
| 1190 | |||
| 1191 | ARCHIVE_EXTENSIONS = ('.tar.gz', '.tar.bz2', '.tar', '.zip', | ||
| 1192 | '.tgz', '.tbz', '.whl') | ||
| 1193 | |||
| 1194 | def unarchive(archive_filename, dest_dir, format=None, check=True): | ||
| 1195 | |||
| 1196 | def check_path(path): | ||
| 1197 | if not isinstance(path, text_type): | ||
| 1198 | path = path.decode('utf-8') | ||
| 1199 | p = os.path.abspath(os.path.join(dest_dir, path)) | ||
| 1200 | if not p.startswith(dest_dir) or (len(p) > plen and p[plen] != os.sep): | ||
| 1201 | raise ValueError('path outside destination: %r' % p) | ||
| 1202 | |||
| 1203 | dest_dir = os.path.abspath(dest_dir) | ||
| 1204 | plen = len(dest_dir) | ||
| 1205 | archive = None | ||
| 1206 | if format is None: | ||
| 1207 | if archive_filename.endswith(('.zip', '.whl')): | ||
| 1208 | format = 'zip' | ||
| 1209 | elif archive_filename.endswith(('.tar.gz', '.tgz')): | ||
| 1210 | format = 'tgz' | ||
| 1211 | mode = 'r:gz' | ||
| 1212 | elif archive_filename.endswith(('.tar.bz2', '.tbz')): | ||
| 1213 | format = 'tbz' | ||
| 1214 | mode = 'r:bz2' | ||
| 1215 | elif archive_filename.endswith('.tar'): | ||
| 1216 | format = 'tar' | ||
| 1217 | mode = 'r' | ||
| 1218 | else: # pragma: no cover | ||
| 1219 | raise ValueError('Unknown format for %r' % archive_filename) | ||
| 1220 | try: | ||
| 1221 | if format == 'zip': | ||
| 1222 | archive = ZipFile(archive_filename, 'r') | ||
| 1223 | if check: | ||
| 1224 | names = archive.namelist() | ||
| 1225 | for name in names: | ||
| 1226 | check_path(name) | ||
| 1227 | else: | ||
| 1228 | archive = tarfile.open(archive_filename, mode) | ||
| 1229 | if check: | ||
| 1230 | names = archive.getnames() | ||
| 1231 | for name in names: | ||
| 1232 | check_path(name) | ||
| 1233 | if format != 'zip' and sys.version_info[0] < 3: | ||
| 1234 | # See Python issue 17153. If the dest path contains Unicode, | ||
| 1235 | # tarfile extraction fails on Python 2.x if a member path name | ||
| 1236 | # contains non-ASCII characters - it leads to an implicit | ||
| 1237 | # bytes -> unicode conversion using ASCII to decode. | ||
| 1238 | for tarinfo in archive.getmembers(): | ||
| 1239 | if not isinstance(tarinfo.name, text_type): | ||
| 1240 | tarinfo.name = tarinfo.name.decode('utf-8') | ||
| 1241 | archive.extractall(dest_dir) | ||
| 1242 | |||
| 1243 | finally: | ||
| 1244 | if archive: | ||
| 1245 | archive.close() | ||
| 1246 | |||
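| | # Editor's note: when 'format' is None it is inferred from the extension | ||
| | # (e.g. '.tar.gz'/'.tgz' -> tarfile mode 'r:gz'); with check=True every | ||
| | # member path is validated against dest_dir before extraction, guarding | ||
| | # against '../' path traversal. | ||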
| 1247 | |||
| 1248 | def zip_dir(directory): | ||
| 1249 | """zip a directory tree into a BytesIO object""" | ||
| 1250 | result = io.BytesIO() | ||
| 1251 | dlen = len(directory) | ||
| 1252 | with ZipFile(result, "w") as zf: | ||
| 1253 | for root, dirs, files in os.walk(directory): | ||
| 1254 | for name in files: | ||
| 1255 | full = os.path.join(root, name) | ||
| 1256 | rel = root[dlen:] | ||
| 1257 | dest = os.path.join(rel, name) | ||
| 1258 | zf.write(full, dest) | ||
| 1259 | return result | ||
| 1260 | |||
| 1261 | # | ||
| 1262 | # Simple progress bar | ||
| 1263 | # | ||
| 1264 | |||
| 1265 | UNITS = ('', 'K', 'M', 'G', 'T', 'P') | ||
| 1266 | |||
| 1267 | |||
| 1268 | class Progress(object): | ||
| 1269 | unknown = 'UNKNOWN' | ||
| 1270 | |||
| 1271 | def __init__(self, minval=0, maxval=100): | ||
| 1272 | assert maxval is None or maxval >= minval | ||
| 1273 | self.min = self.cur = minval | ||
| 1274 | self.max = maxval | ||
| 1275 | self.started = None | ||
| 1276 | self.elapsed = 0 | ||
| 1277 | self.done = False | ||
| 1278 | |||
| 1279 | def update(self, curval): | ||
| 1280 | assert self.min <= curval | ||
| 1281 | assert self.max is None or curval <= self.max | ||
| 1282 | self.cur = curval | ||
| 1283 | now = time.time() | ||
| 1284 | if self.started is None: | ||
| 1285 | self.started = now | ||
| 1286 | else: | ||
| 1287 | self.elapsed = now - self.started | ||
| 1288 | |||
| 1289 | def increment(self, incr): | ||
| 1290 | assert incr >= 0 | ||
| 1291 | self.update(self.cur + incr) | ||
| 1292 | |||
| 1293 | def start(self): | ||
| 1294 | self.update(self.min) | ||
| 1295 | return self | ||
| 1296 | |||
| 1297 | def stop(self): | ||
| 1298 | if self.max is not None: | ||
| 1299 | self.update(self.max) | ||
| 1300 | self.done = True | ||
| 1301 | |||
| 1302 | @property | ||
| 1303 | def maximum(self): | ||
| 1304 | return self.unknown if self.max is None else self.max | ||
| 1305 | |||
| 1306 | @property | ||
| 1307 | def percentage(self): | ||
| 1308 | if self.done: | ||
| 1309 | result = '100 %' | ||
| 1310 | elif self.max is None: | ||
| 1311 | result = ' ?? %' | ||
| 1312 | else: | ||
| 1313 | v = 100.0 * (self.cur - self.min) / (self.max - self.min) | ||
| 1314 | result = '%3d %%' % v | ||
| 1315 | return result | ||
| 1316 | |||
| 1317 | def format_duration(self, duration): | ||
| 1318 | if (duration <= 0 and self.max is None) or self.cur == self.min: | ||
| 1319 | result = '??:??:??' | ||
| 1320 | #elif duration < 1: | ||
| 1321 | # result = '--:--:--' | ||
| 1322 | else: | ||
| 1323 | result = time.strftime('%H:%M:%S', time.gmtime(duration)) | ||
| 1324 | return result | ||
| 1325 | |||
| 1326 | @property | ||
| 1327 | def ETA(self): | ||
| 1328 | if self.done: | ||
| 1329 | prefix = 'Done' | ||
| 1330 | t = self.elapsed | ||
| 1331 | #import pdb; pdb.set_trace() | ||
| 1332 | else: | ||
| 1333 | prefix = 'ETA ' | ||
| 1334 | if self.max is None: | ||
| 1335 | t = -1 | ||
| 1336 | elif self.elapsed == 0 or (self.cur == self.min): | ||
| 1337 | t = 0 | ||
| 1338 | else: | ||
| 1339 | #import pdb; pdb.set_trace() | ||
| 1340 | t = float(self.max - self.min) | ||
| 1341 | t /= self.cur - self.min | ||
| 1342 | t = (t - 1) * self.elapsed | ||
| 1343 | return '%s: %s' % (prefix, self.format_duration(t)) | ||
| 1344 | |||
| 1345 | @property | ||
| 1346 | def speed(self): | ||
| 1347 | if self.elapsed == 0: | ||
| 1348 | result = 0.0 | ||
| 1349 | else: | ||
| 1350 | result = (self.cur - self.min) / self.elapsed | ||
| 1351 | for unit in UNITS: | ||
| 1352 | if result < 1000: | ||
| 1353 | break | ||
| 1354 | result /= 1000.0 | ||
| 1355 | return '%d %sB/s' % (result, unit) | ||
| 1356 | |||
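| | # Editor's sketch, not part of distlib: drive a Progress by hand; the | ||
| | # percentage is derived from min/cur/max. | ||
| | def _demo_progress(): | ||
| |     p = Progress(maxval=100).start() | ||
| |     p.increment(50) | ||
| |     assert p.percentage == ' 50 %' | ||
| |     p.stop() | ||
| |     assert p.percentage == '100 %' | ||
| | |||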
| 1357 | # | ||
| 1358 | # Glob functionality | ||
| 1359 | # | ||
| 1360 | |||
| 1361 | RICH_GLOB = re.compile(r'\{([^}]*)\}') | ||
| 1362 | _CHECK_RECURSIVE_GLOB = re.compile(r'[^/\\,{]\*\*|\*\*[^/\\,}]') | ||
| 1363 | _CHECK_MISMATCH_SET = re.compile(r'^[^{]*\}|\{[^}]*$') | ||
| 1364 | |||
| 1365 | |||
| 1366 | def iglob(path_glob): | ||
| 1367 | """Extended globbing function that supports ** and {opt1,opt2,opt3}.""" | ||
| 1368 | if _CHECK_RECURSIVE_GLOB.search(path_glob): | ||
| 1369 | msg = """invalid glob %r: recursive glob "**" must be used alone""" | ||
| 1370 | raise ValueError(msg % path_glob) | ||
| 1371 | if _CHECK_MISMATCH_SET.search(path_glob): | ||
| 1372 | msg = """invalid glob %r: mismatching set marker '{' or '}'""" | ||
| 1373 | raise ValueError(msg % path_glob) | ||
| 1374 | return _iglob(path_glob) | ||
| 1375 | |||
| 1376 | |||
| 1377 | def _iglob(path_glob): | ||
| 1378 | rich_path_glob = RICH_GLOB.split(path_glob, 1) | ||
| 1379 | if len(rich_path_glob) > 1: | ||
| 1380 | assert len(rich_path_glob) == 3, rich_path_glob | ||
| 1381 | prefix, alternatives, suffix = rich_path_glob | ||
| 1382 | for item in alternatives.split(','): | ||
| 1383 | for path in _iglob(''.join((prefix, item, suffix))): | ||
| 1384 | yield path | ||
| 1385 | else: | ||
| 1386 | if '**' not in path_glob: | ||
| 1387 | for item in std_iglob(path_glob): | ||
| 1388 | yield item | ||
| 1389 | else: | ||
| 1390 | prefix, radical = path_glob.split('**', 1) | ||
| 1391 | if prefix == '': | ||
| 1392 | prefix = '.' | ||
| 1393 | if radical == '': | ||
| 1394 | radical = '*' | ||
| 1395 | else: | ||
| 1396 | # we support both | ||
| 1397 | radical = radical.lstrip('/') | ||
| 1398 | radical = radical.lstrip('\\') | ||
| 1399 | for path, dirs, files in os.walk(prefix): | ||
| 1400 | path = os.path.normpath(path) | ||
| 1401 | for fn in _iglob(os.path.join(path, radical)): | ||
| 1402 | yield fn | ||
| 1403 | |||
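| | # Editor's sketch, not part of distlib: '{a,b}' alternatives expand first, | ||
| | # then each '**' walks the tree; malformed patterns are rejected eagerly. | ||
| | def _demo_iglob(): | ||
| |     try: | ||
| |         iglob('src/a**b') | ||
| |         raise AssertionError('expected ValueError') | ||
| |     except ValueError: | ||
| |         pass | ||
| | |||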
| 1404 | if ssl: | ||
| 1405 | from .compat import (HTTPSHandler as BaseHTTPSHandler, match_hostname, | ||
| 1406 | CertificateError) | ||
| 1407 | |||
| 1408 | |||
| 1409 | # | ||
| 1410 | # HTTPSConnection which verifies certificates/matches domains | ||
| 1411 | # | ||
| 1412 | |||
| 1413 | class HTTPSConnection(httplib.HTTPSConnection): | ||
| 1414 | ca_certs = None # set this to the path to the certs file (.pem) | ||
| 1415 | check_domain = True # only used if ca_certs is not None | ||
| 1416 | |||
| 1417 | # noinspection PyPropertyAccess | ||
| 1418 | def connect(self): | ||
| 1419 | sock = socket.create_connection((self.host, self.port), self.timeout) | ||
| 1420 | if getattr(self, '_tunnel_host', False): | ||
| 1421 | self.sock = sock | ||
| 1422 | self._tunnel() | ||
| 1423 | |||
| 1424 | if not hasattr(ssl, 'SSLContext'): | ||
| 1425 | # For 2.x | ||
| 1426 | if self.ca_certs: | ||
| 1427 | cert_reqs = ssl.CERT_REQUIRED | ||
| 1428 | else: | ||
| 1429 | cert_reqs = ssl.CERT_NONE | ||
| 1430 | self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file, | ||
| 1431 | cert_reqs=cert_reqs, | ||
| 1432 | ssl_version=ssl.PROTOCOL_SSLv23, | ||
| 1433 | ca_certs=self.ca_certs) | ||
| 1434 | else: # pragma: no cover | ||
| 1435 | context = ssl.SSLContext(ssl.PROTOCOL_SSLv23) | ||
| 1436 | context.options |= ssl.OP_NO_SSLv2 | ||
| 1437 | if self.cert_file: | ||
| 1438 | context.load_cert_chain(self.cert_file, self.key_file) | ||
| 1439 | kwargs = {} | ||
| 1440 | if self.ca_certs: | ||
| 1441 | context.verify_mode = ssl.CERT_REQUIRED | ||
| 1442 | context.load_verify_locations(cafile=self.ca_certs) | ||
| 1443 | if getattr(ssl, 'HAS_SNI', False): | ||
| 1444 | kwargs['server_hostname'] = self.host | ||
| 1445 | self.sock = context.wrap_socket(sock, **kwargs) | ||
| 1446 | if self.ca_certs and self.check_domain: | ||
| 1447 | try: | ||
| 1448 | match_hostname(self.sock.getpeercert(), self.host) | ||
| 1449 | logger.debug('Host verified: %s', self.host) | ||
| 1450 | except CertificateError: # pragma: no cover | ||
| 1451 | self.sock.shutdown(socket.SHUT_RDWR) | ||
| 1452 | self.sock.close() | ||
| 1453 | raise | ||
| 1454 | |||
| 1455 | class HTTPSHandler(BaseHTTPSHandler): | ||
| 1456 | def __init__(self, ca_certs, check_domain=True): | ||
| 1457 | BaseHTTPSHandler.__init__(self) | ||
| 1458 | self.ca_certs = ca_certs | ||
| 1459 | self.check_domain = check_domain | ||
| 1460 | |||
| 1461 | def _conn_maker(self, *args, **kwargs): | ||
| 1462 | """ | ||
| 1463 | This is called to create a connection instance. Normally you'd | ||
| 1464 | pass a connection class to do_open, but it doesn't actually check for | ||
| 1465 | a class, and just expects a callable. As long as we behave just as a | ||
| 1466 | constructor would have, we should be OK. If it ever changes so that | ||
| 1467 | we *must* pass a class, we'll create an UnsafeHTTPSConnection class | ||
| 1468 | which just sets check_domain to False in the class definition, and | ||
| 1469 | choose which one to pass to do_open. | ||
| 1470 | """ | ||
| 1471 | result = HTTPSConnection(*args, **kwargs) | ||
| 1472 | if self.ca_certs: | ||
| 1473 | result.ca_certs = self.ca_certs | ||
| 1474 | result.check_domain = self.check_domain | ||
| 1475 | return result | ||
| 1476 | |||
| 1477 | def https_open(self, req): | ||
| 1478 | try: | ||
| 1479 | return self.do_open(self._conn_maker, req) | ||
| 1480 | except URLError as e: | ||
| 1481 | if 'certificate verify failed' in str(e.reason): | ||
| 1482 | raise CertificateError('Unable to verify server certificate ' | ||
| 1483 | 'for %s' % req.host) | ||
| 1484 | else: | ||
| 1485 | raise | ||
| 1486 | |||
| 1487 | # | ||
| 1488 | # To protect against mixing HTTP traffic with HTTPS (examples: a Man-In-The- | ||
| 1489 | # Middle proxy using HTTP listens on port 443, or an index mistakenly serves | ||
| 1490 | # HTML containing an http://xyz link when it should be https://xyz), | ||
| 1491 | # you can use the following handler class, which does not allow HTTP traffic. | ||
| 1492 | # | ||
| 1493 | # It works by inheriting from HTTPHandler - so build_opener won't add a | ||
| 1494 | # handler for HTTP itself. | ||
| 1495 | # | ||
| 1496 | class HTTPSOnlyHandler(HTTPSHandler, HTTPHandler): | ||
| 1497 | def http_open(self, req): | ||
| 1498 | raise URLError('Unexpected HTTP request on what should be a secure ' | ||
| 1499 | 'connection: %s' % req) | ||
| 1500 | |||
| 1501 | # | ||
| 1502 | # XML-RPC with timeouts | ||
| 1503 | # | ||
| 1504 | |||
| 1505 | _ver_info = sys.version_info[:2] | ||
| 1506 | |||
| 1507 | if _ver_info == (2, 6): | ||
| 1508 | class HTTP(httplib.HTTP): | ||
| 1509 | def __init__(self, host='', port=None, **kwargs): | ||
| 1510 | if port == 0: # 0 means use port 0, not the default port | ||
| 1511 | port = None | ||
| 1512 | self._setup(self._connection_class(host, port, **kwargs)) | ||
| 1513 | |||
| 1514 | |||
| 1515 | if ssl: | ||
| 1516 | class HTTPS(httplib.HTTPS): | ||
| 1517 | def __init__(self, host='', port=None, **kwargs): | ||
| 1518 | if port == 0: # 0 means use port 0, not the default port | ||
| 1519 | port = None | ||
| 1520 | self._setup(self._connection_class(host, port, **kwargs)) | ||
| 1521 | |||
| 1522 | |||
| 1523 | class Transport(xmlrpclib.Transport): | ||
| 1524 | def __init__(self, timeout, use_datetime=0): | ||
| 1525 | self.timeout = timeout | ||
| 1526 | xmlrpclib.Transport.__init__(self, use_datetime) | ||
| 1527 | |||
| 1528 | def make_connection(self, host): | ||
| 1529 | h, eh, x509 = self.get_host_info(host) | ||
| 1530 | if _ver_info == (2, 6): | ||
| 1531 | result = HTTP(h, timeout=self.timeout) | ||
| 1532 | else: | ||
| 1533 | if not self._connection or host != self._connection[0]: | ||
| 1534 | self._extra_headers = eh | ||
| 1535 | self._connection = host, httplib.HTTPConnection(h) | ||
| 1536 | result = self._connection[1] | ||
| 1537 | return result | ||
| 1538 | |||
| 1539 | if ssl: | ||
| 1540 | class SafeTransport(xmlrpclib.SafeTransport): | ||
| 1541 | def __init__(self, timeout, use_datetime=0): | ||
| 1542 | self.timeout = timeout | ||
| 1543 | xmlrpclib.SafeTransport.__init__(self, use_datetime) | ||
| 1544 | |||
| 1545 | def make_connection(self, host): | ||
| 1546 | h, eh, kwargs = self.get_host_info(host) | ||
| 1547 | if not kwargs: | ||
| 1548 | kwargs = {} | ||
| 1549 | kwargs['timeout'] = self.timeout | ||
| 1550 | if _ver_info == (2, 6): | ||
| 1551 | result = HTTPS(h, None, **kwargs)  # parsed host 'h', as in Transport | ||
| 1552 | else: | ||
| 1553 | if not self._connection or host != self._connection[0]: | ||
| 1554 | self._extra_headers = eh | ||
| 1555 | self._connection = host, httplib.HTTPSConnection(h, None, | ||
| 1556 | **kwargs) | ||
| 1557 | result = self._connection[1] | ||
| 1558 | return result | ||
| 1559 | |||
| 1560 | |||
| 1561 | class ServerProxy(xmlrpclib.ServerProxy): | ||
| 1562 | def __init__(self, uri, **kwargs): | ||
| 1563 | self.timeout = timeout = kwargs.pop('timeout', None) | ||
| 1564 | # The above classes only come into play if a timeout | ||
| 1565 | # is specified | ||
| 1566 | if timeout is not None: | ||
| 1567 | scheme, _ = splittype(uri) | ||
| 1568 | use_datetime = kwargs.get('use_datetime', 0) | ||
| 1569 | if scheme == 'https': | ||
| 1570 | tcls = SafeTransport | ||
| 1571 | else: | ||
| 1572 | tcls = Transport | ||
| 1573 | kwargs['transport'] = t = tcls(timeout, use_datetime=use_datetime) | ||
| 1574 | self.transport = t | ||
| 1575 | xmlrpclib.ServerProxy.__init__(self, uri, **kwargs) | ||
| 1576 | |||
| 1577 | # | ||
| 1578 | # CSV functionality. This is provided because on 2.x, the csv module can't | ||
| 1579 | # handle Unicode. However, we need to deal with Unicode in e.g. RECORD files. | ||
| 1580 | # | ||
| 1581 | |||
| 1582 | def _csv_open(fn, mode, **kwargs): | ||
| 1583 | if sys.version_info[0] < 3: | ||
| 1584 | mode += 'b' | ||
| 1585 | else: | ||
| 1586 | kwargs['newline'] = '' | ||
| 1587 | # Python 3 determines encoding from locale. Force 'utf-8' | ||
| 1588 | # file encoding to match other forced utf-8 encoding | ||
| 1589 | kwargs['encoding'] = 'utf-8' | ||
| 1590 | return open(fn, mode, **kwargs) | ||
| 1591 | |||
| 1592 | |||
| 1593 | class CSVBase(object): | ||
| 1594 | defaults = { | ||
| 1595 | 'delimiter': str(','), # The strs are used because we need native | ||
| 1596 | 'quotechar': str('"'), # str in the csv API (2.x won't take | ||
| 1597 | 'lineterminator': str('\n') # Unicode) | ||
| 1598 | } | ||
| 1599 | |||
| 1600 | def __enter__(self): | ||
| 1601 | return self | ||
| 1602 | |||
| 1603 | def __exit__(self, *exc_info): | ||
| 1604 | self.stream.close() | ||
| 1605 | |||
| 1606 | |||
| 1607 | class CSVReader(CSVBase): | ||
| 1608 | def __init__(self, **kwargs): | ||
| 1609 | if 'stream' in kwargs: | ||
| 1610 | stream = kwargs['stream'] | ||
| 1611 | if sys.version_info[0] >= 3: | ||
| 1612 | # needs to be a text stream | ||
| 1613 | stream = codecs.getreader('utf-8')(stream) | ||
| 1614 | self.stream = stream | ||
| 1615 | else: | ||
| 1616 | self.stream = _csv_open(kwargs['path'], 'r') | ||
| 1617 | self.reader = csv.reader(self.stream, **self.defaults) | ||
| 1618 | |||
| 1619 | def __iter__(self): | ||
| 1620 | return self | ||
| 1621 | |||
| 1622 | def next(self): | ||
| 1623 | result = next(self.reader) | ||
| 1624 | if sys.version_info[0] < 3: | ||
| 1625 | for i, item in enumerate(result): | ||
| 1626 | if not isinstance(item, text_type): | ||
| 1627 | result[i] = item.decode('utf-8') | ||
| 1628 | return result | ||
| 1629 | |||
| 1630 | __next__ = next | ||
| 1631 | |||
| 1632 | class CSVWriter(CSVBase): | ||
| 1633 | def __init__(self, fn, **kwargs): | ||
| 1634 | self.stream = _csv_open(fn, 'w') | ||
| 1635 | self.writer = csv.writer(self.stream, **self.defaults) | ||
| 1636 | |||
| 1637 | def writerow(self, row): | ||
| 1638 | if sys.version_info[0] < 3: | ||
| 1639 | r = [] | ||
| 1640 | for item in row: | ||
| 1641 | if isinstance(item, text_type): | ||
| 1642 | item = item.encode('utf-8') | ||
| 1643 | r.append(item) | ||
| 1644 | row = r | ||
| 1645 | self.writer.writerow(row) | ||
| 1646 | |||
| 1647 | # | ||
| 1648 | # Configurator functionality | ||
| 1649 | # | ||
| 1650 | |||
| 1651 | class Configurator(BaseConfigurator): | ||
| 1652 | |||
| 1653 | value_converters = dict(BaseConfigurator.value_converters) | ||
| 1654 | value_converters['inc'] = 'inc_convert' | ||
| 1655 | |||
| 1656 | def __init__(self, config, base=None): | ||
| 1657 | super(Configurator, self).__init__(config) | ||
| 1658 | self.base = base or os.getcwd() | ||
| 1659 | |||
| 1660 | def configure_custom(self, config): | ||
| 1661 | def convert(o): | ||
| 1662 | if isinstance(o, (list, tuple)): | ||
| 1663 | result = type(o)([convert(i) for i in o]) | ||
| 1664 | elif isinstance(o, dict): | ||
| 1665 | if '()' in o: | ||
| 1666 | result = self.configure_custom(o) | ||
| 1667 | else: | ||
| 1668 | result = {} | ||
| 1669 | for k in o: | ||
| 1670 | result[k] = convert(o[k]) | ||
| 1671 | else: | ||
| 1672 | result = self.convert(o) | ||
| 1673 | return result | ||
| 1674 | |||
| 1675 | c = config.pop('()') | ||
| 1676 | if not callable(c): | ||
| 1677 | c = self.resolve(c) | ||
| 1678 | props = config.pop('.', None) | ||
| 1679 | # Check for valid identifiers | ||
| 1680 | args = config.pop('[]', ()) | ||
| 1681 | if args: | ||
| 1682 | args = tuple([convert(o) for o in args]) | ||
| 1683 | items = [(k, convert(config[k])) for k in config if valid_ident(k)] | ||
| 1684 | kwargs = dict(items) | ||
| 1685 | result = c(*args, **kwargs) | ||
| 1686 | if props: | ||
| 1687 | for n, v in props.items(): | ||
| 1688 | setattr(result, n, convert(v)) | ||
| 1689 | return result | ||
| 1690 | |||
| 1691 | def __getitem__(self, key): | ||
| 1692 | result = self.config[key] | ||
| 1693 | if isinstance(result, dict) and '()' in result: | ||
| 1694 | self.config[key] = result = self.configure_custom(result) | ||
| 1695 | return result | ||
| 1696 | |||
| 1697 | def inc_convert(self, value): | ||
| 1698 | """Default converter for the inc:// protocol.""" | ||
| 1699 | if not os.path.isabs(value): | ||
| 1700 | value = os.path.join(self.base, value) | ||
| 1701 | with codecs.open(value, 'r', encoding='utf-8') as f: | ||
| 1702 | result = json.load(f) | ||
| 1703 | return result | ||
| 1704 | |||
| 1705 | |||
| 1706 | class SubprocessMixin(object): | ||
| 1707 | """ | ||
| 1708 | Mixin for running subprocesses and capturing their output | ||
| 1709 | """ | ||
| 1710 | def __init__(self, verbose=False, progress=None): | ||
| 1711 | self.verbose = verbose | ||
| 1712 | self.progress = progress | ||
| 1713 | |||
| 1714 | def reader(self, stream, context): | ||
| 1715 | """ | ||
| 1716 | Read lines from a subprocess' output stream and either pass to a progress | ||
| 1717 | callable (if specified) or write progress information to sys.stderr. | ||
| 1718 | """ | ||
| 1719 | progress = self.progress | ||
| 1720 | verbose = self.verbose | ||
| 1721 | while True: | ||
| 1722 | s = stream.readline() | ||
| 1723 | if not s: | ||
| 1724 | break | ||
| 1725 | if progress is not None: | ||
| 1726 | progress(s, context) | ||
| 1727 | else: | ||
| 1728 | if not verbose: | ||
| 1729 | sys.stderr.write('.') | ||
| 1730 | else: | ||
| 1731 | sys.stderr.write(s.decode('utf-8')) | ||
| 1732 | sys.stderr.flush() | ||
| 1733 | stream.close() | ||
| 1734 | |||
| 1735 | def run_command(self, cmd, **kwargs): | ||
| 1736 | p = subprocess.Popen(cmd, stdout=subprocess.PIPE, | ||
| 1737 | stderr=subprocess.PIPE, **kwargs) | ||
| 1738 | t1 = threading.Thread(target=self.reader, args=(p.stdout, 'stdout')) | ||
| 1739 | t1.start() | ||
| 1740 | t2 = threading.Thread(target=self.reader, args=(p.stderr, 'stderr')) | ||
| 1741 | t2.start() | ||
| 1742 | p.wait() | ||
| 1743 | t1.join() | ||
| 1744 | t2.join() | ||
| 1745 | if self.progress is not None: | ||
| 1746 | self.progress('done.', 'main') | ||
| 1747 | elif self.verbose: | ||
| 1748 | sys.stderr.write('done.\n') | ||
| 1749 | return p | ||
| 1750 | |||
| 1751 | |||
| 1752 | def normalize_name(name): | ||
| 1753 | """Normalize a python package name a la PEP 503""" | ||
| 1754 | # https://www.python.org/dev/peps/pep-0503/#normalized-names | ||
| 1755 | return re.sub('[-_.]+', '-', name).lower() | ||
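| | |||
| | |||
| | # Editor's sketch, not part of distlib: runs of '-', '_' and '.' collapse | ||
| | # to single hyphens. | ||
| | def _demo_normalize_name(): | ||
| |     assert normalize_name('Django_Rest.Framework') == 'django-rest-framework' | ||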
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/version.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/version.py new file mode 100644 index 0000000..959f153 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/version.py | |||
| @@ -0,0 +1,736 @@ | |||
| 1 | # -*- coding: utf-8 -*- | ||
| 2 | # | ||
| 3 | # Copyright (C) 2012-2017 The Python Software Foundation. | ||
| 4 | # See LICENSE.txt and CONTRIBUTORS.txt. | ||
| 5 | # | ||
| 6 | """ | ||
| 7 | Implementation of a flexible versioning scheme providing support for PEP-440, | ||
| 8 | setuptools-compatible and semantic versioning. | ||
| 9 | """ | ||
| 10 | |||
| 11 | import logging | ||
| 12 | import re | ||
| 13 | |||
| 14 | from .compat import string_types | ||
| 15 | from .util import parse_requirement | ||
| 16 | |||
| 17 | __all__ = ['NormalizedVersion', 'NormalizedMatcher', | ||
| 18 | 'LegacyVersion', 'LegacyMatcher', | ||
| 19 | 'SemanticVersion', 'SemanticMatcher', | ||
| 20 | 'UnsupportedVersionError', 'get_scheme'] | ||
| 21 | |||
| 22 | logger = logging.getLogger(__name__) | ||
| 23 | |||
| 24 | |||
| 25 | class UnsupportedVersionError(ValueError): | ||
| 26 | """This is an unsupported version.""" | ||
| 27 | pass | ||
| 28 | |||
| 29 | |||
| 30 | class Version(object): | ||
| 31 | def __init__(self, s): | ||
| 32 | self._string = s = s.strip() | ||
| 33 | self._parts = parts = self.parse(s) | ||
| 34 | assert isinstance(parts, tuple) | ||
| 35 | assert len(parts) > 0 | ||
| 36 | |||
| 37 | def parse(self, s): | ||
| 38 | raise NotImplementedError('please implement in a subclass') | ||
| 39 | |||
| 40 | def _check_compatible(self, other): | ||
| 41 | if type(self) != type(other): | ||
| 42 | raise TypeError('cannot compare %r and %r' % (self, other)) | ||
| 43 | |||
| 44 | def __eq__(self, other): | ||
| 45 | self._check_compatible(other) | ||
| 46 | return self._parts == other._parts | ||
| 47 | |||
| 48 | def __ne__(self, other): | ||
| 49 | return not self.__eq__(other) | ||
| 50 | |||
| 51 | def __lt__(self, other): | ||
| 52 | self._check_compatible(other) | ||
| 53 | return self._parts < other._parts | ||
| 54 | |||
| 55 | def __gt__(self, other): | ||
| 56 | return not (self.__lt__(other) or self.__eq__(other)) | ||
| 57 | |||
| 58 | def __le__(self, other): | ||
| 59 | return self.__lt__(other) or self.__eq__(other) | ||
| 60 | |||
| 61 | def __ge__(self, other): | ||
| 62 | return self.__gt__(other) or self.__eq__(other) | ||
| 63 | |||
| 64 | # See http://docs.python.org/reference/datamodel#object.__hash__ | ||
| 65 | def __hash__(self): | ||
| 66 | return hash(self._parts) | ||
| 67 | |||
| 68 | def __repr__(self): | ||
| 69 | return "%s('%s')" % (self.__class__.__name__, self._string) | ||
| 70 | |||
| 71 | def __str__(self): | ||
| 72 | return self._string | ||
| 73 | |||
| 74 | @property | ||
| 75 | def is_prerelease(self): | ||
| 76 | raise NotImplementedError('Please implement in subclasses.') | ||
| 77 | |||
| 78 | |||
| 79 | class Matcher(object): | ||
| 80 | version_class = None | ||
| 81 | |||
| 82 | # value is either a callable or the name of a method | ||
| 83 | _operators = { | ||
| 84 | '<': lambda v, c, p: v < c, | ||
| 85 | '>': lambda v, c, p: v > c, | ||
| 86 | '<=': lambda v, c, p: v == c or v < c, | ||
| 87 | '>=': lambda v, c, p: v == c or v > c, | ||
| 88 | '==': lambda v, c, p: v == c, | ||
| 89 | '===': lambda v, c, p: v == c, | ||
| 90 | # by default, compatible => >=. | ||
| 91 | '~=': lambda v, c, p: v == c or v > c, | ||
| 92 | '!=': lambda v, c, p: v != c, | ||
| 93 | } | ||
| 94 | |||
| 95 | # this is a method only to support alternative implementations | ||
| 96 | # via overriding | ||
| 97 | def parse_requirement(self, s): | ||
| 98 | return parse_requirement(s) | ||
| 99 | |||
| 100 | def __init__(self, s): | ||
| 101 | if self.version_class is None: | ||
| 102 | raise ValueError('Please specify a version class') | ||
| 103 | self._string = s = s.strip() | ||
| 104 | r = self.parse_requirement(s) | ||
| 105 | if not r: | ||
| 106 | raise ValueError('Not valid: %r' % s) | ||
| 107 | self.name = r.name | ||
| 108 | self.key = self.name.lower() # for case-insensitive comparisons | ||
| 109 | clist = [] | ||
| 110 | if r.constraints: | ||
| 111 | # import pdb; pdb.set_trace() | ||
| 112 | for op, s in r.constraints: | ||
| 113 | if s.endswith('.*'): | ||
| 114 | if op not in ('==', '!='): | ||
| 115 | raise ValueError('\'.*\' not allowed for ' | ||
| 116 | '%r constraints' % op) | ||
| 117 | # Could be a partial version (e.g. for '2.*') which | ||
| 118 | # won't parse as a version, so keep it as a string | ||
| 119 | vn, prefix = s[:-2], True | ||
| 120 | # Just to check that vn is a valid version | ||
| 121 | self.version_class(vn) | ||
| 122 | else: | ||
| 123 | # Should parse as a version, so we can create an | ||
| 124 | # instance for the comparison | ||
| 125 | vn, prefix = self.version_class(s), False | ||
| 126 | clist.append((op, vn, prefix)) | ||
| 127 | self._parts = tuple(clist) | ||
| 128 | |||
| 129 | def match(self, version): | ||
| 130 | """ | ||
| 131 | Check if the provided version matches the constraints. | ||
| 132 | |||
| 133 | :param version: The version to match against this instance. | ||
| 134 | :type version: String or :class:`Version` instance. | ||
| 135 | """ | ||
| 136 | if isinstance(version, string_types): | ||
| 137 | version = self.version_class(version) | ||
| 138 | for operator, constraint, prefix in self._parts: | ||
| 139 | f = self._operators.get(operator) | ||
| 140 | if isinstance(f, string_types): | ||
| 141 | f = getattr(self, f) | ||
| 142 | if not f: | ||
| 143 | msg = ('%r not implemented ' | ||
| 144 | 'for %s' % (operator, self.__class__.__name__)) | ||
| 145 | raise NotImplementedError(msg) | ||
| 146 | if not f(version, constraint, prefix): | ||
| 147 | return False | ||
| 148 | return True | ||
| 149 | |||
| 150 | @property | ||
| 151 | def exact_version(self): | ||
| 152 | result = None | ||
| 153 | if len(self._parts) == 1 and self._parts[0][0] in ('==', '==='): | ||
| 154 | result = self._parts[0][1] | ||
| 155 | return result | ||
| 156 | |||
| 157 | def _check_compatible(self, other): | ||
| 158 | if type(self) != type(other) or self.name != other.name: | ||
| 159 | raise TypeError('cannot compare %s and %s' % (self, other)) | ||
| 160 | |||
| 161 | def __eq__(self, other): | ||
| 162 | self._check_compatible(other) | ||
| 163 | return self.key == other.key and self._parts == other._parts | ||
| 164 | |||
| 165 | def __ne__(self, other): | ||
| 166 | return not self.__eq__(other) | ||
| 167 | |||
| 168 | # See http://docs.python.org/reference/datamodel#object.__hash__ | ||
| 169 | def __hash__(self): | ||
| 170 | return hash(self.key) + hash(self._parts) | ||
| 171 | |||
| 172 | def __repr__(self): | ||
| 173 | return "%s(%r)" % (self.__class__.__name__, self._string) | ||
| 174 | |||
| 175 | def __str__(self): | ||
| 176 | return self._string | ||
| 177 | |||
| 178 | |||
| 179 | PEP440_VERSION_RE = re.compile(r'^v?(\d+!)?(\d+(\.\d+)*)((a|b|c|rc)(\d+))?' | ||
| 180 | r'(\.(post)(\d+))?(\.(dev)(\d+))?' | ||
| 181 | r'(\+([a-zA-Z\d]+(\.[a-zA-Z\d]+)?))?$') | ||
| 182 | |||
| 183 | |||
| 184 | def _pep_440_key(s): | ||
| 185 | s = s.strip() | ||
| 186 | m = PEP440_VERSION_RE.match(s) | ||
| 187 | if not m: | ||
| 188 | raise UnsupportedVersionError('Not a valid version: %s' % s) | ||
| 189 | groups = m.groups() | ||
| 190 | nums = tuple(int(v) for v in groups[1].split('.')) | ||
| 191 | while len(nums) > 1 and nums[-1] == 0: | ||
| 192 | nums = nums[:-1] | ||
| 193 | |||
| 194 | if not groups[0]: | ||
| 195 | epoch = 0 | ||
| 196 | else: | ||
| 197 | epoch = int(groups[0]) | ||
| 198 | pre = groups[4:6] | ||
| 199 | post = groups[7:9] | ||
| 200 | dev = groups[10:12] | ||
| 201 | local = groups[13] | ||
| 202 | if pre == (None, None): | ||
| 203 | pre = () | ||
| 204 | else: | ||
| 205 | pre = pre[0], int(pre[1]) | ||
| 206 | if post == (None, None): | ||
| 207 | post = () | ||
| 208 | else: | ||
| 209 | post = post[0], int(post[1]) | ||
| 210 | if dev == (None, None): | ||
| 211 | dev = () | ||
| 212 | else: | ||
| 213 | dev = dev[0], int(dev[1]) | ||
| 214 | if local is None: | ||
| 215 | local = () | ||
| 216 | else: | ||
| 217 | parts = [] | ||
| 218 | for part in local.split('.'): | ||
| 219 | # To ensure that numeric parts compare as greater than | ||
| 220 | # lexicographic ones, avoid comparing them directly; instead | ||
| 221 | # encode each part as a tuple which sorts correctly | ||
| 222 | if part.isdigit(): | ||
| 223 | part = (1, int(part)) | ||
| 224 | else: | ||
| 225 | part = (0, part) | ||
| 226 | parts.append(part) | ||
| 227 | local = tuple(parts) | ||
| 228 | if not pre: | ||
| 229 | # either before pre-release, or final release and after | ||
| 230 | if not post and dev: | ||
| 231 | # before pre-release | ||
| 232 | pre = ('a', -1) # to sort before a0 | ||
| 233 | else: | ||
| 234 | pre = ('z',) # to sort after all pre-releases | ||
| 235 | # now look at the state of post and dev. | ||
| 236 | if not post: | ||
| 237 | post = ('_',) # sort before 'a' | ||
| 238 | if not dev: | ||
| 239 | dev = ('final',) | ||
| 240 | |||
| 241 | #print('%s -> %s' % (s, m.groups())) | ||
| 242 | return epoch, nums, pre, post, dev, local | ||
| 243 | |||
| 244 | |||
| 245 | _normalized_key = _pep_440_key | ||
| 246 | |||
| 247 | |||
| 248 | class NormalizedVersion(Version): | ||
| 249 | """A rational version. | ||
| 250 | |||
| 251 | Good: | ||
| 252 | 1.2 # equivalent to "1.2.0" | ||
| 253 | 1.2.0 | ||
| 254 | 1.2a1 | ||
| 255 | 1.2.3a2 | ||
| 256 | 1.2.3b1 | ||
| 257 | 1.2.3c1 | ||
| 258 | 1.2.3.4 | ||
| 259 | TODO: fill this out | ||
| 260 | |||
| 261 | Bad: | ||
| 262 | 1 # minimum two numbers | ||
| 263 | 1.2a # release level must have a release serial | ||
| 264 | 1.2.3b | ||
| 265 | """ | ||
| 266 | def parse(self, s): | ||
| 267 | result = _normalized_key(s) | ||
| 268 | # _normalized_key loses trailing zeroes in the release | ||
| 269 | # clause, since that's needed to ensure that X.Y == X.Y.0 == X.Y.0.0 | ||
| 270 | # However, PEP 440 prefix matching needs it: for example, | ||
| 271 | # (~= 1.4.5.0) matches differently to (~= 1.4.5.0.0). | ||
| 272 | m = PEP440_VERSION_RE.match(s) # must succeed | ||
| 273 | groups = m.groups() | ||
| 274 | self._release_clause = tuple(int(v) for v in groups[1].split('.')) | ||
| 275 | return result | ||
| 276 | |||
| 277 | PREREL_TAGS = set(['a', 'b', 'c', 'rc', 'dev']) | ||
| 278 | |||
| 279 | @property | ||
| 280 | def is_prerelease(self): | ||
| 281 | return any(t[0] in self.PREREL_TAGS for t in self._parts if t) | ||
| 282 | |||
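| | # Annotation (not part of the original module): the _pep_440_key tuples | ||
| | # give NormalizedVersion a PEP 440-style total ordering, e.g.: | ||
| | # | ||
| | #   >>> from pip._vendor.distlib.version import NormalizedVersion as NV | ||
| | #   >>> (NV('1.0.dev1') < NV('1.0a1') < NV('1.0b1') < NV('1.0rc1') | ||
| | #   ...  < NV('1.0') < NV('1.0.post1')) | ||
| | #   True | ||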
| 283 | |||
| 284 | def _match_prefix(x, y): | ||
| 285 | x = str(x) | ||
| 286 | y = str(y) | ||
| 287 | if x == y: | ||
| 288 | return True | ||
| 289 | if not x.startswith(y): | ||
| 290 | return False | ||
| 291 | n = len(y) | ||
| 292 | return x[n] == '.' | ||
| 293 | |||
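| | # Annotation: _match_prefix implements the component-boundary check used | ||
| | # for '== X.Y.*'-style constraints, e.g.: | ||
| | # | ||
| | #   _match_prefix('1.4.5', '1.4')  # True: '1.4' followed by a '.' | ||
| | #   _match_prefix('1.45', '1.4')   # False: not on a component boundary | ||
| | #   _match_prefix('1.4', '1.4.5')  # False: shorter than the prefix | ||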
| 294 | |||
| 295 | class NormalizedMatcher(Matcher): | ||
| 296 | version_class = NormalizedVersion | ||
| 297 | |||
| 298 | # value is either a callable or the name of a method | ||
| 299 | _operators = { | ||
| 300 | '~=': '_match_compatible', | ||
| 301 | '<': '_match_lt', | ||
| 302 | '>': '_match_gt', | ||
| 303 | '<=': '_match_le', | ||
| 304 | '>=': '_match_ge', | ||
| 305 | '==': '_match_eq', | ||
| 306 | '===': '_match_arbitrary', | ||
| 307 | '!=': '_match_ne', | ||
| 308 | } | ||
| 309 | |||
| 310 | def _adjust_local(self, version, constraint, prefix): | ||
| 311 | if prefix: | ||
| 312 | strip_local = '+' not in constraint and version._parts[-1] | ||
| 313 | else: | ||
| 314 | # both constraint and version are | ||
| 315 | # NormalizedVersion instances. | ||
| 316 | # If constraint does not have a local component, | ||
| 317 | # ensure the version doesn't, either. | ||
| 318 | strip_local = not constraint._parts[-1] and version._parts[-1] | ||
| 319 | if strip_local: | ||
| 320 | s = version._string.split('+', 1)[0] | ||
| 321 | version = self.version_class(s) | ||
| 322 | return version, constraint | ||
| 323 | |||
| 324 | def _match_lt(self, version, constraint, prefix): | ||
| 325 | version, constraint = self._adjust_local(version, constraint, prefix) | ||
| 326 | if version >= constraint: | ||
| 327 | return False | ||
| 328 | release_clause = constraint._release_clause | ||
| 329 | pfx = '.'.join([str(i) for i in release_clause]) | ||
| 330 | return not _match_prefix(version, pfx) | ||
| 331 | |||
| 332 | def _match_gt(self, version, constraint, prefix): | ||
| 333 | version, constraint = self._adjust_local(version, constraint, prefix) | ||
| 334 | if version <= constraint: | ||
| 335 | return False | ||
| 336 | release_clause = constraint._release_clause | ||
| 337 | pfx = '.'.join([str(i) for i in release_clause]) | ||
| 338 | return not _match_prefix(version, pfx) | ||
| 339 | |||
| 340 | def _match_le(self, version, constraint, prefix): | ||
| 341 | version, constraint = self._adjust_local(version, constraint, prefix) | ||
| 342 | return version <= constraint | ||
| 343 | |||
| 344 | def _match_ge(self, version, constraint, prefix): | ||
| 345 | version, constraint = self._adjust_local(version, constraint, prefix) | ||
| 346 | return version >= constraint | ||
| 347 | |||
| 348 | def _match_eq(self, version, constraint, prefix): | ||
| 349 | version, constraint = self._adjust_local(version, constraint, prefix) | ||
| 350 | if not prefix: | ||
| 351 | result = (version == constraint) | ||
| 352 | else: | ||
| 353 | result = _match_prefix(version, constraint) | ||
| 354 | return result | ||
| 355 | |||
| 356 | def _match_arbitrary(self, version, constraint, prefix): | ||
| 357 | return str(version) == str(constraint) | ||
| 358 | |||
| 359 | def _match_ne(self, version, constraint, prefix): | ||
| 360 | version, constraint = self._adjust_local(version, constraint, prefix) | ||
| 361 | if not prefix: | ||
| 362 | result = (version != constraint) | ||
| 363 | else: | ||
| 364 | result = not _match_prefix(version, constraint) | ||
| 365 | return result | ||
| 366 | |||
| 367 | def _match_compatible(self, version, constraint, prefix): | ||
| 368 | version, constraint = self._adjust_local(version, constraint, prefix) | ||
| 369 | if version == constraint: | ||
| 370 | return True | ||
| 371 | if version < constraint: | ||
| 372 | return False | ||
| 373 | # if not prefix: | ||
| 374 | # return True | ||
| 375 | release_clause = constraint._release_clause | ||
| 376 | if len(release_clause) > 1: | ||
| 377 | release_clause = release_clause[:-1] | ||
| 378 | pfx = '.'.join([str(i) for i in release_clause]) | ||
| 379 | return _match_prefix(version, pfx) | ||
| 380 | |||
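| | # Usage sketch (annotation): '~= 1.4.5' drops the final release component | ||
| | # and prefix-matches the rest, per PEP 440 compatible-release semantics: | ||
| | # | ||
| | #   >>> from pip._vendor.distlib.version import NormalizedMatcher | ||
| | #   >>> m = NormalizedMatcher('foo (~= 1.4.5)') | ||
| | #   >>> m.match('1.4.9'), m.match('1.5.0') | ||
| | #   (True, False) | ||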
| 381 | _REPLACEMENTS = ( | ||
| 382 | (re.compile('[.+-]$'), ''), # remove trailing puncts | ||
| 383 | (re.compile(r'^[.](\d)'), r'0.\1'), # .N -> 0.N at start | ||
| 384 | (re.compile('^[.-]'), ''), # remove leading puncts | ||
| 385 | (re.compile(r'^\((.*)\)$'), r'\1'), # remove parentheses | ||
| 386 | (re.compile(r'^v(ersion)?\s*(\d+)'), r'\2'), # remove leading v(ersion) | ||
| 387 | (re.compile(r'^r(ev)?\s*(\d+)'), r'\2'), # remove leading r(ev) | ||
| 388 | (re.compile('[.]{2,}'), '.'), # multiple runs of '.' | ||
| 389 | (re.compile(r'\b(alfa|apha)\b'), 'alpha'), # misspelt alpha | ||
| 390 | (re.compile(r'\b(pre-alpha|prealpha)\b'), | ||
| 391 | 'pre.alpha'), # standardise | ||
| 392 | (re.compile(r'\(beta\)$'), 'beta'), # remove parentheses | ||
| 393 | ) | ||
| 394 | |||
| 395 | _SUFFIX_REPLACEMENTS = ( | ||
| 396 | (re.compile('^[:~._+-]+'), ''), # remove leading puncts | ||
| 397 | (re.compile('[,*")([\\]]'), ''), # remove unwanted chars | ||
| 398 | (re.compile('[~:+_ -]'), '.'), # replace illegal chars | ||
| 399 | (re.compile('[.]{2,}'), '.'), # multiple runs of '.' | ||
| 400 | (re.compile(r'\.$'), ''), # trailing '.' | ||
| 401 | ) | ||
| 402 | |||
| 403 | _NUMERIC_PREFIX = re.compile(r'(\d+(\.\d+)*)') | ||
| 404 | |||
| 405 | |||
| 406 | def _suggest_semantic_version(s): | ||
| 407 | """ | ||
| 408 | Try to suggest a semantic form for a version for which | ||
| 409 | _suggest_normalized_version couldn't come up with anything. | ||
| 410 | """ | ||
| 411 | result = s.strip().lower() | ||
| 412 | for pat, repl in _REPLACEMENTS: | ||
| 413 | result = pat.sub(repl, result) | ||
| 414 | if not result: | ||
| 415 | result = '0.0.0' | ||
| 416 | |||
| 417 | # Now look for numeric prefix, and separate it out from | ||
| 418 | # the rest. | ||
| 419 | #import pdb; pdb.set_trace() | ||
| 420 | m = _NUMERIC_PREFIX.match(result) | ||
| 421 | if not m: | ||
| 422 | prefix = '0.0.0' | ||
| 423 | suffix = result | ||
| 424 | else: | ||
| 425 | prefix = m.groups()[0].split('.') | ||
| 426 | prefix = [int(i) for i in prefix] | ||
| 427 | while len(prefix) < 3: | ||
| 428 | prefix.append(0) | ||
| 429 | if len(prefix) == 3: | ||
| 430 | suffix = result[m.end():] | ||
| 431 | else: | ||
| 432 | suffix = '.'.join([str(i) for i in prefix[3:]]) + result[m.end():] | ||
| 433 | prefix = prefix[:3] | ||
| 434 | prefix = '.'.join([str(i) for i in prefix]) | ||
| 435 | suffix = suffix.strip() | ||
| 436 | if suffix: | ||
| 437 | #import pdb; pdb.set_trace() | ||
| 438 | # massage the suffix. | ||
| 439 | for pat, repl in _SUFFIX_REPLACEMENTS: | ||
| 440 | suffix = pat.sub(repl, suffix) | ||
| 441 | |||
| 442 | if not suffix: | ||
| 443 | result = prefix | ||
| 444 | else: | ||
| 445 | sep = '-' if 'dev' in suffix else '+' | ||
| 446 | result = prefix + sep + suffix | ||
| 447 | if not is_semver(result): | ||
| 448 | result = None | ||
| 449 | return result | ||
| 450 | |||
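| | # Annotation: for example, 'v1.0' fails semantic parsing, but the cleanups | ||
| | # above pad it out to three components: | ||
| | # | ||
| | #   >>> _suggest_semantic_version('v1.0') | ||
| | #   '1.0.0' | ||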
| 451 | |||
| 452 | def _suggest_normalized_version(s): | ||
| 453 | """Suggest a normalized version close to the given version string. | ||
| 454 | |||
| 455 | If you have a version string that isn't rational (i.e. NormalizedVersion | ||
| 456 | doesn't like it) then you might be able to get an equivalent (or close) | ||
| 457 | rational version from this function. | ||
| 458 | |||
| 459 | This does a number of simple normalizations to the given string, based | ||
| 460 | on observation of versions currently in use on PyPI. Given a dump of | ||
| 461 | those versions during PyCon 2009, 4287 of them: | ||
| 462 | - 2312 (53.93%) match NormalizedVersion without change | ||
| 463 | - 3474 (81.04%) match with the automatic suggestion provided | ||
| 464 | by this method | ||
| 465 | |||
| 466 | @param s {str} An irrational version string. | ||
| 467 | @returns A rational version string, or None if one couldn't be determined. | ||
| 468 | """ | ||
| 469 | try: | ||
| 470 | _normalized_key(s) | ||
| 471 | return s # already rational | ||
| 472 | except UnsupportedVersionError: | ||
| 473 | pass | ||
| 474 | |||
| 475 | rs = s.lower() | ||
| 476 | |||
| 477 | # part of this could use maketrans | ||
| 478 | for orig, repl in (('-alpha', 'a'), ('-beta', 'b'), ('alpha', 'a'), | ||
| 479 | ('beta', 'b'), ('rc', 'c'), ('-final', ''), | ||
| 480 | ('-pre', 'c'), | ||
| 481 | ('-release', ''), ('.release', ''), ('-stable', ''), | ||
| 482 | ('+', '.'), ('_', '.'), (' ', ''), ('.final', ''), | ||
| 483 | ('final', '')): | ||
| 484 | rs = rs.replace(orig, repl) | ||
| 485 | |||
| 486 | # if something ends with dev or pre, we add a 0 | ||
| 487 | rs = re.sub(r"pre$", r"pre0", rs) | ||
| 488 | rs = re.sub(r"dev$", r"dev0", rs) | ||
| 489 | |||
| 490 | # if we have something like "b-2" or "a.2" at the end of the | ||
| 491 | # version, that is probably beta, alpha, etc | ||
| 492 | # let's remove the dash or dot | ||
| 493 | rs = re.sub(r"([abc]|rc)[\-\.](\d+)$", r"\1\2", rs) | ||
| 494 | |||
| 495 | # 1.0-dev-r371 -> 1.0.dev371 | ||
| 496 | # 0.1-dev-r79 -> 0.1.dev79 | ||
| 497 | rs = re.sub(r"[\-\.](dev)[\-\.]?r?(\d+)$", r".\1\2", rs) | ||
| 498 | |||
| 499 | # Clean: 2.0.a.3, 2.0.b1, 0.9.0~c1 | ||
| 500 | rs = re.sub(r"[.~]?([abc])\.?", r"\1", rs) | ||
| 501 | |||
| 502 | # Clean: v0.3, v1.0 | ||
| 503 | if rs.startswith('v'): | ||
| 504 | rs = rs[1:] | ||
| 505 | |||
| 506 | # Clean leading '0's on numbers. | ||
| 507 | #TODO: unintended side-effect on, e.g., "2003.05.09" | ||
| 508 | # PyPI stats: 77 (~2%) better | ||
| 509 | rs = re.sub(r"\b0+(\d+)(?!\d)", r"\1", rs) | ||
| 510 | |||
| 511 | # Clean a/b/c with no version. E.g. "1.0a" -> "1.0a0". Setuptools infers | ||
| 512 | # zero. | ||
| 513 | # PyPI stats: 245 (7.56%) better | ||
| 514 | rs = re.sub(r"(\d+[abc])$", r"\g<1>0", rs) | ||
| 515 | |||
| 516 | # the 'dev-rNNN' tag is a dev tag | ||
| 517 | rs = re.sub(r"\.?(dev-r|dev\.r)\.?(\d+)$", r".dev\2", rs) | ||
| 518 | |||
| 519 | # clean the - when used as a pre delimiter | ||
| 520 | rs = re.sub(r"-(a|b|c)(\d+)$", r"\1\2", rs) | ||
| 521 | |||
| 522 | # a terminal "dev" or "devel" can be changed into ".dev0" | ||
| 523 | rs = re.sub(r"[\.\-](dev|devel)$", r".dev0", rs) | ||
| 524 | |||
| 525 | # a terminal "dev" can be changed into ".dev0" | ||
| 526 | rs = re.sub(r"(?![\.\-])dev$", r".dev0", rs) | ||
| 527 | |||
| 528 | # a terminal "final" or "stable" can be removed | ||
| 529 | rs = re.sub(r"(final|stable)$", "", rs) | ||
| 530 | |||
| 531 | # The 'r' and the '-' tags are post release tags | ||
| 532 | # 0.4a1.r10 -> 0.4a1.post10 | ||
| 533 | # 0.9.33-17222 -> 0.9.33.post17222 | ||
| 534 | # 0.9.33-r17222 -> 0.9.33.post17222 | ||
| 535 | rs = re.sub(r"\.?(r|-|-r)\.?(\d+)$", r".post\2", rs) | ||
| 536 | |||
| 537 | # Clean 'r' instead of 'dev' usage: | ||
| 538 | # 0.9.33+r17222 -> 0.9.33.dev17222 | ||
| 539 | # 1.0dev123 -> 1.0.dev123 | ||
| 540 | # 1.0.git123 -> 1.0.dev123 | ||
| 541 | # 1.0.bzr123 -> 1.0.dev123 | ||
| 542 | # 0.1a0dev.123 -> 0.1a0.dev123 | ||
| 543 | # PyPI stats: ~150 (~4%) better | ||
| 544 | rs = re.sub(r"\.?(dev|git|bzr)\.?(\d+)$", r".dev\2", rs) | ||
| 545 | |||
| 546 | # Clean '.pre' (normalized from '-pre' above) instead of 'c' usage: | ||
| 547 | # 0.2.pre1 -> 0.2c1 | ||
| 548 | # 0.2-c1 -> 0.2c1 | ||
| 549 | # 1.0preview123 -> 1.0c123 | ||
| 550 | # PyPI stats: ~21 (0.62%) better | ||
| 551 | rs = re.sub(r"\.?(pre|preview|-c)(\d+)$", r"c\g<2>", rs) | ||
| 552 | |||
| 553 | # Tcl/Tk uses "px" for their post release markers | ||
| 554 | rs = re.sub(r"p(\d+)$", r".post\1", rs) | ||
| 555 | |||
| 556 | try: | ||
| 557 | _normalized_key(rs) | ||
| 558 | except UnsupportedVersionError: | ||
| 559 | rs = None | ||
| 560 | return rs | ||
| 561 | |||
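| | # Annotation: the substitutions above rationalise common PyPI spellings | ||
| | # into PEP 440 form, e.g.: | ||
| | # | ||
| | #   >>> _suggest_normalized_version('1.0-dev-r371') | ||
| | #   '1.0.dev371' | ||
| | #   >>> _suggest_normalized_version('v1.0final') | ||
| | #   '1.0' | ||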
| 562 | # | ||
| 563 | # Legacy version processing (distribute-compatible) | ||
| 564 | # | ||
| 565 | |||
| 566 | _VERSION_PART = re.compile(r'([a-z]+|\d+|[\.-])', re.I) | ||
| 567 | _VERSION_REPLACE = { | ||
| 568 | 'pre': 'c', | ||
| 569 | 'preview': 'c', | ||
| 570 | '-': 'final-', | ||
| 571 | 'rc': 'c', | ||
| 572 | 'dev': '@', | ||
| 573 | '': None, | ||
| 574 | '.': None, | ||
| 575 | } | ||
| 576 | |||
| 577 | |||
| 578 | def _legacy_key(s): | ||
| 579 | def get_parts(s): | ||
| 580 | result = [] | ||
| 581 | for p in _VERSION_PART.split(s.lower()): | ||
| 582 | p = _VERSION_REPLACE.get(p, p) | ||
| 583 | if p: | ||
| 584 | if '0' <= p[:1] <= '9': | ||
| 585 | p = p.zfill(8) | ||
| 586 | else: | ||
| 587 | p = '*' + p | ||
| 588 | result.append(p) | ||
| 589 | result.append('*final') | ||
| 590 | return result | ||
| 591 | |||
| 592 | result = [] | ||
| 593 | for p in get_parts(s): | ||
| 594 | if p.startswith('*'): | ||
| 595 | if p < '*final': | ||
| 596 | while result and result[-1] == '*final-': | ||
| 597 | result.pop() | ||
| 598 | while result and result[-1] == '00000000': | ||
| 599 | result.pop() | ||
| 600 | result.append(p) | ||
| 601 | return tuple(result) | ||
| 602 | |||
| 603 | |||
| 604 | class LegacyVersion(Version): | ||
| 605 | def parse(self, s): | ||
| 606 | return _legacy_key(s) | ||
| 607 | |||
| 608 | @property | ||
| 609 | def is_prerelease(self): | ||
| 610 | result = False | ||
| 611 | for x in self._parts: | ||
| 612 | if (isinstance(x, string_types) and x.startswith('*') and | ||
| 613 | x < '*final'): | ||
| 614 | result = True | ||
| 615 | break | ||
| 616 | return result | ||
| 617 | |||
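| | # Annotation: the legacy key marks alphabetic parts with '*', which sorts | ||
| | # before digits, so pre-release tags order below the release, as in | ||
| | # setuptools: | ||
| | # | ||
| | #   >>> from pip._vendor.distlib.version import LegacyVersion | ||
| | #   >>> LegacyVersion('1.0a1') < LegacyVersion('1.0') | ||
| | #   True | ||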
| 618 | |||
| 619 | class LegacyMatcher(Matcher): | ||
| 620 | version_class = LegacyVersion | ||
| 621 | |||
| 622 | _operators = dict(Matcher._operators) | ||
| 623 | _operators['~='] = '_match_compatible' | ||
| 624 | |||
| 625 | numeric_re = re.compile(r'^(\d+(\.\d+)*)') | ||
| 626 | |||
| 627 | def _match_compatible(self, version, constraint, prefix): | ||
| 628 | if version < constraint: | ||
| 629 | return False | ||
| 630 | m = self.numeric_re.match(str(constraint)) | ||
| 631 | if not m: | ||
| 632 | logger.warning('Cannot compute compatible match for version %s ' | ||
| 633 | ' and constraint %s', version, constraint) | ||
| 634 | return True | ||
| 635 | s = m.groups()[0] | ||
| 636 | if '.' in s: | ||
| 637 | s = s.rsplit('.', 1)[0] | ||
| 638 | return _match_prefix(version, s) | ||
| 639 | |||
| 640 | # | ||
| 641 | # Semantic versioning | ||
| 642 | # | ||
| 643 | |||
| 644 | _SEMVER_RE = re.compile(r'^(\d+)\.(\d+)\.(\d+)' | ||
| 645 | r'(-[a-z0-9]+(\.[a-z0-9-]+)*)?' | ||
| 646 | r'(\+[a-z0-9]+(\.[a-z0-9-]+)*)?$', re.I) | ||
| 647 | |||
| 648 | |||
| 649 | def is_semver(s): | ||
| 650 | return _SEMVER_RE.match(s) | ||
| 651 | |||
| 652 | |||
| 653 | def _semantic_key(s): | ||
| 654 | def make_tuple(s, absent): | ||
| 655 | if s is None: | ||
| 656 | result = (absent,) | ||
| 657 | else: | ||
| 658 | parts = s[1:].split('.') | ||
| 659 | # We can't compare ints and strings on Python 3, so fudge it | ||
| 660 | # by zero-filling numeric values to simulate a numeric comparison | ||
| 661 | result = tuple([p.zfill(8) if p.isdigit() else p for p in parts]) | ||
| 662 | return result | ||
| 663 | |||
| 664 | m = is_semver(s) | ||
| 665 | if not m: | ||
| 666 | raise UnsupportedVersionError(s) | ||
| 667 | groups = m.groups() | ||
| 668 | major, minor, patch = [int(i) for i in groups[:3]] | ||
| 669 | # choose the '|' and '*' so that versions sort correctly | ||
| 670 | pre, build = make_tuple(groups[3], '|'), make_tuple(groups[5], '*') | ||
| 671 | return (major, minor, patch), pre, build | ||
| 672 | |||
| 673 | |||
| 674 | class SemanticVersion(Version): | ||
| 675 | def parse(self, s): | ||
| 676 | return _semantic_key(s) | ||
| 677 | |||
| 678 | @property | ||
| 679 | def is_prerelease(self): | ||
| 680 | return self._parts[1][0] != '|' | ||
| 681 | |||
| 682 | |||
| 683 | class SemanticMatcher(Matcher): | ||
| 684 | version_class = SemanticVersion | ||
| 685 | |||
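| | # Annotation: per semver, a pre-release sorts before the corresponding | ||
| | # release, while build metadata alone does not mark a pre-release: | ||
| | # | ||
| | #   >>> from pip._vendor.distlib.version import SemanticVersion | ||
| | #   >>> SemanticVersion('1.0.0-alpha') < SemanticVersion('1.0.0') | ||
| | #   True | ||
| | #   >>> SemanticVersion('1.0.0+build.1').is_prerelease | ||
| | #   False | ||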
| 686 | |||
| 687 | class VersionScheme(object): | ||
| 688 | def __init__(self, key, matcher, suggester=None): | ||
| 689 | self.key = key | ||
| 690 | self.matcher = matcher | ||
| 691 | self.suggester = suggester | ||
| 692 | |||
| 693 | def is_valid_version(self, s): | ||
| 694 | try: | ||
| 695 | self.matcher.version_class(s) | ||
| 696 | result = True | ||
| 697 | except UnsupportedVersionError: | ||
| 698 | result = False | ||
| 699 | return result | ||
| 700 | |||
| 701 | def is_valid_matcher(self, s): | ||
| 702 | try: | ||
| 703 | self.matcher(s) | ||
| 704 | result = True | ||
| 705 | except UnsupportedVersionError: | ||
| 706 | result = False | ||
| 707 | return result | ||
| 708 | |||
| 709 | def is_valid_constraint_list(self, s): | ||
| 710 | """ | ||
| 711 | Used for processing some metadata fields | ||
| 712 | """ | ||
| 713 | return self.is_valid_matcher('dummy_name (%s)' % s) | ||
| 714 | |||
| 715 | def suggest(self, s): | ||
| 716 | if self.suggester is None: | ||
| 717 | result = None | ||
| 718 | else: | ||
| 719 | result = self.suggester(s) | ||
| 720 | return result | ||
| 721 | |||
| 722 | _SCHEMES = { | ||
| 723 | 'normalized': VersionScheme(_normalized_key, NormalizedMatcher, | ||
| 724 | _suggest_normalized_version), | ||
| 725 | 'legacy': VersionScheme(_legacy_key, LegacyMatcher, lambda s: s), | ||
| 726 | 'semantic': VersionScheme(_semantic_key, SemanticMatcher, | ||
| 727 | _suggest_semantic_version), | ||
| 728 | } | ||
| 729 | |||
| 730 | _SCHEMES['default'] = _SCHEMES['normalized'] | ||
| 731 | |||
| 732 | |||
| 733 | def get_scheme(name): | ||
| 734 | if name not in _SCHEMES: | ||
| 735 | raise ValueError('unknown scheme name: %r' % name) | ||
| 736 | return _SCHEMES[name] | ||
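| | # Typical entry point (usage sketch, not part of the original module): | ||
| | # | ||
| | #   >>> from pip._vendor.distlib.version import get_scheme | ||
| | #   >>> scheme = get_scheme('default') | ||
| | #   >>> scheme.is_valid_version('1.0'), scheme.is_valid_version('1.0 beta') | ||
| | #   (True, False) | ||
| | #   >>> scheme.suggest('1.0 beta') | ||
| | #   '1.0b0' | ||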
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/w32.exe b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/w32.exe new file mode 100755 index 0000000..732215a --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/w32.exe | |||
| Binary files differ | |||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/w64.exe b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/w64.exe new file mode 100755 index 0000000..c41bd0a --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/w64.exe | |||
| Binary files differ | |||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/wheel.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/wheel.py new file mode 100644 index 0000000..3693410 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/wheel.py | |||
| @@ -0,0 +1,984 @@ | |||
| 1 | # -*- coding: utf-8 -*- | ||
| 2 | # | ||
| 3 | # Copyright (C) 2013-2017 Vinay Sajip. | ||
| 4 | # Licensed to the Python Software Foundation under a contributor agreement. | ||
| 5 | # See LICENSE.txt and CONTRIBUTORS.txt. | ||
| 6 | # | ||
| 7 | from __future__ import unicode_literals | ||
| 8 | |||
| 9 | import base64 | ||
| 10 | import codecs | ||
| 11 | import datetime | ||
| 12 | import distutils.util | ||
| 13 | from email import message_from_file | ||
| 14 | import hashlib | ||
| 15 | import imp | ||
| 16 | import json | ||
| 17 | import logging | ||
| 18 | import os | ||
| 19 | import posixpath | ||
| 20 | import re | ||
| 21 | import shutil | ||
| 22 | import sys | ||
| 23 | import tempfile | ||
| 24 | import zipfile | ||
| 25 | |||
| 26 | from . import __version__, DistlibException | ||
| 27 | from .compat import sysconfig, ZipFile, fsdecode, text_type, filter | ||
| 28 | from .database import InstalledDistribution | ||
| 29 | from .metadata import Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME | ||
| 30 | from .util import (FileOperator, convert_path, CSVReader, CSVWriter, Cache, | ||
| 31 | cached_property, get_cache_base, read_exports, tempdir) | ||
| 32 | from .version import NormalizedVersion, UnsupportedVersionError | ||
| 33 | |||
| 34 | logger = logging.getLogger(__name__) | ||
| 35 | |||
| 36 | cache = None # created when needed | ||
| 37 | |||
| 38 | if hasattr(sys, 'pypy_version_info'): # pragma: no cover | ||
| 39 | IMP_PREFIX = 'pp' | ||
| 40 | elif sys.platform.startswith('java'): # pragma: no cover | ||
| 41 | IMP_PREFIX = 'jy' | ||
| 42 | elif sys.platform == 'cli': # pragma: no cover | ||
| 43 | IMP_PREFIX = 'ip' | ||
| 44 | else: | ||
| 45 | IMP_PREFIX = 'cp' | ||
| 46 | |||
| 47 | VER_SUFFIX = sysconfig.get_config_var('py_version_nodot') | ||
| 48 | if not VER_SUFFIX: # pragma: no cover | ||
| 49 | VER_SUFFIX = '%s%s' % sys.version_info[:2] | ||
| 50 | PYVER = 'py' + VER_SUFFIX | ||
| 51 | IMPVER = IMP_PREFIX + VER_SUFFIX | ||
| 52 | |||
| 53 | ARCH = distutils.util.get_platform().replace('-', '_').replace('.', '_') | ||
| 54 | |||
| 55 | ABI = sysconfig.get_config_var('SOABI') | ||
| 56 | if ABI and ABI.startswith('cpython-'): | ||
| 57 | ABI = ABI.replace('cpython-', 'cp') | ||
| 58 | else: | ||
| 59 | def _derive_abi(): | ||
| 60 | parts = ['cp', VER_SUFFIX] | ||
| 61 | if sysconfig.get_config_var('Py_DEBUG'): | ||
| 62 | parts.append('d') | ||
| 63 | if sysconfig.get_config_var('WITH_PYMALLOC'): | ||
| 64 | parts.append('m') | ||
| 65 | if sysconfig.get_config_var('Py_UNICODE_SIZE') == 4: | ||
| 66 | parts.append('u') | ||
| 67 | return ''.join(parts) | ||
| 68 | ABI = _derive_abi() | ||
| 69 | del _derive_abi | ||
| 70 | |||
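| | # Annotation: on a Linux CPython 3.7 build, these module-level constants | ||
| | # would look something like PYVER='py37', IMPVER='cp37', ABI='cp37m' and | ||
| | # ARCH='linux_x86_64'; they supply the default compatibility tags used | ||
| | # when naming and matching wheels. | ||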
| 71 | FILENAME_RE = re.compile(r''' | ||
| 72 | (?P<nm>[^-]+) | ||
| 73 | -(?P<vn>\d+[^-]*) | ||
| 74 | (-(?P<bn>\d+[^-]*))? | ||
| 75 | -(?P<py>\w+\d+(\.\w+\d+)*) | ||
| 76 | -(?P<bi>\w+) | ||
| 77 | -(?P<ar>\w+(\.\w+)*) | ||
| 78 | \.whl$ | ||
| 79 | ''', re.IGNORECASE | re.VERBOSE) | ||
| 80 | |||
| 81 | NAME_VERSION_RE = re.compile(r''' | ||
| 82 | (?P<nm>[^-]+) | ||
| 83 | -(?P<vn>\d+[^-]*) | ||
| 84 | (-(?P<bn>\d+[^-]*))?$ | ||
| 85 | ''', re.IGNORECASE | re.VERBOSE) | ||
| 86 | |||
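| | # Annotation: FILENAME_RE splits a PEP 427 filename into its components: | ||
| | # | ||
| | #   >>> m = FILENAME_RE.match('foo-1.0-py3-none-any.whl') | ||
| | #   >>> m.group('nm'), m.group('vn'), m.group('py'), m.group('bi'), m.group('ar') | ||
| | #   ('foo', '1.0', 'py3', 'none', 'any') | ||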
| 87 | SHEBANG_RE = re.compile(br'\s*#![^\r\n]*') | ||
| 88 | SHEBANG_DETAIL_RE = re.compile(br'^(\s*#!("[^"]+"|\S+))\s+(.*)$') | ||
| 89 | SHEBANG_PYTHON = b'#!python' | ||
| 90 | SHEBANG_PYTHONW = b'#!pythonw' | ||
| 91 | |||
| 92 | if os.sep == '/': | ||
| 93 | to_posix = lambda o: o | ||
| 94 | else: | ||
| 95 | to_posix = lambda o: o.replace(os.sep, '/') | ||
| 96 | |||
| 97 | |||
| 98 | class Mounter(object): | ||
| 99 | def __init__(self): | ||
| 100 | self.impure_wheels = {} | ||
| 101 | self.libs = {} | ||
| 102 | |||
| 103 | def add(self, pathname, extensions): | ||
| 104 | self.impure_wheels[pathname] = extensions | ||
| 105 | self.libs.update(extensions) | ||
| 106 | |||
| 107 | def remove(self, pathname): | ||
| 108 | extensions = self.impure_wheels.pop(pathname) | ||
| 109 | for k, v in extensions: | ||
| 110 | if k in self.libs: | ||
| 111 | del self.libs[k] | ||
| 112 | |||
| 113 | def find_module(self, fullname, path=None): | ||
| 114 | if fullname in self.libs: | ||
| 115 | result = self | ||
| 116 | else: | ||
| 117 | result = None | ||
| 118 | return result | ||
| 119 | |||
| 120 | def load_module(self, fullname): | ||
| 121 | if fullname in sys.modules: | ||
| 122 | result = sys.modules[fullname] | ||
| 123 | else: | ||
| 124 | if fullname not in self.libs: | ||
| 125 | raise ImportError('unable to find extension for %s' % fullname) | ||
| 126 | result = imp.load_dynamic(fullname, self.libs[fullname]) | ||
| 127 | result.__loader__ = self | ||
| 128 | parts = fullname.rsplit('.', 1) | ||
| 129 | if len(parts) > 1: | ||
| 130 | result.__package__ = parts[0] | ||
| 131 | return result | ||
| 132 | |||
| 133 | _hook = Mounter() | ||
| 134 | |||
| 135 | |||
| 136 | class Wheel(object): | ||
| 137 | """ | ||
| 138 | Class to build and install from Wheel files (PEP 427). | ||
| 139 | """ | ||
| 140 | |||
| 141 | wheel_version = (1, 1) | ||
| 142 | hash_kind = 'sha256' | ||
| 143 | |||
| 144 | def __init__(self, filename=None, sign=False, verify=False): | ||
| 145 | """ | ||
| 146 | Initialise an instance using a (valid) filename. | ||
| 147 | """ | ||
| 148 | self.sign = sign | ||
| 149 | self.should_verify = verify | ||
| 150 | self.buildver = '' | ||
| 151 | self.pyver = [PYVER] | ||
| 152 | self.abi = ['none'] | ||
| 153 | self.arch = ['any'] | ||
| 154 | self.dirname = os.getcwd() | ||
| 155 | if filename is None: | ||
| 156 | self.name = 'dummy' | ||
| 157 | self.version = '0.1' | ||
| 158 | self._filename = self.filename | ||
| 159 | else: | ||
| 160 | m = NAME_VERSION_RE.match(filename) | ||
| 161 | if m: | ||
| 162 | info = m.groupdict('') | ||
| 163 | self.name = info['nm'] | ||
| 164 | # Reinstate the local version separator | ||
| 165 | self.version = info['vn'].replace('_', '-') | ||
| 166 | self.buildver = info['bn'] | ||
| 167 | self._filename = self.filename | ||
| 168 | else: | ||
| 169 | dirname, filename = os.path.split(filename) | ||
| 170 | m = FILENAME_RE.match(filename) | ||
| 171 | if not m: | ||
| 172 | raise DistlibException('Invalid name or ' | ||
| 173 | 'filename: %r' % filename) | ||
| 174 | if dirname: | ||
| 175 | self.dirname = os.path.abspath(dirname) | ||
| 176 | self._filename = filename | ||
| 177 | info = m.groupdict('') | ||
| 178 | self.name = info['nm'] | ||
| 179 | self.version = info['vn'] | ||
| 180 | self.buildver = info['bn'] | ||
| 181 | self.pyver = info['py'].split('.') | ||
| 182 | self.abi = info['bi'].split('.') | ||
| 183 | self.arch = info['ar'].split('.') | ||
| 184 | |||
| 185 | @property | ||
| 186 | def filename(self): | ||
| 187 | """ | ||
| 188 | Build and return a filename from the various components. | ||
| 189 | """ | ||
| 190 | if self.buildver: | ||
| 191 | buildver = '-' + self.buildver | ||
| 192 | else: | ||
| 193 | buildver = '' | ||
| 194 | pyver = '.'.join(self.pyver) | ||
| 195 | abi = '.'.join(self.abi) | ||
| 196 | arch = '.'.join(self.arch) | ||
| 197 | # replace - with _ as a local version separator | ||
| 198 | version = self.version.replace('-', '_') | ||
| 199 | return '%s-%s%s-%s-%s-%s.whl' % (self.name, version, buildver, | ||
| 200 | pyver, abi, arch) | ||
| 201 | |||
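| | # Usage sketch (annotation): with no explicit tags, a pure-Python wheel | ||
| | # name is synthesised from the defaults set in __init__ (here assuming | ||
| | # PYVER == 'py37'): | ||
| | # | ||
| | #   >>> Wheel('foo-1.0').filename | ||
| | #   'foo-1.0-py37-none-any.whl' | ||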
| 202 | @property | ||
| 203 | def exists(self): | ||
| 204 | path = os.path.join(self.dirname, self.filename) | ||
| 205 | return os.path.isfile(path) | ||
| 206 | |||
| 207 | @property | ||
| 208 | def tags(self): | ||
| 209 | for pyver in self.pyver: | ||
| 210 | for abi in self.abi: | ||
| 211 | for arch in self.arch: | ||
| 212 | yield pyver, abi, arch | ||
| 213 | |||
| 214 | @cached_property | ||
| 215 | def metadata(self): | ||
| 216 | pathname = os.path.join(self.dirname, self.filename) | ||
| 217 | name_ver = '%s-%s' % (self.name, self.version) | ||
| 218 | info_dir = '%s.dist-info' % name_ver | ||
| 219 | wrapper = codecs.getreader('utf-8') | ||
| 220 | with ZipFile(pathname, 'r') as zf: | ||
| 221 | wheel_metadata = self.get_wheel_metadata(zf) | ||
| 222 | wv = wheel_metadata['Wheel-Version'].split('.', 1) | ||
| 223 | file_version = tuple([int(i) for i in wv]) | ||
| 224 | if file_version < (1, 1): | ||
| 225 | fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME, 'METADATA'] | ||
| 226 | else: | ||
| 227 | fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME] | ||
| 228 | result = None | ||
| 229 | for fn in fns: | ||
| 230 | try: | ||
| 231 | metadata_filename = posixpath.join(info_dir, fn) | ||
| 232 | with zf.open(metadata_filename) as bf: | ||
| 233 | wf = wrapper(bf) | ||
| 234 | result = Metadata(fileobj=wf) | ||
| 235 | if result: | ||
| 236 | break | ||
| 237 | except KeyError: | ||
| 238 | pass | ||
| 239 | if not result: | ||
| 240 | raise ValueError('Invalid wheel, because metadata is ' | ||
| 241 | 'missing: looked in %s' % ', '.join(fns)) | ||
| 242 | return result | ||
| 243 | |||
| 244 | def get_wheel_metadata(self, zf): | ||
| 245 | name_ver = '%s-%s' % (self.name, self.version) | ||
| 246 | info_dir = '%s.dist-info' % name_ver | ||
| 247 | metadata_filename = posixpath.join(info_dir, 'WHEEL') | ||
| 248 | with zf.open(metadata_filename) as bf: | ||
| 249 | wf = codecs.getreader('utf-8')(bf) | ||
| 250 | message = message_from_file(wf) | ||
| 251 | return dict(message) | ||
| 252 | |||
| 253 | @cached_property | ||
| 254 | def info(self): | ||
| 255 | pathname = os.path.join(self.dirname, self.filename) | ||
| 256 | with ZipFile(pathname, 'r') as zf: | ||
| 257 | result = self.get_wheel_metadata(zf) | ||
| 258 | return result | ||
| 259 | |||
| 260 | def process_shebang(self, data): | ||
| 261 | m = SHEBANG_RE.match(data) | ||
| 262 | if m: | ||
| 263 | end = m.end() | ||
| 264 | shebang, data_after_shebang = data[:end], data[end:] | ||
| 265 | # Preserve any arguments after the interpreter | ||
| 266 | if b'pythonw' in shebang.lower(): | ||
| 267 | shebang_python = SHEBANG_PYTHONW | ||
| 268 | else: | ||
| 269 | shebang_python = SHEBANG_PYTHON | ||
| 270 | m = SHEBANG_DETAIL_RE.match(shebang) | ||
| 271 | if m: | ||
| 272 | args = b' ' + m.groups()[-1] | ||
| 273 | else: | ||
| 274 | args = b'' | ||
| 275 | shebang = shebang_python + args | ||
| 276 | data = shebang + data_after_shebang | ||
| 277 | else: | ||
| 278 | cr = data.find(b'\r') | ||
| 279 | lf = data.find(b'\n') | ||
| 280 | if cr < 0 or cr > lf: | ||
| 281 | term = b'\n' | ||
| 282 | else: | ||
| 283 | if data[cr:cr + 2] == b'\r\n': | ||
| 284 | term = b'\r\n' | ||
| 285 | else: | ||
| 286 | term = b'\r' | ||
| 287 | data = SHEBANG_PYTHON + term + data | ||
| 288 | return data | ||
| 289 | |||
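| | # Annotation: a concrete interpreter path is canonicalised to the | ||
| | # '#!python' marker which installers later rewrite; data without any | ||
| | # shebang gets one prepended: | ||
| | # | ||
| | #   >>> Wheel('foo-1.0').process_shebang(b'#!/usr/bin/python\nprint(1)\n') | ||
| | #   b'#!python\nprint(1)\n' | ||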
| 290 | def get_hash(self, data, hash_kind=None): | ||
| 291 | if hash_kind is None: | ||
| 292 | hash_kind = self.hash_kind | ||
| 293 | try: | ||
| 294 | hasher = getattr(hashlib, hash_kind) | ||
| 295 | except AttributeError: | ||
| 296 | raise DistlibException('Unsupported hash algorithm: %r' % hash_kind) | ||
| 297 | result = hasher(data).digest() | ||
| 298 | result = base64.urlsafe_b64encode(result).rstrip(b'=').decode('ascii') | ||
| 299 | return hash_kind, result | ||
| 300 | |||
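| | # Annotation: get_hash returns the RECORD-style pair, e.g. ('sha256', | ||
| | # '<urlsafe base64, padding stripped>'), so a RECORD row's digest column | ||
| | # can be written as '%s=%s' % self.get_hash(data). | ||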
| 301 | def write_record(self, records, record_path, base): | ||
| 302 | records = list(records) # make a copy for sorting | ||
| 303 | p = to_posix(os.path.relpath(record_path, base)) | ||
| 304 | records.append((p, '', '')) | ||
| 305 | records.sort() | ||
| 306 | with CSVWriter(record_path) as writer: | ||
| 307 | for row in records: | ||
| 308 | writer.writerow(row) | ||
| 309 | |||
| 310 | def write_records(self, info, libdir, archive_paths): | ||
| 311 | records = [] | ||
| 312 | distinfo, info_dir = info | ||
| 313 | hasher = getattr(hashlib, self.hash_kind) | ||
| 314 | for ap, p in archive_paths: | ||
| 315 | with open(p, 'rb') as f: | ||
| 316 | data = f.read() | ||
| 317 | digest = '%s=%s' % self.get_hash(data) | ||
| 318 | size = os.path.getsize(p) | ||
| 319 | records.append((ap, digest, size)) | ||
| 320 | |||
| 321 | p = os.path.join(distinfo, 'RECORD') | ||
| 322 | self.write_record(records, p, libdir) | ||
| 323 | ap = to_posix(os.path.join(info_dir, 'RECORD')) | ||
| 324 | archive_paths.append((ap, p)) | ||
| 325 | |||
| 326 | def build_zip(self, pathname, archive_paths): | ||
| 327 | with ZipFile(pathname, 'w', zipfile.ZIP_DEFLATED) as zf: | ||
| 328 | for ap, p in archive_paths: | ||
| 329 | logger.debug('Wrote %s to %s in wheel', p, ap) | ||
| 330 | zf.write(p, ap) | ||
| 331 | |||
| 332 | def build(self, paths, tags=None, wheel_version=None): | ||
| 333 | """ | ||
| 334 | Build a wheel from files in specified paths, and use any specified tags | ||
| 335 | when determining the name of the wheel. | ||
| 336 | """ | ||
| 337 | if tags is None: | ||
| 338 | tags = {} | ||
| 339 | |||
| 340 | libkey = list(filter(lambda o: o in paths, ('purelib', 'platlib')))[0] | ||
| 341 | if libkey == 'platlib': | ||
| 342 | is_pure = 'false' | ||
| 343 | default_pyver = [IMPVER] | ||
| 344 | default_abi = [ABI] | ||
| 345 | default_arch = [ARCH] | ||
| 346 | else: | ||
| 347 | is_pure = 'true' | ||
| 348 | default_pyver = [PYVER] | ||
| 349 | default_abi = ['none'] | ||
| 350 | default_arch = ['any'] | ||
| 351 | |||
| 352 | self.pyver = tags.get('pyver', default_pyver) | ||
| 353 | self.abi = tags.get('abi', default_abi) | ||
| 354 | self.arch = tags.get('arch', default_arch) | ||
| 355 | |||
| 356 | libdir = paths[libkey] | ||
| 357 | |||
| 358 | name_ver = '%s-%s' % (self.name, self.version) | ||
| 359 | data_dir = '%s.data' % name_ver | ||
| 360 | info_dir = '%s.dist-info' % name_ver | ||
| 361 | |||
| 362 | archive_paths = [] | ||
| 363 | |||
| 364 | # First, stuff which is not in site-packages | ||
| 365 | for key in ('data', 'headers', 'scripts'): | ||
| 366 | if key not in paths: | ||
| 367 | continue | ||
| 368 | path = paths[key] | ||
| 369 | if os.path.isdir(path): | ||
| 370 | for root, dirs, files in os.walk(path): | ||
| 371 | for fn in files: | ||
| 372 | p = fsdecode(os.path.join(root, fn)) | ||
| 373 | rp = os.path.relpath(p, path) | ||
| 374 | ap = to_posix(os.path.join(data_dir, key, rp)) | ||
| 375 | archive_paths.append((ap, p)) | ||
| 376 | if key == 'scripts' and not p.endswith('.exe'): | ||
| 377 | with open(p, 'rb') as f: | ||
| 378 | data = f.read() | ||
| 379 | data = self.process_shebang(data) | ||
| 380 | with open(p, 'wb') as f: | ||
| 381 | f.write(data) | ||
| 382 | |||
| 383 | # Now, stuff which is in site-packages, other than the | ||
| 384 | # distinfo stuff. | ||
| 385 | path = libdir | ||
| 386 | distinfo = None | ||
| 387 | for root, dirs, files in os.walk(path): | ||
| 388 | if root == path: | ||
| 389 | # At the top level only, save distinfo for later | ||
| 390 | # and skip it for now | ||
| 391 | for i, dn in enumerate(dirs): | ||
| 392 | dn = fsdecode(dn) | ||
| 393 | if dn.endswith('.dist-info'): | ||
| 394 | distinfo = os.path.join(root, dn) | ||
| 395 | del dirs[i] | ||
| 396 | break | ||
| 397 | assert distinfo, '.dist-info directory expected, not found' | ||
| 398 | |||
| 399 | for fn in files: | ||
| 400 | # comment out next suite to leave .pyc files in | ||
| 401 | if fsdecode(fn).endswith(('.pyc', '.pyo')): | ||
| 402 | continue | ||
| 403 | p = os.path.join(root, fn) | ||
| 404 | rp = to_posix(os.path.relpath(p, path)) | ||
| 405 | archive_paths.append((rp, p)) | ||
| 406 | |||
| 407 | # Now distinfo. Assumed to be flat, i.e. os.listdir is enough. | ||
| 408 | files = os.listdir(distinfo) | ||
| 409 | for fn in files: | ||
| 410 | if fn not in ('RECORD', 'INSTALLER', 'SHARED', 'WHEEL'): | ||
| 411 | p = fsdecode(os.path.join(distinfo, fn)) | ||
| 412 | ap = to_posix(os.path.join(info_dir, fn)) | ||
| 413 | archive_paths.append((ap, p)) | ||
| 414 | |||
| 415 | wheel_metadata = [ | ||
| 416 | 'Wheel-Version: %d.%d' % (wheel_version or self.wheel_version), | ||
| 417 | 'Generator: distlib %s' % __version__, | ||
| 418 | 'Root-Is-Purelib: %s' % is_pure, | ||
| 419 | ] | ||
| 420 | for pyver, abi, arch in self.tags: | ||
| 421 | wheel_metadata.append('Tag: %s-%s-%s' % (pyver, abi, arch)) | ||
| 422 | p = os.path.join(distinfo, 'WHEEL') | ||
| 423 | with open(p, 'w') as f: | ||
| 424 | f.write('\n'.join(wheel_metadata)) | ||
| 425 | ap = to_posix(os.path.join(info_dir, 'WHEEL')) | ||
| 426 | archive_paths.append((ap, p)) | ||
| 427 | |||
| 428 | # Now, at last, RECORD. | ||
| 429 | # Paths in here are archive paths - nothing else makes sense. | ||
| 430 | self.write_records((distinfo, info_dir), libdir, archive_paths) | ||
| 431 | # Now, ready to build the zip file | ||
| 432 | pathname = os.path.join(self.dirname, self.filename) | ||
| 433 | self.build_zip(pathname, archive_paths) | ||
| 434 | return pathname | ||
| 435 | |||
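| | # Usage sketch (annotation, hypothetical paths): build() expects a mapping | ||
| | # with a 'purelib' or 'platlib' key whose directory already contains the | ||
| | # package files plus a '<name>-<version>.dist-info' subtree: | ||
| | # | ||
| | #   w = Wheel('foo-1.0')                    # name/version only | ||
| | #   w.dirname = '/tmp/dist'                 # where the .whl is written | ||
| | #   w.build({'purelib': '/tmp/build/lib'})  # returns the wheel's path | ||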
| 436 | def install(self, paths, maker, **kwargs): | ||
| 437 | """ | ||
| 438 | Install a wheel to the specified paths. If kwarg ``warner`` is | ||
| 439 | specified, it should be a callable, which will be called with two | ||
| 440 | tuples indicating the wheel version of this software and the wheel | ||
| 441 | version in the file, if there is a discrepancy in the versions. | ||
| 442 | This can be used to issue any warnings or to raise any exceptions. | ||
| 443 | If kwarg ``lib_only`` is True, only the purelib/platlib files are | ||
| 444 | installed, and the headers, scripts, data and dist-info metadata are | ||
| 445 | not written. | ||
| 446 | |||
| 447 | The return value is an :class:`InstalledDistribution` instance unless | ||
| 448 | ``lib_only`` is True, in which case the return value is ``None``. | ||
| 449 | """ | ||
| 450 | |||
| 451 | dry_run = maker.dry_run | ||
| 452 | warner = kwargs.get('warner') | ||
| 453 | lib_only = kwargs.get('lib_only', False) | ||
| 454 | |||
| 455 | pathname = os.path.join(self.dirname, self.filename) | ||
| 456 | name_ver = '%s-%s' % (self.name, self.version) | ||
| 457 | data_dir = '%s.data' % name_ver | ||
| 458 | info_dir = '%s.dist-info' % name_ver | ||
| 459 | |||
| 460 | metadata_name = posixpath.join(info_dir, METADATA_FILENAME) | ||
| 461 | wheel_metadata_name = posixpath.join(info_dir, 'WHEEL') | ||
| 462 | record_name = posixpath.join(info_dir, 'RECORD') | ||
| 463 | |||
| 464 | wrapper = codecs.getreader('utf-8') | ||
| 465 | |||
| 466 | with ZipFile(pathname, 'r') as zf: | ||
| 467 | with zf.open(wheel_metadata_name) as bwf: | ||
| 468 | wf = wrapper(bwf) | ||
| 469 | message = message_from_file(wf) | ||
| 470 | wv = message['Wheel-Version'].split('.', 1) | ||
| 471 | file_version = tuple([int(i) for i in wv]) | ||
| 472 | if (file_version != self.wheel_version) and warner: | ||
| 473 | warner(self.wheel_version, file_version) | ||
| 474 | |||
| 475 | if message['Root-Is-Purelib'] == 'true': | ||
| 476 | libdir = paths['purelib'] | ||
| 477 | else: | ||
| 478 | libdir = paths['platlib'] | ||
| 479 | |||
| 480 | records = {} | ||
| 481 | with zf.open(record_name) as bf: | ||
| 482 | with CSVReader(stream=bf) as reader: | ||
| 483 | for row in reader: | ||
| 484 | p = row[0] | ||
| 485 | records[p] = row | ||
| 486 | |||
| 487 | data_pfx = posixpath.join(data_dir, '') | ||
| 488 | info_pfx = posixpath.join(info_dir, '') | ||
| 489 | script_pfx = posixpath.join(data_dir, 'scripts', '') | ||
| 490 | |||
| 491 | # make a new instance rather than a copy of maker's, | ||
| 492 | # as we mutate it | ||
| 493 | fileop = FileOperator(dry_run=dry_run) | ||
| 494 | fileop.record = True # so we can rollback if needed | ||
| 495 | |||
| 496 | bc = not sys.dont_write_bytecode # Double negatives. Lovely! | ||
| 497 | |||
| 498 | outfiles = [] # for RECORD writing | ||
| 499 | |||
| 500 | # for script copying/shebang processing | ||
| 501 | workdir = tempfile.mkdtemp() | ||
| 502 | # set target dir later | ||
| 503 | # we default add_launchers to False, as the | ||
| 504 | # Python Launcher should be used instead | ||
| 505 | maker.source_dir = workdir | ||
| 506 | maker.target_dir = None | ||
| 507 | try: | ||
| 508 | for zinfo in zf.infolist(): | ||
| 509 | arcname = zinfo.filename | ||
| 510 | if isinstance(arcname, text_type): | ||
| 511 | u_arcname = arcname | ||
| 512 | else: | ||
| 513 | u_arcname = arcname.decode('utf-8') | ||
| 514 | # The signature file won't be in RECORD, | ||
| 515 | # and we don't currently do anything with it | ||
| 516 | if u_arcname.endswith('/RECORD.jws'): | ||
| 517 | continue | ||
| 518 | row = records[u_arcname] | ||
| 519 | if row[2] and str(zinfo.file_size) != row[2]: | ||
| 520 | raise DistlibException('size mismatch for ' | ||
| 521 | '%s' % u_arcname) | ||
| 522 | if row[1]: | ||
| 523 | kind, value = row[1].split('=', 1) | ||
| 524 | with zf.open(arcname) as bf: | ||
| 525 | data = bf.read() | ||
| 526 | _, digest = self.get_hash(data, kind) | ||
| 527 | if digest != value: | ||
| 528 | raise DistlibException('digest mismatch for ' | ||
| 529 | '%s' % arcname) | ||
| 530 | |||
| 531 | if lib_only and u_arcname.startswith((info_pfx, data_pfx)): | ||
| 532 | logger.debug('lib_only: skipping %s', u_arcname) | ||
| 533 | continue | ||
| 534 | is_script = (u_arcname.startswith(script_pfx) | ||
| 535 | and not u_arcname.endswith('.exe')) | ||
| 536 | |||
| 537 | if u_arcname.startswith(data_pfx): | ||
| 538 | _, where, rp = u_arcname.split('/', 2) | ||
| 539 | outfile = os.path.join(paths[where], convert_path(rp)) | ||
| 540 | else: | ||
| 541 | # meant for site-packages. | ||
| 542 | if u_arcname in (wheel_metadata_name, record_name): | ||
| 543 | continue | ||
| 544 | outfile = os.path.join(libdir, convert_path(u_arcname)) | ||
| 545 | if not is_script: | ||
| 546 | with zf.open(arcname) as bf: | ||
| 547 | fileop.copy_stream(bf, outfile) | ||
| 548 | outfiles.append(outfile) | ||
| 549 | # Double check the digest of the written file | ||
| 550 | if not dry_run and row[1]: | ||
| 551 | with open(outfile, 'rb') as bf: | ||
| 552 | data = bf.read() | ||
| 553 | _, newdigest = self.get_hash(data, kind) | ||
| 554 | if newdigest != digest: | ||
| 555 | raise DistlibException('digest mismatch ' | ||
| 556 | 'on write for ' | ||
| 557 | '%s' % outfile) | ||
| 558 | if bc and outfile.endswith('.py'): | ||
| 559 | try: | ||
| 560 | pyc = fileop.byte_compile(outfile) | ||
| 561 | outfiles.append(pyc) | ||
| 562 | except Exception: | ||
| 563 | # Don't give up if byte-compilation fails, | ||
| 564 | # but log it and perhaps warn the user | ||
| 565 | logger.warning('Byte-compilation failed', | ||
| 566 | exc_info=True) | ||
| 567 | else: | ||
| 568 | fn = os.path.basename(convert_path(arcname)) | ||
| 569 | workname = os.path.join(workdir, fn) | ||
| 570 | with zf.open(arcname) as bf: | ||
| 571 | fileop.copy_stream(bf, workname) | ||
| 572 | |||
| 573 | dn, fn = os.path.split(outfile) | ||
| 574 | maker.target_dir = dn | ||
| 575 | filenames = maker.make(fn) | ||
| 576 | fileop.set_executable_mode(filenames) | ||
| 577 | outfiles.extend(filenames) | ||
| 578 | |||
| 579 | if lib_only: | ||
| 580 | logger.debug('lib_only: returning None') | ||
| 581 | dist = None | ||
| 582 | else: | ||
| 583 | # Generate scripts | ||
| 584 | |||
| 585 | # Try to get pydist.json so we can see if there are | ||
| 586 | # any commands to generate. If this fails (e.g. because | ||
| 587 | # of a legacy wheel), log a warning but don't give up. | ||
| 588 | commands = None | ||
| 589 | file_version = self.info['Wheel-Version'] | ||
| 590 | if file_version == '1.0': | ||
| 591 | # Use legacy info | ||
| 592 | ep = posixpath.join(info_dir, 'entry_points.txt') | ||
| 593 | try: | ||
| 594 | with zf.open(ep) as bwf: | ||
| 595 | epdata = read_exports(bwf) | ||
| 596 | commands = {} | ||
| 597 | for key in ('console', 'gui'): | ||
| 598 | k = '%s_scripts' % key | ||
| 599 | if k in epdata: | ||
| 600 | commands['wrap_%s' % key] = d = {} | ||
| 601 | for v in epdata[k].values(): | ||
| 602 | s = '%s:%s' % (v.prefix, v.suffix) | ||
| 603 | if v.flags: | ||
| 604 | s += ' %s' % v.flags | ||
| 605 | d[v.name] = s | ||
| 606 | except Exception: | ||
| 607 | logger.warning('Unable to read legacy script ' | ||
| 608 | 'metadata, so cannot generate ' | ||
| 609 | 'scripts') | ||
| 610 | else: | ||
| 611 | try: | ||
| 612 | with zf.open(metadata_name) as bwf: | ||
| 613 | wf = wrapper(bwf) | ||
| 614 | commands = json.load(wf).get('extensions') | ||
| 615 | if commands: | ||
| 616 | commands = commands.get('python.commands') | ||
| 617 | except Exception: | ||
| 618 | logger.warning('Unable to read JSON metadata, so ' | ||
| 619 | 'cannot generate scripts') | ||
| 620 | if commands: | ||
| 621 | console_scripts = commands.get('wrap_console', {}) | ||
| 622 | gui_scripts = commands.get('wrap_gui', {}) | ||
| 623 | if console_scripts or gui_scripts: | ||
| 624 | script_dir = paths.get('scripts', '') | ||
| 625 | if not os.path.isdir(script_dir): | ||
| 626 | raise ValueError('Valid script path not ' | ||
| 627 | 'specified') | ||
| 628 | maker.target_dir = script_dir | ||
| 629 | for k, v in console_scripts.items(): | ||
| 630 | script = '%s = %s' % (k, v) | ||
| 631 | filenames = maker.make(script) | ||
| 632 | fileop.set_executable_mode(filenames) | ||
| 633 | |||
| 634 | if gui_scripts: | ||
| 635 | options = {'gui': True} | ||
| 636 | for k, v in gui_scripts.items(): | ||
| 637 | script = '%s = %s' % (k, v) | ||
| 638 | filenames = maker.make(script, options) | ||
| 639 | fileop.set_executable_mode(filenames) | ||
| 640 | |||
| 641 | p = os.path.join(libdir, info_dir) | ||
| 642 | dist = InstalledDistribution(p) | ||
| 643 | |||
| 644 | # Write SHARED | ||
| 645 | paths = dict(paths) # don't change passed in dict | ||
| 646 | del paths['purelib'] | ||
| 647 | del paths['platlib'] | ||
| 648 | paths['lib'] = libdir | ||
| 649 | p = dist.write_shared_locations(paths, dry_run) | ||
| 650 | if p: | ||
| 651 | outfiles.append(p) | ||
| 652 | |||
| 653 | # Write RECORD | ||
| 654 | dist.write_installed_files(outfiles, paths['prefix'], | ||
| 655 | dry_run) | ||
| 656 | return dist | ||
| 657 | except Exception: # pragma: no cover | ||
| 658 | logger.exception('installation failed.') | ||
| 659 | fileop.rollback() | ||
| 660 | raise | ||
| 661 | finally: | ||
| 662 | shutil.rmtree(workdir) | ||
| 663 | |||
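| | # Usage sketch (annotation): install() drives a distlib ScriptMaker for | ||
| | # script generation; a minimal call might look like this: | ||
| | # | ||
| | #   from pip._vendor.distlib.scripts import ScriptMaker | ||
| | #   maker = ScriptMaker(None, None)  # source/target dirs set by install() | ||
| | #   paths = {...}  # 'purelib'/'platlib', 'scripts', 'headers', 'data', 'prefix' | ||
| | #   dist = Wheel('/tmp/foo-1.0-py3-none-any.whl').install(paths, maker) | ||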
| 664 | def _get_dylib_cache(self): | ||
| 665 | global cache | ||
| 666 | if cache is None: | ||
| 667 | # Use native string to avoid issues on 2.x: see Python #20140. | ||
| 668 | base = os.path.join(get_cache_base(), str('dylib-cache'), | ||
| 669 | sys.version[:3]) | ||
| 670 | cache = Cache(base) | ||
| 671 | return cache | ||
| 672 | |||
| 673 | def _get_extensions(self): | ||
| 674 | pathname = os.path.join(self.dirname, self.filename) | ||
| 675 | name_ver = '%s-%s' % (self.name, self.version) | ||
| 676 | info_dir = '%s.dist-info' % name_ver | ||
| 677 | arcname = posixpath.join(info_dir, 'EXTENSIONS') | ||
| 678 | wrapper = codecs.getreader('utf-8') | ||
| 679 | result = [] | ||
| 680 | with ZipFile(pathname, 'r') as zf: | ||
| 681 | try: | ||
| 682 | with zf.open(arcname) as bf: | ||
| 683 | wf = wrapper(bf) | ||
| 684 | extensions = json.load(wf) | ||
| 685 | cache = self._get_dylib_cache() | ||
| 686 | prefix = cache.prefix_to_dir(pathname) | ||
| 687 | cache_base = os.path.join(cache.base, prefix) | ||
| 688 | if not os.path.isdir(cache_base): | ||
| 689 | os.makedirs(cache_base) | ||
| 690 | for name, relpath in extensions.items(): | ||
| 691 | dest = os.path.join(cache_base, convert_path(relpath)) | ||
| 692 | if not os.path.exists(dest): | ||
| 693 | extract = True | ||
| 694 | else: | ||
| 695 | file_time = os.stat(dest).st_mtime | ||
| 696 | file_time = datetime.datetime.fromtimestamp(file_time) | ||
| 697 | info = zf.getinfo(relpath) | ||
| 698 | wheel_time = datetime.datetime(*info.date_time) | ||
| 699 | extract = wheel_time > file_time | ||
| 700 | if extract: | ||
| 701 | zf.extract(relpath, cache_base) | ||
| 702 | result.append((name, dest)) | ||
| 703 | except KeyError: | ||
| 704 | pass | ||
| 705 | return result | ||
| 706 | |||
| 707 | def is_compatible(self): | ||
| 708 | """ | ||
| 709 | Determine if a wheel is compatible with the running system. | ||
| 710 | """ | ||
| 711 | return is_compatible(self) | ||
| 712 | |||
| 713 | def is_mountable(self): | ||
| 714 | """ | ||
| 715 | Determine if a wheel is asserted as mountable by its metadata. | ||
| 716 | """ | ||
| 717 | return True # for now - metadata details TBD | ||
| 718 | |||
| 719 | def mount(self, append=False): | ||
| 720 | pathname = os.path.abspath(os.path.join(self.dirname, self.filename)) | ||
| 721 | if not self.is_compatible(): | ||
| 722 | msg = 'Wheel %s not compatible with this Python.' % pathname | ||
| 723 | raise DistlibException(msg) | ||
| 724 | if not self.is_mountable(): | ||
| 725 | msg = 'Wheel %s is marked as not mountable.' % pathname | ||
| 726 | raise DistlibException(msg) | ||
| 727 | if pathname in sys.path: | ||
| 728 | logger.debug('%s already in path', pathname) | ||
| 729 | else: | ||
| 730 | if append: | ||
| 731 | sys.path.append(pathname) | ||
| 732 | else: | ||
| 733 | sys.path.insert(0, pathname) | ||
| 734 | extensions = self._get_extensions() | ||
| 735 | if extensions: | ||
| 736 | if _hook not in sys.meta_path: | ||
| 737 | sys.meta_path.append(_hook) | ||
| 738 | _hook.add(pathname, extensions) | ||
| 739 | |||
| 740 | def unmount(self): | ||
| 741 | pathname = os.path.abspath(os.path.join(self.dirname, self.filename)) | ||
| 742 | if pathname not in sys.path: | ||
| 743 | logger.debug('%s not in path', pathname) | ||
| 744 | else: | ||
| 745 | sys.path.remove(pathname) | ||
| 746 | if pathname in _hook.impure_wheels: | ||
| 747 | _hook.remove(pathname) | ||
| 748 | if not _hook.impure_wheels: | ||
| 749 | if _hook in sys.meta_path: | ||
| 750 | sys.meta_path.remove(_hook) | ||
| 751 | |||
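| | # Usage sketch (annotation, hypothetical path): mounting adds the wheel | ||
| | # itself to sys.path (registering the Mounter hook if the wheel carries | ||
| | # extensions), after which its packages are importable in place: | ||
| | # | ||
| | #   w = Wheel('/tmp/foo-1.0-py3-none-any.whl') | ||
| | #   w.mount() | ||
| | #   import foo  # imported from inside the mounted wheel | ||
| | #   w.unmount() | ||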
| 752 | def verify(self): | ||
| 753 | pathname = os.path.join(self.dirname, self.filename) | ||
| 754 | name_ver = '%s-%s' % (self.name, self.version) | ||
| 755 | data_dir = '%s.data' % name_ver | ||
| 756 | info_dir = '%s.dist-info' % name_ver | ||
| 757 | |||
| 758 | metadata_name = posixpath.join(info_dir, METADATA_FILENAME) | ||
| 759 | wheel_metadata_name = posixpath.join(info_dir, 'WHEEL') | ||
| 760 | record_name = posixpath.join(info_dir, 'RECORD') | ||
| 761 | |||
| 762 | wrapper = codecs.getreader('utf-8') | ||
| 763 | |||
| 764 | with ZipFile(pathname, 'r') as zf: | ||
| 765 | with zf.open(wheel_metadata_name) as bwf: | ||
| 766 | wf = wrapper(bwf) | ||
| 767 | message = message_from_file(wf) | ||
| 768 | wv = message['Wheel-Version'].split('.', 1) | ||
| 769 | file_version = tuple([int(i) for i in wv]) | ||
| 770 | # TODO version verification | ||
| 771 | |||
| 772 | records = {} | ||
| 773 | with zf.open(record_name) as bf: | ||
| 774 | with CSVReader(stream=bf) as reader: | ||
| 775 | for row in reader: | ||
| 776 | p = row[0] | ||
| 777 | records[p] = row | ||
| 778 | |||
| 779 | for zinfo in zf.infolist(): | ||
| 780 | arcname = zinfo.filename | ||
| 781 | if isinstance(arcname, text_type): | ||
| 782 | u_arcname = arcname | ||
| 783 | else: | ||
| 784 | u_arcname = arcname.decode('utf-8') | ||
| 785 | if '..' in u_arcname: | ||
| 786 | raise DistlibException('invalid entry in ' | ||
| 787 | 'wheel: %r' % u_arcname) | ||
| 788 | |||
| 789 | # The signature file won't be in RECORD, | ||
| 790 | # and we don't currently do anything with it | ||
| 791 | if u_arcname.endswith('/RECORD.jws'): | ||
| 792 | continue | ||
| 793 | row = records[u_arcname] | ||
| 794 | if row[2] and str(zinfo.file_size) != row[2]: | ||
| 795 | raise DistlibException('size mismatch for ' | ||
| 796 | '%s' % u_arcname) | ||
| 797 | if row[1]: | ||
| 798 | kind, value = row[1].split('=', 1) | ||
| 799 | with zf.open(arcname) as bf: | ||
| 800 | data = bf.read() | ||
| 801 | _, digest = self.get_hash(data, kind) | ||
| 802 | if digest != value: | ||
| 803 | raise DistlibException('digest mismatch for ' | ||
| 804 | '%s' % arcname) | ||
| 805 | |||
| 806 | def update(self, modifier, dest_dir=None, **kwargs): | ||
| 807 | """ | ||
| 808 | Update the contents of a wheel in a generic way. The modifier should | ||
| 809 | be a callable which expects a dictionary argument: its keys are | ||
| 810 | archive-entry paths, and its values are absolute filesystem paths | ||
| 811 | where the contents of the corresponding archive entries can be found. The | ||
| 812 | modifier is free to change the contents of the files pointed to, add | ||
| 813 | new entries and remove entries, before returning. This method will | ||
| 814 | extract the entire contents of the wheel to a temporary location, call | ||
| 815 | the modifier, and then use the passed (and possibly updated) | ||
| 816 | dictionary to write a new wheel. If ``dest_dir`` is specified, the new | ||
| 817 | wheel is written there -- otherwise, the original wheel is overwritten. | ||
| 818 | |||
| 819 | The modifier should return True if it updated the wheel, else False. | ||
| 820 | This method returns the same value the modifier returns. | ||
| 821 | """ | ||
| 822 | |||
| 823 | def get_version(path_map, info_dir): | ||
| 824 | version = path = None | ||
| 825 | key = '%s/%s' % (info_dir, METADATA_FILENAME) | ||
| 826 | if key not in path_map: | ||
| 827 | key = '%s/PKG-INFO' % info_dir | ||
| 828 | if key in path_map: | ||
| 829 | path = path_map[key] | ||
| 830 | version = Metadata(path=path).version | ||
| 831 | return version, path | ||
| 832 | |||
| 833 | def update_version(version, path): | ||
| 834 | updated = None | ||
| 835 | try: | ||
| 836 | v = NormalizedVersion(version)  # validation only; result unused | ||
| 837 | i = version.find('-') | ||
| 838 | if i < 0: | ||
| 839 | updated = '%s+1' % version | ||
| 840 | else: | ||
| 841 | parts = [int(s) for s in version[i + 1:].split('.')] | ||
| 842 | parts[-1] += 1 | ||
| 843 | updated = '%s+%s' % (version[:i], | ||
| 844 | '.'.join(str(i) for i in parts)) | ||
| 845 | except UnsupportedVersionError: | ||
| 846 | logger.debug('Cannot update non-compliant (PEP-440) ' | ||
| 847 | 'version %r', version) | ||
| 848 | if updated: | ||
| 849 | md = Metadata(path=path) | ||
| 850 | md.version = updated | ||
| 851 | legacy = not path.endswith(METADATA_FILENAME) | ||
| 852 | md.write(path=path, legacy=legacy) | ||
| 853 | logger.debug('Version updated from %r to %r', version, | ||
| 854 | updated) | ||
| 855 | |||
| 856 | pathname = os.path.join(self.dirname, self.filename) | ||
| 857 | name_ver = '%s-%s' % (self.name, self.version) | ||
| 858 | info_dir = '%s.dist-info' % name_ver | ||
| 859 | record_name = posixpath.join(info_dir, 'RECORD') | ||
| 860 | with tempdir() as workdir: | ||
| 861 | with ZipFile(pathname, 'r') as zf: | ||
| 862 | path_map = {} | ||
| 863 | for zinfo in zf.infolist(): | ||
| 864 | arcname = zinfo.filename | ||
| 865 | if isinstance(arcname, text_type): | ||
| 866 | u_arcname = arcname | ||
| 867 | else: | ||
| 868 | u_arcname = arcname.decode('utf-8') | ||
| 869 | if u_arcname == record_name: | ||
| 870 | continue | ||
| 871 | if '..' in u_arcname: | ||
| 872 | raise DistlibException('invalid entry in ' | ||
| 873 | 'wheel: %r' % u_arcname) | ||
| 874 | zf.extract(zinfo, workdir) | ||
| 875 | path = os.path.join(workdir, convert_path(u_arcname)) | ||
| 876 | path_map[u_arcname] = path | ||
| 877 | |||
| 878 | # Remember the version. | ||
| 879 | original_version, _ = get_version(path_map, info_dir) | ||
| 880 | # Files extracted. Call the modifier. | ||
| 881 | modified = modifier(path_map, **kwargs) | ||
| 882 | if modified: | ||
| 883 | # Something changed - need to build a new wheel. | ||
| 884 | current_version, path = get_version(path_map, info_dir) | ||
| 885 | if current_version and (current_version == original_version): | ||
| 886 | # Add or update local version to signify changes. | ||
| 887 | update_version(current_version, path) | ||
| 888 | # Decide where the new wheel goes. | ||
| 889 | if dest_dir is None: | ||
| 890 | fd, newpath = tempfile.mkstemp(suffix='.whl', | ||
| 891 | prefix='wheel-update-', | ||
| 892 | dir=workdir) | ||
| 893 | os.close(fd) | ||
| 894 | else: | ||
| 895 | if not os.path.isdir(dest_dir): | ||
| 896 | raise DistlibException('Not a directory: %r' % dest_dir) | ||
| 897 | newpath = os.path.join(dest_dir, self.filename) | ||
| 898 | archive_paths = list(path_map.items()) | ||
| 899 | distinfo = os.path.join(workdir, info_dir) | ||
| 900 | info = distinfo, info_dir | ||
| 901 | self.write_records(info, workdir, archive_paths) | ||
| 902 | self.build_zip(newpath, archive_paths) | ||
| 903 | if dest_dir is None: | ||
| 904 | shutil.copyfile(newpath, pathname) | ||
| 905 | return modified | ||
| 906 | |||
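To illustrate the modifier contract described in the docstring above, a
minimal sketch; the wheel filename and archive entry are hypothetical:

    from pip._vendor.distlib.wheel import Wheel

    def add_marker(path_map):
        # path_map maps archive entry names to extracted filesystem paths.
        key = 'demo/__init__.py'  # hypothetical entry
        if key not in path_map:
            return False          # nothing changed, no new wheel is built
        with open(path_map[key], 'a') as f:
            f.write('\nPATCHED = True\n')
        return True               # triggers the rebuild described above

    w = Wheel('demo-0.1-py2.py3-none-any.whl')  # hypothetical filename
    modified = w.update(add_marker, dest_dir='/tmp')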
| 907 | def compatible_tags(): | ||
| 908 | """ | ||
| 909 | Return (pyver, abi, arch) tuples compatible with this Python. | ||
| 910 | """ | ||
| 911 | versions = [VER_SUFFIX] | ||
| 912 | major = VER_SUFFIX[0] | ||
| 913 | for minor in range(sys.version_info[1] - 1, -1, -1): | ||
| 914 | versions.append(''.join([major, str(minor)])) | ||
| 915 | |||
| 916 | abis = [] | ||
| 917 | for suffix, _, _ in imp.get_suffixes(): | ||
| 918 | if suffix.startswith('.abi'): | ||
| 919 | abis.append(suffix.split('.', 2)[1]) | ||
| 920 | abis.sort() | ||
| 921 | if ABI != 'none': | ||
| 922 | abis.insert(0, ABI) | ||
| 923 | abis.append('none') | ||
| 924 | result = [] | ||
| 925 | |||
| 926 | arches = [ARCH] | ||
| 927 | if sys.platform == 'darwin': | ||
| 928 | m = re.match(r'(\w+)_(\d+)_(\d+)_(\w+)$', ARCH) | ||
| 929 | if m: | ||
| 930 | name, major, minor, arch = m.groups() | ||
| 931 | minor = int(minor) | ||
| 932 | matches = [arch] | ||
| 933 | if arch in ('i386', 'ppc'): | ||
| 934 | matches.append('fat') | ||
| 935 | if arch in ('i386', 'ppc', 'x86_64'): | ||
| 936 | matches.append('fat3') | ||
| 937 | if arch in ('ppc64', 'x86_64'): | ||
| 938 | matches.append('fat64') | ||
| 939 | if arch in ('i386', 'x86_64'): | ||
| 940 | matches.append('intel') | ||
| 941 | if arch in ('i386', 'x86_64', 'intel', 'ppc', 'ppc64'): | ||
| 942 | matches.append('universal') | ||
| 943 | while minor >= 0: | ||
| 944 | for match in matches: | ||
| 945 | s = '%s_%s_%s_%s' % (name, major, minor, match) | ||
| 946 | if s != ARCH: # already there | ||
| 947 | arches.append(s) | ||
| 948 | minor -= 1 | ||
| 949 | |||
| 950 | # Most specific - our Python version, ABI and arch | ||
| 951 | for abi in abis: | ||
| 952 | for arch in arches: | ||
| 953 | result.append((''.join((IMP_PREFIX, versions[0])), abi, arch)) | ||
| 954 | |||
| 955 | # Tags with no ABI / arch dependency, but with an IMP_PREFIX dependency | ||
| 956 | for i, version in enumerate(versions): | ||
| 957 | result.append((''.join((IMP_PREFIX, version)), 'none', 'any')) | ||
| 958 | if i == 0: | ||
| 959 | result.append((''.join((IMP_PREFIX, version[0])), 'none', 'any')) | ||
| 960 | |||
| 961 | # no IMP_PREFIX, ABI or arch dependency | ||
| 962 | for i, version in enumerate(versions): | ||
| 963 | result.append((''.join(('py', version)), 'none', 'any')) | ||
| 964 | if i == 0: | ||
| 965 | result.append((''.join(('py', version[0])), 'none', 'any')) | ||
| 966 | return set(result) | ||
| 967 | |||
| 968 | |||
| 969 | COMPATIBLE_TAGS = compatible_tags() | ||
| 970 | |||
| 971 | del compatible_tags | ||
| 972 | |||
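The resulting set contains tuples like the following (illustrative values for
a CPython 3.7 build on 64-bit Linux; the exact members depend on the
interpreter and platform):

    from pip._vendor.distlib.wheel import COMPATIBLE_TAGS

    # Typical members, from most to least specific:
    #   ('cp37', 'cp37m', 'linux_x86_64')
    #   ('cp37', 'none', 'any')
    #   ('py37', 'none', 'any')
    #   ('py3', 'none', 'any')
    for pyver, abi, arch in sorted(COMPATIBLE_TAGS):
        print(pyver, abi, arch)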
| 973 | |||
| 974 | def is_compatible(wheel, tags=None): | ||
| 975 | if not isinstance(wheel, Wheel): | ||
| 976 | wheel = Wheel(wheel) # assume it's a filename | ||
| 977 | result = False | ||
| 978 | if tags is None: | ||
| 979 | tags = COMPATIBLE_TAGS | ||
| 980 | for ver, abi, arch in tags: | ||
| 981 | if ver in wheel.pyver and abi in wheel.abi and arch in wheel.arch: | ||
| 982 | result = True | ||
| 983 | break | ||
| 984 | return result | ||
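A short usage sketch of is_compatible(); the wheel filename is illustrative:

    from pip._vendor.distlib.wheel import is_compatible

    # Accepts a Wheel instance or a filename. A pure-Python wheel tagged
    # py2.py3-none-any matches every interpreter's tag set.
    print(is_compatible('six-1.11.0-py2.py3-none-any.whl'))  # True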
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distro.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distro.py new file mode 100644 index 0000000..0f792ea --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distro.py | |||
| @@ -0,0 +1,1104 @@ | |||
| 1 | # Copyright 2015,2016 Nir Cohen | ||
| 2 | # | ||
| 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 4 | # you may not use this file except in compliance with the License. | ||
| 5 | # You may obtain a copy of the License at | ||
| 6 | # | ||
| 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
| 8 | # | ||
| 9 | # Unless required by applicable law or agreed to in writing, software | ||
| 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
| 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 12 | # See the License for the specific language governing permissions and | ||
| 13 | # limitations under the License. | ||
| 14 | |||
| 15 | """ | ||
| 16 | The ``distro`` package (``distro`` stands for Linux Distribution) provides | ||
| 17 | information about the Linux distribution it runs on, such as a reliable | ||
| 18 | machine-readable distro ID, or version information. | ||
| 19 | |||
| 20 | It is a renewed alternative implementation for Python's original | ||
| 21 | :py:func:`platform.linux_distribution` function, but it provides much more | ||
| 22 | functionality. An alternative implementation became necessary because Python | ||
| 23 | 3.5 deprecated this function, and Python 3.7 is expected to remove it | ||
| 24 | altogether. Its predecessor function :py:func:`platform.dist` was already | ||
| 25 | deprecated since Python 2.6 and is also expected to be removed in Python 3.7. | ||
| 26 | Still, there are many cases in which access to Linux distribution information | ||
| 27 | is needed. See `Python issue 1322 <https://bugs.python.org/issue1322>`_ for | ||
| 28 | more information. | ||
| 29 | """ | ||
| 30 | |||
| 31 | import os | ||
| 32 | import re | ||
| 33 | import sys | ||
| 34 | import json | ||
| 35 | import shlex | ||
| 36 | import logging | ||
| 37 | import argparse | ||
| 38 | import subprocess | ||
| 39 | |||
| 40 | |||
| 41 | _UNIXCONFDIR = os.environ.get('UNIXCONFDIR', '/etc') | ||
| 42 | _OS_RELEASE_BASENAME = 'os-release' | ||
| 43 | |||
| 44 | #: Translation table for normalizing the "ID" attribute defined in os-release | ||
| 45 | #: files, for use by the :func:`distro.id` method. | ||
| 46 | #: | ||
| 47 | #: * Key: Value as defined in the os-release file, translated to lower case, | ||
| 48 | #: with blanks translated to underscores. | ||
| 49 | #: | ||
| 50 | #: * Value: Normalized value. | ||
| 51 | NORMALIZED_OS_ID = {} | ||
| 52 | |||
| 53 | #: Translation table for normalizing the "Distributor ID" attribute returned by | ||
| 54 | #: the lsb_release command, for use by the :func:`distro.id` method. | ||
| 55 | #: | ||
| 56 | #: * Key: Value as returned by the lsb_release command, translated to lower | ||
| 57 | #: case, with blanks translated to underscores. | ||
| 58 | #: | ||
| 59 | #: * Value: Normalized value. | ||
| 60 | NORMALIZED_LSB_ID = { | ||
| 61 | 'enterpriseenterprise': 'oracle', # Oracle Enterprise Linux | ||
| 62 | 'redhatenterpriseworkstation': 'rhel', # RHEL 6, 7 Workstation | ||
| 63 | 'redhatenterpriseserver': 'rhel', # RHEL 6, 7 Server | ||
| 64 | } | ||
| 65 | |||
| 66 | #: Translation table for normalizing the distro ID derived from the file name | ||
| 67 | #: of distro release files, for use by the :func:`distro.id` method. | ||
| 68 | #: | ||
| 69 | #: * Key: Value as derived from the file name of a distro release file, | ||
| 70 | #: translated to lower case, with blanks translated to underscores. | ||
| 71 | #: | ||
| 72 | #: * Value: Normalized value. | ||
| 73 | NORMALIZED_DISTRO_ID = { | ||
| 74 | 'redhat': 'rhel', # RHEL 6.x, 7.x | ||
| 75 | } | ||
| 76 | |||
| 77 | # Pattern for content of distro release file (reversed) | ||
| 78 | _DISTRO_RELEASE_CONTENT_REVERSED_PATTERN = re.compile( | ||
| 79 | r'(?:[^)]*\)(.*)\()? *(?:STL )?([\d.+\-a-z]*\d) *(?:esaeler *)?(.+)') | ||
| 80 | |||
| 81 | # Pattern for base file name of distro release file | ||
| 82 | _DISTRO_RELEASE_BASENAME_PATTERN = re.compile( | ||
| 83 | r'(\w+)[-_](release|version)$') | ||
| 84 | |||
| 85 | # Base file names to be ignored when searching for distro release file | ||
| 86 | _DISTRO_RELEASE_IGNORE_BASENAMES = ( | ||
| 87 | 'debian_version', | ||
| 88 | 'lsb-release', | ||
| 89 | 'oem-release', | ||
| 90 | _OS_RELEASE_BASENAME, | ||
| 91 | 'system-release' | ||
| 92 | ) | ||
| 93 | |||
| 94 | |||
| 95 | def linux_distribution(full_distribution_name=True): | ||
| 96 | """ | ||
| 97 | Return information about the current Linux distribution as a tuple | ||
| 98 | ``(id_name, version, codename)`` with items as follows: | ||
| 99 | |||
| 100 | * ``id_name``: If *full_distribution_name* is false, the result of | ||
| 101 | :func:`distro.id`. Otherwise, the result of :func:`distro.name`. | ||
| 102 | |||
| 103 | * ``version``: The result of :func:`distro.version`. | ||
| 104 | |||
| 105 | * ``codename``: The result of :func:`distro.codename`. | ||
| 106 | |||
| 107 | The interface of this function is compatible with the original | ||
| 108 | :py:func:`platform.linux_distribution` function, supporting a subset of | ||
| 109 | its parameters. | ||
| 110 | |||
| 111 | The data it returns may not be exactly the same, because it uses more data | ||
| 112 | sources than the original function, and that may lead to different data if | ||
| 113 | the Linux distribution is not consistent across multiple data sources it | ||
| 114 | provides (there are indeed such distributions ...). | ||
| 115 | |||
| 116 | Another reason for differences is the fact that the :func:`distro.id` | ||
| 117 | method normalizes the distro ID string to a reliable machine-readable value | ||
| 118 | for a number of popular Linux distributions. | ||
| 119 | """ | ||
| 120 | return _distro.linux_distribution(full_distribution_name) | ||
| 121 | |||
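A quick sketch of the consolidated call (the vendored copy is importable as
pip._vendor.distro; the output values are illustrative):

    from pip._vendor import distro

    # On an Ubuntu 16.04 host this might return:
    #   ('Ubuntu', '16.04', 'xenial')   with full_distribution_name=True
    #   ('ubuntu', '16.04', 'xenial')   with full_distribution_name=False
    print(distro.linux_distribution())
    print(distro.linux_distribution(full_distribution_name=False))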
| 122 | |||
| 123 | def id(): | ||
| 124 | """ | ||
| 125 | Return the distro ID of the current Linux distribution, as a | ||
| 126 | machine-readable string. | ||
| 127 | |||
| 128 | For a number of Linux distributions, the returned distro ID value is | ||
| 129 | *reliable*, in the sense that it is documented and that it does not change | ||
| 130 | across releases of the distribution. | ||
| 131 | |||
| 132 | This package maintains the following reliable distro ID values: | ||
| 133 | |||
| 134 | ============== ========================================= | ||
| 135 | Distro ID Distribution | ||
| 136 | ============== ========================================= | ||
| 137 | "ubuntu" Ubuntu | ||
| 138 | "debian" Debian | ||
| 139 | "rhel" RedHat Enterprise Linux | ||
| 140 | "centos" CentOS | ||
| 141 | "fedora" Fedora | ||
| 142 | "sles" SUSE Linux Enterprise Server | ||
| 143 | "opensuse" openSUSE | ||
| 144 | "amazon" Amazon Linux | ||
| 145 | "arch" Arch Linux | ||
| 146 | "cloudlinux" CloudLinux OS | ||
| 147 | "exherbo" Exherbo Linux | ||
| 148 | "gentoo" GenToo Linux | ||
| 149 | "ibm_powerkvm" IBM PowerKVM | ||
| 150 | "kvmibm" KVM for IBM z Systems | ||
| 151 | "linuxmint" Linux Mint | ||
| 152 | "mageia" Mageia | ||
| 153 | "mandriva" Mandriva Linux | ||
| 154 | "parallels" Parallels | ||
| 155 | "pidora" Pidora | ||
| 156 | "raspbian" Raspbian | ||
| 157 | "oracle" Oracle Linux (and Oracle Enterprise Linux) | ||
| 158 | "scientific" Scientific Linux | ||
| 159 | "slackware" Slackware | ||
| 160 | "xenserver" XenServer | ||
| 161 | ============== ========================================= | ||
| 162 | |||
| 163 | If you need additional distros added to this set of reliable IDs, | ||
| 164 | or if you find that the :func:`distro.id` function returns a different | ||
| 165 | distro ID for one of the listed distros, please create an issue in the | ||
| 166 | `distro issue tracker`_. | ||
| 167 | |||
| 168 | **Lookup hierarchy and transformations:** | ||
| 169 | |||
| 170 | First, the ID is obtained from the following sources, in the specified | ||
| 171 | order. The first available and non-empty value is used: | ||
| 172 | |||
| 173 | * the value of the "ID" attribute of the os-release file, | ||
| 174 | |||
| 175 | * the value of the "Distributor ID" attribute returned by the lsb_release | ||
| 176 | command, | ||
| 177 | |||
| 178 | * the first part of the file name of the distro release file, | ||
| 179 | |||
| 180 | The ID value determined this way then passes through the following | ||
| 181 | transformations, before it is returned by this method: | ||
| 182 | |||
| 183 | * it is translated to lower case, | ||
| 184 | |||
| 185 | * blanks (which should not be there anyway) are translated to underscores, | ||
| 186 | |||
| 187 | * a normalization of the ID is performed, based upon | ||
| 188 | `normalization tables`_. The purpose of this normalization is to ensure | ||
| 189 | that the ID is as reliable as possible, even across incompatible changes | ||
| 190 | in the Linux distributions. A common reason for an incompatible change is | ||
| 191 | the addition of an os-release file, or the addition of the lsb_release | ||
| 192 | command, with ID values that differ from what was previously determined | ||
| 193 | from the distro release file name. | ||
| 194 | """ | ||
| 195 | return _distro.id() | ||
| 196 | |||
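For example, the normalization tables map several vendor strings onto one
reliable ID (illustrative values):

    from pip._vendor import distro

    # On RHEL 7, os-release carries ID="rhel" while lsb_release reports
    # "RedHatEnterpriseServer"; both sources normalize to the same ID:
    print(distro.id())  # 'rhel'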
| 197 | |||
| 198 | def name(pretty=False): | ||
| 199 | """ | ||
| 200 | Return the name of the current Linux distribution, as a human-readable | ||
| 201 | string. | ||
| 202 | |||
| 203 | If *pretty* is false, the name is returned without version or codename. | ||
| 204 | (e.g. "CentOS Linux") | ||
| 205 | |||
| 206 | If *pretty* is true, the version and codename are appended. | ||
| 207 | (e.g. "CentOS Linux 7.1.1503 (Core)") | ||
| 208 | |||
| 209 | **Lookup hierarchy:** | ||
| 210 | |||
| 211 | The name is obtained from the following sources, in the specified order. | ||
| 212 | The first available and non-empty value is used: | ||
| 213 | |||
| 214 | * If *pretty* is false: | ||
| 215 | |||
| 216 | - the value of the "NAME" attribute of the os-release file, | ||
| 217 | |||
| 218 | - the value of the "Distributor ID" attribute returned by the lsb_release | ||
| 219 | command, | ||
| 220 | |||
| 221 | - the value of the "<name>" field of the distro release file. | ||
| 222 | |||
| 223 | * If *pretty* is true: | ||
| 224 | |||
| 225 | - the value of the "PRETTY_NAME" attribute of the os-release file, | ||
| 226 | |||
| 227 | - the value of the "Description" attribute returned by the lsb_release | ||
| 228 | command, | ||
| 229 | |||
| 230 | - the value of the "<name>" field of the distro release file, appended | ||
| 231 | with the value of the pretty version ("<version_id>" and "<codename>" | ||
| 232 | fields) of the distro release file, if available. | ||
| 233 | """ | ||
| 234 | return _distro.name(pretty) | ||
| 235 | |||
| 236 | |||
| 237 | def version(pretty=False, best=False): | ||
| 238 | """ | ||
| 239 | Return the version of the current Linux distribution, as a human-readable | ||
| 240 | string. | ||
| 241 | |||
| 242 | If *pretty* is false, the version is returned without codename (e.g. | ||
| 243 | "7.0"). | ||
| 244 | |||
| 245 | If *pretty* is true, the codename in parentheses is appended, if the | ||
| 246 | codename is non-empty (e.g. "7.0 (Maipo)"). | ||
| 247 | |||
| 248 | Some distributions provide version numbers with different precisions in | ||
| 249 | the different sources of distribution information. Examining the different | ||
| 250 | sources in a fixed priority order does not always yield the most precise | ||
| 251 | version (e.g. for Debian 8.2, or CentOS 7.1). | ||
| 252 | |||
| 253 | The *best* parameter can be used to control the approach for the returned | ||
| 254 | version: | ||
| 255 | |||
| 256 | If *best* is false, the first non-empty version number in priority order of | ||
| 257 | the examined sources is returned. | ||
| 258 | |||
| 259 | If *best* is true, the most precise version number out of all examined | ||
| 260 | sources is returned. | ||
| 261 | |||
| 262 | **Lookup hierarchy:** | ||
| 263 | |||
| 264 | In all cases, the version number is obtained from the following sources. | ||
| 265 | If *best* is false, this order represents the priority order: | ||
| 266 | |||
| 267 | * the value of the "VERSION_ID" attribute of the os-release file, | ||
| 268 | * the value of the "Release" attribute returned by the lsb_release | ||
| 269 | command, | ||
| 270 | * the version number parsed from the "<version_id>" field of the first line | ||
| 271 | of the distro release file, | ||
| 272 | * the version number parsed from the "PRETTY_NAME" attribute of the | ||
| 273 | os-release file, if it follows the format of the distro release files. | ||
| 274 | * the version number parsed from the "Description" attribute returned by | ||
| 275 | the lsb_release command, if it follows the format of the distro release | ||
| 276 | files. | ||
| 277 | """ | ||
| 278 | return _distro.version(pretty, best) | ||
| 279 | |||
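The effect of *pretty* and *best* on a host whose sources disagree in
precision (illustrative CentOS 7 values, matching the docstring's example):

    from pip._vendor import distro

    distro.version()             # '7'         (os-release VERSION_ID wins)
    distro.version(best=True)    # '7.1.1503'  (most precise source wins)
    distro.version(pretty=True)  # '7 (Core)'  (codename appended)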
| 280 | |||
| 281 | def version_parts(best=False): | ||
| 282 | """ | ||
| 283 | Return the version of the current Linux distribution as a tuple | ||
| 284 | ``(major, minor, build_number)`` with items as follows: | ||
| 285 | |||
| 286 | * ``major``: The result of :func:`distro.major_version`. | ||
| 287 | |||
| 288 | * ``minor``: The result of :func:`distro.minor_version`. | ||
| 289 | |||
| 290 | * ``build_number``: The result of :func:`distro.build_number`. | ||
| 291 | |||
| 292 | For a description of the *best* parameter, see the :func:`distro.version` | ||
| 293 | method. | ||
| 294 | """ | ||
| 295 | return _distro.version_parts(best) | ||
| 296 | |||
| 297 | |||
| 298 | def major_version(best=False): | ||
| 299 | """ | ||
| 300 | Return the major version of the current Linux distribution, as a string, | ||
| 301 | if provided. | ||
| 302 | Otherwise, the empty string is returned. The major version is the first | ||
| 303 | part of the dot-separated version string. | ||
| 304 | |||
| 305 | For a description of the *best* parameter, see the :func:`distro.version` | ||
| 306 | method. | ||
| 307 | """ | ||
| 308 | return _distro.major_version(best) | ||
| 309 | |||
| 310 | |||
| 311 | def minor_version(best=False): | ||
| 312 | """ | ||
| 313 | Return the minor version of the current Linux distribution, as a string, | ||
| 314 | if provided. | ||
| 315 | Otherwise, the empty string is returned. The minor version is the second | ||
| 316 | part of the dot-separated version string. | ||
| 317 | |||
| 318 | For a description of the *best* parameter, see the :func:`distro.version` | ||
| 319 | method. | ||
| 320 | """ | ||
| 321 | return _distro.minor_version(best) | ||
| 322 | |||
| 323 | |||
| 324 | def build_number(best=False): | ||
| 325 | """ | ||
| 326 | Return the build number of the current Linux distribution, as a string, | ||
| 327 | if provided. | ||
| 328 | Otherwise, the empty string is returned. The build number is the third part | ||
| 329 | of the dot-separated version string. | ||
| 330 | |||
| 331 | For a description of the *best* parameter, see the :func:`distro.version` | ||
| 332 | method. | ||
| 333 | """ | ||
| 334 | return _distro.build_number(best) | ||
| 335 | |||
| 336 | |||
| 337 | def like(): | ||
| 338 | """ | ||
| 339 | Return a space-separated list of distro IDs of distributions that are | ||
| 340 | closely related to the current Linux distribution with regard to packaging | ||
| 341 | and programming interfaces, for example distributions the current | ||
| 342 | distribution is derived from. | ||
| 343 | |||
| 344 | **Lookup hierarchy:** | ||
| 345 | |||
| 346 | This information item is only provided by the os-release file. | ||
| 347 | For details, see the description of the "ID_LIKE" attribute in the | ||
| 348 | `os-release man page | ||
| 349 | <http://www.freedesktop.org/software/systemd/man/os-release.html>`_. | ||
| 350 | """ | ||
| 351 | return _distro.like() | ||
| 352 | |||
| 353 | |||
| 354 | def codename(): | ||
| 355 | """ | ||
| 356 | Return the codename for the release of the current Linux distribution, | ||
| 357 | as a string. | ||
| 358 | |||
| 359 | If the distribution does not have a codename, an empty string is returned. | ||
| 360 | |||
| 361 | Note that the returned codename is not always really a codename. For | ||
| 362 | example, openSUSE returns "x86_64". This function does not handle such | ||
| 363 | cases in any special way and just returns the string it finds, if any. | ||
| 364 | |||
| 365 | **Lookup hierarchy:** | ||
| 366 | |||
| 367 | * the codename within the "VERSION" attribute of the os-release file, if | ||
| 368 | provided, | ||
| 369 | |||
| 370 | * the value of the "Codename" attribute returned by the lsb_release | ||
| 371 | command, | ||
| 372 | |||
| 373 | * the value of the "<codename>" field of the distro release file. | ||
| 374 | """ | ||
| 375 | return _distro.codename() | ||
| 376 | |||
| 377 | |||
| 378 | def info(pretty=False, best=False): | ||
| 379 | """ | ||
| 380 | Return certain machine-readable information items about the current Linux | ||
| 381 | distribution in a dictionary, as shown in the following example: | ||
| 382 | |||
| 383 | .. sourcecode:: python | ||
| 384 | |||
| 385 | { | ||
| 386 | 'id': 'rhel', | ||
| 387 | 'version': '7.0', | ||
| 388 | 'version_parts': { | ||
| 389 | 'major': '7', | ||
| 390 | 'minor': '0', | ||
| 391 | 'build_number': '' | ||
| 392 | }, | ||
| 393 | 'like': 'fedora', | ||
| 394 | 'codename': 'Maipo' | ||
| 395 | } | ||
| 396 | |||
| 397 | The dictionary structure and keys are always the same, regardless of which | ||
| 398 | information items are available in the underlying data sources. The values | ||
| 399 | for the various keys are as follows: | ||
| 400 | |||
| 401 | * ``id``: The result of :func:`distro.id`. | ||
| 402 | |||
| 403 | * ``version``: The result of :func:`distro.version`. | ||
| 404 | |||
| 405 | * ``version_parts -> major``: The result of :func:`distro.major_version`. | ||
| 406 | |||
| 407 | * ``version_parts -> minor``: The result of :func:`distro.minor_version`. | ||
| 408 | |||
| 409 | * ``version_parts -> build_number``: The result of | ||
| 410 | :func:`distro.build_number`. | ||
| 411 | |||
| 412 | * ``like``: The result of :func:`distro.like`. | ||
| 413 | |||
| 414 | * ``codename``: The result of :func:`distro.codename`. | ||
| 415 | |||
| 416 | For a description of the *pretty* and *best* parameters, see the | ||
| 417 | :func:`distro.version` method. | ||
| 418 | """ | ||
| 419 | return _distro.info(pretty, best) | ||
| 420 | |||
| 421 | |||
| 422 | def os_release_info(): | ||
| 423 | """ | ||
| 424 | Return a dictionary containing key-value pairs for the information items | ||
| 425 | from the os-release file data source of the current Linux distribution. | ||
| 426 | |||
| 427 | See `os-release file`_ for details about these information items. | ||
| 428 | """ | ||
| 429 | return _distro.os_release_info() | ||
| 430 | |||
| 431 | |||
| 432 | def lsb_release_info(): | ||
| 433 | """ | ||
| 434 | Return a dictionary containing key-value pairs for the information items | ||
| 435 | from the lsb_release command data source of the current Linux distribution. | ||
| 436 | |||
| 437 | See `lsb_release command output`_ for details about these information | ||
| 438 | items. | ||
| 439 | """ | ||
| 440 | return _distro.lsb_release_info() | ||
| 441 | |||
| 442 | |||
| 443 | def distro_release_info(): | ||
| 444 | """ | ||
| 445 | Return a dictionary containing key-value pairs for the information items | ||
| 446 | from the distro release file data source of the current Linux distribution. | ||
| 447 | |||
| 448 | See `distro release file`_ for details about these information items. | ||
| 449 | """ | ||
| 450 | return _distro.distro_release_info() | ||
| 451 | |||
| 452 | |||
| 453 | def os_release_attr(attribute): | ||
| 454 | """ | ||
| 455 | Return a single named information item from the os-release file data source | ||
| 456 | of the current Linux distribution. | ||
| 457 | |||
| 458 | Parameters: | ||
| 459 | |||
| 460 | * ``attribute`` (string): Key of the information item. | ||
| 461 | |||
| 462 | Returns: | ||
| 463 | |||
| 464 | * (string): Value of the information item, if the item exists. | ||
| 465 | The empty string, if the item does not exist. | ||
| 466 | |||
| 467 | See `os-release file`_ for details about these information items. | ||
| 468 | """ | ||
| 469 | return _distro.os_release_attr(attribute) | ||
| 470 | |||
| 471 | |||
| 472 | def lsb_release_attr(attribute): | ||
| 473 | """ | ||
| 474 | Return a single named information item from the lsb_release command output | ||
| 475 | data source of the current Linux distribution. | ||
| 476 | |||
| 477 | Parameters: | ||
| 478 | |||
| 479 | * ``attribute`` (string): Key of the information item. | ||
| 480 | |||
| 481 | Returns: | ||
| 482 | |||
| 483 | * (string): Value of the information item, if the item exists. | ||
| 484 | The empty string, if the item does not exist. | ||
| 485 | |||
| 486 | See `lsb_release command output`_ for details about these information | ||
| 487 | items. | ||
| 488 | """ | ||
| 489 | return _distro.lsb_release_attr(attribute) | ||
| 490 | |||
| 491 | |||
| 492 | def distro_release_attr(attribute): | ||
| 493 | """ | ||
| 494 | Return a single named information item from the distro release file | ||
| 495 | data source of the current Linux distribution. | ||
| 496 | |||
| 497 | Parameters: | ||
| 498 | |||
| 499 | * ``attribute`` (string): Key of the information item. | ||
| 500 | |||
| 501 | Returns: | ||
| 502 | |||
| 503 | * (string): Value of the information item, if the item exists. | ||
| 504 | The empty string, if the item does not exist. | ||
| 505 | |||
| 506 | See `distro release file`_ for details about these information items. | ||
| 507 | """ | ||
| 508 | return _distro.distro_release_attr(attribute) | ||
| 509 | |||
| 510 | |||
| 511 | class cached_property(object): | ||
| 512 | """A version of @property which caches the value. On access, it calls the | ||
| 513 | underlying function and sets the value in `__dict__` so future accesses | ||
| 514 | will not re-call the property. | ||
| 515 | """ | ||
| 516 | def __init__(self, f): | ||
| 517 | self._fname = f.__name__ | ||
| 518 | self._f = f | ||
| 519 | |||
| 520 | def __get__(self, obj, owner): | ||
| 521 | assert obj is not None, 'call {} on an instance'.format(self._fname) | ||
| 522 | ret = obj.__dict__[self._fname] = self._f(obj) | ||
| 523 | return ret | ||
| 524 | |||
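Because cached_property only defines __get__, the value stored into the
instance ``__dict__`` shadows the descriptor on later lookups; a minimal
sketch:

    class Example(object):
        @cached_property
        def answer(self):
            print('computed once')
            return 42

    e = Example()
    e.answer  # prints 'computed once' and returns 42
    e.answer  # now served from e.__dict__; the function is not re-run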
| 525 | |||
| 526 | class LinuxDistribution(object): | ||
| 527 | """ | ||
| 528 | Provides information about a Linux distribution. | ||
| 529 | |||
| 530 | This package creates a private module-global instance of this class with | ||
| 531 | default initialization arguments, which is used by the | ||
| 532 | `consolidated accessor functions`_ and `single source accessor functions`_. | ||
| 533 | By using default initialization arguments, that module-global instance | ||
| 534 | returns data about the current Linux distribution (i.e. the distro this | ||
| 535 | package runs on). | ||
| 536 | |||
| 537 | Normally, it is not necessary to create additional instances of this class. | ||
| 538 | However, in situations where control is needed over the exact data sources | ||
| 539 | that are used, instances of this class can be created with a specific | ||
| 540 | distro release file, or a specific os-release file, or without invoking the | ||
| 541 | lsb_release command. | ||
| 542 | """ | ||
| 543 | |||
| 544 | def __init__(self, | ||
| 545 | include_lsb=True, | ||
| 546 | os_release_file='', | ||
| 547 | distro_release_file=''): | ||
| 548 | """ | ||
| 549 | The initialization method of this class gathers information from the | ||
| 550 | available data sources, and stores that in private instance attributes. | ||
| 551 | Subsequent access to the information items uses these private instance | ||
| 552 | attributes, so that the data sources are read only once. | ||
| 553 | |||
| 554 | Parameters: | ||
| 555 | |||
| 556 | * ``include_lsb`` (bool): Controls whether the | ||
| 557 | `lsb_release command output`_ is included as a data source. | ||
| 558 | |||
| 559 | If the lsb_release command is not available in the program execution | ||
| 560 | path, the data source for the lsb_release command will be empty. | ||
| 561 | |||
| 562 | * ``os_release_file`` (string): The path name of the | ||
| 563 | `os-release file`_ that is to be used as a data source. | ||
| 564 | |||
| 565 | An empty string (the default) will cause the default path name to | ||
| 566 | be used (see `os-release file`_ for details). | ||
| 567 | |||
| 568 | If the specified or defaulted os-release file does not exist, the | ||
| 569 | data source for the os-release file will be empty. | ||
| 570 | |||
| 571 | * ``distro_release_file`` (string): The path name of the | ||
| 572 | `distro release file`_ that is to be used as a data source. | ||
| 573 | |||
| 574 | An empty string (the default) will cause a default search algorithm | ||
| 575 | to be used (see `distro release file`_ for details). | ||
| 576 | |||
| 577 | If the specified distro release file does not exist, or if no default | ||
| 578 | distro release file can be found, the data source for the distro | ||
| 579 | release file will be empty. | ||
| 580 | |||
| 581 | Public instance attributes: | ||
| 582 | |||
| 583 | * ``os_release_file`` (string): The path name of the | ||
| 584 | `os-release file`_ that is actually used as a data source. The | ||
| 585 | empty string if no distro release file is used as a data source. | ||
| 586 | |||
| 587 | * ``distro_release_file`` (string): The path name of the | ||
| 588 | `distro release file`_ that is actually used as a data source. The | ||
| 589 | empty string if no distro release file is used as a data source. | ||
| 590 | |||
| 591 | * ``include_lsb`` (bool): The result of the ``include_lsb`` parameter. | ||
| 592 | This controls whether the lsb information will be loaded. | ||
| 593 | |||
| 594 | Raises: | ||
| 595 | |||
| 596 | * :py:exc:`IOError`: Some I/O issue with an os-release file or distro | ||
| 597 | release file. | ||
| 598 | |||
| 599 | * :py:exc:`subprocess.CalledProcessError`: The lsb_release command had | ||
| 600 | some issue (other than not being available in the program execution | ||
| 601 | path). | ||
| 602 | |||
| 603 | * :py:exc:`UnicodeError`: A data source has unexpected characters or | ||
| 604 | uses an unexpected encoding. | ||
| 605 | """ | ||
| 606 | self.os_release_file = os_release_file or \ | ||
| 607 | os.path.join(_UNIXCONFDIR, _OS_RELEASE_BASENAME) | ||
| 608 | self.distro_release_file = distro_release_file or '' # updated later | ||
| 609 | self.include_lsb = include_lsb | ||
| 610 | |||
| 611 | def __repr__(self): | ||
| 612 | """Return repr of all info | ||
| 613 | """ | ||
| 614 | return \ | ||
| 615 | "LinuxDistribution(" \ | ||
| 616 | "os_release_file={self.os_release_file!r}, " \ | ||
| 617 | "distro_release_file={self.distro_release_file!r}, " \ | ||
| 618 | "include_lsb={self.include_lsb!r}, " \ | ||
| 619 | "_os_release_info={self._os_release_info!r}, " \ | ||
| 620 | "_lsb_release_info={self._lsb_release_info!r}, " \ | ||
| 621 | "_distro_release_info={self._distro_release_info!r})".format( | ||
| 622 | self=self) | ||
| 623 | |||
| 624 | def linux_distribution(self, full_distribution_name=True): | ||
| 625 | """ | ||
| 626 | Return information about the Linux distribution that is compatible | ||
| 627 | with Python's :func:`platform.linux_distribution`, supporting a subset | ||
| 628 | of its parameters. | ||
| 629 | |||
| 630 | For details, see :func:`distro.linux_distribution`. | ||
| 631 | """ | ||
| 632 | return ( | ||
| 633 | self.name() if full_distribution_name else self.id(), | ||
| 634 | self.version(), | ||
| 635 | self.codename() | ||
| 636 | ) | ||
| 637 | |||
| 638 | def id(self): | ||
| 639 | """Return the distro ID of the Linux distribution, as a string. | ||
| 640 | |||
| 641 | For details, see :func:`distro.id`. | ||
| 642 | """ | ||
| 643 | def normalize(distro_id, table): | ||
| 644 | distro_id = distro_id.lower().replace(' ', '_') | ||
| 645 | return table.get(distro_id, distro_id) | ||
| 646 | |||
| 647 | distro_id = self.os_release_attr('id') | ||
| 648 | if distro_id: | ||
| 649 | return normalize(distro_id, NORMALIZED_OS_ID) | ||
| 650 | |||
| 651 | distro_id = self.lsb_release_attr('distributor_id') | ||
| 652 | if distro_id: | ||
| 653 | return normalize(distro_id, NORMALIZED_LSB_ID) | ||
| 654 | |||
| 655 | distro_id = self.distro_release_attr('id') | ||
| 656 | if distro_id: | ||
| 657 | return normalize(distro_id, NORMALIZED_DISTRO_ID) | ||
| 658 | |||
| 659 | return '' | ||
| 660 | |||
| 661 | def name(self, pretty=False): | ||
| 662 | """ | ||
| 663 | Return the name of the Linux distribution, as a string. | ||
| 664 | |||
| 665 | For details, see :func:`distro.name`. | ||
| 666 | """ | ||
| 667 | name = self.os_release_attr('name') \ | ||
| 668 | or self.lsb_release_attr('distributor_id') \ | ||
| 669 | or self.distro_release_attr('name') | ||
| 670 | if pretty: | ||
| 671 | name = self.os_release_attr('pretty_name') \ | ||
| 672 | or self.lsb_release_attr('description') | ||
| 673 | if not name: | ||
| 674 | name = self.distro_release_attr('name') | ||
| 675 | version = self.version(pretty=True) | ||
| 676 | if version: | ||
| 677 | name = name + ' ' + version | ||
| 678 | return name or '' | ||
| 679 | |||
| 680 | def version(self, pretty=False, best=False): | ||
| 681 | """ | ||
| 682 | Return the version of the Linux distribution, as a string. | ||
| 683 | |||
| 684 | For details, see :func:`distro.version`. | ||
| 685 | """ | ||
| 686 | versions = [ | ||
| 687 | self.os_release_attr('version_id'), | ||
| 688 | self.lsb_release_attr('release'), | ||
| 689 | self.distro_release_attr('version_id'), | ||
| 690 | self._parse_distro_release_content( | ||
| 691 | self.os_release_attr('pretty_name')).get('version_id', ''), | ||
| 692 | self._parse_distro_release_content( | ||
| 693 | self.lsb_release_attr('description')).get('version_id', '') | ||
| 694 | ] | ||
| 695 | version = '' | ||
| 696 | if best: | ||
| 697 | # This algorithm uses the last version in priority order that has | ||
| 698 | # the best precision. If the versions are not in conflict, that | ||
| 699 | # does not matter; otherwise, using the last one instead of the | ||
| 700 | # first one might be considered a surprise. | ||
| 701 | for v in versions: | ||
| 702 | if v.count(".") > version.count(".") or version == '': | ||
| 703 | version = v | ||
| 704 | else: | ||
| 705 | for v in versions: | ||
| 706 | if v != '': | ||
| 707 | version = v | ||
| 708 | break | ||
| 709 | if pretty and version and self.codename(): | ||
| 710 | version = u'{0} ({1})'.format(version, self.codename()) | ||
| 711 | return version | ||
| 712 | |||
| 713 | def version_parts(self, best=False): | ||
| 714 | """ | ||
| 715 | Return the version of the Linux distribution, as a tuple of version | ||
| 716 | numbers. | ||
| 717 | |||
| 718 | For details, see :func:`distro.version_parts`. | ||
| 719 | """ | ||
| 720 | version_str = self.version(best=best) | ||
| 721 | if version_str: | ||
| 722 | version_regex = re.compile(r'(\d+)\.?(\d+)?\.?(\d+)?') | ||
| 723 | matches = version_regex.match(version_str) | ||
| 724 | if matches: | ||
| 725 | major, minor, build_number = matches.groups() | ||
| 726 | return major, minor or '', build_number or '' | ||
| 727 | return '', '', '' | ||
| 728 | |||
| 729 | def major_version(self, best=False): | ||
| 730 | """ | ||
| 731 | Return the major version number of the current distribution. | ||
| 732 | |||
| 733 | For details, see :func:`distro.major_version`. | ||
| 734 | """ | ||
| 735 | return self.version_parts(best)[0] | ||
| 736 | |||
| 737 | def minor_version(self, best=False): | ||
| 738 | """ | ||
| 739 | Return the minor version number of the Linux distribution. | ||
| 740 | |||
| 741 | For details, see :func:`distro.minor_version`. | ||
| 742 | """ | ||
| 743 | return self.version_parts(best)[1] | ||
| 744 | |||
| 745 | def build_number(self, best=False): | ||
| 746 | """ | ||
| 747 | Return the build number of the Linux distribution. | ||
| 748 | |||
| 749 | For details, see :func:`distro.build_number`. | ||
| 750 | """ | ||
| 751 | return self.version_parts(best)[2] | ||
| 752 | |||
| 753 | def like(self): | ||
| 754 | """ | ||
| 755 | Return the IDs of distributions that are like the Linux distribution. | ||
| 756 | |||
| 757 | For details, see :func:`distro.like`. | ||
| 758 | """ | ||
| 759 | return self.os_release_attr('id_like') or '' | ||
| 760 | |||
| 761 | def codename(self): | ||
| 762 | """ | ||
| 763 | Return the codename of the Linux distribution. | ||
| 764 | |||
| 765 | For details, see :func:`distro.codename`. | ||
| 766 | """ | ||
| 767 | return self.os_release_attr('codename') \ | ||
| 768 | or self.lsb_release_attr('codename') \ | ||
| 769 | or self.distro_release_attr('codename') \ | ||
| 770 | or '' | ||
| 771 | |||
| 772 | def info(self, pretty=False, best=False): | ||
| 773 | """ | ||
| 774 | Return certain machine-readable information about the Linux | ||
| 775 | distribution. | ||
| 776 | |||
| 777 | For details, see :func:`distro.info`. | ||
| 778 | """ | ||
| 779 | return dict( | ||
| 780 | id=self.id(), | ||
| 781 | version=self.version(pretty, best), | ||
| 782 | version_parts=dict( | ||
| 783 | major=self.major_version(best), | ||
| 784 | minor=self.minor_version(best), | ||
| 785 | build_number=self.build_number(best) | ||
| 786 | ), | ||
| 787 | like=self.like(), | ||
| 788 | codename=self.codename(), | ||
| 789 | ) | ||
| 790 | |||
| 791 | def os_release_info(self): | ||
| 792 | """ | ||
| 793 | Return a dictionary containing key-value pairs for the information | ||
| 794 | items from the os-release file data source of the Linux distribution. | ||
| 795 | |||
| 796 | For details, see :func:`distro.os_release_info`. | ||
| 797 | """ | ||
| 798 | return self._os_release_info | ||
| 799 | |||
| 800 | def lsb_release_info(self): | ||
| 801 | """ | ||
| 802 | Return a dictionary containing key-value pairs for the information | ||
| 803 | items from the lsb_release command data source of the Linux | ||
| 804 | distribution. | ||
| 805 | |||
| 806 | For details, see :func:`distro.lsb_release_info`. | ||
| 807 | """ | ||
| 808 | return self._lsb_release_info | ||
| 809 | |||
| 810 | def distro_release_info(self): | ||
| 811 | """ | ||
| 812 | Return a dictionary containing key-value pairs for the information | ||
| 813 | items from the distro release file data source of the Linux | ||
| 814 | distribution. | ||
| 815 | |||
| 816 | For details, see :func:`distro.distro_release_info`. | ||
| 817 | """ | ||
| 818 | return self._distro_release_info | ||
| 819 | |||
| 820 | def os_release_attr(self, attribute): | ||
| 821 | """ | ||
| 822 | Return a single named information item from the os-release file data | ||
| 823 | source of the Linux distribution. | ||
| 824 | |||
| 825 | For details, see :func:`distro.os_release_attr`. | ||
| 826 | """ | ||
| 827 | return self._os_release_info.get(attribute, '') | ||
| 828 | |||
| 829 | def lsb_release_attr(self, attribute): | ||
| 830 | """ | ||
| 831 | Return a single named information item from the lsb_release command | ||
| 832 | output data source of the Linux distribution. | ||
| 833 | |||
| 834 | For details, see :func:`distro.lsb_release_attr`. | ||
| 835 | """ | ||
| 836 | return self._lsb_release_info.get(attribute, '') | ||
| 837 | |||
| 838 | def distro_release_attr(self, attribute): | ||
| 839 | """ | ||
| 840 | Return a single named information item from the distro release file | ||
| 841 | data source of the Linux distribution. | ||
| 842 | |||
| 843 | For details, see :func:`distro.distro_release_attr`. | ||
| 844 | """ | ||
| 845 | return self._distro_release_info.get(attribute, '') | ||
| 846 | |||
| 847 | @cached_property | ||
| 848 | def _os_release_info(self): | ||
| 849 | """ | ||
| 850 | Get the information items from the specified os-release file. | ||
| 851 | |||
| 852 | Returns: | ||
| 853 | A dictionary containing all information items. | ||
| 854 | """ | ||
| 855 | if os.path.isfile(self.os_release_file): | ||
| 856 | with open(self.os_release_file) as release_file: | ||
| 857 | return self._parse_os_release_content(release_file) | ||
| 858 | return {} | ||
| 859 | |||
| 860 | @staticmethod | ||
| 861 | def _parse_os_release_content(lines): | ||
| 862 | """ | ||
| 863 | Parse the lines of an os-release file. | ||
| 864 | |||
| 865 | Parameters: | ||
| 866 | |||
| 867 | * lines: Iterable through the lines in the os-release file. | ||
| 868 | Each line must be a unicode string or a UTF-8 encoded byte | ||
| 869 | string. | ||
| 870 | |||
| 871 | Returns: | ||
| 872 | A dictionary containing all information items. | ||
| 873 | """ | ||
| 874 | props = {} | ||
| 875 | lexer = shlex.shlex(lines, posix=True) | ||
| 876 | lexer.whitespace_split = True | ||
| 877 | |||
| 878 | # The shlex module defines its `wordchars` variable using literals, | ||
| 879 | # making it dependent on the encoding of the Python source file. | ||
| 880 | # In Python 2.6 and 2.7, the shlex source file is encoded in | ||
| 881 | # 'iso-8859-1', and the `wordchars` variable is defined as a byte | ||
| 882 | # string. This causes a UnicodeDecodeError to be raised when the | ||
| 883 | # parsed content is a unicode object. The following fix resolves that | ||
| 884 | # (... but it should be fixed in shlex...): | ||
| 885 | if sys.version_info[0] == 2 and isinstance(lexer.wordchars, bytes): | ||
| 886 | lexer.wordchars = lexer.wordchars.decode('iso-8859-1') | ||
| 887 | |||
| 888 | tokens = list(lexer) | ||
| 889 | for token in tokens: | ||
| 890 | # At this point, all shell-like parsing has been done (i.e. | ||
| 891 | # comments processed, quotes and backslash escape sequences | ||
| 892 | # processed, multi-line values assembled, trailing newlines | ||
| 893 | # stripped, etc.), so the tokens are now either: | ||
| 894 | # * variable assignments: var=value | ||
| 895 | # * commands or their arguments (not allowed in os-release) | ||
| 896 | if '=' in token: | ||
| 897 | k, v = token.split('=', 1) | ||
| 898 | if isinstance(v, bytes): | ||
| 899 | v = v.decode('utf-8') | ||
| 900 | props[k.lower()] = v | ||
| 901 | if k == 'VERSION': | ||
| 902 | # this handles cases in which the codename is in | ||
| 903 | # the `(CODENAME)` (rhel, centos, fedora) format | ||
| 904 | # or in the `, CODENAME` format (Ubuntu). | ||
| 905 | codename = re.search(r'(\(\D+\))|,(\s+)?\D+', v) | ||
| 906 | if codename: | ||
| 907 | codename = codename.group() | ||
| 908 | codename = codename.strip('()') | ||
| 909 | codename = codename.strip(',') | ||
| 910 | codename = codename.strip() | ||
| 911 | # codename appears within parentheses. | ||
| 912 | props['codename'] = codename | ||
| 913 | else: | ||
| 914 | props['codename'] = '' | ||
| 915 | else: | ||
| 916 | # Ignore any tokens that are not variable assignments | ||
| 917 | pass | ||
| 918 | return props | ||
| 919 | |||
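A worked example of the os-release parsing above (a static method, so it can
be exercised without constructing a full instance):

    content = 'NAME="Ubuntu"\nVERSION="16.04.3 LTS (Xenial Xerus)"\nID=ubuntu\n'
    props = LinuxDistribution._parse_os_release_content(content)
    # {'name': 'Ubuntu', 'version': '16.04.3 LTS (Xenial Xerus)',
    #  'id': 'ubuntu', 'codename': 'Xenial Xerus'}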
| 920 | @cached_property | ||
| 921 | def _lsb_release_info(self): | ||
| 922 | """ | ||
| 923 | Get the information items from the lsb_release command output. | ||
| 924 | |||
| 925 | Returns: | ||
| 926 | A dictionary containing all information items. | ||
| 927 | """ | ||
| 928 | if not self.include_lsb: | ||
| 929 | return {} | ||
| 930 | with open(os.devnull, 'w') as devnull: | ||
| 931 | try: | ||
| 932 | cmd = ('lsb_release', '-a') | ||
| 933 | stdout = subprocess.check_output(cmd, stderr=devnull) | ||
| 934 | except OSError: # Command not found | ||
| 935 | return {} | ||
| 936 | content = stdout.decode(sys.getfilesystemencoding()).splitlines() | ||
| 937 | return self._parse_lsb_release_content(content) | ||
| 938 | |||
| 939 | @staticmethod | ||
| 940 | def _parse_lsb_release_content(lines): | ||
| 941 | """ | ||
| 942 | Parse the output of the lsb_release command. | ||
| 943 | |||
| 944 | Parameters: | ||
| 945 | |||
| 946 | * lines: Iterable through the lines of the lsb_release output. | ||
| 947 | Each line must be a unicode string or a UTF-8 encoded byte | ||
| 948 | string. | ||
| 949 | |||
| 950 | Returns: | ||
| 951 | A dictionary containing all information items. | ||
| 952 | """ | ||
| 953 | props = {} | ||
| 954 | for line in lines: | ||
| 955 | kv = line.strip('\n').split(':', 1) | ||
| 956 | if len(kv) != 2: | ||
| 957 | # Ignore lines without colon. | ||
| 958 | continue | ||
| 959 | k, v = kv | ||
| 960 | props.update({k.replace(' ', '_').lower(): v.strip()}) | ||
| 961 | return props | ||
| 962 | |||
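And the lsb_release parsing, which lowercases keys and replaces blanks with
underscores:

    lines = ['Distributor ID:\tUbuntu', 'Release:\t16.04', 'Codename:\txenial']
    props = LinuxDistribution._parse_lsb_release_content(lines)
    # {'distributor_id': 'Ubuntu', 'release': '16.04', 'codename': 'xenial'}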
| 963 | @cached_property | ||
| 964 | def _distro_release_info(self): | ||
| 965 | """ | ||
| 966 | Get the information items from the specified distro release file. | ||
| 967 | |||
| 968 | Returns: | ||
| 969 | A dictionary containing all information items. | ||
| 970 | """ | ||
| 971 | if self.distro_release_file: | ||
| 972 | # If it was specified, we use it and parse what we can, even if | ||
| 973 | # its file name or content does not match the expected pattern. | ||
| 974 | distro_info = self._parse_distro_release_file( | ||
| 975 | self.distro_release_file) | ||
| 976 | basename = os.path.basename(self.distro_release_file) | ||
| 977 | # The file name pattern for user-specified distro release files | ||
| 978 | # is somewhat more tolerant (compared to when searching for the | ||
| 979 | # file), because we want to use what was specified as best as | ||
| 980 | # possible. | ||
| 981 | match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename) | ||
| 982 | if match: | ||
| 983 | distro_info['id'] = match.group(1) | ||
| 984 | return distro_info | ||
| 985 | else: | ||
| 986 | try: | ||
| 987 | basenames = os.listdir(_UNIXCONFDIR) | ||
| 988 | # We sort for repeatability in cases where there are multiple | ||
| 989 | # distro specific files; e.g. CentOS, Oracle, Enterprise all | ||
| 990 | # containing `redhat-release` on top of their own. | ||
| 991 | basenames.sort() | ||
| 992 | except OSError: | ||
| 993 | # This may occur when /etc is not readable but we can't be | ||
| 994 | # sure about the *-release files. Check common entries of | ||
| 995 | # /etc for information. If they turn out to not be there the | ||
| 996 | # error is handled in `_parse_distro_release_file()`. | ||
| 997 | basenames = ['SuSE-release', | ||
| 998 | 'arch-release', | ||
| 999 | 'base-release', | ||
| 1000 | 'centos-release', | ||
| 1001 | 'fedora-release', | ||
| 1002 | 'gentoo-release', | ||
| 1003 | 'mageia-release', | ||
| 1004 | 'mandrake-release', | ||
| 1005 | 'mandriva-release', | ||
| 1006 | 'mandrivalinux-release', | ||
| 1007 | 'manjaro-release', | ||
| 1008 | 'oracle-release', | ||
| 1009 | 'redhat-release', | ||
| 1010 | 'sl-release', | ||
| 1011 | 'slackware-version'] | ||
| 1012 | for basename in basenames: | ||
| 1013 | if basename in _DISTRO_RELEASE_IGNORE_BASENAMES: | ||
| 1014 | continue | ||
| 1015 | match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename) | ||
| 1016 | if match: | ||
| 1017 | filepath = os.path.join(_UNIXCONFDIR, basename) | ||
| 1018 | distro_info = self._parse_distro_release_file(filepath) | ||
| 1019 | if 'name' in distro_info: | ||
| 1020 | # The name is always present if the pattern matches | ||
| 1021 | self.distro_release_file = filepath | ||
| 1022 | distro_info['id'] = match.group(1) | ||
| 1023 | return distro_info | ||
| 1024 | return {} | ||
| 1025 | |||
| 1026 | def _parse_distro_release_file(self, filepath): | ||
| 1027 | """ | ||
| 1028 | Parse a distro release file. | ||
| 1029 | |||
| 1030 | Parameters: | ||
| 1031 | |||
| 1032 | * filepath: Path name of the distro release file. | ||
| 1033 | |||
| 1034 | Returns: | ||
| 1035 | A dictionary containing all information items. | ||
| 1036 | """ | ||
| 1037 | try: | ||
| 1038 | with open(filepath) as fp: | ||
| 1039 | # Only parse the first line. For instance, on SLES there | ||
| 1040 | # are multiple lines. We don't want them... | ||
| 1041 | return self._parse_distro_release_content(fp.readline()) | ||
| 1042 | except (OSError, IOError): | ||
| 1043 | # Ignore not being able to read a specific, seemingly version | ||
| 1044 | # related file. | ||
| 1045 | # See https://github.com/nir0s/distro/issues/162 | ||
| 1046 | return {} | ||
| 1047 | |||
| 1048 | @staticmethod | ||
| 1049 | def _parse_distro_release_content(line): | ||
| 1050 | """ | ||
| 1051 | Parse a line from a distro release file. | ||
| 1052 | |||
| 1053 | Parameters: | ||
| 1054 | * line: Line from the distro release file. Must be a unicode string | ||
| 1055 | or a UTF-8 encoded byte string. | ||
| 1056 | |||
| 1057 | Returns: | ||
| 1058 | A dictionary containing all information items. | ||
| 1059 | """ | ||
| 1060 | if isinstance(line, bytes): | ||
| 1061 | line = line.decode('utf-8') | ||
| 1062 | matches = _DISTRO_RELEASE_CONTENT_REVERSED_PATTERN.match( | ||
| 1063 | line.strip()[::-1]) | ||
| 1064 | distro_info = {} | ||
| 1065 | if matches: | ||
| 1066 | # regexp ensures non-None | ||
| 1067 | distro_info['name'] = matches.group(3)[::-1] | ||
| 1068 | if matches.group(2): | ||
| 1069 | distro_info['version_id'] = matches.group(2)[::-1] | ||
| 1070 | if matches.group(1): | ||
| 1071 | distro_info['codename'] = matches.group(1)[::-1] | ||
| 1072 | elif line: | ||
| 1073 | distro_info['name'] = line.strip() | ||
| 1074 | return distro_info | ||
| 1075 | |||
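The reversed-pattern trick above anchors the match at the right-hand end of
the line, so name, version and codename are captured in one pass:

    line = 'CentOS Linux release 7.1.1503 (Core)'
    info = LinuxDistribution._parse_distro_release_content(line)
    # {'name': 'CentOS Linux', 'version_id': '7.1.1503', 'codename': 'Core'}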
| 1076 | |||
| 1077 | _distro = LinuxDistribution() | ||
| 1078 | |||
| 1079 | |||
| 1080 | def main(): | ||
| 1081 | logger = logging.getLogger(__name__) | ||
| 1082 | logger.setLevel(logging.DEBUG) | ||
| 1083 | logger.addHandler(logging.StreamHandler(sys.stdout)) | ||
| 1084 | |||
| 1085 | parser = argparse.ArgumentParser(description="Linux distro info tool") | ||
| 1086 | parser.add_argument( | ||
| 1087 | '--json', | ||
| 1088 | '-j', | ||
| 1089 | help="Output in machine readable format", | ||
| 1090 | action="store_true") | ||
| 1091 | args = parser.parse_args() | ||
| 1092 | |||
| 1093 | if args.json: | ||
| 1094 | logger.info(json.dumps(info(), indent=4, sort_keys=True)) | ||
| 1095 | else: | ||
| 1096 | logger.info('Name: %s', name(pretty=True)) | ||
| 1097 | distribution_version = version(pretty=True) | ||
| 1098 | logger.info('Version: %s', distribution_version) | ||
| 1099 | distribution_codename = codename() | ||
| 1100 | logger.info('Codename: %s', distribution_codename) | ||
| 1101 | |||
| 1102 | |||
| 1103 | if __name__ == '__main__': | ||
| 1104 | main() | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/__init__.py new file mode 100644 index 0000000..0b54002 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/__init__.py | |||
| @@ -0,0 +1,35 @@ | |||
| 1 | """ | ||
| 2 | HTML parsing library based on the `WHATWG HTML specification | ||
| 3 | <https://whatwg.org/html>`_. The parser is designed to be compatible with | ||
| 4 | existing HTML found in the wild and implements well-defined error recovery that | ||
| 5 | is largely compatible with modern desktop web browsers. | ||
| 6 | |||
| 7 | Example usage:: | ||
| 8 | |||
| 9 | from pip._vendor import html5lib | ||
| 10 | with open("my_document.html", "rb") as f: | ||
| 11 | tree = html5lib.parse(f) | ||
| 12 | |||
| 13 | For convenience, this module re-exports the following names: | ||
| 14 | |||
| 15 | * :func:`~.html5parser.parse` | ||
| 16 | * :func:`~.html5parser.parseFragment` | ||
| 17 | * :class:`~.html5parser.HTMLParser` | ||
| 18 | * :func:`~.treebuilders.getTreeBuilder` | ||
| 19 | * :func:`~.treewalkers.getTreeWalker` | ||
| 20 | * :func:`~.serializer.serialize` | ||
| 21 | """ | ||
| 22 | |||
| 23 | from __future__ import absolute_import, division, unicode_literals | ||
| 24 | |||
| 25 | from .html5parser import HTMLParser, parse, parseFragment | ||
| 26 | from .treebuilders import getTreeBuilder | ||
| 27 | from .treewalkers import getTreeWalker | ||
| 28 | from .serializer import serialize | ||
| 29 | |||
| 30 | __all__ = ["HTMLParser", "parse", "parseFragment", "getTreeBuilder", | ||
| 31 | "getTreeWalker", "serialize"] | ||
| 32 | |||
| 33 | # this has to be at the top level, see how setup.py parses this | ||
| 34 | #: Distribution version number. | ||
| 35 | __version__ = "1.0.1" | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/_ihatexml.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/_ihatexml.py new file mode 100644 index 0000000..68f9b1e --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/_ihatexml.py | |||
| @@ -0,0 +1,288 @@ | |||
| 1 | from __future__ import absolute_import, division, unicode_literals | ||
| 2 | |||
| 3 | import re | ||
| 4 | import warnings | ||
| 5 | |||
| 6 | from .constants import DataLossWarning | ||
| 7 | |||
| 8 | baseChar = """ | ||
| 9 | [#x0041-#x005A] | [#x0061-#x007A] | [#x00C0-#x00D6] | [#x00D8-#x00F6] | | ||
| 10 | [#x00F8-#x00FF] | [#x0100-#x0131] | [#x0134-#x013E] | [#x0141-#x0148] | | ||
| 11 | [#x014A-#x017E] | [#x0180-#x01C3] | [#x01CD-#x01F0] | [#x01F4-#x01F5] | | ||
| 12 | [#x01FA-#x0217] | [#x0250-#x02A8] | [#x02BB-#x02C1] | #x0386 | | ||
| 13 | [#x0388-#x038A] | #x038C | [#x038E-#x03A1] | [#x03A3-#x03CE] | | ||
| 14 | [#x03D0-#x03D6] | #x03DA | #x03DC | #x03DE | #x03E0 | [#x03E2-#x03F3] | | ||
| 15 | [#x0401-#x040C] | [#x040E-#x044F] | [#x0451-#x045C] | [#x045E-#x0481] | | ||
| 16 | [#x0490-#x04C4] | [#x04C7-#x04C8] | [#x04CB-#x04CC] | [#x04D0-#x04EB] | | ||
| 17 | [#x04EE-#x04F5] | [#x04F8-#x04F9] | [#x0531-#x0556] | #x0559 | | ||
| 18 | [#x0561-#x0586] | [#x05D0-#x05EA] | [#x05F0-#x05F2] | [#x0621-#x063A] | | ||
| 19 | [#x0641-#x064A] | [#x0671-#x06B7] | [#x06BA-#x06BE] | [#x06C0-#x06CE] | | ||
| 20 | [#x06D0-#x06D3] | #x06D5 | [#x06E5-#x06E6] | [#x0905-#x0939] | #x093D | | ||
| 21 | [#x0958-#x0961] | [#x0985-#x098C] | [#x098F-#x0990] | [#x0993-#x09A8] | | ||
| 22 | [#x09AA-#x09B0] | #x09B2 | [#x09B6-#x09B9] | [#x09DC-#x09DD] | | ||
| 23 | [#x09DF-#x09E1] | [#x09F0-#x09F1] | [#x0A05-#x0A0A] | [#x0A0F-#x0A10] | | ||
| 24 | [#x0A13-#x0A28] | [#x0A2A-#x0A30] | [#x0A32-#x0A33] | [#x0A35-#x0A36] | | ||
| 25 | [#x0A38-#x0A39] | [#x0A59-#x0A5C] | #x0A5E | [#x0A72-#x0A74] | | ||
| 26 | [#x0A85-#x0A8B] | #x0A8D | [#x0A8F-#x0A91] | [#x0A93-#x0AA8] | | ||
| 27 | [#x0AAA-#x0AB0] | [#x0AB2-#x0AB3] | [#x0AB5-#x0AB9] | #x0ABD | #x0AE0 | | ||
| 28 | [#x0B05-#x0B0C] | [#x0B0F-#x0B10] | [#x0B13-#x0B28] | [#x0B2A-#x0B30] | | ||
| 29 | [#x0B32-#x0B33] | [#x0B36-#x0B39] | #x0B3D | [#x0B5C-#x0B5D] | | ||
| 30 | [#x0B5F-#x0B61] | [#x0B85-#x0B8A] | [#x0B8E-#x0B90] | [#x0B92-#x0B95] | | ||
| 31 | [#x0B99-#x0B9A] | #x0B9C | [#x0B9E-#x0B9F] | [#x0BA3-#x0BA4] | | ||
| 32 | [#x0BA8-#x0BAA] | [#x0BAE-#x0BB5] | [#x0BB7-#x0BB9] | [#x0C05-#x0C0C] | | ||
| 33 | [#x0C0E-#x0C10] | [#x0C12-#x0C28] | [#x0C2A-#x0C33] | [#x0C35-#x0C39] | | ||
| 34 | [#x0C60-#x0C61] | [#x0C85-#x0C8C] | [#x0C8E-#x0C90] | [#x0C92-#x0CA8] | | ||
| 35 | [#x0CAA-#x0CB3] | [#x0CB5-#x0CB9] | #x0CDE | [#x0CE0-#x0CE1] | | ||
| 36 | [#x0D05-#x0D0C] | [#x0D0E-#x0D10] | [#x0D12-#x0D28] | [#x0D2A-#x0D39] | | ||
| 37 | [#x0D60-#x0D61] | [#x0E01-#x0E2E] | #x0E30 | [#x0E32-#x0E33] | | ||
| 38 | [#x0E40-#x0E45] | [#x0E81-#x0E82] | #x0E84 | [#x0E87-#x0E88] | #x0E8A | | ||
| 39 | #x0E8D | [#x0E94-#x0E97] | [#x0E99-#x0E9F] | [#x0EA1-#x0EA3] | #x0EA5 | | ||
| 40 | #x0EA7 | [#x0EAA-#x0EAB] | [#x0EAD-#x0EAE] | #x0EB0 | [#x0EB2-#x0EB3] | | ||
| 41 | #x0EBD | [#x0EC0-#x0EC4] | [#x0F40-#x0F47] | [#x0F49-#x0F69] | | ||
| 42 | [#x10A0-#x10C5] | [#x10D0-#x10F6] | #x1100 | [#x1102-#x1103] | | ||
| 43 | [#x1105-#x1107] | #x1109 | [#x110B-#x110C] | [#x110E-#x1112] | #x113C | | ||
| 44 | #x113E | #x1140 | #x114C | #x114E | #x1150 | [#x1154-#x1155] | #x1159 | | ||
| 45 | [#x115F-#x1161] | #x1163 | #x1165 | #x1167 | #x1169 | [#x116D-#x116E] | | ||
| 46 | [#x1172-#x1173] | #x1175 | #x119E | #x11A8 | #x11AB | [#x11AE-#x11AF] | | ||
| 47 | [#x11B7-#x11B8] | #x11BA | [#x11BC-#x11C2] | #x11EB | #x11F0 | #x11F9 | | ||
| 48 | [#x1E00-#x1E9B] | [#x1EA0-#x1EF9] | [#x1F00-#x1F15] | [#x1F18-#x1F1D] | | ||
| 49 | [#x1F20-#x1F45] | [#x1F48-#x1F4D] | [#x1F50-#x1F57] | #x1F59 | #x1F5B | | ||
| 50 | #x1F5D | [#x1F5F-#x1F7D] | [#x1F80-#x1FB4] | [#x1FB6-#x1FBC] | #x1FBE | | ||
| 51 | [#x1FC2-#x1FC4] | [#x1FC6-#x1FCC] | [#x1FD0-#x1FD3] | [#x1FD6-#x1FDB] | | ||
| 52 | [#x1FE0-#x1FEC] | [#x1FF2-#x1FF4] | [#x1FF6-#x1FFC] | #x2126 | | ||
| 53 | [#x212A-#x212B] | #x212E | [#x2180-#x2182] | [#x3041-#x3094] | | ||
| 54 | [#x30A1-#x30FA] | [#x3105-#x312C] | [#xAC00-#xD7A3]""" | ||
| 55 | |||
| 56 | ideographic = """[#x4E00-#x9FA5] | #x3007 | [#x3021-#x3029]""" | ||
| 57 | |||
| 58 | combiningCharacter = """ | ||
| 59 | [#x0300-#x0345] | [#x0360-#x0361] | [#x0483-#x0486] | [#x0591-#x05A1] | | ||
| 60 | [#x05A3-#x05B9] | [#x05BB-#x05BD] | #x05BF | [#x05C1-#x05C2] | #x05C4 | | ||
| 61 | [#x064B-#x0652] | #x0670 | [#x06D6-#x06DC] | [#x06DD-#x06DF] | | ||
| 62 | [#x06E0-#x06E4] | [#x06E7-#x06E8] | [#x06EA-#x06ED] | [#x0901-#x0903] | | ||
| 63 | #x093C | [#x093E-#x094C] | #x094D | [#x0951-#x0954] | [#x0962-#x0963] | | ||
| 64 | [#x0981-#x0983] | #x09BC | #x09BE | #x09BF | [#x09C0-#x09C4] | | ||
| 65 | [#x09C7-#x09C8] | [#x09CB-#x09CD] | #x09D7 | [#x09E2-#x09E3] | #x0A02 | | ||
| 66 | #x0A3C | #x0A3E | #x0A3F | [#x0A40-#x0A42] | [#x0A47-#x0A48] | | ||
| 67 | [#x0A4B-#x0A4D] | [#x0A70-#x0A71] | [#x0A81-#x0A83] | #x0ABC | | ||
| 68 | [#x0ABE-#x0AC5] | [#x0AC7-#x0AC9] | [#x0ACB-#x0ACD] | [#x0B01-#x0B03] | | ||
| 69 | #x0B3C | [#x0B3E-#x0B43] | [#x0B47-#x0B48] | [#x0B4B-#x0B4D] | | ||
| 70 | [#x0B56-#x0B57] | [#x0B82-#x0B83] | [#x0BBE-#x0BC2] | [#x0BC6-#x0BC8] | | ||
| 71 | [#x0BCA-#x0BCD] | #x0BD7 | [#x0C01-#x0C03] | [#x0C3E-#x0C44] | | ||
| 72 | [#x0C46-#x0C48] | [#x0C4A-#x0C4D] | [#x0C55-#x0C56] | [#x0C82-#x0C83] | | ||
| 73 | [#x0CBE-#x0CC4] | [#x0CC6-#x0CC8] | [#x0CCA-#x0CCD] | [#x0CD5-#x0CD6] | | ||
| 74 | [#x0D02-#x0D03] | [#x0D3E-#x0D43] | [#x0D46-#x0D48] | [#x0D4A-#x0D4D] | | ||
| 75 | #x0D57 | #x0E31 | [#x0E34-#x0E3A] | [#x0E47-#x0E4E] | #x0EB1 | | ||
| 76 | [#x0EB4-#x0EB9] | [#x0EBB-#x0EBC] | [#x0EC8-#x0ECD] | [#x0F18-#x0F19] | | ||
| 77 | #x0F35 | #x0F37 | #x0F39 | #x0F3E | #x0F3F | [#x0F71-#x0F84] | | ||
| 78 | [#x0F86-#x0F8B] | [#x0F90-#x0F95] | #x0F97 | [#x0F99-#x0FAD] | | ||
| 79 | [#x0FB1-#x0FB7] | #x0FB9 | [#x20D0-#x20DC] | #x20E1 | [#x302A-#x302F] | | ||
| 80 | #x3099 | #x309A""" | ||
| 81 | |||
| 82 | digit = """ | ||
| 83 | [#x0030-#x0039] | [#x0660-#x0669] | [#x06F0-#x06F9] | [#x0966-#x096F] | | ||
| 84 | [#x09E6-#x09EF] | [#x0A66-#x0A6F] | [#x0AE6-#x0AEF] | [#x0B66-#x0B6F] | | ||
| 85 | [#x0BE7-#x0BEF] | [#x0C66-#x0C6F] | [#x0CE6-#x0CEF] | [#x0D66-#x0D6F] | | ||
| 86 | [#x0E50-#x0E59] | [#x0ED0-#x0ED9] | [#x0F20-#x0F29]""" | ||
| 87 | |||
| 88 | extender = """ | ||
| 89 | #x00B7 | #x02D0 | #x02D1 | #x0387 | #x0640 | #x0E46 | #x0EC6 | #x3005 | | ||
| 90 | [#x3031-#x3035] | [#x309D-#x309E] | [#x30FC-#x30FE]""" | ||
| 91 | |||
| 92 | letter = " | ".join([baseChar, ideographic]) | ||
| 93 | |||
| 94 | # Without the ":" that the XML Name production would also allow | ||
| 95 | name = " | ".join([letter, digit, ".", "-", "_", combiningCharacter, | ||
| 96 | extender]) | ||
| 97 | nameFirst = " | ".join([letter, "_"]) | ||
| 98 | |||
| 99 | reChar = re.compile(r"#x([\dA-F]{4})") | ||
| 100 | reCharRange = re.compile(r"\[#x([\dA-F]{4})-#x([\dA-F]{4})\]") | ||
| 101 | |||
| 102 | |||
| 103 | def charStringToList(chars): | ||
| 104 | charRanges = [item.strip() for item in chars.split(" | ")] | ||
| 105 | rv = [] | ||
| 106 | for item in charRanges: | ||
| 107 | foundMatch = False | ||
| 108 | for regexp in (reChar, reCharRange): | ||
| 109 | match = regexp.match(item) | ||
| 110 | if match is not None: | ||
| 111 | rv.append([hexToInt(item) for item in match.groups()]) | ||
| 112 | if len(rv[-1]) == 1: | ||
| 113 | rv[-1] = rv[-1] * 2 | ||
| 114 | foundMatch = True | ||
| 115 | break | ||
| 116 | if not foundMatch: | ||
| 117 | assert len(item) == 1 | ||
| 118 | |||
| 119 | rv.append([ord(item)] * 2) | ||
| 120 | rv = normaliseCharList(rv) | ||
| 121 | return rv | ||
| 122 | |||
| 123 | |||
| 124 | def normaliseCharList(charList): | ||
| 125 | charList = sorted(charList) | ||
| 126 | for item in charList: | ||
| 127 | assert item[1] >= item[0] | ||
| 128 | rv = [] | ||
| 129 | i = 0 | ||
| 130 | while i < len(charList): | ||
| 131 | j = 1 | ||
| 132 | rv.append(charList[i]) | ||
| 133 | while i + j < len(charList) and charList[i + j][0] <= rv[-1][1] + 1: | ||
| 134 | rv[-1][1] = charList[i + j][1] | ||
| 135 | j += 1 | ||
| 136 | i += j | ||
| 137 | return rv | ||
| 138 | |||
| 139 | # We don't really support characters above the BMP :( | ||
| 140 | max_unicode = int("FFFF", 16) | ||
| 141 | |||
| 142 | |||
| 143 | def missingRanges(charList): | ||
| 144 | rv = [] | ||
| 145 | if charList[0][0] != 0: | ||
| 146 | rv.append([0, charList[0][0] - 1]) | ||
| 147 | for i, item in enumerate(charList[:-1]): | ||
| 148 | rv.append([item[1] + 1, charList[i + 1][0] - 1]) | ||
| 149 | if charList[-1][1] != max_unicode: | ||
| 150 | rv.append([charList[-1][1] + 1, max_unicode]) | ||
| 151 | return rv | ||
| 152 | |||
| 153 | |||
| 154 | def listToRegexpStr(charList): | ||
| 155 | rv = [] | ||
| 156 | for item in charList: | ||
| 157 | if item[0] == item[1]: | ||
| 158 | rv.append(escapeRegexp(chr(item[0]))) | ||
| 159 | else: | ||
| 160 | rv.append(escapeRegexp(chr(item[0])) + "-" + | ||
| 161 | escapeRegexp(chr(item[1]))) | ||
| 162 | return "[%s]" % "".join(rv) | ||
| 163 | |||
| 164 | |||
| 165 | def hexToInt(hex_str): | ||
| 166 | return int(hex_str, 16) | ||
| 167 | |||
| 168 | |||
| 169 | def escapeRegexp(string): | ||
| 170 | specialCharacters = (".", "^", "$", "*", "+", "?", "{", "}", | ||
| 171 | "[", "]", "|", "(", ")", "-") | ||
| 172 | for char in specialCharacters: | ||
| 173 | string = string.replace(char, "\\" + char) | ||
| 174 | |||
| 175 | return string | ||
| 176 | |||
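# Worked example (illustrative, not part of the vendored file): running a tiny
# production through the helpers above shows the regeneration pipeline.
#
#   charStringToList("#x0041 | [#x0061-#x007A]")  # -> [[65, 65], [97, 122]]
#   missingRanges([[65, 65], [97, 122]])          # -> [[0, 64], [66, 96], [123, 65535]]
#   listToRegexpStr([[0, 64], [66, 96]])          # -> "[\x00-@B-`]"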
| 177 | # output from the above | ||
| 178 | nonXmlNameBMPRegexp = re.compile('[\x00-,/:-@\\[-\\^`\\{-\xb6\xb8-\xbf\xd7\xf7\u0132-\u0133\u013f-\u0140\u0149\u017f\u01c4-\u01cc\u01f1-\u01f3\u01f6-\u01f9\u0218-\u024f\u02a9-\u02ba\u02c2-\u02cf\u02d2-\u02ff\u0346-\u035f\u0362-\u0385\u038b\u038d\u03a2\u03cf\u03d7-\u03d9\u03db\u03dd\u03df\u03e1\u03f4-\u0400\u040d\u0450\u045d\u0482\u0487-\u048f\u04c5-\u04c6\u04c9-\u04ca\u04cd-\u04cf\u04ec-\u04ed\u04f6-\u04f7\u04fa-\u0530\u0557-\u0558\u055a-\u0560\u0587-\u0590\u05a2\u05ba\u05be\u05c0\u05c3\u05c5-\u05cf\u05eb-\u05ef\u05f3-\u0620\u063b-\u063f\u0653-\u065f\u066a-\u066f\u06b8-\u06b9\u06bf\u06cf\u06d4\u06e9\u06ee-\u06ef\u06fa-\u0900\u0904\u093a-\u093b\u094e-\u0950\u0955-\u0957\u0964-\u0965\u0970-\u0980\u0984\u098d-\u098e\u0991-\u0992\u09a9\u09b1\u09b3-\u09b5\u09ba-\u09bb\u09bd\u09c5-\u09c6\u09c9-\u09ca\u09ce-\u09d6\u09d8-\u09db\u09de\u09e4-\u09e5\u09f2-\u0a01\u0a03-\u0a04\u0a0b-\u0a0e\u0a11-\u0a12\u0a29\u0a31\u0a34\u0a37\u0a3a-\u0a3b\u0a3d\u0a43-\u0a46\u0a49-\u0a4a\u0a4e-\u0a58\u0a5d\u0a5f-\u0a65\u0a75-\u0a80\u0a84\u0a8c\u0a8e\u0a92\u0aa9\u0ab1\u0ab4\u0aba-\u0abb\u0ac6\u0aca\u0ace-\u0adf\u0ae1-\u0ae5\u0af0-\u0b00\u0b04\u0b0d-\u0b0e\u0b11-\u0b12\u0b29\u0b31\u0b34-\u0b35\u0b3a-\u0b3b\u0b44-\u0b46\u0b49-\u0b4a\u0b4e-\u0b55\u0b58-\u0b5b\u0b5e\u0b62-\u0b65\u0b70-\u0b81\u0b84\u0b8b-\u0b8d\u0b91\u0b96-\u0b98\u0b9b\u0b9d\u0ba0-\u0ba2\u0ba5-\u0ba7\u0bab-\u0bad\u0bb6\u0bba-\u0bbd\u0bc3-\u0bc5\u0bc9\u0bce-\u0bd6\u0bd8-\u0be6\u0bf0-\u0c00\u0c04\u0c0d\u0c11\u0c29\u0c34\u0c3a-\u0c3d\u0c45\u0c49\u0c4e-\u0c54\u0c57-\u0c5f\u0c62-\u0c65\u0c70-\u0c81\u0c84\u0c8d\u0c91\u0ca9\u0cb4\u0cba-\u0cbd\u0cc5\u0cc9\u0cce-\u0cd4\u0cd7-\u0cdd\u0cdf\u0ce2-\u0ce5\u0cf0-\u0d01\u0d04\u0d0d\u0d11\u0d29\u0d3a-\u0d3d\u0d44-\u0d45\u0d49\u0d4e-\u0d56\u0d58-\u0d5f\u0d62-\u0d65\u0d70-\u0e00\u0e2f\u0e3b-\u0e3f\u0e4f\u0e5a-\u0e80\u0e83\u0e85-\u0e86\u0e89\u0e8b-\u0e8c\u0e8e-\u0e93\u0e98\u0ea0\u0ea4\u0ea6\u0ea8-\u0ea9\u0eac\u0eaf\u0eba\u0ebe-\u0ebf\u0ec5\u0ec7\u0ece-\u0ecf\u0eda-\u0f17\u0f1a-\u0f1f\u0f2a-\u0f34\u0f36\u0f38\u0f3a-\u0f3d\u0f48\u0f6a-\u0f70\u0f85\u0f8c-\u0f8f\u0f96\u0f98\u0fae-\u0fb0\u0fb8\u0fba-\u109f\u10c6-\u10cf\u10f7-\u10ff\u1101\u1104\u1108\u110a\u110d\u1113-\u113b\u113d\u113f\u1141-\u114b\u114d\u114f\u1151-\u1153\u1156-\u1158\u115a-\u115e\u1162\u1164\u1166\u1168\u116a-\u116c\u116f-\u1171\u1174\u1176-\u119d\u119f-\u11a7\u11a9-\u11aa\u11ac-\u11ad\u11b0-\u11b6\u11b9\u11bb\u11c3-\u11ea\u11ec-\u11ef\u11f1-\u11f8\u11fa-\u1dff\u1e9c-\u1e9f\u1efa-\u1eff\u1f16-\u1f17\u1f1e-\u1f1f\u1f46-\u1f47\u1f4e-\u1f4f\u1f58\u1f5a\u1f5c\u1f5e\u1f7e-\u1f7f\u1fb5\u1fbd\u1fbf-\u1fc1\u1fc5\u1fcd-\u1fcf\u1fd4-\u1fd5\u1fdc-\u1fdf\u1fed-\u1ff1\u1ff5\u1ffd-\u20cf\u20dd-\u20e0\u20e2-\u2125\u2127-\u2129\u212c-\u212d\u212f-\u217f\u2183-\u3004\u3006\u3008-\u3020\u3030\u3036-\u3040\u3095-\u3098\u309b-\u309c\u309f-\u30a0\u30fb\u30ff-\u3104\u312d-\u4dff\u9fa6-\uabff\ud7a4-\uffff]') # noqa | ||
| 179 | |||
| 180 | nonXmlNameFirstBMPRegexp = re.compile('[\x00-@\\[-\\^`\\{-\xbf\xd7\xf7\u0132-\u0133\u013f-\u0140\u0149\u017f\u01c4-\u01cc\u01f1-\u01f3\u01f6-\u01f9\u0218-\u024f\u02a9-\u02ba\u02c2-\u0385\u0387\u038b\u038d\u03a2\u03cf\u03d7-\u03d9\u03db\u03dd\u03df\u03e1\u03f4-\u0400\u040d\u0450\u045d\u0482-\u048f\u04c5-\u04c6\u04c9-\u04ca\u04cd-\u04cf\u04ec-\u04ed\u04f6-\u04f7\u04fa-\u0530\u0557-\u0558\u055a-\u0560\u0587-\u05cf\u05eb-\u05ef\u05f3-\u0620\u063b-\u0640\u064b-\u0670\u06b8-\u06b9\u06bf\u06cf\u06d4\u06d6-\u06e4\u06e7-\u0904\u093a-\u093c\u093e-\u0957\u0962-\u0984\u098d-\u098e\u0991-\u0992\u09a9\u09b1\u09b3-\u09b5\u09ba-\u09db\u09de\u09e2-\u09ef\u09f2-\u0a04\u0a0b-\u0a0e\u0a11-\u0a12\u0a29\u0a31\u0a34\u0a37\u0a3a-\u0a58\u0a5d\u0a5f-\u0a71\u0a75-\u0a84\u0a8c\u0a8e\u0a92\u0aa9\u0ab1\u0ab4\u0aba-\u0abc\u0abe-\u0adf\u0ae1-\u0b04\u0b0d-\u0b0e\u0b11-\u0b12\u0b29\u0b31\u0b34-\u0b35\u0b3a-\u0b3c\u0b3e-\u0b5b\u0b5e\u0b62-\u0b84\u0b8b-\u0b8d\u0b91\u0b96-\u0b98\u0b9b\u0b9d\u0ba0-\u0ba2\u0ba5-\u0ba7\u0bab-\u0bad\u0bb6\u0bba-\u0c04\u0c0d\u0c11\u0c29\u0c34\u0c3a-\u0c5f\u0c62-\u0c84\u0c8d\u0c91\u0ca9\u0cb4\u0cba-\u0cdd\u0cdf\u0ce2-\u0d04\u0d0d\u0d11\u0d29\u0d3a-\u0d5f\u0d62-\u0e00\u0e2f\u0e31\u0e34-\u0e3f\u0e46-\u0e80\u0e83\u0e85-\u0e86\u0e89\u0e8b-\u0e8c\u0e8e-\u0e93\u0e98\u0ea0\u0ea4\u0ea6\u0ea8-\u0ea9\u0eac\u0eaf\u0eb1\u0eb4-\u0ebc\u0ebe-\u0ebf\u0ec5-\u0f3f\u0f48\u0f6a-\u109f\u10c6-\u10cf\u10f7-\u10ff\u1101\u1104\u1108\u110a\u110d\u1113-\u113b\u113d\u113f\u1141-\u114b\u114d\u114f\u1151-\u1153\u1156-\u1158\u115a-\u115e\u1162\u1164\u1166\u1168\u116a-\u116c\u116f-\u1171\u1174\u1176-\u119d\u119f-\u11a7\u11a9-\u11aa\u11ac-\u11ad\u11b0-\u11b6\u11b9\u11bb\u11c3-\u11ea\u11ec-\u11ef\u11f1-\u11f8\u11fa-\u1dff\u1e9c-\u1e9f\u1efa-\u1eff\u1f16-\u1f17\u1f1e-\u1f1f\u1f46-\u1f47\u1f4e-\u1f4f\u1f58\u1f5a\u1f5c\u1f5e\u1f7e-\u1f7f\u1fb5\u1fbd\u1fbf-\u1fc1\u1fc5\u1fcd-\u1fcf\u1fd4-\u1fd5\u1fdc-\u1fdf\u1fed-\u1ff1\u1ff5\u1ffd-\u2125\u2127-\u2129\u212c-\u212d\u212f-\u217f\u2183-\u3006\u3008-\u3020\u302a-\u3040\u3095-\u30a0\u30fb-\u3104\u312d-\u4dff\u9fa6-\uabff\ud7a4-\uffff]') # noqa | ||
| 181 | |||
| 182 | # Simpler things | ||
| 183 | nonPubidCharRegexp = re.compile("[^\x20\x0D\x0Aa-zA-Z0-9\\-'()+,./:=?;!*#@$_%]") | ||
| 184 | |||
| 185 | |||
| 186 | class InfosetFilter(object): | ||
| 187 | replacementRegexp = re.compile(r"U[\dA-F]{5}") | ||
| 188 | |||
| 189 | def __init__(self, | ||
| 190 | dropXmlnsLocalName=False, | ||
| 191 | dropXmlnsAttrNs=False, | ||
| 192 | preventDoubleDashComments=False, | ||
| 193 | preventDashAtCommentEnd=False, | ||
| 194 | replaceFormFeedCharacters=True, | ||
| 195 | preventSingleQuotePubid=False): | ||
| 196 | |||
| 197 | self.dropXmlnsLocalName = dropXmlnsLocalName | ||
| 198 | self.dropXmlnsAttrNs = dropXmlnsAttrNs | ||
| 199 | |||
| 200 | self.preventDoubleDashComments = preventDoubleDashComments | ||
| 201 | self.preventDashAtCommentEnd = preventDashAtCommentEnd | ||
| 202 | |||
| 203 | self.replaceFormFeedCharacters = replaceFormFeedCharacters | ||
| 204 | |||
| 205 | self.preventSingleQuotePubid = preventSingleQuotePubid | ||
| 206 | |||
| 207 | self.replaceCache = {} | ||
| 208 | |||
| 209 | def coerceAttribute(self, name, namespace=None): | ||
| 210 | if self.dropXmlnsLocalName and name.startswith("xmlns:"): | ||
| 211 | warnings.warn("Attributes cannot begin with xmlns", DataLossWarning) | ||
| 212 | return None | ||
| 213 | elif (self.dropXmlnsAttrNs and | ||
| 214 | namespace == "http://www.w3.org/2000/xmlns/"): | ||
| 215 | warnings.warn("Attributes cannot be in the xml namespace", DataLossWarning) | ||
| 216 | return None | ||
| 217 | else: | ||
| 218 | return self.toXmlName(name) | ||
| 219 | |||
| 220 | def coerceElement(self, name): | ||
| 221 | return self.toXmlName(name) | ||
| 222 | |||
| 223 | def coerceComment(self, data): | ||
| 224 | if self.preventDoubleDashComments: | ||
| 225 | while "--" in data: | ||
| 226 | warnings.warn("Comments cannot contain adjacent dashes", DataLossWarning) | ||
| 227 | data = data.replace("--", "- -") | ||
| 228 | if data.endswith("-"): | ||
| 229 | warnings.warn("Comments cannot end in a dash", DataLossWarning) | ||
| 230 | data += " " | ||
| 231 | return data | ||
| 232 | |||
| 233 | def coerceCharacters(self, data): | ||
| 234 | if self.replaceFormFeedCharacters: | ||
| 235 | for _ in range(data.count("\x0C")): | ||
| 236 | warnings.warn("Text cannot contain U+000C", DataLossWarning) | ||
| 237 | data = data.replace("\x0C", " ") | ||
| 238 | # Other non-xml characters | ||
| 239 | return data | ||
| 240 | |||
| 241 | def coercePubid(self, data): | ||
| 242 | dataOutput = data | ||
| 243 | for char in nonPubidCharRegexp.findall(data): | ||
| 244 | warnings.warn("Coercing non-XML pubid", DataLossWarning) | ||
| 245 | replacement = self.getReplacementCharacter(char) | ||
| 246 | dataOutput = dataOutput.replace(char, replacement) | ||
| 247 | if self.preventSingleQuotePubid and dataOutput.find("'") >= 0: | ||
| 248 | warnings.warn("Pubid cannot contain single quote", DataLossWarning) | ||
| 249 | dataOutput = dataOutput.replace("'", self.getReplacementCharacter("'")) | ||
| 250 | return dataOutput | ||
| 251 | |||
| 252 | def toXmlName(self, name): | ||
| 253 | nameFirst = name[0] | ||
| 254 | nameRest = name[1:] | ||
| 255 | m = nonXmlNameFirstBMPRegexp.match(nameFirst) | ||
| 256 | if m: | ||
| 257 | warnings.warn("Coercing non-XML name", DataLossWarning) | ||
| 258 | nameFirstOutput = self.getReplacementCharacter(nameFirst) | ||
| 259 | else: | ||
| 260 | nameFirstOutput = nameFirst | ||
| 261 | |||
| 262 | nameRestOutput = nameRest | ||
| 263 | replaceChars = set(nonXmlNameBMPRegexp.findall(nameRest)) | ||
| 264 | for char in replaceChars: | ||
| 265 | warnings.warn("Coercing non-XML name", DataLossWarning) | ||
| 266 | replacement = self.getReplacementCharacter(char) | ||
| 267 | nameRestOutput = nameRestOutput.replace(char, replacement) | ||
| 268 | return nameFirstOutput + nameRestOutput | ||
| 269 | |||
| 270 | def getReplacementCharacter(self, char): | ||
| 271 | if char in self.replaceCache: | ||
| 272 | replacement = self.replaceCache[char] | ||
| 273 | else: | ||
| 274 | replacement = self.escapeChar(char) | ||
| 275 | return replacement | ||
| 276 | |||
| 277 | def fromXmlName(self, name): | ||
| 278 | for item in set(self.replacementRegexp.findall(name)): | ||
| 279 | name = name.replace(item, self.unescapeChar(item)) | ||
| 280 | return name | ||
| 281 | |||
| 282 | def escapeChar(self, char): | ||
| 283 | replacement = "U%05X" % ord(char) | ||
| 284 | self.replaceCache[char] = replacement | ||
| 285 | return replacement | ||
| 286 | |||
| 287 | def unescapeChar(self, charcode): | ||
| 288 | return chr(int(charcode[1:], 16)) | ||
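For orientation, a round trip through the filter looks like this (a sketch; each coerced character is escaped per ``escapeChar``'s ``U%05X`` scheme, and a ``DataLossWarning`` is emitted for the coercion)::

    f = InfosetFilter()
    f.toXmlName("1bad")          # -> "U00031bad" (a leading digit is not XML-legal)
    f.fromXmlName("U00031bad")   # -> "1bad"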
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/_inputstream.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/_inputstream.py new file mode 100644 index 0000000..21c6bbc --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/_inputstream.py | |||
| @@ -0,0 +1,923 @@ | |||
| 1 | from __future__ import absolute_import, division, unicode_literals | ||
| 2 | |||
| 3 | from pip._vendor.six import text_type, binary_type | ||
| 4 | from pip._vendor.six.moves import http_client, urllib | ||
| 5 | |||
| 6 | import codecs | ||
| 7 | import re | ||
| 8 | |||
| 9 | from pip._vendor import webencodings | ||
| 10 | |||
| 11 | from .constants import EOF, spaceCharacters, asciiLetters, asciiUppercase | ||
| 12 | from .constants import _ReparseException | ||
| 13 | from . import _utils | ||
| 14 | |||
| 15 | from io import StringIO | ||
| 16 | |||
| 17 | try: | ||
| 18 | from io import BytesIO | ||
| 19 | except ImportError: | ||
| 20 | BytesIO = StringIO | ||
| 21 | |||
| 22 | # Non-unicode versions of constants for use in the pre-parser | ||
| 23 | spaceCharactersBytes = frozenset([item.encode("ascii") for item in spaceCharacters]) | ||
| 24 | asciiLettersBytes = frozenset([item.encode("ascii") for item in asciiLetters]) | ||
| 25 | asciiUppercaseBytes = frozenset([item.encode("ascii") for item in asciiUppercase]) | ||
| 26 | spacesAngleBrackets = spaceCharactersBytes | frozenset([b">", b"<"]) | ||
| 27 | |||
| 28 | |||
| 29 | invalid_unicode_no_surrogate = "[\u0001-\u0008\u000B\u000E-\u001F\u007F-\u009F\uFDD0-\uFDEF\uFFFE\uFFFF\U0001FFFE\U0001FFFF\U0002FFFE\U0002FFFF\U0003FFFE\U0003FFFF\U0004FFFE\U0004FFFF\U0005FFFE\U0005FFFF\U0006FFFE\U0006FFFF\U0007FFFE\U0007FFFF\U0008FFFE\U0008FFFF\U0009FFFE\U0009FFFF\U000AFFFE\U000AFFFF\U000BFFFE\U000BFFFF\U000CFFFE\U000CFFFF\U000DFFFE\U000DFFFF\U000EFFFE\U000EFFFF\U000FFFFE\U000FFFFF\U0010FFFE\U0010FFFF]" # noqa | ||
| 30 | |||
| 31 | if _utils.supports_lone_surrogates: | ||
| 32 | # Use one extra step of indirection and create surrogates with | ||
| 33 | # eval. Not using this indirection would introduce an illegal | ||
| 34 | # unicode literal on platforms not supporting such lone | ||
| 35 | # surrogates. | ||
| 36 | assert invalid_unicode_no_surrogate[-1] == "]" and invalid_unicode_no_surrogate.count("]") == 1 | ||
| 37 | invalid_unicode_re = re.compile(invalid_unicode_no_surrogate[:-1] + | ||
| 38 | eval('"\\uD800-\\uDFFF"') + # pylint:disable=eval-used | ||
| 39 | "]") | ||
| 40 | else: | ||
| 41 | invalid_unicode_re = re.compile(invalid_unicode_no_surrogate) | ||
| 42 | |||
| 43 | non_bmp_invalid_codepoints = set([0x1FFFE, 0x1FFFF, 0x2FFFE, 0x2FFFF, 0x3FFFE, | ||
| 44 | 0x3FFFF, 0x4FFFE, 0x4FFFF, 0x5FFFE, 0x5FFFF, | ||
| 45 | 0x6FFFE, 0x6FFFF, 0x7FFFE, 0x7FFFF, 0x8FFFE, | ||
| 46 | 0x8FFFF, 0x9FFFE, 0x9FFFF, 0xAFFFE, 0xAFFFF, | ||
| 47 | 0xBFFFE, 0xBFFFF, 0xCFFFE, 0xCFFFF, 0xDFFFE, | ||
| 48 | 0xDFFFF, 0xEFFFE, 0xEFFFF, 0xFFFFE, 0xFFFFF, | ||
| 49 | 0x10FFFE, 0x10FFFF]) | ||
| 50 | |||
| 51 | ascii_punctuation_re = re.compile("[\u0009-\u000D\u0020-\u002F\u003A-\u0040\u005C\u005B-\u0060\u007B-\u007E]") | ||
| 52 | |||
| 53 | # Cache for charsUntil() | ||
| 54 | charsUntilRegEx = {} | ||
| 55 | |||
| 56 | |||
| 57 | class BufferedStream(object): | ||
| 58 | """Buffering for streams that do not have buffering of their own | ||
| 59 | |||
| 60 | The buffer is implemented as a list of chunks on the assumption that | ||
| 61 | joining many strings will be slow since it is O(n**2) | ||
| 62 | """ | ||
| 63 | |||
| 64 | def __init__(self, stream): | ||
| 65 | self.stream = stream | ||
| 66 | self.buffer = [] | ||
| 67 | self.position = [-1, 0] # chunk number, offset | ||
| 68 | |||
| 69 | def tell(self): | ||
| 70 | pos = 0 | ||
| 71 | for chunk in self.buffer[:self.position[0]]: | ||
| 72 | pos += len(chunk) | ||
| 73 | pos += self.position[1] | ||
| 74 | return pos | ||
| 75 | |||
| 76 | def seek(self, pos): | ||
| 77 | assert pos <= self._bufferedBytes() | ||
| 78 | offset = pos | ||
| 79 | i = 0 | ||
| 80 | while len(self.buffer[i]) < offset: | ||
| 81 | offset -= len(self.buffer[i]) | ||
| 82 | i += 1 | ||
| 83 | self.position = [i, offset] | ||
| 84 | |||
| 85 | def read(self, bytes): | ||
| 86 | if not self.buffer: | ||
| 87 | return self._readStream(bytes) | ||
| 88 | elif (self.position[0] == len(self.buffer) and | ||
| 89 | self.position[1] == len(self.buffer[-1])): | ||
| 90 | return self._readStream(bytes) | ||
| 91 | else: | ||
| 92 | return self._readFromBuffer(bytes) | ||
| 93 | |||
| 94 | def _bufferedBytes(self): | ||
| 95 | return sum([len(item) for item in self.buffer]) | ||
| 96 | |||
| 97 | def _readStream(self, bytes): | ||
| 98 | data = self.stream.read(bytes) | ||
| 99 | self.buffer.append(data) | ||
| 100 | self.position[0] += 1 | ||
| 101 | self.position[1] = len(data) | ||
| 102 | return data | ||
| 103 | |||
| 104 | def _readFromBuffer(self, bytes): | ||
| 105 | remainingBytes = bytes | ||
| 106 | rv = [] | ||
| 107 | bufferIndex = self.position[0] | ||
| 108 | bufferOffset = self.position[1] | ||
| 109 | while bufferIndex < len(self.buffer) and remainingBytes != 0: | ||
| 110 | assert remainingBytes > 0 | ||
| 111 | bufferedData = self.buffer[bufferIndex] | ||
| 112 | |||
| 113 | if remainingBytes <= len(bufferedData) - bufferOffset: | ||
| 114 | bytesToRead = remainingBytes | ||
| 115 | self.position = [bufferIndex, bufferOffset + bytesToRead] | ||
| 116 | else: | ||
| 117 | bytesToRead = len(bufferedData) - bufferOffset | ||
| 118 | self.position = [bufferIndex, len(bufferedData)] | ||
| 119 | bufferIndex += 1 | ||
| 120 | rv.append(bufferedData[bufferOffset:bufferOffset + bytesToRead]) | ||
| 121 | remainingBytes -= bytesToRead | ||
| 122 | |||
| 123 | bufferOffset = 0 | ||
| 124 | |||
| 125 | if remainingBytes: | ||
| 126 | rv.append(self._readStream(remainingBytes)) | ||
| 127 | |||
| 128 | return b"".join(rv) | ||
| 129 | |||
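# Usage sketch (the OneWay class is hypothetical): BufferedStream adds
# tell()/seek() over already-read bytes of a stream that cannot seek on its
# own, such as an HTTP response body.
#
#   class OneWay(object):
#       def __init__(self, data):
#           self._data, self._pos = data, 0
#       def read(self, n):
#           chunk = self._data[self._pos:self._pos + n]
#           self._pos += len(chunk)
#           return chunk
#
#   buf = BufferedStream(OneWay(b"<!doctype html>"))
#   first = buf.read(9)     # b"<!doctype"
#   buf.seek(0)             # rewind within the buffer
#   buf.read(9) == first    # -> True, served from the buffer this time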
| 130 | |||
| 131 | def HTMLInputStream(source, **kwargs): | ||
| 132 | # Work around Python bug #20007: read(0) closes the connection. | ||
| 133 | # http://bugs.python.org/issue20007 | ||
| 134 | if (isinstance(source, http_client.HTTPResponse) or | ||
| 135 | # Also check for addinfourl wrapping HTTPResponse | ||
| 136 | (isinstance(source, urllib.response.addbase) and | ||
| 137 | isinstance(source.fp, http_client.HTTPResponse))): | ||
| 138 | isUnicode = False | ||
| 139 | elif hasattr(source, "read"): | ||
| 140 | isUnicode = isinstance(source.read(0), text_type) | ||
| 141 | else: | ||
| 142 | isUnicode = isinstance(source, text_type) | ||
| 143 | |||
| 144 | if isUnicode: | ||
| 145 | encodings = [x for x in kwargs if x.endswith("_encoding")] | ||
| 146 | if encodings: | ||
| 147 | raise TypeError("Cannot set an encoding with a unicode input, set %r" % encodings) | ||
| 148 | |||
| 149 | return HTMLUnicodeInputStream(source, **kwargs) | ||
| 150 | else: | ||
| 151 | return HTMLBinaryInputStream(source, **kwargs) | ||
| 152 | |||
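# Dispatch sketch (illustrative): the factory picks the stream class from the
# source type, and rejects encoding hints that make no sense for text input.
#
#   HTMLInputStream("<p>hi</p>")     # str   -> HTMLUnicodeInputStream
#   HTMLInputStream(b"<p>hi</p>")    # bytes -> HTMLBinaryInputStream
#   HTMLInputStream("<p>hi</p>", override_encoding="utf-8")  # -> TypeError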
| 153 | |||
| 154 | class HTMLUnicodeInputStream(object): | ||
| 155 | """Provides a unicode stream of characters to the HTMLTokenizer. | ||
| 156 | |||
| 157 | This class takes care of character encoding and removing or replacing | ||
| 158 | incorrect byte-sequences and also provides column and line tracking. | ||
| 159 | |||
| 160 | """ | ||
| 161 | |||
| 162 | _defaultChunkSize = 10240 | ||
| 163 | |||
| 164 | def __init__(self, source): | ||
| 165 | """Initialises the HTMLInputStream. | ||
| 166 | |||
| 167 | HTMLInputStream(source, [encoding]) -> Normalized stream from source | ||
| 168 | for use by html5lib. | ||
| 169 | |||
| 170 | source can be either a file-object, local filename or a string. | ||
| 171 | |||
| 172 | This class accepts no encoding parameters: unicode input is | ||
| 173 | already decoded. Encoding detection and overrides live in | ||
| 174 | HTMLBinaryInputStream. | ||
| 176 | |||
| 177 | """ | ||
| 178 | |||
| 179 | if not _utils.supports_lone_surrogates: | ||
| 180 | # Such platforms will have already checked for such | ||
| 181 | # surrogate errors, so no need to do this checking. | ||
| 182 | self.reportCharacterErrors = None | ||
| 183 | elif len("\U0010FFFF") == 1: | ||
| 184 | self.reportCharacterErrors = self.characterErrorsUCS4 | ||
| 185 | else: | ||
| 186 | self.reportCharacterErrors = self.characterErrorsUCS2 | ||
| 187 | |||
| 188 | # List of where new lines occur | ||
| 189 | self.newLines = [0] | ||
| 190 | |||
| 191 | self.charEncoding = (lookupEncoding("utf-8"), "certain") | ||
| 192 | self.dataStream = self.openStream(source) | ||
| 193 | |||
| 194 | self.reset() | ||
| 195 | |||
| 196 | def reset(self): | ||
| 197 | self.chunk = "" | ||
| 198 | self.chunkSize = 0 | ||
| 199 | self.chunkOffset = 0 | ||
| 200 | self.errors = [] | ||
| 201 | |||
| 202 | # number of (complete) lines in previous chunks | ||
| 203 | self.prevNumLines = 0 | ||
| 204 | # number of columns in the last line of the previous chunk | ||
| 205 | self.prevNumCols = 0 | ||
| 206 | |||
| 207 | # Deal with CR LF and surrogates split over chunk boundaries | ||
| 208 | self._bufferedCharacter = None | ||
| 209 | |||
| 210 | def openStream(self, source): | ||
| 211 | """Produces a file object from source. | ||
| 212 | |||
| 213 | source can be either a file object, local filename or a string. | ||
| 214 | |||
| 215 | """ | ||
| 216 | # Already a file object | ||
| 217 | if hasattr(source, 'read'): | ||
| 218 | stream = source | ||
| 219 | else: | ||
| 220 | stream = StringIO(source) | ||
| 221 | |||
| 222 | return stream | ||
| 223 | |||
| 224 | def _position(self, offset): | ||
| 225 | chunk = self.chunk | ||
| 226 | nLines = chunk.count('\n', 0, offset) | ||
| 227 | positionLine = self.prevNumLines + nLines | ||
| 228 | lastLinePos = chunk.rfind('\n', 0, offset) | ||
| 229 | if lastLinePos == -1: | ||
| 230 | positionColumn = self.prevNumCols + offset | ||
| 231 | else: | ||
| 232 | positionColumn = offset - (lastLinePos + 1) | ||
| 233 | return (positionLine, positionColumn) | ||
| 234 | |||
| 235 | def position(self): | ||
| 236 | """Returns (line, col) of the current position in the stream.""" | ||
| 237 | line, col = self._position(self.chunkOffset) | ||
| 238 | return (line + 1, col) | ||
| 239 | |||
| 240 | def char(self): | ||
| 241 | """ Read one character from the stream or queue if available. Return | ||
| 242 | EOF when EOF is reached. | ||
| 243 | """ | ||
| 244 | # Read a new chunk from the input stream if necessary | ||
| 245 | if self.chunkOffset >= self.chunkSize: | ||
| 246 | if not self.readChunk(): | ||
| 247 | return EOF | ||
| 248 | |||
| 249 | chunkOffset = self.chunkOffset | ||
| 250 | char = self.chunk[chunkOffset] | ||
| 251 | self.chunkOffset = chunkOffset + 1 | ||
| 252 | |||
| 253 | return char | ||
| 254 | |||
| 255 | def readChunk(self, chunkSize=None): | ||
| 256 | if chunkSize is None: | ||
| 257 | chunkSize = self._defaultChunkSize | ||
| 258 | |||
| 259 | self.prevNumLines, self.prevNumCols = self._position(self.chunkSize) | ||
| 260 | |||
| 261 | self.chunk = "" | ||
| 262 | self.chunkSize = 0 | ||
| 263 | self.chunkOffset = 0 | ||
| 264 | |||
| 265 | data = self.dataStream.read(chunkSize) | ||
| 266 | |||
| 267 | # Deal with CR LF and surrogates broken across chunks | ||
| 268 | if self._bufferedCharacter: | ||
| 269 | data = self._bufferedCharacter + data | ||
| 270 | self._bufferedCharacter = None | ||
| 271 | elif not data: | ||
| 272 | # We have no more data, bye-bye stream | ||
| 273 | return False | ||
| 274 | |||
| 275 | if len(data) > 1: | ||
| 276 | lastv = ord(data[-1]) | ||
| 277 | if lastv == 0x0D or 0xD800 <= lastv <= 0xDBFF: | ||
| 278 | self._bufferedCharacter = data[-1] | ||
| 279 | data = data[:-1] | ||
| 280 | |||
| 281 | if self.reportCharacterErrors: | ||
| 282 | self.reportCharacterErrors(data) | ||
| 283 | |||
| 284 | # Normalize newlines: CR LF and lone CR become LF | ||
| 285 | data = data.replace("\r\n", "\n") | ||
| 286 | data = data.replace("\r", "\n") | ||
| 287 | |||
| 288 | self.chunk = data | ||
| 289 | self.chunkSize = len(data) | ||
| 290 | |||
| 291 | return True | ||
| 292 | |||
| 293 | def characterErrorsUCS4(self, data): | ||
| 294 | for _ in range(len(invalid_unicode_re.findall(data))): | ||
| 295 | self.errors.append("invalid-codepoint") | ||
| 296 | |||
| 297 | def characterErrorsUCS2(self, data): | ||
| 298 | # Narrow (UCS-2) build: code points outside the BMP arrive as | ||
| 299 | # surrogate pairs, so they need special-casing here | ||
| 300 | skip = False | ||
| 301 | for match in invalid_unicode_re.finditer(data): | ||
| 302 | if skip: | ||
| 303 | skip = False; continue  # skip only the low surrogate of the pair | ||
| 304 | codepoint = ord(match.group()) | ||
| 305 | pos = match.start() | ||
| 306 | # Pretty sure there should be endianness issues here | ||
| 307 | if _utils.isSurrogatePair(data[pos:pos + 2]): | ||
| 308 | # We have a surrogate pair! | ||
| 309 | char_val = _utils.surrogatePairToCodepoint(data[pos:pos + 2]) | ||
| 310 | if char_val in non_bmp_invalid_codepoints: | ||
| 311 | self.errors.append("invalid-codepoint") | ||
| 312 | skip = True | ||
| 313 | elif (codepoint >= 0xD800 and codepoint <= 0xDFFF and | ||
| 314 | pos == len(data) - 1): | ||
| 315 | self.errors.append("invalid-codepoint") | ||
| 316 | else: | ||
| 317 | skip = False | ||
| 318 | self.errors.append("invalid-codepoint") | ||
| 319 | |||
| 320 | def charsUntil(self, characters, opposite=False): | ||
| 321 | """ Returns a string of characters from the stream up to but not | ||
| 322 | including any character in 'characters' or EOF. 'characters' must be | ||
| 323 | a container that supports the 'in' method and iteration over its | ||
| 324 | characters. | ||
| 325 | """ | ||
| 326 | |||
| 327 | # Use a cache of regexps to find the required characters | ||
| 328 | try: | ||
| 329 | chars = charsUntilRegEx[(characters, opposite)] | ||
| 330 | except KeyError: | ||
| 331 | if __debug__: | ||
| 332 | for c in characters: | ||
| 333 | assert(ord(c) < 128) | ||
| 334 | regex = "".join(["\\x%02x" % ord(c) for c in characters]) | ||
| 335 | if not opposite: | ||
| 336 | regex = "^%s" % regex | ||
| 337 | chars = charsUntilRegEx[(characters, opposite)] = re.compile("[%s]+" % regex) | ||
| 338 | |||
| 339 | rv = [] | ||
| 340 | |||
| 341 | while True: | ||
| 342 | # Find the longest matching prefix | ||
| 343 | m = chars.match(self.chunk, self.chunkOffset) | ||
| 344 | if m is None: | ||
| 345 | # If nothing matched, and it wasn't because we ran out of chunk, | ||
| 346 | # then stop | ||
| 347 | if self.chunkOffset != self.chunkSize: | ||
| 348 | break | ||
| 349 | else: | ||
| 350 | end = m.end() | ||
| 351 | # If not the whole chunk matched, return everything | ||
| 352 | # up to the part that didn't match | ||
| 353 | if end != self.chunkSize: | ||
| 354 | rv.append(self.chunk[self.chunkOffset:end]) | ||
| 355 | self.chunkOffset = end | ||
| 356 | break | ||
| 357 | # If the whole remainder of the chunk matched, | ||
| 358 | # use it all and read the next chunk | ||
| 359 | rv.append(self.chunk[self.chunkOffset:]) | ||
| 360 | if not self.readChunk(): | ||
| 361 | # Reached EOF | ||
| 362 | break | ||
| 363 | |||
| 364 | r = "".join(rv) | ||
| 365 | return r | ||
| 366 | |||
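# Example for charsUntil (illustrative): the tokenizer uses it to consume runs
# of text in one regexp match rather than character by character.
#
#   stream = HTMLUnicodeInputStream("abc<def")
#   stream.charsUntil("<")   # -> "abc"; the "<" stays in the stream
#   stream.char()            # -> "<"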
| 367 | def unget(self, char): | ||
| 368 | # Only one character is allowed to be ungotten at once - it must | ||
| 369 | # be consumed again before any further call to unget | ||
| 370 | if char is not None: | ||
| 371 | if self.chunkOffset == 0: | ||
| 372 | # unget is called quite rarely, so it's a good idea to do | ||
| 373 | # more work here if it saves a bit of work in the frequently | ||
| 374 | # called char and charsUntil. | ||
| 375 | # So, just prepend the ungotten character onto the current | ||
| 376 | # chunk: | ||
| 377 | self.chunk = char + self.chunk | ||
| 378 | self.chunkSize += 1 | ||
| 379 | else: | ||
| 380 | self.chunkOffset -= 1 | ||
| 381 | assert self.chunk[self.chunkOffset] == char | ||
| 382 | |||
| 383 | |||
| 384 | class HTMLBinaryInputStream(HTMLUnicodeInputStream): | ||
| 385 | """Provides a unicode stream of characters to the HTMLTokenizer. | ||
| 386 | |||
| 387 | This class takes care of character encoding and removing or replacing | ||
| 388 | incorrect byte-sequences and also provides column and line tracking. | ||
| 389 | |||
| 390 | """ | ||
| 391 | |||
| 392 | def __init__(self, source, override_encoding=None, transport_encoding=None, | ||
| 393 | same_origin_parent_encoding=None, likely_encoding=None, | ||
| 394 | default_encoding="windows-1252", useChardet=True): | ||
| 395 | """Initialises the HTMLInputStream. | ||
| 396 | |||
| 397 | HTMLInputStream(source, [encoding]) -> Normalized stream from source | ||
| 398 | for use by html5lib. | ||
| 399 | |||
| 400 | source can be either a file-object, local filename or a string. | ||
| 401 | |||
| 402 | The optional *_encoding parameters must be strings naming an | ||
| 403 | encoding. An override_encoding is used in preference to anything | ||
| 404 | except a BOM; see determineEncoding for the full precedence order. | ||
| 406 | |||
| 407 | """ | ||
| 408 | # Raw Stream - for unicode objects this will encode to utf-8 and set | ||
| 409 | # self.charEncoding as appropriate | ||
| 410 | self.rawStream = self.openStream(source) | ||
| 411 | |||
| 412 | HTMLUnicodeInputStream.__init__(self, self.rawStream) | ||
| 413 | |||
| 414 | # Encoding Information | ||
| 415 | # Number of bytes to use when looking for a meta element with | ||
| 416 | # encoding information | ||
| 417 | self.numBytesMeta = 1024 | ||
| 418 | # Number of bytes to use when detecting the encoding with chardet | ||
| 419 | self.numBytesChardet = 100 | ||
| 420 | # Things from args | ||
| 421 | self.override_encoding = override_encoding | ||
| 422 | self.transport_encoding = transport_encoding | ||
| 423 | self.same_origin_parent_encoding = same_origin_parent_encoding | ||
| 424 | self.likely_encoding = likely_encoding | ||
| 425 | self.default_encoding = default_encoding | ||
| 426 | |||
| 427 | # Determine encoding | ||
| 428 | self.charEncoding = self.determineEncoding(useChardet) | ||
| 429 | assert self.charEncoding[0] is not None | ||
| 430 | |||
| 431 | # Set up the decoded data stream (reset() chains to the superclass) | ||
| 432 | self.reset() | ||
| 433 | |||
| 434 | def reset(self): | ||
| 435 | self.dataStream = self.charEncoding[0].codec_info.streamreader(self.rawStream, 'replace') | ||
| 436 | HTMLUnicodeInputStream.reset(self) | ||
| 437 | |||
| 438 | def openStream(self, source): | ||
| 439 | """Produces a file object from source. | ||
| 440 | |||
| 441 | source can be either a file object, local filename or a string. | ||
| 442 | |||
| 443 | """ | ||
| 444 | # Already a file object | ||
| 445 | if hasattr(source, 'read'): | ||
| 446 | stream = source | ||
| 447 | else: | ||
| 448 | stream = BytesIO(source) | ||
| 449 | |||
| 450 | try: | ||
| 451 | stream.seek(stream.tell()) | ||
| 452 | except: # pylint:disable=bare-except | ||
| 453 | stream = BufferedStream(stream) | ||
| 454 | |||
| 455 | return stream | ||
| 456 | |||
| 457 | def determineEncoding(self, chardet=True): | ||
| 458 | # BOMs take precedence over everything | ||
| 459 | # This will also read past the BOM if present | ||
| 460 | charEncoding = self.detectBOM(), "certain" | ||
| 461 | if charEncoding[0] is not None: | ||
| 462 | return charEncoding | ||
| 463 | |||
| 464 | # An explicit override encoding is honoured next | ||
| 465 | charEncoding = lookupEncoding(self.override_encoding), "certain" | ||
| 466 | if charEncoding[0] is not None: | ||
| 467 | return charEncoding | ||
| 468 | |||
| 469 | # Now check the transport layer | ||
| 470 | charEncoding = lookupEncoding(self.transport_encoding), "certain" | ||
| 471 | if charEncoding[0] is not None: | ||
| 472 | return charEncoding | ||
| 473 | |||
| 474 | # Look for meta elements with encoding information | ||
| 475 | charEncoding = self.detectEncodingMeta(), "tentative" | ||
| 476 | if charEncoding[0] is not None: | ||
| 477 | return charEncoding | ||
| 478 | |||
| 479 | # Parent document encoding | ||
| 480 | charEncoding = lookupEncoding(self.same_origin_parent_encoding), "tentative" | ||
| 481 | if charEncoding[0] is not None and not charEncoding[0].name.startswith("utf-16"): | ||
| 482 | return charEncoding | ||
| 483 | |||
| 484 | # "likely" encoding | ||
| 485 | charEncoding = lookupEncoding(self.likely_encoding), "tentative" | ||
| 486 | if charEncoding[0] is not None: | ||
| 487 | return charEncoding | ||
| 488 | |||
| 489 | # Guess with chardet, if available | ||
| 490 | if chardet: | ||
| 491 | try: | ||
| 492 | from pip._vendor.chardet.universaldetector import UniversalDetector | ||
| 493 | except ImportError: | ||
| 494 | pass | ||
| 495 | else: | ||
| 496 | buffers = [] | ||
| 497 | detector = UniversalDetector() | ||
| 498 | while not detector.done: | ||
| 499 | buffer = self.rawStream.read(self.numBytesChardet) | ||
| 500 | assert isinstance(buffer, bytes) | ||
| 501 | if not buffer: | ||
| 502 | break | ||
| 503 | buffers.append(buffer) | ||
| 504 | detector.feed(buffer) | ||
| 505 | detector.close() | ||
| 506 | encoding = lookupEncoding(detector.result['encoding']) | ||
| 507 | self.rawStream.seek(0) | ||
| 508 | if encoding is not None: | ||
| 509 | return encoding, "tentative" | ||
| 510 | |||
| 511 | # Try the default encoding | ||
| 512 | charEncoding = lookupEncoding(self.default_encoding), "tentative" | ||
| 513 | if charEncoding[0] is not None: | ||
| 514 | return charEncoding | ||
| 515 | |||
| 516 | # Fallback to html5lib's default if even that hasn't worked | ||
| 517 | return lookupEncoding("windows-1252"), "tentative" | ||
| 518 | |||
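# Precedence sketch (illustrative): with no BOM in the data, an explicit
# override beats every later step of the chain above.
#
#   stream = HTMLBinaryInputStream(b"<p>hi</p>", override_encoding="utf-8")
#   stream.charEncoding      # -> (<the utf-8 codec>, "certain")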
| 519 | def changeEncoding(self, newEncoding): | ||
| 520 | assert self.charEncoding[1] != "certain" | ||
| 521 | newEncoding = lookupEncoding(newEncoding) | ||
| 522 | if newEncoding is None: | ||
| 523 | return | ||
| 524 | if newEncoding.name in ("utf-16be", "utf-16le"): | ||
| 525 | newEncoding = lookupEncoding("utf-8") | ||
| 526 | assert newEncoding is not None | ||
| 527 | elif newEncoding == self.charEncoding[0]: | ||
| 528 | self.charEncoding = (self.charEncoding[0], "certain") | ||
| 529 | else: | ||
| 530 | self.rawStream.seek(0) | ||
| 531 | self.charEncoding = (newEncoding, "certain") | ||
| 532 | self.reset() | ||
| 533 | raise _ReparseException("Encoding changed from %s to %s" % (self.charEncoding[0], newEncoding)) | ||
| 534 | |||
| 535 | def detectBOM(self): | ||
| 536 | """Attempts to detect at BOM at the start of the stream. If | ||
| 537 | an encoding can be determined from the BOM return the name of the | ||
| 538 | encoding otherwise return None""" | ||
| 539 | bomDict = { | ||
| 540 | codecs.BOM_UTF8: 'utf-8', | ||
| 541 | codecs.BOM_UTF16_LE: 'utf-16le', codecs.BOM_UTF16_BE: 'utf-16be', | ||
| 542 | codecs.BOM_UTF32_LE: 'utf-32le', codecs.BOM_UTF32_BE: 'utf-32be' | ||
| 543 | } | ||
| 544 | |||
| 545 | # Go to beginning of file and read in 4 bytes | ||
| 546 | string = self.rawStream.read(4) | ||
| 547 | assert isinstance(string, bytes) | ||
| 548 | |||
| 549 | # Try detecting the BOM using bytes from the string | ||
| 550 | encoding = bomDict.get(string[:3]) # UTF-8 | ||
| 551 | seek = 3 | ||
| 552 | if not encoding: | ||
| 553 | # Need to detect UTF-32 before UTF-16 | ||
| 554 | encoding = bomDict.get(string) # UTF-32 | ||
| 555 | seek = 4 | ||
| 556 | if not encoding: | ||
| 557 | encoding = bomDict.get(string[:2]) # UTF-16 | ||
| 558 | seek = 2 | ||
| 559 | |||
| 560 | # Set the read position past the BOM if one was found, otherwise | ||
| 561 | # set it to the start of the stream | ||
| 562 | if encoding: | ||
| 563 | self.rawStream.seek(seek) | ||
| 564 | return lookupEncoding(encoding) | ||
| 565 | else: | ||
| 566 | self.rawStream.seek(0) | ||
| 567 | return None | ||
| 568 | |||
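# BOM sketch (illustrative): a UTF-8 BOM pins the encoding as "certain" and
# the read position is left just past the BOM bytes.
#
#   stream = HTMLBinaryInputStream(b"\xef\xbb\xbfhi")
#   stream.charEncoding[0].name   # -> "utf-8"
#   stream.charEncoding[1]        # -> "certain"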
| 569 | def detectEncodingMeta(self): | ||
| 570 | """Report the encoding declared by the meta element | ||
| 571 | """ | ||
| 572 | buffer = self.rawStream.read(self.numBytesMeta) | ||
| 573 | assert isinstance(buffer, bytes) | ||
| 574 | parser = EncodingParser(buffer) | ||
| 575 | self.rawStream.seek(0) | ||
| 576 | encoding = parser.getEncoding() | ||
| 577 | |||
| 578 | if encoding is not None and encoding.name in ("utf-16be", "utf-16le"): | ||
| 579 | encoding = lookupEncoding("utf-8") | ||
| 580 | |||
| 581 | return encoding | ||
| 582 | |||
| 583 | |||
| 584 | class EncodingBytes(bytes): | ||
| 585 | """String-like object with an associated position and various extra methods | ||
| 586 | If the position is ever greater than the string length then an exception is | ||
| 587 | raised""" | ||
| 588 | def __new__(cls, value): | ||
| 589 | assert isinstance(value, bytes) | ||
| 590 | return bytes.__new__(cls, value.lower()) | ||
| 591 | |||
| 592 | def __init__(self, value): | ||
| 593 | # pylint:disable=unused-argument | ||
| 594 | self._position = -1 | ||
| 595 | |||
| 596 | def __iter__(self): | ||
| 597 | return self | ||
| 598 | |||
| 599 | def __next__(self): | ||
| 600 | p = self._position = self._position + 1 | ||
| 601 | if p >= len(self): | ||
| 602 | raise StopIteration | ||
| 603 | elif p < 0: | ||
| 604 | raise TypeError | ||
| 605 | return self[p:p + 1] | ||
| 606 | |||
| 607 | def next(self): | ||
| 608 | # Py2 compat | ||
| 609 | return self.__next__() | ||
| 610 | |||
| 611 | def previous(self): | ||
| 612 | p = self._position | ||
| 613 | if p >= len(self): | ||
| 614 | raise StopIteration | ||
| 615 | elif p < 0: | ||
| 616 | raise TypeError | ||
| 617 | self._position = p = p - 1 | ||
| 618 | return self[p:p + 1] | ||
| 619 | |||
| 620 | def setPosition(self, position): | ||
| 621 | if self._position >= len(self): | ||
| 622 | raise StopIteration | ||
| 623 | self._position = position | ||
| 624 | |||
| 625 | def getPosition(self): | ||
| 626 | if self._position >= len(self): | ||
| 627 | raise StopIteration | ||
| 628 | if self._position >= 0: | ||
| 629 | return self._position | ||
| 630 | else: | ||
| 631 | return None | ||
| 632 | |||
| 633 | position = property(getPosition, setPosition) | ||
| 634 | |||
| 635 | def getCurrentByte(self): | ||
| 636 | return self[self.position:self.position + 1] | ||
| 637 | |||
| 638 | currentByte = property(getCurrentByte) | ||
| 639 | |||
| 640 | def skip(self, chars=spaceCharactersBytes): | ||
| 641 | """Skip past a list of characters""" | ||
| 642 | p = self.position # use property for the error-checking | ||
| 643 | while p < len(self): | ||
| 644 | c = self[p:p + 1] | ||
| 645 | if c not in chars: | ||
| 646 | self._position = p | ||
| 647 | return c | ||
| 648 | p += 1 | ||
| 649 | self._position = p | ||
| 650 | return None | ||
| 651 | |||
| 652 | def skipUntil(self, chars): | ||
| 653 | p = self.position | ||
| 654 | while p < len(self): | ||
| 655 | c = self[p:p + 1] | ||
| 656 | if c in chars: | ||
| 657 | self._position = p | ||
| 658 | return c | ||
| 659 | p += 1 | ||
| 660 | self._position = p | ||
| 661 | return None | ||
| 662 | |||
| 663 | def matchBytes(self, bytes): | ||
| 664 | """Look for a sequence of bytes at the start of a string. If the bytes | ||
| 665 | are found return True and advance the position to the byte after the | ||
| 666 | match. Otherwise return False and leave the position alone""" | ||
| 667 | p = self.position | ||
| 668 | data = self[p:p + len(bytes)] | ||
| 669 | rv = data.startswith(bytes) | ||
| 670 | if rv: | ||
| 671 | self.position += len(bytes) | ||
| 672 | return rv | ||
| 673 | |||
| 674 | def jumpTo(self, bytes): | ||
| 675 | """Look for the next sequence of bytes matching a given sequence. If | ||
| 676 | a match is found advance the position to the last byte of the match""" | ||
| 677 | newPosition = self[self.position:].find(bytes) | ||
| 678 | if newPosition > -1: | ||
| 679 | # XXX: This is ugly, but I can't see a nicer way to fix this. | ||
| 680 | if self._position == -1: | ||
| 681 | self._position = 0 | ||
| 682 | self._position += (newPosition + len(bytes) - 1) | ||
| 683 | return True | ||
| 684 | else: | ||
| 685 | raise StopIteration | ||
| 686 | |||
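# Usage sketch (illustrative): the value is lowercased on construction, and
# the position helpers drive the mini parser below.
#
#   data = EncodingBytes(b"<META charset=utf-8>")
#   next(data)                  # -> b"<", position now on the first byte
#   data.matchBytes(b"<meta")   # -> True, position advanced past the match
#   data.currentByte            # -> b" "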
| 687 | |||
| 688 | class EncodingParser(object): | ||
| 689 | """Mini parser for detecting character encoding from meta elements""" | ||
| 690 | |||
| 691 | def __init__(self, data): | ||
| 692 | """string - the data to work on for encoding detection""" | ||
| 693 | self.data = EncodingBytes(data) | ||
| 694 | self.encoding = None | ||
| 695 | |||
| 696 | def getEncoding(self): | ||
| 697 | methodDispatch = ( | ||
| 698 | (b"<!--", self.handleComment), | ||
| 699 | (b"<meta", self.handleMeta), | ||
| 700 | (b"</", self.handlePossibleEndTag), | ||
| 701 | (b"<!", self.handleOther), | ||
| 702 | (b"<?", self.handleOther), | ||
| 703 | (b"<", self.handlePossibleStartTag)) | ||
| 704 | for _ in self.data: | ||
| 705 | keepParsing = True | ||
| 706 | for key, method in methodDispatch: | ||
| 707 | if self.data.matchBytes(key): | ||
| 708 | try: | ||
| 709 | keepParsing = method() | ||
| 710 | break | ||
| 711 | except StopIteration: | ||
| 712 | keepParsing = False | ||
| 713 | break | ||
| 714 | if not keepParsing: | ||
| 715 | break | ||
| 716 | |||
| 717 | return self.encoding | ||
| 718 | |||
| 719 | def handleComment(self): | ||
| 720 | """Skip over comments""" | ||
| 721 | return self.data.jumpTo(b"-->") | ||
| 722 | |||
| 723 | def handleMeta(self): | ||
| 724 | if self.data.currentByte not in spaceCharactersBytes: | ||
| 725 | # <meta is not followed by a space, so just keep going | ||
| 726 | return True | ||
| 727 | # We have a valid meta element we want to search for attributes | ||
| 728 | hasPragma = False | ||
| 729 | pendingEncoding = None | ||
| 730 | while True: | ||
| 731 | # Try to find the next attribute after the current position | ||
| 732 | attr = self.getAttribute() | ||
| 733 | if attr is None: | ||
| 734 | return True | ||
| 735 | else: | ||
| 736 | if attr[0] == b"http-equiv": | ||
| 737 | hasPragma = attr[1] == b"content-type" | ||
| 738 | if hasPragma and pendingEncoding is not None: | ||
| 739 | self.encoding = pendingEncoding | ||
| 740 | return False | ||
| 741 | elif attr[0] == b"charset": | ||
| 742 | tentativeEncoding = attr[1] | ||
| 743 | codec = lookupEncoding(tentativeEncoding) | ||
| 744 | if codec is not None: | ||
| 745 | self.encoding = codec | ||
| 746 | return False | ||
| 747 | elif attr[0] == b"content": | ||
| 748 | contentParser = ContentAttrParser(EncodingBytes(attr[1])) | ||
| 749 | tentativeEncoding = contentParser.parse() | ||
| 750 | if tentativeEncoding is not None: | ||
| 751 | codec = lookupEncoding(tentativeEncoding) | ||
| 752 | if codec is not None: | ||
| 753 | if hasPragma: | ||
| 754 | self.encoding = codec | ||
| 755 | return False | ||
| 756 | else: | ||
| 757 | pendingEncoding = codec | ||
| 758 | |||
| 759 | def handlePossibleStartTag(self): | ||
| 760 | return self.handlePossibleTag(False) | ||
| 761 | |||
| 762 | def handlePossibleEndTag(self): | ||
| 763 | next(self.data) | ||
| 764 | return self.handlePossibleTag(True) | ||
| 765 | |||
| 766 | def handlePossibleTag(self, endTag): | ||
| 767 | data = self.data | ||
| 768 | if data.currentByte not in asciiLettersBytes: | ||
| 769 | # If the next byte is not an ascii letter either ignore this | ||
| 770 | # fragment (possible start tag case) or treat it according to | ||
| 771 | # handleOther | ||
| 772 | if endTag: | ||
| 773 | data.previous() | ||
| 774 | self.handleOther() | ||
| 775 | return True | ||
| 776 | |||
| 777 | c = data.skipUntil(spacesAngleBrackets) | ||
| 778 | if c == b"<": | ||
| 779 | # return to the first step in the overall "two step" algorithm | ||
| 780 | # reprocessing the < byte | ||
| 781 | data.previous() | ||
| 782 | else: | ||
| 783 | # Read all attributes | ||
| 784 | attr = self.getAttribute() | ||
| 785 | while attr is not None: | ||
| 786 | attr = self.getAttribute() | ||
| 787 | return True | ||
| 788 | |||
| 789 | def handleOther(self): | ||
| 790 | return self.data.jumpTo(b">") | ||
| 791 | |||
| 792 | def getAttribute(self): | ||
| 793 | """Return a name,value pair for the next attribute in the stream, | ||
| 794 | if one is found, or None""" | ||
| 795 | data = self.data | ||
| 796 | # Step 1 (skip chars) | ||
| 797 | c = data.skip(spaceCharactersBytes | frozenset([b"/"])) | ||
| 798 | assert c is None or len(c) == 1 | ||
| 799 | # Step 2 | ||
| 800 | if c in (b">", None): | ||
| 801 | return None | ||
| 802 | # Step 3 | ||
| 803 | attrName = [] | ||
| 804 | attrValue = [] | ||
| 805 | # Step 4 attribute name | ||
| 806 | while True: | ||
| 807 | if c == b"=" and attrName: | ||
| 808 | break | ||
| 809 | elif c in spaceCharactersBytes: | ||
| 810 | # Step 6! | ||
| 811 | c = data.skip() | ||
| 812 | break | ||
| 813 | elif c in (b"/", b">"): | ||
| 814 | return b"".join(attrName), b"" | ||
| 815 | elif c in asciiUppercaseBytes: | ||
| 816 | attrName.append(c.lower()) | ||
| 817 | elif c is None: | ||
| 818 | return None | ||
| 819 | else: | ||
| 820 | attrName.append(c) | ||
| 821 | # Step 5 | ||
| 822 | c = next(data) | ||
| 823 | # Step 7 | ||
| 824 | if c != b"=": | ||
| 825 | data.previous() | ||
| 826 | return b"".join(attrName), b"" | ||
| 827 | # Step 8 | ||
| 828 | next(data) | ||
| 829 | # Step 9 | ||
| 830 | c = data.skip() | ||
| 831 | # Step 10 | ||
| 832 | if c in (b"'", b'"'): | ||
| 833 | # 10.1 | ||
| 834 | quoteChar = c | ||
| 835 | while True: | ||
| 836 | # 10.2 | ||
| 837 | c = next(data) | ||
| 838 | # 10.3 | ||
| 839 | if c == quoteChar: | ||
| 840 | next(data) | ||
| 841 | return b"".join(attrName), b"".join(attrValue) | ||
| 842 | # 10.4 | ||
| 843 | elif c in asciiUppercaseBytes: | ||
| 844 | attrValue.append(c.lower()) | ||
| 845 | # 10.5 | ||
| 846 | else: | ||
| 847 | attrValue.append(c) | ||
| 848 | elif c == b">": | ||
| 849 | return b"".join(attrName), b"" | ||
| 850 | elif c in asciiUppercaseBytes: | ||
| 851 | attrValue.append(c.lower()) | ||
| 852 | elif c is None: | ||
| 853 | return None | ||
| 854 | else: | ||
| 855 | attrValue.append(c) | ||
| 856 | # Step 11 | ||
| 857 | while True: | ||
| 858 | c = next(data) | ||
| 859 | if c in spacesAngleBrackets: | ||
| 860 | return b"".join(attrName), b"".join(attrValue) | ||
| 861 | elif c in asciiUppercaseBytes: | ||
| 862 | attrValue.append(c.lower()) | ||
| 863 | elif c is None: | ||
| 864 | return None | ||
| 865 | else: | ||
| 866 | attrValue.append(c) | ||
| 867 | |||
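# Usage sketch (illustrative): this is what detectEncodingMeta feeds with the
# first numBytesMeta bytes of the raw stream.
#
#   EncodingParser(b'<meta charset="utf-8">').getEncoding().name  # -> "utf-8"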
| 868 | |||
| 869 | class ContentAttrParser(object): | ||
| 870 | def __init__(self, data): | ||
| 871 | assert isinstance(data, bytes) | ||
| 872 | self.data = data | ||
| 873 | |||
| 874 | def parse(self): | ||
| 875 | try: | ||
| 876 | # Check if the attr name is charset | ||
| 877 | # otherwise return | ||
| 878 | self.data.jumpTo(b"charset") | ||
| 879 | self.data.position += 1 | ||
| 880 | self.data.skip() | ||
| 881 | if self.data.currentByte != b"=": | ||
| 882 | # If there is no = sign keep looking for attrs | ||
| 883 | return None | ||
| 884 | self.data.position += 1 | ||
| 885 | self.data.skip() | ||
| 886 | # Look for an encoding between matching quote marks | ||
| 887 | if self.data.currentByte in (b'"', b"'"): | ||
| 888 | quoteMark = self.data.currentByte | ||
| 889 | self.data.position += 1 | ||
| 890 | oldPosition = self.data.position | ||
| 891 | if self.data.jumpTo(quoteMark): | ||
| 892 | return self.data[oldPosition:self.data.position] | ||
| 893 | else: | ||
| 894 | return None | ||
| 895 | else: | ||
| 896 | # Unquoted value | ||
| 897 | oldPosition = self.data.position | ||
| 898 | try: | ||
| 899 | self.data.skipUntil(spaceCharactersBytes) | ||
| 900 | return self.data[oldPosition:self.data.position] | ||
| 901 | except StopIteration: | ||
| 902 | # Return the whole remaining value | ||
| 903 | return self.data[oldPosition:] | ||
| 904 | except StopIteration: | ||
| 905 | return None | ||
| 906 | |||
| 907 | |||
| 908 | def lookupEncoding(encoding): | ||
| 909 | """Return the python codec name corresponding to an encoding or None if the | ||
| 910 | string doesn't correspond to a valid encoding.""" | ||
| 911 | if isinstance(encoding, binary_type): | ||
| 912 | try: | ||
| 913 | encoding = encoding.decode("ascii") | ||
| 914 | except UnicodeDecodeError: | ||
| 915 | return None | ||
| 916 | |||
| 917 | if encoding is not None: | ||
| 918 | try: | ||
| 919 | return webencodings.lookup(encoding) | ||
| 920 | except AttributeError: | ||
| 921 | return None | ||
| 922 | else: | ||
| 923 | return None | ||
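A few illustrative calls (a sketch; label resolution is delegated to ``webencodings``, which implements the WHATWG label table)::

    lookupEncoding(b"UTF8").name     # bytes labels are decoded; -> "utf-8"
    lookupEncoding("latin1").name    # WHATWG maps latin1 to -> "windows-1252"
    lookupEncoding("no-such-label")  # unknown labels -> None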
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/_tokenizer.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/_tokenizer.py new file mode 100644 index 0000000..ef1ccf8 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/_tokenizer.py | |||
| @@ -0,0 +1,1721 @@ | |||
| 1 | from __future__ import absolute_import, division, unicode_literals | ||
| 2 | |||
| 3 | from pip._vendor.six import unichr as chr | ||
| 4 | |||
| 5 | from collections import deque | ||
| 6 | |||
| 7 | from .constants import spaceCharacters | ||
| 8 | from .constants import entities | ||
| 9 | from .constants import asciiLetters, asciiUpper2Lower | ||
| 10 | from .constants import digits, hexDigits, EOF | ||
| 11 | from .constants import tokenTypes, tagTokenTypes | ||
| 12 | from .constants import replacementCharacters | ||
| 13 | |||
| 14 | from ._inputstream import HTMLInputStream | ||
| 15 | |||
| 16 | from ._trie import Trie | ||
| 17 | |||
| 18 | entitiesTrie = Trie(entities) | ||
| 19 | |||
| 20 | |||
| 21 | class HTMLTokenizer(object): | ||
| 22 | """ This class takes care of tokenizing HTML. | ||
| 23 | |||
| 24 | * self.currentToken | ||
| 25 | Holds the token that is currently being processed. | ||
| 26 | |||
| 27 | * self.state | ||
| 28 | Holds a reference to the state method currently consuming input. | ||
| 29 | |||
| 30 | * self.stream | ||
| 31 | Points to HTMLInputStream object. | ||
| 32 | """ | ||
| 33 | |||
| 34 | def __init__(self, stream, parser=None, **kwargs): | ||
| 35 | |||
| 36 | self.stream = HTMLInputStream(stream, **kwargs) | ||
| 37 | self.parser = parser | ||
| 38 | |||
| 39 | # Setup the initial tokenizer state | ||
| 40 | self.escapeFlag = False | ||
| 41 | self.lastFourChars = [] | ||
| 42 | self.state = self.dataState | ||
| 43 | self.escape = False | ||
| 44 | |||
| 45 | # The current token being created | ||
| 46 | self.currentToken = None | ||
| 47 | super(HTMLTokenizer, self).__init__() | ||
| 48 | |||
| 49 | def __iter__(self): | ||
| 50 | """ This is where the magic happens. | ||
| 51 | |||
| 52 | We do our usual processing through the states and when we have a token | ||
| 53 | to return we yield the token, which pauses processing until the next | ||
| 54 | token is requested. | ||
| 55 | """ | ||
| 56 | self.tokenQueue = deque([]) | ||
| 57 | # Start processing. When EOF is reached self.state will return False | ||
| 58 | # instead of True and the loop will terminate. | ||
| 59 | while self.state(): | ||
| 60 | while self.stream.errors: | ||
| 61 | yield {"type": tokenTypes["ParseError"], "data": self.stream.errors.pop(0)} | ||
| 62 | while self.tokenQueue: | ||
| 63 | yield self.tokenQueue.popleft() | ||
| 64 | |||
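# Iteration sketch (illustrative): each yielded token is a dict whose "type"
# is an integer from tokenTypes.
#
#   tokens = list(HTMLTokenizer("<p>hi"))
#   [t["type"] for t in tokens]
#   # -> [tokenTypes["StartTag"], tokenTypes["Characters"]]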
| 65 | def consumeNumberEntity(self, isHex): | ||
| 66 | """This function returns either U+FFFD or the character based on the | ||
| 67 | decimal or hexadecimal representation. It also discards ";" if present. | ||
| 68 | If not present self.tokenQueue.append({"type": tokenTypes["ParseError"]}) is invoked. | ||
| 69 | """ | ||
| 70 | |||
| 71 | allowed = digits | ||
| 72 | radix = 10 | ||
| 73 | if isHex: | ||
| 74 | allowed = hexDigits | ||
| 75 | radix = 16 | ||
| 76 | |||
| 77 | charStack = [] | ||
| 78 | |||
| 79 | # Consume all the characters that are in range while making sure we | ||
| 80 | # don't hit an EOF. | ||
| 81 | c = self.stream.char() | ||
| 82 | while c in allowed and c is not EOF: | ||
| 83 | charStack.append(c) | ||
| 84 | c = self.stream.char() | ||
| 85 | |||
| 86 | # Convert the set of characters consumed to an int. | ||
| 87 | charAsInt = int("".join(charStack), radix) | ||
| 88 | |||
| 89 | # Certain characters get replaced with others | ||
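|         # (e.g. 0x00 maps to U+FFFD, and Windows-1252 code points such as | |||
|         # 0x93 map to U+201C, per the spec's replacement table). | |||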
| 90 | if charAsInt in replacementCharacters: | ||
| 91 | char = replacementCharacters[charAsInt] | ||
| 92 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 93 | "illegal-codepoint-for-numeric-entity", | ||
| 94 | "datavars": {"charAsInt": charAsInt}}) | ||
| 95 | elif ((0xD800 <= charAsInt <= 0xDFFF) or | ||
| 96 | (charAsInt > 0x10FFFF)): | ||
| 97 | char = "\uFFFD" | ||
| 98 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 99 | "illegal-codepoint-for-numeric-entity", | ||
| 100 | "datavars": {"charAsInt": charAsInt}}) | ||
| 101 | else: | ||
| 102 | # Should speed up this check somehow (e.g. move the set to a constant) | ||
| 103 | if ((0x0001 <= charAsInt <= 0x0008) or | ||
| 104 | (0x000E <= charAsInt <= 0x001F) or | ||
| 105 | (0x007F <= charAsInt <= 0x009F) or | ||
| 106 | (0xFDD0 <= charAsInt <= 0xFDEF) or | ||
| 107 | charAsInt in frozenset([0x000B, 0xFFFE, 0xFFFF, 0x1FFFE, | ||
| 108 | 0x1FFFF, 0x2FFFE, 0x2FFFF, 0x3FFFE, | ||
| 109 | 0x3FFFF, 0x4FFFE, 0x4FFFF, 0x5FFFE, | ||
| 110 | 0x5FFFF, 0x6FFFE, 0x6FFFF, 0x7FFFE, | ||
| 111 | 0x7FFFF, 0x8FFFE, 0x8FFFF, 0x9FFFE, | ||
| 112 | 0x9FFFF, 0xAFFFE, 0xAFFFF, 0xBFFFE, | ||
| 113 | 0xBFFFF, 0xCFFFE, 0xCFFFF, 0xDFFFE, | ||
| 114 | 0xDFFFF, 0xEFFFE, 0xEFFFF, 0xFFFFE, | ||
| 115 | 0xFFFFF, 0x10FFFE, 0x10FFFF])): | ||
| 116 | self.tokenQueue.append({"type": tokenTypes["ParseError"], | ||
| 117 | "data": | ||
| 118 | "illegal-codepoint-for-numeric-entity", | ||
| 119 | "datavars": {"charAsInt": charAsInt}}) | ||
| 120 | try: | ||
| 121 | # Try/except needed as UCS-2 Python builds' unichr only works | ||
| 122 | # within the BMP. | ||
| 123 | char = chr(charAsInt) | ||
| 124 | except ValueError: | ||
| 125 | v = charAsInt - 0x10000 | ||
| 126 | char = chr(0xD800 | (v >> 10)) + chr(0xDC00 | (v & 0x3FF)) | ||
| 127 | |||
| 128 | # Discard the ; if present. Otherwise, put it back on the queue and | ||
| 129 | # invoke parseError on parser. | ||
| 130 | if c != ";": | ||
| 131 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 132 | "numeric-entity-without-semicolon"}) | ||
| 133 | self.stream.unget(c) | ||
| 134 | |||
| 135 | return char | ||
| 136 | |||
| 137 | def consumeEntity(self, allowedChar=None, fromAttribute=False): | ||
| 138 | # Initialise to the default output for when no entity is matched | ||
| 139 | output = "&" | ||
| 140 | |||
| 141 | charStack = [self.stream.char()] | ||
| 142 | if (charStack[0] in spaceCharacters or charStack[0] in (EOF, "<", "&") or | ||
| 143 | (allowedChar is not None and allowedChar == charStack[0])): | ||
| 144 | self.stream.unget(charStack[0]) | ||
| 145 | |||
| 146 | elif charStack[0] == "#": | ||
| 147 | # Read the next character to see if it's hex or decimal | ||
| 148 | hex = False | ||
| 149 | charStack.append(self.stream.char()) | ||
| 150 | if charStack[-1] in ("x", "X"): | ||
| 151 | hex = True | ||
| 152 | charStack.append(self.stream.char()) | ||
| 153 | |||
| 154 | # charStack[-1] should be the first digit | ||
| 155 | if (hex and charStack[-1] in hexDigits) \ | ||
| 156 | or (not hex and charStack[-1] in digits): | ||
| 157 | # At least one digit found, so consume the whole number | ||
| 158 | self.stream.unget(charStack[-1]) | ||
| 159 | output = self.consumeNumberEntity(hex) | ||
| 160 | else: | ||
| 161 | # No digits found | ||
| 162 | self.tokenQueue.append({"type": tokenTypes["ParseError"], | ||
| 163 | "data": "expected-numeric-entity"}) | ||
| 164 | self.stream.unget(charStack.pop()) | ||
| 165 | output = "&" + "".join(charStack) | ||
| 166 | |||
| 167 | else: | ||
| 168 | # At this point in the process we might have a named entity. Entities | ||
| 169 | # are stored in the global variable "entities". | ||
| 170 | # | ||
| 171 | # Consume characters and compare them to a prefix of the entity | ||
| 172 | # names in the list until the prefix no longer matches. | ||
| 173 | while (charStack[-1] is not EOF): | ||
| 174 | if not entitiesTrie.has_keys_with_prefix("".join(charStack)): | ||
| 175 | break | ||
| 176 | charStack.append(self.stream.char()) | ||
| 177 | |||
| 178 | # At this point we have a string that starts with some characters | ||
| 179 | # that may match an entity | ||
| 180 | # Try to find the longest entity the string will match to take care | ||
| 181 | # of "&noti;" for instance. | ||
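|             # Example: for "&noti;" the longest matching name is "not", so | |||
|             # entities["not"] ("\u00ac") is emitted followed by the leftover | |||
|             # "i", plus a named-entity-without-semicolon parse error. | |||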
| 182 | try: | ||
| 183 | entityName = entitiesTrie.longest_prefix("".join(charStack[:-1])) | ||
| 184 | entityLength = len(entityName) | ||
| 185 | except KeyError: | ||
| 186 | entityName = None | ||
| 187 | |||
| 188 | if entityName is not None: | ||
| 189 | if entityName[-1] != ";": | ||
| 190 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 191 | "named-entity-without-semicolon"}) | ||
| 192 | if (entityName[-1] != ";" and fromAttribute and | ||
| 193 | (charStack[entityLength] in asciiLetters or | ||
| 194 | charStack[entityLength] in digits or | ||
| 195 | charStack[entityLength] == "=")): | ||
| 196 | self.stream.unget(charStack.pop()) | ||
| 197 | output = "&" + "".join(charStack) | ||
| 198 | else: | ||
| 199 | output = entities[entityName] | ||
| 200 | self.stream.unget(charStack.pop()) | ||
| 201 | output += "".join(charStack[entityLength:]) | ||
| 202 | else: | ||
| 203 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 204 | "expected-named-entity"}) | ||
| 205 | self.stream.unget(charStack.pop()) | ||
| 206 | output = "&" + "".join(charStack) | ||
| 207 | |||
| 208 | if fromAttribute: | ||
| 209 | self.currentToken["data"][-1][1] += output | ||
| 210 | else: | ||
| 211 | if output in spaceCharacters: | ||
| 212 | tokenType = "SpaceCharacters" | ||
| 213 | else: | ||
| 214 | tokenType = "Characters" | ||
| 215 | self.tokenQueue.append({"type": tokenTypes[tokenType], "data": output}) | ||
| 216 | |||
| 217 | def processEntityInAttribute(self, allowedChar): | ||
| 218 | """This method replaces the need for "entityInAttributeValueState". | ||
| 219 | """ | ||
| 220 | self.consumeEntity(allowedChar=allowedChar, fromAttribute=True) | ||
| 221 | |||
| 222 | def emitCurrentToken(self): | ||
| 223 | """This method is a generic handler for emitting the tags. It also sets | ||
| 224 | the state to "data" because that's what's needed after a token has been | ||
| 225 | emitted. | ||
| 226 | """ | ||
| 227 | token = self.currentToken | ||
| 228 | # Add token to the queue to be yielded | ||
| 229 | if (token["type"] in tagTokenTypes): | ||
| 230 | token["name"] = token["name"].translate(asciiUpper2Lower) | ||
| 231 | if token["type"] == tokenTypes["EndTag"]: | ||
| 232 | if token["data"]: | ||
| 233 | self.tokenQueue.append({"type": tokenTypes["ParseError"], | ||
| 234 | "data": "attributes-in-end-tag"}) | ||
| 235 | if token["selfClosing"]: | ||
| 236 | self.tokenQueue.append({"type": tokenTypes["ParseError"], | ||
| 237 | "data": "self-closing-flag-on-end-tag"}) | ||
| 238 | self.tokenQueue.append(token) | ||
| 239 | self.state = self.dataState | ||
| 240 | |||
| 241 | # Below are the various tokenizer states, one method per state. | ||
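|     # Each state method consumes characters and returns True to continue | |||
|     # tokenizing, or False once EOF ends tokenization (see __iter__). | |||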
| 242 | def dataState(self): | ||
| 243 | data = self.stream.char() | ||
| 244 | if data == "&": | ||
| 245 | self.state = self.entityDataState | ||
| 246 | elif data == "<": | ||
| 247 | self.state = self.tagOpenState | ||
| 248 | elif data == "\u0000": | ||
| 249 | self.tokenQueue.append({"type": tokenTypes["ParseError"], | ||
| 250 | "data": "invalid-codepoint"}) | ||
| 251 | self.tokenQueue.append({"type": tokenTypes["Characters"], | ||
| 252 | "data": "\u0000"}) | ||
| 253 | elif data is EOF: | ||
| 254 | # Tokenization ends. | ||
| 255 | return False | ||
| 256 | elif data in spaceCharacters: | ||
| 257 | # Directly after emitting a token you switch back to the "data | ||
| 258 | # state". At that point spaceCharacters are important so they are | ||
| 259 | # emitted separately. | ||
| 260 | self.tokenQueue.append({"type": tokenTypes["SpaceCharacters"], "data": | ||
| 261 | data + self.stream.charsUntil(spaceCharacters, True)}) | ||
| 262 | # No need to update lastFourChars here, since the first space will | ||
| 263 | # have already been appended to lastFourChars and will have broken | ||
| 264 | # any <!-- or --> sequences | ||
| 265 | else: | ||
| 266 | chars = self.stream.charsUntil(("&", "<", "\u0000")) | ||
| 267 | self.tokenQueue.append({"type": tokenTypes["Characters"], "data": | ||
| 268 | data + chars}) | ||
| 269 | return True | ||
| 270 | |||
| 271 | def entityDataState(self): | ||
| 272 | self.consumeEntity() | ||
| 273 | self.state = self.dataState | ||
| 274 | return True | ||
| 275 | |||
| 276 | def rcdataState(self): | ||
| 277 | data = self.stream.char() | ||
| 278 | if data == "&": | ||
| 279 | self.state = self.characterReferenceInRcdata | ||
| 280 | elif data == "<": | ||
| 281 | self.state = self.rcdataLessThanSignState | ||
| 282 | elif data == EOF: | ||
| 283 | # Tokenization ends. | ||
| 284 | return False | ||
| 285 | elif data == "\u0000": | ||
| 286 | self.tokenQueue.append({"type": tokenTypes["ParseError"], | ||
| 287 | "data": "invalid-codepoint"}) | ||
| 288 | self.tokenQueue.append({"type": tokenTypes["Characters"], | ||
| 289 | "data": "\uFFFD"}) | ||
| 290 | elif data in spaceCharacters: | ||
| 291 | # Directly after emitting a token you switch back to the "data | ||
| 292 | # state". At that point spaceCharacters are important so they are | ||
| 293 | # emitted separately. | ||
| 294 | self.tokenQueue.append({"type": tokenTypes["SpaceCharacters"], "data": | ||
| 295 | data + self.stream.charsUntil(spaceCharacters, True)}) | ||
| 296 | # No need to update lastFourChars here, since the first space will | ||
| 297 | # have already been appended to lastFourChars and will have broken | ||
| 298 | # any <!-- or --> sequences | ||
| 299 | else: | ||
| 300 | chars = self.stream.charsUntil(("&", "<", "\u0000")) | ||
| 301 | self.tokenQueue.append({"type": tokenTypes["Characters"], "data": | ||
| 302 | data + chars}) | ||
| 303 | return True | ||
| 304 | |||
| 305 | def characterReferenceInRcdata(self): | ||
| 306 | self.consumeEntity() | ||
| 307 | self.state = self.rcdataState | ||
| 308 | return True | ||
| 309 | |||
| 310 | def rawtextState(self): | ||
| 311 | data = self.stream.char() | ||
| 312 | if data == "<": | ||
| 313 | self.state = self.rawtextLessThanSignState | ||
| 314 | elif data == "\u0000": | ||
| 315 | self.tokenQueue.append({"type": tokenTypes["ParseError"], | ||
| 316 | "data": "invalid-codepoint"}) | ||
| 317 | self.tokenQueue.append({"type": tokenTypes["Characters"], | ||
| 318 | "data": "\uFFFD"}) | ||
| 319 | elif data == EOF: | ||
| 320 | # Tokenization ends. | ||
| 321 | return False | ||
| 322 | else: | ||
| 323 | chars = self.stream.charsUntil(("<", "\u0000")) | ||
| 324 | self.tokenQueue.append({"type": tokenTypes["Characters"], "data": | ||
| 325 | data + chars}) | ||
| 326 | return True | ||
| 327 | |||
| 328 | def scriptDataState(self): | ||
| 329 | data = self.stream.char() | ||
| 330 | if data == "<": | ||
| 331 | self.state = self.scriptDataLessThanSignState | ||
| 332 | elif data == "\u0000": | ||
| 333 | self.tokenQueue.append({"type": tokenTypes["ParseError"], | ||
| 334 | "data": "invalid-codepoint"}) | ||
| 335 | self.tokenQueue.append({"type": tokenTypes["Characters"], | ||
| 336 | "data": "\uFFFD"}) | ||
| 337 | elif data == EOF: | ||
| 338 | # Tokenization ends. | ||
| 339 | return False | ||
| 340 | else: | ||
| 341 | chars = self.stream.charsUntil(("<", "\u0000")) | ||
| 342 | self.tokenQueue.append({"type": tokenTypes["Characters"], "data": | ||
| 343 | data + chars}) | ||
| 344 | return True | ||
| 345 | |||
| 346 | def plaintextState(self): | ||
| 347 | data = self.stream.char() | ||
| 348 | if data == EOF: | ||
| 349 | # Tokenization ends. | ||
| 350 | return False | ||
| 351 | elif data == "\u0000": | ||
| 352 | self.tokenQueue.append({"type": tokenTypes["ParseError"], | ||
| 353 | "data": "invalid-codepoint"}) | ||
| 354 | self.tokenQueue.append({"type": tokenTypes["Characters"], | ||
| 355 | "data": "\uFFFD"}) | ||
| 356 | else: | ||
| 357 | self.tokenQueue.append({"type": tokenTypes["Characters"], "data": | ||
| 358 | data + self.stream.charsUntil("\u0000")}) | ||
| 359 | return True | ||
| 360 | |||
| 361 | def tagOpenState(self): | ||
| 362 | data = self.stream.char() | ||
| 363 | if data == "!": | ||
| 364 | self.state = self.markupDeclarationOpenState | ||
| 365 | elif data == "/": | ||
| 366 | self.state = self.closeTagOpenState | ||
| 367 | elif data in asciiLetters: | ||
| 368 | self.currentToken = {"type": tokenTypes["StartTag"], | ||
| 369 | "name": data, "data": [], | ||
| 370 | "selfClosing": False, | ||
| 371 | "selfClosingAcknowledged": False} | ||
| 372 | self.state = self.tagNameState | ||
| 373 | elif data == ">": | ||
| 374 | # XXX In theory it could be something besides a tag name. But | ||
| 375 | # do we really care? | ||
| 376 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 377 | "expected-tag-name-but-got-right-bracket"}) | ||
| 378 | self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<>"}) | ||
| 379 | self.state = self.dataState | ||
| 380 | elif data == "?": | ||
| 381 | # XXX In theory it could be something besides a tag name. But | ||
| 382 | # do we really care? | ||
| 383 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 384 | "expected-tag-name-but-got-question-mark"}) | ||
| 385 | self.stream.unget(data) | ||
| 386 | self.state = self.bogusCommentState | ||
| 387 | else: | ||
| 388 | # XXX | ||
| 389 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 390 | "expected-tag-name"}) | ||
| 391 | self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) | ||
| 392 | self.stream.unget(data) | ||
| 393 | self.state = self.dataState | ||
| 394 | return True | ||
| 395 | |||
| 396 | def closeTagOpenState(self): | ||
| 397 | data = self.stream.char() | ||
| 398 | if data in asciiLetters: | ||
| 399 | self.currentToken = {"type": tokenTypes["EndTag"], "name": data, | ||
| 400 | "data": [], "selfClosing": False} | ||
| 401 | self.state = self.tagNameState | ||
| 402 | elif data == ">": | ||
| 403 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 404 | "expected-closing-tag-but-got-right-bracket"}) | ||
| 405 | self.state = self.dataState | ||
| 406 | elif data is EOF: | ||
| 407 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 408 | "expected-closing-tag-but-got-eof"}) | ||
| 409 | self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "</"}) | ||
| 410 | self.state = self.dataState | ||
| 411 | else: | ||
| 412 | # XXX data can be _'_... | ||
| 413 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 414 | "expected-closing-tag-but-got-char", | ||
| 415 | "datavars": {"data": data}}) | ||
| 416 | self.stream.unget(data) | ||
| 417 | self.state = self.bogusCommentState | ||
| 418 | return True | ||
| 419 | |||
| 420 | def tagNameState(self): | ||
| 421 | data = self.stream.char() | ||
| 422 | if data in spaceCharacters: | ||
| 423 | self.state = self.beforeAttributeNameState | ||
| 424 | elif data == ">": | ||
| 425 | self.emitCurrentToken() | ||
| 426 | elif data is EOF: | ||
| 427 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 428 | "eof-in-tag-name"}) | ||
| 429 | self.state = self.dataState | ||
| 430 | elif data == "/": | ||
| 431 | self.state = self.selfClosingStartTagState | ||
| 432 | elif data == "\u0000": | ||
| 433 | self.tokenQueue.append({"type": tokenTypes["ParseError"], | ||
| 434 | "data": "invalid-codepoint"}) | ||
| 435 | self.currentToken["name"] += "\uFFFD" | ||
| 436 | else: | ||
| 437 | self.currentToken["name"] += data | ||
| 438 | # (Don't use charsUntil here, because tag names are | ||
| 439 | # very short and it's faster to not do anything fancy) | ||
| 440 | return True | ||
| 441 | |||
| 442 | def rcdataLessThanSignState(self): | ||
| 443 | data = self.stream.char() | ||
| 444 | if data == "/": | ||
| 445 | self.temporaryBuffer = "" | ||
| 446 | self.state = self.rcdataEndTagOpenState | ||
| 447 | else: | ||
| 448 | self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) | ||
| 449 | self.stream.unget(data) | ||
| 450 | self.state = self.rcdataState | ||
| 451 | return True | ||
| 452 | |||
| 453 | def rcdataEndTagOpenState(self): | ||
| 454 | data = self.stream.char() | ||
| 455 | if data in asciiLetters: | ||
| 456 | self.temporaryBuffer += data | ||
| 457 | self.state = self.rcdataEndTagNameState | ||
| 458 | else: | ||
| 459 | self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "</"}) | ||
| 460 | self.stream.unget(data) | ||
| 461 | self.state = self.rcdataState | ||
| 462 | return True | ||
| 463 | |||
| 464 | def rcdataEndTagNameState(self): | ||
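|         # An end tag is "appropriate" when its name matches the name of the | |||
|         # last start tag token (the spec's "appropriate end tag token"). | |||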
| 465 | appropriate = self.currentToken and self.currentToken["name"].lower() == self.temporaryBuffer.lower() | ||
| 466 | data = self.stream.char() | ||
| 467 | if data in spaceCharacters and appropriate: | ||
| 468 | self.currentToken = {"type": tokenTypes["EndTag"], | ||
| 469 | "name": self.temporaryBuffer, | ||
| 470 | "data": [], "selfClosing": False} | ||
| 471 | self.state = self.beforeAttributeNameState | ||
| 472 | elif data == "/" and appropriate: | ||
| 473 | self.currentToken = {"type": tokenTypes["EndTag"], | ||
| 474 | "name": self.temporaryBuffer, | ||
| 475 | "data": [], "selfClosing": False} | ||
| 476 | self.state = self.selfClosingStartTagState | ||
| 477 | elif data == ">" and appropriate: | ||
| 478 | self.currentToken = {"type": tokenTypes["EndTag"], | ||
| 479 | "name": self.temporaryBuffer, | ||
| 480 | "data": [], "selfClosing": False} | ||
| 481 | self.emitCurrentToken() | ||
| 482 | self.state = self.dataState | ||
| 483 | elif data in asciiLetters: | ||
| 484 | self.temporaryBuffer += data | ||
| 485 | else: | ||
| 486 | self.tokenQueue.append({"type": tokenTypes["Characters"], | ||
| 487 | "data": "</" + self.temporaryBuffer}) | ||
| 488 | self.stream.unget(data) | ||
| 489 | self.state = self.rcdataState | ||
| 490 | return True | ||
| 491 | |||
| 492 | def rawtextLessThanSignState(self): | ||
| 493 | data = self.stream.char() | ||
| 494 | if data == "/": | ||
| 495 | self.temporaryBuffer = "" | ||
| 496 | self.state = self.rawtextEndTagOpenState | ||
| 497 | else: | ||
| 498 | self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) | ||
| 499 | self.stream.unget(data) | ||
| 500 | self.state = self.rawtextState | ||
| 501 | return True | ||
| 502 | |||
| 503 | def rawtextEndTagOpenState(self): | ||
| 504 | data = self.stream.char() | ||
| 505 | if data in asciiLetters: | ||
| 506 | self.temporaryBuffer += data | ||
| 507 | self.state = self.rawtextEndTagNameState | ||
| 508 | else: | ||
| 509 | self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "</"}) | ||
| 510 | self.stream.unget(data) | ||
| 511 | self.state = self.rawtextState | ||
| 512 | return True | ||
| 513 | |||
| 514 | def rawtextEndTagNameState(self): | ||
| 515 | appropriate = self.currentToken and self.currentToken["name"].lower() == self.temporaryBuffer.lower() | ||
| 516 | data = self.stream.char() | ||
| 517 | if data in spaceCharacters and appropriate: | ||
| 518 | self.currentToken = {"type": tokenTypes["EndTag"], | ||
| 519 | "name": self.temporaryBuffer, | ||
| 520 | "data": [], "selfClosing": False} | ||
| 521 | self.state = self.beforeAttributeNameState | ||
| 522 | elif data == "/" and appropriate: | ||
| 523 | self.currentToken = {"type": tokenTypes["EndTag"], | ||
| 524 | "name": self.temporaryBuffer, | ||
| 525 | "data": [], "selfClosing": False} | ||
| 526 | self.state = self.selfClosingStartTagState | ||
| 527 | elif data == ">" and appropriate: | ||
| 528 | self.currentToken = {"type": tokenTypes["EndTag"], | ||
| 529 | "name": self.temporaryBuffer, | ||
| 530 | "data": [], "selfClosing": False} | ||
| 531 | self.emitCurrentToken() | ||
| 532 | self.state = self.dataState | ||
| 533 | elif data in asciiLetters: | ||
| 534 | self.temporaryBuffer += data | ||
| 535 | else: | ||
| 536 | self.tokenQueue.append({"type": tokenTypes["Characters"], | ||
| 537 | "data": "</" + self.temporaryBuffer}) | ||
| 538 | self.stream.unget(data) | ||
| 539 | self.state = self.rawtextState | ||
| 540 | return True | ||
| 541 | |||
| 542 | def scriptDataLessThanSignState(self): | ||
| 543 | data = self.stream.char() | ||
| 544 | if data == "/": | ||
| 545 | self.temporaryBuffer = "" | ||
| 546 | self.state = self.scriptDataEndTagOpenState | ||
| 547 | elif data == "!": | ||
| 548 | self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<!"}) | ||
| 549 | self.state = self.scriptDataEscapeStartState | ||
| 550 | else: | ||
| 551 | self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) | ||
| 552 | self.stream.unget(data) | ||
| 553 | self.state = self.scriptDataState | ||
| 554 | return True | ||
| 555 | |||
| 556 | def scriptDataEndTagOpenState(self): | ||
| 557 | data = self.stream.char() | ||
| 558 | if data in asciiLetters: | ||
| 559 | self.temporaryBuffer += data | ||
| 560 | self.state = self.scriptDataEndTagNameState | ||
| 561 | else: | ||
| 562 | self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "</"}) | ||
| 563 | self.stream.unget(data) | ||
| 564 | self.state = self.scriptDataState | ||
| 565 | return True | ||
| 566 | |||
| 567 | def scriptDataEndTagNameState(self): | ||
| 568 | appropriate = self.currentToken and self.currentToken["name"].lower() == self.temporaryBuffer.lower() | ||
| 569 | data = self.stream.char() | ||
| 570 | if data in spaceCharacters and appropriate: | ||
| 571 | self.currentToken = {"type": tokenTypes["EndTag"], | ||
| 572 | "name": self.temporaryBuffer, | ||
| 573 | "data": [], "selfClosing": False} | ||
| 574 | self.state = self.beforeAttributeNameState | ||
| 575 | elif data == "/" and appropriate: | ||
| 576 | self.currentToken = {"type": tokenTypes["EndTag"], | ||
| 577 | "name": self.temporaryBuffer, | ||
| 578 | "data": [], "selfClosing": False} | ||
| 579 | self.state = self.selfClosingStartTagState | ||
| 580 | elif data == ">" and appropriate: | ||
| 581 | self.currentToken = {"type": tokenTypes["EndTag"], | ||
| 582 | "name": self.temporaryBuffer, | ||
| 583 | "data": [], "selfClosing": False} | ||
| 584 | self.emitCurrentToken() | ||
| 585 | self.state = self.dataState | ||
| 586 | elif data in asciiLetters: | ||
| 587 | self.temporaryBuffer += data | ||
| 588 | else: | ||
| 589 | self.tokenQueue.append({"type": tokenTypes["Characters"], | ||
| 590 | "data": "</" + self.temporaryBuffer}) | ||
| 591 | self.stream.unget(data) | ||
| 592 | self.state = self.scriptDataState | ||
| 593 | return True | ||
| 594 | |||
| 595 | def scriptDataEscapeStartState(self): | ||
| 596 | data = self.stream.char() | ||
| 597 | if data == "-": | ||
| 598 | self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"}) | ||
| 599 | self.state = self.scriptDataEscapeStartDashState | ||
| 600 | else: | ||
| 601 | self.stream.unget(data) | ||
| 602 | self.state = self.scriptDataState | ||
| 603 | return True | ||
| 604 | |||
| 605 | def scriptDataEscapeStartDashState(self): | ||
| 606 | data = self.stream.char() | ||
| 607 | if data == "-": | ||
| 608 | self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"}) | ||
| 609 | self.state = self.scriptDataEscapedDashDashState | ||
| 610 | else: | ||
| 611 | self.stream.unget(data) | ||
| 612 | self.state = self.scriptDataState | ||
| 613 | return True | ||
| 614 | |||
| 615 | def scriptDataEscapedState(self): | ||
| 616 | data = self.stream.char() | ||
| 617 | if data == "-": | ||
| 618 | self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"}) | ||
| 619 | self.state = self.scriptDataEscapedDashState | ||
| 620 | elif data == "<": | ||
| 621 | self.state = self.scriptDataEscapedLessThanSignState | ||
| 622 | elif data == "\u0000": | ||
| 623 | self.tokenQueue.append({"type": tokenTypes["ParseError"], | ||
| 624 | "data": "invalid-codepoint"}) | ||
| 625 | self.tokenQueue.append({"type": tokenTypes["Characters"], | ||
| 626 | "data": "\uFFFD"}) | ||
| 627 | elif data == EOF: | ||
| 628 | self.state = self.dataState | ||
| 629 | else: | ||
| 630 | chars = self.stream.charsUntil(("<", "-", "\u0000")) | ||
| 631 | self.tokenQueue.append({"type": tokenTypes["Characters"], "data": | ||
| 632 | data + chars}) | ||
| 633 | return True | ||
| 634 | |||
| 635 | def scriptDataEscapedDashState(self): | ||
| 636 | data = self.stream.char() | ||
| 637 | if data == "-": | ||
| 638 | self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"}) | ||
| 639 | self.state = self.scriptDataEscapedDashDashState | ||
| 640 | elif data == "<": | ||
| 641 | self.state = self.scriptDataEscapedLessThanSignState | ||
| 642 | elif data == "\u0000": | ||
| 643 | self.tokenQueue.append({"type": tokenTypes["ParseError"], | ||
| 644 | "data": "invalid-codepoint"}) | ||
| 645 | self.tokenQueue.append({"type": tokenTypes["Characters"], | ||
| 646 | "data": "\uFFFD"}) | ||
| 647 | self.state = self.scriptDataEscapedState | ||
| 648 | elif data == EOF: | ||
| 649 | self.state = self.dataState | ||
| 650 | else: | ||
| 651 | self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) | ||
| 652 | self.state = self.scriptDataEscapedState | ||
| 653 | return True | ||
| 654 | |||
| 655 | def scriptDataEscapedDashDashState(self): | ||
| 656 | data = self.stream.char() | ||
| 657 | if data == "-": | ||
| 658 | self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"}) | ||
| 659 | elif data == "<": | ||
| 660 | self.state = self.scriptDataEscapedLessThanSignState | ||
| 661 | elif data == ">": | ||
| 662 | self.tokenQueue.append({"type": tokenTypes["Characters"], "data": ">"}) | ||
| 663 | self.state = self.scriptDataState | ||
| 664 | elif data == "\u0000": | ||
| 665 | self.tokenQueue.append({"type": tokenTypes["ParseError"], | ||
| 666 | "data": "invalid-codepoint"}) | ||
| 667 | self.tokenQueue.append({"type": tokenTypes["Characters"], | ||
| 668 | "data": "\uFFFD"}) | ||
| 669 | self.state = self.scriptDataEscapedState | ||
| 670 | elif data == EOF: | ||
| 671 | self.state = self.dataState | ||
| 672 | else: | ||
| 673 | self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) | ||
| 674 | self.state = self.scriptDataEscapedState | ||
| 675 | return True | ||
| 676 | |||
| 677 | def scriptDataEscapedLessThanSignState(self): | ||
| 678 | data = self.stream.char() | ||
| 679 | if data == "/": | ||
| 680 | self.temporaryBuffer = "" | ||
| 681 | self.state = self.scriptDataEscapedEndTagOpenState | ||
| 682 | elif data in asciiLetters: | ||
| 683 | self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<" + data}) | ||
| 684 | self.temporaryBuffer = data | ||
| 685 | self.state = self.scriptDataDoubleEscapeStartState | ||
| 686 | else: | ||
| 687 | self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) | ||
| 688 | self.stream.unget(data) | ||
| 689 | self.state = self.scriptDataEscapedState | ||
| 690 | return True | ||
| 691 | |||
| 692 | def scriptDataEscapedEndTagOpenState(self): | ||
| 693 | data = self.stream.char() | ||
| 694 | if data in asciiLetters: | ||
| 695 | self.temporaryBuffer = data | ||
| 696 | self.state = self.scriptDataEscapedEndTagNameState | ||
| 697 | else: | ||
| 698 | self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "</"}) | ||
| 699 | self.stream.unget(data) | ||
| 700 | self.state = self.scriptDataEscapedState | ||
| 701 | return True | ||
| 702 | |||
| 703 | def scriptDataEscapedEndTagNameState(self): | ||
| 704 | appropriate = self.currentToken and self.currentToken["name"].lower() == self.temporaryBuffer.lower() | ||
| 705 | data = self.stream.char() | ||
| 706 | if data in spaceCharacters and appropriate: | ||
| 707 | self.currentToken = {"type": tokenTypes["EndTag"], | ||
| 708 | "name": self.temporaryBuffer, | ||
| 709 | "data": [], "selfClosing": False} | ||
| 710 | self.state = self.beforeAttributeNameState | ||
| 711 | elif data == "/" and appropriate: | ||
| 712 | self.currentToken = {"type": tokenTypes["EndTag"], | ||
| 713 | "name": self.temporaryBuffer, | ||
| 714 | "data": [], "selfClosing": False} | ||
| 715 | self.state = self.selfClosingStartTagState | ||
| 716 | elif data == ">" and appropriate: | ||
| 717 | self.currentToken = {"type": tokenTypes["EndTag"], | ||
| 718 | "name": self.temporaryBuffer, | ||
| 719 | "data": [], "selfClosing": False} | ||
| 720 | self.emitCurrentToken() | ||
| 721 | self.state = self.dataState | ||
| 722 | elif data in asciiLetters: | ||
| 723 | self.temporaryBuffer += data | ||
| 724 | else: | ||
| 725 | self.tokenQueue.append({"type": tokenTypes["Characters"], | ||
| 726 | "data": "</" + self.temporaryBuffer}) | ||
| 727 | self.stream.unget(data) | ||
| 728 | self.state = self.scriptDataEscapedState | ||
| 729 | return True | ||
| 730 | |||
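|     # The "double escaped" states handle a nested <script> inside an HTML | |||
|     # comment within script data, e.g. <script><!--<script> ... | |||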
| 731 | def scriptDataDoubleEscapeStartState(self): | ||
| 732 | data = self.stream.char() | ||
| 733 | if data in (spaceCharacters | frozenset(("/", ">"))): | ||
| 734 | self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) | ||
| 735 | if self.temporaryBuffer.lower() == "script": | ||
| 736 | self.state = self.scriptDataDoubleEscapedState | ||
| 737 | else: | ||
| 738 | self.state = self.scriptDataEscapedState | ||
| 739 | elif data in asciiLetters: | ||
| 740 | self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) | ||
| 741 | self.temporaryBuffer += data | ||
| 742 | else: | ||
| 743 | self.stream.unget(data) | ||
| 744 | self.state = self.scriptDataEscapedState | ||
| 745 | return True | ||
| 746 | |||
| 747 | def scriptDataDoubleEscapedState(self): | ||
| 748 | data = self.stream.char() | ||
| 749 | if data == "-": | ||
| 750 | self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"}) | ||
| 751 | self.state = self.scriptDataDoubleEscapedDashState | ||
| 752 | elif data == "<": | ||
| 753 | self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) | ||
| 754 | self.state = self.scriptDataDoubleEscapedLessThanSignState | ||
| 755 | elif data == "\u0000": | ||
| 756 | self.tokenQueue.append({"type": tokenTypes["ParseError"], | ||
| 757 | "data": "invalid-codepoint"}) | ||
| 758 | self.tokenQueue.append({"type": tokenTypes["Characters"], | ||
| 759 | "data": "\uFFFD"}) | ||
| 760 | elif data == EOF: | ||
| 761 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 762 | "eof-in-script-in-script"}) | ||
| 763 | self.state = self.dataState | ||
| 764 | else: | ||
| 765 | self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) | ||
| 766 | return True | ||
| 767 | |||
| 768 | def scriptDataDoubleEscapedDashState(self): | ||
| 769 | data = self.stream.char() | ||
| 770 | if data == "-": | ||
| 771 | self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"}) | ||
| 772 | self.state = self.scriptDataDoubleEscapedDashDashState | ||
| 773 | elif data == "<": | ||
| 774 | self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) | ||
| 775 | self.state = self.scriptDataDoubleEscapedLessThanSignState | ||
| 776 | elif data == "\u0000": | ||
| 777 | self.tokenQueue.append({"type": tokenTypes["ParseError"], | ||
| 778 | "data": "invalid-codepoint"}) | ||
| 779 | self.tokenQueue.append({"type": tokenTypes["Characters"], | ||
| 780 | "data": "\uFFFD"}) | ||
| 781 | self.state = self.scriptDataDoubleEscapedState | ||
| 782 | elif data == EOF: | ||
| 783 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 784 | "eof-in-script-in-script"}) | ||
| 785 | self.state = self.dataState | ||
| 786 | else: | ||
| 787 | self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) | ||
| 788 | self.state = self.scriptDataDoubleEscapedState | ||
| 789 | return True | ||
| 790 | |||
| 791 | def scriptDataDoubleEscapedDashDashState(self): | ||
| 792 | data = self.stream.char() | ||
| 793 | if data == "-": | ||
| 794 | self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"}) | ||
| 795 | elif data == "<": | ||
| 796 | self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) | ||
| 797 | self.state = self.scriptDataDoubleEscapedLessThanSignState | ||
| 798 | elif data == ">": | ||
| 799 | self.tokenQueue.append({"type": tokenTypes["Characters"], "data": ">"}) | ||
| 800 | self.state = self.scriptDataState | ||
| 801 | elif data == "\u0000": | ||
| 802 | self.tokenQueue.append({"type": tokenTypes["ParseError"], | ||
| 803 | "data": "invalid-codepoint"}) | ||
| 804 | self.tokenQueue.append({"type": tokenTypes["Characters"], | ||
| 805 | "data": "\uFFFD"}) | ||
| 806 | self.state = self.scriptDataDoubleEscapedState | ||
| 807 | elif data == EOF: | ||
| 808 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 809 | "eof-in-script-in-script"}) | ||
| 810 | self.state = self.dataState | ||
| 811 | else: | ||
| 812 | self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) | ||
| 813 | self.state = self.scriptDataDoubleEscapedState | ||
| 814 | return True | ||
| 815 | |||
| 816 | def scriptDataDoubleEscapedLessThanSignState(self): | ||
| 817 | data = self.stream.char() | ||
| 818 | if data == "/": | ||
| 819 | self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "/"}) | ||
| 820 | self.temporaryBuffer = "" | ||
| 821 | self.state = self.scriptDataDoubleEscapeEndState | ||
| 822 | else: | ||
| 823 | self.stream.unget(data) | ||
| 824 | self.state = self.scriptDataDoubleEscapedState | ||
| 825 | return True | ||
| 826 | |||
| 827 | def scriptDataDoubleEscapeEndState(self): | ||
| 828 | data = self.stream.char() | ||
| 829 | if data in (spaceCharacters | frozenset(("/", ">"))): | ||
| 830 | self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) | ||
| 831 | if self.temporaryBuffer.lower() == "script": | ||
| 832 | self.state = self.scriptDataEscapedState | ||
| 833 | else: | ||
| 834 | self.state = self.scriptDataDoubleEscapedState | ||
| 835 | elif data in asciiLetters: | ||
| 836 | self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) | ||
| 837 | self.temporaryBuffer += data | ||
| 838 | else: | ||
| 839 | self.stream.unget(data) | ||
| 840 | self.state = self.scriptDataDoubleEscapedState | ||
| 841 | return True | ||
| 842 | |||
| 843 | def beforeAttributeNameState(self): | ||
| 844 | data = self.stream.char() | ||
| 845 | if data in spaceCharacters: | ||
| 846 | self.stream.charsUntil(spaceCharacters, True) | ||
| 847 | elif data in asciiLetters: | ||
| 848 | self.currentToken["data"].append([data, ""]) | ||
| 849 | self.state = self.attributeNameState | ||
| 850 | elif data == ">": | ||
| 851 | self.emitCurrentToken() | ||
| 852 | elif data == "/": | ||
| 853 | self.state = self.selfClosingStartTagState | ||
| 854 | elif data in ("'", '"', "=", "<"): | ||
| 855 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 856 | "invalid-character-in-attribute-name"}) | ||
| 857 | self.currentToken["data"].append([data, ""]) | ||
| 858 | self.state = self.attributeNameState | ||
| 859 | elif data == "\u0000": | ||
| 860 | self.tokenQueue.append({"type": tokenTypes["ParseError"], | ||
| 861 | "data": "invalid-codepoint"}) | ||
| 862 | self.currentToken["data"].append(["\uFFFD", ""]) | ||
| 863 | self.state = self.attributeNameState | ||
| 864 | elif data is EOF: | ||
| 865 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 866 | "expected-attribute-name-but-got-eof"}) | ||
| 867 | self.state = self.dataState | ||
| 868 | else: | ||
| 869 | self.currentToken["data"].append([data, ""]) | ||
| 870 | self.state = self.attributeNameState | ||
| 871 | return True | ||
| 872 | |||
| 873 | def attributeNameState(self): | ||
| 874 | data = self.stream.char() | ||
| 875 | leavingThisState = True | ||
| 876 | emitToken = False | ||
| 877 | if data == "=": | ||
| 878 | self.state = self.beforeAttributeValueState | ||
| 879 | elif data in asciiLetters: | ||
| 880 | self.currentToken["data"][-1][0] += data +\ | ||
| 881 | self.stream.charsUntil(asciiLetters, True) | ||
| 882 | leavingThisState = False | ||
| 883 | elif data == ">": | ||
| 884 | # XXX If we emit here the attributes are converted to a dict | ||
| 885 | # without being checked and when the code below runs we error | ||
| 886 | # because data is a dict not a list | ||
| 887 | emitToken = True | ||
| 888 | elif data in spaceCharacters: | ||
| 889 | self.state = self.afterAttributeNameState | ||
| 890 | elif data == "/": | ||
| 891 | self.state = self.selfClosingStartTagState | ||
| 892 | elif data == "\u0000": | ||
| 893 | self.tokenQueue.append({"type": tokenTypes["ParseError"], | ||
| 894 | "data": "invalid-codepoint"}) | ||
| 895 | self.currentToken["data"][-1][0] += "\uFFFD" | ||
| 896 | leavingThisState = False | ||
| 897 | elif data in ("'", '"', "<"): | ||
| 898 | self.tokenQueue.append({"type": tokenTypes["ParseError"], | ||
| 899 | "data": | ||
| 900 | "invalid-character-in-attribute-name"}) | ||
| 901 | self.currentToken["data"][-1][0] += data | ||
| 902 | leavingThisState = False | ||
| 903 | elif data is EOF: | ||
| 904 | self.tokenQueue.append({"type": tokenTypes["ParseError"], | ||
| 905 | "data": "eof-in-attribute-name"}) | ||
| 906 | self.state = self.dataState | ||
| 907 | else: | ||
| 908 | self.currentToken["data"][-1][0] += data | ||
| 909 | leavingThisState = False | ||
| 910 | |||
| 911 | if leavingThisState: | ||
| 912 | # Attributes are not dropped at this stage. That happens when the | ||
| 913 | # start tag token is emitted so values can still be safely appended | ||
| 914 | # to attributes, but we do want to report the parse error in time. | ||
| 915 | self.currentToken["data"][-1][0] = ( | ||
| 916 | self.currentToken["data"][-1][0].translate(asciiUpper2Lower)) | ||
| 917 | for name, _ in self.currentToken["data"][:-1]: | ||
| 918 | if self.currentToken["data"][-1][0] == name: | ||
| 919 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 920 | "duplicate-attribute"}) | ||
| 921 | break | ||
| 922 | # XXX Fix for above XXX | ||
| 923 | if emitToken: | ||
| 924 | self.emitCurrentToken() | ||
| 925 | return True | ||
| 926 | |||
| 927 | def afterAttributeNameState(self): | ||
| 928 | data = self.stream.char() | ||
| 929 | if data in spaceCharacters: | ||
| 930 | self.stream.charsUntil(spaceCharacters, True) | ||
| 931 | elif data == "=": | ||
| 932 | self.state = self.beforeAttributeValueState | ||
| 933 | elif data == ">": | ||
| 934 | self.emitCurrentToken() | ||
| 935 | elif data in asciiLetters: | ||
| 936 | self.currentToken["data"].append([data, ""]) | ||
| 937 | self.state = self.attributeNameState | ||
| 938 | elif data == "/": | ||
| 939 | self.state = self.selfClosingStartTagState | ||
| 940 | elif data == "\u0000": | ||
| 941 | self.tokenQueue.append({"type": tokenTypes["ParseError"], | ||
| 942 | "data": "invalid-codepoint"}) | ||
| 943 | self.currentToken["data"].append(["\uFFFD", ""]) | ||
| 944 | self.state = self.attributeNameState | ||
| 945 | elif data in ("'", '"', "<"): | ||
| 946 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 947 | "invalid-character-after-attribute-name"}) | ||
| 948 | self.currentToken["data"].append([data, ""]) | ||
| 949 | self.state = self.attributeNameState | ||
| 950 | elif data is EOF: | ||
| 951 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 952 | "expected-end-of-tag-but-got-eof"}) | ||
| 953 | self.state = self.dataState | ||
| 954 | else: | ||
| 955 | self.currentToken["data"].append([data, ""]) | ||
| 956 | self.state = self.attributeNameState | ||
| 957 | return True | ||
| 958 | |||
| 959 | def beforeAttributeValueState(self): | ||
| 960 | data = self.stream.char() | ||
| 961 | if data in spaceCharacters: | ||
| 962 | self.stream.charsUntil(spaceCharacters, True) | ||
| 963 | elif data == "\"": | ||
| 964 | self.state = self.attributeValueDoubleQuotedState | ||
| 965 | elif data == "&": | ||
| 966 | self.state = self.attributeValueUnQuotedState | ||
| 967 | self.stream.unget(data) | ||
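|             # "&" may start an unquoted attribute value; it is reprocessed | |||
|             # there so processEntityInAttribute can consume the entity. | |||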
| 968 | elif data == "'": | ||
| 969 | self.state = self.attributeValueSingleQuotedState | ||
| 970 | elif data == ">": | ||
| 971 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 972 | "expected-attribute-value-but-got-right-bracket"}) | ||
| 973 | self.emitCurrentToken() | ||
| 974 | elif data == "\u0000": | ||
| 975 | self.tokenQueue.append({"type": tokenTypes["ParseError"], | ||
| 976 | "data": "invalid-codepoint"}) | ||
| 977 | self.currentToken["data"][-1][1] += "\uFFFD" | ||
| 978 | self.state = self.attributeValueUnQuotedState | ||
| 979 | elif data in ("=", "<", "`"): | ||
| 980 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 981 | "equals-in-unquoted-attribute-value"}) | ||
| 982 | self.currentToken["data"][-1][1] += data | ||
| 983 | self.state = self.attributeValueUnQuotedState | ||
| 984 | elif data is EOF: | ||
| 985 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 986 | "expected-attribute-value-but-got-eof"}) | ||
| 987 | self.state = self.dataState | ||
| 988 | else: | ||
| 989 | self.currentToken["data"][-1][1] += data | ||
| 990 | self.state = self.attributeValueUnQuotedState | ||
| 991 | return True | ||
| 992 | |||
| 993 | def attributeValueDoubleQuotedState(self): | ||
| 994 | data = self.stream.char() | ||
| 995 | if data == "\"": | ||
| 996 | self.state = self.afterAttributeValueState | ||
| 997 | elif data == "&": | ||
| 998 | self.processEntityInAttribute('"') | ||
| 999 | elif data == "\u0000": | ||
| 1000 | self.tokenQueue.append({"type": tokenTypes["ParseError"], | ||
| 1001 | "data": "invalid-codepoint"}) | ||
| 1002 | self.currentToken["data"][-1][1] += "\uFFFD" | ||
| 1003 | elif data is EOF: | ||
| 1004 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1005 | "eof-in-attribute-value-double-quote"}) | ||
| 1006 | self.state = self.dataState | ||
| 1007 | else: | ||
| 1008 | self.currentToken["data"][-1][1] += data +\ | ||
| 1009 | self.stream.charsUntil(("\"", "&", "\u0000")) | ||
| 1010 | return True | ||
| 1011 | |||
| 1012 | def attributeValueSingleQuotedState(self): | ||
| 1013 | data = self.stream.char() | ||
| 1014 | if data == "'": | ||
| 1015 | self.state = self.afterAttributeValueState | ||
| 1016 | elif data == "&": | ||
| 1017 | self.processEntityInAttribute("'") | ||
| 1018 | elif data == "\u0000": | ||
| 1019 | self.tokenQueue.append({"type": tokenTypes["ParseError"], | ||
| 1020 | "data": "invalid-codepoint"}) | ||
| 1021 | self.currentToken["data"][-1][1] += "\uFFFD" | ||
| 1022 | elif data is EOF: | ||
| 1023 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1024 | "eof-in-attribute-value-single-quote"}) | ||
| 1025 | self.state = self.dataState | ||
| 1026 | else: | ||
| 1027 | self.currentToken["data"][-1][1] += data +\ | ||
| 1028 | self.stream.charsUntil(("'", "&", "\u0000")) | ||
| 1029 | return True | ||
| 1030 | |||
| 1031 | def attributeValueUnQuotedState(self): | ||
| 1032 | data = self.stream.char() | ||
| 1033 | if data in spaceCharacters: | ||
| 1034 | self.state = self.beforeAttributeNameState | ||
| 1035 | elif data == "&": | ||
| 1036 | self.processEntityInAttribute(">") | ||
| 1037 | elif data == ">": | ||
| 1038 | self.emitCurrentToken() | ||
| 1039 | elif data in ('"', "'", "=", "<", "`"): | ||
| 1040 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1041 | "unexpected-character-in-unquoted-attribute-value"}) | ||
| 1042 | self.currentToken["data"][-1][1] += data | ||
| 1043 | elif data == "\u0000": | ||
| 1044 | self.tokenQueue.append({"type": tokenTypes["ParseError"], | ||
| 1045 | "data": "invalid-codepoint"}) | ||
| 1046 | self.currentToken["data"][-1][1] += "\uFFFD" | ||
| 1047 | elif data is EOF: | ||
| 1048 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1049 | "eof-in-attribute-value-no-quotes"}) | ||
| 1050 | self.state = self.dataState | ||
| 1051 | else: | ||
| 1052 | self.currentToken["data"][-1][1] += data + self.stream.charsUntil( | ||
| 1053 | frozenset(("&", ">", '"', "'", "=", "<", "`", "\u0000")) | spaceCharacters) | ||
| 1054 | return True | ||
| 1055 | |||
| 1056 | def afterAttributeValueState(self): | ||
| 1057 | data = self.stream.char() | ||
| 1058 | if data in spaceCharacters: | ||
| 1059 | self.state = self.beforeAttributeNameState | ||
| 1060 | elif data == ">": | ||
| 1061 | self.emitCurrentToken() | ||
| 1062 | elif data == "/": | ||
| 1063 | self.state = self.selfClosingStartTagState | ||
| 1064 | elif data is EOF: | ||
| 1065 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1066 | "unexpected-EOF-after-attribute-value"}) | ||
| 1067 | self.stream.unget(data) | ||
| 1068 | self.state = self.dataState | ||
| 1069 | else: | ||
| 1070 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1071 | "unexpected-character-after-attribute-value"}) | ||
| 1072 | self.stream.unget(data) | ||
| 1073 | self.state = self.beforeAttributeNameState | ||
| 1074 | return True | ||
| 1075 | |||
| 1076 | def selfClosingStartTagState(self): | ||
| 1077 | data = self.stream.char() | ||
| 1078 | if data == ">": | ||
| 1079 | self.currentToken["selfClosing"] = True | ||
| 1080 | self.emitCurrentToken() | ||
| 1081 | elif data is EOF: | ||
| 1082 | self.tokenQueue.append({"type": tokenTypes["ParseError"], | ||
| 1083 | "data": | ||
| 1084 | "unexpected-EOF-after-solidus-in-tag"}) | ||
| 1085 | self.stream.unget(data) | ||
| 1086 | self.state = self.dataState | ||
| 1087 | else: | ||
| 1088 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1089 | "unexpected-character-after-solidus-in-tag"}) | ||
| 1090 | self.stream.unget(data) | ||
| 1091 | self.state = self.beforeAttributeNameState | ||
| 1092 | return True | ||
| 1093 | |||
| 1094 | def bogusCommentState(self): | ||
| 1095 | # Make a new comment token and give it as value all the characters | ||
| 1096 | # until the first > or EOF (charsUntil checks for EOF automatically) | ||
| 1097 | # and emit it. | ||
| 1098 | data = self.stream.charsUntil(">") | ||
| 1099 | data = data.replace("\u0000", "\uFFFD") | ||
| 1100 | self.tokenQueue.append( | ||
| 1101 | {"type": tokenTypes["Comment"], "data": data}) | ||
| 1102 | |||
| 1103 | # Eat the character directly after the bogus comment which is either a | ||
| 1104 | # ">" or an EOF. | ||
| 1105 | self.stream.char() | ||
| 1106 | self.state = self.dataState | ||
| 1107 | return True | ||
| 1108 | |||
| 1109 | def markupDeclarationOpenState(self): | ||
| 1110 | charStack = [self.stream.char()] | ||
| 1111 | if charStack[-1] == "-": | ||
| 1112 | charStack.append(self.stream.char()) | ||
| 1113 | if charStack[-1] == "-": | ||
| 1114 | self.currentToken = {"type": tokenTypes["Comment"], "data": ""} | ||
| 1115 | self.state = self.commentStartState | ||
| 1116 | return True | ||
| 1117 | elif charStack[-1] in ('d', 'D'): | ||
| 1118 | matched = True | ||
| 1119 | for expected in (('o', 'O'), ('c', 'C'), ('t', 'T'), | ||
| 1120 | ('y', 'Y'), ('p', 'P'), ('e', 'E')): | ||
| 1121 | charStack.append(self.stream.char()) | ||
| 1122 | if charStack[-1] not in expected: | ||
| 1123 | matched = False | ||
| 1124 | break | ||
| 1125 | if matched: | ||
| 1126 | self.currentToken = {"type": tokenTypes["Doctype"], | ||
| 1127 | "name": "", | ||
| 1128 | "publicId": None, "systemId": None, | ||
| 1129 | "correct": True} | ||
| 1130 | self.state = self.doctypeState | ||
| 1131 | return True | ||
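|         # CDATA sections are only honoured in foreign content (e.g. SVG or | |||
|         # MathML), hence the namespace check below. | |||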
| 1132 | elif (charStack[-1] == "[" and | ||
| 1133 | self.parser is not None and | ||
| 1134 | self.parser.tree.openElements and | ||
| 1135 | self.parser.tree.openElements[-1].namespace != self.parser.tree.defaultNamespace): | ||
| 1136 | matched = True | ||
| 1137 | for expected in ["C", "D", "A", "T", "A", "["]: | ||
| 1138 | charStack.append(self.stream.char()) | ||
| 1139 | if charStack[-1] != expected: | ||
| 1140 | matched = False | ||
| 1141 | break | ||
| 1142 | if matched: | ||
| 1143 | self.state = self.cdataSectionState | ||
| 1144 | return True | ||
| 1145 | |||
| 1146 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1147 | "expected-dashes-or-doctype"}) | ||
| 1148 | |||
| 1149 | while charStack: | ||
| 1150 | self.stream.unget(charStack.pop()) | ||
| 1151 | self.state = self.bogusCommentState | ||
| 1152 | return True | ||
| 1153 | |||
| 1154 | def commentStartState(self): | ||
| 1155 | data = self.stream.char() | ||
| 1156 | if data == "-": | ||
| 1157 | self.state = self.commentStartDashState | ||
| 1158 | elif data == "\u0000": | ||
| 1159 | self.tokenQueue.append({"type": tokenTypes["ParseError"], | ||
| 1160 | "data": "invalid-codepoint"}) | ||
| 1161 | self.currentToken["data"] += "\uFFFD" | ||
| 1162 | elif data == ">": | ||
| 1163 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1164 | "incorrect-comment"}) | ||
| 1165 | self.tokenQueue.append(self.currentToken) | ||
| 1166 | self.state = self.dataState | ||
| 1167 | elif data is EOF: | ||
| 1168 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1169 | "eof-in-comment"}) | ||
| 1170 | self.tokenQueue.append(self.currentToken) | ||
| 1171 | self.state = self.dataState | ||
| 1172 | else: | ||
| 1173 | self.currentToken["data"] += data | ||
| 1174 | self.state = self.commentState | ||
| 1175 | return True | ||
| 1176 | |||
| 1177 | def commentStartDashState(self): | ||
| 1178 | data = self.stream.char() | ||
| 1179 | if data == "-": | ||
| 1180 | self.state = self.commentEndState | ||
| 1181 | elif data == "\u0000": | ||
| 1182 | self.tokenQueue.append({"type": tokenTypes["ParseError"], | ||
| 1183 | "data": "invalid-codepoint"}) | ||
| 1184 | self.currentToken["data"] += "-\uFFFD" | ||
| 1185 | elif data == ">": | ||
| 1186 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1187 | "incorrect-comment"}) | ||
| 1188 | self.tokenQueue.append(self.currentToken) | ||
| 1189 | self.state = self.dataState | ||
| 1190 | elif data is EOF: | ||
| 1191 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1192 | "eof-in-comment"}) | ||
| 1193 | self.tokenQueue.append(self.currentToken) | ||
| 1194 | self.state = self.dataState | ||
| 1195 | else: | ||
| 1196 | self.currentToken["data"] += "-" + data | ||
| 1197 | self.state = self.commentState | ||
| 1198 | return True | ||
| 1199 | |||
| 1200 | def commentState(self): | ||
| 1201 | data = self.stream.char() | ||
| 1202 | if data == "-": | ||
| 1203 | self.state = self.commentEndDashState | ||
| 1204 | elif data == "\u0000": | ||
| 1205 | self.tokenQueue.append({"type": tokenTypes["ParseError"], | ||
| 1206 | "data": "invalid-codepoint"}) | ||
| 1207 | self.currentToken["data"] += "\uFFFD" | ||
| 1208 | elif data is EOF: | ||
| 1209 | self.tokenQueue.append({"type": tokenTypes["ParseError"], | ||
| 1210 | "data": "eof-in-comment"}) | ||
| 1211 | self.tokenQueue.append(self.currentToken) | ||
| 1212 | self.state = self.dataState | ||
| 1213 | else: | ||
| 1214 | self.currentToken["data"] += data + \ | ||
| 1215 | self.stream.charsUntil(("-", "\u0000")) | ||
| 1216 | return True | ||
| 1217 | |||
| 1218 | def commentEndDashState(self): | ||
| 1219 | data = self.stream.char() | ||
| 1220 | if data == "-": | ||
| 1221 | self.state = self.commentEndState | ||
| 1222 | elif data == "\u0000": | ||
| 1223 | self.tokenQueue.append({"type": tokenTypes["ParseError"], | ||
| 1224 | "data": "invalid-codepoint"}) | ||
| 1225 | self.currentToken["data"] += "-\uFFFD" | ||
| 1226 | self.state = self.commentState | ||
| 1227 | elif data is EOF: | ||
| 1228 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1229 | "eof-in-comment-end-dash"}) | ||
| 1230 | self.tokenQueue.append(self.currentToken) | ||
| 1231 | self.state = self.dataState | ||
| 1232 | else: | ||
| 1233 | self.currentToken["data"] += "-" + data | ||
| 1234 | self.state = self.commentState | ||
| 1235 | return True | ||
| 1236 | |||
| 1237 | def commentEndState(self): | ||
| 1238 | data = self.stream.char() | ||
| 1239 | if data == ">": | ||
| 1240 | self.tokenQueue.append(self.currentToken) | ||
| 1241 | self.state = self.dataState | ||
| 1242 | elif data == "\u0000": | ||
| 1243 | self.tokenQueue.append({"type": tokenTypes["ParseError"], | ||
| 1244 | "data": "invalid-codepoint"}) | ||
| 1245 | self.currentToken["data"] += "--\uFFFD" | ||
| 1246 | self.state = self.commentState | ||
| 1247 | elif data == "!": | ||
| 1248 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1249 | "unexpected-bang-after-double-dash-in-comment"}) | ||
| 1250 | self.state = self.commentEndBangState | ||
| 1251 | elif data == "-": | ||
| 1252 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1253 | "unexpected-dash-after-double-dash-in-comment"}) | ||
| 1254 | self.currentToken["data"] += data | ||
| 1255 | elif data is EOF: | ||
| 1256 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1257 | "eof-in-comment-double-dash"}) | ||
| 1258 | self.tokenQueue.append(self.currentToken) | ||
| 1259 | self.state = self.dataState | ||
| 1260 | else: | ||
| 1261 | # XXX | ||
| 1262 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1263 | "unexpected-char-in-comment"}) | ||
| 1264 | self.currentToken["data"] += "--" + data | ||
| 1265 | self.state = self.commentState | ||
| 1266 | return True | ||
| 1267 | |||
| 1268 | def commentEndBangState(self): | ||
| 1269 | data = self.stream.char() | ||
| 1270 | if data == ">": | ||
| 1271 | self.tokenQueue.append(self.currentToken) | ||
| 1272 | self.state = self.dataState | ||
| 1273 | elif data == "-": | ||
| 1274 | self.currentToken["data"] += "--!" | ||
| 1275 | self.state = self.commentEndDashState | ||
| 1276 | elif data == "\u0000": | ||
| 1277 | self.tokenQueue.append({"type": tokenTypes["ParseError"], | ||
| 1278 | "data": "invalid-codepoint"}) | ||
| 1279 | self.currentToken["data"] += "--!\uFFFD" | ||
| 1280 | self.state = self.commentState | ||
| 1281 | elif data is EOF: | ||
| 1282 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1283 | "eof-in-comment-end-bang-state"}) | ||
| 1284 | self.tokenQueue.append(self.currentToken) | ||
| 1285 | self.state = self.dataState | ||
| 1286 | else: | ||
| 1287 | self.currentToken["data"] += "--!" + data | ||
| 1288 | self.state = self.commentState | ||
| 1289 | return True | ||
| 1290 | |||
| 1291 | def doctypeState(self): | ||
| 1292 | data = self.stream.char() | ||
| 1293 | if data in spaceCharacters: | ||
| 1294 | self.state = self.beforeDoctypeNameState | ||
| 1295 | elif data is EOF: | ||
| 1296 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1297 | "expected-doctype-name-but-got-eof"}) | ||
| 1298 | self.currentToken["correct"] = False | ||
| 1299 | self.tokenQueue.append(self.currentToken) | ||
| 1300 | self.state = self.dataState | ||
| 1301 | else: | ||
| 1302 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1303 | "need-space-after-doctype"}) | ||
| 1304 | self.stream.unget(data) | ||
| 1305 | self.state = self.beforeDoctypeNameState | ||
| 1306 | return True | ||
| 1307 | |||
| 1308 | def beforeDoctypeNameState(self): | ||
| 1309 | data = self.stream.char() | ||
| 1310 | if data in spaceCharacters: | ||
| 1311 | pass | ||
| 1312 | elif data == ">": | ||
| 1313 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1314 | "expected-doctype-name-but-got-right-bracket"}) | ||
| 1315 | self.currentToken["correct"] = False | ||
| 1316 | self.tokenQueue.append(self.currentToken) | ||
| 1317 | self.state = self.dataState | ||
| 1318 | elif data == "\u0000": | ||
| 1319 | self.tokenQueue.append({"type": tokenTypes["ParseError"], | ||
| 1320 | "data": "invalid-codepoint"}) | ||
| 1321 | self.currentToken["name"] = "\uFFFD" | ||
| 1322 | self.state = self.doctypeNameState | ||
| 1323 | elif data is EOF: | ||
| 1324 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1325 | "expected-doctype-name-but-got-eof"}) | ||
| 1326 | self.currentToken["correct"] = False | ||
| 1327 | self.tokenQueue.append(self.currentToken) | ||
| 1328 | self.state = self.dataState | ||
| 1329 | else: | ||
| 1330 | self.currentToken["name"] = data | ||
| 1331 | self.state = self.doctypeNameState | ||
| 1332 | return True | ||
| 1333 | |||
| 1334 | def doctypeNameState(self): | ||
| 1335 | data = self.stream.char() | ||
| 1336 | if data in spaceCharacters: | ||
| 1337 | self.currentToken["name"] = self.currentToken["name"].translate(asciiUpper2Lower) | ||
| 1338 | self.state = self.afterDoctypeNameState | ||
| 1339 | elif data == ">": | ||
| 1340 | self.currentToken["name"] = self.currentToken["name"].translate(asciiUpper2Lower) | ||
| 1341 | self.tokenQueue.append(self.currentToken) | ||
| 1342 | self.state = self.dataState | ||
| 1343 | elif data == "\u0000": | ||
| 1344 | self.tokenQueue.append({"type": tokenTypes["ParseError"], | ||
| 1345 | "data": "invalid-codepoint"}) | ||
| 1346 | self.currentToken["name"] += "\uFFFD" | ||
| 1347 | self.state = self.doctypeNameState | ||
| 1348 | elif data is EOF: | ||
| 1349 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1350 | "eof-in-doctype-name"}) | ||
| 1351 | self.currentToken["correct"] = False | ||
| 1352 | self.currentToken["name"] = self.currentToken["name"].translate(asciiUpper2Lower) | ||
| 1353 | self.tokenQueue.append(self.currentToken) | ||
| 1354 | self.state = self.dataState | ||
| 1355 | else: | ||
| 1356 | self.currentToken["name"] += data | ||
| 1357 | return True | ||
| 1358 | |||
| 1359 | def afterDoctypeNameState(self): | ||
| 1360 | data = self.stream.char() | ||
| 1361 | if data in spaceCharacters: | ||
| 1362 | pass | ||
| 1363 | elif data == ">": | ||
| 1364 | self.tokenQueue.append(self.currentToken) | ||
| 1365 | self.state = self.dataState | ||
| 1366 | elif data is EOF: | ||
| 1367 | self.currentToken["correct"] = False | ||
| 1368 | self.stream.unget(data) | ||
| 1369 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1370 | "eof-in-doctype"}) | ||
| 1371 | self.tokenQueue.append(self.currentToken) | ||
| 1372 | self.state = self.dataState | ||
| 1373 | else: | ||
| 1374 | if data in ("p", "P"): | ||
| 1375 | matched = True | ||
| 1376 | for expected in (("u", "U"), ("b", "B"), ("l", "L"), | ||
| 1377 | ("i", "I"), ("c", "C")): | ||
| 1378 | data = self.stream.char() | ||
| 1379 | if data not in expected: | ||
| 1380 | matched = False | ||
| 1381 | break | ||
| 1382 | if matched: | ||
| 1383 | self.state = self.afterDoctypePublicKeywordState | ||
| 1384 | return True | ||
| 1385 | elif data in ("s", "S"): | ||
| 1386 | matched = True | ||
| 1387 | for expected in (("y", "Y"), ("s", "S"), ("t", "T"), | ||
| 1388 | ("e", "E"), ("m", "M")): | ||
| 1389 | data = self.stream.char() | ||
| 1390 | if data not in expected: | ||
| 1391 | matched = False | ||
| 1392 | break | ||
| 1393 | if matched: | ||
| 1394 | self.state = self.afterDoctypeSystemKeywordState | ||
| 1395 | return True | ||
| 1396 | |||
| 1397 | # All the characters read before the current 'data' will be | ||
| 1398 | # [a-zA-Z], so they're garbage in the bogus doctype and can be | ||
| 1399 | # discarded; only the latest character might be '>' or EOF | ||
| 1400 | # and needs to be pushed back (unget) | ||
| 1401 | self.stream.unget(data) | ||
| 1402 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1403 | "expected-space-or-right-bracket-in-doctype", "datavars": | ||
| 1404 | {"data": data}}) | ||
| 1405 | self.currentToken["correct"] = False | ||
| 1406 | self.state = self.bogusDoctypeState | ||
| 1407 | |||
| 1408 | return True | ||
| 1409 | |||
| 1410 | def afterDoctypePublicKeywordState(self): | ||
| 1411 | data = self.stream.char() | ||
| 1412 | if data in spaceCharacters: | ||
| 1413 | self.state = self.beforeDoctypePublicIdentifierState | ||
| 1414 | elif data in ("'", '"'): | ||
| 1415 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1416 | "unexpected-char-in-doctype"}) | ||
| 1417 | self.stream.unget(data) | ||
| 1418 | self.state = self.beforeDoctypePublicIdentifierState | ||
| 1419 | elif data is EOF: | ||
| 1420 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1421 | "eof-in-doctype"}) | ||
| 1422 | self.currentToken["correct"] = False | ||
| 1423 | self.tokenQueue.append(self.currentToken) | ||
| 1424 | self.state = self.dataState | ||
| 1425 | else: | ||
| 1426 | self.stream.unget(data) | ||
| 1427 | self.state = self.beforeDoctypePublicIdentifierState | ||
| 1428 | return True | ||
| 1429 | |||
| 1430 | def beforeDoctypePublicIdentifierState(self): | ||
| 1431 | data = self.stream.char() | ||
| 1432 | if data in spaceCharacters: | ||
| 1433 | pass | ||
| 1434 | elif data == "\"": | ||
| 1435 | self.currentToken["publicId"] = "" | ||
| 1436 | self.state = self.doctypePublicIdentifierDoubleQuotedState | ||
| 1437 | elif data == "'": | ||
| 1438 | self.currentToken["publicId"] = "" | ||
| 1439 | self.state = self.doctypePublicIdentifierSingleQuotedState | ||
| 1440 | elif data == ">": | ||
| 1441 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1442 | "unexpected-end-of-doctype"}) | ||
| 1443 | self.currentToken["correct"] = False | ||
| 1444 | self.tokenQueue.append(self.currentToken) | ||
| 1445 | self.state = self.dataState | ||
| 1446 | elif data is EOF: | ||
| 1447 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1448 | "eof-in-doctype"}) | ||
| 1449 | self.currentToken["correct"] = False | ||
| 1450 | self.tokenQueue.append(self.currentToken) | ||
| 1451 | self.state = self.dataState | ||
| 1452 | else: | ||
| 1453 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1454 | "unexpected-char-in-doctype"}) | ||
| 1455 | self.currentToken["correct"] = False | ||
| 1456 | self.state = self.bogusDoctypeState | ||
| 1457 | return True | ||
| 1458 | |||
| 1459 | def doctypePublicIdentifierDoubleQuotedState(self): | ||
| 1460 | data = self.stream.char() | ||
| 1461 | if data == "\"": | ||
| 1462 | self.state = self.afterDoctypePublicIdentifierState | ||
| 1463 | elif data == "\u0000": | ||
| 1464 | self.tokenQueue.append({"type": tokenTypes["ParseError"], | ||
| 1465 | "data": "invalid-codepoint"}) | ||
| 1466 | self.currentToken["publicId"] += "\uFFFD" | ||
| 1467 | elif data == ">": | ||
| 1468 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1469 | "unexpected-end-of-doctype"}) | ||
| 1470 | self.currentToken["correct"] = False | ||
| 1471 | self.tokenQueue.append(self.currentToken) | ||
| 1472 | self.state = self.dataState | ||
| 1473 | elif data is EOF: | ||
| 1474 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1475 | "eof-in-doctype"}) | ||
| 1476 | self.currentToken["correct"] = False | ||
| 1477 | self.tokenQueue.append(self.currentToken) | ||
| 1478 | self.state = self.dataState | ||
| 1479 | else: | ||
| 1480 | self.currentToken["publicId"] += data | ||
| 1481 | return True | ||
| 1482 | |||
| 1483 | def doctypePublicIdentifierSingleQuotedState(self): | ||
| 1484 | data = self.stream.char() | ||
| 1485 | if data == "'": | ||
| 1486 | self.state = self.afterDoctypePublicIdentifierState | ||
| 1487 | elif data == "\u0000": | ||
| 1488 | self.tokenQueue.append({"type": tokenTypes["ParseError"], | ||
| 1489 | "data": "invalid-codepoint"}) | ||
| 1490 | self.currentToken["publicId"] += "\uFFFD" | ||
| 1491 | elif data == ">": | ||
| 1492 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1493 | "unexpected-end-of-doctype"}) | ||
| 1494 | self.currentToken["correct"] = False | ||
| 1495 | self.tokenQueue.append(self.currentToken) | ||
| 1496 | self.state = self.dataState | ||
| 1497 | elif data is EOF: | ||
| 1498 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1499 | "eof-in-doctype"}) | ||
| 1500 | self.currentToken["correct"] = False | ||
| 1501 | self.tokenQueue.append(self.currentToken) | ||
| 1502 | self.state = self.dataState | ||
| 1503 | else: | ||
| 1504 | self.currentToken["publicId"] += data | ||
| 1505 | return True | ||
| 1506 | |||
| 1507 | def afterDoctypePublicIdentifierState(self): | ||
| 1508 | data = self.stream.char() | ||
| 1509 | if data in spaceCharacters: | ||
| 1510 | self.state = self.betweenDoctypePublicAndSystemIdentifiersState | ||
| 1511 | elif data == ">": | ||
| 1512 | self.tokenQueue.append(self.currentToken) | ||
| 1513 | self.state = self.dataState | ||
| 1514 | elif data == '"': | ||
| 1515 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1516 | "unexpected-char-in-doctype"}) | ||
| 1517 | self.currentToken["systemId"] = "" | ||
| 1518 | self.state = self.doctypeSystemIdentifierDoubleQuotedState | ||
| 1519 | elif data == "'": | ||
| 1520 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1521 | "unexpected-char-in-doctype"}) | ||
| 1522 | self.currentToken["systemId"] = "" | ||
| 1523 | self.state = self.doctypeSystemIdentifierSingleQuotedState | ||
| 1524 | elif data is EOF: | ||
| 1525 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1526 | "eof-in-doctype"}) | ||
| 1527 | self.currentToken["correct"] = False | ||
| 1528 | self.tokenQueue.append(self.currentToken) | ||
| 1529 | self.state = self.dataState | ||
| 1530 | else: | ||
| 1531 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1532 | "unexpected-char-in-doctype"}) | ||
| 1533 | self.currentToken["correct"] = False | ||
| 1534 | self.state = self.bogusDoctypeState | ||
| 1535 | return True | ||
| 1536 | |||
| 1537 | def betweenDoctypePublicAndSystemIdentifiersState(self): | ||
| 1538 | data = self.stream.char() | ||
| 1539 | if data in spaceCharacters: | ||
| 1540 | pass | ||
| 1541 | elif data == ">": | ||
| 1542 | self.tokenQueue.append(self.currentToken) | ||
| 1543 | self.state = self.dataState | ||
| 1544 | elif data == '"': | ||
| 1545 | self.currentToken["systemId"] = "" | ||
| 1546 | self.state = self.doctypeSystemIdentifierDoubleQuotedState | ||
| 1547 | elif data == "'": | ||
| 1548 | self.currentToken["systemId"] = "" | ||
| 1549 | self.state = self.doctypeSystemIdentifierSingleQuotedState | ||
| 1550 | elif data is EOF: | ||
| 1551 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1552 | "eof-in-doctype"}) | ||
| 1553 | self.currentToken["correct"] = False | ||
| 1554 | self.tokenQueue.append(self.currentToken) | ||
| 1555 | self.state = self.dataState | ||
| 1556 | else: | ||
| 1557 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1558 | "unexpected-char-in-doctype"}) | ||
| 1559 | self.currentToken["correct"] = False | ||
| 1560 | self.state = self.bogusDoctypeState | ||
| 1561 | return True | ||
| 1562 | |||
| 1563 | def afterDoctypeSystemKeywordState(self): | ||
| 1564 | data = self.stream.char() | ||
| 1565 | if data in spaceCharacters: | ||
| 1566 | self.state = self.beforeDoctypeSystemIdentifierState | ||
| 1567 | elif data in ("'", '"'): | ||
| 1568 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1569 | "unexpected-char-in-doctype"}) | ||
| 1570 | self.stream.unget(data) | ||
| 1571 | self.state = self.beforeDoctypeSystemIdentifierState | ||
| 1572 | elif data is EOF: | ||
| 1573 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1574 | "eof-in-doctype"}) | ||
| 1575 | self.currentToken["correct"] = False | ||
| 1576 | self.tokenQueue.append(self.currentToken) | ||
| 1577 | self.state = self.dataState | ||
| 1578 | else: | ||
| 1579 | self.stream.unget(data) | ||
| 1580 | self.state = self.beforeDoctypeSystemIdentifierState | ||
| 1581 | return True | ||
| 1582 | |||
| 1583 | def beforeDoctypeSystemIdentifierState(self): | ||
| 1584 | data = self.stream.char() | ||
| 1585 | if data in spaceCharacters: | ||
| 1586 | pass | ||
| 1587 | elif data == "\"": | ||
| 1588 | self.currentToken["systemId"] = "" | ||
| 1589 | self.state = self.doctypeSystemIdentifierDoubleQuotedState | ||
| 1590 | elif data == "'": | ||
| 1591 | self.currentToken["systemId"] = "" | ||
| 1592 | self.state = self.doctypeSystemIdentifierSingleQuotedState | ||
| 1593 | elif data == ">": | ||
| 1594 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1595 | "unexpected-char-in-doctype"}) | ||
| 1596 | self.currentToken["correct"] = False | ||
| 1597 | self.tokenQueue.append(self.currentToken) | ||
| 1598 | self.state = self.dataState | ||
| 1599 | elif data is EOF: | ||
| 1600 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1601 | "eof-in-doctype"}) | ||
| 1602 | self.currentToken["correct"] = False | ||
| 1603 | self.tokenQueue.append(self.currentToken) | ||
| 1604 | self.state = self.dataState | ||
| 1605 | else: | ||
| 1606 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1607 | "unexpected-char-in-doctype"}) | ||
| 1608 | self.currentToken["correct"] = False | ||
| 1609 | self.state = self.bogusDoctypeState | ||
| 1610 | return True | ||
| 1611 | |||
| 1612 | def doctypeSystemIdentifierDoubleQuotedState(self): | ||
| 1613 | data = self.stream.char() | ||
| 1614 | if data == "\"": | ||
| 1615 | self.state = self.afterDoctypeSystemIdentifierState | ||
| 1616 | elif data == "\u0000": | ||
| 1617 | self.tokenQueue.append({"type": tokenTypes["ParseError"], | ||
| 1618 | "data": "invalid-codepoint"}) | ||
| 1619 | self.currentToken["systemId"] += "\uFFFD" | ||
| 1620 | elif data == ">": | ||
| 1621 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1622 | "unexpected-end-of-doctype"}) | ||
| 1623 | self.currentToken["correct"] = False | ||
| 1624 | self.tokenQueue.append(self.currentToken) | ||
| 1625 | self.state = self.dataState | ||
| 1626 | elif data is EOF: | ||
| 1627 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1628 | "eof-in-doctype"}) | ||
| 1629 | self.currentToken["correct"] = False | ||
| 1630 | self.tokenQueue.append(self.currentToken) | ||
| 1631 | self.state = self.dataState | ||
| 1632 | else: | ||
| 1633 | self.currentToken["systemId"] += data | ||
| 1634 | return True | ||
| 1635 | |||
| 1636 | def doctypeSystemIdentifierSingleQuotedState(self): | ||
| 1637 | data = self.stream.char() | ||
| 1638 | if data == "'": | ||
| 1639 | self.state = self.afterDoctypeSystemIdentifierState | ||
| 1640 | elif data == "\u0000": | ||
| 1641 | self.tokenQueue.append({"type": tokenTypes["ParseError"], | ||
| 1642 | "data": "invalid-codepoint"}) | ||
| 1643 | self.currentToken["systemId"] += "\uFFFD" | ||
| 1644 | elif data == ">": | ||
| 1645 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1646 | "unexpected-end-of-doctype"}) | ||
| 1647 | self.currentToken["correct"] = False | ||
| 1648 | self.tokenQueue.append(self.currentToken) | ||
| 1649 | self.state = self.dataState | ||
| 1650 | elif data is EOF: | ||
| 1651 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1652 | "eof-in-doctype"}) | ||
| 1653 | self.currentToken["correct"] = False | ||
| 1654 | self.tokenQueue.append(self.currentToken) | ||
| 1655 | self.state = self.dataState | ||
| 1656 | else: | ||
| 1657 | self.currentToken["systemId"] += data | ||
| 1658 | return True | ||
| 1659 | |||
| 1660 | def afterDoctypeSystemIdentifierState(self): | ||
| 1661 | data = self.stream.char() | ||
| 1662 | if data in spaceCharacters: | ||
| 1663 | pass | ||
| 1664 | elif data == ">": | ||
| 1665 | self.tokenQueue.append(self.currentToken) | ||
| 1666 | self.state = self.dataState | ||
| 1667 | elif data is EOF: | ||
| 1668 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1669 | "eof-in-doctype"}) | ||
| 1670 | self.currentToken["correct"] = False | ||
| 1671 | self.tokenQueue.append(self.currentToken) | ||
| 1672 | self.state = self.dataState | ||
| 1673 | else: | ||
| 1674 | self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": | ||
| 1675 | "unexpected-char-in-doctype"}) | ||
| 1676 | self.state = self.bogusDoctypeState | ||
| 1677 | return True | ||
| 1678 | |||
| 1679 | def bogusDoctypeState(self): | ||
| 1680 | data = self.stream.char() | ||
| 1681 | if data == ">": | ||
| 1682 | self.tokenQueue.append(self.currentToken) | ||
| 1683 | self.state = self.dataState | ||
| 1684 | elif data is EOF: | ||
| 1685 | # XXX EMIT: unget EOF, emit the doctype, then reprocess in the data state | ||
| 1686 | self.stream.unget(data) | ||
| 1687 | self.tokenQueue.append(self.currentToken) | ||
| 1688 | self.state = self.dataState | ||
| 1689 | else: | ||
| 1690 | pass | ||
| 1691 | return True | ||
| 1692 | |||
| 1693 | def cdataSectionState(self): | ||
| 1694 | data = [] | ||
| 1695 | while True: | ||
| 1696 | data.append(self.stream.charsUntil("]")) | ||
| 1697 | data.append(self.stream.charsUntil(">")) | ||
| 1698 | char = self.stream.char() | ||
| 1699 | if char is EOF: | ||
| 1700 | break | ||
| 1701 | else: | ||
| 1702 | assert char == ">" | ||
| 1703 | if data[-1][-2:] == "]]": | ||
| 1704 | data[-1] = data[-1][:-2] | ||
| 1705 | break | ||
| 1706 | else: | ||
| 1707 | data.append(char) | ||
| 1708 | |||
| 1709 | data = "".join(data) # pylint:disable=redefined-variable-type | ||
| 1710 | # Deal with null here rather than in the parser | ||
| 1711 | nullCount = data.count("\u0000") | ||
| 1712 | if nullCount > 0: | ||
| 1713 | for _ in range(nullCount): | ||
| 1714 | self.tokenQueue.append({"type": tokenTypes["ParseError"], | ||
| 1715 | "data": "invalid-codepoint"}) | ||
| 1716 | data = data.replace("\u0000", "\uFFFD") | ||
| 1717 | if data: | ||
| 1718 | self.tokenQueue.append({"type": tokenTypes["Characters"], | ||
| 1719 | "data": data}) | ||
| 1720 | self.state = self.dataState | ||
| 1721 | return True | ||
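Each state method above follows the same contract: consume characters from self.stream, append any tokens or parse errors to self.tokenQueue, set self.state to the next state, and return True so the driving loop keeps going. A minimal sketch of exercising the DOCTYPE states through the vendored tokenizer (assuming this venv's pip._vendor.html5lib is importable):

    from pip._vendor.html5lib._tokenizer import HTMLTokenizer

    # Iterating the tokenizer pumps the state machine and drains tokenQueue.
    doc = '<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN">'
    for token in HTMLTokenizer(doc):
        print(token)  # a Doctype token dict: name, publicId, systemId, correct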
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/_trie/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/_trie/__init__.py new file mode 100644 index 0000000..ccc70bd --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/_trie/__init__.py | |||
| @@ -0,0 +1,14 @@ | |||
| 1 | from __future__ import absolute_import, division, unicode_literals | ||
| 2 | |||
| 3 | from .py import Trie as PyTrie | ||
| 4 | |||
| 5 | Trie = PyTrie | ||
| 6 | |||
| 7 | # pylint:disable=wrong-import-position | ||
| 8 | try: | ||
| 9 | from .datrie import Trie as DATrie | ||
| 10 | except ImportError: | ||
| 11 | pass | ||
| 12 | else: | ||
| 13 | Trie = DATrie | ||
| 14 | # pylint:enable=wrong-import-position | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/_trie/_base.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/_trie/_base.py new file mode 100644 index 0000000..ecfff32 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/_trie/_base.py | |||
| @@ -0,0 +1,37 @@ | |||
| 1 | from __future__ import absolute_import, division, unicode_literals | ||
| 2 | |||
| 3 | from collections.abc import Mapping | ||
| 4 | |||
| 5 | |||
| 6 | class Trie(Mapping): | ||
| 7 | """Abstract base class for tries""" | ||
| 8 | |||
| 9 | def keys(self, prefix=None): | ||
| 10 | # pylint:disable=arguments-differ | ||
| 11 | keys = super(Trie, self).keys() | ||
| 12 | |||
| 13 | if prefix is None: | ||
| 14 | return set(keys) | ||
| 15 | |||
| 16 | return {x for x in keys if x.startswith(prefix)} | ||
| 17 | |||
| 18 | def has_keys_with_prefix(self, prefix): | ||
| 19 | for key in self.keys(): | ||
| 20 | if key.startswith(prefix): | ||
| 21 | return True | ||
| 22 | |||
| 23 | return False | ||
| 24 | |||
| 25 | def longest_prefix(self, prefix): | ||
| 26 | if prefix in self: | ||
| 27 | return prefix | ||
| 28 | |||
| 29 | for i in range(1, len(prefix) + 1): | ||
| 30 | if prefix[:-i] in self: | ||
| 31 | return prefix[:-i] | ||
| 32 | |||
| 33 | raise KeyError(prefix) | ||
| 34 | |||
| 35 | def longest_prefix_item(self, prefix): | ||
| 36 | lprefix = self.longest_prefix(prefix) | ||
| 37 | return (lprefix, self[lprefix]) | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/_trie/datrie.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/_trie/datrie.py new file mode 100644 index 0000000..cb1af60 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/_trie/datrie.py | |||
| @@ -0,0 +1,44 @@ | |||
| 1 | from __future__ import absolute_import, division, unicode_literals | ||
| 2 | |||
| 3 | from datrie import Trie as DATrie | ||
| 4 | from pip._vendor.six import text_type | ||
| 5 | |||
| 6 | from ._base import Trie as ABCTrie | ||
| 7 | |||
| 8 | |||
| 9 | class Trie(ABCTrie): | ||
| 10 | def __init__(self, data): | ||
| 11 | chars = set() | ||
| 12 | for key in data.keys(): | ||
| 13 | if not isinstance(key, text_type): | ||
| 14 | raise TypeError("All keys must be strings") | ||
| 15 | for char in key: | ||
| 16 | chars.add(char) | ||
| 17 | |||
| 18 | self._data = DATrie("".join(chars)) | ||
| 19 | for key, value in data.items(): | ||
| 20 | self._data[key] = value | ||
| 21 | |||
| 22 | def __contains__(self, key): | ||
| 23 | return key in self._data | ||
| 24 | |||
| 25 | def __len__(self): | ||
| 26 | return len(self._data) | ||
| 27 | |||
| 28 | def __iter__(self): | ||
| 29 | raise NotImplementedError() | ||
| 30 | |||
| 31 | def __getitem__(self, key): | ||
| 32 | return self._data[key] | ||
| 33 | |||
| 34 | def keys(self, prefix=None): | ||
| 35 | return self._data.keys(prefix) | ||
| 36 | |||
| 37 | def has_keys_with_prefix(self, prefix): | ||
| 38 | return self._data.has_keys_with_prefix(prefix) | ||
| 39 | |||
| 40 | def longest_prefix(self, prefix): | ||
| 41 | return self._data.longest_prefix(prefix) | ||
| 42 | |||
| 43 | def longest_prefix_item(self, prefix): | ||
| 44 | return self._data.longest_prefix_item(prefix) | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/_trie/py.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/_trie/py.py new file mode 100644 index 0000000..5531263 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/_trie/py.py | |||
| @@ -0,0 +1,67 @@ | |||
| 1 | from __future__ import absolute_import, division, unicode_literals | ||
| 2 | from pip._vendor.six import text_type | ||
| 3 | |||
| 4 | from bisect import bisect_left | ||
| 5 | |||
| 6 | from ._base import Trie as ABCTrie | ||
| 7 | |||
| 8 | |||
| 9 | class Trie(ABCTrie): | ||
| 10 | def __init__(self, data): | ||
| 11 | if not all(isinstance(x, text_type) for x in data.keys()): | ||
| 12 | raise TypeError("All keys must be strings") | ||
| 13 | |||
| 14 | self._data = data | ||
| 15 | self._keys = sorted(data.keys()) | ||
| 16 | self._cachestr = "" | ||
| 17 | self._cachepoints = (0, len(data)) | ||
| 18 | |||
| 19 | def __contains__(self, key): | ||
| 20 | return key in self._data | ||
| 21 | |||
| 22 | def __len__(self): | ||
| 23 | return len(self._data) | ||
| 24 | |||
| 25 | def __iter__(self): | ||
| 26 | return iter(self._data) | ||
| 27 | |||
| 28 | def __getitem__(self, key): | ||
| 29 | return self._data[key] | ||
| 30 | |||
| 31 | def keys(self, prefix=None): | ||
| 32 | if prefix is None or prefix == "" or not self._keys: | ||
| 33 | return set(self._keys) | ||
| 34 | |||
| 35 | if prefix.startswith(self._cachestr): | ||
| 36 | lo, hi = self._cachepoints | ||
| 37 | start = i = bisect_left(self._keys, prefix, lo, hi) | ||
| 38 | else: | ||
| 39 | start = i = bisect_left(self._keys, prefix) | ||
| 40 | |||
| 41 | keys = set() | ||
| 42 | if start == len(self._keys): | ||
| 43 | return keys | ||
| 44 | |||
| 45 | while i < len(self._keys) and self._keys[i].startswith(prefix): | ||
| 46 | keys.add(self._keys[i]) | ||
| 47 | i += 1 | ||
| 48 | |||
| 49 | self._cachestr = prefix | ||
| 50 | self._cachepoints = (start, i) | ||
| 51 | |||
| 52 | return keys | ||
| 53 | |||
| 54 | def has_keys_with_prefix(self, prefix): | ||
| 55 | if prefix in self._data: | ||
| 56 | return True | ||
| 57 | |||
| 58 | if prefix.startswith(self._cachestr): | ||
| 59 | lo, hi = self._cachepoints | ||
| 60 | i = bisect_left(self._keys, prefix, lo, hi) | ||
| 61 | else: | ||
| 62 | i = bisect_left(self._keys, prefix) | ||
| 63 | |||
| 64 | if i == len(self._keys): | ||
| 65 | return False | ||
| 66 | |||
| 67 | return self._keys[i].startswith(prefix) | ||
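A short sketch of this pure-Python trie in use (hypothetical keys; the Trie alias in _trie/__init__.py above resolves to this class unless datrie is installed):

    from pip._vendor.html5lib._trie import Trie

    t = Trie({"foo": 1, "foobar": 2, "bar": 3})
    t.keys("foo")                  # {'foo', 'foobar'}, and primes the bisect cache
    t.has_keys_with_prefix("fo")   # True
    t.longest_prefix("foobaz")     # 'foo', the longest key prefixing the query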
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/_utils.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/_utils.py new file mode 100644 index 0000000..a559fa0 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/_utils.py | |||
| @@ -0,0 +1,124 @@ | |||
| 1 | from __future__ import absolute_import, division, unicode_literals | ||
| 2 | |||
| 3 | from types import ModuleType | ||
| 4 | |||
| 5 | from pip._vendor.six import text_type | ||
| 6 | |||
| 7 | try: | ||
| 8 | import xml.etree.cElementTree as default_etree | ||
| 9 | except ImportError: | ||
| 10 | import xml.etree.ElementTree as default_etree | ||
| 11 | |||
| 12 | |||
| 13 | __all__ = ["default_etree", "MethodDispatcher", "isSurrogatePair", | ||
| 14 | "surrogatePairToCodepoint", "moduleFactoryFactory", | ||
| 15 | "supports_lone_surrogates"] | ||
| 16 | |||
| 17 | |||
| 18 | # Platforms not supporting lone surrogates (\uD800-\uDFFF) should be | ||
| 19 | # caught by the below test. In general this would be any platform | ||
| 20 | # using UTF-16 as its encoding of unicode strings, such as | ||
| 21 | # Jython. This is because UTF-16 itself is based on the use of such | ||
| 22 | # surrogates, and there is no mechanism to further escape such | ||
| 23 | # escapes. | ||
| 24 | try: | ||
| 25 | _x = eval('"\\uD800"') # pylint:disable=eval-used | ||
| 26 | if not isinstance(_x, text_type): | ||
| 27 | # We need this with u"" because of http://bugs.jython.org/issue2039 | ||
| 28 | _x = eval('u"\\uD800"') # pylint:disable=eval-used | ||
| 29 | assert isinstance(_x, text_type) | ||
| 30 | except: # pylint:disable=bare-except | ||
| 31 | supports_lone_surrogates = False | ||
| 32 | else: | ||
| 33 | supports_lone_surrogates = True | ||
| 34 | |||
| 35 | |||
| 36 | class MethodDispatcher(dict): | ||
| 37 | """Dict with 2 special properties: | ||
| 38 | |||
| 39 | On initialization, keys that are lists, sets or tuples are converted | ||
| 40 | to multiple keys, so accessing any one of the items in the original | ||
| 41 | list-like object returns the matching value: | ||
| 42 | |||
| 43 | md = MethodDispatcher([(("foo", "bar"), "baz")]) | ||
| 44 | md["foo"] == "baz" | ||
| 45 | |||
| 46 | A default value can be set through the default attribute. | ||
| 47 | """ | ||
| 48 | |||
| 49 | def __init__(self, items=()): | ||
| 50 | # Using _dictEntries instead of directly assigning to self is about | ||
| 51 | # twice as fast. Please do careful performance testing before changing | ||
| 52 | # anything here. | ||
| 53 | _dictEntries = [] | ||
| 54 | for name, value in items: | ||
| 55 | if isinstance(name, (list, tuple, frozenset, set)): | ||
| 56 | for item in name: | ||
| 57 | _dictEntries.append((item, value)) | ||
| 58 | else: | ||
| 59 | _dictEntries.append((name, value)) | ||
| 60 | dict.__init__(self, _dictEntries) | ||
| 61 | assert len(self) == len(_dictEntries) | ||
| 62 | self.default = None | ||
| 63 | |||
| 64 | def __getitem__(self, key): | ||
| 65 | return dict.get(self, key, self.default) | ||
| 66 | |||
| 67 | |||
| 68 | # Some utility functions to deal with weirdness around UCS2 vs UCS4 | ||
| 69 | # python builds | ||
| 70 | |||
| 71 | def isSurrogatePair(data): | ||
| 72 | return (len(data) == 2 and | ||
| 73 | ord(data[0]) >= 0xD800 and ord(data[0]) <= 0xDBFF and | ||
| 74 | ord(data[1]) >= 0xDC00 and ord(data[1]) <= 0xDFFF) | ||
| 75 | |||
| 76 | |||
| 77 | def surrogatePairToCodepoint(data): | ||
| 78 | char_val = (0x10000 + (ord(data[0]) - 0xD800) * 0x400 + | ||
| 79 | (ord(data[1]) - 0xDC00)) | ||
| 80 | return char_val | ||
| 81 | |||
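A worked example of the pairing arithmetic: U+1F600 is stored on narrow builds as the surrogate pair D83D/DE00, and the formula below recombines it.

    from pip._vendor.html5lib._utils import (isSurrogatePair,
                                             surrogatePairToCodepoint)

    pair = "\uD83D\uDE00"
    assert isSurrogatePair(pair)  # lead in D800-DBFF, trail in DC00-DFFF
    # 0x10000 + (0xD83D - 0xD800) * 0x400 + (0xDE00 - 0xDC00) == 0x1F600
    assert surrogatePairToCodepoint(pair) == 0x1F600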
| 82 | # Module Factory Factory (no, this isn't Java, I know) | ||
| 83 | # Here to stop this being duplicated all over the place. | ||
| 84 | |||
| 85 | |||
| 86 | def moduleFactoryFactory(factory): | ||
| 87 | moduleCache = {} | ||
| 88 | |||
| 89 | def moduleFactory(baseModule, *args, **kwargs): | ||
| 90 | if isinstance(ModuleType.__name__, type("")): | ||
| 91 | name = "_%s_factory" % baseModule.__name__ | ||
| 92 | else: | ||
| 93 | name = b"_%s_factory" % baseModule.__name__ | ||
| 94 | |||
| 95 | kwargs_tuple = tuple(kwargs.items()) | ||
| 96 | |||
| 97 | try: | ||
| 98 | return moduleCache[name][args][kwargs_tuple] | ||
| 99 | except KeyError: | ||
| 100 | mod = ModuleType(name) | ||
| 101 | objs = factory(baseModule, *args, **kwargs) | ||
| 102 | mod.__dict__.update(objs) | ||
| 103 | if "name" not in moduleCache: | ||
| 104 | moduleCache[name] = {} | ||
| 105 | if "args" not in moduleCache[name]: | ||
| 106 | moduleCache[name][args] = {} | ||
| 107 | if "kwargs" not in moduleCache[name][args]: | ||
| 108 | moduleCache[name][args][kwargs_tuple] = {} | ||
| 109 | moduleCache[name][args][kwargs_tuple] = mod | ||
| 110 | return mod | ||
| 111 | |||
| 112 | return moduleFactory | ||
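A hypothetical sketch of the pattern: the wrapped factory runs once per distinct (module, args, kwargs) signature, and its returned dict becomes the synthetic module's namespace.

    from pip._vendor.html5lib._utils import moduleFactoryFactory

    def factory(baseModule, suffix):
        return {"greet": lambda: "hello-" + suffix}

    getModule = moduleFactoryFactory(factory)

    import types
    mod = getModule(types, "en")           # builds a "_types_factory" module
    assert mod.greet() == "hello-en"
    assert getModule(types, "en") is mod   # same signature: cache hit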
| 113 | |||
| 114 | |||
| 115 | def memoize(func): | ||
| 116 | cache = {} | ||
| 117 | |||
| 118 | def wrapped(*args, **kwargs): | ||
| 119 | key = (tuple(args), tuple(kwargs.items())) | ||
| 120 | if key not in cache: | ||
| 121 | cache[key] = func(*args, **kwargs) | ||
| 122 | return cache[key] | ||
| 123 | |||
| 124 | return wrapped | ||
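memoize is the classic argument-keyed cache; for example:

    from pip._vendor.html5lib._utils import memoize

    @memoize
    def fib(n):
        return n if n < 2 else fib(n - 1) + fib(n - 2)

    fib(30)  # each subproblem is computed once, then served from `cache`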
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/constants.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/constants.py new file mode 100644 index 0000000..bca155e --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/constants.py | |||
| @@ -0,0 +1,2947 @@ | |||
| 1 | from __future__ import absolute_import, division, unicode_literals | ||
| 2 | |||
| 3 | import string | ||
| 4 | |||
| 5 | EOF = None | ||
| 6 | |||
| 7 | E = { | ||
| 8 | "null-character": | ||
| 9 | "Null character in input stream, replaced with U+FFFD.", | ||
| 10 | "invalid-codepoint": | ||
| 11 | "Invalid codepoint in stream.", | ||
| 12 | "incorrectly-placed-solidus": | ||
| 13 | "Solidus (/) incorrectly placed in tag.", | ||
| 14 | "incorrect-cr-newline-entity": | ||
| 15 | "Incorrect CR newline entity, replaced with LF.", | ||
| 16 | "illegal-windows-1252-entity": | ||
| 17 | "Entity used with illegal number (windows-1252 reference).", | ||
| 18 | "cant-convert-numeric-entity": | ||
| 19 | "Numeric entity couldn't be converted to character " | ||
| 20 | "(codepoint U+%(charAsInt)08x).", | ||
| 21 | "illegal-codepoint-for-numeric-entity": | ||
| 22 | "Numeric entity represents an illegal codepoint: " | ||
| 23 | "U+%(charAsInt)08x.", | ||
| 24 | "numeric-entity-without-semicolon": | ||
| 25 | "Numeric entity didn't end with ';'.", | ||
| 26 | "expected-numeric-entity-but-got-eof": | ||
| 27 | "Numeric entity expected. Got end of file instead.", | ||
| 28 | "expected-numeric-entity": | ||
| 29 | "Numeric entity expected but none found.", | ||
| 30 | "named-entity-without-semicolon": | ||
| 31 | "Named entity didn't end with ';'.", | ||
| 32 | "expected-named-entity": | ||
| 33 | "Named entity expected. Got none.", | ||
| 34 | "attributes-in-end-tag": | ||
| 35 | "End tag contains unexpected attributes.", | ||
| 36 | 'self-closing-flag-on-end-tag': | ||
| 37 | "End tag contains unexpected self-closing flag.", | ||
| 38 | "expected-tag-name-but-got-right-bracket": | ||
| 39 | "Expected tag name. Got '>' instead.", | ||
| 40 | "expected-tag-name-but-got-question-mark": | ||
| 41 | "Expected tag name. Got '?' instead. (HTML doesn't " | ||
| 42 | "support processing instructions.)", | ||
| 43 | "expected-tag-name": | ||
| 44 | "Expected tag name. Got something else instead", | ||
| 45 | "expected-closing-tag-but-got-right-bracket": | ||
| 46 | "Expected closing tag. Got '>' instead. Ignoring '</>'.", | ||
| 47 | "expected-closing-tag-but-got-eof": | ||
| 48 | "Expected closing tag. Unexpected end of file.", | ||
| 49 | "expected-closing-tag-but-got-char": | ||
| 50 | "Expected closing tag. Unexpected character '%(data)s' found.", | ||
| 51 | "eof-in-tag-name": | ||
| 52 | "Unexpected end of file in the tag name.", | ||
| 53 | "expected-attribute-name-but-got-eof": | ||
| 54 | "Unexpected end of file. Expected attribute name instead.", | ||
| 55 | "eof-in-attribute-name": | ||
| 56 | "Unexpected end of file in attribute name.", | ||
| 57 | "invalid-character-in-attribute-name": | ||
| 58 | "Invalid character in attribute name", | ||
| 59 | "duplicate-attribute": | ||
| 60 | "Dropped duplicate attribute on tag.", | ||
| 61 | "expected-end-of-tag-name-but-got-eof": | ||
| 62 | "Unexpected end of file. Expected = or end of tag.", | ||
| 63 | "expected-attribute-value-but-got-eof": | ||
| 64 | "Unexpected end of file. Expected attribute value.", | ||
| 65 | "expected-attribute-value-but-got-right-bracket": | ||
| 66 | "Expected attribute value. Got '>' instead.", | ||
| 67 | 'equals-in-unquoted-attribute-value': | ||
| 68 | "Unexpected = in unquoted attribute", | ||
| 69 | 'unexpected-character-in-unquoted-attribute-value': | ||
| 70 | "Unexpected character in unquoted attribute", | ||
| 71 | "invalid-character-after-attribute-name": | ||
| 72 | "Unexpected character after attribute name.", | ||
| 73 | "unexpected-character-after-attribute-value": | ||
| 74 | "Unexpected character after attribute value.", | ||
| 75 | "eof-in-attribute-value-double-quote": | ||
| 76 | "Unexpected end of file in attribute value (\").", | ||
| 77 | "eof-in-attribute-value-single-quote": | ||
| 78 | "Unexpected end of file in attribute value (').", | ||
| 79 | "eof-in-attribute-value-no-quotes": | ||
| 80 | "Unexpected end of file in attribute value.", | ||
| 81 | "unexpected-EOF-after-solidus-in-tag": | ||
| 82 | "Unexpected end of file in tag. Expected >", | ||
| 83 | "unexpected-character-after-solidus-in-tag": | ||
| 84 | "Unexpected character after / in tag. Expected >", | ||
| 85 | "expected-dashes-or-doctype": | ||
| 86 | "Expected '--' or 'DOCTYPE'. Not found.", | ||
| 87 | "unexpected-bang-after-double-dash-in-comment": | ||
| 88 | "Unexpected ! after -- in comment", | ||
| 89 | "unexpected-space-after-double-dash-in-comment": | ||
| 90 | "Unexpected space after -- in comment", | ||
| 91 | "incorrect-comment": | ||
| 92 | "Incorrect comment.", | ||
| 93 | "eof-in-comment": | ||
| 94 | "Unexpected end of file in comment.", | ||
| 95 | "eof-in-comment-end-dash": | ||
| 96 | "Unexpected end of file in comment (-)", | ||
| 97 | "unexpected-dash-after-double-dash-in-comment": | ||
| 98 | "Unexpected '-' after '--' found in comment.", | ||
| 99 | "eof-in-comment-double-dash": | ||
| 100 | "Unexpected end of file in comment (--).", | ||
| 101 | "eof-in-comment-end-space-state": | ||
| 102 | "Unexpected end of file in comment.", | ||
| 103 | "eof-in-comment-end-bang-state": | ||
| 104 | "Unexpected end of file in comment.", | ||
| 105 | "unexpected-char-in-comment": | ||
| 106 | "Unexpected character in comment found.", | ||
| 107 | "need-space-after-doctype": | ||
| 108 | "No space after literal string 'DOCTYPE'.", | ||
| 109 | "expected-doctype-name-but-got-right-bracket": | ||
| 110 | "Unexpected > character. Expected DOCTYPE name.", | ||
| 111 | "expected-doctype-name-but-got-eof": | ||
| 112 | "Unexpected end of file. Expected DOCTYPE name.", | ||
| 113 | "eof-in-doctype-name": | ||
| 114 | "Unexpected end of file in DOCTYPE name.", | ||
| 115 | "eof-in-doctype": | ||
| 116 | "Unexpected end of file in DOCTYPE.", | ||
| 117 | "expected-space-or-right-bracket-in-doctype": | ||
| 118 | "Expected space or '>'. Got '%(data)s'", | ||
| 119 | "unexpected-end-of-doctype": | ||
| 120 | "Unexpected end of DOCTYPE.", | ||
| 121 | "unexpected-char-in-doctype": | ||
| 122 | "Unexpected character in DOCTYPE.", | ||
| 123 | "eof-in-innerhtml": | ||
| 124 | "XXX innerHTML EOF", | ||
| 125 | "unexpected-doctype": | ||
| 126 | "Unexpected DOCTYPE. Ignored.", | ||
| 127 | "non-html-root": | ||
| 128 | "html needs to be the first start tag.", | ||
| 129 | "expected-doctype-but-got-eof": | ||
| 130 | "Unexpected End of file. Expected DOCTYPE.", | ||
| 131 | "unknown-doctype": | ||
| 132 | "Erroneous DOCTYPE.", | ||
| 133 | "expected-doctype-but-got-chars": | ||
| 134 | "Unexpected non-space characters. Expected DOCTYPE.", | ||
| 135 | "expected-doctype-but-got-start-tag": | ||
| 136 | "Unexpected start tag (%(name)s). Expected DOCTYPE.", | ||
| 137 | "expected-doctype-but-got-end-tag": | ||
| 138 | "Unexpected end tag (%(name)s). Expected DOCTYPE.", | ||
| 139 | "end-tag-after-implied-root": | ||
| 140 | "Unexpected end tag (%(name)s) after the (implied) root element.", | ||
| 141 | "expected-named-closing-tag-but-got-eof": | ||
| 142 | "Unexpected end of file. Expected end tag (%(name)s).", | ||
| 143 | "two-heads-are-not-better-than-one": | ||
| 144 | "Unexpected start tag head in existing head. Ignored.", | ||
| 145 | "unexpected-end-tag": | ||
| 146 | "Unexpected end tag (%(name)s). Ignored.", | ||
| 147 | "unexpected-start-tag-out-of-my-head": | ||
| 148 | "Unexpected start tag (%(name)s) that can be in head. Moved.", | ||
| 149 | "unexpected-start-tag": | ||
| 150 | "Unexpected start tag (%(name)s).", | ||
| 151 | "missing-end-tag": | ||
| 152 | "Missing end tag (%(name)s).", | ||
| 153 | "missing-end-tags": | ||
| 154 | "Missing end tags (%(name)s).", | ||
| 155 | "unexpected-start-tag-implies-end-tag": | ||
| 156 | "Unexpected start tag (%(startName)s) " | ||
| 157 | "implies end tag (%(endName)s).", | ||
| 158 | "unexpected-start-tag-treated-as": | ||
| 159 | "Unexpected start tag (%(originalName)s). Treated as %(newName)s.", | ||
| 160 | "deprecated-tag": | ||
| 161 | "Unexpected start tag %(name)s. Don't use it!", | ||
| 162 | "unexpected-start-tag-ignored": | ||
| 163 | "Unexpected start tag %(name)s. Ignored.", | ||
| 164 | "expected-one-end-tag-but-got-another": | ||
| 165 | "Unexpected end tag (%(gotName)s). " | ||
| 166 | "Missing end tag (%(expectedName)s).", | ||
| 167 | "end-tag-too-early": | ||
| 168 | "End tag (%(name)s) seen too early. Expected other end tag.", | ||
| 169 | "end-tag-too-early-named": | ||
| 170 | "Unexpected end tag (%(gotName)s). Expected end tag (%(expectedName)s).", | ||
| 171 | "end-tag-too-early-ignored": | ||
| 172 | "End tag (%(name)s) seen too early. Ignored.", | ||
| 173 | "adoption-agency-1.1": | ||
| 174 | "End tag (%(name)s) violates step 1, " | ||
| 175 | "paragraph 1 of the adoption agency algorithm.", | ||
| 176 | "adoption-agency-1.2": | ||
| 177 | "End tag (%(name)s) violates step 1, " | ||
| 178 | "paragraph 2 of the adoption agency algorithm.", | ||
| 179 | "adoption-agency-1.3": | ||
| 180 | "End tag (%(name)s) violates step 1, " | ||
| 181 | "paragraph 3 of the adoption agency algorithm.", | ||
| 182 | "adoption-agency-4.4": | ||
| 183 | "End tag (%(name)s) violates step 4, " | ||
| 184 | "paragraph 4 of the adoption agency algorithm.", | ||
| 185 | "unexpected-end-tag-treated-as": | ||
| 186 | "Unexpected end tag (%(originalName)s). Treated as %(newName)s.", | ||
| 187 | "no-end-tag": | ||
| 188 | "This element (%(name)s) has no end tag.", | ||
| 189 | "unexpected-implied-end-tag-in-table": | ||
| 190 | "Unexpected implied end tag (%(name)s) in the table phase.", | ||
| 191 | "unexpected-implied-end-tag-in-table-body": | ||
| 192 | "Unexpected implied end tag (%(name)s) in the table body phase.", | ||
| 193 | "unexpected-char-implies-table-voodoo": | ||
| 194 | "Unexpected non-space characters in " | ||
| 195 | "table context caused voodoo mode.", | ||
| 196 | "unexpected-hidden-input-in-table": | ||
| 197 | "Unexpected input with type hidden in table context.", | ||
| 198 | "unexpected-form-in-table": | ||
| 199 | "Unexpected form in table context.", | ||
| 200 | "unexpected-start-tag-implies-table-voodoo": | ||
| 201 | "Unexpected start tag (%(name)s) in " | ||
| 202 | "table context caused voodoo mode.", | ||
| 203 | "unexpected-end-tag-implies-table-voodoo": | ||
| 204 | "Unexpected end tag (%(name)s) in " | ||
| 205 | "table context caused voodoo mode.", | ||
| 206 | "unexpected-cell-in-table-body": | ||
| 207 | "Unexpected table cell start tag (%(name)s) " | ||
| 208 | "in the table body phase.", | ||
| 209 | "unexpected-cell-end-tag": | ||
| 210 | "Got table cell end tag (%(name)s) " | ||
| 211 | "while required end tags are missing.", | ||
| 212 | "unexpected-end-tag-in-table-body": | ||
| 213 | "Unexpected end tag (%(name)s) in the table body phase. Ignored.", | ||
| 214 | "unexpected-implied-end-tag-in-table-row": | ||
| 215 | "Unexpected implied end tag (%(name)s) in the table row phase.", | ||
| 216 | "unexpected-end-tag-in-table-row": | ||
| 217 | "Unexpected end tag (%(name)s) in the table row phase. Ignored.", | ||
| 218 | "unexpected-select-in-select": | ||
| 219 | "Unexpected select start tag in the select phase " | ||
| 220 | "treated as select end tag.", | ||
| 221 | "unexpected-input-in-select": | ||
| 222 | "Unexpected input start tag in the select phase.", | ||
| 223 | "unexpected-start-tag-in-select": | ||
| 224 | "Unexpected start tag token (%(name)s in the select phase. " | ||
| 225 | "Ignored.", | ||
| 226 | "unexpected-end-tag-in-select": | ||
| 227 | "Unexpected end tag (%(name)s) in the select phase. Ignored.", | ||
| 228 | "unexpected-table-element-start-tag-in-select-in-table": | ||
| 229 | "Unexpected table element start tag (%(name)s) in the select in table phase.", | ||
| 230 | "unexpected-table-element-end-tag-in-select-in-table": | ||
| 231 | "Unexpected table element end tag (%(name)s) in the select in table phase.", | ||
| 232 | "unexpected-char-after-body": | ||
| 233 | "Unexpected non-space characters in the after body phase.", | ||
| 234 | "unexpected-start-tag-after-body": | ||
| 235 | "Unexpected start tag token (%(name)s)" | ||
| 236 | " in the after body phase.", | ||
| 237 | "unexpected-end-tag-after-body": | ||
| 238 | "Unexpected end tag token (%(name)s)" | ||
| 239 | " in the after body phase.", | ||
| 240 | "unexpected-char-in-frameset": | ||
| 241 | "Unexpected characters in the frameset phase. Characters ignored.", | ||
| 242 | "unexpected-start-tag-in-frameset": | ||
| 243 | "Unexpected start tag token (%(name)s)" | ||
| 244 | " in the frameset phase. Ignored.", | ||
| 245 | "unexpected-frameset-in-frameset-innerhtml": | ||
| 246 | "Unexpected end tag token (frameset) " | ||
| 247 | "in the frameset phase (innerHTML).", | ||
| 248 | "unexpected-end-tag-in-frameset": | ||
| 249 | "Unexpected end tag token (%(name)s)" | ||
| 250 | " in the frameset phase. Ignored.", | ||
| 251 | "unexpected-char-after-frameset": | ||
| 252 | "Unexpected non-space characters in the " | ||
| 253 | "after frameset phase. Ignored.", | ||
| 254 | "unexpected-start-tag-after-frameset": | ||
| 255 | "Unexpected start tag (%(name)s)" | ||
| 256 | " in the after frameset phase. Ignored.", | ||
| 257 | "unexpected-end-tag-after-frameset": | ||
| 258 | "Unexpected end tag (%(name)s)" | ||
| 259 | " in the after frameset phase. Ignored.", | ||
| 260 | "unexpected-end-tag-after-body-innerhtml": | ||
| 261 | "Unexpected end tag after body(innerHtml)", | ||
| 262 | "expected-eof-but-got-char": | ||
| 263 | "Unexpected non-space characters. Expected end of file.", | ||
| 264 | "expected-eof-but-got-start-tag": | ||
| 265 | "Unexpected start tag (%(name)s)" | ||
| 266 | ". Expected end of file.", | ||
| 267 | "expected-eof-but-got-end-tag": | ||
| 268 | "Unexpected end tag (%(name)s)" | ||
| 269 | ". Expected end of file.", | ||
| 270 | "eof-in-table": | ||
| 271 | "Unexpected end of file. Expected table content.", | ||
| 272 | "eof-in-select": | ||
| 273 | "Unexpected end of file. Expected select content.", | ||
| 274 | "eof-in-frameset": | ||
| 275 | "Unexpected end of file. Expected frameset content.", | ||
| 276 | "eof-in-script-in-script": | ||
| 277 | "Unexpected end of file. Expected script content.", | ||
| 278 | "eof-in-foreign-lands": | ||
| 279 | "Unexpected end of file. Expected foreign content", | ||
| 280 | "non-void-element-with-trailing-solidus": | ||
| 281 | "Trailing solidus not allowed on element %(name)s", | ||
| 282 | "unexpected-html-element-in-foreign-content": | ||
| 283 | "Element %(name)s not allowed in a non-html context", | ||
| 284 | "unexpected-end-tag-before-html": | ||
| 285 | "Unexpected end tag (%(name)s) before html.", | ||
| 286 | "unexpected-inhead-noscript-tag": | ||
| 287 | "Element %(name)s not allowed in a inhead-noscript context", | ||
| 288 | "eof-in-head-noscript": | ||
| 289 | "Unexpected end of file. Expected inhead-noscript content", | ||
| 290 | "char-in-head-noscript": | ||
| 291 | "Unexpected non-space character. Expected inhead-noscript content", | ||
| 292 | "XXX-undefined-error": | ||
| 293 | "Undefined error (this sucks and should be fixed)", | ||
| 294 | } | ||
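Each ParseError token's "data" field keys into E, and an optional "datavars" dict fills the %-style placeholders; roughly (illustrative token, built by hand):

    from pip._vendor.html5lib.constants import E, tokenTypes

    err = {"type": tokenTypes["ParseError"],
           "data": "expected-space-or-right-bracket-in-doctype",
           "datavars": {"data": "x"}}
    print(E[err["data"]] % err.get("datavars", {}))
    # Expected space or '>'. Got 'x'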
| 295 | |||
| 296 | namespaces = { | ||
| 297 | "html": "http://www.w3.org/1999/xhtml", | ||
| 298 | "mathml": "http://www.w3.org/1998/Math/MathML", | ||
| 299 | "svg": "http://www.w3.org/2000/svg", | ||
| 300 | "xlink": "http://www.w3.org/1999/xlink", | ||
| 301 | "xml": "http://www.w3.org/XML/1998/namespace", | ||
| 302 | "xmlns": "http://www.w3.org/2000/xmlns/" | ||
| 303 | } | ||
| 304 | |||
| 305 | scopingElements = frozenset([ | ||
| 306 | (namespaces["html"], "applet"), | ||
| 307 | (namespaces["html"], "caption"), | ||
| 308 | (namespaces["html"], "html"), | ||
| 309 | (namespaces["html"], "marquee"), | ||
| 310 | (namespaces["html"], "object"), | ||
| 311 | (namespaces["html"], "table"), | ||
| 312 | (namespaces["html"], "td"), | ||
| 313 | (namespaces["html"], "th"), | ||
| 314 | (namespaces["mathml"], "mi"), | ||
| 315 | (namespaces["mathml"], "mo"), | ||
| 316 | (namespaces["mathml"], "mn"), | ||
| 317 | (namespaces["mathml"], "ms"), | ||
| 318 | (namespaces["mathml"], "mtext"), | ||
| 319 | (namespaces["mathml"], "annotation-xml"), | ||
| 320 | (namespaces["svg"], "foreignObject"), | ||
| 321 | (namespaces["svg"], "desc"), | ||
| 322 | (namespaces["svg"], "title"), | ||
| 323 | ]) | ||
| 324 | |||
| 325 | formattingElements = frozenset([ | ||
| 326 | (namespaces["html"], "a"), | ||
| 327 | (namespaces["html"], "b"), | ||
| 328 | (namespaces["html"], "big"), | ||
| 329 | (namespaces["html"], "code"), | ||
| 330 | (namespaces["html"], "em"), | ||
| 331 | (namespaces["html"], "font"), | ||
| 332 | (namespaces["html"], "i"), | ||
| 333 | (namespaces["html"], "nobr"), | ||
| 334 | (namespaces["html"], "s"), | ||
| 335 | (namespaces["html"], "small"), | ||
| 336 | (namespaces["html"], "strike"), | ||
| 337 | (namespaces["html"], "strong"), | ||
| 338 | (namespaces["html"], "tt"), | ||
| 339 | (namespaces["html"], "u") | ||
| 340 | ]) | ||
| 341 | |||
| 342 | specialElements = frozenset([ | ||
| 343 | (namespaces["html"], "address"), | ||
| 344 | (namespaces["html"], "applet"), | ||
| 345 | (namespaces["html"], "area"), | ||
| 346 | (namespaces["html"], "article"), | ||
| 347 | (namespaces["html"], "aside"), | ||
| 348 | (namespaces["html"], "base"), | ||
| 349 | (namespaces["html"], "basefont"), | ||
| 350 | (namespaces["html"], "bgsound"), | ||
| 351 | (namespaces["html"], "blockquote"), | ||
| 352 | (namespaces["html"], "body"), | ||
| 353 | (namespaces["html"], "br"), | ||
| 354 | (namespaces["html"], "button"), | ||
| 355 | (namespaces["html"], "caption"), | ||
| 356 | (namespaces["html"], "center"), | ||
| 357 | (namespaces["html"], "col"), | ||
| 358 | (namespaces["html"], "colgroup"), | ||
| 359 | (namespaces["html"], "command"), | ||
| 360 | (namespaces["html"], "dd"), | ||
| 361 | (namespaces["html"], "details"), | ||
| 362 | (namespaces["html"], "dir"), | ||
| 363 | (namespaces["html"], "div"), | ||
| 364 | (namespaces["html"], "dl"), | ||
| 365 | (namespaces["html"], "dt"), | ||
| 366 | (namespaces["html"], "embed"), | ||
| 367 | (namespaces["html"], "fieldset"), | ||
| 368 | (namespaces["html"], "figure"), | ||
| 369 | (namespaces["html"], "footer"), | ||
| 370 | (namespaces["html"], "form"), | ||
| 371 | (namespaces["html"], "frame"), | ||
| 372 | (namespaces["html"], "frameset"), | ||
| 373 | (namespaces["html"], "h1"), | ||
| 374 | (namespaces["html"], "h2"), | ||
| 375 | (namespaces["html"], "h3"), | ||
| 376 | (namespaces["html"], "h4"), | ||
| 377 | (namespaces["html"], "h5"), | ||
| 378 | (namespaces["html"], "h6"), | ||
| 379 | (namespaces["html"], "head"), | ||
| 380 | (namespaces["html"], "header"), | ||
| 381 | (namespaces["html"], "hr"), | ||
| 382 | (namespaces["html"], "html"), | ||
| 383 | (namespaces["html"], "iframe"), | ||
| 384 | # Note that image is commented out in the spec as "this isn't an | ||
| 385 | # element that can end up on the stack, so it doesn't matter," | ||
| 386 | (namespaces["html"], "image"), | ||
| 387 | (namespaces["html"], "img"), | ||
| 388 | (namespaces["html"], "input"), | ||
| 389 | (namespaces["html"], "isindex"), | ||
| 390 | (namespaces["html"], "li"), | ||
| 391 | (namespaces["html"], "link"), | ||
| 392 | (namespaces["html"], "listing"), | ||
| 393 | (namespaces["html"], "marquee"), | ||
| 394 | (namespaces["html"], "menu"), | ||
| 395 | (namespaces["html"], "meta"), | ||
| 396 | (namespaces["html"], "nav"), | ||
| 397 | (namespaces["html"], "noembed"), | ||
| 398 | (namespaces["html"], "noframes"), | ||
| 399 | (namespaces["html"], "noscript"), | ||
| 400 | (namespaces["html"], "object"), | ||
| 401 | (namespaces["html"], "ol"), | ||
| 402 | (namespaces["html"], "p"), | ||
| 403 | (namespaces["html"], "param"), | ||
| 404 | (namespaces["html"], "plaintext"), | ||
| 405 | (namespaces["html"], "pre"), | ||
| 406 | (namespaces["html"], "script"), | ||
| 407 | (namespaces["html"], "section"), | ||
| 408 | (namespaces["html"], "select"), | ||
| 409 | (namespaces["html"], "style"), | ||
| 410 | (namespaces["html"], "table"), | ||
| 411 | (namespaces["html"], "tbody"), | ||
| 412 | (namespaces["html"], "td"), | ||
| 413 | (namespaces["html"], "textarea"), | ||
| 414 | (namespaces["html"], "tfoot"), | ||
| 415 | (namespaces["html"], "th"), | ||
| 416 | (namespaces["html"], "thead"), | ||
| 417 | (namespaces["html"], "title"), | ||
| 418 | (namespaces["html"], "tr"), | ||
| 419 | (namespaces["html"], "ul"), | ||
| 420 | (namespaces["html"], "wbr"), | ||
| 421 | (namespaces["html"], "xmp"), | ||
| 422 | (namespaces["svg"], "foreignObject") | ||
| 423 | ]) | ||
| 424 | |||
| 425 | htmlIntegrationPointElements = frozenset([ | ||
| 426 | (namespaces["mathml"], "annotation-xml"), | ||
| 427 | (namespaces["svg"], "foreignObject"), | ||
| 428 | (namespaces["svg"], "desc"), | ||
| 429 | (namespaces["svg"], "title") | ||
| 430 | ]) | ||
| 431 | |||
| 432 | mathmlTextIntegrationPointElements = frozenset([ | ||
| 433 | (namespaces["mathml"], "mi"), | ||
| 434 | (namespaces["mathml"], "mo"), | ||
| 435 | (namespaces["mathml"], "mn"), | ||
| 436 | (namespaces["mathml"], "ms"), | ||
| 437 | (namespaces["mathml"], "mtext") | ||
| 438 | ]) | ||
| 439 | |||
| 440 | adjustSVGAttributes = { | ||
| 441 | "attributename": "attributeName", | ||
| 442 | "attributetype": "attributeType", | ||
| 443 | "basefrequency": "baseFrequency", | ||
| 444 | "baseprofile": "baseProfile", | ||
| 445 | "calcmode": "calcMode", | ||
| 446 | "clippathunits": "clipPathUnits", | ||
| 447 | "contentscripttype": "contentScriptType", | ||
| 448 | "contentstyletype": "contentStyleType", | ||
| 449 | "diffuseconstant": "diffuseConstant", | ||
| 450 | "edgemode": "edgeMode", | ||
| 451 | "externalresourcesrequired": "externalResourcesRequired", | ||
| 452 | "filterres": "filterRes", | ||
| 453 | "filterunits": "filterUnits", | ||
| 454 | "glyphref": "glyphRef", | ||
| 455 | "gradienttransform": "gradientTransform", | ||
| 456 | "gradientunits": "gradientUnits", | ||
| 457 | "kernelmatrix": "kernelMatrix", | ||
| 458 | "kernelunitlength": "kernelUnitLength", | ||
| 459 | "keypoints": "keyPoints", | ||
| 460 | "keysplines": "keySplines", | ||
| 461 | "keytimes": "keyTimes", | ||
| 462 | "lengthadjust": "lengthAdjust", | ||
| 463 | "limitingconeangle": "limitingConeAngle", | ||
| 464 | "markerheight": "markerHeight", | ||
| 465 | "markerunits": "markerUnits", | ||
| 466 | "markerwidth": "markerWidth", | ||
| 467 | "maskcontentunits": "maskContentUnits", | ||
| 468 | "maskunits": "maskUnits", | ||
| 469 | "numoctaves": "numOctaves", | ||
| 470 | "pathlength": "pathLength", | ||
| 471 | "patterncontentunits": "patternContentUnits", | ||
| 472 | "patterntransform": "patternTransform", | ||
| 473 | "patternunits": "patternUnits", | ||
| 474 | "pointsatx": "pointsAtX", | ||
| 475 | "pointsaty": "pointsAtY", | ||
| 476 | "pointsatz": "pointsAtZ", | ||
| 477 | "preservealpha": "preserveAlpha", | ||
| 478 | "preserveaspectratio": "preserveAspectRatio", | ||
| 479 | "primitiveunits": "primitiveUnits", | ||
| 480 | "refx": "refX", | ||
| 481 | "refy": "refY", | ||
| 482 | "repeatcount": "repeatCount", | ||
| 483 | "repeatdur": "repeatDur", | ||
| 484 | "requiredextensions": "requiredExtensions", | ||
| 485 | "requiredfeatures": "requiredFeatures", | ||
| 486 | "specularconstant": "specularConstant", | ||
| 487 | "specularexponent": "specularExponent", | ||
| 488 | "spreadmethod": "spreadMethod", | ||
| 489 | "startoffset": "startOffset", | ||
| 490 | "stddeviation": "stdDeviation", | ||
| 491 | "stitchtiles": "stitchTiles", | ||
| 492 | "surfacescale": "surfaceScale", | ||
| 493 | "systemlanguage": "systemLanguage", | ||
| 494 | "tablevalues": "tableValues", | ||
| 495 | "targetx": "targetX", | ||
| 496 | "targety": "targetY", | ||
| 497 | "textlength": "textLength", | ||
| 498 | "viewbox": "viewBox", | ||
| 499 | "viewtarget": "viewTarget", | ||
| 500 | "xchannelselector": "xChannelSelector", | ||
| 501 | "ychannelselector": "yChannelSelector", | ||
| 502 | "zoomandpan": "zoomAndPan" | ||
| 503 | } | ||
| 504 | |||
| 505 | adjustMathMLAttributes = {"definitionurl": "definitionURL"} | ||
| 506 | |||
| 507 | adjustForeignAttributes = { | ||
| 508 | "xlink:actuate": ("xlink", "actuate", namespaces["xlink"]), | ||
| 509 | "xlink:arcrole": ("xlink", "arcrole", namespaces["xlink"]), | ||
| 510 | "xlink:href": ("xlink", "href", namespaces["xlink"]), | ||
| 511 | "xlink:role": ("xlink", "role", namespaces["xlink"]), | ||
| 512 | "xlink:show": ("xlink", "show", namespaces["xlink"]), | ||
| 513 | "xlink:title": ("xlink", "title", namespaces["xlink"]), | ||
| 514 | "xlink:type": ("xlink", "type", namespaces["xlink"]), | ||
| 515 | "xml:base": ("xml", "base", namespaces["xml"]), | ||
| 516 | "xml:lang": ("xml", "lang", namespaces["xml"]), | ||
| 517 | "xml:space": ("xml", "space", namespaces["xml"]), | ||
| 518 | "xmlns": (None, "xmlns", namespaces["xmlns"]), | ||
| 519 | "xmlns:xlink": ("xmlns", "xlink", namespaces["xmlns"]) | ||
| 520 | } | ||
| 521 | |||
| 522 | unadjustForeignAttributes = dict([((ns, local), qname) for qname, (prefix, local, ns) in | ||
| 523 | adjustForeignAttributes.items()]) | ||
| 524 | |||
| 525 | spaceCharacters = frozenset([ | ||
| 526 | "\t", | ||
| 527 | "\n", | ||
| 528 | "\u000C", | ||
| 529 | " ", | ||
| 530 | "\r" | ||
| 531 | ]) | ||
| 532 | |||
| 533 | tableInsertModeElements = frozenset([ | ||
| 534 | "table", | ||
| 535 | "tbody", | ||
| 536 | "tfoot", | ||
| 537 | "thead", | ||
| 538 | "tr" | ||
| 539 | ]) | ||
| 540 | |||
| 541 | asciiLowercase = frozenset(string.ascii_lowercase) | ||
| 542 | asciiUppercase = frozenset(string.ascii_uppercase) | ||
| 543 | asciiLetters = frozenset(string.ascii_letters) | ||
| 544 | digits = frozenset(string.digits) | ||
| 545 | hexDigits = frozenset(string.hexdigits) | ||
| 546 | |||
| 547 | asciiUpper2Lower = dict([(ord(c), ord(c.lower())) | ||
| 548 | for c in string.ascii_uppercase]) | ||
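asciiUpper2Lower is a str.translate table; the doctype-name states above use it so that case folding stays ASCII-only, independent of locale and Unicode case rules:

    from pip._vendor.html5lib.constants import asciiUpper2Lower

    assert "DOCTYPE".translate(asciiUpper2Lower) == "doctype"
    assert "İ".translate(asciiUpper2Lower) == "İ"  # non-ASCII untouched, unlike str.lower()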
| 549 | |||
| 550 | # Heading elements need to be ordered | ||
| 551 | headingElements = ( | ||
| 552 | "h1", | ||
| 553 | "h2", | ||
| 554 | "h3", | ||
| 555 | "h4", | ||
| 556 | "h5", | ||
| 557 | "h6" | ||
| 558 | ) | ||
| 559 | |||
| 560 | voidElements = frozenset([ | ||
| 561 | "base", | ||
| 562 | "command", | ||
| 563 | "event-source", | ||
| 564 | "link", | ||
| 565 | "meta", | ||
| 566 | "hr", | ||
| 567 | "br", | ||
| 568 | "img", | ||
| 569 | "embed", | ||
| 570 | "param", | ||
| 571 | "area", | ||
| 572 | "col", | ||
| 573 | "input", | ||
| 574 | "source", | ||
| 575 | "track" | ||
| 576 | ]) | ||
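voidElements lists tags that take no end tag and no children. A hedged serializer sketch (the module's real serializer lives elsewhere in html5lib):

    def empty_element(name):
        # Void elements are emitted as a lone start tag; anything else
        # gets an explicit end tag.
        if name in voidElements:
            return "<%s>" % name
        return "<%s></%s>" % (name, name)

    empty_element("br")   # '<br>'
    empty_element("div")  # '<div></div>'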
| 577 | |||
| 578 | cdataElements = frozenset(['title', 'textarea']) | ||
| 579 | |||
| 580 | rcdataElements = frozenset([ | ||
| 581 | 'style', | ||
| 582 | 'script', | ||
| 583 | 'xmp', | ||
| 584 | 'iframe', | ||
| 585 | 'noembed', | ||
| 586 | 'noframes', | ||
| 587 | 'noscript' | ||
| 588 | ]) | ||
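Text inside these elements is emitted without entity escaping. A minimal sketch under that assumption, not the module's actual serializer logic:

    def serialize_text(parent, text):
        if parent in cdataElements or parent in rcdataElements:
            return text  # raw content: no entity escaping inside e.g. <script>
        return text.replace("&", "&amp;").replace("<", "&lt;")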
| 589 | |||
| 590 | booleanAttributes = { | ||
| 591 | "": frozenset(["irrelevant", "itemscope"]), | ||
| 592 | "style": frozenset(["scoped"]), | ||
| 593 | "img": frozenset(["ismap"]), | ||
| 594 | "audio": frozenset(["autoplay", "controls"]), | ||
| 595 | "video": frozenset(["autoplay", "controls"]), | ||
| 596 | "script": frozenset(["defer", "async"]), | ||
| 597 | "details": frozenset(["open"]), | ||
| 598 | "datagrid": frozenset(["multiple", "disabled"]), | ||
| 599 | "command": frozenset(["hidden", "disabled", "checked", "default"]), | ||
| 600 | "hr": frozenset(["noshade"]), | ||
| 601 | "menu": frozenset(["autosubmit"]), | ||
| 602 | "fieldset": frozenset(["disabled", "readonly"]), | ||
| 603 | "option": frozenset(["disabled", "readonly", "selected"]), | ||
| 604 | "optgroup": frozenset(["disabled", "readonly"]), | ||
| 605 | "button": frozenset(["disabled", "autofocus"]), | ||
| 606 | "input": frozenset(["disabled", "readonly", "required", "autofocus", "checked", "ismap"]), | ||
| 607 | "select": frozenset(["disabled", "readonly", "autofocus", "multiple"]), | ||
| 608 | "output": frozenset(["disabled", "readonly"]), | ||
| 609 | "iframe": frozenset(["seamless"]), | ||
| 610 | } | ||
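The empty-string key holds attributes that are boolean on any element; per-tag entries add element-specific ones. A lookup sketch with a hypothetical helper name:

    def is_boolean_attribute(tag, attr):
        return (attr in booleanAttributes.get(tag, frozenset()) or
                attr in booleanAttributes[""])

    is_boolean_attribute("script", "async")   # True
    is_boolean_attribute("div", "itemscope")  # True, via the global "" entry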
| 611 | |||
| 612 | # entitiesWindows1252 has to be _ordered_ and needs to have an index. It | ||
| 613 | # therefore can't be a frozenset. | ||
| 614 | entitiesWindows1252 = ( | ||
| 615 | 8364, # 0x80 0x20AC EURO SIGN | ||
| 616 | 65533, # 0x81 UNDEFINED | ||
| 617 | 8218, # 0x82 0x201A SINGLE LOW-9 QUOTATION MARK | ||
| 618 | 402, # 0x83 0x0192 LATIN SMALL LETTER F WITH HOOK | ||
| 619 | 8222, # 0x84 0x201E DOUBLE LOW-9 QUOTATION MARK | ||
| 620 | 8230, # 0x85 0x2026 HORIZONTAL ELLIPSIS | ||
| 621 | 8224, # 0x86 0x2020 DAGGER | ||
| 622 | 8225, # 0x87 0x2021 DOUBLE DAGGER | ||
| 623 | 710, # 0x88 0x02C6 MODIFIER LETTER CIRCUMFLEX ACCENT | ||
| 624 | 8240, # 0x89 0x2030 PER MILLE SIGN | ||
| 625 | 352, # 0x8A 0x0160 LATIN CAPITAL LETTER S WITH CARON | ||
| 626 | 8249, # 0x8B 0x2039 SINGLE LEFT-POINTING ANGLE QUOTATION MARK | ||
| 627 | 338, # 0x8C 0x0152 LATIN CAPITAL LIGATURE OE | ||
| 628 | 65533, # 0x8D UNDEFINED | ||
| 629 | 381, # 0x8E 0x017D LATIN CAPITAL LETTER Z WITH CARON | ||
| 630 | 65533, # 0x8F UNDEFINED | ||
| 631 | 65533, # 0x90 UNDEFINED | ||
| 632 | 8216, # 0x91 0x2018 LEFT SINGLE QUOTATION MARK | ||
| 633 | 8217, # 0x92 0x2019 RIGHT SINGLE QUOTATION MARK | ||
| 634 | 8220, # 0x93 0x201C LEFT DOUBLE QUOTATION MARK | ||
| 635 | 8221, # 0x94 0x201D RIGHT DOUBLE QUOTATION MARK | ||
| 636 | 8226, # 0x95 0x2022 BULLET | ||
| 637 | 8211, # 0x96 0x2013 EN DASH | ||
| 638 | 8212, # 0x97 0x2014 EM DASH | ||
| 639 | 732, # 0x98 0x02DC SMALL TILDE | ||
| 640 | 8482, # 0x99 0x2122 TRADE MARK SIGN | ||
| 641 | 353, # 0x9A 0x0161 LATIN SMALL LETTER S WITH CARON | ||
| 642 | 8250, # 0x9B 0x203A SINGLE RIGHT-POINTING ANGLE QUOTATION MARK | ||
| 643 | 339, # 0x9C 0x0153 LATIN SMALL LIGATURE OE | ||
| 644 | 65533, # 0x9D UNDEFINED | ||
| 645 | 382, # 0x9E 0x017E LATIN SMALL LETTER Z WITH CARON | ||
| 646 | 376 # 0x9F 0x0178 LATIN CAPITAL LETTER Y WITH DIAERESIS | ||
| 647 | ) | ||
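The tuple is indexed by codepoint minus 0x80, which is how numeric character references in the C1 control range get remapped to their Windows-1252 glyphs. A worked example:

    chr(entitiesWindows1252[0x93 - 0x80])  # '\u201c': &#x93; becomes a left double quote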
| 648 | |||
| 649 | xmlEntities = frozenset(['lt;', 'gt;', 'amp;', 'apos;', 'quot;']) | ||
| 650 | |||
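The entities table that follows lists each legacy name both with and without its trailing semicolon (e.g. "amp" and "amp;"), because browsers accept the bare forms for compatibility; newer names such as "Abreve;" appear only with the semicolon. A self-contained sketch, with a two-entry stub standing in for the full table below:

    entities_stub = {"copy": "\xa9", "copy;": "\xa9"}  # stub for the table below
    assert entities_stub["copy"] == entities_stub["copy;"] == "\xa9"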
| 651 | entities = { | ||
| 652 | "AElig": "\xc6", | ||
| 653 | "AElig;": "\xc6", | ||
| 654 | "AMP": "&", | ||
| 655 | "AMP;": "&", | ||
| 656 | "Aacute": "\xc1", | ||
| 657 | "Aacute;": "\xc1", | ||
| 658 | "Abreve;": "\u0102", | ||
| 659 | "Acirc": "\xc2", | ||
| 660 | "Acirc;": "\xc2", | ||
| 661 | "Acy;": "\u0410", | ||
| 662 | "Afr;": "\U0001d504", | ||
| 663 | "Agrave": "\xc0", | ||
| 664 | "Agrave;": "\xc0", | ||
| 665 | "Alpha;": "\u0391", | ||
| 666 | "Amacr;": "\u0100", | ||
| 667 | "And;": "\u2a53", | ||
| 668 | "Aogon;": "\u0104", | ||
| 669 | "Aopf;": "\U0001d538", | ||
| 670 | "ApplyFunction;": "\u2061", | ||
| 671 | "Aring": "\xc5", | ||
| 672 | "Aring;": "\xc5", | ||
| 673 | "Ascr;": "\U0001d49c", | ||
| 674 | "Assign;": "\u2254", | ||
| 675 | "Atilde": "\xc3", | ||
| 676 | "Atilde;": "\xc3", | ||
| 677 | "Auml": "\xc4", | ||
| 678 | "Auml;": "\xc4", | ||
| 679 | "Backslash;": "\u2216", | ||
| 680 | "Barv;": "\u2ae7", | ||
| 681 | "Barwed;": "\u2306", | ||
| 682 | "Bcy;": "\u0411", | ||
| 683 | "Because;": "\u2235", | ||
| 684 | "Bernoullis;": "\u212c", | ||
| 685 | "Beta;": "\u0392", | ||
| 686 | "Bfr;": "\U0001d505", | ||
| 687 | "Bopf;": "\U0001d539", | ||
| 688 | "Breve;": "\u02d8", | ||
| 689 | "Bscr;": "\u212c", | ||
| 690 | "Bumpeq;": "\u224e", | ||
| 691 | "CHcy;": "\u0427", | ||
| 692 | "COPY": "\xa9", | ||
| 693 | "COPY;": "\xa9", | ||
| 694 | "Cacute;": "\u0106", | ||
| 695 | "Cap;": "\u22d2", | ||
| 696 | "CapitalDifferentialD;": "\u2145", | ||
| 697 | "Cayleys;": "\u212d", | ||
| 698 | "Ccaron;": "\u010c", | ||
| 699 | "Ccedil": "\xc7", | ||
| 700 | "Ccedil;": "\xc7", | ||
| 701 | "Ccirc;": "\u0108", | ||
| 702 | "Cconint;": "\u2230", | ||
| 703 | "Cdot;": "\u010a", | ||
| 704 | "Cedilla;": "\xb8", | ||
| 705 | "CenterDot;": "\xb7", | ||
| 706 | "Cfr;": "\u212d", | ||
| 707 | "Chi;": "\u03a7", | ||
| 708 | "CircleDot;": "\u2299", | ||
| 709 | "CircleMinus;": "\u2296", | ||
| 710 | "CirclePlus;": "\u2295", | ||
| 711 | "CircleTimes;": "\u2297", | ||
| 712 | "ClockwiseContourIntegral;": "\u2232", | ||
| 713 | "CloseCurlyDoubleQuote;": "\u201d", | ||
| 714 | "CloseCurlyQuote;": "\u2019", | ||
| 715 | "Colon;": "\u2237", | ||
| 716 | "Colone;": "\u2a74", | ||
| 717 | "Congruent;": "\u2261", | ||
| 718 | "Conint;": "\u222f", | ||
| 719 | "ContourIntegral;": "\u222e", | ||
| 720 | "Copf;": "\u2102", | ||
| 721 | "Coproduct;": "\u2210", | ||
| 722 | "CounterClockwiseContourIntegral;": "\u2233", | ||
| 723 | "Cross;": "\u2a2f", | ||
| 724 | "Cscr;": "\U0001d49e", | ||
| 725 | "Cup;": "\u22d3", | ||
| 726 | "CupCap;": "\u224d", | ||
| 727 | "DD;": "\u2145", | ||
| 728 | "DDotrahd;": "\u2911", | ||
| 729 | "DJcy;": "\u0402", | ||
| 730 | "DScy;": "\u0405", | ||
| 731 | "DZcy;": "\u040f", | ||
| 732 | "Dagger;": "\u2021", | ||
| 733 | "Darr;": "\u21a1", | ||
| 734 | "Dashv;": "\u2ae4", | ||
| 735 | "Dcaron;": "\u010e", | ||
| 736 | "Dcy;": "\u0414", | ||
| 737 | "Del;": "\u2207", | ||
| 738 | "Delta;": "\u0394", | ||
| 739 | "Dfr;": "\U0001d507", | ||
| 740 | "DiacriticalAcute;": "\xb4", | ||
| 741 | "DiacriticalDot;": "\u02d9", | ||
| 742 | "DiacriticalDoubleAcute;": "\u02dd", | ||
| 743 | "DiacriticalGrave;": "`", | ||
| 744 | "DiacriticalTilde;": "\u02dc", | ||
| 745 | "Diamond;": "\u22c4", | ||
| 746 | "DifferentialD;": "\u2146", | ||
| 747 | "Dopf;": "\U0001d53b", | ||
| 748 | "Dot;": "\xa8", | ||
| 749 | "DotDot;": "\u20dc", | ||
| 750 | "DotEqual;": "\u2250", | ||
| 751 | "DoubleContourIntegral;": "\u222f", | ||
| 752 | "DoubleDot;": "\xa8", | ||
| 753 | "DoubleDownArrow;": "\u21d3", | ||
| 754 | "DoubleLeftArrow;": "\u21d0", | ||
| 755 | "DoubleLeftRightArrow;": "\u21d4", | ||
| 756 | "DoubleLeftTee;": "\u2ae4", | ||
| 757 | "DoubleLongLeftArrow;": "\u27f8", | ||
| 758 | "DoubleLongLeftRightArrow;": "\u27fa", | ||
| 759 | "DoubleLongRightArrow;": "\u27f9", | ||
| 760 | "DoubleRightArrow;": "\u21d2", | ||
| 761 | "DoubleRightTee;": "\u22a8", | ||
| 762 | "DoubleUpArrow;": "\u21d1", | ||
| 763 | "DoubleUpDownArrow;": "\u21d5", | ||
| 764 | "DoubleVerticalBar;": "\u2225", | ||
| 765 | "DownArrow;": "\u2193", | ||
| 766 | "DownArrowBar;": "\u2913", | ||
| 767 | "DownArrowUpArrow;": "\u21f5", | ||
| 768 | "DownBreve;": "\u0311", | ||
| 769 | "DownLeftRightVector;": "\u2950", | ||
| 770 | "DownLeftTeeVector;": "\u295e", | ||
| 771 | "DownLeftVector;": "\u21bd", | ||
| 772 | "DownLeftVectorBar;": "\u2956", | ||
| 773 | "DownRightTeeVector;": "\u295f", | ||
| 774 | "DownRightVector;": "\u21c1", | ||
| 775 | "DownRightVectorBar;": "\u2957", | ||
| 776 | "DownTee;": "\u22a4", | ||
| 777 | "DownTeeArrow;": "\u21a7", | ||
| 778 | "Downarrow;": "\u21d3", | ||
| 779 | "Dscr;": "\U0001d49f", | ||
| 780 | "Dstrok;": "\u0110", | ||
| 781 | "ENG;": "\u014a", | ||
| 782 | "ETH": "\xd0", | ||
| 783 | "ETH;": "\xd0", | ||
| 784 | "Eacute": "\xc9", | ||
| 785 | "Eacute;": "\xc9", | ||
| 786 | "Ecaron;": "\u011a", | ||
| 787 | "Ecirc": "\xca", | ||
| 788 | "Ecirc;": "\xca", | ||
| 789 | "Ecy;": "\u042d", | ||
| 790 | "Edot;": "\u0116", | ||
| 791 | "Efr;": "\U0001d508", | ||
| 792 | "Egrave": "\xc8", | ||
| 793 | "Egrave;": "\xc8", | ||
| 794 | "Element;": "\u2208", | ||
| 795 | "Emacr;": "\u0112", | ||
| 796 | "EmptySmallSquare;": "\u25fb", | ||
| 797 | "EmptyVerySmallSquare;": "\u25ab", | ||
| 798 | "Eogon;": "\u0118", | ||
| 799 | "Eopf;": "\U0001d53c", | ||
| 800 | "Epsilon;": "\u0395", | ||
| 801 | "Equal;": "\u2a75", | ||
| 802 | "EqualTilde;": "\u2242", | ||
| 803 | "Equilibrium;": "\u21cc", | ||
| 804 | "Escr;": "\u2130", | ||
| 805 | "Esim;": "\u2a73", | ||
| 806 | "Eta;": "\u0397", | ||
| 807 | "Euml": "\xcb", | ||
| 808 | "Euml;": "\xcb", | ||
| 809 | "Exists;": "\u2203", | ||
| 810 | "ExponentialE;": "\u2147", | ||
| 811 | "Fcy;": "\u0424", | ||
| 812 | "Ffr;": "\U0001d509", | ||
| 813 | "FilledSmallSquare;": "\u25fc", | ||
| 814 | "FilledVerySmallSquare;": "\u25aa", | ||
| 815 | "Fopf;": "\U0001d53d", | ||
| 816 | "ForAll;": "\u2200", | ||
| 817 | "Fouriertrf;": "\u2131", | ||
| 818 | "Fscr;": "\u2131", | ||
| 819 | "GJcy;": "\u0403", | ||
| 820 | "GT": ">", | ||
| 821 | "GT;": ">", | ||
| 822 | "Gamma;": "\u0393", | ||
| 823 | "Gammad;": "\u03dc", | ||
| 824 | "Gbreve;": "\u011e", | ||
| 825 | "Gcedil;": "\u0122", | ||
| 826 | "Gcirc;": "\u011c", | ||
| 827 | "Gcy;": "\u0413", | ||
| 828 | "Gdot;": "\u0120", | ||
| 829 | "Gfr;": "\U0001d50a", | ||
| 830 | "Gg;": "\u22d9", | ||
| 831 | "Gopf;": "\U0001d53e", | ||
| 832 | "GreaterEqual;": "\u2265", | ||
| 833 | "GreaterEqualLess;": "\u22db", | ||
| 834 | "GreaterFullEqual;": "\u2267", | ||
| 835 | "GreaterGreater;": "\u2aa2", | ||
| 836 | "GreaterLess;": "\u2277", | ||
| 837 | "GreaterSlantEqual;": "\u2a7e", | ||
| 838 | "GreaterTilde;": "\u2273", | ||
| 839 | "Gscr;": "\U0001d4a2", | ||
| 840 | "Gt;": "\u226b", | ||
| 841 | "HARDcy;": "\u042a", | ||
| 842 | "Hacek;": "\u02c7", | ||
| 843 | "Hat;": "^", | ||
| 844 | "Hcirc;": "\u0124", | ||
| 845 | "Hfr;": "\u210c", | ||
| 846 | "HilbertSpace;": "\u210b", | ||
| 847 | "Hopf;": "\u210d", | ||
| 848 | "HorizontalLine;": "\u2500", | ||
| 849 | "Hscr;": "\u210b", | ||
| 850 | "Hstrok;": "\u0126", | ||
| 851 | "HumpDownHump;": "\u224e", | ||
| 852 | "HumpEqual;": "\u224f", | ||
| 853 | "IEcy;": "\u0415", | ||
| 854 | "IJlig;": "\u0132", | ||
| 855 | "IOcy;": "\u0401", | ||
| 856 | "Iacute": "\xcd", | ||
| 857 | "Iacute;": "\xcd", | ||
| 858 | "Icirc": "\xce", | ||
| 859 | "Icirc;": "\xce", | ||
| 860 | "Icy;": "\u0418", | ||
| 861 | "Idot;": "\u0130", | ||
| 862 | "Ifr;": "\u2111", | ||
| 863 | "Igrave": "\xcc", | ||
| 864 | "Igrave;": "\xcc", | ||
| 865 | "Im;": "\u2111", | ||
| 866 | "Imacr;": "\u012a", | ||
| 867 | "ImaginaryI;": "\u2148", | ||
| 868 | "Implies;": "\u21d2", | ||
| 869 | "Int;": "\u222c", | ||
| 870 | "Integral;": "\u222b", | ||
| 871 | "Intersection;": "\u22c2", | ||
| 872 | "InvisibleComma;": "\u2063", | ||
| 873 | "InvisibleTimes;": "\u2062", | ||
| 874 | "Iogon;": "\u012e", | ||
| 875 | "Iopf;": "\U0001d540", | ||
| 876 | "Iota;": "\u0399", | ||
| 877 | "Iscr;": "\u2110", | ||
| 878 | "Itilde;": "\u0128", | ||
| 879 | "Iukcy;": "\u0406", | ||
| 880 | "Iuml": "\xcf", | ||
| 881 | "Iuml;": "\xcf", | ||
| 882 | "Jcirc;": "\u0134", | ||
| 883 | "Jcy;": "\u0419", | ||
| 884 | "Jfr;": "\U0001d50d", | ||
| 885 | "Jopf;": "\U0001d541", | ||
| 886 | "Jscr;": "\U0001d4a5", | ||
| 887 | "Jsercy;": "\u0408", | ||
| 888 | "Jukcy;": "\u0404", | ||
| 889 | "KHcy;": "\u0425", | ||
| 890 | "KJcy;": "\u040c", | ||
| 891 | "Kappa;": "\u039a", | ||
| 892 | "Kcedil;": "\u0136", | ||
| 893 | "Kcy;": "\u041a", | ||
| 894 | "Kfr;": "\U0001d50e", | ||
| 895 | "Kopf;": "\U0001d542", | ||
| 896 | "Kscr;": "\U0001d4a6", | ||
| 897 | "LJcy;": "\u0409", | ||
| 898 | "LT": "<", | ||
| 899 | "LT;": "<", | ||
| 900 | "Lacute;": "\u0139", | ||
| 901 | "Lambda;": "\u039b", | ||
| 902 | "Lang;": "\u27ea", | ||
| 903 | "Laplacetrf;": "\u2112", | ||
| 904 | "Larr;": "\u219e", | ||
| 905 | "Lcaron;": "\u013d", | ||
| 906 | "Lcedil;": "\u013b", | ||
| 907 | "Lcy;": "\u041b", | ||
| 908 | "LeftAngleBracket;": "\u27e8", | ||
| 909 | "LeftArrow;": "\u2190", | ||
| 910 | "LeftArrowBar;": "\u21e4", | ||
| 911 | "LeftArrowRightArrow;": "\u21c6", | ||
| 912 | "LeftCeiling;": "\u2308", | ||
| 913 | "LeftDoubleBracket;": "\u27e6", | ||
| 914 | "LeftDownTeeVector;": "\u2961", | ||
| 915 | "LeftDownVector;": "\u21c3", | ||
| 916 | "LeftDownVectorBar;": "\u2959", | ||
| 917 | "LeftFloor;": "\u230a", | ||
| 918 | "LeftRightArrow;": "\u2194", | ||
| 919 | "LeftRightVector;": "\u294e", | ||
| 920 | "LeftTee;": "\u22a3", | ||
| 921 | "LeftTeeArrow;": "\u21a4", | ||
| 922 | "LeftTeeVector;": "\u295a", | ||
| 923 | "LeftTriangle;": "\u22b2", | ||
| 924 | "LeftTriangleBar;": "\u29cf", | ||
| 925 | "LeftTriangleEqual;": "\u22b4", | ||
| 926 | "LeftUpDownVector;": "\u2951", | ||
| 927 | "LeftUpTeeVector;": "\u2960", | ||
| 928 | "LeftUpVector;": "\u21bf", | ||
| 929 | "LeftUpVectorBar;": "\u2958", | ||
| 930 | "LeftVector;": "\u21bc", | ||
| 931 | "LeftVectorBar;": "\u2952", | ||
| 932 | "Leftarrow;": "\u21d0", | ||
| 933 | "Leftrightarrow;": "\u21d4", | ||
| 934 | "LessEqualGreater;": "\u22da", | ||
| 935 | "LessFullEqual;": "\u2266", | ||
| 936 | "LessGreater;": "\u2276", | ||
| 937 | "LessLess;": "\u2aa1", | ||
| 938 | "LessSlantEqual;": "\u2a7d", | ||
| 939 | "LessTilde;": "\u2272", | ||
| 940 | "Lfr;": "\U0001d50f", | ||
| 941 | "Ll;": "\u22d8", | ||
| 942 | "Lleftarrow;": "\u21da", | ||
| 943 | "Lmidot;": "\u013f", | ||
| 944 | "LongLeftArrow;": "\u27f5", | ||
| 945 | "LongLeftRightArrow;": "\u27f7", | ||
| 946 | "LongRightArrow;": "\u27f6", | ||
| 947 | "Longleftarrow;": "\u27f8", | ||
| 948 | "Longleftrightarrow;": "\u27fa", | ||
| 949 | "Longrightarrow;": "\u27f9", | ||
| 950 | "Lopf;": "\U0001d543", | ||
| 951 | "LowerLeftArrow;": "\u2199", | ||
| 952 | "LowerRightArrow;": "\u2198", | ||
| 953 | "Lscr;": "\u2112", | ||
| 954 | "Lsh;": "\u21b0", | ||
| 955 | "Lstrok;": "\u0141", | ||
| 956 | "Lt;": "\u226a", | ||
| 957 | "Map;": "\u2905", | ||
| 958 | "Mcy;": "\u041c", | ||
| 959 | "MediumSpace;": "\u205f", | ||
| 960 | "Mellintrf;": "\u2133", | ||
| 961 | "Mfr;": "\U0001d510", | ||
| 962 | "MinusPlus;": "\u2213", | ||
| 963 | "Mopf;": "\U0001d544", | ||
| 964 | "Mscr;": "\u2133", | ||
| 965 | "Mu;": "\u039c", | ||
| 966 | "NJcy;": "\u040a", | ||
| 967 | "Nacute;": "\u0143", | ||
| 968 | "Ncaron;": "\u0147", | ||
| 969 | "Ncedil;": "\u0145", | ||
| 970 | "Ncy;": "\u041d", | ||
| 971 | "NegativeMediumSpace;": "\u200b", | ||
| 972 | "NegativeThickSpace;": "\u200b", | ||
| 973 | "NegativeThinSpace;": "\u200b", | ||
| 974 | "NegativeVeryThinSpace;": "\u200b", | ||
| 975 | "NestedGreaterGreater;": "\u226b", | ||
| 976 | "NestedLessLess;": "\u226a", | ||
| 977 | "NewLine;": "\n", | ||
| 978 | "Nfr;": "\U0001d511", | ||
| 979 | "NoBreak;": "\u2060", | ||
| 980 | "NonBreakingSpace;": "\xa0", | ||
| 981 | "Nopf;": "\u2115", | ||
| 982 | "Not;": "\u2aec", | ||
| 983 | "NotCongruent;": "\u2262", | ||
| 984 | "NotCupCap;": "\u226d", | ||
| 985 | "NotDoubleVerticalBar;": "\u2226", | ||
| 986 | "NotElement;": "\u2209", | ||
| 987 | "NotEqual;": "\u2260", | ||
| 988 | "NotEqualTilde;": "\u2242\u0338", | ||
| 989 | "NotExists;": "\u2204", | ||
| 990 | "NotGreater;": "\u226f", | ||
| 991 | "NotGreaterEqual;": "\u2271", | ||
| 992 | "NotGreaterFullEqual;": "\u2267\u0338", | ||
| 993 | "NotGreaterGreater;": "\u226b\u0338", | ||
| 994 | "NotGreaterLess;": "\u2279", | ||
| 995 | "NotGreaterSlantEqual;": "\u2a7e\u0338", | ||
| 996 | "NotGreaterTilde;": "\u2275", | ||
| 997 | "NotHumpDownHump;": "\u224e\u0338", | ||
| 998 | "NotHumpEqual;": "\u224f\u0338", | ||
| 999 | "NotLeftTriangle;": "\u22ea", | ||
| 1000 | "NotLeftTriangleBar;": "\u29cf\u0338", | ||
| 1001 | "NotLeftTriangleEqual;": "\u22ec", | ||
| 1002 | "NotLess;": "\u226e", | ||
| 1003 | "NotLessEqual;": "\u2270", | ||
| 1004 | "NotLessGreater;": "\u2278", | ||
| 1005 | "NotLessLess;": "\u226a\u0338", | ||
| 1006 | "NotLessSlantEqual;": "\u2a7d\u0338", | ||
| 1007 | "NotLessTilde;": "\u2274", | ||
| 1008 | "NotNestedGreaterGreater;": "\u2aa2\u0338", | ||
| 1009 | "NotNestedLessLess;": "\u2aa1\u0338", | ||
| 1010 | "NotPrecedes;": "\u2280", | ||
| 1011 | "NotPrecedesEqual;": "\u2aaf\u0338", | ||
| 1012 | "NotPrecedesSlantEqual;": "\u22e0", | ||
| 1013 | "NotReverseElement;": "\u220c", | ||
| 1014 | "NotRightTriangle;": "\u22eb", | ||
| 1015 | "NotRightTriangleBar;": "\u29d0\u0338", | ||
| 1016 | "NotRightTriangleEqual;": "\u22ed", | ||
| 1017 | "NotSquareSubset;": "\u228f\u0338", | ||
| 1018 | "NotSquareSubsetEqual;": "\u22e2", | ||
| 1019 | "NotSquareSuperset;": "\u2290\u0338", | ||
| 1020 | "NotSquareSupersetEqual;": "\u22e3", | ||
| 1021 | "NotSubset;": "\u2282\u20d2", | ||
| 1022 | "NotSubsetEqual;": "\u2288", | ||
| 1023 | "NotSucceeds;": "\u2281", | ||
| 1024 | "NotSucceedsEqual;": "\u2ab0\u0338", | ||
| 1025 | "NotSucceedsSlantEqual;": "\u22e1", | ||
| 1026 | "NotSucceedsTilde;": "\u227f\u0338", | ||
| 1027 | "NotSuperset;": "\u2283\u20d2", | ||
| 1028 | "NotSupersetEqual;": "\u2289", | ||
| 1029 | "NotTilde;": "\u2241", | ||
| 1030 | "NotTildeEqual;": "\u2244", | ||
| 1031 | "NotTildeFullEqual;": "\u2247", | ||
| 1032 | "NotTildeTilde;": "\u2249", | ||
| 1033 | "NotVerticalBar;": "\u2224", | ||
| 1034 | "Nscr;": "\U0001d4a9", | ||
| 1035 | "Ntilde": "\xd1", | ||
| 1036 | "Ntilde;": "\xd1", | ||
| 1037 | "Nu;": "\u039d", | ||
| 1038 | "OElig;": "\u0152", | ||
| 1039 | "Oacute": "\xd3", | ||
| 1040 | "Oacute;": "\xd3", | ||
| 1041 | "Ocirc": "\xd4", | ||
| 1042 | "Ocirc;": "\xd4", | ||
| 1043 | "Ocy;": "\u041e", | ||
| 1044 | "Odblac;": "\u0150", | ||
| 1045 | "Ofr;": "\U0001d512", | ||
| 1046 | "Ograve": "\xd2", | ||
| 1047 | "Ograve;": "\xd2", | ||
| 1048 | "Omacr;": "\u014c", | ||
| 1049 | "Omega;": "\u03a9", | ||
| 1050 | "Omicron;": "\u039f", | ||
| 1051 | "Oopf;": "\U0001d546", | ||
| 1052 | "OpenCurlyDoubleQuote;": "\u201c", | ||
| 1053 | "OpenCurlyQuote;": "\u2018", | ||
| 1054 | "Or;": "\u2a54", | ||
| 1055 | "Oscr;": "\U0001d4aa", | ||
| 1056 | "Oslash": "\xd8", | ||
| 1057 | "Oslash;": "\xd8", | ||
| 1058 | "Otilde": "\xd5", | ||
| 1059 | "Otilde;": "\xd5", | ||
| 1060 | "Otimes;": "\u2a37", | ||
| 1061 | "Ouml": "\xd6", | ||
| 1062 | "Ouml;": "\xd6", | ||
| 1063 | "OverBar;": "\u203e", | ||
| 1064 | "OverBrace;": "\u23de", | ||
| 1065 | "OverBracket;": "\u23b4", | ||
| 1066 | "OverParenthesis;": "\u23dc", | ||
| 1067 | "PartialD;": "\u2202", | ||
| 1068 | "Pcy;": "\u041f", | ||
| 1069 | "Pfr;": "\U0001d513", | ||
| 1070 | "Phi;": "\u03a6", | ||
| 1071 | "Pi;": "\u03a0", | ||
| 1072 | "PlusMinus;": "\xb1", | ||
| 1073 | "Poincareplane;": "\u210c", | ||
| 1074 | "Popf;": "\u2119", | ||
| 1075 | "Pr;": "\u2abb", | ||
| 1076 | "Precedes;": "\u227a", | ||
| 1077 | "PrecedesEqual;": "\u2aaf", | ||
| 1078 | "PrecedesSlantEqual;": "\u227c", | ||
| 1079 | "PrecedesTilde;": "\u227e", | ||
| 1080 | "Prime;": "\u2033", | ||
| 1081 | "Product;": "\u220f", | ||
| 1082 | "Proportion;": "\u2237", | ||
| 1083 | "Proportional;": "\u221d", | ||
| 1084 | "Pscr;": "\U0001d4ab", | ||
| 1085 | "Psi;": "\u03a8", | ||
| 1086 | "QUOT": "\"", | ||
| 1087 | "QUOT;": "\"", | ||
| 1088 | "Qfr;": "\U0001d514", | ||
| 1089 | "Qopf;": "\u211a", | ||
| 1090 | "Qscr;": "\U0001d4ac", | ||
| 1091 | "RBarr;": "\u2910", | ||
| 1092 | "REG": "\xae", | ||
| 1093 | "REG;": "\xae", | ||
| 1094 | "Racute;": "\u0154", | ||
| 1095 | "Rang;": "\u27eb", | ||
| 1096 | "Rarr;": "\u21a0", | ||
| 1097 | "Rarrtl;": "\u2916", | ||
| 1098 | "Rcaron;": "\u0158", | ||
| 1099 | "Rcedil;": "\u0156", | ||
| 1100 | "Rcy;": "\u0420", | ||
| 1101 | "Re;": "\u211c", | ||
| 1102 | "ReverseElement;": "\u220b", | ||
| 1103 | "ReverseEquilibrium;": "\u21cb", | ||
| 1104 | "ReverseUpEquilibrium;": "\u296f", | ||
| 1105 | "Rfr;": "\u211c", | ||
| 1106 | "Rho;": "\u03a1", | ||
| 1107 | "RightAngleBracket;": "\u27e9", | ||
| 1108 | "RightArrow;": "\u2192", | ||
| 1109 | "RightArrowBar;": "\u21e5", | ||
| 1110 | "RightArrowLeftArrow;": "\u21c4", | ||
| 1111 | "RightCeiling;": "\u2309", | ||
| 1112 | "RightDoubleBracket;": "\u27e7", | ||
| 1113 | "RightDownTeeVector;": "\u295d", | ||
| 1114 | "RightDownVector;": "\u21c2", | ||
| 1115 | "RightDownVectorBar;": "\u2955", | ||
| 1116 | "RightFloor;": "\u230b", | ||
| 1117 | "RightTee;": "\u22a2", | ||
| 1118 | "RightTeeArrow;": "\u21a6", | ||
| 1119 | "RightTeeVector;": "\u295b", | ||
| 1120 | "RightTriangle;": "\u22b3", | ||
| 1121 | "RightTriangleBar;": "\u29d0", | ||
| 1122 | "RightTriangleEqual;": "\u22b5", | ||
| 1123 | "RightUpDownVector;": "\u294f", | ||
| 1124 | "RightUpTeeVector;": "\u295c", | ||
| 1125 | "RightUpVector;": "\u21be", | ||
| 1126 | "RightUpVectorBar;": "\u2954", | ||
| 1127 | "RightVector;": "\u21c0", | ||
| 1128 | "RightVectorBar;": "\u2953", | ||
| 1129 | "Rightarrow;": "\u21d2", | ||
| 1130 | "Ropf;": "\u211d", | ||
| 1131 | "RoundImplies;": "\u2970", | ||
| 1132 | "Rrightarrow;": "\u21db", | ||
| 1133 | "Rscr;": "\u211b", | ||
| 1134 | "Rsh;": "\u21b1", | ||
| 1135 | "RuleDelayed;": "\u29f4", | ||
| 1136 | "SHCHcy;": "\u0429", | ||
| 1137 | "SHcy;": "\u0428", | ||
| 1138 | "SOFTcy;": "\u042c", | ||
| 1139 | "Sacute;": "\u015a", | ||
| 1140 | "Sc;": "\u2abc", | ||
| 1141 | "Scaron;": "\u0160", | ||
| 1142 | "Scedil;": "\u015e", | ||
| 1143 | "Scirc;": "\u015c", | ||
| 1144 | "Scy;": "\u0421", | ||
| 1145 | "Sfr;": "\U0001d516", | ||
| 1146 | "ShortDownArrow;": "\u2193", | ||
| 1147 | "ShortLeftArrow;": "\u2190", | ||
| 1148 | "ShortRightArrow;": "\u2192", | ||
| 1149 | "ShortUpArrow;": "\u2191", | ||
| 1150 | "Sigma;": "\u03a3", | ||
| 1151 | "SmallCircle;": "\u2218", | ||
| 1152 | "Sopf;": "\U0001d54a", | ||
| 1153 | "Sqrt;": "\u221a", | ||
| 1154 | "Square;": "\u25a1", | ||
| 1155 | "SquareIntersection;": "\u2293", | ||
| 1156 | "SquareSubset;": "\u228f", | ||
| 1157 | "SquareSubsetEqual;": "\u2291", | ||
| 1158 | "SquareSuperset;": "\u2290", | ||
| 1159 | "SquareSupersetEqual;": "\u2292", | ||
| 1160 | "SquareUnion;": "\u2294", | ||
| 1161 | "Sscr;": "\U0001d4ae", | ||
| 1162 | "Star;": "\u22c6", | ||
| 1163 | "Sub;": "\u22d0", | ||
| 1164 | "Subset;": "\u22d0", | ||
| 1165 | "SubsetEqual;": "\u2286", | ||
| 1166 | "Succeeds;": "\u227b", | ||
| 1167 | "SucceedsEqual;": "\u2ab0", | ||
| 1168 | "SucceedsSlantEqual;": "\u227d", | ||
| 1169 | "SucceedsTilde;": "\u227f", | ||
| 1170 | "SuchThat;": "\u220b", | ||
| 1171 | "Sum;": "\u2211", | ||
| 1172 | "Sup;": "\u22d1", | ||
| 1173 | "Superset;": "\u2283", | ||
| 1174 | "SupersetEqual;": "\u2287", | ||
| 1175 | "Supset;": "\u22d1", | ||
| 1176 | "THORN": "\xde", | ||
| 1177 | "THORN;": "\xde", | ||
| 1178 | "TRADE;": "\u2122", | ||
| 1179 | "TSHcy;": "\u040b", | ||
| 1180 | "TScy;": "\u0426", | ||
| 1181 | "Tab;": "\t", | ||
| 1182 | "Tau;": "\u03a4", | ||
| 1183 | "Tcaron;": "\u0164", | ||
| 1184 | "Tcedil;": "\u0162", | ||
| 1185 | "Tcy;": "\u0422", | ||
| 1186 | "Tfr;": "\U0001d517", | ||
| 1187 | "Therefore;": "\u2234", | ||
| 1188 | "Theta;": "\u0398", | ||
| 1189 | "ThickSpace;": "\u205f\u200a", | ||
| 1190 | "ThinSpace;": "\u2009", | ||
| 1191 | "Tilde;": "\u223c", | ||
| 1192 | "TildeEqual;": "\u2243", | ||
| 1193 | "TildeFullEqual;": "\u2245", | ||
| 1194 | "TildeTilde;": "\u2248", | ||
| 1195 | "Topf;": "\U0001d54b", | ||
| 1196 | "TripleDot;": "\u20db", | ||
| 1197 | "Tscr;": "\U0001d4af", | ||
| 1198 | "Tstrok;": "\u0166", | ||
| 1199 | "Uacute": "\xda", | ||
| 1200 | "Uacute;": "\xda", | ||
| 1201 | "Uarr;": "\u219f", | ||
| 1202 | "Uarrocir;": "\u2949", | ||
| 1203 | "Ubrcy;": "\u040e", | ||
| 1204 | "Ubreve;": "\u016c", | ||
| 1205 | "Ucirc": "\xdb", | ||
| 1206 | "Ucirc;": "\xdb", | ||
| 1207 | "Ucy;": "\u0423", | ||
| 1208 | "Udblac;": "\u0170", | ||
| 1209 | "Ufr;": "\U0001d518", | ||
| 1210 | "Ugrave": "\xd9", | ||
| 1211 | "Ugrave;": "\xd9", | ||
| 1212 | "Umacr;": "\u016a", | ||
| 1213 | "UnderBar;": "_", | ||
| 1214 | "UnderBrace;": "\u23df", | ||
| 1215 | "UnderBracket;": "\u23b5", | ||
| 1216 | "UnderParenthesis;": "\u23dd", | ||
| 1217 | "Union;": "\u22c3", | ||
| 1218 | "UnionPlus;": "\u228e", | ||
| 1219 | "Uogon;": "\u0172", | ||
| 1220 | "Uopf;": "\U0001d54c", | ||
| 1221 | "UpArrow;": "\u2191", | ||
| 1222 | "UpArrowBar;": "\u2912", | ||
| 1223 | "UpArrowDownArrow;": "\u21c5", | ||
| 1224 | "UpDownArrow;": "\u2195", | ||
| 1225 | "UpEquilibrium;": "\u296e", | ||
| 1226 | "UpTee;": "\u22a5", | ||
| 1227 | "UpTeeArrow;": "\u21a5", | ||
| 1228 | "Uparrow;": "\u21d1", | ||
| 1229 | "Updownarrow;": "\u21d5", | ||
| 1230 | "UpperLeftArrow;": "\u2196", | ||
| 1231 | "UpperRightArrow;": "\u2197", | ||
| 1232 | "Upsi;": "\u03d2", | ||
| 1233 | "Upsilon;": "\u03a5", | ||
| 1234 | "Uring;": "\u016e", | ||
| 1235 | "Uscr;": "\U0001d4b0", | ||
| 1236 | "Utilde;": "\u0168", | ||
| 1237 | "Uuml": "\xdc", | ||
| 1238 | "Uuml;": "\xdc", | ||
| 1239 | "VDash;": "\u22ab", | ||
| 1240 | "Vbar;": "\u2aeb", | ||
| 1241 | "Vcy;": "\u0412", | ||
| 1242 | "Vdash;": "\u22a9", | ||
| 1243 | "Vdashl;": "\u2ae6", | ||
| 1244 | "Vee;": "\u22c1", | ||
| 1245 | "Verbar;": "\u2016", | ||
| 1246 | "Vert;": "\u2016", | ||
| 1247 | "VerticalBar;": "\u2223", | ||
| 1248 | "VerticalLine;": "|", | ||
| 1249 | "VerticalSeparator;": "\u2758", | ||
| 1250 | "VerticalTilde;": "\u2240", | ||
| 1251 | "VeryThinSpace;": "\u200a", | ||
| 1252 | "Vfr;": "\U0001d519", | ||
| 1253 | "Vopf;": "\U0001d54d", | ||
| 1254 | "Vscr;": "\U0001d4b1", | ||
| 1255 | "Vvdash;": "\u22aa", | ||
| 1256 | "Wcirc;": "\u0174", | ||
| 1257 | "Wedge;": "\u22c0", | ||
| 1258 | "Wfr;": "\U0001d51a", | ||
| 1259 | "Wopf;": "\U0001d54e", | ||
| 1260 | "Wscr;": "\U0001d4b2", | ||
| 1261 | "Xfr;": "\U0001d51b", | ||
| 1262 | "Xi;": "\u039e", | ||
| 1263 | "Xopf;": "\U0001d54f", | ||
| 1264 | "Xscr;": "\U0001d4b3", | ||
| 1265 | "YAcy;": "\u042f", | ||
| 1266 | "YIcy;": "\u0407", | ||
| 1267 | "YUcy;": "\u042e", | ||
| 1268 | "Yacute": "\xdd", | ||
| 1269 | "Yacute;": "\xdd", | ||
| 1270 | "Ycirc;": "\u0176", | ||
| 1271 | "Ycy;": "\u042b", | ||
| 1272 | "Yfr;": "\U0001d51c", | ||
| 1273 | "Yopf;": "\U0001d550", | ||
| 1274 | "Yscr;": "\U0001d4b4", | ||
| 1275 | "Yuml;": "\u0178", | ||
| 1276 | "ZHcy;": "\u0416", | ||
| 1277 | "Zacute;": "\u0179", | ||
| 1278 | "Zcaron;": "\u017d", | ||
| 1279 | "Zcy;": "\u0417", | ||
| 1280 | "Zdot;": "\u017b", | ||
| 1281 | "ZeroWidthSpace;": "\u200b", | ||
| 1282 | "Zeta;": "\u0396", | ||
| 1283 | "Zfr;": "\u2128", | ||
| 1284 | "Zopf;": "\u2124", | ||
| 1285 | "Zscr;": "\U0001d4b5", | ||
| 1286 | "aacute": "\xe1", | ||
| 1287 | "aacute;": "\xe1", | ||
| 1288 | "abreve;": "\u0103", | ||
| 1289 | "ac;": "\u223e", | ||
| 1290 | "acE;": "\u223e\u0333", | ||
| 1291 | "acd;": "\u223f", | ||
| 1292 | "acirc": "\xe2", | ||
| 1293 | "acirc;": "\xe2", | ||
| 1294 | "acute": "\xb4", | ||
| 1295 | "acute;": "\xb4", | ||
| 1296 | "acy;": "\u0430", | ||
| 1297 | "aelig": "\xe6", | ||
| 1298 | "aelig;": "\xe6", | ||
| 1299 | "af;": "\u2061", | ||
| 1300 | "afr;": "\U0001d51e", | ||
| 1301 | "agrave": "\xe0", | ||
| 1302 | "agrave;": "\xe0", | ||
| 1303 | "alefsym;": "\u2135", | ||
| 1304 | "aleph;": "\u2135", | ||
| 1305 | "alpha;": "\u03b1", | ||
| 1306 | "amacr;": "\u0101", | ||
| 1307 | "amalg;": "\u2a3f", | ||
| 1308 | "amp": "&", | ||
| 1309 | "amp;": "&", | ||
| 1310 | "and;": "\u2227", | ||
| 1311 | "andand;": "\u2a55", | ||
| 1312 | "andd;": "\u2a5c", | ||
| 1313 | "andslope;": "\u2a58", | ||
| 1314 | "andv;": "\u2a5a", | ||
| 1315 | "ang;": "\u2220", | ||
| 1316 | "ange;": "\u29a4", | ||
| 1317 | "angle;": "\u2220", | ||
| 1318 | "angmsd;": "\u2221", | ||
| 1319 | "angmsdaa;": "\u29a8", | ||
| 1320 | "angmsdab;": "\u29a9", | ||
| 1321 | "angmsdac;": "\u29aa", | ||
| 1322 | "angmsdad;": "\u29ab", | ||
| 1323 | "angmsdae;": "\u29ac", | ||
| 1324 | "angmsdaf;": "\u29ad", | ||
| 1325 | "angmsdag;": "\u29ae", | ||
| 1326 | "angmsdah;": "\u29af", | ||
| 1327 | "angrt;": "\u221f", | ||
| 1328 | "angrtvb;": "\u22be", | ||
| 1329 | "angrtvbd;": "\u299d", | ||
| 1330 | "angsph;": "\u2222", | ||
| 1331 | "angst;": "\xc5", | ||
| 1332 | "angzarr;": "\u237c", | ||
| 1333 | "aogon;": "\u0105", | ||
| 1334 | "aopf;": "\U0001d552", | ||
| 1335 | "ap;": "\u2248", | ||
| 1336 | "apE;": "\u2a70", | ||
| 1337 | "apacir;": "\u2a6f", | ||
| 1338 | "ape;": "\u224a", | ||
| 1339 | "apid;": "\u224b", | ||
| 1340 | "apos;": "'", | ||
| 1341 | "approx;": "\u2248", | ||
| 1342 | "approxeq;": "\u224a", | ||
| 1343 | "aring": "\xe5", | ||
| 1344 | "aring;": "\xe5", | ||
| 1345 | "ascr;": "\U0001d4b6", | ||
| 1346 | "ast;": "*", | ||
| 1347 | "asymp;": "\u2248", | ||
| 1348 | "asympeq;": "\u224d", | ||
| 1349 | "atilde": "\xe3", | ||
| 1350 | "atilde;": "\xe3", | ||
| 1351 | "auml": "\xe4", | ||
| 1352 | "auml;": "\xe4", | ||
| 1353 | "awconint;": "\u2233", | ||
| 1354 | "awint;": "\u2a11", | ||
| 1355 | "bNot;": "\u2aed", | ||
| 1356 | "backcong;": "\u224c", | ||
| 1357 | "backepsilon;": "\u03f6", | ||
| 1358 | "backprime;": "\u2035", | ||
| 1359 | "backsim;": "\u223d", | ||
| 1360 | "backsimeq;": "\u22cd", | ||
| 1361 | "barvee;": "\u22bd", | ||
| 1362 | "barwed;": "\u2305", | ||
| 1363 | "barwedge;": "\u2305", | ||
| 1364 | "bbrk;": "\u23b5", | ||
| 1365 | "bbrktbrk;": "\u23b6", | ||
| 1366 | "bcong;": "\u224c", | ||
| 1367 | "bcy;": "\u0431", | ||
| 1368 | "bdquo;": "\u201e", | ||
| 1369 | "becaus;": "\u2235", | ||
| 1370 | "because;": "\u2235", | ||
| 1371 | "bemptyv;": "\u29b0", | ||
| 1372 | "bepsi;": "\u03f6", | ||
| 1373 | "bernou;": "\u212c", | ||
| 1374 | "beta;": "\u03b2", | ||
| 1375 | "beth;": "\u2136", | ||
| 1376 | "between;": "\u226c", | ||
| 1377 | "bfr;": "\U0001d51f", | ||
| 1378 | "bigcap;": "\u22c2", | ||
| 1379 | "bigcirc;": "\u25ef", | ||
| 1380 | "bigcup;": "\u22c3", | ||
| 1381 | "bigodot;": "\u2a00", | ||
| 1382 | "bigoplus;": "\u2a01", | ||
| 1383 | "bigotimes;": "\u2a02", | ||
| 1384 | "bigsqcup;": "\u2a06", | ||
| 1385 | "bigstar;": "\u2605", | ||
| 1386 | "bigtriangledown;": "\u25bd", | ||
| 1387 | "bigtriangleup;": "\u25b3", | ||
| 1388 | "biguplus;": "\u2a04", | ||
| 1389 | "bigvee;": "\u22c1", | ||
| 1390 | "bigwedge;": "\u22c0", | ||
| 1391 | "bkarow;": "\u290d", | ||
| 1392 | "blacklozenge;": "\u29eb", | ||
| 1393 | "blacksquare;": "\u25aa", | ||
| 1394 | "blacktriangle;": "\u25b4", | ||
| 1395 | "blacktriangledown;": "\u25be", | ||
| 1396 | "blacktriangleleft;": "\u25c2", | ||
| 1397 | "blacktriangleright;": "\u25b8", | ||
| 1398 | "blank;": "\u2423", | ||
| 1399 | "blk12;": "\u2592", | ||
| 1400 | "blk14;": "\u2591", | ||
| 1401 | "blk34;": "\u2593", | ||
| 1402 | "block;": "\u2588", | ||
| 1403 | "bne;": "=\u20e5", | ||
| 1404 | "bnequiv;": "\u2261\u20e5", | ||
| 1405 | "bnot;": "\u2310", | ||
| 1406 | "bopf;": "\U0001d553", | ||
| 1407 | "bot;": "\u22a5", | ||
| 1408 | "bottom;": "\u22a5", | ||
| 1409 | "bowtie;": "\u22c8", | ||
| 1410 | "boxDL;": "\u2557", | ||
| 1411 | "boxDR;": "\u2554", | ||
| 1412 | "boxDl;": "\u2556", | ||
| 1413 | "boxDr;": "\u2553", | ||
| 1414 | "boxH;": "\u2550", | ||
| 1415 | "boxHD;": "\u2566", | ||
| 1416 | "boxHU;": "\u2569", | ||
| 1417 | "boxHd;": "\u2564", | ||
| 1418 | "boxHu;": "\u2567", | ||
| 1419 | "boxUL;": "\u255d", | ||
| 1420 | "boxUR;": "\u255a", | ||
| 1421 | "boxUl;": "\u255c", | ||
| 1422 | "boxUr;": "\u2559", | ||
| 1423 | "boxV;": "\u2551", | ||
| 1424 | "boxVH;": "\u256c", | ||
| 1425 | "boxVL;": "\u2563", | ||
| 1426 | "boxVR;": "\u2560", | ||
| 1427 | "boxVh;": "\u256b", | ||
| 1428 | "boxVl;": "\u2562", | ||
| 1429 | "boxVr;": "\u255f", | ||
| 1430 | "boxbox;": "\u29c9", | ||
| 1431 | "boxdL;": "\u2555", | ||
| 1432 | "boxdR;": "\u2552", | ||
| 1433 | "boxdl;": "\u2510", | ||
| 1434 | "boxdr;": "\u250c", | ||
| 1435 | "boxh;": "\u2500", | ||
| 1436 | "boxhD;": "\u2565", | ||
| 1437 | "boxhU;": "\u2568", | ||
| 1438 | "boxhd;": "\u252c", | ||
| 1439 | "boxhu;": "\u2534", | ||
| 1440 | "boxminus;": "\u229f", | ||
| 1441 | "boxplus;": "\u229e", | ||
| 1442 | "boxtimes;": "\u22a0", | ||
| 1443 | "boxuL;": "\u255b", | ||
| 1444 | "boxuR;": "\u2558", | ||
| 1445 | "boxul;": "\u2518", | ||
| 1446 | "boxur;": "\u2514", | ||
| 1447 | "boxv;": "\u2502", | ||
| 1448 | "boxvH;": "\u256a", | ||
| 1449 | "boxvL;": "\u2561", | ||
| 1450 | "boxvR;": "\u255e", | ||
| 1451 | "boxvh;": "\u253c", | ||
| 1452 | "boxvl;": "\u2524", | ||
| 1453 | "boxvr;": "\u251c", | ||
| 1454 | "bprime;": "\u2035", | ||
| 1455 | "breve;": "\u02d8", | ||
| 1456 | "brvbar": "\xa6", | ||
| 1457 | "brvbar;": "\xa6", | ||
| 1458 | "bscr;": "\U0001d4b7", | ||
| 1459 | "bsemi;": "\u204f", | ||
| 1460 | "bsim;": "\u223d", | ||
| 1461 | "bsime;": "\u22cd", | ||
| 1462 | "bsol;": "\\", | ||
| 1463 | "bsolb;": "\u29c5", | ||
| 1464 | "bsolhsub;": "\u27c8", | ||
| 1465 | "bull;": "\u2022", | ||
| 1466 | "bullet;": "\u2022", | ||
| 1467 | "bump;": "\u224e", | ||
| 1468 | "bumpE;": "\u2aae", | ||
| 1469 | "bumpe;": "\u224f", | ||
| 1470 | "bumpeq;": "\u224f", | ||
| 1471 | "cacute;": "\u0107", | ||
| 1472 | "cap;": "\u2229", | ||
| 1473 | "capand;": "\u2a44", | ||
| 1474 | "capbrcup;": "\u2a49", | ||
| 1475 | "capcap;": "\u2a4b", | ||
| 1476 | "capcup;": "\u2a47", | ||
| 1477 | "capdot;": "\u2a40", | ||
| 1478 | "caps;": "\u2229\ufe00", | ||
| 1479 | "caret;": "\u2041", | ||
| 1480 | "caron;": "\u02c7", | ||
| 1481 | "ccaps;": "\u2a4d", | ||
| 1482 | "ccaron;": "\u010d", | ||
| 1483 | "ccedil": "\xe7", | ||
| 1484 | "ccedil;": "\xe7", | ||
| 1485 | "ccirc;": "\u0109", | ||
| 1486 | "ccups;": "\u2a4c", | ||
| 1487 | "ccupssm;": "\u2a50", | ||
| 1488 | "cdot;": "\u010b", | ||
| 1489 | "cedil": "\xb8", | ||
| 1490 | "cedil;": "\xb8", | ||
| 1491 | "cemptyv;": "\u29b2", | ||
| 1492 | "cent": "\xa2", | ||
| 1493 | "cent;": "\xa2", | ||
| 1494 | "centerdot;": "\xb7", | ||
| 1495 | "cfr;": "\U0001d520", | ||
| 1496 | "chcy;": "\u0447", | ||
| 1497 | "check;": "\u2713", | ||
| 1498 | "checkmark;": "\u2713", | ||
| 1499 | "chi;": "\u03c7", | ||
| 1500 | "cir;": "\u25cb", | ||
| 1501 | "cirE;": "\u29c3", | ||
| 1502 | "circ;": "\u02c6", | ||
| 1503 | "circeq;": "\u2257", | ||
| 1504 | "circlearrowleft;": "\u21ba", | ||
| 1505 | "circlearrowright;": "\u21bb", | ||
| 1506 | "circledR;": "\xae", | ||
| 1507 | "circledS;": "\u24c8", | ||
| 1508 | "circledast;": "\u229b", | ||
| 1509 | "circledcirc;": "\u229a", | ||
| 1510 | "circleddash;": "\u229d", | ||
| 1511 | "cire;": "\u2257", | ||
| 1512 | "cirfnint;": "\u2a10", | ||
| 1513 | "cirmid;": "\u2aef", | ||
| 1514 | "cirscir;": "\u29c2", | ||
| 1515 | "clubs;": "\u2663", | ||
| 1516 | "clubsuit;": "\u2663", | ||
| 1517 | "colon;": ":", | ||
| 1518 | "colone;": "\u2254", | ||
| 1519 | "coloneq;": "\u2254", | ||
| 1520 | "comma;": ",", | ||
| 1521 | "commat;": "@", | ||
| 1522 | "comp;": "\u2201", | ||
| 1523 | "compfn;": "\u2218", | ||
| 1524 | "complement;": "\u2201", | ||
| 1525 | "complexes;": "\u2102", | ||
| 1526 | "cong;": "\u2245", | ||
| 1527 | "congdot;": "\u2a6d", | ||
| 1528 | "conint;": "\u222e", | ||
| 1529 | "copf;": "\U0001d554", | ||
| 1530 | "coprod;": "\u2210", | ||
| 1531 | "copy": "\xa9", | ||
| 1532 | "copy;": "\xa9", | ||
| 1533 | "copysr;": "\u2117", | ||
| 1534 | "crarr;": "\u21b5", | ||
| 1535 | "cross;": "\u2717", | ||
| 1536 | "cscr;": "\U0001d4b8", | ||
| 1537 | "csub;": "\u2acf", | ||
| 1538 | "csube;": "\u2ad1", | ||
| 1539 | "csup;": "\u2ad0", | ||
| 1540 | "csupe;": "\u2ad2", | ||
| 1541 | "ctdot;": "\u22ef", | ||
| 1542 | "cudarrl;": "\u2938", | ||
| 1543 | "cudarrr;": "\u2935", | ||
| 1544 | "cuepr;": "\u22de", | ||
| 1545 | "cuesc;": "\u22df", | ||
| 1546 | "cularr;": "\u21b6", | ||
| 1547 | "cularrp;": "\u293d", | ||
| 1548 | "cup;": "\u222a", | ||
| 1549 | "cupbrcap;": "\u2a48", | ||
| 1550 | "cupcap;": "\u2a46", | ||
| 1551 | "cupcup;": "\u2a4a", | ||
| 1552 | "cupdot;": "\u228d", | ||
| 1553 | "cupor;": "\u2a45", | ||
| 1554 | "cups;": "\u222a\ufe00", | ||
| 1555 | "curarr;": "\u21b7", | ||
| 1556 | "curarrm;": "\u293c", | ||
| 1557 | "curlyeqprec;": "\u22de", | ||
| 1558 | "curlyeqsucc;": "\u22df", | ||
| 1559 | "curlyvee;": "\u22ce", | ||
| 1560 | "curlywedge;": "\u22cf", | ||
| 1561 | "curren": "\xa4", | ||
| 1562 | "curren;": "\xa4", | ||
| 1563 | "curvearrowleft;": "\u21b6", | ||
| 1564 | "curvearrowright;": "\u21b7", | ||
| 1565 | "cuvee;": "\u22ce", | ||
| 1566 | "cuwed;": "\u22cf", | ||
| 1567 | "cwconint;": "\u2232", | ||
| 1568 | "cwint;": "\u2231", | ||
| 1569 | "cylcty;": "\u232d", | ||
| 1570 | "dArr;": "\u21d3", | ||
| 1571 | "dHar;": "\u2965", | ||
| 1572 | "dagger;": "\u2020", | ||
| 1573 | "daleth;": "\u2138", | ||
| 1574 | "darr;": "\u2193", | ||
| 1575 | "dash;": "\u2010", | ||
| 1576 | "dashv;": "\u22a3", | ||
| 1577 | "dbkarow;": "\u290f", | ||
| 1578 | "dblac;": "\u02dd", | ||
| 1579 | "dcaron;": "\u010f", | ||
| 1580 | "dcy;": "\u0434", | ||
| 1581 | "dd;": "\u2146", | ||
| 1582 | "ddagger;": "\u2021", | ||
| 1583 | "ddarr;": "\u21ca", | ||
| 1584 | "ddotseq;": "\u2a77", | ||
| 1585 | "deg": "\xb0", | ||
| 1586 | "deg;": "\xb0", | ||
| 1587 | "delta;": "\u03b4", | ||
| 1588 | "demptyv;": "\u29b1", | ||
| 1589 | "dfisht;": "\u297f", | ||
| 1590 | "dfr;": "\U0001d521", | ||
| 1591 | "dharl;": "\u21c3", | ||
| 1592 | "dharr;": "\u21c2", | ||
| 1593 | "diam;": "\u22c4", | ||
| 1594 | "diamond;": "\u22c4", | ||
| 1595 | "diamondsuit;": "\u2666", | ||
| 1596 | "diams;": "\u2666", | ||
| 1597 | "die;": "\xa8", | ||
| 1598 | "digamma;": "\u03dd", | ||
| 1599 | "disin;": "\u22f2", | ||
| 1600 | "div;": "\xf7", | ||
| 1601 | "divide": "\xf7", | ||
| 1602 | "divide;": "\xf7", | ||
| 1603 | "divideontimes;": "\u22c7", | ||
| 1604 | "divonx;": "\u22c7", | ||
| 1605 | "djcy;": "\u0452", | ||
| 1606 | "dlcorn;": "\u231e", | ||
| 1607 | "dlcrop;": "\u230d", | ||
| 1608 | "dollar;": "$", | ||
| 1609 | "dopf;": "\U0001d555", | ||
| 1610 | "dot;": "\u02d9", | ||
| 1611 | "doteq;": "\u2250", | ||
| 1612 | "doteqdot;": "\u2251", | ||
| 1613 | "dotminus;": "\u2238", | ||
| 1614 | "dotplus;": "\u2214", | ||
| 1615 | "dotsquare;": "\u22a1", | ||
| 1616 | "doublebarwedge;": "\u2306", | ||
| 1617 | "downarrow;": "\u2193", | ||
| 1618 | "downdownarrows;": "\u21ca", | ||
| 1619 | "downharpoonleft;": "\u21c3", | ||
| 1620 | "downharpoonright;": "\u21c2", | ||
| 1621 | "drbkarow;": "\u2910", | ||
| 1622 | "drcorn;": "\u231f", | ||
| 1623 | "drcrop;": "\u230c", | ||
| 1624 | "dscr;": "\U0001d4b9", | ||
| 1625 | "dscy;": "\u0455", | ||
| 1626 | "dsol;": "\u29f6", | ||
| 1627 | "dstrok;": "\u0111", | ||
| 1628 | "dtdot;": "\u22f1", | ||
| 1629 | "dtri;": "\u25bf", | ||
| 1630 | "dtrif;": "\u25be", | ||
| 1631 | "duarr;": "\u21f5", | ||
| 1632 | "duhar;": "\u296f", | ||
| 1633 | "dwangle;": "\u29a6", | ||
| 1634 | "dzcy;": "\u045f", | ||
| 1635 | "dzigrarr;": "\u27ff", | ||
| 1636 | "eDDot;": "\u2a77", | ||
| 1637 | "eDot;": "\u2251", | ||
| 1638 | "eacute": "\xe9", | ||
| 1639 | "eacute;": "\xe9", | ||
| 1640 | "easter;": "\u2a6e", | ||
| 1641 | "ecaron;": "\u011b", | ||
| 1642 | "ecir;": "\u2256", | ||
| 1643 | "ecirc": "\xea", | ||
| 1644 | "ecirc;": "\xea", | ||
| 1645 | "ecolon;": "\u2255", | ||
| 1646 | "ecy;": "\u044d", | ||
| 1647 | "edot;": "\u0117", | ||
| 1648 | "ee;": "\u2147", | ||
| 1649 | "efDot;": "\u2252", | ||
| 1650 | "efr;": "\U0001d522", | ||
| 1651 | "eg;": "\u2a9a", | ||
| 1652 | "egrave": "\xe8", | ||
| 1653 | "egrave;": "\xe8", | ||
| 1654 | "egs;": "\u2a96", | ||
| 1655 | "egsdot;": "\u2a98", | ||
| 1656 | "el;": "\u2a99", | ||
| 1657 | "elinters;": "\u23e7", | ||
| 1658 | "ell;": "\u2113", | ||
| 1659 | "els;": "\u2a95", | ||
| 1660 | "elsdot;": "\u2a97", | ||
| 1661 | "emacr;": "\u0113", | ||
| 1662 | "empty;": "\u2205", | ||
| 1663 | "emptyset;": "\u2205", | ||
| 1664 | "emptyv;": "\u2205", | ||
| 1665 | "emsp13;": "\u2004", | ||
| 1666 | "emsp14;": "\u2005", | ||
| 1667 | "emsp;": "\u2003", | ||
| 1668 | "eng;": "\u014b", | ||
| 1669 | "ensp;": "\u2002", | ||
| 1670 | "eogon;": "\u0119", | ||
| 1671 | "eopf;": "\U0001d556", | ||
| 1672 | "epar;": "\u22d5", | ||
| 1673 | "eparsl;": "\u29e3", | ||
| 1674 | "eplus;": "\u2a71", | ||
| 1675 | "epsi;": "\u03b5", | ||
| 1676 | "epsilon;": "\u03b5", | ||
| 1677 | "epsiv;": "\u03f5", | ||
| 1678 | "eqcirc;": "\u2256", | ||
| 1679 | "eqcolon;": "\u2255", | ||
| 1680 | "eqsim;": "\u2242", | ||
| 1681 | "eqslantgtr;": "\u2a96", | ||
| 1682 | "eqslantless;": "\u2a95", | ||
| 1683 | "equals;": "=", | ||
| 1684 | "equest;": "\u225f", | ||
| 1685 | "equiv;": "\u2261", | ||
| 1686 | "equivDD;": "\u2a78", | ||
| 1687 | "eqvparsl;": "\u29e5", | ||
| 1688 | "erDot;": "\u2253", | ||
| 1689 | "erarr;": "\u2971", | ||
| 1690 | "escr;": "\u212f", | ||
| 1691 | "esdot;": "\u2250", | ||
| 1692 | "esim;": "\u2242", | ||
| 1693 | "eta;": "\u03b7", | ||
| 1694 | "eth": "\xf0", | ||
| 1695 | "eth;": "\xf0", | ||
| 1696 | "euml": "\xeb", | ||
| 1697 | "euml;": "\xeb", | ||
| 1698 | "euro;": "\u20ac", | ||
| 1699 | "excl;": "!", | ||
| 1700 | "exist;": "\u2203", | ||
| 1701 | "expectation;": "\u2130", | ||
| 1702 | "exponentiale;": "\u2147", | ||
| 1703 | "fallingdotseq;": "\u2252", | ||
| 1704 | "fcy;": "\u0444", | ||
| 1705 | "female;": "\u2640", | ||
| 1706 | "ffilig;": "\ufb03", | ||
| 1707 | "fflig;": "\ufb00", | ||
| 1708 | "ffllig;": "\ufb04", | ||
| 1709 | "ffr;": "\U0001d523", | ||
| 1710 | "filig;": "\ufb01", | ||
| 1711 | "fjlig;": "fj", | ||
| 1712 | "flat;": "\u266d", | ||
| 1713 | "fllig;": "\ufb02", | ||
| 1714 | "fltns;": "\u25b1", | ||
| 1715 | "fnof;": "\u0192", | ||
| 1716 | "fopf;": "\U0001d557", | ||
| 1717 | "forall;": "\u2200", | ||
| 1718 | "fork;": "\u22d4", | ||
| 1719 | "forkv;": "\u2ad9", | ||
| 1720 | "fpartint;": "\u2a0d", | ||
| 1721 | "frac12": "\xbd", | ||
| 1722 | "frac12;": "\xbd", | ||
| 1723 | "frac13;": "\u2153", | ||
| 1724 | "frac14": "\xbc", | ||
| 1725 | "frac14;": "\xbc", | ||
| 1726 | "frac15;": "\u2155", | ||
| 1727 | "frac16;": "\u2159", | ||
| 1728 | "frac18;": "\u215b", | ||
| 1729 | "frac23;": "\u2154", | ||
| 1730 | "frac25;": "\u2156", | ||
| 1731 | "frac34": "\xbe", | ||
| 1732 | "frac34;": "\xbe", | ||
| 1733 | "frac35;": "\u2157", | ||
| 1734 | "frac38;": "\u215c", | ||
| 1735 | "frac45;": "\u2158", | ||
| 1736 | "frac56;": "\u215a", | ||
| 1737 | "frac58;": "\u215d", | ||
| 1738 | "frac78;": "\u215e", | ||
| 1739 | "frasl;": "\u2044", | ||
| 1740 | "frown;": "\u2322", | ||
| 1741 | "fscr;": "\U0001d4bb", | ||
| 1742 | "gE;": "\u2267", | ||
| 1743 | "gEl;": "\u2a8c", | ||
| 1744 | "gacute;": "\u01f5", | ||
| 1745 | "gamma;": "\u03b3", | ||
| 1746 | "gammad;": "\u03dd", | ||
| 1747 | "gap;": "\u2a86", | ||
| 1748 | "gbreve;": "\u011f", | ||
| 1749 | "gcirc;": "\u011d", | ||
| 1750 | "gcy;": "\u0433", | ||
| 1751 | "gdot;": "\u0121", | ||
| 1752 | "ge;": "\u2265", | ||
| 1753 | "gel;": "\u22db", | ||
| 1754 | "geq;": "\u2265", | ||
| 1755 | "geqq;": "\u2267", | ||
| 1756 | "geqslant;": "\u2a7e", | ||
| 1757 | "ges;": "\u2a7e", | ||
| 1758 | "gescc;": "\u2aa9", | ||
| 1759 | "gesdot;": "\u2a80", | ||
| 1760 | "gesdoto;": "\u2a82", | ||
| 1761 | "gesdotol;": "\u2a84", | ||
| 1762 | "gesl;": "\u22db\ufe00", | ||
| 1763 | "gesles;": "\u2a94", | ||
| 1764 | "gfr;": "\U0001d524", | ||
| 1765 | "gg;": "\u226b", | ||
| 1766 | "ggg;": "\u22d9", | ||
| 1767 | "gimel;": "\u2137", | ||
| 1768 | "gjcy;": "\u0453", | ||
| 1769 | "gl;": "\u2277", | ||
| 1770 | "glE;": "\u2a92", | ||
| 1771 | "gla;": "\u2aa5", | ||
| 1772 | "glj;": "\u2aa4", | ||
| 1773 | "gnE;": "\u2269", | ||
| 1774 | "gnap;": "\u2a8a", | ||
| 1775 | "gnapprox;": "\u2a8a", | ||
| 1776 | "gne;": "\u2a88", | ||
| 1777 | "gneq;": "\u2a88", | ||
| 1778 | "gneqq;": "\u2269", | ||
| 1779 | "gnsim;": "\u22e7", | ||
| 1780 | "gopf;": "\U0001d558", | ||
| 1781 | "grave;": "`", | ||
| 1782 | "gscr;": "\u210a", | ||
| 1783 | "gsim;": "\u2273", | ||
| 1784 | "gsime;": "\u2a8e", | ||
| 1785 | "gsiml;": "\u2a90", | ||
| 1786 | "gt": ">", | ||
| 1787 | "gt;": ">", | ||
| 1788 | "gtcc;": "\u2aa7", | ||
| 1789 | "gtcir;": "\u2a7a", | ||
| 1790 | "gtdot;": "\u22d7", | ||
| 1791 | "gtlPar;": "\u2995", | ||
| 1792 | "gtquest;": "\u2a7c", | ||
| 1793 | "gtrapprox;": "\u2a86", | ||
| 1794 | "gtrarr;": "\u2978", | ||
| 1795 | "gtrdot;": "\u22d7", | ||
| 1796 | "gtreqless;": "\u22db", | ||
| 1797 | "gtreqqless;": "\u2a8c", | ||
| 1798 | "gtrless;": "\u2277", | ||
| 1799 | "gtrsim;": "\u2273", | ||
| 1800 | "gvertneqq;": "\u2269\ufe00", | ||
| 1801 | "gvnE;": "\u2269\ufe00", | ||
| 1802 | "hArr;": "\u21d4", | ||
| 1803 | "hairsp;": "\u200a", | ||
| 1804 | "half;": "\xbd", | ||
| 1805 | "hamilt;": "\u210b", | ||
| 1806 | "hardcy;": "\u044a", | ||
| 1807 | "harr;": "\u2194", | ||
| 1808 | "harrcir;": "\u2948", | ||
| 1809 | "harrw;": "\u21ad", | ||
| 1810 | "hbar;": "\u210f", | ||
| 1811 | "hcirc;": "\u0125", | ||
| 1812 | "hearts;": "\u2665", | ||
| 1813 | "heartsuit;": "\u2665", | ||
| 1814 | "hellip;": "\u2026", | ||
| 1815 | "hercon;": "\u22b9", | ||
| 1816 | "hfr;": "\U0001d525", | ||
| 1817 | "hksearow;": "\u2925", | ||
| 1818 | "hkswarow;": "\u2926", | ||
| 1819 | "hoarr;": "\u21ff", | ||
| 1820 | "homtht;": "\u223b", | ||
| 1821 | "hookleftarrow;": "\u21a9", | ||
| 1822 | "hookrightarrow;": "\u21aa", | ||
| 1823 | "hopf;": "\U0001d559", | ||
| 1824 | "horbar;": "\u2015", | ||
| 1825 | "hscr;": "\U0001d4bd", | ||
| 1826 | "hslash;": "\u210f", | ||
| 1827 | "hstrok;": "\u0127", | ||
| 1828 | "hybull;": "\u2043", | ||
| 1829 | "hyphen;": "\u2010", | ||
| 1830 | "iacute": "\xed", | ||
| 1831 | "iacute;": "\xed", | ||
| 1832 | "ic;": "\u2063", | ||
| 1833 | "icirc": "\xee", | ||
| 1834 | "icirc;": "\xee", | ||
| 1835 | "icy;": "\u0438", | ||
| 1836 | "iecy;": "\u0435", | ||
| 1837 | "iexcl": "\xa1", | ||
| 1838 | "iexcl;": "\xa1", | ||
| 1839 | "iff;": "\u21d4", | ||
| 1840 | "ifr;": "\U0001d526", | ||
| 1841 | "igrave": "\xec", | ||
| 1842 | "igrave;": "\xec", | ||
| 1843 | "ii;": "\u2148", | ||
| 1844 | "iiiint;": "\u2a0c", | ||
| 1845 | "iiint;": "\u222d", | ||
| 1846 | "iinfin;": "\u29dc", | ||
| 1847 | "iiota;": "\u2129", | ||
| 1848 | "ijlig;": "\u0133", | ||
| 1849 | "imacr;": "\u012b", | ||
| 1850 | "image;": "\u2111", | ||
| 1851 | "imagline;": "\u2110", | ||
| 1852 | "imagpart;": "\u2111", | ||
| 1853 | "imath;": "\u0131", | ||
| 1854 | "imof;": "\u22b7", | ||
| 1855 | "imped;": "\u01b5", | ||
| 1856 | "in;": "\u2208", | ||
| 1857 | "incare;": "\u2105", | ||
| 1858 | "infin;": "\u221e", | ||
| 1859 | "infintie;": "\u29dd", | ||
| 1860 | "inodot;": "\u0131", | ||
| 1861 | "int;": "\u222b", | ||
| 1862 | "intcal;": "\u22ba", | ||
| 1863 | "integers;": "\u2124", | ||
| 1864 | "intercal;": "\u22ba", | ||
| 1865 | "intlarhk;": "\u2a17", | ||
| 1866 | "intprod;": "\u2a3c", | ||
| 1867 | "iocy;": "\u0451", | ||
| 1868 | "iogon;": "\u012f", | ||
| 1869 | "iopf;": "\U0001d55a", | ||
| 1870 | "iota;": "\u03b9", | ||
| 1871 | "iprod;": "\u2a3c", | ||
| 1872 | "iquest": "\xbf", | ||
| 1873 | "iquest;": "\xbf", | ||
| 1874 | "iscr;": "\U0001d4be", | ||
| 1875 | "isin;": "\u2208", | ||
| 1876 | "isinE;": "\u22f9", | ||
| 1877 | "isindot;": "\u22f5", | ||
| 1878 | "isins;": "\u22f4", | ||
| 1879 | "isinsv;": "\u22f3", | ||
| 1880 | "isinv;": "\u2208", | ||
| 1881 | "it;": "\u2062", | ||
| 1882 | "itilde;": "\u0129", | ||
| 1883 | "iukcy;": "\u0456", | ||
| 1884 | "iuml": "\xef", | ||
| 1885 | "iuml;": "\xef", | ||
| 1886 | "jcirc;": "\u0135", | ||
| 1887 | "jcy;": "\u0439", | ||
| 1888 | "jfr;": "\U0001d527", | ||
| 1889 | "jmath;": "\u0237", | ||
| 1890 | "jopf;": "\U0001d55b", | ||
| 1891 | "jscr;": "\U0001d4bf", | ||
| 1892 | "jsercy;": "\u0458", | ||
| 1893 | "jukcy;": "\u0454", | ||
| 1894 | "kappa;": "\u03ba", | ||
| 1895 | "kappav;": "\u03f0", | ||
| 1896 | "kcedil;": "\u0137", | ||
| 1897 | "kcy;": "\u043a", | ||
| 1898 | "kfr;": "\U0001d528", | ||
| 1899 | "kgreen;": "\u0138", | ||
| 1900 | "khcy;": "\u0445", | ||
| 1901 | "kjcy;": "\u045c", | ||
| 1902 | "kopf;": "\U0001d55c", | ||
| 1903 | "kscr;": "\U0001d4c0", | ||
| 1904 | "lAarr;": "\u21da", | ||
| 1905 | "lArr;": "\u21d0", | ||
| 1906 | "lAtail;": "\u291b", | ||
| 1907 | "lBarr;": "\u290e", | ||
| 1908 | "lE;": "\u2266", | ||
| 1909 | "lEg;": "\u2a8b", | ||
| 1910 | "lHar;": "\u2962", | ||
| 1911 | "lacute;": "\u013a", | ||
| 1912 | "laemptyv;": "\u29b4", | ||
| 1913 | "lagran;": "\u2112", | ||
| 1914 | "lambda;": "\u03bb", | ||
| 1915 | "lang;": "\u27e8", | ||
| 1916 | "langd;": "\u2991", | ||
| 1917 | "langle;": "\u27e8", | ||
| 1918 | "lap;": "\u2a85", | ||
| 1919 | "laquo": "\xab", | ||
| 1920 | "laquo;": "\xab", | ||
| 1921 | "larr;": "\u2190", | ||
| 1922 | "larrb;": "\u21e4", | ||
| 1923 | "larrbfs;": "\u291f", | ||
| 1924 | "larrfs;": "\u291d", | ||
| 1925 | "larrhk;": "\u21a9", | ||
| 1926 | "larrlp;": "\u21ab", | ||
| 1927 | "larrpl;": "\u2939", | ||
| 1928 | "larrsim;": "\u2973", | ||
| 1929 | "larrtl;": "\u21a2", | ||
| 1930 | "lat;": "\u2aab", | ||
| 1931 | "latail;": "\u2919", | ||
| 1932 | "late;": "\u2aad", | ||
| 1933 | "lates;": "\u2aad\ufe00", | ||
| 1934 | "lbarr;": "\u290c", | ||
| 1935 | "lbbrk;": "\u2772", | ||
| 1936 | "lbrace;": "{", | ||
| 1937 | "lbrack;": "[", | ||
| 1938 | "lbrke;": "\u298b", | ||
| 1939 | "lbrksld;": "\u298f", | ||
| 1940 | "lbrkslu;": "\u298d", | ||
| 1941 | "lcaron;": "\u013e", | ||
| 1942 | "lcedil;": "\u013c", | ||
| 1943 | "lceil;": "\u2308", | ||
| 1944 | "lcub;": "{", | ||
| 1945 | "lcy;": "\u043b", | ||
| 1946 | "ldca;": "\u2936", | ||
| 1947 | "ldquo;": "\u201c", | ||
| 1948 | "ldquor;": "\u201e", | ||
| 1949 | "ldrdhar;": "\u2967", | ||
| 1950 | "ldrushar;": "\u294b", | ||
| 1951 | "ldsh;": "\u21b2", | ||
| 1952 | "le;": "\u2264", | ||
| 1953 | "leftarrow;": "\u2190", | ||
| 1954 | "leftarrowtail;": "\u21a2", | ||
| 1955 | "leftharpoondown;": "\u21bd", | ||
| 1956 | "leftharpoonup;": "\u21bc", | ||
| 1957 | "leftleftarrows;": "\u21c7", | ||
| 1958 | "leftrightarrow;": "\u2194", | ||
| 1959 | "leftrightarrows;": "\u21c6", | ||
| 1960 | "leftrightharpoons;": "\u21cb", | ||
| 1961 | "leftrightsquigarrow;": "\u21ad", | ||
| 1962 | "leftthreetimes;": "\u22cb", | ||
| 1963 | "leg;": "\u22da", | ||
| 1964 | "leq;": "\u2264", | ||
| 1965 | "leqq;": "\u2266", | ||
| 1966 | "leqslant;": "\u2a7d", | ||
| 1967 | "les;": "\u2a7d", | ||
| 1968 | "lescc;": "\u2aa8", | ||
| 1969 | "lesdot;": "\u2a7f", | ||
| 1970 | "lesdoto;": "\u2a81", | ||
| 1971 | "lesdotor;": "\u2a83", | ||
| 1972 | "lesg;": "\u22da\ufe00", | ||
| 1973 | "lesges;": "\u2a93", | ||
| 1974 | "lessapprox;": "\u2a85", | ||
| 1975 | "lessdot;": "\u22d6", | ||
| 1976 | "lesseqgtr;": "\u22da", | ||
| 1977 | "lesseqqgtr;": "\u2a8b", | ||
| 1978 | "lessgtr;": "\u2276", | ||
| 1979 | "lesssim;": "\u2272", | ||
| 1980 | "lfisht;": "\u297c", | ||
| 1981 | "lfloor;": "\u230a", | ||
| 1982 | "lfr;": "\U0001d529", | ||
| 1983 | "lg;": "\u2276", | ||
| 1984 | "lgE;": "\u2a91", | ||
| 1985 | "lhard;": "\u21bd", | ||
| 1986 | "lharu;": "\u21bc", | ||
| 1987 | "lharul;": "\u296a", | ||
| 1988 | "lhblk;": "\u2584", | ||
| 1989 | "ljcy;": "\u0459", | ||
| 1990 | "ll;": "\u226a", | ||
| 1991 | "llarr;": "\u21c7", | ||
| 1992 | "llcorner;": "\u231e", | ||
| 1993 | "llhard;": "\u296b", | ||
| 1994 | "lltri;": "\u25fa", | ||
| 1995 | "lmidot;": "\u0140", | ||
| 1996 | "lmoust;": "\u23b0", | ||
| 1997 | "lmoustache;": "\u23b0", | ||
| 1998 | "lnE;": "\u2268", | ||
| 1999 | "lnap;": "\u2a89", | ||
| 2000 | "lnapprox;": "\u2a89", | ||
| 2001 | "lne;": "\u2a87", | ||
| 2002 | "lneq;": "\u2a87", | ||
| 2003 | "lneqq;": "\u2268", | ||
| 2004 | "lnsim;": "\u22e6", | ||
| 2005 | "loang;": "\u27ec", | ||
| 2006 | "loarr;": "\u21fd", | ||
| 2007 | "lobrk;": "\u27e6", | ||
| 2008 | "longleftarrow;": "\u27f5", | ||
| 2009 | "longleftrightarrow;": "\u27f7", | ||
| 2010 | "longmapsto;": "\u27fc", | ||
| 2011 | "longrightarrow;": "\u27f6", | ||
| 2012 | "looparrowleft;": "\u21ab", | ||
| 2013 | "looparrowright;": "\u21ac", | ||
| 2014 | "lopar;": "\u2985", | ||
| 2015 | "lopf;": "\U0001d55d", | ||
| 2016 | "loplus;": "\u2a2d", | ||
| 2017 | "lotimes;": "\u2a34", | ||
| 2018 | "lowast;": "\u2217", | ||
| 2019 | "lowbar;": "_", | ||
| 2020 | "loz;": "\u25ca", | ||
| 2021 | "lozenge;": "\u25ca", | ||
| 2022 | "lozf;": "\u29eb", | ||
| 2023 | "lpar;": "(", | ||
| 2024 | "lparlt;": "\u2993", | ||
| 2025 | "lrarr;": "\u21c6", | ||
| 2026 | "lrcorner;": "\u231f", | ||
| 2027 | "lrhar;": "\u21cb", | ||
| 2028 | "lrhard;": "\u296d", | ||
| 2029 | "lrm;": "\u200e", | ||
| 2030 | "lrtri;": "\u22bf", | ||
| 2031 | "lsaquo;": "\u2039", | ||
| 2032 | "lscr;": "\U0001d4c1", | ||
| 2033 | "lsh;": "\u21b0", | ||
| 2034 | "lsim;": "\u2272", | ||
| 2035 | "lsime;": "\u2a8d", | ||
| 2036 | "lsimg;": "\u2a8f", | ||
| 2037 | "lsqb;": "[", | ||
| 2038 | "lsquo;": "\u2018", | ||
| 2039 | "lsquor;": "\u201a", | ||
| 2040 | "lstrok;": "\u0142", | ||
| 2041 | "lt": "<", | ||
| 2042 | "lt;": "<", | ||
| 2043 | "ltcc;": "\u2aa6", | ||
| 2044 | "ltcir;": "\u2a79", | ||
| 2045 | "ltdot;": "\u22d6", | ||
| 2046 | "lthree;": "\u22cb", | ||
| 2047 | "ltimes;": "\u22c9", | ||
| 2048 | "ltlarr;": "\u2976", | ||
| 2049 | "ltquest;": "\u2a7b", | ||
| 2050 | "ltrPar;": "\u2996", | ||
| 2051 | "ltri;": "\u25c3", | ||
| 2052 | "ltrie;": "\u22b4", | ||
| 2053 | "ltrif;": "\u25c2", | ||
| 2054 | "lurdshar;": "\u294a", | ||
| 2055 | "luruhar;": "\u2966", | ||
| 2056 | "lvertneqq;": "\u2268\ufe00", | ||
| 2057 | "lvnE;": "\u2268\ufe00", | ||
| 2058 | "mDDot;": "\u223a", | ||
| 2059 | "macr": "\xaf", | ||
| 2060 | "macr;": "\xaf", | ||
| 2061 | "male;": "\u2642", | ||
| 2062 | "malt;": "\u2720", | ||
| 2063 | "maltese;": "\u2720", | ||
| 2064 | "map;": "\u21a6", | ||
| 2065 | "mapsto;": "\u21a6", | ||
| 2066 | "mapstodown;": "\u21a7", | ||
| 2067 | "mapstoleft;": "\u21a4", | ||
| 2068 | "mapstoup;": "\u21a5", | ||
| 2069 | "marker;": "\u25ae", | ||
| 2070 | "mcomma;": "\u2a29", | ||
| 2071 | "mcy;": "\u043c", | ||
| 2072 | "mdash;": "\u2014", | ||
| 2073 | "measuredangle;": "\u2221", | ||
| 2074 | "mfr;": "\U0001d52a", | ||
| 2075 | "mho;": "\u2127", | ||
| 2076 | "micro": "\xb5", | ||
| 2077 | "micro;": "\xb5", | ||
| 2078 | "mid;": "\u2223", | ||
| 2079 | "midast;": "*", | ||
| 2080 | "midcir;": "\u2af0", | ||
| 2081 | "middot": "\xb7", | ||
| 2082 | "middot;": "\xb7", | ||
| 2083 | "minus;": "\u2212", | ||
| 2084 | "minusb;": "\u229f", | ||
| 2085 | "minusd;": "\u2238", | ||
| 2086 | "minusdu;": "\u2a2a", | ||
| 2087 | "mlcp;": "\u2adb", | ||
| 2088 | "mldr;": "\u2026", | ||
| 2089 | "mnplus;": "\u2213", | ||
| 2090 | "models;": "\u22a7", | ||
| 2091 | "mopf;": "\U0001d55e", | ||
| 2092 | "mp;": "\u2213", | ||
| 2093 | "mscr;": "\U0001d4c2", | ||
| 2094 | "mstpos;": "\u223e", | ||
| 2095 | "mu;": "\u03bc", | ||
| 2096 | "multimap;": "\u22b8", | ||
| 2097 | "mumap;": "\u22b8", | ||
| 2098 | "nGg;": "\u22d9\u0338", | ||
| 2099 | "nGt;": "\u226b\u20d2", | ||
| 2100 | "nGtv;": "\u226b\u0338", | ||
| 2101 | "nLeftarrow;": "\u21cd", | ||
| 2102 | "nLeftrightarrow;": "\u21ce", | ||
| 2103 | "nLl;": "\u22d8\u0338", | ||
| 2104 | "nLt;": "\u226a\u20d2", | ||
| 2105 | "nLtv;": "\u226a\u0338", | ||
| 2106 | "nRightarrow;": "\u21cf", | ||
| 2107 | "nVDash;": "\u22af", | ||
| 2108 | "nVdash;": "\u22ae", | ||
| 2109 | "nabla;": "\u2207", | ||
| 2110 | "nacute;": "\u0144", | ||
| 2111 | "nang;": "\u2220\u20d2", | ||
| 2112 | "nap;": "\u2249", | ||
| 2113 | "napE;": "\u2a70\u0338", | ||
| 2114 | "napid;": "\u224b\u0338", | ||
| 2115 | "napos;": "\u0149", | ||
| 2116 | "napprox;": "\u2249", | ||
| 2117 | "natur;": "\u266e", | ||
| 2118 | "natural;": "\u266e", | ||
| 2119 | "naturals;": "\u2115", | ||
| 2120 | "nbsp": "\xa0", | ||
| 2121 | "nbsp;": "\xa0", | ||
| 2122 | "nbump;": "\u224e\u0338", | ||
| 2123 | "nbumpe;": "\u224f\u0338", | ||
| 2124 | "ncap;": "\u2a43", | ||
| 2125 | "ncaron;": "\u0148", | ||
| 2126 | "ncedil;": "\u0146", | ||
| 2127 | "ncong;": "\u2247", | ||
| 2128 | "ncongdot;": "\u2a6d\u0338", | ||
| 2129 | "ncup;": "\u2a42", | ||
| 2130 | "ncy;": "\u043d", | ||
| 2131 | "ndash;": "\u2013", | ||
| 2132 | "ne;": "\u2260", | ||
| 2133 | "neArr;": "\u21d7", | ||
| 2134 | "nearhk;": "\u2924", | ||
| 2135 | "nearr;": "\u2197", | ||
| 2136 | "nearrow;": "\u2197", | ||
| 2137 | "nedot;": "\u2250\u0338", | ||
| 2138 | "nequiv;": "\u2262", | ||
| 2139 | "nesear;": "\u2928", | ||
| 2140 | "nesim;": "\u2242\u0338", | ||
| 2141 | "nexist;": "\u2204", | ||
| 2142 | "nexists;": "\u2204", | ||
| 2143 | "nfr;": "\U0001d52b", | ||
| 2144 | "ngE;": "\u2267\u0338", | ||
| 2145 | "nge;": "\u2271", | ||
| 2146 | "ngeq;": "\u2271", | ||
| 2147 | "ngeqq;": "\u2267\u0338", | ||
| 2148 | "ngeqslant;": "\u2a7e\u0338", | ||
| 2149 | "nges;": "\u2a7e\u0338", | ||
| 2150 | "ngsim;": "\u2275", | ||
| 2151 | "ngt;": "\u226f", | ||
| 2152 | "ngtr;": "\u226f", | ||
| 2153 | "nhArr;": "\u21ce", | ||
| 2154 | "nharr;": "\u21ae", | ||
| 2155 | "nhpar;": "\u2af2", | ||
| 2156 | "ni;": "\u220b", | ||
| 2157 | "nis;": "\u22fc", | ||
| 2158 | "nisd;": "\u22fa", | ||
| 2159 | "niv;": "\u220b", | ||
| 2160 | "njcy;": "\u045a", | ||
| 2161 | "nlArr;": "\u21cd", | ||
| 2162 | "nlE;": "\u2266\u0338", | ||
| 2163 | "nlarr;": "\u219a", | ||
| 2164 | "nldr;": "\u2025", | ||
| 2165 | "nle;": "\u2270", | ||
| 2166 | "nleftarrow;": "\u219a", | ||
| 2167 | "nleftrightarrow;": "\u21ae", | ||
| 2168 | "nleq;": "\u2270", | ||
| 2169 | "nleqq;": "\u2266\u0338", | ||
| 2170 | "nleqslant;": "\u2a7d\u0338", | ||
| 2171 | "nles;": "\u2a7d\u0338", | ||
| 2172 | "nless;": "\u226e", | ||
| 2173 | "nlsim;": "\u2274", | ||
| 2174 | "nlt;": "\u226e", | ||
| 2175 | "nltri;": "\u22ea", | ||
| 2176 | "nltrie;": "\u22ec", | ||
| 2177 | "nmid;": "\u2224", | ||
| 2178 | "nopf;": "\U0001d55f", | ||
| 2179 | "not": "\xac", | ||
| 2180 | "not;": "\xac", | ||
| 2181 | "notin;": "\u2209", | ||
| 2182 | "notinE;": "\u22f9\u0338", | ||
| 2183 | "notindot;": "\u22f5\u0338", | ||
| 2184 | "notinva;": "\u2209", | ||
| 2185 | "notinvb;": "\u22f7", | ||
| 2186 | "notinvc;": "\u22f6", | ||
| 2187 | "notni;": "\u220c", | ||
| 2188 | "notniva;": "\u220c", | ||
| 2189 | "notnivb;": "\u22fe", | ||
| 2190 | "notnivc;": "\u22fd", | ||
| 2191 | "npar;": "\u2226", | ||
| 2192 | "nparallel;": "\u2226", | ||
| 2193 | "nparsl;": "\u2afd\u20e5", | ||
| 2194 | "npart;": "\u2202\u0338", | ||
| 2195 | "npolint;": "\u2a14", | ||
| 2196 | "npr;": "\u2280", | ||
| 2197 | "nprcue;": "\u22e0", | ||
| 2198 | "npre;": "\u2aaf\u0338", | ||
| 2199 | "nprec;": "\u2280", | ||
| 2200 | "npreceq;": "\u2aaf\u0338", | ||
| 2201 | "nrArr;": "\u21cf", | ||
| 2202 | "nrarr;": "\u219b", | ||
| 2203 | "nrarrc;": "\u2933\u0338", | ||
| 2204 | "nrarrw;": "\u219d\u0338", | ||
| 2205 | "nrightarrow;": "\u219b", | ||
| 2206 | "nrtri;": "\u22eb", | ||
| 2207 | "nrtrie;": "\u22ed", | ||
| 2208 | "nsc;": "\u2281", | ||
| 2209 | "nsccue;": "\u22e1", | ||
| 2210 | "nsce;": "\u2ab0\u0338", | ||
| 2211 | "nscr;": "\U0001d4c3", | ||
| 2212 | "nshortmid;": "\u2224", | ||
| 2213 | "nshortparallel;": "\u2226", | ||
| 2214 | "nsim;": "\u2241", | ||
| 2215 | "nsime;": "\u2244", | ||
| 2216 | "nsimeq;": "\u2244", | ||
| 2217 | "nsmid;": "\u2224", | ||
| 2218 | "nspar;": "\u2226", | ||
| 2219 | "nsqsube;": "\u22e2", | ||
| 2220 | "nsqsupe;": "\u22e3", | ||
| 2221 | "nsub;": "\u2284", | ||
| 2222 | "nsubE;": "\u2ac5\u0338", | ||
| 2223 | "nsube;": "\u2288", | ||
| 2224 | "nsubset;": "\u2282\u20d2", | ||
| 2225 | "nsubseteq;": "\u2288", | ||
| 2226 | "nsubseteqq;": "\u2ac5\u0338", | ||
| 2227 | "nsucc;": "\u2281", | ||
| 2228 | "nsucceq;": "\u2ab0\u0338", | ||
| 2229 | "nsup;": "\u2285", | ||
| 2230 | "nsupE;": "\u2ac6\u0338", | ||
| 2231 | "nsupe;": "\u2289", | ||
| 2232 | "nsupset;": "\u2283\u20d2", | ||
| 2233 | "nsupseteq;": "\u2289", | ||
| 2234 | "nsupseteqq;": "\u2ac6\u0338", | ||
| 2235 | "ntgl;": "\u2279", | ||
| 2236 | "ntilde": "\xf1", | ||
| 2237 | "ntilde;": "\xf1", | ||
| 2238 | "ntlg;": "\u2278", | ||
| 2239 | "ntriangleleft;": "\u22ea", | ||
| 2240 | "ntrianglelefteq;": "\u22ec", | ||
| 2241 | "ntriangleright;": "\u22eb", | ||
| 2242 | "ntrianglerighteq;": "\u22ed", | ||
| 2243 | "nu;": "\u03bd", | ||
| 2244 | "num;": "#", | ||
| 2245 | "numero;": "\u2116", | ||
| 2246 | "numsp;": "\u2007", | ||
| 2247 | "nvDash;": "\u22ad", | ||
| 2248 | "nvHarr;": "\u2904", | ||
| 2249 | "nvap;": "\u224d\u20d2", | ||
| 2250 | "nvdash;": "\u22ac", | ||
| 2251 | "nvge;": "\u2265\u20d2", | ||
| 2252 | "nvgt;": ">\u20d2", | ||
| 2253 | "nvinfin;": "\u29de", | ||
| 2254 | "nvlArr;": "\u2902", | ||
| 2255 | "nvle;": "\u2264\u20d2", | ||
| 2256 | "nvlt;": "<\u20d2", | ||
| 2257 | "nvltrie;": "\u22b4\u20d2", | ||
| 2258 | "nvrArr;": "\u2903", | ||
| 2259 | "nvrtrie;": "\u22b5\u20d2", | ||
| 2260 | "nvsim;": "\u223c\u20d2", | ||
| 2261 | "nwArr;": "\u21d6", | ||
| 2262 | "nwarhk;": "\u2923", | ||
| 2263 | "nwarr;": "\u2196", | ||
| 2264 | "nwarrow;": "\u2196", | ||
| 2265 | "nwnear;": "\u2927", | ||
| 2266 | "oS;": "\u24c8", | ||
| 2267 | "oacute": "\xf3", | ||
| 2268 | "oacute;": "\xf3", | ||
| 2269 | "oast;": "\u229b", | ||
| 2270 | "ocir;": "\u229a", | ||
| 2271 | "ocirc": "\xf4", | ||
| 2272 | "ocirc;": "\xf4", | ||
| 2273 | "ocy;": "\u043e", | ||
| 2274 | "odash;": "\u229d", | ||
| 2275 | "odblac;": "\u0151", | ||
| 2276 | "odiv;": "\u2a38", | ||
| 2277 | "odot;": "\u2299", | ||
| 2278 | "odsold;": "\u29bc", | ||
| 2279 | "oelig;": "\u0153", | ||
| 2280 | "ofcir;": "\u29bf", | ||
| 2281 | "ofr;": "\U0001d52c", | ||
| 2282 | "ogon;": "\u02db", | ||
| 2283 | "ograve": "\xf2", | ||
| 2284 | "ograve;": "\xf2", | ||
| 2285 | "ogt;": "\u29c1", | ||
| 2286 | "ohbar;": "\u29b5", | ||
| 2287 | "ohm;": "\u03a9", | ||
| 2288 | "oint;": "\u222e", | ||
| 2289 | "olarr;": "\u21ba", | ||
| 2290 | "olcir;": "\u29be", | ||
| 2291 | "olcross;": "\u29bb", | ||
| 2292 | "oline;": "\u203e", | ||
| 2293 | "olt;": "\u29c0", | ||
| 2294 | "omacr;": "\u014d", | ||
| 2295 | "omega;": "\u03c9", | ||
| 2296 | "omicron;": "\u03bf", | ||
| 2297 | "omid;": "\u29b6", | ||
| 2298 | "ominus;": "\u2296", | ||
| 2299 | "oopf;": "\U0001d560", | ||
| 2300 | "opar;": "\u29b7", | ||
| 2301 | "operp;": "\u29b9", | ||
| 2302 | "oplus;": "\u2295", | ||
| 2303 | "or;": "\u2228", | ||
| 2304 | "orarr;": "\u21bb", | ||
| 2305 | "ord;": "\u2a5d", | ||
| 2306 | "order;": "\u2134", | ||
| 2307 | "orderof;": "\u2134", | ||
| 2308 | "ordf": "\xaa", | ||
| 2309 | "ordf;": "\xaa", | ||
| 2310 | "ordm": "\xba", | ||
| 2311 | "ordm;": "\xba", | ||
| 2312 | "origof;": "\u22b6", | ||
| 2313 | "oror;": "\u2a56", | ||
| 2314 | "orslope;": "\u2a57", | ||
| 2315 | "orv;": "\u2a5b", | ||
| 2316 | "oscr;": "\u2134", | ||
| 2317 | "oslash": "\xf8", | ||
| 2318 | "oslash;": "\xf8", | ||
| 2319 | "osol;": "\u2298", | ||
| 2320 | "otilde": "\xf5", | ||
| 2321 | "otilde;": "\xf5", | ||
| 2322 | "otimes;": "\u2297", | ||
| 2323 | "otimesas;": "\u2a36", | ||
| 2324 | "ouml": "\xf6", | ||
| 2325 | "ouml;": "\xf6", | ||
| 2326 | "ovbar;": "\u233d", | ||
| 2327 | "par;": "\u2225", | ||
| 2328 | "para": "\xb6", | ||
| 2329 | "para;": "\xb6", | ||
| 2330 | "parallel;": "\u2225", | ||
| 2331 | "parsim;": "\u2af3", | ||
| 2332 | "parsl;": "\u2afd", | ||
| 2333 | "part;": "\u2202", | ||
| 2334 | "pcy;": "\u043f", | ||
| 2335 | "percnt;": "%", | ||
| 2336 | "period;": ".", | ||
| 2337 | "permil;": "\u2030", | ||
| 2338 | "perp;": "\u22a5", | ||
| 2339 | "pertenk;": "\u2031", | ||
| 2340 | "pfr;": "\U0001d52d", | ||
| 2341 | "phi;": "\u03c6", | ||
| 2342 | "phiv;": "\u03d5", | ||
| 2343 | "phmmat;": "\u2133", | ||
| 2344 | "phone;": "\u260e", | ||
| 2345 | "pi;": "\u03c0", | ||
| 2346 | "pitchfork;": "\u22d4", | ||
| 2347 | "piv;": "\u03d6", | ||
| 2348 | "planck;": "\u210f", | ||
| 2349 | "planckh;": "\u210e", | ||
| 2350 | "plankv;": "\u210f", | ||
| 2351 | "plus;": "+", | ||
| 2352 | "plusacir;": "\u2a23", | ||
| 2353 | "plusb;": "\u229e", | ||
| 2354 | "pluscir;": "\u2a22", | ||
| 2355 | "plusdo;": "\u2214", | ||
| 2356 | "plusdu;": "\u2a25", | ||
| 2357 | "pluse;": "\u2a72", | ||
| 2358 | "plusmn": "\xb1", | ||
| 2359 | "plusmn;": "\xb1", | ||
| 2360 | "plussim;": "\u2a26", | ||
| 2361 | "plustwo;": "\u2a27", | ||
| 2362 | "pm;": "\xb1", | ||
| 2363 | "pointint;": "\u2a15", | ||
| 2364 | "popf;": "\U0001d561", | ||
| 2365 | "pound": "\xa3", | ||
| 2366 | "pound;": "\xa3", | ||
| 2367 | "pr;": "\u227a", | ||
| 2368 | "prE;": "\u2ab3", | ||
| 2369 | "prap;": "\u2ab7", | ||
| 2370 | "prcue;": "\u227c", | ||
| 2371 | "pre;": "\u2aaf", | ||
| 2372 | "prec;": "\u227a", | ||
| 2373 | "precapprox;": "\u2ab7", | ||
| 2374 | "preccurlyeq;": "\u227c", | ||
| 2375 | "preceq;": "\u2aaf", | ||
| 2376 | "precnapprox;": "\u2ab9", | ||
| 2377 | "precneqq;": "\u2ab5", | ||
| 2378 | "precnsim;": "\u22e8", | ||
| 2379 | "precsim;": "\u227e", | ||
| 2380 | "prime;": "\u2032", | ||
| 2381 | "primes;": "\u2119", | ||
| 2382 | "prnE;": "\u2ab5", | ||
| 2383 | "prnap;": "\u2ab9", | ||
| 2384 | "prnsim;": "\u22e8", | ||
| 2385 | "prod;": "\u220f", | ||
| 2386 | "profalar;": "\u232e", | ||
| 2387 | "profline;": "\u2312", | ||
| 2388 | "profsurf;": "\u2313", | ||
| 2389 | "prop;": "\u221d", | ||
| 2390 | "propto;": "\u221d", | ||
| 2391 | "prsim;": "\u227e", | ||
| 2392 | "prurel;": "\u22b0", | ||
| 2393 | "pscr;": "\U0001d4c5", | ||
| 2394 | "psi;": "\u03c8", | ||
| 2395 | "puncsp;": "\u2008", | ||
| 2396 | "qfr;": "\U0001d52e", | ||
| 2397 | "qint;": "\u2a0c", | ||
| 2398 | "qopf;": "\U0001d562", | ||
| 2399 | "qprime;": "\u2057", | ||
| 2400 | "qscr;": "\U0001d4c6", | ||
| 2401 | "quaternions;": "\u210d", | ||
| 2402 | "quatint;": "\u2a16", | ||
| 2403 | "quest;": "?", | ||
| 2404 | "questeq;": "\u225f", | ||
| 2405 | "quot": "\"", | ||
| 2406 | "quot;": "\"", | ||
| 2407 | "rAarr;": "\u21db", | ||
| 2408 | "rArr;": "\u21d2", | ||
| 2409 | "rAtail;": "\u291c", | ||
| 2410 | "rBarr;": "\u290f", | ||
| 2411 | "rHar;": "\u2964", | ||
| 2412 | "race;": "\u223d\u0331", | ||
| 2413 | "racute;": "\u0155", | ||
| 2414 | "radic;": "\u221a", | ||
| 2415 | "raemptyv;": "\u29b3", | ||
| 2416 | "rang;": "\u27e9", | ||
| 2417 | "rangd;": "\u2992", | ||
| 2418 | "range;": "\u29a5", | ||
| 2419 | "rangle;": "\u27e9", | ||
| 2420 | "raquo": "\xbb", | ||
| 2421 | "raquo;": "\xbb", | ||
| 2422 | "rarr;": "\u2192", | ||
| 2423 | "rarrap;": "\u2975", | ||
| 2424 | "rarrb;": "\u21e5", | ||
| 2425 | "rarrbfs;": "\u2920", | ||
| 2426 | "rarrc;": "\u2933", | ||
| 2427 | "rarrfs;": "\u291e", | ||
| 2428 | "rarrhk;": "\u21aa", | ||
| 2429 | "rarrlp;": "\u21ac", | ||
| 2430 | "rarrpl;": "\u2945", | ||
| 2431 | "rarrsim;": "\u2974", | ||
| 2432 | "rarrtl;": "\u21a3", | ||
| 2433 | "rarrw;": "\u219d", | ||
| 2434 | "ratail;": "\u291a", | ||
| 2435 | "ratio;": "\u2236", | ||
| 2436 | "rationals;": "\u211a", | ||
| 2437 | "rbarr;": "\u290d", | ||
| 2438 | "rbbrk;": "\u2773", | ||
| 2439 | "rbrace;": "}", | ||
| 2440 | "rbrack;": "]", | ||
| 2441 | "rbrke;": "\u298c", | ||
| 2442 | "rbrksld;": "\u298e", | ||
| 2443 | "rbrkslu;": "\u2990", | ||
| 2444 | "rcaron;": "\u0159", | ||
| 2445 | "rcedil;": "\u0157", | ||
| 2446 | "rceil;": "\u2309", | ||
| 2447 | "rcub;": "}", | ||
| 2448 | "rcy;": "\u0440", | ||
| 2449 | "rdca;": "\u2937", | ||
| 2450 | "rdldhar;": "\u2969", | ||
| 2451 | "rdquo;": "\u201d", | ||
| 2452 | "rdquor;": "\u201d", | ||
| 2453 | "rdsh;": "\u21b3", | ||
| 2454 | "real;": "\u211c", | ||
| 2455 | "realine;": "\u211b", | ||
| 2456 | "realpart;": "\u211c", | ||
| 2457 | "reals;": "\u211d", | ||
| 2458 | "rect;": "\u25ad", | ||
| 2459 | "reg": "\xae", | ||
| 2460 | "reg;": "\xae", | ||
| 2461 | "rfisht;": "\u297d", | ||
| 2462 | "rfloor;": "\u230b", | ||
| 2463 | "rfr;": "\U0001d52f", | ||
| 2464 | "rhard;": "\u21c1", | ||
| 2465 | "rharu;": "\u21c0", | ||
| 2466 | "rharul;": "\u296c", | ||
| 2467 | "rho;": "\u03c1", | ||
| 2468 | "rhov;": "\u03f1", | ||
| 2469 | "rightarrow;": "\u2192", | ||
| 2470 | "rightarrowtail;": "\u21a3", | ||
| 2471 | "rightharpoondown;": "\u21c1", | ||
| 2472 | "rightharpoonup;": "\u21c0", | ||
| 2473 | "rightleftarrows;": "\u21c4", | ||
| 2474 | "rightleftharpoons;": "\u21cc", | ||
| 2475 | "rightrightarrows;": "\u21c9", | ||
| 2476 | "rightsquigarrow;": "\u219d", | ||
| 2477 | "rightthreetimes;": "\u22cc", | ||
| 2478 | "ring;": "\u02da", | ||
| 2479 | "risingdotseq;": "\u2253", | ||
| 2480 | "rlarr;": "\u21c4", | ||
| 2481 | "rlhar;": "\u21cc", | ||
| 2482 | "rlm;": "\u200f", | ||
| 2483 | "rmoust;": "\u23b1", | ||
| 2484 | "rmoustache;": "\u23b1", | ||
| 2485 | "rnmid;": "\u2aee", | ||
| 2486 | "roang;": "\u27ed", | ||
| 2487 | "roarr;": "\u21fe", | ||
| 2488 | "robrk;": "\u27e7", | ||
| 2489 | "ropar;": "\u2986", | ||
| 2490 | "ropf;": "\U0001d563", | ||
| 2491 | "roplus;": "\u2a2e", | ||
| 2492 | "rotimes;": "\u2a35", | ||
| 2493 | "rpar;": ")", | ||
| 2494 | "rpargt;": "\u2994", | ||
| 2495 | "rppolint;": "\u2a12", | ||
| 2496 | "rrarr;": "\u21c9", | ||
| 2497 | "rsaquo;": "\u203a", | ||
| 2498 | "rscr;": "\U0001d4c7", | ||
| 2499 | "rsh;": "\u21b1", | ||
| 2500 | "rsqb;": "]", | ||
| 2501 | "rsquo;": "\u2019", | ||
| 2502 | "rsquor;": "\u2019", | ||
| 2503 | "rthree;": "\u22cc", | ||
| 2504 | "rtimes;": "\u22ca", | ||
| 2505 | "rtri;": "\u25b9", | ||
| 2506 | "rtrie;": "\u22b5", | ||
| 2507 | "rtrif;": "\u25b8", | ||
| 2508 | "rtriltri;": "\u29ce", | ||
| 2509 | "ruluhar;": "\u2968", | ||
| 2510 | "rx;": "\u211e", | ||
| 2511 | "sacute;": "\u015b", | ||
| 2512 | "sbquo;": "\u201a", | ||
| 2513 | "sc;": "\u227b", | ||
| 2514 | "scE;": "\u2ab4", | ||
| 2515 | "scap;": "\u2ab8", | ||
| 2516 | "scaron;": "\u0161", | ||
| 2517 | "sccue;": "\u227d", | ||
| 2518 | "sce;": "\u2ab0", | ||
| 2519 | "scedil;": "\u015f", | ||
| 2520 | "scirc;": "\u015d", | ||
| 2521 | "scnE;": "\u2ab6", | ||
| 2522 | "scnap;": "\u2aba", | ||
| 2523 | "scnsim;": "\u22e9", | ||
| 2524 | "scpolint;": "\u2a13", | ||
| 2525 | "scsim;": "\u227f", | ||
| 2526 | "scy;": "\u0441", | ||
| 2527 | "sdot;": "\u22c5", | ||
| 2528 | "sdotb;": "\u22a1", | ||
| 2529 | "sdote;": "\u2a66", | ||
| 2530 | "seArr;": "\u21d8", | ||
| 2531 | "searhk;": "\u2925", | ||
| 2532 | "searr;": "\u2198", | ||
| 2533 | "searrow;": "\u2198", | ||
| 2534 | "sect": "\xa7", | ||
| 2535 | "sect;": "\xa7", | ||
| 2536 | "semi;": ";", | ||
| 2537 | "seswar;": "\u2929", | ||
| 2538 | "setminus;": "\u2216", | ||
| 2539 | "setmn;": "\u2216", | ||
| 2540 | "sext;": "\u2736", | ||
| 2541 | "sfr;": "\U0001d530", | ||
| 2542 | "sfrown;": "\u2322", | ||
| 2543 | "sharp;": "\u266f", | ||
| 2544 | "shchcy;": "\u0449", | ||
| 2545 | "shcy;": "\u0448", | ||
| 2546 | "shortmid;": "\u2223", | ||
| 2547 | "shortparallel;": "\u2225", | ||
| 2548 | "shy": "\xad", | ||
| 2549 | "shy;": "\xad", | ||
| 2550 | "sigma;": "\u03c3", | ||
| 2551 | "sigmaf;": "\u03c2", | ||
| 2552 | "sigmav;": "\u03c2", | ||
| 2553 | "sim;": "\u223c", | ||
| 2554 | "simdot;": "\u2a6a", | ||
| 2555 | "sime;": "\u2243", | ||
| 2556 | "simeq;": "\u2243", | ||
| 2557 | "simg;": "\u2a9e", | ||
| 2558 | "simgE;": "\u2aa0", | ||
| 2559 | "siml;": "\u2a9d", | ||
| 2560 | "simlE;": "\u2a9f", | ||
| 2561 | "simne;": "\u2246", | ||
| 2562 | "simplus;": "\u2a24", | ||
| 2563 | "simrarr;": "\u2972", | ||
| 2564 | "slarr;": "\u2190", | ||
| 2565 | "smallsetminus;": "\u2216", | ||
| 2566 | "smashp;": "\u2a33", | ||
| 2567 | "smeparsl;": "\u29e4", | ||
| 2568 | "smid;": "\u2223", | ||
| 2569 | "smile;": "\u2323", | ||
| 2570 | "smt;": "\u2aaa", | ||
| 2571 | "smte;": "\u2aac", | ||
| 2572 | "smtes;": "\u2aac\ufe00", | ||
| 2573 | "softcy;": "\u044c", | ||
| 2574 | "sol;": "/", | ||
| 2575 | "solb;": "\u29c4", | ||
| 2576 | "solbar;": "\u233f", | ||
| 2577 | "sopf;": "\U0001d564", | ||
| 2578 | "spades;": "\u2660", | ||
| 2579 | "spadesuit;": "\u2660", | ||
| 2580 | "spar;": "\u2225", | ||
| 2581 | "sqcap;": "\u2293", | ||
| 2582 | "sqcaps;": "\u2293\ufe00", | ||
| 2583 | "sqcup;": "\u2294", | ||
| 2584 | "sqcups;": "\u2294\ufe00", | ||
| 2585 | "sqsub;": "\u228f", | ||
| 2586 | "sqsube;": "\u2291", | ||
| 2587 | "sqsubset;": "\u228f", | ||
| 2588 | "sqsubseteq;": "\u2291", | ||
| 2589 | "sqsup;": "\u2290", | ||
| 2590 | "sqsupe;": "\u2292", | ||
| 2591 | "sqsupset;": "\u2290", | ||
| 2592 | "sqsupseteq;": "\u2292", | ||
| 2593 | "squ;": "\u25a1", | ||
| 2594 | "square;": "\u25a1", | ||
| 2595 | "squarf;": "\u25aa", | ||
| 2596 | "squf;": "\u25aa", | ||
| 2597 | "srarr;": "\u2192", | ||
| 2598 | "sscr;": "\U0001d4c8", | ||
| 2599 | "ssetmn;": "\u2216", | ||
| 2600 | "ssmile;": "\u2323", | ||
| 2601 | "sstarf;": "\u22c6", | ||
| 2602 | "star;": "\u2606", | ||
| 2603 | "starf;": "\u2605", | ||
| 2604 | "straightepsilon;": "\u03f5", | ||
| 2605 | "straightphi;": "\u03d5", | ||
| 2606 | "strns;": "\xaf", | ||
| 2607 | "sub;": "\u2282", | ||
| 2608 | "subE;": "\u2ac5", | ||
| 2609 | "subdot;": "\u2abd", | ||
| 2610 | "sube;": "\u2286", | ||
| 2611 | "subedot;": "\u2ac3", | ||
| 2612 | "submult;": "\u2ac1", | ||
| 2613 | "subnE;": "\u2acb", | ||
| 2614 | "subne;": "\u228a", | ||
| 2615 | "subplus;": "\u2abf", | ||
| 2616 | "subrarr;": "\u2979", | ||
| 2617 | "subset;": "\u2282", | ||
| 2618 | "subseteq;": "\u2286", | ||
| 2619 | "subseteqq;": "\u2ac5", | ||
| 2620 | "subsetneq;": "\u228a", | ||
| 2621 | "subsetneqq;": "\u2acb", | ||
| 2622 | "subsim;": "\u2ac7", | ||
| 2623 | "subsub;": "\u2ad5", | ||
| 2624 | "subsup;": "\u2ad3", | ||
| 2625 | "succ;": "\u227b", | ||
| 2626 | "succapprox;": "\u2ab8", | ||
| 2627 | "succcurlyeq;": "\u227d", | ||
| 2628 | "succeq;": "\u2ab0", | ||
| 2629 | "succnapprox;": "\u2aba", | ||
| 2630 | "succneqq;": "\u2ab6", | ||
| 2631 | "succnsim;": "\u22e9", | ||
| 2632 | "succsim;": "\u227f", | ||
| 2633 | "sum;": "\u2211", | ||
| 2634 | "sung;": "\u266a", | ||
| 2635 | "sup1": "\xb9", | ||
| 2636 | "sup1;": "\xb9", | ||
| 2637 | "sup2": "\xb2", | ||
| 2638 | "sup2;": "\xb2", | ||
| 2639 | "sup3": "\xb3", | ||
| 2640 | "sup3;": "\xb3", | ||
| 2641 | "sup;": "\u2283", | ||
| 2642 | "supE;": "\u2ac6", | ||
| 2643 | "supdot;": "\u2abe", | ||
| 2644 | "supdsub;": "\u2ad8", | ||
| 2645 | "supe;": "\u2287", | ||
| 2646 | "supedot;": "\u2ac4", | ||
| 2647 | "suphsol;": "\u27c9", | ||
| 2648 | "suphsub;": "\u2ad7", | ||
| 2649 | "suplarr;": "\u297b", | ||
| 2650 | "supmult;": "\u2ac2", | ||
| 2651 | "supnE;": "\u2acc", | ||
| 2652 | "supne;": "\u228b", | ||
| 2653 | "supplus;": "\u2ac0", | ||
| 2654 | "supset;": "\u2283", | ||
| 2655 | "supseteq;": "\u2287", | ||
| 2656 | "supseteqq;": "\u2ac6", | ||
| 2657 | "supsetneq;": "\u228b", | ||
| 2658 | "supsetneqq;": "\u2acc", | ||
| 2659 | "supsim;": "\u2ac8", | ||
| 2660 | "supsub;": "\u2ad4", | ||
| 2661 | "supsup;": "\u2ad6", | ||
| 2662 | "swArr;": "\u21d9", | ||
| 2663 | "swarhk;": "\u2926", | ||
| 2664 | "swarr;": "\u2199", | ||
| 2665 | "swarrow;": "\u2199", | ||
| 2666 | "swnwar;": "\u292a", | ||
| 2667 | "szlig": "\xdf", | ||
| 2668 | "szlig;": "\xdf", | ||
| 2669 | "target;": "\u2316", | ||
| 2670 | "tau;": "\u03c4", | ||
| 2671 | "tbrk;": "\u23b4", | ||
| 2672 | "tcaron;": "\u0165", | ||
| 2673 | "tcedil;": "\u0163", | ||
| 2674 | "tcy;": "\u0442", | ||
| 2675 | "tdot;": "\u20db", | ||
| 2676 | "telrec;": "\u2315", | ||
| 2677 | "tfr;": "\U0001d531", | ||
| 2678 | "there4;": "\u2234", | ||
| 2679 | "therefore;": "\u2234", | ||
| 2680 | "theta;": "\u03b8", | ||
| 2681 | "thetasym;": "\u03d1", | ||
| 2682 | "thetav;": "\u03d1", | ||
| 2683 | "thickapprox;": "\u2248", | ||
| 2684 | "thicksim;": "\u223c", | ||
| 2685 | "thinsp;": "\u2009", | ||
| 2686 | "thkap;": "\u2248", | ||
| 2687 | "thksim;": "\u223c", | ||
| 2688 | "thorn": "\xfe", | ||
| 2689 | "thorn;": "\xfe", | ||
| 2690 | "tilde;": "\u02dc", | ||
| 2691 | "times": "\xd7", | ||
| 2692 | "times;": "\xd7", | ||
| 2693 | "timesb;": "\u22a0", | ||
| 2694 | "timesbar;": "\u2a31", | ||
| 2695 | "timesd;": "\u2a30", | ||
| 2696 | "tint;": "\u222d", | ||
| 2697 | "toea;": "\u2928", | ||
| 2698 | "top;": "\u22a4", | ||
| 2699 | "topbot;": "\u2336", | ||
| 2700 | "topcir;": "\u2af1", | ||
| 2701 | "topf;": "\U0001d565", | ||
| 2702 | "topfork;": "\u2ada", | ||
| 2703 | "tosa;": "\u2929", | ||
| 2704 | "tprime;": "\u2034", | ||
| 2705 | "trade;": "\u2122", | ||
| 2706 | "triangle;": "\u25b5", | ||
| 2707 | "triangledown;": "\u25bf", | ||
| 2708 | "triangleleft;": "\u25c3", | ||
| 2709 | "trianglelefteq;": "\u22b4", | ||
| 2710 | "triangleq;": "\u225c", | ||
| 2711 | "triangleright;": "\u25b9", | ||
| 2712 | "trianglerighteq;": "\u22b5", | ||
| 2713 | "tridot;": "\u25ec", | ||
| 2714 | "trie;": "\u225c", | ||
| 2715 | "triminus;": "\u2a3a", | ||
| 2716 | "triplus;": "\u2a39", | ||
| 2717 | "trisb;": "\u29cd", | ||
| 2718 | "tritime;": "\u2a3b", | ||
| 2719 | "trpezium;": "\u23e2", | ||
| 2720 | "tscr;": "\U0001d4c9", | ||
| 2721 | "tscy;": "\u0446", | ||
| 2722 | "tshcy;": "\u045b", | ||
| 2723 | "tstrok;": "\u0167", | ||
| 2724 | "twixt;": "\u226c", | ||
| 2725 | "twoheadleftarrow;": "\u219e", | ||
| 2726 | "twoheadrightarrow;": "\u21a0", | ||
| 2727 | "uArr;": "\u21d1", | ||
| 2728 | "uHar;": "\u2963", | ||
| 2729 | "uacute": "\xfa", | ||
| 2730 | "uacute;": "\xfa", | ||
| 2731 | "uarr;": "\u2191", | ||
| 2732 | "ubrcy;": "\u045e", | ||
| 2733 | "ubreve;": "\u016d", | ||
| 2734 | "ucirc": "\xfb", | ||
| 2735 | "ucirc;": "\xfb", | ||
| 2736 | "ucy;": "\u0443", | ||
| 2737 | "udarr;": "\u21c5", | ||
| 2738 | "udblac;": "\u0171", | ||
| 2739 | "udhar;": "\u296e", | ||
| 2740 | "ufisht;": "\u297e", | ||
| 2741 | "ufr;": "\U0001d532", | ||
| 2742 | "ugrave": "\xf9", | ||
| 2743 | "ugrave;": "\xf9", | ||
| 2744 | "uharl;": "\u21bf", | ||
| 2745 | "uharr;": "\u21be", | ||
| 2746 | "uhblk;": "\u2580", | ||
| 2747 | "ulcorn;": "\u231c", | ||
| 2748 | "ulcorner;": "\u231c", | ||
| 2749 | "ulcrop;": "\u230f", | ||
| 2750 | "ultri;": "\u25f8", | ||
| 2751 | "umacr;": "\u016b", | ||
| 2752 | "uml": "\xa8", | ||
| 2753 | "uml;": "\xa8", | ||
| 2754 | "uogon;": "\u0173", | ||
| 2755 | "uopf;": "\U0001d566", | ||
| 2756 | "uparrow;": "\u2191", | ||
| 2757 | "updownarrow;": "\u2195", | ||
| 2758 | "upharpoonleft;": "\u21bf", | ||
| 2759 | "upharpoonright;": "\u21be", | ||
| 2760 | "uplus;": "\u228e", | ||
| 2761 | "upsi;": "\u03c5", | ||
| 2762 | "upsih;": "\u03d2", | ||
| 2763 | "upsilon;": "\u03c5", | ||
| 2764 | "upuparrows;": "\u21c8", | ||
| 2765 | "urcorn;": "\u231d", | ||
| 2766 | "urcorner;": "\u231d", | ||
| 2767 | "urcrop;": "\u230e", | ||
| 2768 | "uring;": "\u016f", | ||
| 2769 | "urtri;": "\u25f9", | ||
| 2770 | "uscr;": "\U0001d4ca", | ||
| 2771 | "utdot;": "\u22f0", | ||
| 2772 | "utilde;": "\u0169", | ||
| 2773 | "utri;": "\u25b5", | ||
| 2774 | "utrif;": "\u25b4", | ||
| 2775 | "uuarr;": "\u21c8", | ||
| 2776 | "uuml": "\xfc", | ||
| 2777 | "uuml;": "\xfc", | ||
| 2778 | "uwangle;": "\u29a7", | ||
| 2779 | "vArr;": "\u21d5", | ||
| 2780 | "vBar;": "\u2ae8", | ||
| 2781 | "vBarv;": "\u2ae9", | ||
| 2782 | "vDash;": "\u22a8", | ||
| 2783 | "vangrt;": "\u299c", | ||
| 2784 | "varepsilon;": "\u03f5", | ||
| 2785 | "varkappa;": "\u03f0", | ||
| 2786 | "varnothing;": "\u2205", | ||
| 2787 | "varphi;": "\u03d5", | ||
| 2788 | "varpi;": "\u03d6", | ||
| 2789 | "varpropto;": "\u221d", | ||
| 2790 | "varr;": "\u2195", | ||
| 2791 | "varrho;": "\u03f1", | ||
| 2792 | "varsigma;": "\u03c2", | ||
| 2793 | "varsubsetneq;": "\u228a\ufe00", | ||
| 2794 | "varsubsetneqq;": "\u2acb\ufe00", | ||
| 2795 | "varsupsetneq;": "\u228b\ufe00", | ||
| 2796 | "varsupsetneqq;": "\u2acc\ufe00", | ||
| 2797 | "vartheta;": "\u03d1", | ||
| 2798 | "vartriangleleft;": "\u22b2", | ||
| 2799 | "vartriangleright;": "\u22b3", | ||
| 2800 | "vcy;": "\u0432", | ||
| 2801 | "vdash;": "\u22a2", | ||
| 2802 | "vee;": "\u2228", | ||
| 2803 | "veebar;": "\u22bb", | ||
| 2804 | "veeeq;": "\u225a", | ||
| 2805 | "vellip;": "\u22ee", | ||
| 2806 | "verbar;": "|", | ||
| 2807 | "vert;": "|", | ||
| 2808 | "vfr;": "\U0001d533", | ||
| 2809 | "vltri;": "\u22b2", | ||
| 2810 | "vnsub;": "\u2282\u20d2", | ||
| 2811 | "vnsup;": "\u2283\u20d2", | ||
| 2812 | "vopf;": "\U0001d567", | ||
| 2813 | "vprop;": "\u221d", | ||
| 2814 | "vrtri;": "\u22b3", | ||
| 2815 | "vscr;": "\U0001d4cb", | ||
| 2816 | "vsubnE;": "\u2acb\ufe00", | ||
| 2817 | "vsubne;": "\u228a\ufe00", | ||
| 2818 | "vsupnE;": "\u2acc\ufe00", | ||
| 2819 | "vsupne;": "\u228b\ufe00", | ||
| 2820 | "vzigzag;": "\u299a", | ||
| 2821 | "wcirc;": "\u0175", | ||
| 2822 | "wedbar;": "\u2a5f", | ||
| 2823 | "wedge;": "\u2227", | ||
| 2824 | "wedgeq;": "\u2259", | ||
| 2825 | "weierp;": "\u2118", | ||
| 2826 | "wfr;": "\U0001d534", | ||
| 2827 | "wopf;": "\U0001d568", | ||
| 2828 | "wp;": "\u2118", | ||
| 2829 | "wr;": "\u2240", | ||
| 2830 | "wreath;": "\u2240", | ||
| 2831 | "wscr;": "\U0001d4cc", | ||
| 2832 | "xcap;": "\u22c2", | ||
| 2833 | "xcirc;": "\u25ef", | ||
| 2834 | "xcup;": "\u22c3", | ||
| 2835 | "xdtri;": "\u25bd", | ||
| 2836 | "xfr;": "\U0001d535", | ||
| 2837 | "xhArr;": "\u27fa", | ||
| 2838 | "xharr;": "\u27f7", | ||
| 2839 | "xi;": "\u03be", | ||
| 2840 | "xlArr;": "\u27f8", | ||
| 2841 | "xlarr;": "\u27f5", | ||
| 2842 | "xmap;": "\u27fc", | ||
| 2843 | "xnis;": "\u22fb", | ||
| 2844 | "xodot;": "\u2a00", | ||
| 2845 | "xopf;": "\U0001d569", | ||
| 2846 | "xoplus;": "\u2a01", | ||
| 2847 | "xotime;": "\u2a02", | ||
| 2848 | "xrArr;": "\u27f9", | ||
| 2849 | "xrarr;": "\u27f6", | ||
| 2850 | "xscr;": "\U0001d4cd", | ||
| 2851 | "xsqcup;": "\u2a06", | ||
| 2852 | "xuplus;": "\u2a04", | ||
| 2853 | "xutri;": "\u25b3", | ||
| 2854 | "xvee;": "\u22c1", | ||
| 2855 | "xwedge;": "\u22c0", | ||
| 2856 | "yacute": "\xfd", | ||
| 2857 | "yacute;": "\xfd", | ||
| 2858 | "yacy;": "\u044f", | ||
| 2859 | "ycirc;": "\u0177", | ||
| 2860 | "ycy;": "\u044b", | ||
| 2861 | "yen": "\xa5", | ||
| 2862 | "yen;": "\xa5", | ||
| 2863 | "yfr;": "\U0001d536", | ||
| 2864 | "yicy;": "\u0457", | ||
| 2865 | "yopf;": "\U0001d56a", | ||
| 2866 | "yscr;": "\U0001d4ce", | ||
| 2867 | "yucy;": "\u044e", | ||
| 2868 | "yuml": "\xff", | ||
| 2869 | "yuml;": "\xff", | ||
| 2870 | "zacute;": "\u017a", | ||
| 2871 | "zcaron;": "\u017e", | ||
| 2872 | "zcy;": "\u0437", | ||
| 2873 | "zdot;": "\u017c", | ||
| 2874 | "zeetrf;": "\u2128", | ||
| 2875 | "zeta;": "\u03b6", | ||
| 2876 | "zfr;": "\U0001d537", | ||
| 2877 | "zhcy;": "\u0436", | ||
| 2878 | "zigrarr;": "\u21dd", | ||
| 2879 | "zopf;": "\U0001d56b", | ||
| 2880 | "zscr;": "\U0001d4cf", | ||
| 2881 | "zwj;": "\u200d", | ||
| 2882 | "zwnj;": "\u200c", | ||
| 2883 | } | ||
| 2884 | |||
| 2885 | replacementCharacters = { | ||
| 2886 | 0x0: "\uFFFD", | ||
| 2887 | 0x0d: "\u000D", | ||
| 2888 | 0x80: "\u20AC", | ||
| 2889 | 0x81: "\u0081", | ||
| 2890 | 0x82: "\u201A", | ||
| 2891 | 0x83: "\u0192", | ||
| 2892 | 0x84: "\u201E", | ||
| 2893 | 0x85: "\u2026", | ||
| 2894 | 0x86: "\u2020", | ||
| 2895 | 0x87: "\u2021", | ||
| 2896 | 0x88: "\u02C6", | ||
| 2897 | 0x89: "\u2030", | ||
| 2898 | 0x8A: "\u0160", | ||
| 2899 | 0x8B: "\u2039", | ||
| 2900 | 0x8C: "\u0152", | ||
| 2901 | 0x8D: "\u008D", | ||
| 2902 | 0x8E: "\u017D", | ||
| 2903 | 0x8F: "\u008F", | ||
| 2904 | 0x90: "\u0090", | ||
| 2905 | 0x91: "\u2018", | ||
| 2906 | 0x92: "\u2019", | ||
| 2907 | 0x93: "\u201C", | ||
| 2908 | 0x94: "\u201D", | ||
| 2909 | 0x95: "\u2022", | ||
| 2910 | 0x96: "\u2013", | ||
| 2911 | 0x97: "\u2014", | ||
| 2912 | 0x98: "\u02DC", | ||
| 2913 | 0x99: "\u2122", | ||
| 2914 | 0x9A: "\u0161", | ||
| 2915 | 0x9B: "\u203A", | ||
| 2916 | 0x9C: "\u0153", | ||
| 2917 | 0x9D: "\u009D", | ||
| 2918 | 0x9E: "\u017E", | ||
| 2919 | 0x9F: "\u0178", | ||
| 2920 | } | ||
| 2921 | |||
| 2922 | tokenTypes = { | ||
| 2923 | "Doctype": 0, | ||
| 2924 | "Characters": 1, | ||
| 2925 | "SpaceCharacters": 2, | ||
| 2926 | "StartTag": 3, | ||
| 2927 | "EndTag": 4, | ||
| 2928 | "EmptyTag": 5, | ||
| 2929 | "Comment": 6, | ||
| 2930 | "ParseError": 7 | ||
| 2931 | } | ||
| 2932 | |||
| 2933 | tagTokenTypes = frozenset([tokenTypes["StartTag"], tokenTypes["EndTag"], | ||
| 2934 | tokenTypes["EmptyTag"]]) | ||
| 2935 | |||
| 2936 | |||
| 2937 | prefixes = dict([(v, k) for k, v in namespaces.items()]) | ||
| 2938 | prefixes["http://www.w3.org/1998/Math/MathML"] = "math" | ||
| 2939 | |||
| 2940 | |||
| 2941 | class DataLossWarning(UserWarning): | ||
| 2942 | """Raised when the current tree is unable to represent the input data""" | ||
| 2943 | pass | ||
| 2944 | |||
| 2945 | |||
| 2946 | class _ReparseException(Exception): | ||
| 2947 | pass | ||
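Taken together, the tables above drive the tokenizer's character-reference handling: `entities` resolves named references (keys keep the trailing `;`, plus a handful of legacy semicolon-less forms), `replacementCharacters` remaps numeric references in the Windows-1252 range, and `tokenTypes`/`tagTokenTypes` give filters cheap category tests. A minimal sketch using only values visible above, assuming the vendored tree added by this commit is importable:

    from pip._vendor.html5lib.constants import (
        entities, replacementCharacters, tokenTypes, tagTokenTypes)

    # Named references: "rarr;" is a rightwards arrow; "lt" is one of the
    # legacy forms recognized even without a trailing semicolon.
    assert entities["rarr;"] == "\u2192"
    assert entities["lt"] == "<"

    # Numeric references in the 0x80-0x9F range are remapped per the HTML
    # spec: &#x93; yields a curly quote, not the C1 control character.
    assert replacementCharacters[0x93] == "\u201C"

    # Token-type integers let filters test token categories cheaply.
    assert tokenTypes["StartTag"] in tagTokenTypes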
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/filters/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/filters/__init__.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/filters/__init__.py | |||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/filters/alphabeticalattributes.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/filters/alphabeticalattributes.py new file mode 100644 index 0000000..d9e234a --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/filters/alphabeticalattributes.py | |||
| @@ -0,0 +1,29 @@ | |||
| 1 | from __future__ import absolute_import, division, unicode_literals | ||
| 2 | |||
| 3 | from . import base | ||
| 4 | |||
| 5 | from collections import OrderedDict | ||
| 6 | |||
| 7 | |||
| 8 | def _attr_key(attr): | ||
| 9 | """Return an appropriate key for an attribute for sorting | ||
| 10 | |||
| 11 | Attributes have a namespace that can be either ``None`` or a string. We | ||
| 12 | can't compare the two because they're different types, so we convert | ||
| 13 | ``None`` to an empty string first. | ||
| 14 | |||
| 15 | """ | ||
| 16 | return (attr[0][0] or ''), attr[0][1] | ||
| 17 | |||
| 18 | |||
| 19 | class Filter(base.Filter): | ||
| 20 | """Alphabetizes attributes for elements""" | ||
| 21 | def __iter__(self): | ||
| 22 | for token in base.Filter.__iter__(self): | ||
| 23 | if token["type"] in ("StartTag", "EmptyTag"): | ||
| 24 | attrs = OrderedDict() | ||
| 25 | for name, value in sorted(token["data"].items(), | ||
| 26 | key=_attr_key): | ||
| 27 | attrs[name] = value | ||
| 28 | token["data"] = attrs | ||
| 29 | yield token | ||
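This is the same filter the serializer applies when its `alphabetical_attributes` option is set; it can also be used directly in a pipeline. A sketch, assuming the vendored tree from this commit is importable (the input markup is illustrative):

    from pip._vendor import html5lib
    from pip._vendor.html5lib.serializer import HTMLSerializer
    from pip._vendor.html5lib.filters.alphabeticalattributes import Filter

    dom = html5lib.parse('<p id="x" class="y">hi</p>')
    walker = html5lib.getTreeWalker("etree")
    tokens = Filter(walker(dom))        # attributes re-yielded in sorted order
    print(HTMLSerializer().render(tokens))  # class serialized before id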
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/filters/base.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/filters/base.py new file mode 100644 index 0000000..f5aa523 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/filters/base.py | |||
| @@ -0,0 +1,12 @@ | |||
| 1 | from __future__ import absolute_import, division, unicode_literals | ||
| 2 | |||
| 3 | |||
| 4 | class Filter(object): | ||
| 5 | def __init__(self, source): | ||
| 6 | self.source = source | ||
| 7 | |||
| 8 | def __iter__(self): | ||
| 9 | return iter(self.source) | ||
| 10 | |||
| 11 | def __getattr__(self, name): | ||
| 12 | return getattr(self.source, name) | ||
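The base class is a pure iterator proxy: `__iter__` delegates to the wrapped source and `__getattr__` forwards any other attribute lookup, so concrete filters only need to override `__iter__`. A hypothetical subclass that drops comment tokens, as a sketch:

    from pip._vendor.html5lib.filters import base

    class DropComments(base.Filter):
        """Hypothetical filter: pass every token through except comments."""
        def __iter__(self):
            for token in base.Filter.__iter__(self):
                if token["type"] != "Comment":
                    yield token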
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/filters/inject_meta_charset.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/filters/inject_meta_charset.py new file mode 100644 index 0000000..2f8ec4f --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/filters/inject_meta_charset.py | |||
| @@ -0,0 +1,73 @@ | |||
| 1 | from __future__ import absolute_import, division, unicode_literals | ||
| 2 | |||
| 3 | from . import base | ||
| 4 | |||
| 5 | |||
| 6 | class Filter(base.Filter): | ||
| 7 | """Injects ``<meta charset=ENCODING>`` tag into head of document""" | ||
| 8 | def __init__(self, source, encoding): | ||
| 9 | """Creates a Filter | ||
| 10 | |||
| 11 | :arg source: the source token stream | ||
| 12 | |||
| 13 | :arg encoding: the encoding to set | ||
| 14 | |||
| 15 | """ | ||
| 16 | base.Filter.__init__(self, source) | ||
| 17 | self.encoding = encoding | ||
| 18 | |||
| 19 | def __iter__(self): | ||
| 20 | state = "pre_head" | ||
| 21 | meta_found = (self.encoding is None) | ||
| 22 | pending = [] | ||
| 23 | |||
| 24 | for token in base.Filter.__iter__(self): | ||
| 25 | type = token["type"] | ||
| 26 | if type == "StartTag": | ||
| 27 | if token["name"].lower() == "head": | ||
| 28 | state = "in_head" | ||
| 29 | |||
| 30 | elif type == "EmptyTag": | ||
| 31 | if token["name"].lower() == "meta": | ||
| 32 | # replace charset with actual encoding | ||
| 33 | has_http_equiv_content_type = False | ||
| 34 | for (namespace, name), value in token["data"].items(): | ||
| 35 | if namespace is not None: | ||
| 36 | continue | ||
| 37 | elif name.lower() == 'charset': | ||
| 38 | token["data"][(namespace, name)] = self.encoding | ||
| 39 | meta_found = True | ||
| 40 | break | ||
| 41 | elif name == 'http-equiv' and value.lower() == 'content-type': | ||
| 42 | has_http_equiv_content_type = True | ||
| 43 | else: | ||
| 44 | if has_http_equiv_content_type and (None, "content") in token["data"]: | ||
| 45 | token["data"][(None, "content")] = 'text/html; charset=%s' % self.encoding | ||
| 46 | meta_found = True | ||
| 47 | |||
| 48 | elif token["name"].lower() == "head" and not meta_found: | ||
| 49 | # insert meta into empty head | ||
| 50 | yield {"type": "StartTag", "name": "head", | ||
| 51 | "data": token["data"]} | ||
| 52 | yield {"type": "EmptyTag", "name": "meta", | ||
| 53 | "data": {(None, "charset"): self.encoding}} | ||
| 54 | yield {"type": "EndTag", "name": "head"} | ||
| 55 | meta_found = True | ||
| 56 | continue | ||
| 57 | |||
| 58 | elif type == "EndTag": | ||
| 59 | if token["name"].lower() == "head" and pending: | ||
| 60 | # insert meta into head (if necessary) and flush pending queue | ||
| 61 | yield pending.pop(0) | ||
| 62 | if not meta_found: | ||
| 63 | yield {"type": "EmptyTag", "name": "meta", | ||
| 64 | "data": {(None, "charset"): self.encoding}} | ||
| 65 | while pending: | ||
| 66 | yield pending.pop(0) | ||
| 67 | meta_found = True | ||
| 68 | state = "post_head" | ||
| 69 | |||
| 70 | if state == "in_head": | ||
| 71 | pending.append(token) | ||
| 72 | else: | ||
| 73 | yield token | ||
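Note the `pending` queue: while inside `<head>` the filter buffers tokens so that, by the closing tag at the latest, it knows whether an existing `<meta>` was rewritten or a fresh one must be injected first. A usage sketch; the expected result is read off the logic above rather than a captured run:

    from pip._vendor import html5lib
    from pip._vendor.html5lib.serializer import HTMLSerializer
    from pip._vendor.html5lib.filters.inject_meta_charset import Filter

    dom = html5lib.parse("<html><head><title>t</title></head><body></body></html>")
    walker = html5lib.getTreeWalker("etree")
    tokens = Filter(walker(dom), "utf-8")
    html = HTMLSerializer(omit_optional_tags=False).render(tokens)
    # head now starts with <meta charset=utf-8>, before the title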
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/filters/lint.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/filters/lint.py new file mode 100644 index 0000000..b5bbd97 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/filters/lint.py | |||
| @@ -0,0 +1,93 @@ | |||
| 1 | from __future__ import absolute_import, division, unicode_literals | ||
| 2 | |||
| 3 | from pip._vendor.six import text_type | ||
| 4 | |||
| 5 | from . import base | ||
| 6 | from ..constants import namespaces, voidElements | ||
| 7 | |||
| 8 | from ..constants import spaceCharacters | ||
| 9 | spaceCharacters = "".join(spaceCharacters) | ||
| 10 | |||
| 11 | |||
| 12 | class Filter(base.Filter): | ||
| 13 | """Lints the token stream for errors | ||
| 14 | |||
| 15 | If it finds any errors, it'll raise an ``AssertionError``. | ||
| 16 | |||
| 17 | """ | ||
| 18 | def __init__(self, source, require_matching_tags=True): | ||
| 19 | """Creates a Filter | ||
| 20 | |||
| 21 | :arg source: the source token stream | ||
| 22 | |||
| 23 | :arg require_matching_tags: whether or not to require matching tags | ||
| 24 | |||
| 25 | """ | ||
| 26 | super(Filter, self).__init__(source) | ||
| 27 | self.require_matching_tags = require_matching_tags | ||
| 28 | |||
| 29 | def __iter__(self): | ||
| 30 | open_elements = [] | ||
| 31 | for token in base.Filter.__iter__(self): | ||
| 32 | type = token["type"] | ||
| 33 | if type in ("StartTag", "EmptyTag"): | ||
| 34 | namespace = token["namespace"] | ||
| 35 | name = token["name"] | ||
| 36 | assert namespace is None or isinstance(namespace, text_type) | ||
| 37 | assert namespace != "" | ||
| 38 | assert isinstance(name, text_type) | ||
| 39 | assert name != "" | ||
| 40 | assert isinstance(token["data"], dict) | ||
| 41 | if (not namespace or namespace == namespaces["html"]) and name in voidElements: | ||
| 42 | assert type == "EmptyTag" | ||
| 43 | else: | ||
| 44 | assert type == "StartTag" | ||
| 45 | if type == "StartTag" and self.require_matching_tags: | ||
| 46 | open_elements.append((namespace, name)) | ||
| 47 | for (namespace, name), value in token["data"].items(): | ||
| 48 | assert namespace is None or isinstance(namespace, text_type) | ||
| 49 | assert namespace != "" | ||
| 50 | assert isinstance(name, text_type) | ||
| 51 | assert name != "" | ||
| 52 | assert isinstance(value, text_type) | ||
| 53 | |||
| 54 | elif type == "EndTag": | ||
| 55 | namespace = token["namespace"] | ||
| 56 | name = token["name"] | ||
| 57 | assert namespace is None or isinstance(namespace, text_type) | ||
| 58 | assert namespace != "" | ||
| 59 | assert isinstance(name, text_type) | ||
| 60 | assert name != "" | ||
| 61 | if (not namespace or namespace == namespaces["html"]) and name in voidElements: | ||
| 62 | assert False, "Void element reported as EndTag token: %(tag)s" % {"tag": name} | ||
| 63 | elif self.require_matching_tags: | ||
| 64 | start = open_elements.pop() | ||
| 65 | assert start == (namespace, name) | ||
| 66 | |||
| 67 | elif type == "Comment": | ||
| 68 | data = token["data"] | ||
| 69 | assert isinstance(data, text_type) | ||
| 70 | |||
| 71 | elif type in ("Characters", "SpaceCharacters"): | ||
| 72 | data = token["data"] | ||
| 73 | assert isinstance(data, text_type) | ||
| 74 | assert data != "" | ||
| 75 | if type == "SpaceCharacters": | ||
| 76 | assert data.strip(spaceCharacters) == "" | ||
| 77 | |||
| 78 | elif type == "Doctype": | ||
| 79 | name = token["name"] | ||
| 80 | assert name is None or isinstance(name, text_type) | ||
| 81 | assert token["publicId"] is None or isinstance(name, text_type) | ||
| 82 | assert token["systemId"] is None or isinstance(name, text_type) | ||
| 83 | |||
| 84 | elif type == "Entity": | ||
| 85 | assert isinstance(token["name"], text_type) | ||
| 86 | |||
| 87 | elif type == "SerializerError": | ||
| 88 | assert isinstance(token["data"], text_type) | ||
| 89 | |||
| 90 | else: | ||
| 91 | assert False, "Unknown token type: %(type)s" % {"type": type} | ||
| 92 | |||
| 93 | yield token | ||
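Because every check is a plain `assert`, this filter is a development aid: wrap a token stream in it and iterate, and the first malformed token raises `AssertionError` (like any assert-based check, it becomes a no-op under `python -O`). A sketch, assuming the vendored tree is importable:

    from pip._vendor import html5lib
    from pip._vendor.html5lib.filters.lint import Filter

    dom = html5lib.parse("<p>ok</p>")
    walker = html5lib.getTreeWalker("etree")
    for token in Filter(walker(dom)):   # raises on the first bad token
        pass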
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/filters/optionaltags.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/filters/optionaltags.py new file mode 100644 index 0000000..c8d5e54 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/filters/optionaltags.py | |||
| @@ -0,0 +1,207 @@ | |||
| 1 | from __future__ import absolute_import, division, unicode_literals | ||
| 2 | |||
| 3 | from . import base | ||
| 4 | |||
| 5 | |||
| 6 | class Filter(base.Filter): | ||
| 7 | """Removes optional tags from the token stream""" | ||
| 8 | def slider(self): | ||
| 9 | previous1 = previous2 = None | ||
| 10 | for token in self.source: | ||
| 11 | if previous1 is not None: | ||
| 12 | yield previous2, previous1, token | ||
| 13 | previous2 = previous1 | ||
| 14 | previous1 = token | ||
| 15 | if previous1 is not None: | ||
| 16 | yield previous2, previous1, None | ||
| 17 | |||
| 18 | def __iter__(self): | ||
| 19 | for previous, token, next in self.slider(): | ||
| 20 | type = token["type"] | ||
| 21 | if type == "StartTag": | ||
| 22 | if (token["data"] or | ||
| 23 | not self.is_optional_start(token["name"], previous, next)): | ||
| 24 | yield token | ||
| 25 | elif type == "EndTag": | ||
| 26 | if not self.is_optional_end(token["name"], next): | ||
| 27 | yield token | ||
| 28 | else: | ||
| 29 | yield token | ||
| 30 | |||
| 31 | def is_optional_start(self, tagname, previous, next): | ||
| 32 | type = next and next["type"] or None | ||
| 33 | if tagname == 'html': | ||
| 34 | # An html element's start tag may be omitted if the first thing | ||
| 35 | # inside the html element is not a space character or a comment. | ||
| 36 | return type not in ("Comment", "SpaceCharacters") | ||
| 37 | elif tagname == 'head': | ||
| 38 | # A head element's start tag may be omitted if the first thing | ||
| 39 | # inside the head element is an element. | ||
| 40 | # XXX: we also omit the start tag if the head element is empty | ||
| 41 | if type in ("StartTag", "EmptyTag"): | ||
| 42 | return True | ||
| 43 | elif type == "EndTag": | ||
| 44 | return next["name"] == "head" | ||
| 45 | elif tagname == 'body': | ||
| 46 | # A body element's start tag may be omitted if the first thing | ||
| 47 | # inside the body element is not a space character or a comment, | ||
| 48 | # except if the first thing inside the body element is a script | ||
| 49 | # or style element and the node immediately preceding the body | ||
| 50 | # element is a head element whose end tag has been omitted. | ||
| 51 | if type in ("Comment", "SpaceCharacters"): | ||
| 52 | return False | ||
| 53 | elif type == "StartTag": | ||
| 54 | # XXX: we do not look at the preceding event, so we never omit | ||
| 55 | # the body element's start tag if it's followed by a script or | ||
| 56 | # a style element. | ||
| 57 | return next["name"] not in ('script', 'style') | ||
| 58 | else: | ||
| 59 | return True | ||
| 60 | elif tagname == 'colgroup': | ||
| 61 | # A colgroup element's start tag may be omitted if the first thing | ||
| 62 | # inside the colgroup element is a col element, and if the element | ||
| 63 | # is not immediately preceded by another colgroup element whose | ||
| 64 | # end tag has been omitted. | ||
| 65 | if type in ("StartTag", "EmptyTag"): | ||
| 66 | # XXX: we do not look at the preceding event, so instead we never | ||
| 67 | # omit the colgroup element's end tag when it is immediately | ||
| 68 | # followed by another colgroup element. See is_optional_end. | ||
| 69 | return next["name"] == "col" | ||
| 70 | else: | ||
| 71 | return False | ||
| 72 | elif tagname == 'tbody': | ||
| 73 | # A tbody element's start tag may be omitted if the first thing | ||
| 74 | # inside the tbody element is a tr element, and if the element is | ||
| 75 | # not immediately preceded by a tbody, thead, or tfoot element | ||
| 76 | # whose end tag has been omitted. | ||
| 77 | if type == "StartTag": | ||
| 78 | # omit the thead and tfoot elements' end tag when they are | ||
| 79 | # immediately followed by a tbody element. See is_optional_end. | ||
| 80 | if previous and previous['type'] == 'EndTag' and \ | ||
| 81 | previous['name'] in ('tbody', 'thead', 'tfoot'): | ||
| 82 | return False | ||
| 83 | return next["name"] == 'tr' | ||
| 84 | else: | ||
| 85 | return False | ||
| 86 | return False | ||
| 87 | |||
| 88 | def is_optional_end(self, tagname, next): | ||
| 89 | type = next and next["type"] or None | ||
| 90 | if tagname in ('html', 'head', 'body'): | ||
| 91 | # An html element's end tag may be omitted if the html element | ||
| 92 | # is not immediately followed by a space character or a comment. | ||
| 93 | return type not in ("Comment", "SpaceCharacters") | ||
| 94 | elif tagname in ('li', 'optgroup', 'tr'): | ||
| 95 | # A li element's end tag may be omitted if the li element is | ||
| 96 | # immediately followed by another li element or if there is | ||
| 97 | # no more content in the parent element. | ||
| 98 | # An optgroup element's end tag may be omitted if the optgroup | ||
| 99 | # element is immediately followed by another optgroup element, | ||
| 100 | # or if there is no more content in the parent element. | ||
| 101 | # A tr element's end tag may be omitted if the tr element is | ||
| 102 | # immediately followed by another tr element, or if there is | ||
| 103 | # no more content in the parent element. | ||
| 104 | if type == "StartTag": | ||
| 105 | return next["name"] == tagname | ||
| 106 | else: | ||
| 107 | return type == "EndTag" or type is None | ||
| 108 | elif tagname in ('dt', 'dd'): | ||
| 109 | # A dt element's end tag may be omitted if the dt element is | ||
| 110 | # immediately followed by another dt element or a dd element. | ||
| 111 | # A dd element's end tag may be omitted if the dd element is | ||
| 112 | # immediately followed by another dd element or a dt element, | ||
| 113 | # or if there is no more content in the parent element. | ||
| 114 | if type == "StartTag": | ||
| 115 | return next["name"] in ('dt', 'dd') | ||
| 116 | elif tagname == 'dd': | ||
| 117 | return type == "EndTag" or type is None | ||
| 118 | else: | ||
| 119 | return False | ||
| 120 | elif tagname == 'p': | ||
| 121 | # A p element's end tag may be omitted if the p element is | ||
| 122 | # immediately followed by an address, article, aside, | ||
| 123 | # blockquote, datagrid, dialog, dir, div, dl, fieldset, | ||
| 124 | # footer, form, h1, h2, h3, h4, h5, h6, header, hr, menu, | ||
| 125 | # nav, ol, p, pre, section, table, or ul, element, or if | ||
| 126 | # there is no more content in the parent element. | ||
| 127 | if type in ("StartTag", "EmptyTag"): | ||
| 128 | return next["name"] in ('address', 'article', 'aside', | ||
| 129 | 'blockquote', 'datagrid', 'dialog', | ||
| 130 | 'dir', 'div', 'dl', 'fieldset', 'footer', | ||
| 131 | 'form', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6', | ||
| 132 | 'header', 'hr', 'menu', 'nav', 'ol', | ||
| 133 | 'p', 'pre', 'section', 'table', 'ul') | ||
| 134 | else: | ||
| 135 | return type == "EndTag" or type is None | ||
| 136 | elif tagname == 'option': | ||
| 137 | # An option element's end tag may be omitted if the option | ||
| 138 | # element is immediately followed by another option element, | ||
| 139 | # element is immediately followed by an optgroup element, | ||
| 140 | # element, or if there is no more content in the parent | ||
| 141 | # element. | ||
| 142 | if type == "StartTag": | ||
| 143 | return next["name"] in ('option', 'optgroup') | ||
| 144 | else: | ||
| 145 | return type == "EndTag" or type is None | ||
| 146 | elif tagname in ('rt', 'rp'): | ||
| 147 | # An rt element's end tag may be omitted if the rt element is | ||
| 148 | # immediately followed by an rt or rp element, or if there is | ||
| 149 | # no more content in the parent element. | ||
| 150 | # An rp element's end tag may be omitted if the rp element is | ||
| 151 | # immediately followed by an rt or rp element, or if there is | ||
| 152 | # no more content in the parent element. | ||
| 153 | if type == "StartTag": | ||
| 154 | return next["name"] in ('rt', 'rp') | ||
| 155 | else: | ||
| 156 | return type == "EndTag" or type is None | ||
| 157 | elif tagname == 'colgroup': | ||
| 158 | # A colgroup element's end tag may be omitted if the colgroup | ||
| 159 | # element is not immediately followed by a space character or | ||
| 160 | # a comment. | ||
| 161 | if type in ("Comment", "SpaceCharacters"): | ||
| 162 | return False | ||
| 163 | elif type == "StartTag": | ||
| 164 | # XXX: we also look for an immediately following colgroup | ||
| 165 | # element. See is_optional_start. | ||
| 166 | return next["name"] != 'colgroup' | ||
| 167 | else: | ||
| 168 | return True | ||
| 169 | elif tagname in ('thead', 'tbody'): | ||
| 170 | # A thead element's end tag may be omitted if the thead element | ||
| 171 | # is immediately followed by a tbody or tfoot element. | ||
| 172 | # A tbody element's end tag may be omitted if the tbody element | ||
| 173 | # is immediately followed by a tbody or tfoot element, or if | ||
| 174 | # there is no more content in the parent element. | ||
| 175 | # A tfoot element's end tag may be omitted if the tfoot element | ||
| 176 | # is immediately followed by a tbody element, or if there is no | ||
| 177 | # more content in the parent element. | ||
| 178 | # XXX: we never omit the end tag when the following element is | ||
| 179 | # a tbody. See is_optional_start. | ||
| 180 | if type == "StartTag": | ||
| 181 | return next["name"] in ['tbody', 'tfoot'] | ||
| 182 | elif tagname == 'tbody': | ||
| 183 | return type == "EndTag" or type is None | ||
| 184 | else: | ||
| 185 | return False | ||
| 186 | elif tagname == 'tfoot': | ||
| 187 | # A tfoot element's end tag may be omitted if the tfoot element | ||
| 188 | # is immediately followed by a tbody element, or if there is no | ||
| 189 | # more content in the parent element. | ||
| 190 | # XXX: we never omit the end tag when the following element is | ||
| 191 | # a tbody. See is_optional_start. | ||
| 192 | if type == "StartTag": | ||
| 193 | return next["name"] == 'tbody' | ||
| 194 | else: | ||
| 195 | return type == "EndTag" or type is None | ||
| 196 | elif tagname in ('td', 'th'): | ||
| 197 | # A td element's end tag may be omitted if the td element is | ||
| 198 | # immediately followed by a td or th element, or if there is | ||
| 199 | # no more content in the parent element. | ||
| 200 | # A th element's end tag may be omitted if the th element is | ||
| 201 | # immediately followed by a td or th element, or if there is | ||
| 202 | # no more content in the parent element. | ||
| 203 | if type == "StartTag": | ||
| 204 | return next["name"] in ('td', 'th') | ||
| 205 | else: | ||
| 206 | return type == "EndTag" or type is None | ||
| 207 | return False | ||
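This is the filter behind the serializer's `omit_optional_tags` option. A sketch of its effect; the commented output is hand-derived from the rules above rather than captured:

    from pip._vendor import html5lib
    from pip._vendor.html5lib.serializer import HTMLSerializer
    from pip._vendor.html5lib.filters.optionaltags import Filter

    dom = html5lib.parse("<ul><li>a</li><li>b</li></ul>")
    walker = html5lib.getTreeWalker("etree")
    tokens = Filter(walker(dom))
    print(HTMLSerializer(omit_optional_tags=False).render(tokens))
    # html/head/body tags and the li end tags are dropped:
    # <ul><li>a<li>b</ul>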
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/filters/sanitizer.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/filters/sanitizer.py new file mode 100644 index 0000000..c3199a5 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/filters/sanitizer.py | |||
| @@ -0,0 +1,896 @@ | |||
| 1 | from __future__ import absolute_import, division, unicode_literals | ||
| 2 | |||
| 3 | import re | ||
| 4 | from xml.sax.saxutils import escape, unescape | ||
| 5 | |||
| 6 | from pip._vendor.six.moves import urllib_parse as urlparse | ||
| 7 | |||
| 8 | from . import base | ||
| 9 | from ..constants import namespaces, prefixes | ||
| 10 | |||
| 11 | __all__ = ["Filter"] | ||
| 12 | |||
| 13 | |||
| 14 | allowed_elements = frozenset(( | ||
| 15 | (namespaces['html'], 'a'), | ||
| 16 | (namespaces['html'], 'abbr'), | ||
| 17 | (namespaces['html'], 'acronym'), | ||
| 18 | (namespaces['html'], 'address'), | ||
| 19 | (namespaces['html'], 'area'), | ||
| 20 | (namespaces['html'], 'article'), | ||
| 21 | (namespaces['html'], 'aside'), | ||
| 22 | (namespaces['html'], 'audio'), | ||
| 23 | (namespaces['html'], 'b'), | ||
| 24 | (namespaces['html'], 'big'), | ||
| 25 | (namespaces['html'], 'blockquote'), | ||
| 26 | (namespaces['html'], 'br'), | ||
| 27 | (namespaces['html'], 'button'), | ||
| 28 | (namespaces['html'], 'canvas'), | ||
| 29 | (namespaces['html'], 'caption'), | ||
| 30 | (namespaces['html'], 'center'), | ||
| 31 | (namespaces['html'], 'cite'), | ||
| 32 | (namespaces['html'], 'code'), | ||
| 33 | (namespaces['html'], 'col'), | ||
| 34 | (namespaces['html'], 'colgroup'), | ||
| 35 | (namespaces['html'], 'command'), | ||
| 36 | (namespaces['html'], 'datagrid'), | ||
| 37 | (namespaces['html'], 'datalist'), | ||
| 38 | (namespaces['html'], 'dd'), | ||
| 39 | (namespaces['html'], 'del'), | ||
| 40 | (namespaces['html'], 'details'), | ||
| 41 | (namespaces['html'], 'dfn'), | ||
| 42 | (namespaces['html'], 'dialog'), | ||
| 43 | (namespaces['html'], 'dir'), | ||
| 44 | (namespaces['html'], 'div'), | ||
| 45 | (namespaces['html'], 'dl'), | ||
| 46 | (namespaces['html'], 'dt'), | ||
| 47 | (namespaces['html'], 'em'), | ||
| 48 | (namespaces['html'], 'event-source'), | ||
| 49 | (namespaces['html'], 'fieldset'), | ||
| 50 | (namespaces['html'], 'figcaption'), | ||
| 51 | (namespaces['html'], 'figure'), | ||
| 52 | (namespaces['html'], 'footer'), | ||
| 53 | (namespaces['html'], 'font'), | ||
| 54 | (namespaces['html'], 'form'), | ||
| 55 | (namespaces['html'], 'header'), | ||
| 56 | (namespaces['html'], 'h1'), | ||
| 57 | (namespaces['html'], 'h2'), | ||
| 58 | (namespaces['html'], 'h3'), | ||
| 59 | (namespaces['html'], 'h4'), | ||
| 60 | (namespaces['html'], 'h5'), | ||
| 61 | (namespaces['html'], 'h6'), | ||
| 62 | (namespaces['html'], 'hr'), | ||
| 63 | (namespaces['html'], 'i'), | ||
| 64 | (namespaces['html'], 'img'), | ||
| 65 | (namespaces['html'], 'input'), | ||
| 66 | (namespaces['html'], 'ins'), | ||
| 67 | (namespaces['html'], 'keygen'), | ||
| 68 | (namespaces['html'], 'kbd'), | ||
| 69 | (namespaces['html'], 'label'), | ||
| 70 | (namespaces['html'], 'legend'), | ||
| 71 | (namespaces['html'], 'li'), | ||
| 72 | (namespaces['html'], 'm'), | ||
| 73 | (namespaces['html'], 'map'), | ||
| 74 | (namespaces['html'], 'menu'), | ||
| 75 | (namespaces['html'], 'meter'), | ||
| 76 | (namespaces['html'], 'multicol'), | ||
| 77 | (namespaces['html'], 'nav'), | ||
| 78 | (namespaces['html'], 'nextid'), | ||
| 79 | (namespaces['html'], 'ol'), | ||
| 80 | (namespaces['html'], 'output'), | ||
| 81 | (namespaces['html'], 'optgroup'), | ||
| 82 | (namespaces['html'], 'option'), | ||
| 83 | (namespaces['html'], 'p'), | ||
| 84 | (namespaces['html'], 'pre'), | ||
| 85 | (namespaces['html'], 'progress'), | ||
| 86 | (namespaces['html'], 'q'), | ||
| 87 | (namespaces['html'], 's'), | ||
| 88 | (namespaces['html'], 'samp'), | ||
| 89 | (namespaces['html'], 'section'), | ||
| 90 | (namespaces['html'], 'select'), | ||
| 91 | (namespaces['html'], 'small'), | ||
| 92 | (namespaces['html'], 'sound'), | ||
| 93 | (namespaces['html'], 'source'), | ||
| 94 | (namespaces['html'], 'spacer'), | ||
| 95 | (namespaces['html'], 'span'), | ||
| 96 | (namespaces['html'], 'strike'), | ||
| 97 | (namespaces['html'], 'strong'), | ||
| 98 | (namespaces['html'], 'sub'), | ||
| 99 | (namespaces['html'], 'sup'), | ||
| 100 | (namespaces['html'], 'table'), | ||
| 101 | (namespaces['html'], 'tbody'), | ||
| 102 | (namespaces['html'], 'td'), | ||
| 103 | (namespaces['html'], 'textarea'), | ||
| 104 | (namespaces['html'], 'time'), | ||
| 105 | (namespaces['html'], 'tfoot'), | ||
| 106 | (namespaces['html'], 'th'), | ||
| 107 | (namespaces['html'], 'thead'), | ||
| 108 | (namespaces['html'], 'tr'), | ||
| 109 | (namespaces['html'], 'tt'), | ||
| 110 | (namespaces['html'], 'u'), | ||
| 111 | (namespaces['html'], 'ul'), | ||
| 112 | (namespaces['html'], 'var'), | ||
| 113 | (namespaces['html'], 'video'), | ||
| 114 | (namespaces['mathml'], 'maction'), | ||
| 115 | (namespaces['mathml'], 'math'), | ||
| 116 | (namespaces['mathml'], 'merror'), | ||
| 117 | (namespaces['mathml'], 'mfrac'), | ||
| 118 | (namespaces['mathml'], 'mi'), | ||
| 119 | (namespaces['mathml'], 'mmultiscripts'), | ||
| 120 | (namespaces['mathml'], 'mn'), | ||
| 121 | (namespaces['mathml'], 'mo'), | ||
| 122 | (namespaces['mathml'], 'mover'), | ||
| 123 | (namespaces['mathml'], 'mpadded'), | ||
| 124 | (namespaces['mathml'], 'mphantom'), | ||
| 125 | (namespaces['mathml'], 'mprescripts'), | ||
| 126 | (namespaces['mathml'], 'mroot'), | ||
| 127 | (namespaces['mathml'], 'mrow'), | ||
| 128 | (namespaces['mathml'], 'mspace'), | ||
| 129 | (namespaces['mathml'], 'msqrt'), | ||
| 130 | (namespaces['mathml'], 'mstyle'), | ||
| 131 | (namespaces['mathml'], 'msub'), | ||
| 132 | (namespaces['mathml'], 'msubsup'), | ||
| 133 | (namespaces['mathml'], 'msup'), | ||
| 134 | (namespaces['mathml'], 'mtable'), | ||
| 135 | (namespaces['mathml'], 'mtd'), | ||
| 136 | (namespaces['mathml'], 'mtext'), | ||
| 137 | (namespaces['mathml'], 'mtr'), | ||
| 138 | (namespaces['mathml'], 'munder'), | ||
| 139 | (namespaces['mathml'], 'munderover'), | ||
| 140 | (namespaces['mathml'], 'none'), | ||
| 141 | (namespaces['svg'], 'a'), | ||
| 142 | (namespaces['svg'], 'animate'), | ||
| 143 | (namespaces['svg'], 'animateColor'), | ||
| 144 | (namespaces['svg'], 'animateMotion'), | ||
| 145 | (namespaces['svg'], 'animateTransform'), | ||
| 146 | (namespaces['svg'], 'clipPath'), | ||
| 147 | (namespaces['svg'], 'circle'), | ||
| 148 | (namespaces['svg'], 'defs'), | ||
| 149 | (namespaces['svg'], 'desc'), | ||
| 150 | (namespaces['svg'], 'ellipse'), | ||
| 151 | (namespaces['svg'], 'font-face'), | ||
| 152 | (namespaces['svg'], 'font-face-name'), | ||
| 153 | (namespaces['svg'], 'font-face-src'), | ||
| 154 | (namespaces['svg'], 'g'), | ||
| 155 | (namespaces['svg'], 'glyph'), | ||
| 156 | (namespaces['svg'], 'hkern'), | ||
| 157 | (namespaces['svg'], 'linearGradient'), | ||
| 158 | (namespaces['svg'], 'line'), | ||
| 159 | (namespaces['svg'], 'marker'), | ||
| 160 | (namespaces['svg'], 'metadata'), | ||
| 161 | (namespaces['svg'], 'missing-glyph'), | ||
| 162 | (namespaces['svg'], 'mpath'), | ||
| 163 | (namespaces['svg'], 'path'), | ||
| 164 | (namespaces['svg'], 'polygon'), | ||
| 165 | (namespaces['svg'], 'polyline'), | ||
| 166 | (namespaces['svg'], 'radialGradient'), | ||
| 167 | (namespaces['svg'], 'rect'), | ||
| 168 | (namespaces['svg'], 'set'), | ||
| 169 | (namespaces['svg'], 'stop'), | ||
| 170 | (namespaces['svg'], 'svg'), | ||
| 171 | (namespaces['svg'], 'switch'), | ||
| 172 | (namespaces['svg'], 'text'), | ||
| 173 | (namespaces['svg'], 'title'), | ||
| 174 | (namespaces['svg'], 'tspan'), | ||
| 175 | (namespaces['svg'], 'use'), | ||
| 176 | )) | ||
| 177 | |||
| 178 | allowed_attributes = frozenset(( | ||
| 179 | # HTML attributes | ||
| 180 | (None, 'abbr'), | ||
| 181 | (None, 'accept'), | ||
| 182 | (None, 'accept-charset'), | ||
| 183 | (None, 'accesskey'), | ||
| 184 | (None, 'action'), | ||
| 185 | (None, 'align'), | ||
| 186 | (None, 'alt'), | ||
| 187 | (None, 'autocomplete'), | ||
| 188 | (None, 'autofocus'), | ||
| 189 | (None, 'axis'), | ||
| 190 | (None, 'background'), | ||
| 191 | (None, 'balance'), | ||
| 192 | (None, 'bgcolor'), | ||
| 193 | (None, 'bgproperties'), | ||
| 194 | (None, 'border'), | ||
| 195 | (None, 'bordercolor'), | ||
| 196 | (None, 'bordercolordark'), | ||
| 197 | (None, 'bordercolorlight'), | ||
| 198 | (None, 'bottompadding'), | ||
| 199 | (None, 'cellpadding'), | ||
| 200 | (None, 'cellspacing'), | ||
| 201 | (None, 'ch'), | ||
| 202 | (None, 'challenge'), | ||
| 203 | (None, 'char'), | ||
| 204 | (None, 'charoff'), | ||
| 205 | (None, 'choff'), | ||
| 206 | (None, 'charset'), | ||
| 207 | (None, 'checked'), | ||
| 208 | (None, 'cite'), | ||
| 209 | (None, 'class'), | ||
| 210 | (None, 'clear'), | ||
| 211 | (None, 'color'), | ||
| 212 | (None, 'cols'), | ||
| 213 | (None, 'colspan'), | ||
| 214 | (None, 'compact'), | ||
| 215 | (None, 'contenteditable'), | ||
| 216 | (None, 'controls'), | ||
| 217 | (None, 'coords'), | ||
| 218 | (None, 'data'), | ||
| 219 | (None, 'datafld'), | ||
| 220 | (None, 'datapagesize'), | ||
| 221 | (None, 'datasrc'), | ||
| 222 | (None, 'datetime'), | ||
| 223 | (None, 'default'), | ||
| 224 | (None, 'delay'), | ||
| 225 | (None, 'dir'), | ||
| 226 | (None, 'disabled'), | ||
| 227 | (None, 'draggable'), | ||
| 228 | (None, 'dynsrc'), | ||
| 229 | (None, 'enctype'), | ||
| 230 | (None, 'end'), | ||
| 231 | (None, 'face'), | ||
| 232 | (None, 'for'), | ||
| 233 | (None, 'form'), | ||
| 234 | (None, 'frame'), | ||
| 235 | (None, 'galleryimg'), | ||
| 236 | (None, 'gutter'), | ||
| 237 | (None, 'headers'), | ||
| 238 | (None, 'height'), | ||
| 239 | (None, 'hidefocus'), | ||
| 240 | (None, 'hidden'), | ||
| 241 | (None, 'high'), | ||
| 242 | (None, 'href'), | ||
| 243 | (None, 'hreflang'), | ||
| 244 | (None, 'hspace'), | ||
| 245 | (None, 'icon'), | ||
| 246 | (None, 'id'), | ||
| 247 | (None, 'inputmode'), | ||
| 248 | (None, 'ismap'), | ||
| 249 | (None, 'keytype'), | ||
| 250 | (None, 'label'), | ||
| 251 | (None, 'leftspacing'), | ||
| 252 | (None, 'lang'), | ||
| 253 | (None, 'list'), | ||
| 254 | (None, 'longdesc'), | ||
| 255 | (None, 'loop'), | ||
| 256 | (None, 'loopcount'), | ||
| 257 | (None, 'loopend'), | ||
| 258 | (None, 'loopstart'), | ||
| 259 | (None, 'low'), | ||
| 260 | (None, 'lowsrc'), | ||
| 261 | (None, 'max'), | ||
| 262 | (None, 'maxlength'), | ||
| 263 | (None, 'media'), | ||
| 264 | (None, 'method'), | ||
| 265 | (None, 'min'), | ||
| 266 | (None, 'multiple'), | ||
| 267 | (None, 'name'), | ||
| 268 | (None, 'nohref'), | ||
| 269 | (None, 'noshade'), | ||
| 270 | (None, 'nowrap'), | ||
| 271 | (None, 'open'), | ||
| 272 | (None, 'optimum'), | ||
| 273 | (None, 'pattern'), | ||
| 274 | (None, 'ping'), | ||
| 275 | (None, 'point-size'), | ||
| 276 | (None, 'poster'), | ||
| 277 | (None, 'pqg'), | ||
| 278 | (None, 'preload'), | ||
| 279 | (None, 'prompt'), | ||
| 280 | (None, 'radiogroup'), | ||
| 281 | (None, 'readonly'), | ||
| 282 | (None, 'rel'), | ||
| 283 | (None, 'repeat-max'), | ||
| 284 | (None, 'repeat-min'), | ||
| 285 | (None, 'replace'), | ||
| 286 | (None, 'required'), | ||
| 287 | (None, 'rev'), | ||
| 288 | (None, 'rightspacing'), | ||
| 289 | (None, 'rows'), | ||
| 290 | (None, 'rowspan'), | ||
| 291 | (None, 'rules'), | ||
| 292 | (None, 'scope'), | ||
| 293 | (None, 'selected'), | ||
| 294 | (None, 'shape'), | ||
| 295 | (None, 'size'), | ||
| 296 | (None, 'span'), | ||
| 297 | (None, 'src'), | ||
| 298 | (None, 'start'), | ||
| 299 | (None, 'step'), | ||
| 300 | (None, 'style'), | ||
| 301 | (None, 'summary'), | ||
| 302 | (None, 'suppress'), | ||
| 303 | (None, 'tabindex'), | ||
| 304 | (None, 'target'), | ||
| 305 | (None, 'template'), | ||
| 306 | (None, 'title'), | ||
| 307 | (None, 'toppadding'), | ||
| 308 | (None, 'type'), | ||
| 309 | (None, 'unselectable'), | ||
| 310 | (None, 'usemap'), | ||
| 311 | (None, 'urn'), | ||
| 312 | (None, 'valign'), | ||
| 313 | (None, 'value'), | ||
| 314 | (None, 'variable'), | ||
| 315 | (None, 'volume'), | ||
| 316 | (None, 'vspace'), | ||
| 317 | (None, 'vrml'), | ||
| 318 | (None, 'width'), | ||
| 319 | (None, 'wrap'), | ||
| 320 | (namespaces['xml'], 'lang'), | ||
| 321 | # MathML attributes | ||
| 322 | (None, 'actiontype'), | ||
| 323 | (None, 'align'), | ||
| 324 | (None, 'columnalign'), | ||
| 327 | (None, 'columnlines'), | ||
| 328 | (None, 'columnspacing'), | ||
| 329 | (None, 'columnspan'), | ||
| 330 | (None, 'depth'), | ||
| 331 | (None, 'display'), | ||
| 332 | (None, 'displaystyle'), | ||
| 333 | (None, 'equalcolumns'), | ||
| 334 | (None, 'equalrows'), | ||
| 335 | (None, 'fence'), | ||
| 336 | (None, 'fontstyle'), | ||
| 337 | (None, 'fontweight'), | ||
| 338 | (None, 'frame'), | ||
| 339 | (None, 'height'), | ||
| 340 | (None, 'linethickness'), | ||
| 341 | (None, 'lspace'), | ||
| 342 | (None, 'mathbackground'), | ||
| 343 | (None, 'mathcolor'), | ||
| 344 | (None, 'mathvariant'), | ||
| 346 | (None, 'maxsize'), | ||
| 347 | (None, 'minsize'), | ||
| 348 | (None, 'other'), | ||
| 349 | (None, 'rowalign'), | ||
| 352 | (None, 'rowlines'), | ||
| 353 | (None, 'rowspacing'), | ||
| 354 | (None, 'rowspan'), | ||
| 355 | (None, 'rspace'), | ||
| 356 | (None, 'scriptlevel'), | ||
| 357 | (None, 'selection'), | ||
| 358 | (None, 'separator'), | ||
| 359 | (None, 'stretchy'), | ||
| 360 | (None, 'width'), | ||
| 362 | (namespaces['xlink'], 'href'), | ||
| 363 | (namespaces['xlink'], 'show'), | ||
| 364 | (namespaces['xlink'], 'type'), | ||
| 365 | # SVG attributes | ||
| 366 | (None, 'accent-height'), | ||
| 367 | (None, 'accumulate'), | ||
| 368 | (None, 'additive'), | ||
| 369 | (None, 'alphabetic'), | ||
| 370 | (None, 'arabic-form'), | ||
| 371 | (None, 'ascent'), | ||
| 372 | (None, 'attributeName'), | ||
| 373 | (None, 'attributeType'), | ||
| 374 | (None, 'baseProfile'), | ||
| 375 | (None, 'bbox'), | ||
| 376 | (None, 'begin'), | ||
| 377 | (None, 'by'), | ||
| 378 | (None, 'calcMode'), | ||
| 379 | (None, 'cap-height'), | ||
| 380 | (None, 'class'), | ||
| 381 | (None, 'clip-path'), | ||
| 382 | (None, 'color'), | ||
| 383 | (None, 'color-rendering'), | ||
| 384 | (None, 'content'), | ||
| 385 | (None, 'cx'), | ||
| 386 | (None, 'cy'), | ||
| 387 | (None, 'd'), | ||
| 388 | (None, 'dx'), | ||
| 389 | (None, 'dy'), | ||
| 390 | (None, 'descent'), | ||
| 391 | (None, 'display'), | ||
| 392 | (None, 'dur'), | ||
| 393 | (None, 'end'), | ||
| 394 | (None, 'fill'), | ||
| 395 | (None, 'fill-opacity'), | ||
| 396 | (None, 'fill-rule'), | ||
| 397 | (None, 'font-family'), | ||
| 398 | (None, 'font-size'), | ||
| 399 | (None, 'font-stretch'), | ||
| 400 | (None, 'font-style'), | ||
| 401 | (None, 'font-variant'), | ||
| 402 | (None, 'font-weight'), | ||
| 403 | (None, 'from'), | ||
| 404 | (None, 'fx'), | ||
| 405 | (None, 'fy'), | ||
| 406 | (None, 'g1'), | ||
| 407 | (None, 'g2'), | ||
| 408 | (None, 'glyph-name'), | ||
| 409 | (None, 'gradientUnits'), | ||
| 410 | (None, 'hanging'), | ||
| 411 | (None, 'height'), | ||
| 412 | (None, 'horiz-adv-x'), | ||
| 413 | (None, 'horiz-origin-x'), | ||
| 414 | (None, 'id'), | ||
| 415 | (None, 'ideographic'), | ||
| 416 | (None, 'k'), | ||
| 417 | (None, 'keyPoints'), | ||
| 418 | (None, 'keySplines'), | ||
| 419 | (None, 'keyTimes'), | ||
| 420 | (None, 'lang'), | ||
| 421 | (None, 'marker-end'), | ||
| 422 | (None, 'marker-mid'), | ||
| 423 | (None, 'marker-start'), | ||
| 424 | (None, 'markerHeight'), | ||
| 425 | (None, 'markerUnits'), | ||
| 426 | (None, 'markerWidth'), | ||
| 427 | (None, 'mathematical'), | ||
| 428 | (None, 'max'), | ||
| 429 | (None, 'min'), | ||
| 430 | (None, 'name'), | ||
| 431 | (None, 'offset'), | ||
| 432 | (None, 'opacity'), | ||
| 433 | (None, 'orient'), | ||
| 434 | (None, 'origin'), | ||
| 435 | (None, 'overline-position'), | ||
| 436 | (None, 'overline-thickness'), | ||
| 437 | (None, 'panose-1'), | ||
| 438 | (None, 'path'), | ||
| 439 | (None, 'pathLength'), | ||
| 440 | (None, 'points'), | ||
| 441 | (None, 'preserveAspectRatio'), | ||
| 442 | (None, 'r'), | ||
| 443 | (None, 'refX'), | ||
| 444 | (None, 'refY'), | ||
| 445 | (None, 'repeatCount'), | ||
| 446 | (None, 'repeatDur'), | ||
| 447 | (None, 'requiredExtensions'), | ||
| 448 | (None, 'requiredFeatures'), | ||
| 449 | (None, 'restart'), | ||
| 450 | (None, 'rotate'), | ||
| 451 | (None, 'rx'), | ||
| 452 | (None, 'ry'), | ||
| 453 | (None, 'slope'), | ||
| 454 | (None, 'stemh'), | ||
| 455 | (None, 'stemv'), | ||
| 456 | (None, 'stop-color'), | ||
| 457 | (None, 'stop-opacity'), | ||
| 458 | (None, 'strikethrough-position'), | ||
| 459 | (None, 'strikethrough-thickness'), | ||
| 460 | (None, 'stroke'), | ||
| 461 | (None, 'stroke-dasharray'), | ||
| 462 | (None, 'stroke-dashoffset'), | ||
| 463 | (None, 'stroke-linecap'), | ||
| 464 | (None, 'stroke-linejoin'), | ||
| 465 | (None, 'stroke-miterlimit'), | ||
| 466 | (None, 'stroke-opacity'), | ||
| 467 | (None, 'stroke-width'), | ||
| 468 | (None, 'systemLanguage'), | ||
| 469 | (None, 'target'), | ||
| 470 | (None, 'text-anchor'), | ||
| 471 | (None, 'to'), | ||
| 472 | (None, 'transform'), | ||
| 473 | (None, 'type'), | ||
| 474 | (None, 'u1'), | ||
| 475 | (None, 'u2'), | ||
| 476 | (None, 'underline-position'), | ||
| 477 | (None, 'underline-thickness'), | ||
| 478 | (None, 'unicode'), | ||
| 479 | (None, 'unicode-range'), | ||
| 480 | (None, 'units-per-em'), | ||
| 481 | (None, 'values'), | ||
| 482 | (None, 'version'), | ||
| 483 | (None, 'viewBox'), | ||
| 484 | (None, 'visibility'), | ||
| 485 | (None, 'width'), | ||
| 486 | (None, 'widths'), | ||
| 487 | (None, 'x'), | ||
| 488 | (None, 'x-height'), | ||
| 489 | (None, 'x1'), | ||
| 490 | (None, 'x2'), | ||
| 491 | (namespaces['xlink'], 'actuate'), | ||
| 492 | (namespaces['xlink'], 'arcrole'), | ||
| 493 | (namespaces['xlink'], 'href'), | ||
| 494 | (namespaces['xlink'], 'role'), | ||
| 495 | (namespaces['xlink'], 'show'), | ||
| 496 | (namespaces['xlink'], 'title'), | ||
| 497 | (namespaces['xlink'], 'type'), | ||
| 498 | (namespaces['xml'], 'base'), | ||
| 499 | (namespaces['xml'], 'lang'), | ||
| 500 | (namespaces['xml'], 'space'), | ||
| 501 | (None, 'y'), | ||
| 502 | (None, 'y1'), | ||
| 503 | (None, 'y2'), | ||
| 504 | (None, 'zoomAndPan'), | ||
| 505 | )) | ||
| 506 | |||
| 507 | attr_val_is_uri = frozenset(( | ||
| 508 | (None, 'href'), | ||
| 509 | (None, 'src'), | ||
| 510 | (None, 'cite'), | ||
| 511 | (None, 'action'), | ||
| 512 | (None, 'longdesc'), | ||
| 513 | (None, 'poster'), | ||
| 514 | (None, 'background'), | ||
| 515 | (None, 'datasrc'), | ||
| 516 | (None, 'dynsrc'), | ||
| 517 | (None, 'lowsrc'), | ||
| 518 | (None, 'ping'), | ||
| 519 | (namespaces['xlink'], 'href'), | ||
| 520 | (namespaces['xml'], 'base'), | ||
| 521 | )) | ||
| 522 | |||
| 523 | svg_attr_val_allows_ref = frozenset(( | ||
| 524 | (None, 'clip-path'), | ||
| 525 | (None, 'color-profile'), | ||
| 526 | (None, 'cursor'), | ||
| 527 | (None, 'fill'), | ||
| 528 | (None, 'filter'), | ||
| 529 | (None, 'marker'), | ||
| 530 | (None, 'marker-start'), | ||
| 531 | (None, 'marker-mid'), | ||
| 532 | (None, 'marker-end'), | ||
| 533 | (None, 'mask'), | ||
| 534 | (None, 'stroke'), | ||
| 535 | )) | ||
| 536 | |||
| 537 | svg_allow_local_href = frozenset(( | ||
| 538 | (None, 'altGlyph'), | ||
| 539 | (None, 'animate'), | ||
| 540 | (None, 'animateColor'), | ||
| 541 | (None, 'animateMotion'), | ||
| 542 | (None, 'animateTransform'), | ||
| 543 | (None, 'cursor'), | ||
| 544 | (None, 'feImage'), | ||
| 545 | (None, 'filter'), | ||
| 546 | (None, 'linearGradient'), | ||
| 547 | (None, 'pattern'), | ||
| 548 | (None, 'radialGradient'), | ||
| 549 | (None, 'textpath'), | ||
| 550 | (None, 'tref'), | ||
| 551 | (None, 'set'), | ||
| 552 | (None, 'use') | ||
| 553 | )) | ||
| 554 | |||
| 555 | allowed_css_properties = frozenset(( | ||
| 556 | 'azimuth', | ||
| 557 | 'background-color', | ||
| 558 | 'border-bottom-color', | ||
| 559 | 'border-collapse', | ||
| 560 | 'border-color', | ||
| 561 | 'border-left-color', | ||
| 562 | 'border-right-color', | ||
| 563 | 'border-top-color', | ||
| 564 | 'clear', | ||
| 565 | 'color', | ||
| 566 | 'cursor', | ||
| 567 | 'direction', | ||
| 568 | 'display', | ||
| 569 | 'elevation', | ||
| 570 | 'float', | ||
| 571 | 'font', | ||
| 572 | 'font-family', | ||
| 573 | 'font-size', | ||
| 574 | 'font-style', | ||
| 575 | 'font-variant', | ||
| 576 | 'font-weight', | ||
| 577 | 'height', | ||
| 578 | 'letter-spacing', | ||
| 579 | 'line-height', | ||
| 580 | 'overflow', | ||
| 581 | 'pause', | ||
| 582 | 'pause-after', | ||
| 583 | 'pause-before', | ||
| 584 | 'pitch', | ||
| 585 | 'pitch-range', | ||
| 586 | 'richness', | ||
| 587 | 'speak', | ||
| 588 | 'speak-header', | ||
| 589 | 'speak-numeral', | ||
| 590 | 'speak-punctuation', | ||
| 591 | 'speech-rate', | ||
| 592 | 'stress', | ||
| 593 | 'text-align', | ||
| 594 | 'text-decoration', | ||
| 595 | 'text-indent', | ||
| 596 | 'unicode-bidi', | ||
| 597 | 'vertical-align', | ||
| 598 | 'voice-family', | ||
| 599 | 'volume', | ||
| 600 | 'white-space', | ||
| 601 | 'width', | ||
| 602 | )) | ||
| 603 | |||
| 604 | allowed_css_keywords = frozenset(( | ||
| 605 | 'auto', | ||
| 606 | 'aqua', | ||
| 607 | 'black', | ||
| 608 | 'block', | ||
| 609 | 'blue', | ||
| 610 | 'bold', | ||
| 611 | 'both', | ||
| 612 | 'bottom', | ||
| 613 | 'brown', | ||
| 614 | 'center', | ||
| 615 | 'collapse', | ||
| 616 | 'dashed', | ||
| 617 | 'dotted', | ||
| 618 | 'fuchsia', | ||
| 619 | 'gray', | ||
| 620 | 'green', | ||
| 621 | '!important', | ||
| 622 | 'italic', | ||
| 623 | 'left', | ||
| 624 | 'lime', | ||
| 625 | 'maroon', | ||
| 626 | 'medium', | ||
| 627 | 'none', | ||
| 628 | 'navy', | ||
| 629 | 'normal', | ||
| 630 | 'nowrap', | ||
| 631 | 'olive', | ||
| 632 | 'pointer', | ||
| 633 | 'purple', | ||
| 634 | 'red', | ||
| 635 | 'right', | ||
| 636 | 'solid', | ||
| 637 | 'silver', | ||
| 638 | 'teal', | ||
| 639 | 'top', | ||
| 640 | 'transparent', | ||
| 641 | 'underline', | ||
| 642 | 'white', | ||
| 643 | 'yellow', | ||
| 644 | )) | ||
| 645 | |||
| 646 | allowed_svg_properties = frozenset(( | ||
| 647 | 'fill', | ||
| 648 | 'fill-opacity', | ||
| 649 | 'fill-rule', | ||
| 650 | 'stroke', | ||
| 651 | 'stroke-width', | ||
| 652 | 'stroke-linecap', | ||
| 653 | 'stroke-linejoin', | ||
| 654 | 'stroke-opacity', | ||
| 655 | )) | ||
| 656 | |||
| 657 | allowed_protocols = frozenset(( | ||
| 658 | 'ed2k', | ||
| 659 | 'ftp', | ||
| 660 | 'http', | ||
| 661 | 'https', | ||
| 662 | 'irc', | ||
| 663 | 'mailto', | ||
| 664 | 'news', | ||
| 665 | 'gopher', | ||
| 666 | 'nntp', | ||
| 667 | 'telnet', | ||
| 668 | 'webcal', | ||
| 669 | 'xmpp', | ||
| 670 | 'callto', | ||
| 671 | 'feed', | ||
| 672 | 'urn', | ||
| 673 | 'aim', | ||
| 674 | 'rsync', | ||
| 675 | 'tag', | ||
| 676 | 'ssh', | ||
| 677 | 'sftp', | ||
| 678 | 'rtsp', | ||
| 679 | 'afs', | ||
| 680 | 'data', | ||
| 681 | )) | ||
| 682 | |||
| 683 | allowed_content_types = frozenset(( | ||
| 684 | 'image/png', | ||
| 685 | 'image/jpeg', | ||
| 686 | 'image/gif', | ||
| 687 | 'image/webp', | ||
| 688 | 'image/bmp', | ||
| 689 | 'text/plain', | ||
| 690 | )) | ||
| 691 | |||
| 692 | |||
| 693 | data_content_type = re.compile(r''' | ||
| 694 | ^ | ||
| 695 | # Match a content type <application>/<type> | ||
| 696 | (?P<content_type>[-a-zA-Z0-9.]+/[-a-zA-Z0-9.]+) | ||
| 697 | # Match any character set and encoding | ||
| 698 | (?:(?:;charset=(?:[-a-zA-Z0-9]+)(?:;(?:base64))?) | ||
| 699 | |(?:;(?:base64))?(?:;charset=(?:[-a-zA-Z0-9]+))?) | ||
| 700 | # Assume the rest is data | ||
| 701 | ,.* | ||
| 702 | $ | ||
| 703 | ''', | ||
| 704 | re.VERBOSE) | ||
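| | # For example (illustrative note, not in the upstream source): applied to | ||
| | # the path of a parsed data: URI such as 'image/png;base64,iVBORw0...', | ||
| | # the pattern captures content_type == 'image/png'; allowed_token() below | ||
| | # checks that group against allowed_content_types. | ||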
| 705 | |||
| 706 | |||
| 707 | class Filter(base.Filter): | ||
| 708 | """Sanitizes token stream of XHTML+MathML+SVG and of inline style attributes""" | ||
| 709 | def __init__(self, | ||
| 710 | source, | ||
| 711 | allowed_elements=allowed_elements, | ||
| 712 | allowed_attributes=allowed_attributes, | ||
| 713 | allowed_css_properties=allowed_css_properties, | ||
| 714 | allowed_css_keywords=allowed_css_keywords, | ||
| 715 | allowed_svg_properties=allowed_svg_properties, | ||
| 716 | allowed_protocols=allowed_protocols, | ||
| 717 | allowed_content_types=allowed_content_types, | ||
| 718 | attr_val_is_uri=attr_val_is_uri, | ||
| 719 | svg_attr_val_allows_ref=svg_attr_val_allows_ref, | ||
| 720 | svg_allow_local_href=svg_allow_local_href): | ||
| 721 | """Creates a Filter | ||
| 722 | |||
| 723 | :arg allowed_elements: set of elements to allow--everything else will | ||
| 724 | be escaped | ||
| 725 | |||
| 726 | :arg allowed_attributes: set of attributes to allow in | ||
| 727 | elements--everything else will be stripped | ||
| 728 | |||
| 729 | :arg allowed_css_properties: set of CSS properties to allow--everything | ||
| 730 | else will be stripped | ||
| 731 | |||
| 732 | :arg allowed_css_keywords: set of CSS keywords to allow--everything | ||
| 733 | else will be stripped | ||
| 734 | |||
| 735 | :arg allowed_svg_properties: set of SVG properties to allow--everything | ||
| 736 | else will be removed | ||
| 737 | |||
| 738 | :arg allowed_protocols: set of allowed protocols for URIs | ||
| 739 | |||
| 740 | :arg allowed_content_types: set of allowed content types for ``data`` URIs. | ||
| 741 | |||
| 742 | :arg attr_val_is_uri: set of attributes that have URI values--values | ||
| 743 | that have a scheme not listed in ``allowed_protocols`` are removed | ||
| 744 | |||
| 745 | :arg svg_attr_val_allows_ref: set of SVG attributes that can have | ||
| 746 | references | ||
| 747 | |||
| 748 | :arg svg_allow_local_href: set of SVG elements that may keep local | ||
| 749 | (fragment) hrefs--non-local hrefs on these elements are removed | ||
| 750 | |||
| 751 | """ | ||
| 752 | super(Filter, self).__init__(source) | ||
| 753 | self.allowed_elements = allowed_elements | ||
| 754 | self.allowed_attributes = allowed_attributes | ||
| 755 | self.allowed_css_properties = allowed_css_properties | ||
| 756 | self.allowed_css_keywords = allowed_css_keywords | ||
| 757 | self.allowed_svg_properties = allowed_svg_properties | ||
| 758 | self.allowed_protocols = allowed_protocols | ||
| 759 | self.allowed_content_types = allowed_content_types | ||
| 760 | self.attr_val_is_uri = attr_val_is_uri | ||
| 761 | self.svg_attr_val_allows_ref = svg_attr_val_allows_ref | ||
| 762 | self.svg_allow_local_href = svg_allow_local_href | ||
| 763 | |||
| 764 | def __iter__(self): | ||
| 765 | for token in base.Filter.__iter__(self): | ||
| 766 | token = self.sanitize_token(token) | ||
| 767 | if token: | ||
| 768 | yield token | ||
| 769 | |||
| 770 | # Sanitize the html, escaping all elements not in ALLOWED_ELEMENTS, and | ||
| 771 | # stripping out all attributes not in ALLOWED_ATTRIBUTES. Style attributes | ||
| 772 | # are parsed, and a restricted set, specified by ALLOWED_CSS_PROPERTIES and | ||
| 773 | # ALLOWED_CSS_KEYWORDS, are allowed through. Attributes in ATTR_VAL_IS_URI | ||
| 774 | # are scanned, and only URI schemes specified in ALLOWED_PROTOCOLS are | ||
| 775 | # allowed. | ||
| 776 | # | ||
| 777 | # sanitize_html('<script> do_nasty_stuff() </script>') | ||
| 778 | # => &lt;script> do_nasty_stuff() &lt;/script> | ||
| 779 | # sanitize_html('<a href="javascript: sucker();">Click here for $100</a>') | ||
| 780 | # => <a>Click here for $100</a> | ||
| 781 | def sanitize_token(self, token): | ||
| 782 | |||
| 783 | # accommodate filters which use token_type differently | ||
| 784 | token_type = token["type"] | ||
| 785 | if token_type in ("StartTag", "EndTag", "EmptyTag"): | ||
| 786 | name = token["name"] | ||
| 787 | namespace = token["namespace"] | ||
| 788 | if ((namespace, name) in self.allowed_elements or | ||
| 789 | (namespace is None and | ||
| 790 | (namespaces["html"], name) in self.allowed_elements)): | ||
| 791 | return self.allowed_token(token) | ||
| 792 | else: | ||
| 793 | return self.disallowed_token(token) | ||
| 794 | elif token_type == "Comment": | ||
| 795 | pass | ||
| 796 | else: | ||
| 797 | return token | ||
| 798 | |||
| 799 | def allowed_token(self, token): | ||
| 800 | if "data" in token: | ||
| 801 | attrs = token["data"] | ||
| 802 | attr_names = set(attrs.keys()) | ||
| 803 | |||
| 804 | # Remove forbidden attributes | ||
| 805 | for to_remove in (attr_names - self.allowed_attributes): | ||
| 806 | del token["data"][to_remove] | ||
| 807 | attr_names.remove(to_remove) | ||
| 808 | |||
| 809 | # Remove attributes with disallowed URL values | ||
| 810 | for attr in (attr_names & self.attr_val_is_uri): | ||
| 811 | assert attr in attrs | ||
| 812 | # I don't have a clue where this regexp comes from or why it matches those | ||
| 813 | # characters, nor why we call unescape. I just know it's always been here. | ||
| 814 | # Should you be worried by this comment in a sanitizer? Yes. On the other hand, all | ||
| 815 | # this will do is remove *more* than it otherwise would. | ||
| 816 | val_unescaped = re.sub("[`\x00-\x20\x7f-\xa0\\s]+", '', | ||
| 817 | unescape(attrs[attr])).lower() | ||
| 818 | # remove replacement characters from unescaped characters | ||
| 819 | val_unescaped = val_unescaped.replace("\ufffd", "") | ||
| 820 | try: | ||
| 821 | uri = urlparse.urlparse(val_unescaped) | ||
| 822 | except ValueError: | ||
| 823 | uri = None | ||
| 824 | del attrs[attr] | ||
| 825 | if uri and uri.scheme: | ||
| 826 | if uri.scheme not in self.allowed_protocols: | ||
| 827 | del attrs[attr] | ||
| 828 | if uri.scheme == 'data': | ||
| 829 | m = data_content_type.match(uri.path) | ||
| 830 | if not m: | ||
| 831 | del attrs[attr] | ||
| 832 | elif m.group('content_type') not in self.allowed_content_types: | ||
| 833 | del attrs[attr] | ||
| 834 | |||
| 835 | for attr in self.svg_attr_val_allows_ref: | ||
| 836 | if attr in attrs: | ||
| 837 | attrs[attr] = re.sub(r'url\s*\(\s*[^#\s][^)]+?\)', | ||
| 838 | ' ', | ||
| 839 | unescape(attrs[attr])) | ||
| 840 | if (token["name"] in self.svg_allow_local_href and | ||
| 841 | (namespaces['xlink'], 'href') in attrs and re.search(r'^\s*[^#\s].*', | ||
| 842 | attrs[(namespaces['xlink'], 'href')])): | ||
| 843 | del attrs[(namespaces['xlink'], 'href')] | ||
| 844 | if (None, 'style') in attrs: | ||
| 845 | attrs[(None, 'style')] = self.sanitize_css(attrs[(None, 'style')]) | ||
| 846 | token["data"] = attrs | ||
| 847 | return token | ||
| 848 | |||
| 849 | def disallowed_token(self, token): | ||
| 850 | token_type = token["type"] | ||
| 851 | if token_type == "EndTag": | ||
| 852 | token["data"] = "</%s>" % token["name"] | ||
| 853 | elif token["data"]: | ||
| 854 | assert token_type in ("StartTag", "EmptyTag") | ||
| 855 | attrs = [] | ||
| 856 | for (ns, name), v in token["data"].items(): | ||
| 857 | attrs.append(' %s="%s"' % (name if ns is None else "%s:%s" % (prefixes[ns], name), escape(v))) | ||
| 858 | token["data"] = "<%s%s>" % (token["name"], ''.join(attrs)) | ||
| 859 | else: | ||
| 860 | token["data"] = "<%s>" % token["name"] | ||
| 861 | if token.get("selfClosing"): | ||
| 862 | token["data"] = token["data"][:-1] + "/>" | ||
| 863 | |||
| 864 | token["type"] = "Characters" | ||
| 865 | |||
| 866 | del token["name"] | ||
| 867 | return token | ||
| 868 | |||
| 869 | def sanitize_css(self, style): | ||
| 870 | # disallow urls | ||
| 871 | style = re.compile(r'url\s*\(\s*[^\s)]+?\s*\)\s*').sub(' ', style) | ||
| 872 | |||
| 873 | # gauntlet | ||
| 874 | if not re.match(r"""^([:,;#%.\sa-zA-Z0-9!]|\w-\w|'[\s\w]+'|"[\s\w]+"|\([\d,\s]+\))*$""", style): | ||
| 875 | return '' | ||
| 876 | if not re.match(r"^\s*([-\w]+\s*:[^:;]*(;\s*|$))*$", style): | ||
| 877 | return '' | ||
| 878 | |||
| 879 | clean = [] | ||
| 880 | for prop, value in re.findall(r"([-\w]+)\s*:\s*([^:;]*)", style): | ||
| 881 | if not value: | ||
| 882 | continue | ||
| 883 | if prop.lower() in self.allowed_css_properties: | ||
| 884 | clean.append(prop + ': ' + value + ';') | ||
| 885 | elif prop.split('-')[0].lower() in ['background', 'border', 'margin', | ||
| 886 | 'padding']: | ||
| 887 | for keyword in value.split(): | ||
| 888 | if keyword not in self.allowed_css_keywords and \ | ||
| 889 | not re.match(r"^(#[0-9a-fA-F]+|rgb\(\d+%?,\d*%?,?\d*%?\)?|\d{0,2}\.?\d{0,2}(cm|em|ex|in|mm|pc|pt|px|%|,|\))?)$", keyword): # noqa | ||
| 890 | break | ||
| 891 | else: | ||
| 892 | clean.append(prop + ': ' + value + ';') | ||
| 893 | elif prop.lower() in self.allowed_svg_properties: | ||
| 894 | clean.append(prop + ': ' + value + ';') | ||
| 895 | |||
| 896 | return ' '.join(clean) | ||
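A minimal sketch of how this filter is driven, assuming the unvendored html5lib package is importable under its usual name: the sanitizer wraps a tree-walker token stream, typically just before serialization.

    import html5lib
    from html5lib import getTreeWalker
    from html5lib.filters.sanitizer import Filter as SanitizerFilter
    from html5lib.serializer import HTMLSerializer

    dirty = '<a href="javascript:alert(1)" title="hi">x</a><script>evil()</script>'
    # Parse, then pass the tree walker's token stream through the sanitizer.
    dom = html5lib.parseFragment(dirty, treebuilder="etree")
    tokens = SanitizerFilter(getTreeWalker("etree")(dom))
    print(HTMLSerializer().render(tokens))
    # The javascript: href is dropped (its scheme is not in allowed_protocols)
    # and the disallowed <script> element is escaped into plain text.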
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/filters/whitespace.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/filters/whitespace.py new file mode 100644 index 0000000..24bb0de --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/filters/whitespace.py | |||
| @@ -0,0 +1,38 @@ | |||
| 1 | from __future__ import absolute_import, division, unicode_literals | ||
| 2 | |||
| 3 | import re | ||
| 4 | |||
| 5 | from . import base | ||
| 6 | from ..constants import rcdataElements, spaceCharacters | ||
| 7 | spaceCharacters = "".join(spaceCharacters) | ||
| 8 | |||
| 9 | SPACES_REGEX = re.compile("[%s]+" % spaceCharacters) | ||
| 10 | |||
| 11 | |||
| 12 | class Filter(base.Filter): | ||
| 13 | """Collapses whitespace except in pre, textarea, and script elements""" | ||
| 14 | spacePreserveElements = frozenset(["pre", "textarea"] + list(rcdataElements)) | ||
| 15 | |||
| 16 | def __iter__(self): | ||
| 17 | preserve = 0 | ||
| 18 | for token in base.Filter.__iter__(self): | ||
| 19 | type = token["type"] | ||
| 20 | if type == "StartTag" \ | ||
| 21 | and (preserve or token["name"] in self.spacePreserveElements): | ||
| 22 | preserve += 1 | ||
| 23 | |||
| 24 | elif type == "EndTag" and preserve: | ||
| 25 | preserve -= 1 | ||
| 26 | |||
| 27 | elif not preserve and type == "SpaceCharacters" and token["data"]: | ||
| 28 | # Test on token["data"] above to not introduce spaces where there were not | ||
| 29 | token["data"] = " " | ||
| 30 | |||
| 31 | elif not preserve and type == "Characters": | ||
| 32 | token["data"] = collapse_spaces(token["data"]) | ||
| 33 | |||
| 34 | yield token | ||
| 35 | |||
| 36 | |||
| 37 | def collapse_spaces(text): | ||
| 38 | return SPACES_REGEX.sub(' ', text) | ||
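A similar minimal sketch, again assuming the unvendored html5lib package: runs of whitespace collapse in normal content while pre keeps its spacing.

    import html5lib
    from html5lib import getTreeWalker
    from html5lib.filters.whitespace import Filter as WhitespaceFilter
    from html5lib.serializer import HTMLSerializer

    doc = html5lib.parseFragment('<p>a    b</p><pre>a    b</pre>', treebuilder="etree")
    tokens = WhitespaceFilter(getTreeWalker("etree")(doc))
    print(HTMLSerializer().render(tokens))
    # The run of spaces inside <p> collapses to a single space; the text
    # inside <pre> is preserved verbatim.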
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/html5parser.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/html5parser.py new file mode 100644 index 0000000..b185971 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/html5parser.py | |||
| @@ -0,0 +1,2791 @@ | |||
| 1 | from __future__ import absolute_import, division, unicode_literals | ||
| 2 | from pip._vendor.six import with_metaclass, viewkeys | ||
| 3 | |||
| 4 | import types | ||
| 5 | from collections import OrderedDict | ||
| 6 | |||
| 7 | from . import _inputstream | ||
| 8 | from . import _tokenizer | ||
| 9 | |||
| 10 | from . import treebuilders | ||
| 11 | from .treebuilders.base import Marker | ||
| 12 | |||
| 13 | from . import _utils | ||
| 14 | from .constants import ( | ||
| 15 | spaceCharacters, asciiUpper2Lower, | ||
| 16 | specialElements, headingElements, cdataElements, rcdataElements, | ||
| 17 | tokenTypes, tagTokenTypes, | ||
| 18 | namespaces, | ||
| 19 | htmlIntegrationPointElements, mathmlTextIntegrationPointElements, | ||
| 20 | adjustForeignAttributes as adjustForeignAttributesMap, | ||
| 21 | adjustMathMLAttributes, adjustSVGAttributes, | ||
| 22 | E, | ||
| 23 | _ReparseException | ||
| 24 | ) | ||
| 25 | |||
| 26 | |||
| 27 | def parse(doc, treebuilder="etree", namespaceHTMLElements=True, **kwargs): | ||
| 28 | """Parse an HTML document as a string or file-like object into a tree | ||
| 29 | |||
| 30 | :arg doc: the document to parse as a string or file-like object | ||
| 31 | |||
| 32 | :arg treebuilder: the treebuilder to use when parsing | ||
| 33 | |||
| 34 | :arg namespaceHTMLElements: whether or not to namespace HTML elements | ||
| 35 | |||
| 36 | :returns: parsed tree | ||
| 37 | |||
| 38 | Example: | ||
| 39 | |||
| 40 | >>> from html5lib.html5parser import parse | ||
| 41 | >>> parse('<html><body><p>This is a doc</p></body></html>') | ||
| 42 | <Element u'{http://www.w3.org/1999/xhtml}html' at 0x7feac4909db0> | ||
| 43 | |||
| 44 | """ | ||
| 45 | tb = treebuilders.getTreeBuilder(treebuilder) | ||
| 46 | p = HTMLParser(tb, namespaceHTMLElements=namespaceHTMLElements) | ||
| 47 | return p.parse(doc, **kwargs) | ||
| 48 | |||
| 49 | |||
| 50 | def parseFragment(doc, container="div", treebuilder="etree", namespaceHTMLElements=True, **kwargs): | ||
| 51 | """Parse an HTML fragment as a string or file-like object into a tree | ||
| 52 | |||
| 53 | :arg doc: the fragment to parse as a string or file-like object | ||
| 54 | |||
| 55 | :arg container: the container context to parse the fragment in | ||
| 56 | |||
| 57 | :arg treebuilder: the treebuilder to use when parsing | ||
| 58 | |||
| 59 | :arg namespaceHTMLElements: whether or not to namespace HTML elements | ||
| 60 | |||
| 61 | :returns: parsed tree | ||
| 62 | |||
| 63 | Example: | ||
| 64 | |||
| 65 | >>> from html5lib.html5parser import parseFragment | ||
| 66 | >>> parseFragment('<b>this is a fragment</b>') | ||
| 67 | <Element u'DOCUMENT_FRAGMENT' at 0x7feac484b090> | ||
| 68 | |||
| 69 | """ | ||
| 70 | tb = treebuilders.getTreeBuilder(treebuilder) | ||
| 71 | p = HTMLParser(tb, namespaceHTMLElements=namespaceHTMLElements) | ||
| 72 | return p.parseFragment(doc, container=container, **kwargs) | ||
| 73 | |||
| 74 | |||
| 75 | def method_decorator_metaclass(function): | ||
| 76 | class Decorated(type): | ||
| 77 | def __new__(meta, classname, bases, classDict): | ||
| 78 | for attributeName, attribute in classDict.items(): | ||
| 79 | if isinstance(attribute, types.FunctionType): | ||
| 80 | attribute = function(attribute) | ||
| 81 | |||
| 82 | classDict[attributeName] = attribute | ||
| 83 | return type.__new__(meta, classname, bases, classDict) | ||
| 84 | return Decorated | ||
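| | # Illustrative sketch of the effect (not in the upstream source): | ||
| | #     class Traced(with_metaclass(method_decorator_metaclass(log))): | ||
| | #         def processStartTag(self, token): ... | ||
| | # leaves processStartTag pre-wrapped by ``log``; getPhases() below uses | ||
| | # exactly this trick to trace every Phase method when debug is enabled. | ||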
| 85 | |||
| 86 | |||
| 87 | class HTMLParser(object): | ||
| 88 | """HTML parser | ||
| 89 | |||
| 90 | Generates a tree structure from a stream of (possibly malformed) HTML. | ||
| 91 | |||
| 92 | """ | ||
| 93 | |||
| 94 | def __init__(self, tree=None, strict=False, namespaceHTMLElements=True, debug=False): | ||
| 95 | """ | ||
| 96 | :arg tree: a treebuilder class controlling the type of tree that will be | ||
| 97 | returned. Built in treebuilders can be accessed through | ||
| 98 | html5lib.treebuilders.getTreeBuilder(treeType) | ||
| 99 | |||
| 100 | :arg strict: raise an exception when a parse error is encountered | ||
| 101 | |||
| 102 | :arg namespaceHTMLElements: whether or not to namespace HTML elements | ||
| 103 | |||
| 104 | :arg debug: whether or not to enable debug mode which logs things | ||
| 105 | |||
| 106 | Example: | ||
| 107 | |||
| 108 | >>> from html5lib.html5parser import HTMLParser | ||
| 109 | >>> parser = HTMLParser() # generates parser with etree builder | ||
| 110 | >>> parser = HTMLParser(treebuilders.getTreeBuilder('lxml'), strict=True)  # lxml builder, strict | ||
| 111 | |||
| 112 | """ | ||
| 113 | |||
| 114 | # Raise an exception on the first error encountered | ||
| 115 | self.strict = strict | ||
| 116 | |||
| 117 | if tree is None: | ||
| 118 | tree = treebuilders.getTreeBuilder("etree") | ||
| 119 | self.tree = tree(namespaceHTMLElements) | ||
| 120 | self.errors = [] | ||
| 121 | |||
| 122 | self.phases = dict([(name, cls(self, self.tree)) for name, cls in | ||
| 123 | getPhases(debug).items()]) | ||
| 124 | |||
| 125 | def _parse(self, stream, innerHTML=False, container="div", scripting=False, **kwargs): | ||
| 126 | |||
| 127 | self.innerHTMLMode = innerHTML | ||
| 128 | self.container = container | ||
| 129 | self.scripting = scripting | ||
| 130 | self.tokenizer = _tokenizer.HTMLTokenizer(stream, parser=self, **kwargs) | ||
| 131 | self.reset() | ||
| 132 | |||
| 133 | try: | ||
| 134 | self.mainLoop() | ||
| 135 | except _ReparseException: | ||
| 136 | self.reset() | ||
| 137 | self.mainLoop() | ||
| 138 | |||
| 139 | def reset(self): | ||
| 140 | self.tree.reset() | ||
| 141 | self.firstStartTag = False | ||
| 142 | self.errors = [] | ||
| 143 | self.log = [] # only used with debug mode | ||
| 144 | # "quirks" / "limited quirks" / "no quirks" | ||
| 145 | self.compatMode = "no quirks" | ||
| 146 | |||
| 147 | if self.innerHTMLMode: | ||
| 148 | self.innerHTML = self.container.lower() | ||
| 149 | |||
| 150 | if self.innerHTML in cdataElements: | ||
| 151 | self.tokenizer.state = self.tokenizer.rcdataState | ||
| 152 | elif self.innerHTML in rcdataElements: | ||
| 153 | self.tokenizer.state = self.tokenizer.rawtextState | ||
| 154 | elif self.innerHTML == 'plaintext': | ||
| 155 | self.tokenizer.state = self.tokenizer.plaintextState | ||
| 156 | else: | ||
| 157 | # state already is data state | ||
| 158 | # self.tokenizer.state = self.tokenizer.dataState | ||
| 159 | pass | ||
| 160 | self.phase = self.phases["beforeHtml"] | ||
| 161 | self.phase.insertHtmlElement() | ||
| 162 | self.resetInsertionMode() | ||
| 163 | else: | ||
| 164 | self.innerHTML = False # pylint:disable=redefined-variable-type | ||
| 165 | self.phase = self.phases["initial"] | ||
| 166 | |||
| 167 | self.lastPhase = None | ||
| 168 | |||
| 169 | self.beforeRCDataPhase = None | ||
| 170 | |||
| 171 | self.framesetOK = True | ||
| 172 | |||
| 173 | @property | ||
| 174 | def documentEncoding(self): | ||
| 175 | """Name of the character encoding that was used to decode the input stream, or | ||
| 176 | :obj:`None` if that is not determined yet | ||
| 177 | |||
| 178 | """ | ||
| 179 | if not hasattr(self, 'tokenizer'): | ||
| 180 | return None | ||
| 181 | return self.tokenizer.stream.charEncoding[0].name | ||
| 182 | |||
| 183 | def isHTMLIntegrationPoint(self, element): | ||
| 184 | if (element.name == "annotation-xml" and | ||
| 185 | element.namespace == namespaces["mathml"]): | ||
| 186 | return ("encoding" in element.attributes and | ||
| 187 | element.attributes["encoding"].translate( | ||
| 188 | asciiUpper2Lower) in | ||
| 189 | ("text/html", "application/xhtml+xml")) | ||
| 190 | else: | ||
| 191 | return (element.namespace, element.name) in htmlIntegrationPointElements | ||
| 192 | |||
| 193 | def isMathMLTextIntegrationPoint(self, element): | ||
| 194 | return (element.namespace, element.name) in mathmlTextIntegrationPointElements | ||
| 195 | |||
| 196 | def mainLoop(self): | ||
| 197 | CharactersToken = tokenTypes["Characters"] | ||
| 198 | SpaceCharactersToken = tokenTypes["SpaceCharacters"] | ||
| 199 | StartTagToken = tokenTypes["StartTag"] | ||
| 200 | EndTagToken = tokenTypes["EndTag"] | ||
| 201 | CommentToken = tokenTypes["Comment"] | ||
| 202 | DoctypeToken = tokenTypes["Doctype"] | ||
| 203 | ParseErrorToken = tokenTypes["ParseError"] | ||
| 204 | |||
| 205 | for token in self.normalizedTokens(): | ||
| 206 | prev_token = None | ||
| 207 | new_token = token | ||
| 208 | while new_token is not None: | ||
| 209 | prev_token = new_token | ||
| 210 | currentNode = self.tree.openElements[-1] if self.tree.openElements else None | ||
| 211 | currentNodeNamespace = currentNode.namespace if currentNode else None | ||
| 212 | currentNodeName = currentNode.name if currentNode else None | ||
| 213 | |||
| 214 | type = new_token["type"] | ||
| 215 | |||
| 216 | if type == ParseErrorToken: | ||
| 217 | self.parseError(new_token["data"], new_token.get("datavars", {})) | ||
| 218 | new_token = None | ||
| 219 | else: | ||
| 220 | if (len(self.tree.openElements) == 0 or | ||
| 221 | currentNodeNamespace == self.tree.defaultNamespace or | ||
| 222 | (self.isMathMLTextIntegrationPoint(currentNode) and | ||
| 223 | ((type == StartTagToken and | ||
| 224 | token["name"] not in frozenset(["mglyph", "malignmark"])) or | ||
| 225 | type in (CharactersToken, SpaceCharactersToken))) or | ||
| 226 | (currentNodeNamespace == namespaces["mathml"] and | ||
| 227 | currentNodeName == "annotation-xml" and | ||
| 228 | type == StartTagToken and | ||
| 229 | token["name"] == "svg") or | ||
| 230 | (self.isHTMLIntegrationPoint(currentNode) and | ||
| 231 | type in (StartTagToken, CharactersToken, SpaceCharactersToken))): | ||
| 232 | phase = self.phase | ||
| 233 | else: | ||
| 234 | phase = self.phases["inForeignContent"] | ||
| 235 | |||
| 236 | if type == CharactersToken: | ||
| 237 | new_token = phase.processCharacters(new_token) | ||
| 238 | elif type == SpaceCharactersToken: | ||
| 239 | new_token = phase.processSpaceCharacters(new_token) | ||
| 240 | elif type == StartTagToken: | ||
| 241 | new_token = phase.processStartTag(new_token) | ||
| 242 | elif type == EndTagToken: | ||
| 243 | new_token = phase.processEndTag(new_token) | ||
| 244 | elif type == CommentToken: | ||
| 245 | new_token = phase.processComment(new_token) | ||
| 246 | elif type == DoctypeToken: | ||
| 247 | new_token = phase.processDoctype(new_token) | ||
| 248 | |||
| 249 | if (type == StartTagToken and prev_token["selfClosing"] and | ||
| 250 | not prev_token["selfClosingAcknowledged"]): | ||
| 251 | self.parseError("non-void-element-with-trailing-solidus", | ||
| 252 | {"name": prev_token["name"]}) | ||
| 253 | |||
| 254 | # When the loop finishes it's EOF | ||
| 255 | reprocess = True | ||
| 256 | phases = [] | ||
| 257 | while reprocess: | ||
| 258 | phases.append(self.phase) | ||
| 259 | reprocess = self.phase.processEOF() | ||
| 260 | if reprocess: | ||
| 261 | assert self.phase not in phases | ||
| 262 | |||
| 263 | def normalizedTokens(self): | ||
| 264 | for token in self.tokenizer: | ||
| 265 | yield self.normalizeToken(token) | ||
| 266 | |||
| 267 | def parse(self, stream, *args, **kwargs): | ||
| 268 | """Parse a HTML document into a well-formed tree | ||
| 269 | |||
| 270 | :arg stream: a file-like object or string containing the HTML to be parsed | ||
| 271 | |||
| 272 | The optional encoding parameter must be a string that indicates | ||
| 273 | the encoding. If specified, that encoding will be used, | ||
| 274 | regardless of any BOM or later declaration (such as in a meta | ||
| 275 | element). | ||
| 276 | |||
| 277 | :arg scripting: treat noscript elements as if JavaScript was turned on | ||
| 278 | |||
| 279 | :returns: parsed tree | ||
| 280 | |||
| 281 | Example: | ||
| 282 | |||
| 283 | >>> from html5lib.html5parser import HTMLParser | ||
| 284 | >>> parser = HTMLParser() | ||
| 285 | >>> parser.parse('<html><body><p>This is a doc</p></body></html>') | ||
| 286 | <Element u'{http://www.w3.org/1999/xhtml}html' at 0x7feac4909db0> | ||
| 287 | |||
| 288 | """ | ||
| 289 | self._parse(stream, False, None, *args, **kwargs) | ||
| 290 | return self.tree.getDocument() | ||
| 291 | |||
| 292 | def parseFragment(self, stream, *args, **kwargs): | ||
| 293 | """Parse a HTML fragment into a well-formed tree fragment | ||
| 294 | |||
| 295 | :arg container: name of the element whose innerHTML we are setting; | ||
| 296 | if set to None, defaults to 'div' | ||
| 297 | |||
| 298 | :arg stream: a file-like object or string containing the HTML to be parsed | ||
| 299 | |||
| 300 | The optional encoding parameter must be a string that indicates | ||
| 301 | the encoding. If specified, that encoding will be used, | ||
| 302 | regardless of any BOM or later declaration (such as in a meta | ||
| 303 | element) | ||
| 304 | |||
| 305 | :arg scripting: treat noscript elements as if JavaScript was turned on | ||
| 306 | |||
| 307 | :returns: parsed tree | ||
| 308 | |||
| 309 | Example: | ||
| 310 | |||
| 311 | >>> from html5lib.html5parser import HTMLParser | ||
| 312 | >>> parser = HTMLParser() | ||
| 313 | >>> parser.parseFragment('<b>this is a fragment</b>') | ||
| 314 | <Element u'DOCUMENT_FRAGMENT' at 0x7feac484b090> | ||
| 315 | |||
| 316 | """ | ||
| 317 | self._parse(stream, True, *args, **kwargs) | ||
| 318 | return self.tree.getFragment() | ||
| 319 | |||
| 320 | def parseError(self, errorcode="XXX-undefined-error", datavars=None): | ||
| 321 | # XXX The idea is to make errorcode mandatory. | ||
| 322 | if datavars is None: | ||
| 323 | datavars = {} | ||
| 324 | self.errors.append((self.tokenizer.stream.position(), errorcode, datavars)) | ||
| 325 | if self.strict: | ||
| 326 | raise ParseError(E[errorcode] % datavars) | ||
| 327 | |||
| 328 | def normalizeToken(self, token): | ||
| 329 | # HTML5 specific normalizations to the token stream | ||
| 330 | if token["type"] == tokenTypes["StartTag"]: | ||
| 331 | raw = token["data"] | ||
| 332 | token["data"] = OrderedDict(raw) | ||
| 333 | if len(raw) > len(token["data"]): | ||
| 334 | # we had some duplicated attribute, fix so first wins | ||
| 335 | token["data"].update(raw[::-1]) | ||
| 336 | |||
| 337 | return token | ||
| 338 | |||
| 339 | def adjustMathMLAttributes(self, token): | ||
| 340 | adjust_attributes(token, adjustMathMLAttributes) | ||
| 341 | |||
| 342 | def adjustSVGAttributes(self, token): | ||
| 343 | adjust_attributes(token, adjustSVGAttributes) | ||
| 344 | |||
| 345 | def adjustForeignAttributes(self, token): | ||
| 346 | adjust_attributes(token, adjustForeignAttributesMap) | ||
| 347 | |||
| 348 | def reparseTokenNormal(self, token): | ||
| 349 | # pylint:disable=unused-argument | ||
| 350 | self.parser.phase() | ||
| 351 | |||
| 352 | def resetInsertionMode(self): | ||
| 353 | # The name of this method is mostly historical. (It's also used in the | ||
| 354 | # specification.) | ||
| 355 | last = False | ||
| 356 | newModes = { | ||
| 357 | "select": "inSelect", | ||
| 358 | "td": "inCell", | ||
| 359 | "th": "inCell", | ||
| 360 | "tr": "inRow", | ||
| 361 | "tbody": "inTableBody", | ||
| 362 | "thead": "inTableBody", | ||
| 363 | "tfoot": "inTableBody", | ||
| 364 | "caption": "inCaption", | ||
| 365 | "colgroup": "inColumnGroup", | ||
| 366 | "table": "inTable", | ||
| 367 | "head": "inBody", | ||
| 368 | "body": "inBody", | ||
| 369 | "frameset": "inFrameset", | ||
| 370 | "html": "beforeHead" | ||
| 371 | } | ||
| 372 | for node in self.tree.openElements[::-1]: | ||
| 373 | nodeName = node.name | ||
| 374 | new_phase = None | ||
| 375 | if node == self.tree.openElements[0]: | ||
| 376 | assert self.innerHTML | ||
| 377 | last = True | ||
| 378 | nodeName = self.innerHTML | ||
| 379 | # Check for conditions that should only happen in the innerHTML | ||
| 380 | # case | ||
| 381 | if nodeName in ("select", "colgroup", "head", "html"): | ||
| 382 | assert self.innerHTML | ||
| 383 | |||
| 384 | if not last and node.namespace != self.tree.defaultNamespace: | ||
| 385 | continue | ||
| 386 | |||
| 387 | if nodeName in newModes: | ||
| 388 | new_phase = self.phases[newModes[nodeName]] | ||
| 389 | break | ||
| 390 | elif last: | ||
| 391 | new_phase = self.phases["inBody"] | ||
| 392 | break | ||
| 393 | |||
| 394 | self.phase = new_phase | ||
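| | # e.g. if openElements ends [..., "table", "tbody", "tr"], the reverse | ||
| | # walk above hits "tr" first and selects the "inRow" phase | ||
| | # (illustrative example). | ||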
| 395 | |||
| 396 | def parseRCDataRawtext(self, token, contentType): | ||
| 397 | # Generic RCDATA/RAWTEXT Parsing algorithm | ||
| 398 | assert contentType in ("RAWTEXT", "RCDATA") | ||
| 399 | |||
| 400 | self.tree.insertElement(token) | ||
| 401 | |||
| 402 | if contentType == "RAWTEXT": | ||
| 403 | self.tokenizer.state = self.tokenizer.rawtextState | ||
| 404 | else: | ||
| 405 | self.tokenizer.state = self.tokenizer.rcdataState | ||
| 406 | |||
| 407 | self.originalPhase = self.phase | ||
| 408 | |||
| 409 | self.phase = self.phases["text"] | ||
| 410 | |||
| 411 | |||
| 412 | @_utils.memoize | ||
| 413 | def getPhases(debug): | ||
| 414 | def log(function): | ||
| 415 | """Logger that records which phase processes each token""" | ||
| 416 | type_names = dict((value, key) for key, value in | ||
| 417 | tokenTypes.items()) | ||
| 418 | |||
| 419 | def wrapped(self, *args, **kwargs): | ||
| 420 | if function.__name__.startswith("process") and len(args) > 0: | ||
| 421 | token = args[0] | ||
| 422 | info = {"type": type_names[token['type']]} | ||
| 426 | if token['type'] in tagTokenTypes: | ||
| 427 | info["name"] = token['name'] | ||
| 428 | |||
| 429 | self.parser.log.append((self.parser.tokenizer.state.__name__, | ||
| 430 | self.parser.phase.__class__.__name__, | ||
| 431 | self.__class__.__name__, | ||
| 432 | function.__name__, | ||
| 433 | info)) | ||
| 434 | return function(self, *args, **kwargs) | ||
| 435 | else: | ||
| 436 | return function(self, *args, **kwargs) | ||
| 437 | return wrapped | ||
| 438 | |||
| 439 | def getMetaclass(use_metaclass, metaclass_func): | ||
| 440 | if use_metaclass: | ||
| 441 | return method_decorator_metaclass(metaclass_func) | ||
| 442 | else: | ||
| 443 | return type | ||
| 444 | |||
| 445 | # pylint:disable=unused-argument | ||
| 446 | class Phase(with_metaclass(getMetaclass(debug, log))): | ||
| 447 | """Base class for helper object that implements each phase of processing | ||
| 448 | """ | ||
| 449 | |||
| 450 | def __init__(self, parser, tree): | ||
| 451 | self.parser = parser | ||
| 452 | self.tree = tree | ||
| 453 | |||
| 454 | def processEOF(self): | ||
| 455 | raise NotImplementedError | ||
| 456 | |||
| 457 | def processComment(self, token): | ||
| 458 | # For most phases the following is correct. Where it's not it will be | ||
| 459 | # overridden. | ||
| 460 | self.tree.insertComment(token, self.tree.openElements[-1]) | ||
| 461 | |||
| 462 | def processDoctype(self, token): | ||
| 463 | self.parser.parseError("unexpected-doctype") | ||
| 464 | |||
| 465 | def processCharacters(self, token): | ||
| 466 | self.tree.insertText(token["data"]) | ||
| 467 | |||
| 468 | def processSpaceCharacters(self, token): | ||
| 469 | self.tree.insertText(token["data"]) | ||
| 470 | |||
| 471 | def processStartTag(self, token): | ||
| 472 | return self.startTagHandler[token["name"]](token) | ||
| 473 | |||
| 474 | def startTagHtml(self, token): | ||
| 475 | if not self.parser.firstStartTag and token["name"] == "html": | ||
| 476 | self.parser.parseError("non-html-root") | ||
| 477 | # XXX Need a check here to see if the first start tag token emitted is | ||
| 478 | # this token... If it's not, invoke self.parser.parseError(). | ||
| 479 | for attr, value in token["data"].items(): | ||
| 480 | if attr not in self.tree.openElements[0].attributes: | ||
| 481 | self.tree.openElements[0].attributes[attr] = value | ||
| 482 | self.parser.firstStartTag = False | ||
| 483 | |||
| 484 | def processEndTag(self, token): | ||
| 485 | return self.endTagHandler[token["name"]](token) | ||
| 486 | |||
| 487 | class InitialPhase(Phase): | ||
| 488 | def processSpaceCharacters(self, token): | ||
| 489 | pass | ||
| 490 | |||
| 491 | def processComment(self, token): | ||
| 492 | self.tree.insertComment(token, self.tree.document) | ||
| 493 | |||
| 494 | def processDoctype(self, token): | ||
| 495 | name = token["name"] | ||
| 496 | publicId = token["publicId"] | ||
| 497 | systemId = token["systemId"] | ||
| 498 | correct = token["correct"] | ||
| 499 | |||
| 500 | if (name != "html" or publicId is not None or | ||
| 501 | systemId is not None and systemId != "about:legacy-compat"): | ||
| 502 | self.parser.parseError("unknown-doctype") | ||
| 503 | |||
| 504 | if publicId is None: | ||
| 505 | publicId = "" | ||
| 506 | |||
| 507 | self.tree.insertDoctype(token) | ||
| 508 | |||
| 509 | if publicId != "": | ||
| 510 | publicId = publicId.translate(asciiUpper2Lower) | ||
| 511 | |||
| 512 | if (not correct or token["name"] != "html" or | ||
| 513 | publicId.startswith( | ||
| 514 | ("+//silmaril//dtd html pro v0r11 19970101//", | ||
| 515 | "-//advasoft ltd//dtd html 3.0 aswedit + extensions//", | ||
| 516 | "-//as//dtd html 3.0 aswedit + extensions//", | ||
| 517 | "-//ietf//dtd html 2.0 level 1//", | ||
| 518 | "-//ietf//dtd html 2.0 level 2//", | ||
| 519 | "-//ietf//dtd html 2.0 strict level 1//", | ||
| 520 | "-//ietf//dtd html 2.0 strict level 2//", | ||
| 521 | "-//ietf//dtd html 2.0 strict//", | ||
| 522 | "-//ietf//dtd html 2.0//", | ||
| 523 | "-//ietf//dtd html 2.1e//", | ||
| 524 | "-//ietf//dtd html 3.0//", | ||
| 525 | "-//ietf//dtd html 3.2 final//", | ||
| 526 | "-//ietf//dtd html 3.2//", | ||
| 527 | "-//ietf//dtd html 3//", | ||
| 528 | "-//ietf//dtd html level 0//", | ||
| 529 | "-//ietf//dtd html level 1//", | ||
| 530 | "-//ietf//dtd html level 2//", | ||
| 531 | "-//ietf//dtd html level 3//", | ||
| 532 | "-//ietf//dtd html strict level 0//", | ||
| 533 | "-//ietf//dtd html strict level 1//", | ||
| 534 | "-//ietf//dtd html strict level 2//", | ||
| 535 | "-//ietf//dtd html strict level 3//", | ||
| 536 | "-//ietf//dtd html strict//", | ||
| 537 | "-//ietf//dtd html//", | ||
| 538 | "-//metrius//dtd metrius presentational//", | ||
| 539 | "-//microsoft//dtd internet explorer 2.0 html strict//", | ||
| 540 | "-//microsoft//dtd internet explorer 2.0 html//", | ||
| 541 | "-//microsoft//dtd internet explorer 2.0 tables//", | ||
| 542 | "-//microsoft//dtd internet explorer 3.0 html strict//", | ||
| 543 | "-//microsoft//dtd internet explorer 3.0 html//", | ||
| 544 | "-//microsoft//dtd internet explorer 3.0 tables//", | ||
| 545 | "-//netscape comm. corp.//dtd html//", | ||
| 546 | "-//netscape comm. corp.//dtd strict html//", | ||
| 547 | "-//o'reilly and associates//dtd html 2.0//", | ||
| 548 | "-//o'reilly and associates//dtd html extended 1.0//", | ||
| 549 | "-//o'reilly and associates//dtd html extended relaxed 1.0//", | ||
| 550 | "-//softquad software//dtd hotmetal pro 6.0::19990601::extensions to html 4.0//", | ||
| 551 | "-//softquad//dtd hotmetal pro 4.0::19971010::extensions to html 4.0//", | ||
| 552 | "-//spyglass//dtd html 2.0 extended//", | ||
| 553 | "-//sq//dtd html 2.0 hotmetal + extensions//", | ||
| 554 | "-//sun microsystems corp.//dtd hotjava html//", | ||
| 555 | "-//sun microsystems corp.//dtd hotjava strict html//", | ||
| 556 | "-//w3c//dtd html 3 1995-03-24//", | ||
| 557 | "-//w3c//dtd html 3.2 draft//", | ||
| 558 | "-//w3c//dtd html 3.2 final//", | ||
| 559 | "-//w3c//dtd html 3.2//", | ||
| 560 | "-//w3c//dtd html 3.2s draft//", | ||
| 561 | "-//w3c//dtd html 4.0 frameset//", | ||
| 562 | "-//w3c//dtd html 4.0 transitional//", | ||
| 563 | "-//w3c//dtd html experimental 19960712//", | ||
| 564 | "-//w3c//dtd html experimental 970421//", | ||
| 565 | "-//w3c//dtd w3 html//", | ||
| 566 | "-//w3o//dtd w3 html 3.0//", | ||
| 567 | "-//webtechs//dtd mozilla html 2.0//", | ||
| 568 | "-//webtechs//dtd mozilla html//")) or | ||
| 569 | publicId in ("-//w3o//dtd w3 html strict 3.0//en//", | ||
| 570 | "-/w3c/dtd html 4.0 transitional/en", | ||
| 571 | "html") or | ||
| 572 | publicId.startswith( | ||
| 573 | ("-//w3c//dtd html 4.01 frameset//", | ||
| 574 | "-//w3c//dtd html 4.01 transitional//")) and | ||
| 575 | systemId is None or | ||
| 576 | systemId and systemId.lower() == "http://www.ibm.com/data/dtd/v11/ibmxhtml1-transitional.dtd"): | ||
| 577 | self.parser.compatMode = "quirks" | ||
| 578 | elif (publicId.startswith( | ||
| 579 | ("-//w3c//dtd xhtml 1.0 frameset//", | ||
| 580 | "-//w3c//dtd xhtml 1.0 transitional//")) or | ||
| 581 | publicId.startswith( | ||
| 582 | ("-//w3c//dtd html 4.01 frameset//", | ||
| 583 | "-//w3c//dtd html 4.01 transitional//")) and | ||
| 584 | systemId is not None): | ||
| 585 | self.parser.compatMode = "limited quirks" | ||
| 586 | |||
| 587 | self.parser.phase = self.parser.phases["beforeHtml"] | ||
| 588 | |||
| 589 | def anythingElse(self): | ||
| 590 | self.parser.compatMode = "quirks" | ||
| 591 | self.parser.phase = self.parser.phases["beforeHtml"] | ||
| 592 | |||
| 593 | def processCharacters(self, token): | ||
| 594 | self.parser.parseError("expected-doctype-but-got-chars") | ||
| 595 | self.anythingElse() | ||
| 596 | return token | ||
| 597 | |||
| 598 | def processStartTag(self, token): | ||
| 599 | self.parser.parseError("expected-doctype-but-got-start-tag", | ||
| 600 | {"name": token["name"]}) | ||
| 601 | self.anythingElse() | ||
| 602 | return token | ||
| 603 | |||
| 604 | def processEndTag(self, token): | ||
| 605 | self.parser.parseError("expected-doctype-but-got-end-tag", | ||
| 606 | {"name": token["name"]}) | ||
| 607 | self.anythingElse() | ||
| 608 | return token | ||
| 609 | |||
| 610 | def processEOF(self): | ||
| 611 | self.parser.parseError("expected-doctype-but-got-eof") | ||
| 612 | self.anythingElse() | ||
| 613 | return True | ||
| 614 | |||
| 615 | class BeforeHtmlPhase(Phase): | ||
| 616 | # helper methods | ||
| 617 | def insertHtmlElement(self): | ||
| 618 | self.tree.insertRoot(impliedTagToken("html", "StartTag")) | ||
| 619 | self.parser.phase = self.parser.phases["beforeHead"] | ||
| 620 | |||
| 621 | # other | ||
| 622 | def processEOF(self): | ||
| 623 | self.insertHtmlElement() | ||
| 624 | return True | ||
| 625 | |||
| 626 | def processComment(self, token): | ||
| 627 | self.tree.insertComment(token, self.tree.document) | ||
| 628 | |||
| 629 | def processSpaceCharacters(self, token): | ||
| 630 | pass | ||
| 631 | |||
| 632 | def processCharacters(self, token): | ||
| 633 | self.insertHtmlElement() | ||
| 634 | return token | ||
| 635 | |||
| 636 | def processStartTag(self, token): | ||
| 637 | if token["name"] == "html": | ||
| 638 | self.parser.firstStartTag = True | ||
| 639 | self.insertHtmlElement() | ||
| 640 | return token | ||
| 641 | |||
| 642 | def processEndTag(self, token): | ||
| 643 | if token["name"] not in ("head", "body", "html", "br"): | ||
| 644 | self.parser.parseError("unexpected-end-tag-before-html", | ||
| 645 | {"name": token["name"]}) | ||
| 646 | else: | ||
| 647 | self.insertHtmlElement() | ||
| 648 | return token | ||
| 649 | |||
| 650 | class BeforeHeadPhase(Phase): | ||
| 651 | def __init__(self, parser, tree): | ||
| 652 | Phase.__init__(self, parser, tree) | ||
| 653 | |||
| 654 | self.startTagHandler = _utils.MethodDispatcher([ | ||
| 655 | ("html", self.startTagHtml), | ||
| 656 | ("head", self.startTagHead) | ||
| 657 | ]) | ||
| 658 | self.startTagHandler.default = self.startTagOther | ||
| 659 | |||
| 660 | self.endTagHandler = _utils.MethodDispatcher([ | ||
| 661 | (("head", "body", "html", "br"), self.endTagImplyHead) | ||
| 662 | ]) | ||
| 663 | self.endTagHandler.default = self.endTagOther | ||
| 664 | |||
| 665 | def processEOF(self): | ||
| 666 | self.startTagHead(impliedTagToken("head", "StartTag")) | ||
| 667 | return True | ||
| 668 | |||
| 669 | def processSpaceCharacters(self, token): | ||
| 670 | pass | ||
| 671 | |||
| 672 | def processCharacters(self, token): | ||
| 673 | self.startTagHead(impliedTagToken("head", "StartTag")) | ||
| 674 | return token | ||
| 675 | |||
| 676 | def startTagHtml(self, token): | ||
| 677 | return self.parser.phases["inBody"].processStartTag(token) | ||
| 678 | |||
| 679 | def startTagHead(self, token): | ||
| 680 | self.tree.insertElement(token) | ||
| 681 | self.tree.headPointer = self.tree.openElements[-1] | ||
| 682 | self.parser.phase = self.parser.phases["inHead"] | ||
| 683 | |||
| 684 | def startTagOther(self, token): | ||
| 685 | self.startTagHead(impliedTagToken("head", "StartTag")) | ||
| 686 | return token | ||
| 687 | |||
| 688 | def endTagImplyHead(self, token): | ||
| 689 | self.startTagHead(impliedTagToken("head", "StartTag")) | ||
| 690 | return token | ||
| 691 | |||
| 692 | def endTagOther(self, token): | ||
| 693 | self.parser.parseError("end-tag-after-implied-root", | ||
| 694 | {"name": token["name"]}) | ||
| 695 | |||
| 696 | class InHeadPhase(Phase): | ||
| 697 | def __init__(self, parser, tree): | ||
| 698 | Phase.__init__(self, parser, tree) | ||
| 699 | |||
| 700 | self.startTagHandler = _utils.MethodDispatcher([ | ||
| 701 | ("html", self.startTagHtml), | ||
| 702 | ("title", self.startTagTitle), | ||
| 703 | (("noframes", "style"), self.startTagNoFramesStyle), | ||
| 704 | ("noscript", self.startTagNoscript), | ||
| 705 | ("script", self.startTagScript), | ||
| 706 | (("base", "basefont", "bgsound", "command", "link"), | ||
| 707 | self.startTagBaseLinkCommand), | ||
| 708 | ("meta", self.startTagMeta), | ||
| 709 | ("head", self.startTagHead) | ||
| 710 | ]) | ||
| 711 | self.startTagHandler.default = self.startTagOther | ||
| 712 | |||
| 713 | self.endTagHandler = _utils.MethodDispatcher([ | ||
| 714 | ("head", self.endTagHead), | ||
| 715 | (("br", "html", "body"), self.endTagHtmlBodyBr) | ||
| 716 | ]) | ||
| 717 | self.endTagHandler.default = self.endTagOther | ||
| 718 | |||
| 719 | # the real thing | ||
| 720 | def processEOF(self): | ||
| 721 | self.anythingElse() | ||
| 722 | return True | ||
| 723 | |||
| 724 | def processCharacters(self, token): | ||
| 725 | self.anythingElse() | ||
| 726 | return token | ||
| 727 | |||
| 728 | def startTagHtml(self, token): | ||
| 729 | return self.parser.phases["inBody"].processStartTag(token) | ||
| 730 | |||
| 731 | def startTagHead(self, token): | ||
| 732 | self.parser.parseError("two-heads-are-not-better-than-one") | ||
| 733 | |||
| 734 | def startTagBaseLinkCommand(self, token): | ||
| 735 | self.tree.insertElement(token) | ||
| 736 | self.tree.openElements.pop() | ||
| 737 | token["selfClosingAcknowledged"] = True | ||
| 738 | |||
| 739 | def startTagMeta(self, token): | ||
| 740 | self.tree.insertElement(token) | ||
| 741 | self.tree.openElements.pop() | ||
| 742 | token["selfClosingAcknowledged"] = True | ||
| 743 | |||
| 744 | attributes = token["data"] | ||
| 745 | if self.parser.tokenizer.stream.charEncoding[1] == "tentative": | ||
| 746 | if "charset" in attributes: | ||
| 747 | self.parser.tokenizer.stream.changeEncoding(attributes["charset"]) | ||
| 748 | elif ("content" in attributes and | ||
| 749 | "http-equiv" in attributes and | ||
| 750 | attributes["http-equiv"].lower() == "content-type"): | ||
| 751 | # Encoding it as UTF-8 here is a hack, as really we should pass | ||
| 752 | # the abstract Unicode string, and just use the | ||
| 753 | # ContentAttrParser on that, but using UTF-8 allows all chars | ||
| 754 | # to be encoded and, as an ASCII superset, it works. | ||
| 755 | data = _inputstream.EncodingBytes(attributes["content"].encode("utf-8")) | ||
| 756 | parser = _inputstream.ContentAttrParser(data) | ||
| 757 | codec = parser.parse() | ||
| 758 | self.parser.tokenizer.stream.changeEncoding(codec) | ||
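| | # For illustration: with a tentative encoding and a tag such as | ||
| | # <meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">, | ||
| | # ContentAttrParser pulls "iso-8859-1" out of the content value and | ||
| | # changeEncoding() switches the stream over (which may trigger a reparse) | ||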
| 759 | |||
| 760 | def startTagTitle(self, token): | ||
| 761 | self.parser.parseRCDataRawtext(token, "RCDATA") | ||
| 762 | |||
| 763 | def startTagNoFramesStyle(self, token): | ||
| 764 | # Need to decide whether to implement the scripting-disabled case | ||
| 765 | self.parser.parseRCDataRawtext(token, "RAWTEXT") | ||
| 766 | |||
| 767 | def startTagNoscript(self, token): | ||
| 768 | if self.parser.scripting: | ||
| 769 | self.parser.parseRCDataRawtext(token, "RAWTEXT") | ||
| 770 | else: | ||
| 771 | self.tree.insertElement(token) | ||
| 772 | self.parser.phase = self.parser.phases["inHeadNoscript"] | ||
| 773 | |||
| 774 | def startTagScript(self, token): | ||
| 775 | self.tree.insertElement(token) | ||
| 776 | self.parser.tokenizer.state = self.parser.tokenizer.scriptDataState | ||
| 777 | self.parser.originalPhase = self.parser.phase | ||
| 778 | self.parser.phase = self.parser.phases["text"] | ||
| 779 | |||
| 780 | def startTagOther(self, token): | ||
| 781 | self.anythingElse() | ||
| 782 | return token | ||
| 783 | |||
| 784 | def endTagHead(self, token): | ||
| 785 | node = self.parser.tree.openElements.pop() | ||
| 786 | assert node.name == "head", "Expected head got %s" % node.name | ||
| 787 | self.parser.phase = self.parser.phases["afterHead"] | ||
| 788 | |||
| 789 | def endTagHtmlBodyBr(self, token): | ||
| 790 | self.anythingElse() | ||
| 791 | return token | ||
| 792 | |||
| 793 | def endTagOther(self, token): | ||
| 794 | self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) | ||
| 795 | |||
| 796 | def anythingElse(self): | ||
| 797 | self.endTagHead(impliedTagToken("head")) | ||
| 798 | |||
| 799 | class InHeadNoscriptPhase(Phase): | ||
| 800 | def __init__(self, parser, tree): | ||
| 801 | Phase.__init__(self, parser, tree) | ||
| 802 | |||
| 803 | self.startTagHandler = _utils.MethodDispatcher([ | ||
| 804 | ("html", self.startTagHtml), | ||
| 805 | (("basefont", "bgsound", "link", "meta", "noframes", "style"), self.startTagBaseLinkCommand), | ||
| 806 | (("head", "noscript"), self.startTagHeadNoscript), | ||
| 807 | ]) | ||
| 808 | self.startTagHandler.default = self.startTagOther | ||
| 809 | |||
| 810 | self.endTagHandler = _utils.MethodDispatcher([ | ||
| 811 | ("noscript", self.endTagNoscript), | ||
| 812 | ("br", self.endTagBr), | ||
| 813 | ]) | ||
| 814 | self.endTagHandler.default = self.endTagOther | ||
| 815 | |||
| 816 | def processEOF(self): | ||
| 817 | self.parser.parseError("eof-in-head-noscript") | ||
| 818 | self.anythingElse() | ||
| 819 | return True | ||
| 820 | |||
| 821 | def processComment(self, token): | ||
| 822 | return self.parser.phases["inHead"].processComment(token) | ||
| 823 | |||
| 824 | def processCharacters(self, token): | ||
| 825 | self.parser.parseError("char-in-head-noscript") | ||
| 826 | self.anythingElse() | ||
| 827 | return token | ||
| 828 | |||
| 829 | def processSpaceCharacters(self, token): | ||
| 830 | return self.parser.phases["inHead"].processSpaceCharacters(token) | ||
| 831 | |||
| 832 | def startTagHtml(self, token): | ||
| 833 | return self.parser.phases["inBody"].processStartTag(token) | ||
| 834 | |||
| 835 | def startTagBaseLinkCommand(self, token): | ||
| 836 | return self.parser.phases["inHead"].processStartTag(token) | ||
| 837 | |||
| 838 | def startTagHeadNoscript(self, token): | ||
| 839 | self.parser.parseError("unexpected-start-tag", {"name": token["name"]}) | ||
| 840 | |||
| 841 | def startTagOther(self, token): | ||
| 842 | self.parser.parseError("unexpected-inhead-noscript-tag", {"name": token["name"]}) | ||
| 843 | self.anythingElse() | ||
| 844 | return token | ||
| 845 | |||
| 846 | def endTagNoscript(self, token): | ||
| 847 | node = self.parser.tree.openElements.pop() | ||
| 848 | assert node.name == "noscript", "Expected noscript got %s" % node.name | ||
| 849 | self.parser.phase = self.parser.phases["inHead"] | ||
| 850 | |||
| 851 | def endTagBr(self, token): | ||
| 852 | self.parser.parseError("unexpected-inhead-noscript-tag", {"name": token["name"]}) | ||
| 853 | self.anythingElse() | ||
| 854 | return token | ||
| 855 | |||
| 856 | def endTagOther(self, token): | ||
| 857 | self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) | ||
| 858 | |||
| 859 | def anythingElse(self): | ||
| 860 | # Caller must raise parse error first! | ||
| 861 | self.endTagNoscript(impliedTagToken("noscript")) | ||
| 862 | |||
| 863 | class AfterHeadPhase(Phase): | ||
| 864 | def __init__(self, parser, tree): | ||
| 865 | Phase.__init__(self, parser, tree) | ||
| 866 | |||
| 867 | self.startTagHandler = _utils.MethodDispatcher([ | ||
| 868 | ("html", self.startTagHtml), | ||
| 869 | ("body", self.startTagBody), | ||
| 870 | ("frameset", self.startTagFrameset), | ||
| 871 | (("base", "basefont", "bgsound", "link", "meta", "noframes", "script", | ||
| 872 | "style", "title"), | ||
| 873 | self.startTagFromHead), | ||
| 874 | ("head", self.startTagHead) | ||
| 875 | ]) | ||
| 876 | self.startTagHandler.default = self.startTagOther | ||
| 877 | self.endTagHandler = _utils.MethodDispatcher([(("body", "html", "br"), | ||
| 878 | self.endTagHtmlBodyBr)]) | ||
| 879 | self.endTagHandler.default = self.endTagOther | ||
| 880 | |||
| 881 | def processEOF(self): | ||
| 882 | self.anythingElse() | ||
| 883 | return True | ||
| 884 | |||
| 885 | def processCharacters(self, token): | ||
| 886 | self.anythingElse() | ||
| 887 | return token | ||
| 888 | |||
| 889 | def startTagHtml(self, token): | ||
| 890 | return self.parser.phases["inBody"].processStartTag(token) | ||
| 891 | |||
| 892 | def startTagBody(self, token): | ||
| 893 | self.parser.framesetOK = False | ||
| 894 | self.tree.insertElement(token) | ||
| 895 | self.parser.phase = self.parser.phases["inBody"] | ||
| 896 | |||
| 897 | def startTagFrameset(self, token): | ||
| 898 | self.tree.insertElement(token) | ||
| 899 | self.parser.phase = self.parser.phases["inFrameset"] | ||
| 900 | |||
| 901 | def startTagFromHead(self, token): | ||
| 902 | self.parser.parseError("unexpected-start-tag-out-of-my-head", | ||
| 903 | {"name": token["name"]}) | ||
| 904 | self.tree.openElements.append(self.tree.headPointer) | ||
| 905 | self.parser.phases["inHead"].processStartTag(token) | ||
| 906 | for node in self.tree.openElements[::-1]: | ||
| 907 | if node.name == "head": | ||
| 908 | self.tree.openElements.remove(node) | ||
| 909 | break | ||
| 910 | |||
| 911 | def startTagHead(self, token): | ||
| 912 | self.parser.parseError("unexpected-start-tag", {"name": token["name"]}) | ||
| 913 | |||
| 914 | def startTagOther(self, token): | ||
| 915 | self.anythingElse() | ||
| 916 | return token | ||
| 917 | |||
| 918 | def endTagHtmlBodyBr(self, token): | ||
| 919 | self.anythingElse() | ||
| 920 | return token | ||
| 921 | |||
| 922 | def endTagOther(self, token): | ||
| 923 | self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) | ||
| 924 | |||
| 925 | def anythingElse(self): | ||
| 926 | self.tree.insertElement(impliedTagToken("body", "StartTag")) | ||
| 927 | self.parser.phase = self.parser.phases["inBody"] | ||
| 928 | self.parser.framesetOK = True | ||
| 929 | |||
| 930 | class InBodyPhase(Phase): | ||
| 931 | # http://www.whatwg.org/specs/web-apps/current-work/#parsing-main-inbody | ||
| 932 | # the really-really-really-very crazy mode | ||
| 933 | def __init__(self, parser, tree): | ||
| 934 | Phase.__init__(self, parser, tree) | ||
| 935 | |||
| 936 | # Set this to the default handler | ||
| 937 | self.processSpaceCharacters = self.processSpaceCharactersNonPre | ||
| 938 | |||
| 939 | self.startTagHandler = _utils.MethodDispatcher([ | ||
| 940 | ("html", self.startTagHtml), | ||
| 941 | (("base", "basefont", "bgsound", "command", "link", "meta", | ||
| 942 | "script", "style", "title"), | ||
| 943 | self.startTagProcessInHead), | ||
| 944 | ("body", self.startTagBody), | ||
| 945 | ("frameset", self.startTagFrameset), | ||
| 946 | (("address", "article", "aside", "blockquote", "center", "details", | ||
| 947 | "dir", "div", "dl", "fieldset", "figcaption", "figure", | ||
| 948 | "footer", "header", "hgroup", "main", "menu", "nav", "ol", "p", | ||
| 949 | "section", "summary", "ul"), | ||
| 950 | self.startTagCloseP), | ||
| 951 | (headingElements, self.startTagHeading), | ||
| 952 | (("pre", "listing"), self.startTagPreListing), | ||
| 953 | ("form", self.startTagForm), | ||
| 954 | (("li", "dd", "dt"), self.startTagListItem), | ||
| 955 | ("plaintext", self.startTagPlaintext), | ||
| 956 | ("a", self.startTagA), | ||
| 957 | (("b", "big", "code", "em", "font", "i", "s", "small", "strike", | ||
| 958 | "strong", "tt", "u"), self.startTagFormatting), | ||
| 959 | ("nobr", self.startTagNobr), | ||
| 960 | ("button", self.startTagButton), | ||
| 961 | (("applet", "marquee", "object"), self.startTagAppletMarqueeObject), | ||
| 962 | ("xmp", self.startTagXmp), | ||
| 963 | ("table", self.startTagTable), | ||
| 964 | (("area", "br", "embed", "img", "keygen", "wbr"), | ||
| 965 | self.startTagVoidFormatting), | ||
| 966 | (("param", "source", "track"), self.startTagParamSource), | ||
| 967 | ("input", self.startTagInput), | ||
| 968 | ("hr", self.startTagHr), | ||
| 969 | ("image", self.startTagImage), | ||
| 970 | ("isindex", self.startTagIsIndex), | ||
| 971 | ("textarea", self.startTagTextarea), | ||
| 972 | ("iframe", self.startTagIFrame), | ||
| 973 | ("noscript", self.startTagNoscript), | ||
| 974 | (("noembed", "noframes"), self.startTagRawtext), | ||
| 975 | ("select", self.startTagSelect), | ||
| 976 | (("rp", "rt"), self.startTagRpRt), | ||
| 977 | (("option", "optgroup"), self.startTagOpt), | ||
| 978 | (("math"), self.startTagMath), | ||
| 979 | (("svg"), self.startTagSvg), | ||
| 980 | (("caption", "col", "colgroup", "frame", "head", | ||
| 981 | "tbody", "td", "tfoot", "th", "thead", | ||
| 982 | "tr"), self.startTagMisplaced) | ||
| 983 | ]) | ||
| 984 | self.startTagHandler.default = self.startTagOther | ||
| 985 | |||
| 986 | self.endTagHandler = _utils.MethodDispatcher([ | ||
| 987 | ("body", self.endTagBody), | ||
| 988 | ("html", self.endTagHtml), | ||
| 989 | (("address", "article", "aside", "blockquote", "button", "center", | ||
| 990 | "details", "dialog", "dir", "div", "dl", "fieldset", "figcaption", "figure", | ||
| 991 | "footer", "header", "hgroup", "listing", "main", "menu", "nav", "ol", "pre", | ||
| 992 | "section", "summary", "ul"), self.endTagBlock), | ||
| 993 | ("form", self.endTagForm), | ||
| 994 | ("p", self.endTagP), | ||
| 995 | (("dd", "dt", "li"), self.endTagListItem), | ||
| 996 | (headingElements, self.endTagHeading), | ||
| 997 | (("a", "b", "big", "code", "em", "font", "i", "nobr", "s", "small", | ||
| 998 | "strike", "strong", "tt", "u"), self.endTagFormatting), | ||
| 999 | (("applet", "marquee", "object"), self.endTagAppletMarqueeObject), | ||
| 1000 | ("br", self.endTagBr), | ||
| 1001 | ]) | ||
| 1002 | self.endTagHandler.default = self.endTagOther | ||
| 1003 | |||
| 1004 | def isMatchingFormattingElement(self, node1, node2): | ||
| 1005 | return (node1.name == node2.name and | ||
| 1006 | node1.namespace == node2.namespace and | ||
| 1007 | node1.attributes == node2.attributes) | ||
| 1008 | |||
| 1009 | # helper | ||
| 1010 | def addFormattingElement(self, token): | ||
| 1011 | self.tree.insertElement(token) | ||
| 1012 | element = self.tree.openElements[-1] | ||
| 1013 | |||
| 1014 | matchingElements = [] | ||
| 1015 | for node in self.tree.activeFormattingElements[::-1]: | ||
| 1016 | if node is Marker: | ||
| 1017 | break | ||
| 1018 | elif self.isMatchingFormattingElement(node, element): | ||
| 1019 | matchingElements.append(node) | ||
| 1020 | |||
| 1021 | assert len(matchingElements) <= 3 | ||
| 1022 | if len(matchingElements) == 3: | ||
| 1023 | self.tree.activeFormattingElements.remove(matchingElements[-1]) | ||
| 1024 | self.tree.activeFormattingElements.append(element) | ||
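| | # The cap of three above implements the spec's "Noah's Ark" clause: | ||
| | # e.g. for "<b><b><b><b>x", only the three most recent matching <b> | ||
| | # entries survive (the earliest is dropped before the append), which | ||
| | # bounds the work done when formatting elements are reconstructed | ||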
| 1025 | |||
| 1026 | # the real deal | ||
| 1027 | def processEOF(self): | ||
| 1028 | allowed_elements = frozenset(("dd", "dt", "li", "p", "tbody", "td", | ||
| 1029 | "tfoot", "th", "thead", "tr", "body", | ||
| 1030 | "html")) | ||
| 1031 | for node in self.tree.openElements[::-1]: | ||
| 1032 | if node.name not in allowed_elements: | ||
| 1033 | self.parser.parseError("expected-closing-tag-but-got-eof") | ||
| 1034 | break | ||
| 1035 | # Stop parsing | ||
| 1036 | |||
| 1037 | def processSpaceCharactersDropNewline(self, token): | ||
| 1038 | # Sometimes (start of <pre>, <listing>, and <textarea> blocks) we | ||
| 1039 | # want to drop leading newlines | ||
| 1040 | data = token["data"] | ||
| 1041 | self.processSpaceCharacters = self.processSpaceCharactersNonPre | ||
| 1042 | if (data.startswith("\n") and | ||
| 1043 | self.tree.openElements[-1].name in ("pre", "listing", "textarea") and | ||
| 1044 | not self.tree.openElements[-1].hasContent()): | ||
| 1045 | data = data[1:] | ||
| 1046 | if data: | ||
| 1047 | self.tree.reconstructActiveFormattingElements() | ||
| 1048 | self.tree.insertText(data) | ||
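| | # For example, "<pre>\nfoo</pre>" and "<pre>foo</pre>" produce the | ||
| | # same tree: the newline immediately after the start tag is dropped, | ||
| | # per the spec's special case for pre, listing and textarea | ||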
| 1049 | |||
| 1050 | def processCharacters(self, token): | ||
| 1051 | if token["data"] == "\u0000": | ||
| 1052 | # The tokenizer should always emit null on its own | ||
| 1053 | return | ||
| 1054 | self.tree.reconstructActiveFormattingElements() | ||
| 1055 | self.tree.insertText(token["data"]) | ||
| 1056 | # Performance note: this scans every character of the token's data | ||
| 1057 | if (self.parser.framesetOK and | ||
| 1058 | any([char not in spaceCharacters | ||
| 1059 | for char in token["data"]])): | ||
| 1060 | self.parser.framesetOK = False | ||
| 1061 | |||
| 1062 | def processSpaceCharactersNonPre(self, token): | ||
| 1063 | self.tree.reconstructActiveFormattingElements() | ||
| 1064 | self.tree.insertText(token["data"]) | ||
| 1065 | |||
| 1066 | def startTagProcessInHead(self, token): | ||
| 1067 | return self.parser.phases["inHead"].processStartTag(token) | ||
| 1068 | |||
| 1069 | def startTagBody(self, token): | ||
| 1070 | self.parser.parseError("unexpected-start-tag", {"name": "body"}) | ||
| 1071 | if (len(self.tree.openElements) == 1 or | ||
| 1072 | self.tree.openElements[1].name != "body"): | ||
| 1073 | assert self.parser.innerHTML | ||
| 1074 | else: | ||
| 1075 | self.parser.framesetOK = False | ||
| 1076 | for attr, value in token["data"].items(): | ||
| 1077 | if attr not in self.tree.openElements[1].attributes: | ||
| 1078 | self.tree.openElements[1].attributes[attr] = value | ||
| 1079 | |||
| 1080 | def startTagFrameset(self, token): | ||
| 1081 | self.parser.parseError("unexpected-start-tag", {"name": "frameset"}) | ||
| 1082 | if (len(self.tree.openElements) == 1 or self.tree.openElements[1].name != "body"): | ||
| 1083 | assert self.parser.innerHTML | ||
| 1084 | elif not self.parser.framesetOK: | ||
| 1085 | pass | ||
| 1086 | else: | ||
| 1087 | if self.tree.openElements[1].parent: | ||
| 1088 | self.tree.openElements[1].parent.removeChild(self.tree.openElements[1]) | ||
| 1089 | while self.tree.openElements[-1].name != "html": | ||
| 1090 | self.tree.openElements.pop() | ||
| 1091 | self.tree.insertElement(token) | ||
| 1092 | self.parser.phase = self.parser.phases["inFrameset"] | ||
| 1093 | |||
| 1094 | def startTagCloseP(self, token): | ||
| 1095 | if self.tree.elementInScope("p", variant="button"): | ||
| 1096 | self.endTagP(impliedTagToken("p")) | ||
| 1097 | self.tree.insertElement(token) | ||
| 1098 | |||
| 1099 | def startTagPreListing(self, token): | ||
| 1100 | if self.tree.elementInScope("p", variant="button"): | ||
| 1101 | self.endTagP(impliedTagToken("p")) | ||
| 1102 | self.tree.insertElement(token) | ||
| 1103 | self.parser.framesetOK = False | ||
| 1104 | self.processSpaceCharacters = self.processSpaceCharactersDropNewline | ||
| 1105 | |||
| 1106 | def startTagForm(self, token): | ||
| 1107 | if self.tree.formPointer: | ||
| 1108 | self.parser.parseError("unexpected-start-tag", {"name": "form"}) | ||
| 1109 | else: | ||
| 1110 | if self.tree.elementInScope("p", variant="button"): | ||
| 1111 | self.endTagP(impliedTagToken("p")) | ||
| 1112 | self.tree.insertElement(token) | ||
| 1113 | self.tree.formPointer = self.tree.openElements[-1] | ||
| 1114 | |||
| 1115 | def startTagListItem(self, token): | ||
| 1116 | self.parser.framesetOK = False | ||
| 1117 | |||
| 1118 | stopNamesMap = {"li": ["li"], | ||
| 1119 | "dt": ["dt", "dd"], | ||
| 1120 | "dd": ["dt", "dd"]} | ||
| 1121 | stopNames = stopNamesMap[token["name"]] | ||
| 1122 | for node in reversed(self.tree.openElements): | ||
| 1123 | if node.name in stopNames: | ||
| 1124 | self.parser.phase.processEndTag( | ||
| 1125 | impliedTagToken(node.name, "EndTag")) | ||
| 1126 | break | ||
| 1127 | if (node.nameTuple in specialElements and | ||
| 1128 | node.name not in ("address", "div", "p")): | ||
| 1129 | break | ||
| 1130 | |||
| 1131 | if self.tree.elementInScope("p", variant="button"): | ||
| 1132 | self.parser.phase.processEndTag( | ||
| 1133 | impliedTagToken("p", "EndTag")) | ||
| 1134 | |||
| 1135 | self.tree.insertElement(token) | ||
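| | # For example, "<ul><li>one<li>two</ul>" yields two sibling <li> | ||
| | # elements: the second <li> start tag implies </li> for the first | ||
| | # via the processEndTag call above | ||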
| 1136 | |||
| 1137 | def startTagPlaintext(self, token): | ||
| 1138 | if self.tree.elementInScope("p", variant="button"): | ||
| 1139 | self.endTagP(impliedTagToken("p")) | ||
| 1140 | self.tree.insertElement(token) | ||
| 1141 | self.parser.tokenizer.state = self.parser.tokenizer.plaintextState | ||
| 1142 | |||
| 1143 | def startTagHeading(self, token): | ||
| 1144 | if self.tree.elementInScope("p", variant="button"): | ||
| 1145 | self.endTagP(impliedTagToken("p")) | ||
| 1146 | if self.tree.openElements[-1].name in headingElements: | ||
| 1147 | self.parser.parseError("unexpected-start-tag", {"name": token["name"]}) | ||
| 1148 | self.tree.openElements.pop() | ||
| 1149 | self.tree.insertElement(token) | ||
| 1150 | |||
| 1151 | def startTagA(self, token): | ||
| 1152 | afeAElement = self.tree.elementInActiveFormattingElements("a") | ||
| 1153 | if afeAElement: | ||
| 1154 | self.parser.parseError("unexpected-start-tag-implies-end-tag", | ||
| 1155 | {"startName": "a", "endName": "a"}) | ||
| 1156 | self.endTagFormatting(impliedTagToken("a")) | ||
| 1157 | if afeAElement in self.tree.openElements: | ||
| 1158 | self.tree.openElements.remove(afeAElement) | ||
| 1159 | if afeAElement in self.tree.activeFormattingElements: | ||
| 1160 | self.tree.activeFormattingElements.remove(afeAElement) | ||
| 1161 | self.tree.reconstructActiveFormattingElements() | ||
| 1162 | self.addFormattingElement(token) | ||
| 1163 | |||
| 1164 | def startTagFormatting(self, token): | ||
| 1165 | self.tree.reconstructActiveFormattingElements() | ||
| 1166 | self.addFormattingElement(token) | ||
| 1167 | |||
| 1168 | def startTagNobr(self, token): | ||
| 1169 | self.tree.reconstructActiveFormattingElements() | ||
| 1170 | if self.tree.elementInScope("nobr"): | ||
| 1171 | self.parser.parseError("unexpected-start-tag-implies-end-tag", | ||
| 1172 | {"startName": "nobr", "endName": "nobr"}) | ||
| 1173 | self.processEndTag(impliedTagToken("nobr")) | ||
| 1174 | # XXX Need tests that trigger the following | ||
| 1175 | self.tree.reconstructActiveFormattingElements() | ||
| 1176 | self.addFormattingElement(token) | ||
| 1177 | |||
| 1178 | def startTagButton(self, token): | ||
| 1179 | if self.tree.elementInScope("button"): | ||
| 1180 | self.parser.parseError("unexpected-start-tag-implies-end-tag", | ||
| 1181 | {"startName": "button", "endName": "button"}) | ||
| 1182 | self.processEndTag(impliedTagToken("button")) | ||
| 1183 | return token | ||
| 1184 | else: | ||
| 1185 | self.tree.reconstructActiveFormattingElements() | ||
| 1186 | self.tree.insertElement(token) | ||
| 1187 | self.parser.framesetOK = False | ||
| 1188 | |||
| 1189 | def startTagAppletMarqueeObject(self, token): | ||
| 1190 | self.tree.reconstructActiveFormattingElements() | ||
| 1191 | self.tree.insertElement(token) | ||
| 1192 | self.tree.activeFormattingElements.append(Marker) | ||
| 1193 | self.parser.framesetOK = False | ||
| 1194 | |||
| 1195 | def startTagXmp(self, token): | ||
| 1196 | if self.tree.elementInScope("p", variant="button"): | ||
| 1197 | self.endTagP(impliedTagToken("p")) | ||
| 1198 | self.tree.reconstructActiveFormattingElements() | ||
| 1199 | self.parser.framesetOK = False | ||
| 1200 | self.parser.parseRCDataRawtext(token, "RAWTEXT") | ||
| 1201 | |||
| 1202 | def startTagTable(self, token): | ||
| 1203 | if self.parser.compatMode != "quirks": | ||
| 1204 | if self.tree.elementInScope("p", variant="button"): | ||
| 1205 | self.processEndTag(impliedTagToken("p")) | ||
| 1206 | self.tree.insertElement(token) | ||
| 1207 | self.parser.framesetOK = False | ||
| 1208 | self.parser.phase = self.parser.phases["inTable"] | ||
| 1209 | |||
| 1210 | def startTagVoidFormatting(self, token): | ||
| 1211 | self.tree.reconstructActiveFormattingElements() | ||
| 1212 | self.tree.insertElement(token) | ||
| 1213 | self.tree.openElements.pop() | ||
| 1214 | token["selfClosingAcknowledged"] = True | ||
| 1215 | self.parser.framesetOK = False | ||
| 1216 | |||
| 1217 | def startTagInput(self, token): | ||
| 1218 | framesetOK = self.parser.framesetOK | ||
| 1219 | self.startTagVoidFormatting(token) | ||
| 1220 | if ("type" in token["data"] and | ||
| 1221 | token["data"]["type"].translate(asciiUpper2Lower) == "hidden"): | ||
| 1222 | # input type=hidden doesn't change framesetOK | ||
| 1223 | self.parser.framesetOK = framesetOK | ||
| 1224 | |||
| 1225 | def startTagParamSource(self, token): | ||
| 1226 | self.tree.insertElement(token) | ||
| 1227 | self.tree.openElements.pop() | ||
| 1228 | token["selfClosingAcknowledged"] = True | ||
| 1229 | |||
| 1230 | def startTagHr(self, token): | ||
| 1231 | if self.tree.elementInScope("p", variant="button"): | ||
| 1232 | self.endTagP(impliedTagToken("p")) | ||
| 1233 | self.tree.insertElement(token) | ||
| 1234 | self.tree.openElements.pop() | ||
| 1235 | token["selfClosingAcknowledged"] = True | ||
| 1236 | self.parser.framesetOK = False | ||
| 1237 | |||
| 1238 | def startTagImage(self, token): | ||
| 1239 | # Not a typo: the spec really does rewrite <image> to <img> | ||
| 1240 | self.parser.parseError("unexpected-start-tag-treated-as", | ||
| 1241 | {"originalName": "image", "newName": "img"}) | ||
| 1242 | self.processStartTag(impliedTagToken("img", "StartTag", | ||
| 1243 | attributes=token["data"], | ||
| 1244 | selfClosing=token["selfClosing"])) | ||
| 1245 | |||
| 1246 | def startTagIsIndex(self, token): | ||
| 1247 | self.parser.parseError("deprecated-tag", {"name": "isindex"}) | ||
| 1248 | if self.tree.formPointer: | ||
| 1249 | return | ||
| 1250 | form_attrs = {} | ||
| 1251 | if "action" in token["data"]: | ||
| 1252 | form_attrs["action"] = token["data"]["action"] | ||
| 1253 | self.processStartTag(impliedTagToken("form", "StartTag", | ||
| 1254 | attributes=form_attrs)) | ||
| 1255 | self.processStartTag(impliedTagToken("hr", "StartTag")) | ||
| 1256 | self.processStartTag(impliedTagToken("label", "StartTag")) | ||
| 1257 | # XXX Localization ... | ||
| 1258 | if "prompt" in token["data"]: | ||
| 1259 | prompt = token["data"]["prompt"] | ||
| 1260 | else: | ||
| 1261 | prompt = "This is a searchable index. Enter search keywords: " | ||
| 1262 | self.processCharacters( | ||
| 1263 | {"type": tokenTypes["Characters"], "data": prompt}) | ||
| 1264 | attributes = token["data"].copy() | ||
| 1265 | if "action" in attributes: | ||
| 1266 | del attributes["action"] | ||
| 1267 | if "prompt" in attributes: | ||
| 1268 | del attributes["prompt"] | ||
| 1269 | attributes["name"] = "isindex" | ||
| 1270 | self.processStartTag(impliedTagToken("input", "StartTag", | ||
| 1271 | attributes=attributes, | ||
| 1272 | selfClosing=token["selfClosing"])) | ||
| 1273 | self.processEndTag(impliedTagToken("label")) | ||
| 1274 | self.processStartTag(impliedTagToken("hr", "StartTag")) | ||
| 1275 | self.processEndTag(impliedTagToken("form")) | ||
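| | # Roughly, <isindex prompt="Find: " action="/s"> is rewritten into | ||
| | # <form action="/s"><hr><label>Find: | ||
| | # <input name="isindex"></label><hr></form> | ||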
| 1276 | |||
| 1277 | def startTagTextarea(self, token): | ||
| 1278 | self.tree.insertElement(token) | ||
| 1279 | self.parser.tokenizer.state = self.parser.tokenizer.rcdataState | ||
| 1280 | self.processSpaceCharacters = self.processSpaceCharactersDropNewline | ||
| 1281 | self.parser.framesetOK = False | ||
| 1282 | |||
| 1283 | def startTagIFrame(self, token): | ||
| 1284 | self.parser.framesetOK = False | ||
| 1285 | self.startTagRawtext(token) | ||
| 1286 | |||
| 1287 | def startTagNoscript(self, token): | ||
| 1288 | if self.parser.scripting: | ||
| 1289 | self.startTagRawtext(token) | ||
| 1290 | else: | ||
| 1291 | self.startTagOther(token) | ||
| 1292 | |||
| 1293 | def startTagRawtext(self, token): | ||
| 1294 | """iframe, noembed noframes, noscript(if scripting enabled)""" | ||
| 1295 | self.parser.parseRCDataRawtext(token, "RAWTEXT") | ||
| 1296 | |||
| 1297 | def startTagOpt(self, token): | ||
| 1298 | if self.tree.openElements[-1].name == "option": | ||
| 1299 | self.parser.phase.processEndTag(impliedTagToken("option")) | ||
| 1300 | self.tree.reconstructActiveFormattingElements() | ||
| 1301 | self.parser.tree.insertElement(token) | ||
| 1302 | |||
| 1303 | def startTagSelect(self, token): | ||
| 1304 | self.tree.reconstructActiveFormattingElements() | ||
| 1305 | self.tree.insertElement(token) | ||
| 1306 | self.parser.framesetOK = False | ||
| 1307 | if self.parser.phase in (self.parser.phases["inTable"], | ||
| 1308 | self.parser.phases["inCaption"], | ||
| 1309 | self.parser.phases["inColumnGroup"], | ||
| 1310 | self.parser.phases["inTableBody"], | ||
| 1311 | self.parser.phases["inRow"], | ||
| 1312 | self.parser.phases["inCell"]): | ||
| 1313 | self.parser.phase = self.parser.phases["inSelectInTable"] | ||
| 1314 | else: | ||
| 1315 | self.parser.phase = self.parser.phases["inSelect"] | ||
| 1316 | |||
| 1317 | def startTagRpRt(self, token): | ||
| 1318 | if self.tree.elementInScope("ruby"): | ||
| 1319 | self.tree.generateImpliedEndTags() | ||
| 1320 | if self.tree.openElements[-1].name != "ruby": | ||
| 1321 | self.parser.parseError() | ||
| 1322 | self.tree.insertElement(token) | ||
| 1323 | |||
| 1324 | def startTagMath(self, token): | ||
| 1325 | self.tree.reconstructActiveFormattingElements() | ||
| 1326 | self.parser.adjustMathMLAttributes(token) | ||
| 1327 | self.parser.adjustForeignAttributes(token) | ||
| 1328 | token["namespace"] = namespaces["mathml"] | ||
| 1329 | self.tree.insertElement(token) | ||
| 1330 | # Need to get the parse error right for the case where the token | ||
| 1331 | # has a namespace not equal to the xmlns attribute | ||
| 1332 | if token["selfClosing"]: | ||
| 1333 | self.tree.openElements.pop() | ||
| 1334 | token["selfClosingAcknowledged"] = True | ||
| 1335 | |||
| 1336 | def startTagSvg(self, token): | ||
| 1337 | self.tree.reconstructActiveFormattingElements() | ||
| 1338 | self.parser.adjustSVGAttributes(token) | ||
| 1339 | self.parser.adjustForeignAttributes(token) | ||
| 1340 | token["namespace"] = namespaces["svg"] | ||
| 1341 | self.tree.insertElement(token) | ||
| 1342 | # Need to get the parse error right for the case where the token | ||
| 1343 | # has a namespace not equal to the xmlns attribute | ||
| 1344 | if token["selfClosing"]: | ||
| 1345 | self.tree.openElements.pop() | ||
| 1346 | token["selfClosingAcknowledged"] = True | ||
| 1347 | |||
| 1348 | def startTagMisplaced(self, token): | ||
| 1349 | """ Elements that should be children of other elements that have a | ||
| 1350 | different insertion mode; here they are ignored | ||
| 1351 | "caption", "col", "colgroup", "frame", "frameset", "head", | ||
| 1352 | "option", "optgroup", "tbody", "td", "tfoot", "th", "thead", | ||
| 1353 | "tr", "noscript" | ||
| 1354 | """ | ||
| 1355 | self.parser.parseError("unexpected-start-tag-ignored", {"name": token["name"]}) | ||
| 1356 | |||
| 1357 | def startTagOther(self, token): | ||
| 1358 | self.tree.reconstructActiveFormattingElements() | ||
| 1359 | self.tree.insertElement(token) | ||
| 1360 | |||
| 1361 | def endTagP(self, token): | ||
| 1362 | if not self.tree.elementInScope("p", variant="button"): | ||
| 1363 | self.startTagCloseP(impliedTagToken("p", "StartTag")) | ||
| 1364 | self.parser.parseError("unexpected-end-tag", {"name": "p"}) | ||
| 1365 | self.endTagP(impliedTagToken("p", "EndTag")) | ||
| 1366 | else: | ||
| 1367 | self.tree.generateImpliedEndTags("p") | ||
| 1368 | if self.tree.openElements[-1].name != "p": | ||
| 1369 | self.parser.parseError("unexpected-end-tag", {"name": "p"}) | ||
| 1370 | node = self.tree.openElements.pop() | ||
| 1371 | while node.name != "p": | ||
| 1372 | node = self.tree.openElements.pop() | ||
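| | # For example, a stray "</p>" with no open <p> (as in "<div></p>") | ||
| | # synthesizes an empty <p> element first and then closes it | ||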
| 1373 | |||
| 1374 | def endTagBody(self, token): | ||
| 1375 | if not self.tree.elementInScope("body"): | ||
| 1376 | self.parser.parseError() | ||
| 1377 | return | ||
| 1378 | elif self.tree.openElements[-1].name != "body": | ||
| 1379 | for node in self.tree.openElements[2:]: | ||
| 1380 | if node.name not in frozenset(("dd", "dt", "li", "optgroup", | ||
| 1381 | "option", "p", "rp", "rt", | ||
| 1382 | "tbody", "td", "tfoot", | ||
| 1383 | "th", "thead", "tr", "body", | ||
| 1384 | "html")): | ||
| 1385 | # Not sure this is the correct name for the parse error | ||
| 1386 | self.parser.parseError( | ||
| 1387 | "expected-one-end-tag-but-got-another", | ||
| 1388 | {"gotName": "body", "expectedName": node.name}) | ||
| 1389 | break | ||
| 1390 | self.parser.phase = self.parser.phases["afterBody"] | ||
| 1391 | |||
| 1392 | def endTagHtml(self, token): | ||
| 1393 | # We repeat the test for the body end tag token being ignored here | ||
| 1394 | if self.tree.elementInScope("body"): | ||
| 1395 | self.endTagBody(impliedTagToken("body")) | ||
| 1396 | return token | ||
| 1397 | |||
| 1398 | def endTagBlock(self, token): | ||
| 1399 | # Put us back in the right whitespace handling mode | ||
| 1400 | if token["name"] == "pre": | ||
| 1401 | self.processSpaceCharacters = self.processSpaceCharactersNonPre | ||
| 1402 | inScope = self.tree.elementInScope(token["name"]) | ||
| 1403 | if inScope: | ||
| 1404 | self.tree.generateImpliedEndTags() | ||
| 1405 | if self.tree.openElements[-1].name != token["name"]: | ||
| 1406 | self.parser.parseError("end-tag-too-early", {"name": token["name"]}) | ||
| 1407 | if inScope: | ||
| 1408 | node = self.tree.openElements.pop() | ||
| 1409 | while node.name != token["name"]: | ||
| 1410 | node = self.tree.openElements.pop() | ||
| 1411 | |||
| 1412 | def endTagForm(self, token): | ||
| 1413 | node = self.tree.formPointer | ||
| 1414 | self.tree.formPointer = None | ||
| 1415 | if node is None or not self.tree.elementInScope(node): | ||
| 1416 | self.parser.parseError("unexpected-end-tag", | ||
| 1417 | {"name": "form"}) | ||
| 1418 | else: | ||
| 1419 | self.tree.generateImpliedEndTags() | ||
| 1420 | if self.tree.openElements[-1] != node: | ||
| 1421 | self.parser.parseError("end-tag-too-early-ignored", | ||
| 1422 | {"name": "form"}) | ||
| 1423 | self.tree.openElements.remove(node) | ||
| 1424 | |||
| 1425 | def endTagListItem(self, token): | ||
| 1426 | if token["name"] == "li": | ||
| 1427 | variant = "list" | ||
| 1428 | else: | ||
| 1429 | variant = None | ||
| 1430 | if not self.tree.elementInScope(token["name"], variant=variant): | ||
| 1431 | self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) | ||
| 1432 | else: | ||
| 1433 | self.tree.generateImpliedEndTags(exclude=token["name"]) | ||
| 1434 | if self.tree.openElements[-1].name != token["name"]: | ||
| 1435 | self.parser.parseError( | ||
| 1436 | "end-tag-too-early", | ||
| 1437 | {"name": token["name"]}) | ||
| 1438 | node = self.tree.openElements.pop() | ||
| 1439 | while node.name != token["name"]: | ||
| 1440 | node = self.tree.openElements.pop() | ||
| 1441 | |||
| 1442 | def endTagHeading(self, token): | ||
| 1443 | for item in headingElements: | ||
| 1444 | if self.tree.elementInScope(item): | ||
| 1445 | self.tree.generateImpliedEndTags() | ||
| 1446 | break | ||
| 1447 | if self.tree.openElements[-1].name != token["name"]: | ||
| 1448 | self.parser.parseError("end-tag-too-early", {"name": token["name"]}) | ||
| 1449 | |||
| 1450 | for item in headingElements: | ||
| 1451 | if self.tree.elementInScope(item): | ||
| 1452 | item = self.tree.openElements.pop() | ||
| 1453 | while item.name not in headingElements: | ||
| 1454 | item = self.tree.openElements.pop() | ||
| 1455 | break | ||
| 1456 | |||
| 1457 | def endTagFormatting(self, token): | ||
| 1458 | """The much-feared adoption agency algorithm""" | ||
| 1459 | # http://svn.whatwg.org/webapps/complete.html#adoptionAgency revision 7867 | ||
| 1460 | # XXX Better parseError messages appreciated. | ||
| 1461 | |||
| 1462 | # Step 1 | ||
| 1463 | outerLoopCounter = 0 | ||
| 1464 | |||
| 1465 | # Step 2 | ||
| 1466 | while outerLoopCounter < 8: | ||
| 1467 | |||
| 1468 | # Step 3 | ||
| 1469 | outerLoopCounter += 1 | ||
| 1470 | |||
| 1471 | # Step 4: | ||
| 1472 | |||
| 1473 | # Let the formatting element be the last element in | ||
| 1474 | # the list of active formatting elements that: | ||
| 1475 | # - is between the end of the list and the last scope | ||
| 1476 | # marker in the list, if any, or the start of the list | ||
| 1477 | # otherwise, and | ||
| 1478 | # - has the same tag name as the token. | ||
| 1479 | formattingElement = self.tree.elementInActiveFormattingElements( | ||
| 1480 | token["name"]) | ||
| 1481 | if (not formattingElement or | ||
| 1482 | (formattingElement in self.tree.openElements and | ||
| 1483 | not self.tree.elementInScope(formattingElement.name))): | ||
| 1484 | # If there is no such node, then abort these steps | ||
| 1485 | # and instead act as described in the "any other | ||
| 1486 | # end tag" entry below. | ||
| 1487 | self.endTagOther(token) | ||
| 1488 | return | ||
| 1489 | |||
| 1490 | # Otherwise, if there is such a node, but that node is | ||
| 1491 | # not in the stack of open elements, then this is a | ||
| 1492 | # parse error; remove the element from the list, and | ||
| 1493 | # abort these steps. | ||
| 1494 | elif formattingElement not in self.tree.openElements: | ||
| 1495 | self.parser.parseError("adoption-agency-1.2", {"name": token["name"]}) | ||
| 1496 | self.tree.activeFormattingElements.remove(formattingElement) | ||
| 1497 | return | ||
| 1498 | |||
| 1499 | # Otherwise, if there is such a node, and that node is | ||
| 1500 | # also in the stack of open elements, but the element | ||
| 1501 | # is not in scope, then this is a parse error; ignore | ||
| 1502 | # the token, and abort these steps. | ||
| 1503 | elif not self.tree.elementInScope(formattingElement.name): | ||
| 1504 | self.parser.parseError("adoption-agency-4.4", {"name": token["name"]}) | ||
| 1505 | return | ||
| 1506 | |||
| 1507 | # Otherwise, there is a formatting element and that | ||
| 1508 | # element is in the stack and is in scope. If the | ||
| 1509 | # element is not the current node, this is a parse | ||
| 1510 | # error. In any case, proceed with the algorithm as | ||
| 1511 | # written in the following steps. | ||
| 1512 | else: | ||
| 1513 | if formattingElement != self.tree.openElements[-1]: | ||
| 1514 | self.parser.parseError("adoption-agency-1.3", {"name": token["name"]}) | ||
| 1515 | |||
| 1516 | # Step 5: | ||
| 1517 | |||
| 1518 | # Let the furthest block be the topmost node in the | ||
| 1519 | # stack of open elements that is lower in the stack | ||
| 1520 | # than the formatting element, and is an element in | ||
| 1521 | # the special category. There might not be one. | ||
| 1522 | afeIndex = self.tree.openElements.index(formattingElement) | ||
| 1523 | furthestBlock = None | ||
| 1524 | for element in self.tree.openElements[afeIndex:]: | ||
| 1525 | if element.nameTuple in specialElements: | ||
| 1526 | furthestBlock = element | ||
| 1527 | break | ||
| 1528 | |||
| 1529 | # Step 6: | ||
| 1530 | |||
| 1531 | # If there is no furthest block, then the UA must | ||
| 1532 | # first pop all the nodes from the bottom of the stack | ||
| 1533 | # of open elements, from the current node up to and | ||
| 1534 | # including the formatting element, then remove the | ||
| 1535 | # formatting element from the list of active | ||
| 1536 | # formatting elements, and finally abort these steps. | ||
| 1537 | if furthestBlock is None: | ||
| 1538 | element = self.tree.openElements.pop() | ||
| 1539 | while element != formattingElement: | ||
| 1540 | element = self.tree.openElements.pop() | ||
| 1541 | self.tree.activeFormattingElements.remove(element) | ||
| 1542 | return | ||
| 1543 | |||
| 1544 | # Step 7 | ||
| 1545 | commonAncestor = self.tree.openElements[afeIndex - 1] | ||
| 1546 | |||
| 1547 | # Step 8: | ||
| 1548 | # The bookmark is supposed to help us identify where to reinsert | ||
| 1549 | # nodes in step 15. We have to ensure that we reinsert nodes after | ||
| 1550 | # the node before the active formatting element. Note the bookmark | ||
| 1551 | # can move in step 9.7 | ||
| 1552 | bookmark = self.tree.activeFormattingElements.index(formattingElement) | ||
| 1553 | |||
| 1554 | # Step 9 | ||
| 1555 | lastNode = node = furthestBlock | ||
| 1556 | innerLoopCounter = 0 | ||
| 1557 | |||
| 1558 | index = self.tree.openElements.index(node) | ||
| 1559 | while innerLoopCounter < 3: | ||
| 1560 | innerLoopCounter += 1 | ||
| 1561 | # Node is element before node in open elements | ||
| 1562 | index -= 1 | ||
| 1563 | node = self.tree.openElements[index] | ||
| 1564 | if node not in self.tree.activeFormattingElements: | ||
| 1565 | self.tree.openElements.remove(node) | ||
| 1566 | continue | ||
| 1567 | # Step 9.6 | ||
| 1568 | if node == formattingElement: | ||
| 1569 | break | ||
| 1570 | # Step 9.7 | ||
| 1571 | if lastNode == furthestBlock: | ||
| 1572 | bookmark = self.tree.activeFormattingElements.index(node) + 1 | ||
| 1573 | # Step 9.8 | ||
| 1574 | clone = node.cloneNode() | ||
| 1575 | # Replace node with clone | ||
| 1576 | self.tree.activeFormattingElements[ | ||
| 1577 | self.tree.activeFormattingElements.index(node)] = clone | ||
| 1578 | self.tree.openElements[ | ||
| 1579 | self.tree.openElements.index(node)] = clone | ||
| 1580 | node = clone | ||
| 1581 | # Step 9.9 | ||
| 1582 | # Remove lastNode from its parents, if any | ||
| 1583 | if lastNode.parent: | ||
| 1584 | lastNode.parent.removeChild(lastNode) | ||
| 1585 | node.appendChild(lastNode) | ||
| 1586 | # Step 9.10 | ||
| 1587 | lastNode = node | ||
| 1588 | |||
| 1589 | # Step 10 | ||
| 1590 | # If commonAncestor is a table, tbody, tfoot, thead, or | ||
| 1591 | # tr element, lastNode cannot be appended to it directly; | ||
| 1592 | # it must be foster parented instead | ||
| 1593 | if lastNode.parent: | ||
| 1594 | lastNode.parent.removeChild(lastNode) | ||
| 1595 | |||
| 1596 | if commonAncestor.name in frozenset(("table", "tbody", "tfoot", "thead", "tr")): | ||
| 1597 | parent, insertBefore = self.tree.getTableMisnestedNodePosition() | ||
| 1598 | parent.insertBefore(lastNode, insertBefore) | ||
| 1599 | else: | ||
| 1600 | commonAncestor.appendChild(lastNode) | ||
| 1601 | |||
| 1602 | # Step 11 | ||
| 1603 | clone = formattingElement.cloneNode() | ||
| 1604 | |||
| 1605 | # Step 12 | ||
| 1606 | furthestBlock.reparentChildren(clone) | ||
| 1607 | |||
| 1608 | # Step 13 | ||
| 1609 | furthestBlock.appendChild(clone) | ||
| 1610 | |||
| 1611 | # Step 14 | ||
| 1612 | self.tree.activeFormattingElements.remove(formattingElement) | ||
| 1613 | self.tree.activeFormattingElements.insert(bookmark, clone) | ||
| 1614 | |||
| 1615 | # Step 15 | ||
| 1616 | self.tree.openElements.remove(formattingElement) | ||
| 1617 | self.tree.openElements.insert( | ||
| 1618 | self.tree.openElements.index(furthestBlock) + 1, clone) | ||
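| | # A worked example (the spec's misnested-tags case), doctest-style: | ||
| | #   >>> import html5lib | ||
| | #   >>> html5lib.parse("<b>1<p>2</b>3") | ||
| | # produces <b>1</b><p><b>2</b>3</p>: the <b> is cloned around the | ||
| | # furthest block's contents, and a second pass of the outer loop | ||
| | # pops the clone so that "3" lands directly in the <p> | ||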
| 1619 | |||
| 1620 | def endTagAppletMarqueeObject(self, token): | ||
| 1621 | if self.tree.elementInScope(token["name"]): | ||
| 1622 | self.tree.generateImpliedEndTags() | ||
| 1623 | if self.tree.openElements[-1].name != token["name"]: | ||
| 1624 | self.parser.parseError("end-tag-too-early", {"name": token["name"]}) | ||
| 1625 | |||
| 1626 | if self.tree.elementInScope(token["name"]): | ||
| 1627 | element = self.tree.openElements.pop() | ||
| 1628 | while element.name != token["name"]: | ||
| 1629 | element = self.tree.openElements.pop() | ||
| 1630 | self.tree.clearActiveFormattingElements() | ||
| 1631 | |||
| 1632 | def endTagBr(self, token): | ||
| 1633 | self.parser.parseError("unexpected-end-tag-treated-as", | ||
| 1634 | {"originalName": "br", "newName": "br element"}) | ||
| 1635 | self.tree.reconstructActiveFormattingElements() | ||
| 1636 | self.tree.insertElement(impliedTagToken("br", "StartTag")) | ||
| 1637 | self.tree.openElements.pop() | ||
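| | # For example, the invalid end tag in "x</br>y" is reported as an | ||
| | # error but still materializes a <br> element between "x" and "y", | ||
| | # matching browser behavior | ||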
| 1638 | |||
| 1639 | def endTagOther(self, token): | ||
| 1640 | for node in self.tree.openElements[::-1]: | ||
| 1641 | if node.name == token["name"]: | ||
| 1642 | self.tree.generateImpliedEndTags(exclude=token["name"]) | ||
| 1643 | if self.tree.openElements[-1].name != token["name"]: | ||
| 1644 | self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) | ||
| 1645 | while self.tree.openElements.pop() != node: | ||
| 1646 | pass | ||
| 1647 | break | ||
| 1648 | else: | ||
| 1649 | if node.nameTuple in specialElements: | ||
| 1650 | self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) | ||
| 1651 | break | ||
| 1652 | |||
| 1653 | class TextPhase(Phase): | ||
| 1654 | def __init__(self, parser, tree): | ||
| 1655 | Phase.__init__(self, parser, tree) | ||
| 1656 | self.startTagHandler = _utils.MethodDispatcher([]) | ||
| 1657 | self.startTagHandler.default = self.startTagOther | ||
| 1658 | self.endTagHandler = _utils.MethodDispatcher([ | ||
| 1659 | ("script", self.endTagScript)]) | ||
| 1660 | self.endTagHandler.default = self.endTagOther | ||
| 1661 | |||
| 1662 | def processCharacters(self, token): | ||
| 1663 | self.tree.insertText(token["data"]) | ||
| 1664 | |||
| 1665 | def processEOF(self): | ||
| 1666 | self.parser.parseError("expected-named-closing-tag-but-got-eof", | ||
| 1667 | {"name": self.tree.openElements[-1].name}) | ||
| 1668 | self.tree.openElements.pop() | ||
| 1669 | self.parser.phase = self.parser.originalPhase | ||
| 1670 | return True | ||
| 1671 | |||
| 1672 | def startTagOther(self, token): | ||
| 1673 | assert False, "Tried to process start tag %s in RCDATA/RAWTEXT mode" % token['name'] | ||
| 1674 | |||
| 1675 | def endTagScript(self, token): | ||
| 1676 | node = self.tree.openElements.pop() | ||
| 1677 | assert node.name == "script" | ||
| 1678 | self.parser.phase = self.parser.originalPhase | ||
| 1679 | # The rest of this method is all stuff that only happens if | ||
| 1680 | # document.write works | ||
| 1681 | |||
| 1682 | def endTagOther(self, token): | ||
| 1683 | self.tree.openElements.pop() | ||
| 1684 | self.parser.phase = self.parser.originalPhase | ||
| 1685 | |||
| 1686 | class InTablePhase(Phase): | ||
| 1687 | # http://www.whatwg.org/specs/web-apps/current-work/#in-table | ||
| 1688 | def __init__(self, parser, tree): | ||
| 1689 | Phase.__init__(self, parser, tree) | ||
| 1690 | self.startTagHandler = _utils.MethodDispatcher([ | ||
| 1691 | ("html", self.startTagHtml), | ||
| 1692 | ("caption", self.startTagCaption), | ||
| 1693 | ("colgroup", self.startTagColgroup), | ||
| 1694 | ("col", self.startTagCol), | ||
| 1695 | (("tbody", "tfoot", "thead"), self.startTagRowGroup), | ||
| 1696 | (("td", "th", "tr"), self.startTagImplyTbody), | ||
| 1697 | ("table", self.startTagTable), | ||
| 1698 | (("style", "script"), self.startTagStyleScript), | ||
| 1699 | ("input", self.startTagInput), | ||
| 1700 | ("form", self.startTagForm) | ||
| 1701 | ]) | ||
| 1702 | self.startTagHandler.default = self.startTagOther | ||
| 1703 | |||
| 1704 | self.endTagHandler = _utils.MethodDispatcher([ | ||
| 1705 | ("table", self.endTagTable), | ||
| 1706 | (("body", "caption", "col", "colgroup", "html", "tbody", "td", | ||
| 1707 | "tfoot", "th", "thead", "tr"), self.endTagIgnore) | ||
| 1708 | ]) | ||
| 1709 | self.endTagHandler.default = self.endTagOther | ||
| 1710 | |||
| 1711 | # helper methods | ||
| 1712 | def clearStackToTableContext(self): | ||
| 1713 | # "clear the stack back to a table context" | ||
| 1714 | while self.tree.openElements[-1].name not in ("table", "html"): | ||
| 1715 | # self.parser.parseError("unexpected-implied-end-tag-in-table", | ||
| 1716 | # {"name": self.tree.openElements[-1].name}) | ||
| 1717 | self.tree.openElements.pop() | ||
| 1718 | # When the current node is <html> it's an innerHTML case | ||
| 1719 | |||
| 1720 | # processing methods | ||
| 1721 | def processEOF(self): | ||
| 1722 | if self.tree.openElements[-1].name != "html": | ||
| 1723 | self.parser.parseError("eof-in-table") | ||
| 1724 | else: | ||
| 1725 | assert self.parser.innerHTML | ||
| 1726 | # Stop parsing | ||
| 1727 | |||
| 1728 | def processSpaceCharacters(self, token): | ||
| 1729 | originalPhase = self.parser.phase | ||
| 1730 | self.parser.phase = self.parser.phases["inTableText"] | ||
| 1731 | self.parser.phase.originalPhase = originalPhase | ||
| 1732 | self.parser.phase.processSpaceCharacters(token) | ||
| 1733 | |||
| 1734 | def processCharacters(self, token): | ||
| 1735 | originalPhase = self.parser.phase | ||
| 1736 | self.parser.phase = self.parser.phases["inTableText"] | ||
| 1737 | self.parser.phase.originalPhase = originalPhase | ||
| 1738 | self.parser.phase.processCharacters(token) | ||
| 1739 | |||
| 1740 | def insertText(self, token): | ||
| 1741 | # If we get here there must be at least one non-whitespace character | ||
| 1742 | # Do the table magic! | ||
| 1743 | self.tree.insertFromTable = True | ||
| 1744 | self.parser.phases["inBody"].processCharacters(token) | ||
| 1745 | self.tree.insertFromTable = False | ||
| 1746 | |||
| 1747 | def startTagCaption(self, token): | ||
| 1748 | self.clearStackToTableContext() | ||
| 1749 | self.tree.activeFormattingElements.append(Marker) | ||
| 1750 | self.tree.insertElement(token) | ||
| 1751 | self.parser.phase = self.parser.phases["inCaption"] | ||
| 1752 | |||
| 1753 | def startTagColgroup(self, token): | ||
| 1754 | self.clearStackToTableContext() | ||
| 1755 | self.tree.insertElement(token) | ||
| 1756 | self.parser.phase = self.parser.phases["inColumnGroup"] | ||
| 1757 | |||
| 1758 | def startTagCol(self, token): | ||
| 1759 | self.startTagColgroup(impliedTagToken("colgroup", "StartTag")) | ||
| 1760 | return token | ||
| 1761 | |||
| 1762 | def startTagRowGroup(self, token): | ||
| 1763 | self.clearStackToTableContext() | ||
| 1764 | self.tree.insertElement(token) | ||
| 1765 | self.parser.phase = self.parser.phases["inTableBody"] | ||
| 1766 | |||
| 1767 | def startTagImplyTbody(self, token): | ||
| 1768 | self.startTagRowGroup(impliedTagToken("tbody", "StartTag")) | ||
| 1769 | return token | ||
| 1770 | |||
| 1771 | def startTagTable(self, token): | ||
| 1772 | self.parser.parseError("unexpected-start-tag-implies-end-tag", | ||
| 1773 | {"startName": "table", "endName": "table"}) | ||
| 1774 | self.parser.phase.processEndTag(impliedTagToken("table")) | ||
| 1775 | if not self.parser.innerHTML: | ||
| 1776 | return token | ||
| 1777 | |||
| 1778 | def startTagStyleScript(self, token): | ||
| 1779 | return self.parser.phases["inHead"].processStartTag(token) | ||
| 1780 | |||
| 1781 | def startTagInput(self, token): | ||
| 1782 | if ("type" in token["data"] and | ||
| 1783 | token["data"]["type"].translate(asciiUpper2Lower) == "hidden"): | ||
| 1784 | self.parser.parseError("unexpected-hidden-input-in-table") | ||
| 1785 | self.tree.insertElement(token) | ||
| 1786 | # XXX associate with form | ||
| 1787 | self.tree.openElements.pop() | ||
| 1788 | else: | ||
| 1789 | self.startTagOther(token) | ||
| 1790 | |||
| 1791 | def startTagForm(self, token): | ||
| 1792 | self.parser.parseError("unexpected-form-in-table") | ||
| 1793 | if self.tree.formPointer is None: | ||
| 1794 | self.tree.insertElement(token) | ||
| 1795 | self.tree.formPointer = self.tree.openElements[-1] | ||
| 1796 | self.tree.openElements.pop() | ||
| 1797 | |||
| 1798 | def startTagOther(self, token): | ||
| 1799 | self.parser.parseError("unexpected-start-tag-implies-table-voodoo", {"name": token["name"]}) | ||
| 1800 | # Do the table magic! | ||
| 1801 | self.tree.insertFromTable = True | ||
| 1802 | self.parser.phases["inBody"].processStartTag(token) | ||
| 1803 | self.tree.insertFromTable = False | ||
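| | # For example, in "<table><div>" the <div> cannot live inside the | ||
| | # table, so insertFromTable makes the tree builder foster parent it: | ||
| | # the <div> ends up inserted immediately before the <table> | ||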
| 1804 | |||
| 1805 | def endTagTable(self, token): | ||
| 1806 | if self.tree.elementInScope("table", variant="table"): | ||
| 1807 | self.tree.generateImpliedEndTags() | ||
| 1808 | if self.tree.openElements[-1].name != "table": | ||
| 1809 | self.parser.parseError("end-tag-too-early-named", | ||
| 1810 | {"gotName": "table", | ||
| 1811 | "expectedName": self.tree.openElements[-1].name}) | ||
| 1812 | while self.tree.openElements[-1].name != "table": | ||
| 1813 | self.tree.openElements.pop() | ||
| 1814 | self.tree.openElements.pop() | ||
| 1815 | self.parser.resetInsertionMode() | ||
| 1816 | else: | ||
| 1817 | # innerHTML case | ||
| 1818 | assert self.parser.innerHTML | ||
| 1819 | self.parser.parseError() | ||
| 1820 | |||
| 1821 | def endTagIgnore(self, token): | ||
| 1822 | self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) | ||
| 1823 | |||
| 1824 | def endTagOther(self, token): | ||
| 1825 | self.parser.parseError("unexpected-end-tag-implies-table-voodoo", {"name": token["name"]}) | ||
| 1826 | # Do the table magic! | ||
| 1827 | self.tree.insertFromTable = True | ||
| 1828 | self.parser.phases["inBody"].processEndTag(token) | ||
| 1829 | self.tree.insertFromTable = False | ||
| 1830 | |||
| 1831 | class InTableTextPhase(Phase): | ||
| 1832 | def __init__(self, parser, tree): | ||
| 1833 | Phase.__init__(self, parser, tree) | ||
| 1834 | self.originalPhase = None | ||
| 1835 | self.characterTokens = [] | ||
| 1836 | |||
| 1837 | def flushCharacters(self): | ||
| 1838 | data = "".join([item["data"] for item in self.characterTokens]) | ||
| 1839 | if any([item not in spaceCharacters for item in data]): | ||
| 1840 | token = {"type": tokenTypes["Characters"], "data": data} | ||
| 1841 | self.parser.phases["inTable"].insertText(token) | ||
| 1842 | elif data: | ||
| 1843 | self.tree.insertText(data) | ||
| 1844 | self.characterTokens = [] | ||
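| | # For example, in "<table> <tr>" the buffered run is all whitespace | ||
| | # and stays inside the table, while in "<table>x<tr>" the run holds | ||
| | # a non-space character, so it is routed through insertText() and | ||
| | # foster parented before the table | ||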
| 1845 | |||
| 1846 | def processComment(self, token): | ||
| 1847 | self.flushCharacters() | ||
| 1848 | self.parser.phase = self.originalPhase | ||
| 1849 | return token | ||
| 1850 | |||
| 1851 | def processEOF(self): | ||
| 1852 | self.flushCharacters() | ||
| 1853 | self.parser.phase = self.originalPhase | ||
| 1854 | return True | ||
| 1855 | |||
| 1856 | def processCharacters(self, token): | ||
| 1857 | if token["data"] == "\u0000": | ||
| 1858 | return | ||
| 1859 | self.characterTokens.append(token) | ||
| 1860 | |||
| 1861 | def processSpaceCharacters(self, token): | ||
| 1862 | # This should be unreachable in practice; buffer the token anyway | ||
| 1863 | self.characterTokens.append(token) | ||
| 1864 | # assert False | ||
| 1865 | |||
| 1866 | def processStartTag(self, token): | ||
| 1867 | self.flushCharacters() | ||
| 1868 | self.parser.phase = self.originalPhase | ||
| 1869 | return token | ||
| 1870 | |||
| 1871 | def processEndTag(self, token): | ||
| 1872 | self.flushCharacters() | ||
| 1873 | self.parser.phase = self.originalPhase | ||
| 1874 | return token | ||
| 1875 | |||
| 1876 | class InCaptionPhase(Phase): | ||
| 1877 | # http://www.whatwg.org/specs/web-apps/current-work/#in-caption | ||
| 1878 | def __init__(self, parser, tree): | ||
| 1879 | Phase.__init__(self, parser, tree) | ||
| 1880 | |||
| 1881 | self.startTagHandler = _utils.MethodDispatcher([ | ||
| 1882 | ("html", self.startTagHtml), | ||
| 1883 | (("caption", "col", "colgroup", "tbody", "td", "tfoot", "th", | ||
| 1884 | "thead", "tr"), self.startTagTableElement) | ||
| 1885 | ]) | ||
| 1886 | self.startTagHandler.default = self.startTagOther | ||
| 1887 | |||
| 1888 | self.endTagHandler = _utils.MethodDispatcher([ | ||
| 1889 | ("caption", self.endTagCaption), | ||
| 1890 | ("table", self.endTagTable), | ||
| 1891 | (("body", "col", "colgroup", "html", "tbody", "td", "tfoot", "th", | ||
| 1892 | "thead", "tr"), self.endTagIgnore) | ||
| 1893 | ]) | ||
| 1894 | self.endTagHandler.default = self.endTagOther | ||
| 1895 | |||
| 1896 | def ignoreEndTagCaption(self): | ||
| 1897 | return not self.tree.elementInScope("caption", variant="table") | ||
| 1898 | |||
| 1899 | def processEOF(self): | ||
| 1900 | self.parser.phases["inBody"].processEOF() | ||
| 1901 | |||
| 1902 | def processCharacters(self, token): | ||
| 1903 | return self.parser.phases["inBody"].processCharacters(token) | ||
| 1904 | |||
| 1905 | def startTagTableElement(self, token): | ||
| 1906 | self.parser.parseError() | ||
| 1907 | # XXX Have to duplicate logic here to find out if the tag is ignored | ||
| 1908 | ignoreEndTag = self.ignoreEndTagCaption() | ||
| 1909 | self.parser.phase.processEndTag(impliedTagToken("caption")) | ||
| 1910 | if not ignoreEndTag: | ||
| 1911 | return token | ||
| 1912 | |||
| 1913 | def startTagOther(self, token): | ||
| 1914 | return self.parser.phases["inBody"].processStartTag(token) | ||
| 1915 | |||
| 1916 | def endTagCaption(self, token): | ||
| 1917 | if not self.ignoreEndTagCaption(): | ||
| 1918 | # Note: this code is quite similar to endTagTable in "InTable" | ||
| 1919 | self.tree.generateImpliedEndTags() | ||
| 1920 | if self.tree.openElements[-1].name != "caption": | ||
| 1921 | self.parser.parseError("expected-one-end-tag-but-got-another", | ||
| 1922 | {"gotName": "caption", | ||
| 1923 | "expectedName": self.tree.openElements[-1].name}) | ||
| 1924 | while self.tree.openElements[-1].name != "caption": | ||
| 1925 | self.tree.openElements.pop() | ||
| 1926 | self.tree.openElements.pop() | ||
| 1927 | self.tree.clearActiveFormattingElements() | ||
| 1928 | self.parser.phase = self.parser.phases["inTable"] | ||
| 1929 | else: | ||
| 1930 | # innerHTML case | ||
| 1931 | assert self.parser.innerHTML | ||
| 1932 | self.parser.parseError() | ||
| 1933 | |||
| 1934 | def endTagTable(self, token): | ||
| 1935 | self.parser.parseError() | ||
| 1936 | ignoreEndTag = self.ignoreEndTagCaption() | ||
| 1937 | self.parser.phase.processEndTag(impliedTagToken("caption")) | ||
| 1938 | if not ignoreEndTag: | ||
| 1939 | return token | ||
| 1940 | |||
| 1941 | def endTagIgnore(self, token): | ||
| 1942 | self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) | ||
| 1943 | |||
| 1944 | def endTagOther(self, token): | ||
| 1945 | return self.parser.phases["inBody"].processEndTag(token) | ||
| 1946 | |||
| 1947 | class InColumnGroupPhase(Phase): | ||
| 1948 | # http://www.whatwg.org/specs/web-apps/current-work/#in-column | ||
| 1949 | |||
| 1950 | def __init__(self, parser, tree): | ||
| 1951 | Phase.__init__(self, parser, tree) | ||
| 1952 | |||
| 1953 | self.startTagHandler = _utils.MethodDispatcher([ | ||
| 1954 | ("html", self.startTagHtml), | ||
| 1955 | ("col", self.startTagCol) | ||
| 1956 | ]) | ||
| 1957 | self.startTagHandler.default = self.startTagOther | ||
| 1958 | |||
| 1959 | self.endTagHandler = _utils.MethodDispatcher([ | ||
| 1960 | ("colgroup", self.endTagColgroup), | ||
| 1961 | ("col", self.endTagCol) | ||
| 1962 | ]) | ||
| 1963 | self.endTagHandler.default = self.endTagOther | ||
| 1964 | |||
| 1965 | def ignoreEndTagColgroup(self): | ||
| 1966 | return self.tree.openElements[-1].name == "html" | ||
| 1967 | |||
| 1968 | def processEOF(self): | ||
| 1969 | if self.tree.openElements[-1].name == "html": | ||
| 1970 | assert self.parser.innerHTML | ||
| 1971 | return | ||
| 1972 | else: | ||
| 1973 | ignoreEndTag = self.ignoreEndTagColgroup() | ||
| 1974 | self.endTagColgroup(impliedTagToken("colgroup")) | ||
| 1975 | if not ignoreEndTag: | ||
| 1976 | return True | ||
| 1977 | |||
| 1978 | def processCharacters(self, token): | ||
| 1979 | ignoreEndTag = self.ignoreEndTagColgroup() | ||
| 1980 | self.endTagColgroup(impliedTagToken("colgroup")) | ||
| 1981 | if not ignoreEndTag: | ||
| 1982 | return token | ||
| 1983 | |||
| 1984 | def startTagCol(self, token): | ||
| 1985 | self.tree.insertElement(token) | ||
| 1986 | self.tree.openElements.pop() | ||
| 1987 | token["selfClosingAcknowledged"] = True | ||
| 1988 | |||
| 1989 | def startTagOther(self, token): | ||
| 1990 | ignoreEndTag = self.ignoreEndTagColgroup() | ||
| 1991 | self.endTagColgroup(impliedTagToken("colgroup")) | ||
| 1992 | if not ignoreEndTag: | ||
| 1993 | return token | ||
| 1994 | |||
| 1995 | def endTagColgroup(self, token): | ||
| 1996 | if self.ignoreEndTagColgroup(): | ||
| 1997 | # innerHTML case | ||
| 1998 | assert self.parser.innerHTML | ||
| 1999 | self.parser.parseError() | ||
| 2000 | else: | ||
| 2001 | self.tree.openElements.pop() | ||
| 2002 | self.parser.phase = self.parser.phases["inTable"] | ||
| 2003 | |||
| 2004 | def endTagCol(self, token): | ||
| 2005 | self.parser.parseError("no-end-tag", {"name": "col"}) | ||
| 2006 | |||
| 2007 | def endTagOther(self, token): | ||
| 2008 | ignoreEndTag = self.ignoreEndTagColgroup() | ||
| 2009 | self.endTagColgroup(impliedTagToken("colgroup")) | ||
| 2010 | if not ignoreEndTag: | ||
| 2011 | return token | ||
| 2012 | |||
| 2013 | class InTableBodyPhase(Phase): | ||
| 2014 | # http://www.whatwg.org/specs/web-apps/current-work/#in-table0 | ||
| 2015 | def __init__(self, parser, tree): | ||
| 2016 | Phase.__init__(self, parser, tree) | ||
| 2017 | self.startTagHandler = _utils.MethodDispatcher([ | ||
| 2018 | ("html", self.startTagHtml), | ||
| 2019 | ("tr", self.startTagTr), | ||
| 2020 | (("td", "th"), self.startTagTableCell), | ||
| 2021 | (("caption", "col", "colgroup", "tbody", "tfoot", "thead"), | ||
| 2022 | self.startTagTableOther) | ||
| 2023 | ]) | ||
| 2024 | self.startTagHandler.default = self.startTagOther | ||
| 2025 | |||
| 2026 | self.endTagHandler = _utils.MethodDispatcher([ | ||
| 2027 | (("tbody", "tfoot", "thead"), self.endTagTableRowGroup), | ||
| 2028 | ("table", self.endTagTable), | ||
| 2029 | (("body", "caption", "col", "colgroup", "html", "td", "th", | ||
| 2030 | "tr"), self.endTagIgnore) | ||
| 2031 | ]) | ||
| 2032 | self.endTagHandler.default = self.endTagOther | ||
| 2033 | |||
| 2034 | # helper methods | ||
| 2035 | def clearStackToTableBodyContext(self): | ||
| 2036 | while self.tree.openElements[-1].name not in ("tbody", "tfoot", | ||
| 2037 | "thead", "html"): | ||
| 2038 | # self.parser.parseError("unexpected-implied-end-tag-in-table", | ||
| 2039 | # {"name": self.tree.openElements[-1].name}) | ||
| 2040 | self.tree.openElements.pop() | ||
| 2041 | if self.tree.openElements[-1].name == "html": | ||
| 2042 | assert self.parser.innerHTML | ||
| 2043 | |||
| 2044 | # the rest | ||
| 2045 | def processEOF(self): | ||
| 2046 | self.parser.phases["inTable"].processEOF() | ||
| 2047 | |||
| 2048 | def processSpaceCharacters(self, token): | ||
| 2049 | return self.parser.phases["inTable"].processSpaceCharacters(token) | ||
| 2050 | |||
| 2051 | def processCharacters(self, token): | ||
| 2052 | return self.parser.phases["inTable"].processCharacters(token) | ||
| 2053 | |||
| 2054 | def startTagTr(self, token): | ||
| 2055 | self.clearStackToTableBodyContext() | ||
| 2056 | self.tree.insertElement(token) | ||
| 2057 | self.parser.phase = self.parser.phases["inRow"] | ||
| 2058 | |||
| 2059 | def startTagTableCell(self, token): | ||
| 2060 | self.parser.parseError("unexpected-cell-in-table-body", | ||
| 2061 | {"name": token["name"]}) | ||
| 2062 | self.startTagTr(impliedTagToken("tr", "StartTag")) | ||
| 2063 | return token | ||
| 2064 | |||
| 2065 | def startTagTableOther(self, token): | ||
| 2066 | # XXX AT Any ideas on how to share this with endTagTable? | ||
| 2067 | if (self.tree.elementInScope("tbody", variant="table") or | ||
| 2068 | self.tree.elementInScope("thead", variant="table") or | ||
| 2069 | self.tree.elementInScope("tfoot", variant="table")): | ||
| 2070 | self.clearStackToTableBodyContext() | ||
| 2071 | self.endTagTableRowGroup( | ||
| 2072 | impliedTagToken(self.tree.openElements[-1].name)) | ||
| 2073 | return token | ||
| 2074 | else: | ||
| 2075 | # innerHTML case | ||
| 2076 | assert self.parser.innerHTML | ||
| 2077 | self.parser.parseError() | ||
| 2078 | |||
| 2079 | def startTagOther(self, token): | ||
| 2080 | return self.parser.phases["inTable"].processStartTag(token) | ||
| 2081 | |||
| 2082 | def endTagTableRowGroup(self, token): | ||
| 2083 | if self.tree.elementInScope(token["name"], variant="table"): | ||
| 2084 | self.clearStackToTableBodyContext() | ||
| 2085 | self.tree.openElements.pop() | ||
| 2086 | self.parser.phase = self.parser.phases["inTable"] | ||
| 2087 | else: | ||
| 2088 | self.parser.parseError("unexpected-end-tag-in-table-body", | ||
| 2089 | {"name": token["name"]}) | ||
| 2090 | |||
| 2091 | def endTagTable(self, token): | ||
| 2092 | if (self.tree.elementInScope("tbody", variant="table") or | ||
| 2093 | self.tree.elementInScope("thead", variant="table") or | ||
| 2094 | self.tree.elementInScope("tfoot", variant="table")): | ||
| 2095 | self.clearStackToTableBodyContext() | ||
| 2096 | self.endTagTableRowGroup( | ||
| 2097 | impliedTagToken(self.tree.openElements[-1].name)) | ||
| 2098 | return token | ||
| 2099 | else: | ||
| 2100 | # innerHTML case | ||
| 2101 | assert self.parser.innerHTML | ||
| 2102 | self.parser.parseError() | ||
| 2103 | |||
| 2104 | def endTagIgnore(self, token): | ||
| 2105 | self.parser.parseError("unexpected-end-tag-in-table-body", | ||
| 2106 | {"name": token["name"]}) | ||
| 2107 | |||
| 2108 | def endTagOther(self, token): | ||
| 2109 | return self.parser.phases["inTable"].processEndTag(token) | ||
| 2110 | |||
| 2111 | class InRowPhase(Phase): | ||
| 2112 | # http://www.whatwg.org/specs/web-apps/current-work/#in-row | ||
| 2113 | def __init__(self, parser, tree): | ||
| 2114 | Phase.__init__(self, parser, tree) | ||
| 2115 | self.startTagHandler = _utils.MethodDispatcher([ | ||
| 2116 | ("html", self.startTagHtml), | ||
| 2117 | (("td", "th"), self.startTagTableCell), | ||
| 2118 | (("caption", "col", "colgroup", "tbody", "tfoot", "thead", | ||
| 2119 | "tr"), self.startTagTableOther) | ||
| 2120 | ]) | ||
| 2121 | self.startTagHandler.default = self.startTagOther | ||
| 2122 | |||
| 2123 | self.endTagHandler = _utils.MethodDispatcher([ | ||
| 2124 | ("tr", self.endTagTr), | ||
| 2125 | ("table", self.endTagTable), | ||
| 2126 | (("tbody", "tfoot", "thead"), self.endTagTableRowGroup), | ||
| 2127 | (("body", "caption", "col", "colgroup", "html", "td", "th"), | ||
| 2128 | self.endTagIgnore) | ||
| 2129 | ]) | ||
| 2130 | self.endTagHandler.default = self.endTagOther | ||
| 2131 | |||
| 2132 | # helper methods (XXX unify this with other table helper methods) | ||
| 2133 | def clearStackToTableRowContext(self): | ||
| 2134 | while self.tree.openElements[-1].name not in ("tr", "html"): | ||
| 2135 | self.parser.parseError("unexpected-implied-end-tag-in-table-row", | ||
| 2136 | {"name": self.tree.openElements[-1].name}) | ||
| 2137 | self.tree.openElements.pop() | ||
| 2138 | |||
| 2139 | def ignoreEndTagTr(self): | ||
| 2140 | return not self.tree.elementInScope("tr", variant="table") | ||
| 2141 | |||
| 2142 | # the rest | ||
| 2143 | def processEOF(self): | ||
| 2144 | self.parser.phases["inTable"].processEOF() | ||
| 2145 | |||
| 2146 | def processSpaceCharacters(self, token): | ||
| 2147 | return self.parser.phases["inTable"].processSpaceCharacters(token) | ||
| 2148 | |||
| 2149 | def processCharacters(self, token): | ||
| 2150 | return self.parser.phases["inTable"].processCharacters(token) | ||
| 2151 | |||
| 2152 | def startTagTableCell(self, token): | ||
| 2153 | self.clearStackToTableRowContext() | ||
| 2154 | self.tree.insertElement(token) | ||
| 2155 | self.parser.phase = self.parser.phases["inCell"] | ||
| 2156 | self.tree.activeFormattingElements.append(Marker) | ||
| 2157 | |||
| 2158 | def startTagTableOther(self, token): | ||
| 2159 | ignoreEndTag = self.ignoreEndTagTr() | ||
| 2160 | self.endTagTr(impliedTagToken("tr")) | ||
| 2161 | # XXX how are we sure it's always ignored in the innerHTML case? | ||
| 2162 | if not ignoreEndTag: | ||
| 2163 | return token | ||
| 2164 | |||
| 2165 | def startTagOther(self, token): | ||
| 2166 | return self.parser.phases["inTable"].processStartTag(token) | ||
| 2167 | |||
| 2168 | def endTagTr(self, token): | ||
| 2169 | if not self.ignoreEndTagTr(): | ||
| 2170 | self.clearStackToTableRowContext() | ||
| 2171 | self.tree.openElements.pop() | ||
| 2172 | self.parser.phase = self.parser.phases["inTableBody"] | ||
| 2173 | else: | ||
| 2174 | # innerHTML case | ||
| 2175 | assert self.parser.innerHTML | ||
| 2176 | self.parser.parseError() | ||
| 2177 | |||
| 2178 | def endTagTable(self, token): | ||
| 2179 | ignoreEndTag = self.ignoreEndTagTr() | ||
| 2180 | self.endTagTr(impliedTagToken("tr")) | ||
| 2181 | # Reprocess the current tag if the tr end tag was not ignored | ||
| 2182 | # XXX how are we sure it's always ignored in the innerHTML case? | ||
| 2183 | if not ignoreEndTag: | ||
| 2184 | return token | ||
| 2185 | |||
| 2186 | def endTagTableRowGroup(self, token): | ||
| 2187 | if self.tree.elementInScope(token["name"], variant="table"): | ||
| 2188 | self.endTagTr(impliedTagToken("tr")) | ||
| 2189 | return token | ||
| 2190 | else: | ||
| 2191 | self.parser.parseError() | ||
| 2192 | |||
| 2193 | def endTagIgnore(self, token): | ||
| 2194 | self.parser.parseError("unexpected-end-tag-in-table-row", | ||
| 2195 | {"name": token["name"]}) | ||
| 2196 | |||
| 2197 | def endTagOther(self, token): | ||
| 2198 | return self.parser.phases["inTable"].processEndTag(token) | ||
| 2199 | |||
| 2200 | class InCellPhase(Phase): | ||
| 2201 | # http://www.whatwg.org/specs/web-apps/current-work/#in-cell | ||
| 2202 | def __init__(self, parser, tree): | ||
| 2203 | Phase.__init__(self, parser, tree) | ||
| 2204 | self.startTagHandler = _utils.MethodDispatcher([ | ||
| 2205 | ("html", self.startTagHtml), | ||
| 2206 | (("caption", "col", "colgroup", "tbody", "td", "tfoot", "th", | ||
| 2207 | "thead", "tr"), self.startTagTableOther) | ||
| 2208 | ]) | ||
| 2209 | self.startTagHandler.default = self.startTagOther | ||
| 2210 | |||
| 2211 | self.endTagHandler = _utils.MethodDispatcher([ | ||
| 2212 | (("td", "th"), self.endTagTableCell), | ||
| 2213 | (("body", "caption", "col", "colgroup", "html"), self.endTagIgnore), | ||
| 2214 | (("table", "tbody", "tfoot", "thead", "tr"), self.endTagImply) | ||
| 2215 | ]) | ||
| 2216 | self.endTagHandler.default = self.endTagOther | ||
| 2217 | |||
| 2218 | # helper | ||
| 2219 | def closeCell(self): | ||
| 2220 | if self.tree.elementInScope("td", variant="table"): | ||
| 2221 | self.endTagTableCell(impliedTagToken("td")) | ||
| 2222 | elif self.tree.elementInScope("th", variant="table"): | ||
| 2223 | self.endTagTableCell(impliedTagToken("th")) | ||
| 2224 | |||
| 2225 | # the rest | ||
| 2226 | def processEOF(self): | ||
| 2227 | self.parser.phases["inBody"].processEOF() | ||
| 2228 | |||
| 2229 | def processCharacters(self, token): | ||
| 2230 | return self.parser.phases["inBody"].processCharacters(token) | ||
| 2231 | |||
| 2232 | def startTagTableOther(self, token): | ||
| 2233 | if (self.tree.elementInScope("td", variant="table") or | ||
| 2234 | self.tree.elementInScope("th", variant="table")): | ||
| 2235 | self.closeCell() | ||
| 2236 | return token | ||
| 2237 | else: | ||
| 2238 | # innerHTML case | ||
| 2239 | assert self.parser.innerHTML | ||
| 2240 | self.parser.parseError() | ||
| 2241 | |||
| 2242 | def startTagOther(self, token): | ||
| 2243 | return self.parser.phases["inBody"].processStartTag(token) | ||
| 2244 | |||
| 2245 | def endTagTableCell(self, token): | ||
| 2246 | if self.tree.elementInScope(token["name"], variant="table"): | ||
| 2247 | self.tree.generateImpliedEndTags(token["name"]) | ||
| 2248 | if self.tree.openElements[-1].name != token["name"]: | ||
| 2249 | self.parser.parseError("unexpected-cell-end-tag", | ||
| 2250 | {"name": token["name"]}) | ||
| 2251 | while True: | ||
| 2252 | node = self.tree.openElements.pop() | ||
| 2253 | if node.name == token["name"]: | ||
| 2254 | break | ||
| 2255 | else: | ||
| 2256 | self.tree.openElements.pop() | ||
| 2257 | self.tree.clearActiveFormattingElements() | ||
| 2258 | self.parser.phase = self.parser.phases["inRow"] | ||
| 2259 | else: | ||
| 2260 | self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) | ||
| 2261 | |||
| 2262 | def endTagIgnore(self, token): | ||
| 2263 | self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) | ||
| 2264 | |||
| 2265 | def endTagImply(self, token): | ||
| 2266 | if self.tree.elementInScope(token["name"], variant="table"): | ||
| 2267 | self.closeCell() | ||
| 2268 | return token | ||
| 2269 | else: | ||
| 2270 | # sometimes innerHTML case | ||
| 2271 | self.parser.parseError() | ||
| 2272 | |||
| 2273 | def endTagOther(self, token): | ||
| 2274 | return self.parser.phases["inBody"].processEndTag(token) | ||
| 2275 | |||
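The table-body, row, and cell phases above cooperate to supply the <tbody> and <tr> elements that the markup may omit. A hedged illustration of the observable effect (not part of this file; assumes the vendored html5lib is importable):

    # Illustration only: a bare <td> inside <table> gains implied
    # <tbody> and <tr> ancestors via the phases defined above.
    from pip._vendor import html5lib

    tree = html5lib.parse('<table><td>cell</td></table>',
                          namespaceHTMLElements=False)
    print([el.tag for el in tree.iter()])
    # ['html', 'head', 'body', 'table', 'tbody', 'tr', 'td']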
| 2276 | class InSelectPhase(Phase): | ||
| 2277 | def __init__(self, parser, tree): | ||
| 2278 | Phase.__init__(self, parser, tree) | ||
| 2279 | |||
| 2280 | self.startTagHandler = _utils.MethodDispatcher([ | ||
| 2281 | ("html", self.startTagHtml), | ||
| 2282 | ("option", self.startTagOption), | ||
| 2283 | ("optgroup", self.startTagOptgroup), | ||
| 2284 | ("select", self.startTagSelect), | ||
| 2285 | (("input", "keygen", "textarea"), self.startTagInput), | ||
| 2286 | ("script", self.startTagScript) | ||
| 2287 | ]) | ||
| 2288 | self.startTagHandler.default = self.startTagOther | ||
| 2289 | |||
| 2290 | self.endTagHandler = _utils.MethodDispatcher([ | ||
| 2291 | ("option", self.endTagOption), | ||
| 2292 | ("optgroup", self.endTagOptgroup), | ||
| 2293 | ("select", self.endTagSelect) | ||
| 2294 | ]) | ||
| 2295 | self.endTagHandler.default = self.endTagOther | ||
| 2296 | |||
| 2297 | # http://www.whatwg.org/specs/web-apps/current-work/#in-select | ||
| 2298 | def processEOF(self): | ||
| 2299 | if self.tree.openElements[-1].name != "html": | ||
| 2300 | self.parser.parseError("eof-in-select") | ||
| 2301 | else: | ||
| 2302 | assert self.parser.innerHTML | ||
| 2303 | |||
| 2304 | def processCharacters(self, token): | ||
| 2305 | if token["data"] == "\u0000": | ||
| 2306 | return | ||
| 2307 | self.tree.insertText(token["data"]) | ||
| 2308 | |||
| 2309 | def startTagOption(self, token): | ||
| 2310 | # We need to imply </option> if <option> is the current node. | ||
| 2311 | if self.tree.openElements[-1].name == "option": | ||
| 2312 | self.tree.openElements.pop() | ||
| 2313 | self.tree.insertElement(token) | ||
| 2314 | |||
| 2315 | def startTagOptgroup(self, token): | ||
| 2316 | if self.tree.openElements[-1].name == "option": | ||
| 2317 | self.tree.openElements.pop() | ||
| 2318 | if self.tree.openElements[-1].name == "optgroup": | ||
| 2319 | self.tree.openElements.pop() | ||
| 2320 | self.tree.insertElement(token) | ||
| 2321 | |||
| 2322 | def startTagSelect(self, token): | ||
| 2323 | self.parser.parseError("unexpected-select-in-select") | ||
| 2324 | self.endTagSelect(impliedTagToken("select")) | ||
| 2325 | |||
| 2326 | def startTagInput(self, token): | ||
| 2327 | self.parser.parseError("unexpected-input-in-select") | ||
| 2328 | if self.tree.elementInScope("select", variant="select"): | ||
| 2329 | self.endTagSelect(impliedTagToken("select")) | ||
| 2330 | return token | ||
| 2331 | else: | ||
| 2332 | assert self.parser.innerHTML | ||
| 2333 | |||
| 2334 | def startTagScript(self, token): | ||
| 2335 | return self.parser.phases["inHead"].processStartTag(token) | ||
| 2336 | |||
| 2337 | def startTagOther(self, token): | ||
| 2338 | self.parser.parseError("unexpected-start-tag-in-select", | ||
| 2339 | {"name": token["name"]}) | ||
| 2340 | |||
| 2341 | def endTagOption(self, token): | ||
| 2342 | if self.tree.openElements[-1].name == "option": | ||
| 2343 | self.tree.openElements.pop() | ||
| 2344 | else: | ||
| 2345 | self.parser.parseError("unexpected-end-tag-in-select", | ||
| 2346 | {"name": "option"}) | ||
| 2347 | |||
| 2348 | def endTagOptgroup(self, token): | ||
| 2349 | # </optgroup> implicitly closes <option> | ||
| 2350 | if (self.tree.openElements[-1].name == "option" and | ||
| 2351 | self.tree.openElements[-2].name == "optgroup"): | ||
| 2352 | self.tree.openElements.pop() | ||
| 2353 | # It also closes </optgroup> | ||
| 2354 | if self.tree.openElements[-1].name == "optgroup": | ||
| 2355 | self.tree.openElements.pop() | ||
| 2356 | # But nothing else | ||
| 2357 | else: | ||
| 2358 | self.parser.parseError("unexpected-end-tag-in-select", | ||
| 2359 | {"name": "optgroup"}) | ||
| 2360 | |||
| 2361 | def endTagSelect(self, token): | ||
| 2362 | if self.tree.elementInScope("select", variant="select"): | ||
| 2363 | node = self.tree.openElements.pop() | ||
| 2364 | while node.name != "select": | ||
| 2365 | node = self.tree.openElements.pop() | ||
| 2366 | self.parser.resetInsertionMode() | ||
| 2367 | else: | ||
| 2368 | # innerHTML case | ||
| 2369 | assert self.parser.innerHTML | ||
| 2370 | self.parser.parseError() | ||
| 2371 | |||
| 2372 | def endTagOther(self, token): | ||
| 2373 | self.parser.parseError("unexpected-end-tag-in-select", | ||
| 2374 | {"name": token["name"]}) | ||
| 2375 | |||
| 2376 | class InSelectInTablePhase(Phase): | ||
| 2377 | def __init__(self, parser, tree): | ||
| 2378 | Phase.__init__(self, parser, tree) | ||
| 2379 | |||
| 2380 | self.startTagHandler = _utils.MethodDispatcher([ | ||
| 2381 | (("caption", "table", "tbody", "tfoot", "thead", "tr", "td", "th"), | ||
| 2382 | self.startTagTable) | ||
| 2383 | ]) | ||
| 2384 | self.startTagHandler.default = self.startTagOther | ||
| 2385 | |||
| 2386 | self.endTagHandler = _utils.MethodDispatcher([ | ||
| 2387 | (("caption", "table", "tbody", "tfoot", "thead", "tr", "td", "th"), | ||
| 2388 | self.endTagTable) | ||
| 2389 | ]) | ||
| 2390 | self.endTagHandler.default = self.endTagOther | ||
| 2391 | |||
| 2392 | def processEOF(self): | ||
| 2393 | self.parser.phases["inSelect"].processEOF() | ||
| 2394 | |||
| 2395 | def processCharacters(self, token): | ||
| 2396 | return self.parser.phases["inSelect"].processCharacters(token) | ||
| 2397 | |||
| 2398 | def startTagTable(self, token): | ||
| 2399 | self.parser.parseError("unexpected-table-element-start-tag-in-select-in-table", {"name": token["name"]}) | ||
| 2400 | self.endTagOther(impliedTagToken("select")) | ||
| 2401 | return token | ||
| 2402 | |||
| 2403 | def startTagOther(self, token): | ||
| 2404 | return self.parser.phases["inSelect"].processStartTag(token) | ||
| 2405 | |||
| 2406 | def endTagTable(self, token): | ||
| 2407 | self.parser.parseError("unexpected-table-element-end-tag-in-select-in-table", {"name": token["name"]}) | ||
| 2408 | if self.tree.elementInScope(token["name"], variant="table"): | ||
| 2409 | self.endTagOther(impliedTagToken("select")) | ||
| 2410 | return token | ||
| 2411 | |||
| 2412 | def endTagOther(self, token): | ||
| 2413 | return self.parser.phases["inSelect"].processEndTag(token) | ||
| 2414 | |||
| 2415 | class InForeignContentPhase(Phase): | ||
| 2416 | breakoutElements = frozenset(["b", "big", "blockquote", "body", "br", | ||
| 2417 | "center", "code", "dd", "div", "dl", "dt", | ||
| 2418 | "em", "embed", "h1", "h2", "h3", | ||
| 2419 | "h4", "h5", "h6", "head", "hr", "i", "img", | ||
| 2420 | "li", "listing", "menu", "meta", "nobr", | ||
| 2421 | "ol", "p", "pre", "ruby", "s", "small", | ||
| 2422 | "span", "strong", "strike", "sub", "sup", | ||
| 2423 | "table", "tt", "u", "ul", "var"]) | ||
| 2424 | |||
| 2425 | def __init__(self, parser, tree): | ||
| 2426 | Phase.__init__(self, parser, tree) | ||
| 2427 | |||
| 2428 | def adjustSVGTagNames(self, token): | ||
| 2429 | replacements = {"altglyph": "altGlyph", | ||
| 2430 | "altglyphdef": "altGlyphDef", | ||
| 2431 | "altglyphitem": "altGlyphItem", | ||
| 2432 | "animatecolor": "animateColor", | ||
| 2433 | "animatemotion": "animateMotion", | ||
| 2434 | "animatetransform": "animateTransform", | ||
| 2435 | "clippath": "clipPath", | ||
| 2436 | "feblend": "feBlend", | ||
| 2437 | "fecolormatrix": "feColorMatrix", | ||
| 2438 | "fecomponenttransfer": "feComponentTransfer", | ||
| 2439 | "fecomposite": "feComposite", | ||
| 2440 | "feconvolvematrix": "feConvolveMatrix", | ||
| 2441 | "fediffuselighting": "feDiffuseLighting", | ||
| 2442 | "fedisplacementmap": "feDisplacementMap", | ||
| 2443 | "fedistantlight": "feDistantLight", | ||
| 2444 | "feflood": "feFlood", | ||
| 2445 | "fefunca": "feFuncA", | ||
| 2446 | "fefuncb": "feFuncB", | ||
| 2447 | "fefuncg": "feFuncG", | ||
| 2448 | "fefuncr": "feFuncR", | ||
| 2449 | "fegaussianblur": "feGaussianBlur", | ||
| 2450 | "feimage": "feImage", | ||
| 2451 | "femerge": "feMerge", | ||
| 2452 | "femergenode": "feMergeNode", | ||
| 2453 | "femorphology": "feMorphology", | ||
| 2454 | "feoffset": "feOffset", | ||
| 2455 | "fepointlight": "fePointLight", | ||
| 2456 | "fespecularlighting": "feSpecularLighting", | ||
| 2457 | "fespotlight": "feSpotLight", | ||
| 2458 | "fetile": "feTile", | ||
| 2459 | "feturbulence": "feTurbulence", | ||
| 2460 | "foreignobject": "foreignObject", | ||
| 2461 | "glyphref": "glyphRef", | ||
| 2462 | "lineargradient": "linearGradient", | ||
| 2463 | "radialgradient": "radialGradient", | ||
| 2464 | "textpath": "textPath"} | ||
| 2465 | |||
| 2466 | if token["name"] in replacements: | ||
| 2467 | token["name"] = replacements[token["name"]] | ||
| 2468 | |||
| 2469 | def processCharacters(self, token): | ||
| 2470 | if token["data"] == "\u0000": | ||
| 2471 | token["data"] = "\uFFFD" | ||
| 2472 | elif (self.parser.framesetOK and | ||
| 2473 | any(char not in spaceCharacters for char in token["data"])): | ||
| 2474 | self.parser.framesetOK = False | ||
| 2475 | Phase.processCharacters(self, token) | ||
| 2476 | |||
| 2477 | def processStartTag(self, token): | ||
| 2478 | currentNode = self.tree.openElements[-1] | ||
| 2479 | if (token["name"] in self.breakoutElements or | ||
| 2480 | (token["name"] == "font" and | ||
| 2481 | set(token["data"].keys()) & set(["color", "face", "size"]))): | ||
| 2482 | self.parser.parseError("unexpected-html-element-in-foreign-content", | ||
| 2483 | {"name": token["name"]}) | ||
| 2484 | while (self.tree.openElements[-1].namespace != | ||
| 2485 | self.tree.defaultNamespace and | ||
| 2486 | not self.parser.isHTMLIntegrationPoint(self.tree.openElements[-1]) and | ||
| 2487 | not self.parser.isMathMLTextIntegrationPoint(self.tree.openElements[-1])): | ||
| 2488 | self.tree.openElements.pop() | ||
| 2489 | return token | ||
| 2490 | |||
| 2491 | else: | ||
| 2492 | if currentNode.namespace == namespaces["mathml"]: | ||
| 2493 | self.parser.adjustMathMLAttributes(token) | ||
| 2494 | elif currentNode.namespace == namespaces["svg"]: | ||
| 2495 | self.adjustSVGTagNames(token) | ||
| 2496 | self.parser.adjustSVGAttributes(token) | ||
| 2497 | self.parser.adjustForeignAttributes(token) | ||
| 2498 | token["namespace"] = currentNode.namespace | ||
| 2499 | self.tree.insertElement(token) | ||
| 2500 | if token["selfClosing"]: | ||
| 2501 | self.tree.openElements.pop() | ||
| 2502 | token["selfClosingAcknowledged"] = True | ||
| 2503 | |||
| 2504 | def processEndTag(self, token): | ||
| 2505 | nodeIndex = len(self.tree.openElements) - 1 | ||
| 2506 | node = self.tree.openElements[-1] | ||
| 2507 | if node.name.translate(asciiUpper2Lower) != token["name"]: | ||
| 2508 | self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) | ||
| 2509 | |||
| 2510 | while True: | ||
| 2511 | if node.name.translate(asciiUpper2Lower) == token["name"]: | ||
| 2512 | # XXX this isn't in the spec but it seems necessary | ||
| 2513 | if self.parser.phase == self.parser.phases["inTableText"]: | ||
| 2514 | self.parser.phase.flushCharacters() | ||
| 2515 | self.parser.phase = self.parser.phase.originalPhase | ||
| 2516 | while self.tree.openElements.pop() != node: | ||
| 2517 | assert self.tree.openElements | ||
| 2518 | new_token = None | ||
| 2519 | break | ||
| 2520 | nodeIndex -= 1 | ||
| 2521 | |||
| 2522 | node = self.tree.openElements[nodeIndex] | ||
| 2523 | if node.namespace != self.tree.defaultNamespace: | ||
| 2524 | continue | ||
| 2525 | else: | ||
| 2526 | new_token = self.parser.phase.processEndTag(token) | ||
| 2527 | break | ||
| 2528 | return new_token | ||
| 2529 | |||
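The breakoutElements set above implements the spec's breakout rule: when one of these HTML start tags appears inside SVG or MathML content, the foreign elements are popped before the tag is reprocessed as plain HTML. A hedged sketch of the effect (illustration only):

    # Illustration only: <div> is a breakout element, so it closes the
    # <svg> subtree and becomes its sibling; SVG keeps its namespace.
    from pip._vendor import html5lib

    tree = html5lib.parse('<svg><div>x</div>', namespaceHTMLElements=False)
    print([el.tag for el in tree.iter()])
    # ['html', 'head', 'body', '{http://www.w3.org/2000/svg}svg', 'div']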
| 2530 | class AfterBodyPhase(Phase): | ||
| 2531 | def __init__(self, parser, tree): | ||
| 2532 | Phase.__init__(self, parser, tree) | ||
| 2533 | |||
| 2534 | self.startTagHandler = _utils.MethodDispatcher([ | ||
| 2535 | ("html", self.startTagHtml) | ||
| 2536 | ]) | ||
| 2537 | self.startTagHandler.default = self.startTagOther | ||
| 2538 | |||
| 2539 | self.endTagHandler = _utils.MethodDispatcher([("html", self.endTagHtml)]) | ||
| 2540 | self.endTagHandler.default = self.endTagOther | ||
| 2541 | |||
| 2542 | def processEOF(self): | ||
| 2543 | # Stop parsing | ||
| 2544 | pass | ||
| 2545 | |||
| 2546 | def processComment(self, token): | ||
| 2547 | # This is needed because data is to be appended to the <html> element | ||
| 2548 | # here and not to whatever is currently open. | ||
| 2549 | self.tree.insertComment(token, self.tree.openElements[0]) | ||
| 2550 | |||
| 2551 | def processCharacters(self, token): | ||
| 2552 | self.parser.parseError("unexpected-char-after-body") | ||
| 2553 | self.parser.phase = self.parser.phases["inBody"] | ||
| 2554 | return token | ||
| 2555 | |||
| 2556 | def startTagHtml(self, token): | ||
| 2557 | return self.parser.phases["inBody"].processStartTag(token) | ||
| 2558 | |||
| 2559 | def startTagOther(self, token): | ||
| 2560 | self.parser.parseError("unexpected-start-tag-after-body", | ||
| 2561 | {"name": token["name"]}) | ||
| 2562 | self.parser.phase = self.parser.phases["inBody"] | ||
| 2563 | return token | ||
| 2564 | |||
| 2565 | def endTagHtml(self, name): | ||
| 2566 | if self.parser.innerHTML: | ||
| 2567 | self.parser.parseError("unexpected-end-tag-after-body-innerhtml") | ||
| 2568 | else: | ||
| 2569 | self.parser.phase = self.parser.phases["afterAfterBody"] | ||
| 2570 | |||
| 2571 | def endTagOther(self, token): | ||
| 2572 | self.parser.parseError("unexpected-end-tag-after-body", | ||
| 2573 | {"name": token["name"]}) | ||
| 2574 | self.parser.phase = self.parser.phases["inBody"] | ||
| 2575 | return token | ||
| 2576 | |||
| 2577 | class InFramesetPhase(Phase): | ||
| 2578 | # http://www.whatwg.org/specs/web-apps/current-work/#in-frameset | ||
| 2579 | def __init__(self, parser, tree): | ||
| 2580 | Phase.__init__(self, parser, tree) | ||
| 2581 | |||
| 2582 | self.startTagHandler = _utils.MethodDispatcher([ | ||
| 2583 | ("html", self.startTagHtml), | ||
| 2584 | ("frameset", self.startTagFrameset), | ||
| 2585 | ("frame", self.startTagFrame), | ||
| 2586 | ("noframes", self.startTagNoframes) | ||
| 2587 | ]) | ||
| 2588 | self.startTagHandler.default = self.startTagOther | ||
| 2589 | |||
| 2590 | self.endTagHandler = _utils.MethodDispatcher([ | ||
| 2591 | ("frameset", self.endTagFrameset) | ||
| 2592 | ]) | ||
| 2593 | self.endTagHandler.default = self.endTagOther | ||
| 2594 | |||
| 2595 | def processEOF(self): | ||
| 2596 | if self.tree.openElements[-1].name != "html": | ||
| 2597 | self.parser.parseError("eof-in-frameset") | ||
| 2598 | else: | ||
| 2599 | assert self.parser.innerHTML | ||
| 2600 | |||
| 2601 | def processCharacters(self, token): | ||
| 2602 | self.parser.parseError("unexpected-char-in-frameset") | ||
| 2603 | |||
| 2604 | def startTagFrameset(self, token): | ||
| 2605 | self.tree.insertElement(token) | ||
| 2606 | |||
| 2607 | def startTagFrame(self, token): | ||
| 2608 | self.tree.insertElement(token) | ||
| 2609 | self.tree.openElements.pop() | ||
| 2610 | |||
| 2611 | def startTagNoframes(self, token): | ||
| 2612 | return self.parser.phases["inBody"].processStartTag(token) | ||
| 2613 | |||
| 2614 | def startTagOther(self, token): | ||
| 2615 | self.parser.parseError("unexpected-start-tag-in-frameset", | ||
| 2616 | {"name": token["name"]}) | ||
| 2617 | |||
| 2618 | def endTagFrameset(self, token): | ||
| 2619 | if self.tree.openElements[-1].name == "html": | ||
| 2620 | # innerHTML case | ||
| 2621 | self.parser.parseError("unexpected-frameset-in-frameset-innerhtml") | ||
| 2622 | else: | ||
| 2623 | self.tree.openElements.pop() | ||
| 2624 | if (not self.parser.innerHTML and | ||
| 2625 | self.tree.openElements[-1].name != "frameset"): | ||
| 2626 | # If we're not in innerHTML mode and the current node is not a | ||
| 2627 | # "frameset" element (anymore) then switch. | ||
| 2628 | self.parser.phase = self.parser.phases["afterFrameset"] | ||
| 2629 | |||
| 2630 | def endTagOther(self, token): | ||
| 2631 | self.parser.parseError("unexpected-end-tag-in-frameset", | ||
| 2632 | {"name": token["name"]}) | ||
| 2633 | |||
| 2634 | class AfterFramesetPhase(Phase): | ||
| 2635 | # http://www.whatwg.org/specs/web-apps/current-work/#after3 | ||
| 2636 | def __init__(self, parser, tree): | ||
| 2637 | Phase.__init__(self, parser, tree) | ||
| 2638 | |||
| 2639 | self.startTagHandler = _utils.MethodDispatcher([ | ||
| 2640 | ("html", self.startTagHtml), | ||
| 2641 | ("noframes", self.startTagNoframes) | ||
| 2642 | ]) | ||
| 2643 | self.startTagHandler.default = self.startTagOther | ||
| 2644 | |||
| 2645 | self.endTagHandler = _utils.MethodDispatcher([ | ||
| 2646 | ("html", self.endTagHtml) | ||
| 2647 | ]) | ||
| 2648 | self.endTagHandler.default = self.endTagOther | ||
| 2649 | |||
| 2650 | def processEOF(self): | ||
| 2651 | # Stop parsing | ||
| 2652 | pass | ||
| 2653 | |||
| 2654 | def processCharacters(self, token): | ||
| 2655 | self.parser.parseError("unexpected-char-after-frameset") | ||
| 2656 | |||
| 2657 | def startTagNoframes(self, token): | ||
| 2658 | return self.parser.phases["inHead"].processStartTag(token) | ||
| 2659 | |||
| 2660 | def startTagOther(self, token): | ||
| 2661 | self.parser.parseError("unexpected-start-tag-after-frameset", | ||
| 2662 | {"name": token["name"]}) | ||
| 2663 | |||
| 2664 | def endTagHtml(self, token): | ||
| 2665 | self.parser.phase = self.parser.phases["afterAfterFrameset"] | ||
| 2666 | |||
| 2667 | def endTagOther(self, token): | ||
| 2668 | self.parser.parseError("unexpected-end-tag-after-frameset", | ||
| 2669 | {"name": token["name"]}) | ||
| 2670 | |||
| 2671 | class AfterAfterBodyPhase(Phase): | ||
| 2672 | def __init__(self, parser, tree): | ||
| 2673 | Phase.__init__(self, parser, tree) | ||
| 2674 | |||
| 2675 | self.startTagHandler = _utils.MethodDispatcher([ | ||
| 2676 | ("html", self.startTagHtml) | ||
| 2677 | ]) | ||
| 2678 | self.startTagHandler.default = self.startTagOther | ||
| 2679 | |||
| 2680 | def processEOF(self): | ||
| 2681 | pass | ||
| 2682 | |||
| 2683 | def processComment(self, token): | ||
| 2684 | self.tree.insertComment(token, self.tree.document) | ||
| 2685 | |||
| 2686 | def processSpaceCharacters(self, token): | ||
| 2687 | return self.parser.phases["inBody"].processSpaceCharacters(token) | ||
| 2688 | |||
| 2689 | def processCharacters(self, token): | ||
| 2690 | self.parser.parseError("expected-eof-but-got-char") | ||
| 2691 | self.parser.phase = self.parser.phases["inBody"] | ||
| 2692 | return token | ||
| 2693 | |||
| 2694 | def startTagHtml(self, token): | ||
| 2695 | return self.parser.phases["inBody"].processStartTag(token) | ||
| 2696 | |||
| 2697 | def startTagOther(self, token): | ||
| 2698 | self.parser.parseError("expected-eof-but-got-start-tag", | ||
| 2699 | {"name": token["name"]}) | ||
| 2700 | self.parser.phase = self.parser.phases["inBody"] | ||
| 2701 | return token | ||
| 2702 | |||
| 2703 | def processEndTag(self, token): | ||
| 2704 | self.parser.parseError("expected-eof-but-got-end-tag", | ||
| 2705 | {"name": token["name"]}) | ||
| 2706 | self.parser.phase = self.parser.phases["inBody"] | ||
| 2707 | return token | ||
| 2708 | |||
| 2709 | class AfterAfterFramesetPhase(Phase): | ||
| 2710 | def __init__(self, parser, tree): | ||
| 2711 | Phase.__init__(self, parser, tree) | ||
| 2712 | |||
| 2713 | self.startTagHandler = _utils.MethodDispatcher([ | ||
| 2714 | ("html", self.startTagHtml), | ||
| 2715 | ("noframes", self.startTagNoFrames) | ||
| 2716 | ]) | ||
| 2717 | self.startTagHandler.default = self.startTagOther | ||
| 2718 | |||
| 2719 | def processEOF(self): | ||
| 2720 | pass | ||
| 2721 | |||
| 2722 | def processComment(self, token): | ||
| 2723 | self.tree.insertComment(token, self.tree.document) | ||
| 2724 | |||
| 2725 | def processSpaceCharacters(self, token): | ||
| 2726 | return self.parser.phases["inBody"].processSpaceCharacters(token) | ||
| 2727 | |||
| 2728 | def processCharacters(self, token): | ||
| 2729 | self.parser.parseError("expected-eof-but-got-char") | ||
| 2730 | |||
| 2731 | def startTagHtml(self, token): | ||
| 2732 | return self.parser.phases["inBody"].processStartTag(token) | ||
| 2733 | |||
| 2734 | def startTagNoFrames(self, token): | ||
| 2735 | return self.parser.phases["inHead"].processStartTag(token) | ||
| 2736 | |||
| 2737 | def startTagOther(self, token): | ||
| 2738 | self.parser.parseError("expected-eof-but-got-start-tag", | ||
| 2739 | {"name": token["name"]}) | ||
| 2740 | |||
| 2741 | def processEndTag(self, token): | ||
| 2742 | self.parser.parseError("expected-eof-but-got-end-tag", | ||
| 2743 | {"name": token["name"]}) | ||
| 2744 | # pylint:enable=unused-argument | ||
| 2745 | |||
| 2746 | return { | ||
| 2747 | "initial": InitialPhase, | ||
| 2748 | "beforeHtml": BeforeHtmlPhase, | ||
| 2749 | "beforeHead": BeforeHeadPhase, | ||
| 2750 | "inHead": InHeadPhase, | ||
| 2751 | "inHeadNoscript": InHeadNoscriptPhase, | ||
| 2752 | "afterHead": AfterHeadPhase, | ||
| 2753 | "inBody": InBodyPhase, | ||
| 2754 | "text": TextPhase, | ||
| 2755 | "inTable": InTablePhase, | ||
| 2756 | "inTableText": InTableTextPhase, | ||
| 2757 | "inCaption": InCaptionPhase, | ||
| 2758 | "inColumnGroup": InColumnGroupPhase, | ||
| 2759 | "inTableBody": InTableBodyPhase, | ||
| 2760 | "inRow": InRowPhase, | ||
| 2761 | "inCell": InCellPhase, | ||
| 2762 | "inSelect": InSelectPhase, | ||
| 2763 | "inSelectInTable": InSelectInTablePhase, | ||
| 2764 | "inForeignContent": InForeignContentPhase, | ||
| 2765 | "afterBody": AfterBodyPhase, | ||
| 2766 | "inFrameset": InFramesetPhase, | ||
| 2767 | "afterFrameset": AfterFramesetPhase, | ||
| 2768 | "afterAfterBody": AfterAfterBodyPhase, | ||
| 2769 | "afterAfterFrameset": AfterAfterFramesetPhase, | ||
| 2770 | # XXX after after frameset | ||
| 2771 | } | ||
| 2772 | |||
| 2773 | |||
| 2774 | def adjust_attributes(token, replacements): | ||
| 2775 | needs_adjustment = viewkeys(token['data']) & viewkeys(replacements) | ||
| 2776 | if needs_adjustment: | ||
| 2777 | token['data'] = OrderedDict((replacements.get(k, k), v) | ||
| 2778 | for k, v in token['data'].items()) | ||
| 2779 | |||
| 2780 | |||
| 2781 | def impliedTagToken(name, type="EndTag", attributes=None, | ||
| 2782 | selfClosing=False): | ||
| 2783 | if attributes is None: | ||
| 2784 | attributes = {} | ||
| 2785 | return {"type": tokenTypes[type], "name": name, "data": attributes, | ||
| 2786 | "selfClosing": selfClosing} | ||
| 2787 | |||
| 2788 | |||
| 2789 | class ParseError(Exception): | ||
| 2790 | """Error in parsed document""" | ||
| 2791 | pass | ||
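impliedTagToken is the factory the phases above use to synthesize tags the markup never contained (the implied </caption>, <tr>, </select>, and so on). A hedged illustration of the token it returns:

    # Illustration only: an implied end tag for <caption>.
    token = impliedTagToken("caption")
    # {"type": tokenTypes["EndTag"], "name": "caption", "data": {},
    #  "selfClosing": False}; pass type="StartTag" for an implied start
    # tag, as startTagTableCell does when it implies a <tr>.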
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/serializer.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/serializer.py new file mode 100644 index 0000000..641323e --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/serializer.py | |||
| @@ -0,0 +1,409 @@ | |||
| 1 | from __future__ import absolute_import, division, unicode_literals | ||
| 2 | from pip._vendor.six import text_type | ||
| 3 | |||
| 4 | import re | ||
| 5 | |||
| 6 | from codecs import register_error, xmlcharrefreplace_errors | ||
| 7 | |||
| 8 | from .constants import voidElements, booleanAttributes, spaceCharacters | ||
| 9 | from .constants import rcdataElements, entities, xmlEntities | ||
| 10 | from . import treewalkers, _utils | ||
| 11 | from xml.sax.saxutils import escape | ||
| 12 | |||
| 13 | _quoteAttributeSpecChars = "".join(spaceCharacters) + "\"'=<>`" | ||
| 14 | _quoteAttributeSpec = re.compile("[" + _quoteAttributeSpecChars + "]") | ||
| 15 | _quoteAttributeLegacy = re.compile("[" + _quoteAttributeSpecChars + | ||
| 16 | "\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n" | ||
| 17 | "\x0b\x0c\r\x0e\x0f\x10\x11\x12\x13\x14\x15" | ||
| 18 | "\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" | ||
| 19 | "\x20\x2f\x60\xa0\u1680\u180e\u180f\u2000" | ||
| 20 | "\u2001\u2002\u2003\u2004\u2005\u2006\u2007" | ||
| 21 | "\u2008\u2009\u200a\u2028\u2029\u202f\u205f" | ||
| 22 | "\u3000]") | ||
| 23 | |||
| 24 | |||
| 25 | _encode_entity_map = {} | ||
| 26 | _is_ucs4 = len("\U0010FFFF") == 1 | ||
| 27 | for k, v in list(entities.items()): | ||
| 28 | # skip multi-character entities | ||
| 29 | if ((_is_ucs4 and len(v) > 1) or | ||
| 30 | (not _is_ucs4 and len(v) > 2)): | ||
| 31 | continue | ||
| 32 | if v != "&": | ||
| 33 | if len(v) == 2: | ||
| 34 | v = _utils.surrogatePairToCodepoint(v) | ||
| 35 | else: | ||
| 36 | v = ord(v) | ||
| 37 | if v not in _encode_entity_map or k.islower(): | ||
| 38 | # prefer &lt; over &LT; and similarly for &amp;, &gt;, etc. | ||
| 39 | _encode_entity_map[v] = k | ||
| 40 | |||
| 41 | |||
| 42 | def htmlentityreplace_errors(exc): | ||
| 43 | if isinstance(exc, (UnicodeEncodeError, UnicodeTranslateError)): | ||
| 44 | res = [] | ||
| 45 | codepoints = [] | ||
| 46 | skip = False | ||
| 47 | for i, c in enumerate(exc.object[exc.start:exc.end]): | ||
| 48 | if skip: | ||
| 49 | skip = False | ||
| 50 | continue | ||
| 51 | index = i + exc.start | ||
| 52 | if _utils.isSurrogatePair(exc.object[index:min([exc.end, index + 2])]): | ||
| 53 | codepoint = _utils.surrogatePairToCodepoint(exc.object[index:index + 2]) | ||
| 54 | skip = True | ||
| 55 | else: | ||
| 56 | codepoint = ord(c) | ||
| 57 | codepoints.append(codepoint) | ||
| 58 | for cp in codepoints: | ||
| 59 | e = _encode_entity_map.get(cp) | ||
| 60 | if e: | ||
| 61 | res.append("&") | ||
| 62 | res.append(e) | ||
| 63 | if not e.endswith(";"): | ||
| 64 | res.append(";") | ||
| 65 | else: | ||
| 66 | res.append("&#x%s;" % (hex(cp)[2:])) | ||
| 67 | return ("".join(res), exc.end) | ||
| 68 | else: | ||
| 69 | return xmlcharrefreplace_errors(exc) | ||
| 70 | |||
| 71 | |||
| 72 | register_error("htmlentityreplace", htmlentityreplace_errors) | ||
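The "htmlentityreplace" error handler registered above lets encoding fall back to character references instead of raising. A hedged example of the behavior:

    # Illustration only: characters the codec cannot represent become a
    # named entity when one exists, else a numeric character reference.
    print("2 \u2264 3".encode("ascii", "htmlentityreplace"))  # b'2 &le; 3'
    print("\u2603".encode("ascii", "htmlentityreplace"))      # b'&#x2603;'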
| 73 | |||
| 74 | |||
| 75 | def serialize(input, tree="etree", encoding=None, **serializer_opts): | ||
| 76 | """Serializes the input token stream using the specified treewalker | ||
| 77 | |||
| 78 | :arg input: the token stream to serialize | ||
| 79 | |||
| 80 | :arg tree: the treewalker to use | ||
| 81 | |||
| 82 | :arg encoding: the encoding to use | ||
| 83 | |||
| 84 | :arg serializer_opts: any options to pass to the | ||
| 85 | :py:class:`html5lib.serializer.HTMLSerializer` that gets created | ||
| 86 | |||
| 87 | :returns: the tree serialized as a string | ||
| 88 | |||
| 89 | Example: | ||
| 90 | |||
| 91 | >>> from html5lib.html5parser import parse | ||
| 92 | >>> from html5lib.serializer import serialize | ||
| 93 | >>> token_stream = parse('<html><body><p>Hi!</p></body></html>') | ||
| 94 | >>> serialize(token_stream, omit_optional_tags=False) | ||
| 95 | '<html><head></head><body><p>Hi!</p></body></html>' | ||
| 96 | |||
| 97 | """ | ||
| 98 | # XXX: Should we cache this? | ||
| 99 | walker = treewalkers.getTreeWalker(tree) | ||
| 100 | s = HTMLSerializer(**serializer_opts) | ||
| 101 | return s.render(walker(input), encoding) | ||
| 102 | |||
| 103 | |||
| 104 | class HTMLSerializer(object): | ||
| 105 | |||
| 106 | # attribute quoting options | ||
| 107 | quote_attr_values = "legacy" # be secure by default | ||
| 108 | quote_char = '"' | ||
| 109 | use_best_quote_char = True | ||
| 110 | |||
| 111 | # tag syntax options | ||
| 112 | omit_optional_tags = True | ||
| 113 | minimize_boolean_attributes = True | ||
| 114 | use_trailing_solidus = False | ||
| 115 | space_before_trailing_solidus = True | ||
| 116 | |||
| 117 | # escaping options | ||
| 118 | escape_lt_in_attrs = False | ||
| 119 | escape_rcdata = False | ||
| 120 | resolve_entities = True | ||
| 121 | |||
| 122 | # miscellaneous options | ||
| 123 | alphabetical_attributes = False | ||
| 124 | inject_meta_charset = True | ||
| 125 | strip_whitespace = False | ||
| 126 | sanitize = False | ||
| 127 | |||
| 128 | options = ("quote_attr_values", "quote_char", "use_best_quote_char", | ||
| 129 | "omit_optional_tags", "minimize_boolean_attributes", | ||
| 130 | "use_trailing_solidus", "space_before_trailing_solidus", | ||
| 131 | "escape_lt_in_attrs", "escape_rcdata", "resolve_entities", | ||
| 132 | "alphabetical_attributes", "inject_meta_charset", | ||
| 133 | "strip_whitespace", "sanitize") | ||
| 134 | |||
| 135 | def __init__(self, **kwargs): | ||
| 136 | """Initialize HTMLSerializer | ||
| 137 | |||
| 138 | :arg inject_meta_charset: Whether or not to inject the meta charset. | ||
| 139 | |||
| 140 | Defaults to ``True``. | ||
| 141 | |||
| 142 | :arg quote_attr_values: Whether to quote attribute values that don't | ||
| 143 | require quoting per legacy browser behavior (``"legacy"``), when | ||
| 144 | required by the standard (``"spec"``), or always (``"always"``). | ||
| 145 | |||
| 146 | Defaults to ``"legacy"``. | ||
| 147 | |||
| 148 | :arg quote_char: Use given quote character for attribute quoting. | ||
| 149 | |||
| 150 | Defaults to ``"`` which will use double quotes unless attribute | ||
| 151 | value contains a double quote, in which case single quotes are | ||
| 152 | used. | ||
| 153 | |||
| 154 | :arg escape_lt_in_attrs: Whether or not to escape ``<`` in attribute | ||
| 155 | values. | ||
| 156 | |||
| 157 | Defaults to ``False``. | ||
| 158 | |||
| 159 | :arg escape_rcdata: Whether to escape characters that need to be | ||
| 160 | escaped within normal elements within rcdata elements such as | ||
| 161 | style. | ||
| 162 | |||
| 163 | Defaults to ``False``. | ||
| 164 | |||
| 165 | :arg resolve_entities: Whether to resolve named character entities that | ||
| 166 | appear in the source tree. The XML predefined entities &lt; &gt; | ||
| 167 | &amp; &quot; &apos; are unaffected by this setting. | ||
| 168 | |||
| 169 | Defaults to ``True``. | ||
| 170 | |||
| 171 | :arg strip_whitespace: Whether to remove semantically meaningless | ||
| 172 | whitespace. (This compresses all whitespace to a single space | ||
| 173 | except within ``pre``.) | ||
| 174 | |||
| 175 | Defaults to ``False``. | ||
| 176 | |||
| 177 | :arg minimize_boolean_attributes: Shortens boolean attributes to give | ||
| 178 | just the attribute value, for example:: | ||
| 179 | |||
| 180 | <input disabled="disabled"> | ||
| 181 | |||
| 182 | becomes:: | ||
| 183 | |||
| 184 | <input disabled> | ||
| 185 | |||
| 186 | Defaults to ``True``. | ||
| 187 | |||
| 188 | :arg use_trailing_solidus: Includes a close-tag slash at the end of the | ||
| 189 | start tag of void elements (empty elements whose end tag is | ||
| 190 | forbidden). E.g. ``<hr/>``. | ||
| 191 | |||
| 192 | Defaults to ``False``. | ||
| 193 | |||
| 194 | :arg space_before_trailing_solidus: Places a space immediately before | ||
| 195 | the closing slash in a tag using a trailing solidus. E.g. | ||
| 196 | ``<hr />``. Requires ``use_trailing_solidus=True``. | ||
| 197 | |||
| 198 | Defaults to ``True``. | ||
| 199 | |||
| 200 | :arg sanitize: Strip all unsafe or unknown constructs from output. | ||
| 201 | See :py:class:`html5lib.filters.sanitizer.Filter`. | ||
| 202 | |||
| 203 | Defaults to ``False``. | ||
| 204 | |||
| 205 | :arg omit_optional_tags: Omit start/end tags that are optional. | ||
| 206 | |||
| 207 | Defaults to ``True``. | ||
| 208 | |||
| 209 | :arg alphabetical_attributes: Reorder attributes to be in alphabetical order. | ||
| 210 | |||
| 211 | Defaults to ``False``. | ||
| 212 | |||
| 213 | """ | ||
| 214 | unexpected_args = frozenset(kwargs) - frozenset(self.options) | ||
| 215 | if len(unexpected_args) > 0: | ||
| 216 | raise TypeError("__init__() got an unexpected keyword argument '%s'" % next(iter(unexpected_args))) | ||
| 217 | if 'quote_char' in kwargs: | ||
| 218 | self.use_best_quote_char = False | ||
| 219 | for attr in self.options: | ||
| 220 | setattr(self, attr, kwargs.get(attr, getattr(self, attr))) | ||
| 221 | self.errors = [] | ||
| 222 | self.strict = False | ||
| 223 | |||
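Taken together, these options trade compactness against explicitness. A hedged sketch of two of them (illustration only; assumes the vendored html5lib is importable):

    # Illustration only: boolean-attribute minimization and quoting.
    from pip._vendor import html5lib
    from pip._vendor.html5lib.serializer import HTMLSerializer

    tree = html5lib.parse('<input disabled="disabled">')
    walker = html5lib.getTreeWalker('etree')
    print(HTMLSerializer().render(walker(tree)))
    # '<input disabled>' -- minimized, optional tags omitted
    print(HTMLSerializer(minimize_boolean_attributes=False,
                         quote_attr_values='always').render(walker(tree)))
    # '<input disabled="disabled">'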
| 224 | def encode(self, string): | ||
| 225 | assert(isinstance(string, text_type)) | ||
| 226 | if self.encoding: | ||
| 227 | return string.encode(self.encoding, "htmlentityreplace") | ||
| 228 | else: | ||
| 229 | return string | ||
| 230 | |||
| 231 | def encodeStrict(self, string): | ||
| 232 | assert(isinstance(string, text_type)) | ||
| 233 | if self.encoding: | ||
| 234 | return string.encode(self.encoding, "strict") | ||
| 235 | else: | ||
| 236 | return string | ||
| 237 | |||
| 238 | def serialize(self, treewalker, encoding=None): | ||
| 239 | # pylint:disable=too-many-nested-blocks | ||
| 240 | self.encoding = encoding | ||
| 241 | in_cdata = False | ||
| 242 | self.errors = [] | ||
| 243 | |||
| 244 | if encoding and self.inject_meta_charset: | ||
| 245 | from .filters.inject_meta_charset import Filter | ||
| 246 | treewalker = Filter(treewalker, encoding) | ||
| 247 | # Alphabetical attributes is here under the assumption that none of | ||
| 248 | # the later filters add or change order of attributes; it needs to be | ||
| 249 | # before the sanitizer so escaped elements come out correctly | ||
| 250 | if self.alphabetical_attributes: | ||
| 251 | from .filters.alphabeticalattributes import Filter | ||
| 252 | treewalker = Filter(treewalker) | ||
| 253 | # WhitespaceFilter should be used before OptionalTagFilter | ||
| 254 | # for maximum efficiency of this latter filter | ||
| 255 | if self.strip_whitespace: | ||
| 256 | from .filters.whitespace import Filter | ||
| 257 | treewalker = Filter(treewalker) | ||
| 258 | if self.sanitize: | ||
| 259 | from .filters.sanitizer import Filter | ||
| 260 | treewalker = Filter(treewalker) | ||
| 261 | if self.omit_optional_tags: | ||
| 262 | from .filters.optionaltags import Filter | ||
| 263 | treewalker = Filter(treewalker) | ||
| 264 | |||
| 265 | for token in treewalker: | ||
| 266 | type = token["type"] | ||
| 267 | if type == "Doctype": | ||
| 268 | doctype = "<!DOCTYPE %s" % token["name"] | ||
| 269 | |||
| 270 | if token["publicId"]: | ||
| 271 | doctype += ' PUBLIC "%s"' % token["publicId"] | ||
| 272 | elif token["systemId"]: | ||
| 273 | doctype += " SYSTEM" | ||
| 274 | if token["systemId"]: | ||
| 275 | if token["systemId"].find('"') >= 0: | ||
| 276 | if token["systemId"].find("'") >= 0: | ||
| 277 | self.serializeError("System identifier contains both single and double quote characters") | ||
| 278 | quote_char = "'" | ||
| 279 | else: | ||
| 280 | quote_char = '"' | ||
| 281 | doctype += " %s%s%s" % (quote_char, token["systemId"], quote_char) | ||
| 282 | |||
| 283 | doctype += ">" | ||
| 284 | yield self.encodeStrict(doctype) | ||
| 285 | |||
| 286 | elif type in ("Characters", "SpaceCharacters"): | ||
| 287 | if type == "SpaceCharacters" or in_cdata: | ||
| 288 | if in_cdata and token["data"].find("</") >= 0: | ||
| 289 | self.serializeError("Unexpected </ in CDATA") | ||
| 290 | yield self.encode(token["data"]) | ||
| 291 | else: | ||
| 292 | yield self.encode(escape(token["data"])) | ||
| 293 | |||
| 294 | elif type in ("StartTag", "EmptyTag"): | ||
| 295 | name = token["name"] | ||
| 296 | yield self.encodeStrict("<%s" % name) | ||
| 297 | if name in rcdataElements and not self.escape_rcdata: | ||
| 298 | in_cdata = True | ||
| 299 | elif in_cdata: | ||
| 300 | self.serializeError("Unexpected child element of a CDATA element") | ||
| 301 | for (_, attr_name), attr_value in token["data"].items(): | ||
| 302 | # TODO: Add namespace support here | ||
| 303 | k = attr_name | ||
| 304 | v = attr_value | ||
| 305 | yield self.encodeStrict(' ') | ||
| 306 | |||
| 307 | yield self.encodeStrict(k) | ||
| 308 | if not self.minimize_boolean_attributes or \ | ||
| 309 | (k not in booleanAttributes.get(name, tuple()) and | ||
| 310 | k not in booleanAttributes.get("", tuple())): | ||
| 311 | yield self.encodeStrict("=") | ||
| 312 | if self.quote_attr_values == "always" or len(v) == 0: | ||
| 313 | quote_attr = True | ||
| 314 | elif self.quote_attr_values == "spec": | ||
| 315 | quote_attr = _quoteAttributeSpec.search(v) is not None | ||
| 316 | elif self.quote_attr_values == "legacy": | ||
| 317 | quote_attr = _quoteAttributeLegacy.search(v) is not None | ||
| 318 | else: | ||
| 319 | raise ValueError("quote_attr_values must be one of: " | ||
| 320 | "'always', 'spec', or 'legacy'") | ||
| 321 | v = v.replace("&", "&amp;") | ||
| 322 | if self.escape_lt_in_attrs: | ||
| 323 | v = v.replace("<", "&lt;") | ||
| 324 | if quote_attr: | ||
| 325 | quote_char = self.quote_char | ||
| 326 | if self.use_best_quote_char: | ||
| 327 | if "'" in v and '"' not in v: | ||
| 328 | quote_char = '"' | ||
| 329 | elif '"' in v and "'" not in v: | ||
| 330 | quote_char = "'" | ||
| 331 | if quote_char == "'": | ||
| 332 | v = v.replace("'", "&#39;") | ||
| 333 | else: | ||
| 334 | v = v.replace('"', "&quot;") | ||
| 335 | yield self.encodeStrict(quote_char) | ||
| 336 | yield self.encode(v) | ||
| 337 | yield self.encodeStrict(quote_char) | ||
| 338 | else: | ||
| 339 | yield self.encode(v) | ||
| 340 | if name in voidElements and self.use_trailing_solidus: | ||
| 341 | if self.space_before_trailing_solidus: | ||
| 342 | yield self.encodeStrict(" /") | ||
| 343 | else: | ||
| 344 | yield self.encodeStrict("/") | ||
| 345 | yield self.encode(">") | ||
| 346 | |||
| 347 | elif type == "EndTag": | ||
| 348 | name = token["name"] | ||
| 349 | if name in rcdataElements: | ||
| 350 | in_cdata = False | ||
| 351 | elif in_cdata: | ||
| 352 | self.serializeError("Unexpected child element of a CDATA element") | ||
| 353 | yield self.encodeStrict("</%s>" % name) | ||
| 354 | |||
| 355 | elif type == "Comment": | ||
| 356 | data = token["data"] | ||
| 357 | if data.find("--") >= 0: | ||
| 358 | self.serializeError("Comment contains --") | ||
| 359 | yield self.encodeStrict("<!--%s-->" % token["data"]) | ||
| 360 | |||
| 361 | elif type == "Entity": | ||
| 362 | name = token["name"] | ||
| 363 | key = name + ";" | ||
| 364 | if key not in entities: | ||
| 365 | self.serializeError("Entity %s not recognized" % name) | ||
| 366 | if self.resolve_entities and key not in xmlEntities: | ||
| 367 | data = entities[key] | ||
| 368 | else: | ||
| 369 | data = "&%s;" % name | ||
| 370 | yield self.encodeStrict(data) | ||
| 371 | |||
| 372 | else: | ||
| 373 | self.serializeError(token["data"]) | ||
| 374 | |||
| 375 | def render(self, treewalker, encoding=None): | ||
| 376 | """Serializes the stream from the treewalker into a string | ||
| 377 | |||
| 378 | :arg treewalker: the treewalker to serialize | ||
| 379 | |||
| 380 | :arg encoding: the string encoding to use | ||
| 381 | |||
| 382 | :returns: the serialized tree | ||
| 383 | |||
| 384 | Example: | ||
| 385 | |||
| 386 | >>> from html5lib import parse, getTreeWalker | ||
| 387 | >>> from html5lib.serializer import HTMLSerializer | ||
| 388 | >>> token_stream = parse('<html><body>Hi!</body></html>') | ||
| 389 | >>> walker = getTreeWalker('etree') | ||
| 390 | >>> serializer = HTMLSerializer(omit_optional_tags=False) | ||
| 391 | >>> serializer.render(walker(token_stream)) | ||
| 392 | '<html><head></head><body>Hi!</body></html>' | ||
| 393 | |||
| 394 | """ | ||
| 395 | if encoding: | ||
| 396 | return b"".join(list(self.serialize(treewalker, encoding))) | ||
| 397 | else: | ||
| 398 | return "".join(list(self.serialize(treewalker))) | ||
| 399 | |||
| 400 | def serializeError(self, data="XXX ERROR MESSAGE NEEDED"): | ||
| 401 | # XXX The idea is to make data mandatory. | ||
| 402 | self.errors.append(data) | ||
| 403 | if self.strict: | ||
| 404 | raise SerializeError | ||
| 405 | |||
| 406 | |||
| 407 | class SerializeError(Exception): | ||
| 408 | """Error in serialized tree""" | ||
| 409 | pass | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/treeadapters/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/treeadapters/__init__.py new file mode 100644 index 0000000..8767fb0 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/treeadapters/__init__.py | |||
| @@ -0,0 +1,30 @@ | |||
| 1 | """Tree adapters let you convert from one tree structure to another | ||
| 2 | |||
| 3 | Example: | ||
| 4 | |||
| 5 | .. code-block:: python | ||
| 6 | |||
| 7 | from pip._vendor import html5lib | ||
| 8 | from pip._vendor.html5lib.treeadapters import genshi | ||
| 9 | |||
| 10 | doc = '<html><body>Hi!</body></html>' | ||
| 11 | treebuilder = html5lib.getTreeBuilder('etree') | ||
| 12 | parser = html5lib.HTMLParser(tree=treebuilder) | ||
| 13 | tree = parser.parse(doc) | ||
| 14 | TreeWalker = html5lib.getTreeWalker('etree') | ||
| 15 | |||
| 16 | genshi_tree = genshi.to_genshi(TreeWalker(tree)) | ||
| 17 | |||
| 18 | """ | ||
| 19 | from __future__ import absolute_import, division, unicode_literals | ||
| 20 | |||
| 21 | from . import sax | ||
| 22 | |||
| 23 | __all__ = ["sax"] | ||
| 24 | |||
| 25 | try: | ||
| 26 | from . import genshi # noqa | ||
| 27 | except ImportError: | ||
| 28 | pass | ||
| 29 | else: | ||
| 30 | __all__.append("genshi") | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/treeadapters/genshi.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/treeadapters/genshi.py new file mode 100644 index 0000000..73c70c6 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/treeadapters/genshi.py | |||
| @@ -0,0 +1,54 @@ | |||
| 1 | from __future__ import absolute_import, division, unicode_literals | ||
| 2 | |||
| 3 | from genshi.core import QName, Attrs | ||
| 4 | from genshi.core import START, END, TEXT, COMMENT, DOCTYPE | ||
| 5 | |||
| 6 | |||
| 7 | def to_genshi(walker): | ||
| 8 | """Convert a tree to a genshi tree | ||
| 9 | |||
| 10 | :arg walker: the treewalker to use to walk the tree to convert it | ||
| 11 | |||
| 12 | :returns: generator of genshi nodes | ||
| 13 | |||
| 14 | """ | ||
| 15 | text = [] | ||
| 16 | for token in walker: | ||
| 17 | type = token["type"] | ||
| 18 | if type in ("Characters", "SpaceCharacters"): | ||
| 19 | text.append(token["data"]) | ||
| 20 | elif text: | ||
| 21 | yield TEXT, "".join(text), (None, -1, -1) | ||
| 22 | text = [] | ||
| 23 | |||
| 24 | if type in ("StartTag", "EmptyTag"): | ||
| 25 | if token["namespace"]: | ||
| 26 | name = "{%s}%s" % (token["namespace"], token["name"]) | ||
| 27 | else: | ||
| 28 | name = token["name"] | ||
| 29 | attrs = Attrs([(QName("{%s}%s" % attr if attr[0] is not None else attr[1]), value) | ||
| 30 | for attr, value in token["data"].items()]) | ||
| 31 | yield (START, (QName(name), attrs), (None, -1, -1)) | ||
| 32 | if type == "EmptyTag": | ||
| 33 | type = "EndTag" | ||
| 34 | |||
| 35 | if type == "EndTag": | ||
| 36 | if token["namespace"]: | ||
| 37 | name = "{%s}%s" % (token["namespace"], token["name"]) | ||
| 38 | else: | ||
| 39 | name = token["name"] | ||
| 40 | |||
| 41 | yield END, QName(name), (None, -1, -1) | ||
| 42 | |||
| 43 | elif type == "Comment": | ||
| 44 | yield COMMENT, token["data"], (None, -1, -1) | ||
| 45 | |||
| 46 | elif type == "Doctype": | ||
| 47 | yield DOCTYPE, (token["name"], token["publicId"], | ||
| 48 | token["systemId"]), (None, -1, -1) | ||
| 49 | |||
| 50 | else: | ||
| 51 | pass # FIXME: What to do? | ||
| 52 | |||
| 53 | if text: | ||
| 54 | yield TEXT, "".join(text), (None, -1, -1) | ||
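A short usage sketch for to_genshi; it assumes the genshi package is installed (genshi itself is not vendored here) and disables HTML namespacing so event names stay unprefixed:

    from pip._vendor import html5lib
    from pip._vendor.html5lib.treeadapters.genshi import to_genshi

    tree = html5lib.parse('<b id="x">Hi</b>', namespaceHTMLElements=False)
    walker = html5lib.getTreeWalker('etree')
    for kind, data, pos in to_genshi(walker(tree)):
        # kind is one of START, END, TEXT, COMMENT, DOCTYPE from genshi.core;
        # pos is always the placeholder location (None, -1, -1)
        print(kind, data)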
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/treeadapters/sax.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/treeadapters/sax.py new file mode 100644 index 0000000..1f06d13 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/treeadapters/sax.py | |||
| @@ -0,0 +1,50 @@ | |||
| 1 | from __future__ import absolute_import, division, unicode_literals | ||
| 2 | |||
| 3 | from xml.sax.xmlreader import AttributesNSImpl | ||
| 4 | |||
| 5 | from ..constants import adjustForeignAttributes, unadjustForeignAttributes | ||
| 6 | |||
| 7 | prefix_mapping = {} | ||
| 8 | for prefix, localName, namespace in adjustForeignAttributes.values(): | ||
| 9 | if prefix is not None: | ||
| 10 | prefix_mapping[prefix] = namespace | ||
| 11 | |||
| 12 | |||
| 13 | def to_sax(walker, handler): | ||
| 14 | """Call SAX-like content handler based on treewalker walker | ||
| 15 | |||
| 16 | :arg walker: the treewalker to use to walk the tree to convert it | ||
| 17 | |||
| 18 | :arg handler: SAX handler to use | ||
| 19 | |||
| 20 | """ | ||
| 21 | handler.startDocument() | ||
| 22 | for prefix, namespace in prefix_mapping.items(): | ||
| 23 | handler.startPrefixMapping(prefix, namespace) | ||
| 24 | |||
| 25 | for token in walker: | ||
| 26 | type = token["type"] | ||
| 27 | if type == "Doctype": | ||
| 28 | continue | ||
| 29 | elif type in ("StartTag", "EmptyTag"): | ||
| 30 | attrs = AttributesNSImpl(token["data"], | ||
| 31 | unadjustForeignAttributes) | ||
| 32 | handler.startElementNS((token["namespace"], token["name"]), | ||
| 33 | token["name"], | ||
| 34 | attrs) | ||
| 35 | if type == "EmptyTag": | ||
| 36 | handler.endElementNS((token["namespace"], token["name"]), | ||
| 37 | token["name"]) | ||
| 38 | elif type == "EndTag": | ||
| 39 | handler.endElementNS((token["namespace"], token["name"]), | ||
| 40 | token["name"]) | ||
| 41 | elif type in ("Characters", "SpaceCharacters"): | ||
| 42 | handler.characters(token["data"]) | ||
| 43 | elif type == "Comment": | ||
| 44 | pass | ||
| 45 | else: | ||
| 46 | assert False, "Unknown token type" | ||
| 47 | |||
| 48 | for prefix, namespace in prefix_mapping.items(): | ||
| 49 | handler.endPrefixMapping(prefix) | ||
| 50 | handler.endDocument() | ||
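to_sax can drive any SAX2 content handler. A sketch using the stdlib XMLGenerator, with namespaceHTMLElements=False because to_sax only declares prefixes for the foreign (xlink/xml/xmlns) attribute namespaces built above:

    import io
    from xml.sax.saxutils import XMLGenerator
    from pip._vendor import html5lib
    from pip._vendor.html5lib.treeadapters.sax import to_sax

    tree = html5lib.parse('<p class="x">Hi</p>', namespaceHTMLElements=False)
    walker = html5lib.getTreeWalker('etree')
    out = io.StringIO()
    to_sax(walker(tree), XMLGenerator(out))
    print(out.getvalue())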
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/treebuilders/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/treebuilders/__init__.py new file mode 100644 index 0000000..2ce5c87 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/treebuilders/__init__.py | |||
| @@ -0,0 +1,88 @@ | |||
| 1 | """A collection of modules for building different kinds of trees from HTML | ||
| 2 | documents. | ||
| 3 | |||
| 4 | To create a treebuilder for a new type of tree, you need to | ||
| 5 | implement several things: | ||
| 6 | |||
| 7 | 1. A set of classes for various types of elements: Document, Doctype, Comment, | ||
| 8 | Element. These must implement the interface of ``treebuilders.base.Node`` | ||
| 9 | (although comment nodes have a different signature for their constructor, | ||
| 10 | see ``treebuilders.etree.Comment``). Textual content may also be implemented | ||
| 11 | as another node type, or not, as your tree implementation requires. | ||
| 12 | |||
| 13 | 2. A treebuilder object (called ``TreeBuilder`` by convention) that inherits | ||
| 14 | from ``treebuilders.base.TreeBuilder``. This has 4 required attributes: | ||
| 15 | |||
| 16 | * ``documentClass`` - the class to use for the bottommost node of a document | ||
| 17 | * ``elementClass`` - the class to use for HTML Elements | ||
| 18 | * ``commentClass`` - the class to use for comments | ||
| 19 | * ``doctypeClass`` - the class to use for doctypes | ||
| 20 | |||
| 21 | It also has one required method: | ||
| 22 | |||
| 23 | * ``getDocument`` - Returns the root node of the complete document tree | ||
| 24 | |||
| 25 | 3. If you wish to run the unit tests, you must also create a ``testSerializer`` | ||
| 26 | method on your treebuilder which accepts a node and returns a string | ||
| 27 | containing the node and its children serialized according to the format used | ||
| 28 | in the unit tests | ||
| 29 | |||
| 30 | """ | ||
| 31 | |||
| 32 | from __future__ import absolute_import, division, unicode_literals | ||
| 33 | |||
| 34 | from .._utils import default_etree | ||
| 35 | |||
| 36 | treeBuilderCache = {} | ||
| 37 | |||
| 38 | |||
| 39 | def getTreeBuilder(treeType, implementation=None, **kwargs): | ||
| 40 | """Get a TreeBuilder class for various types of trees with built-in support | ||
| 41 | |||
| 42 | :arg treeType: the name of the tree type required (case-insensitive). Supported | ||
| 43 | values are: | ||
| 44 | |||
| 45 | * "dom" - A generic builder for DOM implementations, defaulting to a | ||
| 46 | xml.dom.minidom based implementation. | ||
| 47 | * "etree" - A generic builder for tree implementations exposing an | ||
| 48 | ElementTree-like interface, defaulting to xml.etree.cElementTree if | ||
| 49 | available and xml.etree.ElementTree if not. | ||
| 50 | * "lxml" - A etree-based builder for lxml.etree, handling limitations | ||
| 51 | of lxml's implementation. | ||
| 52 | |||
| 53 | :arg implementation: (Currently applies to the "etree" and "dom" tree | ||
| 54 | types). A module implementing the tree type e.g. xml.etree.ElementTree | ||
| 55 | or xml.etree.cElementTree. | ||
| 56 | |||
| 57 | :arg kwargs: Any additional options to pass to the TreeBuilder when | ||
| 58 | creating it. | ||
| 59 | |||
| 60 | Example: | ||
| 61 | |||
| 62 | >>> from html5lib.treebuilders import getTreeBuilder | ||
| 63 | >>> builder = getTreeBuilder('etree') | ||
| 64 | |||
| 65 | """ | ||
| 66 | |||
| 67 | treeType = treeType.lower() | ||
| 68 | if treeType not in treeBuilderCache: | ||
| 69 | if treeType == "dom": | ||
| 70 | from . import dom | ||
| 71 | # Come up with a sane default (preferably from the stdlib) | ||
| 72 | if implementation is None: | ||
| 73 | from xml.dom import minidom | ||
| 74 | implementation = minidom | ||
| 75 | # NEVER cache here, caching is done in the dom submodule | ||
| 76 | return dom.getDomModule(implementation, **kwargs).TreeBuilder | ||
| 77 | elif treeType == "lxml": | ||
| 78 | from . import etree_lxml | ||
| 79 | treeBuilderCache[treeType] = etree_lxml.TreeBuilder | ||
| 80 | elif treeType == "etree": | ||
| 81 | from . import etree | ||
| 82 | if implementation is None: | ||
| 83 | implementation = default_etree | ||
| 84 | # NEVER cache here, caching is done in the etree submodule | ||
| 85 | return etree.getETreeModule(implementation, **kwargs).TreeBuilder | ||
| 86 | else: | ||
| 87 | raise ValueError("""Unrecognised treebuilder "%s" """ % treeType) | ||
| 88 | return treeBuilderCache.get(treeType) | ||
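The class returned by getTreeBuilder is what HTMLParser takes as its tree argument; note from the code above that "dom" and "etree" results are cached per implementation module inside their submodules, while "lxml" is cached here. A minimal sketch using the vendored paths:

    from pip._vendor import html5lib

    builder = html5lib.getTreeBuilder('dom')   # minidom-backed by default
    parser = html5lib.HTMLParser(tree=builder)
    document = parser.parse('<p>Hello</p>')    # an xml.dom.minidom Document
    print(document.toxml())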
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/treebuilders/base.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/treebuilders/base.py new file mode 100644 index 0000000..ed32fcb --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/treebuilders/base.py | |||
| @@ -0,0 +1,417 @@ | |||
| 1 | from __future__ import absolute_import, division, unicode_literals | ||
| 2 | from pip._vendor.six import text_type | ||
| 3 | |||
| 4 | from ..constants import scopingElements, tableInsertModeElements, namespaces | ||
| 5 | |||
| 6 | # The scope markers are inserted when entering object elements, | ||
| 7 | # marquees, table cells, and table captions, and are used to prevent formatting | ||
| 8 | # from "leaking" into tables, object elements, and marquees. | ||
| 9 | Marker = None | ||
| 10 | |||
| 11 | listElementsMap = { | ||
| 12 | None: (frozenset(scopingElements), False), | ||
| 13 | "button": (frozenset(scopingElements | set([(namespaces["html"], "button")])), False), | ||
| 14 | "list": (frozenset(scopingElements | set([(namespaces["html"], "ol"), | ||
| 15 | (namespaces["html"], "ul")])), False), | ||
| 16 | "table": (frozenset([(namespaces["html"], "html"), | ||
| 17 | (namespaces["html"], "table")]), False), | ||
| 18 | "select": (frozenset([(namespaces["html"], "optgroup"), | ||
| 19 | (namespaces["html"], "option")]), True) | ||
| 20 | } | ||
| 21 | |||
| 22 | |||
| 23 | class Node(object): | ||
| 24 | """Represents an item in the tree""" | ||
| 25 | def __init__(self, name): | ||
| 26 | """Creates a Node | ||
| 27 | |||
| 28 | :arg name: The tag name associated with the node | ||
| 29 | |||
| 30 | """ | ||
| 31 | # The tag name associated with the node | ||
| 32 | self.name = name | ||
| 33 | # The parent of the current node (or None for the document node) | ||
| 34 | self.parent = None | ||
| 35 | # The value of the current node (applies to text nodes and comments) | ||
| 36 | self.value = None | ||
| 37 | # A dict holding name -> value pairs for attributes of the node | ||
| 38 | self.attributes = {} | ||
| 39 | # A list of child nodes of the current node. This must include all | ||
| 40 | # elements but not necessarily other node types. | ||
| 41 | self.childNodes = [] | ||
| 42 | # A list of miscellaneous flags that can be set on the node. | ||
| 43 | self._flags = [] | ||
| 44 | |||
| 45 | def __str__(self): | ||
| 46 | attributesStr = " ".join(["%s=\"%s\"" % (name, value) | ||
| 47 | for name, value in | ||
| 48 | self.attributes.items()]) | ||
| 49 | if attributesStr: | ||
| 50 | return "<%s %s>" % (self.name, attributesStr) | ||
| 51 | else: | ||
| 52 | return "<%s>" % (self.name) | ||
| 53 | |||
| 54 | def __repr__(self): | ||
| 55 | return "<%s>" % (self.name) | ||
| 56 | |||
| 57 | def appendChild(self, node): | ||
| 58 | """Insert node as a child of the current node | ||
| 59 | |||
| 60 | :arg node: the node to insert | ||
| 61 | |||
| 62 | """ | ||
| 63 | raise NotImplementedError | ||
| 64 | |||
| 65 | def insertText(self, data, insertBefore=None): | ||
| 66 | """Insert data as text in the current node, positioned before the | ||
| 67 | start of node insertBefore or to the end of the node's text. | ||
| 68 | |||
| 69 | :arg data: the data to insert | ||
| 70 | |||
| 71 | :arg insertBefore: an optional child node; if given, the text is inserted | ||
| 72 | before that node, otherwise it is appended to the node's text | ||
| 73 | |||
| 74 | """ | ||
| 75 | raise NotImplementedError | ||
| 76 | |||
| 77 | def insertBefore(self, node, refNode): | ||
| 78 | """Insert node as a child of the current node, before refNode in the | ||
| 79 | list of child nodes. Raises ValueError if refNode is not a child of | ||
| 80 | the current node | ||
| 81 | |||
| 82 | :arg node: the node to insert | ||
| 83 | |||
| 84 | :arg refNode: the child node to insert the node before | ||
| 85 | |||
| 86 | """ | ||
| 87 | raise NotImplementedError | ||
| 88 | |||
| 89 | def removeChild(self, node): | ||
| 90 | """Remove node from the children of the current node | ||
| 91 | |||
| 92 | :arg node: the child node to remove | ||
| 93 | |||
| 94 | """ | ||
| 95 | raise NotImplementedError | ||
| 96 | |||
| 97 | def reparentChildren(self, newParent): | ||
| 98 | """Move all the children of the current node to newParent. | ||
| 99 | This is needed so that trees that don't store text as nodes move the | ||
| 100 | text in the correct way | ||
| 101 | |||
| 102 | :arg newParent: the node to move all this node's children to | ||
| 103 | |||
| 104 | """ | ||
| 105 | # XXX - should this method be made more general? | ||
| 106 | for child in self.childNodes: | ||
| 107 | newParent.appendChild(child) | ||
| 108 | self.childNodes = [] | ||
| 109 | |||
| 110 | def cloneNode(self): | ||
| 111 | """Return a shallow copy of the current node i.e. a node with the same | ||
| 112 | name and attributes but with no parent or child nodes | ||
| 113 | """ | ||
| 114 | raise NotImplementedError | ||
| 115 | |||
| 116 | def hasContent(self): | ||
| 117 | """Return true if the node has children or text, false otherwise | ||
| 118 | """ | ||
| 119 | raise NotImplementedError | ||
| 120 | |||
| 121 | |||
| 122 | class ActiveFormattingElements(list): | ||
| 123 | def append(self, node): | ||
| 124 | equalCount = 0 | ||
| 125 | if node != Marker: | ||
| 126 | for element in self[::-1]: | ||
| 127 | if element == Marker: | ||
| 128 | break | ||
| 129 | if self.nodesEqual(element, node): | ||
| 130 | equalCount += 1 | ||
| 131 | if equalCount == 3: | ||
| 132 | self.remove(element) | ||
| 133 | break | ||
| 134 | list.append(self, node) | ||
| 135 | |||
| 136 | def nodesEqual(self, node1, node2): | ||
| 137 | if not node1.nameTuple == node2.nameTuple: | ||
| 138 | return False | ||
| 139 | |||
| 140 | if not node1.attributes == node2.attributes: | ||
| 141 | return False | ||
| 142 | |||
| 143 | return True | ||
| 144 | |||
| 145 | |||
| 146 | class TreeBuilder(object): | ||
| 147 | """Base treebuilder implementation | ||
| 148 | |||
| 149 | * documentClass - the class to use for the bottommost node of a document | ||
| 150 | * elementClass - the class to use for HTML Elements | ||
| 151 | * commentClass - the class to use for comments | ||
| 152 | * doctypeClass - the class to use for doctypes | ||
| 153 | |||
| 154 | """ | ||
| 155 | # pylint:disable=not-callable | ||
| 156 | |||
| 157 | # Document class | ||
| 158 | documentClass = None | ||
| 159 | |||
| 160 | # The class to use for creating a node | ||
| 161 | elementClass = None | ||
| 162 | |||
| 163 | # The class to use for creating comments | ||
| 164 | commentClass = None | ||
| 165 | |||
| 166 | # The class to use for creating doctypes | ||
| 167 | doctypeClass = None | ||
| 168 | |||
| 169 | # Fragment class | ||
| 170 | fragmentClass = None | ||
| 171 | |||
| 172 | def __init__(self, namespaceHTMLElements): | ||
| 173 | """Create a TreeBuilder | ||
| 174 | |||
| 175 | :arg namespaceHTMLElements: whether or not to namespace HTML elements | ||
| 176 | |||
| 177 | """ | ||
| 178 | if namespaceHTMLElements: | ||
| 179 | self.defaultNamespace = "http://www.w3.org/1999/xhtml" | ||
| 180 | else: | ||
| 181 | self.defaultNamespace = None | ||
| 182 | self.reset() | ||
| 183 | |||
| 184 | def reset(self): | ||
| 185 | self.openElements = [] | ||
| 186 | self.activeFormattingElements = ActiveFormattingElements() | ||
| 187 | |||
| 188 | # XXX - rename these to headElement, formElement | ||
| 189 | self.headPointer = None | ||
| 190 | self.formPointer = None | ||
| 191 | |||
| 192 | self.insertFromTable = False | ||
| 193 | |||
| 194 | self.document = self.documentClass() | ||
| 195 | |||
| 196 | def elementInScope(self, target, variant=None): | ||
| 197 | |||
| 198 | # If we pass a node in, we match that. If we pass a string, we | ||
| 199 | # match any node with that name | ||
| 200 | exactNode = hasattr(target, "nameTuple") | ||
| 201 | if not exactNode: | ||
| 202 | if isinstance(target, text_type): | ||
| 203 | target = (namespaces["html"], target) | ||
| 204 | assert isinstance(target, tuple) | ||
| 205 | |||
| 206 | listElements, invert = listElementsMap[variant] | ||
| 207 | |||
| 208 | for node in reversed(self.openElements): | ||
| 209 | if exactNode and node == target: | ||
| 210 | return True | ||
| 211 | elif not exactNode and node.nameTuple == target: | ||
| 212 | return True | ||
| 213 | elif (invert ^ (node.nameTuple in listElements)): | ||
| 214 | return False | ||
| 215 | |||
| 216 | assert False # We should never reach this point | ||
| 217 | |||
| 218 | def reconstructActiveFormattingElements(self): | ||
| 219 | # Within this algorithm the order of steps described in the | ||
| 220 | # specification is not quite the same as the order of steps in the | ||
| 221 | # code. It should still do the same though. | ||
| 222 | |||
| 223 | # Step 1: stop the algorithm when there's nothing to do. | ||
| 224 | if not self.activeFormattingElements: | ||
| 225 | return | ||
| 226 | |||
| 227 | # Step 2 and step 3: we start with the last element, so i indexes the last entry. | ||
| 228 | i = len(self.activeFormattingElements) - 1 | ||
| 229 | entry = self.activeFormattingElements[i] | ||
| 230 | if entry == Marker or entry in self.openElements: | ||
| 231 | return | ||
| 232 | |||
| 233 | # Step 6 | ||
| 234 | while entry != Marker and entry not in self.openElements: | ||
| 235 | if i == 0: | ||
| 236 | # This will be reset to 0 below | ||
| 237 | i = -1 | ||
| 238 | break | ||
| 239 | i -= 1 | ||
| 240 | # Step 5: let entry be one earlier in the list. | ||
| 241 | entry = self.activeFormattingElements[i] | ||
| 242 | |||
| 243 | while True: | ||
| 244 | # Step 7 | ||
| 245 | i += 1 | ||
| 246 | |||
| 247 | # Step 8 | ||
| 248 | entry = self.activeFormattingElements[i] | ||
| 249 | clone = entry.cloneNode() # Mainly to get a new copy of the attributes | ||
| 250 | |||
| 251 | # Step 9 | ||
| 252 | element = self.insertElement({"type": "StartTag", | ||
| 253 | "name": clone.name, | ||
| 254 | "namespace": clone.namespace, | ||
| 255 | "data": clone.attributes}) | ||
| 256 | |||
| 257 | # Step 10 | ||
| 258 | self.activeFormattingElements[i] = element | ||
| 259 | |||
| 260 | # Step 11 | ||
| 261 | if element == self.activeFormattingElements[-1]: | ||
| 262 | break | ||
| 263 | |||
| 264 | def clearActiveFormattingElements(self): | ||
| 265 | entry = self.activeFormattingElements.pop() | ||
| 266 | while self.activeFormattingElements and entry != Marker: | ||
| 267 | entry = self.activeFormattingElements.pop() | ||
| 268 | |||
| 269 | def elementInActiveFormattingElements(self, name): | ||
| 270 | """Check if an element exists between the end of the active | ||
| 271 | formatting elements and the last marker. If it does, return it, else | ||
| 272 | return false""" | ||
| 273 | |||
| 274 | for item in self.activeFormattingElements[::-1]: | ||
| 275 | # Check for Marker first because if it's a Marker it doesn't have a | ||
| 276 | # name attribute. | ||
| 277 | if item == Marker: | ||
| 278 | break | ||
| 279 | elif item.name == name: | ||
| 280 | return item | ||
| 281 | return False | ||
| 282 | |||
| 283 | def insertRoot(self, token): | ||
| 284 | element = self.createElement(token) | ||
| 285 | self.openElements.append(element) | ||
| 286 | self.document.appendChild(element) | ||
| 287 | |||
| 288 | def insertDoctype(self, token): | ||
| 289 | name = token["name"] | ||
| 290 | publicId = token["publicId"] | ||
| 291 | systemId = token["systemId"] | ||
| 292 | |||
| 293 | doctype = self.doctypeClass(name, publicId, systemId) | ||
| 294 | self.document.appendChild(doctype) | ||
| 295 | |||
| 296 | def insertComment(self, token, parent=None): | ||
| 297 | if parent is None: | ||
| 298 | parent = self.openElements[-1] | ||
| 299 | parent.appendChild(self.commentClass(token["data"])) | ||
| 300 | |||
| 301 | def createElement(self, token): | ||
| 302 | """Create an element but don't insert it anywhere""" | ||
| 303 | name = token["name"] | ||
| 304 | namespace = token.get("namespace", self.defaultNamespace) | ||
| 305 | element = self.elementClass(name, namespace) | ||
| 306 | element.attributes = token["data"] | ||
| 307 | return element | ||
| 308 | |||
| 309 | def _getInsertFromTable(self): | ||
| 310 | return self._insertFromTable | ||
| 311 | |||
| 312 | def _setInsertFromTable(self, value): | ||
| 313 | """Switch the function used to insert an element from the | ||
| 314 | normal one to the misnested table one and back again""" | ||
| 315 | self._insertFromTable = value | ||
| 316 | if value: | ||
| 317 | self.insertElement = self.insertElementTable | ||
| 318 | else: | ||
| 319 | self.insertElement = self.insertElementNormal | ||
| 320 | |||
| 321 | insertFromTable = property(_getInsertFromTable, _setInsertFromTable) | ||
| 322 | |||
| 323 | def insertElementNormal(self, token): | ||
| 324 | name = token["name"] | ||
| 325 | assert isinstance(name, text_type), "Element %s not unicode" % name | ||
| 326 | namespace = token.get("namespace", self.defaultNamespace) | ||
| 327 | element = self.elementClass(name, namespace) | ||
| 328 | element.attributes = token["data"] | ||
| 329 | self.openElements[-1].appendChild(element) | ||
| 330 | self.openElements.append(element) | ||
| 331 | return element | ||
| 332 | |||
| 333 | def insertElementTable(self, token): | ||
| 334 | """Create an element and insert it into the tree""" | ||
| 335 | element = self.createElement(token) | ||
| 336 | if self.openElements[-1].name not in tableInsertModeElements: | ||
| 337 | return self.insertElementNormal(token) | ||
| 338 | else: | ||
| 339 | # We should be in the InTable mode. This means we want to do | ||
| 340 | # special magic element rearranging | ||
| 341 | parent, insertBefore = self.getTableMisnestedNodePosition() | ||
| 342 | if insertBefore is None: | ||
| 343 | parent.appendChild(element) | ||
| 344 | else: | ||
| 345 | parent.insertBefore(element, insertBefore) | ||
| 346 | self.openElements.append(element) | ||
| 347 | return element | ||
| 348 | |||
| 349 | def insertText(self, data, parent=None): | ||
| 350 | """Insert text data.""" | ||
| 351 | if parent is None: | ||
| 352 | parent = self.openElements[-1] | ||
| 353 | |||
| 354 | if (not self.insertFromTable or (self.insertFromTable and | ||
| 355 | self.openElements[-1].name | ||
| 356 | not in tableInsertModeElements)): | ||
| 357 | parent.insertText(data) | ||
| 358 | else: | ||
| 359 | # We should be in the InTable mode. This means we want to do | ||
| 360 | # special magic element rearranging | ||
| 361 | parent, insertBefore = self.getTableMisnestedNodePosition() | ||
| 362 | parent.insertText(data, insertBefore) | ||
| 363 | |||
| 364 | def getTableMisnestedNodePosition(self): | ||
| 365 | """Get the foster parent element, and sibling to insert before | ||
| 366 | (or None) when inserting a misnested table node""" | ||
| 367 | # The foster parent element is the one which comes before the most | ||
| 368 | # recently opened table element | ||
| 369 | # XXX - this is really inelegant | ||
| 370 | lastTable = None | ||
| 371 | fosterParent = None | ||
| 372 | insertBefore = None | ||
| 373 | for elm in self.openElements[::-1]: | ||
| 374 | if elm.name == "table": | ||
| 375 | lastTable = elm | ||
| 376 | break | ||
| 377 | if lastTable: | ||
| 378 | # XXX - we should really check that this parent is actually a | ||
| 379 | # node here | ||
| 380 | if lastTable.parent: | ||
| 381 | fosterParent = lastTable.parent | ||
| 382 | insertBefore = lastTable | ||
| 383 | else: | ||
| 384 | fosterParent = self.openElements[ | ||
| 385 | self.openElements.index(lastTable) - 1] | ||
| 386 | else: | ||
| 387 | fosterParent = self.openElements[0] | ||
| 388 | return fosterParent, insertBefore | ||
| 389 | |||
| 390 | def generateImpliedEndTags(self, exclude=None): | ||
| 391 | name = self.openElements[-1].name | ||
| 392 | # XXX td, th and tr are not actually needed | ||
| 393 | if (name in frozenset(("dd", "dt", "li", "option", "optgroup", "p", "rp", "rt")) and | ||
| 394 | name != exclude): | ||
| 395 | self.openElements.pop() | ||
| 396 | # XXX This is not entirely what the specification says. We should | ||
| 397 | # investigate it more closely. | ||
| 398 | self.generateImpliedEndTags(exclude) | ||
| 399 | |||
| 400 | def getDocument(self): | ||
| 401 | """Return the final tree""" | ||
| 402 | return self.document | ||
| 403 | |||
| 404 | def getFragment(self): | ||
| 405 | """Return the final fragment""" | ||
| 406 | # assert self.innerHTML | ||
| 407 | fragment = self.fragmentClass() | ||
| 408 | self.openElements[0].reparentChildren(fragment) | ||
| 409 | return fragment | ||
| 410 | |||
| 411 | def testSerializer(self, node): | ||
| 412 | """Serialize the subtree of node in the format required by unit tests | ||
| 413 | |||
| 414 | :arg node: the node from which to start serializing | ||
| 415 | |||
| 416 | """ | ||
| 417 | raise NotImplementedError | ||
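One observable effect of insertFromTable together with getTableMisnestedNodePosition is foster parenting: character data appearing directly inside a <table> is reinserted before the table, as the HTML spec requires. A quick sketch (etree tree, namespaces disabled for readability):

    import xml.etree.ElementTree as ET
    from pip._vendor import html5lib

    root = html5lib.parse('<table>oops<tr><td>x</td></tr></table>',
                          namespaceHTMLElements=False)
    # 'oops' ends up in <body> just before the <table> element
    print(ET.tostring(root, encoding='unicode'))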
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/treebuilders/dom.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/treebuilders/dom.py new file mode 100644 index 0000000..8117b2d --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/treebuilders/dom.py | |||
| @@ -0,0 +1,236 @@ | |||
| 1 | from __future__ import absolute_import, division, unicode_literals | ||
| 2 | |||
| 3 | |||
| 4 | from collections import MutableMapping | ||
| 5 | from xml.dom import minidom, Node | ||
| 6 | import weakref | ||
| 7 | |||
| 8 | from . import base | ||
| 9 | from .. import constants | ||
| 10 | from ..constants import namespaces | ||
| 11 | from .._utils import moduleFactoryFactory | ||
| 12 | |||
| 13 | |||
| 14 | def getDomBuilder(DomImplementation): | ||
| 15 | Dom = DomImplementation | ||
| 16 | |||
| 17 | class AttrList(MutableMapping): | ||
| 18 | def __init__(self, element): | ||
| 19 | self.element = element | ||
| 20 | |||
| 21 | def __iter__(self): | ||
| 22 | return iter(self.element.attributes.keys()) | ||
| 23 | |||
| 24 | def __setitem__(self, name, value): | ||
| 25 | if isinstance(name, tuple): | ||
| 26 | raise NotImplementedError | ||
| 27 | else: | ||
| 28 | attr = self.element.ownerDocument.createAttribute(name) | ||
| 29 | attr.value = value | ||
| 30 | self.element.attributes[name] = attr | ||
| 31 | |||
| 32 | def __len__(self): | ||
| 33 | return len(self.element.attributes) | ||
| 34 | |||
| 35 | def items(self): | ||
| 36 | return list(self.element.attributes.items()) | ||
| 37 | |||
| 38 | def values(self): | ||
| 39 | return list(self.element.attributes.values()) | ||
| 40 | |||
| 41 | def __getitem__(self, name): | ||
| 42 | if isinstance(name, tuple): | ||
| 43 | raise NotImplementedError | ||
| 44 | else: | ||
| 45 | return self.element.attributes[name].value | ||
| 46 | |||
| 47 | def __delitem__(self, name): | ||
| 48 | if isinstance(name, tuple): | ||
| 49 | raise NotImplementedError | ||
| 50 | else: | ||
| 51 | del self.element.attributes[name] | ||
| 52 | |||
| 53 | class NodeBuilder(base.Node): | ||
| 54 | def __init__(self, element): | ||
| 55 | base.Node.__init__(self, element.nodeName) | ||
| 56 | self.element = element | ||
| 57 | |||
| 58 | namespace = property(lambda self: hasattr(self.element, "namespaceURI") and | ||
| 59 | self.element.namespaceURI or None) | ||
| 60 | |||
| 61 | def appendChild(self, node): | ||
| 62 | node.parent = self | ||
| 63 | self.element.appendChild(node.element) | ||
| 64 | |||
| 65 | def insertText(self, data, insertBefore=None): | ||
| 66 | text = self.element.ownerDocument.createTextNode(data) | ||
| 67 | if insertBefore: | ||
| 68 | self.element.insertBefore(text, insertBefore.element) | ||
| 69 | else: | ||
| 70 | self.element.appendChild(text) | ||
| 71 | |||
| 72 | def insertBefore(self, node, refNode): | ||
| 73 | self.element.insertBefore(node.element, refNode.element) | ||
| 74 | node.parent = self | ||
| 75 | |||
| 76 | def removeChild(self, node): | ||
| 77 | if node.element.parentNode == self.element: | ||
| 78 | self.element.removeChild(node.element) | ||
| 79 | node.parent = None | ||
| 80 | |||
| 81 | def reparentChildren(self, newParent): | ||
| 82 | while self.element.hasChildNodes(): | ||
| 83 | child = self.element.firstChild | ||
| 84 | self.element.removeChild(child) | ||
| 85 | newParent.element.appendChild(child) | ||
| 86 | self.childNodes = [] | ||
| 87 | |||
| 88 | def getAttributes(self): | ||
| 89 | return AttrList(self.element) | ||
| 90 | |||
| 91 | def setAttributes(self, attributes): | ||
| 92 | if attributes: | ||
| 93 | for name, value in list(attributes.items()): | ||
| 94 | if isinstance(name, tuple): | ||
| 95 | if name[0] is not None: | ||
| 96 | qualifiedName = (name[0] + ":" + name[1]) | ||
| 97 | else: | ||
| 98 | qualifiedName = name[1] | ||
| 99 | self.element.setAttributeNS(name[2], qualifiedName, | ||
| 100 | value) | ||
| 101 | else: | ||
| 102 | self.element.setAttribute( | ||
| 103 | name, value) | ||
| 104 | attributes = property(getAttributes, setAttributes) | ||
| 105 | |||
| 106 | def cloneNode(self): | ||
| 107 | return NodeBuilder(self.element.cloneNode(False)) | ||
| 108 | |||
| 109 | def hasContent(self): | ||
| 110 | return self.element.hasChildNodes() | ||
| 111 | |||
| 112 | def getNameTuple(self): | ||
| 113 | if self.namespace is None: | ||
| 114 | return namespaces["html"], self.name | ||
| 115 | else: | ||
| 116 | return self.namespace, self.name | ||
| 117 | |||
| 118 | nameTuple = property(getNameTuple) | ||
| 119 | |||
| 120 | class TreeBuilder(base.TreeBuilder): # pylint:disable=unused-variable | ||
| 121 | def documentClass(self): | ||
| 122 | self.dom = Dom.getDOMImplementation().createDocument(None, None, None) | ||
| 123 | return weakref.proxy(self) | ||
| 124 | |||
| 125 | def insertDoctype(self, token): | ||
| 126 | name = token["name"] | ||
| 127 | publicId = token["publicId"] | ||
| 128 | systemId = token["systemId"] | ||
| 129 | |||
| 130 | domimpl = Dom.getDOMImplementation() | ||
| 131 | doctype = domimpl.createDocumentType(name, publicId, systemId) | ||
| 132 | self.document.appendChild(NodeBuilder(doctype)) | ||
| 133 | if Dom == minidom: | ||
| 134 | doctype.ownerDocument = self.dom | ||
| 135 | |||
| 136 | def elementClass(self, name, namespace=None): | ||
| 137 | if namespace is None and self.defaultNamespace is None: | ||
| 138 | node = self.dom.createElement(name) | ||
| 139 | else: | ||
| 140 | node = self.dom.createElementNS(namespace, name) | ||
| 141 | |||
| 142 | return NodeBuilder(node) | ||
| 143 | |||
| 144 | def commentClass(self, data): | ||
| 145 | return NodeBuilder(self.dom.createComment(data)) | ||
| 146 | |||
| 147 | def fragmentClass(self): | ||
| 148 | return NodeBuilder(self.dom.createDocumentFragment()) | ||
| 149 | |||
| 150 | def appendChild(self, node): | ||
| 151 | self.dom.appendChild(node.element) | ||
| 152 | |||
| 153 | def testSerializer(self, element): | ||
| 154 | return testSerializer(element) | ||
| 155 | |||
| 156 | def getDocument(self): | ||
| 157 | return self.dom | ||
| 158 | |||
| 159 | def getFragment(self): | ||
| 160 | return base.TreeBuilder.getFragment(self).element | ||
| 161 | |||
| 162 | def insertText(self, data, parent=None): | ||
| 163 | data = data | ||
| 164 | if parent != self: | ||
| 165 | base.TreeBuilder.insertText(self, data, parent) | ||
| 166 | else: | ||
| 167 | # HACK: allow text nodes as children of the document node | ||
| 168 | if hasattr(self.dom, '_child_node_types'): | ||
| 169 | # pylint:disable=protected-access | ||
| 170 | if Node.TEXT_NODE not in self.dom._child_node_types: | ||
| 171 | self.dom._child_node_types = list(self.dom._child_node_types) | ||
| 172 | self.dom._child_node_types.append(Node.TEXT_NODE) | ||
| 173 | self.dom.appendChild(self.dom.createTextNode(data)) | ||
| 174 | |||
| 175 | implementation = DomImplementation | ||
| 176 | name = None | ||
| 177 | |||
| 178 | def testSerializer(element): | ||
| 179 | element.normalize() | ||
| 180 | rv = [] | ||
| 181 | |||
| 182 | def serializeElement(element, indent=0): | ||
| 183 | if element.nodeType == Node.DOCUMENT_TYPE_NODE: | ||
| 184 | if element.name: | ||
| 185 | if element.publicId or element.systemId: | ||
| 186 | publicId = element.publicId or "" | ||
| 187 | systemId = element.systemId or "" | ||
| 188 | rv.append("""|%s<!DOCTYPE %s "%s" "%s">""" % | ||
| 189 | (' ' * indent, element.name, publicId, systemId)) | ||
| 190 | else: | ||
| 191 | rv.append("|%s<!DOCTYPE %s>" % (' ' * indent, element.name)) | ||
| 192 | else: | ||
| 193 | rv.append("|%s<!DOCTYPE >" % (' ' * indent,)) | ||
| 194 | elif element.nodeType == Node.DOCUMENT_NODE: | ||
| 195 | rv.append("#document") | ||
| 196 | elif element.nodeType == Node.DOCUMENT_FRAGMENT_NODE: | ||
| 197 | rv.append("#document-fragment") | ||
| 198 | elif element.nodeType == Node.COMMENT_NODE: | ||
| 199 | rv.append("|%s<!-- %s -->" % (' ' * indent, element.nodeValue)) | ||
| 200 | elif element.nodeType == Node.TEXT_NODE: | ||
| 201 | rv.append("|%s\"%s\"" % (' ' * indent, element.nodeValue)) | ||
| 202 | else: | ||
| 203 | if (hasattr(element, "namespaceURI") and | ||
| 204 | element.namespaceURI is not None): | ||
| 205 | name = "%s %s" % (constants.prefixes[element.namespaceURI], | ||
| 206 | element.nodeName) | ||
| 207 | else: | ||
| 208 | name = element.nodeName | ||
| 209 | rv.append("|%s<%s>" % (' ' * indent, name)) | ||
| 210 | if element.hasAttributes(): | ||
| 211 | attributes = [] | ||
| 212 | for i in range(len(element.attributes)): | ||
| 213 | attr = element.attributes.item(i) | ||
| 214 | name = attr.nodeName | ||
| 215 | value = attr.value | ||
| 216 | ns = attr.namespaceURI | ||
| 217 | if ns: | ||
| 218 | name = "%s %s" % (constants.prefixes[ns], attr.localName) | ||
| 219 | else: | ||
| 220 | name = attr.nodeName | ||
| 221 | attributes.append((name, value)) | ||
| 222 | |||
| 223 | for name, value in sorted(attributes): | ||
| 224 | rv.append('|%s%s="%s"' % (' ' * (indent + 2), name, value)) | ||
| 225 | indent += 2 | ||
| 226 | for child in element.childNodes: | ||
| 227 | serializeElement(child, indent) | ||
| 228 | serializeElement(element, 0) | ||
| 229 | |||
| 230 | return "\n".join(rv) | ||
| 231 | |||
| 232 | return locals() | ||
| 233 | |||
| 234 | |||
| 235 | # The actual means to get a module! | ||
| 236 | getDomModule = moduleFactoryFactory(getDomBuilder) | ||
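testSerializer here emits the indented, '|'-prefixed per-node format used by the html5lib test suite. A sketch of reaching it through the parser's tree attribute (the TreeBuilder instance):

    from pip._vendor import html5lib

    parser = html5lib.HTMLParser(tree=html5lib.getTreeBuilder('dom'))
    document = parser.parse('<p class="x">Hi</p>')
    # one line per node, children indented two extra spaces
    print(parser.tree.testSerializer(document))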
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/treebuilders/etree.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/treebuilders/etree.py new file mode 100644 index 0000000..9a4aa95 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/treebuilders/etree.py | |||
| @@ -0,0 +1,340 @@ | |||
| 1 | from __future__ import absolute_import, division, unicode_literals | ||
| 2 | # pylint:disable=protected-access | ||
| 3 | |||
| 4 | from pip._vendor.six import text_type | ||
| 5 | |||
| 6 | import re | ||
| 7 | |||
| 8 | from . import base | ||
| 9 | from .. import _ihatexml | ||
| 10 | from .. import constants | ||
| 11 | from ..constants import namespaces | ||
| 12 | from .._utils import moduleFactoryFactory | ||
| 13 | |||
| 14 | tag_regexp = re.compile("{([^}]*)}(.*)") | ||
| 15 | |||
| 16 | |||
| 17 | def getETreeBuilder(ElementTreeImplementation, fullTree=False): | ||
| 18 | ElementTree = ElementTreeImplementation | ||
| 19 | ElementTreeCommentType = ElementTree.Comment("asd").tag | ||
| 20 | |||
| 21 | class Element(base.Node): | ||
| 22 | def __init__(self, name, namespace=None): | ||
| 23 | self._name = name | ||
| 24 | self._namespace = namespace | ||
| 25 | self._element = ElementTree.Element(self._getETreeTag(name, | ||
| 26 | namespace)) | ||
| 27 | if namespace is None: | ||
| 28 | self.nameTuple = namespaces["html"], self._name | ||
| 29 | else: | ||
| 30 | self.nameTuple = self._namespace, self._name | ||
| 31 | self.parent = None | ||
| 32 | self._childNodes = [] | ||
| 33 | self._flags = [] | ||
| 34 | |||
| 35 | def _getETreeTag(self, name, namespace): | ||
| 36 | if namespace is None: | ||
| 37 | etree_tag = name | ||
| 38 | else: | ||
| 39 | etree_tag = "{%s}%s" % (namespace, name) | ||
| 40 | return etree_tag | ||
| 41 | |||
| 42 | def _setName(self, name): | ||
| 43 | self._name = name | ||
| 44 | self._element.tag = self._getETreeTag(self._name, self._namespace) | ||
| 45 | |||
| 46 | def _getName(self): | ||
| 47 | return self._name | ||
| 48 | |||
| 49 | name = property(_getName, _setName) | ||
| 50 | |||
| 51 | def _setNamespace(self, namespace): | ||
| 52 | self._namespace = namespace | ||
| 53 | self._element.tag = self._getETreeTag(self._name, self._namespace) | ||
| 54 | |||
| 55 | def _getNamespace(self): | ||
| 56 | return self._namespace | ||
| 57 | |||
| 58 | namespace = property(_getNamespace, _setNamespace) | ||
| 59 | |||
| 60 | def _getAttributes(self): | ||
| 61 | return self._element.attrib | ||
| 62 | |||
| 63 | def _setAttributes(self, attributes): | ||
| 64 | # Delete existing attributes first | ||
| 65 | # XXX - there may be a better way to do this... | ||
| 66 | for key in list(self._element.attrib.keys()): | ||
| 67 | del self._element.attrib[key] | ||
| 68 | for key, value in attributes.items(): | ||
| 69 | if isinstance(key, tuple): | ||
| 70 | name = "{%s}%s" % (key[2], key[1]) | ||
| 71 | else: | ||
| 72 | name = key | ||
| 73 | self._element.set(name, value) | ||
| 74 | |||
| 75 | attributes = property(_getAttributes, _setAttributes) | ||
| 76 | |||
| 77 | def _getChildNodes(self): | ||
| 78 | return self._childNodes | ||
| 79 | |||
| 80 | def _setChildNodes(self, value): | ||
| 81 | del self._element[:] | ||
| 82 | self._childNodes = [] | ||
| 83 | for element in value: | ||
| 84 | self.insertChild(element) | ||
| 85 | |||
| 86 | childNodes = property(_getChildNodes, _setChildNodes) | ||
| 87 | |||
| 88 | def hasContent(self): | ||
| 89 | """Return true if the node has children or text""" | ||
| 90 | return bool(self._element.text or len(self._element)) | ||
| 91 | |||
| 92 | def appendChild(self, node): | ||
| 93 | self._childNodes.append(node) | ||
| 94 | self._element.append(node._element) | ||
| 95 | node.parent = self | ||
| 96 | |||
| 97 | def insertBefore(self, node, refNode): | ||
| 98 | index = list(self._element).index(refNode._element) | ||
| 99 | self._element.insert(index, node._element) | ||
| 100 | node.parent = self | ||
| 101 | |||
| 102 | def removeChild(self, node): | ||
| 103 | self._childNodes.remove(node) | ||
| 104 | self._element.remove(node._element) | ||
| 105 | node.parent = None | ||
| 106 | |||
| 107 | def insertText(self, data, insertBefore=None): | ||
| 108 | if not(len(self._element)): | ||
| 109 | if not self._element.text: | ||
| 110 | self._element.text = "" | ||
| 111 | self._element.text += data | ||
| 112 | elif insertBefore is None: | ||
| 113 | # Insert the text as the tail of the last child element | ||
| 114 | if not self._element[-1].tail: | ||
| 115 | self._element[-1].tail = "" | ||
| 116 | self._element[-1].tail += data | ||
| 117 | else: | ||
| 118 | # Insert the text before the specified node | ||
| 119 | children = list(self._element) | ||
| 120 | index = children.index(insertBefore._element) | ||
| 121 | if index > 0: | ||
| 122 | if not self._element[index - 1].tail: | ||
| 123 | self._element[index - 1].tail = "" | ||
| 124 | self._element[index - 1].tail += data | ||
| 125 | else: | ||
| 126 | if not self._element.text: | ||
| 127 | self._element.text = "" | ||
| 128 | self._element.text += data | ||
| 129 | |||
| 130 | def cloneNode(self): | ||
| 131 | element = type(self)(self.name, self.namespace) | ||
| 132 | for name, value in self.attributes.items(): | ||
| 133 | element.attributes[name] = value | ||
| 134 | return element | ||
| 135 | |||
| 136 | def reparentChildren(self, newParent): | ||
| 137 | if newParent.childNodes: | ||
| 138 | newParent.childNodes[-1]._element.tail += self._element.text | ||
| 139 | else: | ||
| 140 | if not newParent._element.text: | ||
| 141 | newParent._element.text = "" | ||
| 142 | if self._element.text is not None: | ||
| 143 | newParent._element.text += self._element.text | ||
| 144 | self._element.text = "" | ||
| 145 | base.Node.reparentChildren(self, newParent) | ||
| 146 | |||
| 147 | class Comment(Element): | ||
| 148 | def __init__(self, data): | ||
| 149 | # Use the superclass constructor to set all properties on the | ||
| 150 | # wrapper element | ||
| 151 | self._element = ElementTree.Comment(data) | ||
| 152 | self.parent = None | ||
| 153 | self._childNodes = [] | ||
| 154 | self._flags = [] | ||
| 155 | |||
| 156 | def _getData(self): | ||
| 157 | return self._element.text | ||
| 158 | |||
| 159 | def _setData(self, value): | ||
| 160 | self._element.text = value | ||
| 161 | |||
| 162 | data = property(_getData, _setData) | ||
| 163 | |||
| 164 | class DocumentType(Element): | ||
| 165 | def __init__(self, name, publicId, systemId): | ||
| 166 | Element.__init__(self, "<!DOCTYPE>") | ||
| 167 | self._element.text = name | ||
| 168 | self.publicId = publicId | ||
| 169 | self.systemId = systemId | ||
| 170 | |||
| 171 | def _getPublicId(self): | ||
| 172 | return self._element.get("publicId", "") | ||
| 173 | |||
| 174 | def _setPublicId(self, value): | ||
| 175 | if value is not None: | ||
| 176 | self._element.set("publicId", value) | ||
| 177 | |||
| 178 | publicId = property(_getPublicId, _setPublicId) | ||
| 179 | |||
| 180 | def _getSystemId(self): | ||
| 181 | return self._element.get("systemId", "") | ||
| 182 | |||
| 183 | def _setSystemId(self, value): | ||
| 184 | if value is not None: | ||
| 185 | self._element.set("systemId", value) | ||
| 186 | |||
| 187 | systemId = property(_getSystemId, _setSystemId) | ||
| 188 | |||
| 189 | class Document(Element): | ||
| 190 | def __init__(self): | ||
| 191 | Element.__init__(self, "DOCUMENT_ROOT") | ||
| 192 | |||
| 193 | class DocumentFragment(Element): | ||
| 194 | def __init__(self): | ||
| 195 | Element.__init__(self, "DOCUMENT_FRAGMENT") | ||
| 196 | |||
| 197 | def testSerializer(element): | ||
| 198 | rv = [] | ||
| 199 | |||
| 200 | def serializeElement(element, indent=0): | ||
| 201 | if not(hasattr(element, "tag")): | ||
| 202 | element = element.getroot() | ||
| 203 | if element.tag == "<!DOCTYPE>": | ||
| 204 | if element.get("publicId") or element.get("systemId"): | ||
| 205 | publicId = element.get("publicId") or "" | ||
| 206 | systemId = element.get("systemId") or "" | ||
| 207 | rv.append("""<!DOCTYPE %s "%s" "%s">""" % | ||
| 208 | (element.text, publicId, systemId)) | ||
| 209 | else: | ||
| 210 | rv.append("<!DOCTYPE %s>" % (element.text,)) | ||
| 211 | elif element.tag == "DOCUMENT_ROOT": | ||
| 212 | rv.append("#document") | ||
| 213 | if element.text is not None: | ||
| 214 | rv.append("|%s\"%s\"" % (' ' * (indent + 2), element.text)) | ||
| 215 | if element.tail is not None: | ||
| 216 | raise TypeError("Document node cannot have tail") | ||
| 217 | if hasattr(element, "attrib") and len(element.attrib): | ||
| 218 | raise TypeError("Document node cannot have attributes") | ||
| 219 | elif element.tag == ElementTreeCommentType: | ||
| 220 | rv.append("|%s<!-- %s -->" % (' ' * indent, element.text)) | ||
| 221 | else: | ||
| 222 | assert isinstance(element.tag, text_type), \ | ||
| 223 | "Expected unicode, got %s, %s" % (type(element.tag), element.tag) | ||
| 224 | nsmatch = tag_regexp.match(element.tag) | ||
| 225 | |||
| 226 | if nsmatch is None: | ||
| 227 | name = element.tag | ||
| 228 | else: | ||
| 229 | ns, name = nsmatch.groups() | ||
| 230 | prefix = constants.prefixes[ns] | ||
| 231 | name = "%s %s" % (prefix, name) | ||
| 232 | rv.append("|%s<%s>" % (' ' * indent, name)) | ||
| 233 | |||
| 234 | if hasattr(element, "attrib"): | ||
| 235 | attributes = [] | ||
| 236 | for name, value in element.attrib.items(): | ||
| 237 | nsmatch = tag_regexp.match(name) | ||
| 238 | if nsmatch is not None: | ||
| 239 | ns, name = nsmatch.groups() | ||
| 240 | prefix = constants.prefixes[ns] | ||
| 241 | attr_string = "%s %s" % (prefix, name) | ||
| 242 | else: | ||
| 243 | attr_string = name | ||
| 244 | attributes.append((attr_string, value)) | ||
| 245 | |||
| 246 | for name, value in sorted(attributes): | ||
| 247 | rv.append('|%s%s="%s"' % (' ' * (indent + 2), name, value)) | ||
| 248 | if element.text: | ||
| 249 | rv.append("|%s\"%s\"" % (' ' * (indent + 2), element.text)) | ||
| 250 | indent += 2 | ||
| 251 | for child in element: | ||
| 252 | serializeElement(child, indent) | ||
| 253 | if element.tail: | ||
| 254 | rv.append("|%s\"%s\"" % (' ' * (indent - 2), element.tail)) | ||
| 255 | serializeElement(element, 0) | ||
| 256 | |||
| 257 | return "\n".join(rv) | ||
| 258 | |||
| 259 | def tostring(element): # pylint:disable=unused-variable | ||
| 260 | """Serialize an element and its child nodes to a string""" | ||
| 261 | rv = [] | ||
| 262 | filter = _ihatexml.InfosetFilter() | ||
| 263 | |||
| 264 | def serializeElement(element): | ||
| 265 | if isinstance(element, ElementTree.ElementTree): | ||
| 266 | element = element.getroot() | ||
| 267 | |||
| 268 | if element.tag == "<!DOCTYPE>": | ||
| 269 | if element.get("publicId") or element.get("systemId"): | ||
| 270 | publicId = element.get("publicId") or "" | ||
| 271 | systemId = element.get("systemId") or "" | ||
| 272 | rv.append("""<!DOCTYPE %s PUBLIC "%s" "%s">""" % | ||
| 273 | (element.text, publicId, systemId)) | ||
| 274 | else: | ||
| 275 | rv.append("<!DOCTYPE %s>" % (element.text,)) | ||
| 276 | elif element.tag == "DOCUMENT_ROOT": | ||
| 277 | if element.text is not None: | ||
| 278 | rv.append(element.text) | ||
| 279 | if element.tail is not None: | ||
| 280 | raise TypeError("Document node cannot have tail") | ||
| 281 | if hasattr(element, "attrib") and len(element.attrib): | ||
| 282 | raise TypeError("Document node cannot have attributes") | ||
| 283 | |||
| 284 | for child in element: | ||
| 285 | serializeElement(child) | ||
| 286 | |||
| 287 | elif element.tag == ElementTreeCommentType: | ||
| 288 | rv.append("<!--%s-->" % (element.text,)) | ||
| 289 | else: | ||
| 290 | # This is assumed to be an ordinary element | ||
| 291 | if not element.attrib: | ||
| 292 | rv.append("<%s>" % (filter.fromXmlName(element.tag),)) | ||
| 293 | else: | ||
| 294 | attr = " ".join(["%s=\"%s\"" % ( | ||
| 295 | filter.fromXmlName(name), value) | ||
| 296 | for name, value in element.attrib.items()]) | ||
| 297 | rv.append("<%s %s>" % (element.tag, attr)) | ||
| 298 | if element.text: | ||
| 299 | rv.append(element.text) | ||
| 300 | |||
| 301 | for child in element: | ||
| 302 | serializeElement(child) | ||
| 303 | |||
| 304 | rv.append("</%s>" % (element.tag,)) | ||
| 305 | |||
| 306 | if element.tail: | ||
| 307 | rv.append(element.tail) | ||
| 308 | |||
| 309 | serializeElement(element) | ||
| 310 | |||
| 311 | return "".join(rv) | ||
| 312 | |||
| 313 | class TreeBuilder(base.TreeBuilder): # pylint:disable=unused-variable | ||
| 314 | documentClass = Document | ||
| 315 | doctypeClass = DocumentType | ||
| 316 | elementClass = Element | ||
| 317 | commentClass = Comment | ||
| 318 | fragmentClass = DocumentFragment | ||
| 319 | implementation = ElementTreeImplementation | ||
| 320 | |||
| 321 | def testSerializer(self, element): | ||
| 322 | return testSerializer(element) | ||
| 323 | |||
| 324 | def getDocument(self): | ||
| 325 | if fullTree: | ||
| 326 | return self.document._element | ||
| 327 | else: | ||
| 328 | if self.defaultNamespace is not None: | ||
| 329 | return self.document._element.find( | ||
| 330 | "{%s}html" % self.defaultNamespace) | ||
| 331 | else: | ||
| 332 | return self.document._element.find("html") | ||
| 333 | |||
| 334 | def getFragment(self): | ||
| 335 | return base.TreeBuilder.getFragment(self)._element | ||
| 336 | |||
| 337 | return locals() | ||
| 338 | |||
| 339 | |||
| 340 | getETreeModule = moduleFactoryFactory(getETreeBuilder) | ||
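For the etree builder, getDocument returns the <html> element unless the factory was created with fullTree=True, in which case the synthetic DOCUMENT_ROOT wrapper comes back; fullTree is assumed here to be forwarded through getTreeBuilder's **kwargs. A sketch with the stdlib ElementTree:

    import xml.etree.ElementTree as ET
    from pip._vendor import html5lib

    builder = html5lib.getTreeBuilder('etree', implementation=ET)
    parser = html5lib.HTMLParser(tree=builder)
    root = parser.parse('<i>Hi</i>')   # the <html> element (fullTree=False)
    print(ET.tostring(root, encoding='unicode'))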
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/treebuilders/etree_lxml.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/treebuilders/etree_lxml.py new file mode 100644 index 0000000..66a9ba3 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/treebuilders/etree_lxml.py | |||
| @@ -0,0 +1,366 @@ | |||
| 1 | """Module for supporting the lxml.etree library. The idea here is to use as much | ||
| 2 | of the native library as possible, without using fragile hacks like custom element | ||
| 3 | names that break between releases. The downside of this is that we cannot represent | ||
| 4 | all possible trees; specifically the following are known to cause problems: | ||
| 5 | |||
| 6 | Text or comments as siblings of the root element | ||
| 7 | Doctypes with no name | ||
| 8 | |||
| 9 | When any of these things occur, we emit a DataLossWarning | ||
| 10 | """ | ||
| 11 | |||
| 12 | from __future__ import absolute_import, division, unicode_literals | ||
| 13 | # pylint:disable=protected-access | ||
| 14 | |||
| 15 | import warnings | ||
| 16 | import re | ||
| 17 | import sys | ||
| 18 | |||
| 19 | from . import base | ||
| 20 | from ..constants import DataLossWarning | ||
| 21 | from .. import constants | ||
| 22 | from . import etree as etree_builders | ||
| 23 | from .. import _ihatexml | ||
| 24 | |||
| 25 | import lxml.etree as etree | ||
| 26 | |||
| 27 | |||
| 28 | fullTree = True | ||
| 29 | tag_regexp = re.compile("{([^}]*)}(.*)") | ||
| 30 | |||
| 31 | comment_type = etree.Comment("asd").tag | ||
| 32 | |||
| 33 | |||
| 34 | class DocumentType(object): | ||
| 35 | def __init__(self, name, publicId, systemId): | ||
| 36 | self.name = name | ||
| 37 | self.publicId = publicId | ||
| 38 | self.systemId = systemId | ||
| 39 | |||
| 40 | |||
| 41 | class Document(object): | ||
| 42 | def __init__(self): | ||
| 43 | self._elementTree = None | ||
| 44 | self._childNodes = [] | ||
| 45 | |||
| 46 | def appendChild(self, element): | ||
| 47 | self._elementTree.getroot().addnext(element._element) | ||
| 48 | |||
| 49 | def _getChildNodes(self): | ||
| 50 | return self._childNodes | ||
| 51 | |||
| 52 | childNodes = property(_getChildNodes) | ||
| 53 | |||
| 54 | |||
| 55 | def testSerializer(element): | ||
| 56 | rv = [] | ||
| 57 | infosetFilter = _ihatexml.InfosetFilter(preventDoubleDashComments=True) | ||
| 58 | |||
| 59 | def serializeElement(element, indent=0): | ||
| 60 | if not hasattr(element, "tag"): | ||
| 61 | if hasattr(element, "getroot"): | ||
| 62 | # Full tree case | ||
| 63 | rv.append("#document") | ||
| 64 | if element.docinfo.internalDTD: | ||
| 65 | if not (element.docinfo.public_id or | ||
| 66 | element.docinfo.system_url): | ||
| 67 | dtd_str = "<!DOCTYPE %s>" % element.docinfo.root_name | ||
| 68 | else: | ||
| 69 | dtd_str = """<!DOCTYPE %s "%s" "%s">""" % ( | ||
| 70 | element.docinfo.root_name, | ||
| 71 | element.docinfo.public_id, | ||
| 72 | element.docinfo.system_url) | ||
| 73 | rv.append("|%s%s" % (' ' * (indent + 2), dtd_str)) | ||
| 74 | next_element = element.getroot() | ||
| 75 | while next_element.getprevious() is not None: | ||
| 76 | next_element = next_element.getprevious() | ||
| 77 | while next_element is not None: | ||
| 78 | serializeElement(next_element, indent + 2) | ||
| 79 | next_element = next_element.getnext() | ||
| 80 | elif isinstance(element, str) or isinstance(element, bytes): | ||
| 81 | # Text in a fragment | ||
| 82 | assert isinstance(element, str) or sys.version_info[0] == 2 | ||
| 83 | rv.append("|%s\"%s\"" % (' ' * indent, element)) | ||
| 84 | else: | ||
| 85 | # Fragment case | ||
| 86 | rv.append("#document-fragment") | ||
| 87 | for next_element in element: | ||
| 88 | serializeElement(next_element, indent + 2) | ||
| 89 | elif element.tag == comment_type: | ||
| 90 | rv.append("|%s<!-- %s -->" % (' ' * indent, element.text)) | ||
| 91 | if hasattr(element, "tail") and element.tail: | ||
| 92 | rv.append("|%s\"%s\"" % (' ' * indent, element.tail)) | ||
| 93 | else: | ||
| 94 | assert isinstance(element, etree._Element) | ||
| 95 | nsmatch = etree_builders.tag_regexp.match(element.tag) | ||
| 96 | if nsmatch is not None: | ||
| 97 | ns = nsmatch.group(1) | ||
| 98 | tag = nsmatch.group(2) | ||
| 99 | prefix = constants.prefixes[ns] | ||
| 100 | rv.append("|%s<%s %s>" % (' ' * indent, prefix, | ||
| 101 | infosetFilter.fromXmlName(tag))) | ||
| 102 | else: | ||
| 103 | rv.append("|%s<%s>" % (' ' * indent, | ||
| 104 | infosetFilter.fromXmlName(element.tag))) | ||
| 105 | |||
| 106 | if hasattr(element, "attrib"): | ||
| 107 | attributes = [] | ||
| 108 | for name, value in element.attrib.items(): | ||
| 109 | nsmatch = tag_regexp.match(name) | ||
| 110 | if nsmatch is not None: | ||
| 111 | ns, name = nsmatch.groups() | ||
| 112 | name = infosetFilter.fromXmlName(name) | ||
| 113 | prefix = constants.prefixes[ns] | ||
| 114 | attr_string = "%s %s" % (prefix, name) | ||
| 115 | else: | ||
| 116 | attr_string = infosetFilter.fromXmlName(name) | ||
| 117 | attributes.append((attr_string, value)) | ||
| 118 | |||
| 119 | for name, value in sorted(attributes): | ||
| 120 | rv.append('|%s%s="%s"' % (' ' * (indent + 2), name, value)) | ||
| 121 | |||
| 122 | if element.text: | ||
| 123 | rv.append("|%s\"%s\"" % (' ' * (indent + 2), element.text)) | ||
| 124 | indent += 2 | ||
| 125 | for child in element: | ||
| 126 | serializeElement(child, indent) | ||
| 127 | if hasattr(element, "tail") and element.tail: | ||
| 128 | rv.append("|%s\"%s\"" % (' ' * (indent - 2), element.tail)) | ||
| 129 | serializeElement(element, 0) | ||
| 130 | |||
| 131 | return "\n".join(rv) | ||
| 132 | |||
| 133 | |||
| 134 | def tostring(element): | ||
| 135 | """Serialize an element and its child nodes to a string""" | ||
| 136 | rv = [] | ||
| 137 | |||
| 138 | def serializeElement(element): | ||
| 139 | if not hasattr(element, "tag"): | ||
| 140 | if element.docinfo.internalDTD: | ||
| 141 | if element.docinfo.doctype: | ||
| 142 | dtd_str = element.docinfo.doctype | ||
| 143 | else: | ||
| 144 | dtd_str = "<!DOCTYPE %s>" % element.docinfo.root_name | ||
| 145 | rv.append(dtd_str) | ||
| 146 | serializeElement(element.getroot()) | ||
| 147 | |||
| 148 | elif element.tag == comment_type: | ||
| 149 | rv.append("<!--%s-->" % (element.text,)) | ||
| 150 | |||
| 151 | else: | ||
| 152 | # This is assumed to be an ordinary element | ||
| 153 | if not element.attrib: | ||
| 154 | rv.append("<%s>" % (element.tag,)) | ||
| 155 | else: | ||
| 156 | attr = " ".join(["%s=\"%s\"" % (name, value) | ||
| 157 | for name, value in element.attrib.items()]) | ||
| 158 | rv.append("<%s %s>" % (element.tag, attr)) | ||
| 159 | if element.text: | ||
| 160 | rv.append(element.text) | ||
| 161 | |||
| 162 | for child in element: | ||
| 163 | serializeElement(child) | ||
| 164 | |||
| 165 | rv.append("</%s>" % (element.tag,)) | ||
| 166 | |||
| 167 | if hasattr(element, "tail") and element.tail: | ||
| 168 | rv.append(element.tail) | ||
| 169 | |||
| 170 | serializeElement(element) | ||
| 171 | |||
| 172 | return "".join(rv) | ||
| 173 | |||
| 174 | |||
| 175 | class TreeBuilder(base.TreeBuilder): | ||
| 176 | documentClass = Document | ||
| 177 | doctypeClass = DocumentType | ||
| 178 | elementClass = None | ||
| 179 | commentClass = None | ||
| 180 | fragmentClass = Document | ||
| 181 | implementation = etree | ||
| 182 | |||
| 183 | def __init__(self, namespaceHTMLElements, fullTree=False): | ||
| 184 | builder = etree_builders.getETreeModule(etree, fullTree=fullTree) | ||
| 185 | infosetFilter = self.infosetFilter = _ihatexml.InfosetFilter(preventDoubleDashComments=True) | ||
| 186 | self.namespaceHTMLElements = namespaceHTMLElements | ||
| 187 | |||
| 188 | class Attributes(dict): | ||
| 189 | def __init__(self, element, value=None): | ||
| 190 | if value is None: | ||
| 191 | value = {} | ||
| 192 | self._element = element | ||
| 193 | dict.__init__(self, value) # pylint:disable=non-parent-init-called | ||
| 194 | for key, value in self.items(): | ||
| 195 | if isinstance(key, tuple): | ||
| 196 | name = "{%s}%s" % (key[2], infosetFilter.coerceAttribute(key[1])) | ||
| 197 | else: | ||
| 198 | name = infosetFilter.coerceAttribute(key) | ||
| 199 | self._element._element.attrib[name] = value | ||
| 200 | |||
| 201 | def __setitem__(self, key, value): | ||
| 202 | dict.__setitem__(self, key, value) | ||
| 203 | if isinstance(key, tuple): | ||
| 204 | name = "{%s}%s" % (key[2], infosetFilter.coerceAttribute(key[1])) | ||
| 205 | else: | ||
| 206 | name = infosetFilter.coerceAttribute(key) | ||
| 207 | self._element._element.attrib[name] = value | ||
| 208 | |||
| 209 | class Element(builder.Element): | ||
| 210 | def __init__(self, name, namespace): | ||
| 211 | name = infosetFilter.coerceElement(name) | ||
| 212 | builder.Element.__init__(self, name, namespace=namespace) | ||
| 213 | self._attributes = Attributes(self) | ||
| 214 | |||
| 215 | def _setName(self, name): | ||
| 216 | self._name = infosetFilter.coerceElement(name) | ||
| 217 | self._element.tag = self._getETreeTag( | ||
| 218 | self._name, self._namespace) | ||
| 219 | |||
| 220 | def _getName(self): | ||
| 221 | return infosetFilter.fromXmlName(self._name) | ||
| 222 | |||
| 223 | name = property(_getName, _setName) | ||
| 224 | |||
| 225 | def _getAttributes(self): | ||
| 226 | return self._attributes | ||
| 227 | |||
| 228 | def _setAttributes(self, attributes): | ||
| 229 | self._attributes = Attributes(self, attributes) | ||
| 230 | |||
| 231 | attributes = property(_getAttributes, _setAttributes) | ||
| 232 | |||
| 233 | def insertText(self, data, insertBefore=None): | ||
| 234 | data = infosetFilter.coerceCharacters(data) | ||
| 235 | builder.Element.insertText(self, data, insertBefore) | ||
| 236 | |||
| 237 | def appendChild(self, child): | ||
| 238 | builder.Element.appendChild(self, child) | ||
| 239 | |||
| 240 | class Comment(builder.Comment): | ||
| 241 | def __init__(self, data): | ||
| 242 | data = infosetFilter.coerceComment(data) | ||
| 243 | builder.Comment.__init__(self, data) | ||
| 244 | |||
| 245 | def _setData(self, data): | ||
| 246 | data = infosetFilter.coerceComment(data) | ||
| 247 | self._element.text = data | ||
| 248 | |||
| 249 | def _getData(self): | ||
| 250 | return self._element.text | ||
| 251 | |||
| 252 | data = property(_getData, _setData) | ||
| 253 | |||
| 254 | self.elementClass = Element | ||
| 255 | self.commentClass = Comment | ||
| 256 | # self.fragmentClass = builder.DocumentFragment | ||
| 257 | base.TreeBuilder.__init__(self, namespaceHTMLElements) | ||
| 258 | |||
| 259 | def reset(self): | ||
| 260 | base.TreeBuilder.reset(self) | ||
| 261 | self.insertComment = self.insertCommentInitial | ||
| 262 | self.initial_comments = [] | ||
| 263 | self.doctype = None | ||
| 264 | |||
| 265 | def testSerializer(self, element): | ||
| 266 | return testSerializer(element) | ||
| 267 | |||
| 268 | def getDocument(self): | ||
| 269 | if fullTree: | ||
| 270 | return self.document._elementTree | ||
| 271 | else: | ||
| 272 | return self.document._elementTree.getroot() | ||
| 273 | |||
| 274 | def getFragment(self): | ||
| 275 | fragment = [] | ||
| 276 | element = self.openElements[0]._element | ||
| 277 | if element.text: | ||
| 278 | fragment.append(element.text) | ||
| 279 | fragment.extend(list(element)) | ||
| 280 | if element.tail: | ||
| 281 | fragment.append(element.tail) | ||
| 282 | return fragment | ||
| 283 | |||
| 284 | def insertDoctype(self, token): | ||
| 285 | name = token["name"] | ||
| 286 | publicId = token["publicId"] | ||
| 287 | systemId = token["systemId"] | ||
| 288 | |||
| 289 | if not name: | ||
| 290 | warnings.warn("lxml cannot represent empty doctype", DataLossWarning) | ||
| 291 | self.doctype = None | ||
| 292 | else: | ||
| 293 | coercedName = self.infosetFilter.coerceElement(name) | ||
| 294 | if coercedName != name: | ||
| 295 | warnings.warn("lxml cannot represent non-xml doctype", DataLossWarning) | ||
| 296 | |||
| 297 | doctype = self.doctypeClass(coercedName, publicId, systemId) | ||
| 298 | self.doctype = doctype | ||
| 299 | |||
| 300 | def insertCommentInitial(self, data, parent=None): | ||
| 301 | assert parent is None or parent is self.document | ||
| 302 | assert self.document._elementTree is None | ||
| 303 | self.initial_comments.append(data) | ||
| 304 | |||
| 305 | def insertCommentMain(self, data, parent=None): | ||
| 306 | if (parent == self.document and | ||
| 307 | self.document._elementTree.getroot()[-1].tag == comment_type): | ||
| 308 | warnings.warn("lxml cannot represent adjacent comments beyond the root elements", DataLossWarning) | ||
| 309 | super(TreeBuilder, self).insertComment(data, parent) | ||
| 310 | |||
| 311 | def insertRoot(self, token): | ||
| 312 | # Because of the way libxml2 works, it doesn't seem to be possible to | ||
| 313 | # alter information like the doctype after the tree has been parsed. | ||
| 314 | # Therefore we need to use the built-in parser to create our initial | ||
| 315 | # tree, after which we can add elements like normal | ||
| 316 | docStr = "" | ||
| 317 | if self.doctype: | ||
| 318 | assert self.doctype.name | ||
| 319 | docStr += "<!DOCTYPE %s" % self.doctype.name | ||
| 320 | if (self.doctype.publicId is not None or | ||
| 321 | self.doctype.systemId is not None): | ||
| 322 | docStr += (' PUBLIC "%s" ' % | ||
| 323 | (self.infosetFilter.coercePubid(self.doctype.publicId or ""))) | ||
| 324 | if self.doctype.systemId: | ||
| 325 | sysid = self.doctype.systemId | ||
| 326 | if sysid.find("'") >= 0 and sysid.find('"') >= 0: | ||
| 327 | warnings.warn("DOCTYPE system cannot contain single and double quotes", DataLossWarning) | ||
| 328 | sysid = sysid.replace("'", 'U00027') | ||
| 329 | if sysid.find("'") >= 0: | ||
| 330 | docStr += '"%s"' % sysid | ||
| 331 | else: | ||
| 332 | docStr += "'%s'" % sysid | ||
| 333 | else: | ||
| 334 | docStr += "''" | ||
| 335 | docStr += ">" | ||
| 336 | if self.doctype.name != token["name"]: | ||
| 337 | warnings.warn("lxml cannot represent doctype with a different name to the root element", DataLossWarning) | ||
| 338 | docStr += "<THIS_SHOULD_NEVER_APPEAR_PUBLICLY/>" | ||
| 339 | root = etree.fromstring(docStr) | ||
| 340 | |||
| 341 | # Append the initial comments: | ||
| 342 | for comment_token in self.initial_comments: | ||
| 343 | comment = self.commentClass(comment_token["data"]) | ||
| 344 | root.addprevious(comment._element) | ||
| 345 | |||
| 346 | # Create the root document and add the ElementTree to it | ||
| 347 | self.document = self.documentClass() | ||
| 348 | self.document._elementTree = root.getroottree() | ||
| 349 | |||
| 350 | # Give the root element the right name | ||
| 351 | name = token["name"] | ||
| 352 | namespace = token.get("namespace", self.defaultNamespace) | ||
| 353 | if namespace is None: | ||
| 354 | etree_tag = name | ||
| 355 | else: | ||
| 356 | etree_tag = "{%s}%s" % (namespace, name) | ||
| 357 | root.tag = etree_tag | ||
| 358 | |||
| 359 | # Add the root element to the internal child/open data structures | ||
| 360 | root_element = self.elementClass(name, namespace) | ||
| 361 | root_element._element = root | ||
| 362 | self.document._childNodes.append(root_element) | ||
| 363 | self.openElements.append(root_element) | ||
| 364 | |||
| 365 | # Reset to the default insert comment function | ||
| 366 | self.insertComment = self.insertCommentMain | ||
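Editorial note, not part of the vendored diff: the lxml TreeBuilder above is normally selected through html5lib's public API rather than instantiated by hand. A minimal sketch, assuming html5lib and lxml are importable under their usual unvendored names:

```python
# Hedged usage sketch: drive the lxml TreeBuilder via html5lib's
# public entry point. Assumes html5lib and lxml are installed.
import html5lib

# treebuilder="lxml" resolves to the TreeBuilder class above; its
# insertRoot() bootstraps the document with etree.fromstring() so
# the doctype can be represented, then parsing proceeds normally.
document = html5lib.parse("<!DOCTYPE html><p>Hi<!--c--></p>",
                          treebuilder="lxml")
print(document)  # the lxml root produced by getDocument()
```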
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/treewalkers/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/treewalkers/__init__.py new file mode 100644 index 0000000..31a173d --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/treewalkers/__init__.py | |||
| @@ -0,0 +1,154 @@ | |||
| 1 | """A collection of modules for iterating through different kinds of | ||
| 2 | tree, generating tokens identical to those produced by the tokenizer | ||
| 3 | module. | ||
| 4 | |||
| 5 | To create a tree walker for a new type of tree, you need to | ||
| 6 | implement a tree walker object (called TreeWalker by convention) that | ||
| 7 | implements a 'serialize' method taking a tree as sole argument and | ||
| 8 | returning an iterator generating tokens. | ||
| 9 | """ | ||
| 10 | |||
| 11 | from __future__ import absolute_import, division, unicode_literals | ||
| 12 | |||
| 13 | from .. import constants | ||
| 14 | from .._utils import default_etree | ||
| 15 | |||
| 16 | __all__ = ["getTreeWalker", "pprint"] | ||
| 17 | |||
| 18 | treeWalkerCache = {} | ||
| 19 | |||
| 20 | |||
| 21 | def getTreeWalker(treeType, implementation=None, **kwargs): | ||
| 22 | """Get a TreeWalker class for various types of tree with built-in support | ||
| 23 | |||
| 24 | :arg str treeType: the name of the tree type required (case-insensitive). | ||
| 25 | Supported values are: | ||
| 26 | |||
| 27 | * "dom": The xml.dom.minidom DOM implementation | ||
| 28 | * "etree": A generic walker for tree implementations exposing an | ||
| 29 | elementtree-like interface (known to work with ElementTree, | ||
| 30 | cElementTree and lxml.etree). | ||
| 31 | * "lxml": Optimized walker for lxml.etree | ||
| 32 | * "genshi": a Genshi stream | ||
| 33 | |||
| 34 | :arg implementation: A module implementing the tree type e.g. | ||
| 35 | xml.etree.ElementTree or cElementTree (Currently applies to the "etree" | ||
| 36 | tree type only). | ||
| 37 | |||
| 38 | :arg kwargs: keyword arguments passed to the etree walker--for other | ||
| 39 | walkers, this has no effect | ||
| 40 | |||
| 41 | :returns: a TreeWalker class | ||
| 42 | |||
| 43 | """ | ||
| 44 | |||
| 45 | treeType = treeType.lower() | ||
| 46 | if treeType not in treeWalkerCache: | ||
| 47 | if treeType == "dom": | ||
| 48 | from . import dom | ||
| 49 | treeWalkerCache[treeType] = dom.TreeWalker | ||
| 50 | elif treeType == "genshi": | ||
| 51 | from . import genshi | ||
| 52 | treeWalkerCache[treeType] = genshi.TreeWalker | ||
| 53 | elif treeType == "lxml": | ||
| 54 | from . import etree_lxml | ||
| 55 | treeWalkerCache[treeType] = etree_lxml.TreeWalker | ||
| 56 | elif treeType == "etree": | ||
| 57 | from . import etree | ||
| 58 | if implementation is None: | ||
| 59 | implementation = default_etree | ||
| 60 | # XXX: NEVER cache here, caching is done in the etree submodule | ||
| 61 | return etree.getETreeModule(implementation, **kwargs).TreeWalker | ||
| 62 | return treeWalkerCache.get(treeType) | ||
| 63 | |||
| 64 | |||
| 65 | def concatenateCharacterTokens(tokens): | ||
| 66 | pendingCharacters = [] | ||
| 67 | for token in tokens: | ||
| 68 | type = token["type"] | ||
| 69 | if type in ("Characters", "SpaceCharacters"): | ||
| 70 | pendingCharacters.append(token["data"]) | ||
| 71 | else: | ||
| 72 | if pendingCharacters: | ||
| 73 | yield {"type": "Characters", "data": "".join(pendingCharacters)} | ||
| 74 | pendingCharacters = [] | ||
| 75 | yield token | ||
| 76 | if pendingCharacters: | ||
| 77 | yield {"type": "Characters", "data": "".join(pendingCharacters)} | ||
| 78 | |||
| 79 | |||
| 80 | def pprint(walker): | ||
| 81 | """Pretty printer for tree walkers | ||
| 82 | |||
| 83 | Takes a TreeWalker instance and pretty prints the output of walking the tree. | ||
| 84 | |||
| 85 | :arg walker: a TreeWalker instance | ||
| 86 | |||
| 87 | """ | ||
| 88 | output = [] | ||
| 89 | indent = 0 | ||
| 90 | for token in concatenateCharacterTokens(walker): | ||
| 91 | type = token["type"] | ||
| 92 | if type in ("StartTag", "EmptyTag"): | ||
| 93 | # tag name | ||
| 94 | if token["namespace"] and token["namespace"] != constants.namespaces["html"]: | ||
| 95 | if token["namespace"] in constants.prefixes: | ||
| 96 | ns = constants.prefixes[token["namespace"]] | ||
| 97 | else: | ||
| 98 | ns = token["namespace"] | ||
| 99 | name = "%s %s" % (ns, token["name"]) | ||
| 100 | else: | ||
| 101 | name = token["name"] | ||
| 102 | output.append("%s<%s>" % (" " * indent, name)) | ||
| 103 | indent += 2 | ||
| 104 | # attributes (sorted for consistent ordering) | ||
| 105 | attrs = token["data"] | ||
| 106 | for (namespace, localname), value in sorted(attrs.items()): | ||
| 107 | if namespace: | ||
| 108 | if namespace in constants.prefixes: | ||
| 109 | ns = constants.prefixes[namespace] | ||
| 110 | else: | ||
| 111 | ns = namespace | ||
| 112 | name = "%s %s" % (ns, localname) | ||
| 113 | else: | ||
| 114 | name = localname | ||
| 115 | output.append("%s%s=\"%s\"" % (" " * indent, name, value)) | ||
| 116 | # self-closing | ||
| 117 | if type == "EmptyTag": | ||
| 118 | indent -= 2 | ||
| 119 | |||
| 120 | elif type == "EndTag": | ||
| 121 | indent -= 2 | ||
| 122 | |||
| 123 | elif type == "Comment": | ||
| 124 | output.append("%s<!-- %s -->" % (" " * indent, token["data"])) | ||
| 125 | |||
| 126 | elif type == "Doctype": | ||
| 127 | if token["name"]: | ||
| 128 | if token["publicId"]: | ||
| 129 | output.append("""%s<!DOCTYPE %s "%s" "%s">""" % | ||
| 130 | (" " * indent, | ||
| 131 | token["name"], | ||
| 132 | token["publicId"], | ||
| 133 | token["systemId"] if token["systemId"] else "")) | ||
| 134 | elif token["systemId"]: | ||
| 135 | output.append("""%s<!DOCTYPE %s "" "%s">""" % | ||
| 136 | (" " * indent, | ||
| 137 | token["name"], | ||
| 138 | token["systemId"])) | ||
| 139 | else: | ||
| 140 | output.append("%s<!DOCTYPE %s>" % (" " * indent, | ||
| 141 | token["name"])) | ||
| 142 | else: | ||
| 143 | output.append("%s<!DOCTYPE >" % (" " * indent,)) | ||
| 144 | |||
| 145 | elif type == "Characters": | ||
| 146 | output.append("%s\"%s\"" % (" " * indent, token["data"])) | ||
| 147 | |||
| 148 | elif type == "SpaceCharacters": | ||
| 149 | assert False, "concatenateCharacterTokens should have got rid of all Space tokens" | ||
| 150 | |||
| 151 | else: | ||
| 152 | raise ValueError("Unknown token type, %s" % type) | ||
| 153 | |||
| 154 | return "\n".join(output) | ||
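Editorial note: `getTreeWalker` returns a class, not an instance, and `pprint` consumes an instantiated walker. A minimal sketch, assuming an unvendored html5lib install:

```python
# Hedged sketch: obtain a walker class and pretty-print its tokens.
import html5lib
from html5lib.treewalkers import getTreeWalker, pprint

tree = html5lib.parse('<p class="x">hi <b>there</b></p>')
TreeWalker = getTreeWalker("etree")   # a class; instantiate with a tree
print(pprint(TreeWalker(tree)))       # indented StartTag/Characters dump
```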
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/treewalkers/base.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/treewalkers/base.py new file mode 100644 index 0000000..f82984b --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/treewalkers/base.py | |||
| @@ -0,0 +1,252 @@ | |||
| 1 | from __future__ import absolute_import, division, unicode_literals | ||
| 2 | |||
| 3 | from xml.dom import Node | ||
| 4 | from ..constants import namespaces, voidElements, spaceCharacters | ||
| 5 | |||
| 6 | __all__ = ["DOCUMENT", "DOCTYPE", "TEXT", "ELEMENT", "COMMENT", "ENTITY", "UNKNOWN", | ||
| 7 | "TreeWalker", "NonRecursiveTreeWalker"] | ||
| 8 | |||
| 9 | DOCUMENT = Node.DOCUMENT_NODE | ||
| 10 | DOCTYPE = Node.DOCUMENT_TYPE_NODE | ||
| 11 | TEXT = Node.TEXT_NODE | ||
| 12 | ELEMENT = Node.ELEMENT_NODE | ||
| 13 | COMMENT = Node.COMMENT_NODE | ||
| 14 | ENTITY = Node.ENTITY_NODE | ||
| 15 | UNKNOWN = "<#UNKNOWN#>" | ||
| 16 | |||
| 17 | spaceCharacters = "".join(spaceCharacters) | ||
| 18 | |||
| 19 | |||
| 20 | class TreeWalker(object): | ||
| 21 | """Walks a tree yielding tokens | ||
| 22 | |||
| 23 | Tokens are dicts that all have a ``type`` field specifying the type of the | ||
| 24 | token. | ||
| 25 | |||
| 26 | """ | ||
| 27 | def __init__(self, tree): | ||
| 28 | """Creates a TreeWalker | ||
| 29 | |||
| 30 | :arg tree: the tree to walk | ||
| 31 | |||
| 32 | """ | ||
| 33 | self.tree = tree | ||
| 34 | |||
| 35 | def __iter__(self): | ||
| 36 | raise NotImplementedError | ||
| 37 | |||
| 38 | def error(self, msg): | ||
| 39 | """Generates an error token with the given message | ||
| 40 | |||
| 41 | :arg msg: the error message | ||
| 42 | |||
| 43 | :returns: SerializeError token | ||
| 44 | |||
| 45 | """ | ||
| 46 | return {"type": "SerializeError", "data": msg} | ||
| 47 | |||
| 48 | def emptyTag(self, namespace, name, attrs, hasChildren=False): | ||
| 49 | """Generates an EmptyTag token | ||
| 50 | |||
| 51 | :arg namespace: the namespace of the token--can be ``None`` | ||
| 52 | |||
| 53 | :arg name: the name of the element | ||
| 54 | |||
| 55 | :arg attrs: the attributes of the element as a dict | ||
| 56 | |||
| 57 | :arg hasChildren: whether or not to yield a SerializeError token | ||
| 58 | because this tag shouldn't have children | ||
| 59 | |||
| 60 | :returns: EmptyTag token | ||
| 61 | |||
| 62 | """ | ||
| 63 | yield {"type": "EmptyTag", "name": name, | ||
| 64 | "namespace": namespace, | ||
| 65 | "data": attrs} | ||
| 66 | if hasChildren: | ||
| 67 | yield self.error("Void element has children") | ||
| 68 | |||
| 69 | def startTag(self, namespace, name, attrs): | ||
| 70 | """Generates a StartTag token | ||
| 71 | |||
| 72 | :arg namespace: the namespace of the token--can be ``None`` | ||
| 73 | |||
| 74 | :arg name: the name of the element | ||
| 75 | |||
| 76 | :arg attrs: the attributes of the element as a dict | ||
| 77 | |||
| 78 | :returns: StartTag token | ||
| 79 | |||
| 80 | """ | ||
| 81 | return {"type": "StartTag", | ||
| 82 | "name": name, | ||
| 83 | "namespace": namespace, | ||
| 84 | "data": attrs} | ||
| 85 | |||
| 86 | def endTag(self, namespace, name): | ||
| 87 | """Generates an EndTag token | ||
| 88 | |||
| 89 | :arg namespace: the namespace of the token--can be ``None`` | ||
| 90 | |||
| 91 | :arg name: the name of the element | ||
| 92 | |||
| 93 | :returns: EndTag token | ||
| 94 | |||
| 95 | """ | ||
| 96 | return {"type": "EndTag", | ||
| 97 | "name": name, | ||
| 98 | "namespace": namespace} | ||
| 99 | |||
| 100 | def text(self, data): | ||
| 101 | """Generates SpaceCharacters and Characters tokens | ||
| 102 | |||
| 103 | Depending on what's in the data, this generates one or more | ||
| 104 | ``SpaceCharacters`` and ``Characters`` tokens. | ||
| 105 | |||
| 106 | For example: | ||
| 107 | |||
| 108 | >>> from html5lib.treewalkers.base import TreeWalker | ||
| 109 | >>> # Give it an empty tree just so it instantiates | ||
| 110 | >>> walker = TreeWalker([]) | ||
| 111 | >>> list(walker.text('')) | ||
| 112 | [] | ||
| 113 | >>> list(walker.text(' ')) | ||
| 114 | [{u'data': ' ', u'type': u'SpaceCharacters'}] | ||
| 115 | >>> list(walker.text(' abc ')) # doctest: +NORMALIZE_WHITESPACE | ||
| 116 | [{u'data': ' ', u'type': u'SpaceCharacters'}, | ||
| 117 | {u'data': u'abc', u'type': u'Characters'}, | ||
| 118 | {u'data': u' ', u'type': u'SpaceCharacters'}] | ||
| 119 | |||
| 120 | :arg data: the text data | ||
| 121 | |||
| 122 | :returns: one or more ``SpaceCharacters`` and ``Characters`` tokens | ||
| 123 | |||
| 124 | """ | ||
| 125 | # Split off the leading and trailing whitespace runs below | ||
| 126 | middle = data.lstrip(spaceCharacters) | ||
| 127 | left = data[:len(data) - len(middle)] | ||
| 128 | if left: | ||
| 129 | yield {"type": "SpaceCharacters", "data": left} | ||
| 130 | data = middle | ||
| 131 | middle = data.rstrip(spaceCharacters) | ||
| 132 | right = data[len(middle):] | ||
| 133 | if middle: | ||
| 134 | yield {"type": "Characters", "data": middle} | ||
| 135 | if right: | ||
| 136 | yield {"type": "SpaceCharacters", "data": right} | ||
| 137 | |||
| 138 | def comment(self, data): | ||
| 139 | """Generates a Comment token | ||
| 140 | |||
| 141 | :arg data: the comment | ||
| 142 | |||
| 143 | :returns: Comment token | ||
| 144 | |||
| 145 | """ | ||
| 146 | return {"type": "Comment", "data": data} | ||
| 147 | |||
| 148 | def doctype(self, name, publicId=None, systemId=None): | ||
| 149 | """Generates a Doctype token | ||
| 150 | |||
| 151 | :arg name: the doctype name | ||
| 152 | |||
| 153 | :arg publicId: the doctype public identifier, or ``None`` | ||
| 154 | |||
| 155 | :arg systemId: the doctype system identifier, or ``None`` | ||
| 156 | |||
| 157 | :returns: the Doctype token | ||
| 158 | |||
| 159 | """ | ||
| 160 | return {"type": "Doctype", | ||
| 161 | "name": name, | ||
| 162 | "publicId": publicId, | ||
| 163 | "systemId": systemId} | ||
| 164 | |||
| 165 | def entity(self, name): | ||
| 166 | """Generates an Entity token | ||
| 167 | |||
| 168 | :arg name: the entity name | ||
| 169 | |||
| 170 | :returns: an Entity token | ||
| 171 | |||
| 172 | """ | ||
| 173 | return {"type": "Entity", "name": name} | ||
| 174 | |||
| 175 | def unknown(self, nodeType): | ||
| 176 | """Handles unknown node types""" | ||
| 177 | return self.error("Unknown node type: " + nodeType) | ||
| 178 | |||
| 179 | |||
| 180 | class NonRecursiveTreeWalker(TreeWalker): | ||
| 181 | def getNodeDetails(self, node): | ||
| 182 | raise NotImplementedError | ||
| 183 | |||
| 184 | def getFirstChild(self, node): | ||
| 185 | raise NotImplementedError | ||
| 186 | |||
| 187 | def getNextSibling(self, node): | ||
| 188 | raise NotImplementedError | ||
| 189 | |||
| 190 | def getParentNode(self, node): | ||
| 191 | raise NotImplementedError | ||
| 192 | |||
| 193 | def __iter__(self): | ||
| 194 | currentNode = self.tree | ||
| 195 | while currentNode is not None: | ||
| 196 | details = self.getNodeDetails(currentNode) | ||
| 197 | type, details = details[0], details[1:] | ||
| 198 | hasChildren = False | ||
| 199 | |||
| 200 | if type == DOCTYPE: | ||
| 201 | yield self.doctype(*details) | ||
| 202 | |||
| 203 | elif type == TEXT: | ||
| 204 | for token in self.text(*details): | ||
| 205 | yield token | ||
| 206 | |||
| 207 | elif type == ELEMENT: | ||
| 208 | namespace, name, attributes, hasChildren = details | ||
| 209 | if (not namespace or namespace == namespaces["html"]) and name in voidElements: | ||
| 210 | for token in self.emptyTag(namespace, name, attributes, | ||
| 211 | hasChildren): | ||
| 212 | yield token | ||
| 213 | hasChildren = False | ||
| 214 | else: | ||
| 215 | yield self.startTag(namespace, name, attributes) | ||
| 216 | |||
| 217 | elif type == COMMENT: | ||
| 218 | yield self.comment(details[0]) | ||
| 219 | |||
| 220 | elif type == ENTITY: | ||
| 221 | yield self.entity(details[0]) | ||
| 222 | |||
| 223 | elif type == DOCUMENT: | ||
| 224 | hasChildren = True | ||
| 225 | |||
| 226 | else: | ||
| 227 | yield self.unknown(details[0]) | ||
| 228 | |||
| 229 | if hasChildren: | ||
| 230 | firstChild = self.getFirstChild(currentNode) | ||
| 231 | else: | ||
| 232 | firstChild = None | ||
| 233 | |||
| 234 | if firstChild is not None: | ||
| 235 | currentNode = firstChild | ||
| 236 | else: | ||
| 237 | while currentNode is not None: | ||
| 238 | details = self.getNodeDetails(currentNode) | ||
| 239 | type, details = details[0], details[1:] | ||
| 240 | if type == ELEMENT: | ||
| 241 | namespace, name, attributes, hasChildren = details | ||
| 242 | if (namespace and namespace != namespaces["html"]) or name not in voidElements: | ||
| 243 | yield self.endTag(namespace, name) | ||
| 244 | if self.tree is currentNode: | ||
| 245 | currentNode = None | ||
| 246 | break | ||
| 247 | nextSibling = self.getNextSibling(currentNode) | ||
| 248 | if nextSibling is not None: | ||
| 249 | currentNode = nextSibling | ||
| 250 | break | ||
| 251 | else: | ||
| 252 | currentNode = self.getParentNode(currentNode) | ||
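Editorial note: `TreeWalker` itself is not iterable until subclassed, but its token helpers are plain functions over data and can be exercised directly. A small sketch under that assumption:

```python
# Hedged sketch of the token helper methods defined above.
from html5lib.treewalkers.base import TreeWalker

walker = TreeWalker(tree=None)  # the tree is unused by the helpers
print(walker.startTag(None, "p", {(None, "class"): "x"}))
print(walker.endTag(None, "p"))
# text() splits leading/trailing whitespace into SpaceCharacters tokens:
print(list(walker.text("  hello  ")))
```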
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/treewalkers/dom.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/treewalkers/dom.py new file mode 100644 index 0000000..b3e2753 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/treewalkers/dom.py | |||
| @@ -0,0 +1,43 @@ | |||
| 1 | from __future__ import absolute_import, division, unicode_literals | ||
| 2 | |||
| 3 | from xml.dom import Node | ||
| 4 | |||
| 5 | from . import base | ||
| 6 | |||
| 7 | |||
| 8 | class TreeWalker(base.NonRecursiveTreeWalker): | ||
| 9 | def getNodeDetails(self, node): | ||
| 10 | if node.nodeType == Node.DOCUMENT_TYPE_NODE: | ||
| 11 | return base.DOCTYPE, node.name, node.publicId, node.systemId | ||
| 12 | |||
| 13 | elif node.nodeType in (Node.TEXT_NODE, Node.CDATA_SECTION_NODE): | ||
| 14 | return base.TEXT, node.nodeValue | ||
| 15 | |||
| 16 | elif node.nodeType == Node.ELEMENT_NODE: | ||
| 17 | attrs = {} | ||
| 18 | for attr in list(node.attributes.keys()): | ||
| 19 | attr = node.getAttributeNode(attr) | ||
| 20 | if attr.namespaceURI: | ||
| 21 | attrs[(attr.namespaceURI, attr.localName)] = attr.value | ||
| 22 | else: | ||
| 23 | attrs[(None, attr.name)] = attr.value | ||
| 24 | return (base.ELEMENT, node.namespaceURI, node.nodeName, | ||
| 25 | attrs, node.hasChildNodes()) | ||
| 26 | |||
| 27 | elif node.nodeType == Node.COMMENT_NODE: | ||
| 28 | return base.COMMENT, node.nodeValue | ||
| 29 | |||
| 30 | elif node.nodeType in (Node.DOCUMENT_NODE, Node.DOCUMENT_FRAGMENT_NODE): | ||
| 31 | return (base.DOCUMENT,) | ||
| 32 | |||
| 33 | else: | ||
| 34 | return base.UNKNOWN, node.nodeType | ||
| 35 | |||
| 36 | def getFirstChild(self, node): | ||
| 37 | return node.firstChild | ||
| 38 | |||
| 39 | def getNextSibling(self, node): | ||
| 40 | return node.nextSibling | ||
| 41 | |||
| 42 | def getParentNode(self, node): | ||
| 43 | return node.parentNode | ||
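Editorial note: a short sketch of walking an `xml.dom.minidom` tree produced by html5lib's "dom" treebuilder, assuming an unvendored html5lib install:

```python
# Hedged sketch: walk a minidom tree with the DOM TreeWalker above.
import html5lib
from html5lib.treewalkers.dom import TreeWalker

dom_tree = html5lib.parse("<p>hi <b>there</b></p>", treebuilder="dom")
for token in TreeWalker(dom_tree):
    print(token["type"], token.get("name") or token.get("data"))
```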
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/treewalkers/etree.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/treewalkers/etree.py new file mode 100644 index 0000000..1a35add --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/treewalkers/etree.py | |||
| @@ -0,0 +1,130 @@ | |||
| 1 | from __future__ import absolute_import, division, unicode_literals | ||
| 2 | |||
| 3 | from collections import OrderedDict | ||
| 4 | import re | ||
| 5 | |||
| 6 | from pip._vendor.six import string_types | ||
| 7 | |||
| 8 | from . import base | ||
| 9 | from .._utils import moduleFactoryFactory | ||
| 10 | |||
| 11 | tag_regexp = re.compile("{([^}]*)}(.*)") | ||
| 12 | |||
| 13 | |||
| 14 | def getETreeBuilder(ElementTreeImplementation): | ||
| 15 | ElementTree = ElementTreeImplementation | ||
| 16 | ElementTreeCommentType = ElementTree.Comment("asd").tag | ||
| 17 | |||
| 18 | class TreeWalker(base.NonRecursiveTreeWalker): # pylint:disable=unused-variable | ||
| 19 | """Given the particular ElementTree representation, this implementation, | ||
| 20 | to avoid using recursion, returns "nodes" as tuples with the following | ||
| 21 | content: | ||
| 22 | |||
| 23 | 1. The current element | ||
| 24 | |||
| 25 | 2. The index of the element relative to its parent | ||
| 26 | |||
| 27 | 3. A stack of ancestor elements | ||
| 28 | |||
| 29 | 4. A flag "text", "tail" or None to indicate if the current node is a | ||
| 30 | text node; either the text or tail of the current element (1) | ||
| 31 | """ | ||
| 32 | def getNodeDetails(self, node): | ||
| 33 | if isinstance(node, tuple): # It might be the root Element | ||
| 34 | elt, _, _, flag = node | ||
| 35 | if flag in ("text", "tail"): | ||
| 36 | return base.TEXT, getattr(elt, flag) | ||
| 37 | else: | ||
| 38 | node = elt | ||
| 39 | |||
| 40 | if not hasattr(node, "tag"): | ||
| 41 | node = node.getroot() | ||
| 42 | |||
| 43 | if node.tag in ("DOCUMENT_ROOT", "DOCUMENT_FRAGMENT"): | ||
| 44 | return (base.DOCUMENT,) | ||
| 45 | |||
| 46 | elif node.tag == "<!DOCTYPE>": | ||
| 47 | return (base.DOCTYPE, node.text, | ||
| 48 | node.get("publicId"), node.get("systemId")) | ||
| 49 | |||
| 50 | elif node.tag == ElementTreeCommentType: | ||
| 51 | return base.COMMENT, node.text | ||
| 52 | |||
| 53 | else: | ||
| 54 | assert isinstance(node.tag, string_types), type(node.tag) | ||
| 55 | # This is assumed to be an ordinary element | ||
| 56 | match = tag_regexp.match(node.tag) | ||
| 57 | if match: | ||
| 58 | namespace, tag = match.groups() | ||
| 59 | else: | ||
| 60 | namespace = None | ||
| 61 | tag = node.tag | ||
| 62 | attrs = OrderedDict() | ||
| 63 | for name, value in list(node.attrib.items()): | ||
| 64 | match = tag_regexp.match(name) | ||
| 65 | if match: | ||
| 66 | attrs[(match.group(1), match.group(2))] = value | ||
| 67 | else: | ||
| 68 | attrs[(None, name)] = value | ||
| 69 | return (base.ELEMENT, namespace, tag, | ||
| 70 | attrs, len(node) or node.text) | ||
| 71 | |||
| 72 | def getFirstChild(self, node): | ||
| 73 | if isinstance(node, tuple): | ||
| 74 | element, key, parents, flag = node | ||
| 75 | else: | ||
| 76 | element, key, parents, flag = node, None, [], None | ||
| 77 | |||
| 78 | if flag in ("text", "tail"): | ||
| 79 | return None | ||
| 80 | else: | ||
| 81 | if element.text: | ||
| 82 | return element, key, parents, "text" | ||
| 83 | elif len(element): | ||
| 84 | parents.append(element) | ||
| 85 | return element[0], 0, parents, None | ||
| 86 | else: | ||
| 87 | return None | ||
| 88 | |||
| 89 | def getNextSibling(self, node): | ||
| 90 | if isinstance(node, tuple): | ||
| 91 | element, key, parents, flag = node | ||
| 92 | else: | ||
| 93 | return None | ||
| 94 | |||
| 95 | if flag == "text": | ||
| 96 | if len(element): | ||
| 97 | parents.append(element) | ||
| 98 | return element[0], 0, parents, None | ||
| 99 | else: | ||
| 100 | return None | ||
| 101 | else: | ||
| 102 | if element.tail and flag != "tail": | ||
| 103 | return element, key, parents, "tail" | ||
| 104 | elif key < len(parents[-1]) - 1: | ||
| 105 | return parents[-1][key + 1], key + 1, parents, None | ||
| 106 | else: | ||
| 107 | return None | ||
| 108 | |||
| 109 | def getParentNode(self, node): | ||
| 110 | if isinstance(node, tuple): | ||
| 111 | element, key, parents, flag = node | ||
| 112 | else: | ||
| 113 | return None | ||
| 114 | |||
| 115 | if flag == "text": | ||
| 116 | if not parents: | ||
| 117 | return element | ||
| 118 | else: | ||
| 119 | return element, key, parents, None | ||
| 120 | else: | ||
| 121 | parent = parents.pop() | ||
| 122 | if not parents: | ||
| 123 | return parent | ||
| 124 | else: | ||
| 125 | assert list(parents[-1]).count(parent) == 1 | ||
| 126 | return parent, list(parents[-1]).index(parent), parents, None | ||
| 127 | |||
| 128 | return locals() | ||
| 129 | |||
| 130 | getETreeModule = moduleFactoryFactory(getETreeBuilder) | ||
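Editorial note: `getETreeModule` wraps `getETreeBuilder` in the module-factory cache, so the walker class is obtained per ElementTree implementation. A sketch using the stdlib module:

```python
# Hedged sketch: build the walker for stdlib ElementTree and iterate
# a hand-made tree; text/tail positions are yielded as tuples internally.
import xml.etree.ElementTree as ElementTree
from html5lib.treewalkers import etree as etree_walkers

TreeWalker = etree_walkers.getETreeModule(ElementTree).TreeWalker
root = ElementTree.fromstring("<p>hi <b>there</b> end</p>")
for token in TreeWalker(root):
    print(token)
```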
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/treewalkers/etree_lxml.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/treewalkers/etree_lxml.py new file mode 100644 index 0000000..f6f395a --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/treewalkers/etree_lxml.py | |||
| @@ -0,0 +1,213 @@ | |||
| 1 | from __future__ import absolute_import, division, unicode_literals | ||
| 2 | from pip._vendor.six import text_type | ||
| 3 | |||
| 4 | from lxml import etree | ||
| 5 | from ..treebuilders.etree import tag_regexp | ||
| 6 | |||
| 7 | from . import base | ||
| 8 | |||
| 9 | from .. import _ihatexml | ||
| 10 | |||
| 11 | |||
| 12 | def ensure_str(s): | ||
| 13 | if s is None: | ||
| 14 | return None | ||
| 15 | elif isinstance(s, text_type): | ||
| 16 | return s | ||
| 17 | else: | ||
| 18 | return s.decode("ascii", "strict") | ||
| 19 | |||
| 20 | |||
| 21 | class Root(object): | ||
| 22 | def __init__(self, et): | ||
| 23 | self.elementtree = et | ||
| 24 | self.children = [] | ||
| 25 | |||
| 26 | try: | ||
| 27 | if et.docinfo.internalDTD: | ||
| 28 | self.children.append(Doctype(self, | ||
| 29 | ensure_str(et.docinfo.root_name), | ||
| 30 | ensure_str(et.docinfo.public_id), | ||
| 31 | ensure_str(et.docinfo.system_url))) | ||
| 32 | except AttributeError: | ||
| 33 | pass | ||
| 34 | |||
| 35 | try: | ||
| 36 | node = et.getroot() | ||
| 37 | except AttributeError: | ||
| 38 | node = et | ||
| 39 | |||
| 40 | while node.getprevious() is not None: | ||
| 41 | node = node.getprevious() | ||
| 42 | while node is not None: | ||
| 43 | self.children.append(node) | ||
| 44 | node = node.getnext() | ||
| 45 | |||
| 46 | self.text = None | ||
| 47 | self.tail = None | ||
| 48 | |||
| 49 | def __getitem__(self, key): | ||
| 50 | return self.children[key] | ||
| 51 | |||
| 52 | def getnext(self): | ||
| 53 | return None | ||
| 54 | |||
| 55 | def __len__(self): | ||
| 56 | return 1 | ||
| 57 | |||
| 58 | |||
| 59 | class Doctype(object): | ||
| 60 | def __init__(self, root_node, name, public_id, system_id): | ||
| 61 | self.root_node = root_node | ||
| 62 | self.name = name | ||
| 63 | self.public_id = public_id | ||
| 64 | self.system_id = system_id | ||
| 65 | |||
| 66 | self.text = None | ||
| 67 | self.tail = None | ||
| 68 | |||
| 69 | def getnext(self): | ||
| 70 | return self.root_node.children[1] | ||
| 71 | |||
| 72 | |||
| 73 | class FragmentRoot(Root): | ||
| 74 | def __init__(self, children): | ||
| 75 | self.children = [FragmentWrapper(self, child) for child in children] | ||
| 76 | self.text = self.tail = None | ||
| 77 | |||
| 78 | def getnext(self): | ||
| 79 | return None | ||
| 80 | |||
| 81 | |||
| 82 | class FragmentWrapper(object): | ||
| 83 | def __init__(self, fragment_root, obj): | ||
| 84 | self.root_node = fragment_root | ||
| 85 | self.obj = obj | ||
| 86 | if hasattr(self.obj, 'text'): | ||
| 87 | self.text = ensure_str(self.obj.text) | ||
| 88 | else: | ||
| 89 | self.text = None | ||
| 90 | if hasattr(self.obj, 'tail'): | ||
| 91 | self.tail = ensure_str(self.obj.tail) | ||
| 92 | else: | ||
| 93 | self.tail = None | ||
| 94 | |||
| 95 | def __getattr__(self, name): | ||
| 96 | return getattr(self.obj, name) | ||
| 97 | |||
| 98 | def getnext(self): | ||
| 99 | siblings = self.root_node.children | ||
| 100 | idx = siblings.index(self) | ||
| 101 | if idx < len(siblings) - 1: | ||
| 102 | return siblings[idx + 1] | ||
| 103 | else: | ||
| 104 | return None | ||
| 105 | |||
| 106 | def __getitem__(self, key): | ||
| 107 | return self.obj[key] | ||
| 108 | |||
| 109 | def __bool__(self): | ||
| 110 | return bool(self.obj) | ||
| 111 | |||
| 112 | def getparent(self): | ||
| 113 | return None | ||
| 114 | |||
| 115 | def __str__(self): | ||
| 116 | return str(self.obj) | ||
| 117 | |||
| 118 | def __unicode__(self): | ||
| 119 | return str(self.obj) | ||
| 120 | |||
| 121 | def __len__(self): | ||
| 122 | return len(self.obj) | ||
| 123 | |||
| 124 | |||
| 125 | class TreeWalker(base.NonRecursiveTreeWalker): | ||
| 126 | def __init__(self, tree): | ||
| 127 | # pylint:disable=redefined-variable-type | ||
| 128 | if isinstance(tree, list): | ||
| 129 | self.fragmentChildren = set(tree) | ||
| 130 | tree = FragmentRoot(tree) | ||
| 131 | else: | ||
| 132 | self.fragmentChildren = set() | ||
| 133 | tree = Root(tree) | ||
| 134 | base.NonRecursiveTreeWalker.__init__(self, tree) | ||
| 135 | self.filter = _ihatexml.InfosetFilter() | ||
| 136 | |||
| 137 | def getNodeDetails(self, node): | ||
| 138 | if isinstance(node, tuple): # Text node | ||
| 139 | node, key = node | ||
| 140 | assert key in ("text", "tail"), "Text nodes are text or tail, found %s" % key | ||
| 141 | return base.TEXT, ensure_str(getattr(node, key)) | ||
| 142 | |||
| 143 | elif isinstance(node, Root): | ||
| 144 | return (base.DOCUMENT,) | ||
| 145 | |||
| 146 | elif isinstance(node, Doctype): | ||
| 147 | return base.DOCTYPE, node.name, node.public_id, node.system_id | ||
| 148 | |||
| 149 | elif isinstance(node, FragmentWrapper) and not hasattr(node, "tag"): | ||
| 150 | return base.TEXT, ensure_str(node.obj) | ||
| 151 | |||
| 152 | elif node.tag == etree.Comment: | ||
| 153 | return base.COMMENT, ensure_str(node.text) | ||
| 154 | |||
| 155 | elif node.tag == etree.Entity: | ||
| 156 | return base.ENTITY, ensure_str(node.text)[1:-1] # strip &; | ||
| 157 | |||
| 158 | else: | ||
| 159 | # This is assumed to be an ordinary element | ||
| 160 | match = tag_regexp.match(ensure_str(node.tag)) | ||
| 161 | if match: | ||
| 162 | namespace, tag = match.groups() | ||
| 163 | else: | ||
| 164 | namespace = None | ||
| 165 | tag = ensure_str(node.tag) | ||
| 166 | attrs = {} | ||
| 167 | for name, value in list(node.attrib.items()): | ||
| 168 | name = ensure_str(name) | ||
| 169 | value = ensure_str(value) | ||
| 170 | match = tag_regexp.match(name) | ||
| 171 | if match: | ||
| 172 | attrs[(match.group(1), match.group(2))] = value | ||
| 173 | else: | ||
| 174 | attrs[(None, name)] = value | ||
| 175 | return (base.ELEMENT, namespace, self.filter.fromXmlName(tag), | ||
| 176 | attrs, len(node) > 0 or node.text) | ||
| 177 | |||
| 178 | def getFirstChild(self, node): | ||
| 179 | assert not isinstance(node, tuple), "Text nodes have no children" | ||
| 180 | |||
| 181 | assert len(node) or node.text, "Node has no children" | ||
| 182 | if node.text: | ||
| 183 | return (node, "text") | ||
| 184 | else: | ||
| 185 | return node[0] | ||
| 186 | |||
| 187 | def getNextSibling(self, node): | ||
| 188 | if isinstance(node, tuple): # Text node | ||
| 189 | node, key = node | ||
| 190 | assert key in ("text", "tail"), "Text nodes are text or tail, found %s" % key | ||
| 191 | if key == "text": | ||
| 192 | # XXX: we cannot use a "bool(node) and node[0] or None" construct here | ||
| 193 | # because node[0] might evaluate to False if it has no child element | ||
| 194 | if len(node): | ||
| 195 | return node[0] | ||
| 196 | else: | ||
| 197 | return None | ||
| 198 | else: # tail | ||
| 199 | return node.getnext() | ||
| 200 | |||
| 201 | return (node, "tail") if node.tail else node.getnext() | ||
| 202 | |||
| 203 | def getParentNode(self, node): | ||
| 204 | if isinstance(node, tuple): # Text node | ||
| 205 | node, key = node | ||
| 206 | assert key in ("text", "tail"), "Text nodes are text or tail, found %s" % key | ||
| 207 | if key == "text": | ||
| 208 | return node | ||
| 209 | # else: fallback to "normal" processing | ||
| 210 | elif node in self.fragmentChildren: | ||
| 211 | return None | ||
| 212 | |||
| 213 | return node.getparent() | ||
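Editorial note: the lxml walker accepts either a full lxml tree or a fragment list; `Root`/`FragmentRoot` above normalize both. A sketch assuming html5lib and lxml are installed:

```python
# Hedged sketch: parse into lxml and walk with the optimized walker.
import html5lib
from html5lib.treewalkers import getTreeWalker

lxml_tree = html5lib.parse("<!DOCTYPE html><p>hi</p>",
                           treebuilder="lxml")
walker = getTreeWalker("lxml")
for token in walker(lxml_tree):
    print(token["type"])
```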
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/treewalkers/genshi.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/treewalkers/genshi.py new file mode 100644 index 0000000..42cd559 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/html5lib/treewalkers/genshi.py | |||
| @@ -0,0 +1,69 @@ | |||
| 1 | from __future__ import absolute_import, division, unicode_literals | ||
| 2 | |||
| 3 | from genshi.core import QName | ||
| 4 | from genshi.core import START, END, XML_NAMESPACE, DOCTYPE, TEXT | ||
| 5 | from genshi.core import START_NS, END_NS, START_CDATA, END_CDATA, PI, COMMENT | ||
| 6 | |||
| 7 | from . import base | ||
| 8 | |||
| 9 | from ..constants import voidElements, namespaces | ||
| 10 | |||
| 11 | |||
| 12 | class TreeWalker(base.TreeWalker): | ||
| 13 | def __iter__(self): | ||
| 14 | # Buffer the events so we can pass in the following one | ||
| 15 | previous = None | ||
| 16 | for event in self.tree: | ||
| 17 | if previous is not None: | ||
| 18 | for token in self.tokens(previous, event): | ||
| 19 | yield token | ||
| 20 | previous = event | ||
| 21 | |||
| 22 | # Don't forget the final event! | ||
| 23 | if previous is not None: | ||
| 24 | for token in self.tokens(previous, None): | ||
| 25 | yield token | ||
| 26 | |||
| 27 | def tokens(self, event, next): | ||
| 28 | kind, data, _ = event | ||
| 29 | if kind == START: | ||
| 30 | tag, attribs = data | ||
| 31 | name = tag.localname | ||
| 32 | namespace = tag.namespace | ||
| 33 | converted_attribs = {} | ||
| 34 | for k, v in attribs: | ||
| 35 | if isinstance(k, QName): | ||
| 36 | converted_attribs[(k.namespace, k.localname)] = v | ||
| 37 | else: | ||
| 38 | converted_attribs[(None, k)] = v | ||
| 39 | |||
| 40 | if namespace == namespaces["html"] and name in voidElements: | ||
| 41 | for token in self.emptyTag(namespace, name, converted_attribs, | ||
| 42 | not next or next[0] != END or | ||
| 43 | next[1] != tag): | ||
| 44 | yield token | ||
| 45 | else: | ||
| 46 | yield self.startTag(namespace, name, converted_attribs) | ||
| 47 | |||
| 48 | elif kind == END: | ||
| 49 | name = data.localname | ||
| 50 | namespace = data.namespace | ||
| 51 | if namespace != namespaces["html"] or name not in voidElements: | ||
| 52 | yield self.endTag(namespace, name) | ||
| 53 | |||
| 54 | elif kind == COMMENT: | ||
| 55 | yield self.comment(data) | ||
| 56 | |||
| 57 | elif kind == TEXT: | ||
| 58 | for token in self.text(data): | ||
| 59 | yield token | ||
| 60 | |||
| 61 | elif kind == DOCTYPE: | ||
| 62 | yield self.doctype(*data) | ||
| 63 | |||
| 64 | elif kind in (XML_NAMESPACE, DOCTYPE, START_NS, END_NS, | ||
| 65 | START_CDATA, END_CDATA, PI): | ||
| 66 | pass | ||
| 67 | |||
| 68 | else: | ||
| 69 | yield self.unknown(kind) | ||
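Editorial note: this walker consumes Genshi event streams, so it only applies when the optional genshi package is installed. A sketch under that assumption:

```python
# Hedged sketch: round-trip a Genshi markup stream through this walker
# and html5lib's serializer. Requires the optional genshi package.
from genshi.input import HTML
from html5lib.serializer import HTMLSerializer
from html5lib.treewalkers import getTreeWalker

stream = HTML(u"<p>hi <br> there</p>")
walker = getTreeWalker("genshi")
print("".join(HTMLSerializer().serialize(walker(stream))))
```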
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/idna/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/idna/__init__.py new file mode 100644 index 0000000..4ed56a1 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/idna/__init__.py | |||
| @@ -0,0 +1,2 @@ | |||
| 1 | from .package_data import __version__ | ||
| 2 | from .core import * | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/idna/codec.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/idna/codec.py new file mode 100644 index 0000000..65b06e2 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/idna/codec.py | |||
| @@ -0,0 +1,118 @@ | |||
| 1 | from .core import encode, decode, alabel, ulabel, IDNAError | ||
| 2 | import codecs | ||
| 3 | import re | ||
| 4 | |||
| 5 | _unicode_dots_re = re.compile(u'[\u002e\u3002\uff0e\uff61]') | ||
| 6 | |||
| 7 | class Codec(codecs.Codec): | ||
| 8 | |||
| 9 | def encode(self, data, errors='strict'): | ||
| 10 | |||
| 11 | if errors != 'strict': | ||
| 12 | raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) | ||
| 13 | |||
| 14 | if not data: | ||
| 15 | return "", 0 | ||
| 16 | |||
| 17 | return encode(data), len(data) | ||
| 18 | |||
| 19 | def decode(self, data, errors='strict'): | ||
| 20 | |||
| 21 | if errors != 'strict': | ||
| 22 | raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) | ||
| 23 | |||
| 24 | if not data: | ||
| 25 | return u"", 0 | ||
| 26 | |||
| 27 | return decode(data), len(data) | ||
| 28 | |||
| 29 | class IncrementalEncoder(codecs.BufferedIncrementalEncoder): | ||
| 30 | def _buffer_encode(self, data, errors, final): | ||
| 31 | if errors != 'strict': | ||
| 32 | raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) | ||
| 33 | |||
| 34 | if not data: | ||
| 35 | return ("", 0) | ||
| 36 | |||
| 37 | labels = _unicode_dots_re.split(data) | ||
| 38 | trailing_dot = u'' | ||
| 39 | if labels: | ||
| 40 | if not labels[-1]: | ||
| 41 | trailing_dot = '.' | ||
| 42 | del labels[-1] | ||
| 43 | elif not final: | ||
| 44 | # Keep potentially unfinished label until the next call | ||
| 45 | del labels[-1] | ||
| 46 | if labels: | ||
| 47 | trailing_dot = '.' | ||
| 48 | |||
| 49 | result = [] | ||
| 50 | size = 0 | ||
| 51 | for label in labels: | ||
| 52 | result.append(alabel(label)) | ||
| 53 | if size: | ||
| 54 | size += 1 | ||
| 55 | size += len(label) | ||
| 56 | |||
| 57 | # Join with U+002E | ||
| 58 | result = ".".join(result) + trailing_dot | ||
| 59 | size += len(trailing_dot) | ||
| 60 | return (result, size) | ||
| 61 | |||
| 62 | class IncrementalDecoder(codecs.BufferedIncrementalDecoder): | ||
| 63 | def _buffer_decode(self, data, errors, final): | ||
| 64 | if errors != 'strict': | ||
| 65 | raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) | ||
| 66 | |||
| 67 | if not data: | ||
| 68 | return (u"", 0) | ||
| 69 | |||
| 70 | # IDNA allows decoding to operate on Unicode strings, too. Note that | ||
| 71 | # 'unicode' does not exist on Python 3, so test against str instead. | ||
| 72 | if isinstance(data, str): | ||
| 73 | labels = _unicode_dots_re.split(data) | ||
| 74 | else: | ||
| 75 | # Must be an ASCII-only byte string | ||
| 76 | data = data.decode('ascii') | ||
| 77 | labels = data.split(u'.') | ||
| 78 | |||
| 79 | trailing_dot = u'' | ||
| 80 | if labels: | ||
| 81 | if not labels[-1]: | ||
| 82 | trailing_dot = u'.' | ||
| 83 | del labels[-1] | ||
| 84 | elif not final: | ||
| 85 | # Keep potentially unfinished label until the next call | ||
| 86 | del labels[-1] | ||
| 87 | if labels: | ||
| 88 | trailing_dot = u'.' | ||
| 89 | |||
| 90 | result = [] | ||
| 91 | size = 0 | ||
| 92 | for label in labels: | ||
| 93 | result.append(ulabel(label)) | ||
| 94 | if size: | ||
| 95 | size += 1 | ||
| 96 | size += len(label) | ||
| 97 | |||
| 98 | result = u".".join(result) + trailing_dot | ||
| 99 | size += len(trailing_dot) | ||
| 100 | return (result, size) | ||
| 101 | |||
| 102 | |||
| 103 | class StreamWriter(Codec, codecs.StreamWriter): | ||
| 104 | pass | ||
| 105 | |||
| 106 | class StreamReader(Codec, codecs.StreamReader): | ||
| 107 | pass | ||
| 108 | |||
| 109 | def getregentry(): | ||
| 110 | return codecs.CodecInfo( | ||
| 111 | name='idna', | ||
| 112 | encode=Codec().encode, | ||
| 113 | decode=Codec().decode, | ||
| 114 | incrementalencoder=IncrementalEncoder, | ||
| 115 | incrementaldecoder=IncrementalDecoder, | ||
| 116 | streamwriter=StreamWriter, | ||
| 117 | streamreader=StreamReader, | ||
| 118 | ) | ||
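Editorial note: registering this codec globally would collide with Python's builtin "idna" codec, so a sketch exercising the classes directly is safer. The `pip._vendor` import path reflects this vendored copy:

```python
# Hedged sketch: use the Codec class above directly.
from pip._vendor.idna.codec import Codec

print(Codec().encode(u"b\u00fccher.example"))
# expected: (b'xn--bcher-kva.example', 14)
print(Codec().decode(b"xn--bcher-kva.example"))
# expected: (u'b\u00fccher.example', 21)
```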
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/idna/compat.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/idna/compat.py new file mode 100644 index 0000000..f4d3f6d --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/idna/compat.py | |||
| @@ -0,0 +1,12 @@ | |||
| 1 | from .core import * | ||
| 2 | from .codec import * | ||
| 3 | |||
| 4 | def ToASCII(label): | ||
| 5 | return encode(label) | ||
| 6 | |||
| 7 | def ToUnicode(label): | ||
| 8 | return decode(label) | ||
| 9 | |||
| 10 | def nameprep(s): | ||
| 11 | raise NotImplementedError("IDNA 2008 does not utilise nameprep protocol") | ||
| 12 | |||
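Editorial note: a one-liner sketch of the IDNA2003-style compatibility aliases above, using the vendored import path:

```python
# Hedged sketch: ToASCII/ToUnicode are thin wrappers over encode/decode.
from pip._vendor.idna.compat import ToASCII, ToUnicode

print(ToASCII(u"b\u00fccher"))        # expected: b'xn--bcher-kva'
print(ToUnicode(b"xn--bcher-kva"))    # expected: u'b\u00fccher'
```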
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/idna/core.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/idna/core.py new file mode 100644 index 0000000..944ff98 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/idna/core.py | |||
| @@ -0,0 +1,387 @@ | |||
| 1 | from . import idnadata | ||
| 2 | import bisect | ||
| 3 | import unicodedata | ||
| 4 | import re | ||
| 5 | import sys | ||
| 6 | from .intranges import intranges_contain | ||
| 7 | |||
| 8 | _virama_combining_class = 9 | ||
| 9 | _alabel_prefix = b'xn--' | ||
| 10 | _unicode_dots_re = re.compile(u'[\u002e\u3002\uff0e\uff61]') | ||
| 11 | |||
| 12 | if sys.version_info[0] == 3: | ||
| 13 | unicode = str | ||
| 14 | unichr = chr | ||
| 15 | |||
| 16 | class IDNAError(UnicodeError): | ||
| 17 | """ Base exception for all IDNA-encoding related problems """ | ||
| 18 | pass | ||
| 19 | |||
| 20 | |||
| 21 | class IDNABidiError(IDNAError): | ||
| 22 | """ Exception when bidirectional requirements are not satisfied """ | ||
| 23 | pass | ||
| 24 | |||
| 25 | |||
| 26 | class InvalidCodepoint(IDNAError): | ||
| 27 | """ Exception when a disallowed or unallocated codepoint is used """ | ||
| 28 | pass | ||
| 29 | |||
| 30 | |||
| 31 | class InvalidCodepointContext(IDNAError): | ||
| 32 | """ Exception when the codepoint is not valid in the context it is used """ | ||
| 33 | pass | ||
| 34 | |||
| 35 | |||
| 36 | def _combining_class(cp): | ||
| 37 | return unicodedata.combining(unichr(cp)) | ||
| 38 | |||
| 39 | def _is_script(cp, script): | ||
| 40 | return intranges_contain(ord(cp), idnadata.scripts[script]) | ||
| 41 | |||
| 42 | def _punycode(s): | ||
| 43 | return s.encode('punycode') | ||
| 44 | |||
| 45 | def _unot(s): | ||
| 46 | return 'U+{0:04X}'.format(s) | ||
| 47 | |||
| 48 | |||
| 49 | def valid_label_length(label): | ||
| 50 | |||
| 51 | if len(label) > 63: | ||
| 52 | return False | ||
| 53 | return True | ||
| 54 | |||
| 55 | |||
| 56 | def valid_string_length(label, trailing_dot): | ||
| 57 | |||
| 58 | if len(label) > (254 if trailing_dot else 253): | ||
| 59 | return False | ||
| 60 | return True | ||
| 61 | |||
| 62 | |||
| 63 | def check_bidi(label, check_ltr=False): | ||
| 64 | |||
| 65 | # Bidi rules should only be applied if string contains RTL characters | ||
| 66 | bidi_label = False | ||
| 67 | for (idx, cp) in enumerate(label, 1): | ||
| 68 | direction = unicodedata.bidirectional(cp) | ||
| 69 | if direction == '': | ||
| 70 | # String likely comes from a newer version of Unicode | ||
| 71 | raise IDNABidiError('Unknown directionality in label {0} at position {1}'.format(repr(label), idx)) | ||
| 72 | if direction in ['R', 'AL', 'AN']: | ||
| 73 | bidi_label = True | ||
| 74 | break | ||
| 75 | if not bidi_label and not check_ltr: | ||
| 76 | return True | ||
| 77 | |||
| 78 | # Bidi rule 1 | ||
| 79 | direction = unicodedata.bidirectional(label[0]) | ||
| 80 | if direction in ['R', 'AL']: | ||
| 81 | rtl = True | ||
| 82 | elif direction == 'L': | ||
| 83 | rtl = False | ||
| 84 | else: | ||
| 85 | raise IDNABidiError('First codepoint in label {0} must be directionality L, R or AL'.format(repr(label))) | ||
| 86 | |||
| 87 | valid_ending = False | ||
| 88 | number_type = False | ||
| 89 | for (idx, cp) in enumerate(label, 1): | ||
| 90 | direction = unicodedata.bidirectional(cp) | ||
| 91 | |||
| 92 | if rtl: | ||
| 93 | # Bidi rule 2 | ||
| 94 | if not direction in ['R', 'AL', 'AN', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']: | ||
| 95 | raise IDNABidiError('Invalid direction for codepoint at position {0} in a right-to-left label'.format(idx)) | ||
| 96 | # Bidi rule 3 | ||
| 97 | if direction in ['R', 'AL', 'EN', 'AN']: | ||
| 98 | valid_ending = True | ||
| 99 | elif direction != 'NSM': | ||
| 100 | valid_ending = False | ||
| 101 | # Bidi rule 4 | ||
| 102 | if direction in ['AN', 'EN']: | ||
| 103 | if not number_type: | ||
| 104 | number_type = direction | ||
| 105 | else: | ||
| 106 | if number_type != direction: | ||
| 107 | raise IDNABidiError('Cannot mix numeral types in a right-to-left label') | ||
| 108 | else: | ||
| 109 | # Bidi rule 5 | ||
| 110 | if not direction in ['L', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']: | ||
| 111 | raise IDNABidiError('Invalid direction for codepoint at position {0} in a left-to-right label'.format(idx)) | ||
| 112 | # Bidi rule 6 | ||
| 113 | if direction in ['L', 'EN']: | ||
| 114 | valid_ending = True | ||
| 115 | elif direction != 'NSM': | ||
| 116 | valid_ending = False | ||
| 117 | |||
| 118 | if not valid_ending: | ||
| 119 | raise IDNABidiError('Label ends with illegal codepoint directionality') | ||
| 120 | |||
| 121 | return True | ||
| 122 | |||
| 123 | |||
| 124 | def check_initial_combiner(label): | ||
| 125 | |||
| 126 | if unicodedata.category(label[0])[0] == 'M': | ||
| 127 | raise IDNAError('Label begins with an illegal combining character') | ||
| 128 | return True | ||
| 129 | |||
| 130 | |||
| 131 | def check_hyphen_ok(label): | ||
| 132 | |||
| 133 | if label[2:4] == '--': | ||
| 134 | raise IDNAError('Label has disallowed hyphens in 3rd and 4th position') | ||
| 135 | if label[0] == '-' or label[-1] == '-': | ||
| 136 | raise IDNAError('Label must not start or end with a hyphen') | ||
| 137 | return True | ||
| 138 | |||
| 139 | |||
| 140 | def check_nfc(label): | ||
| 141 | |||
| 142 | if unicodedata.normalize('NFC', label) != label: | ||
| 143 | raise IDNAError('Label must be in Normalization Form C') | ||
| 144 | |||
| 145 | |||
| 146 | def valid_contextj(label, pos): | ||
| 147 | |||
| 148 | cp_value = ord(label[pos]) | ||
| 149 | |||
| 150 | if cp_value == 0x200c: | ||
| 151 | |||
| 152 | if pos > 0: | ||
| 153 | if _combining_class(ord(label[pos - 1])) == _virama_combining_class: | ||
| 154 | return True | ||
| 155 | |||
| 156 | ok = False | ||
| 157 | for i in range(pos-1, -1, -1): | ||
| 158 | joining_type = idnadata.joining_types.get(ord(label[i])) | ||
| 159 | if joining_type == ord('T'): | ||
| 160 | continue | ||
| 161 | if joining_type in [ord('L'), ord('D')]: | ||
| 162 | ok = True | ||
| 163 | break | ||
| 164 | |||
| 165 | if not ok: | ||
| 166 | return False | ||
| 167 | |||
| 168 | ok = False | ||
| 169 | for i in range(pos+1, len(label)): | ||
| 170 | joining_type = idnadata.joining_types.get(ord(label[i])) | ||
| 171 | if joining_type == ord('T'): | ||
| 172 | continue | ||
| 173 | if joining_type in [ord('R'), ord('D')]: | ||
| 174 | ok = True | ||
| 175 | break | ||
| 176 | return ok | ||
| 177 | |||
| 178 | if cp_value == 0x200d: | ||
| 179 | |||
| 180 | if pos > 0: | ||
| 181 | if _combining_class(ord(label[pos - 1])) == _virama_combining_class: | ||
| 182 | return True | ||
| 183 | return False | ||
| 184 | |||
| 185 | else: | ||
| 186 | |||
| 187 | return False | ||
| 188 | |||
| 189 | |||
| 190 | def valid_contexto(label, pos, exception=False): | ||
| 191 | |||
| 192 | cp_value = ord(label[pos]) | ||
| 193 | |||
| 194 | if cp_value == 0x00b7: | ||
| 195 | if 0 < pos < len(label)-1: | ||
| 196 | if ord(label[pos - 1]) == 0x006c and ord(label[pos + 1]) == 0x006c: | ||
| 197 | return True | ||
| 198 | return False | ||
| 199 | |||
| 200 | elif cp_value == 0x0375: | ||
| 201 | if pos < len(label)-1 and len(label) > 1: | ||
| 202 | return _is_script(label[pos + 1], 'Greek') | ||
| 203 | return False | ||
| 204 | |||
| 205 | elif cp_value == 0x05f3 or cp_value == 0x05f4: | ||
| 206 | if pos > 0: | ||
| 207 | return _is_script(label[pos - 1], 'Hebrew') | ||
| 208 | return False | ||
| 209 | |||
| 210 | elif cp_value == 0x30fb: | ||
| 211 | for cp in label: | ||
| 212 | if cp == u'\u30fb': | ||
| 213 | continue | ||
| 214 | if _is_script(cp, 'Hiragana') or _is_script(cp, 'Katakana') or _is_script(cp, 'Han'): | ||
| 215 | return True | ||
| 216 | return False | ||
| 217 | |||
| 218 | elif 0x660 <= cp_value <= 0x669: | ||
| 219 | for cp in label: | ||
| 220 | if 0x6f0 <= ord(cp) <= 0x06f9: | ||
| 221 | return False | ||
| 222 | return True | ||
| 223 | |||
| 224 | elif 0x6f0 <= cp_value <= 0x6f9: | ||
| 225 | for cp in label: | ||
| 226 | if 0x660 <= ord(cp) <= 0x0669: | ||
| 227 | return False | ||
| 228 | return True | ||
| 229 | |||
| 230 | |||
| 231 | def check_label(label): | ||
| 232 | |||
| 233 | if isinstance(label, (bytes, bytearray)): | ||
| 234 | label = label.decode('utf-8') | ||
| 235 | if len(label) == 0: | ||
| 236 | raise IDNAError('Empty Label') | ||
| 237 | |||
| 238 | check_nfc(label) | ||
| 239 | check_hyphen_ok(label) | ||
| 240 | check_initial_combiner(label) | ||
| 241 | |||
| 242 | for (pos, cp) in enumerate(label): | ||
| 243 | cp_value = ord(cp) | ||
| 244 | if intranges_contain(cp_value, idnadata.codepoint_classes['PVALID']): | ||
| 245 | continue | ||
| 246 | elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTJ']): | ||
| 247 | if not valid_contextj(label, pos): | ||
| 248 | raise InvalidCodepointContext('Joiner {0} not allowed at position {1} in {2}'.format(_unot(cp_value), pos+1, repr(label))) | ||
| 249 | elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTO']): | ||
| 250 | if not valid_contexto(label, pos): | ||
| 251 | raise InvalidCodepointContext('Codepoint {0} not allowed at position {1} in {2}'.format(_unot(cp_value), pos+1, repr(label))) | ||
| 252 | else: | ||
| 253 | raise InvalidCodepoint('Codepoint {0} at position {1} of {2} not allowed'.format(_unot(cp_value), pos+1, repr(label))) | ||
| 254 | |||
| 255 | check_bidi(label) | ||
| 256 | |||
| 257 | |||
| 258 | def alabel(label): | ||
| 259 | |||
| 260 | try: | ||
| 261 | label = label.encode('ascii') | ||
| 262 | try: | ||
| 263 | ulabel(label) | ||
| 264 | except IDNAError: | ||
| 265 | raise IDNAError('The label {0} is not a valid A-label'.format(label)) | ||
| 266 | if not valid_label_length(label): | ||
| 267 | raise IDNAError('Label too long') | ||
| 268 | return label | ||
| 269 | except UnicodeEncodeError: | ||
| 270 | pass | ||
| 271 | |||
| 272 | if not label: | ||
| 273 | raise IDNAError('No Input') | ||
| 274 | |||
| 275 | label = unicode(label) | ||
| 276 | check_label(label) | ||
| 277 | label = _punycode(label) | ||
| 278 | label = _alabel_prefix + label | ||
| 279 | |||
| 280 | if not valid_label_length(label): | ||
| 281 | raise IDNAError('Label too long') | ||
| 282 | |||
| 283 | return label | ||
| 284 | |||
| 285 | |||
| 286 | def ulabel(label): | ||
| 287 | |||
| 288 | if not isinstance(label, (bytes, bytearray)): | ||
| 289 | try: | ||
| 290 | label = label.encode('ascii') | ||
| 291 | except UnicodeEncodeError: | ||
| 292 | check_label(label) | ||
| 293 | return label | ||
| 294 | |||
| 295 | label = label.lower() | ||
| 296 | if label.startswith(_alabel_prefix): | ||
| 297 | label = label[len(_alabel_prefix):] | ||
| 298 | else: | ||
| 299 | check_label(label) | ||
| 300 | return label.decode('ascii') | ||
| 301 | |||
| 302 | label = label.decode('punycode') | ||
| 303 | check_label(label) | ||
| 304 | return label | ||
| 305 | |||
| 306 | |||
| 307 | def uts46_remap(domain, std3_rules=True, transitional=False): | ||
| 308 | """Re-map the characters in the string according to UTS46 processing.""" | ||
| 309 | from .uts46data import uts46data | ||
| 310 | output = u"" | ||
| 311 | try: | ||
| 312 | for pos, char in enumerate(domain): | ||
| 313 | code_point = ord(char) | ||
| 314 | uts46row = uts46data[code_point if code_point < 256 else | ||
| 315 | bisect.bisect_left(uts46data, (code_point, "Z")) - 1] | ||
| 316 | status = uts46row[1] | ||
| 317 | replacement = uts46row[2] if len(uts46row) == 3 else None | ||
| 318 | if (status == "V" or | ||
| 319 | (status == "D" and not transitional) or | ||
| 320 | (status == "3" and std3_rules and replacement is None)): | ||
| 321 | output += char | ||
| 322 | elif replacement is not None and (status == "M" or | ||
| 323 | (status == "3" and std3_rules) or | ||
| 324 | (status == "D" and transitional)): | ||
| 325 | output += replacement | ||
| 326 | elif status != "I": | ||
| 327 | raise IndexError() | ||
| 328 | return unicodedata.normalize("NFC", output) | ||
| 329 | except IndexError: | ||
| 330 | raise InvalidCodepoint( | ||
| 331 | "Codepoint {0} not allowed at position {1} in {2}".format( | ||
| 332 | _unot(code_point), pos + 1, repr(domain))) | ||
| 333 | |||
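The single-letter statuses consulted here come from `uts46data` below: 'V' valid, 'M' mapped, 'D' deviation (kept or mapped depending on `transitional`), '3' valid only when STD3 rules are relaxed, 'I' ignored, 'X' disallowed. A short behavioural sketch (standalone `idna` assumed; the 'ß' case is the standard UTS #46 deviation example):

```python
import idna

# 'M' rows fold case: uppercase ASCII maps to lowercase.
assert idna.uts46_remap(u'ExAmple') == u'example'

# 'D' rows: 'ß' survives non-transitional processing, but transitional
# (IDNA2003-compatible) processing maps it to 'ss'.
assert idna.uts46_remap(u'faß', transitional=False) == u'faß'
assert idna.uts46_remap(u'faß', transitional=True) == u'fass'

# '3' rows such as '_' are accepted only once STD3 rules are switched off.
assert idna.uts46_remap(u'_dmarc', std3_rules=False) == u'_dmarc'
```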
| 334 | |||
| 335 | def encode(s, strict=False, uts46=False, std3_rules=False, transitional=False): | ||
| 336 | |||
| 337 | if isinstance(s, (bytes, bytearray)): | ||
| 338 | s = s.decode("ascii") | ||
| 339 | if uts46: | ||
| 340 | s = uts46_remap(s, std3_rules, transitional) | ||
| 341 | trailing_dot = False | ||
| 342 | result = [] | ||
| 343 | if strict: | ||
| 344 | labels = s.split('.') | ||
| 345 | else: | ||
| 346 | labels = _unicode_dots_re.split(s) | ||
| 347 | while labels and not labels[0]: | ||
| 348 | del labels[0] | ||
| 349 | if not labels: | ||
| 350 | raise IDNAError('Empty domain') | ||
| 351 | if labels[-1] == '': | ||
| 352 | del labels[-1] | ||
| 353 | trailing_dot = True | ||
| 354 | for label in labels: | ||
| 355 | result.append(alabel(label)) | ||
| 356 | if trailing_dot: | ||
| 357 | result.append(b'') | ||
| 358 | s = b'.'.join(result) | ||
| 359 | if not valid_string_length(s, trailing_dot): | ||
| 360 | raise IDNAError('Domain too long') | ||
| 361 | return s | ||
| 362 | |||
| 363 | |||
| 364 | def decode(s, strict=False, uts46=False, std3_rules=False): | ||
| 365 | |||
| 366 | if isinstance(s, (bytes, bytearray)): | ||
| 367 | s = s.decode("ascii") | ||
| 368 | if uts46: | ||
| 369 | s = uts46_remap(s, std3_rules, False) | ||
| 370 | trailing_dot = False | ||
| 371 | result = [] | ||
| 372 | if not strict: | ||
| 373 | labels = _unicode_dots_re.split(s) | ||
| 374 | else: | ||
| 375 | labels = s.split(u'.') | ||
| 376 | while labels and not labels[0]: | ||
| 377 | del labels[0] | ||
| 378 | if not labels: | ||
| 379 | raise IDNAError('Empty domain') | ||
| 380 | if not labels[-1]: | ||
| 381 | del labels[-1] | ||
| 382 | trailing_dot = True | ||
| 383 | for label in labels: | ||
| 384 | result.append(ulabel(label)) | ||
| 385 | if trailing_dot: | ||
| 386 | result.append(u'') | ||
| 387 | return u'.'.join(result) | ||
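`encode`/`decode` split the domain on the Unicode full stops matched by `_unicode_dots_re` (or on plain '.' under `strict`), run `alabel`/`ulabel` per label, and re-join, preserving a trailing root dot. A round-trip sketch matching the example in the idna project's README (standalone package assumed):

```python
import idna

wire = idna.encode(u'ドメイン.テスト')
assert wire == b'xn--eckwd4c7c.xn--zckzah'
assert idna.decode(wire) == u'ドメイン.テスト'

# A trailing dot (absolute domain) survives the round trip.
assert idna.encode(u'example.com.') == b'example.com.'

# uts46=True applies uts46_remap() first, so mixed case is accepted.
assert idna.encode(u'Example.COM', uts46=True) == b'example.com'
```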
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/idna/idnadata.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/idna/idnadata.py new file mode 100644 index 0000000..c197a69 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/idna/idnadata.py | |||
| @@ -0,0 +1,1585 @@ | |||
| 1 | # This file is automatically generated by tools/idna-data | ||
| 2 | |||
| 3 | __version__ = "6.3.0" | ||
| 4 | scripts = { | ||
| 5 | 'Greek': ( | ||
| 6 | 0x37000000374, | ||
| 7 | 0x37500000378, | ||
| 8 | 0x37a0000037e, | ||
| 9 | 0x38400000385, | ||
| 10 | 0x38600000387, | ||
| 11 | 0x3880000038b, | ||
| 12 | 0x38c0000038d, | ||
| 13 | 0x38e000003a2, | ||
| 14 | 0x3a3000003e2, | ||
| 15 | 0x3f000000400, | ||
| 16 | 0x1d2600001d2b, | ||
| 17 | 0x1d5d00001d62, | ||
| 18 | 0x1d6600001d6b, | ||
| 19 | 0x1dbf00001dc0, | ||
| 20 | 0x1f0000001f16, | ||
| 21 | 0x1f1800001f1e, | ||
| 22 | 0x1f2000001f46, | ||
| 23 | 0x1f4800001f4e, | ||
| 24 | 0x1f5000001f58, | ||
| 25 | 0x1f5900001f5a, | ||
| 26 | 0x1f5b00001f5c, | ||
| 27 | 0x1f5d00001f5e, | ||
| 28 | 0x1f5f00001f7e, | ||
| 29 | 0x1f8000001fb5, | ||
| 30 | 0x1fb600001fc5, | ||
| 31 | 0x1fc600001fd4, | ||
| 32 | 0x1fd600001fdc, | ||
| 33 | 0x1fdd00001ff0, | ||
| 34 | 0x1ff200001ff5, | ||
| 35 | 0x1ff600001fff, | ||
| 36 | 0x212600002127, | ||
| 37 | 0x101400001018b, | ||
| 38 | 0x1d2000001d246, | ||
| 39 | ), | ||
| 40 | 'Han': ( | ||
| 41 | 0x2e8000002e9a, | ||
| 42 | 0x2e9b00002ef4, | ||
| 43 | 0x2f0000002fd6, | ||
| 44 | 0x300500003006, | ||
| 45 | 0x300700003008, | ||
| 46 | 0x30210000302a, | ||
| 47 | 0x30380000303c, | ||
| 48 | 0x340000004db6, | ||
| 49 | 0x4e0000009fcd, | ||
| 50 | 0xf9000000fa6e, | ||
| 51 | 0xfa700000fada, | ||
| 52 | 0x200000002a6d7, | ||
| 53 | 0x2a7000002b735, | ||
| 54 | 0x2b7400002b81e, | ||
| 55 | 0x2f8000002fa1e, | ||
| 56 | ), | ||
| 57 | 'Hebrew': ( | ||
| 58 | 0x591000005c8, | ||
| 59 | 0x5d0000005eb, | ||
| 60 | 0x5f0000005f5, | ||
| 61 | 0xfb1d0000fb37, | ||
| 62 | 0xfb380000fb3d, | ||
| 63 | 0xfb3e0000fb3f, | ||
| 64 | 0xfb400000fb42, | ||
| 65 | 0xfb430000fb45, | ||
| 66 | 0xfb460000fb50, | ||
| 67 | ), | ||
| 68 | 'Hiragana': ( | ||
| 69 | 0x304100003097, | ||
| 70 | 0x309d000030a0, | ||
| 71 | 0x1b0010001b002, | ||
| 72 | 0x1f2000001f201, | ||
| 73 | ), | ||
| 74 | 'Katakana': ( | ||
| 75 | 0x30a1000030fb, | ||
| 76 | 0x30fd00003100, | ||
| 77 | 0x31f000003200, | ||
| 78 | 0x32d0000032ff, | ||
| 79 | 0x330000003358, | ||
| 80 | 0xff660000ff70, | ||
| 81 | 0xff710000ff9e, | ||
| 82 | 0x1b0000001b001, | ||
| 83 | ), | ||
| 84 | } | ||
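Each entry in these script tuples packs a half-open codepoint range into a single integer as `(start << 32) | end`, so `0x37000000374` in the Greek tuple means [U+0370, U+0374). A small unpacking sketch (the shift width matches `_encode_range`/`_decode_range` in `intranges.py` further down):

```python
def decode_range(packed):
    # Inverse of the generator's (start << 32) | end packing.
    return packed >> 32, packed & ((1 << 32) - 1)

start, end = decode_range(0x37000000374)
assert (start, end) == (0x370, 0x374)   # covers U+0370..U+0373 inclusive

# Membership tests against these tables use bisection; see
# intranges_contain() in intranges.py below.
```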
| 85 | joining_types = { | ||
| 86 | 0x600: 85, | ||
| 87 | 0x601: 85, | ||
| 88 | 0x602: 85, | ||
| 89 | 0x603: 85, | ||
| 90 | 0x604: 85, | ||
| 91 | 0x608: 85, | ||
| 92 | 0x60b: 85, | ||
| 93 | 0x620: 68, | ||
| 94 | 0x621: 85, | ||
| 95 | 0x622: 82, | ||
| 96 | 0x623: 82, | ||
| 97 | 0x624: 82, | ||
| 98 | 0x625: 82, | ||
| 99 | 0x626: 68, | ||
| 100 | 0x627: 82, | ||
| 101 | 0x628: 68, | ||
| 102 | 0x629: 82, | ||
| 103 | 0x62a: 68, | ||
| 104 | 0x62b: 68, | ||
| 105 | 0x62c: 68, | ||
| 106 | 0x62d: 68, | ||
| 107 | 0x62e: 68, | ||
| 108 | 0x62f: 82, | ||
| 109 | 0x630: 82, | ||
| 110 | 0x631: 82, | ||
| 111 | 0x632: 82, | ||
| 112 | 0x633: 68, | ||
| 113 | 0x634: 68, | ||
| 114 | 0x635: 68, | ||
| 115 | 0x636: 68, | ||
| 116 | 0x637: 68, | ||
| 117 | 0x638: 68, | ||
| 118 | 0x639: 68, | ||
| 119 | 0x63a: 68, | ||
| 120 | 0x63b: 68, | ||
| 121 | 0x63c: 68, | ||
| 122 | 0x63d: 68, | ||
| 123 | 0x63e: 68, | ||
| 124 | 0x63f: 68, | ||
| 125 | 0x640: 67, | ||
| 126 | 0x641: 68, | ||
| 127 | 0x642: 68, | ||
| 128 | 0x643: 68, | ||
| 129 | 0x644: 68, | ||
| 130 | 0x645: 68, | ||
| 131 | 0x646: 68, | ||
| 132 | 0x647: 68, | ||
| 133 | 0x648: 82, | ||
| 134 | 0x649: 68, | ||
| 135 | 0x64a: 68, | ||
| 136 | 0x66e: 68, | ||
| 137 | 0x66f: 68, | ||
| 138 | 0x671: 82, | ||
| 139 | 0x672: 82, | ||
| 140 | 0x673: 82, | ||
| 141 | 0x674: 85, | ||
| 142 | 0x675: 82, | ||
| 143 | 0x676: 82, | ||
| 144 | 0x677: 82, | ||
| 145 | 0x678: 68, | ||
| 146 | 0x679: 68, | ||
| 147 | 0x67a: 68, | ||
| 148 | 0x67b: 68, | ||
| 149 | 0x67c: 68, | ||
| 150 | 0x67d: 68, | ||
| 151 | 0x67e: 68, | ||
| 152 | 0x67f: 68, | ||
| 153 | 0x680: 68, | ||
| 154 | 0x681: 68, | ||
| 155 | 0x682: 68, | ||
| 156 | 0x683: 68, | ||
| 157 | 0x684: 68, | ||
| 158 | 0x685: 68, | ||
| 159 | 0x686: 68, | ||
| 160 | 0x687: 68, | ||
| 161 | 0x688: 82, | ||
| 162 | 0x689: 82, | ||
| 163 | 0x68a: 82, | ||
| 164 | 0x68b: 82, | ||
| 165 | 0x68c: 82, | ||
| 166 | 0x68d: 82, | ||
| 167 | 0x68e: 82, | ||
| 168 | 0x68f: 82, | ||
| 169 | 0x690: 82, | ||
| 170 | 0x691: 82, | ||
| 171 | 0x692: 82, | ||
| 172 | 0x693: 82, | ||
| 173 | 0x694: 82, | ||
| 174 | 0x695: 82, | ||
| 175 | 0x696: 82, | ||
| 176 | 0x697: 82, | ||
| 177 | 0x698: 82, | ||
| 178 | 0x699: 82, | ||
| 179 | 0x69a: 68, | ||
| 180 | 0x69b: 68, | ||
| 181 | 0x69c: 68, | ||
| 182 | 0x69d: 68, | ||
| 183 | 0x69e: 68, | ||
| 184 | 0x69f: 68, | ||
| 185 | 0x6a0: 68, | ||
| 186 | 0x6a1: 68, | ||
| 187 | 0x6a2: 68, | ||
| 188 | 0x6a3: 68, | ||
| 189 | 0x6a4: 68, | ||
| 190 | 0x6a5: 68, | ||
| 191 | 0x6a6: 68, | ||
| 192 | 0x6a7: 68, | ||
| 193 | 0x6a8: 68, | ||
| 194 | 0x6a9: 68, | ||
| 195 | 0x6aa: 68, | ||
| 196 | 0x6ab: 68, | ||
| 197 | 0x6ac: 68, | ||
| 198 | 0x6ad: 68, | ||
| 199 | 0x6ae: 68, | ||
| 200 | 0x6af: 68, | ||
| 201 | 0x6b0: 68, | ||
| 202 | 0x6b1: 68, | ||
| 203 | 0x6b2: 68, | ||
| 204 | 0x6b3: 68, | ||
| 205 | 0x6b4: 68, | ||
| 206 | 0x6b5: 68, | ||
| 207 | 0x6b6: 68, | ||
| 208 | 0x6b7: 68, | ||
| 209 | 0x6b8: 68, | ||
| 210 | 0x6b9: 68, | ||
| 211 | 0x6ba: 68, | ||
| 212 | 0x6bb: 68, | ||
| 213 | 0x6bc: 68, | ||
| 214 | 0x6bd: 68, | ||
| 215 | 0x6be: 68, | ||
| 216 | 0x6bf: 68, | ||
| 217 | 0x6c0: 82, | ||
| 218 | 0x6c1: 68, | ||
| 219 | 0x6c2: 68, | ||
| 220 | 0x6c3: 82, | ||
| 221 | 0x6c4: 82, | ||
| 222 | 0x6c5: 82, | ||
| 223 | 0x6c6: 82, | ||
| 224 | 0x6c7: 82, | ||
| 225 | 0x6c8: 82, | ||
| 226 | 0x6c9: 82, | ||
| 227 | 0x6ca: 82, | ||
| 228 | 0x6cb: 82, | ||
| 229 | 0x6cc: 68, | ||
| 230 | 0x6cd: 82, | ||
| 231 | 0x6ce: 68, | ||
| 232 | 0x6cf: 82, | ||
| 233 | 0x6d0: 68, | ||
| 234 | 0x6d1: 68, | ||
| 235 | 0x6d2: 82, | ||
| 236 | 0x6d3: 82, | ||
| 237 | 0x6d5: 82, | ||
| 238 | 0x6dd: 85, | ||
| 239 | 0x6ee: 82, | ||
| 240 | 0x6ef: 82, | ||
| 241 | 0x6fa: 68, | ||
| 242 | 0x6fb: 68, | ||
| 243 | 0x6fc: 68, | ||
| 244 | 0x6ff: 68, | ||
| 245 | 0x710: 82, | ||
| 246 | 0x712: 68, | ||
| 247 | 0x713: 68, | ||
| 248 | 0x714: 68, | ||
| 249 | 0x715: 82, | ||
| 250 | 0x716: 82, | ||
| 251 | 0x717: 82, | ||
| 252 | 0x718: 82, | ||
| 253 | 0x719: 82, | ||
| 254 | 0x71a: 68, | ||
| 255 | 0x71b: 68, | ||
| 256 | 0x71c: 68, | ||
| 257 | 0x71d: 68, | ||
| 258 | 0x71e: 82, | ||
| 259 | 0x71f: 68, | ||
| 260 | 0x720: 68, | ||
| 261 | 0x721: 68, | ||
| 262 | 0x722: 68, | ||
| 263 | 0x723: 68, | ||
| 264 | 0x724: 68, | ||
| 265 | 0x725: 68, | ||
| 266 | 0x726: 68, | ||
| 267 | 0x727: 68, | ||
| 268 | 0x728: 82, | ||
| 269 | 0x729: 68, | ||
| 270 | 0x72a: 82, | ||
| 271 | 0x72b: 68, | ||
| 272 | 0x72c: 82, | ||
| 273 | 0x72d: 68, | ||
| 274 | 0x72e: 68, | ||
| 275 | 0x72f: 82, | ||
| 276 | 0x74d: 82, | ||
| 277 | 0x74e: 68, | ||
| 278 | 0x74f: 68, | ||
| 279 | 0x750: 68, | ||
| 280 | 0x751: 68, | ||
| 281 | 0x752: 68, | ||
| 282 | 0x753: 68, | ||
| 283 | 0x754: 68, | ||
| 284 | 0x755: 68, | ||
| 285 | 0x756: 68, | ||
| 286 | 0x757: 68, | ||
| 287 | 0x758: 68, | ||
| 288 | 0x759: 82, | ||
| 289 | 0x75a: 82, | ||
| 290 | 0x75b: 82, | ||
| 291 | 0x75c: 68, | ||
| 292 | 0x75d: 68, | ||
| 293 | 0x75e: 68, | ||
| 294 | 0x75f: 68, | ||
| 295 | 0x760: 68, | ||
| 296 | 0x761: 68, | ||
| 297 | 0x762: 68, | ||
| 298 | 0x763: 68, | ||
| 299 | 0x764: 68, | ||
| 300 | 0x765: 68, | ||
| 301 | 0x766: 68, | ||
| 302 | 0x767: 68, | ||
| 303 | 0x768: 68, | ||
| 304 | 0x769: 68, | ||
| 305 | 0x76a: 68, | ||
| 306 | 0x76b: 82, | ||
| 307 | 0x76c: 82, | ||
| 308 | 0x76d: 68, | ||
| 309 | 0x76e: 68, | ||
| 310 | 0x76f: 68, | ||
| 311 | 0x770: 68, | ||
| 312 | 0x771: 82, | ||
| 313 | 0x772: 68, | ||
| 314 | 0x773: 82, | ||
| 315 | 0x774: 82, | ||
| 316 | 0x775: 68, | ||
| 317 | 0x776: 68, | ||
| 318 | 0x777: 68, | ||
| 319 | 0x778: 82, | ||
| 320 | 0x779: 82, | ||
| 321 | 0x77a: 68, | ||
| 322 | 0x77b: 68, | ||
| 323 | 0x77c: 68, | ||
| 324 | 0x77d: 68, | ||
| 325 | 0x77e: 68, | ||
| 326 | 0x77f: 68, | ||
| 327 | 0x7ca: 68, | ||
| 328 | 0x7cb: 68, | ||
| 329 | 0x7cc: 68, | ||
| 330 | 0x7cd: 68, | ||
| 331 | 0x7ce: 68, | ||
| 332 | 0x7cf: 68, | ||
| 333 | 0x7d0: 68, | ||
| 334 | 0x7d1: 68, | ||
| 335 | 0x7d2: 68, | ||
| 336 | 0x7d3: 68, | ||
| 337 | 0x7d4: 68, | ||
| 338 | 0x7d5: 68, | ||
| 339 | 0x7d6: 68, | ||
| 340 | 0x7d7: 68, | ||
| 341 | 0x7d8: 68, | ||
| 342 | 0x7d9: 68, | ||
| 343 | 0x7da: 68, | ||
| 344 | 0x7db: 68, | ||
| 345 | 0x7dc: 68, | ||
| 346 | 0x7dd: 68, | ||
| 347 | 0x7de: 68, | ||
| 348 | 0x7df: 68, | ||
| 349 | 0x7e0: 68, | ||
| 350 | 0x7e1: 68, | ||
| 351 | 0x7e2: 68, | ||
| 352 | 0x7e3: 68, | ||
| 353 | 0x7e4: 68, | ||
| 354 | 0x7e5: 68, | ||
| 355 | 0x7e6: 68, | ||
| 356 | 0x7e7: 68, | ||
| 357 | 0x7e8: 68, | ||
| 358 | 0x7e9: 68, | ||
| 359 | 0x7ea: 68, | ||
| 360 | 0x7fa: 67, | ||
| 361 | 0x840: 82, | ||
| 362 | 0x841: 68, | ||
| 363 | 0x842: 68, | ||
| 364 | 0x843: 68, | ||
| 365 | 0x844: 68, | ||
| 366 | 0x845: 68, | ||
| 367 | 0x846: 82, | ||
| 368 | 0x847: 68, | ||
| 369 | 0x848: 68, | ||
| 370 | 0x849: 82, | ||
| 371 | 0x84a: 68, | ||
| 372 | 0x84b: 68, | ||
| 373 | 0x84c: 68, | ||
| 374 | 0x84d: 68, | ||
| 375 | 0x84e: 68, | ||
| 376 | 0x84f: 82, | ||
| 377 | 0x850: 68, | ||
| 378 | 0x851: 68, | ||
| 379 | 0x852: 68, | ||
| 380 | 0x853: 68, | ||
| 381 | 0x854: 82, | ||
| 382 | 0x855: 68, | ||
| 383 | 0x856: 85, | ||
| 384 | 0x857: 85, | ||
| 385 | 0x858: 85, | ||
| 386 | 0x8a0: 68, | ||
| 387 | 0x8a2: 68, | ||
| 388 | 0x8a3: 68, | ||
| 389 | 0x8a4: 68, | ||
| 390 | 0x8a5: 68, | ||
| 391 | 0x8a6: 68, | ||
| 392 | 0x8a7: 68, | ||
| 393 | 0x8a8: 68, | ||
| 394 | 0x8a9: 68, | ||
| 395 | 0x8aa: 82, | ||
| 396 | 0x8ab: 82, | ||
| 397 | 0x8ac: 82, | ||
| 398 | 0x1806: 85, | ||
| 399 | 0x1807: 68, | ||
| 400 | 0x180a: 67, | ||
| 401 | 0x180e: 85, | ||
| 402 | 0x1820: 68, | ||
| 403 | 0x1821: 68, | ||
| 404 | 0x1822: 68, | ||
| 405 | 0x1823: 68, | ||
| 406 | 0x1824: 68, | ||
| 407 | 0x1825: 68, | ||
| 408 | 0x1826: 68, | ||
| 409 | 0x1827: 68, | ||
| 410 | 0x1828: 68, | ||
| 411 | 0x1829: 68, | ||
| 412 | 0x182a: 68, | ||
| 413 | 0x182b: 68, | ||
| 414 | 0x182c: 68, | ||
| 415 | 0x182d: 68, | ||
| 416 | 0x182e: 68, | ||
| 417 | 0x182f: 68, | ||
| 418 | 0x1830: 68, | ||
| 419 | 0x1831: 68, | ||
| 420 | 0x1832: 68, | ||
| 421 | 0x1833: 68, | ||
| 422 | 0x1834: 68, | ||
| 423 | 0x1835: 68, | ||
| 424 | 0x1836: 68, | ||
| 425 | 0x1837: 68, | ||
| 426 | 0x1838: 68, | ||
| 427 | 0x1839: 68, | ||
| 428 | 0x183a: 68, | ||
| 429 | 0x183b: 68, | ||
| 430 | 0x183c: 68, | ||
| 431 | 0x183d: 68, | ||
| 432 | 0x183e: 68, | ||
| 433 | 0x183f: 68, | ||
| 434 | 0x1840: 68, | ||
| 435 | 0x1841: 68, | ||
| 436 | 0x1842: 68, | ||
| 437 | 0x1843: 68, | ||
| 438 | 0x1844: 68, | ||
| 439 | 0x1845: 68, | ||
| 440 | 0x1846: 68, | ||
| 441 | 0x1847: 68, | ||
| 442 | 0x1848: 68, | ||
| 443 | 0x1849: 68, | ||
| 444 | 0x184a: 68, | ||
| 445 | 0x184b: 68, | ||
| 446 | 0x184c: 68, | ||
| 447 | 0x184d: 68, | ||
| 448 | 0x184e: 68, | ||
| 449 | 0x184f: 68, | ||
| 450 | 0x1850: 68, | ||
| 451 | 0x1851: 68, | ||
| 452 | 0x1852: 68, | ||
| 453 | 0x1853: 68, | ||
| 454 | 0x1854: 68, | ||
| 455 | 0x1855: 68, | ||
| 456 | 0x1856: 68, | ||
| 457 | 0x1857: 68, | ||
| 458 | 0x1858: 68, | ||
| 459 | 0x1859: 68, | ||
| 460 | 0x185a: 68, | ||
| 461 | 0x185b: 68, | ||
| 462 | 0x185c: 68, | ||
| 463 | 0x185d: 68, | ||
| 464 | 0x185e: 68, | ||
| 465 | 0x185f: 68, | ||
| 466 | 0x1860: 68, | ||
| 467 | 0x1861: 68, | ||
| 468 | 0x1862: 68, | ||
| 469 | 0x1863: 68, | ||
| 470 | 0x1864: 68, | ||
| 471 | 0x1865: 68, | ||
| 472 | 0x1866: 68, | ||
| 473 | 0x1867: 68, | ||
| 474 | 0x1868: 68, | ||
| 475 | 0x1869: 68, | ||
| 476 | 0x186a: 68, | ||
| 477 | 0x186b: 68, | ||
| 478 | 0x186c: 68, | ||
| 479 | 0x186d: 68, | ||
| 480 | 0x186e: 68, | ||
| 481 | 0x186f: 68, | ||
| 482 | 0x1870: 68, | ||
| 483 | 0x1871: 68, | ||
| 484 | 0x1872: 68, | ||
| 485 | 0x1873: 68, | ||
| 486 | 0x1874: 68, | ||
| 487 | 0x1875: 68, | ||
| 488 | 0x1876: 68, | ||
| 489 | 0x1877: 68, | ||
| 490 | 0x1880: 85, | ||
| 491 | 0x1881: 85, | ||
| 492 | 0x1882: 85, | ||
| 493 | 0x1883: 85, | ||
| 494 | 0x1884: 85, | ||
| 495 | 0x1885: 85, | ||
| 496 | 0x1886: 85, | ||
| 497 | 0x1887: 68, | ||
| 498 | 0x1888: 68, | ||
| 499 | 0x1889: 68, | ||
| 500 | 0x188a: 68, | ||
| 501 | 0x188b: 68, | ||
| 502 | 0x188c: 68, | ||
| 503 | 0x188d: 68, | ||
| 504 | 0x188e: 68, | ||
| 505 | 0x188f: 68, | ||
| 506 | 0x1890: 68, | ||
| 507 | 0x1891: 68, | ||
| 508 | 0x1892: 68, | ||
| 509 | 0x1893: 68, | ||
| 510 | 0x1894: 68, | ||
| 511 | 0x1895: 68, | ||
| 512 | 0x1896: 68, | ||
| 513 | 0x1897: 68, | ||
| 514 | 0x1898: 68, | ||
| 515 | 0x1899: 68, | ||
| 516 | 0x189a: 68, | ||
| 517 | 0x189b: 68, | ||
| 518 | 0x189c: 68, | ||
| 519 | 0x189d: 68, | ||
| 520 | 0x189e: 68, | ||
| 521 | 0x189f: 68, | ||
| 522 | 0x18a0: 68, | ||
| 523 | 0x18a1: 68, | ||
| 524 | 0x18a2: 68, | ||
| 525 | 0x18a3: 68, | ||
| 526 | 0x18a4: 68, | ||
| 527 | 0x18a5: 68, | ||
| 528 | 0x18a6: 68, | ||
| 529 | 0x18a7: 68, | ||
| 530 | 0x18a8: 68, | ||
| 531 | 0x18aa: 68, | ||
| 532 | 0x200c: 85, | ||
| 533 | 0x200d: 67, | ||
| 534 | 0x2066: 85, | ||
| 535 | 0x2067: 85, | ||
| 536 | 0x2068: 85, | ||
| 537 | 0x2069: 85, | ||
| 538 | 0xa840: 68, | ||
| 539 | 0xa841: 68, | ||
| 540 | 0xa842: 68, | ||
| 541 | 0xa843: 68, | ||
| 542 | 0xa844: 68, | ||
| 543 | 0xa845: 68, | ||
| 544 | 0xa846: 68, | ||
| 545 | 0xa847: 68, | ||
| 546 | 0xa848: 68, | ||
| 547 | 0xa849: 68, | ||
| 548 | 0xa84a: 68, | ||
| 549 | 0xa84b: 68, | ||
| 550 | 0xa84c: 68, | ||
| 551 | 0xa84d: 68, | ||
| 552 | 0xa84e: 68, | ||
| 553 | 0xa84f: 68, | ||
| 554 | 0xa850: 68, | ||
| 555 | 0xa851: 68, | ||
| 556 | 0xa852: 68, | ||
| 557 | 0xa853: 68, | ||
| 558 | 0xa854: 68, | ||
| 559 | 0xa855: 68, | ||
| 560 | 0xa856: 68, | ||
| 561 | 0xa857: 68, | ||
| 562 | 0xa858: 68, | ||
| 563 | 0xa859: 68, | ||
| 564 | 0xa85a: 68, | ||
| 565 | 0xa85b: 68, | ||
| 566 | 0xa85c: 68, | ||
| 567 | 0xa85d: 68, | ||
| 568 | 0xa85e: 68, | ||
| 569 | 0xa85f: 68, | ||
| 570 | 0xa860: 68, | ||
| 571 | 0xa861: 68, | ||
| 572 | 0xa862: 68, | ||
| 573 | 0xa863: 68, | ||
| 574 | 0xa864: 68, | ||
| 575 | 0xa865: 68, | ||
| 576 | 0xa866: 68, | ||
| 577 | 0xa867: 68, | ||
| 578 | 0xa868: 68, | ||
| 579 | 0xa869: 68, | ||
| 580 | 0xa86a: 68, | ||
| 581 | 0xa86b: 68, | ||
| 582 | 0xa86c: 68, | ||
| 583 | 0xa86d: 68, | ||
| 584 | 0xa86e: 68, | ||
| 585 | 0xa86f: 68, | ||
| 586 | 0xa870: 68, | ||
| 587 | 0xa871: 68, | ||
| 588 | 0xa872: 76, | ||
| 589 | 0xa873: 85, | ||
| 590 | } | ||
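The integer values in `joining_types` are simply `ord()` of the Unicode joining-type letter: 68 = 'D' (dual-joining), 82 = 'R' (right-joining), 76 = 'L' (left-joining), 67 = 'C' (join-causing), 85 = 'U' (non-joining). `valid_contextj` consults this table when deciding whether ZERO WIDTH (NON-)JOINER is permitted. Decoding a few entries from the table above:

```python
# Entries copied from the table above; chr() recovers the joining class.
entries = {
    0x627: 82,   # ARABIC LETTER ALEF
    0x628: 68,   # ARABIC LETTER BEH
    0x640: 67,   # ARABIC TATWEEL
    0x200d: 67,  # ZERO WIDTH JOINER
}
assert {cp: chr(jt) for cp, jt in entries.items()} == {
    0x627: 'R', 0x628: 'D', 0x640: 'C', 0x200d: 'C',
}
```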
| 591 | codepoint_classes = { | ||
| 592 | 'PVALID': ( | ||
| 593 | 0x2d0000002e, | ||
| 594 | 0x300000003a, | ||
| 595 | 0x610000007b, | ||
| 596 | 0xdf000000f7, | ||
| 597 | 0xf800000100, | ||
| 598 | 0x10100000102, | ||
| 599 | 0x10300000104, | ||
| 600 | 0x10500000106, | ||
| 601 | 0x10700000108, | ||
| 602 | 0x1090000010a, | ||
| 603 | 0x10b0000010c, | ||
| 604 | 0x10d0000010e, | ||
| 605 | 0x10f00000110, | ||
| 606 | 0x11100000112, | ||
| 607 | 0x11300000114, | ||
| 608 | 0x11500000116, | ||
| 609 | 0x11700000118, | ||
| 610 | 0x1190000011a, | ||
| 611 | 0x11b0000011c, | ||
| 612 | 0x11d0000011e, | ||
| 613 | 0x11f00000120, | ||
| 614 | 0x12100000122, | ||
| 615 | 0x12300000124, | ||
| 616 | 0x12500000126, | ||
| 617 | 0x12700000128, | ||
| 618 | 0x1290000012a, | ||
| 619 | 0x12b0000012c, | ||
| 620 | 0x12d0000012e, | ||
| 621 | 0x12f00000130, | ||
| 622 | 0x13100000132, | ||
| 623 | 0x13500000136, | ||
| 624 | 0x13700000139, | ||
| 625 | 0x13a0000013b, | ||
| 626 | 0x13c0000013d, | ||
| 627 | 0x13e0000013f, | ||
| 628 | 0x14200000143, | ||
| 629 | 0x14400000145, | ||
| 630 | 0x14600000147, | ||
| 631 | 0x14800000149, | ||
| 632 | 0x14b0000014c, | ||
| 633 | 0x14d0000014e, | ||
| 634 | 0x14f00000150, | ||
| 635 | 0x15100000152, | ||
| 636 | 0x15300000154, | ||
| 637 | 0x15500000156, | ||
| 638 | 0x15700000158, | ||
| 639 | 0x1590000015a, | ||
| 640 | 0x15b0000015c, | ||
| 641 | 0x15d0000015e, | ||
| 642 | 0x15f00000160, | ||
| 643 | 0x16100000162, | ||
| 644 | 0x16300000164, | ||
| 645 | 0x16500000166, | ||
| 646 | 0x16700000168, | ||
| 647 | 0x1690000016a, | ||
| 648 | 0x16b0000016c, | ||
| 649 | 0x16d0000016e, | ||
| 650 | 0x16f00000170, | ||
| 651 | 0x17100000172, | ||
| 652 | 0x17300000174, | ||
| 653 | 0x17500000176, | ||
| 654 | 0x17700000178, | ||
| 655 | 0x17a0000017b, | ||
| 656 | 0x17c0000017d, | ||
| 657 | 0x17e0000017f, | ||
| 658 | 0x18000000181, | ||
| 659 | 0x18300000184, | ||
| 660 | 0x18500000186, | ||
| 661 | 0x18800000189, | ||
| 662 | 0x18c0000018e, | ||
| 663 | 0x19200000193, | ||
| 664 | 0x19500000196, | ||
| 665 | 0x1990000019c, | ||
| 666 | 0x19e0000019f, | ||
| 667 | 0x1a1000001a2, | ||
| 668 | 0x1a3000001a4, | ||
| 669 | 0x1a5000001a6, | ||
| 670 | 0x1a8000001a9, | ||
| 671 | 0x1aa000001ac, | ||
| 672 | 0x1ad000001ae, | ||
| 673 | 0x1b0000001b1, | ||
| 674 | 0x1b4000001b5, | ||
| 675 | 0x1b6000001b7, | ||
| 676 | 0x1b9000001bc, | ||
| 677 | 0x1bd000001c4, | ||
| 678 | 0x1ce000001cf, | ||
| 679 | 0x1d0000001d1, | ||
| 680 | 0x1d2000001d3, | ||
| 681 | 0x1d4000001d5, | ||
| 682 | 0x1d6000001d7, | ||
| 683 | 0x1d8000001d9, | ||
| 684 | 0x1da000001db, | ||
| 685 | 0x1dc000001de, | ||
| 686 | 0x1df000001e0, | ||
| 687 | 0x1e1000001e2, | ||
| 688 | 0x1e3000001e4, | ||
| 689 | 0x1e5000001e6, | ||
| 690 | 0x1e7000001e8, | ||
| 691 | 0x1e9000001ea, | ||
| 692 | 0x1eb000001ec, | ||
| 693 | 0x1ed000001ee, | ||
| 694 | 0x1ef000001f1, | ||
| 695 | 0x1f5000001f6, | ||
| 696 | 0x1f9000001fa, | ||
| 697 | 0x1fb000001fc, | ||
| 698 | 0x1fd000001fe, | ||
| 699 | 0x1ff00000200, | ||
| 700 | 0x20100000202, | ||
| 701 | 0x20300000204, | ||
| 702 | 0x20500000206, | ||
| 703 | 0x20700000208, | ||
| 704 | 0x2090000020a, | ||
| 705 | 0x20b0000020c, | ||
| 706 | 0x20d0000020e, | ||
| 707 | 0x20f00000210, | ||
| 708 | 0x21100000212, | ||
| 709 | 0x21300000214, | ||
| 710 | 0x21500000216, | ||
| 711 | 0x21700000218, | ||
| 712 | 0x2190000021a, | ||
| 713 | 0x21b0000021c, | ||
| 714 | 0x21d0000021e, | ||
| 715 | 0x21f00000220, | ||
| 716 | 0x22100000222, | ||
| 717 | 0x22300000224, | ||
| 718 | 0x22500000226, | ||
| 719 | 0x22700000228, | ||
| 720 | 0x2290000022a, | ||
| 721 | 0x22b0000022c, | ||
| 722 | 0x22d0000022e, | ||
| 723 | 0x22f00000230, | ||
| 724 | 0x23100000232, | ||
| 725 | 0x2330000023a, | ||
| 726 | 0x23c0000023d, | ||
| 727 | 0x23f00000241, | ||
| 728 | 0x24200000243, | ||
| 729 | 0x24700000248, | ||
| 730 | 0x2490000024a, | ||
| 731 | 0x24b0000024c, | ||
| 732 | 0x24d0000024e, | ||
| 733 | 0x24f000002b0, | ||
| 734 | 0x2b9000002c2, | ||
| 735 | 0x2c6000002d2, | ||
| 736 | 0x2ec000002ed, | ||
| 737 | 0x2ee000002ef, | ||
| 738 | 0x30000000340, | ||
| 739 | 0x34200000343, | ||
| 740 | 0x3460000034f, | ||
| 741 | 0x35000000370, | ||
| 742 | 0x37100000372, | ||
| 743 | 0x37300000374, | ||
| 744 | 0x37700000378, | ||
| 745 | 0x37b0000037e, | ||
| 746 | 0x39000000391, | ||
| 747 | 0x3ac000003cf, | ||
| 748 | 0x3d7000003d8, | ||
| 749 | 0x3d9000003da, | ||
| 750 | 0x3db000003dc, | ||
| 751 | 0x3dd000003de, | ||
| 752 | 0x3df000003e0, | ||
| 753 | 0x3e1000003e2, | ||
| 754 | 0x3e3000003e4, | ||
| 755 | 0x3e5000003e6, | ||
| 756 | 0x3e7000003e8, | ||
| 757 | 0x3e9000003ea, | ||
| 758 | 0x3eb000003ec, | ||
| 759 | 0x3ed000003ee, | ||
| 760 | 0x3ef000003f0, | ||
| 761 | 0x3f3000003f4, | ||
| 762 | 0x3f8000003f9, | ||
| 763 | 0x3fb000003fd, | ||
| 764 | 0x43000000460, | ||
| 765 | 0x46100000462, | ||
| 766 | 0x46300000464, | ||
| 767 | 0x46500000466, | ||
| 768 | 0x46700000468, | ||
| 769 | 0x4690000046a, | ||
| 770 | 0x46b0000046c, | ||
| 771 | 0x46d0000046e, | ||
| 772 | 0x46f00000470, | ||
| 773 | 0x47100000472, | ||
| 774 | 0x47300000474, | ||
| 775 | 0x47500000476, | ||
| 776 | 0x47700000478, | ||
| 777 | 0x4790000047a, | ||
| 778 | 0x47b0000047c, | ||
| 779 | 0x47d0000047e, | ||
| 780 | 0x47f00000480, | ||
| 781 | 0x48100000482, | ||
| 782 | 0x48300000488, | ||
| 783 | 0x48b0000048c, | ||
| 784 | 0x48d0000048e, | ||
| 785 | 0x48f00000490, | ||
| 786 | 0x49100000492, | ||
| 787 | 0x49300000494, | ||
| 788 | 0x49500000496, | ||
| 789 | 0x49700000498, | ||
| 790 | 0x4990000049a, | ||
| 791 | 0x49b0000049c, | ||
| 792 | 0x49d0000049e, | ||
| 793 | 0x49f000004a0, | ||
| 794 | 0x4a1000004a2, | ||
| 795 | 0x4a3000004a4, | ||
| 796 | 0x4a5000004a6, | ||
| 797 | 0x4a7000004a8, | ||
| 798 | 0x4a9000004aa, | ||
| 799 | 0x4ab000004ac, | ||
| 800 | 0x4ad000004ae, | ||
| 801 | 0x4af000004b0, | ||
| 802 | 0x4b1000004b2, | ||
| 803 | 0x4b3000004b4, | ||
| 804 | 0x4b5000004b6, | ||
| 805 | 0x4b7000004b8, | ||
| 806 | 0x4b9000004ba, | ||
| 807 | 0x4bb000004bc, | ||
| 808 | 0x4bd000004be, | ||
| 809 | 0x4bf000004c0, | ||
| 810 | 0x4c2000004c3, | ||
| 811 | 0x4c4000004c5, | ||
| 812 | 0x4c6000004c7, | ||
| 813 | 0x4c8000004c9, | ||
| 814 | 0x4ca000004cb, | ||
| 815 | 0x4cc000004cd, | ||
| 816 | 0x4ce000004d0, | ||
| 817 | 0x4d1000004d2, | ||
| 818 | 0x4d3000004d4, | ||
| 819 | 0x4d5000004d6, | ||
| 820 | 0x4d7000004d8, | ||
| 821 | 0x4d9000004da, | ||
| 822 | 0x4db000004dc, | ||
| 823 | 0x4dd000004de, | ||
| 824 | 0x4df000004e0, | ||
| 825 | 0x4e1000004e2, | ||
| 826 | 0x4e3000004e4, | ||
| 827 | 0x4e5000004e6, | ||
| 828 | 0x4e7000004e8, | ||
| 829 | 0x4e9000004ea, | ||
| 830 | 0x4eb000004ec, | ||
| 831 | 0x4ed000004ee, | ||
| 832 | 0x4ef000004f0, | ||
| 833 | 0x4f1000004f2, | ||
| 834 | 0x4f3000004f4, | ||
| 835 | 0x4f5000004f6, | ||
| 836 | 0x4f7000004f8, | ||
| 837 | 0x4f9000004fa, | ||
| 838 | 0x4fb000004fc, | ||
| 839 | 0x4fd000004fe, | ||
| 840 | 0x4ff00000500, | ||
| 841 | 0x50100000502, | ||
| 842 | 0x50300000504, | ||
| 843 | 0x50500000506, | ||
| 844 | 0x50700000508, | ||
| 845 | 0x5090000050a, | ||
| 846 | 0x50b0000050c, | ||
| 847 | 0x50d0000050e, | ||
| 848 | 0x50f00000510, | ||
| 849 | 0x51100000512, | ||
| 850 | 0x51300000514, | ||
| 851 | 0x51500000516, | ||
| 852 | 0x51700000518, | ||
| 853 | 0x5190000051a, | ||
| 854 | 0x51b0000051c, | ||
| 855 | 0x51d0000051e, | ||
| 856 | 0x51f00000520, | ||
| 857 | 0x52100000522, | ||
| 858 | 0x52300000524, | ||
| 859 | 0x52500000526, | ||
| 860 | 0x52700000528, | ||
| 861 | 0x5590000055a, | ||
| 862 | 0x56100000587, | ||
| 863 | 0x591000005be, | ||
| 864 | 0x5bf000005c0, | ||
| 865 | 0x5c1000005c3, | ||
| 866 | 0x5c4000005c6, | ||
| 867 | 0x5c7000005c8, | ||
| 868 | 0x5d0000005eb, | ||
| 869 | 0x5f0000005f3, | ||
| 870 | 0x6100000061b, | ||
| 871 | 0x62000000640, | ||
| 872 | 0x64100000660, | ||
| 873 | 0x66e00000675, | ||
| 874 | 0x679000006d4, | ||
| 875 | 0x6d5000006dd, | ||
| 876 | 0x6df000006e9, | ||
| 877 | 0x6ea000006f0, | ||
| 878 | 0x6fa00000700, | ||
| 879 | 0x7100000074b, | ||
| 880 | 0x74d000007b2, | ||
| 881 | 0x7c0000007f6, | ||
| 882 | 0x8000000082e, | ||
| 883 | 0x8400000085c, | ||
| 884 | 0x8a0000008a1, | ||
| 885 | 0x8a2000008ad, | ||
| 886 | 0x8e4000008ff, | ||
| 887 | 0x90000000958, | ||
| 888 | 0x96000000964, | ||
| 889 | 0x96600000970, | ||
| 890 | 0x97100000978, | ||
| 891 | 0x97900000980, | ||
| 892 | 0x98100000984, | ||
| 893 | 0x9850000098d, | ||
| 894 | 0x98f00000991, | ||
| 895 | 0x993000009a9, | ||
| 896 | 0x9aa000009b1, | ||
| 897 | 0x9b2000009b3, | ||
| 898 | 0x9b6000009ba, | ||
| 899 | 0x9bc000009c5, | ||
| 900 | 0x9c7000009c9, | ||
| 901 | 0x9cb000009cf, | ||
| 902 | 0x9d7000009d8, | ||
| 903 | 0x9e0000009e4, | ||
| 904 | 0x9e6000009f2, | ||
| 905 | 0xa0100000a04, | ||
| 906 | 0xa0500000a0b, | ||
| 907 | 0xa0f00000a11, | ||
| 908 | 0xa1300000a29, | ||
| 909 | 0xa2a00000a31, | ||
| 910 | 0xa3200000a33, | ||
| 911 | 0xa3500000a36, | ||
| 912 | 0xa3800000a3a, | ||
| 913 | 0xa3c00000a3d, | ||
| 914 | 0xa3e00000a43, | ||
| 915 | 0xa4700000a49, | ||
| 916 | 0xa4b00000a4e, | ||
| 917 | 0xa5100000a52, | ||
| 918 | 0xa5c00000a5d, | ||
| 919 | 0xa6600000a76, | ||
| 920 | 0xa8100000a84, | ||
| 921 | 0xa8500000a8e, | ||
| 922 | 0xa8f00000a92, | ||
| 923 | 0xa9300000aa9, | ||
| 924 | 0xaaa00000ab1, | ||
| 925 | 0xab200000ab4, | ||
| 926 | 0xab500000aba, | ||
| 927 | 0xabc00000ac6, | ||
| 928 | 0xac700000aca, | ||
| 929 | 0xacb00000ace, | ||
| 930 | 0xad000000ad1, | ||
| 931 | 0xae000000ae4, | ||
| 932 | 0xae600000af0, | ||
| 933 | 0xb0100000b04, | ||
| 934 | 0xb0500000b0d, | ||
| 935 | 0xb0f00000b11, | ||
| 936 | 0xb1300000b29, | ||
| 937 | 0xb2a00000b31, | ||
| 938 | 0xb3200000b34, | ||
| 939 | 0xb3500000b3a, | ||
| 940 | 0xb3c00000b45, | ||
| 941 | 0xb4700000b49, | ||
| 942 | 0xb4b00000b4e, | ||
| 943 | 0xb5600000b58, | ||
| 944 | 0xb5f00000b64, | ||
| 945 | 0xb6600000b70, | ||
| 946 | 0xb7100000b72, | ||
| 947 | 0xb8200000b84, | ||
| 948 | 0xb8500000b8b, | ||
| 949 | 0xb8e00000b91, | ||
| 950 | 0xb9200000b96, | ||
| 951 | 0xb9900000b9b, | ||
| 952 | 0xb9c00000b9d, | ||
| 953 | 0xb9e00000ba0, | ||
| 954 | 0xba300000ba5, | ||
| 955 | 0xba800000bab, | ||
| 956 | 0xbae00000bba, | ||
| 957 | 0xbbe00000bc3, | ||
| 958 | 0xbc600000bc9, | ||
| 959 | 0xbca00000bce, | ||
| 960 | 0xbd000000bd1, | ||
| 961 | 0xbd700000bd8, | ||
| 962 | 0xbe600000bf0, | ||
| 963 | 0xc0100000c04, | ||
| 964 | 0xc0500000c0d, | ||
| 965 | 0xc0e00000c11, | ||
| 966 | 0xc1200000c29, | ||
| 967 | 0xc2a00000c34, | ||
| 968 | 0xc3500000c3a, | ||
| 969 | 0xc3d00000c45, | ||
| 970 | 0xc4600000c49, | ||
| 971 | 0xc4a00000c4e, | ||
| 972 | 0xc5500000c57, | ||
| 973 | 0xc5800000c5a, | ||
| 974 | 0xc6000000c64, | ||
| 975 | 0xc6600000c70, | ||
| 976 | 0xc8200000c84, | ||
| 977 | 0xc8500000c8d, | ||
| 978 | 0xc8e00000c91, | ||
| 979 | 0xc9200000ca9, | ||
| 980 | 0xcaa00000cb4, | ||
| 981 | 0xcb500000cba, | ||
| 982 | 0xcbc00000cc5, | ||
| 983 | 0xcc600000cc9, | ||
| 984 | 0xcca00000cce, | ||
| 985 | 0xcd500000cd7, | ||
| 986 | 0xcde00000cdf, | ||
| 987 | 0xce000000ce4, | ||
| 988 | 0xce600000cf0, | ||
| 989 | 0xcf100000cf3, | ||
| 990 | 0xd0200000d04, | ||
| 991 | 0xd0500000d0d, | ||
| 992 | 0xd0e00000d11, | ||
| 993 | 0xd1200000d3b, | ||
| 994 | 0xd3d00000d45, | ||
| 995 | 0xd4600000d49, | ||
| 996 | 0xd4a00000d4f, | ||
| 997 | 0xd5700000d58, | ||
| 998 | 0xd6000000d64, | ||
| 999 | 0xd6600000d70, | ||
| 1000 | 0xd7a00000d80, | ||
| 1001 | 0xd8200000d84, | ||
| 1002 | 0xd8500000d97, | ||
| 1003 | 0xd9a00000db2, | ||
| 1004 | 0xdb300000dbc, | ||
| 1005 | 0xdbd00000dbe, | ||
| 1006 | 0xdc000000dc7, | ||
| 1007 | 0xdca00000dcb, | ||
| 1008 | 0xdcf00000dd5, | ||
| 1009 | 0xdd600000dd7, | ||
| 1010 | 0xdd800000de0, | ||
| 1011 | 0xdf200000df4, | ||
| 1012 | 0xe0100000e33, | ||
| 1013 | 0xe3400000e3b, | ||
| 1014 | 0xe4000000e4f, | ||
| 1015 | 0xe5000000e5a, | ||
| 1016 | 0xe8100000e83, | ||
| 1017 | 0xe8400000e85, | ||
| 1018 | 0xe8700000e89, | ||
| 1019 | 0xe8a00000e8b, | ||
| 1020 | 0xe8d00000e8e, | ||
| 1021 | 0xe9400000e98, | ||
| 1022 | 0xe9900000ea0, | ||
| 1023 | 0xea100000ea4, | ||
| 1024 | 0xea500000ea6, | ||
| 1025 | 0xea700000ea8, | ||
| 1026 | 0xeaa00000eac, | ||
| 1027 | 0xead00000eb3, | ||
| 1028 | 0xeb400000eba, | ||
| 1029 | 0xebb00000ebe, | ||
| 1030 | 0xec000000ec5, | ||
| 1031 | 0xec600000ec7, | ||
| 1032 | 0xec800000ece, | ||
| 1033 | 0xed000000eda, | ||
| 1034 | 0xede00000ee0, | ||
| 1035 | 0xf0000000f01, | ||
| 1036 | 0xf0b00000f0c, | ||
| 1037 | 0xf1800000f1a, | ||
| 1038 | 0xf2000000f2a, | ||
| 1039 | 0xf3500000f36, | ||
| 1040 | 0xf3700000f38, | ||
| 1041 | 0xf3900000f3a, | ||
| 1042 | 0xf3e00000f43, | ||
| 1043 | 0xf4400000f48, | ||
| 1044 | 0xf4900000f4d, | ||
| 1045 | 0xf4e00000f52, | ||
| 1046 | 0xf5300000f57, | ||
| 1047 | 0xf5800000f5c, | ||
| 1048 | 0xf5d00000f69, | ||
| 1049 | 0xf6a00000f6d, | ||
| 1050 | 0xf7100000f73, | ||
| 1051 | 0xf7400000f75, | ||
| 1052 | 0xf7a00000f81, | ||
| 1053 | 0xf8200000f85, | ||
| 1054 | 0xf8600000f93, | ||
| 1055 | 0xf9400000f98, | ||
| 1056 | 0xf9900000f9d, | ||
| 1057 | 0xf9e00000fa2, | ||
| 1058 | 0xfa300000fa7, | ||
| 1059 | 0xfa800000fac, | ||
| 1060 | 0xfad00000fb9, | ||
| 1061 | 0xfba00000fbd, | ||
| 1062 | 0xfc600000fc7, | ||
| 1063 | 0x10000000104a, | ||
| 1064 | 0x10500000109e, | ||
| 1065 | 0x10d0000010fb, | ||
| 1066 | 0x10fd00001100, | ||
| 1067 | 0x120000001249, | ||
| 1068 | 0x124a0000124e, | ||
| 1069 | 0x125000001257, | ||
| 1070 | 0x125800001259, | ||
| 1071 | 0x125a0000125e, | ||
| 1072 | 0x126000001289, | ||
| 1073 | 0x128a0000128e, | ||
| 1074 | 0x1290000012b1, | ||
| 1075 | 0x12b2000012b6, | ||
| 1076 | 0x12b8000012bf, | ||
| 1077 | 0x12c0000012c1, | ||
| 1078 | 0x12c2000012c6, | ||
| 1079 | 0x12c8000012d7, | ||
| 1080 | 0x12d800001311, | ||
| 1081 | 0x131200001316, | ||
| 1082 | 0x13180000135b, | ||
| 1083 | 0x135d00001360, | ||
| 1084 | 0x138000001390, | ||
| 1085 | 0x13a0000013f5, | ||
| 1086 | 0x14010000166d, | ||
| 1087 | 0x166f00001680, | ||
| 1088 | 0x16810000169b, | ||
| 1089 | 0x16a0000016eb, | ||
| 1090 | 0x17000000170d, | ||
| 1091 | 0x170e00001715, | ||
| 1092 | 0x172000001735, | ||
| 1093 | 0x174000001754, | ||
| 1094 | 0x17600000176d, | ||
| 1095 | 0x176e00001771, | ||
| 1096 | 0x177200001774, | ||
| 1097 | 0x1780000017b4, | ||
| 1098 | 0x17b6000017d4, | ||
| 1099 | 0x17d7000017d8, | ||
| 1100 | 0x17dc000017de, | ||
| 1101 | 0x17e0000017ea, | ||
| 1102 | 0x18100000181a, | ||
| 1103 | 0x182000001878, | ||
| 1104 | 0x1880000018ab, | ||
| 1105 | 0x18b0000018f6, | ||
| 1106 | 0x19000000191d, | ||
| 1107 | 0x19200000192c, | ||
| 1108 | 0x19300000193c, | ||
| 1109 | 0x19460000196e, | ||
| 1110 | 0x197000001975, | ||
| 1111 | 0x1980000019ac, | ||
| 1112 | 0x19b0000019ca, | ||
| 1113 | 0x19d0000019da, | ||
| 1114 | 0x1a0000001a1c, | ||
| 1115 | 0x1a2000001a5f, | ||
| 1116 | 0x1a6000001a7d, | ||
| 1117 | 0x1a7f00001a8a, | ||
| 1118 | 0x1a9000001a9a, | ||
| 1119 | 0x1aa700001aa8, | ||
| 1120 | 0x1b0000001b4c, | ||
| 1121 | 0x1b5000001b5a, | ||
| 1122 | 0x1b6b00001b74, | ||
| 1123 | 0x1b8000001bf4, | ||
| 1124 | 0x1c0000001c38, | ||
| 1125 | 0x1c4000001c4a, | ||
| 1126 | 0x1c4d00001c7e, | ||
| 1127 | 0x1cd000001cd3, | ||
| 1128 | 0x1cd400001cf7, | ||
| 1129 | 0x1d0000001d2c, | ||
| 1130 | 0x1d2f00001d30, | ||
| 1131 | 0x1d3b00001d3c, | ||
| 1132 | 0x1d4e00001d4f, | ||
| 1133 | 0x1d6b00001d78, | ||
| 1134 | 0x1d7900001d9b, | ||
| 1135 | 0x1dc000001de7, | ||
| 1136 | 0x1dfc00001e00, | ||
| 1137 | 0x1e0100001e02, | ||
| 1138 | 0x1e0300001e04, | ||
| 1139 | 0x1e0500001e06, | ||
| 1140 | 0x1e0700001e08, | ||
| 1141 | 0x1e0900001e0a, | ||
| 1142 | 0x1e0b00001e0c, | ||
| 1143 | 0x1e0d00001e0e, | ||
| 1144 | 0x1e0f00001e10, | ||
| 1145 | 0x1e1100001e12, | ||
| 1146 | 0x1e1300001e14, | ||
| 1147 | 0x1e1500001e16, | ||
| 1148 | 0x1e1700001e18, | ||
| 1149 | 0x1e1900001e1a, | ||
| 1150 | 0x1e1b00001e1c, | ||
| 1151 | 0x1e1d00001e1e, | ||
| 1152 | 0x1e1f00001e20, | ||
| 1153 | 0x1e2100001e22, | ||
| 1154 | 0x1e2300001e24, | ||
| 1155 | 0x1e2500001e26, | ||
| 1156 | 0x1e2700001e28, | ||
| 1157 | 0x1e2900001e2a, | ||
| 1158 | 0x1e2b00001e2c, | ||
| 1159 | 0x1e2d00001e2e, | ||
| 1160 | 0x1e2f00001e30, | ||
| 1161 | 0x1e3100001e32, | ||
| 1162 | 0x1e3300001e34, | ||
| 1163 | 0x1e3500001e36, | ||
| 1164 | 0x1e3700001e38, | ||
| 1165 | 0x1e3900001e3a, | ||
| 1166 | 0x1e3b00001e3c, | ||
| 1167 | 0x1e3d00001e3e, | ||
| 1168 | 0x1e3f00001e40, | ||
| 1169 | 0x1e4100001e42, | ||
| 1170 | 0x1e4300001e44, | ||
| 1171 | 0x1e4500001e46, | ||
| 1172 | 0x1e4700001e48, | ||
| 1173 | 0x1e4900001e4a, | ||
| 1174 | 0x1e4b00001e4c, | ||
| 1175 | 0x1e4d00001e4e, | ||
| 1176 | 0x1e4f00001e50, | ||
| 1177 | 0x1e5100001e52, | ||
| 1178 | 0x1e5300001e54, | ||
| 1179 | 0x1e5500001e56, | ||
| 1180 | 0x1e5700001e58, | ||
| 1181 | 0x1e5900001e5a, | ||
| 1182 | 0x1e5b00001e5c, | ||
| 1183 | 0x1e5d00001e5e, | ||
| 1184 | 0x1e5f00001e60, | ||
| 1185 | 0x1e6100001e62, | ||
| 1186 | 0x1e6300001e64, | ||
| 1187 | 0x1e6500001e66, | ||
| 1188 | 0x1e6700001e68, | ||
| 1189 | 0x1e6900001e6a, | ||
| 1190 | 0x1e6b00001e6c, | ||
| 1191 | 0x1e6d00001e6e, | ||
| 1192 | 0x1e6f00001e70, | ||
| 1193 | 0x1e7100001e72, | ||
| 1194 | 0x1e7300001e74, | ||
| 1195 | 0x1e7500001e76, | ||
| 1196 | 0x1e7700001e78, | ||
| 1197 | 0x1e7900001e7a, | ||
| 1198 | 0x1e7b00001e7c, | ||
| 1199 | 0x1e7d00001e7e, | ||
| 1200 | 0x1e7f00001e80, | ||
| 1201 | 0x1e8100001e82, | ||
| 1202 | 0x1e8300001e84, | ||
| 1203 | 0x1e8500001e86, | ||
| 1204 | 0x1e8700001e88, | ||
| 1205 | 0x1e8900001e8a, | ||
| 1206 | 0x1e8b00001e8c, | ||
| 1207 | 0x1e8d00001e8e, | ||
| 1208 | 0x1e8f00001e90, | ||
| 1209 | 0x1e9100001e92, | ||
| 1210 | 0x1e9300001e94, | ||
| 1211 | 0x1e9500001e9a, | ||
| 1212 | 0x1e9c00001e9e, | ||
| 1213 | 0x1e9f00001ea0, | ||
| 1214 | 0x1ea100001ea2, | ||
| 1215 | 0x1ea300001ea4, | ||
| 1216 | 0x1ea500001ea6, | ||
| 1217 | 0x1ea700001ea8, | ||
| 1218 | 0x1ea900001eaa, | ||
| 1219 | 0x1eab00001eac, | ||
| 1220 | 0x1ead00001eae, | ||
| 1221 | 0x1eaf00001eb0, | ||
| 1222 | 0x1eb100001eb2, | ||
| 1223 | 0x1eb300001eb4, | ||
| 1224 | 0x1eb500001eb6, | ||
| 1225 | 0x1eb700001eb8, | ||
| 1226 | 0x1eb900001eba, | ||
| 1227 | 0x1ebb00001ebc, | ||
| 1228 | 0x1ebd00001ebe, | ||
| 1229 | 0x1ebf00001ec0, | ||
| 1230 | 0x1ec100001ec2, | ||
| 1231 | 0x1ec300001ec4, | ||
| 1232 | 0x1ec500001ec6, | ||
| 1233 | 0x1ec700001ec8, | ||
| 1234 | 0x1ec900001eca, | ||
| 1235 | 0x1ecb00001ecc, | ||
| 1236 | 0x1ecd00001ece, | ||
| 1237 | 0x1ecf00001ed0, | ||
| 1238 | 0x1ed100001ed2, | ||
| 1239 | 0x1ed300001ed4, | ||
| 1240 | 0x1ed500001ed6, | ||
| 1241 | 0x1ed700001ed8, | ||
| 1242 | 0x1ed900001eda, | ||
| 1243 | 0x1edb00001edc, | ||
| 1244 | 0x1edd00001ede, | ||
| 1245 | 0x1edf00001ee0, | ||
| 1246 | 0x1ee100001ee2, | ||
| 1247 | 0x1ee300001ee4, | ||
| 1248 | 0x1ee500001ee6, | ||
| 1249 | 0x1ee700001ee8, | ||
| 1250 | 0x1ee900001eea, | ||
| 1251 | 0x1eeb00001eec, | ||
| 1252 | 0x1eed00001eee, | ||
| 1253 | 0x1eef00001ef0, | ||
| 1254 | 0x1ef100001ef2, | ||
| 1255 | 0x1ef300001ef4, | ||
| 1256 | 0x1ef500001ef6, | ||
| 1257 | 0x1ef700001ef8, | ||
| 1258 | 0x1ef900001efa, | ||
| 1259 | 0x1efb00001efc, | ||
| 1260 | 0x1efd00001efe, | ||
| 1261 | 0x1eff00001f08, | ||
| 1262 | 0x1f1000001f16, | ||
| 1263 | 0x1f2000001f28, | ||
| 1264 | 0x1f3000001f38, | ||
| 1265 | 0x1f4000001f46, | ||
| 1266 | 0x1f5000001f58, | ||
| 1267 | 0x1f6000001f68, | ||
| 1268 | 0x1f7000001f71, | ||
| 1269 | 0x1f7200001f73, | ||
| 1270 | 0x1f7400001f75, | ||
| 1271 | 0x1f7600001f77, | ||
| 1272 | 0x1f7800001f79, | ||
| 1273 | 0x1f7a00001f7b, | ||
| 1274 | 0x1f7c00001f7d, | ||
| 1275 | 0x1fb000001fb2, | ||
| 1276 | 0x1fb600001fb7, | ||
| 1277 | 0x1fc600001fc7, | ||
| 1278 | 0x1fd000001fd3, | ||
| 1279 | 0x1fd600001fd8, | ||
| 1280 | 0x1fe000001fe3, | ||
| 1281 | 0x1fe400001fe8, | ||
| 1282 | 0x1ff600001ff7, | ||
| 1283 | 0x214e0000214f, | ||
| 1284 | 0x218400002185, | ||
| 1285 | 0x2c3000002c5f, | ||
| 1286 | 0x2c6100002c62, | ||
| 1287 | 0x2c6500002c67, | ||
| 1288 | 0x2c6800002c69, | ||
| 1289 | 0x2c6a00002c6b, | ||
| 1290 | 0x2c6c00002c6d, | ||
| 1291 | 0x2c7100002c72, | ||
| 1292 | 0x2c7300002c75, | ||
| 1293 | 0x2c7600002c7c, | ||
| 1294 | 0x2c8100002c82, | ||
| 1295 | 0x2c8300002c84, | ||
| 1296 | 0x2c8500002c86, | ||
| 1297 | 0x2c8700002c88, | ||
| 1298 | 0x2c8900002c8a, | ||
| 1299 | 0x2c8b00002c8c, | ||
| 1300 | 0x2c8d00002c8e, | ||
| 1301 | 0x2c8f00002c90, | ||
| 1302 | 0x2c9100002c92, | ||
| 1303 | 0x2c9300002c94, | ||
| 1304 | 0x2c9500002c96, | ||
| 1305 | 0x2c9700002c98, | ||
| 1306 | 0x2c9900002c9a, | ||
| 1307 | 0x2c9b00002c9c, | ||
| 1308 | 0x2c9d00002c9e, | ||
| 1309 | 0x2c9f00002ca0, | ||
| 1310 | 0x2ca100002ca2, | ||
| 1311 | 0x2ca300002ca4, | ||
| 1312 | 0x2ca500002ca6, | ||
| 1313 | 0x2ca700002ca8, | ||
| 1314 | 0x2ca900002caa, | ||
| 1315 | 0x2cab00002cac, | ||
| 1316 | 0x2cad00002cae, | ||
| 1317 | 0x2caf00002cb0, | ||
| 1318 | 0x2cb100002cb2, | ||
| 1319 | 0x2cb300002cb4, | ||
| 1320 | 0x2cb500002cb6, | ||
| 1321 | 0x2cb700002cb8, | ||
| 1322 | 0x2cb900002cba, | ||
| 1323 | 0x2cbb00002cbc, | ||
| 1324 | 0x2cbd00002cbe, | ||
| 1325 | 0x2cbf00002cc0, | ||
| 1326 | 0x2cc100002cc2, | ||
| 1327 | 0x2cc300002cc4, | ||
| 1328 | 0x2cc500002cc6, | ||
| 1329 | 0x2cc700002cc8, | ||
| 1330 | 0x2cc900002cca, | ||
| 1331 | 0x2ccb00002ccc, | ||
| 1332 | 0x2ccd00002cce, | ||
| 1333 | 0x2ccf00002cd0, | ||
| 1334 | 0x2cd100002cd2, | ||
| 1335 | 0x2cd300002cd4, | ||
| 1336 | 0x2cd500002cd6, | ||
| 1337 | 0x2cd700002cd8, | ||
| 1338 | 0x2cd900002cda, | ||
| 1339 | 0x2cdb00002cdc, | ||
| 1340 | 0x2cdd00002cde, | ||
| 1341 | 0x2cdf00002ce0, | ||
| 1342 | 0x2ce100002ce2, | ||
| 1343 | 0x2ce300002ce5, | ||
| 1344 | 0x2cec00002ced, | ||
| 1345 | 0x2cee00002cf2, | ||
| 1346 | 0x2cf300002cf4, | ||
| 1347 | 0x2d0000002d26, | ||
| 1348 | 0x2d2700002d28, | ||
| 1349 | 0x2d2d00002d2e, | ||
| 1350 | 0x2d3000002d68, | ||
| 1351 | 0x2d7f00002d97, | ||
| 1352 | 0x2da000002da7, | ||
| 1353 | 0x2da800002daf, | ||
| 1354 | 0x2db000002db7, | ||
| 1355 | 0x2db800002dbf, | ||
| 1356 | 0x2dc000002dc7, | ||
| 1357 | 0x2dc800002dcf, | ||
| 1358 | 0x2dd000002dd7, | ||
| 1359 | 0x2dd800002ddf, | ||
| 1360 | 0x2de000002e00, | ||
| 1361 | 0x2e2f00002e30, | ||
| 1362 | 0x300500003008, | ||
| 1363 | 0x302a0000302e, | ||
| 1364 | 0x303c0000303d, | ||
| 1365 | 0x304100003097, | ||
| 1366 | 0x30990000309b, | ||
| 1367 | 0x309d0000309f, | ||
| 1368 | 0x30a1000030fb, | ||
| 1369 | 0x30fc000030ff, | ||
| 1370 | 0x31050000312e, | ||
| 1371 | 0x31a0000031bb, | ||
| 1372 | 0x31f000003200, | ||
| 1373 | 0x340000004db6, | ||
| 1374 | 0x4e0000009fcd, | ||
| 1375 | 0xa0000000a48d, | ||
| 1376 | 0xa4d00000a4fe, | ||
| 1377 | 0xa5000000a60d, | ||
| 1378 | 0xa6100000a62c, | ||
| 1379 | 0xa6410000a642, | ||
| 1380 | 0xa6430000a644, | ||
| 1381 | 0xa6450000a646, | ||
| 1382 | 0xa6470000a648, | ||
| 1383 | 0xa6490000a64a, | ||
| 1384 | 0xa64b0000a64c, | ||
| 1385 | 0xa64d0000a64e, | ||
| 1386 | 0xa64f0000a650, | ||
| 1387 | 0xa6510000a652, | ||
| 1388 | 0xa6530000a654, | ||
| 1389 | 0xa6550000a656, | ||
| 1390 | 0xa6570000a658, | ||
| 1391 | 0xa6590000a65a, | ||
| 1392 | 0xa65b0000a65c, | ||
| 1393 | 0xa65d0000a65e, | ||
| 1394 | 0xa65f0000a660, | ||
| 1395 | 0xa6610000a662, | ||
| 1396 | 0xa6630000a664, | ||
| 1397 | 0xa6650000a666, | ||
| 1398 | 0xa6670000a668, | ||
| 1399 | 0xa6690000a66a, | ||
| 1400 | 0xa66b0000a66c, | ||
| 1401 | 0xa66d0000a670, | ||
| 1402 | 0xa6740000a67e, | ||
| 1403 | 0xa67f0000a680, | ||
| 1404 | 0xa6810000a682, | ||
| 1405 | 0xa6830000a684, | ||
| 1406 | 0xa6850000a686, | ||
| 1407 | 0xa6870000a688, | ||
| 1408 | 0xa6890000a68a, | ||
| 1409 | 0xa68b0000a68c, | ||
| 1410 | 0xa68d0000a68e, | ||
| 1411 | 0xa68f0000a690, | ||
| 1412 | 0xa6910000a692, | ||
| 1413 | 0xa6930000a694, | ||
| 1414 | 0xa6950000a696, | ||
| 1415 | 0xa6970000a698, | ||
| 1416 | 0xa69f0000a6e6, | ||
| 1417 | 0xa6f00000a6f2, | ||
| 1418 | 0xa7170000a720, | ||
| 1419 | 0xa7230000a724, | ||
| 1420 | 0xa7250000a726, | ||
| 1421 | 0xa7270000a728, | ||
| 1422 | 0xa7290000a72a, | ||
| 1423 | 0xa72b0000a72c, | ||
| 1424 | 0xa72d0000a72e, | ||
| 1425 | 0xa72f0000a732, | ||
| 1426 | 0xa7330000a734, | ||
| 1427 | 0xa7350000a736, | ||
| 1428 | 0xa7370000a738, | ||
| 1429 | 0xa7390000a73a, | ||
| 1430 | 0xa73b0000a73c, | ||
| 1431 | 0xa73d0000a73e, | ||
| 1432 | 0xa73f0000a740, | ||
| 1433 | 0xa7410000a742, | ||
| 1434 | 0xa7430000a744, | ||
| 1435 | 0xa7450000a746, | ||
| 1436 | 0xa7470000a748, | ||
| 1437 | 0xa7490000a74a, | ||
| 1438 | 0xa74b0000a74c, | ||
| 1439 | 0xa74d0000a74e, | ||
| 1440 | 0xa74f0000a750, | ||
| 1441 | 0xa7510000a752, | ||
| 1442 | 0xa7530000a754, | ||
| 1443 | 0xa7550000a756, | ||
| 1444 | 0xa7570000a758, | ||
| 1445 | 0xa7590000a75a, | ||
| 1446 | 0xa75b0000a75c, | ||
| 1447 | 0xa75d0000a75e, | ||
| 1448 | 0xa75f0000a760, | ||
| 1449 | 0xa7610000a762, | ||
| 1450 | 0xa7630000a764, | ||
| 1451 | 0xa7650000a766, | ||
| 1452 | 0xa7670000a768, | ||
| 1453 | 0xa7690000a76a, | ||
| 1454 | 0xa76b0000a76c, | ||
| 1455 | 0xa76d0000a76e, | ||
| 1456 | 0xa76f0000a770, | ||
| 1457 | 0xa7710000a779, | ||
| 1458 | 0xa77a0000a77b, | ||
| 1459 | 0xa77c0000a77d, | ||
| 1460 | 0xa77f0000a780, | ||
| 1461 | 0xa7810000a782, | ||
| 1462 | 0xa7830000a784, | ||
| 1463 | 0xa7850000a786, | ||
| 1464 | 0xa7870000a789, | ||
| 1465 | 0xa78c0000a78d, | ||
| 1466 | 0xa78e0000a78f, | ||
| 1467 | 0xa7910000a792, | ||
| 1468 | 0xa7930000a794, | ||
| 1469 | 0xa7a10000a7a2, | ||
| 1470 | 0xa7a30000a7a4, | ||
| 1471 | 0xa7a50000a7a6, | ||
| 1472 | 0xa7a70000a7a8, | ||
| 1473 | 0xa7a90000a7aa, | ||
| 1474 | 0xa7fa0000a828, | ||
| 1475 | 0xa8400000a874, | ||
| 1476 | 0xa8800000a8c5, | ||
| 1477 | 0xa8d00000a8da, | ||
| 1478 | 0xa8e00000a8f8, | ||
| 1479 | 0xa8fb0000a8fc, | ||
| 1480 | 0xa9000000a92e, | ||
| 1481 | 0xa9300000a954, | ||
| 1482 | 0xa9800000a9c1, | ||
| 1483 | 0xa9cf0000a9da, | ||
| 1484 | 0xaa000000aa37, | ||
| 1485 | 0xaa400000aa4e, | ||
| 1486 | 0xaa500000aa5a, | ||
| 1487 | 0xaa600000aa77, | ||
| 1488 | 0xaa7a0000aa7c, | ||
| 1489 | 0xaa800000aac3, | ||
| 1490 | 0xaadb0000aade, | ||
| 1491 | 0xaae00000aaf0, | ||
| 1492 | 0xaaf20000aaf7, | ||
| 1493 | 0xab010000ab07, | ||
| 1494 | 0xab090000ab0f, | ||
| 1495 | 0xab110000ab17, | ||
| 1496 | 0xab200000ab27, | ||
| 1497 | 0xab280000ab2f, | ||
| 1498 | 0xabc00000abeb, | ||
| 1499 | 0xabec0000abee, | ||
| 1500 | 0xabf00000abfa, | ||
| 1501 | 0xac000000d7a4, | ||
| 1502 | 0xfa0e0000fa10, | ||
| 1503 | 0xfa110000fa12, | ||
| 1504 | 0xfa130000fa15, | ||
| 1505 | 0xfa1f0000fa20, | ||
| 1506 | 0xfa210000fa22, | ||
| 1507 | 0xfa230000fa25, | ||
| 1508 | 0xfa270000fa2a, | ||
| 1509 | 0xfb1e0000fb1f, | ||
| 1510 | 0xfe200000fe27, | ||
| 1511 | 0xfe730000fe74, | ||
| 1512 | 0x100000001000c, | ||
| 1513 | 0x1000d00010027, | ||
| 1514 | 0x100280001003b, | ||
| 1515 | 0x1003c0001003e, | ||
| 1516 | 0x1003f0001004e, | ||
| 1517 | 0x100500001005e, | ||
| 1518 | 0x10080000100fb, | ||
| 1519 | 0x101fd000101fe, | ||
| 1520 | 0x102800001029d, | ||
| 1521 | 0x102a0000102d1, | ||
| 1522 | 0x103000001031f, | ||
| 1523 | 0x1033000010341, | ||
| 1524 | 0x103420001034a, | ||
| 1525 | 0x103800001039e, | ||
| 1526 | 0x103a0000103c4, | ||
| 1527 | 0x103c8000103d0, | ||
| 1528 | 0x104280001049e, | ||
| 1529 | 0x104a0000104aa, | ||
| 1530 | 0x1080000010806, | ||
| 1531 | 0x1080800010809, | ||
| 1532 | 0x1080a00010836, | ||
| 1533 | 0x1083700010839, | ||
| 1534 | 0x1083c0001083d, | ||
| 1535 | 0x1083f00010856, | ||
| 1536 | 0x1090000010916, | ||
| 1537 | 0x109200001093a, | ||
| 1538 | 0x10980000109b8, | ||
| 1539 | 0x109be000109c0, | ||
| 1540 | 0x10a0000010a04, | ||
| 1541 | 0x10a0500010a07, | ||
| 1542 | 0x10a0c00010a14, | ||
| 1543 | 0x10a1500010a18, | ||
| 1544 | 0x10a1900010a34, | ||
| 1545 | 0x10a3800010a3b, | ||
| 1546 | 0x10a3f00010a40, | ||
| 1547 | 0x10a6000010a7d, | ||
| 1548 | 0x10b0000010b36, | ||
| 1549 | 0x10b4000010b56, | ||
| 1550 | 0x10b6000010b73, | ||
| 1551 | 0x10c0000010c49, | ||
| 1552 | 0x1100000011047, | ||
| 1553 | 0x1106600011070, | ||
| 1554 | 0x11080000110bb, | ||
| 1555 | 0x110d0000110e9, | ||
| 1556 | 0x110f0000110fa, | ||
| 1557 | 0x1110000011135, | ||
| 1558 | 0x1113600011140, | ||
| 1559 | 0x11180000111c5, | ||
| 1560 | 0x111d0000111da, | ||
| 1561 | 0x11680000116b8, | ||
| 1562 | 0x116c0000116ca, | ||
| 1563 | 0x120000001236f, | ||
| 1564 | 0x130000001342f, | ||
| 1565 | 0x1680000016a39, | ||
| 1566 | 0x16f0000016f45, | ||
| 1567 | 0x16f5000016f7f, | ||
| 1568 | 0x16f8f00016fa0, | ||
| 1569 | 0x1b0000001b002, | ||
| 1570 | 0x200000002a6d7, | ||
| 1571 | 0x2a7000002b735, | ||
| 1572 | 0x2b7400002b81e, | ||
| 1573 | ), | ||
| 1574 | 'CONTEXTJ': ( | ||
| 1575 | 0x200c0000200e, | ||
| 1576 | ), | ||
| 1577 | 'CONTEXTO': ( | ||
| 1578 | 0xb7000000b8, | ||
| 1579 | 0x37500000376, | ||
| 1580 | 0x5f3000005f5, | ||
| 1581 | 0x6600000066a, | ||
| 1582 | 0x6f0000006fa, | ||
| 1583 | 0x30fb000030fc, | ||
| 1584 | ), | ||
| 1585 | } | ||
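The CONTEXTJ tuple decodes to the single range [U+200C, U+200E), i.e. ZERO WIDTH NON-JOINER and ZERO WIDTH JOINER, the two codepoints `check_label` routes through `valid_contextj`; CONTEXTO covers MIDDLE DOT, the Greek lower numeral sign, Hebrew geresh/gershayim, the two Arabic-Indic digit runs, and the katakana middle dot (RFC 5892's contextual-rule characters). A quick check using the same unpacking as above:

```python
def decode_range(packed):
    return packed >> 32, packed & ((1 << 32) - 1)

assert decode_range(0x200c0000200e) == (0x200C, 0x200E)  # ZWNJ, ZWJ
assert decode_range(0xb7000000b8) == (0xB7, 0xB8)        # MIDDLE DOT
assert decode_range(0x6600000066a) == (0x660, 0x66A)     # Arabic-Indic digits
```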
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/idna/intranges.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/idna/intranges.py new file mode 100644 index 0000000..8202be8 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/idna/intranges.py | |||
| @@ -0,0 +1,53 @@ | |||
| 1 | """ | ||
| 2 | Given a list of integers, made up of (hopefully) a small number of long runs | ||
| 3 | of consecutive integers, compute a representation of the form | ||
| 4 | ((start1, end1), (start2, end2) ...). Then answer the question "was x present | ||
| 5 | in the original list?" in time O(log(# runs)). | ||
| 6 | """ | ||
| 7 | |||
| 8 | import bisect | ||
| 9 | |||
| 10 | def intranges_from_list(list_): | ||
| 11 | """Represent a list of integers as a sequence of ranges: | ||
| 12 | ((start_0, end_0), (start_1, end_1), ...), such that the original | ||
| 13 | integers are exactly those x such that start_i <= x < end_i for some i. | ||
| 14 | |||
| 15 | Ranges are encoded as single integers (start << 32 | end), not as tuples. | ||
| 16 | """ | ||
| 17 | |||
| 18 | sorted_list = sorted(list_) | ||
| 19 | ranges = [] | ||
| 20 | last_write = -1 | ||
| 21 | for i in range(len(sorted_list)): | ||
| 22 | if i+1 < len(sorted_list): | ||
| 23 | if sorted_list[i] == sorted_list[i+1]-1: | ||
| 24 | continue | ||
| 25 | current_range = sorted_list[last_write+1:i+1] | ||
| 26 | ranges.append(_encode_range(current_range[0], current_range[-1] + 1)) | ||
| 27 | last_write = i | ||
| 28 | |||
| 29 | return tuple(ranges) | ||
| 30 | |||
| 31 | def _encode_range(start, end): | ||
| 32 | return (start << 32) | end | ||
| 33 | |||
| 34 | def _decode_range(r): | ||
| 35 | return (r >> 32), (r & ((1 << 32) - 1)) | ||
| 36 | |||
| 37 | |||
| 38 | def intranges_contain(int_, ranges): | ||
| 39 | """Determine if `int_` falls into one of the ranges in `ranges`.""" | ||
| 40 | tuple_ = _encode_range(int_, 0) | ||
| 41 | pos = bisect.bisect_left(ranges, tuple_) | ||
| 42 | # we could be immediately ahead of a tuple (start, end) | ||
| 43 | # which contains int_ iff start <= int_ < end | ||
| 44 | if pos > 0: | ||
| 45 | left, right = _decode_range(ranges[pos-1]) | ||
| 46 | if left <= int_ < right: | ||
| 47 | return True | ||
| 48 | # or we could be immediately behind a tuple (int_, end) | ||
| 49 | if pos < len(ranges): | ||
| 50 | left, _ = _decode_range(ranges[pos]) | ||
| 51 | if left == int_: | ||
| 52 | return True | ||
| 53 | return False | ||
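A usage sketch of the two public helpers, mirroring how `check_label` consults `idnadata.codepoint_classes` (note that `tuple_` above holds a packed integer, not a tuple; the name is a leftover of the representation described in the docstring):

```python
from idna.intranges import intranges_contain, intranges_from_list

# Two runs, 1..3 and 10..11, become two packed (start << 32) | end ints.
ranges = intranges_from_list([1, 2, 3, 10, 11])
assert ranges == ((1 << 32) | 4, (10 << 32) | 12)

assert intranges_contain(2, ranges)
assert intranges_contain(10, ranges)
assert not intranges_contain(4, ranges)   # range ends are exclusive
assert not intranges_contain(9, ranges)
```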
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/idna/package_data.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/idna/package_data.py new file mode 100644 index 0000000..3073271 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/idna/package_data.py | |||
| @@ -0,0 +1,2 @@ | |||
| 1 | __version__ = '2.6' | ||
| 2 | |||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/idna/uts46data.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/idna/uts46data.py new file mode 100644 index 0000000..fa1d66a --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/idna/uts46data.py | |||
| @@ -0,0 +1,7634 @@ | |||
| 1 | # This file is automatically generated by tools/idna-data | ||
| 2 | # vim: set fileencoding=utf-8 : | ||
| 3 | |||
| 4 | """IDNA Mapping Table from UTS46.""" | ||
| 5 | |||
| 6 | |||
| 7 | __version__ = "6.3.0" | ||
| 8 | def _seg_0(): | ||
| 9 | return [ | ||
| 10 | (0x0, '3'), | ||
| 11 | (0x1, '3'), | ||
| 12 | (0x2, '3'), | ||
| 13 | (0x3, '3'), | ||
| 14 | (0x4, '3'), | ||
| 15 | (0x5, '3'), | ||
| 16 | (0x6, '3'), | ||
| 17 | (0x7, '3'), | ||
| 18 | (0x8, '3'), | ||
| 19 | (0x9, '3'), | ||
| 20 | (0xA, '3'), | ||
| 21 | (0xB, '3'), | ||
| 22 | (0xC, '3'), | ||
| 23 | (0xD, '3'), | ||
| 24 | (0xE, '3'), | ||
| 25 | (0xF, '3'), | ||
| 26 | (0x10, '3'), | ||
| 27 | (0x11, '3'), | ||
| 28 | (0x12, '3'), | ||
| 29 | (0x13, '3'), | ||
| 30 | (0x14, '3'), | ||
| 31 | (0x15, '3'), | ||
| 32 | (0x16, '3'), | ||
| 33 | (0x17, '3'), | ||
| 34 | (0x18, '3'), | ||
| 35 | (0x19, '3'), | ||
| 36 | (0x1A, '3'), | ||
| 37 | (0x1B, '3'), | ||
| 38 | (0x1C, '3'), | ||
| 39 | (0x1D, '3'), | ||
| 40 | (0x1E, '3'), | ||
| 41 | (0x1F, '3'), | ||
| 42 | (0x20, '3'), | ||
| 43 | (0x21, '3'), | ||
| 44 | (0x22, '3'), | ||
| 45 | (0x23, '3'), | ||
| 46 | (0x24, '3'), | ||
| 47 | (0x25, '3'), | ||
| 48 | (0x26, '3'), | ||
| 49 | (0x27, '3'), | ||
| 50 | (0x28, '3'), | ||
| 51 | (0x29, '3'), | ||
| 52 | (0x2A, '3'), | ||
| 53 | (0x2B, '3'), | ||
| 54 | (0x2C, '3'), | ||
| 55 | (0x2D, 'V'), | ||
| 56 | (0x2E, 'V'), | ||
| 57 | (0x2F, '3'), | ||
| 58 | (0x30, 'V'), | ||
| 59 | (0x31, 'V'), | ||
| 60 | (0x32, 'V'), | ||
| 61 | (0x33, 'V'), | ||
| 62 | (0x34, 'V'), | ||
| 63 | (0x35, 'V'), | ||
| 64 | (0x36, 'V'), | ||
| 65 | (0x37, 'V'), | ||
| 66 | (0x38, 'V'), | ||
| 67 | (0x39, 'V'), | ||
| 68 | (0x3A, '3'), | ||
| 69 | (0x3B, '3'), | ||
| 70 | (0x3C, '3'), | ||
| 71 | (0x3D, '3'), | ||
| 72 | (0x3E, '3'), | ||
| 73 | (0x3F, '3'), | ||
| 74 | (0x40, '3'), | ||
| 75 | (0x41, 'M', u'a'), | ||
| 76 | (0x42, 'M', u'b'), | ||
| 77 | (0x43, 'M', u'c'), | ||
| 78 | (0x44, 'M', u'd'), | ||
| 79 | (0x45, 'M', u'e'), | ||
| 80 | (0x46, 'M', u'f'), | ||
| 81 | (0x47, 'M', u'g'), | ||
| 82 | (0x48, 'M', u'h'), | ||
| 83 | (0x49, 'M', u'i'), | ||
| 84 | (0x4A, 'M', u'j'), | ||
| 85 | (0x4B, 'M', u'k'), | ||
| 86 | (0x4C, 'M', u'l'), | ||
| 87 | (0x4D, 'M', u'm'), | ||
| 88 | (0x4E, 'M', u'n'), | ||
| 89 | (0x4F, 'M', u'o'), | ||
| 90 | (0x50, 'M', u'p'), | ||
| 91 | (0x51, 'M', u'q'), | ||
| 92 | (0x52, 'M', u'r'), | ||
| 93 | (0x53, 'M', u's'), | ||
| 94 | (0x54, 'M', u't'), | ||
| 95 | (0x55, 'M', u'u'), | ||
| 96 | (0x56, 'M', u'v'), | ||
| 97 | (0x57, 'M', u'w'), | ||
| 98 | (0x58, 'M', u'x'), | ||
| 99 | (0x59, 'M', u'y'), | ||
| 100 | (0x5A, 'M', u'z'), | ||
| 101 | (0x5B, '3'), | ||
| 102 | (0x5C, '3'), | ||
| 103 | (0x5D, '3'), | ||
| 104 | (0x5E, '3'), | ||
| 105 | (0x5F, '3'), | ||
| 106 | (0x60, '3'), | ||
| 107 | (0x61, 'V'), | ||
| 108 | (0x62, 'V'), | ||
| 109 | (0x63, 'V'), | ||
| 110 | ] | ||
| 111 | |||
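The table is emitted as many small `_seg_N()` functions that are concatenated into one `uts46data` tuple at the bottom of the file, presumably to keep each literal small enough for CPython's parser; that rationale is my reading, not documented here. Each row is `(codepoint, status)` or `(codepoint, status, mapping)` and applies from its codepoint up to the next row's, which is why `uts46_remap` above looks rows up by bisection (direct indexing works below 0x100 only because those rows are dense). A sketch of the lookup over a toy excerpt taken from `_seg_3` below:

```python
import bisect

# A row covers every codepoint up to the next row's start: here the
# (0x132, ...) row covers both U+0132 and U+0133.
uts46data = [
    (0x131, 'V'),
    (0x132, 'M', u'ij'),  # U+0132 and U+0133 both map to u'ij'
    (0x134, 'M', u'ĵ'),
    (0x135, 'V'),
]

def lookup(code_point):
    # Mirrors the bisect expression in uts46_remap().
    return uts46data[bisect.bisect_left(uts46data, (code_point, 'Z')) - 1]

assert lookup(0x132) == (0x132, 'M', u'ij')
assert lookup(0x133) == (0x132, 'M', u'ij')  # falls through to the run's row
```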
| 112 | def _seg_1(): | ||
| 113 | return [ | ||
| 114 | (0x64, 'V'), | ||
| 115 | (0x65, 'V'), | ||
| 116 | (0x66, 'V'), | ||
| 117 | (0x67, 'V'), | ||
| 118 | (0x68, 'V'), | ||
| 119 | (0x69, 'V'), | ||
| 120 | (0x6A, 'V'), | ||
| 121 | (0x6B, 'V'), | ||
| 122 | (0x6C, 'V'), | ||
| 123 | (0x6D, 'V'), | ||
| 124 | (0x6E, 'V'), | ||
| 125 | (0x6F, 'V'), | ||
| 126 | (0x70, 'V'), | ||
| 127 | (0x71, 'V'), | ||
| 128 | (0x72, 'V'), | ||
| 129 | (0x73, 'V'), | ||
| 130 | (0x74, 'V'), | ||
| 131 | (0x75, 'V'), | ||
| 132 | (0x76, 'V'), | ||
| 133 | (0x77, 'V'), | ||
| 134 | (0x78, 'V'), | ||
| 135 | (0x79, 'V'), | ||
| 136 | (0x7A, 'V'), | ||
| 137 | (0x7B, '3'), | ||
| 138 | (0x7C, '3'), | ||
| 139 | (0x7D, '3'), | ||
| 140 | (0x7E, '3'), | ||
| 141 | (0x7F, '3'), | ||
| 142 | (0x80, 'X'), | ||
| 143 | (0x81, 'X'), | ||
| 144 | (0x82, 'X'), | ||
| 145 | (0x83, 'X'), | ||
| 146 | (0x84, 'X'), | ||
| 147 | (0x85, 'X'), | ||
| 148 | (0x86, 'X'), | ||
| 149 | (0x87, 'X'), | ||
| 150 | (0x88, 'X'), | ||
| 151 | (0x89, 'X'), | ||
| 152 | (0x8A, 'X'), | ||
| 153 | (0x8B, 'X'), | ||
| 154 | (0x8C, 'X'), | ||
| 155 | (0x8D, 'X'), | ||
| 156 | (0x8E, 'X'), | ||
| 157 | (0x8F, 'X'), | ||
| 158 | (0x90, 'X'), | ||
| 159 | (0x91, 'X'), | ||
| 160 | (0x92, 'X'), | ||
| 161 | (0x93, 'X'), | ||
| 162 | (0x94, 'X'), | ||
| 163 | (0x95, 'X'), | ||
| 164 | (0x96, 'X'), | ||
| 165 | (0x97, 'X'), | ||
| 166 | (0x98, 'X'), | ||
| 167 | (0x99, 'X'), | ||
| 168 | (0x9A, 'X'), | ||
| 169 | (0x9B, 'X'), | ||
| 170 | (0x9C, 'X'), | ||
| 171 | (0x9D, 'X'), | ||
| 172 | (0x9E, 'X'), | ||
| 173 | (0x9F, 'X'), | ||
| 174 | (0xA0, '3', u' '), | ||
| 175 | (0xA1, 'V'), | ||
| 176 | (0xA2, 'V'), | ||
| 177 | (0xA3, 'V'), | ||
| 178 | (0xA4, 'V'), | ||
| 179 | (0xA5, 'V'), | ||
| 180 | (0xA6, 'V'), | ||
| 181 | (0xA7, 'V'), | ||
| 182 | (0xA8, '3', u' ̈'), | ||
| 183 | (0xA9, 'V'), | ||
| 184 | (0xAA, 'M', u'a'), | ||
| 185 | (0xAB, 'V'), | ||
| 186 | (0xAC, 'V'), | ||
| 187 | (0xAD, 'I'), | ||
| 188 | (0xAE, 'V'), | ||
| 189 | (0xAF, '3', u' ̄'), | ||
| 190 | (0xB0, 'V'), | ||
| 191 | (0xB1, 'V'), | ||
| 192 | (0xB2, 'M', u'2'), | ||
| 193 | (0xB3, 'M', u'3'), | ||
| 194 | (0xB4, '3', u' ́'), | ||
| 195 | (0xB5, 'M', u'μ'), | ||
| 196 | (0xB6, 'V'), | ||
| 197 | (0xB7, 'V'), | ||
| 198 | (0xB8, '3', u' ̧'), | ||
| 199 | (0xB9, 'M', u'1'), | ||
| 200 | (0xBA, 'M', u'o'), | ||
| 201 | (0xBB, 'V'), | ||
| 202 | (0xBC, 'M', u'1⁄4'), | ||
| 203 | (0xBD, 'M', u'1⁄2'), | ||
| 204 | (0xBE, 'M', u'3⁄4'), | ||
| 205 | (0xBF, 'V'), | ||
| 206 | (0xC0, 'M', u'à'), | ||
| 207 | (0xC1, 'M', u'á'), | ||
| 208 | (0xC2, 'M', u'â'), | ||
| 209 | (0xC3, 'M', u'ã'), | ||
| 210 | (0xC4, 'M', u'ä'), | ||
| 211 | (0xC5, 'M', u'å'), | ||
| 212 | (0xC6, 'M', u'æ'), | ||
| 213 | (0xC7, 'M', u'ç'), | ||
| 214 | ] | ||
| 215 | |||
| 216 | def _seg_2(): | ||
| 217 | return [ | ||
| 218 | (0xC8, 'M', u'è'), | ||
| 219 | (0xC9, 'M', u'é'), | ||
| 220 | (0xCA, 'M', u'ê'), | ||
| 221 | (0xCB, 'M', u'ë'), | ||
| 222 | (0xCC, 'M', u'ì'), | ||
| 223 | (0xCD, 'M', u'í'), | ||
| 224 | (0xCE, 'M', u'î'), | ||
| 225 | (0xCF, 'M', u'ï'), | ||
| 226 | (0xD0, 'M', u'ð'), | ||
| 227 | (0xD1, 'M', u'ñ'), | ||
| 228 | (0xD2, 'M', u'ò'), | ||
| 229 | (0xD3, 'M', u'ó'), | ||
| 230 | (0xD4, 'M', u'ô'), | ||
| 231 | (0xD5, 'M', u'õ'), | ||
| 232 | (0xD6, 'M', u'ö'), | ||
| 233 | (0xD7, 'V'), | ||
| 234 | (0xD8, 'M', u'ø'), | ||
| 235 | (0xD9, 'M', u'ù'), | ||
| 236 | (0xDA, 'M', u'ú'), | ||
| 237 | (0xDB, 'M', u'û'), | ||
| 238 | (0xDC, 'M', u'ü'), | ||
| 239 | (0xDD, 'M', u'ý'), | ||
| 240 | (0xDE, 'M', u'þ'), | ||
| 241 | (0xDF, 'D', u'ss'), | ||
| 242 | (0xE0, 'V'), | ||
| 243 | (0xE1, 'V'), | ||
| 244 | (0xE2, 'V'), | ||
| 245 | (0xE3, 'V'), | ||
| 246 | (0xE4, 'V'), | ||
| 247 | (0xE5, 'V'), | ||
| 248 | (0xE6, 'V'), | ||
| 249 | (0xE7, 'V'), | ||
| 250 | (0xE8, 'V'), | ||
| 251 | (0xE9, 'V'), | ||
| 252 | (0xEA, 'V'), | ||
| 253 | (0xEB, 'V'), | ||
| 254 | (0xEC, 'V'), | ||
| 255 | (0xED, 'V'), | ||
| 256 | (0xEE, 'V'), | ||
| 257 | (0xEF, 'V'), | ||
| 258 | (0xF0, 'V'), | ||
| 259 | (0xF1, 'V'), | ||
| 260 | (0xF2, 'V'), | ||
| 261 | (0xF3, 'V'), | ||
| 262 | (0xF4, 'V'), | ||
| 263 | (0xF5, 'V'), | ||
| 264 | (0xF6, 'V'), | ||
| 265 | (0xF7, 'V'), | ||
| 266 | (0xF8, 'V'), | ||
| 267 | (0xF9, 'V'), | ||
| 268 | (0xFA, 'V'), | ||
| 269 | (0xFB, 'V'), | ||
| 270 | (0xFC, 'V'), | ||
| 271 | (0xFD, 'V'), | ||
| 272 | (0xFE, 'V'), | ||
| 273 | (0xFF, 'V'), | ||
| 274 | (0x100, 'M', u'ā'), | ||
| 275 | (0x101, 'V'), | ||
| 276 | (0x102, 'M', u'ă'), | ||
| 277 | (0x103, 'V'), | ||
| 278 | (0x104, 'M', u'ą'), | ||
| 279 | (0x105, 'V'), | ||
| 280 | (0x106, 'M', u'ć'), | ||
| 281 | (0x107, 'V'), | ||
| 282 | (0x108, 'M', u'ĉ'), | ||
| 283 | (0x109, 'V'), | ||
| 284 | (0x10A, 'M', u'ċ'), | ||
| 285 | (0x10B, 'V'), | ||
| 286 | (0x10C, 'M', u'č'), | ||
| 287 | (0x10D, 'V'), | ||
| 288 | (0x10E, 'M', u'ď'), | ||
| 289 | (0x10F, 'V'), | ||
| 290 | (0x110, 'M', u'đ'), | ||
| 291 | (0x111, 'V'), | ||
| 292 | (0x112, 'M', u'ē'), | ||
| 293 | (0x113, 'V'), | ||
| 294 | (0x114, 'M', u'ĕ'), | ||
| 295 | (0x115, 'V'), | ||
| 296 | (0x116, 'M', u'ė'), | ||
| 297 | (0x117, 'V'), | ||
| 298 | (0x118, 'M', u'ę'), | ||
| 299 | (0x119, 'V'), | ||
| 300 | (0x11A, 'M', u'ě'), | ||
| 301 | (0x11B, 'V'), | ||
| 302 | (0x11C, 'M', u'ĝ'), | ||
| 303 | (0x11D, 'V'), | ||
| 304 | (0x11E, 'M', u'ğ'), | ||
| 305 | (0x11F, 'V'), | ||
| 306 | (0x120, 'M', u'ġ'), | ||
| 307 | (0x121, 'V'), | ||
| 308 | (0x122, 'M', u'ģ'), | ||
| 309 | (0x123, 'V'), | ||
| 310 | (0x124, 'M', u'ĥ'), | ||
| 311 | (0x125, 'V'), | ||
| 312 | (0x126, 'M', u'ħ'), | ||
| 313 | (0x127, 'V'), | ||
| 314 | (0x128, 'M', u'ĩ'), | ||
| 315 | (0x129, 'V'), | ||
| 316 | (0x12A, 'M', u'ī'), | ||
| 317 | (0x12B, 'V'), | ||
| 318 | ] | ||
| 319 | |||
| 320 | def _seg_3(): | ||
| 321 | return [ | ||
| 322 | (0x12C, 'M', u'ĭ'), | ||
| 323 | (0x12D, 'V'), | ||
| 324 | (0x12E, 'M', u'į'), | ||
| 325 | (0x12F, 'V'), | ||
| 326 | (0x130, 'M', u'i̇'), | ||
| 327 | (0x131, 'V'), | ||
| 328 | (0x132, 'M', u'ij'), | ||
| 329 | (0x134, 'M', u'ĵ'), | ||
| 330 | (0x135, 'V'), | ||
| 331 | (0x136, 'M', u'ķ'), | ||
| 332 | (0x137, 'V'), | ||
| 333 | (0x139, 'M', u'ĺ'), | ||
| 334 | (0x13A, 'V'), | ||
| 335 | (0x13B, 'M', u'ļ'), | ||
| 336 | (0x13C, 'V'), | ||
| 337 | (0x13D, 'M', u'ľ'), | ||
| 338 | (0x13E, 'V'), | ||
| 339 | (0x13F, 'M', u'l·'), | ||
| 340 | (0x141, 'M', u'ł'), | ||
| 341 | (0x142, 'V'), | ||
| 342 | (0x143, 'M', u'ń'), | ||
| 343 | (0x144, 'V'), | ||
| 344 | (0x145, 'M', u'ņ'), | ||
| 345 | (0x146, 'V'), | ||
| 346 | (0x147, 'M', u'ň'), | ||
| 347 | (0x148, 'V'), | ||
| 348 | (0x149, 'M', u'ʼn'), | ||
| 349 | (0x14A, 'M', u'ŋ'), | ||
| 350 | (0x14B, 'V'), | ||
| 351 | (0x14C, 'M', u'ō'), | ||
| 352 | (0x14D, 'V'), | ||
| 353 | (0x14E, 'M', u'ŏ'), | ||
| 354 | (0x14F, 'V'), | ||
| 355 | (0x150, 'M', u'ő'), | ||
| 356 | (0x151, 'V'), | ||
| 357 | (0x152, 'M', u'œ'), | ||
| 358 | (0x153, 'V'), | ||
| 359 | (0x154, 'M', u'ŕ'), | ||
| 360 | (0x155, 'V'), | ||
| 361 | (0x156, 'M', u'ŗ'), | ||
| 362 | (0x157, 'V'), | ||
| 363 | (0x158, 'M', u'ř'), | ||
| 364 | (0x159, 'V'), | ||
| 365 | (0x15A, 'M', u'ś'), | ||
| 366 | (0x15B, 'V'), | ||
| 367 | (0x15C, 'M', u'ŝ'), | ||
| 368 | (0x15D, 'V'), | ||
| 369 | (0x15E, 'M', u'ş'), | ||
| 370 | (0x15F, 'V'), | ||
| 371 | (0x160, 'M', u'š'), | ||
| 372 | (0x161, 'V'), | ||
| 373 | (0x162, 'M', u'ţ'), | ||
| 374 | (0x163, 'V'), | ||
| 375 | (0x164, 'M', u'ť'), | ||
| 376 | (0x165, 'V'), | ||
| 377 | (0x166, 'M', u'ŧ'), | ||
| 378 | (0x167, 'V'), | ||
| 379 | (0x168, 'M', u'ũ'), | ||
| 380 | (0x169, 'V'), | ||
| 381 | (0x16A, 'M', u'ū'), | ||
| 382 | (0x16B, 'V'), | ||
| 383 | (0x16C, 'M', u'ŭ'), | ||
| 384 | (0x16D, 'V'), | ||
| 385 | (0x16E, 'M', u'ů'), | ||
| 386 | (0x16F, 'V'), | ||
| 387 | (0x170, 'M', u'ű'), | ||
| 388 | (0x171, 'V'), | ||
| 389 | (0x172, 'M', u'ų'), | ||
| 390 | (0x173, 'V'), | ||
| 391 | (0x174, 'M', u'ŵ'), | ||
| 392 | (0x175, 'V'), | ||
| 393 | (0x176, 'M', u'ŷ'), | ||
| 394 | (0x177, 'V'), | ||
| 395 | (0x178, 'M', u'ÿ'), | ||
| 396 | (0x179, 'M', u'ź'), | ||
| 397 | (0x17A, 'V'), | ||
| 398 | (0x17B, 'M', u'ż'), | ||
| 399 | (0x17C, 'V'), | ||
| 400 | (0x17D, 'M', u'ž'), | ||
| 401 | (0x17E, 'V'), | ||
| 402 | (0x17F, 'M', u's'), | ||
| 403 | (0x180, 'V'), | ||
| 404 | (0x181, 'M', u'ɓ'), | ||
| 405 | (0x182, 'M', u'ƃ'), | ||
| 406 | (0x183, 'V'), | ||
| 407 | (0x184, 'M', u'ƅ'), | ||
| 408 | (0x185, 'V'), | ||
| 409 | (0x186, 'M', u'ɔ'), | ||
| 410 | (0x187, 'M', u'ƈ'), | ||
| 411 | (0x188, 'V'), | ||
| 412 | (0x189, 'M', u'ɖ'), | ||
| 413 | (0x18A, 'M', u'ɗ'), | ||
| 414 | (0x18B, 'M', u'ƌ'), | ||
| 415 | (0x18C, 'V'), | ||
| 416 | (0x18E, 'M', u'ǝ'), | ||
| 417 | (0x18F, 'M', u'ə'), | ||
| 418 | (0x190, 'M', u'ɛ'), | ||
| 419 | (0x191, 'M', u'ƒ'), | ||
| 420 | (0x192, 'V'), | ||
| 421 | (0x193, 'M', u'ɠ'), | ||
| 422 | ] | ||
| 423 | |||
| 424 | def _seg_4(): | ||
| 425 | return [ | ||
| 426 | (0x194, 'M', u'ɣ'), | ||
| 427 | (0x195, 'V'), | ||
| 428 | (0x196, 'M', u'ɩ'), | ||
| 429 | (0x197, 'M', u'ɨ'), | ||
| 430 | (0x198, 'M', u'ƙ'), | ||
| 431 | (0x199, 'V'), | ||
| 432 | (0x19C, 'M', u'ɯ'), | ||
| 433 | (0x19D, 'M', u'ɲ'), | ||
| 434 | (0x19E, 'V'), | ||
| 435 | (0x19F, 'M', u'ɵ'), | ||
| 436 | (0x1A0, 'M', u'ơ'), | ||
| 437 | (0x1A1, 'V'), | ||
| 438 | (0x1A2, 'M', u'ƣ'), | ||
| 439 | (0x1A3, 'V'), | ||
| 440 | (0x1A4, 'M', u'ƥ'), | ||
| 441 | (0x1A5, 'V'), | ||
| 442 | (0x1A6, 'M', u'ʀ'), | ||
| 443 | (0x1A7, 'M', u'ƨ'), | ||
| 444 | (0x1A8, 'V'), | ||
| 445 | (0x1A9, 'M', u'ʃ'), | ||
| 446 | (0x1AA, 'V'), | ||
| 447 | (0x1AC, 'M', u'ƭ'), | ||
| 448 | (0x1AD, 'V'), | ||
| 449 | (0x1AE, 'M', u'ʈ'), | ||
| 450 | (0x1AF, 'M', u'ư'), | ||
| 451 | (0x1B0, 'V'), | ||
| 452 | (0x1B1, 'M', u'ʊ'), | ||
| 453 | (0x1B2, 'M', u'ʋ'), | ||
| 454 | (0x1B3, 'M', u'ƴ'), | ||
| 455 | (0x1B4, 'V'), | ||
| 456 | (0x1B5, 'M', u'ƶ'), | ||
| 457 | (0x1B6, 'V'), | ||
| 458 | (0x1B7, 'M', u'ʒ'), | ||
| 459 | (0x1B8, 'M', u'ƹ'), | ||
| 460 | (0x1B9, 'V'), | ||
| 461 | (0x1BC, 'M', u'ƽ'), | ||
| 462 | (0x1BD, 'V'), | ||
| 463 | (0x1C4, 'M', u'dž'), | ||
| 464 | (0x1C7, 'M', u'lj'), | ||
| 465 | (0x1CA, 'M', u'nj'), | ||
| 466 | (0x1CD, 'M', u'ǎ'), | ||
| 467 | (0x1CE, 'V'), | ||
| 468 | (0x1CF, 'M', u'ǐ'), | ||
| 469 | (0x1D0, 'V'), | ||
| 470 | (0x1D1, 'M', u'ǒ'), | ||
| 471 | (0x1D2, 'V'), | ||
| 472 | (0x1D3, 'M', u'ǔ'), | ||
| 473 | (0x1D4, 'V'), | ||
| 474 | (0x1D5, 'M', u'ǖ'), | ||
| 475 | (0x1D6, 'V'), | ||
| 476 | (0x1D7, 'M', u'ǘ'), | ||
| 477 | (0x1D8, 'V'), | ||
| 478 | (0x1D9, 'M', u'ǚ'), | ||
| 479 | (0x1DA, 'V'), | ||
| 480 | (0x1DB, 'M', u'ǜ'), | ||
| 481 | (0x1DC, 'V'), | ||
| 482 | (0x1DE, 'M', u'ǟ'), | ||
| 483 | (0x1DF, 'V'), | ||
| 484 | (0x1E0, 'M', u'ǡ'), | ||
| 485 | (0x1E1, 'V'), | ||
| 486 | (0x1E2, 'M', u'ǣ'), | ||
| 487 | (0x1E3, 'V'), | ||
| 488 | (0x1E4, 'M', u'ǥ'), | ||
| 489 | (0x1E5, 'V'), | ||
| 490 | (0x1E6, 'M', u'ǧ'), | ||
| 491 | (0x1E7, 'V'), | ||
| 492 | (0x1E8, 'M', u'ǩ'), | ||
| 493 | (0x1E9, 'V'), | ||
| 494 | (0x1EA, 'M', u'ǫ'), | ||
| 495 | (0x1EB, 'V'), | ||
| 496 | (0x1EC, 'M', u'ǭ'), | ||
| 497 | (0x1ED, 'V'), | ||
| 498 | (0x1EE, 'M', u'ǯ'), | ||
| 499 | (0x1EF, 'V'), | ||
| 500 | (0x1F1, 'M', u'dz'), | ||
| 501 | (0x1F4, 'M', u'ǵ'), | ||
| 502 | (0x1F5, 'V'), | ||
| 503 | (0x1F6, 'M', u'ƕ'), | ||
| 504 | (0x1F7, 'M', u'ƿ'), | ||
| 505 | (0x1F8, 'M', u'ǹ'), | ||
| 506 | (0x1F9, 'V'), | ||
| 507 | (0x1FA, 'M', u'ǻ'), | ||
| 508 | (0x1FB, 'V'), | ||
| 509 | (0x1FC, 'M', u'ǽ'), | ||
| 510 | (0x1FD, 'V'), | ||
| 511 | (0x1FE, 'M', u'ǿ'), | ||
| 512 | (0x1FF, 'V'), | ||
| 513 | (0x200, 'M', u'ȁ'), | ||
| 514 | (0x201, 'V'), | ||
| 515 | (0x202, 'M', u'ȃ'), | ||
| 516 | (0x203, 'V'), | ||
| 517 | (0x204, 'M', u'ȅ'), | ||
| 518 | (0x205, 'V'), | ||
| 519 | (0x206, 'M', u'ȇ'), | ||
| 520 | (0x207, 'V'), | ||
| 521 | (0x208, 'M', u'ȉ'), | ||
| 522 | (0x209, 'V'), | ||
| 523 | (0x20A, 'M', u'ȋ'), | ||
| 524 | (0x20B, 'V'), | ||
| 525 | (0x20C, 'M', u'ȍ'), | ||
| 526 | ] | ||
| 527 | |||
| 528 | def _seg_5(): | ||
| 529 | return [ | ||
| 530 | (0x20D, 'V'), | ||
| 531 | (0x20E, 'M', u'ȏ'), | ||
| 532 | (0x20F, 'V'), | ||
| 533 | (0x210, 'M', u'ȑ'), | ||
| 534 | (0x211, 'V'), | ||
| 535 | (0x212, 'M', u'ȓ'), | ||
| 536 | (0x213, 'V'), | ||
| 537 | (0x214, 'M', u'ȕ'), | ||
| 538 | (0x215, 'V'), | ||
| 539 | (0x216, 'M', u'ȗ'), | ||
| 540 | (0x217, 'V'), | ||
| 541 | (0x218, 'M', u'ș'), | ||
| 542 | (0x219, 'V'), | ||
| 543 | (0x21A, 'M', u'ț'), | ||
| 544 | (0x21B, 'V'), | ||
| 545 | (0x21C, 'M', u'ȝ'), | ||
| 546 | (0x21D, 'V'), | ||
| 547 | (0x21E, 'M', u'ȟ'), | ||
| 548 | (0x21F, 'V'), | ||
| 549 | (0x220, 'M', u'ƞ'), | ||
| 550 | (0x221, 'V'), | ||
| 551 | (0x222, 'M', u'ȣ'), | ||
| 552 | (0x223, 'V'), | ||
| 553 | (0x224, 'M', u'ȥ'), | ||
| 554 | (0x225, 'V'), | ||
| 555 | (0x226, 'M', u'ȧ'), | ||
| 556 | (0x227, 'V'), | ||
| 557 | (0x228, 'M', u'ȩ'), | ||
| 558 | (0x229, 'V'), | ||
| 559 | (0x22A, 'M', u'ȫ'), | ||
| 560 | (0x22B, 'V'), | ||
| 561 | (0x22C, 'M', u'ȭ'), | ||
| 562 | (0x22D, 'V'), | ||
| 563 | (0x22E, 'M', u'ȯ'), | ||
| 564 | (0x22F, 'V'), | ||
| 565 | (0x230, 'M', u'ȱ'), | ||
| 566 | (0x231, 'V'), | ||
| 567 | (0x232, 'M', u'ȳ'), | ||
| 568 | (0x233, 'V'), | ||
| 569 | (0x23A, 'M', u'ⱥ'), | ||
| 570 | (0x23B, 'M', u'ȼ'), | ||
| 571 | (0x23C, 'V'), | ||
| 572 | (0x23D, 'M', u'ƚ'), | ||
| 573 | (0x23E, 'M', u'ⱦ'), | ||
| 574 | (0x23F, 'V'), | ||
| 575 | (0x241, 'M', u'ɂ'), | ||
| 576 | (0x242, 'V'), | ||
| 577 | (0x243, 'M', u'ƀ'), | ||
| 578 | (0x244, 'M', u'ʉ'), | ||
| 579 | (0x245, 'M', u'ʌ'), | ||
| 580 | (0x246, 'M', u'ɇ'), | ||
| 581 | (0x247, 'V'), | ||
| 582 | (0x248, 'M', u'ɉ'), | ||
| 583 | (0x249, 'V'), | ||
| 584 | (0x24A, 'M', u'ɋ'), | ||
| 585 | (0x24B, 'V'), | ||
| 586 | (0x24C, 'M', u'ɍ'), | ||
| 587 | (0x24D, 'V'), | ||
| 588 | (0x24E, 'M', u'ɏ'), | ||
| 589 | (0x24F, 'V'), | ||
| 590 | (0x2B0, 'M', u'h'), | ||
| 591 | (0x2B1, 'M', u'ɦ'), | ||
| 592 | (0x2B2, 'M', u'j'), | ||
| 593 | (0x2B3, 'M', u'r'), | ||
| 594 | (0x2B4, 'M', u'ɹ'), | ||
| 595 | (0x2B5, 'M', u'ɻ'), | ||
| 596 | (0x2B6, 'M', u'ʁ'), | ||
| 597 | (0x2B7, 'M', u'w'), | ||
| 598 | (0x2B8, 'M', u'y'), | ||
| 599 | (0x2B9, 'V'), | ||
| 600 | (0x2D8, '3', u' ̆'), | ||
| 601 | (0x2D9, '3', u' ̇'), | ||
| 602 | (0x2DA, '3', u' ̊'), | ||
| 603 | (0x2DB, '3', u' ̨'), | ||
| 604 | (0x2DC, '3', u' ̃'), | ||
| 605 | (0x2DD, '3', u' ̋'), | ||
| 606 | (0x2DE, 'V'), | ||
| 607 | (0x2E0, 'M', u'ɣ'), | ||
| 608 | (0x2E1, 'M', u'l'), | ||
| 609 | (0x2E2, 'M', u's'), | ||
| 610 | (0x2E3, 'M', u'x'), | ||
| 611 | (0x2E4, 'M', u'ʕ'), | ||
| 612 | (0x2E5, 'V'), | ||
| 613 | (0x340, 'M', u'̀'), | ||
| 614 | (0x341, 'M', u'́'), | ||
| 615 | (0x342, 'V'), | ||
| 616 | (0x343, 'M', u'̓'), | ||
| 617 | (0x344, 'M', u'̈́'), | ||
| 618 | (0x345, 'M', u'ι'), | ||
| 619 | (0x346, 'V'), | ||
| 620 | (0x34F, 'I'), | ||
| 621 | (0x350, 'V'), | ||
| 622 | (0x370, 'M', u'ͱ'), | ||
| 623 | (0x371, 'V'), | ||
| 624 | (0x372, 'M', u'ͳ'), | ||
| 625 | (0x373, 'V'), | ||
| 626 | (0x374, 'M', u'ʹ'), | ||
| 627 | (0x375, 'V'), | ||
| 628 | (0x376, 'M', u'ͷ'), | ||
| 629 | (0x377, 'V'), | ||
| 630 | ] | ||
| 631 | |||
| 632 | def _seg_6(): | ||
| 633 | return [ | ||
| 634 | (0x378, 'X'), | ||
| 635 | (0x37A, '3', u' ι'), | ||
| 636 | (0x37B, 'V'), | ||
| 637 | (0x37E, '3', u';'), | ||
| 638 | (0x37F, 'X'), | ||
| 639 | (0x384, '3', u' ́'), | ||
| 640 | (0x385, '3', u' ̈́'), | ||
| 641 | (0x386, 'M', u'ά'), | ||
| 642 | (0x387, 'M', u'·'), | ||
| 643 | (0x388, 'M', u'έ'), | ||
| 644 | (0x389, 'M', u'ή'), | ||
| 645 | (0x38A, 'M', u'ί'), | ||
| 646 | (0x38B, 'X'), | ||
| 647 | (0x38C, 'M', u'ό'), | ||
| 648 | (0x38D, 'X'), | ||
| 649 | (0x38E, 'M', u'ύ'), | ||
| 650 | (0x38F, 'M', u'ώ'), | ||
| 651 | (0x390, 'V'), | ||
| 652 | (0x391, 'M', u'α'), | ||
| 653 | (0x392, 'M', u'β'), | ||
| 654 | (0x393, 'M', u'γ'), | ||
| 655 | (0x394, 'M', u'δ'), | ||
| 656 | (0x395, 'M', u'ε'), | ||
| 657 | (0x396, 'M', u'ζ'), | ||
| 658 | (0x397, 'M', u'η'), | ||
| 659 | (0x398, 'M', u'θ'), | ||
| 660 | (0x399, 'M', u'ι'), | ||
| 661 | (0x39A, 'M', u'κ'), | ||
| 662 | (0x39B, 'M', u'λ'), | ||
| 663 | (0x39C, 'M', u'μ'), | ||
| 664 | (0x39D, 'M', u'ν'), | ||
| 665 | (0x39E, 'M', u'ξ'), | ||
| 666 | (0x39F, 'M', u'ο'), | ||
| 667 | (0x3A0, 'M', u'π'), | ||
| 668 | (0x3A1, 'M', u'ρ'), | ||
| 669 | (0x3A2, 'X'), | ||
| 670 | (0x3A3, 'M', u'σ'), | ||
| 671 | (0x3A4, 'M', u'τ'), | ||
| 672 | (0x3A5, 'M', u'υ'), | ||
| 673 | (0x3A6, 'M', u'φ'), | ||
| 674 | (0x3A7, 'M', u'χ'), | ||
| 675 | (0x3A8, 'M', u'ψ'), | ||
| 676 | (0x3A9, 'M', u'ω'), | ||
| 677 | (0x3AA, 'M', u'ϊ'), | ||
| 678 | (0x3AB, 'M', u'ϋ'), | ||
| 679 | (0x3AC, 'V'), | ||
| 680 | (0x3C2, 'D', u'σ'), | ||
| 681 | (0x3C3, 'V'), | ||
| 682 | (0x3CF, 'M', u'ϗ'), | ||
| 683 | (0x3D0, 'M', u'β'), | ||
| 684 | (0x3D1, 'M', u'θ'), | ||
| 685 | (0x3D2, 'M', u'υ'), | ||
| 686 | (0x3D3, 'M', u'ύ'), | ||
| 687 | (0x3D4, 'M', u'ϋ'), | ||
| 688 | (0x3D5, 'M', u'φ'), | ||
| 689 | (0x3D6, 'M', u'π'), | ||
| 690 | (0x3D7, 'V'), | ||
| 691 | (0x3D8, 'M', u'ϙ'), | ||
| 692 | (0x3D9, 'V'), | ||
| 693 | (0x3DA, 'M', u'ϛ'), | ||
| 694 | (0x3DB, 'V'), | ||
| 695 | (0x3DC, 'M', u'ϝ'), | ||
| 696 | (0x3DD, 'V'), | ||
| 697 | (0x3DE, 'M', u'ϟ'), | ||
| 698 | (0x3DF, 'V'), | ||
| 699 | (0x3E0, 'M', u'ϡ'), | ||
| 700 | (0x3E1, 'V'), | ||
| 701 | (0x3E2, 'M', u'ϣ'), | ||
| 702 | (0x3E3, 'V'), | ||
| 703 | (0x3E4, 'M', u'ϥ'), | ||
| 704 | (0x3E5, 'V'), | ||
| 705 | (0x3E6, 'M', u'ϧ'), | ||
| 706 | (0x3E7, 'V'), | ||
| 707 | (0x3E8, 'M', u'ϩ'), | ||
| 708 | (0x3E9, 'V'), | ||
| 709 | (0x3EA, 'M', u'ϫ'), | ||
| 710 | (0x3EB, 'V'), | ||
| 711 | (0x3EC, 'M', u'ϭ'), | ||
| 712 | (0x3ED, 'V'), | ||
| 713 | (0x3EE, 'M', u'ϯ'), | ||
| 714 | (0x3EF, 'V'), | ||
| 715 | (0x3F0, 'M', u'κ'), | ||
| 716 | (0x3F1, 'M', u'ρ'), | ||
| 717 | (0x3F2, 'M', u'σ'), | ||
| 718 | (0x3F3, 'V'), | ||
| 719 | (0x3F4, 'M', u'θ'), | ||
| 720 | (0x3F5, 'M', u'ε'), | ||
| 721 | (0x3F6, 'V'), | ||
| 722 | (0x3F7, 'M', u'ϸ'), | ||
| 723 | (0x3F8, 'V'), | ||
| 724 | (0x3F9, 'M', u'σ'), | ||
| 725 | (0x3FA, 'M', u'ϻ'), | ||
| 726 | (0x3FB, 'V'), | ||
| 727 | (0x3FD, 'M', u'ͻ'), | ||
| 728 | (0x3FE, 'M', u'ͼ'), | ||
| 729 | (0x3FF, 'M', u'ͽ'), | ||
| 730 | (0x400, 'M', u'ѐ'), | ||
| 731 | (0x401, 'M', u'ё'), | ||
| 732 | (0x402, 'M', u'ђ'), | ||
| 733 | (0x403, 'M', u'ѓ'), | ||
| 734 | ] | ||
| 735 | |||
| 736 | def _seg_7(): | ||
| 737 | return [ | ||
| 738 | (0x404, 'M', u'є'), | ||
| 739 | (0x405, 'M', u'ѕ'), | ||
| 740 | (0x406, 'M', u'і'), | ||
| 741 | (0x407, 'M', u'ї'), | ||
| 742 | (0x408, 'M', u'ј'), | ||
| 743 | (0x409, 'M', u'љ'), | ||
| 744 | (0x40A, 'M', u'њ'), | ||
| 745 | (0x40B, 'M', u'ћ'), | ||
| 746 | (0x40C, 'M', u'ќ'), | ||
| 747 | (0x40D, 'M', u'ѝ'), | ||
| 748 | (0x40E, 'M', u'ў'), | ||
| 749 | (0x40F, 'M', u'џ'), | ||
| 750 | (0x410, 'M', u'а'), | ||
| 751 | (0x411, 'M', u'б'), | ||
| 752 | (0x412, 'M', u'в'), | ||
| 753 | (0x413, 'M', u'г'), | ||
| 754 | (0x414, 'M', u'д'), | ||
| 755 | (0x415, 'M', u'е'), | ||
| 756 | (0x416, 'M', u'ж'), | ||
| 757 | (0x417, 'M', u'з'), | ||
| 758 | (0x418, 'M', u'и'), | ||
| 759 | (0x419, 'M', u'й'), | ||
| 760 | (0x41A, 'M', u'к'), | ||
| 761 | (0x41B, 'M', u'л'), | ||
| 762 | (0x41C, 'M', u'м'), | ||
| 763 | (0x41D, 'M', u'н'), | ||
| 764 | (0x41E, 'M', u'о'), | ||
| 765 | (0x41F, 'M', u'п'), | ||
| 766 | (0x420, 'M', u'р'), | ||
| 767 | (0x421, 'M', u'с'), | ||
| 768 | (0x422, 'M', u'т'), | ||
| 769 | (0x423, 'M', u'у'), | ||
| 770 | (0x424, 'M', u'ф'), | ||
| 771 | (0x425, 'M', u'х'), | ||
| 772 | (0x426, 'M', u'ц'), | ||
| 773 | (0x427, 'M', u'ч'), | ||
| 774 | (0x428, 'M', u'ш'), | ||
| 775 | (0x429, 'M', u'щ'), | ||
| 776 | (0x42A, 'M', u'ъ'), | ||
| 777 | (0x42B, 'M', u'ы'), | ||
| 778 | (0x42C, 'M', u'ь'), | ||
| 779 | (0x42D, 'M', u'э'), | ||
| 780 | (0x42E, 'M', u'ю'), | ||
| 781 | (0x42F, 'M', u'я'), | ||
| 782 | (0x430, 'V'), | ||
| 783 | (0x460, 'M', u'ѡ'), | ||
| 784 | (0x461, 'V'), | ||
| 785 | (0x462, 'M', u'ѣ'), | ||
| 786 | (0x463, 'V'), | ||
| 787 | (0x464, 'M', u'ѥ'), | ||
| 788 | (0x465, 'V'), | ||
| 789 | (0x466, 'M', u'ѧ'), | ||
| 790 | (0x467, 'V'), | ||
| 791 | (0x468, 'M', u'ѩ'), | ||
| 792 | (0x469, 'V'), | ||
| 793 | (0x46A, 'M', u'ѫ'), | ||
| 794 | (0x46B, 'V'), | ||
| 795 | (0x46C, 'M', u'ѭ'), | ||
| 796 | (0x46D, 'V'), | ||
| 797 | (0x46E, 'M', u'ѯ'), | ||
| 798 | (0x46F, 'V'), | ||
| 799 | (0x470, 'M', u'ѱ'), | ||
| 800 | (0x471, 'V'), | ||
| 801 | (0x472, 'M', u'ѳ'), | ||
| 802 | (0x473, 'V'), | ||
| 803 | (0x474, 'M', u'ѵ'), | ||
| 804 | (0x475, 'V'), | ||
| 805 | (0x476, 'M', u'ѷ'), | ||
| 806 | (0x477, 'V'), | ||
| 807 | (0x478, 'M', u'ѹ'), | ||
| 808 | (0x479, 'V'), | ||
| 809 | (0x47A, 'M', u'ѻ'), | ||
| 810 | (0x47B, 'V'), | ||
| 811 | (0x47C, 'M', u'ѽ'), | ||
| 812 | (0x47D, 'V'), | ||
| 813 | (0x47E, 'M', u'ѿ'), | ||
| 814 | (0x47F, 'V'), | ||
| 815 | (0x480, 'M', u'ҁ'), | ||
| 816 | (0x481, 'V'), | ||
| 817 | (0x48A, 'M', u'ҋ'), | ||
| 818 | (0x48B, 'V'), | ||
| 819 | (0x48C, 'M', u'ҍ'), | ||
| 820 | (0x48D, 'V'), | ||
| 821 | (0x48E, 'M', u'ҏ'), | ||
| 822 | (0x48F, 'V'), | ||
| 823 | (0x490, 'M', u'ґ'), | ||
| 824 | (0x491, 'V'), | ||
| 825 | (0x492, 'M', u'ғ'), | ||
| 826 | (0x493, 'V'), | ||
| 827 | (0x494, 'M', u'ҕ'), | ||
| 828 | (0x495, 'V'), | ||
| 829 | (0x496, 'M', u'җ'), | ||
| 830 | (0x497, 'V'), | ||
| 831 | (0x498, 'M', u'ҙ'), | ||
| 832 | (0x499, 'V'), | ||
| 833 | (0x49A, 'M', u'қ'), | ||
| 834 | (0x49B, 'V'), | ||
| 835 | (0x49C, 'M', u'ҝ'), | ||
| 836 | (0x49D, 'V'), | ||
| 837 | (0x49E, 'M', u'ҟ'), | ||
| 838 | ] | ||
| 839 | |||
| 840 | def _seg_8(): | ||
| 841 | return [ | ||
| 842 | (0x49F, 'V'), | ||
| 843 | (0x4A0, 'M', u'ҡ'), | ||
| 844 | (0x4A1, 'V'), | ||
| 845 | (0x4A2, 'M', u'ң'), | ||
| 846 | (0x4A3, 'V'), | ||
| 847 | (0x4A4, 'M', u'ҥ'), | ||
| 848 | (0x4A5, 'V'), | ||
| 849 | (0x4A6, 'M', u'ҧ'), | ||
| 850 | (0x4A7, 'V'), | ||
| 851 | (0x4A8, 'M', u'ҩ'), | ||
| 852 | (0x4A9, 'V'), | ||
| 853 | (0x4AA, 'M', u'ҫ'), | ||
| 854 | (0x4AB, 'V'), | ||
| 855 | (0x4AC, 'M', u'ҭ'), | ||
| 856 | (0x4AD, 'V'), | ||
| 857 | (0x4AE, 'M', u'ү'), | ||
| 858 | (0x4AF, 'V'), | ||
| 859 | (0x4B0, 'M', u'ұ'), | ||
| 860 | (0x4B1, 'V'), | ||
| 861 | (0x4B2, 'M', u'ҳ'), | ||
| 862 | (0x4B3, 'V'), | ||
| 863 | (0x4B4, 'M', u'ҵ'), | ||
| 864 | (0x4B5, 'V'), | ||
| 865 | (0x4B6, 'M', u'ҷ'), | ||
| 866 | (0x4B7, 'V'), | ||
| 867 | (0x4B8, 'M', u'ҹ'), | ||
| 868 | (0x4B9, 'V'), | ||
| 869 | (0x4BA, 'M', u'һ'), | ||
| 870 | (0x4BB, 'V'), | ||
| 871 | (0x4BC, 'M', u'ҽ'), | ||
| 872 | (0x4BD, 'V'), | ||
| 873 | (0x4BE, 'M', u'ҿ'), | ||
| 874 | (0x4BF, 'V'), | ||
| 875 | (0x4C0, 'X'), | ||
| 876 | (0x4C1, 'M', u'ӂ'), | ||
| 877 | (0x4C2, 'V'), | ||
| 878 | (0x4C3, 'M', u'ӄ'), | ||
| 879 | (0x4C4, 'V'), | ||
| 880 | (0x4C5, 'M', u'ӆ'), | ||
| 881 | (0x4C6, 'V'), | ||
| 882 | (0x4C7, 'M', u'ӈ'), | ||
| 883 | (0x4C8, 'V'), | ||
| 884 | (0x4C9, 'M', u'ӊ'), | ||
| 885 | (0x4CA, 'V'), | ||
| 886 | (0x4CB, 'M', u'ӌ'), | ||
| 887 | (0x4CC, 'V'), | ||
| 888 | (0x4CD, 'M', u'ӎ'), | ||
| 889 | (0x4CE, 'V'), | ||
| 890 | (0x4D0, 'M', u'ӑ'), | ||
| 891 | (0x4D1, 'V'), | ||
| 892 | (0x4D2, 'M', u'ӓ'), | ||
| 893 | (0x4D3, 'V'), | ||
| 894 | (0x4D4, 'M', u'ӕ'), | ||
| 895 | (0x4D5, 'V'), | ||
| 896 | (0x4D6, 'M', u'ӗ'), | ||
| 897 | (0x4D7, 'V'), | ||
| 898 | (0x4D8, 'M', u'ә'), | ||
| 899 | (0x4D9, 'V'), | ||
| 900 | (0x4DA, 'M', u'ӛ'), | ||
| 901 | (0x4DB, 'V'), | ||
| 902 | (0x4DC, 'M', u'ӝ'), | ||
| 903 | (0x4DD, 'V'), | ||
| 904 | (0x4DE, 'M', u'ӟ'), | ||
| 905 | (0x4DF, 'V'), | ||
| 906 | (0x4E0, 'M', u'ӡ'), | ||
| 907 | (0x4E1, 'V'), | ||
| 908 | (0x4E2, 'M', u'ӣ'), | ||
| 909 | (0x4E3, 'V'), | ||
| 910 | (0x4E4, 'M', u'ӥ'), | ||
| 911 | (0x4E5, 'V'), | ||
| 912 | (0x4E6, 'M', u'ӧ'), | ||
| 913 | (0x4E7, 'V'), | ||
| 914 | (0x4E8, 'M', u'ө'), | ||
| 915 | (0x4E9, 'V'), | ||
| 916 | (0x4EA, 'M', u'ӫ'), | ||
| 917 | (0x4EB, 'V'), | ||
| 918 | (0x4EC, 'M', u'ӭ'), | ||
| 919 | (0x4ED, 'V'), | ||
| 920 | (0x4EE, 'M', u'ӯ'), | ||
| 921 | (0x4EF, 'V'), | ||
| 922 | (0x4F0, 'M', u'ӱ'), | ||
| 923 | (0x4F1, 'V'), | ||
| 924 | (0x4F2, 'M', u'ӳ'), | ||
| 925 | (0x4F3, 'V'), | ||
| 926 | (0x4F4, 'M', u'ӵ'), | ||
| 927 | (0x4F5, 'V'), | ||
| 928 | (0x4F6, 'M', u'ӷ'), | ||
| 929 | (0x4F7, 'V'), | ||
| 930 | (0x4F8, 'M', u'ӹ'), | ||
| 931 | (0x4F9, 'V'), | ||
| 932 | (0x4FA, 'M', u'ӻ'), | ||
| 933 | (0x4FB, 'V'), | ||
| 934 | (0x4FC, 'M', u'ӽ'), | ||
| 935 | (0x4FD, 'V'), | ||
| 936 | (0x4FE, 'M', u'ӿ'), | ||
| 937 | (0x4FF, 'V'), | ||
| 938 | (0x500, 'M', u'ԁ'), | ||
| 939 | (0x501, 'V'), | ||
| 940 | (0x502, 'M', u'ԃ'), | ||
| 941 | (0x503, 'V'), | ||
| 942 | ] | ||
| 943 | |||
| 944 | def _seg_9(): | ||
| 945 | return [ | ||
| 946 | (0x504, 'M', u'ԅ'), | ||
| 947 | (0x505, 'V'), | ||
| 948 | (0x506, 'M', u'ԇ'), | ||
| 949 | (0x507, 'V'), | ||
| 950 | (0x508, 'M', u'ԉ'), | ||
| 951 | (0x509, 'V'), | ||
| 952 | (0x50A, 'M', u'ԋ'), | ||
| 953 | (0x50B, 'V'), | ||
| 954 | (0x50C, 'M', u'ԍ'), | ||
| 955 | (0x50D, 'V'), | ||
| 956 | (0x50E, 'M', u'ԏ'), | ||
| 957 | (0x50F, 'V'), | ||
| 958 | (0x510, 'M', u'ԑ'), | ||
| 959 | (0x511, 'V'), | ||
| 960 | (0x512, 'M', u'ԓ'), | ||
| 961 | (0x513, 'V'), | ||
| 962 | (0x514, 'M', u'ԕ'), | ||
| 963 | (0x515, 'V'), | ||
| 964 | (0x516, 'M', u'ԗ'), | ||
| 965 | (0x517, 'V'), | ||
| 966 | (0x518, 'M', u'ԙ'), | ||
| 967 | (0x519, 'V'), | ||
| 968 | (0x51A, 'M', u'ԛ'), | ||
| 969 | (0x51B, 'V'), | ||
| 970 | (0x51C, 'M', u'ԝ'), | ||
| 971 | (0x51D, 'V'), | ||
| 972 | (0x51E, 'M', u'ԟ'), | ||
| 973 | (0x51F, 'V'), | ||
| 974 | (0x520, 'M', u'ԡ'), | ||
| 975 | (0x521, 'V'), | ||
| 976 | (0x522, 'M', u'ԣ'), | ||
| 977 | (0x523, 'V'), | ||
| 978 | (0x524, 'M', u'ԥ'), | ||
| 979 | (0x525, 'V'), | ||
| 980 | (0x526, 'M', u'ԧ'), | ||
| 981 | (0x527, 'V'), | ||
| 982 | (0x528, 'X'), | ||
| 983 | (0x531, 'M', u'ա'), | ||
| 984 | (0x532, 'M', u'բ'), | ||
| 985 | (0x533, 'M', u'գ'), | ||
| 986 | (0x534, 'M', u'դ'), | ||
| 987 | (0x535, 'M', u'ե'), | ||
| 988 | (0x536, 'M', u'զ'), | ||
| 989 | (0x537, 'M', u'է'), | ||
| 990 | (0x538, 'M', u'ը'), | ||
| 991 | (0x539, 'M', u'թ'), | ||
| 992 | (0x53A, 'M', u'ժ'), | ||
| 993 | (0x53B, 'M', u'ի'), | ||
| 994 | (0x53C, 'M', u'լ'), | ||
| 995 | (0x53D, 'M', u'խ'), | ||
| 996 | (0x53E, 'M', u'ծ'), | ||
| 997 | (0x53F, 'M', u'կ'), | ||
| 998 | (0x540, 'M', u'հ'), | ||
| 999 | (0x541, 'M', u'ձ'), | ||
| 1000 | (0x542, 'M', u'ղ'), | ||
| 1001 | (0x543, 'M', u'ճ'), | ||
| 1002 | (0x544, 'M', u'մ'), | ||
| 1003 | (0x545, 'M', u'յ'), | ||
| 1004 | (0x546, 'M', u'ն'), | ||
| 1005 | (0x547, 'M', u'շ'), | ||
| 1006 | (0x548, 'M', u'ո'), | ||
| 1007 | (0x549, 'M', u'չ'), | ||
| 1008 | (0x54A, 'M', u'պ'), | ||
| 1009 | (0x54B, 'M', u'ջ'), | ||
| 1010 | (0x54C, 'M', u'ռ'), | ||
| 1011 | (0x54D, 'M', u'ս'), | ||
| 1012 | (0x54E, 'M', u'վ'), | ||
| 1013 | (0x54F, 'M', u'տ'), | ||
| 1014 | (0x550, 'M', u'ր'), | ||
| 1015 | (0x551, 'M', u'ց'), | ||
| 1016 | (0x552, 'M', u'ւ'), | ||
| 1017 | (0x553, 'M', u'փ'), | ||
| 1018 | (0x554, 'M', u'ք'), | ||
| 1019 | (0x555, 'M', u'օ'), | ||
| 1020 | (0x556, 'M', u'ֆ'), | ||
| 1021 | (0x557, 'X'), | ||
| 1022 | (0x559, 'V'), | ||
| 1023 | (0x560, 'X'), | ||
| 1024 | (0x561, 'V'), | ||
| 1025 | (0x587, 'M', u'եւ'), | ||
| 1026 | (0x588, 'X'), | ||
| 1027 | (0x589, 'V'), | ||
| 1028 | (0x58B, 'X'), | ||
| 1029 | (0x58F, 'V'), | ||
| 1030 | (0x590, 'X'), | ||
| 1031 | (0x591, 'V'), | ||
| 1032 | (0x5C8, 'X'), | ||
| 1033 | (0x5D0, 'V'), | ||
| 1034 | (0x5EB, 'X'), | ||
| 1035 | (0x5F0, 'V'), | ||
| 1036 | (0x5F5, 'X'), | ||
| 1037 | (0x606, 'V'), | ||
| 1038 | (0x61C, 'X'), | ||
| 1039 | (0x61E, 'V'), | ||
| 1040 | (0x675, 'M', u'اٴ'), | ||
| 1041 | (0x676, 'M', u'وٴ'), | ||
| 1042 | (0x677, 'M', u'ۇٴ'), | ||
| 1043 | (0x678, 'M', u'يٴ'), | ||
| 1044 | (0x679, 'V'), | ||
| 1045 | (0x6DD, 'X'), | ||
| 1046 | ] | ||
| 1047 | |||
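# How a consumer finds the governing row for a codepoint: rows are sorted by
# their starting codepoint, so a binary search for the last row that starts
# at or below the codepoint suffices. A minimal sketch (illustrative only;
# `table` stands for the concatenation of all the _seg_N() lists, and
# `_lookup_example` is not part of the original module):

import bisect

def _lookup_example(code_point, table):
    # (code_point, 'Z') sorts after any real row for this codepoint, since
    # 'Z' compares greater than every status letter and digit used here, so
    # bisect_left lands one past the governing row.
    return table[bisect.bisect_left(table, (code_point, 'Z')) - 1]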
| 1048 | def _seg_10(): | ||
| 1049 | return [ | ||
| 1050 | (0x6DE, 'V'), | ||
| 1051 | (0x70E, 'X'), | ||
| 1052 | (0x710, 'V'), | ||
| 1053 | (0x74B, 'X'), | ||
| 1054 | (0x74D, 'V'), | ||
| 1055 | (0x7B2, 'X'), | ||
| 1056 | (0x7C0, 'V'), | ||
| 1057 | (0x7FB, 'X'), | ||
| 1058 | (0x800, 'V'), | ||
| 1059 | (0x82E, 'X'), | ||
| 1060 | (0x830, 'V'), | ||
| 1061 | (0x83F, 'X'), | ||
| 1062 | (0x840, 'V'), | ||
| 1063 | (0x85C, 'X'), | ||
| 1064 | (0x85E, 'V'), | ||
| 1065 | (0x85F, 'X'), | ||
| 1066 | (0x8A0, 'V'), | ||
| 1067 | (0x8A1, 'X'), | ||
| 1068 | (0x8A2, 'V'), | ||
| 1069 | (0x8AD, 'X'), | ||
| 1070 | (0x8E4, 'V'), | ||
| 1071 | (0x8FF, 'X'), | ||
| 1072 | (0x900, 'V'), | ||
| 1073 | (0x958, 'M', u'क़'), | ||
| 1074 | (0x959, 'M', u'ख़'), | ||
| 1075 | (0x95A, 'M', u'ग़'), | ||
| 1076 | (0x95B, 'M', u'ज़'), | ||
| 1077 | (0x95C, 'M', u'ड़'), | ||
| 1078 | (0x95D, 'M', u'ढ़'), | ||
| 1079 | (0x95E, 'M', u'फ़'), | ||
| 1080 | (0x95F, 'M', u'य़'), | ||
| 1081 | (0x960, 'V'), | ||
| 1082 | (0x978, 'X'), | ||
| 1083 | (0x979, 'V'), | ||
| 1084 | (0x980, 'X'), | ||
| 1085 | (0x981, 'V'), | ||
| 1086 | (0x984, 'X'), | ||
| 1087 | (0x985, 'V'), | ||
| 1088 | (0x98D, 'X'), | ||
| 1089 | (0x98F, 'V'), | ||
| 1090 | (0x991, 'X'), | ||
| 1091 | (0x993, 'V'), | ||
| 1092 | (0x9A9, 'X'), | ||
| 1093 | (0x9AA, 'V'), | ||
| 1094 | (0x9B1, 'X'), | ||
| 1095 | (0x9B2, 'V'), | ||
| 1096 | (0x9B3, 'X'), | ||
| 1097 | (0x9B6, 'V'), | ||
| 1098 | (0x9BA, 'X'), | ||
| 1099 | (0x9BC, 'V'), | ||
| 1100 | (0x9C5, 'X'), | ||
| 1101 | (0x9C7, 'V'), | ||
| 1102 | (0x9C9, 'X'), | ||
| 1103 | (0x9CB, 'V'), | ||
| 1104 | (0x9CF, 'X'), | ||
| 1105 | (0x9D7, 'V'), | ||
| 1106 | (0x9D8, 'X'), | ||
| 1107 | (0x9DC, 'M', u'ড়'), | ||
| 1108 | (0x9DD, 'M', u'ঢ়'), | ||
| 1109 | (0x9DE, 'X'), | ||
| 1110 | (0x9DF, 'M', u'য়'), | ||
| 1111 | (0x9E0, 'V'), | ||
| 1112 | (0x9E4, 'X'), | ||
| 1113 | (0x9E6, 'V'), | ||
| 1114 | (0x9FC, 'X'), | ||
| 1115 | (0xA01, 'V'), | ||
| 1116 | (0xA04, 'X'), | ||
| 1117 | (0xA05, 'V'), | ||
| 1118 | (0xA0B, 'X'), | ||
| 1119 | (0xA0F, 'V'), | ||
| 1120 | (0xA11, 'X'), | ||
| 1121 | (0xA13, 'V'), | ||
| 1122 | (0xA29, 'X'), | ||
| 1123 | (0xA2A, 'V'), | ||
| 1124 | (0xA31, 'X'), | ||
| 1125 | (0xA32, 'V'), | ||
| 1126 | (0xA33, 'M', u'ਲ਼'), | ||
| 1127 | (0xA34, 'X'), | ||
| 1128 | (0xA35, 'V'), | ||
| 1129 | (0xA36, 'M', u'ਸ਼'), | ||
| 1130 | (0xA37, 'X'), | ||
| 1131 | (0xA38, 'V'), | ||
| 1132 | (0xA3A, 'X'), | ||
| 1133 | (0xA3C, 'V'), | ||
| 1134 | (0xA3D, 'X'), | ||
| 1135 | (0xA3E, 'V'), | ||
| 1136 | (0xA43, 'X'), | ||
| 1137 | (0xA47, 'V'), | ||
| 1138 | (0xA49, 'X'), | ||
| 1139 | (0xA4B, 'V'), | ||
| 1140 | (0xA4E, 'X'), | ||
| 1141 | (0xA51, 'V'), | ||
| 1142 | (0xA52, 'X'), | ||
| 1143 | (0xA59, 'M', u'ਖ਼'), | ||
| 1144 | (0xA5A, 'M', u'ਗ਼'), | ||
| 1145 | (0xA5B, 'M', u'ਜ਼'), | ||
| 1146 | (0xA5C, 'V'), | ||
| 1147 | (0xA5D, 'X'), | ||
| 1148 | (0xA5E, 'M', u'ਫ਼'), | ||
| 1149 | (0xA5F, 'X'), | ||
| 1150 | ] | ||
| 1151 | |||
| 1152 | def _seg_11(): | ||
| 1153 | return [ | ||
| 1154 | (0xA66, 'V'), | ||
| 1155 | (0xA76, 'X'), | ||
| 1156 | (0xA81, 'V'), | ||
| 1157 | (0xA84, 'X'), | ||
| 1158 | (0xA85, 'V'), | ||
| 1159 | (0xA8E, 'X'), | ||
| 1160 | (0xA8F, 'V'), | ||
| 1161 | (0xA92, 'X'), | ||
| 1162 | (0xA93, 'V'), | ||
| 1163 | (0xAA9, 'X'), | ||
| 1164 | (0xAAA, 'V'), | ||
| 1165 | (0xAB1, 'X'), | ||
| 1166 | (0xAB2, 'V'), | ||
| 1167 | (0xAB4, 'X'), | ||
| 1168 | (0xAB5, 'V'), | ||
| 1169 | (0xABA, 'X'), | ||
| 1170 | (0xABC, 'V'), | ||
| 1171 | (0xAC6, 'X'), | ||
| 1172 | (0xAC7, 'V'), | ||
| 1173 | (0xACA, 'X'), | ||
| 1174 | (0xACB, 'V'), | ||
| 1175 | (0xACE, 'X'), | ||
| 1176 | (0xAD0, 'V'), | ||
| 1177 | (0xAD1, 'X'), | ||
| 1178 | (0xAE0, 'V'), | ||
| 1179 | (0xAE4, 'X'), | ||
| 1180 | (0xAE6, 'V'), | ||
| 1181 | (0xAF2, 'X'), | ||
| 1182 | (0xB01, 'V'), | ||
| 1183 | (0xB04, 'X'), | ||
| 1184 | (0xB05, 'V'), | ||
| 1185 | (0xB0D, 'X'), | ||
| 1186 | (0xB0F, 'V'), | ||
| 1187 | (0xB11, 'X'), | ||
| 1188 | (0xB13, 'V'), | ||
| 1189 | (0xB29, 'X'), | ||
| 1190 | (0xB2A, 'V'), | ||
| 1191 | (0xB31, 'X'), | ||
| 1192 | (0xB32, 'V'), | ||
| 1193 | (0xB34, 'X'), | ||
| 1194 | (0xB35, 'V'), | ||
| 1195 | (0xB3A, 'X'), | ||
| 1196 | (0xB3C, 'V'), | ||
| 1197 | (0xB45, 'X'), | ||
| 1198 | (0xB47, 'V'), | ||
| 1199 | (0xB49, 'X'), | ||
| 1200 | (0xB4B, 'V'), | ||
| 1201 | (0xB4E, 'X'), | ||
| 1202 | (0xB56, 'V'), | ||
| 1203 | (0xB58, 'X'), | ||
| 1204 | (0xB5C, 'M', u'ଡ଼'), | ||
| 1205 | (0xB5D, 'M', u'ଢ଼'), | ||
| 1206 | (0xB5E, 'X'), | ||
| 1207 | (0xB5F, 'V'), | ||
| 1208 | (0xB64, 'X'), | ||
| 1209 | (0xB66, 'V'), | ||
| 1210 | (0xB78, 'X'), | ||
| 1211 | (0xB82, 'V'), | ||
| 1212 | (0xB84, 'X'), | ||
| 1213 | (0xB85, 'V'), | ||
| 1214 | (0xB8B, 'X'), | ||
| 1215 | (0xB8E, 'V'), | ||
| 1216 | (0xB91, 'X'), | ||
| 1217 | (0xB92, 'V'), | ||
| 1218 | (0xB96, 'X'), | ||
| 1219 | (0xB99, 'V'), | ||
| 1220 | (0xB9B, 'X'), | ||
| 1221 | (0xB9C, 'V'), | ||
| 1222 | (0xB9D, 'X'), | ||
| 1223 | (0xB9E, 'V'), | ||
| 1224 | (0xBA0, 'X'), | ||
| 1225 | (0xBA3, 'V'), | ||
| 1226 | (0xBA5, 'X'), | ||
| 1227 | (0xBA8, 'V'), | ||
| 1228 | (0xBAB, 'X'), | ||
| 1229 | (0xBAE, 'V'), | ||
| 1230 | (0xBBA, 'X'), | ||
| 1231 | (0xBBE, 'V'), | ||
| 1232 | (0xBC3, 'X'), | ||
| 1233 | (0xBC6, 'V'), | ||
| 1234 | (0xBC9, 'X'), | ||
| 1235 | (0xBCA, 'V'), | ||
| 1236 | (0xBCE, 'X'), | ||
| 1237 | (0xBD0, 'V'), | ||
| 1238 | (0xBD1, 'X'), | ||
| 1239 | (0xBD7, 'V'), | ||
| 1240 | (0xBD8, 'X'), | ||
| 1241 | (0xBE6, 'V'), | ||
| 1242 | (0xBFB, 'X'), | ||
| 1243 | (0xC01, 'V'), | ||
| 1244 | (0xC04, 'X'), | ||
| 1245 | (0xC05, 'V'), | ||
| 1246 | (0xC0D, 'X'), | ||
| 1247 | (0xC0E, 'V'), | ||
| 1248 | (0xC11, 'X'), | ||
| 1249 | (0xC12, 'V'), | ||
| 1250 | (0xC29, 'X'), | ||
| 1251 | (0xC2A, 'V'), | ||
| 1252 | (0xC34, 'X'), | ||
| 1253 | (0xC35, 'V'), | ||
| 1254 | ] | ||
| 1255 | |||
| 1256 | def _seg_12(): | ||
| 1257 | return [ | ||
| 1258 | (0xC3A, 'X'), | ||
| 1259 | (0xC3D, 'V'), | ||
| 1260 | (0xC45, 'X'), | ||
| 1261 | (0xC46, 'V'), | ||
| 1262 | (0xC49, 'X'), | ||
| 1263 | (0xC4A, 'V'), | ||
| 1264 | (0xC4E, 'X'), | ||
| 1265 | (0xC55, 'V'), | ||
| 1266 | (0xC57, 'X'), | ||
| 1267 | (0xC58, 'V'), | ||
| 1268 | (0xC5A, 'X'), | ||
| 1269 | (0xC60, 'V'), | ||
| 1270 | (0xC64, 'X'), | ||
| 1271 | (0xC66, 'V'), | ||
| 1272 | (0xC70, 'X'), | ||
| 1273 | (0xC78, 'V'), | ||
| 1274 | (0xC80, 'X'), | ||
| 1275 | (0xC82, 'V'), | ||
| 1276 | (0xC84, 'X'), | ||
| 1277 | (0xC85, 'V'), | ||
| 1278 | (0xC8D, 'X'), | ||
| 1279 | (0xC8E, 'V'), | ||
| 1280 | (0xC91, 'X'), | ||
| 1281 | (0xC92, 'V'), | ||
| 1282 | (0xCA9, 'X'), | ||
| 1283 | (0xCAA, 'V'), | ||
| 1284 | (0xCB4, 'X'), | ||
| 1285 | (0xCB5, 'V'), | ||
| 1286 | (0xCBA, 'X'), | ||
| 1287 | (0xCBC, 'V'), | ||
| 1288 | (0xCC5, 'X'), | ||
| 1289 | (0xCC6, 'V'), | ||
| 1290 | (0xCC9, 'X'), | ||
| 1291 | (0xCCA, 'V'), | ||
| 1292 | (0xCCE, 'X'), | ||
| 1293 | (0xCD5, 'V'), | ||
| 1294 | (0xCD7, 'X'), | ||
| 1295 | (0xCDE, 'V'), | ||
| 1296 | (0xCDF, 'X'), | ||
| 1297 | (0xCE0, 'V'), | ||
| 1298 | (0xCE4, 'X'), | ||
| 1299 | (0xCE6, 'V'), | ||
| 1300 | (0xCF0, 'X'), | ||
| 1301 | (0xCF1, 'V'), | ||
| 1302 | (0xCF3, 'X'), | ||
| 1303 | (0xD02, 'V'), | ||
| 1304 | (0xD04, 'X'), | ||
| 1305 | (0xD05, 'V'), | ||
| 1306 | (0xD0D, 'X'), | ||
| 1307 | (0xD0E, 'V'), | ||
| 1308 | (0xD11, 'X'), | ||
| 1309 | (0xD12, 'V'), | ||
| 1310 | (0xD3B, 'X'), | ||
| 1311 | (0xD3D, 'V'), | ||
| 1312 | (0xD45, 'X'), | ||
| 1313 | (0xD46, 'V'), | ||
| 1314 | (0xD49, 'X'), | ||
| 1315 | (0xD4A, 'V'), | ||
| 1316 | (0xD4F, 'X'), | ||
| 1317 | (0xD57, 'V'), | ||
| 1318 | (0xD58, 'X'), | ||
| 1319 | (0xD60, 'V'), | ||
| 1320 | (0xD64, 'X'), | ||
| 1321 | (0xD66, 'V'), | ||
| 1322 | (0xD76, 'X'), | ||
| 1323 | (0xD79, 'V'), | ||
| 1324 | (0xD80, 'X'), | ||
| 1325 | (0xD82, 'V'), | ||
| 1326 | (0xD84, 'X'), | ||
| 1327 | (0xD85, 'V'), | ||
| 1328 | (0xD97, 'X'), | ||
| 1329 | (0xD9A, 'V'), | ||
| 1330 | (0xDB2, 'X'), | ||
| 1331 | (0xDB3, 'V'), | ||
| 1332 | (0xDBC, 'X'), | ||
| 1333 | (0xDBD, 'V'), | ||
| 1334 | (0xDBE, 'X'), | ||
| 1335 | (0xDC0, 'V'), | ||
| 1336 | (0xDC7, 'X'), | ||
| 1337 | (0xDCA, 'V'), | ||
| 1338 | (0xDCB, 'X'), | ||
| 1339 | (0xDCF, 'V'), | ||
| 1340 | (0xDD5, 'X'), | ||
| 1341 | (0xDD6, 'V'), | ||
| 1342 | (0xDD7, 'X'), | ||
| 1343 | (0xDD8, 'V'), | ||
| 1344 | (0xDE0, 'X'), | ||
| 1345 | (0xDF2, 'V'), | ||
| 1346 | (0xDF5, 'X'), | ||
| 1347 | (0xE01, 'V'), | ||
| 1348 | (0xE33, 'M', u'ํา'), | ||
| 1349 | (0xE34, 'V'), | ||
| 1350 | (0xE3B, 'X'), | ||
| 1351 | (0xE3F, 'V'), | ||
| 1352 | (0xE5C, 'X'), | ||
| 1353 | (0xE81, 'V'), | ||
| 1354 | (0xE83, 'X'), | ||
| 1355 | (0xE84, 'V'), | ||
| 1356 | (0xE85, 'X'), | ||
| 1357 | (0xE87, 'V'), | ||
| 1358 | ] | ||
| 1359 | |||
| 1360 | def _seg_13(): | ||
| 1361 | return [ | ||
| 1362 | (0xE89, 'X'), | ||
| 1363 | (0xE8A, 'V'), | ||
| 1364 | (0xE8B, 'X'), | ||
| 1365 | (0xE8D, 'V'), | ||
| 1366 | (0xE8E, 'X'), | ||
| 1367 | (0xE94, 'V'), | ||
| 1368 | (0xE98, 'X'), | ||
| 1369 | (0xE99, 'V'), | ||
| 1370 | (0xEA0, 'X'), | ||
| 1371 | (0xEA1, 'V'), | ||
| 1372 | (0xEA4, 'X'), | ||
| 1373 | (0xEA5, 'V'), | ||
| 1374 | (0xEA6, 'X'), | ||
| 1375 | (0xEA7, 'V'), | ||
| 1376 | (0xEA8, 'X'), | ||
| 1377 | (0xEAA, 'V'), | ||
| 1378 | (0xEAC, 'X'), | ||
| 1379 | (0xEAD, 'V'), | ||
| 1380 | (0xEB3, 'M', u'ໍາ'), | ||
| 1381 | (0xEB4, 'V'), | ||
| 1382 | (0xEBA, 'X'), | ||
| 1383 | (0xEBB, 'V'), | ||
| 1384 | (0xEBE, 'X'), | ||
| 1385 | (0xEC0, 'V'), | ||
| 1386 | (0xEC5, 'X'), | ||
| 1387 | (0xEC6, 'V'), | ||
| 1388 | (0xEC7, 'X'), | ||
| 1389 | (0xEC8, 'V'), | ||
| 1390 | (0xECE, 'X'), | ||
| 1391 | (0xED0, 'V'), | ||
| 1392 | (0xEDA, 'X'), | ||
| 1393 | (0xEDC, 'M', u'ຫນ'), | ||
| 1394 | (0xEDD, 'M', u'ຫມ'), | ||
| 1395 | (0xEDE, 'V'), | ||
| 1396 | (0xEE0, 'X'), | ||
| 1397 | (0xF00, 'V'), | ||
| 1398 | (0xF0C, 'M', u'་'), | ||
| 1399 | (0xF0D, 'V'), | ||
| 1400 | (0xF43, 'M', u'གྷ'), | ||
| 1401 | (0xF44, 'V'), | ||
| 1402 | (0xF48, 'X'), | ||
| 1403 | (0xF49, 'V'), | ||
| 1404 | (0xF4D, 'M', u'ཌྷ'), | ||
| 1405 | (0xF4E, 'V'), | ||
| 1406 | (0xF52, 'M', u'དྷ'), | ||
| 1407 | (0xF53, 'V'), | ||
| 1408 | (0xF57, 'M', u'བྷ'), | ||
| 1409 | (0xF58, 'V'), | ||
| 1410 | (0xF5C, 'M', u'ཛྷ'), | ||
| 1411 | (0xF5D, 'V'), | ||
| 1412 | (0xF69, 'M', u'ཀྵ'), | ||
| 1413 | (0xF6A, 'V'), | ||
| 1414 | (0xF6D, 'X'), | ||
| 1415 | (0xF71, 'V'), | ||
| 1416 | (0xF73, 'M', u'ཱི'), | ||
| 1417 | (0xF74, 'V'), | ||
| 1418 | (0xF75, 'M', u'ཱུ'), | ||
| 1419 | (0xF76, 'M', u'ྲྀ'), | ||
| 1420 | (0xF77, 'M', u'ྲཱྀ'), | ||
| 1421 | (0xF78, 'M', u'ླྀ'), | ||
| 1422 | (0xF79, 'M', u'ླཱྀ'), | ||
| 1423 | (0xF7A, 'V'), | ||
| 1424 | (0xF81, 'M', u'ཱྀ'), | ||
| 1425 | (0xF82, 'V'), | ||
| 1426 | (0xF93, 'M', u'ྒྷ'), | ||
| 1427 | (0xF94, 'V'), | ||
| 1428 | (0xF98, 'X'), | ||
| 1429 | (0xF99, 'V'), | ||
| 1430 | (0xF9D, 'M', u'ྜྷ'), | ||
| 1431 | (0xF9E, 'V'), | ||
| 1432 | (0xFA2, 'M', u'ྡྷ'), | ||
| 1433 | (0xFA3, 'V'), | ||
| 1434 | (0xFA7, 'M', u'ྦྷ'), | ||
| 1435 | (0xFA8, 'V'), | ||
| 1436 | (0xFAC, 'M', u'ྫྷ'), | ||
| 1437 | (0xFAD, 'V'), | ||
| 1438 | (0xFB9, 'M', u'ྐྵ'), | ||
| 1439 | (0xFBA, 'V'), | ||
| 1440 | (0xFBD, 'X'), | ||
| 1441 | (0xFBE, 'V'), | ||
| 1442 | (0xFCD, 'X'), | ||
| 1443 | (0xFCE, 'V'), | ||
| 1444 | (0xFDB, 'X'), | ||
| 1445 | (0x1000, 'V'), | ||
| 1446 | (0x10A0, 'X'), | ||
| 1447 | (0x10C7, 'M', u'ⴧ'), | ||
| 1448 | (0x10C8, 'X'), | ||
| 1449 | (0x10CD, 'M', u'ⴭ'), | ||
| 1450 | (0x10CE, 'X'), | ||
| 1451 | (0x10D0, 'V'), | ||
| 1452 | (0x10FC, 'M', u'ნ'), | ||
| 1453 | (0x10FD, 'V'), | ||
| 1454 | (0x115F, 'X'), | ||
| 1455 | (0x1161, 'V'), | ||
| 1456 | (0x1249, 'X'), | ||
| 1457 | (0x124A, 'V'), | ||
| 1458 | (0x124E, 'X'), | ||
| 1459 | (0x1250, 'V'), | ||
| 1460 | (0x1257, 'X'), | ||
| 1461 | (0x1258, 'V'), | ||
| 1462 | ] | ||
| 1463 | |||
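# What each status means operationally for one character, following UTS #46
# processing (a hedged sketch, not the module's own API; the real remapping
# logic lives in the package's core module, and `_remap_example` is an
# assumed name introduced here for illustration):

def _remap_example(char, row, transitional=False, std3_rules=False):
    # Returns the text the character contributes to the remapped string, or
    # None when the codepoint is disallowed outright.
    status = row[1]
    mapping = row[2] if len(row) == 3 else None
    if status == 'V':                    # valid: kept unchanged
        return char
    if status == 'M':                    # mapped: replaced by the mapping
        return mapping
    if status == 'D':                    # deviation, e.g. (0xDF, 'D', u'ss')
        return mapping if transitional else char
    if status == 'I':                    # ignored: removed from the output
        return u''
    if status == '3':                    # disallowed only under STD3 rules
        if std3_rules:
            return None
        return char if mapping is None else mapping
    return None                          # 'X': disallowed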
| 1464 | def _seg_14(): | ||
| 1465 | return [ | ||
| 1466 | (0x1259, 'X'), | ||
| 1467 | (0x125A, 'V'), | ||
| 1468 | (0x125E, 'X'), | ||
| 1469 | (0x1260, 'V'), | ||
| 1470 | (0x1289, 'X'), | ||
| 1471 | (0x128A, 'V'), | ||
| 1472 | (0x128E, 'X'), | ||
| 1473 | (0x1290, 'V'), | ||
| 1474 | (0x12B1, 'X'), | ||
| 1475 | (0x12B2, 'V'), | ||
| 1476 | (0x12B6, 'X'), | ||
| 1477 | (0x12B8, 'V'), | ||
| 1478 | (0x12BF, 'X'), | ||
| 1479 | (0x12C0, 'V'), | ||
| 1480 | (0x12C1, 'X'), | ||
| 1481 | (0x12C2, 'V'), | ||
| 1482 | (0x12C6, 'X'), | ||
| 1483 | (0x12C8, 'V'), | ||
| 1484 | (0x12D7, 'X'), | ||
| 1485 | (0x12D8, 'V'), | ||
| 1486 | (0x1311, 'X'), | ||
| 1487 | (0x1312, 'V'), | ||
| 1488 | (0x1316, 'X'), | ||
| 1489 | (0x1318, 'V'), | ||
| 1490 | (0x135B, 'X'), | ||
| 1491 | (0x135D, 'V'), | ||
| 1492 | (0x137D, 'X'), | ||
| 1493 | (0x1380, 'V'), | ||
| 1494 | (0x139A, 'X'), | ||
| 1495 | (0x13A0, 'V'), | ||
| 1496 | (0x13F5, 'X'), | ||
| 1497 | (0x1400, 'V'), | ||
| 1498 | (0x1680, 'X'), | ||
| 1499 | (0x1681, 'V'), | ||
| 1500 | (0x169D, 'X'), | ||
| 1501 | (0x16A0, 'V'), | ||
| 1502 | (0x16F1, 'X'), | ||
| 1503 | (0x1700, 'V'), | ||
| 1504 | (0x170D, 'X'), | ||
| 1505 | (0x170E, 'V'), | ||
| 1506 | (0x1715, 'X'), | ||
| 1507 | (0x1720, 'V'), | ||
| 1508 | (0x1737, 'X'), | ||
| 1509 | (0x1740, 'V'), | ||
| 1510 | (0x1754, 'X'), | ||
| 1511 | (0x1760, 'V'), | ||
| 1512 | (0x176D, 'X'), | ||
| 1513 | (0x176E, 'V'), | ||
| 1514 | (0x1771, 'X'), | ||
| 1515 | (0x1772, 'V'), | ||
| 1516 | (0x1774, 'X'), | ||
| 1517 | (0x1780, 'V'), | ||
| 1518 | (0x17B4, 'X'), | ||
| 1519 | (0x17B6, 'V'), | ||
| 1520 | (0x17DE, 'X'), | ||
| 1521 | (0x17E0, 'V'), | ||
| 1522 | (0x17EA, 'X'), | ||
| 1523 | (0x17F0, 'V'), | ||
| 1524 | (0x17FA, 'X'), | ||
| 1525 | (0x1800, 'V'), | ||
| 1526 | (0x1806, 'X'), | ||
| 1527 | (0x1807, 'V'), | ||
| 1528 | (0x180B, 'I'), | ||
| 1529 | (0x180E, 'X'), | ||
| 1530 | (0x1810, 'V'), | ||
| 1531 | (0x181A, 'X'), | ||
| 1532 | (0x1820, 'V'), | ||
| 1533 | (0x1878, 'X'), | ||
| 1534 | (0x1880, 'V'), | ||
| 1535 | (0x18AB, 'X'), | ||
| 1536 | (0x18B0, 'V'), | ||
| 1537 | (0x18F6, 'X'), | ||
| 1538 | (0x1900, 'V'), | ||
| 1539 | (0x191D, 'X'), | ||
| 1540 | (0x1920, 'V'), | ||
| 1541 | (0x192C, 'X'), | ||
| 1542 | (0x1930, 'V'), | ||
| 1543 | (0x193C, 'X'), | ||
| 1544 | (0x1940, 'V'), | ||
| 1545 | (0x1941, 'X'), | ||
| 1546 | (0x1944, 'V'), | ||
| 1547 | (0x196E, 'X'), | ||
| 1548 | (0x1970, 'V'), | ||
| 1549 | (0x1975, 'X'), | ||
| 1550 | (0x1980, 'V'), | ||
| 1551 | (0x19AC, 'X'), | ||
| 1552 | (0x19B0, 'V'), | ||
| 1553 | (0x19CA, 'X'), | ||
| 1554 | (0x19D0, 'V'), | ||
| 1555 | (0x19DB, 'X'), | ||
| 1556 | (0x19DE, 'V'), | ||
| 1557 | (0x1A1C, 'X'), | ||
| 1558 | (0x1A1E, 'V'), | ||
| 1559 | (0x1A5F, 'X'), | ||
| 1560 | (0x1A60, 'V'), | ||
| 1561 | (0x1A7D, 'X'), | ||
| 1562 | (0x1A7F, 'V'), | ||
| 1563 | (0x1A8A, 'X'), | ||
| 1564 | (0x1A90, 'V'), | ||
| 1565 | (0x1A9A, 'X'), | ||
| 1566 | ] | ||
| 1567 | |||
| 1568 | def _seg_15(): | ||
| 1569 | return [ | ||
| 1570 | (0x1AA0, 'V'), | ||
| 1571 | (0x1AAE, 'X'), | ||
| 1572 | (0x1B00, 'V'), | ||
| 1573 | (0x1B4C, 'X'), | ||
| 1574 | (0x1B50, 'V'), | ||
| 1575 | (0x1B7D, 'X'), | ||
| 1576 | (0x1B80, 'V'), | ||
| 1577 | (0x1BF4, 'X'), | ||
| 1578 | (0x1BFC, 'V'), | ||
| 1579 | (0x1C38, 'X'), | ||
| 1580 | (0x1C3B, 'V'), | ||
| 1581 | (0x1C4A, 'X'), | ||
| 1582 | (0x1C4D, 'V'), | ||
| 1583 | (0x1C80, 'X'), | ||
| 1584 | (0x1CC0, 'V'), | ||
| 1585 | (0x1CC8, 'X'), | ||
| 1586 | (0x1CD0, 'V'), | ||
| 1587 | (0x1CF7, 'X'), | ||
| 1588 | (0x1D00, 'V'), | ||
| 1589 | (0x1D2C, 'M', u'a'), | ||
| 1590 | (0x1D2D, 'M', u'æ'), | ||
| 1591 | (0x1D2E, 'M', u'b'), | ||
| 1592 | (0x1D2F, 'V'), | ||
| 1593 | (0x1D30, 'M', u'd'), | ||
| 1594 | (0x1D31, 'M', u'e'), | ||
| 1595 | (0x1D32, 'M', u'ǝ'), | ||
| 1596 | (0x1D33, 'M', u'g'), | ||
| 1597 | (0x1D34, 'M', u'h'), | ||
| 1598 | (0x1D35, 'M', u'i'), | ||
| 1599 | (0x1D36, 'M', u'j'), | ||
| 1600 | (0x1D37, 'M', u'k'), | ||
| 1601 | (0x1D38, 'M', u'l'), | ||
| 1602 | (0x1D39, 'M', u'm'), | ||
| 1603 | (0x1D3A, 'M', u'n'), | ||
| 1604 | (0x1D3B, 'V'), | ||
| 1605 | (0x1D3C, 'M', u'o'), | ||
| 1606 | (0x1D3D, 'M', u'ȣ'), | ||
| 1607 | (0x1D3E, 'M', u'p'), | ||
| 1608 | (0x1D3F, 'M', u'r'), | ||
| 1609 | (0x1D40, 'M', u't'), | ||
| 1610 | (0x1D41, 'M', u'u'), | ||
| 1611 | (0x1D42, 'M', u'w'), | ||
| 1612 | (0x1D43, 'M', u'a'), | ||
| 1613 | (0x1D44, 'M', u'ɐ'), | ||
| 1614 | (0x1D45, 'M', u'ɑ'), | ||
| 1615 | (0x1D46, 'M', u'ᴂ'), | ||
| 1616 | (0x1D47, 'M', u'b'), | ||
| 1617 | (0x1D48, 'M', u'd'), | ||
| 1618 | (0x1D49, 'M', u'e'), | ||
| 1619 | (0x1D4A, 'M', u'ə'), | ||
| 1620 | (0x1D4B, 'M', u'ɛ'), | ||
| 1621 | (0x1D4C, 'M', u'ɜ'), | ||
| 1622 | (0x1D4D, 'M', u'g'), | ||
| 1623 | (0x1D4E, 'V'), | ||
| 1624 | (0x1D4F, 'M', u'k'), | ||
| 1625 | (0x1D50, 'M', u'm'), | ||
| 1626 | (0x1D51, 'M', u'ŋ'), | ||
| 1627 | (0x1D52, 'M', u'o'), | ||
| 1628 | (0x1D53, 'M', u'ɔ'), | ||
| 1629 | (0x1D54, 'M', u'ᴖ'), | ||
| 1630 | (0x1D55, 'M', u'ᴗ'), | ||
| 1631 | (0x1D56, 'M', u'p'), | ||
| 1632 | (0x1D57, 'M', u't'), | ||
| 1633 | (0x1D58, 'M', u'u'), | ||
| 1634 | (0x1D59, 'M', u'ᴝ'), | ||
| 1635 | (0x1D5A, 'M', u'ɯ'), | ||
| 1636 | (0x1D5B, 'M', u'v'), | ||
| 1637 | (0x1D5C, 'M', u'ᴥ'), | ||
| 1638 | (0x1D5D, 'M', u'β'), | ||
| 1639 | (0x1D5E, 'M', u'γ'), | ||
| 1640 | (0x1D5F, 'M', u'δ'), | ||
| 1641 | (0x1D60, 'M', u'φ'), | ||
| 1642 | (0x1D61, 'M', u'χ'), | ||
| 1643 | (0x1D62, 'M', u'i'), | ||
| 1644 | (0x1D63, 'M', u'r'), | ||
| 1645 | (0x1D64, 'M', u'u'), | ||
| 1646 | (0x1D65, 'M', u'v'), | ||
| 1647 | (0x1D66, 'M', u'β'), | ||
| 1648 | (0x1D67, 'M', u'γ'), | ||
| 1649 | (0x1D68, 'M', u'ρ'), | ||
| 1650 | (0x1D69, 'M', u'φ'), | ||
| 1651 | (0x1D6A, 'M', u'χ'), | ||
| 1652 | (0x1D6B, 'V'), | ||
| 1653 | (0x1D78, 'M', u'н'), | ||
| 1654 | (0x1D79, 'V'), | ||
| 1655 | (0x1D9B, 'M', u'ɒ'), | ||
| 1656 | (0x1D9C, 'M', u'c'), | ||
| 1657 | (0x1D9D, 'M', u'ɕ'), | ||
| 1658 | (0x1D9E, 'M', u'ð'), | ||
| 1659 | (0x1D9F, 'M', u'ɜ'), | ||
| 1660 | (0x1DA0, 'M', u'f'), | ||
| 1661 | (0x1DA1, 'M', u'ɟ'), | ||
| 1662 | (0x1DA2, 'M', u'ɡ'), | ||
| 1663 | (0x1DA3, 'M', u'ɥ'), | ||
| 1664 | (0x1DA4, 'M', u'ɨ'), | ||
| 1665 | (0x1DA5, 'M', u'ɩ'), | ||
| 1666 | (0x1DA6, 'M', u'ɪ'), | ||
| 1667 | (0x1DA7, 'M', u'ᵻ'), | ||
| 1668 | (0x1DA8, 'M', u'ʝ'), | ||
| 1669 | (0x1DA9, 'M', u'ɭ'), | ||
| 1670 | ] | ||
| 1671 | |||
| 1672 | def _seg_16(): | ||
| 1673 | return [ | ||
| 1674 | (0x1DAA, 'M', u'ᶅ'), | ||
| 1675 | (0x1DAB, 'M', u'ʟ'), | ||
| 1676 | (0x1DAC, 'M', u'ɱ'), | ||
| 1677 | (0x1DAD, 'M', u'ɰ'), | ||
| 1678 | (0x1DAE, 'M', u'ɲ'), | ||
| 1679 | (0x1DAF, 'M', u'ɳ'), | ||
| 1680 | (0x1DB0, 'M', u'ɴ'), | ||
| 1681 | (0x1DB1, 'M', u'ɵ'), | ||
| 1682 | (0x1DB2, 'M', u'ɸ'), | ||
| 1683 | (0x1DB3, 'M', u'ʂ'), | ||
| 1684 | (0x1DB4, 'M', u'ʃ'), | ||
| 1685 | (0x1DB5, 'M', u'ƫ'), | ||
| 1686 | (0x1DB6, 'M', u'ʉ'), | ||
| 1687 | (0x1DB7, 'M', u'ʊ'), | ||
| 1688 | (0x1DB8, 'M', u'ᴜ'), | ||
| 1689 | (0x1DB9, 'M', u'ʋ'), | ||
| 1690 | (0x1DBA, 'M', u'ʌ'), | ||
| 1691 | (0x1DBB, 'M', u'z'), | ||
| 1692 | (0x1DBC, 'M', u'ʐ'), | ||
| 1693 | (0x1DBD, 'M', u'ʑ'), | ||
| 1694 | (0x1DBE, 'M', u'ʒ'), | ||
| 1695 | (0x1DBF, 'M', u'θ'), | ||
| 1696 | (0x1DC0, 'V'), | ||
| 1697 | (0x1DE7, 'X'), | ||
| 1698 | (0x1DFC, 'V'), | ||
| 1699 | (0x1E00, 'M', u'ḁ'), | ||
| 1700 | (0x1E01, 'V'), | ||
| 1701 | (0x1E02, 'M', u'ḃ'), | ||
| 1702 | (0x1E03, 'V'), | ||
| 1703 | (0x1E04, 'M', u'ḅ'), | ||
| 1704 | (0x1E05, 'V'), | ||
| 1705 | (0x1E06, 'M', u'ḇ'), | ||
| 1706 | (0x1E07, 'V'), | ||
| 1707 | (0x1E08, 'M', u'ḉ'), | ||
| 1708 | (0x1E09, 'V'), | ||
| 1709 | (0x1E0A, 'M', u'ḋ'), | ||
| 1710 | (0x1E0B, 'V'), | ||
| 1711 | (0x1E0C, 'M', u'ḍ'), | ||
| 1712 | (0x1E0D, 'V'), | ||
| 1713 | (0x1E0E, 'M', u'ḏ'), | ||
| 1714 | (0x1E0F, 'V'), | ||
| 1715 | (0x1E10, 'M', u'ḑ'), | ||
| 1716 | (0x1E11, 'V'), | ||
| 1717 | (0x1E12, 'M', u'ḓ'), | ||
| 1718 | (0x1E13, 'V'), | ||
| 1719 | (0x1E14, 'M', u'ḕ'), | ||
| 1720 | (0x1E15, 'V'), | ||
| 1721 | (0x1E16, 'M', u'ḗ'), | ||
| 1722 | (0x1E17, 'V'), | ||
| 1723 | (0x1E18, 'M', u'ḙ'), | ||
| 1724 | (0x1E19, 'V'), | ||
| 1725 | (0x1E1A, 'M', u'ḛ'), | ||
| 1726 | (0x1E1B, 'V'), | ||
| 1727 | (0x1E1C, 'M', u'ḝ'), | ||
| 1728 | (0x1E1D, 'V'), | ||
| 1729 | (0x1E1E, 'M', u'ḟ'), | ||
| 1730 | (0x1E1F, 'V'), | ||
| 1731 | (0x1E20, 'M', u'ḡ'), | ||
| 1732 | (0x1E21, 'V'), | ||
| 1733 | (0x1E22, 'M', u'ḣ'), | ||
| 1734 | (0x1E23, 'V'), | ||
| 1735 | (0x1E24, 'M', u'ḥ'), | ||
| 1736 | (0x1E25, 'V'), | ||
| 1737 | (0x1E26, 'M', u'ḧ'), | ||
| 1738 | (0x1E27, 'V'), | ||
| 1739 | (0x1E28, 'M', u'ḩ'), | ||
| 1740 | (0x1E29, 'V'), | ||
| 1741 | (0x1E2A, 'M', u'ḫ'), | ||
| 1742 | (0x1E2B, 'V'), | ||
| 1743 | (0x1E2C, 'M', u'ḭ'), | ||
| 1744 | (0x1E2D, 'V'), | ||
| 1745 | (0x1E2E, 'M', u'ḯ'), | ||
| 1746 | (0x1E2F, 'V'), | ||
| 1747 | (0x1E30, 'M', u'ḱ'), | ||
| 1748 | (0x1E31, 'V'), | ||
| 1749 | (0x1E32, 'M', u'ḳ'), | ||
| 1750 | (0x1E33, 'V'), | ||
| 1751 | (0x1E34, 'M', u'ḵ'), | ||
| 1752 | (0x1E35, 'V'), | ||
| 1753 | (0x1E36, 'M', u'ḷ'), | ||
| 1754 | (0x1E37, 'V'), | ||
| 1755 | (0x1E38, 'M', u'ḹ'), | ||
| 1756 | (0x1E39, 'V'), | ||
| 1757 | (0x1E3A, 'M', u'ḻ'), | ||
| 1758 | (0x1E3B, 'V'), | ||
| 1759 | (0x1E3C, 'M', u'ḽ'), | ||
| 1760 | (0x1E3D, 'V'), | ||
| 1761 | (0x1E3E, 'M', u'ḿ'), | ||
| 1762 | (0x1E3F, 'V'), | ||
| 1763 | (0x1E40, 'M', u'ṁ'), | ||
| 1764 | (0x1E41, 'V'), | ||
| 1765 | (0x1E42, 'M', u'ṃ'), | ||
| 1766 | (0x1E43, 'V'), | ||
| 1767 | (0x1E44, 'M', u'ṅ'), | ||
| 1768 | (0x1E45, 'V'), | ||
| 1769 | (0x1E46, 'M', u'ṇ'), | ||
| 1770 | (0x1E47, 'V'), | ||
| 1771 | (0x1E48, 'M', u'ṉ'), | ||
| 1772 | (0x1E49, 'V'), | ||
| 1773 | (0x1E4A, 'M', u'ṋ'), | ||
| 1774 | ] | ||
| 1775 | |||
| 1776 | def _seg_17(): | ||
| 1777 | return [ | ||
| 1778 | (0x1E4B, 'V'), | ||
| 1779 | (0x1E4C, 'M', u'ṍ'), | ||
| 1780 | (0x1E4D, 'V'), | ||
| 1781 | (0x1E4E, 'M', u'ṏ'), | ||
| 1782 | (0x1E4F, 'V'), | ||
| 1783 | (0x1E50, 'M', u'ṑ'), | ||
| 1784 | (0x1E51, 'V'), | ||
| 1785 | (0x1E52, 'M', u'ṓ'), | ||
| 1786 | (0x1E53, 'V'), | ||
| 1787 | (0x1E54, 'M', u'ṕ'), | ||
| 1788 | (0x1E55, 'V'), | ||
| 1789 | (0x1E56, 'M', u'ṗ'), | ||
| 1790 | (0x1E57, 'V'), | ||
| 1791 | (0x1E58, 'M', u'ṙ'), | ||
| 1792 | (0x1E59, 'V'), | ||
| 1793 | (0x1E5A, 'M', u'ṛ'), | ||
| 1794 | (0x1E5B, 'V'), | ||
| 1795 | (0x1E5C, 'M', u'ṝ'), | ||
| 1796 | (0x1E5D, 'V'), | ||
| 1797 | (0x1E5E, 'M', u'ṟ'), | ||
| 1798 | (0x1E5F, 'V'), | ||
| 1799 | (0x1E60, 'M', u'ṡ'), | ||
| 1800 | (0x1E61, 'V'), | ||
| 1801 | (0x1E62, 'M', u'ṣ'), | ||
| 1802 | (0x1E63, 'V'), | ||
| 1803 | (0x1E64, 'M', u'ṥ'), | ||
| 1804 | (0x1E65, 'V'), | ||
| 1805 | (0x1E66, 'M', u'ṧ'), | ||
| 1806 | (0x1E67, 'V'), | ||
| 1807 | (0x1E68, 'M', u'ṩ'), | ||
| 1808 | (0x1E69, 'V'), | ||
| 1809 | (0x1E6A, 'M', u'ṫ'), | ||
| 1810 | (0x1E6B, 'V'), | ||
| 1811 | (0x1E6C, 'M', u'ṭ'), | ||
| 1812 | (0x1E6D, 'V'), | ||
| 1813 | (0x1E6E, 'M', u'ṯ'), | ||
| 1814 | (0x1E6F, 'V'), | ||
| 1815 | (0x1E70, 'M', u'ṱ'), | ||
| 1816 | (0x1E71, 'V'), | ||
| 1817 | (0x1E72, 'M', u'ṳ'), | ||
| 1818 | (0x1E73, 'V'), | ||
| 1819 | (0x1E74, 'M', u'ṵ'), | ||
| 1820 | (0x1E75, 'V'), | ||
| 1821 | (0x1E76, 'M', u'ṷ'), | ||
| 1822 | (0x1E77, 'V'), | ||
| 1823 | (0x1E78, 'M', u'ṹ'), | ||
| 1824 | (0x1E79, 'V'), | ||
| 1825 | (0x1E7A, 'M', u'ṻ'), | ||
| 1826 | (0x1E7B, 'V'), | ||
| 1827 | (0x1E7C, 'M', u'ṽ'), | ||
| 1828 | (0x1E7D, 'V'), | ||
| 1829 | (0x1E7E, 'M', u'ṿ'), | ||
| 1830 | (0x1E7F, 'V'), | ||
| 1831 | (0x1E80, 'M', u'ẁ'), | ||
| 1832 | (0x1E81, 'V'), | ||
| 1833 | (0x1E82, 'M', u'ẃ'), | ||
| 1834 | (0x1E83, 'V'), | ||
| 1835 | (0x1E84, 'M', u'ẅ'), | ||
| 1836 | (0x1E85, 'V'), | ||
| 1837 | (0x1E86, 'M', u'ẇ'), | ||
| 1838 | (0x1E87, 'V'), | ||
| 1839 | (0x1E88, 'M', u'ẉ'), | ||
| 1840 | (0x1E89, 'V'), | ||
| 1841 | (0x1E8A, 'M', u'ẋ'), | ||
| 1842 | (0x1E8B, 'V'), | ||
| 1843 | (0x1E8C, 'M', u'ẍ'), | ||
| 1844 | (0x1E8D, 'V'), | ||
| 1845 | (0x1E8E, 'M', u'ẏ'), | ||
| 1846 | (0x1E8F, 'V'), | ||
| 1847 | (0x1E90, 'M', u'ẑ'), | ||
| 1848 | (0x1E91, 'V'), | ||
| 1849 | (0x1E92, 'M', u'ẓ'), | ||
| 1850 | (0x1E93, 'V'), | ||
| 1851 | (0x1E94, 'M', u'ẕ'), | ||
| 1852 | (0x1E95, 'V'), | ||
| 1853 | (0x1E9A, 'M', u'aʾ'), | ||
| 1854 | (0x1E9B, 'M', u'ṡ'), | ||
| 1855 | (0x1E9C, 'V'), | ||
| 1856 | (0x1E9E, 'M', u'ss'), | ||
| 1857 | (0x1E9F, 'V'), | ||
| 1858 | (0x1EA0, 'M', u'ạ'), | ||
| 1859 | (0x1EA1, 'V'), | ||
| 1860 | (0x1EA2, 'M', u'ả'), | ||
| 1861 | (0x1EA3, 'V'), | ||
| 1862 | (0x1EA4, 'M', u'ấ'), | ||
| 1863 | (0x1EA5, 'V'), | ||
| 1864 | (0x1EA6, 'M', u'ầ'), | ||
| 1865 | (0x1EA7, 'V'), | ||
| 1866 | (0x1EA8, 'M', u'ẩ'), | ||
| 1867 | (0x1EA9, 'V'), | ||
| 1868 | (0x1EAA, 'M', u'ẫ'), | ||
| 1869 | (0x1EAB, 'V'), | ||
| 1870 | (0x1EAC, 'M', u'ậ'), | ||
| 1871 | (0x1EAD, 'V'), | ||
| 1872 | (0x1EAE, 'M', u'ắ'), | ||
| 1873 | (0x1EAF, 'V'), | ||
| 1874 | (0x1EB0, 'M', u'ằ'), | ||
| 1875 | (0x1EB1, 'V'), | ||
| 1876 | (0x1EB2, 'M', u'ẳ'), | ||
| 1877 | (0x1EB3, 'V'), | ||
| 1878 | ] | ||
| 1879 | |||
| 1880 | def _seg_18(): | ||
| 1881 | return [ | ||
| 1882 | (0x1EB4, 'M', u'ẵ'), | ||
| 1883 | (0x1EB5, 'V'), | ||
| 1884 | (0x1EB6, 'M', u'ặ'), | ||
| 1885 | (0x1EB7, 'V'), | ||
| 1886 | (0x1EB8, 'M', u'ẹ'), | ||
| 1887 | (0x1EB9, 'V'), | ||
| 1888 | (0x1EBA, 'M', u'ẻ'), | ||
| 1889 | (0x1EBB, 'V'), | ||
| 1890 | (0x1EBC, 'M', u'ẽ'), | ||
| 1891 | (0x1EBD, 'V'), | ||
| 1892 | (0x1EBE, 'M', u'ế'), | ||
| 1893 | (0x1EBF, 'V'), | ||
| 1894 | (0x1EC0, 'M', u'ề'), | ||
| 1895 | (0x1EC1, 'V'), | ||
| 1896 | (0x1EC2, 'M', u'ể'), | ||
| 1897 | (0x1EC3, 'V'), | ||
| 1898 | (0x1EC4, 'M', u'ễ'), | ||
| 1899 | (0x1EC5, 'V'), | ||
| 1900 | (0x1EC6, 'M', u'ệ'), | ||
| 1901 | (0x1EC7, 'V'), | ||
| 1902 | (0x1EC8, 'M', u'ỉ'), | ||
| 1903 | (0x1EC9, 'V'), | ||
| 1904 | (0x1ECA, 'M', u'ị'), | ||
| 1905 | (0x1ECB, 'V'), | ||
| 1906 | (0x1ECC, 'M', u'ọ'), | ||
| 1907 | (0x1ECD, 'V'), | ||
| 1908 | (0x1ECE, 'M', u'ỏ'), | ||
| 1909 | (0x1ECF, 'V'), | ||
| 1910 | (0x1ED0, 'M', u'ố'), | ||
| 1911 | (0x1ED1, 'V'), | ||
| 1912 | (0x1ED2, 'M', u'ồ'), | ||
| 1913 | (0x1ED3, 'V'), | ||
| 1914 | (0x1ED4, 'M', u'ổ'), | ||
| 1915 | (0x1ED5, 'V'), | ||
| 1916 | (0x1ED6, 'M', u'ỗ'), | ||
| 1917 | (0x1ED7, 'V'), | ||
| 1918 | (0x1ED8, 'M', u'ộ'), | ||
| 1919 | (0x1ED9, 'V'), | ||
| 1920 | (0x1EDA, 'M', u'ớ'), | ||
| 1921 | (0x1EDB, 'V'), | ||
| 1922 | (0x1EDC, 'M', u'ờ'), | ||
| 1923 | (0x1EDD, 'V'), | ||
| 1924 | (0x1EDE, 'M', u'ở'), | ||
| 1925 | (0x1EDF, 'V'), | ||
| 1926 | (0x1EE0, 'M', u'ỡ'), | ||
| 1927 | (0x1EE1, 'V'), | ||
| 1928 | (0x1EE2, 'M', u'ợ'), | ||
| 1929 | (0x1EE3, 'V'), | ||
| 1930 | (0x1EE4, 'M', u'ụ'), | ||
| 1931 | (0x1EE5, 'V'), | ||
| 1932 | (0x1EE6, 'M', u'ủ'), | ||
| 1933 | (0x1EE7, 'V'), | ||
| 1934 | (0x1EE8, 'M', u'ứ'), | ||
| 1935 | (0x1EE9, 'V'), | ||
| 1936 | (0x1EEA, 'M', u'ừ'), | ||
| 1937 | (0x1EEB, 'V'), | ||
| 1938 | (0x1EEC, 'M', u'ử'), | ||
| 1939 | (0x1EED, 'V'), | ||
| 1940 | (0x1EEE, 'M', u'ữ'), | ||
| 1941 | (0x1EEF, 'V'), | ||
| 1942 | (0x1EF0, 'M', u'ự'), | ||
| 1943 | (0x1EF1, 'V'), | ||
| 1944 | (0x1EF2, 'M', u'ỳ'), | ||
| 1945 | (0x1EF3, 'V'), | ||
| 1946 | (0x1EF4, 'M', u'ỵ'), | ||
| 1947 | (0x1EF5, 'V'), | ||
| 1948 | (0x1EF6, 'M', u'ỷ'), | ||
| 1949 | (0x1EF7, 'V'), | ||
| 1950 | (0x1EF8, 'M', u'ỹ'), | ||
| 1951 | (0x1EF9, 'V'), | ||
| 1952 | (0x1EFA, 'M', u'ỻ'), | ||
| 1953 | (0x1EFB, 'V'), | ||
| 1954 | (0x1EFC, 'M', u'ỽ'), | ||
| 1955 | (0x1EFD, 'V'), | ||
| 1956 | (0x1EFE, 'M', u'ỿ'), | ||
| 1957 | (0x1EFF, 'V'), | ||
| 1958 | (0x1F08, 'M', u'ἀ'), | ||
| 1959 | (0x1F09, 'M', u'ἁ'), | ||
| 1960 | (0x1F0A, 'M', u'ἂ'), | ||
| 1961 | (0x1F0B, 'M', u'ἃ'), | ||
| 1962 | (0x1F0C, 'M', u'ἄ'), | ||
| 1963 | (0x1F0D, 'M', u'ἅ'), | ||
| 1964 | (0x1F0E, 'M', u'ἆ'), | ||
| 1965 | (0x1F0F, 'M', u'ἇ'), | ||
| 1966 | (0x1F10, 'V'), | ||
| 1967 | (0x1F16, 'X'), | ||
| 1968 | (0x1F18, 'M', u'ἐ'), | ||
| 1969 | (0x1F19, 'M', u'ἑ'), | ||
| 1970 | (0x1F1A, 'M', u'ἒ'), | ||
| 1971 | (0x1F1B, 'M', u'ἓ'), | ||
| 1972 | (0x1F1C, 'M', u'ἔ'), | ||
| 1973 | (0x1F1D, 'M', u'ἕ'), | ||
| 1974 | (0x1F1E, 'X'), | ||
| 1975 | (0x1F20, 'V'), | ||
| 1976 | (0x1F28, 'M', u'ἠ'), | ||
| 1977 | (0x1F29, 'M', u'ἡ'), | ||
| 1978 | (0x1F2A, 'M', u'ἢ'), | ||
| 1979 | (0x1F2B, 'M', u'ἣ'), | ||
| 1980 | (0x1F2C, 'M', u'ἤ'), | ||
| 1981 | (0x1F2D, 'M', u'ἥ'), | ||
| 1982 | ] | ||
| 1983 | |||
| 1984 | def _seg_19(): | ||
| 1985 | return [ | ||
| 1986 | (0x1F2E, 'M', u'ἦ'), | ||
| 1987 | (0x1F2F, 'M', u'ἧ'), | ||
| 1988 | (0x1F30, 'V'), | ||
| 1989 | (0x1F38, 'M', u'ἰ'), | ||
| 1990 | (0x1F39, 'M', u'ἱ'), | ||
| 1991 | (0x1F3A, 'M', u'ἲ'), | ||
| 1992 | (0x1F3B, 'M', u'ἳ'), | ||
| 1993 | (0x1F3C, 'M', u'ἴ'), | ||
| 1994 | (0x1F3D, 'M', u'ἵ'), | ||
| 1995 | (0x1F3E, 'M', u'ἶ'), | ||
| 1996 | (0x1F3F, 'M', u'ἷ'), | ||
| 1997 | (0x1F40, 'V'), | ||
| 1998 | (0x1F46, 'X'), | ||
| 1999 | (0x1F48, 'M', u'ὀ'), | ||
| 2000 | (0x1F49, 'M', u'ὁ'), | ||
| 2001 | (0x1F4A, 'M', u'ὂ'), | ||
| 2002 | (0x1F4B, 'M', u'ὃ'), | ||
| 2003 | (0x1F4C, 'M', u'ὄ'), | ||
| 2004 | (0x1F4D, 'M', u'ὅ'), | ||
| 2005 | (0x1F4E, 'X'), | ||
| 2006 | (0x1F50, 'V'), | ||
| 2007 | (0x1F58, 'X'), | ||
| 2008 | (0x1F59, 'M', u'ὑ'), | ||
| 2009 | (0x1F5A, 'X'), | ||
| 2010 | (0x1F5B, 'M', u'ὓ'), | ||
| 2011 | (0x1F5C, 'X'), | ||
| 2012 | (0x1F5D, 'M', u'ὕ'), | ||
| 2013 | (0x1F5E, 'X'), | ||
| 2014 | (0x1F5F, 'M', u'ὗ'), | ||
| 2015 | (0x1F60, 'V'), | ||
| 2016 | (0x1F68, 'M', u'ὠ'), | ||
| 2017 | (0x1F69, 'M', u'ὡ'), | ||
| 2018 | (0x1F6A, 'M', u'ὢ'), | ||
| 2019 | (0x1F6B, 'M', u'ὣ'), | ||
| 2020 | (0x1F6C, 'M', u'ὤ'), | ||
| 2021 | (0x1F6D, 'M', u'ὥ'), | ||
| 2022 | (0x1F6E, 'M', u'ὦ'), | ||
| 2023 | (0x1F6F, 'M', u'ὧ'), | ||
| 2024 | (0x1F70, 'V'), | ||
| 2025 | (0x1F71, 'M', u'ά'), | ||
| 2026 | (0x1F72, 'V'), | ||
| 2027 | (0x1F73, 'M', u'έ'), | ||
| 2028 | (0x1F74, 'V'), | ||
| 2029 | (0x1F75, 'M', u'ή'), | ||
| 2030 | (0x1F76, 'V'), | ||
| 2031 | (0x1F77, 'M', u'ί'), | ||
| 2032 | (0x1F78, 'V'), | ||
| 2033 | (0x1F79, 'M', u'ό'), | ||
| 2034 | (0x1F7A, 'V'), | ||
| 2035 | (0x1F7B, 'M', u'ύ'), | ||
| 2036 | (0x1F7C, 'V'), | ||
| 2037 | (0x1F7D, 'M', u'ώ'), | ||
| 2038 | (0x1F7E, 'X'), | ||
| 2039 | (0x1F80, 'M', u'ἀι'), | ||
| 2040 | (0x1F81, 'M', u'ἁι'), | ||
| 2041 | (0x1F82, 'M', u'ἂι'), | ||
| 2042 | (0x1F83, 'M', u'ἃι'), | ||
| 2043 | (0x1F84, 'M', u'ἄι'), | ||
| 2044 | (0x1F85, 'M', u'ἅι'), | ||
| 2045 | (0x1F86, 'M', u'ἆι'), | ||
| 2046 | (0x1F87, 'M', u'ἇι'), | ||
| 2047 | (0x1F88, 'M', u'ἀι'), | ||
| 2048 | (0x1F89, 'M', u'ἁι'), | ||
| 2049 | (0x1F8A, 'M', u'ἂι'), | ||
| 2050 | (0x1F8B, 'M', u'ἃι'), | ||
| 2051 | (0x1F8C, 'M', u'ἄι'), | ||
| 2052 | (0x1F8D, 'M', u'ἅι'), | ||
| 2053 | (0x1F8E, 'M', u'ἆι'), | ||
| 2054 | (0x1F8F, 'M', u'ἇι'), | ||
| 2055 | (0x1F90, 'M', u'ἠι'), | ||
| 2056 | (0x1F91, 'M', u'ἡι'), | ||
| 2057 | (0x1F92, 'M', u'ἢι'), | ||
| 2058 | (0x1F93, 'M', u'ἣι'), | ||
| 2059 | (0x1F94, 'M', u'ἤι'), | ||
| 2060 | (0x1F95, 'M', u'ἥι'), | ||
| 2061 | (0x1F96, 'M', u'ἦι'), | ||
| 2062 | (0x1F97, 'M', u'ἧι'), | ||
| 2063 | (0x1F98, 'M', u'ἠι'), | ||
| 2064 | (0x1F99, 'M', u'ἡι'), | ||
| 2065 | (0x1F9A, 'M', u'ἢι'), | ||
| 2066 | (0x1F9B, 'M', u'ἣι'), | ||
| 2067 | (0x1F9C, 'M', u'ἤι'), | ||
| 2068 | (0x1F9D, 'M', u'ἥι'), | ||
| 2069 | (0x1F9E, 'M', u'ἦι'), | ||
| 2070 | (0x1F9F, 'M', u'ἧι'), | ||
| 2071 | (0x1FA0, 'M', u'ὠι'), | ||
| 2072 | (0x1FA1, 'M', u'ὡι'), | ||
| 2073 | (0x1FA2, 'M', u'ὢι'), | ||
| 2074 | (0x1FA3, 'M', u'ὣι'), | ||
| 2075 | (0x1FA4, 'M', u'ὤι'), | ||
| 2076 | (0x1FA5, 'M', u'ὥι'), | ||
| 2077 | (0x1FA6, 'M', u'ὦι'), | ||
| 2078 | (0x1FA7, 'M', u'ὧι'), | ||
| 2079 | (0x1FA8, 'M', u'ὠι'), | ||
| 2080 | (0x1FA9, 'M', u'ὡι'), | ||
| 2081 | (0x1FAA, 'M', u'ὢι'), | ||
| 2082 | (0x1FAB, 'M', u'ὣι'), | ||
| 2083 | (0x1FAC, 'M', u'ὤι'), | ||
| 2084 | (0x1FAD, 'M', u'ὥι'), | ||
| 2085 | (0x1FAE, 'M', u'ὦι'), | ||
| 2086 | ] | ||
| 2087 | |||
| 2088 | def _seg_20(): | ||
| 2089 | return [ | ||
| 2090 | (0x1FAF, 'M', u'ὧι'), | ||
| 2091 | (0x1FB0, 'V'), | ||
| 2092 | (0x1FB2, 'M', u'ὰι'), | ||
| 2093 | (0x1FB3, 'M', u'αι'), | ||
| 2094 | (0x1FB4, 'M', u'άι'), | ||
| 2095 | (0x1FB5, 'X'), | ||
| 2096 | (0x1FB6, 'V'), | ||
| 2097 | (0x1FB7, 'M', u'ᾶι'), | ||
| 2098 | (0x1FB8, 'M', u'ᾰ'), | ||
| 2099 | (0x1FB9, 'M', u'ᾱ'), | ||
| 2100 | (0x1FBA, 'M', u'ὰ'), | ||
| 2101 | (0x1FBB, 'M', u'ά'), | ||
| 2102 | (0x1FBC, 'M', u'αι'), | ||
| 2103 | (0x1FBD, '3', u' ̓'), | ||
| 2104 | (0x1FBE, 'M', u'ι'), | ||
| 2105 | (0x1FBF, '3', u' ̓'), | ||
| 2106 | (0x1FC0, '3', u' ͂'), | ||
| 2107 | (0x1FC1, '3', u' ̈͂'), | ||
| 2108 | (0x1FC2, 'M', u'ὴι'), | ||
| 2109 | (0x1FC3, 'M', u'ηι'), | ||
| 2110 | (0x1FC4, 'M', u'ήι'), | ||
| 2111 | (0x1FC5, 'X'), | ||
| 2112 | (0x1FC6, 'V'), | ||
| 2113 | (0x1FC7, 'M', u'ῆι'), | ||
| 2114 | (0x1FC8, 'M', u'ὲ'), | ||
| 2115 | (0x1FC9, 'M', u'έ'), | ||
| 2116 | (0x1FCA, 'M', u'ὴ'), | ||
| 2117 | (0x1FCB, 'M', u'ή'), | ||
| 2118 | (0x1FCC, 'M', u'ηι'), | ||
| 2119 | (0x1FCD, '3', u' ̓̀'), | ||
| 2120 | (0x1FCE, '3', u' ̓́'), | ||
| 2121 | (0x1FCF, '3', u' ̓͂'), | ||
| 2122 | (0x1FD0, 'V'), | ||
| 2123 | (0x1FD3, 'M', u'ΐ'), | ||
| 2124 | (0x1FD4, 'X'), | ||
| 2125 | (0x1FD6, 'V'), | ||
| 2126 | (0x1FD8, 'M', u'ῐ'), | ||
| 2127 | (0x1FD9, 'M', u'ῑ'), | ||
| 2128 | (0x1FDA, 'M', u'ὶ'), | ||
| 2129 | (0x1FDB, 'M', u'ί'), | ||
| 2130 | (0x1FDC, 'X'), | ||
| 2131 | (0x1FDD, '3', u' ̔̀'), | ||
| 2132 | (0x1FDE, '3', u' ̔́'), | ||
| 2133 | (0x1FDF, '3', u' ̔͂'), | ||
| 2134 | (0x1FE0, 'V'), | ||
| 2135 | (0x1FE3, 'M', u'ΰ'), | ||
| 2136 | (0x1FE4, 'V'), | ||
| 2137 | (0x1FE8, 'M', u'ῠ'), | ||
| 2138 | (0x1FE9, 'M', u'ῡ'), | ||
| 2139 | (0x1FEA, 'M', u'ὺ'), | ||
| 2140 | (0x1FEB, 'M', u'ύ'), | ||
| 2141 | (0x1FEC, 'M', u'ῥ'), | ||
| 2142 | (0x1FED, '3', u' ̈̀'), | ||
| 2143 | (0x1FEE, '3', u' ̈́'), | ||
| 2144 | (0x1FEF, '3', u'`'), | ||
| 2145 | (0x1FF0, 'X'), | ||
| 2146 | (0x1FF2, 'M', u'ὼι'), | ||
| 2147 | (0x1FF3, 'M', u'ωι'), | ||
| 2148 | (0x1FF4, 'M', u'ώι'), | ||
| 2149 | (0x1FF5, 'X'), | ||
| 2150 | (0x1FF6, 'V'), | ||
| 2151 | (0x1FF7, 'M', u'ῶι'), | ||
| 2152 | (0x1FF8, 'M', u'ὸ'), | ||
| 2153 | (0x1FF9, 'M', u'ό'), | ||
| 2154 | (0x1FFA, 'M', u'ὼ'), | ||
| 2155 | (0x1FFB, 'M', u'ώ'), | ||
| 2156 | (0x1FFC, 'M', u'ωι'), | ||
| 2157 | (0x1FFD, '3', u' ́'), | ||
| 2158 | (0x1FFE, '3', u' ̔'), | ||
| 2159 | (0x1FFF, 'X'), | ||
| 2160 | (0x2000, '3', u' '), | ||
| 2161 | (0x200B, 'I'), | ||
| 2162 | (0x200C, 'D', u''), | ||
| 2163 | (0x200E, 'X'), | ||
| 2164 | (0x2010, 'V'), | ||
| 2165 | (0x2011, 'M', u'‐'), | ||
| 2166 | (0x2012, 'V'), | ||
| 2167 | (0x2017, '3', u' ̳'), | ||
| 2168 | (0x2018, 'V'), | ||
| 2169 | (0x2024, 'X'), | ||
| 2170 | (0x2027, 'V'), | ||
| 2171 | (0x2028, 'X'), | ||
| 2172 | (0x202F, '3', u' '), | ||
| 2173 | (0x2030, 'V'), | ||
| 2174 | (0x2033, 'M', u'′′'), | ||
| 2175 | (0x2034, 'M', u'′′′'), | ||
| 2176 | (0x2035, 'V'), | ||
| 2177 | (0x2036, 'M', u'‵‵'), | ||
| 2178 | (0x2037, 'M', u'‵‵‵'), | ||
| 2179 | (0x2038, 'V'), | ||
| 2180 | (0x203C, '3', u'!!'), | ||
| 2181 | (0x203D, 'V'), | ||
| 2182 | (0x203E, '3', u' ̅'), | ||
| 2183 | (0x203F, 'V'), | ||
| 2184 | (0x2047, '3', u'??'), | ||
| 2185 | (0x2048, '3', u'?!'), | ||
| 2186 | (0x2049, '3', u'!?'), | ||
| 2187 | (0x204A, 'V'), | ||
| 2188 | (0x2057, 'M', u'′′′′'), | ||
| 2189 | (0x2058, 'V'), | ||
| 2190 | ] | ||
| 2191 | |||
| 2192 | def _seg_21(): | ||
| 2193 | return [ | ||
| 2194 | (0x205F, '3', u' '), | ||
| 2195 | (0x2060, 'I'), | ||
| 2196 | (0x2061, 'X'), | ||
| 2197 | (0x2064, 'I'), | ||
| 2198 | (0x2065, 'X'), | ||
| 2199 | (0x2070, 'M', u'0'), | ||
| 2200 | (0x2071, 'M', u'i'), | ||
| 2201 | (0x2072, 'X'), | ||
| 2202 | (0x2074, 'M', u'4'), | ||
| 2203 | (0x2075, 'M', u'5'), | ||
| 2204 | (0x2076, 'M', u'6'), | ||
| 2205 | (0x2077, 'M', u'7'), | ||
| 2206 | (0x2078, 'M', u'8'), | ||
| 2207 | (0x2079, 'M', u'9'), | ||
| 2208 | (0x207A, '3', u'+'), | ||
| 2209 | (0x207B, 'M', u'−'), | ||
| 2210 | (0x207C, '3', u'='), | ||
| 2211 | (0x207D, '3', u'('), | ||
| 2212 | (0x207E, '3', u')'), | ||
| 2213 | (0x207F, 'M', u'n'), | ||
| 2214 | (0x2080, 'M', u'0'), | ||
| 2215 | (0x2081, 'M', u'1'), | ||
| 2216 | (0x2082, 'M', u'2'), | ||
| 2217 | (0x2083, 'M', u'3'), | ||
| 2218 | (0x2084, 'M', u'4'), | ||
| 2219 | (0x2085, 'M', u'5'), | ||
| 2220 | (0x2086, 'M', u'6'), | ||
| 2221 | (0x2087, 'M', u'7'), | ||
| 2222 | (0x2088, 'M', u'8'), | ||
| 2223 | (0x2089, 'M', u'9'), | ||
| 2224 | (0x208A, '3', u'+'), | ||
| 2225 | (0x208B, 'M', u'−'), | ||
| 2226 | (0x208C, '3', u'='), | ||
| 2227 | (0x208D, '3', u'('), | ||
| 2228 | (0x208E, '3', u')'), | ||
| 2229 | (0x208F, 'X'), | ||
| 2230 | (0x2090, 'M', u'a'), | ||
| 2231 | (0x2091, 'M', u'e'), | ||
| 2232 | (0x2092, 'M', u'o'), | ||
| 2233 | (0x2093, 'M', u'x'), | ||
| 2234 | (0x2094, 'M', u'ə'), | ||
| 2235 | (0x2095, 'M', u'h'), | ||
| 2236 | (0x2096, 'M', u'k'), | ||
| 2237 | (0x2097, 'M', u'l'), | ||
| 2238 | (0x2098, 'M', u'm'), | ||
| 2239 | (0x2099, 'M', u'n'), | ||
| 2240 | (0x209A, 'M', u'p'), | ||
| 2241 | (0x209B, 'M', u's'), | ||
| 2242 | (0x209C, 'M', u't'), | ||
| 2243 | (0x209D, 'X'), | ||
| 2244 | (0x20A0, 'V'), | ||
| 2245 | (0x20A8, 'M', u'rs'), | ||
| 2246 | (0x20A9, 'V'), | ||
| 2247 | (0x20BB, 'X'), | ||
| 2248 | (0x20D0, 'V'), | ||
| 2249 | (0x20F1, 'X'), | ||
| 2250 | (0x2100, '3', u'a/c'), | ||
| 2251 | (0x2101, '3', u'a/s'), | ||
| 2252 | (0x2102, 'M', u'c'), | ||
| 2253 | (0x2103, 'M', u'°c'), | ||
| 2254 | (0x2104, 'V'), | ||
| 2255 | (0x2105, '3', u'c/o'), | ||
| 2256 | (0x2106, '3', u'c/u'), | ||
| 2257 | (0x2107, 'M', u'ɛ'), | ||
| 2258 | (0x2108, 'V'), | ||
| 2259 | (0x2109, 'M', u'°f'), | ||
| 2260 | (0x210A, 'M', u'g'), | ||
| 2261 | (0x210B, 'M', u'h'), | ||
| 2262 | (0x210F, 'M', u'ħ'), | ||
| 2263 | (0x2110, 'M', u'i'), | ||
| 2264 | (0x2112, 'M', u'l'), | ||
| 2265 | (0x2114, 'V'), | ||
| 2266 | (0x2115, 'M', u'n'), | ||
| 2267 | (0x2116, 'M', u'no'), | ||
| 2268 | (0x2117, 'V'), | ||
| 2269 | (0x2119, 'M', u'p'), | ||
| 2270 | (0x211A, 'M', u'q'), | ||
| 2271 | (0x211B, 'M', u'r'), | ||
| 2272 | (0x211E, 'V'), | ||
| 2273 | (0x2120, 'M', u'sm'), | ||
| 2274 | (0x2121, 'M', u'tel'), | ||
| 2275 | (0x2122, 'M', u'tm'), | ||
| 2276 | (0x2123, 'V'), | ||
| 2277 | (0x2124, 'M', u'z'), | ||
| 2278 | (0x2125, 'V'), | ||
| 2279 | (0x2126, 'M', u'ω'), | ||
| 2280 | (0x2127, 'V'), | ||
| 2281 | (0x2128, 'M', u'z'), | ||
| 2282 | (0x2129, 'V'), | ||
| 2283 | (0x212A, 'M', u'k'), | ||
| 2284 | (0x212B, 'M', u'å'), | ||
| 2285 | (0x212C, 'M', u'b'), | ||
| 2286 | (0x212D, 'M', u'c'), | ||
| 2287 | (0x212E, 'V'), | ||
| 2288 | (0x212F, 'M', u'e'), | ||
| 2289 | (0x2131, 'M', u'f'), | ||
| 2290 | (0x2132, 'X'), | ||
| 2291 | (0x2133, 'M', u'm'), | ||
| 2292 | (0x2134, 'M', u'o'), | ||
| 2293 | (0x2135, 'M', u'א'), | ||
| 2294 | ] | ||
| 2295 | |||
| 2296 | def _seg_22(): | ||
| 2297 | return [ | ||
| 2298 | (0x2136, 'M', u'ב'), | ||
| 2299 | (0x2137, 'M', u'ג'), | ||
| 2300 | (0x2138, 'M', u'ד'), | ||
| 2301 | (0x2139, 'M', u'i'), | ||
| 2302 | (0x213A, 'V'), | ||
| 2303 | (0x213B, 'M', u'fax'), | ||
| 2304 | (0x213C, 'M', u'π'), | ||
| 2305 | (0x213D, 'M', u'γ'), | ||
| 2306 | (0x213F, 'M', u'π'), | ||
| 2307 | (0x2140, 'M', u'∑'), | ||
| 2308 | (0x2141, 'V'), | ||
| 2309 | (0x2145, 'M', u'd'), | ||
| 2310 | (0x2147, 'M', u'e'), | ||
| 2311 | (0x2148, 'M', u'i'), | ||
| 2312 | (0x2149, 'M', u'j'), | ||
| 2313 | (0x214A, 'V'), | ||
| 2314 | (0x2150, 'M', u'1⁄7'), | ||
| 2315 | (0x2151, 'M', u'1⁄9'), | ||
| 2316 | (0x2152, 'M', u'1⁄10'), | ||
| 2317 | (0x2153, 'M', u'1⁄3'), | ||
| 2318 | (0x2154, 'M', u'2⁄3'), | ||
| 2319 | (0x2155, 'M', u'1⁄5'), | ||
| 2320 | (0x2156, 'M', u'2⁄5'), | ||
| 2321 | (0x2157, 'M', u'3⁄5'), | ||
| 2322 | (0x2158, 'M', u'4⁄5'), | ||
| 2323 | (0x2159, 'M', u'1⁄6'), | ||
| 2324 | (0x215A, 'M', u'5⁄6'), | ||
| 2325 | (0x215B, 'M', u'1⁄8'), | ||
| 2326 | (0x215C, 'M', u'3⁄8'), | ||
| 2327 | (0x215D, 'M', u'5⁄8'), | ||
| 2328 | (0x215E, 'M', u'7⁄8'), | ||
| 2329 | (0x215F, 'M', u'1⁄'), | ||
| 2330 | (0x2160, 'M', u'i'), | ||
| 2331 | (0x2161, 'M', u'ii'), | ||
| 2332 | (0x2162, 'M', u'iii'), | ||
| 2333 | (0x2163, 'M', u'iv'), | ||
| 2334 | (0x2164, 'M', u'v'), | ||
| 2335 | (0x2165, 'M', u'vi'), | ||
| 2336 | (0x2166, 'M', u'vii'), | ||
| 2337 | (0x2167, 'M', u'viii'), | ||
| 2338 | (0x2168, 'M', u'ix'), | ||
| 2339 | (0x2169, 'M', u'x'), | ||
| 2340 | (0x216A, 'M', u'xi'), | ||
| 2341 | (0x216B, 'M', u'xii'), | ||
| 2342 | (0x216C, 'M', u'l'), | ||
| 2343 | (0x216D, 'M', u'c'), | ||
| 2344 | (0x216E, 'M', u'd'), | ||
| 2345 | (0x216F, 'M', u'm'), | ||
| 2346 | (0x2170, 'M', u'i'), | ||
| 2347 | (0x2171, 'M', u'ii'), | ||
| 2348 | (0x2172, 'M', u'iii'), | ||
| 2349 | (0x2173, 'M', u'iv'), | ||
| 2350 | (0x2174, 'M', u'v'), | ||
| 2351 | (0x2175, 'M', u'vi'), | ||
| 2352 | (0x2176, 'M', u'vii'), | ||
| 2353 | (0x2177, 'M', u'viii'), | ||
| 2354 | (0x2178, 'M', u'ix'), | ||
| 2355 | (0x2179, 'M', u'x'), | ||
| 2356 | (0x217A, 'M', u'xi'), | ||
| 2357 | (0x217B, 'M', u'xii'), | ||
| 2358 | (0x217C, 'M', u'l'), | ||
| 2359 | (0x217D, 'M', u'c'), | ||
| 2360 | (0x217E, 'M', u'd'), | ||
| 2361 | (0x217F, 'M', u'm'), | ||
| 2362 | (0x2180, 'V'), | ||
| 2363 | (0x2183, 'X'), | ||
| 2364 | (0x2184, 'V'), | ||
| 2365 | (0x2189, 'M', u'0⁄3'), | ||
| 2366 | (0x218A, 'X'), | ||
| 2367 | (0x2190, 'V'), | ||
| 2368 | (0x222C, 'M', u'∫∫'), | ||
| 2369 | (0x222D, 'M', u'∫∫∫'), | ||
| 2370 | (0x222E, 'V'), | ||
| 2371 | (0x222F, 'M', u'∮∮'), | ||
| 2372 | (0x2230, 'M', u'∮∮∮'), | ||
| 2373 | (0x2231, 'V'), | ||
| 2374 | (0x2260, '3'), | ||
| 2375 | (0x2261, 'V'), | ||
| 2376 | (0x226E, '3'), | ||
| 2377 | (0x2270, 'V'), | ||
| 2378 | (0x2329, 'M', u'〈'), | ||
| 2379 | (0x232A, 'M', u'〉'), | ||
| 2380 | (0x232B, 'V'), | ||
| 2381 | (0x23F4, 'X'), | ||
| 2382 | (0x2400, 'V'), | ||
| 2383 | (0x2427, 'X'), | ||
| 2384 | (0x2440, 'V'), | ||
| 2385 | (0x244B, 'X'), | ||
| 2386 | (0x2460, 'M', u'1'), | ||
| 2387 | (0x2461, 'M', u'2'), | ||
| 2388 | (0x2462, 'M', u'3'), | ||
| 2389 | (0x2463, 'M', u'4'), | ||
| 2390 | (0x2464, 'M', u'5'), | ||
| 2391 | (0x2465, 'M', u'6'), | ||
| 2392 | (0x2466, 'M', u'7'), | ||
| 2393 | (0x2467, 'M', u'8'), | ||
| 2394 | (0x2468, 'M', u'9'), | ||
| 2395 | (0x2469, 'M', u'10'), | ||
| 2396 | (0x246A, 'M', u'11'), | ||
| 2397 | (0x246B, 'M', u'12'), | ||
| 2398 | ] | ||
| 2399 | |||
| 2400 | def _seg_23(): | ||
| 2401 | return [ | ||
| 2402 | (0x246C, 'M', u'13'), | ||
| 2403 | (0x246D, 'M', u'14'), | ||
| 2404 | (0x246E, 'M', u'15'), | ||
| 2405 | (0x246F, 'M', u'16'), | ||
| 2406 | (0x2470, 'M', u'17'), | ||
| 2407 | (0x2471, 'M', u'18'), | ||
| 2408 | (0x2472, 'M', u'19'), | ||
| 2409 | (0x2473, 'M', u'20'), | ||
| 2410 | (0x2474, '3', u'(1)'), | ||
| 2411 | (0x2475, '3', u'(2)'), | ||
| 2412 | (0x2476, '3', u'(3)'), | ||
| 2413 | (0x2477, '3', u'(4)'), | ||
| 2414 | (0x2478, '3', u'(5)'), | ||
| 2415 | (0x2479, '3', u'(6)'), | ||
| 2416 | (0x247A, '3', u'(7)'), | ||
| 2417 | (0x247B, '3', u'(8)'), | ||
| 2418 | (0x247C, '3', u'(9)'), | ||
| 2419 | (0x247D, '3', u'(10)'), | ||
| 2420 | (0x247E, '3', u'(11)'), | ||
| 2421 | (0x247F, '3', u'(12)'), | ||
| 2422 | (0x2480, '3', u'(13)'), | ||
| 2423 | (0x2481, '3', u'(14)'), | ||
| 2424 | (0x2482, '3', u'(15)'), | ||
| 2425 | (0x2483, '3', u'(16)'), | ||
| 2426 | (0x2484, '3', u'(17)'), | ||
| 2427 | (0x2485, '3', u'(18)'), | ||
| 2428 | (0x2486, '3', u'(19)'), | ||
| 2429 | (0x2487, '3', u'(20)'), | ||
| 2430 | (0x2488, 'X'), | ||
| 2431 | (0x249C, '3', u'(a)'), | ||
| 2432 | (0x249D, '3', u'(b)'), | ||
| 2433 | (0x249E, '3', u'(c)'), | ||
| 2434 | (0x249F, '3', u'(d)'), | ||
| 2435 | (0x24A0, '3', u'(e)'), | ||
| 2436 | (0x24A1, '3', u'(f)'), | ||
| 2437 | (0x24A2, '3', u'(g)'), | ||
| 2438 | (0x24A3, '3', u'(h)'), | ||
| 2439 | (0x24A4, '3', u'(i)'), | ||
| 2440 | (0x24A5, '3', u'(j)'), | ||
| 2441 | (0x24A6, '3', u'(k)'), | ||
| 2442 | (0x24A7, '3', u'(l)'), | ||
| 2443 | (0x24A8, '3', u'(m)'), | ||
| 2444 | (0x24A9, '3', u'(n)'), | ||
| 2445 | (0x24AA, '3', u'(o)'), | ||
| 2446 | (0x24AB, '3', u'(p)'), | ||
| 2447 | (0x24AC, '3', u'(q)'), | ||
| 2448 | (0x24AD, '3', u'(r)'), | ||
| 2449 | (0x24AE, '3', u'(s)'), | ||
| 2450 | (0x24AF, '3', u'(t)'), | ||
| 2451 | (0x24B0, '3', u'(u)'), | ||
| 2452 | (0x24B1, '3', u'(v)'), | ||
| 2453 | (0x24B2, '3', u'(w)'), | ||
| 2454 | (0x24B3, '3', u'(x)'), | ||
| 2455 | (0x24B4, '3', u'(y)'), | ||
| 2456 | (0x24B5, '3', u'(z)'), | ||
| 2457 | (0x24B6, 'M', u'a'), | ||
| 2458 | (0x24B7, 'M', u'b'), | ||
| 2459 | (0x24B8, 'M', u'c'), | ||
| 2460 | (0x24B9, 'M', u'd'), | ||
| 2461 | (0x24BA, 'M', u'e'), | ||
| 2462 | (0x24BB, 'M', u'f'), | ||
| 2463 | (0x24BC, 'M', u'g'), | ||
| 2464 | (0x24BD, 'M', u'h'), | ||
| 2465 | (0x24BE, 'M', u'i'), | ||
| 2466 | (0x24BF, 'M', u'j'), | ||
| 2467 | (0x24C0, 'M', u'k'), | ||
| 2468 | (0x24C1, 'M', u'l'), | ||
| 2469 | (0x24C2, 'M', u'm'), | ||
| 2470 | (0x24C3, 'M', u'n'), | ||
| 2471 | (0x24C4, 'M', u'o'), | ||
| 2472 | (0x24C5, 'M', u'p'), | ||
| 2473 | (0x24C6, 'M', u'q'), | ||
| 2474 | (0x24C7, 'M', u'r'), | ||
| 2475 | (0x24C8, 'M', u's'), | ||
| 2476 | (0x24C9, 'M', u't'), | ||
| 2477 | (0x24CA, 'M', u'u'), | ||
| 2478 | (0x24CB, 'M', u'v'), | ||
| 2479 | (0x24CC, 'M', u'w'), | ||
| 2480 | (0x24CD, 'M', u'x'), | ||
| 2481 | (0x24CE, 'M', u'y'), | ||
| 2482 | (0x24CF, 'M', u'z'), | ||
| 2483 | (0x24D0, 'M', u'a'), | ||
| 2484 | (0x24D1, 'M', u'b'), | ||
| 2485 | (0x24D2, 'M', u'c'), | ||
| 2486 | (0x24D3, 'M', u'd'), | ||
| 2487 | (0x24D4, 'M', u'e'), | ||
| 2488 | (0x24D5, 'M', u'f'), | ||
| 2489 | (0x24D6, 'M', u'g'), | ||
| 2490 | (0x24D7, 'M', u'h'), | ||
| 2491 | (0x24D8, 'M', u'i'), | ||
| 2492 | (0x24D9, 'M', u'j'), | ||
| 2493 | (0x24DA, 'M', u'k'), | ||
| 2494 | (0x24DB, 'M', u'l'), | ||
| 2495 | (0x24DC, 'M', u'm'), | ||
| 2496 | (0x24DD, 'M', u'n'), | ||
| 2497 | (0x24DE, 'M', u'o'), | ||
| 2498 | (0x24DF, 'M', u'p'), | ||
| 2499 | (0x24E0, 'M', u'q'), | ||
| 2500 | (0x24E1, 'M', u'r'), | ||
| 2501 | (0x24E2, 'M', u's'), | ||
| 2502 | ] | ||
| 2503 | |||
| 2504 | def _seg_24(): | ||
| 2505 | return [ | ||
| 2506 | (0x24E3, 'M', u't'), | ||
| 2507 | (0x24E4, 'M', u'u'), | ||
| 2508 | (0x24E5, 'M', u'v'), | ||
| 2509 | (0x24E6, 'M', u'w'), | ||
| 2510 | (0x24E7, 'M', u'x'), | ||
| 2511 | (0x24E8, 'M', u'y'), | ||
| 2512 | (0x24E9, 'M', u'z'), | ||
| 2513 | (0x24EA, 'M', u'0'), | ||
| 2514 | (0x24EB, 'V'), | ||
| 2515 | (0x2700, 'X'), | ||
| 2516 | (0x2701, 'V'), | ||
| 2517 | (0x2A0C, 'M', u'∫∫∫∫'), | ||
| 2518 | (0x2A0D, 'V'), | ||
| 2519 | (0x2A74, '3', u'::='), | ||
| 2520 | (0x2A75, '3', u'=='), | ||
| 2521 | (0x2A76, '3', u'==='), | ||
| 2522 | (0x2A77, 'V'), | ||
| 2523 | (0x2ADC, 'M', u'⫝̸'), | ||
| 2524 | (0x2ADD, 'V'), | ||
| 2525 | (0x2B4D, 'X'), | ||
| 2526 | (0x2B50, 'V'), | ||
| 2527 | (0x2B5A, 'X'), | ||
| 2528 | (0x2C00, 'M', u'ⰰ'), | ||
| 2529 | (0x2C01, 'M', u'ⰱ'), | ||
| 2530 | (0x2C02, 'M', u'ⰲ'), | ||
| 2531 | (0x2C03, 'M', u'ⰳ'), | ||
| 2532 | (0x2C04, 'M', u'ⰴ'), | ||
| 2533 | (0x2C05, 'M', u'ⰵ'), | ||
| 2534 | (0x2C06, 'M', u'ⰶ'), | ||
| 2535 | (0x2C07, 'M', u'ⰷ'), | ||
| 2536 | (0x2C08, 'M', u'ⰸ'), | ||
| 2537 | (0x2C09, 'M', u'ⰹ'), | ||
| 2538 | (0x2C0A, 'M', u'ⰺ'), | ||
| 2539 | (0x2C0B, 'M', u'ⰻ'), | ||
| 2540 | (0x2C0C, 'M', u'ⰼ'), | ||
| 2541 | (0x2C0D, 'M', u'ⰽ'), | ||
| 2542 | (0x2C0E, 'M', u'ⰾ'), | ||
| 2543 | (0x2C0F, 'M', u'ⰿ'), | ||
| 2544 | (0x2C10, 'M', u'ⱀ'), | ||
| 2545 | (0x2C11, 'M', u'ⱁ'), | ||
| 2546 | (0x2C12, 'M', u'ⱂ'), | ||
| 2547 | (0x2C13, 'M', u'ⱃ'), | ||
| 2548 | (0x2C14, 'M', u'ⱄ'), | ||
| 2549 | (0x2C15, 'M', u'ⱅ'), | ||
| 2550 | (0x2C16, 'M', u'ⱆ'), | ||
| 2551 | (0x2C17, 'M', u'ⱇ'), | ||
| 2552 | (0x2C18, 'M', u'ⱈ'), | ||
| 2553 | (0x2C19, 'M', u'ⱉ'), | ||
| 2554 | (0x2C1A, 'M', u'ⱊ'), | ||
| 2555 | (0x2C1B, 'M', u'ⱋ'), | ||
| 2556 | (0x2C1C, 'M', u'ⱌ'), | ||
| 2557 | (0x2C1D, 'M', u'ⱍ'), | ||
| 2558 | (0x2C1E, 'M', u'ⱎ'), | ||
| 2559 | (0x2C1F, 'M', u'ⱏ'), | ||
| 2560 | (0x2C20, 'M', u'ⱐ'), | ||
| 2561 | (0x2C21, 'M', u'ⱑ'), | ||
| 2562 | (0x2C22, 'M', u'ⱒ'), | ||
| 2563 | (0x2C23, 'M', u'ⱓ'), | ||
| 2564 | (0x2C24, 'M', u'ⱔ'), | ||
| 2565 | (0x2C25, 'M', u'ⱕ'), | ||
| 2566 | (0x2C26, 'M', u'ⱖ'), | ||
| 2567 | (0x2C27, 'M', u'ⱗ'), | ||
| 2568 | (0x2C28, 'M', u'ⱘ'), | ||
| 2569 | (0x2C29, 'M', u'ⱙ'), | ||
| 2570 | (0x2C2A, 'M', u'ⱚ'), | ||
| 2571 | (0x2C2B, 'M', u'ⱛ'), | ||
| 2572 | (0x2C2C, 'M', u'ⱜ'), | ||
| 2573 | (0x2C2D, 'M', u'ⱝ'), | ||
| 2574 | (0x2C2E, 'M', u'ⱞ'), | ||
| 2575 | (0x2C2F, 'X'), | ||
| 2576 | (0x2C30, 'V'), | ||
| 2577 | (0x2C5F, 'X'), | ||
| 2578 | (0x2C60, 'M', u'ⱡ'), | ||
| 2579 | (0x2C61, 'V'), | ||
| 2580 | (0x2C62, 'M', u'ɫ'), | ||
| 2581 | (0x2C63, 'M', u'ᵽ'), | ||
| 2582 | (0x2C64, 'M', u'ɽ'), | ||
| 2583 | (0x2C65, 'V'), | ||
| 2584 | (0x2C67, 'M', u'ⱨ'), | ||
| 2585 | (0x2C68, 'V'), | ||
| 2586 | (0x2C69, 'M', u'ⱪ'), | ||
| 2587 | (0x2C6A, 'V'), | ||
| 2588 | (0x2C6B, 'M', u'ⱬ'), | ||
| 2589 | (0x2C6C, 'V'), | ||
| 2590 | (0x2C6D, 'M', u'ɑ'), | ||
| 2591 | (0x2C6E, 'M', u'ɱ'), | ||
| 2592 | (0x2C6F, 'M', u'ɐ'), | ||
| 2593 | (0x2C70, 'M', u'ɒ'), | ||
| 2594 | (0x2C71, 'V'), | ||
| 2595 | (0x2C72, 'M', u'ⱳ'), | ||
| 2596 | (0x2C73, 'V'), | ||
| 2597 | (0x2C75, 'M', u'ⱶ'), | ||
| 2598 | (0x2C76, 'V'), | ||
| 2599 | (0x2C7C, 'M', u'j'), | ||
| 2600 | (0x2C7D, 'M', u'v'), | ||
| 2601 | (0x2C7E, 'M', u'ȿ'), | ||
| 2602 | (0x2C7F, 'M', u'ɀ'), | ||
| 2603 | (0x2C80, 'M', u'ⲁ'), | ||
| 2604 | (0x2C81, 'V'), | ||
| 2605 | (0x2C82, 'M', u'ⲃ'), | ||
| 2606 | ] | ||
| 2607 | |||
| 2608 | def _seg_25(): | ||
| 2609 | return [ | ||
| 2610 | (0x2C83, 'V'), | ||
| 2611 | (0x2C84, 'M', u'ⲅ'), | ||
| 2612 | (0x2C85, 'V'), | ||
| 2613 | (0x2C86, 'M', u'ⲇ'), | ||
| 2614 | (0x2C87, 'V'), | ||
| 2615 | (0x2C88, 'M', u'ⲉ'), | ||
| 2616 | (0x2C89, 'V'), | ||
| 2617 | (0x2C8A, 'M', u'ⲋ'), | ||
| 2618 | (0x2C8B, 'V'), | ||
| 2619 | (0x2C8C, 'M', u'ⲍ'), | ||
| 2620 | (0x2C8D, 'V'), | ||
| 2621 | (0x2C8E, 'M', u'ⲏ'), | ||
| 2622 | (0x2C8F, 'V'), | ||
| 2623 | (0x2C90, 'M', u'ⲑ'), | ||
| 2624 | (0x2C91, 'V'), | ||
| 2625 | (0x2C92, 'M', u'ⲓ'), | ||
| 2626 | (0x2C93, 'V'), | ||
| 2627 | (0x2C94, 'M', u'ⲕ'), | ||
| 2628 | (0x2C95, 'V'), | ||
| 2629 | (0x2C96, 'M', u'ⲗ'), | ||
| 2630 | (0x2C97, 'V'), | ||
| 2631 | (0x2C98, 'M', u'ⲙ'), | ||
| 2632 | (0x2C99, 'V'), | ||
| 2633 | (0x2C9A, 'M', u'ⲛ'), | ||
| 2634 | (0x2C9B, 'V'), | ||
| 2635 | (0x2C9C, 'M', u'ⲝ'), | ||
| 2636 | (0x2C9D, 'V'), | ||
| 2637 | (0x2C9E, 'M', u'ⲟ'), | ||
| 2638 | (0x2C9F, 'V'), | ||
| 2639 | (0x2CA0, 'M', u'ⲡ'), | ||
| 2640 | (0x2CA1, 'V'), | ||
| 2641 | (0x2CA2, 'M', u'ⲣ'), | ||
| 2642 | (0x2CA3, 'V'), | ||
| 2643 | (0x2CA4, 'M', u'ⲥ'), | ||
| 2644 | (0x2CA5, 'V'), | ||
| 2645 | (0x2CA6, 'M', u'ⲧ'), | ||
| 2646 | (0x2CA7, 'V'), | ||
| 2647 | (0x2CA8, 'M', u'ⲩ'), | ||
| 2648 | (0x2CA9, 'V'), | ||
| 2649 | (0x2CAA, 'M', u'ⲫ'), | ||
| 2650 | (0x2CAB, 'V'), | ||
| 2651 | (0x2CAC, 'M', u'ⲭ'), | ||
| 2652 | (0x2CAD, 'V'), | ||
| 2653 | (0x2CAE, 'M', u'ⲯ'), | ||
| 2654 | (0x2CAF, 'V'), | ||
| 2655 | (0x2CB0, 'M', u'ⲱ'), | ||
| 2656 | (0x2CB1, 'V'), | ||
| 2657 | (0x2CB2, 'M', u'ⲳ'), | ||
| 2658 | (0x2CB3, 'V'), | ||
| 2659 | (0x2CB4, 'M', u'ⲵ'), | ||
| 2660 | (0x2CB5, 'V'), | ||
| 2661 | (0x2CB6, 'M', u'ⲷ'), | ||
| 2662 | (0x2CB7, 'V'), | ||
| 2663 | (0x2CB8, 'M', u'ⲹ'), | ||
| 2664 | (0x2CB9, 'V'), | ||
| 2665 | (0x2CBA, 'M', u'ⲻ'), | ||
| 2666 | (0x2CBB, 'V'), | ||
| 2667 | (0x2CBC, 'M', u'ⲽ'), | ||
| 2668 | (0x2CBD, 'V'), | ||
| 2669 | (0x2CBE, 'M', u'ⲿ'), | ||
| 2670 | (0x2CBF, 'V'), | ||
| 2671 | (0x2CC0, 'M', u'ⳁ'), | ||
| 2672 | (0x2CC1, 'V'), | ||
| 2673 | (0x2CC2, 'M', u'ⳃ'), | ||
| 2674 | (0x2CC3, 'V'), | ||
| 2675 | (0x2CC4, 'M', u'ⳅ'), | ||
| 2676 | (0x2CC5, 'V'), | ||
| 2677 | (0x2CC6, 'M', u'ⳇ'), | ||
| 2678 | (0x2CC7, 'V'), | ||
| 2679 | (0x2CC8, 'M', u'ⳉ'), | ||
| 2680 | (0x2CC9, 'V'), | ||
| 2681 | (0x2CCA, 'M', u'ⳋ'), | ||
| 2682 | (0x2CCB, 'V'), | ||
| 2683 | (0x2CCC, 'M', u'ⳍ'), | ||
| 2684 | (0x2CCD, 'V'), | ||
| 2685 | (0x2CCE, 'M', u'ⳏ'), | ||
| 2686 | (0x2CCF, 'V'), | ||
| 2687 | (0x2CD0, 'M', u'ⳑ'), | ||
| 2688 | (0x2CD1, 'V'), | ||
| 2689 | (0x2CD2, 'M', u'ⳓ'), | ||
| 2690 | (0x2CD3, 'V'), | ||
| 2691 | (0x2CD4, 'M', u'ⳕ'), | ||
| 2692 | (0x2CD5, 'V'), | ||
| 2693 | (0x2CD6, 'M', u'ⳗ'), | ||
| 2694 | (0x2CD7, 'V'), | ||
| 2695 | (0x2CD8, 'M', u'ⳙ'), | ||
| 2696 | (0x2CD9, 'V'), | ||
| 2697 | (0x2CDA, 'M', u'ⳛ'), | ||
| 2698 | (0x2CDB, 'V'), | ||
| 2699 | (0x2CDC, 'M', u'ⳝ'), | ||
| 2700 | (0x2CDD, 'V'), | ||
| 2701 | (0x2CDE, 'M', u'ⳟ'), | ||
| 2702 | (0x2CDF, 'V'), | ||
| 2703 | (0x2CE0, 'M', u'ⳡ'), | ||
| 2704 | (0x2CE1, 'V'), | ||
| 2705 | (0x2CE2, 'M', u'ⳣ'), | ||
| 2706 | (0x2CE3, 'V'), | ||
| 2707 | (0x2CEB, 'M', u'ⳬ'), | ||
| 2708 | (0x2CEC, 'V'), | ||
| 2709 | (0x2CED, 'M', u'ⳮ'), | ||
| 2710 | ] | ||
| 2711 | |||
| 2712 | def _seg_26(): | ||
| 2713 | return [ | ||
| 2714 | (0x2CEE, 'V'), | ||
| 2715 | (0x2CF2, 'M', u'ⳳ'), | ||
| 2716 | (0x2CF3, 'V'), | ||
| 2717 | (0x2CF4, 'X'), | ||
| 2718 | (0x2CF9, 'V'), | ||
| 2719 | (0x2D26, 'X'), | ||
| 2720 | (0x2D27, 'V'), | ||
| 2721 | (0x2D28, 'X'), | ||
| 2722 | (0x2D2D, 'V'), | ||
| 2723 | (0x2D2E, 'X'), | ||
| 2724 | (0x2D30, 'V'), | ||
| 2725 | (0x2D68, 'X'), | ||
| 2726 | (0x2D6F, 'M', u'ⵡ'), | ||
| 2727 | (0x2D70, 'V'), | ||
| 2728 | (0x2D71, 'X'), | ||
| 2729 | (0x2D7F, 'V'), | ||
| 2730 | (0x2D97, 'X'), | ||
| 2731 | (0x2DA0, 'V'), | ||
| 2732 | (0x2DA7, 'X'), | ||
| 2733 | (0x2DA8, 'V'), | ||
| 2734 | (0x2DAF, 'X'), | ||
| 2735 | (0x2DB0, 'V'), | ||
| 2736 | (0x2DB7, 'X'), | ||
| 2737 | (0x2DB8, 'V'), | ||
| 2738 | (0x2DBF, 'X'), | ||
| 2739 | (0x2DC0, 'V'), | ||
| 2740 | (0x2DC7, 'X'), | ||
| 2741 | (0x2DC8, 'V'), | ||
| 2742 | (0x2DCF, 'X'), | ||
| 2743 | (0x2DD0, 'V'), | ||
| 2744 | (0x2DD7, 'X'), | ||
| 2745 | (0x2DD8, 'V'), | ||
| 2746 | (0x2DDF, 'X'), | ||
| 2747 | (0x2DE0, 'V'), | ||
| 2748 | (0x2E3C, 'X'), | ||
| 2749 | (0x2E80, 'V'), | ||
| 2750 | (0x2E9A, 'X'), | ||
| 2751 | (0x2E9B, 'V'), | ||
| 2752 | (0x2E9F, 'M', u'母'), | ||
| 2753 | (0x2EA0, 'V'), | ||
| 2754 | (0x2EF3, 'M', u'龟'), | ||
| 2755 | (0x2EF4, 'X'), | ||
| 2756 | (0x2F00, 'M', u'一'), | ||
| 2757 | (0x2F01, 'M', u'丨'), | ||
| 2758 | (0x2F02, 'M', u'丶'), | ||
| 2759 | (0x2F03, 'M', u'丿'), | ||
| 2760 | (0x2F04, 'M', u'乙'), | ||
| 2761 | (0x2F05, 'M', u'亅'), | ||
| 2762 | (0x2F06, 'M', u'二'), | ||
| 2763 | (0x2F07, 'M', u'亠'), | ||
| 2764 | (0x2F08, 'M', u'人'), | ||
| 2765 | (0x2F09, 'M', u'儿'), | ||
| 2766 | (0x2F0A, 'M', u'入'), | ||
| 2767 | (0x2F0B, 'M', u'八'), | ||
| 2768 | (0x2F0C, 'M', u'冂'), | ||
| 2769 | (0x2F0D, 'M', u'冖'), | ||
| 2770 | (0x2F0E, 'M', u'冫'), | ||
| 2771 | (0x2F0F, 'M', u'几'), | ||
| 2772 | (0x2F10, 'M', u'凵'), | ||
| 2773 | (0x2F11, 'M', u'刀'), | ||
| 2774 | (0x2F12, 'M', u'力'), | ||
| 2775 | (0x2F13, 'M', u'勹'), | ||
| 2776 | (0x2F14, 'M', u'匕'), | ||
| 2777 | (0x2F15, 'M', u'匚'), | ||
| 2778 | (0x2F16, 'M', u'匸'), | ||
| 2779 | (0x2F17, 'M', u'十'), | ||
| 2780 | (0x2F18, 'M', u'卜'), | ||
| 2781 | (0x2F19, 'M', u'卩'), | ||
| 2782 | (0x2F1A, 'M', u'厂'), | ||
| 2783 | (0x2F1B, 'M', u'厶'), | ||
| 2784 | (0x2F1C, 'M', u'又'), | ||
| 2785 | (0x2F1D, 'M', u'口'), | ||
| 2786 | (0x2F1E, 'M', u'囗'), | ||
| 2787 | (0x2F1F, 'M', u'土'), | ||
| 2788 | (0x2F20, 'M', u'士'), | ||
| 2789 | (0x2F21, 'M', u'夂'), | ||
| 2790 | (0x2F22, 'M', u'夊'), | ||
| 2791 | (0x2F23, 'M', u'夕'), | ||
| 2792 | (0x2F24, 'M', u'大'), | ||
| 2793 | (0x2F25, 'M', u'女'), | ||
| 2794 | (0x2F26, 'M', u'子'), | ||
| 2795 | (0x2F27, 'M', u'宀'), | ||
| 2796 | (0x2F28, 'M', u'寸'), | ||
| 2797 | (0x2F29, 'M', u'小'), | ||
| 2798 | (0x2F2A, 'M', u'尢'), | ||
| 2799 | (0x2F2B, 'M', u'尸'), | ||
| 2800 | (0x2F2C, 'M', u'屮'), | ||
| 2801 | (0x2F2D, 'M', u'山'), | ||
| 2802 | (0x2F2E, 'M', u'巛'), | ||
| 2803 | (0x2F2F, 'M', u'工'), | ||
| 2804 | (0x2F30, 'M', u'己'), | ||
| 2805 | (0x2F31, 'M', u'巾'), | ||
| 2806 | (0x2F32, 'M', u'干'), | ||
| 2807 | (0x2F33, 'M', u'幺'), | ||
| 2808 | (0x2F34, 'M', u'广'), | ||
| 2809 | (0x2F35, 'M', u'廴'), | ||
| 2810 | (0x2F36, 'M', u'廾'), | ||
| 2811 | (0x2F37, 'M', u'弋'), | ||
| 2812 | (0x2F38, 'M', u'弓'), | ||
| 2813 | (0x2F39, 'M', u'彐'), | ||
| 2814 | ] | ||
| 2815 | |||
| 2816 | def _seg_27(): | ||
| 2817 | return [ | ||
| 2818 | (0x2F3A, 'M', u'彡'), | ||
| 2819 | (0x2F3B, 'M', u'彳'), | ||
| 2820 | (0x2F3C, 'M', u'心'), | ||
| 2821 | (0x2F3D, 'M', u'戈'), | ||
| 2822 | (0x2F3E, 'M', u'戶'), | ||
| 2823 | (0x2F3F, 'M', u'手'), | ||
| 2824 | (0x2F40, 'M', u'支'), | ||
| 2825 | (0x2F41, 'M', u'攴'), | ||
| 2826 | (0x2F42, 'M', u'文'), | ||
| 2827 | (0x2F43, 'M', u'斗'), | ||
| 2828 | (0x2F44, 'M', u'斤'), | ||
| 2829 | (0x2F45, 'M', u'方'), | ||
| 2830 | (0x2F46, 'M', u'无'), | ||
| 2831 | (0x2F47, 'M', u'日'), | ||
| 2832 | (0x2F48, 'M', u'曰'), | ||
| 2833 | (0x2F49, 'M', u'月'), | ||
| 2834 | (0x2F4A, 'M', u'木'), | ||
| 2835 | (0x2F4B, 'M', u'欠'), | ||
| 2836 | (0x2F4C, 'M', u'止'), | ||
| 2837 | (0x2F4D, 'M', u'歹'), | ||
| 2838 | (0x2F4E, 'M', u'殳'), | ||
| 2839 | (0x2F4F, 'M', u'毋'), | ||
| 2840 | (0x2F50, 'M', u'比'), | ||
| 2841 | (0x2F51, 'M', u'毛'), | ||
| 2842 | (0x2F52, 'M', u'氏'), | ||
| 2843 | (0x2F53, 'M', u'气'), | ||
| 2844 | (0x2F54, 'M', u'水'), | ||
| 2845 | (0x2F55, 'M', u'火'), | ||
| 2846 | (0x2F56, 'M', u'爪'), | ||
| 2847 | (0x2F57, 'M', u'父'), | ||
| 2848 | (0x2F58, 'M', u'爻'), | ||
| 2849 | (0x2F59, 'M', u'爿'), | ||
| 2850 | (0x2F5A, 'M', u'片'), | ||
| 2851 | (0x2F5B, 'M', u'牙'), | ||
| 2852 | (0x2F5C, 'M', u'牛'), | ||
| 2853 | (0x2F5D, 'M', u'犬'), | ||
| 2854 | (0x2F5E, 'M', u'玄'), | ||
| 2855 | (0x2F5F, 'M', u'玉'), | ||
| 2856 | (0x2F60, 'M', u'瓜'), | ||
| 2857 | (0x2F61, 'M', u'瓦'), | ||
| 2858 | (0x2F62, 'M', u'甘'), | ||
| 2859 | (0x2F63, 'M', u'生'), | ||
| 2860 | (0x2F64, 'M', u'用'), | ||
| 2861 | (0x2F65, 'M', u'田'), | ||
| 2862 | (0x2F66, 'M', u'疋'), | ||
| 2863 | (0x2F67, 'M', u'疒'), | ||
| 2864 | (0x2F68, 'M', u'癶'), | ||
| 2865 | (0x2F69, 'M', u'白'), | ||
| 2866 | (0x2F6A, 'M', u'皮'), | ||
| 2867 | (0x2F6B, 'M', u'皿'), | ||
| 2868 | (0x2F6C, 'M', u'目'), | ||
| 2869 | (0x2F6D, 'M', u'矛'), | ||
| 2870 | (0x2F6E, 'M', u'矢'), | ||
| 2871 | (0x2F6F, 'M', u'石'), | ||
| 2872 | (0x2F70, 'M', u'示'), | ||
| 2873 | (0x2F71, 'M', u'禸'), | ||
| 2874 | (0x2F72, 'M', u'禾'), | ||
| 2875 | (0x2F73, 'M', u'穴'), | ||
| 2876 | (0x2F74, 'M', u'立'), | ||
| 2877 | (0x2F75, 'M', u'竹'), | ||
| 2878 | (0x2F76, 'M', u'米'), | ||
| 2879 | (0x2F77, 'M', u'糸'), | ||
| 2880 | (0x2F78, 'M', u'缶'), | ||
| 2881 | (0x2F79, 'M', u'网'), | ||
| 2882 | (0x2F7A, 'M', u'羊'), | ||
| 2883 | (0x2F7B, 'M', u'羽'), | ||
| 2884 | (0x2F7C, 'M', u'老'), | ||
| 2885 | (0x2F7D, 'M', u'而'), | ||
| 2886 | (0x2F7E, 'M', u'耒'), | ||
| 2887 | (0x2F7F, 'M', u'耳'), | ||
| 2888 | (0x2F80, 'M', u'聿'), | ||
| 2889 | (0x2F81, 'M', u'肉'), | ||
| 2890 | (0x2F82, 'M', u'臣'), | ||
| 2891 | (0x2F83, 'M', u'自'), | ||
| 2892 | (0x2F84, 'M', u'至'), | ||
| 2893 | (0x2F85, 'M', u'臼'), | ||
| 2894 | (0x2F86, 'M', u'舌'), | ||
| 2895 | (0x2F87, 'M', u'舛'), | ||
| 2896 | (0x2F88, 'M', u'舟'), | ||
| 2897 | (0x2F89, 'M', u'艮'), | ||
| 2898 | (0x2F8A, 'M', u'色'), | ||
| 2899 | (0x2F8B, 'M', u'艸'), | ||
| 2900 | (0x2F8C, 'M', u'虍'), | ||
| 2901 | (0x2F8D, 'M', u'虫'), | ||
| 2902 | (0x2F8E, 'M', u'血'), | ||
| 2903 | (0x2F8F, 'M', u'行'), | ||
| 2904 | (0x2F90, 'M', u'衣'), | ||
| 2905 | (0x2F91, 'M', u'襾'), | ||
| 2906 | (0x2F92, 'M', u'見'), | ||
| 2907 | (0x2F93, 'M', u'角'), | ||
| 2908 | (0x2F94, 'M', u'言'), | ||
| 2909 | (0x2F95, 'M', u'谷'), | ||
| 2910 | (0x2F96, 'M', u'豆'), | ||
| 2911 | (0x2F97, 'M', u'豕'), | ||
| 2912 | (0x2F98, 'M', u'豸'), | ||
| 2913 | (0x2F99, 'M', u'貝'), | ||
| 2914 | (0x2F9A, 'M', u'赤'), | ||
| 2915 | (0x2F9B, 'M', u'走'), | ||
| 2916 | (0x2F9C, 'M', u'足'), | ||
| 2917 | (0x2F9D, 'M', u'身'), | ||
| 2918 | ] | ||
| 2919 | |||
| 2920 | def _seg_28(): | ||
| 2921 | return [ | ||
| 2922 | (0x2F9E, 'M', u'車'), | ||
| 2923 | (0x2F9F, 'M', u'辛'), | ||
| 2924 | (0x2FA0, 'M', u'辰'), | ||
| 2925 | (0x2FA1, 'M', u'辵'), | ||
| 2926 | (0x2FA2, 'M', u'邑'), | ||
| 2927 | (0x2FA3, 'M', u'酉'), | ||
| 2928 | (0x2FA4, 'M', u'釆'), | ||
| 2929 | (0x2FA5, 'M', u'里'), | ||
| 2930 | (0x2FA6, 'M', u'金'), | ||
| 2931 | (0x2FA7, 'M', u'長'), | ||
| 2932 | (0x2FA8, 'M', u'門'), | ||
| 2933 | (0x2FA9, 'M', u'阜'), | ||
| 2934 | (0x2FAA, 'M', u'隶'), | ||
| 2935 | (0x2FAB, 'M', u'隹'), | ||
| 2936 | (0x2FAC, 'M', u'雨'), | ||
| 2937 | (0x2FAD, 'M', u'靑'), | ||
| 2938 | (0x2FAE, 'M', u'非'), | ||
| 2939 | (0x2FAF, 'M', u'面'), | ||
| 2940 | (0x2FB0, 'M', u'革'), | ||
| 2941 | (0x2FB1, 'M', u'韋'), | ||
| 2942 | (0x2FB2, 'M', u'韭'), | ||
| 2943 | (0x2FB3, 'M', u'音'), | ||
| 2944 | (0x2FB4, 'M', u'頁'), | ||
| 2945 | (0x2FB5, 'M', u'風'), | ||
| 2946 | (0x2FB6, 'M', u'飛'), | ||
| 2947 | (0x2FB7, 'M', u'食'), | ||
| 2948 | (0x2FB8, 'M', u'首'), | ||
| 2949 | (0x2FB9, 'M', u'香'), | ||
| 2950 | (0x2FBA, 'M', u'馬'), | ||
| 2951 | (0x2FBB, 'M', u'骨'), | ||
| 2952 | (0x2FBC, 'M', u'高'), | ||
| 2953 | (0x2FBD, 'M', u'髟'), | ||
| 2954 | (0x2FBE, 'M', u'鬥'), | ||
| 2955 | (0x2FBF, 'M', u'鬯'), | ||
| 2956 | (0x2FC0, 'M', u'鬲'), | ||
| 2957 | (0x2FC1, 'M', u'鬼'), | ||
| 2958 | (0x2FC2, 'M', u'魚'), | ||
| 2959 | (0x2FC3, 'M', u'鳥'), | ||
| 2960 | (0x2FC4, 'M', u'鹵'), | ||
| 2961 | (0x2FC5, 'M', u'鹿'), | ||
| 2962 | (0x2FC6, 'M', u'麥'), | ||
| 2963 | (0x2FC7, 'M', u'麻'), | ||
| 2964 | (0x2FC8, 'M', u'黃'), | ||
| 2965 | (0x2FC9, 'M', u'黍'), | ||
| 2966 | (0x2FCA, 'M', u'黑'), | ||
| 2967 | (0x2FCB, 'M', u'黹'), | ||
| 2968 | (0x2FCC, 'M', u'黽'), | ||
| 2969 | (0x2FCD, 'M', u'鼎'), | ||
| 2970 | (0x2FCE, 'M', u'鼓'), | ||
| 2971 | (0x2FCF, 'M', u'鼠'), | ||
| 2972 | (0x2FD0, 'M', u'鼻'), | ||
| 2973 | (0x2FD1, 'M', u'齊'), | ||
| 2974 | (0x2FD2, 'M', u'齒'), | ||
| 2975 | (0x2FD3, 'M', u'龍'), | ||
| 2976 | (0x2FD4, 'M', u'龜'), | ||
| 2977 | (0x2FD5, 'M', u'龠'), | ||
| 2978 | (0x2FD6, 'X'), | ||
| 2979 | (0x3000, '3', u' '), | ||
| 2980 | (0x3001, 'V'), | ||
| 2981 | (0x3002, 'M', u'.'), | ||
| 2982 | (0x3003, 'V'), | ||
| 2983 | (0x3036, 'M', u'〒'), | ||
| 2984 | (0x3037, 'V'), | ||
| 2985 | (0x3038, 'M', u'十'), | ||
| 2986 | (0x3039, 'M', u'卄'), | ||
| 2987 | (0x303A, 'M', u'卅'), | ||
| 2988 | (0x303B, 'V'), | ||
| 2989 | (0x3040, 'X'), | ||
| 2990 | (0x3041, 'V'), | ||
| 2991 | (0x3097, 'X'), | ||
| 2992 | (0x3099, 'V'), | ||
| 2993 | (0x309B, '3', u' ゙'), | ||
| 2994 | (0x309C, '3', u' ゚'), | ||
| 2995 | (0x309D, 'V'), | ||
| 2996 | (0x309F, 'M', u'より'), | ||
| 2997 | (0x30A0, 'V'), | ||
| 2998 | (0x30FF, 'M', u'コト'), | ||
| 2999 | (0x3100, 'X'), | ||
| 3000 | (0x3105, 'V'), | ||
| 3001 | (0x312E, 'X'), | ||
| 3002 | (0x3131, 'M', u'ᄀ'), | ||
| 3003 | (0x3132, 'M', u'ᄁ'), | ||
| 3004 | (0x3133, 'M', u'ᆪ'), | ||
| 3005 | (0x3134, 'M', u'ᄂ'), | ||
| 3006 | (0x3135, 'M', u'ᆬ'), | ||
| 3007 | (0x3136, 'M', u'ᆭ'), | ||
| 3008 | (0x3137, 'M', u'ᄃ'), | ||
| 3009 | (0x3138, 'M', u'ᄄ'), | ||
| 3010 | (0x3139, 'M', u'ᄅ'), | ||
| 3011 | (0x313A, 'M', u'ᆰ'), | ||
| 3012 | (0x313B, 'M', u'ᆱ'), | ||
| 3013 | (0x313C, 'M', u'ᆲ'), | ||
| 3014 | (0x313D, 'M', u'ᆳ'), | ||
| 3015 | (0x313E, 'M', u'ᆴ'), | ||
| 3016 | (0x313F, 'M', u'ᆵ'), | ||
| 3017 | (0x3140, 'M', u'ᄚ'), | ||
| 3018 | (0x3141, 'M', u'ᄆ'), | ||
| 3019 | (0x3142, 'M', u'ᄇ'), | ||
| 3020 | (0x3143, 'M', u'ᄈ'), | ||
| 3021 | (0x3144, 'M', u'ᄡ'), | ||
| 3022 | ] | ||
| 3023 | |||
| 3024 | def _seg_29(): | ||
| 3025 | return [ | ||
| 3026 | (0x3145, 'M', u'ᄉ'), | ||
| 3027 | (0x3146, 'M', u'ᄊ'), | ||
| 3028 | (0x3147, 'M', u'ᄋ'), | ||
| 3029 | (0x3148, 'M', u'ᄌ'), | ||
| 3030 | (0x3149, 'M', u'ᄍ'), | ||
| 3031 | (0x314A, 'M', u'ᄎ'), | ||
| 3032 | (0x314B, 'M', u'ᄏ'), | ||
| 3033 | (0x314C, 'M', u'ᄐ'), | ||
| 3034 | (0x314D, 'M', u'ᄑ'), | ||
| 3035 | (0x314E, 'M', u'ᄒ'), | ||
| 3036 | (0x314F, 'M', u'ᅡ'), | ||
| 3037 | (0x3150, 'M', u'ᅢ'), | ||
| 3038 | (0x3151, 'M', u'ᅣ'), | ||
| 3039 | (0x3152, 'M', u'ᅤ'), | ||
| 3040 | (0x3153, 'M', u'ᅥ'), | ||
| 3041 | (0x3154, 'M', u'ᅦ'), | ||
| 3042 | (0x3155, 'M', u'ᅧ'), | ||
| 3043 | (0x3156, 'M', u'ᅨ'), | ||
| 3044 | (0x3157, 'M', u'ᅩ'), | ||
| 3045 | (0x3158, 'M', u'ᅪ'), | ||
| 3046 | (0x3159, 'M', u'ᅫ'), | ||
| 3047 | (0x315A, 'M', u'ᅬ'), | ||
| 3048 | (0x315B, 'M', u'ᅭ'), | ||
| 3049 | (0x315C, 'M', u'ᅮ'), | ||
| 3050 | (0x315D, 'M', u'ᅯ'), | ||
| 3051 | (0x315E, 'M', u'ᅰ'), | ||
| 3052 | (0x315F, 'M', u'ᅱ'), | ||
| 3053 | (0x3160, 'M', u'ᅲ'), | ||
| 3054 | (0x3161, 'M', u'ᅳ'), | ||
| 3055 | (0x3162, 'M', u'ᅴ'), | ||
| 3056 | (0x3163, 'M', u'ᅵ'), | ||
| 3057 | (0x3164, 'X'), | ||
| 3058 | (0x3165, 'M', u'ᄔ'), | ||
| 3059 | (0x3166, 'M', u'ᄕ'), | ||
| 3060 | (0x3167, 'M', u'ᇇ'), | ||
| 3061 | (0x3168, 'M', u'ᇈ'), | ||
| 3062 | (0x3169, 'M', u'ᇌ'), | ||
| 3063 | (0x316A, 'M', u'ᇎ'), | ||
| 3064 | (0x316B, 'M', u'ᇓ'), | ||
| 3065 | (0x316C, 'M', u'ᇗ'), | ||
| 3066 | (0x316D, 'M', u'ᇙ'), | ||
| 3067 | (0x316E, 'M', u'ᄜ'), | ||
| 3068 | (0x316F, 'M', u'ᇝ'), | ||
| 3069 | (0x3170, 'M', u'ᇟ'), | ||
| 3070 | (0x3171, 'M', u'ᄝ'), | ||
| 3071 | (0x3172, 'M', u'ᄞ'), | ||
| 3072 | (0x3173, 'M', u'ᄠ'), | ||
| 3073 | (0x3174, 'M', u'ᄢ'), | ||
| 3074 | (0x3175, 'M', u'ᄣ'), | ||
| 3075 | (0x3176, 'M', u'ᄧ'), | ||
| 3076 | (0x3177, 'M', u'ᄩ'), | ||
| 3077 | (0x3178, 'M', u'ᄫ'), | ||
| 3078 | (0x3179, 'M', u'ᄬ'), | ||
| 3079 | (0x317A, 'M', u'ᄭ'), | ||
| 3080 | (0x317B, 'M', u'ᄮ'), | ||
| 3081 | (0x317C, 'M', u'ᄯ'), | ||
| 3082 | (0x317D, 'M', u'ᄲ'), | ||
| 3083 | (0x317E, 'M', u'ᄶ'), | ||
| 3084 | (0x317F, 'M', u'ᅀ'), | ||
| 3085 | (0x3180, 'M', u'ᅇ'), | ||
| 3086 | (0x3181, 'M', u'ᅌ'), | ||
| 3087 | (0x3182, 'M', u'ᇱ'), | ||
| 3088 | (0x3183, 'M', u'ᇲ'), | ||
| 3089 | (0x3184, 'M', u'ᅗ'), | ||
| 3090 | (0x3185, 'M', u'ᅘ'), | ||
| 3091 | (0x3186, 'M', u'ᅙ'), | ||
| 3092 | (0x3187, 'M', u'ᆄ'), | ||
| 3093 | (0x3188, 'M', u'ᆅ'), | ||
| 3094 | (0x3189, 'M', u'ᆈ'), | ||
| 3095 | (0x318A, 'M', u'ᆑ'), | ||
| 3096 | (0x318B, 'M', u'ᆒ'), | ||
| 3097 | (0x318C, 'M', u'ᆔ'), | ||
| 3098 | (0x318D, 'M', u'ᆞ'), | ||
| 3099 | (0x318E, 'M', u'ᆡ'), | ||
| 3100 | (0x318F, 'X'), | ||
| 3101 | (0x3190, 'V'), | ||
| 3102 | (0x3192, 'M', u'一'), | ||
| 3103 | (0x3193, 'M', u'二'), | ||
| 3104 | (0x3194, 'M', u'三'), | ||
| 3105 | (0x3195, 'M', u'四'), | ||
| 3106 | (0x3196, 'M', u'上'), | ||
| 3107 | (0x3197, 'M', u'中'), | ||
| 3108 | (0x3198, 'M', u'下'), | ||
| 3109 | (0x3199, 'M', u'甲'), | ||
| 3110 | (0x319A, 'M', u'乙'), | ||
| 3111 | (0x319B, 'M', u'丙'), | ||
| 3112 | (0x319C, 'M', u'丁'), | ||
| 3113 | (0x319D, 'M', u'天'), | ||
| 3114 | (0x319E, 'M', u'地'), | ||
| 3115 | (0x319F, 'M', u'人'), | ||
| 3116 | (0x31A0, 'V'), | ||
| 3117 | (0x31BB, 'X'), | ||
| 3118 | (0x31C0, 'V'), | ||
| 3119 | (0x31E4, 'X'), | ||
| 3120 | (0x31F0, 'V'), | ||
| 3121 | (0x3200, '3', u'(ᄀ)'), | ||
| 3122 | (0x3201, '3', u'(ᄂ)'), | ||
| 3123 | (0x3202, '3', u'(ᄃ)'), | ||
| 3124 | (0x3203, '3', u'(ᄅ)'), | ||
| 3125 | (0x3204, '3', u'(ᄆ)'), | ||
| 3126 | ] | ||
| 3127 | |||
| 3128 | def _seg_30(): | ||
| 3129 | return [ | ||
| 3130 | (0x3205, '3', u'(ᄇ)'), | ||
| 3131 | (0x3206, '3', u'(ᄉ)'), | ||
| 3132 | (0x3207, '3', u'(ᄋ)'), | ||
| 3133 | (0x3208, '3', u'(ᄌ)'), | ||
| 3134 | (0x3209, '3', u'(ᄎ)'), | ||
| 3135 | (0x320A, '3', u'(ᄏ)'), | ||
| 3136 | (0x320B, '3', u'(ᄐ)'), | ||
| 3137 | (0x320C, '3', u'(ᄑ)'), | ||
| 3138 | (0x320D, '3', u'(ᄒ)'), | ||
| 3139 | (0x320E, '3', u'(가)'), | ||
| 3140 | (0x320F, '3', u'(나)'), | ||
| 3141 | (0x3210, '3', u'(다)'), | ||
| 3142 | (0x3211, '3', u'(라)'), | ||
| 3143 | (0x3212, '3', u'(마)'), | ||
| 3144 | (0x3213, '3', u'(바)'), | ||
| 3145 | (0x3214, '3', u'(사)'), | ||
| 3146 | (0x3215, '3', u'(아)'), | ||
| 3147 | (0x3216, '3', u'(자)'), | ||
| 3148 | (0x3217, '3', u'(차)'), | ||
| 3149 | (0x3218, '3', u'(카)'), | ||
| 3150 | (0x3219, '3', u'(타)'), | ||
| 3151 | (0x321A, '3', u'(파)'), | ||
| 3152 | (0x321B, '3', u'(하)'), | ||
| 3153 | (0x321C, '3', u'(주)'), | ||
| 3154 | (0x321D, '3', u'(오전)'), | ||
| 3155 | (0x321E, '3', u'(오후)'), | ||
| 3156 | (0x321F, 'X'), | ||
| 3157 | (0x3220, '3', u'(一)'), | ||
| 3158 | (0x3221, '3', u'(二)'), | ||
| 3159 | (0x3222, '3', u'(三)'), | ||
| 3160 | (0x3223, '3', u'(四)'), | ||
| 3161 | (0x3224, '3', u'(五)'), | ||
| 3162 | (0x3225, '3', u'(六)'), | ||
| 3163 | (0x3226, '3', u'(七)'), | ||
| 3164 | (0x3227, '3', u'(八)'), | ||
| 3165 | (0x3228, '3', u'(九)'), | ||
| 3166 | (0x3229, '3', u'(十)'), | ||
| 3167 | (0x322A, '3', u'(月)'), | ||
| 3168 | (0x322B, '3', u'(火)'), | ||
| 3169 | (0x322C, '3', u'(水)'), | ||
| 3170 | (0x322D, '3', u'(木)'), | ||
| 3171 | (0x322E, '3', u'(金)'), | ||
| 3172 | (0x322F, '3', u'(土)'), | ||
| 3173 | (0x3230, '3', u'(日)'), | ||
| 3174 | (0x3231, '3', u'(株)'), | ||
| 3175 | (0x3232, '3', u'(有)'), | ||
| 3176 | (0x3233, '3', u'(社)'), | ||
| 3177 | (0x3234, '3', u'(名)'), | ||
| 3178 | (0x3235, '3', u'(特)'), | ||
| 3179 | (0x3236, '3', u'(財)'), | ||
| 3180 | (0x3237, '3', u'(祝)'), | ||
| 3181 | (0x3238, '3', u'(労)'), | ||
| 3182 | (0x3239, '3', u'(代)'), | ||
| 3183 | (0x323A, '3', u'(呼)'), | ||
| 3184 | (0x323B, '3', u'(学)'), | ||
| 3185 | (0x323C, '3', u'(監)'), | ||
| 3186 | (0x323D, '3', u'(企)'), | ||
| 3187 | (0x323E, '3', u'(資)'), | ||
| 3188 | (0x323F, '3', u'(協)'), | ||
| 3189 | (0x3240, '3', u'(祭)'), | ||
| 3190 | (0x3241, '3', u'(休)'), | ||
| 3191 | (0x3242, '3', u'(自)'), | ||
| 3192 | (0x3243, '3', u'(至)'), | ||
| 3193 | (0x3244, 'M', u'問'), | ||
| 3194 | (0x3245, 'M', u'幼'), | ||
| 3195 | (0x3246, 'M', u'文'), | ||
| 3196 | (0x3247, 'M', u'箏'), | ||
| 3197 | (0x3248, 'V'), | ||
| 3198 | (0x3250, 'M', u'pte'), | ||
| 3199 | (0x3251, 'M', u'21'), | ||
| 3200 | (0x3252, 'M', u'22'), | ||
| 3201 | (0x3253, 'M', u'23'), | ||
| 3202 | (0x3254, 'M', u'24'), | ||
| 3203 | (0x3255, 'M', u'25'), | ||
| 3204 | (0x3256, 'M', u'26'), | ||
| 3205 | (0x3257, 'M', u'27'), | ||
| 3206 | (0x3258, 'M', u'28'), | ||
| 3207 | (0x3259, 'M', u'29'), | ||
| 3208 | (0x325A, 'M', u'30'), | ||
| 3209 | (0x325B, 'M', u'31'), | ||
| 3210 | (0x325C, 'M', u'32'), | ||
| 3211 | (0x325D, 'M', u'33'), | ||
| 3212 | (0x325E, 'M', u'34'), | ||
| 3213 | (0x325F, 'M', u'35'), | ||
| 3214 | (0x3260, 'M', u'ᄀ'), | ||
| 3215 | (0x3261, 'M', u'ᄂ'), | ||
| 3216 | (0x3262, 'M', u'ᄃ'), | ||
| 3217 | (0x3263, 'M', u'ᄅ'), | ||
| 3218 | (0x3264, 'M', u'ᄆ'), | ||
| 3219 | (0x3265, 'M', u'ᄇ'), | ||
| 3220 | (0x3266, 'M', u'ᄉ'), | ||
| 3221 | (0x3267, 'M', u'ᄋ'), | ||
| 3222 | (0x3268, 'M', u'ᄌ'), | ||
| 3223 | (0x3269, 'M', u'ᄎ'), | ||
| 3224 | (0x326A, 'M', u'ᄏ'), | ||
| 3225 | (0x326B, 'M', u'ᄐ'), | ||
| 3226 | (0x326C, 'M', u'ᄑ'), | ||
| 3227 | (0x326D, 'M', u'ᄒ'), | ||
| 3228 | (0x326E, 'M', u'가'), | ||
| 3229 | (0x326F, 'M', u'나'), | ||
| 3230 | ] | ||
| 3231 | |||
| 3232 | def _seg_31(): | ||
| 3233 | return [ | ||
| 3234 | (0x3270, 'M', u'다'), | ||
| 3235 | (0x3271, 'M', u'라'), | ||
| 3236 | (0x3272, 'M', u'마'), | ||
| 3237 | (0x3273, 'M', u'바'), | ||
| 3238 | (0x3274, 'M', u'사'), | ||
| 3239 | (0x3275, 'M', u'아'), | ||
| 3240 | (0x3276, 'M', u'자'), | ||
| 3241 | (0x3277, 'M', u'차'), | ||
| 3242 | (0x3278, 'M', u'카'), | ||
| 3243 | (0x3279, 'M', u'타'), | ||
| 3244 | (0x327A, 'M', u'파'), | ||
| 3245 | (0x327B, 'M', u'하'), | ||
| 3246 | (0x327C, 'M', u'참고'), | ||
| 3247 | (0x327D, 'M', u'주의'), | ||
| 3248 | (0x327E, 'M', u'우'), | ||
| 3249 | (0x327F, 'V'), | ||
| 3250 | (0x3280, 'M', u'一'), | ||
| 3251 | (0x3281, 'M', u'二'), | ||
| 3252 | (0x3282, 'M', u'三'), | ||
| 3253 | (0x3283, 'M', u'四'), | ||
| 3254 | (0x3284, 'M', u'五'), | ||
| 3255 | (0x3285, 'M', u'六'), | ||
| 3256 | (0x3286, 'M', u'七'), | ||
| 3257 | (0x3287, 'M', u'八'), | ||
| 3258 | (0x3288, 'M', u'九'), | ||
| 3259 | (0x3289, 'M', u'十'), | ||
| 3260 | (0x328A, 'M', u'月'), | ||
| 3261 | (0x328B, 'M', u'火'), | ||
| 3262 | (0x328C, 'M', u'水'), | ||
| 3263 | (0x328D, 'M', u'木'), | ||
| 3264 | (0x328E, 'M', u'金'), | ||
| 3265 | (0x328F, 'M', u'土'), | ||
| 3266 | (0x3290, 'M', u'日'), | ||
| 3267 | (0x3291, 'M', u'株'), | ||
| 3268 | (0x3292, 'M', u'有'), | ||
| 3269 | (0x3293, 'M', u'社'), | ||
| 3270 | (0x3294, 'M', u'名'), | ||
| 3271 | (0x3295, 'M', u'特'), | ||
| 3272 | (0x3296, 'M', u'財'), | ||
| 3273 | (0x3297, 'M', u'祝'), | ||
| 3274 | (0x3298, 'M', u'労'), | ||
| 3275 | (0x3299, 'M', u'秘'), | ||
| 3276 | (0x329A, 'M', u'男'), | ||
| 3277 | (0x329B, 'M', u'女'), | ||
| 3278 | (0x329C, 'M', u'適'), | ||
| 3279 | (0x329D, 'M', u'優'), | ||
| 3280 | (0x329E, 'M', u'印'), | ||
| 3281 | (0x329F, 'M', u'注'), | ||
| 3282 | (0x32A0, 'M', u'項'), | ||
| 3283 | (0x32A1, 'M', u'休'), | ||
| 3284 | (0x32A2, 'M', u'写'), | ||
| 3285 | (0x32A3, 'M', u'正'), | ||
| 3286 | (0x32A4, 'M', u'上'), | ||
| 3287 | (0x32A5, 'M', u'中'), | ||
| 3288 | (0x32A6, 'M', u'下'), | ||
| 3289 | (0x32A7, 'M', u'左'), | ||
| 3290 | (0x32A8, 'M', u'右'), | ||
| 3291 | (0x32A9, 'M', u'医'), | ||
| 3292 | (0x32AA, 'M', u'宗'), | ||
| 3293 | (0x32AB, 'M', u'学'), | ||
| 3294 | (0x32AC, 'M', u'監'), | ||
| 3295 | (0x32AD, 'M', u'企'), | ||
| 3296 | (0x32AE, 'M', u'資'), | ||
| 3297 | (0x32AF, 'M', u'協'), | ||
| 3298 | (0x32B0, 'M', u'夜'), | ||
| 3299 | (0x32B1, 'M', u'36'), | ||
| 3300 | (0x32B2, 'M', u'37'), | ||
| 3301 | (0x32B3, 'M', u'38'), | ||
| 3302 | (0x32B4, 'M', u'39'), | ||
| 3303 | (0x32B5, 'M', u'40'), | ||
| 3304 | (0x32B6, 'M', u'41'), | ||
| 3305 | (0x32B7, 'M', u'42'), | ||
| 3306 | (0x32B8, 'M', u'43'), | ||
| 3307 | (0x32B9, 'M', u'44'), | ||
| 3308 | (0x32BA, 'M', u'45'), | ||
| 3309 | (0x32BB, 'M', u'46'), | ||
| 3310 | (0x32BC, 'M', u'47'), | ||
| 3311 | (0x32BD, 'M', u'48'), | ||
| 3312 | (0x32BE, 'M', u'49'), | ||
| 3313 | (0x32BF, 'M', u'50'), | ||
| 3314 | (0x32C0, 'M', u'1月'), | ||
| 3315 | (0x32C1, 'M', u'2月'), | ||
| 3316 | (0x32C2, 'M', u'3月'), | ||
| 3317 | (0x32C3, 'M', u'4月'), | ||
| 3318 | (0x32C4, 'M', u'5月'), | ||
| 3319 | (0x32C5, 'M', u'6月'), | ||
| 3320 | (0x32C6, 'M', u'7月'), | ||
| 3321 | (0x32C7, 'M', u'8月'), | ||
| 3322 | (0x32C8, 'M', u'9月'), | ||
| 3323 | (0x32C9, 'M', u'10月'), | ||
| 3324 | (0x32CA, 'M', u'11月'), | ||
| 3325 | (0x32CB, 'M', u'12月'), | ||
| 3326 | (0x32CC, 'M', u'hg'), | ||
| 3327 | (0x32CD, 'M', u'erg'), | ||
| 3328 | (0x32CE, 'M', u'ev'), | ||
| 3329 | (0x32CF, 'M', u'ltd'), | ||
| 3330 | (0x32D0, 'M', u'ア'), | ||
| 3331 | (0x32D1, 'M', u'イ'), | ||
| 3332 | (0x32D2, 'M', u'ウ'), | ||
| 3333 | (0x32D3, 'M', u'エ'), | ||
| 3334 | ] | ||
| 3335 | |||
| 3336 | def _seg_32(): | ||
| 3337 | return [ | ||
| 3338 | (0x32D4, 'M', u'オ'), | ||
| 3339 | (0x32D5, 'M', u'カ'), | ||
| 3340 | (0x32D6, 'M', u'キ'), | ||
| 3341 | (0x32D7, 'M', u'ク'), | ||
| 3342 | (0x32D8, 'M', u'ケ'), | ||
| 3343 | (0x32D9, 'M', u'コ'), | ||
| 3344 | (0x32DA, 'M', u'サ'), | ||
| 3345 | (0x32DB, 'M', u'シ'), | ||
| 3346 | (0x32DC, 'M', u'ス'), | ||
| 3347 | (0x32DD, 'M', u'セ'), | ||
| 3348 | (0x32DE, 'M', u'ソ'), | ||
| 3349 | (0x32DF, 'M', u'タ'), | ||
| 3350 | (0x32E0, 'M', u'チ'), | ||
| 3351 | (0x32E1, 'M', u'ツ'), | ||
| 3352 | (0x32E2, 'M', u'テ'), | ||
| 3353 | (0x32E3, 'M', u'ト'), | ||
| 3354 | (0x32E4, 'M', u'ナ'), | ||
| 3355 | (0x32E5, 'M', u'ニ'), | ||
| 3356 | (0x32E6, 'M', u'ヌ'), | ||
| 3357 | (0x32E7, 'M', u'ネ'), | ||
| 3358 | (0x32E8, 'M', u'ノ'), | ||
| 3359 | (0x32E9, 'M', u'ハ'), | ||
| 3360 | (0x32EA, 'M', u'ヒ'), | ||
| 3361 | (0x32EB, 'M', u'フ'), | ||
| 3362 | (0x32EC, 'M', u'ヘ'), | ||
| 3363 | (0x32ED, 'M', u'ホ'), | ||
| 3364 | (0x32EE, 'M', u'マ'), | ||
| 3365 | (0x32EF, 'M', u'ミ'), | ||
| 3366 | (0x32F0, 'M', u'ム'), | ||
| 3367 | (0x32F1, 'M', u'メ'), | ||
| 3368 | (0x32F2, 'M', u'モ'), | ||
| 3369 | (0x32F3, 'M', u'ヤ'), | ||
| 3370 | (0x32F4, 'M', u'ユ'), | ||
| 3371 | (0x32F5, 'M', u'ヨ'), | ||
| 3372 | (0x32F6, 'M', u'ラ'), | ||
| 3373 | (0x32F7, 'M', u'リ'), | ||
| 3374 | (0x32F8, 'M', u'ル'), | ||
| 3375 | (0x32F9, 'M', u'レ'), | ||
| 3376 | (0x32FA, 'M', u'ロ'), | ||
| 3377 | (0x32FB, 'M', u'ワ'), | ||
| 3378 | (0x32FC, 'M', u'ヰ'), | ||
| 3379 | (0x32FD, 'M', u'ヱ'), | ||
| 3380 | (0x32FE, 'M', u'ヲ'), | ||
| 3381 | (0x32FF, 'X'), | ||
| 3382 | (0x3300, 'M', u'アパート'), | ||
| 3383 | (0x3301, 'M', u'アルファ'), | ||
| 3384 | (0x3302, 'M', u'アンペア'), | ||
| 3385 | (0x3303, 'M', u'アール'), | ||
| 3386 | (0x3304, 'M', u'イニング'), | ||
| 3387 | (0x3305, 'M', u'インチ'), | ||
| 3388 | (0x3306, 'M', u'ウォン'), | ||
| 3389 | (0x3307, 'M', u'エスクード'), | ||
| 3390 | (0x3308, 'M', u'エーカー'), | ||
| 3391 | (0x3309, 'M', u'オンス'), | ||
| 3392 | (0x330A, 'M', u'オーム'), | ||
| 3393 | (0x330B, 'M', u'カイリ'), | ||
| 3394 | (0x330C, 'M', u'カラット'), | ||
| 3395 | (0x330D, 'M', u'カロリー'), | ||
| 3396 | (0x330E, 'M', u'ガロン'), | ||
| 3397 | (0x330F, 'M', u'ガンマ'), | ||
| 3398 | (0x3310, 'M', u'ギガ'), | ||
| 3399 | (0x3311, 'M', u'ギニー'), | ||
| 3400 | (0x3312, 'M', u'キュリー'), | ||
| 3401 | (0x3313, 'M', u'ギルダー'), | ||
| 3402 | (0x3314, 'M', u'キロ'), | ||
| 3403 | (0x3315, 'M', u'キログラム'), | ||
| 3404 | (0x3316, 'M', u'キロメートル'), | ||
| 3405 | (0x3317, 'M', u'キロワット'), | ||
| 3406 | (0x3318, 'M', u'グラム'), | ||
| 3407 | (0x3319, 'M', u'グラムトン'), | ||
| 3408 | (0x331A, 'M', u'クルゼイロ'), | ||
| 3409 | (0x331B, 'M', u'クローネ'), | ||
| 3410 | (0x331C, 'M', u'ケース'), | ||
| 3411 | (0x331D, 'M', u'コルナ'), | ||
| 3412 | (0x331E, 'M', u'コーポ'), | ||
| 3413 | (0x331F, 'M', u'サイクル'), | ||
| 3414 | (0x3320, 'M', u'サンチーム'), | ||
| 3415 | (0x3321, 'M', u'シリング'), | ||
| 3416 | (0x3322, 'M', u'センチ'), | ||
| 3417 | (0x3323, 'M', u'セント'), | ||
| 3418 | (0x3324, 'M', u'ダース'), | ||
| 3419 | (0x3325, 'M', u'デシ'), | ||
| 3420 | (0x3326, 'M', u'ドル'), | ||
| 3421 | (0x3327, 'M', u'トン'), | ||
| 3422 | (0x3328, 'M', u'ナノ'), | ||
| 3423 | (0x3329, 'M', u'ノット'), | ||
| 3424 | (0x332A, 'M', u'ハイツ'), | ||
| 3425 | (0x332B, 'M', u'パーセント'), | ||
| 3426 | (0x332C, 'M', u'パーツ'), | ||
| 3427 | (0x332D, 'M', u'バーレル'), | ||
| 3428 | (0x332E, 'M', u'ピアストル'), | ||
| 3429 | (0x332F, 'M', u'ピクル'), | ||
| 3430 | (0x3330, 'M', u'ピコ'), | ||
| 3431 | (0x3331, 'M', u'ビル'), | ||
| 3432 | (0x3332, 'M', u'ファラッド'), | ||
| 3433 | (0x3333, 'M', u'フィート'), | ||
| 3434 | (0x3334, 'M', u'ブッシェル'), | ||
| 3435 | (0x3335, 'M', u'フラン'), | ||
| 3436 | (0x3336, 'M', u'ヘクタール'), | ||
| 3437 | (0x3337, 'M', u'ペソ'), | ||
| 3438 | ] | ||
| 3439 | |||
| 3440 | def _seg_33(): | ||
| 3441 | return [ | ||
| 3442 | (0x3338, 'M', u'ペニヒ'), | ||
| 3443 | (0x3339, 'M', u'ヘルツ'), | ||
| 3444 | (0x333A, 'M', u'ペンス'), | ||
| 3445 | (0x333B, 'M', u'ページ'), | ||
| 3446 | (0x333C, 'M', u'ベータ'), | ||
| 3447 | (0x333D, 'M', u'ポイント'), | ||
| 3448 | (0x333E, 'M', u'ボルト'), | ||
| 3449 | (0x333F, 'M', u'ホン'), | ||
| 3450 | (0x3340, 'M', u'ポンド'), | ||
| 3451 | (0x3341, 'M', u'ホール'), | ||
| 3452 | (0x3342, 'M', u'ホーン'), | ||
| 3453 | (0x3343, 'M', u'マイクロ'), | ||
| 3454 | (0x3344, 'M', u'マイル'), | ||
| 3455 | (0x3345, 'M', u'マッハ'), | ||
| 3456 | (0x3346, 'M', u'マルク'), | ||
| 3457 | (0x3347, 'M', u'マンション'), | ||
| 3458 | (0x3348, 'M', u'ミクロン'), | ||
| 3459 | (0x3349, 'M', u'ミリ'), | ||
| 3460 | (0x334A, 'M', u'ミリバール'), | ||
| 3461 | (0x334B, 'M', u'メガ'), | ||
| 3462 | (0x334C, 'M', u'メガトン'), | ||
| 3463 | (0x334D, 'M', u'メートル'), | ||
| 3464 | (0x334E, 'M', u'ヤード'), | ||
| 3465 | (0x334F, 'M', u'ヤール'), | ||
| 3466 | (0x3350, 'M', u'ユアン'), | ||
| 3467 | (0x3351, 'M', u'リットル'), | ||
| 3468 | (0x3352, 'M', u'リラ'), | ||
| 3469 | (0x3353, 'M', u'ルピー'), | ||
| 3470 | (0x3354, 'M', u'ルーブル'), | ||
| 3471 | (0x3355, 'M', u'レム'), | ||
| 3472 | (0x3356, 'M', u'レントゲン'), | ||
| 3473 | (0x3357, 'M', u'ワット'), | ||
| 3474 | (0x3358, 'M', u'0点'), | ||
| 3475 | (0x3359, 'M', u'1点'), | ||
| 3476 | (0x335A, 'M', u'2点'), | ||
| 3477 | (0x335B, 'M', u'3点'), | ||
| 3478 | (0x335C, 'M', u'4点'), | ||
| 3479 | (0x335D, 'M', u'5点'), | ||
| 3480 | (0x335E, 'M', u'6点'), | ||
| 3481 | (0x335F, 'M', u'7点'), | ||
| 3482 | (0x3360, 'M', u'8点'), | ||
| 3483 | (0x3361, 'M', u'9点'), | ||
| 3484 | (0x3362, 'M', u'10点'), | ||
| 3485 | (0x3363, 'M', u'11点'), | ||
| 3486 | (0x3364, 'M', u'12点'), | ||
| 3487 | (0x3365, 'M', u'13点'), | ||
| 3488 | (0x3366, 'M', u'14点'), | ||
| 3489 | (0x3367, 'M', u'15点'), | ||
| 3490 | (0x3368, 'M', u'16点'), | ||
| 3491 | (0x3369, 'M', u'17点'), | ||
| 3492 | (0x336A, 'M', u'18点'), | ||
| 3493 | (0x336B, 'M', u'19点'), | ||
| 3494 | (0x336C, 'M', u'20点'), | ||
| 3495 | (0x336D, 'M', u'21点'), | ||
| 3496 | (0x336E, 'M', u'22点'), | ||
| 3497 | (0x336F, 'M', u'23点'), | ||
| 3498 | (0x3370, 'M', u'24点'), | ||
| 3499 | (0x3371, 'M', u'hpa'), | ||
| 3500 | (0x3372, 'M', u'da'), | ||
| 3501 | (0x3373, 'M', u'au'), | ||
| 3502 | (0x3374, 'M', u'bar'), | ||
| 3503 | (0x3375, 'M', u'ov'), | ||
| 3504 | (0x3376, 'M', u'pc'), | ||
| 3505 | (0x3377, 'M', u'dm'), | ||
| 3506 | (0x3378, 'M', u'dm2'), | ||
| 3507 | (0x3379, 'M', u'dm3'), | ||
| 3508 | (0x337A, 'M', u'iu'), | ||
| 3509 | (0x337B, 'M', u'平成'), | ||
| 3510 | (0x337C, 'M', u'昭和'), | ||
| 3511 | (0x337D, 'M', u'大正'), | ||
| 3512 | (0x337E, 'M', u'明治'), | ||
| 3513 | (0x337F, 'M', u'株式会社'), | ||
| 3514 | (0x3380, 'M', u'pa'), | ||
| 3515 | (0x3381, 'M', u'na'), | ||
| 3516 | (0x3382, 'M', u'μa'), | ||
| 3517 | (0x3383, 'M', u'ma'), | ||
| 3518 | (0x3384, 'M', u'ka'), | ||
| 3519 | (0x3385, 'M', u'kb'), | ||
| 3520 | (0x3386, 'M', u'mb'), | ||
| 3521 | (0x3387, 'M', u'gb'), | ||
| 3522 | (0x3388, 'M', u'cal'), | ||
| 3523 | (0x3389, 'M', u'kcal'), | ||
| 3524 | (0x338A, 'M', u'pf'), | ||
| 3525 | (0x338B, 'M', u'nf'), | ||
| 3526 | (0x338C, 'M', u'μf'), | ||
| 3527 | (0x338D, 'M', u'μg'), | ||
| 3528 | (0x338E, 'M', u'mg'), | ||
| 3529 | (0x338F, 'M', u'kg'), | ||
| 3530 | (0x3390, 'M', u'hz'), | ||
| 3531 | (0x3391, 'M', u'khz'), | ||
| 3532 | (0x3392, 'M', u'mhz'), | ||
| 3533 | (0x3393, 'M', u'ghz'), | ||
| 3534 | (0x3394, 'M', u'thz'), | ||
| 3535 | (0x3395, 'M', u'μl'), | ||
| 3536 | (0x3396, 'M', u'ml'), | ||
| 3537 | (0x3397, 'M', u'dl'), | ||
| 3538 | (0x3398, 'M', u'kl'), | ||
| 3539 | (0x3399, 'M', u'fm'), | ||
| 3540 | (0x339A, 'M', u'nm'), | ||
| 3541 | (0x339B, 'M', u'μm'), | ||
| 3542 | ] | ||
| 3543 | |||
| 3544 | def _seg_34(): | ||
| 3545 | return [ | ||
| 3546 | (0x339C, 'M', u'mm'), | ||
| 3547 | (0x339D, 'M', u'cm'), | ||
| 3548 | (0x339E, 'M', u'km'), | ||
| 3549 | (0x339F, 'M', u'mm2'), | ||
| 3550 | (0x33A0, 'M', u'cm2'), | ||
| 3551 | (0x33A1, 'M', u'm2'), | ||
| 3552 | (0x33A2, 'M', u'km2'), | ||
| 3553 | (0x33A3, 'M', u'mm3'), | ||
| 3554 | (0x33A4, 'M', u'cm3'), | ||
| 3555 | (0x33A5, 'M', u'm3'), | ||
| 3556 | (0x33A6, 'M', u'km3'), | ||
| 3557 | (0x33A7, 'M', u'm∕s'), | ||
| 3558 | (0x33A8, 'M', u'm∕s2'), | ||
| 3559 | (0x33A9, 'M', u'pa'), | ||
| 3560 | (0x33AA, 'M', u'kpa'), | ||
| 3561 | (0x33AB, 'M', u'mpa'), | ||
| 3562 | (0x33AC, 'M', u'gpa'), | ||
| 3563 | (0x33AD, 'M', u'rad'), | ||
| 3564 | (0x33AE, 'M', u'rad∕s'), | ||
| 3565 | (0x33AF, 'M', u'rad∕s2'), | ||
| 3566 | (0x33B0, 'M', u'ps'), | ||
| 3567 | (0x33B1, 'M', u'ns'), | ||
| 3568 | (0x33B2, 'M', u'μs'), | ||
| 3569 | (0x33B3, 'M', u'ms'), | ||
| 3570 | (0x33B4, 'M', u'pv'), | ||
| 3571 | (0x33B5, 'M', u'nv'), | ||
| 3572 | (0x33B6, 'M', u'μv'), | ||
| 3573 | (0x33B7, 'M', u'mv'), | ||
| 3574 | (0x33B8, 'M', u'kv'), | ||
| 3575 | (0x33B9, 'M', u'mv'), | ||
| 3576 | (0x33BA, 'M', u'pw'), | ||
| 3577 | (0x33BB, 'M', u'nw'), | ||
| 3578 | (0x33BC, 'M', u'μw'), | ||
| 3579 | (0x33BD, 'M', u'mw'), | ||
| 3580 | (0x33BE, 'M', u'kw'), | ||
| 3581 | (0x33BF, 'M', u'mw'), | ||
| 3582 | (0x33C0, 'M', u'kω'), | ||
| 3583 | (0x33C1, 'M', u'mω'), | ||
| 3584 | (0x33C2, 'X'), | ||
| 3585 | (0x33C3, 'M', u'bq'), | ||
| 3586 | (0x33C4, 'M', u'cc'), | ||
| 3587 | (0x33C5, 'M', u'cd'), | ||
| 3588 | (0x33C6, 'M', u'c∕kg'), | ||
| 3589 | (0x33C7, 'X'), | ||
| 3590 | (0x33C8, 'M', u'db'), | ||
| 3591 | (0x33C9, 'M', u'gy'), | ||
| 3592 | (0x33CA, 'M', u'ha'), | ||
| 3593 | (0x33CB, 'M', u'hp'), | ||
| 3594 | (0x33CC, 'M', u'in'), | ||
| 3595 | (0x33CD, 'M', u'kk'), | ||
| 3596 | (0x33CE, 'M', u'km'), | ||
| 3597 | (0x33CF, 'M', u'kt'), | ||
| 3598 | (0x33D0, 'M', u'lm'), | ||
| 3599 | (0x33D1, 'M', u'ln'), | ||
| 3600 | (0x33D2, 'M', u'log'), | ||
| 3601 | (0x33D3, 'M', u'lx'), | ||
| 3602 | (0x33D4, 'M', u'mb'), | ||
| 3603 | (0x33D5, 'M', u'mil'), | ||
| 3604 | (0x33D6, 'M', u'mol'), | ||
| 3605 | (0x33D7, 'M', u'ph'), | ||
| 3606 | (0x33D8, 'X'), | ||
| 3607 | (0x33D9, 'M', u'ppm'), | ||
| 3608 | (0x33DA, 'M', u'pr'), | ||
| 3609 | (0x33DB, 'M', u'sr'), | ||
| 3610 | (0x33DC, 'M', u'sv'), | ||
| 3611 | (0x33DD, 'M', u'wb'), | ||
| 3612 | (0x33DE, 'M', u'v∕m'), | ||
| 3613 | (0x33DF, 'M', u'a∕m'), | ||
| 3614 | (0x33E0, 'M', u'1日'), | ||
| 3615 | (0x33E1, 'M', u'2日'), | ||
| 3616 | (0x33E2, 'M', u'3日'), | ||
| 3617 | (0x33E3, 'M', u'4日'), | ||
| 3618 | (0x33E4, 'M', u'5日'), | ||
| 3619 | (0x33E5, 'M', u'6日'), | ||
| 3620 | (0x33E6, 'M', u'7日'), | ||
| 3621 | (0x33E7, 'M', u'8日'), | ||
| 3622 | (0x33E8, 'M', u'9日'), | ||
| 3623 | (0x33E9, 'M', u'10日'), | ||
| 3624 | (0x33EA, 'M', u'11日'), | ||
| 3625 | (0x33EB, 'M', u'12日'), | ||
| 3626 | (0x33EC, 'M', u'13日'), | ||
| 3627 | (0x33ED, 'M', u'14日'), | ||
| 3628 | (0x33EE, 'M', u'15日'), | ||
| 3629 | (0x33EF, 'M', u'16日'), | ||
| 3630 | (0x33F0, 'M', u'17日'), | ||
| 3631 | (0x33F1, 'M', u'18日'), | ||
| 3632 | (0x33F2, 'M', u'19日'), | ||
| 3633 | (0x33F3, 'M', u'20日'), | ||
| 3634 | (0x33F4, 'M', u'21日'), | ||
| 3635 | (0x33F5, 'M', u'22日'), | ||
| 3636 | (0x33F6, 'M', u'23日'), | ||
| 3637 | (0x33F7, 'M', u'24日'), | ||
| 3638 | (0x33F8, 'M', u'25日'), | ||
| 3639 | (0x33F9, 'M', u'26日'), | ||
| 3640 | (0x33FA, 'M', u'27日'), | ||
| 3641 | (0x33FB, 'M', u'28日'), | ||
| 3642 | (0x33FC, 'M', u'29日'), | ||
| 3643 | (0x33FD, 'M', u'30日'), | ||
| 3644 | (0x33FE, 'M', u'31日'), | ||
| 3645 | (0x33FF, 'M', u'gal'), | ||
| 3646 | ] | ||
| 3647 | |||
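Because the table stores only run starts, consumers resolve a code point with a binary search over the concatenated, sorted tuple rather than a per-codepoint dict. Below is a minimal sketch of that lookup; `uts46_status` is an illustrative name of ours, but the probe trick is the same one idna's `uts46_remap` (in `idna/core.py`) relies on:

    import bisect

    def uts46_status(code_point, table):
        # Rightmost entry whose start is <= code_point. The probe
        # (code_point, 'Z') sorts after any real row with the same start,
        # because every status code ('3', 'D', 'I', 'M', 'V', 'X')
        # precedes 'Z'.
        row = table[bisect.bisect_left(table, (code_point, 'Z')) - 1]
        status = row[1]
        mapping = row[2] if len(row) == 3 else None
        return status, mapping

For example, 0x3042 (あ) has no entry of its own but falls inside the run opened by (0x3041, 'V') above, so it is valid as-is; 0x2F9E hits the (0x2F9E, 'M', u'車') row, so the Kangxi radical is replaced by the unified CJK ideograph before further IDNA processing.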
| 3648 | def _seg_35(): | ||
| 3649 | return [ | ||
| 3650 | (0x3400, 'V'), | ||
| 3651 | (0x4DB6, 'X'), | ||
| 3652 | (0x4DC0, 'V'), | ||
| 3653 | (0x9FCD, 'X'), | ||
| 3654 | (0xA000, 'V'), | ||
| 3655 | (0xA48D, 'X'), | ||
| 3656 | (0xA490, 'V'), | ||
| 3657 | (0xA4C7, 'X'), | ||
| 3658 | (0xA4D0, 'V'), | ||
| 3659 | (0xA62C, 'X'), | ||
| 3660 | (0xA640, 'M', u'ꙁ'), | ||
| 3661 | (0xA641, 'V'), | ||
| 3662 | (0xA642, 'M', u'ꙃ'), | ||
| 3663 | (0xA643, 'V'), | ||
| 3664 | (0xA644, 'M', u'ꙅ'), | ||
| 3665 | (0xA645, 'V'), | ||
| 3666 | (0xA646, 'M', u'ꙇ'), | ||
| 3667 | (0xA647, 'V'), | ||
| 3668 | (0xA648, 'M', u'ꙉ'), | ||
| 3669 | (0xA649, 'V'), | ||
| 3670 | (0xA64A, 'M', u'ꙋ'), | ||
| 3671 | (0xA64B, 'V'), | ||
| 3672 | (0xA64C, 'M', u'ꙍ'), | ||
| 3673 | (0xA64D, 'V'), | ||
| 3674 | (0xA64E, 'M', u'ꙏ'), | ||
| 3675 | (0xA64F, 'V'), | ||
| 3676 | (0xA650, 'M', u'ꙑ'), | ||
| 3677 | (0xA651, 'V'), | ||
| 3678 | (0xA652, 'M', u'ꙓ'), | ||
| 3679 | (0xA653, 'V'), | ||
| 3680 | (0xA654, 'M', u'ꙕ'), | ||
| 3681 | (0xA655, 'V'), | ||
| 3682 | (0xA656, 'M', u'ꙗ'), | ||
| 3683 | (0xA657, 'V'), | ||
| 3684 | (0xA658, 'M', u'ꙙ'), | ||
| 3685 | (0xA659, 'V'), | ||
| 3686 | (0xA65A, 'M', u'ꙛ'), | ||
| 3687 | (0xA65B, 'V'), | ||
| 3688 | (0xA65C, 'M', u'ꙝ'), | ||
| 3689 | (0xA65D, 'V'), | ||
| 3690 | (0xA65E, 'M', u'ꙟ'), | ||
| 3691 | (0xA65F, 'V'), | ||
| 3692 | (0xA660, 'M', u'ꙡ'), | ||
| 3693 | (0xA661, 'V'), | ||
| 3694 | (0xA662, 'M', u'ꙣ'), | ||
| 3695 | (0xA663, 'V'), | ||
| 3696 | (0xA664, 'M', u'ꙥ'), | ||
| 3697 | (0xA665, 'V'), | ||
| 3698 | (0xA666, 'M', u'ꙧ'), | ||
| 3699 | (0xA667, 'V'), | ||
| 3700 | (0xA668, 'M', u'ꙩ'), | ||
| 3701 | (0xA669, 'V'), | ||
| 3702 | (0xA66A, 'M', u'ꙫ'), | ||
| 3703 | (0xA66B, 'V'), | ||
| 3704 | (0xA66C, 'M', u'ꙭ'), | ||
| 3705 | (0xA66D, 'V'), | ||
| 3706 | (0xA680, 'M', u'ꚁ'), | ||
| 3707 | (0xA681, 'V'), | ||
| 3708 | (0xA682, 'M', u'ꚃ'), | ||
| 3709 | (0xA683, 'V'), | ||
| 3710 | (0xA684, 'M', u'ꚅ'), | ||
| 3711 | (0xA685, 'V'), | ||
| 3712 | (0xA686, 'M', u'ꚇ'), | ||
| 3713 | (0xA687, 'V'), | ||
| 3714 | (0xA688, 'M', u'ꚉ'), | ||
| 3715 | (0xA689, 'V'), | ||
| 3716 | (0xA68A, 'M', u'ꚋ'), | ||
| 3717 | (0xA68B, 'V'), | ||
| 3718 | (0xA68C, 'M', u'ꚍ'), | ||
| 3719 | (0xA68D, 'V'), | ||
| 3720 | (0xA68E, 'M', u'ꚏ'), | ||
| 3721 | (0xA68F, 'V'), | ||
| 3722 | (0xA690, 'M', u'ꚑ'), | ||
| 3723 | (0xA691, 'V'), | ||
| 3724 | (0xA692, 'M', u'ꚓ'), | ||
| 3725 | (0xA693, 'V'), | ||
| 3726 | (0xA694, 'M', u'ꚕ'), | ||
| 3727 | (0xA695, 'V'), | ||
| 3728 | (0xA696, 'M', u'ꚗ'), | ||
| 3729 | (0xA697, 'V'), | ||
| 3730 | (0xA698, 'X'), | ||
| 3731 | (0xA69F, 'V'), | ||
| 3732 | (0xA6F8, 'X'), | ||
| 3733 | (0xA700, 'V'), | ||
| 3734 | (0xA722, 'M', u'ꜣ'), | ||
| 3735 | (0xA723, 'V'), | ||
| 3736 | (0xA724, 'M', u'ꜥ'), | ||
| 3737 | (0xA725, 'V'), | ||
| 3738 | (0xA726, 'M', u'ꜧ'), | ||
| 3739 | (0xA727, 'V'), | ||
| 3740 | (0xA728, 'M', u'ꜩ'), | ||
| 3741 | (0xA729, 'V'), | ||
| 3742 | (0xA72A, 'M', u'ꜫ'), | ||
| 3743 | (0xA72B, 'V'), | ||
| 3744 | (0xA72C, 'M', u'ꜭ'), | ||
| 3745 | (0xA72D, 'V'), | ||
| 3746 | (0xA72E, 'M', u'ꜯ'), | ||
| 3747 | (0xA72F, 'V'), | ||
| 3748 | (0xA732, 'M', u'ꜳ'), | ||
| 3749 | (0xA733, 'V'), | ||
| 3750 | ] | ||
| 3751 | |||
| 3752 | def _seg_36(): | ||
| 3753 | return [ | ||
| 3754 | (0xA734, 'M', u'ꜵ'), | ||
| 3755 | (0xA735, 'V'), | ||
| 3756 | (0xA736, 'M', u'ꜷ'), | ||
| 3757 | (0xA737, 'V'), | ||
| 3758 | (0xA738, 'M', u'ꜹ'), | ||
| 3759 | (0xA739, 'V'), | ||
| 3760 | (0xA73A, 'M', u'ꜻ'), | ||
| 3761 | (0xA73B, 'V'), | ||
| 3762 | (0xA73C, 'M', u'ꜽ'), | ||
| 3763 | (0xA73D, 'V'), | ||
| 3764 | (0xA73E, 'M', u'ꜿ'), | ||
| 3765 | (0xA73F, 'V'), | ||
| 3766 | (0xA740, 'M', u'ꝁ'), | ||
| 3767 | (0xA741, 'V'), | ||
| 3768 | (0xA742, 'M', u'ꝃ'), | ||
| 3769 | (0xA743, 'V'), | ||
| 3770 | (0xA744, 'M', u'ꝅ'), | ||
| 3771 | (0xA745, 'V'), | ||
| 3772 | (0xA746, 'M', u'ꝇ'), | ||
| 3773 | (0xA747, 'V'), | ||
| 3774 | (0xA748, 'M', u'ꝉ'), | ||
| 3775 | (0xA749, 'V'), | ||
| 3776 | (0xA74A, 'M', u'ꝋ'), | ||
| 3777 | (0xA74B, 'V'), | ||
| 3778 | (0xA74C, 'M', u'ꝍ'), | ||
| 3779 | (0xA74D, 'V'), | ||
| 3780 | (0xA74E, 'M', u'ꝏ'), | ||
| 3781 | (0xA74F, 'V'), | ||
| 3782 | (0xA750, 'M', u'ꝑ'), | ||
| 3783 | (0xA751, 'V'), | ||
| 3784 | (0xA752, 'M', u'ꝓ'), | ||
| 3785 | (0xA753, 'V'), | ||
| 3786 | (0xA754, 'M', u'ꝕ'), | ||
| 3787 | (0xA755, 'V'), | ||
| 3788 | (0xA756, 'M', u'ꝗ'), | ||
| 3789 | (0xA757, 'V'), | ||
| 3790 | (0xA758, 'M', u'ꝙ'), | ||
| 3791 | (0xA759, 'V'), | ||
| 3792 | (0xA75A, 'M', u'ꝛ'), | ||
| 3793 | (0xA75B, 'V'), | ||
| 3794 | (0xA75C, 'M', u'ꝝ'), | ||
| 3795 | (0xA75D, 'V'), | ||
| 3796 | (0xA75E, 'M', u'ꝟ'), | ||
| 3797 | (0xA75F, 'V'), | ||
| 3798 | (0xA760, 'M', u'ꝡ'), | ||
| 3799 | (0xA761, 'V'), | ||
| 3800 | (0xA762, 'M', u'ꝣ'), | ||
| 3801 | (0xA763, 'V'), | ||
| 3802 | (0xA764, 'M', u'ꝥ'), | ||
| 3803 | (0xA765, 'V'), | ||
| 3804 | (0xA766, 'M', u'ꝧ'), | ||
| 3805 | (0xA767, 'V'), | ||
| 3806 | (0xA768, 'M', u'ꝩ'), | ||
| 3807 | (0xA769, 'V'), | ||
| 3808 | (0xA76A, 'M', u'ꝫ'), | ||
| 3809 | (0xA76B, 'V'), | ||
| 3810 | (0xA76C, 'M', u'ꝭ'), | ||
| 3811 | (0xA76D, 'V'), | ||
| 3812 | (0xA76E, 'M', u'ꝯ'), | ||
| 3813 | (0xA76F, 'V'), | ||
| 3814 | (0xA770, 'M', u'ꝯ'), | ||
| 3815 | (0xA771, 'V'), | ||
| 3816 | (0xA779, 'M', u'ꝺ'), | ||
| 3817 | (0xA77A, 'V'), | ||
| 3818 | (0xA77B, 'M', u'ꝼ'), | ||
| 3819 | (0xA77C, 'V'), | ||
| 3820 | (0xA77D, 'M', u'ᵹ'), | ||
| 3821 | (0xA77E, 'M', u'ꝿ'), | ||
| 3822 | (0xA77F, 'V'), | ||
| 3823 | (0xA780, 'M', u'ꞁ'), | ||
| 3824 | (0xA781, 'V'), | ||
| 3825 | (0xA782, 'M', u'ꞃ'), | ||
| 3826 | (0xA783, 'V'), | ||
| 3827 | (0xA784, 'M', u'ꞅ'), | ||
| 3828 | (0xA785, 'V'), | ||
| 3829 | (0xA786, 'M', u'ꞇ'), | ||
| 3830 | (0xA787, 'V'), | ||
| 3831 | (0xA78B, 'M', u'ꞌ'), | ||
| 3832 | (0xA78C, 'V'), | ||
| 3833 | (0xA78D, 'M', u'ɥ'), | ||
| 3834 | (0xA78E, 'V'), | ||
| 3835 | (0xA78F, 'X'), | ||
| 3836 | (0xA790, 'M', u'ꞑ'), | ||
| 3837 | (0xA791, 'V'), | ||
| 3838 | (0xA792, 'M', u'ꞓ'), | ||
| 3839 | (0xA793, 'V'), | ||
| 3840 | (0xA794, 'X'), | ||
| 3841 | (0xA7A0, 'M', u'ꞡ'), | ||
| 3842 | (0xA7A1, 'V'), | ||
| 3843 | (0xA7A2, 'M', u'ꞣ'), | ||
| 3844 | (0xA7A3, 'V'), | ||
| 3845 | (0xA7A4, 'M', u'ꞥ'), | ||
| 3846 | (0xA7A5, 'V'), | ||
| 3847 | (0xA7A6, 'M', u'ꞧ'), | ||
| 3848 | (0xA7A7, 'V'), | ||
| 3849 | (0xA7A8, 'M', u'ꞩ'), | ||
| 3850 | (0xA7A9, 'V'), | ||
| 3851 | (0xA7AA, 'M', u'ɦ'), | ||
| 3852 | (0xA7AB, 'X'), | ||
| 3853 | (0xA7F8, 'M', u'ħ'), | ||
| 3854 | ] | ||
| 3855 | |||
| 3856 | def _seg_37(): | ||
| 3857 | return [ | ||
| 3858 | (0xA7F9, 'M', u'œ'), | ||
| 3859 | (0xA7FA, 'V'), | ||
| 3860 | (0xA82C, 'X'), | ||
| 3861 | (0xA830, 'V'), | ||
| 3862 | (0xA83A, 'X'), | ||
| 3863 | (0xA840, 'V'), | ||
| 3864 | (0xA878, 'X'), | ||
| 3865 | (0xA880, 'V'), | ||
| 3866 | (0xA8C5, 'X'), | ||
| 3867 | (0xA8CE, 'V'), | ||
| 3868 | (0xA8DA, 'X'), | ||
| 3869 | (0xA8E0, 'V'), | ||
| 3870 | (0xA8FC, 'X'), | ||
| 3871 | (0xA900, 'V'), | ||
| 3872 | (0xA954, 'X'), | ||
| 3873 | (0xA95F, 'V'), | ||
| 3874 | (0xA97D, 'X'), | ||
| 3875 | (0xA980, 'V'), | ||
| 3876 | (0xA9CE, 'X'), | ||
| 3877 | (0xA9CF, 'V'), | ||
| 3878 | (0xA9DA, 'X'), | ||
| 3879 | (0xA9DE, 'V'), | ||
| 3880 | (0xA9E0, 'X'), | ||
| 3881 | (0xAA00, 'V'), | ||
| 3882 | (0xAA37, 'X'), | ||
| 3883 | (0xAA40, 'V'), | ||
| 3884 | (0xAA4E, 'X'), | ||
| 3885 | (0xAA50, 'V'), | ||
| 3886 | (0xAA5A, 'X'), | ||
| 3887 | (0xAA5C, 'V'), | ||
| 3888 | (0xAA7C, 'X'), | ||
| 3889 | (0xAA80, 'V'), | ||
| 3890 | (0xAAC3, 'X'), | ||
| 3891 | (0xAADB, 'V'), | ||
| 3892 | (0xAAF7, 'X'), | ||
| 3893 | (0xAB01, 'V'), | ||
| 3894 | (0xAB07, 'X'), | ||
| 3895 | (0xAB09, 'V'), | ||
| 3896 | (0xAB0F, 'X'), | ||
| 3897 | (0xAB11, 'V'), | ||
| 3898 | (0xAB17, 'X'), | ||
| 3899 | (0xAB20, 'V'), | ||
| 3900 | (0xAB27, 'X'), | ||
| 3901 | (0xAB28, 'V'), | ||
| 3902 | (0xAB2F, 'X'), | ||
| 3903 | (0xABC0, 'V'), | ||
| 3904 | (0xABEE, 'X'), | ||
| 3905 | (0xABF0, 'V'), | ||
| 3906 | (0xABFA, 'X'), | ||
| 3907 | (0xAC00, 'V'), | ||
| 3908 | (0xD7A4, 'X'), | ||
| 3909 | (0xD7B0, 'V'), | ||
| 3910 | (0xD7C7, 'X'), | ||
| 3911 | (0xD7CB, 'V'), | ||
| 3912 | (0xD7FC, 'X'), | ||
| 3913 | (0xF900, 'M', u'豈'), | ||
| 3914 | (0xF901, 'M', u'更'), | ||
| 3915 | (0xF902, 'M', u'車'), | ||
| 3916 | (0xF903, 'M', u'賈'), | ||
| 3917 | (0xF904, 'M', u'滑'), | ||
| 3918 | (0xF905, 'M', u'串'), | ||
| 3919 | (0xF906, 'M', u'句'), | ||
| 3920 | (0xF907, 'M', u'龜'), | ||
| 3921 | (0xF909, 'M', u'契'), | ||
| 3922 | (0xF90A, 'M', u'金'), | ||
| 3923 | (0xF90B, 'M', u'喇'), | ||
| 3924 | (0xF90C, 'M', u'奈'), | ||
| 3925 | (0xF90D, 'M', u'懶'), | ||
| 3926 | (0xF90E, 'M', u'癩'), | ||
| 3927 | (0xF90F, 'M', u'羅'), | ||
| 3928 | (0xF910, 'M', u'蘿'), | ||
| 3929 | (0xF911, 'M', u'螺'), | ||
| 3930 | (0xF912, 'M', u'裸'), | ||
| 3931 | (0xF913, 'M', u'邏'), | ||
| 3932 | (0xF914, 'M', u'樂'), | ||
| 3933 | (0xF915, 'M', u'洛'), | ||
| 3934 | (0xF916, 'M', u'烙'), | ||
| 3935 | (0xF917, 'M', u'珞'), | ||
| 3936 | (0xF918, 'M', u'落'), | ||
| 3937 | (0xF919, 'M', u'酪'), | ||
| 3938 | (0xF91A, 'M', u'駱'), | ||
| 3939 | (0xF91B, 'M', u'亂'), | ||
| 3940 | (0xF91C, 'M', u'卵'), | ||
| 3941 | (0xF91D, 'M', u'欄'), | ||
| 3942 | (0xF91E, 'M', u'爛'), | ||
| 3943 | (0xF91F, 'M', u'蘭'), | ||
| 3944 | (0xF920, 'M', u'鸞'), | ||
| 3945 | (0xF921, 'M', u'嵐'), | ||
| 3946 | (0xF922, 'M', u'濫'), | ||
| 3947 | (0xF923, 'M', u'藍'), | ||
| 3948 | (0xF924, 'M', u'襤'), | ||
| 3949 | (0xF925, 'M', u'拉'), | ||
| 3950 | (0xF926, 'M', u'臘'), | ||
| 3951 | (0xF927, 'M', u'蠟'), | ||
| 3952 | (0xF928, 'M', u'廊'), | ||
| 3953 | (0xF929, 'M', u'朗'), | ||
| 3954 | (0xF92A, 'M', u'浪'), | ||
| 3955 | (0xF92B, 'M', u'狼'), | ||
| 3956 | (0xF92C, 'M', u'郎'), | ||
| 3957 | (0xF92D, 'M', u'來'), | ||
| 3958 | ] | ||
| 3959 | |||
| 3960 | def _seg_38(): | ||
| 3961 | return [ | ||
| 3962 | (0xF92E, 'M', u'冷'), | ||
| 3963 | (0xF92F, 'M', u'勞'), | ||
| 3964 | (0xF930, 'M', u'擄'), | ||
| 3965 | (0xF931, 'M', u'櫓'), | ||
| 3966 | (0xF932, 'M', u'爐'), | ||
| 3967 | (0xF933, 'M', u'盧'), | ||
| 3968 | (0xF934, 'M', u'老'), | ||
| 3969 | (0xF935, 'M', u'蘆'), | ||
| 3970 | (0xF936, 'M', u'虜'), | ||
| 3971 | (0xF937, 'M', u'路'), | ||
| 3972 | (0xF938, 'M', u'露'), | ||
| 3973 | (0xF939, 'M', u'魯'), | ||
| 3974 | (0xF93A, 'M', u'鷺'), | ||
| 3975 | (0xF93B, 'M', u'碌'), | ||
| 3976 | (0xF93C, 'M', u'祿'), | ||
| 3977 | (0xF93D, 'M', u'綠'), | ||
| 3978 | (0xF93E, 'M', u'菉'), | ||
| 3979 | (0xF93F, 'M', u'錄'), | ||
| 3980 | (0xF940, 'M', u'鹿'), | ||
| 3981 | (0xF941, 'M', u'論'), | ||
| 3982 | (0xF942, 'M', u'壟'), | ||
| 3983 | (0xF943, 'M', u'弄'), | ||
| 3984 | (0xF944, 'M', u'籠'), | ||
| 3985 | (0xF945, 'M', u'聾'), | ||
| 3986 | (0xF946, 'M', u'牢'), | ||
| 3987 | (0xF947, 'M', u'磊'), | ||
| 3988 | (0xF948, 'M', u'賂'), | ||
| 3989 | (0xF949, 'M', u'雷'), | ||
| 3990 | (0xF94A, 'M', u'壘'), | ||
| 3991 | (0xF94B, 'M', u'屢'), | ||
| 3992 | (0xF94C, 'M', u'樓'), | ||
| 3993 | (0xF94D, 'M', u'淚'), | ||
| 3994 | (0xF94E, 'M', u'漏'), | ||
| 3995 | (0xF94F, 'M', u'累'), | ||
| 3996 | (0xF950, 'M', u'縷'), | ||
| 3997 | (0xF951, 'M', u'陋'), | ||
| 3998 | (0xF952, 'M', u'勒'), | ||
| 3999 | (0xF953, 'M', u'肋'), | ||
| 4000 | (0xF954, 'M', u'凜'), | ||
| 4001 | (0xF955, 'M', u'凌'), | ||
| 4002 | (0xF956, 'M', u'稜'), | ||
| 4003 | (0xF957, 'M', u'綾'), | ||
| 4004 | (0xF958, 'M', u'菱'), | ||
| 4005 | (0xF959, 'M', u'陵'), | ||
| 4006 | (0xF95A, 'M', u'讀'), | ||
| 4007 | (0xF95B, 'M', u'拏'), | ||
| 4008 | (0xF95C, 'M', u'樂'), | ||
| 4009 | (0xF95D, 'M', u'諾'), | ||
| 4010 | (0xF95E, 'M', u'丹'), | ||
| 4011 | (0xF95F, 'M', u'寧'), | ||
| 4012 | (0xF960, 'M', u'怒'), | ||
| 4013 | (0xF961, 'M', u'率'), | ||
| 4014 | (0xF962, 'M', u'異'), | ||
| 4015 | (0xF963, 'M', u'北'), | ||
| 4016 | (0xF964, 'M', u'磻'), | ||
| 4017 | (0xF965, 'M', u'便'), | ||
| 4018 | (0xF966, 'M', u'復'), | ||
| 4019 | (0xF967, 'M', u'不'), | ||
| 4020 | (0xF968, 'M', u'泌'), | ||
| 4021 | (0xF969, 'M', u'數'), | ||
| 4022 | (0xF96A, 'M', u'索'), | ||
| 4023 | (0xF96B, 'M', u'參'), | ||
| 4024 | (0xF96C, 'M', u'塞'), | ||
| 4025 | (0xF96D, 'M', u'省'), | ||
| 4026 | (0xF96E, 'M', u'葉'), | ||
| 4027 | (0xF96F, 'M', u'說'), | ||
| 4028 | (0xF970, 'M', u'殺'), | ||
| 4029 | (0xF971, 'M', u'辰'), | ||
| 4030 | (0xF972, 'M', u'沈'), | ||
| 4031 | (0xF973, 'M', u'拾'), | ||
| 4032 | (0xF974, 'M', u'若'), | ||
| 4033 | (0xF975, 'M', u'掠'), | ||
| 4034 | (0xF976, 'M', u'略'), | ||
| 4035 | (0xF977, 'M', u'亮'), | ||
| 4036 | (0xF978, 'M', u'兩'), | ||
| 4037 | (0xF979, 'M', u'凉'), | ||
| 4038 | (0xF97A, 'M', u'梁'), | ||
| 4039 | (0xF97B, 'M', u'糧'), | ||
| 4040 | (0xF97C, 'M', u'良'), | ||
| 4041 | (0xF97D, 'M', u'諒'), | ||
| 4042 | (0xF97E, 'M', u'量'), | ||
| 4043 | (0xF97F, 'M', u'勵'), | ||
| 4044 | (0xF980, 'M', u'呂'), | ||
| 4045 | (0xF981, 'M', u'女'), | ||
| 4046 | (0xF982, 'M', u'廬'), | ||
| 4047 | (0xF983, 'M', u'旅'), | ||
| 4048 | (0xF984, 'M', u'濾'), | ||
| 4049 | (0xF985, 'M', u'礪'), | ||
| 4050 | (0xF986, 'M', u'閭'), | ||
| 4051 | (0xF987, 'M', u'驪'), | ||
| 4052 | (0xF988, 'M', u'麗'), | ||
| 4053 | (0xF989, 'M', u'黎'), | ||
| 4054 | (0xF98A, 'M', u'力'), | ||
| 4055 | (0xF98B, 'M', u'曆'), | ||
| 4056 | (0xF98C, 'M', u'歷'), | ||
| 4057 | (0xF98D, 'M', u'轢'), | ||
| 4058 | (0xF98E, 'M', u'年'), | ||
| 4059 | (0xF98F, 'M', u'憐'), | ||
| 4060 | (0xF990, 'M', u'戀'), | ||
| 4061 | (0xF991, 'M', u'撚'), | ||
| 4062 | ] | ||
| 4063 | |||
| 4064 | def _seg_39(): | ||
| 4065 | return [ | ||
| 4066 | (0xF992, 'M', u'漣'), | ||
| 4067 | (0xF993, 'M', u'煉'), | ||
| 4068 | (0xF994, 'M', u'璉'), | ||
| 4069 | (0xF995, 'M', u'秊'), | ||
| 4070 | (0xF996, 'M', u'練'), | ||
| 4071 | (0xF997, 'M', u'聯'), | ||
| 4072 | (0xF998, 'M', u'輦'), | ||
| 4073 | (0xF999, 'M', u'蓮'), | ||
| 4074 | (0xF99A, 'M', u'連'), | ||
| 4075 | (0xF99B, 'M', u'鍊'), | ||
| 4076 | (0xF99C, 'M', u'列'), | ||
| 4077 | (0xF99D, 'M', u'劣'), | ||
| 4078 | (0xF99E, 'M', u'咽'), | ||
| 4079 | (0xF99F, 'M', u'烈'), | ||
| 4080 | (0xF9A0, 'M', u'裂'), | ||
| 4081 | (0xF9A1, 'M', u'說'), | ||
| 4082 | (0xF9A2, 'M', u'廉'), | ||
| 4083 | (0xF9A3, 'M', u'念'), | ||
| 4084 | (0xF9A4, 'M', u'捻'), | ||
| 4085 | (0xF9A5, 'M', u'殮'), | ||
| 4086 | (0xF9A6, 'M', u'簾'), | ||
| 4087 | (0xF9A7, 'M', u'獵'), | ||
| 4088 | (0xF9A8, 'M', u'令'), | ||
| 4089 | (0xF9A9, 'M', u'囹'), | ||
| 4090 | (0xF9AA, 'M', u'寧'), | ||
| 4091 | (0xF9AB, 'M', u'嶺'), | ||
| 4092 | (0xF9AC, 'M', u'怜'), | ||
| 4093 | (0xF9AD, 'M', u'玲'), | ||
| 4094 | (0xF9AE, 'M', u'瑩'), | ||
| 4095 | (0xF9AF, 'M', u'羚'), | ||
| 4096 | (0xF9B0, 'M', u'聆'), | ||
| 4097 | (0xF9B1, 'M', u'鈴'), | ||
| 4098 | (0xF9B2, 'M', u'零'), | ||
| 4099 | (0xF9B3, 'M', u'靈'), | ||
| 4100 | (0xF9B4, 'M', u'領'), | ||
| 4101 | (0xF9B5, 'M', u'例'), | ||
| 4102 | (0xF9B6, 'M', u'禮'), | ||
| 4103 | (0xF9B7, 'M', u'醴'), | ||
| 4104 | (0xF9B8, 'M', u'隸'), | ||
| 4105 | (0xF9B9, 'M', u'惡'), | ||
| 4106 | (0xF9BA, 'M', u'了'), | ||
| 4107 | (0xF9BB, 'M', u'僚'), | ||
| 4108 | (0xF9BC, 'M', u'寮'), | ||
| 4109 | (0xF9BD, 'M', u'尿'), | ||
| 4110 | (0xF9BE, 'M', u'料'), | ||
| 4111 | (0xF9BF, 'M', u'樂'), | ||
| 4112 | (0xF9C0, 'M', u'燎'), | ||
| 4113 | (0xF9C1, 'M', u'療'), | ||
| 4114 | (0xF9C2, 'M', u'蓼'), | ||
| 4115 | (0xF9C3, 'M', u'遼'), | ||
| 4116 | (0xF9C4, 'M', u'龍'), | ||
| 4117 | (0xF9C5, 'M', u'暈'), | ||
| 4118 | (0xF9C6, 'M', u'阮'), | ||
| 4119 | (0xF9C7, 'M', u'劉'), | ||
| 4120 | (0xF9C8, 'M', u'杻'), | ||
| 4121 | (0xF9C9, 'M', u'柳'), | ||
| 4122 | (0xF9CA, 'M', u'流'), | ||
| 4123 | (0xF9CB, 'M', u'溜'), | ||
| 4124 | (0xF9CC, 'M', u'琉'), | ||
| 4125 | (0xF9CD, 'M', u'留'), | ||
| 4126 | (0xF9CE, 'M', u'硫'), | ||
| 4127 | (0xF9CF, 'M', u'紐'), | ||
| 4128 | (0xF9D0, 'M', u'類'), | ||
| 4129 | (0xF9D1, 'M', u'六'), | ||
| 4130 | (0xF9D2, 'M', u'戮'), | ||
| 4131 | (0xF9D3, 'M', u'陸'), | ||
| 4132 | (0xF9D4, 'M', u'倫'), | ||
| 4133 | (0xF9D5, 'M', u'崙'), | ||
| 4134 | (0xF9D6, 'M', u'淪'), | ||
| 4135 | (0xF9D7, 'M', u'輪'), | ||
| 4136 | (0xF9D8, 'M', u'律'), | ||
| 4137 | (0xF9D9, 'M', u'慄'), | ||
| 4138 | (0xF9DA, 'M', u'栗'), | ||
| 4139 | (0xF9DB, 'M', u'率'), | ||
| 4140 | (0xF9DC, 'M', u'隆'), | ||
| 4141 | (0xF9DD, 'M', u'利'), | ||
| 4142 | (0xF9DE, 'M', u'吏'), | ||
| 4143 | (0xF9DF, 'M', u'履'), | ||
| 4144 | (0xF9E0, 'M', u'易'), | ||
| 4145 | (0xF9E1, 'M', u'李'), | ||
| 4146 | (0xF9E2, 'M', u'梨'), | ||
| 4147 | (0xF9E3, 'M', u'泥'), | ||
| 4148 | (0xF9E4, 'M', u'理'), | ||
| 4149 | (0xF9E5, 'M', u'痢'), | ||
| 4150 | (0xF9E6, 'M', u'罹'), | ||
| 4151 | (0xF9E7, 'M', u'裏'), | ||
| 4152 | (0xF9E8, 'M', u'裡'), | ||
| 4153 | (0xF9E9, 'M', u'里'), | ||
| 4154 | (0xF9EA, 'M', u'離'), | ||
| 4155 | (0xF9EB, 'M', u'匿'), | ||
| 4156 | (0xF9EC, 'M', u'溺'), | ||
| 4157 | (0xF9ED, 'M', u'吝'), | ||
| 4158 | (0xF9EE, 'M', u'燐'), | ||
| 4159 | (0xF9EF, 'M', u'璘'), | ||
| 4160 | (0xF9F0, 'M', u'藺'), | ||
| 4161 | (0xF9F1, 'M', u'隣'), | ||
| 4162 | (0xF9F2, 'M', u'鱗'), | ||
| 4163 | (0xF9F3, 'M', u'麟'), | ||
| 4164 | (0xF9F4, 'M', u'林'), | ||
| 4165 | (0xF9F5, 'M', u'淋'), | ||
| 4166 | ] | ||
| 4167 | |||
| 4168 | def _seg_40(): | ||
| 4169 | return [ | ||
| 4170 | (0xF9F6, 'M', u'臨'), | ||
| 4171 | (0xF9F7, 'M', u'立'), | ||
| 4172 | (0xF9F8, 'M', u'笠'), | ||
| 4173 | (0xF9F9, 'M', u'粒'), | ||
| 4174 | (0xF9FA, 'M', u'狀'), | ||
| 4175 | (0xF9FB, 'M', u'炙'), | ||
| 4176 | (0xF9FC, 'M', u'識'), | ||
| 4177 | (0xF9FD, 'M', u'什'), | ||
| 4178 | (0xF9FE, 'M', u'茶'), | ||
| 4179 | (0xF9FF, 'M', u'刺'), | ||
| 4180 | (0xFA00, 'M', u'切'), | ||
| 4181 | (0xFA01, 'M', u'度'), | ||
| 4182 | (0xFA02, 'M', u'拓'), | ||
| 4183 | (0xFA03, 'M', u'糖'), | ||
| 4184 | (0xFA04, 'M', u'宅'), | ||
| 4185 | (0xFA05, 'M', u'洞'), | ||
| 4186 | (0xFA06, 'M', u'暴'), | ||
| 4187 | (0xFA07, 'M', u'輻'), | ||
| 4188 | (0xFA08, 'M', u'行'), | ||
| 4189 | (0xFA09, 'M', u'降'), | ||
| 4190 | (0xFA0A, 'M', u'見'), | ||
| 4191 | (0xFA0B, 'M', u'廓'), | ||
| 4192 | (0xFA0C, 'M', u'兀'), | ||
| 4193 | (0xFA0D, 'M', u'嗀'), | ||
| 4194 | (0xFA0E, 'V'), | ||
| 4195 | (0xFA10, 'M', u'塚'), | ||
| 4196 | (0xFA11, 'V'), | ||
| 4197 | (0xFA12, 'M', u'晴'), | ||
| 4198 | (0xFA13, 'V'), | ||
| 4199 | (0xFA15, 'M', u'凞'), | ||
| 4200 | (0xFA16, 'M', u'猪'), | ||
| 4201 | (0xFA17, 'M', u'益'), | ||
| 4202 | (0xFA18, 'M', u'礼'), | ||
| 4203 | (0xFA19, 'M', u'神'), | ||
| 4204 | (0xFA1A, 'M', u'祥'), | ||
| 4205 | (0xFA1B, 'M', u'福'), | ||
| 4206 | (0xFA1C, 'M', u'靖'), | ||
| 4207 | (0xFA1D, 'M', u'精'), | ||
| 4208 | (0xFA1E, 'M', u'羽'), | ||
| 4209 | (0xFA1F, 'V'), | ||
| 4210 | (0xFA20, 'M', u'蘒'), | ||
| 4211 | (0xFA21, 'V'), | ||
| 4212 | (0xFA22, 'M', u'諸'), | ||
| 4213 | (0xFA23, 'V'), | ||
| 4214 | (0xFA25, 'M', u'逸'), | ||
| 4215 | (0xFA26, 'M', u'都'), | ||
| 4216 | (0xFA27, 'V'), | ||
| 4217 | (0xFA2A, 'M', u'飯'), | ||
| 4218 | (0xFA2B, 'M', u'飼'), | ||
| 4219 | (0xFA2C, 'M', u'館'), | ||
| 4220 | (0xFA2D, 'M', u'鶴'), | ||
| 4221 | (0xFA2E, 'M', u'郞'), | ||
| 4222 | (0xFA2F, 'M', u'隷'), | ||
| 4223 | (0xFA30, 'M', u'侮'), | ||
| 4224 | (0xFA31, 'M', u'僧'), | ||
| 4225 | (0xFA32, 'M', u'免'), | ||
| 4226 | (0xFA33, 'M', u'勉'), | ||
| 4227 | (0xFA34, 'M', u'勤'), | ||
| 4228 | (0xFA35, 'M', u'卑'), | ||
| 4229 | (0xFA36, 'M', u'喝'), | ||
| 4230 | (0xFA37, 'M', u'嘆'), | ||
| 4231 | (0xFA38, 'M', u'器'), | ||
| 4232 | (0xFA39, 'M', u'塀'), | ||
| 4233 | (0xFA3A, 'M', u'墨'), | ||
| 4234 | (0xFA3B, 'M', u'層'), | ||
| 4235 | (0xFA3C, 'M', u'屮'), | ||
| 4236 | (0xFA3D, 'M', u'悔'), | ||
| 4237 | (0xFA3E, 'M', u'慨'), | ||
| 4238 | (0xFA3F, 'M', u'憎'), | ||
| 4239 | (0xFA40, 'M', u'懲'), | ||
| 4240 | (0xFA41, 'M', u'敏'), | ||
| 4241 | (0xFA42, 'M', u'既'), | ||
| 4242 | (0xFA43, 'M', u'暑'), | ||
| 4243 | (0xFA44, 'M', u'梅'), | ||
| 4244 | (0xFA45, 'M', u'海'), | ||
| 4245 | (0xFA46, 'M', u'渚'), | ||
| 4246 | (0xFA47, 'M', u'漢'), | ||
| 4247 | (0xFA48, 'M', u'煮'), | ||
| 4248 | (0xFA49, 'M', u'爫'), | ||
| 4249 | (0xFA4A, 'M', u'琢'), | ||
| 4250 | (0xFA4B, 'M', u'碑'), | ||
| 4251 | (0xFA4C, 'M', u'社'), | ||
| 4252 | (0xFA4D, 'M', u'祉'), | ||
| 4253 | (0xFA4E, 'M', u'祈'), | ||
| 4254 | (0xFA4F, 'M', u'祐'), | ||
| 4255 | (0xFA50, 'M', u'祖'), | ||
| 4256 | (0xFA51, 'M', u'祝'), | ||
| 4257 | (0xFA52, 'M', u'禍'), | ||
| 4258 | (0xFA53, 'M', u'禎'), | ||
| 4259 | (0xFA54, 'M', u'穀'), | ||
| 4260 | (0xFA55, 'M', u'突'), | ||
| 4261 | (0xFA56, 'M', u'節'), | ||
| 4262 | (0xFA57, 'M', u'練'), | ||
| 4263 | (0xFA58, 'M', u'縉'), | ||
| 4264 | (0xFA59, 'M', u'繁'), | ||
| 4265 | (0xFA5A, 'M', u'署'), | ||
| 4266 | (0xFA5B, 'M', u'者'), | ||
| 4267 | (0xFA5C, 'M', u'臭'), | ||
| 4268 | (0xFA5D, 'M', u'艹'), | ||
| 4269 | (0xFA5F, 'M', u'著'), | ||
| 4270 | ] | ||
| 4271 | |||
| 4272 | def _seg_41(): | ||
| 4273 | return [ | ||
| 4274 | (0xFA60, 'M', u'褐'), | ||
| 4275 | (0xFA61, 'M', u'視'), | ||
| 4276 | (0xFA62, 'M', u'謁'), | ||
| 4277 | (0xFA63, 'M', u'謹'), | ||
| 4278 | (0xFA64, 'M', u'賓'), | ||
| 4279 | (0xFA65, 'M', u'贈'), | ||
| 4280 | (0xFA66, 'M', u'辶'), | ||
| 4281 | (0xFA67, 'M', u'逸'), | ||
| 4282 | (0xFA68, 'M', u'難'), | ||
| 4283 | (0xFA69, 'M', u'響'), | ||
| 4284 | (0xFA6A, 'M', u'頻'), | ||
| 4285 | (0xFA6B, 'M', u'恵'), | ||
| 4286 | (0xFA6C, 'M', u'𤋮'), | ||
| 4287 | (0xFA6D, 'M', u'舘'), | ||
| 4288 | (0xFA6E, 'X'), | ||
| 4289 | (0xFA70, 'M', u'並'), | ||
| 4290 | (0xFA71, 'M', u'况'), | ||
| 4291 | (0xFA72, 'M', u'全'), | ||
| 4292 | (0xFA73, 'M', u'侀'), | ||
| 4293 | (0xFA74, 'M', u'充'), | ||
| 4294 | (0xFA75, 'M', u'冀'), | ||
| 4295 | (0xFA76, 'M', u'勇'), | ||
| 4296 | (0xFA77, 'M', u'勺'), | ||
| 4297 | (0xFA78, 'M', u'喝'), | ||
| 4298 | (0xFA79, 'M', u'啕'), | ||
| 4299 | (0xFA7A, 'M', u'喙'), | ||
| 4300 | (0xFA7B, 'M', u'嗢'), | ||
| 4301 | (0xFA7C, 'M', u'塚'), | ||
| 4302 | (0xFA7D, 'M', u'墳'), | ||
| 4303 | (0xFA7E, 'M', u'奄'), | ||
| 4304 | (0xFA7F, 'M', u'奔'), | ||
| 4305 | (0xFA80, 'M', u'婢'), | ||
| 4306 | (0xFA81, 'M', u'嬨'), | ||
| 4307 | (0xFA82, 'M', u'廒'), | ||
| 4308 | (0xFA83, 'M', u'廙'), | ||
| 4309 | (0xFA84, 'M', u'彩'), | ||
| 4310 | (0xFA85, 'M', u'徭'), | ||
| 4311 | (0xFA86, 'M', u'惘'), | ||
| 4312 | (0xFA87, 'M', u'慎'), | ||
| 4313 | (0xFA88, 'M', u'愈'), | ||
| 4314 | (0xFA89, 'M', u'憎'), | ||
| 4315 | (0xFA8A, 'M', u'慠'), | ||
| 4316 | (0xFA8B, 'M', u'懲'), | ||
| 4317 | (0xFA8C, 'M', u'戴'), | ||
| 4318 | (0xFA8D, 'M', u'揄'), | ||
| 4319 | (0xFA8E, 'M', u'搜'), | ||
| 4320 | (0xFA8F, 'M', u'摒'), | ||
| 4321 | (0xFA90, 'M', u'敖'), | ||
| 4322 | (0xFA91, 'M', u'晴'), | ||
| 4323 | (0xFA92, 'M', u'朗'), | ||
| 4324 | (0xFA93, 'M', u'望'), | ||
| 4325 | (0xFA94, 'M', u'杖'), | ||
| 4326 | (0xFA95, 'M', u'歹'), | ||
| 4327 | (0xFA96, 'M', u'殺'), | ||
| 4328 | (0xFA97, 'M', u'流'), | ||
| 4329 | (0xFA98, 'M', u'滛'), | ||
| 4330 | (0xFA99, 'M', u'滋'), | ||
| 4331 | (0xFA9A, 'M', u'漢'), | ||
| 4332 | (0xFA9B, 'M', u'瀞'), | ||
| 4333 | (0xFA9C, 'M', u'煮'), | ||
| 4334 | (0xFA9D, 'M', u'瞧'), | ||
| 4335 | (0xFA9E, 'M', u'爵'), | ||
| 4336 | (0xFA9F, 'M', u'犯'), | ||
| 4337 | (0xFAA0, 'M', u'猪'), | ||
| 4338 | (0xFAA1, 'M', u'瑱'), | ||
| 4339 | (0xFAA2, 'M', u'甆'), | ||
| 4340 | (0xFAA3, 'M', u'画'), | ||
| 4341 | (0xFAA4, 'M', u'瘝'), | ||
| 4342 | (0xFAA5, 'M', u'瘟'), | ||
| 4343 | (0xFAA6, 'M', u'益'), | ||
| 4344 | (0xFAA7, 'M', u'盛'), | ||
| 4345 | (0xFAA8, 'M', u'直'), | ||
| 4346 | (0xFAA9, 'M', u'睊'), | ||
| 4347 | (0xFAAA, 'M', u'着'), | ||
| 4348 | (0xFAAB, 'M', u'磌'), | ||
| 4349 | (0xFAAC, 'M', u'窱'), | ||
| 4350 | (0xFAAD, 'M', u'節'), | ||
| 4351 | (0xFAAE, 'M', u'类'), | ||
| 4352 | (0xFAAF, 'M', u'絛'), | ||
| 4353 | (0xFAB0, 'M', u'練'), | ||
| 4354 | (0xFAB1, 'M', u'缾'), | ||
| 4355 | (0xFAB2, 'M', u'者'), | ||
| 4356 | (0xFAB3, 'M', u'荒'), | ||
| 4357 | (0xFAB4, 'M', u'華'), | ||
| 4358 | (0xFAB5, 'M', u'蝹'), | ||
| 4359 | (0xFAB6, 'M', u'襁'), | ||
| 4360 | (0xFAB7, 'M', u'覆'), | ||
| 4361 | (0xFAB8, 'M', u'視'), | ||
| 4362 | (0xFAB9, 'M', u'調'), | ||
| 4363 | (0xFABA, 'M', u'諸'), | ||
| 4364 | (0xFABB, 'M', u'請'), | ||
| 4365 | (0xFABC, 'M', u'謁'), | ||
| 4366 | (0xFABD, 'M', u'諾'), | ||
| 4367 | (0xFABE, 'M', u'諭'), | ||
| 4368 | (0xFABF, 'M', u'謹'), | ||
| 4369 | (0xFAC0, 'M', u'變'), | ||
| 4370 | (0xFAC1, 'M', u'贈'), | ||
| 4371 | (0xFAC2, 'M', u'輸'), | ||
| 4372 | (0xFAC3, 'M', u'遲'), | ||
| 4373 | (0xFAC4, 'M', u'醙'), | ||
| 4374 | ] | ||
| 4375 | |||
| 4376 | def _seg_42(): | ||
| 4377 | return [ | ||
| 4378 | (0xFAC5, 'M', u'鉶'), | ||
| 4379 | (0xFAC6, 'M', u'陼'), | ||
| 4380 | (0xFAC7, 'M', u'難'), | ||
| 4381 | (0xFAC8, 'M', u'靖'), | ||
| 4382 | (0xFAC9, 'M', u'韛'), | ||
| 4383 | (0xFACA, 'M', u'響'), | ||
| 4384 | (0xFACB, 'M', u'頋'), | ||
| 4385 | (0xFACC, 'M', u'頻'), | ||
| 4386 | (0xFACD, 'M', u'鬒'), | ||
| 4387 | (0xFACE, 'M', u'龜'), | ||
| 4388 | (0xFACF, 'M', u'𢡊'), | ||
| 4389 | (0xFAD0, 'M', u'𢡄'), | ||
| 4390 | (0xFAD1, 'M', u'𣏕'), | ||
| 4391 | (0xFAD2, 'M', u'㮝'), | ||
| 4392 | (0xFAD3, 'M', u'䀘'), | ||
| 4393 | (0xFAD4, 'M', u'䀹'), | ||
| 4394 | (0xFAD5, 'M', u'𥉉'), | ||
| 4395 | (0xFAD6, 'M', u'𥳐'), | ||
| 4396 | (0xFAD7, 'M', u'𧻓'), | ||
| 4397 | (0xFAD8, 'M', u'齃'), | ||
| 4398 | (0xFAD9, 'M', u'龎'), | ||
| 4399 | (0xFADA, 'X'), | ||
| 4400 | (0xFB00, 'M', u'ff'), | ||
| 4401 | (0xFB01, 'M', u'fi'), | ||
| 4402 | (0xFB02, 'M', u'fl'), | ||
| 4403 | (0xFB03, 'M', u'ffi'), | ||
| 4404 | (0xFB04, 'M', u'ffl'), | ||
| 4405 | (0xFB05, 'M', u'st'), | ||
| 4406 | (0xFB07, 'X'), | ||
| 4407 | (0xFB13, 'M', u'մն'), | ||
| 4408 | (0xFB14, 'M', u'մե'), | ||
| 4409 | (0xFB15, 'M', u'մի'), | ||
| 4410 | (0xFB16, 'M', u'վն'), | ||
| 4411 | (0xFB17, 'M', u'մխ'), | ||
| 4412 | (0xFB18, 'X'), | ||
| 4413 | (0xFB1D, 'M', u'יִ'), | ||
| 4414 | (0xFB1E, 'V'), | ||
| 4415 | (0xFB1F, 'M', u'ײַ'), | ||
| 4416 | (0xFB20, 'M', u'ע'), | ||
| 4417 | (0xFB21, 'M', u'א'), | ||
| 4418 | (0xFB22, 'M', u'ד'), | ||
| 4419 | (0xFB23, 'M', u'ה'), | ||
| 4420 | (0xFB24, 'M', u'כ'), | ||
| 4421 | (0xFB25, 'M', u'ל'), | ||
| 4422 | (0xFB26, 'M', u'ם'), | ||
| 4423 | (0xFB27, 'M', u'ר'), | ||
| 4424 | (0xFB28, 'M', u'ת'), | ||
| 4425 | (0xFB29, '3', u'+'), | ||
| 4426 | (0xFB2A, 'M', u'שׁ'), | ||
| 4427 | (0xFB2B, 'M', u'שׂ'), | ||
| 4428 | (0xFB2C, 'M', u'שּׁ'), | ||
| 4429 | (0xFB2D, 'M', u'שּׂ'), | ||
| 4430 | (0xFB2E, 'M', u'אַ'), | ||
| 4431 | (0xFB2F, 'M', u'אָ'), | ||
| 4432 | (0xFB30, 'M', u'אּ'), | ||
| 4433 | (0xFB31, 'M', u'בּ'), | ||
| 4434 | (0xFB32, 'M', u'גּ'), | ||
| 4435 | (0xFB33, 'M', u'דּ'), | ||
| 4436 | (0xFB34, 'M', u'הּ'), | ||
| 4437 | (0xFB35, 'M', u'וּ'), | ||
| 4438 | (0xFB36, 'M', u'זּ'), | ||
| 4439 | (0xFB37, 'X'), | ||
| 4440 | (0xFB38, 'M', u'טּ'), | ||
| 4441 | (0xFB39, 'M', u'יּ'), | ||
| 4442 | (0xFB3A, 'M', u'ךּ'), | ||
| 4443 | (0xFB3B, 'M', u'כּ'), | ||
| 4444 | (0xFB3C, 'M', u'לּ'), | ||
| 4445 | (0xFB3D, 'X'), | ||
| 4446 | (0xFB3E, 'M', u'מּ'), | ||
| 4447 | (0xFB3F, 'X'), | ||
| 4448 | (0xFB40, 'M', u'נּ'), | ||
| 4449 | (0xFB41, 'M', u'סּ'), | ||
| 4450 | (0xFB42, 'X'), | ||
| 4451 | (0xFB43, 'M', u'ףּ'), | ||
| 4452 | (0xFB44, 'M', u'פּ'), | ||
| 4453 | (0xFB45, 'X'), | ||
| 4454 | (0xFB46, 'M', u'צּ'), | ||
| 4455 | (0xFB47, 'M', u'קּ'), | ||
| 4456 | (0xFB48, 'M', u'רּ'), | ||
| 4457 | (0xFB49, 'M', u'שּ'), | ||
| 4458 | (0xFB4A, 'M', u'תּ'), | ||
| 4459 | (0xFB4B, 'M', u'וֹ'), | ||
| 4460 | (0xFB4C, 'M', u'בֿ'), | ||
| 4461 | (0xFB4D, 'M', u'כֿ'), | ||
| 4462 | (0xFB4E, 'M', u'פֿ'), | ||
| 4463 | (0xFB4F, 'M', u'אל'), | ||
| 4464 | (0xFB50, 'M', u'ٱ'), | ||
| 4465 | (0xFB52, 'M', u'ٻ'), | ||
| 4466 | (0xFB56, 'M', u'پ'), | ||
| 4467 | (0xFB5A, 'M', u'ڀ'), | ||
| 4468 | (0xFB5E, 'M', u'ٺ'), | ||
| 4469 | (0xFB62, 'M', u'ٿ'), | ||
| 4470 | (0xFB66, 'M', u'ٹ'), | ||
| 4471 | (0xFB6A, 'M', u'ڤ'), | ||
| 4472 | (0xFB6E, 'M', u'ڦ'), | ||
| 4473 | (0xFB72, 'M', u'ڄ'), | ||
| 4474 | (0xFB76, 'M', u'ڃ'), | ||
| 4475 | (0xFB7A, 'M', u'چ'), | ||
| 4476 | (0xFB7E, 'M', u'ڇ'), | ||
| 4477 | (0xFB82, 'M', u'ڍ'), | ||
| 4478 | ] | ||
| 4479 | |||
| 4480 | def _seg_43(): | ||
| 4481 | return [ | ||
| 4482 | (0xFB84, 'M', u'ڌ'), | ||
| 4483 | (0xFB86, 'M', u'ڎ'), | ||
| 4484 | (0xFB88, 'M', u'ڈ'), | ||
| 4485 | (0xFB8A, 'M', u'ژ'), | ||
| 4486 | (0xFB8C, 'M', u'ڑ'), | ||
| 4487 | (0xFB8E, 'M', u'ک'), | ||
| 4488 | (0xFB92, 'M', u'گ'), | ||
| 4489 | (0xFB96, 'M', u'ڳ'), | ||
| 4490 | (0xFB9A, 'M', u'ڱ'), | ||
| 4491 | (0xFB9E, 'M', u'ں'), | ||
| 4492 | (0xFBA0, 'M', u'ڻ'), | ||
| 4493 | (0xFBA4, 'M', u'ۀ'), | ||
| 4494 | (0xFBA6, 'M', u'ہ'), | ||
| 4495 | (0xFBAA, 'M', u'ھ'), | ||
| 4496 | (0xFBAE, 'M', u'ے'), | ||
| 4497 | (0xFBB0, 'M', u'ۓ'), | ||
| 4498 | (0xFBB2, 'V'), | ||
| 4499 | (0xFBC2, 'X'), | ||
| 4500 | (0xFBD3, 'M', u'ڭ'), | ||
| 4501 | (0xFBD7, 'M', u'ۇ'), | ||
| 4502 | (0xFBD9, 'M', u'ۆ'), | ||
| 4503 | (0xFBDB, 'M', u'ۈ'), | ||
| 4504 | (0xFBDD, 'M', u'ۇٴ'), | ||
| 4505 | (0xFBDE, 'M', u'ۋ'), | ||
| 4506 | (0xFBE0, 'M', u'ۅ'), | ||
| 4507 | (0xFBE2, 'M', u'ۉ'), | ||
| 4508 | (0xFBE4, 'M', u'ې'), | ||
| 4509 | (0xFBE8, 'M', u'ى'), | ||
| 4510 | (0xFBEA, 'M', u'ئا'), | ||
| 4511 | (0xFBEC, 'M', u'ئە'), | ||
| 4512 | (0xFBEE, 'M', u'ئو'), | ||
| 4513 | (0xFBF0, 'M', u'ئۇ'), | ||
| 4514 | (0xFBF2, 'M', u'ئۆ'), | ||
| 4515 | (0xFBF4, 'M', u'ئۈ'), | ||
| 4516 | (0xFBF6, 'M', u'ئې'), | ||
| 4517 | (0xFBF9, 'M', u'ئى'), | ||
| 4518 | (0xFBFC, 'M', u'ی'), | ||
| 4519 | (0xFC00, 'M', u'ئج'), | ||
| 4520 | (0xFC01, 'M', u'ئح'), | ||
| 4521 | (0xFC02, 'M', u'ئم'), | ||
| 4522 | (0xFC03, 'M', u'ئى'), | ||
| 4523 | (0xFC04, 'M', u'ئي'), | ||
| 4524 | (0xFC05, 'M', u'بج'), | ||
| 4525 | (0xFC06, 'M', u'بح'), | ||
| 4526 | (0xFC07, 'M', u'بخ'), | ||
| 4527 | (0xFC08, 'M', u'بم'), | ||
| 4528 | (0xFC09, 'M', u'بى'), | ||
| 4529 | (0xFC0A, 'M', u'بي'), | ||
| 4530 | (0xFC0B, 'M', u'تج'), | ||
| 4531 | (0xFC0C, 'M', u'تح'), | ||
| 4532 | (0xFC0D, 'M', u'تخ'), | ||
| 4533 | (0xFC0E, 'M', u'تم'), | ||
| 4534 | (0xFC0F, 'M', u'تى'), | ||
| 4535 | (0xFC10, 'M', u'تي'), | ||
| 4536 | (0xFC11, 'M', u'ثج'), | ||
| 4537 | (0xFC12, 'M', u'ثم'), | ||
| 4538 | (0xFC13, 'M', u'ثى'), | ||
| 4539 | (0xFC14, 'M', u'ثي'), | ||
| 4540 | (0xFC15, 'M', u'جح'), | ||
| 4541 | (0xFC16, 'M', u'جم'), | ||
| 4542 | (0xFC17, 'M', u'حج'), | ||
| 4543 | (0xFC18, 'M', u'حم'), | ||
| 4544 | (0xFC19, 'M', u'خج'), | ||
| 4545 | (0xFC1A, 'M', u'خح'), | ||
| 4546 | (0xFC1B, 'M', u'خم'), | ||
| 4547 | (0xFC1C, 'M', u'سج'), | ||
| 4548 | (0xFC1D, 'M', u'سح'), | ||
| 4549 | (0xFC1E, 'M', u'سخ'), | ||
| 4550 | (0xFC1F, 'M', u'سم'), | ||
| 4551 | (0xFC20, 'M', u'صح'), | ||
| 4552 | (0xFC21, 'M', u'صم'), | ||
| 4553 | (0xFC22, 'M', u'ضج'), | ||
| 4554 | (0xFC23, 'M', u'ضح'), | ||
| 4555 | (0xFC24, 'M', u'ضخ'), | ||
| 4556 | (0xFC25, 'M', u'ضم'), | ||
| 4557 | (0xFC26, 'M', u'طح'), | ||
| 4558 | (0xFC27, 'M', u'طم'), | ||
| 4559 | (0xFC28, 'M', u'ظم'), | ||
| 4560 | (0xFC29, 'M', u'عج'), | ||
| 4561 | (0xFC2A, 'M', u'عم'), | ||
| 4562 | (0xFC2B, 'M', u'غج'), | ||
| 4563 | (0xFC2C, 'M', u'غم'), | ||
| 4564 | (0xFC2D, 'M', u'فج'), | ||
| 4565 | (0xFC2E, 'M', u'فح'), | ||
| 4566 | (0xFC2F, 'M', u'فخ'), | ||
| 4567 | (0xFC30, 'M', u'فم'), | ||
| 4568 | (0xFC31, 'M', u'فى'), | ||
| 4569 | (0xFC32, 'M', u'في'), | ||
| 4570 | (0xFC33, 'M', u'قح'), | ||
| 4571 | (0xFC34, 'M', u'قم'), | ||
| 4572 | (0xFC35, 'M', u'قى'), | ||
| 4573 | (0xFC36, 'M', u'قي'), | ||
| 4574 | (0xFC37, 'M', u'كا'), | ||
| 4575 | (0xFC38, 'M', u'كج'), | ||
| 4576 | (0xFC39, 'M', u'كح'), | ||
| 4577 | (0xFC3A, 'M', u'كخ'), | ||
| 4578 | (0xFC3B, 'M', u'كل'), | ||
| 4579 | (0xFC3C, 'M', u'كم'), | ||
| 4580 | (0xFC3D, 'M', u'كى'), | ||
| 4581 | (0xFC3E, 'M', u'كي'), | ||
| 4582 | ] | ||
| 4583 | |||
| 4584 | def _seg_44(): | ||
| 4585 | return [ | ||
| 4586 | (0xFC3F, 'M', u'لج'), | ||
| 4587 | (0xFC40, 'M', u'لح'), | ||
| 4588 | (0xFC41, 'M', u'لخ'), | ||
| 4589 | (0xFC42, 'M', u'لم'), | ||
| 4590 | (0xFC43, 'M', u'لى'), | ||
| 4591 | (0xFC44, 'M', u'لي'), | ||
| 4592 | (0xFC45, 'M', u'مج'), | ||
| 4593 | (0xFC46, 'M', u'مح'), | ||
| 4594 | (0xFC47, 'M', u'مخ'), | ||
| 4595 | (0xFC48, 'M', u'مم'), | ||
| 4596 | (0xFC49, 'M', u'مى'), | ||
| 4597 | (0xFC4A, 'M', u'مي'), | ||
| 4598 | (0xFC4B, 'M', u'نج'), | ||
| 4599 | (0xFC4C, 'M', u'نح'), | ||
| 4600 | (0xFC4D, 'M', u'نخ'), | ||
| 4601 | (0xFC4E, 'M', u'نم'), | ||
| 4602 | (0xFC4F, 'M', u'نى'), | ||
| 4603 | (0xFC50, 'M', u'ني'), | ||
| 4604 | (0xFC51, 'M', u'هج'), | ||
| 4605 | (0xFC52, 'M', u'هم'), | ||
| 4606 | (0xFC53, 'M', u'هى'), | ||
| 4607 | (0xFC54, 'M', u'هي'), | ||
| 4608 | (0xFC55, 'M', u'يج'), | ||
| 4609 | (0xFC56, 'M', u'يح'), | ||
| 4610 | (0xFC57, 'M', u'يخ'), | ||
| 4611 | (0xFC58, 'M', u'يم'), | ||
| 4612 | (0xFC59, 'M', u'يى'), | ||
| 4613 | (0xFC5A, 'M', u'يي'), | ||
| 4614 | (0xFC5B, 'M', u'ذٰ'), | ||
| 4615 | (0xFC5C, 'M', u'رٰ'), | ||
| 4616 | (0xFC5D, 'M', u'ىٰ'), | ||
| 4617 | (0xFC5E, '3', u' ٌّ'), | ||
| 4618 | (0xFC5F, '3', u' ٍّ'), | ||
| 4619 | (0xFC60, '3', u' َّ'), | ||
| 4620 | (0xFC61, '3', u' ُّ'), | ||
| 4621 | (0xFC62, '3', u' ِّ'), | ||
| 4622 | (0xFC63, '3', u' ّٰ'), | ||
| 4623 | (0xFC64, 'M', u'ئر'), | ||
| 4624 | (0xFC65, 'M', u'ئز'), | ||
| 4625 | (0xFC66, 'M', u'ئم'), | ||
| 4626 | (0xFC67, 'M', u'ئن'), | ||
| 4627 | (0xFC68, 'M', u'ئى'), | ||
| 4628 | (0xFC69, 'M', u'ئي'), | ||
| 4629 | (0xFC6A, 'M', u'بر'), | ||
| 4630 | (0xFC6B, 'M', u'بز'), | ||
| 4631 | (0xFC6C, 'M', u'بم'), | ||
| 4632 | (0xFC6D, 'M', u'بن'), | ||
| 4633 | (0xFC6E, 'M', u'بى'), | ||
| 4634 | (0xFC6F, 'M', u'بي'), | ||
| 4635 | (0xFC70, 'M', u'تر'), | ||
| 4636 | (0xFC71, 'M', u'تز'), | ||
| 4637 | (0xFC72, 'M', u'تم'), | ||
| 4638 | (0xFC73, 'M', u'تن'), | ||
| 4639 | (0xFC74, 'M', u'تى'), | ||
| 4640 | (0xFC75, 'M', u'تي'), | ||
| 4641 | (0xFC76, 'M', u'ثر'), | ||
| 4642 | (0xFC77, 'M', u'ثز'), | ||
| 4643 | (0xFC78, 'M', u'ثم'), | ||
| 4644 | (0xFC79, 'M', u'ثن'), | ||
| 4645 | (0xFC7A, 'M', u'ثى'), | ||
| 4646 | (0xFC7B, 'M', u'ثي'), | ||
| 4647 | (0xFC7C, 'M', u'فى'), | ||
| 4648 | (0xFC7D, 'M', u'في'), | ||
| 4649 | (0xFC7E, 'M', u'قى'), | ||
| 4650 | (0xFC7F, 'M', u'قي'), | ||
| 4651 | (0xFC80, 'M', u'كا'), | ||
| 4652 | (0xFC81, 'M', u'كل'), | ||
| 4653 | (0xFC82, 'M', u'كم'), | ||
| 4654 | (0xFC83, 'M', u'كى'), | ||
| 4655 | (0xFC84, 'M', u'كي'), | ||
| 4656 | (0xFC85, 'M', u'لم'), | ||
| 4657 | (0xFC86, 'M', u'لى'), | ||
| 4658 | (0xFC87, 'M', u'لي'), | ||
| 4659 | (0xFC88, 'M', u'ما'), | ||
| 4660 | (0xFC89, 'M', u'مم'), | ||
| 4661 | (0xFC8A, 'M', u'نر'), | ||
| 4662 | (0xFC8B, 'M', u'نز'), | ||
| 4663 | (0xFC8C, 'M', u'نم'), | ||
| 4664 | (0xFC8D, 'M', u'نن'), | ||
| 4665 | (0xFC8E, 'M', u'نى'), | ||
| 4666 | (0xFC8F, 'M', u'ني'), | ||
| 4667 | (0xFC90, 'M', u'ىٰ'), | ||
| 4668 | (0xFC91, 'M', u'ير'), | ||
| 4669 | (0xFC92, 'M', u'يز'), | ||
| 4670 | (0xFC93, 'M', u'يم'), | ||
| 4671 | (0xFC94, 'M', u'ين'), | ||
| 4672 | (0xFC95, 'M', u'يى'), | ||
| 4673 | (0xFC96, 'M', u'يي'), | ||
| 4674 | (0xFC97, 'M', u'ئج'), | ||
| 4675 | (0xFC98, 'M', u'ئح'), | ||
| 4676 | (0xFC99, 'M', u'ئخ'), | ||
| 4677 | (0xFC9A, 'M', u'ئم'), | ||
| 4678 | (0xFC9B, 'M', u'ئه'), | ||
| 4679 | (0xFC9C, 'M', u'بج'), | ||
| 4680 | (0xFC9D, 'M', u'بح'), | ||
| 4681 | (0xFC9E, 'M', u'بخ'), | ||
| 4682 | (0xFC9F, 'M', u'بم'), | ||
| 4683 | (0xFCA0, 'M', u'به'), | ||
| 4684 | (0xFCA1, 'M', u'تج'), | ||
| 4685 | (0xFCA2, 'M', u'تح'), | ||
| 4686 | ] | ||
| 4687 | |||
| 4688 | def _seg_45(): | ||
| 4689 | return [ | ||
| 4690 | (0xFCA3, 'M', u'تخ'), | ||
| 4691 | (0xFCA4, 'M', u'تم'), | ||
| 4692 | (0xFCA5, 'M', u'ته'), | ||
| 4693 | (0xFCA6, 'M', u'ثم'), | ||
| 4694 | (0xFCA7, 'M', u'جح'), | ||
| 4695 | (0xFCA8, 'M', u'جم'), | ||
| 4696 | (0xFCA9, 'M', u'حج'), | ||
| 4697 | (0xFCAA, 'M', u'حم'), | ||
| 4698 | (0xFCAB, 'M', u'خج'), | ||
| 4699 | (0xFCAC, 'M', u'خم'), | ||
| 4700 | (0xFCAD, 'M', u'سج'), | ||
| 4701 | (0xFCAE, 'M', u'سح'), | ||
| 4702 | (0xFCAF, 'M', u'سخ'), | ||
| 4703 | (0xFCB0, 'M', u'سم'), | ||
| 4704 | (0xFCB1, 'M', u'صح'), | ||
| 4705 | (0xFCB2, 'M', u'صخ'), | ||
| 4706 | (0xFCB3, 'M', u'صم'), | ||
| 4707 | (0xFCB4, 'M', u'ضج'), | ||
| 4708 | (0xFCB5, 'M', u'ضح'), | ||
| 4709 | (0xFCB6, 'M', u'ضخ'), | ||
| 4710 | (0xFCB7, 'M', u'ضم'), | ||
| 4711 | (0xFCB8, 'M', u'طح'), | ||
| 4712 | (0xFCB9, 'M', u'ظم'), | ||
| 4713 | (0xFCBA, 'M', u'عج'), | ||
| 4714 | (0xFCBB, 'M', u'عم'), | ||
| 4715 | (0xFCBC, 'M', u'غج'), | ||
| 4716 | (0xFCBD, 'M', u'غم'), | ||
| 4717 | (0xFCBE, 'M', u'فج'), | ||
| 4718 | (0xFCBF, 'M', u'فح'), | ||
| 4719 | (0xFCC0, 'M', u'فخ'), | ||
| 4720 | (0xFCC1, 'M', u'فم'), | ||
| 4721 | (0xFCC2, 'M', u'قح'), | ||
| 4722 | (0xFCC3, 'M', u'قم'), | ||
| 4723 | (0xFCC4, 'M', u'كج'), | ||
| 4724 | (0xFCC5, 'M', u'كح'), | ||
| 4725 | (0xFCC6, 'M', u'كخ'), | ||
| 4726 | (0xFCC7, 'M', u'كل'), | ||
| 4727 | (0xFCC8, 'M', u'كم'), | ||
| 4728 | (0xFCC9, 'M', u'لج'), | ||
| 4729 | (0xFCCA, 'M', u'لح'), | ||
| 4730 | (0xFCCB, 'M', u'لخ'), | ||
| 4731 | (0xFCCC, 'M', u'لم'), | ||
| 4732 | (0xFCCD, 'M', u'له'), | ||
| 4733 | (0xFCCE, 'M', u'مج'), | ||
| 4734 | (0xFCCF, 'M', u'مح'), | ||
| 4735 | (0xFCD0, 'M', u'مخ'), | ||
| 4736 | (0xFCD1, 'M', u'مم'), | ||
| 4737 | (0xFCD2, 'M', u'نج'), | ||
| 4738 | (0xFCD3, 'M', u'نح'), | ||
| 4739 | (0xFCD4, 'M', u'نخ'), | ||
| 4740 | (0xFCD5, 'M', u'نم'), | ||
| 4741 | (0xFCD6, 'M', u'نه'), | ||
| 4742 | (0xFCD7, 'M', u'هج'), | ||
| 4743 | (0xFCD8, 'M', u'هم'), | ||
| 4744 | (0xFCD9, 'M', u'هٰ'), | ||
| 4745 | (0xFCDA, 'M', u'يج'), | ||
| 4746 | (0xFCDB, 'M', u'يح'), | ||
| 4747 | (0xFCDC, 'M', u'يخ'), | ||
| 4748 | (0xFCDD, 'M', u'يم'), | ||
| 4749 | (0xFCDE, 'M', u'يه'), | ||
| 4750 | (0xFCDF, 'M', u'ئم'), | ||
| 4751 | (0xFCE0, 'M', u'ئه'), | ||
| 4752 | (0xFCE1, 'M', u'بم'), | ||
| 4753 | (0xFCE2, 'M', u'به'), | ||
| 4754 | (0xFCE3, 'M', u'تم'), | ||
| 4755 | (0xFCE4, 'M', u'ته'), | ||
| 4756 | (0xFCE5, 'M', u'ثم'), | ||
| 4757 | (0xFCE6, 'M', u'ثه'), | ||
| 4758 | (0xFCE7, 'M', u'سم'), | ||
| 4759 | (0xFCE8, 'M', u'سه'), | ||
| 4760 | (0xFCE9, 'M', u'شم'), | ||
| 4761 | (0xFCEA, 'M', u'شه'), | ||
| 4762 | (0xFCEB, 'M', u'كل'), | ||
| 4763 | (0xFCEC, 'M', u'كم'), | ||
| 4764 | (0xFCED, 'M', u'لم'), | ||
| 4765 | (0xFCEE, 'M', u'نم'), | ||
| 4766 | (0xFCEF, 'M', u'نه'), | ||
| 4767 | (0xFCF0, 'M', u'يم'), | ||
| 4768 | (0xFCF1, 'M', u'يه'), | ||
| 4769 | (0xFCF2, 'M', u'ـَّ'), | ||
| 4770 | (0xFCF3, 'M', u'ـُّ'), | ||
| 4771 | (0xFCF4, 'M', u'ـِّ'), | ||
| 4772 | (0xFCF5, 'M', u'طى'), | ||
| 4773 | (0xFCF6, 'M', u'طي'), | ||
| 4774 | (0xFCF7, 'M', u'عى'), | ||
| 4775 | (0xFCF8, 'M', u'عي'), | ||
| 4776 | (0xFCF9, 'M', u'غى'), | ||
| 4777 | (0xFCFA, 'M', u'غي'), | ||
| 4778 | (0xFCFB, 'M', u'سى'), | ||
| 4779 | (0xFCFC, 'M', u'سي'), | ||
| 4780 | (0xFCFD, 'M', u'شى'), | ||
| 4781 | (0xFCFE, 'M', u'شي'), | ||
| 4782 | (0xFCFF, 'M', u'حى'), | ||
| 4783 | (0xFD00, 'M', u'حي'), | ||
| 4784 | (0xFD01, 'M', u'جى'), | ||
| 4785 | (0xFD02, 'M', u'جي'), | ||
| 4786 | (0xFD03, 'M', u'خى'), | ||
| 4787 | (0xFD04, 'M', u'خي'), | ||
| 4788 | (0xFD05, 'M', u'صى'), | ||
| 4789 | (0xFD06, 'M', u'صي'), | ||
| 4790 | ] | ||
| 4791 | |||
| 4792 | def _seg_46(): | ||
| 4793 | return [ | ||
| 4794 | (0xFD07, 'M', u'ضى'), | ||
| 4795 | (0xFD08, 'M', u'ضي'), | ||
| 4796 | (0xFD09, 'M', u'شج'), | ||
| 4797 | (0xFD0A, 'M', u'شح'), | ||
| 4798 | (0xFD0B, 'M', u'شخ'), | ||
| 4799 | (0xFD0C, 'M', u'شم'), | ||
| 4800 | (0xFD0D, 'M', u'شر'), | ||
| 4801 | (0xFD0E, 'M', u'سر'), | ||
| 4802 | (0xFD0F, 'M', u'صر'), | ||
| 4803 | (0xFD10, 'M', u'ضر'), | ||
| 4804 | (0xFD11, 'M', u'طى'), | ||
| 4805 | (0xFD12, 'M', u'طي'), | ||
| 4806 | (0xFD13, 'M', u'عى'), | ||
| 4807 | (0xFD14, 'M', u'عي'), | ||
| 4808 | (0xFD15, 'M', u'غى'), | ||
| 4809 | (0xFD16, 'M', u'غي'), | ||
| 4810 | (0xFD17, 'M', u'سى'), | ||
| 4811 | (0xFD18, 'M', u'سي'), | ||
| 4812 | (0xFD19, 'M', u'شى'), | ||
| 4813 | (0xFD1A, 'M', u'شي'), | ||
| 4814 | (0xFD1B, 'M', u'حى'), | ||
| 4815 | (0xFD1C, 'M', u'حي'), | ||
| 4816 | (0xFD1D, 'M', u'جى'), | ||
| 4817 | (0xFD1E, 'M', u'جي'), | ||
| 4818 | (0xFD1F, 'M', u'خى'), | ||
| 4819 | (0xFD20, 'M', u'خي'), | ||
| 4820 | (0xFD21, 'M', u'صى'), | ||
| 4821 | (0xFD22, 'M', u'صي'), | ||
| 4822 | (0xFD23, 'M', u'ضى'), | ||
| 4823 | (0xFD24, 'M', u'ضي'), | ||
| 4824 | (0xFD25, 'M', u'شج'), | ||
| 4825 | (0xFD26, 'M', u'شح'), | ||
| 4826 | (0xFD27, 'M', u'شخ'), | ||
| 4827 | (0xFD28, 'M', u'شم'), | ||
| 4828 | (0xFD29, 'M', u'شر'), | ||
| 4829 | (0xFD2A, 'M', u'سر'), | ||
| 4830 | (0xFD2B, 'M', u'صر'), | ||
| 4831 | (0xFD2C, 'M', u'ضر'), | ||
| 4832 | (0xFD2D, 'M', u'شج'), | ||
| 4833 | (0xFD2E, 'M', u'شح'), | ||
| 4834 | (0xFD2F, 'M', u'شخ'), | ||
| 4835 | (0xFD30, 'M', u'شم'), | ||
| 4836 | (0xFD31, 'M', u'سه'), | ||
| 4837 | (0xFD32, 'M', u'شه'), | ||
| 4838 | (0xFD33, 'M', u'طم'), | ||
| 4839 | (0xFD34, 'M', u'سج'), | ||
| 4840 | (0xFD35, 'M', u'سح'), | ||
| 4841 | (0xFD36, 'M', u'سخ'), | ||
| 4842 | (0xFD37, 'M', u'شج'), | ||
| 4843 | (0xFD38, 'M', u'شح'), | ||
| 4844 | (0xFD39, 'M', u'شخ'), | ||
| 4845 | (0xFD3A, 'M', u'طم'), | ||
| 4846 | (0xFD3B, 'M', u'ظم'), | ||
| 4847 | (0xFD3C, 'M', u'اً'), | ||
| 4848 | (0xFD3E, 'V'), | ||
| 4849 | (0xFD40, 'X'), | ||
| 4850 | (0xFD50, 'M', u'تجم'), | ||
| 4851 | (0xFD51, 'M', u'تحج'), | ||
| 4852 | (0xFD53, 'M', u'تحم'), | ||
| 4853 | (0xFD54, 'M', u'تخم'), | ||
| 4854 | (0xFD55, 'M', u'تمج'), | ||
| 4855 | (0xFD56, 'M', u'تمح'), | ||
| 4856 | (0xFD57, 'M', u'تمخ'), | ||
| 4857 | (0xFD58, 'M', u'جمح'), | ||
| 4858 | (0xFD5A, 'M', u'حمي'), | ||
| 4859 | (0xFD5B, 'M', u'حمى'), | ||
| 4860 | (0xFD5C, 'M', u'سحج'), | ||
| 4861 | (0xFD5D, 'M', u'سجح'), | ||
| 4862 | (0xFD5E, 'M', u'سجى'), | ||
| 4863 | (0xFD5F, 'M', u'سمح'), | ||
| 4864 | (0xFD61, 'M', u'سمج'), | ||
| 4865 | (0xFD62, 'M', u'سمم'), | ||
| 4866 | (0xFD64, 'M', u'صحح'), | ||
| 4867 | (0xFD66, 'M', u'صمم'), | ||
| 4868 | (0xFD67, 'M', u'شحم'), | ||
| 4869 | (0xFD69, 'M', u'شجي'), | ||
| 4870 | (0xFD6A, 'M', u'شمخ'), | ||
| 4871 | (0xFD6C, 'M', u'شمم'), | ||
| 4872 | (0xFD6E, 'M', u'ضحى'), | ||
| 4873 | (0xFD6F, 'M', u'ضخم'), | ||
| 4874 | (0xFD71, 'M', u'طمح'), | ||
| 4875 | (0xFD73, 'M', u'طمم'), | ||
| 4876 | (0xFD74, 'M', u'طمي'), | ||
| 4877 | (0xFD75, 'M', u'عجم'), | ||
| 4878 | (0xFD76, 'M', u'عمم'), | ||
| 4879 | (0xFD78, 'M', u'عمى'), | ||
| 4880 | (0xFD79, 'M', u'غمم'), | ||
| 4881 | (0xFD7A, 'M', u'غمي'), | ||
| 4882 | (0xFD7B, 'M', u'غمى'), | ||
| 4883 | (0xFD7C, 'M', u'فخم'), | ||
| 4884 | (0xFD7E, 'M', u'قمح'), | ||
| 4885 | (0xFD7F, 'M', u'قمم'), | ||
| 4886 | (0xFD80, 'M', u'لحم'), | ||
| 4887 | (0xFD81, 'M', u'لحي'), | ||
| 4888 | (0xFD82, 'M', u'لحى'), | ||
| 4889 | (0xFD83, 'M', u'لجج'), | ||
| 4890 | (0xFD85, 'M', u'لخم'), | ||
| 4891 | (0xFD87, 'M', u'لمح'), | ||
| 4892 | (0xFD89, 'M', u'محج'), | ||
| 4893 | (0xFD8A, 'M', u'محم'), | ||
| 4894 | ] | ||
| 4895 | |||
| 4896 | def _seg_47(): | ||
| 4897 | return [ | ||
| 4898 | (0xFD8B, 'M', u'محي'), | ||
| 4899 | (0xFD8C, 'M', u'مجح'), | ||
| 4900 | (0xFD8D, 'M', u'مجم'), | ||
| 4901 | (0xFD8E, 'M', u'مخج'), | ||
| 4902 | (0xFD8F, 'M', u'مخم'), | ||
| 4903 | (0xFD90, 'X'), | ||
| 4904 | (0xFD92, 'M', u'مجخ'), | ||
| 4905 | (0xFD93, 'M', u'همج'), | ||
| 4906 | (0xFD94, 'M', u'همم'), | ||
| 4907 | (0xFD95, 'M', u'نحم'), | ||
| 4908 | (0xFD96, 'M', u'نحى'), | ||
| 4909 | (0xFD97, 'M', u'نجم'), | ||
| 4910 | (0xFD99, 'M', u'نجى'), | ||
| 4911 | (0xFD9A, 'M', u'نمي'), | ||
| 4912 | (0xFD9B, 'M', u'نمى'), | ||
| 4913 | (0xFD9C, 'M', u'يمم'), | ||
| 4914 | (0xFD9E, 'M', u'بخي'), | ||
| 4915 | (0xFD9F, 'M', u'تجي'), | ||
| 4916 | (0xFDA0, 'M', u'تجى'), | ||
| 4917 | (0xFDA1, 'M', u'تخي'), | ||
| 4918 | (0xFDA2, 'M', u'تخى'), | ||
| 4919 | (0xFDA3, 'M', u'تمي'), | ||
| 4920 | (0xFDA4, 'M', u'تمى'), | ||
| 4921 | (0xFDA5, 'M', u'جمي'), | ||
| 4922 | (0xFDA6, 'M', u'جحى'), | ||
| 4923 | (0xFDA7, 'M', u'جمى'), | ||
| 4924 | (0xFDA8, 'M', u'سخى'), | ||
| 4925 | (0xFDA9, 'M', u'صحي'), | ||
| 4926 | (0xFDAA, 'M', u'شحي'), | ||
| 4927 | (0xFDAB, 'M', u'ضحي'), | ||
| 4928 | (0xFDAC, 'M', u'لجي'), | ||
| 4929 | (0xFDAD, 'M', u'لمي'), | ||
| 4930 | (0xFDAE, 'M', u'يحي'), | ||
| 4931 | (0xFDAF, 'M', u'يجي'), | ||
| 4932 | (0xFDB0, 'M', u'يمي'), | ||
| 4933 | (0xFDB1, 'M', u'ممي'), | ||
| 4934 | (0xFDB2, 'M', u'قمي'), | ||
| 4935 | (0xFDB3, 'M', u'نحي'), | ||
| 4936 | (0xFDB4, 'M', u'قمح'), | ||
| 4937 | (0xFDB5, 'M', u'لحم'), | ||
| 4938 | (0xFDB6, 'M', u'عمي'), | ||
| 4939 | (0xFDB7, 'M', u'كمي'), | ||
| 4940 | (0xFDB8, 'M', u'نجح'), | ||
| 4941 | (0xFDB9, 'M', u'مخي'), | ||
| 4942 | (0xFDBA, 'M', u'لجم'), | ||
| 4943 | (0xFDBB, 'M', u'كمم'), | ||
| 4944 | (0xFDBC, 'M', u'لجم'), | ||
| 4945 | (0xFDBD, 'M', u'نجح'), | ||
| 4946 | (0xFDBE, 'M', u'جحي'), | ||
| 4947 | (0xFDBF, 'M', u'حجي'), | ||
| 4948 | (0xFDC0, 'M', u'مجي'), | ||
| 4949 | (0xFDC1, 'M', u'فمي'), | ||
| 4950 | (0xFDC2, 'M', u'بحي'), | ||
| 4951 | (0xFDC3, 'M', u'كمم'), | ||
| 4952 | (0xFDC4, 'M', u'عجم'), | ||
| 4953 | (0xFDC5, 'M', u'صمم'), | ||
| 4954 | (0xFDC6, 'M', u'سخي'), | ||
| 4955 | (0xFDC7, 'M', u'نجي'), | ||
| 4956 | (0xFDC8, 'X'), | ||
| 4957 | (0xFDF0, 'M', u'صلے'), | ||
| 4958 | (0xFDF1, 'M', u'قلے'), | ||
| 4959 | (0xFDF2, 'M', u'الله'), | ||
| 4960 | (0xFDF3, 'M', u'اكبر'), | ||
| 4961 | (0xFDF4, 'M', u'محمد'), | ||
| 4962 | (0xFDF5, 'M', u'صلعم'), | ||
| 4963 | (0xFDF6, 'M', u'رسول'), | ||
| 4964 | (0xFDF7, 'M', u'عليه'), | ||
| 4965 | (0xFDF8, 'M', u'وسلم'), | ||
| 4966 | (0xFDF9, 'M', u'صلى'), | ||
| 4967 | (0xFDFA, '3', u'صلى الله عليه وسلم'), | ||
| 4968 | (0xFDFB, '3', u'جل جلاله'), | ||
| 4969 | (0xFDFC, 'M', u'ریال'), | ||
| 4970 | (0xFDFD, 'V'), | ||
| 4971 | (0xFDFE, 'X'), | ||
| 4972 | (0xFE00, 'I'), | ||
| 4973 | (0xFE10, '3', u','), | ||
| 4974 | (0xFE11, 'M', u'、'), | ||
| 4975 | (0xFE12, 'X'), | ||
| 4976 | (0xFE13, '3', u':'), | ||
| 4977 | (0xFE14, '3', u';'), | ||
| 4978 | (0xFE15, '3', u'!'), | ||
| 4979 | (0xFE16, '3', u'?'), | ||
| 4980 | (0xFE17, 'M', u'〖'), | ||
| 4981 | (0xFE18, 'M', u'〗'), | ||
| 4982 | (0xFE19, 'X'), | ||
| 4983 | (0xFE20, 'V'), | ||
| 4984 | (0xFE27, 'X'), | ||
| 4985 | (0xFE31, 'M', u'—'), | ||
| 4986 | (0xFE32, 'M', u'–'), | ||
| 4987 | (0xFE33, '3', u'_'), | ||
| 4988 | (0xFE35, '3', u'('), | ||
| 4989 | (0xFE36, '3', u')'), | ||
| 4990 | (0xFE37, '3', u'{'), | ||
| 4991 | (0xFE38, '3', u'}'), | ||
| 4992 | (0xFE39, 'M', u'〔'), | ||
| 4993 | (0xFE3A, 'M', u'〕'), | ||
| 4994 | (0xFE3B, 'M', u'【'), | ||
| 4995 | (0xFE3C, 'M', u'】'), | ||
| 4996 | (0xFE3D, 'M', u'《'), | ||
| 4997 | (0xFE3E, 'M', u'》'), | ||
| 4998 | ] | ||
| 4999 | |||
| 5000 | def _seg_48(): | ||
| 5001 | return [ | ||
| 5002 | (0xFE3F, 'M', u'〈'), | ||
| 5003 | (0xFE40, 'M', u'〉'), | ||
| 5004 | (0xFE41, 'M', u'「'), | ||
| 5005 | (0xFE42, 'M', u'」'), | ||
| 5006 | (0xFE43, 'M', u'『'), | ||
| 5007 | (0xFE44, 'M', u'』'), | ||
| 5008 | (0xFE45, 'V'), | ||
| 5009 | (0xFE47, '3', u'['), | ||
| 5010 | (0xFE48, '3', u']'), | ||
| 5011 | (0xFE49, '3', u' ̅'), | ||
| 5012 | (0xFE4D, '3', u'_'), | ||
| 5013 | (0xFE50, '3', u','), | ||
| 5014 | (0xFE51, 'M', u'、'), | ||
| 5015 | (0xFE52, 'X'), | ||
| 5016 | (0xFE54, '3', u';'), | ||
| 5017 | (0xFE55, '3', u':'), | ||
| 5018 | (0xFE56, '3', u'?'), | ||
| 5019 | (0xFE57, '3', u'!'), | ||
| 5020 | (0xFE58, 'M', u'—'), | ||
| 5021 | (0xFE59, '3', u'('), | ||
| 5022 | (0xFE5A, '3', u')'), | ||
| 5023 | (0xFE5B, '3', u'{'), | ||
| 5024 | (0xFE5C, '3', u'}'), | ||
| 5025 | (0xFE5D, 'M', u'〔'), | ||
| 5026 | (0xFE5E, 'M', u'〕'), | ||
| 5027 | (0xFE5F, '3', u'#'), | ||
| 5028 | (0xFE60, '3', u'&'), | ||
| 5029 | (0xFE61, '3', u'*'), | ||
| 5030 | (0xFE62, '3', u'+'), | ||
| 5031 | (0xFE63, 'M', u'-'), | ||
| 5032 | (0xFE64, '3', u'<'), | ||
| 5033 | (0xFE65, '3', u'>'), | ||
| 5034 | (0xFE66, '3', u'='), | ||
| 5035 | (0xFE67, 'X'), | ||
| 5036 | (0xFE68, '3', u'\\'), | ||
| 5037 | (0xFE69, '3', u'$'), | ||
| 5038 | (0xFE6A, '3', u'%'), | ||
| 5039 | (0xFE6B, '3', u'@'), | ||
| 5040 | (0xFE6C, 'X'), | ||
| 5041 | (0xFE70, '3', u' ً'), | ||
| 5042 | (0xFE71, 'M', u'ـً'), | ||
| 5043 | (0xFE72, '3', u' ٌ'), | ||
| 5044 | (0xFE73, 'V'), | ||
| 5045 | (0xFE74, '3', u' ٍ'), | ||
| 5046 | (0xFE75, 'X'), | ||
| 5047 | (0xFE76, '3', u' َ'), | ||
| 5048 | (0xFE77, 'M', u'ـَ'), | ||
| 5049 | (0xFE78, '3', u' ُ'), | ||
| 5050 | (0xFE79, 'M', u'ـُ'), | ||
| 5051 | (0xFE7A, '3', u' ِ'), | ||
| 5052 | (0xFE7B, 'M', u'ـِ'), | ||
| 5053 | (0xFE7C, '3', u' ّ'), | ||
| 5054 | (0xFE7D, 'M', u'ـّ'), | ||
| 5055 | (0xFE7E, '3', u' ْ'), | ||
| 5056 | (0xFE7F, 'M', u'ـْ'), | ||
| 5057 | (0xFE80, 'M', u'ء'), | ||
| 5058 | (0xFE81, 'M', u'آ'), | ||
| 5059 | (0xFE83, 'M', u'أ'), | ||
| 5060 | (0xFE85, 'M', u'ؤ'), | ||
| 5061 | (0xFE87, 'M', u'إ'), | ||
| 5062 | (0xFE89, 'M', u'ئ'), | ||
| 5063 | (0xFE8D, 'M', u'ا'), | ||
| 5064 | (0xFE8F, 'M', u'ب'), | ||
| 5065 | (0xFE93, 'M', u'ة'), | ||
| 5066 | (0xFE95, 'M', u'ت'), | ||
| 5067 | (0xFE99, 'M', u'ث'), | ||
| 5068 | (0xFE9D, 'M', u'ج'), | ||
| 5069 | (0xFEA1, 'M', u'ح'), | ||
| 5070 | (0xFEA5, 'M', u'خ'), | ||
| 5071 | (0xFEA9, 'M', u'د'), | ||
| 5072 | (0xFEAB, 'M', u'ذ'), | ||
| 5073 | (0xFEAD, 'M', u'ر'), | ||
| 5074 | (0xFEAF, 'M', u'ز'), | ||
| 5075 | (0xFEB1, 'M', u'س'), | ||
| 5076 | (0xFEB5, 'M', u'ش'), | ||
| 5077 | (0xFEB9, 'M', u'ص'), | ||
| 5078 | (0xFEBD, 'M', u'ض'), | ||
| 5079 | (0xFEC1, 'M', u'ط'), | ||
| 5080 | (0xFEC5, 'M', u'ظ'), | ||
| 5081 | (0xFEC9, 'M', u'ع'), | ||
| 5082 | (0xFECD, 'M', u'غ'), | ||
| 5083 | (0xFED1, 'M', u'ف'), | ||
| 5084 | (0xFED5, 'M', u'ق'), | ||
| 5085 | (0xFED9, 'M', u'ك'), | ||
| 5086 | (0xFEDD, 'M', u'ل'), | ||
| 5087 | (0xFEE1, 'M', u'م'), | ||
| 5088 | (0xFEE5, 'M', u'ن'), | ||
| 5089 | (0xFEE9, 'M', u'ه'), | ||
| 5090 | (0xFEED, 'M', u'و'), | ||
| 5091 | (0xFEEF, 'M', u'ى'), | ||
| 5092 | (0xFEF1, 'M', u'ي'), | ||
| 5093 | (0xFEF5, 'M', u'لآ'), | ||
| 5094 | (0xFEF7, 'M', u'لأ'), | ||
| 5095 | (0xFEF9, 'M', u'لإ'), | ||
| 5096 | (0xFEFB, 'M', u'لا'), | ||
| 5097 | (0xFEFD, 'X'), | ||
| 5098 | (0xFEFF, 'I'), | ||
| 5099 | (0xFF00, 'X'), | ||
| 5100 | (0xFF01, '3', u'!'), | ||
| 5101 | (0xFF02, '3', u'"'), | ||
| 5102 | ] | ||
| 5103 | |||
| 5104 | def _seg_49(): | ||
| 5105 | return [ | ||
| 5106 | (0xFF03, '3', u'#'), | ||
| 5107 | (0xFF04, '3', u'$'), | ||
| 5108 | (0xFF05, '3', u'%'), | ||
| 5109 | (0xFF06, '3', u'&'), | ||
| 5110 | (0xFF07, '3', u'\''), | ||
| 5111 | (0xFF08, '3', u'('), | ||
| 5112 | (0xFF09, '3', u')'), | ||
| 5113 | (0xFF0A, '3', u'*'), | ||
| 5114 | (0xFF0B, '3', u'+'), | ||
| 5115 | (0xFF0C, '3', u','), | ||
| 5116 | (0xFF0D, 'M', u'-'), | ||
| 5117 | (0xFF0E, 'M', u'.'), | ||
| 5118 | (0xFF0F, '3', u'/'), | ||
| 5119 | (0xFF10, 'M', u'0'), | ||
| 5120 | (0xFF11, 'M', u'1'), | ||
| 5121 | (0xFF12, 'M', u'2'), | ||
| 5122 | (0xFF13, 'M', u'3'), | ||
| 5123 | (0xFF14, 'M', u'4'), | ||
| 5124 | (0xFF15, 'M', u'5'), | ||
| 5125 | (0xFF16, 'M', u'6'), | ||
| 5126 | (0xFF17, 'M', u'7'), | ||
| 5127 | (0xFF18, 'M', u'8'), | ||
| 5128 | (0xFF19, 'M', u'9'), | ||
| 5129 | (0xFF1A, '3', u':'), | ||
| 5130 | (0xFF1B, '3', u';'), | ||
| 5131 | (0xFF1C, '3', u'<'), | ||
| 5132 | (0xFF1D, '3', u'='), | ||
| 5133 | (0xFF1E, '3', u'>'), | ||
| 5134 | (0xFF1F, '3', u'?'), | ||
| 5135 | (0xFF20, '3', u'@'), | ||
| 5136 | (0xFF21, 'M', u'a'), | ||
| 5137 | (0xFF22, 'M', u'b'), | ||
| 5138 | (0xFF23, 'M', u'c'), | ||
| 5139 | (0xFF24, 'M', u'd'), | ||
| 5140 | (0xFF25, 'M', u'e'), | ||
| 5141 | (0xFF26, 'M', u'f'), | ||
| 5142 | (0xFF27, 'M', u'g'), | ||
| 5143 | (0xFF28, 'M', u'h'), | ||
| 5144 | (0xFF29, 'M', u'i'), | ||
| 5145 | (0xFF2A, 'M', u'j'), | ||
| 5146 | (0xFF2B, 'M', u'k'), | ||
| 5147 | (0xFF2C, 'M', u'l'), | ||
| 5148 | (0xFF2D, 'M', u'm'), | ||
| 5149 | (0xFF2E, 'M', u'n'), | ||
| 5150 | (0xFF2F, 'M', u'o'), | ||
| 5151 | (0xFF30, 'M', u'p'), | ||
| 5152 | (0xFF31, 'M', u'q'), | ||
| 5153 | (0xFF32, 'M', u'r'), | ||
| 5154 | (0xFF33, 'M', u's'), | ||
| 5155 | (0xFF34, 'M', u't'), | ||
| 5156 | (0xFF35, 'M', u'u'), | ||
| 5157 | (0xFF36, 'M', u'v'), | ||
| 5158 | (0xFF37, 'M', u'w'), | ||
| 5159 | (0xFF38, 'M', u'x'), | ||
| 5160 | (0xFF39, 'M', u'y'), | ||
| 5161 | (0xFF3A, 'M', u'z'), | ||
| 5162 | (0xFF3B, '3', u'['), | ||
| 5163 | (0xFF3C, '3', u'\\'), | ||
| 5164 | (0xFF3D, '3', u']'), | ||
| 5165 | (0xFF3E, '3', u'^'), | ||
| 5166 | (0xFF3F, '3', u'_'), | ||
| 5167 | (0xFF40, '3', u'`'), | ||
| 5168 | (0xFF41, 'M', u'a'), | ||
| 5169 | (0xFF42, 'M', u'b'), | ||
| 5170 | (0xFF43, 'M', u'c'), | ||
| 5171 | (0xFF44, 'M', u'd'), | ||
| 5172 | (0xFF45, 'M', u'e'), | ||
| 5173 | (0xFF46, 'M', u'f'), | ||
| 5174 | (0xFF47, 'M', u'g'), | ||
| 5175 | (0xFF48, 'M', u'h'), | ||
| 5176 | (0xFF49, 'M', u'i'), | ||
| 5177 | (0xFF4A, 'M', u'j'), | ||
| 5178 | (0xFF4B, 'M', u'k'), | ||
| 5179 | (0xFF4C, 'M', u'l'), | ||
| 5180 | (0xFF4D, 'M', u'm'), | ||
| 5181 | (0xFF4E, 'M', u'n'), | ||
| 5182 | (0xFF4F, 'M', u'o'), | ||
| 5183 | (0xFF50, 'M', u'p'), | ||
| 5184 | (0xFF51, 'M', u'q'), | ||
| 5185 | (0xFF52, 'M', u'r'), | ||
| 5186 | (0xFF53, 'M', u's'), | ||
| 5187 | (0xFF54, 'M', u't'), | ||
| 5188 | (0xFF55, 'M', u'u'), | ||
| 5189 | (0xFF56, 'M', u'v'), | ||
| 5190 | (0xFF57, 'M', u'w'), | ||
| 5191 | (0xFF58, 'M', u'x'), | ||
| 5192 | (0xFF59, 'M', u'y'), | ||
| 5193 | (0xFF5A, 'M', u'z'), | ||
| 5194 | (0xFF5B, '3', u'{'), | ||
| 5195 | (0xFF5C, '3', u'|'), | ||
| 5196 | (0xFF5D, '3', u'}'), | ||
| 5197 | (0xFF5E, '3', u'~'), | ||
| 5198 | (0xFF5F, 'M', u'⦅'), | ||
| 5199 | (0xFF60, 'M', u'⦆'), | ||
| 5200 | (0xFF61, 'M', u'.'), | ||
| 5201 | (0xFF62, 'M', u'「'), | ||
| 5202 | (0xFF63, 'M', u'」'), | ||
| 5203 | (0xFF64, 'M', u'、'), | ||
| 5204 | (0xFF65, 'M', u'・'), | ||
| 5205 | (0xFF66, 'M', u'ヲ'), | ||
| 5206 | ] | ||
| 5207 | |||
| 5208 | def _seg_50(): | ||
| 5209 | return [ | ||
| 5210 | (0xFF67, 'M', u'ァ'), | ||
| 5211 | (0xFF68, 'M', u'ィ'), | ||
| 5212 | (0xFF69, 'M', u'ゥ'), | ||
| 5213 | (0xFF6A, 'M', u'ェ'), | ||
| 5214 | (0xFF6B, 'M', u'ォ'), | ||
| 5215 | (0xFF6C, 'M', u'ャ'), | ||
| 5216 | (0xFF6D, 'M', u'ュ'), | ||
| 5217 | (0xFF6E, 'M', u'ョ'), | ||
| 5218 | (0xFF6F, 'M', u'ッ'), | ||
| 5219 | (0xFF70, 'M', u'ー'), | ||
| 5220 | (0xFF71, 'M', u'ア'), | ||
| 5221 | (0xFF72, 'M', u'イ'), | ||
| 5222 | (0xFF73, 'M', u'ウ'), | ||
| 5223 | (0xFF74, 'M', u'エ'), | ||
| 5224 | (0xFF75, 'M', u'オ'), | ||
| 5225 | (0xFF76, 'M', u'カ'), | ||
| 5226 | (0xFF77, 'M', u'キ'), | ||
| 5227 | (0xFF78, 'M', u'ク'), | ||
| 5228 | (0xFF79, 'M', u'ケ'), | ||
| 5229 | (0xFF7A, 'M', u'コ'), | ||
| 5230 | (0xFF7B, 'M', u'サ'), | ||
| 5231 | (0xFF7C, 'M', u'シ'), | ||
| 5232 | (0xFF7D, 'M', u'ス'), | ||
| 5233 | (0xFF7E, 'M', u'セ'), | ||
| 5234 | (0xFF7F, 'M', u'ソ'), | ||
| 5235 | (0xFF80, 'M', u'タ'), | ||
| 5236 | (0xFF81, 'M', u'チ'), | ||
| 5237 | (0xFF82, 'M', u'ツ'), | ||
| 5238 | (0xFF83, 'M', u'テ'), | ||
| 5239 | (0xFF84, 'M', u'ト'), | ||
| 5240 | (0xFF85, 'M', u'ナ'), | ||
| 5241 | (0xFF86, 'M', u'ニ'), | ||
| 5242 | (0xFF87, 'M', u'ヌ'), | ||
| 5243 | (0xFF88, 'M', u'ネ'), | ||
| 5244 | (0xFF89, 'M', u'ノ'), | ||
| 5245 | (0xFF8A, 'M', u'ハ'), | ||
| 5246 | (0xFF8B, 'M', u'ヒ'), | ||
| 5247 | (0xFF8C, 'M', u'フ'), | ||
| 5248 | (0xFF8D, 'M', u'ヘ'), | ||
| 5249 | (0xFF8E, 'M', u'ホ'), | ||
| 5250 | (0xFF8F, 'M', u'マ'), | ||
| 5251 | (0xFF90, 'M', u'ミ'), | ||
| 5252 | (0xFF91, 'M', u'ム'), | ||
| 5253 | (0xFF92, 'M', u'メ'), | ||
| 5254 | (0xFF93, 'M', u'モ'), | ||
| 5255 | (0xFF94, 'M', u'ヤ'), | ||
| 5256 | (0xFF95, 'M', u'ユ'), | ||
| 5257 | (0xFF96, 'M', u'ヨ'), | ||
| 5258 | (0xFF97, 'M', u'ラ'), | ||
| 5259 | (0xFF98, 'M', u'リ'), | ||
| 5260 | (0xFF99, 'M', u'ル'), | ||
| 5261 | (0xFF9A, 'M', u'レ'), | ||
| 5262 | (0xFF9B, 'M', u'ロ'), | ||
| 5263 | (0xFF9C, 'M', u'ワ'), | ||
| 5264 | (0xFF9D, 'M', u'ン'), | ||
| 5265 | (0xFF9E, 'M', u'゙'), | ||
| 5266 | (0xFF9F, 'M', u'゚'), | ||
| 5267 | (0xFFA0, 'X'), | ||
| 5268 | (0xFFA1, 'M', u'ᄀ'), | ||
| 5269 | (0xFFA2, 'M', u'ᄁ'), | ||
| 5270 | (0xFFA3, 'M', u'ᆪ'), | ||
| 5271 | (0xFFA4, 'M', u'ᄂ'), | ||
| 5272 | (0xFFA5, 'M', u'ᆬ'), | ||
| 5273 | (0xFFA6, 'M', u'ᆭ'), | ||
| 5274 | (0xFFA7, 'M', u'ᄃ'), | ||
| 5275 | (0xFFA8, 'M', u'ᄄ'), | ||
| 5276 | (0xFFA9, 'M', u'ᄅ'), | ||
| 5277 | (0xFFAA, 'M', u'ᆰ'), | ||
| 5278 | (0xFFAB, 'M', u'ᆱ'), | ||
| 5279 | (0xFFAC, 'M', u'ᆲ'), | ||
| 5280 | (0xFFAD, 'M', u'ᆳ'), | ||
| 5281 | (0xFFAE, 'M', u'ᆴ'), | ||
| 5282 | (0xFFAF, 'M', u'ᆵ'), | ||
| 5283 | (0xFFB0, 'M', u'ᄚ'), | ||
| 5284 | (0xFFB1, 'M', u'ᄆ'), | ||
| 5285 | (0xFFB2, 'M', u'ᄇ'), | ||
| 5286 | (0xFFB3, 'M', u'ᄈ'), | ||
| 5287 | (0xFFB4, 'M', u'ᄡ'), | ||
| 5288 | (0xFFB5, 'M', u'ᄉ'), | ||
| 5289 | (0xFFB6, 'M', u'ᄊ'), | ||
| 5290 | (0xFFB7, 'M', u'ᄋ'), | ||
| 5291 | (0xFFB8, 'M', u'ᄌ'), | ||
| 5292 | (0xFFB9, 'M', u'ᄍ'), | ||
| 5293 | (0xFFBA, 'M', u'ᄎ'), | ||
| 5294 | (0xFFBB, 'M', u'ᄏ'), | ||
| 5295 | (0xFFBC, 'M', u'ᄐ'), | ||
| 5296 | (0xFFBD, 'M', u'ᄑ'), | ||
| 5297 | (0xFFBE, 'M', u'ᄒ'), | ||
| 5298 | (0xFFBF, 'X'), | ||
| 5299 | (0xFFC2, 'M', u'ᅡ'), | ||
| 5300 | (0xFFC3, 'M', u'ᅢ'), | ||
| 5301 | (0xFFC4, 'M', u'ᅣ'), | ||
| 5302 | (0xFFC5, 'M', u'ᅤ'), | ||
| 5303 | (0xFFC6, 'M', u'ᅥ'), | ||
| 5304 | (0xFFC7, 'M', u'ᅦ'), | ||
| 5305 | (0xFFC8, 'X'), | ||
| 5306 | (0xFFCA, 'M', u'ᅧ'), | ||
| 5307 | (0xFFCB, 'M', u'ᅨ'), | ||
| 5308 | (0xFFCC, 'M', u'ᅩ'), | ||
| 5309 | (0xFFCD, 'M', u'ᅪ'), | ||
| 5310 | ] | ||
| 5311 | |||
| 5312 | def _seg_51(): | ||
| 5313 | return [ | ||
| 5314 | (0xFFCE, 'M', u'ᅫ'), | ||
| 5315 | (0xFFCF, 'M', u'ᅬ'), | ||
| 5316 | (0xFFD0, 'X'), | ||
| 5317 | (0xFFD2, 'M', u'ᅭ'), | ||
| 5318 | (0xFFD3, 'M', u'ᅮ'), | ||
| 5319 | (0xFFD4, 'M', u'ᅯ'), | ||
| 5320 | (0xFFD5, 'M', u'ᅰ'), | ||
| 5321 | (0xFFD6, 'M', u'ᅱ'), | ||
| 5322 | (0xFFD7, 'M', u'ᅲ'), | ||
| 5323 | (0xFFD8, 'X'), | ||
| 5324 | (0xFFDA, 'M', u'ᅳ'), | ||
| 5325 | (0xFFDB, 'M', u'ᅴ'), | ||
| 5326 | (0xFFDC, 'M', u'ᅵ'), | ||
| 5327 | (0xFFDD, 'X'), | ||
| 5328 | (0xFFE0, 'M', u'¢'), | ||
| 5329 | (0xFFE1, 'M', u'£'), | ||
| 5330 | (0xFFE2, 'M', u'¬'), | ||
| 5331 | (0xFFE3, '3', u' ̄'), | ||
| 5332 | (0xFFE4, 'M', u'¦'), | ||
| 5333 | (0xFFE5, 'M', u'¥'), | ||
| 5334 | (0xFFE6, 'M', u'₩'), | ||
| 5335 | (0xFFE7, 'X'), | ||
| 5336 | (0xFFE8, 'M', u'│'), | ||
| 5337 | (0xFFE9, 'M', u'←'), | ||
| 5338 | (0xFFEA, 'M', u'↑'), | ||
| 5339 | (0xFFEB, 'M', u'→'), | ||
| 5340 | (0xFFEC, 'M', u'↓'), | ||
| 5341 | (0xFFED, 'M', u'■'), | ||
| 5342 | (0xFFEE, 'M', u'○'), | ||
| 5343 | (0xFFEF, 'X'), | ||
| 5344 | (0x10000, 'V'), | ||
| 5345 | (0x1000C, 'X'), | ||
| 5346 | (0x1000D, 'V'), | ||
| 5347 | (0x10027, 'X'), | ||
| 5348 | (0x10028, 'V'), | ||
| 5349 | (0x1003B, 'X'), | ||
| 5350 | (0x1003C, 'V'), | ||
| 5351 | (0x1003E, 'X'), | ||
| 5352 | (0x1003F, 'V'), | ||
| 5353 | (0x1004E, 'X'), | ||
| 5354 | (0x10050, 'V'), | ||
| 5355 | (0x1005E, 'X'), | ||
| 5356 | (0x10080, 'V'), | ||
| 5357 | (0x100FB, 'X'), | ||
| 5358 | (0x10100, 'V'), | ||
| 5359 | (0x10103, 'X'), | ||
| 5360 | (0x10107, 'V'), | ||
| 5361 | (0x10134, 'X'), | ||
| 5362 | (0x10137, 'V'), | ||
| 5363 | (0x1018B, 'X'), | ||
| 5364 | (0x10190, 'V'), | ||
| 5365 | (0x1019C, 'X'), | ||
| 5366 | (0x101D0, 'V'), | ||
| 5367 | (0x101FE, 'X'), | ||
| 5368 | (0x10280, 'V'), | ||
| 5369 | (0x1029D, 'X'), | ||
| 5370 | (0x102A0, 'V'), | ||
| 5371 | (0x102D1, 'X'), | ||
| 5372 | (0x10300, 'V'), | ||
| 5373 | (0x1031F, 'X'), | ||
| 5374 | (0x10320, 'V'), | ||
| 5375 | (0x10324, 'X'), | ||
| 5376 | (0x10330, 'V'), | ||
| 5377 | (0x1034B, 'X'), | ||
| 5378 | (0x10380, 'V'), | ||
| 5379 | (0x1039E, 'X'), | ||
| 5380 | (0x1039F, 'V'), | ||
| 5381 | (0x103C4, 'X'), | ||
| 5382 | (0x103C8, 'V'), | ||
| 5383 | (0x103D6, 'X'), | ||
| 5384 | (0x10400, 'M', u'𐐨'), | ||
| 5385 | (0x10401, 'M', u'𐐩'), | ||
| 5386 | (0x10402, 'M', u'𐐪'), | ||
| 5387 | (0x10403, 'M', u'𐐫'), | ||
| 5388 | (0x10404, 'M', u'𐐬'), | ||
| 5389 | (0x10405, 'M', u'𐐭'), | ||
| 5390 | (0x10406, 'M', u'𐐮'), | ||
| 5391 | (0x10407, 'M', u'𐐯'), | ||
| 5392 | (0x10408, 'M', u'𐐰'), | ||
| 5393 | (0x10409, 'M', u'𐐱'), | ||
| 5394 | (0x1040A, 'M', u'𐐲'), | ||
| 5395 | (0x1040B, 'M', u'𐐳'), | ||
| 5396 | (0x1040C, 'M', u'𐐴'), | ||
| 5397 | (0x1040D, 'M', u'𐐵'), | ||
| 5398 | (0x1040E, 'M', u'𐐶'), | ||
| 5399 | (0x1040F, 'M', u'𐐷'), | ||
| 5400 | (0x10410, 'M', u'𐐸'), | ||
| 5401 | (0x10411, 'M', u'𐐹'), | ||
| 5402 | (0x10412, 'M', u'𐐺'), | ||
| 5403 | (0x10413, 'M', u'𐐻'), | ||
| 5404 | (0x10414, 'M', u'𐐼'), | ||
| 5405 | (0x10415, 'M', u'𐐽'), | ||
| 5406 | (0x10416, 'M', u'𐐾'), | ||
| 5407 | (0x10417, 'M', u'𐐿'), | ||
| 5408 | (0x10418, 'M', u'𐑀'), | ||
| 5409 | (0x10419, 'M', u'𐑁'), | ||
| 5410 | (0x1041A, 'M', u'𐑂'), | ||
| 5411 | (0x1041B, 'M', u'𐑃'), | ||
| 5412 | (0x1041C, 'M', u'𐑄'), | ||
| 5413 | (0x1041D, 'M', u'𐑅'), | ||
| 5414 | ] | ||
| 5415 | |||
| 5416 | def _seg_52(): | ||
| 5417 | return [ | ||
| 5418 | (0x1041E, 'M', u'𐑆'), | ||
| 5419 | (0x1041F, 'M', u'𐑇'), | ||
| 5420 | (0x10420, 'M', u'𐑈'), | ||
| 5421 | (0x10421, 'M', u'𐑉'), | ||
| 5422 | (0x10422, 'M', u'𐑊'), | ||
| 5423 | (0x10423, 'M', u'𐑋'), | ||
| 5424 | (0x10424, 'M', u'𐑌'), | ||
| 5425 | (0x10425, 'M', u'𐑍'), | ||
| 5426 | (0x10426, 'M', u'𐑎'), | ||
| 5427 | (0x10427, 'M', u'𐑏'), | ||
| 5428 | (0x10428, 'V'), | ||
| 5429 | (0x1049E, 'X'), | ||
| 5430 | (0x104A0, 'V'), | ||
| 5431 | (0x104AA, 'X'), | ||
| 5432 | (0x10800, 'V'), | ||
| 5433 | (0x10806, 'X'), | ||
| 5434 | (0x10808, 'V'), | ||
| 5435 | (0x10809, 'X'), | ||
| 5436 | (0x1080A, 'V'), | ||
| 5437 | (0x10836, 'X'), | ||
| 5438 | (0x10837, 'V'), | ||
| 5439 | (0x10839, 'X'), | ||
| 5440 | (0x1083C, 'V'), | ||
| 5441 | (0x1083D, 'X'), | ||
| 5442 | (0x1083F, 'V'), | ||
| 5443 | (0x10856, 'X'), | ||
| 5444 | (0x10857, 'V'), | ||
| 5445 | (0x10860, 'X'), | ||
| 5446 | (0x10900, 'V'), | ||
| 5447 | (0x1091C, 'X'), | ||
| 5448 | (0x1091F, 'V'), | ||
| 5449 | (0x1093A, 'X'), | ||
| 5450 | (0x1093F, 'V'), | ||
| 5451 | (0x10940, 'X'), | ||
| 5452 | (0x10980, 'V'), | ||
| 5453 | (0x109B8, 'X'), | ||
| 5454 | (0x109BE, 'V'), | ||
| 5455 | (0x109C0, 'X'), | ||
| 5456 | (0x10A00, 'V'), | ||
| 5457 | (0x10A04, 'X'), | ||
| 5458 | (0x10A05, 'V'), | ||
| 5459 | (0x10A07, 'X'), | ||
| 5460 | (0x10A0C, 'V'), | ||
| 5461 | (0x10A14, 'X'), | ||
| 5462 | (0x10A15, 'V'), | ||
| 5463 | (0x10A18, 'X'), | ||
| 5464 | (0x10A19, 'V'), | ||
| 5465 | (0x10A34, 'X'), | ||
| 5466 | (0x10A38, 'V'), | ||
| 5467 | (0x10A3B, 'X'), | ||
| 5468 | (0x10A3F, 'V'), | ||
| 5469 | (0x10A48, 'X'), | ||
| 5470 | (0x10A50, 'V'), | ||
| 5471 | (0x10A59, 'X'), | ||
| 5472 | (0x10A60, 'V'), | ||
| 5473 | (0x10A80, 'X'), | ||
| 5474 | (0x10B00, 'V'), | ||
| 5475 | (0x10B36, 'X'), | ||
| 5476 | (0x10B39, 'V'), | ||
| 5477 | (0x10B56, 'X'), | ||
| 5478 | (0x10B58, 'V'), | ||
| 5479 | (0x10B73, 'X'), | ||
| 5480 | (0x10B78, 'V'), | ||
| 5481 | (0x10B80, 'X'), | ||
| 5482 | (0x10C00, 'V'), | ||
| 5483 | (0x10C49, 'X'), | ||
| 5484 | (0x10E60, 'V'), | ||
| 5485 | (0x10E7F, 'X'), | ||
| 5486 | (0x11000, 'V'), | ||
| 5487 | (0x1104E, 'X'), | ||
| 5488 | (0x11052, 'V'), | ||
| 5489 | (0x11070, 'X'), | ||
| 5490 | (0x11080, 'V'), | ||
| 5491 | (0x110BD, 'X'), | ||
| 5492 | (0x110BE, 'V'), | ||
| 5493 | (0x110C2, 'X'), | ||
| 5494 | (0x110D0, 'V'), | ||
| 5495 | (0x110E9, 'X'), | ||
| 5496 | (0x110F0, 'V'), | ||
| 5497 | (0x110FA, 'X'), | ||
| 5498 | (0x11100, 'V'), | ||
| 5499 | (0x11135, 'X'), | ||
| 5500 | (0x11136, 'V'), | ||
| 5501 | (0x11144, 'X'), | ||
| 5502 | (0x11180, 'V'), | ||
| 5503 | (0x111C9, 'X'), | ||
| 5504 | (0x111D0, 'V'), | ||
| 5505 | (0x111DA, 'X'), | ||
| 5506 | (0x11680, 'V'), | ||
| 5507 | (0x116B8, 'X'), | ||
| 5508 | (0x116C0, 'V'), | ||
| 5509 | (0x116CA, 'X'), | ||
| 5510 | (0x12000, 'V'), | ||
| 5511 | (0x1236F, 'X'), | ||
| 5512 | (0x12400, 'V'), | ||
| 5513 | (0x12463, 'X'), | ||
| 5514 | (0x12470, 'V'), | ||
| 5515 | (0x12474, 'X'), | ||
| 5516 | (0x13000, 'V'), | ||
| 5517 | (0x1342F, 'X'), | ||
| 5518 | ] | ||
| 5519 | |||
| 5520 | def _seg_53(): | ||
| 5521 | return [ | ||
| 5522 | (0x16800, 'V'), | ||
| 5523 | (0x16A39, 'X'), | ||
| 5524 | (0x16F00, 'V'), | ||
| 5525 | (0x16F45, 'X'), | ||
| 5526 | (0x16F50, 'V'), | ||
| 5527 | (0x16F7F, 'X'), | ||
| 5528 | (0x16F8F, 'V'), | ||
| 5529 | (0x16FA0, 'X'), | ||
| 5530 | (0x1B000, 'V'), | ||
| 5531 | (0x1B002, 'X'), | ||
| 5532 | (0x1D000, 'V'), | ||
| 5533 | (0x1D0F6, 'X'), | ||
| 5534 | (0x1D100, 'V'), | ||
| 5535 | (0x1D127, 'X'), | ||
| 5536 | (0x1D129, 'V'), | ||
| 5537 | (0x1D15E, 'M', u'𝅗𝅥'), | ||
| 5538 | (0x1D15F, 'M', u'𝅘𝅥'), | ||
| 5539 | (0x1D160, 'M', u'𝅘𝅥𝅮'), | ||
| 5540 | (0x1D161, 'M', u'𝅘𝅥𝅯'), | ||
| 5541 | (0x1D162, 'M', u'𝅘𝅥𝅰'), | ||
| 5542 | (0x1D163, 'M', u'𝅘𝅥𝅱'), | ||
| 5543 | (0x1D164, 'M', u'𝅘𝅥𝅲'), | ||
| 5544 | (0x1D165, 'V'), | ||
| 5545 | (0x1D173, 'X'), | ||
| 5546 | (0x1D17B, 'V'), | ||
| 5547 | (0x1D1BB, 'M', u'𝆹𝅥'), | ||
| 5548 | (0x1D1BC, 'M', u'𝆺𝅥'), | ||
| 5549 | (0x1D1BD, 'M', u'𝆹𝅥𝅮'), | ||
| 5550 | (0x1D1BE, 'M', u'𝆺𝅥𝅮'), | ||
| 5551 | (0x1D1BF, 'M', u'𝆹𝅥𝅯'), | ||
| 5552 | (0x1D1C0, 'M', u'𝆺𝅥𝅯'), | ||
| 5553 | (0x1D1C1, 'V'), | ||
| 5554 | (0x1D1DE, 'X'), | ||
| 5555 | (0x1D200, 'V'), | ||
| 5556 | (0x1D246, 'X'), | ||
| 5557 | (0x1D300, 'V'), | ||
| 5558 | (0x1D357, 'X'), | ||
| 5559 | (0x1D360, 'V'), | ||
| 5560 | (0x1D372, 'X'), | ||
| 5561 | (0x1D400, 'M', u'a'), | ||
| 5562 | (0x1D401, 'M', u'b'), | ||
| 5563 | (0x1D402, 'M', u'c'), | ||
| 5564 | (0x1D403, 'M', u'd'), | ||
| 5565 | (0x1D404, 'M', u'e'), | ||
| 5566 | (0x1D405, 'M', u'f'), | ||
| 5567 | (0x1D406, 'M', u'g'), | ||
| 5568 | (0x1D407, 'M', u'h'), | ||
| 5569 | (0x1D408, 'M', u'i'), | ||
| 5570 | (0x1D409, 'M', u'j'), | ||
| 5571 | (0x1D40A, 'M', u'k'), | ||
| 5572 | (0x1D40B, 'M', u'l'), | ||
| 5573 | (0x1D40C, 'M', u'm'), | ||
| 5574 | (0x1D40D, 'M', u'n'), | ||
| 5575 | (0x1D40E, 'M', u'o'), | ||
| 5576 | (0x1D40F, 'M', u'p'), | ||
| 5577 | (0x1D410, 'M', u'q'), | ||
| 5578 | (0x1D411, 'M', u'r'), | ||
| 5579 | (0x1D412, 'M', u's'), | ||
| 5580 | (0x1D413, 'M', u't'), | ||
| 5581 | (0x1D414, 'M', u'u'), | ||
| 5582 | (0x1D415, 'M', u'v'), | ||
| 5583 | (0x1D416, 'M', u'w'), | ||
| 5584 | (0x1D417, 'M', u'x'), | ||
| 5585 | (0x1D418, 'M', u'y'), | ||
| 5586 | (0x1D419, 'M', u'z'), | ||
| 5587 | (0x1D41A, 'M', u'a'), | ||
| 5588 | (0x1D41B, 'M', u'b'), | ||
| 5589 | (0x1D41C, 'M', u'c'), | ||
| 5590 | (0x1D41D, 'M', u'd'), | ||
| 5591 | (0x1D41E, 'M', u'e'), | ||
| 5592 | (0x1D41F, 'M', u'f'), | ||
| 5593 | (0x1D420, 'M', u'g'), | ||
| 5594 | (0x1D421, 'M', u'h'), | ||
| 5595 | (0x1D422, 'M', u'i'), | ||
| 5596 | (0x1D423, 'M', u'j'), | ||
| 5597 | (0x1D424, 'M', u'k'), | ||
| 5598 | (0x1D425, 'M', u'l'), | ||
| 5599 | (0x1D426, 'M', u'm'), | ||
| 5600 | (0x1D427, 'M', u'n'), | ||
| 5601 | (0x1D428, 'M', u'o'), | ||
| 5602 | (0x1D429, 'M', u'p'), | ||
| 5603 | (0x1D42A, 'M', u'q'), | ||
| 5604 | (0x1D42B, 'M', u'r'), | ||
| 5605 | (0x1D42C, 'M', u's'), | ||
| 5606 | (0x1D42D, 'M', u't'), | ||
| 5607 | (0x1D42E, 'M', u'u'), | ||
| 5608 | (0x1D42F, 'M', u'v'), | ||
| 5609 | (0x1D430, 'M', u'w'), | ||
| 5610 | (0x1D431, 'M', u'x'), | ||
| 5611 | (0x1D432, 'M', u'y'), | ||
| 5612 | (0x1D433, 'M', u'z'), | ||
| 5613 | (0x1D434, 'M', u'a'), | ||
| 5614 | (0x1D435, 'M', u'b'), | ||
| 5615 | (0x1D436, 'M', u'c'), | ||
| 5616 | (0x1D437, 'M', u'd'), | ||
| 5617 | (0x1D438, 'M', u'e'), | ||
| 5618 | (0x1D439, 'M', u'f'), | ||
| 5619 | (0x1D43A, 'M', u'g'), | ||
| 5620 | (0x1D43B, 'M', u'h'), | ||
| 5621 | (0x1D43C, 'M', u'i'), | ||
| 5622 | ] | ||
| 5623 | |||
| 5624 | def _seg_54(): | ||
| 5625 | return [ | ||
| 5626 | (0x1D43D, 'M', u'j'), | ||
| 5627 | (0x1D43E, 'M', u'k'), | ||
| 5628 | (0x1D43F, 'M', u'l'), | ||
| 5629 | (0x1D440, 'M', u'm'), | ||
| 5630 | (0x1D441, 'M', u'n'), | ||
| 5631 | (0x1D442, 'M', u'o'), | ||
| 5632 | (0x1D443, 'M', u'p'), | ||
| 5633 | (0x1D444, 'M', u'q'), | ||
| 5634 | (0x1D445, 'M', u'r'), | ||
| 5635 | (0x1D446, 'M', u's'), | ||
| 5636 | (0x1D447, 'M', u't'), | ||
| 5637 | (0x1D448, 'M', u'u'), | ||
| 5638 | (0x1D449, 'M', u'v'), | ||
| 5639 | (0x1D44A, 'M', u'w'), | ||
| 5640 | (0x1D44B, 'M', u'x'), | ||
| 5641 | (0x1D44C, 'M', u'y'), | ||
| 5642 | (0x1D44D, 'M', u'z'), | ||
| 5643 | (0x1D44E, 'M', u'a'), | ||
| 5644 | (0x1D44F, 'M', u'b'), | ||
| 5645 | (0x1D450, 'M', u'c'), | ||
| 5646 | (0x1D451, 'M', u'd'), | ||
| 5647 | (0x1D452, 'M', u'e'), | ||
| 5648 | (0x1D453, 'M', u'f'), | ||
| 5649 | (0x1D454, 'M', u'g'), | ||
| 5650 | (0x1D455, 'X'), | ||
| 5651 | (0x1D456, 'M', u'i'), | ||
| 5652 | (0x1D457, 'M', u'j'), | ||
| 5653 | (0x1D458, 'M', u'k'), | ||
| 5654 | (0x1D459, 'M', u'l'), | ||
| 5655 | (0x1D45A, 'M', u'm'), | ||
| 5656 | (0x1D45B, 'M', u'n'), | ||
| 5657 | (0x1D45C, 'M', u'o'), | ||
| 5658 | (0x1D45D, 'M', u'p'), | ||
| 5659 | (0x1D45E, 'M', u'q'), | ||
| 5660 | (0x1D45F, 'M', u'r'), | ||
| 5661 | (0x1D460, 'M', u's'), | ||
| 5662 | (0x1D461, 'M', u't'), | ||
| 5663 | (0x1D462, 'M', u'u'), | ||
| 5664 | (0x1D463, 'M', u'v'), | ||
| 5665 | (0x1D464, 'M', u'w'), | ||
| 5666 | (0x1D465, 'M', u'x'), | ||
| 5667 | (0x1D466, 'M', u'y'), | ||
| 5668 | (0x1D467, 'M', u'z'), | ||
| 5669 | (0x1D468, 'M', u'a'), | ||
| 5670 | (0x1D469, 'M', u'b'), | ||
| 5671 | (0x1D46A, 'M', u'c'), | ||
| 5672 | (0x1D46B, 'M', u'd'), | ||
| 5673 | (0x1D46C, 'M', u'e'), | ||
| 5674 | (0x1D46D, 'M', u'f'), | ||
| 5675 | (0x1D46E, 'M', u'g'), | ||
| 5676 | (0x1D46F, 'M', u'h'), | ||
| 5677 | (0x1D470, 'M', u'i'), | ||
| 5678 | (0x1D471, 'M', u'j'), | ||
| 5679 | (0x1D472, 'M', u'k'), | ||
| 5680 | (0x1D473, 'M', u'l'), | ||
| 5681 | (0x1D474, 'M', u'm'), | ||
| 5682 | (0x1D475, 'M', u'n'), | ||
| 5683 | (0x1D476, 'M', u'o'), | ||
| 5684 | (0x1D477, 'M', u'p'), | ||
| 5685 | (0x1D478, 'M', u'q'), | ||
| 5686 | (0x1D479, 'M', u'r'), | ||
| 5687 | (0x1D47A, 'M', u's'), | ||
| 5688 | (0x1D47B, 'M', u't'), | ||
| 5689 | (0x1D47C, 'M', u'u'), | ||
| 5690 | (0x1D47D, 'M', u'v'), | ||
| 5691 | (0x1D47E, 'M', u'w'), | ||
| 5692 | (0x1D47F, 'M', u'x'), | ||
| 5693 | (0x1D480, 'M', u'y'), | ||
| 5694 | (0x1D481, 'M', u'z'), | ||
| 5695 | (0x1D482, 'M', u'a'), | ||
| 5696 | (0x1D483, 'M', u'b'), | ||
| 5697 | (0x1D484, 'M', u'c'), | ||
| 5698 | (0x1D485, 'M', u'd'), | ||
| 5699 | (0x1D486, 'M', u'e'), | ||
| 5700 | (0x1D487, 'M', u'f'), | ||
| 5701 | (0x1D488, 'M', u'g'), | ||
| 5702 | (0x1D489, 'M', u'h'), | ||
| 5703 | (0x1D48A, 'M', u'i'), | ||
| 5704 | (0x1D48B, 'M', u'j'), | ||
| 5705 | (0x1D48C, 'M', u'k'), | ||
| 5706 | (0x1D48D, 'M', u'l'), | ||
| 5707 | (0x1D48E, 'M', u'm'), | ||
| 5708 | (0x1D48F, 'M', u'n'), | ||
| 5709 | (0x1D490, 'M', u'o'), | ||
| 5710 | (0x1D491, 'M', u'p'), | ||
| 5711 | (0x1D492, 'M', u'q'), | ||
| 5712 | (0x1D493, 'M', u'r'), | ||
| 5713 | (0x1D494, 'M', u's'), | ||
| 5714 | (0x1D495, 'M', u't'), | ||
| 5715 | (0x1D496, 'M', u'u'), | ||
| 5716 | (0x1D497, 'M', u'v'), | ||
| 5717 | (0x1D498, 'M', u'w'), | ||
| 5718 | (0x1D499, 'M', u'x'), | ||
| 5719 | (0x1D49A, 'M', u'y'), | ||
| 5720 | (0x1D49B, 'M', u'z'), | ||
| 5721 | (0x1D49C, 'M', u'a'), | ||
| 5722 | (0x1D49D, 'X'), | ||
| 5723 | (0x1D49E, 'M', u'c'), | ||
| 5724 | (0x1D49F, 'M', u'd'), | ||
| 5725 | (0x1D4A0, 'X'), | ||
| 5726 | ] | ||
| 5727 | |||
| 5728 | def _seg_55(): | ||
| 5729 | return [ | ||
| 5730 | (0x1D4A2, 'M', u'g'), | ||
| 5731 | (0x1D4A3, 'X'), | ||
| 5732 | (0x1D4A5, 'M', u'j'), | ||
| 5733 | (0x1D4A6, 'M', u'k'), | ||
| 5734 | (0x1D4A7, 'X'), | ||
| 5735 | (0x1D4A9, 'M', u'n'), | ||
| 5736 | (0x1D4AA, 'M', u'o'), | ||
| 5737 | (0x1D4AB, 'M', u'p'), | ||
| 5738 | (0x1D4AC, 'M', u'q'), | ||
| 5739 | (0x1D4AD, 'X'), | ||
| 5740 | (0x1D4AE, 'M', u's'), | ||
| 5741 | (0x1D4AF, 'M', u't'), | ||
| 5742 | (0x1D4B0, 'M', u'u'), | ||
| 5743 | (0x1D4B1, 'M', u'v'), | ||
| 5744 | (0x1D4B2, 'M', u'w'), | ||
| 5745 | (0x1D4B3, 'M', u'x'), | ||
| 5746 | (0x1D4B4, 'M', u'y'), | ||
| 5747 | (0x1D4B5, 'M', u'z'), | ||
| 5748 | (0x1D4B6, 'M', u'a'), | ||
| 5749 | (0x1D4B7, 'M', u'b'), | ||
| 5750 | (0x1D4B8, 'M', u'c'), | ||
| 5751 | (0x1D4B9, 'M', u'd'), | ||
| 5752 | (0x1D4BA, 'X'), | ||
| 5753 | (0x1D4BB, 'M', u'f'), | ||
| 5754 | (0x1D4BC, 'X'), | ||
| 5755 | (0x1D4BD, 'M', u'h'), | ||
| 5756 | (0x1D4BE, 'M', u'i'), | ||
| 5757 | (0x1D4BF, 'M', u'j'), | ||
| 5758 | (0x1D4C0, 'M', u'k'), | ||
| 5759 | (0x1D4C1, 'M', u'l'), | ||
| 5760 | (0x1D4C2, 'M', u'm'), | ||
| 5761 | (0x1D4C3, 'M', u'n'), | ||
| 5762 | (0x1D4C4, 'X'), | ||
| 5763 | (0x1D4C5, 'M', u'p'), | ||
| 5764 | (0x1D4C6, 'M', u'q'), | ||
| 5765 | (0x1D4C7, 'M', u'r'), | ||
| 5766 | (0x1D4C8, 'M', u's'), | ||
| 5767 | (0x1D4C9, 'M', u't'), | ||
| 5768 | (0x1D4CA, 'M', u'u'), | ||
| 5769 | (0x1D4CB, 'M', u'v'), | ||
| 5770 | (0x1D4CC, 'M', u'w'), | ||
| 5771 | (0x1D4CD, 'M', u'x'), | ||
| 5772 | (0x1D4CE, 'M', u'y'), | ||
| 5773 | (0x1D4CF, 'M', u'z'), | ||
| 5774 | (0x1D4D0, 'M', u'a'), | ||
| 5775 | (0x1D4D1, 'M', u'b'), | ||
| 5776 | (0x1D4D2, 'M', u'c'), | ||
| 5777 | (0x1D4D3, 'M', u'd'), | ||
| 5778 | (0x1D4D4, 'M', u'e'), | ||
| 5779 | (0x1D4D5, 'M', u'f'), | ||
| 5780 | (0x1D4D6, 'M', u'g'), | ||
| 5781 | (0x1D4D7, 'M', u'h'), | ||
| 5782 | (0x1D4D8, 'M', u'i'), | ||
| 5783 | (0x1D4D9, 'M', u'j'), | ||
| 5784 | (0x1D4DA, 'M', u'k'), | ||
| 5785 | (0x1D4DB, 'M', u'l'), | ||
| 5786 | (0x1D4DC, 'M', u'm'), | ||
| 5787 | (0x1D4DD, 'M', u'n'), | ||
| 5788 | (0x1D4DE, 'M', u'o'), | ||
| 5789 | (0x1D4DF, 'M', u'p'), | ||
| 5790 | (0x1D4E0, 'M', u'q'), | ||
| 5791 | (0x1D4E1, 'M', u'r'), | ||
| 5792 | (0x1D4E2, 'M', u's'), | ||
| 5793 | (0x1D4E3, 'M', u't'), | ||
| 5794 | (0x1D4E4, 'M', u'u'), | ||
| 5795 | (0x1D4E5, 'M', u'v'), | ||
| 5796 | (0x1D4E6, 'M', u'w'), | ||
| 5797 | (0x1D4E7, 'M', u'x'), | ||
| 5798 | (0x1D4E8, 'M', u'y'), | ||
| 5799 | (0x1D4E9, 'M', u'z'), | ||
| 5800 | (0x1D4EA, 'M', u'a'), | ||
| 5801 | (0x1D4EB, 'M', u'b'), | ||
| 5802 | (0x1D4EC, 'M', u'c'), | ||
| 5803 | (0x1D4ED, 'M', u'd'), | ||
| 5804 | (0x1D4EE, 'M', u'e'), | ||
| 5805 | (0x1D4EF, 'M', u'f'), | ||
| 5806 | (0x1D4F0, 'M', u'g'), | ||
| 5807 | (0x1D4F1, 'M', u'h'), | ||
| 5808 | (0x1D4F2, 'M', u'i'), | ||
| 5809 | (0x1D4F3, 'M', u'j'), | ||
| 5810 | (0x1D4F4, 'M', u'k'), | ||
| 5811 | (0x1D4F5, 'M', u'l'), | ||
| 5812 | (0x1D4F6, 'M', u'm'), | ||
| 5813 | (0x1D4F7, 'M', u'n'), | ||
| 5814 | (0x1D4F8, 'M', u'o'), | ||
| 5815 | (0x1D4F9, 'M', u'p'), | ||
| 5816 | (0x1D4FA, 'M', u'q'), | ||
| 5817 | (0x1D4FB, 'M', u'r'), | ||
| 5818 | (0x1D4FC, 'M', u's'), | ||
| 5819 | (0x1D4FD, 'M', u't'), | ||
| 5820 | (0x1D4FE, 'M', u'u'), | ||
| 5821 | (0x1D4FF, 'M', u'v'), | ||
| 5822 | (0x1D500, 'M', u'w'), | ||
| 5823 | (0x1D501, 'M', u'x'), | ||
| 5824 | (0x1D502, 'M', u'y'), | ||
| 5825 | (0x1D503, 'M', u'z'), | ||
| 5826 | (0x1D504, 'M', u'a'), | ||
| 5827 | (0x1D505, 'M', u'b'), | ||
| 5828 | (0x1D506, 'X'), | ||
| 5829 | (0x1D507, 'M', u'd'), | ||
| 5830 | ] | ||
| 5831 | |||
| 5832 | def _seg_56(): | ||
| 5833 | return [ | ||
| 5834 | (0x1D508, 'M', u'e'), | ||
| 5835 | (0x1D509, 'M', u'f'), | ||
| 5836 | (0x1D50A, 'M', u'g'), | ||
| 5837 | (0x1D50B, 'X'), | ||
| 5838 | (0x1D50D, 'M', u'j'), | ||
| 5839 | (0x1D50E, 'M', u'k'), | ||
| 5840 | (0x1D50F, 'M', u'l'), | ||
| 5841 | (0x1D510, 'M', u'm'), | ||
| 5842 | (0x1D511, 'M', u'n'), | ||
| 5843 | (0x1D512, 'M', u'o'), | ||
| 5844 | (0x1D513, 'M', u'p'), | ||
| 5845 | (0x1D514, 'M', u'q'), | ||
| 5846 | (0x1D515, 'X'), | ||
| 5847 | (0x1D516, 'M', u's'), | ||
| 5848 | (0x1D517, 'M', u't'), | ||
| 5849 | (0x1D518, 'M', u'u'), | ||
| 5850 | (0x1D519, 'M', u'v'), | ||
| 5851 | (0x1D51A, 'M', u'w'), | ||
| 5852 | (0x1D51B, 'M', u'x'), | ||
| 5853 | (0x1D51C, 'M', u'y'), | ||
| 5854 | (0x1D51D, 'X'), | ||
| 5855 | (0x1D51E, 'M', u'a'), | ||
| 5856 | (0x1D51F, 'M', u'b'), | ||
| 5857 | (0x1D520, 'M', u'c'), | ||
| 5858 | (0x1D521, 'M', u'd'), | ||
| 5859 | (0x1D522, 'M', u'e'), | ||
| 5860 | (0x1D523, 'M', u'f'), | ||
| 5861 | (0x1D524, 'M', u'g'), | ||
| 5862 | (0x1D525, 'M', u'h'), | ||
| 5863 | (0x1D526, 'M', u'i'), | ||
| 5864 | (0x1D527, 'M', u'j'), | ||
| 5865 | (0x1D528, 'M', u'k'), | ||
| 5866 | (0x1D529, 'M', u'l'), | ||
| 5867 | (0x1D52A, 'M', u'm'), | ||
| 5868 | (0x1D52B, 'M', u'n'), | ||
| 5869 | (0x1D52C, 'M', u'o'), | ||
| 5870 | (0x1D52D, 'M', u'p'), | ||
| 5871 | (0x1D52E, 'M', u'q'), | ||
| 5872 | (0x1D52F, 'M', u'r'), | ||
| 5873 | (0x1D530, 'M', u's'), | ||
| 5874 | (0x1D531, 'M', u't'), | ||
| 5875 | (0x1D532, 'M', u'u'), | ||
| 5876 | (0x1D533, 'M', u'v'), | ||
| 5877 | (0x1D534, 'M', u'w'), | ||
| 5878 | (0x1D535, 'M', u'x'), | ||
| 5879 | (0x1D536, 'M', u'y'), | ||
| 5880 | (0x1D537, 'M', u'z'), | ||
| 5881 | (0x1D538, 'M', u'a'), | ||
| 5882 | (0x1D539, 'M', u'b'), | ||
| 5883 | (0x1D53A, 'X'), | ||
| 5884 | (0x1D53B, 'M', u'd'), | ||
| 5885 | (0x1D53C, 'M', u'e'), | ||
| 5886 | (0x1D53D, 'M', u'f'), | ||
| 5887 | (0x1D53E, 'M', u'g'), | ||
| 5888 | (0x1D53F, 'X'), | ||
| 5889 | (0x1D540, 'M', u'i'), | ||
| 5890 | (0x1D541, 'M', u'j'), | ||
| 5891 | (0x1D542, 'M', u'k'), | ||
| 5892 | (0x1D543, 'M', u'l'), | ||
| 5893 | (0x1D544, 'M', u'm'), | ||
| 5894 | (0x1D545, 'X'), | ||
| 5895 | (0x1D546, 'M', u'o'), | ||
| 5896 | (0x1D547, 'X'), | ||
| 5897 | (0x1D54A, 'M', u's'), | ||
| 5898 | (0x1D54B, 'M', u't'), | ||
| 5899 | (0x1D54C, 'M', u'u'), | ||
| 5900 | (0x1D54D, 'M', u'v'), | ||
| 5901 | (0x1D54E, 'M', u'w'), | ||
| 5902 | (0x1D54F, 'M', u'x'), | ||
| 5903 | (0x1D550, 'M', u'y'), | ||
| 5904 | (0x1D551, 'X'), | ||
| 5905 | (0x1D552, 'M', u'a'), | ||
| 5906 | (0x1D553, 'M', u'b'), | ||
| 5907 | (0x1D554, 'M', u'c'), | ||
| 5908 | (0x1D555, 'M', u'd'), | ||
| 5909 | (0x1D556, 'M', u'e'), | ||
| 5910 | (0x1D557, 'M', u'f'), | ||
| 5911 | (0x1D558, 'M', u'g'), | ||
| 5912 | (0x1D559, 'M', u'h'), | ||
| 5913 | (0x1D55A, 'M', u'i'), | ||
| 5914 | (0x1D55B, 'M', u'j'), | ||
| 5915 | (0x1D55C, 'M', u'k'), | ||
| 5916 | (0x1D55D, 'M', u'l'), | ||
| 5917 | (0x1D55E, 'M', u'm'), | ||
| 5918 | (0x1D55F, 'M', u'n'), | ||
| 5919 | (0x1D560, 'M', u'o'), | ||
| 5920 | (0x1D561, 'M', u'p'), | ||
| 5921 | (0x1D562, 'M', u'q'), | ||
| 5922 | (0x1D563, 'M', u'r'), | ||
| 5923 | (0x1D564, 'M', u's'), | ||
| 5924 | (0x1D565, 'M', u't'), | ||
| 5925 | (0x1D566, 'M', u'u'), | ||
| 5926 | (0x1D567, 'M', u'v'), | ||
| 5927 | (0x1D568, 'M', u'w'), | ||
| 5928 | (0x1D569, 'M', u'x'), | ||
| 5929 | (0x1D56A, 'M', u'y'), | ||
| 5930 | (0x1D56B, 'M', u'z'), | ||
| 5931 | (0x1D56C, 'M', u'a'), | ||
| 5932 | (0x1D56D, 'M', u'b'), | ||
| 5933 | (0x1D56E, 'M', u'c'), | ||
| 5934 | ] | ||
| 5935 | |||
| 5936 | def _seg_57(): | ||
| 5937 | return [ | ||
| 5938 | (0x1D56F, 'M', u'd'), | ||
| 5939 | (0x1D570, 'M', u'e'), | ||
| 5940 | (0x1D571, 'M', u'f'), | ||
| 5941 | (0x1D572, 'M', u'g'), | ||
| 5942 | (0x1D573, 'M', u'h'), | ||
| 5943 | (0x1D574, 'M', u'i'), | ||
| 5944 | (0x1D575, 'M', u'j'), | ||
| 5945 | (0x1D576, 'M', u'k'), | ||
| 5946 | (0x1D577, 'M', u'l'), | ||
| 5947 | (0x1D578, 'M', u'm'), | ||
| 5948 | (0x1D579, 'M', u'n'), | ||
| 5949 | (0x1D57A, 'M', u'o'), | ||
| 5950 | (0x1D57B, 'M', u'p'), | ||
| 5951 | (0x1D57C, 'M', u'q'), | ||
| 5952 | (0x1D57D, 'M', u'r'), | ||
| 5953 | (0x1D57E, 'M', u's'), | ||
| 5954 | (0x1D57F, 'M', u't'), | ||
| 5955 | (0x1D580, 'M', u'u'), | ||
| 5956 | (0x1D581, 'M', u'v'), | ||
| 5957 | (0x1D582, 'M', u'w'), | ||
| 5958 | (0x1D583, 'M', u'x'), | ||
| 5959 | (0x1D584, 'M', u'y'), | ||
| 5960 | (0x1D585, 'M', u'z'), | ||
| 5961 | (0x1D586, 'M', u'a'), | ||
| 5962 | (0x1D587, 'M', u'b'), | ||
| 5963 | (0x1D588, 'M', u'c'), | ||
| 5964 | (0x1D589, 'M', u'd'), | ||
| 5965 | (0x1D58A, 'M', u'e'), | ||
| 5966 | (0x1D58B, 'M', u'f'), | ||
| 5967 | (0x1D58C, 'M', u'g'), | ||
| 5968 | (0x1D58D, 'M', u'h'), | ||
| 5969 | (0x1D58E, 'M', u'i'), | ||
| 5970 | (0x1D58F, 'M', u'j'), | ||
| 5971 | (0x1D590, 'M', u'k'), | ||
| 5972 | (0x1D591, 'M', u'l'), | ||
| 5973 | (0x1D592, 'M', u'm'), | ||
| 5974 | (0x1D593, 'M', u'n'), | ||
| 5975 | (0x1D594, 'M', u'o'), | ||
| 5976 | (0x1D595, 'M', u'p'), | ||
| 5977 | (0x1D596, 'M', u'q'), | ||
| 5978 | (0x1D597, 'M', u'r'), | ||
| 5979 | (0x1D598, 'M', u's'), | ||
| 5980 | (0x1D599, 'M', u't'), | ||
| 5981 | (0x1D59A, 'M', u'u'), | ||
| 5982 | (0x1D59B, 'M', u'v'), | ||
| 5983 | (0x1D59C, 'M', u'w'), | ||
| 5984 | (0x1D59D, 'M', u'x'), | ||
| 5985 | (0x1D59E, 'M', u'y'), | ||
| 5986 | (0x1D59F, 'M', u'z'), | ||
| 5987 | (0x1D5A0, 'M', u'a'), | ||
| 5988 | (0x1D5A1, 'M', u'b'), | ||
| 5989 | (0x1D5A2, 'M', u'c'), | ||
| 5990 | (0x1D5A3, 'M', u'd'), | ||
| 5991 | (0x1D5A4, 'M', u'e'), | ||
| 5992 | (0x1D5A5, 'M', u'f'), | ||
| 5993 | (0x1D5A6, 'M', u'g'), | ||
| 5994 | (0x1D5A7, 'M', u'h'), | ||
| 5995 | (0x1D5A8, 'M', u'i'), | ||
| 5996 | (0x1D5A9, 'M', u'j'), | ||
| 5997 | (0x1D5AA, 'M', u'k'), | ||
| 5998 | (0x1D5AB, 'M', u'l'), | ||
| 5999 | (0x1D5AC, 'M', u'm'), | ||
| 6000 | (0x1D5AD, 'M', u'n'), | ||
| 6001 | (0x1D5AE, 'M', u'o'), | ||
| 6002 | (0x1D5AF, 'M', u'p'), | ||
| 6003 | (0x1D5B0, 'M', u'q'), | ||
| 6004 | (0x1D5B1, 'M', u'r'), | ||
| 6005 | (0x1D5B2, 'M', u's'), | ||
| 6006 | (0x1D5B3, 'M', u't'), | ||
| 6007 | (0x1D5B4, 'M', u'u'), | ||
| 6008 | (0x1D5B5, 'M', u'v'), | ||
| 6009 | (0x1D5B6, 'M', u'w'), | ||
| 6010 | (0x1D5B7, 'M', u'x'), | ||
| 6011 | (0x1D5B8, 'M', u'y'), | ||
| 6012 | (0x1D5B9, 'M', u'z'), | ||
| 6013 | (0x1D5BA, 'M', u'a'), | ||
| 6014 | (0x1D5BB, 'M', u'b'), | ||
| 6015 | (0x1D5BC, 'M', u'c'), | ||
| 6016 | (0x1D5BD, 'M', u'd'), | ||
| 6017 | (0x1D5BE, 'M', u'e'), | ||
| 6018 | (0x1D5BF, 'M', u'f'), | ||
| 6019 | (0x1D5C0, 'M', u'g'), | ||
| 6020 | (0x1D5C1, 'M', u'h'), | ||
| 6021 | (0x1D5C2, 'M', u'i'), | ||
| 6022 | (0x1D5C3, 'M', u'j'), | ||
| 6023 | (0x1D5C4, 'M', u'k'), | ||
| 6024 | (0x1D5C5, 'M', u'l'), | ||
| 6025 | (0x1D5C6, 'M', u'm'), | ||
| 6026 | (0x1D5C7, 'M', u'n'), | ||
| 6027 | (0x1D5C8, 'M', u'o'), | ||
| 6028 | (0x1D5C9, 'M', u'p'), | ||
| 6029 | (0x1D5CA, 'M', u'q'), | ||
| 6030 | (0x1D5CB, 'M', u'r'), | ||
| 6031 | (0x1D5CC, 'M', u's'), | ||
| 6032 | (0x1D5CD, 'M', u't'), | ||
| 6033 | (0x1D5CE, 'M', u'u'), | ||
| 6034 | (0x1D5CF, 'M', u'v'), | ||
| 6035 | (0x1D5D0, 'M', u'w'), | ||
| 6036 | (0x1D5D1, 'M', u'x'), | ||
| 6037 | (0x1D5D2, 'M', u'y'), | ||
| 6038 | ] | ||
| 6039 | |||
| 6040 | def _seg_58(): | ||
| 6041 | return [ | ||
| 6042 | (0x1D5D3, 'M', u'z'), | ||
| 6043 | (0x1D5D4, 'M', u'a'), | ||
| 6044 | (0x1D5D5, 'M', u'b'), | ||
| 6045 | (0x1D5D6, 'M', u'c'), | ||
| 6046 | (0x1D5D7, 'M', u'd'), | ||
| 6047 | (0x1D5D8, 'M', u'e'), | ||
| 6048 | (0x1D5D9, 'M', u'f'), | ||
| 6049 | (0x1D5DA, 'M', u'g'), | ||
| 6050 | (0x1D5DB, 'M', u'h'), | ||
| 6051 | (0x1D5DC, 'M', u'i'), | ||
| 6052 | (0x1D5DD, 'M', u'j'), | ||
| 6053 | (0x1D5DE, 'M', u'k'), | ||
| 6054 | (0x1D5DF, 'M', u'l'), | ||
| 6055 | (0x1D5E0, 'M', u'm'), | ||
| 6056 | (0x1D5E1, 'M', u'n'), | ||
| 6057 | (0x1D5E2, 'M', u'o'), | ||
| 6058 | (0x1D5E3, 'M', u'p'), | ||
| 6059 | (0x1D5E4, 'M', u'q'), | ||
| 6060 | (0x1D5E5, 'M', u'r'), | ||
| 6061 | (0x1D5E6, 'M', u's'), | ||
| 6062 | (0x1D5E7, 'M', u't'), | ||
| 6063 | (0x1D5E8, 'M', u'u'), | ||
| 6064 | (0x1D5E9, 'M', u'v'), | ||
| 6065 | (0x1D5EA, 'M', u'w'), | ||
| 6066 | (0x1D5EB, 'M', u'x'), | ||
| 6067 | (0x1D5EC, 'M', u'y'), | ||
| 6068 | (0x1D5ED, 'M', u'z'), | ||
| 6069 | (0x1D5EE, 'M', u'a'), | ||
| 6070 | (0x1D5EF, 'M', u'b'), | ||
| 6071 | (0x1D5F0, 'M', u'c'), | ||
| 6072 | (0x1D5F1, 'M', u'd'), | ||
| 6073 | (0x1D5F2, 'M', u'e'), | ||
| 6074 | (0x1D5F3, 'M', u'f'), | ||
| 6075 | (0x1D5F4, 'M', u'g'), | ||
| 6076 | (0x1D5F5, 'M', u'h'), | ||
| 6077 | (0x1D5F6, 'M', u'i'), | ||
| 6078 | (0x1D5F7, 'M', u'j'), | ||
| 6079 | (0x1D5F8, 'M', u'k'), | ||
| 6080 | (0x1D5F9, 'M', u'l'), | ||
| 6081 | (0x1D5FA, 'M', u'm'), | ||
| 6082 | (0x1D5FB, 'M', u'n'), | ||
| 6083 | (0x1D5FC, 'M', u'o'), | ||
| 6084 | (0x1D5FD, 'M', u'p'), | ||
| 6085 | (0x1D5FE, 'M', u'q'), | ||
| 6086 | (0x1D5FF, 'M', u'r'), | ||
| 6087 | (0x1D600, 'M', u's'), | ||
| 6088 | (0x1D601, 'M', u't'), | ||
| 6089 | (0x1D602, 'M', u'u'), | ||
| 6090 | (0x1D603, 'M', u'v'), | ||
| 6091 | (0x1D604, 'M', u'w'), | ||
| 6092 | (0x1D605, 'M', u'x'), | ||
| 6093 | (0x1D606, 'M', u'y'), | ||
| 6094 | (0x1D607, 'M', u'z'), | ||
| 6095 | (0x1D608, 'M', u'a'), | ||
| 6096 | (0x1D609, 'M', u'b'), | ||
| 6097 | (0x1D60A, 'M', u'c'), | ||
| 6098 | (0x1D60B, 'M', u'd'), | ||
| 6099 | (0x1D60C, 'M', u'e'), | ||
| 6100 | (0x1D60D, 'M', u'f'), | ||
| 6101 | (0x1D60E, 'M', u'g'), | ||
| 6102 | (0x1D60F, 'M', u'h'), | ||
| 6103 | (0x1D610, 'M', u'i'), | ||
| 6104 | (0x1D611, 'M', u'j'), | ||
| 6105 | (0x1D612, 'M', u'k'), | ||
| 6106 | (0x1D613, 'M', u'l'), | ||
| 6107 | (0x1D614, 'M', u'm'), | ||
| 6108 | (0x1D615, 'M', u'n'), | ||
| 6109 | (0x1D616, 'M', u'o'), | ||
| 6110 | (0x1D617, 'M', u'p'), | ||
| 6111 | (0x1D618, 'M', u'q'), | ||
| 6112 | (0x1D619, 'M', u'r'), | ||
| 6113 | (0x1D61A, 'M', u's'), | ||
| 6114 | (0x1D61B, 'M', u't'), | ||
| 6115 | (0x1D61C, 'M', u'u'), | ||
| 6116 | (0x1D61D, 'M', u'v'), | ||
| 6117 | (0x1D61E, 'M', u'w'), | ||
| 6118 | (0x1D61F, 'M', u'x'), | ||
| 6119 | (0x1D620, 'M', u'y'), | ||
| 6120 | (0x1D621, 'M', u'z'), | ||
| 6121 | (0x1D622, 'M', u'a'), | ||
| 6122 | (0x1D623, 'M', u'b'), | ||
| 6123 | (0x1D624, 'M', u'c'), | ||
| 6124 | (0x1D625, 'M', u'd'), | ||
| 6125 | (0x1D626, 'M', u'e'), | ||
| 6126 | (0x1D627, 'M', u'f'), | ||
| 6127 | (0x1D628, 'M', u'g'), | ||
| 6128 | (0x1D629, 'M', u'h'), | ||
| 6129 | (0x1D62A, 'M', u'i'), | ||
| 6130 | (0x1D62B, 'M', u'j'), | ||
| 6131 | (0x1D62C, 'M', u'k'), | ||
| 6132 | (0x1D62D, 'M', u'l'), | ||
| 6133 | (0x1D62E, 'M', u'm'), | ||
| 6134 | (0x1D62F, 'M', u'n'), | ||
| 6135 | (0x1D630, 'M', u'o'), | ||
| 6136 | (0x1D631, 'M', u'p'), | ||
| 6137 | (0x1D632, 'M', u'q'), | ||
| 6138 | (0x1D633, 'M', u'r'), | ||
| 6139 | (0x1D634, 'M', u's'), | ||
| 6140 | (0x1D635, 'M', u't'), | ||
| 6141 | (0x1D636, 'M', u'u'), | ||
| 6142 | ] | ||
| 6143 | |||
| 6144 | def _seg_59(): | ||
| 6145 | return [ | ||
| 6146 | (0x1D637, 'M', u'v'), | ||
| 6147 | (0x1D638, 'M', u'w'), | ||
| 6148 | (0x1D639, 'M', u'x'), | ||
| 6149 | (0x1D63A, 'M', u'y'), | ||
| 6150 | (0x1D63B, 'M', u'z'), | ||
| 6151 | (0x1D63C, 'M', u'a'), | ||
| 6152 | (0x1D63D, 'M', u'b'), | ||
| 6153 | (0x1D63E, 'M', u'c'), | ||
| 6154 | (0x1D63F, 'M', u'd'), | ||
| 6155 | (0x1D640, 'M', u'e'), | ||
| 6156 | (0x1D641, 'M', u'f'), | ||
| 6157 | (0x1D642, 'M', u'g'), | ||
| 6158 | (0x1D643, 'M', u'h'), | ||
| 6159 | (0x1D644, 'M', u'i'), | ||
| 6160 | (0x1D645, 'M', u'j'), | ||
| 6161 | (0x1D646, 'M', u'k'), | ||
| 6162 | (0x1D647, 'M', u'l'), | ||
| 6163 | (0x1D648, 'M', u'm'), | ||
| 6164 | (0x1D649, 'M', u'n'), | ||
| 6165 | (0x1D64A, 'M', u'o'), | ||
| 6166 | (0x1D64B, 'M', u'p'), | ||
| 6167 | (0x1D64C, 'M', u'q'), | ||
| 6168 | (0x1D64D, 'M', u'r'), | ||
| 6169 | (0x1D64E, 'M', u's'), | ||
| 6170 | (0x1D64F, 'M', u't'), | ||
| 6171 | (0x1D650, 'M', u'u'), | ||
| 6172 | (0x1D651, 'M', u'v'), | ||
| 6173 | (0x1D652, 'M', u'w'), | ||
| 6174 | (0x1D653, 'M', u'x'), | ||
| 6175 | (0x1D654, 'M', u'y'), | ||
| 6176 | (0x1D655, 'M', u'z'), | ||
| 6177 | (0x1D656, 'M', u'a'), | ||
| 6178 | (0x1D657, 'M', u'b'), | ||
| 6179 | (0x1D658, 'M', u'c'), | ||
| 6180 | (0x1D659, 'M', u'd'), | ||
| 6181 | (0x1D65A, 'M', u'e'), | ||
| 6182 | (0x1D65B, 'M', u'f'), | ||
| 6183 | (0x1D65C, 'M', u'g'), | ||
| 6184 | (0x1D65D, 'M', u'h'), | ||
| 6185 | (0x1D65E, 'M', u'i'), | ||
| 6186 | (0x1D65F, 'M', u'j'), | ||
| 6187 | (0x1D660, 'M', u'k'), | ||
| 6188 | (0x1D661, 'M', u'l'), | ||
| 6189 | (0x1D662, 'M', u'm'), | ||
| 6190 | (0x1D663, 'M', u'n'), | ||
| 6191 | (0x1D664, 'M', u'o'), | ||
| 6192 | (0x1D665, 'M', u'p'), | ||
| 6193 | (0x1D666, 'M', u'q'), | ||
| 6194 | (0x1D667, 'M', u'r'), | ||
| 6195 | (0x1D668, 'M', u's'), | ||
| 6196 | (0x1D669, 'M', u't'), | ||
| 6197 | (0x1D66A, 'M', u'u'), | ||
| 6198 | (0x1D66B, 'M', u'v'), | ||
| 6199 | (0x1D66C, 'M', u'w'), | ||
| 6200 | (0x1D66D, 'M', u'x'), | ||
| 6201 | (0x1D66E, 'M', u'y'), | ||
| 6202 | (0x1D66F, 'M', u'z'), | ||
| 6203 | (0x1D670, 'M', u'a'), | ||
| 6204 | (0x1D671, 'M', u'b'), | ||
| 6205 | (0x1D672, 'M', u'c'), | ||
| 6206 | (0x1D673, 'M', u'd'), | ||
| 6207 | (0x1D674, 'M', u'e'), | ||
| 6208 | (0x1D675, 'M', u'f'), | ||
| 6209 | (0x1D676, 'M', u'g'), | ||
| 6210 | (0x1D677, 'M', u'h'), | ||
| 6211 | (0x1D678, 'M', u'i'), | ||
| 6212 | (0x1D679, 'M', u'j'), | ||
| 6213 | (0x1D67A, 'M', u'k'), | ||
| 6214 | (0x1D67B, 'M', u'l'), | ||
| 6215 | (0x1D67C, 'M', u'm'), | ||
| 6216 | (0x1D67D, 'M', u'n'), | ||
| 6217 | (0x1D67E, 'M', u'o'), | ||
| 6218 | (0x1D67F, 'M', u'p'), | ||
| 6219 | (0x1D680, 'M', u'q'), | ||
| 6220 | (0x1D681, 'M', u'r'), | ||
| 6221 | (0x1D682, 'M', u's'), | ||
| 6222 | (0x1D683, 'M', u't'), | ||
| 6223 | (0x1D684, 'M', u'u'), | ||
| 6224 | (0x1D685, 'M', u'v'), | ||
| 6225 | (0x1D686, 'M', u'w'), | ||
| 6226 | (0x1D687, 'M', u'x'), | ||
| 6227 | (0x1D688, 'M', u'y'), | ||
| 6228 | (0x1D689, 'M', u'z'), | ||
| 6229 | (0x1D68A, 'M', u'a'), | ||
| 6230 | (0x1D68B, 'M', u'b'), | ||
| 6231 | (0x1D68C, 'M', u'c'), | ||
| 6232 | (0x1D68D, 'M', u'd'), | ||
| 6233 | (0x1D68E, 'M', u'e'), | ||
| 6234 | (0x1D68F, 'M', u'f'), | ||
| 6235 | (0x1D690, 'M', u'g'), | ||
| 6236 | (0x1D691, 'M', u'h'), | ||
| 6237 | (0x1D692, 'M', u'i'), | ||
| 6238 | (0x1D693, 'M', u'j'), | ||
| 6239 | (0x1D694, 'M', u'k'), | ||
| 6240 | (0x1D695, 'M', u'l'), | ||
| 6241 | (0x1D696, 'M', u'm'), | ||
| 6242 | (0x1D697, 'M', u'n'), | ||
| 6243 | (0x1D698, 'M', u'o'), | ||
| 6244 | (0x1D699, 'M', u'p'), | ||
| 6245 | (0x1D69A, 'M', u'q'), | ||
| 6246 | ] | ||
| 6247 | |||
| 6248 | def _seg_60(): | ||
| 6249 | return [ | ||
| 6250 | (0x1D69B, 'M', u'r'), | ||
| 6251 | (0x1D69C, 'M', u's'), | ||
| 6252 | (0x1D69D, 'M', u't'), | ||
| 6253 | (0x1D69E, 'M', u'u'), | ||
| 6254 | (0x1D69F, 'M', u'v'), | ||
| 6255 | (0x1D6A0, 'M', u'w'), | ||
| 6256 | (0x1D6A1, 'M', u'x'), | ||
| 6257 | (0x1D6A2, 'M', u'y'), | ||
| 6258 | (0x1D6A3, 'M', u'z'), | ||
| 6259 | (0x1D6A4, 'M', u'ı'), | ||
| 6260 | (0x1D6A5, 'M', u'ȷ'), | ||
| 6261 | (0x1D6A6, 'X'), | ||
| 6262 | (0x1D6A8, 'M', u'α'), | ||
| 6263 | (0x1D6A9, 'M', u'β'), | ||
| 6264 | (0x1D6AA, 'M', u'γ'), | ||
| 6265 | (0x1D6AB, 'M', u'δ'), | ||
| 6266 | (0x1D6AC, 'M', u'ε'), | ||
| 6267 | (0x1D6AD, 'M', u'ζ'), | ||
| 6268 | (0x1D6AE, 'M', u'η'), | ||
| 6269 | (0x1D6AF, 'M', u'θ'), | ||
| 6270 | (0x1D6B0, 'M', u'ι'), | ||
| 6271 | (0x1D6B1, 'M', u'κ'), | ||
| 6272 | (0x1D6B2, 'M', u'λ'), | ||
| 6273 | (0x1D6B3, 'M', u'μ'), | ||
| 6274 | (0x1D6B4, 'M', u'ν'), | ||
| 6275 | (0x1D6B5, 'M', u'ξ'), | ||
| 6276 | (0x1D6B6, 'M', u'ο'), | ||
| 6277 | (0x1D6B7, 'M', u'π'), | ||
| 6278 | (0x1D6B8, 'M', u'ρ'), | ||
| 6279 | (0x1D6B9, 'M', u'θ'), | ||
| 6280 | (0x1D6BA, 'M', u'σ'), | ||
| 6281 | (0x1D6BB, 'M', u'τ'), | ||
| 6282 | (0x1D6BC, 'M', u'υ'), | ||
| 6283 | (0x1D6BD, 'M', u'φ'), | ||
| 6284 | (0x1D6BE, 'M', u'χ'), | ||
| 6285 | (0x1D6BF, 'M', u'ψ'), | ||
| 6286 | (0x1D6C0, 'M', u'ω'), | ||
| 6287 | (0x1D6C1, 'M', u'∇'), | ||
| 6288 | (0x1D6C2, 'M', u'α'), | ||
| 6289 | (0x1D6C3, 'M', u'β'), | ||
| 6290 | (0x1D6C4, 'M', u'γ'), | ||
| 6291 | (0x1D6C5, 'M', u'δ'), | ||
| 6292 | (0x1D6C6, 'M', u'ε'), | ||
| 6293 | (0x1D6C7, 'M', u'ζ'), | ||
| 6294 | (0x1D6C8, 'M', u'η'), | ||
| 6295 | (0x1D6C9, 'M', u'θ'), | ||
| 6296 | (0x1D6CA, 'M', u'ι'), | ||
| 6297 | (0x1D6CB, 'M', u'κ'), | ||
| 6298 | (0x1D6CC, 'M', u'λ'), | ||
| 6299 | (0x1D6CD, 'M', u'μ'), | ||
| 6300 | (0x1D6CE, 'M', u'ν'), | ||
| 6301 | (0x1D6CF, 'M', u'ξ'), | ||
| 6302 | (0x1D6D0, 'M', u'ο'), | ||
| 6303 | (0x1D6D1, 'M', u'π'), | ||
| 6304 | (0x1D6D2, 'M', u'ρ'), | ||
| 6305 | (0x1D6D3, 'M', u'σ'), | ||
| 6306 | (0x1D6D5, 'M', u'τ'), | ||
| 6307 | (0x1D6D6, 'M', u'υ'), | ||
| 6308 | (0x1D6D7, 'M', u'φ'), | ||
| 6309 | (0x1D6D8, 'M', u'χ'), | ||
| 6310 | (0x1D6D9, 'M', u'ψ'), | ||
| 6311 | (0x1D6DA, 'M', u'ω'), | ||
| 6312 | (0x1D6DB, 'M', u'∂'), | ||
| 6313 | (0x1D6DC, 'M', u'ε'), | ||
| 6314 | (0x1D6DD, 'M', u'θ'), | ||
| 6315 | (0x1D6DE, 'M', u'κ'), | ||
| 6316 | (0x1D6DF, 'M', u'φ'), | ||
| 6317 | (0x1D6E0, 'M', u'ρ'), | ||
| 6318 | (0x1D6E1, 'M', u'π'), | ||
| 6319 | (0x1D6E2, 'M', u'α'), | ||
| 6320 | (0x1D6E3, 'M', u'β'), | ||
| 6321 | (0x1D6E4, 'M', u'γ'), | ||
| 6322 | (0x1D6E5, 'M', u'δ'), | ||
| 6323 | (0x1D6E6, 'M', u'ε'), | ||
| 6324 | (0x1D6E7, 'M', u'ζ'), | ||
| 6325 | (0x1D6E8, 'M', u'η'), | ||
| 6326 | (0x1D6E9, 'M', u'θ'), | ||
| 6327 | (0x1D6EA, 'M', u'ι'), | ||
| 6328 | (0x1D6EB, 'M', u'κ'), | ||
| 6329 | (0x1D6EC, 'M', u'λ'), | ||
| 6330 | (0x1D6ED, 'M', u'μ'), | ||
| 6331 | (0x1D6EE, 'M', u'ν'), | ||
| 6332 | (0x1D6EF, 'M', u'ξ'), | ||
| 6333 | (0x1D6F0, 'M', u'ο'), | ||
| 6334 | (0x1D6F1, 'M', u'π'), | ||
| 6335 | (0x1D6F2, 'M', u'ρ'), | ||
| 6336 | (0x1D6F3, 'M', u'θ'), | ||
| 6337 | (0x1D6F4, 'M', u'σ'), | ||
| 6338 | (0x1D6F5, 'M', u'τ'), | ||
| 6339 | (0x1D6F6, 'M', u'υ'), | ||
| 6340 | (0x1D6F7, 'M', u'φ'), | ||
| 6341 | (0x1D6F8, 'M', u'χ'), | ||
| 6342 | (0x1D6F9, 'M', u'ψ'), | ||
| 6343 | (0x1D6FA, 'M', u'ω'), | ||
| 6344 | (0x1D6FB, 'M', u'∇'), | ||
| 6345 | (0x1D6FC, 'M', u'α'), | ||
| 6346 | (0x1D6FD, 'M', u'β'), | ||
| 6347 | (0x1D6FE, 'M', u'γ'), | ||
| 6348 | (0x1D6FF, 'M', u'δ'), | ||
| 6349 | (0x1D700, 'M', u'ε'), | ||
| 6350 | ] | ||
| 6351 | |||
| 6352 | def _seg_61(): | ||
| 6353 | return [ | ||
| 6354 | (0x1D701, 'M', u'ζ'), | ||
| 6355 | (0x1D702, 'M', u'η'), | ||
| 6356 | (0x1D703, 'M', u'θ'), | ||
| 6357 | (0x1D704, 'M', u'ι'), | ||
| 6358 | (0x1D705, 'M', u'κ'), | ||
| 6359 | (0x1D706, 'M', u'λ'), | ||
| 6360 | (0x1D707, 'M', u'μ'), | ||
| 6361 | (0x1D708, 'M', u'ν'), | ||
| 6362 | (0x1D709, 'M', u'ξ'), | ||
| 6363 | (0x1D70A, 'M', u'ο'), | ||
| 6364 | (0x1D70B, 'M', u'π'), | ||
| 6365 | (0x1D70C, 'M', u'ρ'), | ||
| 6366 | (0x1D70D, 'M', u'σ'), | ||
| 6367 | (0x1D70F, 'M', u'τ'), | ||
| 6368 | (0x1D710, 'M', u'υ'), | ||
| 6369 | (0x1D711, 'M', u'φ'), | ||
| 6370 | (0x1D712, 'M', u'χ'), | ||
| 6371 | (0x1D713, 'M', u'ψ'), | ||
| 6372 | (0x1D714, 'M', u'ω'), | ||
| 6373 | (0x1D715, 'M', u'∂'), | ||
| 6374 | (0x1D716, 'M', u'ε'), | ||
| 6375 | (0x1D717, 'M', u'θ'), | ||
| 6376 | (0x1D718, 'M', u'κ'), | ||
| 6377 | (0x1D719, 'M', u'φ'), | ||
| 6378 | (0x1D71A, 'M', u'ρ'), | ||
| 6379 | (0x1D71B, 'M', u'π'), | ||
| 6380 | (0x1D71C, 'M', u'α'), | ||
| 6381 | (0x1D71D, 'M', u'β'), | ||
| 6382 | (0x1D71E, 'M', u'γ'), | ||
| 6383 | (0x1D71F, 'M', u'δ'), | ||
| 6384 | (0x1D720, 'M', u'ε'), | ||
| 6385 | (0x1D721, 'M', u'ζ'), | ||
| 6386 | (0x1D722, 'M', u'η'), | ||
| 6387 | (0x1D723, 'M', u'θ'), | ||
| 6388 | (0x1D724, 'M', u'ι'), | ||
| 6389 | (0x1D725, 'M', u'κ'), | ||
| 6390 | (0x1D726, 'M', u'λ'), | ||
| 6391 | (0x1D727, 'M', u'μ'), | ||
| 6392 | (0x1D728, 'M', u'ν'), | ||
| 6393 | (0x1D729, 'M', u'ξ'), | ||
| 6394 | (0x1D72A, 'M', u'ο'), | ||
| 6395 | (0x1D72B, 'M', u'π'), | ||
| 6396 | (0x1D72C, 'M', u'ρ'), | ||
| 6397 | (0x1D72D, 'M', u'θ'), | ||
| 6398 | (0x1D72E, 'M', u'σ'), | ||
| 6399 | (0x1D72F, 'M', u'τ'), | ||
| 6400 | (0x1D730, 'M', u'υ'), | ||
| 6401 | (0x1D731, 'M', u'φ'), | ||
| 6402 | (0x1D732, 'M', u'χ'), | ||
| 6403 | (0x1D733, 'M', u'ψ'), | ||
| 6404 | (0x1D734, 'M', u'ω'), | ||
| 6405 | (0x1D735, 'M', u'∇'), | ||
| 6406 | (0x1D736, 'M', u'α'), | ||
| 6407 | (0x1D737, 'M', u'β'), | ||
| 6408 | (0x1D738, 'M', u'γ'), | ||
| 6409 | (0x1D739, 'M', u'δ'), | ||
| 6410 | (0x1D73A, 'M', u'ε'), | ||
| 6411 | (0x1D73B, 'M', u'ζ'), | ||
| 6412 | (0x1D73C, 'M', u'η'), | ||
| 6413 | (0x1D73D, 'M', u'θ'), | ||
| 6414 | (0x1D73E, 'M', u'ι'), | ||
| 6415 | (0x1D73F, 'M', u'κ'), | ||
| 6416 | (0x1D740, 'M', u'λ'), | ||
| 6417 | (0x1D741, 'M', u'μ'), | ||
| 6418 | (0x1D742, 'M', u'ν'), | ||
| 6419 | (0x1D743, 'M', u'ξ'), | ||
| 6420 | (0x1D744, 'M', u'ο'), | ||
| 6421 | (0x1D745, 'M', u'π'), | ||
| 6422 | (0x1D746, 'M', u'ρ'), | ||
| 6423 | (0x1D747, 'M', u'σ'), | ||
| 6424 | (0x1D749, 'M', u'τ'), | ||
| 6425 | (0x1D74A, 'M', u'υ'), | ||
| 6426 | (0x1D74B, 'M', u'φ'), | ||
| 6427 | (0x1D74C, 'M', u'χ'), | ||
| 6428 | (0x1D74D, 'M', u'ψ'), | ||
| 6429 | (0x1D74E, 'M', u'ω'), | ||
| 6430 | (0x1D74F, 'M', u'∂'), | ||
| 6431 | (0x1D750, 'M', u'ε'), | ||
| 6432 | (0x1D751, 'M', u'θ'), | ||
| 6433 | (0x1D752, 'M', u'κ'), | ||
| 6434 | (0x1D753, 'M', u'φ'), | ||
| 6435 | (0x1D754, 'M', u'ρ'), | ||
| 6436 | (0x1D755, 'M', u'π'), | ||
| 6437 | (0x1D756, 'M', u'α'), | ||
| 6438 | (0x1D757, 'M', u'β'), | ||
| 6439 | (0x1D758, 'M', u'γ'), | ||
| 6440 | (0x1D759, 'M', u'δ'), | ||
| 6441 | (0x1D75A, 'M', u'ε'), | ||
| 6442 | (0x1D75B, 'M', u'ζ'), | ||
| 6443 | (0x1D75C, 'M', u'η'), | ||
| 6444 | (0x1D75D, 'M', u'θ'), | ||
| 6445 | (0x1D75E, 'M', u'ι'), | ||
| 6446 | (0x1D75F, 'M', u'κ'), | ||
| 6447 | (0x1D760, 'M', u'λ'), | ||
| 6448 | (0x1D761, 'M', u'μ'), | ||
| 6449 | (0x1D762, 'M', u'ν'), | ||
| 6450 | (0x1D763, 'M', u'ξ'), | ||
| 6451 | (0x1D764, 'M', u'ο'), | ||
| 6452 | (0x1D765, 'M', u'π'), | ||
| 6453 | (0x1D766, 'M', u'ρ'), | ||
| 6454 | ] | ||
| 6455 | |||
| 6456 | def _seg_62(): | ||
| 6457 | return [ | ||
| 6458 | (0x1D767, 'M', u'θ'), | ||
| 6459 | (0x1D768, 'M', u'σ'), | ||
| 6460 | (0x1D769, 'M', u'τ'), | ||
| 6461 | (0x1D76A, 'M', u'υ'), | ||
| 6462 | (0x1D76B, 'M', u'φ'), | ||
| 6463 | (0x1D76C, 'M', u'χ'), | ||
| 6464 | (0x1D76D, 'M', u'ψ'), | ||
| 6465 | (0x1D76E, 'M', u'ω'), | ||
| 6466 | (0x1D76F, 'M', u'∇'), | ||
| 6467 | (0x1D770, 'M', u'α'), | ||
| 6468 | (0x1D771, 'M', u'β'), | ||
| 6469 | (0x1D772, 'M', u'γ'), | ||
| 6470 | (0x1D773, 'M', u'δ'), | ||
| 6471 | (0x1D774, 'M', u'ε'), | ||
| 6472 | (0x1D775, 'M', u'ζ'), | ||
| 6473 | (0x1D776, 'M', u'η'), | ||
| 6474 | (0x1D777, 'M', u'θ'), | ||
| 6475 | (0x1D778, 'M', u'ι'), | ||
| 6476 | (0x1D779, 'M', u'κ'), | ||
| 6477 | (0x1D77A, 'M', u'λ'), | ||
| 6478 | (0x1D77B, 'M', u'μ'), | ||
| 6479 | (0x1D77C, 'M', u'ν'), | ||
| 6480 | (0x1D77D, 'M', u'ξ'), | ||
| 6481 | (0x1D77E, 'M', u'ο'), | ||
| 6482 | (0x1D77F, 'M', u'π'), | ||
| 6483 | (0x1D780, 'M', u'ρ'), | ||
| 6484 | (0x1D781, 'M', u'σ'), | ||
| 6485 | (0x1D783, 'M', u'τ'), | ||
| 6486 | (0x1D784, 'M', u'υ'), | ||
| 6487 | (0x1D785, 'M', u'φ'), | ||
| 6488 | (0x1D786, 'M', u'χ'), | ||
| 6489 | (0x1D787, 'M', u'ψ'), | ||
| 6490 | (0x1D788, 'M', u'ω'), | ||
| 6491 | (0x1D789, 'M', u'∂'), | ||
| 6492 | (0x1D78A, 'M', u'ε'), | ||
| 6493 | (0x1D78B, 'M', u'θ'), | ||
| 6494 | (0x1D78C, 'M', u'κ'), | ||
| 6495 | (0x1D78D, 'M', u'φ'), | ||
| 6496 | (0x1D78E, 'M', u'ρ'), | ||
| 6497 | (0x1D78F, 'M', u'π'), | ||
| 6498 | (0x1D790, 'M', u'α'), | ||
| 6499 | (0x1D791, 'M', u'β'), | ||
| 6500 | (0x1D792, 'M', u'γ'), | ||
| 6501 | (0x1D793, 'M', u'δ'), | ||
| 6502 | (0x1D794, 'M', u'ε'), | ||
| 6503 | (0x1D795, 'M', u'ζ'), | ||
| 6504 | (0x1D796, 'M', u'η'), | ||
| 6505 | (0x1D797, 'M', u'θ'), | ||
| 6506 | (0x1D798, 'M', u'ι'), | ||
| 6507 | (0x1D799, 'M', u'κ'), | ||
| 6508 | (0x1D79A, 'M', u'λ'), | ||
| 6509 | (0x1D79B, 'M', u'μ'), | ||
| 6510 | (0x1D79C, 'M', u'ν'), | ||
| 6511 | (0x1D79D, 'M', u'ξ'), | ||
| 6512 | (0x1D79E, 'M', u'ο'), | ||
| 6513 | (0x1D79F, 'M', u'π'), | ||
| 6514 | (0x1D7A0, 'M', u'ρ'), | ||
| 6515 | (0x1D7A1, 'M', u'θ'), | ||
| 6516 | (0x1D7A2, 'M', u'σ'), | ||
| 6517 | (0x1D7A3, 'M', u'τ'), | ||
| 6518 | (0x1D7A4, 'M', u'υ'), | ||
| 6519 | (0x1D7A5, 'M', u'φ'), | ||
| 6520 | (0x1D7A6, 'M', u'χ'), | ||
| 6521 | (0x1D7A7, 'M', u'ψ'), | ||
| 6522 | (0x1D7A8, 'M', u'ω'), | ||
| 6523 | (0x1D7A9, 'M', u'∇'), | ||
| 6524 | (0x1D7AA, 'M', u'α'), | ||
| 6525 | (0x1D7AB, 'M', u'β'), | ||
| 6526 | (0x1D7AC, 'M', u'γ'), | ||
| 6527 | (0x1D7AD, 'M', u'δ'), | ||
| 6528 | (0x1D7AE, 'M', u'ε'), | ||
| 6529 | (0x1D7AF, 'M', u'ζ'), | ||
| 6530 | (0x1D7B0, 'M', u'η'), | ||
| 6531 | (0x1D7B1, 'M', u'θ'), | ||
| 6532 | (0x1D7B2, 'M', u'ι'), | ||
| 6533 | (0x1D7B3, 'M', u'κ'), | ||
| 6534 | (0x1D7B4, 'M', u'λ'), | ||
| 6535 | (0x1D7B5, 'M', u'μ'), | ||
| 6536 | (0x1D7B6, 'M', u'ν'), | ||
| 6537 | (0x1D7B7, 'M', u'ξ'), | ||
| 6538 | (0x1D7B8, 'M', u'ο'), | ||
| 6539 | (0x1D7B9, 'M', u'π'), | ||
| 6540 | (0x1D7BA, 'M', u'ρ'), | ||
| 6541 | (0x1D7BB, 'M', u'σ'), | ||
| 6542 | (0x1D7BD, 'M', u'τ'), | ||
| 6543 | (0x1D7BE, 'M', u'υ'), | ||
| 6544 | (0x1D7BF, 'M', u'φ'), | ||
| 6545 | (0x1D7C0, 'M', u'χ'), | ||
| 6546 | (0x1D7C1, 'M', u'ψ'), | ||
| 6547 | (0x1D7C2, 'M', u'ω'), | ||
| 6548 | (0x1D7C3, 'M', u'∂'), | ||
| 6549 | (0x1D7C4, 'M', u'ε'), | ||
| 6550 | (0x1D7C5, 'M', u'θ'), | ||
| 6551 | (0x1D7C6, 'M', u'κ'), | ||
| 6552 | (0x1D7C7, 'M', u'φ'), | ||
| 6553 | (0x1D7C8, 'M', u'ρ'), | ||
| 6554 | (0x1D7C9, 'M', u'π'), | ||
| 6555 | (0x1D7CA, 'M', u'ϝ'), | ||
| 6556 | (0x1D7CC, 'X'), | ||
| 6557 | (0x1D7CE, 'M', u'0'), | ||
| 6558 | ] | ||
| 6559 | |||
| 6560 | def _seg_63(): | ||
| 6561 | return [ | ||
| 6562 | (0x1D7CF, 'M', u'1'), | ||
| 6563 | (0x1D7D0, 'M', u'2'), | ||
| 6564 | (0x1D7D1, 'M', u'3'), | ||
| 6565 | (0x1D7D2, 'M', u'4'), | ||
| 6566 | (0x1D7D3, 'M', u'5'), | ||
| 6567 | (0x1D7D4, 'M', u'6'), | ||
| 6568 | (0x1D7D5, 'M', u'7'), | ||
| 6569 | (0x1D7D6, 'M', u'8'), | ||
| 6570 | (0x1D7D7, 'M', u'9'), | ||
| 6571 | (0x1D7D8, 'M', u'0'), | ||
| 6572 | (0x1D7D9, 'M', u'1'), | ||
| 6573 | (0x1D7DA, 'M', u'2'), | ||
| 6574 | (0x1D7DB, 'M', u'3'), | ||
| 6575 | (0x1D7DC, 'M', u'4'), | ||
| 6576 | (0x1D7DD, 'M', u'5'), | ||
| 6577 | (0x1D7DE, 'M', u'6'), | ||
| 6578 | (0x1D7DF, 'M', u'7'), | ||
| 6579 | (0x1D7E0, 'M', u'8'), | ||
| 6580 | (0x1D7E1, 'M', u'9'), | ||
| 6581 | (0x1D7E2, 'M', u'0'), | ||
| 6582 | (0x1D7E3, 'M', u'1'), | ||
| 6583 | (0x1D7E4, 'M', u'2'), | ||
| 6584 | (0x1D7E5, 'M', u'3'), | ||
| 6585 | (0x1D7E6, 'M', u'4'), | ||
| 6586 | (0x1D7E7, 'M', u'5'), | ||
| 6587 | (0x1D7E8, 'M', u'6'), | ||
| 6588 | (0x1D7E9, 'M', u'7'), | ||
| 6589 | (0x1D7EA, 'M', u'8'), | ||
| 6590 | (0x1D7EB, 'M', u'9'), | ||
| 6591 | (0x1D7EC, 'M', u'0'), | ||
| 6592 | (0x1D7ED, 'M', u'1'), | ||
| 6593 | (0x1D7EE, 'M', u'2'), | ||
| 6594 | (0x1D7EF, 'M', u'3'), | ||
| 6595 | (0x1D7F0, 'M', u'4'), | ||
| 6596 | (0x1D7F1, 'M', u'5'), | ||
| 6597 | (0x1D7F2, 'M', u'6'), | ||
| 6598 | (0x1D7F3, 'M', u'7'), | ||
| 6599 | (0x1D7F4, 'M', u'8'), | ||
| 6600 | (0x1D7F5, 'M', u'9'), | ||
| 6601 | (0x1D7F6, 'M', u'0'), | ||
| 6602 | (0x1D7F7, 'M', u'1'), | ||
| 6603 | (0x1D7F8, 'M', u'2'), | ||
| 6604 | (0x1D7F9, 'M', u'3'), | ||
| 6605 | (0x1D7FA, 'M', u'4'), | ||
| 6606 | (0x1D7FB, 'M', u'5'), | ||
| 6607 | (0x1D7FC, 'M', u'6'), | ||
| 6608 | (0x1D7FD, 'M', u'7'), | ||
| 6609 | (0x1D7FE, 'M', u'8'), | ||
| 6610 | (0x1D7FF, 'M', u'9'), | ||
| 6611 | (0x1D800, 'X'), | ||
| 6612 | (0x1EE00, 'M', u'ا'), | ||
| 6613 | (0x1EE01, 'M', u'ب'), | ||
| 6614 | (0x1EE02, 'M', u'ج'), | ||
| 6615 | (0x1EE03, 'M', u'د'), | ||
| 6616 | (0x1EE04, 'X'), | ||
| 6617 | (0x1EE05, 'M', u'و'), | ||
| 6618 | (0x1EE06, 'M', u'ز'), | ||
| 6619 | (0x1EE07, 'M', u'ح'), | ||
| 6620 | (0x1EE08, 'M', u'ط'), | ||
| 6621 | (0x1EE09, 'M', u'ي'), | ||
| 6622 | (0x1EE0A, 'M', u'ك'), | ||
| 6623 | (0x1EE0B, 'M', u'ل'), | ||
| 6624 | (0x1EE0C, 'M', u'م'), | ||
| 6625 | (0x1EE0D, 'M', u'ن'), | ||
| 6626 | (0x1EE0E, 'M', u'س'), | ||
| 6627 | (0x1EE0F, 'M', u'ع'), | ||
| 6628 | (0x1EE10, 'M', u'ف'), | ||
| 6629 | (0x1EE11, 'M', u'ص'), | ||
| 6630 | (0x1EE12, 'M', u'ق'), | ||
| 6631 | (0x1EE13, 'M', u'ر'), | ||
| 6632 | (0x1EE14, 'M', u'ش'), | ||
| 6633 | (0x1EE15, 'M', u'ت'), | ||
| 6634 | (0x1EE16, 'M', u'ث'), | ||
| 6635 | (0x1EE17, 'M', u'خ'), | ||
| 6636 | (0x1EE18, 'M', u'ذ'), | ||
| 6637 | (0x1EE19, 'M', u'ض'), | ||
| 6638 | (0x1EE1A, 'M', u'ظ'), | ||
| 6639 | (0x1EE1B, 'M', u'غ'), | ||
| 6640 | (0x1EE1C, 'M', u'ٮ'), | ||
| 6641 | (0x1EE1D, 'M', u'ں'), | ||
| 6642 | (0x1EE1E, 'M', u'ڡ'), | ||
| 6643 | (0x1EE1F, 'M', u'ٯ'), | ||
| 6644 | (0x1EE20, 'X'), | ||
| 6645 | (0x1EE21, 'M', u'ب'), | ||
| 6646 | (0x1EE22, 'M', u'ج'), | ||
| 6647 | (0x1EE23, 'X'), | ||
| 6648 | (0x1EE24, 'M', u'ه'), | ||
| 6649 | (0x1EE25, 'X'), | ||
| 6650 | (0x1EE27, 'M', u'ح'), | ||
| 6651 | (0x1EE28, 'X'), | ||
| 6652 | (0x1EE29, 'M', u'ي'), | ||
| 6653 | (0x1EE2A, 'M', u'ك'), | ||
| 6654 | (0x1EE2B, 'M', u'ل'), | ||
| 6655 | (0x1EE2C, 'M', u'م'), | ||
| 6656 | (0x1EE2D, 'M', u'ن'), | ||
| 6657 | (0x1EE2E, 'M', u'س'), | ||
| 6658 | (0x1EE2F, 'M', u'ع'), | ||
| 6659 | (0x1EE30, 'M', u'ف'), | ||
| 6660 | (0x1EE31, 'M', u'ص'), | ||
| 6661 | (0x1EE32, 'M', u'ق'), | ||
| 6662 | ] | ||
| 6663 | |||
| 6664 | def _seg_64(): | ||
| 6665 | return [ | ||
| 6666 | (0x1EE33, 'X'), | ||
| 6667 | (0x1EE34, 'M', u'ش'), | ||
| 6668 | (0x1EE35, 'M', u'ت'), | ||
| 6669 | (0x1EE36, 'M', u'ث'), | ||
| 6670 | (0x1EE37, 'M', u'خ'), | ||
| 6671 | (0x1EE38, 'X'), | ||
| 6672 | (0x1EE39, 'M', u'ض'), | ||
| 6673 | (0x1EE3A, 'X'), | ||
| 6674 | (0x1EE3B, 'M', u'غ'), | ||
| 6675 | (0x1EE3C, 'X'), | ||
| 6676 | (0x1EE42, 'M', u'ج'), | ||
| 6677 | (0x1EE43, 'X'), | ||
| 6678 | (0x1EE47, 'M', u'ح'), | ||
| 6679 | (0x1EE48, 'X'), | ||
| 6680 | (0x1EE49, 'M', u'ي'), | ||
| 6681 | (0x1EE4A, 'X'), | ||
| 6682 | (0x1EE4B, 'M', u'ل'), | ||
| 6683 | (0x1EE4C, 'X'), | ||
| 6684 | (0x1EE4D, 'M', u'ن'), | ||
| 6685 | (0x1EE4E, 'M', u'س'), | ||
| 6686 | (0x1EE4F, 'M', u'ع'), | ||
| 6687 | (0x1EE50, 'X'), | ||
| 6688 | (0x1EE51, 'M', u'ص'), | ||
| 6689 | (0x1EE52, 'M', u'ق'), | ||
| 6690 | (0x1EE53, 'X'), | ||
| 6691 | (0x1EE54, 'M', u'ش'), | ||
| 6692 | (0x1EE55, 'X'), | ||
| 6693 | (0x1EE57, 'M', u'خ'), | ||
| 6694 | (0x1EE58, 'X'), | ||
| 6695 | (0x1EE59, 'M', u'ض'), | ||
| 6696 | (0x1EE5A, 'X'), | ||
| 6697 | (0x1EE5B, 'M', u'غ'), | ||
| 6698 | (0x1EE5C, 'X'), | ||
| 6699 | (0x1EE5D, 'M', u'ں'), | ||
| 6700 | (0x1EE5E, 'X'), | ||
| 6701 | (0x1EE5F, 'M', u'ٯ'), | ||
| 6702 | (0x1EE60, 'X'), | ||
| 6703 | (0x1EE61, 'M', u'ب'), | ||
| 6704 | (0x1EE62, 'M', u'ج'), | ||
| 6705 | (0x1EE63, 'X'), | ||
| 6706 | (0x1EE64, 'M', u'ه'), | ||
| 6707 | (0x1EE65, 'X'), | ||
| 6708 | (0x1EE67, 'M', u'ح'), | ||
| 6709 | (0x1EE68, 'M', u'ط'), | ||
| 6710 | (0x1EE69, 'M', u'ي'), | ||
| 6711 | (0x1EE6A, 'M', u'ك'), | ||
| 6712 | (0x1EE6B, 'X'), | ||
| 6713 | (0x1EE6C, 'M', u'م'), | ||
| 6714 | (0x1EE6D, 'M', u'ن'), | ||
| 6715 | (0x1EE6E, 'M', u'س'), | ||
| 6716 | (0x1EE6F, 'M', u'ع'), | ||
| 6717 | (0x1EE70, 'M', u'ف'), | ||
| 6718 | (0x1EE71, 'M', u'ص'), | ||
| 6719 | (0x1EE72, 'M', u'ق'), | ||
| 6720 | (0x1EE73, 'X'), | ||
| 6721 | (0x1EE74, 'M', u'ش'), | ||
| 6722 | (0x1EE75, 'M', u'ت'), | ||
| 6723 | (0x1EE76, 'M', u'ث'), | ||
| 6724 | (0x1EE77, 'M', u'خ'), | ||
| 6725 | (0x1EE78, 'X'), | ||
| 6726 | (0x1EE79, 'M', u'ض'), | ||
| 6727 | (0x1EE7A, 'M', u'ظ'), | ||
| 6728 | (0x1EE7B, 'M', u'غ'), | ||
| 6729 | (0x1EE7C, 'M', u'ٮ'), | ||
| 6730 | (0x1EE7D, 'X'), | ||
| 6731 | (0x1EE7E, 'M', u'ڡ'), | ||
| 6732 | (0x1EE7F, 'X'), | ||
| 6733 | (0x1EE80, 'M', u'ا'), | ||
| 6734 | (0x1EE81, 'M', u'ب'), | ||
| 6735 | (0x1EE82, 'M', u'ج'), | ||
| 6736 | (0x1EE83, 'M', u'د'), | ||
| 6737 | (0x1EE84, 'M', u'ه'), | ||
| 6738 | (0x1EE85, 'M', u'و'), | ||
| 6739 | (0x1EE86, 'M', u'ز'), | ||
| 6740 | (0x1EE87, 'M', u'ح'), | ||
| 6741 | (0x1EE88, 'M', u'ط'), | ||
| 6742 | (0x1EE89, 'M', u'ي'), | ||
| 6743 | (0x1EE8A, 'X'), | ||
| 6744 | (0x1EE8B, 'M', u'ل'), | ||
| 6745 | (0x1EE8C, 'M', u'م'), | ||
| 6746 | (0x1EE8D, 'M', u'ن'), | ||
| 6747 | (0x1EE8E, 'M', u'س'), | ||
| 6748 | (0x1EE8F, 'M', u'ع'), | ||
| 6749 | (0x1EE90, 'M', u'ف'), | ||
| 6750 | (0x1EE91, 'M', u'ص'), | ||
| 6751 | (0x1EE92, 'M', u'ق'), | ||
| 6752 | (0x1EE93, 'M', u'ر'), | ||
| 6753 | (0x1EE94, 'M', u'ش'), | ||
| 6754 | (0x1EE95, 'M', u'ت'), | ||
| 6755 | (0x1EE96, 'M', u'ث'), | ||
| 6756 | (0x1EE97, 'M', u'خ'), | ||
| 6757 | (0x1EE98, 'M', u'ذ'), | ||
| 6758 | (0x1EE99, 'M', u'ض'), | ||
| 6759 | (0x1EE9A, 'M', u'ظ'), | ||
| 6760 | (0x1EE9B, 'M', u'غ'), | ||
| 6761 | (0x1EE9C, 'X'), | ||
| 6762 | (0x1EEA1, 'M', u'ب'), | ||
| 6763 | (0x1EEA2, 'M', u'ج'), | ||
| 6764 | (0x1EEA3, 'M', u'د'), | ||
| 6765 | (0x1EEA4, 'X'), | ||
| 6766 | ] | ||
| 6767 | |||
| 6768 | def _seg_65(): | ||
| 6769 | return [ | ||
| 6770 | (0x1EEA5, 'M', u'و'), | ||
| 6771 | (0x1EEA6, 'M', u'ز'), | ||
| 6772 | (0x1EEA7, 'M', u'ح'), | ||
| 6773 | (0x1EEA8, 'M', u'ط'), | ||
| 6774 | (0x1EEA9, 'M', u'ي'), | ||
| 6775 | (0x1EEAA, 'X'), | ||
| 6776 | (0x1EEAB, 'M', u'ل'), | ||
| 6777 | (0x1EEAC, 'M', u'م'), | ||
| 6778 | (0x1EEAD, 'M', u'ن'), | ||
| 6779 | (0x1EEAE, 'M', u'س'), | ||
| 6780 | (0x1EEAF, 'M', u'ع'), | ||
| 6781 | (0x1EEB0, 'M', u'ف'), | ||
| 6782 | (0x1EEB1, 'M', u'ص'), | ||
| 6783 | (0x1EEB2, 'M', u'ق'), | ||
| 6784 | (0x1EEB3, 'M', u'ر'), | ||
| 6785 | (0x1EEB4, 'M', u'ش'), | ||
| 6786 | (0x1EEB5, 'M', u'ت'), | ||
| 6787 | (0x1EEB6, 'M', u'ث'), | ||
| 6788 | (0x1EEB7, 'M', u'خ'), | ||
| 6789 | (0x1EEB8, 'M', u'ذ'), | ||
| 6790 | (0x1EEB9, 'M', u'ض'), | ||
| 6791 | (0x1EEBA, 'M', u'ظ'), | ||
| 6792 | (0x1EEBB, 'M', u'غ'), | ||
| 6793 | (0x1EEBC, 'X'), | ||
| 6794 | (0x1EEF0, 'V'), | ||
| 6795 | (0x1EEF2, 'X'), | ||
| 6796 | (0x1F000, 'V'), | ||
| 6797 | (0x1F02C, 'X'), | ||
| 6798 | (0x1F030, 'V'), | ||
| 6799 | (0x1F094, 'X'), | ||
| 6800 | (0x1F0A0, 'V'), | ||
| 6801 | (0x1F0AF, 'X'), | ||
| 6802 | (0x1F0B1, 'V'), | ||
| 6803 | (0x1F0BF, 'X'), | ||
| 6804 | (0x1F0C1, 'V'), | ||
| 6805 | (0x1F0D0, 'X'), | ||
| 6806 | (0x1F0D1, 'V'), | ||
| 6807 | (0x1F0E0, 'X'), | ||
| 6808 | (0x1F101, '3', u'0,'), | ||
| 6809 | (0x1F102, '3', u'1,'), | ||
| 6810 | (0x1F103, '3', u'2,'), | ||
| 6811 | (0x1F104, '3', u'3,'), | ||
| 6812 | (0x1F105, '3', u'4,'), | ||
| 6813 | (0x1F106, '3', u'5,'), | ||
| 6814 | (0x1F107, '3', u'6,'), | ||
| 6815 | (0x1F108, '3', u'7,'), | ||
| 6816 | (0x1F109, '3', u'8,'), | ||
| 6817 | (0x1F10A, '3', u'9,'), | ||
| 6818 | (0x1F10B, 'X'), | ||
| 6819 | (0x1F110, '3', u'(a)'), | ||
| 6820 | (0x1F111, '3', u'(b)'), | ||
| 6821 | (0x1F112, '3', u'(c)'), | ||
| 6822 | (0x1F113, '3', u'(d)'), | ||
| 6823 | (0x1F114, '3', u'(e)'), | ||
| 6824 | (0x1F115, '3', u'(f)'), | ||
| 6825 | (0x1F116, '3', u'(g)'), | ||
| 6826 | (0x1F117, '3', u'(h)'), | ||
| 6827 | (0x1F118, '3', u'(i)'), | ||
| 6828 | (0x1F119, '3', u'(j)'), | ||
| 6829 | (0x1F11A, '3', u'(k)'), | ||
| 6830 | (0x1F11B, '3', u'(l)'), | ||
| 6831 | (0x1F11C, '3', u'(m)'), | ||
| 6832 | (0x1F11D, '3', u'(n)'), | ||
| 6833 | (0x1F11E, '3', u'(o)'), | ||
| 6834 | (0x1F11F, '3', u'(p)'), | ||
| 6835 | (0x1F120, '3', u'(q)'), | ||
| 6836 | (0x1F121, '3', u'(r)'), | ||
| 6837 | (0x1F122, '3', u'(s)'), | ||
| 6838 | (0x1F123, '3', u'(t)'), | ||
| 6839 | (0x1F124, '3', u'(u)'), | ||
| 6840 | (0x1F125, '3', u'(v)'), | ||
| 6841 | (0x1F126, '3', u'(w)'), | ||
| 6842 | (0x1F127, '3', u'(x)'), | ||
| 6843 | (0x1F128, '3', u'(y)'), | ||
| 6844 | (0x1F129, '3', u'(z)'), | ||
| 6845 | (0x1F12A, 'M', u'〔s〕'), | ||
| 6846 | (0x1F12B, 'M', u'c'), | ||
| 6847 | (0x1F12C, 'M', u'r'), | ||
| 6848 | (0x1F12D, 'M', u'cd'), | ||
| 6849 | (0x1F12E, 'M', u'wz'), | ||
| 6850 | (0x1F12F, 'X'), | ||
| 6851 | (0x1F130, 'M', u'a'), | ||
| 6852 | (0x1F131, 'M', u'b'), | ||
| 6853 | (0x1F132, 'M', u'c'), | ||
| 6854 | (0x1F133, 'M', u'd'), | ||
| 6855 | (0x1F134, 'M', u'e'), | ||
| 6856 | (0x1F135, 'M', u'f'), | ||
| 6857 | (0x1F136, 'M', u'g'), | ||
| 6858 | (0x1F137, 'M', u'h'), | ||
| 6859 | (0x1F138, 'M', u'i'), | ||
| 6860 | (0x1F139, 'M', u'j'), | ||
| 6861 | (0x1F13A, 'M', u'k'), | ||
| 6862 | (0x1F13B, 'M', u'l'), | ||
| 6863 | (0x1F13C, 'M', u'm'), | ||
| 6864 | (0x1F13D, 'M', u'n'), | ||
| 6865 | (0x1F13E, 'M', u'o'), | ||
| 6866 | (0x1F13F, 'M', u'p'), | ||
| 6867 | (0x1F140, 'M', u'q'), | ||
| 6868 | (0x1F141, 'M', u'r'), | ||
| 6869 | (0x1F142, 'M', u's'), | ||
| 6870 | ] | ||
| 6871 | |||
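The '3' rows just above (e.g. `0x1F101` mapping to `u'0,'`) only take effect when STD3 ASCII rules are relaxed. A hedged paraphrase of how the status letters drive per-character remapping under UTS #46 — a simplified sketch, not the vendored implementation; the flag names mirror the standard's two processing options:

```python
def apply_row(char, row, std3_rules=False, transitional=False):
    """Decide what a single character becomes under a table row.

    Simplified sketch of UTS #46 processing (illustrative only):
    raises ValueError for characters the table disallows.
    """
    status = row[1]
    replacement = row[2] if len(row) == 3 else None
    if status == 'V':                    # valid: keep as-is
        return char
    if status == 'I':                    # ignored: drop silently
        return ''
    if status == 'M':                    # mapped: substitute
        return replacement
    if status == 'D':                    # deviation: mode-dependent
        return replacement if transitional else char
    if status == '3':                    # disallowed under STD3 only
        if std3_rules:
            raise ValueError('disallowed under STD3 rules: %r' % char)
        return replacement if replacement is not None else char
    raise ValueError('disallowed code point: %r' % char)  # status 'X'
```

Applied over a whole label, this is what turns a fancy presentation form such as a circled or mathematical letter into the plain lowercase character the rest of the IDNA pipeline expects.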
| 6872 | def _seg_66(): | ||
| 6873 | return [ | ||
| 6874 | (0x1F143, 'M', u't'), | ||
| 6875 | (0x1F144, 'M', u'u'), | ||
| 6876 | (0x1F145, 'M', u'v'), | ||
| 6877 | (0x1F146, 'M', u'w'), | ||
| 6878 | (0x1F147, 'M', u'x'), | ||
| 6879 | (0x1F148, 'M', u'y'), | ||
| 6880 | (0x1F149, 'M', u'z'), | ||
| 6881 | (0x1F14A, 'M', u'hv'), | ||
| 6882 | (0x1F14B, 'M', u'mv'), | ||
| 6883 | (0x1F14C, 'M', u'sd'), | ||
| 6884 | (0x1F14D, 'M', u'ss'), | ||
| 6885 | (0x1F14E, 'M', u'ppv'), | ||
| 6886 | (0x1F14F, 'M', u'wc'), | ||
| 6887 | (0x1F150, 'V'), | ||
| 6888 | (0x1F16A, 'M', u'mc'), | ||
| 6889 | (0x1F16B, 'M', u'md'), | ||
| 6890 | (0x1F16C, 'X'), | ||
| 6891 | (0x1F170, 'V'), | ||
| 6892 | (0x1F190, 'M', u'dj'), | ||
| 6893 | (0x1F191, 'V'), | ||
| 6894 | (0x1F19B, 'X'), | ||
| 6895 | (0x1F1E6, 'V'), | ||
| 6896 | (0x1F200, 'M', u'ほか'), | ||
| 6897 | (0x1F201, 'M', u'ココ'), | ||
| 6898 | (0x1F202, 'M', u'サ'), | ||
| 6899 | (0x1F203, 'X'), | ||
| 6900 | (0x1F210, 'M', u'手'), | ||
| 6901 | (0x1F211, 'M', u'字'), | ||
| 6902 | (0x1F212, 'M', u'双'), | ||
| 6903 | (0x1F213, 'M', u'デ'), | ||
| 6904 | (0x1F214, 'M', u'二'), | ||
| 6905 | (0x1F215, 'M', u'多'), | ||
| 6906 | (0x1F216, 'M', u'解'), | ||
| 6907 | (0x1F217, 'M', u'天'), | ||
| 6908 | (0x1F218, 'M', u'交'), | ||
| 6909 | (0x1F219, 'M', u'映'), | ||
| 6910 | (0x1F21A, 'M', u'無'), | ||
| 6911 | (0x1F21B, 'M', u'料'), | ||
| 6912 | (0x1F21C, 'M', u'前'), | ||
| 6913 | (0x1F21D, 'M', u'後'), | ||
| 6914 | (0x1F21E, 'M', u'再'), | ||
| 6915 | (0x1F21F, 'M', u'新'), | ||
| 6916 | (0x1F220, 'M', u'初'), | ||
| 6917 | (0x1F221, 'M', u'終'), | ||
| 6918 | (0x1F222, 'M', u'生'), | ||
| 6919 | (0x1F223, 'M', u'販'), | ||
| 6920 | (0x1F224, 'M', u'声'), | ||
| 6921 | (0x1F225, 'M', u'吹'), | ||
| 6922 | (0x1F226, 'M', u'演'), | ||
| 6923 | (0x1F227, 'M', u'投'), | ||
| 6924 | (0x1F228, 'M', u'捕'), | ||
| 6925 | (0x1F229, 'M', u'一'), | ||
| 6926 | (0x1F22A, 'M', u'三'), | ||
| 6927 | (0x1F22B, 'M', u'遊'), | ||
| 6928 | (0x1F22C, 'M', u'左'), | ||
| 6929 | (0x1F22D, 'M', u'中'), | ||
| 6930 | (0x1F22E, 'M', u'右'), | ||
| 6931 | (0x1F22F, 'M', u'指'), | ||
| 6932 | (0x1F230, 'M', u'走'), | ||
| 6933 | (0x1F231, 'M', u'打'), | ||
| 6934 | (0x1F232, 'M', u'禁'), | ||
| 6935 | (0x1F233, 'M', u'空'), | ||
| 6936 | (0x1F234, 'M', u'合'), | ||
| 6937 | (0x1F235, 'M', u'満'), | ||
| 6938 | (0x1F236, 'M', u'有'), | ||
| 6939 | (0x1F237, 'M', u'月'), | ||
| 6940 | (0x1F238, 'M', u'申'), | ||
| 6941 | (0x1F239, 'M', u'割'), | ||
| 6942 | (0x1F23A, 'M', u'営'), | ||
| 6943 | (0x1F23B, 'X'), | ||
| 6944 | (0x1F240, 'M', u'〔本〕'), | ||
| 6945 | (0x1F241, 'M', u'〔三〕'), | ||
| 6946 | (0x1F242, 'M', u'〔二〕'), | ||
| 6947 | (0x1F243, 'M', u'〔安〕'), | ||
| 6948 | (0x1F244, 'M', u'〔点〕'), | ||
| 6949 | (0x1F245, 'M', u'〔打〕'), | ||
| 6950 | (0x1F246, 'M', u'〔盗〕'), | ||
| 6951 | (0x1F247, 'M', u'〔勝〕'), | ||
| 6952 | (0x1F248, 'M', u'〔敗〕'), | ||
| 6953 | (0x1F249, 'X'), | ||
| 6954 | (0x1F250, 'M', u'得'), | ||
| 6955 | (0x1F251, 'M', u'可'), | ||
| 6956 | (0x1F252, 'X'), | ||
| 6957 | (0x1F300, 'V'), | ||
| 6958 | (0x1F321, 'X'), | ||
| 6959 | (0x1F330, 'V'), | ||
| 6960 | (0x1F336, 'X'), | ||
| 6961 | (0x1F337, 'V'), | ||
| 6962 | (0x1F37D, 'X'), | ||
| 6963 | (0x1F380, 'V'), | ||
| 6964 | (0x1F394, 'X'), | ||
| 6965 | (0x1F3A0, 'V'), | ||
| 6966 | (0x1F3C5, 'X'), | ||
| 6967 | (0x1F3C6, 'V'), | ||
| 6968 | (0x1F3CB, 'X'), | ||
| 6969 | (0x1F3E0, 'V'), | ||
| 6970 | (0x1F3F1, 'X'), | ||
| 6971 | (0x1F400, 'V'), | ||
| 6972 | (0x1F43F, 'X'), | ||
| 6973 | (0x1F440, 'V'), | ||
| 6974 | ] | ||
| 6975 | |||
| 6976 | def _seg_67(): | ||
| 6977 | return [ | ||
| 6978 | (0x1F441, 'X'), | ||
| 6979 | (0x1F442, 'V'), | ||
| 6980 | (0x1F4F8, 'X'), | ||
| 6981 | (0x1F4F9, 'V'), | ||
| 6982 | (0x1F4FD, 'X'), | ||
| 6983 | (0x1F500, 'V'), | ||
| 6984 | (0x1F53E, 'X'), | ||
| 6985 | (0x1F540, 'V'), | ||
| 6986 | (0x1F544, 'X'), | ||
| 6987 | (0x1F550, 'V'), | ||
| 6988 | (0x1F568, 'X'), | ||
| 6989 | (0x1F5FB, 'V'), | ||
| 6990 | (0x1F641, 'X'), | ||
| 6991 | (0x1F645, 'V'), | ||
| 6992 | (0x1F650, 'X'), | ||
| 6993 | (0x1F680, 'V'), | ||
| 6994 | (0x1F6C6, 'X'), | ||
| 6995 | (0x1F700, 'V'), | ||
| 6996 | (0x1F774, 'X'), | ||
| 6997 | (0x20000, 'V'), | ||
| 6998 | (0x2A6D7, 'X'), | ||
| 6999 | (0x2A700, 'V'), | ||
| 7000 | (0x2B735, 'X'), | ||
| 7001 | (0x2B740, 'V'), | ||
| 7002 | (0x2B81E, 'X'), | ||
| 7003 | (0x2F800, 'M', u'丽'), | ||
| 7004 | (0x2F801, 'M', u'丸'), | ||
| 7005 | (0x2F802, 'M', u'乁'), | ||
| 7006 | (0x2F803, 'M', u'𠄢'), | ||
| 7007 | (0x2F804, 'M', u'你'), | ||
| 7008 | (0x2F805, 'M', u'侮'), | ||
| 7009 | (0x2F806, 'M', u'侻'), | ||
| 7010 | (0x2F807, 'M', u'倂'), | ||
| 7011 | (0x2F808, 'M', u'偺'), | ||
| 7012 | (0x2F809, 'M', u'備'), | ||
| 7013 | (0x2F80A, 'M', u'僧'), | ||
| 7014 | (0x2F80B, 'M', u'像'), | ||
| 7015 | (0x2F80C, 'M', u'㒞'), | ||
| 7016 | (0x2F80D, 'M', u'𠘺'), | ||
| 7017 | (0x2F80E, 'M', u'免'), | ||
| 7018 | (0x2F80F, 'M', u'兔'), | ||
| 7019 | (0x2F810, 'M', u'兤'), | ||
| 7020 | (0x2F811, 'M', u'具'), | ||
| 7021 | (0x2F812, 'M', u'𠔜'), | ||
| 7022 | (0x2F813, 'M', u'㒹'), | ||
| 7023 | (0x2F814, 'M', u'內'), | ||
| 7024 | (0x2F815, 'M', u'再'), | ||
| 7025 | (0x2F816, 'M', u'𠕋'), | ||
| 7026 | (0x2F817, 'M', u'冗'), | ||
| 7027 | (0x2F818, 'M', u'冤'), | ||
| 7028 | (0x2F819, 'M', u'仌'), | ||
| 7029 | (0x2F81A, 'M', u'冬'), | ||
| 7030 | (0x2F81B, 'M', u'况'), | ||
| 7031 | (0x2F81C, 'M', u'𩇟'), | ||
| 7032 | (0x2F81D, 'M', u'凵'), | ||
| 7033 | (0x2F81E, 'M', u'刃'), | ||
| 7034 | (0x2F81F, 'M', u'㓟'), | ||
| 7035 | (0x2F820, 'M', u'刻'), | ||
| 7036 | (0x2F821, 'M', u'剆'), | ||
| 7037 | (0x2F822, 'M', u'割'), | ||
| 7038 | (0x2F823, 'M', u'剷'), | ||
| 7039 | (0x2F824, 'M', u'㔕'), | ||
| 7040 | (0x2F825, 'M', u'勇'), | ||
| 7041 | (0x2F826, 'M', u'勉'), | ||
| 7042 | (0x2F827, 'M', u'勤'), | ||
| 7043 | (0x2F828, 'M', u'勺'), | ||
| 7044 | (0x2F829, 'M', u'包'), | ||
| 7045 | (0x2F82A, 'M', u'匆'), | ||
| 7046 | (0x2F82B, 'M', u'北'), | ||
| 7047 | (0x2F82C, 'M', u'卉'), | ||
| 7048 | (0x2F82D, 'M', u'卑'), | ||
| 7049 | (0x2F82E, 'M', u'博'), | ||
| 7050 | (0x2F82F, 'M', u'即'), | ||
| 7051 | (0x2F830, 'M', u'卽'), | ||
| 7052 | (0x2F831, 'M', u'卿'), | ||
| 7053 | (0x2F834, 'M', u'𠨬'), | ||
| 7054 | (0x2F835, 'M', u'灰'), | ||
| 7055 | (0x2F836, 'M', u'及'), | ||
| 7056 | (0x2F837, 'M', u'叟'), | ||
| 7057 | (0x2F838, 'M', u'𠭣'), | ||
| 7058 | (0x2F839, 'M', u'叫'), | ||
| 7059 | (0x2F83A, 'M', u'叱'), | ||
| 7060 | (0x2F83B, 'M', u'吆'), | ||
| 7061 | (0x2F83C, 'M', u'咞'), | ||
| 7062 | (0x2F83D, 'M', u'吸'), | ||
| 7063 | (0x2F83E, 'M', u'呈'), | ||
| 7064 | (0x2F83F, 'M', u'周'), | ||
| 7065 | (0x2F840, 'M', u'咢'), | ||
| 7066 | (0x2F841, 'M', u'哶'), | ||
| 7067 | (0x2F842, 'M', u'唐'), | ||
| 7068 | (0x2F843, 'M', u'啓'), | ||
| 7069 | (0x2F844, 'M', u'啣'), | ||
| 7070 | (0x2F845, 'M', u'善'), | ||
| 7071 | (0x2F847, 'M', u'喙'), | ||
| 7072 | (0x2F848, 'M', u'喫'), | ||
| 7073 | (0x2F849, 'M', u'喳'), | ||
| 7074 | (0x2F84A, 'M', u'嗂'), | ||
| 7075 | (0x2F84B, 'M', u'圖'), | ||
| 7076 | (0x2F84C, 'M', u'嘆'), | ||
| 7077 | (0x2F84D, 'M', u'圗'), | ||
| 7078 | ] | ||
| 7079 | |||
| 7080 | def _seg_68(): | ||
| 7081 | return [ | ||
| 7082 | (0x2F84E, 'M', u'噑'), | ||
| 7083 | (0x2F84F, 'M', u'噴'), | ||
| 7084 | (0x2F850, 'M', u'切'), | ||
| 7085 | (0x2F851, 'M', u'壮'), | ||
| 7086 | (0x2F852, 'M', u'城'), | ||
| 7087 | (0x2F853, 'M', u'埴'), | ||
| 7088 | (0x2F854, 'M', u'堍'), | ||
| 7089 | (0x2F855, 'M', u'型'), | ||
| 7090 | (0x2F856, 'M', u'堲'), | ||
| 7091 | (0x2F857, 'M', u'報'), | ||
| 7092 | (0x2F858, 'M', u'墬'), | ||
| 7093 | (0x2F859, 'M', u'𡓤'), | ||
| 7094 | (0x2F85A, 'M', u'売'), | ||
| 7095 | (0x2F85B, 'M', u'壷'), | ||
| 7096 | (0x2F85C, 'M', u'夆'), | ||
| 7097 | (0x2F85D, 'M', u'多'), | ||
| 7098 | (0x2F85E, 'M', u'夢'), | ||
| 7099 | (0x2F85F, 'M', u'奢'), | ||
| 7100 | (0x2F860, 'M', u'𡚨'), | ||
| 7101 | (0x2F861, 'M', u'𡛪'), | ||
| 7102 | (0x2F862, 'M', u'姬'), | ||
| 7103 | (0x2F863, 'M', u'娛'), | ||
| 7104 | (0x2F864, 'M', u'娧'), | ||
| 7105 | (0x2F865, 'M', u'姘'), | ||
| 7106 | (0x2F866, 'M', u'婦'), | ||
| 7107 | (0x2F867, 'M', u'㛮'), | ||
| 7108 | (0x2F868, 'X'), | ||
| 7109 | (0x2F869, 'M', u'嬈'), | ||
| 7110 | (0x2F86A, 'M', u'嬾'), | ||
| 7111 | (0x2F86C, 'M', u'𡧈'), | ||
| 7112 | (0x2F86D, 'M', u'寃'), | ||
| 7113 | (0x2F86E, 'M', u'寘'), | ||
| 7114 | (0x2F86F, 'M', u'寧'), | ||
| 7115 | (0x2F870, 'M', u'寳'), | ||
| 7116 | (0x2F871, 'M', u'𡬘'), | ||
| 7117 | (0x2F872, 'M', u'寿'), | ||
| 7118 | (0x2F873, 'M', u'将'), | ||
| 7119 | (0x2F874, 'X'), | ||
| 7120 | (0x2F875, 'M', u'尢'), | ||
| 7121 | (0x2F876, 'M', u'㞁'), | ||
| 7122 | (0x2F877, 'M', u'屠'), | ||
| 7123 | (0x2F878, 'M', u'屮'), | ||
| 7124 | (0x2F879, 'M', u'峀'), | ||
| 7125 | (0x2F87A, 'M', u'岍'), | ||
| 7126 | (0x2F87B, 'M', u'𡷤'), | ||
| 7127 | (0x2F87C, 'M', u'嵃'), | ||
| 7128 | (0x2F87D, 'M', u'𡷦'), | ||
| 7129 | (0x2F87E, 'M', u'嵮'), | ||
| 7130 | (0x2F87F, 'M', u'嵫'), | ||
| 7131 | (0x2F880, 'M', u'嵼'), | ||
| 7132 | (0x2F881, 'M', u'巡'), | ||
| 7133 | (0x2F882, 'M', u'巢'), | ||
| 7134 | (0x2F883, 'M', u'㠯'), | ||
| 7135 | (0x2F884, 'M', u'巽'), | ||
| 7136 | (0x2F885, 'M', u'帨'), | ||
| 7137 | (0x2F886, 'M', u'帽'), | ||
| 7138 | (0x2F887, 'M', u'幩'), | ||
| 7139 | (0x2F888, 'M', u'㡢'), | ||
| 7140 | (0x2F889, 'M', u'𢆃'), | ||
| 7141 | (0x2F88A, 'M', u'㡼'), | ||
| 7142 | (0x2F88B, 'M', u'庰'), | ||
| 7143 | (0x2F88C, 'M', u'庳'), | ||
| 7144 | (0x2F88D, 'M', u'庶'), | ||
| 7145 | (0x2F88E, 'M', u'廊'), | ||
| 7146 | (0x2F88F, 'M', u'𪎒'), | ||
| 7147 | (0x2F890, 'M', u'廾'), | ||
| 7148 | (0x2F891, 'M', u'𢌱'), | ||
| 7149 | (0x2F893, 'M', u'舁'), | ||
| 7150 | (0x2F894, 'M', u'弢'), | ||
| 7151 | (0x2F896, 'M', u'㣇'), | ||
| 7152 | (0x2F897, 'M', u'𣊸'), | ||
| 7153 | (0x2F898, 'M', u'𦇚'), | ||
| 7154 | (0x2F899, 'M', u'形'), | ||
| 7155 | (0x2F89A, 'M', u'彫'), | ||
| 7156 | (0x2F89B, 'M', u'㣣'), | ||
| 7157 | (0x2F89C, 'M', u'徚'), | ||
| 7158 | (0x2F89D, 'M', u'忍'), | ||
| 7159 | (0x2F89E, 'M', u'志'), | ||
| 7160 | (0x2F89F, 'M', u'忹'), | ||
| 7161 | (0x2F8A0, 'M', u'悁'), | ||
| 7162 | (0x2F8A1, 'M', u'㤺'), | ||
| 7163 | (0x2F8A2, 'M', u'㤜'), | ||
| 7164 | (0x2F8A3, 'M', u'悔'), | ||
| 7165 | (0x2F8A4, 'M', u'𢛔'), | ||
| 7166 | (0x2F8A5, 'M', u'惇'), | ||
| 7167 | (0x2F8A6, 'M', u'慈'), | ||
| 7168 | (0x2F8A7, 'M', u'慌'), | ||
| 7169 | (0x2F8A8, 'M', u'慎'), | ||
| 7170 | (0x2F8A9, 'M', u'慌'), | ||
| 7171 | (0x2F8AA, 'M', u'慺'), | ||
| 7172 | (0x2F8AB, 'M', u'憎'), | ||
| 7173 | (0x2F8AC, 'M', u'憲'), | ||
| 7174 | (0x2F8AD, 'M', u'憤'), | ||
| 7175 | (0x2F8AE, 'M', u'憯'), | ||
| 7176 | (0x2F8AF, 'M', u'懞'), | ||
| 7177 | (0x2F8B0, 'M', u'懲'), | ||
| 7178 | (0x2F8B1, 'M', u'懶'), | ||
| 7179 | (0x2F8B2, 'M', u'成'), | ||
| 7180 | (0x2F8B3, 'M', u'戛'), | ||
| 7181 | (0x2F8B4, 'M', u'扝'), | ||
| 7182 | ] | ||
| 7183 | |||
| 7184 | def _seg_69(): | ||
| 7185 | return [ | ||
| 7186 | (0x2F8B5, 'M', u'抱'), | ||
| 7187 | (0x2F8B6, 'M', u'拔'), | ||
| 7188 | (0x2F8B7, 'M', u'捐'), | ||
| 7189 | (0x2F8B8, 'M', u'𢬌'), | ||
| 7190 | (0x2F8B9, 'M', u'挽'), | ||
| 7191 | (0x2F8BA, 'M', u'拼'), | ||
| 7192 | (0x2F8BB, 'M', u'捨'), | ||
| 7193 | (0x2F8BC, 'M', u'掃'), | ||
| 7194 | (0x2F8BD, 'M', u'揤'), | ||
| 7195 | (0x2F8BE, 'M', u'𢯱'), | ||
| 7196 | (0x2F8BF, 'M', u'搢'), | ||
| 7197 | (0x2F8C0, 'M', u'揅'), | ||
| 7198 | (0x2F8C1, 'M', u'掩'), | ||
| 7199 | (0x2F8C2, 'M', u'㨮'), | ||
| 7200 | (0x2F8C3, 'M', u'摩'), | ||
| 7201 | (0x2F8C4, 'M', u'摾'), | ||
| 7202 | (0x2F8C5, 'M', u'撝'), | ||
| 7203 | (0x2F8C6, 'M', u'摷'), | ||
| 7204 | (0x2F8C7, 'M', u'㩬'), | ||
| 7205 | (0x2F8C8, 'M', u'敏'), | ||
| 7206 | (0x2F8C9, 'M', u'敬'), | ||
| 7207 | (0x2F8CA, 'M', u'𣀊'), | ||
| 7208 | (0x2F8CB, 'M', u'旣'), | ||
| 7209 | (0x2F8CC, 'M', u'書'), | ||
| 7210 | (0x2F8CD, 'M', u'晉'), | ||
| 7211 | (0x2F8CE, 'M', u'㬙'), | ||
| 7212 | (0x2F8CF, 'M', u'暑'), | ||
| 7213 | (0x2F8D0, 'M', u'㬈'), | ||
| 7214 | (0x2F8D1, 'M', u'㫤'), | ||
| 7215 | (0x2F8D2, 'M', u'冒'), | ||
| 7216 | (0x2F8D3, 'M', u'冕'), | ||
| 7217 | (0x2F8D4, 'M', u'最'), | ||
| 7218 | (0x2F8D5, 'M', u'暜'), | ||
| 7219 | (0x2F8D6, 'M', u'肭'), | ||
| 7220 | (0x2F8D7, 'M', u'䏙'), | ||
| 7221 | (0x2F8D8, 'M', u'朗'), | ||
| 7222 | (0x2F8D9, 'M', u'望'), | ||
| 7223 | (0x2F8DA, 'M', u'朡'), | ||
| 7224 | (0x2F8DB, 'M', u'杞'), | ||
| 7225 | (0x2F8DC, 'M', u'杓'), | ||
| 7226 | (0x2F8DD, 'M', u'𣏃'), | ||
| 7227 | (0x2F8DE, 'M', u'㭉'), | ||
| 7228 | (0x2F8DF, 'M', u'柺'), | ||
| 7229 | (0x2F8E0, 'M', u'枅'), | ||
| 7230 | (0x2F8E1, 'M', u'桒'), | ||
| 7231 | (0x2F8E2, 'M', u'梅'), | ||
| 7232 | (0x2F8E3, 'M', u'𣑭'), | ||
| 7233 | (0x2F8E4, 'M', u'梎'), | ||
| 7234 | (0x2F8E5, 'M', u'栟'), | ||
| 7235 | (0x2F8E6, 'M', u'椔'), | ||
| 7236 | (0x2F8E7, 'M', u'㮝'), | ||
| 7237 | (0x2F8E8, 'M', u'楂'), | ||
| 7238 | (0x2F8E9, 'M', u'榣'), | ||
| 7239 | (0x2F8EA, 'M', u'槪'), | ||
| 7240 | (0x2F8EB, 'M', u'檨'), | ||
| 7241 | (0x2F8EC, 'M', u'𣚣'), | ||
| 7242 | (0x2F8ED, 'M', u'櫛'), | ||
| 7243 | (0x2F8EE, 'M', u'㰘'), | ||
| 7244 | (0x2F8EF, 'M', u'次'), | ||
| 7245 | (0x2F8F0, 'M', u'𣢧'), | ||
| 7246 | (0x2F8F1, 'M', u'歔'), | ||
| 7247 | (0x2F8F2, 'M', u'㱎'), | ||
| 7248 | (0x2F8F3, 'M', u'歲'), | ||
| 7249 | (0x2F8F4, 'M', u'殟'), | ||
| 7250 | (0x2F8F5, 'M', u'殺'), | ||
| 7251 | (0x2F8F6, 'M', u'殻'), | ||
| 7252 | (0x2F8F7, 'M', u'𣪍'), | ||
| 7253 | (0x2F8F8, 'M', u'𡴋'), | ||
| 7254 | (0x2F8F9, 'M', u'𣫺'), | ||
| 7255 | (0x2F8FA, 'M', u'汎'), | ||
| 7256 | (0x2F8FB, 'M', u'𣲼'), | ||
| 7257 | (0x2F8FC, 'M', u'沿'), | ||
| 7258 | (0x2F8FD, 'M', u'泍'), | ||
| 7259 | (0x2F8FE, 'M', u'汧'), | ||
| 7260 | (0x2F8FF, 'M', u'洖'), | ||
| 7261 | (0x2F900, 'M', u'派'), | ||
| 7262 | (0x2F901, 'M', u'海'), | ||
| 7263 | (0x2F902, 'M', u'流'), | ||
| 7264 | (0x2F903, 'M', u'浩'), | ||
| 7265 | (0x2F904, 'M', u'浸'), | ||
| 7266 | (0x2F905, 'M', u'涅'), | ||
| 7267 | (0x2F906, 'M', u'𣴞'), | ||
| 7268 | (0x2F907, 'M', u'洴'), | ||
| 7269 | (0x2F908, 'M', u'港'), | ||
| 7270 | (0x2F909, 'M', u'湮'), | ||
| 7271 | (0x2F90A, 'M', u'㴳'), | ||
| 7272 | (0x2F90B, 'M', u'滋'), | ||
| 7273 | (0x2F90C, 'M', u'滇'), | ||
| 7274 | (0x2F90D, 'M', u'𣻑'), | ||
| 7275 | (0x2F90E, 'M', u'淹'), | ||
| 7276 | (0x2F90F, 'M', u'潮'), | ||
| 7277 | (0x2F910, 'M', u'𣽞'), | ||
| 7278 | (0x2F911, 'M', u'𣾎'), | ||
| 7279 | (0x2F912, 'M', u'濆'), | ||
| 7280 | (0x2F913, 'M', u'瀹'), | ||
| 7281 | (0x2F914, 'M', u'瀞'), | ||
| 7282 | (0x2F915, 'M', u'瀛'), | ||
| 7283 | (0x2F916, 'M', u'㶖'), | ||
| 7284 | (0x2F917, 'M', u'灊'), | ||
| 7285 | (0x2F918, 'M', u'災'), | ||
| 7286 | ] | ||
| 7287 | |||
| 7288 | def _seg_70(): | ||
| 7289 | return [ | ||
| 7290 | (0x2F919, 'M', u'灷'), | ||
| 7291 | (0x2F91A, 'M', u'炭'), | ||
| 7292 | (0x2F91B, 'M', u'𠔥'), | ||
| 7293 | (0x2F91C, 'M', u'煅'), | ||
| 7294 | (0x2F91D, 'M', u'𤉣'), | ||
| 7295 | (0x2F91E, 'M', u'熜'), | ||
| 7296 | (0x2F91F, 'X'), | ||
| 7297 | (0x2F920, 'M', u'爨'), | ||
| 7298 | (0x2F921, 'M', u'爵'), | ||
| 7299 | (0x2F922, 'M', u'牐'), | ||
| 7300 | (0x2F923, 'M', u'𤘈'), | ||
| 7301 | (0x2F924, 'M', u'犀'), | ||
| 7302 | (0x2F925, 'M', u'犕'), | ||
| 7303 | (0x2F926, 'M', u'𤜵'), | ||
| 7304 | (0x2F927, 'M', u'𤠔'), | ||
| 7305 | (0x2F928, 'M', u'獺'), | ||
| 7306 | (0x2F929, 'M', u'王'), | ||
| 7307 | (0x2F92A, 'M', u'㺬'), | ||
| 7308 | (0x2F92B, 'M', u'玥'), | ||
| 7309 | (0x2F92C, 'M', u'㺸'), | ||
| 7310 | (0x2F92E, 'M', u'瑇'), | ||
| 7311 | (0x2F92F, 'M', u'瑜'), | ||
| 7312 | (0x2F930, 'M', u'瑱'), | ||
| 7313 | (0x2F931, 'M', u'璅'), | ||
| 7314 | (0x2F932, 'M', u'瓊'), | ||
| 7315 | (0x2F933, 'M', u'㼛'), | ||
| 7316 | (0x2F934, 'M', u'甤'), | ||
| 7317 | (0x2F935, 'M', u'𤰶'), | ||
| 7318 | (0x2F936, 'M', u'甾'), | ||
| 7319 | (0x2F937, 'M', u'𤲒'), | ||
| 7320 | (0x2F938, 'M', u'異'), | ||
| 7321 | (0x2F939, 'M', u'𢆟'), | ||
| 7322 | (0x2F93A, 'M', u'瘐'), | ||
| 7323 | (0x2F93B, 'M', u'𤾡'), | ||
| 7324 | (0x2F93C, 'M', u'𤾸'), | ||
| 7325 | (0x2F93D, 'M', u'𥁄'), | ||
| 7326 | (0x2F93E, 'M', u'㿼'), | ||
| 7327 | (0x2F93F, 'M', u'䀈'), | ||
| 7328 | (0x2F940, 'M', u'直'), | ||
| 7329 | (0x2F941, 'M', u'𥃳'), | ||
| 7330 | (0x2F942, 'M', u'𥃲'), | ||
| 7331 | (0x2F943, 'M', u'𥄙'), | ||
| 7332 | (0x2F944, 'M', u'𥄳'), | ||
| 7333 | (0x2F945, 'M', u'眞'), | ||
| 7334 | (0x2F946, 'M', u'真'), | ||
| 7335 | (0x2F948, 'M', u'睊'), | ||
| 7336 | (0x2F949, 'M', u'䀹'), | ||
| 7337 | (0x2F94A, 'M', u'瞋'), | ||
| 7338 | (0x2F94B, 'M', u'䁆'), | ||
| 7339 | (0x2F94C, 'M', u'䂖'), | ||
| 7340 | (0x2F94D, 'M', u'𥐝'), | ||
| 7341 | (0x2F94E, 'M', u'硎'), | ||
| 7342 | (0x2F94F, 'M', u'碌'), | ||
| 7343 | (0x2F950, 'M', u'磌'), | ||
| 7344 | (0x2F951, 'M', u'䃣'), | ||
| 7345 | (0x2F952, 'M', u'𥘦'), | ||
| 7346 | (0x2F953, 'M', u'祖'), | ||
| 7347 | (0x2F954, 'M', u'𥚚'), | ||
| 7348 | (0x2F955, 'M', u'𥛅'), | ||
| 7349 | (0x2F956, 'M', u'福'), | ||
| 7350 | (0x2F957, 'M', u'秫'), | ||
| 7351 | (0x2F958, 'M', u'䄯'), | ||
| 7352 | (0x2F959, 'M', u'穀'), | ||
| 7353 | (0x2F95A, 'M', u'穊'), | ||
| 7354 | (0x2F95B, 'M', u'穏'), | ||
| 7355 | (0x2F95C, 'M', u'𥥼'), | ||
| 7356 | (0x2F95D, 'M', u'𥪧'), | ||
| 7357 | (0x2F95F, 'X'), | ||
| 7358 | (0x2F960, 'M', u'䈂'), | ||
| 7359 | (0x2F961, 'M', u'𥮫'), | ||
| 7360 | (0x2F962, 'M', u'篆'), | ||
| 7361 | (0x2F963, 'M', u'築'), | ||
| 7362 | (0x2F964, 'M', u'䈧'), | ||
| 7363 | (0x2F965, 'M', u'𥲀'), | ||
| 7364 | (0x2F966, 'M', u'糒'), | ||
| 7365 | (0x2F967, 'M', u'䊠'), | ||
| 7366 | (0x2F968, 'M', u'糨'), | ||
| 7367 | (0x2F969, 'M', u'糣'), | ||
| 7368 | (0x2F96A, 'M', u'紀'), | ||
| 7369 | (0x2F96B, 'M', u'𥾆'), | ||
| 7370 | (0x2F96C, 'M', u'絣'), | ||
| 7371 | (0x2F96D, 'M', u'䌁'), | ||
| 7372 | (0x2F96E, 'M', u'緇'), | ||
| 7373 | (0x2F96F, 'M', u'縂'), | ||
| 7374 | (0x2F970, 'M', u'繅'), | ||
| 7375 | (0x2F971, 'M', u'䌴'), | ||
| 7376 | (0x2F972, 'M', u'𦈨'), | ||
| 7377 | (0x2F973, 'M', u'𦉇'), | ||
| 7378 | (0x2F974, 'M', u'䍙'), | ||
| 7379 | (0x2F975, 'M', u'𦋙'), | ||
| 7380 | (0x2F976, 'M', u'罺'), | ||
| 7381 | (0x2F977, 'M', u'𦌾'), | ||
| 7382 | (0x2F978, 'M', u'羕'), | ||
| 7383 | (0x2F979, 'M', u'翺'), | ||
| 7384 | (0x2F97A, 'M', u'者'), | ||
| 7385 | (0x2F97B, 'M', u'𦓚'), | ||
| 7386 | (0x2F97C, 'M', u'𦔣'), | ||
| 7387 | (0x2F97D, 'M', u'聠'), | ||
| 7388 | (0x2F97E, 'M', u'𦖨'), | ||
| 7389 | (0x2F97F, 'M', u'聰'), | ||
| 7390 | ] | ||
| 7391 | |||
| 7392 | def _seg_71(): | ||
| 7393 | return [ | ||
| 7394 | (0x2F980, 'M', u'𣍟'), | ||
| 7395 | (0x2F981, 'M', u'䏕'), | ||
| 7396 | (0x2F982, 'M', u'育'), | ||
| 7397 | (0x2F983, 'M', u'脃'), | ||
| 7398 | (0x2F984, 'M', u'䐋'), | ||
| 7399 | (0x2F985, 'M', u'脾'), | ||
| 7400 | (0x2F986, 'M', u'媵'), | ||
| 7401 | (0x2F987, 'M', u'𦞧'), | ||
| 7402 | (0x2F988, 'M', u'𦞵'), | ||
| 7403 | (0x2F989, 'M', u'𣎓'), | ||
| 7404 | (0x2F98A, 'M', u'𣎜'), | ||
| 7405 | (0x2F98B, 'M', u'舁'), | ||
| 7406 | (0x2F98C, 'M', u'舄'), | ||
| 7407 | (0x2F98D, 'M', u'辞'), | ||
| 7408 | (0x2F98E, 'M', u'䑫'), | ||
| 7409 | (0x2F98F, 'M', u'芑'), | ||
| 7410 | (0x2F990, 'M', u'芋'), | ||
| 7411 | (0x2F991, 'M', u'芝'), | ||
| 7412 | (0x2F992, 'M', u'劳'), | ||
| 7413 | (0x2F993, 'M', u'花'), | ||
| 7414 | (0x2F994, 'M', u'芳'), | ||
| 7415 | (0x2F995, 'M', u'芽'), | ||
| 7416 | (0x2F996, 'M', u'苦'), | ||
| 7417 | (0x2F997, 'M', u'𦬼'), | ||
| 7418 | (0x2F998, 'M', u'若'), | ||
| 7419 | (0x2F999, 'M', u'茝'), | ||
| 7420 | (0x2F99A, 'M', u'荣'), | ||
| 7421 | (0x2F99B, 'M', u'莭'), | ||
| 7422 | (0x2F99C, 'M', u'茣'), | ||
| 7423 | (0x2F99D, 'M', u'莽'), | ||
| 7424 | (0x2F99E, 'M', u'菧'), | ||
| 7425 | (0x2F99F, 'M', u'著'), | ||
| 7426 | (0x2F9A0, 'M', u'荓'), | ||
| 7427 | (0x2F9A1, 'M', u'菊'), | ||
| 7428 | (0x2F9A2, 'M', u'菌'), | ||
| 7429 | (0x2F9A3, 'M', u'菜'), | ||
| 7430 | (0x2F9A4, 'M', u'𦰶'), | ||
| 7431 | (0x2F9A5, 'M', u'𦵫'), | ||
| 7432 | (0x2F9A6, 'M', u'𦳕'), | ||
| 7433 | (0x2F9A7, 'M', u'䔫'), | ||
| 7434 | (0x2F9A8, 'M', u'蓱'), | ||
| 7435 | (0x2F9A9, 'M', u'蓳'), | ||
| 7436 | (0x2F9AA, 'M', u'蔖'), | ||
| 7437 | (0x2F9AB, 'M', u'𧏊'), | ||
| 7438 | (0x2F9AC, 'M', u'蕤'), | ||
| 7439 | (0x2F9AD, 'M', u'𦼬'), | ||
| 7440 | (0x2F9AE, 'M', u'䕝'), | ||
| 7441 | (0x2F9AF, 'M', u'䕡'), | ||
| 7442 | (0x2F9B0, 'M', u'𦾱'), | ||
| 7443 | (0x2F9B1, 'M', u'𧃒'), | ||
| 7444 | (0x2F9B2, 'M', u'䕫'), | ||
| 7445 | (0x2F9B3, 'M', u'虐'), | ||
| 7446 | (0x2F9B4, 'M', u'虜'), | ||
| 7447 | (0x2F9B5, 'M', u'虧'), | ||
| 7448 | (0x2F9B6, 'M', u'虩'), | ||
| 7449 | (0x2F9B7, 'M', u'蚩'), | ||
| 7450 | (0x2F9B8, 'M', u'蚈'), | ||
| 7451 | (0x2F9B9, 'M', u'蜎'), | ||
| 7452 | (0x2F9BA, 'M', u'蛢'), | ||
| 7453 | (0x2F9BB, 'M', u'蝹'), | ||
| 7454 | (0x2F9BC, 'M', u'蜨'), | ||
| 7455 | (0x2F9BD, 'M', u'蝫'), | ||
| 7456 | (0x2F9BE, 'M', u'螆'), | ||
| 7457 | (0x2F9BF, 'X'), | ||
| 7458 | (0x2F9C0, 'M', u'蟡'), | ||
| 7459 | (0x2F9C1, 'M', u'蠁'), | ||
| 7460 | (0x2F9C2, 'M', u'䗹'), | ||
| 7461 | (0x2F9C3, 'M', u'衠'), | ||
| 7462 | (0x2F9C4, 'M', u'衣'), | ||
| 7463 | (0x2F9C5, 'M', u'𧙧'), | ||
| 7464 | (0x2F9C6, 'M', u'裗'), | ||
| 7465 | (0x2F9C7, 'M', u'裞'), | ||
| 7466 | (0x2F9C8, 'M', u'䘵'), | ||
| 7467 | (0x2F9C9, 'M', u'裺'), | ||
| 7468 | (0x2F9CA, 'M', u'㒻'), | ||
| 7469 | (0x2F9CB, 'M', u'𧢮'), | ||
| 7470 | (0x2F9CC, 'M', u'𧥦'), | ||
| 7471 | (0x2F9CD, 'M', u'䚾'), | ||
| 7472 | (0x2F9CE, 'M', u'䛇'), | ||
| 7473 | (0x2F9CF, 'M', u'誠'), | ||
| 7474 | (0x2F9D0, 'M', u'諭'), | ||
| 7475 | (0x2F9D1, 'M', u'變'), | ||
| 7476 | (0x2F9D2, 'M', u'豕'), | ||
| 7477 | (0x2F9D3, 'M', u'𧲨'), | ||
| 7478 | (0x2F9D4, 'M', u'貫'), | ||
| 7479 | (0x2F9D5, 'M', u'賁'), | ||
| 7480 | (0x2F9D6, 'M', u'贛'), | ||
| 7481 | (0x2F9D7, 'M', u'起'), | ||
| 7482 | (0x2F9D8, 'M', u'𧼯'), | ||
| 7483 | (0x2F9D9, 'M', u'𠠄'), | ||
| 7484 | (0x2F9DA, 'M', u'跋'), | ||
| 7485 | (0x2F9DB, 'M', u'趼'), | ||
| 7486 | (0x2F9DC, 'M', u'跰'), | ||
| 7487 | (0x2F9DD, 'M', u'𠣞'), | ||
| 7488 | (0x2F9DE, 'M', u'軔'), | ||
| 7489 | (0x2F9DF, 'M', u'輸'), | ||
| 7490 | (0x2F9E0, 'M', u'𨗒'), | ||
| 7491 | (0x2F9E1, 'M', u'𨗭'), | ||
| 7492 | (0x2F9E2, 'M', u'邔'), | ||
| 7493 | (0x2F9E3, 'M', u'郱'), | ||
| 7494 | ] | ||
| 7495 | |||
| 7496 | def _seg_72(): | ||
| 7497 | return [ | ||
| 7498 | (0x2F9E4, 'M', u'鄑'), | ||
| 7499 | (0x2F9E5, 'M', u'𨜮'), | ||
| 7500 | (0x2F9E6, 'M', u'鄛'), | ||
| 7501 | (0x2F9E7, 'M', u'鈸'), | ||
| 7502 | (0x2F9E8, 'M', u'鋗'), | ||
| 7503 | (0x2F9E9, 'M', u'鋘'), | ||
| 7504 | (0x2F9EA, 'M', u'鉼'), | ||
| 7505 | (0x2F9EB, 'M', u'鏹'), | ||
| 7506 | (0x2F9EC, 'M', u'鐕'), | ||
| 7507 | (0x2F9ED, 'M', u'𨯺'), | ||
| 7508 | (0x2F9EE, 'M', u'開'), | ||
| 7509 | (0x2F9EF, 'M', u'䦕'), | ||
| 7510 | (0x2F9F0, 'M', u'閷'), | ||
| 7511 | (0x2F9F1, 'M', u'𨵷'), | ||
| 7512 | (0x2F9F2, 'M', u'䧦'), | ||
| 7513 | (0x2F9F3, 'M', u'雃'), | ||
| 7514 | (0x2F9F4, 'M', u'嶲'), | ||
| 7515 | (0x2F9F5, 'M', u'霣'), | ||
| 7516 | (0x2F9F6, 'M', u'𩅅'), | ||
| 7517 | (0x2F9F7, 'M', u'𩈚'), | ||
| 7518 | (0x2F9F8, 'M', u'䩮'), | ||
| 7519 | (0x2F9F9, 'M', u'䩶'), | ||
| 7520 | (0x2F9FA, 'M', u'韠'), | ||
| 7521 | (0x2F9FB, 'M', u'𩐊'), | ||
| 7522 | (0x2F9FC, 'M', u'䪲'), | ||
| 7523 | (0x2F9FD, 'M', u'𩒖'), | ||
| 7524 | (0x2F9FE, 'M', u'頋'), | ||
| 7525 | (0x2FA00, 'M', u'頩'), | ||
| 7526 | (0x2FA01, 'M', u'𩖶'), | ||
| 7527 | (0x2FA02, 'M', u'飢'), | ||
| 7528 | (0x2FA03, 'M', u'䬳'), | ||
| 7529 | (0x2FA04, 'M', u'餩'), | ||
| 7530 | (0x2FA05, 'M', u'馧'), | ||
| 7531 | (0x2FA06, 'M', u'駂'), | ||
| 7532 | (0x2FA07, 'M', u'駾'), | ||
| 7533 | (0x2FA08, 'M', u'䯎'), | ||
| 7534 | (0x2FA09, 'M', u'𩬰'), | ||
| 7535 | (0x2FA0A, 'M', u'鬒'), | ||
| 7536 | (0x2FA0B, 'M', u'鱀'), | ||
| 7537 | (0x2FA0C, 'M', u'鳽'), | ||
| 7538 | (0x2FA0D, 'M', u'䳎'), | ||
| 7539 | (0x2FA0E, 'M', u'䳭'), | ||
| 7540 | (0x2FA0F, 'M', u'鵧'), | ||
| 7541 | (0x2FA10, 'M', u'𪃎'), | ||
| 7542 | (0x2FA11, 'M', u'䳸'), | ||
| 7543 | (0x2FA12, 'M', u'𪄅'), | ||
| 7544 | (0x2FA13, 'M', u'𪈎'), | ||
| 7545 | (0x2FA14, 'M', u'𪊑'), | ||
| 7546 | (0x2FA15, 'M', u'麻'), | ||
| 7547 | (0x2FA16, 'M', u'䵖'), | ||
| 7548 | (0x2FA17, 'M', u'黹'), | ||
| 7549 | (0x2FA18, 'M', u'黾'), | ||
| 7550 | (0x2FA19, 'M', u'鼅'), | ||
| 7551 | (0x2FA1A, 'M', u'鼏'), | ||
| 7552 | (0x2FA1B, 'M', u'鼖'), | ||
| 7553 | (0x2FA1C, 'M', u'鼻'), | ||
| 7554 | (0x2FA1D, 'M', u'𪘀'), | ||
| 7555 | (0x2FA1E, 'X'), | ||
| 7556 | (0xE0100, 'I'), | ||
| 7557 | (0xE01F0, 'X'), | ||
| 7558 | ] | ||
| 7559 | |||
| 7560 | uts46data = tuple( | ||
| 7561 | _seg_0() | ||
| 7562 | + _seg_1() | ||
| 7563 | + _seg_2() | ||
| 7564 | + _seg_3() | ||
| 7565 | + _seg_4() | ||
| 7566 | + _seg_5() | ||
| 7567 | + _seg_6() | ||
| 7568 | + _seg_7() | ||
| 7569 | + _seg_8() | ||
| 7570 | + _seg_9() | ||
| 7571 | + _seg_10() | ||
| 7572 | + _seg_11() | ||
| 7573 | + _seg_12() | ||
| 7574 | + _seg_13() | ||
| 7575 | + _seg_14() | ||
| 7576 | + _seg_15() | ||
| 7577 | + _seg_16() | ||
| 7578 | + _seg_17() | ||
| 7579 | + _seg_18() | ||
| 7580 | + _seg_19() | ||
| 7581 | + _seg_20() | ||
| 7582 | + _seg_21() | ||
| 7583 | + _seg_22() | ||
| 7584 | + _seg_23() | ||
| 7585 | + _seg_24() | ||
| 7586 | + _seg_25() | ||
| 7587 | + _seg_26() | ||
| 7588 | + _seg_27() | ||
| 7589 | + _seg_28() | ||
| 7590 | + _seg_29() | ||
| 7591 | + _seg_30() | ||
| 7592 | + _seg_31() | ||
| 7593 | + _seg_32() | ||
| 7594 | + _seg_33() | ||
| 7595 | + _seg_34() | ||
| 7596 | + _seg_35() | ||
| 7597 | + _seg_36() | ||
| 7598 | + _seg_37() | ||
| 7599 | + _seg_38() | ||
| 7600 | + _seg_39() | ||
| 7601 | + _seg_40() | ||
| 7602 | + _seg_41() | ||
| 7603 | + _seg_42() | ||
| 7604 | + _seg_43() | ||
| 7605 | + _seg_44() | ||
| 7606 | + _seg_45() | ||
| 7607 | + _seg_46() | ||
| 7608 | + _seg_47() | ||
| 7609 | + _seg_48() | ||
| 7610 | + _seg_49() | ||
| 7611 | + _seg_50() | ||
| 7612 | + _seg_51() | ||
| 7613 | + _seg_52() | ||
| 7614 | + _seg_53() | ||
| 7615 | + _seg_54() | ||
| 7616 | + _seg_55() | ||
| 7617 | + _seg_56() | ||
| 7618 | + _seg_57() | ||
| 7619 | + _seg_58() | ||
| 7620 | + _seg_59() | ||
| 7621 | + _seg_60() | ||
| 7622 | + _seg_61() | ||
| 7623 | + _seg_62() | ||
| 7624 | + _seg_63() | ||
| 7625 | + _seg_64() | ||
| 7626 | + _seg_65() | ||
| 7627 | + _seg_66() | ||
| 7628 | + _seg_67() | ||
| 7629 | + _seg_68() | ||
| 7630 | + _seg_69() | ||
| 7631 | + _seg_70() | ||
| 7632 | + _seg_71() | ||
| 7633 | + _seg_72() | ||
| 7634 | ) | ||
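| |||
| | # Illustrative sketch (an exposition aid, not part of the vendored | ||
| | # file): rows are keyed by range start, so the row for a code point | ||
| | # is the last tuple whose start is <= that code point, which a | ||
| | # right-bisect finds directly. | ||
| | import bisect as _bisect_demo | ||
| | def _uts46_lookup_demo(code_point): | ||
| | # 'Z' sorts after every status letter, so bisect_right lands just | ||
| | # past any row that starts at code_point itself. | ||
| | idx = _bisect_demo.bisect_right(uts46data, (code_point, 'Z')) - 1 | ||
| | return uts46data[idx]  # (start, status[, mapping]) | ||
| | # _uts46_lookup_demo(0x2F814) == (0x2F814, 'M', u'內') | ||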
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/ipaddress.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/ipaddress.py new file mode 100644 index 0000000..6da8d93 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/ipaddress.py | |||
| @@ -0,0 +1,2419 @@ | |||
| 1 | # Copyright 2007 Google Inc. | ||
| 2 | # Licensed to PSF under a Contributor Agreement. | ||
| 3 | |||
| 4 | """A fast, lightweight IPv4/IPv6 manipulation library in Python. | ||
| 5 | |||
| 6 | This library is used to create/poke/manipulate IPv4 and IPv6 addresses | ||
| 7 | and networks. | ||
| 8 | |||
| 9 | """ | ||
| 10 | |||
| 11 | from __future__ import unicode_literals | ||
| 12 | |||
| 13 | |||
| 14 | import itertools | ||
| 15 | import struct | ||
| 16 | |||
| 17 | __version__ = '1.0.19' | ||
| 18 | |||
| 19 | # Compatibility functions | ||
| 20 | _compat_int_types = (int,) | ||
| 21 | try: | ||
| 22 | _compat_int_types = (int, long) | ||
| 23 | except NameError: | ||
| 24 | pass | ||
| 25 | try: | ||
| 26 | _compat_str = unicode | ||
| 27 | except NameError: | ||
| 28 | _compat_str = str | ||
| 29 | assert bytes != str | ||
| 30 | if b'\0'[0] == 0: # Python 3 semantics | ||
| 31 | def _compat_bytes_to_byte_vals(byt): | ||
| 32 | return byt | ||
| 33 | else: | ||
| 34 | def _compat_bytes_to_byte_vals(byt): | ||
| 35 | return [struct.unpack(b'!B', b)[0] for b in byt] | ||
| 36 | try: | ||
| 37 | _compat_int_from_byte_vals = int.from_bytes | ||
| 38 | except AttributeError: | ||
| 39 | def _compat_int_from_byte_vals(bytvals, endianess): | ||
| 40 | assert endianess == 'big' | ||
| 41 | res = 0 | ||
| 42 | for bv in bytvals: | ||
| 43 | assert isinstance(bv, _compat_int_types) | ||
| 44 | res = (res << 8) + bv | ||
| 45 | return res | ||
| 46 | |||
| 47 | |||
| 48 | def _compat_to_bytes(intval, length, endianess): | ||
| 49 | assert isinstance(intval, _compat_int_types) | ||
| 50 | assert endianess == 'big' | ||
| 51 | if length == 4: | ||
| 52 | if intval < 0 or intval >= 2 ** 32: | ||
| 53 | raise struct.error("integer out of range for 'I' format code") | ||
| 54 | return struct.pack(b'!I', intval) | ||
| 55 | elif length == 16: | ||
| 56 | if intval < 0 or intval >= 2 ** 128: | ||
| 57 | raise struct.error("integer out of range for 'QQ' format code") | ||
| 58 | return struct.pack(b'!QQ', intval >> 64, intval & 0xffffffffffffffff) | ||
| 59 | else: | ||
| 60 | raise NotImplementedError() | ||
| 61 | |||
| 62 | |||
| 63 | if hasattr(int, 'bit_length'): | ||
| 64 | # Not int.bit_length, since that won't work in 2.7 where long exists | ||
| 65 | def _compat_bit_length(i): | ||
| 66 | return i.bit_length() | ||
| 67 | else: | ||
| 68 | def _compat_bit_length(i): | ||
| 69 | for res in itertools.count(): | ||
| 70 | if i >> res == 0: | ||
| 71 | return res | ||
| 72 | |||
| 73 | |||
| 74 | def _compat_range(start, end, step=1): | ||
| 75 | assert step > 0 | ||
| 76 | i = start | ||
| 77 | while i < end: | ||
| 78 | yield i | ||
| 79 | i += step | ||
| 80 | |||
| 81 | |||
| 82 | class _TotalOrderingMixin(object): | ||
| 83 | __slots__ = () | ||
| 84 | |||
| 85 | # Helper that derives the other comparison operations from | ||
| 86 | # __lt__ and __eq__ | ||
| 87 | # We avoid functools.total_ordering because it doesn't handle | ||
| 88 | # NotImplemented correctly yet (http://bugs.python.org/issue10042) | ||
| 89 | def __eq__(self, other): | ||
| 90 | raise NotImplementedError | ||
| 91 | |||
| 92 | def __ne__(self, other): | ||
| 93 | equal = self.__eq__(other) | ||
| 94 | if equal is NotImplemented: | ||
| 95 | return NotImplemented | ||
| 96 | return not equal | ||
| 97 | |||
| 98 | def __lt__(self, other): | ||
| 99 | raise NotImplementedError | ||
| 100 | |||
| 101 | def __le__(self, other): | ||
| 102 | less = self.__lt__(other) | ||
| 103 | if less is NotImplemented or not less: | ||
| 104 | return self.__eq__(other) | ||
| 105 | return less | ||
| 106 | |||
| 107 | def __gt__(self, other): | ||
| 108 | less = self.__lt__(other) | ||
| 109 | if less is NotImplemented: | ||
| 110 | return NotImplemented | ||
| 111 | equal = self.__eq__(other) | ||
| 112 | if equal is NotImplemented: | ||
| 113 | return NotImplemented | ||
| 114 | return not (less or equal) | ||
| 115 | |||
| 116 | def __ge__(self, other): | ||
| 117 | less = self.__lt__(other) | ||
| 118 | if less is NotImplemented: | ||
| 119 | return NotImplemented | ||
| 120 | return not less | ||
| 121 | |||
| 122 | |||
| 123 | IPV4LENGTH = 32 | ||
| 124 | IPV6LENGTH = 128 | ||
| 125 | |||
| 126 | |||
| 127 | class AddressValueError(ValueError): | ||
| 128 | """A Value Error related to the address.""" | ||
| 129 | |||
| 130 | |||
| 131 | class NetmaskValueError(ValueError): | ||
| 132 | """A Value Error related to the netmask.""" | ||
| 133 | |||
| 134 | |||
| 135 | def ip_address(address): | ||
| 136 | """Take an IP string/int and return an object of the correct type. | ||
| 137 | |||
| 138 | Args: | ||
| 139 | address: A string or integer, the IP address. Either IPv4 or | ||
| 140 | IPv6 addresses may be supplied; integers less than 2**32 will | ||
| 141 | be considered to be IPv4 by default. | ||
| 142 | |||
| 143 | Returns: | ||
| 144 | An IPv4Address or IPv6Address object. | ||
| 145 | |||
| 146 | Raises: | ||
| 147 | ValueError: if the *address* passed isn't either a v4 or a v6 | ||
| 148 | address | ||
| 149 | |||
| 150 | """ | ||
| 151 | try: | ||
| 152 | return IPv4Address(address) | ||
| 153 | except (AddressValueError, NetmaskValueError): | ||
| 154 | pass | ||
| 155 | |||
| 156 | try: | ||
| 157 | return IPv6Address(address) | ||
| 158 | except (AddressValueError, NetmaskValueError): | ||
| 159 | pass | ||
| 160 | |||
| 161 | if isinstance(address, bytes): | ||
| 162 | raise AddressValueError( | ||
| 163 | '%r does not appear to be an IPv4 or IPv6 address. ' | ||
| 164 | 'Did you pass in a bytes (str in Python 2) instead of' | ||
| 165 | ' a unicode object?' % address) | ||
| 166 | |||
| 167 | raise ValueError('%r does not appear to be an IPv4 or IPv6 address' % | ||
| 168 | address) | ||
| 169 | |||
| 170 | |||
| 171 | def ip_network(address, strict=True): | ||
| 172 | """Take an IP string/int and return an object of the correct type. | ||
| 173 | |||
| 174 | Args: | ||
| 175 | address: A string or integer, the IP network. Either IPv4 or | ||
| 176 | IPv6 networks may be supplied; integers less than 2**32 will | ||
| 177 | be considered to be IPv4 by default. | ||
| 178 | |||
| 179 | Returns: | ||
| 180 | An IPv4Network or IPv6Network object. | ||
| 181 | |||
| 182 | Raises: | ||
| 183 | ValueError: if the string passed isn't either a v4 or a v6 | ||
| 184 | address. Or if the network has host bits set. | ||
| 185 | |||
| 186 | """ | ||
| 187 | try: | ||
| 188 | return IPv4Network(address, strict) | ||
| 189 | except (AddressValueError, NetmaskValueError): | ||
| 190 | pass | ||
| 191 | |||
| 192 | try: | ||
| 193 | return IPv6Network(address, strict) | ||
| 194 | except (AddressValueError, NetmaskValueError): | ||
| 195 | pass | ||
| 196 | |||
| 197 | if isinstance(address, bytes): | ||
| 198 | raise AddressValueError( | ||
| 199 | '%r does not appear to be an IPv4 or IPv6 network. ' | ||
| 200 | 'Did you pass in a bytes (str in Python 2) instead of' | ||
| 201 | ' a unicode object?' % address) | ||
| 202 | |||
| 203 | raise ValueError('%r does not appear to be an IPv4 or IPv6 network' % | ||
| 204 | address) | ||
| 205 | |||
| 206 | |||
| 207 | def ip_interface(address): | ||
| 208 | """Take an IP string/int and return an object of the correct type. | ||
| 209 | |||
| 210 | Args: | ||
| 211 | address: A string or integer, the IP address. Either IPv4 or | ||
| 212 | IPv6 addresses may be supplied; integers less than 2**32 will | ||
| 213 | be considered to be IPv4 by default. | ||
| 214 | |||
| 215 | Returns: | ||
| 216 | An IPv4Interface or IPv6Interface object. | ||
| 217 | |||
| 218 | Raises: | ||
| 219 | ValueError: if the string passed isn't either a v4 or a v6 | ||
| 220 | address. | ||
| 221 | |||
| 222 | Notes: | ||
| 223 | The IPv?Interface classes describe an Address on a particular | ||
| 224 | Network, so they're basically a combination of both the Address | ||
| 225 | and Network classes. | ||
| 226 | |||
| 227 | """ | ||
| 228 | try: | ||
| 229 | return IPv4Interface(address) | ||
| 230 | except (AddressValueError, NetmaskValueError): | ||
| 231 | pass | ||
| 232 | |||
| 233 | try: | ||
| 234 | return IPv6Interface(address) | ||
| 235 | except (AddressValueError, NetmaskValueError): | ||
| 236 | pass | ||
| 237 | |||
| 238 | raise ValueError('%r does not appear to be an IPv4 or IPv6 interface' % | ||
| 239 | address) | ||
| 240 | |||
| 241 | |||
| 242 | def v4_int_to_packed(address): | ||
| 243 | """Represent an address as 4 packed bytes in network (big-endian) order. | ||
| 244 | |||
| 245 | Args: | ||
| 246 | address: An integer representation of an IPv4 IP address. | ||
| 247 | |||
| 248 | Returns: | ||
| 249 | The integer address packed as 4 bytes in network (big-endian) order. | ||
| 250 | |||
| 251 | Raises: | ||
| 252 | ValueError: If the integer is negative or too large to be an | ||
| 253 | IPv4 IP address. | ||
| 254 | |||
| 255 | """ | ||
| 256 | try: | ||
| 257 | return _compat_to_bytes(address, 4, 'big') | ||
| 258 | except (struct.error, OverflowError): | ||
| 259 | raise ValueError("Address negative or too large for IPv4") | ||
| 260 | |||
| 261 | |||
| 262 | def v6_int_to_packed(address): | ||
| 263 | """Represent an address as 16 packed bytes in network (big-endian) order. | ||
| 264 | |||
| 265 | Args: | ||
| 266 | address: An integer representation of an IPv6 IP address. | ||
| 267 | |||
| 268 | Returns: | ||
| 269 | The integer address packed as 16 bytes in network (big-endian) order. | ||
| 270 | |||
| 271 | """ | ||
| 272 | try: | ||
| 273 | return _compat_to_bytes(address, 16, 'big') | ||
| 274 | except (struct.error, OverflowError): | ||
| 275 | raise ValueError("Address negative or too large for IPv6") | ||
| 276 | |||
| 277 | |||
| 278 | def _split_optional_netmask(address): | ||
| 279 | """Helper to split the netmask and raise AddressValueError if needed""" | ||
| 280 | addr = _compat_str(address).split('/') | ||
| 281 | if len(addr) > 2: | ||
| 282 | raise AddressValueError("Only one '/' permitted in %r" % address) | ||
| 283 | return addr | ||
| 284 | |||
| 285 | |||
| 286 | def _find_address_range(addresses): | ||
| 287 | """Find a sequence of sorted deduplicated IPv#Address. | ||
| 288 | |||
| 289 | Args: | ||
| 290 | addresses: a list of IPv#Address objects. | ||
| 291 | |||
| 292 | Yields: | ||
| 293 | A tuple containing the first and last IP addresses in the sequence. | ||
| 294 | |||
| 295 | """ | ||
| 296 | it = iter(addresses) | ||
| 297 | first = last = next(it) | ||
| 298 | for ip in it: | ||
| 299 | if ip._ip != last._ip + 1: | ||
| 300 | yield first, last | ||
| 301 | first = ip | ||
| 302 | last = ip | ||
| 303 | yield first, last | ||
| 304 | |||
| 305 | |||
| 306 | def _count_righthand_zero_bits(number, bits): | ||
| 307 | """Count the number of zero bits on the right hand side. | ||
| 308 | |||
| 309 | Args: | ||
| 310 | number: an integer. | ||
| 311 | bits: maximum number of bits to count. | ||
| 312 | |||
| 313 | Returns: | ||
| 314 | The number of zero bits on the right hand side of the number. | ||
| 315 | |||
| 316 | """ | ||
| 317 | if number == 0: | ||
| 318 | return bits | ||
| 319 | return min(bits, _compat_bit_length(~number & (number - 1))) | ||
| 320 | |||
| 321 | |||
| 322 | def summarize_address_range(first, last): | ||
| 323 | """Summarize a network range given the first and last IP addresses. | ||
| 324 | |||
| 325 | Example: | ||
| 326 | >>> list(summarize_address_range(IPv4Address('192.0.2.0'), | ||
| 327 | ... IPv4Address('192.0.2.130'))) | ||
| 328 | ... #doctest: +NORMALIZE_WHITESPACE | ||
| 329 | [IPv4Network('192.0.2.0/25'), IPv4Network('192.0.2.128/31'), | ||
| 330 | IPv4Network('192.0.2.130/32')] | ||
| 331 | |||
| 332 | Args: | ||
| 333 | first: the first IPv4Address or IPv6Address in the range. | ||
| 334 | last: the last IPv4Address or IPv6Address in the range. | ||
| 335 | |||
| 336 | Returns: | ||
| 337 | An iterator of the summarized IPv(4|6) network objects. | ||
| 338 | |||
| 339 | Raise: | ||
| 340 | TypeError: | ||
| 341 | If the first and last objects are not IP addresses. | ||
| 342 | If the first and last objects are not the same version. | ||
| 343 | ValueError: | ||
| 344 | If the last object is not greater than the first. | ||
| 345 | If the version of the first address is not 4 or 6. | ||
| 346 | |||
| 347 | """ | ||
| 348 | if (not (isinstance(first, _BaseAddress) and | ||
| 349 | isinstance(last, _BaseAddress))): | ||
| 350 | raise TypeError('first and last must be IP addresses, not networks') | ||
| 351 | if first.version != last.version: | ||
| 352 | raise TypeError("%s and %s are not of the same version" % ( | ||
| 353 | first, last)) | ||
| 354 | if first > last: | ||
| 355 | raise ValueError('last IP address must be greater than first') | ||
| 356 | |||
| 357 | if first.version == 4: | ||
| 358 | ip = IPv4Network | ||
| 359 | elif first.version == 6: | ||
| 360 | ip = IPv6Network | ||
| 361 | else: | ||
| 362 | raise ValueError('unknown IP version') | ||
| 363 | |||
| 364 | ip_bits = first._max_prefixlen | ||
| 365 | first_int = first._ip | ||
| 366 | last_int = last._ip | ||
| 367 | while first_int <= last_int: | ||
| 368 | nbits = min(_count_righthand_zero_bits(first_int, ip_bits), | ||
| 369 | _compat_bit_length(last_int - first_int + 1) - 1) | ||
| 370 | net = ip((first_int, ip_bits - nbits)) | ||
| 371 | yield net | ||
| 372 | first_int += 1 << nbits | ||
| 373 | if first_int - 1 == ip._ALL_ONES: | ||
| 374 | break | ||
| 375 | |||
| 376 | |||
| 377 | def _collapse_addresses_internal(addresses): | ||
| 378 | """Loops through the addresses, collapsing concurrent netblocks. | ||
| 379 | |||
| 380 | Example: | ||
| 381 | |||
| 382 | ip1 = IPv4Network('192.0.2.0/26') | ||
| 383 | ip2 = IPv4Network('192.0.2.64/26') | ||
| 384 | ip3 = IPv4Network('192.0.2.128/26') | ||
| 385 | ip4 = IPv4Network('192.0.2.192/26') | ||
| 386 | |||
| 387 | _collapse_addresses_internal([ip1, ip2, ip3, ip4]) -> | ||
| 388 | [IPv4Network('192.0.2.0/24')] | ||
| 389 | |||
| 390 | This shouldn't be called directly; it is called via | ||
| 391 | collapse_addresses([]). | ||
| 392 | |||
| 393 | Args: | ||
| 394 | addresses: A list of IPv4Network's or IPv6Network's | ||
| 395 | |||
| 396 | Returns: | ||
| 397 | An iterator of IPv4Network or IPv6Network objects, depending on | ||
| 398 | what we were passed. | ||
| 399 | |||
| 400 | """ | ||
| 401 | # First merge | ||
| 402 | to_merge = list(addresses) | ||
| 403 | subnets = {} | ||
| 404 | while to_merge: | ||
| 405 | net = to_merge.pop() | ||
| 406 | supernet = net.supernet() | ||
| 407 | existing = subnets.get(supernet) | ||
| 408 | if existing is None: | ||
| 409 | subnets[supernet] = net | ||
| 410 | elif existing != net: | ||
| 411 | # Merge consecutive subnets | ||
| 412 | del subnets[supernet] | ||
| 413 | to_merge.append(supernet) | ||
| 414 | # Then iterate over resulting networks, skipping subsumed subnets | ||
| 415 | last = None | ||
| 416 | for net in sorted(subnets.values()): | ||
| 417 | if last is not None: | ||
| 418 | # Since they are sorted, | ||
| 419 | # last.network_address <= net.network_address is a given. | ||
| 420 | if last.broadcast_address >= net.broadcast_address: | ||
| 421 | continue | ||
| 422 | yield net | ||
| 423 | last = net | ||
| 424 | |||
| 425 | |||
| 426 | def collapse_addresses(addresses): | ||
| 427 | """Collapse a list of IP objects. | ||
| 428 | |||
| 429 | Example: | ||
| 430 | collapse_addresses([IPv4Network('192.0.2.0/25'), | ||
| 431 | IPv4Network('192.0.2.128/25')]) -> | ||
| 432 | [IPv4Network('192.0.2.0/24')] | ||
| 433 | |||
| 434 | Args: | ||
| 435 | addresses: An iterator of IPv4Network or IPv6Network objects. | ||
| 436 | |||
| 437 | Returns: | ||
| 438 | An iterator of the collapsed IPv(4|6)Network objects. | ||
| 439 | |||
| 440 | Raises: | ||
| 441 | TypeError: If passed a list of mixed version objects. | ||
| 442 | |||
| 443 | """ | ||
| 444 | addrs = [] | ||
| 445 | ips = [] | ||
| 446 | nets = [] | ||
| 447 | |||
| 448 | # split IP addresses and networks | ||
| 449 | for ip in addresses: | ||
| 450 | if isinstance(ip, _BaseAddress): | ||
| 451 | if ips and ips[-1]._version != ip._version: | ||
| 452 | raise TypeError("%s and %s are not of the same version" % ( | ||
| 453 | ip, ips[-1])) | ||
| 454 | ips.append(ip) | ||
| 455 | elif ip._prefixlen == ip._max_prefixlen: | ||
| 456 | if ips and ips[-1]._version != ip._version: | ||
| 457 | raise TypeError("%s and %s are not of the same version" % ( | ||
| 458 | ip, ips[-1])) | ||
| 459 | try: | ||
| 460 | ips.append(ip.ip) | ||
| 461 | except AttributeError: | ||
| 462 | ips.append(ip.network_address) | ||
| 463 | else: | ||
| 464 | if nets and nets[-1]._version != ip._version: | ||
| 465 | raise TypeError("%s and %s are not of the same version" % ( | ||
| 466 | ip, nets[-1])) | ||
| 467 | nets.append(ip) | ||
| 468 | |||
| 469 | # sort and dedup | ||
| 470 | ips = sorted(set(ips)) | ||
| 471 | |||
| 472 | # find consecutive address ranges in the sorted sequence and summarize them | ||
| 473 | if ips: | ||
| 474 | for first, last in _find_address_range(ips): | ||
| 475 | addrs.extend(summarize_address_range(first, last)) | ||
| 476 | |||
| 477 | return _collapse_addresses_internal(addrs + nets) | ||
| 478 | |||
| 479 | |||
| 480 | def get_mixed_type_key(obj): | ||
| 481 | """Return a key suitable for sorting between networks and addresses. | ||
| 482 | |||
| 483 | Address and Network objects are not sortable by default; they're | ||
| 484 | fundamentally different so the expression | ||
| 485 | |||
| 486 | IPv4Address('192.0.2.0') <= IPv4Network('192.0.2.0/24') | ||
| 487 | |||
| 488 | doesn't make any sense. There are times, however, when you may wish | ||
| 489 | to have ipaddress sort these for you anyway. If you need to do this, you | ||
| 490 | can use this function as the key= argument to sorted(). | ||
| 491 | |||
| 492 | Args: | ||
| 493 | obj: either a Network or Address object. | ||
| 494 | Returns: | ||
| 495 | appropriate key. | ||
| 496 | |||
| 497 | """ | ||
| 498 | if isinstance(obj, _BaseNetwork): | ||
| 499 | return obj._get_networks_key() | ||
| 500 | elif isinstance(obj, _BaseAddress): | ||
| 501 | return obj._get_address_key() | ||
| 502 | return NotImplemented | ||
| 503 | |||
| 504 | |||
| 505 | class _IPAddressBase(_TotalOrderingMixin): | ||
| 506 | |||
| 507 | """The mother class.""" | ||
| 508 | |||
| 509 | __slots__ = () | ||
| 510 | |||
| 511 | @property | ||
| 512 | def exploded(self): | ||
| 513 | """Return the longhand version of the IP address as a string.""" | ||
| 514 | return self._explode_shorthand_ip_string() | ||
| 515 | |||
| 516 | @property | ||
| 517 | def compressed(self): | ||
| 518 | """Return the shorthand version of the IP address as a string.""" | ||
| 519 | return _compat_str(self) | ||
| 520 | |||
| 521 | @property | ||
| 522 | def reverse_pointer(self): | ||
| 523 | """The name of the reverse DNS pointer for the IP address, e.g.: | ||
| 524 | >>> ipaddress.ip_address("127.0.0.1").reverse_pointer | ||
| 525 | '1.0.0.127.in-addr.arpa' | ||
| 526 | >>> ipaddress.ip_address("2001:db8::1").reverse_pointer | ||
| 527 | '1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa' | ||
| 528 | |||
| 529 | """ | ||
| 530 | return self._reverse_pointer() | ||
| 531 | |||
| 532 | @property | ||
| 533 | def version(self): | ||
| 534 | msg = '%200s has no version specified' % (type(self),) | ||
| 535 | raise NotImplementedError(msg) | ||
| 536 | |||
| 537 | def _check_int_address(self, address): | ||
| 538 | if address < 0: | ||
| 539 | msg = "%d (< 0) is not permitted as an IPv%d address" | ||
| 540 | raise AddressValueError(msg % (address, self._version)) | ||
| 541 | if address > self._ALL_ONES: | ||
| 542 | msg = "%d (>= 2**%d) is not permitted as an IPv%d address" | ||
| 543 | raise AddressValueError(msg % (address, self._max_prefixlen, | ||
| 544 | self._version)) | ||
| 545 | |||
| 546 | def _check_packed_address(self, address, expected_len): | ||
| 547 | address_len = len(address) | ||
| 548 | if address_len != expected_len: | ||
| 549 | msg = ( | ||
| 550 | '%r (len %d != %d) is not permitted as an IPv%d address. ' | ||
| 551 | 'Did you pass in a bytes (str in Python 2) instead of' | ||
| 552 | ' a unicode object?') | ||
| 553 | raise AddressValueError(msg % (address, address_len, | ||
| 554 | expected_len, self._version)) | ||
| 555 | |||
| 556 | @classmethod | ||
| 557 | def _ip_int_from_prefix(cls, prefixlen): | ||
| 558 | """Turn the prefix length into a bitwise netmask | ||
| 559 | |||
| 560 | Args: | ||
| 561 | prefixlen: An integer, the prefix length. | ||
| 562 | |||
| 563 | Returns: | ||
| 564 | An integer. | ||
| 565 | |||
| 566 | """ | ||
| 567 | return cls._ALL_ONES ^ (cls._ALL_ONES >> prefixlen) | ||
| 568 | |||
| 569 | @classmethod | ||
| 570 | def _prefix_from_ip_int(cls, ip_int): | ||
| 571 | """Return prefix length from the bitwise netmask. | ||
| 572 | |||
| 573 | Args: | ||
| 574 | ip_int: An integer, the netmask in expanded bitwise format | ||
| 575 | |||
| 576 | Returns: | ||
| 577 | An integer, the prefix length. | ||
| 578 | |||
| 579 | Raises: | ||
| 580 | ValueError: If the input intermingles zeroes & ones | ||
| 581 | """ | ||
| 582 | trailing_zeroes = _count_righthand_zero_bits(ip_int, | ||
| 583 | cls._max_prefixlen) | ||
| 584 | prefixlen = cls._max_prefixlen - trailing_zeroes | ||
| 585 | leading_ones = ip_int >> trailing_zeroes | ||
| 586 | all_ones = (1 << prefixlen) - 1 | ||
| 587 | if leading_ones != all_ones: | ||
| 588 | byteslen = cls._max_prefixlen // 8 | ||
| 589 | details = _compat_to_bytes(ip_int, byteslen, 'big') | ||
| 590 | msg = 'Netmask pattern %r mixes zeroes & ones' | ||
| 591 | raise ValueError(msg % details) | ||
| 592 | return prefixlen | ||
| 593 | |||
| 594 | @classmethod | ||
| 595 | def _report_invalid_netmask(cls, netmask_str): | ||
| 596 | msg = '%r is not a valid netmask' % netmask_str | ||
| 597 | raise NetmaskValueError(msg) | ||
| 598 | |||
| 599 | @classmethod | ||
| 600 | def _prefix_from_prefix_string(cls, prefixlen_str): | ||
| 601 | """Return prefix length from a numeric string | ||
| 602 | |||
| 603 | Args: | ||
| 604 | prefixlen_str: The string to be converted | ||
| 605 | |||
| 606 | Returns: | ||
| 607 | An integer, the prefix length. | ||
| 608 | |||
| 609 | Raises: | ||
| 610 | NetmaskValueError: If the input is not a valid netmask | ||
| 611 | """ | ||
| 612 | # int allows a leading +/- as well as surrounding whitespace, | ||
| 613 | # so we ensure that isn't the case | ||
| 614 | if not _BaseV4._DECIMAL_DIGITS.issuperset(prefixlen_str): | ||
| 615 | cls._report_invalid_netmask(prefixlen_str) | ||
| 616 | try: | ||
| 617 | prefixlen = int(prefixlen_str) | ||
| 618 | except ValueError: | ||
| 619 | cls._report_invalid_netmask(prefixlen_str) | ||
| 620 | if not (0 <= prefixlen <= cls._max_prefixlen): | ||
| 621 | cls._report_invalid_netmask(prefixlen_str) | ||
| 622 | return prefixlen | ||
| 623 | |||
| 624 | @classmethod | ||
| 625 | def _prefix_from_ip_string(cls, ip_str): | ||
| 626 | """Turn a netmask/hostmask string into a prefix length | ||
| 627 | |||
| 628 | Args: | ||
| 629 | ip_str: The netmask/hostmask to be converted | ||
| 630 | |||
| 631 | Returns: | ||
| 632 | An integer, the prefix length. | ||
| 633 | |||
| 634 | Raises: | ||
| 635 | NetmaskValueError: If the input is not a valid netmask/hostmask | ||
| 636 | """ | ||
| 637 | # Parse the netmask/hostmask like an IP address. | ||
| 638 | try: | ||
| 639 | ip_int = cls._ip_int_from_string(ip_str) | ||
| 640 | except AddressValueError: | ||
| 641 | cls._report_invalid_netmask(ip_str) | ||
| 642 | |||
| 643 | # Try matching a netmask (this would be /1*0*/ as a bitwise regexp). | ||
| 644 | # Note that the two ambiguous cases (all-ones and all-zeroes) are | ||
| 645 | # treated as netmasks. | ||
| 646 | try: | ||
| 647 | return cls._prefix_from_ip_int(ip_int) | ||
| 648 | except ValueError: | ||
| 649 | pass | ||
| 650 | |||
| 651 | # Invert the bits, and try matching a /0+1+/ hostmask instead. | ||
| 652 | ip_int ^= cls._ALL_ONES | ||
| 653 | try: | ||
| 654 | return cls._prefix_from_ip_int(ip_int) | ||
| 655 | except ValueError: | ||
| 656 | cls._report_invalid_netmask(ip_str) | ||
| 657 | |||
| 658 | def __reduce__(self): | ||
| 659 | return self.__class__, (_compat_str(self),) | ||
| 660 | |||
| 661 | |||
| 662 | class _BaseAddress(_IPAddressBase): | ||
| 663 | |||
| 664 | """A generic IP object. | ||
| 665 | |||
| 666 | This IP class contains the version independent methods which are | ||
| 667 | used by single IP addresses. | ||
| 668 | """ | ||
| 669 | |||
| 670 | __slots__ = () | ||
| 671 | |||
| 672 | def __int__(self): | ||
| 673 | return self._ip | ||
| 674 | |||
| 675 | def __eq__(self, other): | ||
| 676 | try: | ||
| 677 | return (self._ip == other._ip and | ||
| 678 | self._version == other._version) | ||
| 679 | except AttributeError: | ||
| 680 | return NotImplemented | ||
| 681 | |||
| 682 | def __lt__(self, other): | ||
| 683 | if not isinstance(other, _IPAddressBase): | ||
| 684 | return NotImplemented | ||
| 685 | if not isinstance(other, _BaseAddress): | ||
| 686 | raise TypeError('%s and %s are not of the same type' % ( | ||
| 687 | self, other)) | ||
| 688 | if self._version != other._version: | ||
| 689 | raise TypeError('%s and %s are not of the same version' % ( | ||
| 690 | self, other)) | ||
| 691 | if self._ip != other._ip: | ||
| 692 | return self._ip < other._ip | ||
| 693 | return False | ||
| 694 | |||
| 695 | # Shorthand for Integer addition and subtraction. This is not | ||
| 696 | # meant to ever support addition/subtraction of addresses. | ||
| 697 | def __add__(self, other): | ||
| 698 | if not isinstance(other, _compat_int_types): | ||
| 699 | return NotImplemented | ||
| 700 | return self.__class__(int(self) + other) | ||
| 701 | |||
| 702 | def __sub__(self, other): | ||
| 703 | if not isinstance(other, _compat_int_types): | ||
| 704 | return NotImplemented | ||
| 705 | return self.__class__(int(self) - other) | ||
| 706 | |||
| 707 | def __repr__(self): | ||
| 708 | return '%s(%r)' % (self.__class__.__name__, _compat_str(self)) | ||
| 709 | |||
| 710 | def __str__(self): | ||
| 711 | return _compat_str(self._string_from_ip_int(self._ip)) | ||
| 712 | |||
| 713 | def __hash__(self): | ||
| 714 | return hash(hex(int(self._ip))) | ||
| 715 | |||
| 716 | def _get_address_key(self): | ||
| 717 | return (self._version, self) | ||
| 718 | |||
| 719 | def __reduce__(self): | ||
| 720 | return self.__class__, (self._ip,) | ||
| 721 | |||
| 722 | |||
| 723 | class _BaseNetwork(_IPAddressBase): | ||
| 724 | |||
| 725 | """A generic IP network object. | ||
| 726 | |||
| 727 | This IP class contains the version independent methods which are | ||
| 728 | used by networks. | ||
| 729 | |||
| 730 | """ | ||
| 731 | def __init__(self, address): | ||
| 732 | self._cache = {} | ||
| 733 | |||
| 734 | def __repr__(self): | ||
| 735 | return '%s(%r)' % (self.__class__.__name__, _compat_str(self)) | ||
| 736 | |||
| 737 | def __str__(self): | ||
| 738 | return '%s/%d' % (self.network_address, self.prefixlen) | ||
| 739 | |||
| 740 | def hosts(self): | ||
| 741 | """Generate Iterator over usable hosts in a network. | ||
| 742 | |||
| 743 | This is like __iter__ except it doesn't return the network | ||
| 744 | or broadcast addresses. | ||
| 745 | |||
| 746 | """ | ||
| 747 | network = int(self.network_address) | ||
| 748 | broadcast = int(self.broadcast_address) | ||
| 749 | for x in _compat_range(network + 1, broadcast): | ||
| 750 | yield self._address_class(x) | ||
| 751 | |||
| 752 | def __iter__(self): | ||
| 753 | network = int(self.network_address) | ||
| 754 | broadcast = int(self.broadcast_address) | ||
| 755 | for x in _compat_range(network, broadcast + 1): | ||
| 756 | yield self._address_class(x) | ||
| 757 | |||
| 758 | def __getitem__(self, n): | ||
| 759 | network = int(self.network_address) | ||
| 760 | broadcast = int(self.broadcast_address) | ||
| 761 | if n >= 0: | ||
| 762 | if network + n > broadcast: | ||
| 763 | raise IndexError('address out of range') | ||
| 764 | return self._address_class(network + n) | ||
| 765 | else: | ||
| 766 | n += 1 | ||
| 767 | if broadcast + n < network: | ||
| 768 | raise IndexError('address out of range') | ||
| 769 | return self._address_class(broadcast + n) | ||
| 770 | |||
| 771 | def __lt__(self, other): | ||
| 772 | if not isinstance(other, _IPAddressBase): | ||
| 773 | return NotImplemented | ||
| 774 | if not isinstance(other, _BaseNetwork): | ||
| 775 | raise TypeError('%s and %s are not of the same type' % ( | ||
| 776 | self, other)) | ||
| 777 | if self._version != other._version: | ||
| 778 | raise TypeError('%s and %s are not of the same version' % ( | ||
| 779 | self, other)) | ||
| 780 | if self.network_address != other.network_address: | ||
| 781 | return self.network_address < other.network_address | ||
| 782 | if self.netmask != other.netmask: | ||
| 783 | return self.netmask < other.netmask | ||
| 784 | return False | ||
| 785 | |||
| 786 | def __eq__(self, other): | ||
| 787 | try: | ||
| 788 | return (self._version == other._version and | ||
| 789 | self.network_address == other.network_address and | ||
| 790 | int(self.netmask) == int(other.netmask)) | ||
| 791 | except AttributeError: | ||
| 792 | return NotImplemented | ||
| 793 | |||
| 794 | def __hash__(self): | ||
| 795 | return hash(int(self.network_address) ^ int(self.netmask)) | ||
| 796 | |||
| 797 | def __contains__(self, other): | ||
| 798 | # always false if one is v4 and the other is v6. | ||
| 799 | if self._version != other._version: | ||
| 800 | return False | ||
| 801 | # dealing with another network. | ||
| 802 | if isinstance(other, _BaseNetwork): | ||
| 803 | return False | ||
| 804 | # dealing with another address | ||
| 805 | else: | ||
| 806 | # address | ||
| 807 | return (int(self.network_address) <= int(other._ip) <= | ||
| 808 | int(self.broadcast_address)) | ||
| 809 | |||
| 810 | def overlaps(self, other): | ||
| 811 | """Tell if self is partly contained in other.""" | ||
| 812 | return self.network_address in other or ( | ||
| 813 | self.broadcast_address in other or ( | ||
| 814 | other.network_address in self or ( | ||
| 815 | other.broadcast_address in self))) | ||
| 816 | |||
| 817 | @property | ||
| 818 | def broadcast_address(self): | ||
| 819 | x = self._cache.get('broadcast_address') | ||
| 820 | if x is None: | ||
| 821 | x = self._address_class(int(self.network_address) | | ||
| 822 | int(self.hostmask)) | ||
| 823 | self._cache['broadcast_address'] = x | ||
| 824 | return x | ||
| 825 | |||
| 826 | @property | ||
| 827 | def hostmask(self): | ||
| 828 | x = self._cache.get('hostmask') | ||
| 829 | if x is None: | ||
| 830 | x = self._address_class(int(self.netmask) ^ self._ALL_ONES) | ||
| 831 | self._cache['hostmask'] = x | ||
| 832 | return x | ||
| 833 | |||
| 834 | @property | ||
| 835 | def with_prefixlen(self): | ||
| 836 | return '%s/%d' % (self.network_address, self._prefixlen) | ||
| 837 | |||
| 838 | @property | ||
| 839 | def with_netmask(self): | ||
| 840 | return '%s/%s' % (self.network_address, self.netmask) | ||
| 841 | |||
| 842 | @property | ||
| 843 | def with_hostmask(self): | ||
| 844 | return '%s/%s' % (self.network_address, self.hostmask) | ||
| 845 | |||
| 846 | @property | ||
| 847 | def num_addresses(self): | ||
| 848 | """Number of hosts in the current subnet.""" | ||
| 849 | return int(self.broadcast_address) - int(self.network_address) + 1 | ||
| 850 | |||
| 851 | @property | ||
| 852 | def _address_class(self): | ||
| 853 | # Returning bare address objects (rather than interfaces) allows for | ||
| 854 | # more consistent behaviour across the network address, broadcast | ||
| 855 | # address and individual host addresses. | ||
| 856 | msg = '%200s has no associated address class' % (type(self),) | ||
| 857 | raise NotImplementedError(msg) | ||
| 858 | |||
| 859 | @property | ||
| 860 | def prefixlen(self): | ||
| 861 | return self._prefixlen | ||
| 862 | |||
| 863 | def address_exclude(self, other): | ||
| 864 | """Remove an address from a larger block. | ||
| 865 | |||
| 866 | For example: | ||
| 867 | |||
| 868 | addr1 = ip_network('192.0.2.0/28') | ||
| 869 | addr2 = ip_network('192.0.2.1/32') | ||
| 870 | list(addr1.address_exclude(addr2)) = | ||
| 871 | [IPv4Network('192.0.2.0/32'), IPv4Network('192.0.2.2/31'), | ||
| 872 | IPv4Network('192.0.2.4/30'), IPv4Network('192.0.2.8/29')] | ||
| 873 | |||
| 874 | or IPv6: | ||
| 875 | |||
| 876 | addr1 = ip_network('2001:db8::1/32') | ||
| 877 | addr2 = ip_network('2001:db8::1/128') | ||
| 878 | list(addr1.address_exclude(addr2)) = | ||
| 879 | [ip_network('2001:db8::1/128'), | ||
| 880 | ip_network('2001:db8::2/127'), | ||
| 881 | ip_network('2001:db8::4/126'), | ||
| 882 | ip_network('2001:db8::8/125'), | ||
| 883 | ... | ||
| 884 | ip_network('2001:db8:8000::/33')] | ||
| 885 | |||
| 886 | Args: | ||
| 887 | other: An IPv4Network or IPv6Network object of the same type. | ||
| 888 | |||
| 889 | Returns: | ||
| 890 | An iterator of the IPv(4|6)Network objects which is self | ||
| 891 | minus other. | ||
| 892 | |||
| 893 | Raises: | ||
| 894 | TypeError: If self and other are of differing address | ||
| 895 | versions, or if other is not a network object. | ||
| 896 | ValueError: If other is not completely contained by self. | ||
| 897 | |||
| 898 | """ | ||
| 899 | if not self._version == other._version: | ||
| 900 | raise TypeError("%s and %s are not of the same version" % ( | ||
| 901 | self, other)) | ||
| 902 | |||
| 903 | if not isinstance(other, _BaseNetwork): | ||
| 904 | raise TypeError("%s is not a network object" % other) | ||
| 905 | |||
| 906 | if not other.subnet_of(self): | ||
| 907 | raise ValueError('%s not contained in %s' % (other, self)) | ||
| 908 | if other == self: | ||
| 909 | return | ||
| 910 | |||
| 911 | # Make sure we're comparing the network of other. | ||
| 912 | other = other.__class__('%s/%s' % (other.network_address, | ||
| 913 | other.prefixlen)) | ||
| 914 | |||
| 915 | s1, s2 = self.subnets() | ||
| 916 | while s1 != other and s2 != other: | ||
| 917 | if other.subnet_of(s1): | ||
| 918 | yield s2 | ||
| 919 | s1, s2 = s1.subnets() | ||
| 920 | elif other.subnet_of(s2): | ||
| 921 | yield s1 | ||
| 922 | s1, s2 = s2.subnets() | ||
| 923 | else: | ||
| 924 | # If we got here, there's a bug somewhere. | ||
| 925 | raise AssertionError('Error performing exclusion: ' | ||
| 926 | 's1: %s s2: %s other: %s' % | ||
| 927 | (s1, s2, other)) | ||
| 928 | if s1 == other: | ||
| 929 | yield s2 | ||
| 930 | elif s2 == other: | ||
| 931 | yield s1 | ||
| 932 | else: | ||
| 933 | # If we got here, there's a bug somewhere. | ||
| 934 | raise AssertionError('Error performing exclusion: ' | ||
| 935 | 's1: %s s2: %s other: %s' % | ||
| 936 | (s1, s2, other)) | ||
| 937 | |||
| 938 | def compare_networks(self, other): | ||
| 939 | """Compare two IP objects. | ||
| 940 | |||
| 941 | This is only concerned about the comparison of the integer | ||
| 942 | representation of the network addresses. This means that the | ||
| 943 | host bits aren't considered at all in this method. If you want | ||
| 944 | to compare host bits, you can easily enough do a | ||
| 945 | 'HostA._ip < HostB._ip' | ||
| 946 | |||
| 947 | Args: | ||
| 948 | other: An IP object. | ||
| 949 | |||
| 950 | Returns: | ||
| 951 | If the IP versions of self and other are the same, returns: | ||
| 952 | |||
| 953 | -1 if self < other: | ||
| 954 | eg: IPv4Network('192.0.2.0/25') < IPv4Network('192.0.2.128/25') | ||
| 955 | IPv6Network('2001:db8::1000/124') < | ||
| 956 | IPv6Network('2001:db8::2000/124') | ||
| 957 | 0 if self == other | ||
| 958 | eg: IPv4Network('192.0.2.0/24') == IPv4Network('192.0.2.0/24') | ||
| 959 | IPv6Network('2001:db8::1000/124') == | ||
| 960 | IPv6Network('2001:db8::1000/124') | ||
| 961 | 1 if self > other | ||
| 962 | eg: IPv4Network('192.0.2.128/25') > IPv4Network('192.0.2.0/25') | ||
| 963 | IPv6Network('2001:db8::2000/124') > | ||
| 964 | IPv6Network('2001:db8::1000/124') | ||
| 965 | |||
| 966 | Raises: | ||
| 967 | TypeError if the IP versions are different. | ||
| 968 | |||
| 969 | """ | ||
| 970 | # does this need to raise a ValueError? | ||
| 971 | if self._version != other._version: | ||
| 972 | raise TypeError('%s and %s are not of the same type' % ( | ||
| 973 | self, other)) | ||
| 974 | # self._version == other._version below here: | ||
| 975 | if self.network_address < other.network_address: | ||
| 976 | return -1 | ||
| 977 | if self.network_address > other.network_address: | ||
| 978 | return 1 | ||
| 979 | # self.network_address == other.network_address below here: | ||
| 980 | if self.netmask < other.netmask: | ||
| 981 | return -1 | ||
| 982 | if self.netmask > other.netmask: | ||
| 983 | return 1 | ||
| 984 | return 0 | ||
| 985 | |||
| 986 | def _get_networks_key(self): | ||
| 987 | """Network-only key function. | ||
| 988 | |||
| 989 | Returns an object that identifies this address' network and | ||
| 990 | netmask. This function is a suitable "key" argument for sorted() | ||
| 991 | and list.sort(). | ||
| 992 | |||
| 993 | """ | ||
| 994 | return (self._version, self.network_address, self.netmask) | ||
| 995 | |||
| 996 | def subnets(self, prefixlen_diff=1, new_prefix=None): | ||
| 997 | """The subnets which join to make the current subnet. | ||
| 998 | |||
| 999 | In the case that self contains only one IP | ||
| 1000 | (self._prefixlen == 32 for IPv4 or self._prefixlen == 128 | ||
| 1001 | for IPv6), the iterator yields just ourself. | ||
| 1002 | |||
| 1003 | Args: | ||
| 1004 | prefixlen_diff: An integer, the amount the prefix length | ||
| 1005 | should be increased by. This should not be set if | ||
| 1006 | new_prefix is also set. | ||
| 1007 | new_prefix: The desired new prefix length. This must be a | ||
| 1008 | larger number (smaller prefix) than the existing prefix. | ||
| 1009 | This should not be set if prefixlen_diff is also set. | ||
| 1010 | |||
| 1011 | Returns: | ||
| 1012 | An iterator of IPv(4|6) objects. | ||
| 1013 | |||
| 1014 | Raises: | ||
| 1015 | ValueError: The prefixlen_diff is too small or too large. | ||
| 1016 | OR | ||
| 1017 | prefixlen_diff and new_prefix are both set or new_prefix | ||
| 1018 | is a smaller number than the current prefix (smaller | ||
| 1019 | number means a larger network) | ||
| 1020 | |||
| 1021 | """ | ||
| 1022 | if self._prefixlen == self._max_prefixlen: | ||
| 1023 | yield self | ||
| 1024 | return | ||
| 1025 | |||
| 1026 | if new_prefix is not None: | ||
| 1027 | if new_prefix < self._prefixlen: | ||
| 1028 | raise ValueError('new prefix must be longer') | ||
| 1029 | if prefixlen_diff != 1: | ||
| 1030 | raise ValueError('cannot set prefixlen_diff and new_prefix') | ||
| 1031 | prefixlen_diff = new_prefix - self._prefixlen | ||
| 1032 | |||
| 1033 | if prefixlen_diff < 0: | ||
| 1034 | raise ValueError('prefix length diff must be > 0') | ||
| 1035 | new_prefixlen = self._prefixlen + prefixlen_diff | ||
| 1036 | |||
| 1037 | if new_prefixlen > self._max_prefixlen: | ||
| 1038 | raise ValueError( | ||
| 1039 | 'prefix length diff %d is invalid for netblock %s' % ( | ||
| 1040 | new_prefixlen, self)) | ||
| 1041 | |||
| 1042 | start = int(self.network_address) | ||
| 1043 | end = int(self.broadcast_address) + 1 | ||
| 1044 | step = (int(self.hostmask) + 1) >> prefixlen_diff | ||
| 1045 | for new_addr in _compat_range(start, end, step): | ||
| 1046 | current = self.__class__((new_addr, new_prefixlen)) | ||
| 1047 | yield current | ||
| 1048 | |||
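Both calling conventions produce the same split; for example, dividing a /24 into four /26 blocks (illustrative sketch):

    >>> net = IPv4Network('192.0.2.0/24')
    >>> list(net.subnets(prefixlen_diff=2))
    [IPv4Network('192.0.2.0/26'), IPv4Network('192.0.2.64/26'),
     IPv4Network('192.0.2.128/26'), IPv4Network('192.0.2.192/26')]
    >>> list(net.subnets(new_prefix=26)) == list(net.subnets(prefixlen_diff=2))
    True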
| 1049 | def supernet(self, prefixlen_diff=1, new_prefix=None): | ||
| 1050 | """The supernet containing the current network. | ||
| 1051 | |||
| 1052 | Args: | ||
| 1053 | prefixlen_diff: An integer, the amount the prefix length of | ||
| 1054 | the network should be decreased by. For example, given a | ||
| 1055 | /24 network and a prefixlen_diff of 3, a supernet with a | ||
| 1056 | /21 netmask is returned. | ||
| 1057 | |||
| 1058 | Returns: | ||
| 1059 | An IPv4Network or IPv6Network object, matching the version of self. | ||
| 1060 | |||
| 1061 | Raises: | ||
| 1062 | ValueError: If self.prefixlen - prefixlen_diff < 0. I.e., you have | ||
| 1063 | a negative prefix length. | ||
| 1064 | OR | ||
| 1065 | If prefixlen_diff and new_prefix are both set or new_prefix is a | ||
| 1066 | larger number than the current prefix (larger number means a | ||
| 1067 | smaller network) | ||
| 1068 | |||
| 1069 | """ | ||
| 1070 | if self._prefixlen == 0: | ||
| 1071 | return self | ||
| 1072 | |||
| 1073 | if new_prefix is not None: | ||
| 1074 | if new_prefix > self._prefixlen: | ||
| 1075 | raise ValueError('new prefix must be shorter') | ||
| 1076 | if prefixlen_diff != 1: | ||
| 1077 | raise ValueError('cannot set prefixlen_diff and new_prefix') | ||
| 1078 | prefixlen_diff = self._prefixlen - new_prefix | ||
| 1079 | |||
| 1080 | new_prefixlen = self.prefixlen - prefixlen_diff | ||
| 1081 | if new_prefixlen < 0: | ||
| 1082 | raise ValueError( | ||
| 1083 | 'current prefixlen is %d, cannot have a prefixlen_diff of %d' % | ||
| 1084 | (self.prefixlen, prefixlen_diff)) | ||
| 1085 | return self.__class__(( | ||
| 1086 | int(self.network_address) & (int(self.netmask) << prefixlen_diff), | ||
| 1087 | new_prefixlen)) | ||
| 1088 | |||
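As a quick sketch, both the prefixlen_diff and new_prefix spellings collapse a /24 into its enclosing block, masking off the host bits of the network address:

    >>> IPv4Network('192.0.2.0/24').supernet(prefixlen_diff=3)
    IPv4Network('192.0.0.0/21')
    >>> IPv4Network('192.0.2.0/24').supernet(new_prefix=20)
    IPv4Network('192.0.0.0/20')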
| 1089 | @property | ||
| 1090 | def is_multicast(self): | ||
| 1091 | """Test if the address is reserved for multicast use. | ||
| 1092 | |||
| 1093 | Returns: | ||
| 1094 | A boolean, True if the address is a multicast address. | ||
| 1095 | See RFC 2373 2.7 for details. | ||
| 1096 | |||
| 1097 | """ | ||
| 1098 | return (self.network_address.is_multicast and | ||
| 1099 | self.broadcast_address.is_multicast) | ||
| 1100 | |||
| 1101 | @staticmethod | ||
| 1102 | def _is_subnet_of(a, b): | ||
| 1103 | try: | ||
| 1104 | # Always false if one is v4 and the other is v6. | ||
| 1105 | if a._version != b._version: | ||
| 1106 | raise TypeError("%s and %s are not of the same version" % (a, b)) | ||
| 1107 | return (b.network_address <= a.network_address and | ||
| 1108 | b.broadcast_address >= a.broadcast_address) | ||
| 1109 | except AttributeError: | ||
| 1110 | raise TypeError("Unable to test subnet containment " | ||
| 1111 | "between %s and %s" % (a, b)) | ||
| 1112 | |||
| 1113 | def subnet_of(self, other): | ||
| 1114 | """Return True if this network is a subnet of other.""" | ||
| 1115 | return self._is_subnet_of(self, other) | ||
| 1116 | |||
| 1117 | def supernet_of(self, other): | ||
| 1118 | """Return True if this network is a supernet of other.""" | ||
| 1119 | return self._is_subnet_of(other, self) | ||
| 1120 | |||
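Containment is inclusive of the endpoints, so a network is both a subnet and a supernet of itself; illustrative sketch:

    >>> a = IPv4Network('192.0.2.0/28')
    >>> b = IPv4Network('192.0.2.0/24')
    >>> a.subnet_of(b), b.supernet_of(a)
    (True, True)
    >>> b.subnet_of(a)
    False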
| 1121 | @property | ||
| 1122 | def is_reserved(self): | ||
| 1123 | """Test if the address is otherwise IETF reserved. | ||
| 1124 | |||
| 1125 | Returns: | ||
| 1126 | A boolean, True if the address is within one of the | ||
| 1127 | reserved IPv4 or IPv6 network ranges. | ||
| 1128 | |||
| 1129 | """ | ||
| 1130 | return (self.network_address.is_reserved and | ||
| 1131 | self.broadcast_address.is_reserved) | ||
| 1132 | |||
| 1133 | @property | ||
| 1134 | def is_link_local(self): | ||
| 1135 | """Test if the address is reserved for link-local. | ||
| 1136 | |||
| 1137 | Returns: | ||
| 1138 | A boolean, True if the address is reserved per RFC 4291. | ||
| 1139 | |||
| 1140 | """ | ||
| 1141 | return (self.network_address.is_link_local and | ||
| 1142 | self.broadcast_address.is_link_local) | ||
| 1143 | |||
| 1144 | @property | ||
| 1145 | def is_private(self): | ||
| 1146 | """Test if this address is allocated for private networks. | ||
| 1147 | |||
| 1148 | Returns: | ||
| 1149 | A boolean, True if the address is reserved per | ||
| 1150 | iana-ipv4-special-registry or iana-ipv6-special-registry. | ||
| 1151 | |||
| 1152 | """ | ||
| 1153 | return (self.network_address.is_private and | ||
| 1154 | self.broadcast_address.is_private) | ||
| 1155 | |||
| 1156 | @property | ||
| 1157 | def is_global(self): | ||
| 1158 | """Test if this address is allocated for public networks. | ||
| 1159 | |||
| 1160 | Returns: | ||
| 1161 | A boolean, True if the address is not reserved per | ||
| 1162 | iana-ipv4-special-registry or iana-ipv6-special-registry. | ||
| 1163 | |||
| 1164 | """ | ||
| 1165 | return not self.is_private | ||
| 1166 | |||
| 1167 | @property | ||
| 1168 | def is_unspecified(self): | ||
| 1169 | """Test if the address is unspecified. | ||
| 1170 | |||
| 1171 | Returns: | ||
| 1172 | A boolean, True if this is the unspecified address as defined in | ||
| 1173 | RFC 2373 2.5.2. | ||
| 1174 | |||
| 1175 | """ | ||
| 1176 | return (self.network_address.is_unspecified and | ||
| 1177 | self.broadcast_address.is_unspecified) | ||
| 1178 | |||
| 1179 | @property | ||
| 1180 | def is_loopback(self): | ||
| 1181 | """Test if the address is a loopback address. | ||
| 1182 | |||
| 1183 | Returns: | ||
| 1184 | A boolean, True if the address is a loopback address as defined in | ||
| 1185 | RFC 2373 2.5.3. | ||
| 1186 | |||
| 1187 | """ | ||
| 1188 | return (self.network_address.is_loopback and | ||
| 1189 | self.broadcast_address.is_loopback) | ||
| 1190 | |||
| 1191 | |||
| 1192 | class _BaseV4(object): | ||
| 1193 | |||
| 1194 | """Base IPv4 object. | ||
| 1195 | |||
| 1196 | The following methods are used by IPv4 objects in both single IP | ||
| 1197 | addresses and networks. | ||
| 1198 | |||
| 1199 | """ | ||
| 1200 | |||
| 1201 | __slots__ = () | ||
| 1202 | _version = 4 | ||
| 1203 | # Equivalent to 255.255.255.255 or 32 bits of 1's. | ||
| 1204 | _ALL_ONES = (2 ** IPV4LENGTH) - 1 | ||
| 1205 | _DECIMAL_DIGITS = frozenset('0123456789') | ||
| 1206 | |||
| 1207 | # the valid octets for host and netmasks. only useful for IPv4. | ||
| 1208 | _valid_mask_octets = frozenset([255, 254, 252, 248, 240, 224, 192, 128, 0]) | ||
| 1209 | |||
| 1210 | _max_prefixlen = IPV4LENGTH | ||
| 1211 | # There are only a handful of valid v4 netmasks, so we cache them all | ||
| 1212 | # when constructed (see _make_netmask()). | ||
| 1213 | _netmask_cache = {} | ||
| 1214 | |||
| 1215 | def _explode_shorthand_ip_string(self): | ||
| 1216 | return _compat_str(self) | ||
| 1217 | |||
| 1218 | @classmethod | ||
| 1219 | def _make_netmask(cls, arg): | ||
| 1220 | """Make a (netmask, prefix_len) tuple from the given argument. | ||
| 1221 | |||
| 1222 | Argument can be: | ||
| 1223 | - an integer (the prefix length) | ||
| 1224 | - a string representing the prefix length (e.g. "24") | ||
| 1225 | - a string representing the prefix netmask (e.g. "255.255.255.0") | ||
| 1226 | """ | ||
| 1227 | if arg not in cls._netmask_cache: | ||
| 1228 | if isinstance(arg, _compat_int_types): | ||
| 1229 | prefixlen = arg | ||
| 1230 | else: | ||
| 1231 | try: | ||
| 1232 | # Check for a netmask in prefix length form | ||
| 1233 | prefixlen = cls._prefix_from_prefix_string(arg) | ||
| 1234 | except NetmaskValueError: | ||
| 1235 | # Check for a netmask or hostmask in dotted-quad form. | ||
| 1236 | # This may raise NetmaskValueError. | ||
| 1237 | prefixlen = cls._prefix_from_ip_string(arg) | ||
| 1238 | netmask = IPv4Address(cls._ip_int_from_prefix(prefixlen)) | ||
| 1239 | cls._netmask_cache[arg] = netmask, prefixlen | ||
| 1240 | return cls._netmask_cache[arg] | ||
| 1241 | |||
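Because of this normalization, prefix-length, netmask, and hostmask spellings all construct equal networks, and repeated lookups are served from _netmask_cache (illustrative sketch):

    >>> IPv4Network('192.0.2.0/255.255.255.0') == IPv4Network('192.0.2.0/24')
    True
    >>> IPv4Network('192.0.2.0/0.0.0.255') == IPv4Network('192.0.2.0/24')
    True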
| 1242 | @classmethod | ||
| 1243 | def _ip_int_from_string(cls, ip_str): | ||
| 1244 | """Turn the given IP string into an integer for comparison. | ||
| 1245 | |||
| 1246 | Args: | ||
| 1247 | ip_str: A string, the IP ip_str. | ||
| 1248 | |||
| 1249 | Returns: | ||
| 1250 | The IP ip_str as an integer. | ||
| 1251 | |||
| 1252 | Raises: | ||
| 1253 | AddressValueError: if ip_str isn't a valid IPv4 Address. | ||
| 1254 | |||
| 1255 | """ | ||
| 1256 | if not ip_str: | ||
| 1257 | raise AddressValueError('Address cannot be empty') | ||
| 1258 | |||
| 1259 | octets = ip_str.split('.') | ||
| 1260 | if len(octets) != 4: | ||
| 1261 | raise AddressValueError("Expected 4 octets in %r" % ip_str) | ||
| 1262 | |||
| 1263 | try: | ||
| 1264 | return _compat_int_from_byte_vals( | ||
| 1265 | map(cls._parse_octet, octets), 'big') | ||
| 1266 | except ValueError as exc: | ||
| 1267 | raise AddressValueError("%s in %r" % (exc, ip_str)) | ||
| 1268 | |||
| 1269 | @classmethod | ||
| 1270 | def _parse_octet(cls, octet_str): | ||
| 1271 | """Convert a decimal octet into an integer. | ||
| 1272 | |||
| 1273 | Args: | ||
| 1274 | octet_str: A string, the number to parse. | ||
| 1275 | |||
| 1276 | Returns: | ||
| 1277 | The octet as an integer. | ||
| 1278 | |||
| 1279 | Raises: | ||
| 1280 | ValueError: if the octet isn't strictly a decimal from [0..255]. | ||
| 1281 | |||
| 1282 | """ | ||
| 1283 | if not octet_str: | ||
| 1284 | raise ValueError("Empty octet not permitted") | ||
| 1285 | # Whitelist the characters, since int() allows a lot of bizarre stuff. | ||
| 1286 | if not cls._DECIMAL_DIGITS.issuperset(octet_str): | ||
| 1287 | msg = "Only decimal digits permitted in %r" | ||
| 1288 | raise ValueError(msg % octet_str) | ||
| 1289 | # We do the length check second, since the invalid character error | ||
| 1290 | # is likely to be more informative for the user | ||
| 1291 | if len(octet_str) > 3: | ||
| 1292 | msg = "At most 3 characters permitted in %r" | ||
| 1293 | raise ValueError(msg % octet_str) | ||
| 1294 | # Convert to integer (we know digits are legal) | ||
| 1295 | octet_int = int(octet_str, 10) | ||
| 1296 | # Any octets that look like they *might* be written in octal, | ||
| 1297 | # and which don't look exactly the same in both octal and | ||
| 1298 | # decimal are rejected as ambiguous | ||
| 1299 | if octet_int > 7 and octet_str[0] == '0': | ||
| 1300 | msg = "Ambiguous (octal/decimal) value in %r not permitted" | ||
| 1301 | raise ValueError(msg % octet_str) | ||
| 1302 | if octet_int > 255: | ||
| 1303 | raise ValueError("Octet %d (> 255) not permitted" % octet_int) | ||
| 1304 | return octet_int | ||
| 1305 | |||
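The ValueErrors raised here are re-wrapped by _ip_int_from_string above, so malformed octets surface as AddressValueError (illustrative sketch; the messages follow the templates in this method):

    >>> IPv4Address('192.0.2.010')
    Traceback (most recent call last):
        ...
    AddressValueError: Ambiguous (octal/decimal) value in '010' not permitted in '192.0.2.010'
    >>> IPv4Address('192.0.2.256')
    Traceback (most recent call last):
        ...
    AddressValueError: Octet 256 (> 255) not permitted in '192.0.2.256'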
| 1306 | @classmethod | ||
| 1307 | def _string_from_ip_int(cls, ip_int): | ||
| 1308 | """Turns a 32-bit integer into dotted decimal notation. | ||
| 1309 | |||
| 1310 | Args: | ||
| 1311 | ip_int: An integer, the IP address. | ||
| 1312 | |||
| 1313 | Returns: | ||
| 1314 | The IP address as a string in dotted decimal notation. | ||
| 1315 | |||
| 1316 | """ | ||
| 1317 | return '.'.join(_compat_str(struct.unpack(b'!B', b)[0] | ||
| 1318 | if isinstance(b, bytes) | ||
| 1319 | else b) | ||
| 1320 | for b in _compat_to_bytes(ip_int, 4, 'big')) | ||
| 1321 | |||
| 1322 | def _is_hostmask(self, ip_str): | ||
| 1323 | """Test if the IP string is a hostmask (rather than a netmask). | ||
| 1324 | |||
| 1325 | Args: | ||
| 1326 | ip_str: A string, the potential hostmask. | ||
| 1327 | |||
| 1328 | Returns: | ||
| 1329 | A boolean, True if the IP string is a hostmask. | ||
| 1330 | |||
| 1331 | """ | ||
| 1332 | bits = ip_str.split('.') | ||
| 1333 | try: | ||
| 1334 | parts = [x for x in map(int, bits) if x in self._valid_mask_octets] | ||
| 1335 | except ValueError: | ||
| 1336 | return False | ||
| 1337 | if len(parts) != len(bits): | ||
| 1338 | return False | ||
| 1339 | if parts[0] < parts[-1]: | ||
| 1340 | return True | ||
| 1341 | return False | ||
| 1342 | |||
| 1343 | def _reverse_pointer(self): | ||
| 1344 | """Return the reverse DNS pointer name for the IPv4 address. | ||
| 1345 | |||
| 1346 | This implements the method described in RFC1035 3.5. | ||
| 1347 | |||
| 1348 | """ | ||
| 1349 | reverse_octets = _compat_str(self).split('.')[::-1] | ||
| 1350 | return '.'.join(reverse_octets) + '.in-addr.arpa' | ||
| 1351 | |||
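The public reverse_pointer property (defined on the shared base class earlier in this file) delegates to this method; a quick sketch:

    >>> IPv4Address('192.0.2.1').reverse_pointer
    '1.2.0.192.in-addr.arpa'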
| 1352 | @property | ||
| 1353 | def max_prefixlen(self): | ||
| 1354 | return self._max_prefixlen | ||
| 1355 | |||
| 1356 | @property | ||
| 1357 | def version(self): | ||
| 1358 | return self._version | ||
| 1359 | |||
| 1360 | |||
| 1361 | class IPv4Address(_BaseV4, _BaseAddress): | ||
| 1362 | |||
| 1363 | """Represent and manipulate single IPv4 Addresses.""" | ||
| 1364 | |||
| 1365 | __slots__ = ('_ip', '__weakref__') | ||
| 1366 | |||
| 1367 | def __init__(self, address): | ||
| 1368 | |||
| 1369 | """ | ||
| 1370 | Args: | ||
| 1371 | address: A string or integer representing the IP | ||
| 1372 | |||
| 1373 | Additionally, an integer can be passed, so | ||
| 1374 | IPv4Address('192.0.2.1') == IPv4Address(3221225985). | ||
| 1375 | or, more generally | ||
| 1376 | IPv4Address(int(IPv4Address('192.0.2.1'))) == | ||
| 1377 | IPv4Address('192.0.2.1') | ||
| 1378 | |||
| 1379 | Raises: | ||
| 1380 | AddressValueError: If ipaddress isn't a valid IPv4 address. | ||
| 1381 | |||
| 1382 | """ | ||
| 1383 | # Efficient constructor from integer. | ||
| 1384 | if isinstance(address, _compat_int_types): | ||
| 1385 | self._check_int_address(address) | ||
| 1386 | self._ip = address | ||
| 1387 | return | ||
| 1388 | |||
| 1389 | # Constructing from a packed address | ||
| 1390 | if isinstance(address, bytes): | ||
| 1391 | self._check_packed_address(address, 4) | ||
| 1392 | bvs = _compat_bytes_to_byte_vals(address) | ||
| 1393 | self._ip = _compat_int_from_byte_vals(bvs, 'big') | ||
| 1394 | return | ||
| 1395 | |||
| 1396 | # Assume input argument to be string or any object representation | ||
| 1397 | # which converts into a formatted IP string. | ||
| 1398 | addr_str = _compat_str(address) | ||
| 1399 | if '/' in addr_str: | ||
| 1400 | raise AddressValueError("Unexpected '/' in %r" % address) | ||
| 1401 | self._ip = self._ip_int_from_string(addr_str) | ||
| 1402 | |||
| 1403 | @property | ||
| 1404 | def packed(self): | ||
| 1405 | """The binary representation of this address.""" | ||
| 1406 | return v4_int_to_packed(self._ip) | ||
| 1407 | |||
| 1408 | @property | ||
| 1409 | def is_reserved(self): | ||
| 1410 | """Test if the address is otherwise IETF reserved. | ||
| 1411 | |||
| 1412 | Returns: | ||
| 1413 | A boolean, True if the address is within the | ||
| 1414 | reserved IPv4 Network range. | ||
| 1415 | |||
| 1416 | """ | ||
| 1417 | return self in self._constants._reserved_network | ||
| 1418 | |||
| 1419 | @property | ||
| 1420 | def is_private(self): | ||
| 1421 | """Test if this address is allocated for private networks. | ||
| 1422 | |||
| 1423 | Returns: | ||
| 1424 | A boolean, True if the address is reserved per | ||
| 1425 | iana-ipv4-special-registry. | ||
| 1426 | |||
| 1427 | """ | ||
| 1428 | return any(self in net for net in self._constants._private_networks) | ||
| 1429 | |||
| 1430 | @property | ||
| 1431 | def is_global(self): | ||
| 1432 | return ( | ||
| 1433 | self not in self._constants._public_network and | ||
| 1434 | not self.is_private) | ||
| 1435 | |||
| 1436 | @property | ||
| 1437 | def is_multicast(self): | ||
| 1438 | """Test if the address is reserved for multicast use. | ||
| 1439 | |||
| 1440 | Returns: | ||
| 1441 | A boolean, True if the address is multicast. | ||
| 1442 | See RFC 3171 for details. | ||
| 1443 | |||
| 1444 | """ | ||
| 1445 | return self in self._constants._multicast_network | ||
| 1446 | |||
| 1447 | @property | ||
| 1448 | def is_unspecified(self): | ||
| 1449 | """Test if the address is unspecified. | ||
| 1450 | |||
| 1451 | Returns: | ||
| 1452 | A boolean, True if this is the unspecified address as defined in | ||
| 1453 | RFC 5735 3. | ||
| 1454 | |||
| 1455 | """ | ||
| 1456 | return self == self._constants._unspecified_address | ||
| 1457 | |||
| 1458 | @property | ||
| 1459 | def is_loopback(self): | ||
| 1460 | """Test if the address is a loopback address. | ||
| 1461 | |||
| 1462 | Returns: | ||
| 1463 | A boolean, True if the address is a loopback per RFC 3330. | ||
| 1464 | |||
| 1465 | """ | ||
| 1466 | return self in self._constants._loopback_network | ||
| 1467 | |||
| 1468 | @property | ||
| 1469 | def is_link_local(self): | ||
| 1470 | """Test if the address is reserved for link-local. | ||
| 1471 | |||
| 1472 | Returns: | ||
| 1473 | A boolean, True if the address is link-local per RFC 3927. | ||
| 1474 | |||
| 1475 | """ | ||
| 1476 | return self in self._constants._linklocal_network | ||
| 1477 | |||
| 1478 | |||
| 1479 | class IPv4Interface(IPv4Address): | ||
| 1480 | |||
| 1481 | def __init__(self, address): | ||
| 1482 | if isinstance(address, (bytes, _compat_int_types)): | ||
| 1483 | IPv4Address.__init__(self, address) | ||
| 1484 | self.network = IPv4Network(self._ip) | ||
| 1485 | self._prefixlen = self._max_prefixlen | ||
| | # Set netmask/hostmask here too, matching the other branches, | ||
| | # so with_netmask and with_hostmask work for int/bytes input. | ||
| | self.netmask = self.network.netmask | ||
| | self.hostmask = self.network.hostmask | ||
| 1486 | return | ||
| 1487 | |||
| 1488 | if isinstance(address, tuple): | ||
| 1489 | IPv4Address.__init__(self, address[0]) | ||
| 1490 | if len(address) > 1: | ||
| 1491 | self._prefixlen = int(address[1]) | ||
| 1492 | else: | ||
| 1493 | self._prefixlen = self._max_prefixlen | ||
| 1494 | |||
| 1495 | self.network = IPv4Network(address, strict=False) | ||
| 1496 | self.netmask = self.network.netmask | ||
| 1497 | self.hostmask = self.network.hostmask | ||
| 1498 | return | ||
| 1499 | |||
| 1500 | addr = _split_optional_netmask(address) | ||
| 1501 | IPv4Address.__init__(self, addr[0]) | ||
| 1502 | |||
| 1503 | self.network = IPv4Network(address, strict=False) | ||
| 1504 | self._prefixlen = self.network._prefixlen | ||
| 1505 | |||
| 1506 | self.netmask = self.network.netmask | ||
| 1507 | self.hostmask = self.network.hostmask | ||
| 1508 | |||
| 1509 | def __str__(self): | ||
| 1510 | return '%s/%d' % (self._string_from_ip_int(self._ip), | ||
| 1511 | self.network.prefixlen) | ||
| 1512 | |||
| 1513 | def __eq__(self, other): | ||
| 1514 | address_equal = IPv4Address.__eq__(self, other) | ||
| 1515 | if not address_equal or address_equal is NotImplemented: | ||
| 1516 | return address_equal | ||
| 1517 | try: | ||
| 1518 | return self.network == other.network | ||
| 1519 | except AttributeError: | ||
| 1520 | # An interface with an associated network is NOT the | ||
| 1521 | # same as an unassociated address. That's why the hash | ||
| 1522 | # takes the extra info into account. | ||
| 1523 | return False | ||
| 1524 | |||
| 1525 | def __lt__(self, other): | ||
| 1526 | address_less = IPv4Address.__lt__(self, other) | ||
| 1527 | if address_less is NotImplemented: | ||
| 1528 | return NotImplemented | ||
| 1529 | try: | ||
| 1530 | return (self.network < other.network or | ||
| 1531 | self.network == other.network and address_less) | ||
| 1532 | except AttributeError: | ||
| 1533 | # We *do* allow addresses and interfaces to be sorted. The | ||
| 1534 | # unassociated address is considered less than all interfaces. | ||
| 1535 | return False | ||
| 1536 | |||
| 1537 | def __hash__(self): | ||
| 1538 | return self._ip ^ self._prefixlen ^ int(self.network.network_address) | ||
| 1539 | |||
| 1540 | __reduce__ = _IPAddressBase.__reduce__ | ||
| 1541 | |||
| 1542 | @property | ||
| 1543 | def ip(self): | ||
| 1544 | return IPv4Address(self._ip) | ||
| 1545 | |||
| 1546 | @property | ||
| 1547 | def with_prefixlen(self): | ||
| 1548 | return '%s/%s' % (self._string_from_ip_int(self._ip), | ||
| 1549 | self._prefixlen) | ||
| 1550 | |||
| 1551 | @property | ||
| 1552 | def with_netmask(self): | ||
| 1553 | return '%s/%s' % (self._string_from_ip_int(self._ip), | ||
| 1554 | self.netmask) | ||
| 1555 | |||
| 1556 | @property | ||
| 1557 | def with_hostmask(self): | ||
| 1558 | return '%s/%s' % (self._string_from_ip_int(self._ip), | ||
| 1559 | self.hostmask) | ||
| 1560 | |||
| 1561 | |||
| 1562 | class IPv4Network(_BaseV4, _BaseNetwork): | ||
| 1563 | |||
| 1564 | """This class represents and manipulates 32-bit IPv4 network + addresses.. | ||
| 1565 | |||
| 1566 | Attributes: [examples for IPv4Network('192.0.2.0/27')] | ||
| 1567 | .network_address: IPv4Address('192.0.2.0') | ||
| 1568 | .hostmask: IPv4Address('0.0.0.31') | ||
| 1569 | .broadcast_address: IPv4Address('192.0.2.31') | ||
| 1570 | .netmask: IPv4Address('255.255.255.224') | ||
| 1571 | .prefixlen: 27 | ||
| 1572 | |||
| 1573 | """ | ||
| 1574 | # Class to use when creating address objects | ||
| 1575 | _address_class = IPv4Address | ||
| 1576 | |||
| 1577 | def __init__(self, address, strict=True): | ||
| 1578 | |||
| 1579 | """Instantiate a new IPv4 network object. | ||
| 1580 | |||
| 1581 | Args: | ||
| 1582 | address: A string or integer representing the IP [& network]. | ||
| 1583 | '192.0.2.0/24' | ||
| 1584 | '192.0.2.0/255.255.255.0' | ||
| 1585 | '192.0.0.2/0.0.0.255' | ||
| 1586 | are all functionally the same in IPv4. Similarly, | ||
| 1587 | '192.0.2.1' | ||
| 1588 | '192.0.2.1/255.255.255.255' | ||
| 1589 | '192.0.2.1/32' | ||
| 1590 | are also functionally equivalent. That is to say, failing to | ||
| 1591 | provide a subnetmask will create an object with a mask of /32. | ||
| 1592 | |||
| 1593 | If the mask (portion after the / in the argument) is given in | ||
| 1594 | dotted quad form, it is treated as a netmask if it starts with a | ||
| 1595 | non-zero field (e.g. /255.0.0.0 == /8) and as a hostmask if it | ||
| 1596 | starts with a zero field (e.g. 0.255.255.255 == /8), with the | ||
| 1597 | single exception of an all-zero mask which is treated as a | ||
| 1598 | netmask == /0. If no mask is given, a default of /32 is used. | ||
| 1599 | |||
| 1600 | Additionally, an integer can be passed, so | ||
| 1601 | IPv4Network('192.0.2.1') == IPv4Network(3221225985) | ||
| 1602 | or, more generally | ||
| 1603 | IPv4Interface(int(IPv4Interface('192.0.2.1'))) == | ||
| 1604 | IPv4Interface('192.0.2.1') | ||
| 1605 | |||
| 1606 | Raises: | ||
| 1607 | AddressValueError: If ipaddress isn't a valid IPv4 address. | ||
| 1608 | NetmaskValueError: If the netmask isn't valid for | ||
| 1609 | an IPv4 address. | ||
| 1610 | ValueError: If strict is True and a network address is not | ||
| 1611 | supplied. | ||
| 1612 | |||
| 1613 | """ | ||
| 1614 | _BaseNetwork.__init__(self, address) | ||
| 1615 | |||
| 1616 | # Constructing from a packed address or integer | ||
| 1617 | if isinstance(address, (_compat_int_types, bytes)): | ||
| 1618 | self.network_address = IPv4Address(address) | ||
| 1619 | self.netmask, self._prefixlen = self._make_netmask( | ||
| 1620 | self._max_prefixlen) | ||
| 1621 | # fixme: address/network test here. | ||
| 1622 | return | ||
| 1623 | |||
| 1624 | if isinstance(address, tuple): | ||
| 1625 | if len(address) > 1: | ||
| 1626 | arg = address[1] | ||
| 1627 | else: | ||
| 1628 | # We weren't given an address[1] | ||
| 1629 | arg = self._max_prefixlen | ||
| 1630 | self.network_address = IPv4Address(address[0]) | ||
| 1631 | self.netmask, self._prefixlen = self._make_netmask(arg) | ||
| 1632 | packed = int(self.network_address) | ||
| 1633 | if packed & int(self.netmask) != packed: | ||
| 1634 | if strict: | ||
| 1635 | raise ValueError('%s has host bits set' % self) | ||
| 1636 | else: | ||
| 1637 | self.network_address = IPv4Address(packed & | ||
| 1638 | int(self.netmask)) | ||
| 1639 | return | ||
| 1640 | |||
| 1641 | # Assume input argument to be string or any object representation | ||
| 1642 | # which converts into a formatted IP prefix string. | ||
| 1643 | addr = _split_optional_netmask(address) | ||
| 1644 | self.network_address = IPv4Address(self._ip_int_from_string(addr[0])) | ||
| 1645 | |||
| 1646 | if len(addr) == 2: | ||
| 1647 | arg = addr[1] | ||
| 1648 | else: | ||
| 1649 | arg = self._max_prefixlen | ||
| 1650 | self.netmask, self._prefixlen = self._make_netmask(arg) | ||
| 1651 | |||
| 1652 | if strict: | ||
| 1653 | if (IPv4Address(int(self.network_address) & int(self.netmask)) != | ||
| 1654 | self.network_address): | ||
| 1655 | raise ValueError('%s has host bits set' % self) | ||
| 1656 | self.network_address = IPv4Address(int(self.network_address) & | ||
| 1657 | int(self.netmask)) | ||
| 1658 | |||
| 1659 | if self._prefixlen == (self._max_prefixlen - 1): | ||
| 1660 | self.hosts = self.__iter__ | ||
| 1661 | |||
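A short sketch of the strict flag in practice: host bits either raise or are masked away:

    >>> IPv4Network('192.0.2.1/24')
    Traceback (most recent call last):
        ...
    ValueError: 192.0.2.1/24 has host bits set
    >>> IPv4Network('192.0.2.1/24', strict=False)
    IPv4Network('192.0.2.0/24')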
| 1662 | @property | ||
| 1663 | def is_global(self): | ||
| 1664 | """Test if this address is allocated for public networks. | ||
| 1665 | |||
| 1666 | Returns: | ||
| 1667 | A boolean, True if the address is not reserved per | ||
| 1668 | iana-ipv4-special-registry. | ||
| 1669 | |||
| 1670 | """ | ||
| 1671 | return (not (self.network_address in IPv4Network('100.64.0.0/10') and | ||
| 1672 | self.broadcast_address in IPv4Network('100.64.0.0/10')) and | ||
| 1673 | not self.is_private) | ||
| 1674 | |||
| 1675 | |||
| 1676 | class _IPv4Constants(object): | ||
| 1677 | |||
| 1678 | _linklocal_network = IPv4Network('169.254.0.0/16') | ||
| 1679 | |||
| 1680 | _loopback_network = IPv4Network('127.0.0.0/8') | ||
| 1681 | |||
| 1682 | _multicast_network = IPv4Network('224.0.0.0/4') | ||
| 1683 | |||
| 1684 | _public_network = IPv4Network('100.64.0.0/10') | ||
| 1685 | |||
| 1686 | _private_networks = [ | ||
| 1687 | IPv4Network('0.0.0.0/8'), | ||
| 1688 | IPv4Network('10.0.0.0/8'), | ||
| 1689 | IPv4Network('127.0.0.0/8'), | ||
| 1690 | IPv4Network('169.254.0.0/16'), | ||
| 1691 | IPv4Network('172.16.0.0/12'), | ||
| 1692 | IPv4Network('192.0.0.0/29'), | ||
| 1693 | IPv4Network('192.0.0.170/31'), | ||
| 1694 | IPv4Network('192.0.2.0/24'), | ||
| 1695 | IPv4Network('192.168.0.0/16'), | ||
| 1696 | IPv4Network('198.18.0.0/15'), | ||
| 1697 | IPv4Network('198.51.100.0/24'), | ||
| 1698 | IPv4Network('203.0.113.0/24'), | ||
| 1699 | IPv4Network('240.0.0.0/4'), | ||
| 1700 | IPv4Network('255.255.255.255/32'), | ||
| 1701 | ] | ||
| 1702 | |||
| 1703 | _reserved_network = IPv4Network('240.0.0.0/4') | ||
| 1704 | |||
| 1705 | _unspecified_address = IPv4Address('0.0.0.0') | ||
| 1706 | |||
| 1707 | |||
| 1708 | IPv4Address._constants = _IPv4Constants | ||
| 1709 | |||
| 1710 | |||
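These constants drive the classification properties above; note that the RFC 6598 shared address space (100.64.0.0/10) is deliberately neither private nor global (illustrative sketch):

    >>> IPv4Address('10.0.0.1').is_private
    True
    >>> IPv4Address('8.8.8.8').is_global
    True
    >>> addr = IPv4Address('100.64.0.1')
    >>> addr.is_private, addr.is_global
    (False, False)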
| 1711 | class _BaseV6(object): | ||
| 1712 | |||
| 1713 | """Base IPv6 object. | ||
| 1714 | |||
| 1715 | The following methods are used by IPv6 objects in both single IP | ||
| 1716 | addresses and networks. | ||
| 1717 | |||
| 1718 | """ | ||
| 1719 | |||
| 1720 | __slots__ = () | ||
| 1721 | _version = 6 | ||
| 1722 | _ALL_ONES = (2 ** IPV6LENGTH) - 1 | ||
| 1723 | _HEXTET_COUNT = 8 | ||
| 1724 | _HEX_DIGITS = frozenset('0123456789ABCDEFabcdef') | ||
| 1725 | _max_prefixlen = IPV6LENGTH | ||
| 1726 | |||
| 1727 | # There are only a bunch of valid v6 netmasks, so we cache them all | ||
| 1728 | # when constructed (see _make_netmask()). | ||
| 1729 | _netmask_cache = {} | ||
| 1730 | |||
| 1731 | @classmethod | ||
| 1732 | def _make_netmask(cls, arg): | ||
| 1733 | """Make a (netmask, prefix_len) tuple from the given argument. | ||
| 1734 | |||
| 1735 | Argument can be: | ||
| 1736 | - an integer (the prefix length) | ||
| 1737 | - a string representing the prefix length (e.g. "124") | ||
| 1738 | (dotted-quad netmask strings are IPv4-only and are not accepted here) | ||
| 1739 | """ | ||
| 1740 | if arg not in cls._netmask_cache: | ||
| 1741 | if isinstance(arg, _compat_int_types): | ||
| 1742 | prefixlen = arg | ||
| 1743 | else: | ||
| 1744 | prefixlen = cls._prefix_from_prefix_string(arg) | ||
| 1745 | netmask = IPv6Address(cls._ip_int_from_prefix(prefixlen)) | ||
| 1746 | cls._netmask_cache[arg] = netmask, prefixlen | ||
| 1747 | return cls._netmask_cache[arg] | ||
| 1748 | |||
| 1749 | @classmethod | ||
| 1750 | def _ip_int_from_string(cls, ip_str): | ||
| 1751 | """Turn an IPv6 ip_str into an integer. | ||
| 1752 | |||
| 1753 | Args: | ||
| 1754 | ip_str: A string, the IPv6 ip_str. | ||
| 1755 | |||
| 1756 | Returns: | ||
| 1757 | An int, the IPv6 address | ||
| 1758 | |||
| 1759 | Raises: | ||
| 1760 | AddressValueError: if ip_str isn't a valid IPv6 Address. | ||
| 1761 | |||
| 1762 | """ | ||
| 1763 | if not ip_str: | ||
| 1764 | raise AddressValueError('Address cannot be empty') | ||
| 1765 | |||
| 1766 | parts = ip_str.split(':') | ||
| 1767 | |||
| 1768 | # An IPv6 address needs at least 2 colons (3 parts). | ||
| 1769 | _min_parts = 3 | ||
| 1770 | if len(parts) < _min_parts: | ||
| 1771 | msg = "At least %d parts expected in %r" % (_min_parts, ip_str) | ||
| 1772 | raise AddressValueError(msg) | ||
| 1773 | |||
| 1774 | # If the address has an IPv4-style suffix, convert it to hexadecimal. | ||
| 1775 | if '.' in parts[-1]: | ||
| 1776 | try: | ||
| 1777 | ipv4_int = IPv4Address(parts.pop())._ip | ||
| 1778 | except AddressValueError as exc: | ||
| 1779 | raise AddressValueError("%s in %r" % (exc, ip_str)) | ||
| 1780 | parts.append('%x' % ((ipv4_int >> 16) & 0xFFFF)) | ||
| 1781 | parts.append('%x' % (ipv4_int & 0xFFFF)) | ||
| 1782 | |||
| 1783 | # An IPv6 address can't have more than 8 colons (9 parts). | ||
| 1784 | # The extra colon comes from using the "::" notation for a single | ||
| 1785 | # leading or trailing zero part. | ||
| 1786 | _max_parts = cls._HEXTET_COUNT + 1 | ||
| 1787 | if len(parts) > _max_parts: | ||
| 1788 | msg = "At most %d colons permitted in %r" % ( | ||
| 1789 | _max_parts - 1, ip_str) | ||
| 1790 | raise AddressValueError(msg) | ||
| 1791 | |||
| 1792 | # Disregarding the endpoints, find '::' with nothing in between. | ||
| 1793 | # This indicates that a run of zeroes has been skipped. | ||
| 1794 | skip_index = None | ||
| 1795 | for i in _compat_range(1, len(parts) - 1): | ||
| 1796 | if not parts[i]: | ||
| 1797 | if skip_index is not None: | ||
| 1798 | # Can't have more than one '::' | ||
| 1799 | msg = "At most one '::' permitted in %r" % ip_str | ||
| 1800 | raise AddressValueError(msg) | ||
| 1801 | skip_index = i | ||
| 1802 | |||
| 1803 | # parts_hi is the number of parts to copy from above/before the '::' | ||
| 1804 | # parts_lo is the number of parts to copy from below/after the '::' | ||
| 1805 | if skip_index is not None: | ||
| 1806 | # If we found a '::', then check if it also covers the endpoints. | ||
| 1807 | parts_hi = skip_index | ||
| 1808 | parts_lo = len(parts) - skip_index - 1 | ||
| 1809 | if not parts[0]: | ||
| 1810 | parts_hi -= 1 | ||
| 1811 | if parts_hi: | ||
| 1812 | msg = "Leading ':' only permitted as part of '::' in %r" | ||
| 1813 | raise AddressValueError(msg % ip_str) # ^: requires ^:: | ||
| 1814 | if not parts[-1]: | ||
| 1815 | parts_lo -= 1 | ||
| 1816 | if parts_lo: | ||
| 1817 | msg = "Trailing ':' only permitted as part of '::' in %r" | ||
| 1818 | raise AddressValueError(msg % ip_str) # :$ requires ::$ | ||
| 1819 | parts_skipped = cls._HEXTET_COUNT - (parts_hi + parts_lo) | ||
| 1820 | if parts_skipped < 1: | ||
| 1821 | msg = "Expected at most %d other parts with '::' in %r" | ||
| 1822 | raise AddressValueError(msg % (cls._HEXTET_COUNT - 1, ip_str)) | ||
| 1823 | else: | ||
| 1824 | # Otherwise, allocate the entire address to parts_hi. The | ||
| 1825 | # endpoints could still be empty, but _parse_hextet() will check | ||
| 1826 | # for that. | ||
| 1827 | if len(parts) != cls._HEXTET_COUNT: | ||
| 1828 | msg = "Exactly %d parts expected without '::' in %r" | ||
| 1829 | raise AddressValueError(msg % (cls._HEXTET_COUNT, ip_str)) | ||
| 1830 | if not parts[0]: | ||
| 1831 | msg = "Leading ':' only permitted as part of '::' in %r" | ||
| 1832 | raise AddressValueError(msg % ip_str) # ^: requires ^:: | ||
| 1833 | if not parts[-1]: | ||
| 1834 | msg = "Trailing ':' only permitted as part of '::' in %r" | ||
| 1835 | raise AddressValueError(msg % ip_str) # :$ requires ::$ | ||
| 1836 | parts_hi = len(parts) | ||
| 1837 | parts_lo = 0 | ||
| 1838 | parts_skipped = 0 | ||
| 1839 | |||
| 1840 | try: | ||
| 1841 | # Now, parse the hextets into a 128-bit integer. | ||
| 1842 | ip_int = 0 | ||
| 1843 | for i in range(parts_hi): | ||
| 1844 | ip_int <<= 16 | ||
| 1845 | ip_int |= cls._parse_hextet(parts[i]) | ||
| 1846 | ip_int <<= 16 * parts_skipped | ||
| 1847 | for i in range(-parts_lo, 0): | ||
| 1848 | ip_int <<= 16 | ||
| 1849 | ip_int |= cls._parse_hextet(parts[i]) | ||
| 1850 | return ip_int | ||
| 1851 | except ValueError as exc: | ||
| 1852 | raise AddressValueError("%s in %r" % (exc, ip_str)) | ||
| 1853 | |||
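A sketch of the parsing rules above: a dotted IPv4 suffix is folded into the last two hextets, and a second '::' is rejected:

    >>> IPv6Address('::ffff:192.0.2.1') == IPv6Address('::ffff:c000:201')
    True
    >>> IPv6Address(':::')
    Traceback (most recent call last):
        ...
    AddressValueError: At most one '::' permitted in ':::'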
| 1854 | @classmethod | ||
| 1855 | def _parse_hextet(cls, hextet_str): | ||
| 1856 | """Convert an IPv6 hextet string into an integer. | ||
| 1857 | |||
| 1858 | Args: | ||
| 1859 | hextet_str: A string, the number to parse. | ||
| 1860 | |||
| 1861 | Returns: | ||
| 1862 | The hextet as an integer. | ||
| 1863 | |||
| 1864 | Raises: | ||
| 1865 | ValueError: if the input isn't strictly a hex number from | ||
| 1866 | [0..FFFF]. | ||
| 1867 | |||
| 1868 | """ | ||
| 1869 | # Whitelist the characters, since int() allows a lot of bizarre stuff. | ||
| 1870 | if not cls._HEX_DIGITS.issuperset(hextet_str): | ||
| 1871 | raise ValueError("Only hex digits permitted in %r" % hextet_str) | ||
| 1872 | # We do the length check second, since the invalid character error | ||
| 1873 | # is likely to be more informative for the user | ||
| 1874 | if len(hextet_str) > 4: | ||
| 1875 | msg = "At most 4 characters permitted in %r" | ||
| 1876 | raise ValueError(msg % hextet_str) | ||
| 1877 | # Length check means we can skip checking the integer value | ||
| 1878 | return int(hextet_str, 16) | ||
| 1879 | |||
| 1880 | @classmethod | ||
| 1881 | def _compress_hextets(cls, hextets): | ||
| 1882 | """Compresses a list of hextets. | ||
| 1883 | |||
| 1884 | Compresses a list of strings, replacing the longest continuous | ||
| 1885 | sequence of "0" in the list with "" and adding empty strings at | ||
| 1886 | the beginning or at the end of the string such that subsequently | ||
| 1887 | calling ":".join(hextets) will produce the compressed version of | ||
| 1888 | the IPv6 address. | ||
| 1889 | |||
| 1890 | Args: | ||
| 1891 | hextets: A list of strings, the hextets to compress. | ||
| 1892 | |||
| 1893 | Returns: | ||
| 1894 | A list of strings. | ||
| 1895 | |||
| 1896 | """ | ||
| 1897 | best_doublecolon_start = -1 | ||
| 1898 | best_doublecolon_len = 0 | ||
| 1899 | doublecolon_start = -1 | ||
| 1900 | doublecolon_len = 0 | ||
| 1901 | for index, hextet in enumerate(hextets): | ||
| 1902 | if hextet == '0': | ||
| 1903 | doublecolon_len += 1 | ||
| 1904 | if doublecolon_start == -1: | ||
| 1905 | # Start of a sequence of zeros. | ||
| 1906 | doublecolon_start = index | ||
| 1907 | if doublecolon_len > best_doublecolon_len: | ||
| 1908 | # This is the longest sequence of zeros so far. | ||
| 1909 | best_doublecolon_len = doublecolon_len | ||
| 1910 | best_doublecolon_start = doublecolon_start | ||
| 1911 | else: | ||
| 1912 | doublecolon_len = 0 | ||
| 1913 | doublecolon_start = -1 | ||
| 1914 | |||
| 1915 | if best_doublecolon_len > 1: | ||
| 1916 | best_doublecolon_end = (best_doublecolon_start + | ||
| 1917 | best_doublecolon_len) | ||
| 1918 | # For zeros at the end of the address. | ||
| 1919 | if best_doublecolon_end == len(hextets): | ||
| 1920 | hextets += [''] | ||
| 1921 | hextets[best_doublecolon_start:best_doublecolon_end] = [''] | ||
| 1922 | # For zeros at the beginning of the address. | ||
| 1923 | if best_doublecolon_start == 0: | ||
| 1924 | hextets = [''] + hextets | ||
| 1925 | |||
| 1926 | return hextets | ||
| 1927 | |||
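This is what turns the fully expanded form back into canonical shorthand when an address is stringified; the longest zero run wins, and a leading run still gets its empty sentinel (illustrative sketch):

    >>> str(IPv6Address('2001:0db8:0000:0000:0000:0000:0000:0001'))
    '2001:db8::1'
    >>> str(IPv6Address('0:0:1:0:0:1:0:0'))
    '::1:0:0:1:0:0'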
| 1928 | @classmethod | ||
| 1929 | def _string_from_ip_int(cls, ip_int=None): | ||
| 1930 | """Turns a 128-bit integer into hexadecimal notation. | ||
| 1931 | |||
| 1932 | Args: | ||
| 1933 | ip_int: An integer, the IP address. | ||
| 1934 | |||
| 1935 | Returns: | ||
| 1936 | A string, the hexadecimal representation of the address. | ||
| 1937 | |||
| 1938 | Raises: | ||
| 1939 | ValueError: The address is bigger than 128 bits of all ones. | ||
| 1940 | |||
| 1941 | """ | ||
| 1942 | if ip_int is None: | ||
| 1943 | ip_int = int(cls._ip) | ||
| 1944 | |||
| 1945 | if ip_int > cls._ALL_ONES: | ||
| 1946 | raise ValueError('IPv6 address is too large') | ||
| 1947 | |||
| 1948 | hex_str = '%032x' % ip_int | ||
| 1949 | hextets = ['%x' % int(hex_str[x:x + 4], 16) for x in range(0, 32, 4)] | ||
| 1950 | |||
| 1951 | hextets = cls._compress_hextets(hextets) | ||
| 1952 | return ':'.join(hextets) | ||
| 1953 | |||
| 1954 | def _explode_shorthand_ip_string(self): | ||
| 1955 | """Expand a shortened IPv6 address. | ||
| 1956 | |||
| 1957 | Args: | ||
| 1958 | ip_str: A string, the IPv6 address. | ||
| 1959 | |||
| 1960 | Returns: | ||
| 1961 | A string, the expanded IPv6 address. | ||
| 1962 | |||
| 1963 | """ | ||
| 1964 | if isinstance(self, IPv6Network): | ||
| 1965 | ip_str = _compat_str(self.network_address) | ||
| 1966 | elif isinstance(self, IPv6Interface): | ||
| 1967 | ip_str = _compat_str(self.ip) | ||
| 1968 | else: | ||
| 1969 | ip_str = _compat_str(self) | ||
| 1970 | |||
| 1971 | ip_int = self._ip_int_from_string(ip_str) | ||
| 1972 | hex_str = '%032x' % ip_int | ||
| 1973 | parts = [hex_str[x:x + 4] for x in range(0, 32, 4)] | ||
| 1974 | if isinstance(self, (_BaseNetwork, IPv6Interface)): | ||
| 1975 | return '%s/%d' % (':'.join(parts), self._prefixlen) | ||
| 1976 | return ':'.join(parts) | ||
| 1977 | |||
| 1978 | def _reverse_pointer(self): | ||
| 1979 | """Return the reverse DNS pointer name for the IPv6 address. | ||
| 1980 | |||
| 1981 | This implements the method described in RFC3596 2.5. | ||
| 1982 | |||
| 1983 | """ | ||
| 1984 | reverse_chars = self.exploded[::-1].replace(':', '') | ||
| 1985 | return '.'.join(reverse_chars) + '.ip6.arpa' | ||
| 1986 | |||
| 1987 | @property | ||
| 1988 | def max_prefixlen(self): | ||
| 1989 | return self._max_prefixlen | ||
| 1990 | |||
| 1991 | @property | ||
| 1992 | def version(self): | ||
| 1993 | return self._version | ||
| 1994 | |||
| 1995 | |||
| 1996 | class IPv6Address(_BaseV6, _BaseAddress): | ||
| 1997 | |||
| 1998 | """Represent and manipulate single IPv6 Addresses.""" | ||
| 1999 | |||
| 2000 | __slots__ = ('_ip', '__weakref__') | ||
| 2001 | |||
| 2002 | def __init__(self, address): | ||
| 2003 | """Instantiate a new IPv6 address object. | ||
| 2004 | |||
| 2005 | Args: | ||
| 2006 | address: A string or integer representing the IP | ||
| 2007 | |||
| 2008 | Additionally, an integer can be passed, so | ||
| 2009 | IPv6Address('2001:db8::') == | ||
| 2010 | IPv6Address(42540766411282592856903984951653826560) | ||
| 2011 | or, more generally | ||
| 2012 | IPv6Address(int(IPv6Address('2001:db8::'))) == | ||
| 2013 | IPv6Address('2001:db8::') | ||
| 2014 | |||
| 2015 | Raises: | ||
| 2016 | AddressValueError: If address isn't a valid IPv6 address. | ||
| 2017 | |||
| 2018 | """ | ||
| 2019 | # Efficient constructor from integer. | ||
| 2020 | if isinstance(address, _compat_int_types): | ||
| 2021 | self._check_int_address(address) | ||
| 2022 | self._ip = address | ||
| 2023 | return | ||
| 2024 | |||
| 2025 | # Constructing from a packed address | ||
| 2026 | if isinstance(address, bytes): | ||
| 2027 | self._check_packed_address(address, 16) | ||
| 2028 | bvs = _compat_bytes_to_byte_vals(address) | ||
| 2029 | self._ip = _compat_int_from_byte_vals(bvs, 'big') | ||
| 2030 | return | ||
| 2031 | |||
| 2032 | # Assume input argument to be string or any object representation | ||
| 2033 | # which converts into a formatted IP string. | ||
| 2034 | addr_str = _compat_str(address) | ||
| 2035 | if '/' in addr_str: | ||
| 2036 | raise AddressValueError("Unexpected '/' in %r" % address) | ||
| 2037 | self._ip = self._ip_int_from_string(addr_str) | ||
| 2038 | |||
| 2039 | @property | ||
| 2040 | def packed(self): | ||
| 2041 | """The binary representation of this address.""" | ||
| 2042 | return v6_int_to_packed(self._ip) | ||
| 2043 | |||
| 2044 | @property | ||
| 2045 | def is_multicast(self): | ||
| 2046 | """Test if the address is reserved for multicast use. | ||
| 2047 | |||
| 2048 | Returns: | ||
| 2049 | A boolean, True if the address is a multicast address. | ||
| 2050 | See RFC 2373 2.7 for details. | ||
| 2051 | |||
| 2052 | """ | ||
| 2053 | return self in self._constants._multicast_network | ||
| 2054 | |||
| 2055 | @property | ||
| 2056 | def is_reserved(self): | ||
| 2057 | """Test if the address is otherwise IETF reserved. | ||
| 2058 | |||
| 2059 | Returns: | ||
| 2060 | A boolean, True if the address is within one of the | ||
| 2061 | reserved IPv6 Network ranges. | ||
| 2062 | |||
| 2063 | """ | ||
| 2064 | return any(self in x for x in self._constants._reserved_networks) | ||
| 2065 | |||
| 2066 | @property | ||
| 2067 | def is_link_local(self): | ||
| 2068 | """Test if the address is reserved for link-local. | ||
| 2069 | |||
| 2070 | Returns: | ||
| 2071 | A boolean, True if the address is reserved per RFC 4291. | ||
| 2072 | |||
| 2073 | """ | ||
| 2074 | return self in self._constants._linklocal_network | ||
| 2075 | |||
| 2076 | @property | ||
| 2077 | def is_site_local(self): | ||
| 2078 | """Test if the address is reserved for site-local. | ||
| 2079 | |||
| 2080 | Note that the site-local address space has been deprecated by RFC 3879. | ||
| 2081 | Use is_private to test if this address is in the space of unique local | ||
| 2082 | addresses as defined by RFC 4193. | ||
| 2083 | |||
| 2084 | Returns: | ||
| 2085 | A boolean, True if the address is reserved per RFC 3513 2.5.6. | ||
| 2086 | |||
| 2087 | """ | ||
| 2088 | return self in self._constants._sitelocal_network | ||
| 2089 | |||
| 2090 | @property | ||
| 2091 | def is_private(self): | ||
| 2092 | """Test if this address is allocated for private networks. | ||
| 2093 | |||
| 2094 | Returns: | ||
| 2095 | A boolean, True if the address is reserved per | ||
| 2096 | iana-ipv6-special-registry. | ||
| 2097 | |||
| 2098 | """ | ||
| 2099 | return any(self in net for net in self._constants._private_networks) | ||
| 2100 | |||
| 2101 | @property | ||
| 2102 | def is_global(self): | ||
| 2103 | """Test if this address is allocated for public networks. | ||
| 2104 | |||
| 2105 | Returns: | ||
| 2106 | A boolean, True if the address is not reserved per | ||
| 2107 | iana-ipv6-special-registry. | ||
| 2108 | |||
| 2109 | """ | ||
| 2110 | return not self.is_private | ||
| 2111 | |||
| 2112 | @property | ||
| 2113 | def is_unspecified(self): | ||
| 2114 | """Test if the address is unspecified. | ||
| 2115 | |||
| 2116 | Returns: | ||
| 2117 | A boolean, True if this is the unspecified address as defined in | ||
| 2118 | RFC 2373 2.5.2. | ||
| 2119 | |||
| 2120 | """ | ||
| 2121 | return self._ip == 0 | ||
| 2122 | |||
| 2123 | @property | ||
| 2124 | def is_loopback(self): | ||
| 2125 | """Test if the address is a loopback address. | ||
| 2126 | |||
| 2127 | Returns: | ||
| 2128 | A boolean, True if the address is a loopback address as defined in | ||
| 2129 | RFC 2373 2.5.3. | ||
| 2130 | |||
| 2131 | """ | ||
| 2132 | return self._ip == 1 | ||
| 2133 | |||
| 2134 | @property | ||
| 2135 | def ipv4_mapped(self): | ||
| 2136 | """Return the IPv4 mapped address. | ||
| 2137 | |||
| 2138 | Returns: | ||
| 2139 | If the IPv6 address is a v4 mapped address, return the | ||
| 2140 | IPv4 mapped address. Return None otherwise. | ||
| 2141 | |||
| 2142 | """ | ||
| 2143 | if (self._ip >> 32) != 0xFFFF: | ||
| 2144 | return None | ||
| 2145 | return IPv4Address(self._ip & 0xFFFFFFFF) | ||
| 2146 | |||
| 2147 | @property | ||
| 2148 | def teredo(self): | ||
| 2149 | """Tuple of embedded teredo IPs. | ||
| 2150 | |||
| 2151 | Returns: | ||
| 2152 | Tuple of the (server, client) IPs or None if the address | ||
| 2153 | doesn't appear to be a teredo address (doesn't start with | ||
| 2154 | 2001::/32) | ||
| 2155 | |||
| 2156 | """ | ||
| 2157 | if (self._ip >> 96) != 0x20010000: | ||
| 2158 | return None | ||
| 2159 | return (IPv4Address((self._ip >> 64) & 0xFFFFFFFF), | ||
| 2160 | IPv4Address(~self._ip & 0xFFFFFFFF)) | ||
| 2161 | |||
| 2162 | @property | ||
| 2163 | def sixtofour(self): | ||
| 2164 | """Return the IPv4 6to4 embedded address. | ||
| 2165 | |||
| 2166 | Returns: | ||
| 2167 | The IPv4 6to4-embedded address if present or None if the | ||
| 2168 | address doesn't appear to contain a 6to4 embedded address. | ||
| 2169 | |||
| 2170 | """ | ||
| 2171 | if (self._ip >> 112) != 0x2002: | ||
| 2172 | return None | ||
| 2173 | return IPv4Address((self._ip >> 80) & 0xFFFFFFFF) | ||
| 2174 | |||
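A sketch of both tunnel-address extractions, using the Teredo example pattern from RFC 4380 (the server address sits in bits 64-95; the client address is the bitwise complement of the low 32 bits) and a 6to4 prefix:

    >>> IPv6Address('2001:0:4136:e378:8000:63bf:3fff:fdd2').teredo
    (IPv4Address('65.54.227.120'), IPv4Address('192.0.2.45'))
    >>> IPv6Address('2002:c000:204::').sixtofour
    IPv4Address('192.0.2.4')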
| 2175 | |||
| 2176 | class IPv6Interface(IPv6Address): | ||
| 2177 | |||
| 2178 | def __init__(self, address): | ||
| 2179 | if isinstance(address, (bytes, _compat_int_types)): | ||
| 2180 | IPv6Address.__init__(self, address) | ||
| 2181 | self.network = IPv6Network(self._ip) | ||
| 2182 | self._prefixlen = self._max_prefixlen | ||
| | # Set netmask/hostmask here too, matching the other branches, | ||
| | # so with_netmask and with_hostmask work for int/bytes input. | ||
| | self.netmask = self.network.netmask | ||
| | self.hostmask = self.network.hostmask | ||
| 2183 | return | ||
| 2184 | if isinstance(address, tuple): | ||
| 2185 | IPv6Address.__init__(self, address[0]) | ||
| 2186 | if len(address) > 1: | ||
| 2187 | self._prefixlen = int(address[1]) | ||
| 2188 | else: | ||
| 2189 | self._prefixlen = self._max_prefixlen | ||
| 2190 | self.network = IPv6Network(address, strict=False) | ||
| 2191 | self.netmask = self.network.netmask | ||
| 2192 | self.hostmask = self.network.hostmask | ||
| 2193 | return | ||
| 2194 | |||
| 2195 | addr = _split_optional_netmask(address) | ||
| 2196 | IPv6Address.__init__(self, addr[0]) | ||
| 2197 | self.network = IPv6Network(address, strict=False) | ||
| 2198 | self.netmask = self.network.netmask | ||
| 2199 | self._prefixlen = self.network._prefixlen | ||
| 2200 | self.hostmask = self.network.hostmask | ||
| 2201 | |||
| 2202 | def __str__(self): | ||
| 2203 | return '%s/%d' % (self._string_from_ip_int(self._ip), | ||
| 2204 | self.network.prefixlen) | ||
| 2205 | |||
| 2206 | def __eq__(self, other): | ||
| 2207 | address_equal = IPv6Address.__eq__(self, other) | ||
| 2208 | if not address_equal or address_equal is NotImplemented: | ||
| 2209 | return address_equal | ||
| 2210 | try: | ||
| 2211 | return self.network == other.network | ||
| 2212 | except AttributeError: | ||
| 2213 | # An interface with an associated network is NOT the | ||
| 2214 | # same as an unassociated address. That's why the hash | ||
| 2215 | # takes the extra info into account. | ||
| 2216 | return False | ||
| 2217 | |||
| 2218 | def __lt__(self, other): | ||
| 2219 | address_less = IPv6Address.__lt__(self, other) | ||
| 2220 | if address_less is NotImplemented: | ||
| 2221 | return NotImplemented | ||
| 2222 | try: | ||
| 2223 | return (self.network < other.network or | ||
| 2224 | self.network == other.network and address_less) | ||
| 2225 | except AttributeError: | ||
| 2226 | # We *do* allow addresses and interfaces to be sorted. The | ||
| 2227 | # unassociated address is considered less than all interfaces. | ||
| 2228 | return False | ||
| 2229 | |||
| 2230 | def __hash__(self): | ||
| 2231 | return self._ip ^ self._prefixlen ^ int(self.network.network_address) | ||
| 2232 | |||
| 2233 | __reduce__ = _IPAddressBase.__reduce__ | ||
| 2234 | |||
| 2235 | @property | ||
| 2236 | def ip(self): | ||
| 2237 | return IPv6Address(self._ip) | ||
| 2238 | |||
| 2239 | @property | ||
| 2240 | def with_prefixlen(self): | ||
| 2241 | return '%s/%s' % (self._string_from_ip_int(self._ip), | ||
| 2242 | self._prefixlen) | ||
| 2243 | |||
| 2244 | @property | ||
| 2245 | def with_netmask(self): | ||
| 2246 | return '%s/%s' % (self._string_from_ip_int(self._ip), | ||
| 2247 | self.netmask) | ||
| 2248 | |||
| 2249 | @property | ||
| 2250 | def with_hostmask(self): | ||
| 2251 | return '%s/%s' % (self._string_from_ip_int(self._ip), | ||
| 2252 | self.hostmask) | ||
| 2253 | |||
| 2254 | @property | ||
| 2255 | def is_unspecified(self): | ||
| 2256 | return self._ip == 0 and self.network.is_unspecified | ||
| 2257 | |||
| 2258 | @property | ||
| 2259 | def is_loopback(self): | ||
| 2260 | return self._ip == 1 and self.network.is_loopback | ||
| 2261 | |||
| 2262 | |||
| 2263 | class IPv6Network(_BaseV6, _BaseNetwork): | ||
| 2264 | |||
| 2265 | """This class represents and manipulates 128-bit IPv6 networks. | ||
| 2266 | |||
| 2267 | Attributes: [examples for IPv6Network('2001:db8::1000/124')] | ||
| 2268 | .network_address: IPv6Address('2001:db8::1000') | ||
| 2269 | .hostmask: IPv6Address('::f') | ||
| 2270 | .broadcast_address: IPv6Address('2001:db8::100f') | ||
| 2271 | .netmask: IPv6Address('ffff:ffff:ffff:ffff:ffff:ffff:ffff:fff0') | ||
| 2272 | .prefixlen: 124 | ||
| 2273 | |||
| 2274 | """ | ||
| 2275 | |||
| 2276 | # Class to use when creating address objects | ||
| 2277 | _address_class = IPv6Address | ||
| 2278 | |||
| 2279 | def __init__(self, address, strict=True): | ||
| 2280 | """Instantiate a new IPv6 Network object. | ||
| 2281 | |||
| 2282 | Args: | ||
| 2283 | address: A string or integer representing the IPv6 network or the | ||
| 2284 | IP and prefix/netmask. | ||
| 2285 | '2001:db8::/128' | ||
| 2286 | '2001:db8:0000:0000:0000:0000:0000:0000/128' | ||
| 2287 | '2001:db8::' | ||
| 2288 | are all functionally the same in IPv6. That is to say, | ||
| 2289 | failing to provide a subnetmask will create an object with | ||
| 2290 | a mask of /128. | ||
| 2291 | |||
| 2292 | Additionally, an integer can be passed, so | ||
| 2293 | IPv6Network('2001:db8::') == | ||
| 2294 | IPv6Network(42540766411282592856903984951653826560) | ||
| 2295 | or, more generally | ||
| 2296 | IPv6Network(int(IPv6Network('2001:db8::'))) == | ||
| 2297 | IPv6Network('2001:db8::') | ||
| 2298 | |||
| 2299 | strict: A boolean. If True, ensure that we have been passed | ||
| 2300 | a true network address, e.g. 2001:db8::1000/124, and not an | ||
| 2301 | IP address on a network, e.g. 2001:db8::1/124. | ||
| 2302 | |||
| 2303 | Raises: | ||
| 2304 | AddressValueError: If address isn't a valid IPv6 address. | ||
| 2305 | NetmaskValueError: If the netmask isn't valid for | ||
| 2306 | an IPv6 address. | ||
| 2307 | ValueError: If strict was True and a network address was not | ||
| 2308 | supplied. | ||
| 2309 | |||
| 2310 | """ | ||
| 2311 | _BaseNetwork.__init__(self, address) | ||
| 2312 | |||
| 2313 | # Efficient constructor from integer or packed address | ||
| 2314 | if isinstance(address, (bytes, _compat_int_types)): | ||
| 2315 | self.network_address = IPv6Address(address) | ||
| 2316 | self.netmask, self._prefixlen = self._make_netmask( | ||
| 2317 | self._max_prefixlen) | ||
| 2318 | return | ||
| 2319 | |||
| 2320 | if isinstance(address, tuple): | ||
| 2321 | if len(address) > 1: | ||
| 2322 | arg = address[1] | ||
| 2323 | else: | ||
| 2324 | arg = self._max_prefixlen | ||
| 2325 | self.netmask, self._prefixlen = self._make_netmask(arg) | ||
| 2326 | self.network_address = IPv6Address(address[0]) | ||
| 2327 | packed = int(self.network_address) | ||
| 2328 | if packed & int(self.netmask) != packed: | ||
| 2329 | if strict: | ||
| 2330 | raise ValueError('%s has host bits set' % self) | ||
| 2331 | else: | ||
| 2332 | self.network_address = IPv6Address(packed & | ||
| 2333 | int(self.netmask)) | ||
| 2334 | return | ||
| 2335 | |||
| 2336 | # Assume input argument to be string or any object representation | ||
| 2337 | # which converts into a formatted IP prefix string. | ||
| 2338 | addr = _split_optional_netmask(address) | ||
| 2339 | |||
| 2340 | self.network_address = IPv6Address(self._ip_int_from_string(addr[0])) | ||
| 2341 | |||
| 2342 | if len(addr) == 2: | ||
| 2343 | arg = addr[1] | ||
| 2344 | else: | ||
| 2345 | arg = self._max_prefixlen | ||
| 2346 | self.netmask, self._prefixlen = self._make_netmask(arg) | ||
| 2347 | |||
| 2348 | if strict: | ||
| 2349 | if (IPv6Address(int(self.network_address) & int(self.netmask)) != | ||
| 2350 | self.network_address): | ||
| 2351 | raise ValueError('%s has host bits set' % self) | ||
| 2352 | self.network_address = IPv6Address(int(self.network_address) & | ||
| 2353 | int(self.netmask)) | ||
| 2354 | |||
| 2355 | if self._prefixlen == (self._max_prefixlen - 1): | ||
| 2356 | self.hosts = self.__iter__ | ||
| 2357 | |||
| 2358 | def hosts(self): | ||
| 2359 | """Generate Iterator over usable hosts in a network. | ||
| 2360 | |||
| 2361 | This is like __iter__ except it doesn't return the | ||
| 2362 | Subnet-Router anycast address. | ||
| 2363 | |||
| 2364 | """ | ||
| 2365 | network = int(self.network_address) | ||
| 2366 | broadcast = int(self.broadcast_address) | ||
| 2367 | for x in _compat_range(network + 1, broadcast + 1): | ||
| 2368 | yield self._address_class(x) | ||
| 2369 | |||
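Unlike __iter__, the all-zeros Subnet-Router anycast address is skipped, while the highest address is usable because IPv6 has no broadcast (illustrative sketch):

    >>> list(IPv6Network('2001:db8::/126').hosts())
    [IPv6Address('2001:db8::1'), IPv6Address('2001:db8::2'), IPv6Address('2001:db8::3')]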
| 2370 | @property | ||
| 2371 | def is_site_local(self): | ||
| 2372 | """Test if the address is reserved for site-local. | ||
| 2373 | |||
| 2374 | Note that the site-local address space has been deprecated by RFC 3879. | ||
| 2375 | Use is_private to test if this address is in the space of unique local | ||
| 2376 | addresses as defined by RFC 4193. | ||
| 2377 | |||
| 2378 | Returns: | ||
| 2379 | A boolean, True if the address is reserved per RFC 3513 2.5.6. | ||
| 2380 | |||
| 2381 | """ | ||
| 2382 | return (self.network_address.is_site_local and | ||
| 2383 | self.broadcast_address.is_site_local) | ||
| 2384 | |||
| 2385 | |||
| 2386 | class _IPv6Constants(object): | ||
| 2387 | |||
| 2388 | _linklocal_network = IPv6Network('fe80::/10') | ||
| 2389 | |||
| 2390 | _multicast_network = IPv6Network('ff00::/8') | ||
| 2391 | |||
| 2392 | _private_networks = [ | ||
| 2393 | IPv6Network('::1/128'), | ||
| 2394 | IPv6Network('::/128'), | ||
| 2395 | IPv6Network('::ffff:0:0/96'), | ||
| 2396 | IPv6Network('100::/64'), | ||
| 2397 | IPv6Network('2001::/23'), | ||
| 2398 | IPv6Network('2001:2::/48'), | ||
| 2399 | IPv6Network('2001:db8::/32'), | ||
| 2400 | IPv6Network('2001:10::/28'), | ||
| 2401 | IPv6Network('fc00::/7'), | ||
| 2402 | IPv6Network('fe80::/10'), | ||
| 2403 | ] | ||
| 2404 | |||
| 2405 | _reserved_networks = [ | ||
| 2406 | IPv6Network('::/8'), IPv6Network('100::/8'), | ||
| 2407 | IPv6Network('200::/7'), IPv6Network('400::/6'), | ||
| 2408 | IPv6Network('800::/5'), IPv6Network('1000::/4'), | ||
| 2409 | IPv6Network('4000::/3'), IPv6Network('6000::/3'), | ||
| 2410 | IPv6Network('8000::/3'), IPv6Network('A000::/3'), | ||
| 2411 | IPv6Network('C000::/3'), IPv6Network('E000::/4'), | ||
| 2412 | IPv6Network('F000::/5'), IPv6Network('F800::/6'), | ||
| 2413 | IPv6Network('FE00::/9'), | ||
| 2414 | ] | ||
| 2415 | |||
| 2416 | _sitelocal_network = IPv6Network('fec0::/10') | ||
| 2417 | |||
| 2418 | |||
| 2419 | IPv6Address._constants = _IPv6Constants | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/lockfile/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/lockfile/__init__.py new file mode 100644 index 0000000..228e051 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/lockfile/__init__.py | |||
| @@ -0,0 +1,347 @@ | |||
| 1 | # -*- coding: utf-8 -*- | ||
| 2 | |||
| 3 | """ | ||
| 4 | lockfile.py - Platform-independent advisory file locks. | ||
| 5 | |||
| 6 | Requires Python 2.5 unless you apply 2.4.diff | ||
| 7 | Locking is done on a per-thread basis instead of a per-process basis. | ||
| 8 | |||
| 9 | Usage: | ||
| 10 | |||
| 11 | >>> lock = LockFile('somefile') | ||
| 12 | >>> try: | ||
| 13 | ... lock.acquire() | ||
| 14 | ... except AlreadyLocked: | ||
| 15 | ... print 'somefile', 'is locked already.' | ||
| 16 | ... except LockFailed: | ||
| 17 | ... print 'somefile', 'can\\'t be locked.' | ||
| 18 | ... else: | ||
| 19 | ... print 'got lock' | ||
| 20 | got lock | ||
| 21 | >>> print lock.is_locked() | ||
| 22 | True | ||
| 23 | >>> lock.release() | ||
| 24 | |||
| 25 | >>> lock = LockFile('somefile') | ||
| 26 | >>> print lock.is_locked() | ||
| 27 | False | ||
| 28 | >>> with lock: | ||
| 29 | ... print lock.is_locked() | ||
| 30 | True | ||
| 31 | >>> print lock.is_locked() | ||
| 32 | False | ||
| 33 | |||
| 34 | >>> lock = LockFile('somefile') | ||
| 35 | >>> # It is okay to lock twice from the same thread... | ||
| 36 | >>> with lock: | ||
| 37 | ... lock.acquire() | ||
| 38 | ... | ||
| 39 | >>> # Though no counter is kept, so you can't unlock multiple times... | ||
| 40 | >>> print lock.is_locked() | ||
| 41 | False | ||
| 42 | |||
| 43 | Exceptions: | ||
| 44 | |||
| 45 | Error - base class for other exceptions | ||
| 46 | LockError - base class for all locking exceptions | ||
| 47 | AlreadyLocked - Another thread or process already holds the lock | ||
| 48 | LockFailed - Lock failed for some other reason | ||
| 49 | UnlockError - base class for all unlocking exceptions | ||
| 50 | AlreadyUnlocked - File was not locked. | ||
| 51 | NotMyLock - File was locked but not by the current thread/process | ||
| 52 | """ | ||
| 53 | |||
| 54 | from __future__ import absolute_import | ||
| 55 | |||
| 56 | import functools | ||
| 57 | import os | ||
| 58 | import socket | ||
| 59 | import threading | ||
| 60 | import warnings | ||
| 61 | |||
| 62 | # Work with PEP8 and non-PEP8 versions of threading module. | ||
| 63 | if not hasattr(threading, "current_thread"): | ||
| 64 | threading.current_thread = threading.currentThread | ||
| 65 | if not hasattr(threading.Thread, "get_name"): | ||
| 66 | threading.Thread.get_name = threading.Thread.getName | ||
| 67 | |||
| 68 | __all__ = ['Error', 'LockError', 'LockTimeout', 'AlreadyLocked', | ||
| 69 | 'LockFailed', 'UnlockError', 'NotLocked', 'NotMyLock', | ||
| 70 | 'LinkFileLock', 'MkdirFileLock', 'SQLiteFileLock', | ||
| 71 | 'LockBase', 'locked'] | ||
| 72 | |||
| 73 | |||
| 74 | class Error(Exception): | ||
| 75 | """ | ||
| 76 | Base class for other exceptions. | ||
| 77 | |||
| 78 | >>> try: | ||
| 79 | ... raise Error | ||
| 80 | ... except Exception: | ||
| 81 | ... pass | ||
| 82 | """ | ||
| 83 | pass | ||
| 84 | |||
| 85 | |||
| 86 | class LockError(Error): | ||
| 87 | """ | ||
| 88 | Base class for error arising from attempts to acquire the lock. | ||
| 89 | |||
| 90 | >>> try: | ||
| 91 | ... raise LockError | ||
| 92 | ... except Error: | ||
| 93 | ... pass | ||
| 94 | """ | ||
| 95 | pass | ||
| 96 | |||
| 97 | |||
| 98 | class LockTimeout(LockError): | ||
| 99 | """Raised when lock creation fails within a user-defined period of time. | ||
| 100 | |||
| 101 | >>> try: | ||
| 102 | ... raise LockTimeout | ||
| 103 | ... except LockError: | ||
| 104 | ... pass | ||
| 105 | """ | ||
| 106 | pass | ||
| 107 | |||
| 108 | |||
| 109 | class AlreadyLocked(LockError): | ||
| 110 | """Some other thread/process is locking the file. | ||
| 111 | |||
| 112 | >>> try: | ||
| 113 | ... raise AlreadyLocked | ||
| 114 | ... except LockError: | ||
| 115 | ... pass | ||
| 116 | """ | ||
| 117 | pass | ||
| 118 | |||
| 119 | |||
| 120 | class LockFailed(LockError): | ||
| 121 | """Lock file creation failed for some other reason. | ||
| 122 | |||
| 123 | >>> try: | ||
| 124 | ... raise LockFailed | ||
| 125 | ... except LockError: | ||
| 126 | ... pass | ||
| 127 | """ | ||
| 128 | pass | ||
| 129 | |||
| 130 | |||
| 131 | class UnlockError(Error): | ||
| 132 | """ | ||
| 133 | Base class for errors arising from attempts to release the lock. | ||
| 134 | |||
| 135 | >>> try: | ||
| 136 | ... raise UnlockError | ||
| 137 | ... except Error: | ||
| 138 | ... pass | ||
| 139 | """ | ||
| 140 | pass | ||
| 141 | |||
| 142 | |||
| 143 | class NotLocked(UnlockError): | ||
| 144 | """Raised when an attempt is made to unlock an unlocked file. | ||
| 145 | |||
| 146 | >>> try: | ||
| 147 | ... raise NotLocked | ||
| 148 | ... except UnlockError: | ||
| 149 | ... pass | ||
| 150 | """ | ||
| 151 | pass | ||
| 152 | |||
| 153 | |||
| 154 | class NotMyLock(UnlockError): | ||
| 155 | """Raised when an attempt is made to unlock a file someone else locked. | ||
| 156 | |||
| 157 | >>> try: | ||
| 158 | ... raise NotMyLock | ||
| 159 | ... except UnlockError: | ||
| 160 | ... pass | ||
| 161 | """ | ||
| 162 | pass | ||
| 163 | |||
| 164 | |||
| 165 | class _SharedBase(object): | ||
| 166 | def __init__(self, path): | ||
| 167 | self.path = path | ||
| 168 | |||
| 169 | def acquire(self, timeout=None): | ||
| 170 | """ | ||
| 171 | Acquire the lock. | ||
| 172 | |||
| 173 | * If timeout is omitted (or None), wait forever trying to lock the | ||
| 174 | file. | ||
| 175 | |||
| 176 | * If timeout > 0, try to acquire the lock for that many seconds. If | ||
| 177 | the lock period expires and the file is still locked, raise | ||
| 178 | LockTimeout. | ||
| 179 | |||
| 180 | * If timeout <= 0, raise AlreadyLocked immediately if the file is | ||
| 181 | already locked. | ||
| 182 | """ | ||
| 183 | raise NotImplementedError("implement in subclass") | ||
| 184 | |||
| 185 | def release(self): | ||
| 186 | """ | ||
| 187 | Release the lock. | ||
| 188 | |||
| 189 | If the file is not locked, raise NotLocked. | ||
| 190 | """ | ||
| 191 | raise NotImplementedError("implement in subclass") | ||
| 192 | |||
| 193 | def __enter__(self): | ||
| 194 | """ | ||
| 195 | Context manager support. | ||
| 196 | """ | ||
| 197 | self.acquire() | ||
| 198 | return self | ||
| 199 | |||
| 200 | def __exit__(self, *_exc): | ||
| 201 | """ | ||
| 202 | Context manager support. | ||
| 203 | """ | ||
| 204 | self.release() | ||
| 205 | |||
| 206 | def __repr__(self): | ||
| 207 | return "<%s: %r>" % (self.__class__.__name__, self.path) | ||
| 208 | |||
| 209 | |||
| 210 | class LockBase(_SharedBase): | ||
| 211 | """Base class for platform-specific lock classes.""" | ||
| 212 | def __init__(self, path, threaded=True, timeout=None): | ||
| 213 | """ | ||
| 214 | >>> lock = LockBase('somefile') | ||
| 215 | >>> lock = LockBase('somefile', threaded=False) | ||
| 216 | """ | ||
| 217 | super(LockBase, self).__init__(path) | ||
| 218 | self.lock_file = os.path.abspath(path) + ".lock" | ||
| 219 | self.hostname = socket.gethostname() | ||
| 220 | self.pid = os.getpid() | ||
| 221 | if threaded: | ||
| 222 | t = threading.current_thread() | ||
| 223 | # Thread objects in Python 2.4 and earlier do not have ident | ||
| 224 | # attrs. Work around that. | ||
| 225 | ident = getattr(t, "ident", hash(t)) | ||
| 226 | self.tname = "-%x" % (ident & 0xffffffff) | ||
| 227 | else: | ||
| 228 | self.tname = "" | ||
| 229 | dirname = os.path.dirname(self.lock_file) | ||
| 230 | |||
| 231 | # unique name is mostly about the current process, but must | ||
| 232 | # also contain the path -- otherwise, two adjacent locked | ||
| 233 | # files conflict (one file gets locked, creating lock-file and | ||
| 234 | # unique file, the other one gets locked, creating lock-file | ||
| 235 | # and overwriting the already existing lock-file, then one | ||
| 236 | # gets unlocked, deleting both lock-file and unique file, | ||
| 237 | # finally the last lock errors out upon releasing.) | ||
| 238 | self.unique_name = os.path.join(dirname, | ||
| 239 | "%s%s.%s%s" % (self.hostname, | ||
| 240 | self.tname, | ||
| 241 | self.pid, | ||
| 242 | hash(self.path))) | ||
| 243 | self.timeout = timeout | ||
| 244 | |||
| 245 | def is_locked(self): | ||
| 246 | """ | ||
| 247 | Tell whether or not the file is locked. | ||
| 248 | """ | ||
| 249 | raise NotImplementedError("implement in subclass") | ||
| 250 | |||
| 251 | def i_am_locking(self): | ||
| 252 | """ | ||
| 253 | Return True if this object is locking the file. | ||
| 254 | """ | ||
| 255 | raise NotImplementedError("implement in subclass") | ||
| 256 | |||
| 257 | def break_lock(self): | ||
| 258 | """ | ||
| 259 | Remove a lock. Useful if a locking thread failed to unlock. | ||
| 260 | """ | ||
| 261 | raise NotImplementedError("implement in subclass") | ||
| 262 | |||
| 263 | def __repr__(self): | ||
| 264 | return "<%s: %r -- %r>" % (self.__class__.__name__, self.unique_name, | ||
| 265 | self.path) | ||
| 266 | |||
| 267 | |||
| 268 | def _fl_helper(cls, mod, *args, **kwds): | ||
| 269 | warnings.warn("Import from %s module instead of lockfile package" % mod, | ||
| 270 | DeprecationWarning, stacklevel=2) | ||
| 271 | # This is a bit funky, but it's only for a while. The way the unit tests | ||
| 272 | # are constructed this function winds up as an unbound method, so it | ||
| 273 | # actually takes three args, not two. We want to toss out self. | ||
| 274 | if not isinstance(args[0], str): | ||
| 275 | # We are testing, avoid the first arg | ||
| 276 | args = args[1:] | ||
| 277 | if len(args) == 1 and not kwds: | ||
| 278 | kwds["threaded"] = True | ||
| 279 | return cls(*args, **kwds) | ||
| 280 | |||
| 281 | |||
| 282 | def LinkFileLock(*args, **kwds): | ||
| 283 | """Factory function provided for backwards compatibility. | ||
| 284 | |||
| 285 | Do not use in new code. Instead, import LinkLockFile from the | ||
| 286 | lockfile.linklockfile module. | ||
| 287 | """ | ||
| 288 | from . import linklockfile | ||
| 289 | return _fl_helper(linklockfile.LinkLockFile, "lockfile.linklockfile", | ||
| 290 | *args, **kwds) | ||
| 291 | |||
| 292 | |||
| 293 | def MkdirFileLock(*args, **kwds): | ||
| 294 | """Factory function provided for backwards compatibility. | ||
| 295 | |||
| 296 | Do not use in new code. Instead, import MkdirLockFile from the | ||
| 297 | lockfile.mkdirlockfile module. | ||
| 298 | """ | ||
| 299 | from . import mkdirlockfile | ||
| 300 | return _fl_helper(mkdirlockfile.MkdirLockFile, "lockfile.mkdirlockfile", | ||
| 301 | *args, **kwds) | ||
| 302 | |||
| 303 | |||
| 304 | def SQLiteFileLock(*args, **kwds): | ||
| 305 | """Factory function provided for backwards compatibility. | ||
| 306 | |||
| 307 | Do not use in new code. Instead, import SQLiteLockFile from the | ||
| 308 | lockfile.sqlitelockfile module. | ||
| 309 | """ | ||
| 310 | from . import sqlitelockfile | ||
| 311 | return _fl_helper(sqlitelockfile.SQLiteLockFile, "lockfile.sqlitelockfile", | ||
| 312 | *args, **kwds) | ||
| 313 | |||
| 314 | |||
| 315 | def locked(path, timeout=None): | ||
| 316 | """Decorator which enables locks for decorated function. | ||
| 317 | |||
| 318 | Arguments: | ||
| 319 | - path: path for lockfile. | ||
| 320 | - timeout (optional): Timeout for acquiring lock. | ||
| 321 | |||
| 322 | Usage: | ||
| 323 | @locked('/var/run/myname', timeout=0) | ||
| 324 | def myname(...): | ||
| 325 | ... | ||
| 326 | """ | ||
| 327 | def decor(func): | ||
| 328 | @functools.wraps(func) | ||
| 329 | def wrapper(*args, **kwargs): | ||
| 330 | lock = FileLock(path, timeout=timeout) | ||
| 331 | lock.acquire() | ||
| 332 | try: | ||
| 333 | return func(*args, **kwargs) | ||
| 334 | finally: | ||
| 335 | lock.release() | ||
| 336 | return wrapper | ||
| 337 | return decor | ||
| 338 | |||
| 339 | |||
| 340 | if hasattr(os, "link"): | ||
| 341 | from . import linklockfile as _llf | ||
| 342 | LockFile = _llf.LinkLockFile | ||
| 343 | else: | ||
| 344 | from . import mkdirlockfile as _mlf | ||
| 345 | LockFile = _mlf.MkdirLockFile | ||
| 346 | |||
| 347 | FileLock = LockFile | ||
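Taken together, the module picks `LinkLockFile` on platforms with `os.link` and `MkdirLockFile` elsewhere, exposing the choice as both `LockFile` and `FileLock`. A minimal usage sketch against the vendored import path (the lock path and timeout are illustrative):

```python
from pip._vendor import lockfile

lock = lockfile.LockFile("/tmp/demo-resource")   # lock file is /tmp/demo-resource.lock
try:
    lock.acquire(timeout=5)                      # wait at most 5 seconds
except lockfile.LockTimeout:
    print("%s is still held elsewhere" % lock.path)
else:
    try:
        print("holding", lock)
    finally:
        lock.release()

# _SharedBase also makes every lock a context manager:
with lockfile.LockFile("/tmp/demo-resource"):
    print("locked")
```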
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/lockfile/linklockfile.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/lockfile/linklockfile.py new file mode 100644 index 0000000..11af0f3 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/lockfile/linklockfile.py | |||
| @@ -0,0 +1,73 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | |||
| 3 | import time | ||
| 4 | import os | ||
| 5 | |||
| 6 | from . import (LockBase, LockFailed, NotLocked, NotMyLock, LockTimeout, | ||
| 7 | AlreadyLocked) | ||
| 8 | |||
| 9 | |||
| 10 | class LinkLockFile(LockBase): | ||
| 11 | """Lock access to a file using atomic property of link(2). | ||
| 12 | |||
| 13 | >>> lock = LinkLockFile('somefile') | ||
| 14 | >>> lock = LinkLockFile('somefile', threaded=False) | ||
| 15 | """ | ||
| 16 | |||
| 17 | def acquire(self, timeout=None): | ||
| 18 | try: | ||
| 19 | open(self.unique_name, "wb").close() | ||
| 20 | except IOError: | ||
| 21 | raise LockFailed("failed to create %s" % self.unique_name) | ||
| 22 | |||
| 23 | timeout = timeout if timeout is not None else self.timeout | ||
| 24 | end_time = time.time() | ||
| 25 | if timeout is not None and timeout > 0: | ||
| 26 | end_time += timeout | ||
| 27 | |||
| 28 | while True: | ||
| 29 | # Try and create a hard link to it. | ||
| 30 | try: | ||
| 31 | os.link(self.unique_name, self.lock_file) | ||
| 32 | except OSError: | ||
| 33 | # Link creation failed. Maybe we've double-locked? | ||
| 34 | nlinks = os.stat(self.unique_name).st_nlink | ||
| 35 | if nlinks == 2: | ||
| 36 | # The original link plus the one I created == 2. We're | ||
| 37 | # good to go. | ||
| 38 | return | ||
| 39 | else: | ||
| 40 | # Otherwise the lock creation failed. | ||
| 41 | if timeout is not None and time.time() > end_time: | ||
| 42 | os.unlink(self.unique_name) | ||
| 43 | if timeout > 0: | ||
| 44 | raise LockTimeout("Timeout waiting to acquire" | ||
| 45 | " lock for %s" % | ||
| 46 | self.path) | ||
| 47 | else: | ||
| 48 | raise AlreadyLocked("%s is already locked" % | ||
| 49 | self.path) | ||
| 50 | time.sleep(timeout is not None and timeout / 10 or 0.1) | ||
| 51 | else: | ||
| 52 | # Link creation succeeded. We're good to go. | ||
| 53 | return | ||
| 54 | |||
| 55 | def release(self): | ||
| 56 | if not self.is_locked(): | ||
| 57 | raise NotLocked("%s is not locked" % self.path) | ||
| 58 | elif not os.path.exists(self.unique_name): | ||
| 59 | raise NotMyLock("%s is locked, but not by me" % self.path) | ||
| 60 | os.unlink(self.unique_name) | ||
| 61 | os.unlink(self.lock_file) | ||
| 62 | |||
| 63 | def is_locked(self): | ||
| 64 | return os.path.exists(self.lock_file) | ||
| 65 | |||
| 66 | def i_am_locking(self): | ||
| 67 | return (self.is_locked() and | ||
| 68 | os.path.exists(self.unique_name) and | ||
| 69 | os.stat(self.unique_name).st_nlink == 2) | ||
| 70 | |||
| 71 | def break_lock(self): | ||
| 72 | if os.path.exists(self.lock_file): | ||
| 73 | os.unlink(self.lock_file) | ||
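The class relies on two POSIX facts: `link(2)` fails atomically if the target name already exists, and a successful link leaves the per-process unique file with two directory entries. A stripped-down sketch of that acquire test, with hypothetical file names:

```python
import os

unique = "/tmp/demo.host-1a2b.1234"   # hypothetical per-process name
lockname = "/tmp/demo.lock"

open(unique, "wb").close()
try:
    os.link(unique, lockname)         # atomic: fails if lockname already exists
    acquired = True
except OSError:
    # Link failed; st_nlink == 2 means we had already linked it ourselves.
    acquired = os.stat(unique).st_nlink == 2
print("acquired:", acquired)

# release() removes both names:
if acquired:
    os.unlink(lockname)
os.unlink(unique)
```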
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/lockfile/mkdirlockfile.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/lockfile/mkdirlockfile.py new file mode 100644 index 0000000..bd5a51e --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/lockfile/mkdirlockfile.py | |||
| @@ -0,0 +1,84 @@ | |||
| 1 | from __future__ import absolute_import, division | ||
| 2 | |||
| 3 | import time | ||
| 4 | import os | ||
| 5 | import sys | ||
| 6 | import errno | ||
| 7 | |||
| 8 | from . import (LockBase, LockFailed, NotLocked, NotMyLock, LockTimeout, | ||
| 9 | AlreadyLocked) | ||
| 10 | |||
| 11 | |||
| 12 | class MkdirLockFile(LockBase): | ||
| 13 | """Lock file by creating a directory.""" | ||
| 14 | def __init__(self, path, threaded=True, timeout=None): | ||
| 15 | """ | ||
| 16 | >>> lock = MkdirLockFile('somefile') | ||
| 17 | >>> lock = MkdirLockFile('somefile', threaded=False) | ||
| 18 | """ | ||
| 19 | LockBase.__init__(self, path, threaded, timeout) | ||
| 20 | # Lock file itself is a directory. Place the unique file name into | ||
| 21 | # it. | ||
| 22 | self.unique_name = os.path.join(self.lock_file, | ||
| 23 | "%s.%s%s" % (self.hostname, | ||
| 24 | self.tname, | ||
| 25 | self.pid)) | ||
| 26 | |||
| 27 | def acquire(self, timeout=None): | ||
| 28 | timeout = timeout if timeout is not None else self.timeout | ||
| 29 | end_time = time.time() | ||
| 30 | if timeout is not None and timeout > 0: | ||
| 31 | end_time += timeout | ||
| 32 | |||
| 33 | if timeout is None: | ||
| 34 | wait = 0.1 | ||
| 35 | else: | ||
| 36 | wait = max(0, timeout / 10) | ||
| 37 | |||
| 38 | while True: | ||
| 39 | try: | ||
| 40 | os.mkdir(self.lock_file) | ||
| 41 | except OSError: | ||
| 42 | err = sys.exc_info()[1] | ||
| 43 | if err.errno == errno.EEXIST: | ||
| 44 | # Already locked. | ||
| 45 | if os.path.exists(self.unique_name): | ||
| 46 | # Already locked by me. | ||
| 47 | return | ||
| 48 | if timeout is not None and time.time() > end_time: | ||
| 49 | if timeout > 0: | ||
| 50 | raise LockTimeout("Timeout waiting to acquire" | ||
| 51 | " lock for %s" % | ||
| 52 | self.path) | ||
| 53 | else: | ||
| 54 | # Someone else has the lock. | ||
| 55 | raise AlreadyLocked("%s is already locked" % | ||
| 56 | self.path) | ||
| 57 | time.sleep(wait) | ||
| 58 | else: | ||
| 59 | # Couldn't create the lock for some other reason | ||
| 60 | raise LockFailed("failed to create %s" % self.lock_file) | ||
| 61 | else: | ||
| 62 | open(self.unique_name, "wb").close() | ||
| 63 | return | ||
| 64 | |||
| 65 | def release(self): | ||
| 66 | if not self.is_locked(): | ||
| 67 | raise NotLocked("%s is not locked" % self.path) | ||
| 68 | elif not os.path.exists(self.unique_name): | ||
| 69 | raise NotMyLock("%s is locked, but not by me" % self.path) | ||
| 70 | os.unlink(self.unique_name) | ||
| 71 | os.rmdir(self.lock_file) | ||
| 72 | |||
| 73 | def is_locked(self): | ||
| 74 | return os.path.exists(self.lock_file) | ||
| 75 | |||
| 76 | def i_am_locking(self): | ||
| 77 | return (self.is_locked() and | ||
| 78 | os.path.exists(self.unique_name)) | ||
| 79 | |||
| 80 | def break_lock(self): | ||
| 81 | if os.path.exists(self.lock_file): | ||
| 82 | for name in os.listdir(self.lock_file): | ||
| 83 | os.unlink(os.path.join(self.lock_file, name)) | ||
| 84 | os.rmdir(self.lock_file) | ||
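The directory variant leans on `mkdir(2)` being atomic: it either creates the lock directory or fails with `EEXIST`, which doubles as the already-locked signal. The core of the acquire/release cycle above reduces to this sketch (path hypothetical):

```python
import errno
import os

lock_dir = "/tmp/demo.lock"
try:
    os.mkdir(lock_dir)                # atomic create-or-fail
except OSError as exc:
    if exc.errno == errno.EEXIST:
        print("already locked")       # MkdirLockFile would sleep and retry here
    else:
        raise                         # maps to LockFailed above
else:
    try:
        print("acquired")
    finally:
        os.rmdir(lock_dir)            # release
```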
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/lockfile/pidlockfile.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/lockfile/pidlockfile.py new file mode 100644 index 0000000..d776de5 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/lockfile/pidlockfile.py | |||
| @@ -0,0 +1,190 @@ | |||
| 1 | # -*- coding: utf-8 -*- | ||
| 2 | |||
| 3 | # pidlockfile.py | ||
| 4 | # | ||
| 5 | # Copyright © 2008–2009 Ben Finney <ben+python@benfinney.id.au> | ||
| 6 | # | ||
| 7 | # This is free software: you may copy, modify, and/or distribute this work | ||
| 8 | # under the terms of the Python Software Foundation License, version 2 or | ||
| 9 | # later as published by the Python Software Foundation. | ||
| 10 | # No warranty expressed or implied. See the file LICENSE.PSF-2 for details. | ||
| 11 | |||
| 12 | """ Lockfile behaviour implemented via Unix PID files. | ||
| 13 | """ | ||
| 14 | |||
| 15 | from __future__ import absolute_import | ||
| 16 | |||
| 17 | import errno | ||
| 18 | import os | ||
| 19 | import time | ||
| 20 | |||
| 21 | from . import (LockBase, AlreadyLocked, LockFailed, NotLocked, NotMyLock, | ||
| 22 | LockTimeout) | ||
| 23 | |||
| 24 | |||
| 25 | class PIDLockFile(LockBase): | ||
| 26 | """ Lockfile implemented as a Unix PID file. | ||
| 27 | |||
| 28 | The lock file is a normal file named by the attribute `path`. | ||
| 29 | A lock's PID file contains a single line of text, containing | ||
| 30 | the process ID (PID) of the process that acquired the lock. | ||
| 31 | |||
| 32 | >>> lock = PIDLockFile('somefile') | ||
| 33 | >>> lock = PIDLockFile('somefile') | ||
| 34 | """ | ||
| 35 | |||
| 36 | def __init__(self, path, threaded=False, timeout=None): | ||
| 37 | # pid lockfiles don't support threaded operation, so always force | ||
| 38 | # False as the threaded arg. | ||
| 39 | LockBase.__init__(self, path, False, timeout) | ||
| 40 | self.unique_name = self.path | ||
| 41 | |||
| 42 | def read_pid(self): | ||
| 43 | """ Get the PID from the lock file. | ||
| 44 | """ | ||
| 45 | return read_pid_from_pidfile(self.path) | ||
| 46 | |||
| 47 | def is_locked(self): | ||
| 48 | """ Test if the lock is currently held. | ||
| 49 | |||
| 50 | The lock is held if the PID file for this lock exists. | ||
| 51 | |||
| 52 | """ | ||
| 53 | return os.path.exists(self.path) | ||
| 54 | |||
| 55 | def i_am_locking(self): | ||
| 56 | """ Test if the lock is held by the current process. | ||
| 57 | |||
| 58 | Returns ``True`` if the current process ID matches the | ||
| 59 | number stored in the PID file. | ||
| 60 | """ | ||
| 61 | return self.is_locked() and os.getpid() == self.read_pid() | ||
| 62 | |||
| 63 | def acquire(self, timeout=None): | ||
| 64 | """ Acquire the lock. | ||
| 65 | |||
| 66 | Creates the PID file for this lock, or raises an error if | ||
| 67 | the lock could not be acquired. | ||
| 68 | """ | ||
| 69 | |||
| 70 | timeout = timeout if timeout is not None else self.timeout | ||
| 71 | end_time = time.time() | ||
| 72 | if timeout is not None and timeout > 0: | ||
| 73 | end_time += timeout | ||
| 74 | |||
| 75 | while True: | ||
| 76 | try: | ||
| 77 | write_pid_to_pidfile(self.path) | ||
| 78 | except OSError as exc: | ||
| 79 | if exc.errno == errno.EEXIST: | ||
| 80 | # The lock creation failed. Maybe sleep a bit. | ||
| 81 | if timeout is not None and time.time() > end_time: | ||
| 82 | if timeout is not None and timeout > 0: | ||
| 83 | raise LockTimeout("Timeout waiting to acquire" | ||
| 84 | " lock for %s" % | ||
| 85 | self.path) | ||
| 86 | else: | ||
| 87 | raise AlreadyLocked("%s is already locked" % | ||
| 88 | self.path) | ||
| 89 | time.sleep(timeout is not None and timeout / 10 or 0.1) | ||
| 90 | else: | ||
| 91 | raise LockFailed("failed to create %s" % self.path) | ||
| 92 | else: | ||
| 93 | return | ||
| 94 | |||
| 95 | def release(self): | ||
| 96 | """ Release the lock. | ||
| 97 | |||
| 98 | Removes the PID file to release the lock, or raises an | ||
| 99 | error if the current process does not hold the lock. | ||
| 100 | |||
| 101 | """ | ||
| 102 | if not self.is_locked(): | ||
| 103 | raise NotLocked("%s is not locked" % self.path) | ||
| 104 | if not self.i_am_locking(): | ||
| 105 | raise NotMyLock("%s is locked, but not by me" % self.path) | ||
| 106 | remove_existing_pidfile(self.path) | ||
| 107 | |||
| 108 | def break_lock(self): | ||
| 109 | """ Break an existing lock. | ||
| 110 | |||
| 111 | Removes the PID file if it already exists, otherwise does | ||
| 112 | nothing. | ||
| 113 | |||
| 114 | """ | ||
| 115 | remove_existing_pidfile(self.path) | ||
| 116 | |||
| 117 | |||
| 118 | def read_pid_from_pidfile(pidfile_path): | ||
| 119 | """ Read the PID recorded in the named PID file. | ||
| 120 | |||
| 121 | Read and return the numeric PID recorded as text in the named | ||
| 122 | PID file. If the PID file cannot be read, or if the content is | ||
| 123 | not a valid PID, return ``None``. | ||
| 124 | |||
| 125 | """ | ||
| 126 | pid = None | ||
| 127 | try: | ||
| 128 | pidfile = open(pidfile_path, 'r') | ||
| 129 | except IOError: | ||
| 130 | pass | ||
| 131 | else: | ||
| 132 | # According to the FHS 2.3 section on PID files in /var/run: | ||
| 133 | # | ||
| 134 | # The file must consist of the process identifier in | ||
| 135 | # ASCII-encoded decimal, followed by a newline character. | ||
| 136 | # | ||
| 137 | # Programs that read PID files should be somewhat flexible | ||
| 138 | # in what they accept; i.e., they should ignore extra | ||
| 139 | # whitespace, leading zeroes, absence of the trailing | ||
| 140 | # newline, or additional lines in the PID file. | ||
| 141 | |||
| 142 | line = pidfile.readline().strip() | ||
| 143 | try: | ||
| 144 | pid = int(line) | ||
| 145 | except ValueError: | ||
| 146 | pass | ||
| 147 | pidfile.close() | ||
| 148 | |||
| 149 | return pid | ||
| 150 | |||
| 151 | |||
| 152 | def write_pid_to_pidfile(pidfile_path): | ||
| 153 | """ Write the PID in the named PID file. | ||
| 154 | |||
| 155 | Get the numeric process ID (“PID”) of the current process | ||
| 156 | and write it to the named file as a line of text. | ||
| 157 | |||
| 158 | """ | ||
| 159 | open_flags = (os.O_CREAT | os.O_EXCL | os.O_WRONLY) | ||
| 160 | open_mode = 0o644 | ||
| 161 | pidfile_fd = os.open(pidfile_path, open_flags, open_mode) | ||
| 162 | pidfile = os.fdopen(pidfile_fd, 'w') | ||
| 163 | |||
| 164 | # According to the FHS 2.3 section on PID files in /var/run: | ||
| 165 | # | ||
| 166 | # The file must consist of the process identifier in | ||
| 167 | # ASCII-encoded decimal, followed by a newline character. For | ||
| 168 | # example, if crond was process number 25, /var/run/crond.pid | ||
| 169 | # would contain three characters: two, five, and newline. | ||
| 170 | |||
| 171 | pid = os.getpid() | ||
| 172 | pidfile.write("%s\n" % pid) | ||
| 173 | pidfile.close() | ||
| 174 | |||
| 175 | |||
| 176 | def remove_existing_pidfile(pidfile_path): | ||
| 177 | """ Remove the named PID file if it exists. | ||
| 178 | |||
| 179 | Removing a PID file that doesn't already exist puts us in the | ||
| 180 | desired state, so we ignore the condition if the file does not | ||
| 181 | exist. | ||
| 182 | |||
| 183 | """ | ||
| 184 | try: | ||
| 185 | os.remove(pidfile_path) | ||
| 186 | except OSError as exc: | ||
| 187 | if exc.errno == errno.ENOENT: | ||
| 188 | pass | ||
| 189 | else: | ||
| 190 | raise | ||
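The whole protocol comes down to `os.open` with `O_CREAT | O_EXCL`, so only one process can win the create, plus the lenient FHS-style read-back. A condensed sketch of the two helpers above, using a hypothetical path:

```python
import os

path = "/tmp/demo.pid"

# write_pid_to_pidfile: exclusive create, PID plus newline (per FHS), mode 0644
fd = os.open(path, os.O_CREAT | os.O_EXCL | os.O_WRONLY, 0o644)
with os.fdopen(fd, "w") as f:
    f.write("%s\n" % os.getpid())

# read_pid_from_pidfile: tolerate extra whitespace, return the int PID
with open(path) as f:
    pid = int(f.readline().strip())
print("locked by PID", pid, "- mine:", pid == os.getpid())

os.remove(path)                       # remove_existing_pidfile
```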
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/lockfile/sqlitelockfile.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/lockfile/sqlitelockfile.py new file mode 100644 index 0000000..278dff4 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/lockfile/sqlitelockfile.py | |||
| @@ -0,0 +1,156 @@ | |||
| 1 | from __future__ import absolute_import, division | ||
| 2 | |||
| 3 | import time | ||
| 4 | import os | ||
| 5 | |||
| 6 | try: | ||
| 7 | unicode | ||
| 8 | except NameError: | ||
| 9 | unicode = str | ||
| 10 | |||
| 11 | from . import LockBase, NotLocked, NotMyLock, LockTimeout, AlreadyLocked | ||
| 12 | |||
| 13 | |||
| 14 | class SQLiteLockFile(LockBase): | ||
| 15 | "Demonstrate SQL-based locking." | ||
| 16 | |||
| 17 | testdb = None | ||
| 18 | |||
| 19 | def __init__(self, path, threaded=True, timeout=None): | ||
| 20 | """ | ||
| 21 | >>> lock = SQLiteLockFile('somefile') | ||
| 22 | >>> lock = SQLiteLockFile('somefile', threaded=False) | ||
| 23 | """ | ||
| 24 | LockBase.__init__(self, path, threaded, timeout) | ||
| 25 | self.lock_file = unicode(self.lock_file) | ||
| 26 | self.unique_name = unicode(self.unique_name) | ||
| 27 | |||
| 28 | if SQLiteLockFile.testdb is None: | ||
| 29 | import tempfile | ||
| 30 | _fd, testdb = tempfile.mkstemp() | ||
| 31 | os.close(_fd) | ||
| 32 | os.unlink(testdb) | ||
| 33 | del _fd, tempfile | ||
| 34 | SQLiteLockFile.testdb = testdb | ||
| 35 | |||
| 36 | import sqlite3 | ||
| 37 | self.connection = sqlite3.connect(SQLiteLockFile.testdb) | ||
| 38 | |||
| 39 | c = self.connection.cursor() | ||
| 40 | try: | ||
| 41 | c.execute("create table locks" | ||
| 42 | "(" | ||
| 43 | " lock_file varchar(32)," | ||
| 44 | " unique_name varchar(32)" | ||
| 45 | ")") | ||
| 46 | except sqlite3.OperationalError: | ||
| 47 | pass | ||
| 48 | else: | ||
| 49 | self.connection.commit() | ||
| 50 | import atexit | ||
| 51 | atexit.register(os.unlink, SQLiteLockFile.testdb) | ||
| 52 | |||
| 53 | def acquire(self, timeout=None): | ||
| 54 | timeout = timeout if timeout is not None else self.timeout | ||
| 55 | end_time = time.time() | ||
| 56 | if timeout is not None and timeout > 0: | ||
| 57 | end_time += timeout | ||
| 58 | |||
| 59 | if timeout is None: | ||
| 60 | wait = 0.1 | ||
| 61 | elif timeout <= 0: | ||
| 62 | wait = 0 | ||
| 63 | else: | ||
| 64 | wait = timeout / 10 | ||
| 65 | |||
| 66 | cursor = self.connection.cursor() | ||
| 67 | |||
| 68 | while True: | ||
| 69 | if not self.is_locked(): | ||
| 70 | # Not locked. Try to lock it. | ||
| 71 | cursor.execute("insert into locks" | ||
| 72 | " (lock_file, unique_name)" | ||
| 73 | " values" | ||
| 74 | " (?, ?)", | ||
| 75 | (self.lock_file, self.unique_name)) | ||
| 76 | self.connection.commit() | ||
| 77 | |||
| 78 | # Check to see if we are the only lock holder. | ||
| 79 | cursor.execute("select * from locks" | ||
| 80 | " where unique_name = ?", | ||
| 81 | (self.unique_name,)) | ||
| 82 | rows = cursor.fetchall() | ||
| 83 | if len(rows) > 1: | ||
| 84 | # Nope. Someone else got there. Remove our lock. | ||
| 85 | cursor.execute("delete from locks" | ||
| 86 | " where unique_name = ?", | ||
| 87 | (self.unique_name,)) | ||
| 88 | self.connection.commit() | ||
| 89 | else: | ||
| 90 | # Yup. We're done, so go home. | ||
| 91 | return | ||
| 92 | else: | ||
| 93 | # Check to see if we are the only lock holder. | ||
| 94 | cursor.execute("select * from locks" | ||
| 95 | " where unique_name = ?", | ||
| 96 | (self.unique_name,)) | ||
| 97 | rows = cursor.fetchall() | ||
| 98 | if len(rows) == 1: | ||
| 99 | # We're the locker, so go home. | ||
| 100 | return | ||
| 101 | |||
| 102 | # Maybe we should wait a bit longer. | ||
| 103 | if timeout is not None and time.time() > end_time: | ||
| 104 | if timeout > 0: | ||
| 105 | # No more waiting. | ||
| 106 | raise LockTimeout("Timeout waiting to acquire" | ||
| 107 | " lock for %s" % | ||
| 108 | self.path) | ||
| 109 | else: | ||
| 110 | # Someone else has the lock and we are impatient. | ||
| 111 | raise AlreadyLocked("%s is already locked" % self.path) | ||
| 112 | |||
| 113 | # Well, okay. We'll give it a bit longer. | ||
| 114 | time.sleep(wait) | ||
| 115 | |||
| 116 | def release(self): | ||
| 117 | if not self.is_locked(): | ||
| 118 | raise NotLocked("%s is not locked" % self.path) | ||
| 119 | if not self.i_am_locking(): | ||
| 120 | raise NotMyLock("%s is locked, but not by me (by %s)" % | ||
| 121 | (self.unique_name, self._who_is_locking())) | ||
| 122 | cursor = self.connection.cursor() | ||
| 123 | cursor.execute("delete from locks" | ||
| 124 | " where unique_name = ?", | ||
| 125 | (self.unique_name,)) | ||
| 126 | self.connection.commit() | ||
| 127 | |||
| 128 | def _who_is_locking(self): | ||
| 129 | cursor = self.connection.cursor() | ||
| 130 | cursor.execute("select unique_name from locks" | ||
| 131 | " where lock_file = ?", | ||
| 132 | (self.lock_file,)) | ||
| 133 | return cursor.fetchone()[0] | ||
| 134 | |||
| 135 | def is_locked(self): | ||
| 136 | cursor = self.connection.cursor() | ||
| 137 | cursor.execute("select * from locks" | ||
| 138 | " where lock_file = ?", | ||
| 139 | (self.lock_file,)) | ||
| 140 | rows = cursor.fetchall() | ||
| 141 | return bool(rows) | ||
| 142 | |||
| 143 | def i_am_locking(self): | ||
| 144 | cursor = self.connection.cursor() | ||
| 145 | cursor.execute("select * from locks" | ||
| 146 | " where lock_file = ?" | ||
| 147 | " and unique_name = ?", | ||
| 148 | (self.lock_file, self.unique_name)) | ||
| 149 | return bool(cursor.fetchall()) | ||
| 150 | |||
| 151 | def break_lock(self): | ||
| 152 | cursor = self.connection.cursor() | ||
| 153 | cursor.execute("delete from locks" | ||
| 154 | " where lock_file = ?", | ||
| 155 | (self.lock_file,)) | ||
| 156 | self.connection.commit() | ||
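As the class docstring says, this is a demonstration of SQL-based locking rather than anything pip exercises: every instance shares one throwaway SQLite database, and a lock is just a row in the `locks` table. A short usage sketch over the vendored path:

```python
from pip._vendor.lockfile.sqlitelockfile import SQLiteLockFile

lock = SQLiteLockFile("/tmp/demo-resource", timeout=0)
lock.acquire()                                  # inserts (lock_file, unique_name)
print(lock.is_locked(), lock.i_am_locking())    # True True
lock.release()                                  # deletes our row again
```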
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/lockfile/symlinklockfile.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/lockfile/symlinklockfile.py new file mode 100644 index 0000000..93ff2b5 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/lockfile/symlinklockfile.py | |||
| @@ -0,0 +1,70 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | |||
| 3 | import os | ||
| 4 | import time | ||
| 5 | |||
| 6 | from . import (LockBase, NotLocked, NotMyLock, LockTimeout, | ||
| 7 | AlreadyLocked) | ||
| 8 | |||
| 9 | |||
| 10 | class SymlinkLockFile(LockBase): | ||
| 11 | """Lock access to a file using symlink(2).""" | ||
| 12 | |||
| 13 | def __init__(self, path, threaded=True, timeout=None): | ||
| 14 | # super(SymlinkLockFile).__init(...) | ||
| 15 | LockBase.__init__(self, path, threaded, timeout) | ||
| 16 | # split it back! | ||
| 17 | self.unique_name = os.path.split(self.unique_name)[1] | ||
| 18 | |||
| 19 | def acquire(self, timeout=None): | ||
| 20 | # Hopefully unnecessary for symlink. | ||
| 21 | # try: | ||
| 22 | # open(self.unique_name, "wb").close() | ||
| 23 | # except IOError: | ||
| 24 | # raise LockFailed("failed to create %s" % self.unique_name) | ||
| 25 | timeout = timeout if timeout is not None else self.timeout | ||
| 26 | end_time = time.time() | ||
| 27 | if timeout is not None and timeout > 0: | ||
| 28 | end_time += timeout | ||
| 29 | |||
| 30 | while True: | ||
| 31 | # Try and create a symbolic link to it. | ||
| 32 | try: | ||
| 33 | os.symlink(self.unique_name, self.lock_file) | ||
| 34 | except OSError: | ||
| 35 | # Link creation failed. Maybe we've double-locked? | ||
| 36 | if self.i_am_locking(): | ||
| 37 | # Linked to our unique name. Proceed. | ||
| 38 | return | ||
| 39 | else: | ||
| 40 | # Otherwise the lock creation failed. | ||
| 41 | if timeout is not None and time.time() > end_time: | ||
| 42 | if timeout > 0: | ||
| 43 | raise LockTimeout("Timeout waiting to acquire" | ||
| 44 | " lock for %s" % | ||
| 45 | self.path) | ||
| 46 | else: | ||
| 47 | raise AlreadyLocked("%s is already locked" % | ||
| 48 | self.path) | ||
| 49 | time.sleep(timeout / 10 if timeout is not None else 0.1) | ||
| 50 | else: | ||
| 51 | # Link creation succeeded. We're good to go. | ||
| 52 | return | ||
| 53 | |||
| 54 | def release(self): | ||
| 55 | if not self.is_locked(): | ||
| 56 | raise NotLocked("%s is not locked" % self.path) | ||
| 57 | elif not self.i_am_locking(): | ||
| 58 | raise NotMyLock("%s is locked, but not by me" % self.path) | ||
| 59 | os.unlink(self.lock_file) | ||
| 60 | |||
| 61 | def is_locked(self): | ||
| 62 | return os.path.islink(self.lock_file) | ||
| 63 | |||
| 64 | def i_am_locking(self): | ||
| 65 | return (os.path.islink(self.lock_file) | ||
| 66 | and os.readlink(self.lock_file) == self.unique_name) | ||
| 67 | |||
| 68 | def break_lock(self): | ||
| 69 | if os.path.islink(self.lock_file): # exists && link | ||
| 70 | os.unlink(self.lock_file) | ||
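`symlink(2)` gives the same atomic create-or-fail as `link(2)` on POSIX, with a bonus: the link target records who owns the lock (it need not even exist as a file), so `os.readlink` answers `i_am_locking()`. Condensed, with hypothetical names:

```python
import os

unique = "host-1a2b.1234"             # hypothetical per-process token
lockname = "/tmp/demo.lock"

try:
    os.symlink(unique, lockname)      # atomic; the target need not exist
    print("acquired")
except OSError:
    print("already mine:", os.readlink(lockname) == unique)

if os.path.islink(lockname):
    os.unlink(lockname)               # release / break_lock
```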
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/msgpack/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/msgpack/__init__.py new file mode 100644 index 0000000..dda626a --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/msgpack/__init__.py | |||
| @@ -0,0 +1,66 @@ | |||
| 1 | # coding: utf-8 | ||
| 2 | from pip._vendor.msgpack._version import version | ||
| 3 | from pip._vendor.msgpack.exceptions import * | ||
| 4 | |||
| 5 | from collections import namedtuple | ||
| 6 | |||
| 7 | |||
| 8 | class ExtType(namedtuple('ExtType', 'code data')): | ||
| 9 | """ExtType represents ext type in msgpack.""" | ||
| 10 | def __new__(cls, code, data): | ||
| 11 | if not isinstance(code, int): | ||
| 12 | raise TypeError("code must be int") | ||
| 13 | if not isinstance(data, bytes): | ||
| 14 | raise TypeError("data must be bytes") | ||
| 15 | if not 0 <= code <= 127: | ||
| 16 | raise ValueError("code must be 0~127") | ||
| 17 | return super(ExtType, cls).__new__(cls, code, data) | ||
| 18 | |||
| 19 | |||
| 20 | import os | ||
| 21 | if os.environ.get('MSGPACK_PUREPYTHON'): | ||
| 22 | from pip._vendor.msgpack.fallback import Packer, unpackb, Unpacker | ||
| 23 | else: | ||
| 24 | try: | ||
| 25 | from pip._vendor.msgpack._packer import Packer | ||
| 26 | from pip._vendor.msgpack._unpacker import unpackb, Unpacker | ||
| 27 | except ImportError: | ||
| 28 | from pip._vendor.msgpack.fallback import Packer, unpackb, Unpacker | ||
| 29 | |||
| 30 | |||
| 31 | def pack(o, stream, **kwargs): | ||
| 32 | """ | ||
| 33 | Pack object `o` and write it to `stream` | ||
| 34 | |||
| 35 | See :class:`Packer` for options. | ||
| 36 | """ | ||
| 37 | packer = Packer(**kwargs) | ||
| 38 | stream.write(packer.pack(o)) | ||
| 39 | |||
| 40 | |||
| 41 | def packb(o, **kwargs): | ||
| 42 | """ | ||
| 43 | Pack object `o` and return packed bytes | ||
| 44 | |||
| 45 | See :class:`Packer` for options. | ||
| 46 | """ | ||
| 47 | return Packer(**kwargs).pack(o) | ||
| 48 | |||
| 49 | |||
| 50 | def unpack(stream, **kwargs): | ||
| 51 | """ | ||
| 52 | Unpack an object from `stream`. | ||
| 53 | |||
| 54 | Raises `ExtraData` when `stream` contains extra bytes. | ||
| 55 | See :class:`Unpacker` for options. | ||
| 56 | """ | ||
| 57 | data = stream.read() | ||
| 58 | return unpackb(data, **kwargs) | ||
| 59 | |||
| 60 | |||
| 61 | # alias for compatibility to simplejson/marshal/pickle. | ||
| 62 | load = unpack | ||
| 63 | loads = unpackb | ||
| 64 | |||
| 65 | dump = pack | ||
| 66 | dumps = packb | ||
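A quick round trip through the helpers defined above, using the vendored import path; the simplejson/pickle-style aliases behave identically. `raw=False` is the forward-compatible choice that the `Unpacker` documentation below recommends:

```python
from pip._vendor import msgpack

blob = msgpack.packb({"compact": True, "schema": 0})
print(msgpack.unpackb(blob, raw=False))       # {'compact': True, 'schema': 0}

# The aliases defined above mirror pickle/json naming:
assert msgpack.loads(msgpack.dumps([1, 2, 3])) == [1, 2, 3]
```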
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/msgpack/_version.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/msgpack/_version.py new file mode 100644 index 0000000..91d97cd --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/msgpack/_version.py | |||
| @@ -0,0 +1 @@ | |||
| version = (0, 5, 6) | |||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/msgpack/exceptions.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/msgpack/exceptions.py new file mode 100644 index 0000000..e0b5133 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/msgpack/exceptions.py | |||
| @@ -0,0 +1,41 @@ | |||
| 1 | class UnpackException(Exception): | ||
| 2 | """Deprecated. Use Exception instead to catch all exception during unpacking.""" | ||
| 3 | |||
| 4 | |||
| 5 | class BufferFull(UnpackException): | ||
| 6 | pass | ||
| 7 | |||
| 8 | |||
| 9 | class OutOfData(UnpackException): | ||
| 10 | pass | ||
| 11 | |||
| 12 | |||
| 13 | class UnpackValueError(UnpackException, ValueError): | ||
| 14 | """Deprecated. Use ValueError instead.""" | ||
| 15 | |||
| 16 | |||
| 17 | class ExtraData(UnpackValueError): | ||
| 18 | def __init__(self, unpacked, extra): | ||
| 19 | self.unpacked = unpacked | ||
| 20 | self.extra = extra | ||
| 21 | |||
| 22 | def __str__(self): | ||
| 23 | return "unpack(b) received extra data." | ||
| 24 | |||
| 25 | |||
| 26 | class PackException(Exception): | ||
| 27 | """Deprecated. Use Exception instead to catch all exception during packing.""" | ||
| 28 | |||
| 29 | |||
| 30 | class PackValueError(PackException, ValueError): | ||
| 31 | """PackValueError is raised when type of input data is supported but it's value is unsupported. | ||
| 32 | |||
| 33 | Deprecated. Use ValueError instead. | ||
| 34 | """ | ||
| 35 | |||
| 36 | |||
| 37 | class PackOverflowError(PackValueError, OverflowError): | ||
| 38 | """PackOverflowError is raised when integer value is out of range of msgpack support [-2**31, 2**32). | ||
| 39 | |||
| 40 | Deprecated. Use ValueError instead. | ||
| 41 | """ | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/msgpack/fallback.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/msgpack/fallback.py new file mode 100644 index 0000000..a1a9712 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/msgpack/fallback.py | |||
| @@ -0,0 +1,977 @@ | |||
| 1 | """Fallback pure Python implementation of msgpack""" | ||
| 2 | |||
| 3 | import sys | ||
| 4 | import struct | ||
| 5 | import warnings | ||
| 6 | |||
| 7 | if sys.version_info[0] == 3: | ||
| 8 | PY3 = True | ||
| 9 | int_types = int | ||
| 10 | Unicode = str | ||
| 11 | xrange = range | ||
| 12 | def dict_iteritems(d): | ||
| 13 | return d.items() | ||
| 14 | else: | ||
| 15 | PY3 = False | ||
| 16 | int_types = (int, long) | ||
| 17 | Unicode = unicode | ||
| 18 | def dict_iteritems(d): | ||
| 19 | return d.iteritems() | ||
| 20 | |||
| 21 | |||
| 22 | if hasattr(sys, 'pypy_version_info'): | ||
| 23 | # cStringIO is slow on PyPy, StringIO is faster. However: PyPy's own | ||
| 24 | # StringBuilder is fastest. | ||
| 25 | from __pypy__ import newlist_hint | ||
| 26 | try: | ||
| 27 | from __pypy__.builders import BytesBuilder as StringBuilder | ||
| 28 | except ImportError: | ||
| 29 | from __pypy__.builders import StringBuilder | ||
| 30 | USING_STRINGBUILDER = True | ||
| 31 | class StringIO(object): | ||
| 32 | def __init__(self, s=b''): | ||
| 33 | if s: | ||
| 34 | self.builder = StringBuilder(len(s)) | ||
| 35 | self.builder.append(s) | ||
| 36 | else: | ||
| 37 | self.builder = StringBuilder() | ||
| 38 | def write(self, s): | ||
| 39 | if isinstance(s, memoryview): | ||
| 40 | s = s.tobytes() | ||
| 41 | elif isinstance(s, bytearray): | ||
| 42 | s = bytes(s) | ||
| 43 | self.builder.append(s) | ||
| 44 | def getvalue(self): | ||
| 45 | return self.builder.build() | ||
| 46 | else: | ||
| 47 | USING_STRINGBUILDER = False | ||
| 48 | from io import BytesIO as StringIO | ||
| 49 | newlist_hint = lambda size: [] | ||
| 50 | |||
| 51 | |||
| 52 | from pip._vendor.msgpack.exceptions import ( | ||
| 53 | BufferFull, | ||
| 54 | OutOfData, | ||
| 55 | UnpackValueError, | ||
| 56 | PackValueError, | ||
| 57 | PackOverflowError, | ||
| 58 | ExtraData) | ||
| 59 | |||
| 60 | from pip._vendor.msgpack import ExtType | ||
| 61 | |||
| 62 | |||
| 63 | EX_SKIP = 0 | ||
| 64 | EX_CONSTRUCT = 1 | ||
| 65 | EX_READ_ARRAY_HEADER = 2 | ||
| 66 | EX_READ_MAP_HEADER = 3 | ||
| 67 | |||
| 68 | TYPE_IMMEDIATE = 0 | ||
| 69 | TYPE_ARRAY = 1 | ||
| 70 | TYPE_MAP = 2 | ||
| 71 | TYPE_RAW = 3 | ||
| 72 | TYPE_BIN = 4 | ||
| 73 | TYPE_EXT = 5 | ||
| 74 | |||
| 75 | DEFAULT_RECURSE_LIMIT = 511 | ||
| 76 | |||
| 77 | |||
| 78 | def _check_type_strict(obj, t, type=type, tuple=tuple): | ||
| 79 | if type(t) is tuple: | ||
| 80 | return type(obj) in t | ||
| 81 | else: | ||
| 82 | return type(obj) is t | ||
| 83 | |||
| 84 | |||
| 85 | def _get_data_from_buffer(obj): | ||
| 86 | try: | ||
| 87 | view = memoryview(obj) | ||
| 88 | except TypeError: | ||
| 89 | # try to use legacy buffer protocol if 2.7, otherwise re-raise | ||
| 90 | if not PY3: | ||
| 91 | view = memoryview(buffer(obj)) | ||
| 92 | warnings.warn("using old buffer interface to unpack %s; " | ||
| 93 | "this leads to unpacking errors if slicing is used and " | ||
| 94 | "will be removed in a future version" % type(obj), | ||
| 95 | RuntimeWarning) | ||
| 96 | else: | ||
| 97 | raise | ||
| 98 | if view.itemsize != 1: | ||
| 99 | raise ValueError("cannot unpack from multi-byte object") | ||
| 100 | return view | ||
| 101 | |||
| 102 | |||
| 103 | def unpack(stream, **kwargs): | ||
| 104 | warnings.warn( | ||
| 105 | "Direct calling implementation's unpack() is deprecated, Use msgpack.unpack() or unpackb() instead.", | ||
| 106 | PendingDeprecationWarning) | ||
| 107 | data = stream.read() | ||
| 108 | return unpackb(data, **kwargs) | ||
| 109 | |||
| 110 | |||
| 111 | def unpackb(packed, **kwargs): | ||
| 112 | """ | ||
| 113 | Unpack an object from `packed`. | ||
| 114 | |||
| 115 | Raises `ExtraData` when `packed` contains extra bytes. | ||
| 116 | See :class:`Unpacker` for options. | ||
| 117 | """ | ||
| 118 | unpacker = Unpacker(None, **kwargs) | ||
| 119 | unpacker.feed(packed) | ||
| 120 | try: | ||
| 121 | ret = unpacker._unpack() | ||
| 122 | except OutOfData: | ||
| 123 | raise UnpackValueError("Data is not enough.") | ||
| 124 | if unpacker._got_extradata(): | ||
| 125 | raise ExtraData(ret, unpacker._get_extradata()) | ||
| 126 | return ret | ||
| 127 | |||
| 128 | |||
| 129 | class Unpacker(object): | ||
| 130 | """Streaming unpacker. | ||
| 131 | |||
| 132 | arguments: | ||
| 133 | |||
| 134 | :param file_like: | ||
| 135 | File-like object having `.read(n)` method. | ||
| 136 | If specified, unpacker reads serialized data from it and :meth:`feed()` is not usable. | ||
| 137 | |||
| 138 | :param int read_size: | ||
| 139 | Used as `file_like.read(read_size)`. (default: `min(16*1024, max_buffer_size)`) | ||
| 140 | |||
| 141 | :param bool use_list: | ||
| 142 | If true, unpack msgpack array to Python list. | ||
| 143 | Otherwise, unpack to Python tuple. (default: True) | ||
| 144 | |||
| 145 | :param bool raw: | ||
| 146 | If true, unpack msgpack raw to Python bytes (default). | ||
| 147 | Otherwise, unpack to Python str (or unicode on Python 2) by decoding | ||
| 148 | with UTF-8 encoding (recommended). | ||
| 149 | Currently, the default is true, but it will be changed to false in | ||
| 150 | the near future, so you should specify it explicitly to keep backward | ||
| 151 | compatibility. | ||
| 152 | |||
| 153 | The *encoding* option, which is deprecated, overrides this option. | ||
| 154 | |||
| 155 | :param callable object_hook: | ||
| 156 | When specified, it should be callable. | ||
| 157 | Unpacker calls it with a dict argument after unpacking msgpack map. | ||
| 158 | (See also simplejson) | ||
| 159 | |||
| 160 | :param callable object_pairs_hook: | ||
| 161 | When specified, it should be callable. | ||
| 162 | Unpacker calls it with a list of key-value pairs after unpacking msgpack map. | ||
| 163 | (See also simplejson) | ||
| 164 | |||
| 165 | :param str encoding: | ||
| 166 | Encoding used for decoding msgpack raw. | ||
| 167 | If it is None (default), msgpack raw is deserialized to Python bytes. | ||
| 168 | |||
| 169 | :param str unicode_errors: | ||
| 170 | (deprecated) Used for decoding msgpack raw with *encoding*. | ||
| 171 | (default: `'strict'`) | ||
| 172 | |||
| 173 | :param int max_buffer_size: | ||
| 174 | Limits the size of data waiting to be unpacked. 0 means system's INT_MAX (default). | ||
| 175 | Raises a `BufferFull` exception when it is insufficient. | ||
| 176 | You should set this parameter when unpacking data from an untrusted source. | ||
| 177 | |||
| 178 | :param int max_str_len: | ||
| 179 | Limits max length of str. (default: 2**31-1) | ||
| 180 | |||
| 181 | :param int max_bin_len: | ||
| 182 | Limits max length of bin. (default: 2**31-1) | ||
| 183 | |||
| 184 | :param int max_array_len: | ||
| 185 | Limits max length of array. (default: 2**31-1) | ||
| 186 | |||
| 187 | :param int max_map_len: | ||
| 188 | Limits max length of map. (default: 2**31-1) | ||
| 189 | |||
| 190 | |||
| 191 | example of streaming deserialize from file-like object:: | ||
| 192 | |||
| 193 | unpacker = Unpacker(file_like, raw=False) | ||
| 194 | for o in unpacker: | ||
| 195 | process(o) | ||
| 196 | |||
| 197 | example of streaming deserialize from socket:: | ||
| 198 | |||
| 199 | unpacker = Unpacker(raw=False) | ||
| 200 | while True: | ||
| 201 | buf = sock.recv(1024**2) | ||
| 202 | if not buf: | ||
| 203 | break | ||
| 204 | unpacker.feed(buf) | ||
| 205 | for o in unpacker: | ||
| 206 | process(o) | ||
| 207 | """ | ||
| 208 | |||
| 209 | def __init__(self, file_like=None, read_size=0, use_list=True, raw=True, | ||
| 210 | object_hook=None, object_pairs_hook=None, list_hook=None, | ||
| 211 | encoding=None, unicode_errors=None, max_buffer_size=0, | ||
| 212 | ext_hook=ExtType, | ||
| 213 | max_str_len=2147483647, # 2**31-1 | ||
| 214 | max_bin_len=2147483647, | ||
| 215 | max_array_len=2147483647, | ||
| 216 | max_map_len=2147483647, | ||
| 217 | max_ext_len=2147483647): | ||
| 218 | |||
| 219 | if encoding is not None: | ||
| 220 | warnings.warn( | ||
| 221 | "encoding is deprecated, Use raw=False instead.", | ||
| 222 | PendingDeprecationWarning) | ||
| 223 | |||
| 224 | if unicode_errors is None: | ||
| 225 | unicode_errors = 'strict' | ||
| 226 | |||
| 227 | if file_like is None: | ||
| 228 | self._feeding = True | ||
| 229 | else: | ||
| 230 | if not callable(file_like.read): | ||
| 231 | raise TypeError("`file_like.read` must be callable") | ||
| 232 | self.file_like = file_like | ||
| 233 | self._feeding = False | ||
| 234 | |||
| 235 | #: array of bytes fed. | ||
| 236 | self._buffer = bytearray() | ||
| 237 | # Some very old pythons don't support `struct.unpack_from()` with a | ||
| 238 | # `bytearray`. So we wrap it in a `buffer()` there. | ||
| 239 | if sys.version_info < (2, 7, 6): | ||
| 240 | self._buffer_view = buffer(self._buffer) | ||
| 241 | else: | ||
| 242 | self._buffer_view = self._buffer | ||
| 243 | #: Which position we are currently reading. | ||
| 244 | self._buff_i = 0 | ||
| 245 | |||
| 246 | # When Unpacker is used as an iterable, between the calls to next(), | ||
| 247 | # the buffer is not "consumed" completely, for efficiency's sake. | ||
| 248 | # Instead, it is done sloppily. To make sure we raise BufferFull at | ||
| 249 | # the correct moments, we have to keep track of how sloppy we were. | ||
| 250 | # Furthermore, when the buffer is incomplete (that is: in the case | ||
| 251 | # we raise an OutOfData) we need to rollback the buffer to the correct | ||
| 252 | # state, which _buf_checkpoint records. | ||
| 253 | self._buf_checkpoint = 0 | ||
| 254 | |||
| 255 | self._max_buffer_size = max_buffer_size or 2**31-1 | ||
| 256 | if read_size > self._max_buffer_size: | ||
| 257 | raise ValueError("read_size must be smaller than max_buffer_size") | ||
| 258 | self._read_size = read_size or min(self._max_buffer_size, 16*1024) | ||
| 259 | self._raw = bool(raw) | ||
| 260 | self._encoding = encoding | ||
| 261 | self._unicode_errors = unicode_errors | ||
| 262 | self._use_list = use_list | ||
| 263 | self._list_hook = list_hook | ||
| 264 | self._object_hook = object_hook | ||
| 265 | self._object_pairs_hook = object_pairs_hook | ||
| 266 | self._ext_hook = ext_hook | ||
| 267 | self._max_str_len = max_str_len | ||
| 268 | self._max_bin_len = max_bin_len | ||
| 269 | self._max_array_len = max_array_len | ||
| 270 | self._max_map_len = max_map_len | ||
| 271 | self._max_ext_len = max_ext_len | ||
| 272 | self._stream_offset = 0 | ||
| 273 | |||
| 274 | if list_hook is not None and not callable(list_hook): | ||
| 275 | raise TypeError('`list_hook` is not callable') | ||
| 276 | if object_hook is not None and not callable(object_hook): | ||
| 277 | raise TypeError('`object_hook` is not callable') | ||
| 278 | if object_pairs_hook is not None and not callable(object_pairs_hook): | ||
| 279 | raise TypeError('`object_pairs_hook` is not callable') | ||
| 280 | if object_hook is not None and object_pairs_hook is not None: | ||
| 281 | raise TypeError("object_pairs_hook and object_hook are mutually " | ||
| 282 | "exclusive") | ||
| 283 | if not callable(ext_hook): | ||
| 284 | raise TypeError("`ext_hook` is not callable") | ||
| 285 | |||
| 286 | def feed(self, next_bytes): | ||
| 287 | assert self._feeding | ||
| 288 | view = _get_data_from_buffer(next_bytes) | ||
| 289 | if (len(self._buffer) - self._buff_i + len(view) > self._max_buffer_size): | ||
| 290 | raise BufferFull | ||
| 291 | |||
| 292 | # Strip buffer before checkpoint before reading file. | ||
| 293 | if self._buf_checkpoint > 0: | ||
| 294 | del self._buffer[:self._buf_checkpoint] | ||
| 295 | self._buff_i -= self._buf_checkpoint | ||
| 296 | self._buf_checkpoint = 0 | ||
| 297 | |||
| 298 | self._buffer += view | ||
| 299 | |||
| 300 | def _consume(self): | ||
| 301 | """ Gets rid of the used parts of the buffer. """ | ||
| 302 | self._stream_offset += self._buff_i - self._buf_checkpoint | ||
| 303 | self._buf_checkpoint = self._buff_i | ||
| 304 | |||
| 305 | def _got_extradata(self): | ||
| 306 | return self._buff_i < len(self._buffer) | ||
| 307 | |||
| 308 | def _get_extradata(self): | ||
| 309 | return self._buffer[self._buff_i:] | ||
| 310 | |||
| 311 | def read_bytes(self, n): | ||
| 312 | return self._read(n) | ||
| 313 | |||
| 314 | def _read(self, n): | ||
| 315 | # (int) -> bytearray | ||
| 316 | self._reserve(n) | ||
| 317 | i = self._buff_i | ||
| 318 | self._buff_i = i+n | ||
| 319 | return self._buffer[i:i+n] | ||
| 320 | |||
| 321 | def _reserve(self, n): | ||
| 322 | remain_bytes = len(self._buffer) - self._buff_i - n | ||
| 323 | |||
| 324 | # Fast path: buffer has n bytes already | ||
| 325 | if remain_bytes >= 0: | ||
| 326 | return | ||
| 327 | |||
| 328 | if self._feeding: | ||
| 329 | self._buff_i = self._buf_checkpoint | ||
| 330 | raise OutOfData | ||
| 331 | |||
| 332 | # Strip buffer before checkpoint before reading file. | ||
| 333 | if self._buf_checkpoint > 0: | ||
| 334 | del self._buffer[:self._buf_checkpoint] | ||
| 335 | self._buff_i -= self._buf_checkpoint | ||
| 336 | self._buf_checkpoint = 0 | ||
| 337 | |||
| 338 | # Read from file | ||
| 339 | remain_bytes = -remain_bytes | ||
| 340 | while remain_bytes > 0: | ||
| 341 | to_read_bytes = max(self._read_size, remain_bytes) | ||
| 342 | read_data = self.file_like.read(to_read_bytes) | ||
| 343 | if not read_data: | ||
| 344 | break | ||
| 345 | assert isinstance(read_data, bytes) | ||
| 346 | self._buffer += read_data | ||
| 347 | remain_bytes -= len(read_data) | ||
| 348 | |||
| 349 | if len(self._buffer) < n + self._buff_i: | ||
| 350 | self._buff_i = 0 # rollback | ||
| 351 | raise OutOfData | ||
| 352 | |||
| 353 | def _read_header(self, execute=EX_CONSTRUCT): | ||
| 354 | typ = TYPE_IMMEDIATE | ||
| 355 | n = 0 | ||
| 356 | obj = None | ||
| 357 | self._reserve(1) | ||
| 358 | b = self._buffer[self._buff_i] | ||
| 359 | self._buff_i += 1 | ||
| 360 | if b & 0b10000000 == 0: | ||
| 361 | obj = b | ||
| 362 | elif b & 0b11100000 == 0b11100000: | ||
| 363 | obj = -1 - (b ^ 0xff) | ||
| 364 | elif b & 0b11100000 == 0b10100000: | ||
| 365 | n = b & 0b00011111 | ||
| 366 | typ = TYPE_RAW | ||
| 367 | if n > self._max_str_len: | ||
| 368 | raise UnpackValueError("%s exceeds max_str_len(%s)", n, self._max_str_len) | ||
| 369 | obj = self._read(n) | ||
| 370 | elif b & 0b11110000 == 0b10010000: | ||
| 371 | n = b & 0b00001111 | ||
| 372 | typ = TYPE_ARRAY | ||
| 373 | if n > self._max_array_len: | ||
| 374 | raise UnpackValueError("%s exceeds max_array_len(%s)", n, self._max_array_len) | ||
| 375 | elif b & 0b11110000 == 0b10000000: | ||
| 376 | n = b & 0b00001111 | ||
| 377 | typ = TYPE_MAP | ||
| 378 | if n > self._max_map_len: | ||
| 379 | raise UnpackValueError("%s exceeds max_map_len(%s)", n, self._max_map_len) | ||
| 380 | elif b == 0xc0: | ||
| 381 | obj = None | ||
| 382 | elif b == 0xc2: | ||
| 383 | obj = False | ||
| 384 | elif b == 0xc3: | ||
| 385 | obj = True | ||
| 386 | elif b == 0xc4: | ||
| 387 | typ = TYPE_BIN | ||
| 388 | self._reserve(1) | ||
| 389 | n = self._buffer[self._buff_i] | ||
| 390 | self._buff_i += 1 | ||
| 391 | if n > self._max_bin_len: | ||
| 392 | raise UnpackValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len)) | ||
| 393 | obj = self._read(n) | ||
| 394 | elif b == 0xc5: | ||
| 395 | typ = TYPE_BIN | ||
| 396 | self._reserve(2) | ||
| 397 | n = struct.unpack_from(">H", self._buffer_view, self._buff_i)[0] | ||
| 398 | self._buff_i += 2 | ||
| 399 | if n > self._max_bin_len: | ||
| 400 | raise UnpackValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len)) | ||
| 401 | obj = self._read(n) | ||
| 402 | elif b == 0xc6: | ||
| 403 | typ = TYPE_BIN | ||
| 404 | self._reserve(4) | ||
| 405 | n = struct.unpack_from(">I", self._buffer_view, self._buff_i)[0] | ||
| 406 | self._buff_i += 4 | ||
| 407 | if n > self._max_bin_len: | ||
| 408 | raise UnpackValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len)) | ||
| 409 | obj = self._read(n) | ||
| 410 | elif b == 0xc7: # ext 8 | ||
| 411 | typ = TYPE_EXT | ||
| 412 | self._reserve(2) | ||
| 413 | L, n = struct.unpack_from('Bb', self._buffer_view, self._buff_i) | ||
| 414 | self._buff_i += 2 | ||
| 415 | if L > self._max_ext_len: | ||
| 416 | raise UnpackValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len)) | ||
| 417 | obj = self._read(L) | ||
| 418 | elif b == 0xc8: # ext 16 | ||
| 419 | typ = TYPE_EXT | ||
| 420 | self._reserve(3) | ||
| 421 | L, n = struct.unpack_from('>Hb', self._buffer_view, self._buff_i) | ||
| 422 | self._buff_i += 3 | ||
| 423 | if L > self._max_ext_len: | ||
| 424 | raise UnpackValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len)) | ||
| 425 | obj = self._read(L) | ||
| 426 | elif b == 0xc9: # ext 32 | ||
| 427 | typ = TYPE_EXT | ||
| 428 | self._reserve(5) | ||
| 429 | L, n = struct.unpack_from('>Ib', self._buffer_view, self._buff_i) | ||
| 430 | self._buff_i += 5 | ||
| 431 | if L > self._max_ext_len: | ||
| 432 | raise UnpackValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len)) | ||
| 433 | obj = self._read(L) | ||
| 434 | elif b == 0xca: | ||
| 435 | self._reserve(4) | ||
| 436 | obj = struct.unpack_from(">f", self._buffer_view, self._buff_i)[0] | ||
| 437 | self._buff_i += 4 | ||
| 438 | elif b == 0xcb: | ||
| 439 | self._reserve(8) | ||
| 440 | obj = struct.unpack_from(">d", self._buffer_view, self._buff_i)[0] | ||
| 441 | self._buff_i += 8 | ||
| 442 | elif b == 0xcc: | ||
| 443 | self._reserve(1) | ||
| 444 | obj = self._buffer[self._buff_i] | ||
| 445 | self._buff_i += 1 | ||
| 446 | elif b == 0xcd: | ||
| 447 | self._reserve(2) | ||
| 448 | obj = struct.unpack_from(">H", self._buffer_view, self._buff_i)[0] | ||
| 449 | self._buff_i += 2 | ||
| 450 | elif b == 0xce: | ||
| 451 | self._reserve(4) | ||
| 452 | obj = struct.unpack_from(">I", self._buffer_view, self._buff_i)[0] | ||
| 453 | self._buff_i += 4 | ||
| 454 | elif b == 0xcf: | ||
| 455 | self._reserve(8) | ||
| 456 | obj = struct.unpack_from(">Q", self._buffer_view, self._buff_i)[0] | ||
| 457 | self._buff_i += 8 | ||
| 458 | elif b == 0xd0: | ||
| 459 | self._reserve(1) | ||
| 460 | obj = struct.unpack_from("b", self._buffer_view, self._buff_i)[0] | ||
| 461 | self._buff_i += 1 | ||
| 462 | elif b == 0xd1: | ||
| 463 | self._reserve(2) | ||
| 464 | obj = struct.unpack_from(">h", self._buffer_view, self._buff_i)[0] | ||
| 465 | self._buff_i += 2 | ||
| 466 | elif b == 0xd2: | ||
| 467 | self._reserve(4) | ||
| 468 | obj = struct.unpack_from(">i", self._buffer_view, self._buff_i)[0] | ||
| 469 | self._buff_i += 4 | ||
| 470 | elif b == 0xd3: | ||
| 471 | self._reserve(8) | ||
| 472 | obj = struct.unpack_from(">q", self._buffer_view, self._buff_i)[0] | ||
| 473 | self._buff_i += 8 | ||
| 474 | elif b == 0xd4: # fixext 1 | ||
| 475 | typ = TYPE_EXT | ||
| 476 | if self._max_ext_len < 1: | ||
| 477 | raise UnpackValueError("%s exceeds max_ext_len(%s)" % (1, self._max_ext_len)) | ||
| 478 | self._reserve(2) | ||
| 479 | n, obj = struct.unpack_from("b1s", self._buffer_view, self._buff_i) | ||
| 480 | self._buff_i += 2 | ||
| 481 | elif b == 0xd5: # fixext 2 | ||
| 482 | typ = TYPE_EXT | ||
| 483 | if self._max_ext_len < 2: | ||
| 484 | raise UnpackValueError("%s exceeds max_ext_len(%s)" % (2, self._max_ext_len)) | ||
| 485 | self._reserve(3) | ||
| 486 | n, obj = struct.unpack_from("b2s", self._buffer_view, self._buff_i) | ||
| 487 | self._buff_i += 3 | ||
| 488 | elif b == 0xd6: # fixext 4 | ||
| 489 | typ = TYPE_EXT | ||
| 490 | if self._max_ext_len < 4: | ||
| 491 | raise UnpackValueError("%s exceeds max_ext_len(%s)" % (4, self._max_ext_len)) | ||
| 492 | self._reserve(5) | ||
| 493 | n, obj = struct.unpack_from("b4s", self._buffer_view, self._buff_i) | ||
| 494 | self._buff_i += 5 | ||
| 495 | elif b == 0xd7: # fixext 8 | ||
| 496 | typ = TYPE_EXT | ||
| 497 | if self._max_ext_len < 8: | ||
| 498 | raise UnpackValueError("%s exceeds max_ext_len(%s)" % (8, self._max_ext_len)) | ||
| 499 | self._reserve(9) | ||
| 500 | n, obj = struct.unpack_from("b8s", self._buffer_view, self._buff_i) | ||
| 501 | self._buff_i += 9 | ||
| 502 | elif b == 0xd8: # fixext 16 | ||
| 503 | typ = TYPE_EXT | ||
| 504 | if self._max_ext_len < 16: | ||
| 505 | raise UnpackValueError("%s exceeds max_ext_len(%s)" % (16, self._max_ext_len)) | ||
| 506 | self._reserve(17) | ||
| 507 | n, obj = struct.unpack_from("b16s", self._buffer_view, self._buff_i) | ||
| 508 | self._buff_i += 17 | ||
| 509 | elif b == 0xd9: | ||
| 510 | typ = TYPE_RAW | ||
| 511 | self._reserve(1) | ||
| 512 | n = self._buffer[self._buff_i] | ||
| 513 | self._buff_i += 1 | ||
| 514 | if n > self._max_str_len: | ||
| 515 | raise UnpackValueError("%s exceeds max_str_len(%s)" % (n, self._max_str_len)) | ||
| 516 | obj = self._read(n) | ||
| 517 | elif b == 0xda: | ||
| 518 | typ = TYPE_RAW | ||
| 519 | self._reserve(2) | ||
| 520 | n, = struct.unpack_from(">H", self._buffer_view, self._buff_i) | ||
| 521 | self._buff_i += 2 | ||
| 522 | if n > self._max_str_len: | ||
| 523 | raise UnpackValueError("%s exceeds max_str_len(%s)" % (n, self._max_str_len)) | ||
| 524 | obj = self._read(n) | ||
| 525 | elif b == 0xdb: | ||
| 526 | typ = TYPE_RAW | ||
| 527 | self._reserve(4) | ||
| 528 | n, = struct.unpack_from(">I", self._buffer_view, self._buff_i) | ||
| 529 | self._buff_i += 4 | ||
| 530 | if n > self._max_str_len: | ||
| 531 | raise UnpackValueError("%s exceeds max_str_len(%s)" % (n, self._max_str_len)) | ||
| 532 | obj = self._read(n) | ||
| 533 | elif b == 0xdc: | ||
| 534 | typ = TYPE_ARRAY | ||
| 535 | self._reserve(2) | ||
| 536 | n, = struct.unpack_from(">H", self._buffer_view, self._buff_i) | ||
| 537 | self._buff_i += 2 | ||
| 538 | if n > self._max_array_len: | ||
| 539 | raise UnpackValueError("%s exceeds max_array_len(%s)" % (n, self._max_array_len)) | ||
| 540 | elif b == 0xdd: | ||
| 541 | typ = TYPE_ARRAY | ||
| 542 | self._reserve(4) | ||
| 543 | n, = struct.unpack_from(">I", self._buffer_view, self._buff_i) | ||
| 544 | self._buff_i += 4 | ||
| 545 | if n > self._max_array_len: | ||
| 546 | raise UnpackValueError("%s exceeds max_array_len(%s)" % (n, self._max_array_len)) | ||
| 547 | elif b == 0xde: | ||
| 548 | self._reserve(2) | ||
| 549 | n, = struct.unpack_from(">H", self._buffer_view, self._buff_i) | ||
| 550 | self._buff_i += 2 | ||
| 551 | if n > self._max_map_len: | ||
| 552 | raise UnpackValueError("%s exceeds max_map_len(%s)" % (n, self._max_map_len)) | ||
| 553 | typ = TYPE_MAP | ||
| 554 | elif b == 0xdf: | ||
| 555 | self._reserve(4) | ||
| 556 | n, = struct.unpack_from(">I", self._buffer_view, self._buff_i) | ||
| 557 | self._buff_i += 4 | ||
| 558 | if n > self._max_map_len: | ||
| 559 | raise UnpackValueError("%s exceeds max_map_len(%s)" % (n, self._max_map_len)) | ||
| 560 | typ = TYPE_MAP | ||
| 561 | else: | ||
| 562 | raise UnpackValueError("Unknown header: 0x%x" % b) | ||
| 563 | return typ, n, obj | ||
| 564 | |||
| 565 | def _unpack(self, execute=EX_CONSTRUCT): | ||
| 566 | typ, n, obj = self._read_header(execute) | ||
| 567 | |||
| 568 | if execute == EX_READ_ARRAY_HEADER: | ||
| 569 | if typ != TYPE_ARRAY: | ||
| 570 | raise UnpackValueError("Expected array") | ||
| 571 | return n | ||
| 572 | if execute == EX_READ_MAP_HEADER: | ||
| 573 | if typ != TYPE_MAP: | ||
| 574 | raise UnpackValueError("Expected map") | ||
| 575 | return n | ||
| 576 | # TODO should we eliminate the recursion? | ||
| 577 | if typ == TYPE_ARRAY: | ||
| 578 | if execute == EX_SKIP: | ||
| 579 | for i in xrange(n): | ||
| 580 | # TODO check whether we need to call `list_hook` | ||
| 581 | self._unpack(EX_SKIP) | ||
| 582 | return | ||
| 583 | ret = newlist_hint(n) | ||
| 584 | for i in xrange(n): | ||
| 585 | ret.append(self._unpack(EX_CONSTRUCT)) | ||
| 586 | if self._list_hook is not None: | ||
| 587 | ret = self._list_hook(ret) | ||
| 588 | # TODO is the interaction between `list_hook` and `use_list` ok? | ||
| 589 | return ret if self._use_list else tuple(ret) | ||
| 590 | if typ == TYPE_MAP: | ||
| 591 | if execute == EX_SKIP: | ||
| 592 | for i in xrange(n): | ||
| 593 | # TODO check whether we need to call hooks | ||
| 594 | self._unpack(EX_SKIP) | ||
| 595 | self._unpack(EX_SKIP) | ||
| 596 | return | ||
| 597 | if self._object_pairs_hook is not None: | ||
| 598 | ret = self._object_pairs_hook( | ||
| 599 | (self._unpack(EX_CONSTRUCT), | ||
| 600 | self._unpack(EX_CONSTRUCT)) | ||
| 601 | for _ in xrange(n)) | ||
| 602 | else: | ||
| 603 | ret = {} | ||
| 604 | for _ in xrange(n): | ||
| 605 | key = self._unpack(EX_CONSTRUCT) | ||
| 606 | ret[key] = self._unpack(EX_CONSTRUCT) | ||
| 607 | if self._object_hook is not None: | ||
| 608 | ret = self._object_hook(ret) | ||
| 609 | return ret | ||
| 610 | if execute == EX_SKIP: | ||
| 611 | return | ||
| 612 | if typ == TYPE_RAW: | ||
| 613 | if self._encoding is not None: | ||
| 614 | obj = obj.decode(self._encoding, self._unicode_errors) | ||
| 615 | elif self._raw: | ||
| 616 | obj = bytes(obj) | ||
| 617 | else: | ||
| 618 | obj = obj.decode('utf_8') | ||
| 619 | return obj | ||
| 620 | if typ == TYPE_EXT: | ||
| 621 | return self._ext_hook(n, bytes(obj)) | ||
| 622 | if typ == TYPE_BIN: | ||
| 623 | return bytes(obj) | ||
| 624 | assert typ == TYPE_IMMEDIATE | ||
| 625 | return obj | ||
| 626 | |||
| 627 | def __iter__(self): | ||
| 628 | return self | ||
| 629 | |||
| 630 | def __next__(self): | ||
| 631 | try: | ||
| 632 | ret = self._unpack(EX_CONSTRUCT) | ||
| 633 | self._consume() | ||
| 634 | return ret | ||
| 635 | except OutOfData: | ||
| 636 | self._consume() | ||
| 637 | raise StopIteration | ||
| 638 | |||
| 639 | next = __next__ | ||
| 640 | |||
| 641 | def skip(self, write_bytes=None): | ||
| 642 | self._unpack(EX_SKIP) | ||
| 643 | if write_bytes is not None: | ||
| 644 | warnings.warn("`write_bytes` option is deprecated. Use `.tell()` instead.", DeprecationWarning) | ||
| 645 | write_bytes(self._buffer[self._buf_checkpoint:self._buff_i]) | ||
| 646 | self._consume() | ||
| 647 | |||
| 648 | def unpack(self, write_bytes=None): | ||
| 649 | ret = self._unpack(EX_CONSTRUCT) | ||
| 650 | if write_bytes is not None: | ||
| 651 | warnings.warn("`write_bytes` option is deprecated. Use `.tell()` instead.", DeprecationWarning) | ||
| 652 | write_bytes(self._buffer[self._buf_checkpoint:self._buff_i]) | ||
| 653 | self._consume() | ||
| 654 | return ret | ||
| 655 | |||
| 656 | def read_array_header(self, write_bytes=None): | ||
| 657 | ret = self._unpack(EX_READ_ARRAY_HEADER) | ||
| 658 | if write_bytes is not None: | ||
| 659 | warnings.warn("`write_bytes` option is deprecated. Use `.tell()` instead.", DeprecationWarning) | ||
| 660 | write_bytes(self._buffer[self._buf_checkpoint:self._buff_i]) | ||
| 661 | self._consume() | ||
| 662 | return ret | ||
| 663 | |||
| 664 | def read_map_header(self, write_bytes=None): | ||
| 665 | ret = self._unpack(EX_READ_MAP_HEADER) | ||
| 666 | if write_bytes is not None: | ||
| 667 | warnings.warn("`write_bytes` option is deprecated. Use `.tell()` instead.", DeprecationWarning) | ||
| 668 | write_bytes(self._buffer[self._buf_checkpoint:self._buff_i]) | ||
| 669 | self._consume() | ||
| 670 | return ret | ||
| 671 | |||
| 672 | def tell(self): | ||
| 673 | return self._stream_offset | ||
| 674 | |||
| 675 | |||
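A minimal usage sketch for the streaming Unpacker above, assuming pip's
vendored msgpack 0.5.x import path (the standalone `msgpack` package exposes
the same names); `raw=False` makes raw/str payloads decode to unicode:

    from pip._vendor.msgpack import Packer, Unpacker

    packer = Packer(use_bin_type=True)
    buf = packer.pack({"a": 1}) + packer.pack([1, 2, 3])

    unpacker = Unpacker(raw=False)
    unpacker.feed(buf)        # bytes may arrive in arbitrary chunks
    for obj in unpacker:      # __next__ decodes one complete object at a time
        print(obj)            # {'a': 1} then [1, 2, 3]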
| 676 | class Packer(object): | ||
| 677 | """ | ||
| 678 | MessagePack Packer | ||
| 679 | |||
| 680 | usage: | ||
| 681 | |||
| 682 | packer = Packer() | ||
| 683 | astream.write(packer.pack(a)) | ||
| 684 | astream.write(packer.pack(b)) | ||
| 685 | |||
| 686 | Packer's constructor has some keyword arguments: | ||
| 687 | |||
| 688 | :param callable default: | ||
| 689 | Convert a user type to a builtin type that Packer supports. | ||
| 690 | See also simplejson's documentation. | ||
| 691 | |||
| 692 | :param bool use_single_float: | ||
| 693 | Use single precision float type for float. (default: False) | ||
| 694 | |||
| 695 | :param bool autoreset: | ||
| 696 | Reset buffer after each pack and return its content as `bytes`. (default: True). | ||
| 697 | If set to false, use `bytes()` to get content and `.reset()` to clear the buffer. | ||
| 698 | |||
| 699 | :param bool use_bin_type: | ||
| 700 | Use bin type introduced in msgpack spec 2.0 for bytes. | ||
| 701 | It also enables str8 type for unicode. | ||
| 702 | |||
| 703 | :param bool strict_types: | ||
| 704 | If set to true, types will be checked to be exact. Derived classes | ||
| 705 | from serializable types will not be serialized and will be | ||
| 706 | treated as an unsupported type and forwarded to default. | ||
| 707 | Additionally, tuples will not be serialized as lists. | ||
| 708 | This is useful when trying to implement accurate serialization | ||
| 709 | for Python types. | ||
| 710 | |||
| 711 | :param str encoding: | ||
| 712 | (deprecated) Convert unicode to bytes with this encoding. (default: 'utf-8') | ||
| 713 | |||
| 714 | :param str unicode_errors: | ||
| 715 | Error handler for encoding unicode. (default: 'strict') | ||
| 716 | """ | ||
| 717 | def __init__(self, default=None, encoding=None, unicode_errors=None, | ||
| 718 | use_single_float=False, autoreset=True, use_bin_type=False, | ||
| 719 | strict_types=False): | ||
| 720 | if encoding is None: | ||
| 721 | encoding = 'utf_8' | ||
| 722 | else: | ||
| 723 | warnings.warn( | ||
| 724 | "encoding is deprecated, Use raw=False instead.", | ||
| 725 | PendingDeprecationWarning) | ||
| 726 | |||
| 727 | if unicode_errors is None: | ||
| 728 | unicode_errors = 'strict' | ||
| 729 | |||
| 730 | self._strict_types = strict_types | ||
| 731 | self._use_float = use_single_float | ||
| 732 | self._autoreset = autoreset | ||
| 733 | self._use_bin_type = use_bin_type | ||
| 734 | self._encoding = encoding | ||
| 735 | self._unicode_errors = unicode_errors | ||
| 736 | self._buffer = StringIO() | ||
| 737 | if default is not None: | ||
| 738 | if not callable(default): | ||
| 739 | raise TypeError("default must be callable") | ||
| 740 | self._default = default | ||
| 741 | |||
| 742 | def _pack(self, obj, nest_limit=DEFAULT_RECURSE_LIMIT, | ||
| 743 | check=isinstance, check_type_strict=_check_type_strict): | ||
| 744 | default_used = False | ||
| 745 | if self._strict_types: | ||
| 746 | check = check_type_strict | ||
| 747 | list_types = list | ||
| 748 | else: | ||
| 749 | list_types = (list, tuple) | ||
| 750 | while True: | ||
| 751 | if nest_limit < 0: | ||
| 752 | raise PackValueError("recursion limit exceeded") | ||
| 753 | if obj is None: | ||
| 754 | return self._buffer.write(b"\xc0") | ||
| 755 | if check(obj, bool): | ||
| 756 | if obj: | ||
| 757 | return self._buffer.write(b"\xc3") | ||
| 758 | return self._buffer.write(b"\xc2") | ||
| 759 | if check(obj, int_types): | ||
| 760 | if 0 <= obj < 0x80: | ||
| 761 | return self._buffer.write(struct.pack("B", obj)) | ||
| 762 | if -0x20 <= obj < 0: | ||
| 763 | return self._buffer.write(struct.pack("b", obj)) | ||
| 764 | if 0x80 <= obj <= 0xff: | ||
| 765 | return self._buffer.write(struct.pack("BB", 0xcc, obj)) | ||
| 766 | if -0x80 <= obj < 0: | ||
| 767 | return self._buffer.write(struct.pack(">Bb", 0xd0, obj)) | ||
| 768 | if 0xff < obj <= 0xffff: | ||
| 769 | return self._buffer.write(struct.pack(">BH", 0xcd, obj)) | ||
| 770 | if -0x8000 <= obj < -0x80: | ||
| 771 | return self._buffer.write(struct.pack(">Bh", 0xd1, obj)) | ||
| 772 | if 0xffff < obj <= 0xffffffff: | ||
| 773 | return self._buffer.write(struct.pack(">BI", 0xce, obj)) | ||
| 774 | if -0x80000000 <= obj < -0x8000: | ||
| 775 | return self._buffer.write(struct.pack(">Bi", 0xd2, obj)) | ||
| 776 | if 0xffffffff < obj <= 0xffffffffffffffff: | ||
| 777 | return self._buffer.write(struct.pack(">BQ", 0xcf, obj)) | ||
| 778 | if -0x8000000000000000 <= obj < -0x80000000: | ||
| 779 | return self._buffer.write(struct.pack(">Bq", 0xd3, obj)) | ||
| 780 | if not default_used and self._default is not None: | ||
| 781 | obj = self._default(obj) | ||
| 782 | default_used = True | ||
| 783 | continue | ||
| 784 | raise PackOverflowError("Integer value out of range") | ||
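# Editorial illustration (assuming CPython struct semantics): the ladder
# above always picks the smallest MessagePack integer encoding, e.g.
#   5    -> b"\x05"          (positive fixint)
#   -1   -> b"\xff"          (negative fixint)
#   200  -> b"\xcc\xc8"      (uint 8)
#   -200 -> b"\xd1\xff\x38"  (int 16)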
| 785 | if check(obj, (bytes, bytearray)): | ||
| 786 | n = len(obj) | ||
| 787 | if n >= 2**32: | ||
| 788 | raise PackValueError("%s is too large" % type(obj).__name__) | ||
| 789 | self._pack_bin_header(n) | ||
| 790 | return self._buffer.write(obj) | ||
| 791 | if check(obj, Unicode): | ||
| 792 | if self._encoding is None: | ||
| 793 | raise TypeError( | ||
| 794 | "Can't encode unicode string: " | ||
| 795 | "no encoding is specified") | ||
| 796 | obj = obj.encode(self._encoding, self._unicode_errors) | ||
| 797 | n = len(obj) | ||
| 798 | if n >= 2**32: | ||
| 799 | raise PackValueError("String is too large") | ||
| 800 | self._pack_raw_header(n) | ||
| 801 | return self._buffer.write(obj) | ||
| 802 | if check(obj, memoryview): | ||
| 803 | n = len(obj) * obj.itemsize | ||
| 804 | if n >= 2**32: | ||
| 805 | raise PackValueError("Memoryview is too large") | ||
| 806 | self._pack_bin_header(n) | ||
| 807 | return self._buffer.write(obj) | ||
| 808 | if check(obj, float): | ||
| 809 | if self._use_float: | ||
| 810 | return self._buffer.write(struct.pack(">Bf", 0xca, obj)) | ||
| 811 | return self._buffer.write(struct.pack(">Bd", 0xcb, obj)) | ||
| 812 | if check(obj, ExtType): | ||
| 813 | code = obj.code | ||
| 814 | data = obj.data | ||
| 815 | assert isinstance(code, int) | ||
| 816 | assert isinstance(data, bytes) | ||
| 817 | L = len(data) | ||
| 818 | if L == 1: | ||
| 819 | self._buffer.write(b'\xd4') | ||
| 820 | elif L == 2: | ||
| 821 | self._buffer.write(b'\xd5') | ||
| 822 | elif L == 4: | ||
| 823 | self._buffer.write(b'\xd6') | ||
| 824 | elif L == 8: | ||
| 825 | self._buffer.write(b'\xd7') | ||
| 826 | elif L == 16: | ||
| 827 | self._buffer.write(b'\xd8') | ||
| 828 | elif L <= 0xff: | ||
| 829 | self._buffer.write(struct.pack(">BB", 0xc7, L)) | ||
| 830 | elif L <= 0xffff: | ||
| 831 | self._buffer.write(struct.pack(">BH", 0xc8, L)) | ||
| 832 | else: | ||
| 833 | self._buffer.write(struct.pack(">BI", 0xc9, L)) | ||
| 834 | self._buffer.write(struct.pack("b", code)) | ||
| 835 | self._buffer.write(data) | ||
| 836 | return | ||
| 837 | if check(obj, list_types): | ||
| 838 | n = len(obj) | ||
| 839 | self._pack_array_header(n) | ||
| 840 | for i in xrange(n): | ||
| 841 | self._pack(obj[i], nest_limit - 1) | ||
| 842 | return | ||
| 843 | if check(obj, dict): | ||
| 844 | return self._pack_map_pairs(len(obj), dict_iteritems(obj), | ||
| 845 | nest_limit - 1) | ||
| 846 | if not default_used and self._default is not None: | ||
| 847 | obj = self._default(obj) | ||
| 848 | default_used = 1 | ||
| 849 | continue | ||
| 850 | raise TypeError("Cannot serialize %r" % (obj, )) | ||
| 851 | |||
| 852 | def pack(self, obj): | ||
| 853 | try: | ||
| 854 | self._pack(obj) | ||
| 855 | except: | ||
| 856 | self._buffer = StringIO() # force reset | ||
| 857 | raise | ||
| 858 | ret = self._buffer.getvalue() | ||
| 859 | if self._autoreset: | ||
| 860 | self._buffer = StringIO() | ||
| 861 | elif USING_STRINGBUILDER: | ||
| 862 | self._buffer = StringIO(ret) | ||
| 863 | return ret | ||
| 864 | |||
| 865 | def pack_map_pairs(self, pairs): | ||
| 866 | self._pack_map_pairs(len(pairs), pairs) | ||
| 867 | ret = self._buffer.getvalue() | ||
| 868 | if self._autoreset: | ||
| 869 | self._buffer = StringIO() | ||
| 870 | elif USING_STRINGBUILDER: | ||
| 871 | self._buffer = StringIO(ret) | ||
| 872 | return ret | ||
| 873 | |||
| 874 | def pack_array_header(self, n): | ||
| 875 | if n >= 2**32: | ||
| 876 | raise PackValueError("Array is too large") | ||
| 877 | self._pack_array_header(n) | ||
| 878 | ret = self._buffer.getvalue() | ||
| 879 | if self._autoreset: | ||
| 880 | self._buffer = StringIO() | ||
| 881 | elif USING_STRINGBUILDER: | ||
| 882 | self._buffer = StringIO(ret) | ||
| 883 | return ret | ||
| 884 | |||
| 885 | def pack_map_header(self, n): | ||
| 886 | if n >= 2**32: | ||
| 887 | raise PackValueError("Dict is too large") | ||
| 888 | self._pack_map_header(n) | ||
| 889 | ret = self._buffer.getvalue() | ||
| 890 | if self._autoreset: | ||
| 891 | self._buffer = StringIO() | ||
| 892 | elif USING_STRINGBUILDER: | ||
| 893 | self._buffer = StringIO(ret) | ||
| 894 | return ret | ||
| 895 | |||
| 896 | def pack_ext_type(self, typecode, data): | ||
| 897 | if not isinstance(typecode, int): | ||
| 898 | raise TypeError("typecode must have int type.") | ||
| 899 | if not 0 <= typecode <= 127: | ||
| 900 | raise ValueError("typecode should be 0-127") | ||
| 901 | if not isinstance(data, bytes): | ||
| 902 | raise TypeError("data must have bytes type") | ||
| 903 | L = len(data) | ||
| 904 | if L > 0xffffffff: | ||
| 905 | raise PackValueError("Too large data") | ||
| 906 | if L == 1: | ||
| 907 | self._buffer.write(b'\xd4') | ||
| 908 | elif L == 2: | ||
| 909 | self._buffer.write(b'\xd5') | ||
| 910 | elif L == 4: | ||
| 911 | self._buffer.write(b'\xd6') | ||
| 912 | elif L == 8: | ||
| 913 | self._buffer.write(b'\xd7') | ||
| 914 | elif L == 16: | ||
| 915 | self._buffer.write(b'\xd8') | ||
| 916 | elif L <= 0xff: | ||
| 917 | self._buffer.write(b'\xc7' + struct.pack('B', L)) | ||
| 918 | elif L <= 0xffff: | ||
| 919 | self._buffer.write(b'\xc8' + struct.pack('>H', L)) | ||
| 920 | else: | ||
| 921 | self._buffer.write(b'\xc9' + struct.pack('>I', L)) | ||
| 922 | self._buffer.write(struct.pack('B', typecode)) | ||
| 923 | self._buffer.write(data) | ||
| 924 | |||
| 925 | def _pack_array_header(self, n): | ||
| 926 | if n <= 0x0f: | ||
| 927 | return self._buffer.write(struct.pack('B', 0x90 + n)) | ||
| 928 | if n <= 0xffff: | ||
| 929 | return self._buffer.write(struct.pack(">BH", 0xdc, n)) | ||
| 930 | if n <= 0xffffffff: | ||
| 931 | return self._buffer.write(struct.pack(">BI", 0xdd, n)) | ||
| 932 | raise PackValueError("Array is too large") | ||
| 933 | |||
| 934 | def _pack_map_header(self, n): | ||
| 935 | if n <= 0x0f: | ||
| 936 | return self._buffer.write(struct.pack('B', 0x80 + n)) | ||
| 937 | if n <= 0xffff: | ||
| 938 | return self._buffer.write(struct.pack(">BH", 0xde, n)) | ||
| 939 | if n <= 0xffffffff: | ||
| 940 | return self._buffer.write(struct.pack(">BI", 0xdf, n)) | ||
| 941 | raise PackValueError("Dict is too large") | ||
| 942 | |||
| 943 | def _pack_map_pairs(self, n, pairs, nest_limit=DEFAULT_RECURSE_LIMIT): | ||
| 944 | self._pack_map_header(n) | ||
| 945 | for (k, v) in pairs: | ||
| 946 | self._pack(k, nest_limit - 1) | ||
| 947 | self._pack(v, nest_limit - 1) | ||
| 948 | |||
| 949 | def _pack_raw_header(self, n): | ||
| 950 | if n <= 0x1f: | ||
| 951 | self._buffer.write(struct.pack('B', 0xa0 + n)) | ||
| 952 | elif self._use_bin_type and n <= 0xff: | ||
| 953 | self._buffer.write(struct.pack('>BB', 0xd9, n)) | ||
| 954 | elif n <= 0xffff: | ||
| 955 | self._buffer.write(struct.pack(">BH", 0xda, n)) | ||
| 956 | elif n <= 0xffffffff: | ||
| 957 | self._buffer.write(struct.pack(">BI", 0xdb, n)) | ||
| 958 | else: | ||
| 959 | raise PackValueError('Raw is too large') | ||
| 960 | |||
| 961 | def _pack_bin_header(self, n): | ||
| 962 | if not self._use_bin_type: | ||
| 963 | return self._pack_raw_header(n) | ||
| 964 | elif n <= 0xff: | ||
| 965 | return self._buffer.write(struct.pack('>BB', 0xc4, n)) | ||
| 966 | elif n <= 0xffff: | ||
| 967 | return self._buffer.write(struct.pack(">BH", 0xc5, n)) | ||
| 968 | elif n <= 0xffffffff: | ||
| 969 | return self._buffer.write(struct.pack(">BI", 0xc6, n)) | ||
| 970 | else: | ||
| 971 | raise PackValueError('Bin is too large') | ||
| 972 | |||
| 973 | def bytes(self): | ||
| 974 | return self._buffer.getvalue() | ||
| 975 | |||
| 976 | def reset(self): | ||
| 977 | self._buffer = StringIO() | ||
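A short round-trip sketch for the ext-type handling above, assuming the
vendored msgpack 0.5.x API (`ExtType` and the Unpacker's `ext_hook` callback);
type code 42 is an arbitrary, application-chosen value:

    from pip._vendor.msgpack import ExtType, Packer, Unpacker

    data = Packer().pack(ExtType(42, b"\x01\x02\x03\x04"))  # fixext 4, 0xd6

    def ext_hook(code, payload):
        # unwrap our application code; leave unknown codes wrapped as ExtType
        return payload if code == 42 else ExtType(code, payload)

    unpacker = Unpacker(ext_hook=ext_hook)
    unpacker.feed(data)
    print(unpacker.unpack())  # b'\x01\x02\x03\x04'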
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/packaging/__about__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/packaging/__about__.py new file mode 100644 index 0000000..bb79fb7 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/packaging/__about__.py | |||
| @@ -0,0 +1,21 @@ | |||
| 1 | # This file is dual licensed under the terms of the Apache License, Version | ||
| 2 | # 2.0, and the BSD License. See the LICENSE file in the root of this repository | ||
| 3 | # for complete details. | ||
| 4 | from __future__ import absolute_import, division, print_function | ||
| 5 | |||
| 6 | __all__ = [ | ||
| 7 | "__title__", "__summary__", "__uri__", "__version__", "__author__", | ||
| 8 | "__email__", "__license__", "__copyright__", | ||
| 9 | ] | ||
| 10 | |||
| 11 | __title__ = "packaging" | ||
| 12 | __summary__ = "Core utilities for Python packages" | ||
| 13 | __uri__ = "https://github.com/pypa/packaging" | ||
| 14 | |||
| 15 | __version__ = "17.1" | ||
| 16 | |||
| 17 | __author__ = "Donald Stufft and individual contributors" | ||
| 18 | __email__ = "donald@stufft.io" | ||
| 19 | |||
| 20 | __license__ = "BSD or Apache License, Version 2.0" | ||
| 21 | __copyright__ = "Copyright 2014-2016 %s" % __author__ | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/packaging/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/packaging/__init__.py new file mode 100644 index 0000000..e520d35 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/packaging/__init__.py | |||
| @@ -0,0 +1,14 @@ | |||
| 1 | # This file is dual licensed under the terms of the Apache License, Version | ||
| 2 | # 2.0, and the BSD License. See the LICENSE file in the root of this repository | ||
| 3 | # for complete details. | ||
| 4 | from __future__ import absolute_import, division, print_function | ||
| 5 | |||
| 6 | from .__about__ import ( | ||
| 7 | __author__, __copyright__, __email__, __license__, __summary__, __title__, | ||
| 8 | __uri__, __version__ | ||
| 9 | ) | ||
| 10 | |||
| 11 | __all__ = [ | ||
| 12 | "__title__", "__summary__", "__uri__", "__version__", "__author__", | ||
| 13 | "__email__", "__license__", "__copyright__", | ||
| 14 | ] | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/packaging/_compat.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/packaging/_compat.py new file mode 100644 index 0000000..6daa860 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/packaging/_compat.py | |||
| @@ -0,0 +1,30 @@ | |||
| 1 | # This file is dual licensed under the terms of the Apache License, Version | ||
| 2 | # 2.0, and the BSD License. See the LICENSE file in the root of this repository | ||
| 3 | # for complete details. | ||
| 4 | from __future__ import absolute_import, division, print_function | ||
| 5 | |||
| 6 | import sys | ||
| 7 | |||
| 8 | |||
| 9 | PY2 = sys.version_info[0] == 2 | ||
| 10 | PY3 = sys.version_info[0] == 3 | ||
| 11 | |||
| 12 | # flake8: noqa | ||
| 13 | |||
| 14 | if PY3: | ||
| 15 | string_types = str, | ||
| 16 | else: | ||
| 17 | string_types = basestring, | ||
| 18 | |||
| 19 | |||
| 20 | def with_metaclass(meta, *bases): | ||
| 21 | """ | ||
| 22 | Create a base class with a metaclass. | ||
| 23 | """ | ||
| 24 | # This requires a bit of explanation: the basic idea is to make a dummy | ||
| 25 | # metaclass for one level of class instantiation that replaces itself with | ||
| 26 | # the actual metaclass. | ||
| 27 | class metaclass(meta): | ||
| 28 | def __new__(cls, name, this_bases, d): | ||
| 29 | return meta(name, bases, d) | ||
| 30 | return type.__new__(metaclass, 'temporary_class', (), {}) | ||
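A small sketch of how `with_metaclass` is typically used, e.g. to declare an
abstract base class that works on both Python 2 and 3 (this mirrors
`BaseSpecifier` in specifiers.py below; the class names are illustrative):

    import abc

    class Base(with_metaclass(abc.ABCMeta, object)):
        @abc.abstractmethod
        def render(self):
            """Subclasses must implement render()."""

    class Impl(Base):
        def render(self):
            return "ok"

    print(Impl().render())  # "ok"; instantiating Base itself raises TypeError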
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/packaging/_structures.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/packaging/_structures.py new file mode 100644 index 0000000..3f0c27f --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/packaging/_structures.py | |||
| @@ -0,0 +1,70 @@ | |||
| 1 | # This file is dual licensed under the terms of the Apache License, Version | ||
| 2 | # 2.0, and the BSD License. See the LICENSE file in the root of this repository | ||
| 3 | # for complete details. | ||
| 4 | from __future__ import absolute_import, division, print_function | ||
| 5 | |||
| 6 | |||
| 7 | class Infinity(object): | ||
| 8 | |||
| 9 | def __repr__(self): | ||
| 10 | return "Infinity" | ||
| 11 | |||
| 12 | def __hash__(self): | ||
| 13 | return hash(repr(self)) | ||
| 14 | |||
| 15 | def __lt__(self, other): | ||
| 16 | return False | ||
| 17 | |||
| 18 | def __le__(self, other): | ||
| 19 | return False | ||
| 20 | |||
| 21 | def __eq__(self, other): | ||
| 22 | return isinstance(other, self.__class__) | ||
| 23 | |||
| 24 | def __ne__(self, other): | ||
| 25 | return not isinstance(other, self.__class__) | ||
| 26 | |||
| 27 | def __gt__(self, other): | ||
| 28 | return True | ||
| 29 | |||
| 30 | def __ge__(self, other): | ||
| 31 | return True | ||
| 32 | |||
| 33 | def __neg__(self): | ||
| 34 | return NegativeInfinity | ||
| 35 | |||
| 36 | |||
| 37 | Infinity = Infinity() | ||
| 38 | |||
| 39 | |||
| 40 | class NegativeInfinity(object): | ||
| 41 | |||
| 42 | def __repr__(self): | ||
| 43 | return "-Infinity" | ||
| 44 | |||
| 45 | def __hash__(self): | ||
| 46 | return hash(repr(self)) | ||
| 47 | |||
| 48 | def __lt__(self, other): | ||
| 49 | return True | ||
| 50 | |||
| 51 | def __le__(self, other): | ||
| 52 | return True | ||
| 53 | |||
| 54 | def __eq__(self, other): | ||
| 55 | return isinstance(other, self.__class__) | ||
| 56 | |||
| 57 | def __ne__(self, other): | ||
| 58 | return not isinstance(other, self.__class__) | ||
| 59 | |||
| 60 | def __gt__(self, other): | ||
| 61 | return False | ||
| 62 | |||
| 63 | def __ge__(self, other): | ||
| 64 | return False | ||
| 65 | |||
| 66 | def __neg__(self): | ||
| 67 | return Infinity | ||
| 68 | |||
| 69 | |||
| 70 | NegativeInfinity = NegativeInfinity() | ||
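These two singletons serve as extreme sort sentinels: packaging's version
code substitutes them for absent pre-/post-/dev-release segments so that
comparison tuples order correctly. A minimal illustration (the mixed
comparisons work because the reflected operators above return plain bools):

    print(sorted([3, Infinity, -7, NegativeInfinity, 0]))
    # [-Infinity, -7, 0, 3, Infinity]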
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/packaging/markers.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/packaging/markers.py new file mode 100644 index 0000000..b4dc0b9 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/packaging/markers.py | |||
| @@ -0,0 +1,301 @@ | |||
| 1 | # This file is dual licensed under the terms of the Apache License, Version | ||
| 2 | # 2.0, and the BSD License. See the LICENSE file in the root of this repository | ||
| 3 | # for complete details. | ||
| 4 | from __future__ import absolute_import, division, print_function | ||
| 5 | |||
| 6 | import operator | ||
| 7 | import os | ||
| 8 | import platform | ||
| 9 | import sys | ||
| 10 | |||
| 11 | from pip._vendor.pyparsing import ParseException, ParseResults, stringStart, stringEnd | ||
| 12 | from pip._vendor.pyparsing import ZeroOrMore, Group, Forward, QuotedString | ||
| 13 | from pip._vendor.pyparsing import Literal as L # noqa | ||
| 14 | |||
| 15 | from ._compat import string_types | ||
| 16 | from .specifiers import Specifier, InvalidSpecifier | ||
| 17 | |||
| 18 | |||
| 19 | __all__ = [ | ||
| 20 | "InvalidMarker", "UndefinedComparison", "UndefinedEnvironmentName", | ||
| 21 | "Marker", "default_environment", | ||
| 22 | ] | ||
| 23 | |||
| 24 | |||
| 25 | class InvalidMarker(ValueError): | ||
| 26 | """ | ||
| 27 | An invalid marker was found; users should refer to PEP 508. | ||
| 28 | """ | ||
| 29 | |||
| 30 | |||
| 31 | class UndefinedComparison(ValueError): | ||
| 32 | """ | ||
| 33 | An invalid operation was attempted on a value that doesn't support it. | ||
| 34 | """ | ||
| 35 | |||
| 36 | |||
| 37 | class UndefinedEnvironmentName(ValueError): | ||
| 38 | """ | ||
| 39 | A name was used that does not exist inside of the evaluation | ||
| 40 | environment. | ||
| 41 | """ | ||
| 42 | |||
| 43 | |||
| 44 | class Node(object): | ||
| 45 | |||
| 46 | def __init__(self, value): | ||
| 47 | self.value = value | ||
| 48 | |||
| 49 | def __str__(self): | ||
| 50 | return str(self.value) | ||
| 51 | |||
| 52 | def __repr__(self): | ||
| 53 | return "<{0}({1!r})>".format(self.__class__.__name__, str(self)) | ||
| 54 | |||
| 55 | def serialize(self): | ||
| 56 | raise NotImplementedError | ||
| 57 | |||
| 58 | |||
| 59 | class Variable(Node): | ||
| 60 | |||
| 61 | def serialize(self): | ||
| 62 | return str(self) | ||
| 63 | |||
| 64 | |||
| 65 | class Value(Node): | ||
| 66 | |||
| 67 | def serialize(self): | ||
| 68 | return '"{0}"'.format(self) | ||
| 69 | |||
| 70 | |||
| 71 | class Op(Node): | ||
| 72 | |||
| 73 | def serialize(self): | ||
| 74 | return str(self) | ||
| 75 | |||
| 76 | |||
| 77 | VARIABLE = ( | ||
| 78 | L("implementation_version") | | ||
| 79 | L("platform_python_implementation") | | ||
| 80 | L("implementation_name") | | ||
| 81 | L("python_full_version") | | ||
| 82 | L("platform_release") | | ||
| 83 | L("platform_version") | | ||
| 84 | L("platform_machine") | | ||
| 85 | L("platform_system") | | ||
| 86 | L("python_version") | | ||
| 87 | L("sys_platform") | | ||
| 88 | L("os_name") | | ||
| 89 | L("os.name") | # PEP-345 | ||
| 90 | L("sys.platform") | # PEP-345 | ||
| 91 | L("platform.version") | # PEP-345 | ||
| 92 | L("platform.machine") | # PEP-345 | ||
| 93 | L("platform.python_implementation") | # PEP-345 | ||
| 94 | L("python_implementation") | # undocumented setuptools legacy | ||
| 95 | L("extra") | ||
| 96 | ) | ||
| 97 | ALIASES = { | ||
| 98 | 'os.name': 'os_name', | ||
| 99 | 'sys.platform': 'sys_platform', | ||
| 100 | 'platform.version': 'platform_version', | ||
| 101 | 'platform.machine': 'platform_machine', | ||
| 102 | 'platform.python_implementation': 'platform_python_implementation', | ||
| 103 | 'python_implementation': 'platform_python_implementation' | ||
| 104 | } | ||
| 105 | VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0]))) | ||
| 106 | |||
| 107 | VERSION_CMP = ( | ||
| 108 | L("===") | | ||
| 109 | L("==") | | ||
| 110 | L(">=") | | ||
| 111 | L("<=") | | ||
| 112 | L("!=") | | ||
| 113 | L("~=") | | ||
| 114 | L(">") | | ||
| 115 | L("<") | ||
| 116 | ) | ||
| 117 | |||
| 118 | MARKER_OP = VERSION_CMP | L("not in") | L("in") | ||
| 119 | MARKER_OP.setParseAction(lambda s, l, t: Op(t[0])) | ||
| 120 | |||
| 121 | MARKER_VALUE = QuotedString("'") | QuotedString('"') | ||
| 122 | MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0])) | ||
| 123 | |||
| 124 | BOOLOP = L("and") | L("or") | ||
| 125 | |||
| 126 | MARKER_VAR = VARIABLE | MARKER_VALUE | ||
| 127 | |||
| 128 | MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR) | ||
| 129 | MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0])) | ||
| 130 | |||
| 131 | LPAREN = L("(").suppress() | ||
| 132 | RPAREN = L(")").suppress() | ||
| 133 | |||
| 134 | MARKER_EXPR = Forward() | ||
| 135 | MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN) | ||
| 136 | MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR) | ||
| 137 | |||
| 138 | MARKER = stringStart + MARKER_EXPR + stringEnd | ||
| 139 | |||
| 140 | |||
| 141 | def _coerce_parse_result(results): | ||
| 142 | if isinstance(results, ParseResults): | ||
| 143 | return [_coerce_parse_result(i) for i in results] | ||
| 144 | else: | ||
| 145 | return results | ||
| 146 | |||
| 147 | |||
| 148 | def _format_marker(marker, first=True): | ||
| 149 | assert isinstance(marker, (list, tuple, string_types)) | ||
| 150 | |||
| 151 | # Sometimes we have a structure like [[...]] which is a single item list | ||
| 152 | # where the single item is itself its own list. In that case we want to skip | ||
| 153 | # the rest of this function so that we don't get extraneous () on the | ||
| 154 | # outside. | ||
| 155 | if (isinstance(marker, list) and len(marker) == 1 and | ||
| 156 | isinstance(marker[0], (list, tuple))): | ||
| 157 | return _format_marker(marker[0]) | ||
| 158 | |||
| 159 | if isinstance(marker, list): | ||
| 160 | inner = (_format_marker(m, first=False) for m in marker) | ||
| 161 | if first: | ||
| 162 | return " ".join(inner) | ||
| 163 | else: | ||
| 164 | return "(" + " ".join(inner) + ")" | ||
| 165 | elif isinstance(marker, tuple): | ||
| 166 | return " ".join([m.serialize() for m in marker]) | ||
| 167 | else: | ||
| 168 | return marker | ||
| 169 | |||
| 170 | |||
| 171 | _operators = { | ||
| 172 | "in": lambda lhs, rhs: lhs in rhs, | ||
| 173 | "not in": lambda lhs, rhs: lhs not in rhs, | ||
| 174 | "<": operator.lt, | ||
| 175 | "<=": operator.le, | ||
| 176 | "==": operator.eq, | ||
| 177 | "!=": operator.ne, | ||
| 178 | ">=": operator.ge, | ||
| 179 | ">": operator.gt, | ||
| 180 | } | ||
| 181 | |||
| 182 | |||
| 183 | def _eval_op(lhs, op, rhs): | ||
| 184 | try: | ||
| 185 | spec = Specifier("".join([op.serialize(), rhs])) | ||
| 186 | except InvalidSpecifier: | ||
| 187 | pass | ||
| 188 | else: | ||
| 189 | return spec.contains(lhs) | ||
| 190 | |||
| 191 | oper = _operators.get(op.serialize()) | ||
| 192 | if oper is None: | ||
| 193 | raise UndefinedComparison( | ||
| 194 | "Undefined {0!r} on {1!r} and {2!r}.".format(op, lhs, rhs) | ||
| 195 | ) | ||
| 196 | |||
| 197 | return oper(lhs, rhs) | ||
| 198 | |||
| 199 | |||
| 200 | _undefined = object() | ||
| 201 | |||
| 202 | |||
| 203 | def _get_env(environment, name): | ||
| 204 | value = environment.get(name, _undefined) | ||
| 205 | |||
| 206 | if value is _undefined: | ||
| 207 | raise UndefinedEnvironmentName( | ||
| 208 | "{0!r} does not exist in evaluation environment.".format(name) | ||
| 209 | ) | ||
| 210 | |||
| 211 | return value | ||
| 212 | |||
| 213 | |||
| 214 | def _evaluate_markers(markers, environment): | ||
| 215 | groups = [[]] | ||
| 216 | |||
| 217 | for marker in markers: | ||
| 218 | assert isinstance(marker, (list, tuple, string_types)) | ||
| 219 | |||
| 220 | if isinstance(marker, list): | ||
| 221 | groups[-1].append(_evaluate_markers(marker, environment)) | ||
| 222 | elif isinstance(marker, tuple): | ||
| 223 | lhs, op, rhs = marker | ||
| 224 | |||
| 225 | if isinstance(lhs, Variable): | ||
| 226 | lhs_value = _get_env(environment, lhs.value) | ||
| 227 | rhs_value = rhs.value | ||
| 228 | else: | ||
| 229 | lhs_value = lhs.value | ||
| 230 | rhs_value = _get_env(environment, rhs.value) | ||
| 231 | |||
| 232 | groups[-1].append(_eval_op(lhs_value, op, rhs_value)) | ||
| 233 | else: | ||
| 234 | assert marker in ["and", "or"] | ||
| 235 | if marker == "or": | ||
| 236 | groups.append([]) | ||
| 237 | |||
| 238 | return any(all(item) for item in groups) | ||
| 239 | |||
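The grouping above evaluates a marker list as an OR of AND-groups: every
"or" token starts a new group, and the final line reduces the groups with
any(all(...)). A hypothetical trace (truth values invented for illustration):

    # 'a and b or c'  ->  groups == [[a, b], [c]]  ->  (a and b) or c
    print(any(all(g) for g in [[True, False], [True]]))  # True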
| 240 | |||
| 241 | def format_full_version(info): | ||
| 242 | version = '{0.major}.{0.minor}.{0.micro}'.format(info) | ||
| 243 | kind = info.releaselevel | ||
| 244 | if kind != 'final': | ||
| 245 | version += kind[0] + str(info.serial) | ||
| 246 | return version | ||
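A quick check of the helper above using synthetic stand-ins for
sys.implementation.version (the namedtuple fields match CPython's):

    from collections import namedtuple

    VInfo = namedtuple("VInfo", "major minor micro releaselevel serial")
    print(format_full_version(VInfo(3, 7, 1, "final", 0)))      # 3.7.1
    print(format_full_version(VInfo(3, 8, 0, "candidate", 1)))  # 3.8.0c1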
| 247 | |||
| 248 | |||
| 249 | def default_environment(): | ||
| 250 | if hasattr(sys, 'implementation'): | ||
| 251 | iver = format_full_version(sys.implementation.version) | ||
| 252 | implementation_name = sys.implementation.name | ||
| 253 | else: | ||
| 254 | iver = '0' | ||
| 255 | implementation_name = '' | ||
| 256 | |||
| 257 | return { | ||
| 258 | "implementation_name": implementation_name, | ||
| 259 | "implementation_version": iver, | ||
| 260 | "os_name": os.name, | ||
| 261 | "platform_machine": platform.machine(), | ||
| 262 | "platform_release": platform.release(), | ||
| 263 | "platform_system": platform.system(), | ||
| 264 | "platform_version": platform.version(), | ||
| 265 | "python_full_version": platform.python_version(), | ||
| 266 | "platform_python_implementation": platform.python_implementation(), | ||
| 267 | "python_version": platform.python_version()[:3], | ||
| 268 | "sys_platform": sys.platform, | ||
| 269 | } | ||
| 270 | |||
| 271 | |||
| 272 | class Marker(object): | ||
| 273 | |||
| 274 | def __init__(self, marker): | ||
| 275 | try: | ||
| 276 | self._markers = _coerce_parse_result(MARKER.parseString(marker)) | ||
| 277 | except ParseException as e: | ||
| 278 | err_str = "Invalid marker: {0!r}, parse error at {1!r}".format( | ||
| 279 | marker, marker[e.loc:e.loc + 8]) | ||
| 280 | raise InvalidMarker(err_str) | ||
| 281 | |||
| 282 | def __str__(self): | ||
| 283 | return _format_marker(self._markers) | ||
| 284 | |||
| 285 | def __repr__(self): | ||
| 286 | return "<Marker({0!r})>".format(str(self)) | ||
| 287 | |||
| 288 | def evaluate(self, environment=None): | ||
| 289 | """Evaluate a marker. | ||
| 290 | |||
| 291 | Return the boolean from evaluating the given marker against the | ||
| 292 | environment. environment is an optional argument to override all or | ||
| 293 | part of the determined environment. | ||
| 294 | |||
| 295 | The environment is determined from the current Python process. | ||
| 296 | """ | ||
| 297 | current_environment = default_environment() | ||
| 298 | if environment is not None: | ||
| 299 | current_environment.update(environment) | ||
| 300 | |||
| 301 | return _evaluate_markers(self._markers, current_environment) | ||
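A brief usage sketch for the Marker class above (the import path assumes
pip's vendored copy of packaging):

    from pip._vendor.packaging.markers import Marker

    m = Marker('python_version >= "2.7" and os_name == "posix"')
    print(m.evaluate())                   # against the running interpreter
    print(m.evaluate({"os_name": "nt"}))  # override part of the environment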
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/packaging/requirements.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/packaging/requirements.py new file mode 100644 index 0000000..98bc507 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/packaging/requirements.py | |||
| @@ -0,0 +1,130 @@ | |||
| 1 | # This file is dual licensed under the terms of the Apache License, Version | ||
| 2 | # 2.0, and the BSD License. See the LICENSE file in the root of this repository | ||
| 3 | # for complete details. | ||
| 4 | from __future__ import absolute_import, division, print_function | ||
| 5 | |||
| 6 | import string | ||
| 7 | import re | ||
| 8 | |||
| 9 | from pip._vendor.pyparsing import stringStart, stringEnd, originalTextFor, ParseException | ||
| 10 | from pip._vendor.pyparsing import ZeroOrMore, Word, Optional, Regex, Combine | ||
| 11 | from pip._vendor.pyparsing import Literal as L # noqa | ||
| 12 | from pip._vendor.six.moves.urllib import parse as urlparse | ||
| 13 | |||
| 14 | from .markers import MARKER_EXPR, Marker | ||
| 15 | from .specifiers import LegacySpecifier, Specifier, SpecifierSet | ||
| 16 | |||
| 17 | |||
| 18 | class InvalidRequirement(ValueError): | ||
| 19 | """ | ||
| 20 | An invalid requirement was found; users should refer to PEP 508. | ||
| 21 | """ | ||
| 22 | |||
| 23 | |||
| 24 | ALPHANUM = Word(string.ascii_letters + string.digits) | ||
| 25 | |||
| 26 | LBRACKET = L("[").suppress() | ||
| 27 | RBRACKET = L("]").suppress() | ||
| 28 | LPAREN = L("(").suppress() | ||
| 29 | RPAREN = L(")").suppress() | ||
| 30 | COMMA = L(",").suppress() | ||
| 31 | SEMICOLON = L(";").suppress() | ||
| 32 | AT = L("@").suppress() | ||
| 33 | |||
| 34 | PUNCTUATION = Word("-_.") | ||
| 35 | IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM) | ||
| 36 | IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END)) | ||
| 37 | |||
| 38 | NAME = IDENTIFIER("name") | ||
| 39 | EXTRA = IDENTIFIER | ||
| 40 | |||
| 41 | URI = Regex(r'[^ ]+')("url") | ||
| 42 | URL = (AT + URI) | ||
| 43 | |||
| 44 | EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA) | ||
| 45 | EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras") | ||
| 46 | |||
| 47 | VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE) | ||
| 48 | VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE) | ||
| 49 | |||
| 50 | VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY | ||
| 51 | VERSION_MANY = Combine(VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE), | ||
| 52 | joinString=",", adjacent=False)("_raw_spec") | ||
| 53 | _VERSION_SPEC = Optional(((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY)) | ||
| 54 | _VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or '') | ||
| 55 | |||
| 56 | VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier") | ||
| 57 | VERSION_SPEC.setParseAction(lambda s, l, t: t[1]) | ||
| 58 | |||
| 59 | MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker") | ||
| 60 | MARKER_EXPR.setParseAction( | ||
| 61 | lambda s, l, t: Marker(s[t._original_start:t._original_end]) | ||
| 62 | ) | ||
| 63 | MARKER_SEPARATOR = SEMICOLON | ||
| 64 | MARKER = MARKER_SEPARATOR + MARKER_EXPR | ||
| 65 | |||
| 66 | VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER) | ||
| 67 | URL_AND_MARKER = URL + Optional(MARKER) | ||
| 68 | |||
| 69 | NAMED_REQUIREMENT = \ | ||
| 70 | NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER) | ||
| 71 | |||
| 72 | REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd | ||
| 73 | # pyparsing isn't thread safe during initialization, so we do it eagerly, see | ||
| 74 | # issue #104 | ||
| 75 | REQUIREMENT.parseString("x[]") | ||
| 76 | |||
| 77 | |||
| 78 | class Requirement(object): | ||
| 79 | """Parse a requirement. | ||
| 80 | |||
| 81 | Parse a given requirement string into its parts, such as name, specifier, | ||
| 82 | URL, and extras. Raises InvalidRequirement on a badly-formed requirement | ||
| 83 | string. | ||
| 84 | """ | ||
| 85 | |||
| 86 | # TODO: Can we test whether something is contained within a requirement? | ||
| 87 | # If so how do we do that? Do we need to test against the _name_ of | ||
| 88 | # the thing as well as the version? What about the markers? | ||
| 89 | # TODO: Can we normalize the name and extra name? | ||
| 90 | |||
| 91 | def __init__(self, requirement_string): | ||
| 92 | try: | ||
| 93 | req = REQUIREMENT.parseString(requirement_string) | ||
| 94 | except ParseException as e: | ||
| 95 | raise InvalidRequirement( | ||
| 96 | "Invalid requirement, parse error at \"{0!r}\"".format( | ||
| 97 | requirement_string[e.loc:e.loc + 8])) | ||
| 98 | |||
| 99 | self.name = req.name | ||
| 100 | if req.url: | ||
| 101 | parsed_url = urlparse.urlparse(req.url) | ||
| 102 | if not (parsed_url.scheme and parsed_url.netloc): | ||
| 104 | raise InvalidRequirement("Invalid URL given") | ||
| 105 | self.url = req.url | ||
| 106 | else: | ||
| 107 | self.url = None | ||
| 108 | self.extras = set(req.extras.asList() if req.extras else []) | ||
| 109 | self.specifier = SpecifierSet(req.specifier) | ||
| 110 | self.marker = req.marker if req.marker else None | ||
| 111 | |||
| 112 | def __str__(self): | ||
| 113 | parts = [self.name] | ||
| 114 | |||
| 115 | if self.extras: | ||
| 116 | parts.append("[{0}]".format(",".join(sorted(self.extras)))) | ||
| 117 | |||
| 118 | if self.specifier: | ||
| 119 | parts.append(str(self.specifier)) | ||
| 120 | |||
| 121 | if self.url: | ||
| 122 | parts.append("@ {0}".format(self.url)) | ||
| 123 | |||
| 124 | if self.marker: | ||
| 125 | parts.append("; {0}".format(self.marker)) | ||
| 126 | |||
| 127 | return "".join(parts) | ||
| 128 | |||
| 129 | def __repr__(self): | ||
| 130 | return "<Requirement({0!r})>".format(str(self)) | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/packaging/specifiers.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/packaging/specifiers.py new file mode 100644 index 0000000..7d2fe4c --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/packaging/specifiers.py | |||
| @@ -0,0 +1,774 @@ | |||
| 1 | # This file is dual licensed under the terms of the Apache License, Version | ||
| 2 | # 2.0, and the BSD License. See the LICENSE file in the root of this repository | ||
| 3 | # for complete details. | ||
| 4 | from __future__ import absolute_import, division, print_function | ||
| 5 | |||
| 6 | import abc | ||
| 7 | import functools | ||
| 8 | import itertools | ||
| 9 | import re | ||
| 10 | |||
| 11 | from ._compat import string_types, with_metaclass | ||
| 12 | from .version import Version, LegacyVersion, parse | ||
| 13 | |||
| 14 | |||
| 15 | class InvalidSpecifier(ValueError): | ||
| 16 | """ | ||
| 17 | An invalid specifier was found; users should refer to PEP 440. | ||
| 18 | """ | ||
| 19 | |||
| 20 | |||
| 21 | class BaseSpecifier(with_metaclass(abc.ABCMeta, object)): | ||
| 22 | |||
| 23 | @abc.abstractmethod | ||
| 24 | def __str__(self): | ||
| 25 | """ | ||
| 26 | Returns the str representation of this Specifier-like object. This | ||
| 27 | should be representative of the Specifier itself. | ||
| 28 | """ | ||
| 29 | |||
| 30 | @abc.abstractmethod | ||
| 31 | def __hash__(self): | ||
| 32 | """ | ||
| 33 | Returns a hash value for this Specifier like object. | ||
| 34 | """ | ||
| 35 | |||
| 36 | @abc.abstractmethod | ||
| 37 | def __eq__(self, other): | ||
| 38 | """ | ||
| 39 | Returns a boolean representing whether or not the two Specifier-like | ||
| 40 | objects are equal. | ||
| 41 | """ | ||
| 42 | |||
| 43 | @abc.abstractmethod | ||
| 44 | def __ne__(self, other): | ||
| 45 | """ | ||
| 46 | Returns a boolean representing whether or not the two Specifier-like | ||
| 47 | objects are not equal. | ||
| 48 | """ | ||
| 49 | |||
| 50 | @abc.abstractproperty | ||
| 51 | def prereleases(self): | ||
| 52 | """ | ||
| 53 | Returns whether or not pre-releases as a whole are allowed by this | ||
| 54 | specifier. | ||
| 55 | """ | ||
| 56 | |||
| 57 | @prereleases.setter | ||
| 58 | def prereleases(self, value): | ||
| 59 | """ | ||
| 60 | Sets whether or not pre-releases as a whole are allowed by this | ||
| 61 | specifier. | ||
| 62 | """ | ||
| 63 | |||
| 64 | @abc.abstractmethod | ||
| 65 | def contains(self, item, prereleases=None): | ||
| 66 | """ | ||
| 67 | Determines if the given item is contained within this specifier. | ||
| 68 | """ | ||
| 69 | |||
| 70 | @abc.abstractmethod | ||
| 71 | def filter(self, iterable, prereleases=None): | ||
| 72 | """ | ||
| 73 | Takes an iterable of items and filters it so that only items which | ||
| 74 | are contained within this specifier are yielded. | ||
| 75 | """ | ||
| 76 | |||
| 77 | |||
| 78 | class _IndividualSpecifier(BaseSpecifier): | ||
| 79 | |||
| 80 | _operators = {} | ||
| 81 | |||
| 82 | def __init__(self, spec="", prereleases=None): | ||
| 83 | match = self._regex.search(spec) | ||
| 84 | if not match: | ||
| 85 | raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec)) | ||
| 86 | |||
| 87 | self._spec = ( | ||
| 88 | match.group("operator").strip(), | ||
| 89 | match.group("version").strip(), | ||
| 90 | ) | ||
| 91 | |||
| 92 | # Store whether or not this Specifier should accept prereleases | ||
| 93 | self._prereleases = prereleases | ||
| 94 | |||
| 95 | def __repr__(self): | ||
| 96 | pre = ( | ||
| 97 | ", prereleases={0!r}".format(self.prereleases) | ||
| 98 | if self._prereleases is not None | ||
| 99 | else "" | ||
| 100 | ) | ||
| 101 | |||
| 102 | return "<{0}({1!r}{2})>".format( | ||
| 103 | self.__class__.__name__, | ||
| 104 | str(self), | ||
| 105 | pre, | ||
| 106 | ) | ||
| 107 | |||
| 108 | def __str__(self): | ||
| 109 | return "{0}{1}".format(*self._spec) | ||
| 110 | |||
| 111 | def __hash__(self): | ||
| 112 | return hash(self._spec) | ||
| 113 | |||
| 114 | def __eq__(self, other): | ||
| 115 | if isinstance(other, string_types): | ||
| 116 | try: | ||
| 117 | other = self.__class__(other) | ||
| 118 | except InvalidSpecifier: | ||
| 119 | return NotImplemented | ||
| 120 | elif not isinstance(other, self.__class__): | ||
| 121 | return NotImplemented | ||
| 122 | |||
| 123 | return self._spec == other._spec | ||
| 124 | |||
| 125 | def __ne__(self, other): | ||
| 126 | if isinstance(other, string_types): | ||
| 127 | try: | ||
| 128 | other = self.__class__(other) | ||
| 129 | except InvalidSpecifier: | ||
| 130 | return NotImplemented | ||
| 131 | elif not isinstance(other, self.__class__): | ||
| 132 | return NotImplemented | ||
| 133 | |||
| 134 | return self._spec != other._spec | ||
| 135 | |||
| 136 | def _get_operator(self, op): | ||
| 137 | return getattr(self, "_compare_{0}".format(self._operators[op])) | ||
| 138 | |||
| 139 | def _coerce_version(self, version): | ||
| 140 | if not isinstance(version, (LegacyVersion, Version)): | ||
| 141 | version = parse(version) | ||
| 142 | return version | ||
| 143 | |||
| 144 | @property | ||
| 145 | def operator(self): | ||
| 146 | return self._spec[0] | ||
| 147 | |||
| 148 | @property | ||
| 149 | def version(self): | ||
| 150 | return self._spec[1] | ||
| 151 | |||
| 152 | @property | ||
| 153 | def prereleases(self): | ||
| 154 | return self._prereleases | ||
| 155 | |||
| 156 | @prereleases.setter | ||
| 157 | def prereleases(self, value): | ||
| 158 | self._prereleases = value | ||
| 159 | |||
| 160 | def __contains__(self, item): | ||
| 161 | return self.contains(item) | ||
| 162 | |||
| 163 | def contains(self, item, prereleases=None): | ||
| 164 | # Determine if prereleases are to be allowed or not. | ||
| 165 | if prereleases is None: | ||
| 166 | prereleases = self.prereleases | ||
| 167 | |||
| 168 | # Normalize item to a Version or LegacyVersion; this allows us to have | ||
| 169 | # a shortcut for ``"2.0" in Specifier(">=2")``. | ||
| 170 | item = self._coerce_version(item) | ||
| 171 | |||
| 172 | # Determine if we should be supporting prereleases in this specifier | ||
| 173 | # or not; if we do not support prereleases then we can short-circuit the | ||
| 174 | # logic if this version is a prerelease. | ||
| 175 | if item.is_prerelease and not prereleases: | ||
| 176 | return False | ||
| 177 | |||
| 178 | # Actually do the comparison to determine if this item is contained | ||
| 179 | # within this Specifier or not. | ||
| 180 | return self._get_operator(self.operator)(item, self.version) | ||
| 181 | |||
| 182 | def filter(self, iterable, prereleases=None): | ||
| 183 | yielded = False | ||
| 184 | found_prereleases = [] | ||
| 185 | |||
| 186 | kw = {"prereleases": prereleases if prereleases is not None else True} | ||
| 187 | |||
| 188 | # Attempt to iterate over all the values in the iterable and if any of | ||
| 189 | # them match, yield them. | ||
| 190 | for version in iterable: | ||
| 191 | parsed_version = self._coerce_version(version) | ||
| 192 | |||
| 193 | if self.contains(parsed_version, **kw): | ||
| 194 | # If our version is a prerelease, and we were not set to allow | ||
| 195 | # prereleases, then we'll store it for later in case nothing | ||
| 196 | # else matches this specifier. | ||
| 197 | if (parsed_version.is_prerelease and not | ||
| 198 | (prereleases or self.prereleases)): | ||
| 199 | found_prereleases.append(version) | ||
| 200 | # Either this is not a prerelease, or we should have been | ||
| 201 | # accepting prereleases from the beginning. | ||
| 202 | else: | ||
| 203 | yielded = True | ||
| 204 | yield version | ||
| 205 | |||
| 206 | # Now that we've iterated over everything, determine if we've yielded | ||
| 207 | # any values, and if we have not and we have any prereleases stored up | ||
| 208 | # then we will go ahead and yield the prereleases. | ||
| 209 | if not yielded and found_prereleases: | ||
| 210 | for version in found_prereleases: | ||
| 211 | yield version | ||
| 212 | |||
| 213 | |||
| 214 | class LegacySpecifier(_IndividualSpecifier): | ||
| 215 | |||
| 216 | _regex_str = ( | ||
| 217 | r""" | ||
| 218 | (?P<operator>(==|!=|<=|>=|<|>)) | ||
| 219 | \s* | ||
| 220 | (?P<version> | ||
| 221 | [^,;\s)]* # Since this is a "legacy" specifier, and the version | ||
| 222 | # string can be just about anything, we match everything | ||
| 223 | # except for whitespace, a semi-colon for marker support, | ||
| 224 | # a closing paren since versions can be enclosed in | ||
| 225 | # them, and a comma since it's a version separator. | ||
| 226 | ) | ||
| 227 | """ | ||
| 228 | ) | ||
| 229 | |||
| 230 | _regex = re.compile( | ||
| 231 | r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE) | ||
| 232 | |||
| 233 | _operators = { | ||
| 234 | "==": "equal", | ||
| 235 | "!=": "not_equal", | ||
| 236 | "<=": "less_than_equal", | ||
| 237 | ">=": "greater_than_equal", | ||
| 238 | "<": "less_than", | ||
| 239 | ">": "greater_than", | ||
| 240 | } | ||
| 241 | |||
| 242 | def _coerce_version(self, version): | ||
| 243 | if not isinstance(version, LegacyVersion): | ||
| 244 | version = LegacyVersion(str(version)) | ||
| 245 | return version | ||
| 246 | |||
| 247 | def _compare_equal(self, prospective, spec): | ||
| 248 | return prospective == self._coerce_version(spec) | ||
| 249 | |||
| 250 | def _compare_not_equal(self, prospective, spec): | ||
| 251 | return prospective != self._coerce_version(spec) | ||
| 252 | |||
| 253 | def _compare_less_than_equal(self, prospective, spec): | ||
| 254 | return prospective <= self._coerce_version(spec) | ||
| 255 | |||
| 256 | def _compare_greater_than_equal(self, prospective, spec): | ||
| 257 | return prospective >= self._coerce_version(spec) | ||
| 258 | |||
| 259 | def _compare_less_than(self, prospective, spec): | ||
| 260 | return prospective < self._coerce_version(spec) | ||
| 261 | |||
| 262 | def _compare_greater_than(self, prospective, spec): | ||
| 263 | return prospective > self._coerce_version(spec) | ||
| 264 | |||
| 265 | |||
| 266 | def _require_version_compare(fn): | ||
| 267 | @functools.wraps(fn) | ||
| 268 | def wrapped(self, prospective, spec): | ||
| 269 | if not isinstance(prospective, Version): | ||
| 270 | return False | ||
| 271 | return fn(self, prospective, spec) | ||
| 272 | return wrapped | ||
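The decorator above makes every PEP 440 comparison answer False when the
candidate does not parse as a strict Version (it becomes a LegacyVersion
instead). A small sketch against the Specifier class defined next:

    from pip._vendor.packaging.specifiers import Specifier

    print(Specifier("~=2.2").contains("2.2.1"))          # True
    print(Specifier(">=2.0").contains("not-a-version"))  # False (LegacyVersion)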
| 273 | |||
| 274 | |||
| 275 | class Specifier(_IndividualSpecifier): | ||
| 276 | |||
| 277 | _regex_str = ( | ||
| 278 | r""" | ||
| 279 | (?P<operator>(~=|==|!=|<=|>=|<|>|===)) | ||
| 280 | (?P<version> | ||
| 281 | (?: | ||
| 282 | # The identity operators allow for an escape hatch that will | ||
| 283 | # do an exact string match of the version you wish to install. | ||
| 284 | # This will not be parsed by PEP 440 and we cannot determine | ||
| 285 | # any semantic meaning from it. This operator is discouraged | ||
| 286 | # but included entirely as an escape hatch. | ||
| 287 | (?<====) # Only match for the identity operator | ||
| 288 | \s* | ||
| 289 | [^\s]* # We just match everything, except for whitespace | ||
| 290 | # since we are only testing for strict identity. | ||
| 291 | ) | ||
| 292 | | | ||
| 293 | (?: | ||
| 294 | # The (non)equality operators allow for wild card and local | ||
| 295 | # versions to be specified so we have to define these two | ||
| 296 | # operators separately to enable that. | ||
| 297 | (?<===|!=) # Only match for equals and not equals | ||
| 298 | |||
| 299 | \s* | ||
| 300 | v? | ||
| 301 | (?:[0-9]+!)? # epoch | ||
| 302 | [0-9]+(?:\.[0-9]+)* # release | ||
| 303 | (?: # pre release | ||
| 304 | [-_\.]? | ||
| 305 | (a|b|c|rc|alpha|beta|pre|preview) | ||
| 306 | [-_\.]? | ||
| 307 | [0-9]* | ||
| 308 | )? | ||
| 309 | (?: # post release | ||
| 310 | (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) | ||
| 311 | )? | ||
| 312 | |||
| 313 | # You cannot use a wild card and a dev or local version | ||
| 314 | # together so group them with a | and make them optional. | ||
| 315 | (?: | ||
| 316 | (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release | ||
| 317 | (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local | ||
| 318 | | | ||
| 319 | \.\* # Wild card syntax of .* | ||
| 320 | )? | ||
| 321 | ) | ||
| 322 | | | ||
| 323 | (?: | ||
| 324 | # The compatible operator requires at least two digits in the | ||
| 325 | # release segment. | ||
| 326 | (?<=~=) # Only match for the compatible operator | ||
| 327 | |||
| 328 | \s* | ||
| 329 | v? | ||
| 330 | (?:[0-9]+!)? # epoch | ||
| 331 | [0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *) | ||
| 332 | (?: # pre release | ||
| 333 | [-_\.]? | ||
| 334 | (a|b|c|rc|alpha|beta|pre|preview) | ||
| 335 | [-_\.]? | ||
| 336 | [0-9]* | ||
| 337 | )? | ||
| 338 | (?: # post release | ||
| 339 | (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) | ||
| 340 | )? | ||
| 341 | (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release | ||
| 342 | ) | ||
| 343 | | | ||
| 344 | (?: | ||
| 345 | # All other operators only allow a subset of what the | ||
| 346 | # (non)equality operators do. Specifically, they do not allow | ||
| 347 | # local versions to be specified, nor do they allow the prefix | ||
| 348 | # matching wild cards. | ||
| 349 | (?<!==|!=|~=) # We have special cases for these | ||
| 350 | # operators so we want to make sure they | ||
| 351 | # don't match here. | ||
| 352 | |||
| 353 | \s* | ||
| 354 | v? | ||
| 355 | (?:[0-9]+!)? # epoch | ||
| 356 | [0-9]+(?:\.[0-9]+)* # release | ||
| 357 | (?: # pre release | ||
| 358 | [-_\.]? | ||
| 359 | (a|b|c|rc|alpha|beta|pre|preview) | ||
| 360 | [-_\.]? | ||
| 361 | [0-9]* | ||
| 362 | )? | ||
| 363 | (?: # post release | ||
| 364 | (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) | ||
| 365 | )? | ||
| 366 | (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release | ||
| 367 | ) | ||
| 368 | ) | ||
| 369 | """ | ||
| 370 | ) | ||
| 371 | |||
| 372 | _regex = re.compile( | ||
| 373 | r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE) | ||
| 374 | |||
| 375 | _operators = { | ||
| 376 | "~=": "compatible", | ||
| 377 | "==": "equal", | ||
| 378 | "!=": "not_equal", | ||
| 379 | "<=": "less_than_equal", | ||
| 380 | ">=": "greater_than_equal", | ||
| 381 | "<": "less_than", | ||
| 382 | ">": "greater_than", | ||
| 383 | "===": "arbitrary", | ||
| 384 | } | ||
| 385 | |||
| 386 | @_require_version_compare | ||
| 387 | def _compare_compatible(self, prospective, spec): | ||
| 388 | # Compatible releases have an equivalent combination of >= and ==. That | ||
| 389 | # is, ~=2.2 is equivalent to >=2.2,==2.*. This allows us to | ||
| 390 | # implement this in terms of the other specifiers instead of | ||
| 391 | # implementing it ourselves. The only thing we need to do is construct | ||
| 392 | # the other specifiers. | ||
| 393 | |||
| 394 | # We want everything but the last item in the version, but we want to | ||
| 395 | # ignore post and dev releases, and we want to treat the pre-release as | ||
| 396 | # its own separate segment. | ||
| 397 | prefix = ".".join( | ||
| 398 | list( | ||
| 399 | itertools.takewhile( | ||
| 400 | lambda x: (not x.startswith("post") and not | ||
| 401 | x.startswith("dev")), | ||
| 402 | _version_split(spec), | ||
| 403 | ) | ||
| 404 | )[:-1] | ||
| 405 | ) | ||
| 406 | |||
| 407 | # Add the prefix notation to the end of our string | ||
| 408 | prefix += ".*" | ||
| 409 | |||
| 410 | return (self._get_operator(">=")(prospective, spec) and | ||
| 411 | self._get_operator("==")(prospective, prefix)) | ||
| 412 | |||
| 413 | @_require_version_compare | ||
| 414 | def _compare_equal(self, prospective, spec): | ||
| 415 | # We need special logic to handle prefix matching | ||
| 416 | if spec.endswith(".*"): | ||
| 417 | # In the case of prefix matching we want to ignore the local segment. | ||
| 418 | prospective = Version(prospective.public) | ||
| 419 | # Split the spec out by dots, and pretend that there is an implicit | ||
| 420 | # dot in between a release segment and a pre-release segment. | ||
| 421 | spec = _version_split(spec[:-2]) # Remove the trailing .* | ||
| 422 | |||
| 423 | # Split the prospective version out by dots, and pretend that there | ||
| 424 | # is an implicit dot in between a release segment and a pre-release | ||
| 425 | # segment. | ||
| 426 | prospective = _version_split(str(prospective)) | ||
| 427 | |||
| 428 | # Shorten the prospective version to be the same length as the spec | ||
| 429 | # so that we can determine if the specifier is a prefix of the | ||
| 430 | # prospective version or not. | ||
| 431 | prospective = prospective[:len(spec)] | ||
| 432 | |||
| 433 | # Pad out our two sides with zeros so that they both equal the same | ||
| 434 | # length. | ||
| 435 | spec, prospective = _pad_version(spec, prospective) | ||
| 436 | else: | ||
| 437 | # Convert our spec string into a Version | ||
| 438 | spec = Version(spec) | ||
| 439 | |||
| 440 | # If the specifier does not have a local segment, then we want to | ||
| 441 | # act as if the prospective version also does not have a local | ||
| 442 | # segment. | ||
| 443 | if not spec.local: | ||
| 444 | prospective = Version(prospective.public) | ||
| 445 | |||
| 446 | return prospective == spec | ||
| 447 | |||
| 448 | @_require_version_compare | ||
| 449 | def _compare_not_equal(self, prospective, spec): | ||
| 450 | return not self._compare_equal(prospective, spec) | ||
| 451 | |||
| 452 | @_require_version_compare | ||
| 453 | def _compare_less_than_equal(self, prospective, spec): | ||
| 454 | return prospective <= Version(spec) | ||
| 455 | |||
| 456 | @_require_version_compare | ||
| 457 | def _compare_greater_than_equal(self, prospective, spec): | ||
| 458 | return prospective >= Version(spec) | ||
| 459 | |||
| 460 | @_require_version_compare | ||
| 461 | def _compare_less_than(self, prospective, spec): | ||
| 462 | # Convert our spec to a Version instance, since we'll want to work with | ||
| 463 | # it as a version. | ||
| 464 | spec = Version(spec) | ||
| 465 | |||
| 466 | # Check to see if the prospective version is less than the spec | ||
| 467 | # version. If it's not we can short circuit and just return False now | ||
| 468 | # instead of doing extra unneeded work. | ||
| 469 | if not prospective < spec: | ||
| 470 | return False | ||
| 471 | |||
| 472 | # This special case is here so that, unless the specifier itself | ||
| 473 | # is a pre-release version, we do not accept pre-release | ||
| 474 | # versions for the version mentioned in the specifier (e.g. <3.1 should | ||
| 475 | # not match 3.1.dev0, but should match 3.0.dev0). | ||
| 476 | if not spec.is_prerelease and prospective.is_prerelease: | ||
| 477 | if Version(prospective.base_version) == Version(spec.base_version): | ||
| 478 | return False | ||
| 479 | |||
| 480 | # If we've gotten here, it means that the prospective version is both | ||
| 481 | # less than the spec version *and* it's not a pre-release of the same | ||
| 482 | # version in the spec. | ||
| 483 | return True | ||
| 484 | |||
| 485 | @_require_version_compare | ||
| 486 | def _compare_greater_than(self, prospective, spec): | ||
| 487 | # Convert our spec to a Version instance, since we'll want to work with | ||
| 488 | # it as a version. | ||
| 489 | spec = Version(spec) | ||
| 490 | |||
| 491 | # Check to see if the prospective version is greater than the spec | ||
| 492 | # version. If it's not we can short circuit and just return False now | ||
| 493 | # instead of doing extra unneeded work. | ||
| 494 | if not prospective > spec: | ||
| 495 | return False | ||
| 496 | |||
| 497 | # This special case is here so that, unless the specifier itself | ||
| 498 | # is a post-release version, we do not accept | ||
| 499 | # post-release versions for the version mentioned in the specifier | ||
| 500 | # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0). | ||
| 501 | if not spec.is_postrelease and prospective.is_postrelease: | ||
| 502 | if Version(prospective.base_version) == Version(spec.base_version): | ||
| 503 | return False | ||
| 504 | |||
| 505 | # Ensure that we do not allow a local version of the version mentioned | ||
| 506 | # in the specifier, which is technically greater, to match. | ||
| 507 | if prospective.local is not None: | ||
| 508 | if Version(prospective.base_version) == Version(spec.base_version): | ||
| 509 | return False | ||
| 510 | |||
| 511 | # If we've gotten here, it means that the prospective version is both | ||
| 512 | # greater than the spec version *and* it's not a post-release or local | ||
| 513 | # version of the same base version in the spec. | ||
| 514 | return True | ||
| 515 | |||
| 516 | def _compare_arbitrary(self, prospective, spec): | ||
| 517 | return str(prospective).lower() == str(spec).lower() | ||
| 518 | |||
| 519 | @property | ||
| 520 | def prereleases(self): | ||
| 521 | # If there is an explicit prereleases set for this, then we'll just | ||
| 522 | # blindly use that. | ||
| 523 | if self._prereleases is not None: | ||
| 524 | return self._prereleases | ||
| 525 | |||
| 526 | # Look at our specifier and determine if it uses an inclusive | ||
| 527 | # operator and, if it does, whether it includes an explicit | ||
| 528 | # prerelease. | ||
| 529 | operator, version = self._spec | ||
| 530 | if operator in ["==", ">=", "<=", "~=", "==="]: | ||
| 531 | # The == specifier can include a trailing .*; if it does, we | ||
| 532 | # want to remove it before parsing. | ||
| 533 | if operator == "==" and version.endswith(".*"): | ||
| 534 | version = version[:-2] | ||
| 535 | |||
| 536 | # Parse the version, and if it is a pre-release then this | ||
| 537 | # specifier allows pre-releases. | ||
| 538 | if parse(version).is_prerelease: | ||
| 539 | return True | ||
| 540 | |||
| 541 | return False | ||
| 542 | |||
| 543 | @prereleases.setter | ||
| 544 | def prereleases(self, value): | ||
| 545 | self._prereleases = value | ||
| 546 | |||
| 547 | |||
| 548 | _prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$") | ||
| 549 | |||
| 550 | |||
| 551 | def _version_split(version): | ||
| 552 | result = [] | ||
| 553 | for item in version.split("."): | ||
| 554 | match = _prefix_regex.search(item) | ||
| 555 | if match: | ||
| 556 | result.extend(match.groups()) | ||
| 557 | else: | ||
| 558 | result.append(item) | ||
| 559 | return result | ||
| 560 | |||
| 561 | |||
| 562 | def _pad_version(left, right): | ||
| 563 | left_split, right_split = [], [] | ||
| 564 | |||
| 565 | # Get the release segment of our versions | ||
| 566 | left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left))) | ||
| 567 | right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right))) | ||
| 568 | |||
| 569 | # Get the rest of our versions | ||
| 570 | left_split.append(left[len(left_split[0]):]) | ||
| 571 | right_split.append(right[len(right_split[0]):]) | ||
| 572 | |||
| 573 | # Insert our padding | ||
| 574 | left_split.insert( | ||
| 575 | 1, | ||
| 576 | ["0"] * max(0, len(right_split[0]) - len(left_split[0])), | ||
| 577 | ) | ||
| 578 | right_split.insert( | ||
| 579 | 1, | ||
| 580 | ["0"] * max(0, len(left_split[0]) - len(right_split[0])), | ||
| 581 | ) | ||
| 582 | |||
| 583 | return ( | ||
| 584 | list(itertools.chain(*left_split)), | ||
| 585 | list(itertools.chain(*right_split)), | ||
| 586 | ) | ||
| 587 | |||
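# --- Illustrative note (not part of the vendored file) ---
# A minimal sketch of what the two helpers above produce, assuming the
# definitions as written: _version_split treats a fused release+pre chunk
# as two items, and _pad_version zero-pads the shorter release segment so
# that the prefix comparison in Specifier._compare_equal lines up.
#
#     _version_split("1.2rc1")        # -> ["1", "2", "rc1"]
#     _pad_version(["1", "2"], ["1"]) # -> (["1", "2"], ["1", "0"])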
| 588 | |||
| 589 | class SpecifierSet(BaseSpecifier): | ||
| 590 | |||
| 591 | def __init__(self, specifiers="", prereleases=None): | ||
| 592 | # Split on , to break each individual specifier into its own item, and | ||
| 593 | # strip each item to remove leading/trailing whitespace. | ||
| 594 | specifiers = [s.strip() for s in specifiers.split(",") if s.strip()] | ||
| 595 | |||
| 596 | # Parse each individual specifier, attempting first to make it a | ||
| 597 | # Specifier and falling back to a LegacySpecifier. | ||
| 598 | parsed = set() | ||
| 599 | for specifier in specifiers: | ||
| 600 | try: | ||
| 601 | parsed.add(Specifier(specifier)) | ||
| 602 | except InvalidSpecifier: | ||
| 603 | parsed.add(LegacySpecifier(specifier)) | ||
| 604 | |||
| 605 | # Turn our parsed specifiers into a frozen set and save them for later. | ||
| 606 | self._specs = frozenset(parsed) | ||
| 607 | |||
| 608 | # Store our prereleases value so we can use it later to determine if | ||
| 609 | # we accept prereleases or not. | ||
| 610 | self._prereleases = prereleases | ||
| 611 | |||
| 612 | def __repr__(self): | ||
| 613 | pre = ( | ||
| 614 | ", prereleases={0!r}".format(self.prereleases) | ||
| 615 | if self._prereleases is not None | ||
| 616 | else "" | ||
| 617 | ) | ||
| 618 | |||
| 619 | return "<SpecifierSet({0!r}{1})>".format(str(self), pre) | ||
| 620 | |||
| 621 | def __str__(self): | ||
| 622 | return ",".join(sorted(str(s) for s in self._specs)) | ||
| 623 | |||
| 624 | def __hash__(self): | ||
| 625 | return hash(self._specs) | ||
| 626 | |||
| 627 | def __and__(self, other): | ||
| 628 | if isinstance(other, string_types): | ||
| 629 | other = SpecifierSet(other) | ||
| 630 | elif not isinstance(other, SpecifierSet): | ||
| 631 | return NotImplemented | ||
| 632 | |||
| 633 | specifier = SpecifierSet() | ||
| 634 | specifier._specs = frozenset(self._specs | other._specs) | ||
| 635 | |||
| 636 | if self._prereleases is None and other._prereleases is not None: | ||
| 637 | specifier._prereleases = other._prereleases | ||
| 638 | elif self._prereleases is not None and other._prereleases is None: | ||
| 639 | specifier._prereleases = self._prereleases | ||
| 640 | elif self._prereleases == other._prereleases: | ||
| 641 | specifier._prereleases = self._prereleases | ||
| 642 | else: | ||
| 643 | raise ValueError( | ||
| 644 | "Cannot combine SpecifierSets with True and False prerelease " | ||
| 645 | "overrides." | ||
| 646 | ) | ||
| 647 | |||
| 648 | return specifier | ||
| 649 | |||
| 650 | def __eq__(self, other): | ||
| 651 | if isinstance(other, string_types): | ||
| 652 | other = SpecifierSet(other) | ||
| 653 | elif isinstance(other, _IndividualSpecifier): | ||
| 654 | other = SpecifierSet(str(other)) | ||
| 655 | elif not isinstance(other, SpecifierSet): | ||
| 656 | return NotImplemented | ||
| 657 | |||
| 658 | return self._specs == other._specs | ||
| 659 | |||
| 660 | def __ne__(self, other): | ||
| 661 | if isinstance(other, string_types): | ||
| 662 | other = SpecifierSet(other) | ||
| 663 | elif isinstance(other, _IndividualSpecifier): | ||
| 664 | other = SpecifierSet(str(other)) | ||
| 665 | elif not isinstance(other, SpecifierSet): | ||
| 666 | return NotImplemented | ||
| 667 | |||
| 668 | return self._specs != other._specs | ||
| 669 | |||
| 670 | def __len__(self): | ||
| 671 | return len(self._specs) | ||
| 672 | |||
| 673 | def __iter__(self): | ||
| 674 | return iter(self._specs) | ||
| 675 | |||
| 676 | @property | ||
| 677 | def prereleases(self): | ||
| 678 | # If we have been given an explicit prerelease modifier, then we'll | ||
| 679 | # pass that through here. | ||
| 680 | if self._prereleases is not None: | ||
| 681 | return self._prereleases | ||
| 682 | |||
| 683 | # If we don't have any specifiers, and we don't have a forced value, | ||
| 684 | # then we'll just return None since we don't know if this should have | ||
| 685 | # pre-releases or not. | ||
| 686 | if not self._specs: | ||
| 687 | return None | ||
| 688 | |||
| 689 | # Otherwise we'll see if any of the given specifiers accept | ||
| 690 | # prereleases, if any of them do we'll return True, otherwise False. | ||
| 691 | return any(s.prereleases for s in self._specs) | ||
| 692 | |||
| 693 | @prereleases.setter | ||
| 694 | def prereleases(self, value): | ||
| 695 | self._prereleases = value | ||
| 696 | |||
| 697 | def __contains__(self, item): | ||
| 698 | return self.contains(item) | ||
| 699 | |||
| 700 | def contains(self, item, prereleases=None): | ||
| 701 | # Ensure that our item is a Version or LegacyVersion instance. | ||
| 702 | if not isinstance(item, (LegacyVersion, Version)): | ||
| 703 | item = parse(item) | ||
| 704 | |||
| 705 | # Determine if we're forcing a prerelease or not; if we're not forcing | ||
| 706 | # one for this particular call, then we'll use whatever the | ||
| 707 | # SpecifierSet thinks about whether or not we should support prereleases. | ||
| 708 | if prereleases is None: | ||
| 709 | prereleases = self.prereleases | ||
| 710 | |||
| 711 | # We can determine if we're going to allow pre-releases by looking to | ||
| 712 | # see if any of the underlying items supports them. If none of them do | ||
| 713 | # and this item is a pre-release then we do not allow it and we can | ||
| 714 | # short circuit that here. | ||
| 715 | # Note: This means that 1.0.dev1 would not be contained in something | ||
| 716 | # like >=1.0.devabc; however, it would be in >=1.0.devabc,>0.0.dev0 | ||
| 717 | if not prereleases and item.is_prerelease: | ||
| 718 | return False | ||
| 719 | |||
| 720 | # We simply dispatch to the underlying specs here to make sure that the | ||
| 721 | # given version is contained within all of them. | ||
| 722 | # Note: This use of all() here means that an empty set of specifiers | ||
| 723 | # will always return True, this is an explicit design decision. | ||
| 724 | return all( | ||
| 725 | s.contains(item, prereleases=prereleases) | ||
| 726 | for s in self._specs | ||
| 727 | ) | ||
| 728 | |||
| 729 | def filter(self, iterable, prereleases=None): | ||
| 730 | # Determine if we're forcing a prerelease or not; if we're not forcing | ||
| 731 | # one for this particular filter call, then we'll use whatever the | ||
| 732 | # SpecifierSet thinks about whether or not we should support prereleases. | ||
| 733 | if prereleases is None: | ||
| 734 | prereleases = self.prereleases | ||
| 735 | |||
| 736 | # If we have any specifiers, then we want to wrap our iterable in the | ||
| 737 | # filter method for each one, this will act as a logical AND amongst | ||
| 738 | # each specifier. | ||
| 739 | if self._specs: | ||
| 740 | for spec in self._specs: | ||
| 741 | iterable = spec.filter(iterable, prereleases=bool(prereleases)) | ||
| 742 | return iterable | ||
| 743 | # If we do not have any specifiers, then we need to have a rough filter | ||
| 744 | # which will filter out any pre-releases, unless there are no final | ||
| 745 | # releases, and which will filter out LegacyVersion in general. | ||
| 746 | else: | ||
| 747 | filtered = [] | ||
| 748 | found_prereleases = [] | ||
| 749 | |||
| 750 | for item in iterable: | ||
| 751 | # Ensure that we have some kind of Version object for this item. | ||
| 752 | if not isinstance(item, (LegacyVersion, Version)): | ||
| 753 | parsed_version = parse(item) | ||
| 754 | else: | ||
| 755 | parsed_version = item | ||
| 756 | |||
| 757 | # Filter out any item which is parsed as a LegacyVersion | ||
| 758 | if isinstance(parsed_version, LegacyVersion): | ||
| 759 | continue | ||
| 760 | |||
| 761 | # Store any item which is a pre-release for later unless we've | ||
| 762 | # already found a final version or we are accepting prereleases | ||
| 763 | if parsed_version.is_prerelease and not prereleases: | ||
| 764 | if not filtered: | ||
| 765 | found_prereleases.append(item) | ||
| 766 | else: | ||
| 767 | filtered.append(item) | ||
| 768 | |||
| 769 | # If we've found no items except for pre-releases, then we'll go | ||
| 770 | # ahead and use the pre-releases | ||
| 771 | if not filtered and found_prereleases and prereleases is None: | ||
| 772 | return found_prereleases | ||
| 773 | |||
| 774 | return filtered | ||
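Taken together, the classes in this file implement PEP 440 version matching. The following is a minimal usage sketch, assuming only the vendored pip._vendor.packaging.specifiers module above (the __contains__/contains plumbing of _IndividualSpecifier appears earlier in this same file):

    from pip._vendor.packaging.specifiers import Specifier, SpecifierSet

    # ~=2.2 is the compatible-release operator: >=2.2 combined with ==2.*
    spec = Specifier("~=2.2")
    assert spec.contains("2.5") and not spec.contains("3.0")

    # A SpecifierSet ANDs its comma-separated specifiers together.
    specs = SpecifierSet(">=1.0,<2.0")
    assert specs.contains("1.4")

    # Pre-releases are rejected unless a specifier mentions one explicitly
    # (see Specifier.prereleases) or prereleases=True is forced.
    assert not specs.contains("1.5.dev0")
    assert specs.contains("1.5.dev0", prereleases=True)

    # filter() applies the same rules across a whole candidate list.
    assert list(specs.filter(["0.9", "1.2", "1.5.dev0", "1.9"])) == ["1.2", "1.9"]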
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/packaging/utils.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/packaging/utils.py new file mode 100644 index 0000000..5151f9f --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/packaging/utils.py | |||
| @@ -0,0 +1,63 @@ | |||
| 1 | # This file is dual licensed under the terms of the Apache License, Version | ||
| 2 | # 2.0, and the BSD License. See the LICENSE file in the root of this repository | ||
| 3 | # for complete details. | ||
| 4 | from __future__ import absolute_import, division, print_function | ||
| 5 | |||
| 6 | import re | ||
| 7 | |||
| 8 | from .version import InvalidVersion, Version | ||
| 9 | |||
| 10 | |||
| 11 | _canonicalize_regex = re.compile(r"[-_.]+") | ||
| 12 | |||
| 13 | |||
| 14 | def canonicalize_name(name): | ||
| 15 | # This is taken from PEP 503. | ||
| 16 | return _canonicalize_regex.sub("-", name).lower() | ||
| 17 | |||
| 18 | |||
| 19 | def canonicalize_version(version): | ||
| 20 | """ | ||
| 21 | This is very similar to Version.__str__, but has one subtle difference | ||
| 22 | in the way it handles the release segment. | ||
| 23 | """ | ||
| 24 | |||
| 25 | try: | ||
| 26 | version = Version(version) | ||
| 27 | except InvalidVersion: | ||
| 28 | # Legacy versions cannot be normalized | ||
| 29 | return version | ||
| 30 | |||
| 31 | parts = [] | ||
| 32 | |||
| 33 | # Epoch | ||
| 34 | if version.epoch != 0: | ||
| 35 | parts.append("{0}!".format(version.epoch)) | ||
| 36 | |||
| 37 | # Release segment | ||
| 38 | # NB: This strips trailing '.0's to normalize | ||
| 39 | parts.append( | ||
| 40 | re.sub( | ||
| 41 | r'(\.0)+$', | ||
| 42 | '', | ||
| 43 | ".".join(str(x) for x in version.release) | ||
| 44 | ) | ||
| 45 | ) | ||
| 46 | |||
| 47 | # Pre-release | ||
| 48 | if version.pre is not None: | ||
| 49 | parts.append("".join(str(x) for x in version.pre)) | ||
| 50 | |||
| 51 | # Post-release | ||
| 52 | if version.post is not None: | ||
| 53 | parts.append(".post{0}".format(version.post)) | ||
| 54 | |||
| 55 | # Development release | ||
| 56 | if version.dev is not None: | ||
| 57 | parts.append(".dev{0}".format(version.dev)) | ||
| 58 | |||
| 59 | # Local version segment | ||
| 60 | if version.local is not None: | ||
| 61 | parts.append("+{0}".format(version.local)) | ||
| 62 | |||
| 63 | return "".join(parts) | ||
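A short sketch of how these two helpers behave, assuming only the vendored module above:

    from pip._vendor.packaging.utils import canonicalize_name, canonicalize_version

    # PEP 503 name normalization: runs of "-", "_" and "." collapse to "-".
    assert canonicalize_name("Django_Rest.Framework") == "django-rest-framework"

    # Trailing ".0" release components are stripped; strings that are not
    # valid PEP 440 versions pass through unchanged.
    assert canonicalize_version("1.4.0") == "1.4"
    assert canonicalize_version("not.a.version!") == "not.a.version!"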
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/packaging/version.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/packaging/version.py new file mode 100644 index 0000000..a8affbd --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/packaging/version.py | |||
| @@ -0,0 +1,441 @@ | |||
| 1 | # This file is dual licensed under the terms of the Apache License, Version | ||
| 2 | # 2.0, and the BSD License. See the LICENSE file in the root of this repository | ||
| 3 | # for complete details. | ||
| 4 | from __future__ import absolute_import, division, print_function | ||
| 5 | |||
| 6 | import collections | ||
| 7 | import itertools | ||
| 8 | import re | ||
| 9 | |||
| 10 | from ._structures import Infinity | ||
| 11 | |||
| 12 | |||
| 13 | __all__ = [ | ||
| 14 | "parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN" | ||
| 15 | ] | ||
| 16 | |||
| 17 | |||
| 18 | _Version = collections.namedtuple( | ||
| 19 | "_Version", | ||
| 20 | ["epoch", "release", "dev", "pre", "post", "local"], | ||
| 21 | ) | ||
| 22 | |||
| 23 | |||
| 24 | def parse(version): | ||
| 25 | """ | ||
| 26 | Parse the given version string and return either a :class:`Version` object | ||
| 27 | or a :class:`LegacyVersion` object depending on whether the given version is | ||
| 28 | a valid PEP 440 version or a legacy version. | ||
| 29 | """ | ||
| 30 | try: | ||
| 31 | return Version(version) | ||
| 32 | except InvalidVersion: | ||
| 33 | return LegacyVersion(version) | ||
| 34 | |||
| 35 | |||
| 36 | class InvalidVersion(ValueError): | ||
| 37 | """ | ||
| 38 | An invalid version was found; users should refer to PEP 440. | ||
| 39 | """ | ||
| 40 | |||
| 41 | |||
| 42 | class _BaseVersion(object): | ||
| 43 | |||
| 44 | def __hash__(self): | ||
| 45 | return hash(self._key) | ||
| 46 | |||
| 47 | def __lt__(self, other): | ||
| 48 | return self._compare(other, lambda s, o: s < o) | ||
| 49 | |||
| 50 | def __le__(self, other): | ||
| 51 | return self._compare(other, lambda s, o: s <= o) | ||
| 52 | |||
| 53 | def __eq__(self, other): | ||
| 54 | return self._compare(other, lambda s, o: s == o) | ||
| 55 | |||
| 56 | def __ge__(self, other): | ||
| 57 | return self._compare(other, lambda s, o: s >= o) | ||
| 58 | |||
| 59 | def __gt__(self, other): | ||
| 60 | return self._compare(other, lambda s, o: s > o) | ||
| 61 | |||
| 62 | def __ne__(self, other): | ||
| 63 | return self._compare(other, lambda s, o: s != o) | ||
| 64 | |||
| 65 | def _compare(self, other, method): | ||
| 66 | if not isinstance(other, _BaseVersion): | ||
| 67 | return NotImplemented | ||
| 68 | |||
| 69 | return method(self._key, other._key) | ||
| 70 | |||
| 71 | |||
| 72 | class LegacyVersion(_BaseVersion): | ||
| 73 | |||
| 74 | def __init__(self, version): | ||
| 75 | self._version = str(version) | ||
| 76 | self._key = _legacy_cmpkey(self._version) | ||
| 77 | |||
| 78 | def __str__(self): | ||
| 79 | return self._version | ||
| 80 | |||
| 81 | def __repr__(self): | ||
| 82 | return "<LegacyVersion({0})>".format(repr(str(self))) | ||
| 83 | |||
| 84 | @property | ||
| 85 | def public(self): | ||
| 86 | return self._version | ||
| 87 | |||
| 88 | @property | ||
| 89 | def base_version(self): | ||
| 90 | return self._version | ||
| 91 | |||
| 92 | @property | ||
| 93 | def epoch(self): | ||
| 94 | return -1 | ||
| 95 | |||
| 96 | @property | ||
| 97 | def release(self): | ||
| 98 | return None | ||
| 99 | |||
| 100 | @property | ||
| 101 | def pre(self): | ||
| 102 | return None | ||
| 103 | |||
| 104 | @property | ||
| 105 | def post(self): | ||
| 106 | return None | ||
| 107 | |||
| 108 | @property | ||
| 109 | def dev(self): | ||
| 110 | return None | ||
| 111 | |||
| 112 | @property | ||
| 113 | def local(self): | ||
| 114 | return None | ||
| 115 | |||
| 116 | @property | ||
| 117 | def is_prerelease(self): | ||
| 118 | return False | ||
| 119 | |||
| 120 | @property | ||
| 121 | def is_postrelease(self): | ||
| 122 | return False | ||
| 123 | |||
| 124 | @property | ||
| 125 | def is_devrelease(self): | ||
| 126 | return False | ||
| 127 | |||
| 128 | |||
| 129 | _legacy_version_component_re = re.compile( | ||
| 130 | r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE, | ||
| 131 | ) | ||
| 132 | |||
| 133 | _legacy_version_replacement_map = { | ||
| 134 | "pre": "c", "preview": "c", "-": "final-", "rc": "c", "dev": "@", | ||
| 135 | } | ||
| 136 | |||
| 137 | |||
| 138 | def _parse_version_parts(s): | ||
| 139 | for part in _legacy_version_component_re.split(s): | ||
| 140 | part = _legacy_version_replacement_map.get(part, part) | ||
| 141 | |||
| 142 | if not part or part == ".": | ||
| 143 | continue | ||
| 144 | |||
| 145 | if part[:1] in "0123456789": | ||
| 146 | # pad for numeric comparison | ||
| 147 | yield part.zfill(8) | ||
| 148 | else: | ||
| 149 | yield "*" + part | ||
| 150 | |||
| 151 | # ensure that alpha/beta/candidate are before final | ||
| 152 | yield "*final" | ||
| 153 | |||
| 154 | |||
| 155 | def _legacy_cmpkey(version): | ||
| 156 | # We hardcode an epoch of -1 here. A PEP 440 version can only have an epoch | ||
| 157 | # greater than or equal to 0. This will effectively sort the LegacyVersion, | ||
| 158 | # which uses the de facto standard originally implemented by setuptools, | ||
| 159 | # before all PEP 440 versions. | ||
| 160 | epoch = -1 | ||
| 161 | |||
| 162 | # This scheme is taken from setuptools' pkg_resources.parse_version prior | ||
| 163 | # to its adoption of the packaging library. | ||
| 164 | parts = [] | ||
| 165 | for part in _parse_version_parts(version.lower()): | ||
| 166 | if part.startswith("*"): | ||
| 167 | # remove "-" before a prerelease tag | ||
| 168 | if part < "*final": | ||
| 169 | while parts and parts[-1] == "*final-": | ||
| 170 | parts.pop() | ||
| 171 | |||
| 172 | # remove trailing zeros from each series of numeric parts | ||
| 173 | while parts and parts[-1] == "00000000": | ||
| 174 | parts.pop() | ||
| 175 | |||
| 176 | parts.append(part) | ||
| 177 | parts = tuple(parts) | ||
| 178 | |||
| 179 | return epoch, parts | ||
| 180 | |||
| 181 | |||
| 182 | # Deliberately not anchored to the start and end of the string, to make it | ||
| 183 | # easier for 3rd party code to reuse | ||
| 184 | VERSION_PATTERN = r""" | ||
| 185 | v? | ||
| 186 | (?: | ||
| 187 | (?:(?P<epoch>[0-9]+)!)? # epoch | ||
| 188 | (?P<release>[0-9]+(?:\.[0-9]+)*) # release segment | ||
| 189 | (?P<pre> # pre-release | ||
| 190 | [-_\.]? | ||
| 191 | (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview)) | ||
| 192 | [-_\.]? | ||
| 193 | (?P<pre_n>[0-9]+)? | ||
| 194 | )? | ||
| 195 | (?P<post> # post release | ||
| 196 | (?:-(?P<post_n1>[0-9]+)) | ||
| 197 | | | ||
| 198 | (?: | ||
| 199 | [-_\.]? | ||
| 200 | (?P<post_l>post|rev|r) | ||
| 201 | [-_\.]? | ||
| 202 | (?P<post_n2>[0-9]+)? | ||
| 203 | ) | ||
| 204 | )? | ||
| 205 | (?P<dev> # dev release | ||
| 206 | [-_\.]? | ||
| 207 | (?P<dev_l>dev) | ||
| 208 | [-_\.]? | ||
| 209 | (?P<dev_n>[0-9]+)? | ||
| 210 | )? | ||
| 211 | ) | ||
| 212 | (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))? # local version | ||
| 213 | """ | ||
| 214 | |||
| 215 | |||
| 216 | class Version(_BaseVersion): | ||
| 217 | |||
| 218 | _regex = re.compile( | ||
| 219 | r"^\s*" + VERSION_PATTERN + r"\s*$", | ||
| 220 | re.VERBOSE | re.IGNORECASE, | ||
| 221 | ) | ||
| 222 | |||
| 223 | def __init__(self, version): | ||
| 224 | # Validate the version and parse it into pieces | ||
| 225 | match = self._regex.search(version) | ||
| 226 | if not match: | ||
| 227 | raise InvalidVersion("Invalid version: '{0}'".format(version)) | ||
| 228 | |||
| 229 | # Store the parsed out pieces of the version | ||
| 230 | self._version = _Version( | ||
| 231 | epoch=int(match.group("epoch")) if match.group("epoch") else 0, | ||
| 232 | release=tuple(int(i) for i in match.group("release").split(".")), | ||
| 233 | pre=_parse_letter_version( | ||
| 234 | match.group("pre_l"), | ||
| 235 | match.group("pre_n"), | ||
| 236 | ), | ||
| 237 | post=_parse_letter_version( | ||
| 238 | match.group("post_l"), | ||
| 239 | match.group("post_n1") or match.group("post_n2"), | ||
| 240 | ), | ||
| 241 | dev=_parse_letter_version( | ||
| 242 | match.group("dev_l"), | ||
| 243 | match.group("dev_n"), | ||
| 244 | ), | ||
| 245 | local=_parse_local_version(match.group("local")), | ||
| 246 | ) | ||
| 247 | |||
| 248 | # Generate a key which will be used for sorting | ||
| 249 | self._key = _cmpkey( | ||
| 250 | self._version.epoch, | ||
| 251 | self._version.release, | ||
| 252 | self._version.pre, | ||
| 253 | self._version.post, | ||
| 254 | self._version.dev, | ||
| 255 | self._version.local, | ||
| 256 | ) | ||
| 257 | |||
| 258 | def __repr__(self): | ||
| 259 | return "<Version({0})>".format(repr(str(self))) | ||
| 260 | |||
| 261 | def __str__(self): | ||
| 262 | parts = [] | ||
| 263 | |||
| 264 | # Epoch | ||
| 265 | if self.epoch != 0: | ||
| 266 | parts.append("{0}!".format(self.epoch)) | ||
| 267 | |||
| 268 | # Release segment | ||
| 269 | parts.append(".".join(str(x) for x in self.release)) | ||
| 270 | |||
| 271 | # Pre-release | ||
| 272 | if self.pre is not None: | ||
| 273 | parts.append("".join(str(x) for x in self.pre)) | ||
| 274 | |||
| 275 | # Post-release | ||
| 276 | if self.post is not None: | ||
| 277 | parts.append(".post{0}".format(self.post)) | ||
| 278 | |||
| 279 | # Development release | ||
| 280 | if self.dev is not None: | ||
| 281 | parts.append(".dev{0}".format(self.dev)) | ||
| 282 | |||
| 283 | # Local version segment | ||
| 284 | if self.local is not None: | ||
| 285 | parts.append("+{0}".format(self.local)) | ||
| 286 | |||
| 287 | return "".join(parts) | ||
| 288 | |||
| 289 | @property | ||
| 290 | def epoch(self): | ||
| 291 | return self._version.epoch | ||
| 292 | |||
| 293 | @property | ||
| 294 | def release(self): | ||
| 295 | return self._version.release | ||
| 296 | |||
| 297 | @property | ||
| 298 | def pre(self): | ||
| 299 | return self._version.pre | ||
| 300 | |||
| 301 | @property | ||
| 302 | def post(self): | ||
| 303 | return self._version.post[1] if self._version.post else None | ||
| 304 | |||
| 305 | @property | ||
| 306 | def dev(self): | ||
| 307 | return self._version.dev[1] if self._version.dev else None | ||
| 308 | |||
| 309 | @property | ||
| 310 | def local(self): | ||
| 311 | if self._version.local: | ||
| 312 | return ".".join(str(x) for x in self._version.local) | ||
| 313 | else: | ||
| 314 | return None | ||
| 315 | |||
| 316 | @property | ||
| 317 | def public(self): | ||
| 318 | return str(self).split("+", 1)[0] | ||
| 319 | |||
| 320 | @property | ||
| 321 | def base_version(self): | ||
| 322 | parts = [] | ||
| 323 | |||
| 324 | # Epoch | ||
| 325 | if self.epoch != 0: | ||
| 326 | parts.append("{0}!".format(self.epoch)) | ||
| 327 | |||
| 328 | # Release segment | ||
| 329 | parts.append(".".join(str(x) for x in self.release)) | ||
| 330 | |||
| 331 | return "".join(parts) | ||
| 332 | |||
| 333 | @property | ||
| 334 | def is_prerelease(self): | ||
| 335 | return self.dev is not None or self.pre is not None | ||
| 336 | |||
| 337 | @property | ||
| 338 | def is_postrelease(self): | ||
| 339 | return self.post is not None | ||
| 340 | |||
| 341 | @property | ||
| 342 | def is_devrelease(self): | ||
| 343 | return self.dev is not None | ||
| 344 | |||
| 345 | |||
| 346 | def _parse_letter_version(letter, number): | ||
| 347 | if letter: | ||
| 348 | # We consider there to be an implicit 0 in a pre-release if there is | ||
| 349 | # not a numeral associated with it. | ||
| 350 | if number is None: | ||
| 351 | number = 0 | ||
| 352 | |||
| 353 | # We normalize any letters to their lower case form | ||
| 354 | letter = letter.lower() | ||
| 355 | |||
| 356 | # We consider some words to be alternate spellings of other words and | ||
| 357 | # in those cases we want to normalize the spellings to our preferred | ||
| 358 | # spelling. | ||
| 359 | if letter == "alpha": | ||
| 360 | letter = "a" | ||
| 361 | elif letter == "beta": | ||
| 362 | letter = "b" | ||
| 363 | elif letter in ["c", "pre", "preview"]: | ||
| 364 | letter = "rc" | ||
| 365 | elif letter in ["rev", "r"]: | ||
| 366 | letter = "post" | ||
| 367 | |||
| 368 | return letter, int(number) | ||
| 369 | if not letter and number: | ||
| 370 | # We assume that if we are given a number but not a letter, | ||
| 371 | # then this is using the implicit post release syntax (e.g. 1.0-1) | ||
| 372 | letter = "post" | ||
| 373 | |||
| 374 | return letter, int(number) | ||
| 375 | |||
| 376 | |||
| 377 | _local_version_separators = re.compile(r"[\._-]") | ||
| 378 | |||
| 379 | |||
| 380 | def _parse_local_version(local): | ||
| 381 | """ | ||
| 382 | Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve"). | ||
| 383 | """ | ||
| 384 | if local is not None: | ||
| 385 | return tuple( | ||
| 386 | part.lower() if not part.isdigit() else int(part) | ||
| 387 | for part in _local_version_separators.split(local) | ||
| 388 | ) | ||
| 389 | |||
| 390 | |||
| 391 | def _cmpkey(epoch, release, pre, post, dev, local): | ||
| 392 | # When we compare a release version, we want to compare it with all of the | ||
| 393 | # trailing zeros removed. So we'll reverse the list, drop all the now- | ||
| 394 | # leading zeros until we come to something non-zero, then re-reverse the | ||
| 395 | # rest back into the correct order, make it a tuple, and use | ||
| 396 | # that for our sorting key. | ||
| 397 | release = tuple( | ||
| 398 | reversed(list( | ||
| 399 | itertools.dropwhile( | ||
| 400 | lambda x: x == 0, | ||
| 401 | reversed(release), | ||
| 402 | ) | ||
| 403 | )) | ||
| 404 | ) | ||
| 405 | |||
| 406 | # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0. | ||
| 407 | # We'll do this by abusing the pre segment, but we _only_ want to do this | ||
| 408 | # if there is not a pre or a post segment. If we have one of those then | ||
| 409 | # the normal sorting rules will handle this case correctly. | ||
| 410 | if pre is None and post is None and dev is not None: | ||
| 411 | pre = -Infinity | ||
| 412 | # Versions without a pre-release (except as noted above) should sort after | ||
| 413 | # those with one. | ||
| 414 | elif pre is None: | ||
| 415 | pre = Infinity | ||
| 416 | |||
| 417 | # Versions without a post segment should sort before those with one. | ||
| 418 | if post is None: | ||
| 419 | post = -Infinity | ||
| 420 | |||
| 421 | # Versions without a development segment should sort after those with one. | ||
| 422 | if dev is None: | ||
| 423 | dev = Infinity | ||
| 424 | |||
| 425 | if local is None: | ||
| 426 | # Versions without a local segment should sort before those with one. | ||
| 427 | local = -Infinity | ||
| 428 | else: | ||
| 429 | # Versions with a local segment need that segment parsed to implement | ||
| 430 | # the sorting rules in PEP440. | ||
| 431 | # - Alpha numeric segments sort before numeric segments | ||
| 432 | # - Alpha numeric segments sort lexicographically | ||
| 433 | # - Numeric segments sort numerically | ||
| 434 | # - Shorter versions sort before longer versions when the prefixes | ||
| 435 | # match exactly | ||
| 436 | local = tuple( | ||
| 437 | (i, "") if isinstance(i, int) else (-Infinity, i) | ||
| 438 | for i in local | ||
| 439 | ) | ||
| 440 | |||
| 441 | return epoch, release, pre, post, dev, local | ||
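The net effect of the parsing and _cmpkey above is the PEP 440 total ordering. A minimal sketch, assuming only the vendored module above:

    from pip._vendor.packaging.version import parse, Version, LegacyVersion

    # dev- and pre-releases sort before the final release, post-releases
    # after it; trailing zeros in the release segment are insignificant.
    assert Version("1.0.dev0") < Version("1.0a1") < Version("1.0") < Version("1.0.post1")
    assert Version("1.0") == Version("1.0.0")

    # A local version sorts after its public counterpart.
    assert Version("1.0") < Version("1.0+local.1")

    # parse() falls back to LegacyVersion for non-PEP-440 strings, and the
    # hardcoded epoch of -1 sorts every LegacyVersion before any Version.
    legacy = parse("2.0-alpha-banana")
    assert isinstance(legacy, LegacyVersion)
    assert legacy < Version("0.1")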
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/pkg_resources/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/pkg_resources/__init__.py new file mode 100644 index 0000000..6e1fb52 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/pkg_resources/__init__.py | |||
| @@ -0,0 +1,3125 @@ | |||
| 1 | # coding: utf-8 | ||
| 2 | """ | ||
| 3 | Package resource API | ||
| 4 | -------------------- | ||
| 5 | |||
| 6 | A resource is a logical file contained within a package, or a logical | ||
| 7 | subdirectory thereof. The package resource API expects resource names | ||
| 8 | to have their path parts separated with ``/``, *not* whatever the local | ||
| 9 | path separator is. Do not use os.path operations to manipulate resource | ||
| 10 | names being passed into the API. | ||
| 11 | |||
| 12 | The package resource API is designed to work with normal filesystem packages, | ||
| 13 | .egg files, and unpacked .egg files. It can also work in a limited way with | ||
| 14 | .zip files and with custom PEP 302 loaders that support the ``get_data()`` | ||
| 15 | method. | ||
| 16 | """ | ||
| 17 | |||
| 18 | from __future__ import absolute_import | ||
| 19 | |||
| 20 | import sys | ||
| 21 | import os | ||
| 22 | import io | ||
| 23 | import time | ||
| 24 | import re | ||
| 25 | import types | ||
| 26 | import zipfile | ||
| 27 | import zipimport | ||
| 28 | import warnings | ||
| 29 | import stat | ||
| 30 | import functools | ||
| 31 | import pkgutil | ||
| 32 | import operator | ||
| 33 | import platform | ||
| 34 | import collections | ||
| 35 | import plistlib | ||
| 36 | import email.parser | ||
| 37 | import errno | ||
| 38 | import tempfile | ||
| 39 | import textwrap | ||
| 40 | import itertools | ||
| 41 | import inspect | ||
| 42 | from pkgutil import get_importer | ||
| 43 | |||
| 44 | try: | ||
| 45 | import _imp | ||
| 46 | except ImportError: | ||
| 47 | # Python 3.2 compatibility | ||
| 48 | import imp as _imp | ||
| 49 | |||
| 50 | from pip._vendor import six | ||
| 51 | from pip._vendor.six.moves import urllib, map, filter | ||
| 52 | |||
| 53 | # capture these to bypass sandboxing | ||
| 54 | from os import utime | ||
| 55 | try: | ||
| 56 | from os import mkdir, rename, unlink | ||
| 57 | WRITE_SUPPORT = True | ||
| 58 | except ImportError: | ||
| 59 | # no write support, probably under GAE | ||
| 60 | WRITE_SUPPORT = False | ||
| 61 | |||
| 62 | from os import open as os_open | ||
| 63 | from os.path import isdir, split | ||
| 64 | |||
| 65 | try: | ||
| 66 | import importlib.machinery as importlib_machinery | ||
| 67 | # access attribute to force import under delayed import mechanisms. | ||
| 68 | importlib_machinery.__name__ | ||
| 69 | except ImportError: | ||
| 70 | importlib_machinery = None | ||
| 71 | |||
| 72 | from . import py31compat | ||
| 73 | from pip._vendor import appdirs | ||
| 74 | from pip._vendor import packaging | ||
| 75 | __import__('pip._vendor.packaging.version') | ||
| 76 | __import__('pip._vendor.packaging.specifiers') | ||
| 77 | __import__('pip._vendor.packaging.requirements') | ||
| 78 | __import__('pip._vendor.packaging.markers') | ||
| 79 | |||
| 80 | |||
| 81 | if (3, 0) < sys.version_info < (3, 3): | ||
| 82 | raise RuntimeError("Python 3.3 or later is required") | ||
| 83 | |||
| 84 | if six.PY2: | ||
| 85 | # Those builtin exceptions are only defined in Python 3 | ||
| 86 | PermissionError = None | ||
| 87 | NotADirectoryError = None | ||
| 88 | |||
| 89 | # declare some globals that will be defined later to | ||
| 90 | # satisfy the linters. | ||
| 91 | require = None | ||
| 92 | working_set = None | ||
| 93 | add_activation_listener = None | ||
| 94 | resources_stream = None | ||
| 95 | cleanup_resources = None | ||
| 96 | resource_dir = None | ||
| 97 | resource_stream = None | ||
| 98 | set_extraction_path = None | ||
| 99 | resource_isdir = None | ||
| 100 | resource_string = None | ||
| 101 | iter_entry_points = None | ||
| 102 | resource_listdir = None | ||
| 103 | resource_filename = None | ||
| 104 | resource_exists = None | ||
| 105 | _distribution_finders = None | ||
| 106 | _namespace_handlers = None | ||
| 107 | _namespace_packages = None | ||
| 108 | |||
| 109 | |||
| 110 | class PEP440Warning(RuntimeWarning): | ||
| 111 | """ | ||
| 112 | Used when there is an issue with a version or specifier not complying with | ||
| 113 | PEP 440. | ||
| 114 | """ | ||
| 115 | |||
| 116 | |||
| 117 | def parse_version(v): | ||
| 118 | try: | ||
| 119 | return packaging.version.Version(v) | ||
| 120 | except packaging.version.InvalidVersion: | ||
| 121 | return packaging.version.LegacyVersion(v) | ||
| 122 | |||
| 123 | |||
| 124 | _state_vars = {} | ||
| 125 | |||
| 126 | |||
| 127 | def _declare_state(vartype, **kw): | ||
| 128 | globals().update(kw) | ||
| 129 | _state_vars.update(dict.fromkeys(kw, vartype)) | ||
| 130 | |||
| 131 | |||
| 132 | def __getstate__(): | ||
| 133 | state = {} | ||
| 134 | g = globals() | ||
| 135 | for k, v in _state_vars.items(): | ||
| 136 | state[k] = g['_sget_' + v](g[k]) | ||
| 137 | return state | ||
| 138 | |||
| 139 | |||
| 140 | def __setstate__(state): | ||
| 141 | g = globals() | ||
| 142 | for k, v in state.items(): | ||
| 143 | g['_sset_' + _state_vars[k]](k, g[k], v) | ||
| 144 | return state | ||
| 145 | |||
| 146 | |||
| 147 | def _sget_dict(val): | ||
| 148 | return val.copy() | ||
| 149 | |||
| 150 | |||
| 151 | def _sset_dict(key, ob, state): | ||
| 152 | ob.clear() | ||
| 153 | ob.update(state) | ||
| 154 | |||
| 155 | |||
| 156 | def _sget_object(val): | ||
| 157 | return val.__getstate__() | ||
| 158 | |||
| 159 | |||
| 160 | def _sset_object(key, ob, state): | ||
| 161 | ob.__setstate__(state) | ||
| 162 | |||
| 163 | |||
| 164 | _sget_none = _sset_none = lambda *args: None | ||
| 165 | |||
| 166 | |||
| 167 | def get_supported_platform(): | ||
| 168 | """Return this platform's maximum compatible version. | ||
| 169 | |||
| 170 | distutils.util.get_platform() normally reports the minimum version | ||
| 171 | of Mac OS X that would be required to *use* extensions produced by | ||
| 172 | distutils. But what we want when checking compatibility is to know the | ||
| 173 | version of Mac OS X that we are *running*. To allow usage of packages that | ||
| 174 | explicitly require a newer version of Mac OS X, we must also know the | ||
| 175 | current version of the OS. | ||
| 176 | |||
| 177 | If this condition occurs for any other platform with a version in its | ||
| 178 | platform strings, this function should be extended accordingly. | ||
| 179 | """ | ||
| 180 | plat = get_build_platform() | ||
| 181 | m = macosVersionString.match(plat) | ||
| 182 | if m is not None and sys.platform == "darwin": | ||
| 183 | try: | ||
| 184 | plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3)) | ||
| 185 | except ValueError: | ||
| 186 | # not Mac OS X | ||
| 187 | pass | ||
| 188 | return plat | ||
| 189 | |||
| 190 | |||
| 191 | __all__ = [ | ||
| 192 | # Basic resource access and distribution/entry point discovery | ||
| 193 | 'require', 'run_script', 'get_provider', 'get_distribution', | ||
| 194 | 'load_entry_point', 'get_entry_map', 'get_entry_info', | ||
| 195 | 'iter_entry_points', | ||
| 196 | 'resource_string', 'resource_stream', 'resource_filename', | ||
| 197 | 'resource_listdir', 'resource_exists', 'resource_isdir', | ||
| 198 | |||
| 199 | # Environmental control | ||
| 200 | 'declare_namespace', 'working_set', 'add_activation_listener', | ||
| 201 | 'find_distributions', 'set_extraction_path', 'cleanup_resources', | ||
| 202 | 'get_default_cache', | ||
| 203 | |||
| 204 | # Primary implementation classes | ||
| 205 | 'Environment', 'WorkingSet', 'ResourceManager', | ||
| 206 | 'Distribution', 'Requirement', 'EntryPoint', | ||
| 207 | |||
| 208 | # Exceptions | ||
| 209 | 'ResolutionError', 'VersionConflict', 'DistributionNotFound', | ||
| 210 | 'UnknownExtra', 'ExtractionError', | ||
| 211 | |||
| 212 | # Warnings | ||
| 213 | 'PEP440Warning', | ||
| 214 | |||
| 215 | # Parsing functions and string utilities | ||
| 216 | 'parse_requirements', 'parse_version', 'safe_name', 'safe_version', | ||
| 217 | 'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections', | ||
| 218 | 'safe_extra', 'to_filename', 'invalid_marker', 'evaluate_marker', | ||
| 219 | |||
| 220 | # filesystem utilities | ||
| 221 | 'ensure_directory', 'normalize_path', | ||
| 222 | |||
| 223 | # Distribution "precedence" constants | ||
| 224 | 'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST', | ||
| 225 | |||
| 226 | # "Provider" interfaces, implementations, and registration/lookup APIs | ||
| 227 | 'IMetadataProvider', 'IResourceProvider', 'FileMetadata', | ||
| 228 | 'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider', | ||
| 229 | 'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider', | ||
| 230 | 'register_finder', 'register_namespace_handler', 'register_loader_type', | ||
| 231 | 'fixup_namespace_packages', 'get_importer', | ||
| 232 | |||
| 233 | # Deprecated/backward compatibility only | ||
| 234 | 'run_main', 'AvailableDistributions', | ||
| 235 | ] | ||
| 236 | |||
| 237 | |||
| 238 | class ResolutionError(Exception): | ||
| 239 | """Abstract base for dependency resolution errors""" | ||
| 240 | |||
| 241 | def __repr__(self): | ||
| 242 | return self.__class__.__name__ + repr(self.args) | ||
| 243 | |||
| 244 | |||
| 245 | class VersionConflict(ResolutionError): | ||
| 246 | """ | ||
| 247 | An already-installed version conflicts with the requested version. | ||
| 248 | |||
| 249 | Should be initialized with the installed Distribution and the requested | ||
| 250 | Requirement. | ||
| 251 | """ | ||
| 252 | |||
| 253 | _template = "{self.dist} is installed but {self.req} is required" | ||
| 254 | |||
| 255 | @property | ||
| 256 | def dist(self): | ||
| 257 | return self.args[0] | ||
| 258 | |||
| 259 | @property | ||
| 260 | def req(self): | ||
| 261 | return self.args[1] | ||
| 262 | |||
| 263 | def report(self): | ||
| 264 | return self._template.format(**locals()) | ||
| 265 | |||
| 266 | def with_context(self, required_by): | ||
| 267 | """ | ||
| 268 | If required_by is non-empty, return a version of self that is a | ||
| 269 | ContextualVersionConflict. | ||
| 270 | """ | ||
| 271 | if not required_by: | ||
| 272 | return self | ||
| 273 | args = self.args + (required_by,) | ||
| 274 | return ContextualVersionConflict(*args) | ||
| 275 | |||
| 276 | |||
| 277 | class ContextualVersionConflict(VersionConflict): | ||
| 278 | """ | ||
| 279 | A VersionConflict that accepts a third parameter, the set of the | ||
| 280 | requirements that required the installed Distribution. | ||
| 281 | """ | ||
| 282 | |||
| 283 | _template = VersionConflict._template + ' by {self.required_by}' | ||
| 284 | |||
| 285 | @property | ||
| 286 | def required_by(self): | ||
| 287 | return self.args[2] | ||
| 288 | |||
| 289 | |||
| 290 | class DistributionNotFound(ResolutionError): | ||
| 291 | """A requested distribution was not found""" | ||
| 292 | |||
| 293 | _template = ("The '{self.req}' distribution was not found " | ||
| 294 | "and is required by {self.requirers_str}") | ||
| 295 | |||
| 296 | @property | ||
| 297 | def req(self): | ||
| 298 | return self.args[0] | ||
| 299 | |||
| 300 | @property | ||
| 301 | def requirers(self): | ||
| 302 | return self.args[1] | ||
| 303 | |||
| 304 | @property | ||
| 305 | def requirers_str(self): | ||
| 306 | if not self.requirers: | ||
| 307 | return 'the application' | ||
| 308 | return ', '.join(self.requirers) | ||
| 309 | |||
| 310 | def report(self): | ||
| 311 | return self._template.format(**locals()) | ||
| 312 | |||
| 313 | def __str__(self): | ||
| 314 | return self.report() | ||
| 315 | |||
| 316 | |||
| 317 | class UnknownExtra(ResolutionError): | ||
| 318 | """Distribution doesn't have an "extra feature" of the given name""" | ||
| 319 | |||
| 320 | |||
| 321 | _provider_factories = {} | ||
| 322 | |||
| 323 | PY_MAJOR = sys.version[:3] | ||
| 324 | EGG_DIST = 3 | ||
| 325 | BINARY_DIST = 2 | ||
| 326 | SOURCE_DIST = 1 | ||
| 327 | CHECKOUT_DIST = 0 | ||
| 328 | DEVELOP_DIST = -1 | ||
| 329 | |||
| 330 | |||
| 331 | def register_loader_type(loader_type, provider_factory): | ||
| 332 | """Register `provider_factory` to make providers for `loader_type` | ||
| 333 | |||
| 334 | `loader_type` is the type or class of a PEP 302 ``module.__loader__``, | ||
| 335 | and `provider_factory` is a function that, passed a *module* object, | ||
| 336 | returns an ``IResourceProvider`` for that module. | ||
| 337 | """ | ||
| 338 | _provider_factories[loader_type] = provider_factory | ||
| 339 | |||
| 340 | |||
| 341 | def get_provider(moduleOrReq): | ||
| 342 | """Return an IResourceProvider for the named module or requirement""" | ||
| 343 | if isinstance(moduleOrReq, Requirement): | ||
| 344 | return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0] | ||
| 345 | try: | ||
| 346 | module = sys.modules[moduleOrReq] | ||
| 347 | except KeyError: | ||
| 348 | __import__(moduleOrReq) | ||
| 349 | module = sys.modules[moduleOrReq] | ||
| 350 | loader = getattr(module, '__loader__', None) | ||
| 351 | return _find_adapter(_provider_factories, loader)(module) | ||
| 352 | |||
| 353 | |||
| 354 | def _macosx_vers(_cache=[]): | ||
| 355 | if not _cache: | ||
| 356 | version = platform.mac_ver()[0] | ||
| 357 | # fallback for MacPorts | ||
| 358 | if version == '': | ||
| 359 | plist = '/System/Library/CoreServices/SystemVersion.plist' | ||
| 360 | if os.path.exists(plist): | ||
| 361 | if hasattr(plistlib, 'readPlist'): | ||
| 362 | plist_content = plistlib.readPlist(plist) | ||
| 363 | if 'ProductVersion' in plist_content: | ||
| 364 | version = plist_content['ProductVersion'] | ||
| 365 | |||
| 366 | _cache.append(version.split('.')) | ||
| 367 | return _cache[0] | ||
| 368 | |||
| 369 | |||
| 370 | def _macosx_arch(machine): | ||
| 371 | return {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}.get(machine, machine) | ||
| 372 | |||
| 373 | |||
| 374 | def get_build_platform(): | ||
| 375 | """Return this platform's string for platform-specific distributions | ||
| 376 | |||
| 377 | XXX Currently this is the same as ``distutils.util.get_platform()``, but it | ||
| 378 | needs some hacks for Linux and Mac OS X. | ||
| 379 | """ | ||
| 380 | try: | ||
| 381 | # Python 2.7 or >=3.2 | ||
| 382 | from sysconfig import get_platform | ||
| 383 | except ImportError: | ||
| 384 | from distutils.util import get_platform | ||
| 385 | |||
| 386 | plat = get_platform() | ||
| 387 | if sys.platform == "darwin" and not plat.startswith('macosx-'): | ||
| 388 | try: | ||
| 389 | version = _macosx_vers() | ||
| 390 | machine = os.uname()[4].replace(" ", "_") | ||
| 391 | return "macosx-%d.%d-%s" % ( | ||
| 392 | int(version[0]), int(version[1]), | ||
| 393 | _macosx_arch(machine), | ||
| 394 | ) | ||
| 395 | except ValueError: | ||
| 396 | # if someone is running a non-Mac darwin system, this will fall | ||
| 397 | # through to the default implementation | ||
| 398 | pass | ||
| 399 | return plat | ||
| 400 | |||
| 401 | |||
| 402 | macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)") | ||
| 403 | darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)") | ||
| 404 | # XXX backward compat | ||
| 405 | get_platform = get_build_platform | ||
| 406 | |||
| 407 | |||
| 408 | def compatible_platforms(provided, required): | ||
| 409 | """Can code for the `provided` platform run on the `required` platform? | ||
| 410 | |||
| 411 | Returns true if either platform is ``None``, or the platforms are equal. | ||
| 412 | |||
| 413 | XXX Needs compatibility checks for Linux and other unixy OSes. | ||
| 414 | """ | ||
| 415 | if provided is None or required is None or provided == required: | ||
| 416 | # easy case | ||
| 417 | return True | ||
| 418 | |||
| 419 | # Mac OS X special cases | ||
| 420 | reqMac = macosVersionString.match(required) | ||
| 421 | if reqMac: | ||
| 422 | provMac = macosVersionString.match(provided) | ||
| 423 | |||
| 424 | # is this a Mac package? | ||
| 425 | if not provMac: | ||
| 426 | # this is backwards compatibility for packages built before | ||
| 427 | # setuptools 0.6. All packages built after this point will | ||
| 428 | # use the new macosx designation. | ||
| 429 | provDarwin = darwinVersionString.match(provided) | ||
| 430 | if provDarwin: | ||
| 431 | dversion = int(provDarwin.group(1)) | ||
| 432 | macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2)) | ||
| 433 | if dversion == 7 and macosversion >= "10.3" or \ | ||
| 434 | dversion == 8 and macosversion >= "10.4": | ||
| 435 | return True | ||
| 436 | # egg isn't macosx or legacy darwin | ||
| 437 | return False | ||
| 438 | |||
| 439 | # are they the same major version and machine type? | ||
| 440 | if provMac.group(1) != reqMac.group(1) or \ | ||
| 441 | provMac.group(3) != reqMac.group(3): | ||
| 442 | return False | ||
| 443 | |||
| 444 | # is the required OS major update >= the provided one? | ||
| 445 | if int(provMac.group(2)) > int(reqMac.group(2)): | ||
| 446 | return False | ||
| 447 | |||
| 448 | return True | ||
| 449 | |||
| 450 | # XXX Linux and other platforms' special cases should go here | ||
| 451 | return False | ||
| 452 | |||
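# --- Illustrative note (not part of the vendored file) ---
# A sketch of the matching rules above, assuming the function as written:
# platforms must match exactly, except for the Mac OS X special case where
# an egg built for an older minor release runs on a newer one.
#
#     compatible_platforms('macosx-10.3-ppc', 'macosx-10.4-ppc')  # True
#     compatible_platforms('macosx-10.4-ppc', 'macosx-10.3-ppc')  # False
#     compatible_platforms('linux-x86_64', 'macosx-10.4-ppc')     # False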
| 453 | |||
| 454 | def run_script(dist_spec, script_name): | ||
| 455 | """Locate distribution `dist_spec` and run its `script_name` script""" | ||
| 456 | ns = sys._getframe(1).f_globals | ||
| 457 | name = ns['__name__'] | ||
| 458 | ns.clear() | ||
| 459 | ns['__name__'] = name | ||
| 460 | require(dist_spec)[0].run_script(script_name, ns) | ||
| 461 | |||
| 462 | |||
| 463 | # backward compatibility | ||
| 464 | run_main = run_script | ||
| 465 | |||
| 466 | |||
| 467 | def get_distribution(dist): | ||
| 468 | """Return a current distribution object for a Requirement or string""" | ||
| 469 | if isinstance(dist, six.string_types): | ||
| 470 | dist = Requirement.parse(dist) | ||
| 471 | if isinstance(dist, Requirement): | ||
| 472 | dist = get_provider(dist) | ||
| 473 | if not isinstance(dist, Distribution): | ||
| 474 | raise TypeError("Expected string, Requirement, or Distribution", dist) | ||
| 475 | return dist | ||
| 476 | |||
| 477 | |||
| 478 | def load_entry_point(dist, group, name): | ||
| 479 | """Return `name` entry point of `group` for `dist` or raise ImportError""" | ||
| 480 | return get_distribution(dist).load_entry_point(group, name) | ||
| 481 | |||
| 482 | |||
| 483 | def get_entry_map(dist, group=None): | ||
| 484 | """Return the entry point map for `group`, or the full entry map""" | ||
| 485 | return get_distribution(dist).get_entry_map(group) | ||
| 486 | |||
| 487 | |||
| 488 | def get_entry_info(dist, group, name): | ||
| 489 | """Return the EntryPoint object for `group`+`name`, or ``None``""" | ||
| 490 | return get_distribution(dist).get_entry_info(group, name) | ||
| 491 | |||
| 492 | |||
| 493 | class IMetadataProvider: | ||
| 494 | def has_metadata(name): | ||
| 495 | """Does the package's distribution contain the named metadata?""" | ||
| 496 | |||
| 497 | def get_metadata(name): | ||
| 498 | """The named metadata resource as a string""" | ||
| 499 | |||
| 500 | def get_metadata_lines(name): | ||
| 501 | """Yield named metadata resource as list of non-blank non-comment lines | ||
| 502 | |||
| 503 | Leading and trailing whitespace is stripped from each line, and lines | ||
| 504 | with ``#`` as the first non-blank character are omitted.""" | ||
| 505 | |||
| 506 | def metadata_isdir(name): | ||
| 507 | """Is the named metadata a directory? (like ``os.path.isdir()``)""" | ||
| 508 | |||
| 509 | def metadata_listdir(name): | ||
| 510 | """List of metadata names in the directory (like ``os.listdir()``)""" | ||
| 511 | |||
| 512 | def run_script(script_name, namespace): | ||
| 513 | """Execute the named script in the supplied namespace dictionary""" | ||
| 514 | |||
| 515 | |||
| 516 | class IResourceProvider(IMetadataProvider): | ||
| 517 | """An object that provides access to package resources""" | ||
| 518 | |||
| 519 | def get_resource_filename(manager, resource_name): | ||
| 520 | """Return a true filesystem path for `resource_name` | ||
| 521 | |||
| 522 | `manager` must be an ``IResourceManager``""" | ||
| 523 | |||
| 524 | def get_resource_stream(manager, resource_name): | ||
| 525 | """Return a readable file-like object for `resource_name` | ||
| 526 | |||
| 527 | `manager` must be an ``IResourceManager``""" | ||
| 528 | |||
| 529 | def get_resource_string(manager, resource_name): | ||
| 530 | """Return a string containing the contents of `resource_name` | ||
| 531 | |||
| 532 | `manager` must be an ``IResourceManager``""" | ||
| 533 | |||
| 534 | def has_resource(resource_name): | ||
| 535 | """Does the package contain the named resource?""" | ||
| 536 | |||
| 537 | def resource_isdir(resource_name): | ||
| 538 | """Is the named resource a directory? (like ``os.path.isdir()``)""" | ||
| 539 | |||
| 540 | def resource_listdir(resource_name): | ||
| 541 | """List of resource names in the directory (like ``os.listdir()``)""" | ||
| 542 | |||
| 543 | |||
| 544 | class WorkingSet(object): | ||
| 545 | """A collection of active distributions on sys.path (or a similar list)""" | ||
| 546 | |||
| 547 | def __init__(self, entries=None): | ||
| 548 | """Create working set from list of path entries (default=sys.path)""" | ||
| 549 | self.entries = [] | ||
| 550 | self.entry_keys = {} | ||
| 551 | self.by_key = {} | ||
| 552 | self.callbacks = [] | ||
| 553 | |||
| 554 | if entries is None: | ||
| 555 | entries = sys.path | ||
| 556 | |||
| 557 | for entry in entries: | ||
| 558 | self.add_entry(entry) | ||
| 559 | |||
| 560 | @classmethod | ||
| 561 | def _build_master(cls): | ||
| 562 | """ | ||
| 563 | Prepare the master working set. | ||
| 564 | """ | ||
| 565 | ws = cls() | ||
| 566 | try: | ||
| 567 | from __main__ import __requires__ | ||
| 568 | except ImportError: | ||
| 569 | # The main program does not list any requirements | ||
| 570 | return ws | ||
| 571 | |||
| 572 | # ensure the requirements are met | ||
| 573 | try: | ||
| 574 | ws.require(__requires__) | ||
| 575 | except VersionConflict: | ||
| 576 | return cls._build_from_requirements(__requires__) | ||
| 577 | |||
| 578 | return ws | ||
| 579 | |||
| 580 | @classmethod | ||
| 581 | def _build_from_requirements(cls, req_spec): | ||
| 582 | """ | ||
| 583 | Build a working set from a requirement spec. Rewrites sys.path. | ||
| 584 | """ | ||
| 585 | # try it without defaults already on sys.path | ||
| 586 | # by starting with an empty path | ||
| 587 | ws = cls([]) | ||
| 588 | reqs = parse_requirements(req_spec) | ||
| 589 | dists = ws.resolve(reqs, Environment()) | ||
| 590 | for dist in dists: | ||
| 591 | ws.add(dist) | ||
| 592 | |||
| 593 | # add any missing entries from sys.path | ||
| 594 | for entry in sys.path: | ||
| 595 | if entry not in ws.entries: | ||
| 596 | ws.add_entry(entry) | ||
| 597 | |||
| 598 | # then copy back to sys.path | ||
| 599 | sys.path[:] = ws.entries | ||
| 600 | return ws | ||
| 601 | |||
| 602 | def add_entry(self, entry): | ||
| 603 | """Add a path item to ``.entries``, finding any distributions on it | ||
| 604 | |||
| 605 | ``find_distributions(entry, True)`` is used to find distributions | ||
| 606 | corresponding to the path entry, and they are added. `entry` is | ||
| 607 | always appended to ``.entries``, even if it is already present. | ||
| 608 | (This is because ``sys.path`` can contain the same value more than | ||
| 609 | once, and the ``.entries`` of the ``sys.path`` WorkingSet should always | ||
| 610 | equal ``sys.path``.) | ||
| 611 | """ | ||
| 612 | self.entry_keys.setdefault(entry, []) | ||
| 613 | self.entries.append(entry) | ||
| 614 | for dist in find_distributions(entry, True): | ||
| 615 | self.add(dist, entry, False) | ||
| 616 | |||
| 617 | def __contains__(self, dist): | ||
| 618 | """True if `dist` is the active distribution for its project""" | ||
| 619 | return self.by_key.get(dist.key) == dist | ||
| 620 | |||
| 621 | def find(self, req): | ||
| 622 | """Find a distribution matching requirement `req` | ||
| 623 | |||
| 624 | If there is an active distribution for the requested project, this | ||
| 625 | returns it as long as it meets the version requirement specified by | ||
| 626 | `req`. But, if there is an active distribution for the project and it | ||
| 627 | does *not* meet the `req` requirement, ``VersionConflict`` is raised. | ||
| 628 | If there is no active distribution for the requested project, ``None`` | ||
| 629 | is returned. | ||
| 630 | """ | ||
| 631 | dist = self.by_key.get(req.key) | ||
| 632 | if dist is not None and dist not in req: | ||
| 633 | # XXX add more info | ||
| 634 | raise VersionConflict(dist, req) | ||
| 635 | return dist | ||
| 636 | |||
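| | # Illustrative sketch (not part of the original pkg_resources source): | ||
| | # find() only consults already-activated distributions; it never | ||
| | # searches the filesystem: | ||
| | # | ||
| | #     req = Requirement.parse('pip>=10.0')  # assumes pip is activated | ||
| | #     dist = working_set.find(req)  # Distribution, or None if inactive; | ||
| | #                                   # raises VersionConflict if the active | ||
| | #                                   # version does not satisfy req | ||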
| 637 | def iter_entry_points(self, group, name=None): | ||
| 638 | """Yield entry point objects from `group` matching `name` | ||
| 639 | |||
| 640 | If `name` is None, yields all entry points in `group` from all | ||
| 641 | distributions in the working set, otherwise only ones matching | ||
| 642 | both `group` and `name` are yielded (in distribution order). | ||
| 643 | """ | ||
| 644 | for dist in self: | ||
| 645 | entries = dist.get_entry_map(group) | ||
| 646 | if name is None: | ||
| 647 | for ep in entries.values(): | ||
| 648 | yield ep | ||
| 649 | elif name in entries: | ||
| 650 | yield entries[name] | ||
| 651 | |||
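| | # Illustrative sketch (not part of the original pkg_resources source): | ||
| | # a typical plugin-discovery loop over a hypothetical group name: | ||
| | # | ||
| | #     for ep in working_set.iter_entry_points('myapp.plugins'): | ||
| | #         plugin = ep.load()  # import and return the target object | ||
| | #         plugin() | ||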
| 652 | def run_script(self, requires, script_name): | ||
| 653 | """Locate distribution for `requires` and run `script_name` script""" | ||
| 654 | ns = sys._getframe(1).f_globals | ||
| 655 | name = ns['__name__'] | ||
| 656 | ns.clear() | ||
| 657 | ns['__name__'] = name | ||
| 658 | self.require(requires)[0].run_script(script_name, ns) | ||
| 659 | |||
| 660 | def __iter__(self): | ||
| 661 | """Yield distributions for non-duplicate projects in the working set | ||
| 662 | |||
| 663 | The yield order is the order in which the items' path entries were | ||
| 664 | added to the working set. | ||
| 665 | """ | ||
| 666 | seen = {} | ||
| 667 | for item in self.entries: | ||
| 668 | if item not in self.entry_keys: | ||
| 669 | # workaround a cache issue | ||
| 670 | continue | ||
| 671 | |||
| 672 | for key in self.entry_keys[item]: | ||
| 673 | if key not in seen: | ||
| 674 | seen[key] = 1 | ||
| 675 | yield self.by_key[key] | ||
| 676 | |||
| 677 | def add(self, dist, entry=None, insert=True, replace=False): | ||
| 678 | """Add `dist` to working set, associated with `entry` | ||
| 679 | |||
| 680 | If `entry` is unspecified, it defaults to the ``.location`` of `dist`. | ||
| 681 | On exit from this routine, `entry` is added to the end of the working | ||
| 682 | set's ``.entries`` (if it wasn't already present). | ||
| 683 | |||
| 684 | `dist` is only added to the working set if it's for a project that | ||
| 685 | doesn't already have a distribution in the set, unless `replace=True`. | ||
| 686 | If it's added, any callbacks registered with the ``subscribe()`` method | ||
| 687 | will be called. | ||
| 688 | """ | ||
| 689 | if insert: | ||
| 690 | dist.insert_on(self.entries, entry, replace=replace) | ||
| 691 | |||
| 692 | if entry is None: | ||
| 693 | entry = dist.location | ||
| 694 | keys = self.entry_keys.setdefault(entry, []) | ||
| 695 | keys2 = self.entry_keys.setdefault(dist.location, []) | ||
| 696 | if not replace and dist.key in self.by_key: | ||
| 697 | # ignore hidden distros | ||
| 698 | return | ||
| 699 | |||
| 700 | self.by_key[dist.key] = dist | ||
| 701 | if dist.key not in keys: | ||
| 702 | keys.append(dist.key) | ||
| 703 | if dist.key not in keys2: | ||
| 704 | keys2.append(dist.key) | ||
| 705 | self._added_new(dist) | ||
| 706 | |||
| 707 | def resolve(self, requirements, env=None, installer=None, | ||
| 708 | replace_conflicting=False, extras=None): | ||
| 709 | """List all distributions needed to (recursively) meet `requirements` | ||
| 710 | |||
| 711 | `requirements` must be a sequence of ``Requirement`` objects. `env`, | ||
| 712 | if supplied, should be an ``Environment`` instance. If | ||
| 713 | not supplied, it defaults to all distributions available within any | ||
| 714 | entry or distribution in the working set. `installer`, if supplied, | ||
| 715 | will be invoked with each requirement that cannot be met by an | ||
| 716 | already-installed distribution; it should return a ``Distribution`` or | ||
| 717 | ``None``. | ||
| 718 | |||
| 719 | Unless `replace_conflicting=True`, raises a VersionConflict exception | ||
| 720 | if any requirements are found on the path that have the correct name | ||
| 721 | but the wrong version. Otherwise, if an `installer` is supplied it | ||
| 722 | will be invoked to obtain the correct version of the requirement and | ||
| 723 | activate it. | ||
| 725 | |||
| 726 | `extras` is a list of the extras to be used with these requirements. | ||
| 727 | This is important because extra requirements may look like `my_req; | ||
| 728 | extra = "my_extra"`, which would otherwise be interpreted as a purely | ||
| 729 | optional requirement. Instead, we want to be able to assert that these | ||
| 730 | requirements are truly required. | ||
| 731 | """ | ||
| 732 | |||
| 733 | # set up the queue of requirements to process | ||
| 734 | requirements = list(requirements)[::-1] | ||
| 735 | # set of processed requirements | ||
| 736 | processed = {} | ||
| 737 | # key -> dist | ||
| 738 | best = {} | ||
| 739 | to_activate = [] | ||
| 740 | |||
| 741 | req_extras = _ReqExtras() | ||
| 742 | |||
| 743 | # Mapping of requirement to set of distributions that required it; | ||
| 744 | # useful for reporting info about conflicts. | ||
| 745 | required_by = collections.defaultdict(set) | ||
| 746 | |||
| 747 | while requirements: | ||
| 748 | # process dependencies breadth-first | ||
| 749 | req = requirements.pop(0) | ||
| 750 | if req in processed: | ||
| 751 | # Ignore cyclic or redundant dependencies | ||
| 752 | continue | ||
| 753 | |||
| 754 | if not req_extras.markers_pass(req, extras): | ||
| 755 | continue | ||
| 756 | |||
| 757 | dist = best.get(req.key) | ||
| 758 | if dist is None: | ||
| 759 | # Find the best distribution and add it to the map | ||
| 760 | dist = self.by_key.get(req.key) | ||
| 761 | if dist is None or (dist not in req and replace_conflicting): | ||
| 762 | ws = self | ||
| 763 | if env is None: | ||
| 764 | if dist is None: | ||
| 765 | env = Environment(self.entries) | ||
| 766 | else: | ||
| 767 | # Use an empty environment and workingset to avoid | ||
| 768 | # any further conflicts with the conflicting | ||
| 769 | # distribution | ||
| 770 | env = Environment([]) | ||
| 771 | ws = WorkingSet([]) | ||
| 772 | dist = best[req.key] = env.best_match( | ||
| 773 | req, ws, installer, | ||
| 774 | replace_conflicting=replace_conflicting | ||
| 775 | ) | ||
| 776 | if dist is None: | ||
| 777 | requirers = required_by.get(req, None) | ||
| 778 | raise DistributionNotFound(req, requirers) | ||
| 779 | to_activate.append(dist) | ||
| 780 | if dist not in req: | ||
| 781 | # Oops, the "best" so far conflicts with a dependency | ||
| 782 | dependent_req = required_by[req] | ||
| 783 | raise VersionConflict(dist, req).with_context(dependent_req) | ||
| 784 | |||
| 785 | # append the new requirements to the processing queue | ||
| 786 | new_requirements = dist.requires(req.extras)[::-1] | ||
| 787 | requirements.extend(new_requirements) | ||
| 788 | |||
| 789 | # Register the new requirements needed by req | ||
| 790 | for new_requirement in new_requirements: | ||
| 791 | required_by[new_requirement].add(req.project_name) | ||
| 792 | req_extras[new_requirement] = req.extras | ||
| 793 | |||
| 794 | processed[req] = True | ||
| 795 | |||
| 796 | # return list of distros to activate | ||
| 797 | return to_activate | ||
| 798 | |||
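| | # Illustrative sketch (not part of the original pkg_resources source): | ||
| | # resolve() takes parsed Requirement objects and returns distributions | ||
| | # in activation order without activating them: | ||
| | # | ||
| | #     reqs = parse_requirements('examplepkg>=1.0')  # hypothetical project | ||
| | #     try: | ||
| | #         for dist in working_set.resolve(list(reqs)): | ||
| | #             working_set.add(dist) | ||
| | #     except (DistributionNotFound, VersionConflict) as exc: | ||
| | #         print('resolution failed:', exc) | ||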
| 799 | def find_plugins( | ||
| 800 | self, plugin_env, full_env=None, installer=None, fallback=True): | ||
| 801 | """Find all activatable distributions in `plugin_env` | ||
| 802 | |||
| 803 | Example usage:: | ||
| 804 | |||
| 805 | distributions, errors = working_set.find_plugins( | ||
| 806 | Environment(plugin_dirlist) | ||
| 807 | ) | ||
| 808 | # add plugins+libs to sys.path | ||
| 809 | map(working_set.add, distributions) | ||
| 810 | # display errors | ||
| 811 | print('Could not load', errors) | ||
| 812 | |||
| 813 | The `plugin_env` should be an ``Environment`` instance that contains | ||
| 814 | only distributions that are in the project's "plugin directory" or | ||
| 815 | directories. The `full_env`, if supplied, should be an ``Environment`` | ||
| 816 | that contains all currently-available distributions. If `full_env` is not | ||
| 817 | supplied, one is created automatically from the ``WorkingSet`` this | ||
| 818 | method is called on, which will typically mean that every directory on | ||
| 819 | ``sys.path`` will be scanned for distributions. | ||
| 820 | |||
| 821 | `installer` is a standard installer callback as used by the | ||
| 822 | ``resolve()`` method. The `fallback` flag indicates whether we should | ||
| 823 | attempt to resolve older versions of a plugin if the newest version | ||
| 824 | cannot be resolved. | ||
| 825 | |||
| 826 | This method returns a 2-tuple: (`distributions`, `error_info`), where | ||
| 827 | `distributions` is a list of the distributions found in `plugin_env` | ||
| 828 | that were loadable, along with any other distributions that are needed | ||
| 829 | to resolve their dependencies. `error_info` is a dictionary mapping | ||
| 830 | unloadable plugin distributions to an exception instance describing the | ||
| 831 | error that occurred. Usually this will be a ``DistributionNotFound`` or | ||
| 832 | ``VersionConflict`` instance. | ||
| 833 | """ | ||
| 834 | |||
| 835 | plugin_projects = list(plugin_env) | ||
| 836 | # scan project names in alphabetic order | ||
| 837 | plugin_projects.sort() | ||
| 838 | |||
| 839 | error_info = {} | ||
| 840 | distributions = {} | ||
| 841 | |||
| 842 | if full_env is None: | ||
| 843 | env = Environment(self.entries) | ||
| 844 | env += plugin_env | ||
| 845 | else: | ||
| 846 | env = full_env + plugin_env | ||
| 847 | |||
| 848 | shadow_set = self.__class__([]) | ||
| 849 | # put all our entries in shadow_set | ||
| 850 | list(map(shadow_set.add, self)) | ||
| 851 | |||
| 852 | for project_name in plugin_projects: | ||
| 853 | |||
| 854 | for dist in plugin_env[project_name]: | ||
| 855 | |||
| 856 | req = [dist.as_requirement()] | ||
| 857 | |||
| 858 | try: | ||
| 859 | resolvees = shadow_set.resolve(req, env, installer) | ||
| 860 | |||
| 861 | except ResolutionError as v: | ||
| 862 | # save error info | ||
| 863 | error_info[dist] = v | ||
| 864 | if fallback: | ||
| 865 | # try the next older version of project | ||
| 866 | continue | ||
| 867 | else: | ||
| 868 | # give up on this project, keep going | ||
| 869 | break | ||
| 870 | |||
| 871 | else: | ||
| 872 | list(map(shadow_set.add, resolvees)) | ||
| 873 | distributions.update(dict.fromkeys(resolvees)) | ||
| 874 | |||
| 875 | # success, no need to try any more versions of this project | ||
| 876 | break | ||
| 877 | |||
| 878 | distributions = list(distributions) | ||
| 879 | distributions.sort() | ||
| 880 | |||
| 881 | return distributions, error_info | ||
| 882 | |||
| 883 | def require(self, *requirements): | ||
| 884 | """Ensure that distributions matching `requirements` are activated | ||
| 885 | |||
| 886 | `requirements` must be a string or a (possibly-nested) sequence | ||
| 887 | thereof, specifying the distributions and versions required. The | ||
| 888 | return value is a sequence of the distributions that needed to be | ||
| 889 | activated to fulfill the requirements; all relevant distributions are | ||
| 890 | included, even if they were already activated in this working set. | ||
| 891 | """ | ||
| 892 | needed = self.resolve(parse_requirements(requirements)) | ||
| 893 | |||
| 894 | for dist in needed: | ||
| 895 | self.add(dist) | ||
| 896 | |||
| 897 | return needed | ||
| 898 | |||
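| | # Illustrative sketch (not part of the original pkg_resources source): | ||
| | # require() is the resolve-then-activate convenience used when handling | ||
| | # __main__.__requires__: | ||
| | # | ||
| | #     activated = working_set.require('examplepkg>=1.0')  # hypothetical | ||
| | #     for dist in activated: | ||
| | #         print('activated', dist) | ||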
| 899 | def subscribe(self, callback, existing=True): | ||
| 900 | """Invoke `callback` for all distributions | ||
| 901 | |||
| 902 | If `existing=True` (default), | ||
| 903 | call on all existing ones, as well. | ||
| 904 | """ | ||
| 905 | if callback in self.callbacks: | ||
| 906 | return | ||
| 907 | self.callbacks.append(callback) | ||
| 908 | if not existing: | ||
| 909 | return | ||
| 910 | for dist in self: | ||
| 911 | callback(dist) | ||
| 912 | |||
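| | # Illustrative sketch (not part of the original pkg_resources source): | ||
| | # callbacks fire for every distribution added later and, with | ||
| | # existing=True, for those already in the set: | ||
| | # | ||
| | #     working_set.subscribe(lambda dist: print('activated', dist.key)) | ||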
| 913 | def _added_new(self, dist): | ||
| 914 | for callback in self.callbacks: | ||
| 915 | callback(dist) | ||
| 916 | |||
| 917 | def __getstate__(self): | ||
| 918 | return ( | ||
| 919 | self.entries[:], self.entry_keys.copy(), self.by_key.copy(), | ||
| 920 | self.callbacks[:] | ||
| 921 | ) | ||
| 922 | |||
| 923 | def __setstate__(self, e_k_b_c): | ||
| 924 | entries, keys, by_key, callbacks = e_k_b_c | ||
| 925 | self.entries = entries[:] | ||
| 926 | self.entry_keys = keys.copy() | ||
| 927 | self.by_key = by_key.copy() | ||
| 928 | self.callbacks = callbacks[:] | ||
| 929 | |||
| 930 | |||
| 931 | class _ReqExtras(dict): | ||
| 932 | """ | ||
| 933 | Map each requirement to the extras that demanded it. | ||
| 934 | """ | ||
| 935 | |||
| 936 | def markers_pass(self, req, extras=None): | ||
| 937 | """ | ||
| 938 | Evaluate markers for req against each extra that | ||
| 939 | demanded it. | ||
| 940 | |||
| 941 | Return False if the req has a marker and fails | ||
| 942 | evaluation. Otherwise, return True. | ||
| 943 | """ | ||
| 944 | extra_evals = ( | ||
| 945 | req.marker.evaluate({'extra': extra}) | ||
| 946 | for extra in self.get(req, ()) + (extras or (None,)) | ||
| 947 | ) | ||
| 948 | return not req.marker or any(extra_evals) | ||
| 949 | |||
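| | # Illustrative sketch (not part of the original pkg_resources source): | ||
| | # given a requirement whose marker is `extra == "testing"`, | ||
| | # markers_pass() returns True only if some extra recorded for (or passed | ||
| | # alongside) the requirement satisfies it: | ||
| | # | ||
| | #     req_extras = _ReqExtras() | ||
| | #     req = Requirement.parse('examplepkg; extra == "testing"') | ||
| | #     req_extras.markers_pass(req)                       # False | ||
| | #     req_extras.markers_pass(req, extras=('testing',))  # True | ||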
| 950 | |||
| 951 | class Environment(object): | ||
| 952 | """Searchable snapshot of distributions on a search path""" | ||
| 953 | |||
| 954 | def __init__( | ||
| 955 | self, search_path=None, platform=get_supported_platform(), | ||
| 956 | python=PY_MAJOR): | ||
| 957 | """Snapshot distributions available on a search path | ||
| 958 | |||
| 959 | Any distributions found on `search_path` are added to the environment. | ||
| 960 | `search_path` should be a sequence of ``sys.path`` items. If not | ||
| 961 | supplied, ``sys.path`` is used. | ||
| 962 | |||
| 963 | `platform` is an optional string specifying the name of the platform | ||
| 964 | that platform-specific distributions must be compatible with. If | ||
| 965 | unspecified, it defaults to the current platform. `python` is an | ||
| 966 | optional string naming the desired version of Python (e.g. ``'3.3'``); | ||
| 967 | it defaults to the current version. | ||
| 968 | |||
| 969 | You may explicitly set `platform` (and/or `python`) to ``None`` if you | ||
| 970 | wish to map *all* distributions, not just those compatible with the | ||
| 971 | running platform or Python version. | ||
| 972 | """ | ||
| 973 | self._distmap = {} | ||
| 974 | self.platform = platform | ||
| 975 | self.python = python | ||
| 976 | self.scan(search_path) | ||
| 977 | |||
| 978 | def can_add(self, dist): | ||
| 979 | """Is distribution `dist` acceptable for this environment? | ||
| 980 | |||
| 981 | Return True only if the distribution matches the platform and | ||
| 982 | python version requirements specified when this environment was | ||
| 983 | created; otherwise return False. | ||
| 984 | """ | ||
| 985 | py_compat = ( | ||
| 986 | self.python is None | ||
| 987 | or dist.py_version is None | ||
| 988 | or dist.py_version == self.python | ||
| 989 | ) | ||
| 990 | return py_compat and compatible_platforms(dist.platform, self.platform) | ||
| 991 | |||
| 992 | def remove(self, dist): | ||
| 993 | """Remove `dist` from the environment""" | ||
| 994 | self._distmap[dist.key].remove(dist) | ||
| 995 | |||
| 996 | def scan(self, search_path=None): | ||
| 997 | """Scan `search_path` for distributions usable in this environment | ||
| 998 | |||
| 999 | Any distributions found are added to the environment. | ||
| 1000 | `search_path` should be a sequence of ``sys.path`` items. If not | ||
| 1001 | supplied, ``sys.path`` is used. Only distributions conforming to | ||
| 1002 | the platform/python version defined at initialization are added. | ||
| 1003 | """ | ||
| 1004 | if search_path is None: | ||
| 1005 | search_path = sys.path | ||
| 1006 | |||
| 1007 | for item in search_path: | ||
| 1008 | for dist in find_distributions(item): | ||
| 1009 | self.add(dist) | ||
| 1010 | |||
| 1011 | def __getitem__(self, project_name): | ||
| 1012 | """Return a newest-to-oldest list of distributions for `project_name` | ||
| 1013 | |||
| 1014 | Uses case-insensitive `project_name` comparison, assuming all the | ||
| 1015 | project's distributions use their project's name converted to all | ||
| 1016 | lowercase as their key. | ||
| 1017 | |||
| 1018 | """ | ||
| 1019 | distribution_key = project_name.lower() | ||
| 1020 | return self._distmap.get(distribution_key, []) | ||
| 1021 | |||
| 1022 | def add(self, dist): | ||
| 1023 | """Add `dist` if we ``can_add()`` it and it has not already been added | ||
| 1024 | """ | ||
| 1025 | if self.can_add(dist) and dist.has_version(): | ||
| 1026 | dists = self._distmap.setdefault(dist.key, []) | ||
| 1027 | if dist not in dists: | ||
| 1028 | dists.append(dist) | ||
| 1029 | dists.sort(key=operator.attrgetter('hashcmp'), reverse=True) | ||
| 1030 | |||
| 1031 | def best_match( | ||
| 1032 | self, req, working_set, installer=None, replace_conflicting=False): | ||
| 1033 | """Find distribution best matching `req` and usable on `working_set` | ||
| 1034 | |||
| 1035 | This calls the ``find(req)`` method of the `working_set` to see if a | ||
| 1036 | suitable distribution is already active. (This may raise | ||
| 1037 | ``VersionConflict`` if an unsuitable version of the project is already | ||
| 1038 | active in the specified `working_set`.) If a suitable distribution | ||
| 1039 | isn't active, this method returns the newest distribution in the | ||
| 1040 | environment that meets the ``Requirement`` in `req`. If no suitable | ||
| 1041 | distribution is found, and `installer` is supplied, then the result of | ||
| 1042 | calling the environment's ``obtain(req, installer)`` method will be | ||
| 1043 | returned. | ||
| 1044 | """ | ||
| 1045 | try: | ||
| 1046 | dist = working_set.find(req) | ||
| 1047 | except VersionConflict: | ||
| 1048 | if not replace_conflicting: | ||
| 1049 | raise | ||
| 1050 | dist = None | ||
| 1051 | if dist is not None: | ||
| 1052 | return dist | ||
| 1053 | for dist in self[req.key]: | ||
| 1054 | if dist in req: | ||
| 1055 | return dist | ||
| 1056 | # try to download/install | ||
| 1057 | return self.obtain(req, installer) | ||
| 1058 | |||
| 1059 | def obtain(self, requirement, installer=None): | ||
| 1060 | """Obtain a distribution matching `requirement` (e.g. via download) | ||
| 1061 | |||
| 1062 | Obtain a distro that matches requirement (e.g. via download). In the | ||
| 1063 | base ``Environment`` class, this routine just returns | ||
| 1064 | ``installer(requirement)``, unless `installer` is None, in which case | ||
| 1065 | None is returned instead. This method is a hook that allows subclasses | ||
| 1066 | to attempt other ways of obtaining a distribution before falling back | ||
| 1067 | to the `installer` argument.""" | ||
| 1068 | if installer is not None: | ||
| 1069 | return installer(requirement) | ||
| 1070 | |||
| 1071 | def __iter__(self): | ||
| 1072 | """Yield the unique project names of the available distributions""" | ||
| 1073 | for key in self._distmap.keys(): | ||
| 1074 | if self[key]: | ||
| 1075 | yield key | ||
| 1076 | |||
| 1077 | def __iadd__(self, other): | ||
| 1078 | """In-place addition of a distribution or environment""" | ||
| 1079 | if isinstance(other, Distribution): | ||
| 1080 | self.add(other) | ||
| 1081 | elif isinstance(other, Environment): | ||
| 1082 | for project in other: | ||
| 1083 | for dist in other[project]: | ||
| 1084 | self.add(dist) | ||
| 1085 | else: | ||
| 1086 | raise TypeError("Can't add %r to environment" % (other,)) | ||
| 1087 | return self | ||
| 1088 | |||
| 1089 | def __add__(self, other): | ||
| 1090 | """Add an environment or distribution to an environment""" | ||
| 1091 | new = self.__class__([], platform=None, python=None) | ||
| 1092 | for env in self, other: | ||
| 1093 | new += env | ||
| 1094 | return new | ||
| 1095 | |||
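| | # Illustrative sketch (not part of the original pkg_resources source): | ||
| | # an Environment is a snapshot keyed by lowercased project name, and | ||
| | # snapshots compose: | ||
| | # | ||
| | #     env = Environment()                 # scans sys.path by default | ||
| | #     plugins = Environment(['plugins'])  # hypothetical plugin dir | ||
| | #     combined = env + plugins            # merged; platform/python=None | ||
| | #     newest_first = combined['examplepkg']  # [] for unknown projects | ||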
| 1096 | |||
| 1097 | # XXX backward compatibility | ||
| 1098 | AvailableDistributions = Environment | ||
| 1099 | |||
| 1100 | |||
| 1101 | class ExtractionError(RuntimeError): | ||
| 1102 | """An error occurred extracting a resource | ||
| 1103 | |||
| 1104 | The following attributes are available from instances of this exception: | ||
| 1105 | |||
| 1106 | manager | ||
| 1107 | The resource manager that raised this exception | ||
| 1108 | |||
| 1109 | cache_path | ||
| 1110 | The base directory for resource extraction | ||
| 1111 | |||
| 1112 | original_error | ||
| 1113 | The exception instance that caused extraction to fail | ||
| 1114 | """ | ||
| 1115 | |||
| 1116 | |||
| 1117 | class ResourceManager: | ||
| 1118 | """Manage resource extraction and packages""" | ||
| 1119 | extraction_path = None | ||
| 1120 | |||
| 1121 | def __init__(self): | ||
| 1122 | self.cached_files = {} | ||
| 1123 | |||
| 1124 | def resource_exists(self, package_or_requirement, resource_name): | ||
| 1125 | """Does the named resource exist?""" | ||
| 1126 | return get_provider(package_or_requirement).has_resource(resource_name) | ||
| 1127 | |||
| 1128 | def resource_isdir(self, package_or_requirement, resource_name): | ||
| 1129 | """Is the named resource an existing directory?""" | ||
| 1130 | return get_provider(package_or_requirement).resource_isdir( | ||
| 1131 | resource_name | ||
| 1132 | ) | ||
| 1133 | |||
| 1134 | def resource_filename(self, package_or_requirement, resource_name): | ||
| 1135 | """Return a true filesystem path for specified resource""" | ||
| 1136 | return get_provider(package_or_requirement).get_resource_filename( | ||
| 1137 | self, resource_name | ||
| 1138 | ) | ||
| 1139 | |||
| 1140 | def resource_stream(self, package_or_requirement, resource_name): | ||
| 1141 | """Return a readable file-like object for specified resource""" | ||
| 1142 | return get_provider(package_or_requirement).get_resource_stream( | ||
| 1143 | self, resource_name | ||
| 1144 | ) | ||
| 1145 | |||
| 1146 | def resource_string(self, package_or_requirement, resource_name): | ||
| 1147 | """Return specified resource as a string""" | ||
| 1148 | return get_provider(package_or_requirement).get_resource_string( | ||
| 1149 | self, resource_name | ||
| 1150 | ) | ||
| 1151 | |||
| 1152 | def resource_listdir(self, package_or_requirement, resource_name): | ||
| 1153 | """List the contents of the named resource directory""" | ||
| 1154 | return get_provider(package_or_requirement).resource_listdir( | ||
| 1155 | resource_name | ||
| 1156 | ) | ||
| 1157 | |||
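| | # Illustrative sketch (not part of the original pkg_resources source): | ||
| | # these accessors are usually reached through the module-level API | ||
| | # bound to the singleton ResourceManager, e.g. with hypothetical | ||
| | # package/resource names: | ||
| | # | ||
| | #     import pkg_resources | ||
| | #     data = pkg_resources.resource_string('examplepkg', 'data/config.txt') | ||
| | #     path = pkg_resources.resource_filename('examplepkg', 'data') | ||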
| 1158 | def extraction_error(self): | ||
| 1159 | """Give an error message for problems extracting file(s)""" | ||
| 1160 | |||
| 1161 | old_exc = sys.exc_info()[1] | ||
| 1162 | cache_path = self.extraction_path or get_default_cache() | ||
| 1163 | |||
| 1164 | tmpl = textwrap.dedent(""" | ||
| 1165 | Can't extract file(s) to egg cache | ||
| 1166 | |||
| 1167 | The following error occurred while trying to extract file(s) | ||
| 1168 | to the Python egg cache: | ||
| 1169 | |||
| 1170 | {old_exc} | ||
| 1171 | |||
| 1172 | The Python egg cache directory is currently set to: | ||
| 1173 | |||
| 1174 | {cache_path} | ||
| 1175 | |||
| 1176 | Perhaps your account does not have write access to this directory? | ||
| 1177 | You can change the cache directory by setting the PYTHON_EGG_CACHE | ||
| 1178 | environment variable to point to an accessible directory. | ||
| 1179 | """).lstrip() | ||
| 1180 | err = ExtractionError(tmpl.format(**locals())) | ||
| 1181 | err.manager = self | ||
| 1182 | err.cache_path = cache_path | ||
| 1183 | err.original_error = old_exc | ||
| 1184 | raise err | ||
| 1185 | |||
| 1186 | def get_cache_path(self, archive_name, names=()): | ||
| 1187 | """Return absolute location in cache for `archive_name` and `names` | ||
| 1188 | |||
| 1189 | The parent directory of the resulting path will be created if it does | ||
| 1190 | not already exist. `archive_name` should be the base filename of the | ||
| 1191 | enclosing egg (which may not be the name of the enclosing zipfile!), | ||
| 1192 | including its ".egg" extension. `names`, if provided, should be a | ||
| 1193 | sequence of path name parts "under" the egg's extraction location. | ||
| 1194 | |||
| 1195 | This method should only be called by resource providers that need to | ||
| 1196 | obtain an extraction location, and only for names they intend to | ||
| 1197 | extract, as it tracks the generated names for possible cleanup later. | ||
| 1198 | """ | ||
| 1199 | extract_path = self.extraction_path or get_default_cache() | ||
| 1200 | target_path = os.path.join(extract_path, archive_name + '-tmp', *names) | ||
| 1201 | try: | ||
| 1202 | _bypass_ensure_directory(target_path) | ||
| 1203 | except Exception: | ||
| 1204 | self.extraction_error() | ||
| 1205 | |||
| 1206 | self._warn_unsafe_extraction_path(extract_path) | ||
| 1207 | |||
| 1208 | self.cached_files[target_path] = 1 | ||
| 1209 | return target_path | ||
| 1210 | |||
| 1211 | @staticmethod | ||
| 1212 | def _warn_unsafe_extraction_path(path): | ||
| 1213 | """ | ||
| 1214 | If the default extraction path is overridden and set to an insecure | ||
| 1215 | location, such as /tmp, it opens up an opportunity for an attacker to | ||
| 1216 | replace an extracted file with an unauthorized payload. Warn the user | ||
| 1217 | if a known insecure location is used. | ||
| 1218 | |||
| 1219 | See Distribute #375 for more details. | ||
| 1220 | """ | ||
| 1221 | if os.name == 'nt' and not path.startswith(os.environ['windir']): | ||
| 1222 | # On Windows, permissions are generally restrictive by default | ||
| 1223 | # and temp directories are not writable by other users, so | ||
| 1224 | # bypass the warning. | ||
| 1225 | return | ||
| 1226 | mode = os.stat(path).st_mode | ||
| 1227 | if mode & stat.S_IWOTH or mode & stat.S_IWGRP: | ||
| 1228 | msg = ( | ||
| 1229 | "%s is writable by group/others and vulnerable to attack " | ||
| 1230 | "when " | ||
| 1231 | "used with get_resource_filename. Consider a more secure " | ||
| 1232 | "location (set with .set_extraction_path or the " | ||
| 1233 | "PYTHON_EGG_CACHE environment variable)." % path | ||
| 1234 | ) | ||
| 1235 | warnings.warn(msg, UserWarning) | ||
| 1236 | |||
| 1237 | def postprocess(self, tempname, filename): | ||
| 1238 | """Perform any platform-specific postprocessing of `tempname` | ||
| 1239 | |||
| 1240 | This is where Mac header rewrites should be done; other platforms don't | ||
| 1241 | have anything special they should do. | ||
| 1242 | |||
| 1243 | Resource providers should call this method ONLY after successfully | ||
| 1244 | extracting a compressed resource. They must NOT call it on resources | ||
| 1245 | that are already in the filesystem. | ||
| 1246 | |||
| 1247 | `tempname` is the current (temporary) name of the file, and `filename` | ||
| 1248 | is the name it will be renamed to by the caller after this routine | ||
| 1249 | returns. | ||
| 1250 | """ | ||
| 1251 | |||
| 1252 | if os.name == 'posix': | ||
| 1253 | # Make the resource executable | ||
| 1254 | mode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777 | ||
| 1255 | os.chmod(tempname, mode) | ||
| 1256 | |||
| 1257 | def set_extraction_path(self, path): | ||
| 1258 | """Set the base path where resources will be extracted to, if needed. | ||
| 1259 | |||
| 1260 | If you do not call this routine before any extractions take place, the | ||
| 1261 | path defaults to the return value of ``get_default_cache()``. (Which | ||
| 1262 | is based on the ``PYTHON_EGG_CACHE`` environment variable, with various | ||
| 1263 | platform-specific fallbacks. See that routine's documentation for more | ||
| 1264 | details.) | ||
| 1265 | |||
| 1266 | Resources are extracted to subdirectories of this path based upon | ||
| 1267 | information given by the ``IResourceProvider``. You may set this to a | ||
| 1268 | temporary directory, but then you must call ``cleanup_resources()`` to | ||
| 1269 | delete the extracted files when done. There is no guarantee that | ||
| 1270 | ``cleanup_resources()`` will be able to remove all extracted files. | ||
| 1271 | |||
| 1272 | (Note: you may not change the extraction path for a given resource | ||
| 1273 | manager once resources have been extracted, unless you first call | ||
| 1274 | ``cleanup_resources()``.) | ||
| 1275 | """ | ||
| 1276 | if self.cached_files: | ||
| 1277 | raise ValueError( | ||
| 1278 | "Can't change extraction path, files already extracted" | ||
| 1279 | ) | ||
| 1280 | |||
| 1281 | self.extraction_path = path | ||
| 1282 | |||
| 1283 | def cleanup_resources(self, force=False): | ||
| 1284 | """ | ||
| 1285 | Delete all extracted resource files and directories, returning a list | ||
| 1286 | of the file and directory names that could not be successfully removed. | ||
| 1287 | This function does not have any concurrency protection, so it should | ||
| 1288 | generally only be called when the extraction path is a temporary | ||
| 1289 | directory exclusive to a single process. This method is not | ||
| 1290 | automatically called; you must call it explicitly or register it as an | ||
| 1291 | ``atexit`` function if you wish to ensure cleanup of a temporary | ||
| 1292 | directory used for extractions. | ||
| 1293 | """ | ||
| 1294 | # XXX | ||
| 1295 | |||
| 1296 | |||
| 1297 | def get_default_cache(): | ||
| 1298 | """ | ||
| 1299 | Return the ``PYTHON_EGG_CACHE`` environment variable | ||
| 1300 | or a platform-relevant user cache dir for an app | ||
| 1301 | named "Python-Eggs". | ||
| 1302 | """ | ||
| 1303 | return ( | ||
| 1304 | os.environ.get('PYTHON_EGG_CACHE') | ||
| 1305 | or appdirs.user_cache_dir(appname='Python-Eggs') | ||
| 1306 | ) | ||
| 1307 | |||
| 1308 | |||
| 1309 | def safe_name(name): | ||
| 1310 | """Convert an arbitrary string to a standard distribution name | ||
| 1311 | |||
| 1312 | Any run of characters other than alphanumerics and '.' is replaced with a single '-'. | ||
| 1313 | """ | ||
| 1314 | return re.sub('[^A-Za-z0-9.]+', '-', name) | ||
| 1315 | |||
| 1316 | |||
| 1317 | def safe_version(version): | ||
| 1318 | """ | ||
| 1319 | Convert an arbitrary string to a standard version string | ||
| 1320 | """ | ||
| 1321 | try: | ||
| 1322 | # normalize the version | ||
| 1323 | return str(packaging.version.Version(version)) | ||
| 1324 | except packaging.version.InvalidVersion: | ||
| 1325 | version = version.replace(' ', '.') | ||
| 1326 | return re.sub('[^A-Za-z0-9.]+', '-', version) | ||
| 1327 | |||
| 1328 | |||
| 1329 | def safe_extra(extra): | ||
| 1330 | """Convert an arbitrary string to a standard 'extra' name | ||
| 1331 | |||
| 1332 | Any run of characters other than alphanumerics, '.', and '-' is | ||
| 1333 | replaced with a single '_', and the result is always lowercased. | ||
| 1334 | """ | ||
| 1335 | return re.sub('[^A-Za-z0-9.-]+', '_', extra).lower() | ||
| 1336 | |||
| 1337 | |||
| 1338 | def to_filename(name): | ||
| 1339 | """Convert a project or version name to its filename-escaped form | ||
| 1340 | |||
| 1341 | Any '-' characters are currently replaced with '_'. | ||
| 1342 | """ | ||
| 1343 | return name.replace('-', '_') | ||
| 1344 | |||
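| | # Illustrative examples (not part of the original pkg_resources source) | ||
| | # of the four normalization helpers above: | ||
| | # | ||
| | #     safe_name('my package')      # -> 'my-package' | ||
| | #     safe_version('1.0 beta')     # -> '1.0.beta' | ||
| | #     safe_extra('Testing Extra')  # -> 'testing_extra' | ||
| | #     to_filename('my-package')    # -> 'my_package' | ||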
| 1345 | |||
| 1346 | def invalid_marker(text): | ||
| 1347 | """ | ||
| 1348 | Validate text as a PEP 508 environment marker; return a SyntaxError | ||
| 1349 | instance if it is invalid, or False otherwise. | ||
| 1350 | """ | ||
| 1351 | try: | ||
| 1352 | evaluate_marker(text) | ||
| 1353 | except SyntaxError as e: | ||
| 1354 | e.filename = None | ||
| 1355 | e.lineno = None | ||
| 1356 | return e | ||
| 1357 | return False | ||
| 1358 | |||
| 1359 | |||
| 1360 | def evaluate_marker(text, extra=None): | ||
| 1361 | """ | ||
| 1362 | Evaluate a PEP 508 environment marker. | ||
| 1363 | Return a boolean indicating the marker result in this environment. | ||
| 1364 | Raise SyntaxError if marker is invalid. | ||
| 1365 | |||
| 1366 | This implementation uses the 'pyparsing' module. | ||
| 1367 | """ | ||
| 1368 | try: | ||
| 1369 | marker = packaging.markers.Marker(text) | ||
| 1370 | return marker.evaluate() | ||
| 1371 | except packaging.markers.InvalidMarker as e: | ||
| 1372 | raise SyntaxError(e) | ||
| 1373 | |||
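| | # Illustrative sketch (not part of the original pkg_resources source): | ||
| | # | ||
| | #     evaluate_marker('python_version >= "2.7"')  # True on 2.7+ | ||
| | #     invalid_marker('python_version >> "2.7"')   # a SyntaxError instance | ||
| | #     invalid_marker('os_name == "posix"')        # False (marker is valid) | ||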
| 1374 | |||
| 1375 | class NullProvider: | ||
| 1376 | """Try to implement resources and metadata for arbitrary PEP 302 loaders""" | ||
| 1377 | |||
| 1378 | egg_name = None | ||
| 1379 | egg_info = None | ||
| 1380 | loader = None | ||
| 1381 | |||
| 1382 | def __init__(self, module): | ||
| 1383 | self.loader = getattr(module, '__loader__', None) | ||
| 1384 | self.module_path = os.path.dirname(getattr(module, '__file__', '')) | ||
| 1385 | |||
| 1386 | def get_resource_filename(self, manager, resource_name): | ||
| 1387 | return self._fn(self.module_path, resource_name) | ||
| 1388 | |||
| 1389 | def get_resource_stream(self, manager, resource_name): | ||
| 1390 | return io.BytesIO(self.get_resource_string(manager, resource_name)) | ||
| 1391 | |||
| 1392 | def get_resource_string(self, manager, resource_name): | ||
| 1393 | return self._get(self._fn(self.module_path, resource_name)) | ||
| 1394 | |||
| 1395 | def has_resource(self, resource_name): | ||
| 1396 | return self._has(self._fn(self.module_path, resource_name)) | ||
| 1397 | |||
| 1398 | def has_metadata(self, name): | ||
| 1399 | return self.egg_info and self._has(self._fn(self.egg_info, name)) | ||
| 1400 | |||
| 1401 | def get_metadata(self, name): | ||
| 1402 | if not self.egg_info: | ||
| 1403 | return "" | ||
| 1404 | value = self._get(self._fn(self.egg_info, name)) | ||
| 1405 | return value.decode('utf-8') if six.PY3 else value | ||
| 1406 | |||
| 1407 | def get_metadata_lines(self, name): | ||
| 1408 | return yield_lines(self.get_metadata(name)) | ||
| 1409 | |||
| 1410 | def resource_isdir(self, resource_name): | ||
| 1411 | return self._isdir(self._fn(self.module_path, resource_name)) | ||
| 1412 | |||
| 1413 | def metadata_isdir(self, name): | ||
| 1414 | return self.egg_info and self._isdir(self._fn(self.egg_info, name)) | ||
| 1415 | |||
| 1416 | def resource_listdir(self, resource_name): | ||
| 1417 | return self._listdir(self._fn(self.module_path, resource_name)) | ||
| 1418 | |||
| 1419 | def metadata_listdir(self, name): | ||
| 1420 | if self.egg_info: | ||
| 1421 | return self._listdir(self._fn(self.egg_info, name)) | ||
| 1422 | return [] | ||
| 1423 | |||
| 1424 | def run_script(self, script_name, namespace): | ||
| 1425 | script = 'scripts/' + script_name | ||
| 1426 | if not self.has_metadata(script): | ||
| 1427 | raise ResolutionError( | ||
| 1428 | "Script {script!r} not found in metadata at {self.egg_info!r}" | ||
| 1429 | .format(**locals()), | ||
| 1430 | ) | ||
| 1431 | script_text = self.get_metadata(script).replace('\r\n', '\n') | ||
| 1432 | script_text = script_text.replace('\r', '\n') | ||
| 1433 | script_filename = self._fn(self.egg_info, script) | ||
| 1434 | namespace['__file__'] = script_filename | ||
| 1435 | if os.path.exists(script_filename): | ||
| 1436 | source = open(script_filename).read() | ||
| 1437 | code = compile(source, script_filename, 'exec') | ||
| 1438 | exec(code, namespace, namespace) | ||
| 1439 | else: | ||
| 1440 | from linecache import cache | ||
| 1441 | cache[script_filename] = ( | ||
| 1442 | len(script_text), 0, script_text.split('\n'), script_filename | ||
| 1443 | ) | ||
| 1444 | script_code = compile(script_text, script_filename, 'exec') | ||
| 1445 | exec(script_code, namespace, namespace) | ||
| 1446 | |||
| 1447 | def _has(self, path): | ||
| 1448 | raise NotImplementedError( | ||
| 1449 | "Can't perform this operation for unregistered loader type" | ||
| 1450 | ) | ||
| 1451 | |||
| 1452 | def _isdir(self, path): | ||
| 1453 | raise NotImplementedError( | ||
| 1454 | "Can't perform this operation for unregistered loader type" | ||
| 1455 | ) | ||
| 1456 | |||
| 1457 | def _listdir(self, path): | ||
| 1458 | raise NotImplementedError( | ||
| 1459 | "Can't perform this operation for unregistered loader type" | ||
| 1460 | ) | ||
| 1461 | |||
| 1462 | def _fn(self, base, resource_name): | ||
| 1463 | if resource_name: | ||
| 1464 | return os.path.join(base, *resource_name.split('/')) | ||
| 1465 | return base | ||
| 1466 | |||
| 1467 | def _get(self, path): | ||
| 1468 | if hasattr(self.loader, 'get_data'): | ||
| 1469 | return self.loader.get_data(path) | ||
| 1470 | raise NotImplementedError( | ||
| 1471 | "Can't perform this operation for loaders without 'get_data()'" | ||
| 1472 | ) | ||
| 1473 | |||
| 1474 | |||
| 1475 | register_loader_type(object, NullProvider) | ||
| 1476 | |||
| 1477 | |||
| 1478 | class EggProvider(NullProvider): | ||
| 1479 | """Provider based on a virtual filesystem""" | ||
| 1480 | |||
| 1481 | def __init__(self, module): | ||
| 1482 | NullProvider.__init__(self, module) | ||
| 1483 | self._setup_prefix() | ||
| 1484 | |||
| 1485 | def _setup_prefix(self): | ||
| 1486 | # we assume here that our metadata may be nested inside a "basket" | ||
| 1487 | # of multiple eggs; that's why we use module_path instead of .archive | ||
| 1488 | path = self.module_path | ||
| 1489 | old = None | ||
| 1490 | while path != old: | ||
| 1491 | if _is_egg_path(path): | ||
| 1492 | self.egg_name = os.path.basename(path) | ||
| 1493 | self.egg_info = os.path.join(path, 'EGG-INFO') | ||
| 1494 | self.egg_root = path | ||
| 1495 | break | ||
| 1496 | old = path | ||
| 1497 | path, base = os.path.split(path) | ||
| 1498 | |||
| 1499 | |||
| 1500 | class DefaultProvider(EggProvider): | ||
| 1501 | """Provides access to package resources in the filesystem""" | ||
| 1502 | |||
| 1503 | def _has(self, path): | ||
| 1504 | return os.path.exists(path) | ||
| 1505 | |||
| 1506 | def _isdir(self, path): | ||
| 1507 | return os.path.isdir(path) | ||
| 1508 | |||
| 1509 | def _listdir(self, path): | ||
| 1510 | return os.listdir(path) | ||
| 1511 | |||
| 1512 | def get_resource_stream(self, manager, resource_name): | ||
| 1513 | return open(self._fn(self.module_path, resource_name), 'rb') | ||
| 1514 | |||
| 1515 | def _get(self, path): | ||
| 1516 | with open(path, 'rb') as stream: | ||
| 1517 | return stream.read() | ||
| 1518 | |||
| 1519 | @classmethod | ||
| 1520 | def _register(cls): | ||
| 1521 | loader_cls = getattr( | ||
| 1522 | importlib_machinery, | ||
| 1523 | 'SourceFileLoader', | ||
| 1524 | type(None), | ||
| 1525 | ) | ||
| 1526 | register_loader_type(loader_cls, cls) | ||
| 1527 | |||
| 1528 | |||
| 1529 | DefaultProvider._register() | ||
| 1530 | |||
| 1531 | |||
| 1532 | class EmptyProvider(NullProvider): | ||
| 1533 | """Provider that returns nothing for all requests""" | ||
| 1534 | |||
| 1535 | module_path = None | ||
| 1536 | |||
| 1537 | _isdir = _has = lambda self, path: False | ||
| 1538 | |||
| 1539 | def _get(self, path): | ||
| 1540 | return '' | ||
| 1541 | |||
| 1542 | def _listdir(self, path): | ||
| 1543 | return [] | ||
| 1544 | |||
| 1545 | def __init__(self): | ||
| 1546 | pass | ||
| 1547 | |||
| 1548 | |||
| 1549 | empty_provider = EmptyProvider() | ||
| 1550 | |||
| 1551 | |||
| 1552 | class ZipManifests(dict): | ||
| 1553 | """ | ||
| 1554 | zip manifest builder | ||
| 1555 | """ | ||
| 1556 | |||
| 1557 | @classmethod | ||
| 1558 | def build(cls, path): | ||
| 1559 | """ | ||
| 1560 | Build a dictionary similar to the zipimport directory | ||
| 1561 | caches, except instead of tuples, store ZipInfo objects. | ||
| 1562 | |||
| 1563 | Use a platform-specific path separator (os.sep) for the path keys | ||
| 1564 | for compatibility with pypy on Windows. | ||
| 1565 | """ | ||
| 1566 | with zipfile.ZipFile(path) as zfile: | ||
| 1567 | items = ( | ||
| 1568 | ( | ||
| 1569 | name.replace('/', os.sep), | ||
| 1570 | zfile.getinfo(name), | ||
| 1571 | ) | ||
| 1572 | for name in zfile.namelist() | ||
| 1573 | ) | ||
| 1574 | return dict(items) | ||
| 1575 | |||
| 1576 | load = build | ||
| 1577 | |||
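| | # Illustrative sketch (not part of the original pkg_resources source): | ||
| | # build() maps os.sep-normalized archive paths to ZipInfo records: | ||
| | # | ||
| | #     manifest = ZipManifests.build('example-1.0.egg')  # hypothetical path | ||
| | #     info = manifest['EGG-INFO' + os.sep + 'PKG-INFO'] | ||
| | #     print(info.file_size, info.date_time) | ||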
| 1578 | |||
| 1579 | class MemoizedZipManifests(ZipManifests): | ||
| 1580 | """ | ||
| 1581 | Memoized zipfile manifests. | ||
| 1582 | """ | ||
| 1583 | manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime') | ||
| 1584 | |||
| 1585 | def load(self, path): | ||
| 1586 | """ | ||
| 1587 | Load a manifest at path or return a suitable manifest already loaded. | ||
| 1588 | """ | ||
| 1589 | path = os.path.normpath(path) | ||
| 1590 | mtime = os.stat(path).st_mtime | ||
| 1591 | |||
| 1592 | if path not in self or self[path].mtime != mtime: | ||
| 1593 | manifest = self.build(path) | ||
| 1594 | self[path] = self.manifest_mod(manifest, mtime) | ||
| 1595 | |||
| 1596 | return self[path].manifest | ||
| 1597 | |||
| 1598 | |||
| 1599 | class ZipProvider(EggProvider): | ||
| 1600 | """Resource support for zips and eggs""" | ||
| 1601 | |||
| 1602 | eagers = None | ||
| 1603 | _zip_manifests = MemoizedZipManifests() | ||
| 1604 | |||
| 1605 | def __init__(self, module): | ||
| 1606 | EggProvider.__init__(self, module) | ||
| 1607 | self.zip_pre = self.loader.archive + os.sep | ||
| 1608 | |||
| 1609 | def _zipinfo_name(self, fspath): | ||
| 1610 | # Convert a virtual filename (full path to file) into a zipfile subpath | ||
| 1611 | # usable with the zipimport directory cache for our target archive | ||
| 1612 | fspath = fspath.rstrip(os.sep) | ||
| 1613 | if fspath == self.loader.archive: | ||
| 1614 | return '' | ||
| 1615 | if fspath.startswith(self.zip_pre): | ||
| 1616 | return fspath[len(self.zip_pre):] | ||
| 1617 | raise AssertionError( | ||
| 1618 | "%s is not a subpath of %s" % (fspath, self.zip_pre) | ||
| 1619 | ) | ||
| 1620 | |||
| 1621 | def _parts(self, zip_path): | ||
| 1622 | # Convert a zipfile subpath into an egg-relative path part list. | ||
| 1623 | # pseudo-fs path | ||
| 1624 | fspath = self.zip_pre + zip_path | ||
| 1625 | if fspath.startswith(self.egg_root + os.sep): | ||
| 1626 | return fspath[len(self.egg_root) + 1:].split(os.sep) | ||
| 1627 | raise AssertionError( | ||
| 1628 | "%s is not a subpath of %s" % (fspath, self.egg_root) | ||
| 1629 | ) | ||
| 1630 | |||
| 1631 | @property | ||
| 1632 | def zipinfo(self): | ||
| 1633 | return self._zip_manifests.load(self.loader.archive) | ||
| 1634 | |||
| 1635 | def get_resource_filename(self, manager, resource_name): | ||
| 1636 | if not self.egg_name: | ||
| 1637 | raise NotImplementedError( | ||
| 1638 | "resource_filename() only supported for .egg, not .zip" | ||
| 1639 | ) | ||
| 1640 | # no need to lock for extraction, since we use temp names | ||
| 1641 | zip_path = self._resource_to_zip(resource_name) | ||
| 1642 | eagers = self._get_eager_resources() | ||
| 1643 | if '/'.join(self._parts(zip_path)) in eagers: | ||
| 1644 | for name in eagers: | ||
| 1645 | self._extract_resource(manager, self._eager_to_zip(name)) | ||
| 1646 | return self._extract_resource(manager, zip_path) | ||
| 1647 | |||
| 1648 | @staticmethod | ||
| 1649 | def _get_date_and_size(zip_stat): | ||
| 1650 | size = zip_stat.file_size | ||
| 1651 | # ymdhms+wday, yday, dst | ||
| 1652 | date_time = zip_stat.date_time + (0, 0, -1) | ||
| 1653 | # 1980 offset already done | ||
| 1654 | timestamp = time.mktime(date_time) | ||
| 1655 | return timestamp, size | ||
| 1656 | |||
| 1657 | def _extract_resource(self, manager, zip_path): | ||
| 1658 | |||
| 1659 | if zip_path in self._index(): | ||
| 1660 | for name in self._index()[zip_path]: | ||
| 1661 | last = self._extract_resource( | ||
| 1662 | manager, os.path.join(zip_path, name) | ||
| 1663 | ) | ||
| 1664 | # return the extracted directory name | ||
| 1665 | return os.path.dirname(last) | ||
| 1666 | |||
| 1667 | timestamp, size = self._get_date_and_size(self.zipinfo[zip_path]) | ||
| 1668 | |||
| 1669 | if not WRITE_SUPPORT: | ||
| 1670 | raise IOError('"os.rename" and "os.unlink" are not supported ' | ||
| 1671 | 'on this platform') | ||
| 1672 | try: | ||
| 1673 | |||
| 1674 | real_path = manager.get_cache_path( | ||
| 1675 | self.egg_name, self._parts(zip_path) | ||
| 1676 | ) | ||
| 1677 | |||
| 1678 | if self._is_current(real_path, zip_path): | ||
| 1679 | return real_path | ||
| 1680 | |||
| 1681 | outf, tmpnam = _mkstemp( | ||
| 1682 | ".$extract", | ||
| 1683 | dir=os.path.dirname(real_path), | ||
| 1684 | ) | ||
| 1685 | os.write(outf, self.loader.get_data(zip_path)) | ||
| 1686 | os.close(outf) | ||
| 1687 | utime(tmpnam, (timestamp, timestamp)) | ||
| 1688 | manager.postprocess(tmpnam, real_path) | ||
| 1689 | |||
| 1690 | try: | ||
| 1691 | rename(tmpnam, real_path) | ||
| 1692 | |||
| 1693 | except os.error: | ||
| 1694 | if os.path.isfile(real_path): | ||
| 1695 | if self._is_current(real_path, zip_path): | ||
| 1696 | # the file became current since it was checked above, | ||
| 1697 | # so proceed. | ||
| 1698 | return real_path | ||
| 1699 | # Windows, del old file and retry | ||
| 1700 | elif os.name == 'nt': | ||
| 1701 | unlink(real_path) | ||
| 1702 | rename(tmpnam, real_path) | ||
| 1703 | return real_path | ||
| 1704 | raise | ||
| 1705 | |||
| 1706 | except os.error: | ||
| 1707 | # report a user-friendly error | ||
| 1708 | manager.extraction_error() | ||
| 1709 | |||
| 1710 | return real_path | ||
| 1711 | |||
| 1712 | def _is_current(self, file_path, zip_path): | ||
| 1713 | """ | ||
| 1714 | Return True if the file_path is current for this zip_path | ||
| 1715 | """ | ||
| 1716 | timestamp, size = self._get_date_and_size(self.zipinfo[zip_path]) | ||
| 1717 | if not os.path.isfile(file_path): | ||
| 1718 | return False | ||
| 1719 | stat = os.stat(file_path) | ||
| 1720 | if stat.st_size != size or stat.st_mtime != timestamp: | ||
| 1721 | return False | ||
| 1722 | # check that the contents match | ||
| 1723 | zip_contents = self.loader.get_data(zip_path) | ||
| 1724 | with open(file_path, 'rb') as f: | ||
| 1725 | file_contents = f.read() | ||
| 1726 | return zip_contents == file_contents | ||
| 1727 | |||
| 1728 | def _get_eager_resources(self): | ||
| 1729 | if self.eagers is None: | ||
| 1730 | eagers = [] | ||
| 1731 | for name in ('native_libs.txt', 'eager_resources.txt'): | ||
| 1732 | if self.has_metadata(name): | ||
| 1733 | eagers.extend(self.get_metadata_lines(name)) | ||
| 1734 | self.eagers = eagers | ||
| 1735 | return self.eagers | ||
| 1736 | |||
| 1737 | def _index(self): | ||
| 1738 | try: | ||
| 1739 | return self._dirindex | ||
| 1740 | except AttributeError: | ||
| 1741 | ind = {} | ||
| 1742 | for path in self.zipinfo: | ||
| 1743 | parts = path.split(os.sep) | ||
| 1744 | while parts: | ||
| 1745 | parent = os.sep.join(parts[:-1]) | ||
| 1746 | if parent in ind: | ||
| 1747 | ind[parent].append(parts[-1]) | ||
| 1748 | break | ||
| 1749 | else: | ||
| 1750 | ind[parent] = [parts.pop()] | ||
| 1751 | self._dirindex = ind | ||
| 1752 | return ind | ||
| 1753 | |||
| 1754 | def _has(self, fspath): | ||
| 1755 | zip_path = self._zipinfo_name(fspath) | ||
| 1756 | return zip_path in self.zipinfo or zip_path in self._index() | ||
| 1757 | |||
| 1758 | def _isdir(self, fspath): | ||
| 1759 | return self._zipinfo_name(fspath) in self._index() | ||
| 1760 | |||
| 1761 | def _listdir(self, fspath): | ||
| 1762 | return list(self._index().get(self._zipinfo_name(fspath), ())) | ||
| 1763 | |||
| 1764 | def _eager_to_zip(self, resource_name): | ||
| 1765 | return self._zipinfo_name(self._fn(self.egg_root, resource_name)) | ||
| 1766 | |||
| 1767 | def _resource_to_zip(self, resource_name): | ||
| 1768 | return self._zipinfo_name(self._fn(self.module_path, resource_name)) | ||
| 1769 | |||
| 1770 | |||
| 1771 | register_loader_type(zipimport.zipimporter, ZipProvider) | ||
| 1772 | |||
| 1773 | |||
| 1774 | class FileMetadata(EmptyProvider): | ||
| 1775 | """Metadata handler for standalone PKG-INFO files | ||
| 1776 | |||
| 1777 | Usage:: | ||
| 1778 | |||
| 1779 | metadata = FileMetadata("/path/to/PKG-INFO") | ||
| 1780 | |||
| 1781 | This provider rejects all data and metadata requests except for PKG-INFO, | ||
| 1782 | which is treated as existing, and will be the contents of the file at | ||
| 1783 | the provided location. | ||
| 1784 | """ | ||
| 1785 | |||
| 1786 | def __init__(self, path): | ||
| 1787 | self.path = path | ||
| 1788 | |||
| 1789 | def has_metadata(self, name): | ||
| 1790 | return name == 'PKG-INFO' and os.path.isfile(self.path) | ||
| 1791 | |||
| 1792 | def get_metadata(self, name): | ||
| 1793 | if name != 'PKG-INFO': | ||
| 1794 | raise KeyError("No metadata except PKG-INFO is available") | ||
| 1795 | |||
| 1796 | with io.open(self.path, encoding='utf-8', errors="replace") as f: | ||
| 1797 | metadata = f.read() | ||
| 1798 | self._warn_on_replacement(metadata) | ||
| 1799 | return metadata | ||
| 1800 | |||
| 1801 | def _warn_on_replacement(self, metadata): | ||
| 1802 | # Python 2.7 compat for: replacement_char = '�' | ||
| 1803 | replacement_char = b'\xef\xbf\xbd'.decode('utf-8') | ||
| 1804 | if replacement_char in metadata: | ||
| 1805 | tmpl = "{self.path} could not be properly decoded in UTF-8" | ||
| 1806 | msg = tmpl.format(**locals()) | ||
| 1807 | warnings.warn(msg) | ||
| 1808 | |||
| 1809 | def get_metadata_lines(self, name): | ||
| 1810 | return yield_lines(self.get_metadata(name)) | ||
| 1811 | |||
| 1812 | |||
| 1813 | class PathMetadata(DefaultProvider): | ||
| 1814 | """Metadata provider for egg directories | ||
| 1815 | |||
| 1816 | Usage:: | ||
| 1817 | |||
| 1818 | # Development eggs: | ||
| 1819 | |||
| 1820 | egg_info = "/path/to/PackageName.egg-info" | ||
| 1821 | base_dir = os.path.dirname(egg_info) | ||
| 1822 | metadata = PathMetadata(base_dir, egg_info) | ||
| 1823 | dist_name = os.path.splitext(os.path.basename(egg_info))[0] | ||
| 1824 | dist = Distribution(basedir, project_name=dist_name, metadata=metadata) | ||
| 1825 | |||
| 1826 | # Unpacked egg directories: | ||
| 1827 | |||
| 1828 | egg_path = "/path/to/PackageName-ver-pyver-etc.egg" | ||
| 1829 | metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO')) | ||
| 1830 | dist = Distribution.from_filename(egg_path, metadata=metadata) | ||
| 1831 | """ | ||
| 1832 | |||
| 1833 | def __init__(self, path, egg_info): | ||
| 1834 | self.module_path = path | ||
| 1835 | self.egg_info = egg_info | ||
| 1836 | |||
| 1837 | |||
| 1838 | class EggMetadata(ZipProvider): | ||
| 1839 | """Metadata provider for .egg files""" | ||
| 1840 | |||
| 1841 | def __init__(self, importer): | ||
| 1842 | """Create a metadata provider from a zipimporter""" | ||
| 1843 | |||
| 1844 | self.zip_pre = importer.archive + os.sep | ||
| 1845 | self.loader = importer | ||
| 1846 | if importer.prefix: | ||
| 1847 | self.module_path = os.path.join(importer.archive, importer.prefix) | ||
| 1848 | else: | ||
| 1849 | self.module_path = importer.archive | ||
| 1850 | self._setup_prefix() | ||
| 1851 | |||
| 1852 | |||
| 1853 | _declare_state('dict', _distribution_finders={}) | ||
| 1854 | |||
| 1855 | |||
| 1856 | def register_finder(importer_type, distribution_finder): | ||
| 1857 | """Register `distribution_finder` to find distributions in sys.path items | ||
| 1858 | |||
| 1859 | `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item | ||
| 1860 | handler), and `distribution_finder` is a callable that, passed a path | ||
| 1861 | item and the importer instance, yields ``Distribution`` instances found on | ||
| 1862 | that path item. See ``pkg_resources.find_on_path`` for an example.""" | ||
| 1863 | _distribution_finders[importer_type] = distribution_finder | ||
| 1864 | |||
| 1865 | |||
| 1866 | def find_distributions(path_item, only=False): | ||
| 1867 | """Yield distributions accessible via `path_item`""" | ||
| 1868 | importer = get_importer(path_item) | ||
| 1869 | finder = _find_adapter(_distribution_finders, importer) | ||
| 1870 | return finder(importer, path_item, only) | ||
| 1871 | |||
| 1872 | |||
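| | # Illustrative sketch (not part of the original pkg_resources source): | ||
| | # a finder is a callable (importer, path_item, only) -> iterable of | ||
| | # Distribution objects. A minimal no-op finder for a hypothetical | ||
| | # importer type: | ||
| | # | ||
| | #     class MyImporter(object):  # hypothetical PEP 302 importer | ||
| | #         pass | ||
| | # | ||
| | #     def find_in_my_importer(importer, path_item, only=False): | ||
| | #         return iter(())  # yield Distribution instances here | ||
| | # | ||
| | #     register_finder(MyImporter, find_in_my_importer) | ||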
| 1873 | def find_eggs_in_zip(importer, path_item, only=False): | ||
| 1874 | """ | ||
| 1875 | Find eggs in zip files; possibly multiple nested eggs. | ||
| 1876 | """ | ||
| 1877 | if importer.archive.endswith('.whl'): | ||
| 1878 | # wheels are not supported with this finder | ||
| 1879 | # they don't have PKG-INFO metadata, and won't ever contain eggs | ||
| 1880 | return | ||
| 1881 | metadata = EggMetadata(importer) | ||
| 1882 | if metadata.has_metadata('PKG-INFO'): | ||
| 1883 | yield Distribution.from_filename(path_item, metadata=metadata) | ||
| 1884 | if only: | ||
| 1885 | # don't yield nested distros | ||
| 1886 | return | ||
| 1887 | for subitem in metadata.resource_listdir('/'): | ||
| 1888 | if _is_egg_path(subitem): | ||
| 1889 | subpath = os.path.join(path_item, subitem) | ||
| 1890 | dists = find_eggs_in_zip(zipimport.zipimporter(subpath), subpath) | ||
| 1891 | for dist in dists: | ||
| 1892 | yield dist | ||
| 1893 | elif subitem.lower().endswith('.dist-info'): | ||
| 1894 | subpath = os.path.join(path_item, subitem) | ||
| 1895 | submeta = EggMetadata(zipimport.zipimporter(subpath)) | ||
| 1896 | submeta.egg_info = subpath | ||
| 1897 | yield Distribution.from_location(path_item, subitem, submeta) | ||
| 1898 | |||
| 1899 | |||
| 1900 | register_finder(zipimport.zipimporter, find_eggs_in_zip) | ||
| 1901 | |||
| 1902 | |||
| 1903 | def find_nothing(importer, path_item, only=False): | ||
| 1904 | return () | ||
| 1905 | |||
| 1906 | |||
| 1907 | register_finder(object, find_nothing) | ||
| 1908 | |||
| 1909 | |||
| 1910 | def _by_version_descending(names): | ||
| 1911 | """ | ||
| 1912 | Given a list of filenames, return them in descending order | ||
| 1913 | by version number. | ||
| 1914 | |||
| 1915 | >>> names = 'bar', 'foo', 'Python-2.7.10.egg', 'Python-2.7.2.egg' | ||
| 1916 | >>> _by_version_descending(names) | ||
| 1917 | ['Python-2.7.10.egg', 'Python-2.7.2.egg', 'foo', 'bar'] | ||
| 1918 | >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.egg' | ||
| 1919 | >>> _by_version_descending(names) | ||
| 1920 | ['Setuptools-1.2.3.egg', 'Setuptools-1.2.3b1.egg'] | ||
| 1921 | >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.post1.egg' | ||
| 1922 | >>> _by_version_descending(names) | ||
| 1923 | ['Setuptools-1.2.3.post1.egg', 'Setuptools-1.2.3b1.egg'] | ||
| 1924 | """ | ||
| 1925 | def _by_version(name): | ||
| 1926 | """ | ||
| 1927 | Parse each component of the filename | ||
| 1928 | """ | ||
| 1929 | name, ext = os.path.splitext(name) | ||
| 1930 | parts = itertools.chain(name.split('-'), [ext]) | ||
| 1931 | return [packaging.version.parse(part) for part in parts] | ||
| 1932 | |||
| 1933 | return sorted(names, key=_by_version, reverse=True) | ||
| 1934 | |||
| 1935 | |||
| 1936 | def find_on_path(importer, path_item, only=False): | ||
| 1937 | """Yield distributions accessible on a sys.path directory""" | ||
| 1938 | path_item = _normalize_cached(path_item) | ||
| 1939 | |||
| 1940 | if _is_unpacked_egg(path_item): | ||
| 1941 | yield Distribution.from_filename( | ||
| 1942 | path_item, metadata=PathMetadata( | ||
| 1943 | path_item, os.path.join(path_item, 'EGG-INFO') | ||
| 1944 | ) | ||
| 1945 | ) | ||
| 1946 | return | ||
| 1947 | |||
| 1948 | entries = safe_listdir(path_item) | ||
| 1949 | |||
| 1950 | # for performance, before sorting by version, | ||
| 1951 | # screen entries for only those that will yield | ||
| 1952 | # distributions | ||
| 1953 | filtered = ( | ||
| 1954 | entry | ||
| 1955 | for entry in entries | ||
| 1956 | if dist_factory(path_item, entry, only) | ||
| 1957 | ) | ||
| 1958 | |||
| 1959 | # scan for .egg and .egg-info in directory | ||
| 1960 | path_item_entries = _by_version_descending(filtered) | ||
| 1961 | for entry in path_item_entries: | ||
| 1962 | fullpath = os.path.join(path_item, entry) | ||
| 1963 | factory = dist_factory(path_item, entry, only) | ||
| 1964 | for dist in factory(fullpath): | ||
| 1965 | yield dist | ||
| 1966 | |||
| 1967 | |||
| 1968 | def dist_factory(path_item, entry, only): | ||
| 1969 | """ | ||
| 1970 | Return a dist_factory for a path_item and entry | ||
| 1971 | """ | ||
| 1972 | lower = entry.lower() | ||
| 1973 | is_meta = any(map(lower.endswith, ('.egg-info', '.dist-info'))) | ||
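| | # The chained conditional below acts as a dispatch table: | ||
| | #   *.egg-info / *.dist-info    -> distributions_from_metadata | ||
| | #   *.egg       (unless `only`) -> find_distributions | ||
| | #   *.egg-link  (unless `only`) -> resolve_egg_link | ||
| | #   anything else               -> NoDists() (falsy; yields nothing) | ||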
| 1974 | return ( | ||
| 1975 | distributions_from_metadata | ||
| 1976 | if is_meta else | ||
| 1977 | find_distributions | ||
| 1978 | if not only and _is_egg_path(entry) else | ||
| 1979 | resolve_egg_link | ||
| 1980 | if not only and lower.endswith('.egg-link') else | ||
| 1981 | NoDists() | ||
| 1982 | ) | ||
| 1983 | |||
| 1984 | |||
| 1985 | class NoDists: | ||
| 1986 | """ | ||
| 1987 | >>> bool(NoDists()) | ||
| 1988 | False | ||
| 1989 | |||
| 1990 | >>> list(NoDists()('anything')) | ||
| 1991 | [] | ||
| 1992 | """ | ||
| 1993 | def __bool__(self): | ||
| 1994 | return False | ||
| 1995 | if six.PY2: | ||
| 1996 | __nonzero__ = __bool__ | ||
| 1997 | |||
| 1998 | def __call__(self, fullpath): | ||
| 1999 | return iter(()) | ||
| 2000 | |||
| 2001 | |||
| 2002 | def safe_listdir(path): | ||
| 2003 | """ | ||
| 2004 | Attempt to list contents of path, but suppress some exceptions. | ||
| 2005 | """ | ||
| 2006 | try: | ||
| 2007 | return os.listdir(path) | ||
| 2008 | except (PermissionError, NotADirectoryError): | ||
| 2009 | pass | ||
| 2010 | except OSError as e: | ||
| 2011 | # Ignore the directory if it does not exist, is not a | ||
| 2012 | # directory, or permission is denied | ||
| 2013 | ignorable = ( | ||
| 2014 | e.errno in (errno.ENOTDIR, errno.EACCES, errno.ENOENT) | ||
| 2015 | # Python 2 on Windows needs to be handled this way :( | ||
| 2016 | or getattr(e, "winerror", None) == 267 | ||
| 2017 | ) | ||
| 2018 | if not ignorable: | ||
| 2019 | raise | ||
| 2020 | return () | ||
| 2021 | |||
| 2022 | |||
| 2023 | def distributions_from_metadata(path): | ||
| 2024 | root = os.path.dirname(path) | ||
| 2025 | if os.path.isdir(path): | ||
| 2026 | if len(os.listdir(path)) == 0: | ||
| 2027 | # empty metadata dir; skip | ||
| 2028 | return | ||
| 2029 | metadata = PathMetadata(root, path) | ||
| 2030 | else: | ||
| 2031 | metadata = FileMetadata(path) | ||
| 2032 | entry = os.path.basename(path) | ||
| 2033 | yield Distribution.from_location( | ||
| 2034 | root, entry, metadata, precedence=DEVELOP_DIST, | ||
| 2035 | ) | ||
| 2036 | |||
| 2037 | |||
| 2038 | def non_empty_lines(path): | ||
| 2039 | """ | ||
| 2040 | Yield non-empty lines from file at path | ||
| 2041 | """ | ||
| 2042 | with open(path) as f: | ||
| 2043 | for line in f: | ||
| 2044 | line = line.strip() | ||
| 2045 | if line: | ||
| 2046 | yield line | ||
| 2047 | |||
| 2048 | |||
| 2049 | def resolve_egg_link(path): | ||
| 2050 | """ | ||
| 2051 | Given a path to an .egg-link, resolve distributions | ||
| 2052 | present in the referenced path. | ||
| 2053 | """ | ||
| 2054 | referenced_paths = non_empty_lines(path) | ||
| 2055 | resolved_paths = ( | ||
| 2056 | os.path.join(os.path.dirname(path), ref) | ||
| 2057 | for ref in referenced_paths | ||
| 2058 | ) | ||
| 2059 | dist_groups = map(find_distributions, resolved_paths) | ||
| 2060 | return next(dist_groups, ()) | ||
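| | # For reference, an .egg-link file (as written by develop-mode | ||
| | # installs) contains the path of the development directory, e.g. | ||
| | # a single line such as (placeholder path): | ||
| | # | ||
| | #     /home/user/src/mypkg | ||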
| 2061 | |||
| 2062 | |||
| 2063 | register_finder(pkgutil.ImpImporter, find_on_path) | ||
| 2064 | |||
| 2065 | if hasattr(importlib_machinery, 'FileFinder'): | ||
| 2066 | register_finder(importlib_machinery.FileFinder, find_on_path) | ||
| 2067 | |||
| 2068 | _declare_state('dict', _namespace_handlers={}) | ||
| 2069 | _declare_state('dict', _namespace_packages={}) | ||
| 2070 | |||
| 2071 | |||
| 2072 | def register_namespace_handler(importer_type, namespace_handler): | ||
| 2073 | """Register `namespace_handler` to declare namespace packages | ||
| 2074 | |||
| 2075 | `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item | ||
| 2076 | handler), and `namespace_handler` is a callable like this:: | ||
| 2077 | |||
| 2078 | def namespace_handler(importer, path_entry, moduleName, module): | ||
| 2079 | # return a path_entry to use for child packages | ||
| 2080 | |||
| 2081 | Namespace handlers are only called if the importer object has already | ||
| 2082 | agreed that it can handle the relevant path item, and they should only | ||
| 2083 | return a subpath if the module __path__ does not already contain an | ||
| 2084 | equivalent subpath. For an example namespace handler, see | ||
| 2085 | ``pkg_resources.file_ns_handler``. | ||
| 2086 | """ | ||
| 2087 | _namespace_handlers[importer_type] = namespace_handler | ||
| 2088 | |||
| 2089 | |||
| 2090 | def _handle_ns(packageName, path_item): | ||
| 2091 | """Ensure that named package includes a subpath of path_item (if needed)""" | ||
| 2092 | |||
| 2093 | importer = get_importer(path_item) | ||
| 2094 | if importer is None: | ||
| 2095 | return None | ||
| 2096 | loader = importer.find_module(packageName) | ||
| 2097 | if loader is None: | ||
| 2098 | return None | ||
| 2099 | module = sys.modules.get(packageName) | ||
| 2100 | if module is None: | ||
| 2101 | module = sys.modules[packageName] = types.ModuleType(packageName) | ||
| 2102 | module.__path__ = [] | ||
| 2103 | _set_parent_ns(packageName) | ||
| 2104 | elif not hasattr(module, '__path__'): | ||
| 2105 | raise TypeError("Not a package:", packageName) | ||
| 2106 | handler = _find_adapter(_namespace_handlers, importer) | ||
| 2107 | subpath = handler(importer, path_item, packageName, module) | ||
| 2108 | if subpath is not None: | ||
| 2109 | path = module.__path__ | ||
| 2110 | path.append(subpath) | ||
| 2111 | loader.load_module(packageName) | ||
| 2112 | _rebuild_mod_path(path, packageName, module) | ||
| 2113 | return subpath | ||
| 2114 | |||
| 2115 | |||
| 2116 | def _rebuild_mod_path(orig_path, package_name, module): | ||
| 2117 | """ | ||
| 2118 | Rebuild module.__path__ ensuring that all entries are ordered | ||
| 2119 | corresponding to their sys.path order | ||
| 2120 | """ | ||
| 2121 | sys_path = [_normalize_cached(p) for p in sys.path] | ||
| 2122 | |||
| 2123 | def safe_sys_path_index(entry): | ||
| 2124 | """ | ||
| 2125 | Workaround for #520 and #513. | ||
| 2126 | """ | ||
| 2127 | try: | ||
| 2128 | return sys_path.index(entry) | ||
| 2129 | except ValueError: | ||
| 2130 | return float('inf') | ||
| 2131 | |||
| 2132 | def position_in_sys_path(path): | ||
| 2133 | """ | ||
| 2134 | Return the ordinal of the path based on its position in sys.path | ||
| 2135 | """ | ||
| 2136 | path_parts = path.split(os.sep) | ||
| 2137 | module_parts = package_name.count('.') + 1 | ||
| 2138 | parts = path_parts[:-module_parts] | ||
| 2139 | return safe_sys_path_index(_normalize_cached(os.sep.join(parts))) | ||
| 2140 | |||
| 2141 | if not isinstance(orig_path, list): | ||
| 2142 | # Is this behavior useful when module.__path__ is not a list? | ||
| 2143 | return | ||
| 2144 | |||
| 2145 | orig_path.sort(key=position_in_sys_path) | ||
| 2146 | module.__path__[:] = [_normalize_cached(p) for p in orig_path] | ||
| 2147 | |||
| 2148 | |||
| 2149 | def declare_namespace(packageName): | ||
| 2150 | """Declare that package 'packageName' is a namespace package""" | ||
| 2151 | |||
| 2152 | _imp.acquire_lock() | ||
| 2153 | try: | ||
| 2154 | if packageName in _namespace_packages: | ||
| 2155 | return | ||
| 2156 | |||
| 2157 | path, parent = sys.path, None | ||
| 2158 | if '.' in packageName: | ||
| 2159 | parent = '.'.join(packageName.split('.')[:-1]) | ||
| 2160 | declare_namespace(parent) | ||
| 2161 | if parent not in _namespace_packages: | ||
| 2162 | __import__(parent) | ||
| 2163 | try: | ||
| 2164 | path = sys.modules[parent].__path__ | ||
| 2165 | except AttributeError: | ||
| 2166 | raise TypeError("Not a package:", parent) | ||
| 2167 | |||
| 2168 | # Track what packages are namespaces, so when new path items are added, | ||
| 2169 | # they can be updated | ||
| 2170 | _namespace_packages.setdefault(parent, []).append(packageName) | ||
| 2171 | _namespace_packages.setdefault(packageName, []) | ||
| 2172 | |||
| 2173 | for path_item in path: | ||
| 2174 | # Ensure all the parent's path items are reflected in the child, | ||
| 2175 | # if they apply | ||
| 2176 | _handle_ns(packageName, path_item) | ||
| 2177 | |||
| 2178 | finally: | ||
| 2179 | _imp.release_lock() | ||
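| | # Usage sketch: the classic pre-PEP 420 boilerplate placed in a | ||
| | # namespace package's __init__.py: | ||
| | # | ||
| | #     __import__('pkg_resources').declare_namespace(__name__) | ||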
| 2180 | |||
| 2181 | |||
| 2182 | def fixup_namespace_packages(path_item, parent=None): | ||
| 2183 | """Ensure that previously-declared namespace packages include path_item""" | ||
| 2184 | _imp.acquire_lock() | ||
| 2185 | try: | ||
| 2186 | for package in _namespace_packages.get(parent, ()): | ||
| 2187 | subpath = _handle_ns(package, path_item) | ||
| 2188 | if subpath: | ||
| 2189 | fixup_namespace_packages(subpath, package) | ||
| 2190 | finally: | ||
| 2191 | _imp.release_lock() | ||
| 2192 | |||
| 2193 | |||
| 2194 | def file_ns_handler(importer, path_item, packageName, module): | ||
| 2195 | """Compute an ns-package subpath for a filesystem or zipfile importer""" | ||
| 2196 | |||
| 2197 | subpath = os.path.join(path_item, packageName.split('.')[-1]) | ||
| 2198 | normalized = _normalize_cached(subpath) | ||
| 2199 | for item in module.__path__: | ||
| 2200 | if _normalize_cached(item) == normalized: | ||
| 2201 | break | ||
| 2202 | else: | ||
| 2203 | # Only return the path if it's not already there | ||
| 2204 | return subpath | ||
| 2205 | |||
| 2206 | |||
| 2207 | register_namespace_handler(pkgutil.ImpImporter, file_ns_handler) | ||
| 2208 | register_namespace_handler(zipimport.zipimporter, file_ns_handler) | ||
| 2209 | |||
| 2210 | if hasattr(importlib_machinery, 'FileFinder'): | ||
| 2211 | register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler) | ||
| 2212 | |||
| 2213 | |||
| 2214 | def null_ns_handler(importer, path_item, packageName, module): | ||
| 2215 | return None | ||
| 2216 | |||
| 2217 | |||
| 2218 | register_namespace_handler(object, null_ns_handler) | ||
| 2219 | |||
| 2220 | |||
| 2221 | def normalize_path(filename): | ||
| 2222 | """Normalize a file/dir name for comparison purposes""" | ||
| 2223 | return os.path.normcase(os.path.realpath(filename)) | ||
| 2224 | |||
| 2225 | |||
| 2226 | def _normalize_cached(filename, _cache={}): | ||
| 2227 | try: | ||
| 2228 | return _cache[filename] | ||
| 2229 | except KeyError: | ||
| 2230 | _cache[filename] = result = normalize_path(filename) | ||
| 2231 | return result | ||
| 2232 | |||
| 2233 | |||
| 2234 | def _is_egg_path(path): | ||
| 2235 | """ | ||
| 2236 | Determine if given path appears to be an egg. | ||
| 2237 | """ | ||
| 2238 | return path.lower().endswith('.egg') | ||
| 2239 | |||
| 2240 | |||
| 2241 | def _is_unpacked_egg(path): | ||
| 2242 | """ | ||
| 2243 | Determine if given path appears to be an unpacked egg. | ||
| 2244 | """ | ||
| 2245 | return ( | ||
| 2246 | _is_egg_path(path) and | ||
| 2247 | os.path.isfile(os.path.join(path, 'EGG-INFO', 'PKG-INFO')) | ||
| 2248 | ) | ||
| 2249 | |||
| 2250 | |||
| 2251 | def _set_parent_ns(packageName): | ||
| 2252 | parts = packageName.split('.') | ||
| 2253 | name = parts.pop() | ||
| 2254 | if parts: | ||
| 2255 | parent = '.'.join(parts) | ||
| 2256 | setattr(sys.modules[parent], name, sys.modules[packageName]) | ||
| 2257 | |||
| 2258 | |||
| 2259 | def yield_lines(strs): | ||
| 2260 | """Yield non-empty/non-comment lines of a string or sequence""" | ||
| 2261 | if isinstance(strs, six.string_types): | ||
| 2262 | for s in strs.splitlines(): | ||
| 2263 | s = s.strip() | ||
| 2264 | # skip blank lines/comments | ||
| 2265 | if s and not s.startswith('#'): | ||
| 2266 | yield s | ||
| 2267 | else: | ||
| 2268 | for ss in strs: | ||
| 2269 | for s in yield_lines(ss): | ||
| 2270 | yield s | ||
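| | # Example: blank lines and '#' comment lines are dropped: | ||
| | # | ||
| | #     list(yield_lines("foo\n\n# a comment\nbar")) | ||
| | #     # -> ['foo', 'bar'] | ||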
| 2271 | |||
| 2272 | |||
| 2273 | MODULE = re.compile(r"\w+(\.\w+)*$").match | ||
| 2274 | EGG_NAME = re.compile( | ||
| 2275 | r""" | ||
| 2276 | (?P<name>[^-]+) ( | ||
| 2277 | -(?P<ver>[^-]+) ( | ||
| 2278 | -py(?P<pyver>[^-]+) ( | ||
| 2279 | -(?P<plat>.+) | ||
| 2280 | )? | ||
| 2281 | )? | ||
| 2282 | )? | ||
| 2283 | """, | ||
| 2284 | re.VERBOSE | re.IGNORECASE, | ||
| 2285 | ).match | ||
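| | # Example: EGG_NAME splits an egg filename into its parts; for an | ||
| | # illustrative name like 'FooPkg-1.2-py2.7-linux_x86_64' it yields | ||
| | # name='FooPkg', ver='1.2', pyver='2.7', plat='linux_x86_64'. | ||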
| 2286 | |||
| 2287 | |||
| 2288 | class EntryPoint(object): | ||
| 2289 | """Object representing an advertised importable object""" | ||
| 2290 | |||
| 2291 | def __init__(self, name, module_name, attrs=(), extras=(), dist=None): | ||
| 2292 | if not MODULE(module_name): | ||
| 2293 | raise ValueError("Invalid module name", module_name) | ||
| 2294 | self.name = name | ||
| 2295 | self.module_name = module_name | ||
| 2296 | self.attrs = tuple(attrs) | ||
| 2297 | self.extras = tuple(extras) | ||
| 2298 | self.dist = dist | ||
| 2299 | |||
| 2300 | def __str__(self): | ||
| 2301 | s = "%s = %s" % (self.name, self.module_name) | ||
| 2302 | if self.attrs: | ||
| 2303 | s += ':' + '.'.join(self.attrs) | ||
| 2304 | if self.extras: | ||
| 2305 | s += ' [%s]' % ','.join(self.extras) | ||
| 2306 | return s | ||
| 2307 | |||
| 2308 | def __repr__(self): | ||
| 2309 | return "EntryPoint.parse(%r)" % str(self) | ||
| 2310 | |||
| 2311 | def load(self, require=True, *args, **kwargs): | ||
| 2312 | """ | ||
| 2313 | Require packages for this EntryPoint, then resolve it. | ||
| 2314 | """ | ||
| 2315 | if not require or args or kwargs: | ||
| 2316 | warnings.warn( | ||
| 2317 | "Parameters to load are deprecated. Call .resolve and " | ||
| 2318 | ".require separately.", | ||
| 2319 | DeprecationWarning, | ||
| 2320 | stacklevel=2, | ||
| 2321 | ) | ||
| 2322 | if require: | ||
| 2323 | self.require(*args, **kwargs) | ||
| 2324 | return self.resolve() | ||
| 2325 | |||
| 2326 | def resolve(self): | ||
| 2327 | """ | ||
| 2328 | Resolve the entry point from its module and attrs. | ||
| 2329 | """ | ||
| 2330 | module = __import__(self.module_name, fromlist=['__name__'], level=0) | ||
| 2331 | try: | ||
| 2332 | return functools.reduce(getattr, self.attrs, module) | ||
| 2333 | except AttributeError as exc: | ||
| 2334 | raise ImportError(str(exc)) | ||
| 2335 | |||
| 2336 | def require(self, env=None, installer=None): | ||
| 2337 | if self.extras and not self.dist: | ||
| 2338 | raise UnknownExtra("Can't require() without a distribution", self) | ||
| 2339 | |||
| 2340 | # Get the requirements for this entry point with all its extras and | ||
| 2341 | # then resolve them. We have to pass `extras` along when resolving so | ||
| 2342 | # that the working set knows what extras we want. Otherwise, for | ||
| 2343 | # dist-info distributions, the working set will assume that the | ||
| 2344 | # requirements for that extra are purely optional and skip over them. | ||
| 2345 | reqs = self.dist.requires(self.extras) | ||
| 2346 | items = working_set.resolve(reqs, env, installer, extras=self.extras) | ||
| 2347 | list(map(working_set.add, items)) | ||
| 2348 | |||
| 2349 | pattern = re.compile( | ||
| 2350 | r'\s*' | ||
| 2351 | r'(?P<name>.+?)\s*' | ||
| 2352 | r'=\s*' | ||
| 2353 | r'(?P<module>[\w.]+)\s*' | ||
| 2354 | r'(:\s*(?P<attr>[\w.]+))?\s*' | ||
| 2355 | r'(?P<extras>\[.*\])?\s*$' | ||
| 2356 | ) | ||
| 2357 | |||
| 2358 | @classmethod | ||
| 2359 | def parse(cls, src, dist=None): | ||
| 2360 | """Parse a single entry point from string `src` | ||
| 2361 | |||
| 2362 | Entry point syntax follows the form:: | ||
| 2363 | |||
| 2364 | name = some.module:some.attr [extra1, extra2] | ||
| 2365 | |||
| 2366 | The entry name and module name are required, but the ``:attrs`` and | ||
| 2367 | ``[extras]`` parts are optional | ||
| 2368 | """ | ||
| 2369 | m = cls.pattern.match(src) | ||
| 2370 | if not m: | ||
| 2371 | msg = "EntryPoint must be in 'name=module:attrs [extras]' format" | ||
| 2372 | raise ValueError(msg, src) | ||
| 2373 | res = m.groupdict() | ||
| 2374 | extras = cls._parse_extras(res['extras']) | ||
| 2375 | attrs = res['attr'].split('.') if res['attr'] else () | ||
| 2376 | return cls(res['name'], res['module'], attrs, extras, dist) | ||
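| | # Usage sketch (names are illustrative): | ||
| | # | ||
| | #     ep = EntryPoint.parse('main = mypkg.cli:run [extra1]') | ||
| | #     (ep.name, ep.module_name, ep.attrs, ep.extras) | ||
| | #     # -> ('main', 'mypkg.cli', ('run',), ('extra1',)) | ||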
| 2377 | |||
| 2378 | @classmethod | ||
| 2379 | def _parse_extras(cls, extras_spec): | ||
| 2380 | if not extras_spec: | ||
| 2381 | return () | ||
| 2382 | req = Requirement.parse('x' + extras_spec) | ||
| 2383 | if req.specs: | ||
| 2384 | raise ValueError("Invalid extras: version specifiers not allowed", extras_spec) | ||
| 2385 | return req.extras | ||
| 2386 | |||
| 2387 | @classmethod | ||
| 2388 | def parse_group(cls, group, lines, dist=None): | ||
| 2389 | """Parse an entry point group""" | ||
| 2390 | if not MODULE(group): | ||
| 2391 | raise ValueError("Invalid group name", group) | ||
| 2392 | this = {} | ||
| 2393 | for line in yield_lines(lines): | ||
| 2394 | ep = cls.parse(line, dist) | ||
| 2395 | if ep.name in this: | ||
| 2396 | raise ValueError("Duplicate entry point", group, ep.name) | ||
| 2397 | this[ep.name] = ep | ||
| 2398 | return this | ||
| 2399 | |||
| 2400 | @classmethod | ||
| 2401 | def parse_map(cls, data, dist=None): | ||
| 2402 | """Parse a map of entry point groups""" | ||
| 2403 | if isinstance(data, dict): | ||
| 2404 | data = data.items() | ||
| 2405 | else: | ||
| 2406 | data = split_sections(data) | ||
| 2407 | maps = {} | ||
| 2408 | for group, lines in data: | ||
| 2409 | if group is None: | ||
| 2410 | if not lines: | ||
| 2411 | continue | ||
| 2412 | raise ValueError("Entry points must be listed in groups") | ||
| 2413 | group = group.strip() | ||
| 2414 | if group in maps: | ||
| 2415 | raise ValueError("Duplicate group name", group) | ||
| 2416 | maps[group] = cls.parse_group(group, lines, dist) | ||
| 2417 | return maps | ||
| 2418 | |||
| 2419 | |||
| 2420 | def _remove_md5_fragment(location): | ||
| 2421 | if not location: | ||
| 2422 | return '' | ||
| 2423 | parsed = urllib.parse.urlparse(location) | ||
| 2424 | if parsed[-1].startswith('md5='): | ||
| 2425 | return urllib.parse.urlunparse(parsed[:-1] + ('',)) | ||
| 2426 | return location | ||
| 2427 | |||
| 2428 | |||
| 2429 | def _version_from_file(lines): | ||
| 2430 | """ | ||
| 2431 | Given an iterable of lines from a Metadata file, return | ||
| 2432 | the value of the Version field, if present, or None otherwise. | ||
| 2433 | """ | ||
| 2434 | def is_version_line(line): | ||
| 2435 | return line.lower().startswith('version:') | ||
| 2436 | version_lines = filter(is_version_line, lines) | ||
| 2437 | line = next(iter(version_lines), '') | ||
| 2438 | _, _, value = line.partition(':') | ||
| 2439 | return safe_version(value.strip()) or None | ||
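| | # Example: given lines such as ['Name: foo', 'Version: 1.2'] this | ||
| | # returns '1.2'; without a 'Version:' line it returns None. | ||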
| 2440 | |||
| 2441 | |||
| 2442 | class Distribution(object): | ||
| 2443 | """Wrap an actual or potential sys.path entry w/metadata""" | ||
| 2444 | PKG_INFO = 'PKG-INFO' | ||
| 2445 | |||
| 2446 | def __init__( | ||
| 2447 | self, location=None, metadata=None, project_name=None, | ||
| 2448 | version=None, py_version=PY_MAJOR, platform=None, | ||
| 2449 | precedence=EGG_DIST): | ||
| 2450 | self.project_name = safe_name(project_name or 'Unknown') | ||
| 2451 | if version is not None: | ||
| 2452 | self._version = safe_version(version) | ||
| 2453 | self.py_version = py_version | ||
| 2454 | self.platform = platform | ||
| 2455 | self.location = location | ||
| 2456 | self.precedence = precedence | ||
| 2457 | self._provider = metadata or empty_provider | ||
| 2458 | |||
| 2459 | @classmethod | ||
| 2460 | def from_location(cls, location, basename, metadata=None, **kw): | ||
| 2461 | project_name, version, py_version, platform = [None] * 4 | ||
| 2462 | basename, ext = os.path.splitext(basename) | ||
| 2463 | if ext.lower() in _distributionImpl: | ||
| 2464 | cls = _distributionImpl[ext.lower()] | ||
| 2465 | |||
| 2466 | match = EGG_NAME(basename) | ||
| 2467 | if match: | ||
| 2468 | project_name, version, py_version, platform = match.group( | ||
| 2469 | 'name', 'ver', 'pyver', 'plat' | ||
| 2470 | ) | ||
| 2471 | return cls( | ||
| 2472 | location, metadata, project_name=project_name, version=version, | ||
| 2473 | py_version=py_version, platform=platform, **kw | ||
| 2474 | )._reload_version() | ||
| 2475 | |||
| 2476 | def _reload_version(self): | ||
| 2477 | return self | ||
| 2478 | |||
| 2479 | @property | ||
| 2480 | def hashcmp(self): | ||
| 2481 | return ( | ||
| 2482 | self.parsed_version, | ||
| 2483 | self.precedence, | ||
| 2484 | self.key, | ||
| 2485 | _remove_md5_fragment(self.location), | ||
| 2486 | self.py_version or '', | ||
| 2487 | self.platform or '', | ||
| 2488 | ) | ||
| 2489 | |||
| 2490 | def __hash__(self): | ||
| 2491 | return hash(self.hashcmp) | ||
| 2492 | |||
| 2493 | def __lt__(self, other): | ||
| 2494 | return self.hashcmp < other.hashcmp | ||
| 2495 | |||
| 2496 | def __le__(self, other): | ||
| 2497 | return self.hashcmp <= other.hashcmp | ||
| 2498 | |||
| 2499 | def __gt__(self, other): | ||
| 2500 | return self.hashcmp > other.hashcmp | ||
| 2501 | |||
| 2502 | def __ge__(self, other): | ||
| 2503 | return self.hashcmp >= other.hashcmp | ||
| 2504 | |||
| 2505 | def __eq__(self, other): | ||
| 2506 | if not isinstance(other, self.__class__): | ||
| 2507 | # It's not a Distribution, so they are not equal | ||
| 2508 | return False | ||
| 2509 | return self.hashcmp == other.hashcmp | ||
| 2510 | |||
| 2511 | def __ne__(self, other): | ||
| 2512 | return not self == other | ||
| 2513 | |||
| 2514 | # These properties have to be lazy so that we don't have to load any | ||
| 2515 | # metadata until/unless it's actually needed. (i.e., some distributions | ||
| 2516 | # may not know their name or version without loading PKG-INFO) | ||
| 2517 | |||
| 2518 | @property | ||
| 2519 | def key(self): | ||
| 2520 | try: | ||
| 2521 | return self._key | ||
| 2522 | except AttributeError: | ||
| 2523 | self._key = key = self.project_name.lower() | ||
| 2524 | return key | ||
| 2525 | |||
| 2526 | @property | ||
| 2527 | def parsed_version(self): | ||
| 2528 | if not hasattr(self, "_parsed_version"): | ||
| 2529 | self._parsed_version = parse_version(self.version) | ||
| 2530 | |||
| 2531 | return self._parsed_version | ||
| 2532 | |||
| 2533 | def _warn_legacy_version(self): | ||
| 2534 | LV = packaging.version.LegacyVersion | ||
| 2535 | is_legacy = isinstance(self._parsed_version, LV) | ||
| 2536 | if not is_legacy: | ||
| 2537 | return | ||
| 2538 | |||
| 2539 | # While an empty version is technically a legacy version and | ||
| 2540 | # is not a valid PEP 440 version, it is unlikely to come from | ||
| 2541 | # a real project; more likely it comes from setuptools | ||
| 2542 | # attempting to parse a filename and including it in the | ||
| 2543 | # list. So gate this warning on whether the version is | ||
| 2544 | # non-empty. | ||
| 2545 | if not self.version: | ||
| 2546 | return | ||
| 2547 | |||
| 2548 | tmpl = textwrap.dedent(""" | ||
| 2549 | '{project_name} ({version})' is being parsed as a legacy, | ||
| 2550 | non-PEP 440 | ||
| 2551 | version. You may find odd behavior and sort order. | ||
| 2552 | In particular it will be sorted as less than 0.0. It | ||
| 2553 | is recommended to migrate to PEP 440 compatible | ||
| 2554 | versions. | ||
| 2555 | """).strip().replace('\n', ' ') | ||
| 2556 | |||
| 2557 | warnings.warn(tmpl.format(**vars(self)), PEP440Warning) | ||
| 2558 | |||
| 2559 | @property | ||
| 2560 | def version(self): | ||
| 2561 | try: | ||
| 2562 | return self._version | ||
| 2563 | except AttributeError: | ||
| 2564 | version = _version_from_file(self._get_metadata(self.PKG_INFO)) | ||
| 2565 | if version is None: | ||
| 2566 | tmpl = "Missing 'Version:' header and/or %s file" | ||
| 2567 | raise ValueError(tmpl % self.PKG_INFO, self) | ||
| 2568 | return version | ||
| 2569 | |||
| 2570 | @property | ||
| 2571 | def _dep_map(self): | ||
| 2572 | """ | ||
| 2573 | A map of extra to its list of (direct) requirements | ||
| 2574 | for this distribution, including the null extra. | ||
| 2575 | """ | ||
| 2576 | try: | ||
| 2577 | return self.__dep_map | ||
| 2578 | except AttributeError: | ||
| 2579 | self.__dep_map = self._filter_extras(self._build_dep_map()) | ||
| 2580 | return self.__dep_map | ||
| 2581 | |||
| 2582 | @staticmethod | ||
| 2583 | def _filter_extras(dm): | ||
| 2584 | """ | ||
| 2585 | Given a mapping of extras to dependencies, strip off | ||
| 2586 | environment markers and filter out any dependencies | ||
| 2587 | not matching the markers. | ||
| 2588 | """ | ||
| 2589 | for extra in list(filter(None, dm)): | ||
| 2590 | new_extra = extra | ||
| 2591 | reqs = dm.pop(extra) | ||
| 2592 | new_extra, _, marker = extra.partition(':') | ||
| 2593 | fails_marker = marker and ( | ||
| 2594 | invalid_marker(marker) | ||
| 2595 | or not evaluate_marker(marker) | ||
| 2596 | ) | ||
| 2597 | if fails_marker: | ||
| 2598 | reqs = [] | ||
| 2599 | new_extra = safe_extra(new_extra) or None | ||
| 2600 | |||
| 2601 | dm.setdefault(new_extra, []).extend(reqs) | ||
| 2602 | return dm | ||
| 2603 | |||
| 2604 | def _build_dep_map(self): | ||
| 2605 | dm = {} | ||
| 2606 | for name in 'requires.txt', 'depends.txt': | ||
| 2607 | for extra, reqs in split_sections(self._get_metadata(name)): | ||
| 2608 | dm.setdefault(extra, []).extend(parse_requirements(reqs)) | ||
| 2609 | return dm | ||
| 2610 | |||
| 2611 | def requires(self, extras=()): | ||
| 2612 | """List of Requirements needed for this distro if `extras` are used""" | ||
| 2613 | dm = self._dep_map | ||
| 2614 | deps = [] | ||
| 2615 | deps.extend(dm.get(None, ())) | ||
| 2616 | for ext in extras: | ||
| 2617 | try: | ||
| 2618 | deps.extend(dm[safe_extra(ext)]) | ||
| 2619 | except KeyError: | ||
| 2620 | raise UnknownExtra( | ||
| 2621 | "%s has no such extra feature %r" % (self, ext) | ||
| 2622 | ) | ||
| 2623 | return deps | ||
| 2624 | |||
| 2625 | def _get_metadata(self, name): | ||
| 2626 | if self.has_metadata(name): | ||
| 2627 | for line in self.get_metadata_lines(name): | ||
| 2628 | yield line | ||
| 2629 | |||
| 2630 | def activate(self, path=None, replace=False): | ||
| 2631 | """Ensure distribution is importable on `path` (default=sys.path)""" | ||
| 2632 | if path is None: | ||
| 2633 | path = sys.path | ||
| 2634 | self.insert_on(path, replace=replace) | ||
| 2635 | if path is sys.path: | ||
| 2636 | fixup_namespace_packages(self.location) | ||
| 2637 | for pkg in self._get_metadata('namespace_packages.txt'): | ||
| 2638 | if pkg in sys.modules: | ||
| 2639 | declare_namespace(pkg) | ||
| 2640 | |||
| 2641 | def egg_name(self): | ||
| 2642 | """Return what this distribution's standard .egg filename should be""" | ||
| 2643 | filename = "%s-%s-py%s" % ( | ||
| 2644 | to_filename(self.project_name), to_filename(self.version), | ||
| 2645 | self.py_version or PY_MAJOR | ||
| 2646 | ) | ||
| 2647 | |||
| 2648 | if self.platform: | ||
| 2649 | filename += '-' + self.platform | ||
| 2650 | return filename | ||
| 2651 | |||
| 2652 | def __repr__(self): | ||
| 2653 | if self.location: | ||
| 2654 | return "%s (%s)" % (self, self.location) | ||
| 2655 | else: | ||
| 2656 | return str(self) | ||
| 2657 | |||
| 2658 | def __str__(self): | ||
| 2659 | try: | ||
| 2660 | version = getattr(self, 'version', None) | ||
| 2661 | except ValueError: | ||
| 2662 | version = None | ||
| 2663 | version = version or "[unknown version]" | ||
| 2664 | return "%s %s" % (self.project_name, version) | ||
| 2665 | |||
| 2666 | def __getattr__(self, attr): | ||
| 2667 | """Delegate all unrecognized public attributes to .metadata provider""" | ||
| 2668 | if attr.startswith('_'): | ||
| 2669 | raise AttributeError(attr) | ||
| 2670 | return getattr(self._provider, attr) | ||
| 2671 | |||
| 2672 | @classmethod | ||
| 2673 | def from_filename(cls, filename, metadata=None, **kw): | ||
| 2674 | return cls.from_location( | ||
| 2675 | _normalize_cached(filename), os.path.basename(filename), metadata, | ||
| 2676 | **kw | ||
| 2677 | ) | ||
| 2678 | |||
| 2679 | def as_requirement(self): | ||
| 2680 | """Return a ``Requirement`` that matches this distribution exactly""" | ||
| 2681 | if isinstance(self.parsed_version, packaging.version.Version): | ||
| 2682 | spec = "%s==%s" % (self.project_name, self.parsed_version) | ||
| 2683 | else: | ||
| 2684 | spec = "%s===%s" % (self.project_name, self.parsed_version) | ||
| 2685 | |||
| 2686 | return Requirement.parse(spec) | ||
| 2687 | |||
| 2688 | def load_entry_point(self, group, name): | ||
| 2689 | """Return the `name` entry point of `group` or raise ImportError""" | ||
| 2690 | ep = self.get_entry_info(group, name) | ||
| 2691 | if ep is None: | ||
| 2692 | raise ImportError("Entry point %r not found" % ((group, name),)) | ||
| 2693 | return ep.load() | ||
| 2694 | |||
| 2695 | def get_entry_map(self, group=None): | ||
| 2696 | """Return the entry point map for `group`, or the full entry map""" | ||
| 2697 | try: | ||
| 2698 | ep_map = self._ep_map | ||
| 2699 | except AttributeError: | ||
| 2700 | ep_map = self._ep_map = EntryPoint.parse_map( | ||
| 2701 | self._get_metadata('entry_points.txt'), self | ||
| 2702 | ) | ||
| 2703 | if group is not None: | ||
| 2704 | return ep_map.get(group, {}) | ||
| 2705 | return ep_map | ||
| 2706 | |||
| 2707 | def get_entry_info(self, group, name): | ||
| 2708 | """Return the EntryPoint object for `group`+`name`, or ``None``""" | ||
| 2709 | return self.get_entry_map(group).get(name) | ||
| 2710 | |||
| 2711 | def insert_on(self, path, loc=None, replace=False): | ||
| 2712 | """Ensure self.location is on path | ||
| 2713 | |||
| 2714 | If replace=False (default): | ||
| 2715 | - If location is already in path anywhere, do nothing. | ||
| 2716 | - Else: | ||
| 2717 | - If it's an egg and its parent directory is on path, | ||
| 2718 | insert just ahead of the parent. | ||
| 2719 | - Else: add to the end of path. | ||
| 2720 | If replace=True: | ||
| 2721 | - If location is already on path anywhere (not eggs) | ||
| 2722 | or higher priority than its parent (eggs) | ||
| 2723 | do nothing. | ||
| 2724 | - Else: | ||
| 2725 | - If it's an egg and its parent directory is on path, | ||
| 2726 | insert just ahead of the parent, | ||
| 2727 | removing any lower-priority entries. | ||
| 2728 | - Else: add it to the front of path. | ||
| 2729 | """ | ||
| 2730 | |||
| 2731 | loc = loc or self.location | ||
| 2732 | if not loc: | ||
| 2733 | return | ||
| 2734 | |||
| 2735 | nloc = _normalize_cached(loc) | ||
| 2736 | bdir = os.path.dirname(nloc) | ||
| 2737 | npath = [(p and _normalize_cached(p) or p) for p in path] | ||
| 2738 | |||
| 2739 | for p, item in enumerate(npath): | ||
| 2740 | if item == nloc: | ||
| 2741 | if replace: | ||
| 2742 | break | ||
| 2743 | else: | ||
| 2744 | # don't modify path (not even to remove duplicates) if | ||
| 2745 | # the location was found and replace is False | ||
| 2746 | return | ||
| 2747 | elif item == bdir and self.precedence == EGG_DIST: | ||
| 2748 | # if it's an .egg, give it precedence over its directory | ||
| 2749 | # UNLESS it's already been added to sys.path and replace=False | ||
| 2750 | if (not replace) and nloc in npath[p:]: | ||
| 2751 | return | ||
| 2752 | if path is sys.path: | ||
| 2753 | self.check_version_conflict() | ||
| 2754 | path.insert(p, loc) | ||
| 2755 | npath.insert(p, nloc) | ||
| 2756 | break | ||
| 2757 | else: | ||
| 2758 | if path is sys.path: | ||
| 2759 | self.check_version_conflict() | ||
| 2760 | if replace: | ||
| 2761 | path.insert(0, loc) | ||
| 2762 | else: | ||
| 2763 | path.append(loc) | ||
| 2764 | return | ||
| 2765 | |||
| 2766 | # p is the spot where we found or inserted loc; now remove duplicates | ||
| 2767 | while True: | ||
| 2768 | try: | ||
| 2769 | np = npath.index(nloc, p + 1) | ||
| 2770 | except ValueError: | ||
| 2771 | break | ||
| 2772 | else: | ||
| 2773 | del npath[np], path[np] | ||
| 2774 | # resume the scan just past the entry we removed | ||
| 2775 | p = np | ||
| 2776 | |||
| 2777 | return | ||
| 2778 | |||
| 2779 | def check_version_conflict(self): | ||
| 2780 | if self.key == 'setuptools': | ||
| 2781 | # ignore the inevitable setuptools self-conflicts :( | ||
| 2782 | return | ||
| 2783 | |||
| 2784 | nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt')) | ||
| 2785 | loc = normalize_path(self.location) | ||
| 2786 | for modname in self._get_metadata('top_level.txt'): | ||
| 2787 | if (modname not in sys.modules or modname in nsp | ||
| 2788 | or modname in _namespace_packages): | ||
| 2789 | continue | ||
| 2790 | if modname in ('pkg_resources', 'setuptools', 'site'): | ||
| 2791 | continue | ||
| 2792 | fn = getattr(sys.modules[modname], '__file__', None) | ||
| 2793 | if fn and (normalize_path(fn).startswith(loc) or | ||
| 2794 | fn.startswith(self.location)): | ||
| 2795 | continue | ||
| 2796 | issue_warning( | ||
| 2797 | "Module %s was already imported from %s, but %s is being added" | ||
| 2798 | " to sys.path" % (modname, fn, self.location), | ||
| 2799 | ) | ||
| 2800 | |||
| 2801 | def has_version(self): | ||
| 2802 | try: | ||
| 2803 | self.version | ||
| 2804 | except ValueError: | ||
| 2805 | issue_warning("Unbuilt egg for " + repr(self)) | ||
| 2806 | return False | ||
| 2807 | return True | ||
| 2808 | |||
| 2809 | def clone(self, **kw): | ||
| 2810 | """Copy this distribution, substituting in any changed keyword args""" | ||
| 2811 | names = 'project_name version py_version platform location precedence' | ||
| 2812 | for attr in names.split(): | ||
| 2813 | kw.setdefault(attr, getattr(self, attr, None)) | ||
| 2814 | kw.setdefault('metadata', self._provider) | ||
| 2815 | return self.__class__(**kw) | ||
| 2816 | |||
| 2817 | @property | ||
| 2818 | def extras(self): | ||
| 2819 | return [dep for dep in self._dep_map if dep] | ||
| 2820 | |||
| 2821 | |||
| 2822 | class EggInfoDistribution(Distribution): | ||
| 2823 | def _reload_version(self): | ||
| 2824 | """ | ||
| 2825 | Packages installed by distutils (e.g. numpy or scipy) | ||
| 2826 | use an old safe_version, so their version numbers | ||
| 2827 | can get mangled when converted to filenames | ||
| 2828 | (e.g., 1.11.0.dev0+2329eae to 1.11.0.dev0_2329eae). | ||
| 2829 | Such distributions will not be parsed properly | ||
| 2830 | downstream by Distribution and safe_version, so | ||
| 2831 | take an extra step and try to get the version number | ||
| 2832 | from the metadata file itself instead of the | ||
| 2833 | filename. | ||
| 2834 | """ | ||
| 2835 | md_version = _version_from_file(self._get_metadata(self.PKG_INFO)) | ||
| 2836 | if md_version: | ||
| 2837 | self._version = md_version | ||
| 2838 | return self | ||
| 2839 | |||
| 2840 | |||
| 2841 | class DistInfoDistribution(Distribution): | ||
| 2842 | """ | ||
| 2843 | Wrap an actual or potential sys.path entry | ||
| 2844 | w/metadata, .dist-info style. | ||
| 2845 | """ | ||
| 2846 | PKG_INFO = 'METADATA' | ||
| 2847 | EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])") | ||
| 2848 | |||
| 2849 | @property | ||
| 2850 | def _parsed_pkg_info(self): | ||
| 2851 | """Parse and cache metadata""" | ||
| 2852 | try: | ||
| 2853 | return self._pkg_info | ||
| 2854 | except AttributeError: | ||
| 2855 | metadata = self.get_metadata(self.PKG_INFO) | ||
| 2856 | self._pkg_info = email.parser.Parser().parsestr(metadata) | ||
| 2857 | return self._pkg_info | ||
| 2858 | |||
| 2859 | @property | ||
| 2860 | def _dep_map(self): | ||
| 2861 | try: | ||
| 2862 | return self.__dep_map | ||
| 2863 | except AttributeError: | ||
| 2864 | self.__dep_map = self._compute_dependencies() | ||
| 2865 | return self.__dep_map | ||
| 2866 | |||
| 2867 | def _compute_dependencies(self): | ||
| 2868 | """Recompute this distribution's dependencies.""" | ||
| 2869 | dm = self.__dep_map = {None: []} | ||
| 2870 | |||
| 2871 | reqs = [] | ||
| 2872 | # including any requirements gated by environment markers | ||
| 2873 | for req in self._parsed_pkg_info.get_all('Requires-Dist') or []: | ||
| 2874 | reqs.extend(parse_requirements(req)) | ||
| 2875 | |||
| 2876 | def reqs_for_extra(extra): | ||
| 2877 | for req in reqs: | ||
| 2878 | if not req.marker or req.marker.evaluate({'extra': extra}): | ||
| 2879 | yield req | ||
| 2880 | |||
| 2881 | common = frozenset(reqs_for_extra(None)) | ||
| 2882 | dm[None].extend(common) | ||
| 2883 | |||
| 2884 | for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []: | ||
| 2885 | s_extra = safe_extra(extra.strip()) | ||
| 2886 | dm[s_extra] = list(frozenset(reqs_for_extra(extra)) - common) | ||
| 2887 | |||
| 2888 | return dm | ||
| 2889 | |||
| 2890 | |||
| 2891 | _distributionImpl = { | ||
| 2892 | '.egg': Distribution, | ||
| 2893 | '.egg-info': EggInfoDistribution, | ||
| 2894 | '.dist-info': DistInfoDistribution, | ||
| 2895 | } | ||
| 2896 | |||
| 2897 | |||
| 2898 | def issue_warning(*args, **kw): | ||
| 2899 | level = 1 | ||
| 2900 | g = globals() | ||
| 2901 | try: | ||
| 2902 | # find the first stack frame that is *not* code in | ||
| 2903 | # the pkg_resources module, to use for the warning | ||
| 2904 | while sys._getframe(level).f_globals is g: | ||
| 2905 | level += 1 | ||
| 2906 | except ValueError: | ||
| 2907 | pass | ||
| 2908 | warnings.warn(stacklevel=level + 1, *args, **kw) | ||
| 2909 | |||
| 2910 | |||
| 2911 | class RequirementParseError(ValueError): | ||
| 2912 | def __str__(self): | ||
| 2913 | return ' '.join(self.args) | ||
| 2914 | |||
| 2915 | |||
| 2916 | def parse_requirements(strs): | ||
| 2917 | """Yield ``Requirement`` objects for each specification in `strs` | ||
| 2918 | |||
| 2919 | `strs` must be a string, or a (possibly-nested) iterable thereof. | ||
| 2920 | """ | ||
| 2921 | # create a steppable iterator, so we can handle \-continuations | ||
| 2922 | lines = iter(yield_lines(strs)) | ||
| 2923 | |||
| 2924 | for line in lines: | ||
| 2925 | # Drop comments -- a hash without a space may be in a URL. | ||
| 2926 | if ' #' in line: | ||
| 2927 | line = line[:line.find(' #')] | ||
| 2928 | # If there is a line continuation, drop it, and append the next line. | ||
| 2929 | if line.endswith('\\'): | ||
| 2930 | line = line[:-1].strip() | ||
| 2931 | try: | ||
| 2932 | line += next(lines) | ||
| 2933 | except StopIteration: | ||
| 2934 | return | ||
| 2935 | yield Requirement(line) | ||
| 2936 | |||
| 2937 | |||
| 2938 | class Requirement(packaging.requirements.Requirement): | ||
| 2939 | def __init__(self, requirement_string): | ||
| 2940 | """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!""" | ||
| 2941 | try: | ||
| 2942 | super(Requirement, self).__init__(requirement_string) | ||
| 2943 | except packaging.requirements.InvalidRequirement as e: | ||
| 2944 | raise RequirementParseError(str(e)) | ||
| 2945 | self.unsafe_name = self.name | ||
| 2946 | project_name = safe_name(self.name) | ||
| 2947 | self.project_name, self.key = project_name, project_name.lower() | ||
| 2948 | self.specs = [ | ||
| 2949 | (spec.operator, spec.version) for spec in self.specifier] | ||
| 2950 | self.extras = tuple(map(safe_extra, self.extras)) | ||
| 2951 | self.hashCmp = ( | ||
| 2952 | self.key, | ||
| 2953 | self.specifier, | ||
| 2954 | frozenset(self.extras), | ||
| 2955 | str(self.marker) if self.marker else None, | ||
| 2956 | ) | ||
| 2957 | self.__hash = hash(self.hashCmp) | ||
| 2958 | |||
| 2959 | def __eq__(self, other): | ||
| 2960 | return ( | ||
| 2961 | isinstance(other, Requirement) and | ||
| 2962 | self.hashCmp == other.hashCmp | ||
| 2963 | ) | ||
| 2964 | |||
| 2965 | def __ne__(self, other): | ||
| 2966 | return not self == other | ||
| 2967 | |||
| 2968 | def __contains__(self, item): | ||
| 2969 | if isinstance(item, Distribution): | ||
| 2970 | if item.key != self.key: | ||
| 2971 | return False | ||
| 2972 | |||
| 2973 | item = item.version | ||
| 2974 | |||
| 2975 | # Allow prereleases always in order to match the previous behavior of | ||
| 2976 | # this method. In the future this should be smarter and follow PEP 440 | ||
| 2977 | # more accurately. | ||
| 2978 | return self.specifier.contains(item, prereleases=True) | ||
| 2979 | |||
| 2980 | def __hash__(self): | ||
| 2981 | return self.__hash | ||
| 2982 | |||
| 2983 | def __repr__(self): | ||
| 2984 | return "Requirement.parse(%r)" % str(self) | ||
| 2985 | |||
| 2986 | @staticmethod | ||
| 2987 | def parse(s): | ||
| 2988 | req, = parse_requirements(s) | ||
| 2989 | return req | ||
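| | # Usage sketch (`dist` stands for any Distribution instance): | ||
| | # | ||
| | #     req = Requirement.parse('pip>=10.0') | ||
| | #     req.key        # 'pip' | ||
| | #     req.specs      # [('>=', '10.0')] | ||
| | #     dist in req    # True when the key matches and the version | ||
| | #                    # satisfies the specifier | ||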
| 2990 | |||
| 2991 | |||
| 2992 | def _always_object(classes): | ||
| 2993 | """ | ||
| 2994 | Ensure object appears in the mro even | ||
| 2995 | for old-style classes. | ||
| 2996 | """ | ||
| 2997 | if object not in classes: | ||
| 2998 | return classes + (object,) | ||
| 2999 | return classes | ||
| 3000 | |||
| 3001 | |||
| 3002 | def _find_adapter(registry, ob): | ||
| 3003 | """Return an adapter factory for `ob` from `registry`""" | ||
| 3004 | types = _always_object(inspect.getmro(getattr(ob, '__class__', type(ob)))) | ||
| 3005 | for t in types: | ||
| 3006 | if t in registry: | ||
| 3007 | return registry[t] | ||
| 3008 | |||
| 3009 | |||
| 3010 | def ensure_directory(path): | ||
| 3011 | """Ensure that the parent directory of `path` exists""" | ||
| 3012 | dirname = os.path.dirname(path) | ||
| 3013 | py31compat.makedirs(dirname, exist_ok=True) | ||
| 3014 | |||
| 3015 | |||
| 3016 | def _bypass_ensure_directory(path): | ||
| 3017 | """Sandbox-bypassing version of ensure_directory()""" | ||
| 3018 | if not WRITE_SUPPORT: | ||
| 3019 | raise IOError('"os.mkdir" not supported on this platform.') | ||
| 3020 | dirname, filename = split(path) | ||
| 3021 | if dirname and filename and not isdir(dirname): | ||
| 3022 | _bypass_ensure_directory(dirname) | ||
| 3023 | mkdir(dirname, 0o755) | ||
| 3024 | |||
| 3025 | |||
| 3026 | def split_sections(s): | ||
| 3027 | """Split a string or iterable thereof into (section, content) pairs | ||
| 3028 | |||
| 3029 | Each ``section`` is a stripped version of the section header ("[section]") | ||
| 3030 | and each ``content`` is a list of stripped lines excluding blank lines and | ||
| 3031 | comment-only lines. If there are any such lines before the first section | ||
| 3032 | header, they're returned in a first ``section`` of ``None``. | ||
| 3033 | """ | ||
| 3034 | section = None | ||
| 3035 | content = [] | ||
| 3036 | for line in yield_lines(s): | ||
| 3037 | if line.startswith("["): | ||
| 3038 | if line.endswith("]"): | ||
| 3039 | if section or content: | ||
| 3040 | yield section, content | ||
| 3041 | section = line[1:-1].strip() | ||
| 3042 | content = [] | ||
| 3043 | else: | ||
| 3044 | raise ValueError("Invalid section heading", line) | ||
| 3045 | else: | ||
| 3046 | content.append(line) | ||
| 3047 | |||
| 3048 | # wrap up last segment | ||
| 3049 | yield section, content | ||
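| | # Example: | ||
| | # | ||
| | #     list(split_sections(['a', '[sec]', 'b'])) | ||
| | #     # -> [(None, ['a']), ('sec', ['b'])] | ||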
| 3050 | |||
| 3051 | |||
| 3052 | def _mkstemp(*args, **kw): | ||
| 3053 | old_open = os.open | ||
| 3054 | try: | ||
| 3055 | # temporarily bypass sandboxing | ||
| 3056 | os.open = os_open | ||
| 3057 | return tempfile.mkstemp(*args, **kw) | ||
| 3058 | finally: | ||
| 3059 | # and then put it back | ||
| 3060 | os.open = old_open | ||
| 3061 | |||
| 3062 | |||
| 3063 | # Silence the PEP440Warning by default, so that end users don't get hit by it | ||
| 3064 | # randomly just because they use pkg_resources. We want to append the rule | ||
| 3065 | # because we want earlier uses of filterwarnings to take precedence over this | ||
| 3066 | # one. | ||
| 3067 | warnings.filterwarnings("ignore", category=PEP440Warning, append=True) | ||
| 3068 | |||
| 3069 | |||
| 3070 | # from jaraco.functools 1.3 | ||
| 3071 | def _call_aside(f, *args, **kwargs): | ||
| 3072 | f(*args, **kwargs) | ||
| 3073 | return f | ||
| 3074 | |||
| 3075 | |||
| 3076 | @_call_aside | ||
| 3077 | def _initialize(g=globals()): | ||
| 3078 | "Set up global resource manager (deliberately not state-saved)" | ||
| 3079 | manager = ResourceManager() | ||
| 3080 | g['_manager'] = manager | ||
| 3081 | g.update( | ||
| 3082 | (name, getattr(manager, name)) | ||
| 3083 | for name in dir(manager) | ||
| 3084 | if not name.startswith('_') | ||
| 3085 | ) | ||
| 3086 | |||
| 3087 | |||
| 3088 | @_call_aside | ||
| 3089 | def _initialize_master_working_set(): | ||
| 3090 | """ | ||
| 3091 | Prepare the master working set and make the ``require()`` | ||
| 3092 | API available. | ||
| 3093 | |||
| 3094 | This function has explicit effects on the global state | ||
| 3095 | of pkg_resources. It is intended to be invoked once at | ||
| 3096 | the initialization of this module. | ||
| 3097 | |||
| 3098 | Invocation by other packages is unsupported and done | ||
| 3099 | at their own risk. | ||
| 3100 | """ | ||
| 3101 | working_set = WorkingSet._build_master() | ||
| 3102 | _declare_state('object', working_set=working_set) | ||
| 3103 | |||
| 3104 | require = working_set.require | ||
| 3105 | iter_entry_points = working_set.iter_entry_points | ||
| 3106 | add_activation_listener = working_set.subscribe | ||
| 3107 | run_script = working_set.run_script | ||
| 3108 | # backward compatibility | ||
| 3109 | run_main = run_script | ||
| 3110 | # Activate all distributions already on sys.path with replace=False and | ||
| 3111 | # ensure that all distributions added to the working set in the future | ||
| 3112 | # (e.g. by calling ``require()``) will get activated as well, | ||
| 3113 | # with higher priority (replace=True). | ||
| 3114 | tuple( | ||
| 3115 | dist.activate(replace=False) | ||
| 3116 | for dist in working_set | ||
| 3117 | ) | ||
| 3118 | add_activation_listener( | ||
| 3119 | lambda dist: dist.activate(replace=True), | ||
| 3120 | existing=False, | ||
| 3121 | ) | ||
| 3122 | working_set.entries = [] | ||
| 3123 | # match order | ||
| 3124 | list(map(working_set.add_entry, sys.path)) | ||
| 3125 | globals().update(locals()) | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/pkg_resources/py31compat.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/pkg_resources/py31compat.py new file mode 100644 index 0000000..3e1c152 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/pkg_resources/py31compat.py | |||
| @@ -0,0 +1,22 @@ | |||
| 1 | import os | ||
| 2 | import errno | ||
| 3 | import sys | ||
| 4 | |||
| 5 | |||
| 6 | def _makedirs_31(path, exist_ok=False): | ||
| 7 | try: | ||
| 8 | os.makedirs(path) | ||
| 9 | except OSError as exc: | ||
| 10 | if not exist_ok or exc.errno != errno.EEXIST: | ||
| 11 | raise | ||
| 12 | |||
| 13 | |||
| 14 | # rely on compatibility behavior until mode considerations | ||
| 15 | # and exist_ok considerations are disentangled. | ||
| 16 | # See https://github.com/pypa/setuptools/pull/1083#issuecomment-315168663 | ||
| 17 | needs_makedirs = ( | ||
| 18 | sys.version_info < (3, 2, 5) or | ||
| 19 | (3, 3) <= sys.version_info < (3, 3, 6) or | ||
| 20 | (3, 4) <= sys.version_info < (3, 4, 1) | ||
| 21 | ) | ||
| 22 | makedirs = _makedirs_31 if needs_makedirs else os.makedirs | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/progress/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/progress/__init__.py new file mode 100644 index 0000000..4aa97fc --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/progress/__init__.py | |||
| @@ -0,0 +1,127 @@ | |||
| 1 | # Copyright (c) 2012 Giorgos Verigakis <verigak@gmail.com> | ||
| 2 | # | ||
| 3 | # Permission to use, copy, modify, and distribute this software for any | ||
| 4 | # purpose with or without fee is hereby granted, provided that the above | ||
| 5 | # copyright notice and this permission notice appear in all copies. | ||
| 6 | # | ||
| 7 | # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES | ||
| 8 | # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF | ||
| 9 | # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR | ||
| 10 | # ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES | ||
| 11 | # WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN | ||
| 12 | # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF | ||
| 13 | # OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. | ||
| 14 | |||
| 15 | from __future__ import division | ||
| 16 | |||
| 17 | from collections import deque | ||
| 18 | from datetime import timedelta | ||
| 19 | from math import ceil | ||
| 20 | from sys import stderr | ||
| 21 | from time import time | ||
| 22 | |||
| 23 | |||
| 24 | __version__ = '1.3' | ||
| 25 | |||
| 26 | |||
| 27 | class Infinite(object): | ||
| 28 | file = stderr | ||
| 29 | sma_window = 10 # Simple Moving Average window | ||
| 30 | |||
| 31 | def __init__(self, *args, **kwargs): | ||
| 32 | self.index = 0 | ||
| 33 | self.start_ts = time() | ||
| 34 | self.avg = 0 | ||
| 35 | self._ts = self.start_ts | ||
| 36 | self._xput = deque(maxlen=self.sma_window) | ||
| 37 | for key, val in kwargs.items(): | ||
| 38 | setattr(self, key, val) | ||
| 39 | |||
| 40 | def __getitem__(self, key): | ||
| 41 | if key.startswith('_'): | ||
| 42 | return None | ||
| 43 | return getattr(self, key, None) | ||
| 44 | |||
| 45 | @property | ||
| 46 | def elapsed(self): | ||
| 47 | return int(time() - self.start_ts) | ||
| 48 | |||
| 49 | @property | ||
| 50 | def elapsed_td(self): | ||
| 51 | return timedelta(seconds=self.elapsed) | ||
| 52 | |||
| 53 | def update_avg(self, n, dt): | ||
| 54 | if n > 0: | ||
| 55 | self._xput.append(dt / n) | ||
| 56 | self.avg = sum(self._xput) / len(self._xput) | ||
| 57 | |||
| 58 | def update(self): | ||
| 59 | pass | ||
| 60 | |||
| 61 | def start(self): | ||
| 62 | pass | ||
| 63 | |||
| 64 | def finish(self): | ||
| 65 | pass | ||
| 66 | |||
| 67 | def next(self, n=1): | ||
| 68 | now = time() | ||
| 69 | dt = now - self._ts | ||
| 70 | self.update_avg(n, dt) | ||
| 71 | self._ts = now | ||
| 72 | self.index = self.index + n | ||
| 73 | self.update() | ||
| 74 | |||
| 75 | def iter(self, it): | ||
| 76 | try: | ||
| 77 | for x in it: | ||
| 78 | yield x | ||
| 79 | self.next() | ||
| 80 | finally: | ||
| 81 | self.finish() | ||
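| | # Usage sketch: wrap any iterable so the indicator ticks once per | ||
| | # item; subclasses draw via update() (`items` and `process` are | ||
| | # placeholders): | ||
| | # | ||
| | #     for item in Infinite().iter(items): | ||
| | #         process(item) | ||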
| 82 | |||
| 83 | |||
| 84 | class Progress(Infinite): | ||
| 85 | def __init__(self, *args, **kwargs): | ||
| 86 | super(Progress, self).__init__(*args, **kwargs) | ||
| 87 | self.max = kwargs.get('max', 100) | ||
| 88 | |||
| 89 | @property | ||
| 90 | def eta(self): | ||
| 91 | return int(ceil(self.avg * self.remaining)) | ||
| 92 | |||
| 93 | @property | ||
| 94 | def eta_td(self): | ||
| 95 | return timedelta(seconds=self.eta) | ||
| 96 | |||
| 97 | @property | ||
| 98 | def percent(self): | ||
| 99 | return self.progress * 100 | ||
| 100 | |||
| 101 | @property | ||
| 102 | def progress(self): | ||
| 103 | return min(1, self.index / self.max) | ||
| 104 | |||
| 105 | @property | ||
| 106 | def remaining(self): | ||
| 107 | return max(self.max - self.index, 0) | ||
| 108 | |||
| 109 | def start(self): | ||
| 110 | self.update() | ||
| 111 | |||
| 112 | def goto(self, index): | ||
| 113 | incr = index - self.index | ||
| 114 | self.next(incr) | ||
| 115 | |||
| 116 | def iter(self, it): | ||
| 117 | try: | ||
| 118 | self.max = len(it) | ||
| 119 | except TypeError: | ||
| 120 | pass | ||
| 121 | |||
| 122 | try: | ||
| 123 | for x in it: | ||
| 124 | yield x | ||
| 125 | self.next() | ||
| 126 | finally: | ||
| 127 | self.finish() | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/progress/bar.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/progress/bar.py new file mode 100644 index 0000000..3fdd703 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/progress/bar.py | |||
| @@ -0,0 +1,88 @@ | |||
| 1 | # -*- coding: utf-8 -*- | ||
| 2 | |||
| 3 | # Copyright (c) 2012 Giorgos Verigakis <verigak@gmail.com> | ||
| 4 | # | ||
| 5 | # Permission to use, copy, modify, and distribute this software for any | ||
| 6 | # purpose with or without fee is hereby granted, provided that the above | ||
| 7 | # copyright notice and this permission notice appear in all copies. | ||
| 8 | # | ||
| 9 | # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES | ||
| 10 | # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF | ||
| 11 | # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR | ||
| 12 | # ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES | ||
| 13 | # WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN | ||
| 14 | # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF | ||
| 15 | # OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. | ||
| 16 | |||
| 17 | from __future__ import unicode_literals | ||
| 18 | from . import Progress | ||
| 19 | from .helpers import WritelnMixin | ||
| 20 | |||
| 21 | |||
| 22 | class Bar(WritelnMixin, Progress): | ||
| 23 | width = 32 | ||
| 24 | message = '' | ||
| 25 | suffix = '%(index)d/%(max)d' | ||
| 26 | bar_prefix = ' |' | ||
| 27 | bar_suffix = '| ' | ||
| 28 | empty_fill = ' ' | ||
| 29 | fill = '#' | ||
| 30 | hide_cursor = True | ||
| 31 | |||
| 32 | def update(self): | ||
| 33 | filled_length = int(self.width * self.progress) | ||
| 34 | empty_length = self.width - filled_length | ||
| 35 | |||
| 36 | message = self.message % self | ||
| 37 | bar = self.fill * filled_length | ||
| 38 | empty = self.empty_fill * empty_length | ||
| 39 | suffix = self.suffix % self | ||
| 40 | line = ''.join([message, self.bar_prefix, bar, empty, self.bar_suffix, | ||
| 41 | suffix]) | ||
| 42 | self.writeln(line) | ||
| 43 | |||
| 44 | |||
| 45 | class ChargingBar(Bar): | ||
| 46 | suffix = '%(percent)d%%' | ||
| 47 | bar_prefix = ' ' | ||
| 48 | bar_suffix = ' ' | ||
| 49 | empty_fill = '∙' | ||
| 50 | fill = '█' | ||
| 51 | |||
| 52 | |||
| 53 | class FillingSquaresBar(ChargingBar): | ||
| 54 | empty_fill = '▢' | ||
| 55 | fill = '▣' | ||
| 56 | |||
| 57 | |||
| 58 | class FillingCirclesBar(ChargingBar): | ||
| 59 | empty_fill = '◯' | ||
| 60 | fill = '◉' | ||
| 61 | |||
| 62 | |||
| 63 | class IncrementalBar(Bar): | ||
| 64 | phases = (' ', '▏', '▎', '▍', '▌', '▋', '▊', '▉', '█') | ||
| 65 | |||
| 66 | def update(self): | ||
| 67 | nphases = len(self.phases) | ||
| 68 | filled_len = self.width * self.progress | ||
| 69 | nfull = int(filled_len) # Number of full chars | ||
| 70 | phase = int((filled_len - nfull) * nphases) # Phase of last char | ||
| 71 | nempty = self.width - nfull # Number of empty chars | ||
| 72 | |||
| 73 | message = self.message % self | ||
| 74 | bar = self.phases[-1] * nfull | ||
| 75 | current = self.phases[phase] if phase > 0 else '' | ||
| 76 | empty = self.empty_fill * max(0, nempty - len(current)) | ||
| 77 | suffix = self.suffix % self | ||
| 78 | line = ''.join([message, self.bar_prefix, bar, current, empty, | ||
| 79 | self.bar_suffix, suffix]) | ||
| 80 | self.writeln(line) | ||
| 81 | |||
| 82 | |||
| 83 | class PixelBar(IncrementalBar): | ||
| 84 | phases = ('⡀', '⡄', '⡆', '⡇', '⣇', '⣧', '⣷', '⣿') | ||
| 85 | |||
| 86 | |||
| 87 | class ShadyBar(IncrementalBar): | ||
| 88 | phases = (' ', '░', '▒', '▓', '█') | ||
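Editor's note: `Bar.update` re-renders the whole line every tick through `WritelnMixin.writeln`, interpolating `message` and `suffix` against the instance itself (upstream `Infinite` implements `__getitem__` for exactly this, so keys such as `%(index)d`, `%(max)d`, `%(percent)d`, and `%(eta_td)s` resolve to the properties defined earlier). `IncrementalBar` adds sub-character resolution: the fractional part of `width * progress` selects one of its nine block glyphs for the boundary cell. A usage sketch, assuming the vendored import path; on a non-TTY stream the mixins suppress output entirely:

    import time
    from pip._vendor.progress.bar import IncrementalBar

    bar = IncrementalBar('Copying', max=100, suffix='%(percent)d%% eta %(eta_td)s')
    for _ in bar.iter(range(100)):
        time.sleep(0.01)  # simulate work; each tick redraws the line in place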
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/progress/counter.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/progress/counter.py new file mode 100644 index 0000000..e993a51 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/progress/counter.py | |||
| @@ -0,0 +1,48 @@ | |||
| 1 | # -*- coding: utf-8 -*- | ||
| 2 | |||
| 3 | # Copyright (c) 2012 Giorgos Verigakis <verigak@gmail.com> | ||
| 4 | # | ||
| 5 | # Permission to use, copy, modify, and distribute this software for any | ||
| 6 | # purpose with or without fee is hereby granted, provided that the above | ||
| 7 | # copyright notice and this permission notice appear in all copies. | ||
| 8 | # | ||
| 9 | # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES | ||
| 10 | # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF | ||
| 11 | # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR | ||
| 12 | # ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES | ||
| 13 | # WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN | ||
| 14 | # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF | ||
| 15 | # OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. | ||
| 16 | |||
| 17 | from __future__ import unicode_literals | ||
| 18 | from . import Infinite, Progress | ||
| 19 | from .helpers import WriteMixin | ||
| 20 | |||
| 21 | |||
| 22 | class Counter(WriteMixin, Infinite): | ||
| 23 | message = '' | ||
| 24 | hide_cursor = True | ||
| 25 | |||
| 26 | def update(self): | ||
| 27 | self.write(str(self.index)) | ||
| 28 | |||
| 29 | |||
| 30 | class Countdown(WriteMixin, Progress): | ||
| 31 | hide_cursor = True | ||
| 32 | |||
| 33 | def update(self): | ||
| 34 | self.write(str(self.remaining)) | ||
| 35 | |||
| 36 | |||
| 37 | class Stack(WriteMixin, Progress): | ||
| 38 | phases = (' ', '▁', '▂', '▃', '▄', '▅', '▆', '▇', '█') | ||
| 39 | hide_cursor = True | ||
| 40 | |||
| 41 | def update(self): | ||
| 42 | nphases = len(self.phases) | ||
| 43 | i = min(nphases - 1, int(self.progress * nphases)) | ||
| 44 | self.write(self.phases[i]) | ||
| 45 | |||
| 46 | |||
| 47 | class Pie(Stack): | ||
| 48 | phases = ('○', '◔', '◑', '◕', '●') | ||
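Editor's note: these indicators overwrite a single screen cell through `WriteMixin` rather than redrawing a line. `Counter` writes the raw index, `Countdown` the `remaining` count, and `Stack`/`Pie` quantize `progress` into one glyph via `int(progress * nphases)`, clamped so 100% maps to the last phase. Sketch (vendored path assumed):

    from pip._vendor.progress.counter import Pie

    # Swap in Counter or Countdown for the numeric variants.
    for _ in Pie('Share ', max=8).iter(range(8)):
        pass  # the circle glyph fills as progress advances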
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/progress/helpers.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/progress/helpers.py new file mode 100644 index 0000000..96c8800 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/progress/helpers.py | |||
| @@ -0,0 +1,91 @@ | |||
| 1 | # Copyright (c) 2012 Giorgos Verigakis <verigak@gmail.com> | ||
| 2 | # | ||
| 3 | # Permission to use, copy, modify, and distribute this software for any | ||
| 4 | # purpose with or without fee is hereby granted, provided that the above | ||
| 5 | # copyright notice and this permission notice appear in all copies. | ||
| 6 | # | ||
| 7 | # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES | ||
| 8 | # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF | ||
| 9 | # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR | ||
| 10 | # ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES | ||
| 11 | # WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN | ||
| 12 | # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF | ||
| 13 | # OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. | ||
| 14 | |||
| 15 | from __future__ import print_function | ||
| 16 | |||
| 17 | |||
| 18 | HIDE_CURSOR = '\x1b[?25l' | ||
| 19 | SHOW_CURSOR = '\x1b[?25h' | ||
| 20 | |||
| 21 | |||
| 22 | class WriteMixin(object): | ||
| 23 | hide_cursor = False | ||
| 24 | |||
| 25 | def __init__(self, message=None, **kwargs): | ||
| 26 | super(WriteMixin, self).__init__(**kwargs) | ||
| 27 | self._width = 0 | ||
| 28 | if message: | ||
| 29 | self.message = message | ||
| 30 | |||
| 31 | if self.file.isatty(): | ||
| 32 | if self.hide_cursor: | ||
| 33 | print(HIDE_CURSOR, end='', file=self.file) | ||
| 34 | print(self.message, end='', file=self.file) | ||
| 35 | self.file.flush() | ||
| 36 | |||
| 37 | def write(self, s): | ||
| 38 | if self.file.isatty(): | ||
| 39 | b = '\b' * self._width | ||
| 40 | c = s.ljust(self._width) | ||
| 41 | print(b + c, end='', file=self.file) | ||
| 42 | self._width = max(self._width, len(s)) | ||
| 43 | self.file.flush() | ||
| 44 | |||
| 45 | def finish(self): | ||
| 46 | if self.file.isatty() and self.hide_cursor: | ||
| 47 | print(SHOW_CURSOR, end='', file=self.file) | ||
| 48 | |||
| 49 | |||
| 50 | class WritelnMixin(object): | ||
| 51 | hide_cursor = False | ||
| 52 | |||
| 53 | def __init__(self, message=None, **kwargs): | ||
| 54 | super(WritelnMixin, self).__init__(**kwargs) | ||
| 55 | if message: | ||
| 56 | self.message = message | ||
| 57 | |||
| 58 | if self.file.isatty() and self.hide_cursor: | ||
| 59 | print(HIDE_CURSOR, end='', file=self.file) | ||
| 60 | |||
| 61 | def clearln(self): | ||
| 62 | if self.file.isatty(): | ||
| 63 | print('\r\x1b[K', end='', file=self.file) | ||
| 64 | |||
| 65 | def writeln(self, line): | ||
| 66 | if self.file.isatty(): | ||
| 67 | self.clearln() | ||
| 68 | print(line, end='', file=self.file) | ||
| 69 | self.file.flush() | ||
| 70 | |||
| 71 | def finish(self): | ||
| 72 | if self.file.isatty(): | ||
| 73 | print(file=self.file) | ||
| 74 | if self.hide_cursor: | ||
| 75 | print(SHOW_CURSOR, end='', file=self.file) | ||
| 76 | |||
| 77 | |||
| 78 | from signal import signal, SIGINT | ||
| 79 | from sys import exit | ||
| 80 | |||
| 81 | |||
| 82 | class SigIntMixin(object): | ||
| 83 | """Registers a signal handler that calls finish on SIGINT""" | ||
| 84 | |||
| 85 | def __init__(self, *args, **kwargs): | ||
| 86 | super(SigIntMixin, self).__init__(*args, **kwargs) | ||
| 87 | signal(SIGINT, self._sigint_handler) | ||
| 88 | |||
| 89 | def _sigint_handler(self, signum, frame): | ||
| 90 | self.finish() | ||
| 91 | exit(0) | ||
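Editor's note: the two mixins embody different redraw strategies. `WriteMixin.write` backspaces over the previous cell (`'\b' * self._width`) and left-pads the new value so a shorter string fully covers a longer one; `WritelnMixin.clearln` instead wipes the whole line with a carriage return plus the ANSI erase-to-end-of-line sequence (`ESC [K`). Both emit nothing when the stream is not a TTY. A standalone sketch of the backspace-and-pad technique, independent of this package:

    import sys
    import time

    width = 0
    for value in (8, 9, 10, 100, 7):
        text = str(value).ljust(width)       # pad so leftovers get overwritten
        sys.stdout.write('\b' * width + text)
        sys.stdout.flush()
        width = max(width, len(text))
        time.sleep(0.2)
    print()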
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/progress/spinner.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/progress/spinner.py new file mode 100644 index 0000000..d67c679 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/progress/spinner.py | |||
| @@ -0,0 +1,44 @@ | |||
| 1 | # -*- coding: utf-8 -*- | ||
| 2 | |||
| 3 | # Copyright (c) 2012 Giorgos Verigakis <verigak@gmail.com> | ||
| 4 | # | ||
| 5 | # Permission to use, copy, modify, and distribute this software for any | ||
| 6 | # purpose with or without fee is hereby granted, provided that the above | ||
| 7 | # copyright notice and this permission notice appear in all copies. | ||
| 8 | # | ||
| 9 | # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES | ||
| 10 | # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF | ||
| 11 | # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR | ||
| 12 | # ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES | ||
| 13 | # WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN | ||
| 14 | # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF | ||
| 15 | # OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. | ||
| 16 | |||
| 17 | from __future__ import unicode_literals | ||
| 18 | from . import Infinite | ||
| 19 | from .helpers import WriteMixin | ||
| 20 | |||
| 21 | |||
| 22 | class Spinner(WriteMixin, Infinite): | ||
| 23 | message = '' | ||
| 24 | phases = ('-', '\\', '|', '/') | ||
| 25 | hide_cursor = True | ||
| 26 | |||
| 27 | def update(self): | ||
| 28 | i = self.index % len(self.phases) | ||
| 29 | self.write(self.phases[i]) | ||
| 30 | |||
| 31 | |||
| 32 | class PieSpinner(Spinner): | ||
| 33 | phases = ['◷', '◶', '◵', '◴'] | ||
| 34 | |||
| 35 | |||
| 36 | class MoonSpinner(Spinner): | ||
| 37 | phases = ['◑', '◒', '◐', '◓'] | ||
| 38 | |||
| 39 | |||
| 40 | class LineSpinner(Spinner): | ||
| 41 | phases = ['⎺', '⎻', '⎼', '⎽', '⎼', '⎻'] | ||
| 42 | |||
| 43 | class PixelSpinner(Spinner): | ||
| 44 | phases = ['⣾', '⣷', '⣯', '⣟', '⡿', '⢿', '⣻', '⣽'] | ||
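Editor's note: a spinner simply indexes its `phases` tuple modulo its length, so any frame count works and no `max` is needed. Sketch (vendored path assumed):

    from pip._vendor.progress.spinner import MoonSpinner

    spinner = MoonSpinner('Resolving ')
    for _ in range(24):
        spinner.next()   # advances index; update() writes phases[index % 4]
    spinner.finish()     # re-shows the cursor that WriteMixin hid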
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/pyparsing.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/pyparsing.py new file mode 100644 index 0000000..d362e08 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/pyparsing.py | |||
| @@ -0,0 +1,5720 @@ | |||
| 1 | # module pyparsing.py | ||
| 2 | # | ||
| 3 | # Copyright (c) 2003-2016 Paul T. McGuire | ||
| 4 | # | ||
| 5 | # Permission is hereby granted, free of charge, to any person obtaining | ||
| 6 | # a copy of this software and associated documentation files (the | ||
| 7 | # "Software"), to deal in the Software without restriction, including | ||
| 8 | # without limitation the rights to use, copy, modify, merge, publish, | ||
| 9 | # distribute, sublicense, and/or sell copies of the Software, and to | ||
| 10 | # permit persons to whom the Software is furnished to do so, subject to | ||
| 11 | # the following conditions: | ||
| 12 | # | ||
| 13 | # The above copyright notice and this permission notice shall be | ||
| 14 | # included in all copies or substantial portions of the Software. | ||
| 15 | # | ||
| 16 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, | ||
| 17 | # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF | ||
| 18 | # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. | ||
| 19 | # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY | ||
| 20 | # CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, | ||
| 21 | # TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE | ||
| 22 | # SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. | ||
| 23 | # | ||
| 24 | |||
| 25 | __doc__ = \ | ||
| 26 | """ | ||
| 27 | pyparsing module - Classes and methods to define and execute parsing grammars | ||
| 28 | |||
| 29 | The pyparsing module is an alternative approach to creating and executing simple grammars, | ||
| 30 | vs. the traditional lex/yacc approach, or the use of regular expressions. With pyparsing, you | ||
| 31 | don't need to learn a new syntax for defining grammars or matching expressions - the parsing module | ||
| 32 | provides a library of classes that you use to construct the grammar directly in Python. | ||
| 33 | |||
| 34 | Here is a program to parse "Hello, World!" (or any greeting of the form | ||
| 35 | C{"<salutation>, <addressee>!"}), built up using L{Word}, L{Literal}, and L{And} elements | ||
| 36 | (L{'+'<ParserElement.__add__>} operator gives L{And} expressions, strings are auto-converted to | ||
| 37 | L{Literal} expressions):: | ||
| 38 | |||
| 39 | from pip._vendor.pyparsing import Word, alphas | ||
| 40 | |||
| 41 | # define grammar of a greeting | ||
| 42 | greet = Word(alphas) + "," + Word(alphas) + "!" | ||
| 43 | |||
| 44 | hello = "Hello, World!" | ||
| 45 | print (hello, "->", greet.parseString(hello)) | ||
| 46 | |||
| 47 | The program outputs the following:: | ||
| 48 | |||
| 49 | Hello, World! -> ['Hello', ',', 'World', '!'] | ||
| 50 | |||
| 51 | The Python representation of the grammar is quite readable, owing to the self-explanatory | ||
| 52 | class names, and the use of '+', '|' and '^' operators. | ||
| 53 | |||
| 54 | The L{ParseResults} object returned from L{ParserElement.parseString<ParserElement.parseString>} can be accessed as a nested list, a dictionary, or an | ||
| 55 | object with named attributes. | ||
| 56 | |||
| 57 | The pyparsing module handles some of the problems that are typically vexing when writing text parsers: | ||
| 58 | - extra or missing whitespace (the above program will also handle "Hello,World!", "Hello , World !", etc.) | ||
| 59 | - quoted strings | ||
| 60 | - embedded comments | ||
| 61 | """ | ||
| 62 | |||
| 63 | __version__ = "2.2.0" | ||
| 64 | __versionTime__ = "06 Mar 2017 02:06 UTC" | ||
| 65 | __author__ = "Paul McGuire <ptmcg@users.sourceforge.net>" | ||
| 66 | |||
| 67 | import string | ||
| 68 | from weakref import ref as wkref | ||
| 69 | import copy | ||
| 70 | import sys | ||
| 71 | import warnings | ||
| 72 | import re | ||
| 73 | import sre_constants | ||
| 74 | import collections | ||
| 75 | import pprint | ||
| 76 | import traceback | ||
| 77 | import types | ||
| 78 | from datetime import datetime | ||
| 79 | |||
| 80 | try: | ||
| 81 | from _thread import RLock | ||
| 82 | except ImportError: | ||
| 83 | from threading import RLock | ||
| 84 | |||
| 85 | try: | ||
| 86 | from collections import OrderedDict as _OrderedDict | ||
| 87 | except ImportError: | ||
| 88 | try: | ||
| 89 | from ordereddict import OrderedDict as _OrderedDict | ||
| 90 | except ImportError: | ||
| 91 | _OrderedDict = None | ||
| 92 | |||
| 93 | #~ sys.stderr.write( "testing pyparsing module, version %s, %s\n" % (__version__,__versionTime__ ) ) | ||
| 94 | |||
| 95 | __all__ = [ | ||
| 96 | 'And', 'CaselessKeyword', 'CaselessLiteral', 'CharsNotIn', 'Combine', 'Dict', 'Each', 'Empty', | ||
| 97 | 'FollowedBy', 'Forward', 'GoToColumn', 'Group', 'Keyword', 'LineEnd', 'LineStart', 'Literal', | ||
| 98 | 'MatchFirst', 'NoMatch', 'NotAny', 'OneOrMore', 'OnlyOnce', 'Optional', 'Or', | ||
| 99 | 'ParseBaseException', 'ParseElementEnhance', 'ParseException', 'ParseExpression', 'ParseFatalException', | ||
| 100 | 'ParseResults', 'ParseSyntaxException', 'ParserElement', 'QuotedString', 'RecursiveGrammarException', | ||
| 101 | 'Regex', 'SkipTo', 'StringEnd', 'StringStart', 'Suppress', 'Token', 'TokenConverter', | ||
| 102 | 'White', 'Word', 'WordEnd', 'WordStart', 'ZeroOrMore', | ||
| 103 | 'alphanums', 'alphas', 'alphas8bit', 'anyCloseTag', 'anyOpenTag', 'cStyleComment', 'col', | ||
| 104 | 'commaSeparatedList', 'commonHTMLEntity', 'countedArray', 'cppStyleComment', 'dblQuotedString', | ||
| 105 | 'dblSlashComment', 'delimitedList', 'dictOf', 'downcaseTokens', 'empty', 'hexnums', | ||
| 106 | 'htmlComment', 'javaStyleComment', 'line', 'lineEnd', 'lineStart', 'lineno', | ||
| 107 | 'makeHTMLTags', 'makeXMLTags', 'matchOnlyAtCol', 'matchPreviousExpr', 'matchPreviousLiteral', | ||
| 108 | 'nestedExpr', 'nullDebugAction', 'nums', 'oneOf', 'opAssoc', 'operatorPrecedence', 'printables', | ||
| 109 | 'punc8bit', 'pythonStyleComment', 'quotedString', 'removeQuotes', 'replaceHTMLEntity', | ||
| 110 | 'replaceWith', 'restOfLine', 'sglQuotedString', 'srange', 'stringEnd', | ||
| 111 | 'stringStart', 'traceParseAction', 'unicodeString', 'upcaseTokens', 'withAttribute', | ||
| 112 | 'indentedBlock', 'originalTextFor', 'ungroup', 'infixNotation','locatedExpr', 'withClass', | ||
| 113 | 'CloseMatch', 'tokenMap', 'pyparsing_common', | ||
| 114 | ] | ||
| 115 | |||
| 116 | system_version = tuple(sys.version_info)[:3] | ||
| 117 | PY_3 = system_version[0] == 3 | ||
| 118 | if PY_3: | ||
| 119 | _MAX_INT = sys.maxsize | ||
| 120 | basestring = str | ||
| 121 | unichr = chr | ||
| 122 | _ustr = str | ||
| 123 | |||
| 124 | # build list of single-arg builtins that can be used as parse actions | ||
| 125 | singleArgBuiltins = [sum, len, sorted, reversed, list, tuple, set, any, all, min, max] | ||
| 126 | |||
| 127 | else: | ||
| 128 | _MAX_INT = sys.maxint | ||
| 129 | range = xrange | ||
| 130 | |||
| 131 | def _ustr(obj): | ||
| 132 | """Drop-in replacement for str(obj) that tries to be Unicode friendly. It first tries | ||
| 133 | str(obj). If that fails with a UnicodeEncodeError, then it tries unicode(obj). It | ||
| 134 | then encodes it with the default encoding, rewriting unencodable characters as \uXXXX escapes. | ||
| 135 | """ | ||
| 136 | if isinstance(obj,unicode): | ||
| 137 | return obj | ||
| 138 | |||
| 139 | try: | ||
| 140 | # If this works, then _ustr(obj) has the same behaviour as str(obj), so | ||
| 141 | # it won't break any existing code. | ||
| 142 | return str(obj) | ||
| 143 | |||
| 144 | except UnicodeEncodeError: | ||
| 145 | # Else encode it | ||
| 146 | ret = unicode(obj).encode(sys.getdefaultencoding(), 'xmlcharrefreplace') | ||
| 147 | xmlcharref = Regex(r'&#\d+;') | ||
| 148 | xmlcharref.setParseAction(lambda t: '\\u' + hex(int(t[0][2:-1]))[2:]) | ||
| 149 | return xmlcharref.transformString(ret) | ||
| 150 | |||
| 151 | # build list of single arg builtins, tolerant of Python version, that can be used as parse actions | ||
| 152 | singleArgBuiltins = [] | ||
| 153 | import __builtin__ | ||
| 154 | for fname in "sum len sorted reversed list tuple set any all min max".split(): | ||
| 155 | try: | ||
| 156 | singleArgBuiltins.append(getattr(__builtin__,fname)) | ||
| 157 | except AttributeError: | ||
| 158 | continue | ||
| 159 | |||
| 160 | _generatorType = type((y for y in range(1))) | ||
| 161 | |||
| 162 | def _xml_escape(data): | ||
| 163 | """Escape &, <, >, ", ', etc. in a string of data.""" | ||
| 164 | |||
| 165 | # ampersand must be replaced first | ||
| 166 | from_symbols = '&><"\'' | ||
| 167 | to_symbols = ('&'+s+';' for s in "amp gt lt quot apos".split()) | ||
| 168 | for from_,to_ in zip(from_symbols, to_symbols): | ||
| 169 | data = data.replace(from_, to_) | ||
| 170 | return data | ||
| 171 | |||
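Editor's note: in `_xml_escape` the replacement order is load-bearing. `&` must be escaped first, otherwise the `&` introduced by `&gt;` and friends would itself be re-escaped into `&amp;gt;`. A quick check (the helper is module-internal and not listed in `__all__`):

    from pip._vendor.pyparsing import _xml_escape

    print(_xml_escape('a < b & "c"'))   # -> a &lt; b &amp; &quot;c&quot;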
| 172 | class _Constants(object): | ||
| 173 | pass | ||
| 174 | |||
| 175 | alphas = string.ascii_uppercase + string.ascii_lowercase | ||
| 176 | nums = "0123456789" | ||
| 177 | hexnums = nums + "ABCDEFabcdef" | ||
| 178 | alphanums = alphas + nums | ||
| 179 | _bslash = chr(92) | ||
| 180 | printables = "".join(c for c in string.printable if c not in string.whitespace) | ||
| 181 | |||
| 182 | class ParseBaseException(Exception): | ||
| 183 | """base exception class for all parsing runtime exceptions""" | ||
| 184 | # Performance tuning: we construct a *lot* of these, so keep this | ||
| 185 | # constructor as small and fast as possible | ||
| 186 | def __init__( self, pstr, loc=0, msg=None, elem=None ): | ||
| 187 | self.loc = loc | ||
| 188 | if msg is None: | ||
| 189 | self.msg = pstr | ||
| 190 | self.pstr = "" | ||
| 191 | else: | ||
| 192 | self.msg = msg | ||
| 193 | self.pstr = pstr | ||
| 194 | self.parserElement = elem | ||
| 195 | self.args = (pstr, loc, msg) | ||
| 196 | |||
| 197 | @classmethod | ||
| 198 | def _from_exception(cls, pe): | ||
| 199 | """ | ||
| 200 | internal factory method to simplify creating one type of ParseException | ||
| 201 | from another - avoids having __init__ signature conflicts among subclasses | ||
| 202 | """ | ||
| 203 | return cls(pe.pstr, pe.loc, pe.msg, pe.parserElement) | ||
| 204 | |||
| 205 | def __getattr__( self, aname ): | ||
| 206 | """supported attributes by name are: | ||
| 207 | - lineno - returns the line number of the exception text | ||
| 208 | - col - returns the column number of the exception text | ||
| 209 | - line - returns the line containing the exception text | ||
| 210 | """ | ||
| 211 | if( aname == "lineno" ): | ||
| 212 | return lineno( self.loc, self.pstr ) | ||
| 213 | elif( aname in ("col", "column") ): | ||
| 214 | return col( self.loc, self.pstr ) | ||
| 215 | elif( aname == "line" ): | ||
| 216 | return line( self.loc, self.pstr ) | ||
| 217 | else: | ||
| 218 | raise AttributeError(aname) | ||
| 219 | |||
| 220 | def __str__( self ): | ||
| 221 | return "%s (at char %d), (line:%d, col:%d)" % \ | ||
| 222 | ( self.msg, self.loc, self.lineno, self.column ) | ||
| 223 | def __repr__( self ): | ||
| 224 | return _ustr(self) | ||
| 225 | def markInputline( self, markerString = ">!<" ): | ||
| 226 | """Extracts the exception line from the input string, and marks | ||
| 227 | the location of the exception with a special symbol. | ||
| 228 | """ | ||
| 229 | line_str = self.line | ||
| 230 | line_column = self.column - 1 | ||
| 231 | if markerString: | ||
| 232 | line_str = "".join((line_str[:line_column], | ||
| 233 | markerString, line_str[line_column:])) | ||
| 234 | return line_str.strip() | ||
| 235 | def __dir__(self): | ||
| 236 | return "lineno col line".split() + dir(type(self)) | ||
| 237 | |||
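Editor's note: `ParseBaseException` resolves `lineno`, `col`/`column`, and `line` lazily through `__getattr__`, and `markInputline` splices a marker into the offending line. A sketch of that reporting surface, using names defined later in this module; expected output shown in comments:

    from pip._vendor.pyparsing import Word, nums, ParseException

    three_ints = Word(nums) + Word(nums) + Word(nums)
    try:
        three_ints.parseString("12 34 xy")
    except ParseException as pe:
        print(pe.lineno, pe.col)     # -> 1 7
        print(pe.markInputline())    # -> 12 34 >!<xy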
| 238 | class ParseException(ParseBaseException): | ||
| 239 | """ | ||
| 240 | Exception thrown when a parse expression doesn't match the input string; | ||
| 241 | supported attributes by name are: | ||
| 242 | - lineno - returns the line number of the exception text | ||
| 243 | - col - returns the column number of the exception text | ||
| 244 | - line - returns the line containing the exception text | ||
| 245 | |||
| 246 | Example:: | ||
| 247 | try: | ||
| 248 | Word(nums).setName("integer").parseString("ABC") | ||
| 249 | except ParseException as pe: | ||
| 250 | print(pe) | ||
| 251 | print("column: {}".format(pe.col)) | ||
| 252 | |||
| 253 | prints:: | ||
| 254 | Expected integer (at char 0), (line:1, col:1) | ||
| 255 | column: 1 | ||
| 256 | """ | ||
| 257 | pass | ||
| 258 | |||
| 259 | class ParseFatalException(ParseBaseException): | ||
| 260 | """user-throwable exception thrown when inconsistent parse content | ||
| 261 | is found; stops all parsing immediately""" | ||
| 262 | pass | ||
| 263 | |||
| 264 | class ParseSyntaxException(ParseFatalException): | ||
| 265 | """just like L{ParseFatalException}, but thrown internally when an | ||
| 266 | L{ErrorStop<And._ErrorStop>} ('-' operator) indicates that parsing is to stop | ||
| 267 | immediately because an unbacktrackable syntax error has been found""" | ||
| 268 | pass | ||
| 269 | |||
| 270 | #~ class ReparseException(ParseBaseException): | ||
| 271 | #~ """Experimental class - parse actions can raise this exception to cause | ||
| 272 | #~ pyparsing to reparse the input string: | ||
| 273 | #~ - with a modified input string, and/or | ||
| 274 | #~ - with a modified start location | ||
| 275 | #~ Set the values of the ReparseException in the constructor, and raise the | ||
| 276 | #~ exception in a parse action to cause pyparsing to use the new string/location. | ||
| 277 | #~ Setting the values as None causes no change to be made. | ||
| 278 | #~ """ | ||
| 279 | #~ def __init_( self, newstring, restartLoc ): | ||
| 280 | #~ self.newParseText = newstring | ||
| 281 | #~ self.reparseLoc = restartLoc | ||
| 282 | |||
| 283 | class RecursiveGrammarException(Exception): | ||
| 284 | """exception thrown by L{ParserElement.validate} if the grammar could be improperly recursive""" | ||
| 285 | def __init__( self, parseElementList ): | ||
| 286 | self.parseElementTrace = parseElementList | ||
| 287 | |||
| 288 | def __str__( self ): | ||
| 289 | return "RecursiveGrammarException: %s" % self.parseElementTrace | ||
| 290 | |||
| 291 | class _ParseResultsWithOffset(object): | ||
| 292 | def __init__(self,p1,p2): | ||
| 293 | self.tup = (p1,p2) | ||
| 294 | def __getitem__(self,i): | ||
| 295 | return self.tup[i] | ||
| 296 | def __repr__(self): | ||
| 297 | return repr(self.tup[0]) | ||
| 298 | def setOffset(self,i): | ||
| 299 | self.tup = (self.tup[0],i) | ||
| 300 | |||
| 301 | class ParseResults(object): | ||
| 302 | """ | ||
| 303 | Structured parse results, to provide multiple means of access to the parsed data: | ||
| 304 | - as a list (C{len(results)}) | ||
| 305 | - by list index (C{results[0], results[1]}, etc.) | ||
| 306 | - by attribute (C{results.<resultsName>} - see L{ParserElement.setResultsName}) | ||
| 307 | |||
| 308 | Example:: | ||
| 309 | integer = Word(nums) | ||
| 310 | date_str = (integer.setResultsName("year") + '/' | ||
| 311 | + integer.setResultsName("month") + '/' | ||
| 312 | + integer.setResultsName("day")) | ||
| 313 | # equivalent form: | ||
| 314 | # date_str = integer("year") + '/' + integer("month") + '/' + integer("day") | ||
| 315 | |||
| 316 | # parseString returns a ParseResults object | ||
| 317 | result = date_str.parseString("1999/12/31") | ||
| 318 | |||
| 319 | def test(s, fn=repr): | ||
| 320 | print("%s -> %s" % (s, fn(eval(s)))) | ||
| 321 | test("list(result)") | ||
| 322 | test("result[0]") | ||
| 323 | test("result['month']") | ||
| 324 | test("result.day") | ||
| 325 | test("'month' in result") | ||
| 326 | test("'minutes' in result") | ||
| 327 | test("result.dump()", str) | ||
| 328 | prints:: | ||
| 329 | list(result) -> ['1999', '/', '12', '/', '31'] | ||
| 330 | result[0] -> '1999' | ||
| 331 | result['month'] -> '12' | ||
| 332 | result.day -> '31' | ||
| 333 | 'month' in result -> True | ||
| 334 | 'minutes' in result -> False | ||
| 335 | result.dump() -> ['1999', '/', '12', '/', '31'] | ||
| 336 | - day: 31 | ||
| 337 | - month: 12 | ||
| 338 | - year: 1999 | ||
| 339 | """ | ||
| 340 | def __new__(cls, toklist=None, name=None, asList=True, modal=True ): | ||
| 341 | if isinstance(toklist, cls): | ||
| 342 | return toklist | ||
| 343 | retobj = object.__new__(cls) | ||
| 344 | retobj.__doinit = True | ||
| 345 | return retobj | ||
| 346 | |||
| 347 | # Performance tuning: we construct a *lot* of these, so keep this | ||
| 348 | # constructor as small and fast as possible | ||
| 349 | def __init__( self, toklist=None, name=None, asList=True, modal=True, isinstance=isinstance ): | ||
| 350 | if self.__doinit: | ||
| 351 | self.__doinit = False | ||
| 352 | self.__name = None | ||
| 353 | self.__parent = None | ||
| 354 | self.__accumNames = {} | ||
| 355 | self.__asList = asList | ||
| 356 | self.__modal = modal | ||
| 357 | if toklist is None: | ||
| 358 | toklist = [] | ||
| 359 | if isinstance(toklist, list): | ||
| 360 | self.__toklist = toklist[:] | ||
| 361 | elif isinstance(toklist, _generatorType): | ||
| 362 | self.__toklist = list(toklist) | ||
| 363 | else: | ||
| 364 | self.__toklist = [toklist] | ||
| 365 | self.__tokdict = dict() | ||
| 366 | |||
| 367 | if name is not None and name: | ||
| 368 | if not modal: | ||
| 369 | self.__accumNames[name] = 0 | ||
| 370 | if isinstance(name,int): | ||
| 371 | name = _ustr(name) # will always return a str, but use _ustr for consistency | ||
| 372 | self.__name = name | ||
| 373 | if not (isinstance(toklist, (type(None), basestring, list)) and toklist in (None,'',[])): | ||
| 374 | if isinstance(toklist,basestring): | ||
| 375 | toklist = [ toklist ] | ||
| 376 | if asList: | ||
| 377 | if isinstance(toklist,ParseResults): | ||
| 378 | self[name] = _ParseResultsWithOffset(toklist.copy(),0) | ||
| 379 | else: | ||
| 380 | self[name] = _ParseResultsWithOffset(ParseResults(toklist[0]),0) | ||
| 381 | self[name].__name = name | ||
| 382 | else: | ||
| 383 | try: | ||
| 384 | self[name] = toklist[0] | ||
| 385 | except (KeyError,TypeError,IndexError): | ||
| 386 | self[name] = toklist | ||
| 387 | |||
| 388 | def __getitem__( self, i ): | ||
| 389 | if isinstance( i, (int,slice) ): | ||
| 390 | return self.__toklist[i] | ||
| 391 | else: | ||
| 392 | if i not in self.__accumNames: | ||
| 393 | return self.__tokdict[i][-1][0] | ||
| 394 | else: | ||
| 395 | return ParseResults([ v[0] for v in self.__tokdict[i] ]) | ||
| 396 | |||
| 397 | def __setitem__( self, k, v, isinstance=isinstance ): | ||
| 398 | if isinstance(v,_ParseResultsWithOffset): | ||
| 399 | self.__tokdict[k] = self.__tokdict.get(k,list()) + [v] | ||
| 400 | sub = v[0] | ||
| 401 | elif isinstance(k,(int,slice)): | ||
| 402 | self.__toklist[k] = v | ||
| 403 | sub = v | ||
| 404 | else: | ||
| 405 | self.__tokdict[k] = self.__tokdict.get(k,list()) + [_ParseResultsWithOffset(v,0)] | ||
| 406 | sub = v | ||
| 407 | if isinstance(sub,ParseResults): | ||
| 408 | sub.__parent = wkref(self) | ||
| 409 | |||
| 410 | def __delitem__( self, i ): | ||
| 411 | if isinstance(i,(int,slice)): | ||
| 412 | mylen = len( self.__toklist ) | ||
| 413 | del self.__toklist[i] | ||
| 414 | |||
| 415 | # convert int to slice | ||
| 416 | if isinstance(i, int): | ||
| 417 | if i < 0: | ||
| 418 | i += mylen | ||
| 419 | i = slice(i, i+1) | ||
| 420 | # get removed indices | ||
| 421 | removed = list(range(*i.indices(mylen))) | ||
| 422 | removed.reverse() | ||
| 423 | # fixup indices in token dictionary | ||
| 424 | for name,occurrences in self.__tokdict.items(): | ||
| 425 | for j in removed: | ||
| 426 | for k, (value, position) in enumerate(occurrences): | ||
| 427 | occurrences[k] = _ParseResultsWithOffset(value, position - (position > j)) | ||
| 428 | else: | ||
| 429 | del self.__tokdict[i] | ||
| 430 | |||
| 431 | def __contains__( self, k ): | ||
| 432 | return k in self.__tokdict | ||
| 433 | |||
| 434 | def __len__( self ): return len( self.__toklist ) | ||
| 435 | def __bool__(self): return ( not not self.__toklist ) | ||
| 436 | __nonzero__ = __bool__ | ||
| 437 | def __iter__( self ): return iter( self.__toklist ) | ||
| 438 | def __reversed__( self ): return iter( self.__toklist[::-1] ) | ||
| 439 | def _iterkeys( self ): | ||
| 440 | if hasattr(self.__tokdict, "iterkeys"): | ||
| 441 | return self.__tokdict.iterkeys() | ||
| 442 | else: | ||
| 443 | return iter(self.__tokdict) | ||
| 444 | |||
| 445 | def _itervalues( self ): | ||
| 446 | return (self[k] for k in self._iterkeys()) | ||
| 447 | |||
| 448 | def _iteritems( self ): | ||
| 449 | return ((k, self[k]) for k in self._iterkeys()) | ||
| 450 | |||
| 451 | if PY_3: | ||
| 452 | keys = _iterkeys | ||
| 453 | """Returns an iterator of all named result keys (Python 3.x only).""" | ||
| 454 | |||
| 455 | values = _itervalues | ||
| 456 | """Returns an iterator of all named result values (Python 3.x only).""" | ||
| 457 | |||
| 458 | items = _iteritems | ||
| 459 | """Returns an iterator of all named result key-value tuples (Python 3.x only).""" | ||
| 460 | |||
| 461 | else: | ||
| 462 | iterkeys = _iterkeys | ||
| 463 | """Returns an iterator of all named result keys (Python 2.x only).""" | ||
| 464 | |||
| 465 | itervalues = _itervalues | ||
| 466 | """Returns an iterator of all named result values (Python 2.x only).""" | ||
| 467 | |||
| 468 | iteritems = _iteritems | ||
| 469 | """Returns an iterator of all named result key-value tuples (Python 2.x only).""" | ||
| 470 | |||
| 471 | def keys( self ): | ||
| 472 | """Returns all named result keys (as a list in Python 2.x, as an iterator in Python 3.x).""" | ||
| 473 | return list(self.iterkeys()) | ||
| 474 | |||
| 475 | def values( self ): | ||
| 476 | """Returns all named result values (as a list in Python 2.x, as an iterator in Python 3.x).""" | ||
| 477 | return list(self.itervalues()) | ||
| 478 | |||
| 479 | def items( self ): | ||
| 480 | """Returns all named result key-values (as a list of tuples in Python 2.x, as an iterator in Python 3.x).""" | ||
| 481 | return list(self.iteritems()) | ||
| 482 | |||
| 483 | def haskeys( self ): | ||
| 484 | """Since keys() returns an iterator, this method is helpful in bypassing | ||
| 485 | code that looks for the existence of any defined results names.""" | ||
| 486 | return bool(self.__tokdict) | ||
| 487 | |||
| 488 | def pop( self, *args, **kwargs): | ||
| 489 | """ | ||
| 490 | Removes and returns item at specified index (default=C{last}). | ||
| 491 | Supports both C{list} and C{dict} semantics for C{pop()}. If passed no | ||
| 492 | argument or an integer argument, it will use C{list} semantics | ||
| 493 | and pop tokens from the list of parsed tokens. If passed a | ||
| 494 | non-integer argument (most likely a string), it will use C{dict} | ||
| 495 | semantics and pop the corresponding value from any defined | ||
| 496 | results names. A second default return value argument is | ||
| 497 | supported, just as in C{dict.pop()}. | ||
| 498 | |||
| 499 | Example:: | ||
| 500 | def remove_first(tokens): | ||
| 501 | tokens.pop(0) | ||
| 502 | print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321'] | ||
| 503 | print(OneOrMore(Word(nums)).addParseAction(remove_first).parseString("0 123 321")) # -> ['123', '321'] | ||
| 504 | |||
| 505 | label = Word(alphas) | ||
| 506 | patt = label("LABEL") + OneOrMore(Word(nums)) | ||
| 507 | print(patt.parseString("AAB 123 321").dump()) | ||
| 508 | |||
| 509 | # Use pop() in a parse action to remove named result (note that corresponding value is not | ||
| 510 | # removed from list form of results) | ||
| 511 | def remove_LABEL(tokens): | ||
| 512 | tokens.pop("LABEL") | ||
| 513 | return tokens | ||
| 514 | patt.addParseAction(remove_LABEL) | ||
| 515 | print(patt.parseString("AAB 123 321").dump()) | ||
| 516 | prints:: | ||
| 517 | ['AAB', '123', '321'] | ||
| 518 | - LABEL: AAB | ||
| 519 | |||
| 520 | ['AAB', '123', '321'] | ||
| 521 | """ | ||
| 522 | if not args: | ||
| 523 | args = [-1] | ||
| 524 | for k,v in kwargs.items(): | ||
| 525 | if k == 'default': | ||
| 526 | args = (args[0], v) | ||
| 527 | else: | ||
| 528 | raise TypeError("pop() got an unexpected keyword argument '%s'" % k) | ||
| 529 | if (isinstance(args[0], int) or | ||
| 530 | len(args) == 1 or | ||
| 531 | args[0] in self): | ||
| 532 | index = args[0] | ||
| 533 | ret = self[index] | ||
| 534 | del self[index] | ||
| 535 | return ret | ||
| 536 | else: | ||
| 537 | defaultvalue = args[1] | ||
| 538 | return defaultvalue | ||
| 539 | |||
| 540 | def get(self, key, defaultValue=None): | ||
| 541 | """ | ||
| 542 | Returns named result matching the given key, or if there is no | ||
| 543 | such name, then returns the given C{defaultValue} or C{None} if no | ||
| 544 | C{defaultValue} is specified. | ||
| 545 | |||
| 546 | Similar to C{dict.get()}. | ||
| 547 | |||
| 548 | Example:: | ||
| 549 | integer = Word(nums) | ||
| 550 | date_str = integer("year") + '/' + integer("month") + '/' + integer("day") | ||
| 551 | |||
| 552 | result = date_str.parseString("1999/12/31") | ||
| 553 | print(result.get("year")) # -> '1999' | ||
| 554 | print(result.get("hour", "not specified")) # -> 'not specified' | ||
| 555 | print(result.get("hour")) # -> None | ||
| 556 | """ | ||
| 557 | if key in self: | ||
| 558 | return self[key] | ||
| 559 | else: | ||
| 560 | return defaultValue | ||
| 561 | |||
| 562 | def insert( self, index, insStr ): | ||
| 563 | """ | ||
| 564 | Inserts new element at location index in the list of parsed tokens. | ||
| 565 | |||
| 566 | Similar to C{list.insert()}. | ||
| 567 | |||
| 568 | Example:: | ||
| 569 | print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321'] | ||
| 570 | |||
| 571 | # use a parse action to insert the parse location in the front of the parsed results | ||
| 572 | def insert_locn(locn, tokens): | ||
| 573 | tokens.insert(0, locn) | ||
| 574 | print(OneOrMore(Word(nums)).addParseAction(insert_locn).parseString("0 123 321")) # -> [0, '0', '123', '321'] | ||
| 575 | """ | ||
| 576 | self.__toklist.insert(index, insStr) | ||
| 577 | # fixup indices in token dictionary | ||
| 578 | for name,occurrences in self.__tokdict.items(): | ||
| 579 | for k, (value, position) in enumerate(occurrences): | ||
| 580 | occurrences[k] = _ParseResultsWithOffset(value, position + (position > index)) | ||
| 581 | |||
| 582 | def append( self, item ): | ||
| 583 | """ | ||
| 584 | Add single element to end of ParseResults list of elements. | ||
| 585 | |||
| 586 | Example:: | ||
| 587 | print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321'] | ||
| 588 | |||
| 589 | # use a parse action to compute the sum of the parsed integers, and add it to the end | ||
| 590 | def append_sum(tokens): | ||
| 591 | tokens.append(sum(map(int, tokens))) | ||
| 592 | print(OneOrMore(Word(nums)).addParseAction(append_sum).parseString("0 123 321")) # -> ['0', '123', '321', 444] | ||
| 593 | """ | ||
| 594 | self.__toklist.append(item) | ||
| 595 | |||
| 596 | def extend( self, itemseq ): | ||
| 597 | """ | ||
| 598 | Add sequence of elements to end of ParseResults list of elements. | ||
| 599 | |||
| 600 | Example:: | ||
| 601 | patt = OneOrMore(Word(alphas)) | ||
| 602 | |||
| 603 | # use a parse action to append the reverse of the matched strings, to make a palindrome | ||
| 604 | def make_palindrome(tokens): | ||
| 605 | tokens.extend(reversed([t[::-1] for t in tokens])) | ||
| 606 | return ''.join(tokens) | ||
| 607 | print(patt.addParseAction(make_palindrome).parseString("lskdj sdlkjf lksd")) # -> 'lskdjsdlkjflksddsklfjkldsjdksl' | ||
| 608 | """ | ||
| 609 | if isinstance(itemseq, ParseResults): | ||
| 610 | self += itemseq | ||
| 611 | else: | ||
| 612 | self.__toklist.extend(itemseq) | ||
| 613 | |||
| 614 | def clear( self ): | ||
| 615 | """ | ||
| 616 | Clear all elements and results names. | ||
| 617 | """ | ||
| 618 | del self.__toklist[:] | ||
| 619 | self.__tokdict.clear() | ||
| 620 | |||
| 621 | def __getattr__( self, name ): | ||
| 622 | try: | ||
| 623 | return self[name] | ||
| 624 | except KeyError: | ||
| 625 | return "" | ||
| 634 | |||
| 635 | def __add__( self, other ): | ||
| 636 | ret = self.copy() | ||
| 637 | ret += other | ||
| 638 | return ret | ||
| 639 | |||
| 640 | def __iadd__( self, other ): | ||
| 641 | if other.__tokdict: | ||
| 642 | offset = len(self.__toklist) | ||
| 643 | addoffset = lambda a: offset if a<0 else a+offset | ||
| 644 | otheritems = other.__tokdict.items() | ||
| 645 | otherdictitems = [(k, _ParseResultsWithOffset(v[0],addoffset(v[1])) ) | ||
| 646 | for (k,vlist) in otheritems for v in vlist] | ||
| 647 | for k,v in otherdictitems: | ||
| 648 | self[k] = v | ||
| 649 | if isinstance(v[0],ParseResults): | ||
| 650 | v[0].__parent = wkref(self) | ||
| 651 | |||
| 652 | self.__toklist += other.__toklist | ||
| 653 | self.__accumNames.update( other.__accumNames ) | ||
| 654 | return self | ||
| 655 | |||
| 656 | def __radd__(self, other): | ||
| 657 | if isinstance(other,int) and other == 0: | ||
| 658 | # useful for merging many ParseResults using sum() builtin | ||
| 659 | return self.copy() | ||
| 660 | else: | ||
| 661 | # this may raise a TypeError - so be it | ||
| 662 | return other + self | ||
| 663 | |||
| 664 | def __repr__( self ): | ||
| 665 | return "(%s, %s)" % ( repr( self.__toklist ), repr( self.__tokdict ) ) | ||
| 666 | |||
| 667 | def __str__( self ): | ||
| 668 | return '[' + ', '.join(_ustr(i) if isinstance(i, ParseResults) else repr(i) for i in self.__toklist) + ']' | ||
| 669 | |||
| 670 | def _asStringList( self, sep='' ): | ||
| 671 | out = [] | ||
| 672 | for item in self.__toklist: | ||
| 673 | if out and sep: | ||
| 674 | out.append(sep) | ||
| 675 | if isinstance( item, ParseResults ): | ||
| 676 | out += item._asStringList() | ||
| 677 | else: | ||
| 678 | out.append( _ustr(item) ) | ||
| 679 | return out | ||
| 680 | |||
| 681 | def asList( self ): | ||
| 682 | """ | ||
| 683 | Returns the parse results as a nested list of matching tokens, all converted to strings. | ||
| 684 | |||
| 685 | Example:: | ||
| 686 | patt = OneOrMore(Word(alphas)) | ||
| 687 | result = patt.parseString("sldkj lsdkj sldkj") | ||
| 688 | # even though the result prints in string-like form, it is actually a pyparsing ParseResults | ||
| 689 | print(type(result), result) # -> <class 'pyparsing.ParseResults'> ['sldkj', 'lsdkj', 'sldkj'] | ||
| 690 | |||
| 691 | # Use asList() to create an actual list | ||
| 692 | result_list = result.asList() | ||
| 693 | print(type(result_list), result_list) # -> <class 'list'> ['sldkj', 'lsdkj', 'sldkj'] | ||
| 694 | """ | ||
| 695 | return [res.asList() if isinstance(res,ParseResults) else res for res in self.__toklist] | ||
| 696 | |||
| 697 | def asDict( self ): | ||
| 698 | """ | ||
| 699 | Returns the named parse results as a nested dictionary. | ||
| 700 | |||
| 701 | Example:: | ||
| 702 | integer = Word(nums) | ||
| 703 | date_str = integer("year") + '/' + integer("month") + '/' + integer("day") | ||
| 704 | |||
| 705 | result = date_str.parseString('12/31/1999') | ||
| 706 | print(type(result), repr(result)) # -> <class 'pyparsing.ParseResults'> (['12', '/', '31', '/', '1999'], {'day': [('1999', 4)], 'year': [('12', 0)], 'month': [('31', 2)]}) | ||
| 707 | |||
| 708 | result_dict = result.asDict() | ||
| 709 | print(type(result_dict), repr(result_dict)) # -> <class 'dict'> {'day': '1999', 'year': '12', 'month': '31'} | ||
| 710 | |||
| 711 | # even though a ParseResults supports dict-like access, sometimes you just need to have a dict | ||
| 712 | import json | ||
| 713 | print(json.dumps(result)) # -> Exception: TypeError: ... is not JSON serializable | ||
| 714 | print(json.dumps(result.asDict())) # -> {"month": "31", "day": "1999", "year": "12"} | ||
| 715 | """ | ||
| 716 | if PY_3: | ||
| 717 | item_fn = self.items | ||
| 718 | else: | ||
| 719 | item_fn = self.iteritems | ||
| 720 | |||
| 721 | def toItem(obj): | ||
| 722 | if isinstance(obj, ParseResults): | ||
| 723 | if obj.haskeys(): | ||
| 724 | return obj.asDict() | ||
| 725 | else: | ||
| 726 | return [toItem(v) for v in obj] | ||
| 727 | else: | ||
| 728 | return obj | ||
| 729 | |||
| 730 | return dict((k,toItem(v)) for k,v in item_fn()) | ||
| 731 | |||
| 732 | def copy( self ): | ||
| 733 | """ | ||
| 734 | Returns a new copy of a C{ParseResults} object. | ||
| 735 | """ | ||
| 736 | ret = ParseResults( self.__toklist ) | ||
| 737 | ret.__tokdict = self.__tokdict.copy() | ||
| 738 | ret.__parent = self.__parent | ||
| 739 | ret.__accumNames.update( self.__accumNames ) | ||
| 740 | ret.__name = self.__name | ||
| 741 | return ret | ||
| 742 | |||
| 743 | def asXML( self, doctag=None, namedItemsOnly=False, indent="", formatted=True ): | ||
| 744 | """ | ||
| 745 | (Deprecated) Returns the parse results as XML. Tags are created for tokens and lists that have defined results names. | ||
| 746 | """ | ||
| 747 | nl = "\n" | ||
| 748 | out = [] | ||
| 749 | namedItems = dict((v[1],k) for (k,vlist) in self.__tokdict.items() | ||
| 750 | for v in vlist) | ||
| 751 | nextLevelIndent = indent + " " | ||
| 752 | |||
| 753 | # collapse out indents if formatting is not desired | ||
| 754 | if not formatted: | ||
| 755 | indent = "" | ||
| 756 | nextLevelIndent = "" | ||
| 757 | nl = "" | ||
| 758 | |||
| 759 | selfTag = None | ||
| 760 | if doctag is not None: | ||
| 761 | selfTag = doctag | ||
| 762 | else: | ||
| 763 | if self.__name: | ||
| 764 | selfTag = self.__name | ||
| 765 | |||
| 766 | if not selfTag: | ||
| 767 | if namedItemsOnly: | ||
| 768 | return "" | ||
| 769 | else: | ||
| 770 | selfTag = "ITEM" | ||
| 771 | |||
| 772 | out += [ nl, indent, "<", selfTag, ">" ] | ||
| 773 | |||
| 774 | for i,res in enumerate(self.__toklist): | ||
| 775 | if isinstance(res,ParseResults): | ||
| 776 | if i in namedItems: | ||
| 777 | out += [ res.asXML(namedItems[i], | ||
| 778 | namedItemsOnly and doctag is None, | ||
| 779 | nextLevelIndent, | ||
| 780 | formatted)] | ||
| 781 | else: | ||
| 782 | out += [ res.asXML(None, | ||
| 783 | namedItemsOnly and doctag is None, | ||
| 784 | nextLevelIndent, | ||
| 785 | formatted)] | ||
| 786 | else: | ||
| 787 | # individual token, see if there is a name for it | ||
| 788 | resTag = None | ||
| 789 | if i in namedItems: | ||
| 790 | resTag = namedItems[i] | ||
| 791 | if not resTag: | ||
| 792 | if namedItemsOnly: | ||
| 793 | continue | ||
| 794 | else: | ||
| 795 | resTag = "ITEM" | ||
| 796 | xmlBodyText = _xml_escape(_ustr(res)) | ||
| 797 | out += [ nl, nextLevelIndent, "<", resTag, ">", | ||
| 798 | xmlBodyText, | ||
| 799 | "</", resTag, ">" ] | ||
| 800 | |||
| 801 | out += [ nl, indent, "</", selfTag, ">" ] | ||
| 802 | return "".join(out) | ||
| 803 | |||
| 804 | def __lookup(self,sub): | ||
| 805 | for k,vlist in self.__tokdict.items(): | ||
| 806 | for v,loc in vlist: | ||
| 807 | if sub is v: | ||
| 808 | return k | ||
| 809 | return None | ||
| 810 | |||
| 811 | def getName(self): | ||
| 812 | r""" | ||
| 813 | Returns the results name for this token expression. Useful when several | ||
| 814 | different expressions might match at a particular location. | ||
| 815 | |||
| 816 | Example:: | ||
| 817 | integer = Word(nums) | ||
| 818 | ssn_expr = Regex(r"\d\d\d-\d\d-\d\d\d\d") | ||
| 819 | house_number_expr = Suppress('#') + Word(nums, alphanums) | ||
| 820 | user_data = (Group(house_number_expr)("house_number") | ||
| 821 | | Group(ssn_expr)("ssn") | ||
| 822 | | Group(integer)("age")) | ||
| 823 | user_info = OneOrMore(user_data) | ||
| 824 | |||
| 825 | result = user_info.parseString("22 111-22-3333 #221B") | ||
| 826 | for item in result: | ||
| 827 | print(item.getName(), ':', item[0]) | ||
| 828 | prints:: | ||
| 829 | age : 22 | ||
| 830 | ssn : 111-22-3333 | ||
| 831 | house_number : 221B | ||
| 832 | """ | ||
| 833 | if self.__name: | ||
| 834 | return self.__name | ||
| 835 | elif self.__parent: | ||
| 836 | par = self.__parent() | ||
| 837 | if par: | ||
| 838 | return par.__lookup(self) | ||
| 839 | else: | ||
| 840 | return None | ||
| 841 | elif (len(self) == 1 and | ||
| 842 | len(self.__tokdict) == 1 and | ||
| 843 | next(iter(self.__tokdict.values()))[0][1] in (0,-1)): | ||
| 844 | return next(iter(self.__tokdict.keys())) | ||
| 845 | else: | ||
| 846 | return None | ||
| 847 | |||
| 848 | def dump(self, indent='', depth=0, full=True): | ||
| 849 | """ | ||
| 850 | Diagnostic method for listing out the contents of a C{ParseResults}. | ||
| 851 | Accepts an optional C{indent} argument so that this string can be embedded | ||
| 852 | in a nested display of other data. | ||
| 853 | |||
| 854 | Example:: | ||
| 855 | integer = Word(nums) | ||
| 856 | date_str = integer("year") + '/' + integer("month") + '/' + integer("day") | ||
| 857 | |||
| 858 | result = date_str.parseString('12/31/1999') | ||
| 859 | print(result.dump()) | ||
| 860 | prints:: | ||
| 861 | ['12', '/', '31', '/', '1999'] | ||
| 862 | - day: 1999 | ||
| 863 | - month: 31 | ||
| 864 | - year: 12 | ||
| 865 | """ | ||
| 866 | out = [] | ||
| 867 | NL = '\n' | ||
| 868 | out.append( indent+_ustr(self.asList()) ) | ||
| 869 | if full: | ||
| 870 | if self.haskeys(): | ||
| 871 | items = sorted((str(k), v) for k,v in self.items()) | ||
| 872 | for k,v in items: | ||
| 873 | if out: | ||
| 874 | out.append(NL) | ||
| 875 | out.append( "%s%s- %s: " % (indent,(' '*depth), k) ) | ||
| 876 | if isinstance(v,ParseResults): | ||
| 877 | if v: | ||
| 878 | out.append( v.dump(indent,depth+1) ) | ||
| 879 | else: | ||
| 880 | out.append(_ustr(v)) | ||
| 881 | else: | ||
| 882 | out.append(repr(v)) | ||
| 883 | elif any(isinstance(vv,ParseResults) for vv in self): | ||
| 884 | v = self | ||
| 885 | for i,vv in enumerate(v): | ||
| 886 | if isinstance(vv,ParseResults): | ||
| 887 | out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),vv.dump(indent,depth+1) )) | ||
| 888 | else: | ||
| 889 | out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),_ustr(vv))) | ||
| 890 | |||
| 891 | return "".join(out) | ||
| 892 | |||
| 893 | def pprint(self, *args, **kwargs): | ||
| 894 | """ | ||
| 895 | Pretty-printer for parsed results as a list, using the C{pprint} module. | ||
| 896 | Accepts additional positional or keyword args as defined for the | ||
| 897 | C{pprint.pprint} method. (U{http://docs.python.org/3/library/pprint.html#pprint.pprint}) | ||
| 898 | |||
| 899 | Example:: | ||
| 900 | ident = Word(alphas, alphanums) | ||
| 901 | num = Word(nums) | ||
| 902 | func = Forward() | ||
| 903 | term = ident | num | Group('(' + func + ')') | ||
| 904 | func <<= ident + Group(Optional(delimitedList(term))) | ||
| 905 | result = func.parseString("fna a,b,(fnb c,d,200),100") | ||
| 906 | result.pprint(width=40) | ||
| 907 | prints:: | ||
| 908 | ['fna', | ||
| 909 | ['a', | ||
| 910 | 'b', | ||
| 911 | ['(', 'fnb', ['c', 'd', '200'], ')'], | ||
| 912 | '100']] | ||
| 913 | """ | ||
| 914 | pprint.pprint(self.asList(), *args, **kwargs) | ||
| 915 | |||
| 916 | # add support for pickle protocol | ||
| 917 | def __getstate__(self): | ||
| 918 | return ( self.__toklist, | ||
| 919 | ( self.__tokdict.copy(), | ||
| 920 | self.__parent is not None and self.__parent() or None, | ||
| 921 | self.__accumNames, | ||
| 922 | self.__name ) ) | ||
| 923 | |||
| 924 | def __setstate__(self,state): | ||
| 925 | self.__toklist = state[0] | ||
| 926 | (self.__tokdict, | ||
| 927 | par, | ||
| 928 | inAccumNames, | ||
| 929 | self.__name) = state[1] | ||
| 930 | self.__accumNames = {} | ||
| 931 | self.__accumNames.update(inAccumNames) | ||
| 932 | if par is not None: | ||
| 933 | self.__parent = wkref(par) | ||
| 934 | else: | ||
| 935 | self.__parent = None | ||
| 936 | |||
| 937 | def __getnewargs__(self): | ||
| 938 | return self.__toklist, self.__name, self.__asList, self.__modal | ||
| 939 | |||
| 940 | def __dir__(self): | ||
| 941 | return (dir(type(self)) + list(self.keys())) | ||
| 942 | |||
| 943 | collections.MutableMapping.register(ParseResults) | ||
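Editor's note: `collections.MutableMapping` is the Python 2 spelling kept here for cross-version compatibility; on Python 3 the ABC lives in `collections.abc`, and the bare `collections` alias warns from 3.3 onward and is removed in 3.10. Registration only makes `isinstance`/`issubclass` checks succeed; no methods are inherited:

    # Python 3 spelling of the same registration (sketch):
    from collections.abc import MutableMapping
    MutableMapping.register(ParseResults)   # virtual subclass only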
| 944 | |||
| 945 | def col (loc,strg): | ||
| 946 | """Returns current column within a string, counting newlines as line separators. | ||
| 947 | The first column is number 1. | ||
| 948 | |||
| 949 | Note: the default parsing behavior is to expand tabs in the input string | ||
| 950 | before starting the parsing process. See L{I{ParserElement.parseString}<ParserElement.parseString>} for more information | ||
| 951 | on parsing strings containing C{<TAB>}s, and suggested methods to maintain a | ||
| 952 | consistent view of the parsed string, the parse location, and line and column | ||
| 953 | positions within the parsed string. | ||
| 954 | """ | ||
| 955 | s = strg | ||
| 956 | return 1 if 0<loc<len(s) and s[loc-1] == '\n' else loc - s.rfind("\n", 0, loc) | ||
| 957 | |||
| 958 | def lineno(loc,strg): | ||
| 959 | """Returns current line number within a string, counting newlines as line separators. | ||
| 960 | The first line is number 1. | ||
| 961 | |||
| 962 | Note: the default parsing behavior is to expand tabs in the input string | ||
| 963 | before starting the parsing process. See L{I{ParserElement.parseString}<ParserElement.parseString>} for more information | ||
| 964 | on parsing strings containing C{<TAB>}s, and suggested methods to maintain a | ||
| 965 | consistent view of the parsed string, the parse location, and line and column | ||
| 966 | positions within the parsed string. | ||
| 967 | """ | ||
| 968 | return strg.count("\n",0,loc) + 1 | ||
| 969 | |||
| 970 | def line( loc, strg ): | ||
| 971 | """Returns the line of text containing loc within a string, counting newlines as line separators. | ||
| 972 | """ | ||
| 973 | lastCR = strg.rfind("\n", 0, loc) | ||
| 974 | nextCR = strg.find("\n", loc) | ||
| 975 | if nextCR >= 0: | ||
| 976 | return strg[lastCR+1:nextCR] | ||
| 977 | else: | ||
| 978 | return strg[lastCR+1:] | ||
| 979 | |||
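Editor's note: `col`, `lineno`, and `line` convert a flat character offset into 1-based human coordinates; `col` special-cases a location sitting just after a newline so the start of a line reads as column 1. Sketch with expected output:

    from pip._vendor.pyparsing import col, line, lineno

    s = "abc\ndef"
    loc = s.index('e')        # flat offset 5
    print(lineno(loc, s))     # -> 2
    print(col(loc, s))        # -> 2
    print(line(loc, s))       # -> def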
| 980 | def _defaultStartDebugAction( instring, loc, expr ): | ||
| 981 | print (("Match " + _ustr(expr) + " at loc " + _ustr(loc) + "(%d,%d)" % ( lineno(loc,instring), col(loc,instring) ))) | ||
| 982 | |||
| 983 | def _defaultSuccessDebugAction( instring, startloc, endloc, expr, toks ): | ||
| 984 | print ("Matched " + _ustr(expr) + " -> " + str(toks.asList())) | ||
| 985 | |||
| 986 | def _defaultExceptionDebugAction( instring, loc, expr, exc ): | ||
| 987 | print ("Exception raised:" + _ustr(exc)) | ||
| 988 | |||
| 989 | def nullDebugAction(*args): | ||
| 990 | """'Do-nothing' debug action, to suppress debugging output during parsing.""" | ||
| 991 | pass | ||
| 992 | |||
| 993 | # Only works on Python 3.x - nonlocal is toxic to Python 2 installs | ||
| 994 | #~ 'decorator to trim function calls to match the arity of the target' | ||
| 995 | #~ def _trim_arity(func, maxargs=3): | ||
| 996 | #~ if func in singleArgBuiltins: | ||
| 997 | #~ return lambda s,l,t: func(t) | ||
| 998 | #~ limit = 0 | ||
| 999 | #~ foundArity = False | ||
| 1000 | #~ def wrapper(*args): | ||
| 1001 | #~ nonlocal limit,foundArity | ||
| 1002 | #~ while 1: | ||
| 1003 | #~ try: | ||
| 1004 | #~ ret = func(*args[limit:]) | ||
| 1005 | #~ foundArity = True | ||
| 1006 | #~ return ret | ||
| 1007 | #~ except TypeError: | ||
| 1008 | #~ if limit == maxargs or foundArity: | ||
| 1009 | #~ raise | ||
| 1010 | #~ limit += 1 | ||
| 1011 | #~ continue | ||
| 1012 | #~ return wrapper | ||
| 1013 | |||
| 1014 | # this version is Python 2.x-3.x cross-compatible | ||
| 1015 | # decorator to trim function calls to match the arity of the target | ||
| 1016 | def _trim_arity(func, maxargs=2): | ||
| 1017 | if func in singleArgBuiltins: | ||
| 1018 | return lambda s,l,t: func(t) | ||
| 1019 | limit = [0] | ||
| 1020 | foundArity = [False] | ||
| 1021 | |||
| 1022 | # traceback return data structure changed in Py3.5 - normalize back to plain tuples | ||
| 1023 | if system_version[:2] >= (3,5): | ||
| 1024 | def extract_stack(limit=0): | ||
| 1025 | # special handling for Python 3.5.0 - extra deep call stack by 1 | ||
| 1026 | offset = -3 if system_version == (3,5,0) else -2 | ||
| 1027 | frame_summary = traceback.extract_stack(limit=-offset+limit-1)[offset] | ||
| 1028 | return [(frame_summary.filename, frame_summary.lineno)] | ||
| 1029 | def extract_tb(tb, limit=0): | ||
| 1030 | frames = traceback.extract_tb(tb, limit=limit) | ||
| 1031 | frame_summary = frames[-1] | ||
| 1032 | return [(frame_summary.filename, frame_summary.lineno)] | ||
| 1033 | else: | ||
| 1034 | extract_stack = traceback.extract_stack | ||
| 1035 | extract_tb = traceback.extract_tb | ||
| 1036 | |||
| 1037 | # synthesize what would be returned by traceback.extract_stack at the call to | ||
| 1038 | # user's parse action 'func', so that we don't incur call penalty at parse time | ||
| 1039 | |||
| 1040 | LINE_DIFF = 6 | ||
| 1041 | # IF ANY CODE CHANGES, EVEN JUST COMMENTS OR BLANK LINES, BETWEEN THE NEXT LINE AND | ||
| 1042 | # THE CALL TO FUNC INSIDE WRAPPER, LINE_DIFF MUST BE MODIFIED!!!! | ||
| 1043 | this_line = extract_stack(limit=2)[-1] | ||
| 1044 | pa_call_line_synth = (this_line[0], this_line[1]+LINE_DIFF) | ||
| 1045 | |||
| 1046 | def wrapper(*args): | ||
| 1047 | while 1: | ||
| 1048 | try: | ||
| 1049 | ret = func(*args[limit[0]:]) | ||
| 1050 | foundArity[0] = True | ||
| 1051 | return ret | ||
| 1052 | except TypeError: | ||
| 1053 | # re-raise TypeErrors if they did not come from our arity testing | ||
| 1054 | if foundArity[0]: | ||
| 1055 | raise | ||
| 1056 | else: | ||
| 1057 | try: | ||
| 1058 | tb = sys.exc_info()[-1] | ||
| 1059 | if not extract_tb(tb, limit=2)[-1][:2] == pa_call_line_synth: | ||
| 1060 | raise | ||
| 1061 | finally: | ||
| 1062 | del tb | ||
| 1063 | |||
| 1064 | if limit[0] <= maxargs: | ||
| 1065 | limit[0] += 1 | ||
| 1066 | continue | ||
| 1067 | raise | ||
| 1068 | |||
| 1069 | # copy func name to wrapper for sensible debug output | ||
| 1070 | func_name = "<parse action>" | ||
| 1071 | try: | ||
| 1072 | func_name = getattr(func, '__name__', | ||
| 1073 | getattr(func, '__class__').__name__) | ||
| 1074 | except Exception: | ||
| 1075 | func_name = str(func) | ||
| 1076 | wrapper.__name__ = func_name | ||
| 1077 | |||
| 1078 | return wrapper | ||
| 1079 | |||
| 1080 | class ParserElement(object): | ||
| 1081 | """Abstract base level parser element class.""" | ||
| 1082 | DEFAULT_WHITE_CHARS = " \n\t\r" | ||
| 1083 | verbose_stacktrace = False | ||
| 1084 | |||
| 1085 | @staticmethod | ||
| 1086 | def setDefaultWhitespaceChars( chars ): | ||
| 1087 | r""" | ||
| 1088 | Overrides the default whitespace chars | ||
| 1089 | |||
| 1090 | Example:: | ||
| 1091 | # default whitespace chars are space, <TAB> and newline | ||
| 1092 | OneOrMore(Word(alphas)).parseString("abc def\nghi jkl") # -> ['abc', 'def', 'ghi', 'jkl'] | ||
| 1093 | |||
| 1094 | # change to just treat newline as significant | ||
| 1095 | ParserElement.setDefaultWhitespaceChars(" \t") | ||
| 1096 | OneOrMore(Word(alphas)).parseString("abc def\nghi jkl") # -> ['abc', 'def'] | ||
| 1097 | """ | ||
| 1098 | ParserElement.DEFAULT_WHITE_CHARS = chars | ||
| 1099 | |||
| 1100 | @staticmethod | ||
| 1101 | def inlineLiteralsUsing(cls): | ||
| 1102 | """ | ||
| 1103 | Set class to be used for inclusion of string literals into a parser. | ||
| 1104 | |||
| 1105 | Example:: | ||
| 1106 | # default literal class used is Literal | ||
| 1107 | integer = Word(nums) | ||
| 1108 | date_str = integer("year") + '/' + integer("month") + '/' + integer("day") | ||
| 1109 | |||
| 1110 | date_str.parseString("1999/12/31") # -> ['1999', '/', '12', '/', '31'] | ||
| 1111 | |||
| 1112 | |||
| 1113 | # change to Suppress | ||
| 1114 | ParserElement.inlineLiteralsUsing(Suppress) | ||
| 1115 | date_str = integer("year") + '/' + integer("month") + '/' + integer("day") | ||
| 1116 | |||
| 1117 | date_str.parseString("1999/12/31") # -> ['1999', '12', '31'] | ||
| 1118 | """ | ||
| 1119 | ParserElement._literalStringClass = cls | ||
| 1120 | |||
| 1121 | def __init__( self, savelist=False ): | ||
| 1122 | self.parseAction = list() | ||
| 1123 | self.failAction = None | ||
| 1124 | #~ self.name = "<unknown>" # don't define self.name, let subclasses try/except upcall | ||
| 1125 | self.strRepr = None | ||
| 1126 | self.resultsName = None | ||
| 1127 | self.saveAsList = savelist | ||
| 1128 | self.skipWhitespace = True | ||
| 1129 | self.whiteChars = ParserElement.DEFAULT_WHITE_CHARS | ||
| 1130 | self.copyDefaultWhiteChars = True | ||
| 1131 | self.mayReturnEmpty = False # used when checking for left-recursion | ||
| 1132 | self.keepTabs = False | ||
| 1133 | self.ignoreExprs = list() | ||
| 1134 | self.debug = False | ||
| 1135 | self.streamlined = False | ||
| 1136 | self.mayIndexError = True # used to optimize exception handling for subclasses that don't advance parse index | ||
| 1137 | self.errmsg = "" | ||
| 1138 | self.modalResults = True # used to mark results names as modal (report only last) or cumulative (list all) | ||
| 1139 | self.debugActions = ( None, None, None ) #custom debug actions | ||
| 1140 | self.re = None | ||
| 1141 | self.callPreparse = True # used to avoid redundant calls to preParse | ||
| 1142 | self.callDuringTry = False | ||
| 1143 | |||
| 1144 | def copy( self ): | ||
| 1145 | """ | ||
| 1146 | Make a copy of this C{ParserElement}. Useful for defining different parse actions | ||
| 1147 | for the same parsing pattern, using copies of the original parse element. | ||
| 1148 | |||
| 1149 | Example:: | ||
| 1150 | integer = Word(nums).setParseAction(lambda toks: int(toks[0])) | ||
| 1151 | integerK = integer.copy().addParseAction(lambda toks: toks[0]*1024) + Suppress("K") | ||
| 1152 | integerM = integer.copy().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M") | ||
| 1153 | |||
| 1154 | print(OneOrMore(integerK | integerM | integer).parseString("5K 100 640K 256M")) | ||
| 1155 | prints:: | ||
| 1156 | [5120, 100, 655360, 268435456] | ||
| 1157 | Equivalent form of C{expr.copy()} is just C{expr()}:: | ||
| 1158 | integerM = integer().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M") | ||
| 1159 | """ | ||
| 1160 | cpy = copy.copy( self ) | ||
| 1161 | cpy.parseAction = self.parseAction[:] | ||
| 1162 | cpy.ignoreExprs = self.ignoreExprs[:] | ||
| 1163 | if self.copyDefaultWhiteChars: | ||
| 1164 | cpy.whiteChars = ParserElement.DEFAULT_WHITE_CHARS | ||
| 1165 | return cpy | ||
| 1166 | |||
| 1167 | def setName( self, name ): | ||
| 1168 | """ | ||
| 1169 | Define a name for this expression, making debugging and exception messages clearer. | ||
| 1170 | |||
| 1171 | Example:: | ||
| 1172 | Word(nums).parseString("ABC") # -> Exception: Expected W:(0123...) (at char 0), (line:1, col:1) | ||
| 1173 | Word(nums).setName("integer").parseString("ABC") # -> Exception: Expected integer (at char 0), (line:1, col:1) | ||
| 1174 | """ | ||
| 1175 | self.name = name | ||
| 1176 | self.errmsg = "Expected " + self.name | ||
| 1177 | if hasattr(self,"exception"): | ||
| 1178 | self.exception.msg = self.errmsg | ||
| 1179 | return self | ||
| 1180 | |||
| 1181 | def setResultsName( self, name, listAllMatches=False ): | ||
| 1182 | """ | ||
| 1183 | Define name for referencing matching tokens as a nested attribute | ||
| 1184 | of the returned parse results. | ||
| 1185 | NOTE: this returns a *copy* of the original C{ParserElement} object; | ||
| 1186 | this is so that the client can define a basic element, such as an | ||
| 1187 | integer, and reference it in multiple places with different names. | ||
| 1188 | |||
| 1189 | You can also set results names using the abbreviated syntax, | ||
| 1190 | C{expr("name")} in place of C{expr.setResultsName("name")} - | ||
| 1191 | see L{I{__call__}<__call__>}. | ||
| 1192 | |||
| 1193 | Example:: | ||
| 1194 | date_str = (integer.setResultsName("year") + '/' | ||
| 1195 | + integer.setResultsName("month") + '/' | ||
| 1196 | + integer.setResultsName("day")) | ||
| 1197 | |||
| 1198 | # equivalent form: | ||
| 1199 | date_str = integer("year") + '/' + integer("month") + '/' + integer("day") | ||
| 1200 | """ | ||
| 1201 | newself = self.copy() | ||
| 1202 | if name.endswith("*"): | ||
| 1203 | name = name[:-1] | ||
| 1204 | listAllMatches=True | ||
| 1205 | newself.resultsName = name | ||
| 1206 | newself.modalResults = not listAllMatches | ||
| 1207 | return newself | ||
| 1208 | |||
| 1209 | def setBreak(self,breakFlag = True): | ||
| 1210 | """Method to invoke the Python pdb debugger when this element is | ||
| 1211 | about to be parsed. Set C{breakFlag} to True to enable, False to | ||
| 1212 | disable. | ||
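| | |||
| | Example (a minimal illustrative sketch; drops into an interactive pdb session):: | ||
| | integer = Word(nums).setBreak() | ||
| | integer.parseString("123") # pdb.set_trace() fires just before 'integer' is matched | ||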
| 1213 | """ | ||
| 1214 | if breakFlag: | ||
| 1215 | _parseMethod = self._parse | ||
| 1216 | def breaker(instring, loc, doActions=True, callPreParse=True): | ||
| 1217 | import pdb | ||
| 1218 | pdb.set_trace() | ||
| 1219 | return _parseMethod( instring, loc, doActions, callPreParse ) | ||
| 1220 | breaker._originalParseMethod = _parseMethod | ||
| 1221 | self._parse = breaker | ||
| 1222 | else: | ||
| 1223 | if hasattr(self._parse,"_originalParseMethod"): | ||
| 1224 | self._parse = self._parse._originalParseMethod | ||
| 1225 | return self | ||
| 1226 | |||
| 1227 | def setParseAction( self, *fns, **kwargs ): | ||
| 1228 | """ | ||
| 1229 | Define one or more actions to perform when successfully matching parse element definition. | ||
| 1230 | Parse action fn is a callable with 0-3 arguments, called as C{fn(s,loc,toks)}, | ||
| 1231 | C{fn(loc,toks)}, C{fn(toks)}, or just C{fn()}, where: | ||
| 1232 | - s = the original string being parsed (see note below) | ||
| 1233 | - loc = the location of the matching substring | ||
| 1234 | - toks = a list of the matched tokens, packaged as a C{L{ParseResults}} object | ||
| 1235 | If the functions in fns modify the tokens, they can return them as the return | ||
| 1236 | value from fn, and the modified list of tokens will replace the original. | ||
| 1237 | Otherwise, fn does not need to return any value. | ||
| 1238 | |||
| 1239 | Optional keyword arguments: | ||
| 1240 | - callDuringTry = (default=C{False}) indicate if parse action should be run during lookaheads and alternate testing | ||
| 1241 | |||
| 1242 | Note: the default parsing behavior is to expand tabs in the input string | ||
| 1243 | before starting the parsing process. See L{I{parseString}<parseString>} for more information | ||
| 1244 | on parsing strings containing C{<TAB>}s, and suggested methods to maintain a | ||
| 1245 | consistent view of the parsed string, the parse location, and line and column | ||
| 1246 | positions within the parsed string. | ||
| 1247 | |||
| 1248 | Example:: | ||
| 1249 | integer = Word(nums) | ||
| 1250 | date_str = integer + '/' + integer + '/' + integer | ||
| 1251 | |||
| 1252 | date_str.parseString("1999/12/31") # -> ['1999', '/', '12', '/', '31'] | ||
| 1253 | |||
| 1254 | # use parse action to convert to ints at parse time | ||
| 1255 | integer = Word(nums).setParseAction(lambda toks: int(toks[0])) | ||
| 1256 | date_str = integer + '/' + integer + '/' + integer | ||
| 1257 | |||
| 1258 | # note that integer fields are now ints, not strings | ||
| 1259 | date_str.parseString("1999/12/31") # -> [1999, '/', 12, '/', 31] | ||
| 1260 | """ | ||
| 1261 | self.parseAction = list(map(_trim_arity, list(fns))) | ||
| 1262 | self.callDuringTry = kwargs.get("callDuringTry", False) | ||
| 1263 | return self | ||
| 1264 | |||
| 1265 | def addParseAction( self, *fns, **kwargs ): | ||
| 1266 | """ | ||
| 1267 | Add one or more parse actions to expression's list of parse actions. See L{I{setParseAction}<setParseAction>}. | ||
| 1268 | |||
| 1269 | See examples in L{I{copy}<copy>}. | ||
| 1270 | """ | ||
| 1271 | self.parseAction += list(map(_trim_arity, list(fns))) | ||
| 1272 | self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False) | ||
| 1273 | return self | ||
| 1274 | |||
| 1275 | def addCondition(self, *fns, **kwargs): | ||
| 1276 | """Add a boolean predicate function to expression's list of parse actions. See | ||
| 1277 | L{I{setParseAction}<setParseAction>} for function call signatures. Unlike C{setParseAction}, | ||
| 1278 | functions passed to C{addCondition} need to return boolean success/fail of the condition. | ||
| 1279 | |||
| 1280 | Optional keyword arguments: | ||
| 1281 | - message = define a custom message to be used in the raised exception | ||
| 1282 | - fatal = if True, will raise ParseFatalException to stop parsing immediately; otherwise will raise ParseException | ||
| 1283 | |||
| 1284 | Example:: | ||
| 1285 | integer = Word(nums).setParseAction(lambda toks: int(toks[0])) | ||
| 1286 | year_int = integer.copy() | ||
| 1287 | year_int.addCondition(lambda toks: toks[0] >= 2000, message="Only support years 2000 and later") | ||
| 1288 | date_str = year_int + '/' + integer + '/' + integer | ||
| 1289 | |||
| 1290 | result = date_str.parseString("1999/12/31") # -> Exception: Only support years 2000 and later (at char 0), (line:1, col:1) | ||
| 1291 | """ | ||
| 1292 | msg = kwargs.get("message", "failed user-defined condition") | ||
| 1293 | exc_type = ParseFatalException if kwargs.get("fatal", False) else ParseException | ||
| 1294 | for fn in fns: | ||
| 1295 | def pa(s,l,t, fn=_trim_arity(fn)): # bind fn per-iteration; a late-binding closure would reuse only the last fn | ||
| 1296 | if not bool(fn(s,l,t)): | ||
| 1297 | raise exc_type(s,l,msg) | ||
| 1298 | self.parseAction.append(pa) | ||
| 1299 | self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False) | ||
| 1300 | return self | ||
| 1301 | |||
| 1302 | def setFailAction( self, fn ): | ||
| 1303 | """Define action to perform if parsing fails at this expression. | ||
| 1304 | Fail action fn is a callable function that takes the arguments | ||
| 1305 | C{fn(s,loc,expr,err)} where: | ||
| 1306 | - s = string being parsed | ||
| 1307 | - loc = location where expression match was attempted and failed | ||
| 1308 | - expr = the parse expression that failed | ||
| 1309 | - err = the exception thrown | ||
| 1310 | The function returns no value. It may throw C{L{ParseFatalException}} | ||
| 1311 | if parsing should be stopped immediately. | ||
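| | |||
| | Example (a minimal illustrative sketch):: | ||
| | def report_failure(s, loc, expr, err): | ||
| | print("matching %s failed at loc %d" % (expr, loc)) | ||
| | end_kwd = Keyword("end").setFailAction(report_failure) | ||
| | """ | ||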
| 1312 | self.failAction = fn | ||
| 1313 | return self | ||
| 1314 | |||
| 1315 | def _skipIgnorables( self, instring, loc ): | ||
| 1316 | exprsFound = True | ||
| 1317 | while exprsFound: | ||
| 1318 | exprsFound = False | ||
| 1319 | for e in self.ignoreExprs: | ||
| 1320 | try: | ||
| 1321 | while 1: | ||
| 1322 | loc,dummy = e._parse( instring, loc ) | ||
| 1323 | exprsFound = True | ||
| 1324 | except ParseException: | ||
| 1325 | pass | ||
| 1326 | return loc | ||
| 1327 | |||
| 1328 | def preParse( self, instring, loc ): | ||
| 1329 | if self.ignoreExprs: | ||
| 1330 | loc = self._skipIgnorables( instring, loc ) | ||
| 1331 | |||
| 1332 | if self.skipWhitespace: | ||
| 1333 | wt = self.whiteChars | ||
| 1334 | instrlen = len(instring) | ||
| 1335 | while loc < instrlen and instring[loc] in wt: | ||
| 1336 | loc += 1 | ||
| 1337 | |||
| 1338 | return loc | ||
| 1339 | |||
| 1340 | def parseImpl( self, instring, loc, doActions=True ): | ||
| 1341 | return loc, [] | ||
| 1342 | |||
| 1343 | def postParse( self, instring, loc, tokenlist ): | ||
| 1344 | return tokenlist | ||
| 1345 | |||
| 1346 | #~ @profile | ||
| 1347 | def _parseNoCache( self, instring, loc, doActions=True, callPreParse=True ): | ||
| 1348 | debugging = ( self.debug ) #and doActions ) | ||
| 1349 | |||
| 1350 | if debugging or self.failAction: | ||
| 1351 | #~ print ("Match",self,"at loc",loc,"(%d,%d)" % ( lineno(loc,instring), col(loc,instring) )) | ||
| 1352 | if (self.debugActions[0] ): | ||
| 1353 | self.debugActions[0]( instring, loc, self ) | ||
| 1354 | if callPreParse and self.callPreparse: | ||
| 1355 | preloc = self.preParse( instring, loc ) | ||
| 1356 | else: | ||
| 1357 | preloc = loc | ||
| 1358 | tokensStart = preloc | ||
| 1359 | try: | ||
| 1360 | try: | ||
| 1361 | loc,tokens = self.parseImpl( instring, preloc, doActions ) | ||
| 1362 | except IndexError: | ||
| 1363 | raise ParseException( instring, len(instring), self.errmsg, self ) | ||
| 1364 | except ParseBaseException as err: | ||
| 1365 | #~ print ("Exception raised:", err) | ||
| 1366 | if self.debugActions[2]: | ||
| 1367 | self.debugActions[2]( instring, tokensStart, self, err ) | ||
| 1368 | if self.failAction: | ||
| 1369 | self.failAction( instring, tokensStart, self, err ) | ||
| 1370 | raise | ||
| 1371 | else: | ||
| 1372 | if callPreParse and self.callPreparse: | ||
| 1373 | preloc = self.preParse( instring, loc ) | ||
| 1374 | else: | ||
| 1375 | preloc = loc | ||
| 1376 | tokensStart = preloc | ||
| 1377 | if self.mayIndexError or loc >= len(instring): | ||
| 1378 | try: | ||
| 1379 | loc,tokens = self.parseImpl( instring, preloc, doActions ) | ||
| 1380 | except IndexError: | ||
| 1381 | raise ParseException( instring, len(instring), self.errmsg, self ) | ||
| 1382 | else: | ||
| 1383 | loc,tokens = self.parseImpl( instring, preloc, doActions ) | ||
| 1384 | |||
| 1385 | tokens = self.postParse( instring, loc, tokens ) | ||
| 1386 | |||
| 1387 | retTokens = ParseResults( tokens, self.resultsName, asList=self.saveAsList, modal=self.modalResults ) | ||
| 1388 | if self.parseAction and (doActions or self.callDuringTry): | ||
| 1389 | if debugging: | ||
| 1390 | try: | ||
| 1391 | for fn in self.parseAction: | ||
| 1392 | tokens = fn( instring, tokensStart, retTokens ) | ||
| 1393 | if tokens is not None: | ||
| 1394 | retTokens = ParseResults( tokens, | ||
| 1395 | self.resultsName, | ||
| 1396 | asList=self.saveAsList and isinstance(tokens,(ParseResults,list)), | ||
| 1397 | modal=self.modalResults ) | ||
| 1398 | except ParseBaseException as err: | ||
| 1399 | #~ print "Exception raised in user parse action:", err | ||
| 1400 | if (self.debugActions[2] ): | ||
| 1401 | self.debugActions[2]( instring, tokensStart, self, err ) | ||
| 1402 | raise | ||
| 1403 | else: | ||
| 1404 | for fn in self.parseAction: | ||
| 1405 | tokens = fn( instring, tokensStart, retTokens ) | ||
| 1406 | if tokens is not None: | ||
| 1407 | retTokens = ParseResults( tokens, | ||
| 1408 | self.resultsName, | ||
| 1409 | asList=self.saveAsList and isinstance(tokens,(ParseResults,list)), | ||
| 1410 | modal=self.modalResults ) | ||
| 1411 | |||
| 1412 | if debugging: | ||
| 1413 | #~ print ("Matched",self,"->",retTokens.asList()) | ||
| 1414 | if (self.debugActions[1] ): | ||
| 1415 | self.debugActions[1]( instring, tokensStart, loc, self, retTokens ) | ||
| 1416 | |||
| 1417 | return loc, retTokens | ||
| 1418 | |||
| 1419 | def tryParse( self, instring, loc ): | ||
| 1420 | try: | ||
| 1421 | return self._parse( instring, loc, doActions=False )[0] | ||
| 1422 | except ParseFatalException: | ||
| 1423 | raise ParseException( instring, loc, self.errmsg, self) | ||
| 1424 | |||
| 1425 | def canParseNext(self, instring, loc): | ||
| 1426 | try: | ||
| 1427 | self.tryParse(instring, loc) | ||
| 1428 | except (ParseException, IndexError): | ||
| 1429 | return False | ||
| 1430 | else: | ||
| 1431 | return True | ||
| 1432 | |||
| 1433 | class _UnboundedCache(object): | ||
| 1434 | def __init__(self): | ||
| 1435 | cache = {} | ||
| 1436 | self.not_in_cache = not_in_cache = object() | ||
| 1437 | |||
| 1438 | def get(self, key): | ||
| 1439 | return cache.get(key, not_in_cache) | ||
| 1440 | |||
| 1441 | def set(self, key, value): | ||
| 1442 | cache[key] = value | ||
| 1443 | |||
| 1444 | def clear(self): | ||
| 1445 | cache.clear() | ||
| 1446 | |||
| 1447 | def cache_len(self): | ||
| 1448 | return len(cache) | ||
| 1449 | |||
| 1450 | self.get = types.MethodType(get, self) | ||
| 1451 | self.set = types.MethodType(set, self) | ||
| 1452 | self.clear = types.MethodType(clear, self) | ||
| 1453 | self.__len__ = types.MethodType(cache_len, self) | ||
| 1454 | |||
| 1455 | if _OrderedDict is not None: | ||
| 1456 | class _FifoCache(object): | ||
| 1457 | def __init__(self, size): | ||
| 1458 | self.not_in_cache = not_in_cache = object() | ||
| 1459 | |||
| 1460 | cache = _OrderedDict() | ||
| 1461 | |||
| 1462 | def get(self, key): | ||
| 1463 | return cache.get(key, not_in_cache) | ||
| 1464 | |||
| 1465 | def set(self, key, value): | ||
| 1466 | cache[key] = value | ||
| 1467 | while len(cache) > size: | ||
| 1468 | try: | ||
| 1469 | cache.popitem(False) | ||
| 1470 | except KeyError: | ||
| 1471 | pass | ||
| 1472 | |||
| 1473 | def clear(self): | ||
| 1474 | cache.clear() | ||
| 1475 | |||
| 1476 | def cache_len(self): | ||
| 1477 | return len(cache) | ||
| 1478 | |||
| 1479 | self.get = types.MethodType(get, self) | ||
| 1480 | self.set = types.MethodType(set, self) | ||
| 1481 | self.clear = types.MethodType(clear, self) | ||
| 1482 | self.__len__ = types.MethodType(cache_len, self) | ||
| 1483 | |||
| 1484 | else: | ||
| 1485 | class _FifoCache(object): | ||
| 1486 | def __init__(self, size): | ||
| 1487 | self.not_in_cache = not_in_cache = object() | ||
| 1488 | |||
| 1489 | cache = {} | ||
| 1490 | key_fifo = collections.deque() # no maxlen - a bounded deque would drop keys without evicting their cache entries | ||
| 1491 | |||
| 1492 | def get(self, key): | ||
| 1493 | return cache.get(key, not_in_cache) | ||
| 1494 | |||
| 1495 | def set(self, key, value): | ||
| 1496 | cache[key] = value | ||
| 1497 | while len(key_fifo) > size: | ||
| 1498 | cache.pop(key_fifo.popleft(), None) | ||
| 1499 | key_fifo.append(key) | ||
| 1500 | |||
| 1501 | def clear(self): | ||
| 1502 | cache.clear() | ||
| 1503 | key_fifo.clear() | ||
| 1504 | |||
| 1505 | def cache_len(self): | ||
| 1506 | return len(cache) | ||
| 1507 | |||
| 1508 | self.get = types.MethodType(get, self) | ||
| 1509 | self.set = types.MethodType(set, self) | ||
| 1510 | self.clear = types.MethodType(clear, self) | ||
| 1511 | self.__len__ = types.MethodType(cache_len, self) | ||
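| | |||
| | # Illustrative sketch (assumed usage, mirroring the packrat code below): | ||
| | # c = ParserElement._FifoCache(2) | ||
| | # c.set("a", 1) | ||
| | # hit = c.get("a") is not c.not_in_cache # sentinel test instead of KeyError | ||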
| 1512 | |||
| 1513 | # argument cache for optimizing repeated calls when backtracking through recursive expressions | ||
| 1514 | packrat_cache = {} # this is set later by enablePackrat(); this is here so that resetCache() doesn't fail | ||
| 1515 | packrat_cache_lock = RLock() | ||
| 1516 | packrat_cache_stats = [0, 0] | ||
| 1517 | |||
| 1518 | # this method gets repeatedly called during backtracking with the same arguments - | ||
| 1519 | # we can cache these arguments and save ourselves the trouble of re-parsing the contained expression | ||
| 1520 | def _parseCache( self, instring, loc, doActions=True, callPreParse=True ): | ||
| 1521 | HIT, MISS = 0, 1 | ||
| 1522 | lookup = (self, instring, loc, callPreParse, doActions) | ||
| 1523 | with ParserElement.packrat_cache_lock: | ||
| 1524 | cache = ParserElement.packrat_cache | ||
| 1525 | value = cache.get(lookup) | ||
| 1526 | if value is cache.not_in_cache: | ||
| 1527 | ParserElement.packrat_cache_stats[MISS] += 1 | ||
| 1528 | try: | ||
| 1529 | value = self._parseNoCache(instring, loc, doActions, callPreParse) | ||
| 1530 | except ParseBaseException as pe: | ||
| 1531 | # cache a copy of the exception, without the traceback | ||
| 1532 | cache.set(lookup, pe.__class__(*pe.args)) | ||
| 1533 | raise | ||
| 1534 | else: | ||
| 1535 | cache.set(lookup, (value[0], value[1].copy())) | ||
| 1536 | return value | ||
| 1537 | else: | ||
| 1538 | ParserElement.packrat_cache_stats[HIT] += 1 | ||
| 1539 | if isinstance(value, Exception): | ||
| 1540 | raise value | ||
| 1541 | return (value[0], value[1].copy()) | ||
| 1542 | |||
| 1543 | _parse = _parseNoCache | ||
| 1544 | |||
| 1545 | @staticmethod | ||
| 1546 | def resetCache(): | ||
| 1547 | ParserElement.packrat_cache.clear() | ||
| 1548 | ParserElement.packrat_cache_stats[:] = [0] * len(ParserElement.packrat_cache_stats) | ||
| 1549 | |||
| 1550 | _packratEnabled = False | ||
| 1551 | @staticmethod | ||
| 1552 | def enablePackrat(cache_size_limit=128): | ||
| 1553 | """Enables "packrat" parsing, which adds memoizing to the parsing logic. | ||
| 1554 | Repeated parse attempts at the same string location (which happens | ||
| 1555 | often in many complex grammars) can immediately return a cached value, | ||
| 1556 | instead of re-executing parsing/validating code. Memoizing is done for | ||
| 1557 | both valid results and parsing exceptions. | ||
| 1558 | |||
| 1559 | Parameters: | ||
| 1560 | - cache_size_limit - (default=C{128}) - if an integer value is provided | ||
| 1561 | will limit the size of the packrat cache; if None is passed, then | ||
| 1562 | the cache size will be unbounded; if 0 is passed, the cache will | ||
| 1563 | be effectively disabled. | ||
| 1564 | |||
| 1565 | This speedup may break existing programs that use parse actions that | ||
| 1566 | have side-effects. For this reason, packrat parsing is disabled when | ||
| 1567 | you first import pyparsing. To activate the packrat feature, your | ||
| 1568 | program must call the class method C{ParserElement.enablePackrat()}. If | ||
| 1569 | your program uses C{psyco} to "compile as you go", you must call | ||
| 1570 | C{enablePackrat} before calling C{psyco.full()}. If you do not do this, | ||
| 1571 | Python will crash. For best results, call C{enablePackrat()} immediately | ||
| 1572 | after importing pyparsing. | ||
| 1573 | |||
| 1574 | Example:: | ||
| 1575 | from pip._vendor import pyparsing | ||
| 1576 | pyparsing.ParserElement.enablePackrat() | ||
| 1577 | """ | ||
| 1578 | if not ParserElement._packratEnabled: | ||
| 1579 | ParserElement._packratEnabled = True | ||
| 1580 | if cache_size_limit is None: | ||
| 1581 | ParserElement.packrat_cache = ParserElement._UnboundedCache() | ||
| 1582 | else: | ||
| 1583 | ParserElement.packrat_cache = ParserElement._FifoCache(cache_size_limit) | ||
| 1584 | ParserElement._parse = ParserElement._parseCache | ||
| 1585 | |||
| 1586 | def parseString( self, instring, parseAll=False ): | ||
| 1587 | """ | ||
| 1588 | Execute the parse expression with the given string. | ||
| 1589 | This is the main interface to the client code, once the complete | ||
| 1590 | expression has been built. | ||
| 1591 | |||
| 1592 | If you want the grammar to require that the entire input string be | ||
| 1593 | successfully parsed, then set C{parseAll} to True (equivalent to ending | ||
| 1594 | the grammar with C{L{StringEnd()}}). | ||
| 1595 | |||
| 1596 | Note: C{parseString} implicitly calls C{expandtabs()} on the input string, | ||
| 1597 | in order to report proper column numbers in parse actions. | ||
| 1598 | If the input string contains tabs and | ||
| 1599 | the grammar uses parse actions that use the C{loc} argument to index into the | ||
| 1600 | string being parsed, you can ensure you have a consistent view of the input | ||
| 1601 | string by: | ||
| 1602 | - calling C{parseWithTabs} on your grammar before calling C{parseString} | ||
| 1603 | (see L{I{parseWithTabs}<parseWithTabs>}) | ||
| 1604 | - define your parse action using the full C{(s,loc,toks)} signature, and | ||
| 1605 | reference the input string using the parse action's C{s} argument | ||
| 1606 | - explicitly expand the tabs in your input string before calling | ||
| 1607 | C{parseString} | ||
| 1608 | |||
| 1609 | Example:: | ||
| 1610 | Word('a').parseString('aaaaabaaa') # -> ['aaaaa'] | ||
| 1611 | Word('a').parseString('aaaaabaaa', parseAll=True) # -> Exception: Expected end of text | ||
| 1612 | """ | ||
| 1613 | ParserElement.resetCache() | ||
| 1614 | if not self.streamlined: | ||
| 1615 | self.streamline() | ||
| 1616 | #~ self.saveAsList = True | ||
| 1617 | for e in self.ignoreExprs: | ||
| 1618 | e.streamline() | ||
| 1619 | if not self.keepTabs: | ||
| 1620 | instring = instring.expandtabs() | ||
| 1621 | try: | ||
| 1622 | loc, tokens = self._parse( instring, 0 ) | ||
| 1623 | if parseAll: | ||
| 1624 | loc = self.preParse( instring, loc ) | ||
| 1625 | se = Empty() + StringEnd() | ||
| 1626 | se._parse( instring, loc ) | ||
| 1627 | except ParseBaseException as exc: | ||
| 1628 | if ParserElement.verbose_stacktrace: | ||
| 1629 | raise | ||
| 1630 | else: | ||
| 1631 | # catch and re-raise exception from here, clears out pyparsing internal stack trace | ||
| 1632 | raise exc | ||
| 1633 | else: | ||
| 1634 | return tokens | ||
| 1635 | |||
| 1636 | def scanString( self, instring, maxMatches=_MAX_INT, overlap=False ): | ||
| 1637 | """ | ||
| 1638 | Scan the input string for expression matches. Each match will return the | ||
| 1639 | matching tokens, start location, and end location. May be called with optional | ||
| 1640 | C{maxMatches} argument, to clip scanning after 'n' matches are found. If | ||
| 1641 | C{overlap} is specified, then overlapping matches will be reported. | ||
| 1642 | |||
| 1643 | Note that the start and end locations are reported relative to the string | ||
| 1644 | being parsed. See L{I{parseString}<parseString>} for more information on parsing | ||
| 1645 | strings with embedded tabs. | ||
| 1646 | |||
| 1647 | Example:: | ||
| 1648 | source = "sldjf123lsdjjkf345sldkjf879lkjsfd987" | ||
| 1649 | print(source) | ||
| 1650 | for tokens,start,end in Word(alphas).scanString(source): | ||
| 1651 | print(' '*start + '^'*(end-start)) | ||
| 1652 | print(' '*start + tokens[0]) | ||
| 1653 | |||
| 1654 | prints:: | ||
| 1655 | |||
| 1656 | sldjf123lsdjjkf345sldkjf879lkjsfd987 | ||
| 1657 | ^^^^^ | ||
| 1658 | sldjf | ||
| 1659 | ^^^^^^^ | ||
| 1660 | lsdjjkf | ||
| 1661 | ^^^^^^ | ||
| 1662 | sldkjf | ||
| 1663 | ^^^^^^ | ||
| 1664 | lkjsfd | ||
| 1665 | """ | ||
| 1666 | if not self.streamlined: | ||
| 1667 | self.streamline() | ||
| 1668 | for e in self.ignoreExprs: | ||
| 1669 | e.streamline() | ||
| 1670 | |||
| 1671 | if not self.keepTabs: | ||
| 1672 | instring = _ustr(instring).expandtabs() | ||
| 1673 | instrlen = len(instring) | ||
| 1674 | loc = 0 | ||
| 1675 | preparseFn = self.preParse | ||
| 1676 | parseFn = self._parse | ||
| 1677 | ParserElement.resetCache() | ||
| 1678 | matches = 0 | ||
| 1679 | try: | ||
| 1680 | while loc <= instrlen and matches < maxMatches: | ||
| 1681 | try: | ||
| 1682 | preloc = preparseFn( instring, loc ) | ||
| 1683 | nextLoc,tokens = parseFn( instring, preloc, callPreParse=False ) | ||
| 1684 | except ParseException: | ||
| 1685 | loc = preloc+1 | ||
| 1686 | else: | ||
| 1687 | if nextLoc > loc: | ||
| 1688 | matches += 1 | ||
| 1689 | yield tokens, preloc, nextLoc | ||
| 1690 | if overlap: | ||
| 1691 | nextloc = preparseFn( instring, loc ) | ||
| 1692 | if nextloc > loc: | ||
| 1693 | loc = nextLoc | ||
| 1694 | else: | ||
| 1695 | loc += 1 | ||
| 1696 | else: | ||
| 1697 | loc = nextLoc | ||
| 1698 | else: | ||
| 1699 | loc = preloc+1 | ||
| 1700 | except ParseBaseException as exc: | ||
| 1701 | if ParserElement.verbose_stacktrace: | ||
| 1702 | raise | ||
| 1703 | else: | ||
| 1704 | # catch and re-raise exception from here, clears out pyparsing internal stack trace | ||
| 1705 | raise exc | ||
| 1706 | |||
| 1707 | def transformString( self, instring ): | ||
| 1708 | """ | ||
| 1709 | Extension to C{L{scanString}}, to modify matching text with modified tokens that may | ||
| 1710 | be returned from a parse action. To use C{transformString}, define a grammar and | ||
| 1711 | attach a parse action to it that modifies the returned token list. | ||
| 1712 | Invoking C{transformString()} on a target string will then scan for matches, | ||
| 1713 | and replace the matched text patterns according to the logic in the parse | ||
| 1714 | action. C{transformString()} returns the resulting transformed string. | ||
| 1715 | |||
| 1716 | Example:: | ||
| 1717 | wd = Word(alphas) | ||
| 1718 | wd.setParseAction(lambda toks: toks[0].title()) | ||
| 1719 | |||
| 1720 | print(wd.transformString("now is the winter of our discontent made glorious summer by this sun of york.")) | ||
| 1721 | Prints:: | ||
| 1722 | Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York. | ||
| 1723 | """ | ||
| 1724 | out = [] | ||
| 1725 | lastE = 0 | ||
| 1726 | # force preservation of <TAB>s, to minimize unwanted transformation of string, and to | ||
| 1727 | # keep string locs straight between transformString and scanString | ||
| 1728 | self.keepTabs = True | ||
| 1729 | try: | ||
| 1730 | for t,s,e in self.scanString( instring ): | ||
| 1731 | out.append( instring[lastE:s] ) | ||
| 1732 | if t: | ||
| 1733 | if isinstance(t,ParseResults): | ||
| 1734 | out += t.asList() | ||
| 1735 | elif isinstance(t,list): | ||
| 1736 | out += t | ||
| 1737 | else: | ||
| 1738 | out.append(t) | ||
| 1739 | lastE = e | ||
| 1740 | out.append(instring[lastE:]) | ||
| 1741 | out = [o for o in out if o] | ||
| 1742 | return "".join(map(_ustr,_flatten(out))) | ||
| 1743 | except ParseBaseException as exc: | ||
| 1744 | if ParserElement.verbose_stacktrace: | ||
| 1745 | raise | ||
| 1746 | else: | ||
| 1747 | # catch and re-raise exception from here, clears out pyparsing internal stack trace | ||
| 1748 | raise exc | ||
| 1749 | |||
| 1750 | def searchString( self, instring, maxMatches=_MAX_INT ): | ||
| 1751 | """ | ||
| 1752 | Another extension to C{L{scanString}}, simplifying the access to the tokens found | ||
| 1753 | to match the given parse expression. May be called with optional | ||
| 1754 | C{maxMatches} argument, to clip searching after 'n' matches are found. | ||
| 1755 | |||
| 1756 | Example:: | ||
| 1757 | # a capitalized word starts with an uppercase letter, followed by zero or more lowercase letters | ||
| 1758 | cap_word = Word(alphas.upper(), alphas.lower()) | ||
| 1759 | |||
| 1760 | print(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity")) | ||
| 1761 | |||
| 1762 | # the sum() builtin can be used to merge results into a single ParseResults object | ||
| 1763 | print(sum(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity"))) | ||
| 1764 | prints:: | ||
| 1765 | [['More'], ['Iron'], ['Lead'], ['Gold'], ['I'], ['Electricity']] | ||
| 1766 | ['More', 'Iron', 'Lead', 'Gold', 'I', 'Electricity'] | ||
| 1767 | """ | ||
| 1768 | try: | ||
| 1769 | return ParseResults([ t for t,s,e in self.scanString( instring, maxMatches ) ]) | ||
| 1770 | except ParseBaseException as exc: | ||
| 1771 | if ParserElement.verbose_stacktrace: | ||
| 1772 | raise | ||
| 1773 | else: | ||
| 1774 | # catch and re-raise exception from here, clears out pyparsing internal stack trace | ||
| 1775 | raise exc | ||
| 1776 | |||
| 1777 | def split(self, instring, maxsplit=_MAX_INT, includeSeparators=False): | ||
| 1778 | """ | ||
| 1779 | Generator method to split a string using the given expression as a separator. | ||
| 1780 | May be called with optional C{maxsplit} argument, to limit the number of splits; | ||
| 1781 | and the optional C{includeSeparators} argument (default=C{False}), indicating whether the separating | ||
| 1782 | matched text should be included in the split results. | ||
| 1783 | |||
| 1784 | Example:: | ||
| 1785 | punc = oneOf(list(".,;:/-!?")) | ||
| 1786 | print(list(punc.split("This, this?, this sentence, is badly punctuated!"))) | ||
| 1787 | prints:: | ||
| 1788 | ['This', ' this', '', ' this sentence', ' is badly punctuated', ''] | ||
| 1789 | """ | ||
| 1790 | splits = 0 | ||
| 1791 | last = 0 | ||
| 1792 | for t,s,e in self.scanString(instring, maxMatches=maxsplit): | ||
| 1793 | yield instring[last:s] | ||
| 1794 | if includeSeparators: | ||
| 1795 | yield t[0] | ||
| 1796 | last = e | ||
| 1797 | yield instring[last:] | ||
| 1798 | |||
| 1799 | def __add__(self, other ): | ||
| 1800 | """ | ||
| 1801 | Implementation of + operator - returns C{L{And}}. Adding strings to a ParserElement | ||
| 1802 | converts them to L{Literal}s by default. | ||
| 1803 | |||
| 1804 | Example:: | ||
| 1805 | greet = Word(alphas) + "," + Word(alphas) + "!" | ||
| 1806 | hello = "Hello, World!" | ||
| 1807 | print (hello, "->", greet.parseString(hello)) | ||
| 1808 | Prints:: | ||
| 1809 | Hello, World! -> ['Hello', ',', 'World', '!'] | ||
| 1810 | """ | ||
| 1811 | if isinstance( other, basestring ): | ||
| 1812 | other = ParserElement._literalStringClass( other ) | ||
| 1813 | if not isinstance( other, ParserElement ): | ||
| 1814 | warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), | ||
| 1815 | SyntaxWarning, stacklevel=2) | ||
| 1816 | return None | ||
| 1817 | return And( [ self, other ] ) | ||
| 1818 | |||
| 1819 | def __radd__(self, other ): | ||
| 1820 | """ | ||
| 1821 | Implementation of + operator when left operand is not a C{L{ParserElement}} | ||
| 1822 | """ | ||
| 1823 | if isinstance( other, basestring ): | ||
| 1824 | other = ParserElement._literalStringClass( other ) | ||
| 1825 | if not isinstance( other, ParserElement ): | ||
| 1826 | warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), | ||
| 1827 | SyntaxWarning, stacklevel=2) | ||
| 1828 | return None | ||
| 1829 | return other + self | ||
| 1830 | |||
| 1831 | def __sub__(self, other): | ||
| 1832 | """ | ||
| 1833 | Implementation of - operator, returns C{L{And}} with error stop | ||
| 1834 | """ | ||
| 1835 | if isinstance( other, basestring ): | ||
| 1836 | other = ParserElement._literalStringClass( other ) | ||
| 1837 | if not isinstance( other, ParserElement ): | ||
| 1838 | warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), | ||
| 1839 | SyntaxWarning, stacklevel=2) | ||
| 1840 | return None | ||
| 1841 | return self + And._ErrorStop() + other | ||
| 1842 | |||
| 1843 | def __rsub__(self, other ): | ||
| 1844 | """ | ||
| 1845 | Implementation of - operator when left operand is not a C{L{ParserElement}} | ||
| 1846 | """ | ||
| 1847 | if isinstance( other, basestring ): | ||
| 1848 | other = ParserElement._literalStringClass( other ) | ||
| 1849 | if not isinstance( other, ParserElement ): | ||
| 1850 | warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), | ||
| 1851 | SyntaxWarning, stacklevel=2) | ||
| 1852 | return None | ||
| 1853 | return other - self | ||
| 1854 | |||
| 1855 | def __mul__(self,other): | ||
| 1856 | """ | ||
| 1857 | Implementation of * operator, allows use of C{expr * 3} in place of | ||
| 1858 | C{expr + expr + expr}. Expressions may also be multiplied by a 2-integer | ||
| 1859 | tuple, similar to C{{min,max}} multipliers in regular expressions. Tuples | ||
| 1860 | may also include C{None} as in: | ||
| 1861 | - C{expr*(n,None)} or C{expr*(n,)} is equivalent | ||
| 1862 | to C{expr*n + L{ZeroOrMore}(expr)} | ||
| 1863 | (read as "at least n instances of C{expr}") | ||
| 1864 | - C{expr*(None,n)} is equivalent to C{expr*(0,n)} | ||
| 1865 | (read as "0 to n instances of C{expr}") | ||
| 1866 | - C{expr*(None,None)} is equivalent to C{L{ZeroOrMore}(expr)} | ||
| 1867 | - C{expr*(1,None)} is equivalent to C{L{OneOrMore}(expr)} | ||
| 1868 | |||
| 1869 | Note that C{expr*(None,n)} does not raise an exception if | ||
| 1870 | more than n exprs exist in the input stream; that is, | ||
| 1871 | C{expr*(None,n)} does not enforce a maximum number of expr | ||
| 1872 | occurrences. If this behavior is desired, then write | ||
| 1873 | C{expr*(None,n) + ~expr} | ||
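| | |||
| | Example (illustrative):: | ||
| | Word(nums) * 3 # equivalent to Word(nums) + Word(nums) + Word(nums) | ||
| | Word(nums) * (1,3) # match 1, 2, or 3 integer strings | ||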
| 1874 | """ | ||
| 1875 | if isinstance(other,int): | ||
| 1876 | minElements, optElements = other,0 | ||
| 1877 | elif isinstance(other,tuple): | ||
| 1878 | other = (other + (None, None))[:2] | ||
| 1879 | if other[0] is None: | ||
| 1880 | other = (0, other[1]) | ||
| 1881 | if isinstance(other[0],int) and other[1] is None: | ||
| 1882 | if other[0] == 0: | ||
| 1883 | return ZeroOrMore(self) | ||
| 1884 | if other[0] == 1: | ||
| 1885 | return OneOrMore(self) | ||
| 1886 | else: | ||
| 1887 | return self*other[0] + ZeroOrMore(self) | ||
| 1888 | elif isinstance(other[0],int) and isinstance(other[1],int): | ||
| 1889 | minElements, optElements = other | ||
| 1890 | optElements -= minElements | ||
| 1891 | else: | ||
| 1892 | raise TypeError("cannot multiply 'ParserElement' and ('%s','%s') objects" % (type(other[0]),type(other[1]))) | ||
| 1893 | else: | ||
| 1894 | raise TypeError("cannot multiply 'ParserElement' and '%s' objects" % type(other)) | ||
| 1895 | |||
| 1896 | if minElements < 0: | ||
| 1897 | raise ValueError("cannot multiply ParserElement by negative value") | ||
| 1898 | if optElements < 0: | ||
| 1899 | raise ValueError("second tuple value must be greater than or equal to first tuple value") | ||
| 1900 | if minElements == optElements == 0: | ||
| 1901 | raise ValueError("cannot multiply ParserElement by 0 or (0,0)") | ||
| 1902 | |||
| 1903 | if (optElements): | ||
| 1904 | def makeOptionalList(n): | ||
| 1905 | if n>1: | ||
| 1906 | return Optional(self + makeOptionalList(n-1)) | ||
| 1907 | else: | ||
| 1908 | return Optional(self) | ||
| 1909 | if minElements: | ||
| 1910 | if minElements == 1: | ||
| 1911 | ret = self + makeOptionalList(optElements) | ||
| 1912 | else: | ||
| 1913 | ret = And([self]*minElements) + makeOptionalList(optElements) | ||
| 1914 | else: | ||
| 1915 | ret = makeOptionalList(optElements) | ||
| 1916 | else: | ||
| 1917 | if minElements == 1: | ||
| 1918 | ret = self | ||
| 1919 | else: | ||
| 1920 | ret = And([self]*minElements) | ||
| 1921 | return ret | ||
| 1922 | |||
| 1923 | def __rmul__(self, other): | ||
| 1924 | return self.__mul__(other) | ||
| 1925 | |||
| 1926 | def __or__(self, other ): | ||
| 1927 | """ | ||
| 1928 | Implementation of | operator - returns C{L{MatchFirst}} | ||
| 1929 | """ | ||
| 1930 | if isinstance( other, basestring ): | ||
| 1931 | other = ParserElement._literalStringClass( other ) | ||
| 1932 | if not isinstance( other, ParserElement ): | ||
| 1933 | warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), | ||
| 1934 | SyntaxWarning, stacklevel=2) | ||
| 1935 | return None | ||
| 1936 | return MatchFirst( [ self, other ] ) | ||
| 1937 | |||
| 1938 | def __ror__(self, other ): | ||
| 1939 | """ | ||
| 1940 | Implementation of | operator when left operand is not a C{L{ParserElement}} | ||
| 1941 | """ | ||
| 1942 | if isinstance( other, basestring ): | ||
| 1943 | other = ParserElement._literalStringClass( other ) | ||
| 1944 | if not isinstance( other, ParserElement ): | ||
| 1945 | warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), | ||
| 1946 | SyntaxWarning, stacklevel=2) | ||
| 1947 | return None | ||
| 1948 | return other | self | ||
| 1949 | |||
| 1950 | def __xor__(self, other ): | ||
| 1951 | """ | ||
| 1952 | Implementation of ^ operator - returns C{L{Or}} | ||
| 1953 | """ | ||
| 1954 | if isinstance( other, basestring ): | ||
| 1955 | other = ParserElement._literalStringClass( other ) | ||
| 1956 | if not isinstance( other, ParserElement ): | ||
| 1957 | warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), | ||
| 1958 | SyntaxWarning, stacklevel=2) | ||
| 1959 | return None | ||
| 1960 | return Or( [ self, other ] ) | ||
| 1961 | |||
| 1962 | def __rxor__(self, other ): | ||
| 1963 | """ | ||
| 1964 | Implementation of ^ operator when left operand is not a C{L{ParserElement}} | ||
| 1965 | """ | ||
| 1966 | if isinstance( other, basestring ): | ||
| 1967 | other = ParserElement._literalStringClass( other ) | ||
| 1968 | if not isinstance( other, ParserElement ): | ||
| 1969 | warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), | ||
| 1970 | SyntaxWarning, stacklevel=2) | ||
| 1971 | return None | ||
| 1972 | return other ^ self | ||
| 1973 | |||
| 1974 | def __and__(self, other ): | ||
| 1975 | """ | ||
| 1976 | Implementation of & operator - returns C{L{Each}} | ||
| 1977 | """ | ||
| 1978 | if isinstance( other, basestring ): | ||
| 1979 | other = ParserElement._literalStringClass( other ) | ||
| 1980 | if not isinstance( other, ParserElement ): | ||
| 1981 | warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), | ||
| 1982 | SyntaxWarning, stacklevel=2) | ||
| 1983 | return None | ||
| 1984 | return Each( [ self, other ] ) | ||
| 1985 | |||
| 1986 | def __rand__(self, other ): | ||
| 1987 | """ | ||
| 1988 | Implementation of & operator when left operand is not a C{L{ParserElement}} | ||
| 1989 | """ | ||
| 1990 | if isinstance( other, basestring ): | ||
| 1991 | other = ParserElement._literalStringClass( other ) | ||
| 1992 | if not isinstance( other, ParserElement ): | ||
| 1993 | warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), | ||
| 1994 | SyntaxWarning, stacklevel=2) | ||
| 1995 | return None | ||
| 1996 | return other & self | ||
| 1997 | |||
| 1998 | def __invert__( self ): | ||
| 1999 | """ | ||
| 2000 | Implementation of ~ operator - returns C{L{NotAny}} | ||
| 2001 | """ | ||
| 2002 | return NotAny( self ) | ||
| 2003 | |||
| 2004 | def __call__(self, name=None): | ||
| 2005 | """ | ||
| 2006 | Shortcut for C{L{setResultsName}}, with C{listAllMatches=False}. | ||
| 2007 | |||
| 2008 | If C{name} is given with a trailing C{'*'} character, then C{listAllMatches} will be | ||
| 2009 | passed as C{True}. | ||
| 2010 | |||
| 2011 | If C{name} is omitted, same as calling C{L{copy}}. | ||
| 2012 | |||
| 2013 | Example:: | ||
| 2014 | # these are equivalent | ||
| 2015 | userdata = Word(alphas).setResultsName("name") + Word(nums+"-").setResultsName("socsecno") | ||
| 2016 | userdata = Word(alphas)("name") + Word(nums+"-")("socsecno") | ||
| 2017 | """ | ||
| 2018 | if name is not None: | ||
| 2019 | return self.setResultsName(name) | ||
| 2020 | else: | ||
| 2021 | return self.copy() | ||
| 2022 | |||
| 2023 | def suppress( self ): | ||
| 2024 | """ | ||
| 2025 | Suppresses the output of this C{ParserElement}; useful to keep punctuation from | ||
| 2026 | cluttering up returned output. | ||
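| | |||
| | Example (illustrative):: | ||
| | wd = Word(alphas) | ||
| | (wd + ',' + wd).parseString("Hello, World") # -> ['Hello', ',', 'World'] | ||
| | (wd + Literal(',').suppress() + wd).parseString("Hello, World") # -> ['Hello', 'World'] | ||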
| 2027 | """ | ||
| 2028 | return Suppress( self ) | ||
| 2029 | |||
| 2030 | def leaveWhitespace( self ): | ||
| 2031 | """ | ||
| 2032 | Disables the skipping of whitespace before matching the characters in the | ||
| 2033 | C{ParserElement}'s defined pattern. This is normally only used internally by | ||
| 2034 | the pyparsing module, but may be needed in some whitespace-sensitive grammars. | ||
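| | |||
| | Example (illustrative):: | ||
| | (Literal('a') + Literal('b').leaveWhitespace()).parseString("ab") # -> ['a', 'b'] | ||
| | (Literal('a') + Literal('b').leaveWhitespace()).parseString("a b") # -> raises ParseException | ||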
| 2035 | """ | ||
| 2036 | self.skipWhitespace = False | ||
| 2037 | return self | ||
| 2038 | |||
| 2039 | def setWhitespaceChars( self, chars ): | ||
| 2040 | """ | ||
| 2041 | Overrides the default whitespace chars | ||
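| | |||
| | Example (illustrative):: | ||
| | # skip only spaces and tabs, making newlines significant for this element | ||
| | word = Word(alphas).setWhitespaceChars(" \t") | ||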
| 2042 | """ | ||
| 2043 | self.skipWhitespace = True | ||
| 2044 | self.whiteChars = chars | ||
| 2045 | self.copyDefaultWhiteChars = False | ||
| 2046 | return self | ||
| 2047 | |||
| 2048 | def parseWithTabs( self ): | ||
| 2049 | """ | ||
| 2050 | Overrides the default behavior of expanding C{<TAB>}s to spaces before parsing the input string. | ||
| 2051 | Must be called before C{parseString} when the input grammar contains elements that | ||
| 2052 | match C{<TAB>} characters. | ||
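| | |||
| | Example (an illustrative sketch, using L{White} so the tab can be matched):: | ||
| | tabbed = Word(alphas) + White("\t") + Word(alphas) | ||
| | tabbed.parseWithTabs().parseString("abc\tdef") # -> ['abc', '\t', 'def'] | ||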
| 2053 | """ | ||
| 2054 | self.keepTabs = True | ||
| 2055 | return self | ||
| 2056 | |||
| 2057 | def ignore( self, other ): | ||
| 2058 | """ | ||
| 2059 | Define expression to be ignored (e.g., comments) while doing pattern | ||
| 2060 | matching; may be called repeatedly, to define multiple comment or other | ||
| 2061 | ignorable patterns. | ||
| 2062 | |||
| 2063 | Example:: | ||
| 2064 | patt = OneOrMore(Word(alphas)) | ||
| 2065 | patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj'] | ||
| 2066 | |||
| 2067 | patt.ignore(cStyleComment) | ||
| 2068 | patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj', 'lskjd'] | ||
| 2069 | """ | ||
| 2070 | if isinstance(other, basestring): | ||
| 2071 | other = Suppress(other) | ||
| 2072 | |||
| 2073 | if isinstance( other, Suppress ): | ||
| 2074 | if other not in self.ignoreExprs: | ||
| 2075 | self.ignoreExprs.append(other) | ||
| 2076 | else: | ||
| 2077 | self.ignoreExprs.append( Suppress( other.copy() ) ) | ||
| 2078 | return self | ||
| 2079 | |||
| 2080 | def setDebugActions( self, startAction, successAction, exceptionAction ): | ||
| 2081 | """ | ||
| 2082 | Enable display of debugging messages while doing pattern matching. | ||
| 2083 | """ | ||
| 2084 | self.debugActions = (startAction or _defaultStartDebugAction, | ||
| 2085 | successAction or _defaultSuccessDebugAction, | ||
| 2086 | exceptionAction or _defaultExceptionDebugAction) | ||
| 2087 | self.debug = True | ||
| 2088 | return self | ||
| 2089 | |||
| 2090 | def setDebug( self, flag=True ): | ||
| 2091 | """ | ||
| 2092 | Enable display of debugging messages while doing pattern matching. | ||
| 2093 | Set C{flag} to True to enable, False to disable. | ||
| 2094 | |||
| 2095 | Example:: | ||
| 2096 | wd = Word(alphas).setName("alphaword") | ||
| 2097 | integer = Word(nums).setName("numword") | ||
| 2098 | term = wd | integer | ||
| 2099 | |||
| 2100 | # turn on debugging for wd | ||
| 2101 | wd.setDebug() | ||
| 2102 | |||
| 2103 | OneOrMore(term).parseString("abc 123 xyz 890") | ||
| 2104 | |||
| 2105 | prints:: | ||
| 2106 | Match alphaword at loc 0(1,1) | ||
| 2107 | Matched alphaword -> ['abc'] | ||
| 2108 | Match alphaword at loc 3(1,4) | ||
| 2109 | Exception raised:Expected alphaword (at char 4), (line:1, col:5) | ||
| 2110 | Match alphaword at loc 7(1,8) | ||
| 2111 | Matched alphaword -> ['xyz'] | ||
| 2112 | Match alphaword at loc 11(1,12) | ||
| 2113 | Exception raised:Expected alphaword (at char 12), (line:1, col:13) | ||
| 2114 | Match alphaword at loc 15(1,16) | ||
| 2115 | Exception raised:Expected alphaword (at char 15), (line:1, col:16) | ||
| 2116 | |||
| 2117 | The output shown is that produced by the default debug actions - custom debug actions can be | ||
| 2118 | specified using L{setDebugActions}. Prior to attempting | ||
| 2119 | to match the C{wd} expression, the debugging message C{"Match <exprname> at loc <n>(<line>,<col>)"} | ||
| 2120 | is shown. Then if the parse succeeds, a C{"Matched"} message is shown; if it fails, an C{"Exception raised"} | ||
| 2121 | message is shown. Also note the use of L{setName} to assign a human-readable name to the expression, | ||
| 2122 | which makes debugging and exception messages easier to understand - for instance, the default | ||
| 2123 | name created for the C{Word} expression without calling C{setName} is C{"W:(ABCD...)"}. | ||
| 2124 | """ | ||
| 2125 | if flag: | ||
| 2126 | self.setDebugActions( _defaultStartDebugAction, _defaultSuccessDebugAction, _defaultExceptionDebugAction ) | ||
| 2127 | else: | ||
| 2128 | self.debug = False | ||
| 2129 | return self | ||
| 2130 | |||
| 2131 | def __str__( self ): | ||
| 2132 | return self.name | ||
| 2133 | |||
| 2134 | def __repr__( self ): | ||
| 2135 | return _ustr(self) | ||
| 2136 | |||
| 2137 | def streamline( self ): | ||
| 2138 | self.streamlined = True | ||
| 2139 | self.strRepr = None | ||
| 2140 | return self | ||
| 2141 | |||
| 2142 | def checkRecursion( self, parseElementList ): | ||
| 2143 | pass | ||
| 2144 | |||
| 2145 | def validate( self, validateTrace=[] ): | ||
| 2146 | """ | ||
| 2147 | Check defined expressions for valid structure, check for infinite recursive definitions. | ||
| 2148 | """ | ||
| 2149 | self.checkRecursion( [] ) | ||
| 2150 | |||
| 2151 | def parseFile( self, file_or_filename, parseAll=False ): | ||
| 2152 | """ | ||
| 2153 | Execute the parse expression on the given file or filename. | ||
| 2154 | If a filename is specified (instead of a file object), | ||
| 2155 | the entire file is opened, read, and closed before parsing. | ||
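| | |||
| | Example (illustrative; 'numbers.txt' is a hypothetical file of integers):: | ||
| | numbers = OneOrMore(Word(nums)) | ||
| | results = numbers.parseFile("numbers.txt") | ||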
| 2156 | """ | ||
| 2157 | try: | ||
| 2158 | file_contents = file_or_filename.read() | ||
| 2159 | except AttributeError: | ||
| 2160 | with open(file_or_filename, "r") as f: | ||
| 2161 | file_contents = f.read() | ||
| 2162 | try: | ||
| 2163 | return self.parseString(file_contents, parseAll) | ||
| 2164 | except ParseBaseException as exc: | ||
| 2165 | if ParserElement.verbose_stacktrace: | ||
| 2166 | raise | ||
| 2167 | else: | ||
| 2168 | # catch and re-raise exception from here, clears out pyparsing internal stack trace | ||
| 2169 | raise exc | ||
| 2170 | |||
| 2171 | def __eq__(self,other): | ||
| 2172 | if isinstance(other, ParserElement): | ||
| 2173 | return self is other or vars(self) == vars(other) | ||
| 2174 | elif isinstance(other, basestring): | ||
| 2175 | return self.matches(other) | ||
| 2176 | else: | ||
| 2177 | return super(ParserElement,self)==other | ||
| 2178 | |||
| 2179 | def __ne__(self,other): | ||
| 2180 | return not (self == other) | ||
| 2181 | |||
| 2182 | def __hash__(self): | ||
| 2183 | return hash(id(self)) | ||
| 2184 | |||
| 2185 | def __req__(self,other): | ||
| 2186 | return self == other | ||
| 2187 | |||
| 2188 | def __rne__(self,other): | ||
| 2189 | return not (self == other) | ||
| 2190 | |||
| 2191 | def matches(self, testString, parseAll=True): | ||
| 2192 | """ | ||
| 2193 | Method for quick testing of a parser against a test string. Good for simple | ||
| 2194 | inline microtests of sub expressions while building up a larger parser. | ||
| 2195 | |||
| 2196 | Parameters: | ||
| 2197 | - testString - to test against this expression for a match | ||
| 2198 | - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests | ||
| 2199 | |||
| 2200 | Example:: | ||
| 2201 | expr = Word(nums) | ||
| 2202 | assert expr.matches("100") | ||
| 2203 | """ | ||
| 2204 | try: | ||
| 2205 | self.parseString(_ustr(testString), parseAll=parseAll) | ||
| 2206 | return True | ||
| 2207 | except ParseBaseException: | ||
| 2208 | return False | ||
| 2209 | |||
| 2210 | def runTests(self, tests, parseAll=True, comment='#', fullDump=True, printResults=True, failureTests=False): | ||
| 2211 | """ | ||
| 2212 | Execute the parse expression on a series of test strings, showing each | ||
| 2213 | test, the parsed results or where the parse failed. Quick and easy way to | ||
| 2214 | run a parse expression against a list of sample strings. | ||
| 2215 | |||
| 2216 | Parameters: | ||
| 2217 | - tests - a list of separate test strings, or a multiline string of test strings | ||
| 2218 | - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests | ||
| 2219 | - comment - (default=C{'#'}) - expression for indicating embedded comments in the test | ||
| 2220 | string; pass None to disable comment filtering | ||
| 2221 | - fullDump - (default=C{True}) - dump results as list followed by results names in nested outline; | ||
| 2222 | if False, only dump nested list | ||
| 2223 | - printResults - (default=C{True}) prints test output to stdout | ||
| 2224 | - failureTests - (default=C{False}) indicates if these tests are expected to fail parsing | ||
| 2225 | |||
| 2226 | Returns: a (success, results) tuple, where success indicates that all tests succeeded | ||
| 2227 | (or failed if C{failureTests} is True), and the results contain a list of lines of each | ||
| 2228 | test's output | ||
| 2229 | |||
| 2230 | Example:: | ||
| 2231 | number_expr = pyparsing_common.number.copy() | ||
| 2232 | |||
| 2233 | result = number_expr.runTests(''' | ||
| 2234 | # unsigned integer | ||
| 2235 | 100 | ||
| 2236 | # negative integer | ||
| 2237 | -100 | ||
| 2238 | # float with scientific notation | ||
| 2239 | 6.02e23 | ||
| 2240 | # integer with scientific notation | ||
| 2241 | 1e-12 | ||
| 2242 | ''') | ||
| 2243 | print("Success" if result[0] else "Failed!") | ||
| 2244 | |||
| 2245 | result = number_expr.runTests(''' | ||
| 2246 | # stray character | ||
| 2247 | 100Z | ||
| 2248 | # missing leading digit before '.' | ||
| 2249 | -.100 | ||
| 2250 | # too many '.' | ||
| 2251 | 3.14.159 | ||
| 2252 | ''', failureTests=True) | ||
| 2253 | print("Success" if result[0] else "Failed!") | ||
| 2254 | prints:: | ||
| 2255 | # unsigned integer | ||
| 2256 | 100 | ||
| 2257 | [100] | ||
| 2258 | |||
| 2259 | # negative integer | ||
| 2260 | -100 | ||
| 2261 | [-100] | ||
| 2262 | |||
| 2263 | # float with scientific notation | ||
| 2264 | 6.02e23 | ||
| 2265 | [6.02e+23] | ||
| 2266 | |||
| 2267 | # integer with scientific notation | ||
| 2268 | 1e-12 | ||
| 2269 | [1e-12] | ||
| 2270 | |||
| 2271 | Success | ||
| 2272 | |||
| 2273 | # stray character | ||
| 2274 | 100Z | ||
| 2275 | ^ | ||
| 2276 | FAIL: Expected end of text (at char 3), (line:1, col:4) | ||
| 2277 | |||
| 2278 | # missing leading digit before '.' | ||
| 2279 | -.100 | ||
| 2280 | ^ | ||
| 2281 | FAIL: Expected {real number with scientific notation | real number | signed integer} (at char 0), (line:1, col:1) | ||
| 2282 | |||
| 2283 | # too many '.' | ||
| 2284 | 3.14.159 | ||
| 2285 | ^ | ||
| 2286 | FAIL: Expected end of text (at char 4), (line:1, col:5) | ||
| 2287 | |||
| 2288 | Success | ||
| 2289 | |||
| 2290 | Each test string must be on a single line. If you want to test a string that spans multiple | ||
| 2291 | lines, create a test like this:: | ||
| 2292 | |||
| 2293 | expr.runTests(r"this is a test\\n of strings that spans \\n 3 lines") | ||
| 2294 | |||
| 2295 | (Note that this is a raw string literal; you must include the leading 'r'.) | ||
| 2296 | """ | ||
| 2297 | if isinstance(tests, basestring): | ||
| 2298 | tests = list(map(str.strip, tests.rstrip().splitlines())) | ||
| 2299 | if isinstance(comment, basestring): | ||
| 2300 | comment = Literal(comment) | ||
| 2301 | allResults = [] | ||
| 2302 | comments = [] | ||
| 2303 | success = True | ||
| 2304 | for t in tests: | ||
| 2305 | if (comment is not None and comment.matches(t, False)) or (comments and not t): | ||
| 2306 | comments.append(t) | ||
| 2307 | continue | ||
| 2308 | if not t: | ||
| 2309 | continue | ||
| 2310 | out = ['\n'.join(comments), t] | ||
| 2311 | comments = [] | ||
| 2312 | try: | ||
| 2313 | t = t.replace(r'\n','\n') | ||
| 2314 | result = self.parseString(t, parseAll=parseAll) | ||
| 2315 | out.append(result.dump(full=fullDump)) | ||
| 2316 | success = success and not failureTests | ||
| 2317 | except ParseBaseException as pe: | ||
| 2318 | fatal = "(FATAL)" if isinstance(pe, ParseFatalException) else "" | ||
| 2319 | if '\n' in t: | ||
| 2320 | out.append(line(pe.loc, t)) | ||
| 2321 | out.append(' '*(col(pe.loc,t)-1) + '^' + fatal) | ||
| 2322 | else: | ||
| 2323 | out.append(' '*pe.loc + '^' + fatal) | ||
| 2324 | out.append("FAIL: " + str(pe)) | ||
| 2325 | success = success and failureTests | ||
| 2326 | result = pe | ||
| 2327 | except Exception as exc: | ||
| 2328 | out.append("FAIL-EXCEPTION: " + str(exc)) | ||
| 2329 | success = success and failureTests | ||
| 2330 | result = exc | ||
| 2331 | |||
| 2332 | if printResults: | ||
| 2333 | if fullDump: | ||
| 2334 | out.append('') | ||
| 2335 | print('\n'.join(out)) | ||
| 2336 | |||
| 2337 | allResults.append((t, result)) | ||
| 2338 | |||
| 2339 | return success, allResults | ||
| 2340 | |||
| 2341 | |||
| 2342 | class Token(ParserElement): | ||
| 2343 | """ | ||
| 2344 | Abstract C{ParserElement} subclass, for defining atomic matching patterns. | ||
| 2345 | """ | ||
| 2346 | def __init__( self ): | ||
| 2347 | super(Token,self).__init__( savelist=False ) | ||
| 2348 | |||
| 2349 | |||
| 2350 | class Empty(Token): | ||
| 2351 | """ | ||
| 2352 | An empty token, will always match. | ||
| 2353 | """ | ||
| 2354 | def __init__( self ): | ||
| 2355 | super(Empty,self).__init__() | ||
| 2356 | self.name = "Empty" | ||
| 2357 | self.mayReturnEmpty = True | ||
| 2358 | self.mayIndexError = False | ||
| 2359 | |||
| 2360 | |||
| 2361 | class NoMatch(Token): | ||
| 2362 | """ | ||
| 2363 | A token that will never match. | ||
| 2364 | """ | ||
| 2365 | def __init__( self ): | ||
| 2366 | super(NoMatch,self).__init__() | ||
| 2367 | self.name = "NoMatch" | ||
| 2368 | self.mayReturnEmpty = True | ||
| 2369 | self.mayIndexError = False | ||
| 2370 | self.errmsg = "Unmatchable token" | ||
| 2371 | |||
| 2372 | def parseImpl( self, instring, loc, doActions=True ): | ||
| 2373 | raise ParseException(instring, loc, self.errmsg, self) | ||
| 2374 | |||
| 2375 | |||
| 2376 | class Literal(Token): | ||
| 2377 | """ | ||
| 2378 | Token to exactly match a specified string. | ||
| 2379 | |||
| 2380 | Example:: | ||
| 2381 | Literal('blah').parseString('blah') # -> ['blah'] | ||
| 2382 | Literal('blah').parseString('blahfooblah') # -> ['blah'] | ||
| 2383 | Literal('blah').parseString('bla') # -> Exception: Expected "blah" | ||
| 2384 | |||
| 2385 | For case-insensitive matching, use L{CaselessLiteral}. | ||
| 2386 | |||
| 2387 | For keyword matching (force word break before and after the matched string), | ||
| 2388 | use L{Keyword} or L{CaselessKeyword}. | ||
| 2389 | """ | ||
| 2390 | def __init__( self, matchString ): | ||
| 2391 | super(Literal,self).__init__() | ||
| 2392 | self.match = matchString | ||
| 2393 | self.matchLen = len(matchString) | ||
| 2394 | try: | ||
| 2395 | self.firstMatchChar = matchString[0] | ||
| 2396 | except IndexError: | ||
| 2397 | warnings.warn("null string passed to Literal; use Empty() instead", | ||
| 2398 | SyntaxWarning, stacklevel=2) | ||
| 2399 | self.__class__ = Empty | ||
| 2400 | self.name = '"%s"' % _ustr(self.match) | ||
| 2401 | self.errmsg = "Expected " + self.name | ||
| 2402 | self.mayReturnEmpty = False | ||
| 2403 | self.mayIndexError = False | ||
| 2404 | |||
| 2405 | # Performance tuning: this routine gets called a *lot* | ||
| 2406 | # if this is a single character match string and the first character matches, | ||
| 2407 | # short-circuit as quickly as possible, and avoid calling startswith | ||
| 2408 | #~ @profile | ||
| 2409 | def parseImpl( self, instring, loc, doActions=True ): | ||
| 2410 | if (instring[loc] == self.firstMatchChar and | ||
| 2411 | (self.matchLen==1 or instring.startswith(self.match,loc)) ): | ||
| 2412 | return loc+self.matchLen, self.match | ||
| 2413 | raise ParseException(instring, loc, self.errmsg, self) | ||
| 2414 | _L = Literal | ||
| 2415 | ParserElement._literalStringClass = Literal | ||
| 2416 | |||
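| | # --- Reviewer-added sketch (not part of upstream pyparsing); _demo_* is hypothetical. | ||
| | # Two corner cases coded in Literal.__init__/parseImpl above: an empty match string | ||
| | # turns the instance into an Empty (after a SyntaxWarning), and a one-character | ||
| | # literal only compares instring[loc], never calling startswith(). | ||
| | def _demo_literal_edge_cases(): | ||
| |     import warnings as _warnings | ||
| |     with _warnings.catch_warnings(): | ||
| |         _warnings.simplefilter("ignore", SyntaxWarning) | ||
| |         e = Literal("") | ||
| |     assert isinstance(e, Empty)              # __class__ was swapped in __init__ | ||
| |     assert Literal("/").parseString("/x")[0] == "/" | ||
| | |||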
| 2417 | class Keyword(Token): | ||
| 2418 | """ | ||
| 2419 | Token to exactly match a specified string as a keyword; that is, the match must not be | ||
| 2420 | immediately preceded or followed by a keyword (identifier) character. Compare with C{L{Literal}}: | ||
| 2421 | - C{Literal("if")} will match the leading C{'if'} in C{'ifAndOnlyIf'}. | ||
| 2422 | - C{Keyword("if")} will not; it will only match the leading C{'if'} in C{'if x=1'}, or C{'if(y==2)'} | ||
| 2423 | Accepts two optional constructor arguments in addition to the keyword string: | ||
| 2424 | - C{identChars} is a string of characters that would be valid identifier characters, | ||
| 2425 | defaulting to all alphanumerics + "_" and "$" | ||
| 2426 | - C{caseless} allows case-insensitive matching, default is C{False}. | ||
| 2427 | |||
| 2428 | Example:: | ||
| 2429 | Keyword("start").parseString("start") # -> ['start'] | ||
| 2430 | Keyword("start").parseString("starting") # -> Exception | ||
| 2431 | |||
| 2432 | For case-insensitive matching, use L{CaselessKeyword}. | ||
| 2433 | """ | ||
| 2434 | DEFAULT_KEYWORD_CHARS = alphanums+"_$" | ||
| 2435 | |||
| 2436 | def __init__( self, matchString, identChars=None, caseless=False ): | ||
| 2437 | super(Keyword,self).__init__() | ||
| 2438 | if identChars is None: | ||
| 2439 | identChars = Keyword.DEFAULT_KEYWORD_CHARS | ||
| 2440 | self.match = matchString | ||
| 2441 | self.matchLen = len(matchString) | ||
| 2442 | try: | ||
| 2443 | self.firstMatchChar = matchString[0] | ||
| 2444 | except IndexError: | ||
| 2445 | warnings.warn("null string passed to Keyword; use Empty() instead", | ||
| 2446 | SyntaxWarning, stacklevel=2) | ||
| 2447 | self.name = '"%s"' % self.match | ||
| 2448 | self.errmsg = "Expected " + self.name | ||
| 2449 | self.mayReturnEmpty = False | ||
| 2450 | self.mayIndexError = False | ||
| 2451 | self.caseless = caseless | ||
| 2452 | if caseless: | ||
| 2453 | self.caselessmatch = matchString.upper() | ||
| 2454 | identChars = identChars.upper() | ||
| 2455 | self.identChars = set(identChars) | ||
| 2456 | |||
| 2457 | def parseImpl( self, instring, loc, doActions=True ): | ||
| 2458 | if self.caseless: | ||
| 2459 | if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and | ||
| 2460 | (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) and | ||
| 2461 | (loc == 0 or instring[loc-1].upper() not in self.identChars) ): | ||
| 2462 | return loc+self.matchLen, self.match | ||
| 2463 | else: | ||
| 2464 | if (instring[loc] == self.firstMatchChar and | ||
| 2465 | (self.matchLen==1 or instring.startswith(self.match,loc)) and | ||
| 2466 | (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen] not in self.identChars) and | ||
| 2467 | (loc == 0 or instring[loc-1] not in self.identChars) ): | ||
| 2468 | return loc+self.matchLen, self.match | ||
| 2469 | raise ParseException(instring, loc, self.errmsg, self) | ||
| 2470 | |||
| 2471 | def copy(self): | ||
| 2472 | c = super(Keyword,self).copy() | ||
| 2473 | c.identChars = Keyword.DEFAULT_KEYWORD_CHARS | ||
| 2474 | return c | ||
| 2475 | |||
| 2476 | @staticmethod | ||
| 2477 | def setDefaultKeywordChars( chars ): | ||
| 2478 | """Overrides the default Keyword chars | ||
| 2479 | """ | ||
| 2480 | Keyword.DEFAULT_KEYWORD_CHARS = chars | ||
| 2481 | |||
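| | # --- Reviewer-added sketch (not part of upstream pyparsing); _demo_* is hypothetical. | ||
| | # The boundary tests in Keyword.parseImpl reject matches embedded in identifiers, | ||
| | # which plain Literal happily accepts. | ||
| | def _demo_keyword_vs_literal(): | ||
| |     assert Literal("if").parseString("ifAndOnlyIf")[0] == "if"   # matches mid-identifier | ||
| |     assert Keyword("if").parseString("if(y==2)")[0] == "if"      # '(' is not an identChar | ||
| |     try: | ||
| |         Keyword("if").parseString("ifAndOnlyIf") | ||
| |         assert False, "keyword must not match inside an identifier" | ||
| |     except ParseException: | ||
| |         pass | ||
| | |||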
| 2482 | class CaselessLiteral(Literal): | ||
| 2483 | """ | ||
| 2484 | Token to match a specified string, ignoring case of letters. | ||
| 2485 | Note: the matched results will always be in the case of the given | ||
| 2486 | match string, NOT the case of the input text. | ||
| 2487 | |||
| 2488 | Example:: | ||
| 2489 | OneOrMore(CaselessLiteral("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD', 'CMD'] | ||
| 2490 | |||
| 2491 | (Contrast with example for L{CaselessKeyword}.) | ||
| 2492 | """ | ||
| 2493 | def __init__( self, matchString ): | ||
| 2494 | super(CaselessLiteral,self).__init__( matchString.upper() ) | ||
| 2495 | # Preserve the defining literal. | ||
| 2496 | self.returnString = matchString | ||
| 2497 | self.name = "'%s'" % self.returnString | ||
| 2498 | self.errmsg = "Expected " + self.name | ||
| 2499 | |||
| 2500 | def parseImpl( self, instring, loc, doActions=True ): | ||
| 2501 | if instring[ loc:loc+self.matchLen ].upper() == self.match: | ||
| 2502 | return loc+self.matchLen, self.returnString | ||
| 2503 | raise ParseException(instring, loc, self.errmsg, self) | ||
| 2504 | |||
| 2505 | class CaselessKeyword(Keyword): | ||
| 2506 | """ | ||
| 2507 | Caseless version of L{Keyword}. | ||
| 2508 | |||
| 2509 | Example:: | ||
| 2510 | OneOrMore(CaselessKeyword("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD'] | ||
| 2511 | |||
| 2512 | (Contrast with example for L{CaselessLiteral}.) | ||
| 2513 | """ | ||
| 2514 | def __init__( self, matchString, identChars=None ): | ||
| 2515 | super(CaselessKeyword,self).__init__( matchString, identChars, caseless=True ) | ||
| 2516 | |||
| 2517 | def parseImpl( self, instring, loc, doActions=True ): | ||
| 2518 | if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and | ||
| 2519 | (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) ): | ||
| 2520 | return loc+self.matchLen, self.match | ||
| 2521 | raise ParseException(instring, loc, self.errmsg, self) | ||
| 2522 | |||
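| | # --- Reviewer-added sketch (not part of upstream pyparsing); _demo_* is hypothetical. | ||
| | # Both caseless classes return the *defining* string rather than the input text, and | ||
| | # CaselessKeyword adds the keyword boundary check, so 'Cmd10' matches only the literal. | ||
| | def _demo_caseless(): | ||
| |     data = "cmd CMD Cmd10" | ||
| |     assert OneOrMore(CaselessLiteral("CMD")).parseString(data).asList() == ['CMD', 'CMD', 'CMD'] | ||
| |     assert OneOrMore(CaselessKeyword("CMD")).parseString(data).asList() == ['CMD', 'CMD'] | ||
| | |||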
| 2523 | class CloseMatch(Token): | ||
| 2524 | """ | ||
| 2525 | A variation on L{Literal} which matches "close" matches, that is, | ||
| 2526 | strings with at most 'n' mismatching characters. C{CloseMatch} takes parameters: | ||
| 2527 | - C{match_string} - string to be matched | ||
| 2528 | - C{maxMismatches} - (C{default=1}) maximum number of mismatches allowed to count as a match | ||
| 2529 | |||
| 2530 | The results from a successful parse will contain the matched text from the input string and the following named results: | ||
| 2531 | - C{mismatches} - a list of the positions within the match_string where mismatches were found | ||
| 2532 | - C{original} - the original match_string used to compare against the input string | ||
| 2533 | |||
| 2534 | If C{mismatches} is an empty list, then the match was an exact match. | ||
| 2535 | |||
| 2536 | Example:: | ||
| 2537 | patt = CloseMatch("ATCATCGAATGGA") | ||
| 2538 | patt.parseString("ATCATCGAAXGGA") # -> (['ATCATCGAAXGGA'], {'mismatches': [[9]], 'original': ['ATCATCGAATGGA']}) | ||
| 2539 | patt.parseString("ATCAXCGAAXGGA") # -> Exception: Expected 'ATCATCGAATGGA' (with up to 1 mismatches) (at char 0), (line:1, col:1) | ||
| 2540 | |||
| 2541 | # exact match | ||
| 2542 | patt.parseString("ATCATCGAATGGA") # -> (['ATCATCGAATGGA'], {'mismatches': [[]], 'original': ['ATCATCGAATGGA']}) | ||
| 2543 | |||
| 2544 | # close match allowing up to 2 mismatches | ||
| 2545 | patt = CloseMatch("ATCATCGAATGGA", maxMismatches=2) | ||
| 2546 | patt.parseString("ATCAXCGAAXGGA") # -> (['ATCAXCGAAXGGA'], {'mismatches': [[4, 9]], 'original': ['ATCATCGAATGGA']}) | ||
| 2547 | """ | ||
| 2548 | def __init__(self, match_string, maxMismatches=1): | ||
| 2549 | super(CloseMatch,self).__init__() | ||
| 2550 | self.name = match_string | ||
| 2551 | self.match_string = match_string | ||
| 2552 | self.maxMismatches = maxMismatches | ||
| 2553 | self.errmsg = "Expected %r (with up to %d mismatches)" % (self.match_string, self.maxMismatches) | ||
| 2554 | self.mayIndexError = False | ||
| 2555 | self.mayReturnEmpty = False | ||
| 2556 | |||
| 2557 | def parseImpl( self, instring, loc, doActions=True ): | ||
| 2558 | start = loc | ||
| 2559 | instrlen = len(instring) | ||
| 2560 | maxloc = start + len(self.match_string) | ||
| 2561 | |||
| 2562 | if maxloc <= instrlen: | ||
| 2563 | match_string = self.match_string | ||
| 2564 | match_stringloc = 0 | ||
| 2565 | mismatches = [] | ||
| 2566 | maxMismatches = self.maxMismatches | ||
| 2567 | |||
| 2568 | for match_stringloc,s_m in enumerate(zip(instring[loc:maxloc], self.match_string)): | ||
| 2569 | src,mat = s_m | ||
| 2570 | if src != mat: | ||
| 2571 | mismatches.append(match_stringloc) | ||
| 2572 | if len(mismatches) > maxMismatches: | ||
| 2573 | break | ||
| 2574 | else: | ||
| 2575 | loc = start + match_stringloc + 1  # consumed text runs from start through the last compared char | ||
| 2576 | results = ParseResults([instring[start:loc]]) | ||
| 2577 | results['original'] = self.match_string | ||
| 2578 | results['mismatches'] = mismatches | ||
| 2579 | return loc, results | ||
| 2580 | |||
| 2581 | raise ParseException(instring, loc, self.errmsg, self) | ||
| 2582 | |||
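| | # --- Reviewer-added sketch (not part of upstream pyparsing); _demo_* is hypothetical. | ||
| | # The 'mismatches' named result lists the differing positions, so its length is the | ||
| | # Hamming distance between input and pattern, capped by maxMismatches (beyond which | ||
| | # the parse fails instead of matching). | ||
| | def _demo_close_match(): | ||
| |     patt = CloseMatch("ATCATCGAATGGA", maxMismatches=2) | ||
| |     result = patt.parseString("ATCAXCGAAXGGA") | ||
| |     assert list(result['mismatches']) == [4, 9] | ||
| |     assert result['original'] == "ATCATCGAATGGA" | ||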
| 2583 | |||
| 2584 | class Word(Token): | ||
| 2585 | """ | ||
| 2586 | Token for matching words composed of allowed character sets. | ||
| 2587 | Defined with a string containing all allowed initial characters, | ||
| 2588 | an optional string containing allowed body characters (if omitted, | ||
| 2589 | defaults to the initial character set), and an optional minimum, | ||
| 2590 | maximum, and/or exact length. The default value for C{min} is 1 (a | ||
| 2591 | minimum value < 1 is not valid); the default values for C{max} and C{exact} | ||
| 2592 | are 0, meaning no maximum or exact length restriction. An optional | ||
| 2593 | C{excludeChars} parameter can list characters that might be found in | ||
| 2594 | the input C{bodyChars} string; useful to define a word of all printables | ||
| 2595 | except for one or two characters, for instance. | ||
| 2596 | |||
| 2597 | L{srange} is useful for defining custom character set strings for defining | ||
| 2598 | C{Word} expressions, using range notation from regular expression character sets. | ||
| 2599 | |||
| 2600 | A common mistake is to use C{Word} to match a specific literal string, as in | ||
| 2601 | C{Word("Address")}. Remember that C{Word} uses the string argument to define | ||
| 2602 | I{sets} of matchable characters. This expression would match "Add", "AAA", | ||
| 2603 | "dAred", or any other word made up of the characters 'A', 'd', 'r', 'e', and 's'. | ||
| 2604 | To match an exact literal string, use L{Literal} or L{Keyword}. | ||
| 2605 | |||
| 2606 | pyparsing includes helper strings for building Words: | ||
| 2607 | - L{alphas} | ||
| 2608 | - L{nums} | ||
| 2609 | - L{alphanums} | ||
| 2610 | - L{hexnums} | ||
| 2611 | - L{alphas8bit} (alphabetic characters in ASCII range 128-255 - accented, tilded, umlauted, etc.) | ||
| 2612 | - L{punc8bit} (non-alphabetic characters in ASCII range 128-255 - currency, symbols, superscripts, diacriticals, etc.) | ||
| 2613 | - L{printables} (any non-whitespace character) | ||
| 2614 | |||
| 2615 | Example:: | ||
| 2616 | # a word composed of digits | ||
| 2617 | integer = Word(nums) # equivalent to Word("0123456789") or Word(srange("0-9")) | ||
| 2618 | |||
| 2619 | # a word with a leading capital, and zero or more lowercase | ||
| 2620 | capital_word = Word(alphas.upper(), alphas.lower()) | ||
| 2621 | |||
| 2622 | # hostnames are alphanumeric, with leading alpha, and '-' | ||
| 2623 | hostname = Word(alphas, alphanums+'-') | ||
| 2624 | |||
| 2625 | # roman numeral (not a strict parser, accepts invalid mix of characters) | ||
| 2626 | roman = Word("IVXLCDM") | ||
| 2627 | |||
| 2628 | # any string of non-whitespace characters, except for ',' | ||
| 2629 | csv_value = Word(printables, excludeChars=",") | ||
| 2630 | """ | ||
| 2631 | def __init__( self, initChars, bodyChars=None, min=1, max=0, exact=0, asKeyword=False, excludeChars=None ): | ||
| 2632 | super(Word,self).__init__() | ||
| 2633 | if excludeChars: | ||
| 2634 | initChars = ''.join(c for c in initChars if c not in excludeChars) | ||
| 2635 | if bodyChars: | ||
| 2636 | bodyChars = ''.join(c for c in bodyChars if c not in excludeChars) | ||
| 2637 | self.initCharsOrig = initChars | ||
| 2638 | self.initChars = set(initChars) | ||
| 2639 | if bodyChars : | ||
| 2640 | self.bodyCharsOrig = bodyChars | ||
| 2641 | self.bodyChars = set(bodyChars) | ||
| 2642 | else: | ||
| 2643 | self.bodyCharsOrig = initChars | ||
| 2644 | self.bodyChars = set(initChars) | ||
| 2645 | |||
| 2646 | self.maxSpecified = max > 0 | ||
| 2647 | |||
| 2648 | if min < 1: | ||
| 2649 | raise ValueError("cannot specify a minimum length < 1; use Optional(Word()) if zero-length word is permitted") | ||
| 2650 | |||
| 2651 | self.minLen = min | ||
| 2652 | |||
| 2653 | if max > 0: | ||
| 2654 | self.maxLen = max | ||
| 2655 | else: | ||
| 2656 | self.maxLen = _MAX_INT | ||
| 2657 | |||
| 2658 | if exact > 0: | ||
| 2659 | self.maxLen = exact | ||
| 2660 | self.minLen = exact | ||
| 2661 | |||
| 2662 | self.name = _ustr(self) | ||
| 2663 | self.errmsg = "Expected " + self.name | ||
| 2664 | self.mayIndexError = False | ||
| 2665 | self.asKeyword = asKeyword | ||
| 2666 | |||
| 2667 | if ' ' not in self.initCharsOrig+self.bodyCharsOrig and (min==1 and max==0 and exact==0): | ||
| 2668 | if self.bodyCharsOrig == self.initCharsOrig: | ||
| 2669 | self.reString = "[%s]+" % _escapeRegexRangeChars(self.initCharsOrig) | ||
| 2670 | elif len(self.initCharsOrig) == 1: | ||
| 2671 | self.reString = "%s[%s]*" % \ | ||
| 2672 | (re.escape(self.initCharsOrig), | ||
| 2673 | _escapeRegexRangeChars(self.bodyCharsOrig),) | ||
| 2674 | else: | ||
| 2675 | self.reString = "[%s][%s]*" % \ | ||
| 2676 | (_escapeRegexRangeChars(self.initCharsOrig), | ||
| 2677 | _escapeRegexRangeChars(self.bodyCharsOrig),) | ||
| 2678 | if self.asKeyword: | ||
| 2679 | self.reString = r"\b"+self.reString+r"\b" | ||
| 2680 | try: | ||
| 2681 | self.re = re.compile( self.reString ) | ||
| 2682 | except Exception: | ||
| 2683 | self.re = None | ||
| 2684 | |||
| 2685 | def parseImpl( self, instring, loc, doActions=True ): | ||
| 2686 | if self.re: | ||
| 2687 | result = self.re.match(instring,loc) | ||
| 2688 | if not result: | ||
| 2689 | raise ParseException(instring, loc, self.errmsg, self) | ||
| 2690 | |||
| 2691 | loc = result.end() | ||
| 2692 | return loc, result.group() | ||
| 2693 | |||
| 2694 | if instring[loc] not in self.initChars: | ||
| 2695 | raise ParseException(instring, loc, self.errmsg, self) | ||
| 2696 | |||
| 2697 | start = loc | ||
| 2698 | loc += 1 | ||
| 2699 | instrlen = len(instring) | ||
| 2700 | bodychars = self.bodyChars | ||
| 2701 | maxloc = start + self.maxLen | ||
| 2702 | maxloc = min( maxloc, instrlen ) | ||
| 2703 | while loc < maxloc and instring[loc] in bodychars: | ||
| 2704 | loc += 1 | ||
| 2705 | |||
| 2706 | throwException = False | ||
| 2707 | if loc - start < self.minLen: | ||
| 2708 | throwException = True | ||
| 2709 | if self.maxSpecified and loc < instrlen and instring[loc] in bodychars: | ||
| 2710 | throwException = True | ||
| 2711 | if self.asKeyword: | ||
| 2712 | if (start>0 and instring[start-1] in bodychars) or (loc<instrlen and instring[loc] in bodychars): | ||
| 2713 | throwException = True | ||
| 2714 | |||
| 2715 | if throwException: | ||
| 2716 | raise ParseException(instring, loc, self.errmsg, self) | ||
| 2717 | |||
| 2718 | return loc, instring[start:loc] | ||
| 2719 | |||
| 2720 | def __str__( self ): | ||
| 2721 | try: | ||
| 2722 | return super(Word,self).__str__() | ||
| 2723 | except Exception: | ||
| 2724 | pass | ||
| 2725 | |||
| 2726 | |||
| 2727 | if self.strRepr is None: | ||
| 2728 | |||
| 2729 | def charsAsStr(s): | ||
| 2730 | if len(s)>4: | ||
| 2731 | return s[:4]+"..." | ||
| 2732 | else: | ||
| 2733 | return s | ||
| 2734 | |||
| 2735 | if ( self.initCharsOrig != self.bodyCharsOrig ): | ||
| 2736 | self.strRepr = "W:(%s,%s)" % ( charsAsStr(self.initCharsOrig), charsAsStr(self.bodyCharsOrig) ) | ||
| 2737 | else: | ||
| 2738 | self.strRepr = "W:(%s)" % charsAsStr(self.initCharsOrig) | ||
| 2739 | |||
| 2740 | return self.strRepr | ||
| 2741 | |||
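| | # --- Reviewer-added sketch (not part of upstream pyparsing); _demo_* is hypothetical. | ||
| | # With the default min/max/exact, Word.__init__ above compiles an equivalent regular | ||
| | # expression (reString) and parseImpl matches with it directly; note the character | ||
| | # set is emitted verbatim, not compressed into ranges. | ||
| | def _demo_word(): | ||
| |     hostname = Word(alphas, alphanums + '-') | ||
| |     assert hostname.parseString("my-host01")[0] == "my-host01" | ||
| |     assert Word(nums).reString == "[0123456789]+" | ||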
| 2742 | |||
| 2743 | class Regex(Token): | ||
| 2744 | r""" | ||
| 2745 | Token for matching strings that match a given regular expression. | ||
| 2746 | Defined with a string specifying the regular expression, in a form recognized by the built-in Python re module. | ||
| 2747 | If the given regex contains named groups (defined using C{(?P<name>...)}), these will be preserved as | ||
| 2748 | named parse results. | ||
| 2749 | |||
| 2750 | Example:: | ||
| 2751 | realnum = Regex(r"[+-]?\d+\.\d*") | ||
| 2752 | date = Regex(r'(?P<year>\d{4})-(?P<month>\d\d?)-(?P<day>\d\d?)') | ||
| 2753 | # ref: http://stackoverflow.com/questions/267399/how-do-you-match-only-valid-roman-numerals-with-a-regular-expression | ||
| 2754 | roman = Regex(r"M{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})") | ||
| 2755 | """ | ||
| 2756 | compiledREtype = type(re.compile("[A-Z]")) | ||
| 2757 | def __init__( self, pattern, flags=0): | ||
| 2758 | """The parameters C{pattern} and C{flags} are passed to the C{re.compile()} function as-is. See the Python C{re} module for an explanation of the acceptable patterns and flags.""" | ||
| 2759 | super(Regex,self).__init__() | ||
| 2760 | |||
| 2761 | if isinstance(pattern, basestring): | ||
| 2762 | if not pattern: | ||
| 2763 | warnings.warn("null string passed to Regex; use Empty() instead", | ||
| 2764 | SyntaxWarning, stacklevel=2) | ||
| 2765 | |||
| 2766 | self.pattern = pattern | ||
| 2767 | self.flags = flags | ||
| 2768 | |||
| 2769 | try: | ||
| 2770 | self.re = re.compile(self.pattern, self.flags) | ||
| 2771 | self.reString = self.pattern | ||
| 2772 | except sre_constants.error: | ||
| 2773 | warnings.warn("invalid pattern (%s) passed to Regex" % pattern, | ||
| 2774 | SyntaxWarning, stacklevel=2) | ||
| 2775 | raise | ||
| 2776 | |||
| 2777 | elif isinstance(pattern, Regex.compiledREtype): | ||
| 2778 | self.re = pattern | ||
| 2779 | self.pattern = \ | ||
| 2780 | self.reString = str(pattern) | ||
| 2781 | self.flags = flags | ||
| 2782 | |||
| 2783 | else: | ||
| 2784 | raise ValueError("Regex may only be constructed with a string or a compiled RE object") | ||
| 2785 | |||
| 2786 | self.name = _ustr(self) | ||
| 2787 | self.errmsg = "Expected " + self.name | ||
| 2788 | self.mayIndexError = False | ||
| 2789 | self.mayReturnEmpty = True | ||
| 2790 | |||
| 2791 | def parseImpl( self, instring, loc, doActions=True ): | ||
| 2792 | result = self.re.match(instring,loc) | ||
| 2793 | if not result: | ||
| 2794 | raise ParseException(instring, loc, self.errmsg, self) | ||
| 2795 | |||
| 2796 | loc = result.end() | ||
| 2797 | d = result.groupdict() | ||
| 2798 | ret = ParseResults(result.group()) | ||
| 2799 | if d: | ||
| 2800 | for k in d: | ||
| 2801 | ret[k] = d[k] | ||
| 2802 | return loc,ret | ||
| 2803 | |||
| 2804 | def __str__( self ): | ||
| 2805 | try: | ||
| 2806 | return super(Regex,self).__str__() | ||
| 2807 | except Exception: | ||
| 2808 | pass | ||
| 2809 | |||
| 2810 | if self.strRepr is None: | ||
| 2811 | self.strRepr = "Re:(%s)" % repr(self.pattern) | ||
| 2812 | |||
| 2813 | return self.strRepr | ||
| 2814 | |||
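| | # --- Reviewer-added sketch (not part of upstream pyparsing); _demo_* is hypothetical. | ||
| | # Named groups in the pattern come back as named results, per the groupdict() loop | ||
| | # in Regex.parseImpl above. | ||
| | def _demo_regex_named_groups(): | ||
| |     date = Regex(r'(?P<year>\d{4})-(?P<month>\d\d?)-(?P<day>\d\d?)') | ||
| |     result = date.parseString('1999-12-31') | ||
| |     assert result['year'] == '1999' and result['day'] == '31' | ||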
| 2815 | |||
| 2816 | class QuotedString(Token): | ||
| 2817 | r""" | ||
| 2818 | Token for matching strings that are delimited by quoting characters. | ||
| 2819 | |||
| 2820 | Defined with the following parameters: | ||
| 2821 | - quoteChar - string of one or more characters defining the quote delimiting string | ||
| 2822 | - escChar - character to escape quotes, typically backslash (default=C{None}) | ||
| 2823 | - escQuote - special quote sequence to escape an embedded quote string (such as SQL's "" to escape an embedded ") (default=C{None}) | ||
| 2824 | - multiline - boolean indicating whether quotes can span multiple lines (default=C{False}) | ||
| 2825 | - unquoteResults - boolean indicating whether the matched text should be unquoted (default=C{True}) | ||
| 2826 | - endQuoteChar - string of one or more characters defining the end of the quote delimited string (default=C{None} => same as quoteChar) | ||
| 2827 | - convertWhitespaceEscapes - convert escaped whitespace (C{'\t'}, C{'\n'}, etc.) to actual whitespace (default=C{True}) | ||
| 2828 | |||
| 2829 | Example:: | ||
| 2830 | qs = QuotedString('"') | ||
| 2831 | print(qs.searchString('lsjdf "This is the quote" sldjf')) | ||
| 2832 | complex_qs = QuotedString('{{', endQuoteChar='}}') | ||
| 2833 | print(complex_qs.searchString('lsjdf {{This is the "quote"}} sldjf')) | ||
| 2834 | sql_qs = QuotedString('"', escQuote='""') | ||
| 2835 | print(sql_qs.searchString('lsjdf "This is the quote with ""embedded"" quotes" sldjf')) | ||
| 2836 | prints:: | ||
| 2837 | [['This is the quote']] | ||
| 2838 | [['This is the "quote"']] | ||
| 2839 | [['This is the quote with "embedded" quotes']] | ||
| 2840 | """ | ||
| 2841 | def __init__( self, quoteChar, escChar=None, escQuote=None, multiline=False, unquoteResults=True, endQuoteChar=None, convertWhitespaceEscapes=True): | ||
| 2842 | super(QuotedString,self).__init__() | ||
| 2843 | |||
| 2844 | # remove whitespace from quote chars - won't work anyway | ||
| 2845 | quoteChar = quoteChar.strip() | ||
| 2846 | if not quoteChar: | ||
| 2847 | warnings.warn("quoteChar cannot be the empty string",SyntaxWarning,stacklevel=2) | ||
| 2848 | raise SyntaxError() | ||
| 2849 | |||
| 2850 | if endQuoteChar is None: | ||
| 2851 | endQuoteChar = quoteChar | ||
| 2852 | else: | ||
| 2853 | endQuoteChar = endQuoteChar.strip() | ||
| 2854 | if not endQuoteChar: | ||
| 2855 | warnings.warn("endQuoteChar cannot be the empty string",SyntaxWarning,stacklevel=2) | ||
| 2856 | raise SyntaxError() | ||
| 2857 | |||
| 2858 | self.quoteChar = quoteChar | ||
| 2859 | self.quoteCharLen = len(quoteChar) | ||
| 2860 | self.firstQuoteChar = quoteChar[0] | ||
| 2861 | self.endQuoteChar = endQuoteChar | ||
| 2862 | self.endQuoteCharLen = len(endQuoteChar) | ||
| 2863 | self.escChar = escChar | ||
| 2864 | self.escQuote = escQuote | ||
| 2865 | self.unquoteResults = unquoteResults | ||
| 2866 | self.convertWhitespaceEscapes = convertWhitespaceEscapes | ||
| 2867 | |||
| 2868 | if multiline: | ||
| 2869 | self.flags = re.MULTILINE | re.DOTALL | ||
| 2870 | self.pattern = r'%s(?:[^%s%s]' % \ | ||
| 2871 | ( re.escape(self.quoteChar), | ||
| 2872 | _escapeRegexRangeChars(self.endQuoteChar[0]), | ||
| 2873 | (escChar is not None and _escapeRegexRangeChars(escChar) or '') ) | ||
| 2874 | else: | ||
| 2875 | self.flags = 0 | ||
| 2876 | self.pattern = r'%s(?:[^%s\n\r%s]' % \ | ||
| 2877 | ( re.escape(self.quoteChar), | ||
| 2878 | _escapeRegexRangeChars(self.endQuoteChar[0]), | ||
| 2879 | (escChar is not None and _escapeRegexRangeChars(escChar) or '') ) | ||
| 2880 | if len(self.endQuoteChar) > 1: | ||
| 2881 | self.pattern += ( | ||
| 2882 | '|(?:' + ')|(?:'.join("%s[^%s]" % (re.escape(self.endQuoteChar[:i]), | ||
| 2883 | _escapeRegexRangeChars(self.endQuoteChar[i])) | ||
| 2884 | for i in range(len(self.endQuoteChar)-1,0,-1)) + ')' | ||
| 2885 | ) | ||
| 2886 | if escQuote: | ||
| 2887 | self.pattern += (r'|(?:%s)' % re.escape(escQuote)) | ||
| 2888 | if escChar: | ||
| 2889 | self.pattern += (r'|(?:%s.)' % re.escape(escChar)) | ||
| 2890 | self.escCharReplacePattern = re.escape(self.escChar)+"(.)" | ||
| 2891 | self.pattern += (r')*%s' % re.escape(self.endQuoteChar)) | ||
| 2892 | |||
| 2893 | try: | ||
| 2894 | self.re = re.compile(self.pattern, self.flags) | ||
| 2895 | self.reString = self.pattern | ||
| 2896 | except sre_constants.error: | ||
| 2897 | warnings.warn("invalid pattern (%s) passed to Regex" % self.pattern, | ||
| 2898 | SyntaxWarning, stacklevel=2) | ||
| 2899 | raise | ||
| 2900 | |||
| 2901 | self.name = _ustr(self) | ||
| 2902 | self.errmsg = "Expected " + self.name | ||
| 2903 | self.mayIndexError = False | ||
| 2904 | self.mayReturnEmpty = True | ||
| 2905 | |||
| 2906 | def parseImpl( self, instring, loc, doActions=True ): | ||
| 2907 | result = self.re.match(instring, loc) if instring[loc] == self.firstQuoteChar else None | ||
| 2908 | if not result: | ||
| 2909 | raise ParseException(instring, loc, self.errmsg, self) | ||
| 2910 | |||
| 2911 | loc = result.end() | ||
| 2912 | ret = result.group() | ||
| 2913 | |||
| 2914 | if self.unquoteResults: | ||
| 2915 | |||
| 2916 | # strip off quotes | ||
| 2917 | ret = ret[self.quoteCharLen:-self.endQuoteCharLen] | ||
| 2918 | |||
| 2919 | if isinstance(ret,basestring): | ||
| 2920 | # replace escaped whitespace | ||
| 2921 | if '\\' in ret and self.convertWhitespaceEscapes: | ||
| 2922 | ws_map = { | ||
| 2923 | r'\t' : '\t', | ||
| 2924 | r'\n' : '\n', | ||
| 2925 | r'\f' : '\f', | ||
| 2926 | r'\r' : '\r', | ||
| 2927 | } | ||
| 2928 | for wslit,wschar in ws_map.items(): | ||
| 2929 | ret = ret.replace(wslit, wschar) | ||
| 2930 | |||
| 2931 | # replace escaped characters | ||
| 2932 | if self.escChar: | ||
| 2933 | ret = re.sub(self.escCharReplacePattern, r"\g<1>", ret) | ||
| 2934 | |||
| 2935 | # replace escaped quotes | ||
| 2936 | if self.escQuote: | ||
| 2937 | ret = ret.replace(self.escQuote, self.endQuoteChar) | ||
| 2938 | |||
| 2939 | return loc, ret | ||
| 2940 | |||
| 2941 | def __str__( self ): | ||
| 2942 | try: | ||
| 2943 | return super(QuotedString,self).__str__() | ||
| 2944 | except Exception: | ||
| 2945 | pass | ||
| 2946 | |||
| 2947 | if self.strRepr is None: | ||
| 2948 | self.strRepr = "quoted string, starting with %s ending with %s" % (self.quoteChar, self.endQuoteChar) | ||
| 2949 | |||
| 2950 | return self.strRepr | ||
| 2951 | |||
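| | # --- Reviewer-added sketch (not part of upstream pyparsing); _demo_* is hypothetical. | ||
| | # With unquoteResults=True (the default) the delimiters are stripped, and escQuote | ||
| | # sequences collapse back to a single quote character. | ||
| | def _demo_quoted_string(): | ||
| |     sql_qs = QuotedString('"', escQuote='""') | ||
| |     found = sql_qs.searchString('lsjdf "This is the quote with ""embedded"" quotes" sldjf') | ||
| |     assert found[0][0] == 'This is the quote with "embedded" quotes' | ||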
| 2952 | |||
| 2953 | class CharsNotIn(Token): | ||
| 2954 | """ | ||
| 2955 | Token for matching words composed of characters I{not} in a given set (whitespace | ||
| 2956 | is matched too, unless it is listed in the exclusion set - see example). | ||
| 2957 | Defined with a string containing all disallowed characters, and an optional | ||
| 2958 | minimum, maximum, and/or exact length. The default value for C{min} is 1 (a | ||
| 2959 | minimum value < 1 is not valid); the default values for C{max} and C{exact} | ||
| 2960 | are 0, meaning no maximum or exact length restriction. | ||
| 2961 | |||
| 2962 | Example:: | ||
| 2963 | # define a comma-separated-value as anything that is not a ',' | ||
| 2964 | csv_value = CharsNotIn(',') | ||
| 2965 | print(delimitedList(csv_value).parseString("dkls,lsdkjf,s12 34,@!#,213")) | ||
| 2966 | prints:: | ||
| 2967 | ['dkls', 'lsdkjf', 's12 34', '@!#', '213'] | ||
| 2968 | """ | ||
| 2969 | def __init__( self, notChars, min=1, max=0, exact=0 ): | ||
| 2970 | super(CharsNotIn,self).__init__() | ||
| 2971 | self.skipWhitespace = False | ||
| 2972 | self.notChars = notChars | ||
| 2973 | |||
| 2974 | if min < 1: | ||
| 2975 | raise ValueError("cannot specify a minimum length < 1; use Optional(CharsNotIn()) if zero-length char group is permitted") | ||
| 2976 | |||
| 2977 | self.minLen = min | ||
| 2978 | |||
| 2979 | if max > 0: | ||
| 2980 | self.maxLen = max | ||
| 2981 | else: | ||
| 2982 | self.maxLen = _MAX_INT | ||
| 2983 | |||
| 2984 | if exact > 0: | ||
| 2985 | self.maxLen = exact | ||
| 2986 | self.minLen = exact | ||
| 2987 | |||
| 2988 | self.name = _ustr(self) | ||
| 2989 | self.errmsg = "Expected " + self.name | ||
| 2990 | self.mayReturnEmpty = ( self.minLen == 0 ) | ||
| 2991 | self.mayIndexError = False | ||
| 2992 | |||
| 2993 | def parseImpl( self, instring, loc, doActions=True ): | ||
| 2994 | if instring[loc] in self.notChars: | ||
| 2995 | raise ParseException(instring, loc, self.errmsg, self) | ||
| 2996 | |||
| 2997 | start = loc | ||
| 2998 | loc += 1 | ||
| 2999 | notchars = self.notChars | ||
| 3000 | maxlen = min( start+self.maxLen, len(instring) ) | ||
| 3001 | while loc < maxlen and \ | ||
| 3002 | (instring[loc] not in notchars): | ||
| 3003 | loc += 1 | ||
| 3004 | |||
| 3005 | if loc - start < self.minLen: | ||
| 3006 | raise ParseException(instring, loc, self.errmsg, self) | ||
| 3007 | |||
| 3008 | return loc, instring[start:loc] | ||
| 3009 | |||
| 3010 | def __str__( self ): | ||
| 3011 | try: | ||
| 3012 | return super(CharsNotIn, self).__str__() | ||
| 3013 | except Exception: | ||
| 3014 | pass | ||
| 3015 | |||
| 3016 | if self.strRepr is None: | ||
| 3017 | if len(self.notChars) > 4: | ||
| 3018 | self.strRepr = "!W:(%s...)" % self.notChars[:4] | ||
| 3019 | else: | ||
| 3020 | self.strRepr = "!W:(%s)" % self.notChars | ||
| 3021 | |||
| 3022 | return self.strRepr | ||
| 3023 | |||
| 3024 | class White(Token): | ||
| 3025 | """ | ||
| 3026 | Special matching class for matching whitespace. Normally, whitespace is ignored | ||
| 3027 | by pyparsing grammars. This class is included when some whitespace structures | ||
| 3028 | are significant. Define with a string containing the whitespace characters to be | ||
| 3029 | matched; default is C{" \\t\\r\\n"}. Also takes optional C{min}, C{max}, and C{exact} arguments, | ||
| 3030 | as defined for the C{L{Word}} class. | ||
| 3031 | """ | ||
| 3032 | whiteStrs = { | ||
| 3033 | " " : "<SPC>", | ||
| 3034 | "\t": "<TAB>", | ||
| 3035 | "\n": "<LF>", | ||
| 3036 | "\r": "<CR>", | ||
| 3037 | "\f": "<FF>", | ||
| 3038 | } | ||
| 3039 | def __init__(self, ws=" \t\r\n", min=1, max=0, exact=0): | ||
| 3040 | super(White,self).__init__() | ||
| 3041 | self.matchWhite = ws | ||
| 3042 | self.setWhitespaceChars( "".join(c for c in self.whiteChars if c not in self.matchWhite) ) | ||
| 3043 | #~ self.leaveWhitespace() | ||
| 3044 | self.name = ("".join(White.whiteStrs[c] for c in self.matchWhite)) | ||
| 3045 | self.mayReturnEmpty = True | ||
| 3046 | self.errmsg = "Expected " + self.name | ||
| 3047 | |||
| 3048 | self.minLen = min | ||
| 3049 | |||
| 3050 | if max > 0: | ||
| 3051 | self.maxLen = max | ||
| 3052 | else: | ||
| 3053 | self.maxLen = _MAX_INT | ||
| 3054 | |||
| 3055 | if exact > 0: | ||
| 3056 | self.maxLen = exact | ||
| 3057 | self.minLen = exact | ||
| 3058 | |||
| 3059 | def parseImpl( self, instring, loc, doActions=True ): | ||
| 3060 | if instring[loc] not in self.matchWhite: | ||
| 3061 | raise ParseException(instring, loc, self.errmsg, self) | ||
| 3062 | start = loc | ||
| 3063 | loc += 1 | ||
| 3064 | maxloc = start + self.maxLen | ||
| 3065 | maxloc = min( maxloc, len(instring) ) | ||
| 3066 | while loc < maxloc and instring[loc] in self.matchWhite: | ||
| 3067 | loc += 1 | ||
| 3068 | |||
| 3069 | if loc - start < self.minLen: | ||
| 3070 | raise ParseException(instring, loc, self.errmsg, self) | ||
| 3071 | |||
| 3072 | return loc, instring[start:loc] | ||
| 3073 | |||
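| | # --- Reviewer-added sketch (not part of upstream pyparsing); _demo_* is hypothetical. | ||
| | # White makes selected whitespace significant: here a TAB must separate the two | ||
| | # words, while other whitespace is still skipped as usual. | ||
| | def _demo_white(): | ||
| |     kv = Word(alphas) + White("\t").suppress() + Word(alphas) | ||
| |     assert kv.parseString("name\tvalue").asList() == ['name', 'value'] | ||
| |     try: | ||
| |         kv.parseString("name value")    # a space is not in this White's set | ||
| |         assert False, "expected a ParseException" | ||
| |     except ParseException: | ||
| |         pass | ||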
| 3074 | |||
| 3075 | class _PositionToken(Token): | ||
| 3076 | def __init__( self ): | ||
| 3077 | super(_PositionToken,self).__init__() | ||
| 3078 | self.name=self.__class__.__name__ | ||
| 3079 | self.mayReturnEmpty = True | ||
| 3080 | self.mayIndexError = False | ||
| 3081 | |||
| 3082 | class GoToColumn(_PositionToken): | ||
| 3083 | """ | ||
| 3084 | Token to advance to a specific column of input text; useful for tabular report scraping. | ||
| 3085 | """ | ||
| 3086 | def __init__( self, colno ): | ||
| 3087 | super(GoToColumn,self).__init__() | ||
| 3088 | self.col = colno | ||
| 3089 | |||
| 3090 | def preParse( self, instring, loc ): | ||
| 3091 | if col(loc,instring) != self.col: | ||
| 3092 | instrlen = len(instring) | ||
| 3093 | if self.ignoreExprs: | ||
| 3094 | loc = self._skipIgnorables( instring, loc ) | ||
| 3095 | while loc < instrlen and instring[loc].isspace() and col( loc, instring ) != self.col : | ||
| 3096 | loc += 1 | ||
| 3097 | return loc | ||
| 3098 | |||
| 3099 | def parseImpl( self, instring, loc, doActions=True ): | ||
| 3100 | thiscol = col( loc, instring ) | ||
| 3101 | if thiscol > self.col: | ||
| 3102 | raise ParseException( instring, loc, "Text not in expected column", self ) | ||
| 3103 | newloc = loc + self.col - thiscol | ||
| 3104 | ret = instring[ loc: newloc ] | ||
| 3105 | return newloc, ret | ||
| 3106 | |||
| 3107 | |||
| 3108 | class LineStart(_PositionToken): | ||
| 3109 | """ | ||
| 3110 | Matches if current position is at the beginning of a line within the parse string | ||
| 3111 | |||
| 3112 | Example:: | ||
| 3113 | |||
| 3114 | test = '''\ | ||
| 3115 | AAA this line | ||
| 3116 | AAA and this line | ||
| 3117 | AAA but not this one | ||
| 3118 | B AAA and definitely not this one | ||
| 3119 | ''' | ||
| 3120 | |||
| 3121 | for t in (LineStart() + 'AAA' + restOfLine).searchString(test): | ||
| 3122 | print(t) | ||
| 3123 | |||
| 3124 | Prints:: | ||
| 3125 | ['AAA', ' this line'] | ||
| 3126 | ['AAA', ' and this line'] | ||
| 3127 | |||
| 3128 | """ | ||
| 3129 | def __init__( self ): | ||
| 3130 | super(LineStart,self).__init__() | ||
| 3131 | self.errmsg = "Expected start of line" | ||
| 3132 | |||
| 3133 | def parseImpl( self, instring, loc, doActions=True ): | ||
| 3134 | if col(loc, instring) == 1: | ||
| 3135 | return loc, [] | ||
| 3136 | raise ParseException(instring, loc, self.errmsg, self) | ||
| 3137 | |||
| 3138 | class LineEnd(_PositionToken): | ||
| 3139 | """ | ||
| 3140 | Matches if current position is at the end of a line within the parse string | ||
| 3141 | """ | ||
| 3142 | def __init__( self ): | ||
| 3143 | super(LineEnd,self).__init__() | ||
| 3144 | self.setWhitespaceChars( ParserElement.DEFAULT_WHITE_CHARS.replace("\n","") ) | ||
| 3145 | self.errmsg = "Expected end of line" | ||
| 3146 | |||
| 3147 | def parseImpl( self, instring, loc, doActions=True ): | ||
| 3148 | if loc<len(instring): | ||
| 3149 | if instring[loc] == "\n": | ||
| 3150 | return loc+1, "\n" | ||
| 3151 | else: | ||
| 3152 | raise ParseException(instring, loc, self.errmsg, self) | ||
| 3153 | elif loc == len(instring): | ||
| 3154 | return loc+1, [] | ||
| 3155 | else: | ||
| 3156 | raise ParseException(instring, loc, self.errmsg, self) | ||
| 3157 | |||
| 3158 | class StringStart(_PositionToken): | ||
| 3159 | """ | ||
| 3160 | Matches if current position is at the beginning of the parse string | ||
| 3161 | """ | ||
| 3162 | def __init__( self ): | ||
| 3163 | super(StringStart,self).__init__() | ||
| 3164 | self.errmsg = "Expected start of text" | ||
| 3165 | |||
| 3166 | def parseImpl( self, instring, loc, doActions=True ): | ||
| 3167 | if loc != 0: | ||
| 3168 | # see if entire string up to here is just whitespace and ignoreables | ||
| 3169 | if loc != self.preParse( instring, 0 ): | ||
| 3170 | raise ParseException(instring, loc, self.errmsg, self) | ||
| 3171 | return loc, [] | ||
| 3172 | |||
| 3173 | class StringEnd(_PositionToken): | ||
| 3174 | """ | ||
| 3175 | Matches if current position is at the end of the parse string | ||
| 3176 | """ | ||
| 3177 | def __init__( self ): | ||
| 3178 | super(StringEnd,self).__init__() | ||
| 3179 | self.errmsg = "Expected end of text" | ||
| 3180 | |||
| 3181 | def parseImpl( self, instring, loc, doActions=True ): | ||
| 3182 | if loc < len(instring): | ||
| 3183 | raise ParseException(instring, loc, self.errmsg, self) | ||
| 3184 | elif loc == len(instring): | ||
| 3185 | return loc+1, [] | ||
| 3186 | elif loc > len(instring): | ||
| 3187 | return loc, [] | ||
| 3188 | else: | ||
| 3189 | raise ParseException(instring, loc, self.errmsg, self) | ||
| 3190 | |||
| 3191 | class WordStart(_PositionToken): | ||
| 3192 | """ | ||
| 3193 | Matches if the current position is at the beginning of a Word, and | ||
| 3194 | is not preceded by any character in a given set of C{wordChars} | ||
| 3195 | (default=C{printables}). To emulate the C{\b} behavior of regular expressions, | ||
| 3196 | use C{WordStart(alphanums)}. C{WordStart} will also match at the beginning of | ||
| 3197 | the string being parsed, or at the beginning of a line. | ||
| 3198 | """ | ||
| 3199 | def __init__(self, wordChars = printables): | ||
| 3200 | super(WordStart,self).__init__() | ||
| 3201 | self.wordChars = set(wordChars) | ||
| 3202 | self.errmsg = "Not at the start of a word" | ||
| 3203 | |||
| 3204 | def parseImpl(self, instring, loc, doActions=True ): | ||
| 3205 | if loc != 0: | ||
| 3206 | if (instring[loc-1] in self.wordChars or | ||
| 3207 | instring[loc] not in self.wordChars): | ||
| 3208 | raise ParseException(instring, loc, self.errmsg, self) | ||
| 3209 | return loc, [] | ||
| 3210 | |||
| 3211 | class WordEnd(_PositionToken): | ||
| 3212 | """ | ||
| 3213 | Matches if the current position is at the end of a Word, and | ||
| 3214 | is not followed by any character in a given set of C{wordChars} | ||
| 3215 | (default=C{printables}). To emulate the C{\b} behavior of regular expressions, | ||
| 3216 | use C{WordEnd(alphanums)}. C{WordEnd} will also match at the end of | ||
| 3217 | the string being parsed, or at the end of a line. | ||
| 3218 | """ | ||
| 3219 | def __init__(self, wordChars = printables): | ||
| 3220 | super(WordEnd,self).__init__() | ||
| 3221 | self.wordChars = set(wordChars) | ||
| 3222 | self.skipWhitespace = False | ||
| 3223 | self.errmsg = "Not at the end of a word" | ||
| 3224 | |||
| 3225 | def parseImpl(self, instring, loc, doActions=True ): | ||
| 3226 | instrlen = len(instring) | ||
| 3227 | if instrlen>0 and loc<instrlen: | ||
| 3228 | if (instring[loc] in self.wordChars or | ||
| 3229 | instring[loc-1] not in self.wordChars): | ||
| 3230 | raise ParseException(instring, loc, self.errmsg, self) | ||
| 3231 | return loc, [] | ||
| 3232 | |||
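| | # --- Reviewer-added sketch (not part of upstream pyparsing); _demo_* is hypothetical. | ||
| | # Emulating the regex \b word boundary, as the docstrings above suggest: only the | ||
| | # standalone 'cat' passes both boundary checks. | ||
| | def _demo_word_boundaries(): | ||
| |     whole_cat = WordStart(alphanums) + Literal("cat") + WordEnd(alphanums) | ||
| |     hits = whole_cat.searchString("cat concat catalog") | ||
| |     assert [h[0] for h in hits] == ["cat"] | ||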
| 3233 | |||
| 3234 | class ParseExpression(ParserElement): | ||
| 3235 | """ | ||
| 3236 | Abstract subclass of ParserElement, for combining and post-processing parsed tokens. | ||
| 3237 | """ | ||
| 3238 | def __init__( self, exprs, savelist = False ): | ||
| 3239 | super(ParseExpression,self).__init__(savelist) | ||
| 3240 | if isinstance( exprs, _generatorType ): | ||
| 3241 | exprs = list(exprs) | ||
| 3242 | |||
| 3243 | if isinstance( exprs, basestring ): | ||
| 3244 | self.exprs = [ ParserElement._literalStringClass( exprs ) ] | ||
| 3245 | elif isinstance( exprs, collections.Iterable ): | ||
| 3246 | exprs = list(exprs) | ||
| 3247 | # if sequence of strings provided, wrap with Literal | ||
| 3248 | if all(isinstance(expr, basestring) for expr in exprs): | ||
| 3249 | exprs = map(ParserElement._literalStringClass, exprs) | ||
| 3250 | self.exprs = list(exprs) | ||
| 3251 | else: | ||
| 3252 | try: | ||
| 3253 | self.exprs = list( exprs ) | ||
| 3254 | except TypeError: | ||
| 3255 | self.exprs = [ exprs ] | ||
| 3256 | self.callPreparse = False | ||
| 3257 | |||
| 3258 | def __getitem__( self, i ): | ||
| 3259 | return self.exprs[i] | ||
| 3260 | |||
| 3261 | def append( self, other ): | ||
| 3262 | self.exprs.append( other ) | ||
| 3263 | self.strRepr = None | ||
| 3264 | return self | ||
| 3265 | |||
| 3266 | def leaveWhitespace( self ): | ||
| 3267 | """Extends C{leaveWhitespace} defined in base class, and also invokes C{leaveWhitespace} on | ||
| 3268 | all contained expressions.""" | ||
| 3269 | self.skipWhitespace = False | ||
| 3270 | self.exprs = [ e.copy() for e in self.exprs ] | ||
| 3271 | for e in self.exprs: | ||
| 3272 | e.leaveWhitespace() | ||
| 3273 | return self | ||
| 3274 | |||
| 3275 | def ignore( self, other ): | ||
| 3276 | if isinstance( other, Suppress ): | ||
| 3277 | if other not in self.ignoreExprs: | ||
| 3278 | super( ParseExpression, self).ignore( other ) | ||
| 3279 | for e in self.exprs: | ||
| 3280 | e.ignore( self.ignoreExprs[-1] ) | ||
| 3281 | else: | ||
| 3282 | super( ParseExpression, self).ignore( other ) | ||
| 3283 | for e in self.exprs: | ||
| 3284 | e.ignore( self.ignoreExprs[-1] ) | ||
| 3285 | return self | ||
| 3286 | |||
| 3287 | def __str__( self ): | ||
| 3288 | try: | ||
| 3289 | return super(ParseExpression,self).__str__() | ||
| 3290 | except Exception: | ||
| 3291 | pass | ||
| 3292 | |||
| 3293 | if self.strRepr is None: | ||
| 3294 | self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.exprs) ) | ||
| 3295 | return self.strRepr | ||
| 3296 | |||
| 3297 | def streamline( self ): | ||
| 3298 | super(ParseExpression,self).streamline() | ||
| 3299 | |||
| 3300 | for e in self.exprs: | ||
| 3301 | e.streamline() | ||
| 3302 | |||
| 3303 | # collapse nested And's of the form And( And( And( a,b), c), d) to And( a,b,c,d ) | ||
| 3304 | # but only if there are no parse actions or resultsNames on the nested And's | ||
| 3305 | # (likewise for Or's and MatchFirst's) | ||
| 3306 | if ( len(self.exprs) == 2 ): | ||
| 3307 | other = self.exprs[0] | ||
| 3308 | if ( isinstance( other, self.__class__ ) and | ||
| 3309 | not(other.parseAction) and | ||
| 3310 | other.resultsName is None and | ||
| 3311 | not other.debug ): | ||
| 3312 | self.exprs = other.exprs[:] + [ self.exprs[1] ] | ||
| 3313 | self.strRepr = None | ||
| 3314 | self.mayReturnEmpty |= other.mayReturnEmpty | ||
| 3315 | self.mayIndexError |= other.mayIndexError | ||
| 3316 | |||
| 3317 | other = self.exprs[-1] | ||
| 3318 | if ( isinstance( other, self.__class__ ) and | ||
| 3319 | not(other.parseAction) and | ||
| 3320 | other.resultsName is None and | ||
| 3321 | not other.debug ): | ||
| 3322 | self.exprs = self.exprs[:-1] + other.exprs[:] | ||
| 3323 | self.strRepr = None | ||
| 3324 | self.mayReturnEmpty |= other.mayReturnEmpty | ||
| 3325 | self.mayIndexError |= other.mayIndexError | ||
| 3326 | |||
| 3327 | self.errmsg = "Expected " + _ustr(self) | ||
| 3328 | |||
| 3329 | return self | ||
| 3330 | |||
| 3331 | def setResultsName( self, name, listAllMatches=False ): | ||
| 3332 | ret = super(ParseExpression,self).setResultsName(name,listAllMatches) | ||
| 3333 | return ret | ||
| 3334 | |||
| 3335 | def validate( self, validateTrace=[] ): | ||
| 3336 | tmp = validateTrace[:]+[self] | ||
| 3337 | for e in self.exprs: | ||
| 3338 | e.validate(tmp) | ||
| 3339 | self.checkRecursion( [] ) | ||
| 3340 | |||
| 3341 | def copy(self): | ||
| 3342 | ret = super(ParseExpression,self).copy() | ||
| 3343 | ret.exprs = [e.copy() for e in self.exprs] | ||
| 3344 | return ret | ||
| 3345 | |||
| 3346 | class And(ParseExpression): | ||
| 3347 | """ | ||
| 3348 | Requires all given C{ParseExpression}s to be found in the given order. | ||
| 3349 | Expressions may be separated by whitespace. | ||
| 3350 | May be constructed using the C{'+'} operator. | ||
| 3351 | May also be constructed using the C{'-'} operator, which will suppress backtracking. | ||
| 3352 | |||
| 3353 | Example:: | ||
| 3354 | integer = Word(nums) | ||
| 3355 | name_expr = OneOrMore(Word(alphas)) | ||
| 3356 | |||
| 3357 | expr = And([integer("id"),name_expr("name"),integer("age")]) | ||
| 3358 | # more easily written as: | ||
| 3359 | expr = integer("id") + name_expr("name") + integer("age") | ||
| 3360 | """ | ||
| 3361 | |||
| 3362 | class _ErrorStop(Empty): | ||
| 3363 | def __init__(self, *args, **kwargs): | ||
| 3364 | super(And._ErrorStop,self).__init__(*args, **kwargs) | ||
| 3365 | self.name = '-' | ||
| 3366 | self.leaveWhitespace() | ||
| 3367 | |||
| 3368 | def __init__( self, exprs, savelist = True ): | ||
| 3369 | super(And,self).__init__(exprs, savelist) | ||
| 3370 | self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) | ||
| 3371 | self.setWhitespaceChars( self.exprs[0].whiteChars ) | ||
| 3372 | self.skipWhitespace = self.exprs[0].skipWhitespace | ||
| 3373 | self.callPreparse = True | ||
| 3374 | |||
| 3375 | def parseImpl( self, instring, loc, doActions=True ): | ||
| 3376 | # pass False as last arg to _parse for first element, since we already | ||
| 3377 | # pre-parsed the string as part of our And pre-parsing | ||
| 3378 | loc, resultlist = self.exprs[0]._parse( instring, loc, doActions, callPreParse=False ) | ||
| 3379 | errorStop = False | ||
| 3380 | for e in self.exprs[1:]: | ||
| 3381 | if isinstance(e, And._ErrorStop): | ||
| 3382 | errorStop = True | ||
| 3383 | continue | ||
| 3384 | if errorStop: | ||
| 3385 | try: | ||
| 3386 | loc, exprtokens = e._parse( instring, loc, doActions ) | ||
| 3387 | except ParseSyntaxException: | ||
| 3388 | raise | ||
| 3389 | except ParseBaseException as pe: | ||
| 3390 | pe.__traceback__ = None | ||
| 3391 | raise ParseSyntaxException._from_exception(pe) | ||
| 3392 | except IndexError: | ||
| 3393 | raise ParseSyntaxException(instring, len(instring), self.errmsg, self) | ||
| 3394 | else: | ||
| 3395 | loc, exprtokens = e._parse( instring, loc, doActions ) | ||
| 3396 | if exprtokens or exprtokens.haskeys(): | ||
| 3397 | resultlist += exprtokens | ||
| 3398 | return loc, resultlist | ||
| 3399 | |||
| 3400 | def __iadd__(self, other ): | ||
| 3401 | if isinstance( other, basestring ): | ||
| 3402 | other = ParserElement._literalStringClass( other ) | ||
| 3403 | return self.append( other ) #And( [ self, other ] ) | ||
| 3404 | |||
| 3405 | def checkRecursion( self, parseElementList ): | ||
| 3406 | subRecCheckList = parseElementList[:] + [ self ] | ||
| 3407 | for e in self.exprs: | ||
| 3408 | e.checkRecursion( subRecCheckList ) | ||
| 3409 | if not e.mayReturnEmpty: | ||
| 3410 | break | ||
| 3411 | |||
| 3412 | def __str__( self ): | ||
| 3413 | if hasattr(self,"name"): | ||
| 3414 | return self.name | ||
| 3415 | |||
| 3416 | if self.strRepr is None: | ||
| 3417 | self.strRepr = "{" + " ".join(_ustr(e) for e in self.exprs) + "}" | ||
| 3418 | |||
| 3419 | return self.strRepr | ||
| 3420 | |||
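| | # --- Reviewer-added sketch (not part of upstream pyparsing); _demo_* is hypothetical. | ||
| | # The '-' operator inserts an _ErrorStop, so a failure after that point is re-raised | ||
| | # as the non-backtracking ParseSyntaxException seen in And.parseImpl above. | ||
| | def _demo_error_stop(): | ||
| |     assign = Word(alphas) + '=' - Word(nums) | ||
| |     try: | ||
| |         assign.parseString("x = oops") | ||
| |         assert False, "expected a syntax error" | ||
| |     except ParseSyntaxException: | ||
| |         pass    # no backtracking past the '-' | ||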
| 3421 | |||
| 3422 | class Or(ParseExpression): | ||
| 3423 | """ | ||
| 3424 | Requires that at least one C{ParseExpression} is found. | ||
| 3425 | If two expressions match, the expression that matches the longest string will be used. | ||
| 3426 | May be constructed using the C{'^'} operator. | ||
| 3427 | |||
| 3428 | Example:: | ||
| 3429 | # construct Or using '^' operator | ||
| 3430 | |||
| 3431 | number = Word(nums) ^ Combine(Word(nums) + '.' + Word(nums)) | ||
| 3432 | print(number.searchString("123 3.1416 789")) | ||
| 3433 | prints:: | ||
| 3434 | [['123'], ['3.1416'], ['789']] | ||
| 3435 | """ | ||
| 3436 | def __init__( self, exprs, savelist = False ): | ||
| 3437 | super(Or,self).__init__(exprs, savelist) | ||
| 3438 | if self.exprs: | ||
| 3439 | self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs) | ||
| 3440 | else: | ||
| 3441 | self.mayReturnEmpty = True | ||
| 3442 | |||
| 3443 | def parseImpl( self, instring, loc, doActions=True ): | ||
| 3444 | maxExcLoc = -1 | ||
| 3445 | maxException = None | ||
| 3446 | matches = [] | ||
| 3447 | for e in self.exprs: | ||
| 3448 | try: | ||
| 3449 | loc2 = e.tryParse( instring, loc ) | ||
| 3450 | except ParseException as err: | ||
| 3451 | err.__traceback__ = None | ||
| 3452 | if err.loc > maxExcLoc: | ||
| 3453 | maxException = err | ||
| 3454 | maxExcLoc = err.loc | ||
| 3455 | except IndexError: | ||
| 3456 | if len(instring) > maxExcLoc: | ||
| 3457 | maxException = ParseException(instring,len(instring),e.errmsg,self) | ||
| 3458 | maxExcLoc = len(instring) | ||
| 3459 | else: | ||
| 3460 | # save match among all matches, to retry longest to shortest | ||
| 3461 | matches.append((loc2, e)) | ||
| 3462 | |||
| 3463 | if matches: | ||
| 3464 | matches.sort(key=lambda x: -x[0]) | ||
| 3465 | for _,e in matches: | ||
| 3466 | try: | ||
| 3467 | return e._parse( instring, loc, doActions ) | ||
| 3468 | except ParseException as err: | ||
| 3469 | err.__traceback__ = None | ||
| 3470 | if err.loc > maxExcLoc: | ||
| 3471 | maxException = err | ||
| 3472 | maxExcLoc = err.loc | ||
| 3473 | |||
| 3474 | if maxException is not None: | ||
| 3475 | maxException.msg = self.errmsg | ||
| 3476 | raise maxException | ||
| 3477 | else: | ||
| 3478 | raise ParseException(instring, loc, "no defined alternatives to match", self) | ||
| 3479 | |||
| 3480 | |||
| 3481 | def __ixor__(self, other ): | ||
| 3482 | if isinstance( other, basestring ): | ||
| 3483 | other = ParserElement._literalStringClass( other ) | ||
| 3484 | return self.append( other ) #Or( [ self, other ] ) | ||
| 3485 | |||
| 3486 | def __str__( self ): | ||
| 3487 | if hasattr(self,"name"): | ||
| 3488 | return self.name | ||
| 3489 | |||
| 3490 | if self.strRepr is None: | ||
| 3491 | self.strRepr = "{" + " ^ ".join(_ustr(e) for e in self.exprs) + "}" | ||
| 3492 | |||
| 3493 | return self.strRepr | ||
| 3494 | |||
| 3495 | def checkRecursion( self, parseElementList ): | ||
| 3496 | subRecCheckList = parseElementList[:] + [ self ] | ||
| 3497 | for e in self.exprs: | ||
| 3498 | e.checkRecursion( subRecCheckList ) | ||
| 3499 | |||
| 3500 | |||
| 3501 | class MatchFirst(ParseExpression): | ||
| 3502 | """ | ||
| 3503 | Requires that at least one C{ParseExpression} is found. | ||
| 3504 | If two expressions match, the first one listed is the one that will match. | ||
| 3505 | May be constructed using the C{'|'} operator. | ||
| 3506 | |||
| 3507 | Example:: | ||
| 3508 | # construct MatchFirst using '|' operator | ||
| 3509 | |||
| 3510 | # watch the order of expressions to match | ||
| 3511 | number = Word(nums) | Combine(Word(nums) + '.' + Word(nums)) | ||
| 3512 | print(number.searchString("123 3.1416 789")) # Fail! -> [['123'], ['3'], ['1416'], ['789']] | ||
| 3513 | |||
| 3514 | # put more selective expression first | ||
| 3515 | number = Combine(Word(nums) + '.' + Word(nums)) | Word(nums) | ||
| 3516 | print(number.searchString("123 3.1416 789")) # Better -> [['123'], ['3.1416'], ['789']] | ||
| 3517 | """ | ||
| 3518 | def __init__( self, exprs, savelist = False ): | ||
| 3519 | super(MatchFirst,self).__init__(exprs, savelist) | ||
| 3520 | if self.exprs: | ||
| 3521 | self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs) | ||
| 3522 | else: | ||
| 3523 | self.mayReturnEmpty = True | ||
| 3524 | |||
| 3525 | def parseImpl( self, instring, loc, doActions=True ): | ||
| 3526 | maxExcLoc = -1 | ||
| 3527 | maxException = None | ||
| 3528 | for e in self.exprs: | ||
| 3529 | try: | ||
| 3530 | ret = e._parse( instring, loc, doActions ) | ||
| 3531 | return ret | ||
| 3532 | except ParseException as err: | ||
| 3533 | if err.loc > maxExcLoc: | ||
| 3534 | maxException = err | ||
| 3535 | maxExcLoc = err.loc | ||
| 3536 | except IndexError: | ||
| 3537 | if len(instring) > maxExcLoc: | ||
| 3538 | maxException = ParseException(instring,len(instring),e.errmsg,self) | ||
| 3539 | maxExcLoc = len(instring) | ||
| 3540 | |||
| 3541 | # only got here if no expression matched, raise exception for match that made it the furthest | ||
| 3542 | else: | ||
| 3543 | if maxException is not None: | ||
| 3544 | maxException.msg = self.errmsg | ||
| 3545 | raise maxException | ||
| 3546 | else: | ||
| 3547 | raise ParseException(instring, loc, "no defined alternatives to match", self) | ||
| 3548 | |||
| 3549 | def __ior__(self, other ): | ||
| 3550 | if isinstance( other, basestring ): | ||
| 3551 | other = ParserElement._literalStringClass( other ) | ||
| 3552 | return self.append( other ) #MatchFirst( [ self, other ] ) | ||
| 3553 | |||
| 3554 | def __str__( self ): | ||
| 3555 | if hasattr(self,"name"): | ||
| 3556 | return self.name | ||
| 3557 | |||
| 3558 | if self.strRepr is None: | ||
| 3559 | self.strRepr = "{" + " | ".join(_ustr(e) for e in self.exprs) + "}" | ||
| 3560 | |||
| 3561 | return self.strRepr | ||
| 3562 | |||
| 3563 | def checkRecursion( self, parseElementList ): | ||
| 3564 | subRecCheckList = parseElementList[:] + [ self ] | ||
| 3565 | for e in self.exprs: | ||
| 3566 | e.checkRecursion( subRecCheckList ) | ||
| 3567 | |||
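| | # --- Reviewer-added sketch (not part of upstream pyparsing); _demo_* is hypothetical. | ||
| | # Or ('^') retries alternatives longest-match-first; MatchFirst ('|') commits to the | ||
| | # first alternative that matches, so ordering matters for it (Combine is defined | ||
| | # later in this module and resolves at call time). | ||
| | def _demo_or_vs_matchfirst(): | ||
| |     real = Combine(Word(nums) + '.' + Word(nums)) | ||
| |     assert (Word(nums) ^ real).searchString("123 3.1416 789").asList() == [['123'], ['3.1416'], ['789']] | ||
| |     assert (Word(nums) | real).searchString("123 3.1416 789").asList() == [['123'], ['3'], ['1416'], ['789']] | ||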
| 3568 | |||
| 3569 | class Each(ParseExpression): | ||
| 3570 | """ | ||
| 3571 | Requires all given C{ParseExpression}s to be found, but in any order. | ||
| 3572 | Expressions may be separated by whitespace. | ||
| 3573 | May be constructed using the C{'&'} operator. | ||
| 3574 | |||
| 3575 | Example:: | ||
| 3576 | color = oneOf("RED ORANGE YELLOW GREEN BLUE PURPLE BLACK WHITE BROWN") | ||
| 3577 | shape_type = oneOf("SQUARE CIRCLE TRIANGLE STAR HEXAGON OCTAGON") | ||
| 3578 | integer = Word(nums) | ||
| 3579 | shape_attr = "shape:" + shape_type("shape") | ||
| 3580 | posn_attr = "posn:" + Group(integer("x") + ',' + integer("y"))("posn") | ||
| 3581 | color_attr = "color:" + color("color") | ||
| 3582 | size_attr = "size:" + integer("size") | ||
| 3583 | |||
| 3584 | # use Each (using operator '&') to accept attributes in any order | ||
| 3585 | # (shape and posn are required, color and size are optional) | ||
| 3586 | shape_spec = shape_attr & posn_attr & Optional(color_attr) & Optional(size_attr) | ||
| 3587 | |||
| 3588 | shape_spec.runTests(''' | ||
| 3589 | shape: SQUARE color: BLACK posn: 100, 120 | ||
| 3590 | shape: CIRCLE size: 50 color: BLUE posn: 50,80 | ||
| 3591 | color:GREEN size:20 shape:TRIANGLE posn:20,40 | ||
| 3592 | ''' | ||
| 3593 | ) | ||
| 3594 | prints:: | ||
| 3595 | shape: SQUARE color: BLACK posn: 100, 120 | ||
| 3596 | ['shape:', 'SQUARE', 'color:', 'BLACK', 'posn:', ['100', ',', '120']] | ||
| 3597 | - color: BLACK | ||
| 3598 | - posn: ['100', ',', '120'] | ||
| 3599 | - x: 100 | ||
| 3600 | - y: 120 | ||
| 3601 | - shape: SQUARE | ||
| 3602 | |||
| 3603 | |||
| 3604 | shape: CIRCLE size: 50 color: BLUE posn: 50,80 | ||
| 3605 | ['shape:', 'CIRCLE', 'size:', '50', 'color:', 'BLUE', 'posn:', ['50', ',', '80']] | ||
| 3606 | - color: BLUE | ||
| 3607 | - posn: ['50', ',', '80'] | ||
| 3608 | - x: 50 | ||
| 3609 | - y: 80 | ||
| 3610 | - shape: CIRCLE | ||
| 3611 | - size: 50 | ||
| 3612 | |||
| 3613 | |||
| 3614 | color: GREEN size: 20 shape: TRIANGLE posn: 20,40 | ||
| 3615 | ['color:', 'GREEN', 'size:', '20', 'shape:', 'TRIANGLE', 'posn:', ['20', ',', '40']] | ||
| 3616 | - color: GREEN | ||
| 3617 | - posn: ['20', ',', '40'] | ||
| 3618 | - x: 20 | ||
| 3619 | - y: 40 | ||
| 3620 | - shape: TRIANGLE | ||
| 3621 | - size: 20 | ||
| 3622 | """ | ||
| 3623 | def __init__( self, exprs, savelist = True ): | ||
| 3624 | super(Each,self).__init__(exprs, savelist) | ||
| 3625 | self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) | ||
| 3626 | self.skipWhitespace = True | ||
| 3627 | self.initExprGroups = True | ||
| 3628 | |||
| 3629 | def parseImpl( self, instring, loc, doActions=True ): | ||
| 3630 | if self.initExprGroups: | ||
| 3631 | self.opt1map = dict((id(e.expr),e) for e in self.exprs if isinstance(e,Optional)) | ||
| 3632 | opt1 = [ e.expr for e in self.exprs if isinstance(e,Optional) ] | ||
| 3633 | opt2 = [ e for e in self.exprs if e.mayReturnEmpty and not isinstance(e,Optional)] | ||
| 3634 | self.optionals = opt1 + opt2 | ||
| 3635 | self.multioptionals = [ e.expr for e in self.exprs if isinstance(e,ZeroOrMore) ] | ||
| 3636 | self.multirequired = [ e.expr for e in self.exprs if isinstance(e,OneOrMore) ] | ||
| 3637 | self.required = [ e for e in self.exprs if not isinstance(e,(Optional,ZeroOrMore,OneOrMore)) ] | ||
| 3638 | self.required += self.multirequired | ||
| 3639 | self.initExprGroups = False | ||
| 3640 | tmpLoc = loc | ||
| 3641 | tmpReqd = self.required[:] | ||
| 3642 | tmpOpt = self.optionals[:] | ||
| 3643 | matchOrder = [] | ||
| 3644 | |||
| 3645 | keepMatching = True | ||
| 3646 | while keepMatching: | ||
| 3647 | tmpExprs = tmpReqd + tmpOpt + self.multioptionals + self.multirequired | ||
| 3648 | failed = [] | ||
| 3649 | for e in tmpExprs: | ||
| 3650 | try: | ||
| 3651 | tmpLoc = e.tryParse( instring, tmpLoc ) | ||
| 3652 | except ParseException: | ||
| 3653 | failed.append(e) | ||
| 3654 | else: | ||
| 3655 | matchOrder.append(self.opt1map.get(id(e),e)) | ||
| 3656 | if e in tmpReqd: | ||
| 3657 | tmpReqd.remove(e) | ||
| 3658 | elif e in tmpOpt: | ||
| 3659 | tmpOpt.remove(e) | ||
| 3660 | if len(failed) == len(tmpExprs): | ||
| 3661 | keepMatching = False | ||
| 3662 | |||
| 3663 | if tmpReqd: | ||
| 3664 | missing = ", ".join(_ustr(e) for e in tmpReqd) | ||
| 3665 | raise ParseException(instring,loc,"Missing one or more required elements (%s)" % missing ) | ||
| 3666 | |||
| 3667 | # add any unmatched Optionals, in case they have default values defined | ||
| 3668 | matchOrder += [e for e in self.exprs if isinstance(e,Optional) and e.expr in tmpOpt] | ||
| 3669 | |||
| 3670 | resultlist = [] | ||
| 3671 | for e in matchOrder: | ||
| 3672 | loc,results = e._parse(instring,loc,doActions) | ||
| 3673 | resultlist.append(results) | ||
| 3674 | |||
| 3675 | finalResults = sum(resultlist, ParseResults([])) | ||
| 3676 | return loc, finalResults | ||
| 3677 | |||
| 3678 | def __str__( self ): | ||
| 3679 | if hasattr(self,"name"): | ||
| 3680 | return self.name | ||
| 3681 | |||
| 3682 | if self.strRepr is None: | ||
| 3683 | self.strRepr = "{" + " & ".join(_ustr(e) for e in self.exprs) + "}" | ||
| 3684 | |||
| 3685 | return self.strRepr | ||
| 3686 | |||
| 3687 | def checkRecursion( self, parseElementList ): | ||
| 3688 | subRecCheckList = parseElementList[:] + [ self ] | ||
| 3689 | for e in self.exprs: | ||
| 3690 | e.checkRecursion( subRecCheckList ) | ||
| 3691 | |||
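| | # Supplementary sketch (illustrative, not from the original source): tokens | ||
| | # come back in the order they appear in the input, not declaration order. | ||
| | #   lang = Keyword('lang') + Word(alphas) | ||
| | #   ver = Keyword('ver') + Word(nums) | ||
| | #   print((lang & ver).parseString('ver 2 lang python')) | ||
| | #   # -> ['ver', '2', 'lang', 'python'] | ||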
| 3692 | |||
| 3693 | class ParseElementEnhance(ParserElement): | ||
| 3694 | """ | ||
| 3695 | Abstract subclass of C{ParserElement}, for combining and post-processing parsed tokens. | ||
| 3696 | """ | ||
| 3697 | def __init__( self, expr, savelist=False ): | ||
| 3698 | super(ParseElementEnhance,self).__init__(savelist) | ||
| 3699 | if isinstance( expr, basestring ): | ||
| 3700 | if issubclass(ParserElement._literalStringClass, Token): | ||
| 3701 | expr = ParserElement._literalStringClass(expr) | ||
| 3702 | else: | ||
| 3703 | expr = ParserElement._literalStringClass(Literal(expr)) | ||
| 3704 | self.expr = expr | ||
| 3705 | self.strRepr = None | ||
| 3706 | if expr is not None: | ||
| 3707 | self.mayIndexError = expr.mayIndexError | ||
| 3708 | self.mayReturnEmpty = expr.mayReturnEmpty | ||
| 3709 | self.setWhitespaceChars( expr.whiteChars ) | ||
| 3710 | self.skipWhitespace = expr.skipWhitespace | ||
| 3711 | self.saveAsList = expr.saveAsList | ||
| 3712 | self.callPreparse = expr.callPreparse | ||
| 3713 | self.ignoreExprs.extend(expr.ignoreExprs) | ||
| 3714 | |||
| 3715 | def parseImpl( self, instring, loc, doActions=True ): | ||
| 3716 | if self.expr is not None: | ||
| 3717 | return self.expr._parse( instring, loc, doActions, callPreParse=False ) | ||
| 3718 | else: | ||
| 3719 | raise ParseException("",loc,self.errmsg,self) | ||
| 3720 | |||
| 3721 | def leaveWhitespace( self ): | ||
| 3722 | self.skipWhitespace = False | ||
| 3723 | if self.expr is not None: | ||
| 3724 | self.expr = self.expr.copy() | ||
| 3725 | self.expr.leaveWhitespace() | ||
| 3726 | return self | ||
| 3727 | |||
| 3728 | def ignore( self, other ): | ||
| 3729 | if isinstance( other, Suppress ): | ||
| 3730 | if other not in self.ignoreExprs: | ||
| 3731 | super( ParseElementEnhance, self).ignore( other ) | ||
| 3732 | if self.expr is not None: | ||
| 3733 | self.expr.ignore( self.ignoreExprs[-1] ) | ||
| 3734 | else: | ||
| 3735 | super( ParseElementEnhance, self).ignore( other ) | ||
| 3736 | if self.expr is not None: | ||
| 3737 | self.expr.ignore( self.ignoreExprs[-1] ) | ||
| 3738 | return self | ||
| 3739 | |||
| 3740 | def streamline( self ): | ||
| 3741 | super(ParseElementEnhance,self).streamline() | ||
| 3742 | if self.expr is not None: | ||
| 3743 | self.expr.streamline() | ||
| 3744 | return self | ||
| 3745 | |||
| 3746 | def checkRecursion( self, parseElementList ): | ||
| 3747 | if self in parseElementList: | ||
| 3748 | raise RecursiveGrammarException( parseElementList+[self] ) | ||
| 3749 | subRecCheckList = parseElementList[:] + [ self ] | ||
| 3750 | if self.expr is not None: | ||
| 3751 | self.expr.checkRecursion( subRecCheckList ) | ||
| 3752 | |||
| 3753 | def validate( self, validateTrace=[] ): | ||
| 3754 | tmp = validateTrace[:]+[self] | ||
| 3755 | if self.expr is not None: | ||
| 3756 | self.expr.validate(tmp) | ||
| 3757 | self.checkRecursion( [] ) | ||
| 3758 | |||
| 3759 | def __str__( self ): | ||
| 3760 | try: | ||
| 3761 | return super(ParseElementEnhance,self).__str__() | ||
| 3762 | except Exception: | ||
| 3763 | pass | ||
| 3764 | |||
| 3765 | if self.strRepr is None and self.expr is not None: | ||
| 3766 | self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.expr) ) | ||
| 3767 | return self.strRepr | ||
| 3768 | |||
| 3769 | |||
| 3770 | class FollowedBy(ParseElementEnhance): | ||
| 3771 | """ | ||
| 3772 | Lookahead matching of the given parse expression. C{FollowedBy} | ||
| 3773 | does I{not} advance the parsing position within the input string, it only | ||
| 3774 | verifies that the specified parse expression matches at the current | ||
| 3775 | position. C{FollowedBy} always returns a null token list. | ||
| 3776 | |||
| 3777 | Example:: | ||
| 3778 | # use FollowedBy to match a label only if it is followed by a ':' | ||
| 3779 | data_word = Word(alphas) | ||
| 3780 | label = data_word + FollowedBy(':') | ||
| 3781 | attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) | ||
| 3782 | |||
| 3783 | OneOrMore(attr_expr).parseString("shape: SQUARE color: BLACK posn: upper left").pprint() | ||
| 3784 | prints:: | ||
| 3785 | [['shape', 'SQUARE'], ['color', 'BLACK'], ['posn', 'upper left']] | ||
| 3786 | """ | ||
| 3787 | def __init__( self, expr ): | ||
| 3788 | super(FollowedBy,self).__init__(expr) | ||
| 3789 | self.mayReturnEmpty = True | ||
| 3790 | |||
| 3791 | def parseImpl( self, instring, loc, doActions=True ): | ||
| 3792 | self.expr.tryParse( instring, loc ) | ||
| 3793 | return loc, [] | ||
| 3794 | |||
| 3795 | |||
| 3796 | class NotAny(ParseElementEnhance): | ||
| 3797 | """ | ||
| 3798 | Lookahead to disallow matching with the given parse expression. C{NotAny} | ||
| 3799 | does I{not} advance the parsing position within the input string, it only | ||
| 3800 | verifies that the specified parse expression does I{not} match at the current | ||
| 3801 | position. Also, C{NotAny} does I{not} skip over leading whitespace. C{NotAny} | ||
| 3802 | always returns a null token list. May be constructed using the '~' operator. | ||
| 3803 | |||
| 3804 | Example:: | ||
| | AND, OR, NOT = map(CaselessKeyword, "AND OR NOT".split()) | ||
| | # take care not to mistake keywords for identifiers | ||
| | ident = ~(AND | OR | NOT) + Word(alphas) | ||
| 3805 | |||
| 3806 | """ | ||
| 3807 | def __init__( self, expr ): | ||
| 3808 | super(NotAny,self).__init__(expr) | ||
| 3809 | #~ self.leaveWhitespace() | ||
| 3810 | self.skipWhitespace = False # do NOT use self.leaveWhitespace(), don't want to propagate to exprs | ||
| 3811 | self.mayReturnEmpty = True | ||
| 3812 | self.errmsg = "Found unwanted token, "+_ustr(self.expr) | ||
| 3813 | |||
| 3814 | def parseImpl( self, instring, loc, doActions=True ): | ||
| 3815 | if self.expr.canParseNext(instring, loc): | ||
| 3816 | raise ParseException(instring, loc, self.errmsg, self) | ||
| 3817 | return loc, [] | ||
| 3818 | |||
| 3819 | def __str__( self ): | ||
| 3820 | if hasattr(self,"name"): | ||
| 3821 | return self.name | ||
| 3822 | |||
| 3823 | if self.strRepr is None: | ||
| 3824 | self.strRepr = "~{" + _ustr(self.expr) + "}" | ||
| 3825 | |||
| 3826 | return self.strRepr | ||
| 3827 | |||
| 3828 | class _MultipleMatch(ParseElementEnhance): | ||
| 3829 | def __init__( self, expr, stopOn=None): | ||
| 3830 | super(_MultipleMatch, self).__init__(expr) | ||
| 3831 | self.saveAsList = True | ||
| 3832 | ender = stopOn | ||
| 3833 | if isinstance(ender, basestring): | ||
| 3834 | ender = ParserElement._literalStringClass(ender) | ||
| 3835 | self.not_ender = ~ender if ender is not None else None | ||
| 3836 | |||
| 3837 | def parseImpl( self, instring, loc, doActions=True ): | ||
| 3838 | self_expr_parse = self.expr._parse | ||
| 3839 | self_skip_ignorables = self._skipIgnorables | ||
| 3840 | check_ender = self.not_ender is not None | ||
| 3841 | if check_ender: | ||
| 3842 | try_not_ender = self.not_ender.tryParse | ||
| 3843 | |||
| 3844 | # must be at least one (but first see if we are the stopOn sentinel; | ||
| 3845 | # if so, fail) | ||
| 3846 | if check_ender: | ||
| 3847 | try_not_ender(instring, loc) | ||
| 3848 | loc, tokens = self_expr_parse( instring, loc, doActions, callPreParse=False ) | ||
| 3849 | try: | ||
| 3850 | hasIgnoreExprs = bool(self.ignoreExprs) | ||
| 3851 | while 1: | ||
| 3852 | if check_ender: | ||
| 3853 | try_not_ender(instring, loc) | ||
| 3854 | if hasIgnoreExprs: | ||
| 3855 | preloc = self_skip_ignorables( instring, loc ) | ||
| 3856 | else: | ||
| 3857 | preloc = loc | ||
| 3858 | loc, tmptokens = self_expr_parse( instring, preloc, doActions ) | ||
| 3859 | if tmptokens or tmptokens.haskeys(): | ||
| 3860 | tokens += tmptokens | ||
| 3861 | except (ParseException,IndexError): | ||
| 3862 | pass | ||
| 3863 | |||
| 3864 | return loc, tokens | ||
| 3865 | |||
| 3866 | class OneOrMore(_MultipleMatch): | ||
| 3867 | """ | ||
| 3868 | Repetition of one or more of the given expression. | ||
| 3869 | |||
| 3870 | Parameters: | ||
| 3871 | - expr - expression that must match one or more times | ||
| 3872 | - stopOn - (default=C{None}) - expression for a terminating sentinel | ||
| 3873 | (only required if the sentinel would ordinarily match the repetition | ||
| 3874 | expression) | ||
| 3875 | |||
| 3876 | Example:: | ||
| 3877 | data_word = Word(alphas) | ||
| 3878 | label = data_word + FollowedBy(':') | ||
| 3879 | attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join)) | ||
| 3880 | |||
| 3881 | text = "shape: SQUARE posn: upper left color: BLACK" | ||
| 3882 | OneOrMore(attr_expr).parseString(text).pprint() # Fail! read 'color' as data instead of next label -> [['shape', 'SQUARE color']] | ||
| 3883 | |||
| 3884 | # use stopOn attribute for OneOrMore to avoid reading label string as part of the data | ||
| 3885 | attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) | ||
| 3886 | OneOrMore(attr_expr).parseString(text).pprint() # Better -> [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'BLACK']] | ||
| 3887 | |||
| 3888 | # could also be written as | ||
| 3889 | (attr_expr * (1,)).parseString(text).pprint() | ||
| 3890 | """ | ||
| 3891 | |||
| 3892 | def __str__( self ): | ||
| 3893 | if hasattr(self,"name"): | ||
| 3894 | return self.name | ||
| 3895 | |||
| 3896 | if self.strRepr is None: | ||
| 3897 | self.strRepr = "{" + _ustr(self.expr) + "}..." | ||
| 3898 | |||
| 3899 | return self.strRepr | ||
| 3900 | |||
| 3901 | class ZeroOrMore(_MultipleMatch): | ||
| 3902 | """ | ||
| 3903 | Optional repetition of zero or more of the given expression. | ||
| 3904 | |||
| 3905 | Parameters: | ||
| 3906 | - expr - expression that must match zero or more times | ||
| 3907 | - stopOn - (default=C{None}) - expression for a terminating sentinel | ||
| 3908 | (only required if the sentinel would ordinarily match the repetition | ||
| 3909 | expression) | ||
| 3910 | |||
| 3911 | Example: similar to L{OneOrMore} | ||
| 3912 | """ | ||
| 3913 | def __init__( self, expr, stopOn=None): | ||
| 3914 | super(ZeroOrMore,self).__init__(expr, stopOn=stopOn) | ||
| 3915 | self.mayReturnEmpty = True | ||
| 3916 | |||
| 3917 | def parseImpl( self, instring, loc, doActions=True ): | ||
| 3918 | try: | ||
| 3919 | return super(ZeroOrMore, self).parseImpl(instring, loc, doActions) | ||
| 3920 | except (ParseException,IndexError): | ||
| 3921 | return loc, [] | ||
| 3922 | |||
| 3923 | def __str__( self ): | ||
| 3924 | if hasattr(self,"name"): | ||
| 3925 | return self.name | ||
| 3926 | |||
| 3927 | if self.strRepr is None: | ||
| 3928 | self.strRepr = "[" + _ustr(self.expr) + "]..." | ||
| 3929 | |||
| 3930 | return self.strRepr | ||
| 3931 | |||
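| | # Minimal usage sketch (illustrative, not from the original source): | ||
| | #   item = Word(alphas) | ||
| | #   print(ZeroOrMore(item).parseString('a b c'))  # -> ['a', 'b', 'c'] | ||
| | #   print(ZeroOrMore(item).parseString(''))       # -> [] (zero matches is a match) | ||
| | |||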
| 3932 | class _NullToken(object): | ||
| 3933 | def __bool__(self): | ||
| 3934 | return False | ||
| 3935 | __nonzero__ = __bool__ | ||
| 3936 | def __str__(self): | ||
| 3937 | return "" | ||
| 3938 | |||
| 3939 | _optionalNotMatched = _NullToken() | ||
| 3940 | class Optional(ParseElementEnhance): | ||
| 3941 | """ | ||
| 3942 | Optional matching of the given expression. | ||
| 3943 | |||
| 3944 | Parameters: | ||
| 3945 | - expr - expression that may match zero or one time | ||
| 3946 | - default (optional) - value to be returned if the optional expression is not found. | ||
| 3947 | |||
| 3948 | Example:: | ||
| 3949 | # US postal code can be a 5-digit zip, plus optional 4-digit qualifier | ||
| 3950 | zip = Combine(Word(nums, exact=5) + Optional('-' + Word(nums, exact=4))) | ||
| 3951 | zip.runTests(''' | ||
| 3952 | # traditional ZIP code | ||
| 3953 | 12345 | ||
| 3954 | |||
| 3955 | # ZIP+4 form | ||
| 3956 | 12101-0001 | ||
| 3957 | |||
| 3958 | # invalid ZIP | ||
| 3959 | 98765- | ||
| 3960 | ''') | ||
| 3961 | prints:: | ||
| 3962 | # traditional ZIP code | ||
| 3963 | 12345 | ||
| 3964 | ['12345'] | ||
| 3965 | |||
| 3966 | # ZIP+4 form | ||
| 3967 | 12101-0001 | ||
| 3968 | ['12101-0001'] | ||
| 3969 | |||
| 3970 | # invalid ZIP | ||
| 3971 | 98765- | ||
| 3972 | ^ | ||
| 3973 | FAIL: Expected end of text (at char 5), (line:1, col:6) | ||
| 3974 | """ | ||
| 3975 | def __init__( self, expr, default=_optionalNotMatched ): | ||
| 3976 | super(Optional,self).__init__( expr, savelist=False ) | ||
| 3977 | self.saveAsList = self.expr.saveAsList | ||
| 3978 | self.defaultValue = default | ||
| 3979 | self.mayReturnEmpty = True | ||
| 3980 | |||
| 3981 | def parseImpl( self, instring, loc, doActions=True ): | ||
| 3982 | try: | ||
| 3983 | loc, tokens = self.expr._parse( instring, loc, doActions, callPreParse=False ) | ||
| 3984 | except (ParseException,IndexError): | ||
| 3985 | if self.defaultValue is not _optionalNotMatched: | ||
| 3986 | if self.expr.resultsName: | ||
| 3987 | tokens = ParseResults([ self.defaultValue ]) | ||
| 3988 | tokens[self.expr.resultsName] = self.defaultValue | ||
| 3989 | else: | ||
| 3990 | tokens = [ self.defaultValue ] | ||
| 3991 | else: | ||
| 3992 | tokens = [] | ||
| 3993 | return loc, tokens | ||
| 3994 | |||
| 3995 | def __str__( self ): | ||
| 3996 | if hasattr(self,"name"): | ||
| 3997 | return self.name | ||
| 3998 | |||
| 3999 | if self.strRepr is None: | ||
| 4000 | self.strRepr = "[" + _ustr(self.expr) + "]" | ||
| 4001 | |||
| 4002 | return self.strRepr | ||
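| | |||
| | # Sketch of the `default` parameter (illustrative, not from the original source): | ||
| | #   expr = Word(alphas) + Optional(Word(nums), default='0') | ||
| | #   print(expr.parseString('ab 12'))  # -> ['ab', '12'] | ||
| | #   print(expr.parseString('ab'))     # -> ['ab', '0'] | ||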
| 4003 | |||
| 4004 | class SkipTo(ParseElementEnhance): | ||
| 4005 | """ | ||
| 4006 | Token for skipping over all undefined text until the matched expression is found. | ||
| 4007 | |||
| 4008 | Parameters: | ||
| 4009 | - expr - target expression marking the end of the data to be skipped | ||
| 4010 | - include - (default=C{False}) if True, the target expression is also parsed | ||
| 4011 | (the skipped text and target expression are returned as a 2-element list). | ||
| 4012 | - ignore - (default=C{None}) used to define grammars (typically quoted strings and | ||
| 4013 | comments) that might contain false matches to the target expression | ||
| 4014 | - failOn - (default=C{None}) define expressions that are not allowed to be | ||
| 4015 | included in the skipped text; if found before the target expression is found, | ||
| 4016 | the SkipTo is not a match | ||
| 4017 | |||
| 4018 | Example:: | ||
| 4019 | report = ''' | ||
| 4020 | Outstanding Issues Report - 1 Jan 2000 | ||
| 4021 | |||
| 4022 | # | Severity | Description | Days Open | ||
| 4023 | -----+----------+-------------------------------------------+----------- | ||
| 4024 | 101 | Critical | Intermittent system crash | 6 | ||
| 4025 | 94 | Cosmetic | Spelling error on Login ('log|n') | 14 | ||
| 4026 | 79 | Minor | System slow when running too many reports | 47 | ||
| 4027 | ''' | ||
| 4028 | integer = Word(nums) | ||
| 4029 | SEP = Suppress('|') | ||
| 4030 | # use SkipTo to simply match everything up until the next SEP | ||
| 4031 | # - ignore quoted strings, so that a '|' character inside a quoted string does not match | ||
| 4032 | # - parse action will call token.strip() for each matched token, i.e., the description body | ||
| 4033 | string_data = SkipTo(SEP, ignore=quotedString) | ||
| 4034 | string_data.setParseAction(tokenMap(str.strip)) | ||
| 4035 | ticket_expr = (integer("issue_num") + SEP | ||
| 4036 | + string_data("sev") + SEP | ||
| 4037 | + string_data("desc") + SEP | ||
| 4038 | + integer("days_open")) | ||
| 4039 | |||
| 4040 | for tkt in ticket_expr.searchString(report): | ||
| 4041 | print(tkt.dump()) | ||
| 4042 | prints:: | ||
| 4043 | ['101', 'Critical', 'Intermittent system crash', '6'] | ||
| 4044 | - days_open: 6 | ||
| 4045 | - desc: Intermittent system crash | ||
| 4046 | - issue_num: 101 | ||
| 4047 | - sev: Critical | ||
| 4048 | ['94', 'Cosmetic', "Spelling error on Login ('log|n')", '14'] | ||
| 4049 | - days_open: 14 | ||
| 4050 | - desc: Spelling error on Login ('log|n') | ||
| 4051 | - issue_num: 94 | ||
| 4052 | - sev: Cosmetic | ||
| 4053 | ['79', 'Minor', 'System slow when running too many reports', '47'] | ||
| 4054 | - days_open: 47 | ||
| 4055 | - desc: System slow when running too many reports | ||
| 4056 | - issue_num: 79 | ||
| 4057 | - sev: Minor | ||
| 4058 | """ | ||
| 4059 | def __init__( self, other, include=False, ignore=None, failOn=None ): | ||
| 4060 | super( SkipTo, self ).__init__( other ) | ||
| 4061 | self.ignoreExpr = ignore | ||
| 4062 | self.mayReturnEmpty = True | ||
| 4063 | self.mayIndexError = False | ||
| 4064 | self.includeMatch = include | ||
| 4065 | self.asList = False | ||
| 4066 | if isinstance(failOn, basestring): | ||
| 4067 | self.failOn = ParserElement._literalStringClass(failOn) | ||
| 4068 | else: | ||
| 4069 | self.failOn = failOn | ||
| 4070 | self.errmsg = "No match found for "+_ustr(self.expr) | ||
| 4071 | |||
| 4072 | def parseImpl( self, instring, loc, doActions=True ): | ||
| 4073 | startloc = loc | ||
| 4074 | instrlen = len(instring) | ||
| 4075 | expr = self.expr | ||
| 4076 | expr_parse = self.expr._parse | ||
| 4077 | self_failOn_canParseNext = self.failOn.canParseNext if self.failOn is not None else None | ||
| 4078 | self_ignoreExpr_tryParse = self.ignoreExpr.tryParse if self.ignoreExpr is not None else None | ||
| 4079 | |||
| 4080 | tmploc = loc | ||
| 4081 | while tmploc <= instrlen: | ||
| 4082 | if self_failOn_canParseNext is not None: | ||
| 4083 | # break if failOn expression matches | ||
| 4084 | if self_failOn_canParseNext(instring, tmploc): | ||
| 4085 | break | ||
| 4086 | |||
| 4087 | if self_ignoreExpr_tryParse is not None: | ||
| 4088 | # advance past ignore expressions | ||
| 4089 | while 1: | ||
| 4090 | try: | ||
| 4091 | tmploc = self_ignoreExpr_tryParse(instring, tmploc) | ||
| 4092 | except ParseBaseException: | ||
| 4093 | break | ||
| 4094 | |||
| 4095 | try: | ||
| 4096 | expr_parse(instring, tmploc, doActions=False, callPreParse=False) | ||
| 4097 | except (ParseException, IndexError): | ||
| 4098 | # no match, advance loc in string | ||
| 4099 | tmploc += 1 | ||
| 4100 | else: | ||
| 4101 | # matched skipto expr, done | ||
| 4102 | break | ||
| 4103 | |||
| 4104 | else: | ||
| 4105 | # ran off the end of the input string without matching skipto expr, fail | ||
| 4106 | raise ParseException(instring, loc, self.errmsg, self) | ||
| 4107 | |||
| 4108 | # build up return values | ||
| 4109 | loc = tmploc | ||
| 4110 | skiptext = instring[startloc:loc] | ||
| 4111 | skipresult = ParseResults(skiptext) | ||
| 4112 | |||
| 4113 | if self.includeMatch: | ||
| 4114 | loc, mat = expr_parse(instring,loc,doActions,callPreParse=False) | ||
| 4115 | skipresult += mat | ||
| 4116 | |||
| 4117 | return loc, skipresult | ||
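| | |||
| | # failOn sketch (illustrative, not from the original source): scanning stops | ||
| | # at the failOn expression, so the target is never reached past it. | ||
| | #   stmt = SkipTo(';', failOn=Literal('}')) + ';' | ||
| | #   print(stmt.parseString('x = 1;'))  # -> ['x = 1', ';'] | ||
| | #   stmt.parseString('x = 1}')         # raises ParseException | ||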
| 4118 | |||
| 4119 | class Forward(ParseElementEnhance): | ||
| 4120 | """ | ||
| 4121 | Forward declaration of an expression to be defined later - | ||
| 4122 | used for recursive grammars, such as algebraic infix notation. | ||
| 4123 | When the expression is known, it is assigned to the C{Forward} variable using the '<<' operator. | ||
| 4124 | |||
| 4125 | Note: take care when assigning to C{Forward} not to overlook precedence of operators. | ||
| 4126 | Specifically, '|' has a lower precedence than '<<', so that:: | ||
| 4127 | fwdExpr << a | b | c | ||
| 4128 | will actually be evaluated as:: | ||
| 4129 | (fwdExpr << a) | b | c | ||
| 4130 | thereby leaving b and c out as parseable alternatives. It is recommended that you | ||
| 4131 | explicitly group the values inserted into the C{Forward}:: | ||
| 4132 | fwdExpr << (a | b | c) | ||
| 4133 | Converting to use the '<<=' operator instead will avoid this problem. | ||
| 4134 | |||
| 4135 | See L{ParseResults.pprint} for an example of a recursive parser created using | ||
| 4136 | C{Forward}. | ||
| 4137 | """ | ||
| 4138 | def __init__( self, other=None ): | ||
| 4139 | super(Forward,self).__init__( other, savelist=False ) | ||
| 4140 | |||
| 4141 | def __lshift__( self, other ): | ||
| 4142 | if isinstance( other, basestring ): | ||
| 4143 | other = ParserElement._literalStringClass(other) | ||
| 4144 | self.expr = other | ||
| 4145 | self.strRepr = None | ||
| 4146 | self.mayIndexError = self.expr.mayIndexError | ||
| 4147 | self.mayReturnEmpty = self.expr.mayReturnEmpty | ||
| 4148 | self.setWhitespaceChars( self.expr.whiteChars ) | ||
| 4149 | self.skipWhitespace = self.expr.skipWhitespace | ||
| 4150 | self.saveAsList = self.expr.saveAsList | ||
| 4151 | self.ignoreExprs.extend(self.expr.ignoreExprs) | ||
| 4152 | return self | ||
| 4153 | |||
| 4154 | def __ilshift__(self, other): | ||
| 4155 | return self << other | ||
| 4156 | |||
| 4157 | def leaveWhitespace( self ): | ||
| 4158 | self.skipWhitespace = False | ||
| 4159 | return self | ||
| 4160 | |||
| 4161 | def streamline( self ): | ||
| 4162 | if not self.streamlined: | ||
| 4163 | self.streamlined = True | ||
| 4164 | if self.expr is not None: | ||
| 4165 | self.expr.streamline() | ||
| 4166 | return self | ||
| 4167 | |||
| 4168 | def validate( self, validateTrace=[] ): | ||
| 4169 | if self not in validateTrace: | ||
| 4170 | tmp = validateTrace[:]+[self] | ||
| 4171 | if self.expr is not None: | ||
| 4172 | self.expr.validate(tmp) | ||
| 4173 | self.checkRecursion([]) | ||
| 4174 | |||
| 4175 | def __str__( self ): | ||
| 4176 | if hasattr(self,"name"): | ||
| 4177 | return self.name | ||
| 4178 | return self.__class__.__name__ + ": ..." | ||
| 4179 | |||
| 4180 | # stubbed out for now - creates awful memory and perf issues | ||
| 4181 | self._revertClass = self.__class__ | ||
| 4182 | self.__class__ = _ForwardNoRecurse | ||
| 4183 | try: | ||
| 4184 | if self.expr is not None: | ||
| 4185 | retString = _ustr(self.expr) | ||
| 4186 | else: | ||
| 4187 | retString = "None" | ||
| 4188 | finally: | ||
| 4189 | self.__class__ = self._revertClass | ||
| 4190 | return self.__class__.__name__ + ": " + retString | ||
| 4191 | |||
| 4192 | def copy(self): | ||
| 4193 | if self.expr is not None: | ||
| 4194 | return super(Forward,self).copy() | ||
| 4195 | else: | ||
| 4196 | ret = Forward() | ||
| 4197 | ret <<= self | ||
| 4198 | return ret | ||
| 4199 | |||
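| | # Recursion sketch (illustrative, not from the original source): a Forward | ||
| | # lets an expression refer to itself before it is fully defined. | ||
| | #   expr = Forward() | ||
| | #   atom = Word(nums) | Group(Suppress('(') + expr + Suppress(')')) | ||
| | #   expr <<= atom + ZeroOrMore('+' + atom) | ||
| | #   print(expr.parseString('1+(2+3)'))  # -> ['1', '+', ['2', '+', '3']] | ||
| | |||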
| 4200 | class _ForwardNoRecurse(Forward): | ||
| 4201 | def __str__( self ): | ||
| 4202 | return "..." | ||
| 4203 | |||
| 4204 | class TokenConverter(ParseElementEnhance): | ||
| 4205 | """ | ||
| 4206 | Abstract subclass of C{ParseElementEnhance}, for converting parsed results. | ||
| 4207 | """ | ||
| 4208 | def __init__( self, expr, savelist=False ): | ||
| 4209 | super(TokenConverter,self).__init__( expr )#, savelist ) | ||
| 4210 | self.saveAsList = False | ||
| 4211 | |||
| 4212 | class Combine(TokenConverter): | ||
| 4213 | """ | ||
| 4214 | Converter to concatenate all matching tokens to a single string. | ||
| 4215 | By default, the matching patterns must also be contiguous in the input string; | ||
| 4216 | this can be disabled by specifying C{'adjacent=False'} in the constructor. | ||
| 4217 | |||
| 4218 | Example:: | ||
| 4219 | real = Word(nums) + '.' + Word(nums) | ||
| 4220 | print(real.parseString('3.1416')) # -> ['3', '.', '1416'] | ||
| 4221 | # will also erroneously match the following | ||
| 4222 | print(real.parseString('3. 1416')) # -> ['3', '.', '1416'] | ||
| 4223 | |||
| 4224 | real = Combine(Word(nums) + '.' + Word(nums)) | ||
| 4225 | print(real.parseString('3.1416')) # -> ['3.1416'] | ||
| 4226 | # no match when there are internal spaces | ||
| 4227 | print(real.parseString('3. 1416')) # -> Exception: Expected W:(0123...) | ||
| 4228 | """ | ||
| 4229 | def __init__( self, expr, joinString="", adjacent=True ): | ||
| 4230 | super(Combine,self).__init__( expr ) | ||
| 4231 | # suppress whitespace-stripping in contained parse expressions, but re-enable it on the Combine itself | ||
| 4232 | if adjacent: | ||
| 4233 | self.leaveWhitespace() | ||
| 4234 | self.adjacent = adjacent | ||
| 4235 | self.skipWhitespace = True | ||
| 4236 | self.joinString = joinString | ||
| 4237 | self.callPreparse = True | ||
| 4238 | |||
| 4239 | def ignore( self, other ): | ||
| 4240 | if self.adjacent: | ||
| 4241 | ParserElement.ignore(self, other) | ||
| 4242 | else: | ||
| 4243 | super( Combine, self).ignore( other ) | ||
| 4244 | return self | ||
| 4245 | |||
| 4246 | def postParse( self, instring, loc, tokenlist ): | ||
| 4247 | retToks = tokenlist.copy() | ||
| 4248 | del retToks[:] | ||
| 4249 | retToks += ParseResults([ "".join(tokenlist._asStringList(self.joinString)) ], modal=self.modalResults) | ||
| 4250 | |||
| 4251 | if self.resultsName and retToks.haskeys(): | ||
| 4252 | return [ retToks ] | ||
| 4253 | else: | ||
| 4254 | return retToks | ||
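| | |||
| | # adjacent=False sketch (illustrative, not from the original source): | ||
| | #   real = Combine(Word(nums) + '.' + Word(nums), adjacent=False) | ||
| | #   print(real.parseString('3. 1416'))  # -> ['3.1416'] (whitespace allowed) | ||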
| 4255 | |||
| 4256 | class Group(TokenConverter): | ||
| 4257 | """ | ||
| 4258 | Converter to return the matched tokens as a list - useful for returning tokens of C{L{ZeroOrMore}} and C{L{OneOrMore}} expressions. | ||
| 4259 | |||
| 4260 | Example:: | ||
| 4261 | ident = Word(alphas) | ||
| 4262 | num = Word(nums) | ||
| 4263 | term = ident | num | ||
| 4264 | func = ident + Optional(delimitedList(term)) | ||
| 4265 | print(func.parseString("fn a,b,100")) # -> ['fn', 'a', 'b', '100'] | ||
| 4266 | |||
| 4267 | func = ident + Group(Optional(delimitedList(term))) | ||
| 4268 | print(func.parseString("fn a,b,100")) # -> ['fn', ['a', 'b', '100']] | ||
| 4269 | """ | ||
| 4270 | def __init__( self, expr ): | ||
| 4271 | super(Group,self).__init__( expr ) | ||
| 4272 | self.saveAsList = True | ||
| 4273 | |||
| 4274 | def postParse( self, instring, loc, tokenlist ): | ||
| 4275 | return [ tokenlist ] | ||
| 4276 | |||
| 4277 | class Dict(TokenConverter): | ||
| 4278 | """ | ||
| 4279 | Converter to return a repetitive expression as a list, but also as a dictionary. | ||
| 4280 | Each element can also be referenced using the first token in the expression as its key. | ||
| 4281 | Useful for tabular report scraping when the first column can be used as an item key. | ||
| 4282 | |||
| 4283 | Example:: | ||
| 4284 | data_word = Word(alphas) | ||
| 4285 | label = data_word + FollowedBy(':') | ||
| 4286 | attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join)) | ||
| 4287 | |||
| 4288 | text = "shape: SQUARE posn: upper left color: light blue texture: burlap" | ||
| 4289 | attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) | ||
| 4290 | |||
| 4291 | # print attributes as plain groups | ||
| 4292 | print(OneOrMore(attr_expr).parseString(text).dump()) | ||
| 4293 | |||
| 4294 | # instead of OneOrMore(expr), parse using Dict(OneOrMore(Group(expr))) - Dict will auto-assign names | ||
| 4295 | result = Dict(OneOrMore(Group(attr_expr))).parseString(text) | ||
| 4296 | print(result.dump()) | ||
| 4297 | |||
| 4298 | # access named fields as dict entries, or output as dict | ||
| 4299 | print(result['shape']) | ||
| 4300 | print(result.asDict()) | ||
| 4301 | prints:: | ||
| 4302 | ['shape', 'SQUARE', 'posn', 'upper left', 'color', 'light blue', 'texture', 'burlap'] | ||
| 4303 | |||
| 4304 | [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']] | ||
| 4305 | - color: light blue | ||
| 4306 | - posn: upper left | ||
| 4307 | - shape: SQUARE | ||
| 4308 | - texture: burlap | ||
| 4309 | SQUARE | ||
| 4310 | {'color': 'light blue', 'posn': 'upper left', 'texture': 'burlap', 'shape': 'SQUARE'} | ||
| 4311 | See more examples at L{ParseResults} of accessing fields by results name. | ||
| 4312 | """ | ||
| 4313 | def __init__( self, expr ): | ||
| 4314 | super(Dict,self).__init__( expr ) | ||
| 4315 | self.saveAsList = True | ||
| 4316 | |||
| 4317 | def postParse( self, instring, loc, tokenlist ): | ||
| 4318 | for i,tok in enumerate(tokenlist): | ||
| 4319 | if len(tok) == 0: | ||
| 4320 | continue | ||
| 4321 | ikey = tok[0] | ||
| 4322 | if isinstance(ikey,int): | ||
| 4323 | ikey = _ustr(tok[0]).strip() | ||
| 4324 | if len(tok)==1: | ||
| 4325 | tokenlist[ikey] = _ParseResultsWithOffset("",i) | ||
| 4326 | elif len(tok)==2 and not isinstance(tok[1],ParseResults): | ||
| 4327 | tokenlist[ikey] = _ParseResultsWithOffset(tok[1],i) | ||
| 4328 | else: | ||
| 4329 | dictvalue = tok.copy() #ParseResults(i) | ||
| 4330 | del dictvalue[0] | ||
| 4331 | if len(dictvalue)!= 1 or (isinstance(dictvalue,ParseResults) and dictvalue.haskeys()): | ||
| 4332 | tokenlist[ikey] = _ParseResultsWithOffset(dictvalue,i) | ||
| 4333 | else: | ||
| 4334 | tokenlist[ikey] = _ParseResultsWithOffset(dictvalue[0],i) | ||
| 4335 | |||
| 4336 | if self.resultsName: | ||
| 4337 | return [ tokenlist ] | ||
| 4338 | else: | ||
| 4339 | return tokenlist | ||
| 4340 | |||
| 4341 | |||
| 4342 | class Suppress(TokenConverter): | ||
| 4343 | """ | ||
| 4344 | Converter for ignoring the results of a parsed expression. | ||
| 4345 | |||
| 4346 | Example:: | ||
| 4347 | source = "a, b, c,d" | ||
| 4348 | wd = Word(alphas) | ||
| 4349 | wd_list1 = wd + ZeroOrMore(',' + wd) | ||
| 4350 | print(wd_list1.parseString(source)) | ||
| 4351 | |||
| 4352 | # often, delimiters that are useful during parsing are just in the | ||
| 4353 | # way afterward - use Suppress to keep them out of the parsed output | ||
| 4354 | wd_list2 = wd + ZeroOrMore(Suppress(',') + wd) | ||
| 4355 | print(wd_list2.parseString(source)) | ||
| 4356 | prints:: | ||
| 4357 | ['a', ',', 'b', ',', 'c', ',', 'd'] | ||
| 4358 | ['a', 'b', 'c', 'd'] | ||
| 4359 | (See also L{delimitedList}.) | ||
| 4360 | """ | ||
| 4361 | def postParse( self, instring, loc, tokenlist ): | ||
| 4362 | return [] | ||
| 4363 | |||
| 4364 | def suppress( self ): | ||
| 4365 | return self | ||
| 4366 | |||
| 4367 | |||
| 4368 | class OnlyOnce(object): | ||
| 4369 | """ | ||
| 4370 | Wrapper for parse actions, to ensure they are only called once. | ||
| 4371 | """ | ||
| 4372 | def __init__(self, methodCall): | ||
| 4373 | self.callable = _trim_arity(methodCall) | ||
| 4374 | self.called = False | ||
| 4375 | def __call__(self,s,l,t): | ||
| 4376 | if not self.called: | ||
| 4377 | results = self.callable(s,l,t) | ||
| 4378 | self.called = True | ||
| 4379 | return results | ||
| 4380 | raise ParseException(s,l,"") | ||
| 4381 | def reset(self): | ||
| 4382 | self.called = False | ||
| 4383 | |||
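| | # Usage sketch (illustrative, not from the original source): | ||
| | #   wd = Word(alphas).setParseAction(OnlyOnce(lambda s,l,t: t[0].upper())) | ||
| | #   print(wd.parseString('once'))  # -> ['ONCE'] | ||
| | #   wd.parseString('twice')        # raises ParseException on the second call | ||
| | |||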
| 4384 | def traceParseAction(f): | ||
| 4385 | """ | ||
| 4386 | Decorator for debugging parse actions. | ||
| 4387 | |||
| 4388 | When the parse action is called, this decorator will print C{">> entering I{method-name}(line:I{current_source_line}, I{parse_location}, I{matched_tokens})".} | ||
| 4389 | When the parse action completes, the decorator will print C{"<<"} followed by the returned value, or any exception that the parse action raised. | ||
| 4390 | |||
| 4391 | Example:: | ||
| 4392 | wd = Word(alphas) | ||
| 4393 | |||
| 4394 | @traceParseAction | ||
| 4395 | def remove_duplicate_chars(tokens): | ||
| 4396 | return ''.join(sorted(set(''.join(tokens)))) | ||
| 4397 | |||
| 4398 | wds = OneOrMore(wd).setParseAction(remove_duplicate_chars) | ||
| 4399 | print(wds.parseString("slkdjs sld sldd sdlf sdljf")) | ||
| 4400 | prints:: | ||
| 4401 | >>entering remove_duplicate_chars(line: 'slkdjs sld sldd sdlf sdljf', 0, (['slkdjs', 'sld', 'sldd', 'sdlf', 'sdljf'], {})) | ||
| 4402 | <<leaving remove_duplicate_chars (ret: 'dfjkls') | ||
| 4403 | ['dfjkls'] | ||
| 4404 | """ | ||
| 4405 | f = _trim_arity(f) | ||
| 4406 | def z(*paArgs): | ||
| 4407 | thisFunc = f.__name__ | ||
| 4408 | s,l,t = paArgs[-3:] | ||
| 4409 | if len(paArgs)>3: | ||
| 4410 | thisFunc = paArgs[0].__class__.__name__ + '.' + thisFunc | ||
| 4411 | sys.stderr.write( ">>entering %s(line: '%s', %d, %r)\n" % (thisFunc,line(l,s),l,t) ) | ||
| 4412 | try: | ||
| 4413 | ret = f(*paArgs) | ||
| 4414 | except Exception as exc: | ||
| 4415 | sys.stderr.write( "<<leaving %s (exception: %s)\n" % (thisFunc,exc) ) | ||
| 4416 | raise | ||
| 4417 | sys.stderr.write( "<<leaving %s (ret: %r)\n" % (thisFunc,ret) ) | ||
| 4418 | return ret | ||
| 4419 | try: | ||
| 4420 | z.__name__ = f.__name__ | ||
| 4421 | except AttributeError: | ||
| 4422 | pass | ||
| 4423 | return z | ||
| 4424 | |||
| 4425 | # | ||
| 4426 | # global helpers | ||
| 4427 | # | ||
| 4428 | def delimitedList( expr, delim=",", combine=False ): | ||
| 4429 | """ | ||
| 4430 | Helper to define a delimited list of expressions - the delimiter defaults to ','. | ||
| 4431 | By default, the list elements and delimiters can have intervening whitespace and | ||
| 4432 | comments; this can be overridden by passing C{combine=True} in the constructor. | ||
| 4433 | If C{combine} is set to C{True}, the matching tokens are returned as a single token | ||
| 4434 | string, with the delimiters included; otherwise, the matching tokens are returned | ||
| 4435 | as a list of tokens, with the delimiters suppressed. | ||
| 4436 | |||
| 4437 | Example:: | ||
| 4438 | delimitedList(Word(alphas)).parseString("aa,bb,cc") # -> ['aa', 'bb', 'cc'] | ||
| 4439 | delimitedList(Word(hexnums), delim=':', combine=True).parseString("AA:BB:CC:DD:EE") # -> ['AA:BB:CC:DD:EE'] | ||
| 4440 | """ | ||
| 4441 | dlName = _ustr(expr)+" ["+_ustr(delim)+" "+_ustr(expr)+"]..." | ||
| 4442 | if combine: | ||
| 4443 | return Combine( expr + ZeroOrMore( delim + expr ) ).setName(dlName) | ||
| 4444 | else: | ||
| 4445 | return ( expr + ZeroOrMore( Suppress( delim ) + expr ) ).setName(dlName) | ||
| 4446 | |||
| 4447 | def countedArray( expr, intExpr=None ): | ||
| 4448 | """ | ||
| 4449 | Helper to define a counted list of expressions. | ||
| 4450 | This helper defines a pattern of the form:: | ||
| 4451 | integer expr expr expr... | ||
| 4452 | where the leading integer tells how many expr expressions follow. | ||
| 4453 | The matched tokens are returned as a list of expr tokens - the leading count token is suppressed. | ||
| 4454 | |||
| 4455 | If C{intExpr} is specified, it should be a pyparsing expression that produces an integer value. | ||
| 4456 | |||
| 4457 | Example:: | ||
| 4458 | countedArray(Word(alphas)).parseString('2 ab cd ef') # -> ['ab', 'cd'] | ||
| 4459 | |||
| 4460 | # in this parser, the leading integer value is given in binary, | ||
| 4461 | # '10' indicating that 2 values are in the array | ||
| 4462 | binaryConstant = Word('01').setParseAction(lambda t: int(t[0], 2)) | ||
| 4463 | countedArray(Word(alphas), intExpr=binaryConstant).parseString('10 ab cd ef') # -> ['ab', 'cd'] | ||
| 4464 | """ | ||
| 4465 | arrayExpr = Forward() | ||
| 4466 | def countFieldParseAction(s,l,t): | ||
| 4467 | n = t[0] | ||
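| | # rebind the Forward to exactly n copies of expr (or empty when n == 0) | ||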
| 4468 | arrayExpr << (n and Group(And([expr]*n)) or Group(empty)) | ||
| 4469 | return [] | ||
| 4470 | if intExpr is None: | ||
| 4471 | intExpr = Word(nums).setParseAction(lambda t:int(t[0])) | ||
| 4472 | else: | ||
| 4473 | intExpr = intExpr.copy() | ||
| 4474 | intExpr.setName("arrayLen") | ||
| 4475 | intExpr.addParseAction(countFieldParseAction, callDuringTry=True) | ||
| 4476 | return ( intExpr + arrayExpr ).setName('(len) ' + _ustr(expr) + '...') | ||
| 4477 | |||
| 4478 | def _flatten(L): | ||
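| | # flatten nested lists, e.g. _flatten([1, [2, [3, 4]]]) -> [1, 2, 3, 4] | ||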
| 4479 | ret = [] | ||
| 4480 | for i in L: | ||
| 4481 | if isinstance(i,list): | ||
| 4482 | ret.extend(_flatten(i)) | ||
| 4483 | else: | ||
| 4484 | ret.append(i) | ||
| 4485 | return ret | ||
| 4486 | |||
| 4487 | def matchPreviousLiteral(expr): | ||
| 4488 | """ | ||
| 4489 | Helper to define an expression that is indirectly defined from | ||
| 4490 | the tokens matched in a previous expression, that is, it looks | ||
| 4491 | for a 'repeat' of a previous expression. For example:: | ||
| 4492 | first = Word(nums) | ||
| 4493 | second = matchPreviousLiteral(first) | ||
| 4494 | matchExpr = first + ":" + second | ||
| 4495 | will match C{"1:1"}, but not C{"1:2"}. Because this matches a | ||
| 4496 | previous literal, it will also match the leading C{"1:1"} in C{"1:10"}. | ||
| 4497 | If this is not desired, use C{matchPreviousExpr}. | ||
| 4498 | Do I{not} use with packrat parsing enabled. | ||
| 4499 | """ | ||
| 4500 | rep = Forward() | ||
| 4501 | def copyTokenToRepeater(s,l,t): | ||
| 4502 | if t: | ||
| 4503 | if len(t) == 1: | ||
| 4504 | rep << t[0] | ||
| 4505 | else: | ||
| 4506 | # flatten t tokens | ||
| 4507 | tflat = _flatten(t.asList()) | ||
| 4508 | rep << And(Literal(tt) for tt in tflat) | ||
| 4509 | else: | ||
| 4510 | rep << Empty() | ||
| 4511 | expr.addParseAction(copyTokenToRepeater, callDuringTry=True) | ||
| 4512 | rep.setName('(prev) ' + _ustr(expr)) | ||
| 4513 | return rep | ||
| 4514 | |||
| 4515 | def matchPreviousExpr(expr): | ||
| 4516 | """ | ||
| 4517 | Helper to define an expression that is indirectly defined from | ||
| 4518 | the tokens matched in a previous expression, that is, it looks | ||
| 4519 | for a 'repeat' of a previous expression. For example:: | ||
| 4520 | first = Word(nums) | ||
| 4521 | second = matchPreviousExpr(first) | ||
| 4522 | matchExpr = first + ":" + second | ||
| 4523 | will match C{"1:1"}, but not C{"1:2"}. Because this matches by | ||
| 4524 | expressions, it will I{not} match the leading C{"1:1"} in C{"1:10"}; | ||
| 4525 | the expressions are evaluated first, and then compared, so | ||
| 4526 | C{"1"} is compared with C{"10"}. | ||
| 4527 | Do I{not} use with packrat parsing enabled. | ||
| 4528 | """ | ||
| 4529 | rep = Forward() | ||
| 4530 | e2 = expr.copy() | ||
| 4531 | rep <<= e2 | ||
| 4532 | def copyTokenToRepeater(s,l,t): | ||
| 4533 | matchTokens = _flatten(t.asList()) | ||
| 4534 | def mustMatchTheseTokens(s,l,t): | ||
| 4535 | theseTokens = _flatten(t.asList()) | ||
| 4536 | if theseTokens != matchTokens: | ||
| 4537 | raise ParseException("",0,"") | ||
| 4538 | rep.setParseAction( mustMatchTheseTokens, callDuringTry=True ) | ||
| 4539 | expr.addParseAction(copyTokenToRepeater, callDuringTry=True) | ||
| 4540 | rep.setName('(prev) ' + _ustr(expr)) | ||
| 4541 | return rep | ||
| 4542 | |||
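| | # e.g. _escapeRegexRangeChars("a-z]") -> "a\-z\]" | ||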
| 4543 | def _escapeRegexRangeChars(s): | ||
| 4544 | #~ escape these chars: ^-] | ||
| 4545 | for c in r"\^-]": | ||
| 4546 | s = s.replace(c,_bslash+c) | ||
| 4547 | s = s.replace("\n",r"\n") | ||
| 4548 | s = s.replace("\t",r"\t") | ||
| 4549 | return _ustr(s) | ||
| 4550 | |||
| 4551 | def oneOf( strs, caseless=False, useRegex=True ): | ||
| 4552 | """ | ||
| 4553 | Helper to quickly define a set of alternative Literals, and makes sure to do | ||
| 4554 | longest-first testing when there is a conflict, regardless of the input order, | ||
| 4555 | but returns a C{L{MatchFirst}} for best performance. | ||
| 4556 | |||
| 4557 | Parameters: | ||
| 4558 | - strs - a string of space-delimited literals, or a collection of string literals | ||
| 4559 | - caseless - (default=C{False}) - treat all literals as caseless | ||
| 4560 | - useRegex - (default=C{True}) - as an optimization, will generate a Regex | ||
| 4561 | object; otherwise, will generate a C{MatchFirst} object (if C{caseless=True}, or | ||
| 4562 | if creating a C{Regex} raises an exception) | ||
| 4563 | |||
| 4564 | Example:: | ||
| 4565 | comp_oper = oneOf("< = > <= >= !=") | ||
| 4566 | var = Word(alphas) | ||
| 4567 | number = Word(nums) | ||
| 4568 | term = var | number | ||
| 4569 | comparison_expr = term + comp_oper + term | ||
| 4570 | print(comparison_expr.searchString("B = 12 AA=23 B<=AA AA>12")) | ||
| 4571 | prints:: | ||
| 4572 | [['B', '=', '12'], ['AA', '=', '23'], ['B', '<=', 'AA'], ['AA', '>', '12']] | ||
| 4573 | """ | ||
| 4574 | if caseless: | ||
| 4575 | isequal = ( lambda a,b: a.upper() == b.upper() ) | ||
| 4576 | masks = ( lambda a,b: b.upper().startswith(a.upper()) ) | ||
| 4577 | parseElementClass = CaselessLiteral | ||
| 4578 | else: | ||
| 4579 | isequal = ( lambda a,b: a == b ) | ||
| 4580 | masks = ( lambda a,b: b.startswith(a) ) | ||
| 4581 | parseElementClass = Literal | ||
| 4582 | |||
| 4583 | symbols = [] | ||
| 4584 | if isinstance(strs,basestring): | ||
| 4585 | symbols = strs.split() | ||
| 4586 | elif isinstance(strs, collections.Iterable): | ||
| 4587 | symbols = list(strs) | ||
| 4588 | else: | ||
| 4589 | warnings.warn("Invalid argument to oneOf, expected string or iterable", | ||
| 4590 | SyntaxWarning, stacklevel=2) | ||
| 4591 | if not symbols: | ||
| 4592 | return NoMatch() | ||
| 4593 | |||
| 4594 | i = 0 | ||
| 4595 | while i < len(symbols)-1: | ||
| 4596 | cur = symbols[i] | ||
| 4597 | for j,other in enumerate(symbols[i+1:]): | ||
| 4598 | if ( isequal(other, cur) ): | ||
| 4599 | del symbols[i+j+1] | ||
| 4600 | break | ||
| 4601 | elif ( masks(cur, other) ): | ||
| 4602 | del symbols[i+j+1] | ||
| 4603 | symbols.insert(i,other) | ||
| 4604 | cur = other | ||
| 4605 | break | ||
| 4606 | else: | ||
| 4607 | i += 1 | ||
| 4608 | |||
| 4609 | if not caseless and useRegex: | ||
| 4610 | #~ print (strs,"->", "|".join( [ _escapeRegexChars(sym) for sym in symbols] )) | ||
| 4611 | try: | ||
| 4612 | if len(symbols)==len("".join(symbols)): | ||
| 4613 | return Regex( "[%s]" % "".join(_escapeRegexRangeChars(sym) for sym in symbols) ).setName(' | '.join(symbols)) | ||
| 4614 | else: | ||
| 4615 | return Regex( "|".join(re.escape(sym) for sym in symbols) ).setName(' | '.join(symbols)) | ||
| 4616 | except Exception: | ||
| 4617 | warnings.warn("Exception creating Regex for oneOf, building MatchFirst", | ||
| 4618 | SyntaxWarning, stacklevel=2) | ||
| 4619 | |||
| 4620 | |||
| 4621 | # last resort, just use MatchFirst | ||
| 4622 | return MatchFirst(parseElementClass(sym) for sym in symbols).setName(' | '.join(symbols)) | ||
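| | |||
| | # Ordering sketch (illustrative, not from the original source): overlapping | ||
| | # literals are tested longest-first regardless of input order. | ||
| | #   print(oneOf('ab abc').parseString('abc'))  # -> ['abc'], not ['ab'] | ||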
| 4623 | |||
| 4624 | def dictOf( key, value ): | ||
| 4625 | """ | ||
| 4626 | Helper to easily and clearly define a dictionary by specifying the respective patterns | ||
| 4627 | for the key and value. Takes care of defining the C{L{Dict}}, C{L{ZeroOrMore}}, and C{L{Group}} tokens | ||
| 4628 | in the proper order. The key pattern can include delimiting markers or punctuation, | ||
| 4629 | as long as they are suppressed, thereby leaving the significant key text. The value | ||
| 4630 | pattern can include named results, so that the C{Dict} results can include named token | ||
| 4631 | fields. | ||
| 4632 | |||
| 4633 | Example:: | ||
| 4634 | text = "shape: SQUARE posn: upper left color: light blue texture: burlap" | ||
| 4635 | attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) | ||
| 4636 | print(OneOrMore(attr_expr).parseString(text).dump()) | ||
| 4637 | |||
| 4638 | attr_label = label | ||
| 4639 | attr_value = Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join) | ||
| 4640 | |||
| 4641 | # similar to Dict, but simpler call format | ||
| 4642 | result = dictOf(attr_label, attr_value).parseString(text) | ||
| 4643 | print(result.dump()) | ||
| 4644 | print(result['shape']) | ||
| 4645 | print(result.shape) # object attribute access works too | ||
| 4646 | print(result.asDict()) | ||
| 4647 | prints:: | ||
| 4648 | [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']] | ||
| 4649 | - color: light blue | ||
| 4650 | - posn: upper left | ||
| 4651 | - shape: SQUARE | ||
| 4652 | - texture: burlap | ||
| 4653 | SQUARE | ||
| 4654 | SQUARE | ||
| 4655 | {'color': 'light blue', 'shape': 'SQUARE', 'posn': 'upper left', 'texture': 'burlap'} | ||
| 4656 | """ | ||
| 4657 | return Dict( ZeroOrMore( Group ( key + value ) ) ) | ||
| 4658 | |||
| 4659 | def originalTextFor(expr, asString=True): | ||
| 4660 | """ | ||
| 4661 | Helper to return the original, untokenized text for a given expression. Useful to | ||
| 4662 | restore the parsed fields of an HTML start tag into the raw tag text itself, or to | ||
| 4663 | revert separate tokens with intervening whitespace back to the original matching | ||
| 4664 | input text. By default, returns a string containing the original parsed text. | ||
| 4665 | |||
| 4666 | If the optional C{asString} argument is passed as C{False}, then the return value is a | ||
| 4667 | C{L{ParseResults}} containing any results names that were originally matched, and a | ||
| 4668 | single token containing the original matched text from the input string. So if | ||
| 4669 | the expression passed to C{L{originalTextFor}} contains expressions with defined | ||
| 4670 | results names, you must set C{asString} to C{False} if you want to preserve those | ||
| 4671 | results name values. | ||
| 4672 | |||
| 4673 | Example:: | ||
| 4674 | src = "this is test <b> bold <i>text</i> </b> normal text " | ||
| 4675 | for tag in ("b","i"): | ||
| 4676 | opener,closer = makeHTMLTags(tag) | ||
| 4677 | patt = originalTextFor(opener + SkipTo(closer) + closer) | ||
| 4678 | print(patt.searchString(src)[0]) | ||
| 4679 | prints:: | ||
| 4680 | ['<b> bold <i>text</i> </b>'] | ||
| 4681 | ['<i>text</i>'] | ||
| 4682 | """ | ||
| 4683 | locMarker = Empty().setParseAction(lambda s,loc,t: loc) | ||
| 4684 | endlocMarker = locMarker.copy() | ||
| 4685 | endlocMarker.callPreparse = False | ||
| 4686 | matchExpr = locMarker("_original_start") + expr + endlocMarker("_original_end") | ||
| 4687 | if asString: | ||
| 4688 | extractText = lambda s,l,t: s[t._original_start:t._original_end] | ||
| 4689 | else: | ||
| 4690 | def extractText(s,l,t): | ||
| 4691 | t[:] = [s[t.pop('_original_start'):t.pop('_original_end')]] | ||
| 4692 | matchExpr.setParseAction(extractText) | ||
| 4693 | matchExpr.ignoreExprs = expr.ignoreExprs | ||
| 4694 | return matchExpr | ||
| 4695 | |||
| 4696 | def ungroup(expr): | ||
| 4697 | """ | ||
| 4698 | Helper to undo pyparsing's default grouping of And expressions, even | ||
| 4699 | if all but one are non-empty. | ||
| 4700 | """ | ||
| 4701 | return TokenConverter(expr).setParseAction(lambda t:t[0]) | ||
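| | |||
| | # Usage sketch (illustrative, not from the original source): | ||
| | #   g = Group(Word(alphas) + Word(nums)) | ||
| | #   print(g.parseString('ab 12'))           # -> [['ab', '12']] | ||
| | #   print(ungroup(g).parseString('ab 12'))  # -> ['ab', '12'] | ||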
| 4702 | |||
| 4703 | def locatedExpr(expr): | ||
| 4704 | """ | ||
| 4705 | Helper to decorate a returned token with its starting and ending locations in the input string. | ||
| 4706 | This helper adds the following results names: | ||
| 4707 | - locn_start = location where matched expression begins | ||
| 4708 | - locn_end = location where matched expression ends | ||
| 4709 | - value = the actual parsed results | ||
| 4710 | |||
| 4711 | Be careful if the input text contains C{<TAB>} characters, you may want to call | ||
| 4712 | C{L{ParserElement.parseWithTabs}} | ||
| 4713 | |||
| 4714 | Example:: | ||
| 4715 | wd = Word(alphas) | ||
| 4716 | for match in locatedExpr(wd).searchString("ljsdf123lksdjjf123lkkjj1222"): | ||
| 4717 | print(match) | ||
| 4718 | prints:: | ||
| 4719 | [[0, 'ljsdf', 5]] | ||
| 4720 | [[8, 'lksdjjf', 15]] | ||
| 4721 | [[18, 'lkkjj', 23]] | ||
| 4722 | """ | ||
| 4723 | locator = Empty().setParseAction(lambda s,l,t: l) | ||
| 4724 | return Group(locator("locn_start") + expr("value") + locator.copy().leaveWhitespace()("locn_end")) | ||
| 4725 | |||
| 4726 | |||
| 4727 | # convenience constants for positional expressions | ||
| 4728 | empty = Empty().setName("empty") | ||
| 4729 | lineStart = LineStart().setName("lineStart") | ||
| 4730 | lineEnd = LineEnd().setName("lineEnd") | ||
| 4731 | stringStart = StringStart().setName("stringStart") | ||
| 4732 | stringEnd = StringEnd().setName("stringEnd") | ||
| 4733 | |||
| 4734 | _escapedPunc = Word( _bslash, r"\[]-*.$+^?()~ ", exact=2 ).setParseAction(lambda s,l,t:t[0][1]) | ||
| 4735 | _escapedHexChar = Regex(r"\\0?[xX][0-9a-fA-F]+").setParseAction(lambda s,l,t:unichr(int(t[0].lstrip(r'\0x'),16))) | ||
| 4736 | _escapedOctChar = Regex(r"\\0[0-7]+").setParseAction(lambda s,l,t:unichr(int(t[0][1:],8))) | ||
| 4737 | _singleChar = _escapedPunc | _escapedHexChar | _escapedOctChar | Word(printables, excludeChars=r'\]', exact=1) | Regex(r"\w", re.UNICODE) | ||
| 4738 | _charRange = Group(_singleChar + Suppress("-") + _singleChar) | ||
| 4739 | _reBracketExpr = Literal("[") + Optional("^").setResultsName("negate") + Group( OneOrMore( _charRange | _singleChar ) ).setResultsName("body") + "]" | ||
| 4740 | |||
| 4741 | def srange(s): | ||
| 4742 | r""" | ||
| 4743 | Helper to easily define string ranges for use in Word construction. Borrows | ||
| 4744 | syntax from regexp '[]' string range definitions:: | ||
| 4745 | srange("[0-9]") -> "0123456789" | ||
| 4746 | srange("[a-z]") -> "abcdefghijklmnopqrstuvwxyz" | ||
| 4747 | srange("[a-z$_]") -> "abcdefghijklmnopqrstuvwxyz$_" | ||
| 4748 | The input string must be enclosed in []'s, and the returned string is the expanded | ||
| 4749 | character set joined into a single string. | ||
| 4750 | The values enclosed in the []'s may be: | ||
| 4751 | - a single character | ||
| 4752 | - an escaped character with a leading backslash (such as C{\-} or C{\]}) | ||
| 4753 | - an escaped hex character with a leading C{'\x'} (C{\x21}, which is a C{'!'} character) | ||
| 4754 | (C{\0x##} is also supported for backwards compatibility) | ||
| 4755 | - an escaped octal character with a leading C{'\0'} (C{\041}, which is a C{'!'} character) | ||
| 4756 | - a range of any of the above, separated by a dash (C{'a-z'}, etc.) | ||
| 4757 | - any combination of the above (C{'aeiouy'}, C{'a-zA-Z0-9_$'}, etc.) | ||
| 4758 | """ | ||
| 4759 | _expanded = lambda p: p if not isinstance(p,ParseResults) else ''.join(unichr(c) for c in range(ord(p[0]),ord(p[1])+1)) | ||
| 4760 | try: | ||
| 4761 | return "".join(_expanded(part) for part in _reBracketExpr.parseString(s).body) | ||
| 4762 | except Exception: | ||
| 4763 | return "" | ||
| 4764 | |||
| 4765 | def matchOnlyAtCol(n): | ||
| 4766 | """ | ||
| 4767 | Helper method for defining parse actions that require matching at a specific | ||
| 4768 | column in the input text. | ||
| 4769 | """ | ||
| 4770 | def verifyCol(strg,locn,toks): | ||
| 4771 | if col(locn,strg) != n: | ||
| 4772 | raise ParseException(strg,locn,"matched token not at column %d" % n) | ||
| 4773 | return verifyCol | ||
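| | |||
| | # Usage sketch (illustrative, not from the original source): col() is 1-based. | ||
| | #   wd = Word(alphas).setParseAction(matchOnlyAtCol(5)) | ||
| | #   print(wd.parseString('    name'))  # -> ['name'] ('name' starts at column 5) | ||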
| 4774 | |||
| 4775 | def replaceWith(replStr): | ||
| 4776 | """ | ||
| 4777 | Helper method for common parse actions that simply return a literal value. Especially | ||
| 4778 | useful when used with C{L{transformString<ParserElement.transformString>}()}. | ||
| 4779 | |||
| 4780 | Example:: | ||
| 4781 | num = Word(nums).setParseAction(lambda toks: int(toks[0])) | ||
| 4782 | na = oneOf("N/A NA").setParseAction(replaceWith(math.nan)) | ||
| 4783 | term = na | num | ||
| 4784 | |||
| 4785 | OneOrMore(term).parseString("324 234 N/A 234") # -> [324, 234, nan, 234] | ||
| 4786 | """ | ||
| 4787 | return lambda s,l,t: [replStr] | ||
| 4788 | |||
| 4789 | def removeQuotes(s,l,t): | ||
| 4790 | """ | ||
| 4791 | Helper parse action for removing quotation marks from parsed quoted strings. | ||
| 4792 | |||
| 4793 | Example:: | ||
| 4794 | # by default, quotation marks are included in parsed results | ||
| 4795 | quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["'Now is the Winter of our Discontent'"] | ||
| 4796 | |||
| 4797 | # use removeQuotes to strip quotation marks from parsed results | ||
| 4798 | quotedString.setParseAction(removeQuotes) | ||
| 4799 | quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["Now is the Winter of our Discontent"] | ||
| 4800 | """ | ||
| 4801 | return t[0][1:-1] | ||
| 4802 | |||
| 4803 | def tokenMap(func, *args): | ||
| 4804 | """ | ||
| 4805 | Helper to define a parse action by mapping a function to all elements of a ParseResults list. If any additional | ||
| 4806 | args are passed, they are forwarded to the given function as additional arguments after | ||
| 4807 | the token, as in C{hex_integer = Word(hexnums).setParseAction(tokenMap(int, 16))}, which will convert the | ||
| 4808 | parsed data to an integer using base 16. | ||
| 4809 | |||
| 4810 | Example (compare the last case to the example in L{ParserElement.transformString}):: | ||
| 4811 | hex_ints = OneOrMore(Word(hexnums)).setParseAction(tokenMap(int, 16)) | ||
| 4812 | hex_ints.runTests(''' | ||
| 4813 | 00 11 22 aa FF 0a 0d 1a | ||
| 4814 | ''') | ||
| 4815 | |||
| 4816 | upperword = Word(alphas).setParseAction(tokenMap(str.upper)) | ||
| 4817 | OneOrMore(upperword).runTests(''' | ||
| 4818 | my kingdom for a horse | ||
| 4819 | ''') | ||
| 4820 | |||
| 4821 | wd = Word(alphas).setParseAction(tokenMap(str.title)) | ||
| 4822 | OneOrMore(wd).setParseAction(' '.join).runTests(''' | ||
| 4823 | now is the winter of our discontent made glorious summer by this sun of york | ||
| 4824 | ''') | ||
| 4825 | prints:: | ||
| 4826 | 00 11 22 aa FF 0a 0d 1a | ||
| 4827 | [0, 17, 34, 170, 255, 10, 13, 26] | ||
| 4828 | |||
| 4829 | my kingdom for a horse | ||
| 4830 | ['MY', 'KINGDOM', 'FOR', 'A', 'HORSE'] | ||
| 4831 | |||
| 4832 | now is the winter of our discontent made glorious summer by this sun of york | ||
| 4833 | ['Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York'] | ||
| 4834 | """ | ||
| 4835 | def pa(s,l,t): | ||
| 4836 | return [func(tokn, *args) for tokn in t] | ||
| 4837 | |||
| 4838 | try: | ||
| 4839 | func_name = getattr(func, '__name__', | ||
| 4840 | getattr(func, '__class__').__name__) | ||
| 4841 | except Exception: | ||
| 4842 | func_name = str(func) | ||
| 4843 | pa.__name__ = func_name | ||
| 4844 | |||
| 4845 | return pa | ||
| 4846 | |||
| 4847 | upcaseTokens = tokenMap(lambda t: _ustr(t).upper()) | ||
| 4848 | """(Deprecated) Helper parse action to convert tokens to upper case. Deprecated in favor of L{pyparsing_common.upcaseTokens}""" | ||
| 4849 | |||
| 4850 | downcaseTokens = tokenMap(lambda t: _ustr(t).lower()) | ||
| 4851 | """(Deprecated) Helper parse action to convert tokens to lower case. Deprecated in favor of L{pyparsing_common.downcaseTokens}""" | ||
| 4852 | |||
| 4853 | def _makeTags(tagStr, xml): | ||
| 4854 | """Internal helper to construct opening and closing tag expressions, given a tag name""" | ||
| 4855 | if isinstance(tagStr,basestring): | ||
| 4856 | resname = tagStr | ||
| 4857 | tagStr = Keyword(tagStr, caseless=not xml) | ||
| 4858 | else: | ||
| 4859 | resname = tagStr.name | ||
| 4860 | |||
| 4861 | tagAttrName = Word(alphas,alphanums+"_-:") | ||
| 4862 | if (xml): | ||
| 4863 | tagAttrValue = dblQuotedString.copy().setParseAction( removeQuotes ) | ||
| 4864 | openTag = Suppress("<") + tagStr("tag") + \ | ||
| 4865 | Dict(ZeroOrMore(Group( tagAttrName + Suppress("=") + tagAttrValue ))) + \ | ||
| 4866 | Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">") | ||
| 4867 | else: | ||
| 4868 | printablesLessRAbrack = "".join(c for c in printables if c not in ">") | ||
| 4869 | tagAttrValue = quotedString.copy().setParseAction( removeQuotes ) | Word(printablesLessRAbrack) | ||
| 4870 | openTag = Suppress("<") + tagStr("tag") + \ | ||
| 4871 | Dict(ZeroOrMore(Group( tagAttrName.setParseAction(downcaseTokens) + \ | ||
| 4872 | Optional( Suppress("=") + tagAttrValue ) ))) + \ | ||
| 4873 | Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">") | ||
| 4874 | closeTag = Combine(_L("</") + tagStr + ">") | ||
| 4875 | |||
| 4876 | openTag = openTag.setResultsName("start"+"".join(resname.replace(":"," ").title().split())).setName("<%s>" % resname) | ||
| 4877 | closeTag = closeTag.setResultsName("end"+"".join(resname.replace(":"," ").title().split())).setName("</%s>" % resname) | ||
| 4878 | openTag.tag = resname | ||
| 4879 | closeTag.tag = resname | ||
| 4880 | return openTag, closeTag | ||
| 4881 | |||
| 4882 | def makeHTMLTags(tagStr): | ||
| 4883 | """ | ||
| 4884 | Helper to construct opening and closing tag expressions for HTML, given a tag name. Matches | ||
| 4885 | tags in either upper or lower case, attributes with namespaces and with quoted or unquoted values. | ||
| 4886 | |||
| 4887 | Example:: | ||
| 4888 | text = '<td>More info at the <a href="http://pyparsing.wikispaces.com">pyparsing</a> wiki page</td>' | ||
| 4889 | # makeHTMLTags returns pyparsing expressions for the opening and closing tags as a 2-tuple | ||
| 4890 | a,a_end = makeHTMLTags("A") | ||
| 4891 | link_expr = a + SkipTo(a_end)("link_text") + a_end | ||
| 4892 | |||
| 4893 | for link in link_expr.searchString(text): | ||
| 4894 | # attributes in the <A> tag (like "href" shown here) are also accessible as named results | ||
| 4895 | print(link.link_text, '->', link.href) | ||
| 4896 | prints:: | ||
| 4897 | pyparsing -> http://pyparsing.wikispaces.com | ||
| 4898 | """ | ||
| 4899 | return _makeTags( tagStr, False ) | ||
| 4900 | |||
| 4901 | def makeXMLTags(tagStr): | ||
| 4902 | """ | ||
| 4903 | Helper to construct opening and closing tag expressions for XML, given a tag name. Matches | ||
| 4904 | tags only in the given upper/lower case. | ||
| 4905 | |||
| 4906 | Example: similar to L{makeHTMLTags} | ||
| 4907 | """ | ||
| 4908 | return _makeTags( tagStr, True ) | ||
| 4909 | |||
| 4910 | def withAttribute(*args,**attrDict): | ||
| 4911 | """ | ||
| 4912 | Helper to create a validating parse action to be used with start tags created | ||
| 4913 | with C{L{makeXMLTags}} or C{L{makeHTMLTags}}. Use C{withAttribute} to qualify a starting tag | ||
| 4914 | with a required attribute value, to avoid false matches on common tags such as | ||
| 4915 | C{<TD>} or C{<DIV>}. | ||
| 4916 | |||
| 4917 | Call C{withAttribute} with a series of attribute names and values. Specify the list | ||
| 4918 | of filter attribute names and values as:	| ||
| 4919 | - keyword arguments, as in C{(align="right")}, or | ||
| 4920 | - as an explicit dict with C{**} operator, when an attribute name is also a Python | ||
| 4921 | reserved word, as in C{**{"class":"Customer", "align":"right"}} | ||
| 4922 | - a list of name-value tuples, as in ( ("ns1:class", "Customer"), ("ns2:align","right") ) | ||
| 4923 | For attribute names with a namespace prefix, you must use the dict or tuple form. Attribute	| ||
| 4924 | names are matched insensitive to upper/lower case. | ||
| 4925 | |||
| 4926 | If just testing for C{class} (with or without a namespace), use C{L{withClass}}. | ||
| 4927 | |||
| 4928 | To verify that the attribute exists, but without specifying a value, pass | ||
| 4929 | C{withAttribute.ANY_VALUE} as the value. | ||
| 4930 | |||
| 4931 | Example:: | ||
| 4932 | html = ''' | ||
| 4933 | <div> | ||
| 4934 | Some text | ||
| 4935 | <div type="grid">1 4 0 1 0</div> | ||
| 4936 | <div type="graph">1,3 2,3 1,1</div> | ||
| 4937 | <div>this has no type</div> | ||
| 4938 | </div> | ||
| 4939 | |||
| 4940 | ''' | ||
| 4941 | div,div_end = makeHTMLTags("div") | ||
| 4942 | |||
| 4943 | # only match div tag having a type attribute with value "grid" | ||
| 4944 | div_grid = div().setParseAction(withAttribute(type="grid")) | ||
| 4945 | grid_expr = div_grid + SkipTo(div | div_end)("body") | ||
| 4946 | for grid_header in grid_expr.searchString(html): | ||
| 4947 | print(grid_header.body) | ||
| 4948 | |||
| 4949 | # construct a match with any div tag having a type attribute, regardless of the value | ||
| 4950 | div_any_type = div().setParseAction(withAttribute(type=withAttribute.ANY_VALUE)) | ||
| 4951 | div_expr = div_any_type + SkipTo(div | div_end)("body") | ||
| 4952 | for div_header in div_expr.searchString(html): | ||
| 4953 | print(div_header.body) | ||
| 4954 | prints:: | ||
| 4955 | 1 4 0 1 0 | ||
| 4956 | |||
| 4957 | 1 4 0 1 0 | ||
| 4958 | 1,3 2,3 1,1 | ||
| 4959 | """ | ||
| 4960 | if args: | ||
| 4961 | attrs = args[:] | ||
| 4962 | else: | ||
| 4963 | attrs = attrDict.items() | ||
| 4964 | attrs = [(k,v) for k,v in attrs] | ||
| 4965 | def pa(s,l,tokens): | ||
| 4966 | for attrName,attrValue in attrs: | ||
| 4967 | if attrName not in tokens: | ||
| 4968 | raise ParseException(s,l,"no matching attribute " + attrName) | ||
| 4969 | if attrValue != withAttribute.ANY_VALUE and tokens[attrName] != attrValue: | ||
| 4970 | raise ParseException(s,l,"attribute '%s' has value '%s', must be '%s'" % | ||
| 4971 | (attrName, tokens[attrName], attrValue)) | ||
| 4972 | return pa | ||
| 4973 | withAttribute.ANY_VALUE = object() | ||
| 4974 | |||
| 4975 | def withClass(classname, namespace=''): | ||
| 4976 | """ | ||
| 4977 | Simplified version of C{L{withAttribute}} when matching on a div class - made | ||
| 4978 | difficult because C{class} is a reserved word in Python. | ||
| 4979 | |||
| 4980 | Example:: | ||
| 4981 | html = ''' | ||
| 4982 | <div> | ||
| 4983 | Some text | ||
| 4984 | <div class="grid">1 4 0 1 0</div> | ||
| 4985 | <div class="graph">1,3 2,3 1,1</div> | ||
| 4986 | <div>this &lt;div&gt; has no class</div>	| ||
| 4987 | </div> | ||
| 4988 | |||
| 4989 | ''' | ||
| 4990 | div,div_end = makeHTMLTags("div") | ||
| 4991 | div_grid = div().setParseAction(withClass("grid")) | ||
| 4992 | |||
| 4993 | grid_expr = div_grid + SkipTo(div | div_end)("body") | ||
| 4994 | for grid_header in grid_expr.searchString(html): | ||
| 4995 | print(grid_header.body) | ||
| 4996 | |||
| 4997 | div_any_type = div().setParseAction(withClass(withAttribute.ANY_VALUE)) | ||
| 4998 | div_expr = div_any_type + SkipTo(div | div_end)("body") | ||
| 4999 | for div_header in div_expr.searchString(html): | ||
| 5000 | print(div_header.body) | ||
| 5001 | prints:: | ||
| 5002 | 1 4 0 1 0 | ||
| 5003 | |||
| 5004 | 1 4 0 1 0 | ||
| 5005 | 1,3 2,3 1,1 | ||
| 5006 | """ | ||
| 5007 | classattr = "%s:class" % namespace if namespace else "class" | ||
| 5008 | return withAttribute(**{classattr : classname}) | ||
| 5009 | |||
| 5010 | opAssoc = _Constants() | ||
| 5011 | opAssoc.LEFT = object() | ||
| 5012 | opAssoc.RIGHT = object() | ||
| 5013 | |||
| 5014 | def infixNotation( baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')') ): | ||
| 5015 | """ | ||
| 5016 | Helper method for constructing grammars of expressions made up of | ||
| 5017 | operators working in a precedence hierarchy. Operators may be unary or | ||
| 5018 | binary, left- or right-associative. Parse actions can also be attached | ||
| 5019 | to operator expressions. The generated parser will also recognize the use | ||
| 5020 | of parentheses to override operator precedences (see example below). | ||
| 5021 | |||
| 5022 | Note: if you define a deep operator list, you may see performance issues | ||
| 5023 | when using infixNotation. See L{ParserElement.enablePackrat} for a | ||
| 5024 | mechanism to potentially improve your parser performance. | ||
| 5025 | |||
| 5026 | Parameters: | ||
| 5027 | - baseExpr - expression representing the most basic element for the nested operator expressions	| ||
| 5028 | - opList - list of tuples, one for each operator precedence level in the | ||
| 5029 | expression grammar; each tuple is of the form | ||
| 5030 | (opExpr, numTerms, rightLeftAssoc, parseAction), where: | ||
| 5031 | - opExpr is the pyparsing expression for the operator; | ||
| 5032 | may also be a string, which will be converted to a Literal; | ||
| 5033 | if numTerms is 3, opExpr is a tuple of two expressions, for the | ||
| 5034 | two operators separating the 3 terms | ||
| 5035 | - numTerms is the number of terms for this operator (must | ||
| 5036 | be 1, 2, or 3) | ||
| 5037 | - rightLeftAssoc is the indicator whether the operator is | ||
| 5038 | right or left associative, using the pyparsing-defined | ||
| 5039 | constants C{opAssoc.RIGHT} and C{opAssoc.LEFT}. | ||
| 5040 | - parseAction is the parse action to be associated with | ||
| 5041 | expressions matching this operator expression (the | ||
| 5042 | parse action tuple member may be omitted); if the parse action | ||
| 5043 | is passed a tuple or list of functions, this is equivalent to | ||
| 5044 | calling C{setParseAction(*fn)} (L{ParserElement.setParseAction}) | ||
| 5045 | - lpar - expression for matching left-parentheses (default=C{Suppress('(')}) | ||
| 5046 | - rpar - expression for matching right-parentheses (default=C{Suppress(')')}) | ||
| 5047 | |||
| 5048 | Example:: | ||
| 5049 | # simple example of four-function arithmetic with ints and variable names | ||
| 5050 | integer = pyparsing_common.signed_integer | ||
| 5051 | varname = pyparsing_common.identifier | ||
| 5052 | |||
| 5053 | arith_expr = infixNotation(integer | varname, | ||
| 5054 | [ | ||
| 5055 | ('-', 1, opAssoc.RIGHT), | ||
| 5056 | (oneOf('* /'), 2, opAssoc.LEFT), | ||
| 5057 | (oneOf('+ -'), 2, opAssoc.LEFT), | ||
| 5058 | ]) | ||
| 5059 | |||
| 5060 | arith_expr.runTests(''' | ||
| 5061 | 5+3*6 | ||
| 5062 | (5+3)*6 | ||
| 5063 | -2--11 | ||
| 5064 | ''', fullDump=False) | ||
| 5065 | prints:: | ||
| 5066 | 5+3*6 | ||
| 5067 | [[5, '+', [3, '*', 6]]] | ||
| 5068 | |||
| 5069 | (5+3)*6 | ||
| 5070 | [[[5, '+', 3], '*', 6]] | ||
| 5071 | |||
| 5072 | -2--11 | ||
| 5073 | [[['-', 2], '-', ['-', 11]]] | ||
| 5074 | """ | ||
| 5075 | ret = Forward() | ||
| 5076 | lastExpr = baseExpr | ( lpar + ret + rpar ) | ||
| 5077 | for i,operDef in enumerate(opList): | ||
| 5078 | opExpr,arity,rightLeftAssoc,pa = (operDef + (None,))[:4] | ||
| 5079 | termName = "%s term" % opExpr if arity < 3 else "%s%s term" % opExpr | ||
| 5080 | if arity == 3: | ||
| 5081 | if opExpr is None or len(opExpr) != 2: | ||
| 5082 | raise ValueError("if numterms=3, opExpr must be a tuple or list of two expressions") | ||
| 5083 | opExpr1, opExpr2 = opExpr | ||
| 5084 | thisExpr = Forward().setName(termName) | ||
| 5085 | if rightLeftAssoc == opAssoc.LEFT: | ||
| 5086 | if arity == 1: | ||
| 5087 | matchExpr = FollowedBy(lastExpr + opExpr) + Group( lastExpr + OneOrMore( opExpr ) ) | ||
| 5088 | elif arity == 2: | ||
| 5089 | if opExpr is not None: | ||
| 5090 | matchExpr = FollowedBy(lastExpr + opExpr + lastExpr) + Group( lastExpr + OneOrMore( opExpr + lastExpr ) ) | ||
| 5091 | else: | ||
| 5092 | matchExpr = FollowedBy(lastExpr+lastExpr) + Group( lastExpr + OneOrMore(lastExpr) ) | ||
| 5093 | elif arity == 3: | ||
| 5094 | matchExpr = FollowedBy(lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr) + \ | ||
| 5095 | Group( lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr ) | ||
| 5096 | else: | ||
| 5097 | raise ValueError("operator must be unary (1), binary (2), or ternary (3)") | ||
| 5098 | elif rightLeftAssoc == opAssoc.RIGHT: | ||
| 5099 | if arity == 1: | ||
| 5100 | # try to avoid LR with this extra test | ||
| 5101 | if not isinstance(opExpr, Optional): | ||
| 5102 | opExpr = Optional(opExpr) | ||
| 5103 | matchExpr = FollowedBy(opExpr.expr + thisExpr) + Group( opExpr + thisExpr ) | ||
| 5104 | elif arity == 2: | ||
| 5105 | if opExpr is not None: | ||
| 5106 | matchExpr = FollowedBy(lastExpr + opExpr + thisExpr) + Group( lastExpr + OneOrMore( opExpr + thisExpr ) ) | ||
| 5107 | else: | ||
| 5108 | matchExpr = FollowedBy(lastExpr + thisExpr) + Group( lastExpr + OneOrMore( thisExpr ) ) | ||
| 5109 | elif arity == 3: | ||
| 5110 | matchExpr = FollowedBy(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr) + \ | ||
| 5111 | Group( lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr ) | ||
| 5112 | else: | ||
| 5113 | raise ValueError("operator must be unary (1), binary (2), or ternary (3)") | ||
| 5114 | else: | ||
| 5115 | raise ValueError("operator must indicate right or left associativity") | ||
| 5116 | if pa: | ||
| 5117 | if isinstance(pa, (tuple, list)): | ||
| 5118 | matchExpr.setParseAction(*pa) | ||
| 5119 | else: | ||
| 5120 | matchExpr.setParseAction(pa) | ||
| 5121 | thisExpr <<= ( matchExpr.setName(termName) | lastExpr ) | ||
| 5122 | lastExpr = thisExpr | ||
| 5123 | ret <<= lastExpr | ||
| 5124 | return ret | ||
| 5125 | |||
| 5126 | operatorPrecedence = infixNotation | ||
| 5127 | """(Deprecated) Former name of C{L{infixNotation}}, will be dropped in a future release.""" | ||
| 5128 | |||
| 5129 | dblQuotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"').setName("string enclosed in double quotes") | ||
| 5130 | sglQuotedString = Combine(Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("string enclosed in single quotes") | ||
| 5131 | quotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"'| | ||
| 5132 | Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("quotedString using single or double quotes") | ||
| 5133 | unicodeString = Combine(_L('u') + quotedString.copy()).setName("unicode string literal") | ||
| 5134 | |||
| 5135 | def nestedExpr(opener="(", closer=")", content=None, ignoreExpr=quotedString.copy()): | ||
| 5136 | """ | ||
| 5137 | Helper method for defining nested lists enclosed in opening and closing | ||
| 5138 | delimiters ("(" and ")" are the default). | ||
| 5139 | |||
| 5140 | Parameters: | ||
| 5141 | - opener - opening character for a nested list (default=C{"("}); can also be a pyparsing expression | ||
| 5142 | - closer - closing character for a nested list (default=C{")"}); can also be a pyparsing expression | ||
| 5143 | - content - expression for items within the nested lists (default=C{None}) | ||
| 5144 | - ignoreExpr - expression for ignoring opening and closing delimiters (default=C{quotedString}) | ||
| 5145 | |||
| 5146 | If an expression is not provided for the content argument, the nested | ||
| 5147 | expression will capture all whitespace-delimited content between delimiters | ||
| 5148 | as a list of separate values. | ||
| 5149 | |||
| 5150 | Use the C{ignoreExpr} argument to define expressions that may contain | ||
| 5151 | opening or closing characters that should not be treated as opening | ||
| 5152 | or closing characters for nesting, such as quotedString or a comment | ||
| 5153 | expression. Specify multiple expressions using an C{L{Or}} or C{L{MatchFirst}}. | ||
| 5154 | The default is L{quotedString}, but if no expressions are to be ignored, | ||
| 5155 | then pass C{None} for this argument. | ||
| 5156 | |||
| 5157 | Example:: | ||
| 5158 | data_type = oneOf("void int short long char float double") | ||
| 5159 | decl_data_type = Combine(data_type + Optional(Word('*'))) | ||
| 5160 | ident = Word(alphas+'_', alphanums+'_') | ||
| 5161 | number = pyparsing_common.number | ||
| 5162 | arg = Group(decl_data_type + ident) | ||
| 5163 | LPAR,RPAR = map(Suppress, "()") | ||
| 5164 | |||
| 5165 | code_body = nestedExpr('{', '}', ignoreExpr=(quotedString | cStyleComment)) | ||
| 5166 | |||
| 5167 | c_function = (decl_data_type("type") | ||
| 5168 | + ident("name") | ||
| 5169 | + LPAR + Optional(delimitedList(arg), [])("args") + RPAR | ||
| 5170 | + code_body("body")) | ||
| 5171 | c_function.ignore(cStyleComment) | ||
| 5172 | |||
| 5173 | source_code = ''' | ||
| 5174 | int is_odd(int x) { | ||
| 5175 | return (x%2); | ||
| 5176 | } | ||
| 5177 | |||
| 5178 | int dec_to_hex(char hchar) { | ||
| 5179 | if (hchar >= '0' && hchar <= '9') { | ||
| 5180 | return (ord(hchar)-ord('0')); | ||
| 5181 | } else { | ||
| 5182 | return (10+ord(hchar)-ord('A')); | ||
| 5183 | } | ||
| 5184 | } | ||
| 5185 | ''' | ||
| 5186 | for func in c_function.searchString(source_code): | ||
| 5187 | print("%(name)s (%(type)s) args: %(args)s" % func) | ||
| 5188 | |||
| 5189 | prints:: | ||
| 5190 | is_odd (int) args: [['int', 'x']] | ||
| 5191 | dec_to_hex (int) args: [['char', 'hchar']] | ||
| 5192 | """ | ||
| 5193 | if opener == closer: | ||
| 5194 | raise ValueError("opening and closing strings cannot be the same") | ||
| 5195 | if content is None: | ||
| 5196 | if isinstance(opener,basestring) and isinstance(closer,basestring): | ||
| 5197 | if len(opener) == 1 and len(closer)==1: | ||
| 5198 | if ignoreExpr is not None: | ||
| 5199 | content = (Combine(OneOrMore(~ignoreExpr + | ||
| 5200 | CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS,exact=1)) | ||
| 5201 | ).setParseAction(lambda t:t[0].strip())) | ||
| 5202 | else: | ||
| 5203 | content = (empty.copy()+CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS | ||
| 5204 | ).setParseAction(lambda t:t[0].strip())) | ||
| 5205 | else: | ||
| 5206 | if ignoreExpr is not None: | ||
| 5207 | content = (Combine(OneOrMore(~ignoreExpr + | ||
| 5208 | ~Literal(opener) + ~Literal(closer) + | ||
| 5209 | CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1)) | ||
| 5210 | ).setParseAction(lambda t:t[0].strip())) | ||
| 5211 | else: | ||
| 5212 | content = (Combine(OneOrMore(~Literal(opener) + ~Literal(closer) + | ||
| 5213 | CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1)) | ||
| 5214 | ).setParseAction(lambda t:t[0].strip())) | ||
| 5215 | else: | ||
| 5216 | raise ValueError("opening and closing arguments must be strings if no content expression is given") | ||
| 5217 | ret = Forward() | ||
| 5218 | if ignoreExpr is not None: | ||
| 5219 | ret <<= Group( Suppress(opener) + ZeroOrMore( ignoreExpr | ret | content ) + Suppress(closer) ) | ||
| 5220 | else: | ||
| 5221 | ret <<= Group( Suppress(opener) + ZeroOrMore( ret | content ) + Suppress(closer) ) | ||
| 5222 | ret.setName('nested %s%s expression' % (opener,closer)) | ||
| 5223 | return ret | ||
| 5224 | |||
| 5225 | def indentedBlock(blockStatementExpr, indentStack, indent=True): | ||
| 5226 | """ | ||
| 5227 | Helper method for defining space-delimited indentation blocks, such as | ||
| 5228 | those used to define block statements in Python source code. | ||
| 5229 | |||
| 5230 | Parameters: | ||
| 5231 | - blockStatementExpr - expression defining syntax of statement that | ||
| 5232 | is repeated within the indented block | ||
| 5233 | - indentStack - list created by caller to manage indentation stack | ||
| 5234 | (multiple statementWithIndentedBlock expressions within a single grammar | ||
| 5235 | should share a common indentStack) | ||
| 5236 | - indent - boolean indicating whether block must be indented beyond the | ||
| 5237 | current level; set to False for block of left-most statements	| ||
| 5238 | (default=C{True}) | ||
| 5239 | |||
| 5240 | A valid block must contain at least one C{blockStatement}. | ||
| 5241 | |||
| 5242 | Example:: | ||
| 5243 | data = ''' | ||
| 5244 | def A(z): | ||
| 5245 | A1 | ||
| 5246 | B = 100 | ||
| 5247 | G = A2 | ||
| 5248 | A2 | ||
| 5249 | A3 | ||
| 5250 | B | ||
| 5251 | def BB(a,b,c): | ||
| 5252 | BB1 | ||
| 5253 | def BBA(): | ||
| 5254 | bba1 | ||
| 5255 | bba2 | ||
| 5256 | bba3 | ||
| 5257 | C | ||
| 5258 | D | ||
| 5259 | def spam(x,y): | ||
| 5260 | def eggs(z): | ||
| 5261 | pass | ||
| 5262 | ''' | ||
| 5263 | |||
| 5264 | |||
| 5265 | indentStack = [1] | ||
| 5266 | stmt = Forward() | ||
| 5267 | |||
| 5268 | identifier = Word(alphas, alphanums) | ||
| 5269 | funcDecl = ("def" + identifier + Group( "(" + Optional( delimitedList(identifier) ) + ")" ) + ":") | ||
| 5270 | func_body = indentedBlock(stmt, indentStack) | ||
| 5271 | funcDef = Group( funcDecl + func_body ) | ||
| 5272 | |||
| 5273 | rvalue = Forward() | ||
| 5274 | funcCall = Group(identifier + "(" + Optional(delimitedList(rvalue)) + ")") | ||
| 5275 | rvalue << (funcCall | identifier | Word(nums)) | ||
| 5276 | assignment = Group(identifier + "=" + rvalue) | ||
| 5277 | stmt << ( funcDef | assignment | identifier ) | ||
| 5278 | |||
| 5279 | module_body = OneOrMore(stmt) | ||
| 5280 | |||
| 5281 | parseTree = module_body.parseString(data) | ||
| 5282 | parseTree.pprint() | ||
| 5283 | prints:: | ||
| 5284 | [['def', | ||
| 5285 | 'A', | ||
| 5286 | ['(', 'z', ')'], | ||
| 5287 | ':', | ||
| 5288 | [['A1'], [['B', '=', '100']], [['G', '=', 'A2']], ['A2'], ['A3']]], | ||
| 5289 | 'B', | ||
| 5290 | ['def', | ||
| 5291 | 'BB', | ||
| 5292 | ['(', 'a', 'b', 'c', ')'], | ||
| 5293 | ':', | ||
| 5294 | [['BB1'], [['def', 'BBA', ['(', ')'], ':', [['bba1'], ['bba2'], ['bba3']]]]]], | ||
| 5295 | 'C', | ||
| 5296 | 'D', | ||
| 5297 | ['def', | ||
| 5298 | 'spam', | ||
| 5299 | ['(', 'x', 'y', ')'], | ||
| 5300 | ':', | ||
| 5301 | [[['def', 'eggs', ['(', 'z', ')'], ':', [['pass']]]]]]] | ||
| 5302 | """ | ||
| 5303 | def checkPeerIndent(s,l,t): | ||
| 5304 | if l >= len(s): return | ||
| 5305 | curCol = col(l,s) | ||
| 5306 | if curCol != indentStack[-1]: | ||
| 5307 | if curCol > indentStack[-1]: | ||
| 5308 | raise ParseFatalException(s,l,"illegal nesting") | ||
| 5309 | raise ParseException(s,l,"not a peer entry") | ||
| 5310 | |||
| 5311 | def checkSubIndent(s,l,t): | ||
| 5312 | curCol = col(l,s) | ||
| 5313 | if curCol > indentStack[-1]: | ||
| 5314 | indentStack.append( curCol ) | ||
| 5315 | else: | ||
| 5316 | raise ParseException(s,l,"not a subentry") | ||
| 5317 | |||
| 5318 | def checkUnindent(s,l,t): | ||
| 5319 | if l >= len(s): return | ||
| 5320 | curCol = col(l,s) | ||
| 5321 | if not(indentStack and curCol < indentStack[-1] and curCol <= indentStack[-2]): | ||
| 5322 | raise ParseException(s,l,"not an unindent") | ||
| 5323 | indentStack.pop() | ||
| 5324 | |||
| 5325 | NL = OneOrMore(LineEnd().setWhitespaceChars("\t ").suppress()) | ||
| 5326 | INDENT = (Empty() + Empty().setParseAction(checkSubIndent)).setName('INDENT') | ||
| 5327 | PEER = Empty().setParseAction(checkPeerIndent).setName('') | ||
| 5328 | UNDENT = Empty().setParseAction(checkUnindent).setName('UNINDENT') | ||
| 5329 | if indent: | ||
| 5330 | smExpr = Group( Optional(NL) + | ||
| 5331 | #~ FollowedBy(blockStatementExpr) + | ||
| 5332 | INDENT + (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) + UNDENT) | ||
| 5333 | else: | ||
| 5334 | smExpr = Group( Optional(NL) + | ||
| 5335 | (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) ) | ||
| 5336 | blockStatementExpr.ignore(_bslash + LineEnd()) | ||
| 5337 | return smExpr.setName('indented block') | ||
| 5338 | |||
| 5339 | alphas8bit = srange(r"[\0xc0-\0xd6\0xd8-\0xf6\0xf8-\0xff]") | ||
| 5340 | punc8bit = srange(r"[\0xa1-\0xbf\0xd7\0xf7]") | ||
| 5341 | |||
| 5342 | anyOpenTag,anyCloseTag = makeHTMLTags(Word(alphas,alphanums+"_:").setName('any tag')) | ||
| 5343 | _htmlEntityMap = dict(zip("gt lt amp nbsp quot apos".split(),'><& "\'')) | ||
| 5344 | commonHTMLEntity = Regex('&(?P<entity>' + '|'.join(_htmlEntityMap.keys()) +");").setName("common HTML entity") | ||
| 5345 | def replaceHTMLEntity(t): | ||
| 5346 | """Helper parser action to replace common HTML entities with their special characters""" | ||
| 5347 | return _htmlEntityMap.get(t.entity) | ||
| 5348 | |||
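A quick illustration of replaceHTMLEntity in action (a sketch in the spirit of the docstring examples above; transformString substitutes each parse action's return value in place of the matched text):

    html_unescape = commonHTMLEntity.copy().setParseAction(replaceHTMLEntity)
    print(html_unescape.transformString("x &lt; y &amp;&amp; y &gt; z"))
    # prints: x < y && y > z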
| 5349 | # it's easy to get these comment structures wrong - they're very common, so may as well make them available | ||
| 5350 | cStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/').setName("C style comment") | ||
| 5351 | "Comment of the form C{/* ... */}" | ||
| 5352 | |||
| 5353 | htmlComment = Regex(r"<!--[\s\S]*?-->").setName("HTML comment") | ||
| 5354 | "Comment of the form C{<!-- ... -->}" | ||
| 5355 | |||
| 5356 | restOfLine = Regex(r".*").leaveWhitespace().setName("rest of line") | ||
| 5357 | dblSlashComment = Regex(r"//(?:\\\n|[^\n])*").setName("// comment") | ||
| 5358 | "Comment of the form C{// ... (to end of line)}" | ||
| 5359 | |||
| 5360 | cppStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/'| dblSlashComment).setName("C++ style comment") | ||
| 5361 | "Comment of either form C{L{cStyleComment}} or C{L{dblSlashComment}}" | ||
| 5362 | |||
| 5363 | javaStyleComment = cppStyleComment | ||
| 5364 | "Same as C{L{cppStyleComment}}" | ||
| 5365 | |||
| 5366 | pythonStyleComment = Regex(r"#.*").setName("Python style comment") | ||
| 5367 | "Comment of the form C{# ... (to end of line)}" | ||
| 5368 | |||
| 5369 | _commasepitem = Combine(OneOrMore(Word(printables, excludeChars=',') + | ||
| 5370 | Optional( Word(" \t") + | ||
| 5371 | ~Literal(",") + ~LineEnd() ) ) ).streamline().setName("commaItem") | ||
| 5372 | commaSeparatedList = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("commaSeparatedList") | ||
| 5373 | """(Deprecated) Predefined expression of 1 or more printable words or quoted strings, separated by commas. | ||
| 5374 | This expression is deprecated in favor of L{pyparsing_common.comma_separated_list}.""" | ||
| 5375 | |||
| 5376 | # some other useful expressions - using lower-case class name since we are really using this as a namespace | ||
| 5377 | class pyparsing_common: | ||
| 5378 | """ | ||
| 5379 | Here are some common low-level expressions that may be useful in jump-starting parser development: | ||
| 5380 | - numeric forms (L{integers<integer>}, L{reals<real>}, L{scientific notation<sci_real>}) | ||
| 5381 | - common L{programming identifiers<identifier>} | ||
| 5382 | - network addresses (L{MAC<mac_address>}, L{IPv4<ipv4_address>}, L{IPv6<ipv6_address>}) | ||
| 5383 | - ISO8601 L{dates<iso8601_date>} and L{datetime<iso8601_datetime>} | ||
| 5384 | - L{UUID<uuid>} | ||
| 5385 | - L{comma-separated list<comma_separated_list>} | ||
| 5386 | Parse actions: | ||
| 5387 | - C{L{convertToInteger}} | ||
| 5388 | - C{L{convertToFloat}} | ||
| 5389 | - C{L{convertToDate}} | ||
| 5390 | - C{L{convertToDatetime}} | ||
| 5391 | - C{L{stripHTMLTags}} | ||
| 5392 | - C{L{upcaseTokens}} | ||
| 5393 | - C{L{downcaseTokens}} | ||
| 5394 | |||
| 5395 | Example:: | ||
| 5396 | pyparsing_common.number.runTests(''' | ||
| 5397 | # any int or real number, returned as the appropriate type | ||
| 5398 | 100 | ||
| 5399 | -100 | ||
| 5400 | +100 | ||
| 5401 | 3.14159 | ||
| 5402 | 6.02e23 | ||
| 5403 | 1e-12 | ||
| 5404 | ''') | ||
| 5405 | |||
| 5406 | pyparsing_common.fnumber.runTests(''' | ||
| 5407 | # any int or real number, returned as float | ||
| 5408 | 100 | ||
| 5409 | -100 | ||
| 5410 | +100 | ||
| 5411 | 3.14159 | ||
| 5412 | 6.02e23 | ||
| 5413 | 1e-12 | ||
| 5414 | ''') | ||
| 5415 | |||
| 5416 | pyparsing_common.hex_integer.runTests(''' | ||
| 5417 | # hex numbers | ||
| 5418 | 100 | ||
| 5419 | FF | ||
| 5420 | ''') | ||
| 5421 | |||
| 5422 | pyparsing_common.fraction.runTests(''' | ||
| 5423 | # fractions | ||
| 5424 | 1/2 | ||
| 5425 | -3/4 | ||
| 5426 | ''') | ||
| 5427 | |||
| 5428 | pyparsing_common.mixed_integer.runTests(''' | ||
| 5429 | # mixed fractions | ||
| 5430 | 1 | ||
| 5431 | 1/2 | ||
| 5432 | -3/4 | ||
| 5433 | 1-3/4 | ||
| 5434 | ''') | ||
| 5435 | |||
| 5436 | import uuid | ||
| 5437 | pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID)) | ||
| 5438 | pyparsing_common.uuid.runTests(''' | ||
| 5439 | # uuid | ||
| 5440 | 12345678-1234-5678-1234-567812345678 | ||
| 5441 | ''') | ||
| 5442 | prints:: | ||
| 5443 | # any int or real number, returned as the appropriate type | ||
| 5444 | 100 | ||
| 5445 | [100] | ||
| 5446 | |||
| 5447 | -100 | ||
| 5448 | [-100] | ||
| 5449 | |||
| 5450 | +100 | ||
| 5451 | [100] | ||
| 5452 | |||
| 5453 | 3.14159 | ||
| 5454 | [3.14159] | ||
| 5455 | |||
| 5456 | 6.02e23 | ||
| 5457 | [6.02e+23] | ||
| 5458 | |||
| 5459 | 1e-12 | ||
| 5460 | [1e-12] | ||
| 5461 | |||
| 5462 | # any int or real number, returned as float | ||
| 5463 | 100 | ||
| 5464 | [100.0] | ||
| 5465 | |||
| 5466 | -100 | ||
| 5467 | [-100.0] | ||
| 5468 | |||
| 5469 | +100 | ||
| 5470 | [100.0] | ||
| 5471 | |||
| 5472 | 3.14159 | ||
| 5473 | [3.14159] | ||
| 5474 | |||
| 5475 | 6.02e23 | ||
| 5476 | [6.02e+23] | ||
| 5477 | |||
| 5478 | 1e-12 | ||
| 5479 | [1e-12] | ||
| 5480 | |||
| 5481 | # hex numbers | ||
| 5482 | 100 | ||
| 5483 | [256] | ||
| 5484 | |||
| 5485 | FF | ||
| 5486 | [255] | ||
| 5487 | |||
| 5488 | # fractions | ||
| 5489 | 1/2 | ||
| 5490 | [0.5] | ||
| 5491 | |||
| 5492 | -3/4 | ||
| 5493 | [-0.75] | ||
| 5494 | |||
| 5495 | # mixed fractions | ||
| 5496 | 1 | ||
| 5497 | [1] | ||
| 5498 | |||
| 5499 | 1/2 | ||
| 5500 | [0.5] | ||
| 5501 | |||
| 5502 | -3/4 | ||
| 5503 | [-0.75] | ||
| 5504 | |||
| 5505 | 1-3/4 | ||
| 5506 | [1.75] | ||
| 5507 | |||
| 5508 | # uuid | ||
| 5509 | 12345678-1234-5678-1234-567812345678 | ||
| 5510 | [UUID('12345678-1234-5678-1234-567812345678')] | ||
| 5511 | """ | ||
| 5512 | |||
| 5513 | convertToInteger = tokenMap(int) | ||
| 5514 | """ | ||
| 5515 | Parse action for converting parsed integers to Python int | ||
| 5516 | """ | ||
| 5517 | |||
| 5518 | convertToFloat = tokenMap(float) | ||
| 5519 | """ | ||
| 5520 | Parse action for converting parsed numbers to Python float | ||
| 5521 | """ | ||
| 5522 | |||
| 5523 | integer = Word(nums).setName("integer").setParseAction(convertToInteger) | ||
| 5524 | """expression that parses an unsigned integer, returns an int""" | ||
| 5525 | |||
| 5526 | hex_integer = Word(hexnums).setName("hex integer").setParseAction(tokenMap(int,16)) | ||
| 5527 | """expression that parses a hexadecimal integer, returns an int""" | ||
| 5528 | |||
| 5529 | signed_integer = Regex(r'[+-]?\d+').setName("signed integer").setParseAction(convertToInteger) | ||
| 5530 | """expression that parses an integer with optional leading sign, returns an int""" | ||
| 5531 | |||
| 5532 | fraction = (signed_integer().setParseAction(convertToFloat) + '/' + signed_integer().setParseAction(convertToFloat)).setName("fraction") | ||
| 5533 | """fractional expression of an integer divided by an integer, returns a float""" | ||
| 5534 | fraction.addParseAction(lambda t: t[0]/t[-1]) | ||
| 5535 | |||
| 5536 | mixed_integer = (fraction | signed_integer + Optional(Optional('-').suppress() + fraction)).setName("fraction or mixed integer-fraction") | ||
| 5537 | """mixed integer of the form 'integer - fraction', with optional leading integer, returns float""" | ||
| 5538 | mixed_integer.addParseAction(sum) | ||
| 5539 | |||
| 5540 | real = Regex(r'[+-]?\d+\.\d*').setName("real number").setParseAction(convertToFloat) | ||
| 5541 | """expression that parses a floating point number and returns a float""" | ||
| 5542 | |||
| 5543 | sci_real = Regex(r'[+-]?\d+([eE][+-]?\d+|\.\d*([eE][+-]?\d+)?)').setName("real number with scientific notation").setParseAction(convertToFloat) | ||
| 5544 | """expression that parses a floating point number with optional scientific notation and returns a float""" | ||
| 5545 | |||
| 5546 | # streamlining this expression makes the docs nicer-looking | ||
| 5547 | number = (sci_real | real | signed_integer).streamline() | ||
| 5548 | """any numeric expression, returns the corresponding Python type""" | ||
| 5549 | |||
| 5550 | fnumber = Regex(r'[+-]?\d+\.?\d*([eE][+-]?\d+)?').setName("fnumber").setParseAction(convertToFloat) | ||
| 5551 | """any int or real number, returned as float""" | ||
| 5552 | |||
| 5553 | identifier = Word(alphas+'_', alphanums+'_').setName("identifier") | ||
| 5554 | """typical code identifier (leading alpha or '_', followed by 0 or more alphas, nums, or '_')""" | ||
| 5555 | |||
| 5556 | ipv4_address = Regex(r'(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})(\.(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})){3}').setName("IPv4 address") | ||
| 5557 | "IPv4 address (C{0.0.0.0 - 255.255.255.255})" | ||
| 5558 | |||
| 5559 | _ipv6_part = Regex(r'[0-9a-fA-F]{1,4}').setName("hex_integer") | ||
| 5560 | _full_ipv6_address = (_ipv6_part + (':' + _ipv6_part)*7).setName("full IPv6 address") | ||
| 5561 | _short_ipv6_address = (Optional(_ipv6_part + (':' + _ipv6_part)*(0,6)) + "::" + Optional(_ipv6_part + (':' + _ipv6_part)*(0,6))).setName("short IPv6 address") | ||
| 5562 | _short_ipv6_address.addCondition(lambda t: sum(1 for tt in t if pyparsing_common._ipv6_part.matches(tt)) < 8) | ||
| 5563 | _mixed_ipv6_address = ("::ffff:" + ipv4_address).setName("mixed IPv6 address") | ||
| 5564 | ipv6_address = Combine((_full_ipv6_address | _mixed_ipv6_address | _short_ipv6_address).setName("IPv6 address")).setName("IPv6 address") | ||
| 5565 | "IPv6 address (long, short, or mixed form)" | ||
| 5566 | |||
| 5567 | mac_address = Regex(r'[0-9a-fA-F]{2}([:.-])[0-9a-fA-F]{2}(?:\1[0-9a-fA-F]{2}){4}').setName("MAC address") | ||
| 5568 | "MAC address xx:xx:xx:xx:xx (may also have '-' or '.' delimiters)" | ||
| 5569 | |||
| 5570 | @staticmethod | ||
| 5571 | def convertToDate(fmt="%Y-%m-%d"): | ||
| 5572 | """ | ||
| 5573 | Helper to create a parse action for converting parsed date string to Python datetime.date | ||
| 5574 | |||
| 5575 | Params - | ||
| 5576 | - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%d"}) | ||
| 5577 | |||
| 5578 | Example:: | ||
| 5579 | date_expr = pyparsing_common.iso8601_date.copy() | ||
| 5580 | date_expr.setParseAction(pyparsing_common.convertToDate()) | ||
| 5581 | print(date_expr.parseString("1999-12-31")) | ||
| 5582 | prints:: | ||
| 5583 | [datetime.date(1999, 12, 31)] | ||
| 5584 | """ | ||
| 5585 | def cvt_fn(s,l,t): | ||
| 5586 | try: | ||
| 5587 | return datetime.strptime(t[0], fmt).date() | ||
| 5588 | except ValueError as ve: | ||
| 5589 | raise ParseException(s, l, str(ve)) | ||
| 5590 | return cvt_fn | ||
| 5591 | |||
| 5592 | @staticmethod | ||
| 5593 | def convertToDatetime(fmt="%Y-%m-%dT%H:%M:%S.%f"): | ||
| 5594 | """ | ||
| 5595 | Helper to create a parse action for converting parsed datetime string to Python datetime.datetime | ||
| 5596 | |||
| 5597 | Params - | ||
| 5598 | - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%dT%H:%M:%S.%f"}) | ||
| 5599 | |||
| 5600 | Example:: | ||
| 5601 | dt_expr = pyparsing_common.iso8601_datetime.copy() | ||
| 5602 | dt_expr.setParseAction(pyparsing_common.convertToDatetime()) | ||
| 5603 | print(dt_expr.parseString("1999-12-31T23:59:59.999")) | ||
| 5604 | prints:: | ||
| 5605 | [datetime.datetime(1999, 12, 31, 23, 59, 59, 999000)] | ||
| 5606 | """ | ||
| 5607 | def cvt_fn(s,l,t): | ||
| 5608 | try: | ||
| 5609 | return datetime.strptime(t[0], fmt) | ||
| 5610 | except ValueError as ve: | ||
| 5611 | raise ParseException(s, l, str(ve)) | ||
| 5612 | return cvt_fn | ||
| 5613 | |||
| 5614 | iso8601_date = Regex(r'(?P<year>\d{4})(?:-(?P<month>\d\d)(?:-(?P<day>\d\d))?)?').setName("ISO8601 date") | ||
| 5615 | "ISO8601 date (C{yyyy-mm-dd})" | ||
| 5616 | |||
| 5617 | iso8601_datetime = Regex(r'(?P<year>\d{4})-(?P<month>\d\d)-(?P<day>\d\d)[T ](?P<hour>\d\d):(?P<minute>\d\d)(:(?P<second>\d\d(\.\d*)?)?)?(?P<tz>Z|[+-]\d\d:?\d\d)?').setName("ISO8601 datetime") | ||
| 5618 | "ISO8601 datetime (C{yyyy-mm-ddThh:mm:ss.s(Z|+-00:00)}) - trailing seconds, milliseconds, and timezone optional; accepts separating C{'T'} or C{' '}" | ||
| 5619 | |||
| 5620 | uuid = Regex(r'[0-9a-fA-F]{8}(-[0-9a-fA-F]{4}){3}-[0-9a-fA-F]{12}').setName("UUID") | ||
| 5621 | "UUID (C{xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx})" | ||
| 5622 | |||
| 5623 | _html_stripper = anyOpenTag.suppress() | anyCloseTag.suppress() | ||
| 5624 | @staticmethod | ||
| 5625 | def stripHTMLTags(s, l, tokens): | ||
| 5626 | """ | ||
| 5627 | Parse action to remove HTML tags from web page HTML source | ||
| 5628 | |||
| 5629 | Example:: | ||
| 5630 | # strip HTML links from normal text | ||
| 5631 | text = '<td>More info at the <a href="http://pyparsing.wikispaces.com">pyparsing</a> wiki page</td>' | ||
| 5632 | td,td_end = makeHTMLTags("TD") | ||
| 5633 | table_text = td + SkipTo(td_end).setParseAction(pyparsing_common.stripHTMLTags)("body") + td_end | ||
| 5634 | |||
| 5635 | print(table_text.parseString(text).body) # -> 'More info at the pyparsing wiki page' | ||
| 5636 | """ | ||
| 5637 | return pyparsing_common._html_stripper.transformString(tokens[0]) | ||
| 5638 | |||
| 5639 | _commasepitem = Combine(OneOrMore(~Literal(",") + ~LineEnd() + Word(printables, excludeChars=',') | ||
| 5640 | + Optional( White(" \t") ) ) ).streamline().setName("commaItem") | ||
| 5641 | comma_separated_list = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("comma separated list") | ||
| 5642 | """Predefined expression of 1 or more printable words or quoted strings, separated by commas.""" | ||
| 5643 | |||
| 5644 | upcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).upper())) | ||
| 5645 | """Parse action to convert tokens to upper case.""" | ||
| 5646 | |||
| 5647 | downcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).lower())) | ||
| 5648 | """Parse action to convert tokens to lower case.""" | ||
| 5649 | |||
| 5650 | |||
| 5651 | if __name__ == "__main__": | ||
| 5652 | |||
| 5653 | selectToken = CaselessLiteral("select") | ||
| 5654 | fromToken = CaselessLiteral("from") | ||
| 5655 | |||
| 5656 | ident = Word(alphas, alphanums + "_$") | ||
| 5657 | |||
| 5658 | columnName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens) | ||
| 5659 | columnNameList = Group(delimitedList(columnName)).setName("columns") | ||
| 5660 | columnSpec = ('*' | columnNameList) | ||
| 5661 | |||
| 5662 | tableName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens) | ||
| 5663 | tableNameList = Group(delimitedList(tableName)).setName("tables") | ||
| 5664 | |||
| 5665 | simpleSQL = selectToken("command") + columnSpec("columns") + fromToken + tableNameList("tables") | ||
| 5666 | |||
| 5667 | # demo runTests method, including embedded comments in test string | ||
| 5668 | simpleSQL.runTests(""" | ||
| 5669 | # '*' as column list and dotted table name | ||
| 5670 | select * from SYS.XYZZY | ||
| 5671 | |||
| 5672 | # caseless match on "SELECT", and casts back to "select" | ||
| 5673 | SELECT * from XYZZY, ABC | ||
| 5674 | |||
| 5675 | # list of column names, and mixed case SELECT keyword | ||
| 5676 | Select AA,BB,CC from Sys.dual | ||
| 5677 | |||
| 5678 | # multiple tables | ||
| 5679 | Select A, B, C from Sys.dual, Table2 | ||
| 5680 | |||
| 5681 | # invalid SELECT keyword - should fail | ||
| 5682 | Xelect A, B, C from Sys.dual | ||
| 5683 | |||
| 5684 | # incomplete command - should fail | ||
| 5685 | Select | ||
| 5686 | |||
| 5687 | # invalid column name - should fail | ||
| 5688 | Select ^^^ frox Sys.dual | ||
| 5689 | |||
| 5690 | """) | ||
| 5691 | |||
| 5692 | pyparsing_common.number.runTests(""" | ||
| 5693 | 100 | ||
| 5694 | -100 | ||
| 5695 | +100 | ||
| 5696 | 3.14159 | ||
| 5697 | 6.02e23 | ||
| 5698 | 1e-12 | ||
| 5699 | """) | ||
| 5700 | |||
| 5701 | # any int or real number, returned as float | ||
| 5702 | pyparsing_common.fnumber.runTests(""" | ||
| 5703 | 100 | ||
| 5704 | -100 | ||
| 5705 | +100 | ||
| 5706 | 3.14159 | ||
| 5707 | 6.02e23 | ||
| 5708 | 1e-12 | ||
| 5709 | """) | ||
| 5710 | |||
| 5711 | pyparsing_common.hex_integer.runTests(""" | ||
| 5712 | 100 | ||
| 5713 | FF | ||
| 5714 | """) | ||
| 5715 | |||
| 5716 | import uuid | ||
| 5717 | pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID)) | ||
| 5718 | pyparsing_common.uuid.runTests(""" | ||
| 5719 | 12345678-1234-5678-1234-567812345678 | ||
| 5720 | """) | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/pytoml/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/pytoml/__init__.py new file mode 100644 index 0000000..222a196 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/pytoml/__init__.py | |||
| @@ -0,0 +1,3 @@ | |||
| 1 | from .core import TomlError | ||
| 2 | from .parser import load, loads | ||
| 3 | from .writer import dump, dumps | ||
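A minimal round-trip sketch of the API re-exported above (within pip's tree this package imports as pip._vendor.pytoml; standalone it is plain pytoml):

    from pip._vendor import pytoml

    doc = pytoml.loads('[server]\nhost = "127.0.0.1"\nport = 8080\n')
    assert doc == {'server': {'host': '127.0.0.1', 'port': 8080}}
    text = pytoml.dumps(doc)            # serialize back to TOML source text
    assert pytoml.loads(text) == doc    # the round trip preserves the data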
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/pytoml/core.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/pytoml/core.py new file mode 100644 index 0000000..0fcada4 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/pytoml/core.py | |||
| @@ -0,0 +1,13 @@ | |||
| 1 | class TomlError(RuntimeError): | ||
| 2 | def __init__(self, message, line, col, filename): | ||
| 3 | RuntimeError.__init__(self, message, line, col, filename) | ||
| 4 | self.message = message | ||
| 5 | self.line = line | ||
| 6 | self.col = col | ||
| 7 | self.filename = filename | ||
| 8 | |||
| 9 | def __str__(self): | ||
| 10 | return '{}({}, {}): {}'.format(self.filename, self.line, self.col, self.message) | ||
| 11 | |||
| 12 | def __repr__(self): | ||
| 13 | return 'TomlError({!r}, {!r}, {!r}, {!r})'.format(self.message, self.line, self.col, self.filename) | ||
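TomlError carries the position of the failure, which makes diagnostics straightforward to surface; a small sketch:

    from pip._vendor.pytoml import TomlError, loads

    try:
        loads('a = 1\na = 2', filename='cfg.toml')
    except TomlError as e:
        # str(e) renders as: cfg.toml(2, 1): duplicate_keys. Key "a" was used more than once.
        print(e.filename, e.line, e.col)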
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/pytoml/parser.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/pytoml/parser.py new file mode 100644 index 0000000..c416ed5 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/pytoml/parser.py | |||
| @@ -0,0 +1,374 @@ | |||
| 1 | import string, re, sys, datetime | ||
| 2 | from .core import TomlError | ||
| 3 | |||
| 4 | if sys.version_info[0] == 2: | ||
| 5 | _chr = unichr | ||
| 6 | else: | ||
| 7 | _chr = chr | ||
| 8 | |||
| 9 | def load(fin, translate=lambda t, x, v: v): | ||
| 10 | return loads(fin.read(), translate=translate, filename=getattr(fin, 'name', repr(fin))) | ||
| 11 | |||
| 12 | def loads(s, filename='<string>', translate=lambda t, x, v: v): | ||
| 13 | if isinstance(s, bytes): | ||
| 14 | s = s.decode('utf-8') | ||
| 15 | |||
| 16 | s = s.replace('\r\n', '\n') | ||
| 17 | |||
| 18 | root = {} | ||
| 19 | tables = {} | ||
| 20 | scope = root | ||
| 21 | |||
| 22 | src = _Source(s, filename=filename) | ||
| 23 | ast = _p_toml(src) | ||
| 24 | |||
| 25 | def error(msg): | ||
| 26 | raise TomlError(msg, pos[0], pos[1], filename) | ||
| 27 | |||
| 28 | def process_value(v): | ||
| 29 | kind, text, value, pos = v | ||
| 30 | if kind == 'str' and value.startswith('\n'): | ||
| 31 | value = value[1:] | ||
| 32 | if kind == 'array': | ||
| 33 | if value and any(k != value[0][0] for k, t, v, p in value[1:]): | ||
| 34 | error('array-type-mismatch') | ||
| 35 | value = [process_value(item) for item in value] | ||
| 36 | elif kind == 'table': | ||
| 37 | value = dict([(k, process_value(value[k])) for k in value]) | ||
| 38 | return translate(kind, text, value) | ||
| 39 | |||
| 40 | for kind, value, pos in ast: | ||
| 41 | if kind == 'kv': | ||
| 42 | k, v = value | ||
| 43 | if k in scope: | ||
| 44 | error('duplicate_keys. Key "{0}" was used more than once.'.format(k)) | ||
| 45 | scope[k] = process_value(v) | ||
| 46 | else: | ||
| 47 | is_table_array = (kind == 'table_array') | ||
| 48 | cur = tables | ||
| 49 | for name in value[:-1]: | ||
| 50 | if isinstance(cur.get(name), list): | ||
| 51 | d, cur = cur[name][-1] | ||
| 52 | else: | ||
| 53 | d, cur = cur.setdefault(name, (None, {})) | ||
| 54 | |||
| 55 | scope = {} | ||
| 56 | name = value[-1] | ||
| 57 | if name not in cur: | ||
| 58 | if is_table_array: | ||
| 59 | cur[name] = [(scope, {})] | ||
| 60 | else: | ||
| 61 | cur[name] = (scope, {}) | ||
| 62 | elif isinstance(cur[name], list): | ||
| 63 | if not is_table_array: | ||
| 64 | error('table_type_mismatch') | ||
| 65 | cur[name].append((scope, {})) | ||
| 66 | else: | ||
| 67 | if is_table_array: | ||
| 68 | error('table_type_mismatch') | ||
| 69 | old_scope, next_table = cur[name] | ||
| 70 | if old_scope is not None: | ||
| 71 | error('duplicate_tables') | ||
| 72 | cur[name] = (scope, next_table) | ||
| 73 | |||
| 74 | def merge_tables(scope, tables): | ||
| 75 | if scope is None: | ||
| 76 | scope = {} | ||
| 77 | for k in tables: | ||
| 78 | if k in scope: | ||
| 79 | error('key_table_conflict') | ||
| 80 | v = tables[k] | ||
| 81 | if isinstance(v, list): | ||
| 82 | scope[k] = [merge_tables(sc, tbl) for sc, tbl in v] | ||
| 83 | else: | ||
| 84 | scope[k] = merge_tables(v[0], v[1]) | ||
| 85 | return scope | ||
| 86 | |||
| 87 | return merge_tables(root, tables) | ||
| 88 | |||
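The translate callback above is invoked once per parsed value with (kind, text, value), and whatever it returns is stored in the result. For instance, a hypothetical hook (not part of the API) that keeps the raw source text of integers instead of the converted int:

    def keep_int_text(kind, text, value):
        # 'text' is the matched source fragment; 'value' is the converted object
        return text if kind == 'int' else value

    assert loads('answer = 42', translate=keep_int_text) == {'answer': '42'}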
| 89 | class _Source: | ||
| 90 | def __init__(self, s, filename=None): | ||
| 91 | self.s = s | ||
| 92 | self._pos = (1, 1) | ||
| 93 | self._last = None | ||
| 94 | self._filename = filename | ||
| 95 | self.backtrack_stack = [] | ||
| 96 | |||
| 97 | def last(self): | ||
| 98 | return self._last | ||
| 99 | |||
| 100 | def pos(self): | ||
| 101 | return self._pos | ||
| 102 | |||
| 103 | def fail(self): | ||
| 104 | return self._expect(None) | ||
| 105 | |||
| 106 | def consume_dot(self): | ||
| 107 | if self.s: | ||
| 108 | self._last = self.s[0] | ||
| 109 | self.s = self.s[1:]	| ||
| 110 | self._advance(self._last) | ||
| 111 | return self._last | ||
| 112 | return None | ||
| 113 | |||
| 114 | def expect_dot(self): | ||
| 115 | return self._expect(self.consume_dot()) | ||
| 116 | |||
| 117 | def consume_eof(self): | ||
| 118 | if not self.s: | ||
| 119 | self._last = '' | ||
| 120 | return True | ||
| 121 | return False | ||
| 122 | |||
| 123 | def expect_eof(self): | ||
| 124 | return self._expect(self.consume_eof()) | ||
| 125 | |||
| 126 | def consume(self, s): | ||
| 127 | if self.s.startswith(s): | ||
| 128 | self.s = self.s[len(s):] | ||
| 129 | self._last = s | ||
| 130 | self._advance(s) | ||
| 131 | return True | ||
| 132 | return False | ||
| 133 | |||
| 134 | def expect(self, s): | ||
| 135 | return self._expect(self.consume(s)) | ||
| 136 | |||
| 137 | def consume_re(self, re): | ||
| 138 | m = re.match(self.s) | ||
| 139 | if m: | ||
| 140 | self.s = self.s[len(m.group(0)):] | ||
| 141 | self._last = m | ||
| 142 | self._advance(m.group(0)) | ||
| 143 | return m | ||
| 144 | return None | ||
| 145 | |||
| 146 | def expect_re(self, re): | ||
| 147 | return self._expect(self.consume_re(re)) | ||
| 148 | |||
| 149 | def __enter__(self): | ||
| 150 | self.backtrack_stack.append((self.s, self._pos)) | ||
| 151 | |||
| 152 | def __exit__(self, type, value, traceback): | ||
| 153 | if type is None: | ||
| 154 | self.backtrack_stack.pop() | ||
| 155 | else: | ||
| 156 | self.s, self._pos = self.backtrack_stack.pop() | ||
| 157 | return type == TomlError | ||
| 158 | |||
| 159 | def commit(self): | ||
| 160 | self.backtrack_stack[-1] = (self.s, self._pos) | ||
| 161 | |||
| 162 | def _expect(self, r): | ||
| 163 | if not r: | ||
| 164 | raise TomlError('msg', self._pos[0], self._pos[1], self._filename) | ||
| 165 | return r | ||
| 166 | |||
| 167 | def _advance(self, s): | ||
| 168 | suffix_pos = s.rfind('\n') | ||
| 169 | if suffix_pos == -1: | ||
| 170 | self._pos = (self._pos[0], self._pos[1] + len(s)) | ||
| 171 | else: | ||
| 172 | self._pos = (self._pos[0] + s.count('\n'), len(s) - suffix_pos) | ||
| 173 | |||
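_Source doubles as a backtracking context manager: __enter__ snapshots the remaining input and position, __exit__ restores them and swallows the TomlError when a parse branch fails, and commit() refreshes the snapshot once a branch has definitely matched. A standalone illustration of the protocol (not part of the parser itself):

    src = _Source('abc')
    with src:
        src.expect('x')      # no match -> TomlError; __exit__ rewinds and swallows it
    assert src.consume('a')  # the input was restored, so 'a' still matches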
| 174 | _ews_re = re.compile(r'(?:[ \t]|#[^\n]*\n|#[^\n]*\Z|\n)*') | ||
| 175 | def _p_ews(s): | ||
| 176 | s.expect_re(_ews_re) | ||
| 177 | |||
| 178 | _ws_re = re.compile(r'[ \t]*') | ||
| 179 | def _p_ws(s): | ||
| 180 | s.expect_re(_ws_re) | ||
| 181 | |||
| 182 | _escapes = { 'b': '\b', 'n': '\n', 'r': '\r', 't': '\t', '"': '"', '\'': '\'', | ||
| 183 | '\\': '\\', '/': '/', 'f': '\f' } | ||
| 184 | |||
| 185 | _basicstr_re = re.compile(r'[^"\\\000-\037]*') | ||
| 186 | _short_uni_re = re.compile(r'u([0-9a-fA-F]{4})') | ||
| 187 | _long_uni_re = re.compile(r'U([0-9a-fA-F]{8})') | ||
| 188 | _escapes_re = re.compile('[bnrt"\'\\\\/f]') | ||
| 189 | _newline_esc_re = re.compile('\n[ \t\n]*') | ||
| 190 | def _p_basicstr_content(s, content=_basicstr_re): | ||
| 191 | res = [] | ||
| 192 | while True: | ||
| 193 | res.append(s.expect_re(content).group(0)) | ||
| 194 | if not s.consume('\\'): | ||
| 195 | break | ||
| 196 | if s.consume_re(_newline_esc_re): | ||
| 197 | pass | ||
| 198 | elif s.consume_re(_short_uni_re) or s.consume_re(_long_uni_re): | ||
| 199 | res.append(_chr(int(s.last().group(1), 16))) | ||
| 200 | else: | ||
| 201 | s.expect_re(_escapes_re) | ||
| 202 | res.append(_escapes[s.last().group(0)]) | ||
| 203 | return ''.join(res) | ||
| 204 | |||
| 205 | _key_re = re.compile(r'[0-9a-zA-Z-_]+') | ||
| 206 | def _p_key(s): | ||
| 207 | with s: | ||
| 208 | s.expect('"') | ||
| 209 | r = _p_basicstr_content(s, _basicstr_re) | ||
| 210 | s.expect('"') | ||
| 211 | return r | ||
| 212 | if s.consume('\''): | ||
| 213 | if s.consume('\'\''): | ||
| 214 | r = s.expect_re(_litstr_ml_re).group(0) | ||
| 215 | s.expect('\'\'\'') | ||
| 216 | else: | ||
| 217 | r = s.expect_re(_litstr_re).group(0) | ||
| 218 | s.expect('\'') | ||
| 219 | return r | ||
| 220 | return s.expect_re(_key_re).group(0) | ||
| 221 | |||
| 222 | _float_re = re.compile(r'[+-]?(?:0|[1-9](?:_?\d)*)(?:\.\d(?:_?\d)*)?(?:[eE][+-]?(?:\d(?:_?\d)*))?') | ||
| 223 | _datetime_re = re.compile(r'(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2})(\.\d+)?(?:Z|([+-]\d{2}):(\d{2}))') | ||
| 224 | |||
| 225 | _basicstr_ml_re = re.compile(r'(?:(?:|"|"")[^"\\\000-\011\013-\037])*') | ||
| 226 | _litstr_re = re.compile(r"[^'\000-\037]*") | ||
| 227 | _litstr_ml_re = re.compile(r"(?:(?:|'|'')(?:[^'\000-\011\013-\037]))*") | ||
| 228 | def _p_value(s): | ||
| 229 | pos = s.pos() | ||
| 230 | |||
| 231 | if s.consume('true'): | ||
| 232 | return 'bool', s.last(), True, pos | ||
| 233 | if s.consume('false'): | ||
| 234 | return 'bool', s.last(), False, pos | ||
| 235 | |||
| 236 | if s.consume('"'): | ||
| 237 | if s.consume('""'): | ||
| 238 | r = _p_basicstr_content(s, _basicstr_ml_re) | ||
| 239 | s.expect('"""') | ||
| 240 | else: | ||
| 241 | r = _p_basicstr_content(s, _basicstr_re) | ||
| 242 | s.expect('"') | ||
| 243 | return 'str', r, r, pos | ||
| 244 | |||
| 245 | if s.consume('\''): | ||
| 246 | if s.consume('\'\''): | ||
| 247 | r = s.expect_re(_litstr_ml_re).group(0) | ||
| 248 | s.expect('\'\'\'') | ||
| 249 | else: | ||
| 250 | r = s.expect_re(_litstr_re).group(0) | ||
| 251 | s.expect('\'') | ||
| 252 | return 'str', r, r, pos | ||
| 253 | |||
| 254 | if s.consume_re(_datetime_re): | ||
| 255 | m = s.last() | ||
| 256 | s0 = m.group(0) | ||
| 257 | r = map(int, m.groups()[:6]) | ||
| 258 | if m.group(7): | ||
| 259 | micro = float(m.group(7)) | ||
| 260 | else: | ||
| 261 | micro = 0 | ||
| 262 | |||
| 263 | if m.group(8): | ||
| 264 | g = int(m.group(8), 10) * 60 + int(m.group(9), 10) | ||
| 265 | tz = _TimeZone(datetime.timedelta(0, g * 60)) | ||
| 266 | else: | ||
| 267 | tz = _TimeZone(datetime.timedelta(0, 0)) | ||
| 268 | |||
| 269 | y, m, d, H, M, S = r | ||
| 270 | dt = datetime.datetime(y, m, d, H, M, S, int(micro * 1000000), tz) | ||
| 271 | return 'datetime', s0, dt, pos | ||
| 272 | |||
| 273 | if s.consume_re(_float_re): | ||
| 274 | m = s.last().group(0) | ||
| 275 | r = m.replace('_','') | ||
| 276 | if '.' in m or 'e' in m or 'E' in m: | ||
| 277 | return 'float', m, float(r), pos | ||
| 278 | else: | ||
| 279 | return 'int', m, int(r, 10), pos | ||
| 280 | |||
| 281 | if s.consume('['): | ||
| 282 | items = [] | ||
| 283 | with s: | ||
| 284 | while True: | ||
| 285 | _p_ews(s) | ||
| 286 | items.append(_p_value(s)) | ||
| 287 | s.commit() | ||
| 288 | _p_ews(s) | ||
| 289 | s.expect(',') | ||
| 290 | s.commit() | ||
| 291 | _p_ews(s) | ||
| 292 | s.expect(']') | ||
| 293 | return 'array', None, items, pos | ||
| 294 | |||
| 295 | if s.consume('{'): | ||
| 296 | _p_ws(s) | ||
| 297 | items = {} | ||
| 298 | if not s.consume('}'): | ||
| 299 | k = _p_key(s) | ||
| 300 | _p_ws(s) | ||
| 301 | s.expect('=') | ||
| 302 | _p_ws(s) | ||
| 303 | items[k] = _p_value(s) | ||
| 304 | _p_ws(s) | ||
| 305 | while s.consume(','): | ||
| 306 | _p_ws(s) | ||
| 307 | k = _p_key(s) | ||
| 308 | _p_ws(s) | ||
| 309 | s.expect('=') | ||
| 310 | _p_ws(s) | ||
| 311 | items[k] = _p_value(s) | ||
| 312 | _p_ws(s) | ||
| 313 | s.expect('}') | ||
| 314 | return 'table', None, items, pos | ||
| 315 | |||
| 316 | s.fail() | ||
| 317 | |||
| 318 | def _p_stmt(s): | ||
| 319 | pos = s.pos() | ||
| 320 | if s.consume( '['): | ||
| 321 | is_array = s.consume('[') | ||
| 322 | _p_ws(s) | ||
| 323 | keys = [_p_key(s)] | ||
| 324 | _p_ws(s) | ||
| 325 | while s.consume('.'): | ||
| 326 | _p_ws(s) | ||
| 327 | keys.append(_p_key(s)) | ||
| 328 | _p_ws(s) | ||
| 329 | s.expect(']') | ||
| 330 | if is_array: | ||
| 331 | s.expect(']') | ||
| 332 | return 'table_array' if is_array else 'table', keys, pos | ||
| 333 | |||
| 334 | key = _p_key(s) | ||
| 335 | _p_ws(s) | ||
| 336 | s.expect('=') | ||
| 337 | _p_ws(s) | ||
| 338 | value = _p_value(s) | ||
| 339 | return 'kv', (key, value), pos | ||
| 340 | |||
| 341 | _stmtsep_re = re.compile(r'(?:[ \t]*(?:#[^\n]*)?\n)+[ \t]*') | ||
| 342 | def _p_toml(s): | ||
| 343 | stmts = [] | ||
| 344 | _p_ews(s) | ||
| 345 | with s: | ||
| 346 | stmts.append(_p_stmt(s)) | ||
| 347 | while True: | ||
| 348 | s.commit() | ||
| 349 | s.expect_re(_stmtsep_re) | ||
| 350 | stmts.append(_p_stmt(s)) | ||
| 351 | _p_ews(s) | ||
| 352 | s.expect_eof() | ||
| 353 | return stmts | ||
| 354 | |||
| 355 | class _TimeZone(datetime.tzinfo): | ||
| 356 | def __init__(self, offset): | ||
| 357 | self._offset = offset | ||
| 358 | |||
| 359 | def utcoffset(self, dt): | ||
| 360 | return self._offset | ||
| 361 | |||
| 362 | def dst(self, dt): | ||
| 363 | return None | ||
| 364 | |||
| 365 | def tzname(self, dt): | ||
| 366 | m = self._offset.total_seconds() // 60 | ||
| 367 | if m < 0: | ||
| 368 | res = '-' | ||
| 369 | m = -m | ||
| 370 | else: | ||
| 371 | res = '+' | ||
| 372 | h = m // 60 | ||
| 373 | m = m - h * 60 | ||
| 374 | return '{}{:.02}{:.02}'.format(res, h, m) | ||
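Datetime values come back as timezone-aware datetime.datetime objects carrying a _TimeZone offset, for example (sketch):

    import datetime
    from pip._vendor.pytoml import loads

    ts = loads('ts = 1999-12-31T23:59:59.500+01:00')['ts']
    assert ts.utcoffset() == datetime.timedelta(hours=1)
    assert ts.microsecond == 500000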
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/pytoml/writer.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/pytoml/writer.py new file mode 100644 index 0000000..19a8c6e --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/pytoml/writer.py | |||
| @@ -0,0 +1,127 @@ | |||
| 1 | from __future__ import unicode_literals | ||
| 2 | import io, datetime, math, sys | ||
| 3 | |||
| 4 | if sys.version_info[0] == 3: | ||
| 5 | long = int | ||
| 6 | unicode = str | ||
| 7 | |||
| 8 | |||
| 9 | def dumps(obj, sort_keys=False): | ||
| 10 | fout = io.StringIO() | ||
| 11 | dump(obj, fout, sort_keys=sort_keys) | ||
| 12 | return fout.getvalue() | ||
| 13 | |||
| 14 | |||
| 15 | _escapes = {'\n': 'n', '\r': 'r', '\\': '\\', '\t': 't', '\b': 'b', '\f': 'f', '"': '"'} | ||
| 16 | |||
| 17 | |||
| 18 | def _escape_string(s): | ||
| 19 | res = [] | ||
| 20 | start = 0 | ||
| 21 | |||
| 22 | def flush(): | ||
| 23 | if start != i: | ||
| 24 | res.append(s[start:i]) | ||
| 25 | return i + 1 | ||
| 26 | |||
| 27 | i = 0 | ||
| 28 | while i < len(s): | ||
| 29 | c = s[i] | ||
| 30 | if c in '"\\\n\r\t\b\f': | ||
| 31 | start = flush() | ||
| 32 | res.append('\\' + _escapes[c]) | ||
| 33 | elif ord(c) < 0x20: | ||
| 34 | start = flush() | ||
| 35 | res.append('\\u%04x' % ord(c)) | ||
| 36 | i += 1 | ||
| 37 | |||
| 38 | flush() | ||
| 39 | return '"' + ''.join(res) + '"' | ||
| 40 | |||
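The escaper makes a single pass over the string, flushing runs of literal characters and emitting backslash escapes for quotes, backslashes, and control characters, e.g. (sketch):

    assert _escape_string('say "hi"\n') == '"say \\"hi\\"\\n"'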
| 41 | |||
| 42 | def _escape_id(s): | ||
| 43 | if any(not c.isalnum() and c not in '-_' for c in s): | ||
| 44 | return _escape_string(s) | ||
| 45 | return s | ||
| 46 | |||
| 47 | |||
| 48 | def _format_list(v): | ||
| 49 | return '[{0}]'.format(', '.join(_format_value(obj) for obj in v)) | ||
| 50 | |||
| 51 | # Formula from: | ||
| 52 | # https://docs.python.org/2/library/datetime.html#datetime.timedelta.total_seconds | ||
| 53 | # Once support for py26 is dropped, this can be replaced by td.total_seconds() | ||
| 54 | def _total_seconds(td): | ||
| 55 | return ((td.microseconds | ||
| 56 | + (td.seconds + td.days * 24 * 3600) * 10**6) / 10.0**6) | ||
| 57 | |||
| 58 | def _format_value(v): | ||
| 59 | if isinstance(v, bool): | ||
| 60 | return 'true' if v else 'false' | ||
| 61 | if isinstance(v, int) or isinstance(v, long): | ||
| 62 | return unicode(v) | ||
| 63 | if isinstance(v, float): | ||
| 64 | if math.isnan(v) or math.isinf(v): | ||
| 65 | raise ValueError("{0} is not a valid TOML value".format(v)) | ||
| 66 | else: | ||
| 67 | return repr(v) | ||
| 68 | elif isinstance(v, unicode) or isinstance(v, bytes): | ||
| 69 | return _escape_string(v) | ||
| 70 | elif isinstance(v, datetime.datetime): | ||
| 71 | offs = v.utcoffset() | ||
| 72 | offs = _total_seconds(offs) // 60 if offs is not None else 0 | ||
| 73 | |||
| 74 | if offs == 0: | ||
| 75 | suffix = 'Z' | ||
| 76 | else: | ||
| 77 | if offs > 0: | ||
| 78 | suffix = '+' | ||
| 79 | else: | ||
| 80 | suffix = '-' | ||
| 81 | offs = -offs | ||
| 82 | suffix = '{0}{1:.02}{2:.02}'.format(suffix, offs // 60, offs % 60) | ||
| 83 | |||
| 84 | if v.microsecond: | ||
| 85 | return v.strftime('%Y-%m-%dT%H:%M:%S.%f') + suffix | ||
| 86 | else: | ||
| 87 | return v.strftime('%Y-%m-%dT%H:%M:%S') + suffix | ||
| 88 | elif isinstance(v, list): | ||
| 89 | return _format_list(v) | ||
| 90 | else: | ||
| 91 | raise RuntimeError(v) | ||
| 92 | |||
| 93 | |||
| 94 | def dump(obj, fout, sort_keys=False): | ||
| 95 | tables = [((), obj, False)] | ||
| 96 | |||
| 97 | while tables: | ||
| 98 | name, table, is_array = tables.pop() | ||
| 99 | if name: | ||
| 100 | section_name = '.'.join(_escape_id(c) for c in name) | ||
| 101 | if is_array: | ||
| 102 | fout.write('[[{0}]]\n'.format(section_name)) | ||
| 103 | else: | ||
| 104 | fout.write('[{0}]\n'.format(section_name)) | ||
| 105 | |||
| 106 | table_keys = sorted(table.keys()) if sort_keys else table.keys() | ||
| 107 | new_tables = [] | ||
| 108 | has_kv = False | ||
| 109 | for k in table_keys: | ||
| 110 | v = table[k] | ||
| 111 | if isinstance(v, dict): | ||
| 112 | new_tables.append((name + (k,), v, False)) | ||
| 113 | elif isinstance(v, list) and v and all(isinstance(o, dict) for o in v): | ||
| 114 | new_tables.extend((name + (k,), d, True) for d in v) | ||
| 115 | elif v is None: | ||
| 116 | # based on mojombo's comment: https://github.com/toml-lang/toml/issues/146#issuecomment-25019344 | ||
| 117 | fout.write( | ||
| 118 | '#{} = null # To use: uncomment and replace null with value\n'.format(_escape_id(k))) | ||
| 119 | has_kv = True | ||
| 120 | else: | ||
| 121 | fout.write('{0} = {1}\n'.format(_escape_id(k), _format_value(v))) | ||
| 122 | has_kv = True | ||
| 123 | |||
| 124 | tables.extend(reversed(new_tables)) | ||
| 125 | |||
| 126 | if (name or has_kv) and tables: | ||
| 127 | fout.write('\n') | ||
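The writer above drives everything from dump(): scalars are emitted under the current header, nested dicts are queued as [table] sections, and lists of dicts become [[array-of-tables]] entries. A minimal usage sketch against the dumps() helper defined in this file (the expected output is reconstructed from the traversal logic, so treat it as illustrative):

    from pip._vendor.pytoml.writer import dumps

    doc = {
        'title': 'example',            # scalar: written before any table header
        'owner': {'name': 'Tom'},      # dict: becomes an [owner] table
        'servers': [{'host': 'alpha'},
                    {'host': 'beta'}], # list of dicts: [[servers]] entries
    }
    print(dumps(doc, sort_keys=True))
    # title = "example"
    #
    # [owner]
    # name = "Tom"
    #
    # [[servers]]
    # host = "alpha"
    #
    # [[servers]]
    # host = "beta"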
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/__init__.py new file mode 100644 index 0000000..f9565cb --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/__init__.py | |||
| @@ -0,0 +1,123 @@ | |||
| 1 | # -*- coding: utf-8 -*- | ||
| 2 | |||
| 3 | # __ | ||
| 4 | # /__) _ _ _ _ _/ _ | ||
| 5 | # / ( (- (/ (/ (- _) / _) | ||
| 6 | # / | ||
| 7 | |||
| 8 | """ | ||
| 9 | Requests HTTP Library | ||
| 10 | ~~~~~~~~~~~~~~~~~~~~~ | ||
| 11 | |||
| 12 | Requests is an HTTP library, written in Python, for human beings. Basic GET | ||
| 13 | usage: | ||
| 14 | |||
| 15 | >>> import requests | ||
| 16 | >>> r = requests.get('https://www.python.org') | ||
| 17 | >>> r.status_code | ||
| 18 | 200 | ||
| 19 | >>> b'Python is a programming language' in r.content | ||
| 20 | True | ||
| 21 | |||
| 22 | ... or POST: | ||
| 23 | |||
| 24 | >>> payload = dict(key1='value1', key2='value2') | ||
| 25 | >>> r = requests.post('http://httpbin.org/post', data=payload) | ||
| 26 | >>> print(r.text) | ||
| 27 | { | ||
| 28 | ... | ||
| 29 | "form": { | ||
| 30 | "key2": "value2", | ||
| 31 | "key1": "value1" | ||
| 32 | }, | ||
| 33 | ... | ||
| 34 | } | ||
| 35 | |||
| 36 | The other HTTP methods are supported - see `requests.api`. Full documentation | ||
| 37 | is at <http://python-requests.org>. | ||
| 38 | |||
| 39 | :copyright: (c) 2017 by Kenneth Reitz. | ||
| 40 | :license: Apache 2.0, see LICENSE for more details. | ||
| 41 | """ | ||
| 42 | |||
| 43 | from pip._vendor import urllib3 | ||
| 44 | from pip._vendor import chardet | ||
| 45 | import warnings | ||
| 46 | from .exceptions import RequestsDependencyWarning | ||
| 47 | |||
| 48 | |||
| 49 | def check_compatibility(urllib3_version, chardet_version): | ||
| 50 | urllib3_version = urllib3_version.split('.') | ||
| 51 | assert urllib3_version != ['dev'] # Verify urllib3 isn't installed from git. | ||
| 52 | |||
| 53 | # Sometimes, urllib3 only reports its version with two components, e.g. '1.22'. | ||
| 54 | if len(urllib3_version) == 2: | ||
| 55 | urllib3_version.append('0') | ||
| 56 | |||
| 57 | # Check urllib3 for compatibility. | ||
| 58 | major, minor, patch = urllib3_version # noqa: F811 | ||
| 59 | major, minor, patch = int(major), int(minor), int(patch) | ||
| 60 | # urllib3 >= 1.21.1, <= 1.22 | ||
| 61 | assert major == 1 | ||
| 62 | assert minor >= 21 | ||
| 63 | assert minor <= 22 | ||
| 64 | |||
| 65 | # Check chardet for compatibility. | ||
| 66 | major, minor, patch = chardet_version.split('.')[:3] | ||
| 67 | major, minor, patch = int(major), int(minor), int(patch) | ||
| 68 | # chardet >= 3.0.2, < 3.1.0 | ||
| 69 | assert major == 3 | ||
| 70 | assert minor < 1 | ||
| 71 | assert patch >= 2 | ||
| 72 | |||
| 73 | |||
| 74 | # Check imported dependencies for compatibility. | ||
| 75 | try: | ||
| 76 | check_compatibility(urllib3.__version__, chardet.__version__) | ||
| 77 | except (AssertionError, ValueError): | ||
| 78 | warnings.warn("urllib3 ({0}) or chardet ({1}) doesn't match a supported " | ||
| 79 | "version!".format(urllib3.__version__, chardet.__version__), | ||
| 80 | RequestsDependencyWarning) | ||
| 81 | |||
| 82 | # Attempt to enable urllib3's SNI support, if possible | ||
| 83 | from pip._internal.compat import WINDOWS | ||
| 84 | if not WINDOWS: | ||
| 85 | try: | ||
| 86 | from pip._vendor.urllib3.contrib import pyopenssl | ||
| 87 | pyopenssl.inject_into_urllib3() | ||
| 88 | except ImportError: | ||
| 89 | pass | ||
| 90 | |||
| 91 | # urllib3's DependencyWarnings should be silenced. | ||
| 92 | from pip._vendor.urllib3.exceptions import DependencyWarning | ||
| 93 | warnings.simplefilter('ignore', DependencyWarning) | ||
| 94 | |||
| 95 | from .__version__ import __title__, __description__, __url__, __version__ | ||
| 96 | from .__version__ import __build__, __author__, __author_email__, __license__ | ||
| 97 | from .__version__ import __copyright__, __cake__ | ||
| 98 | |||
| 99 | from . import utils | ||
| 100 | from . import packages | ||
| 101 | from .models import Request, Response, PreparedRequest | ||
| 102 | from .api import request, get, head, post, patch, put, delete, options | ||
| 103 | from .sessions import session, Session | ||
| 104 | from .status_codes import codes | ||
| 105 | from .exceptions import ( | ||
| 106 | RequestException, Timeout, URLRequired, | ||
| 107 | TooManyRedirects, HTTPError, ConnectionError, | ||
| 108 | FileModeWarning, ConnectTimeout, ReadTimeout | ||
| 109 | ) | ||
| 110 | |||
| 111 | # Set default logging handler to avoid "No handler found" warnings. | ||
| 112 | import logging | ||
| 113 | try: # Python 2.7+ | ||
| 114 | from logging import NullHandler | ||
| 115 | except ImportError: | ||
| 116 | class NullHandler(logging.Handler): | ||
| 117 | def emit(self, record): | ||
| 118 | pass | ||
| 119 | |||
| 120 | logging.getLogger(__name__).addHandler(NullHandler()) | ||
| 121 | |||
| 122 | # FileModeWarnings use the 'default' filter action: shown once per location. | ||
| 123 | warnings.simplefilter('default', FileModeWarning, append=True) | ||
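check_compatibility() is nothing more than asserts over parsed version tuples; the try/except at import time downgrades a failure to a RequestsDependencyWarning instead of refusing to import. A small sketch exercising it directly (version strings chosen purely for illustration):

    from pip._vendor import requests

    # In-range versions pass silently; two-component urllib3 versions
    # such as '1.22' get a '.0' patch level appended first.
    requests.check_compatibility('1.22', '3.0.4')

    # Out-of-range versions raise AssertionError, which the import-time
    # guard above converts into a RequestsDependencyWarning.
    try:
        requests.check_compatibility('1.20.0', '3.0.4')
    except AssertionError:
        print('urllib3 1.20.0 is outside the supported >=1.21.1, <=1.22 window')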
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/__version__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/__version__.py new file mode 100644 index 0000000..d380286 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/__version__.py | |||
| @@ -0,0 +1,14 @@ | |||
| 1 | # .-. .-. .-. . . .-. .-. .-. .-. | ||
| 2 | # |( |- |.| | | |- `-. | `-. | ||
| 3 | # ' ' `-' `-`.`-' `-' `-' ' `-' | ||
| 4 | |||
| 5 | __title__ = 'requests' | ||
| 6 | __description__ = 'Python HTTP for Humans.' | ||
| 7 | __url__ = 'http://python-requests.org' | ||
| 8 | __version__ = '2.18.4' | ||
| 9 | __build__ = 0x021804 | ||
| 10 | __author__ = 'Kenneth Reitz' | ||
| 11 | __author_email__ = 'me@kennethreitz.org' | ||
| 12 | __license__ = 'Apache 2.0' | ||
| 13 | __copyright__ = 'Copyright 2017 Kenneth Reitz' | ||
| 14 | __cake__ = u'\u2728 \U0001f370 \u2728' | ||
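__build__ appears to pack the release with one hex byte pair per version component, each pair read as decimal digits (0x021804 spells 2.18.4); that reading is an inference from the value, not documented upstream. A quick sanity check:

    build = 0x021804
    digits = '%06x' % build                        # '021804'
    parts = [int(digits[i:i + 2]) for i in (0, 2, 4)]
    assert parts == [2, 18, 4]                     # matches __version__ = '2.18.4'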
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/_internal_utils.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/_internal_utils.py new file mode 100644 index 0000000..405b025 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/_internal_utils.py | |||
| @@ -0,0 +1,42 @@ | |||
| 1 | # -*- coding: utf-8 -*- | ||
| 2 | |||
| 3 | """ | ||
| 4 | requests._internal_utils | ||
| 5 | ~~~~~~~~~~~~~~~~~~~~~~~~ | ||
| 6 | |||
| 7 | Provides utility functions that are consumed internally by Requests | ||
| 8 | and that depend on extremely few external helpers (such as compat). | ||
| 9 | """ | ||
| 10 | |||
| 11 | from .compat import is_py2, builtin_str, str | ||
| 12 | |||
| 13 | |||
| 14 | def to_native_string(string, encoding='ascii'): | ||
| 15 | """Given a string object, regardless of type, returns a representation of | ||
| 16 | that string in the native string type, encoding and decoding where | ||
| 17 | necessary. This assumes ASCII unless told otherwise. | ||
| 18 | """ | ||
| 19 | if isinstance(string, builtin_str): | ||
| 20 | out = string | ||
| 21 | else: | ||
| 22 | if is_py2: | ||
| 23 | out = string.encode(encoding) | ||
| 24 | else: | ||
| 25 | out = string.decode(encoding) | ||
| 26 | |||
| 27 | return out | ||
| 28 | |||
| 29 | |||
| 30 | def unicode_is_ascii(u_string): | ||
| 31 | """Determine if unicode string only contains ASCII characters. | ||
| 32 | |||
| 33 | :param str u_string: unicode string to check. Must be unicode | ||
| 34 | and not Python 2 `str`. | ||
| 35 | :rtype: bool | ||
| 36 | """ | ||
| 37 | assert isinstance(u_string, str) | ||
| 38 | try: | ||
| 39 | u_string.encode('ascii') | ||
| 40 | return True | ||
| 41 | except UnicodeEncodeError: | ||
| 42 | return False | ||
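On Python 3, builtin_str is str, so to_native_string() decodes bytes and passes str through unchanged, while unicode_is_ascii() is an encode-and-catch probe. A short usage sketch:

    from pip._vendor.requests._internal_utils import (
        to_native_string, unicode_is_ascii)

    # bytes are decoded (ASCII by default); str passes through untouched.
    assert to_native_string(b'Proxy-Authorization') == 'Proxy-Authorization'
    assert to_native_string('Proxy-Authorization') == 'Proxy-Authorization'

    # True only when every code point encodes to ASCII.
    assert unicode_is_ascii('token') is True
    assert unicode_is_ascii('t\xf6ken') is False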
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/adapters.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/adapters.py new file mode 100644 index 0000000..c50585c --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/adapters.py | |||
| @@ -0,0 +1,525 @@ | |||
| 1 | # -*- coding: utf-8 -*- | ||
| 2 | |||
| 3 | """ | ||
| 4 | requests.adapters | ||
| 5 | ~~~~~~~~~~~~~~~~~ | ||
| 6 | |||
| 7 | This module contains the transport adapters that Requests uses to define | ||
| 8 | and maintain connections. | ||
| 9 | """ | ||
| 10 | |||
| 11 | import os.path | ||
| 12 | import socket | ||
| 13 | |||
| 14 | from pip._vendor.urllib3.poolmanager import PoolManager, proxy_from_url | ||
| 15 | from pip._vendor.urllib3.response import HTTPResponse | ||
| 16 | from pip._vendor.urllib3.util import Timeout as TimeoutSauce | ||
| 17 | from pip._vendor.urllib3.util.retry import Retry | ||
| 18 | from pip._vendor.urllib3.exceptions import ClosedPoolError | ||
| 19 | from pip._vendor.urllib3.exceptions import ConnectTimeoutError | ||
| 20 | from pip._vendor.urllib3.exceptions import HTTPError as _HTTPError | ||
| 21 | from pip._vendor.urllib3.exceptions import MaxRetryError | ||
| 22 | from pip._vendor.urllib3.exceptions import NewConnectionError | ||
| 23 | from pip._vendor.urllib3.exceptions import ProxyError as _ProxyError | ||
| 24 | from pip._vendor.urllib3.exceptions import ProtocolError | ||
| 25 | from pip._vendor.urllib3.exceptions import ReadTimeoutError | ||
| 26 | from pip._vendor.urllib3.exceptions import SSLError as _SSLError | ||
| 27 | from pip._vendor.urllib3.exceptions import ResponseError | ||
| 28 | |||
| 29 | from .models import Response | ||
| 30 | from .compat import urlparse, basestring | ||
| 31 | from .utils import (DEFAULT_CA_BUNDLE_PATH, get_encoding_from_headers, | ||
| 32 | prepend_scheme_if_needed, get_auth_from_url, urldefragauth, | ||
| 33 | select_proxy) | ||
| 34 | from .structures import CaseInsensitiveDict | ||
| 35 | from .cookies import extract_cookies_to_jar | ||
| 36 | from .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError, | ||
| 37 | ProxyError, RetryError, InvalidSchema) | ||
| 38 | from .auth import _basic_auth_str | ||
| 39 | |||
| 40 | try: | ||
| 41 | from pip._vendor.urllib3.contrib.socks import SOCKSProxyManager | ||
| 42 | except ImportError: | ||
| 43 | def SOCKSProxyManager(*args, **kwargs): | ||
| 44 | raise InvalidSchema("Missing dependencies for SOCKS support.") | ||
| 45 | |||
| 46 | DEFAULT_POOLBLOCK = False | ||
| 47 | DEFAULT_POOLSIZE = 10 | ||
| 48 | DEFAULT_RETRIES = 0 | ||
| 49 | DEFAULT_POOL_TIMEOUT = None | ||
| 50 | |||
| 51 | |||
| 52 | class BaseAdapter(object): | ||
| 53 | """The Base Transport Adapter""" | ||
| 54 | |||
| 55 | def __init__(self): | ||
| 56 | super(BaseAdapter, self).__init__() | ||
| 57 | |||
| 58 | def send(self, request, stream=False, timeout=None, verify=True, | ||
| 59 | cert=None, proxies=None): | ||
| 60 | """Sends PreparedRequest object. Returns Response object. | ||
| 61 | |||
| 62 | :param request: The :class:`PreparedRequest <PreparedRequest>` being sent. | ||
| 63 | :param stream: (optional) Whether to stream the request content. | ||
| 64 | :param timeout: (optional) How long to wait for the server to send | ||
| 65 | data before giving up, as a float, or a :ref:`(connect timeout, | ||
| 66 | read timeout) <timeouts>` tuple. | ||
| 67 | :type timeout: float or tuple | ||
| 68 | :param verify: (optional) Either a boolean, in which case it controls whether we verify | ||
| 69 | the server's TLS certificate, or a string, in which case it must be a path | ||
| 70 | to a CA bundle to use | ||
| 71 | :param cert: (optional) Any user-provided SSL certificate to be trusted. | ||
| 72 | :param proxies: (optional) The proxies dictionary to apply to the request. | ||
| 73 | """ | ||
| 74 | raise NotImplementedError | ||
| 75 | |||
| 76 | def close(self): | ||
| 77 | """Cleans up adapter specific items.""" | ||
| 78 | raise NotImplementedError | ||
| 79 | |||
| 80 | |||
| 81 | class HTTPAdapter(BaseAdapter): | ||
| 82 | """The built-in HTTP Adapter for urllib3. | ||
| 83 | |||
| 84 | Provides a general-case interface for Requests sessions to contact HTTP and | ||
| 85 | HTTPS urls by implementing the Transport Adapter interface. This class will | ||
| 86 | usually be created by the :class:`Session <Session>` class under the | ||
| 87 | covers. | ||
| 88 | |||
| 89 | :param pool_connections: The number of urllib3 connection pools to cache. | ||
| 90 | :param pool_maxsize: The maximum number of connections to save in the pool. | ||
| 91 | :param max_retries: The maximum number of retries each connection | ||
| 92 | should attempt. Note, this applies only to failed DNS lookups, socket | ||
| 93 | connections and connection timeouts, never to requests where data has | ||
| 94 | made it to the server. By default, Requests does not retry failed | ||
| 95 | connections. If you need granular control over the conditions under | ||
| 96 | which we retry a request, import urllib3's ``Retry`` class and pass | ||
| 97 | that instead. | ||
| 98 | :param pool_block: Whether the connection pool should block for connections. | ||
| 99 | |||
| 100 | Usage:: | ||
| 101 | |||
| 102 | >>> import requests | ||
| 103 | >>> s = requests.Session() | ||
| 104 | >>> a = requests.adapters.HTTPAdapter(max_retries=3) | ||
| 105 | >>> s.mount('http://', a) | ||
| 106 | """ | ||
| 107 | __attrs__ = ['max_retries', 'config', '_pool_connections', '_pool_maxsize', | ||
| 108 | '_pool_block'] | ||
| 109 | |||
| 110 | def __init__(self, pool_connections=DEFAULT_POOLSIZE, | ||
| 111 | pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES, | ||
| 112 | pool_block=DEFAULT_POOLBLOCK): | ||
| 113 | if max_retries == DEFAULT_RETRIES: | ||
| 114 | self.max_retries = Retry(0, read=False) | ||
| 115 | else: | ||
| 116 | self.max_retries = Retry.from_int(max_retries) | ||
| 117 | self.config = {} | ||
| 118 | self.proxy_manager = {} | ||
| 119 | |||
| 120 | super(HTTPAdapter, self).__init__() | ||
| 121 | |||
| 122 | self._pool_connections = pool_connections | ||
| 123 | self._pool_maxsize = pool_maxsize | ||
| 124 | self._pool_block = pool_block | ||
| 125 | |||
| 126 | self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block) | ||
| 127 | |||
| 128 | def __getstate__(self): | ||
| 129 | return dict((attr, getattr(self, attr, None)) for attr in | ||
| 130 | self.__attrs__) | ||
| 131 | |||
| 132 | def __setstate__(self, state): | ||
| 133 | # Can't handle by adding 'proxy_manager' to self.__attrs__ because | ||
| 134 | # self.poolmanager uses a lambda function, which isn't pickleable. | ||
| 135 | self.proxy_manager = {} | ||
| 136 | self.config = {} | ||
| 137 | |||
| 138 | for attr, value in state.items(): | ||
| 139 | setattr(self, attr, value) | ||
| 140 | |||
| 141 | self.init_poolmanager(self._pool_connections, self._pool_maxsize, | ||
| 142 | block=self._pool_block) | ||
| 143 | |||
| 144 | def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs): | ||
| 145 | """Initializes a urllib3 PoolManager. | ||
| 146 | |||
| 147 | This method should not be called from user code, and is only | ||
| 148 | exposed for use when subclassing the | ||
| 149 | :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. | ||
| 150 | |||
| 151 | :param connections: The number of urllib3 connection pools to cache. | ||
| 152 | :param maxsize: The maximum number of connections to save in the pool. | ||
| 153 | :param block: Block when no free connections are available. | ||
| 154 | :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager. | ||
| 155 | """ | ||
| 156 | # save these values for pickling | ||
| 157 | self._pool_connections = connections | ||
| 158 | self._pool_maxsize = maxsize | ||
| 159 | self._pool_block = block | ||
| 160 | |||
| 161 | self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize, | ||
| 162 | block=block, strict=True, **pool_kwargs) | ||
| 163 | |||
| 164 | def proxy_manager_for(self, proxy, **proxy_kwargs): | ||
| 165 | """Return urllib3 ProxyManager for the given proxy. | ||
| 166 | |||
| 167 | This method should not be called from user code, and is only | ||
| 168 | exposed for use when subclassing the | ||
| 169 | :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. | ||
| 170 | |||
| 171 | :param proxy: The proxy to return a urllib3 ProxyManager for. | ||
| 172 | :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager. | ||
| 173 | :returns: ProxyManager | ||
| 174 | :rtype: urllib3.ProxyManager | ||
| 175 | """ | ||
| 176 | if proxy in self.proxy_manager: | ||
| 177 | manager = self.proxy_manager[proxy] | ||
| 178 | elif proxy.lower().startswith('socks'): | ||
| 179 | username, password = get_auth_from_url(proxy) | ||
| 180 | manager = self.proxy_manager[proxy] = SOCKSProxyManager( | ||
| 181 | proxy, | ||
| 182 | username=username, | ||
| 183 | password=password, | ||
| 184 | num_pools=self._pool_connections, | ||
| 185 | maxsize=self._pool_maxsize, | ||
| 186 | block=self._pool_block, | ||
| 187 | **proxy_kwargs | ||
| 188 | ) | ||
| 189 | else: | ||
| 190 | proxy_headers = self.proxy_headers(proxy) | ||
| 191 | manager = self.proxy_manager[proxy] = proxy_from_url( | ||
| 192 | proxy, | ||
| 193 | proxy_headers=proxy_headers, | ||
| 194 | num_pools=self._pool_connections, | ||
| 195 | maxsize=self._pool_maxsize, | ||
| 196 | block=self._pool_block, | ||
| 197 | **proxy_kwargs) | ||
| 198 | |||
| 199 | return manager | ||
| 200 | |||
| 201 | def cert_verify(self, conn, url, verify, cert): | ||
| 202 | """Verify a SSL certificate. This method should not be called from user | ||
| 203 | code, and is only exposed for use when subclassing the | ||
| 204 | :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. | ||
| 205 | |||
| 206 | :param conn: The urllib3 connection object associated with the cert. | ||
| 207 | :param url: The requested URL. | ||
| 208 | :param verify: Either a boolean, in which case it controls whether we verify | ||
| 209 | the server's TLS certificate, or a string, in which case it must be a path | ||
| 210 | to a CA bundle to use | ||
| 211 | :param cert: The SSL certificate to verify. | ||
| 212 | """ | ||
| 213 | if url.lower().startswith('https') and verify: | ||
| 214 | |||
| 215 | cert_loc = None | ||
| 216 | |||
| 217 | # Allow self-specified cert location. | ||
| 218 | if verify is not True: | ||
| 219 | cert_loc = verify | ||
| 220 | |||
| 221 | if not cert_loc: | ||
| 222 | cert_loc = DEFAULT_CA_BUNDLE_PATH | ||
| 223 | |||
| 224 | if not cert_loc or not os.path.exists(cert_loc): | ||
| 225 | raise IOError("Could not find a suitable TLS CA certificate bundle, " | ||
| 226 | "invalid path: {0}".format(cert_loc)) | ||
| 227 | |||
| 228 | conn.cert_reqs = 'CERT_REQUIRED' | ||
| 229 | |||
| 230 | if not os.path.isdir(cert_loc): | ||
| 231 | conn.ca_certs = cert_loc | ||
| 232 | else: | ||
| 233 | conn.ca_cert_dir = cert_loc | ||
| 234 | else: | ||
| 235 | conn.cert_reqs = 'CERT_NONE' | ||
| 236 | conn.ca_certs = None | ||
| 237 | conn.ca_cert_dir = None | ||
| 238 | |||
| 239 | if cert: | ||
| 240 | if not isinstance(cert, basestring): | ||
| 241 | conn.cert_file = cert[0] | ||
| 242 | conn.key_file = cert[1] | ||
| 243 | else: | ||
| 244 | conn.cert_file = cert | ||
| 245 | conn.key_file = None | ||
| 246 | if conn.cert_file and not os.path.exists(conn.cert_file): | ||
| 247 | raise IOError("Could not find the TLS certificate file, " | ||
| 248 | "invalid path: {0}".format(conn.cert_file)) | ||
| 249 | if conn.key_file and not os.path.exists(conn.key_file): | ||
| 250 | raise IOError("Could not find the TLS key file, " | ||
| 251 | "invalid path: {0}".format(conn.key_file)) | ||
| 252 | |||
| 253 | def build_response(self, req, resp): | ||
| 254 | """Builds a :class:`Response <requests.Response>` object from a urllib3 | ||
| 255 | response. This should not be called from user code, and is only exposed | ||
| 256 | for use when subclassing the | ||
| 257 | :class:`HTTPAdapter <requests.adapters.HTTPAdapter>` | ||
| 258 | |||
| 259 | :param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response. | ||
| 260 | :param resp: The urllib3 response object. | ||
| 261 | :rtype: requests.Response | ||
| 262 | """ | ||
| 263 | response = Response() | ||
| 264 | |||
| 265 | # Fallback to None if there's no status_code, for whatever reason. | ||
| 266 | response.status_code = getattr(resp, 'status', None) | ||
| 267 | |||
| 268 | # Make headers case-insensitive. | ||
| 269 | response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {})) | ||
| 270 | |||
| 271 | # Set encoding. | ||
| 272 | response.encoding = get_encoding_from_headers(response.headers) | ||
| 273 | response.raw = resp | ||
| 274 | response.reason = response.raw.reason | ||
| 275 | |||
| 276 | if isinstance(req.url, bytes): | ||
| 277 | response.url = req.url.decode('utf-8') | ||
| 278 | else: | ||
| 279 | response.url = req.url | ||
| 280 | |||
| 281 | # Add new cookies from the server. | ||
| 282 | extract_cookies_to_jar(response.cookies, req, resp) | ||
| 283 | |||
| 284 | # Give the Response some context. | ||
| 285 | response.request = req | ||
| 286 | response.connection = self | ||
| 287 | |||
| 288 | return response | ||
| 289 | |||
| 290 | def get_connection(self, url, proxies=None): | ||
| 291 | """Returns a urllib3 connection for the given URL. This should not be | ||
| 292 | called from user code, and is only exposed for use when subclassing the | ||
| 293 | :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. | ||
| 294 | |||
| 295 | :param url: The URL to connect to. | ||
| 296 | :param proxies: (optional) A Requests-style dictionary of proxies used on this request. | ||
| 297 | :rtype: urllib3.ConnectionPool | ||
| 298 | """ | ||
| 299 | proxy = select_proxy(url, proxies) | ||
| 300 | |||
| 301 | if proxy: | ||
| 302 | proxy = prepend_scheme_if_needed(proxy, 'http') | ||
| 303 | proxy_manager = self.proxy_manager_for(proxy) | ||
| 304 | conn = proxy_manager.connection_from_url(url) | ||
| 305 | else: | ||
| 306 | # Only scheme should be lower case | ||
| 307 | parsed = urlparse(url) | ||
| 308 | url = parsed.geturl() | ||
| 309 | conn = self.poolmanager.connection_from_url(url) | ||
| 310 | |||
| 311 | return conn | ||
| 312 | |||
| 313 | def close(self): | ||
| 314 | """Disposes of any internal state. | ||
| 315 | |||
| 316 | Currently, this closes the PoolManager and any active ProxyManager, | ||
| 317 | which closes any pooled connections. | ||
| 318 | """ | ||
| 319 | self.poolmanager.clear() | ||
| 320 | for proxy in self.proxy_manager.values(): | ||
| 321 | proxy.clear() | ||
| 322 | |||
| 323 | def request_url(self, request, proxies): | ||
| 324 | """Obtain the url to use when making the final request. | ||
| 325 | |||
| 326 | If the message is being sent through an HTTP proxy, the full URL has to | ||
| 327 | be used. Otherwise, we should only use the path portion of the URL. | ||
| 328 | |||
| 329 | This should not be called from user code, and is only exposed for use | ||
| 330 | when subclassing the | ||
| 331 | :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. | ||
| 332 | |||
| 333 | :param request: The :class:`PreparedRequest <PreparedRequest>` being sent. | ||
| 334 | :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs. | ||
| 335 | :rtype: str | ||
| 336 | """ | ||
| 337 | proxy = select_proxy(request.url, proxies) | ||
| 338 | scheme = urlparse(request.url).scheme | ||
| 339 | |||
| 340 | is_proxied_http_request = (proxy and scheme != 'https') | ||
| 341 | using_socks_proxy = False | ||
| 342 | if proxy: | ||
| 343 | proxy_scheme = urlparse(proxy).scheme.lower() | ||
| 344 | using_socks_proxy = proxy_scheme.startswith('socks') | ||
| 345 | |||
| 346 | url = request.path_url | ||
| 347 | if is_proxied_http_request and not using_socks_proxy: | ||
| 348 | url = urldefragauth(request.url) | ||
| 349 | |||
| 350 | return url | ||
| 351 | |||
| 352 | def add_headers(self, request, **kwargs): | ||
| 353 | """Add any headers needed by the connection. As of v2.0 this does | ||
| 354 | nothing by default, but is left for overriding by users that subclass | ||
| 355 | the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. | ||
| 356 | |||
| 357 | This should not be called from user code, and is only exposed for use | ||
| 358 | when subclassing the | ||
| 359 | :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. | ||
| 360 | |||
| 361 | :param request: The :class:`PreparedRequest <PreparedRequest>` to add headers to. | ||
| 362 | :param kwargs: The keyword arguments from the call to send(). | ||
| 363 | """ | ||
| 364 | pass | ||
| 365 | |||
| 366 | def proxy_headers(self, proxy): | ||
| 367 | """Returns a dictionary of the headers to add to any request sent | ||
| 368 | through a proxy. This works with urllib3 magic to ensure that they are | ||
| 369 | correctly sent to the proxy, rather than in a tunnelled request if | ||
| 370 | CONNECT is being used. | ||
| 371 | |||
| 372 | This should not be called from user code, and is only exposed for use | ||
| 373 | when subclassing the | ||
| 374 | :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. | ||
| 375 | |||
| 376 | :param proxy: The URL of the proxy being used for this request. | ||
| 377 | :rtype: dict | ||
| 378 | """ | ||
| 379 | headers = {} | ||
| 380 | username, password = get_auth_from_url(proxy) | ||
| 381 | |||
| 382 | if username: | ||
| 383 | headers['Proxy-Authorization'] = _basic_auth_str(username, | ||
| 384 | password) | ||
| 385 | |||
| 386 | return headers | ||
| 387 | |||
| 388 | def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None): | ||
| 389 | """Sends PreparedRequest object. Returns Response object. | ||
| 390 | |||
| 391 | :param request: The :class:`PreparedRequest <PreparedRequest>` being sent. | ||
| 392 | :param stream: (optional) Whether to stream the request content. | ||
| 393 | :param timeout: (optional) How long to wait for the server to send | ||
| 394 | data before giving up, as a float, or a :ref:`(connect timeout, | ||
| 395 | read timeout) <timeouts>` tuple. | ||
| 396 | :type timeout: float or tuple or urllib3 Timeout object | ||
| 397 | :param verify: (optional) Either a boolean, in which case it controls whether | ||
| 398 | we verify the server's TLS certificate, or a string, in which case it | ||
| 399 | must be a path to a CA bundle to use | ||
| 400 | :param cert: (optional) Any user-provided SSL certificate to be trusted. | ||
| 401 | :param proxies: (optional) The proxies dictionary to apply to the request. | ||
| 402 | :rtype: requests.Response | ||
| 403 | """ | ||
| 404 | |||
| 405 | conn = self.get_connection(request.url, proxies) | ||
| 406 | |||
| 407 | self.cert_verify(conn, request.url, verify, cert) | ||
| 408 | url = self.request_url(request, proxies) | ||
| 409 | self.add_headers(request) | ||
| 410 | |||
| 411 | chunked = not (request.body is None or 'Content-Length' in request.headers) | ||
| 412 | |||
| 413 | if isinstance(timeout, tuple): | ||
| 414 | try: | ||
| 415 | connect, read = timeout | ||
| 416 | timeout = TimeoutSauce(connect=connect, read=read) | ||
| 417 | except ValueError as e: | ||
| 418 | # this may raise a string formatting error. | ||
| 419 | err = ("Invalid timeout {0}. Pass a (connect, read) " | ||
| 420 | "timeout tuple, or a single float to set " | ||
| 421 | "both timeouts to the same value".format(timeout)) | ||
| 422 | raise ValueError(err) | ||
| 423 | elif isinstance(timeout, TimeoutSauce): | ||
| 424 | pass | ||
| 425 | else: | ||
| 426 | timeout = TimeoutSauce(connect=timeout, read=timeout) | ||
| 427 | |||
| 428 | try: | ||
| 429 | if not chunked: | ||
| 430 | resp = conn.urlopen( | ||
| 431 | method=request.method, | ||
| 432 | url=url, | ||
| 433 | body=request.body, | ||
| 434 | headers=request.headers, | ||
| 435 | redirect=False, | ||
| 436 | assert_same_host=False, | ||
| 437 | preload_content=False, | ||
| 438 | decode_content=False, | ||
| 439 | retries=self.max_retries, | ||
| 440 | timeout=timeout | ||
| 441 | ) | ||
| 442 | |||
| 443 | # Send the request. | ||
| 444 | else: | ||
| 445 | if hasattr(conn, 'proxy_pool'): | ||
| 446 | conn = conn.proxy_pool | ||
| 447 | |||
| 448 | low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT) | ||
| 449 | |||
| 450 | try: | ||
| 451 | low_conn.putrequest(request.method, | ||
| 452 | url, | ||
| 453 | skip_accept_encoding=True) | ||
| 454 | |||
| 455 | for header, value in request.headers.items(): | ||
| 456 | low_conn.putheader(header, value) | ||
| 457 | |||
| 458 | low_conn.endheaders() | ||
| 459 | |||
| 460 | for i in request.body: | ||
| 461 | low_conn.send(hex(len(i))[2:].encode('utf-8')) | ||
| 462 | low_conn.send(b'\r\n') | ||
| 463 | low_conn.send(i) | ||
| 464 | low_conn.send(b'\r\n') | ||
| 465 | low_conn.send(b'0\r\n\r\n') | ||
| 466 | |||
| 467 | # Receive the response from the server | ||
| 468 | try: | ||
| 469 | # For Python 2.7+ versions, use buffering of HTTP | ||
| 470 | # responses | ||
| 471 | r = low_conn.getresponse(buffering=True) | ||
| 472 | except TypeError: | ||
| 473 | # For compatibility with Python 2.6 versions and back | ||
| 474 | r = low_conn.getresponse() | ||
| 475 | |||
| 476 | resp = HTTPResponse.from_httplib( | ||
| 477 | r, | ||
| 478 | pool=conn, | ||
| 479 | connection=low_conn, | ||
| 480 | preload_content=False, | ||
| 481 | decode_content=False | ||
| 482 | ) | ||
| 483 | except: | ||
| 484 | # If we hit any problems here, clean up the connection. | ||
| 485 | # Then, reraise so that we can handle the actual exception. | ||
| 486 | low_conn.close() | ||
| 487 | raise | ||
| 488 | |||
| 489 | except (ProtocolError, socket.error) as err: | ||
| 490 | raise ConnectionError(err, request=request) | ||
| 491 | |||
| 492 | except MaxRetryError as e: | ||
| 493 | if isinstance(e.reason, ConnectTimeoutError): | ||
| 494 | # TODO: Remove this in 3.0.0: see #2811 | ||
| 495 | if not isinstance(e.reason, NewConnectionError): | ||
| 496 | raise ConnectTimeout(e, request=request) | ||
| 497 | |||
| 498 | if isinstance(e.reason, ResponseError): | ||
| 499 | raise RetryError(e, request=request) | ||
| 500 | |||
| 501 | if isinstance(e.reason, _ProxyError): | ||
| 502 | raise ProxyError(e, request=request) | ||
| 503 | |||
| 504 | if isinstance(e.reason, _SSLError): | ||
| 505 | # This branch is for urllib3 v1.22 and later. | ||
| 506 | raise SSLError(e, request=request) | ||
| 507 | |||
| 508 | raise ConnectionError(e, request=request) | ||
| 509 | |||
| 510 | except ClosedPoolError as e: | ||
| 511 | raise ConnectionError(e, request=request) | ||
| 512 | |||
| 513 | except _ProxyError as e: | ||
| 514 | raise ProxyError(e) | ||
| 515 | |||
| 516 | except (_SSLError, _HTTPError) as e: | ||
| 517 | if isinstance(e, _SSLError): | ||
| 518 | # This branch is for urllib3 versions earlier than v1.22 | ||
| 519 | raise SSLError(e, request=request) | ||
| 520 | elif isinstance(e, ReadTimeoutError): | ||
| 521 | raise ReadTimeout(e, request=request) | ||
| 522 | else: | ||
| 523 | raise | ||
| 524 | |||
| 525 | return self.build_response(request, resp) | ||
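The mount() pattern from the class docstring is the intended entry point for this machinery: one HTTPAdapter owns a PoolManager sized by pool_connections/pool_maxsize, max_retries is normalized through Retry.from_int(), and send() converts a float or (connect, read) tuple into a urllib3 Timeout. A configuration sketch (the URL in the final comment is illustrative only):

    from pip._vendor import requests
    from pip._vendor.requests.adapters import HTTPAdapter

    session = requests.Session()
    adapter = HTTPAdapter(pool_connections=4,  # number of cached urllib3 pools
                          pool_maxsize=8,      # connections kept per pool
                          max_retries=3)       # becomes Retry.from_int(3)

    # Mount for both schemes so every request flows through the tuned pools.
    session.mount('http://', adapter)
    session.mount('https://', adapter)

    # A (connect, read) tuple is unpacked into TimeoutSauce(connect=3.05, read=27):
    # session.get('http://example.org/', timeout=(3.05, 27))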
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/api.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/api.py new file mode 100644 index 0000000..f9ffabf --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/api.py | |||
| @@ -0,0 +1,152 @@ | |||
| 1 | # -*- coding: utf-8 -*- | ||
| 2 | |||
| 3 | """ | ||
| 4 | requests.api | ||
| 5 | ~~~~~~~~~~~~ | ||
| 6 | |||
| 7 | This module implements the Requests API. | ||
| 8 | |||
| 9 | :copyright: (c) 2012 by Kenneth Reitz. | ||
| 10 | :license: Apache2, see LICENSE for more details. | ||
| 11 | """ | ||
| 12 | |||
| 13 | from . import sessions | ||
| 14 | |||
| 15 | |||
| 16 | def request(method, url, **kwargs): | ||
| 17 | """Constructs and sends a :class:`Request <Request>`. | ||
| 18 | |||
| 19 | :param method: method for the new :class:`Request` object. | ||
| 20 | :param url: URL for the new :class:`Request` object. | ||
| 21 | :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`. | ||
| 22 | :param data: (optional) Dictionary or list of tuples ``[(key, value)]`` (will be form-encoded), bytes, or file-like object to send in the body of the :class:`Request`. | ||
| 23 | :param json: (optional) json data to send in the body of the :class:`Request`. | ||
| 24 | :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`. | ||
| 25 | :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`. | ||
| 26 | :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload. | ||
| 27 | ``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')`` | ||
| 28 | or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content-type'`` is a string | ||
| 29 | defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers | ||
| 30 | to add for the file. | ||
| 31 | :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth. | ||
| 32 | :param timeout: (optional) How many seconds to wait for the server to send data | ||
| 33 | before giving up, as a float, or a :ref:`(connect timeout, read | ||
| 34 | timeout) <timeouts>` tuple. | ||
| 35 | :type timeout: float or tuple | ||
| 36 | :param allow_redirects: (optional) Boolean. Enable/disable GET/OPTIONS/POST/PUT/PATCH/DELETE/HEAD redirection. Defaults to ``True``. | ||
| 37 | :type allow_redirects: bool | ||
| 38 | :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy. | ||
| 39 | :param verify: (optional) Either a boolean, in which case it controls whether we verify | ||
| 40 | the server's TLS certificate, or a string, in which case it must be a path | ||
| 41 | to a CA bundle to use. Defaults to ``True``. | ||
| 42 | :param stream: (optional) if ``False``, the response content will be immediately downloaded. | ||
| 43 | :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair. | ||
| 44 | :return: :class:`Response <Response>` object | ||
| 45 | :rtype: requests.Response | ||
| 46 | |||
| 47 | Usage:: | ||
| 48 | |||
| 49 | >>> import requests | ||
| 50 | >>> requests.request('GET', 'http://httpbin.org/get') | ||
| 51 | <Response [200]> | ||
| 52 | """ | ||
| 53 | |||
| 54 | # By using the 'with' statement we are sure the session is closed, thus we | ||
| 55 | # avoid leaving sockets open which can trigger a ResourceWarning in some | ||
| 56 | # cases, and look like a memory leak in others. | ||
| 57 | with sessions.Session() as session: | ||
| 58 | return session.request(method=method, url=url, **kwargs) | ||
| 59 | |||
| 60 | |||
| 61 | def get(url, params=None, **kwargs): | ||
| 62 | r"""Sends a GET request. | ||
| 63 | |||
| 64 | :param url: URL for the new :class:`Request` object. | ||
| 65 | :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`. | ||
| 66 | :param \*\*kwargs: Optional arguments that ``request`` takes. | ||
| 67 | :return: :class:`Response <Response>` object | ||
| 68 | :rtype: requests.Response | ||
| 69 | """ | ||
| 70 | |||
| 71 | kwargs.setdefault('allow_redirects', True) | ||
| 72 | return request('get', url, params=params, **kwargs) | ||
| 73 | |||
| 74 | |||
| 75 | def options(url, **kwargs): | ||
| 76 | r"""Sends an OPTIONS request. | ||
| 77 | |||
| 78 | :param url: URL for the new :class:`Request` object. | ||
| 79 | :param \*\*kwargs: Optional arguments that ``request`` takes. | ||
| 80 | :return: :class:`Response <Response>` object | ||
| 81 | :rtype: requests.Response | ||
| 82 | """ | ||
| 83 | |||
| 84 | kwargs.setdefault('allow_redirects', True) | ||
| 85 | return request('options', url, **kwargs) | ||
| 86 | |||
| 87 | |||
| 88 | def head(url, **kwargs): | ||
| 89 | r"""Sends a HEAD request. | ||
| 90 | |||
| 91 | :param url: URL for the new :class:`Request` object. | ||
| 92 | :param \*\*kwargs: Optional arguments that ``request`` takes. | ||
| 93 | :return: :class:`Response <Response>` object | ||
| 94 | :rtype: requests.Response | ||
| 95 | """ | ||
| 96 | |||
| 97 | kwargs.setdefault('allow_redirects', False) | ||
| 98 | return request('head', url, **kwargs) | ||
| 99 | |||
| 100 | |||
| 101 | def post(url, data=None, json=None, **kwargs): | ||
| 102 | r"""Sends a POST request. | ||
| 103 | |||
| 104 | :param url: URL for the new :class:`Request` object. | ||
| 105 | :param data: (optional) Dictionary (will be form-encoded), bytes, or file-like object to send in the body of the :class:`Request`. | ||
| 106 | :param json: (optional) json data to send in the body of the :class:`Request`. | ||
| 107 | :param \*\*kwargs: Optional arguments that ``request`` takes. | ||
| 108 | :return: :class:`Response <Response>` object | ||
| 109 | :rtype: requests.Response | ||
| 110 | """ | ||
| 111 | |||
| 112 | return request('post', url, data=data, json=json, **kwargs) | ||
| 113 | |||
| 114 | |||
| 115 | def put(url, data=None, **kwargs): | ||
| 116 | r"""Sends a PUT request. | ||
| 117 | |||
| 118 | :param url: URL for the new :class:`Request` object. | ||
| 119 | :param data: (optional) Dictionary (will be form-encoded), bytes, or file-like object to send in the body of the :class:`Request`. | ||
| 120 | :param json: (optional) json data to send in the body of the :class:`Request`. | ||
| 121 | :param \*\*kwargs: Optional arguments that ``request`` takes. | ||
| 122 | :return: :class:`Response <Response>` object | ||
| 123 | :rtype: requests.Response | ||
| 124 | """ | ||
| 125 | |||
| 126 | return request('put', url, data=data, **kwargs) | ||
| 127 | |||
| 128 | |||
| 129 | def patch(url, data=None, **kwargs): | ||
| 130 | r"""Sends a PATCH request. | ||
| 131 | |||
| 132 | :param url: URL for the new :class:`Request` object. | ||
| 133 | :param data: (optional) Dictionary (will be form-encoded), bytes, or file-like object to send in the body of the :class:`Request`. | ||
| 134 | :param json: (optional) json data to send in the body of the :class:`Request`. | ||
| 135 | :param \*\*kwargs: Optional arguments that ``request`` takes. | ||
| 136 | :return: :class:`Response <Response>` object | ||
| 137 | :rtype: requests.Response | ||
| 138 | """ | ||
| 139 | |||
| 140 | return request('patch', url, data=data, **kwargs) | ||
| 141 | |||
| 142 | |||
| 143 | def delete(url, **kwargs): | ||
| 144 | r"""Sends a DELETE request. | ||
| 145 | |||
| 146 | :param url: URL for the new :class:`Request` object. | ||
| 147 | :param \*\*kwargs: Optional arguments that ``request`` takes. | ||
| 148 | :return: :class:`Response <Response>` object | ||
| 149 | :rtype: requests.Response | ||
| 150 | """ | ||
| 151 | |||
| 152 | return request('delete', url, **kwargs) | ||
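Every helper above funnels into request(), which opens a throwaway Session in a with block so the underlying sockets are closed per call. Typical invocations, mirroring the docstrings (httpbin.org as in the examples above):

    from pip._vendor import requests

    r = requests.get('http://httpbin.org/get', params={'q': 'pip'})      # query string
    r = requests.post('http://httpbin.org/post', json={'key': 'value'})  # JSON body
    r = requests.head('http://httpbin.org/get')                          # redirects off by default
    print(r.status_code)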
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/auth.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/auth.py new file mode 100644 index 0000000..73e4534 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/auth.py | |||
| @@ -0,0 +1,293 @@ | |||
| 1 | # -*- coding: utf-8 -*- | ||
| 2 | |||
| 3 | """ | ||
| 4 | requests.auth | ||
| 5 | ~~~~~~~~~~~~~ | ||
| 6 | |||
| 7 | This module contains the authentication handlers for Requests. | ||
| 8 | """ | ||
| 9 | |||
| 10 | import os | ||
| 11 | import re | ||
| 12 | import time | ||
| 13 | import hashlib | ||
| 14 | import threading | ||
| 15 | import warnings | ||
| 16 | |||
| 17 | from base64 import b64encode | ||
| 18 | |||
| 19 | from .compat import urlparse, str, basestring | ||
| 20 | from .cookies import extract_cookies_to_jar | ||
| 21 | from ._internal_utils import to_native_string | ||
| 22 | from .utils import parse_dict_header | ||
| 23 | |||
| 24 | CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded' | ||
| 25 | CONTENT_TYPE_MULTI_PART = 'multipart/form-data' | ||
| 26 | |||
| 27 | |||
| 28 | def _basic_auth_str(username, password): | ||
| 29 | """Returns a Basic Auth string.""" | ||
| 30 | |||
| 31 | # "I want us to put a big-ol' comment on top of it that | ||
| 32 | # says that this behaviour is dumb but we need to preserve | ||
| 33 | # it because people are relying on it." | ||
| 34 | # - Lukasa | ||
| 35 | # | ||
| 36 | # These are here solely to maintain backwards compatibility | ||
| 37 | # for things like ints. This will be removed in 3.0.0. | ||
| 38 | if not isinstance(username, basestring): | ||
| 39 | warnings.warn( | ||
| 40 | "Non-string usernames will no longer be supported in Requests " | ||
| 41 | "3.0.0. Please convert the object you've passed in ({0!r}) to " | ||
| 42 | "a string or bytes object in the near future to avoid " | ||
| 43 | "problems.".format(username), | ||
| 44 | category=DeprecationWarning, | ||
| 45 | ) | ||
| 46 | username = str(username) | ||
| 47 | |||
| 48 | if not isinstance(password, basestring): | ||
| 49 | warnings.warn( | ||
| 50 | "Non-string passwords will no longer be supported in Requests " | ||
| 51 | "3.0.0. Please convert the object you've passed in ({0!r}) to " | ||
| 52 | "a string or bytes object in the near future to avoid " | ||
| 53 | "problems.".format(password), | ||
| 54 | category=DeprecationWarning, | ||
| 55 | ) | ||
| 56 | password = str(password) | ||
| 57 | # -- End Removal -- | ||
| 58 | |||
| 59 | if isinstance(username, str): | ||
| 60 | username = username.encode('latin1') | ||
| 61 | |||
| 62 | if isinstance(password, str): | ||
| 63 | password = password.encode('latin1') | ||
| 64 | |||
| 65 | authstr = 'Basic ' + to_native_string( | ||
| 66 | b64encode(b':'.join((username, password))).strip() | ||
| 67 | ) | ||
| 68 | |||
| 69 | return authstr | ||
| 70 | |||
| 71 | |||
| 72 | class AuthBase(object): | ||
| 73 | """Base class that all auth implementations derive from""" | ||
| 74 | |||
| 75 | def __call__(self, r): | ||
| 76 | raise NotImplementedError('Auth hooks must be callable.') | ||
| 77 | |||
| 78 | |||
| 79 | class HTTPBasicAuth(AuthBase): | ||
| 80 | """Attaches HTTP Basic Authentication to the given Request object.""" | ||
| 81 | |||
| 82 | def __init__(self, username, password): | ||
| 83 | self.username = username | ||
| 84 | self.password = password | ||
| 85 | |||
| 86 | def __eq__(self, other): | ||
| 87 | return all([ | ||
| 88 | self.username == getattr(other, 'username', None), | ||
| 89 | self.password == getattr(other, 'password', None) | ||
| 90 | ]) | ||
| 91 | |||
| 92 | def __ne__(self, other): | ||
| 93 | return not self == other | ||
| 94 | |||
| 95 | def __call__(self, r): | ||
| 96 | r.headers['Authorization'] = _basic_auth_str(self.username, self.password) | ||
| 97 | return r | ||
| 98 | |||
| 99 | |||
| 100 | class HTTPProxyAuth(HTTPBasicAuth): | ||
| 101 | """Attaches HTTP Proxy Authentication to a given Request object.""" | ||
| 102 | |||
| 103 | def __call__(self, r): | ||
| 104 | r.headers['Proxy-Authorization'] = _basic_auth_str(self.username, self.password) | ||
| 105 | return r | ||
| 106 | |||
| 107 | |||
| 108 | class HTTPDigestAuth(AuthBase): | ||
| 109 | """Attaches HTTP Digest Authentication to the given Request object.""" | ||
| 110 | |||
| 111 | def __init__(self, username, password): | ||
| 112 | self.username = username | ||
| 113 | self.password = password | ||
| 114 | # Keep state in per-thread local storage | ||
| 115 | self._thread_local = threading.local() | ||
| 116 | |||
| 117 | def init_per_thread_state(self): | ||
| 118 | # Ensure state is initialized just once per-thread | ||
| 119 | if not hasattr(self._thread_local, 'init'): | ||
| 120 | self._thread_local.init = True | ||
| 121 | self._thread_local.last_nonce = '' | ||
| 122 | self._thread_local.nonce_count = 0 | ||
| 123 | self._thread_local.chal = {} | ||
| 124 | self._thread_local.pos = None | ||
| 125 | self._thread_local.num_401_calls = None | ||
| 126 | |||
| 127 | def build_digest_header(self, method, url): | ||
| 128 | """ | ||
| 129 | :rtype: str | ||
| 130 | """ | ||
| 131 | |||
| 132 | realm = self._thread_local.chal['realm'] | ||
| 133 | nonce = self._thread_local.chal['nonce'] | ||
| 134 | qop = self._thread_local.chal.get('qop') | ||
| 135 | algorithm = self._thread_local.chal.get('algorithm') | ||
| 136 | opaque = self._thread_local.chal.get('opaque') | ||
| 137 | hash_utf8 = None | ||
| 138 | |||
| 139 | if algorithm is None: | ||
| 140 | _algorithm = 'MD5' | ||
| 141 | else: | ||
| 142 | _algorithm = algorithm.upper() | ||
| 143 | # lambdas assume digest modules are imported at the top level | ||
| 144 | if _algorithm == 'MD5' or _algorithm == 'MD5-SESS': | ||
| 145 | def md5_utf8(x): | ||
| 146 | if isinstance(x, str): | ||
| 147 | x = x.encode('utf-8') | ||
| 148 | return hashlib.md5(x).hexdigest() | ||
| 149 | hash_utf8 = md5_utf8 | ||
| 150 | elif _algorithm == 'SHA': | ||
| 151 | def sha_utf8(x): | ||
| 152 | if isinstance(x, str): | ||
| 153 | x = x.encode('utf-8') | ||
| 154 | return hashlib.sha1(x).hexdigest() | ||
| 155 | hash_utf8 = sha_utf8 | ||
| 156 | |||
| 157 | KD = lambda s, d: hash_utf8("%s:%s" % (s, d)) | ||
| 158 | |||
| 159 | if hash_utf8 is None: | ||
| 160 | return None | ||
| 161 | |||
| 162 | # XXX not implemented yet | ||
| 163 | entdig = None | ||
| 164 | p_parsed = urlparse(url) | ||
| 165 | #: path is request-uri defined in RFC 2616 which should not be empty | ||
| 166 | path = p_parsed.path or "/" | ||
| 167 | if p_parsed.query: | ||
| 168 | path += '?' + p_parsed.query | ||
| 169 | |||
| 170 | A1 = '%s:%s:%s' % (self.username, realm, self.password) | ||
| 171 | A2 = '%s:%s' % (method, path) | ||
| 172 | |||
| 173 | HA1 = hash_utf8(A1) | ||
| 174 | HA2 = hash_utf8(A2) | ||
| 175 | |||
| 176 | if nonce == self._thread_local.last_nonce: | ||
| 177 | self._thread_local.nonce_count += 1 | ||
| 178 | else: | ||
| 179 | self._thread_local.nonce_count = 1 | ||
| 180 | ncvalue = '%08x' % self._thread_local.nonce_count | ||
| 181 | s = str(self._thread_local.nonce_count).encode('utf-8') | ||
| 182 | s += nonce.encode('utf-8') | ||
| 183 | s += time.ctime().encode('utf-8') | ||
| 184 | s += os.urandom(8) | ||
| 185 | |||
| 186 | cnonce = (hashlib.sha1(s).hexdigest()[:16]) | ||
| 187 | if _algorithm == 'MD5-SESS': | ||
| 188 | HA1 = hash_utf8('%s:%s:%s' % (HA1, nonce, cnonce)) | ||
| 189 | |||
| 190 | if not qop: | ||
| 191 | respdig = KD(HA1, "%s:%s" % (nonce, HA2)) | ||
| 192 | elif qop == 'auth' or 'auth' in qop.split(','): | ||
| 193 | noncebit = "%s:%s:%s:%s:%s" % ( | ||
| 194 | nonce, ncvalue, cnonce, 'auth', HA2 | ||
| 195 | ) | ||
| 196 | respdig = KD(HA1, noncebit) | ||
| 197 | else: | ||
| 198 | # XXX handle auth-int. | ||
| 199 | return None | ||
| 200 | |||
| 201 | self._thread_local.last_nonce = nonce | ||
| 202 | |||
| 203 | # XXX should the partial digests be encoded too? | ||
| 204 | base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \ | ||
| 205 | 'response="%s"' % (self.username, realm, nonce, path, respdig) | ||
| 206 | if opaque: | ||
| 207 | base += ', opaque="%s"' % opaque | ||
| 208 | if algorithm: | ||
| 209 | base += ', algorithm="%s"' % algorithm | ||
| 210 | if entdig: | ||
| 211 | base += ', digest="%s"' % entdig | ||
| 212 | if qop: | ||
| 213 | base += ', qop="auth", nc=%s, cnonce="%s"' % (ncvalue, cnonce) | ||
| 214 | |||
| 215 | return 'Digest %s' % (base) | ||
| 216 | |||
| 217 | def handle_redirect(self, r, **kwargs): | ||
| 218 | """Reset num_401_calls counter on redirects.""" | ||
| 219 | if r.is_redirect: | ||
| 220 | self._thread_local.num_401_calls = 1 | ||
| 221 | |||
| 222 | def handle_401(self, r, **kwargs): | ||
| 223 | """ | ||
| 224 | Takes the given response and tries digest-auth, if needed. | ||
| 225 | |||
| 226 | :rtype: requests.Response | ||
| 227 | """ | ||
| 228 | |||
| 229 | # If response is not 4xx, do not auth | ||
| 230 | # See https://github.com/requests/requests/issues/3772 | ||
| 231 | if not 400 <= r.status_code < 500: | ||
| 232 | self._thread_local.num_401_calls = 1 | ||
| 233 | return r | ||
| 234 | |||
| 235 | if self._thread_local.pos is not None: | ||
| 236 | # Rewind the file position indicator of the body to where | ||
| 237 | # it was to resend the request. | ||
| 238 | r.request.body.seek(self._thread_local.pos) | ||
| 239 | s_auth = r.headers.get('www-authenticate', '') | ||
| 240 | |||
| 241 | if 'digest' in s_auth.lower() and self._thread_local.num_401_calls < 2: | ||
| 242 | |||
| 243 | self._thread_local.num_401_calls += 1 | ||
| 244 | pat = re.compile(r'digest ', flags=re.IGNORECASE) | ||
| 245 | self._thread_local.chal = parse_dict_header(pat.sub('', s_auth, count=1)) | ||
| 246 | |||
| 247 | # Consume content and release the original connection | ||
| 248 | # to allow our new request to reuse the same one. | ||
| 249 | r.content | ||
| 250 | r.close() | ||
| 251 | prep = r.request.copy() | ||
| 252 | extract_cookies_to_jar(prep._cookies, r.request, r.raw) | ||
| 253 | prep.prepare_cookies(prep._cookies) | ||
| 254 | |||
| 255 | prep.headers['Authorization'] = self.build_digest_header( | ||
| 256 | prep.method, prep.url) | ||
| 257 | _r = r.connection.send(prep, **kwargs) | ||
| 258 | _r.history.append(r) | ||
| 259 | _r.request = prep | ||
| 260 | |||
| 261 | return _r | ||
| 262 | |||
| 263 | self._thread_local.num_401_calls = 1 | ||
| 264 | return r | ||
| 265 | |||
| 266 | def __call__(self, r): | ||
| 267 | # Initialize per-thread state, if needed | ||
| 268 | self.init_per_thread_state() | ||
| 269 | # If we have a saved nonce, skip the 401 | ||
| 270 | if self._thread_local.last_nonce: | ||
| 271 | r.headers['Authorization'] = self.build_digest_header(r.method, r.url) | ||
| 272 | try: | ||
| 273 | self._thread_local.pos = r.body.tell() | ||
| 274 | except AttributeError: | ||
| 275 | # In the case of HTTPDigestAuth being reused and the body of | ||
| 276 | # the previous request was a file-like object, pos has the | ||
| 277 | # file position of the previous body. Ensure it's set to | ||
| 278 | # None. | ||
| 279 | self._thread_local.pos = None | ||
| 280 | r.register_hook('response', self.handle_401) | ||
| 281 | r.register_hook('response', self.handle_redirect) | ||
| 282 | self._thread_local.num_401_calls = 1 | ||
| 283 | |||
| 284 | return r | ||
| 285 | |||
| 286 | def __eq__(self, other): | ||
| 287 | return all([ | ||
| 288 | self.username == getattr(other, 'username', None), | ||
| 289 | self.password == getattr(other, 'password', None) | ||
| 290 | ]) | ||
| 291 | |||
| 292 | def __ne__(self, other): | ||
| 293 | return not self == other | ||
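_basic_auth_str() latin-1-encodes both credentials and base64-encodes 'username:password'; HTTPBasicAuth simply installs that string as the Authorization header when the request object is called back. Checking it against the canonical RFC 7617 example:

    from pip._vendor.requests.auth import HTTPBasicAuth, _basic_auth_str

    assert (_basic_auth_str('Aladdin', 'open sesame')
            == 'Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==')

    # The callable form sets the same header on a PreparedRequest:
    # requests.get(url, auth=HTTPBasicAuth('Aladdin', 'open sesame'))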
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/certs.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/certs.py new file mode 100644 index 0000000..9742f6e --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/certs.py | |||
| @@ -0,0 +1,18 @@ | |||
| 1 | #!/usr/bin/env python | ||
| 2 | # -*- coding: utf-8 -*- | ||
| 3 | |||
| 4 | """ | ||
| 5 | requests.certs | ||
| 6 | ~~~~~~~~~~~~~~ | ||
| 7 | |||
| 8 | This module returns the preferred default CA certificate bundle. There is | ||
| 9 | only one — the one from the certifi package. | ||
| 10 | |||
| 11 | If you are packaging Requests, e.g., for a Linux distribution or a managed | ||
| 12 | environment, you can change the definition of where() to return a separately | ||
| 13 | packaged CA bundle. | ||
| 14 | """ | ||
| 15 | from pip._vendor.certifi import where | ||
| 16 | |||
| 17 | if __name__ == '__main__': | ||
| 18 | print(where()) | ||
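Because where() is re-exported here, callers resolve the trusted CA bundle through one hook that downstream packagers can redefine:

    from pip._vendor.requests import certs

    # Path of the default CA bundle (certifi's cacert.pem unless a
    # packager swapped out where()).
    print(certs.where())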
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/compat.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/compat.py new file mode 100644 index 0000000..4cea25e --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/compat.py | |||
| @@ -0,0 +1,73 @@ | |||
| 1 | # -*- coding: utf-8 -*- | ||
| 2 | |||
| 3 | """ | ||
| 4 | requests.compat | ||
| 5 | ~~~~~~~~~~~~~~~ | ||
| 6 | |||
| 7 | This module handles import compatibility issues between Python 2 and | ||
| 8 | Python 3. | ||
| 9 | """ | ||
| 10 | |||
| 11 | from pip._vendor import chardet | ||
| 12 | |||
| 13 | import sys | ||
| 14 | |||
| 15 | # ------- | ||
| 16 | # Pythons | ||
| 17 | # ------- | ||
| 18 | |||
| 19 | # Syntax sugar. | ||
| 20 | _ver = sys.version_info | ||
| 21 | |||
| 22 | #: Python 2.x? | ||
| 23 | is_py2 = (_ver[0] == 2) | ||
| 24 | |||
| 25 | #: Python 3.x? | ||
| 26 | is_py3 = (_ver[0] == 3) | ||
| 27 | |||
| 28 | # Note: We've patched out simplejson support in pip because it prevents | ||
| 29 | # upgrading simplejson on Windows. | ||
| 30 | # try: | ||
| 31 | # import simplejson as json | ||
| 32 | # except (ImportError, SyntaxError): | ||
| 33 | # # simplejson does not support Python 3.2, it throws a SyntaxError | ||
| 34 | # # because of u'...' Unicode literals. | ||
| 35 | import json | ||
| 36 | |||
| 37 | # --------- | ||
| 38 | # Specifics | ||
| 39 | # --------- | ||
| 40 | |||
| 41 | if is_py2: | ||
| 42 | from urllib import ( | ||
| 43 | quote, unquote, quote_plus, unquote_plus, urlencode, getproxies, | ||
| 44 | proxy_bypass, proxy_bypass_environment, getproxies_environment) | ||
| 45 | from urlparse import urlparse, urlunparse, urljoin, urlsplit, urldefrag | ||
| 46 | from urllib2 import parse_http_list | ||
| 47 | import cookielib | ||
| 48 | from Cookie import Morsel | ||
| 49 | from StringIO import StringIO | ||
| 50 | |||
| 51 | from pip._vendor.urllib3.packages.ordered_dict import OrderedDict | ||
| 52 | |||
| 53 | builtin_str = str | ||
| 54 | bytes = str | ||
| 55 | str = unicode | ||
| 56 | basestring = basestring | ||
| 57 | numeric_types = (int, long, float) | ||
| 58 | integer_types = (int, long) | ||
| 59 | |||
| 60 | elif is_py3: | ||
| 61 | from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag | ||
| 62 | from urllib.request import parse_http_list, getproxies, proxy_bypass, proxy_bypass_environment, getproxies_environment | ||
| 63 | from http import cookiejar as cookielib | ||
| 64 | from http.cookies import Morsel | ||
| 65 | from io import StringIO | ||
| 66 | from collections import OrderedDict | ||
| 67 | |||
| 68 | builtin_str = str | ||
| 69 | str = str | ||
| 70 | bytes = bytes | ||
| 71 | basestring = (str, bytes) | ||
| 72 | numeric_types = (int, float) | ||
| 73 | integer_types = (int,) | ||
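Calling code imports these names from compat rather than branching on sys.version_info itself; the same identifiers resolve to the right objects on either interpreter. For example:

    from pip._vendor.requests.compat import basestring, quote, urlparse

    parts = urlparse('https://example.com/path?q=1')
    assert parts.scheme == 'https' and parts.query == 'q=1'

    # basestring is (str, bytes) on Python 3 and the builtin on Python 2,
    # so isinstance checks stay version-agnostic.
    assert isinstance('text', basestring)
    assert quote('a b') == 'a%20b'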
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/cookies.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/cookies.py new file mode 100644 index 0000000..e69d22e --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/cookies.py | |||
| @@ -0,0 +1,542 @@ | |||
| 1 | # -*- coding: utf-8 -*- | ||
| 2 | |||
| 3 | """ | ||
| 4 | requests.cookies | ||
| 5 | ~~~~~~~~~~~~~~~~ | ||
| 6 | |||
| 7 | Compatibility code to be able to use `cookielib.CookieJar` with requests. | ||
| 8 | |||
| 9 | requests.utils imports from here, so be careful with imports. | ||
| 10 | """ | ||
| 11 | |||
| 12 | import copy | ||
| 13 | import time | ||
| 14 | import calendar | ||
| 15 | import collections | ||
| 16 | |||
| 17 | from ._internal_utils import to_native_string | ||
| 18 | from .compat import cookielib, urlparse, urlunparse, Morsel | ||
| 19 | |||
| 20 | try: | ||
| 21 | import threading | ||
| 22 | except ImportError: | ||
| 23 | import dummy_threading as threading | ||
| 24 | |||
| 25 | |||
| 26 | class MockRequest(object): | ||
| 27 | """Wraps a `requests.Request` to mimic a `urllib2.Request`. | ||
| 28 | |||
| 29 | The code in `cookielib.CookieJar` expects this interface in order to correctly | ||
| 30 | manage cookie policies, i.e., determine whether a cookie can be set, given the | ||
| 31 | domains of the request and the cookie. | ||
| 32 | |||
| 33 | The original request object is read-only. The client is responsible for collecting | ||
| 34 | the new headers via `get_new_headers()` and interpreting them appropriately. You | ||
| 35 | probably want `get_cookie_header`, defined below. | ||
| 36 | """ | ||
| 37 | |||
| 38 | def __init__(self, request): | ||
| 39 | self._r = request | ||
| 40 | self._new_headers = {} | ||
| 41 | self.type = urlparse(self._r.url).scheme | ||
| 42 | |||
| 43 | def get_type(self): | ||
| 44 | return self.type | ||
| 45 | |||
| 46 | def get_host(self): | ||
| 47 | return urlparse(self._r.url).netloc | ||
| 48 | |||
| 49 | def get_origin_req_host(self): | ||
| 50 | return self.get_host() | ||
| 51 | |||
| 52 | def get_full_url(self): | ||
| 53 | # Only return the response's URL if the user hasn't set the Host | ||
| 54 | # header | ||
| 55 | if not self._r.headers.get('Host'): | ||
| 56 | return self._r.url | ||
| 57 | # If they did set it, retrieve it and reconstruct the expected domain | ||
| 58 | host = to_native_string(self._r.headers['Host'], encoding='utf-8') | ||
| 59 | parsed = urlparse(self._r.url) | ||
| 60 | # Reconstruct the URL as we expect it | ||
| 61 | return urlunparse([ | ||
| 62 | parsed.scheme, host, parsed.path, parsed.params, parsed.query, | ||
| 63 | parsed.fragment | ||
| 64 | ]) | ||
| 65 | |||
| 66 | def is_unverifiable(self): | ||
| 67 | return True | ||
| 68 | |||
| 69 | def has_header(self, name): | ||
| 70 | return name in self._r.headers or name in self._new_headers | ||
| 71 | |||
| 72 | def get_header(self, name, default=None): | ||
| 73 | return self._r.headers.get(name, self._new_headers.get(name, default)) | ||
| 74 | |||
| 75 | def add_header(self, key, val): | ||
| 76 | """cookielib has no legitimate use for this method; add it back if you find one.""" | ||
| 77 | raise NotImplementedError("Cookie headers should be added with add_unredirected_header()") | ||
| 78 | |||
| 79 | def add_unredirected_header(self, name, value): | ||
| 80 | self._new_headers[name] = value | ||
| 81 | |||
| 82 | def get_new_headers(self): | ||
| 83 | return self._new_headers | ||
| 84 | |||
| 85 | @property | ||
| 86 | def unverifiable(self): | ||
| 87 | return self.is_unverifiable() | ||
| 88 | |||
| 89 | @property | ||
| 90 | def origin_req_host(self): | ||
| 91 | return self.get_origin_req_host() | ||
| 92 | |||
| 93 | @property | ||
| 94 | def host(self): | ||
| 95 | return self.get_host() | ||
| 96 | |||
| 97 | |||
| 98 | class MockResponse(object): | ||
| 99 | """Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`. | ||
| 100 | |||
| 101 | ...what? Basically, expose the parsed HTTP headers from the server response | ||
| 102 | the way `cookielib` expects to see them. | ||
| 103 | """ | ||
| 104 | |||
| 105 | def __init__(self, headers): | ||
| 106 | """Make a MockResponse for `cookielib` to read. | ||
| 107 | |||
| 108 | :param headers: a httplib.HTTPMessage or analogous carrying the headers | ||
| 109 | """ | ||
| 110 | self._headers = headers | ||
| 111 | |||
| 112 | def info(self): | ||
| 113 | return self._headers | ||
| 114 | |||
| 115 | def getheaders(self, name): | ||
| 116 | return self._headers.getheaders(name) | ||
| 117 | |||
| 118 | |||
| 119 | def extract_cookies_to_jar(jar, request, response): | ||
| 120 | """Extract the cookies from the response into a CookieJar. | ||
| 121 | |||
| 122 | :param jar: cookielib.CookieJar (not necessarily a RequestsCookieJar) | ||
| 123 | :param request: our own requests.Request object | ||
| 124 | :param response: urllib3.HTTPResponse object | ||
| 125 | """ | ||
| 126 | if not (hasattr(response, '_original_response') and | ||
| 127 | response._original_response): | ||
| 128 | return | ||
| 129 | # The _original_response field is the wrapped httplib.HTTPResponse object. | ||
| 130 | req = MockRequest(request) | ||
| 131 | # pull out the HTTPMessage with the headers and put it in the mock: | ||
| 132 | res = MockResponse(response._original_response.msg) | ||
| 133 | jar.extract_cookies(res, req) | ||
| 134 | |||
| 135 | |||
| 136 | def get_cookie_header(jar, request): | ||
| 137 | """ | ||
| 138 | Produce an appropriate Cookie header string to be sent with `request`, or None. | ||
| 139 | |||
| 140 | :rtype: str | ||
| 141 | """ | ||
| 142 | r = MockRequest(request) | ||
| 143 | jar.add_cookie_header(r) | ||
| 144 | return r.get_new_headers().get('Cookie') | ||
| 145 | |||
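Together, MockRequest and get_cookie_header let cookielib compute a Cookie header for a requests-style request. A minimal sketch, assuming the vendored paths above (example.com is a placeholder host):

    from pip._vendor import requests
    from pip._vendor.requests.cookies import RequestsCookieJar, get_cookie_header

    jar = RequestsCookieJar()
    jar.set('session', 'abc123', domain='example.com', path='/')

    # prepare() yields an object with .url and .headers, which MockRequest
    # adapts to the urllib2.Request interface cookielib expects.
    req = requests.Request('GET', 'http://example.com/').prepare()
    print(get_cookie_header(jar, req))  # 'session=abc123'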
| 146 | |||
| 147 | def remove_cookie_by_name(cookiejar, name, domain=None, path=None): | ||
| 148 | """Unsets a cookie by name, by default over all domains and paths. | ||
| 149 | |||
| 150 | Wraps CookieJar.clear(), is O(n). | ||
| 151 | """ | ||
| 152 | clearables = [] | ||
| 153 | for cookie in cookiejar: | ||
| 154 | if cookie.name != name: | ||
| 155 | continue | ||
| 156 | if domain is not None and domain != cookie.domain: | ||
| 157 | continue | ||
| 158 | if path is not None and path != cookie.path: | ||
| 159 | continue | ||
| 160 | clearables.append((cookie.domain, cookie.path, cookie.name)) | ||
| 161 | |||
| 162 | for domain, path, name in clearables: | ||
| 163 | cookiejar.clear(domain, path, name) | ||
| 164 | |||
| 165 | |||
| 166 | class CookieConflictError(RuntimeError): | ||
| 167 | """There are two cookies that meet the criteria specified in the cookie jar. | ||
| 168 | Use .get and .set and include domain and path args in order to be more specific. | ||
| 169 | """ | ||
| 170 | |||
| 171 | |||
| 172 | class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping): | ||
| 173 | """Compatibility class; is a cookielib.CookieJar, but exposes a dict | ||
| 174 | interface. | ||
| 175 | |||
| 176 | This is the CookieJar we create by default for requests and sessions that | ||
| 177 | don't specify one, since some clients may expect response.cookies and | ||
| 178 | session.cookies to support dict operations. | ||
| 179 | |||
| 180 | Requests does not use the dict interface internally; it's just for | ||
| 181 | compatibility with external client code. All requests code should work | ||
| 182 | out of the box with externally provided instances of ``CookieJar``, e.g. | ||
| 183 | ``LWPCookieJar`` and ``FileCookieJar``. | ||
| 184 | |||
| 185 | Unlike a regular CookieJar, this class is pickleable. | ||
| 186 | |||
| 187 | .. warning:: dictionary operations that are normally O(1) may be O(n). | ||
| 188 | """ | ||
| 189 | |||
| 190 | def get(self, name, default=None, domain=None, path=None): | ||
| 191 | """Dict-like get() that also supports optional domain and path args in | ||
| 192 | order to resolve naming collisions from using one cookie jar over | ||
| 193 | multiple domains. | ||
| 194 | |||
| 195 | .. warning:: operation is O(n), not O(1). | ||
| 196 | """ | ||
| 197 | try: | ||
| 198 | return self._find_no_duplicates(name, domain, path) | ||
| 199 | except KeyError: | ||
| 200 | return default | ||
| 201 | |||
| 202 | def set(self, name, value, **kwargs): | ||
| 203 | """Dict-like set() that also supports optional domain and path args in | ||
| 204 | order to resolve naming collisions from using one cookie jar over | ||
| 205 | multiple domains. | ||
| 206 | """ | ||
| 207 | # support client code that unsets cookies by assignment of a None value: | ||
| 208 | if value is None: | ||
| 209 | remove_cookie_by_name(self, name, domain=kwargs.get('domain'), path=kwargs.get('path')) | ||
| 210 | return | ||
| 211 | |||
| 212 | if isinstance(value, Morsel): | ||
| 213 | c = morsel_to_cookie(value) | ||
| 214 | else: | ||
| 215 | c = create_cookie(name, value, **kwargs) | ||
| 216 | self.set_cookie(c) | ||
| 217 | return c | ||
| 218 | |||
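The domain and path keywords are what make one jar usable across several sites. A short sketch of the collision case (hostnames are illustrative):

    from pip._vendor.requests.cookies import RequestsCookieJar

    jar = RequestsCookieJar()
    jar.set('token', 'alpha', domain='a.example.com', path='/')
    jar.set('token', 'beta', domain='b.example.com', path='/')

    # jar['token'] would raise CookieConflictError here; the domain
    # argument disambiguates the lookup.
    print(jar.get('token', domain='a.example.com'))  # 'alpha'
    print(jar.get('token', domain='b.example.com'))  # 'beta'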
| 219 | def iterkeys(self): | ||
| 220 | """Dict-like iterkeys() that returns an iterator of names of cookies | ||
| 221 | from the jar. | ||
| 222 | |||
| 223 | .. seealso:: itervalues() and iteritems(). | ||
| 224 | """ | ||
| 225 | for cookie in iter(self): | ||
| 226 | yield cookie.name | ||
| 227 | |||
| 228 | def keys(self): | ||
| 229 | """Dict-like keys() that returns a list of names of cookies from the | ||
| 230 | jar. | ||
| 231 | |||
| 232 | .. seealso:: values() and items(). | ||
| 233 | """ | ||
| 234 | return list(self.iterkeys()) | ||
| 235 | |||
| 236 | def itervalues(self): | ||
| 237 | """Dict-like itervalues() that returns an iterator of values of cookies | ||
| 238 | from the jar. | ||
| 239 | |||
| 240 | .. seealso:: iterkeys() and iteritems(). | ||
| 241 | """ | ||
| 242 | for cookie in iter(self): | ||
| 243 | yield cookie.value | ||
| 244 | |||
| 245 | def values(self): | ||
| 246 | """Dict-like values() that returns a list of values of cookies from the | ||
| 247 | jar. | ||
| 248 | |||
| 249 | .. seealso:: keys() and items(). | ||
| 250 | """ | ||
| 251 | return list(self.itervalues()) | ||
| 252 | |||
| 253 | def iteritems(self): | ||
| 254 | """Dict-like iteritems() that returns an iterator of name-value tuples | ||
| 255 | from the jar. | ||
| 256 | |||
| 257 | .. seealso:: iterkeys() and itervalues(). | ||
| 258 | """ | ||
| 259 | for cookie in iter(self): | ||
| 260 | yield cookie.name, cookie.value | ||
| 261 | |||
| 262 | def items(self): | ||
| 263 | """Dict-like items() that returns a list of name-value tuples from the | ||
| 264 | jar. Allows client code to call ``dict(RequestsCookieJar)`` and get a | ||
| 265 | vanilla Python dict of key value pairs. | ||
| 266 | |||
| 267 | .. seealso:: keys() and values(). | ||
| 268 | """ | ||
| 269 | return list(self.iteritems()) | ||
| 270 | |||
| 271 | def list_domains(self): | ||
| 272 | """Utility method to list all the domains in the jar.""" | ||
| 273 | domains = [] | ||
| 274 | for cookie in iter(self): | ||
| 275 | if cookie.domain not in domains: | ||
| 276 | domains.append(cookie.domain) | ||
| 277 | return domains | ||
| 278 | |||
| 279 | def list_paths(self): | ||
| 280 | """Utility method to list all the paths in the jar.""" | ||
| 281 | paths = [] | ||
| 282 | for cookie in iter(self): | ||
| 283 | if cookie.path not in paths: | ||
| 284 | paths.append(cookie.path) | ||
| 285 | return paths | ||
| 286 | |||
| 287 | def multiple_domains(self): | ||
| 288 | """Returns True if there are multiple domains in the jar. | ||
| 289 | Returns False otherwise. | ||
| 290 | |||
| 291 | :rtype: bool | ||
| 292 | """ | ||
| 293 | domains = [] | ||
| 294 | for cookie in iter(self): | ||
| 295 | if cookie.domain is not None and cookie.domain in domains: | ||
| 296 | return True | ||
| 297 | domains.append(cookie.domain) | ||
| 298 | return False # there is only one domain in jar | ||
| 299 | |||
| 300 | def get_dict(self, domain=None, path=None): | ||
| 301 | """Takes as an argument an optional domain and path and returns a plain | ||
| 302 | old Python dict of name-value pairs of cookies that meet the | ||
| 303 | requirements. | ||
| 304 | |||
| 305 | :rtype: dict | ||
| 306 | """ | ||
| 307 | dictionary = {} | ||
| 308 | for cookie in iter(self): | ||
| 309 | if ( | ||
| 310 | (domain is None or cookie.domain == domain) and | ||
| 311 | (path is None or cookie.path == path) | ||
| 312 | ): | ||
| 313 | dictionary[cookie.name] = cookie.value | ||
| 314 | return dictionary | ||
| 315 | |||
| 316 | def __contains__(self, name): | ||
| 317 | try: | ||
| 318 | return super(RequestsCookieJar, self).__contains__(name) | ||
| 319 | except CookieConflictError: | ||
| 320 | return True | ||
| 321 | |||
| 322 | def __getitem__(self, name): | ||
| 323 | """Dict-like __getitem__() for compatibility with client code. Throws | ||
| 324 | exception if there are more than one cookie with name. In that case, | ||
| 325 | use the more explicit get() method instead. | ||
| 326 | |||
| 327 | .. warning:: operation is O(n), not O(1). | ||
| 328 | """ | ||
| 329 | return self._find_no_duplicates(name) | ||
| 330 | |||
| 331 | def __setitem__(self, name, value): | ||
| 332 | """Dict-like __setitem__ for compatibility with client code. Throws | ||
| 333 | exception if there is already a cookie of that name in the jar. In that | ||
| 334 | case, use the more explicit set() method instead. | ||
| 335 | """ | ||
| 336 | self.set(name, value) | ||
| 337 | |||
| 338 | def __delitem__(self, name): | ||
| 339 | """Deletes a cookie given a name. Wraps ``cookielib.CookieJar``'s | ||
| 340 | ``remove_cookie_by_name()``. | ||
| 341 | """ | ||
| 342 | remove_cookie_by_name(self, name) | ||
| 343 | |||
| 344 | def set_cookie(self, cookie, *args, **kwargs): | ||
| 345 | if hasattr(cookie.value, 'startswith') and cookie.value.startswith('"') and cookie.value.endswith('"'): | ||
| 346 | cookie.value = cookie.value.replace('\\"', '') | ||
| 347 | return super(RequestsCookieJar, self).set_cookie(cookie, *args, **kwargs) | ||
| 348 | |||
| 349 | def update(self, other): | ||
| 350 | """Updates this jar with cookies from another CookieJar or dict-like""" | ||
| 351 | if isinstance(other, cookielib.CookieJar): | ||
| 352 | for cookie in other: | ||
| 353 | self.set_cookie(copy.copy(cookie)) | ||
| 354 | else: | ||
| 355 | super(RequestsCookieJar, self).update(other) | ||
| 356 | |||
| 357 | def _find(self, name, domain=None, path=None): | ||
| 358 | """Requests uses this method internally to get cookie values. | ||
| 359 | |||
| 360 | If there are conflicting cookies, _find arbitrarily chooses one. | ||
| 361 | See _find_no_duplicates if you want an exception thrown if there are | ||
| 362 | conflicting cookies. | ||
| 363 | |||
| 364 | :param name: a string containing name of cookie | ||
| 365 | :param domain: (optional) string containing domain of cookie | ||
| 366 | :param path: (optional) string containing path of cookie | ||
| 367 | :return: cookie.value | ||
| 368 | """ | ||
| 369 | for cookie in iter(self): | ||
| 370 | if cookie.name == name: | ||
| 371 | if domain is None or cookie.domain == domain: | ||
| 372 | if path is None or cookie.path == path: | ||
| 373 | return cookie.value | ||
| 374 | |||
| 375 | raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path)) | ||
| 376 | |||
| 377 | def _find_no_duplicates(self, name, domain=None, path=None): | ||
| 378 | """Both ``__get_item__`` and ``get`` call this function: it's never | ||
| 379 | used elsewhere in Requests. | ||
| 380 | |||
| 381 | :param name: a string containing name of cookie | ||
| 382 | :param domain: (optional) string containing domain of cookie | ||
| 383 | :param path: (optional) string containing path of cookie | ||
| 384 | :raises KeyError: if cookie is not found | ||
| 385 | :raises CookieConflictError: if there are multiple cookies | ||
| 386 | that match name and optionally domain and path | ||
| 387 | :return: cookie.value | ||
| 388 | """ | ||
| 389 | toReturn = None | ||
| 390 | for cookie in iter(self): | ||
| 391 | if cookie.name == name: | ||
| 392 | if domain is None or cookie.domain == domain: | ||
| 393 | if path is None or cookie.path == path: | ||
| 394 | if toReturn is not None: # if there are multiple cookies that meet the passed-in criteria | ||
| 395 | raise CookieConflictError('There are multiple cookies with name %r' % (name)) | ||
| 396 | toReturn = cookie.value # we will eventually return this as long as no cookie conflict | ||
| 397 | |||
| 398 | if toReturn is not None: # '' is a valid cookie value | ||
| 399 | return toReturn | ||
| 400 | raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path)) | ||
| 401 | |||
| 402 | def __getstate__(self): | ||
| 403 | """Unlike a normal CookieJar, this class is pickleable.""" | ||
| 404 | state = self.__dict__.copy() | ||
| 405 | # remove the unpickleable RLock object | ||
| 406 | state.pop('_cookies_lock') | ||
| 407 | return state | ||
| 408 | |||
| 409 | def __setstate__(self, state): | ||
| 410 | """Unlike a normal CookieJar, this class is pickleable.""" | ||
| 411 | self.__dict__.update(state) | ||
| 412 | if '_cookies_lock' not in self.__dict__: | ||
| 413 | self._cookies_lock = threading.RLock() | ||
| 414 | |||
| 415 | def copy(self): | ||
| 416 | """Return a copy of this RequestsCookieJar.""" | ||
| 417 | new_cj = RequestsCookieJar() | ||
| 418 | new_cj.update(self) | ||
| 419 | return new_cj | ||
| 420 | |||
| 421 | |||
| 422 | def _copy_cookie_jar(jar): | ||
| 423 | if jar is None: | ||
| 424 | return None | ||
| 425 | |||
| 426 | if hasattr(jar, 'copy'): | ||
| 427 | # We're dealing with an instance of RequestsCookieJar | ||
| 428 | return jar.copy() | ||
| 429 | # We're dealing with a generic CookieJar instance | ||
| 430 | new_jar = copy.copy(jar) | ||
| 431 | new_jar.clear() | ||
| 432 | for cookie in jar: | ||
| 433 | new_jar.set_cookie(copy.copy(cookie)) | ||
| 434 | return new_jar | ||
| 435 | |||
| 436 | |||
| 437 | def create_cookie(name, value, **kwargs): | ||
| 438 | """Make a cookie from underspecified parameters. | ||
| 439 | |||
| 440 | By default, the pair of `name` and `value` will be set for the domain '' | ||
| 441 | and sent on every request (this is sometimes called a "supercookie"). | ||
| 442 | """ | ||
| 443 | result = dict( | ||
| 444 | version=0, | ||
| 445 | name=name, | ||
| 446 | value=value, | ||
| 447 | port=None, | ||
| 448 | domain='', | ||
| 449 | path='/', | ||
| 450 | secure=False, | ||
| 451 | expires=None, | ||
| 452 | discard=True, | ||
| 453 | comment=None, | ||
| 454 | comment_url=None, | ||
| 455 | rest={'HttpOnly': None}, | ||
| 456 | rfc2109=False,) | ||
| 457 | |||
| 458 | badargs = set(kwargs) - set(result) | ||
| 459 | if badargs: | ||
| 460 | err = 'create_cookie() got unexpected keyword arguments: %s' | ||
| 461 | raise TypeError(err % list(badargs)) | ||
| 462 | |||
| 463 | result.update(kwargs) | ||
| 464 | result['port_specified'] = bool(result['port']) | ||
| 465 | result['domain_specified'] = bool(result['domain']) | ||
| 466 | result['domain_initial_dot'] = result['domain'].startswith('.') | ||
| 467 | result['path_specified'] = bool(result['path']) | ||
| 468 | |||
| 469 | return cookielib.Cookie(**result) | ||
| 470 | |||
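A quick sketch of the defaults in action (values are illustrative):

    from pip._vendor.requests.cookies import create_cookie

    # With no domain/path kwargs this would be an ''-domain "supercookie";
    # passing them narrows where the cookie is sent.
    c = create_cookie('lang', 'en', domain='example.com', path='/docs')
    print(c.domain, c.path, c.value)  # example.com /docs en
    print(c.domain_initial_dot)       # False; '.example.com' would give True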
| 471 | |||
| 472 | def morsel_to_cookie(morsel): | ||
| 473 | """Convert a Morsel object into a Cookie containing the one k/v pair.""" | ||
| 474 | |||
| 475 | expires = None | ||
| 476 | if morsel['max-age']: | ||
| 477 | try: | ||
| 478 | expires = int(time.time() + int(morsel['max-age'])) | ||
| 479 | except ValueError: | ||
| 480 | raise TypeError('max-age: %s must be integer' % morsel['max-age']) | ||
| 481 | elif morsel['expires']: | ||
| 482 | time_template = '%a, %d-%b-%Y %H:%M:%S GMT' | ||
| 483 | expires = calendar.timegm( | ||
| 484 | time.strptime(morsel['expires'], time_template) | ||
| 485 | ) | ||
| 486 | return create_cookie( | ||
| 487 | comment=morsel['comment'], | ||
| 488 | comment_url=bool(morsel['comment']), | ||
| 489 | discard=False, | ||
| 490 | domain=morsel['domain'], | ||
| 491 | expires=expires, | ||
| 492 | name=morsel.key, | ||
| 493 | path=morsel['path'], | ||
| 494 | port=None, | ||
| 495 | rest={'HttpOnly': morsel['httponly']}, | ||
| 496 | rfc2109=False, | ||
| 497 | secure=bool(morsel['secure']), | ||
| 498 | value=morsel.value, | ||
| 499 | version=morsel['version'] or 0, | ||
| 500 | ) | ||
| 501 | |||
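For illustration, converting a stdlib Morsel parsed from a Set-Cookie string (the header value is made up):

    try:
        from http.cookies import SimpleCookie  # Python 3
    except ImportError:
        from Cookie import SimpleCookie        # Python 2
    from pip._vendor.requests.cookies import morsel_to_cookie

    sc = SimpleCookie()
    sc.load('sid=xyz; Max-Age=3600; Path=/; HttpOnly')
    cookie = morsel_to_cookie(sc['sid'])
    print(cookie.name, cookie.value)   # sid xyz
    print(cookie.expires is not None)  # True: Max-Age became an absolute epoch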
| 502 | |||
| 503 | def cookiejar_from_dict(cookie_dict, cookiejar=None, overwrite=True): | ||
| 504 | """Returns a CookieJar from a key/value dictionary. | ||
| 505 | |||
| 506 | :param cookie_dict: Dict of key/values to insert into CookieJar. | ||
| 507 | :param cookiejar: (optional) A cookiejar to add the cookies to. | ||
| 508 | :param overwrite: (optional) If False, will not replace cookies | ||
| 509 | already in the jar with new ones. | ||
| 510 | """ | ||
| 511 | if cookiejar is None: | ||
| 512 | cookiejar = RequestsCookieJar() | ||
| 513 | |||
| 514 | if cookie_dict is not None: | ||
| 515 | names_from_jar = [cookie.name for cookie in cookiejar] | ||
| 516 | for name in cookie_dict: | ||
| 517 | if overwrite or (name not in names_from_jar): | ||
| 518 | cookiejar.set_cookie(create_cookie(name, cookie_dict[name])) | ||
| 519 | |||
| 520 | return cookiejar | ||
| 521 | |||
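A minimal sketch of the overwrite flag, with made-up cookie names:

    from pip._vendor.requests.cookies import cookiejar_from_dict

    jar = cookiejar_from_dict({'a': '1', 'b': '2'})
    # overwrite=False keeps cookies that are already in the jar.
    jar = cookiejar_from_dict({'a': 'ignored', 'c': '3'},
                              cookiejar=jar, overwrite=False)
    print(sorted(jar.items()))  # [('a', '1'), ('b', '2'), ('c', '3')]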
| 522 | |||
| 523 | def merge_cookies(cookiejar, cookies): | ||
| 524 | """Add cookies to cookiejar and returns a merged CookieJar. | ||
| 525 | |||
| 526 | :param cookiejar: CookieJar object to add the cookies to. | ||
| 527 | :param cookies: Dictionary or CookieJar object to be added. | ||
| 528 | """ | ||
| 529 | if not isinstance(cookiejar, cookielib.CookieJar): | ||
| 530 | raise ValueError('You can only merge into CookieJar') | ||
| 531 | |||
| 532 | if isinstance(cookies, dict): | ||
| 533 | cookiejar = cookiejar_from_dict( | ||
| 534 | cookies, cookiejar=cookiejar, overwrite=False) | ||
| 535 | elif isinstance(cookies, cookielib.CookieJar): | ||
| 536 | try: | ||
| 537 | cookiejar.update(cookies) | ||
| 538 | except AttributeError: | ||
| 539 | for cookie_in_jar in cookies: | ||
| 540 | cookiejar.set_cookie(cookie_in_jar) | ||
| 541 | |||
| 542 | return cookiejar | ||
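This closes out cookies.py. A short sketch of merge_cookies handling both input shapes (names are illustrative):

    from pip._vendor.requests.cookies import (
        RequestsCookieJar, cookiejar_from_dict, merge_cookies)

    base = cookiejar_from_dict({'session': 'abc'})
    extra = RequestsCookieJar()
    extra.set('csrf', 'tok')

    merged = merge_cookies(base, extra)                # CookieJar input
    merged = merge_cookies(merged, {'theme': 'dark'})  # dict input, no overwrite
    print(sorted(merged.keys()))  # ['csrf', 'session', 'theme']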
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/exceptions.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/exceptions.py new file mode 100644 index 0000000..377c4c2 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/exceptions.py | |||
| @@ -0,0 +1,122 @@ | |||
| 1 | # -*- coding: utf-8 -*- | ||
| 2 | |||
| 3 | """ | ||
| 4 | requests.exceptions | ||
| 5 | ~~~~~~~~~~~~~~~~~~~ | ||
| 6 | |||
| 7 | This module contains the set of Requests' exceptions. | ||
| 8 | """ | ||
| 9 | from pip._vendor.urllib3.exceptions import HTTPError as BaseHTTPError | ||
| 10 | |||
| 11 | |||
| 12 | class RequestException(IOError): | ||
| 13 | """There was an ambiguous exception that occurred while handling your | ||
| 14 | request. | ||
| 15 | """ | ||
| 16 | |||
| 17 | def __init__(self, *args, **kwargs): | ||
| 18 | """Initialize RequestException with `request` and `response` objects.""" | ||
| 19 | response = kwargs.pop('response', None) | ||
| 20 | self.response = response | ||
| 21 | self.request = kwargs.pop('request', None) | ||
| 22 | if (response is not None and not self.request and | ||
| 23 | hasattr(response, 'request')): | ||
| 24 | self.request = self.response.request | ||
| 25 | super(RequestException, self).__init__(*args, **kwargs) | ||
| 26 | |||
| 27 | |||
| 28 | class HTTPError(RequestException): | ||
| 29 | """An HTTP error occurred.""" | ||
| 30 | |||
| 31 | |||
| 32 | class ConnectionError(RequestException): | ||
| 33 | """A Connection error occurred.""" | ||
| 34 | |||
| 35 | |||
| 36 | class ProxyError(ConnectionError): | ||
| 37 | """A proxy error occurred.""" | ||
| 38 | |||
| 39 | |||
| 40 | class SSLError(ConnectionError): | ||
| 41 | """An SSL error occurred.""" | ||
| 42 | |||
| 43 | |||
| 44 | class Timeout(RequestException): | ||
| 45 | """The request timed out. | ||
| 46 | |||
| 47 | Catching this error will catch both | ||
| 48 | :exc:`~requests.exceptions.ConnectTimeout` and | ||
| 49 | :exc:`~requests.exceptions.ReadTimeout` errors. | ||
| 50 | """ | ||
| 51 | |||
| 52 | |||
| 53 | class ConnectTimeout(ConnectionError, Timeout): | ||
| 54 | """The request timed out while trying to connect to the remote server. | ||
| 55 | |||
| 56 | Requests that produced this error are safe to retry. | ||
| 57 | """ | ||
| 58 | |||
| 59 | |||
| 60 | class ReadTimeout(Timeout): | ||
| 61 | """The server did not send any data in the allotted amount of time.""" | ||
| 62 | |||
| 63 | |||
| 64 | class URLRequired(RequestException): | ||
| 65 | """A valid URL is required to make a request.""" | ||
| 66 | |||
| 67 | |||
| 68 | class TooManyRedirects(RequestException): | ||
| 69 | """Too many redirects.""" | ||
| 70 | |||
| 71 | |||
| 72 | class MissingSchema(RequestException, ValueError): | ||
| 73 | """The URL schema (e.g. http or https) is missing.""" | ||
| 74 | |||
| 75 | |||
| 76 | class InvalidSchema(RequestException, ValueError): | ||
| 77 | """See defaults.py for valid schemas.""" | ||
| 78 | |||
| 79 | |||
| 80 | class InvalidURL(RequestException, ValueError): | ||
| 81 | """The URL provided was somehow invalid.""" | ||
| 82 | |||
| 83 | |||
| 84 | class InvalidHeader(RequestException, ValueError): | ||
| 85 | """The header value provided was somehow invalid.""" | ||
| 86 | |||
| 87 | |||
| 88 | class ChunkedEncodingError(RequestException): | ||
| 89 | """The server declared chunked encoding but sent an invalid chunk.""" | ||
| 90 | |||
| 91 | |||
| 92 | class ContentDecodingError(RequestException, BaseHTTPError): | ||
| 93 | """Failed to decode response content""" | ||
| 94 | |||
| 95 | |||
| 96 | class StreamConsumedError(RequestException, TypeError): | ||
| 97 | """The content for this response was already consumed""" | ||
| 98 | |||
| 99 | |||
| 100 | class RetryError(RequestException): | ||
| 101 | """Custom retries logic failed""" | ||
| 102 | |||
| 103 | |||
| 104 | class UnrewindableBodyError(RequestException): | ||
| 105 | """Requests encountered an error when trying to rewind a body""" | ||
| 106 | |||
| 107 | # Warnings | ||
| 108 | |||
| 109 | |||
| 110 | class RequestsWarning(Warning): | ||
| 111 | """Base warning for Requests.""" | ||
| 112 | pass | ||
| 113 | |||
| 114 | |||
| 115 | class FileModeWarning(RequestsWarning, DeprecationWarning): | ||
| 116 | """A file was opened in text mode, but Requests determined its binary length.""" | ||
| 117 | pass | ||
| 118 | |||
| 119 | |||
| 120 | class RequestsDependencyWarning(RequestsWarning): | ||
| 121 | """An imported dependency doesn't match the expected version range.""" | ||
| 122 | pass | ||
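The multiple-inheritance layout above is what makes broad handlers work: one except clause can cover a whole family. A minimal sketch:

    from pip._vendor.requests import exceptions

    # Timeout is the common base of both timeout variants.
    try:
        raise exceptions.ConnectTimeout('handshake took too long')
    except exceptions.Timeout as exc:
        print('timed out:', exc)

    # ConnectTimeout is also a ConnectionError, so retry logic keyed on
    # connection failures sees it as well.
    assert issubclass(exceptions.ConnectTimeout, exceptions.ConnectionError)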
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/help.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/help.py new file mode 100644 index 0000000..28385f8 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/help.py | |||
| @@ -0,0 +1,120 @@ | |||
| 1 | """Module containing bug report helper(s).""" | ||
| 2 | from __future__ import print_function | ||
| 3 | |||
| 4 | import json | ||
| 5 | import platform | ||
| 6 | import sys | ||
| 7 | import ssl | ||
| 8 | |||
| 9 | from pip._vendor import idna | ||
| 10 | from pip._vendor import urllib3 | ||
| 11 | from pip._vendor import chardet | ||
| 12 | |||
| 13 | from . import __version__ as requests_version | ||
| 14 | |||
| 15 | try: | ||
| 16 | from .packages.urllib3.contrib import pyopenssl | ||
| 17 | except ImportError: | ||
| 18 | pyopenssl = None | ||
| 19 | OpenSSL = None | ||
| 20 | cryptography = None | ||
| 21 | else: | ||
| 22 | import OpenSSL | ||
| 23 | import cryptography | ||
| 24 | |||
| 25 | |||
| 26 | def _implementation(): | ||
| 27 | """Return a dict with the Python implementation and version. | ||
| 28 | |||
| 29 | Provide both the name and the version of the Python implementation | ||
| 30 | currently running. For example, on CPython 2.7.5 it will return | ||
| 31 | {'name': 'CPython', 'version': '2.7.5'}. | ||
| 32 | |||
| 33 | This function works best on CPython and PyPy: in particular, it probably | ||
| 34 | doesn't work for Jython or IronPython. Future investigation should be done | ||
| 35 | to work out the correct shape of the code for those platforms. | ||
| 36 | """ | ||
| 37 | implementation = platform.python_implementation() | ||
| 38 | |||
| 39 | if implementation == 'CPython': | ||
| 40 | implementation_version = platform.python_version() | ||
| 41 | elif implementation == 'PyPy': | ||
| 42 | implementation_version = '%s.%s.%s' % (sys.pypy_version_info.major, | ||
| 43 | sys.pypy_version_info.minor, | ||
| 44 | sys.pypy_version_info.micro) | ||
| 45 | if sys.pypy_version_info.releaselevel != 'final': | ||
| 46 | implementation_version = ''.join([ | ||
| 47 | implementation_version, sys.pypy_version_info.releaselevel | ||
| 48 | ]) | ||
| 49 | elif implementation == 'Jython': | ||
| 50 | implementation_version = platform.python_version() # Complete Guess | ||
| 51 | elif implementation == 'IronPython': | ||
| 52 | implementation_version = platform.python_version() # Complete Guess | ||
| 53 | else: | ||
| 54 | implementation_version = 'Unknown' | ||
| 55 | |||
| 56 | return {'name': implementation, 'version': implementation_version} | ||
| 57 | |||
| 58 | |||
| 59 | def info(): | ||
| 60 | """Generate information for a bug report.""" | ||
| 61 | try: | ||
| 62 | platform_info = { | ||
| 63 | 'system': platform.system(), | ||
| 64 | 'release': platform.release(), | ||
| 65 | } | ||
| 66 | except IOError: | ||
| 67 | platform_info = { | ||
| 68 | 'system': 'Unknown', | ||
| 69 | 'release': 'Unknown', | ||
| 70 | } | ||
| 71 | |||
| 72 | implementation_info = _implementation() | ||
| 73 | urllib3_info = {'version': urllib3.__version__} | ||
| 74 | chardet_info = {'version': chardet.__version__} | ||
| 75 | |||
| 76 | pyopenssl_info = { | ||
| 77 | 'version': None, | ||
| 78 | 'openssl_version': '', | ||
| 79 | } | ||
| 80 | if OpenSSL: | ||
| 81 | pyopenssl_info = { | ||
| 82 | 'version': OpenSSL.__version__, | ||
| 83 | 'openssl_version': '%x' % OpenSSL.SSL.OPENSSL_VERSION_NUMBER, | ||
| 84 | } | ||
| 85 | cryptography_info = { | ||
| 86 | 'version': getattr(cryptography, '__version__', ''), | ||
| 87 | } | ||
| 88 | idna_info = { | ||
| 89 | 'version': getattr(idna, '__version__', ''), | ||
| 90 | } | ||
| 91 | |||
| 92 | # OPENSSL_VERSION_NUMBER doesn't exist in the Python 2.6 ssl module. | ||
| 93 | system_ssl = getattr(ssl, 'OPENSSL_VERSION_NUMBER', None) | ||
| 94 | system_ssl_info = { | ||
| 95 | 'version': '%x' % system_ssl if system_ssl is not None else '' | ||
| 96 | } | ||
| 97 | |||
| 98 | return { | ||
| 99 | 'platform': platform_info, | ||
| 100 | 'implementation': implementation_info, | ||
| 101 | 'system_ssl': system_ssl_info, | ||
| 102 | 'using_pyopenssl': pyopenssl is not None, | ||
| 103 | 'pyOpenSSL': pyopenssl_info, | ||
| 104 | 'urllib3': urllib3_info, | ||
| 105 | 'chardet': chardet_info, | ||
| 106 | 'cryptography': cryptography_info, | ||
| 107 | 'idna': idna_info, | ||
| 108 | 'requests': { | ||
| 109 | 'version': requests_version, | ||
| 110 | }, | ||
| 111 | } | ||
| 112 | |||
| 113 | |||
| 114 | def main(): | ||
| 115 | """Pretty-print the bug information as JSON.""" | ||
| 116 | print(json.dumps(info(), sort_keys=True, indent=2)) | ||
| 117 | |||
| 118 | |||
| 119 | if __name__ == '__main__': | ||
| 120 | main() | ||
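The module doubles as a CLI: `python -m pip._vendor.requests.help` pretty-prints the report. Programmatic use is a dict lookup; a minimal sketch (keys per the info() return value above):

    from pip._vendor.requests.help import info

    report = info()
    print(report['implementation']['name'])  # e.g. 'CPython'
    print(report['requests']['version'])     # the vendored requests version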
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/hooks.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/hooks.py new file mode 100644 index 0000000..14db0c8 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/hooks.py | |||
| @@ -0,0 +1,34 @@ | |||
| 1 | # -*- coding: utf-8 -*- | ||
| 2 | |||
| 3 | """ | ||
| 4 | requests.hooks | ||
| 5 | ~~~~~~~~~~~~~~ | ||
| 6 | |||
| 7 | This module provides the capabilities for the Requests hooks system. | ||
| 8 | |||
| 9 | Available hooks: | ||
| 10 | |||
| 11 | ``response``: | ||
| 12 | The response generated from a Request. | ||
| 13 | """ | ||
| 14 | HOOKS = ['response'] | ||
| 15 | |||
| 16 | |||
| 17 | def default_hooks(): | ||
| 18 | return dict((event, []) for event in HOOKS) | ||
| 19 | |||
| 20 | # TODO: 'response' is currently the only hook event | ||
| 21 | |||
| 22 | |||
| 23 | def dispatch_hook(key, hooks, hook_data, **kwargs): | ||
| 24 | """Dispatches a hook dictionary on a given piece of data.""" | ||
| 25 | hooks = hooks or dict() | ||
| 26 | hooks = hooks.get(key) | ||
| 27 | if hooks: | ||
| 28 | if hasattr(hooks, '__call__'): | ||
| 29 | hooks = [hooks] | ||
| 30 | for hook in hooks: | ||
| 31 | _hook_data = hook(hook_data, **kwargs) | ||
| 32 | if _hook_data is not None: | ||
| 33 | hook_data = _hook_data | ||
| 34 | return hook_data | ||
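A minimal sketch of the dispatch contract: hooks may transform the data by returning a new value, or return None to leave it untouched:

    from pip._vendor.requests.hooks import default_hooks, dispatch_hook

    hooks = default_hooks()  # {'response': []}
    hooks['response'].append(lambda data, **kwargs: data.upper())

    # Each hook's return value (if not None) becomes the data for the next.
    print(dispatch_hook('response', hooks, 'payload'))  # 'PAYLOAD'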
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/models.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/models.py new file mode 100644 index 0000000..6f5b0fb --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/models.py | |||
| @@ -0,0 +1,948 @@ | |||
| 1 | # -*- coding: utf-8 -*- | ||
| 2 | |||
| 3 | """ | ||
| 4 | requests.models | ||
| 5 | ~~~~~~~~~~~~~~~ | ||
| 6 | |||
| 7 | This module contains the primary objects that power Requests. | ||
| 8 | """ | ||
| 9 | |||
| 10 | import collections | ||
| 11 | import datetime | ||
| 12 | import sys | ||
| 13 | |||
| 14 | # Import encoding now, to avoid implicit import later. | ||
| 15 | # Implicit import within threads may cause LookupError when standard library is in a ZIP, | ||
| 16 | # such as in Embedded Python. See https://github.com/requests/requests/issues/3578. | ||
| 17 | import encodings.idna | ||
| 18 | |||
| 19 | from pip._vendor.urllib3.fields import RequestField | ||
| 20 | from pip._vendor.urllib3.filepost import encode_multipart_formdata | ||
| 21 | from pip._vendor.urllib3.util import parse_url | ||
| 22 | from pip._vendor.urllib3.exceptions import ( | ||
| 23 | DecodeError, ReadTimeoutError, ProtocolError, LocationParseError) | ||
| 24 | |||
| 25 | from io import UnsupportedOperation | ||
| 26 | from .hooks import default_hooks | ||
| 27 | from .structures import CaseInsensitiveDict | ||
| 28 | |||
| 29 | from .auth import HTTPBasicAuth | ||
| 30 | from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar | ||
| 31 | from .exceptions import ( | ||
| 32 | HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError, | ||
| 33 | ContentDecodingError, ConnectionError, StreamConsumedError) | ||
| 34 | from ._internal_utils import to_native_string, unicode_is_ascii | ||
| 35 | from .utils import ( | ||
| 36 | guess_filename, get_auth_from_url, requote_uri, | ||
| 37 | stream_decode_response_unicode, to_key_val_list, parse_header_links, | ||
| 38 | iter_slices, guess_json_utf, super_len, check_header_validity) | ||
| 39 | from .compat import ( | ||
| 40 | cookielib, urlunparse, urlsplit, urlencode, str, bytes, | ||
| 41 | is_py2, chardet, builtin_str, basestring) | ||
| 42 | from .compat import json as complexjson | ||
| 43 | from .status_codes import codes | ||
| 44 | |||
| 45 | #: The set of HTTP status codes that indicate an automatically | ||
| 46 | #: processable redirect. | ||
| 47 | REDIRECT_STATI = ( | ||
| 48 | codes.moved, # 301 | ||
| 49 | codes.found, # 302 | ||
| 50 | codes.other, # 303 | ||
| 51 | codes.temporary_redirect, # 307 | ||
| 52 | codes.permanent_redirect, # 308 | ||
| 53 | ) | ||
| 54 | |||
| 55 | DEFAULT_REDIRECT_LIMIT = 30 | ||
| 56 | CONTENT_CHUNK_SIZE = 10 * 1024 | ||
| 57 | ITER_CHUNK_SIZE = 512 | ||
| 58 | |||
| 59 | |||
| 60 | class RequestEncodingMixin(object): | ||
| 61 | @property | ||
| 62 | def path_url(self): | ||
| 63 | """Build the path URL to use.""" | ||
| 64 | |||
| 65 | url = [] | ||
| 66 | |||
| 67 | p = urlsplit(self.url) | ||
| 68 | |||
| 69 | path = p.path | ||
| 70 | if not path: | ||
| 71 | path = '/' | ||
| 72 | |||
| 73 | url.append(path) | ||
| 74 | |||
| 75 | query = p.query | ||
| 76 | if query: | ||
| 77 | url.append('?') | ||
| 78 | url.append(query) | ||
| 79 | |||
| 80 | return ''.join(url) | ||
| 81 | |||
| 82 | @staticmethod | ||
| 83 | def _encode_params(data): | ||
| 84 | """Encode parameters in a piece of data. | ||
| 85 | |||
| 86 | Will successfully encode parameters when passed as a dict or a list of | ||
| 87 | 2-tuples. Order is retained if data is a list of 2-tuples but arbitrary | ||
| 88 | if parameters are supplied as a dict. | ||
| 89 | """ | ||
| 90 | |||
| 91 | if isinstance(data, (str, bytes)): | ||
| 92 | return data | ||
| 93 | elif hasattr(data, 'read'): | ||
| 94 | return data | ||
| 95 | elif hasattr(data, '__iter__'): | ||
| 96 | result = [] | ||
| 97 | for k, vs in to_key_val_list(data): | ||
| 98 | if isinstance(vs, basestring) or not hasattr(vs, '__iter__'): | ||
| 99 | vs = [vs] | ||
| 100 | for v in vs: | ||
| 101 | if v is not None: | ||
| 102 | result.append( | ||
| 103 | (k.encode('utf-8') if isinstance(k, str) else k, | ||
| 104 | v.encode('utf-8') if isinstance(v, str) else v)) | ||
| 105 | return urlencode(result, doseq=True) | ||
| 106 | else: | ||
| 107 | return data | ||
| 108 | |||
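A quick sketch of the two documented behaviors, ordering and sequence expansion (parameter names are made up):

    from pip._vendor.requests.models import RequestEncodingMixin

    # A list of 2-tuples keeps its order.
    print(RequestEncodingMixin._encode_params([('q', 'pip'), ('page', '2')]))
    # -> 'q=pip&page=2'

    # A key mapped to a sequence expands into repeated parameters.
    print(RequestEncodingMixin._encode_params({'tag': ['a', 'b']}))
    # -> 'tag=a&tag=b'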
| 109 | @staticmethod | ||
| 110 | def _encode_files(files, data): | ||
| 111 | """Build the body for a multipart/form-data request. | ||
| 112 | |||
| 113 | Will successfully encode files when passed as a dict or a list of | ||
| 114 | tuples. Order is retained if data is a list of tuples but arbitrary | ||
| 115 | if parameters are supplied as a dict. | ||
| 116 | The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, content_type) | ||
| 117 | or 4-tuples (filename, fileobj, content_type, custom_headers). | ||
| 118 | """ | ||
| 119 | if not files: | ||
| 120 | raise ValueError("Files must be provided.") | ||
| 121 | elif isinstance(data, basestring): | ||
| 122 | raise ValueError("Data must not be a string.") | ||
| 123 | |||
| 124 | new_fields = [] | ||
| 125 | fields = to_key_val_list(data or {}) | ||
| 126 | files = to_key_val_list(files or {}) | ||
| 127 | |||
| 128 | for field, val in fields: | ||
| 129 | if isinstance(val, basestring) or not hasattr(val, '__iter__'): | ||
| 130 | val = [val] | ||
| 131 | for v in val: | ||
| 132 | if v is not None: | ||
| 133 | # Don't call str() on bytestrings: in Py3 it all goes wrong. | ||
| 134 | if not isinstance(v, bytes): | ||
| 135 | v = str(v) | ||
| 136 | |||
| 137 | new_fields.append( | ||
| 138 | (field.decode('utf-8') if isinstance(field, bytes) else field, | ||
| 139 | v.encode('utf-8') if isinstance(v, str) else v)) | ||
| 140 | |||
| 141 | for (k, v) in files: | ||
| 142 | # support for explicit filename | ||
| 143 | ft = None | ||
| 144 | fh = None | ||
| 145 | if isinstance(v, (tuple, list)): | ||
| 146 | if len(v) == 2: | ||
| 147 | fn, fp = v | ||
| 148 | elif len(v) == 3: | ||
| 149 | fn, fp, ft = v | ||
| 150 | else: | ||
| 151 | fn, fp, ft, fh = v | ||
| 152 | else: | ||
| 153 | fn = guess_filename(v) or k | ||
| 154 | fp = v | ||
| 155 | |||
| 156 | if isinstance(fp, (str, bytes, bytearray)): | ||
| 157 | fdata = fp | ||
| 158 | else: | ||
| 159 | fdata = fp.read() | ||
| 160 | |||
| 161 | rf = RequestField(name=k, data=fdata, filename=fn, headers=fh) | ||
| 162 | rf.make_multipart(content_type=ft) | ||
| 163 | new_fields.append(rf) | ||
| 164 | |||
| 165 | body, content_type = encode_multipart_formdata(new_fields) | ||
| 166 | |||
| 167 | return body, content_type | ||
| 168 | |||
| 169 | |||
| 170 | class RequestHooksMixin(object): | ||
| 171 | def register_hook(self, event, hook): | ||
| 172 | """Properly register a hook.""" | ||
| 173 | |||
| 174 | if event not in self.hooks: | ||
| 175 | raise ValueError('Unsupported event specified, with event name "%s"' % (event)) | ||
| 176 | |||
| 177 | if isinstance(hook, collections.Callable): | ||
| 178 | self.hooks[event].append(hook) | ||
| 179 | elif hasattr(hook, '__iter__'): | ||
| 180 | self.hooks[event].extend(h for h in hook if isinstance(h, collections.Callable)) | ||
| 181 | |||
| 182 | def deregister_hook(self, event, hook): | ||
| 183 | """Deregister a previously registered hook. | ||
| 184 | Returns True if the hook existed, False if not. | ||
| 185 | """ | ||
| 186 | |||
| 187 | try: | ||
| 188 | self.hooks[event].remove(hook) | ||
| 189 | return True | ||
| 190 | except ValueError: | ||
| 191 | return False | ||
| 192 | |||
| 193 | |||
| 194 | class Request(RequestHooksMixin): | ||
| 195 | """A user-created :class:`Request <Request>` object. | ||
| 196 | |||
| 197 | Used to prepare a :class:`PreparedRequest <PreparedRequest>`, which is sent to the server. | ||
| 198 | |||
| 199 | :param method: HTTP method to use. | ||
| 200 | :param url: URL to send. | ||
| 201 | :param headers: dictionary of headers to send. | ||
| 202 | :param files: dictionary of {filename: fileobject} files to multipart upload. | ||
| 203 | :param data: the body to attach to the request. If a dictionary is provided, form-encoding will take place. | ||
| 204 | :param json: json for the body to attach to the request (if files or data is not specified). | ||
| 205 | :param params: dictionary of URL parameters to append to the URL. | ||
| 206 | :param auth: Auth handler or (user, pass) tuple. | ||
| 207 | :param cookies: dictionary or CookieJar of cookies to attach to this request. | ||
| 208 | :param hooks: dictionary of callback hooks, for internal usage. | ||
| 209 | |||
| 210 | Usage:: | ||
| 211 | |||
| 212 | >>> import requests | ||
| 213 | >>> req = requests.Request('GET', 'http://httpbin.org/get') | ||
| 214 | >>> req.prepare() | ||
| 215 | <PreparedRequest [GET]> | ||
| 216 | """ | ||
| 217 | |||
| 218 | def __init__(self, | ||
| 219 | method=None, url=None, headers=None, files=None, data=None, | ||
| 220 | params=None, auth=None, cookies=None, hooks=None, json=None): | ||
| 221 | |||
| 222 | # Default empty dicts for dict params. | ||
| 223 | data = [] if data is None else data | ||
| 224 | files = [] if files is None else files | ||
| 225 | headers = {} if headers is None else headers | ||
| 226 | params = {} if params is None else params | ||
| 227 | hooks = {} if hooks is None else hooks | ||
| 228 | |||
| 229 | self.hooks = default_hooks() | ||
| 230 | for (k, v) in list(hooks.items()): | ||
| 231 | self.register_hook(event=k, hook=v) | ||
| 232 | |||
| 233 | self.method = method | ||
| 234 | self.url = url | ||
| 235 | self.headers = headers | ||
| 236 | self.files = files | ||
| 237 | self.data = data | ||
| 238 | self.json = json | ||
| 239 | self.params = params | ||
| 240 | self.auth = auth | ||
| 241 | self.cookies = cookies | ||
| 242 | |||
| 243 | def __repr__(self): | ||
| 244 | return '<Request [%s]>' % (self.method) | ||
| 245 | |||
| 246 | def prepare(self): | ||
| 247 | """Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it.""" | ||
| 248 | p = PreparedRequest() | ||
| 249 | p.prepare( | ||
| 250 | method=self.method, | ||
| 251 | url=self.url, | ||
| 252 | headers=self.headers, | ||
| 253 | files=self.files, | ||
| 254 | data=self.data, | ||
| 255 | json=self.json, | ||
| 256 | params=self.params, | ||
| 257 | auth=self.auth, | ||
| 258 | cookies=self.cookies, | ||
| 259 | hooks=self.hooks, | ||
| 260 | ) | ||
| 261 | return p | ||
| 262 | |||
| 263 | |||
| 264 | class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): | ||
| 265 | """The fully mutable :class:`PreparedRequest <PreparedRequest>` object, | ||
| 266 | containing the exact bytes that will be sent to the server. | ||
| 267 | |||
| 268 | Generated from either a :class:`Request <Request>` object or manually. | ||
| 269 | |||
| 270 | Usage:: | ||
| 271 | |||
| 272 | >>> import requests | ||
| 273 | >>> req = requests.Request('GET', 'http://httpbin.org/get') | ||
| 274 | >>> r = req.prepare() | ||
| 275 | <PreparedRequest [GET]> | ||
| 276 | |||
| 277 | >>> s = requests.Session() | ||
| 278 | >>> s.send(r) | ||
| 279 | <Response [200]> | ||
| 280 | """ | ||
| 281 | |||
| 282 | def __init__(self): | ||
| 283 | #: HTTP verb to send to the server. | ||
| 284 | self.method = None | ||
| 285 | #: HTTP URL to send the request to. | ||
| 286 | self.url = None | ||
| 287 | #: dictionary of HTTP headers. | ||
| 288 | self.headers = None | ||
| 289 | # The `CookieJar` used to create the Cookie header will be stored here | ||
| 290 | # after prepare_cookies is called | ||
| 291 | self._cookies = None | ||
| 292 | #: request body to send to the server. | ||
| 293 | self.body = None | ||
| 294 | #: dictionary of callback hooks, for internal usage. | ||
| 295 | self.hooks = default_hooks() | ||
| 296 | #: integer denoting starting position of a readable file-like body. | ||
| 297 | self._body_position = None | ||
| 298 | |||
| 299 | def prepare(self, | ||
| 300 | method=None, url=None, headers=None, files=None, data=None, | ||
| 301 | params=None, auth=None, cookies=None, hooks=None, json=None): | ||
| 302 | """Prepares the entire request with the given parameters.""" | ||
| 303 | |||
| 304 | self.prepare_method(method) | ||
| 305 | self.prepare_url(url, params) | ||
| 306 | self.prepare_headers(headers) | ||
| 307 | self.prepare_cookies(cookies) | ||
| 308 | self.prepare_body(data, files, json) | ||
| 309 | self.prepare_auth(auth, url) | ||
| 310 | |||
| 311 | # Note that prepare_auth must be last to enable authentication schemes | ||
| 312 | # such as OAuth to work on a fully prepared request. | ||
| 313 | |||
| 314 | # This MUST go after prepare_auth. Authenticators could add a hook | ||
| 315 | self.prepare_hooks(hooks) | ||
| 316 | |||
| 317 | def __repr__(self): | ||
| 318 | return '<PreparedRequest [%s]>' % (self.method) | ||
| 319 | |||
| 320 | def copy(self): | ||
| 321 | p = PreparedRequest() | ||
| 322 | p.method = self.method | ||
| 323 | p.url = self.url | ||
| 324 | p.headers = self.headers.copy() if self.headers is not None else None | ||
| 325 | p._cookies = _copy_cookie_jar(self._cookies) | ||
| 326 | p.body = self.body | ||
| 327 | p.hooks = self.hooks | ||
| 328 | p._body_position = self._body_position | ||
| 329 | return p | ||
| 330 | |||
| 331 | def prepare_method(self, method): | ||
| 332 | """Prepares the given HTTP method.""" | ||
| 333 | self.method = method | ||
| 334 | if self.method is not None: | ||
| 335 | self.method = to_native_string(self.method.upper()) | ||
| 336 | |||
| 337 | @staticmethod | ||
| 338 | def _get_idna_encoded_host(host): | ||
| 339 | from pip._vendor import idna | ||
| 340 | |||
| 341 | try: | ||
| 342 | host = idna.encode(host, uts46=True).decode('utf-8') | ||
| 343 | except idna.IDNAError: | ||
| 344 | raise UnicodeError | ||
| 345 | return host | ||
| 346 | |||
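A one-line sketch of what this helper produces for a non-ASCII host (hostname is illustrative):

    from pip._vendor.requests.models import PreparedRequest

    # Non-ASCII hostnames are punycoded before the URL is rebuilt.
    print(PreparedRequest._get_idna_encoded_host(u'bücher.example'))
    # -> 'xn--bcher-kva.example'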
| 347 | def prepare_url(self, url, params): | ||
| 348 | """Prepares the given HTTP URL.""" | ||
| 349 | #: Accept objects that have string representations. | ||
| 350 | #: We're unable to blindly call unicode/str functions | ||
| 351 | #: as this will include the bytestring indicator (b'') | ||
| 352 | #: on python 3.x. | ||
| 353 | #: https://github.com/requests/requests/pull/2238 | ||
| 354 | if isinstance(url, bytes): | ||
| 355 | url = url.decode('utf8') | ||
| 356 | else: | ||
| 357 | url = unicode(url) if is_py2 else str(url) | ||
| 358 | |||
| 359 | # Remove leading whitespaces from url | ||
| 360 | url = url.lstrip() | ||
| 361 | |||
| 362 | # Don't do any URL preparation for non-HTTP schemes like `mailto`, | ||
| 363 | # `data` etc to work around exceptions from `parse_url`, which | ||
| 364 | # handles RFC 3986 only. | ||
| 365 | if ':' in url and not url.lower().startswith('http'): | ||
| 366 | self.url = url | ||
| 367 | return | ||
| 368 | |||
| 369 | # Support for unicode domain names and paths. | ||
| 370 | try: | ||
| 371 | scheme, auth, host, port, path, query, fragment = parse_url(url) | ||
| 372 | except LocationParseError as e: | ||
| 373 | raise InvalidURL(*e.args) | ||
| 374 | |||
| 375 | if not scheme: | ||
| 376 | error = ("Invalid URL {0!r}: No schema supplied. Perhaps you meant http://{0}?") | ||
| 377 | error = error.format(to_native_string(url, 'utf8')) | ||
| 378 | |||
| 379 | raise MissingSchema(error) | ||
| 380 | |||
| 381 | if not host: | ||
| 382 | raise InvalidURL("Invalid URL %r: No host supplied" % url) | ||
| 383 | |||
| 384 | # In general, we want to try IDNA encoding the hostname if the string contains | ||
| 385 | # non-ASCII characters. This allows users to automatically get the correct IDNA | ||
| 386 | # behaviour. For strings containing only ASCII characters, we need to also verify | ||
| 387 | # it doesn't start with a wildcard (*), before allowing the unencoded hostname. | ||
| 388 | if not unicode_is_ascii(host): | ||
| 389 | try: | ||
| 390 | host = self._get_idna_encoded_host(host) | ||
| 391 | except UnicodeError: | ||
| 392 | raise InvalidURL('URL has an invalid label.') | ||
| 393 | elif host.startswith(u'*'): | ||
| 394 | raise InvalidURL('URL has an invalid label.') | ||
| 395 | |||
| 396 | # Carefully reconstruct the network location | ||
| 397 | netloc = auth or '' | ||
| 398 | if netloc: | ||
| 399 | netloc += '@' | ||
| 400 | netloc += host | ||
| 401 | if port: | ||
| 402 | netloc += ':' + str(port) | ||
| 403 | |||
| 404 | # Bare domains aren't valid URLs. | ||
| 405 | if not path: | ||
| 406 | path = '/' | ||
| 407 | |||
| 408 | if is_py2: | ||
| 409 | if isinstance(scheme, str): | ||
| 410 | scheme = scheme.encode('utf-8') | ||
| 411 | if isinstance(netloc, str): | ||
| 412 | netloc = netloc.encode('utf-8') | ||
| 413 | if isinstance(path, str): | ||
| 414 | path = path.encode('utf-8') | ||
| 415 | if isinstance(query, str): | ||
| 416 | query = query.encode('utf-8') | ||
| 417 | if isinstance(fragment, str): | ||
| 418 | fragment = fragment.encode('utf-8') | ||
| 419 | |||
| 420 | if isinstance(params, (str, bytes)): | ||
| 421 | params = to_native_string(params) | ||
| 422 | |||
| 423 | enc_params = self._encode_params(params) | ||
| 424 | if enc_params: | ||
| 425 | if query: | ||
| 426 | query = '%s&%s' % (query, enc_params) | ||
| 427 | else: | ||
| 428 | query = enc_params | ||
| 429 | |||
| 430 | url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment])) | ||
| 431 | self.url = url | ||
| 432 | |||
| 433 | def prepare_headers(self, headers): | ||
| 434 | """Prepares the given HTTP headers.""" | ||
| 435 | |||
| 436 | self.headers = CaseInsensitiveDict() | ||
| 437 | if headers: | ||
| 438 | for header in headers.items(): | ||
| 439 | # Raise exception on invalid header value. | ||
| 440 | check_header_validity(header) | ||
| 441 | name, value = header | ||
| 442 | self.headers[to_native_string(name)] = value | ||
| 443 | |||
| 444 | def prepare_body(self, data, files, json=None): | ||
| 445 | """Prepares the given HTTP body data.""" | ||
| 446 | |||
| 447 | # Check if file, fo, generator, iterator. | ||
| 448 | # If not, run through normal process. | ||
| 449 | |||
| 450 | # Nottin' on you. | ||
| 451 | body = None | ||
| 452 | content_type = None | ||
| 453 | |||
| 454 | if not data and json is not None: | ||
| 455 | # urllib3 requires a bytes-like body. Python 2's json.dumps | ||
| 456 | # provides this natively, but Python 3 gives a Unicode string. | ||
| 457 | content_type = 'application/json' | ||
| 458 | body = complexjson.dumps(json) | ||
| 459 | if not isinstance(body, bytes): | ||
| 460 | body = body.encode('utf-8') | ||
| 461 | |||
| 462 | is_stream = all([ | ||
| 463 | hasattr(data, '__iter__'), | ||
| 464 | not isinstance(data, (basestring, list, tuple, collections.Mapping)) | ||
| 465 | ]) | ||
| 466 | |||
| 467 | try: | ||
| 468 | length = super_len(data) | ||
| 469 | except (TypeError, AttributeError, UnsupportedOperation): | ||
| 470 | length = None | ||
| 471 | |||
| 472 | if is_stream: | ||
| 473 | body = data | ||
| 474 | |||
| 475 | if getattr(body, 'tell', None) is not None: | ||
| 476 | # Record the current file position before reading. | ||
| 477 | # This will allow us to rewind a file in the event | ||
| 478 | # of a redirect. | ||
| 479 | try: | ||
| 480 | self._body_position = body.tell() | ||
| 481 | except (IOError, OSError): | ||
| 482 | # This differentiates from None, allowing us to catch | ||
| 483 | # a failed `tell()` later when trying to rewind the body | ||
| 484 | self._body_position = object() | ||
| 485 | |||
| 486 | if files: | ||
| 487 | raise NotImplementedError('Streamed bodies and files are mutually exclusive.') | ||
| 488 | |||
| 489 | if length: | ||
| 490 | self.headers['Content-Length'] = builtin_str(length) | ||
| 491 | else: | ||
| 492 | self.headers['Transfer-Encoding'] = 'chunked' | ||
| 493 | else: | ||
| 494 | # Multi-part file uploads. | ||
| 495 | if files: | ||
| 496 | (body, content_type) = self._encode_files(files, data) | ||
| 497 | else: | ||
| 498 | if data: | ||
| 499 | body = self._encode_params(data) | ||
| 500 | if isinstance(data, basestring) or hasattr(data, 'read'): | ||
| 501 | content_type = None | ||
| 502 | else: | ||
| 503 | content_type = 'application/x-www-form-urlencoded' | ||
| 504 | |||
| 505 | self.prepare_content_length(body) | ||
| 506 | |||
| 507 | # Add content-type if it wasn't explicitly provided. | ||
| 508 | if content_type and ('content-type' not in self.headers): | ||
| 509 | self.headers['Content-Type'] = content_type | ||
| 510 | |||
| 511 | self.body = body | ||
| 512 | |||
| 513 | def prepare_content_length(self, body): | ||
| 514 | """Prepare Content-Length header based on request method and body""" | ||
| 515 | if body is not None: | ||
| 516 | length = super_len(body) | ||
| 517 | if length: | ||
| 518 | # If length exists, set it. Otherwise, we fallback | ||
| 519 | # to Transfer-Encoding: chunked. | ||
| 520 | self.headers['Content-Length'] = builtin_str(length) | ||
| 521 | elif self.method not in ('GET', 'HEAD') and self.headers.get('Content-Length') is None: | ||
| 522 | # Set Content-Length to 0 for methods that can have a body | ||
| 523 | # but don't provide one. (i.e. not GET or HEAD) | ||
| 524 | self.headers['Content-Length'] = '0' | ||
| 525 | |||
| 526 | def prepare_auth(self, auth, url=''): | ||
| 527 | """Prepares the given HTTP auth data.""" | ||
| 528 | |||
| 529 | # If no Auth is explicitly provided, extract it from the URL first. | ||
| 530 | if auth is None: | ||
| 531 | url_auth = get_auth_from_url(self.url) | ||
| 532 | auth = url_auth if any(url_auth) else None | ||
| 533 | |||
| 534 | if auth: | ||
| 535 | if isinstance(auth, tuple) and len(auth) == 2: | ||
| 536 | # special-case basic HTTP auth | ||
| 537 | auth = HTTPBasicAuth(*auth) | ||
| 538 | |||
| 539 | # Allow auth to make its changes. | ||
| 540 | r = auth(self) | ||
| 541 | |||
| 542 | # Update self to reflect the auth changes. | ||
| 543 | self.__dict__.update(r.__dict__) | ||
| 544 | |||
| 545 | # Recompute Content-Length | ||
| 546 | self.prepare_content_length(self.body) | ||
| 547 | |||
| 548 | def prepare_cookies(self, cookies): | ||
| 549 | """Prepares the given HTTP cookie data. | ||
| 550 | |||
| 551 | This function eventually generates a ``Cookie`` header from the | ||
| 552 | given cookies using cookielib. Due to cookielib's design, the header | ||
| 553 | will not be regenerated if it already exists, meaning this function | ||
| 554 | can only be called once for the life of the | ||
| 555 | :class:`PreparedRequest <PreparedRequest>` object. Any subsequent calls | ||
| 556 | to ``prepare_cookies`` will have no actual effect, unless the "Cookie" | ||
| 557 | header is removed beforehand. | ||
| 558 | """ | ||
| 559 | if isinstance(cookies, cookielib.CookieJar): | ||
| 560 | self._cookies = cookies | ||
| 561 | else: | ||
| 562 | self._cookies = cookiejar_from_dict(cookies) | ||
| 563 | |||
| 564 | cookie_header = get_cookie_header(self._cookies, self) | ||
| 565 | if cookie_header is not None: | ||
| 566 | self.headers['Cookie'] = cookie_header | ||
| 567 | |||
| 568 | def prepare_hooks(self, hooks): | ||
| 569 | """Prepares the given hooks.""" | ||
| 570 | # hooks can be passed as None to the prepare method and to this | ||
| 571 | # method. To prevent iterating over None, simply use an empty list | ||
| 572 | # if hooks is False-y | ||
| 573 | hooks = hooks or [] | ||
| 574 | for event in hooks: | ||
| 575 | self.register_hook(event, hooks[event]) | ||
| 576 | |||
| 577 | |||
| 578 | class Response(object): | ||
| 579 | """The :class:`Response <Response>` object, which contains a | ||
| 580 | server's response to an HTTP request. | ||
| 581 | """ | ||
| 582 | |||
| 583 | __attrs__ = [ | ||
| 584 | '_content', 'status_code', 'headers', 'url', 'history', | ||
| 585 | 'encoding', 'reason', 'cookies', 'elapsed', 'request' | ||
| 586 | ] | ||
| 587 | |||
| 588 | def __init__(self): | ||
| 589 | self._content = False | ||
| 590 | self._content_consumed = False | ||
| 591 | self._next = None | ||
| 592 | |||
| 593 | #: Integer Code of responded HTTP Status, e.g. 404 or 200. | ||
| 594 | self.status_code = None | ||
| 595 | |||
| 596 | #: Case-insensitive Dictionary of Response Headers. | ||
| 597 | #: For example, ``headers['content-encoding']`` will return the | ||
| 598 | #: value of a ``'Content-Encoding'`` response header. | ||
| 599 | self.headers = CaseInsensitiveDict() | ||
| 600 | |||
| 601 | #: File-like object representation of response (for advanced usage). | ||
| 602 | #: Use of ``raw`` requires that ``stream=True`` be set on the request. | ||
| 603 | # This requirement does not apply to Requests' internal use. | ||
| 604 | self.raw = None | ||
| 605 | |||
| 606 | #: Final URL location of Response. | ||
| 607 | self.url = None | ||
| 608 | |||
| 609 | #: Encoding to decode with when accessing r.text. | ||
| 610 | self.encoding = None | ||
| 611 | |||
| 612 | #: A list of :class:`Response <Response>` objects from | ||
| 613 | #: the history of the Request. Any redirect responses will end | ||
| 614 | #: up here. The list is sorted from the oldest to the most recent request. | ||
| 615 | self.history = [] | ||
| 616 | |||
| 617 | #: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK". | ||
| 618 | self.reason = None | ||
| 619 | |||
| 620 | #: A CookieJar of Cookies the server sent back. | ||
| 621 | self.cookies = cookiejar_from_dict({}) | ||
| 622 | |||
| 623 | #: The amount of time elapsed between sending the request | ||
| 624 | #: and the arrival of the response (as a timedelta). | ||
| 625 | #: This property specifically measures the time taken between sending | ||
| 626 | #: the first byte of the request and finishing parsing the headers. It | ||
| 627 | #: is therefore unaffected by consuming the response content or the | ||
| 628 | #: value of the ``stream`` keyword argument. | ||
| 629 | self.elapsed = datetime.timedelta(0) | ||
| 630 | |||
| 631 | #: The :class:`PreparedRequest <PreparedRequest>` object to which this | ||
| 632 | #: is a response. | ||
| 633 | self.request = None | ||
| 634 | |||
| 635 | def __enter__(self): | ||
| 636 | return self | ||
| 637 | |||
| 638 | def __exit__(self, *args): | ||
| 639 | self.close() | ||
| 640 | |||
| 641 | def __getstate__(self): | ||
| 642 | # Consume everything; accessing the content attribute makes | ||
| 643 | # sure the content has been fully read. | ||
| 644 | if not self._content_consumed: | ||
| 645 | self.content | ||
| 646 | |||
| 647 | return dict( | ||
| 648 | (attr, getattr(self, attr, None)) | ||
| 649 | for attr in self.__attrs__ | ||
| 650 | ) | ||
| 651 | |||
| 652 | def __setstate__(self, state): | ||
| 653 | for name, value in state.items(): | ||
| 654 | setattr(self, name, value) | ||
| 655 | |||
| 656 | # pickled objects do not have .raw | ||
| 657 | setattr(self, '_content_consumed', True) | ||
| 658 | setattr(self, 'raw', None) | ||
| 659 | |||
| 660 | def __repr__(self): | ||
| 661 | return '<Response [%s]>' % (self.status_code) | ||
| 662 | |||
| 663 | def __bool__(self): | ||
| 664 | """Returns True if :attr:`status_code` is less than 400. | ||
| 665 | |||
| 666 | This attribute checks if the status code of the response is between | ||
| 667 | 400 and 600 to see if there was a client error or a server error. If | ||
| 668 | the status code is between 200 and 400, this will return True. This | ||
| 669 | is **not** a check to see if the response code is ``200 OK``. | ||
| 670 | """ | ||
| 671 | return self.ok | ||
| 672 | |||
| 673 | def __nonzero__(self): | ||
| 674 | """Returns True if :attr:`status_code` is less than 400. | ||
| 675 | |||
| 676 | This attribute checks if the status code of the response is between | ||
| 677 | 400 and 600 to see if there was a client error or a server error. If | ||
| 678 | the status code is between 200 and 400, this will return True. This | ||
| 679 | is **not** a check to see if the response code is ``200 OK``. | ||
| 680 | """ | ||
| 681 | return self.ok | ||
| 682 | |||
| 683 | def __iter__(self): | ||
| 684 | """Allows you to use a response as an iterator.""" | ||
| 685 | return self.iter_content(128) | ||
| 686 | |||
| 687 | @property | ||
| 688 | def ok(self): | ||
| 689 | """Returns True if :attr:`status_code` is less than 400. | ||
| 690 | |||
| 691 | This attribute checks if the status code of the response is between | ||
| 692 | 400 and 600 to see if there was a client error or a server error. If | ||
| 693 | the status code is between 200 and 400, this will return True. This | ||
| 694 | is **not** a check to see if the response code is ``200 OK``. | ||
| 695 | """ | ||
| 696 | try: | ||
| 697 | self.raise_for_status() | ||
| 698 | except HTTPError: | ||
| 699 | return False | ||
| 700 | return True | ||
| 701 | |||
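A sketch of the distinction the docstring draws, built on a bare Response constructed directly (no network needed):

    from requests.models import Response

    resp = Response()
    resp.status_code = 204           # No Content: not 200, yet still "ok"
    assert resp.ok and bool(resp)

    resp.status_code = 404
    assert not resp.ok               # raise_for_status() raises HTTPError here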
| 702 | @property | ||
| 703 | def is_redirect(self): | ||
| 704 | """True if this Response is a well-formed HTTP redirect that could have | ||
| 705 | been processed automatically (by :meth:`Session.resolve_redirects`). | ||
| 706 | """ | ||
| 707 | return ('location' in self.headers and self.status_code in REDIRECT_STATI) | ||
| 708 | |||
| 709 | @property | ||
| 710 | def is_permanent_redirect(self): | ||
| 711 | """True if this Response one of the permanent versions of redirect.""" | ||
| 712 | return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect)) | ||
| 713 | |||
| 714 | @property | ||
| 715 | def next(self): | ||
| 716 | """Returns a PreparedRequest for the next request in a redirect chain, if there is one.""" | ||
| 717 | return self._next | ||
| 718 | |||
| 719 | @property | ||
| 720 | def apparent_encoding(self): | ||
| 721 | """The apparent encoding, provided by the chardet library.""" | ||
| 722 | return chardet.detect(self.content)['encoding'] | ||
| 723 | |||
| 724 | def iter_content(self, chunk_size=1, decode_unicode=False): | ||
| 725 | """Iterates over the response data. When stream=True is set on the | ||
| 726 | request, this avoids reading the content at once into memory for | ||
| 727 | large responses. The chunk size is the number of bytes it should | ||
| 728 | read into memory. This is not necessarily the length of each item | ||
| 729 | returned as decoding can take place. | ||
| 730 | |||
| 731 | chunk_size must be of type int or None. A value of None will | ||
| 732 | function differently depending on the value of `stream`. | ||
| 733 | stream=True will read data as it arrives in whatever size the | ||
| 734 | chunks are received. If stream=False, data is returned as | ||
| 735 | a single chunk. | ||
| 736 | |||
| 737 | If decode_unicode is True, content will be decoded using the best | ||
| 738 | available encoding based on the response. | ||
| 739 | """ | ||
| 740 | |||
| 741 | def generate(): | ||
| 742 | # Special case for urllib3. | ||
| 743 | if hasattr(self.raw, 'stream'): | ||
| 744 | try: | ||
| 745 | for chunk in self.raw.stream(chunk_size, decode_content=True): | ||
| 746 | yield chunk | ||
| 747 | except ProtocolError as e: | ||
| 748 | raise ChunkedEncodingError(e) | ||
| 749 | except DecodeError as e: | ||
| 750 | raise ContentDecodingError(e) | ||
| 751 | except ReadTimeoutError as e: | ||
| 752 | raise ConnectionError(e) | ||
| 753 | else: | ||
| 754 | # Standard file-like object. | ||
| 755 | while True: | ||
| 756 | chunk = self.raw.read(chunk_size) | ||
| 757 | if not chunk: | ||
| 758 | break | ||
| 759 | yield chunk | ||
| 760 | |||
| 761 | self._content_consumed = True | ||
| 762 | |||
| 763 | if self._content_consumed and isinstance(self._content, bool): | ||
| 764 | raise StreamConsumedError() | ||
| 765 | elif chunk_size is not None and not isinstance(chunk_size, int): | ||
| 766 | raise TypeError("chunk_size must be an int, it is instead a %s." % type(chunk_size)) | ||
| 767 | # simulate reading small chunks of the content | ||
| 768 | reused_chunks = iter_slices(self._content, chunk_size) | ||
| 769 | |||
| 770 | stream_chunks = generate() | ||
| 771 | |||
| 772 | chunks = reused_chunks if self._content_consumed else stream_chunks | ||
| 773 | |||
| 774 | if decode_unicode: | ||
| 775 | chunks = stream_decode_response_unicode(chunks, self) | ||
| 776 | |||
| 777 | return chunks | ||
| 778 | |||
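A typical streaming-download sketch of this method; the URL and filename are illustrative:

    import requests

    # stream=True defers the body; iter_content then reads it in bounded chunks.
    with requests.get('https://example.com/big-file', stream=True) as resp:
        with open('payload.bin', 'wb') as fh:
            for chunk in resp.iter_content(chunk_size=8192):
                fh.write(chunk)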
| 779 | def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=None, delimiter=None): | ||
| 780 | """Iterates over the response data, one line at a time. When | ||
| 781 | stream=True is set on the request, this avoids reading the | ||
| 782 | content at once into memory for large responses. | ||
| 783 | |||
| 784 | .. note:: This method is not reentrant safe. | ||
| 785 | """ | ||
| 786 | |||
| 787 | pending = None | ||
| 788 | |||
| 789 | for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode): | ||
| 790 | |||
| 791 | if pending is not None: | ||
| 792 | chunk = pending + chunk | ||
| 793 | |||
| 794 | if delimiter: | ||
| 795 | lines = chunk.split(delimiter) | ||
| 796 | else: | ||
| 797 | lines = chunk.splitlines() | ||
| 798 | |||
| 799 | if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]: | ||
| 800 | pending = lines.pop() | ||
| 801 | else: | ||
| 802 | pending = None | ||
| 803 | |||
| 804 | for line in lines: | ||
| 805 | yield line | ||
| 806 | |||
| 807 | if pending is not None: | ||
| 808 | yield pending | ||
| 809 | |||
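A minimal consumption sketch (illustrative URL); without ``decode_unicode`` the yielded lines are bytes:

    import requests

    resp = requests.get('https://example.com/stream', stream=True)
    for line in resp.iter_lines():
        if line:                     # filter out keep-alive blank lines
            print(line.decode('utf-8'))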
| 810 | @property | ||
| 811 | def content(self): | ||
| 812 | """Content of the response, in bytes.""" | ||
| 813 | |||
| 814 | if self._content is False: | ||
| 815 | # Read the contents. | ||
| 816 | if self._content_consumed: | ||
| 817 | raise RuntimeError( | ||
| 818 | 'The content for this response was already consumed') | ||
| 819 | |||
| 820 | if self.status_code == 0 or self.raw is None: | ||
| 821 | self._content = None | ||
| 822 | else: | ||
| 823 | self._content = bytes().join(self.iter_content(CONTENT_CHUNK_SIZE)) or bytes() | ||
| 824 | |||
| 825 | self._content_consumed = True | ||
| 826 | # don't need to release the connection; that's been handled by urllib3 | ||
| 827 | # since we exhausted the data. | ||
| 828 | return self._content | ||
| 829 | |||
| 830 | @property | ||
| 831 | def text(self): | ||
| 832 | """Content of the response, in unicode. | ||
| 833 | |||
| 834 | If Response.encoding is None, encoding will be guessed using | ||
| 835 | ``chardet``. | ||
| 836 | |||
| 837 | The encoding of the response content is determined based solely on HTTP | ||
| 838 | headers, following RFC 2616 to the letter. If you can take advantage of | ||
| 839 | non-HTTP knowledge to make a better guess at the encoding, you should | ||
| 840 | set ``r.encoding`` appropriately before accessing this property. | ||
| 841 | """ | ||
| 842 | |||
| 843 | # Try charset from content-type | ||
| 844 | content = None | ||
| 845 | encoding = self.encoding | ||
| 846 | |||
| 847 | if not self.content: | ||
| 848 | return str('') | ||
| 849 | |||
| 850 | # Fallback to auto-detected encoding. | ||
| 851 | if self.encoding is None: | ||
| 852 | encoding = self.apparent_encoding | ||
| 853 | |||
| 854 | # Decode unicode from given encoding. | ||
| 855 | try: | ||
| 856 | content = str(self.content, encoding, errors='replace') | ||
| 857 | except (LookupError, TypeError): | ||
| 858 | # A LookupError is raised if the encoding was not found which could | ||
| 859 | # indicate a misspelling or similar mistake. | ||
| 860 | # | ||
| 861 | # A TypeError can be raised if encoding is None | ||
| 862 | # | ||
| 863 | # So we try blindly encoding. | ||
| 864 | content = str(self.content, errors='replace') | ||
| 865 | |||
| 866 | return content | ||
| 867 | |||
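A short sketch of the override the docstring recommends (illustrative URL):

    import requests

    resp = requests.get('https://example.com/')
    print(resp.encoding)             # from the Content-Type charset, or None

    # When the header-derived encoding is wrong, set it before touching .text:
    resp.encoding = 'utf-8'
    body = resp.text                 # decoded with the override, errors='replace'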
| 868 | def json(self, **kwargs): | ||
| 869 | r"""Returns the json-encoded content of a response, if any. | ||
| 870 | |||
| 871 | :param \*\*kwargs: Optional arguments that ``json.loads`` takes. | ||
| 872 | :raises ValueError: If the response body does not contain valid json. | ||
| 873 | """ | ||
| 874 | |||
| 875 | if not self.encoding and self.content and len(self.content) > 3: | ||
| 876 | # No encoding set. JSON RFC 4627 section 3 states we should expect | ||
| 877 | # UTF-8, -16 or -32. Detect which one to use; If the detection or | ||
| 878 | # decoding fails, fall back to `self.text` (using chardet to make | ||
| 879 | # a best guess). | ||
| 880 | encoding = guess_json_utf(self.content) | ||
| 881 | if encoding is not None: | ||
| 882 | try: | ||
| 883 | return complexjson.loads( | ||
| 884 | self.content.decode(encoding), **kwargs | ||
| 885 | ) | ||
| 886 | except UnicodeDecodeError: | ||
| 887 | # Wrong UTF codec detected; usually because it's not UTF-8 | ||
| 888 | # but some other 8-bit codec. This is an RFC violation, | ||
| 889 | # and the server didn't bother to tell us what codec *was* | ||
| 890 | # used. | ||
| 891 | pass | ||
| 892 | return complexjson.loads(self.text, **kwargs) | ||
| 893 | |||
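A defensive calling pattern for this method (hypothetical endpoint):

    import requests

    resp = requests.get('https://example.com/api/data')
    try:
        data = resp.json()           # applies the UTF detection described above
    except ValueError:
        data = None                  # the body was not valid JSON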
| 894 | @property | ||
| 895 | def links(self): | ||
| 896 | """Returns the parsed header links of the response, if any.""" | ||
| 897 | |||
| 898 | header = self.headers.get('link') | ||
| 899 | |||
| 900 | # l = MultiDict() | ||
| 901 | l = {} | ||
| 902 | |||
| 903 | if header: | ||
| 904 | links = parse_header_links(header) | ||
| 905 | |||
| 906 | for link in links: | ||
| 907 | key = link.get('rel') or link.get('url') | ||
| 908 | l[key] = link | ||
| 909 | |||
| 910 | return l | ||
| 911 | |||
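A common use of this property is Link-header pagination; a sketch against a hypothetical API:

    import requests

    resp = requests.get('https://api.example.com/items')
    next_url = resp.links.get('next', {}).get('url')    # None-safe lookup
    while next_url:
        resp = requests.get(next_url)
        next_url = resp.links.get('next', {}).get('url')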
| 912 | def raise_for_status(self): | ||
| 913 | """Raises stored :class:`HTTPError`, if one occurred.""" | ||
| 914 | |||
| 915 | http_error_msg = '' | ||
| 916 | if isinstance(self.reason, bytes): | ||
| 917 | # We attempt to decode utf-8 first because some servers | ||
| 918 | # choose to localize their reason strings. If the string | ||
| 919 | # isn't utf-8, we fall back to iso-8859-1 for all other | ||
| 920 | # encodings. (See PR #3538) | ||
| 921 | try: | ||
| 922 | reason = self.reason.decode('utf-8') | ||
| 923 | except UnicodeDecodeError: | ||
| 924 | reason = self.reason.decode('iso-8859-1') | ||
| 925 | else: | ||
| 926 | reason = self.reason | ||
| 927 | |||
| 928 | if 400 <= self.status_code < 500: | ||
| 929 | http_error_msg = u'%s Client Error: %s for url: %s' % (self.status_code, reason, self.url) | ||
| 930 | |||
| 931 | elif 500 <= self.status_code < 600: | ||
| 932 | http_error_msg = u'%s Server Error: %s for url: %s' % (self.status_code, reason, self.url) | ||
| 933 | |||
| 934 | if http_error_msg: | ||
| 935 | raise HTTPError(http_error_msg, response=self) | ||
| 936 | |||
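A usage sketch showing the error message format built above (illustrative URL); the raised exception carries the response:

    import requests
    from requests.exceptions import HTTPError

    try:
        resp = requests.get('https://example.com/maybe-missing')
        resp.raise_for_status()
    except HTTPError as exc:
        print(exc)                   # e.g. "404 Client Error: Not Found for url: ..."
        print(exc.response.status_code)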
| 937 | def close(self): | ||
| 938 | """Releases the connection back to the pool. Once this method has been | ||
| 939 | called the underlying ``raw`` object must not be accessed again. | ||
| 940 | |||
| 941 | *Note: Should not normally need to be called explicitly.* | ||
| 942 | """ | ||
| 943 | if not self._content_consumed: | ||
| 944 | self.raw.close() | ||
| 945 | |||
| 946 | release_conn = getattr(self.raw, 'release_conn', None) | ||
| 947 | if release_conn is not None: | ||
| 948 | release_conn() | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/packages.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/packages.py new file mode 100644 index 0000000..c91d9c7 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/packages.py | |||
| @@ -0,0 +1,16 @@ | |||
| 1 | import sys | ||
| 2 | |||
| 3 | # This code exists for backwards compatibility reasons. | ||
| 4 | # I don't like it either. Just look the other way. :) | ||
| 5 | |||
| 6 | for package in ('urllib3', 'idna', 'chardet'): | ||
| 7 | vendored_package = "pip._vendor." + package | ||
| 8 | locals()[package] = __import__(vendored_package) | ||
| 9 | # This traversal is apparently necessary such that the identities are | ||
| 10 | # preserved (requests.packages.urllib3.* is urllib3.*) | ||
| 11 | for mod in list(sys.modules): | ||
| 12 | if mod == vendored_package or mod.startswith(vendored_package + '.'): | ||
| 13 | unprefixed_mod = mod[len("pip._vendor."):] | ||
| 14 | sys.modules['pip._vendor.requests.packages.' + unprefixed_mod] = sys.modules[mod] | ||
| 15 | |||
| 16 | # Kinda cool, though, right? | ||
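The identity claim in the comment can be checked directly; a sketch assuming the vendored layout shown in this diff:

    import sys
    from pip._vendor.requests import packages  # runs the aliasing above

    assert (sys.modules['pip._vendor.requests.packages.urllib3']
            is sys.modules['pip._vendor.urllib3'])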
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/sessions.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/sessions.py new file mode 100644 index 0000000..d8eafa8 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/sessions.py | |||
| @@ -0,0 +1,737 @@ | |||
| 1 | # -*- coding: utf-8 -*- | ||
| 2 | |||
| 3 | """ | ||
| 4 | requests.session | ||
| 5 | ~~~~~~~~~~~~~~~~ | ||
| 6 | |||
| 7 | This module provides a Session object to manage and persist settings across | ||
| 8 | requests (cookies, auth, proxies). | ||
| 9 | """ | ||
| 10 | import os | ||
| 11 | import platform | ||
| 12 | import time | ||
| 13 | from collections import Mapping | ||
| 14 | from datetime import timedelta | ||
| 15 | |||
| 16 | from .auth import _basic_auth_str | ||
| 17 | from .compat import cookielib, is_py3, OrderedDict, urljoin, urlparse | ||
| 18 | from .cookies import ( | ||
| 19 | cookiejar_from_dict, extract_cookies_to_jar, RequestsCookieJar, merge_cookies) | ||
| 20 | from .models import Request, PreparedRequest, DEFAULT_REDIRECT_LIMIT | ||
| 21 | from .hooks import default_hooks, dispatch_hook | ||
| 22 | from ._internal_utils import to_native_string | ||
| 23 | from .utils import to_key_val_list, default_headers | ||
| 24 | from .exceptions import ( | ||
| 25 | TooManyRedirects, InvalidSchema, ChunkedEncodingError, ContentDecodingError) | ||
| 26 | |||
| 27 | from .structures import CaseInsensitiveDict | ||
| 28 | from .adapters import HTTPAdapter | ||
| 29 | |||
| 30 | from .utils import ( | ||
| 31 | requote_uri, get_environ_proxies, get_netrc_auth, should_bypass_proxies, | ||
| 32 | get_auth_from_url, rewind_body | ||
| 33 | ) | ||
| 34 | |||
| 35 | from .status_codes import codes | ||
| 36 | |||
| 37 | # formerly defined here, reexposed here for backward compatibility | ||
| 38 | from .models import REDIRECT_STATI | ||
| 39 | |||
| 40 | # Preferred clock, based on which one is more accurate on a given system. | ||
| 41 | if platform.system() == 'Windows': | ||
| 42 | try: # Python 3.3+ | ||
| 43 | preferred_clock = time.perf_counter | ||
| 44 | except AttributeError: # Earlier than Python 3. | ||
| 45 | preferred_clock = time.clock | ||
| 46 | else: | ||
| 47 | preferred_clock = time.time | ||
| 48 | |||
| 49 | |||
| 50 | def merge_setting(request_setting, session_setting, dict_class=OrderedDict): | ||
| 51 | """Determines appropriate setting for a given request, taking into account | ||
| 52 | the explicit setting on that request, and the setting in the session. If a | ||
| 53 | setting is a dictionary, they will be merged together using `dict_class` | ||
| 54 | """ | ||
| 55 | |||
| 56 | if session_setting is None: | ||
| 57 | return request_setting | ||
| 58 | |||
| 59 | if request_setting is None: | ||
| 60 | return session_setting | ||
| 61 | |||
| 62 | # Bypass if not a dictionary (e.g. verify) | ||
| 63 | if not ( | ||
| 64 | isinstance(session_setting, Mapping) and | ||
| 65 | isinstance(request_setting, Mapping) | ||
| 66 | ): | ||
| 67 | return request_setting | ||
| 68 | |||
| 69 | merged_setting = dict_class(to_key_val_list(session_setting)) | ||
| 70 | merged_setting.update(to_key_val_list(request_setting)) | ||
| 71 | |||
| 72 | # Remove keys that are set to None. Extract keys first to avoid altering | ||
| 73 | # the dictionary during iteration. | ||
| 74 | none_keys = [k for (k, v) in merged_setting.items() if v is None] | ||
| 75 | for key in none_keys: | ||
| 76 | del merged_setting[key] | ||
| 77 | |||
| 78 | return merged_setting | ||
| 79 | |||
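A quick sketch of the merge semantics (per-request values win, and a None value deletes the session's entry); header names and values are illustrative:

    from requests.sessions import merge_setting

    session_headers = {'Accept': 'application/json', 'X-Token': 'abc'}
    request_headers = {'X-Token': None, 'User-Agent': 'demo/1.0'}

    merged = merge_setting(request_headers, session_headers)
    assert merged == {'Accept': 'application/json', 'User-Agent': 'demo/1.0'}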
| 80 | |||
| 81 | def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict): | ||
| 82 | """Properly merges both requests and session hooks. | ||
| 83 | |||
| 84 | This is necessary because when request_hooks == {'response': []}, the | ||
| 85 | merge breaks Session hooks entirely. | ||
| 86 | """ | ||
| 87 | if session_hooks is None or session_hooks.get('response') == []: | ||
| 88 | return request_hooks | ||
| 89 | |||
| 90 | if request_hooks is None or request_hooks.get('response') == []: | ||
| 91 | return session_hooks | ||
| 92 | |||
| 93 | return merge_setting(request_hooks, session_hooks, dict_class) | ||
| 94 | |||
| 95 | |||
| 96 | class SessionRedirectMixin(object): | ||
| 97 | |||
| 98 | def get_redirect_target(self, resp): | ||
| 99 | """Receives a Response. Returns a redirect URI or ``None``""" | ||
| 100 | # Due to the nature of how requests processes redirects this method will | ||
| 101 | # be called at least once upon the original response and at least twice | ||
| 102 | # on each subsequent redirect response (if any). | ||
| 103 | # If a custom mixin is used to handle this logic, it may be advantageous | ||
| 104 | # to cache the redirect location onto the response object as a private | ||
| 105 | # attribute. | ||
| 106 | if resp.is_redirect: | ||
| 107 | location = resp.headers['location'] | ||
| 108 | # Currently the underlying http module on py3 decodes headers | ||
| 109 | # in latin1, but empirical evidence suggests that latin1 is very | ||
| 110 | # rarely used with non-ASCII characters in HTTP headers. | ||
| 111 | # It is more likely to get a UTF8 header than a latin1 one. | ||
| 112 | # This causes incorrect handling of UTF8 encoded location headers. | ||
| 113 | # To solve this, we re-encode the location in latin1. | ||
| 114 | if is_py3: | ||
| 115 | location = location.encode('latin1') | ||
| 116 | return to_native_string(location, 'utf8') | ||
| 117 | return None | ||
| 118 | |||
| 119 | def resolve_redirects(self, resp, req, stream=False, timeout=None, | ||
| 120 | verify=True, cert=None, proxies=None, yield_requests=False, **adapter_kwargs): | ||
| 121 | """Receives a Response. Returns a generator of Responses or Requests.""" | ||
| 122 | |||
| 123 | hist = [] # keep track of history | ||
| 124 | |||
| 125 | url = self.get_redirect_target(resp) | ||
| 126 | while url: | ||
| 127 | prepared_request = req.copy() | ||
| 128 | |||
| 129 | # Update history and keep track of redirects. | ||
| 130 | # resp.history must ignore the original request in this loop | ||
| 131 | hist.append(resp) | ||
| 132 | resp.history = hist[1:] | ||
| 133 | |||
| 134 | try: | ||
| 135 | resp.content # Consume socket so it can be released | ||
| 136 | except (ChunkedEncodingError, ContentDecodingError, RuntimeError): | ||
| 137 | resp.raw.read(decode_content=False) | ||
| 138 | |||
| 139 | if len(resp.history) >= self.max_redirects: | ||
| 140 | raise TooManyRedirects('Exceeded %s redirects.' % self.max_redirects, response=resp) | ||
| 141 | |||
| 142 | # Release the connection back into the pool. | ||
| 143 | resp.close() | ||
| 144 | |||
| 145 | # Handle redirection without scheme (see: RFC 1808 Section 4) | ||
| 146 | if url.startswith('//'): | ||
| 147 | parsed_rurl = urlparse(resp.url) | ||
| 148 | url = '%s:%s' % (to_native_string(parsed_rurl.scheme), url) | ||
| 149 | |||
| 150 | # The scheme should be lower case... | ||
| 151 | parsed = urlparse(url) | ||
| 152 | url = parsed.geturl() | ||
| 153 | |||
| 154 | # Facilitate relative 'location' headers, as allowed by RFC 7231. | ||
| 155 | # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource') | ||
| 156 | # Compliant with RFC3986, we percent encode the url. | ||
| 157 | if not parsed.netloc: | ||
| 158 | url = urljoin(resp.url, requote_uri(url)) | ||
| 159 | else: | ||
| 160 | url = requote_uri(url) | ||
| 161 | |||
| 162 | prepared_request.url = to_native_string(url) | ||
| 163 | |||
| 164 | self.rebuild_method(prepared_request, resp) | ||
| 165 | |||
| 166 | # https://github.com/requests/requests/issues/1084 | ||
| 167 | if resp.status_code not in (codes.temporary_redirect, codes.permanent_redirect): | ||
| 168 | # https://github.com/requests/requests/issues/3490 | ||
| 169 | purged_headers = ('Content-Length', 'Content-Type', 'Transfer-Encoding') | ||
| 170 | for header in purged_headers: | ||
| 171 | prepared_request.headers.pop(header, None) | ||
| 172 | prepared_request.body = None | ||
| 173 | |||
| 174 | headers = prepared_request.headers | ||
| 175 | try: | ||
| 176 | del headers['Cookie'] | ||
| 177 | except KeyError: | ||
| 178 | pass | ||
| 179 | |||
| 180 | # Extract any cookies sent on the response to the cookiejar | ||
| 181 | # in the new request. Because we've mutated our copied prepared | ||
| 182 | # request, use the old one that we haven't yet touched. | ||
| 183 | extract_cookies_to_jar(prepared_request._cookies, req, resp.raw) | ||
| 184 | merge_cookies(prepared_request._cookies, self.cookies) | ||
| 185 | prepared_request.prepare_cookies(prepared_request._cookies) | ||
| 186 | |||
| 187 | # Rebuild auth and proxy information. | ||
| 188 | proxies = self.rebuild_proxies(prepared_request, proxies) | ||
| 189 | self.rebuild_auth(prepared_request, resp) | ||
| 190 | |||
| 191 | # A failed tell() sets `_body_position` to `object()`. This non-None | ||
| 192 | # value ensures `rewindable` will be True, allowing us to raise an | ||
| 193 | # UnrewindableBodyError, instead of hanging the connection. | ||
| 194 | rewindable = ( | ||
| 195 | prepared_request._body_position is not None and | ||
| 196 | ('Content-Length' in headers or 'Transfer-Encoding' in headers) | ||
| 197 | ) | ||
| 198 | |||
| 199 | # Attempt to rewind consumed file-like object. | ||
| 200 | if rewindable: | ||
| 201 | rewind_body(prepared_request) | ||
| 202 | |||
| 203 | # Override the original request. | ||
| 204 | req = prepared_request | ||
| 205 | |||
| 206 | if yield_requests: | ||
| 207 | yield req | ||
| 208 | else: | ||
| 209 | |||
| 210 | resp = self.send( | ||
| 211 | req, | ||
| 212 | stream=stream, | ||
| 213 | timeout=timeout, | ||
| 214 | verify=verify, | ||
| 215 | cert=cert, | ||
| 216 | proxies=proxies, | ||
| 217 | allow_redirects=False, | ||
| 218 | **adapter_kwargs | ||
| 219 | ) | ||
| 220 | |||
| 221 | extract_cookies_to_jar(self.cookies, prepared_request, resp.raw) | ||
| 222 | |||
| 223 | # extract redirect url, if any, for the next loop | ||
| 224 | url = self.get_redirect_target(resp) | ||
| 225 | yield resp | ||
| 226 | |||
| 227 | def rebuild_auth(self, prepared_request, response): | ||
| 228 | """When being redirected we may want to strip authentication from the | ||
| 229 | request to avoid leaking credentials. This method intelligently removes | ||
| 230 | and reapplies authentication where possible to avoid credential loss. | ||
| 231 | """ | ||
| 232 | headers = prepared_request.headers | ||
| 233 | url = prepared_request.url | ||
| 234 | |||
| 235 | if 'Authorization' in headers: | ||
| 236 | # If we get redirected to a new host, we should strip out any | ||
| 237 | # authentication headers. | ||
| 238 | original_parsed = urlparse(response.request.url) | ||
| 239 | redirect_parsed = urlparse(url) | ||
| 240 | |||
| 241 | if (original_parsed.hostname != redirect_parsed.hostname): | ||
| 242 | del headers['Authorization'] | ||
| 243 | |||
| 244 | # .netrc might have more auth for us on our new host. | ||
| 245 | new_auth = get_netrc_auth(url) if self.trust_env else None | ||
| 246 | if new_auth is not None: | ||
| 247 | prepared_request.prepare_auth(new_auth) | ||
| 248 | |||
| 249 | return | ||
| 250 | |||
| 251 | def rebuild_proxies(self, prepared_request, proxies): | ||
| 252 | """This method re-evaluates the proxy configuration by considering the | ||
| 253 | environment variables. If we are redirected to a URL covered by | ||
| 254 | NO_PROXY, we strip the proxy configuration. Otherwise, we set missing | ||
| 255 | proxy keys for this URL (in case they were stripped by a previous | ||
| 256 | redirect). | ||
| 257 | |||
| 258 | This method also replaces the Proxy-Authorization header where | ||
| 259 | necessary. | ||
| 260 | |||
| 261 | :rtype: dict | ||
| 262 | """ | ||
| 263 | proxies = proxies if proxies is not None else {} | ||
| 264 | headers = prepared_request.headers | ||
| 265 | url = prepared_request.url | ||
| 266 | scheme = urlparse(url).scheme | ||
| 267 | new_proxies = proxies.copy() | ||
| 268 | no_proxy = proxies.get('no_proxy') | ||
| 269 | |||
| 270 | bypass_proxy = should_bypass_proxies(url, no_proxy=no_proxy) | ||
| 271 | if self.trust_env and not bypass_proxy: | ||
| 272 | environ_proxies = get_environ_proxies(url, no_proxy=no_proxy) | ||
| 273 | |||
| 274 | proxy = environ_proxies.get(scheme, environ_proxies.get('all')) | ||
| 275 | |||
| 276 | if proxy: | ||
| 277 | new_proxies.setdefault(scheme, proxy) | ||
| 278 | |||
| 279 | if 'Proxy-Authorization' in headers: | ||
| 280 | del headers['Proxy-Authorization'] | ||
| 281 | |||
| 282 | try: | ||
| 283 | username, password = get_auth_from_url(new_proxies[scheme]) | ||
| 284 | except KeyError: | ||
| 285 | username, password = None, None | ||
| 286 | |||
| 287 | if username and password: | ||
| 288 | headers['Proxy-Authorization'] = _basic_auth_str(username, password) | ||
| 289 | |||
| 290 | return new_proxies | ||
| 291 | |||
| 292 | def rebuild_method(self, prepared_request, response): | ||
| 293 | """When being redirected we may want to change the method of the request | ||
| 294 | based on certain specs or browser behavior. | ||
| 295 | """ | ||
| 296 | method = prepared_request.method | ||
| 297 | |||
| 298 | # http://tools.ietf.org/html/rfc7231#section-6.4.4 | ||
| 299 | if response.status_code == codes.see_other and method != 'HEAD': | ||
| 300 | method = 'GET' | ||
| 301 | |||
| 302 | # Do what the browsers do, despite standards... | ||
| 303 | # First, turn 302s into GETs. | ||
| 304 | if response.status_code == codes.found and method != 'HEAD': | ||
| 305 | method = 'GET' | ||
| 306 | |||
| 307 | # Second, if a POST is responded to with a 301, turn it into a GET. | ||
| 308 | # This bizarre behaviour is explained in Issue 1704. | ||
| 309 | if response.status_code == codes.moved and method == 'POST': | ||
| 310 | method = 'GET' | ||
| 311 | |||
| 312 | prepared_request.method = method | ||
| 313 | |||
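A sketch of the net effect, assuming an httpbin.org-style echo service that honors a ``status_code`` parameter (the endpoint is illustrative):

    import requests

    # A 302 answer to a POST is refetched as GET (browser behavior):
    resp = requests.post('https://httpbin.org/redirect-to?url=/get&status_code=302')
    assert resp.request.method == 'GET'       # method of the final request

    # 307/308 preserve the method (and body):
    resp = requests.post('https://httpbin.org/redirect-to?url=/post&status_code=307',
                         data={'k': 'v'})
    assert resp.request.method == 'POST'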
| 314 | |||
| 315 | class Session(SessionRedirectMixin): | ||
| 316 | """A Requests session. | ||
| 317 | |||
| 318 | Provides cookie persistence, connection-pooling, and configuration. | ||
| 319 | |||
| 320 | Basic Usage:: | ||
| 321 | |||
| 322 | >>> import requests | ||
| 323 | >>> s = requests.Session() | ||
| 324 | >>> s.get('http://httpbin.org/get') | ||
| 325 | <Response [200]> | ||
| 326 | |||
| 327 | Or as a context manager:: | ||
| 328 | |||
| 329 | >>> with requests.Session() as s: | ||
| 330 | >>> s.get('http://httpbin.org/get') | ||
| 331 | <Response [200]> | ||
| 332 | """ | ||
| 333 | |||
| 334 | __attrs__ = [ | ||
| 335 | 'headers', 'cookies', 'auth', 'proxies', 'hooks', 'params', 'verify', | ||
| 336 | 'cert', 'prefetch', 'adapters', 'stream', 'trust_env', | ||
| 337 | 'max_redirects', | ||
| 338 | ] | ||
| 339 | |||
| 340 | def __init__(self): | ||
| 341 | |||
| 342 | #: A case-insensitive dictionary of headers to be sent on each | ||
| 343 | #: :class:`Request <Request>` sent from this | ||
| 344 | #: :class:`Session <Session>`. | ||
| 345 | self.headers = default_headers() | ||
| 346 | |||
| 347 | #: Default Authentication tuple or object to attach to | ||
| 348 | #: :class:`Request <Request>`. | ||
| 349 | self.auth = None | ||
| 350 | |||
| 351 | #: Dictionary mapping protocol or protocol and host to the URL of the proxy | ||
| 352 | #: (e.g. {'http': 'foo.bar:3128', 'http://host.name': 'foo.bar:4012'}) to | ||
| 353 | #: be used on each :class:`Request <Request>`. | ||
| 354 | self.proxies = {} | ||
| 355 | |||
| 356 | #: Event-handling hooks. | ||
| 357 | self.hooks = default_hooks() | ||
| 358 | |||
| 359 | #: Dictionary of querystring data to attach to each | ||
| 360 | #: :class:`Request <Request>`. The dictionary values may be lists for | ||
| 361 | #: representing multivalued query parameters. | ||
| 362 | self.params = {} | ||
| 363 | |||
| 364 | #: Stream response content default. | ||
| 365 | self.stream = False | ||
| 366 | |||
| 367 | #: SSL Verification default. | ||
| 368 | self.verify = True | ||
| 369 | |||
| 370 | #: SSL client certificate default, if String, path to ssl client | ||
| 371 | #: cert file (.pem). If Tuple, ('cert', 'key') pair. | ||
| 372 | self.cert = None | ||
| 373 | |||
| 374 | #: Maximum number of redirects allowed. If the request exceeds this | ||
| 375 | #: limit, a :class:`TooManyRedirects` exception is raised. | ||
| 376 | #: This defaults to requests.models.DEFAULT_REDIRECT_LIMIT, which is | ||
| 377 | #: 30. | ||
| 378 | self.max_redirects = DEFAULT_REDIRECT_LIMIT | ||
| 379 | |||
| 380 | #: Trust environment settings for proxy configuration, default | ||
| 381 | #: authentication and similar. | ||
| 382 | self.trust_env = True | ||
| 383 | |||
| 384 | #: A CookieJar containing all currently outstanding cookies set on this | ||
| 385 | #: session. By default it is a | ||
| 386 | #: :class:`RequestsCookieJar <requests.cookies.RequestsCookieJar>`, but | ||
| 387 | #: may be any other ``cookielib.CookieJar`` compatible object. | ||
| 388 | self.cookies = cookiejar_from_dict({}) | ||
| 389 | |||
| 390 | # Default connection adapters. | ||
| 391 | self.adapters = OrderedDict() | ||
| 392 | self.mount('https://', HTTPAdapter()) | ||
| 393 | self.mount('http://', HTTPAdapter()) | ||
| 394 | |||
| 395 | def __enter__(self): | ||
| 396 | return self | ||
| 397 | |||
| 398 | def __exit__(self, *args): | ||
| 399 | self.close() | ||
| 400 | |||
| 401 | def prepare_request(self, request): | ||
| 402 | """Constructs a :class:`PreparedRequest <PreparedRequest>` for | ||
| 403 | transmission and returns it. The :class:`PreparedRequest` has settings | ||
| 404 | merged from the :class:`Request <Request>` instance and those of the | ||
| 405 | :class:`Session`. | ||
| 406 | |||
| 407 | :param request: :class:`Request` instance to prepare with this | ||
| 408 | session's settings. | ||
| 409 | :rtype: requests.PreparedRequest | ||
| 410 | """ | ||
| 411 | cookies = request.cookies or {} | ||
| 412 | |||
| 413 | # Bootstrap CookieJar. | ||
| 414 | if not isinstance(cookies, cookielib.CookieJar): | ||
| 415 | cookies = cookiejar_from_dict(cookies) | ||
| 416 | |||
| 417 | # Merge with session cookies | ||
| 418 | merged_cookies = merge_cookies( | ||
| 419 | merge_cookies(RequestsCookieJar(), self.cookies), cookies) | ||
| 420 | |||
| 421 | # Set environment's basic authentication if not explicitly set. | ||
| 422 | auth = request.auth | ||
| 423 | if self.trust_env and not auth and not self.auth: | ||
| 424 | auth = get_netrc_auth(request.url) | ||
| 425 | |||
| 426 | p = PreparedRequest() | ||
| 427 | p.prepare( | ||
| 428 | method=request.method.upper(), | ||
| 429 | url=request.url, | ||
| 430 | files=request.files, | ||
| 431 | data=request.data, | ||
| 432 | json=request.json, | ||
| 433 | headers=merge_setting(request.headers, self.headers, dict_class=CaseInsensitiveDict), | ||
| 434 | params=merge_setting(request.params, self.params), | ||
| 435 | auth=merge_setting(auth, self.auth), | ||
| 436 | cookies=merged_cookies, | ||
| 437 | hooks=merge_hooks(request.hooks, self.hooks), | ||
| 438 | ) | ||
| 439 | return p | ||
| 440 | |||
| 441 | def request(self, method, url, | ||
| 442 | params=None, data=None, headers=None, cookies=None, files=None, | ||
| 443 | auth=None, timeout=None, allow_redirects=True, proxies=None, | ||
| 444 | hooks=None, stream=None, verify=None, cert=None, json=None): | ||
| 445 | """Constructs a :class:`Request <Request>`, prepares it and sends it. | ||
| 446 | Returns :class:`Response <Response>` object. | ||
| 447 | |||
| 448 | :param method: method for the new :class:`Request` object. | ||
| 449 | :param url: URL for the new :class:`Request` object. | ||
| 450 | :param params: (optional) Dictionary or bytes to be sent in the query | ||
| 451 | string for the :class:`Request`. | ||
| 452 | :param data: (optional) Dictionary, bytes, or file-like object to send | ||
| 453 | in the body of the :class:`Request`. | ||
| 454 | :param json: (optional) json to send in the body of the | ||
| 455 | :class:`Request`. | ||
| 456 | :param headers: (optional) Dictionary of HTTP Headers to send with the | ||
| 457 | :class:`Request`. | ||
| 458 | :param cookies: (optional) Dict or CookieJar object to send with the | ||
| 459 | :class:`Request`. | ||
| 460 | :param files: (optional) Dictionary of ``'filename': file-like-objects`` | ||
| 461 | for multipart encoding upload. | ||
| 462 | :param auth: (optional) Auth tuple or callable to enable | ||
| 463 | Basic/Digest/Custom HTTP Auth. | ||
| 464 | :param timeout: (optional) How long to wait for the server to send | ||
| 465 | data before giving up, as a float, or a :ref:`(connect timeout, | ||
| 466 | read timeout) <timeouts>` tuple. | ||
| 467 | :type timeout: float or tuple | ||
| 468 | :param allow_redirects: (optional) Set to True by default. | ||
| 469 | :type allow_redirects: bool | ||
| 470 | :param proxies: (optional) Dictionary mapping protocol or protocol and | ||
| 471 | hostname to the URL of the proxy. | ||
| 472 | :param stream: (optional) if ``False``, the response content will be | ||
| 473 | immediately downloaded. Defaults to ``False``. | ||
| 474 | :param verify: (optional) Either a boolean, in which case it controls whether we verify | ||
| 475 | the server's TLS certificate, or a string, in which case it must be a path | ||
| 476 | to a CA bundle to use. Defaults to ``True``. | ||
| 477 | :param cert: (optional) if String, path to ssl client cert file (.pem). | ||
| 478 | If Tuple, ('cert', 'key') pair. | ||
| 479 | :rtype: requests.Response | ||
| 480 | """ | ||
| 481 | # Create the Request. | ||
| 482 | req = Request( | ||
| 483 | method=method.upper(), | ||
| 484 | url=url, | ||
| 485 | headers=headers, | ||
| 486 | files=files, | ||
| 487 | data=data or {}, | ||
| 488 | json=json, | ||
| 489 | params=params or {}, | ||
| 490 | auth=auth, | ||
| 491 | cookies=cookies, | ||
| 492 | hooks=hooks, | ||
| 493 | ) | ||
| 494 | prep = self.prepare_request(req) | ||
| 495 | |||
| 496 | proxies = proxies or {} | ||
| 497 | |||
| 498 | settings = self.merge_environment_settings( | ||
| 499 | prep.url, proxies, stream, verify, cert | ||
| 500 | ) | ||
| 501 | |||
| 502 | # Send the request. | ||
| 503 | send_kwargs = { | ||
| 504 | 'timeout': timeout, | ||
| 505 | 'allow_redirects': allow_redirects, | ||
| 506 | } | ||
| 507 | send_kwargs.update(settings) | ||
| 508 | resp = self.send(prep, **send_kwargs) | ||
| 509 | |||
| 510 | return resp | ||
| 511 | |||
| 512 | def get(self, url, **kwargs): | ||
| 513 | r"""Sends a GET request. Returns :class:`Response` object. | ||
| 514 | |||
| 515 | :param url: URL for the new :class:`Request` object. | ||
| 516 | :param \*\*kwargs: Optional arguments that ``request`` takes. | ||
| 517 | :rtype: requests.Response | ||
| 518 | """ | ||
| 519 | |||
| 520 | kwargs.setdefault('allow_redirects', True) | ||
| 521 | return self.request('GET', url, **kwargs) | ||
| 522 | |||
| 523 | def options(self, url, **kwargs): | ||
| 524 | r"""Sends a OPTIONS request. Returns :class:`Response` object. | ||
| 525 | |||
| 526 | :param url: URL for the new :class:`Request` object. | ||
| 527 | :param \*\*kwargs: Optional arguments that ``request`` takes. | ||
| 528 | :rtype: requests.Response | ||
| 529 | """ | ||
| 530 | |||
| 531 | kwargs.setdefault('allow_redirects', True) | ||
| 532 | return self.request('OPTIONS', url, **kwargs) | ||
| 533 | |||
| 534 | def head(self, url, **kwargs): | ||
| 535 | r"""Sends a HEAD request. Returns :class:`Response` object. | ||
| 536 | |||
| 537 | :param url: URL for the new :class:`Request` object. | ||
| 538 | :param \*\*kwargs: Optional arguments that ``request`` takes. | ||
| 539 | :rtype: requests.Response | ||
| 540 | """ | ||
| 541 | |||
| 542 | kwargs.setdefault('allow_redirects', False) | ||
| 543 | return self.request('HEAD', url, **kwargs) | ||
| 544 | |||
| 545 | def post(self, url, data=None, json=None, **kwargs): | ||
| 546 | r"""Sends a POST request. Returns :class:`Response` object. | ||
| 547 | |||
| 548 | :param url: URL for the new :class:`Request` object. | ||
| 549 | :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. | ||
| 550 | :param json: (optional) json to send in the body of the :class:`Request`. | ||
| 551 | :param \*\*kwargs: Optional arguments that ``request`` takes. | ||
| 552 | :rtype: requests.Response | ||
| 553 | """ | ||
| 554 | |||
| 555 | return self.request('POST', url, data=data, json=json, **kwargs) | ||
| 556 | |||
| 557 | def put(self, url, data=None, **kwargs): | ||
| 558 | r"""Sends a PUT request. Returns :class:`Response` object. | ||
| 559 | |||
| 560 | :param url: URL for the new :class:`Request` object. | ||
| 561 | :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. | ||
| 562 | :param \*\*kwargs: Optional arguments that ``request`` takes. | ||
| 563 | :rtype: requests.Response | ||
| 564 | """ | ||
| 565 | |||
| 566 | return self.request('PUT', url, data=data, **kwargs) | ||
| 567 | |||
| 568 | def patch(self, url, data=None, **kwargs): | ||
| 569 | r"""Sends a PATCH request. Returns :class:`Response` object. | ||
| 570 | |||
| 571 | :param url: URL for the new :class:`Request` object. | ||
| 572 | :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. | ||
| 573 | :param \*\*kwargs: Optional arguments that ``request`` takes. | ||
| 574 | :rtype: requests.Response | ||
| 575 | """ | ||
| 576 | |||
| 577 | return self.request('PATCH', url, data=data, **kwargs) | ||
| 578 | |||
| 579 | def delete(self, url, **kwargs): | ||
| 580 | r"""Sends a DELETE request. Returns :class:`Response` object. | ||
| 581 | |||
| 582 | :param url: URL for the new :class:`Request` object. | ||
| 583 | :param \*\*kwargs: Optional arguments that ``request`` takes. | ||
| 584 | :rtype: requests.Response | ||
| 585 | """ | ||
| 586 | |||
| 587 | return self.request('DELETE', url, **kwargs) | ||
| 588 | |||
| 589 | def send(self, request, **kwargs): | ||
| 590 | """Send a given PreparedRequest. | ||
| 591 | |||
| 592 | :rtype: requests.Response | ||
| 593 | """ | ||
| 594 | # Set defaults that the hooks can utilize to ensure they always have | ||
| 595 | # the correct parameters to reproduce the previous request. | ||
| 596 | kwargs.setdefault('stream', self.stream) | ||
| 597 | kwargs.setdefault('verify', self.verify) | ||
| 598 | kwargs.setdefault('cert', self.cert) | ||
| 599 | kwargs.setdefault('proxies', self.proxies) | ||
| 600 | |||
| 601 | # It's possible that users might accidentally send a Request object. | ||
| 602 | # Guard against that specific failure case. | ||
| 603 | if isinstance(request, Request): | ||
| 604 | raise ValueError('You can only send PreparedRequests.') | ||
| 605 | |||
| 606 | # Set up variables needed for resolve_redirects and dispatching of hooks | ||
| 607 | allow_redirects = kwargs.pop('allow_redirects', True) | ||
| 608 | stream = kwargs.get('stream') | ||
| 609 | hooks = request.hooks | ||
| 610 | |||
| 611 | # Get the appropriate adapter to use | ||
| 612 | adapter = self.get_adapter(url=request.url) | ||
| 613 | |||
| 614 | # Start time (approximately) of the request | ||
| 615 | start = preferred_clock() | ||
| 616 | |||
| 617 | # Send the request | ||
| 618 | r = adapter.send(request, **kwargs) | ||
| 619 | |||
| 620 | # Total elapsed time of the request (approximately) | ||
| 621 | elapsed = preferred_clock() - start | ||
| 622 | r.elapsed = timedelta(seconds=elapsed) | ||
| 623 | |||
| 624 | # Response manipulation hooks | ||
| 625 | r = dispatch_hook('response', hooks, r, **kwargs) | ||
| 626 | |||
| 627 | # Persist cookies | ||
| 628 | if r.history: | ||
| 629 | |||
| 630 | # If the hooks create history then we want those cookies too | ||
| 631 | for resp in r.history: | ||
| 632 | extract_cookies_to_jar(self.cookies, resp.request, resp.raw) | ||
| 633 | |||
| 634 | extract_cookies_to_jar(self.cookies, request, r.raw) | ||
| 635 | |||
| 636 | # Redirect resolving generator. | ||
| 637 | gen = self.resolve_redirects(r, request, **kwargs) | ||
| 638 | |||
| 639 | # Resolve redirects if allowed. | ||
| 640 | history = [resp for resp in gen] if allow_redirects else [] | ||
| 641 | |||
| 642 | # Shuffle things around if there's history. | ||
| 643 | if history: | ||
| 644 | # Insert the first (original) request at the start | ||
| 645 | history.insert(0, r) | ||
| 646 | # Get the last request made | ||
| 647 | r = history.pop() | ||
| 648 | r.history = history | ||
| 649 | |||
| 650 | # If redirects aren't being followed, store the next request on the Response for Response.next. | ||
| 651 | if not allow_redirects: | ||
| 652 | try: | ||
| 653 | r._next = next(self.resolve_redirects(r, request, yield_requests=True, **kwargs)) | ||
| 654 | except StopIteration: | ||
| 655 | pass | ||
| 656 | |||
| 657 | if not stream: | ||
| 658 | r.content | ||
| 659 | |||
| 660 | return r | ||
| 661 | |||
| 662 | def merge_environment_settings(self, url, proxies, stream, verify, cert): | ||
| 663 | """ | ||
| 664 | Check the environment and merge it with some settings. | ||
| 665 | |||
| 666 | :rtype: dict | ||
| 667 | """ | ||
| 668 | # Gather clues from the surrounding environment. | ||
| 669 | if self.trust_env: | ||
| 670 | # Set environment's proxies. | ||
| 671 | no_proxy = proxies.get('no_proxy') if proxies is not None else None | ||
| 672 | env_proxies = get_environ_proxies(url, no_proxy=no_proxy) | ||
| 673 | for (k, v) in env_proxies.items(): | ||
| 674 | proxies.setdefault(k, v) | ||
| 675 | |||
| 676 | # Look for requests environment configuration and be compatible | ||
| 677 | # with cURL. | ||
| 678 | if verify is True or verify is None: | ||
| 679 | verify = (os.environ.get('REQUESTS_CA_BUNDLE') or | ||
| 680 | os.environ.get('CURL_CA_BUNDLE')) | ||
| 681 | |||
| 682 | # Merge all the kwargs. | ||
| 683 | proxies = merge_setting(proxies, self.proxies) | ||
| 684 | stream = merge_setting(stream, self.stream) | ||
| 685 | verify = merge_setting(verify, self.verify) | ||
| 686 | cert = merge_setting(cert, self.cert) | ||
| 687 | |||
| 688 | return {'verify': verify, 'proxies': proxies, 'stream': stream, | ||
| 689 | 'cert': cert} | ||
| 690 | |||
| 691 | def get_adapter(self, url): | ||
| 692 | """ | ||
| 693 | Returns the appropriate connection adapter for the given URL. | ||
| 694 | |||
| 695 | :rtype: requests.adapters.BaseAdapter | ||
| 696 | """ | ||
| 697 | for (prefix, adapter) in self.adapters.items(): | ||
| 698 | |||
| 699 | if url.lower().startswith(prefix): | ||
| 700 | return adapter | ||
| 701 | |||
| 702 | # Nothing matches :-/ | ||
| 703 | raise InvalidSchema("No connection adapters were found for '%s'" % url) | ||
| 704 | |||
| 705 | def close(self): | ||
| 706 | """Closes all adapters and as such the session""" | ||
| 707 | for v in self.adapters.values(): | ||
| 708 | v.close() | ||
| 709 | |||
| 710 | def mount(self, prefix, adapter): | ||
| 711 | """Registers a connection adapter to a prefix. | ||
| 712 | |||
| 713 | Adapters are sorted in descending order by prefix length. | ||
| 714 | """ | ||
| 715 | self.adapters[prefix] = adapter | ||
| 716 | keys_to_move = [k for k in self.adapters if len(k) < len(prefix)] | ||
| 717 | |||
| 718 | for key in keys_to_move: | ||
| 719 | self.adapters[key] = self.adapters.pop(key) | ||
| 720 | |||
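A sketch of the longest-prefix-wins behavior this reordering buys; the host is illustrative:

    import requests
    from requests.adapters import HTTPAdapter

    s = requests.Session()
    custom = HTTPAdapter(max_retries=5)
    s.mount('https://internal.example/', custom)

    # The host-specific prefix shadows the default 'https://' adapter:
    assert s.get_adapter('https://internal.example/api') is custom
    assert s.get_adapter('https://elsewhere.example/') is not custom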
| 721 | def __getstate__(self): | ||
| 722 | state = dict((attr, getattr(self, attr, None)) for attr in self.__attrs__) | ||
| 723 | return state | ||
| 724 | |||
| 725 | def __setstate__(self, state): | ||
| 726 | for attr, value in state.items(): | ||
| 727 | setattr(self, attr, value) | ||
| 728 | |||
| 729 | |||
| 730 | def session(): | ||
| 731 | """ | ||
| 732 | Returns a :class:`Session` for context-management. | ||
| 733 | |||
| 734 | :rtype: Session | ||
| 735 | """ | ||
| 736 | |||
| 737 | return Session() | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/status_codes.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/status_codes.py new file mode 100644 index 0000000..85d9bbc --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/status_codes.py | |||
| @@ -0,0 +1,91 @@ | |||
| 1 | # -*- coding: utf-8 -*- | ||
| 2 | |||
| 3 | from .structures import LookupDict | ||
| 4 | |||
| 5 | _codes = { | ||
| 6 | |||
| 7 | # Informational. | ||
| 8 | 100: ('continue',), | ||
| 9 | 101: ('switching_protocols',), | ||
| 10 | 102: ('processing',), | ||
| 11 | 103: ('checkpoint',), | ||
| 12 | 122: ('uri_too_long', 'request_uri_too_long'), | ||
| 13 | 200: ('ok', 'okay', 'all_ok', 'all_okay', 'all_good', '\\o/', '✓'), | ||
| 14 | 201: ('created',), | ||
| 15 | 202: ('accepted',), | ||
| 16 | 203: ('non_authoritative_info', 'non_authoritative_information'), | ||
| 17 | 204: ('no_content',), | ||
| 18 | 205: ('reset_content', 'reset'), | ||
| 19 | 206: ('partial_content', 'partial'), | ||
| 20 | 207: ('multi_status', 'multiple_status', 'multi_stati', 'multiple_stati'), | ||
| 21 | 208: ('already_reported',), | ||
| 22 | 226: ('im_used',), | ||
| 23 | |||
| 24 | # Redirection. | ||
| 25 | 300: ('multiple_choices',), | ||
| 26 | 301: ('moved_permanently', 'moved', '\\o-'), | ||
| 27 | 302: ('found',), | ||
| 28 | 303: ('see_other', 'other'), | ||
| 29 | 304: ('not_modified',), | ||
| 30 | 305: ('use_proxy',), | ||
| 31 | 306: ('switch_proxy',), | ||
| 32 | 307: ('temporary_redirect', 'temporary_moved', 'temporary'), | ||
| 33 | 308: ('permanent_redirect', | ||
| 34 | 'resume_incomplete', 'resume',), # These 2 to be removed in 3.0 | ||
| 35 | |||
| 36 | # Client Error. | ||
| 37 | 400: ('bad_request', 'bad'), | ||
| 38 | 401: ('unauthorized',), | ||
| 39 | 402: ('payment_required', 'payment'), | ||
| 40 | 403: ('forbidden',), | ||
| 41 | 404: ('not_found', '-o-'), | ||
| 42 | 405: ('method_not_allowed', 'not_allowed'), | ||
| 43 | 406: ('not_acceptable',), | ||
| 44 | 407: ('proxy_authentication_required', 'proxy_auth', 'proxy_authentication'), | ||
| 45 | 408: ('request_timeout', 'timeout'), | ||
| 46 | 409: ('conflict',), | ||
| 47 | 410: ('gone',), | ||
| 48 | 411: ('length_required',), | ||
| 49 | 412: ('precondition_failed', 'precondition'), | ||
| 50 | 413: ('request_entity_too_large',), | ||
| 51 | 414: ('request_uri_too_large',), | ||
| 52 | 415: ('unsupported_media_type', 'unsupported_media', 'media_type'), | ||
| 53 | 416: ('requested_range_not_satisfiable', 'requested_range', 'range_not_satisfiable'), | ||
| 54 | 417: ('expectation_failed',), | ||
| 55 | 418: ('im_a_teapot', 'teapot', 'i_am_a_teapot'), | ||
| 56 | 421: ('misdirected_request',), | ||
| 57 | 422: ('unprocessable_entity', 'unprocessable'), | ||
| 58 | 423: ('locked',), | ||
| 59 | 424: ('failed_dependency', 'dependency'), | ||
| 60 | 425: ('unordered_collection', 'unordered'), | ||
| 61 | 426: ('upgrade_required', 'upgrade'), | ||
| 62 | 428: ('precondition_required', 'precondition'), | ||
| 63 | 429: ('too_many_requests', 'too_many'), | ||
| 64 | 431: ('header_fields_too_large', 'fields_too_large'), | ||
| 65 | 444: ('no_response', 'none'), | ||
| 66 | 449: ('retry_with', 'retry'), | ||
| 67 | 450: ('blocked_by_windows_parental_controls', 'parental_controls'), | ||
| 68 | 451: ('unavailable_for_legal_reasons', 'legal_reasons'), | ||
| 69 | 499: ('client_closed_request',), | ||
| 70 | |||
| 71 | # Server Error. | ||
| 72 | 500: ('internal_server_error', 'server_error', '/o\\', '✗'), | ||
| 73 | 501: ('not_implemented',), | ||
| 74 | 502: ('bad_gateway',), | ||
| 75 | 503: ('service_unavailable', 'unavailable'), | ||
| 76 | 504: ('gateway_timeout',), | ||
| 77 | 505: ('http_version_not_supported', 'http_version'), | ||
| 78 | 506: ('variant_also_negotiates',), | ||
| 79 | 507: ('insufficient_storage',), | ||
| 80 | 509: ('bandwidth_limit_exceeded', 'bandwidth'), | ||
| 81 | 510: ('not_extended',), | ||
| 82 | 511: ('network_authentication_required', 'network_auth', 'network_authentication'), | ||
| 83 | } | ||
| 84 | |||
| 85 | codes = LookupDict(name='status_codes') | ||
| 86 | |||
| 87 | for code, titles in _codes.items(): | ||
| 88 | for title in titles: | ||
| 89 | setattr(codes, title, code) | ||
| 90 | if not title.startswith(('\\', '/')): | ||
| 91 | setattr(codes, title.upper(), code) | ||
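A sketch of the lookups this loop enables, via the public package:

    from requests.status_codes import codes

    assert codes.ok == codes.OK == 200         # each alias gets an UPPERCASE twin
    assert codes.teapot == 418
    assert codes['temporary_redirect'] == 307  # LookupDict also supports indexing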
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/structures.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/structures.py new file mode 100644 index 0000000..ce775ba --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/structures.py | |||
| @@ -0,0 +1,105 @@ | |||
| 1 | # -*- coding: utf-8 -*- | ||
| 2 | |||
| 3 | """ | ||
| 4 | requests.structures | ||
| 5 | ~~~~~~~~~~~~~~~~~~~ | ||
| 6 | |||
| 7 | Data structures that power Requests. | ||
| 8 | """ | ||
| 9 | |||
| 10 | import collections | ||
| 11 | |||
| 12 | from .compat import OrderedDict | ||
| 13 | |||
| 14 | |||
| 15 | class CaseInsensitiveDict(collections.MutableMapping): | ||
| 16 | """A case-insensitive ``dict``-like object. | ||
| 17 | |||
| 18 | Implements all methods and operations of | ||
| 19 | ``collections.MutableMapping`` as well as dict's ``copy``. Also | ||
| 20 | provides ``lower_items``. | ||
| 21 | |||
| 22 | All keys are expected to be strings. The structure remembers the | ||
| 23 | case of the last key to be set, and ``iter(instance)``, | ||
| 24 | ``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()`` | ||
| 25 | will contain case-sensitive keys. However, querying and contains | ||
| 26 | testing is case insensitive:: | ||
| 27 | |||
| 28 | cid = CaseInsensitiveDict() | ||
| 29 | cid['Accept'] = 'application/json' | ||
| 30 | cid['aCCEPT'] == 'application/json' # True | ||
| 31 | list(cid) == ['Accept'] # True | ||
| 32 | |||
| 33 | For example, ``headers['content-encoding']`` will return the | ||
| 34 | value of a ``'Content-Encoding'`` response header, regardless | ||
| 35 | of how the header name was originally stored. | ||
| 36 | |||
| 37 | If the constructor, ``.update``, or equality comparison | ||
| 38 | operations are given keys that have equal ``.lower()``s, the | ||
| 39 | behavior is undefined. | ||
| 40 | """ | ||
| 41 | |||
| 42 | def __init__(self, data=None, **kwargs): | ||
| 43 | self._store = OrderedDict() | ||
| 44 | if data is None: | ||
| 45 | data = {} | ||
| 46 | self.update(data, **kwargs) | ||
| 47 | |||
| 48 | def __setitem__(self, key, value): | ||
| 49 | # Use the lowercased key for lookups, but store the actual | ||
| 50 | # key alongside the value. | ||
| 51 | self._store[key.lower()] = (key, value) | ||
| 52 | |||
| 53 | def __getitem__(self, key): | ||
| 54 | return self._store[key.lower()][1] | ||
| 55 | |||
| 56 | def __delitem__(self, key): | ||
| 57 | del self._store[key.lower()] | ||
| 58 | |||
| 59 | def __iter__(self): | ||
| 60 | return (casedkey for casedkey, mappedvalue in self._store.values()) | ||
| 61 | |||
| 62 | def __len__(self): | ||
| 63 | return len(self._store) | ||
| 64 | |||
| 65 | def lower_items(self): | ||
| 66 | """Like iteritems(), but with all lowercase keys.""" | ||
| 67 | return ( | ||
| 68 | (lowerkey, keyval[1]) | ||
| 69 | for (lowerkey, keyval) | ||
| 70 | in self._store.items() | ||
| 71 | ) | ||
| 72 | |||
| 73 | def __eq__(self, other): | ||
| 74 | if isinstance(other, collections.Mapping): | ||
| 75 | other = CaseInsensitiveDict(other) | ||
| 76 | else: | ||
| 77 | return NotImplemented | ||
| 78 | # Compare insensitively | ||
| 79 | return dict(self.lower_items()) == dict(other.lower_items()) | ||
| 80 | |||
| 81 | # Copy is required | ||
| 82 | def copy(self): | ||
| 83 | return CaseInsensitiveDict(self._store.values()) | ||
| 84 | |||
| 85 | def __repr__(self): | ||
| 86 | return str(dict(self.items())) | ||
| 87 | |||
| 88 | |||
| 89 | class LookupDict(dict): | ||
| 90 | """Dictionary lookup object.""" | ||
| 91 | |||
| 92 | def __init__(self, name=None): | ||
| 93 | self.name = name | ||
| 94 | super(LookupDict, self).__init__() | ||
| 95 | |||
| 96 | def __repr__(self): | ||
| 97 | return '<lookup \'%s\'>' % (self.name) | ||
| 98 | |||
| 99 | def __getitem__(self, key): | ||
| 100 | # We allow fall-through here, so values default to None | ||
| 101 | |||
| 102 | return self.__dict__.get(key, None) | ||
| 103 | |||
| 104 | def get(self, key, default=None): | ||
| 105 | return self.__dict__.get(key, default) | ||
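A sketch of the fall-through behavior the comment describes; attribute writes back the indexing, and missing keys yield None rather than KeyError:

    from requests.structures import LookupDict

    d = LookupDict(name='demo')
    d.answer = 42
    assert d['answer'] == 42       # attribute-backed lookup
    assert d['missing'] is None    # fall-through instead of KeyError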
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/utils.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/utils.py new file mode 100644 index 0000000..fc4f894 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/utils.py | |||
| @@ -0,0 +1,904 @@ | |||
| 1 | # -*- coding: utf-8 -*- | ||
| 2 | |||
| 3 | """ | ||
| 4 | requests.utils | ||
| 5 | ~~~~~~~~~~~~~~ | ||
| 6 | |||
| 7 | This module provides utility functions that are used within Requests | ||
| 8 | that are also useful for external consumption. | ||
| 9 | """ | ||
| 10 | |||
| 11 | import cgi | ||
| 12 | import codecs | ||
| 13 | import collections | ||
| 14 | import contextlib | ||
| 15 | import io | ||
| 16 | import os | ||
| 17 | import platform | ||
| 18 | import re | ||
| 19 | import socket | ||
| 20 | import struct | ||
| 21 | import warnings | ||
| 22 | |||
| 23 | from .__version__ import __version__ | ||
| 24 | from . import certs | ||
| 25 | # to_native_string is unused here; it is imported for backwards compatibility | ||
| 26 | from ._internal_utils import to_native_string | ||
| 27 | from .compat import parse_http_list as _parse_list_header | ||
| 28 | from .compat import ( | ||
| 29 | quote, urlparse, bytes, str, OrderedDict, unquote, getproxies, | ||
| 30 | proxy_bypass, urlunparse, basestring, integer_types, is_py3, | ||
| 31 | proxy_bypass_environment, getproxies_environment) | ||
| 32 | from .cookies import cookiejar_from_dict | ||
| 33 | from .structures import CaseInsensitiveDict | ||
| 34 | from .exceptions import ( | ||
| 35 | InvalidURL, InvalidHeader, FileModeWarning, UnrewindableBodyError) | ||
| 36 | |||
| 37 | NETRC_FILES = ('.netrc', '_netrc') | ||
| 38 | |||
| 39 | DEFAULT_CA_BUNDLE_PATH = certs.where() | ||
| 40 | |||
| 41 | |||
| 42 | if platform.system() == 'Windows': | ||
| 43 | # provide a proxy_bypass version on Windows without DNS lookups | ||
| 44 | |||
| 45 | def proxy_bypass_registry(host): | ||
| 46 | if is_py3: | ||
| 47 | import winreg | ||
| 48 | else: | ||
| 49 | import _winreg as winreg | ||
| 50 | try: | ||
| 51 | internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER, | ||
| 52 | r'Software\Microsoft\Windows\CurrentVersion\Internet Settings') | ||
| 53 | proxyEnable = winreg.QueryValueEx(internetSettings, | ||
| 54 | 'ProxyEnable')[0] | ||
| 55 | proxyOverride = winreg.QueryValueEx(internetSettings, | ||
| 56 | 'ProxyOverride')[0] | ||
| 57 | except OSError: | ||
| 58 | return False | ||
| 59 | if not proxyEnable or not proxyOverride: | ||
| 60 | return False | ||
| 61 | |||
| 62 | # make a check value list from the registry entry: replace the | ||
| 63 | # '<local>' string by the localhost entry and the corresponding | ||
| 64 | # canonical entry. | ||
| 65 | proxyOverride = proxyOverride.split(';') | ||
| 66 | # now check if we match one of the registry values. | ||
| 67 | for test in proxyOverride: | ||
| 68 | if test == '<local>': | ||
| 69 | if '.' not in host: | ||
| 70 | return True | ||
| 71 | test = test.replace(".", r"\.") # mask dots | ||
| 72 | test = test.replace("*", r".*") # change glob sequence | ||
| 73 | test = test.replace("?", r".") # change glob char | ||
| 74 | if re.match(test, host, re.I): | ||
| 75 | return True | ||
| 76 | return False | ||
| 77 | |||
| 78 | def proxy_bypass(host): # noqa | ||
| 79 | """Return True, if the host should be bypassed. | ||
| 80 | |||
| 81 | Checks proxy settings gathered from the environment, if specified, | ||
| 82 | or the registry. | ||
| 83 | """ | ||
| 84 | if getproxies_environment(): | ||
| 85 | return proxy_bypass_environment(host) | ||
| 86 | else: | ||
| 87 | return proxy_bypass_registry(host) | ||
| 88 | |||
| 89 | |||
| 90 | def dict_to_sequence(d): | ||
| 91 | """Returns an internal sequence dictionary update.""" | ||
| 92 | |||
| 93 | if hasattr(d, 'items'): | ||
| 94 | d = d.items() | ||
| 95 | |||
| 96 | return d | ||
| 97 | |||
| 98 | |||
| 99 | def super_len(o): | ||
| 100 | total_length = None | ||
| 101 | current_position = 0 | ||
| 102 | |||
| 103 | if hasattr(o, '__len__'): | ||
| 104 | total_length = len(o) | ||
| 105 | |||
| 106 | elif hasattr(o, 'len'): | ||
| 107 | total_length = o.len | ||
| 108 | |||
| 109 | elif hasattr(o, 'fileno'): | ||
| 110 | try: | ||
| 111 | fileno = o.fileno() | ||
| 112 | except io.UnsupportedOperation: | ||
| 113 | pass | ||
| 114 | else: | ||
| 115 | total_length = os.fstat(fileno).st_size | ||
| 116 | |||
| 117 | # Having used fstat to determine the file length, we need to | ||
| 118 | # confirm that this file was opened up in binary mode. | ||
| 119 | if 'b' not in o.mode: | ||
| 120 | warnings.warn(( | ||
| 121 | "Requests has determined the content-length for this " | ||
| 122 | "request using the binary size of the file: however, the " | ||
| 123 | "file has been opened in text mode (i.e. without the 'b' " | ||
| 124 | "flag in the mode). This may lead to an incorrect " | ||
| 125 | "content-length. In Requests 3.0, support will be removed " | ||
| 126 | "for files in text mode."), | ||
| 127 | FileModeWarning | ||
| 128 | ) | ||
| 129 | |||
| 130 | if hasattr(o, 'tell'): | ||
| 131 | try: | ||
| 132 | current_position = o.tell() | ||
| 133 | except (OSError, IOError): | ||
| 134 | # This can happen in some weird situations, such as when the file | ||
| 135 | # is actually a special file descriptor like stdin. In this | ||
| 136 | # instance, we don't know what the length is, so set it to zero and | ||
| 137 | # let requests chunk it instead. | ||
| 138 | if total_length is not None: | ||
| 139 | current_position = total_length | ||
| 140 | else: | ||
| 141 | if hasattr(o, 'seek') and total_length is None: | ||
| 142 | # StringIO and BytesIO have seek but no usable fileno | ||
| 143 | try: | ||
| 144 | # seek to end of file | ||
| 145 | o.seek(0, 2) | ||
| 146 | total_length = o.tell() | ||
| 147 | |||
| 148 | # seek back to current position to support | ||
| 149 | # partially read file-like objects | ||
| 150 | o.seek(current_position or 0) | ||
| 151 | except (OSError, IOError): | ||
| 152 | total_length = 0 | ||
| 153 | |||
| 154 | if total_length is None: | ||
| 155 | total_length = 0 | ||
| 156 | |||
| 157 | return max(0, total_length - current_position) | ||
| 158 | |||
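The net effect is that super_len reports the bytes remaining from the current position, not the total size. A small check, assuming an in-memory stream:

    import io
    from pip._vendor.requests.utils import super_len

    buf = io.BytesIO(b'hello')
    buf.read(2)                 # advance the position by two bytes
    assert super_len(buf) == 3  # 5 bytes total minus the current position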
| 159 | |||
| 160 | def get_netrc_auth(url, raise_errors=False): | ||
| 161 | """Returns the Requests tuple auth for a given url from netrc.""" | ||
| 162 | |||
| 163 | try: | ||
| 164 | from netrc import netrc, NetrcParseError | ||
| 165 | |||
| 166 | netrc_path = None | ||
| 167 | |||
| 168 | for f in NETRC_FILES: | ||
| 169 | try: | ||
| 170 | loc = os.path.expanduser('~/{0}'.format(f)) | ||
| 171 | except KeyError: | ||
| 172 | # os.path.expanduser can fail when $HOME is undefined and | ||
| 173 | # getpwuid fails. See http://bugs.python.org/issue20164 & | ||
| 174 | # https://github.com/requests/requests/issues/1846 | ||
| 175 | return | ||
| 176 | |||
| 177 | if os.path.exists(loc): | ||
| 178 | netrc_path = loc | ||
| 179 | break | ||
| 180 | |||
| 181 | # Abort early if there isn't one. | ||
| 182 | if netrc_path is None: | ||
| 183 | return | ||
| 184 | |||
| 185 | ri = urlparse(url) | ||
| 186 | |||
| 187 | # Strip port numbers from netloc. This weird ``if...encode`` dance is | ||
| 188 | # used for Python 3.2, which doesn't support unicode literals. | ||
| 189 | splitstr = b':' | ||
| 190 | if isinstance(url, str): | ||
| 191 | splitstr = splitstr.decode('ascii') | ||
| 192 | host = ri.netloc.split(splitstr)[0] | ||
| 193 | |||
| 194 | try: | ||
| 195 | _netrc = netrc(netrc_path).authenticators(host) | ||
| 196 | if _netrc: | ||
| 197 | # Return with login / password | ||
| 198 | login_i = (0 if _netrc[0] else 1) | ||
| 199 | return (_netrc[login_i], _netrc[2]) | ||
| 200 | except (NetrcParseError, IOError): | ||
| 201 | # If there was a parsing error or a permissions issue reading the file, | ||
| 202 | # we'll just skip netrc auth unless explicitly asked to raise errors. | ||
| 203 | if raise_errors: | ||
| 204 | raise | ||
| 205 | |||
| 206 | # AppEngine hackiness. | ||
| 207 | except (ImportError, AttributeError): | ||
| 208 | pass | ||
| 209 | |||
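For orientation, a hypothetical ~/.netrc entry and the lookup it would satisfy (the host and credentials are invented, and the result depends on the local file actually existing):

    # ~/.netrc
    #   machine example.com login alice password s3cret
    from pip._vendor.requests.utils import get_netrc_auth

    # Returns ('alice', 's3cret') if the entry above exists, else None.
    auth = get_netrc_auth('https://example.com/repo')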
| 210 | |||
| 211 | def guess_filename(obj): | ||
| 212 | """Tries to guess the filename of the given object.""" | ||
| 213 | name = getattr(obj, 'name', None) | ||
| 214 | if (name and isinstance(name, basestring) and name[0] != '<' and | ||
| 215 | name[-1] != '>'): | ||
| 216 | return os.path.basename(name) | ||
| 217 | |||
| 218 | |||
| 219 | def from_key_val_list(value): | ||
| 220 | """Take an object and test to see if it can be represented as a | ||
| 221 | dictionary. If it can be, return an | ||
| 222 | OrderedDict, e.g., | ||
| 223 | |||
| 224 | :: | ||
| 225 | |||
| 226 | >>> from_key_val_list([('key', 'val')]) | ||
| 227 | OrderedDict([('key', 'val')]) | ||
| 228 | >>> from_key_val_list('string') | ||
| 229 | ValueError: need more than 1 value to unpack | ||
| 230 | >>> from_key_val_list({'key': 'val'}) | ||
| 231 | OrderedDict([('key', 'val')]) | ||
| 232 | |||
| 233 | :rtype: OrderedDict | ||
| 234 | """ | ||
| 235 | if value is None: | ||
| 236 | return None | ||
| 237 | |||
| 238 | if isinstance(value, (str, bytes, bool, int)): | ||
| 239 | raise ValueError('cannot encode objects that are not 2-tuples') | ||
| 240 | |||
| 241 | return OrderedDict(value) | ||
| 242 | |||
| 243 | |||
| 244 | def to_key_val_list(value): | ||
| 245 | """Take an object and test to see if it can be represented as a | ||
| 246 | dictionary. If it can be, return a list of tuples, e.g., | ||
| 247 | |||
| 248 | :: | ||
| 249 | |||
| 250 | >>> to_key_val_list([('key', 'val')]) | ||
| 251 | [('key', 'val')] | ||
| 252 | >>> to_key_val_list({'key': 'val'}) | ||
| 253 | [('key', 'val')] | ||
| 254 | >>> to_key_val_list('string') | ||
| 255 | ValueError: cannot encode objects that are not 2-tuples. | ||
| 256 | |||
| 257 | :rtype: list | ||
| 258 | """ | ||
| 259 | if value is None: | ||
| 260 | return None | ||
| 261 | |||
| 262 | if isinstance(value, (str, bytes, bool, int)): | ||
| 263 | raise ValueError('cannot encode objects that are not 2-tuples') | ||
| 264 | |||
| 265 | if isinstance(value, collections.Mapping): | ||
| 266 | value = value.items() | ||
| 267 | |||
| 268 | return list(value) | ||
| 269 | |||
| 270 | |||
| 271 | # From mitsuhiko/werkzeug (used with permission). | ||
| 272 | def parse_list_header(value): | ||
| 273 | """Parse lists as described by RFC 2068 Section 2. | ||
| 274 | |||
| 275 | In particular, parse comma-separated lists where the elements of | ||
| 276 | the list may include quoted-strings. A quoted-string could | ||
| 277 | contain a comma. A non-quoted string could have quotes in the | ||
| 278 | middle. Quotes are removed automatically after parsing. | ||
| 279 | |||
| 280 | It basically works like :func:`parse_set_header` just that items | ||
| 281 | may appear multiple times and case sensitivity is preserved. | ||
| 282 | |||
| 283 | The return value is a standard :class:`list`: | ||
| 284 | |||
| 285 | >>> parse_list_header('token, "quoted value"') | ||
| 286 | ['token', 'quoted value'] | ||
| 287 | |||
| 288 | To create a header from the :class:`list` again, use the | ||
| 289 | :func:`dump_header` function. | ||
| 290 | |||
| 291 | :param value: a string with a list header. | ||
| 292 | :return: :class:`list` | ||
| 293 | :rtype: list | ||
| 294 | """ | ||
| 295 | result = [] | ||
| 296 | for item in _parse_list_header(value): | ||
| 297 | if item[:1] == item[-1:] == '"': | ||
| 298 | item = unquote_header_value(item[1:-1]) | ||
| 299 | result.append(item) | ||
| 300 | return result | ||
| 301 | |||
| 302 | |||
| 303 | # From mitsuhiko/werkzeug (used with permission). | ||
| 304 | def parse_dict_header(value): | ||
| 305 | """Parse lists of key, value pairs as described by RFC 2068 Section 2 and | ||
| 306 | convert them into a python dict: | ||
| 307 | |||
| 308 | >>> d = parse_dict_header('foo="is a fish", bar="as well"') | ||
| 309 | >>> type(d) is dict | ||
| 310 | True | ||
| 311 | >>> sorted(d.items()) | ||
| 312 | [('bar', 'as well'), ('foo', 'is a fish')] | ||
| 313 | |||
| 314 | If there is no value for a key it will be `None`: | ||
| 315 | |||
| 316 | >>> parse_dict_header('key_without_value') | ||
| 317 | {'key_without_value': None} | ||
| 318 | |||
| 319 | To create a header from the :class:`dict` again, use the | ||
| 320 | :func:`dump_header` function. | ||
| 321 | |||
| 322 | :param value: a string with a dict header. | ||
| 323 | :return: :class:`dict` | ||
| 324 | :rtype: dict | ||
| 325 | """ | ||
| 326 | result = {} | ||
| 327 | for item in _parse_list_header(value): | ||
| 328 | if '=' not in item: | ||
| 329 | result[item] = None | ||
| 330 | continue | ||
| 331 | name, value = item.split('=', 1) | ||
| 332 | if value[:1] == value[-1:] == '"': | ||
| 333 | value = unquote_header_value(value[1:-1]) | ||
| 334 | result[name] = value | ||
| 335 | return result | ||
| 336 | |||
| 337 | |||
| 338 | # From mitsuhiko/werkzeug (used with permission). | ||
| 339 | def unquote_header_value(value, is_filename=False): | ||
| 340 | r"""Unquotes a header value. (Reversal of :func:`quote_header_value`). | ||
| 341 | This does not use the real unquoting but what browsers are actually | ||
| 342 | using for quoting. | ||
| 343 | |||
| 344 | :param value: the header value to unquote. | ||
| 345 | :rtype: str | ||
| 346 | """ | ||
| 347 | if value and value[0] == value[-1] == '"': | ||
| 348 | # this is not the real unquoting, but fixing this so that the | ||
| 349 | # RFC is met will result in bugs with internet explorer and | ||
| 350 | # probably some other browsers as well. IE for example is | ||
| 351 | # uploading files with "C:\foo\bar.txt" as filename | ||
| 352 | value = value[1:-1] | ||
| 353 | |||
| 354 | # if this is a filename and the starting characters look like | ||
| 355 | # a UNC path, then just return the value without quotes. Using the | ||
| 356 | # replace sequence below on a UNC path has the effect of turning | ||
| 357 | # the leading double slash into a single slash and then | ||
| 358 | # _fix_ie_filename() doesn't work correctly. See #458. | ||
| 359 | if not is_filename or value[:2] != '\\\\': | ||
| 360 | return value.replace('\\\\', '\\').replace('\\"', '"') | ||
| 361 | return value | ||
| 362 | |||
| 363 | |||
| 364 | def dict_from_cookiejar(cj): | ||
| 365 | """Returns a key/value dictionary from a CookieJar. | ||
| 366 | |||
| 367 | :param cj: CookieJar object to extract cookies from. | ||
| 368 | :rtype: dict | ||
| 369 | """ | ||
| 370 | |||
| 371 | cookie_dict = {} | ||
| 372 | |||
| 373 | for cookie in cj: | ||
| 374 | cookie_dict[cookie.name] = cookie.value | ||
| 375 | |||
| 376 | return cookie_dict | ||
| 377 | |||
| 378 | |||
| 379 | def add_dict_to_cookiejar(cj, cookie_dict): | ||
| 380 | """Returns a CookieJar from a key/value dictionary. | ||
| 381 | |||
| 382 | :param cj: CookieJar to insert cookies into. | ||
| 383 | :param cookie_dict: Dict of key/values to insert into CookieJar. | ||
| 384 | :rtype: CookieJar | ||
| 385 | """ | ||
| 386 | |||
| 387 | return cookiejar_from_dict(cookie_dict, cj) | ||
| 388 | |||
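The two helpers are inverses, so a plain dict survives a round trip through a CookieJar. A quick sketch:

    from http.cookiejar import CookieJar
    from pip._vendor.requests.utils import add_dict_to_cookiejar, dict_from_cookiejar

    jar = add_dict_to_cookiejar(CookieJar(), {'session': 'abc123'})
    assert dict_from_cookiejar(jar) == {'session': 'abc123'}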
| 389 | |||
| 390 | def get_encodings_from_content(content): | ||
| 391 | """Returns encodings from given content string. | ||
| 392 | |||
| 393 | :param content: bytestring to extract encodings from. | ||
| 394 | """ | ||
| 395 | warnings.warn(( | ||
| 396 | 'In requests 3.0, get_encodings_from_content will be removed. For ' | ||
| 397 | 'more information, please see the discussion on issue #2266. (This' | ||
| 398 | ' warning should only appear once.)'), | ||
| 399 | DeprecationWarning) | ||
| 400 | |||
| 401 | charset_re = re.compile(r'<meta.*?charset=["\']*(.+?)["\'>]', flags=re.I) | ||
| 402 | pragma_re = re.compile(r'<meta.*?content=["\']*;?charset=(.+?)["\'>]', flags=re.I) | ||
| 403 | xml_re = re.compile(r'^<\?xml.*?encoding=["\']*(.+?)["\'>]') | ||
| 404 | |||
| 405 | return (charset_re.findall(content) + | ||
| 406 | pragma_re.findall(content) + | ||
| 407 | xml_re.findall(content)) | ||
| 408 | |||
| 409 | |||
| 410 | def get_encoding_from_headers(headers): | ||
| 411 | """Returns encodings from given HTTP Header Dict. | ||
| 412 | |||
| 413 | :param headers: dictionary to extract encoding from. | ||
| 414 | :rtype: str | ||
| 415 | """ | ||
| 416 | |||
| 417 | content_type = headers.get('content-type') | ||
| 418 | |||
| 419 | if not content_type: | ||
| 420 | return None | ||
| 421 | |||
| 422 | content_type, params = cgi.parse_header(content_type) | ||
| 423 | |||
| 424 | if 'charset' in params: | ||
| 425 | return params['charset'].strip("'\"") | ||
| 426 | |||
| 427 | if 'text' in content_type: | ||
| 428 | return 'ISO-8859-1' | ||
| 429 | |||
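Behavior on the three interesting cases, as a sketch:

    from pip._vendor.requests.utils import get_encoding_from_headers

    assert get_encoding_from_headers({'content-type': 'application/json; charset=utf-8'}) == 'utf-8'
    assert get_encoding_from_headers({'content-type': 'text/html'}) == 'ISO-8859-1'  # HTTP/1.1 default for text
    assert get_encoding_from_headers({}) is None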
| 430 | |||
| 431 | def stream_decode_response_unicode(iterator, r): | ||
| 432 | """Stream decodes a iterator.""" | ||
| 433 | |||
| 434 | if r.encoding is None: | ||
| 435 | for item in iterator: | ||
| 436 | yield item | ||
| 437 | return | ||
| 438 | |||
| 439 | decoder = codecs.getincrementaldecoder(r.encoding)(errors='replace') | ||
| 440 | for chunk in iterator: | ||
| 441 | rv = decoder.decode(chunk) | ||
| 442 | if rv: | ||
| 443 | yield rv | ||
| 444 | rv = decoder.decode(b'', final=True) | ||
| 445 | if rv: | ||
| 446 | yield rv | ||
| 447 | |||
| 448 | |||
| 449 | def iter_slices(string, slice_length): | ||
| 450 | """Iterate over slices of a string.""" | ||
| 451 | pos = 0 | ||
| 452 | if slice_length is None or slice_length <= 0: | ||
| 453 | slice_length = len(string) | ||
| 454 | while pos < len(string): | ||
| 455 | yield string[pos:pos + slice_length] | ||
| 456 | pos += slice_length | ||
| 457 | |||
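For example:

    from pip._vendor.requests.utils import iter_slices

    assert list(iter_slices('abcdef', 4)) == ['abcd', 'ef']
    assert list(iter_slices('abc', None)) == ['abc']  # None or <= 0 yields one slice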
| 458 | |||
| 459 | def get_unicode_from_response(r): | ||
| 460 | """Returns the requested content back in unicode. | ||
| 461 | |||
| 462 | :param r: Response object to get unicode content from. | ||
| 463 | |||
| 464 | Tried: | ||
| 465 | |||
| 466 | 1. charset from content-type | ||
| 467 | 2. fall back and replace all unicode characters | ||
| 468 | |||
| 469 | :rtype: str | ||
| 470 | """ | ||
| 471 | warnings.warn(( | ||
| 472 | 'In requests 3.0, get_unicode_from_response will be removed. For ' | ||
| 473 | 'more information, please see the discussion on issue #2266. (This' | ||
| 474 | ' warning should only appear once.)'), | ||
| 475 | DeprecationWarning) | ||
| 476 | |||
| 477 | tried_encodings = [] | ||
| 478 | |||
| 479 | # Try charset from content-type | ||
| 480 | encoding = get_encoding_from_headers(r.headers) | ||
| 481 | |||
| 482 | if encoding: | ||
| 483 | try: | ||
| 484 | return str(r.content, encoding) | ||
| 485 | except UnicodeError: | ||
| 486 | tried_encodings.append(encoding) | ||
| 487 | |||
| 488 | # Fall back: | ||
| 489 | try: | ||
| 490 | return str(r.content, encoding, errors='replace') | ||
| 491 | except TypeError: | ||
| 492 | return r.content | ||
| 493 | |||
| 494 | |||
| 495 | # The unreserved URI characters (RFC 3986) | ||
| 496 | UNRESERVED_SET = frozenset( | ||
| 497 | "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" + "0123456789-._~") | ||
| 498 | |||
| 499 | |||
| 500 | def unquote_unreserved(uri): | ||
| 501 | """Un-escape any percent-escape sequences in a URI that are unreserved | ||
| 502 | characters. This leaves all reserved, illegal and non-ASCII bytes encoded. | ||
| 503 | |||
| 504 | :rtype: str | ||
| 505 | """ | ||
| 506 | parts = uri.split('%') | ||
| 507 | for i in range(1, len(parts)): | ||
| 508 | h = parts[i][0:2] | ||
| 509 | if len(h) == 2 and h.isalnum(): | ||
| 510 | try: | ||
| 511 | c = chr(int(h, 16)) | ||
| 512 | except ValueError: | ||
| 513 | raise InvalidURL("Invalid percent-escape sequence: '%s'" % h) | ||
| 514 | |||
| 515 | if c in UNRESERVED_SET: | ||
| 516 | parts[i] = c + parts[i][2:] | ||
| 517 | else: | ||
| 518 | parts[i] = '%' + parts[i] | ||
| 519 | else: | ||
| 520 | parts[i] = '%' + parts[i] | ||
| 521 | return ''.join(parts) | ||
| 522 | |||
| 523 | |||
| 524 | def requote_uri(uri): | ||
| 525 | """Re-quote the given URI. | ||
| 526 | |||
| 527 | This function passes the given URI through an unquote/quote cycle to | ||
| 528 | ensure that it is fully and consistently quoted. | ||
| 529 | |||
| 530 | :rtype: str | ||
| 531 | """ | ||
| 532 | safe_with_percent = "!#$%&'()*+,/:;=?@[]~" | ||
| 533 | safe_without_percent = "!#$&'()*+,/:;=?@[]~" | ||
| 534 | try: | ||
| 535 | # Unquote only the unreserved characters | ||
| 536 | # Then quote only illegal characters (do not quote reserved, | ||
| 537 | # unreserved, or '%') | ||
| 538 | return quote(unquote_unreserved(uri), safe=safe_with_percent) | ||
| 539 | except InvalidURL: | ||
| 540 | # We couldn't unquote the given URI, so let's try quoting it, but | ||
| 541 | # there may be unquoted '%'s in the URI. We need to make sure they're | ||
| 542 | # properly quoted so they do not cause issues elsewhere. | ||
| 543 | return quote(uri, safe=safe_without_percent) | ||
| 544 | |||
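Concretely, unreserved escapes are unwound while reserved ones survive, and requote_uri escapes only what is illegal:

    from pip._vendor.requests.utils import requote_uri, unquote_unreserved

    assert unquote_unreserved('http://example.com/%7Euser%2Fpath') == 'http://example.com/~user%2Fpath'
    assert requote_uri('http://example.com/a b') == 'http://example.com/a%20b'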
| 545 | |||
| 546 | def address_in_network(ip, net): | ||
| 547 | """This function allows you to check if an IP belongs to a network subnet | ||
| 548 | |||
| 549 | Example: returns True if ip = 192.168.1.1 and net = 192.168.1.0/24 | ||
| 550 | returns False if ip = 192.168.1.1 and net = 192.168.100.0/24 | ||
| 551 | |||
| 552 | :rtype: bool | ||
| 553 | """ | ||
| 554 | ipaddr = struct.unpack('=L', socket.inet_aton(ip))[0] | ||
| 555 | netaddr, bits = net.split('/') | ||
| 556 | netmask = struct.unpack('=L', socket.inet_aton(dotted_netmask(int(bits))))[0] | ||
| 557 | network = struct.unpack('=L', socket.inet_aton(netaddr))[0] & netmask | ||
| 558 | return (ipaddr & netmask) == (network & netmask) | ||
| 559 | |||
| 560 | |||
| 561 | def dotted_netmask(mask): | ||
| 562 | """Converts mask from /xx format to xxx.xxx.xxx.xxx | ||
| 563 | |||
| 564 | Example: if mask is 24 function returns 255.255.255.0 | ||
| 565 | |||
| 566 | :rtype: str | ||
| 567 | """ | ||
| 568 | bits = 0xffffffff ^ (1 << 32 - mask) - 1 | ||
| 569 | return socket.inet_ntoa(struct.pack('>I', bits)) | ||
| 570 | |||
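A few concrete values for the two helpers above:

    from pip._vendor.requests.utils import address_in_network, dotted_netmask

    assert dotted_netmask(24) == '255.255.255.0'
    assert address_in_network('192.168.1.1', '192.168.1.0/24')
    assert not address_in_network('192.168.1.1', '192.168.100.0/24')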
| 571 | |||
| 572 | def is_ipv4_address(string_ip): | ||
| 573 | """ | ||
| 574 | :rtype: bool | ||
| 575 | """ | ||
| 576 | try: | ||
| 577 | socket.inet_aton(string_ip) | ||
| 578 | except socket.error: | ||
| 579 | return False | ||
| 580 | return True | ||
| 581 | |||
| 582 | |||
| 583 | def is_valid_cidr(string_network): | ||
| 584 | """ | ||
| 585 | Very simple check of the cidr format in no_proxy variable. | ||
| 586 | |||
| 587 | :rtype: bool | ||
| 588 | """ | ||
| 589 | if string_network.count('/') == 1: | ||
| 590 | try: | ||
| 591 | mask = int(string_network.split('/')[1]) | ||
| 592 | except ValueError: | ||
| 593 | return False | ||
| 594 | |||
| 595 | if mask < 1 or mask > 32: | ||
| 596 | return False | ||
| 597 | |||
| 598 | try: | ||
| 599 | socket.inet_aton(string_network.split('/')[0]) | ||
| 600 | except socket.error: | ||
| 601 | return False | ||
| 602 | else: | ||
| 603 | return False | ||
| 604 | return True | ||
| 605 | |||
| 606 | |||
| 607 | @contextlib.contextmanager | ||
| 608 | def set_environ(env_name, value): | ||
| 609 | """Set the environment variable 'env_name' to 'value' | ||
| 610 | |||
| 611 | Save previous value, yield, and then restore the previous value stored in | ||
| 612 | the environment variable 'env_name'. | ||
| 613 | |||
| 614 | If 'value' is None, do nothing""" | ||
| 615 | value_changed = value is not None | ||
| 616 | if value_changed: | ||
| 617 | old_value = os.environ.get(env_name) | ||
| 618 | os.environ[env_name] = value | ||
| 619 | try: | ||
| 620 | yield | ||
| 621 | finally: | ||
| 622 | if value_changed: | ||
| 623 | if old_value is None: | ||
| 624 | del os.environ[env_name] | ||
| 625 | else: | ||
| 626 | os.environ[env_name] = old_value | ||
| 627 | |||
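A sketch of the save/restore behavior (assumes HTTP_PROXY starts out unset in this process):

    import os
    from pip._vendor.requests.utils import set_environ

    with set_environ('HTTP_PROXY', 'http://proxy.example:3128'):
        assert os.environ['HTTP_PROXY'] == 'http://proxy.example:3128'
    assert 'HTTP_PROXY' not in os.environ  # previous (unset) state restored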
| 628 | |||
| 629 | def should_bypass_proxies(url, no_proxy): | ||
| 630 | """ | ||
| 631 | Returns whether we should bypass proxies or not. | ||
| 632 | |||
| 633 | :rtype: bool | ||
| 634 | """ | ||
| 635 | get_proxy = lambda k: os.environ.get(k) or os.environ.get(k.upper()) | ||
| 636 | |||
| 637 | # First check whether no_proxy is defined. If it is, check that the URL | ||
| 638 | # we're getting isn't in the no_proxy list. | ||
| 639 | no_proxy_arg = no_proxy | ||
| 640 | if no_proxy is None: | ||
| 641 | no_proxy = get_proxy('no_proxy') | ||
| 642 | netloc = urlparse(url).netloc | ||
| 643 | |||
| 644 | if no_proxy: | ||
| 645 | # We need to check whether we match here. We need to see if we match | ||
| 646 | # the end of the netloc, both with and without the port. | ||
| 647 | no_proxy = ( | ||
| 648 | host for host in no_proxy.replace(' ', '').split(',') if host | ||
| 649 | ) | ||
| 650 | |||
| 651 | ip = netloc.split(':')[0] | ||
| 652 | if is_ipv4_address(ip): | ||
| 653 | for proxy_ip in no_proxy: | ||
| 654 | if is_valid_cidr(proxy_ip): | ||
| 655 | if address_in_network(ip, proxy_ip): | ||
| 656 | return True | ||
| 657 | elif ip == proxy_ip: | ||
| 658 | # If the no_proxy entry was defined as a plain IP address (rather | ||
| 659 | # than in CIDR notation) and it matches the request's IP | ||
| 660 | return True | ||
| 661 | else: | ||
| 662 | for host in no_proxy: | ||
| 663 | if netloc.endswith(host) or netloc.split(':')[0].endswith(host): | ||
| 664 | # The URL does match something in no_proxy, so we don't want | ||
| 665 | # to apply the proxies on this URL. | ||
| 666 | return True | ||
| 667 | |||
| 668 | # If the system proxy settings indicate that this URL should be bypassed, | ||
| 669 | # don't proxy. | ||
| 670 | # The proxy_bypass function is incredibly buggy on OS X in early versions | ||
| 671 | # of Python 2.6, so allow this call to fail. Only catch the specific | ||
| 672 | # exceptions we've seen, though: this call failing in other ways can reveal | ||
| 673 | # legitimate problems. | ||
| 674 | with set_environ('no_proxy', no_proxy_arg): | ||
| 675 | try: | ||
| 676 | bypass = proxy_bypass(netloc) | ||
| 677 | except (TypeError, socket.gaierror): | ||
| 678 | bypass = False | ||
| 679 | |||
| 680 | if bypass: | ||
| 681 | return True | ||
| 682 | |||
| 683 | return False | ||
| 684 | |||
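For instance, with an explicit no_proxy argument (the result can also depend on system proxy settings, so this is only indicative):

    from pip._vendor.requests.utils import should_bypass_proxies

    # 'localhost:8080' matches the no_proxy host once the port is stripped.
    assert should_bypass_proxies('http://localhost:8080/', no_proxy='localhost')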
| 685 | |||
| 686 | def get_environ_proxies(url, no_proxy=None): | ||
| 687 | """ | ||
| 688 | Return a dict of environment proxies. | ||
| 689 | |||
| 690 | :rtype: dict | ||
| 691 | """ | ||
| 692 | if should_bypass_proxies(url, no_proxy=no_proxy): | ||
| 693 | return {} | ||
| 694 | else: | ||
| 695 | return getproxies() | ||
| 696 | |||
| 697 | |||
| 698 | def select_proxy(url, proxies): | ||
| 699 | """Select a proxy for the url, if applicable. | ||
| 700 | |||
| 701 | :param url: The url being for the request | ||
| 702 | :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs | ||
| 703 | """ | ||
| 704 | proxies = proxies or {} | ||
| 705 | urlparts = urlparse(url) | ||
| 706 | if urlparts.hostname is None: | ||
| 707 | return proxies.get(urlparts.scheme, proxies.get('all')) | ||
| 708 | |||
| 709 | proxy_keys = [ | ||
| 710 | urlparts.scheme + '://' + urlparts.hostname, | ||
| 711 | urlparts.scheme, | ||
| 712 | 'all://' + urlparts.hostname, | ||
| 713 | 'all', | ||
| 714 | ] | ||
| 715 | proxy = None | ||
| 716 | for proxy_key in proxy_keys: | ||
| 717 | if proxy_key in proxies: | ||
| 718 | proxy = proxies[proxy_key] | ||
| 719 | break | ||
| 720 | |||
| 721 | return proxy | ||
| 722 | |||
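The key order above means a scheme+host entry beats a bare scheme, which beats the 'all' fallbacks:

    from pip._vendor.requests.utils import select_proxy

    proxies = {'http://example.com': 'http://specific:3128', 'http': 'http://generic:3128'}
    assert select_proxy('http://example.com/path', proxies) == 'http://specific:3128'
    assert select_proxy('http://other.com/', proxies) == 'http://generic:3128'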
| 723 | |||
| 724 | def default_user_agent(name="python-requests"): | ||
| 725 | """ | ||
| 726 | Return a string representing the default user agent. | ||
| 727 | |||
| 728 | :rtype: str | ||
| 729 | """ | ||
| 730 | return '%s/%s' % (name, __version__) | ||
| 731 | |||
| 732 | |||
| 733 | def default_headers(): | ||
| 734 | """ | ||
| 735 | :rtype: requests.structures.CaseInsensitiveDict | ||
| 736 | """ | ||
| 737 | return CaseInsensitiveDict({ | ||
| 738 | 'User-Agent': default_user_agent(), | ||
| 739 | 'Accept-Encoding': ', '.join(('gzip', 'deflate')), | ||
| 740 | 'Accept': '*/*', | ||
| 741 | 'Connection': 'keep-alive', | ||
| 742 | }) | ||
| 743 | |||
| 744 | |||
| 745 | def parse_header_links(value): | ||
| 746 | """Return a dict of parsed link headers proxies. | ||
| 747 | |||
| 748 | i.e. Link: <http://.../front.jpeg>; rel=front; type="image/jpeg",<http://.../back.jpeg>; rel=back;type="image/jpeg" | ||
| 749 | |||
| 750 | :rtype: list | ||
| 751 | """ | ||
| 752 | |||
| 753 | links = [] | ||
| 754 | |||
| 755 | replace_chars = ' \'"' | ||
| 756 | |||
| 757 | for val in re.split(', *<', value): | ||
| 758 | try: | ||
| 759 | url, params = val.split(';', 1) | ||
| 760 | except ValueError: | ||
| 761 | url, params = val, '' | ||
| 762 | |||
| 763 | link = {'url': url.strip('<> \'"')} | ||
| 764 | |||
| 765 | for param in params.split(';'): | ||
| 766 | try: | ||
| 767 | key, value = param.split('=') | ||
| 768 | except ValueError: | ||
| 769 | break | ||
| 770 | |||
| 771 | link[key.strip(replace_chars)] = value.strip(replace_chars) | ||
| 772 | |||
| 773 | links.append(link) | ||
| 774 | |||
| 775 | return links | ||
| 776 | |||
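A single-link example of the output shape:

    from pip._vendor.requests.utils import parse_header_links

    links = parse_header_links('<http://example.com/page2>; rel="next"')
    assert links == [{'url': 'http://example.com/page2', 'rel': 'next'}]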
| 777 | |||
| 778 | # Null bytes; no need to recreate these on each call to guess_json_utf | ||
| 779 | _null = '\x00'.encode('ascii') # encoding to ASCII for Python 3 | ||
| 780 | _null2 = _null * 2 | ||
| 781 | _null3 = _null * 3 | ||
| 782 | |||
| 783 | |||
| 784 | def guess_json_utf(data): | ||
| 785 | """ | ||
| 786 | :rtype: str | ||
| 787 | """ | ||
| 788 | # JSON always starts with two ASCII characters, so detection is as | ||
| 789 | # easy as counting the nulls and from their location and count | ||
| 790 | # determine the encoding. Also detect a BOM, if present. | ||
| 791 | sample = data[:4] | ||
| 792 | if sample in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE): | ||
| 793 | return 'utf-32' # BOM included | ||
| 794 | if sample[:3] == codecs.BOM_UTF8: | ||
| 795 | return 'utf-8-sig' # BOM included, MS style (discouraged) | ||
| 796 | if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE): | ||
| 797 | return 'utf-16' # BOM included | ||
| 798 | nullcount = sample.count(_null) | ||
| 799 | if nullcount == 0: | ||
| 800 | return 'utf-8' | ||
| 801 | if nullcount == 2: | ||
| 802 | if sample[::2] == _null2: # 1st and 3rd are null | ||
| 803 | return 'utf-16-be' | ||
| 804 | if sample[1::2] == _null2: # 2nd and 4th are null | ||
| 805 | return 'utf-16-le' | ||
| 806 | # Did not detect 2 valid UTF-16 ascii-range characters | ||
| 807 | if nullcount == 3: | ||
| 808 | if sample[:3] == _null3: | ||
| 809 | return 'utf-32-be' | ||
| 810 | if sample[1:] == _null3: | ||
| 811 | return 'utf-32-le' | ||
| 812 | # Did not detect a valid UTF-32 ascii-range character | ||
| 813 | return None | ||
| 814 | |||
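The BOM and null-counting heuristics in action (note the ``:rtype: str`` is loose; None is returned when nothing matches):

    from pip._vendor.requests.utils import guess_json_utf

    assert guess_json_utf('{"k": 1}'.encode('utf-8')) == 'utf-8'
    assert guess_json_utf('{"k": 1}'.encode('utf-16')) == 'utf-16'        # via BOM
    assert guess_json_utf('{"k": 1}'.encode('utf-32-be')) == 'utf-32-be'  # via null layout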
| 815 | |||
| 816 | def prepend_scheme_if_needed(url, new_scheme): | ||
| 817 | """Given a URL that may or may not have a scheme, prepend the given scheme. | ||
| 818 | Does not replace a present scheme with the one provided as an argument. | ||
| 819 | |||
| 820 | :rtype: str | ||
| 821 | """ | ||
| 822 | scheme, netloc, path, params, query, fragment = urlparse(url, new_scheme) | ||
| 823 | |||
| 824 | # urlparse is a finicky beast, and sometimes decides that there isn't a | ||
| 825 | # netloc present. Assume that it's being over-cautious, and switch netloc | ||
| 826 | # and path if urlparse decided there was no netloc. | ||
| 827 | if not netloc: | ||
| 828 | netloc, path = path, netloc | ||
| 829 | |||
| 830 | return urlunparse((scheme, netloc, path, params, query, fragment)) | ||
| 831 | |||
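Two representative cases:

    from pip._vendor.requests.utils import prepend_scheme_if_needed

    assert prepend_scheme_if_needed('example.com/path', 'http') == 'http://example.com/path'
    assert prepend_scheme_if_needed('https://example.com/', 'http') == 'https://example.com/'  # existing scheme kept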
| 832 | |||
| 833 | def get_auth_from_url(url): | ||
| 834 | """Given a url with authentication components, extract them into a tuple of | ||
| 835 | username, password. | ||
| 836 | |||
| 837 | :rtype: (str,str) | ||
| 838 | """ | ||
| 839 | parsed = urlparse(url) | ||
| 840 | |||
| 841 | try: | ||
| 842 | auth = (unquote(parsed.username), unquote(parsed.password)) | ||
| 843 | except (AttributeError, TypeError): | ||
| 844 | auth = ('', '') | ||
| 845 | |||
| 846 | return auth | ||
| 847 | |||
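Together with urldefragauth (defined further down in this file), credentials can be pulled out of a URL and then scrubbed from it:

    from pip._vendor.requests.utils import get_auth_from_url, urldefragauth

    url = 'https://alice:s3cret@example.com/repo#frag'
    assert get_auth_from_url(url) == ('alice', 's3cret')
    assert urldefragauth(url) == 'https://example.com/repo'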
| 848 | |||
| 849 | # Moved outside of function to avoid recompile every call | ||
| 850 | _CLEAN_HEADER_REGEX_BYTE = re.compile(b'^\\S[^\\r\\n]*$|^$') | ||
| 851 | _CLEAN_HEADER_REGEX_STR = re.compile(r'^\S[^\r\n]*$|^$') | ||
| 852 | |||
| 853 | |||
| 854 | def check_header_validity(header): | ||
| 855 | """Verifies that header value is a string which doesn't contain | ||
| 856 | leading whitespace or return characters. This prevents unintended | ||
| 857 | header injection. | ||
| 858 | |||
| 859 | :param header: tuple, in the format (name, value). | ||
| 860 | """ | ||
| 861 | name, value = header | ||
| 862 | |||
| 863 | if isinstance(value, bytes): | ||
| 864 | pat = _CLEAN_HEADER_REGEX_BYTE | ||
| 865 | else: | ||
| 866 | pat = _CLEAN_HEADER_REGEX_STR | ||
| 867 | try: | ||
| 868 | if not pat.match(value): | ||
| 869 | raise InvalidHeader("Invalid return character or leading space in header: %s" % name) | ||
| 870 | except TypeError: | ||
| 871 | raise InvalidHeader("Value for header {%s: %s} must be of type str or " | ||
| 872 | "bytes, not %s" % (name, value, type(value))) | ||
| 873 | |||
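For example:

    from pip._vendor.requests.utils import check_header_validity
    from pip._vendor.requests.exceptions import InvalidHeader

    check_header_validity(('X-Token', 'abc123'))        # passes silently
    try:
        check_header_validity(('X-Token', 'bad\r\nvalue'))
    except InvalidHeader:
        pass                                             # CR/LF injection rejected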
| 874 | |||
| 875 | def urldefragauth(url): | ||
| 876 | """ | ||
| 877 | Given a url remove the fragment and the authentication part. | ||
| 878 | |||
| 879 | :rtype: str | ||
| 880 | """ | ||
| 881 | scheme, netloc, path, params, query, fragment = urlparse(url) | ||
| 882 | |||
| 883 | # see func:`prepend_scheme_if_needed` | ||
| 884 | if not netloc: | ||
| 885 | netloc, path = path, netloc | ||
| 886 | |||
| 887 | netloc = netloc.rsplit('@', 1)[-1] | ||
| 888 | |||
| 889 | return urlunparse((scheme, netloc, path, params, query, '')) | ||
| 890 | |||
| 891 | |||
| 892 | def rewind_body(prepared_request): | ||
| 893 | """Move file pointer back to its recorded starting position | ||
| 894 | so it can be read again on redirect. | ||
| 895 | """ | ||
| 896 | body_seek = getattr(prepared_request.body, 'seek', None) | ||
| 897 | if body_seek is not None and isinstance(prepared_request._body_position, integer_types): | ||
| 898 | try: | ||
| 899 | body_seek(prepared_request._body_position) | ||
| 900 | except (IOError, OSError): | ||
| 901 | raise UnrewindableBodyError("An error occurred when rewinding request " | ||
| 902 | "body for redirect.") | ||
| 903 | else: | ||
| 904 | raise UnrewindableBodyError("Unable to rewind request body for redirect.") | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/retrying.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/retrying.py new file mode 100644 index 0000000..f8d743b --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/retrying.py | |||
| @@ -0,0 +1,267 @@ | |||
| 1 | ## Copyright 2013-2014 Ray Holder | ||
| 2 | ## | ||
| 3 | ## Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 4 | ## you may not use this file except in compliance with the License. | ||
| 5 | ## You may obtain a copy of the License at | ||
| 6 | ## | ||
| 7 | ## http://www.apache.org/licenses/LICENSE-2.0 | ||
| 8 | ## | ||
| 9 | ## Unless required by applicable law or agreed to in writing, software | ||
| 10 | ## distributed under the License is distributed on an "AS IS" BASIS, | ||
| 11 | ## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 12 | ## See the License for the specific language governing permissions and | ||
| 13 | ## limitations under the License. | ||
| 14 | |||
| 15 | import random | ||
| 16 | from pip._vendor import six | ||
| 17 | import sys | ||
| 18 | import time | ||
| 19 | import traceback | ||
| 20 | |||
| 21 | |||
| 22 | # sys.maxint / 2, since Python 3.2 doesn't have a sys.maxint... | ||
| 23 | MAX_WAIT = 1073741823 | ||
| 24 | |||
| 25 | |||
| 26 | def retry(*dargs, **dkw): | ||
| 27 | """ | ||
| 28 | Decorator function that instantiates the Retrying object | ||
| 29 | @param *dargs: positional arguments passed to Retrying object | ||
| 30 | @param **dkw: keyword arguments passed to the Retrying object | ||
| 31 | """ | ||
| 32 | # support both @retry and @retry() as valid syntax | ||
| 33 | if len(dargs) == 1 and callable(dargs[0]): | ||
| 34 | def wrap_simple(f): | ||
| 35 | |||
| 36 | @six.wraps(f) | ||
| 37 | def wrapped_f(*args, **kw): | ||
| 38 | return Retrying().call(f, *args, **kw) | ||
| 39 | |||
| 40 | return wrapped_f | ||
| 41 | |||
| 42 | return wrap_simple(dargs[0]) | ||
| 43 | |||
| 44 | else: | ||
| 45 | def wrap(f): | ||
| 46 | |||
| 47 | @six.wraps(f) | ||
| 48 | def wrapped_f(*args, **kw): | ||
| 49 | return Retrying(*dargs, **dkw).call(f, *args, **kw) | ||
| 50 | |||
| 51 | return wrapped_f | ||
| 52 | |||
| 53 | return wrap | ||
| 54 | |||
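A small usage sketch of the decorator-with-arguments form (the function and its failure pattern are invented for the example):

    from pip._vendor.retrying import retry

    calls = []

    @retry(stop_max_attempt_number=3, wait_fixed=10)  # up to 3 attempts, 10 ms apart
    def flaky():
        calls.append(1)
        if len(calls) < 3:
            raise IOError('transient failure')
        return 'ok'

    assert flaky() == 'ok' and len(calls) == 3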
| 55 | |||
| 56 | class Retrying(object): | ||
| 57 | |||
| 58 | def __init__(self, | ||
| 59 | stop=None, wait=None, | ||
| 60 | stop_max_attempt_number=None, | ||
| 61 | stop_max_delay=None, | ||
| 62 | wait_fixed=None, | ||
| 63 | wait_random_min=None, wait_random_max=None, | ||
| 64 | wait_incrementing_start=None, wait_incrementing_increment=None, | ||
| 65 | wait_exponential_multiplier=None, wait_exponential_max=None, | ||
| 66 | retry_on_exception=None, | ||
| 67 | retry_on_result=None, | ||
| 68 | wrap_exception=False, | ||
| 69 | stop_func=None, | ||
| 70 | wait_func=None, | ||
| 71 | wait_jitter_max=None): | ||
| 72 | |||
| 73 | self._stop_max_attempt_number = 5 if stop_max_attempt_number is None else stop_max_attempt_number | ||
| 74 | self._stop_max_delay = 100 if stop_max_delay is None else stop_max_delay | ||
| 75 | self._wait_fixed = 1000 if wait_fixed is None else wait_fixed | ||
| 76 | self._wait_random_min = 0 if wait_random_min is None else wait_random_min | ||
| 77 | self._wait_random_max = 1000 if wait_random_max is None else wait_random_max | ||
| 78 | self._wait_incrementing_start = 0 if wait_incrementing_start is None else wait_incrementing_start | ||
| 79 | self._wait_incrementing_increment = 100 if wait_incrementing_increment is None else wait_incrementing_increment | ||
| 80 | self._wait_exponential_multiplier = 1 if wait_exponential_multiplier is None else wait_exponential_multiplier | ||
| 81 | self._wait_exponential_max = MAX_WAIT if wait_exponential_max is None else wait_exponential_max | ||
| 82 | self._wait_jitter_max = 0 if wait_jitter_max is None else wait_jitter_max | ||
| 83 | |||
| 84 | # TODO add chaining of stop behaviors | ||
| 85 | # stop behavior | ||
| 86 | stop_funcs = [] | ||
| 87 | if stop_max_attempt_number is not None: | ||
| 88 | stop_funcs.append(self.stop_after_attempt) | ||
| 89 | |||
| 90 | if stop_max_delay is not None: | ||
| 91 | stop_funcs.append(self.stop_after_delay) | ||
| 92 | |||
| 93 | if stop_func is not None: | ||
| 94 | self.stop = stop_func | ||
| 95 | |||
| 96 | elif stop is None: | ||
| 97 | self.stop = lambda attempts, delay: any(f(attempts, delay) for f in stop_funcs) | ||
| 98 | |||
| 99 | else: | ||
| 100 | self.stop = getattr(self, stop) | ||
| 101 | |||
| 102 | # TODO add chaining of wait behaviors | ||
| 103 | # wait behavior | ||
| 104 | wait_funcs = [lambda *args, **kwargs: 0] | ||
| 105 | if wait_fixed is not None: | ||
| 106 | wait_funcs.append(self.fixed_sleep) | ||
| 107 | |||
| 108 | if wait_random_min is not None or wait_random_max is not None: | ||
| 109 | wait_funcs.append(self.random_sleep) | ||
| 110 | |||
| 111 | if wait_incrementing_start is not None or wait_incrementing_increment is not None: | ||
| 112 | wait_funcs.append(self.incrementing_sleep) | ||
| 113 | |||
| 114 | if wait_exponential_multiplier is not None or wait_exponential_max is not None: | ||
| 115 | wait_funcs.append(self.exponential_sleep) | ||
| 116 | |||
| 117 | if wait_func is not None: | ||
| 118 | self.wait = wait_func | ||
| 119 | |||
| 120 | elif wait is None: | ||
| 121 | self.wait = lambda attempts, delay: max(f(attempts, delay) for f in wait_funcs) | ||
| 122 | |||
| 123 | else: | ||
| 124 | self.wait = getattr(self, wait) | ||
| 125 | |||
| 126 | # retry on exception filter | ||
| 127 | if retry_on_exception is None: | ||
| 128 | self._retry_on_exception = self.always_reject | ||
| 129 | else: | ||
| 130 | self._retry_on_exception = retry_on_exception | ||
| 131 | |||
| 132 | # TODO simplify retrying by Exception types | ||
| 133 | # retry on result filter | ||
| 134 | if retry_on_result is None: | ||
| 135 | self._retry_on_result = self.never_reject | ||
| 136 | else: | ||
| 137 | self._retry_on_result = retry_on_result | ||
| 138 | |||
| 139 | self._wrap_exception = wrap_exception | ||
| 140 | |||
| 141 | def stop_after_attempt(self, previous_attempt_number, delay_since_first_attempt_ms): | ||
| 142 | """Stop after the previous attempt >= stop_max_attempt_number.""" | ||
| 143 | return previous_attempt_number >= self._stop_max_attempt_number | ||
| 144 | |||
| 145 | def stop_after_delay(self, previous_attempt_number, delay_since_first_attempt_ms): | ||
| 146 | """Stop after the time from the first attempt >= stop_max_delay.""" | ||
| 147 | return delay_since_first_attempt_ms >= self._stop_max_delay | ||
| 148 | |||
| 149 | def no_sleep(self, previous_attempt_number, delay_since_first_attempt_ms): | ||
| 150 | """Don't sleep at all before retrying.""" | ||
| 151 | return 0 | ||
| 152 | |||
| 153 | def fixed_sleep(self, previous_attempt_number, delay_since_first_attempt_ms): | ||
| 154 | """Sleep a fixed amount of time between each retry.""" | ||
| 155 | return self._wait_fixed | ||
| 156 | |||
| 157 | def random_sleep(self, previous_attempt_number, delay_since_first_attempt_ms): | ||
| 158 | """Sleep a random amount of time between wait_random_min and wait_random_max""" | ||
| 159 | return random.randint(self._wait_random_min, self._wait_random_max) | ||
| 160 | |||
| 161 | def incrementing_sleep(self, previous_attempt_number, delay_since_first_attempt_ms): | ||
| 162 | """ | ||
| 163 | Sleep an incremental amount of time after each attempt, starting at | ||
| 164 | wait_incrementing_start and incrementing by wait_incrementing_increment | ||
| 165 | """ | ||
| 166 | result = self._wait_incrementing_start + (self._wait_incrementing_increment * (previous_attempt_number - 1)) | ||
| 167 | if result < 0: | ||
| 168 | result = 0 | ||
| 169 | return result | ||
| 170 | |||
| 171 | def exponential_sleep(self, previous_attempt_number, delay_since_first_attempt_ms): | ||
| 172 | exp = 2 ** previous_attempt_number | ||
| 173 | result = self._wait_exponential_multiplier * exp | ||
| 174 | if result > self._wait_exponential_max: | ||
| 175 | result = self._wait_exponential_max | ||
| 176 | if result < 0: | ||
| 177 | result = 0 | ||
| 178 | return result | ||
| 179 | |||
| 180 | def never_reject(self, result): | ||
| 181 | return False | ||
| 182 | |||
| 183 | def always_reject(self, result): | ||
| 184 | return True | ||
| 185 | |||
| 186 | def should_reject(self, attempt): | ||
| 187 | reject = False | ||
| 188 | if attempt.has_exception: | ||
| 189 | reject |= self._retry_on_exception(attempt.value[1]) | ||
| 190 | else: | ||
| 191 | reject |= self._retry_on_result(attempt.value) | ||
| 192 | |||
| 193 | return reject | ||
| 194 | |||
| 195 | def call(self, fn, *args, **kwargs): | ||
| 196 | start_time = int(round(time.time() * 1000)) | ||
| 197 | attempt_number = 1 | ||
| 198 | while True: | ||
| 199 | try: | ||
| 200 | attempt = Attempt(fn(*args, **kwargs), attempt_number, False) | ||
| 201 | except:  # intentionally bare: capture exc_info for any exception type | ||
| 202 | tb = sys.exc_info() | ||
| 203 | attempt = Attempt(tb, attempt_number, True) | ||
| 204 | |||
| 205 | if not self.should_reject(attempt): | ||
| 206 | return attempt.get(self._wrap_exception) | ||
| 207 | |||
| 208 | delay_since_first_attempt_ms = int(round(time.time() * 1000)) - start_time | ||
| 209 | if self.stop(attempt_number, delay_since_first_attempt_ms): | ||
| 210 | if not self._wrap_exception and attempt.has_exception: | ||
| 211 | # get() on an attempt with an exception should cause it to be raised, but raise just in case | ||
| 212 | raise attempt.get() | ||
| 213 | else: | ||
| 214 | raise RetryError(attempt) | ||
| 215 | else: | ||
| 216 | sleep = self.wait(attempt_number, delay_since_first_attempt_ms) | ||
| 217 | if self._wait_jitter_max: | ||
| 218 | jitter = random.random() * self._wait_jitter_max | ||
| 219 | sleep = sleep + max(0, jitter) | ||
| 220 | time.sleep(sleep / 1000.0) | ||
| 221 | |||
| 222 | attempt_number += 1 | ||
| 223 | |||
| 224 | |||
| 225 | class Attempt(object): | ||
| 226 | """ | ||
| 227 | An Attempt encapsulates a call to a target function that may end as a | ||
| 228 | normal return value from the function or an Exception depending on what | ||
| 229 | occurred during the execution. | ||
| 230 | """ | ||
| 231 | |||
| 232 | def __init__(self, value, attempt_number, has_exception): | ||
| 233 | self.value = value | ||
| 234 | self.attempt_number = attempt_number | ||
| 235 | self.has_exception = has_exception | ||
| 236 | |||
| 237 | def get(self, wrap_exception=False): | ||
| 238 | """ | ||
| 239 | Return the return value of this Attempt instance or raise an Exception. | ||
| 240 | If wrap_exception is true, this Attempt is wrapped inside of a | ||
| 241 | RetryError before being raised. | ||
| 242 | """ | ||
| 243 | if self.has_exception: | ||
| 244 | if wrap_exception: | ||
| 245 | raise RetryError(self) | ||
| 246 | else: | ||
| 247 | six.reraise(self.value[0], self.value[1], self.value[2]) | ||
| 248 | else: | ||
| 249 | return self.value | ||
| 250 | |||
| 251 | def __repr__(self): | ||
| 252 | if self.has_exception: | ||
| 253 | return "Attempts: {0}, Error:\n{1}".format(self.attempt_number, "".join(traceback.format_tb(self.value[2]))) | ||
| 254 | else: | ||
| 255 | return "Attempts: {0}, Value: {1}".format(self.attempt_number, self.value) | ||
| 256 | |||
| 257 | |||
| 258 | class RetryError(Exception): | ||
| 259 | """ | ||
| 260 | A RetryError encapsulates the last Attempt instance right before giving up. | ||
| 261 | """ | ||
| 262 | |||
| 263 | def __init__(self, last_attempt): | ||
| 264 | self.last_attempt = last_attempt | ||
| 265 | |||
| 266 | def __str__(self): | ||
| 267 | return "RetryError[{0}]".format(self.last_attempt) | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/six.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/six.py new file mode 100644 index 0000000..e36380b --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/six.py | |||
| @@ -0,0 +1,891 @@ | |||
| 1 | # Copyright (c) 2010-2017 Benjamin Peterson | ||
| 2 | # | ||
| 3 | # Permission is hereby granted, free of charge, to any person obtaining a copy | ||
| 4 | # of this software and associated documentation files (the "Software"), to deal | ||
| 5 | # in the Software without restriction, including without limitation the rights | ||
| 6 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | ||
| 7 | # copies of the Software, and to permit persons to whom the Software is | ||
| 8 | # furnished to do so, subject to the following conditions: | ||
| 9 | # | ||
| 10 | # The above copyright notice and this permission notice shall be included in all | ||
| 11 | # copies or substantial portions of the Software. | ||
| 12 | # | ||
| 13 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | ||
| 14 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | ||
| 15 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | ||
| 16 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | ||
| 17 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | ||
| 18 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE | ||
| 19 | # SOFTWARE. | ||
| 20 | |||
| 21 | """Utilities for writing code that runs on Python 2 and 3""" | ||
| 22 | |||
| 23 | from __future__ import absolute_import | ||
| 24 | |||
| 25 | import functools | ||
| 26 | import itertools | ||
| 27 | import operator | ||
| 28 | import sys | ||
| 29 | import types | ||
| 30 | |||
| 31 | __author__ = "Benjamin Peterson <benjamin@python.org>" | ||
| 32 | __version__ = "1.11.0" | ||
| 33 | |||
| 34 | |||
| 35 | # Useful for very coarse version differentiation. | ||
| 36 | PY2 = sys.version_info[0] == 2 | ||
| 37 | PY3 = sys.version_info[0] == 3 | ||
| 38 | PY34 = sys.version_info[0:2] >= (3, 4) | ||
| 39 | |||
| 40 | if PY3: | ||
| 41 | string_types = str, | ||
| 42 | integer_types = int, | ||
| 43 | class_types = type, | ||
| 44 | text_type = str | ||
| 45 | binary_type = bytes | ||
| 46 | |||
| 47 | MAXSIZE = sys.maxsize | ||
| 48 | else: | ||
| 49 | string_types = basestring, | ||
| 50 | integer_types = (int, long) | ||
| 51 | class_types = (type, types.ClassType) | ||
| 52 | text_type = unicode | ||
| 53 | binary_type = str | ||
| 54 | |||
| 55 | if sys.platform.startswith("java"): | ||
| 56 | # Jython always uses 32 bits. | ||
| 57 | MAXSIZE = int((1 << 31) - 1) | ||
| 58 | else: | ||
| 59 | # It's possible to have sizeof(long) != sizeof(Py_ssize_t). | ||
| 60 | class X(object): | ||
| 61 | |||
| 62 | def __len__(self): | ||
| 63 | return 1 << 31 | ||
| 64 | try: | ||
| 65 | len(X()) | ||
| 66 | except OverflowError: | ||
| 67 | # 32-bit | ||
| 68 | MAXSIZE = int((1 << 31) - 1) | ||
| 69 | else: | ||
| 70 | # 64-bit | ||
| 71 | MAXSIZE = int((1 << 63) - 1) | ||
| 72 | del X | ||
| 73 | |||
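The practical upshot is that version checks collapse into these constants, e.g.:

    from pip._vendor import six

    assert isinstance(u'text', six.text_type)
    assert isinstance(b'payload', six.binary_type)
    assert isinstance(42, six.integer_types)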
| 74 | |||
| 75 | def _add_doc(func, doc): | ||
| 76 | """Add documentation to a function.""" | ||
| 77 | func.__doc__ = doc | ||
| 78 | |||
| 79 | |||
| 80 | def _import_module(name): | ||
| 81 | """Import module, returning the module after the last dot.""" | ||
| 82 | __import__(name) | ||
| 83 | return sys.modules[name] | ||
| 84 | |||
| 85 | |||
| 86 | class _LazyDescr(object): | ||
| 87 | |||
| 88 | def __init__(self, name): | ||
| 89 | self.name = name | ||
| 90 | |||
| 91 | def __get__(self, obj, tp): | ||
| 92 | result = self._resolve() | ||
| 93 | setattr(obj, self.name, result) # Invokes __set__. | ||
| 94 | try: | ||
| 95 | # This is a bit ugly, but it avoids running this again by | ||
| 96 | # removing this descriptor. | ||
| 97 | delattr(obj.__class__, self.name) | ||
| 98 | except AttributeError: | ||
| 99 | pass | ||
| 100 | return result | ||
| 101 | |||
| 102 | |||
| 103 | class MovedModule(_LazyDescr): | ||
| 104 | |||
| 105 | def __init__(self, name, old, new=None): | ||
| 106 | super(MovedModule, self).__init__(name) | ||
| 107 | if PY3: | ||
| 108 | if new is None: | ||
| 109 | new = name | ||
| 110 | self.mod = new | ||
| 111 | else: | ||
| 112 | self.mod = old | ||
| 113 | |||
| 114 | def _resolve(self): | ||
| 115 | return _import_module(self.mod) | ||
| 116 | |||
| 117 | def __getattr__(self, attr): | ||
| 118 | _module = self._resolve() | ||
| 119 | value = getattr(_module, attr) | ||
| 120 | setattr(self, attr, value) | ||
| 121 | return value | ||
| 122 | |||
| 123 | |||
| 124 | class _LazyModule(types.ModuleType): | ||
| 125 | |||
| 126 | def __init__(self, name): | ||
| 127 | super(_LazyModule, self).__init__(name) | ||
| 128 | self.__doc__ = self.__class__.__doc__ | ||
| 129 | |||
| 130 | def __dir__(self): | ||
| 131 | attrs = ["__doc__", "__name__"] | ||
| 132 | attrs += [attr.name for attr in self._moved_attributes] | ||
| 133 | return attrs | ||
| 134 | |||
| 135 | # Subclasses should override this | ||
| 136 | _moved_attributes = [] | ||
| 137 | |||
| 138 | |||
| 139 | class MovedAttribute(_LazyDescr): | ||
| 140 | |||
| 141 | def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): | ||
| 142 | super(MovedAttribute, self).__init__(name) | ||
| 143 | if PY3: | ||
| 144 | if new_mod is None: | ||
| 145 | new_mod = name | ||
| 146 | self.mod = new_mod | ||
| 147 | if new_attr is None: | ||
| 148 | if old_attr is None: | ||
| 149 | new_attr = name | ||
| 150 | else: | ||
| 151 | new_attr = old_attr | ||
| 152 | self.attr = new_attr | ||
| 153 | else: | ||
| 154 | self.mod = old_mod | ||
| 155 | if old_attr is None: | ||
| 156 | old_attr = name | ||
| 157 | self.attr = old_attr | ||
| 158 | |||
| 159 | def _resolve(self): | ||
| 160 | module = _import_module(self.mod) | ||
| 161 | return getattr(module, self.attr) | ||
| 162 | |||
| 163 | |||
| 164 | class _SixMetaPathImporter(object): | ||
| 165 | |||
| 166 | """ | ||
| 167 | A meta path importer to import six.moves and its submodules. | ||
| 168 | |||
| 169 | This class implements a PEP 302 finder and loader. It should be compatible | ||
| 170 | with Python 2.5 and all existing versions of Python 3 | ||
| 171 | """ | ||
| 172 | |||
| 173 | def __init__(self, six_module_name): | ||
| 174 | self.name = six_module_name | ||
| 175 | self.known_modules = {} | ||
| 176 | |||
| 177 | def _add_module(self, mod, *fullnames): | ||
| 178 | for fullname in fullnames: | ||
| 179 | self.known_modules[self.name + "." + fullname] = mod | ||
| 180 | |||
| 181 | def _get_module(self, fullname): | ||
| 182 | return self.known_modules[self.name + "." + fullname] | ||
| 183 | |||
| 184 | def find_module(self, fullname, path=None): | ||
| 185 | if fullname in self.known_modules: | ||
| 186 | return self | ||
| 187 | return None | ||
| 188 | |||
| 189 | def __get_module(self, fullname): | ||
| 190 | try: | ||
| 191 | return self.known_modules[fullname] | ||
| 192 | except KeyError: | ||
| 193 | raise ImportError("This loader does not know module " + fullname) | ||
| 194 | |||
| 195 | def load_module(self, fullname): | ||
| 196 | try: | ||
| 197 | # in case of a reload | ||
| 198 | return sys.modules[fullname] | ||
| 199 | except KeyError: | ||
| 200 | pass | ||
| 201 | mod = self.__get_module(fullname) | ||
| 202 | if isinstance(mod, MovedModule): | ||
| 203 | mod = mod._resolve() | ||
| 204 | else: | ||
| 205 | mod.__loader__ = self | ||
| 206 | sys.modules[fullname] = mod | ||
| 207 | return mod | ||
| 208 | |||
| 209 | def is_package(self, fullname): | ||
| 210 | """ | ||
| 211 | Return true if the named module is a package. | ||
| 212 | |||
| 213 | We need this method to get correct spec objects with | ||
| 214 | Python 3.4 (see PEP451) | ||
| 215 | """ | ||
| 216 | return hasattr(self.__get_module(fullname), "__path__") | ||
| 217 | |||
| 218 | def get_code(self, fullname): | ||
| 219 | """Return None | ||
| 220 | |||
| 221 | Required, if is_package is implemented""" | ||
| 222 | self.__get_module(fullname) # eventually raises ImportError | ||
| 223 | return None | ||
| 224 | get_source = get_code # same as get_code | ||
| 225 | |||
| 226 | _importer = _SixMetaPathImporter(__name__) | ||
| 227 | |||
| 228 | |||
| 229 | class _MovedItems(_LazyModule): | ||
| 230 | |||
| 231 | """Lazy loading of moved objects""" | ||
| 232 | __path__ = [] # mark as package | ||
| 233 | |||
| 234 | |||
| 235 | _moved_attributes = [ | ||
| 236 | MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), | ||
| 237 | MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), | ||
| 238 | MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), | ||
| 239 | MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), | ||
| 240 | MovedAttribute("intern", "__builtin__", "sys"), | ||
| 241 | MovedAttribute("map", "itertools", "builtins", "imap", "map"), | ||
| 242 | MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), | ||
| 243 | MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), | ||
| 244 | MovedAttribute("getoutput", "commands", "subprocess"), | ||
| 245 | MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), | ||
| 246 | MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), | ||
| 247 | MovedAttribute("reduce", "__builtin__", "functools"), | ||
| 248 | MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), | ||
| 249 | MovedAttribute("StringIO", "StringIO", "io"), | ||
| 250 | MovedAttribute("UserDict", "UserDict", "collections"), | ||
| 251 | MovedAttribute("UserList", "UserList", "collections"), | ||
| 252 | MovedAttribute("UserString", "UserString", "collections"), | ||
| 253 | MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), | ||
| 254 | MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), | ||
| 255 | MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), | ||
| 256 | MovedModule("builtins", "__builtin__"), | ||
| 257 | MovedModule("configparser", "ConfigParser"), | ||
| 258 | MovedModule("copyreg", "copy_reg"), | ||
| 259 | MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), | ||
| 260 | MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"), | ||
| 261 | MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), | ||
| 262 | MovedModule("http_cookies", "Cookie", "http.cookies"), | ||
| 263 | MovedModule("html_entities", "htmlentitydefs", "html.entities"), | ||
| 264 | MovedModule("html_parser", "HTMLParser", "html.parser"), | ||
| 265 | MovedModule("http_client", "httplib", "http.client"), | ||
| 266 | MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), | ||
| 267 | MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"), | ||
| 268 | MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), | ||
| 269 | MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), | ||
| 270 | MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), | ||
| 271 | MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), | ||
| 272 | MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), | ||
| 273 | MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), | ||
| 274 | MovedModule("cPickle", "cPickle", "pickle"), | ||
| 275 | MovedModule("queue", "Queue"), | ||
| 276 | MovedModule("reprlib", "repr"), | ||
| 277 | MovedModule("socketserver", "SocketServer"), | ||
| 278 | MovedModule("_thread", "thread", "_thread"), | ||
| 279 | MovedModule("tkinter", "Tkinter"), | ||
| 280 | MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), | ||
| 281 | MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), | ||
| 282 | MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), | ||
| 283 | MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), | ||
| 284 | MovedModule("tkinter_tix", "Tix", "tkinter.tix"), | ||
| 285 | MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), | ||
| 286 | MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), | ||
| 287 | MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), | ||
| 288 | MovedModule("tkinter_colorchooser", "tkColorChooser", | ||
| 289 | "tkinter.colorchooser"), | ||
| 290 | MovedModule("tkinter_commondialog", "tkCommonDialog", | ||
| 291 | "tkinter.commondialog"), | ||
| 292 | MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), | ||
| 293 | MovedModule("tkinter_font", "tkFont", "tkinter.font"), | ||
| 294 | MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), | ||
| 295 | MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", | ||
| 296 | "tkinter.simpledialog"), | ||
| 297 | MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), | ||
| 298 | MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), | ||
| 299 | MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), | ||
| 300 | MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), | ||
| 301 | MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), | ||
| 302 | MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), | ||
| 303 | ] | ||
| 304 | # Add windows specific modules. | ||
| 305 | if sys.platform == "win32": | ||
| 306 | _moved_attributes += [ | ||
| 307 | MovedModule("winreg", "_winreg"), | ||
| 308 | ] | ||
| 309 | |||
| 310 | for attr in _moved_attributes: | ||
| 311 | setattr(_MovedItems, attr.name, attr) | ||
| 312 | if isinstance(attr, MovedModule): | ||
| 313 | _importer._add_module(attr, "moves." + attr.name) | ||
| 314 | del attr | ||
| 315 | |||
| 316 | _MovedItems._moved_attributes = _moved_attributes | ||
| 317 | |||
| 318 | moves = _MovedItems(__name__ + ".moves") | ||
| 319 | _importer._add_module(moves, "moves") | ||
| 320 | |||
| 321 | |||
| 322 | class Module_six_moves_urllib_parse(_LazyModule): | ||
| 323 | |||
| 324 | """Lazy loading of moved objects in six.moves.urllib_parse""" | ||
| 325 | |||
| 326 | |||
| 327 | _urllib_parse_moved_attributes = [ | ||
| 328 | MovedAttribute("ParseResult", "urlparse", "urllib.parse"), | ||
| 329 | MovedAttribute("SplitResult", "urlparse", "urllib.parse"), | ||
| 330 | MovedAttribute("parse_qs", "urlparse", "urllib.parse"), | ||
| 331 | MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), | ||
| 332 | MovedAttribute("urldefrag", "urlparse", "urllib.parse"), | ||
| 333 | MovedAttribute("urljoin", "urlparse", "urllib.parse"), | ||
| 334 | MovedAttribute("urlparse", "urlparse", "urllib.parse"), | ||
| 335 | MovedAttribute("urlsplit", "urlparse", "urllib.parse"), | ||
| 336 | MovedAttribute("urlunparse", "urlparse", "urllib.parse"), | ||
| 337 | MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), | ||
| 338 | MovedAttribute("quote", "urllib", "urllib.parse"), | ||
| 339 | MovedAttribute("quote_plus", "urllib", "urllib.parse"), | ||
| 340 | MovedAttribute("unquote", "urllib", "urllib.parse"), | ||
| 341 | MovedAttribute("unquote_plus", "urllib", "urllib.parse"), | ||
| 342 | MovedAttribute("unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"), | ||
| 343 | MovedAttribute("urlencode", "urllib", "urllib.parse"), | ||
| 344 | MovedAttribute("splitquery", "urllib", "urllib.parse"), | ||
| 345 | MovedAttribute("splittag", "urllib", "urllib.parse"), | ||
| 346 | MovedAttribute("splituser", "urllib", "urllib.parse"), | ||
| 347 | MovedAttribute("splitvalue", "urllib", "urllib.parse"), | ||
| 348 | MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), | ||
| 349 | MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), | ||
| 350 | MovedAttribute("uses_params", "urlparse", "urllib.parse"), | ||
| 351 | MovedAttribute("uses_query", "urlparse", "urllib.parse"), | ||
| 352 | MovedAttribute("uses_relative", "urlparse", "urllib.parse"), | ||
| 353 | ] | ||
| 354 | for attr in _urllib_parse_moved_attributes: | ||
| 355 | setattr(Module_six_moves_urllib_parse, attr.name, attr) | ||
| 356 | del attr | ||
| 357 | |||
| 358 | Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes | ||
| 359 | |||
| 360 | _importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), | ||
| 361 | "moves.urllib_parse", "moves.urllib.parse") | ||
| 362 | |||
| 363 | |||
| 364 | class Module_six_moves_urllib_error(_LazyModule): | ||
| 365 | |||
| 366 | """Lazy loading of moved objects in six.moves.urllib_error""" | ||
| 367 | |||
| 368 | |||
| 369 | _urllib_error_moved_attributes = [ | ||
| 370 | MovedAttribute("URLError", "urllib2", "urllib.error"), | ||
| 371 | MovedAttribute("HTTPError", "urllib2", "urllib.error"), | ||
| 372 | MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), | ||
| 373 | ] | ||
| 374 | for attr in _urllib_error_moved_attributes: | ||
| 375 | setattr(Module_six_moves_urllib_error, attr.name, attr) | ||
| 376 | del attr | ||
| 377 | |||
| 378 | Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes | ||
| 379 | |||
| 380 | _importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), | ||
| 381 | "moves.urllib_error", "moves.urllib.error") | ||
| 382 | |||
| 383 | |||
| 384 | class Module_six_moves_urllib_request(_LazyModule): | ||
| 385 | |||
| 386 | """Lazy loading of moved objects in six.moves.urllib_request""" | ||
| 387 | |||
| 388 | |||
| 389 | _urllib_request_moved_attributes = [ | ||
| 390 | MovedAttribute("urlopen", "urllib2", "urllib.request"), | ||
| 391 | MovedAttribute("install_opener", "urllib2", "urllib.request"), | ||
| 392 | MovedAttribute("build_opener", "urllib2", "urllib.request"), | ||
| 393 | MovedAttribute("pathname2url", "urllib", "urllib.request"), | ||
| 394 | MovedAttribute("url2pathname", "urllib", "urllib.request"), | ||
| 395 | MovedAttribute("getproxies", "urllib", "urllib.request"), | ||
| 396 | MovedAttribute("Request", "urllib2", "urllib.request"), | ||
| 397 | MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), | ||
| 398 | MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), | ||
| 399 | MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), | ||
| 400 | MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"), | ||
| 401 | MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), | ||
| 402 | MovedAttribute("BaseHandler", "urllib2", "urllib.request"), | ||
| 403 | MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), | ||
| 404 | MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), | ||
| 405 | MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), | ||
| 406 | MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), | ||
| 407 | MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), | ||
| 408 | MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), | ||
| 409 | MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), | ||
| 410 | MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), | ||
| 411 | MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), | ||
| 412 | MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), | ||
| 413 | MovedAttribute("FileHandler", "urllib2", "urllib.request"), | ||
| 414 | MovedAttribute("FTPHandler", "urllib2", "urllib.request"), | ||
| 415 | MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), | ||
| 416 | MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), | ||
| 417 | MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), | ||
| 418 | MovedAttribute("urlretrieve", "urllib", "urllib.request"), | ||
| 419 | MovedAttribute("urlcleanup", "urllib", "urllib.request"), | ||
| 420 | MovedAttribute("URLopener", "urllib", "urllib.request"), | ||
| 421 | MovedAttribute("FancyURLopener", "urllib", "urllib.request"), | ||
| 422 | MovedAttribute("proxy_bypass", "urllib", "urllib.request"), | ||
| 423 | MovedAttribute("parse_http_list", "urllib2", "urllib.request"), | ||
| 424 | MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"), | ||
| 425 | ] | ||
| 426 | for attr in _urllib_request_moved_attributes: | ||
| 427 | setattr(Module_six_moves_urllib_request, attr.name, attr) | ||
| 428 | del attr | ||
| 429 | |||
| 430 | Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes | ||
| 431 | |||
| 432 | _importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), | ||
| 433 | "moves.urllib_request", "moves.urllib.request") | ||
| 434 | |||
| 435 | |||
| 436 | class Module_six_moves_urllib_response(_LazyModule): | ||
| 437 | |||
| 438 | """Lazy loading of moved objects in six.moves.urllib_response""" | ||
| 439 | |||
| 440 | |||
| 441 | _urllib_response_moved_attributes = [ | ||
| 442 | MovedAttribute("addbase", "urllib", "urllib.response"), | ||
| 443 | MovedAttribute("addclosehook", "urllib", "urllib.response"), | ||
| 444 | MovedAttribute("addinfo", "urllib", "urllib.response"), | ||
| 445 | MovedAttribute("addinfourl", "urllib", "urllib.response"), | ||
| 446 | ] | ||
| 447 | for attr in _urllib_response_moved_attributes: | ||
| 448 | setattr(Module_six_moves_urllib_response, attr.name, attr) | ||
| 449 | del attr | ||
| 450 | |||
| 451 | Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes | ||
| 452 | |||
| 453 | _importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), | ||
| 454 | "moves.urllib_response", "moves.urllib.response") | ||
| 455 | |||
| 456 | |||
| 457 | class Module_six_moves_urllib_robotparser(_LazyModule): | ||
| 458 | |||
| 459 | """Lazy loading of moved objects in six.moves.urllib_robotparser""" | ||
| 460 | |||
| 461 | |||
| 462 | _urllib_robotparser_moved_attributes = [ | ||
| 463 | MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), | ||
| 464 | ] | ||
| 465 | for attr in _urllib_robotparser_moved_attributes: | ||
| 466 | setattr(Module_six_moves_urllib_robotparser, attr.name, attr) | ||
| 467 | del attr | ||
| 468 | |||
| 469 | Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes | ||
| 470 | |||
| 471 | _importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), | ||
| 472 | "moves.urllib_robotparser", "moves.urllib.robotparser") | ||
| 473 | |||
| 474 | |||
| 475 | class Module_six_moves_urllib(types.ModuleType): | ||
| 476 | |||
| 477 | """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" | ||
| 478 | __path__ = [] # mark as package | ||
| 479 | parse = _importer._get_module("moves.urllib_parse") | ||
| 480 | error = _importer._get_module("moves.urllib_error") | ||
| 481 | request = _importer._get_module("moves.urllib_request") | ||
| 482 | response = _importer._get_module("moves.urllib_response") | ||
| 483 | robotparser = _importer._get_module("moves.urllib_robotparser") | ||
| 484 | |||
| 485 | def __dir__(self): | ||
| 486 | return ['parse', 'error', 'request', 'response', 'robotparser'] | ||
| 487 | |||
| 488 | _importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), | ||
| 489 | "moves.urllib") | ||
| 490 | |||
| 491 | |||
| 492 | def add_move(move): | ||
| 493 | """Add an item to six.moves.""" | ||
| 494 | setattr(_MovedItems, move.name, move) | ||
| 495 | |||
| 496 | |||
| 497 | def remove_move(name): | ||
| 498 | """Remove item from six.moves.""" | ||
| 499 | try: | ||
| 500 | delattr(_MovedItems, name) | ||
| 501 | except AttributeError: | ||
| 502 | try: | ||
| 503 | del moves.__dict__[name] | ||
| 504 | except KeyError: | ||
| 505 | raise AttributeError("no such move, %r" % (name,)) | ||
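A quick, hedged illustration of how the moves registry above behaves at runtime. This sketch assumes the standalone `six` distribution is installed; the vendored copy under `pip._vendor.six` behaves identically:

    import six

    # Attribute access on six.moves resolves MovedAttribute entries lazily;
    # on Python 3, "range" maps straight to the builtin.
    assert six.moves.range is range

    # add_move/remove_move manage custom entries in the registry.
    six.add_move(six.MovedModule("mock", "mock", "unittest.mock"))
    from six.moves import mock  # now resolvable (Python 3: unittest.mock)
    six.remove_move("mock")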
| 506 | |||
| 507 | |||
| 508 | if PY3: | ||
| 509 | _meth_func = "__func__" | ||
| 510 | _meth_self = "__self__" | ||
| 511 | |||
| 512 | _func_closure = "__closure__" | ||
| 513 | _func_code = "__code__" | ||
| 514 | _func_defaults = "__defaults__" | ||
| 515 | _func_globals = "__globals__" | ||
| 516 | else: | ||
| 517 | _meth_func = "im_func" | ||
| 518 | _meth_self = "im_self" | ||
| 519 | |||
| 520 | _func_closure = "func_closure" | ||
| 521 | _func_code = "func_code" | ||
| 522 | _func_defaults = "func_defaults" | ||
| 523 | _func_globals = "func_globals" | ||
| 524 | |||
| 525 | |||
| 526 | try: | ||
| 527 | advance_iterator = next | ||
| 528 | except NameError: | ||
| 529 | def advance_iterator(it): | ||
| 530 | return it.next() | ||
| 531 | next = advance_iterator | ||
| 532 | |||
| 533 | |||
| 534 | try: | ||
| 535 | callable = callable | ||
| 536 | except NameError: | ||
| 537 | def callable(obj): | ||
| 538 | return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) | ||
| 539 | |||
| 540 | |||
| 541 | if PY3: | ||
| 542 | def get_unbound_function(unbound): | ||
| 543 | return unbound | ||
| 544 | |||
| 545 | create_bound_method = types.MethodType | ||
| 546 | |||
| 547 | def create_unbound_method(func, cls): | ||
| 548 | return func | ||
| 549 | |||
| 550 | Iterator = object | ||
| 551 | else: | ||
| 552 | def get_unbound_function(unbound): | ||
| 553 | return unbound.im_func | ||
| 554 | |||
| 555 | def create_bound_method(func, obj): | ||
| 556 | return types.MethodType(func, obj, obj.__class__) | ||
| 557 | |||
| 558 | def create_unbound_method(func, cls): | ||
| 559 | return types.MethodType(func, None, cls) | ||
| 560 | |||
| 561 | class Iterator(object): | ||
| 562 | |||
| 563 | def next(self): | ||
| 564 | return type(self).__next__(self) | ||
| 565 | |||
| 566 | callable = callable | ||
| 567 | _add_doc(get_unbound_function, | ||
| 568 | """Get the function out of a possibly unbound function""") | ||
| 569 | |||
| 570 | |||
| 571 | get_method_function = operator.attrgetter(_meth_func) | ||
| 572 | get_method_self = operator.attrgetter(_meth_self) | ||
| 573 | get_function_closure = operator.attrgetter(_func_closure) | ||
| 574 | get_function_code = operator.attrgetter(_func_code) | ||
| 575 | get_function_defaults = operator.attrgetter(_func_defaults) | ||
| 576 | get_function_globals = operator.attrgetter(_func_globals) | ||
| 577 | |||
| 578 | |||
| 579 | if PY3: | ||
| 580 | def iterkeys(d, **kw): | ||
| 581 | return iter(d.keys(**kw)) | ||
| 582 | |||
| 583 | def itervalues(d, **kw): | ||
| 584 | return iter(d.values(**kw)) | ||
| 585 | |||
| 586 | def iteritems(d, **kw): | ||
| 587 | return iter(d.items(**kw)) | ||
| 588 | |||
| 589 | def iterlists(d, **kw): | ||
| 590 | return iter(d.lists(**kw)) | ||
| 591 | |||
| 592 | viewkeys = operator.methodcaller("keys") | ||
| 593 | |||
| 594 | viewvalues = operator.methodcaller("values") | ||
| 595 | |||
| 596 | viewitems = operator.methodcaller("items") | ||
| 597 | else: | ||
| 598 | def iterkeys(d, **kw): | ||
| 599 | return d.iterkeys(**kw) | ||
| 600 | |||
| 601 | def itervalues(d, **kw): | ||
| 602 | return d.itervalues(**kw) | ||
| 603 | |||
| 604 | def iteritems(d, **kw): | ||
| 605 | return d.iteritems(**kw) | ||
| 606 | |||
| 607 | def iterlists(d, **kw): | ||
| 608 | return d.iterlists(**kw) | ||
| 609 | |||
| 610 | viewkeys = operator.methodcaller("viewkeys") | ||
| 611 | |||
| 612 | viewvalues = operator.methodcaller("viewvalues") | ||
| 613 | |||
| 614 | viewitems = operator.methodcaller("viewitems") | ||
| 615 | |||
| 616 | _add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") | ||
| 617 | _add_doc(itervalues, "Return an iterator over the values of a dictionary.") | ||
| 618 | _add_doc(iteritems, | ||
| 619 | "Return an iterator over the (key, value) pairs of a dictionary.") | ||
| 620 | _add_doc(iterlists, | ||
| 621 | "Return an iterator over the (key, [values]) pairs of a dictionary.") | ||
| 622 | |||
| 623 | |||
| 624 | if PY3: | ||
| 625 | def b(s): | ||
| 626 | return s.encode("latin-1") | ||
| 627 | |||
| 628 | def u(s): | ||
| 629 | return s | ||
| 630 | unichr = chr | ||
| 631 | import struct | ||
| 632 | int2byte = struct.Struct(">B").pack | ||
| 633 | del struct | ||
| 634 | byte2int = operator.itemgetter(0) | ||
| 635 | indexbytes = operator.getitem | ||
| 636 | iterbytes = iter | ||
| 637 | import io | ||
| 638 | StringIO = io.StringIO | ||
| 639 | BytesIO = io.BytesIO | ||
| 640 | _assertCountEqual = "assertCountEqual" | ||
| 641 | if sys.version_info[1] <= 1: | ||
| 642 | _assertRaisesRegex = "assertRaisesRegexp" | ||
| 643 | _assertRegex = "assertRegexpMatches" | ||
| 644 | else: | ||
| 645 | _assertRaisesRegex = "assertRaisesRegex" | ||
| 646 | _assertRegex = "assertRegex" | ||
| 647 | else: | ||
| 648 | def b(s): | ||
| 649 | return s | ||
| 650 | # Workaround for standalone backslash | ||
| 651 | |||
| 652 | def u(s): | ||
| 653 | return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") | ||
| 654 | unichr = unichr | ||
| 655 | int2byte = chr | ||
| 656 | |||
| 657 | def byte2int(bs): | ||
| 658 | return ord(bs[0]) | ||
| 659 | |||
| 660 | def indexbytes(buf, i): | ||
| 661 | return ord(buf[i]) | ||
| 662 | iterbytes = functools.partial(itertools.imap, ord) | ||
| 663 | import StringIO | ||
| 664 | StringIO = BytesIO = StringIO.StringIO | ||
| 665 | _assertCountEqual = "assertItemsEqual" | ||
| 666 | _assertRaisesRegex = "assertRaisesRegexp" | ||
| 667 | _assertRegex = "assertRegexpMatches" | ||
| 668 | _add_doc(b, """Byte literal""") | ||
| 669 | _add_doc(u, """Text literal""") | ||
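A short sketch of the byte/text helpers defined above (standalone `six` assumed); they let a single source file express literals and byte-level indexing uniformly across Python 2 and 3:

    import six

    assert six.b("abc") == b"abc"        # byte literal (latin-1 encode on Py3)
    assert six.u("abc") == u"abc"        # text literal (identity on Py3)
    assert six.int2byte(65) == b"A"      # one int -> one byte
    assert six.byte2int(b"A") == 65
    assert six.indexbytes(b"ABC", 1) == 66
    assert list(six.iterbytes(b"AB")) == [65, 66]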
| 670 | |||
| 671 | |||
| 672 | def assertCountEqual(self, *args, **kwargs): | ||
| 673 | return getattr(self, _assertCountEqual)(*args, **kwargs) | ||
| 674 | |||
| 675 | |||
| 676 | def assertRaisesRegex(self, *args, **kwargs): | ||
| 677 | return getattr(self, _assertRaisesRegex)(*args, **kwargs) | ||
| 678 | |||
| 679 | |||
| 680 | def assertRegex(self, *args, **kwargs): | ||
| 681 | return getattr(self, _assertRegex)(*args, **kwargs) | ||
| 682 | |||
| 683 | |||
| 684 | if PY3: | ||
| 685 | exec_ = getattr(moves.builtins, "exec") | ||
| 686 | |||
| 687 | def reraise(tp, value, tb=None): | ||
| 688 | try: | ||
| 689 | if value is None: | ||
| 690 | value = tp() | ||
| 691 | if value.__traceback__ is not tb: | ||
| 692 | raise value.with_traceback(tb) | ||
| 693 | raise value | ||
| 694 | finally: | ||
| 695 | value = None | ||
| 696 | tb = None | ||
| 697 | |||
| 698 | else: | ||
| 699 | def exec_(_code_, _globs_=None, _locs_=None): | ||
| 700 | """Execute code in a namespace.""" | ||
| 701 | if _globs_ is None: | ||
| 702 | frame = sys._getframe(1) | ||
| 703 | _globs_ = frame.f_globals | ||
| 704 | if _locs_ is None: | ||
| 705 | _locs_ = frame.f_locals | ||
| 706 | del frame | ||
| 707 | elif _locs_ is None: | ||
| 708 | _locs_ = _globs_ | ||
| 709 | exec("""exec _code_ in _globs_, _locs_""") | ||
| 710 | |||
| 711 | exec_("""def reraise(tp, value, tb=None): | ||
| 712 | try: | ||
| 713 | raise tp, value, tb | ||
| 714 | finally: | ||
| 715 | tb = None | ||
| 716 | """) | ||
| 717 | |||
| 718 | |||
| 719 | if sys.version_info[:2] == (3, 2): | ||
| 720 | exec_("""def raise_from(value, from_value): | ||
| 721 | try: | ||
| 722 | if from_value is None: | ||
| 723 | raise value | ||
| 724 | raise value from from_value | ||
| 725 | finally: | ||
| 726 | value = None | ||
| 727 | """) | ||
| 728 | elif sys.version_info[:2] > (3, 2): | ||
| 729 | exec_("""def raise_from(value, from_value): | ||
| 730 | try: | ||
| 731 | raise value from from_value | ||
| 732 | finally: | ||
| 733 | value = None | ||
| 734 | """) | ||
| 735 | else: | ||
| 736 | def raise_from(value, from_value): | ||
| 737 | raise value | ||
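reraise and raise_from paper over the incompatible raise syntaxes of the two major versions; a hedged usage sketch (standalone `six` assumed):

    import sys
    import six

    try:
        try:
            {}['missing']
        except KeyError:
            # Re-raise the active exception with its original traceback.
            six.reraise(*sys.exc_info())
    except KeyError:
        pass

    try:
        try:
            int("not a number")
        except ValueError as exc:
            # Equivalent to `raise ... from exc` on Python 3.
            six.raise_from(RuntimeError("parse failed"), exc)
    except RuntimeError as exc:
        assert isinstance(exc.__cause__, ValueError)  # chained (Python 3)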
| 738 | |||
| 739 | |||
| 740 | print_ = getattr(moves.builtins, "print", None) | ||
| 741 | if print_ is None: | ||
| 742 | def print_(*args, **kwargs): | ||
| 743 | """The new-style print function for Python 2.4 and 2.5.""" | ||
| 744 | fp = kwargs.pop("file", sys.stdout) | ||
| 745 | if fp is None: | ||
| 746 | return | ||
| 747 | |||
| 748 | def write(data): | ||
| 749 | if not isinstance(data, basestring): | ||
| 750 | data = str(data) | ||
| 751 | # If the file has an encoding, encode unicode with it. | ||
| 752 | if (isinstance(fp, file) and | ||
| 753 | isinstance(data, unicode) and | ||
| 754 | fp.encoding is not None): | ||
| 755 | errors = getattr(fp, "errors", None) | ||
| 756 | if errors is None: | ||
| 757 | errors = "strict" | ||
| 758 | data = data.encode(fp.encoding, errors) | ||
| 759 | fp.write(data) | ||
| 760 | want_unicode = False | ||
| 761 | sep = kwargs.pop("sep", None) | ||
| 762 | if sep is not None: | ||
| 763 | if isinstance(sep, unicode): | ||
| 764 | want_unicode = True | ||
| 765 | elif not isinstance(sep, str): | ||
| 766 | raise TypeError("sep must be None or a string") | ||
| 767 | end = kwargs.pop("end", None) | ||
| 768 | if end is not None: | ||
| 769 | if isinstance(end, unicode): | ||
| 770 | want_unicode = True | ||
| 771 | elif not isinstance(end, str): | ||
| 772 | raise TypeError("end must be None or a string") | ||
| 773 | if kwargs: | ||
| 774 | raise TypeError("invalid keyword arguments to print()") | ||
| 775 | if not want_unicode: | ||
| 776 | for arg in args: | ||
| 777 | if isinstance(arg, unicode): | ||
| 778 | want_unicode = True | ||
| 779 | break | ||
| 780 | if want_unicode: | ||
| 781 | newline = unicode("\n") | ||
| 782 | space = unicode(" ") | ||
| 783 | else: | ||
| 784 | newline = "\n" | ||
| 785 | space = " " | ||
| 786 | if sep is None: | ||
| 787 | sep = space | ||
| 788 | if end is None: | ||
| 789 | end = newline | ||
| 790 | for i, arg in enumerate(args): | ||
| 791 | if i: | ||
| 792 | write(sep) | ||
| 793 | write(arg) | ||
| 794 | write(end) | ||
| 795 | if sys.version_info[:2] < (3, 3): | ||
| 796 | _print = print_ | ||
| 797 | |||
| 798 | def print_(*args, **kwargs): | ||
| 799 | fp = kwargs.get("file", sys.stdout) | ||
| 800 | flush = kwargs.pop("flush", False) | ||
| 801 | _print(*args, **kwargs) | ||
| 802 | if flush and fp is not None: | ||
| 803 | fp.flush() | ||
| 804 | |||
| 805 | _add_doc(reraise, """Reraise an exception.""") | ||
| 806 | |||
| 807 | if sys.version_info[0:2] < (3, 4): | ||
| 808 | def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, | ||
| 809 | updated=functools.WRAPPER_UPDATES): | ||
| 810 | def wrapper(f): | ||
| 811 | f = functools.wraps(wrapped, assigned, updated)(f) | ||
| 812 | f.__wrapped__ = wrapped | ||
| 813 | return f | ||
| 814 | return wrapper | ||
| 815 | else: | ||
| 816 | wraps = functools.wraps | ||
| 817 | |||
| 818 | |||
| 819 | def with_metaclass(meta, *bases): | ||
| 820 | """Create a base class with a metaclass.""" | ||
| 821 | # This requires a bit of explanation: the basic idea is to make a dummy | ||
| 822 | # metaclass for one level of class instantiation that replaces itself with | ||
| 823 | # the actual metaclass. | ||
| 824 | class metaclass(type): | ||
| 825 | |||
| 826 | def __new__(cls, name, this_bases, d): | ||
| 827 | return meta(name, bases, d) | ||
| 828 | |||
| 829 | @classmethod | ||
| 830 | def __prepare__(cls, name, this_bases): | ||
| 831 | return meta.__prepare__(name, bases) | ||
| 832 | return type.__new__(metaclass, 'temporary_class', (), {}) | ||
| 833 | |||
| 834 | |||
| 835 | def add_metaclass(metaclass): | ||
| 836 | """Class decorator for creating a class with a metaclass.""" | ||
| 837 | def wrapper(cls): | ||
| 838 | orig_vars = cls.__dict__.copy() | ||
| 839 | slots = orig_vars.get('__slots__') | ||
| 840 | if slots is not None: | ||
| 841 | if isinstance(slots, str): | ||
| 842 | slots = [slots] | ||
| 843 | for slots_var in slots: | ||
| 844 | orig_vars.pop(slots_var) | ||
| 845 | orig_vars.pop('__dict__', None) | ||
| 846 | orig_vars.pop('__weakref__', None) | ||
| 847 | return metaclass(cls.__name__, cls.__bases__, orig_vars) | ||
| 848 | return wrapper | ||
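The two helpers above attach a metaclass portably; a minimal sketch (standalone `six` assumed) showing that both routes produce a class whose type is the metaclass:

    import six

    class Meta(type):
        def __new__(mcs, name, bases, d):
            d['tagged'] = True
            return super(Meta, mcs).__new__(mcs, name, bases, d)

    # with_metaclass: inherit through a temporary dummy metaclass.
    class A(six.with_metaclass(Meta, object)):
        pass

    # add_metaclass: rebuild an already-defined class under Meta.
    @six.add_metaclass(Meta)
    class B(object):
        pass

    assert A.tagged and B.tagged
    assert type(A) is Meta and type(B) is Meta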
| 849 | |||
| 850 | |||
| 851 | def python_2_unicode_compatible(klass): | ||
| 852 | """ | ||
| 853 | A decorator that defines __unicode__ and __str__ methods under Python 2. | ||
| 854 | Under Python 3 it does nothing. | ||
| 855 | |||
| 856 | To support Python 2 and 3 with a single code base, define a __str__ method | ||
| 857 | returning text and apply this decorator to the class. | ||
| 858 | """ | ||
| 859 | if PY2: | ||
| 860 | if '__str__' not in klass.__dict__: | ||
| 861 | raise ValueError("@python_2_unicode_compatible cannot be applied " | ||
| 862 | "to %s because it doesn't define __str__()." % | ||
| 863 | klass.__name__) | ||
| 864 | klass.__unicode__ = klass.__str__ | ||
| 865 | klass.__str__ = lambda self: self.__unicode__().encode('utf-8') | ||
| 866 | return klass | ||
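A hedged example of the decorator above: define __str__ once, returning text; on Python 2 it is moved to __unicode__ and a UTF-8-encoding __str__ is installed, while on Python 3 the class is returned unchanged:

    import six

    @six.python_2_unicode_compatible
    class Greeting(object):
        def __str__(self):
            return u'caf\xe9'

    g = Greeting()
    assert u'%s' % g == u'caf\xe9'   # text on both major versions
    # On Python 2, str(g) would yield the UTF-8 bytes 'caf\xc3\xa9'.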
| 867 | |||
| 868 | |||
| 869 | # Complete the moves implementation. | ||
| 870 | # This code is at the end of this module to speed up module loading. | ||
| 871 | # Turn this module into a package. | ||
| 872 | __path__ = [] # required for PEP 302 and PEP 451 | ||
| 873 | __package__ = __name__ # see PEP 366 @ReservedAssignment | ||
| 874 | if globals().get("__spec__") is not None: | ||
| 875 | __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable | ||
| 876 | # Remove other six meta path importers, since they cause problems. This can | ||
| 877 | # happen if six is removed from sys.modules and then reloaded. (Setuptools does | ||
| 878 | # this for some reason.) | ||
| 879 | if sys.meta_path: | ||
| 880 | for i, importer in enumerate(sys.meta_path): | ||
| 881 | # Here's some real nastiness: Another "instance" of the six module might | ||
| 882 | # be floating around. Therefore, we can't use isinstance() to check for | ||
| 883 | # the six meta path importer, since the other six instance will have | ||
| 884 | # inserted an importer with different class. | ||
| 885 | if (type(importer).__name__ == "_SixMetaPathImporter" and | ||
| 886 | importer.name == __name__): | ||
| 887 | del sys.meta_path[i] | ||
| 888 | break | ||
| 889 | del i, importer | ||
| 890 | # Finally, add the importer to the meta path import hook. | ||
| 891 | sys.meta_path.append(_importer) | ||
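Once the importer is on sys.meta_path, the aliased names become importable as real modules rather than mere attributes. A small sketch, assuming the standalone `six` package (the vendored copy registers under its own `pip._vendor.six` name):

    import sys
    from six.moves.urllib.parse import urlparse  # served by the importer

    assert urlparse('https://example.com/path').netloc == 'example.com'
    assert any(type(m).__name__ == '_SixMetaPathImporter'
               for m in sys.meta_path)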
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/__init__.py new file mode 100644 index 0000000..1bffade --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/__init__.py | |||
| @@ -0,0 +1,97 @@ | |||
| 1 | """ | ||
| 2 | urllib3 - Thread-safe connection pooling and re-use. | ||
| 3 | """ | ||
| 4 | |||
| 5 | from __future__ import absolute_import | ||
| 6 | import warnings | ||
| 7 | |||
| 8 | from .connectionpool import ( | ||
| 9 | HTTPConnectionPool, | ||
| 10 | HTTPSConnectionPool, | ||
| 11 | connection_from_url | ||
| 12 | ) | ||
| 13 | |||
| 14 | from . import exceptions | ||
| 15 | from .filepost import encode_multipart_formdata | ||
| 16 | from .poolmanager import PoolManager, ProxyManager, proxy_from_url | ||
| 17 | from .response import HTTPResponse | ||
| 18 | from .util.request import make_headers | ||
| 19 | from .util.url import get_host | ||
| 20 | from .util.timeout import Timeout | ||
| 21 | from .util.retry import Retry | ||
| 22 | |||
| 23 | |||
| 24 | # Set default logging handler to avoid "No handler found" warnings. | ||
| 25 | import logging | ||
| 26 | try: # Python 2.7+ | ||
| 27 | from logging import NullHandler | ||
| 28 | except ImportError: | ||
| 29 | class NullHandler(logging.Handler): | ||
| 30 | def emit(self, record): | ||
| 31 | pass | ||
| 32 | |||
| 33 | __author__ = 'Andrey Petrov (andrey.petrov@shazow.net)' | ||
| 34 | __license__ = 'MIT' | ||
| 35 | __version__ = '1.22' | ||
| 36 | |||
| 37 | __all__ = ( | ||
| 38 | 'HTTPConnectionPool', | ||
| 39 | 'HTTPSConnectionPool', | ||
| 40 | 'PoolManager', | ||
| 41 | 'ProxyManager', | ||
| 42 | 'HTTPResponse', | ||
| 43 | 'Retry', | ||
| 44 | 'Timeout', | ||
| 45 | 'add_stderr_logger', | ||
| 46 | 'connection_from_url', | ||
| 47 | 'disable_warnings', | ||
| 48 | 'encode_multipart_formdata', | ||
| 49 | 'get_host', | ||
| 50 | 'make_headers', | ||
| 51 | 'proxy_from_url', | ||
| 52 | ) | ||
| 53 | |||
| 54 | logging.getLogger(__name__).addHandler(NullHandler()) | ||
| 55 | |||
| 56 | |||
| 57 | def add_stderr_logger(level=logging.DEBUG): | ||
| 58 | """ | ||
| 59 | Helper for quickly adding a StreamHandler to the logger. Useful for | ||
| 60 | debugging. | ||
| 61 | |||
| 62 | Returns the handler after adding it. | ||
| 63 | """ | ||
| 64 | # This method needs to be in this __init__.py to get the __name__ correct | ||
| 65 | # even if urllib3 is vendored within another package. | ||
| 66 | logger = logging.getLogger(__name__) | ||
| 67 | handler = logging.StreamHandler() | ||
| 68 | handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s')) | ||
| 69 | logger.addHandler(handler) | ||
| 70 | logger.setLevel(level) | ||
| 71 | logger.debug('Added a stderr logging handler to logger: %s', __name__) | ||
| 72 | return handler | ||
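A hedged usage sketch for the helper above, using the vendored import path; the returned handler makes later removal straightforward:

    import logging
    from pip._vendor import urllib3

    handler = urllib3.add_stderr_logger(logging.INFO)
    # ... issue requests; wire-level log lines now go to stderr ...
    logging.getLogger(urllib3.__name__).removeHandler(handler)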
| 73 | |||
| 74 | |||
| 75 | # ... Clean up. | ||
| 76 | del NullHandler | ||
| 77 | |||
| 78 | |||
| 79 | # All warning filters *must* be appended unless you're really certain that they | ||
| 80 | # shouldn't be: otherwise, it's very hard for users to use most Python | ||
| 81 | # mechanisms to silence them. | ||
| 82 | # SecurityWarnings always go off by default. | ||
| 83 | warnings.simplefilter('always', exceptions.SecurityWarning, append=True) | ||
| 84 | # SubjectAltNameWarnings should go off once per host. | ||
| 85 | warnings.simplefilter('default', exceptions.SubjectAltNameWarning, append=True) | ||
| 86 | # InsecurePlatformWarnings don't vary between requests, so we keep them at 'default'. | ||
| 87 | warnings.simplefilter('default', exceptions.InsecurePlatformWarning, | ||
| 88 | append=True) | ||
| 89 | # SNIMissingWarnings should go off only once. | ||
| 90 | warnings.simplefilter('default', exceptions.SNIMissingWarning, append=True) | ||
| 91 | |||
| 92 | |||
| 93 | def disable_warnings(category=exceptions.HTTPWarning): | ||
| 94 | """ | ||
| 95 | Helper for quickly disabling all urllib3 warnings. | ||
| 96 | """ | ||
| 97 | warnings.simplefilter('ignore', category) | ||
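A brief sketch of disable_warnings (vendored import path assumed); passing a specific category is preferable to silencing every HTTPWarning:

    from pip._vendor import urllib3
    from pip._vendor.urllib3 import exceptions

    # Suppress only the warning raised for unverified HTTPS requests.
    urllib3.disable_warnings(exceptions.InsecureRequestWarning)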
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/_collections.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/_collections.py new file mode 100644 index 0000000..ecbf6b0 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/_collections.py | |||
| @@ -0,0 +1,319 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | from collections import Mapping, MutableMapping | ||
| 3 | try: | ||
| 4 | from threading import RLock | ||
| 5 | except ImportError: # Platform-specific: No threads available | ||
| 6 | class RLock: | ||
| 7 | def __enter__(self): | ||
| 8 | pass | ||
| 9 | |||
| 10 | def __exit__(self, exc_type, exc_value, traceback): | ||
| 11 | pass | ||
| 12 | |||
| 13 | |||
| 14 | try: # Python 2.7+ | ||
| 15 | from collections import OrderedDict | ||
| 16 | except ImportError: | ||
| 17 | from .packages.ordered_dict import OrderedDict | ||
| 18 | from .packages.six import iterkeys, itervalues, PY3 | ||
| 19 | |||
| 20 | |||
| 21 | __all__ = ['RecentlyUsedContainer', 'HTTPHeaderDict'] | ||
| 22 | |||
| 23 | |||
| 24 | _Null = object() | ||
| 25 | |||
| 26 | |||
| 27 | class RecentlyUsedContainer(MutableMapping): | ||
| 28 | """ | ||
| 29 | Provides a thread-safe dict-like container which maintains up to | ||
| 30 | ``maxsize`` keys while throwing away the least-recently-used keys beyond | ||
| 31 | ``maxsize``. | ||
| 32 | |||
| 33 | :param maxsize: | ||
| 34 | Maximum number of recent elements to retain. | ||
| 35 | |||
| 36 | :param dispose_func: | ||
| 37 | Callback invoked with each evicted value: every time an item is | ||
| 38 | evicted from the container, ``dispose_func(value)`` is called. | ||
| 39 | """ | ||
| 40 | |||
| 41 | ContainerCls = OrderedDict | ||
| 42 | |||
| 43 | def __init__(self, maxsize=10, dispose_func=None): | ||
| 44 | self._maxsize = maxsize | ||
| 45 | self.dispose_func = dispose_func | ||
| 46 | |||
| 47 | self._container = self.ContainerCls() | ||
| 48 | self.lock = RLock() | ||
| 49 | |||
| 50 | def __getitem__(self, key): | ||
| 51 | # Re-insert the item, moving it to the end of the eviction line. | ||
| 52 | with self.lock: | ||
| 53 | item = self._container.pop(key) | ||
| 54 | self._container[key] = item | ||
| 55 | return item | ||
| 56 | |||
| 57 | def __setitem__(self, key, value): | ||
| 58 | evicted_value = _Null | ||
| 59 | with self.lock: | ||
| 60 | # Possibly evict the existing value of 'key' | ||
| 61 | evicted_value = self._container.get(key, _Null) | ||
| 62 | self._container[key] = value | ||
| 63 | |||
| 64 | # If we didn't evict an existing value, we might have to evict the | ||
| 65 | # least recently used item from the beginning of the container. | ||
| 66 | if len(self._container) > self._maxsize: | ||
| 67 | _key, evicted_value = self._container.popitem(last=False) | ||
| 68 | |||
| 69 | if self.dispose_func and evicted_value is not _Null: | ||
| 70 | self.dispose_func(evicted_value) | ||
| 71 | |||
| 72 | def __delitem__(self, key): | ||
| 73 | with self.lock: | ||
| 74 | value = self._container.pop(key) | ||
| 75 | |||
| 76 | if self.dispose_func: | ||
| 77 | self.dispose_func(value) | ||
| 78 | |||
| 79 | def __len__(self): | ||
| 80 | with self.lock: | ||
| 81 | return len(self._container) | ||
| 82 | |||
| 83 | def __iter__(self): | ||
| 84 | raise NotImplementedError('Iteration over this class is unlikely to be threadsafe.') | ||
| 85 | |||
| 86 | def clear(self): | ||
| 87 | with self.lock: | ||
| 88 | # Copy pointers to all values, then wipe the mapping | ||
| 89 | values = list(itervalues(self._container)) | ||
| 90 | self._container.clear() | ||
| 91 | |||
| 92 | if self.dispose_func: | ||
| 93 | for value in values: | ||
| 94 | self.dispose_func(value) | ||
| 95 | |||
| 96 | def keys(self): | ||
| 97 | with self.lock: | ||
| 98 | return list(iterkeys(self._container)) | ||
| 99 | |||
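A minimal sketch of the container's LRU semantics and the dispose_func hook, importing from this module's path in the vendored tree:

    from pip._vendor.urllib3._collections import RecentlyUsedContainer

    disposed = []
    lru = RecentlyUsedContainer(maxsize=2, dispose_func=disposed.append)

    lru['a'] = 1
    lru['b'] = 2
    lru['a']        # touching 'a' makes 'b' the least recently used
    lru['c'] = 3    # exceeds maxsize: evicts 'b', calling dispose_func(2)

    assert disposed == [2]
    assert sorted(lru.keys()) == ['a', 'c']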
| 100 | |||
| 101 | class HTTPHeaderDict(MutableMapping): | ||
| 102 | """ | ||
| 103 | :param headers: | ||
| 104 | An iterable of field-value pairs. Must not contain multiple field names | ||
| 105 | when compared case-insensitively. | ||
| 106 | |||
| 107 | :param kwargs: | ||
| 108 | Additional field-value pairs to pass in to ``dict.update``. | ||
| 109 | |||
| 110 | A ``dict`` like container for storing HTTP Headers. | ||
| 111 | |||
| 112 | Field names are stored and compared case-insensitively in compliance with | ||
| 113 | RFC 7230. Iteration provides the first case-sensitive key seen for each | ||
| 114 | case-insensitive pair. | ||
| 115 | |||
| 116 | Using ``__setitem__`` syntax overwrites fields that compare equal | ||
| 117 | case-insensitively in order to maintain ``dict``'s API. For fields that | ||
| 118 | compare equal, instead create a new ``HTTPHeaderDict`` and use ``.add`` | ||
| 119 | in a loop. | ||
| 120 | |||
| 121 | If multiple fields that are equal case-insensitively are passed to the | ||
| 122 | constructor or ``.update``, the behavior is undefined and some will be | ||
| 123 | lost. | ||
| 124 | |||
| 125 | >>> headers = HTTPHeaderDict() | ||
| 126 | >>> headers.add('Set-Cookie', 'foo=bar') | ||
| 127 | >>> headers.add('set-cookie', 'baz=quxx') | ||
| 128 | >>> headers['content-length'] = '7' | ||
| 129 | >>> headers['SET-cookie'] | ||
| 130 | 'foo=bar, baz=quxx' | ||
| 131 | >>> headers['Content-Length'] | ||
| 132 | '7' | ||
| 133 | """ | ||
| 134 | |||
| 135 | def __init__(self, headers=None, **kwargs): | ||
| 136 | super(HTTPHeaderDict, self).__init__() | ||
| 137 | self._container = OrderedDict() | ||
| 138 | if headers is not None: | ||
| 139 | if isinstance(headers, HTTPHeaderDict): | ||
| 140 | self._copy_from(headers) | ||
| 141 | else: | ||
| 142 | self.extend(headers) | ||
| 143 | if kwargs: | ||
| 144 | self.extend(kwargs) | ||
| 145 | |||
| 146 | def __setitem__(self, key, val): | ||
| 147 | self._container[key.lower()] = [key, val] | ||
| 148 | return self._container[key.lower()] | ||
| 149 | |||
| 150 | def __getitem__(self, key): | ||
| 151 | val = self._container[key.lower()] | ||
| 152 | return ', '.join(val[1:]) | ||
| 153 | |||
| 154 | def __delitem__(self, key): | ||
| 155 | del self._container[key.lower()] | ||
| 156 | |||
| 157 | def __contains__(self, key): | ||
| 158 | return key.lower() in self._container | ||
| 159 | |||
| 160 | def __eq__(self, other): | ||
| 161 | if not isinstance(other, Mapping) and not hasattr(other, 'keys'): | ||
| 162 | return False | ||
| 163 | if not isinstance(other, type(self)): | ||
| 164 | other = type(self)(other) | ||
| 165 | return (dict((k.lower(), v) for k, v in self.itermerged()) == | ||
| 166 | dict((k.lower(), v) for k, v in other.itermerged())) | ||
| 167 | |||
| 168 | def __ne__(self, other): | ||
| 169 | return not self.__eq__(other) | ||
| 170 | |||
| 171 | if not PY3: # Python 2 | ||
| 172 | iterkeys = MutableMapping.iterkeys | ||
| 173 | itervalues = MutableMapping.itervalues | ||
| 174 | |||
| 175 | __marker = object() | ||
| 176 | |||
| 177 | def __len__(self): | ||
| 178 | return len(self._container) | ||
| 179 | |||
| 180 | def __iter__(self): | ||
| 181 | # Only provide the originally cased names | ||
| 182 | for vals in self._container.values(): | ||
| 183 | yield vals[0] | ||
| 184 | |||
| 185 | def pop(self, key, default=__marker): | ||
| 186 | '''D.pop(k[,d]) -> v, remove specified key and return the corresponding value. | ||
| 187 | If key is not found, d is returned if given, otherwise KeyError is raised. | ||
| 188 | ''' | ||
| 189 | # Using the MutableMapping function directly fails due to the private marker. | ||
| 190 | # Using ordinary dict.pop would expose the internal structures. | ||
| 191 | # So let's reinvent the wheel. | ||
| 192 | try: | ||
| 193 | value = self[key] | ||
| 194 | except KeyError: | ||
| 195 | if default is self.__marker: | ||
| 196 | raise | ||
| 197 | return default | ||
| 198 | else: | ||
| 199 | del self[key] | ||
| 200 | return value | ||
| 201 | |||
| 202 | def discard(self, key): | ||
| 203 | try: | ||
| 204 | del self[key] | ||
| 205 | except KeyError: | ||
| 206 | pass | ||
| 207 | |||
| 208 | def add(self, key, val): | ||
| 209 | """Adds a (name, value) pair, doesn't overwrite the value if it already | ||
| 210 | exists. | ||
| 211 | |||
| 212 | >>> headers = HTTPHeaderDict(foo='bar') | ||
| 213 | >>> headers.add('Foo', 'baz') | ||
| 214 | >>> headers['foo'] | ||
| 215 | 'bar, baz' | ||
| 216 | """ | ||
| 217 | key_lower = key.lower() | ||
| 218 | new_vals = [key, val] | ||
| 219 | # Keep the common case aka no item present as fast as possible | ||
| 220 | vals = self._container.setdefault(key_lower, new_vals) | ||
| 221 | if new_vals is not vals: | ||
| 222 | vals.append(val) | ||
| 223 | |||
| 224 | def extend(self, *args, **kwargs): | ||
| 225 | """Generic import function for any type of header-like object. | ||
| 226 | Adapted version of MutableMapping.update in order to insert items | ||
| 227 | with self.add instead of self.__setitem__ | ||
| 228 | """ | ||
| 229 | if len(args) > 1: | ||
| 230 | raise TypeError("extend() takes at most 1 positional " | ||
| 231 | "arguments ({0} given)".format(len(args))) | ||
| 232 | other = args[0] if len(args) >= 1 else () | ||
| 233 | |||
| 234 | if isinstance(other, HTTPHeaderDict): | ||
| 235 | for key, val in other.iteritems(): | ||
| 236 | self.add(key, val) | ||
| 237 | elif isinstance(other, Mapping): | ||
| 238 | for key in other: | ||
| 239 | self.add(key, other[key]) | ||
| 240 | elif hasattr(other, "keys"): | ||
| 241 | for key in other.keys(): | ||
| 242 | self.add(key, other[key]) | ||
| 243 | else: | ||
| 244 | for key, value in other: | ||
| 245 | self.add(key, value) | ||
| 246 | |||
| 247 | for key, value in kwargs.items(): | ||
| 248 | self.add(key, value) | ||
| 249 | |||
| 250 | def getlist(self, key, default=__marker): | ||
| 251 | """Returns a list of all the values for the named field. Returns an | ||
| 252 | empty list if the key doesn't exist.""" | ||
| 253 | try: | ||
| 254 | vals = self._container[key.lower()] | ||
| 255 | except KeyError: | ||
| 256 | if default is self.__marker: | ||
| 257 | return [] | ||
| 258 | return default | ||
| 259 | else: | ||
| 260 | return vals[1:] | ||
| 261 | |||
| 262 | # Backwards compatibility for httplib | ||
| 263 | getheaders = getlist | ||
| 264 | getallmatchingheaders = getlist | ||
| 265 | iget = getlist | ||
| 266 | |||
| 267 | # Backwards compatibility for http.cookiejar | ||
| 268 | get_all = getlist | ||
| 269 | |||
| 270 | def __repr__(self): | ||
| 271 | return "%s(%s)" % (type(self).__name__, dict(self.itermerged())) | ||
| 272 | |||
| 273 | def _copy_from(self, other): | ||
| 274 | for key in other: | ||
| 275 | val = other.getlist(key) | ||
| 276 | if isinstance(val, list): | ||
| 277 | # Don't need to convert tuples | ||
| 278 | val = list(val) | ||
| 279 | self._container[key.lower()] = [key] + val | ||
| 280 | |||
| 281 | def copy(self): | ||
| 282 | clone = type(self)() | ||
| 283 | clone._copy_from(self) | ||
| 284 | return clone | ||
| 285 | |||
| 286 | def iteritems(self): | ||
| 287 | """Iterate over all header lines, including duplicate ones.""" | ||
| 288 | for key in self: | ||
| 289 | vals = self._container[key.lower()] | ||
| 290 | for val in vals[1:]: | ||
| 291 | yield vals[0], val | ||
| 292 | |||
| 293 | def itermerged(self): | ||
| 294 | """Iterate over all headers, merging duplicate ones together.""" | ||
| 295 | for key in self: | ||
| 296 | val = self._container[key.lower()] | ||
| 297 | yield val[0], ', '.join(val[1:]) | ||
| 298 | |||
| 299 | def items(self): | ||
| 300 | return list(self.iteritems()) | ||
| 301 | |||
| 302 | @classmethod | ||
| 303 | def from_httplib(cls, message): # Python 2 | ||
| 304 | """Read headers from a Python 2 httplib message object.""" | ||
| 305 | # Python 2.7 does not expose a proper API for exporting multiheaders | ||
| 306 | # efficiently. This function re-reads raw lines from the message | ||
| 307 | # object and extracts the multiheaders properly. | ||
| 308 | headers = [] | ||
| 309 | |||
| 310 | for line in message.headers: | ||
| 311 | if line.startswith((' ', '\t')): | ||
| 312 | key, value = headers[-1] | ||
| 313 | headers[-1] = (key, value + '\r\n' + line.rstrip()) | ||
| 314 | continue | ||
| 315 | |||
| 316 | key, value = line.split(':', 1) | ||
| 317 | headers.append((key, value.strip())) | ||
| 318 | |||
| 319 | return cls(headers) | ||
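A short sketch contrasting the two iteration modes of HTTPHeaderDict defined above (vendored import path assumed):

    from pip._vendor.urllib3._collections import HTTPHeaderDict

    h = HTTPHeaderDict()
    h.add('Set-Cookie', 'foo=bar')
    h.add('set-cookie', 'baz=quxx')

    # iteritems yields one line per stored value, preserving duplicates.
    assert list(h.iteritems()) == [('Set-Cookie', 'foo=bar'),
                                   ('Set-Cookie', 'baz=quxx')]

    # itermerged folds case-insensitive duplicates into one joined value.
    assert list(h.itermerged()) == [('Set-Cookie', 'foo=bar, baz=quxx')]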
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/connection.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/connection.py new file mode 100644 index 0000000..67090e3 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/connection.py | |||
| @@ -0,0 +1,373 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | import datetime | ||
| 3 | import logging | ||
| 4 | import os | ||
| 5 | import sys | ||
| 6 | import socket | ||
| 7 | from socket import error as SocketError, timeout as SocketTimeout | ||
| 8 | import warnings | ||
| 9 | from .packages import six | ||
| 10 | from .packages.six.moves.http_client import HTTPConnection as _HTTPConnection | ||
| 11 | from .packages.six.moves.http_client import HTTPException # noqa: F401 | ||
| 12 | |||
| 13 | try: # Compiled with SSL? | ||
| 14 | import ssl | ||
| 15 | BaseSSLError = ssl.SSLError | ||
| 16 | except (ImportError, AttributeError): # Platform-specific: No SSL. | ||
| 17 | ssl = None | ||
| 18 | |||
| 19 | class BaseSSLError(BaseException): | ||
| 20 | pass | ||
| 21 | |||
| 22 | |||
| 23 | try: # Python 3: | ||
| 24 | # Not a no-op, we're adding this to the namespace so it can be imported. | ||
| 25 | ConnectionError = ConnectionError | ||
| 26 | except NameError: # Python 2: | ||
| 27 | class ConnectionError(Exception): | ||
| 28 | pass | ||
| 29 | |||
| 30 | |||
| 31 | from .exceptions import ( | ||
| 32 | NewConnectionError, | ||
| 33 | ConnectTimeoutError, | ||
| 34 | SubjectAltNameWarning, | ||
| 35 | SystemTimeWarning, | ||
| 36 | ) | ||
| 37 | from .packages.ssl_match_hostname import match_hostname, CertificateError | ||
| 38 | |||
| 39 | from .util.ssl_ import ( | ||
| 40 | resolve_cert_reqs, | ||
| 41 | resolve_ssl_version, | ||
| 42 | assert_fingerprint, | ||
| 43 | create_urllib3_context, | ||
| 44 | ssl_wrap_socket | ||
| 45 | ) | ||
| 46 | |||
| 47 | |||
| 48 | from .util import connection | ||
| 49 | |||
| 50 | from ._collections import HTTPHeaderDict | ||
| 51 | |||
| 52 | log = logging.getLogger(__name__) | ||
| 53 | |||
| 54 | port_by_scheme = { | ||
| 55 | 'http': 80, | ||
| 56 | 'https': 443, | ||
| 57 | } | ||
| 58 | |||
| 59 | # When updating RECENT_DATE, move it to | ||
| 60 | # within two years of the current date, and no | ||
| 61 | # earlier than 6 months ago. | ||
| 62 | RECENT_DATE = datetime.date(2016, 1, 1) | ||
| 63 | |||
| 64 | |||
| 65 | class DummyConnection(object): | ||
| 66 | """Used to detect a failed ConnectionCls import.""" | ||
| 67 | pass | ||
| 68 | |||
| 69 | |||
| 70 | class HTTPConnection(_HTTPConnection, object): | ||
| 71 | """ | ||
| 72 | Based on httplib.HTTPConnection but provides an extra constructor | ||
| 73 | backwards-compatibility layer between older and newer Pythons. | ||
| 74 | |||
| 75 | Additional keyword parameters are used to configure attributes of the connection. | ||
| 76 | Accepted parameters include: | ||
| 77 | |||
| 78 | - ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool` | ||
| 79 | - ``source_address``: Set the source address for the current connection. | ||
| 80 | |||
| 81 | .. note:: This is ignored for Python 2.6. It is only applied for 2.7 and 3.x | ||
| 82 | |||
| 83 | - ``socket_options``: Set specific options on the underlying socket. If not specified, then | ||
| 84 | defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling | ||
| 85 | Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy. | ||
| 86 | |||
| 87 | For example, if you wish to enable TCP Keep Alive in addition to the defaults, | ||
| 88 | you might pass:: | ||
| 89 | |||
| 90 | HTTPConnection.default_socket_options + [ | ||
| 91 | (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1), | ||
| 92 | ] | ||
| 93 | |||
| 94 | Or you may want to disable the defaults by passing an empty list (e.g., ``[]``). | ||
| 95 | """ | ||
| 96 | |||
| 97 | default_port = port_by_scheme['http'] | ||
| 98 | |||
| 99 | #: Disable Nagle's algorithm by default. | ||
| 100 | #: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]`` | ||
| 101 | default_socket_options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)] | ||
| 102 | |||
| 103 | #: Whether this connection verifies the host's certificate. | ||
| 104 | is_verified = False | ||
| 105 | |||
| 106 | def __init__(self, *args, **kw): | ||
| 107 | if six.PY3: # Python 3 | ||
| 108 | kw.pop('strict', None) | ||
| 109 | |||
| 110 | # Pre-set source_address in case we have an older Python like 2.6. | ||
| 111 | self.source_address = kw.get('source_address') | ||
| 112 | |||
| 113 | if sys.version_info < (2, 7): # Python 2.6 | ||
| 114 | # _HTTPConnection on Python 2.6 will balk at this keyword arg, but | ||
| 115 | # not newer versions. We can still use it when creating a | ||
| 116 | # connection though, so we pop it *after* we have saved it as | ||
| 117 | # self.source_address. | ||
| 118 | kw.pop('source_address', None) | ||
| 119 | |||
| 120 | #: The socket options provided by the user. If no options are | ||
| 121 | #: provided, we use the default options. | ||
| 122 | self.socket_options = kw.pop('socket_options', self.default_socket_options) | ||
| 123 | |||
| 124 | # Superclass also sets self.source_address in Python 2.7+. | ||
| 125 | _HTTPConnection.__init__(self, *args, **kw) | ||
| 126 | |||
| 127 | def _new_conn(self): | ||
| 128 | """ Establish a socket connection and set nodelay settings on it. | ||
| 129 | |||
| 130 | :return: New socket connection. | ||
| 131 | """ | ||
| 132 | extra_kw = {} | ||
| 133 | if self.source_address: | ||
| 134 | extra_kw['source_address'] = self.source_address | ||
| 135 | |||
| 136 | if self.socket_options: | ||
| 137 | extra_kw['socket_options'] = self.socket_options | ||
| 138 | |||
| 139 | try: | ||
| 140 | conn = connection.create_connection( | ||
| 141 | (self.host, self.port), self.timeout, **extra_kw) | ||
| 142 | |||
| 143 | except SocketTimeout as e: | ||
| 144 | raise ConnectTimeoutError( | ||
| 145 | self, "Connection to %s timed out. (connect timeout=%s)" % | ||
| 146 | (self.host, self.timeout)) | ||
| 147 | |||
| 148 | except SocketError as e: | ||
| 149 | raise NewConnectionError( | ||
| 150 | self, "Failed to establish a new connection: %s" % e) | ||
| 151 | |||
| 152 | return conn | ||
| 153 | |||
| 154 | def _prepare_conn(self, conn): | ||
| 155 | self.sock = conn | ||
| 156 | # the _tunnel_host attribute was added in python 2.6.3 (via | ||
| 157 | # http://hg.python.org/cpython/rev/0f57b30a152f) so pythons 2.6(0-2) do | ||
| 158 | # not have them. | ||
| 159 | if getattr(self, '_tunnel_host', None): | ||
| 160 | # TODO: Fix tunnel so it doesn't depend on self.sock state. | ||
| 161 | self._tunnel() | ||
| 162 | # Mark this connection as not reusable | ||
| 163 | self.auto_open = 0 | ||
| 164 | |||
| 165 | def connect(self): | ||
| 166 | conn = self._new_conn() | ||
| 167 | self._prepare_conn(conn) | ||
| 168 | |||
| 169 | def request_chunked(self, method, url, body=None, headers=None): | ||
| 170 | """ | ||
| 171 | Alternative to the common request method: sends the body with | ||
| 172 | chunked transfer encoding rather than as a single block. | ||
| 173 | """ | ||
| 174 | headers = HTTPHeaderDict(headers if headers is not None else {}) | ||
| 175 | skip_accept_encoding = 'accept-encoding' in headers | ||
| 176 | skip_host = 'host' in headers | ||
| 177 | self.putrequest( | ||
| 178 | method, | ||
| 179 | url, | ||
| 180 | skip_accept_encoding=skip_accept_encoding, | ||
| 181 | skip_host=skip_host | ||
| 182 | ) | ||
| 183 | for header, value in headers.items(): | ||
| 184 | self.putheader(header, value) | ||
| 185 | if 'transfer-encoding' not in headers: | ||
| 186 | self.putheader('Transfer-Encoding', 'chunked') | ||
| 187 | self.endheaders() | ||
| 188 | |||
| 189 | if body is not None: | ||
| 190 | stringish_types = six.string_types + (six.binary_type,) | ||
| 191 | if isinstance(body, stringish_types): | ||
| 192 | body = (body,) | ||
| 193 | for chunk in body: | ||
| 194 | if not chunk: | ||
| 195 | continue | ||
| 196 | if not isinstance(chunk, six.binary_type): | ||
| 197 | chunk = chunk.encode('utf8') | ||
| 198 | len_str = hex(len(chunk))[2:] | ||
| 199 | self.send(len_str.encode('utf-8')) | ||
| 200 | self.send(b'\r\n') | ||
| 201 | self.send(chunk) | ||
| 202 | self.send(b'\r\n') | ||
| 203 | |||
| 204 | # After the if clause, to always have a closed body | ||
| 205 | self.send(b'0\r\n\r\n') | ||
| 206 | |||
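For reference, a standalone sketch of the wire framing request_chunked produces: each chunk becomes "<hex length>\r\n<payload>\r\n" and "0\r\n\r\n" terminates the body (the helper name here is illustrative, not part of urllib3):

    def frame_chunks(chunks):
        # Mirrors the send() sequence in request_chunked above.
        out = b''
        for chunk in chunks:
            if not chunk:
                continue
            out += hex(len(chunk))[2:].encode('utf-8')
            out += b'\r\n' + chunk + b'\r\n'
        return out + b'0\r\n\r\n'

    assert frame_chunks([b'hello', b'world!']) == (
        b'5\r\nhello\r\n6\r\nworld!\r\n0\r\n\r\n')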
| 207 | |||
| 208 | class HTTPSConnection(HTTPConnection): | ||
| 209 | default_port = port_by_scheme['https'] | ||
| 210 | |||
| 211 | ssl_version = None | ||
| 212 | |||
| 213 | def __init__(self, host, port=None, key_file=None, cert_file=None, | ||
| 214 | strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, | ||
| 215 | ssl_context=None, **kw): | ||
| 216 | |||
| 217 | HTTPConnection.__init__(self, host, port, strict=strict, | ||
| 218 | timeout=timeout, **kw) | ||
| 219 | |||
| 220 | self.key_file = key_file | ||
| 221 | self.cert_file = cert_file | ||
| 222 | self.ssl_context = ssl_context | ||
| 223 | |||
| 224 | # Required property for Google AppEngine 1.9.0 which otherwise causes | ||
| 225 | # HTTPS requests to go out as HTTP. (See Issue #356) | ||
| 226 | self._protocol = 'https' | ||
| 227 | |||
| 228 | def connect(self): | ||
| 229 | conn = self._new_conn() | ||
| 230 | self._prepare_conn(conn) | ||
| 231 | |||
| 232 | if self.ssl_context is None: | ||
| 233 | self.ssl_context = create_urllib3_context( | ||
| 234 | ssl_version=resolve_ssl_version(None), | ||
| 235 | cert_reqs=resolve_cert_reqs(None), | ||
| 236 | ) | ||
| 237 | |||
| 238 | self.sock = ssl_wrap_socket( | ||
| 239 | sock=conn, | ||
| 240 | keyfile=self.key_file, | ||
| 241 | certfile=self.cert_file, | ||
| 242 | ssl_context=self.ssl_context, | ||
| 243 | ) | ||
| 244 | |||
| 245 | |||
| 246 | class VerifiedHTTPSConnection(HTTPSConnection): | ||
| 247 | """ | ||
| 248 | Based on httplib.HTTPSConnection but wraps the socket with | ||
| 249 | SSL certification. | ||
| 250 | """ | ||
| 251 | cert_reqs = None | ||
| 252 | ca_certs = None | ||
| 253 | ca_cert_dir = None | ||
| 254 | ssl_version = None | ||
| 255 | assert_fingerprint = None | ||
| 256 | |||
| 257 | def set_cert(self, key_file=None, cert_file=None, | ||
| 258 | cert_reqs=None, ca_certs=None, | ||
| 259 | assert_hostname=None, assert_fingerprint=None, | ||
| 260 | ca_cert_dir=None): | ||
| 261 | """ | ||
| 262 | This method should only be called once, before the connection is used. | ||
| 263 | """ | ||
| 264 | # If cert_reqs is not provided, we can try to guess. If the user gave | ||
| 265 | # us a cert database, we assume they want to use it: otherwise, if | ||
| 266 | # they gave us an SSL Context object we should use whatever is set for | ||
| 267 | # it. | ||
| 268 | if cert_reqs is None: | ||
| 269 | if ca_certs or ca_cert_dir: | ||
| 270 | cert_reqs = 'CERT_REQUIRED' | ||
| 271 | elif self.ssl_context is not None: | ||
| 272 | cert_reqs = self.ssl_context.verify_mode | ||
| 273 | |||
| 274 | self.key_file = key_file | ||
| 275 | self.cert_file = cert_file | ||
| 276 | self.cert_reqs = cert_reqs | ||
| 277 | self.assert_hostname = assert_hostname | ||
| 278 | self.assert_fingerprint = assert_fingerprint | ||
| 279 | self.ca_certs = ca_certs and os.path.expanduser(ca_certs) | ||
| 280 | self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir) | ||
| 281 | |||
| 282 | def connect(self): | ||
| 283 | # Add certificate verification | ||
| 284 | conn = self._new_conn() | ||
| 285 | |||
| 286 | hostname = self.host | ||
| 287 | if getattr(self, '_tunnel_host', None): | ||
| 288 | # _tunnel_host was added in Python 2.6.3 | ||
| 289 | # (See: http://hg.python.org/cpython/rev/0f57b30a152f) | ||
| 290 | |||
| 291 | self.sock = conn | ||
| 292 | # Calls self._set_hostport(), so self.host is | ||
| 293 | # self._tunnel_host below. | ||
| 294 | self._tunnel() | ||
| 295 | # Mark this connection as not reusable | ||
| 296 | self.auto_open = 0 | ||
| 297 | |||
| 298 | # Override the host with the one we're requesting data from. | ||
| 299 | hostname = self._tunnel_host | ||
| 300 | |||
| 301 | is_time_off = datetime.date.today() < RECENT_DATE | ||
| 302 | if is_time_off: | ||
| 303 | warnings.warn(( | ||
| 304 | 'System time is way off (before {0}). This will probably ' | ||
| 305 | 'lead to SSL verification errors').format(RECENT_DATE), | ||
| 306 | SystemTimeWarning | ||
| 307 | ) | ||
| 308 | |||
| 309 | # Wrap socket using verification with the root certs in | ||
| 310 | # trusted_root_certs | ||
| 311 | if self.ssl_context is None: | ||
| 312 | self.ssl_context = create_urllib3_context( | ||
| 313 | ssl_version=resolve_ssl_version(self.ssl_version), | ||
| 314 | cert_reqs=resolve_cert_reqs(self.cert_reqs), | ||
| 315 | ) | ||
| 316 | |||
| 317 | context = self.ssl_context | ||
| 318 | context.verify_mode = resolve_cert_reqs(self.cert_reqs) | ||
| 319 | self.sock = ssl_wrap_socket( | ||
| 320 | sock=conn, | ||
| 321 | keyfile=self.key_file, | ||
| 322 | certfile=self.cert_file, | ||
| 323 | ca_certs=self.ca_certs, | ||
| 324 | ca_cert_dir=self.ca_cert_dir, | ||
| 325 | server_hostname=hostname, | ||
| 326 | ssl_context=context) | ||
| 327 | |||
| 328 | if self.assert_fingerprint: | ||
| 329 | assert_fingerprint(self.sock.getpeercert(binary_form=True), | ||
| 330 | self.assert_fingerprint) | ||
| 331 | elif context.verify_mode != ssl.CERT_NONE \ | ||
| 332 | and not getattr(context, 'check_hostname', False) \ | ||
| 333 | and self.assert_hostname is not False: | ||
| 334 | # While urllib3 attempts to always turn off hostname matching from | ||
| 335 | # the TLS library, this cannot always be done. So we check whether | ||
| 336 | # the TLS library still thinks it's matching hostnames. | ||
| 337 | cert = self.sock.getpeercert() | ||
| 338 | if not cert.get('subjectAltName', ()): | ||
| 339 | warnings.warn(( | ||
| 340 | 'Certificate for {0} has no `subjectAltName`, falling back to check for a ' | ||
| 341 | '`commonName` for now. This feature is being removed by major browsers and ' | ||
| 342 | 'deprecated by RFC 2818. (See https://github.com/shazow/urllib3/issues/497 ' | ||
| 343 | 'for details.)'.format(hostname)), | ||
| 344 | SubjectAltNameWarning | ||
| 345 | ) | ||
| 346 | _match_hostname(cert, self.assert_hostname or hostname) | ||
| 347 | |||
| 348 | self.is_verified = ( | ||
| 349 | context.verify_mode == ssl.CERT_REQUIRED or | ||
| 350 | self.assert_fingerprint is not None | ||
| 351 | ) | ||
| 352 | |||
| 353 | |||
| 354 | def _match_hostname(cert, asserted_hostname): | ||
| 355 | try: | ||
| 356 | match_hostname(cert, asserted_hostname) | ||
| 357 | except CertificateError as e: | ||
| 358 | log.error( | ||
| 359 | 'Certificate did not match expected hostname: %s. ' | ||
| 360 | 'Certificate: %s', asserted_hostname, cert | ||
| 361 | ) | ||
| 362 | # Add cert to exception and reraise so client code can inspect | ||
| 363 | # the cert when catching the exception, if they want to | ||
| 364 | e._peer_cert = cert | ||
| 365 | raise | ||
| 366 | |||
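
Because the wrapper attaches the peer certificate to the exception, client code can inspect what the server actually presented. A hedged sketch of the catch side, using a synthetic certificate dict rather than a live connection:

    from pip._vendor.urllib3.packages.ssl_match_hostname import (
        CertificateError, match_hostname,
    )

    cert = {'subjectAltName': (('DNS', 'other.example.com'),)}  # synthetic
    try:
        match_hostname(cert, 'expected.example.com')
    except CertificateError as e:
        e._peer_cert = cert  # what _match_hostname() above does for you
        print('hostname mismatch; peer cert:', e._peer_cert)
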
| 367 | |||
| 368 | if ssl: | ||
| 369 | # Make a copy for testing. | ||
| 370 | UnverifiedHTTPSConnection = HTTPSConnection | ||
| 371 | HTTPSConnection = VerifiedHTTPSConnection | ||
| 372 | else: | ||
| 373 | HTTPSConnection = DummyConnection | ||
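
The module-level rebinding just above is the usual optional-dependency pattern: export the verified class under the plain name when ssl imports, and a stub that fails loudly when it does not. A minimal standalone sketch of the same idea:

    try:
        import ssl  # noqa: F401
    except ImportError:
        ssl = None

    class _Verified(object):
        pass  # used when the ssl module is importable

    class _Dummy(object):
        def __init__(self, *args, **kwargs):
            raise RuntimeError('ssl module is not available')

    Connection = _Verified if ssl else _Dummy
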
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/connectionpool.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/connectionpool.py new file mode 100644 index 0000000..b099ca8 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/connectionpool.py | |||
| @@ -0,0 +1,905 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | import errno | ||
| 3 | import logging | ||
| 4 | import sys | ||
| 5 | import warnings | ||
| 6 | |||
| 7 | from socket import error as SocketError, timeout as SocketTimeout | ||
| 8 | import socket | ||
| 9 | |||
| 10 | |||
| 11 | from .exceptions import ( | ||
| 12 | ClosedPoolError, | ||
| 13 | ProtocolError, | ||
| 14 | EmptyPoolError, | ||
| 15 | HeaderParsingError, | ||
| 16 | HostChangedError, | ||
| 17 | LocationValueError, | ||
| 18 | MaxRetryError, | ||
| 19 | ProxyError, | ||
| 20 | ReadTimeoutError, | ||
| 21 | SSLError, | ||
| 22 | TimeoutError, | ||
| 23 | InsecureRequestWarning, | ||
| 24 | NewConnectionError, | ||
| 25 | ) | ||
| 26 | from .packages.ssl_match_hostname import CertificateError | ||
| 27 | from .packages import six | ||
| 28 | from .packages.six.moves import queue | ||
| 29 | from .connection import ( | ||
| 30 | port_by_scheme, | ||
| 31 | DummyConnection, | ||
| 32 | HTTPConnection, HTTPSConnection, VerifiedHTTPSConnection, | ||
| 33 | HTTPException, BaseSSLError, | ||
| 34 | ) | ||
| 35 | from .request import RequestMethods | ||
| 36 | from .response import HTTPResponse | ||
| 37 | |||
| 38 | from .util.connection import is_connection_dropped | ||
| 39 | from .util.request import set_file_position | ||
| 40 | from .util.response import assert_header_parsing | ||
| 41 | from .util.retry import Retry | ||
| 42 | from .util.timeout import Timeout | ||
| 43 | from .util.url import get_host, Url | ||
| 44 | |||
| 45 | |||
| 46 | if six.PY2: | ||
| 47 | # Queue is imported for side effects on MS Windows | ||
| 48 | import Queue as _unused_module_Queue # noqa: F401 | ||
| 49 | |||
| 50 | xrange = six.moves.xrange | ||
| 51 | |||
| 52 | log = logging.getLogger(__name__) | ||
| 53 | |||
| 54 | _Default = object() | ||
| 55 | |||
| 56 | |||
| 57 | # Pool objects | ||
| 58 | class ConnectionPool(object): | ||
| 59 | """ | ||
| 60 | Base class for all connection pools, such as | ||
| 61 | :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`. | ||
| 62 | """ | ||
| 63 | |||
| 64 | scheme = None | ||
| 65 | QueueCls = queue.LifoQueue | ||
| 66 | |||
| 67 | def __init__(self, host, port=None): | ||
| 68 | if not host: | ||
| 69 | raise LocationValueError("No host specified.") | ||
| 70 | |||
| 71 | self.host = _ipv6_host(host).lower() | ||
| 72 | self._proxy_host = host.lower() | ||
| 73 | self.port = port | ||
| 74 | |||
| 75 | def __str__(self): | ||
| 76 | return '%s(host=%r, port=%r)' % (type(self).__name__, | ||
| 77 | self.host, self.port) | ||
| 78 | |||
| 79 | def __enter__(self): | ||
| 80 | return self | ||
| 81 | |||
| 82 | def __exit__(self, exc_type, exc_val, exc_tb): | ||
| 83 | self.close() | ||
| 84 | # Return False to re-raise any potential exceptions | ||
| 85 | return False | ||
| 86 | |||
| 87 | def close(self): | ||
| 88 | """ | ||
| 89 | Close all pooled connections and disable the pool. | ||
| 90 | """ | ||
| 91 | pass | ||
| 92 | |||
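
Since __exit__() calls close() and returns False, a pool works as a context manager while still propagating exceptions. A hedged usage sketch (the host is illustrative):

    from pip._vendor import urllib3

    with urllib3.HTTPConnectionPool('example.com', maxsize=2) as pool:
        r = pool.request('GET', '/')
        print(r.status)
    # here the pool is closed; a further checkout raises ClosedPoolError
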
| 93 | |||
| 94 | # This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252 | ||
| 95 | _blocking_errnos = set([errno.EAGAIN, errno.EWOULDBLOCK]) | ||
| 96 | |||
| 97 | |||
| 98 | class HTTPConnectionPool(ConnectionPool, RequestMethods): | ||
| 99 | """ | ||
| 100 | Thread-safe connection pool for one host. | ||
| 101 | |||
| 102 | :param host: | ||
| 103 | Host used for this HTTP Connection (e.g. "localhost"), passed into | ||
| 104 | :class:`httplib.HTTPConnection`. | ||
| 105 | |||
| 106 | :param port: | ||
| 107 | Port used for this HTTP Connection (None is equivalent to 80), passed | ||
| 108 | into :class:`httplib.HTTPConnection`. | ||
| 109 | |||
| 110 | :param strict: | ||
| 111 | Causes BadStatusLine to be raised if the status line can't be parsed | ||
| 112 | as a valid HTTP/1.0 or 1.1 status line, passed into | ||
| 113 | :class:`httplib.HTTPConnection`. | ||
| 114 | |||
| 115 | .. note:: | ||
| 116 | Only works in Python 2. This parameter is ignored in Python 3. | ||
| 117 | |||
| 118 | :param timeout: | ||
| 119 | Socket timeout in seconds for each individual connection. This can | ||
| 120 | be a float or integer, which sets the timeout for the HTTP request, | ||
| 121 | or an instance of :class:`urllib3.util.Timeout` which gives you more | ||
| 122 | fine-grained control over request timeouts. Once the constructor has | ||
| 123 | run, this is always a `urllib3.util.Timeout` object. | ||
| 124 | |||
| 125 | :param maxsize: | ||
| 126 | Number of connections to save that can be reused. More than 1 is useful | ||
| 127 | in multithreaded situations. If ``block`` is set to False, more | ||
| 128 | connections will be created but they will not be saved once they've | ||
| 129 | been used. | ||
| 130 | |||
| 131 | :param block: | ||
| 132 | If set to True, no more than ``maxsize`` connections will be used at | ||
| 133 | a time. When no free connections are available, the call will block | ||
| 134 | until a connection has been released. This is a useful side effect for | ||
| 135 | particular multithreaded situations where one does not want to use more | ||
| 136 | than maxsize connections per host to prevent flooding. | ||
| 137 | |||
| 138 | :param headers: | ||
| 139 | Headers to include with all requests, unless other headers are given | ||
| 140 | explicitly. | ||
| 141 | |||
| 142 | :param retries: | ||
| 143 | Retry configuration to use by default with requests in this pool. | ||
| 144 | |||
| 145 | :param _proxy: | ||
| 146 | Parsed proxy URL, should not be used directly, instead, see | ||
| 147 | :class:`urllib3.connectionpool.ProxyManager` | ||
| 148 | |||
| 149 | :param _proxy_headers: | ||
| 150 | A dictionary with proxy headers, should not be used directly, | ||
| 151 | instead, see :class:`urllib3.connectionpool.ProxyManager` | ||
| 152 | |||
| 153 | :param \\**conn_kw: | ||
| 154 | Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`, | ||
| 155 | :class:`urllib3.connection.HTTPSConnection` instances. | ||
| 156 | """ | ||
| 157 | |||
| 158 | scheme = 'http' | ||
| 159 | ConnectionCls = HTTPConnection | ||
| 160 | ResponseCls = HTTPResponse | ||
| 161 | |||
| 162 | def __init__(self, host, port=None, strict=False, | ||
| 163 | timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1, block=False, | ||
| 164 | headers=None, retries=None, | ||
| 165 | _proxy=None, _proxy_headers=None, | ||
| 166 | **conn_kw): | ||
| 167 | ConnectionPool.__init__(self, host, port) | ||
| 168 | RequestMethods.__init__(self, headers) | ||
| 169 | |||
| 170 | self.strict = strict | ||
| 171 | |||
| 172 | if not isinstance(timeout, Timeout): | ||
| 173 | timeout = Timeout.from_float(timeout) | ||
| 174 | |||
| 175 | if retries is None: | ||
| 176 | retries = Retry.DEFAULT | ||
| 177 | |||
| 178 | self.timeout = timeout | ||
| 179 | self.retries = retries | ||
| 180 | |||
| 181 | self.pool = self.QueueCls(maxsize) | ||
| 182 | self.block = block | ||
| 183 | |||
| 184 | self.proxy = _proxy | ||
| 185 | self.proxy_headers = _proxy_headers or {} | ||
| 186 | |||
| 187 | # Fill the queue up so that doing get() on it will block properly | ||
| 188 | for _ in xrange(maxsize): | ||
| 189 | self.pool.put(None) | ||
| 190 | |||
| 191 | # These are mostly for testing and debugging purposes. | ||
| 192 | self.num_connections = 0 | ||
| 193 | self.num_requests = 0 | ||
| 194 | self.conn_kw = conn_kw | ||
| 195 | |||
| 196 | if self.proxy: | ||
| 197 | # Enable Nagle's algorithm for proxies, to avoid packet fragmentation. | ||
| 198 | # We cannot know if the user has added default socket options, so we cannot replace the | ||
| 199 | # list. | ||
| 200 | self.conn_kw.setdefault('socket_options', []) | ||
| 201 | |||
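
The maxsize/block pair documented above trades reuse against concurrency: maxsize caps how many connections are kept, and block=True makes extra callers wait instead of opening throwaway connections. A hedged constructor sketch (host and values are illustrative):

    from pip._vendor import urllib3
    from pip._vendor.urllib3.util import Retry, Timeout

    pool = urllib3.HTTPConnectionPool(
        'example.com', port=80, maxsize=10, block=True,
        timeout=Timeout(connect=2.0, read=5.0),
        retries=Retry(total=3),
    )
    r = pool.request('GET', '/')
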
| 202 | def _new_conn(self): | ||
| 203 | """ | ||
| 204 | Return a fresh :class:`HTTPConnection`. | ||
| 205 | """ | ||
| 206 | self.num_connections += 1 | ||
| 207 | log.debug("Starting new HTTP connection (%d): %s", | ||
| 208 | self.num_connections, self.host) | ||
| 209 | |||
| 210 | conn = self.ConnectionCls(host=self.host, port=self.port, | ||
| 211 | timeout=self.timeout.connect_timeout, | ||
| 212 | strict=self.strict, **self.conn_kw) | ||
| 213 | return conn | ||
| 214 | |||
| 215 | def _get_conn(self, timeout=None): | ||
| 216 | """ | ||
| 217 | Get a connection. Will return a pooled connection if one is available. | ||
| 218 | |||
| 219 | If no connections are available and :prop:`.block` is ``False``, then a | ||
| 220 | fresh connection is returned. | ||
| 221 | |||
| 222 | :param timeout: | ||
| 223 | Seconds to wait before giving up and raising | ||
| 224 | :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and | ||
| 225 | :prop:`.block` is ``True``. | ||
| 226 | """ | ||
| 227 | conn = None | ||
| 228 | try: | ||
| 229 | conn = self.pool.get(block=self.block, timeout=timeout) | ||
| 230 | |||
| 231 | except AttributeError: # self.pool is None | ||
| 232 | raise ClosedPoolError(self, "Pool is closed.") | ||
| 233 | |||
| 234 | except queue.Empty: | ||
| 235 | if self.block: | ||
| 236 | raise EmptyPoolError(self, | ||
| 237 | "Pool reached maximum size and no more " | ||
| 238 | "connections are allowed.") | ||
| 239 | pass # Oh well, we'll create a new connection then | ||
| 240 | |||
| 241 | # If this is a persistent connection, check if it got disconnected | ||
| 242 | if conn and is_connection_dropped(conn): | ||
| 243 | log.debug("Resetting dropped connection: %s", self.host) | ||
| 244 | conn.close() | ||
| 245 | if getattr(conn, 'auto_open', 1) == 0: | ||
| 246 | # This is a proxied connection that has been mutated by | ||
| 247 | # httplib._tunnel() and cannot be reused (since it would | ||
| 248 | # attempt to bypass the proxy) | ||
| 249 | conn = None | ||
| 250 | |||
| 251 | return conn or self._new_conn() | ||
| 252 | |||
| 253 | def _put_conn(self, conn): | ||
| 254 | """ | ||
| 255 | Put a connection back into the pool. | ||
| 256 | |||
| 257 | :param conn: | ||
| 258 | Connection object for the current host and port as returned by | ||
| 259 | :meth:`._new_conn` or :meth:`._get_conn`. | ||
| 260 | |||
| 261 | If the pool is already full, the connection is closed and discarded | ||
| 262 | because we exceeded maxsize. If connections are discarded frequently, | ||
| 263 | then maxsize should be increased. | ||
| 264 | |||
| 265 | If the pool is closed, then the connection will be closed and discarded. | ||
| 266 | """ | ||
| 267 | try: | ||
| 268 | self.pool.put(conn, block=False) | ||
| 269 | return # Everything is dandy, done. | ||
| 270 | except AttributeError: | ||
| 271 | # self.pool is None. | ||
| 272 | pass | ||
| 273 | except queue.Full: | ||
| 274 | # This should never happen if self.block == True | ||
| 275 | log.warning( | ||
| 276 | "Connection pool is full, discarding connection: %s", | ||
| 277 | self.host) | ||
| 278 | |||
| 279 | # Connection never got put back into the pool, close it. | ||
| 280 | if conn: | ||
| 281 | conn.close() | ||
| 282 | |||
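
The checkout/checkin cycle above works because __init__() pre-fills the LifoQueue with None placeholders, so get() can block on "slot free" before any real connection exists. The trick in isolation (standard-library queue here; the vendored code goes through six.moves.queue):

    from queue import LifoQueue

    maxsize = 2
    pool = LifoQueue(maxsize)
    for _ in range(maxsize):
        pool.put(None)            # placeholder: slot free, no conn yet

    slot = pool.get(block=True)   # None -> caller builds a fresh connection
    conn = slot or object()       # object() stands in for _new_conn()
    pool.put(conn)                # check the (possibly new) connection back in
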
| 283 | def _validate_conn(self, conn): | ||
| 284 | """ | ||
| 285 | Called right before a request is made, after the socket is created. | ||
| 286 | """ | ||
| 287 | pass | ||
| 288 | |||
| 289 | def _prepare_proxy(self, conn): | ||
| 290 | # Nothing to do for HTTP connections. | ||
| 291 | pass | ||
| 292 | |||
| 293 | def _get_timeout(self, timeout): | ||
| 294 | """ Helper that always returns a :class:`urllib3.util.Timeout` """ | ||
| 295 | if timeout is _Default: | ||
| 296 | return self.timeout.clone() | ||
| 297 | |||
| 298 | if isinstance(timeout, Timeout): | ||
| 299 | return timeout.clone() | ||
| 300 | else: | ||
| 301 | # User passed us an int/float. This is for backwards compatibility, | ||
| 302 | # can be removed later | ||
| 303 | return Timeout.from_float(timeout) | ||
| 304 | |||
| 305 | def _raise_timeout(self, err, url, timeout_value): | ||
| 306 | """Is the error actually a timeout? Will raise a ReadTimeout or pass""" | ||
| 307 | |||
| 308 | if isinstance(err, SocketTimeout): | ||
| 309 | raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value) | ||
| 310 | |||
| 311 | # See the above comment about EAGAIN in Python 3. In Python 2 we have | ||
| 312 | # to specifically catch it and throw the timeout error | ||
| 313 | if hasattr(err, 'errno') and err.errno in _blocking_errnos: | ||
| 314 | raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value) | ||
| 315 | |||
| 316 | # Catch possible read timeouts thrown as SSL errors. If not the | ||
| 317 | # case, rethrow the original. We need to do this because of: | ||
| 318 | # http://bugs.python.org/issue10272 | ||
| 319 | if 'timed out' in str(err) or 'did not complete (read)' in str(err): # Python 2.6 | ||
| 320 | raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value) | ||
| 321 | |||
| 322 | def _make_request(self, conn, method, url, timeout=_Default, chunked=False, | ||
| 323 | **httplib_request_kw): | ||
| 324 | """ | ||
| 325 | Perform a request on a given urllib connection object taken from our | ||
| 326 | pool. | ||
| 327 | |||
| 328 | :param conn: | ||
| 329 | a connection from one of our connection pools | ||
| 330 | |||
| 331 | :param timeout: | ||
| 332 | Socket timeout in seconds for the request. This can be a | ||
| 333 | float or integer, which will set the same timeout value for | ||
| 334 | the socket connect and the socket read, or an instance of | ||
| 335 | :class:`urllib3.util.Timeout`, which gives you more fine-grained | ||
| 336 | control over your timeouts. | ||
| 337 | """ | ||
| 338 | self.num_requests += 1 | ||
| 339 | |||
| 340 | timeout_obj = self._get_timeout(timeout) | ||
| 341 | timeout_obj.start_connect() | ||
| 342 | conn.timeout = timeout_obj.connect_timeout | ||
| 343 | |||
| 344 | # Trigger any extra validation we need to do. | ||
| 345 | try: | ||
| 346 | self._validate_conn(conn) | ||
| 347 | except (SocketTimeout, BaseSSLError) as e: | ||
| 348 | # Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout. | ||
| 349 | self._raise_timeout(err=e, url=url, timeout_value=conn.timeout) | ||
| 350 | raise | ||
| 351 | |||
| 352 | # conn.request() calls httplib.*.request, not the method in | ||
| 353 | # urllib3.request. It also calls makefile (recv) on the socket. | ||
| 354 | if chunked: | ||
| 355 | conn.request_chunked(method, url, **httplib_request_kw) | ||
| 356 | else: | ||
| 357 | conn.request(method, url, **httplib_request_kw) | ||
| 358 | |||
| 359 | # Reset the timeout for the recv() on the socket | ||
| 360 | read_timeout = timeout_obj.read_timeout | ||
| 361 | |||
| 362 | # App Engine doesn't have a sock attr | ||
| 363 | if getattr(conn, 'sock', None): | ||
| 364 | # In Python 3 socket.py will catch EAGAIN and return None when you | ||
| 365 | # try and read into the file pointer created by http.client, which | ||
| 366 | # instead raises a BadStatusLine exception. Instead of catching | ||
| 367 | # the exception and assuming all BadStatusLine exceptions are read | ||
| 368 | # timeouts, check for a zero timeout before making the request. | ||
| 369 | if read_timeout == 0: | ||
| 370 | raise ReadTimeoutError( | ||
| 371 | self, url, "Read timed out. (read timeout=%s)" % read_timeout) | ||
| 372 | if read_timeout is Timeout.DEFAULT_TIMEOUT: | ||
| 373 | conn.sock.settimeout(socket.getdefaulttimeout()) | ||
| 374 | else: # None or a value | ||
| 375 | conn.sock.settimeout(read_timeout) | ||
| 376 | |||
| 377 | # Receive the response from the server | ||
| 378 | try: | ||
| 379 | try: # Python 2.7, use buffering of HTTP responses | ||
| 380 | httplib_response = conn.getresponse(buffering=True) | ||
| 381 | except TypeError: # Python 2.6 and older, Python 3 | ||
| 382 | try: | ||
| 383 | httplib_response = conn.getresponse() | ||
| 384 | except Exception as e: | ||
| 385 | # Remove the TypeError from the exception chain in Python 3; | ||
| 386 | # otherwise it looks like a programming error was the cause. | ||
| 387 | six.raise_from(e, None) | ||
| 388 | except (SocketTimeout, BaseSSLError, SocketError) as e: | ||
| 389 | self._raise_timeout(err=e, url=url, timeout_value=read_timeout) | ||
| 390 | raise | ||
| 391 | |||
| 392 | # AppEngine doesn't have a version attr. | ||
| 393 | http_version = getattr(conn, '_http_vsn_str', 'HTTP/?') | ||
| 394 | log.debug("%s://%s:%s \"%s %s %s\" %s %s", self.scheme, self.host, self.port, | ||
| 395 | method, url, http_version, httplib_response.status, | ||
| 396 | httplib_response.length) | ||
| 397 | |||
| 398 | try: | ||
| 399 | assert_header_parsing(httplib_response.msg) | ||
| 400 | except (HeaderParsingError, TypeError) as hpe: # Platform-specific: Python 3 | ||
| 401 | log.warning( | ||
| 402 | 'Failed to parse headers (url=%s): %s', | ||
| 403 | self._absolute_url(url), hpe, exc_info=True) | ||
| 404 | |||
| 405 | return httplib_response | ||
| 406 | |||
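
_make_request() splits a single Timeout into a connect budget (applied before the request) and a read budget (settimeout() before the response recv). From the public API that corresponds to passing a Timeout per call, e.g. (host is illustrative):

    from pip._vendor import urllib3
    from pip._vendor.urllib3.util import Timeout

    pool = urllib3.HTTPConnectionPool('example.com')
    r = pool.urlopen('GET', '/', timeout=Timeout(connect=1.0, read=10.0))
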
| 407 | def _absolute_url(self, path): | ||
| 408 | return Url(scheme=self.scheme, host=self.host, port=self.port, path=path).url | ||
| 409 | |||
| 410 | def close(self): | ||
| 411 | """ | ||
| 412 | Close all pooled connections and disable the pool. | ||
| 413 | """ | ||
| 414 | # Disable access to the pool | ||
| 415 | old_pool, self.pool = self.pool, None | ||
| 416 | |||
| 417 | try: | ||
| 418 | while True: | ||
| 419 | conn = old_pool.get(block=False) | ||
| 420 | if conn: | ||
| 421 | conn.close() | ||
| 422 | |||
| 423 | except queue.Empty: | ||
| 424 | pass # Done. | ||
| 425 | |||
| 426 | def is_same_host(self, url): | ||
| 427 | """ | ||
| 428 | Check if the given ``url`` is a member of the same host as this | ||
| 429 | connection pool. | ||
| 430 | """ | ||
| 431 | if url.startswith('/'): | ||
| 432 | return True | ||
| 433 | |||
| 434 | # TODO: Add optional support for socket.gethostbyname checking. | ||
| 435 | scheme, host, port = get_host(url) | ||
| 436 | |||
| 437 | host = _ipv6_host(host).lower() | ||
| 438 | |||
| 439 | # Use explicit default port for comparison when none is given | ||
| 440 | if self.port and not port: | ||
| 441 | port = port_by_scheme.get(scheme) | ||
| 442 | elif not self.port and port == port_by_scheme.get(scheme): | ||
| 443 | port = None | ||
| 444 | |||
| 445 | return (scheme, host, port) == (self.scheme, self.host, self.port) | ||
| 446 | |||
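
is_same_host() normalizes default ports so that an explicit default port and an omitted one compare equal. Illustrative expectations (hosts hypothetical):

    from pip._vendor import urllib3

    pool = urllib3.HTTPConnectionPool('example.com')          # implicit port 80
    assert pool.is_same_host('/relative/path')                # relative URLs match
    assert pool.is_same_host('http://example.com/')           # implicit port
    assert pool.is_same_host('http://example.com:80/')        # explicit default
    assert not pool.is_same_host('http://example.com:8080/')
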
| 447 | def urlopen(self, method, url, body=None, headers=None, retries=None, | ||
| 448 | redirect=True, assert_same_host=True, timeout=_Default, | ||
| 449 | pool_timeout=None, release_conn=None, chunked=False, | ||
| 450 | body_pos=None, **response_kw): | ||
| 451 | """ | ||
| 452 | Get a connection from the pool and perform an HTTP request. This is the | ||
| 453 | lowest level call for making a request, so you'll need to specify all | ||
| 454 | the raw details. | ||
| 455 | |||
| 456 | .. note:: | ||
| 457 | |||
| 458 | More commonly, it's appropriate to use a convenience method provided | ||
| 459 | by :class:`.RequestMethods`, such as :meth:`request`. | ||
| 460 | |||
| 461 | .. note:: | ||
| 462 | |||
| 463 | `release_conn` will only behave as expected if | ||
| 464 | `preload_content=False` because we want to make | ||
| 465 | `preload_content=False` the default behaviour someday soon without | ||
| 466 | breaking backwards compatibility. | ||
| 467 | |||
| 468 | :param method: | ||
| 469 | HTTP request method (such as GET, POST, PUT, etc.) | ||
| 470 | |||
| 471 | :param body: | ||
| 472 | Data to send in the request body (useful for creating | ||
| 473 | POST requests, see HTTPConnectionPool.post_url for | ||
| 474 | more convenience). | ||
| 475 | |||
| 476 | :param headers: | ||
| 477 | Dictionary of custom headers to send, such as User-Agent, | ||
| 478 | If-None-Match, etc. If None, pool headers are used. If provided, | ||
| 479 | these headers completely replace any pool-specific headers. | ||
| 480 | |||
| 481 | :param retries: | ||
| 482 | Configure the number of retries to allow before raising a | ||
| 483 | :class:`~urllib3.exceptions.MaxRetryError` exception. | ||
| 484 | |||
| 485 | Pass ``None`` to retry until you receive a response. Pass a | ||
| 486 | :class:`~urllib3.util.retry.Retry` object for fine-grained control | ||
| 487 | over different types of retries. | ||
| 488 | Pass an integer number to retry connection errors that many times, | ||
| 489 | but no other types of errors. Pass zero to never retry. | ||
| 490 | |||
| 491 | If ``False``, then retries are disabled and any exception is raised | ||
| 492 | immediately. Also, instead of raising a MaxRetryError on redirects, | ||
| 493 | the redirect response will be returned. | ||
| 494 | |||
| 495 | :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int. | ||
| 496 | |||
| 497 | :param redirect: | ||
| 498 | If True, automatically handle redirects (status codes 301, 302, | ||
| 499 | 303, 307, 308). Each redirect counts as a retry. Disabling retries | ||
| 500 | will disable redirect, too. | ||
| 501 | |||
| 502 | :param assert_same_host: | ||
| 503 | If ``True``, will make sure that the host of the pool requests is | ||
| 504 | consistent else will raise HostChangedError. When False, you can | ||
| 505 | use the pool on an HTTP proxy and request foreign hosts. | ||
| 506 | |||
| 507 | :param timeout: | ||
| 508 | If specified, overrides the default timeout for this one | ||
| 509 | request. It may be a float (in seconds) or an instance of | ||
| 510 | :class:`urllib3.util.Timeout`. | ||
| 511 | |||
| 512 | :param pool_timeout: | ||
| 513 | If set and the pool is set to block=True, then this method will | ||
| 514 | block for ``pool_timeout`` seconds and raise EmptyPoolError if no | ||
| 515 | connection is available within the time period. | ||
| 516 | |||
| 517 | :param release_conn: | ||
| 518 | If False, then the urlopen call will not release the connection | ||
| 519 | back into the pool once a response is received (but will release if | ||
| 520 | you read the entire contents of the response such as when | ||
| 521 | `preload_content=True`). This is useful if you're not preloading | ||
| 522 | the response's content immediately. You will need to call | ||
| 523 | ``r.release_conn()`` on the response ``r`` to return the connection | ||
| 524 | back into the pool. If None, it takes the value of | ||
| 525 | ``response_kw.get('preload_content', True)``. | ||
| 526 | |||
| 527 | :param chunked: | ||
| 528 | If True, urllib3 will send the body using chunked transfer | ||
| 529 | encoding. Otherwise, urllib3 will send the body using the standard | ||
| 530 | content-length form. Defaults to False. | ||
| 531 | |||
| 532 | :param int body_pos: | ||
| 533 | Position to seek to in file-like body in the event of a retry or | ||
| 534 | redirect. Typically this won't need to be set because urllib3 will | ||
| 535 | auto-populate the value when needed. | ||
| 536 | |||
| 537 | :param \\**response_kw: | ||
| 538 | Additional parameters are passed to | ||
| 539 | :meth:`urllib3.response.HTTPResponse.from_httplib` | ||
| 540 | """ | ||
| 541 | if headers is None: | ||
| 542 | headers = self.headers | ||
| 543 | |||
| 544 | if not isinstance(retries, Retry): | ||
| 545 | retries = Retry.from_int(retries, redirect=redirect, default=self.retries) | ||
| 546 | |||
| 547 | if release_conn is None: | ||
| 548 | release_conn = response_kw.get('preload_content', True) | ||
| 549 | |||
| 550 | # Check host | ||
| 551 | if assert_same_host and not self.is_same_host(url): | ||
| 552 | raise HostChangedError(self, url, retries) | ||
| 553 | |||
| 554 | conn = None | ||
| 555 | |||
| 556 | # Track whether `conn` needs to be released before | ||
| 557 | # returning/raising/recursing. Update this variable if necessary, and | ||
| 558 | # leave `release_conn` constant throughout the function. That way, if | ||
| 559 | # the function recurses, the original value of `release_conn` will be | ||
| 560 | # passed down into the recursive call, and its value will be respected. | ||
| 561 | # | ||
| 562 | # See issue #651 [1] for details. | ||
| 563 | # | ||
| 564 | # [1] <https://github.com/shazow/urllib3/issues/651> | ||
| 565 | release_this_conn = release_conn | ||
| 566 | |||
| 567 | # Merge the proxy headers. Only do this in HTTP. We have to copy the | ||
| 568 | # headers dict so we can safely change it without those changes being | ||
| 569 | # reflected in anyone else's copy. | ||
| 570 | if self.scheme == 'http': | ||
| 571 | headers = headers.copy() | ||
| 572 | headers.update(self.proxy_headers) | ||
| 573 | |||
| 574 | # Must keep the exception bound to a separate variable or else Python 3 | ||
| 575 | # complains about UnboundLocalError. | ||
| 576 | err = None | ||
| 577 | |||
| 578 | # Keep track of whether we cleanly exited the except block. This | ||
| 579 | # ensures we do proper cleanup in finally. | ||
| 580 | clean_exit = False | ||
| 581 | |||
| 582 | # Rewind body position, if needed. Record current position | ||
| 583 | # for future rewinds in the event of a redirect/retry. | ||
| 584 | body_pos = set_file_position(body, body_pos) | ||
| 585 | |||
| 586 | try: | ||
| 587 | # Request a connection from the queue. | ||
| 588 | timeout_obj = self._get_timeout(timeout) | ||
| 589 | conn = self._get_conn(timeout=pool_timeout) | ||
| 590 | |||
| 591 | conn.timeout = timeout_obj.connect_timeout | ||
| 592 | |||
| 593 | is_new_proxy_conn = self.proxy is not None and not getattr(conn, 'sock', None) | ||
| 594 | if is_new_proxy_conn: | ||
| 595 | self._prepare_proxy(conn) | ||
| 596 | |||
| 597 | # Make the request on the httplib connection object. | ||
| 598 | httplib_response = self._make_request(conn, method, url, | ||
| 599 | timeout=timeout_obj, | ||
| 600 | body=body, headers=headers, | ||
| 601 | chunked=chunked) | ||
| 602 | |||
| 603 | # If we're going to release the connection in ``finally:``, then | ||
| 604 | # the response doesn't need to know about the connection. Otherwise | ||
| 605 | # it will also try to release it and we'll have a double-release | ||
| 606 | # mess. | ||
| 607 | response_conn = conn if not release_conn else None | ||
| 608 | |||
| 609 | # Pass method to Response for length checking | ||
| 610 | response_kw['request_method'] = method | ||
| 611 | |||
| 612 | # Import httplib's response into our own wrapper object | ||
| 613 | response = self.ResponseCls.from_httplib(httplib_response, | ||
| 614 | pool=self, | ||
| 615 | connection=response_conn, | ||
| 616 | retries=retries, | ||
| 617 | **response_kw) | ||
| 618 | |||
| 619 | # Everything went great! | ||
| 620 | clean_exit = True | ||
| 621 | |||
| 622 | except queue.Empty: | ||
| 623 | # Timed out by queue. | ||
| 624 | raise EmptyPoolError(self, "No pool connections are available.") | ||
| 625 | |||
| 626 | except (TimeoutError, HTTPException, SocketError, ProtocolError, | ||
| 627 | BaseSSLError, SSLError, CertificateError) as e: | ||
| 628 | # Discard the connection for these exceptions. It will be | ||
| 629 | # replaced during the next _get_conn() call. | ||
| 630 | clean_exit = False | ||
| 631 | if isinstance(e, (BaseSSLError, CertificateError)): | ||
| 632 | e = SSLError(e) | ||
| 633 | elif isinstance(e, (SocketError, NewConnectionError)) and self.proxy: | ||
| 634 | e = ProxyError('Cannot connect to proxy.', e) | ||
| 635 | elif isinstance(e, (SocketError, HTTPException)): | ||
| 636 | e = ProtocolError('Connection aborted.', e) | ||
| 637 | |||
| 638 | retries = retries.increment(method, url, error=e, _pool=self, | ||
| 639 | _stacktrace=sys.exc_info()[2]) | ||
| 640 | retries.sleep() | ||
| 641 | |||
| 642 | # Keep track of the error for the retry warning. | ||
| 643 | err = e | ||
| 644 | |||
| 645 | finally: | ||
| 646 | if not clean_exit: | ||
| 647 | # We hit some kind of exception, handled or otherwise. We need | ||
| 648 | # to throw the connection away unless explicitly told not to. | ||
| 649 | # Close the connection, set the variable to None, and make sure | ||
| 650 | # we put the None back in the pool to avoid leaking it. | ||
| 651 | conn = conn and conn.close() | ||
| 652 | release_this_conn = True | ||
| 653 | |||
| 654 | if release_this_conn: | ||
| 655 | # Put the connection back to be reused. If the connection is | ||
| 656 | # expired then it will be None, which will get replaced with a | ||
| 657 | # fresh connection during _get_conn. | ||
| 658 | self._put_conn(conn) | ||
| 659 | |||
| 660 | if not conn: | ||
| 661 | # Try again | ||
| 662 | log.warning("Retrying (%r) after connection " | ||
| 663 | "broken by '%r': %s", retries, err, url) | ||
| 664 | return self.urlopen(method, url, body, headers, retries, | ||
| 665 | redirect, assert_same_host, | ||
| 666 | timeout=timeout, pool_timeout=pool_timeout, | ||
| 667 | release_conn=release_conn, body_pos=body_pos, | ||
| 668 | **response_kw) | ||
| 669 | |||
| 670 | def drain_and_release_conn(response): | ||
| 671 | try: | ||
| 672 | # discard any remaining response body, the connection will be | ||
| 673 | # released back to the pool once the entire response is read | ||
| 674 | response.read() | ||
| 675 | except (TimeoutError, HTTPException, SocketError, ProtocolError, | ||
| 676 | BaseSSLError, SSLError) as e: | ||
| 677 | pass | ||
| 678 | |||
| 679 | # Handle redirect? | ||
| 680 | redirect_location = redirect and response.get_redirect_location() | ||
| 681 | if redirect_location: | ||
| 682 | if response.status == 303: | ||
| 683 | method = 'GET' | ||
| 684 | |||
| 685 | try: | ||
| 686 | retries = retries.increment(method, url, response=response, _pool=self) | ||
| 687 | except MaxRetryError: | ||
| 688 | if retries.raise_on_redirect: | ||
| 689 | # Drain and release the connection for this response, since | ||
| 690 | # we're not returning it to be released manually. | ||
| 691 | drain_and_release_conn(response) | ||
| 692 | raise | ||
| 693 | return response | ||
| 694 | |||
| 695 | # drain and return the connection to the pool before recursing | ||
| 696 | drain_and_release_conn(response) | ||
| 697 | |||
| 698 | retries.sleep_for_retry(response) | ||
| 699 | log.debug("Redirecting %s -> %s", url, redirect_location) | ||
| 700 | return self.urlopen( | ||
| 701 | method, redirect_location, body, headers, | ||
| 702 | retries=retries, redirect=redirect, | ||
| 703 | assert_same_host=assert_same_host, | ||
| 704 | timeout=timeout, pool_timeout=pool_timeout, | ||
| 705 | release_conn=release_conn, body_pos=body_pos, | ||
| 706 | **response_kw) | ||
| 707 | |||
| 708 | # Check if we should retry the HTTP response. | ||
| 709 | has_retry_after = bool(response.getheader('Retry-After')) | ||
| 710 | if retries.is_retry(method, response.status, has_retry_after): | ||
| 711 | try: | ||
| 712 | retries = retries.increment(method, url, response=response, _pool=self) | ||
| 713 | except MaxRetryError: | ||
| 714 | if retries.raise_on_status: | ||
| 715 | # Drain and release the connection for this response, since | ||
| 716 | # we're not returning it to be released manually. | ||
| 717 | drain_and_release_conn(response) | ||
| 718 | raise | ||
| 719 | return response | ||
| 720 | |||
| 721 | # drain and return the connection to the pool before recursing | ||
| 722 | drain_and_release_conn(response) | ||
| 723 | |||
| 724 | retries.sleep(response) | ||
| 725 | log.debug("Retry: %s", url) | ||
| 726 | return self.urlopen( | ||
| 727 | method, url, body, headers, | ||
| 728 | retries=retries, redirect=redirect, | ||
| 729 | assert_same_host=assert_same_host, | ||
| 730 | timeout=timeout, pool_timeout=pool_timeout, | ||
| 731 | release_conn=release_conn, | ||
| 732 | body_pos=body_pos, **response_kw) | ||
| 733 | |||
| 734 | return response | ||
| 735 | |||
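
As the docstring notes, retries may be an int, False, or a Retry object, and redirects draw from the same budget. A hedged sketch of fine-grained control (host and values illustrative):

    from pip._vendor import urllib3
    from pip._vendor.urllib3.util import Retry

    retry = Retry(total=5, redirect=2, backoff_factor=0.3,
                  status_forcelist=(500, 502, 503))
    pool = urllib3.HTTPSConnectionPool('example.com', port=443)
    r = pool.urlopen('GET', '/', retries=retry, redirect=True)
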
| 736 | |||
| 737 | class HTTPSConnectionPool(HTTPConnectionPool): | ||
| 738 | """ | ||
| 739 | Same as :class:`.HTTPConnectionPool`, but HTTPS. | ||
| 740 | |||
| 741 | When Python is compiled with the :mod:`ssl` module, then | ||
| 742 | :class:`.VerifiedHTTPSConnection` is used, which *can* verify certificates, | ||
| 743 | instead of :class:`.HTTPSConnection`. | ||
| 744 | |||
| 745 | :class:`.VerifiedHTTPSConnection` uses one of ``assert_fingerprint``, | ||
| 746 | ``assert_hostname`` and ``host`` in this order to verify connections. | ||
| 747 | If ``assert_hostname`` is False, no verification is done. | ||
| 748 | |||
| 749 | The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``, | ||
| 750 | ``ca_cert_dir``, and ``ssl_version`` are only used if :mod:`ssl` is | ||
| 751 | available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade | ||
| 752 | the connection socket into an SSL socket. | ||
| 753 | """ | ||
| 754 | |||
| 755 | scheme = 'https' | ||
| 756 | ConnectionCls = HTTPSConnection | ||
| 757 | |||
| 758 | def __init__(self, host, port=None, | ||
| 759 | strict=False, timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1, | ||
| 760 | block=False, headers=None, retries=None, | ||
| 761 | _proxy=None, _proxy_headers=None, | ||
| 762 | key_file=None, cert_file=None, cert_reqs=None, | ||
| 763 | ca_certs=None, ssl_version=None, | ||
| 764 | assert_hostname=None, assert_fingerprint=None, | ||
| 765 | ca_cert_dir=None, **conn_kw): | ||
| 766 | |||
| 767 | HTTPConnectionPool.__init__(self, host, port, strict, timeout, maxsize, | ||
| 768 | block, headers, retries, _proxy, _proxy_headers, | ||
| 769 | **conn_kw) | ||
| 770 | |||
| 771 | if ca_certs and cert_reqs is None: | ||
| 772 | cert_reqs = 'CERT_REQUIRED' | ||
| 773 | |||
| 774 | self.key_file = key_file | ||
| 775 | self.cert_file = cert_file | ||
| 776 | self.cert_reqs = cert_reqs | ||
| 777 | self.ca_certs = ca_certs | ||
| 778 | self.ca_cert_dir = ca_cert_dir | ||
| 779 | self.ssl_version = ssl_version | ||
| 780 | self.assert_hostname = assert_hostname | ||
| 781 | self.assert_fingerprint = assert_fingerprint | ||
| 782 | |||
| 783 | def _prepare_conn(self, conn): | ||
| 784 | """ | ||
| 785 | Prepare the ``connection`` for :meth:`urllib3.util.ssl_wrap_socket` | ||
| 786 | and establish the tunnel if proxy is used. | ||
| 787 | """ | ||
| 788 | |||
| 789 | if isinstance(conn, VerifiedHTTPSConnection): | ||
| 790 | conn.set_cert(key_file=self.key_file, | ||
| 791 | cert_file=self.cert_file, | ||
| 792 | cert_reqs=self.cert_reqs, | ||
| 793 | ca_certs=self.ca_certs, | ||
| 794 | ca_cert_dir=self.ca_cert_dir, | ||
| 795 | assert_hostname=self.assert_hostname, | ||
| 796 | assert_fingerprint=self.assert_fingerprint) | ||
| 797 | conn.ssl_version = self.ssl_version | ||
| 798 | return conn | ||
| 799 | |||
| 800 | def _prepare_proxy(self, conn): | ||
| 801 | """ | ||
| 802 | Establish tunnel connection early, because otherwise httplib | ||
| 803 | would improperly set Host: header to proxy's IP:port. | ||
| 804 | """ | ||
| 805 | # Python 2.7+ | ||
| 806 | try: | ||
| 807 | set_tunnel = conn.set_tunnel | ||
| 808 | except AttributeError: # Platform-specific: Python 2.6 | ||
| 809 | set_tunnel = conn._set_tunnel | ||
| 810 | |||
| 811 | if sys.version_info <= (2, 6, 4) and not self.proxy_headers: # Python 2.6.4 and older | ||
| 812 | set_tunnel(self._proxy_host, self.port) | ||
| 813 | else: | ||
| 814 | set_tunnel(self._proxy_host, self.port, self.proxy_headers) | ||
| 815 | |||
| 816 | conn.connect() | ||
| 817 | |||
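
In normal use this early tunnel setup is driven by ProxyManager rather than called directly. A hedged sketch (the proxy URL is illustrative):

    from pip._vendor import urllib3

    proxy = urllib3.ProxyManager('http://proxy.example:3128/')
    r = proxy.request('GET', 'https://example.com/')  # CONNECT tunnel via _prepare_proxy()
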
| 818 | def _new_conn(self): | ||
| 819 | """ | ||
| 820 | Return a fresh :class:`httplib.HTTPSConnection`. | ||
| 821 | """ | ||
| 822 | self.num_connections += 1 | ||
| 823 | log.debug("Starting new HTTPS connection (%d): %s", | ||
| 824 | self.num_connections, self.host) | ||
| 825 | |||
| 826 | if not self.ConnectionCls or self.ConnectionCls is DummyConnection: | ||
| 827 | raise SSLError("Can't connect to HTTPS URL because the SSL " | ||
| 828 | "module is not available.") | ||
| 829 | |||
| 830 | actual_host = self.host | ||
| 831 | actual_port = self.port | ||
| 832 | if self.proxy is not None: | ||
| 833 | actual_host = self.proxy.host | ||
| 834 | actual_port = self.proxy.port | ||
| 835 | |||
| 836 | conn = self.ConnectionCls(host=actual_host, port=actual_port, | ||
| 837 | timeout=self.timeout.connect_timeout, | ||
| 838 | strict=self.strict, **self.conn_kw) | ||
| 839 | |||
| 840 | return self._prepare_conn(conn) | ||
| 841 | |||
| 842 | def _validate_conn(self, conn): | ||
| 843 | """ | ||
| 844 | Called right before a request is made, after the socket is created. | ||
| 845 | """ | ||
| 846 | super(HTTPSConnectionPool, self)._validate_conn(conn) | ||
| 847 | |||
| 848 | # Force connect early to allow us to validate the connection. | ||
| 849 | if not getattr(conn, 'sock', None): # AppEngine might not have `.sock` | ||
| 850 | conn.connect() | ||
| 851 | |||
| 852 | if not conn.is_verified: | ||
| 853 | warnings.warn(( | ||
| 854 | 'Unverified HTTPS request is being made. ' | ||
| 855 | 'Adding certificate verification is strongly advised. See: ' | ||
| 856 | 'https://urllib3.readthedocs.io/en/latest/advanced-usage.html' | ||
| 857 | '#ssl-warnings'), | ||
| 858 | InsecureRequestWarning) | ||
| 859 | |||
| 860 | |||
| 861 | def connection_from_url(url, **kw): | ||
| 862 | """ | ||
| 863 | Given a url, return an :class:`.ConnectionPool` instance of its host. | ||
| 864 | |||
| 865 | This is a shortcut for not having to parse out the scheme, host, and port | ||
| 866 | of the url before creating an :class:`.ConnectionPool` instance. | ||
| 867 | |||
| 868 | :param url: | ||
| 869 | Absolute URL string that must include the scheme. Port is optional. | ||
| 870 | |||
| 871 | :param \\**kw: | ||
| 872 | Passes additional parameters to the constructor of the appropriate | ||
| 873 | :class:`.ConnectionPool`. Useful for specifying things like | ||
| 874 | timeout, maxsize, headers, etc. | ||
| 875 | |||
| 876 | Example:: | ||
| 877 | |||
| 878 | >>> conn = connection_from_url('http://google.com/') | ||
| 879 | >>> r = conn.request('GET', '/') | ||
| 880 | """ | ||
| 881 | scheme, host, port = get_host(url) | ||
| 882 | port = port or port_by_scheme.get(scheme, 80) | ||
| 883 | if scheme == 'https': | ||
| 884 | return HTTPSConnectionPool(host, port=port, **kw) | ||
| 885 | else: | ||
| 886 | return HTTPConnectionPool(host, port=port, **kw) | ||
| 887 | |||
| 888 | |||
| 889 | def _ipv6_host(host): | ||
| 890 | """ | ||
| 891 | Process IPv6 address literals | ||
| 892 | """ | ||
| 893 | |||
| 894 | # httplib doesn't like it when we include brackets in IPv6 addresses | ||
| 895 | # Specifically, if we include brackets but also pass the port then | ||
| 896 | # httplib crazily doubles up the square brackets on the Host header. | ||
| 897 | # Instead, we need to make sure we never pass ``None`` as the port. | ||
| 898 | # However, for backward compatibility reasons we can't actually | ||
| 899 | # *assert* that. See http://bugs.python.org/issue28539 | ||
| 900 | # | ||
| 901 | # Also if an IPv6 address literal has a zone identifier, the | ||
| 902 | # percent sign might be URI-encoded; convert it back into ASCII | ||
| 903 | if host.startswith('[') and host.endswith(']'): | ||
| 904 | host = host.replace('%25', '%').strip('[]') | ||
| 905 | return host | ||
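
Concretely, _ipv6_host() strips the brackets httplib mishandles and decodes a percent-encoded zone identifier, leaving ordinary hosts untouched:

    assert _ipv6_host('[2001:db8::1]') == '2001:db8::1'
    assert _ipv6_host('[fe80::1%25eth0]') == 'fe80::1%eth0'   # %25 -> %
    assert _ipv6_host('example.com') == 'example.com'
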
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/contrib/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/contrib/__init__.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/contrib/__init__.py | |||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/contrib/_securetransport/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/contrib/_securetransport/__init__.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/contrib/_securetransport/__init__.py | |||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/contrib/_securetransport/bindings.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/contrib/_securetransport/bindings.py new file mode 100644 index 0000000..9787b02 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/contrib/_securetransport/bindings.py | |||
| @@ -0,0 +1,593 @@ | |||
| 1 | """ | ||
| 2 | This module uses ctypes to bind a whole bunch of functions and constants from | ||
| 3 | SecureTransport. The goal here is to provide the low-level API to | ||
| 4 | SecureTransport. These are essentially the C-level functions and constants, and | ||
| 5 | they're pretty gross to work with. | ||
| 6 | |||
| 7 | This code is a bastardised version of the code found in Will Bond's oscrypto | ||
| 8 | library. An enormous debt is owed to him for blazing this trail for us. For | ||
| 9 | that reason, this code should be considered to be covered both by urllib3's | ||
| 10 | license and by oscrypto's: | ||
| 11 | |||
| 12 | Copyright (c) 2015-2016 Will Bond <will@wbond.net> | ||
| 13 | |||
| 14 | Permission is hereby granted, free of charge, to any person obtaining a | ||
| 15 | copy of this software and associated documentation files (the "Software"), | ||
| 16 | to deal in the Software without restriction, including without limitation | ||
| 17 | the rights to use, copy, modify, merge, publish, distribute, sublicense, | ||
| 18 | and/or sell copies of the Software, and to permit persons to whom the | ||
| 19 | Software is furnished to do so, subject to the following conditions: | ||
| 20 | |||
| 21 | The above copyright notice and this permission notice shall be included in | ||
| 22 | all copies or substantial portions of the Software. | ||
| 23 | |||
| 24 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | ||
| 25 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | ||
| 26 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | ||
| 27 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | ||
| 28 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING | ||
| 29 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER | ||
| 30 | DEALINGS IN THE SOFTWARE. | ||
| 31 | """ | ||
| 32 | from __future__ import absolute_import | ||
| 33 | |||
| 34 | import platform | ||
| 35 | from ctypes.util import find_library | ||
| 36 | from ctypes import ( | ||
| 37 | c_void_p, c_int32, c_char_p, c_size_t, c_byte, c_uint32, c_ulong, c_long, | ||
| 38 | c_bool | ||
| 39 | ) | ||
| 40 | from ctypes import CDLL, POINTER, CFUNCTYPE | ||
| 41 | |||
| 42 | |||
| 43 | security_path = find_library('Security') | ||
| 44 | if not security_path: | ||
| 45 | raise ImportError('The library Security could not be found') | ||
| 46 | |||
| 47 | |||
| 48 | core_foundation_path = find_library('CoreFoundation') | ||
| 49 | if not core_foundation_path: | ||
| 50 | raise ImportError('The library CoreFoundation could not be found') | ||
| 51 | |||
| 52 | |||
| 53 | version = platform.mac_ver()[0] | ||
| 54 | version_info = tuple(map(int, version.split('.'))) | ||
| 55 | if version_info < (10, 8): | ||
| 56 | raise OSError( | ||
| 57 | 'Only OS X 10.8 and newer are supported, not %s.%s' % ( | ||
| 58 | version_info[0], version_info[1] | ||
| 59 | ) | ||
| 60 | ) | ||
| 61 | |||
| 62 | Security = CDLL(security_path, use_errno=True) | ||
| 63 | CoreFoundation = CDLL(core_foundation_path, use_errno=True) | ||
| 64 | |||
| 65 | Boolean = c_bool | ||
| 66 | CFIndex = c_long | ||
| 67 | CFStringEncoding = c_uint32 | ||
| 68 | CFData = c_void_p | ||
| 69 | CFString = c_void_p | ||
| 70 | CFArray = c_void_p | ||
| 71 | CFMutableArray = c_void_p | ||
| 72 | CFDictionary = c_void_p | ||
| 73 | CFError = c_void_p | ||
| 74 | CFType = c_void_p | ||
| 75 | CFTypeID = c_ulong | ||
| 76 | |||
| 77 | CFTypeRef = POINTER(CFType) | ||
| 78 | CFAllocatorRef = c_void_p | ||
| 79 | |||
| 80 | OSStatus = c_int32 | ||
| 81 | |||
| 82 | CFDataRef = POINTER(CFData) | ||
| 83 | CFStringRef = POINTER(CFString) | ||
| 84 | CFArrayRef = POINTER(CFArray) | ||
| 85 | CFMutableArrayRef = POINTER(CFMutableArray) | ||
| 86 | CFDictionaryRef = POINTER(CFDictionary) | ||
| 87 | CFArrayCallBacks = c_void_p | ||
| 88 | CFDictionaryKeyCallBacks = c_void_p | ||
| 89 | CFDictionaryValueCallBacks = c_void_p | ||
| 90 | |||
| 91 | SecCertificateRef = POINTER(c_void_p) | ||
| 92 | SecExternalFormat = c_uint32 | ||
| 93 | SecExternalItemType = c_uint32 | ||
| 94 | SecIdentityRef = POINTER(c_void_p) | ||
| 95 | SecItemImportExportFlags = c_uint32 | ||
| 96 | SecItemImportExportKeyParameters = c_void_p | ||
| 97 | SecKeychainRef = POINTER(c_void_p) | ||
| 98 | SSLProtocol = c_uint32 | ||
| 99 | SSLCipherSuite = c_uint32 | ||
| 100 | SSLContextRef = POINTER(c_void_p) | ||
| 101 | SecTrustRef = POINTER(c_void_p) | ||
| 102 | SSLConnectionRef = c_uint32 | ||
| 103 | SecTrustResultType = c_uint32 | ||
| 104 | SecTrustOptionFlags = c_uint32 | ||
| 105 | SSLProtocolSide = c_uint32 | ||
| 106 | SSLConnectionType = c_uint32 | ||
| 107 | SSLSessionOption = c_uint32 | ||
| 108 | |||
| 109 | |||
| 110 | try: | ||
| 111 | Security.SecItemImport.argtypes = [ | ||
| 112 | CFDataRef, | ||
| 113 | CFStringRef, | ||
| 114 | POINTER(SecExternalFormat), | ||
| 115 | POINTER(SecExternalItemType), | ||
| 116 | SecItemImportExportFlags, | ||
| 117 | POINTER(SecItemImportExportKeyParameters), | ||
| 118 | SecKeychainRef, | ||
| 119 | POINTER(CFArrayRef), | ||
| 120 | ] | ||
| 121 | Security.SecItemImport.restype = OSStatus | ||
| 122 | |||
| 123 | Security.SecCertificateGetTypeID.argtypes = [] | ||
| 124 | Security.SecCertificateGetTypeID.restype = CFTypeID | ||
| 125 | |||
| 126 | Security.SecIdentityGetTypeID.argtypes = [] | ||
| 127 | Security.SecIdentityGetTypeID.restype = CFTypeID | ||
| 128 | |||
| 129 | Security.SecKeyGetTypeID.argtypes = [] | ||
| 130 | Security.SecKeyGetTypeID.restype = CFTypeID | ||
| 131 | |||
| 132 | Security.SecCertificateCreateWithData.argtypes = [ | ||
| 133 | CFAllocatorRef, | ||
| 134 | CFDataRef | ||
| 135 | ] | ||
| 136 | Security.SecCertificateCreateWithData.restype = SecCertificateRef | ||
| 137 | |||
| 138 | Security.SecCertificateCopyData.argtypes = [ | ||
| 139 | SecCertificateRef | ||
| 140 | ] | ||
| 141 | Security.SecCertificateCopyData.restype = CFDataRef | ||
| 142 | |||
| 143 | Security.SecCopyErrorMessageString.argtypes = [ | ||
| 144 | OSStatus, | ||
| 145 | c_void_p | ||
| 146 | ] | ||
| 147 | Security.SecCopyErrorMessageString.restype = CFStringRef | ||
| 148 | |||
| 149 | Security.SecIdentityCreateWithCertificate.argtypes = [ | ||
| 150 | CFTypeRef, | ||
| 151 | SecCertificateRef, | ||
| 152 | POINTER(SecIdentityRef) | ||
| 153 | ] | ||
| 154 | Security.SecIdentityCreateWithCertificate.restype = OSStatus | ||
| 155 | |||
| 156 | Security.SecKeychainCreate.argtypes = [ | ||
| 157 | c_char_p, | ||
| 158 | c_uint32, | ||
| 159 | c_void_p, | ||
| 160 | Boolean, | ||
| 161 | c_void_p, | ||
| 162 | POINTER(SecKeychainRef) | ||
| 163 | ] | ||
| 164 | Security.SecKeychainCreate.restype = OSStatus | ||
| 165 | |||
| 166 | Security.SecKeychainDelete.argtypes = [ | ||
| 167 | SecKeychainRef | ||
| 168 | ] | ||
| 169 | Security.SecKeychainDelete.restype = OSStatus | ||
| 170 | |||
| 171 | Security.SecPKCS12Import.argtypes = [ | ||
| 172 | CFDataRef, | ||
| 173 | CFDictionaryRef, | ||
| 174 | POINTER(CFArrayRef) | ||
| 175 | ] | ||
| 176 | Security.SecPKCS12Import.restype = OSStatus | ||
| 177 | |||
| 178 | SSLReadFunc = CFUNCTYPE(OSStatus, SSLConnectionRef, c_void_p, POINTER(c_size_t)) | ||
| 179 | SSLWriteFunc = CFUNCTYPE(OSStatus, SSLConnectionRef, POINTER(c_byte), POINTER(c_size_t)) | ||
| 180 | |||
| 181 | Security.SSLSetIOFuncs.argtypes = [ | ||
| 182 | SSLContextRef, | ||
| 183 | SSLReadFunc, | ||
| 184 | SSLWriteFunc | ||
| 185 | ] | ||
| 186 | Security.SSLSetIOFuncs.restype = OSStatus | ||
| 187 | |||
| 188 | Security.SSLSetPeerID.argtypes = [ | ||
| 189 | SSLContextRef, | ||
| 190 | c_char_p, | ||
| 191 | c_size_t | ||
| 192 | ] | ||
| 193 | Security.SSLSetPeerID.restype = OSStatus | ||
| 194 | |||
| 195 | Security.SSLSetCertificate.argtypes = [ | ||
| 196 | SSLContextRef, | ||
| 197 | CFArrayRef | ||
| 198 | ] | ||
| 199 | Security.SSLSetCertificate.restype = OSStatus | ||
| 200 | |||
| 201 | Security.SSLSetCertificateAuthorities.argtypes = [ | ||
| 202 | SSLContextRef, | ||
| 203 | CFTypeRef, | ||
| 204 | Boolean | ||
| 205 | ] | ||
| 206 | Security.SSLSetCertificateAuthorities.restype = OSStatus | ||
| 207 | |||
| 208 | Security.SSLSetConnection.argtypes = [ | ||
| 209 | SSLContextRef, | ||
| 210 | SSLConnectionRef | ||
| 211 | ] | ||
| 212 | Security.SSLSetConnection.restype = OSStatus | ||
| 213 | |||
| 214 | Security.SSLSetPeerDomainName.argtypes = [ | ||
| 215 | SSLContextRef, | ||
| 216 | c_char_p, | ||
| 217 | c_size_t | ||
| 218 | ] | ||
| 219 | Security.SSLSetPeerDomainName.restype = OSStatus | ||
| 220 | |||
| 221 | Security.SSLHandshake.argtypes = [ | ||
| 222 | SSLContextRef | ||
| 223 | ] | ||
| 224 | Security.SSLHandshake.restype = OSStatus | ||
| 225 | |||
| 226 | Security.SSLRead.argtypes = [ | ||
| 227 | SSLContextRef, | ||
| 228 | c_char_p, | ||
| 229 | c_size_t, | ||
| 230 | POINTER(c_size_t) | ||
| 231 | ] | ||
| 232 | Security.SSLRead.restype = OSStatus | ||
| 233 | |||
| 234 | Security.SSLWrite.argtypes = [ | ||
| 235 | SSLContextRef, | ||
| 236 | c_char_p, | ||
| 237 | c_size_t, | ||
| 238 | POINTER(c_size_t) | ||
| 239 | ] | ||
| 240 | Security.SSLWrite.restype = OSStatus | ||
| 241 | |||
| 242 | Security.SSLClose.argtypes = [ | ||
| 243 | SSLContextRef | ||
| 244 | ] | ||
| 245 | Security.SSLClose.restype = OSStatus | ||
| 246 | |||
| 247 | Security.SSLGetNumberSupportedCiphers.argtypes = [ | ||
| 248 | SSLContextRef, | ||
| 249 | POINTER(c_size_t) | ||
| 250 | ] | ||
| 251 | Security.SSLGetNumberSupportedCiphers.restype = OSStatus | ||
| 252 | |||
| 253 | Security.SSLGetSupportedCiphers.argtypes = [ | ||
| 254 | SSLContextRef, | ||
| 255 | POINTER(SSLCipherSuite), | ||
| 256 | POINTER(c_size_t) | ||
| 257 | ] | ||
| 258 | Security.SSLGetSupportedCiphers.restype = OSStatus | ||
| 259 | |||
| 260 | Security.SSLSetEnabledCiphers.argtypes = [ | ||
| 261 | SSLContextRef, | ||
| 262 | POINTER(SSLCipherSuite), | ||
| 263 | c_size_t | ||
| 264 | ] | ||
| 265 | Security.SSLSetEnabledCiphers.restype = OSStatus | ||
| 266 | |||
| 267 | Security.SSLGetNumberEnabledCiphers.argtypes = [ | ||
| 268 | SSLContextRef, | ||
| 269 | POINTER(c_size_t) | ||
| 270 | ] | ||
| 271 | Security.SSLGetNumberEnabledCiphers.restype = OSStatus | ||
| 272 | |||
| 273 | Security.SSLGetEnabledCiphers.argtypes = [ | ||
| 274 | SSLContextRef, | ||
| 275 | POINTER(SSLCipherSuite), | ||
| 276 | POINTER(c_size_t) | ||
| 277 | ] | ||
| 278 | Security.SSLGetEnabledCiphers.restype = OSStatus | ||
| 279 | |||
| 280 | Security.SSLGetNegotiatedCipher.argtypes = [ | ||
| 281 | SSLContextRef, | ||
| 282 | POINTER(SSLCipherSuite) | ||
| 283 | ] | ||
| 284 | Security.SSLGetNegotiatedCipher.restype = OSStatus | ||
| 285 | |||
| 286 | Security.SSLGetNegotiatedProtocolVersion.argtypes = [ | ||
| 287 | SSLContextRef, | ||
| 288 | POINTER(SSLProtocol) | ||
| 289 | ] | ||
| 290 | Security.SSLGetNegotiatedProtocolVersion.restype = OSStatus | ||
| 291 | |||
| 292 | Security.SSLCopyPeerTrust.argtypes = [ | ||
| 293 | SSLContextRef, | ||
| 294 | POINTER(SecTrustRef) | ||
| 295 | ] | ||
| 296 | Security.SSLCopyPeerTrust.restype = OSStatus | ||
| 297 | |||
| 298 | Security.SecTrustSetAnchorCertificates.argtypes = [ | ||
| 299 | SecTrustRef, | ||
| 300 | CFArrayRef | ||
| 301 | ] | ||
| 302 | Security.SecTrustSetAnchorCertificates.restype = OSStatus | ||
| 303 | |||
| 304 | Security.SecTrustSetAnchorCertificatesOnly.argtypes = [ | ||
| 305 | SecTrustRef, | ||
| 306 | Boolean | ||
| 307 | ] | ||
| 308 | Security.SecTrustSetAnchorCertificatesOnly.restype = OSStatus | ||
| 309 | |||
| 310 | Security.SecTrustEvaluate.argtypes = [ | ||
| 311 | SecTrustRef, | ||
| 312 | POINTER(SecTrustResultType) | ||
| 313 | ] | ||
| 314 | Security.SecTrustEvaluate.restype = OSStatus | ||
| 315 | |||
| 316 | Security.SecTrustGetCertificateCount.argtypes = [ | ||
| 317 | SecTrustRef | ||
| 318 | ] | ||
| 319 | Security.SecTrustGetCertificateCount.restype = CFIndex | ||
| 320 | |||
| 321 | Security.SecTrustGetCertificateAtIndex.argtypes = [ | ||
| 322 | SecTrustRef, | ||
| 323 | CFIndex | ||
| 324 | ] | ||
| 325 | Security.SecTrustGetCertificateAtIndex.restype = SecCertificateRef | ||
| 326 | |||
| 327 | Security.SSLCreateContext.argtypes = [ | ||
| 328 | CFAllocatorRef, | ||
| 329 | SSLProtocolSide, | ||
| 330 | SSLConnectionType | ||
| 331 | ] | ||
| 332 | Security.SSLCreateContext.restype = SSLContextRef | ||
| 333 | |||
| 334 | Security.SSLSetSessionOption.argtypes = [ | ||
| 335 | SSLContextRef, | ||
| 336 | SSLSessionOption, | ||
| 337 | Boolean | ||
| 338 | ] | ||
| 339 | Security.SSLSetSessionOption.restype = OSStatus | ||
| 340 | |||
| 341 | Security.SSLSetProtocolVersionMin.argtypes = [ | ||
| 342 | SSLContextRef, | ||
| 343 | SSLProtocol | ||
| 344 | ] | ||
| 345 | Security.SSLSetProtocolVersionMin.restype = OSStatus | ||
| 346 | |||
| 347 | Security.SSLSetProtocolVersionMax.argtypes = [ | ||
| 348 | SSLContextRef, | ||
| 349 | SSLProtocol | ||
| 350 | ] | ||
| 351 | Security.SSLSetProtocolVersionMax.restype = OSStatus | ||
| 352 | |||
| 353 | Security.SecCopyErrorMessageString.argtypes = [ | ||
| 354 | OSStatus, | ||
| 355 | c_void_p | ||
| 356 | ] | ||
| 357 | Security.SecCopyErrorMessageString.restype = CFStringRef | ||
| 358 | |||
| 359 | Security.SSLReadFunc = SSLReadFunc | ||
| 360 | Security.SSLWriteFunc = SSLWriteFunc | ||
| 361 | Security.SSLContextRef = SSLContextRef | ||
| 362 | Security.SSLProtocol = SSLProtocol | ||
| 363 | Security.SSLCipherSuite = SSLCipherSuite | ||
| 364 | Security.SecIdentityRef = SecIdentityRef | ||
| 365 | Security.SecKeychainRef = SecKeychainRef | ||
| 366 | Security.SecTrustRef = SecTrustRef | ||
| 367 | Security.SecTrustResultType = SecTrustResultType | ||
| 368 | Security.SecExternalFormat = SecExternalFormat | ||
| 369 | Security.OSStatus = OSStatus | ||
| 370 | |||
| 371 | Security.kSecImportExportPassphrase = CFStringRef.in_dll( | ||
| 372 | Security, 'kSecImportExportPassphrase' | ||
| 373 | ) | ||
| 374 | Security.kSecImportItemIdentity = CFStringRef.in_dll( | ||
| 375 | Security, 'kSecImportItemIdentity' | ||
| 376 | ) | ||
| 377 | |||
| 378 | # CoreFoundation time! | ||
| 379 | CoreFoundation.CFRetain.argtypes = [ | ||
| 380 | CFTypeRef | ||
| 381 | ] | ||
| 382 | CoreFoundation.CFRetain.restype = CFTypeRef | ||
| 383 | |||
| 384 | CoreFoundation.CFRelease.argtypes = [ | ||
| 385 | CFTypeRef | ||
| 386 | ] | ||
| 387 | CoreFoundation.CFRelease.restype = None | ||
| 388 | |||
| 389 | CoreFoundation.CFGetTypeID.argtypes = [ | ||
| 390 | CFTypeRef | ||
| 391 | ] | ||
| 392 | CoreFoundation.CFGetTypeID.restype = CFTypeID | ||
| 393 | |||
| 394 | CoreFoundation.CFStringCreateWithCString.argtypes = [ | ||
| 395 | CFAllocatorRef, | ||
| 396 | c_char_p, | ||
| 397 | CFStringEncoding | ||
| 398 | ] | ||
| 399 | CoreFoundation.CFStringCreateWithCString.restype = CFStringRef | ||
| 400 | |||
| 401 | CoreFoundation.CFStringGetCStringPtr.argtypes = [ | ||
| 402 | CFStringRef, | ||
| 403 | CFStringEncoding | ||
| 404 | ] | ||
| 405 | CoreFoundation.CFStringGetCStringPtr.restype = c_char_p | ||
| 406 | |||
| 407 | CoreFoundation.CFStringGetCString.argtypes = [ | ||
| 408 | CFStringRef, | ||
| 409 | c_char_p, | ||
| 410 | CFIndex, | ||
| 411 | CFStringEncoding | ||
| 412 | ] | ||
| 413 | CoreFoundation.CFStringGetCString.restype = c_bool | ||
| 414 | |||
| 415 | CoreFoundation.CFDataCreate.argtypes = [ | ||
| 416 | CFAllocatorRef, | ||
| 417 | c_char_p, | ||
| 418 | CFIndex | ||
| 419 | ] | ||
| 420 | CoreFoundation.CFDataCreate.restype = CFDataRef | ||
| 421 | |||
| 422 | CoreFoundation.CFDataGetLength.argtypes = [ | ||
| 423 | CFDataRef | ||
| 424 | ] | ||
| 425 | CoreFoundation.CFDataGetLength.restype = CFIndex | ||
| 426 | |||
| 427 | CoreFoundation.CFDataGetBytePtr.argtypes = [ | ||
| 428 | CFDataRef | ||
| 429 | ] | ||
| 430 | CoreFoundation.CFDataGetBytePtr.restype = c_void_p | ||
| 431 | |||
| 432 | CoreFoundation.CFDictionaryCreate.argtypes = [ | ||
| 433 | CFAllocatorRef, | ||
| 434 | POINTER(CFTypeRef), | ||
| 435 | POINTER(CFTypeRef), | ||
| 436 | CFIndex, | ||
| 437 | CFDictionaryKeyCallBacks, | ||
| 438 | CFDictionaryValueCallBacks | ||
| 439 | ] | ||
| 440 | CoreFoundation.CFDictionaryCreate.restype = CFDictionaryRef | ||
| 441 | |||
| 442 | CoreFoundation.CFDictionaryGetValue.argtypes = [ | ||
| 443 | CFDictionaryRef, | ||
| 444 | CFTypeRef | ||
| 445 | ] | ||
| 446 | CoreFoundation.CFDictionaryGetValue.restype = CFTypeRef | ||
| 447 | |||
| 448 | CoreFoundation.CFArrayCreate.argtypes = [ | ||
| 449 | CFAllocatorRef, | ||
| 450 | POINTER(CFTypeRef), | ||
| 451 | CFIndex, | ||
| 452 | CFArrayCallBacks, | ||
| 453 | ] | ||
| 454 | CoreFoundation.CFArrayCreate.restype = CFArrayRef | ||
| 455 | |||
| 456 | CoreFoundation.CFArrayCreateMutable.argtypes = [ | ||
| 457 | CFAllocatorRef, | ||
| 458 | CFIndex, | ||
| 459 | CFArrayCallBacks | ||
| 460 | ] | ||
| 461 | CoreFoundation.CFArrayCreateMutable.restype = CFMutableArrayRef | ||
| 462 | |||
| 463 | CoreFoundation.CFArrayAppendValue.argtypes = [ | ||
| 464 | CFMutableArrayRef, | ||
| 465 | c_void_p | ||
| 466 | ] | ||
| 467 | CoreFoundation.CFArrayAppendValue.restype = None | ||
| 468 | |||
| 469 | CoreFoundation.CFArrayGetCount.argtypes = [ | ||
| 470 | CFArrayRef | ||
| 471 | ] | ||
| 472 | CoreFoundation.CFArrayGetCount.restype = CFIndex | ||
| 473 | |||
| 474 | CoreFoundation.CFArrayGetValueAtIndex.argtypes = [ | ||
| 475 | CFArrayRef, | ||
| 476 | CFIndex | ||
| 477 | ] | ||
| 478 | CoreFoundation.CFArrayGetValueAtIndex.restype = c_void_p | ||
| 479 | |||
| 480 | CoreFoundation.kCFAllocatorDefault = CFAllocatorRef.in_dll( | ||
| 481 | CoreFoundation, 'kCFAllocatorDefault' | ||
| 482 | ) | ||
| 483 | CoreFoundation.kCFTypeArrayCallBacks = c_void_p.in_dll(CoreFoundation, 'kCFTypeArrayCallBacks') | ||
| 484 | CoreFoundation.kCFTypeDictionaryKeyCallBacks = c_void_p.in_dll( | ||
| 485 | CoreFoundation, 'kCFTypeDictionaryKeyCallBacks' | ||
| 486 | ) | ||
| 487 | CoreFoundation.kCFTypeDictionaryValueCallBacks = c_void_p.in_dll( | ||
| 488 | CoreFoundation, 'kCFTypeDictionaryValueCallBacks' | ||
| 489 | ) | ||
| 490 | |||
| 491 | CoreFoundation.CFTypeRef = CFTypeRef | ||
| 492 | CoreFoundation.CFArrayRef = CFArrayRef | ||
| 493 | CoreFoundation.CFStringRef = CFStringRef | ||
| 494 | CoreFoundation.CFDictionaryRef = CFDictionaryRef | ||
| 495 | |||
| 496 | except AttributeError: | ||
| 497 | raise ImportError('Error initializing ctypes') | ||
| 498 | |||
| 499 | |||
| 500 | class CFConst(object): | ||
| 501 | """ | ||
| 502 | A class that essentially acts as a namespace for CoreFoundation | ||
| 503 | constants. | ||
| 504 | """ | ||
| 505 | kCFStringEncodingUTF8 = CFStringEncoding(0x08000100) | ||
| 506 | |||
| 507 | |||
| 508 | class SecurityConst(object): | ||
| 509 | """ | ||
| 510 | A class that essentially acts as a namespace for Security constants. | ||
| 511 | """ | ||
| 512 | kSSLSessionOptionBreakOnServerAuth = 0 | ||
| 513 | |||
| 514 | kSSLProtocol2 = 1 | ||
| 515 | kSSLProtocol3 = 2 | ||
| 516 | kTLSProtocol1 = 4 | ||
| 517 | kTLSProtocol11 = 7 | ||
| 518 | kTLSProtocol12 = 8 | ||
| 519 | |||
| 520 | kSSLClientSide = 1 | ||
| 521 | kSSLStreamType = 0 | ||
| 522 | |||
| 523 | kSecFormatPEMSequence = 10 | ||
| 524 | |||
| 525 | kSecTrustResultInvalid = 0 | ||
| 526 | kSecTrustResultProceed = 1 | ||
| 527 | # This gap is present on purpose: this was kSecTrustResultConfirm, which | ||
| 528 | # is deprecated. | ||
| 529 | kSecTrustResultDeny = 3 | ||
| 530 | kSecTrustResultUnspecified = 4 | ||
| 531 | kSecTrustResultRecoverableTrustFailure = 5 | ||
| 532 | kSecTrustResultFatalTrustFailure = 6 | ||
| 533 | kSecTrustResultOtherError = 7 | ||
| 534 | |||
| 535 | errSSLProtocol = -9800 | ||
| 536 | errSSLWouldBlock = -9803 | ||
| 537 | errSSLClosedGraceful = -9805 | ||
| 538 | errSSLClosedNoNotify = -9816 | ||
| 539 | errSSLClosedAbort = -9806 | ||
| 540 | |||
| 541 | errSSLXCertChainInvalid = -9807 | ||
| 542 | errSSLCrypto = -9809 | ||
| 543 | errSSLInternal = -9810 | ||
| 544 | errSSLCertExpired = -9814 | ||
| 545 | errSSLCertNotYetValid = -9815 | ||
| 546 | errSSLUnknownRootCert = -9812 | ||
| 547 | errSSLNoRootCert = -9813 | ||
| 548 | errSSLHostNameMismatch = -9843 | ||
| 549 | errSSLPeerHandshakeFail = -9824 | ||
| 550 | errSSLPeerUserCancelled = -9839 | ||
| 551 | errSSLWeakPeerEphemeralDHKey = -9850 | ||
| 552 | errSSLServerAuthCompleted = -9841 | ||
| 553 | errSSLRecordOverflow = -9847 | ||
| 554 | |||
| 555 | errSecVerifyFailed = -67808 | ||
| 556 | errSecNoTrustSettings = -25263 | ||
| 557 | errSecItemNotFound = -25300 | ||
| 558 | errSecInvalidTrustSettings = -25262 | ||
| 559 | |||
| 560 | # Cipher suites. We only pick the ones our default cipher string allows. | ||
| 561 | TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384 = 0xC02C | ||
| 562 | TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384 = 0xC030 | ||
| 563 | TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 = 0xC02B | ||
| 564 | TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 = 0xC02F | ||
| 565 | TLS_DHE_DSS_WITH_AES_256_GCM_SHA384 = 0x00A3 | ||
| 566 | TLS_DHE_RSA_WITH_AES_256_GCM_SHA384 = 0x009F | ||
| 567 | TLS_DHE_DSS_WITH_AES_128_GCM_SHA256 = 0x00A2 | ||
| 568 | TLS_DHE_RSA_WITH_AES_128_GCM_SHA256 = 0x009E | ||
| 569 | TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384 = 0xC024 | ||
| 570 | TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384 = 0xC028 | ||
| 571 | TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA = 0xC00A | ||
| 572 | TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA = 0xC014 | ||
| 573 | TLS_DHE_RSA_WITH_AES_256_CBC_SHA256 = 0x006B | ||
| 574 | TLS_DHE_DSS_WITH_AES_256_CBC_SHA256 = 0x006A | ||
| 575 | TLS_DHE_RSA_WITH_AES_256_CBC_SHA = 0x0039 | ||
| 576 | TLS_DHE_DSS_WITH_AES_256_CBC_SHA = 0x0038 | ||
| 577 | TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256 = 0xC023 | ||
| 578 | TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256 = 0xC027 | ||
| 579 | TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA = 0xC009 | ||
| 580 | TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA = 0xC013 | ||
| 581 | TLS_DHE_RSA_WITH_AES_128_CBC_SHA256 = 0x0067 | ||
| 582 | TLS_DHE_DSS_WITH_AES_128_CBC_SHA256 = 0x0040 | ||
| 583 | TLS_DHE_RSA_WITH_AES_128_CBC_SHA = 0x0033 | ||
| 584 | TLS_DHE_DSS_WITH_AES_128_CBC_SHA = 0x0032 | ||
| 585 | TLS_RSA_WITH_AES_256_GCM_SHA384 = 0x009D | ||
| 586 | TLS_RSA_WITH_AES_128_GCM_SHA256 = 0x009C | ||
| 587 | TLS_RSA_WITH_AES_256_CBC_SHA256 = 0x003D | ||
| 588 | TLS_RSA_WITH_AES_128_CBC_SHA256 = 0x003C | ||
| 589 | TLS_RSA_WITH_AES_256_CBC_SHA = 0x0035 | ||
| 590 | TLS_RSA_WITH_AES_128_CBC_SHA = 0x002F | ||
| 591 | TLS_AES_128_GCM_SHA256 = 0x1301 | ||
| 592 | TLS_AES_256_GCM_SHA384 = 0x1302 | ||
| 593 | TLS_CHACHA20_POLY1305_SHA256 = 0x1303 | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/contrib/_securetransport/low_level.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/contrib/_securetransport/low_level.py new file mode 100644 index 0000000..4e5c0db --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/contrib/_securetransport/low_level.py | |||
| @@ -0,0 +1,343 @@ | |||
| 1 | """ | ||
| 2 | Low-level helpers for the SecureTransport bindings. | ||
| 3 | |||
| 4 | These are Python functions that are not directly related to the high-level APIs | ||
| 5 | but are necessary to get them to work. They include a whole bunch of low-level | ||
| 6 | CoreFoundation messing about and memory management. The concerns in this module | ||
| 7 | are almost entirely about trying to avoid memory leaks and providing | ||
| 8 | appropriate and useful assistance to the higher-level code. | ||
| 9 | """ | ||
| 10 | import base64 | ||
| 11 | import ctypes | ||
| 12 | import itertools | ||
| 13 | import re | ||
| 14 | import os | ||
| 15 | import ssl | ||
| 16 | import tempfile | ||
| 17 | |||
| 18 | from .bindings import Security, CoreFoundation, CFConst | ||
| 19 | |||
| 20 | |||
| 21 | # This regular expression is used to grab PEM data out of a PEM bundle. | ||
| 22 | _PEM_CERTS_RE = re.compile( | ||
| 23 | b"-----BEGIN CERTIFICATE-----\n(.*?)\n-----END CERTIFICATE-----", re.DOTALL | ||
| 24 | ) | ||
| 25 | |||
| 26 | |||
| 27 | def _cf_data_from_bytes(bytestring): | ||
| 28 | """ | ||
| 29 | Given a bytestring, create a CFData object from it. This CFData object must | ||
| 30 | be CFReleased by the caller. | ||
| 31 | """ | ||
| 32 | return CoreFoundation.CFDataCreate( | ||
| 33 | CoreFoundation.kCFAllocatorDefault, bytestring, len(bytestring) | ||
| 34 | ) | ||
| 35 | |||
| 36 | |||
| 37 | def _cf_dictionary_from_tuples(tuples): | ||
| 38 | """ | ||
| 39 | Given a list of Python tuples, create an associated CFDictionary. | ||
| 40 | """ | ||
| 41 | dictionary_size = len(tuples) | ||
| 42 | |||
| 43 | # We need to get the dictionary keys and values out in the same order. | ||
| 44 | keys = (t[0] for t in tuples) | ||
| 45 | values = (t[1] for t in tuples) | ||
| 46 | cf_keys = (CoreFoundation.CFTypeRef * dictionary_size)(*keys) | ||
| 47 | cf_values = (CoreFoundation.CFTypeRef * dictionary_size)(*values) | ||
| 48 | |||
| 49 | return CoreFoundation.CFDictionaryCreate( | ||
| 50 | CoreFoundation.kCFAllocatorDefault, | ||
| 51 | cf_keys, | ||
| 52 | cf_values, | ||
| 53 | dictionary_size, | ||
| 54 | CoreFoundation.kCFTypeDictionaryKeyCallBacks, | ||
| 55 | CoreFoundation.kCFTypeDictionaryValueCallBacks, | ||
| 56 | ) | ||
| 57 | |||
| 58 | |||
| 59 | def _cf_string_to_unicode(value): | ||
| 60 | """ | ||
| 61 | Creates a Unicode string from a CFString object. Used entirely for error | ||
| 62 | reporting. | ||
| 63 | |||
| 64 | Yes, it annoys me quite a lot that this function is this complex. | ||
| 65 | """ | ||
| 66 | value_as_void_p = ctypes.cast(value, ctypes.POINTER(ctypes.c_void_p)) | ||
| 67 | |||
| 68 | string = CoreFoundation.CFStringGetCStringPtr( | ||
| 69 | value_as_void_p, | ||
| 70 | CFConst.kCFStringEncodingUTF8 | ||
| 71 | ) | ||
| 72 | if string is None: | ||
| 73 | buffer = ctypes.create_string_buffer(1024) | ||
| 74 | result = CoreFoundation.CFStringGetCString( | ||
| 75 | value_as_void_p, | ||
| 76 | buffer, | ||
| 77 | 1024, | ||
| 78 | CFConst.kCFStringEncodingUTF8 | ||
| 79 | ) | ||
| 80 | if not result: | ||
| 81 | raise OSError('Error copying C string from CFStringRef') | ||
| 82 | string = buffer.value | ||
| 83 | if string is not None: | ||
| 84 | string = string.decode('utf-8') | ||
| 85 | return string | ||
| 86 | |||
| 87 | |||
| 88 | def _assert_no_error(error, exception_class=None): | ||
| 89 | """ | ||
| 90 | Checks the return code and throws an exception if there is an error to | ||
| 91 | report. | ||
| 92 | """ | ||
| 93 | if error == 0: | ||
| 94 | return | ||
| 95 | |||
| 96 | cf_error_string = Security.SecCopyErrorMessageString(error, None) | ||
| 97 | output = _cf_string_to_unicode(cf_error_string) | ||
| 98 | CoreFoundation.CFRelease(cf_error_string) | ||
| 99 | |||
| 100 | if output is None or output == u'': | ||
| 101 | output = u'OSStatus %s' % error | ||
| 102 | |||
| 103 | if exception_class is None: | ||
| 104 | exception_class = ssl.SSLError | ||
| 105 | |||
| 106 | raise exception_class(output) | ||
| 107 | |||
| 108 | |||
| 109 | def _cert_array_from_pem(pem_bundle): | ||
| 110 | """ | ||
| 111 | Given a bundle of certs in PEM format, turns them into a CFArray of certs | ||
| 112 | that can be used to validate a cert chain. | ||
| 113 | """ | ||
| 114 | der_certs = [ | ||
| 115 | base64.b64decode(match.group(1)) | ||
| 116 | for match in _PEM_CERTS_RE.finditer(pem_bundle) | ||
| 117 | ] | ||
| 118 | if not der_certs: | ||
| 119 | raise ssl.SSLError("No root certificates specified") | ||
| 120 | |||
| 121 | cert_array = CoreFoundation.CFArrayCreateMutable( | ||
| 122 | CoreFoundation.kCFAllocatorDefault, | ||
| 123 | 0, | ||
| 124 | ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks) | ||
| 125 | ) | ||
| 126 | if not cert_array: | ||
| 127 | raise ssl.SSLError("Unable to allocate memory!") | ||
| 128 | |||
| 129 | try: | ||
| 130 | for der_bytes in der_certs: | ||
| 131 | certdata = _cf_data_from_bytes(der_bytes) | ||
| 132 | if not certdata: | ||
| 133 | raise ssl.SSLError("Unable to allocate memory!") | ||
| 134 | cert = Security.SecCertificateCreateWithData( | ||
| 135 | CoreFoundation.kCFAllocatorDefault, certdata | ||
| 136 | ) | ||
| 137 | CoreFoundation.CFRelease(certdata) | ||
| 138 | if not cert: | ||
| 139 | raise ssl.SSLError("Unable to build cert object!") | ||
| 140 | |||
| 141 | CoreFoundation.CFArrayAppendValue(cert_array, cert) | ||
| 142 | CoreFoundation.CFRelease(cert) | ||
| 143 | except Exception: | ||
| 144 | # We need to free the array before the exception bubbles further. | ||
| 145 | # We only want to do that if an error occurs: otherwise, the caller | ||
| 146 | # should free. | ||
| 147 | CoreFoundation.CFRelease(cert_array) | ||
| 148 | raise | ||
| 149 | return cert_array | ||
| 150 | |||
| 151 | |||
| 152 | def _is_cert(item): | ||
| 153 | """ | ||
| 154 | Returns True if a given CFTypeRef is a certificate. | ||
| 155 | """ | ||
| 156 | expected = Security.SecCertificateGetTypeID() | ||
| 157 | return CoreFoundation.CFGetTypeID(item) == expected | ||
| 158 | |||
| 159 | |||
| 160 | def _is_identity(item): | ||
| 161 | """ | ||
| 162 | Returns True if a given CFTypeRef is an identity. | ||
| 163 | """ | ||
| 164 | expected = Security.SecIdentityGetTypeID() | ||
| 165 | return CoreFoundation.CFGetTypeID(item) == expected | ||
| 166 | |||
| 167 | |||
| 168 | def _temporary_keychain(): | ||
| 169 | """ | ||
| 170 | This function creates a temporary Mac keychain that we can use to work with | ||
| 171 | credentials. This keychain uses a one-time password and a temporary file to | ||
| 172 | store the data. We expect to have one keychain per socket. The returned | ||
| 173 | SecKeychainRef must be freed by the caller, including calling | ||
| 174 | SecKeychainDelete. | ||
| 175 | |||
| 176 | Returns a tuple of the SecKeychainRef and the path to the temporary | ||
| 177 | directory that contains it. | ||
| 178 | """ | ||
| 179 | # Unfortunately, SecKeychainCreate requires a path to a keychain. This | ||
| 180 | means we cannot use mkstemp to get a generic temporary file. Instead, | ||
| 181 | # we're going to create a temporary directory and a filename to use there. | ||
| 182 | # This filename will be 8 random bytes expanded into base64. We also need | ||
| 183 | # some random bytes to password-protect the keychain we're creating, so we | ||
| 184 | # ask for 40 random bytes. | ||
| 185 | random_bytes = os.urandom(40) | ||
| 186 | filename = base64.b64encode(random_bytes[:8]).decode('utf-8') | ||
| 187 | password = base64.b64encode(random_bytes[8:]) # Must be valid UTF-8 | ||
| 188 | tempdirectory = tempfile.mkdtemp() | ||
| 189 | |||
| 190 | keychain_path = os.path.join(tempdirectory, filename).encode('utf-8') | ||
| 191 | |||
| 192 | # We now want to create the keychain itself. | ||
| 193 | keychain = Security.SecKeychainRef() | ||
| 194 | status = Security.SecKeychainCreate( | ||
| 195 | keychain_path, | ||
| 196 | len(password), | ||
| 197 | password, | ||
| 198 | False, | ||
| 199 | None, | ||
| 200 | ctypes.byref(keychain) | ||
| 201 | ) | ||
| 202 | _assert_no_error(status) | ||
| 203 | |||
| 204 | # Having created the keychain, we want to pass it off to the caller. | ||
| 205 | return keychain, tempdirectory | ||
| 206 | |||
| 207 | |||
| 208 | def _load_items_from_file(keychain, path): | ||
| 209 | """ | ||
| 210 | Given a single file, loads all the trust objects from it into arrays and | ||
| 211 | the keychain. | ||
| 212 | Returns a tuple of lists: the first list is a list of identities, the | ||
| 213 | second a list of certs. | ||
| 214 | """ | ||
| 215 | certificates = [] | ||
| 216 | identities = [] | ||
| 217 | result_array = None | ||
| 218 | |||
| 219 | with open(path, 'rb') as f: | ||
| 220 | raw_filedata = f.read() | ||
| 221 | |||
| 222 | try: | ||
| 223 | filedata = CoreFoundation.CFDataCreate( | ||
| 224 | CoreFoundation.kCFAllocatorDefault, | ||
| 225 | raw_filedata, | ||
| 226 | len(raw_filedata) | ||
| 227 | ) | ||
| 228 | result_array = CoreFoundation.CFArrayRef() | ||
| 229 | result = Security.SecItemImport( | ||
| 230 | filedata, # cert data | ||
| 231 | None, # Filename, leaving it out for now | ||
| 232 | None, # What the type of the file is, we don't care | ||
| 233 | None, # what's in the file, we don't care | ||
| 234 | 0, # import flags | ||
| 235 | None, # key params, can include passphrase in the future | ||
| 236 | keychain, # The keychain to insert into | ||
| 237 | ctypes.byref(result_array) # Results | ||
| 238 | ) | ||
| 239 | _assert_no_error(result) | ||
| 240 | |||
| 241 | # A CFArray is not very useful to us as an intermediary | ||
| 242 | # representation, so we are going to extract the objects we want | ||
| 243 | # and then free the array. We don't need to keep hold of keys: the | ||
| 244 | # keychain already has them! | ||
| 245 | result_count = CoreFoundation.CFArrayGetCount(result_array) | ||
| 246 | for index in range(result_count): | ||
| 247 | item = CoreFoundation.CFArrayGetValueAtIndex( | ||
| 248 | result_array, index | ||
| 249 | ) | ||
| 250 | item = ctypes.cast(item, CoreFoundation.CFTypeRef) | ||
| 251 | |||
| 252 | if _is_cert(item): | ||
| 253 | CoreFoundation.CFRetain(item) | ||
| 254 | certificates.append(item) | ||
| 255 | elif _is_identity(item): | ||
| 256 | CoreFoundation.CFRetain(item) | ||
| 257 | identities.append(item) | ||
| 258 | finally: | ||
| 259 | if result_array: | ||
| 260 | CoreFoundation.CFRelease(result_array) | ||
| 261 | |||
| 262 | CoreFoundation.CFRelease(filedata) | ||
| 263 | |||
| 264 | return (identities, certificates) | ||
| 265 | |||
| 266 | |||
| 267 | def _load_client_cert_chain(keychain, *paths): | ||
| 268 | """ | ||
| 269 | Load certificates and maybe keys from a number of files. Has the end goal | ||
| 270 | of returning a CFArray containing one SecIdentityRef, and then zero or more | ||
| 271 | SecCertificateRef objects, suitable for use as a client certificate trust | ||
| 272 | chain. | ||
| 273 | """ | ||
| 274 | # Ok, the strategy. | ||
| 275 | # | ||
| 276 | # This relies on knowing that macOS will not give you a SecIdentityRef | ||
| 277 | # unless you have imported a key into a keychain. This is a somewhat | ||
| 278 | # artificial limitation of macOS (for example, it doesn't necessarily | ||
| 279 | # affect iOS), but there is nothing inside Security.framework that lets you | ||
| 280 | # get a SecIdentityRef without having a key in a keychain. | ||
| 281 | # | ||
| 282 | # So the policy here is we take all the files and iterate them in order. | ||
| 283 | # Each one will use SecItemImport to have one or more objects loaded from | ||
| 284 | # it. We will also point at a keychain that macOS can use to work with the | ||
| 285 | # private key. | ||
| 286 | # | ||
| 287 | # Once we have all the objects, we'll check what we actually have. If we | ||
| 288 | # already have a SecIdentityRef in hand, fab: we'll use that. Otherwise, | ||
| 289 | # we'll take the first certificate (which we assume to be our leaf) and | ||
| 290 | # ask the keychain to give us a SecIdentityRef with that cert's associated | ||
| 291 | # key. | ||
| 292 | # | ||
| 293 | # We'll then return a CFArray containing the trust chain: one | ||
| 294 | # SecIdentityRef and then zero-or-more SecCertificateRef objects. The | ||
| 295 | # responsibility for freeing this CFArray will be with the caller. This | ||
| 296 | # CFArray must remain alive for the entire connection, so in practice it | ||
| 297 | # will be stored with a single SSLSocket, along with the reference to the | ||
| 298 | # keychain. | ||
| 299 | certificates = [] | ||
| 300 | identities = [] | ||
| 301 | |||
| 302 | # Filter out bad paths. | ||
| 303 | paths = (path for path in paths if path) | ||
| 304 | |||
| 305 | try: | ||
| 306 | for file_path in paths: | ||
| 307 | new_identities, new_certs = _load_items_from_file( | ||
| 308 | keychain, file_path | ||
| 309 | ) | ||
| 310 | identities.extend(new_identities) | ||
| 311 | certificates.extend(new_certs) | ||
| 312 | |||
| 313 | # Ok, we have everything. The question is: do we have an identity? If | ||
| 314 | # not, we want to grab one from the first cert we have. | ||
| 315 | if not identities: | ||
| 316 | new_identity = Security.SecIdentityRef() | ||
| 317 | status = Security.SecIdentityCreateWithCertificate( | ||
| 318 | keychain, | ||
| 319 | certificates[0], | ||
| 320 | ctypes.byref(new_identity) | ||
| 321 | ) | ||
| 322 | _assert_no_error(status) | ||
| 323 | identities.append(new_identity) | ||
| 324 | |||
| 325 | # We now want to release the original certificate, as we no longer | ||
| 326 | # need it. | ||
| 327 | CoreFoundation.CFRelease(certificates.pop(0)) | ||
| 328 | |||
| 329 | # We now need to build a new CFArray that holds the trust chain. | ||
| 330 | trust_chain = CoreFoundation.CFArrayCreateMutable( | ||
| 331 | CoreFoundation.kCFAllocatorDefault, | ||
| 332 | 0, | ||
| 333 | ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks), | ||
| 334 | ) | ||
| 335 | for item in itertools.chain(identities, certificates): | ||
| 336 | # ArrayAppendValue does a CFRetain on the item. That's fine, | ||
| 337 | # because the finally block will release our other refs to them. | ||
| 338 | CoreFoundation.CFArrayAppendValue(trust_chain, item) | ||
| 339 | |||
| 340 | return trust_chain | ||
| 341 | finally: | ||
| 342 | for obj in itertools.chain(identities, certificates): | ||
| 343 | CoreFoundation.CFRelease(obj) | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/contrib/appengine.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/contrib/appengine.py new file mode 100644 index 0000000..fc00d17 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/contrib/appengine.py | |||
| @@ -0,0 +1,296 @@ | |||
| 1 | """ | ||
| 2 | This module provides a pool manager that uses Google App Engine's | ||
| 3 | `URLFetch Service <https://cloud.google.com/appengine/docs/python/urlfetch>`_. | ||
| 4 | |||
| 5 | Example usage:: | ||
| 6 | |||
| 7 | from pip._vendor.urllib3 import PoolManager | ||
| 8 | from pip._vendor.urllib3.contrib.appengine import AppEngineManager, is_appengine_sandbox | ||
| 9 | |||
| 10 | if is_appengine_sandbox(): | ||
| 11 | # AppEngineManager uses AppEngine's URLFetch API behind the scenes | ||
| 12 | http = AppEngineManager() | ||
| 13 | else: | ||
| 14 | # PoolManager uses a socket-level API behind the scenes | ||
| 15 | http = PoolManager() | ||
| 16 | |||
| 17 | r = http.request('GET', 'https://google.com/') | ||
| 18 | |||
| 19 | There are `limitations <https://cloud.google.com/appengine/docs/python/\ | ||
| 20 | urlfetch/#Python_Quotas_and_limits>`_ to the URLFetch service and it may not be | ||
| 21 | the best choice for your application. There are three options for using | ||
| 22 | urllib3 on Google App Engine: | ||
| 23 | |||
| 24 | 1. You can use :class:`AppEngineManager` with URLFetch. URLFetch is | ||
| 25 | cost-effective in many circumstances as long as your usage is within the | ||
| 26 | limitations. | ||
| 27 | 2. You can use a normal :class:`~urllib3.PoolManager` by enabling sockets. | ||
| 28 | Sockets also have `limitations and restrictions | ||
| 29 | <https://cloud.google.com/appengine/docs/python/sockets/\ | ||
| 30 | #limitations-and-restrictions>`_ and have a lower free quota than URLFetch. | ||
| 31 | To use sockets, be sure to specify the following in your ``app.yaml``:: | ||
| 32 | |||
| 33 | env_variables: | ||
| 34 | GAE_USE_SOCKETS_HTTPLIB : 'true' | ||
| 35 | |||
| 36 | 3. If you are using `App Engine Flexible | ||
| 37 | <https://cloud.google.com/appengine/docs/flexible/>`_, you can use the standard | ||
| 38 | :class:`PoolManager` without any configuration or special environment variables. | ||
| 39 | """ | ||
| 40 | |||
| 41 | from __future__ import absolute_import | ||
| 42 | import logging | ||
| 43 | import os | ||
| 44 | import warnings | ||
| 45 | from ..packages.six.moves.urllib.parse import urljoin | ||
| 46 | |||
| 47 | from ..exceptions import ( | ||
| 48 | HTTPError, | ||
| 49 | HTTPWarning, | ||
| 50 | MaxRetryError, | ||
| 51 | ProtocolError, | ||
| 52 | TimeoutError, | ||
| 53 | SSLError | ||
| 54 | ) | ||
| 55 | |||
| 56 | from ..packages.six import BytesIO | ||
| 57 | from ..request import RequestMethods | ||
| 58 | from ..response import HTTPResponse | ||
| 59 | from ..util.timeout import Timeout | ||
| 60 | from ..util.retry import Retry | ||
| 61 | |||
| 62 | try: | ||
| 63 | from google.appengine.api import urlfetch | ||
| 64 | except ImportError: | ||
| 65 | urlfetch = None | ||
| 66 | |||
| 67 | |||
| 68 | log = logging.getLogger(__name__) | ||
| 69 | |||
| 70 | |||
| 71 | class AppEnginePlatformWarning(HTTPWarning): | ||
| 72 | pass | ||
| 73 | |||
| 74 | |||
| 75 | class AppEnginePlatformError(HTTPError): | ||
| 76 | pass | ||
| 77 | |||
| 78 | |||
| 79 | class AppEngineManager(RequestMethods): | ||
| 80 | """ | ||
| 81 | Connection manager for Google App Engine sandbox applications. | ||
| 82 | |||
| 83 | This manager uses the URLFetch service directly instead of using the | ||
| 84 | emulated httplib, and is subject to URLFetch limitations as described in | ||
| 85 | the App Engine documentation `here | ||
| 86 | <https://cloud.google.com/appengine/docs/python/urlfetch>`_. | ||
| 87 | |||
| 88 | Notably it will raise an :class:`AppEnginePlatformError` if: | ||
| 89 | * URLFetch is not available. | ||
| 90 | * You attempt to use this on App Engine Flexible, as full socket | ||
| 91 | support is available. | ||
| 92 | * A request size is more than 10 megabytes. | ||
| 93 | * A response size is more than 32 megabytes. | ||
| 94 | * You use an unsupported request method such as OPTIONS. | ||
| 95 | |||
| 96 | Beyond those cases, it will raise normal urllib3 errors. | ||
| 97 | """ | ||
| 98 | |||
| 99 | def __init__(self, headers=None, retries=None, validate_certificate=True, | ||
| 100 | urlfetch_retries=True): | ||
| 101 | if not urlfetch: | ||
| 102 | raise AppEnginePlatformError( | ||
| 103 | "URLFetch is not available in this environment.") | ||
| 104 | |||
| 105 | if is_prod_appengine_mvms(): | ||
| 106 | raise AppEnginePlatformError( | ||
| 107 | "Use normal urllib3.PoolManager instead of AppEngineManager" | ||
| 108 | "on Managed VMs, as using URLFetch is not necessary in " | ||
| 109 | "this environment.") | ||
| 110 | |||
| 111 | warnings.warn( | ||
| 112 | "urllib3 is using URLFetch on Google App Engine sandbox instead " | ||
| 113 | "of sockets. To use sockets directly instead of URLFetch see " | ||
| 114 | "https://urllib3.readthedocs.io/en/latest/reference/urllib3.contrib.html.", | ||
| 115 | AppEnginePlatformWarning) | ||
| 116 | |||
| 117 | RequestMethods.__init__(self, headers) | ||
| 118 | self.validate_certificate = validate_certificate | ||
| 119 | self.urlfetch_retries = urlfetch_retries | ||
| 120 | |||
| 121 | self.retries = retries or Retry.DEFAULT | ||
| 122 | |||
| 123 | def __enter__(self): | ||
| 124 | return self | ||
| 125 | |||
| 126 | def __exit__(self, exc_type, exc_val, exc_tb): | ||
| 127 | # Return False to re-raise any potential exceptions | ||
| 128 | return False | ||
| 129 | |||
| 130 | def urlopen(self, method, url, body=None, headers=None, | ||
| 131 | retries=None, redirect=True, timeout=Timeout.DEFAULT_TIMEOUT, | ||
| 132 | **response_kw): | ||
| 133 | |||
| 134 | retries = self._get_retries(retries, redirect) | ||
| 135 | |||
| 136 | try: | ||
| 137 | follow_redirects = ( | ||
| 138 | redirect and | ||
| 139 | retries.redirect != 0 and | ||
| 140 | retries.total) | ||
| 141 | response = urlfetch.fetch( | ||
| 142 | url, | ||
| 143 | payload=body, | ||
| 144 | method=method, | ||
| 145 | headers=headers or {}, | ||
| 146 | allow_truncated=False, | ||
| 147 | follow_redirects=self.urlfetch_retries and follow_redirects, | ||
| 148 | deadline=self._get_absolute_timeout(timeout), | ||
| 149 | validate_certificate=self.validate_certificate, | ||
| 150 | ) | ||
| 151 | except urlfetch.DeadlineExceededError as e: | ||
| 152 | raise TimeoutError(self, e) | ||
| 153 | |||
| 154 | except urlfetch.InvalidURLError as e: | ||
| 155 | if 'too large' in str(e): | ||
| 156 | raise AppEnginePlatformError( | ||
| 157 | "URLFetch request too large, URLFetch only " | ||
| 158 | "supports requests up to 10mb in size.", e) | ||
| 159 | raise ProtocolError(e) | ||
| 160 | |||
| 161 | except urlfetch.DownloadError as e: | ||
| 162 | if 'Too many redirects' in str(e): | ||
| 163 | raise MaxRetryError(self, url, reason=e) | ||
| 164 | raise ProtocolError(e) | ||
| 165 | |||
| 166 | except urlfetch.ResponseTooLargeError as e: | ||
| 167 | raise AppEnginePlatformError( | ||
| 168 | "URLFetch response too large, URLFetch only supports" | ||
| 169 | "responses up to 32mb in size.", e) | ||
| 170 | |||
| 171 | except urlfetch.SSLCertificateError as e: | ||
| 172 | raise SSLError(e) | ||
| 173 | |||
| 174 | except urlfetch.InvalidMethodError as e: | ||
| 175 | raise AppEnginePlatformError( | ||
| 176 | "URLFetch does not support method: %s" % method, e) | ||
| 177 | |||
| 178 | http_response = self._urlfetch_response_to_http_response( | ||
| 179 | response, retries=retries, **response_kw) | ||
| 180 | |||
| 181 | # Handle redirect? | ||
| 182 | redirect_location = redirect and http_response.get_redirect_location() | ||
| 183 | if redirect_location: | ||
| 184 | # Check for redirect response | ||
| 185 | if self.urlfetch_retries and retries.raise_on_redirect: | ||
| 186 | raise MaxRetryError(self, url, "too many redirects") | ||
| 187 | else: | ||
| 188 | if http_response.status == 303: | ||
| 189 | method = 'GET' | ||
| 190 | |||
| 191 | try: | ||
| 192 | retries = retries.increment(method, url, response=http_response, _pool=self) | ||
| 193 | except MaxRetryError: | ||
| 194 | if retries.raise_on_redirect: | ||
| 195 | raise MaxRetryError(self, url, "too many redirects") | ||
| 196 | return http_response | ||
| 197 | |||
| 198 | retries.sleep_for_retry(http_response) | ||
| 199 | log.debug("Redirecting %s -> %s", url, redirect_location) | ||
| 200 | redirect_url = urljoin(url, redirect_location) | ||
| 201 | return self.urlopen( | ||
| 202 | method, redirect_url, body, headers, | ||
| 203 | retries=retries, redirect=redirect, | ||
| 204 | timeout=timeout, **response_kw) | ||
| 205 | |||
| 206 | # Check if we should retry the HTTP response. | ||
| 207 | has_retry_after = bool(http_response.getheader('Retry-After')) | ||
| 208 | if retries.is_retry(method, http_response.status, has_retry_after): | ||
| 209 | retries = retries.increment( | ||
| 210 | method, url, response=http_response, _pool=self) | ||
| 211 | log.debug("Retry: %s", url) | ||
| 212 | retries.sleep(http_response) | ||
| 213 | return self.urlopen( | ||
| 214 | method, url, | ||
| 215 | body=body, headers=headers, | ||
| 216 | retries=retries, redirect=redirect, | ||
| 217 | timeout=timeout, **response_kw) | ||
| 218 | |||
| 219 | return http_response | ||
| 220 | |||
| 221 | def _urlfetch_response_to_http_response(self, urlfetch_resp, **response_kw): | ||
| 222 | |||
| 223 | if is_prod_appengine(): | ||
| 224 | # Production GAE handles deflate encoding automatically, but does | ||
| 225 | # not remove the encoding header. | ||
| 226 | content_encoding = urlfetch_resp.headers.get('content-encoding') | ||
| 227 | |||
| 228 | if content_encoding == 'deflate': | ||
| 229 | del urlfetch_resp.headers['content-encoding'] | ||
| 230 | |||
| 231 | transfer_encoding = urlfetch_resp.headers.get('transfer-encoding') | ||
| 232 | # We have a full response's content, | ||
| 233 | # so let's make sure we don't report ourselves as chunked data. | ||
| 234 | if transfer_encoding == 'chunked': | ||
| 235 | encodings = transfer_encoding.split(",") | ||
| 236 | encodings.remove('chunked') | ||
| 237 | urlfetch_resp.headers['transfer-encoding'] = ','.join(encodings) | ||
| 238 | |||
| 239 | return HTTPResponse( | ||
| 240 | # In order for decoding to work, we must present the content as | ||
| 241 | # a file-like object. | ||
| 242 | body=BytesIO(urlfetch_resp.content), | ||
| 243 | headers=urlfetch_resp.headers, | ||
| 244 | status=urlfetch_resp.status_code, | ||
| 245 | **response_kw | ||
| 246 | ) | ||
| 247 | |||
| 248 | def _get_absolute_timeout(self, timeout): | ||
| 249 | if timeout is Timeout.DEFAULT_TIMEOUT: | ||
| 250 | return None # Defer to URLFetch's default. | ||
| 251 | if isinstance(timeout, Timeout): | ||
| 252 | if timeout._read is not None or timeout._connect is not None: | ||
| 253 | warnings.warn( | ||
| 254 | "URLFetch does not support granular timeout settings, " | ||
| 255 | "reverting to total or default URLFetch timeout.", | ||
| 256 | AppEnginePlatformWarning) | ||
| 257 | return timeout.total | ||
| 258 | return timeout | ||
| 259 | |||
| 260 | def _get_retries(self, retries, redirect): | ||
| 261 | if not isinstance(retries, Retry): | ||
| 262 | retries = Retry.from_int( | ||
| 263 | retries, redirect=redirect, default=self.retries) | ||
| 264 | |||
| 265 | if retries.connect or retries.read or retries.redirect: | ||
| 266 | warnings.warn( | ||
| 267 | "URLFetch only supports total retries and does not " | ||
| 268 | "recognize connect, read, or redirect retry parameters.", | ||
| 269 | AppEnginePlatformWarning) | ||
| 270 | |||
| 271 | return retries | ||
| 272 | |||
| 273 | |||
| 274 | def is_appengine(): | ||
| 275 | return (is_local_appengine() or | ||
| 276 | is_prod_appengine() or | ||
| 277 | is_prod_appengine_mvms()) | ||
| 278 | |||
| 279 | |||
| 280 | def is_appengine_sandbox(): | ||
| 281 | return is_appengine() and not is_prod_appengine_mvms() | ||
| 282 | |||
| 283 | |||
| 284 | def is_local_appengine(): | ||
| 285 | return ('APPENGINE_RUNTIME' in os.environ and | ||
| 286 | 'Development/' in os.environ['SERVER_SOFTWARE']) | ||
| 287 | |||
| 288 | |||
| 289 | def is_prod_appengine(): | ||
| 290 | return ('APPENGINE_RUNTIME' in os.environ and | ||
| 291 | 'Google App Engine/' in os.environ['SERVER_SOFTWARE'] and | ||
| 292 | not is_prod_appengine_mvms()) | ||
| 293 | |||
| 294 | |||
| 295 | def is_prod_appengine_mvms(): | ||
| 296 | return os.environ.get('GAE_VM', False) == 'true' | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/contrib/ntlmpool.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/contrib/ntlmpool.py new file mode 100644 index 0000000..888e0ad --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/contrib/ntlmpool.py | |||
| @@ -0,0 +1,112 @@ | |||
| 1 | """ | ||
| 2 | NTLM authenticating pool, contributed by erikcederstran | ||
| 3 | |||
| 4 | Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10 | ||
| 5 | """ | ||
| 6 | from __future__ import absolute_import | ||
| 7 | |||
| 8 | from logging import getLogger | ||
| 9 | from ntlm import ntlm | ||
| 10 | |||
| 11 | from .. import HTTPSConnectionPool | ||
| 12 | from ..packages.six.moves.http_client import HTTPSConnection | ||
| 13 | |||
| 14 | |||
| 15 | log = getLogger(__name__) | ||
| 16 | |||
| 17 | |||
| 18 | class NTLMConnectionPool(HTTPSConnectionPool): | ||
| 19 | """ | ||
| 20 | Implements an NTLM authentication version of an urllib3 connection pool. | ||
| 21 | """ | ||
| 22 | |||
| 23 | scheme = 'https' | ||
| 24 | |||
| 25 | def __init__(self, user, pw, authurl, *args, **kwargs): | ||
| 26 | """ | ||
| 27 | authurl is a random URL on the server that is protected by NTLM. | ||
| 28 | user is the Windows user, probably in the DOMAIN\\username format. | ||
| 29 | pw is the password for the user. | ||
| 30 | """ | ||
| 31 | super(NTLMConnectionPool, self).__init__(*args, **kwargs) | ||
| 32 | self.authurl = authurl | ||
| 33 | self.rawuser = user | ||
| 34 | user_parts = user.split('\\', 1) | ||
| 35 | self.domain = user_parts[0].upper() | ||
| 36 | self.user = user_parts[1] | ||
| 37 | self.pw = pw | ||
| 38 | |||
| 39 | def _new_conn(self): | ||
| 40 | # Performs the NTLM handshake that secures the connection. The socket | ||
| 41 | # must be kept open while requests are performed. | ||
| 42 | self.num_connections += 1 | ||
| 43 | log.debug('Starting NTLM HTTPS connection no. %d: https://%s%s', | ||
| 44 | self.num_connections, self.host, self.authurl) | ||
| 45 | |||
| 46 | headers = {} | ||
| 47 | headers['Connection'] = 'Keep-Alive' | ||
| 48 | req_header = 'Authorization' | ||
| 49 | resp_header = 'www-authenticate' | ||
| 50 | |||
| 51 | conn = HTTPSConnection(host=self.host, port=self.port) | ||
| 52 | |||
| 53 | # Send negotiation message | ||
| 54 | headers[req_header] = ( | ||
| 55 | 'NTLM %s' % ntlm.create_NTLM_NEGOTIATE_MESSAGE(self.rawuser)) | ||
| 56 | log.debug('Request headers: %s', headers) | ||
| 57 | conn.request('GET', self.authurl, None, headers) | ||
| 58 | res = conn.getresponse() | ||
| 59 | reshdr = dict(res.getheaders()) | ||
| 60 | log.debug('Response status: %s %s', res.status, res.reason) | ||
| 61 | log.debug('Response headers: %s', reshdr) | ||
| 62 | log.debug('Response data: %s [...]', res.read(100)) | ||
| 63 | |||
| 64 | # Remove the reference to the socket, so that it can not be closed by | ||
| 65 | # the response object (we want to keep the socket open) | ||
| 66 | res.fp = None | ||
| 67 | |||
| 68 | # Server should respond with a challenge message | ||
| 69 | auth_header_values = reshdr[resp_header].split(', ') | ||
| 70 | auth_header_value = None | ||
| 71 | for s in auth_header_values: | ||
| 72 | if s[:5] == 'NTLM ': | ||
| 73 | auth_header_value = s[5:] | ||
| 74 | if auth_header_value is None: | ||
| 75 | raise Exception('Unexpected %s response header: %s' % | ||
| 76 | (resp_header, reshdr[resp_header])) | ||
| 77 | |||
| 78 | # Send authentication message | ||
| 79 | ServerChallenge, NegotiateFlags = \ | ||
| 80 | ntlm.parse_NTLM_CHALLENGE_MESSAGE(auth_header_value) | ||
| 81 | auth_msg = ntlm.create_NTLM_AUTHENTICATE_MESSAGE(ServerChallenge, | ||
| 82 | self.user, | ||
| 83 | self.domain, | ||
| 84 | self.pw, | ||
| 85 | NegotiateFlags) | ||
| 86 | headers[req_header] = 'NTLM %s' % auth_msg | ||
| 87 | log.debug('Request headers: %s', headers) | ||
| 88 | conn.request('GET', self.authurl, None, headers) | ||
| 89 | res = conn.getresponse() | ||
| 90 | log.debug('Response status: %s %s', res.status, res.reason) | ||
| 91 | log.debug('Response headers: %s', dict(res.getheaders())) | ||
| 92 | log.debug('Response data: %s [...]', res.read()[:100]) | ||
| 93 | if res.status != 200: | ||
| 94 | if res.status == 401: | ||
| 95 | raise Exception('Server rejected request: wrong ' | ||
| 96 | 'username or password') | ||
| 97 | raise Exception('Wrong server response: %s %s' % | ||
| 98 | (res.status, res.reason)) | ||
| 99 | |||
| 100 | res.fp = None | ||
| 101 | log.debug('Connection established') | ||
| 102 | return conn | ||
| 103 | |||
| 104 | def urlopen(self, method, url, body=None, headers=None, retries=3, | ||
| 105 | redirect=True, assert_same_host=True): | ||
| 106 | if headers is None: | ||
| 107 | headers = {} | ||
| 108 | headers['Connection'] = 'Keep-Alive' | ||
| 109 | return super(NTLMConnectionPool, self).urlopen(method, url, body, | ||
| 110 | headers, retries, | ||
| 111 | redirect, | ||
| 112 | assert_same_host) | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/contrib/pyopenssl.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/contrib/pyopenssl.py new file mode 100644 index 0000000..f13e2bc --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/contrib/pyopenssl.py | |||
| @@ -0,0 +1,455 @@ | |||
| 1 | """ | ||
| 2 | SSL with SNI_-support for Python 2. Follow these instructions if you would | ||
| 3 | like to verify SSL certificates in Python 2. Note that the default libraries do | ||
| 4 | *not* do certificate checking; you need to do additional work to validate | ||
| 5 | certificates yourself. | ||
| 6 | |||
| 7 | This needs the following packages installed: | ||
| 8 | |||
| 9 | * pyOpenSSL (tested with 16.0.0) | ||
| 10 | * cryptography (minimum 1.3.4, from pyopenssl) | ||
| 11 | * idna (minimum 2.0, from cryptography) | ||
| 12 | |||
| 13 | However, pyopenssl depends on cryptography, which depends on idna, so while we | ||
| 14 | use all three directly here we end up having relatively few packages required. | ||
| 15 | |||
| 16 | You can install them with the following command: | ||
| 17 | |||
| 18 | pip install pyopenssl cryptography idna | ||
| 19 | |||
| 20 | To activate certificate checking, call | ||
| 21 | :func:`~urllib3.contrib.pyopenssl.inject_into_urllib3` from your Python code | ||
| 22 | before you begin making HTTP requests. This can be done in a ``sitecustomize`` | ||
| 23 | module, or at any other time before your application begins using ``urllib3``, | ||
| 24 | like this:: | ||
| 25 | |||
| 26 | try: | ||
| 27 | import urllib3.contrib.pyopenssl | ||
| 28 | urllib3.contrib.pyopenssl.inject_into_urllib3() | ||
| 29 | except ImportError: | ||
| 30 | pass | ||
| 31 | |||
| 32 | Now you can use :mod:`urllib3` as you normally would, and it will support SNI | ||
| 33 | when the required modules are installed. | ||
| 34 | |||
| 35 | Activating this module also has the positive side effect of disabling SSL/TLS | ||
| 36 | compression in Python 2 (see `CRIME attack`_). | ||
| 37 | |||
| 38 | If you want to configure the default list of supported cipher suites, you can | ||
| 39 | set the ``urllib3.contrib.pyopenssl.DEFAULT_SSL_CIPHER_LIST`` variable. | ||
| 40 | |||
| 41 | .. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication | ||
| 42 | .. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit) | ||
| 43 | """ | ||
| 44 | from __future__ import absolute_import | ||
| 45 | |||
| 46 | import OpenSSL.SSL | ||
| 47 | from cryptography import x509 | ||
| 48 | from cryptography.hazmat.backends.openssl import backend as openssl_backend | ||
| 49 | from cryptography.hazmat.backends.openssl.x509 import _Certificate | ||
| 50 | |||
| 51 | from socket import timeout, error as SocketError | ||
| 52 | from io import BytesIO | ||
| 53 | |||
| 54 | try: # Platform-specific: Python 2 | ||
| 55 | from socket import _fileobject | ||
| 56 | except ImportError: # Platform-specific: Python 3 | ||
| 57 | _fileobject = None | ||
| 58 | from ..packages.backports.makefile import backport_makefile | ||
| 59 | |||
| 60 | import logging | ||
| 61 | import ssl | ||
| 62 | from ..packages import six | ||
| 63 | import sys | ||
| 64 | |||
| 65 | from .. import util | ||
| 66 | |||
| 67 | __all__ = ['inject_into_urllib3', 'extract_from_urllib3'] | ||
| 68 | |||
| 69 | # SNI always works. | ||
| 70 | HAS_SNI = True | ||
| 71 | |||
| 72 | # Map from urllib3 to PyOpenSSL compatible parameter-values. | ||
| 73 | _openssl_versions = { | ||
| 74 | ssl.PROTOCOL_SSLv23: OpenSSL.SSL.SSLv23_METHOD, | ||
| 75 | ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD, | ||
| 76 | } | ||
| 77 | |||
| 78 | if hasattr(ssl, 'PROTOCOL_TLSv1_1') and hasattr(OpenSSL.SSL, 'TLSv1_1_METHOD'): | ||
| 79 | _openssl_versions[ssl.PROTOCOL_TLSv1_1] = OpenSSL.SSL.TLSv1_1_METHOD | ||
| 80 | |||
| 81 | if hasattr(ssl, 'PROTOCOL_TLSv1_2') and hasattr(OpenSSL.SSL, 'TLSv1_2_METHOD'): | ||
| 82 | _openssl_versions[ssl.PROTOCOL_TLSv1_2] = OpenSSL.SSL.TLSv1_2_METHOD | ||
| 83 | |||
| 84 | try: | ||
| 85 | _openssl_versions.update({ssl.PROTOCOL_SSLv3: OpenSSL.SSL.SSLv3_METHOD}) | ||
| 86 | except AttributeError: | ||
| 87 | pass | ||
| 88 | |||
| 89 | _stdlib_to_openssl_verify = { | ||
| 90 | ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE, | ||
| 91 | ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER, | ||
| 92 | ssl.CERT_REQUIRED: | ||
| 93 | OpenSSL.SSL.VERIFY_PEER + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT, | ||
| 94 | } | ||
| 95 | _openssl_to_stdlib_verify = dict( | ||
| 96 | (v, k) for k, v in _stdlib_to_openssl_verify.items() | ||
| 97 | ) | ||
| 98 | |||
| 99 | # OpenSSL will only write 16K at a time | ||
| 100 | SSL_WRITE_BLOCKSIZE = 16384 | ||
| 101 | |||
| 102 | orig_util_HAS_SNI = util.HAS_SNI | ||
| 103 | orig_util_SSLContext = util.ssl_.SSLContext | ||
| 104 | |||
| 105 | |||
| 106 | log = logging.getLogger(__name__) | ||
| 107 | |||
| 108 | |||
| 109 | def inject_into_urllib3(): | ||
| 110 | 'Monkey-patch urllib3 with PyOpenSSL-backed SSL-support.' | ||
| 111 | |||
| 112 | _validate_dependencies_met() | ||
| 113 | |||
| 114 | util.ssl_.SSLContext = PyOpenSSLContext | ||
| 115 | util.HAS_SNI = HAS_SNI | ||
| 116 | util.ssl_.HAS_SNI = HAS_SNI | ||
| 117 | util.IS_PYOPENSSL = True | ||
| 118 | util.ssl_.IS_PYOPENSSL = True | ||
| 119 | |||
| 120 | |||
| 121 | def extract_from_urllib3(): | ||
| 122 | 'Undo monkey-patching by :func:`inject_into_urllib3`.' | ||
| 123 | |||
| 124 | util.ssl_.SSLContext = orig_util_SSLContext | ||
| 125 | util.HAS_SNI = orig_util_HAS_SNI | ||
| 126 | util.ssl_.HAS_SNI = orig_util_HAS_SNI | ||
| 127 | util.IS_PYOPENSSL = False | ||
| 128 | util.ssl_.IS_PYOPENSSL = False | ||
| 129 | |||
| 130 | |||
| 131 | def _validate_dependencies_met(): | ||
| 132 | """ | ||
| 133 | Verifies that PyOpenSSL's package-level dependencies have been met. | ||
| 134 | Throws `ImportError` if they are not met. | ||
| 135 | """ | ||
| 136 | # Method added in `cryptography==1.1`; not available in older versions | ||
| 137 | from cryptography.x509.extensions import Extensions | ||
| 138 | if getattr(Extensions, "get_extension_for_class", None) is None: | ||
| 139 | raise ImportError("'cryptography' module missing required functionality. " | ||
| 140 | "Try upgrading to v1.3.4 or newer.") | ||
| 141 | |||
| 142 | # pyOpenSSL 0.14 and above use cryptography for OpenSSL bindings. The _x509 | ||
| 143 | # attribute is only present on those versions. | ||
| 144 | from OpenSSL.crypto import X509 | ||
| 145 | x509 = X509() | ||
| 146 | if getattr(x509, "_x509", None) is None: | ||
| 147 | raise ImportError("'pyOpenSSL' module missing required functionality. " | ||
| 148 | "Try upgrading to v0.14 or newer.") | ||
| 149 | |||
| 150 | |||
| 151 | def _dnsname_to_stdlib(name): | ||
| 152 | """ | ||
| 153 | Converts a dNSName SubjectAlternativeName field to the form used by the | ||
| 154 | standard library on the given Python version. | ||
| 155 | |||
| 156 | Cryptography produces a dNSName as a unicode string that was idna-decoded | ||
| 157 | from ASCII bytes. We need to idna-encode that string to get it back, and | ||
| 158 | then on Python 3 we also need to convert to unicode via UTF-8 (the stdlib | ||
| 159 | uses PyUnicode_FromStringAndSize on it, which decodes via UTF-8). | ||
| 160 | """ | ||
| 161 | def idna_encode(name): | ||
| 162 | """ | ||
| 163 | Borrowed wholesale from the Python Cryptography Project. It turns out | ||
| 164 | that we can't just safely call `idna.encode`: it can explode for | ||
| 165 | wildcard names. This avoids that problem. | ||
| 166 | """ | ||
| 167 | from pip._vendor import idna | ||
| 168 | |||
| 169 | for prefix in [u'*.', u'.']: | ||
| 170 | if name.startswith(prefix): | ||
| 171 | name = name[len(prefix):] | ||
| 172 | return prefix.encode('ascii') + idna.encode(name) | ||
| 173 | return idna.encode(name) | ||
| 174 | |||
| 175 | name = idna_encode(name) | ||
| 176 | if sys.version_info >= (3, 0): | ||
| 177 | name = name.decode('utf-8') | ||
| 178 | return name | ||
| 179 | |||
| 180 | |||
| 181 | def get_subj_alt_name(peer_cert): | ||
| 182 | """ | ||
| 183 | Given an PyOpenSSL certificate, provides all the subject alternative names. | ||
| 184 | """ | ||
| 185 | # Pass the cert to cryptography, which has much better APIs for this. | ||
| 186 | if hasattr(peer_cert, "to_cryptography"): | ||
| 187 | cert = peer_cert.to_cryptography() | ||
| 188 | else: | ||
| 189 | # This is technically using private APIs, but should work across all | ||
| 190 | # relevant versions before PyOpenSSL got a proper API for this. | ||
| 191 | cert = _Certificate(openssl_backend, peer_cert._x509) | ||
| 192 | |||
| 193 | # We want to find the SAN extension. Ask Cryptography to locate it (it's | ||
| 194 | # faster than looping in Python) | ||
| 195 | try: | ||
| 196 | ext = cert.extensions.get_extension_for_class( | ||
| 197 | x509.SubjectAlternativeName | ||
| 198 | ).value | ||
| 199 | except x509.ExtensionNotFound: | ||
| 200 | # No such extension, return the empty list. | ||
| 201 | return [] | ||
| 202 | except (x509.DuplicateExtension, x509.UnsupportedExtension, | ||
| 203 | x509.UnsupportedGeneralNameType, UnicodeError) as e: | ||
| 204 | # A problem has been found with the quality of the certificate. Assume | ||
| 205 | # no SAN field is present. | ||
| 206 | log.warning( | ||
| 207 | "A problem was encountered with the certificate that prevented " | ||
| 208 | "urllib3 from finding the SubjectAlternativeName field. This can " | ||
| 209 | "affect certificate validation. The error was %s", | ||
| 210 | e, | ||
| 211 | ) | ||
| 212 | return [] | ||
| 213 | |||
| 214 | # We want to return dNSName and iPAddress fields. We need to cast the IPs | ||
| 215 | # back to strings because the match_hostname function wants them as | ||
| 216 | # strings. | ||
| 217 | # Sadly the DNS names need to be idna encoded and then, on Python 3, UTF-8 | ||
| 218 | # decoded. This is pretty frustrating, but that's what the standard library | ||
| 219 | # does with certificates, and so we need to attempt to do the same. | ||
| 220 | names = [ | ||
| 221 | ('DNS', _dnsname_to_stdlib(name)) | ||
| 222 | for name in ext.get_values_for_type(x509.DNSName) | ||
| 223 | ] | ||
| 224 | names.extend( | ||
| 225 | ('IP Address', str(name)) | ||
| 226 | for name in ext.get_values_for_type(x509.IPAddress) | ||
| 227 | ) | ||
| 228 | |||
| 229 | return names | ||
| 230 | |||
| 231 | |||
| 232 | class WrappedSocket(object): | ||
| 233 | '''API-compatibility wrapper for pyOpenSSL's ``Connection`` class. | ||
| 234 | |||
| 235 | Note: _makefile_refs, _drop() and _reuse() are needed for the garbage | ||
| 236 | collector of PyPy. | ||
| 237 | ''' | ||
| 238 | |||
| 239 | def __init__(self, connection, socket, suppress_ragged_eofs=True): | ||
| 240 | self.connection = connection | ||
| 241 | self.socket = socket | ||
| 242 | self.suppress_ragged_eofs = suppress_ragged_eofs | ||
| 243 | self._makefile_refs = 0 | ||
| 244 | self._closed = False | ||
| 245 | |||
| 246 | def fileno(self): | ||
| 247 | return self.socket.fileno() | ||
| 248 | |||
| 249 | # Copy-pasted from Python 3.5 source code | ||
| 250 | def _decref_socketios(self): | ||
| 251 | if self._makefile_refs > 0: | ||
| 252 | self._makefile_refs -= 1 | ||
| 253 | if self._closed: | ||
| 254 | self.close() | ||
| 255 | |||
| 256 | def recv(self, *args, **kwargs): | ||
| 257 | try: | ||
| 258 | data = self.connection.recv(*args, **kwargs) | ||
| 259 | except OpenSSL.SSL.SysCallError as e: | ||
| 260 | if self.suppress_ragged_eofs and e.args == (-1, 'Unexpected EOF'): | ||
| 261 | return b'' | ||
| 262 | else: | ||
| 263 | raise SocketError(str(e)) | ||
| 264 | except OpenSSL.SSL.ZeroReturnError as e: | ||
| 265 | if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN: | ||
| 266 | return b'' | ||
| 267 | else: | ||
| 268 | raise | ||
| 269 | except OpenSSL.SSL.WantReadError: | ||
| 270 | rd = util.wait_for_read(self.socket, self.socket.gettimeout()) | ||
| 271 | if not rd: | ||
| 272 | raise timeout('The read operation timed out') | ||
| 273 | else: | ||
| 274 | return self.recv(*args, **kwargs) | ||
| 275 | else: | ||
| 276 | return data | ||
| 277 | |||
| 278 | def recv_into(self, *args, **kwargs): | ||
| 279 | try: | ||
| 280 | return self.connection.recv_into(*args, **kwargs) | ||
| 281 | except OpenSSL.SSL.SysCallError as e: | ||
| 282 | if self.suppress_ragged_eofs and e.args == (-1, 'Unexpected EOF'): | ||
| 283 | return 0 | ||
| 284 | else: | ||
| 285 | raise SocketError(str(e)) | ||
| 286 | except OpenSSL.SSL.ZeroReturnError as e: | ||
| 287 | if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN: | ||
| 288 | return 0 | ||
| 289 | else: | ||
| 290 | raise | ||
| 291 | except OpenSSL.SSL.WantReadError: | ||
| 292 | rd = util.wait_for_read(self.socket, self.socket.gettimeout()) | ||
| 293 | if not rd: | ||
| 294 | raise timeout('The read operation timed out') | ||
| 295 | else: | ||
| 296 | return self.recv_into(*args, **kwargs) | ||
| 297 | |||
| 298 | def settimeout(self, timeout): | ||
| 299 | return self.socket.settimeout(timeout) | ||
| 300 | |||
| 301 | def _send_until_done(self, data): | ||
| 302 | while True: | ||
| 303 | try: | ||
| 304 | return self.connection.send(data) | ||
| 305 | except OpenSSL.SSL.WantWriteError: | ||
| 306 | wr = util.wait_for_write(self.socket, self.socket.gettimeout()) | ||
| 307 | if not wr: | ||
| 308 | raise timeout() | ||
| 309 | continue | ||
| 310 | except OpenSSL.SSL.SysCallError as e: | ||
| 311 | raise SocketError(str(e)) | ||
| 312 | |||
| 313 | def sendall(self, data): | ||
| 314 | total_sent = 0 | ||
| 315 | while total_sent < len(data): | ||
| 316 | sent = self._send_until_done(data[total_sent:total_sent + SSL_WRITE_BLOCKSIZE]) | ||
| 317 | total_sent += sent | ||
| 318 | |||
| 319 | def shutdown(self): | ||
| 320 | # FIXME rethrow compatible exceptions should we ever use this | ||
| 321 | self.connection.shutdown() | ||
| 322 | |||
| 323 | def close(self): | ||
| 324 | if self._makefile_refs < 1: | ||
| 325 | try: | ||
| 326 | self._closed = True | ||
| 327 | return self.connection.close() | ||
| 328 | except OpenSSL.SSL.Error: | ||
| 329 | return | ||
| 330 | else: | ||
| 331 | self._makefile_refs -= 1 | ||
| 332 | |||
| 333 | def getpeercert(self, binary_form=False): | ||
| 334 | x509 = self.connection.get_peer_certificate() | ||
| 335 | |||
| 336 | if not x509: | ||
| 337 | return x509 | ||
| 338 | |||
| 339 | if binary_form: | ||
| 340 | return OpenSSL.crypto.dump_certificate( | ||
| 341 | OpenSSL.crypto.FILETYPE_ASN1, | ||
| 342 | x509) | ||
| 343 | |||
| 344 | return { | ||
| 345 | 'subject': ( | ||
| 346 | (('commonName', x509.get_subject().CN),), | ||
| 347 | ), | ||
| 348 | 'subjectAltName': get_subj_alt_name(x509) | ||
| 349 | } | ||
| 350 | |||
| 351 | def _reuse(self): | ||
| 352 | self._makefile_refs += 1 | ||
| 353 | |||
| 354 | def _drop(self): | ||
| 355 | if self._makefile_refs < 1: | ||
| 356 | self.close() | ||
| 357 | else: | ||
| 358 | self._makefile_refs -= 1 | ||
| 359 | |||
| 360 | |||
| 361 | if _fileobject: # Platform-specific: Python 2 | ||
| 362 | def makefile(self, mode, bufsize=-1): | ||
| 363 | self._makefile_refs += 1 | ||
| 364 | return _fileobject(self, mode, bufsize, close=True) | ||
| 365 | else: # Platform-specific: Python 3 | ||
| 366 | makefile = backport_makefile | ||
| 367 | |||
| 368 | WrappedSocket.makefile = makefile | ||
| 369 | |||
| 370 | |||
| 371 | class PyOpenSSLContext(object): | ||
| 372 | """ | ||
| 373 | I am a wrapper class for the PyOpenSSL ``Context`` object. I am responsible | ||
| 374 | for translating the interface of the standard library ``SSLContext`` object | ||
| 375 | to calls into PyOpenSSL. | ||
| 376 | """ | ||
| 377 | def __init__(self, protocol): | ||
| 378 | self.protocol = _openssl_versions[protocol] | ||
| 379 | self._ctx = OpenSSL.SSL.Context(self.protocol) | ||
| 380 | self._options = 0 | ||
| 381 | self.check_hostname = False | ||
| 382 | |||
| 383 | @property | ||
| 384 | def options(self): | ||
| 385 | return self._options | ||
| 386 | |||
| 387 | @options.setter | ||
| 388 | def options(self, value): | ||
| 389 | self._options = value | ||
| 390 | self._ctx.set_options(value) | ||
| 391 | |||
| 392 | @property | ||
| 393 | def verify_mode(self): | ||
| 394 | return _openssl_to_stdlib_verify[self._ctx.get_verify_mode()] | ||
| 395 | |||
| 396 | @verify_mode.setter | ||
| 397 | def verify_mode(self, value): | ||
| 398 | self._ctx.set_verify( | ||
| 399 | _stdlib_to_openssl_verify[value], | ||
| 400 | _verify_callback | ||
| 401 | ) | ||
| 402 | |||
| 403 | def set_default_verify_paths(self): | ||
| 404 | self._ctx.set_default_verify_paths() | ||
| 405 | |||
| 406 | def set_ciphers(self, ciphers): | ||
| 407 | if isinstance(ciphers, six.text_type): | ||
| 408 | ciphers = ciphers.encode('utf-8') | ||
| 409 | self._ctx.set_cipher_list(ciphers) | ||
| 410 | |||
| 411 | def load_verify_locations(self, cafile=None, capath=None, cadata=None): | ||
| 412 | if cafile is not None: | ||
| 413 | cafile = cafile.encode('utf-8') | ||
| 414 | if capath is not None: | ||
| 415 | capath = capath.encode('utf-8') | ||
| 416 | self._ctx.load_verify_locations(cafile, capath) | ||
| 417 | if cadata is not None: | ||
| 418 | self._ctx.load_verify_locations(BytesIO(cadata)) | ||
| 419 | |||
| 420 | def load_cert_chain(self, certfile, keyfile=None, password=None): | ||
| 421 | self._ctx.use_certificate_file(certfile) | ||
| 422 | if password is not None: | ||
| 423 | self._ctx.set_passwd_cb(lambda max_length, prompt_twice, userdata: password) | ||
| 424 | self._ctx.use_privatekey_file(keyfile or certfile) | ||
| 425 | |||
| 426 | def wrap_socket(self, sock, server_side=False, | ||
| 427 | do_handshake_on_connect=True, suppress_ragged_eofs=True, | ||
| 428 | server_hostname=None): | ||
| 429 | cnx = OpenSSL.SSL.Connection(self._ctx, sock) | ||
| 430 | |||
| 431 | if isinstance(server_hostname, six.text_type): # Platform-specific: Python 3 | ||
| 432 | server_hostname = server_hostname.encode('utf-8') | ||
| 433 | |||
| 434 | if server_hostname is not None: | ||
| 435 | cnx.set_tlsext_host_name(server_hostname) | ||
| 436 | |||
| 437 | cnx.set_connect_state() | ||
| 438 | |||
| 439 | while True: | ||
| 440 | try: | ||
| 441 | cnx.do_handshake() | ||
| 442 | except OpenSSL.SSL.WantReadError: | ||
| 443 | rd = util.wait_for_read(sock, sock.gettimeout()) | ||
| 444 | if not rd: | ||
| 445 | raise timeout('select timed out') | ||
| 446 | continue | ||
| 447 | except OpenSSL.SSL.Error as e: | ||
| 448 | raise ssl.SSLError('bad handshake: %r' % e) | ||
| 449 | break | ||
| 450 | |||
| 451 | return WrappedSocket(cnx, sock) | ||
| 452 | |||
| 453 | |||
| 454 | def _verify_callback(cnx, x509, err_no, err_depth, return_code): | ||
| 455 | return err_no == 0 | ||
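The `recv`/`sendall` plumbing and the `PyOpenSSLContext` translation layer above let the class stand in for the stdlib `ssl.SSLContext` inside urllib3. A minimal sketch of driving it directly — hypothetical usage, not part of the diff, assuming pyOpenSSL is installed and that `pypi.org` (an arbitrary host choice) is reachable:

```python
import socket
import ssl

from pip._vendor.urllib3.contrib.pyopenssl import PyOpenSSLContext

ctx = PyOpenSSLContext(ssl.PROTOCOL_SSLv23)  # wraps an OpenSSL.SSL.Context
ctx.verify_mode = ssl.CERT_REQUIRED          # translated via _stdlib_to_openssl_verify
ctx.set_default_verify_paths()

raw = socket.create_connection(("pypi.org", 443))
tls = ctx.wrap_socket(raw, server_hostname="pypi.org")  # returns a WrappedSocket
try:
    tls.sendall(b"HEAD / HTTP/1.1\r\nHost: pypi.org\r\nConnection: close\r\n\r\n")
    print(tls.recv(4096)[:64])           # first bytes of the HTTP response
    print(tls.getpeercert()["subject"])  # ((('commonName', ...),),)
finally:
    tls.close()
    raw.close()
```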
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/contrib/securetransport.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/contrib/securetransport.py new file mode 100644 index 0000000..77cf861 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/contrib/securetransport.py | |||
| @@ -0,0 +1,810 @@ | |||
| 1 | """ | ||
| 2 | SecureTransport support for urllib3 via ctypes. | ||
| 3 | |||
| 4 | This makes platform-native TLS available to urllib3 users on macOS without the | ||
| 5 | use of a compiler. This is an important feature because the Python Package | ||
| 6 | Index is moving to become a TLSv1.2-or-higher server, and the default OpenSSL | ||
| 7 | that ships with macOS is not capable of doing TLSv1.2. The only way to resolve | ||
| 8 | this is to give macOS users an alternative solution to the problem, and that | ||
| 9 | solution is to use SecureTransport. | ||
| 10 | |||
| 11 | We use ctypes here because this solution must not require a compiler. That's | ||
| 12 | because pip is not allowed to require a compiler either. | ||
| 13 | |||
| 14 | This is not intended to be a seriously long-term solution to this problem. | ||
| 15 | The hope is that PEP 543 will eventually solve this issue for us, at which | ||
| 16 | point we can retire this contrib module. But in the short term, we need to | ||
| 17 | solve the impending tire fire that is Python on Mac without this kind of | ||
| 18 | contrib module. So...here we are. | ||
| 19 | |||
| 20 | To use this module, simply import and inject it:: | ||
| 21 | |||
| 22 | import urllib3.contrib.securetransport | ||
| 23 | urllib3.contrib.securetransport.inject_into_urllib3() | ||
| 24 | |||
| 25 | Happy TLSing! | ||
| 26 | """ | ||
| 27 | from __future__ import absolute_import | ||
| 28 | |||
| 29 | import contextlib | ||
| 30 | import ctypes | ||
| 31 | import errno | ||
| 32 | import os.path | ||
| 33 | import shutil | ||
| 34 | import socket | ||
| 35 | import ssl | ||
| 36 | import threading | ||
| 37 | import weakref | ||
| 38 | |||
| 39 | from .. import util | ||
| 40 | from ._securetransport.bindings import ( | ||
| 41 | Security, SecurityConst, CoreFoundation | ||
| 42 | ) | ||
| 43 | from ._securetransport.low_level import ( | ||
| 44 | _assert_no_error, _cert_array_from_pem, _temporary_keychain, | ||
| 45 | _load_client_cert_chain | ||
| 46 | ) | ||
| 47 | |||
| 48 | try: # Platform-specific: Python 2 | ||
| 49 | from socket import _fileobject | ||
| 50 | except ImportError: # Platform-specific: Python 3 | ||
| 51 | _fileobject = None | ||
| 52 | from ..packages.backports.makefile import backport_makefile | ||
| 53 | |||
| 54 | try: | ||
| 55 | memoryview(b'') | ||
| 56 | except NameError: | ||
| 57 | raise ImportError("SecureTransport only works on Pythons with memoryview") | ||
| 58 | |||
| 59 | __all__ = ['inject_into_urllib3', 'extract_from_urllib3'] | ||
| 60 | |||
| 61 | # SNI always works | ||
| 62 | HAS_SNI = True | ||
| 63 | |||
| 64 | orig_util_HAS_SNI = util.HAS_SNI | ||
| 65 | orig_util_SSLContext = util.ssl_.SSLContext | ||
| 66 | |||
| 67 | # This dictionary is used by the read callback to obtain a handle to the | ||
| 68 | # calling wrapped socket. This is a pretty silly approach, but for now it'll | ||
| 69 | # do. I feel like I should be able to smuggle a handle to the wrapped socket | ||
| 70 | # directly in the SSLConnectionRef, but for now this approach will work I | ||
| 71 | # guess. | ||
| 72 | # | ||
| 73 | # We need to lock around this structure for inserts, but we don't do it for | ||
| 74 | # reads/writes in the callbacks. The reasoning here goes as follows: | ||
| 75 | # | ||
| 76 | # 1. It is not possible to call into the callbacks before the dictionary is | ||
| 77 | # populated, so once in the callback the id must be in the dictionary. | ||
| 78 | # 2. The callbacks don't mutate the dictionary, they only read from it, and | ||
| 79 | # so cannot conflict with any of the insertions. | ||
| 80 | # | ||
| 81 | # This is good: if we had to lock in the callbacks we'd drastically slow down | ||
| 82 | # the performance of this code. | ||
| 83 | _connection_refs = weakref.WeakValueDictionary() | ||
| 84 | _connection_ref_lock = threading.Lock() | ||
| 85 | |||
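A standalone sketch of the registry pattern described in the comment above (`_FakeSocket` and `register` are illustrative names, not part of the module); the same id-modulo-and-probe logic appears later in `WrappedSocket.handshake()`:

```python
import threading
import weakref

_refs = weakref.WeakValueDictionary()
_lock = threading.Lock()


class _FakeSocket(object):
    pass


def register(obj):
    # Inserts are locked; callback-side reads are not, per the reasoning
    # above: a handle cannot reach a callback before it has been inserted,
    # and the callbacks never mutate the dictionary.
    with _lock:
        handle = id(obj) % 2147483647  # 2**31 - 1
        while handle in _refs:
            handle = (handle + 1) % 2147483647
        _refs[handle] = obj
    return handle


sock = _FakeSocket()
h = register(sock)
assert _refs.get(h) is sock  # what _read_callback/_write_callback do
del sock                     # weak values: the entry disappears on GC
```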
| 86 | # Limit writes to 16kB. This is OpenSSL's limit, but we'll cargo-cult it over | ||
| 87 | # for no better reason than we need *a* limit, and this one is right there. | ||
| 88 | SSL_WRITE_BLOCKSIZE = 16384 | ||
| 89 | |||
| 90 | # This is our equivalent of util.ssl_.DEFAULT_CIPHERS, but expanded out to | ||
| 91 | # individual cipher suites. We need to do this because this is how | ||
| 92 | # SecureTransport wants them. | ||
| 93 | CIPHER_SUITES = [ | ||
| 94 | SecurityConst.TLS_AES_256_GCM_SHA384, | ||
| 95 | SecurityConst.TLS_CHACHA20_POLY1305_SHA256, | ||
| 96 | SecurityConst.TLS_AES_128_GCM_SHA256, | ||
| 97 | SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384, | ||
| 98 | SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384, | ||
| 99 | SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256, | ||
| 100 | SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256, | ||
| 101 | SecurityConst.TLS_DHE_DSS_WITH_AES_256_GCM_SHA384, | ||
| 102 | SecurityConst.TLS_DHE_RSA_WITH_AES_256_GCM_SHA384, | ||
| 103 | SecurityConst.TLS_DHE_DSS_WITH_AES_128_GCM_SHA256, | ||
| 104 | SecurityConst.TLS_DHE_RSA_WITH_AES_128_GCM_SHA256, | ||
| 105 | SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384, | ||
| 106 | SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384, | ||
| 107 | SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA, | ||
| 108 | SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA, | ||
| 109 | SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA256, | ||
| 110 | SecurityConst.TLS_DHE_DSS_WITH_AES_256_CBC_SHA256, | ||
| 111 | SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA, | ||
| 112 | SecurityConst.TLS_DHE_DSS_WITH_AES_256_CBC_SHA, | ||
| 113 | SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256, | ||
| 114 | SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256, | ||
| 115 | SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA, | ||
| 116 | SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA, | ||
| 117 | SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA256, | ||
| 118 | SecurityConst.TLS_DHE_DSS_WITH_AES_128_CBC_SHA256, | ||
| 119 | SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA, | ||
| 120 | SecurityConst.TLS_DHE_DSS_WITH_AES_128_CBC_SHA, | ||
| 121 | SecurityConst.TLS_RSA_WITH_AES_256_GCM_SHA384, | ||
| 122 | SecurityConst.TLS_RSA_WITH_AES_128_GCM_SHA256, | ||
| 123 | SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA256, | ||
| 124 | SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA256, | ||
| 125 | SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA, | ||
| 126 | SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA, | ||
| 127 | ] | ||
| 128 | |||
| 129 | # Basically this is simple: for PROTOCOL_SSLv23 we turn it into a low of | ||
| 130 | # TLSv1 and a high of TLSv1.2. For everything else, we pin to that version. | ||
| 131 | _protocol_to_min_max = { | ||
| 132 | ssl.PROTOCOL_SSLv23: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12), | ||
| 133 | } | ||
| 134 | |||
| 135 | if hasattr(ssl, "PROTOCOL_SSLv2"): | ||
| 136 | _protocol_to_min_max[ssl.PROTOCOL_SSLv2] = ( | ||
| 137 | SecurityConst.kSSLProtocol2, SecurityConst.kSSLProtocol2 | ||
| 138 | ) | ||
| 139 | if hasattr(ssl, "PROTOCOL_SSLv3"): | ||
| 140 | _protocol_to_min_max[ssl.PROTOCOL_SSLv3] = ( | ||
| 141 | SecurityConst.kSSLProtocol3, SecurityConst.kSSLProtocol3 | ||
| 142 | ) | ||
| 143 | if hasattr(ssl, "PROTOCOL_TLSv1"): | ||
| 144 | _protocol_to_min_max[ssl.PROTOCOL_TLSv1] = ( | ||
| 145 | SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol1 | ||
| 146 | ) | ||
| 147 | if hasattr(ssl, "PROTOCOL_TLSv1_1"): | ||
| 148 | _protocol_to_min_max[ssl.PROTOCOL_TLSv1_1] = ( | ||
| 149 | SecurityConst.kTLSProtocol11, SecurityConst.kTLSProtocol11 | ||
| 150 | ) | ||
| 151 | if hasattr(ssl, "PROTOCOL_TLSv1_2"): | ||
| 152 | _protocol_to_min_max[ssl.PROTOCOL_TLSv1_2] = ( | ||
| 153 | SecurityConst.kTLSProtocol12, SecurityConst.kTLSProtocol12 | ||
| 154 | ) | ||
| 155 | if hasattr(ssl, "PROTOCOL_TLS"): | ||
| 156 | _protocol_to_min_max[ssl.PROTOCOL_TLS] = _protocol_to_min_max[ssl.PROTOCOL_SSLv23] | ||
| 157 | |||
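A standalone mirror of the mapping logic above, for illustration; the enum values are copied from SecureTransport's `SSLProtocol` constants and should be treated as assumptions here, since the authoritative ones live in `SecurityConst`:

```python
import ssl

# SecureTransport SSLProtocol enum values (assumed; see SecurityConst).
kTLSProtocol1, kTLSProtocol12 = 4, 8

protocol_to_min_max = {ssl.PROTOCOL_SSLv23: (kTLSProtocol1, kTLSProtocol12)}
if hasattr(ssl, "PROTOCOL_TLSv1_2"):
    # Pinned protocols use the same constant for both ends of the range.
    protocol_to_min_max[ssl.PROTOCOL_TLSv1_2] = (kTLSProtocol12, kTLSProtocol12)

print(protocol_to_min_max[ssl.PROTOCOL_SSLv23])  # (4, 8): TLSv1 floor, TLSv1.2 ceiling
```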
| 158 | |||
| 159 | def inject_into_urllib3(): | ||
| 160 | """ | ||
| 161 | Monkey-patch urllib3 with SecureTransport-backed SSL-support. | ||
| 162 | """ | ||
| 163 | util.ssl_.SSLContext = SecureTransportContext | ||
| 164 | util.HAS_SNI = HAS_SNI | ||
| 165 | util.ssl_.HAS_SNI = HAS_SNI | ||
| 166 | util.IS_SECURETRANSPORT = True | ||
| 167 | util.ssl_.IS_SECURETRANSPORT = True | ||
| 168 | |||
| 169 | |||
| 170 | def extract_from_urllib3(): | ||
| 171 | """ | ||
| 172 | Undo monkey-patching by :func:`inject_into_urllib3`. | ||
| 173 | """ | ||
| 174 | util.ssl_.SSLContext = orig_util_SSLContext | ||
| 175 | util.HAS_SNI = orig_util_HAS_SNI | ||
| 176 | util.ssl_.HAS_SNI = orig_util_HAS_SNI | ||
| 177 | util.IS_SECURETRANSPORT = False | ||
| 178 | util.ssl_.IS_SECURETRANSPORT = False | ||
| 179 | |||
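Putting the two hooks together — a sketch of the intended lifecycle (macOS only; `pip._vendor.urllib3` stands in for a plain urllib3 install, and the target URL is arbitrary):

```python
from pip._vendor import urllib3
from pip._vendor.urllib3.contrib import securetransport

securetransport.inject_into_urllib3()  # swap in SecureTransportContext
try:
    http = urllib3.PoolManager()
    resp = http.request("GET", "https://pypi.org/")
    print(resp.status)
finally:
    # Restore the stock OpenSSL-backed SSLContext for the rest of the process.
    securetransport.extract_from_urllib3()
```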
| 180 | |||
| 181 | def _read_callback(connection_id, data_buffer, data_length_pointer): | ||
| 182 | """ | ||
| 183 | SecureTransport read callback. This is called by ST to request that data | ||
| 184 | be returned from the socket. | ||
| 185 | """ | ||
| 186 | wrapped_socket = None | ||
| 187 | try: | ||
| 188 | wrapped_socket = _connection_refs.get(connection_id) | ||
| 189 | if wrapped_socket is None: | ||
| 190 | return SecurityConst.errSSLInternal | ||
| 191 | base_socket = wrapped_socket.socket | ||
| 192 | |||
| 193 | requested_length = data_length_pointer[0] | ||
| 194 | |||
| 195 | timeout = wrapped_socket.gettimeout() | ||
| 196 | error = None | ||
| 197 | read_count = 0 | ||
| 198 | buffer = (ctypes.c_char * requested_length).from_address(data_buffer) | ||
| 199 | buffer_view = memoryview(buffer) | ||
| 200 | |||
| 201 | try: | ||
| 202 | while read_count < requested_length: | ||
| 203 | if timeout is None or timeout >= 0: | ||
| 204 | readables = util.wait_for_read([base_socket], timeout) | ||
| 205 | if not readables: | ||
| 206 | raise socket.error(errno.EAGAIN, 'timed out') | ||
| 207 | |||
| 208 | # We need to tell ctypes that we have a buffer that can be | ||
| 209 | # written to. Upsettingly, we do that like this: | ||
| 210 | chunk_size = base_socket.recv_into( | ||
| 211 | buffer_view[read_count:requested_length] | ||
| 212 | ) | ||
| 213 | read_count += chunk_size | ||
| 214 | if not chunk_size: | ||
| 215 | if not read_count: | ||
| 216 | return SecurityConst.errSSLClosedGraceful | ||
| 217 | break | ||
| 218 | except socket.error as e: | ||
| 219 | error = e.errno | ||
| 220 | |||
| 221 | if error is not None and error != errno.EAGAIN: | ||
| 222 | if error == errno.ECONNRESET: | ||
| 223 | return SecurityConst.errSSLClosedAbort | ||
| 224 | raise | ||
| 225 | |||
| 226 | data_length_pointer[0] = read_count | ||
| 227 | |||
| 228 | if read_count != requested_length: | ||
| 229 | return SecurityConst.errSSLWouldBlock | ||
| 230 | |||
| 231 | return 0 | ||
| 232 | except Exception as e: | ||
| 233 | if wrapped_socket is not None: | ||
| 234 | wrapped_socket._exception = e | ||
| 235 | return SecurityConst.errSSLInternal | ||
| 236 | |||
| 237 | |||
| 238 | def _write_callback(connection_id, data_buffer, data_length_pointer): | ||
| 239 | """ | ||
| 240 | SecureTransport write callback. This is called by ST to request that data | ||
| 241 | actually be sent on the network. | ||
| 242 | """ | ||
| 243 | wrapped_socket = None | ||
| 244 | try: | ||
| 245 | wrapped_socket = _connection_refs.get(connection_id) | ||
| 246 | if wrapped_socket is None: | ||
| 247 | return SecurityConst.errSSLInternal | ||
| 248 | base_socket = wrapped_socket.socket | ||
| 249 | |||
| 250 | bytes_to_write = data_length_pointer[0] | ||
| 251 | data = ctypes.string_at(data_buffer, bytes_to_write) | ||
| 252 | |||
| 253 | timeout = wrapped_socket.gettimeout() | ||
| 254 | error = None | ||
| 255 | sent = 0 | ||
| 256 | |||
| 257 | try: | ||
| 258 | while sent < bytes_to_write: | ||
| 259 | if timeout is None or timeout >= 0: | ||
| 260 | writables = util.wait_for_write([base_socket], timeout) | ||
| 261 | if not writables: | ||
| 262 | raise socket.error(errno.EAGAIN, 'timed out') | ||
| 263 | chunk_sent = base_socket.send(data) | ||
| 264 | sent += chunk_sent | ||
| 265 | |||
| 266 | # This has some needless copying here, but I'm not sure there's | ||
| 267 | # much value in optimising this data path. | ||
| 268 | data = data[chunk_sent:] | ||
| 269 | except socket.error as e: | ||
| 270 | error = e.errno | ||
| 271 | |||
| 272 | if error is not None and error != errno.EAGAIN: | ||
| 273 | if error == errno.ECONNRESET: | ||
| 274 | return SecurityConst.errSSLClosedAbort | ||
| 275 | raise | ||
| 276 | |||
| 277 | data_length_pointer[0] = sent | ||
| 278 | if sent != bytes_to_write: | ||
| 279 | return SecurityConst.errSSLWouldBlock | ||
| 280 | |||
| 281 | return 0 | ||
| 282 | except Exception as e: | ||
| 283 | if wrapped_socket is not None: | ||
| 284 | wrapped_socket._exception = e | ||
| 285 | return SecurityConst.errSSLInternal | ||
| 286 | |||
| 287 | |||
| 288 | # We need to keep these two objects references alive: if they get GC'd while | ||
| 289 | # in use then SecureTransport could attempt to call a function that is in freed | ||
| 290 | # memory. That would be...uh...bad. Yeah, that's the word. Bad. | ||
| 291 | _read_callback_pointer = Security.SSLReadFunc(_read_callback) | ||
| 292 | _write_callback_pointer = Security.SSLWriteFunc(_write_callback) | ||
| 293 | |||
| 294 | |||
| 295 | class WrappedSocket(object): | ||
| 296 | """ | ||
| 297 | API-compatibility wrapper for Python's OpenSSL wrapped socket object. | ||
| 298 | |||
| 299 | Note: _makefile_refs, _drop(), and _reuse() are needed for the garbage | ||
| 300 | collector of PyPy. | ||
| 301 | """ | ||
| 302 | def __init__(self, socket): | ||
| 303 | self.socket = socket | ||
| 304 | self.context = None | ||
| 305 | self._makefile_refs = 0 | ||
| 306 | self._closed = False | ||
| 307 | self._exception = None | ||
| 308 | self._keychain = None | ||
| 309 | self._keychain_dir = None | ||
| 310 | self._client_cert_chain = None | ||
| 311 | |||
| 312 | # We save off the previously-configured timeout and then set it to | ||
| 313 | # zero. This is done because we use select and friends to handle the | ||
| 314 | # timeouts, but if we leave the timeout set on the lower socket then | ||
| 315 | # Python will "kindly" call select on that socket again for us. Avoid | ||
| 316 | # that by forcing the timeout to zero. | ||
| 317 | self._timeout = self.socket.gettimeout() | ||
| 318 | self.socket.settimeout(0) | ||
| 319 | |||
| 320 | @contextlib.contextmanager | ||
| 321 | def _raise_on_error(self): | ||
| 322 | """ | ||
| 323 | A context manager that can be used to wrap calls that do I/O from | ||
| 324 | SecureTransport. If any of the I/O callbacks hit an exception, this | ||
| 325 | context manager will correctly propagate the exception after the fact. | ||
| 326 | This avoids silently swallowing those exceptions. | ||
| 327 | |||
| 328 | It also correctly forces the socket closed. | ||
| 329 | """ | ||
| 330 | self._exception = None | ||
| 331 | |||
| 332 | # We explicitly don't catch around this yield because in the unlikely | ||
| 333 | # event that an exception was hit in the block we don't want to swallow | ||
| 334 | # it. | ||
| 335 | yield | ||
| 336 | if self._exception is not None: | ||
| 337 | exception, self._exception = self._exception, None | ||
| 338 | self.close() | ||
| 339 | raise exception | ||
| 340 | |||
| 341 | def _set_ciphers(self): | ||
| 342 | """ | ||
| 343 | Sets up the allowed ciphers. By default this matches the set in | ||
| 344 | util.ssl_.DEFAULT_CIPHERS, at least as supported by macOS. The list is | ||
| 345 | hard-coded and can't be changed at this time, mostly because parsing | ||
| 346 | OpenSSL cipher strings is going to be a freaking nightmare. | ||
| 347 | """ | ||
| 348 | ciphers = (Security.SSLCipherSuite * len(CIPHER_SUITES))(*CIPHER_SUITES) | ||
| 349 | result = Security.SSLSetEnabledCiphers( | ||
| 350 | self.context, ciphers, len(CIPHER_SUITES) | ||
| 351 | ) | ||
| 352 | _assert_no_error(result) | ||
| 353 | |||
| 354 | def _custom_validate(self, verify, trust_bundle): | ||
| 355 | """ | ||
| 356 | Called when we have set custom validation. We do this in two cases: | ||
| 357 | first, when cert validation is entirely disabled; and second, when | ||
| 358 | using a custom trust DB. | ||
| 359 | """ | ||
| 360 | # If we disabled cert validation, just say: cool. | ||
| 361 | if not verify: | ||
| 362 | return | ||
| 363 | |||
| 364 | # We want data in memory, so load it up. | ||
| 365 | if os.path.isfile(trust_bundle): | ||
| 366 | with open(trust_bundle, 'rb') as f: | ||
| 367 | trust_bundle = f.read() | ||
| 368 | |||
| 369 | cert_array = None | ||
| 370 | trust = Security.SecTrustRef() | ||
| 371 | |||
| 372 | try: | ||
| 373 | # Get a CFArray that contains the certs we want. | ||
| 374 | cert_array = _cert_array_from_pem(trust_bundle) | ||
| 375 | |||
| 376 | # Ok, now the hard part. We want to get the SecTrustRef that ST has | ||
| 377 | # created for this connection, shove our CAs into it, tell ST to | ||
| 378 | # ignore everything else it knows, and then ask if it can build a | ||
| 379 | # chain. This is a buuuunch of code. | ||
| 380 | result = Security.SSLCopyPeerTrust( | ||
| 381 | self.context, ctypes.byref(trust) | ||
| 382 | ) | ||
| 383 | _assert_no_error(result) | ||
| 384 | if not trust: | ||
| 385 | raise ssl.SSLError("Failed to copy trust reference") | ||
| 386 | |||
| 387 | result = Security.SecTrustSetAnchorCertificates(trust, cert_array) | ||
| 388 | _assert_no_error(result) | ||
| 389 | |||
| 390 | result = Security.SecTrustSetAnchorCertificatesOnly(trust, True) | ||
| 391 | _assert_no_error(result) | ||
| 392 | |||
| 393 | trust_result = Security.SecTrustResultType() | ||
| 394 | result = Security.SecTrustEvaluate( | ||
| 395 | trust, ctypes.byref(trust_result) | ||
| 396 | ) | ||
| 397 | _assert_no_error(result) | ||
| 398 | finally: | ||
| 399 | if trust: | ||
| 400 | CoreFoundation.CFRelease(trust) | ||
| 401 | |||
| 402 | if cert_array is not None: | ||
| 403 | CoreFoundation.CFRelease(cert_array) | ||
| 404 | |||
| 405 | # Ok, now we can look at what the result was. | ||
| 406 | successes = ( | ||
| 407 | SecurityConst.kSecTrustResultUnspecified, | ||
| 408 | SecurityConst.kSecTrustResultProceed | ||
| 409 | ) | ||
| 410 | if trust_result.value not in successes: | ||
| 411 | raise ssl.SSLError( | ||
| 412 | "certificate verify failed, error code: %d" % | ||
| 413 | trust_result.value | ||
| 414 | ) | ||
| 415 | |||
| 416 | def handshake(self, | ||
| 417 | server_hostname, | ||
| 418 | verify, | ||
| 419 | trust_bundle, | ||
| 420 | min_version, | ||
| 421 | max_version, | ||
| 422 | client_cert, | ||
| 423 | client_key, | ||
| 424 | client_key_passphrase): | ||
| 425 | """ | ||
| 426 | Actually performs the TLS handshake. This is run automatically by | ||
| 427 | wrapped socket, and shouldn't be needed in user code. | ||
| 428 | """ | ||
| 429 | # First, we do the initial bits of connection setup. We need to create | ||
| 430 | # a context, set its I/O funcs, and set the connection reference. | ||
| 431 | self.context = Security.SSLCreateContext( | ||
| 432 | None, SecurityConst.kSSLClientSide, SecurityConst.kSSLStreamType | ||
| 433 | ) | ||
| 434 | result = Security.SSLSetIOFuncs( | ||
| 435 | self.context, _read_callback_pointer, _write_callback_pointer | ||
| 436 | ) | ||
| 437 | _assert_no_error(result) | ||
| 438 | |||
| 439 | # Here we need to compute the handle to use. We do this by taking the | ||
| 440 | # id of self modulo 2**31 - 1. If this is already in the dictionary, we | ||
| 441 | # just keep incrementing by one until we find a free space. | ||
| 442 | with _connection_ref_lock: | ||
| 443 | handle = id(self) % 2147483647 | ||
| 444 | while handle in _connection_refs: | ||
| 445 | handle = (handle + 1) % 2147483647 | ||
| 446 | _connection_refs[handle] = self | ||
| 447 | |||
| 448 | result = Security.SSLSetConnection(self.context, handle) | ||
| 449 | _assert_no_error(result) | ||
| 450 | |||
| 451 | # If we have a server hostname, we should set that too. | ||
| 452 | if server_hostname: | ||
| 453 | if not isinstance(server_hostname, bytes): | ||
| 454 | server_hostname = server_hostname.encode('utf-8') | ||
| 455 | |||
| 456 | result = Security.SSLSetPeerDomainName( | ||
| 457 | self.context, server_hostname, len(server_hostname) | ||
| 458 | ) | ||
| 459 | _assert_no_error(result) | ||
| 460 | |||
| 461 | # Setup the ciphers. | ||
| 462 | self._set_ciphers() | ||
| 463 | |||
| 464 | # Set the minimum and maximum TLS versions. | ||
| 465 | result = Security.SSLSetProtocolVersionMin(self.context, min_version) | ||
| 466 | _assert_no_error(result) | ||
| 467 | result = Security.SSLSetProtocolVersionMax(self.context, max_version) | ||
| 468 | _assert_no_error(result) | ||
| 469 | |||
| 470 | # If there's a trust DB, we need to use it. We do that by telling | ||
| 471 | # SecureTransport to break on server auth. We also do that if we don't | ||
| 472 | # want to validate the certs at all: we just won't actually do any | ||
| 473 | # authing in that case. | ||
| 474 | if not verify or trust_bundle is not None: | ||
| 475 | result = Security.SSLSetSessionOption( | ||
| 476 | self.context, | ||
| 477 | SecurityConst.kSSLSessionOptionBreakOnServerAuth, | ||
| 478 | True | ||
| 479 | ) | ||
| 480 | _assert_no_error(result) | ||
| 481 | |||
| 482 | # If there's a client cert, we need to use it. | ||
| 483 | if client_cert: | ||
| 484 | self._keychain, self._keychain_dir = _temporary_keychain() | ||
| 485 | self._client_cert_chain = _load_client_cert_chain( | ||
| 486 | self._keychain, client_cert, client_key | ||
| 487 | ) | ||
| 488 | result = Security.SSLSetCertificate( | ||
| 489 | self.context, self._client_cert_chain | ||
| 490 | ) | ||
| 491 | _assert_no_error(result) | ||
| 492 | |||
| 493 | while True: | ||
| 494 | with self._raise_on_error(): | ||
| 495 | result = Security.SSLHandshake(self.context) | ||
| 496 | |||
| 497 | if result == SecurityConst.errSSLWouldBlock: | ||
| 498 | raise socket.timeout("handshake timed out") | ||
| 499 | elif result == SecurityConst.errSSLServerAuthCompleted: | ||
| 500 | self._custom_validate(verify, trust_bundle) | ||
| 501 | continue | ||
| 502 | else: | ||
| 503 | _assert_no_error(result) | ||
| 504 | break | ||
| 505 | |||
| 506 | def fileno(self): | ||
| 507 | return self.socket.fileno() | ||
| 508 | |||
| 509 | # Copy-pasted from Python 3.5 source code | ||
| 510 | def _decref_socketios(self): | ||
| 511 | if self._makefile_refs > 0: | ||
| 512 | self._makefile_refs -= 1 | ||
| 513 | if self._closed: | ||
| 514 | self.close() | ||
| 515 | |||
| 516 | def recv(self, bufsiz): | ||
| 517 | buffer = ctypes.create_string_buffer(bufsiz) | ||
| 518 | bytes_read = self.recv_into(buffer, bufsiz) | ||
| 519 | data = buffer[:bytes_read] | ||
| 520 | return data | ||
| 521 | |||
| 522 | def recv_into(self, buffer, nbytes=None): | ||
| 523 | # Read short on EOF. | ||
| 524 | if self._closed: | ||
| 525 | return 0 | ||
| 526 | |||
| 527 | if nbytes is None: | ||
| 528 | nbytes = len(buffer) | ||
| 529 | |||
| 530 | buffer = (ctypes.c_char * nbytes).from_buffer(buffer) | ||
| 531 | processed_bytes = ctypes.c_size_t(0) | ||
| 532 | |||
| 533 | with self._raise_on_error(): | ||
| 534 | result = Security.SSLRead( | ||
| 535 | self.context, buffer, nbytes, ctypes.byref(processed_bytes) | ||
| 536 | ) | ||
| 537 | |||
| 538 | # There are some result codes that we want to treat as "not always | ||
| 539 | # errors". Specifically, those are errSSLWouldBlock, | ||
| 540 | # errSSLClosedGraceful, and errSSLClosedNoNotify. | ||
| 541 | if result == SecurityConst.errSSLWouldBlock: | ||
| 542 | # If we didn't process any bytes, then this was just a time out. | ||
| 543 | # However, we can get errSSLWouldBlock in situations when we *did* | ||
| 544 | # read some data, and in those cases we should just read "short" | ||
| 545 | # and return. | ||
| 546 | if processed_bytes.value == 0: | ||
| 547 | # Timed out, no data read. | ||
| 548 | raise socket.timeout("recv timed out") | ||
| 549 | elif result in (SecurityConst.errSSLClosedGraceful, SecurityConst.errSSLClosedNoNotify): | ||
| 550 | # The remote peer has closed this connection. We should do so as | ||
| 551 | # well. Note that we don't actually return here because in | ||
| 552 | # principle this could actually be fired along with return data. | ||
| 553 | # It's unlikely though. | ||
| 554 | self.close() | ||
| 555 | else: | ||
| 556 | _assert_no_error(result) | ||
| 557 | |||
| 558 | # Ok, we read and probably succeeded. We should return whatever data | ||
| 559 | # was actually read. | ||
| 560 | return processed_bytes.value | ||
| 561 | |||
| 562 | def settimeout(self, timeout): | ||
| 563 | self._timeout = timeout | ||
| 564 | |||
| 565 | def gettimeout(self): | ||
| 566 | return self._timeout | ||
| 567 | |||
| 568 | def send(self, data): | ||
| 569 | processed_bytes = ctypes.c_size_t(0) | ||
| 570 | |||
| 571 | with self._raise_on_error(): | ||
| 572 | result = Security.SSLWrite( | ||
| 573 | self.context, data, len(data), ctypes.byref(processed_bytes) | ||
| 574 | ) | ||
| 575 | |||
| 576 | if result == SecurityConst.errSSLWouldBlock and processed_bytes.value == 0: | ||
| 577 | # Timed out | ||
| 578 | raise socket.timeout("send timed out") | ||
| 579 | else: | ||
| 580 | _assert_no_error(result) | ||
| 581 | |||
| 582 | # We sent, and probably succeeded. Tell them how much we sent. | ||
| 583 | return processed_bytes.value | ||
| 584 | |||
| 585 | def sendall(self, data): | ||
| 586 | total_sent = 0 | ||
| 587 | while total_sent < len(data): | ||
| 588 | sent = self.send(data[total_sent:total_sent + SSL_WRITE_BLOCKSIZE]) | ||
| 589 | total_sent += sent | ||
| 590 | |||
| 591 | def shutdown(self): | ||
| 592 | with self._raise_on_error(): | ||
| 593 | Security.SSLClose(self.context) | ||
| 594 | |||
| 595 | def close(self): | ||
| 596 | # TODO: should I do clean shutdown here? Do I have to? | ||
| 597 | if self._makefile_refs < 1: | ||
| 598 | self._closed = True | ||
| 599 | if self.context: | ||
| 600 | CoreFoundation.CFRelease(self.context) | ||
| 601 | self.context = None | ||
| 602 | if self._client_cert_chain: | ||
| 603 | CoreFoundation.CFRelease(self._client_cert_chain) | ||
| 604 | self._client_cert_chain = None | ||
| 605 | if self._keychain: | ||
| 606 | Security.SecKeychainDelete(self._keychain) | ||
| 607 | CoreFoundation.CFRelease(self._keychain) | ||
| 608 | shutil.rmtree(self._keychain_dir) | ||
| 609 | self._keychain = self._keychain_dir = None | ||
| 610 | return self.socket.close() | ||
| 611 | else: | ||
| 612 | self._makefile_refs -= 1 | ||
| 613 | |||
| 614 | def getpeercert(self, binary_form=False): | ||
| 615 | # Urgh, annoying. | ||
| 616 | # | ||
| 617 | # Here's how we do this: | ||
| 618 | # | ||
| 619 | # 1. Call SSLCopyPeerTrust to get hold of the trust object for this | ||
| 620 | # connection. | ||
| 621 | # 2. Call SecTrustGetCertificateAtIndex for index 0 to get the leaf. | ||
| 622 | # 3. To get the CN, call SecCertificateCopyCommonName and process that | ||
| 623 | # string so that it's of the appropriate type. | ||
| 624 | # 4. To get the SAN, we need to do something a bit more complex: | ||
| 625 | # a. Call SecCertificateCopyValues to get the data, requesting | ||
| 626 | # kSecOIDSubjectAltName. | ||
| 627 | # b. Mess about with this dictionary to try to get the SANs out. | ||
| 628 | # | ||
| 629 | # This is gross. Really gross. It's going to be a few hundred LoC extra | ||
| 630 | # just to repeat something that SecureTransport can *already do*. So my | ||
| 631 | # operating assumption at this time is that what we want to do is | ||
| 632 | # instead to just flag to urllib3 that it shouldn't do its own hostname | ||
| 633 | # validation when using SecureTransport. | ||
| 634 | if not binary_form: | ||
| 635 | raise ValueError( | ||
| 636 | "SecureTransport only supports dumping binary certs" | ||
| 637 | ) | ||
| 638 | trust = Security.SecTrustRef() | ||
| 639 | certdata = None | ||
| 640 | der_bytes = None | ||
| 641 | |||
| 642 | try: | ||
| 643 | # Grab the trust store. | ||
| 644 | result = Security.SSLCopyPeerTrust( | ||
| 645 | self.context, ctypes.byref(trust) | ||
| 646 | ) | ||
| 647 | _assert_no_error(result) | ||
| 648 | if not trust: | ||
| 649 | # Probably we haven't done the handshake yet. No biggie. | ||
| 650 | return None | ||
| 651 | |||
| 652 | cert_count = Security.SecTrustGetCertificateCount(trust) | ||
| 653 | if not cert_count: | ||
| 654 | # Also a case that might happen if we haven't handshaked. | ||
| 655 | # Handshook? Handshaken? | ||
| 656 | return None | ||
| 657 | |||
| 658 | leaf = Security.SecTrustGetCertificateAtIndex(trust, 0) | ||
| 659 | assert leaf | ||
| 660 | |||
| 661 | # Ok, now we want the DER bytes. | ||
| 662 | certdata = Security.SecCertificateCopyData(leaf) | ||
| 663 | assert certdata | ||
| 664 | |||
| 665 | data_length = CoreFoundation.CFDataGetLength(certdata) | ||
| 666 | data_buffer = CoreFoundation.CFDataGetBytePtr(certdata) | ||
| 667 | der_bytes = ctypes.string_at(data_buffer, data_length) | ||
| 668 | finally: | ||
| 669 | if certdata: | ||
| 670 | CoreFoundation.CFRelease(certdata) | ||
| 671 | if trust: | ||
| 672 | CoreFoundation.CFRelease(trust) | ||
| 673 | |||
| 674 | return der_bytes | ||
| 675 | |||
| 676 | def _reuse(self): | ||
| 677 | self._makefile_refs += 1 | ||
| 678 | |||
| 679 | def _drop(self): | ||
| 680 | if self._makefile_refs < 1: | ||
| 681 | self.close() | ||
| 682 | else: | ||
| 683 | self._makefile_refs -= 1 | ||
| 684 | |||
| 685 | |||
| 686 | if _fileobject: # Platform-specific: Python 2 | ||
| 687 | def makefile(self, mode, bufsize=-1): | ||
| 688 | self._makefile_refs += 1 | ||
| 689 | return _fileobject(self, mode, bufsize, close=True) | ||
| 690 | else: # Platform-specific: Python 3 | ||
| 691 | def makefile(self, mode="r", buffering=None, *args, **kwargs): | ||
| 692 | # We disable buffering with SecureTransport because it conflicts with | ||
| 693 | # the buffering that ST does internally (see issue #1153 for more). | ||
| 694 | buffering = 0 | ||
| 695 | return backport_makefile(self, mode, buffering, *args, **kwargs) | ||
| 696 | |||
| 697 | WrappedSocket.makefile = makefile | ||
| 698 | |||
| 699 | |||
| 700 | class SecureTransportContext(object): | ||
| 701 | """ | ||
| 702 | I am a wrapper class for the SecureTransport library, to translate the | ||
| 703 | interface of the standard library ``SSLContext`` object to calls into | ||
| 704 | SecureTransport. | ||
| 705 | """ | ||
| 706 | def __init__(self, protocol): | ||
| 707 | self._min_version, self._max_version = _protocol_to_min_max[protocol] | ||
| 708 | self._options = 0 | ||
| 709 | self._verify = False | ||
| 710 | self._trust_bundle = None | ||
| 711 | self._client_cert = None | ||
| 712 | self._client_key = None | ||
| 713 | self._client_key_passphrase = None | ||
| 714 | |||
| 715 | @property | ||
| 716 | def check_hostname(self): | ||
| 717 | """ | ||
| 718 | SecureTransport cannot have its hostname checking disabled. For more, | ||
| 719 | see the comment on getpeercert() in this file. | ||
| 720 | """ | ||
| 721 | return True | ||
| 722 | |||
| 723 | @check_hostname.setter | ||
| 724 | def check_hostname(self, value): | ||
| 725 | """ | ||
| 726 | SecureTransport cannot have its hostname checking disabled. For more, | ||
| 727 | see the comment on getpeercert() in this file. | ||
| 728 | """ | ||
| 729 | pass | ||
| 730 | |||
| 731 | @property | ||
| 732 | def options(self): | ||
| 733 | # TODO: Well, crap. | ||
| 734 | # | ||
| 735 | # So this is the bit of the code that is the most likely to cause us | ||
| 736 | # trouble. Essentially we need to enumerate all of the SSL options that | ||
| 737 | # users might want to use and try to see if we can sensibly translate | ||
| 738 | # them, or whether we should just ignore them. | ||
| 739 | return self._options | ||
| 740 | |||
| 741 | @options.setter | ||
| 742 | def options(self, value): | ||
| 743 | # TODO: Update in line with above. | ||
| 744 | self._options = value | ||
| 745 | |||
| 746 | @property | ||
| 747 | def verify_mode(self): | ||
| 748 | return ssl.CERT_REQUIRED if self._verify else ssl.CERT_NONE | ||
| 749 | |||
| 750 | @verify_mode.setter | ||
| 751 | def verify_mode(self, value): | ||
| 752 | self._verify = (value == ssl.CERT_REQUIRED) | ||
| 753 | |||
| 754 | def set_default_verify_paths(self): | ||
| 755 | # So, this has to do something a bit weird. Specifically, what it does | ||
| 756 | # is nothing. | ||
| 757 | # | ||
| 758 | # This means that, if we had previously had load_verify_locations | ||
| 759 | # called, this does not undo that. We need to do that because it turns | ||
| 760 | # out that the rest of the urllib3 code will attempt to load the | ||
| 761 | # default verify paths if it hasn't been told about any paths, even if | ||
| 762 | # the context itself was configured sometime earlier. We resolve that | ||
| 763 | # by just ignoring it. | ||
| 764 | pass | ||
| 765 | |||
| 766 | def load_default_certs(self): | ||
| 767 | return self.set_default_verify_paths() | ||
| 768 | |||
| 769 | def set_ciphers(self, ciphers): | ||
| 770 | # For now, we just require the default cipher string. | ||
| 771 | if ciphers != util.ssl_.DEFAULT_CIPHERS: | ||
| 772 | raise ValueError( | ||
| 773 | "SecureTransport doesn't support custom cipher strings" | ||
| 774 | ) | ||
| 775 | |||
| 776 | def load_verify_locations(self, cafile=None, capath=None, cadata=None): | ||
| 777 | # OK, we only really support cadata and cafile. | ||
| 778 | if capath is not None: | ||
| 779 | raise ValueError( | ||
| 780 | "SecureTransport does not support cert directories" | ||
| 781 | ) | ||
| 782 | |||
| 783 | self._trust_bundle = cafile or cadata | ||
| 784 | |||
| 785 | def load_cert_chain(self, certfile, keyfile=None, password=None): | ||
| 786 | self._client_cert = certfile | ||
| 787 | self._client_key = keyfile | ||
| 788 | self._client_key_passphrase = password  # handshake() reads _client_key_passphrase | ||
| 789 | |||
| 790 | def wrap_socket(self, sock, server_side=False, | ||
| 791 | do_handshake_on_connect=True, suppress_ragged_eofs=True, | ||
| 792 | server_hostname=None): | ||
| 793 | # So, what do we do here? Firstly, we assert some properties. This is a | ||
| 794 | # stripped down shim, so there is some functionality we don't support. | ||
| 795 | # See PEP 543 for the real deal. | ||
| 796 | assert not server_side | ||
| 797 | assert do_handshake_on_connect | ||
| 798 | assert suppress_ragged_eofs | ||
| 799 | |||
| 800 | # Ok, we're good to go. Now we want to create the wrapped socket object | ||
| 801 | # and store it in the appropriate place. | ||
| 802 | wrapped_socket = WrappedSocket(sock) | ||
| 803 | |||
| 804 | # Now we can handshake | ||
| 805 | wrapped_socket.handshake( | ||
| 806 | server_hostname, self._verify, self._trust_bundle, | ||
| 807 | self._min_version, self._max_version, self._client_cert, | ||
| 808 | self._client_key, self._client_key_passphrase | ||
| 809 | ) | ||
| 810 | return wrapped_socket | ||
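For completeness, a hypothetical direct use of `SecureTransportContext` (normally it is installed wholesale via `inject_into_urllib3()` rather than instantiated by hand; macOS only, host choice arbitrary). Per the `getpeercert()` comment above, only the binary form is available:

```python
import socket
import ssl

from pip._vendor.urllib3.contrib.securetransport import SecureTransportContext

ctx = SecureTransportContext(ssl.PROTOCOL_SSLv23)  # TLSv1..TLSv1.2 range
ctx.verify_mode = ssl.CERT_REQUIRED                # validate via system trust store

raw = socket.create_connection(("pypi.org", 443))
tls = ctx.wrap_socket(raw, server_hostname="pypi.org")  # runs handshake()
try:
    der = tls.getpeercert(binary_form=True)  # DER bytes; binary_form=False raises
    print(len(der))
finally:
    tls.close()
```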
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/contrib/socks.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/contrib/socks.py new file mode 100644 index 0000000..6c99a75 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/contrib/socks.py | |||
| @@ -0,0 +1,188 @@ | |||
| 1 | # -*- coding: utf-8 -*- | ||
| 2 | """ | ||
| 3 | This module contains provisional support for SOCKS proxies from within | ||
| 4 | urllib3. This module supports SOCKS4 (specifically the SOCKS4A variant) and | ||
| 5 | SOCKS5. To enable its functionality, either install PySocks or install this | ||
| 6 | module with the ``socks`` extra. | ||
| 7 | |||
| 8 | The SOCKS implementation supports the full range of urllib3 features. It also | ||
| 9 | supports the following SOCKS features: | ||
| 10 | |||
| 11 | - SOCKS4 | ||
| 12 | - SOCKS4a | ||
| 13 | - SOCKS5 | ||
| 14 | - Usernames and passwords for the SOCKS proxy | ||
| 15 | |||
| 16 | Known Limitations: | ||
| 17 | |||
| 18 | - Currently PySocks does not support contacting remote websites via literal | ||
| 19 | IPv6 addresses. Any such connection attempt will fail. You must use a domain | ||
| 20 | name. | ||
| 21 | - Currently PySocks does not support IPv6 connections to the SOCKS proxy. Any | ||
| 22 | such connection attempt will fail. | ||
| 23 | """ | ||
| 24 | from __future__ import absolute_import | ||
| 25 | |||
| 26 | try: | ||
| 27 | import socks | ||
| 28 | except ImportError: | ||
| 29 | import warnings | ||
| 30 | from ..exceptions import DependencyWarning | ||
| 31 | |||
| 32 | warnings.warn(( | ||
| 33 | 'SOCKS support in urllib3 requires the installation of optional ' | ||
| 34 | 'dependencies: specifically, PySocks. For more information, see ' | ||
| 35 | 'https://urllib3.readthedocs.io/en/latest/contrib.html#socks-proxies' | ||
| 36 | ), | ||
| 37 | DependencyWarning | ||
| 38 | ) | ||
| 39 | raise | ||
| 40 | |||
| 41 | from socket import error as SocketError, timeout as SocketTimeout | ||
| 42 | |||
| 43 | from ..connection import ( | ||
| 44 | HTTPConnection, HTTPSConnection | ||
| 45 | ) | ||
| 46 | from ..connectionpool import ( | ||
| 47 | HTTPConnectionPool, HTTPSConnectionPool | ||
| 48 | ) | ||
| 49 | from ..exceptions import ConnectTimeoutError, NewConnectionError | ||
| 50 | from ..poolmanager import PoolManager | ||
| 51 | from ..util.url import parse_url | ||
| 52 | |||
| 53 | try: | ||
| 54 | import ssl | ||
| 55 | except ImportError: | ||
| 56 | ssl = None | ||
| 57 | |||
| 58 | |||
| 59 | class SOCKSConnection(HTTPConnection): | ||
| 60 | """ | ||
| 61 | A plain-text HTTP connection that connects via a SOCKS proxy. | ||
| 62 | """ | ||
| 63 | def __init__(self, *args, **kwargs): | ||
| 64 | self._socks_options = kwargs.pop('_socks_options') | ||
| 65 | super(SOCKSConnection, self).__init__(*args, **kwargs) | ||
| 66 | |||
| 67 | def _new_conn(self): | ||
| 68 | """ | ||
| 69 | Establish a new connection via the SOCKS proxy. | ||
| 70 | """ | ||
| 71 | extra_kw = {} | ||
| 72 | if self.source_address: | ||
| 73 | extra_kw['source_address'] = self.source_address | ||
| 74 | |||
| 75 | if self.socket_options: | ||
| 76 | extra_kw['socket_options'] = self.socket_options | ||
| 77 | |||
| 78 | try: | ||
| 79 | conn = socks.create_connection( | ||
| 80 | (self.host, self.port), | ||
| 81 | proxy_type=self._socks_options['socks_version'], | ||
| 82 | proxy_addr=self._socks_options['proxy_host'], | ||
| 83 | proxy_port=self._socks_options['proxy_port'], | ||
| 84 | proxy_username=self._socks_options['username'], | ||
| 85 | proxy_password=self._socks_options['password'], | ||
| 86 | proxy_rdns=self._socks_options['rdns'], | ||
| 87 | timeout=self.timeout, | ||
| 88 | **extra_kw | ||
| 89 | ) | ||
| 90 | |||
| 91 | except SocketTimeout as e: | ||
| 92 | raise ConnectTimeoutError( | ||
| 93 | self, "Connection to %s timed out. (connect timeout=%s)" % | ||
| 94 | (self.host, self.timeout)) | ||
| 95 | |||
| 96 | except socks.ProxyError as e: | ||
| 97 | # This is fragile as hell, but it seems to be the only way to raise | ||
| 98 | # useful errors here. | ||
| 99 | if e.socket_err: | ||
| 100 | error = e.socket_err | ||
| 101 | if isinstance(error, SocketTimeout): | ||
| 102 | raise ConnectTimeoutError( | ||
| 103 | self, | ||
| 104 | "Connection to %s timed out. (connect timeout=%s)" % | ||
| 105 | (self.host, self.timeout) | ||
| 106 | ) | ||
| 107 | else: | ||
| 108 | raise NewConnectionError( | ||
| 109 | self, | ||
| 110 | "Failed to establish a new connection: %s" % error | ||
| 111 | ) | ||
| 112 | else: | ||
| 113 | raise NewConnectionError( | ||
| 114 | self, | ||
| 115 | "Failed to establish a new connection: %s" % e | ||
| 116 | ) | ||
| 117 | |||
| 118 | except SocketError as e: # Defensive: PySocks should catch all these. | ||
| 119 | raise NewConnectionError( | ||
| 120 | self, "Failed to establish a new connection: %s" % e) | ||
| 121 | |||
| 122 | return conn | ||
| 123 | |||
| 124 | |||
| 125 | # We don't need to duplicate the Verified/Unverified distinction from | ||
| 126 | # urllib3/connection.py here because the HTTPSConnection will already have been | ||
| 127 | # correctly set to either the Verified or Unverified form by that module. This | ||
| 128 | # means the SOCKSHTTPSConnection will automatically be the correct type. | ||
| 129 | class SOCKSHTTPSConnection(SOCKSConnection, HTTPSConnection): | ||
| 130 | pass | ||
| 131 | |||
| 132 | |||
| 133 | class SOCKSHTTPConnectionPool(HTTPConnectionPool): | ||
| 134 | ConnectionCls = SOCKSConnection | ||
| 135 | |||
| 136 | |||
| 137 | class SOCKSHTTPSConnectionPool(HTTPSConnectionPool): | ||
| 138 | ConnectionCls = SOCKSHTTPSConnection | ||
| 139 | |||
| 140 | |||
| 141 | class SOCKSProxyManager(PoolManager): | ||
| 142 | """ | ||
| 143 | A version of the urllib3 ProxyManager that routes connections via the | ||
| 144 | defined SOCKS proxy. | ||
| 145 | """ | ||
| 146 | pool_classes_by_scheme = { | ||
| 147 | 'http': SOCKSHTTPConnectionPool, | ||
| 148 | 'https': SOCKSHTTPSConnectionPool, | ||
| 149 | } | ||
| 150 | |||
| 151 | def __init__(self, proxy_url, username=None, password=None, | ||
| 152 | num_pools=10, headers=None, **connection_pool_kw): | ||
| 153 | parsed = parse_url(proxy_url) | ||
| 154 | |||
| 155 | if parsed.scheme == 'socks5': | ||
| 156 | socks_version = socks.PROXY_TYPE_SOCKS5 | ||
| 157 | rdns = False | ||
| 158 | elif parsed.scheme == 'socks5h': | ||
| 159 | socks_version = socks.PROXY_TYPE_SOCKS5 | ||
| 160 | rdns = True | ||
| 161 | elif parsed.scheme == 'socks4': | ||
| 162 | socks_version = socks.PROXY_TYPE_SOCKS4 | ||
| 163 | rdns = False | ||
| 164 | elif parsed.scheme == 'socks4a': | ||
| 165 | socks_version = socks.PROXY_TYPE_SOCKS4 | ||
| 166 | rdns = True | ||
| 167 | else: | ||
| 168 | raise ValueError( | ||
| 169 | "Unable to determine SOCKS version from %s" % proxy_url | ||
| 170 | ) | ||
| 171 | |||
| 172 | self.proxy_url = proxy_url | ||
| 173 | |||
| 174 | socks_options = { | ||
| 175 | 'socks_version': socks_version, | ||
| 176 | 'proxy_host': parsed.host, | ||
| 177 | 'proxy_port': parsed.port, | ||
| 178 | 'username': username, | ||
| 179 | 'password': password, | ||
| 180 | 'rdns': rdns | ||
| 181 | } | ||
| 182 | connection_pool_kw['_socks_options'] = socks_options | ||
| 183 | |||
| 184 | super(SOCKSProxyManager, self).__init__( | ||
| 185 | num_pools, headers, **connection_pool_kw | ||
| 186 | ) | ||
| 187 | |||
| 188 | self.pool_classes_by_scheme = SOCKSProxyManager.pool_classes_by_scheme | ||
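An assumed usage sketch (requires PySocks and a reachable SOCKS proxy; the proxy URL and credentials below are placeholders). The scheme encodes the rdns choice from the constructor above: `socks5h://` resolves hostnames through the proxy, `socks5://` resolves them locally:

```python
from pip._vendor.urllib3.contrib.socks import SOCKSProxyManager

proxy = SOCKSProxyManager(
    "socks5h://localhost:1080",  # placeholder proxy; socks5h => rdns=True
    username="user",             # optional, forwarded to PySocks
    password="secret",
)
resp = proxy.request("GET", "https://example.com/")  # inherited from PoolManager
print(resp.status)
```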
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/exceptions.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/exceptions.py new file mode 100644 index 0000000..670a63e --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/exceptions.py | |||
| @@ -0,0 +1,246 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | from .packages.six.moves.http_client import ( | ||
| 3 | IncompleteRead as httplib_IncompleteRead | ||
| 4 | ) | ||
| 5 | # Base Exceptions | ||
| 6 | |||
| 7 | |||
| 8 | class HTTPError(Exception): | ||
| 9 | "Base exception used by this module." | ||
| 10 | pass | ||
| 11 | |||
| 12 | |||
| 13 | class HTTPWarning(Warning): | ||
| 14 | "Base warning used by this module." | ||
| 15 | pass | ||
| 16 | |||
| 17 | |||
| 18 | class PoolError(HTTPError): | ||
| 19 | "Base exception for errors caused within a pool." | ||
| 20 | def __init__(self, pool, message): | ||
| 21 | self.pool = pool | ||
| 22 | HTTPError.__init__(self, "%s: %s" % (pool, message)) | ||
| 23 | |||
| 24 | def __reduce__(self): | ||
| 25 | # For pickling purposes. | ||
| 26 | return self.__class__, (None, None) | ||
| 27 | |||
| 28 | |||
| 29 | class RequestError(PoolError): | ||
| 30 | "Base exception for PoolErrors that have associated URLs." | ||
| 31 | def __init__(self, pool, url, message): | ||
| 32 | self.url = url | ||
| 33 | PoolError.__init__(self, pool, message) | ||
| 34 | |||
| 35 | def __reduce__(self): | ||
| 36 | # For pickling purposes. | ||
| 37 | return self.__class__, (None, self.url, None) | ||
| 38 | |||
| 39 | |||
| 40 | class SSLError(HTTPError): | ||
| 41 | "Raised when SSL certificate fails in an HTTPS connection." | ||
| 42 | pass | ||
| 43 | |||
| 44 | |||
| 45 | class ProxyError(HTTPError): | ||
| 46 | "Raised when the connection to a proxy fails." | ||
| 47 | pass | ||
| 48 | |||
| 49 | |||
| 50 | class DecodeError(HTTPError): | ||
| 51 | "Raised when automatic decoding based on Content-Type fails." | ||
| 52 | pass | ||
| 53 | |||
| 54 | |||
| 55 | class ProtocolError(HTTPError): | ||
| 56 | "Raised when something unexpected happens mid-request/response." | ||
| 57 | pass | ||
| 58 | |||
| 59 | |||
| 60 | #: Renamed to ProtocolError but aliased for backwards compatibility. | ||
| 61 | ConnectionError = ProtocolError | ||
| 62 | |||
| 63 | |||
| 64 | # Leaf Exceptions | ||
| 65 | |||
| 66 | class MaxRetryError(RequestError): | ||
| 67 | """Raised when the maximum number of retries is exceeded. | ||
| 68 | |||
| 69 | :param pool: The connection pool | ||
| 70 | :type pool: :class:`~urllib3.connectionpool.HTTPConnectionPool` | ||
| 71 | :param string url: The requested URL | ||
| 72 | :param exceptions.Exception reason: The underlying error | ||
| 73 | |||
| 74 | """ | ||
| 75 | |||
| 76 | def __init__(self, pool, url, reason=None): | ||
| 77 | self.reason = reason | ||
| 78 | |||
| 79 | message = "Max retries exceeded with url: %s (Caused by %r)" % ( | ||
| 80 | url, reason) | ||
| 81 | |||
| 82 | RequestError.__init__(self, pool, url, message) | ||
| 83 | |||
| 84 | |||
| 85 | class HostChangedError(RequestError): | ||
| 86 | "Raised when an existing pool gets a request for a foreign host." | ||
| 87 | |||
| 88 | def __init__(self, pool, url, retries=3): | ||
| 89 | message = "Tried to open a foreign host with url: %s" % url | ||
| 90 | RequestError.__init__(self, pool, url, message) | ||
| 91 | self.retries = retries | ||
| 92 | |||
| 93 | |||
| 94 | class TimeoutStateError(HTTPError): | ||
| 95 | """ Raised when passing an invalid state to a timeout """ | ||
| 96 | pass | ||
| 97 | |||
| 98 | |||
| 99 | class TimeoutError(HTTPError): | ||
| 100 | """ Raised when a socket timeout error occurs. | ||
| 101 | |||
| 102 | Catching this error will catch both :exc:`ReadTimeoutErrors | ||
| 103 | <ReadTimeoutError>` and :exc:`ConnectTimeoutErrors <ConnectTimeoutError>`. | ||
| 104 | """ | ||
| 105 | pass | ||
| 106 | |||
| 107 | |||
| 108 | class ReadTimeoutError(TimeoutError, RequestError): | ||
| 109 | "Raised when a socket timeout occurs while receiving data from a server" | ||
| 110 | pass | ||
| 111 | |||
| 112 | |||
| 113 | # This timeout error does not have a URL attached and needs to inherit from the | ||
| 114 | # base HTTPError | ||
| 115 | class ConnectTimeoutError(TimeoutError): | ||
| 116 | "Raised when a socket timeout occurs while connecting to a server" | ||
| 117 | pass | ||
| 118 | |||
| 119 | |||
| 120 | class NewConnectionError(ConnectTimeoutError, PoolError): | ||
| 121 | "Raised when we fail to establish a new connection. Usually ECONNREFUSED." | ||
| 122 | pass | ||
| 123 | |||
| 124 | |||
| 125 | class EmptyPoolError(PoolError): | ||
| 126 | "Raised when a pool runs out of connections and no more are allowed." | ||
| 127 | pass | ||
| 128 | |||
| 129 | |||
| 130 | class ClosedPoolError(PoolError): | ||
| 131 | "Raised when a request enters a pool after the pool has been closed." | ||
| 132 | pass | ||
| 133 | |||
| 134 | |||
| 135 | class LocationValueError(ValueError, HTTPError): | ||
| 136 | "Raised when there is something wrong with a given URL input." | ||
| 137 | pass | ||
| 138 | |||
| 139 | |||
| 140 | class LocationParseError(LocationValueError): | ||
| 141 | "Raised when get_host or similar fails to parse the URL input." | ||
| 142 | |||
| 143 | def __init__(self, location): | ||
| 144 | message = "Failed to parse: %s" % location | ||
| 145 | HTTPError.__init__(self, message) | ||
| 146 | |||
| 147 | self.location = location | ||
| 148 | |||
| 149 | |||
| 150 | class ResponseError(HTTPError): | ||
| 151 | "Used as a container for an error reason supplied in a MaxRetryError." | ||
| 152 | GENERIC_ERROR = 'too many error responses' | ||
| 153 | SPECIFIC_ERROR = 'too many {status_code} error responses' | ||
| 154 | |||
| 155 | |||
| 156 | class SecurityWarning(HTTPWarning): | ||
| 157 | "Warned when perfoming security reducing actions" | ||
| 158 | pass | ||
| 159 | |||
| 160 | |||
| 161 | class SubjectAltNameWarning(SecurityWarning): | ||
| 162 | "Warned when connecting to a host with a certificate missing a SAN." | ||
| 163 | pass | ||
| 164 | |||
| 165 | |||
| 166 | class InsecureRequestWarning(SecurityWarning): | ||
| 167 | "Warned when making an unverified HTTPS request." | ||
| 168 | pass | ||
| 169 | |||
| 170 | |||
| 171 | class SystemTimeWarning(SecurityWarning): | ||
| 172 | "Warned when system time is suspected to be wrong" | ||
| 173 | pass | ||
| 174 | |||
| 175 | |||
| 176 | class InsecurePlatformWarning(SecurityWarning): | ||
| 177 | "Warned when certain SSL configuration is not available on a platform." | ||
| 178 | pass | ||
| 179 | |||
| 180 | |||
| 181 | class SNIMissingWarning(HTTPWarning): | ||
| 182 | "Warned when making a HTTPS request without SNI available." | ||
| 183 | pass | ||
| 184 | |||
| 185 | |||
| 186 | class DependencyWarning(HTTPWarning): | ||
| 187 | """ | ||
| 188 | Warned when an attempt is made to import a module with missing optional | ||
| 189 | dependencies. | ||
| 190 | """ | ||
| 191 | pass | ||
| 192 | |||
| 193 | |||
| 194 | class ResponseNotChunked(ProtocolError, ValueError): | ||
| 195 | "Response needs to be chunked in order to read it as chunks." | ||
| 196 | pass | ||
| 197 | |||
| 198 | |||
| 199 | class BodyNotHttplibCompatible(HTTPError): | ||
| 200 | """ | ||
| 201 | Body should be httplib.HTTPResponse like (have an fp attribute which | ||
| 202 | returns raw chunks) for read_chunked(). | ||
| 203 | """ | ||
| 204 | pass | ||
| 205 | |||
| 206 | |||
| 207 | class IncompleteRead(HTTPError, httplib_IncompleteRead): | ||
| 208 | """ | ||
| 209 | Response length doesn't match expected Content-Length | ||
| 210 | |||
| 211 | Subclass of http_client.IncompleteRead to allow int value | ||
| 212 | for `partial` to avoid creating large objects on streamed | ||
| 213 | reads. | ||
| 214 | """ | ||
| 215 | def __init__(self, partial, expected): | ||
| 216 | super(IncompleteRead, self).__init__(partial, expected) | ||
| 217 | |||
| 218 | def __repr__(self): | ||
| 219 | return ('IncompleteRead(%i bytes read, ' | ||
| 220 | '%i more expected)' % (self.partial, self.expected)) | ||
| 221 | |||
| 222 | |||
| 223 | class InvalidHeader(HTTPError): | ||
| 224 | "The header provided was somehow invalid." | ||
| 225 | pass | ||
| 226 | |||
| 227 | |||
| 228 | class ProxySchemeUnknown(AssertionError, ValueError): | ||
| 229 | "ProxyManager does not support the supplied scheme" | ||
| 230 | # TODO(t-8ch): Stop inheriting from AssertionError in v2.0. | ||
| 231 | |||
| 232 | def __init__(self, scheme): | ||
| 233 | message = "Not supported proxy scheme %s" % scheme | ||
| 234 | super(ProxySchemeUnknown, self).__init__(message) | ||
| 235 | |||
| 236 | |||
| 237 | class HeaderParsingError(HTTPError): | ||
| 238 | "Raised by assert_header_parsing, but we convert it to a log.warning statement." | ||
| 239 | def __init__(self, defects, unparsed_data): | ||
| 240 | message = '%s, unparsed data: %r' % (defects or 'Unknown', unparsed_data) | ||
| 241 | super(HeaderParsingError, self).__init__(message) | ||
| 242 | |||
| 243 | |||
| 244 | class UnrewindableBodyError(HTTPError): | ||
| 245 | "urllib3 encountered an error when trying to rewind a body" | ||
| 246 | pass | ||
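A short sketch of how this hierarchy behaves in practice (the target address is a TEST-NET placeholder assumed to be unreachable):

```python
from pip._vendor import urllib3
from pip._vendor.urllib3 import exceptions

# The compatibility alias and the inheritance noted in the comments above.
assert exceptions.ConnectionError is exceptions.ProtocolError
assert issubclass(exceptions.NewConnectionError, exceptions.ConnectTimeoutError)

http = urllib3.PoolManager()
try:
    http.request("GET", "http://192.0.2.1/", timeout=0.5, retries=1)
except exceptions.MaxRetryError as e:
    # MaxRetryError carries the URL and wraps the underlying leaf exception.
    print(e.url, type(e.reason).__name__)
```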
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/fields.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/fields.py new file mode 100644 index 0000000..8e15621 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/fields.py | |||
| @@ -0,0 +1,178 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | import email.utils | ||
| 3 | import mimetypes | ||
| 4 | |||
| 5 | from .packages import six | ||
| 6 | |||
| 7 | |||
| 8 | def guess_content_type(filename, default='application/octet-stream'): | ||
| 9 | """ | ||
| 10 | Guess the "Content-Type" of a file. | ||
| 11 | |||
| 12 | :param filename: | ||
| 13 | The filename to guess the "Content-Type" of using :mod:`mimetypes`. | ||
| 14 | :param default: | ||
| 15 | If no "Content-Type" can be guessed, default to `default`. | ||
| 16 | """ | ||
| 17 | if filename: | ||
| 18 | return mimetypes.guess_type(filename)[0] or default | ||
| 19 | return default | ||
| 20 | |||
| 21 | |||
| 22 | def format_header_param(name, value): | ||
| 23 | """ | ||
| 24 | Helper function to format and quote a single header parameter. | ||
| 25 | |||
| 26 | Particularly useful for header parameters which might contain | ||
| 27 | non-ASCII values, like file names. This follows RFC 2231, as | ||
| 28 | suggested by RFC 2388 Section 4.4. | ||
| 29 | |||
| 30 | :param name: | ||
| 31 | The name of the parameter, a string expected to be ASCII only. | ||
| 32 | :param value: | ||
| 33 | The value of the parameter, provided as a unicode string. | ||
| 34 | """ | ||
| 35 | if not any(ch in value for ch in '"\\\r\n'): | ||
| 36 | result = '%s="%s"' % (name, value) | ||
| 37 | try: | ||
| 38 | result.encode('ascii') | ||
| 39 | except (UnicodeEncodeError, UnicodeDecodeError): | ||
| 40 | pass | ||
| 41 | else: | ||
| 42 | return result | ||
| 43 | if not six.PY3 and isinstance(value, six.text_type): # Python 2: | ||
| 44 | value = value.encode('utf-8') | ||
| 45 | value = email.utils.encode_rfc2231(value, 'utf-8') | ||
| 46 | value = '%s*=%s' % (name, value) | ||
| 47 | return value | ||
| 48 | |||
| 49 | |||
| 50 | class RequestField(object): | ||
| 51 | """ | ||
| 52 | A data container for request body parameters. | ||
| 53 | |||
| 54 | :param name: | ||
| 55 | The name of this request field. | ||
| 56 | :param data: | ||
| 57 | The data/value body. | ||
| 58 | :param filename: | ||
| 59 | An optional filename of the request field. | ||
| 60 | :param headers: | ||
| 61 | An optional dict-like object of headers to initially use for the field. | ||
| 62 | """ | ||
| 63 | def __init__(self, name, data, filename=None, headers=None): | ||
| 64 | self._name = name | ||
| 65 | self._filename = filename | ||
| 66 | self.data = data | ||
| 67 | self.headers = {} | ||
| 68 | if headers: | ||
| 69 | self.headers = dict(headers) | ||
| 70 | |||
| 71 | @classmethod | ||
| 72 | def from_tuples(cls, fieldname, value): | ||
| 73 | """ | ||
| 74 | A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters. | ||
| 75 | |||
| 76 | Supports constructing :class:`~urllib3.fields.RequestField` from | ||
| 77 | parameter of key/value strings AND key/filetuple. A filetuple is a | ||
| 78 | (filename, data, MIME type) tuple where the MIME type is optional. | ||
| 79 | For example:: | ||
| 80 | |||
| 81 | 'foo': 'bar', | ||
| 82 | 'fakefile': ('foofile.txt', 'contents of foofile'), | ||
| 83 | 'realfile': ('barfile.txt', open('realfile').read()), | ||
| 84 | 'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'), | ||
| 85 | 'nonamefile': 'contents of nonamefile field', | ||
| 86 | |||
| 87 | Field names and filenames must be unicode. | ||
| 88 | """ | ||
| 89 | if isinstance(value, tuple): | ||
| 90 | if len(value) == 3: | ||
| 91 | filename, data, content_type = value | ||
| 92 | else: | ||
| 93 | filename, data = value | ||
| 94 | content_type = guess_content_type(filename) | ||
| 95 | else: | ||
| 96 | filename = None | ||
| 97 | content_type = None | ||
| 98 | data = value | ||
| 99 | |||
| 100 | request_param = cls(fieldname, data, filename=filename) | ||
| 101 | request_param.make_multipart(content_type=content_type) | ||
| 102 | |||
| 103 | return request_param | ||
| 104 | |||
| 105 | def _render_part(self, name, value): | ||
| 106 | """ | ||
| 107 | Overridable helper function to format a single header parameter. | ||
| 108 | |||
| 109 | :param name: | ||
| 110 | The name of the parameter, a string expected to be ASCII only. | ||
| 111 | :param value: | ||
| 112 | The value of the parameter, provided as a unicode string. | ||
| 113 | """ | ||
| 114 | return format_header_param(name, value) | ||
| 115 | |||
| 116 | def _render_parts(self, header_parts): | ||
| 117 | """ | ||
| 118 | Helper function to format and quote a single header. | ||
| 119 | |||
| 120 | Useful for single headers that are composed of multiple items. E.g., | ||
| 121 | 'Content-Disposition' fields. | ||
| 122 | |||
| 123 | :param header_parts: | ||
| 124 | A sequence of (k, v) tuples or a :class:`dict` of (k, v) to format | ||
| 125 | as `k1="v1"; k2="v2"; ...`. | ||
| 126 | """ | ||
| 127 | parts = [] | ||
| 128 | iterable = header_parts | ||
| 129 | if isinstance(header_parts, dict): | ||
| 130 | iterable = header_parts.items() | ||
| 131 | |||
| 132 | for name, value in iterable: | ||
| 133 | if value is not None: | ||
| 134 | parts.append(self._render_part(name, value)) | ||
| 135 | |||
| 136 | return '; '.join(parts) | ||
| 137 | |||
| 138 | def render_headers(self): | ||
| 139 | """ | ||
| 140 | Renders the headers for this request field. | ||
| 141 | """ | ||
| 142 | lines = [] | ||
| 143 | |||
| 144 | sort_keys = ['Content-Disposition', 'Content-Type', 'Content-Location'] | ||
| 145 | for sort_key in sort_keys: | ||
| 146 | if self.headers.get(sort_key, False): | ||
| 147 | lines.append('%s: %s' % (sort_key, self.headers[sort_key])) | ||
| 148 | |||
| 149 | for header_name, header_value in self.headers.items(): | ||
| 150 | if header_name not in sort_keys: | ||
| 151 | if header_value: | ||
| 152 | lines.append('%s: %s' % (header_name, header_value)) | ||
| 153 | |||
| 154 | lines.append('\r\n') | ||
| 155 | return '\r\n'.join(lines) | ||
| 156 | |||
| 157 | def make_multipart(self, content_disposition=None, content_type=None, | ||
| 158 | content_location=None): | ||
| 159 | """ | ||
| 160 | Makes this request field into a multipart request field. | ||
| 161 | |||
| 162 | This method adds the "Content-Disposition", "Content-Type" and | ||
| 163 | "Content-Location" headers to this request field. | ||
| 164 | |||
| 165 | :param content_type: | ||
| 166 | The 'Content-Type' of the request body. | ||
| 167 | :param content_location: | ||
| 168 | The 'Content-Location' of the request body. | ||
| 169 | |||
| 170 | """ | ||
| 171 | self.headers['Content-Disposition'] = content_disposition or 'form-data' | ||
| 172 | self.headers['Content-Disposition'] += '; '.join([ | ||
| 173 | '', self._render_parts( | ||
| 174 | (('name', self._name), ('filename', self._filename)) | ||
| 175 | ) | ||
| 176 | ]) | ||
| 177 | self.headers['Content-Type'] = content_type | ||
| 178 | self.headers['Content-Location'] = content_location | ||
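A short sketch, using only names defined in fields.py above, of how a RequestField renders its multipart headers; from_tuples() already calls make_multipart(), and render_headers() emits headers in the sort_keys order:

    from pip._vendor.urllib3.fields import RequestField

    field = RequestField.from_tuples(
        'realfile', ('report.txt', 'contents of the file', 'text/plain'))
    print(field.render_headers())
    # Content-Disposition: form-data; name="realfile"; filename="report.txt"
    # Content-Type: text/plain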
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/filepost.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/filepost.py new file mode 100644 index 0000000..e53dedc --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/filepost.py | |||
| @@ -0,0 +1,94 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | import codecs | ||
| 3 | |||
| 4 | from uuid import uuid4 | ||
| 5 | from io import BytesIO | ||
| 6 | |||
| 7 | from .packages import six | ||
| 8 | from .packages.six import b | ||
| 9 | from .fields import RequestField | ||
| 10 | |||
| 11 | writer = codecs.lookup('utf-8')[3] | ||
| 12 | |||
| 13 | |||
| 14 | def choose_boundary(): | ||
| 15 | """ | ||
| 16 | Our embarrassingly-simple replacement for mimetools.choose_boundary. | ||
| 17 | """ | ||
| 18 | return uuid4().hex | ||
| 19 | |||
| 20 | |||
| 21 | def iter_field_objects(fields): | ||
| 22 | """ | ||
| 23 | Iterate over fields. | ||
| 24 | |||
| 25 | Supports list of (k, v) tuples and dicts, and lists of | ||
| 26 | :class:`~urllib3.fields.RequestField`. | ||
| 27 | |||
| 28 | """ | ||
| 29 | if isinstance(fields, dict): | ||
| 30 | i = six.iteritems(fields) | ||
| 31 | else: | ||
| 32 | i = iter(fields) | ||
| 33 | |||
| 34 | for field in i: | ||
| 35 | if isinstance(field, RequestField): | ||
| 36 | yield field | ||
| 37 | else: | ||
| 38 | yield RequestField.from_tuples(*field) | ||
| 39 | |||
| 40 | |||
| 41 | def iter_fields(fields): | ||
| 42 | """ | ||
| 43 | .. deprecated:: 1.6 | ||
| 44 | |||
| 45 | Iterate over fields. | ||
| 46 | |||
| 47 | The addition of :class:`~urllib3.fields.RequestField` makes this function | ||
| 48 | obsolete. Instead, use :func:`iter_field_objects`, which returns | ||
| 49 | :class:`~urllib3.fields.RequestField` objects. | ||
| 50 | |||
| 51 | Supports list of (k, v) tuples and dicts. | ||
| 52 | """ | ||
| 53 | if isinstance(fields, dict): | ||
| 54 | return ((k, v) for k, v in six.iteritems(fields)) | ||
| 55 | |||
| 56 | return ((k, v) for k, v in fields) | ||
| 57 | |||
| 58 | |||
| 59 | def encode_multipart_formdata(fields, boundary=None): | ||
| 60 | """ | ||
| 61 | Encode a dictionary of ``fields`` using the multipart/form-data MIME format. | ||
| 62 | |||
| 63 | :param fields: | ||
| 64 | Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`). | ||
| 65 | |||
| 66 | :param boundary: | ||
| 67 | If not specified, then a random boundary will be generated using | ||
| 68 | :func:`choose_boundary` (the module's uuid4-based replacement above). | ||
| 69 | """ | ||
| 70 | body = BytesIO() | ||
| 71 | if boundary is None: | ||
| 72 | boundary = choose_boundary() | ||
| 73 | |||
| 74 | for field in iter_field_objects(fields): | ||
| 75 | body.write(b('--%s\r\n' % (boundary))) | ||
| 76 | |||
| 77 | writer(body).write(field.render_headers()) | ||
| 78 | data = field.data | ||
| 79 | |||
| 80 | if isinstance(data, int): | ||
| 81 | data = str(data) # Backwards compatibility | ||
| 82 | |||
| 83 | if isinstance(data, six.text_type): | ||
| 84 | writer(body).write(data) | ||
| 85 | else: | ||
| 86 | body.write(data) | ||
| 87 | |||
| 88 | body.write(b'\r\n') | ||
| 89 | |||
| 90 | body.write(b('--%s--\r\n' % (boundary))) | ||
| 91 | |||
| 92 | content_type = str('multipart/form-data; boundary=%s' % boundary) | ||
| 93 | |||
| 94 | return body.getvalue(), content_type | ||
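And a hedged sketch of this module's entry point: encode_multipart_formdata() accepts the same dict shapes that RequestField.from_tuples() documents; the fixed boundary below is only there to keep the output deterministic:

    from pip._vendor.urllib3.filepost import encode_multipart_formdata

    body, content_type = encode_multipart_formdata(
        {'foo': 'bar',
         'upload': ('hello.txt', b'hello world', 'text/plain')},
        boundary='deadbeef')
    print(content_type)   # multipart/form-data; boundary=deadbeef
    print(body.decode())  # --deadbeef ... --deadbeef--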
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/packages/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/packages/__init__.py new file mode 100644 index 0000000..324c551 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/packages/__init__.py | |||
| @@ -0,0 +1,5 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | |||
| 3 | from . import ssl_match_hostname | ||
| 4 | |||
| 5 | __all__ = ('ssl_match_hostname', ) | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/packages/backports/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/packages/backports/__init__.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/packages/backports/__init__.py | |||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/packages/backports/makefile.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/packages/backports/makefile.py new file mode 100644 index 0000000..00dee0b --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/packages/backports/makefile.py | |||
| @@ -0,0 +1,53 @@ | |||
| 1 | # -*- coding: utf-8 -*- | ||
| 2 | """ | ||
| 3 | backports.makefile | ||
| 4 | ~~~~~~~~~~~~~~~~~~ | ||
| 5 | |||
| 6 | Backports the Python 3 ``socket.makefile`` method for use with anything that | ||
| 7 | wants to create a "fake" socket object. | ||
| 8 | """ | ||
| 9 | import io | ||
| 10 | |||
| 11 | from socket import SocketIO | ||
| 12 | |||
| 13 | |||
| 14 | def backport_makefile(self, mode="r", buffering=None, encoding=None, | ||
| 15 | errors=None, newline=None): | ||
| 16 | """ | ||
| 17 | Backport of ``socket.makefile`` from Python 3.5. | ||
| 18 | """ | ||
| 19 | if not set(mode) <= set(["r", "w", "b"]): | ||
| 20 | raise ValueError( | ||
| 21 | "invalid mode %r (only r, w, b allowed)" % (mode,) | ||
| 22 | ) | ||
| 23 | writing = "w" in mode | ||
| 24 | reading = "r" in mode or not writing | ||
| 25 | assert reading or writing | ||
| 26 | binary = "b" in mode | ||
| 27 | rawmode = "" | ||
| 28 | if reading: | ||
| 29 | rawmode += "r" | ||
| 30 | if writing: | ||
| 31 | rawmode += "w" | ||
| 32 | raw = SocketIO(self, rawmode) | ||
| 33 | self._makefile_refs += 1 | ||
| 34 | if buffering is None: | ||
| 35 | buffering = -1 | ||
| 36 | if buffering < 0: | ||
| 37 | buffering = io.DEFAULT_BUFFER_SIZE | ||
| 38 | if buffering == 0: | ||
| 39 | if not binary: | ||
| 40 | raise ValueError("unbuffered streams must be binary") | ||
| 41 | return raw | ||
| 42 | if reading and writing: | ||
| 43 | buffer = io.BufferedRWPair(raw, raw, buffering) | ||
| 44 | elif reading: | ||
| 45 | buffer = io.BufferedReader(raw, buffering) | ||
| 46 | else: | ||
| 47 | assert writing | ||
| 48 | buffer = io.BufferedWriter(raw, buffering) | ||
| 49 | if binary: | ||
| 50 | return buffer | ||
| 51 | text = io.TextIOWrapper(buffer, encoding, errors, newline) | ||
| 52 | text.mode = mode | ||
| 53 | return text | ||
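Note that backport_makefile() is written as an unbound method: it takes `self` and bumps `self._makefile_refs`. A hedged sketch of how a socket wrapper might adopt it; WrappedSocket is hypothetical, and in practice the wrapper must also supply the recv_into()/send() methods that SocketIO calls:

    from pip._vendor.urllib3.packages.backports.makefile import backport_makefile

    class WrappedSocket(object):  # hypothetical socket-like wrapper
        def __init__(self, sock):
            self.socket = sock
            self._makefile_refs = 0  # incremented by backport_makefile

        # Adopt the backport as makefile(), as urllib3's TLS shims do.
        makefile = backport_makefile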
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/packages/ordered_dict.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/packages/ordered_dict.py new file mode 100644 index 0000000..62dcb42 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/packages/ordered_dict.py | |||
| @@ -0,0 +1,259 @@ | |||
| 1 | # Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy. | ||
| 2 | # Passes Python2.7's test suite and incorporates all the latest updates. | ||
| 3 | # Copyright 2009 Raymond Hettinger, released under the MIT License. | ||
| 4 | # http://code.activestate.com/recipes/576693/ | ||
| 5 | try: | ||
| 6 | from thread import get_ident as _get_ident | ||
| 7 | except ImportError: | ||
| 8 | from dummy_thread import get_ident as _get_ident | ||
| 9 | |||
| 10 | try: | ||
| 11 | from _abcoll import KeysView, ValuesView, ItemsView | ||
| 12 | except ImportError: | ||
| 13 | pass | ||
| 14 | |||
| 15 | |||
| 16 | class OrderedDict(dict): | ||
| 17 | 'Dictionary that remembers insertion order' | ||
| 18 | # An inherited dict maps keys to values. | ||
| 19 | # The inherited dict provides __getitem__, __len__, __contains__, and get. | ||
| 20 | # The remaining methods are order-aware. | ||
| 21 | # Big-O running times for all methods are the same as for regular dictionaries. | ||
| 22 | |||
| 23 | # The internal self.__map dictionary maps keys to links in a doubly linked list. | ||
| 24 | # The circular doubly linked list starts and ends with a sentinel element. | ||
| 25 | # The sentinel element never gets deleted (this simplifies the algorithm). | ||
| 26 | # Each link is stored as a list of length three: [PREV, NEXT, KEY]. | ||
| 27 | |||
| 28 | def __init__(self, *args, **kwds): | ||
| 29 | '''Initialize an ordered dictionary. Signature is the same as for | ||
| 30 | regular dictionaries, but keyword arguments are not recommended | ||
| 31 | because their insertion order is arbitrary. | ||
| 32 | |||
| 33 | ''' | ||
| 34 | if len(args) > 1: | ||
| 35 | raise TypeError('expected at most 1 arguments, got %d' % len(args)) | ||
| 36 | try: | ||
| 37 | self.__root | ||
| 38 | except AttributeError: | ||
| 39 | self.__root = root = [] # sentinel node | ||
| 40 | root[:] = [root, root, None] | ||
| 41 | self.__map = {} | ||
| 42 | self.__update(*args, **kwds) | ||
| 43 | |||
| 44 | def __setitem__(self, key, value, dict_setitem=dict.__setitem__): | ||
| 45 | 'od.__setitem__(i, y) <==> od[i]=y' | ||
| 46 | # Setting a new item creates a new link which goes at the end of the linked | ||
| 47 | # list, and the inherited dictionary is updated with the new key/value pair. | ||
| 48 | if key not in self: | ||
| 49 | root = self.__root | ||
| 50 | last = root[0] | ||
| 51 | last[1] = root[0] = self.__map[key] = [last, root, key] | ||
| 52 | dict_setitem(self, key, value) | ||
| 53 | |||
| 54 | def __delitem__(self, key, dict_delitem=dict.__delitem__): | ||
| 55 | 'od.__delitem__(y) <==> del od[y]' | ||
| 56 | # Deleting an existing item uses self.__map to find the link which is | ||
| 57 | # then removed by updating the links in the predecessor and successor nodes. | ||
| 58 | dict_delitem(self, key) | ||
| 59 | link_prev, link_next, key = self.__map.pop(key) | ||
| 60 | link_prev[1] = link_next | ||
| 61 | link_next[0] = link_prev | ||
| 62 | |||
| 63 | def __iter__(self): | ||
| 64 | 'od.__iter__() <==> iter(od)' | ||
| 65 | root = self.__root | ||
| 66 | curr = root[1] | ||
| 67 | while curr is not root: | ||
| 68 | yield curr[2] | ||
| 69 | curr = curr[1] | ||
| 70 | |||
| 71 | def __reversed__(self): | ||
| 72 | 'od.__reversed__() <==> reversed(od)' | ||
| 73 | root = self.__root | ||
| 74 | curr = root[0] | ||
| 75 | while curr is not root: | ||
| 76 | yield curr[2] | ||
| 77 | curr = curr[0] | ||
| 78 | |||
| 79 | def clear(self): | ||
| 80 | 'od.clear() -> None. Remove all items from od.' | ||
| 81 | try: | ||
| 82 | for node in self.__map.itervalues(): | ||
| 83 | del node[:] | ||
| 84 | root = self.__root | ||
| 85 | root[:] = [root, root, None] | ||
| 86 | self.__map.clear() | ||
| 87 | except AttributeError: | ||
| 88 | pass | ||
| 89 | dict.clear(self) | ||
| 90 | |||
| 91 | def popitem(self, last=True): | ||
| 92 | '''od.popitem() -> (k, v), return and remove a (key, value) pair. | ||
| 93 | Pairs are returned in LIFO order if last is true or FIFO order if false. | ||
| 94 | |||
| 95 | ''' | ||
| 96 | if not self: | ||
| 97 | raise KeyError('dictionary is empty') | ||
| 98 | root = self.__root | ||
| 99 | if last: | ||
| 100 | link = root[0] | ||
| 101 | link_prev = link[0] | ||
| 102 | link_prev[1] = root | ||
| 103 | root[0] = link_prev | ||
| 104 | else: | ||
| 105 | link = root[1] | ||
| 106 | link_next = link[1] | ||
| 107 | root[1] = link_next | ||
| 108 | link_next[0] = root | ||
| 109 | key = link[2] | ||
| 110 | del self.__map[key] | ||
| 111 | value = dict.pop(self, key) | ||
| 112 | return key, value | ||
| 113 | |||
| 114 | # -- the following methods do not depend on the internal structure -- | ||
| 115 | |||
| 116 | def keys(self): | ||
| 117 | 'od.keys() -> list of keys in od' | ||
| 118 | return list(self) | ||
| 119 | |||
| 120 | def values(self): | ||
| 121 | 'od.values() -> list of values in od' | ||
| 122 | return [self[key] for key in self] | ||
| 123 | |||
| 124 | def items(self): | ||
| 125 | 'od.items() -> list of (key, value) pairs in od' | ||
| 126 | return [(key, self[key]) for key in self] | ||
| 127 | |||
| 128 | def iterkeys(self): | ||
| 129 | 'od.iterkeys() -> an iterator over the keys in od' | ||
| 130 | return iter(self) | ||
| 131 | |||
| 132 | def itervalues(self): | ||
| 133 | 'od.itervalues() -> an iterator over the values in od' | ||
| 134 | for k in self: | ||
| 135 | yield self[k] | ||
| 136 | |||
| 137 | def iteritems(self): | ||
| 138 | 'od.iteritems() -> an iterator over the (key, value) items in od' | ||
| 139 | for k in self: | ||
| 140 | yield (k, self[k]) | ||
| 141 | |||
| 142 | def update(*args, **kwds): | ||
| 143 | '''od.update(E, **F) -> None. Update od from dict/iterable E and F. | ||
| 144 | |||
| 145 | If E is a dict instance, does: for k in E: od[k] = E[k] | ||
| 146 | If E has a .keys() method, does: for k in E.keys(): od[k] = E[k] | ||
| 147 | Or if E is an iterable of items, does: for k, v in E: od[k] = v | ||
| 148 | In either case, this is followed by: for k, v in F.items(): od[k] = v | ||
| 149 | |||
| 150 | ''' | ||
| 151 | if len(args) > 2: | ||
| 152 | raise TypeError('update() takes at most 2 positional ' | ||
| 153 | 'arguments (%d given)' % (len(args),)) | ||
| 154 | elif not args: | ||
| 155 | raise TypeError('update() takes at least 1 argument (0 given)') | ||
| 156 | self = args[0] | ||
| 157 | # Make progressively weaker assumptions about "other" | ||
| 158 | other = () | ||
| 159 | if len(args) == 2: | ||
| 160 | other = args[1] | ||
| 161 | if isinstance(other, dict): | ||
| 162 | for key in other: | ||
| 163 | self[key] = other[key] | ||
| 164 | elif hasattr(other, 'keys'): | ||
| 165 | for key in other.keys(): | ||
| 166 | self[key] = other[key] | ||
| 167 | else: | ||
| 168 | for key, value in other: | ||
| 169 | self[key] = value | ||
| 170 | for key, value in kwds.items(): | ||
| 171 | self[key] = value | ||
| 172 | |||
| 173 | __update = update # let subclasses override update without breaking __init__ | ||
| 174 | |||
| 175 | __marker = object() | ||
| 176 | |||
| 177 | def pop(self, key, default=__marker): | ||
| 178 | '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value. | ||
| 179 | If key is not found, d is returned if given, otherwise KeyError is raised. | ||
| 180 | |||
| 181 | ''' | ||
| 182 | if key in self: | ||
| 183 | result = self[key] | ||
| 184 | del self[key] | ||
| 185 | return result | ||
| 186 | if default is self.__marker: | ||
| 187 | raise KeyError(key) | ||
| 188 | return default | ||
| 189 | |||
| 190 | def setdefault(self, key, default=None): | ||
| 191 | 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' | ||
| 192 | if key in self: | ||
| 193 | return self[key] | ||
| 194 | self[key] = default | ||
| 195 | return default | ||
| 196 | |||
| 197 | def __repr__(self, _repr_running={}): | ||
| 198 | 'od.__repr__() <==> repr(od)' | ||
| 199 | call_key = id(self), _get_ident() | ||
| 200 | if call_key in _repr_running: | ||
| 201 | return '...' | ||
| 202 | _repr_running[call_key] = 1 | ||
| 203 | try: | ||
| 204 | if not self: | ||
| 205 | return '%s()' % (self.__class__.__name__,) | ||
| 206 | return '%s(%r)' % (self.__class__.__name__, self.items()) | ||
| 207 | finally: | ||
| 208 | del _repr_running[call_key] | ||
| 209 | |||
| 210 | def __reduce__(self): | ||
| 211 | 'Return state information for pickling' | ||
| 212 | items = [[k, self[k]] for k in self] | ||
| 213 | inst_dict = vars(self).copy() | ||
| 214 | for k in vars(OrderedDict()): | ||
| 215 | inst_dict.pop(k, None) | ||
| 216 | if inst_dict: | ||
| 217 | return (self.__class__, (items,), inst_dict) | ||
| 218 | return self.__class__, (items,) | ||
| 219 | |||
| 220 | def copy(self): | ||
| 221 | 'od.copy() -> a shallow copy of od' | ||
| 222 | return self.__class__(self) | ||
| 223 | |||
| 224 | @classmethod | ||
| 225 | def fromkeys(cls, iterable, value=None): | ||
| 226 | '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S | ||
| 227 | and values equal to v (which defaults to None). | ||
| 228 | |||
| 229 | ''' | ||
| 230 | d = cls() | ||
| 231 | for key in iterable: | ||
| 232 | d[key] = value | ||
| 233 | return d | ||
| 234 | |||
| 235 | def __eq__(self, other): | ||
| 236 | '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive | ||
| 237 | while comparison to a regular mapping is order-insensitive. | ||
| 238 | |||
| 239 | ''' | ||
| 240 | if isinstance(other, OrderedDict): | ||
| 241 | return len(self)==len(other) and self.items() == other.items() | ||
| 242 | return dict.__eq__(self, other) | ||
| 243 | |||
| 244 | def __ne__(self, other): | ||
| 245 | return not self == other | ||
| 246 | |||
| 247 | # -- the following methods are only used in Python 2.7 -- | ||
| 248 | |||
| 249 | def viewkeys(self): | ||
| 250 | "od.viewkeys() -> a set-like object providing a view on od's keys" | ||
| 251 | return KeysView(self) | ||
| 252 | |||
| 253 | def viewvalues(self): | ||
| 254 | "od.viewvalues() -> an object providing a view on od's values" | ||
| 255 | return ValuesView(self) | ||
| 256 | |||
| 257 | def viewitems(self): | ||
| 258 | "od.viewitems() -> a set-like object providing a view on od's items" | ||
| 259 | return ItemsView(self) | ||
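A quick sketch of the backport's order-aware behaviour. The module itself imports `thread`/`dummy_thread` and so targets Python 2; on Python 3 the stdlib collections.OrderedDict gives the same results for the calls below:

    try:
        from pip._vendor.urllib3.packages.ordered_dict import OrderedDict
    except ImportError:  # Python 3: the `thread` fallbacks are unavailable
        from collections import OrderedDict

    od = OrderedDict()
    od['b'] = 1
    od['a'] = 2
    od['b'] = 3               # re-assignment keeps the original position
    print(list(od))           # ['b', 'a']
    print(od.popitem())       # ('a', 2) -- LIFO by default
    print(od == OrderedDict([('b', 3)]))  # order-sensitive comparison: True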
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/packages/six.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/packages/six.py new file mode 100644 index 0000000..7bd9225 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/packages/six.py | |||
| @@ -0,0 +1,868 @@ | |||
| 1 | """Utilities for writing code that runs on Python 2 and 3""" | ||
| 2 | |||
| 3 | # Copyright (c) 2010-2015 Benjamin Peterson | ||
| 4 | # | ||
| 5 | # Permission is hereby granted, free of charge, to any person obtaining a copy | ||
| 6 | # of this software and associated documentation files (the "Software"), to deal | ||
| 7 | # in the Software without restriction, including without limitation the rights | ||
| 8 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | ||
| 9 | # copies of the Software, and to permit persons to whom the Software is | ||
| 10 | # furnished to do so, subject to the following conditions: | ||
| 11 | # | ||
| 12 | # The above copyright notice and this permission notice shall be included in all | ||
| 13 | # copies or substantial portions of the Software. | ||
| 14 | # | ||
| 15 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | ||
| 16 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | ||
| 17 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | ||
| 18 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | ||
| 19 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | ||
| 20 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE | ||
| 21 | # SOFTWARE. | ||
| 22 | |||
| 23 | from __future__ import absolute_import | ||
| 24 | |||
| 25 | import functools | ||
| 26 | import itertools | ||
| 27 | import operator | ||
| 28 | import sys | ||
| 29 | import types | ||
| 30 | |||
| 31 | __author__ = "Benjamin Peterson <benjamin@python.org>" | ||
| 32 | __version__ = "1.10.0" | ||
| 33 | |||
| 34 | |||
| 35 | # Useful for very coarse version differentiation. | ||
| 36 | PY2 = sys.version_info[0] == 2 | ||
| 37 | PY3 = sys.version_info[0] == 3 | ||
| 38 | PY34 = sys.version_info[0:2] >= (3, 4) | ||
| 39 | |||
| 40 | if PY3: | ||
| 41 | string_types = str, | ||
| 42 | integer_types = int, | ||
| 43 | class_types = type, | ||
| 44 | text_type = str | ||
| 45 | binary_type = bytes | ||
| 46 | |||
| 47 | MAXSIZE = sys.maxsize | ||
| 48 | else: | ||
| 49 | string_types = basestring, | ||
| 50 | integer_types = (int, long) | ||
| 51 | class_types = (type, types.ClassType) | ||
| 52 | text_type = unicode | ||
| 53 | binary_type = str | ||
| 54 | |||
| 55 | if sys.platform.startswith("java"): | ||
| 56 | # Jython always uses 32 bits. | ||
| 57 | MAXSIZE = int((1 << 31) - 1) | ||
| 58 | else: | ||
| 59 | # It's possible to have sizeof(long) != sizeof(Py_ssize_t). | ||
| 60 | class X(object): | ||
| 61 | |||
| 62 | def __len__(self): | ||
| 63 | return 1 << 31 | ||
| 64 | try: | ||
| 65 | len(X()) | ||
| 66 | except OverflowError: | ||
| 67 | # 32-bit | ||
| 68 | MAXSIZE = int((1 << 31) - 1) | ||
| 69 | else: | ||
| 70 | # 64-bit | ||
| 71 | MAXSIZE = int((1 << 63) - 1) | ||
| 72 | del X | ||
| 73 | |||
| 74 | |||
| 75 | def _add_doc(func, doc): | ||
| 76 | """Add documentation to a function.""" | ||
| 77 | func.__doc__ = doc | ||
| 78 | |||
| 79 | |||
| 80 | def _import_module(name): | ||
| 81 | """Import module, returning the module after the last dot.""" | ||
| 82 | __import__(name) | ||
| 83 | return sys.modules[name] | ||
| 84 | |||
| 85 | |||
| 86 | class _LazyDescr(object): | ||
| 87 | |||
| 88 | def __init__(self, name): | ||
| 89 | self.name = name | ||
| 90 | |||
| 91 | def __get__(self, obj, tp): | ||
| 92 | result = self._resolve() | ||
| 93 | setattr(obj, self.name, result) # Invokes __set__. | ||
| 94 | try: | ||
| 95 | # This is a bit ugly, but it avoids running this again by | ||
| 96 | # removing this descriptor. | ||
| 97 | delattr(obj.__class__, self.name) | ||
| 98 | except AttributeError: | ||
| 99 | pass | ||
| 100 | return result | ||
| 101 | |||
| 102 | |||
| 103 | class MovedModule(_LazyDescr): | ||
| 104 | |||
| 105 | def __init__(self, name, old, new=None): | ||
| 106 | super(MovedModule, self).__init__(name) | ||
| 107 | if PY3: | ||
| 108 | if new is None: | ||
| 109 | new = name | ||
| 110 | self.mod = new | ||
| 111 | else: | ||
| 112 | self.mod = old | ||
| 113 | |||
| 114 | def _resolve(self): | ||
| 115 | return _import_module(self.mod) | ||
| 116 | |||
| 117 | def __getattr__(self, attr): | ||
| 118 | _module = self._resolve() | ||
| 119 | value = getattr(_module, attr) | ||
| 120 | setattr(self, attr, value) | ||
| 121 | return value | ||
| 122 | |||
| 123 | |||
| 124 | class _LazyModule(types.ModuleType): | ||
| 125 | |||
| 126 | def __init__(self, name): | ||
| 127 | super(_LazyModule, self).__init__(name) | ||
| 128 | self.__doc__ = self.__class__.__doc__ | ||
| 129 | |||
| 130 | def __dir__(self): | ||
| 131 | attrs = ["__doc__", "__name__"] | ||
| 132 | attrs += [attr.name for attr in self._moved_attributes] | ||
| 133 | return attrs | ||
| 134 | |||
| 135 | # Subclasses should override this | ||
| 136 | _moved_attributes = [] | ||
| 137 | |||
| 138 | |||
| 139 | class MovedAttribute(_LazyDescr): | ||
| 140 | |||
| 141 | def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): | ||
| 142 | super(MovedAttribute, self).__init__(name) | ||
| 143 | if PY3: | ||
| 144 | if new_mod is None: | ||
| 145 | new_mod = name | ||
| 146 | self.mod = new_mod | ||
| 147 | if new_attr is None: | ||
| 148 | if old_attr is None: | ||
| 149 | new_attr = name | ||
| 150 | else: | ||
| 151 | new_attr = old_attr | ||
| 152 | self.attr = new_attr | ||
| 153 | else: | ||
| 154 | self.mod = old_mod | ||
| 155 | if old_attr is None: | ||
| 156 | old_attr = name | ||
| 157 | self.attr = old_attr | ||
| 158 | |||
| 159 | def _resolve(self): | ||
| 160 | module = _import_module(self.mod) | ||
| 161 | return getattr(module, self.attr) | ||
| 162 | |||
| 163 | |||
| 164 | class _SixMetaPathImporter(object): | ||
| 165 | |||
| 166 | """ | ||
| 167 | A meta path importer to import six.moves and its submodules. | ||
| 168 | |||
| 169 | This class implements a PEP302 finder and loader. It should be compatible | ||
| 170 | with Python 2.5 and all existing versions of Python 3. | ||
| 171 | """ | ||
| 172 | |||
| 173 | def __init__(self, six_module_name): | ||
| 174 | self.name = six_module_name | ||
| 175 | self.known_modules = {} | ||
| 176 | |||
| 177 | def _add_module(self, mod, *fullnames): | ||
| 178 | for fullname in fullnames: | ||
| 179 | self.known_modules[self.name + "." + fullname] = mod | ||
| 180 | |||
| 181 | def _get_module(self, fullname): | ||
| 182 | return self.known_modules[self.name + "." + fullname] | ||
| 183 | |||
| 184 | def find_module(self, fullname, path=None): | ||
| 185 | if fullname in self.known_modules: | ||
| 186 | return self | ||
| 187 | return None | ||
| 188 | |||
| 189 | def __get_module(self, fullname): | ||
| 190 | try: | ||
| 191 | return self.known_modules[fullname] | ||
| 192 | except KeyError: | ||
| 193 | raise ImportError("This loader does not know module " + fullname) | ||
| 194 | |||
| 195 | def load_module(self, fullname): | ||
| 196 | try: | ||
| 197 | # in case of a reload | ||
| 198 | return sys.modules[fullname] | ||
| 199 | except KeyError: | ||
| 200 | pass | ||
| 201 | mod = self.__get_module(fullname) | ||
| 202 | if isinstance(mod, MovedModule): | ||
| 203 | mod = mod._resolve() | ||
| 204 | else: | ||
| 205 | mod.__loader__ = self | ||
| 206 | sys.modules[fullname] = mod | ||
| 207 | return mod | ||
| 208 | |||
| 209 | def is_package(self, fullname): | ||
| 210 | """ | ||
| 211 | Return true if the named module is a package. | ||
| 212 | |||
| 213 | We need this method to get correct spec objects with | ||
| 214 | Python 3.4 (see PEP451) | ||
| 215 | """ | ||
| 216 | return hasattr(self.__get_module(fullname), "__path__") | ||
| 217 | |||
| 218 | def get_code(self, fullname): | ||
| 219 | """Return None | ||
| 220 | |||
| 221 | Required, if is_package is implemented""" | ||
| 222 | self.__get_module(fullname) # eventually raises ImportError | ||
| 223 | return None | ||
| 224 | get_source = get_code # same as get_code | ||
| 225 | |||
| 226 | _importer = _SixMetaPathImporter(__name__) | ||
| 227 | |||
| 228 | |||
| 229 | class _MovedItems(_LazyModule): | ||
| 230 | |||
| 231 | """Lazy loading of moved objects""" | ||
| 232 | __path__ = [] # mark as package | ||
| 233 | |||
| 234 | |||
| 235 | _moved_attributes = [ | ||
| 236 | MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), | ||
| 237 | MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), | ||
| 238 | MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), | ||
| 239 | MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), | ||
| 240 | MovedAttribute("intern", "__builtin__", "sys"), | ||
| 241 | MovedAttribute("map", "itertools", "builtins", "imap", "map"), | ||
| 242 | MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), | ||
| 243 | MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), | ||
| 244 | MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), | ||
| 245 | MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), | ||
| 246 | MovedAttribute("reduce", "__builtin__", "functools"), | ||
| 247 | MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), | ||
| 248 | MovedAttribute("StringIO", "StringIO", "io"), | ||
| 249 | MovedAttribute("UserDict", "UserDict", "collections"), | ||
| 250 | MovedAttribute("UserList", "UserList", "collections"), | ||
| 251 | MovedAttribute("UserString", "UserString", "collections"), | ||
| 252 | MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), | ||
| 253 | MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), | ||
| 254 | MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), | ||
| 255 | MovedModule("builtins", "__builtin__"), | ||
| 256 | MovedModule("configparser", "ConfigParser"), | ||
| 257 | MovedModule("copyreg", "copy_reg"), | ||
| 258 | MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), | ||
| 259 | MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"), | ||
| 260 | MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), | ||
| 261 | MovedModule("http_cookies", "Cookie", "http.cookies"), | ||
| 262 | MovedModule("html_entities", "htmlentitydefs", "html.entities"), | ||
| 263 | MovedModule("html_parser", "HTMLParser", "html.parser"), | ||
| 264 | MovedModule("http_client", "httplib", "http.client"), | ||
| 265 | MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), | ||
| 266 | MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), | ||
| 267 | MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), | ||
| 268 | MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), | ||
| 269 | MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), | ||
| 270 | MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), | ||
| 271 | MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), | ||
| 272 | MovedModule("cPickle", "cPickle", "pickle"), | ||
| 273 | MovedModule("queue", "Queue"), | ||
| 274 | MovedModule("reprlib", "repr"), | ||
| 275 | MovedModule("socketserver", "SocketServer"), | ||
| 276 | MovedModule("_thread", "thread", "_thread"), | ||
| 277 | MovedModule("tkinter", "Tkinter"), | ||
| 278 | MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), | ||
| 279 | MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), | ||
| 280 | MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), | ||
| 281 | MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), | ||
| 282 | MovedModule("tkinter_tix", "Tix", "tkinter.tix"), | ||
| 283 | MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), | ||
| 284 | MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), | ||
| 285 | MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), | ||
| 286 | MovedModule("tkinter_colorchooser", "tkColorChooser", | ||
| 287 | "tkinter.colorchooser"), | ||
| 288 | MovedModule("tkinter_commondialog", "tkCommonDialog", | ||
| 289 | "tkinter.commondialog"), | ||
| 290 | MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), | ||
| 291 | MovedModule("tkinter_font", "tkFont", "tkinter.font"), | ||
| 292 | MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), | ||
| 293 | MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", | ||
| 294 | "tkinter.simpledialog"), | ||
| 295 | MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), | ||
| 296 | MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), | ||
| 297 | MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), | ||
| 298 | MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), | ||
| 299 | MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), | ||
| 300 | MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), | ||
| 301 | ] | ||
| 302 | # Add windows specific modules. | ||
| 303 | if sys.platform == "win32": | ||
| 304 | _moved_attributes += [ | ||
| 305 | MovedModule("winreg", "_winreg"), | ||
| 306 | ] | ||
| 307 | |||
| 308 | for attr in _moved_attributes: | ||
| 309 | setattr(_MovedItems, attr.name, attr) | ||
| 310 | if isinstance(attr, MovedModule): | ||
| 311 | _importer._add_module(attr, "moves." + attr.name) | ||
| 312 | del attr | ||
| 313 | |||
| 314 | _MovedItems._moved_attributes = _moved_attributes | ||
| 315 | |||
| 316 | moves = _MovedItems(__name__ + ".moves") | ||
| 317 | _importer._add_module(moves, "moves") | ||
| 318 | |||
| 319 | |||
| 320 | class Module_six_moves_urllib_parse(_LazyModule): | ||
| 321 | |||
| 322 | """Lazy loading of moved objects in six.moves.urllib_parse""" | ||
| 323 | |||
| 324 | |||
| 325 | _urllib_parse_moved_attributes = [ | ||
| 326 | MovedAttribute("ParseResult", "urlparse", "urllib.parse"), | ||
| 327 | MovedAttribute("SplitResult", "urlparse", "urllib.parse"), | ||
| 328 | MovedAttribute("parse_qs", "urlparse", "urllib.parse"), | ||
| 329 | MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), | ||
| 330 | MovedAttribute("urldefrag", "urlparse", "urllib.parse"), | ||
| 331 | MovedAttribute("urljoin", "urlparse", "urllib.parse"), | ||
| 332 | MovedAttribute("urlparse", "urlparse", "urllib.parse"), | ||
| 333 | MovedAttribute("urlsplit", "urlparse", "urllib.parse"), | ||
| 334 | MovedAttribute("urlunparse", "urlparse", "urllib.parse"), | ||
| 335 | MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), | ||
| 336 | MovedAttribute("quote", "urllib", "urllib.parse"), | ||
| 337 | MovedAttribute("quote_plus", "urllib", "urllib.parse"), | ||
| 338 | MovedAttribute("unquote", "urllib", "urllib.parse"), | ||
| 339 | MovedAttribute("unquote_plus", "urllib", "urllib.parse"), | ||
| 340 | MovedAttribute("urlencode", "urllib", "urllib.parse"), | ||
| 341 | MovedAttribute("splitquery", "urllib", "urllib.parse"), | ||
| 342 | MovedAttribute("splittag", "urllib", "urllib.parse"), | ||
| 343 | MovedAttribute("splituser", "urllib", "urllib.parse"), | ||
| 344 | MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), | ||
| 345 | MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), | ||
| 346 | MovedAttribute("uses_params", "urlparse", "urllib.parse"), | ||
| 347 | MovedAttribute("uses_query", "urlparse", "urllib.parse"), | ||
| 348 | MovedAttribute("uses_relative", "urlparse", "urllib.parse"), | ||
| 349 | ] | ||
| 350 | for attr in _urllib_parse_moved_attributes: | ||
| 351 | setattr(Module_six_moves_urllib_parse, attr.name, attr) | ||
| 352 | del attr | ||
| 353 | |||
| 354 | Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes | ||
| 355 | |||
| 356 | _importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), | ||
| 357 | "moves.urllib_parse", "moves.urllib.parse") | ||
| 358 | |||
| 359 | |||
| 360 | class Module_six_moves_urllib_error(_LazyModule): | ||
| 361 | |||
| 362 | """Lazy loading of moved objects in six.moves.urllib_error""" | ||
| 363 | |||
| 364 | |||
| 365 | _urllib_error_moved_attributes = [ | ||
| 366 | MovedAttribute("URLError", "urllib2", "urllib.error"), | ||
| 367 | MovedAttribute("HTTPError", "urllib2", "urllib.error"), | ||
| 368 | MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), | ||
| 369 | ] | ||
| 370 | for attr in _urllib_error_moved_attributes: | ||
| 371 | setattr(Module_six_moves_urllib_error, attr.name, attr) | ||
| 372 | del attr | ||
| 373 | |||
| 374 | Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes | ||
| 375 | |||
| 376 | _importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), | ||
| 377 | "moves.urllib_error", "moves.urllib.error") | ||
| 378 | |||
| 379 | |||
| 380 | class Module_six_moves_urllib_request(_LazyModule): | ||
| 381 | |||
| 382 | """Lazy loading of moved objects in six.moves.urllib_request""" | ||
| 383 | |||
| 384 | |||
| 385 | _urllib_request_moved_attributes = [ | ||
| 386 | MovedAttribute("urlopen", "urllib2", "urllib.request"), | ||
| 387 | MovedAttribute("install_opener", "urllib2", "urllib.request"), | ||
| 388 | MovedAttribute("build_opener", "urllib2", "urllib.request"), | ||
| 389 | MovedAttribute("pathname2url", "urllib", "urllib.request"), | ||
| 390 | MovedAttribute("url2pathname", "urllib", "urllib.request"), | ||
| 391 | MovedAttribute("getproxies", "urllib", "urllib.request"), | ||
| 392 | MovedAttribute("Request", "urllib2", "urllib.request"), | ||
| 393 | MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), | ||
| 394 | MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), | ||
| 395 | MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), | ||
| 396 | MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"), | ||
| 397 | MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), | ||
| 398 | MovedAttribute("BaseHandler", "urllib2", "urllib.request"), | ||
| 399 | MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), | ||
| 400 | MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), | ||
| 401 | MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), | ||
| 402 | MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), | ||
| 403 | MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), | ||
| 404 | MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), | ||
| 405 | MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), | ||
| 406 | MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), | ||
| 407 | MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), | ||
| 408 | MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), | ||
| 409 | MovedAttribute("FileHandler", "urllib2", "urllib.request"), | ||
| 410 | MovedAttribute("FTPHandler", "urllib2", "urllib.request"), | ||
| 411 | MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), | ||
| 412 | MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), | ||
| 413 | MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), | ||
| 414 | MovedAttribute("urlretrieve", "urllib", "urllib.request"), | ||
| 415 | MovedAttribute("urlcleanup", "urllib", "urllib.request"), | ||
| 416 | MovedAttribute("URLopener", "urllib", "urllib.request"), | ||
| 417 | MovedAttribute("FancyURLopener", "urllib", "urllib.request"), | ||
| 418 | MovedAttribute("proxy_bypass", "urllib", "urllib.request"), | ||
| 419 | ] | ||
| 420 | for attr in _urllib_request_moved_attributes: | ||
| 421 | setattr(Module_six_moves_urllib_request, attr.name, attr) | ||
| 422 | del attr | ||
| 423 | |||
| 424 | Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes | ||
| 425 | |||
| 426 | _importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), | ||
| 427 | "moves.urllib_request", "moves.urllib.request") | ||
| 428 | |||
| 429 | |||
| 430 | class Module_six_moves_urllib_response(_LazyModule): | ||
| 431 | |||
| 432 | """Lazy loading of moved objects in six.moves.urllib_response""" | ||
| 433 | |||
| 434 | |||
| 435 | _urllib_response_moved_attributes = [ | ||
| 436 | MovedAttribute("addbase", "urllib", "urllib.response"), | ||
| 437 | MovedAttribute("addclosehook", "urllib", "urllib.response"), | ||
| 438 | MovedAttribute("addinfo", "urllib", "urllib.response"), | ||
| 439 | MovedAttribute("addinfourl", "urllib", "urllib.response"), | ||
| 440 | ] | ||
| 441 | for attr in _urllib_response_moved_attributes: | ||
| 442 | setattr(Module_six_moves_urllib_response, attr.name, attr) | ||
| 443 | del attr | ||
| 444 | |||
| 445 | Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes | ||
| 446 | |||
| 447 | _importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), | ||
| 448 | "moves.urllib_response", "moves.urllib.response") | ||
| 449 | |||
| 450 | |||
| 451 | class Module_six_moves_urllib_robotparser(_LazyModule): | ||
| 452 | |||
| 453 | """Lazy loading of moved objects in six.moves.urllib_robotparser""" | ||
| 454 | |||
| 455 | |||
| 456 | _urllib_robotparser_moved_attributes = [ | ||
| 457 | MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), | ||
| 458 | ] | ||
| 459 | for attr in _urllib_robotparser_moved_attributes: | ||
| 460 | setattr(Module_six_moves_urllib_robotparser, attr.name, attr) | ||
| 461 | del attr | ||
| 462 | |||
| 463 | Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes | ||
| 464 | |||
| 465 | _importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), | ||
| 466 | "moves.urllib_robotparser", "moves.urllib.robotparser") | ||
| 467 | |||
| 468 | |||
| 469 | class Module_six_moves_urllib(types.ModuleType): | ||
| 470 | |||
| 471 | """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" | ||
| 472 | __path__ = [] # mark as package | ||
| 473 | parse = _importer._get_module("moves.urllib_parse") | ||
| 474 | error = _importer._get_module("moves.urllib_error") | ||
| 475 | request = _importer._get_module("moves.urllib_request") | ||
| 476 | response = _importer._get_module("moves.urllib_response") | ||
| 477 | robotparser = _importer._get_module("moves.urllib_robotparser") | ||
| 478 | |||
| 479 | def __dir__(self): | ||
| 480 | return ['parse', 'error', 'request', 'response', 'robotparser'] | ||
| 481 | |||
| 482 | _importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), | ||
| 483 | "moves.urllib") | ||
| 484 | |||
| 485 | |||
| 486 | def add_move(move): | ||
| 487 | """Add an item to six.moves.""" | ||
| 488 | setattr(_MovedItems, move.name, move) | ||
| 489 | |||
| 490 | |||
| 491 | def remove_move(name): | ||
| 492 | """Remove item from six.moves.""" | ||
| 493 | try: | ||
| 494 | delattr(_MovedItems, name) | ||
| 495 | except AttributeError: | ||
| 496 | try: | ||
| 497 | del moves.__dict__[name] | ||
| 498 | except KeyError: | ||
| 499 | raise AttributeError("no such move, %r" % (name,)) | ||
| 500 | |||
| 501 | |||
| 502 | if PY3: | ||
| 503 | _meth_func = "__func__" | ||
| 504 | _meth_self = "__self__" | ||
| 505 | |||
| 506 | _func_closure = "__closure__" | ||
| 507 | _func_code = "__code__" | ||
| 508 | _func_defaults = "__defaults__" | ||
| 509 | _func_globals = "__globals__" | ||
| 510 | else: | ||
| 511 | _meth_func = "im_func" | ||
| 512 | _meth_self = "im_self" | ||
| 513 | |||
| 514 | _func_closure = "func_closure" | ||
| 515 | _func_code = "func_code" | ||
| 516 | _func_defaults = "func_defaults" | ||
| 517 | _func_globals = "func_globals" | ||
| 518 | |||
| 519 | |||
| 520 | try: | ||
| 521 | advance_iterator = next | ||
| 522 | except NameError: | ||
| 523 | def advance_iterator(it): | ||
| 524 | return it.next() | ||
| 525 | next = advance_iterator | ||
| 526 | |||
| 527 | |||
| 528 | try: | ||
| 529 | callable = callable | ||
| 530 | except NameError: | ||
| 531 | def callable(obj): | ||
| 532 | return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) | ||
| 533 | |||
| 534 | |||
| 535 | if PY3: | ||
| 536 | def get_unbound_function(unbound): | ||
| 537 | return unbound | ||
| 538 | |||
| 539 | create_bound_method = types.MethodType | ||
| 540 | |||
| 541 | def create_unbound_method(func, cls): | ||
| 542 | return func | ||
| 543 | |||
| 544 | Iterator = object | ||
| 545 | else: | ||
| 546 | def get_unbound_function(unbound): | ||
| 547 | return unbound.im_func | ||
| 548 | |||
| 549 | def create_bound_method(func, obj): | ||
| 550 | return types.MethodType(func, obj, obj.__class__) | ||
| 551 | |||
| 552 | def create_unbound_method(func, cls): | ||
| 553 | return types.MethodType(func, None, cls) | ||
| 554 | |||
| 555 | class Iterator(object): | ||
| 556 | |||
| 557 | def next(self): | ||
| 558 | return type(self).__next__(self) | ||
| 559 | |||
| 560 | callable = callable | ||
| 561 | _add_doc(get_unbound_function, | ||
| 562 | """Get the function out of a possibly unbound function""") | ||
| 563 | |||
| 564 | |||
| 565 | get_method_function = operator.attrgetter(_meth_func) | ||
| 566 | get_method_self = operator.attrgetter(_meth_self) | ||
| 567 | get_function_closure = operator.attrgetter(_func_closure) | ||
| 568 | get_function_code = operator.attrgetter(_func_code) | ||
| 569 | get_function_defaults = operator.attrgetter(_func_defaults) | ||
| 570 | get_function_globals = operator.attrgetter(_func_globals) | ||
| 571 | |||
| 572 | |||
| 573 | if PY3: | ||
| 574 | def iterkeys(d, **kw): | ||
| 575 | return iter(d.keys(**kw)) | ||
| 576 | |||
| 577 | def itervalues(d, **kw): | ||
| 578 | return iter(d.values(**kw)) | ||
| 579 | |||
| 580 | def iteritems(d, **kw): | ||
| 581 | return iter(d.items(**kw)) | ||
| 582 | |||
| 583 | def iterlists(d, **kw): | ||
| 584 | return iter(d.lists(**kw)) | ||
| 585 | |||
| 586 | viewkeys = operator.methodcaller("keys") | ||
| 587 | |||
| 588 | viewvalues = operator.methodcaller("values") | ||
| 589 | |||
| 590 | viewitems = operator.methodcaller("items") | ||
| 591 | else: | ||
| 592 | def iterkeys(d, **kw): | ||
| 593 | return d.iterkeys(**kw) | ||
| 594 | |||
| 595 | def itervalues(d, **kw): | ||
| 596 | return d.itervalues(**kw) | ||
| 597 | |||
| 598 | def iteritems(d, **kw): | ||
| 599 | return d.iteritems(**kw) | ||
| 600 | |||
| 601 | def iterlists(d, **kw): | ||
| 602 | return d.iterlists(**kw) | ||
| 603 | |||
| 604 | viewkeys = operator.methodcaller("viewkeys") | ||
| 605 | |||
| 606 | viewvalues = operator.methodcaller("viewvalues") | ||
| 607 | |||
| 608 | viewitems = operator.methodcaller("viewitems") | ||
| 609 | |||
| 610 | _add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") | ||
| 611 | _add_doc(itervalues, "Return an iterator over the values of a dictionary.") | ||
| 612 | _add_doc(iteritems, | ||
| 613 | "Return an iterator over the (key, value) pairs of a dictionary.") | ||
| 614 | _add_doc(iterlists, | ||
| 615 | "Return an iterator over the (key, [values]) pairs of a dictionary.") | ||
| 616 | |||
| 617 | |||
| 618 | if PY3: | ||
| 619 | def b(s): | ||
| 620 | return s.encode("latin-1") | ||
| 621 | |||
| 622 | def u(s): | ||
| 623 | return s | ||
| 624 | unichr = chr | ||
| 625 | import struct | ||
| 626 | int2byte = struct.Struct(">B").pack | ||
| 627 | del struct | ||
| 628 | byte2int = operator.itemgetter(0) | ||
| 629 | indexbytes = operator.getitem | ||
| 630 | iterbytes = iter | ||
| 631 | import io | ||
| 632 | StringIO = io.StringIO | ||
| 633 | BytesIO = io.BytesIO | ||
| 634 | _assertCountEqual = "assertCountEqual" | ||
| 635 | if sys.version_info[1] <= 1: | ||
| 636 | _assertRaisesRegex = "assertRaisesRegexp" | ||
| 637 | _assertRegex = "assertRegexpMatches" | ||
| 638 | else: | ||
| 639 | _assertRaisesRegex = "assertRaisesRegex" | ||
| 640 | _assertRegex = "assertRegex" | ||
| 641 | else: | ||
| 642 | def b(s): | ||
| 643 | return s | ||
| 644 | # Workaround for standalone backslash | ||
| 645 | |||
| 646 | def u(s): | ||
| 647 | return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") | ||
| 648 | unichr = unichr | ||
| 649 | int2byte = chr | ||
| 650 | |||
| 651 | def byte2int(bs): | ||
| 652 | return ord(bs[0]) | ||
| 653 | |||
| 654 | def indexbytes(buf, i): | ||
| 655 | return ord(buf[i]) | ||
| 656 | iterbytes = functools.partial(itertools.imap, ord) | ||
| 657 | import StringIO | ||
| 658 | StringIO = BytesIO = StringIO.StringIO | ||
| 659 | _assertCountEqual = "assertItemsEqual" | ||
| 660 | _assertRaisesRegex = "assertRaisesRegexp" | ||
| 661 | _assertRegex = "assertRegexpMatches" | ||
| 662 | _add_doc(b, """Byte literal""") | ||
| 663 | _add_doc(u, """Text literal""") | ||
| 664 | |||
| 665 | |||
| 666 | def assertCountEqual(self, *args, **kwargs): | ||
| 667 | return getattr(self, _assertCountEqual)(*args, **kwargs) | ||
| 668 | |||
| 669 | |||
| 670 | def assertRaisesRegex(self, *args, **kwargs): | ||
| 671 | return getattr(self, _assertRaisesRegex)(*args, **kwargs) | ||
| 672 | |||
| 673 | |||
| 674 | def assertRegex(self, *args, **kwargs): | ||
| 675 | return getattr(self, _assertRegex)(*args, **kwargs) | ||
| 676 | |||
| 677 | |||
| 678 | if PY3: | ||
| 679 | exec_ = getattr(moves.builtins, "exec") | ||
| 680 | |||
| 681 | def reraise(tp, value, tb=None): | ||
| 682 | if value is None: | ||
| 683 | value = tp() | ||
| 684 | if value.__traceback__ is not tb: | ||
| 685 | raise value.with_traceback(tb) | ||
| 686 | raise value | ||
| 687 | |||
| 688 | else: | ||
| 689 | def exec_(_code_, _globs_=None, _locs_=None): | ||
| 690 | """Execute code in a namespace.""" | ||
| 691 | if _globs_ is None: | ||
| 692 | frame = sys._getframe(1) | ||
| 693 | _globs_ = frame.f_globals | ||
| 694 | if _locs_ is None: | ||
| 695 | _locs_ = frame.f_locals | ||
| 696 | del frame | ||
| 697 | elif _locs_ is None: | ||
| 698 | _locs_ = _globs_ | ||
| 699 | exec("""exec _code_ in _globs_, _locs_""") | ||
| 700 | |||
| 701 | exec_("""def reraise(tp, value, tb=None): | ||
| 702 | raise tp, value, tb | ||
| 703 | """) | ||
| 704 | |||
| 705 | |||
| 706 | if sys.version_info[:2] == (3, 2): | ||
| 707 | exec_("""def raise_from(value, from_value): | ||
| 708 | if from_value is None: | ||
| 709 | raise value | ||
| 710 | raise value from from_value | ||
| 711 | """) | ||
| 712 | elif sys.version_info[:2] > (3, 2): | ||
| 713 | exec_("""def raise_from(value, from_value): | ||
| 714 | raise value from from_value | ||
| 715 | """) | ||
| 716 | else: | ||
| 717 | def raise_from(value, from_value): | ||
| 718 | raise value | ||
| 719 | |||
| 720 | |||
| 721 | print_ = getattr(moves.builtins, "print", None) | ||
| 722 | if print_ is None: | ||
| 723 | def print_(*args, **kwargs): | ||
| 724 | """The new-style print function for Python 2.4 and 2.5.""" | ||
| 725 | fp = kwargs.pop("file", sys.stdout) | ||
| 726 | if fp is None: | ||
| 727 | return | ||
| 728 | |||
| 729 | def write(data): | ||
| 730 | if not isinstance(data, basestring): | ||
| 731 | data = str(data) | ||
| 732 | # If the file has an encoding, encode unicode with it. | ||
| 733 | if (isinstance(fp, file) and | ||
| 734 | isinstance(data, unicode) and | ||
| 735 | fp.encoding is not None): | ||
| 736 | errors = getattr(fp, "errors", None) | ||
| 737 | if errors is None: | ||
| 738 | errors = "strict" | ||
| 739 | data = data.encode(fp.encoding, errors) | ||
| 740 | fp.write(data) | ||
| 741 | want_unicode = False | ||
| 742 | sep = kwargs.pop("sep", None) | ||
| 743 | if sep is not None: | ||
| 744 | if isinstance(sep, unicode): | ||
| 745 | want_unicode = True | ||
| 746 | elif not isinstance(sep, str): | ||
| 747 | raise TypeError("sep must be None or a string") | ||
| 748 | end = kwargs.pop("end", None) | ||
| 749 | if end is not None: | ||
| 750 | if isinstance(end, unicode): | ||
| 751 | want_unicode = True | ||
| 752 | elif not isinstance(end, str): | ||
| 753 | raise TypeError("end must be None or a string") | ||
| 754 | if kwargs: | ||
| 755 | raise TypeError("invalid keyword arguments to print()") | ||
| 756 | if not want_unicode: | ||
| 757 | for arg in args: | ||
| 758 | if isinstance(arg, unicode): | ||
| 759 | want_unicode = True | ||
| 760 | break | ||
| 761 | if want_unicode: | ||
| 762 | newline = unicode("\n") | ||
| 763 | space = unicode(" ") | ||
| 764 | else: | ||
| 765 | newline = "\n" | ||
| 766 | space = " " | ||
| 767 | if sep is None: | ||
| 768 | sep = space | ||
| 769 | if end is None: | ||
| 770 | end = newline | ||
| 771 | for i, arg in enumerate(args): | ||
| 772 | if i: | ||
| 773 | write(sep) | ||
| 774 | write(arg) | ||
| 775 | write(end) | ||
| 776 | if sys.version_info[:2] < (3, 3): | ||
| 777 | _print = print_ | ||
| 778 | |||
| 779 | def print_(*args, **kwargs): | ||
| 780 | fp = kwargs.get("file", sys.stdout) | ||
| 781 | flush = kwargs.pop("flush", False) | ||
| 782 | _print(*args, **kwargs) | ||
| 783 | if flush and fp is not None: | ||
| 784 | fp.flush() | ||
| 785 | |||
| 786 | _add_doc(reraise, """Reraise an exception.""") | ||
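# Editor's note (illustrative, not upstream six code): reraise() is typically
# used to preserve the original traceback across a cleanup step on both
# Python 2 and 3; do_work() and cleanup() are hypothetical helpers:
#
#     try:
#         do_work()
#     except Exception:
#         tp, value, tb = sys.exc_info()
#         cleanup()
#         reraise(tp, value, tb)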
| 787 | |||
| 788 | if sys.version_info[0:2] < (3, 4): | ||
| 789 | def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, | ||
| 790 | updated=functools.WRAPPER_UPDATES): | ||
| 791 | def wrapper(f): | ||
| 792 | f = functools.wraps(wrapped, assigned, updated)(f) | ||
| 793 | f.__wrapped__ = wrapped | ||
| 794 | return f | ||
| 795 | return wrapper | ||
| 796 | else: | ||
| 797 | wraps = functools.wraps | ||
| 798 | |||
| 799 | |||
| 800 | def with_metaclass(meta, *bases): | ||
| 801 | """Create a base class with a metaclass.""" | ||
| 802 | # This requires a bit of explanation: the basic idea is to make a dummy | ||
| 803 | # metaclass for one level of class instantiation that replaces itself with | ||
| 804 | # the actual metaclass. | ||
| 805 | class metaclass(meta): | ||
| 806 | |||
| 807 | def __new__(cls, name, this_bases, d): | ||
| 808 | return meta(name, bases, d) | ||
| 809 | return type.__new__(metaclass, 'temporary_class', (), {}) | ||
| 810 | |||
| 811 | |||
| 812 | def add_metaclass(metaclass): | ||
| 813 | """Class decorator for creating a class with a metaclass.""" | ||
| 814 | def wrapper(cls): | ||
| 815 | orig_vars = cls.__dict__.copy() | ||
| 816 | slots = orig_vars.get('__slots__') | ||
| 817 | if slots is not None: | ||
| 818 | if isinstance(slots, str): | ||
| 819 | slots = [slots] | ||
| 820 | for slots_var in slots: | ||
| 821 | orig_vars.pop(slots_var) | ||
| 822 | orig_vars.pop('__dict__', None) | ||
| 823 | orig_vars.pop('__weakref__', None) | ||
| 824 | return metaclass(cls.__name__, cls.__bases__, orig_vars) | ||
| 825 | return wrapper | ||
| 826 | |||
| 827 | |||
| 828 | def python_2_unicode_compatible(klass): | ||
| 829 | """ | ||
| 830 | A decorator that defines __unicode__ and __str__ methods under Python 2. | ||
| 831 | Under Python 3 it does nothing. | ||
| 832 | |||
| 833 | To support Python 2 and 3 with a single code base, define a __str__ method | ||
| 834 | returning text and apply this decorator to the class. | ||
| 835 | """ | ||
| 836 | if PY2: | ||
| 837 | if '__str__' not in klass.__dict__: | ||
| 838 | raise ValueError("@python_2_unicode_compatible cannot be applied " | ||
| 839 | "to %s because it doesn't define __str__()." % | ||
| 840 | klass.__name__) | ||
| 841 | klass.__unicode__ = klass.__str__ | ||
| 842 | klass.__str__ = lambda self: self.__unicode__().encode('utf-8') | ||
| 843 | return klass | ||
| 844 | |||
| 845 | |||
| 846 | # Complete the moves implementation. | ||
| 847 | # This code is at the end of this module to speed up module loading. | ||
| 848 | # Turn this module into a package. | ||
| 849 | __path__ = [] # required for PEP 302 and PEP 451 | ||
| 850 | __package__ = __name__ # see PEP 366 @ReservedAssignment | ||
| 851 | if globals().get("__spec__") is not None: | ||
| 852 | __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable | ||
| 853 | # Remove other six meta path importers, since they cause problems. This can | ||
| 854 | # happen if six is removed from sys.modules and then reloaded. (Setuptools does | ||
| 855 | # this for some reason.) | ||
| 856 | if sys.meta_path: | ||
| 857 | for i, importer in enumerate(sys.meta_path): | ||
| 858 | # Here's some real nastiness: Another "instance" of the six module might | ||
| 859 | # be floating around. Therefore, we can't use isinstance() to check for | ||
| 860 | # the six meta path importer, since the other six instance will have | ||
| 861 | # inserted an importer with a different class. | ||
| 862 | if (type(importer).__name__ == "_SixMetaPathImporter" and | ||
| 863 | importer.name == __name__): | ||
| 864 | del sys.meta_path[i] | ||
| 865 | break | ||
| 866 | del i, importer | ||
| 867 | # Finally, add the importer to the meta path import hook. | ||
| 868 | sys.meta_path.append(_importer) | ||
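Editor's note: the metaclass helpers defined above (``with_metaclass`` and ``add_metaclass``) are the pieces of this vendored six module that downstream code touches most often. A minimal sketch of both, assuming a hypothetical ``Meta`` metaclass (illustrative only, not part of the diff):

    from pip._vendor import six

    class Meta(type):
        """Hypothetical metaclass, used only for illustration."""

    class BaseWith(six.with_metaclass(Meta, object)):
        # with_metaclass builds a throwaway class so this works on 2 and 3
        pass

    @six.add_metaclass(Meta)
    class BaseAdd(object):
        # add_metaclass rebuilds the class under Meta, handling __slots__
        pass

    assert type(BaseWith) is Meta and type(BaseAdd) is Meta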
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.py new file mode 100644 index 0000000..accb927 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.py | |||
| @@ -0,0 +1,19 @@ | |||
| 1 | import sys | ||
| 2 | |||
| 3 | try: | ||
| 4 | # Our match_hostname function is the same as 3.5's, so we only want to | ||
| 5 | # import the match_hostname function if it's at least that good. | ||
| 6 | if sys.version_info < (3, 5): | ||
| 7 | raise ImportError("Fallback to vendored code") | ||
| 8 | |||
| 9 | from ssl import CertificateError, match_hostname | ||
| 10 | except ImportError: | ||
| 11 | try: | ||
| 12 | # Backport of the function from a pypi module | ||
| 13 | from backports.ssl_match_hostname import CertificateError, match_hostname | ||
| 14 | except ImportError: | ||
| 15 | # Our vendored copy | ||
| 16 | from ._implementation import CertificateError, match_hostname | ||
| 17 | |||
| 18 | # Not needed, but documenting what we provide. | ||
| 19 | __all__ = ('CertificateError', 'match_hostname') | ||
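Editor's note: the net effect of the fallback chain above is one stable import surface regardless of which implementation won; a sketch of how vendored callers consume it:

    from pip._vendor.urllib3.packages.ssl_match_hostname import (
        CertificateError,   # raised on hostname mismatch
        match_hostname,     # stdlib, backport, or vendored copy
    )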
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py new file mode 100644 index 0000000..7272d86 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py | |||
| @@ -0,0 +1,157 @@ | |||
| 1 | """The match_hostname() function from Python 3.3.3, essential when using SSL.""" | ||
| 2 | |||
| 3 | # Note: This file is under the PSF license as the code comes from the python | ||
| 4 | # stdlib. http://docs.python.org/3/license.html | ||
| 5 | |||
| 6 | import re | ||
| 7 | import sys | ||
| 8 | |||
| 9 | # ipaddress has been backported to 2.6+ in pypi. If it is installed on the | ||
| 10 | # system, use it to handle IPAddress ServerAltnames (this was added in | ||
| 11 | # python-3.5); otherwise only do DNS matching. This allows | ||
| 12 | # backports.ssl_match_hostname to continue to be used all the way back to | ||
| 13 | # python-2.4. | ||
| 14 | try: | ||
| 15 | from pip._vendor import ipaddress | ||
| 16 | except ImportError: | ||
| 17 | ipaddress = None | ||
| 18 | |||
| 19 | __version__ = '3.5.0.1' | ||
| 20 | |||
| 21 | |||
| 22 | class CertificateError(ValueError): | ||
| 23 | pass | ||
| 24 | |||
| 25 | |||
| 26 | def _dnsname_match(dn, hostname, max_wildcards=1): | ||
| 27 | """Matching according to RFC 6125, section 6.4.3 | ||
| 28 | |||
| 29 | http://tools.ietf.org/html/rfc6125#section-6.4.3 | ||
| 30 | """ | ||
| 31 | pats = [] | ||
| 32 | if not dn: | ||
| 33 | return False | ||
| 34 | |||
| 35 | # Ported from python3-syntax: | ||
| 36 | # leftmost, *remainder = dn.split(r'.') | ||
| 37 | parts = dn.split(r'.') | ||
| 38 | leftmost = parts[0] | ||
| 39 | remainder = parts[1:] | ||
| 40 | |||
| 41 | wildcards = leftmost.count('*') | ||
| 42 | if wildcards > max_wildcards: | ||
| 43 | # Issue #17980: avoid denials of service by refusing more | ||
| 44 | # than one wildcard per fragment. A survey of established | ||
| 45 | # policy among SSL implementations showed it to be a | ||
| 46 | # reasonable choice. | ||
| 47 | raise CertificateError( | ||
| 48 | "too many wildcards in certificate DNS name: " + repr(dn)) | ||
| 49 | |||
| 50 | # speed up common case w/o wildcards | ||
| 51 | if not wildcards: | ||
| 52 | return dn.lower() == hostname.lower() | ||
| 53 | |||
| 54 | # RFC 6125, section 6.4.3, subitem 1. | ||
| 55 | # The client SHOULD NOT attempt to match a presented identifier in which | ||
| 56 | # the wildcard character comprises a label other than the left-most label. | ||
| 57 | if leftmost == '*': | ||
| 58 | # When '*' is a fragment by itself, it matches a non-empty dotless | ||
| 59 | # fragment. | ||
| 60 | pats.append('[^.]+') | ||
| 61 | elif leftmost.startswith('xn--') or hostname.startswith('xn--'): | ||
| 62 | # RFC 6125, section 6.4.3, subitem 3. | ||
| 63 | # The client SHOULD NOT attempt to match a presented identifier | ||
| 64 | # where the wildcard character is embedded within an A-label or | ||
| 65 | # U-label of an internationalized domain name. | ||
| 66 | pats.append(re.escape(leftmost)) | ||
| 67 | else: | ||
| 68 | # Otherwise, '*' matches any dotless string, e.g. www* | ||
| 69 | pats.append(re.escape(leftmost).replace(r'\*', '[^.]*')) | ||
| 70 | |||
| 71 | # add the remaining fragments, ignore any wildcards | ||
| 72 | for frag in remainder: | ||
| 73 | pats.append(re.escape(frag)) | ||
| 74 | |||
| 75 | pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE) | ||
| 76 | return pat.match(hostname) | ||
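# Editor's note (illustrative, not upstream code): expected behaviour of
# _dnsname_match for common certificate names:
#     _dnsname_match('example.com', 'EXAMPLE.com')        -> True (case-insensitive)
#     _dnsname_match('*.example.com', 'www.example.com')  -> truthy match object
#     _dnsname_match('*.example.com', 'a.b.example.com')  -> None ('*' spans one label)
#     _dnsname_match('w*.example.com', 'www.example.com') -> truthy match object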
| 77 | |||
| 78 | |||
| 79 | def _to_unicode(obj): | ||
| 80 | if isinstance(obj, str) and sys.version_info < (3,): | ||
| 81 | obj = unicode(obj, encoding='ascii', errors='strict') | ||
| 82 | return obj | ||
| 83 | |||
| 84 | def _ipaddress_match(ipname, host_ip): | ||
| 85 | """Exact matching of IP addresses. | ||
| 86 | |||
| 87 | RFC 6125 explicitly doesn't define an algorithm for this | ||
| 88 | (section 1.7.2 - "Out of Scope"). | ||
| 89 | """ | ||
| 90 | # OpenSSL may add a trailing newline to a subjectAltName's IP address | ||
| 91 | # Divergence from upstream: ipaddress can't handle byte str | ||
| 92 | ip = ipaddress.ip_address(_to_unicode(ipname).rstrip()) | ||
| 93 | return ip == host_ip | ||
| 94 | |||
| 95 | |||
| 96 | def match_hostname(cert, hostname): | ||
| 97 | """Verify that *cert* (in decoded format as returned by | ||
| 98 | SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125 | ||
| 99 | rules are followed, but IP addresses are not accepted for *hostname*. | ||
| 100 | |||
| 101 | CertificateError is raised on failure. On success, the function | ||
| 102 | returns nothing. | ||
| 103 | """ | ||
| 104 | if not cert: | ||
| 105 | raise ValueError("empty or no certificate, match_hostname needs a " | ||
| 106 | "SSL socket or SSL context with either " | ||
| 107 | "CERT_OPTIONAL or CERT_REQUIRED") | ||
| 108 | try: | ||
| 109 | # Divergence from upstream: ipaddress can't handle byte str | ||
| 110 | host_ip = ipaddress.ip_address(_to_unicode(hostname)) | ||
| 111 | except ValueError: | ||
| 112 | # Not an IP address (common case) | ||
| 113 | host_ip = None | ||
| 114 | except UnicodeError: | ||
| 115 | # Divergence from upstream: Have to deal with ipaddress not taking | ||
| 116 | # byte strings. addresses should be all ascii, so we consider it not | ||
| 117 | # an ipaddress in this case | ||
| 118 | host_ip = None | ||
| 119 | except AttributeError: | ||
| 120 | # Divergence from upstream: Make ipaddress library optional | ||
| 121 | if ipaddress is None: | ||
| 122 | host_ip = None | ||
| 123 | else: | ||
| 124 | raise | ||
| 125 | dnsnames = [] | ||
| 126 | san = cert.get('subjectAltName', ()) | ||
| 127 | for key, value in san: | ||
| 128 | if key == 'DNS': | ||
| 129 | if host_ip is None and _dnsname_match(value, hostname): | ||
| 130 | return | ||
| 131 | dnsnames.append(value) | ||
| 132 | elif key == 'IP Address': | ||
| 133 | if host_ip is not None and _ipaddress_match(value, host_ip): | ||
| 134 | return | ||
| 135 | dnsnames.append(value) | ||
| 136 | if not dnsnames: | ||
| 137 | # The subject is only checked when there is no dNSName entry | ||
| 138 | # in subjectAltName | ||
| 139 | for sub in cert.get('subject', ()): | ||
| 140 | for key, value in sub: | ||
| 141 | # XXX according to RFC 2818, the most specific Common Name | ||
| 142 | # must be used. | ||
| 143 | if key == 'commonName': | ||
| 144 | if _dnsname_match(value, hostname): | ||
| 145 | return | ||
| 146 | dnsnames.append(value) | ||
| 147 | if len(dnsnames) > 1: | ||
| 148 | raise CertificateError("hostname %r " | ||
| 149 | "doesn't match either of %s" | ||
| 150 | % (hostname, ', '.join(map(repr, dnsnames)))) | ||
| 151 | elif len(dnsnames) == 1: | ||
| 152 | raise CertificateError("hostname %r " | ||
| 153 | "doesn't match %r" | ||
| 154 | % (hostname, dnsnames[0])) | ||
| 155 | else: | ||
| 156 | raise CertificateError("no appropriate commonName or " | ||
| 157 | "subjectAltName fields were found") | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/poolmanager.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/poolmanager.py new file mode 100644 index 0000000..607ae0f --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/poolmanager.py | |||
| @@ -0,0 +1,440 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | import collections | ||
| 3 | import functools | ||
| 4 | import logging | ||
| 5 | |||
| 6 | from ._collections import RecentlyUsedContainer | ||
| 7 | from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool | ||
| 8 | from .connectionpool import port_by_scheme | ||
| 9 | from .exceptions import LocationValueError, MaxRetryError, ProxySchemeUnknown | ||
| 10 | from .packages.six.moves.urllib.parse import urljoin | ||
| 11 | from .request import RequestMethods | ||
| 12 | from .util.url import parse_url | ||
| 13 | from .util.retry import Retry | ||
| 14 | |||
| 15 | |||
| 16 | __all__ = ['PoolManager', 'ProxyManager', 'proxy_from_url'] | ||
| 17 | |||
| 18 | |||
| 19 | log = logging.getLogger(__name__) | ||
| 20 | |||
| 21 | SSL_KEYWORDS = ('key_file', 'cert_file', 'cert_reqs', 'ca_certs', | ||
| 22 | 'ssl_version', 'ca_cert_dir', 'ssl_context') | ||
| 23 | |||
| 24 | # All known keyword arguments that could be provided to the pool manager, its | ||
| 25 | # pools, or the underlying connections. This is used to construct a pool key. | ||
| 26 | _key_fields = ( | ||
| 27 | 'key_scheme', # str | ||
| 28 | 'key_host', # str | ||
| 29 | 'key_port', # int | ||
| 30 | 'key_timeout', # int or float or Timeout | ||
| 31 | 'key_retries', # int or Retry | ||
| 32 | 'key_strict', # bool | ||
| 33 | 'key_block', # bool | ||
| 34 | 'key_source_address', # str | ||
| 35 | 'key_key_file', # str | ||
| 36 | 'key_cert_file', # str | ||
| 37 | 'key_cert_reqs', # str | ||
| 38 | 'key_ca_certs', # str | ||
| 39 | 'key_ssl_version', # str | ||
| 40 | 'key_ca_cert_dir', # str | ||
| 41 | 'key_ssl_context', # instance of ssl.SSLContext or urllib3.util.ssl_.SSLContext | ||
| 42 | 'key_maxsize', # int | ||
| 43 | 'key_headers', # dict | ||
| 44 | 'key__proxy', # parsed proxy url | ||
| 45 | 'key__proxy_headers', # dict | ||
| 46 | 'key_socket_options', # list of (level (int), optname (int), value (int or str)) tuples | ||
| 47 | 'key__socks_options', # dict | ||
| 48 | 'key_assert_hostname', # bool or string | ||
| 49 | 'key_assert_fingerprint', # str | ||
| 50 | ) | ||
| 51 | |||
| 52 | #: The namedtuple class used to construct keys for the connection pool. | ||
| 53 | #: All custom key schemes should include the fields in this key at a minimum. | ||
| 54 | PoolKey = collections.namedtuple('PoolKey', _key_fields) | ||
| 55 | |||
| 56 | |||
| 57 | def _default_key_normalizer(key_class, request_context): | ||
| 58 | """ | ||
| 59 | Create a pool key out of a request context dictionary. | ||
| 60 | |||
| 61 | According to RFC 3986, both the scheme and host are case-insensitive. | ||
| 62 | Therefore, this function normalizes both before constructing the pool | ||
| 63 | key for an HTTPS request. If you wish to change this behaviour, provide | ||
| 64 | alternate callables to ``key_fn_by_scheme``. | ||
| 65 | |||
| 66 | :param key_class: | ||
| 67 | The class to use when constructing the key. This should be a namedtuple | ||
| 68 | with the ``scheme`` and ``host`` keys at a minimum. | ||
| 69 | :type key_class: namedtuple | ||
| 70 | :param request_context: | ||
| 71 | A dictionary-like object that contains the context for a request. | ||
| 72 | :type request_context: dict | ||
| 73 | |||
| 74 | :return: A namedtuple that can be used as a connection pool key. | ||
| 75 | :rtype: PoolKey | ||
| 76 | """ | ||
| 77 | # Since we mutate the dictionary, make a copy first | ||
| 78 | context = request_context.copy() | ||
| 79 | context['scheme'] = context['scheme'].lower() | ||
| 80 | context['host'] = context['host'].lower() | ||
| 81 | |||
| 82 | # These are both dictionaries and need to be transformed into frozensets | ||
| 83 | for key in ('headers', '_proxy_headers', '_socks_options'): | ||
| 84 | if key in context and context[key] is not None: | ||
| 85 | context[key] = frozenset(context[key].items()) | ||
| 86 | |||
| 87 | # The socket_options key may be a list and needs to be transformed into a | ||
| 88 | # tuple. | ||
| 89 | socket_opts = context.get('socket_options') | ||
| 90 | if socket_opts is not None: | ||
| 91 | context['socket_options'] = tuple(socket_opts) | ||
| 92 | |||
| 93 | # Map the kwargs to the names in the namedtuple - this is necessary since | ||
| 94 | # namedtuples can't have fields starting with '_'. | ||
| 95 | for key in list(context.keys()): | ||
| 96 | context['key_' + key] = context.pop(key) | ||
| 97 | |||
| 98 | # Default to ``None`` for keys missing from the context | ||
| 99 | for field in key_class._fields: | ||
| 100 | if field not in context: | ||
| 101 | context[field] = None | ||
| 102 | |||
| 103 | return key_class(**context) | ||
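# Editor's note (illustrative, not upstream code): contexts that differ only
# in case normalize to the same PoolKey and therefore share one pool:
#     _default_key_normalizer(PoolKey, {'scheme': 'HTTP', 'host': 'Example.com'})
#     _default_key_normalizer(PoolKey, {'scheme': 'http', 'host': 'example.com'})
# both yield PoolKey(key_scheme='http', key_host='example.com',
# key_port=None, ...) with every field missing from the context set to None.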
| 104 | |||
| 105 | |||
| 106 | #: A dictionary that maps a scheme to a callable that creates a pool key. | ||
| 107 | #: This can be used to alter the way pool keys are constructed, if desired. | ||
| 108 | #: Each PoolManager makes a copy of this dictionary so they can be configured | ||
| 109 | #: globally here, or individually on the instance. | ||
| 110 | key_fn_by_scheme = { | ||
| 111 | 'http': functools.partial(_default_key_normalizer, PoolKey), | ||
| 112 | 'https': functools.partial(_default_key_normalizer, PoolKey), | ||
| 113 | } | ||
| 114 | |||
| 115 | pool_classes_by_scheme = { | ||
| 116 | 'http': HTTPConnectionPool, | ||
| 117 | 'https': HTTPSConnectionPool, | ||
| 118 | } | ||
| 119 | |||
| 120 | |||
| 121 | class PoolManager(RequestMethods): | ||
| 122 | """ | ||
| 123 | Allows for arbitrary requests while transparently keeping track of | ||
| 124 | necessary connection pools for you. | ||
| 125 | |||
| 126 | :param num_pools: | ||
| 127 | Number of connection pools to cache before discarding the least | ||
| 128 | recently used pool. | ||
| 129 | |||
| 130 | :param headers: | ||
| 131 | Headers to include with all requests, unless other headers are given | ||
| 132 | explicitly. | ||
| 133 | |||
| 134 | :param \\**connection_pool_kw: | ||
| 135 | Additional parameters are used to create fresh | ||
| 136 | :class:`urllib3.connectionpool.ConnectionPool` instances. | ||
| 137 | |||
| 138 | Example:: | ||
| 139 | |||
| 140 | >>> manager = PoolManager(num_pools=2) | ||
| 141 | >>> r = manager.request('GET', 'http://google.com/') | ||
| 142 | >>> r = manager.request('GET', 'http://google.com/mail') | ||
| 143 | >>> r = manager.request('GET', 'http://yahoo.com/') | ||
| 144 | >>> len(manager.pools) | ||
| 145 | 2 | ||
| 146 | |||
| 147 | """ | ||
| 148 | |||
| 149 | proxy = None | ||
| 150 | |||
| 151 | def __init__(self, num_pools=10, headers=None, **connection_pool_kw): | ||
| 152 | RequestMethods.__init__(self, headers) | ||
| 153 | self.connection_pool_kw = connection_pool_kw | ||
| 154 | self.pools = RecentlyUsedContainer(num_pools, | ||
| 155 | dispose_func=lambda p: p.close()) | ||
| 156 | |||
| 157 | # Locally set the pool classes and keys so other PoolManagers can | ||
| 158 | # override them. | ||
| 159 | self.pool_classes_by_scheme = pool_classes_by_scheme | ||
| 160 | self.key_fn_by_scheme = key_fn_by_scheme.copy() | ||
| 161 | |||
| 162 | def __enter__(self): | ||
| 163 | return self | ||
| 164 | |||
| 165 | def __exit__(self, exc_type, exc_val, exc_tb): | ||
| 166 | self.clear() | ||
| 167 | # Return False to re-raise any potential exceptions | ||
| 168 | return False | ||
| 169 | |||
| 170 | def _new_pool(self, scheme, host, port, request_context=None): | ||
| 171 | """ | ||
| 172 | Create a new :class:`ConnectionPool` based on host, port, scheme, and | ||
| 173 | any additional pool keyword arguments. | ||
| 174 | |||
| 175 | If ``request_context`` is provided, it is provided as keyword arguments | ||
| 176 | to the pool class used. This method is used to actually create the | ||
| 177 | connection pools handed out by :meth:`connection_from_url` and | ||
| 178 | companion methods. It is intended to be overridden for customization. | ||
| 179 | """ | ||
| 180 | pool_cls = self.pool_classes_by_scheme[scheme] | ||
| 181 | if request_context is None: | ||
| 182 | request_context = self.connection_pool_kw.copy() | ||
| 183 | |||
| 184 | # Although the context has everything necessary to create the pool, | ||
| 185 | # this function has historically only used the scheme, host, and port | ||
| 186 | # in the positional args. When an API change is acceptable these can | ||
| 187 | # be removed. | ||
| 188 | for key in ('scheme', 'host', 'port'): | ||
| 189 | request_context.pop(key, None) | ||
| 190 | |||
| 191 | if scheme == 'http': | ||
| 192 | for kw in SSL_KEYWORDS: | ||
| 193 | request_context.pop(kw, None) | ||
| 194 | |||
| 195 | return pool_cls(host, port, **request_context) | ||
| 196 | |||
| 197 | def clear(self): | ||
| 198 | """ | ||
| 199 | Empty our store of pools and direct them all to close. | ||
| 200 | |||
| 201 | This will not affect in-flight connections, but they will not be | ||
| 202 | re-used after completion. | ||
| 203 | """ | ||
| 204 | self.pools.clear() | ||
| 205 | |||
| 206 | def connection_from_host(self, host, port=None, scheme='http', pool_kwargs=None): | ||
| 207 | """ | ||
| 208 | Get a :class:`ConnectionPool` based on the host, port, and scheme. | ||
| 209 | |||
| 210 | If ``port`` isn't given, it will be derived from the ``scheme`` using | ||
| 211 | ``urllib3.connectionpool.port_by_scheme``. If ``pool_kwargs`` is | ||
| 212 | provided, it is merged with the instance's ``connection_pool_kw`` | ||
| 213 | variable and used to create the new connection pool, if one is | ||
| 214 | needed. | ||
| 215 | """ | ||
| 216 | |||
| 217 | if not host: | ||
| 218 | raise LocationValueError("No host specified.") | ||
| 219 | |||
| 220 | request_context = self._merge_pool_kwargs(pool_kwargs) | ||
| 221 | request_context['scheme'] = scheme or 'http' | ||
| 222 | if not port: | ||
| 223 | port = port_by_scheme.get(request_context['scheme'].lower(), 80) | ||
| 224 | request_context['port'] = port | ||
| 225 | request_context['host'] = host | ||
| 226 | |||
| 227 | return self.connection_from_context(request_context) | ||
| 228 | |||
| 229 | def connection_from_context(self, request_context): | ||
| 230 | """ | ||
| 231 | Get a :class:`ConnectionPool` based on the request context. | ||
| 232 | |||
| 233 | ``request_context`` must at least contain the ``scheme`` key and its | ||
| 234 | value must be a key in ``key_fn_by_scheme`` instance variable. | ||
| 235 | """ | ||
| 236 | scheme = request_context['scheme'].lower() | ||
| 237 | pool_key_constructor = self.key_fn_by_scheme[scheme] | ||
| 238 | pool_key = pool_key_constructor(request_context) | ||
| 239 | |||
| 240 | return self.connection_from_pool_key(pool_key, request_context=request_context) | ||
| 241 | |||
| 242 | def connection_from_pool_key(self, pool_key, request_context=None): | ||
| 243 | """ | ||
| 244 | Get a :class:`ConnectionPool` based on the provided pool key. | ||
| 245 | |||
| 246 | ``pool_key`` should be a namedtuple that only contains immutable | ||
| 247 | objects. At a minimum it must have the ``scheme``, ``host``, and | ||
| 248 | ``port`` fields. | ||
| 249 | """ | ||
| 250 | with self.pools.lock: | ||
| 251 | # If the scheme, host, or port doesn't match existing open | ||
| 252 | # connections, open a new ConnectionPool. | ||
| 253 | pool = self.pools.get(pool_key) | ||
| 254 | if pool: | ||
| 255 | return pool | ||
| 256 | |||
| 257 | # Make a fresh ConnectionPool of the desired type | ||
| 258 | scheme = request_context['scheme'] | ||
| 259 | host = request_context['host'] | ||
| 260 | port = request_context['port'] | ||
| 261 | pool = self._new_pool(scheme, host, port, request_context=request_context) | ||
| 262 | self.pools[pool_key] = pool | ||
| 263 | |||
| 264 | return pool | ||
| 265 | |||
| 266 | def connection_from_url(self, url, pool_kwargs=None): | ||
| 267 | """ | ||
| 268 | Similar to :func:`urllib3.connectionpool.connection_from_url`. | ||
| 269 | |||
| 270 | If ``pool_kwargs`` is not provided and a new pool needs to be | ||
| 271 | constructed, ``self.connection_pool_kw`` is used to initialize | ||
| 272 | the :class:`urllib3.connectionpool.ConnectionPool`. If ``pool_kwargs`` | ||
| 273 | is provided, it is used instead. Note that if a new pool does not | ||
| 274 | need to be created for the request, the provided ``pool_kwargs`` are | ||
| 275 | not used. | ||
| 276 | """ | ||
| 277 | u = parse_url(url) | ||
| 278 | return self.connection_from_host(u.host, port=u.port, scheme=u.scheme, | ||
| 279 | pool_kwargs=pool_kwargs) | ||
| 280 | |||
| 281 | def _merge_pool_kwargs(self, override): | ||
| 282 | """ | ||
| 283 | Merge a dictionary of override values for self.connection_pool_kw. | ||
| 284 | |||
| 285 | This does not modify self.connection_pool_kw and returns a new dict. | ||
| 286 | Any keys in the override dictionary with a value of ``None`` are | ||
| 287 | removed from the merged dictionary. | ||
| 288 | """ | ||
| 289 | base_pool_kwargs = self.connection_pool_kw.copy() | ||
| 290 | if override: | ||
| 291 | for key, value in override.items(): | ||
| 292 | if value is None: | ||
| 293 | try: | ||
| 294 | del base_pool_kwargs[key] | ||
| 295 | except KeyError: | ||
| 296 | pass | ||
| 297 | else: | ||
| 298 | base_pool_kwargs[key] = value | ||
| 299 | return base_pool_kwargs | ||
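# Editor's note (illustrative, not upstream code): with
# connection_pool_kw={'timeout': 5, 'retries': 2},
#     _merge_pool_kwargs({'retries': None, 'block': True})
# returns {'timeout': 5, 'block': True}: a None override deletes the key,
# other overrides are added, and self.connection_pool_kw is untouched.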
| 300 | |||
| 301 | def urlopen(self, method, url, redirect=True, **kw): | ||
| 302 | """ | ||
| 303 | Same as :meth:`urllib3.connectionpool.HTTPConnectionPool.urlopen` | ||
| 304 | with custom cross-host redirect logic and only sends the request-uri | ||
| 305 | portion of the ``url``. | ||
| 306 | |||
| 307 | The given ``url`` parameter must be absolute, such that an appropriate | ||
| 308 | :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it. | ||
| 309 | """ | ||
| 310 | u = parse_url(url) | ||
| 311 | conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme) | ||
| 312 | |||
| 313 | kw['assert_same_host'] = False | ||
| 314 | kw['redirect'] = False | ||
| 315 | if 'headers' not in kw: | ||
| 316 | kw['headers'] = self.headers | ||
| 317 | |||
| 318 | if self.proxy is not None and u.scheme == "http": | ||
| 319 | response = conn.urlopen(method, url, **kw) | ||
| 320 | else: | ||
| 321 | response = conn.urlopen(method, u.request_uri, **kw) | ||
| 322 | |||
| 323 | redirect_location = redirect and response.get_redirect_location() | ||
| 324 | if not redirect_location: | ||
| 325 | return response | ||
| 326 | |||
| 327 | # Support relative URLs for redirecting. | ||
| 328 | redirect_location = urljoin(url, redirect_location) | ||
| 329 | |||
| 330 | # RFC 7231, Section 6.4.4 | ||
| 331 | if response.status == 303: | ||
| 332 | method = 'GET' | ||
| 333 | |||
| 334 | retries = kw.get('retries') | ||
| 335 | if not isinstance(retries, Retry): | ||
| 336 | retries = Retry.from_int(retries, redirect=redirect) | ||
| 337 | |||
| 338 | try: | ||
| 339 | retries = retries.increment(method, url, response=response, _pool=conn) | ||
| 340 | except MaxRetryError: | ||
| 341 | if retries.raise_on_redirect: | ||
| 342 | raise | ||
| 343 | return response | ||
| 344 | |||
| 345 | kw['retries'] = retries | ||
| 346 | kw['redirect'] = redirect | ||
| 347 | |||
| 348 | log.info("Redirecting %s -> %s", url, redirect_location) | ||
| 349 | return self.urlopen(method, redirect_location, **kw) | ||
| 350 | |||
| 351 | |||
| 352 | class ProxyManager(PoolManager): | ||
| 353 | """ | ||
| 354 | Behaves just like :class:`PoolManager`, but sends all requests through | ||
| 355 | the defined proxy, using the CONNECT method for HTTPS URLs. | ||
| 356 | |||
| 357 | :param proxy_url: | ||
| 358 | The URL of the proxy to be used. | ||
| 359 | |||
| 360 | :param proxy_headers: | ||
| 361 | A dictionary containing headers that will be sent to the proxy. In case | ||
| 362 | of HTTP they are being sent with each request, while in the | ||
| 363 | HTTPS/CONNECT case they are sent only once. Could be used for proxy | ||
| 364 | authentication. | ||
| 365 | |||
| 366 | Example: | ||
| 367 | >>> proxy = urllib3.ProxyManager('http://localhost:3128/') | ||
| 368 | >>> r1 = proxy.request('GET', 'http://google.com/') | ||
| 369 | >>> r2 = proxy.request('GET', 'http://httpbin.org/') | ||
| 370 | >>> len(proxy.pools) | ||
| 371 | 1 | ||
| 372 | >>> r3 = proxy.request('GET', 'https://httpbin.org/') | ||
| 373 | >>> r4 = proxy.request('GET', 'https://twitter.com/') | ||
| 374 | >>> len(proxy.pools) | ||
| 375 | 3 | ||
| 376 | |||
| 377 | """ | ||
| 378 | |||
| 379 | def __init__(self, proxy_url, num_pools=10, headers=None, | ||
| 380 | proxy_headers=None, **connection_pool_kw): | ||
| 381 | |||
| 382 | if isinstance(proxy_url, HTTPConnectionPool): | ||
| 383 | proxy_url = '%s://%s:%i' % (proxy_url.scheme, proxy_url.host, | ||
| 384 | proxy_url.port) | ||
| 385 | proxy = parse_url(proxy_url) | ||
| 386 | if not proxy.port: | ||
| 387 | port = port_by_scheme.get(proxy.scheme, 80) | ||
| 388 | proxy = proxy._replace(port=port) | ||
| 389 | |||
| 390 | if proxy.scheme not in ("http", "https"): | ||
| 391 | raise ProxySchemeUnknown(proxy.scheme) | ||
| 392 | |||
| 393 | self.proxy = proxy | ||
| 394 | self.proxy_headers = proxy_headers or {} | ||
| 395 | |||
| 396 | connection_pool_kw['_proxy'] = self.proxy | ||
| 397 | connection_pool_kw['_proxy_headers'] = self.proxy_headers | ||
| 398 | |||
| 399 | super(ProxyManager, self).__init__( | ||
| 400 | num_pools, headers, **connection_pool_kw) | ||
| 401 | |||
| 402 | def connection_from_host(self, host, port=None, scheme='http', pool_kwargs=None): | ||
| 403 | if scheme == "https": | ||
| 404 | return super(ProxyManager, self).connection_from_host( | ||
| 405 | host, port, scheme, pool_kwargs=pool_kwargs) | ||
| 406 | |||
| 407 | return super(ProxyManager, self).connection_from_host( | ||
| 408 | self.proxy.host, self.proxy.port, self.proxy.scheme, pool_kwargs=pool_kwargs) | ||
| 409 | |||
| 410 | def _set_proxy_headers(self, url, headers=None): | ||
| 411 | """ | ||
| 412 | Sets headers needed by proxies: specifically, the Accept and Host | ||
| 413 | headers. Only sets headers not provided by the user. | ||
| 414 | """ | ||
| 415 | headers_ = {'Accept': '*/*'} | ||
| 416 | |||
| 417 | netloc = parse_url(url).netloc | ||
| 418 | if netloc: | ||
| 419 | headers_['Host'] = netloc | ||
| 420 | |||
| 421 | if headers: | ||
| 422 | headers_.update(headers) | ||
| 423 | return headers_ | ||
| 424 | |||
| 425 | def urlopen(self, method, url, redirect=True, **kw): | ||
| 426 | "Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute." | ||
| 427 | u = parse_url(url) | ||
| 428 | |||
| 429 | if u.scheme == "http": | ||
| 430 | # For proxied HTTPS requests, httplib sets the necessary headers | ||
| 431 | # on the CONNECT to the proxy. For HTTP, we'll definitely | ||
| 432 | # need to set 'Host' at the very least. | ||
| 433 | headers = kw.get('headers', self.headers) | ||
| 434 | kw['headers'] = self._set_proxy_headers(url, headers) | ||
| 435 | |||
| 436 | return super(ProxyManager, self).urlopen(method, url, redirect=redirect, **kw) | ||
| 437 | |||
| 438 | |||
| 439 | def proxy_from_url(url, **kw): | ||
| 440 | return ProxyManager(proxy_url=url, **kw) | ||
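Editor's note: a short usage sketch tying this module together; the proxy address and request URLs are placeholder assumptions, not values from the diff:

    from pip._vendor.urllib3.poolmanager import PoolManager, proxy_from_url

    # One manager multiplexes pools per (scheme, host, port, ...) PoolKey,
    # evicting the least recently used pool beyond num_pools.
    with PoolManager(num_pools=5, headers={'User-Agent': 'example/1.0'}) as http:
        r = http.request('GET', 'http://httpbin.org/get')
        print(r.status)

    # ProxyManager sends plain-HTTP requests through the proxy's pool and
    # uses CONNECT (one pool per destination) for HTTPS.
    proxy = proxy_from_url('http://localhost:3128/')
    r = proxy.request('GET', 'http://httpbin.org/get')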
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/request.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/request.py new file mode 100644 index 0000000..9d789d6 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/request.py | |||
| @@ -0,0 +1,148 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | |||
| 3 | from .filepost import encode_multipart_formdata | ||
| 4 | from .packages.six.moves.urllib.parse import urlencode | ||
| 5 | |||
| 6 | |||
| 7 | __all__ = ['RequestMethods'] | ||
| 8 | |||
| 9 | |||
| 10 | class RequestMethods(object): | ||
| 11 | """ | ||
| 12 | Convenience mixin for classes who implement a :meth:`urlopen` method, such | ||
| 13 | as :class:`~urllib3.connectionpool.HTTPConnectionPool` and | ||
| 14 | :class:`~urllib3.poolmanager.PoolManager`. | ||
| 15 | |||
| 16 | Provides behavior for making common types of HTTP request methods and | ||
| 17 | decides which type of request field encoding to use. | ||
| 18 | |||
| 19 | Specifically, | ||
| 20 | |||
| 21 | :meth:`.request_encode_url` is for sending requests whose fields are | ||
| 22 | encoded in the URL (such as GET, HEAD, DELETE). | ||
| 23 | |||
| 24 | :meth:`.request_encode_body` is for sending requests whose fields are | ||
| 25 | encoded in the *body* of the request using multipart or www-form-urlencoded | ||
| 26 | (such as for POST, PUT, PATCH). | ||
| 27 | |||
| 28 | :meth:`.request` is for making any kind of request; it will look up the | ||
| 29 | appropriate encoding format and use one of the above two methods to make | ||
| 30 | the request. | ||
| 31 | |||
| 32 | Initializer parameters: | ||
| 33 | |||
| 34 | :param headers: | ||
| 35 | Headers to include with all requests, unless other headers are given | ||
| 36 | explicitly. | ||
| 37 | """ | ||
| 38 | |||
| 39 | _encode_url_methods = set(['DELETE', 'GET', 'HEAD', 'OPTIONS']) | ||
| 40 | |||
| 41 | def __init__(self, headers=None): | ||
| 42 | self.headers = headers or {} | ||
| 43 | |||
| 44 | def urlopen(self, method, url, body=None, headers=None, | ||
| 45 | encode_multipart=True, multipart_boundary=None, | ||
| 46 | **kw): # Abstract | ||
| 47 | raise NotImplementedError("Classes extending RequestMethods must implement " | ||
| 48 | "their own ``urlopen`` method.") | ||
| 49 | |||
| 50 | def request(self, method, url, fields=None, headers=None, **urlopen_kw): | ||
| 51 | """ | ||
| 52 | Make a request using :meth:`urlopen` with the appropriate encoding of | ||
| 53 | ``fields`` based on the ``method`` used. | ||
| 54 | |||
| 55 | This is a convenience method that requires the least amount of manual | ||
| 56 | effort. It can be used in most situations, while still having the | ||
| 57 | option to drop down to more specific methods when necessary, such as | ||
| 58 | :meth:`request_encode_url`, :meth:`request_encode_body`, | ||
| 59 | or even the lowest level :meth:`urlopen`. | ||
| 60 | """ | ||
| 61 | method = method.upper() | ||
| 62 | |||
| 63 | if method in self._encode_url_methods: | ||
| 64 | return self.request_encode_url(method, url, fields=fields, | ||
| 65 | headers=headers, | ||
| 66 | **urlopen_kw) | ||
| 67 | else: | ||
| 68 | return self.request_encode_body(method, url, fields=fields, | ||
| 69 | headers=headers, | ||
| 70 | **urlopen_kw) | ||
| 71 | |||
| 72 | def request_encode_url(self, method, url, fields=None, headers=None, | ||
| 73 | **urlopen_kw): | ||
| 74 | """ | ||
| 75 | Make a request using :meth:`urlopen` with the ``fields`` encoded in | ||
| 76 | the url. This is useful for request methods like GET, HEAD, DELETE, etc. | ||
| 77 | """ | ||
| 78 | if headers is None: | ||
| 79 | headers = self.headers | ||
| 80 | |||
| 81 | extra_kw = {'headers': headers} | ||
| 82 | extra_kw.update(urlopen_kw) | ||
| 83 | |||
| 84 | if fields: | ||
| 85 | url += '?' + urlencode(fields) | ||
| 86 | |||
| 87 | return self.urlopen(method, url, **extra_kw) | ||
| 88 | |||
| 89 | def request_encode_body(self, method, url, fields=None, headers=None, | ||
| 90 | encode_multipart=True, multipart_boundary=None, | ||
| 91 | **urlopen_kw): | ||
| 92 | """ | ||
| 93 | Make a request using :meth:`urlopen` with the ``fields`` encoded in | ||
| 94 | the body. This is useful for request methods like POST, PUT, PATCH, etc. | ||
| 95 | |||
| 96 | When ``encode_multipart=True`` (default), then | ||
| 97 | :meth:`urllib3.filepost.encode_multipart_formdata` is used to encode | ||
| 98 | the payload with the appropriate content type. Otherwise | ||
| 99 | :meth:`urllib.urlencode` is used with the | ||
| 100 | 'application/x-www-form-urlencoded' content type. | ||
| 101 | |||
| 102 | Multipart encoding must be used when posting files, and it's reasonably | ||
| 103 | safe to use it at other times too. However, it may break request | ||
| 104 | signing, such as with OAuth. | ||
| 105 | |||
| 106 | Supports an optional ``fields`` parameter of key/value strings AND | ||
| 107 | key/filetuple. A filetuple is a (filename, data, MIME type) tuple where | ||
| 108 | the MIME type is optional. For example:: | ||
| 109 | |||
| 110 | fields = { | ||
| 111 | 'foo': 'bar', | ||
| 112 | 'fakefile': ('foofile.txt', 'contents of foofile'), | ||
| 113 | 'realfile': ('barfile.txt', open('realfile').read()), | ||
| 114 | 'typedfile': ('bazfile.bin', open('bazfile').read(), | ||
| 115 | 'image/jpeg'), | ||
| 116 | 'nonamefile': 'contents of nonamefile field', | ||
| 117 | } | ||
| 118 | |||
| 119 | When uploading a file, providing a filename (the first parameter of the | ||
| 120 | tuple) is optional but recommended to best mimic the behavior of browsers. | ||
| 121 | |||
| 122 | Note that if ``headers`` are supplied, the 'Content-Type' header will | ||
| 123 | be overwritten because it depends on the dynamic random boundary string | ||
| 124 | which is used to compose the body of the request. The random boundary | ||
| 125 | string can be explicitly set with the ``multipart_boundary`` parameter. | ||
| 126 | """ | ||
| 127 | if headers is None: | ||
| 128 | headers = self.headers | ||
| 129 | |||
| 130 | extra_kw = {'headers': {}} | ||
| 131 | |||
| 132 | if fields: | ||
| 133 | if 'body' in urlopen_kw: | ||
| 134 | raise TypeError( | ||
| 135 | "request got values for both 'fields' and 'body', can only specify one.") | ||
| 136 | |||
| 137 | if encode_multipart: | ||
| 138 | body, content_type = encode_multipart_formdata(fields, boundary=multipart_boundary) | ||
| 139 | else: | ||
| 140 | body, content_type = urlencode(fields), 'application/x-www-form-urlencoded' | ||
| 141 | |||
| 142 | extra_kw['body'] = body | ||
| 143 | extra_kw['headers'] = {'Content-Type': content_type} | ||
| 144 | |||
| 145 | extra_kw['headers'].update(headers) | ||
| 146 | extra_kw.update(urlopen_kw) | ||
| 147 | |||
| 148 | return self.urlopen(method, url, **extra_kw) | ||
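Editor's note: the encoding split documented in ``RequestMethods`` above, sketched against a ``PoolManager`` (which supplies the concrete ``urlopen()``); the URLs are placeholder assumptions:

    from pip._vendor.urllib3 import PoolManager

    http = PoolManager()

    # GET -> request_encode_url: fields are appended to the query string.
    r = http.request('GET', 'http://httpbin.org/get', fields={'q': 'pip'})

    # POST -> request_encode_body: multipart by default, so file tuples work.
    r = http.request('POST', 'http://httpbin.org/post', fields={
        'foo': 'bar',
        'upload': ('hello.txt', 'contents of hello.txt', 'text/plain'),
    })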
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/response.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/response.py new file mode 100644 index 0000000..54799ba --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/response.py | |||
| @@ -0,0 +1,626 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | from contextlib import contextmanager | ||
| 3 | import zlib | ||
| 4 | import io | ||
| 5 | import logging | ||
| 6 | from socket import timeout as SocketTimeout | ||
| 7 | from socket import error as SocketError | ||
| 8 | |||
| 9 | from ._collections import HTTPHeaderDict | ||
| 10 | from .exceptions import ( | ||
| 11 | BodyNotHttplibCompatible, ProtocolError, DecodeError, ReadTimeoutError, | ||
| 12 | ResponseNotChunked, IncompleteRead, InvalidHeader | ||
| 13 | ) | ||
| 14 | from .packages.six import string_types as basestring, binary_type, PY3 | ||
| 15 | from .packages.six.moves import http_client as httplib | ||
| 16 | from .connection import HTTPException, BaseSSLError | ||
| 17 | from .util.response import is_fp_closed, is_response_to_head | ||
| 18 | |||
| 19 | log = logging.getLogger(__name__) | ||
| 20 | |||
| 21 | |||
| 22 | class DeflateDecoder(object): | ||
| 23 | |||
| 24 | def __init__(self): | ||
| 25 | self._first_try = True | ||
| 26 | self._data = binary_type() | ||
| 27 | self._obj = zlib.decompressobj() | ||
| 28 | |||
| 29 | def __getattr__(self, name): | ||
| 30 | return getattr(self._obj, name) | ||
| 31 | |||
| 32 | def decompress(self, data): | ||
| 33 | if not data: | ||
| 34 | return data | ||
| 35 | |||
| 36 | if not self._first_try: | ||
| 37 | return self._obj.decompress(data) | ||
| 38 | |||
| 39 | self._data += data | ||
| 40 | try: | ||
| 41 | decompressed = self._obj.decompress(data) | ||
| 42 | if decompressed: | ||
| 43 | self._first_try = False | ||
| 44 | self._data = None | ||
| 45 | return decompressed | ||
| 46 | except zlib.error: | ||
| 47 | self._first_try = False | ||
| 48 | self._obj = zlib.decompressobj(-zlib.MAX_WBITS) | ||
| 49 | try: | ||
| 50 | return self.decompress(self._data) | ||
| 51 | finally: | ||
| 52 | self._data = None | ||
| 53 | |||
| 54 | |||
| 55 | class GzipDecoder(object): | ||
| 56 | |||
| 57 | def __init__(self): | ||
| 58 | self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS) | ||
| 59 | |||
| 60 | def __getattr__(self, name): | ||
| 61 | return getattr(self._obj, name) | ||
| 62 | |||
| 63 | def decompress(self, data): | ||
| 64 | if not data: | ||
| 65 | return data | ||
| 66 | return self._obj.decompress(data) | ||
| 67 | |||
| 68 | |||
| 69 | def _get_decoder(mode): | ||
| 70 | if mode == 'gzip': | ||
| 71 | return GzipDecoder() | ||
| 72 | |||
| 73 | return DeflateDecoder() | ||
| 74 | |||
| 75 | |||
| 76 | class HTTPResponse(io.IOBase): | ||
| 77 | """ | ||
| 78 | HTTP Response container. | ||
| 79 | |||
| 80 | Backwards-compatible to httplib's HTTPResponse but the response ``body`` is | ||
| 81 | loaded and decoded on-demand when the ``data`` property is accessed. This | ||
| 82 | class is also compatible with the Python standard library's :mod:`io` | ||
| 83 | module, and can hence be treated as a readable object in the context of that | ||
| 84 | framework. | ||
| 85 | |||
| 86 | Extra parameters for behaviour not present in httplib.HTTPResponse: | ||
| 87 | |||
| 88 | :param preload_content: | ||
| 89 | If True, the response's body will be preloaded during construction. | ||
| 90 | |||
| 91 | :param decode_content: | ||
| 92 | If True, the body is decoded on read according to specific | ||
| 93 | content-encodings named in the headers (like 'gzip' and 'deflate'); | ||
| 94 | if False, decoding is skipped and raw data is returned instead. | ||
| 95 | |||
| 96 | :param original_response: | ||
| 97 | When this HTTPResponse wrapper is generated from an httplib.HTTPResponse | ||
| 98 | object, it's convenient to include the original for debug purposes. It's | ||
| 99 | otherwise unused. | ||
| 100 | |||
| 101 | :param retries: | ||
| 102 | The retries contains the last :class:`~urllib3.util.retry.Retry` that | ||
| 103 | was used during the request. | ||
| 104 | |||
| 105 | :param enforce_content_length: | ||
| 106 | Enforce content length checking. Body returned by server must match | ||
| 107 | value of Content-Length header, if present. Otherwise, raise error. | ||
| 108 | """ | ||
| 109 | |||
| 110 | CONTENT_DECODERS = ['gzip', 'deflate'] | ||
| 111 | REDIRECT_STATUSES = [301, 302, 303, 307, 308] | ||
| 112 | |||
| 113 | def __init__(self, body='', headers=None, status=0, version=0, reason=None, | ||
| 114 | strict=0, preload_content=True, decode_content=True, | ||
| 115 | original_response=None, pool=None, connection=None, | ||
| 116 | retries=None, enforce_content_length=False, request_method=None): | ||
| 117 | |||
| 118 | if isinstance(headers, HTTPHeaderDict): | ||
| 119 | self.headers = headers | ||
| 120 | else: | ||
| 121 | self.headers = HTTPHeaderDict(headers) | ||
| 122 | self.status = status | ||
| 123 | self.version = version | ||
| 124 | self.reason = reason | ||
| 125 | self.strict = strict | ||
| 126 | self.decode_content = decode_content | ||
| 127 | self.retries = retries | ||
| 128 | self.enforce_content_length = enforce_content_length | ||
| 129 | |||
| 130 | self._decoder = None | ||
| 131 | self._body = None | ||
| 132 | self._fp = None | ||
| 133 | self._original_response = original_response | ||
| 134 | self._fp_bytes_read = 0 | ||
| 135 | |||
| 136 | if body and isinstance(body, (basestring, binary_type)): | ||
| 137 | self._body = body | ||
| 138 | |||
| 139 | self._pool = pool | ||
| 140 | self._connection = connection | ||
| 141 | |||
| 142 | if hasattr(body, 'read'): | ||
| 143 | self._fp = body | ||
| 144 | |||
| 145 | # Are we using the chunked-style of transfer encoding? | ||
| 146 | self.chunked = False | ||
| 147 | self.chunk_left = None | ||
| 148 | tr_enc = self.headers.get('transfer-encoding', '').lower() | ||
| 149 | # Don't incur the penalty of creating a list and then discarding it | ||
| 150 | encodings = (enc.strip() for enc in tr_enc.split(",")) | ||
| 151 | if "chunked" in encodings: | ||
| 152 | self.chunked = True | ||
| 153 | |||
| 154 | # Determine length of response | ||
| 155 | self.length_remaining = self._init_length(request_method) | ||
| 156 | |||
| 157 | # If requested, preload the body. | ||
| 158 | if preload_content and not self._body: | ||
| 159 | self._body = self.read(decode_content=decode_content) | ||
| 160 | |||
| 161 | def get_redirect_location(self): | ||
| 162 | """ | ||
| 163 | Should we redirect and where to? | ||
| 164 | |||
| 165 | :returns: Truthy redirect location string if we got a redirect status | ||
| 166 | code and valid location. ``None`` if redirect status and no | ||
| 167 | location. ``False`` if not a redirect status code. | ||
| 168 | """ | ||
| 169 | if self.status in self.REDIRECT_STATUSES: | ||
| 170 | return self.headers.get('location') | ||
| 171 | |||
| 172 | return False | ||
| 173 | |||
| 174 | def release_conn(self): | ||
| 175 | if not self._pool or not self._connection: | ||
| 176 | return | ||
| 177 | |||
| 178 | self._pool._put_conn(self._connection) | ||
| 179 | self._connection = None | ||
| 180 | |||
| 181 | @property | ||
| 182 | def data(self): | ||
| 183 | # For backwards-compat with urllib3 0.4 and earlier. | ||
| 184 | if self._body: | ||
| 185 | return self._body | ||
| 186 | |||
| 187 | if self._fp: | ||
| 188 | return self.read(cache_content=True) | ||
| 189 | |||
| 190 | @property | ||
| 191 | def connection(self): | ||
| 192 | return self._connection | ||
| 193 | |||
| 194 | def tell(self): | ||
| 195 | """ | ||
| 196 | Obtain the number of bytes pulled over the wire so far. May differ from | ||
| 197 | the amount of content returned by :meth:`HTTPResponse.read` if bytes | ||
| 198 | are encoded on the wire (e.g., compressed). | ||
| 199 | """ | ||
| 200 | return self._fp_bytes_read | ||
| 201 | |||
| 202 | def _init_length(self, request_method): | ||
| 203 | """ | ||
| 204 | Set initial length value for Response content if available. | ||
| 205 | """ | ||
| 206 | length = self.headers.get('content-length') | ||
| 207 | |||
| 208 | if length is not None and self.chunked: | ||
| 209 | # This Response will fail with an IncompleteRead if it can't be | ||
| 210 | # received as chunked. This method falls back to attempt reading | ||
| 211 | # the response before raising an exception. | ||
| 212 | log.warning("Received response with both Content-Length and " | ||
| 213 | "Transfer-Encoding set. This is expressly forbidden " | ||
| 214 | "by RFC 7230 sec 3.3.2. Ignoring Content-Length and " | ||
| 215 | "attempting to process response as Transfer-Encoding: " | ||
| 216 | "chunked.") | ||
| 217 | return None | ||
| 218 | |||
| 219 | elif length is not None: | ||
| 220 | try: | ||
| 221 | # RFC 7230 section 3.3.2 specifies multiple content lengths can | ||
| 222 | # be sent in a single Content-Length header | ||
| 223 | # (e.g. Content-Length: 42, 42). This line ensures the values | ||
| 224 | # are all valid ints and that as long as the `set` length is 1, | ||
| 225 | # all values are the same. Otherwise, the header is invalid. | ||
| 226 | lengths = set([int(val) for val in length.split(',')]) | ||
| 227 | if len(lengths) > 1: | ||
| 228 | raise InvalidHeader("Content-Length contained multiple " | ||
| 229 | "unmatching values (%s)" % length) | ||
| 230 | length = lengths.pop() | ||
| 231 | except ValueError: | ||
| 232 | length = None | ||
| 233 | else: | ||
| 234 | if length < 0: | ||
| 235 | length = None | ||
| 236 | |||
| 237 | # Convert status to int for comparison | ||
| 238 | # In some cases, httplib returns a status of "_UNKNOWN" | ||
| 239 | try: | ||
| 240 | status = int(self.status) | ||
| 241 | except ValueError: | ||
| 242 | status = 0 | ||
| 243 | |||
| 244 | # Check for responses that shouldn't include a body | ||
| 245 | if status in (204, 304) or 100 <= status < 200 or request_method == 'HEAD': | ||
| 246 | length = 0 | ||
| 247 | |||
| 248 | return length | ||
| 249 | |||
| 250 | def _init_decoder(self): | ||
| 251 | """ | ||
| 252 | Set-up the _decoder attribute if necessary. | ||
| 253 | """ | ||
| 254 | # Note: content-encoding value should be case-insensitive, per RFC 7230 | ||
| 255 | # Section 3.2 | ||
| 256 | content_encoding = self.headers.get('content-encoding', '').lower() | ||
| 257 | if self._decoder is None and content_encoding in self.CONTENT_DECODERS: | ||
| 258 | self._decoder = _get_decoder(content_encoding) | ||
| 259 | |||
| 260 | def _decode(self, data, decode_content, flush_decoder): | ||
| 261 | """ | ||
| 262 | Decode the data passed in and potentially flush the decoder. | ||
| 263 | """ | ||
| 264 | try: | ||
| 265 | if decode_content and self._decoder: | ||
| 266 | data = self._decoder.decompress(data) | ||
| 267 | except (IOError, zlib.error) as e: | ||
| 268 | content_encoding = self.headers.get('content-encoding', '').lower() | ||
| 269 | raise DecodeError( | ||
| 270 | "Received response with content-encoding: %s, but " | ||
| 271 | "failed to decode it." % content_encoding, e) | ||
| 272 | |||
| 273 | if flush_decoder and decode_content: | ||
| 274 | data += self._flush_decoder() | ||
| 275 | |||
| 276 | return data | ||
| 277 | |||
| 278 | def _flush_decoder(self): | ||
| 279 | """ | ||
| 280 | Flushes the decoder. Should only be called if the decoder is actually | ||
| 281 | being used. | ||
| 282 | """ | ||
| 283 | if self._decoder: | ||
| 284 | buf = self._decoder.decompress(b'') | ||
| 285 | return buf + self._decoder.flush() | ||
| 286 | |||
| 287 | return b'' | ||
| 288 | |||
| 289 | @contextmanager | ||
| 290 | def _error_catcher(self): | ||
| 291 | """ | ||
| 292 | Catch low-level python exceptions, instead re-raising urllib3 | ||
| 293 | variants, so that low-level exceptions are not leaked in the | ||
| 294 | high-level api. | ||
| 295 | |||
| 296 | On exit, release the connection back to the pool. | ||
| 297 | """ | ||
| 298 | clean_exit = False | ||
| 299 | |||
| 300 | try: | ||
| 301 | try: | ||
| 302 | yield | ||
| 303 | |||
| 304 | except SocketTimeout: | ||
| 305 | # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but | ||
| 306 | # there is yet no clean way to get at it from this context. | ||
| 307 | raise ReadTimeoutError(self._pool, None, 'Read timed out.') | ||
| 308 | |||
| 309 | except BaseSSLError as e: | ||
| 310 | # FIXME: Is there a better way to differentiate between SSLErrors? | ||
| 311 | if 'read operation timed out' not in str(e): # Defensive: | ||
| 312 | # This shouldn't happen but just in case we're missing an edge | ||
| 313 | # case, let's avoid swallowing SSL errors. | ||
| 314 | raise | ||
| 315 | |||
| 316 | raise ReadTimeoutError(self._pool, None, 'Read timed out.') | ||
| 317 | |||
| 318 | except (HTTPException, SocketError) as e: | ||
| 319 | # This includes IncompleteRead. | ||
| 320 | raise ProtocolError('Connection broken: %r' % e, e) | ||
| 321 | |||
| 322 | # If no exception is thrown, we should avoid cleaning up | ||
| 323 | # unnecessarily. | ||
| 324 | clean_exit = True | ||
| 325 | finally: | ||
| 326 | # If we didn't terminate cleanly, we need to throw away our | ||
| 327 | # connection. | ||
| 328 | if not clean_exit: | ||
| 329 | # The response may not be closed but we're not going to use it | ||
| 330 | # anymore so close it now to ensure that the connection is | ||
| 331 | # released back to the pool. | ||
| 332 | if self._original_response: | ||
| 333 | self._original_response.close() | ||
| 334 | |||
| 335 | # Closing the response may not actually be sufficient to close | ||
| 336 | # everything, so if we have a hold of the connection close that | ||
| 337 | # too. | ||
| 338 | if self._connection: | ||
| 339 | self._connection.close() | ||
| 340 | |||
| 341 | # If we hold the original response but it's closed now, we should | ||
| 342 | # return the connection back to the pool. | ||
| 343 | if self._original_response and self._original_response.isclosed(): | ||
| 344 | self.release_conn() | ||
| 345 | |||
| 346 | def read(self, amt=None, decode_content=None, cache_content=False): | ||
| 347 | """ | ||
| 348 | Similar to :meth:`httplib.HTTPResponse.read`, but with two additional | ||
| 349 | parameters: ``decode_content`` and ``cache_content``. | ||
| 350 | |||
| 351 | :param amt: | ||
| 352 | How much of the content to read. If specified, caching is skipped | ||
| 353 | because it doesn't make sense to cache partial content as the full | ||
| 354 | response. | ||
| 355 | |||
| 356 | :param decode_content: | ||
| 357 | If True, will attempt to decode the body based on the | ||
| 358 | 'content-encoding' header. | ||
| 359 | |||
| 360 | :param cache_content: | ||
| 361 | If True, will save the returned data such that the same result is | ||
| 362 | returned regardless of the state of the underlying file object. This | ||
| 363 | is useful if you want the ``.data`` property to continue working | ||
| 364 | after having ``.read()`` the file object. (Overridden if ``amt`` is | ||
| 365 | set.) | ||
| 366 | """ | ||
| 367 | self._init_decoder() | ||
| 368 | if decode_content is None: | ||
| 369 | decode_content = self.decode_content | ||
| 370 | |||
| 371 | if self._fp is None: | ||
| 372 | return | ||
| 373 | |||
| 374 | flush_decoder = False | ||
| 375 | data = None | ||
| 376 | |||
| 377 | with self._error_catcher(): | ||
| 378 | if amt is None: | ||
| 379 | # cStringIO doesn't like amt=None | ||
| 380 | data = self._fp.read() | ||
| 381 | flush_decoder = True | ||
| 382 | else: | ||
| 383 | cache_content = False | ||
| 384 | data = self._fp.read(amt) | ||
| 385 | if amt != 0 and not data: # Platform-specific: Buggy versions of Python. | ||
| 386 | # Close the connection when no data is returned | ||
| 387 | # | ||
| 388 | # This is redundant to what httplib/http.client _should_ | ||
| 389 | # already do. However, versions of python released before | ||
| 390 | # December 15, 2012 (http://bugs.python.org/issue16298) do | ||
| 391 | # not properly close the connection in all cases. There is | ||
| 392 | # no harm in redundantly calling close. | ||
| 393 | self._fp.close() | ||
| 394 | flush_decoder = True | ||
| 395 | if self.enforce_content_length and self.length_remaining not in (0, None): | ||
| 396 | # This is an edge case that httplib failed to cover due | ||
| 397 | # to concerns of backward compatibility. We're | ||
| 398 | # addressing it here to make sure IncompleteRead is | ||
| 399 | # raised during streaming, so all calls with incorrect | ||
| 400 | # Content-Length are caught. | ||
| 401 | raise IncompleteRead(self._fp_bytes_read, self.length_remaining) | ||
| 402 | |||
| 403 | if data: | ||
| 404 | self._fp_bytes_read += len(data) | ||
| 405 | if self.length_remaining is not None: | ||
| 406 | self.length_remaining -= len(data) | ||
| 407 | |||
| 408 | data = self._decode(data, decode_content, flush_decoder) | ||
| 409 | |||
| 410 | if cache_content: | ||
| 411 | self._body = data | ||
| 412 | |||
| 413 | return data | ||
| 414 | |||
| 415 | def stream(self, amt=2**16, decode_content=None): | ||
| 416 | """ | ||
| 417 | A generator wrapper for the read() method. A call will block until | ||
| 418 | ``amt`` bytes have been read from the connection or until the | ||
| 419 | connection is closed. | ||
| 420 | |||
| 421 | :param amt: | ||
| 422 | How much of the content to read. The generator will return at most | ||
| 423 | this much data per iteration, but may return less. This is particularly | ||
| 424 | likely when using compressed data. However, the empty string will | ||
| 425 | never be returned. | ||
| 426 | |||
| 427 | :param decode_content: | ||
| 428 | If True, will attempt to decode the body based on the | ||
| 429 | 'content-encoding' header. | ||
| 430 | """ | ||
| 431 | if self.chunked and self.supports_chunked_reads(): | ||
| 432 | for line in self.read_chunked(amt, decode_content=decode_content): | ||
| 433 | yield line | ||
| 434 | else: | ||
| 435 | while not is_fp_closed(self._fp): | ||
| 436 | data = self.read(amt=amt, decode_content=decode_content) | ||
| 437 | |||
| 438 | if data: | ||
| 439 | yield data | ||
| 440 | |||
| 441 | @classmethod | ||
| 442 | def from_httplib(ResponseCls, r, **response_kw): | ||
| 443 | """ | ||
| 444 | Given an :class:`httplib.HTTPResponse` instance ``r``, return a | ||
| 445 | corresponding :class:`urllib3.response.HTTPResponse` object. | ||
| 446 | |||
| 447 | Remaining parameters are passed to the HTTPResponse constructor, along | ||
| 448 | with ``original_response=r``. | ||
| 449 | """ | ||
| 450 | headers = r.msg | ||
| 451 | |||
| 452 | if not isinstance(headers, HTTPHeaderDict): | ||
| 453 | if PY3: # Python 3 | ||
| 454 | headers = HTTPHeaderDict(headers.items()) | ||
| 455 | else: # Python 2 | ||
| 456 | headers = HTTPHeaderDict.from_httplib(headers) | ||
| 457 | |||
| 458 | # HTTPResponse objects in Python 3 don't have a .strict attribute | ||
| 459 | strict = getattr(r, 'strict', 0) | ||
| 460 | resp = ResponseCls(body=r, | ||
| 461 | headers=headers, | ||
| 462 | status=r.status, | ||
| 463 | version=r.version, | ||
| 464 | reason=r.reason, | ||
| 465 | strict=strict, | ||
| 466 | original_response=r, | ||
| 467 | **response_kw) | ||
| 468 | return resp | ||
| 469 | |||
| 470 | # Backwards-compatibility methods for httplib.HTTPResponse | ||
| 471 | def getheaders(self): | ||
| 472 | return self.headers | ||
| 473 | |||
| 474 | def getheader(self, name, default=None): | ||
| 475 | return self.headers.get(name, default) | ||
| 476 | |||
| 477 | # Backwards compatibility for http.cookiejar | ||
| 478 | def info(self): | ||
| 479 | return self.headers | ||
| 480 | |||
| 481 | # Overrides from io.IOBase | ||
| 482 | def close(self): | ||
| 483 | if not self.closed: | ||
| 484 | self._fp.close() | ||
| 485 | |||
| 486 | if self._connection: | ||
| 487 | self._connection.close() | ||
| 488 | |||
| 489 | @property | ||
| 490 | def closed(self): | ||
| 491 | if self._fp is None: | ||
| 492 | return True | ||
| 493 | elif hasattr(self._fp, 'isclosed'): | ||
| 494 | return self._fp.isclosed() | ||
| 495 | elif hasattr(self._fp, 'closed'): | ||
| 496 | return self._fp.closed | ||
| 497 | else: | ||
| 498 | return True | ||
| 499 | |||
| 500 | def fileno(self): | ||
| 501 | if self._fp is None: | ||
| 502 | raise IOError("HTTPResponse has no file to get a fileno from") | ||
| 503 | elif hasattr(self._fp, "fileno"): | ||
| 504 | return self._fp.fileno() | ||
| 505 | else: | ||
| 506 | raise IOError("The file-like object this HTTPResponse is wrapped " | ||
| 507 | "around has no file descriptor") | ||
| 508 | |||
| 509 | def flush(self): | ||
| 510 | if self._fp is not None and hasattr(self._fp, 'flush'): | ||
| 511 | return self._fp.flush() | ||
| 512 | |||
| 513 | def readable(self): | ||
| 514 | # This method is required for `io` module compatibility. | ||
| 515 | return True | ||
| 516 | |||
| 517 | def readinto(self, b): | ||
| 518 | # This method is required for `io` module compatibility. | ||
| 519 | temp = self.read(len(b)) | ||
| 520 | if len(temp) == 0: | ||
| 521 | return 0 | ||
| 522 | else: | ||
| 523 | b[:len(temp)] = temp | ||
| 524 | return len(temp) | ||
| 525 | |||
| 526 | def supports_chunked_reads(self): | ||
| 527 | """ | ||
| 528 | Checks if the underlying file-like object looks like a | ||
| 529 | httplib.HTTPResponse object. We do this by testing for the fp | ||
| 530 | attribute. If it is present we assume it returns raw chunks as | ||
| 531 | processed by read_chunked(). | ||
| 532 | """ | ||
| 533 | return hasattr(self._fp, 'fp') | ||
| 534 | |||
| 535 | def _update_chunk_length(self): | ||
| 536 | # First, we'll figure out the length of a chunk and then | ||
| 537 | # we'll try to read it from socket. | ||
| 538 | if self.chunk_left is not None: | ||
| 539 | return | ||
| 540 | line = self._fp.fp.readline() | ||
| 541 | line = line.split(b';', 1)[0] | ||
| 542 | try: | ||
| 543 | self.chunk_left = int(line, 16) | ||
| 544 | except ValueError: | ||
| 545 | # Invalid chunked protocol response, abort. | ||
| 546 | self.close() | ||
| 547 | raise httplib.IncompleteRead(line) | ||
| 548 | |||
| 549 | def _handle_chunk(self, amt): | ||
| 550 | returned_chunk = None | ||
| 551 | if amt is None: | ||
| 552 | chunk = self._fp._safe_read(self.chunk_left) | ||
| 553 | returned_chunk = chunk | ||
| 554 | self._fp._safe_read(2) # Toss the CRLF at the end of the chunk. | ||
| 555 | self.chunk_left = None | ||
| 556 | elif amt < self.chunk_left: | ||
| 557 | value = self._fp._safe_read(amt) | ||
| 558 | self.chunk_left = self.chunk_left - amt | ||
| 559 | returned_chunk = value | ||
| 560 | elif amt == self.chunk_left: | ||
| 561 | value = self._fp._safe_read(amt) | ||
| 562 | self._fp._safe_read(2) # Toss the CRLF at the end of the chunk. | ||
| 563 | self.chunk_left = None | ||
| 564 | returned_chunk = value | ||
| 565 | else: # amt > self.chunk_left | ||
| 566 | returned_chunk = self._fp._safe_read(self.chunk_left) | ||
| 567 | self._fp._safe_read(2) # Toss the CRLF at the end of the chunk. | ||
| 568 | self.chunk_left = None | ||
| 569 | return returned_chunk | ||
| 570 | |||
| 571 | def read_chunked(self, amt=None, decode_content=None): | ||
| 572 | """ | ||
| 573 | Similar to :meth:`HTTPResponse.read`, but with an additional | ||
| 574 | parameter: ``decode_content``. | ||
| 575 | |||
| 576 | :param decode_content: | ||
| 577 | If True, will attempt to decode the body based on the | ||
| 578 | 'content-encoding' header. | ||
| 579 | """ | ||
| 580 | self._init_decoder() | ||
| 581 | # FIXME: Rewrite this method and make it a class with better-structured logic. | ||
| 582 | if not self.chunked: | ||
| 583 | raise ResponseNotChunked( | ||
| 584 | "Response is not chunked. " | ||
| 585 | "Header 'transfer-encoding: chunked' is missing.") | ||
| 586 | if not self.supports_chunked_reads(): | ||
| 587 | raise BodyNotHttplibCompatible( | ||
| 588 | "Body should be httplib.HTTPResponse like. " | ||
| 589 | "It should have have an fp attribute which returns raw chunks.") | ||
| 590 | |||
| 591 | # Don't bother reading the body of a HEAD request. | ||
| 592 | if self._original_response and is_response_to_head(self._original_response): | ||
| 593 | self._original_response.close() | ||
| 594 | return | ||
| 595 | |||
| 596 | with self._error_catcher(): | ||
| 597 | while True: | ||
| 598 | self._update_chunk_length() | ||
| 599 | if self.chunk_left == 0: | ||
| 600 | break | ||
| 601 | chunk = self._handle_chunk(amt) | ||
| 602 | decoded = self._decode(chunk, decode_content=decode_content, | ||
| 603 | flush_decoder=False) | ||
| 604 | if decoded: | ||
| 605 | yield decoded | ||
| 606 | |||
| 607 | if decode_content: | ||
| 608 | # On CPython and PyPy, we should never need to flush the | ||
| 609 | # decoder. However, on Jython we *might* need to, so | ||
| 610 | # lets defensively do it anyway. | ||
| 611 | decoded = self._flush_decoder() | ||
| 612 | if decoded: # Platform-specific: Jython. | ||
| 613 | yield decoded | ||
| 614 | |||
| 615 | # Chunk content ends with \r\n: discard it. | ||
| 616 | while True: | ||
| 617 | line = self._fp.fp.readline() | ||
| 618 | if not line: | ||
| 619 | # Some sites may not end with '\r\n'. | ||
| 620 | break | ||
| 621 | if line == b'\r\n': | ||
| 622 | break | ||
| 623 | |||
| 624 | # We read everything; close the "file". | ||
| 625 | if self._original_response: | ||
| 626 | self._original_response.close() | ||
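The read()/stream()/read_chunked() trio above is the whole body-consumption surface of HTTPResponse. A minimal usage sketch (assumptions: the top-level urllib3 PoolManager API, with http://example.com/ as a placeholder URL); passing preload_content=False is what defers the body so that stream() drives read() in 64 KiB slices::

    import urllib3

    http = urllib3.PoolManager()
    # preload_content=False leaves the body unread so stream() pulls it lazily.
    resp = http.request('GET', 'http://example.com/', preload_content=False)
    for chunk in resp.stream(2**16, decode_content=True):
        pass  # consume each decoded chunk here
    # Once read() hits EOF the connection is returned to the pool via
    # release_conn(); calling it again here is a safe no-op.
    resp.release_conn()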
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/util/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/util/__init__.py new file mode 100644 index 0000000..a84b005 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/util/__init__.py | |||
| @@ -0,0 +1,54 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | # For backwards compatibility, provide imports that used to be here. | ||
| 3 | from .connection import is_connection_dropped | ||
| 4 | from .request import make_headers | ||
| 5 | from .response import is_fp_closed | ||
| 6 | from .ssl_ import ( | ||
| 7 | SSLContext, | ||
| 8 | HAS_SNI, | ||
| 9 | IS_PYOPENSSL, | ||
| 10 | IS_SECURETRANSPORT, | ||
| 11 | assert_fingerprint, | ||
| 12 | resolve_cert_reqs, | ||
| 13 | resolve_ssl_version, | ||
| 14 | ssl_wrap_socket, | ||
| 15 | ) | ||
| 16 | from .timeout import ( | ||
| 17 | current_time, | ||
| 18 | Timeout, | ||
| 19 | ) | ||
| 20 | |||
| 21 | from .retry import Retry | ||
| 22 | from .url import ( | ||
| 23 | get_host, | ||
| 24 | parse_url, | ||
| 25 | split_first, | ||
| 26 | Url, | ||
| 27 | ) | ||
| 28 | from .wait import ( | ||
| 29 | wait_for_read, | ||
| 30 | wait_for_write | ||
| 31 | ) | ||
| 32 | |||
| 33 | __all__ = ( | ||
| 34 | 'HAS_SNI', | ||
| 35 | 'IS_PYOPENSSL', | ||
| 36 | 'IS_SECURETRANSPORT', | ||
| 37 | 'SSLContext', | ||
| 38 | 'Retry', | ||
| 39 | 'Timeout', | ||
| 40 | 'Url', | ||
| 41 | 'assert_fingerprint', | ||
| 42 | 'current_time', | ||
| 43 | 'is_connection_dropped', | ||
| 44 | 'is_fp_closed', | ||
| 45 | 'get_host', | ||
| 46 | 'parse_url', | ||
| 47 | 'make_headers', | ||
| 48 | 'resolve_cert_reqs', | ||
| 49 | 'resolve_ssl_version', | ||
| 50 | 'split_first', | ||
| 51 | 'ssl_wrap_socket', | ||
| 52 | 'wait_for_read', | ||
| 53 | 'wait_for_write' | ||
| 54 | ) | ||
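These re-exports exist so callers can keep importing everything from urllib3.util directly instead of the individual submodules. A quick sketch, under the assumption that this vendored copy is importable on the path::

    from pip._vendor.urllib3.util import Retry, Timeout, make_headers
    from pip._vendor.urllib3.util import retry as _retry_module

    # The package-level name and the submodule attribute are the same object.
    assert Retry is _retry_module.Retry
    print(make_headers(keep_alive=True, accept_encoding=True))
    # -> {'accept-encoding': 'gzip,deflate', 'connection': 'keep-alive'}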
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/util/connection.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/util/connection.py new file mode 100644 index 0000000..31ecd83 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/util/connection.py | |||
| @@ -0,0 +1,130 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | import socket | ||
| 3 | from .wait import wait_for_read | ||
| 4 | from .selectors import HAS_SELECT, SelectorError | ||
| 5 | |||
| 6 | |||
| 7 | def is_connection_dropped(conn): # Platform-specific | ||
| 8 | """ | ||
| 9 | Returns True if the connection is dropped and should be closed. | ||
| 10 | |||
| 11 | :param conn: | ||
| 12 | :class:`httplib.HTTPConnection` object. | ||
| 13 | |||
| 14 | Note: For platforms like AppEngine, this will always return ``False`` to | ||
| 15 | let the platform handle connection recycling transparently for us. | ||
| 16 | """ | ||
| 17 | sock = getattr(conn, 'sock', False) | ||
| 18 | if sock is False: # Platform-specific: AppEngine | ||
| 19 | return False | ||
| 20 | if sock is None: # Connection already closed (such as by httplib). | ||
| 21 | return True | ||
| 22 | |||
| 23 | if not HAS_SELECT: | ||
| 24 | return False | ||
| 25 | |||
| 26 | try: | ||
| 27 | return bool(wait_for_read(sock, timeout=0.0)) | ||
| 28 | except SelectorError: | ||
| 29 | return True | ||
| 30 | |||
| 31 | |||
| 32 | # This function is copied from socket.py in the Python 2.7 standard | ||
| 33 | # library test suite. Added to its signature is only `socket_options`. | ||
| 34 | # One additional modification is that we avoid binding to IPv6 servers | ||
| 35 | # discovered in DNS if the system doesn't have IPv6 functionality. | ||
| 36 | def create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, | ||
| 37 | source_address=None, socket_options=None): | ||
| 38 | """Connect to *address* and return the socket object. | ||
| 39 | |||
| 40 | Convenience function. Connect to *address* (a 2-tuple ``(host, | ||
| 41 | port)``) and return the socket object. Passing the optional | ||
| 42 | *timeout* parameter will set the timeout on the socket instance | ||
| 43 | before attempting to connect. If no *timeout* is supplied, the | ||
| 44 | global default timeout setting returned by :func:`getdefaulttimeout` | ||
| 45 | is used. If *source_address* is set it must be a tuple of (host, port) | ||
| 46 | for the socket to bind as a source address before making the connection. | ||
| 47 | A host of '' or port 0 tells the OS to use the default. | ||
| 48 | """ | ||
| 49 | |||
| 50 | host, port = address | ||
| 51 | if host.startswith('['): | ||
| 52 | host = host.strip('[]') | ||
| 53 | err = None | ||
| 54 | |||
| 55 | # Using the value from allowed_gai_family() in the context of getaddrinfo lets | ||
| 56 | # us select whether to work with IPv4 DNS records, IPv6 records, or both. | ||
| 57 | # The original create_connection function always returns all records. | ||
| 58 | family = allowed_gai_family() | ||
| 59 | |||
| 60 | for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM): | ||
| 61 | af, socktype, proto, canonname, sa = res | ||
| 62 | sock = None | ||
| 63 | try: | ||
| 64 | sock = socket.socket(af, socktype, proto) | ||
| 65 | |||
| 66 | # If provided, set socket level options before connecting. | ||
| 67 | _set_socket_options(sock, socket_options) | ||
| 68 | |||
| 69 | if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT: | ||
| 70 | sock.settimeout(timeout) | ||
| 71 | if source_address: | ||
| 72 | sock.bind(source_address) | ||
| 73 | sock.connect(sa) | ||
| 74 | return sock | ||
| 75 | |||
| 76 | except socket.error as e: | ||
| 77 | err = e | ||
| 78 | if sock is not None: | ||
| 79 | sock.close() | ||
| 80 | sock = None | ||
| 81 | |||
| 82 | if err is not None: | ||
| 83 | raise err | ||
| 84 | |||
| 85 | raise socket.error("getaddrinfo returned an empty list") | ||
| 86 | |||
| 87 | |||
| 88 | def _set_socket_options(sock, options): | ||
| 89 | if options is None: | ||
| 90 | return | ||
| 91 | |||
| 92 | for opt in options: | ||
| 93 | sock.setsockopt(*opt) | ||
| 94 | |||
| 95 | |||
| 96 | def allowed_gai_family(): | ||
| 97 | """This function is designed to work in the context of | ||
| 98 | getaddrinfo, where family=socket.AF_UNSPEC is the default and | ||
| 99 | will perform a DNS search for both IPv6 and IPv4 records.""" | ||
| 100 | |||
| 101 | family = socket.AF_INET | ||
| 102 | if HAS_IPV6: | ||
| 103 | family = socket.AF_UNSPEC | ||
| 104 | return family | ||
| 105 | |||
| 106 | |||
| 107 | def _has_ipv6(host): | ||
| 108 | """ Returns True if the system can bind an IPv6 address. """ | ||
| 109 | sock = None | ||
| 110 | has_ipv6 = False | ||
| 111 | |||
| 112 | if socket.has_ipv6: | ||
| 113 | # has_ipv6 returns true if cPython was compiled with IPv6 support. | ||
| 114 | # It does not tell us if the system has IPv6 support enabled. To | ||
| 115 | # determine that we must bind to an IPv6 address. | ||
| 116 | # https://github.com/shazow/urllib3/pull/611 | ||
| 117 | # https://bugs.python.org/issue658327 | ||
| 118 | try: | ||
| 119 | sock = socket.socket(socket.AF_INET6) | ||
| 120 | sock.bind((host, 0)) | ||
| 121 | has_ipv6 = True | ||
| 122 | except Exception: | ||
| 123 | pass | ||
| 124 | |||
| 125 | if sock: | ||
| 126 | sock.close() | ||
| 127 | return has_ipv6 | ||
| 128 | |||
| 129 | |||
| 130 | HAS_IPV6 = _has_ipv6('::1') | ||
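create_connection() differs from the stdlib original mainly in the socket_options hook and the allowed_gai_family()/HAS_IPV6 gate on getaddrinfo. Each socket_options entry is splatted into sock.setsockopt() before connect(), so enabling TCP_NODELAY, for instance, looks like the sketch below (example.com:80 is a placeholder endpoint)::

    import socket
    from pip._vendor.urllib3.util.connection import create_connection

    sock = create_connection(
        ('example.com', 80),
        timeout=5.0,
        # Each tuple is applied via sock.setsockopt(*opt) before connect().
        socket_options=[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)],
    )
    sock.close()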
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/util/request.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/util/request.py new file mode 100644 index 0000000..22882b8 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/util/request.py | |||
| @@ -0,0 +1,118 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | from base64 import b64encode | ||
| 3 | |||
| 4 | from ..packages.six import b, integer_types | ||
| 5 | from ..exceptions import UnrewindableBodyError | ||
| 6 | |||
| 7 | ACCEPT_ENCODING = 'gzip,deflate' | ||
| 8 | _FAILEDTELL = object() | ||
| 9 | |||
| 10 | |||
| 11 | def make_headers(keep_alive=None, accept_encoding=None, user_agent=None, | ||
| 12 | basic_auth=None, proxy_basic_auth=None, disable_cache=None): | ||
| 13 | """ | ||
| 14 | Shortcuts for generating request headers. | ||
| 15 | |||
| 16 | :param keep_alive: | ||
| 17 | If ``True``, adds 'connection: keep-alive' header. | ||
| 18 | |||
| 19 | :param accept_encoding: | ||
| 20 | Can be a boolean, list, or string. | ||
| 21 | ``True`` translates to 'gzip,deflate'. | ||
| 22 | List will get joined by comma. | ||
| 23 | String will be used as provided. | ||
| 24 | |||
| 25 | :param user_agent: | ||
| 26 | String representing the user-agent you want, such as | ||
| 27 | "python-urllib3/0.6" | ||
| 28 | |||
| 29 | :param basic_auth: | ||
| 30 | Colon-separated username:password string for 'authorization: basic ...' | ||
| 31 | auth header. | ||
| 32 | |||
| 33 | :param proxy_basic_auth: | ||
| 34 | Colon-separated username:password string for 'proxy-authorization: basic ...' | ||
| 35 | auth header. | ||
| 36 | |||
| 37 | :param disable_cache: | ||
| 38 | If ``True``, adds 'cache-control: no-cache' header. | ||
| 39 | |||
| 40 | Example:: | ||
| 41 | |||
| 42 | >>> make_headers(keep_alive=True, user_agent="Batman/1.0") | ||
| 43 | {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'} | ||
| 44 | >>> make_headers(accept_encoding=True) | ||
| 45 | {'accept-encoding': 'gzip,deflate'} | ||
| 46 | """ | ||
| 47 | headers = {} | ||
| 48 | if accept_encoding: | ||
| 49 | if isinstance(accept_encoding, str): | ||
| 50 | pass | ||
| 51 | elif isinstance(accept_encoding, list): | ||
| 52 | accept_encoding = ','.join(accept_encoding) | ||
| 53 | else: | ||
| 54 | accept_encoding = ACCEPT_ENCODING | ||
| 55 | headers['accept-encoding'] = accept_encoding | ||
| 56 | |||
| 57 | if user_agent: | ||
| 58 | headers['user-agent'] = user_agent | ||
| 59 | |||
| 60 | if keep_alive: | ||
| 61 | headers['connection'] = 'keep-alive' | ||
| 62 | |||
| 63 | if basic_auth: | ||
| 64 | headers['authorization'] = 'Basic ' + \ | ||
| 65 | b64encode(b(basic_auth)).decode('utf-8') | ||
| 66 | |||
| 67 | if proxy_basic_auth: | ||
| 68 | headers['proxy-authorization'] = 'Basic ' + \ | ||
| 69 | b64encode(b(proxy_basic_auth)).decode('utf-8') | ||
| 70 | |||
| 71 | if disable_cache: | ||
| 72 | headers['cache-control'] = 'no-cache' | ||
| 73 | |||
| 74 | return headers | ||
| 75 | |||
| 76 | |||
| 77 | def set_file_position(body, pos): | ||
| 78 | """ | ||
| 79 | If a position is provided, move file to that point. | ||
| 80 | Otherwise, we'll attempt to record a position for future use. | ||
| 81 | """ | ||
| 82 | if pos is not None: | ||
| 83 | rewind_body(body, pos) | ||
| 84 | elif getattr(body, 'tell', None) is not None: | ||
| 85 | try: | ||
| 86 | pos = body.tell() | ||
| 87 | except (IOError, OSError): | ||
| 88 | # This differentiates from None, allowing us to catch | ||
| 89 | # a failed `tell()` later when trying to rewind the body. | ||
| 90 | pos = _FAILEDTELL | ||
| 91 | |||
| 92 | return pos | ||
| 93 | |||
| 94 | |||
| 95 | def rewind_body(body, body_pos): | ||
| 96 | """ | ||
| 97 | Attempt to rewind body to a certain position. | ||
| 98 | Primarily used for request redirects and retries. | ||
| 99 | |||
| 100 | :param body: | ||
| 101 | File-like object that supports seek. | ||
| 102 | |||
| 103 | :param int body_pos: | ||
| 104 | Position to seek to in file. | ||
| 105 | """ | ||
| 106 | body_seek = getattr(body, 'seek', None) | ||
| 107 | if body_seek is not None and isinstance(body_pos, integer_types): | ||
| 108 | try: | ||
| 109 | body_seek(body_pos) | ||
| 110 | except (IOError, OSError): | ||
| 111 | raise UnrewindableBodyError("An error occurred when rewinding request " | ||
| 112 | "body for redirect/retry.") | ||
| 113 | elif body_pos is _FAILEDTELL: | ||
| 114 | raise UnrewindableBodyError("Unable to record file position for rewinding " | ||
| 115 | "request body during a redirect/retry.") | ||
| 116 | else: | ||
| 117 | raise ValueError("body_pos must be of type integer, " | ||
| 118 | "instead it was %s." % type(body_pos)) | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/util/response.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/util/response.py new file mode 100644 index 0000000..c2eb49c --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/util/response.py | |||
| @@ -0,0 +1,81 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | from ..packages.six.moves import http_client as httplib | ||
| 3 | |||
| 4 | from ..exceptions import HeaderParsingError | ||
| 5 | |||
| 6 | |||
| 7 | def is_fp_closed(obj): | ||
| 8 | """ | ||
| 9 | Checks whether a given file-like object is closed. | ||
| 10 | |||
| 11 | :param obj: | ||
| 12 | The file-like object to check. | ||
| 13 | """ | ||
| 14 | |||
| 15 | try: | ||
| 16 | # Check `isclosed()` first, in case Python3 doesn't set `closed`. | ||
| 17 | # GH Issue #928 | ||
| 18 | return obj.isclosed() | ||
| 19 | except AttributeError: | ||
| 20 | pass | ||
| 21 | |||
| 22 | try: | ||
| 23 | # Check via the official file-like-object way. | ||
| 24 | return obj.closed | ||
| 25 | except AttributeError: | ||
| 26 | pass | ||
| 27 | |||
| 28 | try: | ||
| 29 | # Check if the object is a container for another file-like object that | ||
| 30 | # gets released on exhaustion (e.g. HTTPResponse). | ||
| 31 | return obj.fp is None | ||
| 32 | except AttributeError: | ||
| 33 | pass | ||
| 34 | |||
| 35 | raise ValueError("Unable to determine whether fp is closed.") | ||
| 36 | |||
| 37 | |||
| 38 | def assert_header_parsing(headers): | ||
| 39 | """ | ||
| 40 | Asserts whether all headers have been successfully parsed. | ||
| 41 | Extracts encountered errors from the result of parsing headers. | ||
| 42 | |||
| 43 | Only works on Python 3. | ||
| 44 | |||
| 45 | :param headers: Headers to verify. | ||
| 46 | :type headers: `httplib.HTTPMessage`. | ||
| 47 | |||
| 48 | :raises urllib3.exceptions.HeaderParsingError: | ||
| 49 | If parsing errors are found. | ||
| 50 | """ | ||
| 51 | |||
| 52 | # This will fail silently if we pass in the wrong kind of parameter. | ||
| 53 | # To make debugging easier add an explicit check. | ||
| 54 | if not isinstance(headers, httplib.HTTPMessage): | ||
| 55 | raise TypeError('expected httplib.HTTPMessage, got {0}.'.format( | ||
| 56 | type(headers))) | ||
| 57 | |||
| 58 | defects = getattr(headers, 'defects', None) | ||
| 59 | get_payload = getattr(headers, 'get_payload', None) | ||
| 60 | |||
| 61 | unparsed_data = None | ||
| 62 | if get_payload: # Platform-specific: Python 3. | ||
| 63 | unparsed_data = get_payload() | ||
| 64 | |||
| 65 | if defects or unparsed_data: | ||
| 66 | raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data) | ||
| 67 | |||
| 68 | |||
| 69 | def is_response_to_head(response): | ||
| 70 | """ | ||
| 71 | Checks whether the request that produced this response was a HEAD request. | ||
| 72 | Handles the quirks of AppEngine. | ||
| 73 | |||
| 74 | :param response: | ||
| 75 | :type response: :class:`httplib.HTTPResponse` | ||
| 76 | """ | ||
| 77 | # FIXME: Can we do this somehow without accessing private httplib _method? | ||
| 78 | method = response._method | ||
| 79 | if isinstance(method, int): # Platform-specific: Appengine | ||
| 80 | return method == 3 | ||
| 81 | return method.upper() == 'HEAD' | ||
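is_fp_closed() tries its three probes in order (isclosed(), .closed, .fp is None) so it can handle both httplib responses and plain file objects, and rejects anything exposing none of them. A tiny sketch with an ordinary in-memory file::

    import io
    from pip._vendor.urllib3.util.response import is_fp_closed

    fp = io.BytesIO(b'data')
    assert is_fp_closed(fp) is False  # no isclosed(), so the .closed probe answers
    fp.close()
    assert is_fp_closed(fp) is True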
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/util/retry.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/util/retry.py new file mode 100644 index 0000000..2a7e8c1 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/util/retry.py | |||
| @@ -0,0 +1,401 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | import time | ||
| 3 | import logging | ||
| 4 | from collections import namedtuple | ||
| 5 | from itertools import takewhile | ||
| 6 | import email | ||
| 7 | import re | ||
| 8 | |||
| 9 | from ..exceptions import ( | ||
| 10 | ConnectTimeoutError, | ||
| 11 | MaxRetryError, | ||
| 12 | ProtocolError, | ||
| 13 | ReadTimeoutError, | ||
| 14 | ResponseError, | ||
| 15 | InvalidHeader, | ||
| 16 | ) | ||
| 17 | from ..packages import six | ||
| 18 | |||
| 19 | |||
| 20 | log = logging.getLogger(__name__) | ||
| 21 | |||
| 22 | # Data structure for representing the metadata of requests that result in a retry. | ||
| 23 | RequestHistory = namedtuple('RequestHistory', ["method", "url", "error", | ||
| 24 | "status", "redirect_location"]) | ||
| 25 | |||
| 26 | |||
| 27 | class Retry(object): | ||
| 28 | """ Retry configuration. | ||
| 29 | |||
| 30 | Each retry attempt will create a new Retry object with updated values, so | ||
| 31 | they can be safely reused. | ||
| 32 | |||
| 33 | Retries can be defined as a default for a pool:: | ||
| 34 | |||
| 35 | retries = Retry(connect=5, read=2, redirect=5) | ||
| 36 | http = PoolManager(retries=retries) | ||
| 37 | response = http.request('GET', 'http://example.com/') | ||
| 38 | |||
| 39 | Or per-request (which overrides the default for the pool):: | ||
| 40 | |||
| 41 | response = http.request('GET', 'http://example.com/', retries=Retry(10)) | ||
| 42 | |||
| 43 | Retries can be disabled by passing ``False``:: | ||
| 44 | |||
| 45 | response = http.request('GET', 'http://example.com/', retries=False) | ||
| 46 | |||
| 47 | Errors will be wrapped in :class:`~urllib3.exceptions.MaxRetryError` unless | ||
| 48 | retries are disabled, in which case the causing exception will be raised. | ||
| 49 | |||
| 50 | :param int total: | ||
| 51 | Total number of retries to allow. Takes precedence over other counts. | ||
| 52 | |||
| 53 | Set to ``None`` to remove this constraint and fall back on other | ||
| 54 | counts. It's a good idea to set this to some sensibly-high value to | ||
| 55 | account for unexpected edge cases and avoid infinite retry loops. | ||
| 56 | |||
| 57 | Set to ``0`` to fail on the first retry. | ||
| 58 | |||
| 59 | Set to ``False`` to disable and imply ``raise_on_redirect=False``. | ||
| 60 | |||
| 61 | :param int connect: | ||
| 62 | How many connection-related errors to retry on. | ||
| 63 | |||
| 64 | These are errors raised before the request is sent to the remote server, | ||
| 65 | which we assume has not triggered the server to process the request. | ||
| 66 | |||
| 67 | Set to ``0`` to fail on the first retry of this type. | ||
| 68 | |||
| 69 | :param int read: | ||
| 70 | How many times to retry on read errors. | ||
| 71 | |||
| 72 | These errors are raised after the request was sent to the server, so the | ||
| 73 | request may have side-effects. | ||
| 74 | |||
| 75 | Set to ``0`` to fail on the first retry of this type. | ||
| 76 | |||
| 77 | :param int redirect: | ||
| 78 | How many redirects to perform. Limit this to avoid infinite redirect | ||
| 79 | loops. | ||
| 80 | |||
| 81 | A redirect is an HTTP response with a status code 301, 302, 303, 307 or | ||
| 82 | 308. | ||
| 83 | |||
| 84 | Set to ``0`` to fail on the first retry of this type. | ||
| 85 | |||
| 86 | Set to ``False`` to disable and imply ``raise_on_redirect=False``. | ||
| 87 | |||
| 88 | :param int status: | ||
| 89 | How many times to retry on bad status codes. | ||
| 90 | |||
| 91 | These are retries made on responses, where status code matches | ||
| 92 | ``status_forcelist``. | ||
| 93 | |||
| 94 | Set to ``0`` to fail on the first retry of this type. | ||
| 95 | |||
| 96 | :param iterable method_whitelist: | ||
| 97 | Set of uppercased HTTP method verbs that we should retry on. | ||
| 98 | |||
| 99 | By default, we only retry on methods which are considered to be | ||
| 100 | idempotent (multiple requests with the same parameters end with the | ||
| 101 | same state). See :attr:`Retry.DEFAULT_METHOD_WHITELIST`. | ||
| 102 | |||
| 103 | Set to a ``False`` value to retry on any verb. | ||
| 104 | |||
| 105 | :param iterable status_forcelist: | ||
| 106 | A set of integer HTTP status codes that we should force a retry on. | ||
| 107 | A retry is initiated if the request method is in ``method_whitelist`` | ||
| 108 | and the response status code is in ``status_forcelist``. | ||
| 109 | |||
| 110 | By default, this is disabled with ``None``. | ||
| 111 | |||
| 112 | :param float backoff_factor: | ||
| 113 | A backoff factor to apply between attempts after the second try | ||
| 114 | (most errors are resolved immediately by a second try without a | ||
| 115 | delay). urllib3 will sleep for:: | ||
| 116 | |||
| 117 | {backoff factor} * (2 ^ ({number of total retries} - 1)) | ||
| 118 | |||
| 119 | seconds. If the backoff_factor is 0.1, then :func:`.sleep` will sleep | ||
| 120 | for [0.0s, 0.2s, 0.4s, ...] between retries. It will never be longer | ||
| 121 | than :attr:`Retry.BACKOFF_MAX`. | ||
| 122 | |||
| 123 | By default, backoff is disabled (set to 0). | ||
| 124 | |||
| 125 | :param bool raise_on_redirect: Whether, if the number of redirects is | ||
| 126 | exhausted, to raise a MaxRetryError, or to return a response with a | ||
| 127 | response code in the 3xx range. | ||
| 128 | |||
| 129 | :param bool raise_on_status: Similar meaning to ``raise_on_redirect``: | ||
| 130 | whether we should raise an exception, or return a response, | ||
| 131 | if status falls in ``status_forcelist`` range and retries have | ||
| 132 | been exhausted. | ||
| 133 | |||
| 134 | :param tuple history: The history of the request encountered during | ||
| 135 | each call to :meth:`~Retry.increment`. The list is in the order | ||
| 136 | the requests occurred. Each list item is of class :class:`RequestHistory`. | ||
| 137 | |||
| 138 | :param bool respect_retry_after_header: | ||
| 139 | Whether to respect Retry-After header on status codes defined as | ||
| 140 | :attr:`Retry.RETRY_AFTER_STATUS_CODES` or not. | ||
| 141 | |||
| 142 | """ | ||
| 143 | |||
| 144 | DEFAULT_METHOD_WHITELIST = frozenset([ | ||
| 145 | 'HEAD', 'GET', 'PUT', 'DELETE', 'OPTIONS', 'TRACE']) | ||
| 146 | |||
| 147 | RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503]) | ||
| 148 | |||
| 149 | #: Maximum backoff time. | ||
| 150 | BACKOFF_MAX = 120 | ||
| 151 | |||
| 152 | def __init__(self, total=10, connect=None, read=None, redirect=None, status=None, | ||
| 153 | method_whitelist=DEFAULT_METHOD_WHITELIST, status_forcelist=None, | ||
| 154 | backoff_factor=0, raise_on_redirect=True, raise_on_status=True, | ||
| 155 | history=None, respect_retry_after_header=True): | ||
| 156 | |||
| 157 | self.total = total | ||
| 158 | self.connect = connect | ||
| 159 | self.read = read | ||
| 160 | self.status = status | ||
| 161 | |||
| 162 | if redirect is False or total is False: | ||
| 163 | redirect = 0 | ||
| 164 | raise_on_redirect = False | ||
| 165 | |||
| 166 | self.redirect = redirect | ||
| 167 | self.status_forcelist = status_forcelist or set() | ||
| 168 | self.method_whitelist = method_whitelist | ||
| 169 | self.backoff_factor = backoff_factor | ||
| 170 | self.raise_on_redirect = raise_on_redirect | ||
| 171 | self.raise_on_status = raise_on_status | ||
| 172 | self.history = history or tuple() | ||
| 173 | self.respect_retry_after_header = respect_retry_after_header | ||
| 174 | |||
| 175 | def new(self, **kw): | ||
| 176 | params = dict( | ||
| 177 | total=self.total, | ||
| 178 | connect=self.connect, read=self.read, redirect=self.redirect, status=self.status, | ||
| 179 | method_whitelist=self.method_whitelist, | ||
| 180 | status_forcelist=self.status_forcelist, | ||
| 181 | backoff_factor=self.backoff_factor, | ||
| 182 | raise_on_redirect=self.raise_on_redirect, | ||
| 183 | raise_on_status=self.raise_on_status, | ||
| 184 | history=self.history, | ||
| 185 | ) | ||
| 186 | params.update(kw) | ||
| 187 | return type(self)(**params) | ||
| 188 | |||
| 189 | @classmethod | ||
| 190 | def from_int(cls, retries, redirect=True, default=None): | ||
| 191 | """ Backwards-compatibility for the old retries format.""" | ||
| 192 | if retries is None: | ||
| 193 | retries = default if default is not None else cls.DEFAULT | ||
| 194 | |||
| 195 | if isinstance(retries, Retry): | ||
| 196 | return retries | ||
| 197 | |||
| 198 | redirect = bool(redirect) and None | ||
| 199 | new_retries = cls(retries, redirect=redirect) | ||
| 200 | log.debug("Converted retries value: %r -> %r", retries, new_retries) | ||
| 201 | return new_retries | ||
| 202 | |||
| 203 | def get_backoff_time(self): | ||
| 204 | """ Formula for computing the current backoff | ||
| 205 | |||
| 206 | :rtype: float | ||
| 207 | """ | ||
| 208 | # We want to consider only the last consecutive errors sequence (Ignore redirects). | ||
| 209 | consecutive_errors_len = len(list(takewhile(lambda x: x.redirect_location is None, | ||
| 210 | reversed(self.history)))) | ||
| 211 | if consecutive_errors_len <= 1: | ||
| 212 | return 0 | ||
| 213 | |||
| 214 | backoff_value = self.backoff_factor * (2 ** (consecutive_errors_len - 1)) | ||
| 215 | return min(self.BACKOFF_MAX, backoff_value) | ||
| 216 | |||
| 217 | def parse_retry_after(self, retry_after): | ||
| 218 | # Whitespace: https://tools.ietf.org/html/rfc7230#section-3.2.4 | ||
| 219 | if re.match(r"^\s*[0-9]+\s*$", retry_after): | ||
| 220 | seconds = int(retry_after) | ||
| 221 | else: | ||
| 222 | retry_date_tuple = email.utils.parsedate(retry_after) | ||
| 223 | if retry_date_tuple is None: | ||
| 224 | raise InvalidHeader("Invalid Retry-After header: %s" % retry_after) | ||
| 225 | retry_date = time.mktime(retry_date_tuple) | ||
| 226 | seconds = retry_date - time.time() | ||
| 227 | |||
| 228 | if seconds < 0: | ||
| 229 | seconds = 0 | ||
| 230 | |||
| 231 | return seconds | ||
| 232 | |||
| 233 | def get_retry_after(self, response): | ||
| 234 | """ Get the value of Retry-After in seconds. """ | ||
| 235 | |||
| 236 | retry_after = response.getheader("Retry-After") | ||
| 237 | |||
| 238 | if retry_after is None: | ||
| 239 | return None | ||
| 240 | |||
| 241 | return self.parse_retry_after(retry_after) | ||
| 242 | |||
| 243 | def sleep_for_retry(self, response=None): | ||
| 244 | retry_after = self.get_retry_after(response) | ||
| 245 | if retry_after: | ||
| 246 | time.sleep(retry_after) | ||
| 247 | return True | ||
| 248 | |||
| 249 | return False | ||
| 250 | |||
| 251 | def _sleep_backoff(self): | ||
| 252 | backoff = self.get_backoff_time() | ||
| 253 | if backoff <= 0: | ||
| 254 | return | ||
| 255 | time.sleep(backoff) | ||
| 256 | |||
| 257 | def sleep(self, response=None): | ||
| 258 | """ Sleep between retry attempts. | ||
| 259 | |||
| 260 | This method will respect a server's ``Retry-After`` response header | ||
| 261 | and sleep the duration of the time requested. If that is not present, it | ||
| 262 | will use an exponential backoff. By default, the backoff factor is 0 and | ||
| 263 | this method will return immediately. | ||
| 264 | """ | ||
| 265 | |||
| 266 | if response: | ||
| 267 | slept = self.sleep_for_retry(response) | ||
| 268 | if slept: | ||
| 269 | return | ||
| 270 | |||
| 271 | self._sleep_backoff() | ||
| 272 | |||
| 273 | def _is_connection_error(self, err): | ||
| 274 | """ Errors when we're fairly sure that the server did not receive the | ||
| 275 | request, so it should be safe to retry. | ||
| 276 | """ | ||
| 277 | return isinstance(err, ConnectTimeoutError) | ||
| 278 | |||
| 279 | def _is_read_error(self, err): | ||
| 280 | """ Errors that occur after the request has been started, so we should | ||
| 281 | assume that the server began processing it. | ||
| 282 | """ | ||
| 283 | return isinstance(err, (ReadTimeoutError, ProtocolError)) | ||
| 284 | |||
| 285 | def _is_method_retryable(self, method): | ||
| 286 | """ Checks if a given HTTP method should be retried upon, depending if | ||
| 287 | it is included on the method whitelist. | ||
| 288 | """ | ||
| 289 | if self.method_whitelist and method.upper() not in self.method_whitelist: | ||
| 290 | return False | ||
| 291 | |||
| 292 | return True | ||
| 293 | |||
| 294 | def is_retry(self, method, status_code, has_retry_after=False): | ||
| 295 | """ Is this method/status code retryable? (Based on whitelists and control | ||
| 296 | variables such as the number of total retries to allow, whether to | ||
| 297 | respect the Retry-After header, whether this header is present, and | ||
| 298 | whether the returned status code is on the list of status codes to | ||
| 299 | be retried upon when the aforementioned header is present) | ||
| 300 | """ | ||
| 301 | if not self._is_method_retryable(method): | ||
| 302 | return False | ||
| 303 | |||
| 304 | if self.status_forcelist and status_code in self.status_forcelist: | ||
| 305 | return True | ||
| 306 | |||
| 307 | return (self.total and self.respect_retry_after_header and | ||
| 308 | has_retry_after and (status_code in self.RETRY_AFTER_STATUS_CODES)) | ||
| 309 | |||
| 310 | def is_exhausted(self): | ||
| 311 | """ Are we out of retries? """ | ||
| 312 | retry_counts = (self.total, self.connect, self.read, self.redirect, self.status) | ||
| 313 | retry_counts = list(filter(None, retry_counts)) | ||
| 314 | if not retry_counts: | ||
| 315 | return False | ||
| 316 | |||
| 317 | return min(retry_counts) < 0 | ||
| 318 | |||
| 319 | def increment(self, method=None, url=None, response=None, error=None, | ||
| 320 | _pool=None, _stacktrace=None): | ||
| 321 | """ Return a new Retry object with incremented retry counters. | ||
| 322 | |||
| 323 | :param response: A response object, or None, if the server did not | ||
| 324 | return a response. | ||
| 325 | :type response: :class:`~urllib3.response.HTTPResponse` | ||
| 326 | :param Exception error: An error encountered during the request, or | ||
| 327 | None if the response was received successfully. | ||
| 328 | |||
| 329 | :return: A new ``Retry`` object. | ||
| 330 | """ | ||
| 331 | if self.total is False and error: | ||
| 332 | # Disabled, indicate to re-raise the error. | ||
| 333 | raise six.reraise(type(error), error, _stacktrace) | ||
| 334 | |||
| 335 | total = self.total | ||
| 336 | if total is not None: | ||
| 337 | total -= 1 | ||
| 338 | |||
| 339 | connect = self.connect | ||
| 340 | read = self.read | ||
| 341 | redirect = self.redirect | ||
| 342 | status_count = self.status | ||
| 343 | cause = 'unknown' | ||
| 344 | status = None | ||
| 345 | redirect_location = None | ||
| 346 | |||
| 347 | if error and self._is_connection_error(error): | ||
| 348 | # Connect retry? | ||
| 349 | if connect is False: | ||
| 350 | raise six.reraise(type(error), error, _stacktrace) | ||
| 351 | elif connect is not None: | ||
| 352 | connect -= 1 | ||
| 353 | |||
| 354 | elif error and self._is_read_error(error): | ||
| 355 | # Read retry? | ||
| 356 | if read is False or not self._is_method_retryable(method): | ||
| 357 | raise six.reraise(type(error), error, _stacktrace) | ||
| 358 | elif read is not None: | ||
| 359 | read -= 1 | ||
| 360 | |||
| 361 | elif response and response.get_redirect_location(): | ||
| 362 | # Redirect retry? | ||
| 363 | if redirect is not None: | ||
| 364 | redirect -= 1 | ||
| 365 | cause = 'too many redirects' | ||
| 366 | redirect_location = response.get_redirect_location() | ||
| 367 | status = response.status | ||
| 368 | |||
| 369 | else: | ||
| 370 | # Incrementing because of a server error like a 500 in | ||
| 371 | # status_forcelist and the given method is in the whitelist | ||
| 372 | cause = ResponseError.GENERIC_ERROR | ||
| 373 | if response and response.status: | ||
| 374 | if status_count is not None: | ||
| 375 | status_count -= 1 | ||
| 376 | cause = ResponseError.SPECIFIC_ERROR.format( | ||
| 377 | status_code=response.status) | ||
| 378 | status = response.status | ||
| 379 | |||
| 380 | history = self.history + (RequestHistory(method, url, error, status, redirect_location),) | ||
| 381 | |||
| 382 | new_retry = self.new( | ||
| 383 | total=total, | ||
| 384 | connect=connect, read=read, redirect=redirect, status=status_count, | ||
| 385 | history=history) | ||
| 386 | |||
| 387 | if new_retry.is_exhausted(): | ||
| 388 | raise MaxRetryError(_pool, url, error or ResponseError(cause)) | ||
| 389 | |||
| 390 | log.debug("Incremented Retry for (url='%s'): %r", url, new_retry) | ||
| 391 | |||
| 392 | return new_retry | ||
| 393 | |||
| 394 | def __repr__(self): | ||
| 395 | return ('{cls.__name__}(total={self.total}, connect={self.connect}, ' | ||
| 396 | 'read={self.read}, redirect={self.redirect}, status={self.status})').format( | ||
| 397 | cls=type(self), self=self) | ||
| 398 | |||
| 399 | |||
| 400 | # For backwards compatibility (equivalent to pre-v1.9): | ||
| 401 | Retry.DEFAULT = Retry(3) | ||
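Worked numbers for get_backoff_time(): with backoff_factor=0.5, the sleeps after consecutive errors are 0, 1.0, 2.0, 4.0, ... seconds, capped at BACKOFF_MAX. A sketch that drives the counter through increment() directly, with no real requests involved::

    from pip._vendor.urllib3.util.retry import Retry

    retry = Retry(total=5, backoff_factor=0.5)
    for n in range(1, 4):
        retry = retry.increment(method='GET', url='/', error=None, response=None)
        # factor * 2**(n - 1) once there is more than one consecutive error:
        print(n, retry.get_backoff_time())  # 1 -> 0, 2 -> 1.0, 3 -> 2.0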
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/util/selectors.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/util/selectors.py new file mode 100644 index 0000000..9f16c66 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/util/selectors.py | |||
| @@ -0,0 +1,581 @@ | |||
| 1 | # Backport of selectors.py from Python 3.5+ to support Python < 3.4 | ||
| 2 | # Also has the behavior specified in PEP 475 which is to retry syscalls | ||
| 3 | # in the case of an EINTR error. This module is required because selectors34 | ||
| 4 | does not follow this behavior and instead returns that no file descriptor | ||
| 5 | # events have occurred rather than retry the syscall. The decision to drop | ||
| 6 | # support for select.devpoll is made to maintain 100% test coverage. | ||
| 7 | |||
| 8 | import errno | ||
| 9 | import math | ||
| 10 | import select | ||
| 11 | import socket | ||
| 12 | import sys | ||
| 13 | import time | ||
| 14 | from collections import namedtuple, Mapping | ||
| 15 | |||
| 16 | try: | ||
| 17 | monotonic = time.monotonic | ||
| 18 | except (AttributeError, ImportError): # Python < 3.3 | ||
| 19 | monotonic = time.time | ||
| 20 | |||
| 21 | EVENT_READ = (1 << 0) | ||
| 22 | EVENT_WRITE = (1 << 1) | ||
| 23 | |||
| 24 | HAS_SELECT = True # Variable that shows whether the platform has a selector. | ||
| 25 | _SYSCALL_SENTINEL = object() # Sentinel in case a system call returns None. | ||
| 26 | _DEFAULT_SELECTOR = None | ||
| 27 | |||
| 28 | |||
| 29 | class SelectorError(Exception): | ||
| 30 | def __init__(self, errcode): | ||
| 31 | super(SelectorError, self).__init__() | ||
| 32 | self.errno = errcode | ||
| 33 | |||
| 34 | def __repr__(self): | ||
| 35 | return "<SelectorError errno={0}>".format(self.errno) | ||
| 36 | |||
| 37 | def __str__(self): | ||
| 38 | return self.__repr__() | ||
| 39 | |||
| 40 | |||
| 41 | def _fileobj_to_fd(fileobj): | ||
| 42 | """ Return a file descriptor from a file object. If | ||
| 43 | given an integer will simply return that integer back. """ | ||
| 44 | if isinstance(fileobj, int): | ||
| 45 | fd = fileobj | ||
| 46 | else: | ||
| 47 | try: | ||
| 48 | fd = int(fileobj.fileno()) | ||
| 49 | except (AttributeError, TypeError, ValueError): | ||
| 50 | raise ValueError("Invalid file object: {0!r}".format(fileobj)) | ||
| 51 | if fd < 0: | ||
| 52 | raise ValueError("Invalid file descriptor: {0}".format(fd)) | ||
| 53 | return fd | ||
| 54 | |||
| 55 | |||
| 56 | # Determine which function to use to wrap system calls because Python 3.5+ | ||
| 57 | # already handles the case when system calls are interrupted. | ||
| 58 | if sys.version_info >= (3, 5): | ||
| 59 | def _syscall_wrapper(func, _, *args, **kwargs): | ||
| 60 | """ This is the short-circuit version of the below logic | ||
| 61 | because in Python 3.5+ all system calls automatically restart | ||
| 62 | and recalculate their timeouts. """ | ||
| 63 | try: | ||
| 64 | return func(*args, **kwargs) | ||
| 65 | except (OSError, IOError, select.error) as e: | ||
| 66 | errcode = None | ||
| 67 | if hasattr(e, "errno"): | ||
| 68 | errcode = e.errno | ||
| 69 | raise SelectorError(errcode) | ||
| 70 | else: | ||
| 71 | def _syscall_wrapper(func, recalc_timeout, *args, **kwargs): | ||
| 72 | """ Wrapper function for syscalls that could fail due to EINTR. | ||
| 73 | All functions should be retried if there is time left in the timeout | ||
| 74 | in accordance with PEP 475. """ | ||
| 75 | timeout = kwargs.get("timeout", None) | ||
| 76 | if timeout is None: | ||
| 77 | expires = None | ||
| 78 | recalc_timeout = False | ||
| 79 | else: | ||
| 80 | timeout = float(timeout) | ||
| 81 | if timeout < 0.0: # Timeout less than 0 treated as no timeout. | ||
| 82 | expires = None | ||
| 83 | else: | ||
| 84 | expires = monotonic() + timeout | ||
| 85 | |||
| 86 | args = list(args) | ||
| 87 | if recalc_timeout and "timeout" not in kwargs: | ||
| 88 | raise ValueError( | ||
| 89 | "Timeout must be in args or kwargs to be recalculated") | ||
| 90 | |||
| 91 | result = _SYSCALL_SENTINEL | ||
| 92 | while result is _SYSCALL_SENTINEL: | ||
| 93 | try: | ||
| 94 | result = func(*args, **kwargs) | ||
| 95 | # OSError is thrown by select.select | ||
| 96 | # IOError is thrown by select.epoll.poll | ||
| 97 | # select.error is thrown by select.poll.poll | ||
| 98 | # Aren't we thankful for Python 3.x rework for exceptions? | ||
| 99 | except (OSError, IOError, select.error) as e: | ||
| 100 | # select.error wasn't a subclass of OSError in the past. | ||
| 101 | errcode = None | ||
| 102 | if hasattr(e, "errno"): | ||
| 103 | errcode = e.errno | ||
| 104 | elif hasattr(e, "args"): | ||
| 105 | errcode = e.args[0] | ||
| 106 | |||
| 107 | # Also test for the Windows equivalent of EINTR. | ||
| 108 | is_interrupt = (errcode == errno.EINTR or (hasattr(errno, "WSAEINTR") and | ||
| 109 | errcode == errno.WSAEINTR)) | ||
| 110 | |||
| 111 | if is_interrupt: | ||
| 112 | if expires is not None: | ||
| 113 | current_time = monotonic() | ||
| 114 | if current_time > expires: | ||
| 115 | raise OSError(errno.ETIMEDOUT, "Connection timed out.") | ||
| 116 | if recalc_timeout: | ||
| 117 | if "timeout" in kwargs: | ||
| 118 | kwargs["timeout"] = expires - current_time | ||
| 119 | continue | ||
| 120 | if errcode: | ||
| 121 | raise SelectorError(errcode) | ||
| 122 | else: | ||
| 123 | raise | ||
| 124 | return result | ||
| 125 | |||
| 126 | |||
| 127 | SelectorKey = namedtuple('SelectorKey', ['fileobj', 'fd', 'events', 'data']) | ||
| 128 | |||
| 129 | |||
| 130 | class _SelectorMapping(Mapping): | ||
| 131 | """ Mapping of file objects to selector keys """ | ||
| 132 | |||
| 133 | def __init__(self, selector): | ||
| 134 | self._selector = selector | ||
| 135 | |||
| 136 | def __len__(self): | ||
| 137 | return len(self._selector._fd_to_key) | ||
| 138 | |||
| 139 | def __getitem__(self, fileobj): | ||
| 140 | try: | ||
| 141 | fd = self._selector._fileobj_lookup(fileobj) | ||
| 142 | return self._selector._fd_to_key[fd] | ||
| 143 | except KeyError: | ||
| 144 | raise KeyError("{0!r} is not registered.".format(fileobj)) | ||
| 145 | |||
| 146 | def __iter__(self): | ||
| 147 | return iter(self._selector._fd_to_key) | ||
| 148 | |||
| 149 | |||
| 150 | class BaseSelector(object): | ||
| 151 | """ Abstract Selector class | ||
| 152 | |||
| 153 | A selector supports registering file objects to be monitored | ||
| 154 | for specific I/O events. | ||
| 155 | |||
| 156 | A file object is a file descriptor or any object with a | ||
| 157 | `fileno()` method. An arbitrary object can be attached to the | ||
| 158 | file object which can be used for example to store context info, | ||
| 159 | a callback, etc. | ||
| 160 | |||
| 161 | A selector can use various implementations (select(), poll(), epoll(), | ||
| 162 | and kqueue()) depending on the platform. The 'DefaultSelector' class uses | ||
| 163 | the most efficient implementation for the current platform. | ||
| 164 | """ | ||
| 165 | def __init__(self): | ||
| 166 | # Maps file descriptors to keys. | ||
| 167 | self._fd_to_key = {} | ||
| 168 | |||
| 169 | # Read-only mapping returned by get_map() | ||
| 170 | self._map = _SelectorMapping(self) | ||
| 171 | |||
| 172 | def _fileobj_lookup(self, fileobj): | ||
| 173 | """ Return a file descriptor from a file object. | ||
| 174 | This wraps _fileobj_to_fd() to do an exhaustive | ||
| 175 | search in case the object is invalid but we still | ||
| 176 | have it in our map. Used by unregister() so we can | ||
| 177 | unregister an object that was previously registered | ||
| 178 | even if it is closed. It is also used by _SelectorMapping | ||
| 179 | """ | ||
| 180 | try: | ||
| 181 | return _fileobj_to_fd(fileobj) | ||
| 182 | except ValueError: | ||
| 183 | |||
| 184 | # Search through all our mapped keys. | ||
| 185 | for key in self._fd_to_key.values(): | ||
| 186 | if key.fileobj is fileobj: | ||
| 187 | return key.fd | ||
| 188 | |||
| 189 | # Raise ValueError after all. | ||
| 190 | raise | ||
| 191 | |||
| 192 | def register(self, fileobj, events, data=None): | ||
| 193 | """ Register a file object for a set of events to monitor. """ | ||
| 194 | if (not events) or (events & ~(EVENT_READ | EVENT_WRITE)): | ||
| 195 | raise ValueError("Invalid events: {0!r}".format(events)) | ||
| 196 | |||
| 197 | key = SelectorKey(fileobj, self._fileobj_lookup(fileobj), events, data) | ||
| 198 | |||
| 199 | if key.fd in self._fd_to_key: | ||
| 200 | raise KeyError("{0!r} (FD {1}) is already registered" | ||
| 201 | .format(fileobj, key.fd)) | ||
| 202 | |||
| 203 | self._fd_to_key[key.fd] = key | ||
| 204 | return key | ||
| 205 | |||
| 206 | def unregister(self, fileobj): | ||
| 207 | """ Unregister a file object from being monitored. """ | ||
| 208 | try: | ||
| 209 | key = self._fd_to_key.pop(self._fileobj_lookup(fileobj)) | ||
| 210 | except KeyError: | ||
| 211 | raise KeyError("{0!r} is not registered".format(fileobj)) | ||
| 212 | |||
| 213 | # Getting the fileno of a closed socket on Windows errors with EBADF. | ||
| 214 | except socket.error as e: # Platform-specific: Windows. | ||
| 215 | if e.errno != errno.EBADF: | ||
| 216 | raise | ||
| 217 | else: | ||
| 218 | for key in self._fd_to_key.values(): | ||
| 219 | if key.fileobj is fileobj: | ||
| 220 | self._fd_to_key.pop(key.fd) | ||
| 221 | break | ||
| 222 | else: | ||
| 223 | raise KeyError("{0!r} is not registered".format(fileobj)) | ||
| 224 | return key | ||
| 225 | |||
| 226 | def modify(self, fileobj, events, data=None): | ||
| 227 | """ Change a registered file object monitored events and data. """ | ||
| 228 | # NOTE: Some subclasses optimize this operation even further. | ||
| 229 | try: | ||
| 230 | key = self._fd_to_key[self._fileobj_lookup(fileobj)] | ||
| 231 | except KeyError: | ||
| 232 | raise KeyError("{0!r} is not registered".format(fileobj)) | ||
| 233 | |||
| 234 | if events != key.events: | ||
| 235 | self.unregister(fileobj) | ||
| 236 | key = self.register(fileobj, events, data) | ||
| 237 | |||
| 238 | elif data != key.data: | ||
| 239 | # Use a shortcut to update the data. | ||
| 240 | key = key._replace(data=data) | ||
| 241 | self._fd_to_key[key.fd] = key | ||
| 242 | |||
| 243 | return key | ||
| 244 | |||
| 245 | def select(self, timeout=None): | ||
| 246 | """ Perform the actual selection until some monitored file objects | ||
| 247 | are ready or the timeout expires. """ | ||
| 248 | raise NotImplementedError() | ||
| 249 | |||
| 250 | def close(self): | ||
| 251 | """ Close the selector. This must be called to ensure that all | ||
| 252 | underlying resources are freed. """ | ||
| 253 | self._fd_to_key.clear() | ||
| 254 | self._map = None | ||
| 255 | |||
| 256 | def get_key(self, fileobj): | ||
| 257 | """ Return the key associated with a registered file object. """ | ||
| 258 | mapping = self.get_map() | ||
| 259 | if mapping is None: | ||
| 260 | raise RuntimeError("Selector is closed") | ||
| 261 | try: | ||
| 262 | return mapping[fileobj] | ||
| 263 | except KeyError: | ||
| 264 | raise KeyError("{0!r} is not registered".format(fileobj)) | ||
| 265 | |||
| 266 | def get_map(self): | ||
| 267 | """ Return a mapping of file objects to selector keys """ | ||
| 268 | return self._map | ||
| 269 | |||
| 270 | def _key_from_fd(self, fd): | ||
| 271 | """ Return the key associated to a given file descriptor | ||
| 272 | Return None if it is not found. """ | ||
| 273 | try: | ||
| 274 | return self._fd_to_key[fd] | ||
| 275 | except KeyError: | ||
| 276 | return None | ||
| 277 | |||
| 278 | def __enter__(self): | ||
| 279 | return self | ||
| 280 | |||
| 281 | def __exit__(self, *args): | ||
| 282 | self.close() | ||
| 283 | |||
| 284 | |||
| 285 | # Almost all platforms have select.select() | ||
| 286 | if hasattr(select, "select"): | ||
| 287 | class SelectSelector(BaseSelector): | ||
| 288 | """ Select-based selector. """ | ||
| 289 | def __init__(self): | ||
| 290 | super(SelectSelector, self).__init__() | ||
| 291 | self._readers = set() | ||
| 292 | self._writers = set() | ||
| 293 | |||
| 294 | def register(self, fileobj, events, data=None): | ||
| 295 | key = super(SelectSelector, self).register(fileobj, events, data) | ||
| 296 | if events & EVENT_READ: | ||
| 297 | self._readers.add(key.fd) | ||
| 298 | if events & EVENT_WRITE: | ||
| 299 | self._writers.add(key.fd) | ||
| 300 | return key | ||
| 301 | |||
| 302 | def unregister(self, fileobj): | ||
| 303 | key = super(SelectSelector, self).unregister(fileobj) | ||
| 304 | self._readers.discard(key.fd) | ||
| 305 | self._writers.discard(key.fd) | ||
| 306 | return key | ||
| 307 | |||
| 308 | def _select(self, r, w, timeout=None): | ||
| 309 | """ Wrapper for select.select because timeout is a positional arg """ | ||
| 310 | return select.select(r, w, [], timeout) | ||
| 311 | |||
| 312 | def select(self, timeout=None): | ||
| 313 | # Selecting on empty lists on Windows errors out. | ||
| 314 | if not len(self._readers) and not len(self._writers): | ||
| 315 | return [] | ||
| 316 | |||
| 317 | timeout = None if timeout is None else max(timeout, 0.0) | ||
| 318 | ready = [] | ||
| 319 | r, w, _ = _syscall_wrapper(self._select, True, self._readers, | ||
| 320 | self._writers, timeout) | ||
| 321 | r = set(r) | ||
| 322 | w = set(w) | ||
| 323 | for fd in r | w: | ||
| 324 | events = 0 | ||
| 325 | if fd in r: | ||
| 326 | events |= EVENT_READ | ||
| 327 | if fd in w: | ||
| 328 | events |= EVENT_WRITE | ||
| 329 | |||
| 330 | key = self._key_from_fd(fd) | ||
| 331 | if key: | ||
| 332 | ready.append((key, events & key.events)) | ||
| 333 | return ready | ||
| 334 | |||
| 335 | |||
| 336 | if hasattr(select, "poll"): | ||
| 337 | class PollSelector(BaseSelector): | ||
| 338 | """ Poll-based selector """ | ||
| 339 | def __init__(self): | ||
| 340 | super(PollSelector, self).__init__() | ||
| 341 | self._poll = select.poll() | ||
| 342 | |||
| 343 | def register(self, fileobj, events, data=None): | ||
| 344 | key = super(PollSelector, self).register(fileobj, events, data) | ||
| 345 | event_mask = 0 | ||
| 346 | if events & EVENT_READ: | ||
| 347 | event_mask |= select.POLLIN | ||
| 348 | if events & EVENT_WRITE: | ||
| 349 | event_mask |= select.POLLOUT | ||
| 350 | self._poll.register(key.fd, event_mask) | ||
| 351 | return key | ||
| 352 | |||
| 353 | def unregister(self, fileobj): | ||
| 354 | key = super(PollSelector, self).unregister(fileobj) | ||
| 355 | self._poll.unregister(key.fd) | ||
| 356 | return key | ||
| 357 | |||
| 358 | def _wrap_poll(self, timeout=None): | ||
| 359 | """ Wrapper function for select.poll.poll() so that | ||
| 360 | _syscall_wrapper can work with only seconds. """ | ||
| 361 | if timeout is not None: | ||
| 362 | if timeout <= 0: | ||
| 363 | timeout = 0 | ||
| 364 | else: | ||
| 365 | # select.poll.poll() has a resolution of 1 millisecond, | ||
| 366 | # round away from zero to wait *at least* timeout seconds. | ||
| 367 | timeout = math.ceil(timeout * 1e3) | ||
| 368 | |||
| 369 | result = self._poll.poll(timeout) | ||
| 370 | return result | ||
| 371 | |||
| 372 | def select(self, timeout=None): | ||
| 373 | ready = [] | ||
| 374 | fd_events = _syscall_wrapper(self._wrap_poll, True, timeout=timeout) | ||
| 375 | for fd, event_mask in fd_events: | ||
| 376 | events = 0 | ||
| 377 | if event_mask & ~select.POLLIN: | ||
| 378 | events |= EVENT_WRITE | ||
| 379 | if event_mask & ~select.POLLOUT: | ||
| 380 | events |= EVENT_READ | ||
| 381 | |||
| 382 | key = self._key_from_fd(fd) | ||
| 383 | if key: | ||
| 384 | ready.append((key, events & key.events)) | ||
| 385 | |||
| 386 | return ready | ||
| 387 | |||
| 388 | |||
| 389 | if hasattr(select, "epoll"): | ||
| 390 | class EpollSelector(BaseSelector): | ||
| 391 | """ Epoll-based selector """ | ||
| 392 | def __init__(self): | ||
| 393 | super(EpollSelector, self).__init__() | ||
| 394 | self._epoll = select.epoll() | ||
| 395 | |||
| 396 | def fileno(self): | ||
| 397 | return self._epoll.fileno() | ||
| 398 | |||
| 399 | def register(self, fileobj, events, data=None): | ||
| 400 | key = super(EpollSelector, self).register(fileobj, events, data) | ||
| 401 | events_mask = 0 | ||
| 402 | if events & EVENT_READ: | ||
| 403 | events_mask |= select.EPOLLIN | ||
| 404 | if events & EVENT_WRITE: | ||
| 405 | events_mask |= select.EPOLLOUT | ||
| 406 | _syscall_wrapper(self._epoll.register, False, key.fd, events_mask) | ||
| 407 | return key | ||
| 408 | |||
| 409 | def unregister(self, fileobj): | ||
| 410 | key = super(EpollSelector, self).unregister(fileobj) | ||
| 411 | try: | ||
| 412 | _syscall_wrapper(self._epoll.unregister, False, key.fd) | ||
| 413 | except SelectorError: | ||
| 414 | # This can occur when the fd was closed since registration. | ||
| 415 | pass | ||
| 416 | return key | ||
| 417 | |||
| 418 | def select(self, timeout=None): | ||
| 419 | if timeout is not None: | ||
| 420 | if timeout <= 0: | ||
| 421 | timeout = 0.0 | ||
| 422 | else: | ||
| 423 | # select.epoll.poll() has a resolution of 1 millisecond | ||
| 424 | # but luckily takes seconds so we don't need a wrapper | ||
| 425 | # like PollSelector. Just for better rounding. | ||
| 426 | timeout = math.ceil(timeout * 1e3) * 1e-3 | ||
| 427 | timeout = float(timeout) | ||
| 428 | else: | ||
| 429 | timeout = -1.0 # epoll.poll() must have a float. | ||
| 430 | |||
| 431 | # We always want at least 1 to ensure that epoll.poll() can be called | ||
| 432 | # even with no file descriptors registered; otherwise it will fail. | ||
| 433 | max_events = max(len(self._fd_to_key), 1) | ||
| 434 | |||
| 435 | ready = [] | ||
| 436 | fd_events = _syscall_wrapper(self._epoll.poll, True, | ||
| 437 | timeout=timeout, | ||
| 438 | maxevents=max_events) | ||
| 439 | for fd, event_mask in fd_events: | ||
| 440 | events = 0 | ||
| 441 | if event_mask & ~select.EPOLLIN: | ||
| 442 | events |= EVENT_WRITE | ||
| 443 | if event_mask & ~select.EPOLLOUT: | ||
| 444 | events |= EVENT_READ | ||
| 445 | |||
| 446 | key = self._key_from_fd(fd) | ||
| 447 | if key: | ||
| 448 | ready.append((key, events & key.events)) | ||
| 449 | return ready | ||
| 450 | |||
| 451 | def close(self): | ||
| 452 | self._epoll.close() | ||
| 453 | super(EpollSelector, self).close() | ||
| 454 | |||
| 455 | |||
| 456 | if hasattr(select, "kqueue"): | ||
| 457 | class KqueueSelector(BaseSelector): | ||
| 458 | """ Kqueue / Kevent-based selector """ | ||
| 459 | def __init__(self): | ||
| 460 | super(KqueueSelector, self).__init__() | ||
| 461 | self._kqueue = select.kqueue() | ||
| 462 | |||
| 463 | def fileno(self): | ||
| 464 | return self._kqueue.fileno() | ||
| 465 | |||
| 466 | def register(self, fileobj, events, data=None): | ||
| 467 | key = super(KqueueSelector, self).register(fileobj, events, data) | ||
| 468 | if events & EVENT_READ: | ||
| 469 | kevent = select.kevent(key.fd, | ||
| 470 | select.KQ_FILTER_READ, | ||
| 471 | select.KQ_EV_ADD) | ||
| 472 | |||
| 473 | _syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0) | ||
| 474 | |||
| 475 | if events & EVENT_WRITE: | ||
| 476 | kevent = select.kevent(key.fd, | ||
| 477 | select.KQ_FILTER_WRITE, | ||
| 478 | select.KQ_EV_ADD) | ||
| 479 | |||
| 480 | _syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0) | ||
| 481 | |||
| 482 | return key | ||
| 483 | |||
| 484 | def unregister(self, fileobj): | ||
| 485 | key = super(KqueueSelector, self).unregister(fileobj) | ||
| 486 | if key.events & EVENT_READ: | ||
| 487 | kevent = select.kevent(key.fd, | ||
| 488 | select.KQ_FILTER_READ, | ||
| 489 | select.KQ_EV_DELETE) | ||
| 490 | try: | ||
| 491 | _syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0) | ||
| 492 | except SelectorError: | ||
| 493 | pass | ||
| 494 | if key.events & EVENT_WRITE: | ||
| 495 | kevent = select.kevent(key.fd, | ||
| 496 | select.KQ_FILTER_WRITE, | ||
| 497 | select.KQ_EV_DELETE) | ||
| 498 | try: | ||
| 499 | _syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0) | ||
| 500 | except SelectorError: | ||
| 501 | pass | ||
| 502 | |||
| 503 | return key | ||
| 504 | |||
| 505 | def select(self, timeout=None): | ||
| 506 | if timeout is not None: | ||
| 507 | timeout = max(timeout, 0) | ||
| 508 | |||
| 509 | max_events = len(self._fd_to_key) * 2 | ||
| 510 | ready_fds = {} | ||
| 511 | |||
| 512 | kevent_list = _syscall_wrapper(self._kqueue.control, True, | ||
| 513 | None, max_events, timeout) | ||
| 514 | |||
| 515 | for kevent in kevent_list: | ||
| 516 | fd = kevent.ident | ||
| 517 | event_mask = kevent.filter | ||
| 518 | events = 0 | ||
| 519 | if event_mask == select.KQ_FILTER_READ: | ||
| 520 | events |= EVENT_READ | ||
| 521 | if event_mask == select.KQ_FILTER_WRITE: | ||
| 522 | events |= EVENT_WRITE | ||
| 523 | |||
| 524 | key = self._key_from_fd(fd) | ||
| 525 | if key: | ||
| 526 | if key.fd not in ready_fds: | ||
| 527 | ready_fds[key.fd] = (key, events & key.events) | ||
| 528 | else: | ||
| 529 | old_events = ready_fds[key.fd][1] | ||
| 530 | ready_fds[key.fd] = (key, (events | old_events) & key.events) | ||
| 531 | |||
| 532 | return list(ready_fds.values()) | ||
| 533 | |||
| 534 | def close(self): | ||
| 535 | self._kqueue.close() | ||
| 536 | super(KqueueSelector, self).close() | ||
| 537 | |||
| 538 | |||
| 539 | if not hasattr(select, 'select'): # Platform-specific: AppEngine | ||
| 540 | HAS_SELECT = False | ||
| 541 | |||
| 542 | |||
| 543 | def _can_allocate(struct): | ||
| 544 | """ Checks that select structs can be allocated by the underlying | ||
| 545 | operating system, not just advertised by the select module. We don't | ||
| 546 | check select() because we are hopeful that most platforms that | ||
| 547 | don't have it available will not advertise it (i.e. GAE). """ | ||
| 548 | try: | ||
| 549 | # select.poll() objects won't fail until used. | ||
| 550 | if struct == 'poll': | ||
| 551 | p = select.poll() | ||
| 552 | p.poll(0) | ||
| 553 | |||
| 554 | # All others will fail on allocation. | ||
| 555 | else: | ||
| 556 | getattr(select, struct)().close() | ||
| 557 | return True | ||
| 558 | except (OSError, AttributeError): | ||
| 559 | return False | ||
| 560 | |||
| 561 | |||
| 562 | # Choose the best implementation, roughly: | ||
| 563 | # kqueue == epoll > poll > select. Devpoll not supported. (See above) | ||
| 564 | # select() also can't accept a FD > FD_SETSIZE (usually around 1024) | ||
| 565 | def DefaultSelector(): | ||
| 566 | """ This function serves as a first call for DefaultSelector to | ||
| 567 | detect if the select module is being monkey-patched incorrectly | ||
| 568 | by eventlet, greenlet, and preserve proper behavior. """ | ||
| 569 | global _DEFAULT_SELECTOR | ||
| 570 | if _DEFAULT_SELECTOR is None: | ||
| 571 | if _can_allocate('kqueue'): | ||
| 572 | _DEFAULT_SELECTOR = KqueueSelector | ||
| 573 | elif _can_allocate('epoll'): | ||
| 574 | _DEFAULT_SELECTOR = EpollSelector | ||
| 575 | elif _can_allocate('poll'): | ||
| 576 | _DEFAULT_SELECTOR = PollSelector | ||
| 577 | elif hasattr(select, 'select'): | ||
| 578 | _DEFAULT_SELECTOR = SelectSelector | ||
| 579 | else: # Platform-specific: AppEngine | ||
| 580 | raise ValueError('Platform does not have a selector') | ||
| 581 | return _DEFAULT_SELECTOR() | ||
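The file above is a vendored backport of the stdlib `selectors` module. As a quick orientation aid, here is a minimal usage sketch (not part of the diff; it assumes the vendored import path shown in the file header, and outside pip you would use the stdlib `selectors` instead):

```python
# Usage sketch: register a socket for read events, then block until ready.
import socket

from pip._vendor.urllib3.util.selectors import DefaultSelector, EVENT_READ

left, right = socket.socketpair()
try:
    with DefaultSelector() as selector:   # picks kqueue/epoll/poll/select
        selector.register(left, EVENT_READ)
        right.sendall(b"ping")            # makes `left` readable
        for key, events in selector.select(timeout=1.0):
            assert events & EVENT_READ
            print(key.fileobj.recv(4))    # b'ping'
finally:
    left.close()
    right.close()
```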
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/util/ssl_.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/util/ssl_.py new file mode 100644 index 0000000..c11dff2 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/util/ssl_.py | |||
| @@ -0,0 +1,341 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | import errno | ||
| 3 | import warnings | ||
| 4 | import hmac | ||
| 5 | |||
| 6 | from binascii import hexlify, unhexlify | ||
| 7 | from hashlib import md5, sha1, sha256 | ||
| 8 | |||
| 9 | from ..exceptions import SSLError, InsecurePlatformWarning, SNIMissingWarning | ||
| 10 | |||
| 11 | |||
| 12 | SSLContext = None | ||
| 13 | HAS_SNI = False | ||
| 14 | IS_PYOPENSSL = False | ||
| 15 | IS_SECURETRANSPORT = False | ||
| 16 | |||
| 17 | # Maps the length of a digest to a possible hash function producing this digest | ||
| 18 | HASHFUNC_MAP = { | ||
| 19 | 32: md5, | ||
| 20 | 40: sha1, | ||
| 21 | 64: sha256, | ||
| 22 | } | ||
| 23 | |||
| 24 | |||
| 25 | def _const_compare_digest_backport(a, b): | ||
| 26 | """ | ||
| 27 | Compare two digests of equal length in constant time. | ||
| 28 | |||
| 29 | The digests must be of type str/bytes. | ||
| 30 | Returns True if the digests match, and False otherwise. | ||
| 31 | """ | ||
| 32 | result = abs(len(a) - len(b)) | ||
| 33 | for l, r in zip(bytearray(a), bytearray(b)): | ||
| 34 | result |= l ^ r | ||
| 35 | return result == 0 | ||
| 36 | |||
| 37 | |||
| 38 | _const_compare_digest = getattr(hmac, 'compare_digest', | ||
| 39 | _const_compare_digest_backport) | ||
| 40 | |||
| 41 | |||
| 42 | try: # Test for SSL features | ||
| 43 | import ssl | ||
| 44 | from ssl import wrap_socket, CERT_NONE, PROTOCOL_SSLv23 | ||
| 45 | from ssl import HAS_SNI # Has SNI? | ||
| 46 | except ImportError: | ||
| 47 | pass | ||
| 48 | |||
| 49 | |||
| 50 | try: | ||
| 51 | from ssl import OP_NO_SSLv2, OP_NO_SSLv3, OP_NO_COMPRESSION | ||
| 52 | except ImportError: | ||
| 53 | OP_NO_SSLv2, OP_NO_SSLv3 = 0x1000000, 0x2000000 | ||
| 54 | OP_NO_COMPRESSION = 0x20000 | ||
| 55 | |||
| 56 | # A secure default. | ||
| 57 | # Sources for more information on TLS ciphers: | ||
| 58 | # | ||
| 59 | # - https://wiki.mozilla.org/Security/Server_Side_TLS | ||
| 60 | # - https://www.ssllabs.com/projects/best-practices/index.html | ||
| 61 | # - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/ | ||
| 62 | # | ||
| 63 | # The general intent is: | ||
| 64 | # - Prefer TLS 1.3 cipher suites | ||
| 65 | # - prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE), | ||
| 66 | # - prefer ECDHE over DHE for better performance, | ||
| 67 | # - prefer any AES-GCM and ChaCha20 over any AES-CBC for better performance and | ||
| 68 | # security, | ||
| 69 | # - prefer AES-GCM over ChaCha20 because hardware-accelerated AES is common, | ||
| 70 | # - disable NULL authentication, MD5 MACs and DSS for security reasons. | ||
| 71 | DEFAULT_CIPHERS = ':'.join([ | ||
| 72 | 'TLS13-AES-256-GCM-SHA384', | ||
| 73 | 'TLS13-CHACHA20-POLY1305-SHA256', | ||
| 74 | 'TLS13-AES-128-GCM-SHA256', | ||
| 75 | 'ECDH+AESGCM', | ||
| 76 | 'ECDH+CHACHA20', | ||
| 77 | 'DH+AESGCM', | ||
| 78 | 'DH+CHACHA20', | ||
| 79 | 'ECDH+AES256', | ||
| 80 | 'DH+AES256', | ||
| 81 | 'ECDH+AES128', | ||
| 82 | 'DH+AES', | ||
| 83 | 'RSA+AESGCM', | ||
| 84 | 'RSA+AES', | ||
| 85 | '!aNULL', | ||
| 86 | '!eNULL', | ||
| 87 | '!MD5', | ||
| 88 | ]) | ||
| 89 | |||
| 90 | try: | ||
| 91 | from ssl import SSLContext # Modern SSL? | ||
| 92 | except ImportError: | ||
| 93 | import sys | ||
| 94 | |||
| 95 | class SSLContext(object): # Platform-specific: Python 2 & 3.1 | ||
| 96 | supports_set_ciphers = ((2, 7) <= sys.version_info < (3,) or | ||
| 97 | (3, 2) <= sys.version_info) | ||
| 98 | |||
| 99 | def __init__(self, protocol_version): | ||
| 100 | self.protocol = protocol_version | ||
| 101 | # Use default values from a real SSLContext | ||
| 102 | self.check_hostname = False | ||
| 103 | self.verify_mode = ssl.CERT_NONE | ||
| 104 | self.ca_certs = None | ||
| 105 | self.options = 0 | ||
| 106 | self.certfile = None | ||
| 107 | self.keyfile = None | ||
| 108 | self.ciphers = None | ||
| 109 | |||
| 110 | def load_cert_chain(self, certfile, keyfile): | ||
| 111 | self.certfile = certfile | ||
| 112 | self.keyfile = keyfile | ||
| 113 | |||
| 114 | def load_verify_locations(self, cafile=None, capath=None): | ||
| 115 | self.ca_certs = cafile | ||
| 116 | |||
| 117 | if capath is not None: | ||
| 118 | raise SSLError("CA directories not supported in older Pythons") | ||
| 119 | |||
| 120 | def set_ciphers(self, cipher_suite): | ||
| 121 | if not self.supports_set_ciphers: | ||
| 122 | raise TypeError( | ||
| 123 | 'Your version of Python does not support setting ' | ||
| 124 | 'a custom cipher suite. Please upgrade to Python ' | ||
| 125 | '2.7, 3.2, or later if you need this functionality.' | ||
| 126 | ) | ||
| 127 | self.ciphers = cipher_suite | ||
| 128 | |||
| 129 | def wrap_socket(self, socket, server_hostname=None, server_side=False): | ||
| 130 | warnings.warn( | ||
| 131 | 'A true SSLContext object is not available. This prevents ' | ||
| 132 | 'urllib3 from configuring SSL appropriately and may cause ' | ||
| 133 | 'certain SSL connections to fail. You can upgrade to a newer ' | ||
| 134 | 'version of Python to solve this. For more information, see ' | ||
| 135 | 'https://urllib3.readthedocs.io/en/latest/advanced-usage.html' | ||
| 136 | '#ssl-warnings', | ||
| 137 | InsecurePlatformWarning | ||
| 138 | ) | ||
| 139 | kwargs = { | ||
| 140 | 'keyfile': self.keyfile, | ||
| 141 | 'certfile': self.certfile, | ||
| 142 | 'ca_certs': self.ca_certs, | ||
| 143 | 'cert_reqs': self.verify_mode, | ||
| 144 | 'ssl_version': self.protocol, | ||
| 145 | 'server_side': server_side, | ||
| 146 | } | ||
| 147 | if self.supports_set_ciphers: # Platform-specific: Python 2.7+ | ||
| 148 | return wrap_socket(socket, ciphers=self.ciphers, **kwargs) | ||
| 149 | else: # Platform-specific: Python 2.6 | ||
| 150 | return wrap_socket(socket, **kwargs) | ||
| 151 | |||
| 152 | |||
| 153 | def assert_fingerprint(cert, fingerprint): | ||
| 154 | """ | ||
| 155 | Checks if given fingerprint matches the supplied certificate. | ||
| 156 | |||
| 157 | :param cert: | ||
| 158 | Certificate as bytes object. | ||
| 159 | :param fingerprint: | ||
| 160 | Fingerprint as string of hexdigits, can be interspersed by colons. | ||
| 161 | """ | ||
| 162 | |||
| 163 | fingerprint = fingerprint.replace(':', '').lower() | ||
| 164 | digest_length = len(fingerprint) | ||
| 165 | hashfunc = HASHFUNC_MAP.get(digest_length) | ||
| 166 | if not hashfunc: | ||
| 167 | raise SSLError( | ||
| 168 | 'Fingerprint of invalid length: {0}'.format(fingerprint)) | ||
| 169 | |||
| 170 | # We need encode() here for py32; works on py2 and py33. | ||
| 171 | fingerprint_bytes = unhexlify(fingerprint.encode()) | ||
| 172 | |||
| 173 | cert_digest = hashfunc(cert).digest() | ||
| 174 | |||
| 175 | if not _const_compare_digest(cert_digest, fingerprint_bytes): | ||
| 176 | raise SSLError('Fingerprints did not match. Expected "{0}", got "{1}".' | ||
| 177 | .format(fingerprint, hexlify(cert_digest))) | ||
| 178 | |||
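A brief sketch of how `assert_fingerprint` and `HASHFUNC_MAP` interact: the length of the hex digest selects the hash function. The certificate bytes below are a stand-in, not real DER data:

```python
# Sketch: 64 hex digits selects sha256 via HASHFUNC_MAP above.
# `der_cert` is placeholder bytes, not a real certificate.
from hashlib import sha256

der_cert = b"dummy-der-certificate-bytes"
pin = sha256(der_cert).hexdigest()        # 64 hex digits
assert_fingerprint(der_cert, pin)         # matches: returns quietly
# assert_fingerprint(der_cert, "00" * 32) # mismatch: raises SSLError
```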
| 179 | |||
| 180 | def resolve_cert_reqs(candidate): | ||
| 181 | """ | ||
| 182 | Resolves the argument to a numeric constant, which can be passed to | ||
| 183 | the wrap_socket function/method from the ssl module. | ||
| 184 | Defaults to :data:`ssl.CERT_NONE`. | ||
| 185 | If given a string it is assumed to be the name of the constant in the | ||
| 186 | :mod:`ssl` module or its abbreviation | ||
| 187 | (so you can specify `REQUIRED` instead of `CERT_REQUIRED`). | ||
| 188 | If it's neither `None` nor a string we assume it is already the numeric | ||
| 189 | constant which can directly be passed to wrap_socket. | ||
| 190 | """ | ||
| 191 | if candidate is None: | ||
| 192 | return CERT_NONE | ||
| 193 | |||
| 194 | if isinstance(candidate, str): | ||
| 195 | res = getattr(ssl, candidate, None) | ||
| 196 | if res is None: | ||
| 197 | res = getattr(ssl, 'CERT_' + candidate) | ||
| 198 | return res | ||
| 199 | |||
| 200 | return candidate | ||
| 201 | |||
| 202 | |||
| 203 | def resolve_ssl_version(candidate): | ||
| 204 | """ | ||
| 205 | Like :func:`resolve_cert_reqs`, but resolves an SSL protocol version instead. | ||
| 206 | """ | ||
| 207 | if candidate is None: | ||
| 208 | return PROTOCOL_SSLv23 | ||
| 209 | |||
| 210 | if isinstance(candidate, str): | ||
| 211 | res = getattr(ssl, candidate, None) | ||
| 212 | if res is None: | ||
| 213 | res = getattr(ssl, 'PROTOCOL_' + candidate) | ||
| 214 | return res | ||
| 215 | |||
| 216 | return candidate | ||
| 217 | |||
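Both resolvers accept `None`, a ready-made constant, or a (possibly abbreviated) constant name from the `ssl` module; a small illustrative check:

```python
# Sketch of the three accepted input shapes for the resolvers above.
import ssl

assert resolve_cert_reqs(None) == ssl.CERT_NONE            # default
assert resolve_cert_reqs("REQUIRED") == ssl.CERT_REQUIRED  # abbreviation
assert resolve_cert_reqs(ssl.CERT_OPTIONAL) == ssl.CERT_OPTIONAL
assert resolve_ssl_version("TLSv1_2") == ssl.PROTOCOL_TLSv1_2
```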
| 218 | |||
| 219 | def create_urllib3_context(ssl_version=None, cert_reqs=None, | ||
| 220 | options=None, ciphers=None): | ||
| 221 | """All arguments have the same meaning as ``ssl_wrap_socket``. | ||
| 222 | |||
| 223 | By default, this function does a lot of the same work that | ||
| 224 | ``ssl.create_default_context`` does on Python 3.4+. It: | ||
| 225 | |||
| 226 | - Disables SSLv2, SSLv3, and compression | ||
| 227 | - Sets a restricted set of server ciphers | ||
| 228 | |||
| 229 | If you wish to enable SSLv3, you can do:: | ||
| 230 | |||
| 231 | from pip._vendor.urllib3.util import ssl_ | ||
| 232 | context = ssl_.create_urllib3_context() | ||
| 233 | context.options &= ~ssl_.OP_NO_SSLv3 | ||
| 234 | |||
| 235 | You can do the same to enable compression (substituting ``COMPRESSION`` | ||
| 236 | for ``SSLv3`` in the last line above). | ||
| 237 | |||
| 238 | :param ssl_version: | ||
| 239 | The desired protocol version to use. This will default to | ||
| 240 | PROTOCOL_SSLv23 which will negotiate the highest protocol that both | ||
| 241 | the server and your installation of OpenSSL support. | ||
| 242 | :param cert_reqs: | ||
| 243 | Whether to require certificate verification. This defaults to | ||
| 244 | ``ssl.CERT_REQUIRED``. | ||
| 245 | :param options: | ||
| 246 | Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``, | ||
| 247 | ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``. | ||
| 248 | :param ciphers: | ||
| 249 | Which cipher suites to allow the server to select. | ||
| 250 | :returns: | ||
| 251 | Constructed SSLContext object with specified options | ||
| 252 | :rtype: SSLContext | ||
| 253 | """ | ||
| 254 | context = SSLContext(ssl_version or ssl.PROTOCOL_SSLv23) | ||
| 255 | |||
| 256 | # Setting the default here, as we may have no ssl module on import | ||
| 257 | cert_reqs = ssl.CERT_REQUIRED if cert_reqs is None else cert_reqs | ||
| 258 | |||
| 259 | if options is None: | ||
| 260 | options = 0 | ||
| 261 | # SSLv2 is easily broken and is considered harmful and dangerous | ||
| 262 | options |= OP_NO_SSLv2 | ||
| 263 | # SSLv3 has several problems and is now dangerous | ||
| 264 | options |= OP_NO_SSLv3 | ||
| 265 | # Disable compression to prevent CRIME attacks for OpenSSL 1.0+ | ||
| 266 | # (issue #309) | ||
| 267 | options |= OP_NO_COMPRESSION | ||
| 268 | |||
| 269 | context.options |= options | ||
| 270 | |||
| 271 | if getattr(context, 'supports_set_ciphers', True): # Platform-specific: Python 2.6 | ||
| 272 | context.set_ciphers(ciphers or DEFAULT_CIPHERS) | ||
| 273 | |||
| 274 | context.verify_mode = cert_reqs | ||
| 275 | if getattr(context, 'check_hostname', None) is not None: # Platform-specific: Python 3.2 | ||
| 276 | # We do our own verification, including fingerprints and alternative | ||
| 277 | # hostnames. So disable it here | ||
| 278 | context.check_hostname = False | ||
| 279 | return context | ||
| 280 | |||
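A hedged sketch of the resulting context's defaults (the CA-bundle path is a placeholder, hence commented out):

```python
# Sketch: verify the hardening applied by create_urllib3_context().
import ssl

ctx = create_urllib3_context(cert_reqs=ssl.CERT_REQUIRED)
assert ctx.verify_mode == ssl.CERT_REQUIRED
assert ctx.options & OP_NO_SSLv2 and ctx.options & OP_NO_SSLv3
assert ctx.options & OP_NO_COMPRESSION
# ctx.load_verify_locations('/path/to/ca-bundle.pem')  # placeholder path
```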
| 281 | |||
| 282 | def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, | ||
| 283 | ca_certs=None, server_hostname=None, | ||
| 284 | ssl_version=None, ciphers=None, ssl_context=None, | ||
| 285 | ca_cert_dir=None): | ||
| 286 | """ | ||
| 287 | All arguments except for server_hostname, ssl_context, and ca_cert_dir have | ||
| 288 | the same meaning as they do when using :func:`ssl.wrap_socket`. | ||
| 289 | |||
| 290 | :param server_hostname: | ||
| 291 | When SNI is supported, the expected hostname of the certificate | ||
| 292 | :param ssl_context: | ||
| 293 | A pre-made :class:`SSLContext` object. If none is provided, one will | ||
| 294 | be created using :func:`create_urllib3_context`. | ||
| 295 | :param ciphers: | ||
| 296 | A string of ciphers we wish the client to support. This is not | ||
| 297 | supported on Python 2.6 as the ssl module does not support it. | ||
| 298 | :param ca_cert_dir: | ||
| 299 | A directory containing CA certificates in multiple separate files, as | ||
| 300 | supported by OpenSSL's -CApath flag or the capath argument to | ||
| 301 | SSLContext.load_verify_locations(). | ||
| 302 | """ | ||
| 303 | context = ssl_context | ||
| 304 | if context is None: | ||
| 305 | # Note: This branch of code and all the variables in it are no longer | ||
| 306 | # used by urllib3 itself. We should consider deprecating and removing | ||
| 307 | # this code. | ||
| 308 | context = create_urllib3_context(ssl_version, cert_reqs, | ||
| 309 | ciphers=ciphers) | ||
| 310 | |||
| 311 | if ca_certs or ca_cert_dir: | ||
| 312 | try: | ||
| 313 | context.load_verify_locations(ca_certs, ca_cert_dir) | ||
| 314 | except IOError as e: # Platform-specific: Python 2.6, 2.7, 3.2 | ||
| 315 | raise SSLError(e) | ||
| 316 | # Py33 raises FileNotFoundError which subclasses OSError | ||
| 317 | # These are not equivalent unless we check the errno attribute | ||
| 318 | except OSError as e: # Platform-specific: Python 3.3 and beyond | ||
| 319 | if e.errno == errno.ENOENT: | ||
| 320 | raise SSLError(e) | ||
| 321 | raise | ||
| 322 | elif getattr(context, 'load_default_certs', None) is not None: | ||
| 323 | # try to load OS default certs; works well on Windows (requires Python 3.4+) | ||
| 324 | context.load_default_certs() | ||
| 325 | |||
| 326 | if certfile: | ||
| 327 | context.load_cert_chain(certfile, keyfile) | ||
| 328 | if HAS_SNI: # Platform-specific: OpenSSL with enabled SNI | ||
| 329 | return context.wrap_socket(sock, server_hostname=server_hostname) | ||
| 330 | |||
| 331 | warnings.warn( | ||
| 332 | 'An HTTPS request has been made, but the SNI (Subject Name ' | ||
| 333 | 'Indication) extension to TLS is not available on this platform. ' | ||
| 334 | 'This may cause the server to present an incorrect TLS ' | ||
| 335 | 'certificate, which can cause validation failures. You can upgrade to ' | ||
| 336 | 'a newer version of Python to solve this. For more information, see ' | ||
| 337 | 'https://urllib3.readthedocs.io/en/latest/advanced-usage.html' | ||
| 338 | '#ssl-warnings', | ||
| 339 | SNIMissingWarning | ||
| 340 | ) | ||
| 341 | return context.wrap_socket(sock) | ||
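Putting the helper to work, a client-side usage sketch (hostname and CA path are illustrative only, and this performs real network I/O):

```python
# Sketch only: opens a real TLS connection; host and CA path are examples.
import socket

sock = socket.create_connection(("example.com", 443))
tls = ssl_wrap_socket(
    sock,
    ca_certs="/etc/ssl/certs/ca-certificates.crt",  # example CA bundle
    server_hostname="example.com",                  # sent via SNI if available
)
tls.sendall(b"HEAD / HTTP/1.0\r\nHost: example.com\r\n\r\n")
print(tls.recv(64))
tls.close()
```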
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/util/timeout.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/util/timeout.py new file mode 100644 index 0000000..9c2e6ef --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/util/timeout.py | |||
| @@ -0,0 +1,242 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | # The default socket timeout, used by httplib to indicate that no timeout was | ||
| 3 | # specified by the user | ||
| 4 | from socket import _GLOBAL_DEFAULT_TIMEOUT | ||
| 5 | import time | ||
| 6 | |||
| 7 | from ..exceptions import TimeoutStateError | ||
| 8 | |||
| 9 | # A sentinel value to indicate that no timeout was specified by the user in | ||
| 10 | # urllib3 | ||
| 11 | _Default = object() | ||
| 12 | |||
| 13 | |||
| 14 | # Use time.monotonic if available. | ||
| 15 | current_time = getattr(time, "monotonic", time.time) | ||
| 16 | |||
| 17 | |||
| 18 | class Timeout(object): | ||
| 19 | """ Timeout configuration. | ||
| 20 | |||
| 21 | Timeouts can be defined as a default for a pool:: | ||
| 22 | |||
| 23 | timeout = Timeout(connect=2.0, read=7.0) | ||
| 24 | http = PoolManager(timeout=timeout) | ||
| 25 | response = http.request('GET', 'http://example.com/') | ||
| 26 | |||
| 27 | Or per-request (which overrides the default for the pool):: | ||
| 28 | |||
| 29 | response = http.request('GET', 'http://example.com/', timeout=Timeout(10)) | ||
| 30 | |||
| 31 | Timeouts can be disabled by setting all the parameters to ``None``:: | ||
| 32 | |||
| 33 | no_timeout = Timeout(connect=None, read=None) | ||
| 34 | response = http.request('GET', 'http://example.com/', timeout=no_timeout) | ||
| 35 | |||
| 36 | |||
| 37 | :param total: | ||
| 38 | This combines the connect and read timeouts into one; the read timeout | ||
| 39 | will be set to the time leftover from the connect attempt. In the | ||
| 40 | event that both a connect timeout and a total are specified, or a read | ||
| 41 | timeout and a total are specified, the shorter timeout will be applied. | ||
| 42 | |||
| 43 | Defaults to None. | ||
| 44 | |||
| 45 | :type total: integer, float, or None | ||
| 46 | |||
| 47 | :param connect: | ||
| 48 | The maximum amount of time to wait for a connection attempt to a server | ||
| 49 | to succeed. Omitting the parameter will default the connect timeout to | ||
| 50 | the system default, probably `the global default timeout in socket.py | ||
| 51 | <http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_. | ||
| 52 | None will set an infinite timeout for connection attempts. | ||
| 53 | |||
| 54 | :type connect: integer, float, or None | ||
| 55 | |||
| 56 | :param read: | ||
| 57 | The maximum amount of time to wait between consecutive | ||
| 58 | read operations for a response from the server. Omitting | ||
| 59 | the parameter will default the read timeout to the system | ||
| 60 | default, probably `the global default timeout in socket.py | ||
| 61 | <http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_. | ||
| 62 | None will set an infinite timeout. | ||
| 63 | |||
| 64 | :type read: integer, float, or None | ||
| 65 | |||
| 66 | .. note:: | ||
| 67 | |||
| 68 | Many factors can affect the total amount of time for urllib3 to return | ||
| 69 | an HTTP response. | ||
| 70 | |||
| 71 | For example, Python's DNS resolver does not obey the timeout specified | ||
| 72 | on the socket. Other factors that can affect total request time include | ||
| 73 | high CPU load, high swap, the program running at a low priority level, | ||
| 74 | or other behaviors. | ||
| 75 | |||
| 76 | In addition, the read and total timeouts only measure the time between | ||
| 77 | read operations on the socket connecting the client and the server, | ||
| 78 | not the total amount of time for the request to return a complete | ||
| 79 | response. For most requests, the timeout is raised because the server | ||
| 80 | has not sent the first byte in the specified time. This is not always | ||
| 81 | the case; if a server streams one byte every fifteen seconds, a timeout | ||
| 82 | of 20 seconds will not trigger, even though the request will take | ||
| 83 | several minutes to complete. | ||
| 84 | |||
| 85 | If your goal is to cut off any request after a set amount of wall clock | ||
| 86 | time, consider having a second "watcher" thread to cut off a slow | ||
| 87 | request. | ||
| 88 | """ | ||
| 89 | |||
| 90 | #: A sentinel object representing the default timeout value | ||
| 91 | DEFAULT_TIMEOUT = _GLOBAL_DEFAULT_TIMEOUT | ||
| 92 | |||
| 93 | def __init__(self, total=None, connect=_Default, read=_Default): | ||
| 94 | self._connect = self._validate_timeout(connect, 'connect') | ||
| 95 | self._read = self._validate_timeout(read, 'read') | ||
| 96 | self.total = self._validate_timeout(total, 'total') | ||
| 97 | self._start_connect = None | ||
| 98 | |||
| 99 | def __str__(self): | ||
| 100 | return '%s(connect=%r, read=%r, total=%r)' % ( | ||
| 101 | type(self).__name__, self._connect, self._read, self.total) | ||
| 102 | |||
| 103 | @classmethod | ||
| 104 | def _validate_timeout(cls, value, name): | ||
| 105 | """ Check that a timeout attribute is valid. | ||
| 106 | |||
| 107 | :param value: The timeout value to validate | ||
| 108 | :param name: The name of the timeout attribute to validate. This is | ||
| 109 | used in error messages. | ||
| 110 | :return: The validated and cast version of the given value. | ||
| 111 | :raises ValueError: If it is a numeric value less than or equal to | ||
| 112 | zero, or the type is not an integer, float, or None. | ||
| 113 | """ | ||
| 114 | if value is _Default: | ||
| 115 | return cls.DEFAULT_TIMEOUT | ||
| 116 | |||
| 117 | if value is None or value is cls.DEFAULT_TIMEOUT: | ||
| 118 | return value | ||
| 119 | |||
| 120 | if isinstance(value, bool): | ||
| 121 | raise ValueError("Timeout cannot be a boolean value. It must " | ||
| 122 | "be an int, float or None.") | ||
| 123 | try: | ||
| 124 | float(value) | ||
| 125 | except (TypeError, ValueError): | ||
| 126 | raise ValueError("Timeout value %s was %s, but it must be an " | ||
| 127 | "int, float or None." % (name, value)) | ||
| 128 | |||
| 129 | try: | ||
| 130 | if value <= 0: | ||
| 131 | raise ValueError("Attempted to set %s timeout to %s, but the " | ||
| 132 | "timeout cannot be set to a value less " | ||
| 133 | "than or equal to 0." % (name, value)) | ||
| 134 | except TypeError: # Python 3 | ||
| 135 | raise ValueError("Timeout value %s was %s, but it must be an " | ||
| 136 | "int, float or None." % (name, value)) | ||
| 137 | |||
| 138 | return value | ||
| 139 | |||
| 140 | @classmethod | ||
| 141 | def from_float(cls, timeout): | ||
| 142 | """ Create a new Timeout from a legacy timeout value. | ||
| 143 | |||
| 144 | The timeout value used by httplib.py sets the same timeout on the | ||
| 145 | connect(), and recv() socket requests. This creates a :class:`Timeout` | ||
| 146 | object that sets the individual timeouts to the ``timeout`` value | ||
| 147 | passed to this function. | ||
| 148 | |||
| 149 | :param timeout: The legacy timeout value. | ||
| 150 | :type timeout: integer, float, sentinel default object, or None | ||
| 151 | :return: Timeout object | ||
| 152 | :rtype: :class:`Timeout` | ||
| 153 | """ | ||
| 154 | return Timeout(read=timeout, connect=timeout) | ||
| 155 | |||
| 156 | def clone(self): | ||
| 157 | """ Create a copy of the timeout object | ||
| 158 | |||
| 159 | Timeout properties are stored per-pool but each request needs a fresh | ||
| 160 | Timeout object to ensure each one has its own start/stop configured. | ||
| 161 | |||
| 162 | :return: a copy of the timeout object | ||
| 163 | :rtype: :class:`Timeout` | ||
| 164 | """ | ||
| 165 | # We can't use copy.deepcopy because that will also create a new object | ||
| 166 | # for _GLOBAL_DEFAULT_TIMEOUT, which socket.py uses as a sentinel to | ||
| 167 | # detect the user default. | ||
| 168 | return Timeout(connect=self._connect, read=self._read, | ||
| 169 | total=self.total) | ||
| 170 | |||
| 171 | def start_connect(self): | ||
| 172 | """ Start the timeout clock, used during a connect() attempt | ||
| 173 | |||
| 174 | :raises urllib3.exceptions.TimeoutStateError: if you attempt | ||
| 175 | to start a timer that has been started already. | ||
| 176 | """ | ||
| 177 | if self._start_connect is not None: | ||
| 178 | raise TimeoutStateError("Timeout timer has already been started.") | ||
| 179 | self._start_connect = current_time() | ||
| 180 | return self._start_connect | ||
| 181 | |||
| 182 | def get_connect_duration(self): | ||
| 183 | """ Gets the time elapsed since the call to :meth:`start_connect`. | ||
| 184 | |||
| 185 | :return: Elapsed time. | ||
| 186 | :rtype: float | ||
| 187 | :raises urllib3.exceptions.TimeoutStateError: if you attempt | ||
| 188 | to get duration for a timer that hasn't been started. | ||
| 189 | """ | ||
| 190 | if self._start_connect is None: | ||
| 191 | raise TimeoutStateError("Can't get connect duration for timer " | ||
| 192 | "that has not started.") | ||
| 193 | return current_time() - self._start_connect | ||
| 194 | |||
| 195 | @property | ||
| 196 | def connect_timeout(self): | ||
| 197 | """ Get the value to use when setting a connection timeout. | ||
| 198 | |||
| 199 | This will be a positive float or integer, the value None | ||
| 200 | (never timeout), or the default system timeout. | ||
| 201 | |||
| 202 | :return: Connect timeout. | ||
| 203 | :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None | ||
| 204 | """ | ||
| 205 | if self.total is None: | ||
| 206 | return self._connect | ||
| 207 | |||
| 208 | if self._connect is None or self._connect is self.DEFAULT_TIMEOUT: | ||
| 209 | return self.total | ||
| 210 | |||
| 211 | return min(self._connect, self.total) | ||
| 212 | |||
| 213 | @property | ||
| 214 | def read_timeout(self): | ||
| 215 | """ Get the value for the read timeout. | ||
| 216 | |||
| 217 | This assumes some time has elapsed in the connection timeout and | ||
| 218 | computes the read timeout appropriately. | ||
| 219 | |||
| 220 | If self.total is set, the read timeout is dependent on the amount of | ||
| 221 | time taken by the connect timeout. If the connection time has not been | ||
| 222 | established, a :exc:`~urllib3.exceptions.TimeoutStateError` will be | ||
| 223 | raised. | ||
| 224 | |||
| 225 | :return: Value to use for the read timeout. | ||
| 226 | :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None | ||
| 227 | :raises urllib3.exceptions.TimeoutStateError: If :meth:`start_connect` | ||
| 228 | has not yet been called on this object. | ||
| 229 | """ | ||
| 230 | if (self.total is not None and | ||
| 231 | self.total is not self.DEFAULT_TIMEOUT and | ||
| 232 | self._read is not None and | ||
| 233 | self._read is not self.DEFAULT_TIMEOUT): | ||
| 234 | # In case the connect timeout has not yet been established. | ||
| 235 | if self._start_connect is None: | ||
| 236 | return self._read | ||
| 237 | return max(0, min(self.total - self.get_connect_duration(), | ||
| 238 | self._read)) | ||
| 239 | elif self.total is not None and self.total is not self.DEFAULT_TIMEOUT: | ||
| 240 | return max(0, self.total - self.get_connect_duration()) | ||
| 241 | else: | ||
| 242 | return self._read | ||
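A sketch of the budget accounting that the two properties implement, using the per-request `clone()` pattern described in the docstrings above:

```python
# Sketch: per-request clone, then connect/read budgets derived from total.
import time

t = Timeout(total=5.0, connect=2.0, read=4.0).clone()  # fresh per request
assert t.connect_timeout == 2.0      # min(connect, total)
t.start_connect()
time.sleep(0.1)                      # pretend the connect took ~100 ms
assert 0 < t.read_timeout <= 4.0     # min(total - elapsed, read)
```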
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/util/url.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/util/url.py new file mode 100644 index 0000000..60f826a --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/util/url.py | |||
| @@ -0,0 +1,230 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | from collections import namedtuple | ||
| 3 | |||
| 4 | from ..exceptions import LocationParseError | ||
| 5 | |||
| 6 | |||
| 7 | url_attrs = ['scheme', 'auth', 'host', 'port', 'path', 'query', 'fragment'] | ||
| 8 | |||
| 9 | # We only want to normalize urls with an HTTP(S) scheme. | ||
| 10 | # urllib3 infers URLs without a scheme (None) to be http. | ||
| 11 | NORMALIZABLE_SCHEMES = ('http', 'https', None) | ||
| 12 | |||
| 13 | |||
| 14 | class Url(namedtuple('Url', url_attrs)): | ||
| 15 | """ | ||
| 16 | Data structure for representing an HTTP URL. Used as a return value for | ||
| 17 | :func:`parse_url`. Both the scheme and host are normalized as they are | ||
| 18 | both case-insensitive according to RFC 3986. | ||
| 19 | """ | ||
| 20 | __slots__ = () | ||
| 21 | |||
| 22 | def __new__(cls, scheme=None, auth=None, host=None, port=None, path=None, | ||
| 23 | query=None, fragment=None): | ||
| 24 | if path and not path.startswith('/'): | ||
| 25 | path = '/' + path | ||
| 26 | if scheme: | ||
| 27 | scheme = scheme.lower() | ||
| 28 | if host and scheme in NORMALIZABLE_SCHEMES: | ||
| 29 | host = host.lower() | ||
| 30 | return super(Url, cls).__new__(cls, scheme, auth, host, port, path, | ||
| 31 | query, fragment) | ||
| 32 | |||
| 33 | @property | ||
| 34 | def hostname(self): | ||
| 35 | """For backwards-compatibility with urlparse. We're nice like that.""" | ||
| 36 | return self.host | ||
| 37 | |||
| 38 | @property | ||
| 39 | def request_uri(self): | ||
| 40 | """Absolute path including the query string.""" | ||
| 41 | uri = self.path or '/' | ||
| 42 | |||
| 43 | if self.query is not None: | ||
| 44 | uri += '?' + self.query | ||
| 45 | |||
| 46 | return uri | ||
| 47 | |||
| 48 | @property | ||
| 49 | def netloc(self): | ||
| 50 | """Network location including host and port""" | ||
| 51 | if self.port: | ||
| 52 | return '%s:%d' % (self.host, self.port) | ||
| 53 | return self.host | ||
| 54 | |||
| 55 | @property | ||
| 56 | def url(self): | ||
| 57 | """ | ||
| 58 | Convert self into a url | ||
| 59 | |||
| 60 | This function should more or less round-trip with :func:`.parse_url`. The | ||
| 61 | returned url may not be exactly the same as the url inputted to | ||
| 62 | :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls | ||
| 63 | with a blank port will have : removed). | ||
| 64 | |||
| 65 | Example: :: | ||
| 66 | |||
| 67 | >>> U = parse_url('http://google.com/mail/') | ||
| 68 | >>> U.url | ||
| 69 | 'http://google.com/mail/' | ||
| 70 | >>> Url('http', 'username:password', 'host.com', 80, | ||
| 71 | ... '/path', 'query', 'fragment').url | ||
| 72 | 'http://username:password@host.com:80/path?query#fragment' | ||
| 73 | """ | ||
| 74 | scheme, auth, host, port, path, query, fragment = self | ||
| 75 | url = '' | ||
| 76 | |||
| 77 | # We use "is not None" because we want things to happen with empty strings (or 0 port) | ||
| 78 | if scheme is not None: | ||
| 79 | url += scheme + '://' | ||
| 80 | if auth is not None: | ||
| 81 | url += auth + '@' | ||
| 82 | if host is not None: | ||
| 83 | url += host | ||
| 84 | if port is not None: | ||
| 85 | url += ':' + str(port) | ||
| 86 | if path is not None: | ||
| 87 | url += path | ||
| 88 | if query is not None: | ||
| 89 | url += '?' + query | ||
| 90 | if fragment is not None: | ||
| 91 | url += '#' + fragment | ||
| 92 | |||
| 93 | return url | ||
| 94 | |||
| 95 | def __str__(self): | ||
| 96 | return self.url | ||
| 97 | |||
| 98 | |||
| 99 | def split_first(s, delims): | ||
| 100 | """ | ||
| 101 | Given a string and an iterable of delimiters, split on the first found | ||
| 102 | delimiter. Return two split parts and the matched delimiter. | ||
| 103 | |||
| 104 | If not found, then the first part is the full input string. | ||
| 105 | |||
| 106 | Example:: | ||
| 107 | |||
| 108 | >>> split_first('foo/bar?baz', '?/=') | ||
| 109 | ('foo', 'bar?baz', '/') | ||
| 110 | >>> split_first('foo/bar?baz', '123') | ||
| 111 | ('foo/bar?baz', '', None) | ||
| 112 | |||
| 113 | Scales linearly with the number of delims; not ideal for a large number of delims. | ||
| 114 | """ | ||
| 115 | min_idx = None | ||
| 116 | min_delim = None | ||
| 117 | for d in delims: | ||
| 118 | idx = s.find(d) | ||
| 119 | if idx < 0: | ||
| 120 | continue | ||
| 121 | |||
| 122 | if min_idx is None or idx < min_idx: | ||
| 123 | min_idx = idx | ||
| 124 | min_delim = d | ||
| 125 | |||
| 126 | if min_idx is None or min_idx < 0: | ||
| 127 | return s, '', None | ||
| 128 | |||
| 129 | return s[:min_idx], s[min_idx + 1:], min_delim | ||
| 130 | |||
| 131 | |||
| 132 | def parse_url(url): | ||
| 133 | """ | ||
| 134 | Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is | ||
| 135 | performed to parse incomplete urls. Fields not provided will be None. | ||
| 136 | |||
| 137 | Partly backwards-compatible with :mod:`urlparse`. | ||
| 138 | |||
| 139 | Example:: | ||
| 140 | |||
| 141 | >>> parse_url('http://google.com/mail/') | ||
| 142 | Url(scheme='http', host='google.com', port=None, path='/mail/', ...) | ||
| 143 | >>> parse_url('google.com:80') | ||
| 144 | Url(scheme=None, host='google.com', port=80, path=None, ...) | ||
| 145 | >>> parse_url('/foo?bar') | ||
| 146 | Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...) | ||
| 147 | """ | ||
| 148 | |||
| 149 | # While this code has overlap with stdlib's urlparse, it is much | ||
| 150 | # simplified for our needs and less annoying. | ||
| 151 | # Additionally, this implementation does silly things to be optimal | ||
| 152 | # on CPython. | ||
| 153 | |||
| 154 | if not url: | ||
| 155 | # Empty | ||
| 156 | return Url() | ||
| 157 | |||
| 158 | scheme = None | ||
| 159 | auth = None | ||
| 160 | host = None | ||
| 161 | port = None | ||
| 162 | path = None | ||
| 163 | fragment = None | ||
| 164 | query = None | ||
| 165 | |||
| 166 | # Scheme | ||
| 167 | if '://' in url: | ||
| 168 | scheme, url = url.split('://', 1) | ||
| 169 | |||
| 170 | # Find the earliest Authority Terminator | ||
| 171 | # (http://tools.ietf.org/html/rfc3986#section-3.2) | ||
| 172 | url, path_, delim = split_first(url, ['/', '?', '#']) | ||
| 173 | |||
| 174 | if delim: | ||
| 175 | # Reassemble the path | ||
| 176 | path = delim + path_ | ||
| 177 | |||
| 178 | # Auth | ||
| 179 | if '@' in url: | ||
| 180 | # Last '@' denotes end of auth part | ||
| 181 | auth, url = url.rsplit('@', 1) | ||
| 182 | |||
| 183 | # IPv6 | ||
| 184 | if url and url[0] == '[': | ||
| 185 | host, url = url.split(']', 1) | ||
| 186 | host += ']' | ||
| 187 | |||
| 188 | # Port | ||
| 189 | if ':' in url: | ||
| 190 | _host, port = url.split(':', 1) | ||
| 191 | |||
| 192 | if not host: | ||
| 193 | host = _host | ||
| 194 | |||
| 195 | if port: | ||
| 196 | # If given, ports must be integers. No whitespace, no plus or | ||
| 197 | # minus prefixes, no non-integer digits such as ^2 (superscript). | ||
| 198 | if not port.isdigit(): | ||
| 199 | raise LocationParseError(url) | ||
| 200 | try: | ||
| 201 | port = int(port) | ||
| 202 | except ValueError: | ||
| 203 | raise LocationParseError(url) | ||
| 204 | else: | ||
| 205 | # Blank ports are cool, too. (rfc3986#section-3.2.3) | ||
| 206 | port = None | ||
| 207 | |||
| 208 | elif not host and url: | ||
| 209 | host = url | ||
| 210 | |||
| 211 | if not path: | ||
| 212 | return Url(scheme, auth, host, port, path, query, fragment) | ||
| 213 | |||
| 214 | # Fragment | ||
| 215 | if '#' in path: | ||
| 216 | path, fragment = path.split('#', 1) | ||
| 217 | |||
| 218 | # Query | ||
| 219 | if '?' in path: | ||
| 220 | path, query = path.split('?', 1) | ||
| 221 | |||
| 222 | return Url(scheme, auth, host, port, path, query, fragment) | ||
| 223 | |||
| 224 | |||
| 225 | def get_host(url): | ||
| 226 | """ | ||
| 227 | Deprecated. Use :func:`parse_url` instead. | ||
| 228 | """ | ||
| 229 | p = parse_url(url) | ||
| 230 | return p.scheme or 'http', p.hostname, p.port | ||
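To make the parsing rules above concrete, a short round-trip sketch through `parse_url`, the `Url` properties, and `get_host`:

```python
# Sketch: best-effort parsing that round-trips through the .url property.
u = parse_url('https://user:pw@example.com:8443/a/b?q=1#frag')
assert (u.scheme, u.auth, u.host, u.port) == ('https', 'user:pw', 'example.com', 8443)
assert u.request_uri == '/a/b?q=1'
assert u.url == 'https://user:pw@example.com:8443/a/b?q=1#frag'
assert get_host('example.com:80') == ('http', 'example.com', 80)
```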
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/util/wait.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/util/wait.py new file mode 100644 index 0000000..46392f2 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/util/wait.py | |||
| @@ -0,0 +1,40 @@ | |||
| 1 | from .selectors import ( | ||
| 2 | HAS_SELECT, | ||
| 3 | DefaultSelector, | ||
| 4 | EVENT_READ, | ||
| 5 | EVENT_WRITE | ||
| 6 | ) | ||
| 7 | |||
| 8 | |||
| 9 | def _wait_for_io_events(socks, events, timeout=None): | ||
| 10 | """ Waits for IO events to be available from a list of sockets | ||
| 11 | or optionally a single socket if passed in. Returns a list of | ||
| 12 | sockets that can be interacted with immediately. """ | ||
| 13 | if not HAS_SELECT: | ||
| 14 | raise ValueError('Platform does not have a selector') | ||
| 15 | if not isinstance(socks, list): | ||
| 16 | # Probably just a single socket. | ||
| 17 | if hasattr(socks, "fileno"): | ||
| 18 | socks = [socks] | ||
| 19 | # Otherwise it might be a non-list iterable. | ||
| 20 | else: | ||
| 21 | socks = list(socks) | ||
| 22 | with DefaultSelector() as selector: | ||
| 23 | for sock in socks: | ||
| 24 | selector.register(sock, events) | ||
| 25 | return [key[0].fileobj for key in | ||
| 26 | selector.select(timeout) if key[1] & events] | ||
| 27 | |||
| 28 | |||
| 29 | def wait_for_read(socks, timeout=None): | ||
| 30 | """ Waits for reading to be available from a list of sockets | ||
| 31 | or optionally a single socket if passed in. Returns a list of | ||
| 32 | sockets that can be read from immediately. """ | ||
| 33 | return _wait_for_io_events(socks, EVENT_READ, timeout) | ||
| 34 | |||
| 35 | |||
| 36 | def wait_for_write(socks, timeout=None): | ||
| 37 | """ Waits for writing to be available from a list of sockets | ||
| 38 | or optionally a single socket if passed in. Returns a list of | ||
| 39 | sockets that can be written to immediately. """ | ||
| 40 | return _wait_for_io_events(socks, EVENT_WRITE, timeout) | ||
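A small sketch of the wait helpers with a local socket pair (both the single-socket and list forms shown in the docstrings):

```python
# Sketch: readiness checks over a socketpair using the helpers above.
import socket

a, b = socket.socketpair()
try:
    assert wait_for_read(a, timeout=0.1) == []   # nothing buffered yet
    b.sendall(b"x")
    assert wait_for_read([a, b], timeout=1.0) == [a]
finally:
    a.close()
    b.close()
```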
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/webencodings/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/webencodings/__init__.py new file mode 100644 index 0000000..16671ef --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/webencodings/__init__.py | |||
| @@ -0,0 +1,342 @@ | |||
| 1 | # coding: utf-8 | ||
| 2 | """ | ||
| 3 | |||
| 4 | webencodings | ||
| 5 | ~~~~~~~~~~~~ | ||
| 6 | |||
| 7 | This is a Python implementation of the `WHATWG Encoding standard | ||
| 8 | <http://encoding.spec.whatwg.org/>`_. See README for details. | ||
| 9 | |||
| 10 | :copyright: Copyright 2012 by Simon Sapin | ||
| 11 | :license: BSD, see LICENSE for details. | ||
| 12 | |||
| 13 | """ | ||
| 14 | |||
| 15 | from __future__ import unicode_literals | ||
| 16 | |||
| 17 | import codecs | ||
| 18 | |||
| 19 | from .labels import LABELS | ||
| 20 | |||
| 21 | |||
| 22 | VERSION = '0.5.1' | ||
| 23 | |||
| 24 | |||
| 25 | # Some names in Encoding are not valid Python aliases. Remap these. | ||
| 26 | PYTHON_NAMES = { | ||
| 27 | 'iso-8859-8-i': 'iso-8859-8', | ||
| 28 | 'x-mac-cyrillic': 'mac-cyrillic', | ||
| 29 | 'macintosh': 'mac-roman', | ||
| 30 | 'windows-874': 'cp874'} | ||
| 31 | |||
| 32 | CACHE = {} | ||
| 33 | |||
| 34 | |||
| 35 | def ascii_lower(string): | ||
| 36 | r"""Transform (only) ASCII letters to lower case: A-Z is mapped to a-z. | ||
| 37 | |||
| 38 | :param string: A Unicode string. | ||
| 39 | :returns: A new Unicode string. | ||
| 40 | |||
| 41 | This is used for `ASCII case-insensitive | ||
| 42 | <http://encoding.spec.whatwg.org/#ascii-case-insensitive>`_ | ||
| 43 | matching of encoding labels. | ||
| 44 | The same matching is also used, among other things, | ||
| 45 | for `CSS keywords <http://dev.w3.org/csswg/css-values/#keywords>`_. | ||
| 46 | |||
| 47 | This is different from the :meth:`~py:str.lower` method of Unicode strings | ||
| 48 | which also affects non-ASCII characters, | ||
| 49 | sometimes mapping them into the ASCII range: | ||
| 50 | |||
| 51 | >>> keyword = u'Bac\N{KELVIN SIGN}ground' | ||
| 52 | >>> assert keyword.lower() == u'background' | ||
| 53 | >>> assert ascii_lower(keyword) != keyword.lower() | ||
| 54 | >>> assert ascii_lower(keyword) == u'bac\N{KELVIN SIGN}ground' | ||
| 55 | |||
| 56 | """ | ||
| 57 | # This turns out to be faster than unicode.translate() | ||
| 58 | return string.encode('utf8').lower().decode('utf8') | ||
| 59 | |||
| 60 | |||
| 61 | def lookup(label): | ||
| 62 | """ | ||
| 63 | Look for an encoding by its label. | ||
| 64 | This is the spec’s `get an encoding | ||
| 65 | <http://encoding.spec.whatwg.org/#concept-encoding-get>`_ algorithm. | ||
| 66 | Supported labels are listed there. | ||
| 67 | |||
| 68 | :param label: A string. | ||
| 69 | :returns: | ||
| 70 | An :class:`Encoding` object, or :obj:`None` for an unknown label. | ||
| 71 | |||
| 72 | """ | ||
| 73 | # Only strip ASCII whitespace: U+0009, U+000A, U+000C, U+000D, and U+0020. | ||
| 74 | label = ascii_lower(label.strip('\t\n\f\r ')) | ||
| 75 | name = LABELS.get(label) | ||
| 76 | if name is None: | ||
| 77 | return None | ||
| 78 | encoding = CACHE.get(name) | ||
| 79 | if encoding is None: | ||
| 80 | if name == 'x-user-defined': | ||
| 81 | from .x_user_defined import codec_info | ||
| 82 | else: | ||
| 83 | python_name = PYTHON_NAMES.get(name, name) | ||
| 84 | # Any python_name value that gets to here should be valid. | ||
| 85 | codec_info = codecs.lookup(python_name) | ||
| 86 | encoding = Encoding(name, codec_info) | ||
| 87 | CACHE[name] = encoding | ||
| 88 | return encoding | ||
| 89 | |||
| 90 | |||
| 91 | def _get_encoding(encoding_or_label): | ||
| 92 | """ | ||
| 93 | Accept either an encoding object or label. | ||
| 94 | |||
| 95 | :param encoding: An :class:`Encoding` object or a label string. | ||
| 96 | :returns: An :class:`Encoding` object. | ||
| 97 | :raises: :exc:`~exceptions.LookupError` for an unknown label. | ||
| 98 | |||
| 99 | """ | ||
| 100 | if hasattr(encoding_or_label, 'codec_info'): | ||
| 101 | return encoding_or_label | ||
| 102 | |||
| 103 | encoding = lookup(encoding_or_label) | ||
| 104 | if encoding is None: | ||
| 105 | raise LookupError('Unknown encoding label: %r' % encoding_or_label) | ||
| 106 | return encoding | ||
| 107 | |||
| 108 | |||
| 109 | class Encoding(object): | ||
| 110 | """Reresents a character encoding such as UTF-8, | ||
| 111 | that can be used for decoding or encoding. | ||
| 112 | |||
| 113 | .. attribute:: name | ||
| 114 | |||
| 115 | Canonical name of the encoding | ||
| 116 | |||
| 117 | .. attribute:: codec_info | ||
| 118 | |||
| 119 | The actual implementation of the encoding, | ||
| 120 | a stdlib :class:`~codecs.CodecInfo` object. | ||
| 121 | See :func:`codecs.register`. | ||
| 122 | |||
| 123 | """ | ||
| 124 | def __init__(self, name, codec_info): | ||
| 125 | self.name = name | ||
| 126 | self.codec_info = codec_info | ||
| 127 | |||
| 128 | def __repr__(self): | ||
| 129 | return '<Encoding %s>' % self.name | ||
| 130 | |||
| 131 | |||
| 132 | #: The UTF-8 encoding. Should be used for new content and formats. | ||
| 133 | UTF8 = lookup('utf-8') | ||
| 134 | |||
| 135 | _UTF16LE = lookup('utf-16le') | ||
| 136 | _UTF16BE = lookup('utf-16be') | ||
| 137 | |||
| 138 | |||
| 139 | def decode(input, fallback_encoding, errors='replace'): | ||
| 140 | """ | ||
| 141 | Decode a single string. | ||
| 142 | |||
| 143 | :param input: A byte string | ||
| 144 | :param fallback_encoding: | ||
| 145 | An :class:`Encoding` object or a label string. | ||
| 146 | The encoding to use if :obj:`input` does not have a BOM. | ||
| 147 | :param errors: Type of error handling. See :func:`codecs.register`. | ||
| 148 | :raises: :exc:`~exceptions.LookupError` for an unknown encoding label. | ||
| 149 | :return: | ||
| 150 | A ``(output, encoding)`` tuple of a Unicode string | ||
| 151 | and an :obj:`Encoding`. | ||
| 152 | |||
| 153 | """ | ||
| 154 | # Fail early if `encoding` is an invalid label. | ||
| 155 | fallback_encoding = _get_encoding(fallback_encoding) | ||
| 156 | bom_encoding, input = _detect_bom(input) | ||
| 157 | encoding = bom_encoding or fallback_encoding | ||
| 158 | return encoding.codec_info.decode(input, errors)[0], encoding | ||
| 159 | |||
| 160 | |||
| 161 | def _detect_bom(input): | ||
| 162 | """Return (bom_encoding, input), with any BOM removed from the input.""" | ||
| 163 | if input.startswith(b'\xFF\xFE'): | ||
| 164 | return _UTF16LE, input[2:] | ||
| 165 | if input.startswith(b'\xFE\xFF'): | ||
| 166 | return _UTF16BE, input[2:] | ||
| 167 | if input.startswith(b'\xEF\xBB\xBF'): | ||
| 168 | return UTF8, input[3:] | ||
| 169 | return None, input | ||
| 170 | |||
| 171 | |||
| 172 | def encode(input, encoding=UTF8, errors='strict'): | ||
| 173 | """ | ||
| 174 | Encode a single string. | ||
| 175 | |||
| 176 | :param input: A Unicode string. | ||
| 177 | :param encoding: An :class:`Encoding` object or a label string. | ||
| 178 | :param errors: Type of error handling. See :func:`codecs.register`. | ||
| 179 | :raises: :exc:`~exceptions.LookupError` for an unknown encoding label. | ||
| 180 | :return: A byte string. | ||
| 181 | |||
| 182 | """ | ||
| 183 | return _get_encoding(encoding).codec_info.encode(input, errors)[0] | ||
| 184 | |||
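How the BOM-versus-fallback rule plays out in practice (note that per the WHATWG label registry used by `LABELS`, the 'latin1' label maps to windows-1252):

```python
# Sketch: a BOM overrides the fallback label; encode() defaults to UTF-8.
text, enc = decode(b'\xef\xbb\xbfhello', 'latin1')
assert (text, enc.name) == ('hello', 'utf-8')        # BOM wins
text, enc = decode(b'caf\xe9', 'latin1')
assert (text, enc.name) == ('caf\xe9', 'windows-1252')
assert encode('caf\xe9') == b'caf\xc3\xa9'           # UTF-8 by default
```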
| 185 | |||
| 186 | def iter_decode(input, fallback_encoding, errors='replace'): | ||
| 187 | """ | ||
| 188 | "Pull"-based decoder. | ||
| 189 | |||
| 190 | :param input: | ||
| 191 | An iterable of byte strings. | ||
| 192 | |||
| 193 | The input is first consumed just enough to determine the encoding | ||
| 194 | based on the presence of a BOM, | ||
| 195 | then consumed on demand as the return value is iterated. | ||
| 196 | :param fallback_encoding: | ||
| 197 | An :class:`Encoding` object or a label string. | ||
| 198 | The encoding to use if :obj:`input` does not have a BOM. | ||
| 199 | :param errors: Type of error handling. See :func:`codecs.register`. | ||
| 200 | :raises: :exc:`~exceptions.LookupError` for an unknown encoding label. | ||
| 201 | :returns: | ||
| 202 | An ``(output, encoding)`` tuple. | ||
| 203 | :obj:`output` is an iterable of Unicode strings, | ||
| 204 | :obj:`encoding` is the :obj:`Encoding` that is being used. | ||
| 205 | |||
| 206 | """ | ||
| 207 | |||
| 208 | decoder = IncrementalDecoder(fallback_encoding, errors) | ||
| 209 | generator = _iter_decode_generator(input, decoder) | ||
| 210 | encoding = next(generator) | ||
| 211 | return generator, encoding | ||
| 212 | |||
| 213 | |||
| 214 | def _iter_decode_generator(input, decoder): | ||
| 215 | """Return a generator that first yields the :obj:`Encoding`, | ||
| 216 | then yields output chunks as Unicode strings. | ||
| 217 | |||
| 218 | """ | ||
| 219 | decode = decoder.decode | ||
| 220 | input = iter(input) | ||
| 221 | for chunk in input: | ||
| 222 | output = decode(chunk) | ||
| 223 | if output: | ||
| 224 | assert decoder.encoding is not None | ||
| 225 | yield decoder.encoding | ||
| 226 | yield output | ||
| 227 | break | ||
| 228 | else: | ||
| 229 | # Input exhausted without determining the encoding | ||
| 230 | output = decode(b'', final=True) | ||
| 231 | assert decoder.encoding is not None | ||
| 232 | yield decoder.encoding | ||
| 233 | if output: | ||
| 234 | yield output | ||
| 235 | return | ||
| 236 | |||
| 237 | for chunk in input: | ||
| 238 | output = decode(chunk) | ||
| 239 | if output: | ||
| 240 | yield output | ||
| 241 | output = decode(b'', final=True) | ||
| 242 | if output: | ||
| 243 | yield output | ||
| 244 | |||
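A sketch of the pull-based flow: the generator yields the chosen encoding first (consumed by `iter_decode` via `next()`), then streams the decoded chunks, even when the BOM is split across inputs:

```python
# Sketch: encoding is decided from the first bytes, output then streams.
chunks = [b'\xef', b'\xbb\xbf', b'hel', b'lo']
output, enc = iter_decode(chunks, 'latin1')
assert enc.name == 'utf-8'          # determined from the (split) BOM
assert ''.join(output) == 'hello'
```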
| 245 | |||
| 246 | def iter_encode(input, encoding=UTF8, errors='strict'): | ||
| 247 | """ | ||
| 248 | “Pull”-based encoder. | ||
| 249 | |||
| 250 | :param input: An iterable of Unicode strings. | ||
| 251 | :param encoding: An :class:`Encoding` object or a label string. | ||
| 252 | :param errors: Type of error handling. See :func:`codecs.register`. | ||
| 253 | :raises: :exc:`~exceptions.LookupError` for an unknown encoding label. | ||
| 254 | :returns: An iterable of byte strings. | ||
| 255 | |||
| 256 | """ | ||
| 257 | # Fail early if `encoding` is an invalid label. | ||
| 258 | encode = IncrementalEncoder(encoding, errors).encode | ||
| 259 | return _iter_encode_generator(input, encode) | ||
| 260 | |||
| 261 | |||
| 262 | def _iter_encode_generator(input, encode): | ||
| 263 | for chunk in input: | ||
| 264 | output = encode(chunk) | ||
| 265 | if output: | ||
| 266 | yield output | ||
| 267 | output = encode('', final=True) | ||
| 268 | if output: | ||
| 269 | yield output | ||
| 270 | |||
| 271 | |||
| 272 | class IncrementalDecoder(object): | ||
| 273 | """ | ||
| 274 | “Push”-based decoder. | ||
| 275 | |||
| 276 | :param fallback_encoding: | ||
| 277 | An :class:`Encoding` object or a label string. | ||
| 278 | The encoding to use if :obj:`input` does not have a BOM. | ||
| 279 | :param errors: Type of error handling. See :func:`codecs.register`. | ||
| 280 | :raises: :exc:`~exceptions.LookupError` for an unknown encoding label. | ||
| 281 | |||
| 282 | """ | ||
| 283 | def __init__(self, fallback_encoding, errors='replace'): | ||
| 284 | # Fail early if `encoding` is an invalid label. | ||
| 285 | self._fallback_encoding = _get_encoding(fallback_encoding) | ||
| 286 | self._errors = errors | ||
| 287 | self._buffer = b'' | ||
| 288 | self._decoder = None | ||
| 289 | #: The actual :class:`Encoding` that is being used, | ||
| 290 | #: or :obj:`None` if that is not determined yet. | ||
| 291 | #: (I.e. if there is not enough input yet to determine | ||
| 292 | #: whether there is a BOM.) | ||
| 293 | self.encoding = None # Not known yet. | ||
| 294 | |||
| 295 | def decode(self, input, final=False): | ||
| 296 | """Decode one chunk of the input. | ||
| 297 | |||
| 298 | :param input: A byte string. | ||
| 299 | :param final: | ||
| 300 | Indicate that no more input is available. | ||
| 301 | Must be :obj:`True` if this is the last call. | ||
| 302 | :returns: A Unicode string. | ||
| 303 | |||
| 304 | """ | ||
| 305 | decoder = self._decoder | ||
| 306 | if decoder is not None: | ||
| 307 | return decoder(input, final) | ||
| 308 | |||
| 309 | input = self._buffer + input | ||
| 310 | encoding, input = _detect_bom(input) | ||
| 311 | if encoding is None: | ||
| 312 | if len(input) < 3 and not final: # Not enough data yet. | ||
| 313 | self._buffer = input | ||
| 314 | return '' | ||
| 315 | else: # No BOM | ||
| 316 | encoding = self._fallback_encoding | ||
| 317 | decoder = encoding.codec_info.incrementaldecoder(self._errors).decode | ||
| 318 | self._decoder = decoder | ||
| 319 | self.encoding = encoding | ||
| 320 | return decoder(input, final) | ||
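To make the buffering above concrete, a sketch under the same import-path assumption: a prefix that could still grow into a BOM is held back, and `self.encoding` stays :obj:`None` until the answer is known.

    from pip._vendor.webencodings import IncrementalDecoder

    decoder = IncrementalDecoder('utf-8')
    assert decoder.decode(b'\xef\xbb') == u''  # could still be a UTF-8 BOM
    assert decoder.encoding is None            # not determined yet
    assert decoder.decode(b'\xbf\xc3\xa9', final=True) == u'\xe9'
    assert decoder.encoding.name == 'utf-8'    # BOM detected and stripped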
| 321 | |||
| 322 | |||
| 323 | class IncrementalEncoder(object): | ||
| 324 | """ | ||
| 325 | “Push”-based encoder. | ||
| 326 | |||
| 327 | :param encoding: An :class:`Encoding` object or a label string. | ||
| 328 | :param errors: Type of error handling. See :func:`codecs.register`. | ||
| 329 | :raises: :exc:`~exceptions.LookupError` for an unknown encoding label. | ||
| 330 | |||
| 331 | .. method:: encode(input, final=False) | ||
| 332 | |||
| 333 | :param input: A Unicode string. | ||
| 334 | :param final: | ||
| 335 | Indicate that no more input is available. | ||
| 336 | Must be :obj:`True` if this is the last call. | ||
| 337 | :returns: A byte string. | ||
| 338 | |||
| 339 | """ | ||
| 340 | def __init__(self, encoding=UTF8, errors='strict'): | ||
| 341 | encoding = _get_encoding(encoding) | ||
| 342 | self.encode = encoding.codec_info.incrementalencoder(errors).encode | ||
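A matching sketch for the push-based encoder (same import-path assumption); the encoding is fixed up front, so no BOM sniffing is needed:

    from pip._vendor.webencodings import IncrementalEncoder

    encoder = IncrementalEncoder('utf-8')
    assert encoder.encode(u'H\xe9') == b'H\xc3\xa9'
    assert encoder.encode(u'llo', final=True) == b'llo'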
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/webencodings/labels.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/webencodings/labels.py new file mode 100644 index 0000000..9dae102 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/webencodings/labels.py | |||
| @@ -0,0 +1,231 @@ | |||
| 1 | """ | ||
| 2 | |||
| 3 | webencodings.labels | ||
| 4 | ~~~~~~~~~~~~~~~~~~~ | ||
| 5 | |||
| 6 | Map encoding labels to their names. | ||
| 7 | |||
| 8 | :copyright: Copyright 2012 by Simon Sapin | ||
| 9 | :license: BSD, see LICENSE for details. | ||
| 10 | |||
| 11 | """ | ||
| 12 | |||
| 13 | # XXX Do not edit! | ||
| 14 | # This file is automatically generated by mklabels.py | ||
| 15 | |||
| 16 | LABELS = { | ||
| 17 | 'unicode-1-1-utf-8': 'utf-8', | ||
| 18 | 'utf-8': 'utf-8', | ||
| 19 | 'utf8': 'utf-8', | ||
| 20 | '866': 'ibm866', | ||
| 21 | 'cp866': 'ibm866', | ||
| 22 | 'csibm866': 'ibm866', | ||
| 23 | 'ibm866': 'ibm866', | ||
| 24 | 'csisolatin2': 'iso-8859-2', | ||
| 25 | 'iso-8859-2': 'iso-8859-2', | ||
| 26 | 'iso-ir-101': 'iso-8859-2', | ||
| 27 | 'iso8859-2': 'iso-8859-2', | ||
| 28 | 'iso88592': 'iso-8859-2', | ||
| 29 | 'iso_8859-2': 'iso-8859-2', | ||
| 30 | 'iso_8859-2:1987': 'iso-8859-2', | ||
| 31 | 'l2': 'iso-8859-2', | ||
| 32 | 'latin2': 'iso-8859-2', | ||
| 33 | 'csisolatin3': 'iso-8859-3', | ||
| 34 | 'iso-8859-3': 'iso-8859-3', | ||
| 35 | 'iso-ir-109': 'iso-8859-3', | ||
| 36 | 'iso8859-3': 'iso-8859-3', | ||
| 37 | 'iso88593': 'iso-8859-3', | ||
| 38 | 'iso_8859-3': 'iso-8859-3', | ||
| 39 | 'iso_8859-3:1988': 'iso-8859-3', | ||
| 40 | 'l3': 'iso-8859-3', | ||
| 41 | 'latin3': 'iso-8859-3', | ||
| 42 | 'csisolatin4': 'iso-8859-4', | ||
| 43 | 'iso-8859-4': 'iso-8859-4', | ||
| 44 | 'iso-ir-110': 'iso-8859-4', | ||
| 45 | 'iso8859-4': 'iso-8859-4', | ||
| 46 | 'iso88594': 'iso-8859-4', | ||
| 47 | 'iso_8859-4': 'iso-8859-4', | ||
| 48 | 'iso_8859-4:1988': 'iso-8859-4', | ||
| 49 | 'l4': 'iso-8859-4', | ||
| 50 | 'latin4': 'iso-8859-4', | ||
| 51 | 'csisolatincyrillic': 'iso-8859-5', | ||
| 52 | 'cyrillic': 'iso-8859-5', | ||
| 53 | 'iso-8859-5': 'iso-8859-5', | ||
| 54 | 'iso-ir-144': 'iso-8859-5', | ||
| 55 | 'iso8859-5': 'iso-8859-5', | ||
| 56 | 'iso88595': 'iso-8859-5', | ||
| 57 | 'iso_8859-5': 'iso-8859-5', | ||
| 58 | 'iso_8859-5:1988': 'iso-8859-5', | ||
| 59 | 'arabic': 'iso-8859-6', | ||
| 60 | 'asmo-708': 'iso-8859-6', | ||
| 61 | 'csiso88596e': 'iso-8859-6', | ||
| 62 | 'csiso88596i': 'iso-8859-6', | ||
| 63 | 'csisolatinarabic': 'iso-8859-6', | ||
| 64 | 'ecma-114': 'iso-8859-6', | ||
| 65 | 'iso-8859-6': 'iso-8859-6', | ||
| 66 | 'iso-8859-6-e': 'iso-8859-6', | ||
| 67 | 'iso-8859-6-i': 'iso-8859-6', | ||
| 68 | 'iso-ir-127': 'iso-8859-6', | ||
| 69 | 'iso8859-6': 'iso-8859-6', | ||
| 70 | 'iso88596': 'iso-8859-6', | ||
| 71 | 'iso_8859-6': 'iso-8859-6', | ||
| 72 | 'iso_8859-6:1987': 'iso-8859-6', | ||
| 73 | 'csisolatingreek': 'iso-8859-7', | ||
| 74 | 'ecma-118': 'iso-8859-7', | ||
| 75 | 'elot_928': 'iso-8859-7', | ||
| 76 | 'greek': 'iso-8859-7', | ||
| 77 | 'greek8': 'iso-8859-7', | ||
| 78 | 'iso-8859-7': 'iso-8859-7', | ||
| 79 | 'iso-ir-126': 'iso-8859-7', | ||
| 80 | 'iso8859-7': 'iso-8859-7', | ||
| 81 | 'iso88597': 'iso-8859-7', | ||
| 82 | 'iso_8859-7': 'iso-8859-7', | ||
| 83 | 'iso_8859-7:1987': 'iso-8859-7', | ||
| 84 | 'sun_eu_greek': 'iso-8859-7', | ||
| 85 | 'csiso88598e': 'iso-8859-8', | ||
| 86 | 'csisolatinhebrew': 'iso-8859-8', | ||
| 87 | 'hebrew': 'iso-8859-8', | ||
| 88 | 'iso-8859-8': 'iso-8859-8', | ||
| 89 | 'iso-8859-8-e': 'iso-8859-8', | ||
| 90 | 'iso-ir-138': 'iso-8859-8', | ||
| 91 | 'iso8859-8': 'iso-8859-8', | ||
| 92 | 'iso88598': 'iso-8859-8', | ||
| 93 | 'iso_8859-8': 'iso-8859-8', | ||
| 94 | 'iso_8859-8:1988': 'iso-8859-8', | ||
| 95 | 'visual': 'iso-8859-8', | ||
| 96 | 'csiso88598i': 'iso-8859-8-i', | ||
| 97 | 'iso-8859-8-i': 'iso-8859-8-i', | ||
| 98 | 'logical': 'iso-8859-8-i', | ||
| 99 | 'csisolatin6': 'iso-8859-10', | ||
| 100 | 'iso-8859-10': 'iso-8859-10', | ||
| 101 | 'iso-ir-157': 'iso-8859-10', | ||
| 102 | 'iso8859-10': 'iso-8859-10', | ||
| 103 | 'iso885910': 'iso-8859-10', | ||
| 104 | 'l6': 'iso-8859-10', | ||
| 105 | 'latin6': 'iso-8859-10', | ||
| 106 | 'iso-8859-13': 'iso-8859-13', | ||
| 107 | 'iso8859-13': 'iso-8859-13', | ||
| 108 | 'iso885913': 'iso-8859-13', | ||
| 109 | 'iso-8859-14': 'iso-8859-14', | ||
| 110 | 'iso8859-14': 'iso-8859-14', | ||
| 111 | 'iso885914': 'iso-8859-14', | ||
| 112 | 'csisolatin9': 'iso-8859-15', | ||
| 113 | 'iso-8859-15': 'iso-8859-15', | ||
| 114 | 'iso8859-15': 'iso-8859-15', | ||
| 115 | 'iso885915': 'iso-8859-15', | ||
| 116 | 'iso_8859-15': 'iso-8859-15', | ||
| 117 | 'l9': 'iso-8859-15', | ||
| 118 | 'iso-8859-16': 'iso-8859-16', | ||
| 119 | 'cskoi8r': 'koi8-r', | ||
| 120 | 'koi': 'koi8-r', | ||
| 121 | 'koi8': 'koi8-r', | ||
| 122 | 'koi8-r': 'koi8-r', | ||
| 123 | 'koi8_r': 'koi8-r', | ||
| 124 | 'koi8-u': 'koi8-u', | ||
| 125 | 'csmacintosh': 'macintosh', | ||
| 126 | 'mac': 'macintosh', | ||
| 127 | 'macintosh': 'macintosh', | ||
| 128 | 'x-mac-roman': 'macintosh', | ||
| 129 | 'dos-874': 'windows-874', | ||
| 130 | 'iso-8859-11': 'windows-874', | ||
| 131 | 'iso8859-11': 'windows-874', | ||
| 132 | 'iso885911': 'windows-874', | ||
| 133 | 'tis-620': 'windows-874', | ||
| 134 | 'windows-874': 'windows-874', | ||
| 135 | 'cp1250': 'windows-1250', | ||
| 136 | 'windows-1250': 'windows-1250', | ||
| 137 | 'x-cp1250': 'windows-1250', | ||
| 138 | 'cp1251': 'windows-1251', | ||
| 139 | 'windows-1251': 'windows-1251', | ||
| 140 | 'x-cp1251': 'windows-1251', | ||
| 141 | 'ansi_x3.4-1968': 'windows-1252', | ||
| 142 | 'ascii': 'windows-1252', | ||
| 143 | 'cp1252': 'windows-1252', | ||
| 144 | 'cp819': 'windows-1252', | ||
| 145 | 'csisolatin1': 'windows-1252', | ||
| 146 | 'ibm819': 'windows-1252', | ||
| 147 | 'iso-8859-1': 'windows-1252', | ||
| 148 | 'iso-ir-100': 'windows-1252', | ||
| 149 | 'iso8859-1': 'windows-1252', | ||
| 150 | 'iso88591': 'windows-1252', | ||
| 151 | 'iso_8859-1': 'windows-1252', | ||
| 152 | 'iso_8859-1:1987': 'windows-1252', | ||
| 153 | 'l1': 'windows-1252', | ||
| 154 | 'latin1': 'windows-1252', | ||
| 155 | 'us-ascii': 'windows-1252', | ||
| 156 | 'windows-1252': 'windows-1252', | ||
| 157 | 'x-cp1252': 'windows-1252', | ||
| 158 | 'cp1253': 'windows-1253', | ||
| 159 | 'windows-1253': 'windows-1253', | ||
| 160 | 'x-cp1253': 'windows-1253', | ||
| 161 | 'cp1254': 'windows-1254', | ||
| 162 | 'csisolatin5': 'windows-1254', | ||
| 163 | 'iso-8859-9': 'windows-1254', | ||
| 164 | 'iso-ir-148': 'windows-1254', | ||
| 165 | 'iso8859-9': 'windows-1254', | ||
| 166 | 'iso88599': 'windows-1254', | ||
| 167 | 'iso_8859-9': 'windows-1254', | ||
| 168 | 'iso_8859-9:1989': 'windows-1254', | ||
| 169 | 'l5': 'windows-1254', | ||
| 170 | 'latin5': 'windows-1254', | ||
| 171 | 'windows-1254': 'windows-1254', | ||
| 172 | 'x-cp1254': 'windows-1254', | ||
| 173 | 'cp1255': 'windows-1255', | ||
| 174 | 'windows-1255': 'windows-1255', | ||
| 175 | 'x-cp1255': 'windows-1255', | ||
| 176 | 'cp1256': 'windows-1256', | ||
| 177 | 'windows-1256': 'windows-1256', | ||
| 178 | 'x-cp1256': 'windows-1256', | ||
| 179 | 'cp1257': 'windows-1257', | ||
| 180 | 'windows-1257': 'windows-1257', | ||
| 181 | 'x-cp1257': 'windows-1257', | ||
| 182 | 'cp1258': 'windows-1258', | ||
| 183 | 'windows-1258': 'windows-1258', | ||
| 184 | 'x-cp1258': 'windows-1258', | ||
| 185 | 'x-mac-cyrillic': 'x-mac-cyrillic', | ||
| 186 | 'x-mac-ukrainian': 'x-mac-cyrillic', | ||
| 187 | 'chinese': 'gbk', | ||
| 188 | 'csgb2312': 'gbk', | ||
| 189 | 'csiso58gb231280': 'gbk', | ||
| 190 | 'gb2312': 'gbk', | ||
| 191 | 'gb_2312': 'gbk', | ||
| 192 | 'gb_2312-80': 'gbk', | ||
| 193 | 'gbk': 'gbk', | ||
| 194 | 'iso-ir-58': 'gbk', | ||
| 195 | 'x-gbk': 'gbk', | ||
| 196 | 'gb18030': 'gb18030', | ||
| 197 | 'hz-gb-2312': 'hz-gb-2312', | ||
| 198 | 'big5': 'big5', | ||
| 199 | 'big5-hkscs': 'big5', | ||
| 200 | 'cn-big5': 'big5', | ||
| 201 | 'csbig5': 'big5', | ||
| 202 | 'x-x-big5': 'big5', | ||
| 203 | 'cseucpkdfmtjapanese': 'euc-jp', | ||
| 204 | 'euc-jp': 'euc-jp', | ||
| 205 | 'x-euc-jp': 'euc-jp', | ||
| 206 | 'csiso2022jp': 'iso-2022-jp', | ||
| 207 | 'iso-2022-jp': 'iso-2022-jp', | ||
| 208 | 'csshiftjis': 'shift_jis', | ||
| 209 | 'ms_kanji': 'shift_jis', | ||
| 210 | 'shift-jis': 'shift_jis', | ||
| 211 | 'shift_jis': 'shift_jis', | ||
| 212 | 'sjis': 'shift_jis', | ||
| 213 | 'windows-31j': 'shift_jis', | ||
| 214 | 'x-sjis': 'shift_jis', | ||
| 215 | 'cseuckr': 'euc-kr', | ||
| 216 | 'csksc56011987': 'euc-kr', | ||
| 217 | 'euc-kr': 'euc-kr', | ||
| 218 | 'iso-ir-149': 'euc-kr', | ||
| 219 | 'korean': 'euc-kr', | ||
| 220 | 'ks_c_5601-1987': 'euc-kr', | ||
| 221 | 'ks_c_5601-1989': 'euc-kr', | ||
| 222 | 'ksc5601': 'euc-kr', | ||
| 223 | 'ksc_5601': 'euc-kr', | ||
| 224 | 'windows-949': 'euc-kr', | ||
| 225 | 'csiso2022kr': 'iso-2022-kr', | ||
| 226 | 'iso-2022-kr': 'iso-2022-kr', | ||
| 227 | 'utf-16be': 'utf-16be', | ||
| 228 | 'utf-16': 'utf-16le', | ||
| 229 | 'utf-16le': 'utf-16le', | ||
| 230 | 'x-user-defined': 'x-user-defined', | ||
| 231 | } | ||
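These mappings follow the WHATWG Encoding standard rather than IANA, which is why `ascii`, `latin1`, and `iso-8859-1` all resolve to `windows-1252`. A quick sketch, assuming the vendored import path:

    from pip._vendor.webencodings.labels import LABELS

    # Keys are already-lowercased labels; values are canonical names.
    assert LABELS['ascii'] == 'windows-1252'
    assert LABELS['utf8'] == 'utf-8'
    assert LABELS['sjis'] == 'shift_jis'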
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/webencodings/mklabels.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/webencodings/mklabels.py new file mode 100644 index 0000000..a7f7e76 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/webencodings/mklabels.py | |||
| @@ -0,0 +1,59 @@ | |||
| 1 | """ | ||
| 2 | |||
| 3 | webencodings.mklabels | ||
| 4 | ~~~~~~~~~~~~~~~~~~~~~ | ||
| 5 | |||
| 6 | Regenerate the webencodings.labels module. | ||
| 7 | |||
| 8 | :copyright: Copyright 2012 by Simon Sapin | ||
| 9 | :license: BSD, see LICENSE for details. | ||
| 10 | |||
| 11 | """ | ||
| 12 | |||
| 13 | import json | ||
| 14 | try: | ||
| 15 | from urllib import urlopen | ||
| 16 | except ImportError: | ||
| 17 | from urllib.request import urlopen | ||
| 18 | |||
| 19 | |||
| 20 | def assert_lower(string): | ||
| 21 | assert string == string.lower() | ||
| 22 | return string | ||
| 23 | |||
| 24 | |||
| 25 | def generate(url): | ||
| 26 | parts = ['''\ | ||
| 27 | """ | ||
| 28 | |||
| 29 | webencodings.labels | ||
| 30 | ~~~~~~~~~~~~~~~~~~~ | ||
| 31 | |||
| 32 | Map encoding labels to their names. | ||
| 33 | |||
| 34 | :copyright: Copyright 2012 by Simon Sapin | ||
| 35 | :license: BSD, see LICENSE for details. | ||
| 36 | |||
| 37 | """ | ||
| 38 | |||
| 39 | # XXX Do not edit! | ||
| 40 | # This file is automatically generated by mklabels.py | ||
| 41 | |||
| 42 | LABELS = { | ||
| 43 | '''] | ||
| 44 | labels = [ | ||
| 45 | (repr(assert_lower(label)).lstrip('u'), | ||
| 46 | repr(encoding['name']).lstrip('u')) | ||
| 47 | for category in json.loads(urlopen(url).read().decode('ascii')) | ||
| 48 | for encoding in category['encodings'] | ||
| 49 | for label in encoding['labels']] | ||
| 50 | max_len = max(len(label) for label, name in labels) | ||
| 51 | parts.extend( | ||
| 52 | ' %s:%s %s,\n' % (label, ' ' * (max_len - len(label)), name) | ||
| 53 | for label, name in labels) | ||
| 54 | parts.append('}') | ||
| 55 | return ''.join(parts) | ||
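For illustration, the padding above lines the canonical names up in one column; a self-contained rerun of that logic on two hand-made entries (hypothetical data, not fetched from the spec; the four-space indent assumes the layout seen in labels.py above):

    labels = [("'utf8'", "'utf-8'"), ("'unicode-1-1-utf-8'", "'utf-8'")]
    max_len = max(len(label) for label, name in labels)
    for label, name in labels:
        print('    %s:%s %s,' % (label, ' ' * (max_len - len(label)), name))
    # prints, with the names aligned in one column:
    #     'utf8':              'utf-8',
    #     'unicode-1-1-utf-8': 'utf-8',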
| 56 | |||
| 57 | |||
| 58 | if __name__ == '__main__': | ||
| 59 | print(generate('http://encoding.spec.whatwg.org/encodings.json')) | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/webencodings/tests.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/webencodings/tests.py new file mode 100644 index 0000000..f6cdbf9 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/webencodings/tests.py | |||
| @@ -0,0 +1,153 @@ | |||
| 1 | # coding: utf-8 | ||
| 2 | """ | ||
| 3 | |||
| 4 | webencodings.tests | ||
| 5 | ~~~~~~~~~~~~~~~~~~ | ||
| 6 | |||
| 7 | A basic test suite for Encoding. | ||
| 8 | |||
| 9 | :copyright: Copyright 2012 by Simon Sapin | ||
| 10 | :license: BSD, see LICENSE for details. | ||
| 11 | |||
| 12 | """ | ||
| 13 | |||
| 14 | from __future__ import unicode_literals | ||
| 15 | |||
| 16 | from . import (lookup, LABELS, decode, encode, iter_decode, iter_encode, | ||
| 17 | IncrementalDecoder, IncrementalEncoder, UTF8) | ||
| 18 | |||
| 19 | |||
| 20 | def assert_raises(exception, function, *args, **kwargs): | ||
| 21 | try: | ||
| 22 | function(*args, **kwargs) | ||
| 23 | except exception: | ||
| 24 | return | ||
| 25 | else: # pragma: no cover | ||
| 26 | raise AssertionError('Did not raise %s.' % exception) | ||
| 27 | |||
| 28 | |||
| 29 | def test_labels(): | ||
| 30 | assert lookup('utf-8').name == 'utf-8' | ||
| 31 | assert lookup('Utf-8').name == 'utf-8' | ||
| 32 | assert lookup('UTF-8').name == 'utf-8' | ||
| 33 | assert lookup('utf8').name == 'utf-8' | ||
| 34 | assert lookup('UTF8').name == 'utf-8' | ||
| 35 | assert lookup('utf8 ').name == 'utf-8' | ||
| 36 | assert lookup(' \r\nutf8\t').name == 'utf-8' | ||
| 37 | assert lookup('u8') is None # Python label. | ||
| 38 | assert lookup('utf-8 ') is None # Non-ASCII white space. | ||
| 39 | |||
| 40 | assert lookup('US-ASCII').name == 'windows-1252' | ||
| 41 | assert lookup('iso-8859-1').name == 'windows-1252' | ||
| 42 | assert lookup('latin1').name == 'windows-1252' | ||
| 43 | assert lookup('LATIN1').name == 'windows-1252' | ||
| 44 | assert lookup('latin-1') is None | ||
| 45 | assert lookup('LATİN1') is None # ASCII-only case insensitivity. | ||
| 46 | |||
| 47 | |||
| 48 | def test_all_labels(): | ||
| 49 | for label in LABELS: | ||
| 50 | assert decode(b'', label) == ('', lookup(label)) | ||
| 51 | assert encode('', label) == b'' | ||
| 52 | for repeat in [0, 1, 12]: | ||
| 53 | output, _ = iter_decode([b''] * repeat, label) | ||
| 54 | assert list(output) == [] | ||
| 55 | assert list(iter_encode([''] * repeat, label)) == [] | ||
| 56 | decoder = IncrementalDecoder(label) | ||
| 57 | assert decoder.decode(b'') == '' | ||
| 58 | assert decoder.decode(b'', final=True) == '' | ||
| 59 | encoder = IncrementalEncoder(label) | ||
| 60 | assert encoder.encode('') == b'' | ||
| 61 | assert encoder.encode('', final=True) == b'' | ||
| 62 | # All encoding names are valid labels too: | ||
| 63 | for name in set(LABELS.values()): | ||
| 64 | assert lookup(name).name == name | ||
| 65 | |||
| 66 | |||
| 67 | def test_invalid_label(): | ||
| 68 | assert_raises(LookupError, decode, b'\xEF\xBB\xBF\xc3\xa9', 'invalid') | ||
| 69 | assert_raises(LookupError, encode, 'é', 'invalid') | ||
| 70 | assert_raises(LookupError, iter_decode, [], 'invalid') | ||
| 71 | assert_raises(LookupError, iter_encode, [], 'invalid') | ||
| 72 | assert_raises(LookupError, IncrementalDecoder, 'invalid') | ||
| 73 | assert_raises(LookupError, IncrementalEncoder, 'invalid') | ||
| 74 | |||
| 75 | |||
| 76 | def test_decode(): | ||
| 77 | assert decode(b'\x80', 'latin1') == ('€', lookup('latin1')) | ||
| 78 | assert decode(b'\x80', lookup('latin1')) == ('€', lookup('latin1')) | ||
| 79 | assert decode(b'\xc3\xa9', 'utf8') == ('é', lookup('utf8')) | ||
| 80 | assert decode(b'\xc3\xa9', UTF8) == ('é', lookup('utf8')) | ||
| 81 | assert decode(b'\xc3\xa9', 'ascii') == ('é', lookup('ascii')) | ||
| 82 | assert decode(b'\xEF\xBB\xBF\xc3\xa9', 'ascii') == ('é', lookup('utf8')) # UTF-8 with BOM | ||
| 83 | |||
| 84 | assert decode(b'\xFE\xFF\x00\xe9', 'ascii') == ('é', lookup('utf-16be')) # UTF-16-BE with BOM | ||
| 85 | assert decode(b'\xFF\xFE\xe9\x00', 'ascii') == ('é', lookup('utf-16le')) # UTF-16-LE with BOM | ||
| 86 | assert decode(b'\xFE\xFF\xe9\x00', 'ascii') == ('\ue900', lookup('utf-16be')) | ||
| 87 | assert decode(b'\xFF\xFE\x00\xe9', 'ascii') == ('\ue900', lookup('utf-16le')) | ||
| 88 | |||
| 89 | assert decode(b'\x00\xe9', 'UTF-16BE') == ('é', lookup('utf-16be')) | ||
| 90 | assert decode(b'\xe9\x00', 'UTF-16LE') == ('é', lookup('utf-16le')) | ||
| 91 | assert decode(b'\xe9\x00', 'UTF-16') == ('é', lookup('utf-16le')) | ||
| 92 | |||
| 93 | assert decode(b'\xe9\x00', 'UTF-16BE') == ('\ue900', lookup('utf-16be')) | ||
| 94 | assert decode(b'\x00\xe9', 'UTF-16LE') == ('\ue900', lookup('utf-16le')) | ||
| 95 | assert decode(b'\x00\xe9', 'UTF-16') == ('\ue900', lookup('utf-16le')) | ||
| 96 | |||
| 97 | |||
| 98 | def test_encode(): | ||
| 99 | assert encode('é', 'latin1') == b'\xe9' | ||
| 100 | assert encode('é', 'utf8') == b'\xc3\xa9' | ||
| 101 | assert encode('é', 'utf8') == b'\xc3\xa9' | ||
| 102 | assert encode('é', 'utf-16') == b'\xe9\x00' | ||
| 103 | assert encode('é', 'utf-16le') == b'\xe9\x00' | ||
| 104 | assert encode('é', 'utf-16be') == b'\x00\xe9' | ||
| 105 | |||
| 106 | |||
| 107 | def test_iter_decode(): | ||
| 108 | def iter_decode_to_string(input, fallback_encoding): | ||
| 109 | output, _encoding = iter_decode(input, fallback_encoding) | ||
| 110 | return ''.join(output) | ||
| 111 | assert iter_decode_to_string([], 'latin1') == '' | ||
| 112 | assert iter_decode_to_string([b''], 'latin1') == '' | ||
| 113 | assert iter_decode_to_string([b'\xe9'], 'latin1') == 'é' | ||
| 114 | assert iter_decode_to_string([b'hello'], 'latin1') == 'hello' | ||
| 115 | assert iter_decode_to_string([b'he', b'llo'], 'latin1') == 'hello' | ||
| 116 | assert iter_decode_to_string([b'hell', b'o'], 'latin1') == 'hello' | ||
| 117 | assert iter_decode_to_string([b'\xc3\xa9'], 'latin1') == 'é' | ||
| 118 | assert iter_decode_to_string([b'\xEF\xBB\xBF\xc3\xa9'], 'latin1') == 'é' | ||
| 119 | assert iter_decode_to_string([ | ||
| 120 | b'\xEF\xBB\xBF', b'\xc3', b'\xa9'], 'latin1') == 'é' | ||
| 121 | assert iter_decode_to_string([ | ||
| 122 | b'\xEF\xBB\xBF', b'a', b'\xc3'], 'latin1') == 'a\uFFFD' | ||
| 123 | assert iter_decode_to_string([ | ||
| 124 | b'', b'\xEF', b'', b'', b'\xBB\xBF\xc3', b'\xa9'], 'latin1') == 'é' | ||
| 125 | assert iter_decode_to_string([b'\xEF\xBB\xBF'], 'latin1') == '' | ||
| 126 | assert iter_decode_to_string([b'\xEF\xBB'], 'latin1') == 'ï»' | ||
| 127 | assert iter_decode_to_string([b'\xFE\xFF\x00\xe9'], 'latin1') == 'é' | ||
| 128 | assert iter_decode_to_string([b'\xFF\xFE\xe9\x00'], 'latin1') == 'é' | ||
| 129 | assert iter_decode_to_string([ | ||
| 130 | b'', b'\xFF', b'', b'', b'\xFE\xe9', b'\x00'], 'latin1') == 'é' | ||
| 131 | assert iter_decode_to_string([ | ||
| 132 | b'', b'h\xe9', b'llo'], 'x-user-defined') == 'h\uF7E9llo' | ||
| 133 | |||
| 134 | |||
| 135 | def test_iter_encode(): | ||
| 136 | assert b''.join(iter_encode([], 'latin1')) == b'' | ||
| 137 | assert b''.join(iter_encode([''], 'latin1')) == b'' | ||
| 138 | assert b''.join(iter_encode(['é'], 'latin1')) == b'\xe9' | ||
| 139 | assert b''.join(iter_encode(['', 'é', '', ''], 'latin1')) == b'\xe9' | ||
| 140 | assert b''.join(iter_encode(['', 'é', '', ''], 'utf-16')) == b'\xe9\x00' | ||
| 141 | assert b''.join(iter_encode(['', 'é', '', ''], 'utf-16le')) == b'\xe9\x00' | ||
| 142 | assert b''.join(iter_encode(['', 'é', '', ''], 'utf-16be')) == b'\x00\xe9' | ||
| 143 | assert b''.join(iter_encode([ | ||
| 144 | '', 'h\uF7E9', '', 'llo'], 'x-user-defined')) == b'h\xe9llo' | ||
| 145 | |||
| 146 | |||
| 147 | def test_x_user_defined(): | ||
| 148 | encoded = b'2,\x0c\x0b\x1aO\xd9#\xcb\x0f\xc9\xbbt\xcf\xa8\xca' | ||
| 149 | decoded = '2,\x0c\x0b\x1aO\uf7d9#\uf7cb\x0f\uf7c9\uf7bbt\uf7cf\uf7a8\uf7ca' | ||
| 150 | assert decode(encoded, 'x-user-defined') == (decoded, lookup('x-user-defined')) | ||
| 151 | assert encode(decoded, 'x-user-defined') == encoded | ||
| 152 | assert decode(b'aa', 'x-user-defined') == ('aa', lookup('x-user-defined')) | ||
| 153 | assert encode('aa', 'x-user-defined') == b'aa' | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/webencodings/x_user_defined.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/webencodings/x_user_defined.py new file mode 100644 index 0000000..748e2c9 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/webencodings/x_user_defined.py | |||
| @@ -0,0 +1,325 @@ | |||
| 1 | # coding: utf-8 | ||
| 2 | """ | ||
| 3 | |||
| 4 | webencodings.x_user_defined | ||
| 5 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~ | ||
| 6 | |||
| 7 | An implementation of the x-user-defined encoding. | ||
| 8 | |||
| 9 | :copyright: Copyright 2012 by Simon Sapin | ||
| 10 | :license: BSD, see LICENSE for details. | ||
| 11 | |||
| 12 | """ | ||
| 13 | |||
| 14 | from __future__ import unicode_literals | ||
| 15 | |||
| 16 | import codecs | ||
| 17 | |||
| 18 | |||
| 19 | ### Codec APIs | ||
| 20 | |||
| 21 | class Codec(codecs.Codec): | ||
| 22 | |||
| 23 | def encode(self, input, errors='strict'): | ||
| 24 | return codecs.charmap_encode(input, errors, encoding_table) | ||
| 25 | |||
| 26 | def decode(self, input, errors='strict'): | ||
| 27 | return codecs.charmap_decode(input, errors, decoding_table) | ||
| 28 | |||
| 29 | |||
| 30 | class IncrementalEncoder(codecs.IncrementalEncoder): | ||
| 31 | def encode(self, input, final=False): | ||
| 32 | return codecs.charmap_encode(input, self.errors, encoding_table)[0] | ||
| 33 | |||
| 34 | |||
| 35 | class IncrementalDecoder(codecs.IncrementalDecoder): | ||
| 36 | def decode(self, input, final=False): | ||
| 37 | return codecs.charmap_decode(input, self.errors, decoding_table)[0] | ||
| 38 | |||
| 39 | |||
| 40 | class StreamWriter(Codec, codecs.StreamWriter): | ||
| 41 | pass | ||
| 42 | |||
| 43 | |||
| 44 | class StreamReader(Codec, codecs.StreamReader): | ||
| 45 | pass | ||
| 46 | |||
| 47 | |||
| 48 | ### encodings module API | ||
| 49 | |||
| 50 | codec_info = codecs.CodecInfo( | ||
| 51 | name='x-user-defined', | ||
| 52 | encode=Codec().encode, | ||
| 53 | decode=Codec().decode, | ||
| 54 | incrementalencoder=IncrementalEncoder, | ||
| 55 | incrementaldecoder=IncrementalDecoder, | ||
| 56 | streamreader=StreamReader, | ||
| 57 | streamwriter=StreamWriter, | ||
| 58 | ) | ||
| 59 | |||
| 60 | |||
| 61 | ### Decoding Table | ||
| 62 | |||
| 63 | # Python 3: | ||
| 64 | # for c in range(256): print(' %r' % chr(c if c < 128 else c + 0xF700)) | ||
| 65 | decoding_table = ( | ||
| 66 | '\x00' | ||
| 67 | '\x01' | ||
| 68 | '\x02' | ||
| 69 | '\x03' | ||
| 70 | '\x04' | ||
| 71 | '\x05' | ||
| 72 | '\x06' | ||
| 73 | '\x07' | ||
| 74 | '\x08' | ||
| 75 | '\t' | ||
| 76 | '\n' | ||
| 77 | '\x0b' | ||
| 78 | '\x0c' | ||
| 79 | '\r' | ||
| 80 | '\x0e' | ||
| 81 | '\x0f' | ||
| 82 | '\x10' | ||
| 83 | '\x11' | ||
| 84 | '\x12' | ||
| 85 | '\x13' | ||
| 86 | '\x14' | ||
| 87 | '\x15' | ||
| 88 | '\x16' | ||
| 89 | '\x17' | ||
| 90 | '\x18' | ||
| 91 | '\x19' | ||
| 92 | '\x1a' | ||
| 93 | '\x1b' | ||
| 94 | '\x1c' | ||
| 95 | '\x1d' | ||
| 96 | '\x1e' | ||
| 97 | '\x1f' | ||
| 98 | ' ' | ||
| 99 | '!' | ||
| 100 | '"' | ||
| 101 | '#' | ||
| 102 | '$' | ||
| 103 | '%' | ||
| 104 | '&' | ||
| 105 | "'" | ||
| 106 | '(' | ||
| 107 | ')' | ||
| 108 | '*' | ||
| 109 | '+' | ||
| 110 | ',' | ||
| 111 | '-' | ||
| 112 | '.' | ||
| 113 | '/' | ||
| 114 | '0' | ||
| 115 | '1' | ||
| 116 | '2' | ||
| 117 | '3' | ||
| 118 | '4' | ||
| 119 | '5' | ||
| 120 | '6' | ||
| 121 | '7' | ||
| 122 | '8' | ||
| 123 | '9' | ||
| 124 | ':' | ||
| 125 | ';' | ||
| 126 | '<' | ||
| 127 | '=' | ||
| 128 | '>' | ||
| 129 | '?' | ||
| 130 | '@' | ||
| 131 | 'A' | ||
| 132 | 'B' | ||
| 133 | 'C' | ||
| 134 | 'D' | ||
| 135 | 'E' | ||
| 136 | 'F' | ||
| 137 | 'G' | ||
| 138 | 'H' | ||
| 139 | 'I' | ||
| 140 | 'J' | ||
| 141 | 'K' | ||
| 142 | 'L' | ||
| 143 | 'M' | ||
| 144 | 'N' | ||
| 145 | 'O' | ||
| 146 | 'P' | ||
| 147 | 'Q' | ||
| 148 | 'R' | ||
| 149 | 'S' | ||
| 150 | 'T' | ||
| 151 | 'U' | ||
| 152 | 'V' | ||
| 153 | 'W' | ||
| 154 | 'X' | ||
| 155 | 'Y' | ||
| 156 | 'Z' | ||
| 157 | '[' | ||
| 158 | '\\' | ||
| 159 | ']' | ||
| 160 | '^' | ||
| 161 | '_' | ||
| 162 | '`' | ||
| 163 | 'a' | ||
| 164 | 'b' | ||
| 165 | 'c' | ||
| 166 | 'd' | ||
| 167 | 'e' | ||
| 168 | 'f' | ||
| 169 | 'g' | ||
| 170 | 'h' | ||
| 171 | 'i' | ||
| 172 | 'j' | ||
| 173 | 'k' | ||
| 174 | 'l' | ||
| 175 | 'm' | ||
| 176 | 'n' | ||
| 177 | 'o' | ||
| 178 | 'p' | ||
| 179 | 'q' | ||
| 180 | 'r' | ||
| 181 | 's' | ||
| 182 | 't' | ||
| 183 | 'u' | ||
| 184 | 'v' | ||
| 185 | 'w' | ||
| 186 | 'x' | ||
| 187 | 'y' | ||
| 188 | 'z' | ||
| 189 | '{' | ||
| 190 | '|' | ||
| 191 | '}' | ||
| 192 | '~' | ||
| 193 | '\x7f' | ||
| 194 | '\uf780' | ||
| 195 | '\uf781' | ||
| 196 | '\uf782' | ||
| 197 | '\uf783' | ||
| 198 | '\uf784' | ||
| 199 | '\uf785' | ||
| 200 | '\uf786' | ||
| 201 | '\uf787' | ||
| 202 | '\uf788' | ||
| 203 | '\uf789' | ||
| 204 | '\uf78a' | ||
| 205 | '\uf78b' | ||
| 206 | '\uf78c' | ||
| 207 | '\uf78d' | ||
| 208 | '\uf78e' | ||
| 209 | '\uf78f' | ||
| 210 | '\uf790' | ||
| 211 | '\uf791' | ||
| 212 | '\uf792' | ||
| 213 | '\uf793' | ||
| 214 | '\uf794' | ||
| 215 | '\uf795' | ||
| 216 | '\uf796' | ||
| 217 | '\uf797' | ||
| 218 | '\uf798' | ||
| 219 | '\uf799' | ||
| 220 | '\uf79a' | ||
| 221 | '\uf79b' | ||
| 222 | '\uf79c' | ||
| 223 | '\uf79d' | ||
| 224 | '\uf79e' | ||
| 225 | '\uf79f' | ||
| 226 | '\uf7a0' | ||
| 227 | '\uf7a1' | ||
| 228 | '\uf7a2' | ||
| 229 | '\uf7a3' | ||
| 230 | '\uf7a4' | ||
| 231 | '\uf7a5' | ||
| 232 | '\uf7a6' | ||
| 233 | '\uf7a7' | ||
| 234 | '\uf7a8' | ||
| 235 | '\uf7a9' | ||
| 236 | '\uf7aa' | ||
| 237 | '\uf7ab' | ||
| 238 | '\uf7ac' | ||
| 239 | '\uf7ad' | ||
| 240 | '\uf7ae' | ||
| 241 | '\uf7af' | ||
| 242 | '\uf7b0' | ||
| 243 | '\uf7b1' | ||
| 244 | '\uf7b2' | ||
| 245 | '\uf7b3' | ||
| 246 | '\uf7b4' | ||
| 247 | '\uf7b5' | ||
| 248 | '\uf7b6' | ||
| 249 | '\uf7b7' | ||
| 250 | '\uf7b8' | ||
| 251 | '\uf7b9' | ||
| 252 | '\uf7ba' | ||
| 253 | '\uf7bb' | ||
| 254 | '\uf7bc' | ||
| 255 | '\uf7bd' | ||
| 256 | '\uf7be' | ||
| 257 | '\uf7bf' | ||
| 258 | '\uf7c0' | ||
| 259 | '\uf7c1' | ||
| 260 | '\uf7c2' | ||
| 261 | '\uf7c3' | ||
| 262 | '\uf7c4' | ||
| 263 | '\uf7c5' | ||
| 264 | '\uf7c6' | ||
| 265 | '\uf7c7' | ||
| 266 | '\uf7c8' | ||
| 267 | '\uf7c9' | ||
| 268 | '\uf7ca' | ||
| 269 | '\uf7cb' | ||
| 270 | '\uf7cc' | ||
| 271 | '\uf7cd' | ||
| 272 | '\uf7ce' | ||
| 273 | '\uf7cf' | ||
| 274 | '\uf7d0' | ||
| 275 | '\uf7d1' | ||
| 276 | '\uf7d2' | ||
| 277 | '\uf7d3' | ||
| 278 | '\uf7d4' | ||
| 279 | '\uf7d5' | ||
| 280 | '\uf7d6' | ||
| 281 | '\uf7d7' | ||
| 282 | '\uf7d8' | ||
| 283 | '\uf7d9' | ||
| 284 | '\uf7da' | ||
| 285 | '\uf7db' | ||
| 286 | '\uf7dc' | ||
| 287 | '\uf7dd' | ||
| 288 | '\uf7de' | ||
| 289 | '\uf7df' | ||
| 290 | '\uf7e0' | ||
| 291 | '\uf7e1' | ||
| 292 | '\uf7e2' | ||
| 293 | '\uf7e3' | ||
| 294 | '\uf7e4' | ||
| 295 | '\uf7e5' | ||
| 296 | '\uf7e6' | ||
| 297 | '\uf7e7' | ||
| 298 | '\uf7e8' | ||
| 299 | '\uf7e9' | ||
| 300 | '\uf7ea' | ||
| 301 | '\uf7eb' | ||
| 302 | '\uf7ec' | ||
| 303 | '\uf7ed' | ||
| 304 | '\uf7ee' | ||
| 305 | '\uf7ef' | ||
| 306 | '\uf7f0' | ||
| 307 | '\uf7f1' | ||
| 308 | '\uf7f2' | ||
| 309 | '\uf7f3' | ||
| 310 | '\uf7f4' | ||
| 311 | '\uf7f5' | ||
| 312 | '\uf7f6' | ||
| 313 | '\uf7f7' | ||
| 314 | '\uf7f8' | ||
| 315 | '\uf7f9' | ||
| 316 | '\uf7fa' | ||
| 317 | '\uf7fb' | ||
| 318 | '\uf7fc' | ||
| 319 | '\uf7fd' | ||
| 320 | '\uf7fe' | ||
| 321 | '\uf7ff' | ||
| 322 | ) | ||
| 323 | |||
| 324 | ### Encoding table | ||
| 325 | encoding_table = codecs.charmap_build(decoding_table) | ||
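A round-trip sketch of the table: bytes `0x00`-`0x7F` pass through as ASCII while `0x80`-`0xFF` land in the Private Use range `U+F780`-`U+F7FF`, using only the `codec_info` defined above:

    assert codec_info.decode(b'\x80ab\xff')[0] == u'\uf780ab\uf7ff'
    assert codec_info.encode(u'\uf780ab\uf7ff')[0] == b'\x80ab\xff'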
