summaryrefslogtreecommitdiff
path: root/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal
diff options
context:
space:
mode:
authorShubham Saini <shubham6405@gmail.com>2018-12-11 10:01:23 +0000
committerShubham Saini <shubham6405@gmail.com>2018-12-11 10:01:23 +0000
commit68df54d6629ec019142eb149dd037774f2d11e7c (patch)
tree345bc22d46b4e01a4ba8303b94278952a4ed2b9e /venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal
First commit
Diffstat (limited to 'venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal')
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/__init__.py246
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/basecommand.py373
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/baseparser.py240
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/build_env.py92
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/cache.py202
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/cmdoptions.py609
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/__init__.py79
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/check.py42
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/completion.py94
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/configuration.py227
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/download.py233
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/freeze.py96
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/hash.py57
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/help.py36
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/install.py502
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/list.py343
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/search.py135
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/show.py164
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/uninstall.py71
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/wheel.py179
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/compat.py235
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/configuration.py378
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/download.py922
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/exceptions.py249
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/index.py1117
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/locations.py194
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/models/__init__.py4
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/models/index.py15
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/__init__.py0
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/check.py106
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/freeze.py252
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/prepare.py380
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/pep425tags.py317
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/__init__.py69
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/req_file.py338
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/req_install.py1115
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/req_set.py164
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/req_uninstall.py455
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/resolve.py354
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/status_codes.py8
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/__init__.py0
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/appdirs.py258
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/deprecation.py77
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/encoding.py33
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/filesystem.py28
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/glibc.py84
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/hashes.py94
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/logging.py132
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/misc.py851
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/outdated.py163
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/packaging.py70
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/setuptools_build.py8
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/temp_dir.py82
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/typing.py29
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/ui.py421
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/vcs/__init__.py471
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/vcs/bazaar.py113
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/vcs/git.py311
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/vcs/mercurial.py105
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/vcs/subversion.py271
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/wheel.py817
61 files changed, 15110 insertions, 0 deletions
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/__init__.py
new file mode 100644
index 0000000..d713b0d
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/__init__.py
@@ -0,0 +1,246 @@
#!/usr/bin/env python
from __future__ import absolute_import

import locale
import logging
import os
import optparse
import warnings

import sys

# 2016-06-17 barry@debian.org: urllib3 1.14 added optional support for socks,
# but if invoked (i.e. imported), it will issue a warning to stderr if socks
# isn't available. requests unconditionally imports urllib3's socks contrib
# module, triggering this warning. The warning breaks DEP-8 tests (because of
# the stderr output) and is just plain annoying in normal usage. I don't want
# to add socks as yet another dependency for pip, nor do I want to allow-stderr
# in the DEP-8 tests, so just suppress the warning. pdb tells me this has to
# be done before the import of pip.vcs.
from pip._vendor.urllib3.exceptions import DependencyWarning
warnings.filterwarnings("ignore", category=DependencyWarning)  # noqa

# We want to inject the use of SecureTransport as early as possible so that any
# references or sessions or what have you are ensured to have it, however we
# only want to do this in the case that we're running on macOS and the linked
# OpenSSL is too old to handle TLSv1.2
try:
    import ssl
except ImportError:
    pass
else:
    # Checks for OpenSSL 1.0.1 on MacOS
    # (0x1000100f is the packed hex form of OpenSSL version 1.0.1)
    if sys.platform == "darwin" and ssl.OPENSSL_VERSION_NUMBER < 0x1000100f:
        try:
            from pip._vendor.urllib3.contrib import securetransport
        except (ImportError, OSError):
            # SecureTransport isn't available (or ctypes failed to load the
            # framework) — fall back to whatever OpenSSL provides.
            pass
        else:
            securetransport.inject_into_urllib3()

from pip import __version__
from pip._internal import cmdoptions
from pip._internal.exceptions import CommandError, PipError
from pip._internal.utils.misc import get_installed_distributions, get_prog
from pip._internal.utils import deprecation
# Imported for their registration side effects (each VCS backend registers
# itself on import), not for direct use here.
from pip._internal.vcs import git, mercurial, subversion, bazaar  # noqa
from pip._internal.baseparser import (
    ConfigOptionParser, UpdatingDefaultsHelpFormatter,
)
from pip._internal.commands import get_summaries, get_similar_commands
from pip._internal.commands import commands_dict
from pip._vendor.urllib3.exceptions import InsecureRequestWarning

logger = logging.getLogger(__name__)

# Hide the InsecureRequestWarning from urllib3
warnings.filterwarnings("ignore", category=InsecureRequestWarning)
def autocomplete():
    """Command and option completion for the main option parser (and options)
    and its subcommands (and options).

    Enable by sourcing one of the completion shell scripts (bash, zsh or fish).

    Reads COMP_WORDS / COMP_CWORD from the environment, prints candidate
    completions (one per line) to stdout, and exits the process.  Returns
    normally only when completion mode is not active.
    """
    # Don't complete if user hasn't sourced bash_completion file.
    if 'PIP_AUTO_COMPLETE' not in os.environ:
        return
    # COMP_WORDS holds the full command line; drop the program name itself.
    cwords = os.environ['COMP_WORDS'].split()[1:]
    cword = int(os.environ['COMP_CWORD'])
    try:
        # The word currently being completed (may not exist yet).
        current = cwords[cword - 1]
    except IndexError:
        current = ''

    subcommands = [cmd for cmd, summary in get_summaries()]
    options = []
    # subcommand: first word on the line that matches a known subcommand
    try:
        subcommand_name = [w for w in cwords if w in subcommands][0]
    except IndexError:
        subcommand_name = None

    parser = create_main_parser()
    # subcommand options
    if subcommand_name:
        # special case: 'help' subcommand has no options
        if subcommand_name == 'help':
            sys.exit(1)
        # special case: list locally installed dists for show and uninstall
        should_list_installed = (
            subcommand_name in ['show', 'uninstall'] and
            not current.startswith('-')
        )
        if should_list_installed:
            installed = []
            lc = current.lower()
            for dist in get_installed_distributions(local_only=True):
                # Offer only dists matching the prefix and not already typed.
                if dist.key.startswith(lc) and dist.key not in cwords[1:]:
                    installed.append(dist.key)
            # if there are no dists installed, fall back to option completion
            if installed:
                for dist in installed:
                    print(dist)
                sys.exit(1)

        subcommand = commands_dict[subcommand_name]()

        # Collect every visible option string of the subcommand, together
        # with its nargs (used below to decide whether to append '=').
        for opt in subcommand.parser.option_list_all:
            if opt.help != optparse.SUPPRESS_HELP:
                for opt_str in opt._long_opts + opt._short_opts:
                    options.append((opt_str, opt.nargs))

        # filter out previously specified options from available options
        prev_opts = [x.split('=')[0] for x in cwords[1:cword - 1]]
        options = [(x, v) for (x, v) in options if x not in prev_opts]
        # filter options by current input
        options = [(k, v) for k, v in options if k.startswith(current)]
        for option in options:
            opt_label = option[0]
            # append '=' to options which require args
            if option[1] and option[0][:2] == "--":
                opt_label += '='
            print(opt_label)
    else:
        # show main parser options only when necessary
        if current.startswith('-') or current.startswith('--'):
            opts = [i.option_list for i in parser.option_groups]
            opts.append(parser.option_list)
            # Flatten the list-of-lists of options lazily.
            opts = (o for it in opts for o in it)

            for opt in opts:
                if opt.help != optparse.SUPPRESS_HELP:
                    subcommands += opt._long_opts + opt._short_opts

        print(' '.join([x for x in subcommands if x.startswith(current)]))
    sys.exit(1)
139
def create_main_parser():
    """Build and return the top-level ConfigOptionParser for pip.

    The parser carries only pip's general options; its description is a
    formatted listing of every available subcommand.
    """
    parser = ConfigOptionParser(
        usage='\n%prog <command> [options]',
        add_help_option=False,
        formatter=UpdatingDefaultsHelpFormatter(),
        name='global',
        prog=get_prog(),
    )
    parser.disable_interspersed_args()

    pkg_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    parser.version = 'pip %s from %s (python %s)' % (
        __version__, pkg_dir, sys.version[:3],
    )

    # add the general options
    parser.add_option_group(
        cmdoptions.make_option_group(cmdoptions.general_group, parser)
    )

    parser.main = True  # so the help formatter knows

    # create command listing for description
    listing = [''] + [
        '%-27s %s' % (name, summary) for name, summary in get_summaries()
    ]
    parser.description = '\n'.join(listing)

    return parser
170
def parseopts(args):
    """Split *args* into a subcommand name and its remaining arguments.

    :param args: argv-style argument list (without the program name).
    :return: ``(cmd_name, cmd_args)`` where ``cmd_args`` is *args* minus
        the subcommand token itself.
    :raises CommandError: if the subcommand is unknown.

    May exit the process directly for ``--version``, bare ``pip`` or
    ``pip help``.
    """
    parser = create_main_parser()

    # The parser has interspersed args disabled, so parse_args splits the
    # line into general options (before the subcommand) and everything else.
    # For example:
    #   args: ['--timeout=5', 'install', '--user', 'INITools']
    #   general_options: ['--timeout==5']
    #   args_else: ['install', '--user', 'INITools']
    general_options, args_else = parser.parse_args(args)

    # --version
    if general_options.version:
        sys.stdout.write(parser.version)
        sys.stdout.write(os.linesep)
        sys.exit()

    # bare `pip` or `pip help` -> print_help()
    if not args_else or args_else == ['help']:
        parser.print_help()
        sys.exit()

    # the subcommand name
    cmd_name = args_else[0]

    if cmd_name not in commands_dict:
        parts = ['unknown command "%s"' % cmd_name]
        guess = get_similar_commands(cmd_name)
        if guess:
            parts.append('maybe you meant "%s"' % guess)
        raise CommandError(' - '.join(parts))

    # all the args without the subcommand
    cmd_args = args[:]
    cmd_args.remove(cmd_name)

    return cmd_name, cmd_args
212
def check_isolated(args):
    """Return True if the "--isolated" flag appears anywhere in *args*."""
    return "--isolated" in args
221
def main(args=None):
    """pip's primary entry point: parse arguments and run the subcommand.

    :param args: argv-style argument list; defaults to ``sys.argv[1:]``.
    :return: the subcommand's integer exit status.
    """
    if args is None:
        args = sys.argv[1:]

    # Configure our deprecation warnings to be sent through loggers
    deprecation.install_warning_logger()

    # May print completions and exit if PIP_AUTO_COMPLETE is set.
    autocomplete()

    try:
        cmd_name, cmd_args = parseopts(args)
    except PipError as exc:
        sys.stderr.write("ERROR: %s" % exc)
        sys.stderr.write(os.linesep)
        sys.exit(1)

    # Needed for locale.getpreferredencoding(False) to work
    # in pip._internal.utils.encoding.auto_decode
    try:
        locale.setlocale(locale.LC_ALL, '')
    except locale.Error as e:
        # setlocale can apparently crash if locale are uninitialized
        logger.debug("Ignoring error %s when setting locale", e)
    command = commands_dict[cmd_name](isolated=check_isolated(cmd_args))
    return command.main(cmd_args)
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/basecommand.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/basecommand.py
new file mode 100644
index 0000000..e900928
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/basecommand.py
@@ -0,0 +1,373 @@
1"""Base Command class, and related routines"""
2from __future__ import absolute_import
3
4import logging
5import logging.config
6import optparse
7import os
8import sys
9import warnings
10
11from pip._internal import cmdoptions
12from pip._internal.baseparser import (
13 ConfigOptionParser, UpdatingDefaultsHelpFormatter,
14)
15from pip._internal.compat import WINDOWS
16from pip._internal.download import PipSession
17from pip._internal.exceptions import (
18 BadCommand, CommandError, InstallationError, PreviousBuildDirError,
19 UninstallationError,
20)
21from pip._internal.index import PackageFinder
22from pip._internal.locations import running_under_virtualenv
23from pip._internal.req.req_file import parse_requirements
24from pip._internal.req.req_install import InstallRequirement
25from pip._internal.status_codes import (
26 ERROR, PREVIOUS_BUILD_DIR_ERROR, SUCCESS, UNKNOWN_ERROR,
27 VIRTUALENV_NOT_FOUND,
28)
29from pip._internal.utils import deprecation
30from pip._internal.utils.logging import IndentingFormatter
31from pip._internal.utils.misc import get_prog, normalize_path
32from pip._internal.utils.outdated import pip_version_check
33from pip._internal.utils.typing import MYPY_CHECK_RUNNING
34
35if MYPY_CHECK_RUNNING:
36 from typing import Optional
37
38__all__ = ['Command']
39
40logger = logging.getLogger(__name__)
41
42
class Command(object):
    """Base class for all pip subcommands.

    Subclasses set ``name``/``usage`` and implement ``run(options, args)``;
    ``main`` handles argument parsing, logging setup and exception-to-exit-
    status translation around that ``run`` call.
    """

    name = None  # type: Optional[str]
    usage = None  # type: Optional[str]
    hidden = False  # type: bool  # hidden commands are omitted from help
    ignore_require_venv = False  # type: bool  # opt out of --require-virtualenv
    # (stdout, stderr) targets for the console logging handlers below.
    log_streams = ("ext://sys.stdout", "ext://sys.stderr")

    def __init__(self, isolated=False):
        """Build this command's option parser.

        :param isolated: if True, ignore user configuration and environment.
        """
        parser_kw = {
            'usage': self.usage,
            'prog': '%s %s' % (get_prog(), self.name),
            'formatter': UpdatingDefaultsHelpFormatter(),
            'add_help_option': False,
            'name': self.name,
            'description': self.__doc__,
            'isolated': isolated,
        }

        self.parser = ConfigOptionParser(**parser_kw)

        # Commands should add options to this option group
        optgroup_name = '%s Options' % self.name.capitalize()
        self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name)

        # Add the general options
        gen_opts = cmdoptions.make_option_group(
            cmdoptions.general_group,
            self.parser,
        )
        self.parser.add_option_group(gen_opts)

    def _build_session(self, options, retries=None, timeout=None):
        """Create a PipSession configured from parsed *options*.

        :param retries: override for ``options.retries`` (used e.g. for the
            best-effort version self-check).
        :param timeout: override for ``options.timeout``.
        """
        session = PipSession(
            cache=(
                normalize_path(os.path.join(options.cache_dir, "http"))
                if options.cache_dir else None
            ),
            retries=retries if retries is not None else options.retries,
            insecure_hosts=options.trusted_hosts,
        )

        # Handle custom ca-bundles from the user
        if options.cert:
            session.verify = options.cert

        # Handle SSL client certificate
        if options.client_cert:
            session.cert = options.client_cert

        # Handle timeouts
        if options.timeout or timeout:
            session.timeout = (
                timeout if timeout is not None else options.timeout
            )

        # Handle configured proxies
        if options.proxy:
            session.proxies = {
                "http": options.proxy,
                "https": options.proxy,
            }

        # Determine if we can prompt the user for authentication or not
        session.auth.prompting = not options.no_input

        return session

    def parse_args(self, args):
        # factored out for testability
        return self.parser.parse_args(args)

    def main(self, args):
        """Parse *args*, configure logging, run the command, map exceptions
        to exit statuses, and best-effort check for a newer pip version.
        """
        options, args = self.parse_args(args)

        # Set verbosity so that it can be used elsewhere.
        # (-v increments options.verbose, -q increments options.quiet)
        self.verbosity = options.verbose - options.quiet

        if self.verbosity >= 1:
            level = "DEBUG"
        elif self.verbosity == -1:
            level = "WARNING"
        elif self.verbosity == -2:
            level = "ERROR"
        elif self.verbosity <= -3:
            level = "CRITICAL"
        else:
            level = "INFO"

        # The root logger should match the "console" level *unless* we
        # specified "--log" to send debug logs to a file.
        root_level = level
        if options.log:
            root_level = "DEBUG"

        logger_class = "pip._internal.utils.logging.ColorizedStreamHandler"
        handler_class = "pip._internal.utils.logging.BetterRotatingFileHandler"

        logging.config.dictConfig({
            "version": 1,
            "disable_existing_loggers": False,
            "filters": {
                # Keeps WARNING+ off stdout; those go to "console_errors".
                "exclude_warnings": {
                    "()": "pip._internal.utils.logging.MaxLevelFilter",
                    "level": logging.WARNING,
                },
            },
            "formatters": {
                "indent": {
                    "()": IndentingFormatter,
                    "format": "%(message)s",
                },
            },
            "handlers": {
                "console": {
                    "level": level,
                    "class": logger_class,
                    "no_color": options.no_color,
                    "stream": self.log_streams[0],
                    "filters": ["exclude_warnings"],
                    "formatter": "indent",
                },
                "console_errors": {
                    "level": "WARNING",
                    "class": logger_class,
                    "no_color": options.no_color,
                    "stream": self.log_streams[1],
                    "formatter": "indent",
                },
                "user_log": {
                    "level": "DEBUG",
                    "class": handler_class,
                    # "delay" postpones opening the file until first write,
                    # so "/dev/null" is never actually opened when unused.
                    "filename": options.log or "/dev/null",
                    "delay": True,
                    "formatter": "indent",
                },
            },
            "root": {
                "level": root_level,
                "handlers": list(filter(None, [
                    "console",
                    "console_errors",
                    "user_log" if options.log else None,
                ])),
            },
            # Disable any logging besides WARNING unless we have DEBUG level
            # logging enabled. These use both pip._vendor and the bare names
            # for the case where someone unbundles our libraries.
            "loggers": {
                name: {
                    "level": (
                        "WARNING" if level in ["INFO", "ERROR"] else "DEBUG"
                    )
                } for name in [
                    "pip._vendor", "distlib", "requests", "urllib3"
                ]
            },
        })

        if sys.version_info[:2] == (3, 3):
            warnings.warn(
                "Python 3.3 supported has been deprecated and support for it "
                "will be dropped in the future. Please upgrade your Python.",
                deprecation.RemovedInPip11Warning,
            )

        # TODO: try to get these passing down from the command?
        # without resorting to os.environ to hold these.

        if options.no_input:
            os.environ['PIP_NO_INPUT'] = '1'

        if options.exists_action:
            os.environ['PIP_EXISTS_ACTION'] = ' '.join(options.exists_action)

        if options.require_venv and not self.ignore_require_venv:
            # If a venv is required check if it can really be found
            if not running_under_virtualenv():
                logger.critical(
                    'Could not find an activated virtualenv (required).'
                )
                sys.exit(VIRTUALENV_NOT_FOUND)

        # Snapshot root handlers so any added during run() can be removed.
        original_root_handlers = set(logging.root.handlers)

        try:
            status = self.run(options, args)
            # FIXME: all commands should return an exit status
            # and when it is done, isinstance is not needed anymore
            if isinstance(status, int):
                return status
        except PreviousBuildDirError as exc:
            logger.critical(str(exc))
            logger.debug('Exception information:', exc_info=True)

            return PREVIOUS_BUILD_DIR_ERROR
        except (InstallationError, UninstallationError, BadCommand) as exc:
            logger.critical(str(exc))
            logger.debug('Exception information:', exc_info=True)

            return ERROR
        except CommandError as exc:
            logger.critical('ERROR: %s', exc)
            logger.debug('Exception information:', exc_info=True)

            return ERROR
        except KeyboardInterrupt:
            logger.critical('Operation cancelled by user')
            logger.debug('Exception information:', exc_info=True)

            return ERROR
        # NOTE(review): bare except is deliberate here — the top-level
        # boundary logs anything unexpected and converts it to an exit code.
        except:
            logger.critical('Exception:', exc_info=True)

            return UNKNOWN_ERROR
        finally:
            # Check if we're using the latest version of pip available
            if (not options.disable_pip_version_check and not
                    getattr(options, "no_index", False)):
                with self._build_session(
                        options,
                        retries=0,
                        timeout=min(5, options.timeout)) as session:
                    pip_version_check(session, options)
            # Avoid leaking loggers
            for handler in set(logging.root.handlers) - original_root_handlers:
                # this method benefit from the Logger class internal lock
                logging.root.removeHandler(handler)

        return SUCCESS
272
273
class RequirementCommand(Command):
    """Base class for commands that operate on a set of requirements
    (install, download, wheel): adds requirement-marshalling and
    package-finder helpers on top of Command.
    """

    @staticmethod
    def populate_requirement_set(requirement_set, args, options, finder,
                                 session, name, wheel_cache):
        """
        Marshal cmd line args into a requirement set.

        Processes, in order: constraint files, positional requirement
        specifiers, editables, and requirement files — adding each parsed
        requirement to *requirement_set*.

        :raises CommandError: if no requirement source was given at all, or
            when modifying pip itself on Windows (see note below).
        """
        # NOTE: As a side-effect, options.require_hashes and
        # requirement_set.require_hashes may be updated

        for filename in options.constraints:
            for req_to_add in parse_requirements(
                    filename,
                    constraint=True, finder=finder, options=options,
                    session=session, wheel_cache=wheel_cache):
                # is_direct marks requirements the user asked for explicitly
                # (vs. ones discovered through dependency resolution).
                req_to_add.is_direct = True
                requirement_set.add_requirement(req_to_add)

        for req in args:
            req_to_add = InstallRequirement.from_line(
                req, None, isolated=options.isolated_mode,
                wheel_cache=wheel_cache
            )
            req_to_add.is_direct = True
            requirement_set.add_requirement(req_to_add)

        for req in options.editables:
            req_to_add = InstallRequirement.from_editable(
                req,
                isolated=options.isolated_mode,
                wheel_cache=wheel_cache
            )
            req_to_add.is_direct = True
            requirement_set.add_requirement(req_to_add)

        for filename in options.requirements:
            for req_to_add in parse_requirements(
                    filename,
                    finder=finder, options=options, session=session,
                    wheel_cache=wheel_cache):
                req_to_add.is_direct = True
                requirement_set.add_requirement(req_to_add)
        # If --require-hashes was a line in a requirements file, tell
        # RequirementSet about it:
        requirement_set.require_hashes = options.require_hashes

        if not (args or options.editables or options.requirements):
            opts = {'name': name}
            if options.find_links:
                raise CommandError(
                    'You must give at least one requirement to %(name)s '
                    '(maybe you meant "pip %(name)s %(links)s"?)' %
                    dict(opts, links=' '.join(options.find_links)))
            else:
                raise CommandError(
                    'You must give at least one requirement to %(name)s '
                    '(see "pip help %(name)s")' % opts)

        # On Windows, any operation modifying pip should be run as:
        #     python -m pip ...
        # See https://github.com/pypa/pip/issues/1299 for more discussion
        should_show_use_python_msg = (
            WINDOWS and
            requirement_set.has_requirement("pip") and
            os.path.basename(sys.argv[0]).startswith("pip")
        )
        if should_show_use_python_msg:
            new_command = [
                sys.executable, "-m", "pip"
            ] + sys.argv[1:]
            raise CommandError(
                'To modify pip, please run the following command:\n{}'
                .format(" ".join(new_command))
            )

    def _build_package_finder(self, options, session,
                              platform=None, python_versions=None,
                              abi=None, implementation=None):
        """
        Create a package finder appropriate to this requirement command.

        :param platform, python_versions, abi, implementation: optional
            target-environment overrides (used e.g. by `pip download`).
        """
        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index:
            logger.debug('Ignoring indexes: %s', ','.join(index_urls))
            index_urls = []

        return PackageFinder(
            find_links=options.find_links,
            format_control=options.format_control,
            index_urls=index_urls,
            trusted_hosts=options.trusted_hosts,
            allow_all_prereleases=options.pre,
            process_dependency_links=options.process_dependency_links,
            session=session,
            platform=platform,
            versions=python_versions,
            abi=abi,
            implementation=implementation,
        )
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/baseparser.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/baseparser.py
new file mode 100644
index 0000000..ed28a1b
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/baseparser.py
@@ -0,0 +1,240 @@
1"""Base option parser setup"""
2from __future__ import absolute_import
3
4import logging
5import optparse
6import sys
7import textwrap
8from distutils.util import strtobool
9
10from pip._vendor.six import string_types
11
12from pip._internal.compat import get_terminal_size
13from pip._internal.configuration import Configuration, ConfigurationError
14
15logger = logging.getLogger(__name__)
16
17
class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
    """A prettier/less verbose help formatter for optparse."""

    def __init__(self, *args, **kwargs):
        # help position must be aligned with __init__.parseopts.description
        kwargs['max_help_position'] = 30
        kwargs['indent_increment'] = 1
        # Fit the terminal, leaving a 2-column margin.
        kwargs['width'] = get_terminal_size()[0] - 2
        optparse.IndentedHelpFormatter.__init__(self, *args, **kwargs)

    def format_option_strings(self, option):
        return self._format_option_strings(option, ' <%s>', ', ')

    def _format_option_strings(self, option, mvarfmt=' <%s>', optsep=', '):
        """
        Return a comma-separated list of option strings and metavars.

        :param option: tuple of (short opt, long opt), e.g: ('-f', '--format')
        :param mvarfmt: metavar format string - evaluated as mvarfmt % metavar
        :param optsep: separator
        """
        opts = []

        # Show at most one short and one long form, e.g. "-f, --format".
        if option._short_opts:
            opts.append(option._short_opts[0])
        if option._long_opts:
            opts.append(option._long_opts[0])
        if len(opts) > 1:
            opts.insert(1, optsep)

        if option.takes_value():
            metavar = option.metavar or option.dest.lower()
            opts.append(mvarfmt % metavar.lower())

        return ''.join(opts)

    def format_heading(self, heading):
        # Suppress the default bare "Options" heading.
        if heading == 'Options':
            return ''
        return heading + ':\n'

    def format_usage(self, usage):
        """
        Ensure there is only one newline between usage and the first heading
        if there is no description.
        """
        msg = '\nUsage: %s\n' % self.indent_lines(textwrap.dedent(usage), "  ")
        return msg

    def format_description(self, description):
        # leave full control over description to us
        if description:
            # The main parser sets parser.main; its "description" is really
            # the subcommand listing.
            if hasattr(self.parser, 'main'):
                label = 'Commands'
            else:
                label = 'Description'
            # some doc strings have initial newlines, some don't
            description = description.lstrip('\n')
            # some doc strings have final newlines and spaces, some don't
            description = description.rstrip()
            # dedent, then reindent
            description = self.indent_lines(textwrap.dedent(description), "  ")
            description = '%s:\n%s\n' % (label, description)
            return description
        else:
            return ''

    def format_epilog(self, epilog):
        # leave full control over epilog to us
        if epilog:
            return epilog
        else:
            return ''

    def indent_lines(self, text, indent):
        # Prefix every line of *text* with *indent*.
        new_lines = [indent + line for line in text.split('\n')]
        return "\n".join(new_lines)
96
class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter):
    """Custom help formatter for use in ConfigOptionParser.

    This is updates the defaults before expanding them, allowing
    them to show up correctly in the help listing.
    """

    def expand_default(self, option):
        # Refresh parser defaults from config files / environment so the
        # "%default" placeholders in help text reflect effective values.
        if self.parser is not None:
            self.parser._update_defaults(self.parser.defaults)
        return optparse.IndentedHelpFormatter.expand_default(self, option)
109
class CustomOptionParser(optparse.OptionParser):
    """OptionParser extended with positional group insertion and a flat,
    all-inclusive option listing."""

    def insert_option_group(self, idx, *args, **kwargs):
        """Create an OptionGroup and place it at position *idx*."""
        group = self.add_option_group(*args, **kwargs)
        # add_option_group appends; relocate the new group to *idx*.
        self.option_groups.insert(idx, self.option_groups.pop())
        return group

    @property
    def option_list_all(self):
        """Get a list of all options, including those in option groups."""
        everything = list(self.option_list)
        for group in self.option_groups:
            everything.extend(group.option_list)
        return everything
129
130
class ConfigOptionParser(CustomOptionParser):
    """Custom option parser which updates its defaults by checking the
    configuration files and environmental variables"""

    def __init__(self, *args, **kwargs):
        # The command name ('global' for the main parser); used as the
        # config-file section to read command-specific values from.
        self.name = kwargs.pop('name')

        # isolated mode skips user configuration and environment variables.
        isolated = kwargs.pop("isolated", False)
        self.config = Configuration(isolated)

        assert self.name
        optparse.OptionParser.__init__(self, *args, **kwargs)

    def check_default(self, option, key, val):
        """Validate a config-supplied default through the option's checker;
        exit with status 3 on an invalid value."""
        try:
            return option.check_value(key, val)
        except optparse.OptionValueError as exc:
            print("An error occurred during configuration: %s" % exc)
            sys.exit(3)

    def _get_ordered_configuration_items(self):
        # Configuration gives keys in an unordered manner. Order them.
        # Later sections override earlier ones: [global] < [<command>] < env.
        override_order = ["global", self.name, ":env:"]

        # Pool the options into different groups
        section_items = {name: [] for name in override_order}
        for section_key, val in self.config.items():
            # ignore empty values
            if not val:
                logger.debug(
                    "Ignoring configuration key '%s' as it's value is empty.",
                    section_key
                )
                continue

            section, key = section_key.split(".", 1)
            if section in override_order:
                section_items[section].append((key, val))

        # Yield each group in their override order
        for section in override_order:
            for key, val in section_items[section]:
                yield key, val

    def _update_defaults(self, defaults):
        """Updates the given defaults with values from the config files and
        the environ. Does a little special handling for certain types of
        options (lists)."""

        # Accumulate complex default state.
        # self.values is set temporarily so that option callbacks invoked
        # below can store into it; cleared again before returning.
        self.values = optparse.Values(self.defaults)
        late_eval = set()
        # Then set the options with those values
        for key, val in self._get_ordered_configuration_items():
            # '--' because configuration supports only long names
            option = self.get_option('--' + key)

            # Ignore options not present in this parser. E.g. non-globals put
            # in [global] by users that want them to apply to all applicable
            # commands.
            if option is None:
                continue

            if option.action in ('store_true', 'store_false', 'count'):
                # Config values are strings; coerce "yes"/"no"/"1"/"0" etc.
                val = strtobool(val)
            elif option.action == 'append':
                # Whitespace-separated list in the config file.
                val = val.split()
                val = [self.check_default(option, key, v) for v in val]
            elif option.action == 'callback':
                # Callbacks write into self.values; read the result back
                # after the loop (late_eval) rather than here.
                late_eval.add(option.dest)
                opt_str = option.get_opt_string()
                val = option.convert_value(opt_str, val)
                # From take_action
                args = option.callback_args or ()
                kwargs = option.callback_kwargs or {}
                option.callback(option, opt_str, val, self, *args, **kwargs)
            else:
                val = self.check_default(option, key, val)

            defaults[option.dest] = val

        for key in late_eval:
            defaults[key] = getattr(self.values, key)
        self.values = None
        return defaults

    def get_default_values(self):
        """Overriding to make updating the defaults after instantiation of
        the option parser possible, _update_defaults() does the dirty work."""
        if not self.process_default_values:
            # Old, pre-Optik 1.5 behaviour.
            return optparse.Values(self.defaults)

        # Load the configuration, or error out in case of an error
        try:
            self.config.load()
        except ConfigurationError as err:
            self.exit(2, err.args[0])

        defaults = self._update_defaults(self.defaults.copy())  # ours
        # String defaults (e.g. from add_option) still need type conversion.
        for option in self._get_all_options():
            default = defaults.get(option.dest)
            if isinstance(default, string_types):
                opt_str = option.get_opt_string()
                defaults[option.dest] = option.check_value(opt_str, default)
        return optparse.Values(defaults)

    def error(self, msg):
        # Print usage to stderr and exit 2, matching optparse convention.
        self.print_usage(sys.stderr)
        self.exit(2, "%s\n" % msg)
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/build_env.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/build_env.py
new file mode 100644
index 0000000..8ad7735
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/build_env.py
@@ -0,0 +1,92 @@
1"""Build Environment used for isolation during sdist building
2"""
3
4import os
5from distutils.sysconfig import get_python_lib
6from sysconfig import get_paths
7
8from pip._internal.utils.temp_dir import TempDirectory
9
10
class BuildEnvironment(object):
    """An isolated temporary prefix for installing build dependencies.

    Entering the context mutates PATH / PYTHONPATH / PYTHONNOUSERSITE so
    that child processes resolve scripts and packages from the temporary
    prefix first; exiting restores the previous environment.
    """

    def __init__(self, no_clean):
        self._temp_dir = TempDirectory(kind="build-env")
        self._no_clean = no_clean

    @property
    def path(self):
        # Root of the isolated prefix.
        return self._temp_dir.path

    def __enter__(self):
        self._temp_dir.create()

        # Remember the caller's environment so __exit__ can restore it;
        # None means "was not set at all".
        self.save_path = os.environ.get('PATH', None)
        self.save_pythonpath = os.environ.get('PYTHONPATH', None)
        self.save_nousersite = os.environ.get('PYTHONNOUSERSITE', None)

        install_scheme = 'nt' if (os.name == 'nt') else 'posix_prefix'
        install_dirs = get_paths(install_scheme, vars={
            'base': self.path,
            'platbase': self.path,
        })

        # Put the environment's scripts directory first on PATH.
        scripts = install_dirs['scripts']
        path_tail = self.save_path if self.save_path else os.defpath
        os.environ['PATH'] = scripts + os.pathsep + path_tail

        # Note: prefer distutils' sysconfig to get the
        # library paths so PyPy is correctly supported.
        purelib = get_python_lib(plat_specific=0, prefix=self.path)
        platlib = get_python_lib(plat_specific=1, prefix=self.path)
        if purelib == platlib:
            lib_dirs = purelib
        else:
            lib_dirs = purelib + os.pathsep + platlib
        if self.save_pythonpath:
            os.environ['PYTHONPATH'] = (
                lib_dirs + os.pathsep + self.save_pythonpath
            )
        else:
            os.environ['PYTHONPATH'] = lib_dirs

        # Keep the user's site-packages out of the isolated build.
        os.environ['PYTHONNOUSERSITE'] = '1'

        return self.path

    def __exit__(self, exc_type, exc_val, exc_tb):
        if not self._no_clean:
            self._temp_dir.cleanup()

        def restore_var(varname, old_value):
            # None means the variable was unset before __enter__.
            if old_value is None:
                os.environ.pop(varname, None)
            else:
                os.environ[varname] = old_value

        restore_var('PATH', self.save_path)
        restore_var('PYTHONPATH', self.save_pythonpath)
        restore_var('PYTHONNOUSERSITE', self.save_nousersite)

    def cleanup(self):
        self._temp_dir.cleanup()
76
77
class NoOpBuildEnvironment(BuildEnvironment):
    """A no-op drop-in replacement for BuildEnvironment.

    Used when build isolation is disabled: it satisfies the same context
    manager interface but never creates a temp dir or touches os.environ.
    """

    def __init__(self, no_clean):
        pass

    def __enter__(self):
        pass

    def __exit__(self, exc_type, exc_val, exc_tb):
        pass

    def cleanup(self):
        pass
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/cache.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/cache.py
new file mode 100644
index 0000000..5547d73
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/cache.py
@@ -0,0 +1,202 @@
1"""Cache Management
2"""
3
4import errno
5import hashlib
6import logging
7import os
8
9from pip._vendor.packaging.utils import canonicalize_name
10
11from pip._internal import index
12from pip._internal.compat import expanduser
13from pip._internal.download import path_to_url
14from pip._internal.utils.temp_dir import TempDirectory
15from pip._internal.wheel import InvalidWheelFilename, Wheel
16
17logger = logging.getLogger(__name__)
18
19
class Cache(object):
    """An abstract class - provides cache directories for data from links

    :param cache_dir: The root of the cache.
    :param format_control: A pip.index.FormatControl object to limit
        binaries being read from the cache.
    :param allowed_formats: which formats of files the cache should store.
        ('binary' and 'source' are the only allowed values)
    """

    def __init__(self, cache_dir, format_control, allowed_formats):
        super(Cache, self).__init__()
        self.cache_dir = expanduser(cache_dir) if cache_dir else None
        self.format_control = format_control
        self.allowed_formats = allowed_formats

        # Guard against typos in subclasses: only these two formats exist.
        _valid_formats = {"source", "binary"}
        assert self.allowed_formats.union(_valid_formats) == _valid_formats

    def _get_cache_path_parts(self, link):
        """Return the path components (under cache_dir) keyed by *link*."""
        # Build the cache key from the fragment-less URL plus the hash (if
        # any); the raw URL is not reused because its fragment may carry
        # unrelated items.
        components = [link.url_without_fragment]
        if link.hash_name is not None and link.hash is not None:
            components.append("=".join([link.hash_name, link.hash]))
        key_url = "#".join(components)

        # sha224 has security properties similar to sha256 but a shorter
        # digest; the difference is irrelevant for cache keying and keeps
        # paths shorter.
        hashed = hashlib.sha224(key_url.encode()).hexdigest()

        # Nest directories two hex chars at a time so no single level
        # accumulates an enormous number of entries on some filesystems.
        return [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]

    def _get_candidates(self, link, package_name):
        # Without a cache root, a name, or a link there is nothing to find.
        if not (self.cache_dir and package_name and link):
            return []

        canonical_name = canonicalize_name(package_name)
        formats = index.fmt_ctl_formats(self.format_control, canonical_name)
        if not self.allowed_formats.intersection(formats):
            return []

        root = self.get_path_for_link(link)
        try:
            return os.listdir(root)
        except OSError as err:
            # A missing cache directory simply means "no candidates".
            if err.errno in {errno.ENOENT, errno.ENOTDIR}:
                return []
            raise

    def get_path_for_link(self, link):
        """Return a directory to store cached items in for link.
        """
        raise NotImplementedError()

    def get(self, link, package_name):
        """Returns a link to a cached item if it exists, otherwise returns the
        passed link.
        """
        raise NotImplementedError()

    def _link_for_candidate(self, link, candidate):
        cached_path = os.path.join(self.get_path_for_link(link), candidate)
        return index.Link(path_to_url(cached_path))

    def cleanup(self):
        pass
108
109
class SimpleWheelCache(Cache):
    """A cache of wheels for future installs."""

    def __init__(self, cache_dir, format_control):
        # Wheel caches only ever store binaries.
        super(SimpleWheelCache, self).__init__(
            cache_dir, format_control, {"binary"}
        )

    def get_path_for_link(self, link):
        """Return a directory to store cached wheels for link

        Because there are M wheels for any one sdist, we provide a directory
        to cache them in, and then consult that directory when looking up
        cache hits.

        We only insert things into the cache if they have plausible version
        numbers, so that we don't contaminate the cache with things that were
        not unique. E.g. ./package might have dozens of installs done for it
        and build a version of 0.0...and if we built and cached a wheel, we'd
        end up using the same wheel even if the source has been edited.

        :param link: The link of the sdist for which this will cache wheels.
        """
        # Wheels live under "<cache_dir>/wheels/<hashed key parts>".
        return os.path.join(
            self.cache_dir, "wheels", *self._get_cache_path_parts(link)
        )

    def get(self, link, package_name):
        usable = []
        for name in self._get_candidates(link, package_name):
            try:
                wheel = Wheel(name)
            except InvalidWheelFilename:
                continue
            if not wheel.supported():
                # Built for a different python/arch/etc
                continue
            usable.append((wheel.support_index_min(), name))

        if not usable:
            # Cache miss: hand back the original link unchanged.
            return link

        # Prefer the most specific supported tag (lowest support index).
        return self._link_for_candidate(link, min(usable)[1])
156
157
class EphemWheelCache(SimpleWheelCache):
    """A SimpleWheelCache backed by its own temporary cache directory."""

    def __init__(self, format_control):
        # The temp dir is created eagerly so the cache is usable at once.
        self._temp_dir = TempDirectory(kind="ephem-wheel-cache")
        self._temp_dir.create()

        super(EphemWheelCache, self).__init__(
            self._temp_dir.path, format_control
        )

    def cleanup(self):
        self._temp_dir.cleanup()
172
173
class WheelCache(Cache):
    """Combine a persistent SimpleWheelCache with an EphemWheelCache.

    Lookups degrade gracefully: the ephemeral wheel cache is consulted
    only when the simple (persistent) wheel cache has no hit.
    """

    def __init__(self, cache_dir, format_control):
        super(WheelCache, self).__init__(
            cache_dir, format_control, {'binary'}
        )
        self._wheel_cache = SimpleWheelCache(cache_dir, format_control)
        self._ephem_cache = EphemWheelCache(format_control)

    def get_path_for_link(self, link):
        return self._wheel_cache.get_path_for_link(link)

    def get_ephem_path_for_link(self, link):
        return self._ephem_cache.get_path_for_link(link)

    def get(self, link, package_name):
        # Identity check: a miss returns the original link object unchanged.
        cached = self._wheel_cache.get(link, package_name)
        if cached is link:
            cached = self._ephem_cache.get(link, package_name)
        return cached

    def cleanup(self):
        self._wheel_cache.cleanup()
        self._ephem_cache.cleanup()
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/cmdoptions.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/cmdoptions.py
new file mode 100644
index 0000000..58854e3
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/cmdoptions.py
@@ -0,0 +1,609 @@
1"""
2shared options and groups
3
4The principle here is to define options once, but *not* instantiate them
5globally. One reason being that options with action='append' can carry state
6between parses. pip parses general options twice internally, and shouldn't
7pass on state. To be consistent, all options will follow this design.
8
9"""
10from __future__ import absolute_import
11
12import warnings
13from functools import partial
14from optparse import SUPPRESS_HELP, Option, OptionGroup
15
16from pip._internal.index import (
17 FormatControl, fmt_ctl_handle_mutual_exclude, fmt_ctl_no_binary,
18)
19from pip._internal.locations import USER_CACHE_DIR, src_prefix
20from pip._internal.models import PyPI
21from pip._internal.utils.hashes import STRONG_HASHES
22from pip._internal.utils.typing import MYPY_CHECK_RUNNING
23from pip._internal.utils.ui import BAR_TYPES
24
25if MYPY_CHECK_RUNNING:
26 from typing import Any
27
28
def make_option_group(group, parser):
    """
    Return an OptionGroup object
    group  -- assumed to be dict with 'name' and 'options' keys
    parser -- an optparse Parser
    """
    option_group = OptionGroup(parser, group['name'])
    # Each entry is a factory; call it so every parse gets fresh Options.
    for make_option in group['options']:
        option_group.add_option(make_option())
    return option_group
39
40
def check_install_build_global(options, check_options=None):
    """Disable wheels if per-setup.py call options are set.

    :param options: The OptionParser options to update.
    :param check_options: The options to check, if not supplied defaults to
        options.
    """
    if check_options is None:
        check_options = options

    def any_set(attr_names):
        # Missing attributes count as "not set".
        return any(getattr(check_options, n, None) for n in attr_names)

    if any_set(["build_options", "global_options", "install_options"]):
        # Per-setup.py options cannot be honoured when installing from a
        # wheel, so force source builds for everything.
        fmt_ctl_no_binary(options.format_control)
        warnings.warn(
            'Disabling all use of wheels due to the use of --build-options '
            '/ --global-options / --install-options.', stacklevel=2,
        )
61
62
63###########
64# options #
65###########
66
# -h/--help: optparse's built-in help action.
help_ = partial(
    Option,
    '-h', '--help',
    dest='help',
    action='help',
    help='Show help.',
)  # type: Any

# --isolated: skip environment variables and per-user configuration.
isolated_mode = partial(
    Option,
    "--isolated",
    dest="isolated_mode",
    action="store_true",
    default=False,
    help=(
        "Run pip in an isolated mode, ignoring environment variables and user "
        "configuration."
    ),
)

# Hidden option: abort unless pip is running inside a virtualenv.
require_virtualenv = partial(
    Option,
    # Run only if inside a virtualenv, bail if not.
    '--require-virtualenv', '--require-venv',
    dest='require_venv',
    action='store_true',
    default=False,
    help=SUPPRESS_HELP
)  # type: Any

# -v may be repeated; the count raises the logging verbosity.
verbose = partial(
    Option,
    '-v', '--verbose',
    dest='verbose',
    action='count',
    default=0,
    help='Give more output. Option is additive, and can be used up to 3 times.'
)

no_color = partial(
    Option,
    '--no-color',
    dest='no_color',
    action='store_true',
    default=False,
    help="Suppress colored output",
)

version = partial(
    Option,
    '-V', '--version',
    dest='version',
    action='store_true',
    help='Show version and exit.',
)  # type: Any

# -q may be repeated; the count lowers the logging verbosity.
quiet = partial(
    Option,
    '-q', '--quiet',
    dest='quiet',
    action='count',
    default=0,
    help=(
        'Give less output. Option is additive, and can be used up to 3'
        ' times (corresponding to WARNING, ERROR, and CRITICAL logging'
        ' levels).'
    ),
)  # type: Any

# Progress-bar style; valid choices come from pip's UI module (BAR_TYPES).
progress_bar = partial(
    Option,
    '--progress-bar',
    dest='progress_bar',
    type='choice',
    choices=list(BAR_TYPES.keys()),
    default='on',
    help=(
        'Specify type of progress to be displayed [' +
        '|'.join(BAR_TYPES.keys()) + '] (default: %default)'
    ),
)  # type: Any

log = partial(
    Option,
    "--log", "--log-file", "--local-log",
    dest="log",
    metavar="path",
    help="Path to a verbose appending log."
)  # type: Any

# Hidden option: never prompt the user for input.
no_input = partial(
    Option,
    # Don't ask for input
    '--no-input',
    dest='no_input',
    action='store_true',
    default=False,
    help=SUPPRESS_HELP
)  # type: Any

proxy = partial(
    Option,
    '--proxy',
    dest='proxy',
    type='str',
    default='',
    help="Specify a proxy in the form [user:passwd@]proxy.server:port."
)  # type: Any

retries = partial(
    Option,
    '--retries',
    dest='retries',
    type='int',
    default=5,
    help="Maximum number of retries each connection should attempt "
         "(default %default times).",
)  # type: Any

timeout = partial(
    Option,
    '--timeout', '--default-timeout',
    metavar='sec',
    dest='timeout',
    type='float',
    default=15,
    help='Set the socket timeout (default %default seconds).',
)  # type: Any

# Hidden/legacy: regex of requirement lines to skip when parsing files.
skip_requirements_regex = partial(
    Option,
    # A regex to be used to skip requirements
    '--skip-requirements-regex',
    dest='skip_requirements_regex',
    type='str',
    default='',
    help=SUPPRESS_HELP,
)  # type: Any
205
206
def exists_action():
    """Return a fresh ``--exists-action`` option.

    Repeatable choice option selecting what to do when an install target
    path already exists: (s)witch, (i)gnore, (w)ipe, (b)ackup or (a)bort.
    """
    return Option(
        # Option when path already exist
        '--exists-action',
        dest='exists_action',
        type='choice',
        choices=['s', 'i', 'w', 'b', 'a'],
        default=[],
        action='append',
        metavar='action',
        # Fixed a stray ")" that previously trailed "(a)bort".
        help="Default action when a path already exists: "
             "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort.",
    )
220
221
cert = partial(
    Option,
    '--cert',
    dest='cert',
    type='str',
    metavar='path',
    help="Path to alternate CA bundle.",
)  # type: Any

# Client certificate for mutual-TLS index servers (key + cert in one PEM).
client_cert = partial(
    Option,
    '--client-cert',
    dest='client_cert',
    type='str',
    default=None,
    metavar='path',
    help="Path to SSL client certificate, a single file containing the "
         "private key and the certificate in PEM format.",
)  # type: Any

# Default index is PyPI's PEP 503 "simple" API.
index_url = partial(
    Option,
    '-i', '--index-url', '--pypi-url',
    dest='index_url',
    metavar='URL',
    default=PyPI.simple_url,
    help="Base URL of Python Package Index (default %default). "
         "This should point to a repository compliant with PEP 503 "
         "(the simple repository API) or a local directory laid out "
         "in the same format.",
)  # type: Any
253
254
def extra_index_url():
    """Return a fresh, repeatable ``--extra-index-url`` option."""
    return Option(
        '--extra-index-url',
        dest='extra_index_urls',
        metavar='URL',
        action='append',
        default=[],
        help="Extra URLs of package indexes to use in addition to "
             "--index-url. Should follow the same rules as "
             "--index-url.",
    )
266
267
# Skip the package index entirely; rely solely on --find-links sources.
no_index = partial(
    Option,
    '--no-index',
    dest='no_index',
    action='store_true',
    default=False,
    help='Ignore package index (only looking at --find-links URLs instead).',
)  # type: Any
276
277
def find_links():
    """Return a fresh, repeatable ``-f/--find-links`` option."""
    return Option(
        '-f', '--find-links',
        dest='find_links',
        metavar='url',
        action='append',
        default=[],
        help="If a url or path to an html file, then parse for links to "
             "archives. If a local path or file:// url that's a directory, "
             "then look for archives in the directory listing.",
    )
289
290
def trusted_host():
    """Return a fresh, repeatable ``--trusted-host`` option."""
    return Option(
        "--trusted-host",
        dest="trusted_hosts",
        metavar="HOSTNAME",
        action="append",
        default=[],
        help="Mark this host as trusted, even though it does not have valid "
             "or any HTTPS.",
    )
301
302
# Remove after 1.5
# Legacy dependency-links support; kept only for backwards compatibility.
process_dependency_links = partial(
    Option,
    "--process-dependency-links",
    dest="process_dependency_links",
    action="store_true",
    default=False,
    help="Enable the processing of dependency links.",
)  # type: Any
312
313
def constraints():
    """Return a fresh, repeatable ``-c/--constraint`` option."""
    return Option(
        '-c', '--constraint',
        dest='constraints',
        metavar='file',
        action='append',
        default=[],
        help='Constrain versions using the given constraints file. '
             'This option can be used multiple times.'
    )
324
325
def requirements():
    """Return a fresh, repeatable ``-r/--requirement`` option."""
    return Option(
        '-r', '--requirement',
        dest='requirements',
        metavar='file',
        action='append',
        default=[],
        help='Install from the given requirements file. '
             'This option can be used multiple times.'
    )
336
337
def editable():
    """Return a fresh, repeatable ``-e/--editable`` option."""
    return Option(
        '-e', '--editable',
        dest='editables',
        metavar='path/url',
        action='append',
        default=[],
        help=('Install a project in editable mode (i.e. setuptools '
              '"develop mode") from a local project path or a VCS url.'),
    )
348
349
# Checkout directory for editable (-e) projects; the default (src_prefix)
# depends on whether pip is running inside a virtualenv.
src = partial(
    Option,
    '--src', '--source', '--source-dir', '--source-directory',
    dest='src_dir',
    metavar='dir',
    default=src_prefix,
    help='Directory to check out editable projects into. '
         'The default in a virtualenv is "<venv path>/src". '
         'The default for global installs is "<current dir>/src".'
)  # type: Any
360
361
362def _get_format_control(values, option):
363 """Get a format_control object."""
364 return getattr(values, option.dest)
365
366
def _handle_no_binary(option, opt_str, value, parser):
    # Mutates the shared FormatControl in place: entries added to
    # no_binary are removed from only_binary (mutual exclusion).
    fmt = getattr(parser.values, option.dest)
    fmt_ctl_handle_mutual_exclude(
        value, fmt.no_binary, fmt.only_binary,
    )
372
373
def _handle_only_binary(option, opt_str, value, parser):
    # Inverse of _handle_no_binary: entries move into only_binary and
    # out of no_binary.
    fmt = getattr(parser.values, option.dest)
    fmt_ctl_handle_mutual_exclude(
        value, fmt.only_binary, fmt.no_binary,
    )
379
380
def no_binary():
    """Return a fresh ``--no-binary`` option (callback mutates the shared
    FormatControl stored under dest='format_control')."""
    return Option(
        "--no-binary", dest="format_control", action="callback",
        callback=_handle_no_binary, type="str",
        default=FormatControl(set(), set()),
        help="Do not use binary packages. Can be supplied multiple times, and "
             "each time adds to the existing value. Accepts either :all: to "
             "disable all binary packages, :none: to empty the set, or one or "
             "more package names with commas between them. Note that some "
             "packages are tricky to compile and may fail to install when "
             "this option is used on them.",
    )
393
394
def only_binary():
    """Return a fresh ``--only-binary`` option (callback mutates the shared
    FormatControl stored under dest='format_control')."""
    return Option(
        "--only-binary", dest="format_control", action="callback",
        callback=_handle_only_binary, type="str",
        default=FormatControl(set(), set()),
        help="Do not use source packages. Can be supplied multiple times, and "
             "each time adds to the existing value. Accepts either :all: to "
             "disable all source packages, :none: to empty the set, or one or "
             "more package names with commas between them. Packages without "
             "binary distributions will fail to install when this option is "
             "used on them.",
    )
407
408
cache_dir = partial(
    Option,
    "--cache-dir",
    dest="cache_dir",
    default=USER_CACHE_DIR,
    metavar="dir",
    help="Store the cache data in <dir>."
)

# Shares dest with --cache-dir: store_false turns caching off entirely.
no_cache = partial(
    Option,
    "--no-cache-dir",
    dest="cache_dir",
    action="store_false",
    help="Disable the cache.",
)

no_deps = partial(
    Option,
    '--no-deps', '--no-dependencies',
    dest='ignore_dependencies',
    action='store_true',
    default=False,
    help="Don't install package dependencies.",
)  # type: Any

build_dir = partial(
    Option,
    '-b', '--build', '--build-dir', '--build-directory',
    dest='build_dir',
    metavar='dir',
    help='Directory to unpack packages into and build in. Note that '
         'an initial build still takes place in a temporary directory. '
         'The location of temporary directories can be controlled by setting '
         'the TMPDIR environment variable (TEMP on Windows) appropriately. '
         'When passed, build directories are not cleaned in case of failures.'
)  # type: Any

ignore_requires_python = partial(
    Option,
    '--ignore-requires-python',
    dest='ignore_requires_python',
    action='store_true',
    help='Ignore the Requires-Python information.'
)  # type: Any

# Note the inverted dest: passing --no-build-isolation stores False into
# options.build_isolation (which defaults to True).
no_build_isolation = partial(
    Option,
    '--no-build-isolation',
    dest='build_isolation',
    action='store_false',
    default=True,
    help='Disable isolation when building a modern source distribution. '
         'Build dependencies specified by PEP 518 must be already installed '
         'if this option is used.'
)  # type: Any
465
# Extra per-call arguments forwarded to `setup.py install`; using any of
# these disables wheels (see check_install_build_global).
install_options = partial(
    Option,
    '--install-option',
    dest='install_options',
    action='append',
    metavar='options',
    help="Extra arguments to be supplied to the setup.py install "
         "command (use like --install-option=\"--install-scripts=/usr/local/"
         "bin\"). Use multiple --install-option options to pass multiple "
         "options to setup.py install. If you are using an option with a "
         "directory path, be sure to use absolute path.",
)  # type: Any

global_options = partial(
    Option,
    '--global-option',
    dest='global_options',
    action='append',
    metavar='options',
    help="Extra global options to be supplied to the setup.py "
         "call before the install command.",
)  # type: Any
488
# Keep build directories around after a run (useful for debugging builds).
no_clean = partial(
    Option,
    '--no-clean',
    action='store_true',
    default=False,
    # Fixed a stray ")" that previously trailed this help text.
    help="Don't clean up build directories."
)  # type: Any
496
pre = partial(
    Option,
    '--pre',
    action='store_true',
    default=False,
    help="Include pre-release and development versions. By default, "
         "pip only finds stable versions.",
)  # type: Any

disable_pip_version_check = partial(
    Option,
    "--disable-pip-version-check",
    dest="disable_pip_version_check",
    action="store_true",
    default=False,
    help="Don't periodically check PyPI to determine whether a new version "
         "of pip is available for download. Implied with --no-index.",
)  # type: Any


# Deprecated, Remove later
# Hidden no-op kept so old command lines do not break.
always_unzip = partial(
    Option,
    '-Z', '--always-unzip',
    dest='always_unzip',
    action='store_true',
    help=SUPPRESS_HELP,
)  # type: Any
525
526
527def _merge_hash(option, opt_str, value, parser):
528 """Given a value spelled "algo:digest", append the digest to a list
529 pointed to in a dict by the algo name."""
530 if not parser.values.hashes:
531 parser.values.hashes = {}
532 try:
533 algo, digest = value.split(':', 1)
534 except ValueError:
535 parser.error('Arguments to %s must be a hash name '
536 'followed by a value, like --hash=sha256:abcde...' %
537 opt_str)
538 if algo not in STRONG_HASHES:
539 parser.error('Allowed hash algorithms for %s are %s.' %
540 (opt_str, ', '.join(STRONG_HASHES)))
541 parser.values.hashes.setdefault(algo, []).append(digest)
542
543
# NOTE: intentionally shadows the builtin `hash` within this module.
hash = partial(
    Option,
    '--hash',
    # Hash values eventually end up in InstallRequirement.hashes due to
    # __dict__ copying in process_line().
    dest='hashes',
    action='callback',
    callback=_merge_hash,
    type='string',
    help="Verify that the package's archive matches this "
         'hash before installing. Example: --hash=sha256:abcdef...',
)  # type: Any


require_hashes = partial(
    Option,
    '--require-hashes',
    dest='require_hashes',
    action='store_true',
    default=False,
    help='Require a hash to check each requirement against, for '
         'repeatable installs. This option is implied when any package in a '
         'requirements file has a --hash option.',
)  # type: Any
568
569
##########
# groups #
##########

# Option groups are plain dicts of option *factories*; instantiate them
# via make_option_group() so each parse gets fresh Option objects.
general_group = {
    'name': 'General Options',
    'options': [
        help_,
        isolated_mode,
        require_virtualenv,
        verbose,
        version,
        quiet,
        log,
        no_input,
        proxy,
        retries,
        timeout,
        skip_requirements_regex,
        exists_action,
        trusted_host,
        cert,
        client_cert,
        cache_dir,
        no_cache,
        disable_pip_version_check,
        no_color,
    ]
}

index_group = {
    'name': 'Package Index Options',
    'options': [
        index_url,
        extra_index_url,
        no_index,
        find_links,
        process_dependency_links,
    ]
}
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/__init__.py
new file mode 100644
index 0000000..d44e6f1
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/__init__.py
@@ -0,0 +1,79 @@
1"""
2Package containing all pip commands
3"""
4from __future__ import absolute_import
5
6from pip._internal.commands.completion import CompletionCommand
7from pip._internal.commands.configuration import ConfigurationCommand
8from pip._internal.commands.download import DownloadCommand
9from pip._internal.commands.freeze import FreezeCommand
10from pip._internal.commands.hash import HashCommand
11from pip._internal.commands.help import HelpCommand
12from pip._internal.commands.list import ListCommand
13from pip._internal.commands.check import CheckCommand
14from pip._internal.commands.search import SearchCommand
15from pip._internal.commands.show import ShowCommand
16from pip._internal.commands.install import InstallCommand
17from pip._internal.commands.uninstall import UninstallCommand
18from pip._internal.commands.wheel import WheelCommand
19
20from pip._internal.utils.typing import MYPY_CHECK_RUNNING
21
22if MYPY_CHECK_RUNNING:
23 from typing import List, Type
24 from pip._internal.basecommand import Command
25
# Display order used by `pip help`: the most common commands come first.
commands_order = [
    InstallCommand,
    DownloadCommand,
    UninstallCommand,
    FreezeCommand,
    ListCommand,
    ShowCommand,
    CheckCommand,
    ConfigurationCommand,
    SearchCommand,
    WheelCommand,
    HashCommand,
    CompletionCommand,
    HelpCommand,
]  # type: List[Type[Command]]

# Lookup table from command name (e.g. "install") to its class.
commands_dict = {c.name: c for c in commands_order}
44
def get_summaries(ordered=True):
    """Yields sorted (command name, command summary) tuples."""
    if ordered:
        # Honour the canonical help ordering.
        items = _sort_commands(commands_dict, commands_order)
    else:
        items = commands_dict.items()

    for name, command_class in items:
        yield (name, command_class.summary)
55
56
def get_similar_commands(name):
    """Command name auto-correct: return the closest known command name,
    or False when nothing is close enough."""
    from difflib import get_close_matches

    matches = get_close_matches(name.lower(), commands_dict.keys())
    return matches[0] if matches else False
69
70
71def _sort_commands(cmddict, order):
72 def keyfn(key):
73 try:
74 return order.index(key[1])
75 except ValueError:
76 # unordered items should come last
77 return 0xff
78
79 return sorted(cmddict.items(), key=keyfn)
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/check.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/check.py
new file mode 100644
index 0000000..b1bf38a
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/check.py
@@ -0,0 +1,42 @@
1import logging
2
3from pip._internal.basecommand import Command
4from pip._internal.operations.check import (
5 check_package_set, create_package_set_from_installed,
6)
7from pip._internal.utils.misc import get_installed_distributions
8
9logger = logging.getLogger(__name__)
10
11
class CheckCommand(Command):
    """Verify installed packages have compatible dependencies."""
    name = 'check'
    usage = """
      %prog [options]"""
    summary = 'Verify installed packages have compatible dependencies.'

    def run(self, options, args):
        package_set = create_package_set_from_installed()
        missing, conflicting = check_package_set(package_set)

        # Report every unmet dependency of every installed project.
        for project_name, unmet in missing.items():
            version = package_set[project_name].version
            for dependency in unmet:
                logger.info(
                    "%s %s requires %s, which is not installed.",
                    project_name, version, dependency[0],
                )

        # Report every dependency installed at an incompatible version.
        for project_name, wrong in conflicting.items():
            version = package_set[project_name].version
            for dep_name, dep_version, req in wrong:
                logger.info(
                    "%s %s has requirement %s, but you have %s %s.",
                    project_name, version, req, dep_name, dep_version,
                )

        # Non-zero return signals broken requirements to the caller.
        if missing or conflicting:
            return 1
        else:
            logger.info("No broken requirements found.")
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/completion.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/completion.py
new file mode 100644
index 0000000..8da1e83
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/completion.py
@@ -0,0 +1,94 @@
1from __future__ import absolute_import
2
3import sys
4import textwrap
5
6from pip._internal.basecommand import Command
7from pip._internal.utils.misc import get_prog
8
# Wrapper emitted around every shell script so users can locate (and later
# remove) the generated block in their shell config.
BASE_COMPLETION = """
# pip %(shell)s completion start%(script)s# pip %(shell)s completion end
"""

# Per-shell completion bodies; %(prog)s is substituted with the pip
# executable name, then the script is dedented and wrapped above.
COMPLETION_SCRIPTS = {
    'bash': """
        _pip_completion()
        {
            COMPREPLY=( $( COMP_WORDS="${COMP_WORDS[*]}" \\
                           COMP_CWORD=$COMP_CWORD \\
                           PIP_AUTO_COMPLETE=1 $1 ) )
        }
        complete -o default -F _pip_completion %(prog)s
    """,
    'zsh': """
        function _pip_completion {
          local words cword
          read -Ac words
          read -cn cword
          reply=( $( COMP_WORDS="$words[*]" \\
                     COMP_CWORD=$(( cword-1 )) \\
                     PIP_AUTO_COMPLETE=1 $words[1] ) )
        }
        compctl -K _pip_completion %(prog)s
    """,
    'fish': """
        function __fish_complete_pip
            set -lx COMP_WORDS (commandline -o) ""
            set -lx COMP_CWORD ( \\
                math (contains -i -- (commandline -t) $COMP_WORDS)-1 \\
            )
            set -lx PIP_AUTO_COMPLETE 1
            string split \\ -- (eval $COMP_WORDS[1])
        end
        complete -fa "(__fish_complete_pip)" -c %(prog)s
    """,
}
46
47
class CompletionCommand(Command):
    """A helper command to be used for command completion."""
    name = 'completion'
    summary = 'A helper command used for command completion.'
    ignore_require_venv = True

    def __init__(self, *args, **kw):
        super(CompletionCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        # One store_const option per supported shell; they all write the
        # chosen shell name into the shared 'shell' destination.
        for shell_name, long_flag, short_flag in [
            ('bash', '--bash', '-b'),
            ('zsh', '--zsh', '-z'),
            ('fish', '--fish', '-f'),
        ]:
            cmd_opts.add_option(
                long_flag, short_flag,
                action='store_const',
                const=shell_name,
                dest='shell',
                help='Emit completion code for %s' % shell_name)

        self.parser.insert_option_group(0, cmd_opts)

    def run(self, options, args):
        """Prints the completion code of the given shell"""
        shells = COMPLETION_SCRIPTS.keys()
        shell_options = ['--' + shell for shell in sorted(shells)]
        if options.shell not in shells:
            sys.stderr.write(
                'ERROR: You must pass %s\n' % ' or '.join(shell_options)
            )
            return
        script = textwrap.dedent(
            COMPLETION_SCRIPTS.get(options.shell, '') % {
                'prog': get_prog(),
            }
        )
        print(BASE_COMPLETION % {'script': script, 'shell': options.shell})
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/configuration.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/configuration.py
new file mode 100644
index 0000000..e10d9a9
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/configuration.py
@@ -0,0 +1,227 @@
1import logging
2import os
3import subprocess
4
5from pip._internal.basecommand import Command
6from pip._internal.configuration import Configuration, kinds
7from pip._internal.exceptions import PipError
8from pip._internal.locations import venv_config_file
9from pip._internal.status_codes import ERROR, SUCCESS
10from pip._internal.utils.misc import get_prog
11
12logger = logging.getLogger(__name__)
13
14
class ConfigurationCommand(Command):
    """Manage local and global configuration.

    Subcommands:

    list: List the active configuration (or from the file specified)
    edit: Edit the configuration file in an editor
    get: Get the value associated with name
    set: Set the name=value
    unset: Unset the value associated with name

    If none of --user, --global and --venv are passed, a virtual
    environment configuration file is used if one is active and the file
    exists. Otherwise, all modifications happen on the to the user file by
    default.
    """

    name = 'config'
    usage = """
        %prog [<file-option>] list
        %prog [<file-option>] [--editor <editor-path>] edit

        %prog [<file-option>] get name
        %prog [<file-option>] set name value
        %prog [<file-option>] unset name
    """

    summary = "Manage local and global configuration."

    def __init__(self, *args, **kwargs):
        super(ConfigurationCommand, self).__init__(*args, **kwargs)

        # Set lazily in run() once we know which file(s) to load.
        self.configuration = None

        self.cmd_opts.add_option(
            '--editor',
            dest='editor',
            action='store',
            default=None,
            help=(
                'Editor to use to edit the file. Uses VISUAL or EDITOR '
                'environment variables if not provided.'
            )
        )

        self.cmd_opts.add_option(
            '--global',
            dest='global_file',
            action='store_true',
            default=False,
            help='Use the system-wide configuration file only'
        )

        self.cmd_opts.add_option(
            '--user',
            dest='user_file',
            action='store_true',
            default=False,
            help='Use the user configuration file only'
        )

        self.cmd_opts.add_option(
            '--venv',
            dest='venv_file',
            action='store_true',
            default=False,
            help='Use the virtualenv configuration file only'
        )

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        """Dispatch ``args[0]`` to the matching subcommand handler.

        Returns SUCCESS or ERROR. PipError raised by a handler is caught
        and logged here rather than propagated.
        """
        handlers = {
            "list": self.list_values,
            "edit": self.open_in_editor,
            "get": self.get_name,
            "set": self.set_name_value,
            "unset": self.unset_name
        }

        # Determine action
        if not args or args[0] not in handlers:
            logger.error("Need an action ({}) to perform.".format(
                ", ".join(sorted(handlers)))
            )
            return ERROR

        action = args[0]

        # Determine which configuration files are to be loaded
        # Depends on whether the command is modifying.
        try:
            load_only = self._determine_file(
                options, need_value=(action in ["get", "set", "unset", "edit"])
            )
        except PipError as e:
            logger.error(e.args[0])
            return ERROR

        # Load a new configuration
        self.configuration = Configuration(
            isolated=options.isolated_mode, load_only=load_only
        )
        self.configuration.load()

        # Error handling happens here, not in the action-handlers.
        try:
            handlers[action](options, args[1:])
        except PipError as e:
            logger.error(e.args[0])
            return ERROR

        return SUCCESS

    def _determine_file(self, options, need_value):
        """Translate --user/--global/--venv into a ``kinds`` constant.

        Returns None when no flag was given and no single file is needed;
        raises PipError when more than one flag is set.
        """
        file_options = {
            kinds.USER: options.user_file,
            kinds.GLOBAL: options.global_file,
            kinds.VENV: options.venv_file
        }

        # Summing the booleans counts how many of the flags were passed.
        if sum(file_options.values()) == 0:
            if not need_value:
                return None
            # Default to user, unless there's a virtualenv file.
            elif os.path.exists(venv_config_file):
                return kinds.VENV
            else:
                return kinds.USER
        elif sum(file_options.values()) == 1:
            # There's probably a better expression for this.
            return [key for key in file_options if file_options[key]][0]

        raise PipError(
            "Need exactly one file to operate upon "
            "(--user, --venv, --global) to perform."
        )

    def list_values(self, options, args):
        """Handler for ``pip config list``: log every key=value pair."""
        self._get_n_args(args, "list", n=0)

        for key, value in sorted(self.configuration.items()):
            logger.info("%s=%r", key, value)

    def get_name(self, options, args):
        """Handler for ``pip config get <name>``."""
        key = self._get_n_args(args, "get [name]", n=1)
        value = self.configuration.get_value(key)

        logger.info("%s", value)

    def set_name_value(self, options, args):
        """Handler for ``pip config set <name> <value>``; persists the change."""
        key, value = self._get_n_args(args, "set [name] [value]", n=2)
        self.configuration.set_value(key, value)

        self._save_configuration()

    def unset_name(self, options, args):
        """Handler for ``pip config unset <name>``; persists the change."""
        key = self._get_n_args(args, "unset [name]", n=1)
        self.configuration.unset_value(key)

        self._save_configuration()

    def open_in_editor(self, options, args):
        """Handler for ``pip config edit``: open the config file in an editor."""
        editor = self._determine_editor(options)

        fname = self.configuration.get_file_to_edit()
        if fname is None:
            raise PipError("Could not determine appropriate file.")

        try:
            subprocess.check_call([editor, fname])
        except subprocess.CalledProcessError as e:
            raise PipError(
                "Editor Subprocess exited with exit code {}"
                .format(e.returncode)
            )

    def _get_n_args(self, args, example, n):
        """Helper to make sure the command got the right number of arguments
        """
        if len(args) != n:
            msg = (
                'Got unexpected number of arguments, expected {}. '
                '(example: "{} config {}")'
            ).format(n, get_prog(), example)
            raise PipError(msg)

        # A single argument is unwrapped for the caller's convenience.
        if n == 1:
            return args[0]
        else:
            return args

    def _save_configuration(self):
        # We successfully ran a modifying command. Need to save the
        # configuration.
        try:
            self.configuration.save()
        except Exception:
            logger.error(
                "Unable to save configuration. Please report this as a bug.",
                exc_info=1
            )
            raise PipError("Internal Error.")

    def _determine_editor(self, options):
        """Pick the editor: --editor flag, then $VISUAL, then $EDITOR."""
        if options.editor is not None:
            return options.editor
        elif "VISUAL" in os.environ:
            return os.environ["VISUAL"]
        elif "EDITOR" in os.environ:
            return os.environ["EDITOR"]
        else:
            raise PipError("Could not determine editor to use.")
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/download.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/download.py
new file mode 100644
index 0000000..916a470
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/download.py
@@ -0,0 +1,233 @@
1from __future__ import absolute_import
2
3import logging
4import os
5
6from pip._internal import cmdoptions
7from pip._internal.basecommand import RequirementCommand
8from pip._internal.exceptions import CommandError
9from pip._internal.index import FormatControl
10from pip._internal.operations.prepare import RequirementPreparer
11from pip._internal.req import RequirementSet
12from pip._internal.resolve import Resolver
13from pip._internal.utils.filesystem import check_path_owner
14from pip._internal.utils.misc import ensure_dir, normalize_path
15from pip._internal.utils.temp_dir import TempDirectory
16
17logger = logging.getLogger(__name__)
18
19
class DownloadCommand(RequirementCommand):
    """
    Download packages from:

    - PyPI (and other indexes) using requirement specifiers.
    - VCS project urls.
    - Local project directories.
    - Local or remote source archives.

    pip also supports downloading from "requirements files", which provide
    an easy way to specify a whole environment to be downloaded.
    """
    name = 'download'

    usage = """
      %prog [options] <requirement specifier> [package-index-options] ...
      %prog [options] -r <requirements file> [package-index-options] ...
      %prog [options] <vcs project url> ...
      %prog [options] <local project path> ...
      %prog [options] <archive url/path> ..."""

    summary = 'Download packages.'

    def __init__(self, *args, **kw):
        """Register the download-specific options plus the shared index group."""
        super(DownloadCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        # Options shared with other requirement-based commands.
        cmd_opts.add_option(cmdoptions.constraints())
        cmd_opts.add_option(cmdoptions.requirements())
        cmd_opts.add_option(cmdoptions.build_dir())
        cmd_opts.add_option(cmdoptions.no_deps())
        cmd_opts.add_option(cmdoptions.global_options())
        cmd_opts.add_option(cmdoptions.no_binary())
        cmd_opts.add_option(cmdoptions.only_binary())
        cmd_opts.add_option(cmdoptions.src())
        cmd_opts.add_option(cmdoptions.pre())
        cmd_opts.add_option(cmdoptions.no_clean())
        cmd_opts.add_option(cmdoptions.require_hashes())
        cmd_opts.add_option(cmdoptions.progress_bar())
        cmd_opts.add_option(cmdoptions.no_build_isolation())

        cmd_opts.add_option(
            '-d', '--dest', '--destination-dir', '--destination-directory',
            dest='download_dir',
            metavar='dir',
            default=os.curdir,
            help=("Download packages into <dir>."),
        )

        # The four options below restrict which wheels are acceptable; run()
        # enforces that they are only combined with --no-deps or
        # --only-binary=:all:.
        cmd_opts.add_option(
            '--platform',
            dest='platform',
            metavar='platform',
            default=None,
            help=("Only download wheels compatible with <platform>. "
                  "Defaults to the platform of the running system."),
        )

        cmd_opts.add_option(
            '--python-version',
            dest='python_version',
            metavar='python_version',
            default=None,
            help=("Only download wheels compatible with Python "
                  "interpreter version <version>. If not specified, then the "
                  "current system interpreter minor version is used. A major "
                  "version (e.g. '2') can be specified to match all "
                  "minor revs of that major version. A minor version "
                  "(e.g. '34') can also be specified."),
        )

        cmd_opts.add_option(
            '--implementation',
            dest='implementation',
            metavar='implementation',
            default=None,
            help=("Only download wheels compatible with Python "
                  "implementation <implementation>, e.g. 'pp', 'jy', 'cp', "
                  " or 'ip'. If not specified, then the current "
                  "interpreter implementation is used. Use 'py' to force "
                  "implementation-agnostic wheels."),
        )

        cmd_opts.add_option(
            '--abi',
            dest='abi',
            metavar='abi',
            default=None,
            help=("Only download wheels compatible with Python "
                  "abi <abi>, e.g. 'pypy_41'. If not specified, then the "
                  "current interpreter abi tag is used. Generally "
                  "you will need to specify --implementation, "
                  "--platform, and --python-version when using "
                  "this option."),
        )

        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    def run(self, options, args):
        """Resolve the given requirements and download them into
        ``options.download_dir``; returns the resolved RequirementSet.
        """
        options.ignore_installed = True
        # editable doesn't really make sense for `pip download`, but the bowels
        # of the RequirementSet code require that property.
        options.editables = []

        if options.python_version:
            python_versions = [options.python_version]
        else:
            python_versions = None

        # Cross-platform/interpreter downloads can only be satisfied from
        # wheels, so reject sdist-with-dependencies combinations up front.
        dist_restriction_set = any([
            options.python_version,
            options.platform,
            options.abi,
            options.implementation,
        ])
        binary_only = FormatControl(set(), {':all:'})
        no_sdist_dependencies = (
            options.format_control != binary_only and
            not options.ignore_dependencies
        )
        if dist_restriction_set and no_sdist_dependencies:
            raise CommandError(
                "When restricting platform and interpreter constraints using "
                "--python-version, --platform, --abi, or --implementation, "
                "either --no-deps must be set, or --only-binary=:all: must be "
                "set and --no-binary must not be set (or must be set to "
                ":none:)."
            )

        options.src_dir = os.path.abspath(options.src_dir)
        options.download_dir = normalize_path(options.download_dir)

        ensure_dir(options.download_dir)

        with self._build_session(options) as session:
            finder = self._build_package_finder(
                options=options,
                session=session,
                platform=options.platform,
                python_versions=python_versions,
                abi=options.abi,
                implementation=options.implementation,
            )
            build_delete = (not (options.no_clean or options.build_dir))
            # Writing the wheel cache as the wrong user would leave files
            # root can touch but the real user cannot; disable it instead.
            if options.cache_dir and not check_path_owner(options.cache_dir):
                logger.warning(
                    "The directory '%s' or its parent directory is not owned "
                    "by the current user and caching wheels has been "
                    "disabled. check the permissions and owner of that "
                    "directory. If executing pip with sudo, you may want "
                    "sudo's -H flag.",
                    options.cache_dir,
                )
                options.cache_dir = None

            with TempDirectory(
                options.build_dir, delete=build_delete, kind="download"
            ) as directory:

                requirement_set = RequirementSet(
                    require_hashes=options.require_hashes,
                )
                self.populate_requirement_set(
                    requirement_set,
                    args,
                    options,
                    finder,
                    session,
                    self.name,
                    None
                )

                preparer = RequirementPreparer(
                    build_dir=directory.path,
                    src_dir=options.src_dir,
                    download_dir=options.download_dir,
                    wheel_download_dir=None,
                    progress_bar=options.progress_bar,
                    build_isolation=options.build_isolation,
                )

                # Resolving with ignore_installed=True downloads everything,
                # including packages already present locally.
                resolver = Resolver(
                    preparer=preparer,
                    finder=finder,
                    session=session,
                    wheel_cache=None,
                    use_user_site=False,
                    upgrade_strategy="to-satisfy-only",
                    force_reinstall=False,
                    ignore_dependencies=options.ignore_dependencies,
                    ignore_requires_python=False,
                    ignore_installed=True,
                    isolated=options.isolated_mode,
                )
                resolver.resolve(requirement_set)

                downloaded = ' '.join([
                    req.name for req in requirement_set.successfully_downloaded
                ])
                if downloaded:
                    logger.info('Successfully downloaded %s', downloaded)

                # Clean up
                if not options.no_clean:
                    requirement_set.cleanup_files()

        return requirement_set
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/freeze.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/freeze.py
new file mode 100644
index 0000000..ac562d7
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/freeze.py
@@ -0,0 +1,96 @@
1from __future__ import absolute_import
2
3import sys
4
5from pip._internal import index
6from pip._internal.basecommand import Command
7from pip._internal.cache import WheelCache
8from pip._internal.compat import stdlib_pkgs
9from pip._internal.operations.freeze import freeze
10
11DEV_PKGS = {'pip', 'setuptools', 'distribute', 'wheel'}
12
13
class FreezeCommand(Command):
    """
    Output installed packages in requirements format.

    packages are listed in a case-insensitive sorted order.
    """
    name = 'freeze'
    usage = """
      %prog [options]"""
    summary = 'Output installed packages in requirements format.'
    # Route log output to stderr so stdout carries only requirement lines.
    log_streams = ("ext://sys.stderr", "ext://sys.stderr")

    def __init__(self, *args, **kw):
        super(FreezeCommand, self).__init__(*args, **kw)

        add_opt = self.cmd_opts.add_option
        add_opt('-r', '--requirement',
                dest='requirements',
                action='append',
                default=[],
                metavar='file',
                help="Use the order in the given requirements file and its "
                     "comments when generating output. This option can be "
                     "used multiple times.")
        add_opt('-f', '--find-links',
                dest='find_links',
                action='append',
                default=[],
                metavar='URL',
                help='URL for finding packages, which will be added to the '
                     'output.')
        add_opt('-l', '--local',
                dest='local',
                action='store_true',
                default=False,
                help='If in a virtualenv that has global access, do not output '
                     'globally-installed packages.')
        add_opt('--user',
                dest='user',
                action='store_true',
                default=False,
                help='Only output packages installed in user-site.')
        add_opt('--all',
                dest='freeze_all',
                action='store_true',
                help='Do not skip these packages in the output:'
                     ' %s' % ', '.join(DEV_PKGS))
        add_opt('--exclude-editable',
                dest='exclude_editable',
                action='store_true',
                help='Exclude editable package from output.')

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        """Write one requirement line per installed package to stdout."""
        wheel_cache = WheelCache(
            options.cache_dir, index.FormatControl(set(), set())
        )

        # Always hide stdlib packages; also hide pip & friends unless --all.
        skip = set(stdlib_pkgs)
        if not options.freeze_all:
            skip.update(DEV_PKGS)

        freeze_args = dict(
            requirement=options.requirements,
            find_links=options.find_links,
            local_only=options.local,
            user_only=options.user,
            skip_regex=options.skip_requirements_regex,
            isolated=options.isolated_mode,
            wheel_cache=wheel_cache,
            skip=skip,
            exclude_editable=options.exclude_editable,
        )

        try:
            for requirement_line in freeze(**freeze_args):
                sys.stdout.write(requirement_line + '\n')
        finally:
            wheel_cache.cleanup()
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/hash.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/hash.py
new file mode 100644
index 0000000..0ce1419
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/hash.py
@@ -0,0 +1,57 @@
1from __future__ import absolute_import
2
3import hashlib
4import logging
5import sys
6
7from pip._internal.basecommand import Command
8from pip._internal.status_codes import ERROR
9from pip._internal.utils.hashes import FAVORITE_HASH, STRONG_HASHES
10from pip._internal.utils.misc import read_chunks
11
12logger = logging.getLogger(__name__)
13
14
class HashCommand(Command):
    """
    Compute a hash of a local package archive.

    These can be used with --hash in a requirements file to do repeatable
    installs.

    """
    name = 'hash'
    usage = '%prog [options] <file> ...'
    summary = 'Compute hashes of package archives.'
    ignore_require_venv = True

    def __init__(self, *args, **kw):
        super(HashCommand, self).__init__(*args, **kw)
        self.cmd_opts.add_option(
            '-a', '--algorithm',
            dest='algorithm',
            action='store',
            choices=STRONG_HASHES,
            default=FAVORITE_HASH,
            help='The hash algorithm to use: one of %s' %
                 ', '.join(STRONG_HASHES))
        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        # Nothing to hash: show usage on stderr and signal failure.
        if not args:
            self.parser.print_usage(sys.stderr)
            return ERROR

        for path in args:
            digest = _hash_of_file(path, options.algorithm)
            logger.info('%s:\n--hash=%s:%s',
                        path, options.algorithm, digest)
49
50
51def _hash_of_file(path, algorithm):
52 """Return the hash digest of a file."""
53 with open(path, 'rb') as archive:
54 hash = hashlib.new(algorithm)
55 for chunk in read_chunks(archive):
56 hash.update(chunk)
57 return hash.hexdigest()
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/help.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/help.py
new file mode 100644
index 0000000..f4a0e40
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/help.py
@@ -0,0 +1,36 @@
1from __future__ import absolute_import
2
3from pip._internal.basecommand import SUCCESS, Command
4from pip._internal.exceptions import CommandError
5
6
class HelpCommand(Command):
    """Show help for commands"""
    name = 'help'
    usage = """
      %prog <command>"""
    summary = 'Show help for commands.'
    ignore_require_venv = True

    def run(self, options, args):
        # Deferred import of the command registry.
        from pip._internal.commands import commands_dict, get_similar_commands

        if not args:
            # 'pip help' with no args is handled by pip.__init__.parseopt()
            return SUCCESS
        cmd_name = args[0]  # the command we need help for

        if cmd_name in commands_dict:
            commands_dict[cmd_name]().parser.print_help()
            return SUCCESS

        guess = get_similar_commands(cmd_name)
        parts = ['unknown command "%s"' % cmd_name]
        if guess:
            parts.append('maybe you meant "%s"' % guess)
        raise CommandError(' - '.join(parts))
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/install.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/install.py
new file mode 100644
index 0000000..057a64e
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/install.py
@@ -0,0 +1,502 @@
1from __future__ import absolute_import
2
3import errno
4import logging
5import operator
6import os
7import shutil
8from optparse import SUPPRESS_HELP
9
10from pip._internal import cmdoptions
11from pip._internal.basecommand import RequirementCommand
12from pip._internal.cache import WheelCache
13from pip._internal.exceptions import (
14 CommandError, InstallationError, PreviousBuildDirError,
15)
16from pip._internal.locations import distutils_scheme, virtualenv_no_global
17from pip._internal.operations.check import check_install_conflicts
18from pip._internal.operations.prepare import RequirementPreparer
19from pip._internal.req import RequirementSet, install_given_reqs
20from pip._internal.resolve import Resolver
21from pip._internal.status_codes import ERROR
22from pip._internal.utils.filesystem import check_path_owner
23from pip._internal.utils.misc import ensure_dir, get_installed_version
24from pip._internal.utils.temp_dir import TempDirectory
25from pip._internal.wheel import WheelBuilder
26
27try:
28 import wheel
29except ImportError:
30 wheel = None
31
32
33logger = logging.getLogger(__name__)
34
35
36class InstallCommand(RequirementCommand):
37 """
38 Install packages from:
39
40 - PyPI (and other indexes) using requirement specifiers.
41 - VCS project urls.
42 - Local project directories.
43 - Local or remote source archives.
44
45 pip also supports installing from "requirements files", which provide
46 an easy way to specify a whole environment to be installed.
47 """
48 name = 'install'
49
50 usage = """
51 %prog [options] <requirement specifier> [package-index-options] ...
52 %prog [options] -r <requirements file> [package-index-options] ...
53 %prog [options] [-e] <vcs project url> ...
54 %prog [options] [-e] <local project path> ...
55 %prog [options] <archive url/path> ..."""
56
57 summary = 'Install packages.'
58
    def __init__(self, *args, **kw):
        """Register install-specific options plus the shared index group.

        NOTE(review): registration order determines --help output ordering,
        so new options should be added in the matching group below.
        """
        super(InstallCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        # Requirement-selection options shared with other commands.
        cmd_opts.add_option(cmdoptions.requirements())
        cmd_opts.add_option(cmdoptions.constraints())
        cmd_opts.add_option(cmdoptions.no_deps())
        cmd_opts.add_option(cmdoptions.pre())

        cmd_opts.add_option(cmdoptions.editable())
        # Destination-related options: --target, --user, --root, --prefix.
        cmd_opts.add_option(
            '-t', '--target',
            dest='target_dir',
            metavar='dir',
            default=None,
            help='Install packages into <dir>. '
                 'By default this will not replace existing files/folders in '
                 '<dir>. Use --upgrade to replace existing packages in <dir> '
                 'with new versions.'
        )
        cmd_opts.add_option(
            '--user',
            dest='use_user_site',
            action='store_true',
            help="Install to the Python user install directory for your "
                 "platform. Typically ~/.local/, or %APPDATA%\\Python on "
                 "Windows. (See the Python documentation for site.USER_BASE "
                 "for full details.)")
        # Hidden escape hatch to undo an earlier --user on the command line.
        cmd_opts.add_option(
            '--no-user',
            dest='use_user_site',
            action='store_false',
            help=SUPPRESS_HELP)
        cmd_opts.add_option(
            '--root',
            dest='root_path',
            metavar='dir',
            default=None,
            help="Install everything relative to this alternate root "
                 "directory.")
        cmd_opts.add_option(
            '--prefix',
            dest='prefix_path',
            metavar='dir',
            default=None,
            help="Installation prefix where lib, bin and other top-level "
                 "folders are placed")

        cmd_opts.add_option(cmdoptions.build_dir())

        cmd_opts.add_option(cmdoptions.src())

        # Upgrade behaviour.
        cmd_opts.add_option(
            '-U', '--upgrade',
            dest='upgrade',
            action='store_true',
            help='Upgrade all specified packages to the newest available '
                 'version. The handling of dependencies depends on the '
                 'upgrade-strategy used.'
        )

        cmd_opts.add_option(
            '--upgrade-strategy',
            dest='upgrade_strategy',
            default='only-if-needed',
            choices=['only-if-needed', 'eager'],
            help='Determines how dependency upgrading should be handled '
                 '[default: %default]. '
                 '"eager" - dependencies are upgraded regardless of '
                 'whether the currently installed version satisfies the '
                 'requirements of the upgraded package(s). '
                 '"only-if-needed" -  are upgraded only when they do not '
                 'satisfy the requirements of the upgraded package(s).'
        )

        cmd_opts.add_option(
            '--force-reinstall',
            dest='force_reinstall',
            action='store_true',
            help='Reinstall all packages even if they are already '
                 'up-to-date.')

        cmd_opts.add_option(
            '-I', '--ignore-installed',
            dest='ignore_installed',
            action='store_true',
            help='Ignore the installed packages (reinstalling instead).')

        cmd_opts.add_option(cmdoptions.ignore_requires_python())
        cmd_opts.add_option(cmdoptions.no_build_isolation())

        cmd_opts.add_option(cmdoptions.install_options())
        cmd_opts.add_option(cmdoptions.global_options())

        # --compile/--no-compile share one boolean dest (default True).
        cmd_opts.add_option(
            "--compile",
            action="store_true",
            dest="compile",
            default=True,
            help="Compile Python source files to bytecode",
        )

        cmd_opts.add_option(
            "--no-compile",
            action="store_false",
            dest="compile",
            help="Do not compile Python source files to bytecode",
        )

        cmd_opts.add_option(
            "--no-warn-script-location",
            action="store_false",
            dest="warn_script_location",
            default=True,
            help="Do not warn when installing scripts outside PATH",
        )
        cmd_opts.add_option(
            "--no-warn-conflicts",
            action="store_false",
            dest="warn_about_conflicts",
            default=True,
            help="Do not warn about broken dependencies",
        )

        cmd_opts.add_option(cmdoptions.no_binary())
        cmd_opts.add_option(cmdoptions.only_binary())
        cmd_opts.add_option(cmdoptions.no_clean())
        cmd_opts.add_option(cmdoptions.require_hashes())
        cmd_opts.add_option(cmdoptions.progress_bar())

        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)
197
    def run(self, options, args):
        """Resolve, optionally wheel-build, and install the requirements.

        Returns the resolved RequirementSet on success, or ERROR when an
        EnvironmentError (e.g. a permissions failure) occurs during install.
        """
        cmdoptions.check_install_build_global(options)

        # Only honour --upgrade-strategy when --upgrade was actually given.
        upgrade_strategy = "to-satisfy-only"
        if options.upgrade:
            upgrade_strategy = options.upgrade_strategy

        if options.build_dir:
            options.build_dir = os.path.abspath(options.build_dir)

        options.src_dir = os.path.abspath(options.src_dir)
        install_options = options.install_options or []
        if options.use_user_site:
            if options.prefix_path:
                raise CommandError(
                    "Can not combine '--user' and '--prefix' as they imply "
                    "different installation locations"
                )
            if virtualenv_no_global():
                raise InstallationError(
                    "Can not perform a '--user' install. User site-packages "
                    "are not visible in this virtualenv."
                )
            install_options.append('--user')
            install_options.append('--prefix=')

        # --target installs into a temp dir first; _handle_target_dir()
        # moves the result into place afterwards.
        target_temp_dir = TempDirectory(kind="target")
        if options.target_dir:
            options.ignore_installed = True
            options.target_dir = os.path.abspath(options.target_dir)
            if (os.path.exists(options.target_dir) and not
                    os.path.isdir(options.target_dir)):
                raise CommandError(
                    "Target path exists but is not a directory, will not "
                    "continue."
                )

            # Create a target directory for using with the target option
            target_temp_dir.create()
            install_options.append('--home=' + target_temp_dir.path)

        global_options = options.global_options or []

        with self._build_session(options) as session:
            finder = self._build_package_finder(options, session)
            build_delete = (not (options.no_clean or options.build_dir))
            wheel_cache = WheelCache(options.cache_dir, options.format_control)

            # A cache dir owned by another user (e.g. under sudo) would be
            # poisoned by our writes; disable caching instead.
            if options.cache_dir and not check_path_owner(options.cache_dir):
                logger.warning(
                    "The directory '%s' or its parent directory is not owned "
                    "by the current user and caching wheels has been "
                    "disabled. check the permissions and owner of that "
                    "directory. If executing pip with sudo, you may want "
                    "sudo's -H flag.",
                    options.cache_dir,
                )
                options.cache_dir = None

            with TempDirectory(
                options.build_dir, delete=build_delete, kind="install"
            ) as directory:
                requirement_set = RequirementSet(
                    require_hashes=options.require_hashes,
                )

                try:
                    self.populate_requirement_set(
                        requirement_set, args, options, finder, session,
                        self.name, wheel_cache
                    )
                    preparer = RequirementPreparer(
                        build_dir=directory.path,
                        src_dir=options.src_dir,
                        download_dir=None,
                        wheel_download_dir=None,
                        progress_bar=options.progress_bar,
                        build_isolation=options.build_isolation,
                    )

                    resolver = Resolver(
                        preparer=preparer,
                        finder=finder,
                        session=session,
                        wheel_cache=wheel_cache,
                        use_user_site=options.use_user_site,
                        upgrade_strategy=upgrade_strategy,
                        force_reinstall=options.force_reinstall,
                        ignore_dependencies=options.ignore_dependencies,
                        ignore_requires_python=options.ignore_requires_python,
                        ignore_installed=options.ignore_installed,
                        isolated=options.isolated_mode,
                    )
                    resolver.resolve(requirement_set)

                    # If caching is disabled or wheel is not installed don't
                    # try to build wheels.
                    if wheel and options.cache_dir:
                        # build wheels before install.
                        wb = WheelBuilder(
                            finder, preparer, wheel_cache,
                            build_options=[], global_options=[],
                        )
                        # Ignore the result: a failed wheel will be
                        # installed from the sdist/vcs whatever.
                        wb.build(
                            requirement_set.requirements.values(),
                            session=session, autobuilding=True
                        )

                    to_install = resolver.get_installation_order(
                        requirement_set
                    )

                    # Consistency Checking of the package set we're installing.
                    should_warn_about_conflicts = (
                        not options.ignore_dependencies and
                        options.warn_about_conflicts
                    )
                    if should_warn_about_conflicts:
                        self._warn_about_conflicts(to_install)

                    # Don't warn about script install locations if
                    # --target has been specified
                    warn_script_location = options.warn_script_location
                    if options.target_dir:
                        warn_script_location = False

                    installed = install_given_reqs(
                        to_install,
                        install_options,
                        global_options,
                        root=options.root_path,
                        home=target_temp_dir.path,
                        prefix=options.prefix_path,
                        pycompile=options.compile,
                        warn_script_location=warn_script_location,
                        use_user_site=options.use_user_site,
                    )

                    # Build the "Successfully installed foo-1.0 bar-2.0"
                    # summary; version lookup failures are non-fatal.
                    possible_lib_locations = get_lib_location_guesses(
                        user=options.use_user_site,
                        home=target_temp_dir.path,
                        root=options.root_path,
                        prefix=options.prefix_path,
                        isolated=options.isolated_mode,
                    )
                    reqs = sorted(installed, key=operator.attrgetter('name'))
                    items = []
                    for req in reqs:
                        item = req.name
                        try:
                            installed_version = get_installed_version(
                                req.name, possible_lib_locations
                            )
                            if installed_version:
                                item += '-' + installed_version
                        except Exception:
                            pass
                        items.append(item)
                    installed = ' '.join(items)
                    if installed:
                        logger.info('Successfully installed %s', installed)
                except EnvironmentError as error:
                    show_traceback = (self.verbosity >= 1)

                    message = create_env_error_message(
                        error, show_traceback, options.use_user_site,
                    )
                    logger.error(message, exc_info=show_traceback)

                    return ERROR
                except PreviousBuildDirError:
                    # Keep the partially-built dir around for inspection.
                    options.no_clean = True
                    raise
                finally:
                    # Clean up
                    if not options.no_clean:
                        requirement_set.cleanup_files()
                        wheel_cache.cleanup()

        if options.target_dir:
            self._handle_target_dir(
                options.target_dir, target_temp_dir, options.upgrade
            )
        return requirement_set
384
    def _handle_target_dir(self, target_dir, target_temp_dir, upgrade):
        """Move everything installed under *target_temp_dir* into the
        user-requested ``--target`` directory.

        :param target_dir: final directory given via ``--target``.
        :param target_temp_dir: TempDirectory the packages were installed
            into; entered as a context manager so it is cleaned up on exit.
        :param upgrade: when True, replace an existing item in *target_dir*
            instead of skipping it with a warning.
        """
        ensure_dir(target_dir)

        # Checking both purelib and platlib directories for installed
        # packages to be moved to target directory
        lib_dir_list = []

        with target_temp_dir:
            # Resolve the distutils install scheme rooted at the temp dir;
            # purelib/platlib hold Python code, 'data' everything else.
            scheme = distutils_scheme('', home=target_temp_dir.path)
            purelib_dir = scheme['purelib']
            platlib_dir = scheme['platlib']
            data_dir = scheme['data']

            if os.path.exists(purelib_dir):
                lib_dir_list.append(purelib_dir)
            # purelib and platlib may resolve to the same path; avoid dupes.
            if os.path.exists(platlib_dir) and platlib_dir != purelib_dir:
                lib_dir_list.append(platlib_dir)
            if os.path.exists(data_dir):
                lib_dir_list.append(data_dir)

            for lib_dir in lib_dir_list:
                for item in os.listdir(lib_dir):
                    if lib_dir == data_dir:
                        ddir = os.path.join(data_dir, item)
                        # Skip data entries that are ancestors of the lib
                        # dirs themselves (data_dir is appended last, hence
                        # lib_dir_list[:-1] excludes it from the check).
                        if any(s.startswith(ddir) for s in lib_dir_list[:-1]):
                            continue
                    target_item_dir = os.path.join(target_dir, item)
                    if os.path.exists(target_item_dir):
                        if not upgrade:
                            logger.warning(
                                'Target directory %s already exists. Specify '
                                '--upgrade to force replacement.',
                                target_item_dir
                            )
                            continue
                        if os.path.islink(target_item_dir):
                            # Never replace a symlink automatically; it may
                            # point somewhere the user cares about.
                            logger.warning(
                                'Target directory %s already exists and is '
                                'a link. Pip will not automatically replace '
                                'links, please remove if replacement is '
                                'desired.',
                                target_item_dir
                            )
                            continue
                        # --upgrade: remove the old file/dir before moving in.
                        if os.path.isdir(target_item_dir):
                            shutil.rmtree(target_item_dir)
                        else:
                            os.remove(target_item_dir)

                    shutil.move(
                        os.path.join(lib_dir, item),
                        target_item_dir
                    )
440
441 def _warn_about_conflicts(self, to_install):
442 package_set, _dep_info = check_install_conflicts(to_install)
443 missing, conflicting = _dep_info
444
445 # NOTE: There is some duplication here from pip check
446 for project_name in missing:
447 version = package_set[project_name][0]
448 for dependency in missing[project_name]:
449 logger.critical(
450 "%s %s requires %s, which is not installed.",
451 project_name, version, dependency[1],
452 )
453
454 for project_name in conflicting:
455 version = package_set[project_name][0]
456 for dep_name, dep_version, req in conflicting[project_name]:
457 logger.critical(
458 "%s %s has requirement %s, but you'll have %s %s which is "
459 "incompatible.",
460 project_name, version, req, dep_name, dep_version,
461 )
462
463
def get_lib_location_guesses(*args, **kwargs):
    """Return the likely library install directories (purelib, platlib)
    for the distutils scheme described by *args*/*kwargs*.
    """
    scheme = distutils_scheme('', *args, **kwargs)
    return [scheme[key] for key in ('purelib', 'platlib')]
467
468
def create_env_error_message(error, show_traceback, using_user_site):
    """Format an error message for an EnvironmentError

    It may occur anytime during the execution of the install command.

    :param error: the EnvironmentError that was raised.
    :param show_traceback: when True the caller will print the traceback,
        so the error text itself is omitted from the message.
    :param using_user_site: whether ``--user`` was already in effect.
    :return: the complete message, always ending in a single newline.
    """
    parts = ["Could not install packages due to an EnvironmentError"]

    # Mention the error itself only when no traceback will be shown.
    if show_traceback:
        parts.append(".")
    else:
        parts.append(": ")
        parts.append(str(error))

    # Split the error indication from a helper message (if any)
    parts[-1] += "\n"

    # Suggest useful actions to the user:
    # (1) using user site-packages or (2) verifying the permissions
    if error.errno == errno.EACCES:
        user_option_part = "Consider using the `--user` option"
        permissions_part = "Check the permissions"

        if using_user_site:
            parts.append(permissions_part)
        else:
            parts.extend([
                user_option_part, " or ",
                permissions_part.lower(),
            ])
        parts.append(".\n")

    return "".join(parts).strip() + "\n"
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/list.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/list.py
new file mode 100644
index 0000000..1b46c6f
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/list.py
@@ -0,0 +1,343 @@
1from __future__ import absolute_import
2
3import json
4import logging
5import warnings
6
7from pip._vendor import six
8from pip._vendor.six.moves import zip_longest
9
10from pip._internal.basecommand import Command
11from pip._internal.cmdoptions import index_group, make_option_group
12from pip._internal.exceptions import CommandError
13from pip._internal.index import PackageFinder
14from pip._internal.utils.deprecation import RemovedInPip11Warning
15from pip._internal.utils.misc import (
16 dist_is_editable, get_installed_distributions,
17)
18from pip._internal.utils.packaging import get_installer
19
20logger = logging.getLogger(__name__)
21
22
class ListCommand(Command):
    """
    List installed packages, including editables.

    Packages are listed in a case-insensitive sorted order.
    """
    name = 'list'
    usage = """
      %prog [options]"""
    summary = 'List installed packages.'

    def __init__(self, *args, **kw):
        super(ListCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        cmd_opts.add_option(
            '-o', '--outdated',
            action='store_true',
            default=False,
            help='List outdated packages')
        cmd_opts.add_option(
            '-u', '--uptodate',
            action='store_true',
            default=False,
            help='List uptodate packages')
        cmd_opts.add_option(
            '-e', '--editable',
            action='store_true',
            default=False,
            help='List editable projects.')
        cmd_opts.add_option(
            '-l', '--local',
            action='store_true',
            default=False,
            help=('If in a virtualenv that has global access, do not list '
                  'globally-installed packages.'),
        )
        self.cmd_opts.add_option(
            '--user',
            dest='user',
            action='store_true',
            default=False,
            help='Only output packages installed in user-site.')

        cmd_opts.add_option(
            '--pre',
            action='store_true',
            default=False,
            help=("Include pre-release and development versions. By default, "
                  "pip only finds stable versions."),
        )

        cmd_opts.add_option(
            '--format',
            action='store',
            dest='list_format',
            default="columns",
            choices=('legacy', 'columns', 'freeze', 'json'),
            help="Select the output format among: columns (default), freeze, "
                 "json, or legacy.",
        )

        cmd_opts.add_option(
            '--not-required',
            action='store_true',
            dest='not_required',
            help="List packages that are not dependencies of "
                 "installed packages.",
        )

        # --exclude-editable / --include-editable toggle the same dest;
        # whichever appears last on the command line wins (default: include).
        cmd_opts.add_option(
            '--exclude-editable',
            action='store_false',
            dest='include_editable',
            help='Exclude editable package from output.',
        )
        cmd_opts.add_option(
            '--include-editable',
            action='store_true',
            dest='include_editable',
            help='Include editable package from output.',
            default=True,
        )
        index_opts = make_option_group(index_group, self.parser)

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    def _build_package_finder(self, options, index_urls, session):
        """
        Create a package finder appropriate to this list command.
        """
        return PackageFinder(
            find_links=options.find_links,
            index_urls=index_urls,
            allow_all_prereleases=options.pre,
            trusted_hosts=options.trusted_hosts,
            process_dependency_links=options.process_dependency_links,
            session=session,
        )

    def run(self, options, args):
        # The 'legacy' format still works but is scheduled for removal.
        if options.list_format == "legacy":
            warnings.warn(
                "The legacy format has been deprecated and will be removed "
                "in the future.",
                RemovedInPip11Warning,
            )

        if options.outdated and options.uptodate:
            raise CommandError(
                "Options --outdated and --uptodate cannot be combined.")

        packages = get_installed_distributions(
            local_only=options.local,
            user_only=options.user,
            editables_only=options.editable,
            include_editables=options.include_editable,
        )

        # Filter by freshness first, then by "not a dependency" if asked.
        if options.outdated:
            packages = self.get_outdated(packages, options)
        elif options.uptodate:
            packages = self.get_uptodate(packages, options)

        if options.not_required:
            packages = self.get_not_required(packages, options)

        self.output_package_listing(packages, options)

    def get_outdated(self, packages, options):
        """Return the subset of *packages* with a newer version on the index."""
        return [
            dist for dist in self.iter_packages_latest_infos(packages, options)
            if dist.latest_version > dist.parsed_version
        ]

    def get_uptodate(self, packages, options):
        """Return the subset of *packages* already at the newest index version."""
        return [
            dist for dist in self.iter_packages_latest_infos(packages, options)
            if dist.latest_version == dist.parsed_version
        ]

    def get_not_required(self, packages, options):
        """Return the packages that nothing else in *packages* depends on."""
        dep_keys = set()
        for dist in packages:
            dep_keys.update(requirement.key for requirement in dist.requires())
        # NOTE: returns a set, unlike the lists the other filters produce.
        return {pkg for pkg in packages if pkg.key not in dep_keys}

    def iter_packages_latest_infos(self, packages, options):
        """Yield each package annotated with its latest version on the index.

        Packages for which no (matching) candidate is found are skipped.
        """
        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index:
            logger.debug('Ignoring indexes: %s', ','.join(index_urls))
            index_urls = []

        dependency_links = []
        for dist in packages:
            if dist.has_metadata('dependency_links.txt'):
                dependency_links.extend(
                    dist.get_metadata_lines('dependency_links.txt'),
                )

        with self._build_session(options) as session:
            finder = self._build_package_finder(options, index_urls, session)
            finder.add_dependency_links(dependency_links)

            for dist in packages:
                typ = 'unknown'
                all_candidates = finder.find_all_candidates(dist.key)
                if not options.pre:
                    # Remove prereleases
                    all_candidates = [candidate for candidate in all_candidates
                                      if not candidate.version.is_prerelease]

                if not all_candidates:
                    continue
                best_candidate = max(all_candidates,
                                     key=finder._candidate_sort_key)
                remote_version = best_candidate.version
                if best_candidate.location.is_wheel:
                    typ = 'wheel'
                else:
                    typ = 'sdist'
                # This is dirty but makes the rest of the code much cleaner
                dist.latest_version = remote_version
                dist.latest_filetype = typ
                yield dist

    def output_legacy(self, dist, options):
        """Render one package line in the deprecated 'legacy' format."""
        if options.verbose >= 1:
            return '%s (%s, %s, %s)' % (
                dist.project_name,
                dist.version,
                dist.location,
                get_installer(dist),
            )
        elif dist_is_editable(dist):
            return '%s (%s, %s)' % (
                dist.project_name,
                dist.version,
                dist.location,
            )
        else:
            return '%s (%s)' % (dist.project_name, dist.version)

    def output_legacy_latest(self, dist, options):
        """Legacy-format line including the latest-version annotation."""
        return '%s - Latest: %s [%s]' % (
            self.output_legacy(dist, options),
            dist.latest_version,
            dist.latest_filetype,
        )

    def output_package_listing(self, packages, options):
        """Dispatch to the renderer selected by --format."""
        packages = sorted(
            packages,
            key=lambda dist: dist.project_name.lower(),
        )
        if options.list_format == 'columns' and packages:
            data, header = format_for_columns(packages, options)
            self.output_package_listing_columns(data, header)
        elif options.list_format == 'freeze':
            for dist in packages:
                if options.verbose >= 1:
                    logger.info("%s==%s (%s)", dist.project_name,
                                dist.version, dist.location)
                else:
                    logger.info("%s==%s", dist.project_name, dist.version)
        elif options.list_format == 'json':
            logger.info(format_for_json(packages, options))
        elif options.list_format == "legacy":
            for dist in packages:
                if options.outdated:
                    logger.info(self.output_legacy_latest(dist, options))
                else:
                    logger.info(self.output_legacy(dist, options))

    def output_package_listing_columns(self, data, header):
        """Log *data* as aligned columns, preceded by *header* and a rule."""
        # insert the header first: we need to know the size of column names
        if len(data) > 0:
            data.insert(0, header)

        pkg_strings, sizes = tabulate(data)

        # Create and add a separator.
        if len(data) > 0:
            pkg_strings.insert(1, " ".join(map(lambda x: '-' * x, sizes)))

        for val in pkg_strings:
            logger.info(val)
272
273
def tabulate(vals):
    """Column-align *vals* (a non-empty list of rows) for display.

    :param vals: rows of cells; rows may be ragged, missing cells render
        as empty strings.
    :return: ``(rows, sizes)`` — the formatted row strings and the
        computed column widths.

    From pfmoore on GitHub:
    https://github.com/pypa/pip/issues/3651#issuecomment-216932564
    """
    assert len(vals) > 0

    sizes = [0] * max(len(x) for x in vals)
    for row in vals:
        # Missing cells (short rows) contribute no width; previously the
        # fill value None was measured as len('None'), forcing a spurious
        # minimum column width of 4.
        sizes = [max(s, len(str(c)) if c is not None else 0)
                 for s, c in zip_longest(sizes, row)]

    result = []
    for row in vals:
        display = " ".join([str(c).ljust(s) if c is not None else ''
                            for s, c in zip_longest(sizes, row)])
        result.append(display)

    return result, sizes
290
291
def format_for_columns(pkgs, options):
    """
    Convert the package data into something usable
    by output_package_listing_columns.
    """
    running_outdated = options.outdated

    # Adjust the header for the `pip list --outdated` case.
    if running_outdated:
        header = ["Package", "Version", "Latest", "Type"]
    else:
        header = ["Package", "Version"]

    show_installer = options.verbose >= 1
    # Short-circuits: editability is only probed when not verbose.
    show_location = show_installer or any(dist_is_editable(x) for x in pkgs)

    if show_location:
        header.append("Location")
    if show_installer:
        header.append("Installer")

    data = []
    for proj in pkgs:
        row = [proj.project_name, proj.version]

        # For --outdated, also report the newest version found and how it
        # would be delivered (wheel vs sdist).
        if running_outdated:
            row.append(proj.latest_version)
            row.append(proj.latest_filetype)

        if options.verbose >= 1 or dist_is_editable(proj):
            row.append(proj.location)
        if show_installer:
            row.append(get_installer(proj))

        data.append(row)

    return data, header
327
328
def format_for_json(packages, options):
    """Serialize *packages* to a JSON array of per-package info dicts."""
    def _info(dist):
        # Base fields are always present; the rest depend on the options.
        entry = {
            'name': dist.project_name,
            'version': six.text_type(dist.version),
        }
        if options.verbose >= 1:
            entry['location'] = dist.location
            entry['installer'] = get_installer(dist)
        if options.outdated:
            entry['latest_version'] = six.text_type(dist.latest_version)
            entry['latest_filetype'] = dist.latest_filetype
        return entry

    return json.dumps([_info(dist) for dist in packages])
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/search.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/search.py
new file mode 100644
index 0000000..83895ce
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/search.py
@@ -0,0 +1,135 @@
1from __future__ import absolute_import
2
3import logging
4import sys
5import textwrap
6from collections import OrderedDict
7
8from pip._vendor import pkg_resources
9from pip._vendor.packaging.version import parse as parse_version
10# NOTE: XMLRPC Client is not annotated in typeshed as on 2017-07-17, which is
11# why we ignore the type on this import
12from pip._vendor.six.moves import xmlrpc_client # type: ignore
13
14from pip._internal.basecommand import SUCCESS, Command
15from pip._internal.compat import get_terminal_size
16from pip._internal.download import PipXmlrpcTransport
17from pip._internal.exceptions import CommandError
18from pip._internal.models import PyPI
19from pip._internal.status_codes import NO_MATCHES_FOUND
20from pip._internal.utils.logging import indent_log
21
22logger = logging.getLogger(__name__)
23
24
class SearchCommand(Command):
    """Search for PyPI packages whose name or summary contains <query>."""
    name = 'search'
    usage = """
      %prog [options] <query>"""
    summary = 'Search PyPI for packages.'
    ignore_require_venv = True

    def __init__(self, *args, **kw):
        super(SearchCommand, self).__init__(*args, **kw)
        # Single option: which index server to query.
        self.cmd_opts.add_option(
            '-i', '--index',
            dest='index',
            metavar='URL',
            default=PyPI.pypi_url,
            help='Base URL of Python Package Index (default %default)')

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        if not args:
            raise CommandError('Missing required argument (search query).')

        raw_hits = self.search(args, options)
        grouped_hits = transform_hits(raw_hits)

        # Only wrap output to the terminal width when attached to a tty.
        terminal_width = None
        if sys.stdout.isatty():
            terminal_width = get_terminal_size()[0]

        print_results(grouped_hits, terminal_width=terminal_width)
        return SUCCESS if raw_hits else NO_MATCHES_FOUND

    def search(self, query, options):
        """Run an XML-RPC ``search`` call against ``options.index``."""
        with self._build_session(options) as session:
            transport = PipXmlrpcTransport(options.index, session)
            proxy = xmlrpc_client.ServerProxy(options.index, transport)
            return proxy.search({'name': query, 'summary': query}, 'or')
67
68
def transform_hits(hits):
    """
    The list from pypi is really a list of versions. We want a list of
    packages with the list of versions stored inline. This converts the
    list from pypi into one we can use.
    """
    packages = OrderedDict()
    for hit in hits:
        name, summary, version = hit['name'], hit['summary'], hit['version']

        entry = packages.get(name)
        if entry is None:
            packages[name] = {
                'name': name,
                'summary': summary,
                'versions': [version],
            }
        else:
            entry['versions'].append(version)

            # if this is the highest version, replace summary and score
            if version == highest_version(entry['versions']):
                entry['summary'] = summary

    return list(packages.values())
95
96
def print_results(hits, name_column_width=None, terminal_width=None):
    """Log one line per hit, wrapping summaries to the terminal width and
    annotating packages that are already installed locally.
    """
    if not hits:
        return

    if name_column_width is None:
        # Widest "name (version)" among the hits, plus padding.
        name_column_width = 4 + max(
            len(hit['name']) + len(highest_version(hit.get('versions', ['-'])))
            for hit in hits
        )

    installed_packages = [p.project_name for p in pkg_resources.working_set]
    for hit in hits:
        name = hit['name']
        summary = hit['summary'] or ''
        latest = highest_version(hit.get('versions', ['-']))

        if terminal_width is not None:
            target_width = terminal_width - name_column_width - 5
            if target_width > 10:
                # wrap and indent summary to fit terminal
                wrapped = textwrap.wrap(summary, target_width)
                summary = ('\n' + ' ' * (name_column_width + 3)).join(wrapped)

        line = '%-*s - %s' % (name_column_width,
                              '%s (%s)' % (name, latest), summary)
        try:
            logger.info(line)
            if name in installed_packages:
                dist = pkg_resources.get_distribution(name)
                with indent_log():
                    if dist.version == latest:
                        logger.info('INSTALLED: %s (latest)', dist.version)
                    else:
                        logger.info('INSTALLED: %s', dist.version)
                        logger.info('LATEST: %s', latest)
        except UnicodeEncodeError:
            # Terminal cannot represent the name/summary; skip the entry.
            pass
132
133
def highest_version(versions):
    # Compare using PEP 440 semantics (so '10.0' > '9.0'), not string order.
    return max(versions, key=parse_version)
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/show.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/show.py
new file mode 100644
index 0000000..bad9628
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/show.py
@@ -0,0 +1,164 @@
1from __future__ import absolute_import
2
3import logging
4import os
5from email.parser import FeedParser # type: ignore
6
7from pip._vendor import pkg_resources
8from pip._vendor.packaging.utils import canonicalize_name
9
10from pip._internal.basecommand import Command
11from pip._internal.status_codes import ERROR, SUCCESS
12
13logger = logging.getLogger(__name__)
14
15
class ShowCommand(Command):
    """Show information about one or more installed packages."""
    name = 'show'
    usage = """
      %prog [options] <package> ..."""
    summary = 'Show information about installed packages.'
    ignore_require_venv = True

    def __init__(self, *args, **kw):
        super(ShowCommand, self).__init__(*args, **kw)
        self.cmd_opts.add_option(
            '-f', '--files',
            dest='files',
            action='store_true',
            default=False,
            help='Show the full list of installed files for each package.')

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        if not args:
            logger.warning('ERROR: Please provide a package name or names.')
            return ERROR

        # ERROR when nothing matched (print_results returns False).
        results = search_packages_info(args)
        printed = print_results(
            results, list_files=options.files, verbose=options.verbose)
        return SUCCESS if printed else ERROR
46
47
def search_packages_info(query):
    """
    Gather details from installed distributions. Print distribution name,
    version, location, and installed files. Installed files requires a
    pip generated 'installed-files.txt' in the distributions '.egg-info'
    directory.
    """
    # Map canonicalized project name -> working_set distribution.
    installed = {}
    for p in pkg_resources.working_set:
        installed[canonicalize_name(p.project_name)] = p

    query_names = [canonicalize_name(name) for name in query]

    # Unknown names are silently dropped; only matches are yielded.
    for dist in [installed[pkg] for pkg in query_names if pkg in installed]:
        package = {
            'name': dist.project_name,
            'version': dist.version,
            'location': dist.location,
            'requires': [dep.project_name for dep in dist.requires()],
        }
        file_list = None
        metadata = None
        if isinstance(dist, pkg_resources.DistInfoDistribution):
            # RECORDs should be part of .dist-info metadatas
            if dist.has_metadata('RECORD'):
                lines = dist.get_metadata_lines('RECORD')
                # First CSV column of each RECORD line is the file path.
                paths = [l.split(',')[0] for l in lines]
                paths = [os.path.join(dist.location, p) for p in paths]
                file_list = [os.path.relpath(p, dist.location) for p in paths]

            if dist.has_metadata('METADATA'):
                metadata = dist.get_metadata('METADATA')
        else:
            # Otherwise use pip's log for .egg-info's
            if dist.has_metadata('installed-files.txt'):
                paths = dist.get_metadata_lines('installed-files.txt')
                # installed-files.txt paths are relative to the egg-info dir.
                paths = [os.path.join(dist.egg_info, p) for p in paths]
                file_list = [os.path.relpath(p, dist.location) for p in paths]

            if dist.has_metadata('PKG-INFO'):
                metadata = dist.get_metadata('PKG-INFO')

        if dist.has_metadata('entry_points.txt'):
            entry_points = dist.get_metadata_lines('entry_points.txt')
            package['entry_points'] = entry_points

        if dist.has_metadata('INSTALLER'):
            # First non-blank line names the tool that installed the dist.
            for line in dist.get_metadata_lines('INSTALLER'):
                if line.strip():
                    package['installer'] = line.strip()
                    break

        # @todo: Should pkg_resources.Distribution have a
        # `get_pkg_info` method?
        # NOTE(review): `metadata` may still be None here when neither
        # METADATA nor PKG-INFO exists; feed()/splitlines() would then
        # raise — presumably every installed dist ships one of them.
        feed_parser = FeedParser()
        feed_parser.feed(metadata)
        pkg_info_dict = feed_parser.close()
        for key in ('metadata-version', 'summary',
                    'home-page', 'author', 'author-email', 'license'):
            package[key] = pkg_info_dict.get(key)

        # It looks like FeedParser cannot deal with repeated headers
        classifiers = []
        for line in metadata.splitlines():
            if line.startswith('Classifier: '):
                classifiers.append(line[len('Classifier: '):])
        package['classifiers'] = classifiers

        if file_list:
            package['files'] = sorted(file_list)
        yield package
119
120
def print_results(distributions, list_files=False, verbose=False):
    """
    Print the informations from installed distributions found.

    Returns True when at least one distribution was printed.
    """
    results_printed = False
    first = True
    for dist in distributions:
        results_printed = True
        if not first:
            logger.info("---")
        first = False

        name = dist.get('name', '')
        # NOTE(review): this membership test is case-sensitive; requirement
        # names and project names can differ in case/dash-underscore, so
        # canonicalizing both sides might find more reverse-deps — confirm.
        required_by = [
            pkg.project_name for pkg in pkg_resources.working_set
            if name in [required.name for required in pkg.requires()]
        ]

        logger.info("Name: %s", name)
        logger.info("Version: %s", dist.get('version', ''))
        logger.info("Summary: %s", dist.get('summary', ''))
        logger.info("Home-page: %s", dist.get('home-page', ''))
        logger.info("Author: %s", dist.get('author', ''))
        logger.info("Author-email: %s", dist.get('author-email', ''))
        logger.info("License: %s", dist.get('license', ''))
        logger.info("Location: %s", dist.get('location', ''))
        logger.info("Requires: %s", ', '.join(dist.get('requires', [])))
        logger.info("Required-by: %s", ', '.join(required_by))

        if verbose:
            logger.info("Metadata-Version: %s",
                        dist.get('metadata-version', ''))
            logger.info("Installer: %s", dist.get('installer', ''))
            logger.info("Classifiers:")
            for classifier in dist.get('classifiers', []):
                logger.info("  %s", classifier)
            logger.info("Entry-points:")
            for entry in dist.get('entry_points', []):
                logger.info("  %s", entry.strip())
        if list_files:
            logger.info("Files:")
            for line in dist.get('files', []):
                logger.info("  %s", line.strip())
            if "files" not in dist:
                logger.info("Cannot locate installed-files.txt")
    return results_printed
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/uninstall.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/uninstall.py
new file mode 100644
index 0000000..3bfa07f
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/uninstall.py
@@ -0,0 +1,71 @@
1from __future__ import absolute_import
2
3from pip._vendor.packaging.utils import canonicalize_name
4
5from pip._internal.basecommand import Command
6from pip._internal.exceptions import InstallationError
7from pip._internal.req import InstallRequirement, parse_requirements
8
9
class UninstallCommand(Command):
    """
    Uninstall packages.

    pip is able to uninstall most installed packages. Known exceptions are:

    - Pure distutils packages installed with ``python setup.py install``, which
      leave behind no metadata to determine what files were installed.
    - Script wrappers installed by ``python setup.py develop``.
    """
    name = 'uninstall'
    usage = """
      %prog [options] <package> ...
      %prog [options] -r <requirements file> ..."""
    summary = 'Uninstall packages.'

    def __init__(self, *args, **kw):
        super(UninstallCommand, self).__init__(*args, **kw)
        self.cmd_opts.add_option(
            '-r', '--requirement',
            dest='requirements',
            action='append',
            default=[],
            metavar='file',
            help='Uninstall all the packages listed in the given requirements '
                 'file.  This option can be used multiple times.',
        )
        self.cmd_opts.add_option(
            '-y', '--yes',
            dest='yes',
            action='store_true',
            help="Don't ask for confirmation of uninstall deletions.")

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        with self._build_session(options) as session:
            # Canonical name -> requirement; later mentions overwrite
            # earlier ones so each project is uninstalled once.
            reqs_to_uninstall = {}

            # Requirements named directly on the command line.
            for name in args:
                req = InstallRequirement.from_line(
                    name, isolated=options.isolated_mode,
                )
                if req.name:
                    reqs_to_uninstall[canonicalize_name(req.name)] = req

            # Requirements pulled in from -r/--requirement files.
            for filename in options.requirements:
                for req in parse_requirements(
                        filename, options=options, session=session):
                    if req.name:
                        reqs_to_uninstall[canonicalize_name(req.name)] = req

            if not reqs_to_uninstall:
                raise InstallationError(
                    'You must give at least one requirement to %(name)s (see '
                    '"pip help %(name)s")' % dict(name=self.name)
                )

            for req in reqs_to_uninstall.values():
                uninstall_pathset = req.uninstall(
                    auto_confirm=options.yes, verbose=self.verbosity > 0,
                )
                if uninstall_pathset:
                    uninstall_pathset.commit()
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/wheel.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/wheel.py
new file mode 100644
index 0000000..ed8cdfc
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/wheel.py
@@ -0,0 +1,179 @@
1# -*- coding: utf-8 -*-
2from __future__ import absolute_import
3
4import logging
5import os
6
7from pip._internal import cmdoptions
8from pip._internal.basecommand import RequirementCommand
9from pip._internal.cache import WheelCache
10from pip._internal.exceptions import CommandError, PreviousBuildDirError
11from pip._internal.operations.prepare import RequirementPreparer
12from pip._internal.req import RequirementSet
13from pip._internal.resolve import Resolver
14from pip._internal.utils.temp_dir import TempDirectory
15from pip._internal.wheel import WheelBuilder
16
17logger = logging.getLogger(__name__)
18
19
class WheelCommand(RequirementCommand):
    """
    Build Wheel archives for your requirements and dependencies.

    Wheel is a built-package format, and offers the advantage of not
    recompiling your software during every install. For more details, see the
    wheel docs: https://wheel.readthedocs.io/en/latest/

    Requirements: setuptools>=0.8, and wheel.

    'pip wheel' uses the bdist_wheel setuptools extension from the wheel
    package to build individual wheels.

    """

    name = 'wheel'
    usage = """
      %prog [options] <requirement specifier> ...
      %prog [options] -r <requirements file> ...
      %prog [options] [-e] <vcs project url> ...
      %prog [options] [-e] <local project path> ...
      %prog [options] <archive url/path> ..."""

    summary = 'Build wheels from your requirements.'

    def __init__(self, *args, **kw):
        super(WheelCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        cmd_opts.add_option(
            '-w', '--wheel-dir',
            dest='wheel_dir',
            metavar='dir',
            default=os.curdir,
            help=("Build wheels into <dir>, where the default is the "
                  "current working directory."),
        )
        cmd_opts.add_option(cmdoptions.no_binary())
        cmd_opts.add_option(cmdoptions.only_binary())
        cmd_opts.add_option(
            '--build-option',
            dest='build_options',
            metavar='options',
            action='append',
            help="Extra arguments to be supplied to 'setup.py bdist_wheel'.",
        )
        cmd_opts.add_option(cmdoptions.no_build_isolation())
        cmd_opts.add_option(cmdoptions.constraints())
        cmd_opts.add_option(cmdoptions.editable())
        cmd_opts.add_option(cmdoptions.requirements())
        cmd_opts.add_option(cmdoptions.src())
        cmd_opts.add_option(cmdoptions.ignore_requires_python())
        cmd_opts.add_option(cmdoptions.no_deps())
        cmd_opts.add_option(cmdoptions.build_dir())
        cmd_opts.add_option(cmdoptions.progress_bar())

        cmd_opts.add_option(
            '--global-option',
            dest='global_options',
            action='append',
            metavar='options',
            help="Extra global options to be supplied to the setup.py "
                 "call before the 'bdist_wheel' command.")

        cmd_opts.add_option(
            '--pre',
            action='store_true',
            default=False,
            help=("Include pre-release and development versions. By default, "
                  "pip only finds stable versions."),
        )

        cmd_opts.add_option(cmdoptions.no_clean())
        cmd_opts.add_option(cmdoptions.require_hashes())

        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    def run(self, options, args):
        """Resolve the requirements in *args* and build wheels for them."""
        # --build-option/--global-option conflict with some install options.
        cmdoptions.check_install_build_global(options)

        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index:
            logger.debug('Ignoring indexes: %s', ','.join(index_urls))
            index_urls = []

        if options.build_dir:
            options.build_dir = os.path.abspath(options.build_dir)

        options.src_dir = os.path.abspath(options.src_dir)

        with self._build_session(options) as session:
            finder = self._build_package_finder(options, session)
            # Only auto-delete the build dir when neither --no-clean nor an
            # explicit --build directory was requested.
            build_delete = (not (options.no_clean or options.build_dir))
            wheel_cache = WheelCache(options.cache_dir, options.format_control)

            with TempDirectory(
                options.build_dir, delete=build_delete, kind="wheel"
            ) as directory:
                requirement_set = RequirementSet(
                    require_hashes=options.require_hashes,
                )

                try:
                    self.populate_requirement_set(
                        requirement_set, args, options, finder, session,
                        self.name, wheel_cache
                    )

                    # Fetches sdists/wheels into the build directory.
                    preparer = RequirementPreparer(
                        build_dir=directory.path,
                        src_dir=options.src_dir,
                        download_dir=None,
                        wheel_download_dir=options.wheel_dir,
                        progress_bar=options.progress_bar,
                        build_isolation=options.build_isolation,
                    )

                    resolver = Resolver(
                        preparer=preparer,
                        finder=finder,
                        session=session,
                        wheel_cache=wheel_cache,
                        use_user_site=False,
                        upgrade_strategy="to-satisfy-only",
                        force_reinstall=False,
                        ignore_dependencies=options.ignore_dependencies,
                        ignore_requires_python=options.ignore_requires_python,
                        ignore_installed=True,
                        isolated=options.isolated_mode,
                    )
                    resolver.resolve(requirement_set)

                    # build wheels
                    wb = WheelBuilder(
                        finder, preparer, wheel_cache,
                        build_options=options.build_options or [],
                        global_options=options.global_options or [],
                        no_clean=options.no_clean,
                    )
                    wheels_built_successfully = wb.build(
                        requirement_set.requirements.values(), session=session,
                    )
                    if not wheels_built_successfully:
                        raise CommandError(
                            "Failed to build one or more wheels"
                        )
                except PreviousBuildDirError:
                    # Keep the offending build dir around for inspection.
                    options.no_clean = True
                    raise
                finally:
                    if not options.no_clean:
                        requirement_set.cleanup_files()
                        wheel_cache.cleanup()
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/compat.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/compat.py
new file mode 100644
index 0000000..064717d
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/compat.py
@@ -0,0 +1,235 @@
1"""Stuff that differs in different Python versions and platform
2distributions."""
3from __future__ import absolute_import, division
4
5import codecs
6import locale
7import logging
8import os
9import shutil
10import sys
11
12from pip._vendor.six import text_type
13
14try:
15 import ipaddress
16except ImportError:
17 try:
18 from pip._vendor import ipaddress # type: ignore
19 except ImportError:
20 import ipaddr as ipaddress # type: ignore
21 ipaddress.ip_address = ipaddress.IPAddress
22 ipaddress.ip_network = ipaddress.IPNetwork
23
24
25__all__ = [
26 "ipaddress", "uses_pycache", "console_to_str", "native_str",
27 "get_path_uid", "stdlib_pkgs", "WINDOWS", "samefile", "get_terminal_size",
28]
29
30
31logger = logging.getLogger(__name__)
32
# Determine whether this interpreter writes bytecode to __pycache__, and
# expose cache_from_source() to map a .py path to its bytecode path.
if sys.version_info >= (3, 4):
    uses_pycache = True
    from importlib.util import cache_from_source
else:
    import imp

    try:
        cache_from_source = imp.cache_from_source  # type: ignore
    except AttributeError:
        # does not use __pycache__
        cache_from_source = None

    # Older interpreters without cache_from_source keep bytecode alongside
    # the source instead of in __pycache__.
    uses_pycache = cache_from_source is not None
46
47
# Name of the codec error handler used when decoding undecodable bytes.
# On Python 3.5+ the built-in "backslashreplace" handler works for
# decoding; older versions need a hand-rolled equivalent registered below.
if sys.version_info >= (3, 5):
    backslashreplace_decode = "backslashreplace"
else:
    # In version 3.4 and older, backslashreplace exists
    # but does not support use for decoding.
    # We implement our own replace handler for this
    # situation, so that we can consistently use
    # backslash replacement for all versions.
    def backslashreplace_decode_fn(err):
        # err.object holds the bytes being decoded; escape the failing span.
        raw_bytes = (err.object[i] for i in range(err.start, err.end))
        if sys.version_info[0] == 2:
            # Python 2 gave us characters - convert to numeric bytes
            raw_bytes = (ord(b) for b in raw_bytes)
        # Resume decoding after the bad span (err.end).
        return u"".join(u"\\x%x" % c for c in raw_bytes), err.end
    codecs.register_error(
        "backslashreplace_decode",
        backslashreplace_decode_fn,
    )
    backslashreplace_decode = "backslashreplace_decode"
67
68
def console_to_str(data):
    """Return a string, safe for output, of subprocess output.

    We assume the data is in the locale preferred encoding.
    If it won't decode properly, we warn the user but decode as
    best we can.

    We also ensure that the output can be safely written to
    standard output without encoding errors.
    """

    # Pick the encoding we assume the bytes use: the locale's preferred
    # encoding, except that a missing or ASCII preference falls back to
    # UTF-8 as a more useful guess.
    assumed_encoding = locale.getpreferredencoding()
    if not assumed_encoding or codecs.lookup(assumed_encoding).name == "ascii":
        assumed_encoding = "utf-8"

    # Decode, warning and backslash-escaping bad bytes on failure rather
    # than blowing up on arbitrary subprocess output.
    try:
        decoded = data.decode(assumed_encoding)
    except UnicodeDecodeError:
        logger.warning(
            "Subprocess output does not appear to be encoded as %s",
            assumed_encoding,
        )
        decoded = data.decode(assumed_encoding, errors=backslashreplace_decode)

    # Round-trip the text through the output encoding (with replacement of
    # unencodable characters) so it can later be printed without errors.
    # stderr's encoding is used because stderr is less likely to be
    # redirected; if no encoding can be determined this step is skipped on
    # the assumption that output is wrapped by something that won't fail.
    # The two-step getattr copes with sys.__stderr__ being absent or
    # lacking an encoding attribute in unusual embedding situations.
    stderr = getattr(sys, "__stderr__", None)
    output_encoding = getattr(stderr, "encoding", None)

    if output_encoding:
        decoded = decoded.encode(output_encoding, errors="backslashreplace")
        decoded = decoded.decode(output_encoding)

    return decoded
118
119
if sys.version_info >= (3,):
    def native_str(s, replace=False):
        """Coerce *s* to the native ``str`` type (text on Python 3)."""
        if not isinstance(s, bytes):
            return s
        return s.decode('utf-8', 'replace' if replace else 'strict')

else:
    def native_str(s, replace=False):
        """Coerce *s* to the native ``str`` type (bytes on Python 2)."""
        # *replace* is ignored here: encoding unicode as UTF-8 can't fail.
        if not isinstance(s, text_type):
            return s
        return s.encode('utf-8')
132
133
def get_path_uid(path):
    """
    Return path's uid.

    Does not follow symlinks:
    https://github.com/pypa/pip/pull/935#discussion_r5307003

    Placed this function in compat due to differences on AIX and
    Jython, that should eventually go away.

    :raises OSError: When path is a symlink or can't be read.
    """
    if not hasattr(os, 'O_NOFOLLOW'):
        # AIX and Jython fallback.
        # WARNING: time of check vulnerability, but best we can do w/o NOFOLLOW
        if os.path.islink(path):
            # raise OSError for parity with os.O_NOFOLLOW branch below
            raise OSError(
                "%s is a symlink; Will not return uid for symlinks" % path
            )
        # older versions of Jython don't have `os.fstat`
        return os.stat(path).st_uid

    # O_NOFOLLOW makes the open itself fail on a symlink, so the stat is
    # guaranteed to describe the real file.
    fd = os.open(path, os.O_RDONLY | os.O_NOFOLLOW)
    owner_uid = os.fstat(fd).st_uid
    os.close(fd)
    return owner_uid
161
162
def expanduser(path):
    """
    Expand ~ and ~user constructions.

    Includes a workaround for http://bugs.python.org/issue14768
    """
    expanded = os.path.expanduser(path)
    # bpo-14768: when HOME is '/', '~/x' expands to '//x'; collapse the
    # duplicated leading slash.
    needs_fix = path.startswith('~/') and expanded.startswith('//')
    return expanded[1:] if needs_fix else expanded
173
174
# packages in the stdlib that may have installation metadata, but should not be
# considered 'installed'. this theoretically could be determined based on
# dist.location (py27:`sysconfig.get_paths()['stdlib']`,
# py26:sysconfig.get_config_vars('LIBDEST')), but fear platform variation may
# make this ineffective, so hard-coding
stdlib_pkgs = {"python", "wsgiref", "argparse"}


# windows detection, covers cpython and ironpython
# (IronPython reports sys.platform == 'cli', hence the second clause)
WINDOWS = (sys.platform.startswith("win") or
           (sys.platform == 'cli' and os.name == 'nt'))
186
187
def samefile(file1, file2):
    """Provide an alternative for os.path.samefile on Windows/Python2"""
    if not hasattr(os.path, 'samefile'):
        # Fall back to comparing normalized absolute paths.
        norm1 = os.path.normcase(os.path.abspath(file1))
        norm2 = os.path.normcase(os.path.abspath(file2))
        return norm1 == norm2
    return os.path.samefile(file1, file2)
196
197
if hasattr(shutil, 'get_terminal_size'):
    def get_terminal_size():
        """
        Returns a tuple (x, y) representing the width(x) and the height(y)
        in characters of the terminal window.
        """
        # Python 3.3+: delegate to the stdlib implementation.
        return tuple(shutil.get_terminal_size())
else:
    def get_terminal_size():
        """
        Returns a tuple (x, y) representing the width(x) and the height(y)
        in characters of the terminal window.
        """
        def ioctl_GWINSZ(fd):
            # Ask the tty behind fd for its window size; returns
            # (rows, cols) or None when fd is not a usable terminal.
            try:
                import fcntl
                import termios
                import struct
                cr = struct.unpack_from(
                    'hh',
                    fcntl.ioctl(fd, termios.TIOCGWINSZ, '12345678')
                )
            except Exception:
                # Was a bare `except:`; narrowed so KeyboardInterrupt and
                # SystemExit are no longer swallowed here.
                return None
            if cr == (0, 0):
                return None
            return cr
        cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
        if not cr:
            # stdin/stdout/stderr may all be redirected; try the
            # controlling terminal directly.
            try:
                fd = os.open(os.ctermid(), os.O_RDONLY)
                cr = ioctl_GWINSZ(fd)
                os.close(fd)
            except Exception:
                # Same bare-except narrowing as above.
                pass
        if not cr:
            # Last resort: environment overrides, then the 80x25 default.
            cr = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80))
        return int(cr[1]), int(cr[0])
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/configuration.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/configuration.py
new file mode 100644
index 0000000..07af373
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/configuration.py
@@ -0,0 +1,378 @@
1"""Configuration management setup
2
3Some terminology:
4- name
5 As written in config files.
6- value
7 Value associated with a name
8- key
9 Name combined with it's section (section.name)
10- variant
11 A single word describing where the configuration key-value pair came from
12"""
13
14import locale
15import logging
16import os
17
18from pip._vendor import six
19from pip._vendor.six.moves import configparser
20
21from pip._internal.exceptions import ConfigurationError
22from pip._internal.locations import (
23 legacy_config_file, new_config_file, running_under_virtualenv,
24 site_config_files, venv_config_file,
25)
26from pip._internal.utils.misc import ensure_dir, enum
27from pip._internal.utils.typing import MYPY_CHECK_RUNNING
28
29if MYPY_CHECK_RUNNING:
30 from typing import Any, Dict, Iterable, List, NewType, Optional, Tuple
31
32 RawConfigParser = configparser.RawConfigParser # Shorthand
33 Kind = NewType("Kind", str)
34
35logger = logging.getLogger(__name__)
36
37
38# NOTE: Maybe use the optionx attribute to normalize keynames.
39def _normalize_name(name):
40 # type: (str) -> str
41 """Make a name consistent regardless of source (environment or file)
42 """
43 name = name.lower().replace('_', '-')
44 if name.startswith('--'):
45 name = name[2:] # only prefer long opts
46 return name
47
48
49def _disassemble_key(name):
50 # type: (str) -> List[str]
51 return name.split(".", 1)
52
53
# The kinds of configurations there are: each value is a "variant" label
# recording where a configuration key-value pair came from.
kinds = enum(
    USER="user",  # User Specific
    GLOBAL="global",  # System Wide
    VENV="venv",  # Virtual Environment Specific
    ENV="env",  # from PIP_CONFIG_FILE
    ENV_VAR="env-var",  # from Environment Variables
)
62
63
class Configuration(object):
    """Handles management of configuration.

    Provides an interface to accessing and managing configuration files.

    This class provides an API that takes "section.key-name" style keys and
    stores the value associated with it as "key-name" under the section
    "section".

    This allows for a clean interface wherein both the section and the
    key-name are preserved in an easy to manage form in the configuration
    files and the data stored is also nice.
    """

    def __init__(self, isolated, load_only=None):
        # type: (bool, Kind) -> None
        """Initialize the configuration store.

        :param isolated: when True, environment variables and user config
            files are ignored.
        :param load_only: restrict loading/modification to one variant
            (kinds.USER, kinds.GLOBAL or kinds.VENV), or None for all.
        :raises ConfigurationError: when load_only is not a valid variant.
        """
        super(Configuration, self).__init__()

        _valid_load_only = [kinds.USER, kinds.GLOBAL, kinds.VENV, None]
        if load_only not in _valid_load_only:
            raise ConfigurationError(
                "Got invalid value for load_only - should be one of {}".format(
                    ", ".join(map(repr, _valid_load_only[:-1]))
                )
            )
        self.isolated = isolated  # type: bool
        self.load_only = load_only  # type: Optional[Kind]

        # The order here determines the override order.
        self._override_order = [
            kinds.GLOBAL, kinds.USER, kinds.VENV, kinds.ENV, kinds.ENV_VAR
        ]

        # PIP_VERSION / PIP_HELP are CLI switches, not configuration.
        self._ignore_env_names = ["version", "help"]

        # Because we keep track of where we got the data from
        self._parsers = {
            variant: [] for variant in self._override_order
        }  # type: Dict[Kind, List[Tuple[str, RawConfigParser]]]
        self._config = {
            variant: {} for variant in self._override_order
        }  # type: Dict[Kind, Dict[str, Any]]
        self._modified_parsers = []  # type: List[Tuple[str, RawConfigParser]]

    def load(self):
        # type: () -> None
        """Loads configuration from configuration files and environment
        """
        self._load_config_files()
        if not self.isolated:
            self._load_environment_vars()

    def get_file_to_edit(self):
        # type: () -> Optional[str]
        """Returns the file with highest priority in configuration
        """
        assert self.load_only is not None, \
            "Need to be specified a file to be editing"

        try:
            return self._get_parser_to_modify()[0]
        except IndexError:
            return None

    def items(self):
        # type: () -> Iterable[Tuple[str, Any]]
        """Returns key-value pairs like dict.items() representing the loaded
        configuration
        """
        return self._dictionary.items()

    def get_value(self, key):
        # type: (str) -> Any
        """Get a value from the configuration.

        :raises ConfigurationError: when the key is not present.
        """
        try:
            return self._dictionary[key]
        except KeyError:
            raise ConfigurationError("No such key - {}".format(key))

    def set_value(self, key, value):
        # type: (str, Any) -> None
        """Modify a value in the configuration.

        Updates both the on-disk parser (marked for saving) and the
        in-memory configuration for the load_only variant.
        """
        self._ensure_have_load_only()

        fname, parser = self._get_parser_to_modify()

        if parser is not None:
            section, name = _disassemble_key(key)

            # Modify the parser and the configuration
            if not parser.has_section(section):
                parser.add_section(section)
            parser.set(section, name, value)

        self._config[self.load_only][key] = value
        self._mark_as_modified(fname, parser)

    def unset_value(self, key):
        # type: (str) -> None
        """Unset a value in the configuration.

        :raises ConfigurationError: when the key is not present in the
            load_only variant, or the internal state is inconsistent.
        """
        self._ensure_have_load_only()

        if key not in self._config[self.load_only]:
            raise ConfigurationError("No such key - {}".format(key))

        fname, parser = self._get_parser_to_modify()

        if parser is not None:
            section, name = _disassemble_key(key)

            # Remove the key in the parser
            modified_something = False
            if parser.has_section(section):
                # Returns whether the option was removed or not
                modified_something = parser.remove_option(section, name)

            if modified_something:
                # name removed from parser, section may now be empty
                section_iter = iter(parser.items(section))
                try:
                    val = six.next(section_iter)
                except StopIteration:
                    val = None

                if val is None:
                    parser.remove_section(section)

                self._mark_as_modified(fname, parser)
            else:
                raise ConfigurationError(
                    "Fatal Internal error [id=1]. Please report as a bug."
                )

        del self._config[self.load_only][key]

    def save(self):
        # type: () -> None
        """Save the current in-memory state to the modified files.
        """
        self._ensure_have_load_only()

        for fname, parser in self._modified_parsers:
            logger.info("Writing to %s", fname)

            # Ensure directory exists.
            ensure_dir(os.path.dirname(fname))

            with open(fname, "w") as f:
                parser.write(f)  # type: ignore

    #
    # Private routines
    #

    def _ensure_have_load_only(self):
        # type: () -> None
        # Guard used by every mutating operation: a single target variant
        # must have been chosen at construction time.
        if self.load_only is None:
            raise ConfigurationError("Needed a specific file to be modifying.")
        logger.debug("Will be working with %s variant only", self.load_only)

    @property
    def _dictionary(self):
        # type: () -> Dict[str, Any]
        """A dictionary representing the loaded configuration.
        """
        # NOTE: Dictionaries are not populated if not loaded. So, conditionals
        # are not needed here.
        retval = {}

        # Later variants in _override_order win on key collisions.
        for variant in self._override_order:
            retval.update(self._config[variant])

        return retval

    def _load_config_files(self):
        # type: () -> None
        """Loads configuration from configuration files
        """
        config_files = dict(self._iter_config_files())
        if config_files[kinds.ENV][0:1] == [os.devnull]:
            logger.debug(
                "Skipping loading configuration files due to "
                "environment's PIP_CONFIG_FILE being os.devnull"
            )
            return

        for variant, files in config_files.items():
            for fname in files:
                # If there's specific variant set in `load_only`, load only
                # that variant, not the others.
                if self.load_only is not None and variant != self.load_only:
                    logger.debug(
                        "Skipping file '%s' (variant: %s)", fname, variant
                    )
                    continue

                parser = self._load_file(variant, fname)

                # Keeping track of the parsers used
                self._parsers[variant].append((fname, parser))

    def _load_file(self, variant, fname):
        # type: (Kind, str) -> RawConfigParser
        """Parse one file and merge its values into the variant's config."""
        logger.debug("For variant '%s', will try loading '%s'", variant, fname)
        parser = self._construct_parser(fname)

        for section in parser.sections():
            items = parser.items(section)
            self._config[variant].update(self._normalized_keys(section, items))

        return parser

    def _construct_parser(self, fname):
        # type: (str) -> RawConfigParser
        """Build a RawConfigParser, reading *fname* when it exists."""
        parser = configparser.RawConfigParser()
        # If there is no such file, don't bother reading it but create the
        # parser anyway, to hold the data.
        # Doing this is useful when modifying and saving files, where we don't
        # need to construct a parser.
        if os.path.exists(fname):
            try:
                parser.read(fname)
            except UnicodeDecodeError:
                raise ConfigurationError((
                    "ERROR: "
                    "Configuration file contains invalid %s characters.\n"
                    "Please fix your configuration, located at %s\n"
                ) % (locale.getpreferredencoding(False), fname))
        return parser

    def _load_environment_vars(self):
        # type: () -> None
        """Loads configuration from environment variables
        """
        self._config[kinds.ENV_VAR].update(
            self._normalized_keys(":env:", self._get_environ_vars())
        )

    def _normalized_keys(self, section, items):
        # type: (str, Iterable[Tuple[str, Any]]) -> Dict[str, Any]
        """Normalizes items to construct a dictionary with normalized keys.

        This routine is where the names become keys and are made the same
        regardless of source - configuration files or environment.
        """
        normalized = {}
        for name, val in items:
            key = section + "." + _normalize_name(name)
            normalized[key] = val
        return normalized

    def _get_environ_vars(self):
        # type: () -> Iterable[Tuple[str, str]]
        """Returns a generator with all environmental vars with prefix PIP_"""
        for key, val in os.environ.items():
            should_be_yielded = (
                key.startswith("PIP_") and
                key[4:].lower() not in self._ignore_env_names
            )
            if should_be_yielded:
                # Yield the name with the PIP_ prefix stripped, lowercased.
                yield key[4:].lower(), val

    # XXX: This is patched in the tests.
    def _iter_config_files(self):
        # type: () -> Iterable[Tuple[Kind, List[str]]]
        """Yields variant and configuration files associated with it.

        This should be treated like items of a dictionary.
        """
        # SMELL: Move the conditions out of this function

        # environment variables have the lowest priority
        config_file = os.environ.get('PIP_CONFIG_FILE', None)
        if config_file is not None:
            yield kinds.ENV, [config_file]
        else:
            yield kinds.ENV, []

        # at the base we have any global configuration
        yield kinds.GLOBAL, list(site_config_files)

        # per-user configuration next
        should_load_user_config = not self.isolated and not (
            config_file and os.path.exists(config_file)
        )
        if should_load_user_config:
            # The legacy config file is overridden by the new config file
            yield kinds.USER, [legacy_config_file, new_config_file]

        # finally virtualenv configuration first trumping others
        if running_under_virtualenv():
            yield kinds.VENV, [venv_config_file]

    def _get_parser_to_modify(self):
        # type: () -> Tuple[str, RawConfigParser]
        # Determine which parser to modify
        parsers = self._parsers[self.load_only]
        if not parsers:
            # This should not happen if everything works correctly.
            raise ConfigurationError(
                "Fatal Internal error [id=2]. Please report as a bug."
            )

        # Use the highest priority parser.
        return parsers[-1]

    # XXX: This is patched in the tests.
    def _mark_as_modified(self, fname, parser):
        # type: (str, RawConfigParser) -> None
        # Record the (file, parser) pair so save() writes it out; avoid
        # duplicate entries when the same parser is modified twice.
        file_parser_tuple = (fname, parser)
        if file_parser_tuple not in self._modified_parsers:
            self._modified_parsers.append(file_parser_tuple)
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/download.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/download.py
new file mode 100644
index 0000000..e0e2d24
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/download.py
@@ -0,0 +1,922 @@
1from __future__ import absolute_import
2
3import cgi
4import email.utils
5import getpass
6import json
7import logging
8import mimetypes
9import os
10import platform
11import re
12import shutil
13import sys
14
15from pip._vendor import requests, six, urllib3
16from pip._vendor.cachecontrol import CacheControlAdapter
17from pip._vendor.cachecontrol.caches import FileCache
18from pip._vendor.lockfile import LockError
19from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter
20from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth
21from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response
22from pip._vendor.requests.structures import CaseInsensitiveDict
23from pip._vendor.requests.utils import get_netrc_auth
24# NOTE: XMLRPC Client is not annotated in typeshed as on 2017-07-17, which is
25# why we ignore the type on this import
26from pip._vendor.six.moves import xmlrpc_client # type: ignore
27from pip._vendor.six.moves.urllib import parse as urllib_parse
28from pip._vendor.six.moves.urllib import request as urllib_request
29from pip._vendor.six.moves.urllib.parse import unquote as urllib_unquote
30from pip._vendor.urllib3.util import IS_PYOPENSSL
31
32import pip
33from pip._internal.compat import WINDOWS
34from pip._internal.exceptions import HashMismatch, InstallationError
35from pip._internal.locations import write_delete_marker_file
36from pip._internal.models import PyPI
37from pip._internal.utils.encoding import auto_decode
38from pip._internal.utils.filesystem import check_path_owner
39from pip._internal.utils.glibc import libc_ver
40from pip._internal.utils.logging import indent_log
41from pip._internal.utils.misc import (
42 ARCHIVE_EXTENSIONS, ask_path_exists, backup_dir, call_subprocess, consume,
43 display_path, format_size, get_installed_version, rmtree, splitext,
44 unpack_file,
45)
46from pip._internal.utils.setuptools_build import SETUPTOOLS_SHIM
47from pip._internal.utils.temp_dir import TempDirectory
48from pip._internal.utils.ui import DownloadProgressProvider
49from pip._internal.vcs import vcs
50
51try:
52 import ssl # noqa
53except ImportError:
54 ssl = None
55
56HAS_TLS = (ssl is not None) or IS_PYOPENSSL
57
58__all__ = ['get_file_content',
59 'is_url', 'url_to_path', 'path_to_url',
60 'is_archive_file', 'unpack_vcs_link',
61 'unpack_file_url', 'is_vcs_url', 'is_file_url',
62 'unpack_http_url', 'unpack_url']
63
64
65logger = logging.getLogger(__name__)
66
67
def user_agent():
    """
    Return a string representing the user agent.
    """
    data = {
        "installer": {"name": "pip", "version": pip.__version__},
        "python": platform.python_version(),
        "implementation": {
            "name": platform.python_implementation(),
        },
    }

    impl_name = data["implementation"]["name"]
    if impl_name == 'PyPy':
        # Drop the release-level/serial components for final releases.
        if sys.pypy_version_info.releaselevel == 'final':
            pypy_version_info = sys.pypy_version_info[:3]
        else:
            pypy_version_info = sys.pypy_version_info
        data["implementation"]["version"] = ".".join(
            str(part) for part in pypy_version_info
        )
    elif impl_name in ('CPython', 'Jython', 'IronPython'):
        # For Jython and IronPython this is a complete guess.
        data["implementation"]["version"] = platform.python_version()

    if sys.platform.startswith("linux"):
        from pip._vendor import distro
        # Keep only the truthy fields from the distro / libc probes.
        keyed = zip(["name", "version", "id"], distro.linux_distribution())
        distro_infos = {key: value for key, value in keyed if value}
        libc = {
            key: value
            for key, value in zip(["lib", "version"], libc_ver())
            if value
        }
        if libc:
            distro_infos["libc"] = libc
        if distro_infos:
            data["distro"] = distro_infos

    if sys.platform.startswith("darwin") and platform.mac_ver()[0]:
        data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]}

    # Record OS name/release only when the platform reports them.
    for key, value in (
        ("name", platform.system()),
        ("release", platform.release()),
    ):
        if value:
            data.setdefault("system", {})[key] = value

    if platform.machine():
        data["cpu"] = platform.machine()

    if HAS_TLS:
        data["openssl_version"] = ssl.OPENSSL_VERSION

    setuptools_version = get_installed_version("setuptools")
    if setuptools_version is not None:
        data["setuptools_version"] = setuptools_version

    # "pip/<version> <compact-json-blob>"
    return "{data[installer][name]}/{data[installer][version]} {json}".format(
        data=data,
        json=json.dumps(data, separators=(",", ":"), sort_keys=True),
    )
135
136
class MultiDomainBasicAuth(AuthBase):
    """Requests auth handler keeping HTTP Basic credentials per netloc.

    Credentials are looked up (in order) from previously stored values,
    the URL's embedded ``user:pass@`` component, and netrc; on a 401
    response the user may be prompted interactively for new ones.
    """

    def __init__(self, prompting=True):
        # type: ignore comments absent in original; prompting controls
        # whether handle_401 may ask the user for credentials.
        self.prompting = prompting
        # netloc (without embedded credentials) -> (username, password)
        self.passwords = {}

    def __call__(self, req):
        # Attach basic auth (when credentials are known) and register the
        # 401 retry hook on the outgoing request.
        parsed = urllib_parse.urlparse(req.url)

        # Get the netloc without any embedded credentials
        netloc = parsed.netloc.rsplit("@", 1)[-1]

        # Set the url of the request to the url without any credentials
        req.url = urllib_parse.urlunparse(parsed[:1] + (netloc,) + parsed[2:])

        # Use any stored credentials that we have for this netloc
        username, password = self.passwords.get(netloc, (None, None))

        # Extract credentials embedded in the url if we have none stored
        if username is None:
            username, password = self.parse_credentials(parsed.netloc)

        # Get creds from netrc if we still don't have them
        if username is None and password is None:
            netrc_auth = get_netrc_auth(req.url)
            username, password = netrc_auth if netrc_auth else (None, None)

        if username or password:
            # Store the username and password
            self.passwords[netloc] = (username, password)

            # Send the basic auth with this request
            req = HTTPBasicAuth(username or "", password or "")(req)

        # Attach a hook to handle 401 responses
        req.register_hook("response", self.handle_401)

        return req

    def handle_401(self, resp, **kwargs):
        # Response hook: on a 401, optionally prompt for credentials and
        # resend the request once with them.
        # We only care about 401 responses, anything else we want to just
        #                     pass through the actual response
        if resp.status_code != 401:
            return resp

        # We are not able to prompt the user so simply return the response
        if not self.prompting:
            return resp

        parsed = urllib_parse.urlparse(resp.url)

        # Prompt the user for a new username and password
        username = six.moves.input("User for %s: " % parsed.netloc)
        password = getpass.getpass("Password: ")

        # Store the new username and password to use for future requests
        if username or password:
            self.passwords[parsed.netloc] = (username, password)

        # Consume content and release the original connection to allow our new
        # request to reuse the same one.
        resp.content
        resp.raw.release_conn()

        # Add our new username and password to the request
        req = HTTPBasicAuth(username or "", password or "")(resp.request)

        # Send our new request
        new_resp = resp.connection.send(req, **kwargs)
        new_resp.history.append(resp)

        return new_resp

    def parse_credentials(self, netloc):
        # Extract (user, password) embedded in a netloc's userinfo part,
        # URL-unquoting both; (None, None) when no credentials present.
        if "@" in netloc:
            userinfo = netloc.rsplit("@", 1)[0]
            if ":" in userinfo:
                user, pwd = userinfo.split(":", 1)
                return (urllib_unquote(user), urllib_unquote(pwd))
            return urllib_unquote(userinfo), None
        return None, None
218
219
class LocalFSAdapter(BaseAdapter):
    """Transport adapter that serves file:// URLs from the local disk."""

    def send(self, request, stream=None, timeout=None, verify=None, cert=None,
             proxies=None):
        # Signature mirrors BaseAdapter.send; most parameters are unused
        # since no network is involved.
        local_path = url_to_path(request.url)

        response = Response()
        response.status_code = 200
        response.url = request.url

        try:
            stats = os.stat(local_path)
        except OSError as exc:
            # A missing or unreadable file maps onto an HTTP-style 404.
            response.status_code = 404
            response.raw = exc
        else:
            modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
            content_type = mimetypes.guess_type(local_path)[0] or "text/plain"
            response.headers = CaseInsensitiveDict({
                "Content-Type": content_type,
                "Content-Length": stats.st_size,
                "Last-Modified": modified,
            })

            # Hand the open file object to the response; closing the
            # response closes the file.
            response.raw = open(local_path, "rb")
            response.close = response.raw.close

        return response

    def close(self):
        # Nothing to clean up for local filesystem access.
        pass
251
252
class SafeFileCache(FileCache):
    """
    A file based cache which is safe to use even when the target directory may
    not be accessible or writable.
    """

    def __init__(self, *args, **kwargs):
        super(SafeFileCache, self).__init__(*args, **kwargs)

        # The cache is only usable when the directory that will contain it
        # (or, when it doesn't exist, the closest existing parent) is owned
        # by the user running pip.  Otherwise warn and disable caching -
        # e.g. after `sudo pip` left root-owned directories behind.
        if not check_path_owner(self.directory):
            logger.warning(
                "The directory '%s' or its parent directory is not owned by "
                "the current user and the cache has been disabled. Please "
                "check the permissions and owner of that directory. If "
                "executing pip with sudo, you may want sudo's -H flag.",
                self.directory,
            )

            # Set our directory to None to disable the Cache
            self.directory = None

    def _call_ignoring_cache_errors(self, method, *args, **kwargs):
        # Shared no-op/suppression wrapper for get/set/delete: a disabled
        # cache (directory is None) does nothing, and any locking or
        # filesystem error silently skips caching so the request is
        # processed as if caching wasn't enabled.
        if self.directory is None:
            return
        try:
            return method(*args, **kwargs)
        except (LockError, OSError, IOError):
            pass

    def get(self, *args, **kwargs):
        return self._call_ignoring_cache_errors(
            super(SafeFileCache, self).get, *args, **kwargs
        )

    def set(self, *args, **kwargs):
        return self._call_ignoring_cache_errors(
            super(SafeFileCache, self).set, *args, **kwargs
        )

    def delete(self, *args, **kwargs):
        return self._call_ignoring_cache_errors(
            super(SafeFileCache, self).delete, *args, **kwargs
        )
317
318
class InsecureHTTPAdapter(HTTPAdapter):
    """Transport adapter that disables TLS certificate verification."""

    def cert_verify(self, conn, url, verify, cert):
        # Force "no verification" regardless of the requested settings.
        conn.cert_reqs = 'CERT_NONE'
        conn.ca_certs = None
324
325
class PipSession(requests.Session):
    """A requests.Session with pip's defaults wired in.

    Adds the pip user agent, multi-domain basic auth, retry handling,
    caching for securely-fetched origins only, file:// URL support, and
    per-host opt-outs from TLS verification.
    """

    # Default timeout for every request; applied in request() below.
    timeout = None

    def __init__(self, *args, **kwargs):
        # Pop our keyword arguments before delegating to requests.Session.
        retries = kwargs.pop("retries", 0)
        cache = kwargs.pop("cache", None)
        insecure_hosts = kwargs.pop("insecure_hosts", [])

        super(PipSession, self).__init__(*args, **kwargs)

        # Attach our User Agent to the request
        self.headers["User-Agent"] = user_agent()

        # Attach our Authentication handler to the session
        self.auth = MultiDomainBasicAuth()

        # Create our urllib3.Retry instance which will allow us to customize
        # how we handle retries.
        retries = urllib3.Retry(
            # Set the total number of retries that a particular request can
            # have.
            total=retries,

            # A 503 error from PyPI typically means that the Fastly -> Origin
            # connection got interrupted in some way. A 503 error in general
            # is typically considered a transient error so we'll go ahead and
            # retry it.
            # A 500 may indicate transient error in Amazon S3
            # A 520 or 527 - may indicate transient error in CloudFlare
            status_forcelist=[500, 503, 520, 527],

            # Add a small amount of back off between failed requests in
            # order to prevent hammering the service.
            backoff_factor=0.25,
        )

        # We want to _only_ cache responses on securely fetched origins. We do
        # this because we can't validate the response of an insecurely fetched
        # origin, and we don't want someone to be able to poison the cache and
        # require manual eviction from the cache to fix it.
        if cache:
            secure_adapter = CacheControlAdapter(
                cache=SafeFileCache(cache, use_dir_lock=True),
                max_retries=retries,
            )
        else:
            secure_adapter = HTTPAdapter(max_retries=retries)

        # Our Insecure HTTPAdapter disables HTTPS validation. It does not
        # support caching (see above) so we'll use it for all http:// URLs as
        # well as any https:// host that we've marked as ignoring TLS errors
        # for.
        insecure_adapter = InsecureHTTPAdapter(max_retries=retries)

        self.mount("https://", secure_adapter)
        self.mount("http://", insecure_adapter)

        # Enable file:// urls
        self.mount("file://", LocalFSAdapter())

        # We want to use a non-validating adapter for any requests which are
        # deemed insecure.
        for host in insecure_hosts:
            self.mount("https://{}/".format(host), insecure_adapter)

    def request(self, method, url, *args, **kwargs):
        # Allow setting a default timeout on a session
        kwargs.setdefault("timeout", self.timeout)

        # Dispatch the actual request
        return super(PipSession, self).request(method, url, *args, **kwargs)
398
399
def get_file_content(url, comes_from=None, session=None):
    """Gets the content of a file; it may be a filename, file: URL, or
    http: URL.  Returns (location, content).  Content is unicode.

    :param url: File path or url.
    :param comes_from: Origin description of requirements.
    :param session: Instance of pip.download.PipSession.
    :raises TypeError: if no session is provided.
    :raises InstallationError: if an http(s)-sourced requirements file
        references a local file, or the file cannot be opened.
    """
    if session is None:
        raise TypeError(
            "get_file_content() missing 1 required keyword argument: 'session'"
        )

    match = _scheme_re.search(url)
    if match:
        scheme = match.group(1).lower()
        # Refuse local file references coming from a remote requirements
        # file -- that would let a remote file read local paths.
        if (scheme == 'file' and comes_from and
                comes_from.startswith('http')):
            raise InstallationError(
                'Requirements file %s references URL %s, which is local'
                % (comes_from, url))
        if scheme == 'file':
            # Convert the file: URL into a plain path, then fall through to
            # the open() below.  Order matters: backslash normalization,
            # Windows drive-letter ("C|") fixup, then percent-decoding.
            path = url.split(':', 1)[1]
            path = path.replace('\\', '/')
            match = _url_slash_drive_re.match(path)
            if match:
                path = match.group(1) + ':' + path.split('|', 1)[1]
            path = urllib_parse.unquote(path)
            if path.startswith('/'):
                # Collapse any number of leading slashes to one.
                path = '/' + path.lstrip('/')
            url = path
        else:
            # FIXME: catch some errors
            resp = session.get(url)
            resp.raise_for_status()
            return resp.url, resp.text
    try:
        with open(url, 'rb') as f:
            content = auto_decode(f.read())
    except IOError as exc:
        raise InstallationError(
            'Could not open requirements file: %s' % str(exc)
        )
    return url, content
444
445
# Matches an http/https/file scheme prefix, case-insensitively.
_scheme_re = re.compile(r'^(http|https|file):', re.I)
# Matches a Windows drive letter written with a pipe, e.g. "/C|/path".
_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I)
448
449
def is_url(name):
    """Return True when `name` starts with a scheme pip knows how to fetch
    (http, https, file, ftp, or any registered VCS scheme)."""
    scheme, sep, _rest = name.partition(':')
    if not sep:
        return False
    known_schemes = ['http', 'https', 'file', 'ftp'] + vcs.all_schemes
    return scheme.lower() in known_schemes
456
457
def url_to_path(url):
    """
    Convert a file: URL to a local filesystem path.
    """
    assert url.startswith('file:'), (
        "You can only turn file: urls into filenames (not %r)" % url)

    parts = urllib_parse.urlsplit(url)
    host = parts.netloc

    # A non-empty netloc means a UNC path; restore the share notation.
    if host:
        host = '\\\\' + host

    return urllib_request.url2pathname(host + parts.path)
473
474
def path_to_url(path):
    """
    Convert a path to a file: URL.  The path is made absolute and its
    components are percent-quoted.
    """
    absolute = os.path.abspath(path)
    normalized = os.path.normpath(absolute)
    quoted = urllib_request.pathname2url(normalized)
    return urllib_parse.urljoin('file:', quoted)
483
484
def is_archive_file(name):
    """Return True if `name` is considered an archive file.

    The check is by file extension (case-insensitive) against
    ARCHIVE_EXTENSIONS.
    """
    ext = splitext(name)[1].lower()
    # Return the membership test directly instead of the original
    # ``if ...: return True / return False`` dance.
    return ext in ARCHIVE_EXTENSIONS
491
492
def unpack_vcs_link(link, location):
    """Unpack `link` into `location` via the matching VCS backend."""
    _get_used_vcs_backend(link).unpack(location)
496
497
def _get_used_vcs_backend(link):
    """Return a backend instance for `link`'s scheme, or None if no
    registered VCS backend claims it."""
    for candidate in vcs.backends:
        if link.scheme in candidate.schemes:
            return candidate(link.url)
    return None
503
504
def is_vcs_url(link):
    """True when some VCS backend handles `link`'s scheme."""
    backend = _get_used_vcs_backend(link)
    return bool(backend)
507
508
def is_file_url(link):
    """True when `link` is a file: URL (scheme compared case-insensitively)."""
    prefix = link.url[:5].lower()
    return prefix == 'file:'
511
512
def is_dir_url(link):
    """True when a file:// Link refers to a local directory.

    ``link`` must already be known to be a file:// URL -- check with
    is_file_url() first.
    """
    local_path = url_to_path(link.url_without_fragment)
    return os.path.isdir(local_path)
522
523
524def _progress_indicator(iterable, *args, **kwargs):
525 return iterable
526
527
def _download_url(resp, link, content_file, hashes, progress_bar):
    """Stream the body of `resp` into `content_file`.

    When `hashes` is provided, the chunks are fed through its checker (which
    raises on mismatch); otherwise they are simply consumed.  A progress
    indicator wraps the stream for non-cached responses at INFO logging.
    """
    try:
        total_length = int(resp.headers['content-length'])
    except (ValueError, KeyError, TypeError):
        total_length = 0

    cached_resp = getattr(resp, "from_cache", False)
    # Progress bar policy: never when logging is quieter than INFO or the
    # response came from cache; otherwise for bodies over ~40 kB or of
    # unknown length.
    if logger.getEffectiveLevel() > logging.INFO:
        show_progress = False
    elif cached_resp:
        show_progress = False
    elif total_length > (40 * 1000):
        show_progress = True
    elif not total_length:
        show_progress = True
    else:
        show_progress = False

    show_url = link.show_url

    def resp_read(chunk_size):
        # Yield raw (still-encoded) body chunks.
        try:
            # Special case for urllib3.
            for chunk in resp.raw.stream(
                chunk_size,
                # We use decode_content=False here because we don't
                # want urllib3 to mess with the raw bytes we get
                # from the server. If we decompress inside of
                # urllib3 then we cannot verify the checksum
                # because the checksum will be of the compressed
                # file. This breakage will only occur if the
                # server adds a Content-Encoding header, which
                # depends on how the server was configured:
                # - Some servers will notice that the file isn't a
                #   compressible file and will leave the file alone
                #   and with an empty Content-Encoding
                # - Some servers will notice that the file is
                #   already compressed and will leave the file
                #   alone and will add a Content-Encoding: gzip
                #   header
                # - Some servers won't notice anything at all and
                #   will take a file that's already been compressed
                #   and compress it again and set the
                #   Content-Encoding: gzip header
                #
                # By setting this not to decode automatically we
                # hope to eliminate problems with the second case.
                decode_content=False):
                yield chunk
        except AttributeError:
            # Standard file-like object.
            while True:
                chunk = resp.raw.read(chunk_size)
                if not chunk:
                    break
                yield chunk

    def written_chunks(chunks):
        # Tee: write each chunk to content_file while passing it downstream
        # (so hashes can be checked over the same bytes that were written).
        for chunk in chunks:
            content_file.write(chunk)
            yield chunk

    progress_indicator = _progress_indicator

    if link.netloc == PyPI.netloc:
        url = show_url
    else:
        url = link.url_without_fragment

    if show_progress:  # We don't show progress on cached responses
        progress_indicator = DownloadProgressProvider(progress_bar,
                                                      max=total_length)
        if total_length:
            logger.info("Downloading %s (%s)", url, format_size(total_length))
        else:
            logger.info("Downloading %s", url)
    elif cached_resp:
        logger.info("Using cached %s", url)
    else:
        logger.info("Downloading %s", url)

    logger.debug('Downloading from URL %s', link)

    downloaded_chunks = written_chunks(
        progress_indicator(
            resp_read(CONTENT_CHUNK_SIZE),
            CONTENT_CHUNK_SIZE
        )
    )
    if hashes:
        hashes.check_against_chunks(downloaded_chunks)
    else:
        # No hashes to verify; just drive the generator so the file is
        # actually written.
        consume(downloaded_chunks)
621
622
def _copy_file(filename, location, link):
    """Copy `filename` into `location` under `link.filename`.

    When the destination already exists the user is asked whether to
    (i)gnore it, (w)ipe it, (b)ackup the old copy, or (a)bort.
    """
    copy = True
    download_location = os.path.join(location, link.filename)
    if os.path.exists(download_location):
        # Fixed prompt typo: "(a)abort" -> "(a)bort".
        response = ask_path_exists(
            'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)bort' %
            display_path(download_location), ('i', 'w', 'b', 'a'))
        if response == 'i':
            copy = False
        elif response == 'w':
            logger.warning('Deleting %s', display_path(download_location))
            os.remove(download_location)
        elif response == 'b':
            dest_file = backup_dir(download_location)
            logger.warning(
                'Backing up %s to %s',
                display_path(download_location),
                display_path(dest_file),
            )
            shutil.move(download_location, dest_file)
        elif response == 'a':
            sys.exit(-1)
    if copy:
        shutil.copy(filename, download_location)
        logger.info('Saved %s', display_path(download_location))
648
649
def unpack_http_url(link, location, download_dir=None,
                    session=None, hashes=None, progress_bar="on"):
    """Fetch the archive behind an http(s) `link` and unpack it into
    `location`.

    A previously-downloaded copy found in `download_dir` (and passing
    `hashes`) is reused; otherwise the file is downloaded into a temp dir
    first.  Freshly downloaded files are copied into `download_dir` when one
    is given.

    :raises TypeError: if `session` is not provided.
    """
    if session is None:
        raise TypeError(
            "unpack_http_url() missing 1 required keyword argument: 'session'"
        )

    with TempDirectory(kind="unpack") as temp_dir:
        # If a download dir is specified, is the file already downloaded there?
        already_downloaded_path = None
        if download_dir:
            already_downloaded_path = _check_download_dir(link,
                                                          download_dir,
                                                          hashes)

        if already_downloaded_path:
            from_path = already_downloaded_path
            content_type = mimetypes.guess_type(from_path)[0]
        else:
            # let's download to a tmp dir
            from_path, content_type = _download_http_url(link,
                                                         session,
                                                         temp_dir.path,
                                                         hashes,
                                                         progress_bar)

        # unpack the archive to the build dir location. even when only
        # downloading archives, they have to be unpacked to parse dependencies
        unpack_file(from_path, location, content_type, link)

        # a download dir is specified; let's copy the archive there
        if download_dir and not already_downloaded_path:
            _copy_file(from_path, download_dir, link)

        # Freshly downloaded temp files are removed; a pre-existing copy in
        # download_dir is kept.
        if not already_downloaded_path:
            os.unlink(from_path)
686
687
def unpack_file_url(link, location, download_dir=None, hashes=None):
    """Unpack link into location.

    If download_dir is provided and link points to a file, make a copy
    of the link file inside download_dir.

    :param hashes: optional hash checker applied to the source file (not
        applicable when the link is a directory).
    """
    link_path = url_to_path(link.url_without_fragment)

    # If it's a url to a local directory
    if is_dir_url(link):
        # Replace any existing build dir with a fresh copy of the source tree.
        if os.path.isdir(location):
            rmtree(location)
        shutil.copytree(link_path, location, symlinks=True)
        if download_dir:
            logger.info('Link is a directory, ignoring download_dir')
        return

    # If --require-hashes is off, `hashes` is either empty, the
    # link's embedded hash, or MissingHashes; it is required to
    # match. If --require-hashes is on, we are satisfied by any
    # hash in `hashes` matching: a URL-based or an option-based
    # one; no internet-sourced hash will be in `hashes`.
    if hashes:
        hashes.check_against_path(link_path)

    # If a download dir is specified, is the file already there and valid?
    already_downloaded_path = None
    if download_dir:
        already_downloaded_path = _check_download_dir(link,
                                                      download_dir,
                                                      hashes)

    if already_downloaded_path:
        from_path = already_downloaded_path
    else:
        from_path = link_path

    content_type = mimetypes.guess_type(from_path)[0]

    # unpack the archive to the build dir location. even when only downloading
    # archives, they have to be unpacked to parse dependencies
    unpack_file(from_path, location, content_type, link)

    # a download dir is specified and not already downloaded
    if download_dir and not already_downloaded_path:
        _copy_file(from_path, download_dir, link)
734
735
def _copy_dist_from_dir(link_path, location):
    """Copy distribution files in `link_path` to `location`.

    Invoked when user requests to install a local directory. E.g.:

        pip install .
        pip install ~/dev/git-repos/python-prompt-toolkit

    :param link_path: source checkout/directory containing setup.py.
    :param location: destination dir; wiped first, then receives the built
        sdist's unpacked contents.
    """

    # Note: This is currently VERY SLOW if you have a lot of data in the
    # directory, because it copies everything with `shutil.copytree`.
    # What it should really do is build an sdist and install that.
    # See https://github.com/pypa/pip/issues/2195

    if os.path.isdir(location):
        rmtree(location)

    # build an sdist: run `python -c <setuptools shim> sdist --dist-dir location`
    setup_py = 'setup.py'
    sdist_args = [sys.executable]
    sdist_args.append('-c')
    sdist_args.append(SETUPTOOLS_SHIM % setup_py)
    sdist_args.append('sdist')
    sdist_args += ['--dist-dir', location]
    logger.info('Running setup.py sdist for %s', link_path)

    with indent_log():
        call_subprocess(sdist_args, cwd=link_path, show_stdout=False)

    # unpack sdist into `location`
    # NOTE(review): assumes the dist dir contains exactly the one sdist just
    # built -- os.listdir(location)[0] is otherwise arbitrary.
    sdist = os.path.join(location, os.listdir(location)[0])
    logger.info('Unpacking sdist %s into %s', sdist, location)
    unpack_file(sdist, location, content_type=None, link=None)
770
771
class PipXmlrpcTransport(xmlrpc_client.Transport):
    """Provide a `xmlrpclib.Transport` implementation via a `PipSession`
    object.
    """

    def __init__(self, index_url, session, use_datetime=False):
        xmlrpc_client.Transport.__init__(self, use_datetime)
        # Only the scheme of index_url is retained; host and handler are
        # supplied per-call to request() below.
        index_parts = urllib_parse.urlparse(index_url)
        self._scheme = index_parts.scheme
        self._session = session

    def request(self, host, handler, request_body, verbose=False):
        """POST `request_body` as XML to the endpoint and parse the reply."""
        parts = (self._scheme, host, handler, None, None, None)
        url = urllib_parse.urlunparse(parts)
        try:
            headers = {'Content-Type': 'text/xml'}
            response = self._session.post(url, data=request_body,
                                          headers=headers, stream=True)
            response.raise_for_status()
            # NOTE(review): set before parse_response -- presumably the base
            # Transport consults self.verbose while parsing; confirm.
            self.verbose = verbose
            return self.parse_response(response.raw)
        except requests.HTTPError as exc:
            logger.critical(
                "HTTP error %s while getting %s",
                exc.response.status_code, url,
            )
            raise
799
800
def unpack_url(link, location, download_dir=None,
               only_download=False, session=None, hashes=None,
               progress_bar="on"):
    """Unpack link.
    If link is a VCS link:
      if only_download, export into download_dir and ignore location
          else unpack into location
    for other types of link:
      - unpack into location
      - if download_dir, copy the file into download_dir
      - if only_download, mark location for deletion

    :param hashes: A Hashes object, one of whose embedded hashes must match,
        or HashMismatch will be raised. If the Hashes is empty, no matches are
        required, and unhashable types of requirements (like VCS ones, which
        would ordinarily raise HashUnsupported) are allowed.
    """
    # non-editable vcs urls
    if is_vcs_url(link):
        unpack_vcs_link(link, location)

    # file urls
    elif is_file_url(link):
        unpack_file_url(link, location, download_dir, hashes=hashes)

    # http urls
    else:
        if session is None:
            # Fall back to a default session when the caller supplied none.
            session = PipSession()

        unpack_http_url(
            link,
            location,
            download_dir,
            session,
            hashes=hashes,
            progress_bar=progress_bar
        )
    if only_download:
        write_delete_marker_file(location)
841
842
def _download_http_url(link, session, temp_dir, hashes, progress_bar):
    """Download link url into temp_dir using provided session.

    Returns ``(file_path, content_type)`` for the downloaded file.  The
    filename is taken from the Content-Disposition header when present,
    falling back to the link's filename, with an extension guessed from the
    content type or the final response URL when missing.
    """
    # Strip the fragment (e.g. #sha256=...) before requesting.
    target_url = link.url.split('#', 1)[0]
    try:
        resp = session.get(
            target_url,
            # We use Accept-Encoding: identity here because requests
            # defaults to accepting compressed responses. This breaks in
            # a variety of ways depending on how the server is configured.
            # - Some servers will notice that the file isn't a compressible
            #   file and will leave the file alone and with an empty
            #   Content-Encoding
            # - Some servers will notice that the file is already
            #   compressed and will leave the file alone and will add a
            #   Content-Encoding: gzip header
            # - Some servers won't notice anything at all and will take
            #   a file that's already been compressed and compress it again
            #   and set the Content-Encoding: gzip header
            # By setting this to request only the identity encoding We're
            # hoping to eliminate the third case. Hopefully there does not
            # exist a server which when given a file will notice it is
            # already compressed and that you're not asking for a
            # compressed file and will then decompress it before sending
            # because if that's the case I don't think it'll ever be
            # possible to make this work.
            headers={"Accept-Encoding": "identity"},
            stream=True,
        )
        resp.raise_for_status()
    except requests.HTTPError as exc:
        logger.critical(
            "HTTP error %s while getting %s", exc.response.status_code, link,
        )
        raise

    content_type = resp.headers.get('content-type', '')
    filename = link.filename  # fallback
    # Have a look at the Content-Disposition header for a better guess
    content_disposition = resp.headers.get('content-disposition')
    if content_disposition:
        # The original bound the first element to ``type``, shadowing the
        # builtin; the disposition type itself is unused.
        _disp_type, params = cgi.parse_header(content_disposition)
        # We use ``or`` here because we don't want to use an "empty" value
        # from the filename param.
        filename = params.get('filename') or filename
    ext = splitext(filename)[1]
    if not ext:
        ext = mimetypes.guess_extension(content_type)
        if ext:
            filename += ext
    if not ext and link.url != resp.url:
        # The request was redirected; try the final URL for an extension.
        ext = os.path.splitext(resp.url)[1]
        if ext:
            filename += ext
    file_path = os.path.join(temp_dir, filename)
    with open(file_path, 'wb') as content_file:
        _download_url(resp, link, content_file, hashes, progress_bar)
    return file_path, content_type
900
901
def _check_download_dir(link, download_dir, hashes):
    """Look in `download_dir` for an already-downloaded copy of `link`.

    Returns the path when a copy exists and (if `hashes` is given) its hash
    matches; a stale copy is deleted and None returned.
    """
    download_path = os.path.join(download_dir, link.filename)

    if not os.path.exists(download_path):
        return None

    # If already downloaded, does its hash match?
    logger.info('File was already downloaded %s', download_path)
    if hashes:
        try:
            hashes.check_against_path(download_path)
        except HashMismatch:
            logger.warning(
                'Previously-downloaded file %s has bad hash. '
                'Re-downloading.',
                download_path
            )
            os.unlink(download_path)
            return None
    return download_path
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/exceptions.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/exceptions.py
new file mode 100644
index 0000000..28705c8
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/exceptions.py
@@ -0,0 +1,249 @@
1"""Exceptions used throughout package"""
2from __future__ import absolute_import
3
4from itertools import chain, groupby, repeat
5
6from pip._vendor.six import iteritems
7
8
# Flat marker exceptions: each subclass only narrows the category; the
# richer hash-error hierarchy follows below.

class PipError(Exception):
    """Base pip exception"""


class ConfigurationError(PipError):
    """General exception in configuration"""


class InstallationError(PipError):
    """General exception during installation"""


class UninstallationError(PipError):
    """General exception during uninstallation"""


class DistributionNotFound(InstallationError):
    """Raised when a distribution cannot be found to satisfy a requirement"""


class RequirementsFileParseError(InstallationError):
    """Raised when a general error occurs parsing a requirements file line."""


class BestVersionAlreadyInstalled(PipError):
    """Raised when the most up-to-date version of a package is already
    installed."""


class BadCommand(PipError):
    """Raised when virtualenv or a command is not found"""


class CommandError(PipError):
    """Raised when there is an error in command-line arguments"""


class PreviousBuildDirError(PipError):
    """Raised when there's a previous conflicting build directory"""


class InvalidWheelFilename(InstallationError):
    """Invalid wheel filename."""


class UnsupportedWheel(InstallationError):
    """Unsupported wheel."""
56
57
class HashErrors(InstallationError):
    """Multiple HashError instances rolled into one for reporting"""

    def __init__(self):
        self.errors = []

    def append(self, error):
        """Collect another HashError for later rendering."""
        self.errors.append(error)

    def __str__(self):
        """Render every collected error, grouped by class under its heading.

        Returns '' when nothing was collected.  (The original returned None
        in that case, which makes str() raise TypeError, since __str__ must
        return a string.)
        """
        lines = []
        self.errors.sort(key=lambda e: e.order)
        for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__):
            lines.append(cls.head)
            lines.extend(e.body() for e in errors_of_cls)
        return '\n'.join(lines)

    def __nonzero__(self):
        # Python 2 truthiness: empty roll-up is falsy.
        return bool(self.errors)

    def __bool__(self):
        return self.__nonzero__()
81
82
class HashError(InstallationError):
    """
    A failure to verify a package against known-good hashes

    :cvar order: An int sorting hash exception classes by difficulty of
        recovery (lower being harder), so the user doesn't bother fretting
        about unpinned packages when he has deeper issues, like VCS
        dependencies, to deal with. Also keeps error reports in a
        deterministic order.
    :cvar head: A section heading for display above potentially many
        exceptions of this kind
    :ivar req: The InstallRequirement that triggered this error. This is
        pasted on after the exception is instantiated, because it's not
        typically available earlier.

    """
    req = None  # assigned by the caller post-construction (see :ivar: above)
    head = ''

    def body(self):
        """Return a summary of me for display under the heading.

        This default implementation simply prints a description of the
        triggering requirement.

        :param req: The InstallRequirement that provoked this error, with
            populate_link() having already been called

        """
        return '    %s' % self._requirement_name()

    def __str__(self):
        return '%s\n%s' % (self.head, self.body())

    def _requirement_name(self):
        """Return a description of the requirement that triggered me.

        This default implementation returns long description of the req, with
        line numbers

        """
        return str(self.req) if self.req else 'unknown package'
125
126
class VcsHashUnsupported(HashError):
    """A hash was provided for a version-control-system-based requirement, but
    we don't have a method for hashing those."""

    order = 0
    head = ("Can't verify hashes for these requirements because we don't "
            "have a way to hash version control repositories:")
134
135
class DirectoryUrlHashUnsupported(HashError):
    """A hash was provided for a file:// requirement that points to a
    directory, and we don't have a method for hashing those.

    (Docstring corrected: it was copy-pasted from VcsHashUnsupported; the
    `head` below shows this class covers file:// directory requirements.)
    """

    order = 1
    head = ("Can't verify hashes for these file:// requirements because they "
            "point to directories:")
143
144
class HashMissing(HashError):
    """A hash was needed for a requirement but is absent."""

    order = 2
    head = ('Hashes are required in --require-hashes mode, but they are '
            'missing from some requirements. Here is a list of those '
            'requirements along with the hashes their downloaded archives '
            'actually had. Add lines like these to your requirements files to '
            'prevent tampering. (If you did not enable --require-hashes '
            'manually, note that it turns on automatically when any package '
            'has a hash.)')

    def __init__(self, gotten_hash):
        """
        :param gotten_hash: The hash of the (possibly malicious) archive we
            just downloaded
        """
        self.gotten_hash = gotten_hash

    def body(self):
        """Render a ready-to-paste requirements line with the observed hash."""
        # Dodge circular import.
        from pip._internal.utils.hashes import FAVORITE_HASH

        package = None
        if self.req:
            # In the case of URL-based requirements, display the original URL
            # seen in the requirements file rather than the package name,
            # so the output can be directly copied into the requirements file.
            package = (self.req.original_link if self.req.original_link
                       # In case someone feeds something downright stupid
                       # to InstallRequirement's constructor.
                       else getattr(self.req, 'req', None))
        return '    %s --hash=%s:%s' % (package or 'unknown package',
                                        FAVORITE_HASH,
                                        self.gotten_hash)
180
181
class HashUnpinned(HashError):
    """A requirement had a hash specified but was not pinned to a specific
    version."""

    order = 3
    head = ('In --require-hashes mode, all requirements must have their '
            'versions pinned with ==. These do not:')
189
190
class HashMismatch(HashError):
    """
    Distribution file hash values don't match.

    :ivar package_name: The name of the package that triggered the hash
        mismatch. Feel free to write to this after the exception is raise to
        improve its error message.

    """
    order = 4
    head = ('THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS '
            'FILE. If you have updated the package versions, please update '
            'the hashes. Otherwise, examine the package contents carefully; '
            'someone may have tampered with them.')

    def __init__(self, allowed, gots):
        """
        :param allowed: A dict of algorithm names pointing to lists of allowed
            hex digests
        :param gots: A dict of algorithm names pointing to hashes we
            actually got from the files under suspicion
        """
        self.allowed = allowed
        self.gots = gots

    def body(self):
        return '    %s:\n%s' % (self._requirement_name(),
                                self._hash_comparison())

    def _hash_comparison(self):
        """
        Return a comparison of actual and expected hash values.

        Example::

               Expected sha256 abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde
                            or 123451234512345123451234512345123451234512345
                    Got        bcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdef

        """
        def hash_then_or(hash_name):
            # For now, all the decent hashes have 6-char names, so we can get
            # away with hard-coding space literals.
            return chain([hash_name], repeat('    or'))

        lines = []
        # ``.items()`` replaces six's iteritems(); same behavior on both
        # Python 2 and 3.  The dead ``prefix = '    or'`` reassignment that
        # used to end this loop (prefix is rebound at the top of every
        # iteration) has been removed.
        for hash_name, expecteds in self.allowed.items():
            prefix = hash_then_or(hash_name)
            lines.extend(('        Expected %s %s' % (next(prefix), e))
                         for e in expecteds)
            lines.append('             Got        %s\n' %
                         self.gots[hash_name].hexdigest())
        return '\n'.join(lines)
245
246
class UnsupportedPythonVersion(InstallationError):
    """The running interpreter does not satisfy a package's Requires-Python
    metadata."""
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/index.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/index.py
new file mode 100644
index 0000000..15e0bf3
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/index.py
@@ -0,0 +1,1117 @@
1"""Routines related to PyPI, indexes"""
2from __future__ import absolute_import
3
4import cgi
5import itertools
6import logging
7import mimetypes
8import os
9import posixpath
10import re
11import sys
12import warnings
13from collections import namedtuple
14
15from pip._vendor import html5lib, requests, six
16from pip._vendor.distlib.compat import unescape
17from pip._vendor.packaging import specifiers
18from pip._vendor.packaging.utils import canonicalize_name
19from pip._vendor.packaging.version import parse as parse_version
20from pip._vendor.requests.exceptions import SSLError
21from pip._vendor.six.moves.urllib import parse as urllib_parse
22from pip._vendor.six.moves.urllib import request as urllib_request
23
24from pip._internal.compat import ipaddress
25from pip._internal.download import HAS_TLS, is_url, path_to_url, url_to_path
26from pip._internal.exceptions import (
27 BestVersionAlreadyInstalled, DistributionNotFound, InvalidWheelFilename,
28 UnsupportedWheel,
29)
30from pip._internal.models import PyPI
31from pip._internal.pep425tags import get_supported
32from pip._internal.utils.deprecation import RemovedInPip11Warning
33from pip._internal.utils.logging import indent_log
34from pip._internal.utils.misc import (
35 ARCHIVE_EXTENSIONS, SUPPORTED_EXTENSIONS, cached_property, normalize_path,
36 splitext,
37)
38from pip._internal.utils.packaging import check_requires_python
39from pip._internal.wheel import Wheel, wheel_ext
40
41__all__ = ['FormatControl', 'fmt_ctl_handle_mutual_exclude', 'PackageFinder']
42
43
# Origins are (protocol, host, port) triples. "*" is a wildcard for a field,
# and a port of None matches an absent port (see _validate_secure_origin).
SECURE_ORIGINS = [
    # protocol, hostname, port
    # Taken from Chrome's list of secure origins (See: http://bit.ly/1qrySKC)
    ("https", "*", "*"),
    ("*", "localhost", "*"),
    ("*", "127.0.0.0/8", "*"),
    ("*", "::1/128", "*"),
    ("file", "*", None),
    # ssh is always secure.
    ("ssh", "*", "*"),
]


logger = logging.getLogger(__name__)
58
59
class InstallationCandidate(object):
    """A single (project, parsed version, location) candidate.

    Candidates hash and compare by the tuple of all three fields, so they
    sort primarily by project name, then by version, then by location.
    """

    def __init__(self, project, version, location):
        self.project = project
        self.version = parse_version(version)
        self.location = location
        # One tuple backs __hash__ and every comparison operator.
        self._key = (self.project, self.version, self.location)

    def __repr__(self):
        return "<InstallationCandidate({!r}, {!r}, {!r})>".format(
            self.project, self.version, self.location,
        )

    def __hash__(self):
        return hash(self._key)

    def __lt__(self, other):
        return self._compare(other, lambda a, b: a < b)

    def __le__(self, other):
        return self._compare(other, lambda a, b: a <= b)

    def __eq__(self, other):
        return self._compare(other, lambda a, b: a == b)

    def __ge__(self, other):
        return self._compare(other, lambda a, b: a >= b)

    def __gt__(self, other):
        return self._compare(other, lambda a, b: a > b)

    def __ne__(self, other):
        return self._compare(other, lambda a, b: a != b)

    def _compare(self, other, op):
        # Delegate to tuple comparison on the key; defer to the other
        # operand for anything that is not an InstallationCandidate.
        if isinstance(other, InstallationCandidate):
            return op(self._key, other._key)
        return NotImplemented
99
100
class PackageFinder(object):
    """This finds packages.

    This is meant to match easy_install's technique for looking for
    packages, by reading pages and looking for appropriate links.
    """

    def __init__(self, find_links, index_urls, allow_all_prereleases=False,
                 trusted_hosts=None, process_dependency_links=False,
                 session=None, format_control=None, platform=None,
                 versions=None, abi=None, implementation=None):
        """Create a PackageFinder.

        :param find_links: Local paths or URLs searched for archives in
            addition to the indexes.
        :param index_urls: Base URLs of the package indexes to consult.
        :param allow_all_prereleases: Whether pre-release versions are
            acceptable for every project.
        :param trusted_hosts: Hostnames exempted from the HTTPS warning.
        :param process_dependency_links: Whether dependency_links metadata
            (deprecated) is honored.
        :param session: The HTTP session used for all remote fetches;
            required — a TypeError is raised when omitted.
        :param format_control: A FormatControl object or None. Used to control
            the selection of source packages / binary packages when consulting
            the index and links.
        :param platform: A string or None. If None, searches for packages
            that are supported by the current system. Otherwise, will find
            packages that can be built on the platform passed in. These
            packages will only be downloaded for distribution: they will
            not be built locally.
        :param versions: A list of strings or None. This is passed directly
            to pep425tags.py in the get_supported() method.
        :param abi: A string or None. This is passed directly
            to pep425tags.py in the get_supported() method.
        :param implementation: A string or None. This is passed directly
            to pep425tags.py in the get_supported() method.
        :raises TypeError: if ``session`` is None.
        """
        if session is None:
            raise TypeError(
                "PackageFinder() missing 1 required keyword argument: "
                "'session'"
            )

        # Build find_links. If an argument starts with ~, it may be
        # a local file relative to a home directory. So try normalizing
        # it and if it exists, use the normalized version.
        # This is deliberately conservative - it might be fine just to
        # blindly normalize anything starting with a ~...
        self.find_links = []
        for link in find_links:
            if link.startswith('~'):
                new_link = normalize_path(link)
                if os.path.exists(new_link):
                    link = new_link
            self.find_links.append(link)

        self.index_urls = index_urls
        self.dependency_links = []

        # These are boring links that have already been logged somehow:
        self.logged_links = set()

        self.format_control = format_control or FormatControl(set(), set())

        # Domains that we won't emit warnings for when not using HTTPS
        self.secure_origins = [
            ("*", host, "*")
            for host in (trusted_hosts if trusted_hosts else [])
        ]

        # Do we want to allow _all_ pre-releases?
        self.allow_all_prereleases = allow_all_prereleases

        # Do we process dependency links?
        self.process_dependency_links = process_dependency_links

        # The Session we'll use to make requests
        self.session = session

        # The valid tags to check potential found wheel candidates against
        self.valid_tags = get_supported(
            versions=versions,
            platform=platform,
            abi=abi,
            impl=implementation,
        )

        # If we don't have TLS enabled, then WARN if anyplace we're looking
        # relies on TLS.
        if not HAS_TLS:
            for link in itertools.chain(self.index_urls, self.find_links):
                parsed = urllib_parse.urlparse(link)
                if parsed.scheme == "https":
                    logger.warning(
                        "pip is configured with locations that require "
                        "TLS/SSL, however the ssl module in Python is not "
                        "available."
                    )
                    break

    def get_formatted_locations(self):
        """Return a human-readable summary of the configured non-default
        indexes and find-links locations, one "Looking in ..." line each."""
        lines = []
        if self.index_urls and self.index_urls != [PyPI.simple_url]:
            lines.append(
                "Looking in indexes: {}".format(", ".join(self.index_urls))
            )
        if self.find_links:
            lines.append(
                "Looking in links: {}".format(", ".join(self.find_links))
            )
        return "\n".join(lines)

    def add_dependency_links(self, links):
        """Record (deprecated) dependency_links entries for later searching.

        Only extends the list when ``process_dependency_links`` was enabled,
        and warns about the upcoming removal when it does.
        """
        # # FIXME: this shouldn't be global list this, it should only
        # # apply to requirements of the package that specifies the
        # # dependency_links value
        # # FIXME: also, we should track comes_from (i.e., use Link)
        if self.process_dependency_links:
            warnings.warn(
                "Dependency Links processing has been deprecated and will be "
                "removed in a future release.",
                RemovedInPip11Warning,
            )
            self.dependency_links.extend(links)

    @staticmethod
    def _sort_locations(locations, expand_dir=False):
        """
        Sort locations into "files" (archives) and "urls", and return
        a pair of lists (files, urls).

        Local directories are listed and their contents classified
        individually when ``expand_dir`` is true (or treated as index
        pages when they were given as file: URLs).
        """
        files = []
        urls = []

        # puts the url for the given file path into the appropriate list
        def sort_path(path):
            url = path_to_url(path)
            if mimetypes.guess_type(url, strict=False)[0] == 'text/html':
                urls.append(url)
            else:
                files.append(url)

        for url in locations:

            is_local_path = os.path.exists(url)
            is_file_url = url.startswith('file:')

            if is_local_path or is_file_url:
                if is_local_path:
                    path = url
                else:
                    path = url_to_path(url)
                if os.path.isdir(path):
                    if expand_dir:
                        path = os.path.realpath(path)
                        for item in os.listdir(path):
                            sort_path(os.path.join(path, item))
                    elif is_file_url:
                        urls.append(url)
                elif os.path.isfile(path):
                    sort_path(path)
                else:
                    logger.warning(
                        "Url '%s' is ignored: it is neither a file "
                        "nor a directory.", url,
                    )
            elif is_url(url):
                # Only add url with clear scheme
                urls.append(url)
            else:
                logger.warning(
                    "Url '%s' is ignored. It is either a non-existing "
                    "path or lacks a specific scheme.", url,
                )

        return files, urls

    def _candidate_sort_key(self, candidate):
        """
        Function used to generate link sort key for link tuples.
        The greater the return value, the more preferred it is.
        If not finding wheels, then sorted by version only.
        If finding wheels, then the sort order is by version, then:
          1. existing installs
          2. wheels ordered via Wheel.support_index_min(self.valid_tags)
          3. source archives
        Note: it was considered to embed this logic into the Link
              comparison operators, but then different sdist links
              with the same version, would have to be considered equal
        """
        support_num = len(self.valid_tags)
        build_tag = tuple()
        if candidate.location.is_wheel:
            # can raise InvalidWheelFilename
            wheel = Wheel(candidate.location.filename)
            if not wheel.supported(self.valid_tags):
                raise UnsupportedWheel(
                    "%s is not a supported wheel for this platform. It "
                    "can't be sorted." % wheel.filename
                )
            # Better tag matches sort higher (less negative).
            pri = -(wheel.support_index_min(self.valid_tags))
            if wheel.build_tag is not None:
                # Numeric build-tag prefix sorts numerically, rest lexically.
                match = re.match(r'^(\d+)(.*)$', wheel.build_tag)
                build_tag_groups = match.groups()
                build_tag = (int(build_tag_groups[0]), build_tag_groups[1])
        else:  # sdist
            # Source archives sort below every supported wheel.
            pri = -(support_num)
        return (candidate.version, build_tag, pri)

    def _validate_secure_origin(self, logger, location):
        """Return True if ``location`` matches one of the built-in or
        user-trusted secure origins; otherwise warn and return False."""
        # Determine if this url used a secure transport mechanism
        parsed = urllib_parse.urlparse(str(location))
        origin = (parsed.scheme, parsed.hostname, parsed.port)

        # The protocol to use to see if the protocol matches.
        # Don't count the repository type as part of the protocol: in
        # cases such as "git+ssh", only use "ssh". (I.e., Only verify against
        # the last scheme.)
        protocol = origin[0].rsplit('+', 1)[-1]

        # Determine if our origin is a secure origin by looking through our
        # hardcoded list of secure origins, as well as any additional ones
        # configured on this PackageFinder instance.
        for secure_origin in (SECURE_ORIGINS + self.secure_origins):
            if protocol != secure_origin[0] and secure_origin[0] != "*":
                continue

            try:
                # We need to do this decode dance to ensure that we have a
                # unicode object, even on Python 2.x.
                addr = ipaddress.ip_address(
                    origin[1]
                    if (
                        isinstance(origin[1], six.text_type) or
                        origin[1] is None
                    )
                    else origin[1].decode("utf8")
                )
                network = ipaddress.ip_network(
                    secure_origin[1]
                    if isinstance(secure_origin[1], six.text_type)
                    else secure_origin[1].decode("utf8")
                )
            except ValueError:
                # We don't have both a valid address or a valid network, so
                # we'll check this origin against hostnames.
                if (origin[1] and
                        origin[1].lower() != secure_origin[1].lower() and
                        secure_origin[1] != "*"):
                    continue
            else:
                # We have a valid address and network, so see if the address
                # is contained within the network.
                if addr not in network:
                    continue

            # Check to see if the port matches
            if (origin[2] != secure_origin[2] and
                    secure_origin[2] != "*" and
                    secure_origin[2] is not None):
                continue

            # If we've gotten here, then this origin matches the current
            # secure origin and we should return True
            return True

        # If we've gotten to this point, then the origin isn't secure and we
        # will not accept it as a valid location to search. We will however
        # log a warning that we are ignoring it.
        logger.warning(
            "The repository located at %s is not a trusted or secure host and "
            "is being ignored. If this repository is available via HTTPS we "
            "recommend you use HTTPS instead, otherwise you may silence "
            "this warning and allow it anyway with '--trusted-host %s'.",
            parsed.hostname,
            parsed.hostname,
        )

        return False

    def _get_index_urls_locations(self, project_name):
        """Returns the locations found via self.index_urls

        Checks the url_name on the main (first in the list) index and
        use this url_name to produce all locations
        """

        def mkurl_pypi_url(url):
            loc = posixpath.join(
                url,
                urllib_parse.quote(canonicalize_name(project_name)))
            # For maximum compatibility with easy_install, ensure the path
            # ends in a trailing slash. Although this isn't in the spec
            # (and PyPI can handle it without the slash) some other index
            # implementations might break if they relied on easy_install's
            # behavior.
            if not loc.endswith('/'):
                loc = loc + '/'
            return loc

        return [mkurl_pypi_url(url) for url in self.index_urls]

    def find_all_candidates(self, project_name):
        """Find all available InstallationCandidate for project_name

        This checks index_urls, find_links and dependency_links.
        All versions found are returned as an InstallationCandidate list.

        See _link_package_versions for details on which files are accepted
        """
        index_locations = self._get_index_urls_locations(project_name)
        index_file_loc, index_url_loc = self._sort_locations(index_locations)
        fl_file_loc, fl_url_loc = self._sort_locations(
            self.find_links, expand_dir=True,
        )
        dep_file_loc, dep_url_loc = self._sort_locations(self.dependency_links)

        file_locations = (Link(url) for url in itertools.chain(
            index_file_loc, fl_file_loc, dep_file_loc,
        ))

        # We trust every url that the user has given us whether it was given
        # via --index-url or --find-links
        # We explicitly do not trust links that came from dependency_links
        # We want to filter out any thing which does not have a secure origin.
        url_locations = [
            link for link in itertools.chain(
                (Link(url) for url in index_url_loc),
                (Link(url) for url in fl_url_loc),
                (Link(url) for url in dep_url_loc),
            )
            if self._validate_secure_origin(logger, link)
        ]

        logger.debug('%d location(s) to search for versions of %s:',
                     len(url_locations), project_name)

        for location in url_locations:
            logger.debug('* %s', location)

        canonical_name = canonicalize_name(project_name)
        formats = fmt_ctl_formats(self.format_control, canonical_name)
        search = Search(project_name, canonical_name, formats)
        find_links_versions = self._package_versions(
            # We trust every directly linked archive in find_links
            (Link(url, '-f') for url in self.find_links),
            search
        )

        page_versions = []
        for page in self._get_pages(url_locations, project_name):
            logger.debug('Analyzing links from page %s', page.url)
            with indent_log():
                page_versions.extend(
                    self._package_versions(page.links, search)
                )

        dependency_versions = self._package_versions(
            (Link(url) for url in self.dependency_links), search
        )
        if dependency_versions:
            logger.debug(
                'dependency_links found: %s',
                ', '.join([
                    version.location.url for version in dependency_versions
                ])
            )

        file_versions = self._package_versions(file_locations, search)
        if file_versions:
            file_versions.sort(reverse=True)
            logger.debug(
                'Local files found: %s',
                ', '.join([
                    url_to_path(candidate.location.url)
                    for candidate in file_versions
                ])
            )

        # This is an intentional priority ordering
        return (
            file_versions + find_links_versions + page_versions +
            dependency_versions
        )

    def find_requirement(self, req, upgrade):
        """Try to find a Link matching req

        Expects req, an InstallRequirement and upgrade, a boolean
        Returns a Link if found,
        Raises DistributionNotFound or BestVersionAlreadyInstalled otherwise
        """
        all_candidates = self.find_all_candidates(req.name)

        # Filter out anything which doesn't match our specifier
        compatible_versions = set(
            req.specifier.filter(
                # We turn the version object into a str here because otherwise
                # when we're debundled but setuptools isn't, Python will see
                # packaging.version.Version and
                # pkg_resources._vendor.packaging.version.Version as different
                # types. This way we'll use a str as a common data interchange
                # format. If we stop using the pkg_resources provided specifier
                # and start using our own, we can drop the cast to str().
                [str(c.version) for c in all_candidates],
                prereleases=(
                    self.allow_all_prereleases
                    if self.allow_all_prereleases else None
                ),
            )
        )
        applicable_candidates = [
            # Again, converting to str to deal with debundling.
            c for c in all_candidates if str(c.version) in compatible_versions
        ]

        if applicable_candidates:
            best_candidate = max(applicable_candidates,
                                 key=self._candidate_sort_key)
        else:
            best_candidate = None

        if req.satisfied_by is not None:
            installed_version = parse_version(req.satisfied_by.version)
        else:
            installed_version = None

        if installed_version is None and best_candidate is None:
            logger.critical(
                'Could not find a version that satisfies the requirement %s '
                '(from versions: %s)',
                req,
                ', '.join(
                    sorted(
                        {str(c.version) for c in all_candidates},
                        key=parse_version,
                    )
                )
            )

            raise DistributionNotFound(
                'No matching distribution found for %s' % req
            )

        best_installed = False
        if installed_version and (
                best_candidate is None or
                best_candidate.version <= installed_version):
            best_installed = True

        if not upgrade and installed_version is not None:
            if best_installed:
                logger.debug(
                    'Existing installed version (%s) is most up-to-date and '
                    'satisfies requirement',
                    installed_version,
                )
            else:
                logger.debug(
                    'Existing installed version (%s) satisfies requirement '
                    '(most up-to-date version is %s)',
                    installed_version,
                    best_candidate.version,
                )
            return None

        if best_installed:
            # We have an existing version, and it's the best version
            logger.debug(
                'Installed version (%s) is most up-to-date (past versions: '
                '%s)',
                installed_version,
                ', '.join(sorted(compatible_versions, key=parse_version)) or
                "none",
            )
            raise BestVersionAlreadyInstalled

        logger.debug(
            'Using version %s (newest of versions: %s)',
            best_candidate.version,
            ', '.join(sorted(compatible_versions, key=parse_version))
        )
        return best_candidate.location

    def _get_pages(self, locations, project_name):
        """
        Yields HTMLPage objects for the given locations, skipping
        duplicates and locations that have errors.
        """
        # NOTE: project_name is currently unused here.
        seen = set()
        for location in locations:
            if location in seen:
                continue
            seen.add(location)

            page = self._get_page(location)
            if page is None:
                continue

            yield page

    # Matches a trailing -pyX[.Y] marker in an egg/file name.
    _py_version_re = re.compile(r'-py([123]\.?[0-9]?)$')

    def _sort_links(self, links):
        """
        Returns elements of links in order, non-egg links first, egg links
        second, while eliminating duplicates
        """
        eggs, no_eggs = [], []
        seen = set()
        for link in links:
            if link not in seen:
                seen.add(link)
                if link.egg_fragment:
                    eggs.append(link)
                else:
                    no_eggs.append(link)
        return no_eggs + eggs

    def _package_versions(self, links, search):
        """Return the InstallationCandidates parsed from ``links``."""
        result = []
        for link in self._sort_links(links):
            v = self._link_package_versions(link, search)
            if v is not None:
                result.append(v)
        return result

    def _log_skipped_link(self, link, reason):
        """Debug-log a skipped link once; repeats are suppressed."""
        if link not in self.logged_links:
            logger.debug('Skipping link %s; %s', link, reason)
            self.logged_links.add(link)

    def _link_package_versions(self, link, search):
        """Return an InstallationCandidate or None"""
        version = None
        if link.egg_fragment:
            egg_info = link.egg_fragment
            ext = link.ext
        else:
            egg_info, ext = link.splitext()
            if not ext:
                self._log_skipped_link(link, 'not a file')
                return
            if ext not in SUPPORTED_EXTENSIONS:
                self._log_skipped_link(
                    link, 'unsupported archive format: %s' % ext,
                )
                return
            if "binary" not in search.formats and ext == wheel_ext:
                self._log_skipped_link(
                    link, 'No binaries permitted for %s' % search.supplied,
                )
                return
            if "macosx10" in link.path and ext == '.zip':
                self._log_skipped_link(link, 'macosx10 one')
                return
            if ext == wheel_ext:
                try:
                    wheel = Wheel(link.filename)
                except InvalidWheelFilename:
                    self._log_skipped_link(link, 'invalid wheel filename')
                    return
                if canonicalize_name(wheel.name) != search.canonical:
                    self._log_skipped_link(
                        link, 'wrong project name (not %s)' % search.supplied)
                    return

                if not wheel.supported(self.valid_tags):
                    self._log_skipped_link(
                        link, 'it is not compatible with this Python')
                    return

                version = wheel.version

        # This should be up by the search.ok_binary check, but see issue 2700.
        if "source" not in search.formats and ext != wheel_ext:
            self._log_skipped_link(
                link, 'No sources permitted for %s' % search.supplied,
            )
            return

        if not version:
            version = egg_info_matches(egg_info, search.supplied, link)
        if version is None:
            self._log_skipped_link(
                link, 'wrong project name (not %s)' % search.supplied)
            return

        match = self._py_version_re.search(version)
        if match:
            version = version[:match.start()]
            py_version = match.group(1)
            if py_version != sys.version[:3]:
                self._log_skipped_link(
                    link, 'Python version is incorrect')
                return
        try:
            support_this_python = check_requires_python(link.requires_python)
        except specifiers.InvalidSpecifier:
            logger.debug("Package %s has an invalid Requires-Python entry: %s",
                         link.filename, link.requires_python)
            support_this_python = True

        if not support_this_python:
            # NOTE(review): the adjacent literals below concatenate to
            # "...pythonversion..." — a missing space in the log message.
            logger.debug("The package %s is incompatible with the python"
                         "version in use. Acceptable python versions are:%s",
                         link, link.requires_python)
            return
        logger.debug('Found link %s, version: %s', link, version)

        return InstallationCandidate(search.supplied, version, link)

    def _get_page(self, link):
        """Fetch and parse the index page at ``link``; None on failure."""
        return HTMLPage.get_page(link, session=self.session)
706
707
def egg_info_matches(
        egg_info, search_name, link,
        _egg_info_re=re.compile(r'([a-z0-9_.]+)-([a-z0-9_.!+-]+)', re.I)):
    """Pull the version part out of a string.

    :param egg_info: The string to parse. E.g. foo-2.1
    :param search_name: The name of the package this belongs to. None to
        infer the name. Note that this cannot unambiguously parse strings
        like foo-2-2 which might be foo, 2-2 or foo-2, 2.
    :param link: The link the string came from, for logging on failure.
    """
    match = _egg_info_re.search(egg_info)
    if match is None:
        logger.debug('Could not parse version from link: %s', link)
        return None
    full_match = match.group(0)
    if search_name is None:
        # No expected name given: everything from the first dash on is
        # treated as the version part.
        return full_match[full_match.index('-'):]
    # Compare against the "safe" form pkg_resources creates (underscores
    # become dashes, case-insensitive).
    normalized = full_match.lower().replace('_', '-')
    # project name and version must be separated by a dash
    wanted_prefix = search_name.lower() + "-"
    if normalized.startswith(wanted_prefix):
        # Slice the original (non-normalized) text so the version keeps
        # its original characters.
        return full_match[len(wanted_prefix):]
    return None
735
736
class HTMLPage(object):
    """Represents one page, along with its URL"""

    def __init__(self, content, url, headers=None):
        """
        :param content: The raw response body for the page.
        :param url: The URL the page was fetched from.
        :param headers: Optional response headers; a Content-Type charset,
            when present, is passed to html5lib as the transport encoding.
        """
        # Determine if we have any encoding information in our headers
        encoding = None
        if headers and "Content-Type" in headers:
            content_type, params = cgi.parse_header(headers["Content-Type"])

            if "charset" in params:
                encoding = params['charset']

        self.content = content
        self.parsed = html5lib.parse(
            self.content,
            transport_encoding=encoding,
            namespaceHTMLElements=False,
        )
        self.url = url
        self.headers = headers

    def __str__(self):
        return self.url

    @classmethod
    def get_page(cls, link, skip_archives=True, session=None):
        """Fetch ``link`` and return an HTMLPage for it.

        Returns None (after logging) for VCS URLs, non-HTML content,
        and any request failure.

        :raises TypeError: if ``session`` is None.
        """
        if session is None:
            raise TypeError(
                "get_page() missing 1 required keyword argument: 'session'"
            )

        url = link.url
        # Drop any fragment before fetching.
        url = url.split('#', 1)[0]

        # Check for VCS schemes that do not support lookup as web pages.
        from pip._internal.vcs import VcsSupport
        for scheme in VcsSupport.schemes:
            if url.lower().startswith(scheme) and url[len(scheme)] in '+:':
                logger.debug('Cannot look at %s URL %s', scheme, link)
                return None

        try:
            if skip_archives:
                # Avoid downloading archives: HEAD the URL first when its
                # name looks like an archive, and bail unless it is HTML.
                filename = link.filename
                for bad_ext in ARCHIVE_EXTENSIONS:
                    if filename.endswith(bad_ext):
                        content_type = cls._get_content_type(
                            url, session=session,
                        )
                        if content_type.lower().startswith('text/html'):
                            break
                        else:
                            logger.debug(
                                'Skipping page %s because of Content-Type: %s',
                                link,
                                content_type,
                            )
                            return

            logger.debug('Getting page %s', url)

            # Tack index.html onto file:// URLs that point to directories
            (scheme, netloc, path, params, query, fragment) = \
                urllib_parse.urlparse(url)
            if (scheme == 'file' and
                    os.path.isdir(urllib_request.url2pathname(path))):
                # add trailing slash if not present so urljoin doesn't trim
                # final segment
                if not url.endswith('/'):
                    url += '/'
                url = urllib_parse.urljoin(url, 'index.html')
                logger.debug(' file: URL is directory, getting %s', url)

            resp = session.get(
                url,
                headers={
                    "Accept": "text/html",
                    "Cache-Control": "max-age=600",
                },
            )
            resp.raise_for_status()

            # The check for archives above only works if the url ends with
            # something that looks like an archive. However that is not a
            # requirement of an url. Unless we issue a HEAD request on every
            # url we cannot know ahead of time for sure if something is HTML
            # or not. However we can check after we've downloaded it.
            content_type = resp.headers.get('Content-Type', 'unknown')
            if not content_type.lower().startswith("text/html"):
                logger.debug(
                    'Skipping page %s because of Content-Type: %s',
                    link,
                    content_type,
                )
                return

            inst = cls(resp.content, resp.url, resp.headers)
        except requests.HTTPError as exc:
            cls._handle_fail(link, exc, url)
        except SSLError as exc:
            reason = "There was a problem confirming the ssl certificate: "
            reason += str(exc)
            cls._handle_fail(link, reason, url, meth=logger.info)
        except requests.ConnectionError as exc:
            cls._handle_fail(link, "connection error: %s" % exc, url)
        except requests.Timeout:
            cls._handle_fail(link, "timed out", url)
        else:
            # Only reached when no exception occurred above.
            return inst

    @staticmethod
    def _handle_fail(link, reason, url, meth=None):
        # meth lets callers raise the log level (e.g. logger.info for SSL).
        if meth is None:
            meth = logger.debug

        meth("Could not fetch URL %s: %s - skipping", link, reason)

    @staticmethod
    def _get_content_type(url, session):
        """Get the Content-Type of the given url, using a HEAD request"""
        scheme, netloc, path, query, fragment = urllib_parse.urlsplit(url)
        if scheme not in {'http', 'https'}:
            # FIXME: some warning or something?
            # assertion error?
            return ''

        resp = session.head(url, allow_redirects=True)
        resp.raise_for_status()

        return resp.headers.get("Content-Type", "")

    @cached_property
    def base_url(self):
        # Honor a <base href="..."> tag when present; else the page URL.
        bases = [
            x for x in self.parsed.findall(".//base")
            if x.get("href") is not None
        ]
        if bases and bases[0].get("href"):
            return bases[0].get("href")
        else:
            return self.url

    @property
    def links(self):
        """Yields all links in the page"""
        for anchor in self.parsed.findall(".//a"):
            if anchor.get("href"):
                href = anchor.get("href")
                url = self.clean_link(
                    urllib_parse.urljoin(self.base_url, href)
                )
                # PEP 503: data-requires-python carries HTML-escaped
                # Requires-Python metadata.
                pyrequire = anchor.get('data-requires-python')
                pyrequire = unescape(pyrequire) if pyrequire else None
                yield Link(url, self, requires_python=pyrequire)

    # Characters NOT in this set get percent-encoded by clean_link.
    _clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I)

    def clean_link(self, url):
        """Makes sure a link is fully encoded. That is, if a ' ' shows up in
        the link, it will be rewritten to %20 (while not over-quoting
        % or other characters)."""
        # NOTE(review): '%2x' pads with a space (not zero) for ordinals
        # below 0x10; such control characters are unlikely in URLs but the
        # resulting escape would be malformed — confirm before relying on it.
        return self._clean_re.sub(
            lambda match: '%%%2x' % ord(match.group(0)), url)
900
901
class Link(object):
    """A parsed link from an index page (or a directly supplied URL), with
    helpers for extracting its filename, fragments and embedded hash."""

    # Fragment / hash extraction patterns, compiled once per class.
    _egg_fragment_re = re.compile(r'[#&]egg=([^&]*)')
    _subdirectory_fragment_re = re.compile(r'[#&]subdirectory=([^&]*)')
    _hash_re = re.compile(
        r'(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)'
    )

    def __init__(self, url, comes_from=None, requires_python=None):
        """
        Object representing a parsed link from https://pypi.org/simple/*

        url:
            url of the resource pointed to (href of the link)
        comes_from:
            instance of HTMLPage where the link was found, or string.
        requires_python:
            String containing the `Requires-Python` metadata field, specified
            in PEP 345. This may be specified by a data-requires-python
            attribute in the HTML link tag, as described in PEP 503.
        """
        # url can be a UNC windows share
        if url.startswith('\\\\'):
            url = path_to_url(url)

        self.url = url
        self.comes_from = comes_from
        # Normalize falsy values (e.g. empty string) to None.
        self.requires_python = requires_python or None

    def __str__(self):
        rp = (' (requires-python:%s)' % self.requires_python
              if self.requires_python else '')
        if self.comes_from:
            return '%s (from %s)%s' % (self.url, self.comes_from, rp)
        return str(self.url)

    def __repr__(self):
        return '<Link %s>' % self

    # Links hash, compare and order purely by their URL string.

    def __eq__(self, other):
        if isinstance(other, Link):
            return self.url == other.url
        return NotImplemented

    def __ne__(self, other):
        if isinstance(other, Link):
            return self.url != other.url
        return NotImplemented

    def __lt__(self, other):
        if isinstance(other, Link):
            return self.url < other.url
        return NotImplemented

    def __le__(self, other):
        if isinstance(other, Link):
            return self.url <= other.url
        return NotImplemented

    def __gt__(self, other):
        if isinstance(other, Link):
            return self.url > other.url
        return NotImplemented

    def __ge__(self, other):
        if isinstance(other, Link):
            return self.url >= other.url
        return NotImplemented

    def __hash__(self):
        return hash(self.url)

    @property
    def filename(self):
        """Last path segment of the URL (unquoted), or the netloc."""
        parts = urllib_parse.urlsplit(self.url)
        name = urllib_parse.unquote(
            posixpath.basename(parts.path.rstrip('/')) or parts.netloc
        )
        assert name, ('URL %r produced no filename' % self.url)
        return name

    @property
    def scheme(self):
        return urllib_parse.urlsplit(self.url).scheme

    @property
    def netloc(self):
        return urllib_parse.urlsplit(self.url).netloc

    @property
    def path(self):
        return urllib_parse.unquote(urllib_parse.urlsplit(self.url).path)

    def splitext(self):
        return splitext(posixpath.basename(self.path.rstrip('/')))

    @property
    def ext(self):
        return self.splitext()[1]

    @property
    def url_without_fragment(self):
        parts = urllib_parse.urlsplit(self.url)
        # Rebuild with the fragment dropped.
        return urllib_parse.urlunsplit(parts[:4] + (None,))

    @property
    def egg_fragment(self):
        match = self._egg_fragment_re.search(self.url)
        return match.group(1) if match else None

    @property
    def subdirectory_fragment(self):
        match = self._subdirectory_fragment_re.search(self.url)
        return match.group(1) if match else None

    @property
    def hash(self):
        """Hex digest embedded in the URL, if any."""
        match = self._hash_re.search(self.url)
        return match.group(2) if match else None

    @property
    def hash_name(self):
        """Name of the hash algorithm embedded in the URL, if any."""
        match = self._hash_re.search(self.url)
        return match.group(1) if match else None

    @property
    def show_url(self):
        base = self.url.split('#', 1)[0].split('?', 1)[0]
        return posixpath.basename(base)

    @property
    def is_wheel(self):
        return self.ext == wheel_ext

    @property
    def is_artifact(self):
        """
        Determines if this points to an actual artifact (e.g. a tarball) or if
        it points to an "abstract" thing like a path or a VCS location.
        """
        from pip._internal.vcs import vcs

        return self.scheme not in vcs.all_schemes
1060
1061
# Both fields are sets of canonical package names, optionally containing the
# ':all:' sentinel (see fmt_ctl_handle_mutual_exclude / fmt_ctl_formats).
FormatControl = namedtuple('FormatControl', 'no_binary only_binary')
"""This object has two fields, no_binary and only_binary.

If a field is falsy, it isn't set. If it is {':all:'}, it should match all
packages except those listed in the other field. Only one field can be set
to {':all:'} at a time. The rest of the time exact package name matches
are listed, with any given package only showing up in one field at a time.
"""
1070
1071
def fmt_ctl_handle_mutual_exclude(value, target, other):
    """Apply a comma-separated option value to the mutually exclusive sets
    *target* and *other*.

    ':all:' resets both sets and marks *target* as matching everything;
    ':none:' clears *target*; any other entry is canonicalized, removed
    from *other* and added to *target*.
    """
    names = value.split(',')
    while ':all:' in names:
        other.clear()
        target.clear()
        target.add(':all:')
        # Drop everything up to and including the ':all:' marker.
        del names[:names.index(':all:') + 1]
        if ':none:' not in names:
            # Without a none, we want to discard everything as :all: covers it
            return
    for name in names:
        if name == ':none:':
            target.clear()
        else:
            name = canonicalize_name(name)
            other.discard(name)
            target.add(name)
1089
1090
def fmt_ctl_formats(fmt_ctl, canonical_name):
    """Return the frozenset of formats ('binary'/'source') permitted for
    *canonical_name* under *fmt_ctl*.

    An exact package-name entry takes precedence over a ':all:' wildcard,
    and only_binary wins over no_binary at each level.
    """
    both = {"binary", "source"}
    checks = ((fmt_ctl.only_binary, 'source'), (fmt_ctl.no_binary, 'binary'))
    for key in (canonical_name, ':all:'):
        for control, excluded in checks:
            if key in control:
                return frozenset(both - {excluded})
    return frozenset(both)
1102
1103
def fmt_ctl_no_binary(fmt_ctl):
    """Mark *fmt_ctl* as disallowing binaries for every package."""
    fmt_ctl_handle_mutual_exclude(
        ':all:',
        fmt_ctl.no_binary,
        fmt_ctl.only_binary,
    )
1108
1109
# Lightweight record passed through the candidate-filtering pipeline.
Search = namedtuple('Search', 'supplied canonical formats')
"""Capture key aspects of a search.

:attribute supplied: The user supplied package.
:attribute canonical: The canonical package name.
:attribute formats: The formats allowed for this package. Should be a set
    with 'binary' or 'source' or both in it.
"""
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/locations.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/locations.py
new file mode 100644
index 0000000..ce8f7e9
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/locations.py
@@ -0,0 +1,194 @@
1"""Locations where we look for configs, install stuff, etc"""
2from __future__ import absolute_import
3
4import os
5import os.path
6import platform
7import site
8import sys
9import sysconfig
10from distutils import sysconfig as distutils_sysconfig
11from distutils.command.install import SCHEME_KEYS, install # type: ignore
12
13from pip._internal.compat import WINDOWS, expanduser
14from pip._internal.utils import appdirs
15
# Application Directories
# Root of pip's per-user cache (wheel cache, HTTP cache, ...).
USER_CACHE_DIR = appdirs.user_cache_dir("pip")


# Text written into the marker file that flags an unpacked source tree as
# pip-owned (and therefore safe for pip to delete after installation).
DELETE_MARKER_MESSAGE = '''\
This file is placed here by pip to indicate the source was put
here by pip.

Once this package is successfully installed this source code will be
deleted (unless you remove this file).
'''
# File name used for the marker written by write_delete_marker_file().
PIP_DELETE_MARKER_FILENAME = 'pip-delete-this-directory.txt'
28
29
def write_delete_marker_file(directory):
    """Write the pip delete marker file into *directory*.

    The marker tells later pip runs (and curious users) that the
    directory contents were unpacked by pip and may be deleted.
    """
    marker_path = os.path.join(directory, PIP_DELETE_MARKER_FILENAME)
    with open(marker_path, 'w') as marker_fp:
        marker_fp.write(DELETE_MARKER_MESSAGE)
37
38
def running_under_virtualenv():
    """Return True if we're running inside a virtualenv, False otherwise."""
    # Classic virtualenv sets sys.real_prefix; PEP 405 venvs are detected
    # by sys.base_prefix differing from sys.prefix.
    return bool(
        hasattr(sys, 'real_prefix') or
        sys.prefix != getattr(sys, "base_prefix", sys.prefix)
    )
50
51
def virtualenv_no_global():
    """
    Return True if in a venv and no system site packages.
    """
    # this mirrors the logic in virtualenv.py for locating the
    # no-global-site-packages.txt file
    site_mod_dir = os.path.dirname(os.path.abspath(site.__file__))
    no_global_file = os.path.join(site_mod_dir, 'no-global-site-packages.txt')
    # Fix: return an explicit False instead of falling off the end and
    # returning None (both are falsy, so callers are unaffected, but the
    # function now honours its documented True/False contract).
    return running_under_virtualenv() and os.path.isfile(no_global_file)
62
63
# Where 'pip install -e' checkouts get placed: <sys.prefix>/src inside a
# virtualenv, otherwise ./src under the current working directory.
if running_under_virtualenv():
    src_prefix = os.path.join(sys.prefix, 'src')
else:
    # FIXME: keep src in cwd for now (it is not a temporary folder)
    try:
        src_prefix = os.path.join(os.getcwd(), 'src')
    except OSError:
        # In case the current working directory has been renamed or deleted
        sys.exit(
            "The folder you are executing pip from can no longer be found."
        )

# under macOS + virtualenv sys.prefix is not properly resolved
# it is something like /path/to/python/bin/..
# Note: using realpath due to tmp dirs on OSX being symlinks
src_prefix = os.path.abspath(src_prefix)

# FIXME doesn't account for venv linked to global site-packages

site_packages = sysconfig.get_path("purelib")
# This is because of a bug in PyPy's sysconfig module, see
# https://bitbucket.org/pypy/pypy/issues/2506/sysconfig-returns-incorrect-paths
# for more information.
if platform.python_implementation().lower() == "pypy":
    site_packages = distutils_sysconfig.get_python_lib()
try:
    # Use getusersitepackages if this is present, as it ensures that the
    # value is initialised properly.
    user_site = site.getusersitepackages()
except AttributeError:
    # Older interpreters without site.getusersitepackages().
    user_site = site.USER_SITE
user_dir = expanduser('~')
# Per-platform script directories and configuration-file names/locations.
if WINDOWS:
    bin_py = os.path.join(sys.prefix, 'Scripts')
    bin_user = os.path.join(user_site, 'Scripts')
    # buildout uses 'bin' on Windows too?
    if not os.path.exists(bin_py):
        bin_py = os.path.join(sys.prefix, 'bin')
        bin_user = os.path.join(user_site, 'bin')

    config_basename = 'pip.ini'

    # Older per-user config location (kept, presumably for backward
    # compatibility with configs written by earlier pip versions).
    legacy_storage_dir = os.path.join(user_dir, 'pip')
    legacy_config_file = os.path.join(
        legacy_storage_dir,
        config_basename,
    )
else:
    bin_py = os.path.join(sys.prefix, 'bin')
    bin_user = os.path.join(user_site, 'bin')

    config_basename = 'pip.conf'

    # Older per-user config location (kept, presumably for backward
    # compatibility with configs written by earlier pip versions).
    legacy_storage_dir = os.path.join(user_dir, '.pip')
    legacy_config_file = os.path.join(
        legacy_storage_dir,
        config_basename,
    )
    # Forcing to use /usr/local/bin for standard macOS framework installs
    # Also log to ~/Library/Logs/ for use with the Console.app log viewer
    if sys.platform[:6] == 'darwin' and sys.prefix[:16] == '/System/Library/':
        bin_py = '/usr/local/bin'

# System-wide config files: one candidate per site config directory.
site_config_files = [
    os.path.join(path, config_basename)
    for path in appdirs.site_config_dirs('pip')
]

# Per-virtualenv and current per-user config file locations.
venv_config_file = os.path.join(sys.prefix, config_basename)
new_config_file = os.path.join(appdirs.user_config_dir("pip"), config_basename)
134
135
def distutils_scheme(dist_name, user=False, home=None, root=None,
                     isolated=False, prefix=None):
    """
    Return a distutils install scheme

    :param dist_name: distribution name, used for the per-project headers
        directory inside a virtualenv.
    :param user: use the per-user scheme (mutually exclusive with ``prefix``).
    :param home: distutils ``--home`` value.
    :param root: when not None, re-roots the 'headers' path under it.
    :param isolated: skip the user's distutils configuration
        (``--no-user-cfg``).
    :param prefix: distutils ``--prefix`` value.
    :return: dict mapping each entry of distutils SCHEME_KEYS (with
        'headers' possibly adjusted) to an install path.
    """
    from distutils.dist import Distribution

    scheme = {}

    if isolated:
        extra_dist_args = {"script_args": ["--no-user-cfg"]}
    else:
        extra_dist_args = {}
    dist_args = {'name': dist_name}
    dist_args.update(extra_dist_args)

    d = Distribution(dist_args)
    # Honour setup.cfg / distutils config overrides.
    d.parse_config_files()
    i = d.get_command_obj('install', create=True)
    # NOTE: setting user or home has the side-effect of creating the home dir
    # or user base for installations during finalize_options()
    # ideally, we'd prefer a scheme class that has no side-effects.
    assert not (user and prefix), "user={} prefix={}".format(user, prefix)
    i.user = user or i.user
    if user:
        i.prefix = ""
    i.prefix = prefix or i.prefix
    i.home = home or i.home
    i.root = root or i.root
    i.finalize_options()
    for key in SCHEME_KEYS:
        scheme[key] = getattr(i, 'install_' + key)

    # install_lib specified in setup.cfg should install *everything*
    # into there (i.e. it takes precedence over both purelib and
    # platlib). Note, i.install_lib is *always* set after
    # finalize_options(); we only want to override here if the user
    # has explicitly requested it hence going back to the config
    if 'install_lib' in d.get_option_dict('install'):
        scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib))

    if running_under_virtualenv():
        # Virtualenvs get a per-project headers dir under sys.prefix.
        scheme['headers'] = os.path.join(
            sys.prefix,
            'include',
            'site',
            'python' + sys.version[:3],
            dist_name,
        )

        if root is not None:
            # Re-root the headers path, dropping any drive (Windows).
            path_no_drive = os.path.splitdrive(
                os.path.abspath(scheme["headers"]))[1]
            scheme["headers"] = os.path.join(
                root,
                path_no_drive[1:],
            )

    return scheme
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/models/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/models/__init__.py
new file mode 100644
index 0000000..2d080a4
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/models/__init__.py
@@ -0,0 +1,4 @@
1from pip._internal.models.index import Index, PyPI
2
3
4__all__ = ["Index", "PyPI"]
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/models/index.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/models/index.py
new file mode 100644
index 0000000..161de50
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/models/index.py
@@ -0,0 +1,15 @@
1from pip._vendor.six.moves.urllib import parse as urllib_parse
2
3
class Index(object):
    """A package index and the well-known URLs derived from its root URL."""

    def __init__(self, url):
        self.url = url
        split_result = urllib_parse.urlsplit(url)
        self.netloc = split_result.netloc
        # Pre-compute the two endpoints pip talks to.
        self.simple_url = self.url_to_path('simple')
        self.pypi_url = self.url_to_path('pypi')

    def url_to_path(self, path):
        """Return *path* resolved against the index root URL."""
        return urllib_parse.urljoin(self.url, path)


PyPI = Index('https://pypi.org/')
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/__init__.py
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/check.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/check.py
new file mode 100644
index 0000000..bab6b9f
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/check.py
@@ -0,0 +1,106 @@
1"""Validation of dependencies of packages
2"""
3
4from collections import namedtuple
5
6from pip._vendor.packaging.utils import canonicalize_name
7
8from pip._internal.operations.prepare import make_abstract_dist
9
10from pip._internal.utils.misc import get_installed_distributions
11from pip._internal.utils.typing import MYPY_CHECK_RUNNING
12
13if MYPY_CHECK_RUNNING:
14 from pip._internal.req.req_install import InstallRequirement
15 from typing import Any, Dict, Iterator, Set, Tuple, List
16
17 # Shorthands
18 PackageSet = Dict[str, 'PackageDetails']
19 Missing = Tuple[str, Any]
20 Conflicting = Tuple[str, str, Any]
21
22 MissingDict = Dict[str, List[Missing]]
23 ConflictingDict = Dict[str, List[Conflicting]]
24 CheckResult = Tuple[MissingDict, ConflictingDict]
25
# (version, requires) pair describing one installed/installable package.
PackageDetails = namedtuple('PackageDetails', 'version requires')
27
28
def create_package_set_from_installed(**kwargs):
    # type: (**Any) -> PackageSet
    """Build a PackageSet from the currently installed distributions.

    Keyword arguments are forwarded to get_installed_distributions();
    with no arguments, every package on the system is considered.
    """
    if not kwargs:
        # Default to using all packages installed on the system
        kwargs = {"local_only": False, "skip": ()}
    package_set = {}
    for dist in get_installed_distributions(**kwargs):
        name = canonicalize_name(dist.project_name)
        package_set[name] = PackageDetails(dist.version, dist.requires())
    return package_set
41
42
def check_package_set(package_set):
    # type: (PackageSet) -> CheckResult
    """Check *package_set* for unmet and conflicting dependencies.

    Returns a (missing, conflicting) pair of dicts keyed by the
    depending package's name, each value sorted by string form.
    """
    missing = {}
    conflicting = {}

    for package_name, details in package_set.items():
        # Problems found for the dependencies of package_name.
        missing_deps = set()  # type: Set[Missing]
        conflicting_deps = set()  # type: Set[Conflicting]

        for req in details.requires:
            name = canonicalize_name(req.project_name)  # type: str

            if name not in package_set:
                # Dependency absent: only report it when its environment
                # marker (if any) applies to this environment.
                if req.marker is not None:
                    applies = req.marker.evaluate()
                else:
                    applies = True
                if applies:
                    missing_deps.add((name, req))
                continue

            # Dependency present: flag it when the installed version
            # falls outside the requirement's specifier.
            installed_version = package_set[name].version  # type: str
            if not req.specifier.contains(installed_version,
                                          prereleases=True):
                conflicting_deps.add((name, installed_version, req))

        if missing_deps:
            missing[package_name] = sorted(missing_deps, key=str)
        if conflicting_deps:
            conflicting[package_name] = sorted(conflicting_deps, key=str)

    return missing, conflicting
81
82
def check_install_conflicts(to_install):
    # type: (List[InstallRequirement]) -> Tuple[PackageSet, CheckResult]
    """For checking if the dependency graph would be consistent after \
    installing given requirements
    """
    # Start from what is installed now, overlay the candidate installs,
    # then validate the combined set.
    package_set = create_package_set_from_installed()
    _simulate_installation_of(to_install, package_set)
    return package_set, check_package_set(package_set)
92
93
# NOTE from @pradyunsg
# This required a minor update in dependency link handling logic over at
# operations.prepare.IsSDist.dist() to get it working
def _simulate_installation_of(to_install, state):
    # type: (List[InstallRequirement], PackageSet) -> None
    """Mutate *state* to the versions installing *to_install* would produce
    (assuming no errors)."""
    for inst_req in to_install:
        abstract_dist = make_abstract_dist(inst_req)
        dist = abstract_dist.dist(finder=None)
        state[canonicalize_name(dist.key)] = PackageDetails(
            dist.version, dist.requires(),
        )
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/freeze.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/freeze.py
new file mode 100644
index 0000000..000102d
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/freeze.py
@@ -0,0 +1,252 @@
1from __future__ import absolute_import
2
3import collections
4import logging
5import os
6import re
7import warnings
8
9from pip._vendor import pkg_resources, six
10from pip._vendor.packaging.utils import canonicalize_name
11from pip._vendor.pkg_resources import RequirementParseError
12
13from pip._internal.exceptions import InstallationError
14from pip._internal.req import InstallRequirement
15from pip._internal.req.req_file import COMMENT_RE
16from pip._internal.utils.deprecation import RemovedInPip11Warning
17from pip._internal.utils.misc import (
18 dist_is_editable, get_installed_distributions,
19)
20
21logger = logging.getLogger(__name__)
22
23
def freeze(
        requirement=None,
        find_links=None, local_only=None, user_only=None, skip_regex=None,
        isolated=False,
        wheel_cache=None,
        exclude_editable=False,
        skip=()):
    """Yield the lines of a ``pip freeze`` style requirements listing.

    :param requirement: optional list of requirement-file paths; when given,
        output mirrors the layout of those files, followed by a section for
        everything installed but not mentioned in them.
    :param find_links: extra links; each is emitted as a ``-f`` line and
        scanned for ``#egg=`` dependency links.
    :param local_only: forwarded to get_installed_distributions().
    :param user_only: forwarded to get_installed_distributions().
    :param skip_regex: requirement-file lines matching this regex are echoed
        (deduplicated) instead of being parsed.
    :param isolated: forwarded to InstallRequirement construction.
    :param wheel_cache: forwarded to InstallRequirement construction.
    :param exclude_editable: drop editable (``-e``) installs from the output.
    :param skip: canonical names to omit from the trailing section.
    """
    find_links = find_links or []
    skip_match = None

    if skip_regex:
        skip_match = re.compile(skip_regex).search

    # Collect dependency links from installed metadata and from any
    # find_links entry that pins an egg name.
    dependency_links = []

    for dist in pkg_resources.working_set:
        if dist.has_metadata('dependency_links.txt'):
            dependency_links.extend(
                dist.get_metadata_lines('dependency_links.txt')
            )
    for link in find_links:
        if '#egg=' in link:
            dependency_links.append(link)
    for link in find_links:
        yield '-f %s' % link
    # Map requirement name -> FrozenRequirement for everything installed.
    installations = {}
    for dist in get_installed_distributions(local_only=local_only,
                                            skip=(),
                                            user_only=user_only):
        try:
            req = FrozenRequirement.from_dist(
                dist,
                dependency_links
            )
        except RequirementParseError:
            logger.warning(
                "Could not parse requirement: %s",
                dist.project_name
            )
            continue
        if exclude_editable and req.editable:
            continue
        installations[req.name] = req

    if requirement:
        # the options that don't get turned into an InstallRequirement
        # should only be emitted once, even if the same option is in multiple
        # requirements files, so we need to keep track of what has been emitted
        # so that we don't emit it again if it's seen again
        emitted_options = set()
        # keep track of which files a requirement is in so that we can
        # give an accurate warning if a requirement appears multiple times.
        req_files = collections.defaultdict(list)
        for req_file_path in requirement:
            with open(req_file_path) as req_file:
                for line in req_file:
                    if (not line.strip() or
                            line.strip().startswith('#') or
                            (skip_match and skip_match(line)) or
                            line.startswith((
                                '-r', '--requirement',
                                '-Z', '--always-unzip',
                                '-f', '--find-links',
                                '-i', '--index-url',
                                '--pre',
                                '--trusted-host',
                                '--process-dependency-links',
                                '--extra-index-url'))):
                        # Option/comment/blank line: echo once, verbatim.
                        line = line.rstrip()
                        if line not in emitted_options:
                            emitted_options.add(line)
                            yield line
                        continue

                    if line.startswith('-e') or line.startswith('--editable'):
                        if line.startswith('-e'):
                            line = line[2:].strip()
                        else:
                            line = line[len('--editable'):].strip().lstrip('=')
                        line_req = InstallRequirement.from_editable(
                            line,
                            isolated=isolated,
                            wheel_cache=wheel_cache,
                        )
                    else:
                        line_req = InstallRequirement.from_line(
                            COMMENT_RE.sub('', line).strip(),
                            isolated=isolated,
                            wheel_cache=wheel_cache,
                        )

                    if not line_req.name:
                        logger.info(
                            "Skipping line in requirement file [%s] because "
                            "it's not clear what it would install: %s",
                            req_file_path, line.strip(),
                        )
                        logger.info(
                            " (add #egg=PackageName to the URL to avoid"
                            " this warning)"
                        )
                    elif line_req.name not in installations:
                        # either it's not installed, or it is installed
                        # but has been processed already
                        if not req_files[line_req.name]:
                            logger.warning(
                                "Requirement file [%s] contains %s, but that "
                                "package is not installed",
                                req_file_path,
                                COMMENT_RE.sub('', line).strip(),
                            )
                        else:
                            req_files[line_req.name].append(req_file_path)
                    else:
                        yield str(installations[line_req.name]).rstrip()
                        del installations[line_req.name]
                        req_files[line_req.name].append(req_file_path)

        # Warn about requirements that were included multiple times (in a
        # single requirements file or in different requirements files).
        for name, files in six.iteritems(req_files):
            if len(files) > 1:
                logger.warning("Requirement %s included multiple times [%s]",
                               name, ', '.join(sorted(set(files))))

        yield(
            '## The following requirements were added by '
            'pip freeze:'
        )
    # Everything installed but not consumed above, alphabetically.
    for installation in sorted(
            installations.values(), key=lambda x: x.name.lower()):
        if canonicalize_name(installation.name) not in skip:
            yield str(installation).rstrip()
157
158
class FrozenRequirement(object):
    """One entry of ``pip freeze`` output: a requirement string, an
    editable flag, and any explanatory ``#`` comment lines to emit
    before it."""

    def __init__(self, name, req, editable, comments=()):
        self.name = name
        self.req = req
        self.editable = editable
        self.comments = comments

    # Patterns matching svn-style revision suffixes in a version string.
    _rev_re = re.compile(r'-r(\d+)$')
    _date_re = re.compile(r'-(20\d\d\d\d\d\d)$')

    @classmethod
    def from_dist(cls, dist, dependency_links):
        """Build a FrozenRequirement from an installed distribution.

        Editable VCS checkouts are frozen as their source requirement;
        anything else falls back to ``name==version``, with an extra SVN
        dependency-link heuristic for -rNNN / -YYYYMMDD versions.
        """
        location = os.path.normcase(os.path.abspath(dist.location))
        comments = []
        from pip._internal.vcs import vcs, get_src_requirement
        if dist_is_editable(dist) and vcs.get_backend_name(location):
            editable = True
            try:
                req = get_src_requirement(dist, location)
            except InstallationError as exc:
                logger.warning(
                    "Error when trying to get requirement for VCS system %s, "
                    "falling back to uneditable format", exc
                )
                req = None
            if req is None:
                logger.warning(
                    'Could not determine repository location of %s', location
                )
                comments.append(
                    '## !! Could not determine repository location'
                )
                req = dist.as_requirement()
                editable = False
        else:
            editable = False
            req = dist.as_requirement()
            specs = req.specs
            assert len(specs) == 1 and specs[0][0] in ["==", "==="], \
                'Expected 1 spec with == or ===; specs = %r; dist = %r' % \
                (specs, dist)
            version = specs[0][1]
            ver_match = cls._rev_re.search(version)
            date_match = cls._date_re.search(version)
            if ver_match or date_match:
                svn_backend = vcs.get_backend('svn')
                if svn_backend:
                    svn_location = svn_backend().get_location(
                        dist,
                        dependency_links,
                    )
                if not svn_location:
                    logger.warning(
                        'Warning: cannot find svn location for %s', req,
                    )
                    comments.append(
                        '## FIXME: could not find svn URL in dependency_links '
                        'for this package:'
                    )
                else:
                    warnings.warn(
                        "SVN editable detection based on dependency links "
                        "will be dropped in the future.",
                        RemovedInPip11Warning,
                    )
                    comments.append(
                        '# Installing as editable to satisfy requirement %s:' %
                        req
                    )
                    if ver_match:
                        rev = ver_match.group(1)
                    else:
                        rev = '{%s}' % date_match.group(1)
                    editable = True
                    req = '%s@%s#egg=%s' % (
                        svn_location,
                        rev,
                        cls.egg_name(dist)
                    )
        return cls(dist.project_name, req, editable, comments)

    @staticmethod
    def egg_name(dist):
        """Return the dist's egg name with any trailing -pyX.Y stripped."""
        name = dist.egg_name()
        match = re.search(r'-py\d\.\d$', name)
        if match:
            name = name[:match.start()]
        return name

    def __str__(self):
        """Render comment lines plus the (possibly ``-e``-prefixed)
        requirement, newline-terminated."""
        req = self.req
        if self.editable:
            req = '-e %s' % req
        return '\n'.join(list(self.comments) + [str(req)]) + '\n'
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/prepare.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/prepare.py
new file mode 100644
index 0000000..c1e8158
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/prepare.py
@@ -0,0 +1,380 @@
1"""Prepares a distribution for installation
2"""
3
4import itertools
5import logging
6import os
7import sys
8from copy import copy
9
10from pip._vendor import pkg_resources, requests
11
12from pip._internal.build_env import NoOpBuildEnvironment
13from pip._internal.compat import expanduser
14from pip._internal.download import (
15 is_dir_url, is_file_url, is_vcs_url, unpack_url, url_to_path,
16)
17from pip._internal.exceptions import (
18 DirectoryUrlHashUnsupported, HashUnpinned, InstallationError,
19 PreviousBuildDirError, VcsHashUnsupported,
20)
21from pip._internal.index import FormatControl
22from pip._internal.req.req_install import InstallRequirement
23from pip._internal.utils.hashes import MissingHashes
24from pip._internal.utils.logging import indent_log
25from pip._internal.utils.misc import (
26 call_subprocess, display_path, normalize_path,
27)
28from pip._internal.utils.ui import open_spinner
29from pip._internal.vcs import vcs
30
31logger = logging.getLogger(__name__)
32
33
def make_abstract_dist(req):
    """Factory to make an abstract dist object.

    Preconditions: Either an editable req with a source_dir, or satisfied_by or
    a wheel link, or a non-editable req with a source_dir.

    :return: A concrete DistAbstraction.
    """
    # Editables are always handled as sdists; only a non-editable
    # requirement with a wheel link gets the wheel abstraction.
    if req.editable:
        return IsSDist(req)
    if req.link and req.link.is_wheel:
        return IsWheel(req)
    return IsSDist(req)
48
49
def _install_build_reqs(finder, prefix, build_requirements):
    """Install *build_requirements* into *prefix* via a pip subprocess.

    NOTE: What follows is not a very good thing.
    Eventually, this should move into the BuildEnvironment class and
    that should handle all the isolation and sub-process invocation.
    """
    # Resolve each requirement to a concrete URL using a binary-only finder.
    finder = copy(finder)
    finder.format_control = FormatControl(set(), set([":all:"]))
    urls = []
    for req_line in build_requirements:
        found = finder.find_requirement(
            InstallRequirement.from_line(req_line), upgrade=False,
        )
        urls.append(found.url)

    cmd = [
        sys.executable, '-m', 'pip', 'install', '--ignore-installed',
        '--no-user', '--prefix', prefix,
    ]
    cmd.extend(urls)

    with open_spinner("Installing build dependencies") as spinner:
        call_subprocess(cmd, show_stdout=False, spinner=spinner)
68
69
class DistAbstraction(object):
    """Abstracts out the wheel vs non-wheel Resolver.resolve() logic.

    The requirements for anything installable are as follows:
     - we must be able to determine the requirement name
       (or we can't correctly handle the non-upgrade case).
     - we must be able to generate a list of run-time dependencies
       without installing any additional packages (or we would
       have to either burn time by doing temporary isolated installs
       or alternatively violate pips 'don't start installing unless
       all requirements are available' rule - neither of which are
       desirable).
     - for packages with setup requirements, we must also be able
       to determine their requirements without installing additional
       packages (for the same reason as run-time dependencies)
     - we must be able to create a Distribution object exposing the
       above metadata.
    """

    def __init__(self, req):
        # The InstallRequirement this abstraction wraps.
        self.req = req

    def dist(self, finder):
        """Return a setuptools Dist object."""
        raise NotImplementedError(self.dist)

    def prep_for_dist(self, finder):
        """Ensure that we can get a Dist for this requirement."""
        # Bug fix: the original raised NotImplementedError(self.dist),
        # naming the wrong method in the error; reference this method.
        raise NotImplementedError(self.prep_for_dist)
99
100
class IsWheel(DistAbstraction):
    """DistAbstraction for a requirement backed by an unpacked wheel."""

    def dist(self, finder):
        # The unpacked wheel dir contains exactly the dist we want.
        found = pkg_resources.find_distributions(self.req.source_dir)
        return list(found)[0]

    def prep_for_dist(self, finder, build_isolation):
        # Wheels need no build step.
        # FIXME:https://github.com/pypa/pip/issues/1112
        pass
110
111
class IsSDist(DistAbstraction):
    """DistAbstraction for source distributions (and editables)."""

    def dist(self, finder):
        """Return the dist from egg-info, registering its dependency links
        on *finder* (when one is given)."""
        dist = self.req.get_dist()
        # FIXME: shouldn't be globally added.
        if finder and dist.has_metadata('dependency_links.txt'):
            finder.add_dependency_links(
                dist.get_metadata_lines('dependency_links.txt')
            )
        return dist

    def prep_for_dist(self, finder, build_isolation):
        """Set up the (possibly isolated) build environment, then run
        ``setup.py egg_info`` and check the version matches the source."""
        # Before calling "setup.py egg_info", we need to set-up the build
        # environment.
        build_requirements, isolate = self.req.get_pep_518_info()
        # Isolate only when pyproject.toml asks for it AND the caller
        # allows it.
        should_isolate = build_isolation and isolate

        # Warn when pyproject.toml omits the requirements this pip needs
        # to build a wheel (no PEP 517 support here).
        minimum_requirements = ('setuptools', 'wheel')
        missing_requirements = set(minimum_requirements) - set(
            pkg_resources.Requirement(r).key
            for r in build_requirements
        )
        if missing_requirements:
            def format_reqs(rs):
                return ' and '.join(map(repr, sorted(rs)))
            logger.warning(
                "Missing build time requirements in pyproject.toml for %s: "
                "%s.", self.req, format_reqs(missing_requirements)
            )
            logger.warning(
                "This version of pip does not implement PEP 517 so it cannot "
                "build a wheel without %s.", format_reqs(minimum_requirements)
            )

        if should_isolate:
            # Entering the build env creates it as a side effect;
            # build requirements are then installed into its path.
            with self.req.build_env:
                pass
            _install_build_reqs(finder, self.req.build_env.path,
                                build_requirements)
        else:
            self.req.build_env = NoOpBuildEnvironment(no_clean=False)

        self.req.run_egg_info()
        self.req.assert_source_matches_version()
156
157
class Installed(DistAbstraction):
    """DistAbstraction for a requirement that is already installed."""

    def dist(self, finder):
        # The distribution recorded when the requirement was found
        # to be satisfied.
        return self.req.satisfied_by

    def prep_for_dist(self, finder):
        # Already installed: nothing to fetch or build.
        pass
165
166
class RequirementPreparer(object):
    """Prepares a Requirement

    "Preparing" means getting the requirement's files onto disk (unpacked
    source tree or wheel) so metadata can be extracted, via one of the
    prepare_* methods below.
    """

    def __init__(self, build_dir, download_dir, src_dir, wheel_download_dir,
                 progress_bar, build_isolation):
        super(RequirementPreparer, self).__init__()

        self.src_dir = src_dir
        self.build_dir = build_dir

        # Where still packed archives should be written to. If None, they are
        # not saved, and are deleted immediately after unpacking.
        self.download_dir = download_dir

        # Where still-packed .whl files should be written to. If None, they are
        # written to the download_dir parameter. Separate to download_dir to
        # permit only keeping wheel archives for pip wheel.
        if wheel_download_dir:
            wheel_download_dir = normalize_path(wheel_download_dir)
        self.wheel_download_dir = wheel_download_dir

        # NOTE
        # download_dir and wheel_download_dir overlap semantically and may
        # be combined if we're willing to have non-wheel archives present in
        # the wheelhouse output by 'pip wheel'.

        self.progress_bar = progress_bar

        # Is build isolation allowed?
        self.build_isolation = build_isolation

    @property
    def _download_should_save(self):
        """True when prepared artifacts should be archived to download_dir.

        Raises InstallationError when download_dir is set but missing.
        """
        # TODO: Modify to reduce indentation needed
        if self.download_dir:
            self.download_dir = expanduser(self.download_dir)
            if os.path.exists(self.download_dir):
                return True
            else:
                logger.critical('Could not find download directory')
                raise InstallationError(
                    "Could not find or access download directory '%s'"
                    % display_path(self.download_dir))
        return False

    def prepare_linked_requirement(self, req, session, finder,
                                   upgrade_allowed, require_hashes):
        """Prepare a requirement that would be obtained from req.link
        """
        # TODO: Breakup into smaller functions
        if req.link and req.link.scheme == 'file':
            path = url_to_path(req.link.url)
            logger.info('Processing %s', display_path(path))
        else:
            logger.info('Collecting %s', req)

        with indent_log():
            # @@ if filesystem packages are not marked
            # editable in a req, a non deterministic error
            # occurs when the script attempts to unpack the
            # build directory
            req.ensure_has_source_dir(self.build_dir)
            # If a checkout exists, it's unwise to keep going. version
            # inconsistencies are logged later, but do not fail the
            # installation.
            # FIXME: this won't upgrade when there's an existing
            # package unpacked in `req.source_dir`
            if os.path.exists(os.path.join(req.source_dir, 'setup.py')):
                raise PreviousBuildDirError(
                    "pip can't proceed with requirements '%s' due to a"
                    " pre-existing build directory (%s). This is "
                    "likely due to a previous installation that failed"
                    ". pip is being responsible and not assuming it "
                    "can delete this. Please delete it and try again."
                    % (req, req.source_dir)
                )
            req.populate_link(finder, upgrade_allowed, require_hashes)

            # We can't hit this spot and have populate_link return None.
            # req.satisfied_by is None here (because we're
            # guarded) and upgrade has no impact except when satisfied_by
            # is not None.
            # Then inside find_requirement existing_applicable -> False
            # If no new versions are found, DistributionNotFound is raised,
            # otherwise a result is guaranteed.
            assert req.link
            link = req.link

            # Now that we have the real link, we can tell what kind of
            # requirements we have and raise some more informative errors
            # than otherwise. (For example, we can raise VcsHashUnsupported
            # for a VCS URL rather than HashMissing.)
            if require_hashes:
                # We could check these first 2 conditions inside
                # unpack_url and save repetition of conditions, but then
                # we would report less-useful error messages for
                # unhashable requirements, complaining that there's no
                # hash provided.
                if is_vcs_url(link):
                    raise VcsHashUnsupported()
                elif is_file_url(link) and is_dir_url(link):
                    raise DirectoryUrlHashUnsupported()
                if not req.original_link and not req.is_pinned:
                    # Unpinned packages are asking for trouble when a new
                    # version is uploaded. This isn't a security check, but
                    # it saves users a surprising hash mismatch in the
                    # future.
                    #
                    # file:/// URLs aren't pinnable, so don't complain
                    # about them not being pinned.
                    raise HashUnpinned()

            hashes = req.hashes(trust_internet=not require_hashes)
            if require_hashes and not hashes:
                # Known-good hashes are missing for this requirement, so
                # shim it with a facade object that will provoke hash
                # computation and then raise a HashMissing exception
                # showing the user what the hash should be.
                hashes = MissingHashes()

            try:
                download_dir = self.download_dir
                # We always delete unpacked sdists after pip ran.
                autodelete_unpacked = True
                if req.link.is_wheel and self.wheel_download_dir:
                    # when doing 'pip wheel` we download wheels to a
                    # dedicated dir.
                    download_dir = self.wheel_download_dir
                if req.link.is_wheel:
                    if download_dir:
                        # When downloading, we only unpack wheels to get
                        # metadata.
                        autodelete_unpacked = True
                    else:
                        # When installing a wheel, we use the unpacked
                        # wheel.
                        autodelete_unpacked = False
                unpack_url(
                    req.link, req.source_dir,
                    download_dir, autodelete_unpacked,
                    session=session, hashes=hashes,
                    progress_bar=self.progress_bar
                )
            except requests.HTTPError as exc:
                logger.critical(
                    'Could not install requirement %s because of error %s',
                    req,
                    exc,
                )
                raise InstallationError(
                    'Could not install requirement %s because of HTTP '
                    'error %s for URL %s' %
                    (req, exc, req.link)
                )
            abstract_dist = make_abstract_dist(req)
            abstract_dist.prep_for_dist(finder, self.build_isolation)
            if self._download_should_save:
                # Make a .zip of the source_dir we already created.
                if req.link.scheme in vcs.all_schemes:
                    req.archive(self.download_dir)
            return abstract_dist

    def prepare_editable_requirement(self, req, require_hashes, use_user_site,
                                     finder):
        """Prepare an editable requirement
        """
        assert req.editable, "cannot prepare a non-editable req as editable"

        logger.info('Obtaining %s', req)

        with indent_log():
            if require_hashes:
                raise InstallationError(
                    'The editable requirement %s cannot be installed when '
                    'requiring hashes, because there is no single file to '
                    'hash.' % req
                )
            req.ensure_has_source_dir(self.src_dir)
            req.update_editable(not self._download_should_save)

            abstract_dist = make_abstract_dist(req)
            abstract_dist.prep_for_dist(finder, self.build_isolation)

            if self._download_should_save:
                req.archive(self.download_dir)
            req.check_if_exists(use_user_site)

        return abstract_dist

    def prepare_installed_requirement(self, req, require_hashes, skip_reason):
        """Prepare an already-installed requirement
        """
        assert req.satisfied_by, "req should have been satisfied but isn't"
        assert skip_reason is not None, (
            "did not get skip reason skipped but req.satisfied_by "
            "is set to %r" % (req.satisfied_by,)
        )
        logger.info(
            'Requirement %s: %s (%s)',
            skip_reason, req, req.satisfied_by.version
        )
        with indent_log():
            if require_hashes:
                logger.debug(
                    'Since it is already installed, we are trusting this '
                    'package without checking its hash. To ensure a '
                    'completely repeatable environment, install into an '
                    'empty virtualenv.'
                )
            abstract_dist = Installed(req)

        return abstract_dist
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/pep425tags.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/pep425tags.py
new file mode 100644
index 0000000..5d31310
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/pep425tags.py
@@ -0,0 +1,317 @@
1"""Generate and work with PEP 425 Compatibility Tags."""
2from __future__ import absolute_import
3
4import distutils.util
5import logging
6import platform
7import re
8import sys
9import sysconfig
10import warnings
11from collections import OrderedDict
12
13import pip._internal.utils.glibc
14
logger = logging.getLogger(__name__)

# Parses macOS platform tags such as 'macosx_10_6_intel' into the groups
# (name, major, minor, arch).
_osx_arch_pat = re.compile(r'(.+)_(\d+)_(\d+)_(.+)')
18
19
def get_config_var(var):
    """Look up *var* via sysconfig, downgrading IOError to a warning.

    Returns None when the variable cannot be read (see pip issue #1074).
    """
    try:
        value = sysconfig.get_config_var(var)
    except IOError as e:  # Issue #1074
        warnings.warn("{}".format(e), RuntimeWarning)
        value = None
    return value
26
27
def get_abbr_impl():
    """Return abbreviated implementation name ('pp', 'jy', 'ip' or 'cp')."""
    if hasattr(sys, 'pypy_version_info'):
        return 'pp'
    if sys.platform.startswith('java'):
        return 'jy'
    if sys.platform == 'cli':
        return 'ip'
    return 'cp'
39
40
def get_impl_ver():
    """Return the implementation version string, e.g. '37'."""
    version = get_config_var("py_version_nodot")
    # PyPy reports the CPython compatibility version here, so always
    # rebuild the tag from the implementation's own version tuple.
    if not version or get_abbr_impl() == 'pp':
        version = ''.join(str(part) for part in get_impl_version_info())
    return version
47
48
def get_impl_version_info():
    """Return sys.version_info-like tuple for use in decrementing the minor
    version."""
    if get_abbr_impl() == 'pp':
        # as per https://github.com/pypa/pip/issues/2882
        return (sys.version_info[0], sys.pypy_version_info.major,
                sys.pypy_version_info.minor)
    return (sys.version_info[0], sys.version_info[1])
58
59
def get_impl_tag():
    """Return the tag for this specific implementation, e.g. 'cp37'."""
    return get_abbr_impl() + get_impl_ver()
65
66
def get_flag(var, fallback, expected=True, warn=True):
    """Use a fallback method for determining SOABI flags if the needed config
    var is unset or unavailable."""
    val = get_config_var(var)
    if val is not None:
        return val == expected
    if warn:
        logger.debug("Config variable '%s' is unset, Python ABI tag may "
                     "be incorrect", var)
    return fallback()
77
78
def get_abi_tag():
    """Return the ABI tag based on SOABI (if available) or emulate SOABI
    (CPython 2, PyPy)."""
    soabi = get_config_var('SOABI')
    impl = get_abbr_impl()

    if not soabi and impl in {'cp', 'pp'} and hasattr(sys, 'maxunicode'):
        # Emulate the SOABI flags from interpreter introspection.
        d = 'd' if get_flag('Py_DEBUG',
                            lambda: hasattr(sys, 'gettotalrefcount'),
                            warn=(impl == 'cp')) else ''
        m = 'm' if get_flag('WITH_PYMALLOC',
                            lambda: impl == 'cp',
                            warn=(impl == 'cp')) else ''
        wide_unicode = get_flag(
            'Py_UNICODE_SIZE',
            lambda: sys.maxunicode == 0x10ffff,
            expected=4,
            warn=(impl == 'cp' and sys.version_info < (3, 3)))
        # The 'u' (wide unicode) flag only exists before Python 3.3.
        u = 'u' if wide_unicode and sys.version_info < (3, 3) else ''
        return '%s%s%s%s%s' % (impl, get_impl_ver(), d, m, u)

    if soabi and soabi.startswith('cpython-'):
        return 'cp' + soabi.split('-')[1]
    if soabi:
        return soabi.replace('.', '_').replace('-', '_')
    return None
111
112
def _is_running_32bit():
    """Return True when the interpreter is a 32-bit build
    (sys.maxsize == 2**31 - 1)."""
    return sys.maxsize == 2147483647
115
116
def get_platform():
    """Return our platform name 'win32', 'linux_x86_64'"""
    if sys.platform == 'darwin':
        # distutils.util.get_platform() returns the release based on the value
        # of MACOSX_DEPLOYMENT_TARGET on which Python was built, which may
        # be significantly older than the user's current machine.
        release, _, machine = platform.mac_ver()
        split_ver = release.split('.')

        # A 32-bit interpreter on 64-bit hardware must report the 32-bit arch.
        if _is_running_32bit():
            if machine == "x86_64":
                machine = "i386"
            elif machine == "ppc64":
                machine = "ppc"

        return 'macosx_{}_{}_{}'.format(split_ver[0], split_ver[1], machine)

    # XXX remove distutils dependency
    result = distutils.util.get_platform().replace('.', '_').replace('-', '_')
    if result == "linux_x86_64" and _is_running_32bit():
        # 32 bit Python program (running on a 64 bit Linux): pip should only
        # install and run 32 bit compiled extensions in that case.
        result = "linux_i686"
    return result
141
142
def is_manylinux1_compatible():
    """Return True when the local platform can use manylinux1 wheels."""
    # Only Linux, and only x86-64 / i686
    if get_platform() not in {"linux_x86_64", "linux_i686"}:
        return False

    # A distro may ship a _manylinux module to explicitly declare support.
    try:
        import _manylinux
        return bool(_manylinux.manylinux1_compatible)
    except (ImportError, AttributeError):
        # Fall through to heuristic check below
        pass

    # Check glibc version. CentOS 5 uses glibc 2.5.
    return pip._internal.utils.glibc.have_compatible_glibc(2, 5)
158
159
def get_darwin_arches(major, minor, machine):
    """Return a list of supported arches (including group arches) for
    the given major, minor and machine architecture of an macOS machine.
    """
    # Fat-binary group names and the single arches they contain.
    groups = OrderedDict([
        ("fat", ("i386", "ppc")),
        ("intel", ("x86_64", "i386")),
        ("fat64", ("x86_64", "ppc64")),
        ("fat32", ("x86_64", "i386", "ppc")),
    ])

    def _supports_arch(major, minor, arch):
        # Looking at the application support for macOS versions in the chart
        # provided by https://en.wikipedia.org/wiki/OS_X#Versions it appears
        # our timeline looks roughly like:
        #
        # 10.0 - Introduces ppc support.
        # 10.4 - Introduces ppc64, i386, and x86_64 support, however the ppc64
        #        and x86_64 support is CLI only, and cannot be used for GUI
        #        applications.
        # 10.5 - Extends ppc64 and x86_64 support to cover GUI applications.
        # 10.6 - Drops support for ppc64
        # 10.7 - Drops support for ppc
        #
        # Given that we do not know if we're installing a CLI or a GUI
        # application, we must be conservative and assume it might be a GUI
        # application and behave as if ppc64 and x86_64 support did not occur
        # until 10.5.
        if arch == 'ppc':
            return (major, minor) <= (10, 5)
        if arch == 'ppc64':
            return (major, minor) == (10, 5)
        if arch == 'i386':
            return (major, minor) >= (10, 4)
        if arch == 'x86_64':
            return (major, minor) >= (10, 5)
        if arch in groups:
            # A group arch is supported when any of its members is.
            return any(
                _supports_arch(major, minor, garch) for garch in groups[arch]
            )
        return False

    arches = []
    if _supports_arch(major, minor, machine):
        arches.append(machine)
    arches.extend(
        garch for garch in groups
        if machine in groups[garch] and _supports_arch(major, minor, garch)
    )
    arches.append('universal')
    return arches
219
220
def get_supported(versions=None, noarch=False, platform=None,
                  impl=None, abi=None):
    """Return a list of supported tags for each version specified in
    `versions`.

    :param versions: a list of string versions, of the form ["33", "32"],
        or None. The first version will be assumed to support our ABI.
    :param noarch: if True, emit only platform-independent ('any') tags.
    :param platform: specify the exact platform you want valid
        tags for, or None. If None, use the local system platform.
    :param impl: specify the exact implementation you want valid
        tags for, or None. If None, use the local interpreter impl.
    :param abi: specify the exact abi you want valid
        tags for, or None. If None, use the local interpreter abi.
    """
    supported = []

    # Versions must be given with respect to the preference
    if versions is None:
        versions = []
        version_info = get_impl_version_info()
        major = version_info[:-1]
        # Support all previous minor Python versions.
        for minor in range(version_info[-1], -1, -1):
            versions.append(''.join(map(str, major + (minor,))))

    impl = impl or get_abbr_impl()

    abis = []

    abi = abi or get_abi_tag()
    if abi:
        abis[0:0] = [abi]

    # Collect the interpreter's stable-ABI names (e.g. 'abi3').
    # On Python 3 read importlib.machinery.EXTENSION_SUFFIXES; the `imp`
    # module is deprecated since 3.4 (and removed in 3.12), so it is only
    # used as a Python 2 fallback.
    abi3s = set()
    try:
        from importlib.machinery import EXTENSION_SUFFIXES
    except ImportError:  # Python 2
        import imp
        EXTENSION_SUFFIXES = [s[0] for s in imp.get_suffixes()]
    for suffix in EXTENSION_SUFFIXES:
        if suffix.startswith('.abi'):
            abi3s.add(suffix.split('.', 2)[1])

    abis.extend(sorted(list(abi3s)))

    abis.append('none')

    if not noarch:
        arch = platform or get_platform()
        if arch.startswith('macosx'):
            # support macosx-10.6-intel on macosx-10.9-x86_64
            match = _osx_arch_pat.match(arch)
            if match:
                name, major, minor, actual_arch = match.groups()
                tpl = '{}_{}_%i_%s'.format(name, major)
                arches = []
                for m in reversed(range(int(minor) + 1)):
                    for a in get_darwin_arches(int(major), m, actual_arch):
                        arches.append(tpl % (m, a))
            else:
                # arch pattern didn't match (?!)
                arches = [arch]
        elif platform is None and is_manylinux1_compatible():
            arches = [arch.replace('linux', 'manylinux1'), arch]
        else:
            arches = [arch]

        # Current version, current API (built specifically for our Python):
        for abi in abis:
            for arch in arches:
                supported.append(('%s%s' % (impl, versions[0]), abi, arch))

        # abi3 modules compatible with older version of Python
        for version in versions[1:]:
            # abi3 was introduced in Python 3.2
            if version in {'31', '30'}:
                break
            for abi in abi3s:   # empty set if not Python 3
                for arch in arches:
                    supported.append(("%s%s" % (impl, version), abi, arch))

        # Has binaries, does not use the Python API:
        for arch in arches:
            supported.append(('py%s' % (versions[0][0]), 'none', arch))

    # No abi / arch, but requires our implementation:
    supported.append(('%s%s' % (impl, versions[0]), 'none', 'any'))
    # Tagged specifically as being cross-version compatible
    # (with just the major version specified)
    supported.append(('%s%s' % (impl, versions[0][0]), 'none', 'any'))

    # No abi / arch, generic Python
    for i, version in enumerate(versions):
        supported.append(('py%s' % (version,), 'none', 'any'))
        if i == 0:
            supported.append(('py%s' % (version[0]), 'none', 'any'))

    return supported
315
316
317implementation_tag = get_impl_tag()
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/__init__.py
new file mode 100644
index 0000000..07ae607
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/__init__.py
@@ -0,0 +1,69 @@
1from __future__ import absolute_import
2
3import logging
4
5from .req_install import InstallRequirement
6from .req_set import RequirementSet
7from .req_file import parse_requirements
8from pip._internal.utils.logging import indent_log
9
10
# Public API re-exported by pip._internal.req.
__all__ = [
    "RequirementSet", "InstallRequirement",
    "parse_requirements", "install_given_reqs",
]

logger = logging.getLogger(__name__)
17
18
def install_given_reqs(to_install, install_options, global_options=(),
                       *args, **kwargs):
    """
    Install everything in the given list.

    (to be called after having downloaded and unpacked the packages)
    """
    if to_install:
        logger.info(
            'Installing collected packages: %s',
            ', '.join(req.name for req in to_install),
        )

    with indent_log():
        for requirement in to_install:
            uninstalled_pathset = None
            # An existing installation conflicting with this requirement is
            # uninstalled first, but kept around so it can be rolled back.
            if requirement.conflicts_with:
                logger.info(
                    'Found existing installation: %s',
                    requirement.conflicts_with,
                )
                with indent_log():
                    uninstalled_pathset = requirement.uninstall(
                        auto_confirm=True
                    )
            try:
                requirement.install(
                    install_options,
                    global_options,
                    *args,
                    **kwargs
                )
            except:
                # Bare except on purpose: even on KeyboardInterrupt the
                # previous uninstall must be rolled back before re-raising.
                # if install did not succeed, rollback previous uninstall
                if (requirement.conflicts_with and
                        not requirement.install_succeeded):
                    uninstalled_pathset.rollback()
                raise
            else:
                if (requirement.conflicts_with and
                        requirement.install_succeeded):
                    uninstalled_pathset.commit()
                requirement.remove_temporary_source()

    return to_install
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/req_file.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/req_file.py
new file mode 100644
index 0000000..9e6ef41
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/req_file.py
@@ -0,0 +1,338 @@
1"""
2Requirements file parsing
3"""
4
5from __future__ import absolute_import
6
7import optparse
8import os
9import re
10import shlex
11import sys
12
13from pip._vendor.six.moves import filterfalse
14from pip._vendor.six.moves.urllib import parse as urllib_parse
15
16from pip._internal import cmdoptions
17from pip._internal.download import get_file_content
18from pip._internal.exceptions import RequirementsFileParseError
19from pip._internal.req.req_install import InstallRequirement
20
__all__ = ['parse_requirements']

# Requirement files referenced with one of these schemes are fetched over
# the network by get_file_content().
SCHEME_RE = re.compile(r'^(http|https|file):', re.I)
# A '#' at line start or preceded by whitespace begins a comment that runs
# to end-of-line.
COMMENT_RE = re.compile(r'(^|\s)+#.*$')

# Matches environment variable-style values in '${MY_VARIABLE_1}' with the
# variable name consisting of only uppercase letters, digits or the '_'
# (underscore). This follows the POSIX standard defined in IEEE Std 1003.1,
# 2013 Edition.
ENV_VAR_RE = re.compile(r'(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})')

# Options allowed on their own line in a requirements file; they affect
# global state (the finder) rather than an individual requirement.
SUPPORTED_OPTIONS = [
    cmdoptions.constraints,
    cmdoptions.editable,
    cmdoptions.requirements,
    cmdoptions.no_index,
    cmdoptions.index_url,
    cmdoptions.find_links,
    cmdoptions.extra_index_url,
    cmdoptions.always_unzip,
    cmdoptions.no_binary,
    cmdoptions.only_binary,
    cmdoptions.pre,
    cmdoptions.process_dependency_links,
    cmdoptions.trusted_host,
    cmdoptions.require_hashes,
]

# options to be passed to requirements
SUPPORTED_OPTIONS_REQ = [
    cmdoptions.install_options,
    cmdoptions.global_options,
    cmdoptions.hash,
]

# the 'dest' string values
SUPPORTED_OPTIONS_REQ_DEST = [o().dest for o in SUPPORTED_OPTIONS_REQ]
58
59
def parse_requirements(filename, finder=None, comes_from=None, options=None,
                       session=None, constraint=False, wheel_cache=None):
    """Parse a requirements file and yield InstallRequirement instances.

    :param filename: Path or url of requirements file.
    :param finder: Instance of pip.index.PackageFinder.
    :param comes_from: Origin description of requirements.
    :param options: cli options.
    :param session: Instance of pip.download.PipSession.
    :param constraint: If true, parsing a constraint file rather than
        requirements file.
    :param wheel_cache: Instance of pip.wheel.WheelCache
    """
    if session is None:
        raise TypeError(
            "parse_requirements() missing 1 required keyword argument: "
            "'session'"
        )

    _, content = get_file_content(
        filename, comes_from=comes_from, session=session
    )

    for line_number, line in preprocess(content, options):
        for req in process_line(line, filename, line_number, finder,
                                comes_from, options, session, wheel_cache,
                                constraint=constraint):
            yield req
91
92
def preprocess(content, options):
    """Split, filter, and join lines, and return a line iterator

    :param content: the content of the requirements file
    :param options: cli options
    """
    lines = enumerate(content.splitlines(), start=1)
    lines = join_lines(lines)
    lines = ignore_comments(lines)
    lines = skip_regex(lines, options)
    return expand_env_variables(lines)
105
106
def process_line(line, filename, line_number, finder=None, comes_from=None,
                 options=None, session=None, wheel_cache=None,
                 constraint=False):
    """Process a single requirements line; This can result in creating/yielding
    requirements, or updating the finder.

    For lines that contain requirements, the only options that have an effect
    are from SUPPORTED_OPTIONS_REQ, and they are scoped to the
    requirement. Other options from SUPPORTED_OPTIONS may be present, but are
    ignored.

    For lines that do not contain requirements, the only options that have an
    effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may
    be present, but are ignored. These lines may contain multiple options
    (although our docs imply only one is supported), and all our parsed and
    affect the finder.

    :param constraint: If True, parsing a constraints file.
    :param options: OptionParser options that we may update
    """
    parser = build_parser(line)
    defaults = parser.get_default_values()
    defaults.index_url = None
    if finder:
        # `finder.format_control` will be updated during parsing
        defaults.format_control = finder.format_control
    # Split off the requirement itself before shlex-ing: markers can contain
    # quotes that shlex would corrupt.
    args_str, options_str = break_args_options(line)
    if sys.version_info < (2, 7, 3):
        # Prior to 2.7.3, shlex cannot deal with unicode entries
        options_str = options_str.encode('utf8')
    opts, _ = parser.parse_args(shlex.split(options_str), defaults)

    # preserve for the nested code path
    line_comes_from = '%s %s (line %s)' % (
        '-c' if constraint else '-r', filename, line_number,
    )

    # yield a line requirement
    if args_str:
        isolated = options.isolated_mode if options else False
        if options:
            cmdoptions.check_install_build_global(options, opts)
        # get the options that apply to requirements
        req_options = {}
        for dest in SUPPORTED_OPTIONS_REQ_DEST:
            if dest in opts.__dict__ and opts.__dict__[dest]:
                req_options[dest] = opts.__dict__[dest]
        yield InstallRequirement.from_line(
            args_str, line_comes_from, constraint=constraint,
            isolated=isolated, options=req_options, wheel_cache=wheel_cache
        )

    # yield an editable requirement
    elif opts.editables:
        isolated = options.isolated_mode if options else False
        yield InstallRequirement.from_editable(
            opts.editables[0], comes_from=line_comes_from,
            constraint=constraint, isolated=isolated, wheel_cache=wheel_cache
        )

    # parse a nested requirements file
    elif opts.requirements or opts.constraints:
        if opts.requirements:
            req_path = opts.requirements[0]
            nested_constraint = False
        else:
            req_path = opts.constraints[0]
            nested_constraint = True
        # original file is over http
        if SCHEME_RE.search(filename):
            # do a url join so relative paths work
            req_path = urllib_parse.urljoin(filename, req_path)
        # original file and nested file are paths
        elif not SCHEME_RE.search(req_path):
            # do a join so relative paths work
            req_path = os.path.join(os.path.dirname(filename), req_path)
        # TODO: Why not use `comes_from='-r {} (line {})'` here as well?
        parser = parse_requirements(
            req_path, finder, comes_from, options, session,
            constraint=nested_constraint, wheel_cache=wheel_cache
        )
        for req in parser:
            yield req

    # percolate hash-checking option upward
    elif opts.require_hashes:
        options.require_hashes = opts.require_hashes

    # set finder options
    elif finder:
        if opts.index_url:
            finder.index_urls = [opts.index_url]
        if opts.no_index is True:
            finder.index_urls = []
        if opts.extra_index_urls:
            finder.index_urls.extend(opts.extra_index_urls)
        if opts.find_links:
            # FIXME: it would be nice to keep track of the source
            # of the find_links: support a find-links local path
            # relative to a requirements file.
            value = opts.find_links[0]
            req_dir = os.path.dirname(os.path.abspath(filename))
            relative_to_reqs_file = os.path.join(req_dir, value)
            if os.path.exists(relative_to_reqs_file):
                value = relative_to_reqs_file
            finder.find_links.append(value)
        if opts.pre:
            finder.allow_all_prereleases = True
        if opts.process_dependency_links:
            finder.process_dependency_links = True
        if opts.trusted_hosts:
            finder.secure_origins.extend(
                ("*", host, "*") for host in opts.trusted_hosts)
220
221
def break_args_options(line):
    """Break up the line into an args and options string. We only want to shlex
    (and then optparse) the options, not the args. args can contain markers
    which are corrupted by shlex.

    :param line: one (already joined) requirements-file line.
    :return: a ``(args, options)`` pair of space-joined strings; ``args`` is
        everything before the first option-like token, ``options`` the rest.
    """
    tokens = line.split(' ')
    args = []
    options = tokens[:]
    for token in tokens:
        # startswith('-') already covers '--...' tokens, so a single check
        # suffices: the first option-like token ends the args section.
        if token.startswith('-'):
            break
        args.append(token)
        options.pop(0)
    return ' '.join(args), ' '.join(options)
237
238
def build_parser(line):
    """
    Return a parser for parsing requirement lines
    """
    parser = optparse.OptionParser(add_help_option=False)

    for option_factory in SUPPORTED_OPTIONS + SUPPORTED_OPTIONS_REQ:
        parser.add_option(option_factory())

    # By default optparse sys.exits on parsing errors. We want to wrap
    # that in our own exception.
    def parser_exit(self, msg):
        # add offending line
        raise RequirementsFileParseError(
            'Invalid requirement: %s\n%s' % (line, msg)
        )
    parser.exit = parser_exit

    return parser
259
260
def join_lines(lines_enum):
    r"""Joins a line ending in '\' with the previous line (except when
    following comments). The joined line takes on the index of the first line.
    """
    first_line_number = None
    buffered = []
    for line_number, line in lines_enum:
        is_comment = bool(COMMENT_RE.match(line))
        if line.endswith('\\') and not is_comment:
            # Continuation line: remember where the group started and buffer.
            if not buffered:
                first_line_number = line_number
            buffered.append(line.strip('\\'))
            continue
        if is_comment:
            # this ensures comments are always matched later
            line = ' ' + line
        if buffered:
            buffered.append(line)
            yield first_line_number, ''.join(buffered)
            buffered = []
        else:
            yield line_number, line

    # last line contains \
    if buffered:
        yield first_line_number, ''.join(buffered)

    # TODO: handle space after '\'.
288
289
def ignore_comments(lines_enum):
    """
    Strips comments and filter empty lines.
    """
    for line_number, line in lines_enum:
        stripped = COMMENT_RE.sub('', line).strip()
        if stripped:
            yield line_number, stripped
299
300
def skip_regex(lines_enum, options):
    """
    Skip lines that match '--skip-requirements-regex' pattern

    Note: the regex pattern is only built once
    """
    skip_pattern = options.skip_requirements_regex if options else None
    if not skip_pattern:
        return lines_enum
    pattern = re.compile(skip_pattern)
    return filterfalse(lambda e: pattern.search(e[1]), lines_enum)
312
313
def expand_env_variables(lines_enum):
    """Replace all environment variables that can be retrieved via `os.getenv`.

    The only allowed format for environment variables defined in the
    requirement file is `${MY_VARIABLE_1}` to ensure two things:

    1. Strings that contain a `$` aren't accidentally (partially) expanded.
    2. Ensure consistency across platforms for requirement files.

    These points are the result of a discussion on the `github pull
    request #3514 <https://github.com/pypa/pip/pull/3514>`_.

    Valid characters in variable names follow the `POSIX standard
    <http://pubs.opengroup.org/onlinepubs/9699919799/>`_ and are limited
    to uppercase letter, digits and the `_` (underscore).
    """
    for line_number, line in lines_enum:
        for env_var, var_name in ENV_VAR_RE.findall(line):
            value = os.getenv(var_name)
            # Unset (or empty) variables are left as literal text.
            if value:
                line = line.replace(env_var, value)
        yield line_number, line
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/req_install.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/req_install.py
new file mode 100644
index 0000000..9dd1523
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/req_install.py
@@ -0,0 +1,1115 @@
1from __future__ import absolute_import
2
3import logging
4import os
5import re
6import shutil
7import sys
8import sysconfig
9import traceback
10import warnings
11import zipfile
12from distutils.util import change_root
13from email.parser import FeedParser # type: ignore
14
15from pip._vendor import pkg_resources, pytoml, six
16from pip._vendor.packaging import specifiers
17from pip._vendor.packaging.markers import Marker
18from pip._vendor.packaging.requirements import InvalidRequirement, Requirement
19from pip._vendor.packaging.utils import canonicalize_name
20from pip._vendor.packaging.version import parse as parse_version
21from pip._vendor.packaging.version import Version
22from pip._vendor.pkg_resources import RequirementParseError, parse_requirements
23
24from pip._internal import wheel
25from pip._internal.build_env import BuildEnvironment
26from pip._internal.compat import native_str
27from pip._internal.download import (
28 is_archive_file, is_url, path_to_url, url_to_path,
29)
30from pip._internal.exceptions import InstallationError, UninstallationError
31from pip._internal.locations import (
32 PIP_DELETE_MARKER_FILENAME, running_under_virtualenv,
33)
34from pip._internal.req.req_uninstall import UninstallPathSet
35from pip._internal.utils.deprecation import RemovedInPip11Warning
36from pip._internal.utils.hashes import Hashes
37from pip._internal.utils.logging import indent_log
38from pip._internal.utils.misc import (
39 _make_build_dir, ask_path_exists, backup_dir, call_subprocess,
40 display_path, dist_in_site_packages, dist_in_usersite, ensure_dir,
41 get_installed_version, is_installable_dir, read_text_file, rmtree,
42)
43from pip._internal.utils.setuptools_build import SETUPTOOLS_SHIM
44from pip._internal.utils.temp_dir import TempDirectory
45from pip._internal.utils.ui import open_spinner
46from pip._internal.vcs import vcs
47from pip._internal.wheel import Wheel, move_wheel_files
48
logger = logging.getLogger(__name__)

# All comparison operators a version specifier may use (e.g. '==', '>=').
operators = specifiers.Specifier._operators.keys()
52
53
54def _strip_extras(path):
55 m = re.match(r'^(.+)(\[[^\]]+\])$', path)
56 extras = None
57 if m:
58 path_no_extras = m.group(1)
59 extras = m.group(2)
60 else:
61 path_no_extras = path
62
63 return path_no_extras, extras
64
65
66class InstallRequirement(object):
67 """
68 Represents something that may be installed later on, may have information
69 about where to fetch the relavant requirement and also contains logic for
70 installing the said requirement.
71 """
72
    def __init__(self, req, comes_from, source_dir=None, editable=False,
                 link=None, update=True, markers=None,
                 isolated=False, options=None, wheel_cache=None,
                 constraint=False, extras=()):
        """Initialize the requirement's state.

        :param req: a packaging ``Requirement`` or None (unnamed requirement).
        :param comes_from: origin of this requirement (a string or another
            requirement object with a ``from_path()`` method).
        :param source_dir: already-known unpacked source directory, if any.
        :param editable: True for '-e' requirements.
        :param link: a ``Link`` to the distribution, if already known;
            otherwise derived from ``req.url`` when present.
        :param update: whether an editable checkout should be refreshed.
        :param markers: environment markers; defaults to ``req.marker``.
        :param isolated: True when running in isolated mode.
        :param options: per-requirement options dict (install/global/hash).
        :param wheel_cache: cache consulted by ``populate_link``.
        :param constraint: True when this came from a constraints file.
        :param extras: explicit extras overriding those parsed from ``req``.
        """
        assert req is None or isinstance(req, Requirement), req
        self.req = req
        self.comes_from = comes_from
        self.constraint = constraint
        if source_dir is not None:
            self.source_dir = os.path.normpath(os.path.abspath(source_dir))
        else:
            self.source_dir = None
        self.editable = editable

        self._wheel_cache = wheel_cache
        if link is not None:
            self.link = self.original_link = link
        else:
            # Imported here rather than at module level; presumably to avoid
            # a circular import with pip._internal.index — TODO confirm.
            from pip._internal.index import Link
            self.link = self.original_link = req and req.url and Link(req.url)

        if extras:
            self.extras = extras
        elif req:
            self.extras = {
                pkg_resources.safe_extra(extra) for extra in req.extras
            }
        else:
            self.extras = set()
        if markers is not None:
            self.markers = markers
        else:
            self.markers = req and req.marker
        self._egg_info_path = None
        # This holds the pkg_resources.Distribution object if this requirement
        # is already available:
        self.satisfied_by = None
        # This hold the pkg_resources.Distribution object if this requirement
        # conflicts with another installed distribution:
        self.conflicts_with = None
        # Temporary build location
        self._temp_build_dir = TempDirectory(kind="req-build")
        # Used to store the global directory where the _temp_build_dir should
        # have been created. Cf _correct_build_location method.
        self._ideal_build_dir = None
        # True if the editable should be updated:
        self.update = update
        # Set to True after successful installation
        self.install_succeeded = None
        # UninstallPathSet of uninstalled distribution (for possible rollback)
        self.uninstalled_pathset = None
        self.options = options if options else {}
        # Set to True after successful preparation of this requirement
        self.prepared = False
        # Set by callers when the requirement came directly from user input.
        self.is_direct = False

        self.isolated = isolated
        self.build_env = BuildEnvironment(no_clean=True)
131
132 @classmethod
133 def from_editable(cls, editable_req, comes_from=None, isolated=False,
134 options=None, wheel_cache=None, constraint=False):
135 from pip._internal.index import Link
136
137 name, url, extras_override = parse_editable(editable_req)
138 if url.startswith('file:'):
139 source_dir = url_to_path(url)
140 else:
141 source_dir = None
142
143 if name is not None:
144 try:
145 req = Requirement(name)
146 except InvalidRequirement:
147 raise InstallationError("Invalid requirement: '%s'" % name)
148 else:
149 req = None
150 return cls(
151 req, comes_from, source_dir=source_dir,
152 editable=True,
153 link=Link(url),
154 constraint=constraint,
155 isolated=isolated,
156 options=options if options else {},
157 wheel_cache=wheel_cache,
158 extras=extras_override or (),
159 )
160
161 @classmethod
162 def from_req(cls, req, comes_from=None, isolated=False, wheel_cache=None):
163 try:
164 req = Requirement(req)
165 except InvalidRequirement:
166 raise InstallationError("Invalid requirement: '%s'" % req)
167 if req.url:
168 raise InstallationError(
169 "Direct url requirement (like %s) are not allowed for "
170 "dependencies" % req
171 )
172 return cls(req, comes_from, isolated=isolated, wheel_cache=wheel_cache)
173
    @classmethod
    def from_line(
            cls, name, comes_from=None, isolated=False, options=None,
            wheel_cache=None, constraint=False):
        """Creates an InstallRequirement from a name, which might be a
        requirement, directory containing 'setup.py', filename, or URL.
        """
        from pip._internal.index import Link

        # Split off environment markers; for URLs a space must precede the
        # ';' so it is not confused with a ';' inside the URL itself.
        if is_url(name):
            marker_sep = '; '
        else:
            marker_sep = ';'
        if marker_sep in name:
            name, markers = name.split(marker_sep, 1)
            markers = markers.strip()
            if not markers:
                markers = None
            else:
                markers = Marker(markers)
        else:
            markers = None
        name = name.strip()
        req = None
        path = os.path.normpath(os.path.abspath(name))
        link = None
        extras = None

        if is_url(name):
            link = Link(name)
        else:
            p, extras = _strip_extras(path)
            # Only treat the name as a directory when the user clearly wrote
            # a path (contains a separator or starts with '.').
            looks_like_dir = os.path.isdir(p) and (
                os.path.sep in name or
                (os.path.altsep is not None and os.path.altsep in name) or
                name.startswith('.')
            )
            if looks_like_dir:
                if not is_installable_dir(p):
                    raise InstallationError(
                        "Directory %r is not installable. File 'setup.py' "
                        "not found." % name
                    )
                link = Link(path_to_url(p))
            elif is_archive_file(p):
                if not os.path.isfile(p):
                    logger.warning(
                        'Requirement %r looks like a filename, but the '
                        'file does not exist',
                        name
                    )
                link = Link(path_to_url(p))

        # it's a local file, dir, or url
        if link:
            # Handle relative file URLs
            if link.scheme == 'file' and re.search(r'\.\./', link.url):
                link = Link(
                    path_to_url(os.path.normpath(os.path.abspath(link.path))))
            # wheel file
            if link.is_wheel:
                wheel = Wheel(link.filename)  # can raise InvalidWheelFilename
                req = "%s==%s" % (wheel.name, wheel.version)
            else:
                # set the req to the egg fragment. when it's not there, this
                # will become an 'unnamed' requirement
                req = link.egg_fragment

        # a requirement specifier
        else:
            req = name

        if extras:
            extras = Requirement("placeholder" + extras.lower()).extras
        else:
            extras = ()
        if req is not None:
            try:
                req = Requirement(req)
            except InvalidRequirement:
                # Build the most helpful error message we can for the user.
                if os.path.sep in req:
                    add_msg = "It looks like a path."
                    add_msg += deduce_helpful_msg(req)
                elif '=' in req and not any(op in req for op in operators):
                    add_msg = "= is not a valid operator. Did you mean == ?"
                else:
                    add_msg = traceback.format_exc()
                raise InstallationError(
                    "Invalid requirement: '%s'\n%s" % (req, add_msg))
        return cls(
            req, comes_from, link=link, markers=markers,
            isolated=isolated,
            options=options if options else {},
            wheel_cache=wheel_cache,
            constraint=constraint,
            extras=extras,
        )
271
272 def __str__(self):
273 if self.req:
274 s = str(self.req)
275 if self.link:
276 s += ' from %s' % self.link.url
277 else:
278 s = self.link.url if self.link else None
279 if self.satisfied_by is not None:
280 s += ' in %s' % display_path(self.satisfied_by.location)
281 if self.comes_from:
282 if isinstance(self.comes_from, six.string_types):
283 comes_from = self.comes_from
284 else:
285 comes_from = self.comes_from.from_path()
286 if comes_from:
287 s += ' (from %s)' % comes_from
288 return s
289
290 def __repr__(self):
291 return '<%s object: %s editable=%r>' % (
292 self.__class__.__name__, str(self), self.editable)
293
294 def populate_link(self, finder, upgrade, require_hashes):
295 """Ensure that if a link can be found for this, that it is found.
296
297 Note that self.link may still be None - if Upgrade is False and the
298 requirement is already installed.
299
300 If require_hashes is True, don't use the wheel cache, because cached
301 wheels, always built locally, have different hashes than the files
302 downloaded from the index server and thus throw false hash mismatches.
303 Furthermore, cached wheels at present have undeterministic contents due
304 to file modification times.
305 """
306 if self.link is None:
307 self.link = finder.find_requirement(self, upgrade)
308 if self._wheel_cache is not None and not require_hashes:
309 old_link = self.link
310 self.link = self._wheel_cache.get(self.link, self.name)
311 if old_link != self.link:
312 logger.debug('Using cached wheel link: %s', self.link)
313
314 @property
315 def specifier(self):
316 return self.req.specifier
317
318 @property
319 def is_pinned(self):
320 """Return whether I am pinned to an exact version.
321
322 For example, some-package==1.2 is pinned; some-package>1.2 is not.
323 """
324 specifiers = self.specifier
325 return (len(specifiers) == 1 and
326 next(iter(specifiers)).operator in {'==', '==='})
327
328 def from_path(self):
329 if self.req is None:
330 return None
331 s = str(self.req)
332 if self.comes_from:
333 if isinstance(self.comes_from, six.string_types):
334 comes_from = self.comes_from
335 else:
336 comes_from = self.comes_from.from_path()
337 if comes_from:
338 s += '->' + comes_from
339 return s
340
341 def build_location(self, build_dir):
342 assert build_dir is not None
343 if self._temp_build_dir.path is not None:
344 return self._temp_build_dir.path
345 if self.req is None:
346 # for requirement via a path to a directory: the name of the
347 # package is not available yet so we create a temp directory
348 # Once run_egg_info will have run, we'll be able
349 # to fix it via _correct_build_location
350 # Some systems have /tmp as a symlink which confuses custom
351 # builds (such as numpy). Thus, we ensure that the real path
352 # is returned.
353 self._temp_build_dir.create()
354 self._ideal_build_dir = build_dir
355
356 return self._temp_build_dir.path
357 if self.editable:
358 name = self.name.lower()
359 else:
360 name = self.name
361 # FIXME: Is there a better place to create the build_dir? (hg and bzr
362 # need this)
363 if not os.path.exists(build_dir):
364 logger.debug('Creating directory %s', build_dir)
365 _make_build_dir(build_dir)
366 return os.path.join(build_dir, name)
367
    def _correct_build_location(self):
        """Move self._temp_build_dir to self._ideal_build_dir/self.req.name

        For some requirements (e.g. a path to a directory), the name of the
        package is not available until we run egg_info, so the build_location
        will return a temporary directory and store the _ideal_build_dir.

        This is only called by self.egg_info_path to fix the temporary build
        directory.
        """
        if self.source_dir is not None:
            # Already relocated (or never built from a temp dir).
            return
        assert self.req is not None
        assert self._temp_build_dir.path
        assert self._ideal_build_dir.path
        old_location = self._temp_build_dir.path
        # Clear the temp path first so build_location() computes the final
        # named location instead of returning the temp dir again.
        self._temp_build_dir.path = None

        new_location = self.build_location(self._ideal_build_dir)
        if os.path.exists(new_location):
            raise InstallationError(
                'A package already exists in %s; please remove it to continue'
                % display_path(new_location))
        logger.debug(
            'Moving package %s from %s to new location %s',
            self, display_path(old_location), display_path(new_location),
        )
        shutil.move(old_location, new_location)
        self._temp_build_dir.path = new_location
        self._ideal_build_dir = None
        self.source_dir = os.path.normpath(os.path.abspath(new_location))
        # Invalidate the cached egg-info path; it pointed into the old tree.
        self._egg_info_path = None
400
401 @property
402 def name(self):
403 if self.req is None:
404 return None
405 return native_str(pkg_resources.safe_name(self.req.name))
406
407 @property
408 def setup_py_dir(self):
409 return os.path.join(
410 self.source_dir,
411 self.link and self.link.subdirectory_fragment or '')
412
413 @property
414 def setup_py(self):
415 assert self.source_dir, "No source dir for %s" % self
416
417 setup_py = os.path.join(self.setup_py_dir, 'setup.py')
418
419 # Python2 __file__ should not be unicode
420 if six.PY2 and isinstance(setup_py, six.text_type):
421 setup_py = setup_py.encode(sys.getfilesystemencoding())
422
423 return setup_py
424
425 @property
426 def pyproject_toml(self):
427 assert self.source_dir, "No source dir for %s" % self
428
429 pp_toml = os.path.join(self.setup_py_dir, 'pyproject.toml')
430
431 # Python2 __file__ should not be unicode
432 if six.PY2 and isinstance(pp_toml, six.text_type):
433 pp_toml = pp_toml.encode(sys.getfilesystemencoding())
434
435 return pp_toml
436
437 def get_pep_518_info(self):
438 """Get a list of the packages required to build the project, if any,
439 and a flag indicating whether pyproject.toml is present, indicating
440 that the build should be isolated.
441
442 Build requirements can be specified in a pyproject.toml, as described
443 in PEP 518. If this file exists but doesn't specify build
444 requirements, pip will default to installing setuptools and wheel.
445 """
446 if os.path.isfile(self.pyproject_toml):
447 with open(self.pyproject_toml) as f:
448 pp_toml = pytoml.load(f)
449 build_sys = pp_toml.get('build-system', {})
450 return (build_sys.get('requires', ['setuptools', 'wheel']), True)
451 return (['setuptools', 'wheel'], False)
452
    def run_egg_info(self):
        """Run ``setup.py egg_info`` to generate metadata for this
        requirement, then create or sanity-check ``self.req`` against the
        resulting PKG-INFO.
        """
        assert self.source_dir
        if self.name:
            logger.debug(
                'Running setup.py (path:%s) egg_info for package %s',
                self.setup_py, self.name,
            )
        else:
            logger.debug(
                'Running setup.py (path:%s) egg_info for package from %s',
                self.setup_py, self.link,
            )

        with indent_log():
            script = SETUPTOOLS_SHIM % self.setup_py
            base_cmd = [sys.executable, '-c', script]
            if self.isolated:
                base_cmd += ["--no-user-cfg"]
            egg_info_cmd = base_cmd + ['egg_info']
            # We can't put the .egg-info files at the root, because then the
            # source code will be mistaken for an installed egg, causing
            # problems
            if self.editable:
                egg_base_option = []
            else:
                egg_info_dir = os.path.join(self.setup_py_dir, 'pip-egg-info')
                ensure_dir(egg_info_dir)
                egg_base_option = ['--egg-base', 'pip-egg-info']
            with self.build_env:
                call_subprocess(
                    egg_info_cmd + egg_base_option,
                    cwd=self.setup_py_dir,
                    show_stdout=False,
                    command_desc='python setup.py egg_info')

        if not self.req:
            # Unnamed requirement: synthesize "Name==Version" from the
            # generated metadata; '===' is used when the version does not
            # parse as a PEP 440 Version (legacy versions).
            if isinstance(parse_version(self.pkg_info()["Version"]), Version):
                op = "=="
            else:
                op = "==="
            self.req = Requirement(
                "".join([
                    self.pkg_info()["Name"],
                    op,
                    self.pkg_info()["Version"],
                ])
            )
            # Now that the name is known, move the temp build dir into place.
            self._correct_build_location()
        else:
            # Named requirement: warn when the metadata disagrees with the
            # name we were given, and trust the metadata.
            metadata_name = canonicalize_name(self.pkg_info()["Name"])
            if canonicalize_name(self.req.name) != metadata_name:
                logger.warning(
                    'Running setup.py (path:%s) egg_info for package %s '
                    'produced metadata for project name %s. Fix your '
                    '#egg=%s fragments.',
                    self.setup_py, self.name, metadata_name, self.name
                )
                self.req = Requirement(metadata_name)
511
512 def egg_info_data(self, filename):
513 if self.satisfied_by is not None:
514 if not self.satisfied_by.has_metadata(filename):
515 return None
516 return self.satisfied_by.get_metadata(filename)
517 assert self.source_dir
518 filename = self.egg_info_path(filename)
519 if not os.path.exists(filename):
520 return None
521 data = read_text_file(filename)
522 return data
523
524 def egg_info_path(self, filename):
525 if self._egg_info_path is None:
526 if self.editable:
527 base = self.source_dir
528 else:
529 base = os.path.join(self.setup_py_dir, 'pip-egg-info')
530 filenames = os.listdir(base)
531 if self.editable:
532 filenames = []
533 for root, dirs, files in os.walk(base):
534 for dir in vcs.dirnames:
535 if dir in dirs:
536 dirs.remove(dir)
537 # Iterate over a copy of ``dirs``, since mutating
538 # a list while iterating over it can cause trouble.
539 # (See https://github.com/pypa/pip/pull/462.)
540 for dir in list(dirs):
541 # Don't search in anything that looks like a virtualenv
542 # environment
543 if (
544 os.path.lexists(
545 os.path.join(root, dir, 'bin', 'python')
546 ) or
547 os.path.exists(
548 os.path.join(
549 root, dir, 'Scripts', 'Python.exe'
550 )
551 )):
552 dirs.remove(dir)
553 # Also don't search through tests
554 elif dir == 'test' or dir == 'tests':
555 dirs.remove(dir)
556 filenames.extend([os.path.join(root, dir)
557 for dir in dirs])
558 filenames = [f for f in filenames if f.endswith('.egg-info')]
559
560 if not filenames:
561 raise InstallationError(
562 'No files/directories in %s (from %s)' % (base, filename)
563 )
564 assert filenames, \
565 "No files/directories in %s (from %s)" % (base, filename)
566
567 # if we have more than one match, we pick the toplevel one. This
568 # can easily be the case if there is a dist folder which contains
569 # an extracted tarball for testing purposes.
570 if len(filenames) > 1:
571 filenames.sort(
572 key=lambda x: x.count(os.path.sep) +
573 (os.path.altsep and x.count(os.path.altsep) or 0)
574 )
575 self._egg_info_path = os.path.join(base, filenames[0])
576 return os.path.join(self._egg_info_path, filename)
577
578 def pkg_info(self):
579 p = FeedParser()
580 data = self.egg_info_data('PKG-INFO')
581 if not data:
582 logger.warning(
583 'No PKG-INFO file found in %s',
584 display_path(self.egg_info_path('PKG-INFO')),
585 )
586 p.feed(data or '')
587 return p.close()
588
    # Matches "[section]" headers (e.g. extras sections) in requires.txt data.
    _requirements_section_re = re.compile(r'\[(.*?)\]')
590
591 @property
592 def installed_version(self):
593 return get_installed_version(self.name)
594
595 def assert_source_matches_version(self):
596 assert self.source_dir
597 version = self.pkg_info()['version']
598 if self.req.specifier and version not in self.req.specifier:
599 logger.warning(
600 'Requested %s, but installing version %s',
601 self,
602 version,
603 )
604 else:
605 logger.debug(
606 'Source in %s has version %s, which satisfies requirement %s',
607 display_path(self.source_dir),
608 version,
609 self,
610 )
611
612 def update_editable(self, obtain=True):
613 if not self.link:
614 logger.debug(
615 "Cannot update repository at %s; repository location is "
616 "unknown",
617 self.source_dir,
618 )
619 return
620 assert self.editable
621 assert self.source_dir
622 if self.link.scheme == 'file':
623 # Static paths don't get updated
624 return
625 assert '+' in self.link.url, "bad url: %r" % self.link.url
626 if not self.update:
627 return
628 vc_type, url = self.link.url.split('+', 1)
629 backend = vcs.get_backend(vc_type)
630 if backend:
631 vcs_backend = backend(self.link.url)
632 if obtain:
633 vcs_backend.obtain(self.source_dir)
634 else:
635 vcs_backend.export(self.source_dir)
636 else:
637 assert 0, (
638 'Unexpected version control type (in %s): %s'
639 % (self.link, vc_type))
640
641 def uninstall(self, auto_confirm=False, verbose=False,
642 use_user_site=False):
643 """
644 Uninstall the distribution currently satisfying this requirement.
645
646 Prompts before removing or modifying files unless
647 ``auto_confirm`` is True.
648
649 Refuses to delete or modify files outside of ``sys.prefix`` -
650 thus uninstallation within a virtual environment can only
651 modify that virtual environment, even if the virtualenv is
652 linked to global site-packages.
653
654 """
655 if not self.check_if_exists(use_user_site):
656 logger.warning("Skipping %s as it is not installed.", self.name)
657 return
658 dist = self.satisfied_by or self.conflicts_with
659
660 uninstalled_pathset = UninstallPathSet.from_dist(dist)
661 uninstalled_pathset.remove(auto_confirm, verbose)
662 return uninstalled_pathset
663
    def archive(self, build_dir):
        """Zip this requirement's source tree into *build_dir*, prompting
        the user when the target archive already exists.
        """
        assert self.source_dir
        create_archive = True
        archive_name = '%s-%s.zip' % (self.name, self.pkg_info()["version"])
        archive_path = os.path.join(build_dir, archive_name)
        if os.path.exists(archive_path):
            # Ask the user whether to reuse, overwrite, back up, or abort.
            response = ask_path_exists(
                'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)bort ' %
                display_path(archive_path), ('i', 'w', 'b', 'a'))
            if response == 'i':
                create_archive = False
            elif response == 'w':
                logger.warning('Deleting %s', display_path(archive_path))
                os.remove(archive_path)
            elif response == 'b':
                dest_file = backup_dir(archive_path)
                logger.warning(
                    'Backing up %s to %s',
                    display_path(archive_path),
                    display_path(dest_file),
                )
                shutil.move(archive_path, dest_file)
            elif response == 'a':
                sys.exit(-1)
        if create_archive:
            zip = zipfile.ZipFile(
                archive_path, 'w', zipfile.ZIP_DEFLATED,
                allowZip64=True
            )
            dir = os.path.normcase(os.path.abspath(self.setup_py_dir))
            for dirpath, dirnames, filenames in os.walk(dir):
                if 'pip-egg-info' in dirnames:
                    # Never ship pip's own metadata scratch directory.
                    dirnames.remove('pip-egg-info')
                for dirname in dirnames:
                    # Store an explicit entry for each directory so empty
                    # directories survive the round trip.
                    dirname = os.path.join(dirpath, dirname)
                    name = self._clean_zip_name(dirname, dir)
                    zipdir = zipfile.ZipInfo(self.name + '/' + name + '/')
                    zipdir.external_attr = 0x1ED << 16  # 0o755
                    zip.writestr(zipdir, '')
                for filename in filenames:
                    if filename == PIP_DELETE_MARKER_FILENAME:
                        continue
                    filename = os.path.join(dirpath, filename)
                    name = self._clean_zip_name(filename, dir)
                    zip.write(filename, self.name + '/' + name)
            zip.close()
            logger.info('Saved %s', display_path(archive_path))
711
712 def _clean_zip_name(self, name, prefix):
713 assert name.startswith(prefix + os.path.sep), (
714 "name %r doesn't start with prefix %r" % (name, prefix)
715 )
716 name = name[len(prefix) + 1:]
717 name = name.replace(os.path.sep, '/')
718 return name
719
720 def match_markers(self, extras_requested=None):
721 if not extras_requested:
722 # Provide an extra to safely evaluate the markers
723 # without matching any extra
724 extras_requested = ('',)
725 if self.markers is not None:
726 return any(
727 self.markers.evaluate({'extra': extra})
728 for extra in extras_requested)
729 else:
730 return True
731
    def install(self, install_options, global_options=None, root=None,
                home=None, prefix=None, warn_script_location=True,
                use_user_site=False, pycompile=True):
        """Install this requirement: editable develop-install, wheel move,
        or ``setup.py install``, then record installed files.
        """
        global_options = global_options if global_options is not None else []
        if self.editable:
            self.install_editable(
                install_options, global_options, prefix=prefix,
            )
            return
        if self.is_wheel:
            # Wheels are simply unpacked into place; no setup.py is run.
            version = wheel.wheel_version(self.source_dir)
            wheel.check_compatibility(version, self.name)

            self.move_wheel_files(
                self.source_dir, root=root, prefix=prefix, home=home,
                warn_script_location=warn_script_location,
                use_user_site=use_user_site, pycompile=pycompile,
            )
            self.install_succeeded = True
            return

        # Extend the list of global and install options passed on to
        # the setup.py call with the ones from the requirements file.
        # Options specified in requirements file override those
        # specified on the command line, since the last option given
        # to setup.py is the one that is used.
        global_options = list(global_options) + \
            self.options.get('global_options', [])
        install_options = list(install_options) + \
            self.options.get('install_options', [])

        if self.isolated:
            global_options = global_options + ["--no-user-cfg"]

        with TempDirectory(kind="record") as temp_dir:
            record_filename = os.path.join(temp_dir.path, 'install-record.txt')
            install_args = self.get_install_args(
                global_options, record_filename, root, prefix, pycompile,
            )
            msg = 'Running setup.py install for %s' % (self.name,)
            with open_spinner(msg) as spinner:
                with indent_log():
                    with self.build_env:
                        call_subprocess(
                            install_args + install_options,
                            cwd=self.setup_py_dir,
                            show_stdout=False,
                            spinner=spinner,
                        )

            if not os.path.exists(record_filename):
                logger.debug('Record file %s not found', record_filename)
                return
            self.install_succeeded = True

            def prepend_root(path):
                # Re-root absolute record paths when --root was given.
                if root is None or not os.path.isabs(path):
                    return path
                else:
                    return change_root(root, path)

            # First pass over the record: find the installed .egg-info dir.
            with open(record_filename) as f:
                for line in f:
                    directory = os.path.dirname(line)
                    if directory.endswith('.egg-info'):
                        egg_info_dir = prepend_root(directory)
                        break
                else:
                    logger.warning(
                        'Could not find .egg-info directory in install record'
                        ' for %s',
                        self,
                    )
                    # FIXME: put the record somewhere
                    # FIXME: should this be an error?
                    return
            # Second pass: rewrite entries relative to the .egg-info dir and
            # persist them as installed-files.txt for later uninstall.
            new_lines = []
            with open(record_filename) as f:
                for line in f:
                    filename = line.strip()
                    if os.path.isdir(filename):
                        filename += os.path.sep
                    new_lines.append(
                        os.path.relpath(prepend_root(filename), egg_info_dir)
                    )
            new_lines.sort()
            ensure_dir(egg_info_dir)
            inst_files_path = os.path.join(egg_info_dir, 'installed-files.txt')
            with open(inst_files_path, 'w') as f:
                f.write('\n'.join(new_lines) + '\n')
822
823 def ensure_has_source_dir(self, parent_dir):
824 """Ensure that a source_dir is set.
825
826 This will create a temporary build dir if the name of the requirement
827 isn't known yet.
828
829 :param parent_dir: The ideal pip parent_dir for the source_dir.
830 Generally src_dir for editables and build_dir for sdists.
831 :return: self.source_dir
832 """
833 if self.source_dir is None:
834 self.source_dir = self.build_location(parent_dir)
835 return self.source_dir
836
837 def get_install_args(self, global_options, record_filename, root, prefix,
838 pycompile):
839 install_args = [sys.executable, "-u"]
840 install_args.append('-c')
841 install_args.append(SETUPTOOLS_SHIM % self.setup_py)
842 install_args += list(global_options) + \
843 ['install', '--record', record_filename]
844 install_args += ['--single-version-externally-managed']
845
846 if root is not None:
847 install_args += ['--root', root]
848 if prefix is not None:
849 install_args += ['--prefix', prefix]
850
851 if pycompile:
852 install_args += ["--compile"]
853 else:
854 install_args += ["--no-compile"]
855
856 if running_under_virtualenv():
857 py_ver_str = 'python' + sysconfig.get_python_version()
858 install_args += ['--install-headers',
859 os.path.join(sys.prefix, 'include', 'site',
860 py_ver_str, self.name)]
861
862 return install_args
863
864 def remove_temporary_source(self):
865 """Remove the source files from this requirement, if they are marked
866 for deletion"""
867 if self.source_dir and os.path.exists(
868 os.path.join(self.source_dir, PIP_DELETE_MARKER_FILENAME)):
869 logger.debug('Removing source in %s', self.source_dir)
870 rmtree(self.source_dir)
871 self.source_dir = None
872 self._temp_build_dir.cleanup()
873 self.build_env.cleanup()
874
875 def install_editable(self, install_options,
876 global_options=(), prefix=None):
877 logger.info('Running setup.py develop for %s', self.name)
878
879 if self.isolated:
880 global_options = list(global_options) + ["--no-user-cfg"]
881
882 if prefix:
883 prefix_param = ['--prefix={}'.format(prefix)]
884 install_options = list(install_options) + prefix_param
885
886 with indent_log():
887 # FIXME: should we do --install-headers here too?
888 with self.build_env:
889 call_subprocess(
890 [
891 sys.executable,
892 '-c',
893 SETUPTOOLS_SHIM % self.setup_py
894 ] +
895 list(global_options) +
896 ['develop', '--no-deps'] +
897 list(install_options),
898
899 cwd=self.setup_py_dir,
900 show_stdout=False,
901 )
902
903 self.install_succeeded = True
904
905 def check_if_exists(self, use_user_site):
906 """Find an installed distribution that satisfies or conflicts
907 with this requirement, and set self.satisfied_by or
908 self.conflicts_with appropriately.
909 """
910 if self.req is None:
911 return False
912 try:
913 # get_distribution() will resolve the entire list of requirements
914 # anyway, and we've already determined that we need the requirement
915 # in question, so strip the marker so that we don't try to
916 # evaluate it.
917 no_marker = Requirement(str(self.req))
918 no_marker.marker = None
919 self.satisfied_by = pkg_resources.get_distribution(str(no_marker))
920 if self.editable and self.satisfied_by:
921 self.conflicts_with = self.satisfied_by
922 # when installing editables, nothing pre-existing should ever
923 # satisfy
924 self.satisfied_by = None
925 return True
926 except pkg_resources.DistributionNotFound:
927 return False
928 except pkg_resources.VersionConflict:
929 existing_dist = pkg_resources.get_distribution(
930 self.req.name
931 )
932 if use_user_site:
933 if dist_in_usersite(existing_dist):
934 self.conflicts_with = existing_dist
935 elif (running_under_virtualenv() and
936 dist_in_site_packages(existing_dist)):
937 raise InstallationError(
938 "Will not install to the user site because it will "
939 "lack sys.path precedence to %s in %s" %
940 (existing_dist.project_name, existing_dist.location)
941 )
942 else:
943 self.conflicts_with = existing_dist
944 return True
945
946 @property
947 def is_wheel(self):
948 return self.link and self.link.is_wheel
949
950 def move_wheel_files(self, wheeldir, root=None, home=None, prefix=None,
951 warn_script_location=True, use_user_site=False,
952 pycompile=True):
953 move_wheel_files(
954 self.name, self.req, wheeldir,
955 user=use_user_site,
956 home=home,
957 root=root,
958 prefix=prefix,
959 pycompile=pycompile,
960 isolated=self.isolated,
961 warn_script_location=warn_script_location,
962 )
963
964 def get_dist(self):
965 """Return a pkg_resources.Distribution built from self.egg_info_path"""
966 egg_info = self.egg_info_path('').rstrip(os.path.sep)
967 base_dir = os.path.dirname(egg_info)
968 metadata = pkg_resources.PathMetadata(base_dir, egg_info)
969 dist_name = os.path.splitext(os.path.basename(egg_info))[0]
970 return pkg_resources.Distribution(
971 os.path.dirname(egg_info),
972 project_name=dist_name,
973 metadata=metadata,
974 )
975
976 @property
977 def has_hash_options(self):
978 """Return whether any known-good hashes are specified as options.
979
980 These activate --require-hashes mode; hashes specified as part of a
981 URL do not.
982
983 """
984 return bool(self.options.get('hashes', {}))
985
986 def hashes(self, trust_internet=True):
987 """Return a hash-comparer that considers my option- and URL-based
988 hashes to be known-good.
989
990 Hashes in URLs--ones embedded in the requirements file, not ones
991 downloaded from an index server--are almost peers with ones from
992 flags. They satisfy --require-hashes (whether it was implicitly or
993 explicitly activated) but do not activate it. md5 and sha224 are not
994 allowed in flags, which should nudge people toward good algos. We
995 always OR all hashes together, even ones from URLs.
996
997 :param trust_internet: Whether to trust URL-based (#md5=...) hashes
998 downloaded from the internet, as by populate_link()
999
1000 """
1001 good_hashes = self.options.get('hashes', {}).copy()
1002 link = self.link if trust_internet else self.original_link
1003 if link and link.hash:
1004 good_hashes.setdefault(link.hash_name, []).append(link.hash)
1005 return Hashes(good_hashes)
1006
1007
def _strip_postfix(req):
    """
    Strip req postfix ( -dev, 0.2, etc )
    """
    # FIXME: use package_to_requirement?
    match = re.search(r'^(.*?)(?:-dev|-\d.*)$', req)
    if match is None:
        return req
    # Strip off -dev, -0.2, etc.
    warnings.warn(
        "#egg cleanup for editable urls will be dropped in the future",
        RemovedInPip11Warning,
    )
    return match.group(1)
1022
1023
def parse_editable(editable_req):
    """Parses an editable requirement into:
    - a requirement name
    - an URL
    - extras
    - editable options
    Accepted requirements:
    svn+http://blahblah@rev#egg=Foobar[baz]&subdirectory=version_subdir
    .[some_extra]
    """

    from pip._internal.index import Link

    url = editable_req

    # If a file path is specified with extras, strip off the extras.
    url_no_extras, extras = _strip_extras(url)

    if os.path.isdir(url_no_extras):
        if not os.path.exists(os.path.join(url_no_extras, 'setup.py')):
            raise InstallationError(
                "Directory %r is not installable. File 'setup.py' not found." %
                url_no_extras
            )
        # Treating it as code that has already been checked out
        url_no_extras = path_to_url(url_no_extras)

    if url_no_extras.lower().startswith('file:'):
        # Local path (or explicit file: URL): no VCS backend involved.
        package_name = Link(url_no_extras).egg_fragment
        if extras:
            return (
                package_name,
                url_no_extras,
                Requirement("placeholder" + extras.lower()).extras,
            )
        else:
            return package_name, url_no_extras, None

    # Allow bare "vcs:..." forms by prefixing the matching "vcs+" scheme.
    for version_control in vcs:
        if url.lower().startswith('%s:' % version_control):
            url = '%s+%s' % (version_control, url)
            break

    if '+' not in url:
        raise InstallationError(
            '%s should either be a path to a local project or a VCS url '
            'beginning with svn+, git+, hg+, or bzr+' %
            editable_req
        )

    vc_type = url.split('+', 1)[0].lower()

    if not vcs.get_backend(vc_type):
        error_message = 'For --editable=%s only ' % editable_req + \
            ', '.join([backend.name + '+URL' for backend in vcs.backends]) + \
            ' is currently supported'
        raise InstallationError(error_message)

    # VCS editables must carry an explicit #egg= name.
    package_name = Link(url).egg_fragment
    if not package_name:
        raise InstallationError(
            "Could not detect requirement name for '%s', please specify one "
            "with #egg=your_package_name" % editable_req
        )
    return _strip_postfix(package_name), url, None
1089
1090
def deduce_helpful_msg(req):
    """Returns helpful msg in case requirements file does not exist,
    or cannot be parsed.

    :params req: Requirements file path
    """
    if not os.path.exists(req):
        return " File '%s' does not exist." % (req)
    msg = " It does exist."
    # Try to parse and check if it is a requirements file.
    try:
        with open(req, 'r') as fp:
            # parse first line only
            next(parse_requirements(fp.read()))
        msg += " The argument you provided " + \
            "(%s) appears to be a" % (req) + \
            " requirements file. If that is the" + \
            " case, use the '-r' flag to install" + \
            " the packages specified within it."
    except StopIteration:
        # BUGFIX: an existing but empty (or comment-only) file yields no
        # requirements; the original next() let StopIteration escape.
        pass
    except RequirementParseError:
        # BUGFIX: the original message embedded a backslash-continuation
        # inside the string literal, leaking a run of indentation spaces
        # into the logged text; also use lazy %-args for logging.
        logger.debug("Cannot parse '%s' as requirements file", req,
                     exc_info=1)
    return msg
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/req_set.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/req_set.py
new file mode 100644
index 0000000..78b7d32
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/req_set.py
@@ -0,0 +1,164 @@
1from __future__ import absolute_import
2
3import logging
4from collections import OrderedDict
5
6from pip._internal.exceptions import InstallationError
7from pip._internal.utils.logging import indent_log
8from pip._internal.wheel import Wheel
9
10logger = logging.getLogger(__name__)
11
12
class RequirementSet(object):
    """An ordered collection of requirements to be prepared/installed."""

    def __init__(self, require_hashes=False):
        """Create a RequirementSet.

        :param require_hashes: Whether every requirement must carry a
            known-good hash (--require-hashes mode).
        """

        # name -> InstallRequirement, in insertion order.
        self.requirements = OrderedDict()
        self.require_hashes = require_hashes

        # Mapping of alias: real_name
        self.requirement_aliases = {}
        # Requirements added without an egg fragment / project name.
        self.unnamed_requirements = []
        self.successfully_downloaded = []
        # Requirements whose temporary sources should be removed later.
        self.reqs_to_cleanup = []

    def __str__(self):
        # Only user-supplied (root) requirements, sorted by name.
        reqs = [req for req in self.requirements.values()
                if not req.comes_from]
        reqs.sort(key=lambda req: req.name.lower())
        return ' '.join([str(req.req) for req in reqs])

    def __repr__(self):
        reqs = [req for req in self.requirements.values()]
        reqs.sort(key=lambda req: req.name.lower())
        reqs_str = ', '.join([str(req.req) for req in reqs])
        return ('<%s object; %d requirement(s): %s>'
                % (self.__class__.__name__, len(reqs), reqs_str))

    def add_requirement(self, install_req, parent_req_name=None,
                        extras_requested=None):
        """Add install_req as a requirement to install.

        :param parent_req_name: The name of the requirement that needed this
            added. The name is used because when multiple unnamed requirements
            resolve to the same name, we could otherwise end up with dependency
            links that point outside the Requirements set. parent_req must
            already be added. Note that None implies that this is a user
            supplied requirement, vs an inferred one.
        :param extras_requested: an iterable of extras used to evaluate the
            environment markers.
        :return: Additional requirements to scan. That is either [] if
            the requirement is not applicable, or [install_req] if the
            requirement is applicable and has just been added.
        """
        name = install_req.name
        if not install_req.match_markers(extras_requested):
            logger.info("Ignoring %s: markers '%s' don't match your "
                        "environment", install_req.name,
                        install_req.markers)
            return [], None

        # This check has to come after we filter requirements with the
        # environment markers.
        if install_req.link and install_req.link.is_wheel:
            wheel = Wheel(install_req.link.filename)
            if not wheel.supported():
                raise InstallationError(
                    "%s is not a supported wheel on this platform." %
                    wheel.filename
                )

        # This next bit is really a sanity check.
        assert install_req.is_direct == (parent_req_name is None), (
            "a direct req shouldn't have a parent and also, "
            "a non direct req should have a parent"
        )

        if not name:
            # url or path requirement w/o an egg fragment
            self.unnamed_requirements.append(install_req)
            return [install_req], None
        else:
            try:
                existing_req = self.get_requirement(name)
            except KeyError:
                existing_req = None
            # Two user-supplied requirements for the same name with
            # differing specifiers (and neither a constraint) is an error.
            if (parent_req_name is None and existing_req and not
                    existing_req.constraint and
                    existing_req.extras == install_req.extras and not
                    existing_req.req.specifier == install_req.req.specifier):
                raise InstallationError(
                    'Double requirement given: %s (already in %s, name=%r)'
                    % (install_req, existing_req, name))
            if not existing_req:
                # Add requirement
                self.requirements[name] = install_req
                # FIXME: what about other normalizations? E.g., _ vs. -?
                if name.lower() != name:
                    self.requirement_aliases[name.lower()] = name
                result = [install_req]
            else:
                # Assume there's no need to scan, and that we've already
                # encountered this for scanning.
                result = []
                if not install_req.constraint and existing_req.constraint:
                    # A constraint can only pin a version, not a path/url.
                    if (install_req.link and not (existing_req.link and
                       install_req.link.path == existing_req.link.path)):
                        self.reqs_to_cleanup.append(install_req)
                        raise InstallationError(
                            "Could not satisfy constraints for '%s': "
                            "installation from path or url cannot be "
                            "constrained to a version" % name,
                        )
                    # If we're now installing a constraint, mark the existing
                    # object for real installation.
                    existing_req.constraint = False
                    existing_req.extras = tuple(
                        sorted(set(existing_req.extras).union(
                            set(install_req.extras))))
                    logger.debug("Setting %s extras to: %s",
                                 existing_req, existing_req.extras)
                    # And now we need to scan this.
                    result = [existing_req]
                # Canonicalise to the already-added object for the backref
                # check below.
                install_req = existing_req

            # We return install_req here to allow for the caller to add it to
            # the dependency information for the parent package.
            return result, install_req

    def has_requirement(self, project_name):
        # A pure constraint does not count as a real requirement.
        name = project_name.lower()
        if (name in self.requirements and
           not self.requirements[name].constraint or
           name in self.requirement_aliases and
           not self.requirements[self.requirement_aliases[name]].constraint):
            return True
        return False

    @property
    def has_requirements(self):
        # Truthy when any non-constraint or unnamed requirement exists.
        return list(req for req in self.requirements.values() if not
                    req.constraint) or self.unnamed_requirements

    def get_requirement(self, project_name):
        # Look up by exact name, lowercased name, and recorded aliases.
        for name in project_name, project_name.lower():
            if name in self.requirements:
                return self.requirements[name]
            if name in self.requirement_aliases:
                return self.requirements[self.requirement_aliases[name]]
        raise KeyError("No project with the name %r" % project_name)

    def cleanup_files(self):
        """Clean up files, remove builds."""
        logger.debug('Cleaning up...')
        with indent_log():
            for req in self.reqs_to_cleanup:
                req.remove_temporary_source()
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/req_uninstall.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/req_uninstall.py
new file mode 100644
index 0000000..a47520f
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/req_uninstall.py
@@ -0,0 +1,455 @@
1from __future__ import absolute_import
2
3import csv
4import functools
5import logging
6import os
7import sys
8import sysconfig
9
10from pip._vendor import pkg_resources
11
12from pip._internal.compat import WINDOWS, cache_from_source, uses_pycache
13from pip._internal.exceptions import UninstallationError
14from pip._internal.locations import bin_py, bin_user
15from pip._internal.utils.logging import indent_log
16from pip._internal.utils.misc import (
17 FakeFile, ask, dist_in_usersite, dist_is_local, egg_link_path, is_local,
18 normalize_path, renames,
19)
20from pip._internal.utils.temp_dir import TempDirectory
21
22logger = logging.getLogger(__name__)
23
24
def _script_names(dist, script_name, is_gui):
    """Return the fully qualified paths of the wrapper files that
    {console,gui}_scripts would have created for ``dist``.

    On Windows this includes the .exe launcher, its manifest, and the
    -script.py/-script.pyw companion file.
    """
    bin_dir = bin_user if dist_in_usersite(dist) else bin_py
    exe_name = os.path.join(bin_dir, script_name)
    names = [exe_name]
    if WINDOWS:
        names.extend([exe_name + '.exe', exe_name + '.exe.manifest'])
        script_suffix = '-script.pyw' if is_gui else '-script.py'
        names.append(exe_name + script_suffix)
    return names
44
45
46def _unique(fn):
47 @functools.wraps(fn)
48 def unique(*args, **kw):
49 seen = set()
50 for item in fn(*args, **kw):
51 if item not in seen:
52 seen.add(item)
53 yield item
54 return unique
55
56
@_unique
def uninstallation_paths(dist):
    """
    Yield every path that RECORD says belongs to ``dist``.

    For each ``.py`` entry the sibling ``.pyc`` in the same directory is
    yielded as well; __pycache__ variants are handled later by
    UninstallPathSet.add(). The _unique decorator suppresses duplicates.
    """
    record_rows = csv.reader(FakeFile(dist.get_metadata_lines('RECORD')))
    for row in record_rows:
        entry = os.path.join(dist.location, row[0])
        yield entry
        if entry.endswith('.py'):
            directory, filename = os.path.split(entry)
            yield os.path.join(directory, filename[:-3] + '.pyc')
76
77
def compact(paths):
    """Reduce a set of paths to the minimal covering set.

    If both ``/a/path/`` and ``/a/path/to/a/file.txt`` are present, only
    the shorter path is kept — removing the directory removes everything
    under it. Trailing ``*`` markers on kept paths are ignored when
    testing coverage.
    """
    sep = os.path.sep
    kept = set()
    # Shortest-first guarantees any potential parent is examined before
    # the paths it would cover.
    for path in sorted(paths, key=len):
        already_covered = any(
            path.startswith(prefix.rstrip("*")) and
            path[len(prefix.rstrip("*").rstrip(sep))] == sep
            for prefix in kept
        )
        if not already_covered:
            kept.add(path)
    return kept
95
96
def compress_for_output_listing(paths):
    """Return a (will_remove, will_skip) pair of path sets for display.

    ``will_remove``: files belonging to a package folder (detected via
    ``__init__.py`` or ``.dist-info``) are folded into ``<folder>/*``;
    all other recorded files are listed individually.

    ``will_skip``: files found on disk inside those folders that were not
    recorded — i.e. ones an uninstall would leave behind.
    """
    will_skip = set()

    # Partition the recorded paths into package folders and plain files.
    folders = set()
    files = set()
    for path in list(paths):
        if path.endswith(".pyc"):
            continue
        if path.endswith("__init__.py") or ".dist-info" in path:
            folders.add(os.path.dirname(path))
        files.add(path)

    folders = compact(folders)

    # Walk each folder on disk so extra, unrecorded files are noticed.
    for folder in folders:
        for dirpath, _, dirfiles in os.walk(folder):
            for fname in dirfiles:
                if fname.endswith(".pyc"):
                    continue
                candidate = os.path.normcase(os.path.join(dirpath, fname))
                if os.path.isfile(candidate) and candidate not in files:
                    will_skip.add(candidate)

    will_remove = files | {
        os.path.join(folder, "*") for folder in folders
    }
    return will_remove, will_skip
141
142
class UninstallPathSet(object):
    """A set of file paths to be removed in the uninstallation of a
    requirement."""
    def __init__(self, dist):
        # Local paths scheduled for removal.
        self.paths = set()
        # Paths found but outside the environment — reported, never touched.
        self._refuse = set()
        # Maps a .pth file path to the UninstallPthEntries editing it.
        self.pth = {}
        self.dist = dist
        # Removed files are first moved here so remove() can be rolled back.
        self.save_dir = TempDirectory(kind="uninstall")
        # Paths actually moved by remove(), in move order, for rollback().
        self._moved_paths = []

    def _permitted(self, path):
        """
        Return True if the given path is one we are permitted to
        remove/modify, False otherwise.

        """
        return is_local(path)

    def add(self, path):
        """Schedule a single existing, local file for removal; also queue
        its compiled bytecode when the file is a .py source."""
        head, tail = os.path.split(path)

        # we normalize the head to resolve parent directory symlinks, but not
        # the tail, since we only want to uninstall symlinks, not their targets
        path = os.path.join(normalize_path(head), os.path.normcase(tail))

        if not os.path.exists(path):
            return
        if self._permitted(path):
            self.paths.add(path)
        else:
            self._refuse.add(path)

        # __pycache__ files can show up after 'installed-files.txt' is created,
        # due to imports
        if os.path.splitext(path)[1] == '.py' and uses_pycache:
            self.add(cache_from_source(path))

    def add_pth(self, pth_file, entry):
        """Schedule removal of ``entry`` from the given .pth file (only if
        the .pth file is local to this environment)."""
        pth_file = normalize_path(pth_file)
        if self._permitted(pth_file):
            if pth_file not in self.pth:
                self.pth[pth_file] = UninstallPthEntries(pth_file)
            self.pth[pth_file].add(entry)
        else:
            self._refuse.add(pth_file)

    def _stash(self, path):
        # Mirror ``path`` (minus any drive letter) under the temp save dir,
        # so rollback() can reconstruct the original location.
        return os.path.join(
            self.save_dir.path, os.path.splitdrive(path)[1].lstrip(os.path.sep)
        )

    def remove(self, auto_confirm=False, verbose=False):
        """Remove paths in ``self.paths`` with confirmation (unless
        ``auto_confirm`` is True)."""

        if not self.paths:
            logger.info(
                "Can't uninstall '%s'. No files were found to uninstall.",
                self.dist.project_name,
            )
            return

        dist_name_version = (
            self.dist.project_name + "-" + self.dist.version
        )
        logger.info('Uninstalling %s:', dist_name_version)

        with indent_log():
            if auto_confirm or self._allowed_to_proceed(verbose):
                self.save_dir.create()

                # Files are moved (not deleted) into the stash so that
                # rollback() can restore them on failure.
                for path in sorted(compact(self.paths)):
                    new_path = self._stash(path)
                    logger.debug('Removing file or directory %s', path)
                    self._moved_paths.append(path)
                    renames(path, new_path)
                for pth in self.pth.values():
                    pth.remove()

                logger.info('Successfully uninstalled %s', dist_name_version)

    def _allowed_to_proceed(self, verbose):
        """Display which files would be deleted and prompt for confirmation
        """

        def _display(msg, paths):
            # Print a header plus the compacted, sorted path list.
            if not paths:
                return

            logger.info(msg)
            with indent_log():
                for path in sorted(compact(paths)):
                    logger.info(path)

        if not verbose:
            will_remove, will_skip = compress_for_output_listing(self.paths)
        else:
            # In verbose mode, display all the files that are going to be
            # deleted.
            will_remove = list(self.paths)
            will_skip = set()

        _display('Would remove:', will_remove)
        _display('Would not remove (might be manually added):', will_skip)
        _display('Would not remove (outside of prefix):', self._refuse)

        return ask('Proceed (y/n)? ', ('y', 'n')) == 'y'

    def rollback(self):
        """Rollback the changes previously made by remove()."""
        if self.save_dir.path is None:
            logger.error(
                "Can't roll back %s; was not uninstalled",
                self.dist.project_name,
            )
            return False
        logger.info('Rolling back uninstall of %s', self.dist.project_name)
        # Move every stashed file back to its original location.
        for path in self._moved_paths:
            tmp_path = self._stash(path)
            logger.debug('Replacing %s', path)
            renames(tmp_path, path)
        for pth in self.pth.values():
            pth.rollback()

    def commit(self):
        """Remove temporary save dir: rollback will no longer be possible."""
        self.save_dir.cleanup()
        self._moved_paths = []

    @classmethod
    def from_dist(cls, dist):
        """Build an UninstallPathSet for ``dist`` by detecting how it was
        installed (.egg-info + installed-files.txt, top_level.txt, distutils,
        easy_install .egg, .dist-info RECORD, or a develop egg-link) and
        collecting the matching paths, scripts included.
        """
        dist_path = normalize_path(dist.location)
        if not dist_is_local(dist):
            logger.info(
                "Not uninstalling %s at %s, outside environment %s",
                dist.key,
                dist_path,
                sys.prefix,
            )
            return cls(dist)

        if dist_path in {p for p in {sysconfig.get_path("stdlib"),
                                     sysconfig.get_path("platstdlib")}
                         if p}:
            logger.info(
                "Not uninstalling %s at %s, as it is in the standard library.",
                dist.key,
                dist_path,
            )
            return cls(dist)

        paths_to_remove = cls(dist)
        develop_egg_link = egg_link_path(dist)
        develop_egg_link_egg_info = '{}.egg-info'.format(
            pkg_resources.to_filename(dist.project_name))
        egg_info_exists = dist.egg_info and os.path.exists(dist.egg_info)
        # Special case for distutils installed package
        distutils_egg_info = getattr(dist._provider, 'path', None)

        # Uninstall cases order do matter as in the case of 2 installs of the
        # same package, pip needs to uninstall the currently detected version
        if (egg_info_exists and dist.egg_info.endswith('.egg-info') and
                not dist.egg_info.endswith(develop_egg_link_egg_info)):
            # if dist.egg_info.endswith(develop_egg_link_egg_info), we
            # are in fact in the develop_egg_link case
            paths_to_remove.add(dist.egg_info)
            if dist.has_metadata('installed-files.txt'):
                for installed_file in dist.get_metadata(
                        'installed-files.txt').splitlines():
                    path = os.path.normpath(
                        os.path.join(dist.egg_info, installed_file)
                    )
                    paths_to_remove.add(path)
        # FIXME: need a test for this elif block
        # occurs with --single-version-externally-managed/--record outside
        # of pip
        elif dist.has_metadata('top_level.txt'):
            if dist.has_metadata('namespace_packages.txt'):
                namespaces = dist.get_metadata('namespace_packages.txt')
            else:
                namespaces = []
            for top_level_pkg in [
                    p for p
                    in dist.get_metadata('top_level.txt').splitlines()
                    if p and p not in namespaces]:
                path = os.path.join(dist.location, top_level_pkg)
                paths_to_remove.add(path)
                paths_to_remove.add(path + '.py')
                paths_to_remove.add(path + '.pyc')
                paths_to_remove.add(path + '.pyo')

        elif distutils_egg_info:
            raise UninstallationError(
                "Cannot uninstall {!r}. It is a distutils installed project "
                "and thus we cannot accurately determine which files belong "
                "to it which would lead to only a partial uninstall.".format(
                    dist.project_name,
                )
            )

        elif dist.location.endswith('.egg'):
            # package installed by easy_install
            # We cannot match on dist.egg_name because it can slightly vary
            # i.e. setuptools-0.6c11-py2.6.egg vs setuptools-0.6rc11-py2.6.egg
            paths_to_remove.add(dist.location)
            easy_install_egg = os.path.split(dist.location)[1]
            easy_install_pth = os.path.join(os.path.dirname(dist.location),
                                            'easy-install.pth')
            paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg)

        elif egg_info_exists and dist.egg_info.endswith('.dist-info'):
            for path in uninstallation_paths(dist):
                paths_to_remove.add(path)

        elif develop_egg_link:
            # develop egg
            with open(develop_egg_link, 'r') as fh:
                link_pointer = os.path.normcase(fh.readline().strip())
            assert (link_pointer == dist.location), (
                'Egg-link %s does not match installed location of %s '
                '(at %s)' % (link_pointer, dist.project_name, dist.location)
            )
            paths_to_remove.add(develop_egg_link)
            easy_install_pth = os.path.join(os.path.dirname(develop_egg_link),
                                            'easy-install.pth')
            paths_to_remove.add_pth(easy_install_pth, dist.location)

        else:
            logger.debug(
                'Not sure how to uninstall: %s - Check: %s',
                dist, dist.location,
            )

        # find distutils scripts= scripts
        if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'):
            for script in dist.metadata_listdir('scripts'):
                if dist_in_usersite(dist):
                    bin_dir = bin_user
                else:
                    bin_dir = bin_py
                paths_to_remove.add(os.path.join(bin_dir, script))
                if WINDOWS:
                    paths_to_remove.add(os.path.join(bin_dir, script) + '.bat')

        # find console_scripts
        _scripts_to_remove = []
        console_scripts = dist.get_entry_map(group='console_scripts')
        for name in console_scripts.keys():
            _scripts_to_remove.extend(_script_names(dist, name, False))
        # find gui_scripts
        gui_scripts = dist.get_entry_map(group='gui_scripts')
        for name in gui_scripts.keys():
            _scripts_to_remove.extend(_script_names(dist, name, True))

        for s in _scripts_to_remove:
            paths_to_remove.add(s)

        return paths_to_remove
402
403
class UninstallPthEntries(object):
    """Edits a single .pth file: removes a set of entries from it while
    keeping the original raw lines so the edit can be rolled back."""
    def __init__(self, pth_file):
        if not os.path.isfile(pth_file):
            raise UninstallationError(
                "Cannot remove entries from nonexistent file %s" % pth_file
            )
        self.file = pth_file
        # Normalized entry strings scheduled for removal.
        self.entries = set()
        # Raw bytes of the file before remove(), kept for rollback().
        self._saved_lines = None

    def add(self, entry):
        """Queue one entry for removal from the .pth file."""
        entry = os.path.normcase(entry)
        # On Windows, os.path.normcase converts the entry to use
        # backslashes. This is correct for entries that describe absolute
        # paths outside of site-packages, but all the others use forward
        # slashes.
        if WINDOWS and not os.path.splitdrive(entry)[0]:
            entry = entry.replace('\\', '/')
        self.entries.add(entry)

    def remove(self):
        """Rewrite the .pth file without the queued entries, preserving the
        file's existing line-ending convention."""
        logger.debug('Removing pth entries from %s:', self.file)
        with open(self.file, 'rb') as fh:
            # windows uses '\r\n' with py3k, but uses '\n' with py2.x
            lines = fh.readlines()
            self._saved_lines = lines
        # Detect the dominant line ending so removals match exactly.
        if any(b'\r\n' in line for line in lines):
            endline = '\r\n'
        else:
            endline = '\n'
        # handle missing trailing newline
        if lines and not lines[-1].endswith(endline.encode("utf-8")):
            lines[-1] = lines[-1] + endline.encode("utf-8")
        for entry in self.entries:
            try:
                logger.debug('Removing entry: %s', entry)
                lines.remove((entry + endline).encode("utf-8"))
            except ValueError:
                # Entry not present in the file — nothing to remove.
                pass
        with open(self.file, 'wb') as fh:
            fh.writelines(lines)

    def rollback(self):
        """Restore the .pth file to its pre-remove() contents. Returns False
        when remove() was never called."""
        if self._saved_lines is None:
            logger.error(
                'Cannot roll back changes to %s, none were made', self.file
            )
            return False
        logger.debug('Rolling %s back to previous state', self.file)
        with open(self.file, 'wb') as fh:
            fh.writelines(self._saved_lines)
        return True
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/resolve.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/resolve.py
new file mode 100644
index 0000000..189827e
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/resolve.py
@@ -0,0 +1,354 @@
1"""Dependency Resolution
2
3The dependency resolution in pip is performed as follows:
4
5for top-level requirements:
6 a. only one spec allowed per project, regardless of conflicts or not.
7 otherwise a "double requirement" exception is raised
8 b. they override sub-dependency requirements.
9for sub-dependencies
10 a. "first found, wins" (where the order is breadth first)
11"""
12
13import logging
14from collections import defaultdict
15from itertools import chain
16
17from pip._internal.exceptions import (
18 BestVersionAlreadyInstalled, DistributionNotFound, HashError, HashErrors,
19 UnsupportedPythonVersion,
20)
21
22from pip._internal.req.req_install import InstallRequirement
23from pip._internal.utils.logging import indent_log
24from pip._internal.utils.misc import dist_in_usersite, ensure_dir
25from pip._internal.utils.packaging import check_dist_requires_python
26
27logger = logging.getLogger(__name__)
28
29
class Resolver(object):
    """Resolves which packages need to be installed/uninstalled to perform \
    the requested operation without breaking the requirements of any package.
    """

    # Valid values for ``upgrade_strategy`` (asserted in __init__).
    _allowed_strategies = {"eager", "only-if-needed", "to-satisfy-only"}

    def __init__(self, preparer, session, finder, wheel_cache, use_user_site,
                 ignore_dependencies, ignore_installed, ignore_requires_python,
                 force_reinstall, isolated, upgrade_strategy):
        super(Resolver, self).__init__()
        assert upgrade_strategy in self._allowed_strategies

        self.preparer = preparer
        self.finder = finder
        self.session = session

        # NOTE: This would eventually be replaced with a cache that can give
        # information about both sdist and wheels transparently.
        self.wheel_cache = wheel_cache

        self.require_hashes = None  # This is set in resolve

        self.upgrade_strategy = upgrade_strategy
        self.force_reinstall = force_reinstall
        self.isolated = isolated
        self.ignore_dependencies = ignore_dependencies
        self.ignore_installed = ignore_installed
        self.ignore_requires_python = ignore_requires_python
        self.use_user_site = use_user_site

        # Maps a parent requirement's name to the InstallRequirements that
        # were discovered as its dependencies; used by
        # get_installation_order() to build a topological order.
        self._discovered_dependencies = defaultdict(list)

    def resolve(self, requirement_set):
        """Resolve what operations need to be done

        As a side-effect of this method, the packages (and their dependencies)
        are downloaded, unpacked and prepared for installation. This
        preparation is done by ``pip.operations.prepare``.

        Once PyPI has static dependency metadata available, it would be
        possible to move the preparation to become a step separated from
        dependency resolution.
        """
        # make the wheelhouse
        if self.preparer.wheel_download_dir:
            ensure_dir(self.preparer.wheel_download_dir)

        # If any top-level requirement has a hash specified, enter
        # hash-checking mode, which requires hashes from all.
        root_reqs = (
            requirement_set.unnamed_requirements +
            list(requirement_set.requirements.values())
        )
        self.require_hashes = (
            requirement_set.require_hashes or
            any(req.has_hash_options for req in root_reqs)
        )

        # Display where finder is looking for packages
        locations = self.finder.get_formatted_locations()
        if locations:
            logger.info(locations)

        # Actually prepare the files, and collect any exceptions. Most hash
        # exceptions cannot be checked ahead of time, because
        # req.populate_link() needs to be called before we can make decisions
        # based on link type.
        discovered_reqs = []
        hash_errors = HashErrors()
        # chain() is lazy, so items appended to discovered_reqs while
        # iterating are also processed — this is the breadth-first walk
        # described in the module docstring.
        for req in chain(root_reqs, discovered_reqs):
            try:
                discovered_reqs.extend(
                    self._resolve_one(requirement_set, req)
                )
            except HashError as exc:
                exc.req = req
                hash_errors.append(exc)

        if hash_errors:
            raise hash_errors

    def _is_upgrade_allowed(self, req):
        # "to-satisfy-only": never upgrade; "eager": always; "only-if-needed":
        # only for requirements the user asked for directly.
        if self.upgrade_strategy == "to-satisfy-only":
            return False
        elif self.upgrade_strategy == "eager":
            return True
        else:
            assert self.upgrade_strategy == "only-if-needed"
            return req.is_direct

    def _set_req_to_reinstall(self, req):
        """
        Set a requirement to be installed.
        """
        # Don't uninstall the conflict if doing a user install and the
        # conflict is not a user install.
        if not self.use_user_site or dist_in_usersite(req.satisfied_by):
            req.conflicts_with = req.satisfied_by
        req.satisfied_by = None

    # XXX: Stop passing requirement_set for options
    def _check_skip_installed(self, req_to_install):
        """Check if req_to_install should be skipped.

        This will check if the req is installed, and whether we should upgrade
        or reinstall it, taking into account all the relevant user options.

        After calling this req_to_install will only have satisfied_by set to
        None if the req_to_install is to be upgraded/reinstalled etc. Any
        other value will be a dist recording the current thing installed that
        satisfies the requirement.

        Note that for vcs urls and the like we can't assess skipping in this
        routine - we simply identify that we need to pull the thing down,
        then later on it is pulled down and introspected to assess upgrade/
        reinstalls etc.

        :return: A text reason for why it was skipped, or None.
        """
        if self.ignore_installed:
            return None

        req_to_install.check_if_exists(self.use_user_site)
        if not req_to_install.satisfied_by:
            return None

        if self.force_reinstall:
            self._set_req_to_reinstall(req_to_install)
            return None

        if not self._is_upgrade_allowed(req_to_install):
            if self.upgrade_strategy == "only-if-needed":
                return 'not upgraded as not directly required'
            return 'already satisfied'

        # Check for the possibility of an upgrade.  For link-based
        # requirements we have to pull the tree down and inspect to assess
        # the version #, so it's handled way down.
        if not req_to_install.link:
            try:
                self.finder.find_requirement(req_to_install, upgrade=True)
            except BestVersionAlreadyInstalled:
                # Then the best version is installed.
                return 'already up-to-date'
            except DistributionNotFound:
                # No distribution found, so we squash the error.  It will
                # be raised later when we re-try later to do the install.
                # Why don't we just raise here?
                pass

        self._set_req_to_reinstall(req_to_install)
        return None

    def _get_abstract_dist_for(self, req):
        """Takes a InstallRequirement and returns a single AbstractDist \
        representing a prepared variant of the same.
        """
        assert self.require_hashes is not None, (
            "require_hashes should have been set in Resolver.resolve()"
        )

        if req.editable:
            return self.preparer.prepare_editable_requirement(
                req, self.require_hashes, self.use_user_site, self.finder,
            )

        # satisfied_by is only evaluated by calling _check_skip_installed,
        # so it must be None here.
        assert req.satisfied_by is None
        skip_reason = self._check_skip_installed(req)

        if req.satisfied_by:
            return self.preparer.prepare_installed_requirement(
                req, self.require_hashes, skip_reason
            )

        upgrade_allowed = self._is_upgrade_allowed(req)
        abstract_dist = self.preparer.prepare_linked_requirement(
            req, self.session, self.finder, upgrade_allowed,
            self.require_hashes
        )

        # NOTE
        # The following portion is for determining if a certain package is
        # going to be re-installed/upgraded or not and reporting to the user.
        # This should probably get cleaned up in a future refactor.

        # req.req is only avail after unpack for URL
        # pkgs repeat check_if_exists to uninstall-on-upgrade
        # (#14)
        if not self.ignore_installed:
            req.check_if_exists(self.use_user_site)

        if req.satisfied_by:
            should_modify = (
                self.upgrade_strategy != "to-satisfy-only" or
                self.force_reinstall or
                self.ignore_installed or
                req.link.scheme == 'file'
            )
            if should_modify:
                self._set_req_to_reinstall(req)
            else:
                logger.info(
                    'Requirement already satisfied (use --upgrade to upgrade):'
                    ' %s', req,
                )

        return abstract_dist

    def _resolve_one(self, requirement_set, req_to_install):
        """Prepare a single requirements file.

        :return: A list of additional InstallRequirements to also install.
        """
        # Tell user what we are doing for this requirement:
        # obtain (editable), skipping, processing (local url), collecting
        # (remote url or package name)
        if req_to_install.constraint or req_to_install.prepared:
            return []

        req_to_install.prepared = True

        # register tmp src for cleanup in case something goes wrong
        requirement_set.reqs_to_cleanup.append(req_to_install)

        abstract_dist = self._get_abstract_dist_for(req_to_install)

        # Parse and return dependencies
        dist = abstract_dist.dist(self.finder)
        try:
            check_dist_requires_python(dist)
        except UnsupportedPythonVersion as err:
            if self.ignore_requires_python:
                logger.warning(err.args[0])
            else:
                raise

        more_reqs = []

        def add_req(subreq, extras_requested):
            # Register one discovered sub-dependency with the requirement
            # set and record the parent->child edge for install ordering.
            sub_install_req = InstallRequirement.from_req(
                str(subreq),
                req_to_install,
                isolated=self.isolated,
                wheel_cache=self.wheel_cache,
            )
            parent_req_name = req_to_install.name
            to_scan_again, add_to_parent = requirement_set.add_requirement(
                sub_install_req,
                parent_req_name=parent_req_name,
                extras_requested=extras_requested,
            )
            if parent_req_name and add_to_parent:
                self._discovered_dependencies[parent_req_name].append(
                    add_to_parent
                )
            more_reqs.extend(to_scan_again)

        with indent_log():
            # We add req_to_install before its dependencies, so that we
            # can refer to it when adding dependencies.
            if not requirement_set.has_requirement(req_to_install.name):
                # 'unnamed' requirements will get added here
                req_to_install.is_direct = True
                requirement_set.add_requirement(
                    req_to_install, parent_req_name=None,
                )

            if not self.ignore_dependencies:
                if req_to_install.extras:
                    logger.debug(
                        "Installing extra requirements: %r",
                        ','.join(req_to_install.extras),
                    )
                missing_requested = sorted(
                    set(req_to_install.extras) - set(dist.extras)
                )
                for missing in missing_requested:
                    logger.warning(
                        '%s does not provide the extra \'%s\'',
                        dist, missing
                    )

                available_requested = sorted(
                    set(dist.extras) & set(req_to_install.extras)
                )
                for subreq in dist.requires(available_requested):
                    add_req(subreq, extras_requested=available_requested)

            if not req_to_install.editable and not req_to_install.satisfied_by:
                # XXX: --no-install leads this to report 'Successfully
                # downloaded' for only non-editable reqs, even though we took
                # action on them.
                requirement_set.successfully_downloaded.append(req_to_install)

        return more_reqs

    def get_installation_order(self, req_set):
        """Create the installation order.

        The installation order is topological - requirements are installed
        before the requiring thing. We break cycles at an arbitrary point,
        and make no other guarantees.
        """
        # The current implementation, which we may change at any point
        # installs the user specified things in the order given, except when
        # dependencies must come earlier to achieve topological order.
        order = []
        ordered_reqs = set()

        def schedule(req):
            # Depth-first: a req's discovered dependencies are appended to
            # ``order`` before the req itself. ``ordered_reqs`` also serves
            # as the visited set that breaks dependency cycles.
            if req.satisfied_by or req in ordered_reqs:
                return
            if req.constraint:
                return
            ordered_reqs.add(req)
            for dep in self._discovered_dependencies[req.name]:
                schedule(dep)
            order.append(req)

        for install_req in req_set.requirements.values():
            schedule(install_req)
        return order
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/status_codes.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/status_codes.py
new file mode 100644
index 0000000..2b56931
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/status_codes.py
@@ -0,0 +1,8 @@
1from __future__ import absolute_import
2
# Process exit codes returned by pip commands.
SUCCESS = 0  # command completed successfully
ERROR = 1  # generic command failure
UNKNOWN_ERROR = 2  # unexpected/unhandled error
VIRTUALENV_NOT_FOUND = 3  # a required virtualenv was not active
PREVIOUS_BUILD_DIR_ERROR = 4  # a pre-existing build directory blocked the run
NO_MATCHES_FOUND = 23  # no packages matched the query (e.g. search/list)
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/__init__.py
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/appdirs.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/appdirs.py
new file mode 100644
index 0000000..0eb87ca
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/appdirs.py
@@ -0,0 +1,258 @@
1"""
2This code was taken from https://github.com/ActiveState/appdirs and modified
3to suit our purposes.
4"""
5from __future__ import absolute_import
6
7import os
8import sys
9
10from pip._vendor.six import PY2, text_type
11
12from pip._internal.compat import WINDOWS, expanduser
13
14
def user_cache_dir(appname):
    r"""
    Return full path to the user-specific cache dir for this application.

    "appname" is the name of application.

    Typical user cache directories are:
        macOS:      ~/Library/Caches/<AppName>
        Unix:       ~/.cache/<AppName> (XDG default)
        Windows:    C:\Users\<username>\AppData\Local\<AppName>\Cache

    On Windows the MSDN docs only suggest CSIDL_LOCAL_APPDATA for local
    settings, which is the same folder user_data_dir() returns by default,
    so (by convention, like Firefox/Acme examples) we append "Cache" to it.
    """
    if WINDOWS:
        base = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA"))
        # On Python 2 return bytes on Windows as well, to stay consistent
        # with the other operating systems (see _win_path_to_bytes).
        if PY2 and isinstance(base, text_type):
            base = _win_path_to_bytes(base)
        return os.path.join(base, appname, "Cache")

    if sys.platform == "darwin":
        return os.path.join(expanduser("~/Library/Caches"), appname)

    # XDG spec: honour $XDG_CACHE_HOME, defaulting to ~/.cache.
    return os.path.join(
        os.getenv("XDG_CACHE_HOME", expanduser("~/.cache")),
        appname,
    )
61
62
def user_data_dir(appname, roaming=False):
    r"""
    Return full path to the user-specific data dir for this application.

    "appname" is the name of application.
    "roaming" (boolean, default False) can be set True to use the Windows
        roaming appdata directory. That means that for users on a Windows
        network setup for roaming profiles, this user data will be
        sync'd on login. See
        <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
        for a discussion of issues.

    Typical user data directories are:
        macOS:                  ~/Library/Application Support/<AppName>
                                if it exists, else ~/.config/<AppName>
        Unix:                   $XDG_DATA_HOME/<AppName>, default
                                ~/.local/share/<AppName>
        Win XP (not roaming):   C:\Documents and Settings\<username>\ ...
                                ...Application Data\<AppName>
        Win XP (roaming):       C:\Documents and Settings\<username>\Local ...
                                ...Settings\Application Data\<AppName>
        Win 7  (not roaming):   C:\Users\<username>\AppData\Local\<AppName>
        Win 7  (roaming):       C:\Users\<username>\AppData\Roaming\<AppName>

    For Unix, we follow the XDG spec and support $XDG_DATA_HOME.
    That means, by default "~/.local/share/<AppName>".
    """
    if WINDOWS:
        # Was "roaming and A or B" -- the dated pre-2.5 ternary idiom.
        const = "CSIDL_APPDATA" if roaming else "CSIDL_LOCAL_APPDATA"
        return os.path.join(os.path.normpath(_get_win_folder(const)), appname)

    if sys.platform == "darwin":
        # Prefer the native location when it already exists, otherwise fall
        # back to ~/.config.  (Previously the candidate path was built twice
        # inline inside one conditional expression.)
        candidate = os.path.join(
            expanduser('~/Library/Application Support/'),
            appname,
        )
        if os.path.isdir(candidate):
            return candidate
        return os.path.join(expanduser('~/.config/'), appname)

    # XDG spec: honour $XDG_DATA_HOME, defaulting to ~/.local/share.
    return os.path.join(
        os.getenv('XDG_DATA_HOME', expanduser("~/.local/share")),
        appname,
    )
113
114
def user_config_dir(appname, roaming=True):
    """Return full path to the user-specific config dir for this application.

    "appname" is the name of application.
    "roaming" (boolean, default True) can be set False to not use the
        Windows roaming appdata directory. That means that for users on a
        Windows network setup for roaming profiles, this user data will be
        sync'd on login. See
        <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
        for a discussion of issues.

    Typical user config directories are:
        macOS:  same as user_data_dir
        Unix:   $XDG_CONFIG_HOME/<AppName>, default ~/.config/<AppName>
        Win *:  same as user_data_dir
    """
    if WINDOWS:
        return user_data_dir(appname, roaming=roaming)
    if sys.platform == "darwin":
        return user_data_dir(appname)
    # XDG spec: honour $XDG_CONFIG_HOME, defaulting to ~/.config.
    base = os.getenv('XDG_CONFIG_HOME', expanduser("~/.config"))
    return os.path.join(base, appname)
144
145
146# for the discussion regarding site_config_dirs locations
147# see <https://github.com/pypa/pip/issues/1733>
def site_config_dirs(appname):
    r"""Return a list of potential user-shared config dirs for this application.

    "appname" is the name of application.

    Typical user config directories are:
        macOS:      /Library/Application Support/<AppName>/
        Unix:       /etc or $XDG_CONFIG_DIRS[i]/<AppName>/ for each value in
                    $XDG_CONFIG_DIRS
        Win XP:     C:\Documents and Settings\All Users\Application ...
                    ...Data\<AppName>\
        Vista:      (Fail! "C:\ProgramData" is a hidden *system* directory
                    on Vista.)
        Win 7:      Hidden, but writeable on Win 7:
                    C:\ProgramData\<AppName>\
    """
    # for the discussion regarding site_config_dirs locations
    # see <https://github.com/pypa/pip/issues/1733>
    if WINDOWS:
        shared = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA"))
        return [os.path.join(shared, appname)]
    if sys.platform == 'darwin':
        return [os.path.join('/Library/Application Support', appname)]

    # try looking in $XDG_CONFIG_DIRS
    xdg_dirs = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg')
    if xdg_dirs:
        dirs = [
            os.path.join(expanduser(entry), appname)
            for entry in xdg_dirs.split(os.pathsep)
        ]
    else:
        # $XDG_CONFIG_DIRS was explicitly set to an empty string.
        dirs = []

    # always look in /etc directly as well
    dirs.append('/etc')
    return dirs
184
185
186# -- Windows support functions --
187
def _get_win_folder_from_registry(csidl_name):
    """
    Look a CSIDL_* folder up via the "Shell Folders" registry key.

    This is a fallback technique at best; it is not certain the registry
    gives the correct answer for all CSIDL_* names.
    """
    import _winreg

    registry_names = {
        "CSIDL_APPDATA": "AppData",
        "CSIDL_COMMON_APPDATA": "Common AppData",
        "CSIDL_LOCAL_APPDATA": "Local AppData",
    }
    shell_folder_name = registry_names[csidl_name]

    key = _winreg.OpenKey(
        _winreg.HKEY_CURRENT_USER,
        r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders"
    )
    # QueryValueEx returns (value, type); only the value matters here.
    directory = _winreg.QueryValueEx(key, shell_folder_name)[0]
    return directory
208
209
def _get_win_folder_with_ctypes(csidl_name):
    """Resolve a CSIDL_* folder through shell32.SHGetFolderPathW."""
    # Numeric CSIDL constants understood by SHGetFolderPathW.
    csidl_const = {
        "CSIDL_APPDATA": 26,
        "CSIDL_COMMON_APPDATA": 35,
        "CSIDL_LOCAL_APPDATA": 28,
    }[csidl_name]

    buf = ctypes.create_unicode_buffer(1024)
    ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)

    # Downgrade to short path name if have highbit chars. See
    # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
    if any(ord(c) > 255 for c in buf):
        buf2 = ctypes.create_unicode_buffer(1024)
        if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
            buf = buf2

    return buf.value
233
234
# Pick the best available implementation of _get_win_folder: ctypes is
# preferred, with the registry lookup as a fallback.  Note the module-level
# name "ctypes" used by _get_win_folder_with_ctypes is only bound here, and
# only on Windows.
if WINDOWS:
    try:
        import ctypes
        _get_win_folder = _get_win_folder_with_ctypes
    except ImportError:
        _get_win_folder = _get_win_folder_from_registry
241
242
243def _win_path_to_bytes(path):
244 """Encode Windows paths to bytes. Only used on Python 2.
245
246 Motivation is to be consistent with other operating systems where paths
247 are also returned as bytes. This avoids problems mixing bytes and Unicode
248 elsewhere in the codebase. For more details and discussion see
249 <https://github.com/pypa/pip/issues/3463>.
250
251 If encoding using ASCII and MBCS fails, return the original Unicode path.
252 """
253 for encoding in ('ASCII', 'MBCS'):
254 try:
255 return path.encode(encoding)
256 except (UnicodeEncodeError, LookupError):
257 pass
258 return path
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/deprecation.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/deprecation.py
new file mode 100644
index 0000000..c0e3884
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/deprecation.py
@@ -0,0 +1,77 @@
1"""
2A module that implements tooling to enable easy warnings about deprecations.
3"""
4from __future__ import absolute_import
5
6import logging
7import warnings
8
9from pip._internal.utils.typing import MYPY_CHECK_RUNNING
10
11if MYPY_CHECK_RUNNING:
12 from typing import Any
13
14
class PipDeprecationWarning(Warning):
    """Base class for every deprecation warning issued by pip."""
    pass
17
18
class Pending(object):
    """Mixin marking a deprecation as pending: _showwarning logs such
    categories at WARNING rather than ERROR level."""
    pass
21
22
class RemovedInPip11Warning(PipDeprecationWarning):
    """Deprecation warning for behavior going away in pip 11."""
    pass
25
26
class RemovedInPip12Warning(PipDeprecationWarning, Pending):
    """Deprecation warning for behavior going away in pip 12 (still
    pending, so logged less loudly)."""
    pass
29
30
# Warnings <-> Logging Integration


# Saved copy of the original warnings.showwarning hook.  Stays None until
# install_warning_logger() swaps in _showwarning below.
_warnings_showwarning = None  # type: Any
35
36
def _showwarning(message, category, filename, lineno, file=None, line=None):
    """Replacement warnings.showwarning hook that routes pip deprecation
    warnings through logging; everything else is delegated to the saved
    original hook."""
    if file is not None:
        # An explicit destination was requested: defer to the stock hook.
        if _warnings_showwarning is not None:
            _warnings_showwarning(
                message, category, filename, lineno, file, line,
            )
        return

    if not issubclass(category, PipDeprecationWarning):
        # Not one of ours; let the saved implementation handle it.
        _warnings_showwarning(
            message, category, filename, lineno, file, line,
        )
        return

    # We use a specially named logger which will handle all of the
    # deprecation messages for pip.
    logger = logging.getLogger("pip._internal.deprecations")

    # This is purposely using the % formatter here instead of letting
    # the logging module handle the interpolation. This is because we
    # want it to appear as if someone typed this entire message out.
    log_message = "DEPRECATION: %s" % message

    # PipDeprecationWarnings that are Pending still have at least 2
    # versions to go until they are removed so they can just be
    # warnings. Otherwise, they will be removed in the very next
    # version of pip. We want these to be more obvious so we use the
    # ERROR logging level.
    if issubclass(category, Pending):
        logger.warning(log_message)
    else:
        logger.error(log_message)
67
68
def install_warning_logger():
    # Enable our Deprecation Warnings ("default" shows them once per
    # location; append=True keeps user-specified filters ahead of ours).
    warnings.simplefilter("default", PipDeprecationWarning, append=True)

    global _warnings_showwarning

    # Install _showwarning exactly once, remembering the stock hook so that
    # non-pip warnings can still be delegated to it.
    if _warnings_showwarning is None:
        _warnings_showwarning = warnings.showwarning
        warnings.showwarning = _showwarning
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/encoding.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/encoding.py
new file mode 100644
index 0000000..831f3f6
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/encoding.py
@@ -0,0 +1,33 @@
import codecs
import locale
import re
import sys

# Byte-order marks mapped to the codec they imply.  Longer BOMs must be
# checked before any shorter BOM that prefixes them: BOM_UTF32_LE
# (b'\xff\xfe\x00\x00') starts with BOM_UTF16_LE (b'\xff\xfe'), so testing
# UTF-16 first would misdetect UTF-32-LE data (the bug fixed here by moving
# the UTF-32 entries ahead of the UTF-16 ones).
BOMS = [
    (codecs.BOM_UTF8, 'utf8'),
    (codecs.BOM_UTF32, 'utf32'),
    (codecs.BOM_UTF32_BE, 'utf32-be'),
    (codecs.BOM_UTF32_LE, 'utf32-le'),
    (codecs.BOM_UTF16, 'utf16'),
    (codecs.BOM_UTF16_BE, 'utf16-be'),
    (codecs.BOM_UTF16_LE, 'utf16-le'),
]

# PEP 263 style coding declaration, e.g. "# -*- coding: latin-1 -*-".
ENCODING_RE = re.compile(br'coding[:=]\s*([-\w.]+)')


def auto_decode(data):
    """Check a bytes string for a BOM to correctly detect the encoding

    Detection order: a leading BOM, then a PEP 263 coding declaration in
    the first two lines, then locale.getpreferredencoding(False) like
    open() on Python 3 (with sys.getdefaultencoding() as a last resort).
    """
    for bom, encoding in BOMS:
        if data.startswith(bom):
            return data[len(bom):].decode(encoding)
    # Lets check the first two lines as in PEP263
    for line in data.split(b'\n')[:2]:
        if line[0:1] == b'#' and ENCODING_RE.search(line):
            encoding = ENCODING_RE.search(line).groups()[0].decode('ascii')
            return data.decode(encoding)
    return data.decode(
        locale.getpreferredencoding(False) or sys.getdefaultencoding(),
    )
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/filesystem.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/filesystem.py
new file mode 100644
index 0000000..94fa2c6
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/filesystem.py
@@ -0,0 +1,28 @@
1import os
2import os.path
3
4from pip._internal.compat import get_path_uid
5
6
def check_path_owner(path):
    """Return whether the current user can be considered the owner of
    *path* (checked against the nearest existing ancestor)."""
    # If we don't have a way to check the effective uid of this process,
    # then we'll just assume that we own the directory.
    if not hasattr(os, "geteuid"):
        return True

    previous = None
    while path != previous:
        if not os.path.lexists(path):
            # Walk up until we find a component that actually exists.
            previous, path = path, os.path.dirname(path)
            continue
        if os.geteuid() != 0:
            # Regular user: writability is the ownership test.
            return os.access(path, os.W_OK)
        # Special handling for root in order to handle properly cases
        # where users use sudo without the -H flag.
        try:
            path_uid = get_path_uid(path)
        except OSError:
            return False
        return path_uid == 0
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/glibc.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/glibc.py
new file mode 100644
index 0000000..5900a10
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/glibc.py
@@ -0,0 +1,84 @@
1from __future__ import absolute_import
2
3import ctypes
4import re
5import warnings
6
7
def glibc_version_string():
    "Returns glibc version string, or None if not using glibc."

    # ctypes.CDLL(None) internally calls dlopen(NULL); per the dlopen
    # manpage that yields a handle for the main program, so the dynamic
    # linker tells us which libc this process is actually running against.
    process_namespace = ctypes.CDLL(None)
    gnu_get_libc_version = getattr(
        process_namespace, "gnu_get_libc_version", None)
    if gnu_get_libc_version is None:
        # Symbol doesn't exist -> therefore, we are not linked to glibc.
        return None

    # Call gnu_get_libc_version, which returns a string like "2.5".
    gnu_get_libc_version.restype = ctypes.c_char_p
    version_str = gnu_get_libc_version()
    # py2 / py3 compatibility: normalise bytes to str.
    if not isinstance(version_str, str):
        version_str = version_str.decode("ascii")

    return version_str
31
32
# Separated out from have_compatible_glibc for easier unit testing
def check_glibc_version(version_str, required_major, minimum_minor):
    """Return True when *version_str* is in the required major series with
    at least the given minor version.

    A regexp (rather than str.split) discards any random junk after the
    minor version, as seen in patched/forked glibcs -- e.g. Linaro's
    "2.20-2014.11".  See gh-3588.  An unparseable string produces a
    RuntimeWarning and False.
    """
    match = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
    if match is None:
        warnings.warn("Expected glibc version with 2 components major.minor,"
                      " got: %s" % version_str, RuntimeWarning)
        return False
    major = int(match.group("major"))
    minor = int(match.group("minor"))
    return major == required_major and minor >= minimum_minor
48
49
def have_compatible_glibc(required_major, minimum_minor):
    """True when the running libc is glibc of a compatible version."""
    version = glibc_version_string()
    return (version is not None and
            check_glibc_version(version, required_major, minimum_minor))
55
56
57# platform.libc_ver regularly returns completely nonsensical glibc
58# versions. E.g. on my computer, platform says:
59#
60# ~$ python2.7 -c 'import platform; print(platform.libc_ver())'
61# ('glibc', '2.7')
62# ~$ python3.5 -c 'import platform; print(platform.libc_ver())'
63# ('glibc', '2.9')
64#
65# But the truth is:
66#
67# ~$ ldd --version
68# ldd (Debian GLIBC 2.22-11) 2.22
69#
70# This is unfortunate, because it means that the linehaul data on libc
71# versions that was generated by pip 8.1.2 and earlier is useless and
72# misleading. Solution: instead of using platform, use our code that actually
73# works.
def libc_ver():
    """Try to determine the glibc version

    Returns a tuple of strings (lib, version) which default to empty strings
    in case the lookup fails.
    """
    version = glibc_version_string()
    if version is None:
        return ("", "")
    return ("glibc", version)
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/hashes.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/hashes.py
new file mode 100644
index 0000000..8cf6367
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/hashes.py
@@ -0,0 +1,94 @@
1from __future__ import absolute_import
2
3import hashlib
4
5from pip._vendor.six import iteritems, iterkeys, itervalues
6
7from pip._internal.exceptions import (
8 HashMismatch, HashMissing, InstallationError,
9)
10from pip._internal.utils.misc import read_chunks
11
12# The recommended hash algo of the moment. Change this whenever the state of
13# the art changes; it won't hurt backward compatibility.
14FAVORITE_HASH = 'sha256'
15
16
17# Names of hashlib algorithms allowed by the --hash option and ``pip hash``
18# Currently, those are the ones at least as collision-resistant as sha256.
19STRONG_HASHES = ['sha256', 'sha384', 'sha512']
20
21
class Hashes(object):
    """A wrapper that builds multiple hashes at once and checks them against
    known-good values

    """
    def __init__(self, hashes=None):
        """
        :param hashes: A dict of algorithm names pointing to lists of allowed
            hex digests
        """
        self._allowed = hashes if hashes is not None else {}

    def check_against_chunks(self, chunks):
        """Check good hashes against ones built from iterable of chunks of
        data.

        Raise HashMismatch if none match.

        """
        gots = {}
        for hash_name in self._allowed:
            try:
                gots[hash_name] = hashlib.new(hash_name)
            except (ValueError, TypeError):
                raise InstallationError('Unknown hash name: %s' % hash_name)

        # Feed every chunk to every hasher in one pass over the data.
        for chunk in chunks:
            for hasher in gots.values():
                hasher.update(chunk)

        # One matching digest of any allowed algorithm is a pass.
        for hash_name, got in gots.items():
            if got.hexdigest() in self._allowed[hash_name]:
                return
        self._raise(gots)

    def _raise(self, gots):
        # Hook point: subclasses raise a more specific error.
        raise HashMismatch(self._allowed, gots)

    def check_against_file(self, file):
        """Check good hashes against a file-like object

        Raise HashMismatch if none match.

        """
        return self.check_against_chunks(read_chunks(file))

    def check_against_path(self, path):
        with open(path, 'rb') as file:
            return self.check_against_file(file)

    def __nonzero__(self):
        """Return whether I know any known-good hashes."""
        return bool(self._allowed)

    # Python 3 truthiness delegates to the Python 2 spelling above.
    def __bool__(self):
        return self.__nonzero__()
78
79
class MissingHashes(Hashes):
    """A workalike for Hashes used when we're missing a hash for a requirement

    It computes the actual hash of the requirement and raises a HashMissing
    exception showing it to the user.

    """
    def __init__(self):
        """Don't offer the ``hashes`` kwarg."""
        # Pass our favorite hash in to generate a "gotten hash". With the
        # empty list, it will never match, so an error will always raise.
        super(MissingHashes, self).__init__(hashes={FAVORITE_HASH: []})

    def _raise(self, gots):
        # Report the digest we actually computed so the user can pin it.
        raise HashMissing(gots[FAVORITE_HASH].hexdigest())
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/logging.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/logging.py
new file mode 100644
index 0000000..1fb3e8a
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/logging.py
@@ -0,0 +1,132 @@
1from __future__ import absolute_import
2
3import contextlib
4import logging
5import logging.handlers
6import os
7
8from pip._internal.compat import WINDOWS
9from pip._internal.utils.misc import ensure_dir
10
11try:
12 import threading
13except ImportError:
14 import dummy_threading as threading # type: ignore
15
16
17try:
18 from pip._vendor import colorama
19# Lots of different errors can come from this, including SystemError and
20# ImportError.
21except Exception:
22 colorama = None
23
24
# Per-thread log-indentation state shared by indent_log()/get_indentation().
# Only the creating thread gets the initial attribute; get_indentation()
# falls back to 0 for other threads via getattr.
_log_state = threading.local()
_log_state.indentation = 0
27
28
@contextlib.contextmanager
def indent_log(num=2):
    """
    A context manager which will cause the log output to be indented for any
    log messages emitted inside it.

    :param num: number of spaces added to the current indentation level.
    """
    _log_state.indentation += num
    try:
        yield
    finally:
        # Always restore the previous level, even if the body raised.
        _log_state.indentation -= num
40
41
def get_indentation():
    """Return the current thread's log indentation level (default 0)."""
    try:
        return _log_state.indentation
    except AttributeError:
        # Threads other than the one that initialised _log_state start
        # without the attribute.
        return 0
44
45
class IndentingFormatter(logging.Formatter):
    """Formatter that prefixes every output line with the indentation
    currently maintained by indent_log()."""

    def format(self, record):
        """
        Calls the standard formatter, but will indent all of the log messages
        by our current indentation level.
        """
        prefix = " " * get_indentation()
        formatted = logging.Formatter.format(self, record)
        # keepends=True preserves the original line endings.
        return "".join(
            prefix + line for line in formatted.splitlines(True)
        )
59
60
def _color_wrap(*colors):
    """Return a function wrapping its argument in *colors* plus a reset."""
    def wrapped(inp):
        prefix = "".join(colors)
        return prefix + inp + colorama.Style.RESET_ALL
    return wrapped
65
66
class ColorizedStreamHandler(logging.StreamHandler):
    """StreamHandler that wraps WARNING/ERROR records in ANSI colors when
    colorama is available and the target stream can display them."""

    # Don't build up a list of colors if we don't have colorama
    if colorama:
        COLORS = [
            # This needs to be in order from highest logging level to lowest.
            (logging.ERROR, _color_wrap(colorama.Fore.RED)),
            (logging.WARNING, _color_wrap(colorama.Fore.YELLOW)),
        ]
    else:
        COLORS = []

    def __init__(self, stream=None, no_color=None):
        # no_color=True disables colorization unconditionally.
        logging.StreamHandler.__init__(self, stream)
        self._no_color = no_color

        if WINDOWS and colorama:
            # Translate ANSI escapes into Win32 console calls.
            self.stream = colorama.AnsiToWin32(self.stream)

    def should_color(self):
        # Don't colorize things if we do not have colorama or if told not to
        if not colorama or self._no_color:
            return False

        # Unwrap the AnsiToWin32 proxy to inspect the real stream.
        real_stream = (
            self.stream if not isinstance(self.stream, colorama.AnsiToWin32)
            else self.stream.wrapped
        )

        # If the stream is a tty we should color it
        if hasattr(real_stream, "isatty") and real_stream.isatty():
            return True

        # If we have an ANSI term we should color it
        if os.environ.get("TERM") == "ANSI":
            return True

        # If anything else we should not color it
        return False

    def format(self, record):
        msg = logging.StreamHandler.format(self, record)

        # Apply the color of the first (highest) level threshold matched.
        if self.should_color():
            for level, color in self.COLORS:
                if record.levelno >= level:
                    msg = color(msg)
                    break

        return msg
117
118
class BetterRotatingFileHandler(logging.handlers.RotatingFileHandler):
    """RotatingFileHandler that creates the log file's directory first."""

    def _open(self):
        # Make sure the parent directory exists before the base class
        # opens/creates the log file.
        ensure_dir(os.path.dirname(self.baseFilename))
        return logging.handlers.RotatingFileHandler._open(self)
124
125
class MaxLevelFilter(logging.Filter):
    """Logging filter that passes only records strictly below *level*."""

    def __init__(self, level):
        # Exclusive upper bound for accepted record levels.
        self.level = level

    def filter(self, record):
        return self.level > record.levelno
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/misc.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/misc.py
new file mode 100644
index 0000000..db84a7c
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/misc.py
@@ -0,0 +1,851 @@
1from __future__ import absolute_import
2
3import contextlib
4import errno
5import io
6import locale
7# we have a submodule named 'logging' which would shadow this if we used the
8# regular name:
9import logging as std_logging
10import os
11import posixpath
12import re
13import shutil
14import stat
15import subprocess
16import sys
17import tarfile
18import zipfile
19from collections import deque
20
21from pip._vendor import pkg_resources
22# NOTE: retrying is not annotated in typeshed as on 2017-07-17, which is
23# why we ignore the type on this import.
24from pip._vendor.retrying import retry # type: ignore
25from pip._vendor.six import PY2
26from pip._vendor.six.moves import input
27
28from pip._internal.compat import console_to_str, expanduser, stdlib_pkgs
29from pip._internal.exceptions import InstallationError
30from pip._internal.locations import (
31 running_under_virtualenv, site_packages, user_site, virtualenv_no_global,
32 write_delete_marker_file,
33)
34
35if PY2:
36 from io import BytesIO as StringIO
37else:
38 from io import StringIO
39
# Explicit public API of this module.
__all__ = ['rmtree', 'display_path', 'backup_dir',
           'ask', 'splitext',
           'format_size', 'is_installable_dir',
           'is_svn_page', 'file_contents',
           'split_leading_dir', 'has_leading_dir',
           'normalize_path',
           'renames', 'get_prog',
           'unzip_file', 'untar_file', 'unpack_file', 'call_subprocess',
           'captured_stdout', 'ensure_dir',
           'ARCHIVE_EXTENSIONS', 'SUPPORTED_EXTENSIONS',
           'get_installed_version']


logger = std_logging.getLogger(__name__)

# Archive extensions grouped by container/compression format.
BZ2_EXTENSIONS = ('.tar.bz2', '.tbz')
XZ_EXTENSIONS = ('.tar.xz', '.txz', '.tlz', '.tar.lz', '.tar.lzma')
ZIP_EXTENSIONS = ('.zip', '.whl')
TAR_EXTENSIONS = ('.tar.gz', '.tgz', '.tar')
ARCHIVE_EXTENSIONS = (
    ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS)
# Extensions this interpreter can actually unpack; bz2/xz are added only
# when the optional stdlib modules probed below are importable.
SUPPORTED_EXTENSIONS = ZIP_EXTENSIONS + TAR_EXTENSIONS
try:
    import bz2  # noqa
    SUPPORTED_EXTENSIONS += BZ2_EXTENSIONS
except ImportError:
    logger.debug('bz2 module is not available')

try:
    # Only for Python 3.3+
    import lzma  # noqa
    SUPPORTED_EXTENSIONS += XZ_EXTENSIONS
except ImportError:
    logger.debug('lzma module is not available')
74
75
def import_or_raise(pkg_or_module_string, ExceptionType, *args, **kwargs):
    """Import and return a module, raising ExceptionType(*args, **kwargs)
    instead of ImportError when it is unavailable.

    Note: like __import__, a dotted name returns the top-level package.
    """
    try:
        module = __import__(pkg_or_module_string)
    except ImportError:
        raise ExceptionType(*args, **kwargs)
    return module
81
82
def ensure_dir(path):
    """Create *path* (with parents) if needed; an existing path is fine."""
    try:
        os.makedirs(path)
    except OSError as exc:
        # Only an already-existing path is acceptable.
        if exc.errno == errno.EEXIST:
            return
        raise
90
91
def get_prog():
    """Return how this program was invoked, for usage messages: the script
    basename, "<python> -m pip" when run as a module, or 'pip' when
    sys.argv is unusable."""
    try:
        prog = os.path.basename(sys.argv[0])
    except (AttributeError, TypeError, IndexError):
        # Embedded interpreters may lack a usable sys.argv.
        return 'pip'
    if prog in ('__main__.py', '-c'):
        return "%s -m pip" % sys.executable
    return prog
102
103
# Retry every half second for up to 3 seconds
# (papers over transient errors, e.g. briefly-locked files; the decorator
# re-raises once the delay budget is exhausted)
@retry(stop_max_delay=3000, wait_fixed=500)
def rmtree(dir, ignore_errors=False):
    # rmtree_errorhandler additionally clears read-only bits and retries
    # the failing operation once.
    shutil.rmtree(dir, ignore_errors=ignore_errors,
                  onerror=rmtree_errorhandler)
109
110
def rmtree_errorhandler(func, path, exc_info):
    """On Windows, the files in .svn are read-only, so when rmtree() tries to
    remove them, an exception is thrown. We catch that here, remove the
    read-only attribute, and hopefully continue without problems."""
    if not (os.stat(path).st_mode & stat.S_IREAD):
        # Not a read-only problem: re-raise the active exception.
        raise
    # Convert to read/write, then retry the operation that failed.
    os.chmod(path, stat.S_IWRITE)
    func(path)
124
125
def display_path(path):
    """Gives the display value for a given path, making it relative to cwd
    if possible."""
    path = os.path.normcase(os.path.abspath(path))
    if sys.version_info[0] == 2:
        # Python 2: round-trip through the filesystem encoding so
        # undecodable bytes are replaced rather than crashing the display.
        path = path.decode(sys.getfilesystemencoding(), 'replace')
        path = path.encode(sys.getdefaultencoding(), 'replace')
    cwd_prefix = os.getcwd() + os.path.sep
    if path.startswith(cwd_prefix):
        path = '.' + path[len(os.getcwd()):]
    return path
136
137
def backup_dir(dir, ext='.bak'):
    """Figure out the name of a directory to back up the given dir to
    (adding .bak, .bak2, etc)"""
    attempt = 1
    candidate = dir + ext
    while os.path.exists(candidate):
        attempt += 1
        candidate = dir + ext + str(attempt)
    return candidate
147
148
def ask_path_exists(message, options):
    """Like ask(), but honour a pre-seeded answer in $PIP_EXISTS_ACTION."""
    preset = os.environ.get('PIP_EXISTS_ACTION', '').split()
    for action in preset:
        if action in options:
            return action
    return ask(message, options)
154
155
def ask(message, options):
    """Ask the message interactively, with the given possible responses"""
    while True:
        # Refuse to prompt when the user declared the run non-interactive.
        if os.environ.get('PIP_NO_INPUT'):
            raise Exception(
                'No input was expected ($PIP_NO_INPUT set); question: %s' %
                message
            )
        response = input(message).strip().lower()
        if response in options:
            return response
        print(
            'Your response (%r) was not one of the expected responses: '
            '%s' % (response, ', '.join(options))
        )
173
174
def format_size(bytes):
    """Render a byte count as a short human-readable string using decimal
    (1000-based) units."""
    if bytes > 1000 * 1000:
        return '%.1fMB' % (bytes / 1000.0 / 1000)
    if bytes > 10 * 1000:
        # Above 10kB, whole kilobytes are precise enough.
        return '%ikB' % (bytes / 1000)
    if bytes > 1000:
        return '%.1fkB' % (bytes / 1000.0)
    return '%ibytes' % bytes
184
185
def is_installable_dir(path):
    """Return True if `path` is a directory containing a setup.py file."""
    return bool(
        os.path.isdir(path) and
        os.path.isfile(os.path.join(path, 'setup.py'))
    )
194
195
def is_svn_page(html):
    """
    Returns true if the page appears to be the index page of an svn repository
    """
    # Both markers must be present; the and-chaining preserves the
    # original truthy-match/None return values.
    revision = re.search(r'<title>[^<]*Revision \d+:', html)
    powered = revision and re.search(
        r'Powered by (?:<a[^>]*?>)?Subversion', html, re.I)
    return revision and powered
202
203
def file_contents(filename):
    """Read *filename* and return its contents decoded as UTF-8."""
    with open(filename, 'rb') as fp:
        raw = fp.read()
    return raw.decode('utf-8')
207
208
def read_chunks(file, size=io.DEFAULT_BUFFER_SIZE):
    """Yield pieces of data from a file-like object until EOF."""
    while True:
        piece = file.read(size)
        if not piece:
            # Empty read means EOF; end the generator.
            return
        yield piece
216
217
def split_leading_dir(path):
    """Split off the first path component, using whichever of '/' or '\\'
    appears first.  Returns (leading, rest); rest is '' when there is no
    separator.  (Split results are 2-item lists, the no-separator case a
    tuple, matching the original behavior.)"""
    path = path.lstrip('/').lstrip('\\')
    has_slash = '/' in path
    has_backslash = '\\' in path
    if has_slash and (not has_backslash or
                      path.find('/') < path.find('\\')):
        return path.split('/', 1)
    if has_backslash:
        return path.split('\\', 1)
    return path, ''
227
228
def has_leading_dir(paths):
    """Returns true if all the paths have the same leading path name
    (i.e., everything is in one subdirectory in an archive)"""
    common_prefix = None
    for path in paths:
        prefix, _rest = split_leading_dir(path)
        if not prefix:
            # An entry at the archive root: no shared leading directory.
            return False
        if common_prefix is None:
            common_prefix = prefix
        elif prefix != common_prefix:
            return False
    return True
242
243
def normalize_path(path, resolve_symlinks=True):
    """
    Convert a path to its canonical, case-normalized, absolute version.

    """
    expanded = expanduser(path)
    if resolve_symlinks:
        canonical = os.path.realpath(expanded)
    else:
        canonical = os.path.abspath(expanded)
    return os.path.normcase(canonical)
255
256
def splitext(path):
    """Like os.path.splitext, but take off .tar too"""
    base, ext = posixpath.splitext(path)
    if base.lower().endswith('.tar'):
        # Fold '.tar' into the extension: 'x.tar.gz' -> ('x', '.tar.gz').
        return base[:-4], base[-4:] + ext
    return base, ext
264
265
def renames(old, new):
    """Like os.renames(), but handles renaming across devices."""
    # Implementation borrowed from os.renames(): create missing
    # directories leading up to the destination first.
    new_dir, new_name = os.path.split(new)
    if new_dir and new_name and not os.path.exists(new_dir):
        os.makedirs(new_dir)

    # shutil.move copies across filesystems where os.rename would fail.
    shutil.move(old, new)

    # Best-effort pruning of now-empty directories left at the source.
    old_dir, old_name = os.path.split(old)
    if old_dir and old_name:
        try:
            os.removedirs(old_dir)
        except OSError:
            pass
281
282
def is_local(path):
    """
    Return True if path is within sys.prefix, if we're running in a virtualenv.

    If we're not in a virtualenv, all paths are considered "local."

    """
    if not running_under_virtualenv():
        return True
    # Prefix comparison on canonicalised paths, so symlinks and case
    # differences do not cause false negatives.
    return normalize_path(path).startswith(normalize_path(sys.prefix))
293
294
def dist_is_local(dist):
    """
    Return True if given Distribution object is installed locally
    (i.e. within current virtualenv).

    Always True if we're not in a virtualenv.

    """
    # Delegates to is_local() using the distribution's install location.
    return is_local(dist_location(dist))
304
305
def dist_in_usersite(dist):
    """
    Return True if given Distribution is installed in user site.
    """
    # Prefix test on canonicalised paths against the user site directory.
    norm_path = normalize_path(dist_location(dist))
    return norm_path.startswith(normalize_path(user_site))
312
313
def dist_in_site_packages(dist):
    """
    Return True when *dist* is installed under the global site-packages
    directory (sysconfig.get_python_lib()).
    """
    dist_path = normalize_path(dist_location(dist))
    return dist_path.startswith(normalize_path(site_packages))
322
323
def dist_is_editable(dist):
    """Return True when *dist* is a develop/editable install (.egg-link)."""
    link_name = dist.project_name + '.egg-link'
    return any(
        os.path.isfile(os.path.join(entry, link_name))
        for entry in sys.path
    )
331
332
def get_installed_distributions(local_only=True,
                                skip=stdlib_pkgs,
                                include_editables=True,
                                editables_only=False,
                                user_only=False):
    """
    Return a list of installed Distribution objects.

    :param local_only: when True (default), only report installations
        local to the current virtualenv, if in a virtualenv.
    :param skip: iterable of lower-case project names to ignore;
        defaults to ``stdlib_pkgs``.
    :param include_editables: when False, don't report editables.
    :param editables_only: when True, only report editables.
    :param user_only: when True, only report installations in the user
        site directory.
    """
    def _accept_all(dist):
        # Predicate used for every filter that is switched off.
        return True

    def _not_editable(dist):
        return not dist_is_editable(dist)

    local_test = dist_is_local if local_only else _accept_all
    editable_test = _accept_all if include_editables else _not_editable
    editables_only_test = dist_is_editable if editables_only else _accept_all
    user_test = dist_in_usersite if user_only else _accept_all

    return [
        dist for dist in pkg_resources.working_set
        if local_test(dist) and
        dist.key not in skip and
        editable_test(dist) and
        editables_only_test(dist) and
        user_test(dist)
    ]
388
389
def egg_link_path(dist):
    """
    Return the path of the .egg-link file for *dist*, or None.

    Search order depends on the environment:
    1) not in a virtualenv:
       site.USER_SITE first, then site_packages
    2) in a no-global virtualenv:
       site_packages only
    3) in a yes-global virtualenv:
       site_packages first, then site.USER_SITE
       (the global location is never searched)

    For #1 and #3 an egg-link can exist in both places; the first one
    found wins.
    """
    if running_under_virtualenv():
        search_dirs = [site_packages]
        if not virtualenv_no_global() and user_site:
            search_dirs.append(user_site)
    else:
        search_dirs = [user_site] if user_site else []
        search_dirs.append(site_packages)

    for directory in search_dirs:
        candidate = os.path.join(directory, dist.project_name) + '.egg-link'
        if os.path.isfile(candidate):
            return candidate
    return None
425
426
def dist_location(dist):
    """
    Return where *dist* is registered in site-packages.

    Normally this is dist.location; for develop (editable) installs that
    attribute points at the source checkout, so the .egg-link path is
    returned instead when one exists.
    """
    return egg_link_path(dist) or dist.location
439
440
def current_umask():
    """Return the process umask (os only exposes it via set-and-restore)."""
    mask = os.umask(0)
    try:
        return mask
    finally:
        # Restore the umask we just clobbered.
        os.umask(mask)
446
447
def unzip_file(filename, location, flatten=True):
    """
    Unzip the file (with path `filename`) to the destination `location`.

    All files are written based on system defaults and umask (i.e.
    permissions are not preserved), except that regular file members with
    any execute permissions (user, group, or world) have "chmod +x"
    applied after being written. Note that for windows, any execute
    changes using os.chmod are no-ops per the python docs.

    :param filename: path of the zip archive to extract.
    :param location: directory to extract into (created if missing).
    :param flatten: when True and every member shares a single leading
        directory, strip that leading directory (not wanted for wheels).
    """
    ensure_dir(location)
    # `with` guarantees the archive handle is closed even if extraction
    # fails (previously a manual try/finally); `zf`/`target_dir` also
    # avoid shadowing the builtins `zip` and `dir`.
    with open(filename, 'rb') as zipfp:
        zf = zipfile.ZipFile(zipfp, allowZip64=True)
        leading = has_leading_dir(zf.namelist()) and flatten
        for info in zf.infolist():
            name = info.filename
            data = zf.read(name)
            fn = name
            if leading:
                fn = split_leading_dir(name)[1]
            fn = os.path.join(location, fn)
            target_dir = os.path.dirname(fn)
            if fn.endswith('/') or fn.endswith('\\'):
                # A directory entry
                ensure_dir(fn)
            else:
                ensure_dir(target_dir)
                with open(fn, 'wb') as fp:
                    fp.write(data)
                # High 16 bits of external_attr carry the Unix st_mode
                # when the archive was built on a POSIX system (0 else).
                mode = info.external_attr >> 16
                # if mode and regular file and any execute permissions for
                # user/group/world?
                if mode and stat.S_ISREG(mode) and mode & 0o111:
                    # make dest file have execute for user/group/world
                    # (chmod +x) no-op on windows per python docs
                    os.chmod(fn, (0o777 - current_umask() | 0o111))
489
490
def untar_file(filename, location):
    """
    Untar the file (with path `filename`) to the destination `location`.
    All files are written based on system defaults and umask (i.e. permissions
    are not preserved), except that regular file members with any execute
    permissions (user, group, or world) have "chmod +x" applied after being
    written. Note that for windows, any execute changes using os.chmod are
    no-ops per the python docs.
    """
    ensure_dir(location)
    # Choose the tarfile mode from the filename extension, falling back to
    # transparent-compression autodetect ('r:*') with a warning.
    if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'):
        mode = 'r:gz'
    elif filename.lower().endswith(BZ2_EXTENSIONS):
        mode = 'r:bz2'
    elif filename.lower().endswith(XZ_EXTENSIONS):
        mode = 'r:xz'
    elif filename.lower().endswith('.tar'):
        mode = 'r'
    else:
        logger.warning(
            'Cannot determine compression type for file %s', filename,
        )
        mode = 'r:*'
    tar = tarfile.open(filename, mode)
    try:
        # note: python<=2.5 doesn't seem to know about pax headers, filter them
        leading = has_leading_dir([
            member.name for member in tar.getmembers()
            if member.name != 'pax_global_header'
        ])
        for member in tar.getmembers():
            fn = member.name
            if fn == 'pax_global_header':
                continue
            if leading:
                # Strip the shared top-level directory from the member name.
                fn = split_leading_dir(fn)[1]
            path = os.path.join(location, fn)
            if member.isdir():
                ensure_dir(path)
            elif member.issym():
                # NOTE(review): relies on tarfile's private _extract_member
                # to materialize the symlink — confirm on tarfile upgrades.
                try:
                    tar._extract_member(member, path)
                except Exception as exc:
                    # Some corrupt tar files seem to produce this
                    # (specifically bad symlinks)
                    logger.warning(
                        'In the tar file %s the member %s is invalid: %s',
                        filename, member.name, exc,
                    )
                    continue
            else:
                try:
                    fp = tar.extractfile(member)
                except (KeyError, AttributeError) as exc:
                    # Some corrupt tar files seem to produce this
                    # (specifically bad symlinks)
                    logger.warning(
                        'In the tar file %s the member %s is invalid: %s',
                        filename, member.name, exc,
                    )
                    continue
                ensure_dir(os.path.dirname(path))
                with open(path, 'wb') as destfp:
                    shutil.copyfileobj(fp, destfp)
                fp.close()
                # Update the timestamp (useful for cython compiled files)
                tar.utime(member, path)
                # member have any execute permissions for user/group/world?
                if member.mode & 0o111:
                    # make dest file have execute for user/group/world
                    # no-op on windows per python docs
                    os.chmod(path, (0o777 - current_umask() | 0o111))
    finally:
        tar.close()
565
566
def unpack_file(filename, location, content_type, link):
    """Unpack the archive at *filename* into *location*.

    Dispatches on *content_type* and the file extension: zip archives
    (wheels are not flattened), tarballs, or — for text/html that looks
    like an SVN page — a Subversion checkout built from *link*.  Raises
    InstallationError when no format can be determined.
    """
    filename = os.path.realpath(filename)
    if (content_type == 'application/zip' or
            filename.lower().endswith(ZIP_EXTENSIONS) or
            zipfile.is_zipfile(filename)):
        unzip_file(
            filename,
            location,
            # Wheels must keep their internal directory layout intact.
            flatten=not filename.endswith('.whl')
        )
    elif (content_type == 'application/x-gzip' or
            tarfile.is_tarfile(filename) or
            filename.lower().endswith(
                TAR_EXTENSIONS + BZ2_EXTENSIONS + XZ_EXTENSIONS)):
        untar_file(filename, location)
    elif (content_type and content_type.startswith('text/html') and
            is_svn_page(file_contents(filename))):
        # We don't really care about this
        from pip._internal.vcs.subversion import Subversion
        Subversion('svn+' + link.url).unpack(location)
    else:
        # FIXME: handle?
        # FIXME: magic signatures?
        logger.critical(
            'Cannot unpack file %s (downloaded from %s, content-type: %s); '
            'cannot detect archive format',
            filename, location, content_type,
        )
        raise InstallationError(
            'Cannot determine archive format of %s' % location
        )
598
599
def call_subprocess(cmd, show_stdout=True, cwd=None,
                    on_returncode='raise',
                    command_desc=None,
                    extra_environ=None, unset_environ=None, spinner=None):
    """
    Run *cmd* in a subprocess, with stderr merged into stdout.

    Args:
      cmd: argv list passed to subprocess.Popen().
      show_stdout: when True, let the child write straight to our stdout;
        when False, capture output (shown at DEBUG level or on failure).
      cwd: working directory for the child process.
      on_returncode: 'raise' (default), 'warn', or 'ignore' — behaviour
        when the child exits non-zero.
      command_desc: human-readable command description for logging;
        derived from *cmd* when None.
      extra_environ: mapping merged over os.environ for the child.
      unset_environ: an iterable of environment variable names to unset
        prior to calling subprocess.Popen().
      spinner: optional spinner updated while output is being captured.

    Returns:
      The captured output as a single string when show_stdout is False,
      otherwise None.
    """
    if unset_environ is None:
        unset_environ = []
    # This function's handling of subprocess output is confusing and I
    # previously broke it terribly, so as penance I will write a long comment
    # explaining things.
    #
    # The obvious thing that affects output is the show_stdout=
    # kwarg. show_stdout=True means, let the subprocess write directly to our
    # stdout. Even though it is nominally the default, it is almost never used
    # inside pip (and should not be used in new code without a very good
    # reason); as of 2016-02-22 it is only used in a few places inside the VCS
    # wrapper code. Ideally we should get rid of it entirely, because it
    # creates a lot of complexity here for a rarely used feature.
    #
    # Most places in pip set show_stdout=False. What this means is:
    # - We connect the child stdout to a pipe, which we read.
    # - By default, we hide the output but show a spinner -- unless the
    #   subprocess exits with an error, in which case we show the output.
    # - If the --verbose option was passed (= loglevel is DEBUG), then we show
    #   the output unconditionally. (But in this case we don't want to show
    #   the output a second time if it turns out that there was an error.)
    #
    # stderr is always merged with stdout (even if show_stdout=True).
    if show_stdout:
        stdout = None
    else:
        stdout = subprocess.PIPE
    if command_desc is None:
        # Build a shell-style description, quoting args that need it.
        cmd_parts = []
        for part in cmd:
            if ' ' in part or '\n' in part or '"' in part or "'" in part:
                part = '"%s"' % part.replace('"', '\\"')
            cmd_parts.append(part)
        command_desc = ' '.join(cmd_parts)
    logger.debug("Running command %s", command_desc)
    env = os.environ.copy()
    if extra_environ:
        env.update(extra_environ)
    for name in unset_environ:
        env.pop(name, None)
    try:
        proc = subprocess.Popen(
            cmd, stderr=subprocess.STDOUT, stdin=subprocess.PIPE,
            stdout=stdout, cwd=cwd, env=env,
        )
        # Close stdin immediately: the child must not wait for input.
        proc.stdin.close()
    except Exception as exc:
        logger.critical(
            "Error %s while executing command %s", exc, command_desc,
        )
        raise
    all_output = []
    if stdout is not None:
        # Stream captured output line by line until EOF.
        while True:
            line = console_to_str(proc.stdout.readline())
            if not line:
                break
            line = line.rstrip()
            all_output.append(line + '\n')
            if logger.getEffectiveLevel() <= std_logging.DEBUG:
                # Show the line immediately
                logger.debug(line)
            else:
                # Update the spinner
                if spinner is not None:
                    spinner.spin()
    try:
        proc.wait()
    finally:
        if proc.stdout:
            proc.stdout.close()
    if spinner is not None:
        if proc.returncode:
            spinner.finish("error")
        else:
            spinner.finish("done")
    if proc.returncode:
        if on_returncode == 'raise':
            if (logger.getEffectiveLevel() > std_logging.DEBUG and
                    not show_stdout):
                # Output was hidden so far; dump it before raising.
                logger.info(
                    'Complete output from command %s:', command_desc,
                )
                logger.info(
                    ''.join(all_output) +
                    '\n----------------------------------------'
                )
            raise InstallationError(
                'Command "%s" failed with error code %s in %s'
                % (command_desc, proc.returncode, cwd))
        elif on_returncode == 'warn':
            logger.warning(
                'Command "%s" had error code %s in %s',
                command_desc, proc.returncode, cwd,
            )
        elif on_returncode == 'ignore':
            pass
        else:
            raise ValueError('Invalid value: on_returncode=%s' %
                             repr(on_returncode))
    if not show_stdout:
        return ''.join(all_output)
711
712
def read_text_file(filename):
    """Return the contents of *filename* decoded to text.

    Tries utf-8, then the preferred system encoding (e.g. cp1252 on some
    Windows machines), then latin1, in that order.  Decoding with latin1
    cannot fail, so in the worst case the result contains some garbage
    characters.
    """
    with open(filename, 'rb') as fp:
        raw = fp.read()

    data = raw
    for encoding in ('utf-8', locale.getpreferredencoding(False), 'latin1'):
        try:
            data = raw.decode(encoding)
        except UnicodeDecodeError:
            continue
        break

    assert type(data) != bytes  # Latin1 should have worked.
    return data
735
736
def _make_build_dir(build_dir):
    # Create the build directory and drop pip's delete-marker file in it so
    # that stale build trees can later be recognised and cleaned up.
    os.makedirs(build_dir)
    write_delete_marker_file(build_dir)
740
741
class FakeFile(object):
    """Wrap a list of lines in an object with readline() to make
    ConfigParser happy."""

    def __init__(self, lines):
        # Lazily consumed by readline() and __iter__().
        self._line_iter = (line for line in lines)

    def readline(self):
        """Return the next line, or '' once all lines are exhausted."""
        try:
            try:
                return next(self._line_iter)
            except NameError:
                # Python 2 fallback where the next() builtin is absent.
                return self._line_iter.next()
        except StopIteration:
            return ''

    def __iter__(self):
        return self._line_iter
759
760
class StreamWrapper(StringIO):
    """In-memory stand-in for a sys stream, built via from_stream() so it
    can proxy the original stream's encoding attribute.

    NOTE(review): from_stream() stores orig_stream on the *class*, not the
    instance, so nested or concurrent captures share the most recently set
    stream — confirm callers never nest captures of different streams.
    """

    @classmethod
    def from_stream(cls, orig_stream):
        cls.orig_stream = orig_stream
        return cls()

    # compileall.compile_dir() needs stdout.encoding to print to stdout
    @property
    def encoding(self):
        # Proxy the wrapped stream's encoding.
        return self.orig_stream.encoding
772
773
@contextlib.contextmanager
def captured_output(stream_name):
    """Temporarily replace sys.<stream_name> with a StringIO wrapper.

    Yields the replacement stream; the original is restored on exit.
    Used by captured_stdout().  Taken from Lib/support/__init__.py in the
    CPython repo.
    """
    original = getattr(sys, stream_name)
    replacement = StreamWrapper.from_stream(original)
    setattr(sys, stream_name, replacement)
    try:
        yield replacement
    finally:
        setattr(sys, stream_name, original)
787
788
def captured_stdout():
    """Capture the output of sys.stdout:

       with captured_stdout() as stdout:
           print('hello')
       self.assertEqual(stdout.getvalue(), 'hello\n')

    Taken from Lib/support/__init__.py in the CPython repo.
    """
    # Thin wrapper: delegates to the generic captured_output() manager.
    return captured_output('stdout')
799
800
class cached_property(object):
    """Descriptor that computes a value once per instance, then caches it.

    After the first access the result is stored in the instance __dict__
    under the wrapped function's name, shadowing this descriptor; deleting
    that attribute re-arms the property.

    Source: https://github.com/bottlepy/bottle/blob/0.11.5/bottle.py#L175
    """

    def __init__(self, func):
        self.__doc__ = getattr(func, '__doc__')
        self.func = func

    def __get__(self, obj, cls):
        if obj is None:
            # Accessed on the class itself: expose the descriptor.
            return self
        result = self.func(obj)
        obj.__dict__[self.func.__name__] = result
        return result
819
820
def get_installed_version(dist_name, lookup_dirs=None):
    """Get the installed version of dist_name avoiding pkg_resources cache"""
    # Build a requirement we can resolve through setuptools.
    requirement = pkg_resources.Requirement.parse(dist_name)

    # A fresh WorkingSet per call sidesteps pkg_resources' global cache.
    if lookup_dirs is None:
        working_set = pkg_resources.WorkingSet()
    else:
        working_set = pkg_resources.WorkingSet(lookup_dirs)

    # Look the distribution up; None when it isn't installed.
    dist = working_set.find(requirement)
    return dist.version if dist else None
839
840
def consume(iterator):
    """Exhaust *iterator* completely, discarding every item (runs in C)."""
    deque(iterator, maxlen=0)
844
845
# Lightweight stand-in for an enum type (pre-dates the stdlib enum module).
def enum(*sequential, **named):
    """Build a class with *sequential* names numbered 0..n-1, explicit
    *named* values, and a ``reverse_mapping`` dict from value to name."""
    members = {name: index for index, name in enumerate(sequential)}
    members.update(named)
    members['reverse_mapping'] = {
        value: key for key, value in members.items()
    }
    return type('Enum', (), members)
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/outdated.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/outdated.py
new file mode 100644
index 0000000..f4572ab
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/outdated.py
@@ -0,0 +1,163 @@
1from __future__ import absolute_import
2
3import datetime
4import json
5import logging
6import os.path
7import sys
8
9from pip._vendor import lockfile
10from pip._vendor.packaging import version as packaging_version
11
12from pip._internal.compat import WINDOWS
13from pip._internal.index import PackageFinder
14from pip._internal.locations import USER_CACHE_DIR, running_under_virtualenv
15from pip._internal.utils.filesystem import check_path_owner
16from pip._internal.utils.misc import ensure_dir, get_installed_version
17
18SELFCHECK_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ"
19
20
21logger = logging.getLogger(__name__)
22
23
class VirtualenvSelfCheckState(object):
    """Self-check state stored as pip-selfcheck.json inside the virtualenv."""

    def __init__(self):
        self.statefile_path = os.path.join(sys.prefix, "pip-selfcheck.json")

        # Load any previously saved state; a missing or corrupt file
        # simply means there is none yet.
        try:
            with open(self.statefile_path) as statefile:
                self.state = json.load(statefile)
        except (IOError, ValueError):
            self.state = {}

    def save(self, pypi_version, current_time):
        """Persist the latest check time and observed PyPI version."""
        payload = {
            "last_check": current_time.strftime(SELFCHECK_DATE_FMT),
            "pypi_version": pypi_version,
        }
        with open(self.statefile_path, "w") as statefile:
            json.dump(
                payload,
                statefile,
                sort_keys=True,
                separators=(",", ":")
            )
47
48
class GlobalSelfCheckState(object):
    """Self-check state shared across environments, stored in
    USER_CACHE_DIR/selfcheck.json and keyed by sys.prefix."""

    def __init__(self):
        self.statefile_path = os.path.join(USER_CACHE_DIR, "selfcheck.json")

        # Load the existing state
        try:
            with open(self.statefile_path) as statefile:
                self.state = json.load(statefile)[sys.prefix]
        except (IOError, ValueError, KeyError):
            # Missing file, corrupt JSON, or no entry for this prefix:
            # start with an empty state.
            self.state = {}

    def save(self, pypi_version, current_time):
        """Record *pypi_version* and *current_time* under this sys.prefix."""
        # Check to make sure that we own the directory
        if not check_path_owner(os.path.dirname(self.statefile_path)):
            return

        # Now that we've ensured the directory is owned by this user, we'll go
        # ahead and make sure that all our directories are created.
        ensure_dir(os.path.dirname(self.statefile_path))

        # Attempt to write out our version check file
        with lockfile.LockFile(self.statefile_path):
            # Re-read under the lock so concurrent pip runs for other
            # prefixes are not clobbered.
            if os.path.exists(self.statefile_path):
                with open(self.statefile_path) as statefile:
                    state = json.load(statefile)
            else:
                state = {}

            state[sys.prefix] = {
                "last_check": current_time.strftime(SELFCHECK_DATE_FMT),
                "pypi_version": pypi_version,
            }

            with open(self.statefile_path, "w") as statefile:
                json.dump(state, statefile, sort_keys=True,
                          separators=(",", ":"))
85
86
def load_selfcheck_statefile():
    """Return the appropriate self-check state store for this environment."""
    if running_under_virtualenv():
        return VirtualenvSelfCheckState()
    return GlobalSelfCheckState()
92
93
def pip_version_check(session, options):
    """Check for an update for pip.

    Limit the frequency of checks to once per week. State is stored either in
    the active virtualenv or in the user's USER_CACHE_DIR keyed off the prefix
    of the pip script path.
    """
    installed_version = get_installed_version("pip")
    if not installed_version:
        # pip itself isn't resolvable (e.g. running from source); nothing
        # meaningful to compare.
        return

    pip_version = packaging_version.parse(installed_version)
    pypi_version = None

    try:
        state = load_selfcheck_statefile()

        current_time = datetime.datetime.utcnow()
        # Determine if we need to refresh the state
        if "last_check" in state.state and "pypi_version" in state.state:
            last_check = datetime.datetime.strptime(
                state.state["last_check"],
                SELFCHECK_DATE_FMT
            )
            # Reuse the cached PyPI version when it is under a week old.
            if (current_time - last_check).total_seconds() < 7 * 24 * 60 * 60:
                pypi_version = state.state["pypi_version"]

        # Refresh the version if we need to or just see if we need to warn
        if pypi_version is None:
            # Lets use PackageFinder to see what the latest pip version is
            finder = PackageFinder(
                find_links=options.find_links,
                index_urls=[options.index_url] + options.extra_index_urls,
                allow_all_prereleases=False,  # Explicitly set to False
                trusted_hosts=options.trusted_hosts,
                process_dependency_links=options.process_dependency_links,
                session=session,
            )
            all_candidates = finder.find_all_candidates("pip")
            if not all_candidates:
                return
            pypi_version = str(
                max(all_candidates, key=lambda c: c.version).version
            )

            # save that we've performed a check
            state.save(pypi_version, current_time)

        remote_version = packaging_version.parse(pypi_version)

        # Determine if our pypi_version is older
        if (pip_version < remote_version and
                pip_version.base_version != remote_version.base_version):
            # Advise "python -m pip" on Windows to avoid issues
            # with overwriting pip.exe.
            if WINDOWS:
                pip_cmd = "python -m pip"
            else:
                pip_cmd = "pip"
            logger.warning(
                "You are using pip version %s, however version %s is "
                "available.\nYou should consider upgrading via the "
                "'%s install --upgrade pip' command.",
                pip_version, pypi_version, pip_cmd
            )
    except Exception:
        # The self-check is best-effort; never let it break the command
        # the user actually ran.
        logger.debug(
            "There was an error checking the latest version of pip",
            exc_info=True,
        )
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/packaging.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/packaging.py
new file mode 100644
index 0000000..d523953
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/packaging.py
@@ -0,0 +1,70 @@
1from __future__ import absolute_import
2
3import logging
4import sys
5from email.parser import FeedParser # type: ignore
6
7from pip._vendor import pkg_resources
8from pip._vendor.packaging import specifiers, version
9
10from pip._internal import exceptions
11
12logger = logging.getLogger(__name__)
13
14
def check_requires_python(requires_python):
    """
    Check whether the running interpreter satisfies *requires_python*.

    Returns True when the requirement is absent (None) or met, and False
    when it is not met.  Raises specifiers.InvalidSpecifier when
    *requires_python* has an invalid format.
    """
    if requires_python is None:
        # The package provides no information
        return True
    requirement = specifiers.SpecifierSet(requires_python)

    # Only major.minor.micro of the running interpreter is considered.
    running = version.parse(
        '.'.join(str(part) for part in sys.version_info[:3])
    )
    return running in requirement
33
34
def get_metadata(dist):
    # Prefer the wheel-style METADATA file for dist-info distributions,
    # falling back to PKG-INFO for egg-info installs.  Implicitly returns
    # None when neither metadata file is present — callers must tolerate
    # that (NOTE(review): check_dist_requires_python feeds the result to
    # FeedParser, which would fail on None — confirm upstream guarantees).
    if (isinstance(dist, pkg_resources.DistInfoDistribution) and
            dist.has_metadata('METADATA')):
        return dist.get_metadata('METADATA')
    elif dist.has_metadata('PKG-INFO'):
        return dist.get_metadata('PKG-INFO')
41
42
def check_dist_requires_python(dist):
    """Raise UnsupportedPythonVersion if *dist* declares a Requires-Python
    the running interpreter does not satisfy.

    An invalid Requires-Python specifier is only logged as a warning.
    """
    metadata = get_metadata(dist)
    # Parse the RFC-822 style metadata to extract header fields.
    feed_parser = FeedParser()
    feed_parser.feed(metadata)
    pkg_info_dict = feed_parser.close()
    requires_python = pkg_info_dict.get('Requires-Python')
    try:
        # check_requires_python() raises InvalidSpecifier for a malformed
        # specifier, which is why the call sits inside this try block.
        if not check_requires_python(requires_python):
            raise exceptions.UnsupportedPythonVersion(
                "%s requires Python '%s' but the running Python is %s" % (
                    dist.project_name,
                    requires_python,
                    '.'.join(map(str, sys.version_info[:3])),)
            )
    except specifiers.InvalidSpecifier as e:
        logger.warning(
            "Package %s has an invalid Requires-Python entry %s - %s",
            dist.project_name, requires_python, e,
        )
        return
63
64
def get_installer(dist):
    """Return the installer name recorded in *dist*'s INSTALLER metadata,
    or '' when it is absent or blank."""
    if not dist.has_metadata('INSTALLER'):
        return ''
    stripped = (
        line.strip() for line in dist.get_metadata_lines('INSTALLER')
    )
    return next((name for name in stripped if name), '')
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/setuptools_build.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/setuptools_build.py
new file mode 100644
index 0000000..9d32174
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/setuptools_build.py
@@ -0,0 +1,8 @@
# Shim to wrap setup.py invocation with setuptools
# The string is %-formatted with the setup.py path and run via `python -c`.
# It opens setup.py with tokenize.open where available (PEP 263 encoding
# aware), normalizes CRLF line endings so compile() accepts the source,
# and executes it with setuptools pre-imported so distutils-only setup
# scripts still gain setuptools' commands.
SETUPTOOLS_SHIM = (
    "import setuptools, tokenize;__file__=%r;"
    "f=getattr(tokenize, 'open', open)(__file__);"
    "code=f.read().replace('\\r\\n', '\\n');"
    "f.close();"
    "exec(compile(code, __file__, 'exec'))"
)
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/temp_dir.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/temp_dir.py
new file mode 100644
index 0000000..25bc0d9
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/temp_dir.py
@@ -0,0 +1,82 @@
1from __future__ import absolute_import
2
3import logging
4import os.path
5import tempfile
6
7from pip._internal.utils.misc import rmtree
8
9logger = logging.getLogger(__name__)
10
11
class TempDirectory(object):
    """Owns a temporary directory and cleans it up.

    Usable either as a context manager or as a plain object.

    Attributes:
        path
            Filesystem location of the created directory, or None.
        delete
            Whether the directory is removed on context-manager exit.

    Methods:
        create()
            Make the temporary directory and record it in ``path``.
        cleanup()
            Remove the directory and reset ``path`` to None.

    As a context manager, the directory is created on entry and, when the
    ``delete`` attribute is true, removed again on exit.
    """

    def __init__(self, path=None, delete=None, kind="temp"):
        super(TempDirectory, self).__init__()

        if path is None and delete is None:
            # Neither an explicit directory nor an explicit delete
            # preference was given: default to cleaning up after ourselves.
            delete = True

        self.path = path
        self.delete = delete
        self.kind = kind

    def __repr__(self):
        return "<{} {!r}>".format(self.__class__.__name__, self.path)

    def __enter__(self):
        self.create()
        return self

    def __exit__(self, exc, value, tb):
        if self.delete:
            self.cleanup()

    def create(self):
        """Create a temporary directory and store it's path in self.path
        """
        if self.path is not None:
            # An explicit directory was supplied; nothing to create.
            logger.debug(
                "Skipped creation of temporary directory: {}".format(self.path)
            )
            return
        # We realpath here because some systems have their default tmpdir
        # symlinked to another directory.  This tends to confuse build
        # scripts, so we canonicalize the path by traversing potential
        # symlinks here.
        created = tempfile.mkdtemp(prefix="pip-{}-".format(self.kind))
        self.path = os.path.realpath(created)
        logger.debug("Created temporary directory: {}".format(self.path))

    def cleanup(self):
        """Remove the temporary directory created and reset state
        """
        if self.path is not None and os.path.exists(self.path):
            rmtree(self.path)
        self.path = None
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/typing.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/typing.py
new file mode 100644
index 0000000..4e25ae6
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/typing.py
@@ -0,0 +1,29 @@
"""For neatly implementing static typing in pip.

`mypy` - the static type analysis tool we use - uses the `typing` module, which
provides core functionality fundamental to mypy's functioning.

Generally, `typing` would be imported at runtime and used in that fashion -
it acts as a no-op at runtime and does not have any run-time overhead by
design.

As it turns out, `typing` is not vendorable - it uses separate sources for
Python 2/Python 3. Thus, this codebase can not expect it to be present.
To work around this, mypy allows the typing import to be behind a False-y
optional to prevent it from running at runtime and type-comments can be used
to remove the need for the types to be accessible directly during runtime.

This module provides the False-y guard in a nicely named fashion so that a
curious maintainer can reach here to read this.

In pip, all static-typing related imports should be guarded as follows:

    from pip.utils.typing import MYPY_CHECK_RUNNING

    if MYPY_CHECK_RUNNING:
        from typing import ...

Ref: https://github.com/python/mypy/issues/3216
"""

# Always False at runtime, so guarded typing-only imports never execute;
# mypy nevertheless analyses the guarded block as reachable.
MYPY_CHECK_RUNNING = False
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/ui.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/ui.py
new file mode 100644
index 0000000..d97ea36
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/ui.py
@@ -0,0 +1,421 @@
1from __future__ import absolute_import, division
2
3import contextlib
4import itertools
5import logging
6import sys
7import time
8from signal import SIGINT, default_int_handler, signal
9
10from pip._vendor import six
11from pip._vendor.progress.bar import (
12 Bar, ChargingBar, FillingCirclesBar, FillingSquaresBar, IncrementalBar,
13 ShadyBar,
14)
15from pip._vendor.progress.helpers import HIDE_CURSOR, SHOW_CURSOR, WritelnMixin
16from pip._vendor.progress.spinner import Spinner
17
18from pip._internal.compat import WINDOWS
19from pip._internal.utils.logging import get_indentation
20from pip._internal.utils.misc import format_size
21from pip._internal.utils.typing import MYPY_CHECK_RUNNING
22
23if MYPY_CHECK_RUNNING:
24 from typing import Any
25
26try:
27 from pip._vendor import colorama
28# Lots of different errors can come from this, including SystemError and
29# ImportError.
30except Exception:
31 colorama = None
32
33logger = logging.getLogger(__name__)
34
35
def _select_progress_class(preferred, fallback):
    """Return *preferred* if its output file can encode every character the
    bar will draw, otherwise *fallback* (the plain ASCII bar).
    """
    encoding = getattr(preferred.file, "encoding", None)

    # No declared encoding: assume the stream cannot handle unicode and
    # play it safe with the ASCII bar.
    if not encoding:
        return fallback

    # Gather every glyph the fancy bar might ever print.
    glyphs = [
        getattr(preferred, "empty_fill", six.text_type()),
        getattr(preferred, "fill", six.text_type()),
    ]
    glyphs.extend(getattr(preferred, "phases", []))

    # If all of those glyphs survive a round-trip through the stream's
    # encoding we can use the fancy bar; otherwise fall back.
    try:
        six.text_type().join(glyphs).encode(encoding)
    except UnicodeEncodeError:
        return fallback
    return preferred
61
62
63_BaseBar = _select_progress_class(IncrementalBar, Bar) # type: Any
64
65
class InterruptibleMixin(object):
    """
    Make sure self.finish() runs even when the user hits Ctrl-C.

    A SIGINT handler is installed for the lifetime of the progress display
    so that temporary terminal state (such as a hidden cursor) is cleaned
    up before the interrupt propagates.

    The progress library ships a similar SigIntMixin, but as of version 1.2
    it calls sys.exit(), discards the previous handler entirely, and leaves
    its own handler installed after an uninterrupted finish — which would
    cause surprising behaviour on a later, unrelated Ctrl-C.  This mixin
    avoids all three problems.
    """

    def __init__(self, *args, **kwargs):
        """Install our SIGINT handler, remembering the one it replaces."""
        super(InterruptibleMixin, self).__init__(*args, **kwargs)

        previous = signal(SIGINT, self.handle_sigint)
        # signal() returns None when the old handler was not installed from
        # Python; "None" cannot be restored later, so the least-bad stand-in
        # is Python's default SIGINT handler, which raises KeyboardInterrupt.
        self.original_handler = (
            previous if previous is not None else default_int_handler
        )

    def finish(self):
        """
        Put the original SIGINT handler back once the display is done,
        whether it ended normally or was interrupted.
        """
        super(InterruptibleMixin, self).finish()
        signal(SIGINT, self.original_handler)

    def handle_sigint(self, signum, frame):
        """
        Tear the display down, then defer to the saved handler.

        This handler is only ever installed while the progress display
        is active.
        """
        self.finish()
        self.original_handler(signum, frame)
119
120
class SilentBar(Bar):
    """A bar that draws nothing: update() is a no-op, so no output is
    produced (used for --progress-bar=off)."""

    def update(self):
        pass
125
126
class BlueEmojiBar(IncrementalBar):
    """Incremental bar drawn with blue diamond/circle emoji phases
    (used for --progress-bar=emoji)."""

    suffix = "%(percent)d%%"
    bar_prefix = " "
    bar_suffix = " "
    phases = (u"\U0001F539", u"\U0001F537", u"\U0001F535")  # type: Any
133
134
class DownloadProgressMixin(object):
    """Adds download-oriented display fields (size, speed, ETA) and an
    ``iter`` driver to a progress bar/spinner."""

    def __init__(self, *args, **kwargs):
        super(DownloadProgressMixin, self).__init__(*args, **kwargs)
        # Indent the message so it lines up with pip's log output.
        self.message = (" " * (get_indentation() + 2)) + self.message

    @property
    def downloaded(self):
        """Human-readable count of bytes received so far."""
        return format_size(self.index)

    @property
    def download_speed(self):
        """Human-readable bytes/second, or '...' before any samples."""
        if self.avg == 0.0:
            # No timing samples yet — avoid dividing by zero.
            return "..."
        return format_size(1 / self.avg) + "/s"

    @property
    def pretty_eta(self):
        """Formatted ETA, or empty string when unknown."""
        return "eta %s" % self.eta_td if self.eta else ""

    def iter(self, it, n=1):
        """Yield from *it*, advancing the display by *n* per item and
        finishing the display when exhausted."""
        for item in it:
            yield item
            self.next(n)
        self.finish()
163
164
class WindowsMixin(object):
    """Adapt a progress display for Windows consoles (via colorama)."""

    def __init__(self, *args, **kwargs):
        # The Windows terminal does not support the hide/show cursor ANSI codes
        # even with colorama. So we'll ensure that hide_cursor is False on
        # Windows.
        # This call needs to go before the super() call, so that hide_cursor
        # is set in time. The base progress bar class writes the "hide cursor"
        # code to the terminal in its init, so if we don't set this soon
        # enough, we get a "hide" with no corresponding "show"...
        if WINDOWS and self.hide_cursor:
            self.hide_cursor = False

        super(WindowsMixin, self).__init__(*args, **kwargs)

        # Check if we are running on Windows and we have the colorama module,
        # if we do then wrap our file with it.
        if WINDOWS and colorama:
            self.file = colorama.AnsiToWin32(self.file)
            # The progress code expects to be able to call self.file.isatty()
            # but the colorama.AnsiToWin32() object doesn't have that, so we'll
            # add it.
            self.file.isatty = lambda: self.file.wrapped.isatty()
            # The progress code expects to be able to call self.file.flush()
            # but the colorama.AnsiToWin32() object doesn't have that, so we'll
            # add it.
            self.file.flush = lambda: self.file.wrapped.flush()
192
193
class BaseDownloadProgressBar(WindowsMixin, InterruptibleMixin,
                              DownloadProgressMixin):
    """Common configuration for every download bar: writes to stdout and
    formats using the fields DownloadProgressMixin provides."""

    file = sys.stdout
    message = "%(percent)d%%"
    suffix = "%(downloaded)s %(download_speed)s %(pretty_eta)s"
200
201# NOTE: The "type: ignore" comments on the following classes are there to
202# work around https://github.com/python/typing/issues/241
203
204
class DefaultDownloadProgressBar(BaseDownloadProgressBar,
                                 _BaseBar):  # type: ignore
    """Bar for --progress-bar=on: unicode when supported, else ASCII."""
    pass
208
209
class DownloadSilentBar(BaseDownloadProgressBar, SilentBar):  # type: ignore
    """Bar for --progress-bar=off: renders nothing."""
    pass
212
213
class DownloadIncrementalBar(BaseDownloadProgressBar,  # type: ignore
                             IncrementalBar):
    """Bar for --progress-bar=ascii."""
    pass
217
218
class DownloadChargingBar(BaseDownloadProgressBar,  # type: ignore
                          ChargingBar):
    """Download bar in the progress library's 'charging' style."""
    pass
222
223
class DownloadShadyBar(BaseDownloadProgressBar, ShadyBar):  # type: ignore
    """Download bar in the progress library's 'shady' style."""
    pass
226
227
class DownloadFillingSquaresBar(BaseDownloadProgressBar,  # type: ignore
                                FillingSquaresBar):
    """Download bar drawn with filling squares."""
    pass
231
232
class DownloadFillingCirclesBar(BaseDownloadProgressBar,  # type: ignore
                                FillingCirclesBar):
    """Bar for --progress-bar=pretty (filling circles)."""
    pass
236
237
class DownloadBlueEmojiProgressBar(BaseDownloadProgressBar,  # type: ignore
                                   BlueEmojiBar):
    """Bar for --progress-bar=emoji."""
    pass
241
242
class DownloadProgressSpinner(WindowsMixin, InterruptibleMixin,
                              DownloadProgressMixin, WritelnMixin, Spinner):
    """Spinner shown while downloading when the total size is unknown."""

    file = sys.stdout
    suffix = "%(downloaded)s %(download_speed)s"

    def next_phase(self):
        # Lazily create a cycling iterator over the spinner's glyphs the
        # first time a frame is drawn.
        if not hasattr(self, "_phaser"):
            self._phaser = itertools.cycle(self.phases)
        return next(self._phaser)

    def update(self):
        """Render one frame: indented message, spinner glyph, stats suffix."""
        message = self.message % self
        phase = self.next_phase()
        suffix = self.suffix % self
        # Join the pieces, inserting a separating space only next to
        # non-empty parts.
        line = ''.join([
            message,
            " " if message else "",
            phase,
            " " if suffix else "",
            suffix,
        ])

        self.writeln(line)
267
268
# Maps each --progress-bar choice to (bar class, spinner class); the bar
# is used when the download size is known, the spinner when it is not.
BAR_TYPES = {
    "off": (DownloadSilentBar, DownloadSilentBar),
    "on": (DefaultDownloadProgressBar, DownloadProgressSpinner),
    "ascii": (DownloadIncrementalBar, DownloadProgressSpinner),
    "pretty": (DownloadFillingCirclesBar, DownloadProgressSpinner),
    "emoji": (DownloadBlueEmojiProgressBar, DownloadProgressSpinner)
}
276
277
def DownloadProgressProvider(progress_bar, max=None):
    """Return the bound ``iter`` method of the progress UI selected by
    *progress_bar*: a real bar when *max* (the total size) is known, a
    spinner otherwise."""
    bar_cls, spinner_cls = BAR_TYPES[progress_bar]
    if max is None or max == 0:
        return spinner_cls().iter
    return bar_cls(max=max).iter
283
284
285################################################################
286# Generic "something is happening" spinners
287#
288# We don't even try using progress.spinner.Spinner here because it's actually
289# simpler to reimplement from scratch than to coerce their code into doing
290# what we need.
291################################################################
292
@contextlib.contextmanager
def hidden_cursor(file):
    """Hide the terminal cursor on *file* while the ``with`` body runs,
    restoring it afterwards."""
    # Windows terminals don't understand the hide/show cursor codes even
    # via colorama, and the codes would just be clutter when the output is
    # not a tty or logging is quieter than INFO (e.g. --quiet; see
    # https://github.com/pypa/pip/issues/3418).  Note the short-circuit:
    # on Windows, file.isatty() is never called.
    if (WINDOWS or not file.isatty() or
            logger.getEffectiveLevel() > logging.INFO):
        yield
        return

    file.write(HIDE_CURSOR)
    try:
        yield
    finally:
        # Always restore the cursor, even on error/interrupt.
        file.write(SHOW_CURSOR)
310
311
class RateLimiter(object):
    """Simple wall-clock throttle: ``ready()`` reports whether at least the
    configured interval has elapsed since the last ``reset()``."""

    def __init__(self, min_update_interval_seconds):
        self._min_update_interval_seconds = min_update_interval_seconds
        # Epoch seconds of the last update; 0 means "never", so the first
        # ready() call is always True.
        self._last_update = 0

    def ready(self):
        """Return True once the interval has passed since the last reset."""
        elapsed = time.time() - self._last_update
        return elapsed >= self._min_update_interval_seconds

    def reset(self):
        """Record "now" as the moment of the last update."""
        self._last_update = time.time()
324
325
class InteractiveSpinner(object):
    """In-place terminal spinner for interactive (tty) sessions.

    Writes directly to *file* (default sys.stdout), redrawing the status
    in place with backspaces; updates are rate-limited.
    """

    def __init__(self, message, file=None, spin_chars="-\\|/",
                 # Empirically, 8 updates/second looks nice
                 min_update_interval_seconds=0.125):
        self._message = message
        if file is None:
            file = sys.stdout
        self._file = file
        self._rate_limiter = RateLimiter(min_update_interval_seconds)
        self._finished = False

        self._spin_cycle = itertools.cycle(spin_chars)

        # Print the prefix once; only the status after "... " is redrawn.
        self._file.write(" " * get_indentation() + self._message + " ... ")
        # Width of the currently-displayed status, used to erase it.
        self._width = 0

    def _write(self, status):
        assert not self._finished
        # Erase what we wrote before by backspacing to the beginning, writing
        # spaces to overwrite the old text, and then backspacing again
        backup = "\b" * self._width
        self._file.write(backup + " " * self._width + backup)
        # Now we have a blank slate to add our status
        self._file.write(status)
        self._width = len(status)
        self._file.flush()
        self._rate_limiter.reset()

    def spin(self):
        """Advance to the next spin glyph (no-op if finished or throttled)."""
        if self._finished:
            return
        if not self._rate_limiter.ready():
            return
        self._write(next(self._spin_cycle))

    def finish(self, final_status):
        """Replace the spinner with *final_status* and end the line.

        Idempotent: later calls (and any further spin()) are ignored.
        """
        if self._finished:
            return
        self._write(final_status)
        self._file.write("\n")
        self._file.flush()
        self._finished = True
368
369
class NonInteractiveSpinner(object):
    """Spinner substitute for dumb terminals and non-interactive installs
    (no tty).

    Instead of drawing, it logs a status line occasionally (every 60
    seconds by default) as a keep-alive for systems like Travis-CI that
    treat lack of output as a frozen task.
    """

    def __init__(self, message, min_update_interval_seconds=60):
        self._message = message
        self._finished = False
        self._rate_limiter = RateLimiter(min_update_interval_seconds)
        self._update("started")

    def _update(self, status):
        assert not self._finished
        self._rate_limiter.reset()
        logger.info("%s: %s", self._message, status)

    def spin(self):
        """Emit a keep-alive log line, rate-limited; no-op once finished."""
        if self._finished or not self._rate_limiter.ready():
            return
        self._update("still running...")

    def finish(self, final_status):
        """Log the final status once; later calls are ignored."""
        if self._finished:
            return
        self._update("finished with status '%s'" % (final_status,))
        self._finished = True
398
399
@contextlib.contextmanager
def open_spinner(message):
    """Yield a spinner appropriate for the current terminal, finishing it
    with "done", "canceled" (KeyboardInterrupt) or "error" on exit."""
    # Interactive spinner goes directly to sys.stdout rather than being routed
    # through the logging system, but it acts like it has level INFO,
    # i.e. it's only displayed if we're at level INFO or better.
    # Non-interactive spinner goes through the logging system, so it is always
    # in sync with logging configuration.
    if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO:
        spinner = InteractiveSpinner(message)
    else:
        spinner = NonInteractiveSpinner(message)
    try:
        with hidden_cursor(sys.stdout):
            yield spinner
    except KeyboardInterrupt:
        # Re-raise after recording the outcome so callers still see it.
        spinner.finish("canceled")
        raise
    except Exception:
        spinner.finish("error")
        raise
    else:
        spinner.finish("done")
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/vcs/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/vcs/__init__.py
new file mode 100644
index 0000000..bff94fa
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/vcs/__init__.py
@@ -0,0 +1,471 @@
1"""Handles all VCS (version control) support"""
2from __future__ import absolute_import
3
4import copy
5import errno
6import logging
7import os
8import shutil
9import sys
10
11from pip._vendor.six.moves.urllib import parse as urllib_parse
12
13from pip._internal.exceptions import BadCommand
14from pip._internal.utils.misc import (
15 display_path, backup_dir, call_subprocess, rmtree, ask_path_exists,
16)
17from pip._internal.utils.typing import MYPY_CHECK_RUNNING
18
19if MYPY_CHECK_RUNNING:
20 from typing import Dict, Optional, Tuple
21 from pip._internal.basecommand import Command
22
23__all__ = ['vcs', 'get_src_requirement']
24
25
26logger = logging.getLogger(__name__)
27
28
class RevOptions(object):

    """
    Encapsulates a VCS-specific revision to install, along with any VCS
    install options.

    Instances of this class should be treated as if immutable.
    """

    def __init__(self, vcs, rev=None, extra_args=None):
        """
        Args:
            vcs: a VersionControl object.
            rev: the name of the revision to install.
            extra_args: a list of extra options.
        """
        self.extra_args = [] if extra_args is None else extra_args
        self.rev = rev
        self.vcs = vcs

    def __repr__(self):
        return '<RevOptions {}: rev={!r}>'.format(self.vcs.name, self.rev)

    @property
    def arg_rev(self):
        """The revision to pass on the command line: the explicit rev, or
        the backend's default when none was given."""
        return self.vcs.default_arg_rev if self.rev is None else self.rev

    def to_args(self):
        """
        Return the VCS-specific command arguments.
        """
        rev = self.arg_rev
        base = [] if rev is None else self.vcs.get_base_rev_args(rev)
        return base + self.extra_args

    def to_display(self):
        """Return a suffix for log messages, e.g. " (to revision abc)"."""
        if not self.rev:
            return ''
        return ' (to revision {})'.format(self.rev)

    def make_new(self, rev):
        """
        Make a copy of the current instance, but with a new rev.

        Args:
            rev: the name of the revision for the new object.
        """
        return self.vcs.make_rev_options(rev, extra_args=self.extra_args)
88
89
class VcsSupport(object):
    """Registry of the available version-control backends.

    Backends register themselves via ``register()``; the shared state lives
    in the class-level ``_registry`` dict.
    """

    _registry = {}  # type: Dict[str, Command]
    schemes = ['ssh', 'git', 'hg', 'bzr', 'sftp', 'svn']

    def __init__(self):
        # Register more schemes with urlparse for various version control
        # systems
        urllib_parse.uses_netloc.extend(self.schemes)
        # Python >= 2.7.4, 3.3 doesn't have uses_fragment
        if getattr(urllib_parse, 'uses_fragment', None):
            urllib_parse.uses_fragment.extend(self.schemes)
        super(VcsSupport, self).__init__()

    def __iter__(self):
        # Iterating yields the registered backend names.
        return self._registry.__iter__()

    @property
    def backends(self):
        # Registered backend classes.
        return list(self._registry.values())

    @property
    def dirnames(self):
        # Metadata directory names, e.g. ['.git', '.hg', ...].
        return [backend.dirname for backend in self.backends]

    @property
    def all_schemes(self):
        # Every URL scheme any registered backend supports.
        schemes = []
        for backend in self.backends:
            schemes.extend(backend.schemes)
        return schemes

    def register(self, cls):
        """Register a VersionControl subclass under its ``name``."""
        if not hasattr(cls, 'name'):
            logger.warning('Cannot register VCS %s', cls.__name__)
            return
        if cls.name not in self._registry:
            self._registry[cls.name] = cls
            logger.debug('Registered VCS backend: %s', cls.name)

    def unregister(self, cls=None, name=None):
        """Remove a backend by name or by class; logs if neither matches."""
        if name in self._registry:
            del self._registry[name]
        elif cls in self._registry.values():
            del self._registry[cls.name]
        else:
            logger.warning('Cannot unregister because no class or name given')

    def get_backend_name(self, location):
        """
        Return the name of the version control backend if found at given
        location, e.g. vcs.get_backend_name('/path/to/vcs/checkout')
        """
        for vc_type in self._registry.values():
            if vc_type.controls_location(location):
                logger.debug('Determine that %s uses VCS: %s',
                             location, vc_type.name)
                return vc_type.name
        return None

    def get_backend(self, name):
        # Returns the backend class, or None implicitly when unknown.
        name = name.lower()
        if name in self._registry:
            return self._registry[name]

    def get_backend_from_location(self, location):
        """Return the backend class controlling *location*, or None."""
        vc_type = self.get_backend_name(location)
        if vc_type:
            return self.get_backend(vc_type)
        return None
159
160
161vcs = VcsSupport()
162
163
class VersionControl(object):
    """Abstract base class for a version-control backend (git, hg, svn, bzr).

    Subclasses fill in the class attributes below, implement the methods
    that raise NotImplementedError, and register themselves on the
    module-level ``vcs`` registry.
    """

    name = ''  # command / registry name, e.g. 'git'
    dirname = ''  # metadata directory marking a checkout, e.g. '.git'
    # List of supported schemes for this Version Control
    schemes = ()  # type: Tuple[str, ...]
    # Iterable of environment variable names to pass to call_subprocess().
    unset_environ = ()  # type: Tuple[str, ...]
    default_arg_rev = None  # type: Optional[str]

    def __init__(self, url=None, *args, **kwargs):
        self.url = url
        super(VersionControl, self).__init__(*args, **kwargs)

    def get_base_rev_args(self, rev):
        """
        Return the base revision arguments for a vcs command.

        Args:
          rev: the name of a revision to install.  Cannot be None.
        """
        raise NotImplementedError

    def make_rev_options(self, rev=None, extra_args=None):
        """
        Return a RevOptions object.

        Args:
          rev: the name of a revision to install.
          extra_args: a list of extra options.
        """
        return RevOptions(self, rev, extra_args=extra_args)

    def _is_local_repository(self, repo):
        """
        posix absolute paths start with os.path.sep,
        win32 ones start with drive (like c:\\folder)
        """
        # NOTE(review): on win32 this returns the drive string (truthy str)
        # rather than a bool; callers only use the result for truthiness.
        drive, tail = os.path.splitdrive(repo)
        return repo.startswith(os.path.sep) or drive

    # See issue #1083 for why this method was introduced:
    # https://github.com/pypa/pip/issues/1083
    def translate_egg_surname(self, surname):
        # For example, Django has branches of the form "stable/1.7.x".
        return surname.replace('/', '_')

    def export(self, location):
        """
        Export the repository at the url to the destination location
        i.e. only download the files, without vcs informations
        """
        raise NotImplementedError

    def get_url_rev(self):
        """
        Returns the correct repository URL and revision by parsing the given
        repository URL
        """
        error_message = (
            "Sorry, '%s' is a malformed VCS url. "
            "The format is <vcs>+<protocol>://<url>, "
            "e.g. svn+http://myrepo/svn/MyApp#egg=MyApp"
        )
        assert '+' in self.url, error_message % self.url
        url = self.url.split('+', 1)[1]
        scheme, netloc, path, query, frag = urllib_parse.urlsplit(url)
        rev = None
        # An '@' in the *path* separates the revision; 'user@host' is safe
        # because that '@' lands in netloc, not path.
        if '@' in path:
            path, rev = path.rsplit('@', 1)
        url = urllib_parse.urlunsplit((scheme, netloc, path, query, ''))
        return url, rev

    def get_info(self, location):
        """
        Returns (url, revision), where both are strings
        """
        assert not location.rstrip('/').endswith(self.dirname), \
            'Bad directory: %s' % location
        return self.get_url(location), self.get_revision(location)

    def normalize_url(self, url):
        """
        Normalize a URL for comparison by unquoting it and removing any
        trailing slash.
        """
        return urllib_parse.unquote(url).rstrip('/')

    def compare_urls(self, url1, url2):
        """
        Compare two repo URLs for identity, ignoring incidental differences.
        """
        return (self.normalize_url(url1) == self.normalize_url(url2))

    def obtain(self, dest):
        """
        Called when installing or updating an editable package, takes the
        source path of the checkout.
        """
        raise NotImplementedError

    def switch(self, dest, url, rev_options):
        """
        Switch the repo at ``dest`` to point to ``URL``.

        Args:
          rev_options: a RevOptions object.
        """
        raise NotImplementedError

    def update(self, dest, rev_options):
        """
        Update an already-existing repo to the given ``rev_options``.

        Args:
          rev_options: a RevOptions object.
        """
        raise NotImplementedError

    def is_commit_id_equal(self, dest, name):
        """
        Return whether the id of the current commit equals the given name.

        Args:
          dest: the repository directory.
          name: a string name.
        """
        raise NotImplementedError

    def check_destination(self, dest, url, rev_options):
        """
        Prepare a location to receive a checkout/clone.

        Return True if the location is ready for (and requires) a
        checkout/clone, False otherwise.

        Args:
          rev_options: a RevOptions object.
        """
        checkout = True
        prompt = False
        rev_display = rev_options.to_display()
        if os.path.exists(dest):
            checkout = False
            if os.path.exists(os.path.join(dest, self.dirname)):
                # dest is already a checkout of *some* repo of this VCS type.
                existing_url = self.get_url(dest)
                if self.compare_urls(existing_url, url):
                    # Same repo: update in place if the revision differs.
                    logger.debug(
                        '%s in %s exists, and has correct URL (%s)',
                        self.repo_name.title(),
                        display_path(dest),
                        url,
                    )
                    if not self.is_commit_id_equal(dest, rev_options.rev):
                        logger.info(
                            'Updating %s %s%s',
                            display_path(dest),
                            self.repo_name,
                            rev_display,
                        )
                        self.update(dest, rev_options)
                    else:
                        logger.info(
                            'Skipping because already up-to-date.')
                else:
                    # Checkout of a different repo: ask the user what to do.
                    logger.warning(
                        '%s %s in %s exists with URL %s',
                        self.name,
                        self.repo_name,
                        display_path(dest),
                        existing_url,
                    )
                    prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ',
                              ('s', 'i', 'w', 'b'))
            else:
                # dest exists but is not a checkout of this VCS type.
                logger.warning(
                    'Directory %s already exists, and is not a %s %s.',
                    dest,
                    self.name,
                    self.repo_name,
                )
                prompt = ('(i)gnore, (w)ipe, (b)ackup ', ('i', 'w', 'b'))
        if prompt:
            logger.warning(
                'The plan is to install the %s repository %s',
                self.name,
                url,
            )
            response = ask_path_exists('What to do?  %s' % prompt[0],
                                       prompt[1])

            if response == 's':
                logger.info(
                    'Switching %s %s to %s%s',
                    self.repo_name,
                    display_path(dest),
                    url,
                    rev_display,
                )
                self.switch(dest, url, rev_options)
            elif response == 'i':
                # do nothing
                pass
            elif response == 'w':
                logger.warning('Deleting %s', display_path(dest))
                rmtree(dest)
                checkout = True
            elif response == 'b':
                dest_dir = backup_dir(dest)
                logger.warning(
                    'Backing up %s to %s', display_path(dest), dest_dir,
                )
                shutil.move(dest, dest_dir)
                checkout = True
            elif response == 'a':
                # NOTE(review): 'a' is not offered by either prompt above,
                # so this branch looks unreachable — confirm before relying
                # on it (ask_path_exists may accept an implicit 'abort').
                sys.exit(-1)
        return checkout

    def unpack(self, location):
        """
        Clean up current location and download the url repository
        (and vcs infos) into location
        """
        if os.path.exists(location):
            rmtree(location)
        self.obtain(location)

    def get_src_requirement(self, dist, location):
        """
        Return a string representing the requirement needed to
        redownload the files currently present in location, something
        like:
          {repository_url}@{revision}#egg={project_name}-{version_identifier}
        """
        raise NotImplementedError

    def get_url(self, location):
        """
        Return the url used at location
        Used in get_info or check_destination
        """
        raise NotImplementedError

    def get_revision(self, location):
        """
        Return the current commit id of the files at the given location.
        """
        raise NotImplementedError

    def run_command(self, cmd, show_stdout=True, cwd=None,
                    on_returncode='raise',
                    command_desc=None,
                    extra_environ=None, spinner=None):
        """
        Run a VCS subcommand
        This is simply a wrapper around call_subprocess that adds the VCS
        command name, and checks that the VCS is available
        """
        cmd = [self.name] + cmd
        try:
            return call_subprocess(cmd, show_stdout, cwd,
                                   on_returncode,
                                   command_desc, extra_environ,
                                   unset_environ=self.unset_environ,
                                   spinner=spinner)
        except OSError as e:
            # errno.ENOENT = no such file or directory
            # In other words, the VCS executable isn't available
            if e.errno == errno.ENOENT:
                raise BadCommand(
                    'Cannot find command %r - do you have '
                    '%r installed and in your '
                    'PATH?' % (self.name, self.name))
            else:
                raise  # re-raise exception if a different error occurred

    @classmethod
    def controls_location(cls, location):
        """
        Check if a location is controlled by the vcs.
        It is meant to be overridden to implement smarter detection
        mechanisms for specific vcs.
        """
        logger.debug('Checking in %s for %s (%s)...',
                     location, cls.dirname, cls.name)
        path = os.path.join(location, cls.dirname)
        return os.path.exists(path)
450
451
def get_src_requirement(dist, location):
    """Return a requirement string for re-downloading the editable source
    at *location*, falling back to ``dist.as_requirement()`` when the VCS
    cannot be identified or its executable is missing."""
    backend = vcs.get_backend_from_location(location)
    if not backend:
        logger.warning(
            'cannot determine version of editable source in %s (is not SVN '
            'checkout, Git clone, Mercurial clone or Bazaar branch)',
            location,
        )
        return dist.as_requirement()
    try:
        return backend().get_src_requirement(dist,
                                             location)
    except BadCommand:
        # The backend's executable is not on PATH.
        logger.warning(
            'cannot determine version of editable source in %s '
            '(%s command not found in path)',
            location,
            backend.name,
        )
        return dist.as_requirement()
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/vcs/bazaar.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/vcs/bazaar.py
new file mode 100644
index 0000000..6ed629a
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/vcs/bazaar.py
@@ -0,0 +1,113 @@
1from __future__ import absolute_import
2
3import logging
4import os
5
6from pip._vendor.six.moves.urllib import parse as urllib_parse
7
8from pip._internal.download import path_to_url
9from pip._internal.utils.misc import display_path, rmtree
10from pip._internal.utils.temp_dir import TempDirectory
11from pip._internal.vcs import VersionControl, vcs
12
13logger = logging.getLogger(__name__)
14
15
class Bazaar(VersionControl):
    """Bazaar (bzr) version-control backend."""

    name = 'bzr'
    dirname = '.bzr'
    repo_name = 'branch'
    schemes = (
        'bzr', 'bzr+http', 'bzr+https', 'bzr+ssh', 'bzr+sftp', 'bzr+ftp',
        'bzr+lp',
    )

    def __init__(self, url=None, *args, **kwargs):
        super(Bazaar, self).__init__(url, *args, **kwargs)
        # This is only needed for python <2.7.5
        # Register lp but do not expose as a scheme to support bzr+lp.
        if getattr(urllib_parse, 'uses_fragment', None):
            urllib_parse.uses_fragment.extend(['lp'])

    def get_base_rev_args(self, rev):
        return ['-r', rev]

    def export(self, location):
        """
        Export the Bazaar repository at the url to the destination location
        """
        # Remove the location to make sure Bazaar can export it correctly
        if os.path.exists(location):
            rmtree(location)

        with TempDirectory(kind="export") as temp_dir:
            # Branch into a temp dir first, then 'bzr export' from there.
            self.unpack(temp_dir.path)

            self.run_command(
                ['export', location],
                cwd=temp_dir.path, show_stdout=False,
            )

    def switch(self, dest, url, rev_options):
        self.run_command(['switch', url], cwd=dest)

    def update(self, dest, rev_options):
        cmd_args = ['pull', '-q'] + rev_options.to_args()
        self.run_command(cmd_args, cwd=dest)

    def obtain(self, dest):
        url, rev = self.get_url_rev()
        rev_options = self.make_rev_options(rev)
        if self.check_destination(dest, url, rev_options):
            rev_display = rev_options.to_display()
            logger.info(
                'Checking out %s%s to %s',
                url,
                rev_display,
                display_path(dest),
            )
            cmd_args = ['branch', '-q'] + rev_options.to_args() + [url, dest]
            self.run_command(cmd_args)

    def get_url_rev(self):
        # hotfix the URL scheme after removing bzr+ from bzr+ssh:// readd it
        url, rev = super(Bazaar, self).get_url_rev()
        if url.startswith('ssh://'):
            url = 'bzr+' + url
        return url, rev

    def get_url(self, location):
        # Parse 'bzr info' output for the branch / parent-branch URL.
        urls = self.run_command(['info'], show_stdout=False, cwd=location)
        for line in urls.splitlines():
            line = line.strip()
            for x in ('checkout of branch: ',
                      'parent branch: '):
                if line.startswith(x):
                    repo = line.split(x)[1]
                    if self._is_local_repository(repo):
                        return path_to_url(repo)
                    return repo
        return None

    def get_revision(self, location):
        revision = self.run_command(
            ['revno'], show_stdout=False, cwd=location,
        )
        # Last line of 'bzr revno' output is the revision number.
        return revision.splitlines()[-1]

    def get_src_requirement(self, dist, location):
        repo = self.get_url(location)
        if not repo:
            return None
        if not repo.lower().startswith('bzr:'):
            repo = 'bzr+' + repo
        egg_project_name = dist.egg_name().split('-', 1)[0]
        current_rev = self.get_revision(location)
        return '%s@%s#egg=%s' % (repo, current_rev, egg_project_name)

    def is_commit_id_equal(self, dest, name):
        """Always assume the versions don't match"""
        return False
111
112
113vcs.register(Bazaar)
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/vcs/git.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/vcs/git.py
new file mode 100644
index 0000000..7a63dfa
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/vcs/git.py
@@ -0,0 +1,311 @@
1from __future__ import absolute_import
2
3import logging
4import os.path
5import re
6
7from pip._vendor.packaging.version import parse as parse_version
8from pip._vendor.six.moves.urllib import parse as urllib_parse
9from pip._vendor.six.moves.urllib import request as urllib_request
10
11from pip._internal.compat import samefile
12from pip._internal.exceptions import BadCommand
13from pip._internal.utils.misc import display_path
14from pip._internal.utils.temp_dir import TempDirectory
15from pip._internal.vcs import VersionControl, vcs
16
17urlsplit = urllib_parse.urlsplit
18urlunsplit = urllib_parse.urlunsplit
19
20
21logger = logging.getLogger(__name__)
22
23
# A run of 40 hex characters, the shape of a full SHA-1 commit id.
HASH_REGEX = re.compile('[a-fA-F0-9]{40}')


def looks_like_hash(sha):
    """Return True if *sha* begins with 40 hex characters."""
    return HASH_REGEX.match(sha) is not None
29
30
class Git(VersionControl):
    """Version control backend for Git repositories."""

    name = 'git'
    dirname = '.git'
    repo_name = 'clone'
    schemes = (
        'git', 'git+http', 'git+https', 'git+ssh', 'git+git', 'git+file',
    )
    # Prevent the user's environment variables from interfering with pip:
    # https://github.com/pypa/pip/issues/1130
    unset_environ = ('GIT_DIR', 'GIT_WORK_TREE')
    default_arg_rev = 'HEAD'

    def __init__(self, url=None, *args, **kwargs):

        # Works around an apparent Git bug
        # (see http://article.gmane.org/gmane.comp.version-control.git/146500)
        if url:
            scheme, netloc, path, query, fragment = urlsplit(url)
            if scheme.endswith('file'):
                initial_slashes = path[:-len(path.lstrip('/'))]
                newpath = (
                    initial_slashes +
                    urllib_request.url2pathname(path)
                    .replace('\\', '/').lstrip('/')
                )
                url = urlunsplit((scheme, netloc, newpath, query, fragment))
                after_plus = scheme.find('+') + 1
                url = scheme[:after_plus] + urlunsplit(
                    (scheme[after_plus:], netloc, newpath, query, fragment),
                )

        super(Git, self).__init__(url, *args, **kwargs)

    def get_base_rev_args(self, rev):
        # Git takes a revision as a bare positional argument.
        return [rev]

    def get_git_version(self):
        """Return the installed git version as a packaging Version."""
        VERSION_PFX = 'git version '
        version = self.run_command(['version'], show_stdout=False)
        if version.startswith(VERSION_PFX):
            version = version[len(VERSION_PFX):].split()[0]
        else:
            version = ''
        # Keep only the first 3 components of the git version because
        # on Windows it is x.y.z.windows.t, and that parses as a
        # LegacyVersion, which always compares smaller than a Version.
        version = '.'.join(version.split('.')[:3])
        return parse_version(version)

    def export(self, location):
        """Export the Git repository at the url to the destination location"""
        if not location.endswith('/'):
            location = location + '/'

        with TempDirectory(kind="export") as temp_dir:
            self.unpack(temp_dir.path)
            self.run_command(
                ['checkout-index', '-a', '-f', '--prefix', location],
                show_stdout=False, cwd=temp_dir.path
            )

    def get_revision_sha(self, dest, rev):
        """
        Return a commit hash for the given revision if it names a remote
        branch or tag. Otherwise, return None.

        Args:
          dest: the repository directory.
          rev: the revision name.
        """
        # Pass rev to pre-filter the list.
        output = self.run_command(['show-ref', rev], cwd=dest,
                                  show_stdout=False, on_returncode='ignore')
        refs = {}
        for line in output.strip().splitlines():
            try:
                sha, ref = line.split()
            except ValueError:
                # Include the offending line to simplify troubleshooting if
                # this error ever occurs.
                raise ValueError('unexpected show-ref line: {!r}'.format(line))

            refs[ref] = sha

        branch_ref = 'refs/remotes/origin/{}'.format(rev)
        tag_ref = 'refs/tags/{}'.format(rev)

        return refs.get(branch_ref) or refs.get(tag_ref)

    def check_rev_options(self, dest, rev_options):
        """Check the revision options before checkout.

        Returns a new RevOptions object for the SHA1 of the branch or tag
        if found.

        Args:
          dest: the repository directory.
          rev_options: a RevOptions object.
        """
        rev = rev_options.arg_rev
        sha = self.get_revision_sha(dest, rev)

        if sha is not None:
            return rev_options.make_new(sha)

        # Do not show a warning for the common case of something that has
        # the form of a Git commit hash.
        if not looks_like_hash(rev):
            logger.warning(
                "Did not find branch or tag '%s', assuming revision or ref.",
                rev,
            )
        return rev_options

    def is_commit_id_equal(self, dest, name):
        """
        Return whether the current commit hash equals the given name.

        Args:
          dest: the repository directory.
          name: a string name.
        """
        if not name:
            # Then avoid an unnecessary subprocess call.
            return False

        return self.get_revision(dest) == name

    def switch(self, dest, url, rev_options):
        """Repoint the checkout at *dest* to *url* and check out rev_options."""
        self.run_command(['config', 'remote.origin.url', url], cwd=dest)
        cmd_args = ['checkout', '-q'] + rev_options.to_args()
        self.run_command(cmd_args, cwd=dest)

        self.update_submodules(dest)

    def update(self, dest, rev_options):
        """Fetch from the default remote and hard-reset to rev_options."""
        # First fetch changes from the default remote
        if self.get_git_version() >= parse_version('1.9.0'):
            # fetch tags in addition to everything else
            self.run_command(['fetch', '-q', '--tags'], cwd=dest)
        else:
            self.run_command(['fetch', '-q'], cwd=dest)
        # Then reset to wanted revision (maybe even origin/master)
        rev_options = self.check_rev_options(dest, rev_options)
        cmd_args = ['reset', '--hard', '-q'] + rev_options.to_args()
        self.run_command(cmd_args, cwd=dest)
        #: update submodules
        self.update_submodules(dest)

    def obtain(self, dest):
        """Clone self.url into *dest* and check out the requested revision."""
        url, rev = self.get_url_rev()
        rev_options = self.make_rev_options(rev)
        if self.check_destination(dest, url, rev_options):
            rev_display = rev_options.to_display()
            logger.info(
                'Cloning %s%s to %s', url, rev_display, display_path(dest),
            )
            self.run_command(['clone', '-q', url, dest])

            if rev:
                rev_options = self.check_rev_options(dest, rev_options)
                # Only do a checkout if the current commit id doesn't match
                # the requested revision.
                if not self.is_commit_id_equal(dest, rev_options.rev):
                    rev = rev_options.rev
                    # Only fetch the revision if it's a ref
                    if rev.startswith('refs/'):
                        self.run_command(
                            ['fetch', '-q', url] + rev_options.to_args(),
                            cwd=dest,
                        )
                        # Change the revision to the SHA of the ref we fetched
                        rev = 'FETCH_HEAD'
                    self.run_command(['checkout', '-q', rev], cwd=dest)

            #: repo may contain submodules
            self.update_submodules(dest)

    def get_url(self, location):
        """Return URL of the first remote encountered, preferring origin."""
        remotes = self.run_command(
            ['config', '--get-regexp', r'remote\..*\.url'],
            show_stdout=False, cwd=location,
        )
        remotes = remotes.splitlines()
        found_remote = remotes[0]
        for remote in remotes:
            if remote.startswith('remote.origin.url '):
                found_remote = remote
                break
        url = found_remote.split(' ')[1]
        return url.strip()

    def get_revision(self, location):
        """Return the SHA of the currently checked-out commit."""
        current_rev = self.run_command(
            ['rev-parse', 'HEAD'], show_stdout=False, cwd=location,
        )
        return current_rev.strip()

    def _get_subdirectory(self, location):
        """Return the relative path of setup.py to the git repo root."""
        # find the repo root
        git_dir = self.run_command(['rev-parse', '--git-dir'],
                                   show_stdout=False, cwd=location).strip()
        if not os.path.isabs(git_dir):
            git_dir = os.path.join(location, git_dir)
        root_dir = os.path.join(git_dir, '..')
        # find setup.py
        orig_location = location
        while not os.path.exists(os.path.join(location, 'setup.py')):
            last_location = location
            location = os.path.dirname(location)
            if location == last_location:
                # We've traversed up to the root of the filesystem without
                # finding setup.py
                logger.warning(
                    "Could not find setup.py for directory %s (tried all "
                    "parent directories)",
                    orig_location,
                )
                return None
        # relative path of setup.py to repo root
        if samefile(root_dir, location):
            return None
        return os.path.relpath(location, root_dir)

    def get_src_requirement(self, dist, location):
        """Return a 'git+URL@sha#egg=name' requirement string, or None
        if no remote URL can be determined."""
        repo = self.get_url(location)
        # Bug fix: guard against a missing/empty URL *before* using it.
        # Previously repo.lower() and the 'git+' prefix ran first, so an
        # empty remote produced a bogus 'git+@sha#egg=...' requirement.
        if not repo:
            return None
        if not repo.lower().startswith('git:'):
            repo = 'git+' + repo
        egg_project_name = dist.egg_name().split('-', 1)[0]
        current_rev = self.get_revision(location)
        req = '%s@%s#egg=%s' % (repo, current_rev, egg_project_name)
        subdirectory = self._get_subdirectory(location)
        if subdirectory:
            req += '&subdirectory=' + subdirectory
        return req

    def get_url_rev(self):
        """
        Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'.
        That's required because although they use SSH they sometimes don't
        work with a ssh:// scheme (e.g. GitHub). But we need a scheme for
        parsing. Hence we remove it again afterwards and return it as a stub.
        """
        if '://' not in self.url:
            assert 'file:' not in self.url
            self.url = self.url.replace('git+', 'git+ssh://')
            url, rev = super(Git, self).get_url_rev()
            url = url.replace('ssh://', '')
        else:
            url, rev = super(Git, self).get_url_rev()

        return url, rev

    def update_submodules(self, location):
        """Initialize and update submodules, if a .gitmodules file exists."""
        if not os.path.exists(os.path.join(location, '.gitmodules')):
            return
        self.run_command(
            ['submodule', 'update', '--init', '--recursive', '-q'],
            cwd=location,
        )

    @classmethod
    def controls_location(cls, location):
        """Return True if *location* appears to be inside a git work tree."""
        if super(Git, cls).controls_location(location):
            return True
        try:
            # 'git rev-parse' exits 0 inside a work tree; a non-empty
            # result here means a non-zero return code was ignored.
            r = cls().run_command(['rev-parse'],
                                  cwd=location,
                                  show_stdout=False,
                                  on_returncode='ignore')
            return not r
        except BadCommand:
            logger.debug("could not determine if %s is under git control "
                         "because git is not available", location)
            return False
309
310
311vcs.register(Git)
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/vcs/mercurial.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/vcs/mercurial.py
new file mode 100644
index 0000000..3936473
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/vcs/mercurial.py
@@ -0,0 +1,105 @@
1from __future__ import absolute_import
2
3import logging
4import os
5
6from pip._vendor.six.moves import configparser
7
8from pip._internal.download import path_to_url
9from pip._internal.utils.misc import display_path
10from pip._internal.utils.temp_dir import TempDirectory
11from pip._internal.vcs import VersionControl, vcs
12
13logger = logging.getLogger(__name__)
14
15
class Mercurial(VersionControl):
    """Version control backend for Mercurial repositories."""

    name = 'hg'
    dirname = '.hg'
    repo_name = 'clone'
    schemes = ('hg', 'hg+http', 'hg+https', 'hg+ssh', 'hg+static-http')

    def get_base_rev_args(self, rev):
        # Mercurial takes a revision as a bare positional argument.
        return [rev]

    def export(self, location):
        """Export the Hg repository at the url to the destination location"""
        with TempDirectory(kind="export") as temp_dir:
            self.unpack(temp_dir.path)

            self.run_command(
                ['archive', location], show_stdout=False, cwd=temp_dir.path
            )

    def switch(self, dest, url, rev_options):
        """Point the checkout at *dest* to *url* and update to rev_options.

        Rewrites the repository's hgrc 'paths.default' entry; on failure
        the update step is skipped and a warning is logged.
        """
        repo_config = os.path.join(dest, self.dirname, 'hgrc')
        config = configparser.SafeConfigParser()
        try:
            config.read(repo_config)
            config.set('paths', 'default', url)
            with open(repo_config, 'w') as config_file:
                config.write(config_file)
        except (OSError, configparser.NoSectionError) as exc:
            logger.warning(
                'Could not switch Mercurial repository to %s: %s', url, exc,
            )
        else:
            cmd_args = ['update', '-q'] + rev_options.to_args()
            self.run_command(cmd_args, cwd=dest)

    def update(self, dest, rev_options):
        """Pull new changesets and update the working copy to rev_options."""
        self.run_command(['pull', '-q'], cwd=dest)
        cmd_args = ['update', '-q'] + rev_options.to_args()
        self.run_command(cmd_args, cwd=dest)

    def obtain(self, dest):
        """Clone self.url into *dest* and update to the requested revision."""
        url, rev = self.get_url_rev()
        rev_options = self.make_rev_options(rev)
        if self.check_destination(dest, url, rev_options):
            rev_display = rev_options.to_display()
            logger.info(
                'Cloning hg %s%s to %s',
                url,
                rev_display,
                display_path(dest),
            )
            self.run_command(['clone', '--noupdate', '-q', url, dest])
            cmd_args = ['update', '-q'] + rev_options.to_args()
            self.run_command(cmd_args, cwd=dest)

    def get_url(self, location):
        """Return the 'paths.default' URL of the checkout at *location*."""
        url = self.run_command(
            ['showconfig', 'paths.default'],
            show_stdout=False, cwd=location).strip()
        if self._is_local_repository(url):
            url = path_to_url(url)
        return url.strip()

    def get_revision(self, location):
        """Return the local revision number of the working copy's parent."""
        current_revision = self.run_command(
            ['parents', '--template={rev}'],
            show_stdout=False, cwd=location).strip()
        return current_revision

    def get_revision_hash(self, location):
        """Return the full changeset hash of the working copy's parent."""
        current_rev_hash = self.run_command(
            ['parents', '--template={node}'],
            show_stdout=False, cwd=location).strip()
        return current_rev_hash

    def get_src_requirement(self, dist, location):
        """Return an 'hg+URL@hash#egg=name' requirement string, or None
        if no repository URL can be determined."""
        repo = self.get_url(location)
        # Bug fix: guard against a missing/empty URL *before* using it.
        # Previously repo.lower() and the 'hg+' prefix ran first, so an
        # empty 'paths.default' produced a bogus 'hg+@hash#egg=...' value.
        if not repo:
            return None
        if not repo.lower().startswith('hg:'):
            repo = 'hg+' + repo
        egg_project_name = dist.egg_name().split('-', 1)[0]
        current_rev_hash = self.get_revision_hash(location)
        return '%s@%s#egg=%s' % (repo, current_rev_hash, egg_project_name)

    def is_commit_id_equal(self, dest, name):
        """Always assume the versions don't match"""
        return False
103
104
105vcs.register(Mercurial)
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/vcs/subversion.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/vcs/subversion.py
new file mode 100644
index 0000000..95e5440
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/vcs/subversion.py
@@ -0,0 +1,271 @@
1from __future__ import absolute_import
2
3import logging
4import os
5import re
6
7from pip._vendor.six.moves.urllib import parse as urllib_parse
8
9from pip._internal.index import Link
10from pip._internal.utils.logging import indent_log
11from pip._internal.utils.misc import display_path, rmtree
12from pip._internal.vcs import VersionControl, vcs
13
14_svn_xml_url_re = re.compile('url="([^"]+)"')
15_svn_rev_re = re.compile(r'committed-rev="(\d+)"')
16_svn_url_re = re.compile(r'URL: (.+)')
17_svn_revision_re = re.compile(r'Revision: (.+)')
18_svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"')
19_svn_info_xml_url_re = re.compile(r'<url>(.*)</url>')
20
21
22logger = logging.getLogger(__name__)
23
24
class Subversion(VersionControl):
    """Version control backend for Subversion checkouts."""

    name = 'svn'
    dirname = '.svn'
    repo_name = 'checkout'
    schemes = ('svn', 'svn+ssh', 'svn+http', 'svn+https', 'svn+svn')

    def get_base_rev_args(self, rev):
        # Subversion takes revisions as '-r REV'.
        return ['-r', rev]

    def get_info(self, location):
        """Returns (url, revision), where both are strings"""
        assert not location.rstrip('/').endswith(self.dirname), \
            'Bad directory: %s' % location
        # LANG=C forces untranslated output so the regexes below match.
        output = self.run_command(
            ['info', location],
            show_stdout=False,
            extra_environ={'LANG': 'C'},
        )
        match = _svn_url_re.search(output)
        if not match:
            logger.warning(
                'Cannot determine URL of svn checkout %s',
                display_path(location),
            )
            logger.debug('Output that cannot be parsed: \n%s', output)
            return None, None
        url = match.group(1).strip()
        match = _svn_revision_re.search(output)
        if not match:
            logger.warning(
                'Cannot determine revision of svn checkout %s',
                display_path(location),
            )
            logger.debug('Output that cannot be parsed: \n%s', output)
            return url, None
        return url, match.group(1)

    def export(self, location):
        """Export the svn repository at the url to the destination location"""
        url, rev = self.get_url_rev()
        # Auth credentials are passed via --username/--password flags,
        # not embedded in the URL (see get_rev_options below).
        rev_options = get_rev_options(self, url, rev)
        url = self.remove_auth_from_url(url)
        logger.info('Exporting svn repository %s to %s', url, location)
        with indent_log():
            if os.path.exists(location):
                # Subversion doesn't like to check out over an existing
                # directory --force fixes this, but was only added in svn 1.5
                rmtree(location)
            cmd_args = ['export'] + rev_options.to_args() + [url, location]
            self.run_command(cmd_args, show_stdout=False)

    def switch(self, dest, url, rev_options):
        """Point the checkout at *dest* to a new repository *url*."""
        cmd_args = ['switch'] + rev_options.to_args() + [url, dest]
        self.run_command(cmd_args)

    def update(self, dest, rev_options):
        """Update the checkout at *dest* to the revision in rev_options."""
        cmd_args = ['update'] + rev_options.to_args() + [dest]
        self.run_command(cmd_args)

    def obtain(self, dest):
        """Check out self.url into *dest* at the requested revision."""
        url, rev = self.get_url_rev()
        rev_options = get_rev_options(self, url, rev)
        url = self.remove_auth_from_url(url)
        if self.check_destination(dest, url, rev_options):
            rev_display = rev_options.to_display()
            logger.info(
                'Checking out %s%s to %s',
                url,
                rev_display,
                display_path(dest),
            )
            cmd_args = ['checkout', '-q'] + rev_options.to_args() + [url, dest]
            self.run_command(cmd_args)

    def get_location(self, dist, dependency_links):
        """Return the first dependency link whose #egg fragment matches
        *dist*'s key, stripped of its fragment; None if no link matches."""
        for url in dependency_links:
            egg_fragment = Link(url).egg_fragment
            if not egg_fragment:
                continue
            if '-' in egg_fragment:
                # FIXME: will this work when a package has - in the name?
                key = '-'.join(egg_fragment.split('-')[:-1]).lower()
            else:
                key = egg_fragment
            if key == dist.key:
                return url.split('#', 1)[0]
        return None

    def get_revision(self, location):
        """
        Return the maximum revision for all files under a given location
        """
        # Note: taken from setuptools.command.egg_info
        revision = 0

        for base, dirs, files in os.walk(location):
            if self.dirname not in dirs:
                dirs[:] = []
                continue    # no sense walking uncontrolled subdirs
            dirs.remove(self.dirname)
            entries_fn = os.path.join(base, self.dirname, 'entries')
            if not os.path.exists(entries_fn):
                # FIXME: should we warn?
                continue

            dirurl, localrev = self._get_svn_url_rev(base)

            if base == location:
                # NOTE(review): from here on, `base` holds the root *URL*
                # (not a filesystem path), so the startswith() below compares
                # each subdir's URL against the root URL — confirm intended.
                base = dirurl + '/'   # save the root url
            elif not dirurl or not dirurl.startswith(base):
                dirs[:] = []
                continue    # not part of the same svn tree, skip it
            revision = max(revision, localrev)
        return revision

    def get_url_rev(self):
        # hotfix the URL scheme after removing svn+ from svn+ssh:// readd it
        url, rev = super(Subversion, self).get_url_rev()
        if url.startswith('ssh://'):
            url = 'svn+' + url
        return url, rev

    def get_url(self, location):
        # In cases where the source is in a subdirectory, not alongside
        # setup.py we have to look up in the location until we find a real
        # setup.py
        orig_location = location
        while not os.path.exists(os.path.join(location, 'setup.py')):
            last_location = location
            location = os.path.dirname(location)
            if location == last_location:
                # We've traversed up to the root of the filesystem without
                # finding setup.py
                logger.warning(
                    "Could not find setup.py for directory %s (tried all "
                    "parent directories)",
                    orig_location,
                )
                return None

        return self._get_svn_url_rev(location)[0]

    def _get_svn_url_rev(self, location):
        """Return (url, rev) for the checkout at *location*, handling the
        several on-disk formats used by different Subversion versions."""
        from pip._internal.exceptions import InstallationError

        entries_path = os.path.join(location, self.dirname, 'entries')
        if os.path.exists(entries_path):
            with open(entries_path) as f:
                data = f.read()
        else:  # subversion >= 1.7 does not have the 'entries' file
            data = ''

        # Working-copy format 8/9/10 (svn 1.4-1.6): a \n\x0c\n-separated
        # plain-text entries file.
        if (data.startswith('8') or
                data.startswith('9') or
                data.startswith('10')):
            data = list(map(str.splitlines, data.split('\n\x0c\n')))
            del data[0][0]  # get rid of the '8'
            url = data[0][3]
            revs = [int(d[9]) for d in data if len(d) > 9 and d[9]] + [0]
        elif data.startswith('<?xml'):
            # Pre-1.4 XML entries format.
            match = _svn_xml_url_re.search(data)
            if not match:
                raise ValueError('Badly formatted data: %r' % data)
            url = match.group(1)    # get repository URL
            revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)] + [0]
        else:
            try:
                # subversion >= 1.7
                xml = self.run_command(
                    ['info', '--xml', location],
                    show_stdout=False,
                )
                url = _svn_info_xml_url_re.search(xml).group(1)
                revs = [
                    int(m.group(1)) for m in _svn_info_xml_rev_re.finditer(xml)
                ]
            except InstallationError:
                url, revs = None, []

        if revs:
            rev = max(revs)
        else:
            rev = 0

        return url, rev

    def get_src_requirement(self, dist, location):
        """Return a 'svn+URL@rev#egg=name' requirement string, or None."""
        repo = self.get_url(location)
        if repo is None:
            return None
        # FIXME: why not project name?
        egg_project_name = dist.egg_name().split('-', 1)[0]
        rev = self.get_revision(location)
        return 'svn+%s@%s#egg=%s' % (repo, rev, egg_project_name)

    def is_commit_id_equal(self, dest, name):
        """Always assume the versions don't match"""
        return False

    @staticmethod
    def remove_auth_from_url(url):
        # Return a copy of url with 'username:password@' removed.
        # username/pass params are passed to subversion through flags
        # and are not recognized in the url.

        # parsed url
        purl = urllib_parse.urlsplit(url)
        stripped_netloc = \
            purl.netloc.split('@')[-1]

        # stripped url
        url_pieces = (
            purl.scheme, stripped_netloc, purl.path, purl.query, purl.fragment
        )
        surl = urllib_parse.urlunsplit(url_pieces)
        return surl
241
242
def get_rev_options(vcs, url, rev):
    """
    Return a RevOptions object for *rev*, adding --username/--password
    extra arguments extracted from any auth section of *url*.

    Args:
      vcs: a VersionControl instance (note: this parameter shadows the
        module-level ``vcs`` registry inside this function).
      url: the repository URL, possibly containing 'user:pass@'.
      rev: the revision string, or None.
    """
    # SplitResult has had .username/.password attributes since Python 2.5;
    # the old hasattr() fallback that parsed the netloc by hand was dead
    # code on every Python that pip 10 supports, so it has been removed.
    parsed = urllib_parse.urlsplit(url)
    username, password = parsed.username, parsed.password

    extra_args = []
    if username:
        extra_args += ['--username', username]
    if password:
        extra_args += ['--password', password]

    return vcs.make_rev_options(rev, extra_args=extra_args)
269
270
271vcs.register(Subversion)
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/wheel.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/wheel.py
new file mode 100644
index 0000000..36459dd
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/wheel.py
@@ -0,0 +1,817 @@
1"""
2Support for installing and building the "wheel" binary package format.
3"""
4from __future__ import absolute_import
5
6import collections
7import compileall
8import copy
9import csv
10import hashlib
11import logging
12import os.path
13import re
14import shutil
15import stat
16import sys
17import warnings
18from base64 import urlsafe_b64encode
19from email.parser import Parser
20
21from pip._vendor import pkg_resources
22from pip._vendor.distlib.scripts import ScriptMaker
23from pip._vendor.packaging.utils import canonicalize_name
24from pip._vendor.six import StringIO
25
26from pip._internal import pep425tags
27from pip._internal.build_env import BuildEnvironment
28from pip._internal.download import path_to_url, unpack_url
29from pip._internal.exceptions import (
30 InstallationError, InvalidWheelFilename, UnsupportedWheel,
31)
32from pip._internal.locations import (
33 PIP_DELETE_MARKER_FILENAME, distutils_scheme,
34)
35from pip._internal.utils.logging import indent_log
36from pip._internal.utils.misc import (
37 call_subprocess, captured_stdout, ensure_dir, read_chunks,
38)
39from pip._internal.utils.setuptools_build import SETUPTOOLS_SHIM
40from pip._internal.utils.temp_dir import TempDirectory
41from pip._internal.utils.typing import MYPY_CHECK_RUNNING
42from pip._internal.utils.ui import open_spinner
43
44if MYPY_CHECK_RUNNING:
45 from typing import Dict, List, Optional
46
47wheel_ext = '.whl'
48
49VERSION_COMPATIBLE = (1, 0)
50
51
52logger = logging.getLogger(__name__)
53
54
def rehash(path, algo='sha256', blocksize=1 << 20):
    """Return (hash, length) for path using hashlib.new(algo).

    The hash is returned in PEP 376 RECORD form, '<algo>=<urlsafe-b64>'
    with base64 padding stripped; length is the file size in bytes.
    """
    h = hashlib.new(algo)
    length = 0
    with open(path, 'rb') as f:
        for block in read_chunks(f, size=blocksize):
            length += len(block)
            h.update(block)
    # Bug fix: the prefix previously hard-coded 'sha256=' even when a
    # different algorithm was requested via *algo*, mislabeling the digest.
    digest = algo + '=' + urlsafe_b64encode(
        h.digest()
    ).decode('latin1').rstrip('=')
    return (digest, length)
67
68
def open_for_csv(name, mode):
    """Open *name* for csv use with version-correct newline handling:
    binary mode on Python 2, newline='' on Python 3."""
    if sys.version_info[0] >= 3:
        extra_mode, kwargs = '', {'newline': ''}
    else:
        extra_mode, kwargs = 'b', {}
    return open(name, mode + extra_mode, **kwargs)
77
78
def fix_script(path):
    """Replace a '#!python' shebang in *path* with the running interpreter.

    Return True if the file was changed, False if it had no matching
    shebang; a non-regular-file path is ignored (returns None).
    """
    # XXX RECORD hashes will need to be updated
    if not os.path.isfile(path):
        return None
    with open(path, 'rb') as script:
        firstline = script.readline()
        if not firstline.startswith(b'#!python'):
            return False
        exename = sys.executable.encode(sys.getfilesystemencoding())
        firstline = b'#!' + exename + os.linesep.encode("ascii")
        rest = script.read()
    with open(path, 'wb') as script:
        script.write(firstline)
        script.write(rest)
    return True
95
96
# Matches '<name>-<version>.dist-info' directory names.
dist_info_re = re.compile(r"""^(?P<namever>(?P<name>.+?)(-(?P<ver>.+?))?)
                                \.dist-info$""", re.VERBOSE)


def root_is_purelib(name, wheeldir):
    """
    Return True if the extracted wheel in wheeldir should go into purelib.
    """
    folded_name = name.replace("-", "_")
    for entry in os.listdir(wheeldir):
        match = dist_info_re.match(entry)
        if not (match and match.group('name') == folded_name):
            continue
        wheel_metadata = os.path.join(wheeldir, entry, 'WHEEL')
        with open(wheel_metadata) as wheel:
            for line in wheel:
                if line.lower().rstrip() == "root-is-purelib: true":
                    return True
    return False
115
116
def get_entrypoints(filename):
    """Parse the entry_points.txt file at *filename*.

    Returns (console, gui): two dicts mapping script names to
    'module:function' strings. A missing file yields two empty dicts.
    """
    if not os.path.exists(filename):
        return {}, {}

    # This is done because you can pass a string to entry_points wrappers
    # which means that they may or may not be valid INI files. Strip each
    # line's surrounding whitespace to make them valid.
    data = StringIO()
    with open(filename) as fp:
        for line in fp:
            data.write(line.strip())
            data.write("\n")
    data.seek(0)

    # get the entry points and then the script names
    entry_points = pkg_resources.EntryPoint.parse_map(data)

    def _as_pair(ep):
        # String form is 'name = module:func'; drop spaces, split on '='.
        return str(ep).replace(" ", "").split("=")

    console = dict(
        _as_pair(ep)
        for ep in entry_points.get('console_scripts', {}).values()
    )
    gui = dict(
        _as_pair(ep)
        for ep in entry_points.get('gui_scripts', {}).values()
    )
    return console, gui
146
147
def message_about_scripts_not_on_PATH(scripts):
    # type: (List[str]) -> Optional[str]
    """Determine if any scripts are not on PATH and format a warning.

    Returns a warning message if one or more scripts are not on PATH,
    otherwise None.
    """
    if not scripts:
        return None

    # Group the script names by their installation directory.
    grouped_by_dir = collections.defaultdict(set)  # type: Dict[str, set]
    for destfile in scripts:
        grouped_by_dir[os.path.dirname(destfile)].add(
            os.path.basename(destfile))

    # Directories on PATH never warrant a warning; neither does the
    # directory holding sys.executable, which covers venv invocations
    # without activating the venv.
    dont_warn = {
        os.path.normcase(p) for p in os.environ["PATH"].split(os.pathsep)
    }
    dont_warn.add(os.path.normcase(os.path.dirname(sys.executable)))

    warn_for = {
        directory: names for directory, names in grouped_by_dir.items()
        if os.path.normcase(directory) not in dont_warn
    }
    if not warn_for:
        return None

    # Format a message
    msg_lines = []
    for directory, names in warn_for.items():
        names = sorted(names)
        if len(names) == 1:
            start_text = "script {} is".format(names[0])
        else:
            start_text = "scripts {} are".format(
                ", ".join(names[:-1]) + " and " + names[-1]
            )

        msg_lines.append(
            "The {} installed in '{}' which is not on PATH."
            .format(start_text, directory)
        )

    last_line_fmt = (
        "Consider adding {} to PATH or, if you prefer "
        "to suppress this warning, use --no-warn-script-location."
    )
    subject = "this directory" if len(msg_lines) == 1 else "these directories"
    msg_lines.append(last_line_fmt.format(subject))

    # Returns the formatted multiline message
    return "\n".join(msg_lines)
206
207
208def move_wheel_files(name, req, wheeldir, user=False, home=None, root=None,
209 pycompile=True, scheme=None, isolated=False, prefix=None,
210 warn_script_location=True):
211 """Install a wheel"""
212
213 if not scheme:
214 scheme = distutils_scheme(
215 name, user=user, home=home, root=root, isolated=isolated,
216 prefix=prefix,
217 )
218
219 if root_is_purelib(name, wheeldir):
220 lib_dir = scheme['purelib']
221 else:
222 lib_dir = scheme['platlib']
223
224 info_dir = []
225 data_dirs = []
226 source = wheeldir.rstrip(os.path.sep) + os.path.sep
227
228 # Record details of the files moved
229 # installed = files copied from the wheel to the destination
230 # changed = files changed while installing (scripts #! line typically)
231 # generated = files newly generated during the install (script wrappers)
232 installed = {}
233 changed = set()
234 generated = []
235
236 # Compile all of the pyc files that we're going to be installing
237 if pycompile:
238 with captured_stdout() as stdout:
239 with warnings.catch_warnings():
240 warnings.filterwarnings('ignore')
241 compileall.compile_dir(source, force=True, quiet=True)
242 logger.debug(stdout.getvalue())
243
244 def normpath(src, p):
245 return os.path.relpath(src, p).replace(os.path.sep, '/')
246
247 def record_installed(srcfile, destfile, modified=False):
248 """Map archive RECORD paths to installation RECORD paths."""
249 oldpath = normpath(srcfile, wheeldir)
250 newpath = normpath(destfile, lib_dir)
251 installed[oldpath] = newpath
252 if modified:
253 changed.add(destfile)
254
255 def clobber(source, dest, is_base, fixer=None, filter=None):
256 ensure_dir(dest) # common for the 'include' path
257
258 for dir, subdirs, files in os.walk(source):
259 basedir = dir[len(source):].lstrip(os.path.sep)
260 destdir = os.path.join(dest, basedir)
261 if is_base and basedir.split(os.path.sep, 1)[0].endswith('.data'):
262 continue
263 for s in subdirs:
264 destsubdir = os.path.join(dest, basedir, s)
265 if is_base and basedir == '' and destsubdir.endswith('.data'):
266 data_dirs.append(s)
267 continue
268 elif (is_base and
269 s.endswith('.dist-info') and
270 canonicalize_name(s).startswith(
271 canonicalize_name(req.name))):
272 assert not info_dir, ('Multiple .dist-info directories: ' +
273 destsubdir + ', ' +
274 ', '.join(info_dir))
275 info_dir.append(destsubdir)
276 for f in files:
277 # Skip unwanted files
278 if filter and filter(f):
279 continue
280 srcfile = os.path.join(dir, f)
281 destfile = os.path.join(dest, basedir, f)
282 # directory creation is lazy and after the file filtering above
283 # to ensure we don't install empty dirs; empty dirs can't be
284 # uninstalled.
285 ensure_dir(destdir)
286
287 # We use copyfile (not move, copy, or copy2) to be extra sure
288 # that we are not moving directories over (copyfile fails for
289 # directories) as well as to ensure that we are not copying
290 # over any metadata because we want more control over what
291 # metadata we actually copy over.
292 shutil.copyfile(srcfile, destfile)
293
294 # Copy over the metadata for the file, currently this only
295 # includes the atime and mtime.
296 st = os.stat(srcfile)
297 if hasattr(os, "utime"):
298 os.utime(destfile, (st.st_atime, st.st_mtime))
299
300 # If our file is executable, then make our destination file
301 # executable.
302 if os.access(srcfile, os.X_OK):
303 st = os.stat(srcfile)
304 permissions = (
305 st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
306 )
307 os.chmod(destfile, permissions)
308
309 changed = False
310 if fixer:
311 changed = fixer(destfile)
312 record_installed(srcfile, destfile, changed)
313
    # Install the wheel payload itself (everything outside *.data subtrees,
    # which were collected into data_dirs) into the lib directory.
    clobber(source, lib_dir, True)

    assert info_dir, "%s .dist-info directory not found" % req

    # Get the defined entry points
    ep_file = os.path.join(info_dir[0], 'entry_points.txt')
    console, gui = get_entrypoints(ep_file)
321
322 def is_entrypoint_wrapper(name):
323 # EP, EP.exe and EP-script.py are scripts generated for
324 # entry point EP by setuptools
325 if name.lower().endswith('.exe'):
326 matchname = name[:-4]
327 elif name.lower().endswith('-script.py'):
328 matchname = name[:-10]
329 elif name.lower().endswith(".pya"):
330 matchname = name[:-4]
331 else:
332 matchname = name
333 # Ignore setuptools-generated scripts
334 return (matchname in console or matchname in gui)
335
    # Second pass: install every recognised *.data subtree (scripts,
    # headers, purelib, ...) into the matching path of the install scheme.
    for datadir in data_dirs:
        fixer = None
        filter = None
        for subdir in os.listdir(os.path.join(wheeldir, datadir)):
            fixer = None
            # NOTE(review): `fixer` is reset for every subdir but `filter`
            # is not, so once a 'scripts' subdir has been seen the
            # entry-point filter stays active for the remaining subdirs of
            # this datadir — confirm whether that is intentional.
            if subdir == 'scripts':
                fixer = fix_script
                filter = is_entrypoint_wrapper
            source = os.path.join(wheeldir, datadir, subdir)
            dest = scheme[subdir]
            clobber(source, dest, False, fixer=fixer, filter=filter)

    maker = ScriptMaker(None, scheme['scripts'])

    # Ensure old scripts are overwritten.
    # See https://github.com/pypa/pip/issues/1800
    maker.clobber = True

    # Ensure we don't generate any variants for scripts because this is almost
    # never what somebody wants.
    # See https://bitbucket.org/pypa/distlib/issue/35/
    maker.variants = {''}

    # This is required because otherwise distlib creates scripts that are not
    # executable.
    # See https://bitbucket.org/pypa/distlib/issue/32/
    maker.set_mode = True
363
364 # Simplify the script and fix the fact that the default script swallows
365 # every single stack trace.
366 # See https://bitbucket.org/pypa/distlib/issue/34/
367 # See https://bitbucket.org/pypa/distlib/issue/33/
368 def _get_script_text(entry):
369 if entry.suffix is None:
370 raise InstallationError(
371 "Invalid script entry point: %s for req: %s - A callable "
372 "suffix is required. Cf https://packaging.python.org/en/"
373 "latest/distributing.html#console-scripts for more "
374 "information." % (entry, req)
375 )
376 return maker.script_template % {
377 "module": entry.prefix,
378 "import_name": entry.suffix.split(".")[0],
379 "func": entry.suffix,
380 }
381
    # Use our stack-trace-preserving script body (defined above) in place of
    # distlib's default wrapper.
    maker._get_script_text = _get_script_text
    maker.script_template = r"""# -*- coding: utf-8 -*-
import re
import sys

from %(module)s import %(import_name)s

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(%(func)s())
"""
393
394 # Special case pip and setuptools to generate versioned wrappers
395 #
396 # The issue is that some projects (specifically, pip and setuptools) use
397 # code in setup.py to create "versioned" entry points - pip2.7 on Python
398 # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into
399 # the wheel metadata at build time, and so if the wheel is installed with
400 # a *different* version of Python the entry points will be wrong. The
401 # correct fix for this is to enhance the metadata to be able to describe
402 # such versioned entry points, but that won't happen till Metadata 2.0 is
403 # available.
404 # In the meantime, projects using versioned entry points will either have
405 # incorrect versioned entry points, or they will not be able to distribute
406 # "universal" wheels (i.e., they will need a wheel per Python version).
407 #
408 # Because setuptools and pip are bundled with _ensurepip and virtualenv,
409 # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we
410 # override the versioned entry points in the wheel and generate the
411 # correct ones. This code is purely a short-term measure until Metadata 2.0
412 # is available.
413 #
414 # To add the level of hack in this section of code, in order to support
415 # ensurepip this code will look for an ``ENSUREPIP_OPTIONS`` environment
416 # variable which will control which version scripts get installed.
417 #
418 # ENSUREPIP_OPTIONS=altinstall
419 # - Only pipX.Y and easy_install-X.Y will be generated and installed
420 # ENSUREPIP_OPTIONS=install
421 # - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note
422 # that this option is technically if ENSUREPIP_OPTIONS is set and is
423 # not altinstall
424 # DEFAULT
425 # - The default behavior is to install pip, pipX, pipX.Y, easy_install
426 # and easy_install-X.Y.
427 pip_script = console.pop('pip', None)
428 if pip_script:
429 if "ENSUREPIP_OPTIONS" not in os.environ:
430 spec = 'pip = ' + pip_script
431 generated.extend(maker.make(spec))
432
433 if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
434 spec = 'pip%s = %s' % (sys.version[:1], pip_script)
435 generated.extend(maker.make(spec))
436
437 spec = 'pip%s = %s' % (sys.version[:3], pip_script)
438 generated.extend(maker.make(spec))
439 # Delete any other versioned pip entry points
440 pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)]
441 for k in pip_ep:
442 del console[k]
443 easy_install_script = console.pop('easy_install', None)
444 if easy_install_script:
445 if "ENSUREPIP_OPTIONS" not in os.environ:
446 spec = 'easy_install = ' + easy_install_script
447 generated.extend(maker.make(spec))
448
449 spec = 'easy_install-%s = %s' % (sys.version[:3], easy_install_script)
450 generated.extend(maker.make(spec))
451 # Delete any other versioned easy_install entry points
452 easy_install_ep = [
453 k for k in console if re.match(r'easy_install(-\d\.\d)?$', k)
454 ]
455 for k in easy_install_ep:
456 del console[k]
457
458 # Generate the console and GUI entry points specified in the wheel
459 if len(console) > 0:
460 generated_console_scripts = maker.make_multiple(
461 ['%s = %s' % kv for kv in console.items()]
462 )
463 generated.extend(generated_console_scripts)
464
465 if warn_script_location:
466 msg = message_about_scripts_not_on_PATH(generated_console_scripts)
467 if msg is not None:
468 logger.warn(msg)
469
470 if len(gui) > 0:
471 generated.extend(
472 maker.make_multiple(
473 ['%s = %s' % kv for kv in gui.items()],
474 {'gui': True}
475 )
476 )
477
    # Record pip as the installer
    installer = os.path.join(info_dir[0], 'INSTALLER')
    temp_installer = os.path.join(info_dir[0], 'INSTALLER.pip')
    with open(temp_installer, 'wb') as installer_file:
        installer_file.write(b'pip\n')
    # Write-then-rename so a failed write can't leave a truncated INSTALLER.
    shutil.move(temp_installer, installer)
    generated.append(installer)

    # Record details of all files installed
    record = os.path.join(info_dir[0], 'RECORD')
    temp_record = os.path.join(info_dir[0], 'RECORD.pip')
    with open_for_csv(record, 'r') as record_in:
        with open_for_csv(temp_record, 'w+') as record_out:
            reader = csv.reader(record_in)
            writer = csv.writer(record_out)
            # Rewrite the wheel's RECORD rows to the installed locations,
            # re-hashing any files that were modified during install.
            for row in reader:
                row[0] = installed.pop(row[0], row[0])
                if row[0] in changed:
                    row[1], row[2] = rehash(row[0])
                writer.writerow(row)
            # Files pip generated itself (scripts, INSTALLER) get fresh
            # hashes and sizes.
            for f in generated:
                h, l = rehash(f)
                writer.writerow((normpath(f, lib_dir), h, l))
            # Installed files that never appeared in the original RECORD
            # are appended without hash/size.
            for f in installed:
                writer.writerow((installed[f], '', ''))
    shutil.move(temp_record, record)
504
505
def wheel_version(source_dir):
    """
    Return the Wheel-Version of an extracted wheel, if possible.

    Otherwise, return False if we couldn't parse / extract it.

    :param source_dir: directory holding the unpacked wheel's metadata.
    :return: a tuple of ints, e.g. ``(1, 0)``, or False on any failure.
    """
    try:
        dist = [d for d in pkg_resources.find_on_path(None, source_dir)][0]

        wheel_data = dist.get_metadata('WHEEL')
        wheel_data = Parser().parsestr(wheel_data)

        version = wheel_data['Wheel-Version'].strip()
        version = tuple(map(int, version.split('.')))
        return version
    except Exception:
        # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
        # are no longer swallowed; any parse/lookup failure still yields
        # the documented False.
        return False
523
524
def check_compatibility(version, name):
    """
    Raises errors or warns if called with an incompatible Wheel-Version.

    A Wheel-Version a full major series ahead of what pip supports
    (e.g. 2.0 > 1.1) is refused outright; one that is only a minor
    version ahead (e.g. 1.2 > 1.1) merely triggers a warning.

    :param version: a 2-tuple representing a Wheel-Version (Major, Minor)
    :param name: name of wheel or package to raise exception about
    :raises UnsupportedWheel: when an incompatible Wheel-Version is given
    """
    if not version:
        raise UnsupportedWheel(
            "%s is in an unsupported or invalid wheel" % name
        )

    version_text = '.'.join(map(str, version))
    if version[0] > VERSION_COMPATIBLE[0]:
        raise UnsupportedWheel(
            "%s's Wheel-Version (%s) is not compatible with this version "
            "of pip" % (name, version_text)
        )
    if version > VERSION_COMPATIBLE:
        logger.warning(
            'Installing from a newer Wheel-Version (%s)',
            version_text,
        )
552
553
class Wheel(object):
    """A wheel file: parses the filename and exposes its compatibility tags."""

    # TODO: maybe move the install code into this class

    wheel_file_re = re.compile(
        r"""^(?P<namever>(?P<name>.+?)-(?P<ver>.*?))
        ((-(?P<build>\d[^-]*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?)
        \.whl|\.dist-info)$""",
        re.VERBOSE
    )

    def __init__(self, filename):
        """
        :raises InvalidWheelFilename: when the filename is invalid for a wheel
        """
        match = self.wheel_file_re.match(filename)
        if match is None:
            raise InvalidWheelFilename(
                "%s is not a valid wheel filename." % filename
            )
        self.filename = filename
        # "_" is treated as "-" in distribution names/versions, per the
        # wheel naming scheme (https://github.com/pypa/pip/issues/1150)
        self.name = match.group('name').replace('_', '-')
        self.version = match.group('ver').replace('_', '-')
        self.build_tag = match.group('build')
        self.pyversions = match.group('pyver').split('.')
        self.abis = match.group('abi').split('.')
        self.plats = match.group('plat').split('.')

        # Every (python, abi, platform) tag combination named by this file.
        self.file_tags = {
            (py, abi, plat)
            for py in self.pyversions
            for abi in self.abis
            for plat in self.plats
        }

    def support_index_min(self, tags=None):
        """Return the best (lowest) index any of this wheel's tag
        combinations achieves in *tags* (defaults to the interpreter's
        supported tags), or None if the wheel is not supported at all.
        """
        if tags is None:  # for mock
            tags = pep425tags.get_supported()
        matching = [tags.index(tag) for tag in self.file_tags if tag in tags]
        if not matching:
            return None
        return min(matching)

    def supported(self, tags=None):
        """Is this wheel supported on this system?"""
        if tags is None:  # for mock
            tags = pep425tags.get_supported()
        return any(tag in self.file_tags for tag in tags)
609
610class WheelBuilder(object):
611 """Build wheels from a RequirementSet."""
612
    def __init__(self, finder, preparer, wheel_cache,
                 build_options=None, global_options=None, no_clean=False):
        # Collaborators used while resolving/preparing requirements.
        self.finder = finder
        self.preparer = preparer
        self.wheel_cache = wheel_cache

        # Output directory taken from the preparer; build() asserts that
        # either this or (when autobuilding) the cache dir is set.
        self._wheel_dir = preparer.wheel_download_dir

        # Extra options for `setup.py bdist_wheel` and for every setup.py
        # invocation respectively (empty lists when not given).
        self.build_options = build_options or []
        self.global_options = global_options or []
        # presumably controls whether build dirs are kept after a run —
        # its consumer is outside this view; confirm before relying on it.
        self.no_clean = no_clean
624
    def _build_one(self, req, output_dir, python_tag=None):
        """Build one wheel.

        :param req: the requirement to build.
        :param output_dir: directory the finished wheel is moved into.
        :param python_tag: optional --python-tag passed to bdist_wheel.
        :return: The filename of the built wheel, or None if the build failed.
        """
        # Install build deps into temporary directory (PEP 518)
        with req.build_env:
            return self._build_one_inside_env(req, output_dir,
                                              python_tag=python_tag)
634
635 def _build_one_inside_env(self, req, output_dir, python_tag=None):
636 with TempDirectory(kind="wheel") as temp_dir:
637 if self.__build_one(req, temp_dir.path, python_tag=python_tag):
638 try:
639 wheel_name = os.listdir(temp_dir.path)[0]
640 wheel_path = os.path.join(output_dir, wheel_name)
641 shutil.move(
642 os.path.join(temp_dir.path, wheel_name), wheel_path
643 )
644 logger.info('Stored in directory: %s', output_dir)
645 return wheel_path
646 except:
647 pass
648 # Ignore return, we can't do anything else useful.
649 self._clean_one(req)
650 return None
651
652 def _base_setup_args(self, req):
653 # NOTE: Eventually, we'd want to also -S to the flags here, when we're
654 # isolating. Currently, it breaks Python in virtualenvs, because it
655 # relies on site.py to find parts of the standard library outside the
656 # virtualenv.
657 return [
658 sys.executable, '-u', '-c',
659 SETUPTOOLS_SHIM % req.setup_py
660 ] + list(self.global_options)
661
662 def __build_one(self, req, tempd, python_tag=None):
663 base_args = self._base_setup_args(req)
664
665 spin_message = 'Running setup.py bdist_wheel for %s' % (req.name,)
666 with open_spinner(spin_message) as spinner:
667 logger.debug('Destination directory: %s', tempd)
668 wheel_args = base_args + ['bdist_wheel', '-d', tempd] \
669 + self.build_options
670
671 if python_tag is not None:
672 wheel_args += ["--python-tag", python_tag]
673
674 try:
675 call_subprocess(wheel_args, cwd=req.setup_py_dir,
676 show_stdout=False, spinner=spinner)
677 return True
678 except:
679 spinner.finish("error")
680 logger.error('Failed building wheel for %s', req.name)
681 return False
682
683 def _clean_one(self, req):
684 base_args = self._base_setup_args(req)
685
686 logger.info('Running setup.py clean for %s', req.name)
687 clean_args = base_args + ['clean', '--all']
688 try:
689 call_subprocess(clean_args, cwd=req.source_dir, show_stdout=False)
690 return True
691 except:
692 logger.error('Failed cleaning build dir for %s', req.name)
693 return False
694
    def build(self, requirements, session, autobuilding=False):
        """Build wheels.

        :param requirements: the requirements to (possibly) build.
        :param session: network session, used to re-unpack built wheels.
        :param autobuilding: True when pip is transparently building wheels
            into its cache (rather than the user running ``pip wheel``);
            this changes both which requirements are built and where the
            results are stored.
        :return: True if all the wheels built correctly.
        """
        from pip._internal import index

        # We need somewhere to put results: an explicit wheel dir, or the
        # wheel cache when autobuilding.
        building_is_possible = self._wheel_dir or (
            autobuilding and self.wheel_cache.cache_dir
        )
        assert building_is_possible

        # Select what to build.  Each entry is (requirement, ephem_cache);
        # ephem_cache=True means the built wheel is only cached for this run.
        buildset = []
        for req in requirements:
            if req.constraint:
                continue
            if req.is_wheel:
                if not autobuilding:
                    logger.info(
                        'Skipping %s, due to already being wheel.', req.name,
                    )
            elif autobuilding and req.editable:
                pass
            elif autobuilding and not req.source_dir:
                pass
            elif autobuilding and req.link and not req.link.is_artifact:
                # VCS checkout. Build wheel just for this run.
                buildset.append((req, True))
            else:
                ephem_cache = False
                if autobuilding:
                    link = req.link
                    base, ext = link.splitext()
                    if index.egg_info_matches(base, None, link) is None:
                        # E.g. local directory. Build wheel just for this run.
                        ephem_cache = True
                    if "binary" not in index.fmt_ctl_formats(
                            self.finder.format_control,
                            canonicalize_name(req.name)):
                        logger.info(
                            "Skipping bdist_wheel for %s, due to binaries "
                            "being disabled for it.", req.name,
                        )
                        continue
                buildset.append((req, ephem_cache))

        if not buildset:
            return True

        # Build the wheels.
        logger.info(
            'Building wheels for collected packages: %s',
            ', '.join([req.name for (req, _) in buildset]),
        )
        _cache = self.wheel_cache  # shorter name
        with indent_log():
            build_success, build_failure = [], []
            for req, ephem in buildset:
                python_tag = None
                if autobuilding:
                    python_tag = pep425tags.implementation_tag
                    # Pick the cache location: ephemeral or persistent.
                    if ephem:
                        output_dir = _cache.get_ephem_path_for_link(req.link)
                    else:
                        output_dir = _cache.get_path_for_link(req.link)
                    try:
                        ensure_dir(output_dir)
                    except OSError as e:
                        logger.warning("Building wheel for %s failed: %s",
                                       req.name, e)
                        build_failure.append(req)
                        continue
                else:
                    output_dir = self._wheel_dir
                wheel_file = self._build_one(
                    req, output_dir,
                    python_tag=python_tag,
                )
                if wheel_file:
                    build_success.append(req)
                    if autobuilding:
                        # XXX: This is mildly duplicative with prepare_files,
                        # but not close enough to pull out to a single common
                        # method.
                        # The code below assumes temporary source dirs -
                        # prevent it doing bad things.
                        if req.source_dir and not os.path.exists(os.path.join(
                                req.source_dir, PIP_DELETE_MARKER_FILENAME)):
                            raise AssertionError(
                                "bad source dir - missing marker")
                        # Delete the source we built the wheel from
                        req.remove_temporary_source()
                        # set the build directory again - name is known from
                        # the work prepare_files did.
                        req.source_dir = req.build_location(
                            self.preparer.build_dir
                        )
                        # Update the link for this.
                        req.link = index.Link(path_to_url(wheel_file))
                        assert req.link.is_wheel
                        # extract the wheel into the dir
                        unpack_url(
                            req.link, req.source_dir, None, False,
                            session=session,
                        )
                else:
                    build_failure.append(req)

        # notify success/failure
        if build_success:
            logger.info(
                'Successfully built %s',
                ' '.join([req.name for req in build_success]),
            )
        if build_failure:
            logger.info(
                'Failed to build %s',
                ' '.join([req.name for req in build_failure]),
            )
        # Return True if all builds were successful
        return len(build_failure) == 0