diff options
| author | Shubham Saini <shubham6405@gmail.com> | 2019-08-05 08:32:33 +0000 |
|---|---|---|
| committer | Shubham Saini <shubham6405@gmail.com> | 2019-08-05 08:32:33 +0000 |
| commit | 227b2d30a8675b44918f9d9ca89b24144a938215 (patch) | |
| tree | 9f8e6a28724514b6fdf463a9ab2067a7ef309b72 /venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req | |
| parent | 842a8cfbbbdb1f92889d892e4859dbd5d40c5be8 (diff) | |
removing venv files
Diffstat (limited to 'venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req')
5 files changed, 0 insertions, 2141 deletions
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/__init__.py deleted file mode 100644 index 07ae607..0000000 --- a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/__init__.py +++ /dev/null | |||
| @@ -1,69 +0,0 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | |||
| 3 | import logging | ||
| 4 | |||
| 5 | from .req_install import InstallRequirement | ||
| 6 | from .req_set import RequirementSet | ||
| 7 | from .req_file import parse_requirements | ||
| 8 | from pip._internal.utils.logging import indent_log | ||
| 9 | |||
| 10 | |||
# Public API of the `req` package: requirement containers, the requirements
# file parser, and the batch installer below.
__all__ = [
    "RequirementSet", "InstallRequirement",
    "parse_requirements", "install_given_reqs",
]

# Module-level logger, named after this module per logging convention.
logger = logging.getLogger(__name__)
| 17 | |||
| 18 | |||
def install_given_reqs(to_install, install_options, global_options=(),
                       *args, **kwargs):
    """
    Install everything in the given list.

    (to be called after having downloaded and unpacked the packages)

    :param to_install: list of InstallRequirement objects to install.
    :param install_options: per-requirement options forwarded to
        ``requirement.install``.
    :param global_options: global options forwarded to ``requirement.install``.
    :return: the ``to_install`` list, unchanged.
    """
    if to_install:
        logger.info(
            'Installing collected packages: %s',
            ', '.join([req.name for req in to_install]),
        )

    with indent_log():
        for requirement in to_install:
            # Holds the UninstallPathSet when an existing installation had to
            # be removed first, so it can be rolled back or committed below.
            uninstalled_pathset = None
            if requirement.conflicts_with:
                logger.info(
                    'Found existing installation: %s',
                    requirement.conflicts_with,
                )
                with indent_log():
                    uninstalled_pathset = requirement.uninstall(
                        auto_confirm=True
                    )
            try:
                requirement.install(
                    install_options,
                    global_options,
                    *args,
                    **kwargs
                )
            except BaseException:
                # BaseException (not a bare `except:` and not Exception) so
                # the previous uninstall is rolled back even on
                # KeyboardInterrupt/SystemExit, restoring the old version.
                should_rollback = (
                    requirement.conflicts_with and
                    not requirement.install_succeeded
                )
                # if install did not succeed, rollback previous uninstall
                if should_rollback:
                    uninstalled_pathset.rollback()
                raise
            else:
                should_commit = (
                    requirement.conflicts_with and
                    requirement.install_succeeded
                )
                if should_commit:
                    uninstalled_pathset.commit()
            requirement.remove_temporary_source()

    return to_install
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/req_file.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/req_file.py deleted file mode 100644 index 9e6ef41..0000000 --- a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/req_file.py +++ /dev/null | |||
| @@ -1,338 +0,0 @@ | |||
| 1 | """ | ||
| 2 | Requirements file parsing | ||
| 3 | """ | ||
| 4 | |||
| 5 | from __future__ import absolute_import | ||
| 6 | |||
| 7 | import optparse | ||
| 8 | import os | ||
| 9 | import re | ||
| 10 | import shlex | ||
| 11 | import sys | ||
| 12 | |||
| 13 | from pip._vendor.six.moves import filterfalse | ||
| 14 | from pip._vendor.six.moves.urllib import parse as urllib_parse | ||
| 15 | |||
| 16 | from pip._internal import cmdoptions | ||
| 17 | from pip._internal.download import get_file_content | ||
| 18 | from pip._internal.exceptions import RequirementsFileParseError | ||
| 19 | from pip._internal.req.req_install import InstallRequirement | ||
| 20 | |||
__all__ = ['parse_requirements']

# Detects whether a requirements-file location is a URL (http/https/file)
# rather than a local path; used when resolving nested -r/-c references.
SCHEME_RE = re.compile(r'^(http|https|file):', re.I)
# Matches a '#' comment (at line start or preceded by whitespace) through
# end of line, so it can be stripped.
COMMENT_RE = re.compile(r'(^|\s)+#.*$')

# Matches environment variable-style values in '${MY_VARIABLE_1}' with the
# variable name consisting of only uppercase letters, digits or the '_'
# (underscore). This follows the POSIX standard defined in IEEE Std 1003.1,
# 2013 Edition.
ENV_VAR_RE = re.compile(r'(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})')

# Options allowed on their own line in a requirements file; these affect
# the finder / overall parse rather than a single requirement.
SUPPORTED_OPTIONS = [
    cmdoptions.constraints,
    cmdoptions.editable,
    cmdoptions.requirements,
    cmdoptions.no_index,
    cmdoptions.index_url,
    cmdoptions.find_links,
    cmdoptions.extra_index_url,
    cmdoptions.always_unzip,
    cmdoptions.no_binary,
    cmdoptions.only_binary,
    cmdoptions.pre,
    cmdoptions.process_dependency_links,
    cmdoptions.trusted_host,
    cmdoptions.require_hashes,
]

# options to be passed to requirements
SUPPORTED_OPTIONS_REQ = [
    cmdoptions.install_options,
    cmdoptions.global_options,
    cmdoptions.hash,
]

# the 'dest' string values
SUPPORTED_OPTIONS_REQ_DEST = [o().dest for o in SUPPORTED_OPTIONS_REQ]
| 58 | |||
| 59 | |||
def parse_requirements(filename, finder=None, comes_from=None, options=None,
                       session=None, constraint=False, wheel_cache=None):
    """Parse a requirements file and yield InstallRequirement instances.

    :param filename: Path or url of requirements file.
    :param finder: Instance of pip.index.PackageFinder.
    :param comes_from: Origin description of requirements.
    :param options: cli options.
    :param session: Instance of pip.download.PipSession.
    :param constraint: If true, parsing a constraint file rather than
        requirements file.
    :param wheel_cache: Instance of pip.wheel.WheelCache
    :raises TypeError: if no session was supplied.
    """
    if session is None:
        raise TypeError(
            "parse_requirements() missing 1 required keyword argument: "
            "'session'"
        )

    # get_file_content handles both local paths and remote URLs.
    _, content = get_file_content(
        filename, comes_from=comes_from, session=session
    )

    # Each logical (pre-processed) line may expand to any number of
    # requirements, e.g. via a nested -r reference.
    for line_number, line in preprocess(content, options):
        for requirement in process_line(line, filename, line_number, finder,
                                        comes_from, options, session,
                                        wheel_cache, constraint=constraint):
            yield requirement
| 91 | |||
| 92 | |||
def preprocess(content, options):
    """Split, filter, and join lines, and return a line iterator

    :param content: the content of the requirements file
    :param options: cli options
    """
    # Pipeline, applied innermost-first: number the raw lines, join
    # backslash continuations, drop comments/blank lines, honour
    # --skip-requirements-regex, then expand ${ENV_VAR} references.
    return expand_env_variables(
        skip_regex(
            ignore_comments(
                join_lines(
                    enumerate(content.splitlines(), start=1))),
            options))
| 105 | |||
| 106 | |||
def process_line(line, filename, line_number, finder=None, comes_from=None,
                 options=None, session=None, wheel_cache=None,
                 constraint=False):
    """Process a single requirements line; This can result in creating/yielding
    requirements, or updating the finder.

    For lines that contain requirements, the only options that have an effect
    are from SUPPORTED_OPTIONS_REQ, and they are scoped to the
    requirement. Other options from SUPPORTED_OPTIONS may be present, but are
    ignored.

    For lines that do not contain requirements, the only options that have an
    effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may
    be present, but are ignored. These lines may contain multiple options
    (although our docs imply only one is supported), and all are parsed and
    affect the finder.

    :param line: one logical line of the requirements file.
    :param filename: path or URL of the requirements file (used for error
        messages and for resolving nested relative references).
    :param line_number: line number within the file.
    :param constraint: If True, parsing a constraints file.
    :param options: OptionParser options that we may update
    """
    parser = build_parser(line)
    defaults = parser.get_default_values()
    defaults.index_url = None
    if finder:
        # `finder.format_control` will be updated during parsing
        defaults.format_control = finder.format_control
    # Only the options part is shlex'd; args may contain environment markers
    # whose quoting shlex would mangle.
    args_str, options_str = break_args_options(line)
    if sys.version_info < (2, 7, 3):
        # Prior to 2.7.3, shlex cannot deal with unicode entries
        options_str = options_str.encode('utf8')
    opts, _ = parser.parse_args(shlex.split(options_str), defaults)

    # preserve for the nested code path
    line_comes_from = '%s %s (line %s)' % (
        '-c' if constraint else '-r', filename, line_number,
    )

    # yield a line requirement
    if args_str:
        isolated = options.isolated_mode if options else False
        if options:
            cmdoptions.check_install_build_global(options, opts)
        # get the options that apply to requirements
        req_options = {}
        for dest in SUPPORTED_OPTIONS_REQ_DEST:
            if dest in opts.__dict__ and opts.__dict__[dest]:
                req_options[dest] = opts.__dict__[dest]
        yield InstallRequirement.from_line(
            args_str, line_comes_from, constraint=constraint,
            isolated=isolated, options=req_options, wheel_cache=wheel_cache
        )

    # yield an editable requirement
    elif opts.editables:
        isolated = options.isolated_mode if options else False
        yield InstallRequirement.from_editable(
            opts.editables[0], comes_from=line_comes_from,
            constraint=constraint, isolated=isolated, wheel_cache=wheel_cache
        )

    # parse a nested requirements file (recursively via parse_requirements)
    elif opts.requirements or opts.constraints:
        if opts.requirements:
            req_path = opts.requirements[0]
            nested_constraint = False
        else:
            req_path = opts.constraints[0]
            nested_constraint = True
        # original file is over http
        if SCHEME_RE.search(filename):
            # do a url join so relative paths work
            req_path = urllib_parse.urljoin(filename, req_path)
        # original file and nested file are paths
        elif not SCHEME_RE.search(req_path):
            # do a join so relative paths work
            req_path = os.path.join(os.path.dirname(filename), req_path)
        # TODO: Why not use `comes_from='-r {} (line {})'` here as well?
        parser = parse_requirements(
            req_path, finder, comes_from, options, session,
            constraint=nested_constraint, wheel_cache=wheel_cache
        )
        for req in parser:
            yield req

    # percolate hash-checking option upward
    elif opts.require_hashes:
        options.require_hashes = opts.require_hashes

    # set finder options (the remaining option lines only mutate the finder;
    # nothing is yielded)
    elif finder:
        if opts.index_url:
            finder.index_urls = [opts.index_url]
        if opts.no_index is True:
            finder.index_urls = []
        if opts.extra_index_urls:
            finder.index_urls.extend(opts.extra_index_urls)
        if opts.find_links:
            # FIXME: it would be nice to keep track of the source
            # of the find_links: support a find-links local path
            # relative to a requirements file.
            value = opts.find_links[0]
            req_dir = os.path.dirname(os.path.abspath(filename))
            relative_to_reqs_file = os.path.join(req_dir, value)
            if os.path.exists(relative_to_reqs_file):
                value = relative_to_reqs_file
            finder.find_links.append(value)
        if opts.pre:
            finder.allow_all_prereleases = True
        if opts.process_dependency_links:
            finder.process_dependency_links = True
        if opts.trusted_hosts:
            finder.secure_origins.extend(
                ("*", host, "*") for host in opts.trusted_hosts)
| 221 | |||
def break_args_options(line):
    """Break up the line into an args and options string. We only want to shlex
    (and then optparse) the options, not the args. args can contain markers
    which are corrupted by shlex.

    :param line: a single logical requirements-file line.
    :return: ``(args, options)`` — two space-joined strings. Everything before
        the first token starting with ``-`` is args; that token and the rest
        are options.
    """
    tokens = line.split(' ')
    args = []
    options = tokens[:]
    for token in tokens:
        # A single startswith('-') covers both short ('-e') and long
        # ('--hash') options; the original redundantly tested '--' as well.
        if token.startswith('-'):
            break
        args.append(token)
        options.pop(0)
    return ' '.join(args), ' '.join(options)
| 237 | |||
| 238 | |||
def build_parser(line):
    """
    Return a parser for parsing requirement lines

    The parser accepts every option in SUPPORTED_OPTIONS and
    SUPPORTED_OPTIONS_REQ, and raises RequirementsFileParseError (instead of
    exiting the process) on invalid input, embedding the offending *line* in
    the message.
    """
    parser = optparse.OptionParser(add_help_option=False)

    option_factories = SUPPORTED_OPTIONS + SUPPORTED_OPTIONS_REQ
    for option_factory in option_factories:
        # Each entry is a factory returning a fresh optparse Option.
        option = option_factory()
        parser.add_option(option)

    # By default optparse sys.exits on parsing errors. We want to wrap
    # that in our own exception.
    # NOTE(review): optparse's error() calls self.exit(2, msg); since this is
    # assigned as an *instance* attribute (not a bound method), the `self`
    # parameter here absorbs that status-code argument — confirm before
    # renaming the parameters.
    def parser_exit(self, msg):
        # add offending line
        msg = 'Invalid requirement: %s\n%s' % (line, msg)
        raise RequirementsFileParseError(msg)
    parser.exit = parser_exit

    return parser
| 259 | |||
| 260 | |||
def join_lines(lines_enum):
    """Join a line ending in a backslash with the following line(s), except
    when the line is a comment. The joined line takes on the line number of
    the first fragment.
    """
    buffered = []        # accumulated fragments of a continuation group
    first_number = None  # line number of the group's first fragment
    for number, text in lines_enum:
        is_comment = bool(COMMENT_RE.match(text))
        if text.endswith('\\') and not is_comment:
            # Continuation: strip the backslash and keep accumulating.
            if not buffered:
                first_number = number
            buffered.append(text.strip('\\'))
            continue
        if is_comment:
            # this ensures comments are always matched later
            text = ' ' + text
        if buffered:
            buffered.append(text)
            yield first_number, ''.join(buffered)
            buffered = []
        else:
            yield number, text

    # last line contains \
    if buffered:
        yield first_number, ''.join(buffered)

    # TODO: handle space after '\'.
| 286 | |||
| 287 | # TODO: handle space after '\'. | ||
| 288 | |||
| 289 | |||
def ignore_comments(lines_enum):
    """
    Strips comments and filter empty lines.
    """
    for number, text in lines_enum:
        cleaned = COMMENT_RE.sub('', text).strip()
        if cleaned:
            yield number, cleaned
| 299 | |||
| 300 | |||
def skip_regex(lines_enum, options):
    """
    Skip lines that match '--skip-requirements-regex' pattern

    Note: the regex pattern is only built once
    """
    pattern_text = options.skip_requirements_regex if options else None
    if not pattern_text:
        # Nothing to skip: pass the iterator through untouched.
        return lines_enum
    matcher = re.compile(pattern_text)
    return filterfalse(lambda item: matcher.search(item[1]), lines_enum)
| 312 | |||
| 313 | |||
def expand_env_variables(lines_enum):
    """Replace all environment variables that can be retrieved via `os.getenv`.

    The only allowed format for environment variables defined in the
    requirement file is `${MY_VARIABLE_1}` to ensure two things:

    1. Strings that contain a `$` aren't accidentally (partially) expanded.
    2. Ensure consistency across platforms for requirement files.

    These points are the result of a discussion on the `github pull
    request #3514 <https://github.com/pypa/pip/pull/3514>`_.

    Valid characters in variable names follow the `POSIX standard
    <http://pubs.opengroup.org/onlinepubs/9699919799/>`_ and are limited
    to uppercase letter, digits and the `_` (underscore).

    References whose variable is unset (or empty) are left as-is.
    """
    for number, text in lines_enum:
        for reference, variable in ENV_VAR_RE.findall(text):
            replacement = os.getenv(variable)
            if replacement:
                text = text.replace(reference, replacement)
        yield number, text
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/req_install.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/req_install.py deleted file mode 100644 index 9dd1523..0000000 --- a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/req_install.py +++ /dev/null | |||
| @@ -1,1115 +0,0 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | |||
| 3 | import logging | ||
| 4 | import os | ||
| 5 | import re | ||
| 6 | import shutil | ||
| 7 | import sys | ||
| 8 | import sysconfig | ||
| 9 | import traceback | ||
| 10 | import warnings | ||
| 11 | import zipfile | ||
| 12 | from distutils.util import change_root | ||
| 13 | from email.parser import FeedParser # type: ignore | ||
| 14 | |||
| 15 | from pip._vendor import pkg_resources, pytoml, six | ||
| 16 | from pip._vendor.packaging import specifiers | ||
| 17 | from pip._vendor.packaging.markers import Marker | ||
| 18 | from pip._vendor.packaging.requirements import InvalidRequirement, Requirement | ||
| 19 | from pip._vendor.packaging.utils import canonicalize_name | ||
| 20 | from pip._vendor.packaging.version import parse as parse_version | ||
| 21 | from pip._vendor.packaging.version import Version | ||
| 22 | from pip._vendor.pkg_resources import RequirementParseError, parse_requirements | ||
| 23 | |||
| 24 | from pip._internal import wheel | ||
| 25 | from pip._internal.build_env import BuildEnvironment | ||
| 26 | from pip._internal.compat import native_str | ||
| 27 | from pip._internal.download import ( | ||
| 28 | is_archive_file, is_url, path_to_url, url_to_path, | ||
| 29 | ) | ||
| 30 | from pip._internal.exceptions import InstallationError, UninstallationError | ||
| 31 | from pip._internal.locations import ( | ||
| 32 | PIP_DELETE_MARKER_FILENAME, running_under_virtualenv, | ||
| 33 | ) | ||
| 34 | from pip._internal.req.req_uninstall import UninstallPathSet | ||
| 35 | from pip._internal.utils.deprecation import RemovedInPip11Warning | ||
| 36 | from pip._internal.utils.hashes import Hashes | ||
| 37 | from pip._internal.utils.logging import indent_log | ||
| 38 | from pip._internal.utils.misc import ( | ||
| 39 | _make_build_dir, ask_path_exists, backup_dir, call_subprocess, | ||
| 40 | display_path, dist_in_site_packages, dist_in_usersite, ensure_dir, | ||
| 41 | get_installed_version, is_installable_dir, read_text_file, rmtree, | ||
| 42 | ) | ||
| 43 | from pip._internal.utils.setuptools_build import SETUPTOOLS_SHIM | ||
| 44 | from pip._internal.utils.temp_dir import TempDirectory | ||
| 45 | from pip._internal.utils.ui import open_spinner | ||
| 46 | from pip._internal.vcs import vcs | ||
| 47 | from pip._internal.wheel import Wheel, move_wheel_files | ||
| 48 | |||
logger = logging.getLogger(__name__)

# All comparison operators recognised by version specifiers (e.g. '==',
# '>='); used in from_line to produce a helpful message when a lone '='
# appears in a requirement string.
operators = specifiers.Specifier._operators.keys()
| 52 | |||
| 53 | |||
| 54 | def _strip_extras(path): | ||
| 55 | m = re.match(r'^(.+)(\[[^\]]+\])$', path) | ||
| 56 | extras = None | ||
| 57 | if m: | ||
| 58 | path_no_extras = m.group(1) | ||
| 59 | extras = m.group(2) | ||
| 60 | else: | ||
| 61 | path_no_extras = path | ||
| 62 | |||
| 63 | return path_no_extras, extras | ||
| 64 | |||
| 65 | |||
class InstallRequirement(object):
    """
    Represents something that may be installed later on, may have information
    about where to fetch the relevant requirement and also contains logic for
    installing the said requirement.
    """
| 72 | |||
    def __init__(self, req, comes_from, source_dir=None, editable=False,
                 link=None, update=True, markers=None,
                 isolated=False, options=None, wheel_cache=None,
                 constraint=False, extras=()):
        """Initialize an InstallRequirement.

        :param req: a packaging ``Requirement`` or None (unnamed requirement
            identified only by a link or path).
        :param comes_from: origin of this requirement — a string or another
            requirement object (used in messages).
        :param source_dir: unpacked source directory, normalized if given.
        :param editable: True for '-e' requirements.
        :param link: explicit Link; otherwise derived from ``req.url`` when
            present.
        :param markers: environment markers; defaults to ``req.marker``.
        :param extras: explicit extras override; defaults to ``req.extras``.
        """
        assert req is None or isinstance(req, Requirement), req
        self.req = req
        self.comes_from = comes_from
        self.constraint = constraint
        if source_dir is not None:
            self.source_dir = os.path.normpath(os.path.abspath(source_dir))
        else:
            self.source_dir = None
        self.editable = editable

        self._wheel_cache = wheel_cache
        if link is not None:
            self.link = self.original_link = link
        else:
            # Import here to avoid a circular import at module load time.
            from pip._internal.index import Link
            self.link = self.original_link = req and req.url and Link(req.url)

        if extras:
            self.extras = extras
        elif req:
            self.extras = {
                pkg_resources.safe_extra(extra) for extra in req.extras
            }
        else:
            self.extras = set()
        if markers is not None:
            self.markers = markers
        else:
            self.markers = req and req.marker
        self._egg_info_path = None
        # This holds the pkg_resources.Distribution object if this requirement
        # is already available:
        self.satisfied_by = None
        # This holds the pkg_resources.Distribution object if this requirement
        # conflicts with another installed distribution:
        self.conflicts_with = None
        # Temporary build location
        self._temp_build_dir = TempDirectory(kind="req-build")
        # Used to store the global directory where the _temp_build_dir should
        # have been created. Cf _correct_build_location method.
        self._ideal_build_dir = None
        # True if the editable should be updated:
        self.update = update
        # Set to True after successful installation
        self.install_succeeded = None
        # UninstallPathSet of uninstalled distribution (for possible rollback)
        self.uninstalled_pathset = None
        self.options = options if options else {}
        # Set to True after successful preparation of this requirement
        self.prepared = False
        # True when this requirement was named directly (e.g. on the command
        # line) rather than discovered as a dependency.
        self.is_direct = False

        self.isolated = isolated
        self.build_env = BuildEnvironment(no_clean=True)
| 131 | |||
| 132 | @classmethod | ||
| 133 | def from_editable(cls, editable_req, comes_from=None, isolated=False, | ||
| 134 | options=None, wheel_cache=None, constraint=False): | ||
| 135 | from pip._internal.index import Link | ||
| 136 | |||
| 137 | name, url, extras_override = parse_editable(editable_req) | ||
| 138 | if url.startswith('file:'): | ||
| 139 | source_dir = url_to_path(url) | ||
| 140 | else: | ||
| 141 | source_dir = None | ||
| 142 | |||
| 143 | if name is not None: | ||
| 144 | try: | ||
| 145 | req = Requirement(name) | ||
| 146 | except InvalidRequirement: | ||
| 147 | raise InstallationError("Invalid requirement: '%s'" % name) | ||
| 148 | else: | ||
| 149 | req = None | ||
| 150 | return cls( | ||
| 151 | req, comes_from, source_dir=source_dir, | ||
| 152 | editable=True, | ||
| 153 | link=Link(url), | ||
| 154 | constraint=constraint, | ||
| 155 | isolated=isolated, | ||
| 156 | options=options if options else {}, | ||
| 157 | wheel_cache=wheel_cache, | ||
| 158 | extras=extras_override or (), | ||
| 159 | ) | ||
| 160 | |||
| 161 | @classmethod | ||
| 162 | def from_req(cls, req, comes_from=None, isolated=False, wheel_cache=None): | ||
| 163 | try: | ||
| 164 | req = Requirement(req) | ||
| 165 | except InvalidRequirement: | ||
| 166 | raise InstallationError("Invalid requirement: '%s'" % req) | ||
| 167 | if req.url: | ||
| 168 | raise InstallationError( | ||
| 169 | "Direct url requirement (like %s) are not allowed for " | ||
| 170 | "dependencies" % req | ||
| 171 | ) | ||
| 172 | return cls(req, comes_from, isolated=isolated, wheel_cache=wheel_cache) | ||
| 173 | |||
    @classmethod
    def from_line(
            cls, name, comes_from=None, isolated=False, options=None,
            wheel_cache=None, constraint=False):
        """Creates an InstallRequirement from a name, which might be a
        requirement, directory containing 'setup.py', filename, or URL.
        """
        from pip._internal.index import Link

        # For URLs require '; ' (with a space) before treating the tail as an
        # environment marker, since a bare ';' can legitimately occur in a URL.
        if is_url(name):
            marker_sep = '; '
        else:
            marker_sep = ';'
        if marker_sep in name:
            name, markers = name.split(marker_sep, 1)
            markers = markers.strip()
            if not markers:
                markers = None
            else:
                markers = Marker(markers)
        else:
            markers = None
        name = name.strip()
        req = None
        path = os.path.normpath(os.path.abspath(name))
        link = None
        extras = None

        if is_url(name):
            link = Link(name)
        else:
            p, extras = _strip_extras(path)
            # Treat as a directory only when the string actually looks like a
            # path (contains a separator or starts with '.').
            looks_like_dir = os.path.isdir(p) and (
                os.path.sep in name or
                (os.path.altsep is not None and os.path.altsep in name) or
                name.startswith('.')
            )
            if looks_like_dir:
                if not is_installable_dir(p):
                    raise InstallationError(
                        "Directory %r is not installable. File 'setup.py' "
                        "not found." % name
                    )
                link = Link(path_to_url(p))
            elif is_archive_file(p):
                if not os.path.isfile(p):
                    logger.warning(
                        'Requirement %r looks like a filename, but the '
                        'file does not exist',
                        name
                    )
                link = Link(path_to_url(p))

        # it's a local file, dir, or url
        if link:
            # Handle relative file URLs
            if link.scheme == 'file' and re.search(r'\.\./', link.url):
                link = Link(
                    path_to_url(os.path.normpath(os.path.abspath(link.path))))
            # wheel file
            if link.is_wheel:
                wheel = Wheel(link.filename)  # can raise InvalidWheelFilename
                req = "%s==%s" % (wheel.name, wheel.version)
            else:
                # set the req to the egg fragment. when it's not there, this
                # will become an 'unnamed' requirement
                req = link.egg_fragment

        # a requirement specifier
        else:
            req = name

        if extras:
            # Parse '[a,b]' via a placeholder requirement to reuse the
            # canonical extras parsing.
            extras = Requirement("placeholder" + extras.lower()).extras
        else:
            extras = ()
        if req is not None:
            try:
                req = Requirement(req)
            except InvalidRequirement:
                # Craft the most helpful message for common mistakes.
                if os.path.sep in req:
                    add_msg = "It looks like a path."
                    add_msg += deduce_helpful_msg(req)
                elif '=' in req and not any(op in req for op in operators):
                    add_msg = "= is not a valid operator. Did you mean == ?"
                else:
                    add_msg = traceback.format_exc()
                raise InstallationError(
                    "Invalid requirement: '%s'\n%s" % (req, add_msg))
        return cls(
            req, comes_from, link=link, markers=markers,
            isolated=isolated,
            options=options if options else {},
            wheel_cache=wheel_cache,
            constraint=constraint,
            extras=extras,
        )
| 271 | |||
| 272 | def __str__(self): | ||
| 273 | if self.req: | ||
| 274 | s = str(self.req) | ||
| 275 | if self.link: | ||
| 276 | s += ' from %s' % self.link.url | ||
| 277 | else: | ||
| 278 | s = self.link.url if self.link else None | ||
| 279 | if self.satisfied_by is not None: | ||
| 280 | s += ' in %s' % display_path(self.satisfied_by.location) | ||
| 281 | if self.comes_from: | ||
| 282 | if isinstance(self.comes_from, six.string_types): | ||
| 283 | comes_from = self.comes_from | ||
| 284 | else: | ||
| 285 | comes_from = self.comes_from.from_path() | ||
| 286 | if comes_from: | ||
| 287 | s += ' (from %s)' % comes_from | ||
| 288 | return s | ||
| 289 | |||
| 290 | def __repr__(self): | ||
| 291 | return '<%s object: %s editable=%r>' % ( | ||
| 292 | self.__class__.__name__, str(self), self.editable) | ||
| 293 | |||
| 294 | def populate_link(self, finder, upgrade, require_hashes): | ||
| 295 | """Ensure that if a link can be found for this, that it is found. | ||
| 296 | |||
| 297 | Note that self.link may still be None - if Upgrade is False and the | ||
| 298 | requirement is already installed. | ||
| 299 | |||
| 300 | If require_hashes is True, don't use the wheel cache, because cached | ||
| 301 | wheels, always built locally, have different hashes than the files | ||
| 302 | downloaded from the index server and thus throw false hash mismatches. | ||
| 303 | Furthermore, cached wheels at present have undeterministic contents due | ||
| 304 | to file modification times. | ||
| 305 | """ | ||
| 306 | if self.link is None: | ||
| 307 | self.link = finder.find_requirement(self, upgrade) | ||
| 308 | if self._wheel_cache is not None and not require_hashes: | ||
| 309 | old_link = self.link | ||
| 310 | self.link = self._wheel_cache.get(self.link, self.name) | ||
| 311 | if old_link != self.link: | ||
| 312 | logger.debug('Using cached wheel link: %s', self.link) | ||
| 313 | |||
| 314 | @property | ||
| 315 | def specifier(self): | ||
| 316 | return self.req.specifier | ||
| 317 | |||
| 318 | @property | ||
| 319 | def is_pinned(self): | ||
| 320 | """Return whether I am pinned to an exact version. | ||
| 321 | |||
| 322 | For example, some-package==1.2 is pinned; some-package>1.2 is not. | ||
| 323 | """ | ||
| 324 | specifiers = self.specifier | ||
| 325 | return (len(specifiers) == 1 and | ||
| 326 | next(iter(specifiers)).operator in {'==', '==='}) | ||
| 327 | |||
def from_path(self):
    """Format a dependency chain like ``name->parent`` for messages.

    Returns None when the requirement has no name yet.
    """
    if self.req is None:
        return None
    chain = str(self.req)
    if self.comes_from:
        if isinstance(self.comes_from, six.string_types):
            origin = self.comes_from
        else:
            origin = self.comes_from.from_path()
        if origin:
            chain += '->' + origin
    return chain
| 340 | |||
def build_location(self, build_dir):
    """Return (creating if needed) the directory to build this req in.

    Known-name requirements build in ``<build_dir>/<name>``; unnamed
    ones (a bare path to a directory) get a fresh temp directory that
    is later relocated by _correct_build_location().
    """
    assert build_dir is not None
    # A temporary build dir was already allocated - keep using it.
    if self._temp_build_dir.path is not None:
        return self._temp_build_dir.path
    if self.req is None:
        # for requirement via a path to a directory: the name of the
        # package is not available yet so we create a temp directory
        # Once run_egg_info will have run, we'll be able
        # to fix it via _correct_build_location
        # Some systems have /tmp as a symlink which confuses custom
        # builds (such as numpy). Thus, we ensure that the real path
        # is returned.
        self._temp_build_dir.create()
        # Remember the intended parent for _correct_build_location().
        self._ideal_build_dir = build_dir

        return self._temp_build_dir.path
    if self.editable:
        name = self.name.lower()
    else:
        name = self.name
    # FIXME: Is there a better place to create the build_dir? (hg and bzr
    # need this)
    if not os.path.exists(build_dir):
        logger.debug('Creating directory %s', build_dir)
        _make_build_dir(build_dir)
    return os.path.join(build_dir, name)
| 367 | |||
def _correct_build_location(self):
    """Move self._temp_build_dir to self._ideal_build_dir/self.req.name

    For some requirements (e.g. a path to a directory), the name of the
    package is not available until we run egg_info, so the build_location
    will return a temporary directory and store the _ideal_build_dir.

    Called once the metadata name is known (see run_egg_info) to fix the
    temporary build directory.
    """
    if self.source_dir is not None:
        return
    assert self.req is not None
    assert self._temp_build_dir.path
    assert self._ideal_build_dir.path
    old_location = self._temp_build_dir.path
    # Detach the temp dir so build_location() computes the real target.
    self._temp_build_dir.path = None

    new_location = self.build_location(self._ideal_build_dir)
    if os.path.exists(new_location):
        raise InstallationError(
            'A package already exists in %s; please remove it to continue'
            % display_path(new_location))
    logger.debug(
        'Moving package %s from %s to new location %s',
        self, display_path(old_location), display_path(new_location),
    )
    shutil.move(old_location, new_location)
    self._temp_build_dir.path = new_location
    self._ideal_build_dir = None
    self.source_dir = os.path.normpath(os.path.abspath(new_location))
    # Invalidate the cached egg-info path; it pointed into the old tree.
    self._egg_info_path = None
| 400 | |||
@property
def name(self):
    """Project name of this requirement (via pkg_resources.safe_name),
    as a native string; None when the requirement is still unnamed."""
    requirement = self.req
    if requirement is None:
        return None
    safe = pkg_resources.safe_name(requirement.name)
    return native_str(safe)
| 406 | |||
@property
def setup_py_dir(self):
    """Directory holding setup.py: source_dir joined with any
    ``#subdirectory=`` fragment from the link."""
    if self.link:
        subdirectory = self.link.subdirectory_fragment or ''
    else:
        subdirectory = ''
    return os.path.join(self.source_dir, subdirectory)
| 412 | |||
@property
def setup_py(self):
    """Path to the setup.py file under setup_py_dir."""
    assert self.source_dir, "No source dir for %s" % self
    path = os.path.join(self.setup_py_dir, 'setup.py')
    # Python2 __file__ should not be unicode
    if six.PY2 and isinstance(path, six.text_type):
        path = path.encode(sys.getfilesystemencoding())
    return path
| 424 | |||
@property
def pyproject_toml(self):
    """Path to the pyproject.toml file under setup_py_dir."""
    assert self.source_dir, "No source dir for %s" % self
    path = os.path.join(self.setup_py_dir, 'pyproject.toml')
    # Python2 __file__ should not be unicode
    if six.PY2 and isinstance(path, six.text_type):
        path = path.encode(sys.getfilesystemencoding())
    return path
| 436 | |||
def get_pep_518_info(self):
    """Get a list of the packages required to build the project, if any,
    and a flag indicating whether pyproject.toml is present, indicating
    that the build should be isolated.

    Build requirements can be specified in a pyproject.toml, as described
    in PEP 518. If this file exists but doesn't specify build
    requirements, pip will default to installing setuptools and wheel.
    """
    if not os.path.isfile(self.pyproject_toml):
        return (['setuptools', 'wheel'], False)
    with open(self.pyproject_toml) as fp:
        config = pytoml.load(fp)
    build_system = config.get('build-system', {})
    return (build_system.get('requires', ['setuptools', 'wheel']), True)
| 452 | |||
def run_egg_info(self):
    """Run ``setup.py egg_info`` in the source tree and sync self.req
    with the generated metadata.

    For an unnamed requirement (local directory install) this fills in
    self.req from the produced PKG-INFO and relocates the build dir via
    _correct_build_location(); for a named one it warns and rewrites
    self.req when the metadata name disagrees with the requested name.
    """
    assert self.source_dir
    if self.name:
        logger.debug(
            'Running setup.py (path:%s) egg_info for package %s',
            self.setup_py, self.name,
        )
    else:
        logger.debug(
            'Running setup.py (path:%s) egg_info for package from %s',
            self.setup_py, self.link,
        )

    with indent_log():
        script = SETUPTOOLS_SHIM % self.setup_py
        base_cmd = [sys.executable, '-c', script]
        if self.isolated:
            base_cmd += ["--no-user-cfg"]
        egg_info_cmd = base_cmd + ['egg_info']
        # We can't put the .egg-info files at the root, because then the
        # source code will be mistaken for an installed egg, causing
        # problems
        if self.editable:
            egg_base_option = []
        else:
            egg_info_dir = os.path.join(self.setup_py_dir, 'pip-egg-info')
            ensure_dir(egg_info_dir)
            egg_base_option = ['--egg-base', 'pip-egg-info']
        with self.build_env:
            call_subprocess(
                egg_info_cmd + egg_base_option,
                cwd=self.setup_py_dir,
                show_stdout=False,
                command_desc='python setup.py egg_info')

    # Parse PKG-INFO once; the original re-parsed it for every field.
    metadata = self.pkg_info()
    if not self.req:
        # Legacy (non PEP 440) versions need the arbitrary-equality
        # operator "===" instead of "==".
        if isinstance(parse_version(metadata["Version"]), Version):
            op = "=="
        else:
            op = "==="
        self.req = Requirement(
            "".join([metadata["Name"], op, metadata["Version"]])
        )
        self._correct_build_location()
    else:
        metadata_name = canonicalize_name(metadata["Name"])
        if canonicalize_name(self.req.name) != metadata_name:
            logger.warning(
                'Running setup.py (path:%s) egg_info for package %s '
                'produced metadata for project name %s. Fix your '
                '#egg=%s fragments.',
                self.setup_py, self.name, metadata_name, self.name
            )
            self.req = Requirement(metadata_name)
| 511 | |||
def egg_info_data(self, filename):
    """Return the text of *filename* from this requirement's metadata.

    Prefers metadata of an already-installed satisfying distribution;
    otherwise reads from the egg-info directory in the source tree.
    Returns None when the file does not exist.
    """
    installed = self.satisfied_by
    if installed is not None:
        if installed.has_metadata(filename):
            return installed.get_metadata(filename)
        return None
    assert self.source_dir
    path = self.egg_info_path(filename)
    if not os.path.exists(path):
        return None
    return read_text_file(path)
| 523 | |||
def egg_info_path(self, filename):
    """Return the path of *filename* inside the .egg-info directory.

    The .egg-info directory is located lazily on first use and cached
    in self._egg_info_path. For editable installs the whole source
    tree is searched (skipping VCS dirs, virtualenv-looking dirs and
    test directories); otherwise only ``pip-egg-info`` is listed.

    :raises InstallationError: when no candidate directory is found.
    """
    if self._egg_info_path is None:
        if self.editable:
            base = self.source_dir
        else:
            base = os.path.join(self.setup_py_dir, 'pip-egg-info')
        filenames = os.listdir(base)
        if self.editable:
            filenames = []
            for root, dirs, files in os.walk(base):
                for dir in vcs.dirnames:
                    if dir in dirs:
                        dirs.remove(dir)
                # Iterate over a copy of ``dirs``, since mutating
                # a list while iterating over it can cause trouble.
                # (See https://github.com/pypa/pip/pull/462.)
                for dir in list(dirs):
                    # Don't search in anything that looks like a virtualenv
                    # environment
                    if (
                            os.path.lexists(
                                os.path.join(root, dir, 'bin', 'python')
                            ) or
                            os.path.exists(
                                os.path.join(
                                    root, dir, 'Scripts', 'Python.exe'
                                )
                            )):
                        dirs.remove(dir)
                    # Also don't search through tests
                    elif dir == 'test' or dir == 'tests':
                        dirs.remove(dir)
                filenames.extend([os.path.join(root, dir)
                                  for dir in dirs])
            filenames = [f for f in filenames if f.endswith('.egg-info')]

        # NOTE: the old ``assert filenames`` that followed this raise was
        # unreachable dead code and has been removed.
        if not filenames:
            raise InstallationError(
                'No files/directories in %s (from %s)' % (base, filename)
            )

        # if we have more than one match, we pick the toplevel one. This
        # can easily be the case if there is a dist folder which contains
        # an extracted tarball for testing purposes.
        if len(filenames) > 1:
            filenames.sort(
                key=lambda x: x.count(os.path.sep) +
                (os.path.altsep and x.count(os.path.altsep) or 0)
            )
        self._egg_info_path = os.path.join(base, filenames[0])
    return os.path.join(self._egg_info_path, filename)
| 577 | |||
def pkg_info(self):
    """Parse this requirement's PKG-INFO metadata into a Message.

    Feeds the raw PKG-INFO text (or the empty string, with a warning,
    when it is missing) through email's FeedParser.
    """
    raw = self.egg_info_data('PKG-INFO')
    if not raw:
        logger.warning(
            'No PKG-INFO file found in %s',
            display_path(self.egg_info_path('PKG-INFO')),
        )
    parser = FeedParser()
    parser.feed(raw or '')
    return parser.close()
| 588 | |||
# Matches "[section]" headers, e.g. extras sections in egg-info files.
_requirements_section_re = re.compile(r'\[(.*?)\]')
| 590 | |||
@property
def installed_version(self):
    """Version string of the currently installed distribution, if any."""
    project = self.name
    return get_installed_version(project)
| 594 | |||
def assert_source_matches_version(self):
    """Log whether the unpacked source's version satisfies self.req."""
    assert self.source_dir
    version = self.pkg_info()['version']
    specifier = self.req.specifier
    if specifier and version not in specifier:
        logger.warning(
            'Requested %s, but installing version %s',
            self,
            version,
        )
    else:
        logger.debug(
            'Source in %s has version %s, which satisfies requirement %s',
            display_path(self.source_dir),
            version,
            self,
        )
| 611 | |||
def update_editable(self, obtain=True):
    """Check out or refresh the editable source from its VCS link.

    :param obtain: when True, obtain() the repository into source_dir;
        when False, export() a snapshot instead.
    """
    if not self.link:
        logger.debug(
            "Cannot update repository at %s; repository location is "
            "unknown",
            self.source_dir,
        )
        return
    assert self.editable
    assert self.source_dir
    if self.link.scheme == 'file':
        # Static paths don't get updated
        return
    # Editable VCS URLs are of the form "<vcs>+<repo url>".
    assert '+' in self.link.url, "bad url: %r" % self.link.url
    if not self.update:
        return
    vc_type, url = self.link.url.split('+', 1)
    backend = vcs.get_backend(vc_type)
    if backend:
        vcs_backend = backend(self.link.url)
        if obtain:
            vcs_backend.obtain(self.source_dir)
        else:
            vcs_backend.export(self.source_dir)
    else:
        assert 0, (
            'Unexpected version control type (in %s): %s'
            % (self.link, vc_type))
| 640 | |||
def uninstall(self, auto_confirm=False, verbose=False,
              use_user_site=False):
    """
    Uninstall the distribution currently satisfying this requirement.

    Prompts before removing or modifying files unless
    ``auto_confirm`` is True.

    Refuses to delete or modify files outside of ``sys.prefix`` -
    thus uninstallation within a virtual environment can only
    modify that virtual environment, even if the virtualenv is
    linked to global site-packages.

    """
    if not self.check_if_exists(use_user_site):
        logger.warning("Skipping %s as it is not installed.", self.name)
        return
    dist = self.satisfied_by or self.conflicts_with
    pathset = UninstallPathSet.from_dist(dist)
    pathset.remove(auto_confirm, verbose)
    return pathset
| 663 | |||
def archive(self, build_dir):
    """Create ``<name>-<version>.zip`` of the source tree in *build_dir*.

    If the target file already exists the user is asked whether to
    (i)gnore, (w)ipe, (b)ackup or (a)bort. The ``pip-egg-info``
    directory and the pip delete-marker file are never archived.
    """
    assert self.source_dir
    create_archive = True
    archive_name = '%s-%s.zip' % (self.name, self.pkg_info()["version"])
    archive_path = os.path.join(build_dir, archive_name)
    if os.path.exists(archive_path):
        response = ask_path_exists(
            'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)bort ' %
            display_path(archive_path), ('i', 'w', 'b', 'a'))
        if response == 'i':
            create_archive = False
        elif response == 'w':
            logger.warning('Deleting %s', display_path(archive_path))
            os.remove(archive_path)
        elif response == 'b':
            dest_file = backup_dir(archive_path)
            logger.warning(
                'Backing up %s to %s',
                display_path(archive_path),
                display_path(dest_file),
            )
            shutil.move(archive_path, dest_file)
        elif response == 'a':
            sys.exit(-1)
    if create_archive:
        # Context manager guarantees the handle is closed even if a
        # write fails; also avoid shadowing the ``zip``/``dir`` builtins.
        with zipfile.ZipFile(archive_path, 'w', zipfile.ZIP_DEFLATED,
                             allowZip64=True) as zip_file:
            base = os.path.normcase(os.path.abspath(self.setup_py_dir))
            for dirpath, dirnames, filenames in os.walk(base):
                if 'pip-egg-info' in dirnames:
                    # Never ship pip's metadata scratch directory.
                    dirnames.remove('pip-egg-info')
                for dirname in dirnames:
                    dirname = os.path.join(dirpath, dirname)
                    name = self._clean_zip_name(dirname, base)
                    zipdir = zipfile.ZipInfo(self.name + '/' + name + '/')
                    zipdir.external_attr = 0x1ED << 16  # 0o755
                    zip_file.writestr(zipdir, '')
                for filename in filenames:
                    if filename == PIP_DELETE_MARKER_FILENAME:
                        continue
                    filename = os.path.join(dirpath, filename)
                    name = self._clean_zip_name(filename, base)
                    zip_file.write(filename, self.name + '/' + name)
        logger.info('Saved %s', display_path(archive_path))
| 711 | |||
| 712 | def _clean_zip_name(self, name, prefix): | ||
| 713 | assert name.startswith(prefix + os.path.sep), ( | ||
| 714 | "name %r doesn't start with prefix %r" % (name, prefix) | ||
| 715 | ) | ||
| 716 | name = name[len(prefix) + 1:] | ||
| 717 | name = name.replace(os.path.sep, '/') | ||
| 718 | return name | ||
| 719 | |||
def match_markers(self, extras_requested=None):
    """Return True when self.markers evaluates true for some requested
    extra; a requirement without markers always matches."""
    if self.markers is None:
        return True
    # Evaluate against the empty extra when none were requested, so the
    # 'extra' marker variable is always defined.
    extras = extras_requested if extras_requested else ('',)
    return any(
        self.markers.evaluate({'extra': extra})
        for extra in extras
    )
| 731 | |||
def install(self, install_options, global_options=None, root=None,
            home=None, prefix=None, warn_script_location=True,
            use_user_site=False, pycompile=True):
    """Install this requirement.

    Editables are delegated to install_editable(); wheels to
    move_wheel_files(). Everything else runs ``setup.py install`` with
    a --record file and then writes installed-files.txt (paths relative
    to the .egg-info dir) from that record.
    """
    global_options = global_options if global_options is not None else []
    if self.editable:
        self.install_editable(
            install_options, global_options, prefix=prefix,
        )
        return
    if self.is_wheel:
        version = wheel.wheel_version(self.source_dir)
        wheel.check_compatibility(version, self.name)

        self.move_wheel_files(
            self.source_dir, root=root, prefix=prefix, home=home,
            warn_script_location=warn_script_location,
            use_user_site=use_user_site, pycompile=pycompile,
        )
        self.install_succeeded = True
        return

    # Extend the list of global and install options passed on to
    # the setup.py call with the ones from the requirements file.
    # Options specified in requirements file override those
    # specified on the command line, since the last option given
    # to setup.py is the one that is used.
    global_options = list(global_options) + \
        self.options.get('global_options', [])
    install_options = list(install_options) + \
        self.options.get('install_options', [])

    if self.isolated:
        global_options = global_options + ["--no-user-cfg"]

    with TempDirectory(kind="record") as temp_dir:
        record_filename = os.path.join(temp_dir.path, 'install-record.txt')
        install_args = self.get_install_args(
            global_options, record_filename, root, prefix, pycompile,
        )
        msg = 'Running setup.py install for %s' % (self.name,)
        with open_spinner(msg) as spinner:
            with indent_log():
                with self.build_env:
                    call_subprocess(
                        install_args + install_options,
                        cwd=self.setup_py_dir,
                        show_stdout=False,
                        spinner=spinner,
                    )

        # No record file means setup.py did not actually install.
        if not os.path.exists(record_filename):
            logger.debug('Record file %s not found', record_filename)
            return
        self.install_succeeded = True

        def prepend_root(path):
            # Only absolute paths get relocated under --root.
            if root is None or not os.path.isabs(path):
                return path
            else:
                return change_root(root, path)

        # First pass over the record: locate the .egg-info directory.
        with open(record_filename) as f:
            for line in f:
                directory = os.path.dirname(line)
                if directory.endswith('.egg-info'):
                    egg_info_dir = prepend_root(directory)
                    break
            else:
                logger.warning(
                    'Could not find .egg-info directory in install record'
                    ' for %s',
                    self,
                )
                # FIXME: put the record somewhere
                # FIXME: should this be an error?
                return
        # Second pass: rewrite each recorded path relative to egg-info.
        new_lines = []
        with open(record_filename) as f:
            for line in f:
                filename = line.strip()
                if os.path.isdir(filename):
                    filename += os.path.sep
                new_lines.append(
                    os.path.relpath(prepend_root(filename), egg_info_dir)
                )
        new_lines.sort()
        ensure_dir(egg_info_dir)
        inst_files_path = os.path.join(egg_info_dir, 'installed-files.txt')
        with open(inst_files_path, 'w') as f:
            f.write('\n'.join(new_lines) + '\n')
| 822 | |||
def ensure_has_source_dir(self, parent_dir):
    """Ensure that a source_dir is set.

    This will create a temporary build dir if the name of the requirement
    isn't known yet.

    :param parent_dir: The ideal pip parent_dir for the source_dir.
        Generally src_dir for editables and build_dir for sdists.
    :return: self.source_dir
    """
    current = self.source_dir
    if current is None:
        current = self.build_location(parent_dir)
        self.source_dir = current
    return current
| 836 | |||
def get_install_args(self, global_options, record_filename, root, prefix,
                     pycompile):
    """Build the argv list for the ``setup.py install`` subprocess.

    Argument order matters to setuptools, so it is preserved exactly:
    interpreter, shim, global options, install command + record,
    root/prefix, compile flag, then virtualenv header path.
    """
    args = [sys.executable, "-u", '-c', SETUPTOOLS_SHIM % self.setup_py]
    args += list(global_options)
    args += ['install', '--record', record_filename,
             '--single-version-externally-managed']

    if root is not None:
        args += ['--root', root]
    if prefix is not None:
        args += ['--prefix', prefix]

    args.append("--compile" if pycompile else "--no-compile")

    if running_under_virtualenv():
        py_ver_str = 'python' + sysconfig.get_python_version()
        args += ['--install-headers',
                 os.path.join(sys.prefix, 'include', 'site',
                              py_ver_str, self.name)]

    return args
| 863 | |||
def remove_temporary_source(self):
    """Remove the source files from this requirement, if they are marked
    for deletion"""
    src = self.source_dir
    if src and os.path.exists(
            os.path.join(src, PIP_DELETE_MARKER_FILENAME)):
        logger.debug('Removing source in %s', src)
        rmtree(src)
    self.source_dir = None
    self._temp_build_dir.cleanup()
    self.build_env.cleanup()
| 874 | |||
def install_editable(self, install_options,
                     global_options=(), prefix=None):
    """Run ``setup.py develop`` to install this requirement in editable
    mode; dependencies are never installed (--no-deps is always passed).
    """
    logger.info('Running setup.py develop for %s', self.name)

    if self.isolated:
        # Keep per-user setup.cfg from affecting isolated builds.
        global_options = list(global_options) + ["--no-user-cfg"]

    if prefix:
        prefix_param = ['--prefix={}'.format(prefix)]
        install_options = list(install_options) + prefix_param

    with indent_log():
        # FIXME: should we do --install-headers here too?
        with self.build_env:
            call_subprocess(
                [
                    sys.executable,
                    '-c',
                    SETUPTOOLS_SHIM % self.setup_py
                ] +
                list(global_options) +
                ['develop', '--no-deps'] +
                list(install_options),

                cwd=self.setup_py_dir,
                show_stdout=False,
            )

    self.install_succeeded = True
| 904 | |||
def check_if_exists(self, use_user_site):
    """Find an installed distribution that satisfies or conflicts
    with this requirement, and set self.satisfied_by or
    self.conflicts_with appropriately.

    :return: True when some installed distribution matches this
        requirement's name (satisfying or conflicting), False otherwise.
    """
    if self.req is None:
        return False
    try:
        # get_distribution() will resolve the entire list of requirements
        # anyway, and we've already determined that we need the requirement
        # in question, so strip the marker so that we don't try to
        # evaluate it.
        no_marker = Requirement(str(self.req))
        no_marker.marker = None
        self.satisfied_by = pkg_resources.get_distribution(str(no_marker))
        if self.editable and self.satisfied_by:
            self.conflicts_with = self.satisfied_by
            # when installing editables, nothing pre-existing should ever
            # satisfy
            self.satisfied_by = None
        return True
    except pkg_resources.DistributionNotFound:
        return False
    except pkg_resources.VersionConflict:
        # Something with this name is installed, but at the wrong version.
        existing_dist = pkg_resources.get_distribution(
            self.req.name
        )
        if use_user_site:
            if dist_in_usersite(existing_dist):
                self.conflicts_with = existing_dist
            elif (running_under_virtualenv() and
                    dist_in_site_packages(existing_dist)):
                # A user-site install could never shadow site-packages
                # inside a virtualenv, so refuse outright.
                raise InstallationError(
                    "Will not install to the user site because it will "
                    "lack sys.path precedence to %s in %s" %
                    (existing_dist.project_name, existing_dist.location)
                )
        else:
            self.conflicts_with = existing_dist
    return True
| 945 | |||
@property
def is_wheel(self):
    """Whether this requirement points at a wheel (falsy when no link)."""
    link = self.link
    return link and link.is_wheel
| 949 | |||
def move_wheel_files(self, wheeldir, root=None, home=None, prefix=None,
                     warn_script_location=True, use_user_site=False,
                     pycompile=True):
    """Install this requirement's wheel contents from *wheeldir* by
    delegating to the module-level move_wheel_files helper."""
    options = dict(
        user=use_user_site,
        home=home,
        root=root,
        prefix=prefix,
        pycompile=pycompile,
        isolated=self.isolated,
        warn_script_location=warn_script_location,
    )
    move_wheel_files(self.name, self.req, wheeldir, **options)
| 963 | |||
def get_dist(self):
    """Return a pkg_resources.Distribution built from self.egg_info_path"""
    egg_info = self.egg_info_path('').rstrip(os.path.sep)
    base_dir = os.path.dirname(egg_info)
    dist_name = os.path.splitext(os.path.basename(egg_info))[0]
    metadata = pkg_resources.PathMetadata(base_dir, egg_info)
    return pkg_resources.Distribution(
        base_dir,
        project_name=dist_name,
        metadata=metadata,
    )
| 975 | |||
@property
def has_hash_options(self):
    """Return whether any known-good hashes are specified as options.

    These activate --require-hashes mode; hashes specified as part of a
    URL do not.
    """
    hash_options = self.options.get('hashes', {})
    return bool(hash_options)
| 985 | |||
def hashes(self, trust_internet=True):
    """Return a hash-comparer that considers my option- and URL-based
    hashes to be known-good.

    Hashes in URLs--ones embedded in the requirements file, not ones
    downloaded from an index server--are almost peers with ones from
    flags. They satisfy --require-hashes (whether it was implicitly or
    explicitly activated) but do not activate it. md5 and sha224 are not
    allowed in flags, which should nudge people toward good algos. We
    always OR all hashes together, even ones from URLs.

    :param trust_internet: Whether to trust URL-based (#md5=...) hashes
        downloaded from the internet, as by populate_link()

    """
    good_hashes = dict(self.options.get('hashes', {}))
    link = self.link if trust_internet else self.original_link
    if link and link.hash:
        good_hashes.setdefault(link.hash_name, []).append(link.hash)
    return Hashes(good_hashes)
| 1006 | |||
| 1007 | |||
| 1008 | def _strip_postfix(req): | ||
| 1009 | """ | ||
| 1010 | Strip req postfix ( -dev, 0.2, etc ) | ||
| 1011 | """ | ||
| 1012 | # FIXME: use package_to_requirement? | ||
| 1013 | match = re.search(r'^(.*?)(?:-dev|-\d.*)$', req) | ||
| 1014 | if match: | ||
| 1015 | # Strip off -dev, -0.2, etc. | ||
| 1016 | warnings.warn( | ||
| 1017 | "#egg cleanup for editable urls will be dropped in the future", | ||
| 1018 | RemovedInPip11Warning, | ||
| 1019 | ) | ||
| 1020 | req = match.group(1) | ||
| 1021 | return req | ||
| 1022 | |||
| 1023 | |||
def parse_editable(editable_req):
    """Parses an editable requirement into:
        - a requirement name
        - an URL
        - extras
        - editable options
    Accepted requirements:
        svn+http://blahblah@rev#egg=Foobar[baz]&subdirectory=version_subdir
        .[some_extra]

    :raises InstallationError: for a non-installable directory, an
        unsupported VCS scheme, or a VCS URL without an #egg fragment.
    """

    from pip._internal.index import Link

    url = editable_req

    # If a file path is specified with extras, strip off the extras.
    url_no_extras, extras = _strip_extras(url)

    if os.path.isdir(url_no_extras):
        if not os.path.exists(os.path.join(url_no_extras, 'setup.py')):
            raise InstallationError(
                "Directory %r is not installable. File 'setup.py' not found." %
                url_no_extras
            )
        # Treating it as code that has already been checked out
        url_no_extras = path_to_url(url_no_extras)

    if url_no_extras.lower().startswith('file:'):
        package_name = Link(url_no_extras).egg_fragment
        if extras:
            # Parse the extras via a placeholder requirement string.
            return (
                package_name,
                url_no_extras,
                Requirement("placeholder" + extras.lower()).extras,
            )
        else:
            return package_name, url_no_extras, None

    # Allow bare "vcs:..." URLs by prefixing the matching "vcs+" scheme.
    for version_control in vcs:
        if url.lower().startswith('%s:' % version_control):
            url = '%s+%s' % (version_control, url)
            break

    if '+' not in url:
        raise InstallationError(
            '%s should either be a path to a local project or a VCS url '
            'beginning with svn+, git+, hg+, or bzr+' %
            editable_req
        )

    vc_type = url.split('+', 1)[0].lower()

    if not vcs.get_backend(vc_type):
        error_message = 'For --editable=%s only ' % editable_req + \
            ', '.join([backend.name + '+URL' for backend in vcs.backends]) + \
            ' is currently supported'
        raise InstallationError(error_message)

    package_name = Link(url).egg_fragment
    if not package_name:
        raise InstallationError(
            "Could not detect requirement name for '%s', please specify one "
            "with #egg=your_package_name" % editable_req
        )
    return _strip_postfix(package_name), url, None
| 1089 | |||
| 1090 | |||
def deduce_helpful_msg(req):
    """Returns helpful msg in case requirements file does not exist,
    or cannot be parsed.

    :params req: Requirements file path
    """
    msg = ""
    if os.path.exists(req):
        msg = " It does exist."
        # Try to parse and check if it is a requirements file.
        try:
            with open(req, 'r') as fp:
                # parse first line only
                next(parse_requirements(fp.read()))
            msg += " The argument you provided " + \
                "(%s) appears to be a" % (req) + \
                " requirements file. If that is the" + \
                " case, use the '-r' flag to install" + \
                " the packages specified within it."
        except RequirementParseError:
            # Lazy %-args instead of eager interpolation, and a single-line
            # literal: the old backslash continuation inside the string made
            # the message fragile. exc_info=True attaches the traceback.
            logger.debug("Cannot parse '%s' as requirements file", req,
                         exc_info=True)
    else:
        msg += " File '%s' does not exist." % (req)
    return msg
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/req_set.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/req_set.py deleted file mode 100644 index 78b7d32..0000000 --- a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/req_set.py +++ /dev/null | |||
| @@ -1,164 +0,0 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | |||
| 3 | import logging | ||
| 4 | from collections import OrderedDict | ||
| 5 | |||
| 6 | from pip._internal.exceptions import InstallationError | ||
| 7 | from pip._internal.utils.logging import indent_log | ||
| 8 | from pip._internal.wheel import Wheel | ||
| 9 | |||
| 10 | logger = logging.getLogger(__name__) | ||
| 11 | |||
| 12 | |||
class RequirementSet(object):
    """An ordered collection of InstallRequirements to be installed.

    Tracks named requirements (with case-insensitive aliasing), unnamed
    requirements (URL/path reqs without an ``#egg=`` fragment), successfully
    downloaded requirements, and temporary sources that need cleanup.
    """

    def __init__(self, require_hashes=False):
        """Create a RequirementSet.

        :param require_hashes: whether every requirement must carry a hash.
        """

        # Canonical name -> InstallRequirement; OrderedDict keeps insertion
        # order so installation order is deterministic.
        self.requirements = OrderedDict()
        self.require_hashes = require_hashes

        # Mapping of alias: real_name (lower-cased name -> name as added)
        self.requirement_aliases = {}
        # Requirements added without a project name (no egg fragment).
        self.unnamed_requirements = []
        self.successfully_downloaded = []
        # Requirements whose temporary sources cleanup_files() will remove.
        self.reqs_to_cleanup = []

    def __str__(self):
        # Only user-supplied (top-level) requirements, sorted by name.
        reqs = [req for req in self.requirements.values()
                if not req.comes_from]
        reqs.sort(key=lambda req: req.name.lower())
        return ' '.join([str(req.req) for req in reqs])

    def __repr__(self):
        reqs = [req for req in self.requirements.values()]
        reqs.sort(key=lambda req: req.name.lower())
        reqs_str = ', '.join([str(req.req) for req in reqs])
        return ('<%s object; %d requirement(s): %s>'
                % (self.__class__.__name__, len(reqs), reqs_str))

    def add_requirement(self, install_req, parent_req_name=None,
                        extras_requested=None):
        """Add install_req as a requirement to install.

        :param parent_req_name: The name of the requirement that needed this
            added. The name is used because when multiple unnamed requirements
            resolve to the same name, we could otherwise end up with dependency
            links that point outside the Requirements set. parent_req must
            already be added. Note that None implies that this is a user
            supplied requirement, vs an inferred one.
        :param extras_requested: an iterable of extras used to evaluate the
            environment markers.
        :return: a two-tuple ``(to_scan, added_req)``. ``to_scan`` is either
            [] if the requirement is not applicable, or a one-element list of
            the requirement that should be scanned for dependencies.
            ``added_req`` is the canonical InstallRequirement now tracked
            for this name (None if the requirement was skipped or unnamed).
        """
        name = install_req.name
        if not install_req.match_markers(extras_requested):
            logger.info("Ignoring %s: markers '%s' don't match your "
                        "environment", install_req.name,
                        install_req.markers)
            return [], None

        # This check has to come after we filter requirements with the
        # environment markers.
        if install_req.link and install_req.link.is_wheel:
            wheel = Wheel(install_req.link.filename)
            if not wheel.supported():
                raise InstallationError(
                    "%s is not a supported wheel on this platform." %
                    wheel.filename
                )

        # This next bit is really a sanity check.
        assert install_req.is_direct == (parent_req_name is None), (
            "a direct req shouldn't have a parent and also, "
            "a non direct req should have a parent"
        )

        if not name:
            # url or path requirement w/o an egg fragment
            self.unnamed_requirements.append(install_req)
            return [install_req], None
        else:
            try:
                existing_req = self.get_requirement(name)
            except KeyError:
                existing_req = None
            # Two different user-supplied (direct) specs for the same name
            # with differing specifiers is an unresolvable conflict.
            if (parent_req_name is None and existing_req and not
                    existing_req.constraint and
                    existing_req.extras == install_req.extras and not
                    existing_req.req.specifier == install_req.req.specifier):
                raise InstallationError(
                    'Double requirement given: %s (already in %s, name=%r)'
                    % (install_req, existing_req, name))
            if not existing_req:
                # Add requirement
                self.requirements[name] = install_req
                # FIXME: what about other normalizations? E.g., _ vs. -?
                if name.lower() != name:
                    self.requirement_aliases[name.lower()] = name
                result = [install_req]
            else:
                # Assume there's no need to scan, and that we've already
                # encountered this for scanning.
                result = []
                if not install_req.constraint and existing_req.constraint:
                    if (install_req.link and not (existing_req.link and
                            install_req.link.path == existing_req.link.path)):
                        self.reqs_to_cleanup.append(install_req)
                        raise InstallationError(
                            "Could not satisfy constraints for '%s': "
                            "installation from path or url cannot be "
                            "constrained to a version" % name,
                        )
                    # If we're now installing a constraint, mark the existing
                    # object for real installation.
                    existing_req.constraint = False
                    existing_req.extras = tuple(
                        sorted(set(existing_req.extras).union(
                            set(install_req.extras))))
                    logger.debug("Setting %s extras to: %s",
                                 existing_req, existing_req.extras)
                    # And now we need to scan this.
                    result = [existing_req]
                # Canonicalise to the already-added object for the backref
                # check below.
                install_req = existing_req

        # We return install_req here to allow for the caller to add it to
        # the dependency information for the parent package.
        return result, install_req

    def has_requirement(self, project_name):
        """Return True if a non-constraint requirement with this name
        (matched case-insensitively, via aliases) has been added."""
        name = project_name.lower()
        if (name in self.requirements and
                not self.requirements[name].constraint or
                name in self.requirement_aliases and
                not self.requirements[self.requirement_aliases[name]].constraint):
            return True
        return False

    @property
    def has_requirements(self):
        # Truthy when at least one non-constraint or unnamed requirement
        # exists; constraints alone do not count as work to do.
        return list(req for req in self.requirements.values() if not
                    req.constraint) or self.unnamed_requirements

    def get_requirement(self, project_name):
        """Look up a requirement by name, trying the exact name, the
        lower-cased name, and recorded aliases, in that order.

        :raises KeyError: if no requirement with that name was added.
        """
        for name in project_name, project_name.lower():
            if name in self.requirements:
                return self.requirements[name]
            if name in self.requirement_aliases:
                return self.requirements[self.requirement_aliases[name]]
        raise KeyError("No project with the name %r" % project_name)

    def cleanup_files(self):
        """Clean up files, remove builds."""
        logger.debug('Cleaning up...')
        with indent_log():
            for req in self.reqs_to_cleanup:
                req.remove_temporary_source()
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/req_uninstall.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/req_uninstall.py deleted file mode 100644 index a47520f..0000000 --- a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/req/req_uninstall.py +++ /dev/null | |||
| @@ -1,455 +0,0 @@ | |||
| 1 | from __future__ import absolute_import | ||
| 2 | |||
| 3 | import csv | ||
| 4 | import functools | ||
| 5 | import logging | ||
| 6 | import os | ||
| 7 | import sys | ||
| 8 | import sysconfig | ||
| 9 | |||
| 10 | from pip._vendor import pkg_resources | ||
| 11 | |||
| 12 | from pip._internal.compat import WINDOWS, cache_from_source, uses_pycache | ||
| 13 | from pip._internal.exceptions import UninstallationError | ||
| 14 | from pip._internal.locations import bin_py, bin_user | ||
| 15 | from pip._internal.utils.logging import indent_log | ||
| 16 | from pip._internal.utils.misc import ( | ||
| 17 | FakeFile, ask, dist_in_usersite, dist_is_local, egg_link_path, is_local, | ||
| 18 | normalize_path, renames, | ||
| 19 | ) | ||
| 20 | from pip._internal.utils.temp_dir import TempDirectory | ||
| 21 | |||
| 22 | logger = logging.getLogger(__name__) | ||
| 23 | |||
| 24 | |||
def _script_names(dist, script_name, is_gui):
    """Create the fully qualified name of the files created by
    {console,gui}_scripts for the given ``dist``.
    Returns the list of file names
    """
    # Scripts live in the user bin dir for usersite installs.
    bin_dir = bin_user if dist_in_usersite(dist) else bin_py
    exe_name = os.path.join(bin_dir, script_name)
    names = [exe_name]
    if WINDOWS:
        # Windows installs a launcher .exe, its manifest, and a
        # -script.py/-script.pyw wrapper alongside the script.
        names.append(exe_name + '.exe')
        names.append(exe_name + '.exe.manifest')
        suffix = '-script.pyw' if is_gui else '-script.py'
        names.append(exe_name + suffix)
    return names
| 44 | |||
| 45 | |||
| 46 | def _unique(fn): | ||
| 47 | @functools.wraps(fn) | ||
| 48 | def unique(*args, **kw): | ||
| 49 | seen = set() | ||
| 50 | for item in fn(*args, **kw): | ||
| 51 | if item not in seen: | ||
| 52 | seen.add(item) | ||
| 53 | yield item | ||
| 54 | return unique | ||
| 55 | |||
| 56 | |||
@_unique
def uninstallation_paths(dist):
    """
    Yield all the uninstallation paths for dist based on RECORD-without-.pyc

    Yield paths to all the files in RECORD. For each .py file in RECORD, add
    the .pyc in the same directory.

    UninstallPathSet.add() takes care of the __pycache__ .pyc.
    """
    reader = csv.reader(FakeFile(dist.get_metadata_lines('RECORD')))
    for row in reader:
        # First CSV column is the path relative to the install location.
        path = os.path.join(dist.location, row[0])
        yield path
        if not path.endswith('.py'):
            continue
        # Also schedule the legacy same-directory .pyc for removal.
        directory, filename = os.path.split(path)
        yield os.path.join(directory, filename[:-3] + '.pyc')
| 76 | |||
| 77 | |||
def compact(paths):
    """Compact a path set to contain the minimal number of paths
    necessary to contain all paths in the set. If /a/path/ and
    /a/path/to/a/file.txt are both in the set, leave only the
    shorter path."""

    sep = os.path.sep
    kept = set()
    # Shortest first, so any potential ancestor is already in `kept`
    # by the time one of its descendants is examined.
    for candidate in sorted(paths, key=len):
        covered = False
        for prefix in kept:
            # A trailing '*' (as added by compress_for_output_listing)
            # is ignored when matching.
            base = prefix.rstrip("*")
            if (candidate.startswith(base) and
                    candidate[len(base.rstrip(sep))] == sep):
                covered = True
                break
        if not covered:
            kept.add(candidate)
    return kept
| 95 | |||
| 96 | |||
def compress_for_output_listing(paths):
    """Returns a tuple of 2 sets of which paths to display to user

    The first set contains paths that would be deleted. Files of a package
    are not added and the top-level directory of the package has a '*' added
    at the end - to signify that all it's contents are removed.

    The second set contains files that would have been skipped in the above
    folders.
    """
    # Partition the input into package folders (directories holding an
    # __init__.py or .dist-info entry) and plain files; .pyc files are
    # ignored entirely.
    folders = set()
    files = set()
    for path in paths:
        if path.endswith(".pyc"):
            continue
        if path.endswith("__init__.py") or ".dist-info" in path:
            folders.add(os.path.dirname(path))
        files.add(path)

    folders = compact(folders)

    # Walk the kept folders (os.walk catches extra folders that might get
    # added) collecting files that live there but are NOT scheduled for
    # removal - those would be skipped.
    will_skip = set()
    for folder in folders:
        for dirpath, _, dirfiles in os.walk(folder):
            for fname in dirfiles:
                if fname.endswith(".pyc"):
                    continue
                candidate = os.path.normcase(os.path.join(dirpath, fname))
                if os.path.isfile(candidate) and candidate not in files:
                    will_skip.add(candidate)

    will_remove = files | {
        os.path.join(folder, "*") for folder in folders
    }
    return will_remove, will_skip
| 141 | |||
| 142 | |||
class UninstallPathSet(object):
    """A set of file paths to be removed in the uninstallation of a
    requirement."""
    def __init__(self, dist):
        # Files/directories scheduled for removal.
        self.paths = set()
        # Paths refused because they fall outside the local environment.
        self._refuse = set()
        # Mapping of .pth file path -> UninstallPthEntries to edit.
        self.pth = {}
        self.dist = dist
        # Removed files are first stashed here so remove() can be rolled back.
        self.save_dir = TempDirectory(kind="uninstall")
        # Paths actually moved by remove(), in order, for rollback().
        self._moved_paths = []

    def _permitted(self, path):
        """
        Return True if the given path is one we are permitted to
        remove/modify, False otherwise.

        """
        return is_local(path)

    def add(self, path):
        """Schedule *path* for removal if it exists and is permitted;
        otherwise record it in the refused set."""
        head, tail = os.path.split(path)

        # we normalize the head to resolve parent directory symlinks, but not
        # the tail, since we only want to uninstall symlinks, not their targets
        path = os.path.join(normalize_path(head), os.path.normcase(tail))

        if not os.path.exists(path):
            return
        if self._permitted(path):
            self.paths.add(path)
        else:
            self._refuse.add(path)

        # __pycache__ files can show up after 'installed-files.txt' is created,
        # due to imports
        if os.path.splitext(path)[1] == '.py' and uses_pycache:
            self.add(cache_from_source(path))

    def add_pth(self, pth_file, entry):
        """Schedule removal of *entry* from the given .pth file."""
        pth_file = normalize_path(pth_file)
        if self._permitted(pth_file):
            if pth_file not in self.pth:
                self.pth[pth_file] = UninstallPthEntries(pth_file)
            self.pth[pth_file].add(entry)
        else:
            self._refuse.add(pth_file)

    def _stash(self, path):
        # Mirror the path's drive-less layout under the temporary save dir.
        return os.path.join(
            self.save_dir.path, os.path.splitdrive(path)[1].lstrip(os.path.sep)
        )

    def remove(self, auto_confirm=False, verbose=False):
        """Remove paths in ``self.paths`` with confirmation (unless
        ``auto_confirm`` is True)."""

        if not self.paths:
            logger.info(
                "Can't uninstall '%s'. No files were found to uninstall.",
                self.dist.project_name,
            )
            return

        dist_name_version = (
            self.dist.project_name + "-" + self.dist.version
        )
        logger.info('Uninstalling %s:', dist_name_version)

        with indent_log():
            if auto_confirm or self._allowed_to_proceed(verbose):
                self.save_dir.create()

                # Move (not delete) each path into the stash so that
                # rollback() can restore it on failure.
                for path in sorted(compact(self.paths)):
                    new_path = self._stash(path)
                    logger.debug('Removing file or directory %s', path)
                    self._moved_paths.append(path)
                    renames(path, new_path)
                for pth in self.pth.values():
                    pth.remove()

                logger.info('Successfully uninstalled %s', dist_name_version)

    def _allowed_to_proceed(self, verbose):
        """Display which files would be deleted and prompt for confirmation
        """

        def _display(msg, paths):
            if not paths:
                return

            logger.info(msg)
            with indent_log():
                for path in sorted(compact(paths)):
                    logger.info(path)

        if not verbose:
            will_remove, will_skip = compress_for_output_listing(self.paths)
        else:
            # In verbose mode, display all the files that are going to be
            # deleted.
            will_remove = list(self.paths)
            will_skip = set()

        _display('Would remove:', will_remove)
        _display('Would not remove (might be manually added):', will_skip)
        _display('Would not remove (outside of prefix):', self._refuse)

        return ask('Proceed (y/n)? ', ('y', 'n')) == 'y'

    def rollback(self):
        """Rollback the changes previously made by remove()."""
        if self.save_dir.path is None:
            logger.error(
                "Can't roll back %s; was not uninstalled",
                self.dist.project_name,
            )
            return False
        logger.info('Rolling back uninstall of %s', self.dist.project_name)
        # Move every stashed path back to its original location.
        for path in self._moved_paths:
            tmp_path = self._stash(path)
            logger.debug('Replacing %s', path)
            renames(tmp_path, path)
        for pth in self.pth.values():
            pth.rollback()

    def commit(self):
        """Remove temporary save dir: rollback will no longer be possible."""
        self.save_dir.cleanup()
        self._moved_paths = []

    @classmethod
    def from_dist(cls, dist):
        """Build an UninstallPathSet for *dist*, dispatching on how the
        distribution was installed (.egg-info, .dist-info, easy_install
        .egg, develop egg-link, or distutils)."""
        dist_path = normalize_path(dist.location)
        if not dist_is_local(dist):
            logger.info(
                "Not uninstalling %s at %s, outside environment %s",
                dist.key,
                dist_path,
                sys.prefix,
            )
            return cls(dist)

        if dist_path in {p for p in {sysconfig.get_path("stdlib"),
                                     sysconfig.get_path("platstdlib")}
                         if p}:
            logger.info(
                "Not uninstalling %s at %s, as it is in the standard library.",
                dist.key,
                dist_path,
            )
            return cls(dist)

        paths_to_remove = cls(dist)
        develop_egg_link = egg_link_path(dist)
        develop_egg_link_egg_info = '{}.egg-info'.format(
            pkg_resources.to_filename(dist.project_name))
        egg_info_exists = dist.egg_info and os.path.exists(dist.egg_info)
        # Special case for distutils installed package
        distutils_egg_info = getattr(dist._provider, 'path', None)

        # Uninstall cases order do matter as in the case of 2 installs of the
        # same package, pip needs to uninstall the currently detected version
        if (egg_info_exists and dist.egg_info.endswith('.egg-info') and
                not dist.egg_info.endswith(develop_egg_link_egg_info)):
            # if dist.egg_info.endswith(develop_egg_link_egg_info), we
            # are in fact in the develop_egg_link case
            paths_to_remove.add(dist.egg_info)
            if dist.has_metadata('installed-files.txt'):
                for installed_file in dist.get_metadata(
                        'installed-files.txt').splitlines():
                    path = os.path.normpath(
                        os.path.join(dist.egg_info, installed_file)
                    )
                    paths_to_remove.add(path)
            # FIXME: need a test for this elif block
            # occurs with --single-version-externally-managed/--record outside
            # of pip
            elif dist.has_metadata('top_level.txt'):
                if dist.has_metadata('namespace_packages.txt'):
                    namespaces = dist.get_metadata('namespace_packages.txt')
                else:
                    namespaces = []
                for top_level_pkg in [
                        p for p
                        in dist.get_metadata('top_level.txt').splitlines()
                        if p and p not in namespaces]:
                    path = os.path.join(dist.location, top_level_pkg)
                    paths_to_remove.add(path)
                    paths_to_remove.add(path + '.py')
                    paths_to_remove.add(path + '.pyc')
                    paths_to_remove.add(path + '.pyo')

        elif distutils_egg_info:
            raise UninstallationError(
                "Cannot uninstall {!r}. It is a distutils installed project "
                "and thus we cannot accurately determine which files belong "
                "to it which would lead to only a partial uninstall.".format(
                    dist.project_name,
                )
            )

        elif dist.location.endswith('.egg'):
            # package installed by easy_install
            # We cannot match on dist.egg_name because it can slightly vary
            # i.e. setuptools-0.6c11-py2.6.egg vs setuptools-0.6rc11-py2.6.egg
            paths_to_remove.add(dist.location)
            easy_install_egg = os.path.split(dist.location)[1]
            easy_install_pth = os.path.join(os.path.dirname(dist.location),
                                            'easy-install.pth')
            paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg)

        elif egg_info_exists and dist.egg_info.endswith('.dist-info'):
            for path in uninstallation_paths(dist):
                paths_to_remove.add(path)

        elif develop_egg_link:
            # develop egg
            with open(develop_egg_link, 'r') as fh:
                link_pointer = os.path.normcase(fh.readline().strip())
            assert (link_pointer == dist.location), (
                'Egg-link %s does not match installed location of %s '
                '(at %s)' % (link_pointer, dist.project_name, dist.location)
            )
            paths_to_remove.add(develop_egg_link)
            easy_install_pth = os.path.join(os.path.dirname(develop_egg_link),
                                            'easy-install.pth')
            paths_to_remove.add_pth(easy_install_pth, dist.location)

        else:
            logger.debug(
                'Not sure how to uninstall: %s - Check: %s',
                dist, dist.location,
            )

        # find distutils scripts= scripts
        if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'):
            for script in dist.metadata_listdir('scripts'):
                if dist_in_usersite(dist):
                    bin_dir = bin_user
                else:
                    bin_dir = bin_py
                paths_to_remove.add(os.path.join(bin_dir, script))
                if WINDOWS:
                    paths_to_remove.add(os.path.join(bin_dir, script) + '.bat')

        # find console_scripts
        _scripts_to_remove = []
        console_scripts = dist.get_entry_map(group='console_scripts')
        for name in console_scripts.keys():
            _scripts_to_remove.extend(_script_names(dist, name, False))
        # find gui_scripts
        gui_scripts = dist.get_entry_map(group='gui_scripts')
        for name in gui_scripts.keys():
            _scripts_to_remove.extend(_script_names(dist, name, True))

        for s in _scripts_to_remove:
            paths_to_remove.add(s)

        return paths_to_remove
| 402 | |||
| 403 | |||
class UninstallPthEntries(object):
    """Tracks entries to delete from a single .pth file, with rollback."""

    def __init__(self, pth_file):
        if not os.path.isfile(pth_file):
            raise UninstallationError(
                "Cannot remove entries from nonexistent file %s" % pth_file
            )
        self.file = pth_file
        self.entries = set()
        # Original file contents, captured by remove() for rollback().
        self._saved_lines = None

    def add(self, entry):
        entry = os.path.normcase(entry)
        # On Windows, os.path.normcase converts the entry to use
        # backslashes. This is correct for entries that describe absolute
        # paths outside of site-packages, but all the others use forward
        # slashes.
        if WINDOWS and not os.path.splitdrive(entry)[0]:
            entry = entry.replace('\\', '/')
        self.entries.add(entry)

    def remove(self):
        """Rewrite the .pth file without the tracked entries."""
        logger.debug('Removing pth entries from %s:', self.file)
        with open(self.file, 'rb') as fh:
            # windows uses '\r\n' with py3k, but uses '\n' with py2.x
            lines = fh.readlines()
            self._saved_lines = lines
        endline = '\r\n' if any(b'\r\n' in line for line in lines) else '\n'
        # handle missing trailing newline
        if lines and not lines[-1].endswith(endline.encode("utf-8")):
            lines[-1] = lines[-1] + endline.encode("utf-8")
        for entry in self.entries:
            logger.debug('Removing entry: %s', entry)
            try:
                lines.remove((entry + endline).encode("utf-8"))
            except ValueError:
                # Entry already absent; nothing to do.
                pass
        with open(self.file, 'wb') as fh:
            fh.writelines(lines)

    def rollback(self):
        """Restore the .pth file to the contents saved by remove()."""
        if self._saved_lines is None:
            logger.error(
                'Cannot roll back changes to %s, none were made', self.file
            )
            return False
        logger.debug('Rolling %s back to previous state', self.file)
        with open(self.file, 'wb') as fh:
            fh.writelines(self._saved_lines)
        return True
