diff options
Diffstat (limited to 'venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/wheel.py')
| -rw-r--r-- | venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/wheel.py | 817 |
1 file changed, 817 insertions, 0 deletions
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/wheel.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/wheel.py new file mode 100644 index 0000000..36459dd --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/wheel.py | |||
| @@ -0,0 +1,817 @@ | |||
| 1 | """ | ||
| 2 | Support for installing and building the "wheel" binary package format. | ||
| 3 | """ | ||
| 4 | from __future__ import absolute_import | ||
| 5 | |||
| 6 | import collections | ||
| 7 | import compileall | ||
| 8 | import copy | ||
| 9 | import csv | ||
| 10 | import hashlib | ||
| 11 | import logging | ||
| 12 | import os.path | ||
| 13 | import re | ||
| 14 | import shutil | ||
| 15 | import stat | ||
| 16 | import sys | ||
| 17 | import warnings | ||
| 18 | from base64 import urlsafe_b64encode | ||
| 19 | from email.parser import Parser | ||
| 20 | |||
| 21 | from pip._vendor import pkg_resources | ||
| 22 | from pip._vendor.distlib.scripts import ScriptMaker | ||
| 23 | from pip._vendor.packaging.utils import canonicalize_name | ||
| 24 | from pip._vendor.six import StringIO | ||
| 25 | |||
| 26 | from pip._internal import pep425tags | ||
| 27 | from pip._internal.build_env import BuildEnvironment | ||
| 28 | from pip._internal.download import path_to_url, unpack_url | ||
| 29 | from pip._internal.exceptions import ( | ||
| 30 | InstallationError, InvalidWheelFilename, UnsupportedWheel, | ||
| 31 | ) | ||
| 32 | from pip._internal.locations import ( | ||
| 33 | PIP_DELETE_MARKER_FILENAME, distutils_scheme, | ||
| 34 | ) | ||
| 35 | from pip._internal.utils.logging import indent_log | ||
| 36 | from pip._internal.utils.misc import ( | ||
| 37 | call_subprocess, captured_stdout, ensure_dir, read_chunks, | ||
| 38 | ) | ||
| 39 | from pip._internal.utils.setuptools_build import SETUPTOOLS_SHIM | ||
| 40 | from pip._internal.utils.temp_dir import TempDirectory | ||
| 41 | from pip._internal.utils.typing import MYPY_CHECK_RUNNING | ||
| 42 | from pip._internal.utils.ui import open_spinner | ||
| 43 | |||
if MYPY_CHECK_RUNNING:
    from typing import Dict, List, Optional

# File extension that identifies a built wheel archive.
wheel_ext = '.whl'

# Highest Wheel-Version (major, minor) this module knows how to install;
# see check_compatibility() below.
VERSION_COMPATIBLE = (1, 0)


logger = logging.getLogger(__name__)
| 53 | |||
| 54 | |||
def rehash(path, algo='sha256', blocksize=1 << 20):
    """Return (hash, length) for path using hashlib.new(algo).

    The hash is rendered in the PEP 376 RECORD style:
    ``<algo>=<urlsafe-base64-digest-without-padding>``.

    :param path: file to hash
    :param algo: any algorithm name accepted by ``hashlib.new``
    :param blocksize: read size per chunk, in bytes
    """
    h = hashlib.new(algo)
    length = 0
    with open(path, 'rb') as f:
        # Read in fixed-size chunks so large files don't load into memory.
        for block in iter(lambda: f.read(blocksize), b''):
            length += len(block)
            h.update(block)
    # Bug fix: the prefix was previously hard-coded to 'sha256=' even when a
    # different algorithm was requested via ``algo``.
    digest = '{}={}'.format(
        algo,
        urlsafe_b64encode(h.digest()).decode('latin1').rstrip('='),
    )
    return (digest, length)
| 67 | |||
| 68 | |||
def open_for_csv(name, mode):
    """Open *name* suitably for the csv module on this Python version.

    The csv module wants binary-mode files on Python 2 but text-mode files
    opened with ``newline=''`` on Python 3.

    :param name: path of the file to open
    :param mode: base mode, e.g. 'r' or 'w+'
    """
    if sys.version_info[0] < 3:
        extra_kwargs = {}
        mode_suffix = 'b'  # csv on Python 2 requires binary mode
    else:
        extra_kwargs = {'newline': ''}  # csv handles line endings itself
        mode_suffix = ''
    # Renamed locals: the original shadowed the builtin ``bin``.
    return open(name, mode + mode_suffix, **extra_kwargs)
| 77 | |||
| 78 | |||
def fix_script(path):
    """Replace a ``#!python`` shebang with the running interpreter's path.

    Return True if the file was changed, False if the file exists but has
    no ``#!python`` shebang, and None if *path* is not a regular file.
    """
    # XXX RECORD hashes will need to be updated
    if not os.path.isfile(path):
        return None

    with open(path, 'rb') as script:
        shebang = script.readline()
        if not shebang.startswith(b'#!python'):
            return False
        interpreter = sys.executable.encode(sys.getfilesystemencoding())
        shebang = b'#!' + interpreter + os.linesep.encode("ascii")
        remainder = script.read()

    with open(path, 'wb') as script:
        script.write(shebang)
        script.write(remainder)
    return True
| 95 | |||
| 96 | |||
# Matches a ``<name>[-<version>].dist-info`` directory name.  Both inner
# groups are non-greedy, so the first hyphen splits name from version.
dist_info_re = re.compile(r"""^(?P<namever>(?P<name>.+?)(-(?P<ver>.+?))?)
                                \.dist-info$""", re.VERBOSE)
| 99 | |||
| 100 | |||
def root_is_purelib(name, wheeldir):
    """
    Return True if the extracted wheel in wheeldir should go into purelib.
    """
    # Wheel metadata directories use "_" where the project name has "-".
    folded_name = name.replace("-", "_")
    for entry in os.listdir(wheeldir):
        matched = dist_info_re.match(entry)
        if matched is None or matched.group('name') != folded_name:
            continue
        wheel_metadata = os.path.join(wheeldir, entry, 'WHEEL')
        with open(wheel_metadata) as wheel_file:
            # Case-insensitive scan for the Root-Is-Purelib flag.
            if any(line.lower().rstrip() == "root-is-purelib: true"
                   for line in wheel_file):
                return True
    return False
| 115 | |||
| 116 | |||
def get_entrypoints(filename):
    """Parse an entry_points.txt file.

    Returns a pair of dicts ``(console, gui)`` mapping script name to a
    ``module:function`` string; both are empty when the file is absent.
    """
    if not os.path.exists(filename):
        return {}, {}

    # entry_points data can be passed around as an arbitrary string, so it
    # may not be a valid INI file.  Normalise it by stripping each line's
    # surrounding whitespace before handing it to the parser.
    normalized = StringIO()
    with open(filename) as fp:
        for raw_line in fp:
            normalized.write(raw_line.strip())
            normalized.write("\n")
    normalized.seek(0)

    entry_points = pkg_resources.EntryPoint.parse_map(normalized)

    def _as_pair(ep):
        """Render an EntryPoint as ["name", "module:function"]."""
        return str(ep).replace(" ", "").split("=")

    console = dict(
        _as_pair(ep)
        for ep in entry_points.get('console_scripts', {}).values()
    )
    gui = dict(
        _as_pair(ep)
        for ep in entry_points.get('gui_scripts', {}).values()
    )
    return console, gui
| 146 | |||
| 147 | |||
def message_about_scripts_not_on_PATH(scripts):
    # type: (List[str]) -> Optional[str]
    """Determine if any scripts are not on PATH and format a warning.

    Returns a warning message if one or more scripts are not on PATH,
    otherwise None.
    """
    if not scripts:
        return None

    # Bucket the script basenames by their installation directory.
    dir_to_scripts = collections.defaultdict(set)  # type: Dict[str, set]
    for destfile in scripts:
        dir_to_scripts[os.path.dirname(destfile)].add(
            os.path.basename(destfile))

    # Directories already on PATH never warrant a warning; neither does the
    # directory containing sys.executable, which covers venv invocations
    # without the venv having been activated.
    ignored_dirs = {
        os.path.normcase(entry)
        for entry in os.environ["PATH"].split(os.pathsep)
    }
    ignored_dirs.add(os.path.normcase(os.path.dirname(sys.executable)))

    warn_for = {
        directory: names
        for directory, names in dir_to_scripts.items()
        if os.path.normcase(directory) not in ignored_dirs
    }
    if not warn_for:
        return None

    # Build one line per offending directory, then a closing suggestion.
    msg_lines = []
    for directory, names in warn_for.items():
        ordered = sorted(names)
        if len(ordered) == 1:
            start_text = "script {} is".format(ordered[0])
        else:
            start_text = "scripts {} are".format(
                ", ".join(ordered[:-1]) + " and " + ordered[-1]
            )
        msg_lines.append(
            "The {} installed in '{}' which is not on PATH."
            .format(start_text, directory)
        )

    last_line_fmt = (
        "Consider adding {} to PATH or, if you prefer "
        "to suppress this warning, use --no-warn-script-location."
    )
    if len(msg_lines) == 1:
        msg_lines.append(last_line_fmt.format("this directory"))
    else:
        msg_lines.append(last_line_fmt.format("these directories"))

    return "\n".join(msg_lines)
| 206 | |||
| 207 | |||
def move_wheel_files(name, req, wheeldir, user=False, home=None, root=None,
                     pycompile=True, scheme=None, isolated=False, prefix=None,
                     warn_script_location=True):
    """Install a wheel that has already been unpacked into *wheeldir*.

    :param name: project name the wheel provides
    :param req: the InstallRequirement being installed
    :param wheeldir: directory containing the unpacked wheel contents
    :param pycompile: byte-compile the modules before installing when True
    :param scheme: explicit distutils install scheme; derived from the
        user/home/root/isolated/prefix arguments when not supplied
    :param warn_script_location: emit a warning when console scripts are
        installed into a directory that is not on PATH
    """

    if not scheme:
        scheme = distutils_scheme(
            name, user=user, home=home, root=root, isolated=isolated,
            prefix=prefix,
        )

    if root_is_purelib(name, wheeldir):
        lib_dir = scheme['purelib']
    else:
        lib_dir = scheme['platlib']

    info_dir = []
    data_dirs = []
    source = wheeldir.rstrip(os.path.sep) + os.path.sep

    # Record details of the files moved
    #   installed = files copied from the wheel to the destination
    #   changed = files changed while installing (scripts #! line typically)
    #   generated = files newly generated during the install (script wrappers)
    installed = {}
    changed = set()
    generated = []

    # Compile all of the pyc files that we're going to be installing
    if pycompile:
        with captured_stdout() as stdout:
            with warnings.catch_warnings():
                warnings.filterwarnings('ignore')
                compileall.compile_dir(source, force=True, quiet=True)
        logger.debug(stdout.getvalue())

    def normpath(src, p):
        """Render src relative to p with forward slashes (RECORD style)."""
        return os.path.relpath(src, p).replace(os.path.sep, '/')

    def record_installed(srcfile, destfile, modified=False):
        """Map archive RECORD paths to installation RECORD paths."""
        oldpath = normpath(srcfile, wheeldir)
        newpath = normpath(destfile, lib_dir)
        installed[oldpath] = newpath
        if modified:
            changed.add(destfile)

    def clobber(source, dest, is_base, fixer=None, filter=None):
        """Copy the tree at source over dest, recording what was installed."""
        ensure_dir(dest)  # common for the 'include' path

        for dir, subdirs, files in os.walk(source):
            basedir = dir[len(source):].lstrip(os.path.sep)
            destdir = os.path.join(dest, basedir)
            if is_base and basedir.split(os.path.sep, 1)[0].endswith('.data'):
                continue
            for s in subdirs:
                destsubdir = os.path.join(dest, basedir, s)
                if is_base and basedir == '' and destsubdir.endswith('.data'):
                    data_dirs.append(s)
                    continue
                elif (is_base and
                        s.endswith('.dist-info') and
                        canonicalize_name(s).startswith(
                            canonicalize_name(req.name))):
                    assert not info_dir, ('Multiple .dist-info directories: ' +
                                          destsubdir + ', ' +
                                          ', '.join(info_dir))
                    info_dir.append(destsubdir)
            for f in files:
                # Skip unwanted files
                if filter and filter(f):
                    continue
                srcfile = os.path.join(dir, f)
                destfile = os.path.join(dest, basedir, f)
                # directory creation is lazy and after the file filtering above
                # to ensure we don't install empty dirs; empty dirs can't be
                # uninstalled.
                ensure_dir(destdir)

                # We use copyfile (not move, copy, or copy2) to be extra sure
                # that we are not moving directories over (copyfile fails for
                # directories) as well as to ensure that we are not copying
                # over any metadata because we want more control over what
                # metadata we actually copy over.
                shutil.copyfile(srcfile, destfile)

                # Copy over the metadata for the file, currently this only
                # includes the atime and mtime.
                st = os.stat(srcfile)
                if hasattr(os, "utime"):
                    os.utime(destfile, (st.st_atime, st.st_mtime))

                # If our file is executable, then make our destination file
                # executable.
                if os.access(srcfile, os.X_OK):
                    st = os.stat(srcfile)
                    permissions = (
                        st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
                    )
                    os.chmod(destfile, permissions)

                changed = False
                if fixer:
                    changed = fixer(destfile)
                record_installed(srcfile, destfile, changed)

    clobber(source, lib_dir, True)

    assert info_dir, "%s .dist-info directory not found" % req

    # Get the defined entry points
    ep_file = os.path.join(info_dir[0], 'entry_points.txt')
    console, gui = get_entrypoints(ep_file)

    def is_entrypoint_wrapper(name):
        # EP, EP.exe and EP-script.py are scripts generated for
        # entry point EP by setuptools
        if name.lower().endswith('.exe'):
            matchname = name[:-4]
        elif name.lower().endswith('-script.py'):
            matchname = name[:-10]
        elif name.lower().endswith(".pya"):
            matchname = name[:-4]
        else:
            matchname = name
        # Ignore setuptools-generated scripts
        return (matchname in console or matchname in gui)

    for datadir in data_dirs:
        fixer = None
        filter = None
        for subdir in os.listdir(os.path.join(wheeldir, datadir)):
            fixer = None
            if subdir == 'scripts':
                fixer = fix_script
                filter = is_entrypoint_wrapper
            source = os.path.join(wheeldir, datadir, subdir)
            dest = scheme[subdir]
            clobber(source, dest, False, fixer=fixer, filter=filter)

    maker = ScriptMaker(None, scheme['scripts'])

    # Ensure old scripts are overwritten.
    # See https://github.com/pypa/pip/issues/1800
    maker.clobber = True

    # Ensure we don't generate any variants for scripts because this is almost
    # never what somebody wants.
    # See https://bitbucket.org/pypa/distlib/issue/35/
    maker.variants = {''}

    # This is required because otherwise distlib creates scripts that are not
    # executable.
    # See https://bitbucket.org/pypa/distlib/issue/32/
    maker.set_mode = True

    # Simplify the script and fix the fact that the default script swallows
    # every single stack trace.
    # See https://bitbucket.org/pypa/distlib/issue/34/
    # See https://bitbucket.org/pypa/distlib/issue/33/
    def _get_script_text(entry):
        if entry.suffix is None:
            raise InstallationError(
                "Invalid script entry point: %s for req: %s - A callable "
                "suffix is required. Cf https://packaging.python.org/en/"
                "latest/distributing.html#console-scripts for more "
                "information." % (entry, req)
            )
        return maker.script_template % {
            "module": entry.prefix,
            "import_name": entry.suffix.split(".")[0],
            "func": entry.suffix,
        }

    maker._get_script_text = _get_script_text
    maker.script_template = r"""# -*- coding: utf-8 -*-
import re
import sys

from %(module)s import %(import_name)s

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(%(func)s())
"""

    # Special case pip and setuptools to generate versioned wrappers
    #
    # The issue is that some projects (specifically, pip and setuptools) use
    # code in setup.py to create "versioned" entry points - pip2.7 on Python
    # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into
    # the wheel metadata at build time, and so if the wheel is installed with
    # a *different* version of Python the entry points will be wrong. The
    # correct fix for this is to enhance the metadata to be able to describe
    # such versioned entry points, but that won't happen till Metadata 2.0 is
    # available.
    # In the meantime, projects using versioned entry points will either have
    # incorrect versioned entry points, or they will not be able to distribute
    # "universal" wheels (i.e., they will need a wheel per Python version).
    #
    # Because setuptools and pip are bundled with _ensurepip and virtualenv,
    # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we
    # override the versioned entry points in the wheel and generate the
    # correct ones. This code is purely a short-term measure until Metadata
    # 2.0 is available.
    #
    # To add the level of hack in this section of code, in order to support
    # ensurepip this code will look for an ``ENSUREPIP_OPTIONS`` environment
    # variable which will control which version scripts get installed.
    #
    # ENSUREPIP_OPTIONS=altinstall
    #   - Only pipX.Y and easy_install-X.Y will be generated and installed
    # ENSUREPIP_OPTIONS=install
    #   - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note
    #     that this option is technically if ENSUREPIP_OPTIONS is set and is
    #     not altinstall
    # DEFAULT
    #   - The default behavior is to install pip, pipX, pipX.Y, easy_install
    #     and easy_install-X.Y.
    #
    # Bug fix: derive the version strings from sys.version_info instead of
    # slicing sys.version, which produces "3.1" for Python 3.10+.
    version_major = str(sys.version_info[0])
    version_major_minor = '%d.%d' % sys.version_info[:2]

    pip_script = console.pop('pip', None)
    if pip_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            spec = 'pip = ' + pip_script
            generated.extend(maker.make(spec))

        if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
            spec = 'pip%s = %s' % (version_major, pip_script)
            generated.extend(maker.make(spec))

        spec = 'pip%s = %s' % (version_major_minor, pip_script)
        generated.extend(maker.make(spec))
        # Delete any other versioned pip entry points
        pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)]
        for k in pip_ep:
            del console[k]
    easy_install_script = console.pop('easy_install', None)
    if easy_install_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            spec = 'easy_install = ' + easy_install_script
            generated.extend(maker.make(spec))

        spec = 'easy_install-%s = %s' % (
            version_major_minor, easy_install_script,
        )
        generated.extend(maker.make(spec))
        # Delete any other versioned easy_install entry points
        easy_install_ep = [
            k for k in console if re.match(r'easy_install(-\d\.\d)?$', k)
        ]
        for k in easy_install_ep:
            del console[k]

    # Generate the console and GUI entry points specified in the wheel
    if len(console) > 0:
        generated_console_scripts = maker.make_multiple(
            ['%s = %s' % kv for kv in console.items()]
        )
        generated.extend(generated_console_scripts)

        if warn_script_location:
            msg = message_about_scripts_not_on_PATH(generated_console_scripts)
            if msg is not None:
                # Bug fix: logger.warn is a deprecated alias of
                # logger.warning.
                logger.warning(msg)

    if len(gui) > 0:
        generated.extend(
            maker.make_multiple(
                ['%s = %s' % kv for kv in gui.items()],
                {'gui': True}
            )
        )

    # Record pip as the installer
    installer = os.path.join(info_dir[0], 'INSTALLER')
    temp_installer = os.path.join(info_dir[0], 'INSTALLER.pip')
    with open(temp_installer, 'wb') as installer_file:
        installer_file.write(b'pip\n')
    shutil.move(temp_installer, installer)
    generated.append(installer)

    # Record details of all files installed
    record = os.path.join(info_dir[0], 'RECORD')
    temp_record = os.path.join(info_dir[0], 'RECORD.pip')
    with open_for_csv(record, 'r') as record_in:
        with open_for_csv(temp_record, 'w+') as record_out:
            reader = csv.reader(record_in)
            writer = csv.writer(record_out)
            for row in reader:
                row[0] = installed.pop(row[0], row[0])
                if row[0] in changed:
                    row[1], row[2] = rehash(row[0])
                writer.writerow(row)
            for f in generated:
                digest, length = rehash(f)
                writer.writerow((normpath(f, lib_dir), digest, length))
            for f in installed:
                writer.writerow((installed[f], '', ''))
    shutil.move(temp_record, record)
| 504 | |||
| 505 | |||
def wheel_version(source_dir):
    """
    Return the Wheel-Version of an extracted wheel, if possible.

    Otherwise, return False if we couldn't parse / extract it.

    :param source_dir: directory containing an unpacked wheel's metadata
    """
    try:
        dist = [d for d in pkg_resources.find_on_path(None, source_dir)][0]

        wheel_data = dist.get_metadata('WHEEL')
        wheel_data = Parser().parsestr(wheel_data)

        version = wheel_data['Wheel-Version'].strip()
        version = tuple(map(int, version.split('.')))
        return version
    except Exception:
        # Bug fix: this was a bare ``except:``, which also swallowed
        # SystemExit and KeyboardInterrupt.  Any genuine parse/lookup error
        # simply means "version unknown".
        return False
| 523 | |||
| 524 | |||
def check_compatibility(version, name):
    """
    Raises errors or warns if called with an incompatible Wheel-Version.

    Pip should refuse to install a Wheel-Version that's a major series
    ahead of what it's compatible with (e.g 2.0 > 1.1); and warn when
    installing a version only minor version ahead (e.g 1.2 > 1.1).

    version: a 2-tuple representing a Wheel-Version (Major, Minor)
    name: name of wheel or package to raise exception about

    :raises UnsupportedWheel: when an incompatible Wheel-Version is given
    """
    if not version:
        raise UnsupportedWheel(
            "%s is in an unsupported or invalid wheel" % name
        )

    dotted_version = '.'.join(map(str, version))
    if version[0] > VERSION_COMPATIBLE[0]:
        # Newer major series: refuse outright.
        raise UnsupportedWheel(
            "%s's Wheel-Version (%s) is not compatible with this version "
            "of pip" % (name, dotted_version)
        )
    if version > VERSION_COMPATIBLE:
        # Same major series but a newer minor: proceed, but warn.
        logger.warning(
            'Installing from a newer Wheel-Version (%s)',
            dotted_version,
        )
| 552 | |||
| 553 | |||
class Wheel(object):
    """A wheel file"""

    # TODO: maybe move the install code into this class

    wheel_file_re = re.compile(
        r"""^(?P<namever>(?P<name>.+?)-(?P<ver>.*?))
        ((-(?P<build>\d[^-]*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?)
        \.whl|\.dist-info)$""",
        re.VERBOSE
    )

    def __init__(self, filename):
        """
        :raises InvalidWheelFilename: when the filename is invalid for a wheel
        """
        parsed = self.wheel_file_re.match(filename)
        if parsed is None:
            raise InvalidWheelFilename(
                "%s is not a valid wheel filename." % filename
            )
        self.filename = filename
        # "_" is assumed to mean "-" due to the wheel naming scheme
        # (https://github.com/pypa/pip/issues/1150)
        self.name = parsed.group('name').replace('_', '-')
        self.version = parsed.group('ver').replace('_', '-')
        self.build_tag = parsed.group('build')
        self.pyversions = parsed.group('pyver').split('.')
        self.abis = parsed.group('abi').split('.')
        self.plats = parsed.group('plat').split('.')

        # Every (python tag, abi tag, platform tag) combination this file
        # claims to support.
        self.file_tags = {
            (py, abi, plat)
            for py in self.pyversions
            for abi in self.abis
            for plat in self.plats
        }

    def support_index_min(self, tags=None):
        """
        Return the lowest index that one of the wheel's file_tag combinations
        achieves in the supported_tags list e.g. if there are 8 supported
        tags, and one of the file tags is first in the list, then return 0.
        Returns None if the wheel is not supported.
        """
        if tags is None:  # for mock
            tags = pep425tags.get_supported()
        candidate_indexes = [
            tags.index(tag) for tag in self.file_tags if tag in tags
        ]
        if not candidate_indexes:
            return None
        return min(candidate_indexes)

    def supported(self, tags=None):
        """Is this wheel supported on this system?"""
        if tags is None:  # for mock
            tags = pep425tags.get_supported()
        return not self.file_tags.isdisjoint(tags)
| 608 | |||
| 609 | |||
class WheelBuilder(object):
    """Build wheels from a RequirementSet."""

    def __init__(self, finder, preparer, wheel_cache,
                 build_options=None, global_options=None, no_clean=False):
        self.finder = finder
        self.preparer = preparer
        self.wheel_cache = wheel_cache

        # Directory explicitly requested via ``pip wheel -w``; may be None
        # when autobuilding into the wheel cache instead.
        self._wheel_dir = preparer.wheel_download_dir

        self.build_options = build_options or []
        self.global_options = global_options or []
        self.no_clean = no_clean

    def _build_one(self, req, output_dir, python_tag=None):
        """Build one wheel.

        :return: The filename of the built wheel, or None if the build failed.
        """
        # Install build deps into temporary directory (PEP 518)
        with req.build_env:
            return self._build_one_inside_env(req, output_dir,
                                              python_tag=python_tag)

    def _build_one_inside_env(self, req, output_dir, python_tag=None):
        """Run the build in a temp dir, then move the wheel to output_dir."""
        with TempDirectory(kind="wheel") as temp_dir:
            if self.__build_one(req, temp_dir.path, python_tag=python_tag):
                try:
                    wheel_name = os.listdir(temp_dir.path)[0]
                    wheel_path = os.path.join(output_dir, wheel_name)
                    shutil.move(
                        os.path.join(temp_dir.path, wheel_name), wheel_path
                    )
                    logger.info('Stored in directory: %s', output_dir)
                    return wheel_path
                except Exception:
                    # Bug fix: this was a bare ``except:`` that also caught
                    # SystemExit/KeyboardInterrupt; fall through to cleanup.
                    pass
            # Ignore return, we can't do anything else useful.
            self._clean_one(req)
            return None

    def _base_setup_args(self, req):
        """Build the ``python -c <setuptools shim>`` command prefix."""
        # NOTE: Eventually, we'd want to also -S to the flags here, when we're
        # isolating. Currently, it breaks Python in virtualenvs, because it
        # relies on site.py to find parts of the standard library outside the
        # virtualenv.
        return [
            sys.executable, '-u', '-c',
            SETUPTOOLS_SHIM % req.setup_py
        ] + list(self.global_options)

    def __build_one(self, req, tempd, python_tag=None):
        """Invoke ``setup.py bdist_wheel``; return True on success."""
        base_args = self._base_setup_args(req)

        spin_message = 'Running setup.py bdist_wheel for %s' % (req.name,)
        with open_spinner(spin_message) as spinner:
            logger.debug('Destination directory: %s', tempd)
            wheel_args = base_args + ['bdist_wheel', '-d', tempd] \
                + self.build_options

            if python_tag is not None:
                wheel_args += ["--python-tag", python_tag]

            try:
                call_subprocess(wheel_args, cwd=req.setup_py_dir,
                                show_stdout=False, spinner=spinner)
                return True
            except Exception:
                # Bug fix: previously a bare ``except:``.
                spinner.finish("error")
                logger.error('Failed building wheel for %s', req.name)
                return False

    def _clean_one(self, req):
        """Invoke ``setup.py clean --all``; return True on success."""
        base_args = self._base_setup_args(req)

        logger.info('Running setup.py clean for %s', req.name)
        clean_args = base_args + ['clean', '--all']
        try:
            call_subprocess(clean_args, cwd=req.source_dir, show_stdout=False)
            return True
        except Exception:
            # Bug fix: previously a bare ``except:``.
            logger.error('Failed cleaning build dir for %s', req.name)
            return False

    def build(self, requirements, session, autobuilding=False):
        """Build wheels.

        :param unpack: If True, replace the sdist we built from with the
            newly built wheel, in preparation for installation.
        :return: True if all the wheels built correctly.
        """
        from pip._internal import index

        building_is_possible = self._wheel_dir or (
            autobuilding and self.wheel_cache.cache_dir
        )
        assert building_is_possible

        buildset = []
        for req in requirements:
            if req.constraint:
                continue
            if req.is_wheel:
                if not autobuilding:
                    logger.info(
                        'Skipping %s, due to already being wheel.', req.name,
                    )
            elif autobuilding and req.editable:
                pass
            elif autobuilding and not req.source_dir:
                pass
            elif autobuilding and req.link and not req.link.is_artifact:
                # VCS checkout. Build wheel just for this run.
                buildset.append((req, True))
            else:
                ephem_cache = False
                if autobuilding:
                    link = req.link
                    base, ext = link.splitext()
                    if index.egg_info_matches(base, None, link) is None:
                        # E.g. local directory. Build wheel just for this run.
                        ephem_cache = True
                    if "binary" not in index.fmt_ctl_formats(
                            self.finder.format_control,
                            canonicalize_name(req.name)):
                        logger.info(
                            "Skipping bdist_wheel for %s, due to binaries "
                            "being disabled for it.", req.name,
                        )
                        continue
                buildset.append((req, ephem_cache))

        if not buildset:
            return True

        # Build the wheels.
        logger.info(
            'Building wheels for collected packages: %s',
            ', '.join([req.name for (req, _) in buildset]),
        )
        _cache = self.wheel_cache  # shorter name
        with indent_log():
            build_success, build_failure = [], []
            for req, ephem in buildset:
                python_tag = None
                if autobuilding:
                    python_tag = pep425tags.implementation_tag
                    if ephem:
                        output_dir = _cache.get_ephem_path_for_link(req.link)
                    else:
                        output_dir = _cache.get_path_for_link(req.link)
                    try:
                        ensure_dir(output_dir)
                    except OSError as e:
                        logger.warning("Building wheel for %s failed: %s",
                                       req.name, e)
                        build_failure.append(req)
                        continue
                else:
                    output_dir = self._wheel_dir
                wheel_file = self._build_one(
                    req, output_dir,
                    python_tag=python_tag,
                )
                if wheel_file:
                    build_success.append(req)
                    if autobuilding:
                        # XXX: This is mildly duplicative with prepare_files,
                        # but not close enough to pull out to a single common
                        # method.
                        # The code below assumes temporary source dirs -
                        # prevent it doing bad things.
                        if req.source_dir and not os.path.exists(os.path.join(
                                req.source_dir, PIP_DELETE_MARKER_FILENAME)):
                            raise AssertionError(
                                "bad source dir - missing marker")
                        # Delete the source we built the wheel from
                        req.remove_temporary_source()
                        # set the build directory again - name is known from
                        # the work prepare_files did.
                        req.source_dir = req.build_location(
                            self.preparer.build_dir
                        )
                        # Update the link for this.
                        req.link = index.Link(path_to_url(wheel_file))
                        assert req.link.is_wheel
                        # extract the wheel into the dir
                        unpack_url(
                            req.link, req.source_dir, None, False,
                            session=session,
                        )
                else:
                    build_failure.append(req)

        # notify success/failure
        if build_success:
            logger.info(
                'Successfully built %s',
                ' '.join([req.name for req in build_success]),
            )
        if build_failure:
            logger.info(
                'Failed to build %s',
                ' '.join([req.name for req in build_failure]),
            )
        # Return True if all builds were successful
        return len(build_failure) == 0
