diff options
author | Shubham Saini <shubham6405@gmail.com> | 2018-12-11 10:01:23 +0000 |
---|---|---|
committer | Shubham Saini <shubham6405@gmail.com> | 2018-12-11 10:01:23 +0000 |
commit | 68df54d6629ec019142eb149dd037774f2d11e7c (patch) | |
tree | 345bc22d46b4e01a4ba8303b94278952a4ed2b9e /venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/wheel.py |
First commit
Diffstat (limited to 'venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/wheel.py')
-rw-r--r-- | venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/wheel.py | 984 |
1 file changed, 984 insertions, 0 deletions
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/wheel.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/wheel.py new file mode 100644 index 0000000..3693410 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/wheel.py | |||
@@ -0,0 +1,984 @@ | |||
1 | # -*- coding: utf-8 -*- | ||
2 | # | ||
3 | # Copyright (C) 2013-2017 Vinay Sajip. | ||
4 | # Licensed to the Python Software Foundation under a contributor agreement. | ||
5 | # See LICENSE.txt and CONTRIBUTORS.txt. | ||
6 | # | ||
7 | from __future__ import unicode_literals | ||
8 | |||
9 | import base64 | ||
10 | import codecs | ||
11 | import datetime | ||
12 | import distutils.util | ||
13 | from email import message_from_file | ||
14 | import hashlib | ||
15 | import imp | ||
16 | import json | ||
17 | import logging | ||
18 | import os | ||
19 | import posixpath | ||
20 | import re | ||
21 | import shutil | ||
22 | import sys | ||
23 | import tempfile | ||
24 | import zipfile | ||
25 | |||
26 | from . import __version__, DistlibException | ||
27 | from .compat import sysconfig, ZipFile, fsdecode, text_type, filter | ||
28 | from .database import InstalledDistribution | ||
29 | from .metadata import Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME | ||
30 | from .util import (FileOperator, convert_path, CSVReader, CSVWriter, Cache, | ||
31 | cached_property, get_cache_base, read_exports, tempdir) | ||
32 | from .version import NormalizedVersion, UnsupportedVersionError | ||
33 | |||
logger = logging.getLogger(__name__)

# Module-level Cache for extracted C extensions; created lazily by
# Wheel._get_dylib_cache().
cache = None    # created when needed

# Interpreter-implementation prefix used in PEP 425 compatibility tags.
if hasattr(sys, 'pypy_version_info'):  # pragma: no cover
    IMP_PREFIX = 'pp'
elif sys.platform.startswith('java'):  # pragma: no cover
    IMP_PREFIX = 'jy'
elif sys.platform == 'cli':  # pragma: no cover
    IMP_PREFIX = 'ip'
else:
    IMP_PREFIX = 'cp'

# Version digits without a dot, e.g. '37'; fall back to building it from
# sys.version_info when the sysconfig variable is unavailable.
VER_SUFFIX = sysconfig.get_config_var('py_version_nodot')
if not VER_SUFFIX:   # pragma: no cover
    VER_SUFFIX = '%s%s' % sys.version_info[:2]
# Generic ('py37') and implementation-specific ('cp37') interpreter tags.
PYVER = 'py' + VER_SUFFIX
IMPVER = IMP_PREFIX + VER_SUFFIX

# Platform tag, normalised the way wheel filenames require.
ARCH = distutils.util.get_platform().replace('-', '_').replace('.', '_')

# ABI tag: prefer the interpreter's SOABI; otherwise derive one from the
# relevant build-configuration flags.
ABI = sysconfig.get_config_var('SOABI')
if ABI and ABI.startswith('cpython-'):
    ABI = ABI.replace('cpython-', 'cp')
else:
    def _derive_abi():
        parts = ['cp', VER_SUFFIX]
        if sysconfig.get_config_var('Py_DEBUG'):
            parts.append('d')
        if sysconfig.get_config_var('WITH_PYMALLOC'):
            parts.append('m')
        if sysconfig.get_config_var('Py_UNICODE_SIZE') == 4:
            parts.append('u')
        return ''.join(parts)
    ABI = _derive_abi()
    del _derive_abi

# Full wheel filename: name-version[-build]-pyver-abi-arch.whl, where the
# tag fields may be '.'-separated compressed tag sets.
FILENAME_RE = re.compile(r'''
(?P<nm>[^-]+)
-(?P<vn>\d+[^-]*)
(-(?P<bn>\d+[^-]*))?
-(?P<py>\w+\d+(\.\w+\d+)*)
-(?P<bi>\w+)
-(?P<ar>\w+(\.\w+)*)
\.whl$
''', re.IGNORECASE | re.VERBOSE)

# Bare name-version[-build] form (no tags, no .whl suffix).
NAME_VERSION_RE = re.compile(r'''
(?P<nm>[^-]+)
-(?P<vn>\d+[^-]*)
(-(?P<bn>\d+[^-]*))?$
''', re.IGNORECASE | re.VERBOSE)

# Patterns/constants for rewriting script shebang lines (bytes, since
# scripts are processed in binary mode).
SHEBANG_RE = re.compile(br'\s*#![^\r\n]*')
SHEBANG_DETAIL_RE = re.compile(br'^(\s*#!("[^"]+"|\S+))\s+(.*)$')
SHEBANG_PYTHON = b'#!python'
SHEBANG_PYTHONW = b'#!pythonw'

# Convert native paths to the forward-slash form used inside archives.
if os.sep == '/':
    to_posix = lambda o: o
else:
    to_posix = lambda o: o.replace(os.sep, '/')
97 | |||
class Mounter(object):
    """
    A PEP 302 style finder/loader which makes C extension modules from
    mounted wheels importable, via ``imp.load_dynamic``.
    """
    def __init__(self):
        # wheel pathname -> iterable of (module name, extension path) pairs
        self.impure_wheels = {}
        # module name -> extension path, merged across all mounted wheels
        self.libs = {}

    def add(self, pathname, extensions):
        """Register the extension modules contributed by a mounted wheel."""
        self.impure_wheels[pathname] = extensions
        self.libs.update(extensions)

    def remove(self, pathname):
        """Forget a wheel and drop its extensions from the lookup table."""
        for name, _ in self.impure_wheels.pop(pathname):
            if name in self.libs:
                del self.libs[name]

    def find_module(self, fullname, path=None):
        """Return self as the loader for known extension modules, else None."""
        return self if fullname in self.libs else None

    def load_module(self, fullname):
        """Load (or return the already-imported) extension module *fullname*."""
        try:
            return sys.modules[fullname]
        except KeyError:
            pass
        if fullname not in self.libs:
            raise ImportError('unable to find extension for %s' % fullname)
        mod = imp.load_dynamic(fullname, self.libs[fullname])
        mod.__loader__ = self
        # For a dotted name, record the parent package.
        parts = fullname.rsplit('.', 1)
        if len(parts) > 1:
            mod.__package__ = parts[0]
        return mod
132 | |||
# Singleton import hook, added to sys.meta_path when a wheel containing
# C extensions is mounted, and removed when no such wheels remain mounted.
_hook = Mounter()
134 | |||
135 | |||
class Wheel(object):
    """
    Class to build and install from Wheel files (PEP 427).
    """

    # Highest wheel spec version written/understood by this implementation.
    wheel_version = (1, 1)
    # Default hash algorithm used for RECORD digests.
    hash_kind = 'sha256'

144 | def __init__(self, filename=None, sign=False, verify=False): | ||
145 | """ | ||
146 | Initialise an instance using a (valid) filename. | ||
147 | """ | ||
148 | self.sign = sign | ||
149 | self.should_verify = verify | ||
150 | self.buildver = '' | ||
151 | self.pyver = [PYVER] | ||
152 | self.abi = ['none'] | ||
153 | self.arch = ['any'] | ||
154 | self.dirname = os.getcwd() | ||
155 | if filename is None: | ||
156 | self.name = 'dummy' | ||
157 | self.version = '0.1' | ||
158 | self._filename = self.filename | ||
159 | else: | ||
160 | m = NAME_VERSION_RE.match(filename) | ||
161 | if m: | ||
162 | info = m.groupdict('') | ||
163 | self.name = info['nm'] | ||
164 | # Reinstate the local version separator | ||
165 | self.version = info['vn'].replace('_', '-') | ||
166 | self.buildver = info['bn'] | ||
167 | self._filename = self.filename | ||
168 | else: | ||
169 | dirname, filename = os.path.split(filename) | ||
170 | m = FILENAME_RE.match(filename) | ||
171 | if not m: | ||
172 | raise DistlibException('Invalid name or ' | ||
173 | 'filename: %r' % filename) | ||
174 | if dirname: | ||
175 | self.dirname = os.path.abspath(dirname) | ||
176 | self._filename = filename | ||
177 | info = m.groupdict('') | ||
178 | self.name = info['nm'] | ||
179 | self.version = info['vn'] | ||
180 | self.buildver = info['bn'] | ||
181 | self.pyver = info['py'].split('.') | ||
182 | self.abi = info['bi'].split('.') | ||
183 | self.arch = info['ar'].split('.') | ||
184 | |||
185 | @property | ||
186 | def filename(self): | ||
187 | """ | ||
188 | Build and return a filename from the various components. | ||
189 | """ | ||
190 | if self.buildver: | ||
191 | buildver = '-' + self.buildver | ||
192 | else: | ||
193 | buildver = '' | ||
194 | pyver = '.'.join(self.pyver) | ||
195 | abi = '.'.join(self.abi) | ||
196 | arch = '.'.join(self.arch) | ||
197 | # replace - with _ as a local version separator | ||
198 | version = self.version.replace('-', '_') | ||
199 | return '%s-%s%s-%s-%s-%s.whl' % (self.name, version, buildver, | ||
200 | pyver, abi, arch) | ||
201 | |||
202 | @property | ||
203 | def exists(self): | ||
204 | path = os.path.join(self.dirname, self.filename) | ||
205 | return os.path.isfile(path) | ||
206 | |||
207 | @property | ||
208 | def tags(self): | ||
209 | for pyver in self.pyver: | ||
210 | for abi in self.abi: | ||
211 | for arch in self.arch: | ||
212 | yield pyver, abi, arch | ||
213 | |||
    @cached_property
    def metadata(self):
        """
        The distribution metadata of this wheel, as a Metadata instance.

        The metadata file is looked up inside the archive's .dist-info
        directory; which filenames are tried depends on the Wheel-Version
        declared in the WHEEL file.  Raises ValueError if none of the
        candidate files is present.
        """
        pathname = os.path.join(self.dirname, self.filename)
        name_ver = '%s-%s' % (self.name, self.version)
        info_dir = '%s.dist-info' % name_ver
        wrapper = codecs.getreader('utf-8')
        with ZipFile(pathname, 'r') as zf:
            wheel_metadata = self.get_wheel_metadata(zf)
            wv = wheel_metadata['Wheel-Version'].split('.', 1)
            file_version = tuple([int(i) for i in wv])
            # Pre-1.1 wheels may keep metadata under the legacy 'METADATA'
            # name, so add it as a final fallback.
            if file_version < (1, 1):
                fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME, 'METADATA']
            else:
                fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME]
            result = None
            for fn in fns:
                try:
                    metadata_filename = posixpath.join(info_dir, fn)
                    with zf.open(metadata_filename) as bf:
                        wf = wrapper(bf)
                        result = Metadata(fileobj=wf)
                        if result:
                            break
                except KeyError:
                    # this candidate is not in the archive - try the next
                    pass
            if not result:
                raise ValueError('Invalid wheel, because metadata is '
                                 'missing: looked in %s' % ', '.join(fns))
        return result
243 | |||
244 | def get_wheel_metadata(self, zf): | ||
245 | name_ver = '%s-%s' % (self.name, self.version) | ||
246 | info_dir = '%s.dist-info' % name_ver | ||
247 | metadata_filename = posixpath.join(info_dir, 'WHEEL') | ||
248 | with zf.open(metadata_filename) as bf: | ||
249 | wf = codecs.getreader('utf-8')(bf) | ||
250 | message = message_from_file(wf) | ||
251 | return dict(message) | ||
252 | |||
253 | @cached_property | ||
254 | def info(self): | ||
255 | pathname = os.path.join(self.dirname, self.filename) | ||
256 | with ZipFile(pathname, 'r') as zf: | ||
257 | result = self.get_wheel_metadata(zf) | ||
258 | return result | ||
259 | |||
    def process_shebang(self, data):
        """
        Normalise the shebang of a script (*data*, bytes) to '#!python' or
        '#!pythonw', preserving any interpreter arguments.  If *data* has no
        shebang, one is prepended using the line-ending style found in the
        data.  Returns the modified bytes.
        """
        m = SHEBANG_RE.match(data)
        if m:
            end = m.end()
            shebang, data_after_shebang = data[:end], data[end:]
            # Preserve any arguments after the interpreter
            if b'pythonw' in shebang.lower():
                shebang_python = SHEBANG_PYTHONW
            else:
                shebang_python = SHEBANG_PYTHON
            m = SHEBANG_DETAIL_RE.match(shebang)
            if m:
                # last group is the arguments following the interpreter
                args = b' ' + m.groups()[-1]
            else:
                args = b''
            shebang = shebang_python + args
            data = shebang + data_after_shebang
        else:
            # No shebang present: detect the dominant line terminator so
            # the inserted line matches the rest of the file.
            cr = data.find(b'\r')
            lf = data.find(b'\n')
            if cr < 0 or cr > lf:
                term = b'\n'
            else:
                if data[cr:cr + 2] == b'\r\n':
                    term = b'\r\n'
                else:
                    term = b'\r'
            data = SHEBANG_PYTHON + term + data
        return data
289 | |||
290 | def get_hash(self, data, hash_kind=None): | ||
291 | if hash_kind is None: | ||
292 | hash_kind = self.hash_kind | ||
293 | try: | ||
294 | hasher = getattr(hashlib, hash_kind) | ||
295 | except AttributeError: | ||
296 | raise DistlibException('Unsupported hash algorithm: %r' % hash_kind) | ||
297 | result = hasher(data).digest() | ||
298 | result = base64.urlsafe_b64encode(result).rstrip(b'=').decode('ascii') | ||
299 | return hash_kind, result | ||
300 | |||
301 | def write_record(self, records, record_path, base): | ||
302 | records = list(records) # make a copy for sorting | ||
303 | p = to_posix(os.path.relpath(record_path, base)) | ||
304 | records.append((p, '', '')) | ||
305 | records.sort() | ||
306 | with CSVWriter(record_path) as writer: | ||
307 | for row in records: | ||
308 | writer.writerow(row) | ||
309 | |||
310 | def write_records(self, info, libdir, archive_paths): | ||
311 | records = [] | ||
312 | distinfo, info_dir = info | ||
313 | hasher = getattr(hashlib, self.hash_kind) | ||
314 | for ap, p in archive_paths: | ||
315 | with open(p, 'rb') as f: | ||
316 | data = f.read() | ||
317 | digest = '%s=%s' % self.get_hash(data) | ||
318 | size = os.path.getsize(p) | ||
319 | records.append((ap, digest, size)) | ||
320 | |||
321 | p = os.path.join(distinfo, 'RECORD') | ||
322 | self.write_record(records, p, libdir) | ||
323 | ap = to_posix(os.path.join(info_dir, 'RECORD')) | ||
324 | archive_paths.append((ap, p)) | ||
325 | |||
326 | def build_zip(self, pathname, archive_paths): | ||
327 | with ZipFile(pathname, 'w', zipfile.ZIP_DEFLATED) as zf: | ||
328 | for ap, p in archive_paths: | ||
329 | logger.debug('Wrote %s to %s in wheel', p, ap) | ||
330 | zf.write(p, ap) | ||
331 | |||
    def build(self, paths, tags=None, wheel_version=None):
        """
        Build a wheel from files in specified paths, and use any specified tags
        when determining the name of the wheel.

        :param paths: mapping of path kind to directory; must contain
                      'purelib' or 'platlib' (the site-packages payload) and
                      may contain 'data', 'headers' and 'scripts'.
        :param tags: optional dict with 'pyver'/'abi'/'arch' lists which
                     override the defaults chosen from the payload kind.
        :param wheel_version: optional (major, minor) tuple written as
                              Wheel-Version instead of self.wheel_version.
        :return: the pathname of the wheel that was written.
        """
        if tags is None:
            tags = {}

        # 'purelib' wins if both are present; a platlib payload implies an
        # impure wheel with implementation-specific default tags.
        libkey = list(filter(lambda o: o in paths, ('purelib', 'platlib')))[0]
        if libkey == 'platlib':
            is_pure = 'false'
            default_pyver = [IMPVER]
            default_abi = [ABI]
            default_arch = [ARCH]
        else:
            is_pure = 'true'
            default_pyver = [PYVER]
            default_abi = ['none']
            default_arch = ['any']

        self.pyver = tags.get('pyver', default_pyver)
        self.abi = tags.get('abi', default_abi)
        self.arch = tags.get('arch', default_arch)

        libdir = paths[libkey]

        name_ver = '%s-%s' % (self.name, self.version)
        data_dir = '%s.data' % name_ver
        info_dir = '%s.dist-info' % name_ver

        archive_paths = []

        # First, stuff which is not in site-packages
        for key in ('data', 'headers', 'scripts'):
            if key not in paths:
                continue
            path = paths[key]
            if os.path.isdir(path):
                for root, dirs, files in os.walk(path):
                    for fn in files:
                        p = fsdecode(os.path.join(root, fn))
                        rp = os.path.relpath(p, path)
                        ap = to_posix(os.path.join(data_dir, key, rp))
                        archive_paths.append((ap, p))
                        # NOTE: scripts are rewritten *in place* on disk so
                        # their shebangs are normalised before archiving.
                        if key == 'scripts' and not p.endswith('.exe'):
                            with open(p, 'rb') as f:
                                data = f.read()
                            data = self.process_shebang(data)
                            with open(p, 'wb') as f:
                                f.write(data)

        # Now, stuff which is in site-packages, other than the
        # distinfo stuff.
        path = libdir
        distinfo = None
        for root, dirs, files in os.walk(path):
            if root == path:
                # At the top level only, save distinfo for later
                # and skip it for now
                for i, dn in enumerate(dirs):
                    dn = fsdecode(dn)
                    if dn.endswith('.dist-info'):
                        distinfo = os.path.join(root, dn)
                        del dirs[i]
                        break
                assert distinfo, '.dist-info directory expected, not found'

            for fn in files:
                # comment out next suite to leave .pyc files in
                if fsdecode(fn).endswith(('.pyc', '.pyo')):
                    continue
                p = os.path.join(root, fn)
                rp = to_posix(os.path.relpath(p, path))
                archive_paths.append((rp, p))

        # Now distinfo. Assumed to be flat, i.e. os.listdir is enough.
        # RECORD/INSTALLER/SHARED/WHEEL are regenerated or irrelevant here.
        files = os.listdir(distinfo)
        for fn in files:
            if fn not in ('RECORD', 'INSTALLER', 'SHARED', 'WHEEL'):
                p = fsdecode(os.path.join(distinfo, fn))
                ap = to_posix(os.path.join(info_dir, fn))
                archive_paths.append((ap, p))

        wheel_metadata = [
            'Wheel-Version: %d.%d' % (wheel_version or self.wheel_version),
            'Generator: distlib %s' % __version__,
            'Root-Is-Purelib: %s' % is_pure,
        ]
        for pyver, abi, arch in self.tags:
            wheel_metadata.append('Tag: %s-%s-%s' % (pyver, abi, arch))
        p = os.path.join(distinfo, 'WHEEL')
        with open(p, 'w') as f:
            f.write('\n'.join(wheel_metadata))
        ap = to_posix(os.path.join(info_dir, 'WHEEL'))
        archive_paths.append((ap, p))

        # Now, at last, RECORD.
        # Paths in here are archive paths - nothing else makes sense.
        self.write_records((distinfo, info_dir), libdir, archive_paths)
        # Now, ready to build the zip file
        pathname = os.path.join(self.dirname, self.filename)
        self.build_zip(pathname, archive_paths)
        return pathname
435 | |||
    def install(self, paths, maker, **kwargs):
        """
        Install a wheel to the specified paths. If kwarg ``warner`` is
        specified, it should be a callable, which will be called with two
        tuples indicating the wheel version of this software and the wheel
        version in the file, if there is a discrepancy in the versions.
        This can be used to issue any warnings or raise any exceptions.
        If kwarg ``lib_only`` is True, only the purelib/platlib files are
        installed, and the headers, scripts, data and dist-info metadata are
        not written.

        The return value is a :class:`InstalledDistribution` instance unless
        ``options.lib_only`` is True, in which case the return value is ``None``.
        """

        dry_run = maker.dry_run
        warner = kwargs.get('warner')
        lib_only = kwargs.get('lib_only', False)

        pathname = os.path.join(self.dirname, self.filename)
        name_ver = '%s-%s' % (self.name, self.version)
        data_dir = '%s.data' % name_ver
        info_dir = '%s.dist-info' % name_ver

        metadata_name = posixpath.join(info_dir, METADATA_FILENAME)
        wheel_metadata_name = posixpath.join(info_dir, 'WHEEL')
        record_name = posixpath.join(info_dir, 'RECORD')

        wrapper = codecs.getreader('utf-8')

        with ZipFile(pathname, 'r') as zf:
            with zf.open(wheel_metadata_name) as bwf:
                wf = wrapper(bwf)
                message = message_from_file(wf)
            wv = message['Wheel-Version'].split('.', 1)
            file_version = tuple([int(i) for i in wv])
            if (file_version != self.wheel_version) and warner:
                warner(self.wheel_version, file_version)

            if message['Root-Is-Purelib'] == 'true':
                libdir = paths['purelib']
            else:
                libdir = paths['platlib']

            # RECORD rows keyed by archive path, used below to verify the
            # size and digest of each member before it is installed.
            records = {}
            with zf.open(record_name) as bf:
                with CSVReader(stream=bf) as reader:
                    for row in reader:
                        p = row[0]
                        records[p] = row

            # Prefixes (with trailing '/') used to classify archive members.
            data_pfx = posixpath.join(data_dir, '')
            info_pfx = posixpath.join(info_dir, '')
            script_pfx = posixpath.join(data_dir, 'scripts', '')

            # make a new instance rather than a copy of maker's,
            # as we mutate it
            fileop = FileOperator(dry_run=dry_run)
            fileop.record = True    # so we can rollback if needed

            bc = not sys.dont_write_bytecode    # Double negatives. Lovely!

            outfiles = []   # for RECORD writing

            # for script copying/shebang processing
            workdir = tempfile.mkdtemp()
            # set target dir later
            # we default add_launchers to False, as the
            # Python Launcher should be used instead
            maker.source_dir = workdir
            maker.target_dir = None
            try:
                for zinfo in zf.infolist():
                    arcname = zinfo.filename
                    if isinstance(arcname, text_type):
                        u_arcname = arcname
                    else:
                        u_arcname = arcname.decode('utf-8')
                    # The signature file won't be in RECORD,
                    # and we don't currently do anything with it
                    if u_arcname.endswith('/RECORD.jws'):
                        continue
                    row = records[u_arcname]
                    if row[2] and str(zinfo.file_size) != row[2]:
                        raise DistlibException('size mismatch for '
                                               '%s' % u_arcname)
                    if row[1]:
                        kind, value = row[1].split('=', 1)
                        with zf.open(arcname) as bf:
                            data = bf.read()
                        _, digest = self.get_hash(data, kind)
                        if digest != value:
                            raise DistlibException('digest mismatch for '
                                                   '%s' % arcname)

                    if lib_only and u_arcname.startswith((info_pfx, data_pfx)):
                        logger.debug('lib_only: skipping %s', u_arcname)
                        continue
                    is_script = (u_arcname.startswith(script_pfx)
                                 and not u_arcname.endswith('.exe'))

                    if u_arcname.startswith(data_pfx):
                        # member belongs under one of the 'data' path kinds
                        _, where, rp = u_arcname.split('/', 2)
                        outfile = os.path.join(paths[where], convert_path(rp))
                    else:
                        # meant for site-packages.
                        if u_arcname in (wheel_metadata_name, record_name):
                            continue
                        outfile = os.path.join(libdir, convert_path(u_arcname))
                    if not is_script:
                        with zf.open(arcname) as bf:
                            fileop.copy_stream(bf, outfile)
                        outfiles.append(outfile)
                        # Double check the digest of the written file
                        if not dry_run and row[1]:
                            with open(outfile, 'rb') as bf:
                                data = bf.read()
                            _, newdigest = self.get_hash(data, kind)
                            if newdigest != digest:
                                raise DistlibException('digest mismatch '
                                                       'on write for '
                                                       '%s' % outfile)
                        if bc and outfile.endswith('.py'):
                            try:
                                pyc = fileop.byte_compile(outfile)
                                outfiles.append(pyc)
                            except Exception:
                                # Don't give up if byte-compilation fails,
                                # but log it and perhaps warn the user
                                logger.warning('Byte-compilation failed',
                                               exc_info=True)
                    else:
                        # Scripts go through a temp dir so the maker can
                        # rewrite shebangs / generate launchers.
                        fn = os.path.basename(convert_path(arcname))
                        workname = os.path.join(workdir, fn)
                        with zf.open(arcname) as bf:
                            fileop.copy_stream(bf, workname)

                        dn, fn = os.path.split(outfile)
                        maker.target_dir = dn
                        filenames = maker.make(fn)
                        fileop.set_executable_mode(filenames)
                        outfiles.extend(filenames)

                if lib_only:
                    logger.debug('lib_only: returning None')
                    dist = None
                else:
                    # Generate scripts

                    # Try to get pydist.json so we can see if there are
                    # any commands to generate. If this fails (e.g. because
                    # of a legacy wheel), log a warning but don't give up.
                    commands = None
                    file_version = self.info['Wheel-Version']
                    if file_version == '1.0':
                        # Use legacy info
                        ep = posixpath.join(info_dir, 'entry_points.txt')
                        try:
                            with zf.open(ep) as bwf:
                                epdata = read_exports(bwf)
                            commands = {}
                            for key in ('console', 'gui'):
                                k = '%s_scripts' % key
                                if k in epdata:
                                    commands['wrap_%s' % key] = d = {}
                                    for v in epdata[k].values():
                                        s = '%s:%s' % (v.prefix, v.suffix)
                                        if v.flags:
                                            s += ' %s' % v.flags
                                        d[v.name] = s
                        except Exception:
                            logger.warning('Unable to read legacy script '
                                           'metadata, so cannot generate '
                                           'scripts')
                    else:
                        try:
                            with zf.open(metadata_name) as bwf:
                                wf = wrapper(bwf)
                                commands = json.load(wf).get('extensions')
                                if commands:
                                    commands = commands.get('python.commands')
                        except Exception:
                            logger.warning('Unable to read JSON metadata, so '
                                           'cannot generate scripts')
                    if commands:
                        console_scripts = commands.get('wrap_console', {})
                        gui_scripts = commands.get('wrap_gui', {})
                        if console_scripts or gui_scripts:
                            script_dir = paths.get('scripts', '')
                            if not os.path.isdir(script_dir):
                                raise ValueError('Valid script path not '
                                                 'specified')
                            maker.target_dir = script_dir
                            for k, v in console_scripts.items():
                                script = '%s = %s' % (k, v)
                                filenames = maker.make(script)
                                fileop.set_executable_mode(filenames)

                            if gui_scripts:
                                options = {'gui': True }
                                for k, v in gui_scripts.items():
                                    script = '%s = %s' % (k, v)
                                    filenames = maker.make(script, options)
                                    fileop.set_executable_mode(filenames)

                    p = os.path.join(libdir, info_dir)
                    dist = InstalledDistribution(p)

                    # Write SHARED
                    paths = dict(paths)     # don't change passed in dict
                    del paths['purelib']
                    del paths['platlib']
                    paths['lib'] = libdir
                    p = dist.write_shared_locations(paths, dry_run)
                    if p:
                        outfiles.append(p)

                    # Write RECORD
                    dist.write_installed_files(outfiles, paths['prefix'],
                                               dry_run)
                return dist
            except Exception:  # pragma: no cover
                logger.exception('installation failed.')
                fileop.rollback()
                raise
            finally:
                shutil.rmtree(workdir)
663 | |||
664 | def _get_dylib_cache(self): | ||
665 | global cache | ||
666 | if cache is None: | ||
667 | # Use native string to avoid issues on 2.x: see Python #20140. | ||
668 | base = os.path.join(get_cache_base(), str('dylib-cache'), | ||
669 | sys.version[:3]) | ||
670 | cache = Cache(base) | ||
671 | return cache | ||
672 | |||
    def _get_extensions(self):
        """
        Extract any C extensions listed in the wheel's EXTENSIONS file into
        the dylib cache and return a list of (module name, extracted path)
        pairs.  Returns an empty list when the wheel has no EXTENSIONS file.
        """
        pathname = os.path.join(self.dirname, self.filename)
        name_ver = '%s-%s' % (self.name, self.version)
        info_dir = '%s.dist-info' % name_ver
        arcname = posixpath.join(info_dir, 'EXTENSIONS')
        wrapper = codecs.getreader('utf-8')
        result = []
        with ZipFile(pathname, 'r') as zf:
            try:
                with zf.open(arcname) as bf:
                    wf = wrapper(bf)
                    # EXTENSIONS maps module name -> path inside the wheel
                    extensions = json.load(wf)
                    cache = self._get_dylib_cache()
                    prefix = cache.prefix_to_dir(pathname)
                    cache_base = os.path.join(cache.base, prefix)
                    if not os.path.isdir(cache_base):
                        os.makedirs(cache_base)
                    for name, relpath in extensions.items():
                        dest = os.path.join(cache_base, convert_path(relpath))
                        if not os.path.exists(dest):
                            extract = True
                        else:
                            # Re-extract only if the archived copy is newer
                            # than the cached one.
                            file_time = os.stat(dest).st_mtime
                            file_time = datetime.datetime.fromtimestamp(file_time)
                            info = zf.getinfo(relpath)
                            wheel_time = datetime.datetime(*info.date_time)
                            extract = wheel_time > file_time
                        if extract:
                            zf.extract(relpath, cache_base)
                        result.append((name, dest))
            except KeyError:
                # no EXTENSIONS file in this wheel
                pass
        return result
706 | |||
    def is_compatible(self):
        """
        Determine if a wheel is compatible with the running system.
        """
        # Delegates to the module-level is_compatible() helper.
        return is_compatible(self)
712 | |||
    def is_mountable(self):
        """
        Determine if a wheel is asserted as mountable by its metadata.
        """
        # Placeholder: always mountable until the metadata details are
        # settled.
        return True # for now - metadata details TBD
718 | |||
    def mount(self, append=False):
        """
        Add this wheel to sys.path so its contents are importable, either
        appended (*append* True) or prepended.  If the wheel lists C
        extensions, they are extracted and registered with the module-level
        import hook.  Raises DistlibException if the wheel is incompatible
        with the running Python or marked as not mountable.  Mounting an
        already-mounted wheel is a no-op (logged at debug level).
        """
        pathname = os.path.abspath(os.path.join(self.dirname, self.filename))
        if not self.is_compatible():
            msg = 'Wheel %s not compatible with this Python.' % pathname
            raise DistlibException(msg)
        if not self.is_mountable():
            msg = 'Wheel %s is marked as not mountable.' % pathname
            raise DistlibException(msg)
        if pathname in sys.path:
            logger.debug('%s already in path', pathname)
        else:
            if append:
                sys.path.append(pathname)
            else:
                sys.path.insert(0, pathname)
            extensions = self._get_extensions()
            if extensions:
                # Install the meta-path hook once, then register this
                # wheel's extensions with it.
                if _hook not in sys.meta_path:
                    sys.meta_path.append(_hook)
                _hook.add(pathname, extensions)
739 | |||
740 | def unmount(self): | ||
741 | pathname = os.path.abspath(os.path.join(self.dirname, self.filename)) | ||
742 | if pathname not in sys.path: | ||
743 | logger.debug('%s not in path', pathname) | ||
744 | else: | ||
745 | sys.path.remove(pathname) | ||
746 | if pathname in _hook.impure_wheels: | ||
747 | _hook.remove(pathname) | ||
748 | if not _hook.impure_wheels: | ||
749 | if _hook in sys.meta_path: | ||
750 | sys.meta_path.remove(_hook) | ||
751 | |||
752 | def verify(self): | ||
753 | pathname = os.path.join(self.dirname, self.filename) | ||
754 | name_ver = '%s-%s' % (self.name, self.version) | ||
755 | data_dir = '%s.data' % name_ver | ||
756 | info_dir = '%s.dist-info' % name_ver | ||
757 | |||
758 | metadata_name = posixpath.join(info_dir, METADATA_FILENAME) | ||
759 | wheel_metadata_name = posixpath.join(info_dir, 'WHEEL') | ||
760 | record_name = posixpath.join(info_dir, 'RECORD') | ||
761 | |||
762 | wrapper = codecs.getreader('utf-8') | ||
763 | |||
764 | with ZipFile(pathname, 'r') as zf: | ||
765 | with zf.open(wheel_metadata_name) as bwf: | ||
766 | wf = wrapper(bwf) | ||
767 | message = message_from_file(wf) | ||
768 | wv = message['Wheel-Version'].split('.', 1) | ||
769 | file_version = tuple([int(i) for i in wv]) | ||
770 | # TODO version verification | ||
771 | |||
772 | records = {} | ||
773 | with zf.open(record_name) as bf: | ||
774 | with CSVReader(stream=bf) as reader: | ||
775 | for row in reader: | ||
776 | p = row[0] | ||
777 | records[p] = row | ||
778 | |||
779 | for zinfo in zf.infolist(): | ||
780 | arcname = zinfo.filename | ||
781 | if isinstance(arcname, text_type): | ||
782 | u_arcname = arcname | ||
783 | else: | ||
784 | u_arcname = arcname.decode('utf-8') | ||
785 | if '..' in u_arcname: | ||
786 | raise DistlibException('invalid entry in ' | ||
787 | 'wheel: %r' % u_arcname) | ||
788 | |||
789 | # The signature file won't be in RECORD, | ||
790 | # and we don't currently don't do anything with it | ||
791 | if u_arcname.endswith('/RECORD.jws'): | ||
792 | continue | ||
793 | row = records[u_arcname] | ||
794 | if row[2] and str(zinfo.file_size) != row[2]: | ||
795 | raise DistlibException('size mismatch for ' | ||
796 | '%s' % u_arcname) | ||
797 | if row[1]: | ||
798 | kind, value = row[1].split('=', 1) | ||
799 | with zf.open(arcname) as bf: | ||
800 | data = bf.read() | ||
801 | _, digest = self.get_hash(data, kind) | ||
802 | if digest != value: | ||
803 | raise DistlibException('digest mismatch for ' | ||
804 | '%s' % arcname) | ||
805 | |||
    def update(self, modifier, dest_dir=None, **kwargs):
        """
        Update the contents of a wheel in a generic way. The modifier should
        be a callable which expects a dictionary argument: its keys are
        archive-entry paths, and its values are absolute filesystem paths
        where the contents the corresponding archive entries can be found. The
        modifier is free to change the contents of the files pointed to, add
        new entries and remove entries, before returning. This method will
        extract the entire contents of the wheel to a temporary location, call
        the modifier, and then use the passed (and possibly updated)
        dictionary to write a new wheel. If ``dest_dir`` is specified, the new
        wheel is written there -- otherwise, the original wheel is overwritten.

        The modifier should return True if it updated the wheel, else False.
        This method returns the same value the modifier returns.
        """

        def get_version(path_map, info_dir):
            # Locate the distribution version in the extracted files.
            # Prefer the distlib JSON metadata file; fall back to the
            # legacy PKG-INFO if that is what the wheel contains.
            # Returns (version, path) -- both None when no metadata found.
            version = path = None
            key = '%s/%s' % (info_dir, METADATA_FILENAME)
            if key not in path_map:
                key = '%s/PKG-INFO' % info_dir
            if key in path_map:
                path = path_map[key]
                version = Metadata(path=path).version
            return version, path

        def update_version(version, path):
            # Bump the version's local-version segment ('+...') so the
            # rebuilt wheel is distinguishable from the original, then
            # rewrite the metadata file at ``path`` in place.
            updated = None
            try:
                # Parsed only to validate the version; raises
                # UnsupportedVersionError for non-PEP-440 versions.
                v = NormalizedVersion(version)
                i = version.find('-')
                if i < 0:
                    # No existing suffix: append a '+1' local version.
                    updated = '%s+1' % version
                else:
                    # Increment the last numeric component of the suffix.
                    parts = [int(s) for s in version[i + 1:].split('.')]
                    parts[-1] += 1
                    updated = '%s+%s' % (version[:i],
                                         '.'.join(str(i) for i in parts))
            except UnsupportedVersionError:
                logger.debug('Cannot update non-compliant (PEP-440) '
                             'version %r', version)
            if updated:
                md = Metadata(path=path)
                md.version = updated
                # PKG-INFO (anything not the JSON metadata file) must be
                # written in the legacy format.
                legacy = not path.endswith(METADATA_FILENAME)
                md.write(path=path, legacy=legacy)
                logger.debug('Version updated from %r to %r', version,
                             updated)

        pathname = os.path.join(self.dirname, self.filename)
        name_ver = '%s-%s' % (self.name, self.version)
        info_dir = '%s.dist-info' % name_ver
        record_name = posixpath.join(info_dir, 'RECORD')
        with tempdir() as workdir:
            # Extract everything except RECORD (it will be regenerated),
            # building a map of archive entry -> extracted filesystem path.
            with ZipFile(pathname, 'r') as zf:
                path_map = {}
                for zinfo in zf.infolist():
                    arcname = zinfo.filename
                    if isinstance(arcname, text_type):
                        u_arcname = arcname
                    else:
                        u_arcname = arcname.decode('utf-8')
                    if u_arcname == record_name:
                        continue
                    # Guard against path traversal out of the work dir.
                    if '..' in u_arcname:
                        raise DistlibException('invalid entry in '
                                               'wheel: %r' % u_arcname)
                    zf.extract(zinfo, workdir)
                    path = os.path.join(workdir, convert_path(u_arcname))
                    path_map[u_arcname] = path

            # Remember the version.
            original_version, _ = get_version(path_map, info_dir)
            # Files extracted. Call the modifier.
            modified = modifier(path_map, **kwargs)
            if modified:
                # Something changed - need to build a new wheel.
                current_version, path = get_version(path_map, info_dir)
                if current_version and (current_version == original_version):
                    # Add or update local version to signify changes.
                    update_version(current_version, path)
                # Decide where the new wheel goes.
                if dest_dir is None:
                    # Build in the work dir first; copy over the original
                    # only after the new wheel is completely written.
                    fd, newpath = tempfile.mkstemp(suffix='.whl',
                                                   prefix='wheel-update-',
                                                   dir=workdir)
                    os.close(fd)
                else:
                    if not os.path.isdir(dest_dir):
                        raise DistlibException('Not a directory: %r' % dest_dir)
                    newpath = os.path.join(dest_dir, self.filename)
                archive_paths = list(path_map.items())
                distinfo = os.path.join(workdir, info_dir)
                info = distinfo, info_dir
                self.write_records(info, workdir, archive_paths)
                self.build_zip(newpath, archive_paths)
                if dest_dir is None:
                    shutil.copyfile(newpath, pathname)
        return modified
906 | |||
def compatible_tags():
    """
    Return the set of (pyver, abi, arch) tags compatible with this Python.

    Covers the running interpreter's version down to X.0, the ABIs
    reported by the import machinery (plus 'none'), and -- on macOS --
    the fat-binary and older-deployment-target architecture variants.
    """
    # Version tags: the current 'XY' suffix first, then every older
    # minor release of the same major version, newest to oldest.
    versions = [VER_SUFFIX]
    major = VER_SUFFIX[0]
    versions.extend(major + str(minor)
                    for minor in range(sys.version_info[1] - 1, -1, -1))

    # ABI tags gathered from extension-module suffixes such as '.abi3.so'.
    abis = sorted(suffix.split('.', 2)[1]
                  for suffix, _, _ in imp.get_suffixes()
                  if suffix.startswith('.abi'))
    if ABI != 'none':
        abis.insert(0, ABI)
    abis.append('none')

    arches = [ARCH]
    if sys.platform == 'darwin':
        # macOS platform tags look like 'macosx_10_9_x86_64'.
        m = re.match(r'(\w+)_(\d+)_(\d+)_(\w+)$', ARCH)
        if m:
            name, os_major, os_minor, cpu = m.groups()
            # Fat-binary families each single-CPU slice participates in.
            companions = {
                'fat': ('i386', 'ppc'),
                'fat3': ('i386', 'ppc', 'x86_64'),
                'fat64': ('ppc64', 'x86_64'),
                'intel': ('i386', 'x86_64'),
                'universal': ('i386', 'x86_64', 'intel', 'ppc', 'ppc64'),
            }
            matches = [cpu]
            matches.extend(fat for fat, cpus in companions.items()
                           if cpu in cpus)
            # Every deployment target from the current minor down to .0.
            for point in range(int(os_minor), -1, -1):
                for match in matches:
                    candidate = '%s_%s_%s_%s' % (name, os_major, point, match)
                    if candidate != ARCH:  # the exact tag is already present
                        arches.append(candidate)

    tags = set()
    # Most specific: our exact Python version, each ABI, each arch.
    best = IMP_PREFIX + versions[0]
    for abi in abis:
        for arch in arches:
            tags.add((best, abi, arch))

    # Implementation-specific, but ABI- and arch-independent.
    tags.add((IMP_PREFIX + versions[0][0], 'none', 'any'))
    for version in versions:
        tags.add((IMP_PREFIX + version, 'none', 'any'))

    # Generic 'py' tags: no implementation, ABI or arch dependency.
    tags.add(('py' + versions[0][0], 'none', 'any'))
    for version in versions:
        tags.add(('py' + version, 'none', 'any'))
    return tags
967 | |||
968 | |||
# Compute the tag set for the running interpreter once, at import time;
# the helper is then deleted so only the cached constant remains.
COMPATIBLE_TAGS = compatible_tags()

del compatible_tags
972 | |||
973 | |||
def is_compatible(wheel, tags=None):
    """
    Return True if the wheel matches any of the given compatibility tags.

    ``wheel`` may be a Wheel instance or a wheel filename; ``tags`` is an
    iterable of (pyver, abi, arch) tuples, defaulting to the tags computed
    for the running interpreter (COMPATIBLE_TAGS).
    """
    if not isinstance(wheel, Wheel):
        wheel = Wheel(wheel)  # assume it's a filename
    candidates = COMPATIBLE_TAGS if tags is None else tags
    return any(pyver in wheel.pyver and abi in wheel.abi and arch in wheel.arch
               for pyver, abi, arch in candidates)