author     Shubham Saini <shubham6405@gmail.com>  2018-12-11 10:01:23 +0000
committer  Shubham Saini <shubham6405@gmail.com>  2018-12-11 10:01:23 +0000
commit     68df54d6629ec019142eb149dd037774f2d11e7c (patch)
tree       345bc22d46b4e01a4ba8303b94278952a4ed2b9e /venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib

First commit

Diffstat (limited to 'venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib')

23 files changed, 14434 insertions, 0 deletions
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/__init__.py
new file mode 100644
index 0000000..9430718
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/__init__.py
@@ -0,0 +1,23 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012-2017 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
import logging

__version__ = '0.2.7'

class DistlibException(Exception):
    pass

try:
    from logging import NullHandler
except ImportError: # pragma: no cover
    class NullHandler(logging.Handler):
        def handle(self, record): pass
        def emit(self, record): pass
        def createLock(self): self.lock = None

logger = logging.getLogger(__name__)
logger.addHandler(NullHandler())
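
The module above installs a NullHandler on the distlib package logger, with an inline fallback class for interpreters whose logging module predates logging.NullHandler, so the vendored library stays silent unless the host application configures logging. A minimal sketch of how an application could surface distlib's log output (the configuration below is illustrative, not part of this commit):

    import logging

    # Without a root handler, the NullHandler added above discards
    # every record the vendored library emits.
    logging.basicConfig(level=logging.DEBUG)
    logging.getLogger('pip._vendor.distlib').debug('distlib logging visible')
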
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/_backport/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/_backport/__init__.py
new file mode 100644
index 0000000..e6143f1
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/_backport/__init__.py
@@ -0,0 +1,6 @@
1 | """Modules copied from Python 3 standard libraries, for internal use only. | ||
2 | |||
3 | Individual classes and functions are found in d2._backport.misc. Intended | ||
4 | usage is to always import things missing from 3.1 from that module: the | ||
5 | built-in/stdlib objects will be used if found. | ||
6 | """ | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/_backport/misc.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/_backport/misc.py
new file mode 100644
index 0000000..6eb7b86
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/_backport/misc.py
@@ -0,0 +1,41 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012 The Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""Backports for individual classes and functions."""

import os
import sys

__all__ = ['cache_from_source', 'callable', 'fsencode']


try:
    from imp import cache_from_source
except ImportError:
    def cache_from_source(py_file, debug=__debug__):
        ext = debug and 'c' or 'o'
        return py_file + ext


try:
    callable = callable
except NameError:
    from collections import Callable

    def callable(obj):
        return isinstance(obj, Callable)


try:
    fsencode = os.fsencode
except AttributeError:
    def fsencode(filename):
        if isinstance(filename, bytes):
            return filename
        elif isinstance(filename, str):
            return filename.encode(sys.getfilesystemencoding())
        else:
            raise TypeError("expect bytes or str, not %s" %
                            type(filename).__name__)
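
Every definition above follows the same bind-or-fallback idiom: try to bind the stdlib object, and only define a substitute when the import or attribute lookup fails. A small sketch of the same idiom applied outside this module (the simplejson fallback is a hypothetical example, not something this commit uses):

    try:
        import json                # stdlib since Python 2.6
    except ImportError:  # pragma: no cover
        import simplejson as json  # hypothetical third-party fallback

    print(json.dumps({'ok': True}))
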
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/_backport/shutil.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/_backport/shutil.py
new file mode 100644
index 0000000..becbfd7
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/_backport/shutil.py
@@ -0,0 +1,761 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012 The Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""Utility functions for copying and archiving files and directory trees.

XXX The functions here don't copy the resource fork or other metadata on Mac.

"""

import os
import sys
import stat
from os.path import abspath
import fnmatch
import collections
import errno
from . import tarfile

try:
    import bz2
    _BZ2_SUPPORTED = True
except ImportError:
    _BZ2_SUPPORTED = False

try:
    from pwd import getpwnam
except ImportError:
    getpwnam = None

try:
    from grp import getgrnam
except ImportError:
    getgrnam = None

__all__ = ["copyfileobj", "copyfile", "copymode", "copystat", "copy", "copy2",
           "copytree", "move", "rmtree", "Error", "SpecialFileError",
           "ExecError", "make_archive", "get_archive_formats",
           "register_archive_format", "unregister_archive_format",
           "get_unpack_formats", "register_unpack_format",
           "unregister_unpack_format", "unpack_archive", "ignore_patterns"]

class Error(EnvironmentError):
    pass

class SpecialFileError(EnvironmentError):
    """Raised when trying to do a kind of operation (e.g. copying) which is
    not supported on a special file (e.g. a named pipe)"""

class ExecError(EnvironmentError):
    """Raised when a command could not be executed"""

class ReadError(EnvironmentError):
    """Raised when an archive cannot be read"""

class RegistryError(Exception):
    """Raised when a registry operation with the archiving
    and unpacking registries fails"""


try:
    WindowsError
except NameError:
    WindowsError = None

def copyfileobj(fsrc, fdst, length=16*1024):
    """copy data from file-like object fsrc to file-like object fdst"""
    while 1:
        buf = fsrc.read(length)
        if not buf:
            break
        fdst.write(buf)

def _samefile(src, dst):
    # Macintosh, Unix.
    if hasattr(os.path, 'samefile'):
        try:
            return os.path.samefile(src, dst)
        except OSError:
            return False

    # All other platforms: check for same pathname.
    return (os.path.normcase(os.path.abspath(src)) ==
            os.path.normcase(os.path.abspath(dst)))

def copyfile(src, dst):
    """Copy data from src to dst"""
    if _samefile(src, dst):
        raise Error("`%s` and `%s` are the same file" % (src, dst))

    for fn in [src, dst]:
        try:
            st = os.stat(fn)
        except OSError:
            # File most likely does not exist
            pass
        else:
            # XXX What about other special files? (sockets, devices...)
            if stat.S_ISFIFO(st.st_mode):
                raise SpecialFileError("`%s` is a named pipe" % fn)

    with open(src, 'rb') as fsrc:
        with open(dst, 'wb') as fdst:
            copyfileobj(fsrc, fdst)

def copymode(src, dst):
    """Copy mode bits from src to dst"""
    if hasattr(os, 'chmod'):
        st = os.stat(src)
        mode = stat.S_IMODE(st.st_mode)
        os.chmod(dst, mode)

def copystat(src, dst):
    """Copy all stat info (mode bits, atime, mtime, flags) from src to dst"""
    st = os.stat(src)
    mode = stat.S_IMODE(st.st_mode)
    if hasattr(os, 'utime'):
        os.utime(dst, (st.st_atime, st.st_mtime))
    if hasattr(os, 'chmod'):
        os.chmod(dst, mode)
    if hasattr(os, 'chflags') and hasattr(st, 'st_flags'):
        try:
            os.chflags(dst, st.st_flags)
        except OSError as why:
            if (not hasattr(errno, 'EOPNOTSUPP') or
                why.errno != errno.EOPNOTSUPP):
                raise

def copy(src, dst):
    """Copy data and mode bits ("cp src dst").

    The destination may be a directory.

    """
    if os.path.isdir(dst):
        dst = os.path.join(dst, os.path.basename(src))
    copyfile(src, dst)
    copymode(src, dst)

def copy2(src, dst):
    """Copy data and all stat info ("cp -p src dst").

    The destination may be a directory.

    """
    if os.path.isdir(dst):
        dst = os.path.join(dst, os.path.basename(src))
    copyfile(src, dst)
    copystat(src, dst)

def ignore_patterns(*patterns):
    """Function that can be used as copytree() ignore parameter.

    Patterns is a sequence of glob-style patterns
    that are used to exclude files"""
    def _ignore_patterns(path, names):
        ignored_names = []
        for pattern in patterns:
            ignored_names.extend(fnmatch.filter(names, pattern))
        return set(ignored_names)
    return _ignore_patterns

def copytree(src, dst, symlinks=False, ignore=None, copy_function=copy2,
             ignore_dangling_symlinks=False):
    """Recursively copy a directory tree.

    The destination directory must not already exist.
    If exception(s) occur, an Error is raised with a list of reasons.

    If the optional symlinks flag is true, symbolic links in the
    source tree result in symbolic links in the destination tree; if
    it is false, the contents of the files pointed to by symbolic
    links are copied. If the file pointed by the symlink doesn't
    exist, an exception will be added in the list of errors raised in
    an Error exception at the end of the copy process.

    You can set the optional ignore_dangling_symlinks flag to true if you
    want to silence this exception. Notice that this has no effect on
    platforms that don't support os.symlink.

    The optional ignore argument is a callable. If given, it
    is called with the `src` parameter, which is the directory
    being visited by copytree(), and `names` which is the list of
    `src` contents, as returned by os.listdir():

        callable(src, names) -> ignored_names

    Since copytree() is called recursively, the callable will be
    called once for each directory that is copied. It returns a
    list of names relative to the `src` directory that should
    not be copied.

    The optional copy_function argument is a callable that will be used
    to copy each file. It will be called with the source path and the
    destination path as arguments. By default, copy2() is used, but any
    function that supports the same signature (like copy()) can be used.

    """
    names = os.listdir(src)
    if ignore is not None:
        ignored_names = ignore(src, names)
    else:
        ignored_names = set()

    os.makedirs(dst)
    errors = []
    for name in names:
        if name in ignored_names:
            continue
        srcname = os.path.join(src, name)
        dstname = os.path.join(dst, name)
        try:
            if os.path.islink(srcname):
                linkto = os.readlink(srcname)
                if symlinks:
                    os.symlink(linkto, dstname)
                else:
                    # ignore dangling symlink if the flag is on
                    if not os.path.exists(linkto) and ignore_dangling_symlinks:
                        continue
                    # otherwise let the copy occur. copy2 will raise an error
                    copy_function(srcname, dstname)
            elif os.path.isdir(srcname):
                copytree(srcname, dstname, symlinks, ignore, copy_function)
            else:
                # Will raise a SpecialFileError for unsupported file types
                copy_function(srcname, dstname)
        # catch the Error from the recursive copytree so that we can
        # continue with other files
        except Error as err:
            errors.extend(err.args[0])
        except EnvironmentError as why:
            errors.append((srcname, dstname, str(why)))
    try:
        copystat(src, dst)
    except OSError as why:
        if WindowsError is not None and isinstance(why, WindowsError):
            # Copying file access times may fail on Windows
            pass
        else:
            errors.extend((src, dst, str(why)))
    if errors:
        raise Error(errors)

def rmtree(path, ignore_errors=False, onerror=None):
    """Recursively delete a directory tree.

    If ignore_errors is set, errors are ignored; otherwise, if onerror
    is set, it is called to handle the error with arguments (func,
    path, exc_info) where func is os.listdir, os.remove, or os.rmdir;
    path is the argument to that function that caused it to fail; and
    exc_info is a tuple returned by sys.exc_info(). If ignore_errors
    is false and onerror is None, an exception is raised.

    """
    if ignore_errors:
        def onerror(*args):
            pass
    elif onerror is None:
        def onerror(*args):
            raise
    try:
        if os.path.islink(path):
            # symlinks to directories are forbidden, see bug #1669
            raise OSError("Cannot call rmtree on a symbolic link")
    except OSError:
        onerror(os.path.islink, path, sys.exc_info())
        # can't continue even if onerror hook returns
        return
    names = []
    try:
        names = os.listdir(path)
    except os.error:
        onerror(os.listdir, path, sys.exc_info())
    for name in names:
        fullname = os.path.join(path, name)
        try:
            mode = os.lstat(fullname).st_mode
        except os.error:
            mode = 0
        if stat.S_ISDIR(mode):
            rmtree(fullname, ignore_errors, onerror)
        else:
            try:
                os.remove(fullname)
            except os.error:
                onerror(os.remove, fullname, sys.exc_info())
    try:
        os.rmdir(path)
    except os.error:
        onerror(os.rmdir, path, sys.exc_info())


def _basename(path):
    # A basename() variant which first strips the trailing slash, if present.
    # Thus we always get the last component of the path, even for directories.
    return os.path.basename(path.rstrip(os.path.sep))

def move(src, dst):
    """Recursively move a file or directory to another location. This is
    similar to the Unix "mv" command.

    If the destination is a directory or a symlink to a directory, the source
    is moved inside the directory. The destination path must not already
    exist.

    If the destination already exists but is not a directory, it may be
    overwritten depending on os.rename() semantics.

    If the destination is on our current filesystem, then rename() is used.
    Otherwise, src is copied to the destination and then removed.
    A lot more could be done here... A look at a mv.c shows a lot of
    the issues this implementation glosses over.

    """
    real_dst = dst
    if os.path.isdir(dst):
        if _samefile(src, dst):
            # We might be on a case insensitive filesystem,
            # perform the rename anyway.
            os.rename(src, dst)
            return

        real_dst = os.path.join(dst, _basename(src))
        if os.path.exists(real_dst):
            raise Error("Destination path '%s' already exists" % real_dst)
    try:
        os.rename(src, real_dst)
    except OSError:
        if os.path.isdir(src):
            if _destinsrc(src, dst):
                raise Error("Cannot move a directory '%s' into itself '%s'." % (src, dst))
            copytree(src, real_dst, symlinks=True)
            rmtree(src)
        else:
            copy2(src, real_dst)
            os.unlink(src)

def _destinsrc(src, dst):
    src = abspath(src)
    dst = abspath(dst)
    if not src.endswith(os.path.sep):
        src += os.path.sep
    if not dst.endswith(os.path.sep):
        dst += os.path.sep
    return dst.startswith(src)

def _get_gid(name):
    """Returns a gid, given a group name."""
    if getgrnam is None or name is None:
        return None
    try:
        result = getgrnam(name)
    except KeyError:
        result = None
    if result is not None:
        return result[2]
    return None

def _get_uid(name):
    """Returns an uid, given a user name."""
    if getpwnam is None or name is None:
        return None
    try:
        result = getpwnam(name)
    except KeyError:
        result = None
    if result is not None:
        return result[2]
    return None

def _make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0,
                  owner=None, group=None, logger=None):
    """Create a (possibly compressed) tar file from all the files under
    'base_dir'.

    'compress' must be "gzip" (the default), "bzip2", or None.

    'owner' and 'group' can be used to define an owner and a group for the
    archive that is being built. If not provided, the current owner and group
    will be used.

    The output tar file will be named 'base_name' + ".tar", possibly plus
    the appropriate compression extension (".gz", or ".bz2").

    Returns the output filename.
    """
    tar_compression = {'gzip': 'gz', None: ''}
    compress_ext = {'gzip': '.gz'}

    if _BZ2_SUPPORTED:
        tar_compression['bzip2'] = 'bz2'
        compress_ext['bzip2'] = '.bz2'

    # flags for compression program, each element of list will be an argument
    if compress is not None and compress not in compress_ext:
        raise ValueError("bad value for 'compress', or compression format not "
                         "supported : {0}".format(compress))

    archive_name = base_name + '.tar' + compress_ext.get(compress, '')
    archive_dir = os.path.dirname(archive_name)

    if not os.path.exists(archive_dir):
        if logger is not None:
            logger.info("creating %s", archive_dir)
        if not dry_run:
            os.makedirs(archive_dir)

    # creating the tarball
    if logger is not None:
        logger.info('Creating tar archive')

    uid = _get_uid(owner)
    gid = _get_gid(group)

    def _set_uid_gid(tarinfo):
        if gid is not None:
            tarinfo.gid = gid
            tarinfo.gname = group
        if uid is not None:
            tarinfo.uid = uid
            tarinfo.uname = owner
        return tarinfo

    if not dry_run:
        tar = tarfile.open(archive_name, 'w|%s' % tar_compression[compress])
        try:
            tar.add(base_dir, filter=_set_uid_gid)
        finally:
            tar.close()

    return archive_name

def _call_external_zip(base_dir, zip_filename, verbose=False, dry_run=False):
    # XXX see if we want to keep an external call here
    if verbose:
        zipoptions = "-r"
    else:
        zipoptions = "-rq"
    from distutils.errors import DistutilsExecError
    from distutils.spawn import spawn
    try:
        spawn(["zip", zipoptions, zip_filename, base_dir], dry_run=dry_run)
    except DistutilsExecError:
        # XXX really should distinguish between "couldn't find
        # external 'zip' command" and "zip failed".
        raise ExecError("unable to create zip file '%s': "
                        "could neither import the 'zipfile' module nor "
                        "find a standalone zip utility" % zip_filename)

def _make_zipfile(base_name, base_dir, verbose=0, dry_run=0, logger=None):
    """Create a zip file from all the files under 'base_dir'.

    The output zip file will be named 'base_name' + ".zip". Uses either the
    "zipfile" Python module (if available) or the InfoZIP "zip" utility
    (if installed and found on the default search path). If neither tool is
    available, raises ExecError. Returns the name of the output zip
    file.
    """
    zip_filename = base_name + ".zip"
    archive_dir = os.path.dirname(base_name)

    if not os.path.exists(archive_dir):
        if logger is not None:
            logger.info("creating %s", archive_dir)
        if not dry_run:
            os.makedirs(archive_dir)

    # If zipfile module is not available, try spawning an external 'zip'
    # command.
    try:
        import zipfile
    except ImportError:
        zipfile = None

    if zipfile is None:
        _call_external_zip(base_dir, zip_filename, verbose, dry_run)
    else:
        if logger is not None:
            logger.info("creating '%s' and adding '%s' to it",
                        zip_filename, base_dir)

        if not dry_run:
            zip = zipfile.ZipFile(zip_filename, "w",
                                  compression=zipfile.ZIP_DEFLATED)

            for dirpath, dirnames, filenames in os.walk(base_dir):
                for name in filenames:
                    path = os.path.normpath(os.path.join(dirpath, name))
                    if os.path.isfile(path):
                        zip.write(path, path)
                        if logger is not None:
                            logger.info("adding '%s'", path)
            zip.close()

    return zip_filename

_ARCHIVE_FORMATS = {
    'gztar': (_make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"),
    'bztar': (_make_tarball, [('compress', 'bzip2')], "bzip2'ed tar-file"),
    'tar': (_make_tarball, [('compress', None)], "uncompressed tar file"),
    'zip': (_make_zipfile, [], "ZIP file"),
    }

if _BZ2_SUPPORTED:
    _ARCHIVE_FORMATS['bztar'] = (_make_tarball, [('compress', 'bzip2')],
                                 "bzip2'ed tar-file")

def get_archive_formats():
    """Returns a list of supported formats for archiving and unarchiving.

    Each element of the returned sequence is a tuple (name, description)
    """
    formats = [(name, registry[2]) for name, registry in
               _ARCHIVE_FORMATS.items()]
    formats.sort()
    return formats

def register_archive_format(name, function, extra_args=None, description=''):
    """Registers an archive format.

    name is the name of the format. function is the callable that will be
    used to create archives. If provided, extra_args is a sequence of
    (name, value) tuples that will be passed as arguments to the callable.
    description can be provided to describe the format, and will be returned
    by the get_archive_formats() function.
    """
    if extra_args is None:
        extra_args = []
    if not isinstance(function, collections.Callable):
        raise TypeError('The %s object is not callable' % function)
    if not isinstance(extra_args, (tuple, list)):
        raise TypeError('extra_args needs to be a sequence')
    for element in extra_args:
        if not isinstance(element, (tuple, list)) or len(element) != 2:
            raise TypeError('extra_args elements are : (arg_name, value)')

    _ARCHIVE_FORMATS[name] = (function, extra_args, description)

def unregister_archive_format(name):
    del _ARCHIVE_FORMATS[name]

def make_archive(base_name, format, root_dir=None, base_dir=None, verbose=0,
                 dry_run=0, owner=None, group=None, logger=None):
    """Create an archive file (eg. zip or tar).

    'base_name' is the name of the file to create, minus any format-specific
    extension; 'format' is the archive format: one of "zip", "tar", "bztar"
    or "gztar".

    'root_dir' is a directory that will be the root directory of the
    archive; ie. we typically chdir into 'root_dir' before creating the
    archive. 'base_dir' is the directory where we start archiving from;
    ie. 'base_dir' will be the common prefix of all files and
    directories in the archive. 'root_dir' and 'base_dir' both default
    to the current directory. Returns the name of the archive file.

    'owner' and 'group' are used when creating a tar archive. By default,
    uses the current owner and group.
    """
    save_cwd = os.getcwd()
    if root_dir is not None:
        if logger is not None:
            logger.debug("changing into '%s'", root_dir)
        base_name = os.path.abspath(base_name)
        if not dry_run:
            os.chdir(root_dir)

    if base_dir is None:
        base_dir = os.curdir

    kwargs = {'dry_run': dry_run, 'logger': logger}

    try:
        format_info = _ARCHIVE_FORMATS[format]
    except KeyError:
        raise ValueError("unknown archive format '%s'" % format)

    func = format_info[0]
    for arg, val in format_info[1]:
        kwargs[arg] = val

    if format != 'zip':
        kwargs['owner'] = owner
        kwargs['group'] = group

    try:
        filename = func(base_name, base_dir, **kwargs)
    finally:
        if root_dir is not None:
            if logger is not None:
                logger.debug("changing back to '%s'", save_cwd)
            os.chdir(save_cwd)

    return filename


def get_unpack_formats():
    """Returns a list of supported formats for unpacking.

    Each element of the returned sequence is a tuple
    (name, extensions, description)
    """
    formats = [(name, info[0], info[3]) for name, info in
               _UNPACK_FORMATS.items()]
    formats.sort()
    return formats

def _check_unpack_options(extensions, function, extra_args):
    """Checks what gets registered as an unpacker."""
    # first make sure no other unpacker is registered for this extension
    existing_extensions = {}
    for name, info in _UNPACK_FORMATS.items():
        for ext in info[0]:
            existing_extensions[ext] = name

    for extension in extensions:
        if extension in existing_extensions:
            msg = '%s is already registered for "%s"'
            raise RegistryError(msg % (extension,
                                       existing_extensions[extension]))

    if not isinstance(function, collections.Callable):
        raise TypeError('The registered function must be a callable')


def register_unpack_format(name, extensions, function, extra_args=None,
                           description=''):
    """Registers an unpack format.

    `name` is the name of the format. `extensions` is a list of extensions
    corresponding to the format.

    `function` is the callable that will be
    used to unpack archives. The callable will receive archives to unpack.
    If it's unable to handle an archive, it needs to raise a ReadError
    exception.

    If provided, `extra_args` is a sequence of
    (name, value) tuples that will be passed as arguments to the callable.
    description can be provided to describe the format, and will be returned
    by the get_unpack_formats() function.
    """
    if extra_args is None:
        extra_args = []
    _check_unpack_options(extensions, function, extra_args)
    _UNPACK_FORMATS[name] = extensions, function, extra_args, description

def unregister_unpack_format(name):
    """Removes the pack format from the registry."""
    del _UNPACK_FORMATS[name]

def _ensure_directory(path):
    """Ensure that the parent directory of `path` exists"""
    dirname = os.path.dirname(path)
    if not os.path.isdir(dirname):
        os.makedirs(dirname)

def _unpack_zipfile(filename, extract_dir):
    """Unpack zip `filename` to `extract_dir`
    """
    try:
        import zipfile
    except ImportError:
        raise ReadError('zlib not supported, cannot unpack this archive.')

    if not zipfile.is_zipfile(filename):
        raise ReadError("%s is not a zip file" % filename)

    zip = zipfile.ZipFile(filename)
    try:
        for info in zip.infolist():
            name = info.filename

            # don't extract absolute paths or ones with .. in them
            if name.startswith('/') or '..' in name:
                continue

            target = os.path.join(extract_dir, *name.split('/'))
            if not target:
                continue

            _ensure_directory(target)
            if not name.endswith('/'):
                # file
                data = zip.read(info.filename)
                f = open(target, 'wb')
                try:
                    f.write(data)
                finally:
                    f.close()
                    del data
    finally:
        zip.close()

def _unpack_tarfile(filename, extract_dir):
    """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir`
    """
    try:
        tarobj = tarfile.open(filename)
    except tarfile.TarError:
        raise ReadError(
            "%s is not a compressed or uncompressed tar file" % filename)
    try:
        tarobj.extractall(extract_dir)
    finally:
        tarobj.close()

_UNPACK_FORMATS = {
    'gztar': (['.tar.gz', '.tgz'], _unpack_tarfile, [], "gzip'ed tar-file"),
    'tar': (['.tar'], _unpack_tarfile, [], "uncompressed tar file"),
    'zip': (['.zip'], _unpack_zipfile, [], "ZIP file")
    }

if _BZ2_SUPPORTED:
    _UNPACK_FORMATS['bztar'] = (['.bz2'], _unpack_tarfile, [],
                                "bzip2'ed tar-file")

def _find_unpack_format(filename):
    for name, info in _UNPACK_FORMATS.items():
        for extension in info[0]:
            if filename.endswith(extension):
                return name
    return None

def unpack_archive(filename, extract_dir=None, format=None):
    """Unpack an archive.

    `filename` is the name of the archive.

    `extract_dir` is the name of the target directory, where the archive
    is unpacked. If not provided, the current working directory is used.

    `format` is the archive format: one of "zip", "tar", or "gztar". Or any
    other registered format. If not provided, unpack_archive will use the
    filename extension and see if an unpacker was registered for that
    extension.

    In case none is found, a ValueError is raised.
    """
    if extract_dir is None:
        extract_dir = os.getcwd()

    if format is not None:
        try:
            format_info = _UNPACK_FORMATS[format]
        except KeyError:
            raise ValueError("Unknown unpack format '{0}'".format(format))

        func = format_info[1]
        func(filename, extract_dir, **dict(format_info[2]))
    else:
        # we need to look at the registered unpackers supported extensions
        format = _find_unpack_format(filename)
        if format is None:
            raise ReadError("Unknown archive format '{0}'".format(filename))

        func = _UNPACK_FORMATS[format][1]
        kwargs = dict(_UNPACK_FORMATS[format][2])
        func(filename, extract_dir, **kwargs)
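
This backport mirrors the make_archive/unpack_archive API that later landed in the stdlib shutil. A short round-trip sketch against the vendored module (the paths are temporary placeholders; on a modern interpreter the stdlib shutil accepts the same calls):

    import os
    import tempfile

    from pip._vendor.distlib._backport import shutil as backport_shutil

    src = tempfile.mkdtemp()
    out = tempfile.mkdtemp()
    with open(os.path.join(src, 'hello.txt'), 'w') as f:
        f.write('hi')

    # Pack the tree rooted at src, then unpack it into a sibling directory.
    archive = backport_shutil.make_archive(os.path.join(out, 'demo'),
                                           'gztar', root_dir=src)
    backport_shutil.unpack_archive(archive, extract_dir=os.path.join(out, 'copy'))
    print(os.listdir(os.path.join(out, 'copy')))  # ['hello.txt']
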
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/_backport/sysconfig.cfg b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/_backport/sysconfig.cfg
new file mode 100644
index 0000000..c92cd48
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/_backport/sysconfig.cfg
@@ -0,0 +1,84 @@
[posix_prefix]
# Configuration directories. Some of these come straight out of the
# configure script. They are for implementing the other variables, not to
# be used directly in [resource_locations].
confdir = /etc
datadir = /usr/share
libdir = /usr/lib
statedir = /var
# User resource directory
local = ~/.local/{distribution.name}

stdlib = {base}/lib/python{py_version_short}
platstdlib = {platbase}/lib/python{py_version_short}
purelib = {base}/lib/python{py_version_short}/site-packages
platlib = {platbase}/lib/python{py_version_short}/site-packages
include = {base}/include/python{py_version_short}{abiflags}
platinclude = {platbase}/include/python{py_version_short}{abiflags}
data = {base}

[posix_home]
stdlib = {base}/lib/python
platstdlib = {base}/lib/python
purelib = {base}/lib/python
platlib = {base}/lib/python
include = {base}/include/python
platinclude = {base}/include/python
scripts = {base}/bin
data = {base}

[nt]
stdlib = {base}/Lib
platstdlib = {base}/Lib
purelib = {base}/Lib/site-packages
platlib = {base}/Lib/site-packages
include = {base}/Include
platinclude = {base}/Include
scripts = {base}/Scripts
data = {base}

[os2]
stdlib = {base}/Lib
platstdlib = {base}/Lib
purelib = {base}/Lib/site-packages
platlib = {base}/Lib/site-packages
include = {base}/Include
platinclude = {base}/Include
scripts = {base}/Scripts
data = {base}

[os2_home]
stdlib = {userbase}/lib/python{py_version_short}
platstdlib = {userbase}/lib/python{py_version_short}
purelib = {userbase}/lib/python{py_version_short}/site-packages
platlib = {userbase}/lib/python{py_version_short}/site-packages
include = {userbase}/include/python{py_version_short}
scripts = {userbase}/bin
data = {userbase}

[nt_user]
stdlib = {userbase}/Python{py_version_nodot}
platstdlib = {userbase}/Python{py_version_nodot}
purelib = {userbase}/Python{py_version_nodot}/site-packages
platlib = {userbase}/Python{py_version_nodot}/site-packages
include = {userbase}/Python{py_version_nodot}/Include
scripts = {userbase}/Scripts
data = {userbase}

[posix_user]
stdlib = {userbase}/lib/python{py_version_short}
platstdlib = {userbase}/lib/python{py_version_short}
purelib = {userbase}/lib/python{py_version_short}/site-packages
platlib = {userbase}/lib/python{py_version_short}/site-packages
include = {userbase}/include/python{py_version_short}
scripts = {userbase}/bin
data = {userbase}

[osx_framework_user]
stdlib = {userbase}/lib/python
platstdlib = {userbase}/lib/python
purelib = {userbase}/lib/python/site-packages
platlib = {userbase}/lib/python/site-packages
include = {userbase}/include
scripts = {userbase}/bin
data = {userbase}
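
Each section above is an install scheme: a mapping from path names (stdlib, purelib, scripts, ...) to templates whose {placeholders} (base, userbase, py_version_short, abiflags, and so on) are filled in from the configuration variables at lookup time. A hedged sketch of how sysconfig.py, the next file in the diff, resolves these templates:

    from pip._vendor.distlib._backport import sysconfig

    # 'purelib' in [posix_prefix] expands
    # {base}/lib/python{py_version_short}/site-packages
    print(sysconfig.get_path('purelib', 'posix_prefix'))
    print(sysconfig.get_paths('posix_user')['scripts'])  # {userbase}/bin
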
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/_backport/sysconfig.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/_backport/sysconfig.py
new file mode 100644
index 0000000..b243da3
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/_backport/sysconfig.py
@@ -0,0 +1,788 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012 The Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""Access to Python's configuration information."""

import codecs
import os
import re
import sys
from os.path import pardir, realpath
try:
    import configparser
except ImportError:
    import ConfigParser as configparser


__all__ = [
    'get_config_h_filename',
    'get_config_var',
    'get_config_vars',
    'get_makefile_filename',
    'get_path',
    'get_path_names',
    'get_paths',
    'get_platform',
    'get_python_version',
    'get_scheme_names',
    'parse_config_h',
]


def _safe_realpath(path):
    try:
        return realpath(path)
    except OSError:
        return path


if sys.executable:
    _PROJECT_BASE = os.path.dirname(_safe_realpath(sys.executable))
else:
    # sys.executable can be empty if argv[0] has been changed and Python is
    # unable to retrieve the real program name
    _PROJECT_BASE = _safe_realpath(os.getcwd())

if os.name == "nt" and "pcbuild" in _PROJECT_BASE[-8:].lower():
    _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir))
# PC/VS7.1
if os.name == "nt" and "\\pc\\v" in _PROJECT_BASE[-10:].lower():
    _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir))
# PC/AMD64
if os.name == "nt" and "\\pcbuild\\amd64" in _PROJECT_BASE[-14:].lower():
    _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir))


def is_python_build():
    for fn in ("Setup.dist", "Setup.local"):
        if os.path.isfile(os.path.join(_PROJECT_BASE, "Modules", fn)):
            return True
    return False

_PYTHON_BUILD = is_python_build()

_cfg_read = False

def _ensure_cfg_read():
    global _cfg_read
    if not _cfg_read:
        from ..resources import finder
        backport_package = __name__.rsplit('.', 1)[0]
        _finder = finder(backport_package)
        _cfgfile = _finder.find('sysconfig.cfg')
        assert _cfgfile, 'sysconfig.cfg exists'
        with _cfgfile.as_stream() as s:
            _SCHEMES.readfp(s)
        if _PYTHON_BUILD:
            for scheme in ('posix_prefix', 'posix_home'):
                _SCHEMES.set(scheme, 'include', '{srcdir}/Include')
                _SCHEMES.set(scheme, 'platinclude', '{projectbase}/.')

        _cfg_read = True


_SCHEMES = configparser.RawConfigParser()
_VAR_REPL = re.compile(r'\{([^{]*?)\}')

def _expand_globals(config):
    _ensure_cfg_read()
    if config.has_section('globals'):
        globals = config.items('globals')
    else:
        globals = tuple()

    sections = config.sections()
    for section in sections:
        if section == 'globals':
            continue
        for option, value in globals:
            if config.has_option(section, option):
                continue
            config.set(section, option, value)
    config.remove_section('globals')

    # now expanding local variables defined in the cfg file
    #
    for section in config.sections():
        variables = dict(config.items(section))

        def _replacer(matchobj):
            name = matchobj.group(1)
            if name in variables:
                return variables[name]
            return matchobj.group(0)

        for option, value in config.items(section):
            config.set(section, option, _VAR_REPL.sub(_replacer, value))

#_expand_globals(_SCHEMES)

# FIXME don't rely on sys.version here, its format is an implementation detail
# of CPython, use sys.version_info or sys.hexversion
_PY_VERSION = sys.version.split()[0]
_PY_VERSION_SHORT = sys.version[:3]
_PY_VERSION_SHORT_NO_DOT = _PY_VERSION[0] + _PY_VERSION[2]
_PREFIX = os.path.normpath(sys.prefix)
_EXEC_PREFIX = os.path.normpath(sys.exec_prefix)
_CONFIG_VARS = None
_USER_BASE = None


def _subst_vars(path, local_vars):
    """In the string `path`, replace tokens like {some.thing} with the
    corresponding value from the map `local_vars`.

    If there is no corresponding value, leave the token unchanged.
    """
    def _replacer(matchobj):
        name = matchobj.group(1)
        if name in local_vars:
            return local_vars[name]
        elif name in os.environ:
            return os.environ[name]
        return matchobj.group(0)
    return _VAR_REPL.sub(_replacer, path)


def _extend_dict(target_dict, other_dict):
    target_keys = target_dict.keys()
    for key, value in other_dict.items():
        if key in target_keys:
            continue
        target_dict[key] = value


def _expand_vars(scheme, vars):
    res = {}
    if vars is None:
        vars = {}
    _extend_dict(vars, get_config_vars())

    for key, value in _SCHEMES.items(scheme):
        if os.name in ('posix', 'nt'):
            value = os.path.expanduser(value)
        res[key] = os.path.normpath(_subst_vars(value, vars))
    return res


def format_value(value, vars):
    def _replacer(matchobj):
        name = matchobj.group(1)
        if name in vars:
            return vars[name]
        return matchobj.group(0)
    return _VAR_REPL.sub(_replacer, value)


def _get_default_scheme():
    if os.name == 'posix':
        # the default scheme for posix is posix_prefix
        return 'posix_prefix'
    return os.name


def _getuserbase():
    env_base = os.environ.get("PYTHONUSERBASE", None)

    def joinuser(*args):
        return os.path.expanduser(os.path.join(*args))

    # what about 'os2emx', 'riscos' ?
    if os.name == "nt":
        base = os.environ.get("APPDATA") or "~"
        if env_base:
            return env_base
        else:
            return joinuser(base, "Python")

    if sys.platform == "darwin":
        framework = get_config_var("PYTHONFRAMEWORK")
        if framework:
            if env_base:
                return env_base
            else:
                return joinuser("~", "Library", framework, "%d.%d" %
                                sys.version_info[:2])

    if env_base:
        return env_base
    else:
        return joinuser("~", ".local")


def _parse_makefile(filename, vars=None):
    """Parse a Makefile-style file.

    A dictionary containing name/value pairs is returned. If an
    optional dictionary is passed in as the second argument, it is
    used instead of a new dictionary.
    """
    # Regexes needed for parsing Makefile (and similar syntaxes,
    # like old-style Setup files).
    _variable_rx = re.compile(r"([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)")
    _findvar1_rx = re.compile(r"\$\(([A-Za-z][A-Za-z0-9_]*)\)")
    _findvar2_rx = re.compile(r"\${([A-Za-z][A-Za-z0-9_]*)}")

    if vars is None:
        vars = {}
    done = {}
    notdone = {}

    with codecs.open(filename, encoding='utf-8', errors="surrogateescape") as f:
        lines = f.readlines()

    for line in lines:
        if line.startswith('#') or line.strip() == '':
            continue
        m = _variable_rx.match(line)
        if m:
            n, v = m.group(1, 2)
            v = v.strip()
            # `$$' is a literal `$' in make
            tmpv = v.replace('$$', '')

            if "$" in tmpv:
                notdone[n] = v
            else:
                try:
                    v = int(v)
                except ValueError:
                    # insert literal `$'
                    done[n] = v.replace('$$', '$')
                else:
                    done[n] = v

    # do variable interpolation here
    variables = list(notdone.keys())

    # Variables with a 'PY_' prefix in the makefile. These need to
    # be made available without that prefix through sysconfig.
    # Special care is needed to ensure that variable expansion works, even
    # if the expansion uses the name without a prefix.
    renamed_variables = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS')

    while len(variables) > 0:
        for name in tuple(variables):
            value = notdone[name]
            m = _findvar1_rx.search(value) or _findvar2_rx.search(value)
            if m is not None:
                n = m.group(1)
                found = True
                if n in done:
                    item = str(done[n])
                elif n in notdone:
                    # get it on a subsequent round
                    found = False
                elif n in os.environ:
                    # do it like make: fall back to environment
                    item = os.environ[n]

                elif n in renamed_variables:
                    if (name.startswith('PY_') and
                        name[3:] in renamed_variables):
                        item = ""

                    elif 'PY_' + n in notdone:
                        found = False

                    else:
                        item = str(done['PY_' + n])

                else:
                    done[n] = item = ""

                if found:
                    after = value[m.end():]
                    value = value[:m.start()] + item + after
                    if "$" in after:
                        notdone[name] = value
                    else:
                        try:
                            value = int(value)
                        except ValueError:
                            done[name] = value.strip()
                        else:
                            done[name] = value
                        variables.remove(name)

                        if (name.startswith('PY_') and
                            name[3:] in renamed_variables):

                            name = name[3:]
                            if name not in done:
                                done[name] = value

            else:
                # bogus variable reference (e.g. "prefix=$/opt/python");
                # just drop it since we can't deal
                done[name] = value
                variables.remove(name)

    # strip spurious spaces
    for k, v in done.items():
        if isinstance(v, str):
            done[k] = v.strip()

    # save the results in the global dictionary
    vars.update(done)
    return vars


def get_makefile_filename():
    """Return the path of the Makefile."""
    if _PYTHON_BUILD:
        return os.path.join(_PROJECT_BASE, "Makefile")
    if hasattr(sys, 'abiflags'):
        config_dir_name = 'config-%s%s' % (_PY_VERSION_SHORT, sys.abiflags)
    else:
        config_dir_name = 'config'
    return os.path.join(get_path('stdlib'), config_dir_name, 'Makefile')


def _init_posix(vars):
    """Initialize the module as appropriate for POSIX systems."""
    # load the installed Makefile:
    makefile = get_makefile_filename()
    try:
        _parse_makefile(makefile, vars)
    except IOError as e:
        msg = "invalid Python installation: unable to open %s" % makefile
        if hasattr(e, "strerror"):
            msg = msg + " (%s)" % e.strerror
        raise IOError(msg)
    # load the installed pyconfig.h:
    config_h = get_config_h_filename()
    try:
        with open(config_h) as f:
            parse_config_h(f, vars)
    except IOError as e:
        msg = "invalid Python installation: unable to open %s" % config_h
        if hasattr(e, "strerror"):
            msg = msg + " (%s)" % e.strerror
        raise IOError(msg)
    # On AIX, there are wrong paths to the linker scripts in the Makefile
    # -- these paths are relative to the Python source, but when installed
    # the scripts are in another directory.
    if _PYTHON_BUILD:
        vars['LDSHARED'] = vars['BLDSHARED']


def _init_non_posix(vars):
    """Initialize the module as appropriate for NT"""
    # set basic install directories
    vars['LIBDEST'] = get_path('stdlib')
    vars['BINLIBDEST'] = get_path('platstdlib')
    vars['INCLUDEPY'] = get_path('include')
    vars['SO'] = '.pyd'
    vars['EXE'] = '.exe'
    vars['VERSION'] = _PY_VERSION_SHORT_NO_DOT
    vars['BINDIR'] = os.path.dirname(_safe_realpath(sys.executable))

#
# public APIs
#


def parse_config_h(fp, vars=None):
    """Parse a config.h-style file.

    A dictionary containing name/value pairs is returned. If an
    optional dictionary is passed in as the second argument, it is
    used instead of a new dictionary.
    """
    if vars is None:
        vars = {}
    define_rx = re.compile("#define ([A-Z][A-Za-z0-9_]+) (.*)\n")
    undef_rx = re.compile("/[*] #undef ([A-Z][A-Za-z0-9_]+) [*]/\n")

    while True:
        line = fp.readline()
        if not line:
            break
        m = define_rx.match(line)
        if m:
            n, v = m.group(1, 2)
            try:
                v = int(v)
            except ValueError:
                pass
            vars[n] = v
        else:
            m = undef_rx.match(line)
            if m:
                vars[m.group(1)] = 0
    return vars


def get_config_h_filename():
    """Return the path of pyconfig.h."""
    if _PYTHON_BUILD:
        if os.name == "nt":
            inc_dir = os.path.join(_PROJECT_BASE, "PC")
        else:
            inc_dir = _PROJECT_BASE
    else:
        inc_dir = get_path('platinclude')
    return os.path.join(inc_dir, 'pyconfig.h')


def get_scheme_names():
    """Return a tuple containing the schemes names."""
    return tuple(sorted(_SCHEMES.sections()))


def get_path_names():
    """Return a tuple containing the paths names."""
    # xxx see if we want a static list
    return _SCHEMES.options('posix_prefix')


def get_paths(scheme=_get_default_scheme(), vars=None, expand=True):
    """Return a mapping containing an install scheme.

    ``scheme`` is the install scheme name. If not provided, it will
    return the default scheme for the current platform.
    """
    _ensure_cfg_read()
    if expand:
        return _expand_vars(scheme, vars)
    else:
        return dict(_SCHEMES.items(scheme))


def get_path(name, scheme=_get_default_scheme(), vars=None, expand=True):
    """Return a path corresponding to the scheme.

    ``scheme`` is the install scheme name.
    """
    return get_paths(scheme, vars, expand)[name]


463 | def get_config_vars(*args): | ||
464 | """With no arguments, return a dictionary of all configuration | ||
465 | variables relevant for the current platform. | ||
466 | |||
467 | On Unix, this means every variable defined in Python's installed Makefile; | ||
468 | On Windows and Mac OS it's a much smaller set. | ||
469 | |||
470 | With arguments, return a list of values that result from looking up | ||
471 | each argument in the configuration variable dictionary. | ||
472 | """ | ||
473 | global _CONFIG_VARS | ||
474 | if _CONFIG_VARS is None: | ||
475 | _CONFIG_VARS = {} | ||
476 | # Normalized versions of prefix and exec_prefix are handy to have; | ||
477 | # in fact, these are the standard versions used most places in the | ||
478 | # distutils2 module. | ||
479 | _CONFIG_VARS['prefix'] = _PREFIX | ||
480 | _CONFIG_VARS['exec_prefix'] = _EXEC_PREFIX | ||
481 | _CONFIG_VARS['py_version'] = _PY_VERSION | ||
482 | _CONFIG_VARS['py_version_short'] = _PY_VERSION_SHORT | ||
483 | _CONFIG_VARS['py_version_nodot'] = _PY_VERSION[0] + _PY_VERSION[2] | ||
484 | _CONFIG_VARS['base'] = _PREFIX | ||
485 | _CONFIG_VARS['platbase'] = _EXEC_PREFIX | ||
486 | _CONFIG_VARS['projectbase'] = _PROJECT_BASE | ||
487 | try: | ||
488 | _CONFIG_VARS['abiflags'] = sys.abiflags | ||
489 | except AttributeError: | ||
490 | # sys.abiflags may not be defined on all platforms. | ||
491 | _CONFIG_VARS['abiflags'] = '' | ||
492 | |||
493 | if os.name in ('nt', 'os2'): | ||
494 | _init_non_posix(_CONFIG_VARS) | ||
495 | if os.name == 'posix': | ||
496 | _init_posix(_CONFIG_VARS) | ||
497 | # Setting 'userbase' is done below the call to the | ||
498 | # init function to enable using 'get_config_var' in | ||
499 | # the init-function. | ||
500 | if sys.version >= '2.6': | ||
501 | _CONFIG_VARS['userbase'] = _getuserbase() | ||
502 | |||
503 | if 'srcdir' not in _CONFIG_VARS: | ||
504 | _CONFIG_VARS['srcdir'] = _PROJECT_BASE | ||
505 | else: | ||
506 | _CONFIG_VARS['srcdir'] = _safe_realpath(_CONFIG_VARS['srcdir']) | ||
507 | |||
508 | # Convert srcdir into an absolute path if it appears necessary. | ||
509 | # Normally it is relative to the build directory. However, during | ||
510 | # testing, for example, we might be running a non-installed python | ||
511 | # from a different directory. | ||
512 | if _PYTHON_BUILD and os.name == "posix": | ||
513 | base = _PROJECT_BASE | ||
514 | try: | ||
515 | cwd = os.getcwd() | ||
516 | except OSError: | ||
517 | cwd = None | ||
518 | if (not os.path.isabs(_CONFIG_VARS['srcdir']) and | ||
519 | base != cwd): | ||
520 | # srcdir is relative and we are not in the same directory | ||
521 | # as the executable. Assume executable is in the build | ||
522 | # directory and make srcdir absolute. | ||
523 | srcdir = os.path.join(base, _CONFIG_VARS['srcdir']) | ||
524 | _CONFIG_VARS['srcdir'] = os.path.normpath(srcdir) | ||
525 | |||
526 | if sys.platform == 'darwin': | ||
527 | kernel_version = os.uname()[2] # Kernel version (8.4.3) | ||
528 | major_version = int(kernel_version.split('.')[0]) | ||
529 | |||
530 | if major_version < 8: | ||
531 | # On Mac OS X before 10.4, check if -arch and -isysroot | ||
532 | # are in CFLAGS or LDFLAGS and remove them if they are. | ||
533 | # This is needed when building extensions on a 10.3 system | ||
534 | # using a universal build of python. | ||
535 | for key in ('LDFLAGS', 'BASECFLAGS', | ||
536 | # a number of derived variables. These need to be | ||
537 | # patched up as well. | ||
538 | 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'): | ||
539 | flags = _CONFIG_VARS[key] | ||
540 | flags = re.sub(r'-arch\s+\w+\s', ' ', flags) | ||
541 | flags = re.sub('-isysroot [^ \t]*', ' ', flags) | ||
542 | _CONFIG_VARS[key] = flags | ||
543 | else: | ||
544 | # Allow the user to override the architecture flags using | ||
545 | # an environment variable. | ||
546 | # NOTE: This name was introduced by Apple in OSX 10.5 and | ||
547 | # is used by several scripting languages distributed with | ||
548 | # that OS release. | ||
549 | if 'ARCHFLAGS' in os.environ: | ||
550 | arch = os.environ['ARCHFLAGS'] | ||
551 | for key in ('LDFLAGS', 'BASECFLAGS', | ||
552 | # a number of derived variables. These need to be | ||
553 | # patched up as well. | ||
554 | 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'): | ||
555 | |||
556 | flags = _CONFIG_VARS[key] | ||
557 | flags = re.sub(r'-arch\s+\w+\s', ' ', flags) | ||
558 | flags = flags + ' ' + arch | ||
559 | _CONFIG_VARS[key] = flags | ||
560 | |||
561 | # If we're on OSX 10.5 or later and the user tries to | ||
562 | # compile an extension using an SDK that is not present | ||
563 | # on the current machine it is better to not use an SDK | ||
564 | # than to fail. | ||
565 | # | ||
566 | # The major use case for this is users using a Python.org | ||
567 | # binary installer on OSX 10.6: that installer uses | ||
568 | # the 10.4u SDK, but that SDK is not installed by default | ||
569 | # when you install Xcode. | ||
570 | # | ||
571 | CFLAGS = _CONFIG_VARS.get('CFLAGS', '') | ||
572 | m = re.search(r'-isysroot\s+(\S+)', CFLAGS) | ||
573 | if m is not None: | ||
574 | sdk = m.group(1) | ||
575 | if not os.path.exists(sdk): | ||
576 | for key in ('LDFLAGS', 'BASECFLAGS', | ||
577 | # a number of derived variables. These need to be | ||
578 | # patched up as well. | ||
579 | 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'): | ||
580 | |||
581 | flags = _CONFIG_VARS[key] | ||
582 | flags = re.sub(r'-isysroot\s+\S+(\s|$)', ' ', flags) | ||
583 | _CONFIG_VARS[key] = flags | ||
584 | |||
585 | if args: | ||
586 | vals = [] | ||
587 | for name in args: | ||
588 | vals.append(_CONFIG_VARS.get(name)) | ||
589 | return vals | ||
590 | else: | ||
591 | return _CONFIG_VARS | ||
592 | |||
593 | |||
594 | def get_config_var(name): | ||
595 | """Return the value of a single variable using the dictionary returned by | ||
596 | 'get_config_vars()'. | ||
597 | |||
598 | Equivalent to get_config_vars().get(name) | ||
599 | """ | ||
600 | return get_config_vars().get(name) | ||
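A quick sketch of the lookup API above (vendored import path; the variable names are illustrative):

    from pip._vendor.distlib._backport import sysconfig

    # With arguments, values come back as a list in argument order;
    # unknown names yield None rather than raising.
    prefix, missing = sysconfig.get_config_vars('prefix', 'no_such_variable')
    assert missing is None
    assert sysconfig.get_config_var('prefix') == prefix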
601 | |||
602 | |||
603 | def get_platform(): | ||
604 | """Return a string that identifies the current platform. | ||
605 | |||
606 | This is used mainly to distinguish platform-specific build directories and | ||
607 | platform-specific built distributions. Typically includes the OS name | ||
608 | and version and the architecture (as supplied by 'os.uname()'), | ||
609 | although the exact information included depends on the OS; e.g. for IRIX | ||
610 | the architecture isn't particularly important (IRIX only runs on SGI | ||
611 | hardware), but for Linux the kernel version isn't particularly | ||
612 | important. | ||
613 | |||
614 | Examples of returned values: | ||
615 | linux-i586 | ||
616 | linux-alpha (?) | ||
617 | solaris-2.6-sun4u | ||
618 | irix-5.3 | ||
619 | irix64-6.2 | ||
620 | |||
621 | Windows will return one of: | ||
622 | win-amd64 (64-bit Windows on AMD64, aka x86_64, Intel64, EM64T, etc.) | ||
623 | win-ia64 (64bit Windows on Itanium) | ||
624 | win32 (all others - specifically, sys.platform is returned) | ||
625 | |||
626 | For other non-POSIX platforms, currently just returns 'sys.platform'. | ||
627 | """ | ||
628 | if os.name == 'nt': | ||
629 | # sniff sys.version for architecture. | ||
630 | prefix = " bit (" | ||
631 | i = sys.version.find(prefix) | ||
632 | if i == -1: | ||
633 | return sys.platform | ||
634 | j = sys.version.find(")", i) | ||
635 | look = sys.version[i+len(prefix):j].lower() | ||
636 | if look == 'amd64': | ||
637 | return 'win-amd64' | ||
638 | if look == 'itanium': | ||
639 | return 'win-ia64' | ||
640 | return sys.platform | ||
641 | |||
642 | if os.name != "posix" or not hasattr(os, 'uname'): | ||
643 | # XXX what about the architecture? NT is Intel or Alpha, | ||
644 | # Mac OS is M68k or PPC, etc. | ||
645 | return sys.platform | ||
646 | |||
647 | # Try to distinguish various flavours of Unix | ||
648 | osname, host, release, version, machine = os.uname() | ||
649 | |||
650 | # Convert the OS name to lowercase, remove '/' characters | ||
651 | # (to accommodate BSD/OS), and translate spaces (for "Power Macintosh") | ||
652 | osname = osname.lower().replace('/', '') | ||
653 | machine = machine.replace(' ', '_') | ||
654 | machine = machine.replace('/', '-') | ||
655 | |||
656 | if osname[:5] == "linux": | ||
657 | # At least on Linux/Intel, 'machine' is the processor -- | ||
658 | # i386, etc. | ||
659 | # XXX what about Alpha, SPARC, etc? | ||
660 | return "%s-%s" % (osname, machine) | ||
661 | elif osname[:5] == "sunos": | ||
662 | if release[0] >= "5": # SunOS 5 == Solaris 2 | ||
663 | osname = "solaris" | ||
664 | release = "%d.%s" % (int(release[0]) - 3, release[2:]) | ||
665 | # fall through to standard osname-release-machine representation | ||
666 | elif osname[:4] == "irix": # could be "irix64"! | ||
667 | return "%s-%s" % (osname, release) | ||
668 | elif osname[:3] == "aix": | ||
669 | return "%s-%s.%s" % (osname, version, release) | ||
670 | elif osname[:6] == "cygwin": | ||
671 | osname = "cygwin" | ||
672 | rel_re = re.compile(r'[\d.]+') | ||
673 | m = rel_re.match(release) | ||
674 | if m: | ||
675 | release = m.group() | ||
676 | elif osname[:6] == "darwin": | ||
677 | # | ||
678 | # For our purposes, we'll assume that the system version from | ||
679 | # distutils' perspective is what MACOSX_DEPLOYMENT_TARGET is set | ||
680 | # to. This makes the compatibility story a bit more sane because the | ||
681 | # machine is going to compile and link as if it were | ||
682 | # MACOSX_DEPLOYMENT_TARGET. | ||
683 | cfgvars = get_config_vars() | ||
684 | macver = cfgvars.get('MACOSX_DEPLOYMENT_TARGET') | ||
685 | |||
686 | # Always calculate the release of the running machine, | ||
687 | # needed to determine if we can build fat binaries or not. | ||
688 | |||
689 | macrelease = macver | ||
690 | # Get the system version. Reading this plist is a documented | ||
691 | # way to get the system version (see the documentation for | ||
692 | # the Gestalt Manager) | ||
693 | try: | ||
694 | f = open('/System/Library/CoreServices/SystemVersion.plist') | ||
695 | except IOError: | ||
696 | # We're on a plain darwin box, fall back to the default | ||
697 | # behaviour. | ||
698 | pass | ||
699 | else: | ||
700 | try: | ||
701 | m = re.search(r'<key>ProductUserVisibleVersion</key>\s*' | ||
702 | r'<string>(.*?)</string>', f.read()) | ||
703 | finally: | ||
704 | f.close() | ||
705 | if m is not None: | ||
706 | macrelease = '.'.join(m.group(1).split('.')[:2]) | ||
707 | # else: fall back to the default behaviour | ||
708 | |||
709 | |||
710 | if not macver: | ||
711 | macver = macrelease | ||
712 | |||
713 | if macver: | ||
714 | release = macver | ||
715 | osname = "macosx" | ||
716 | |||
717 | if ((macrelease + '.') >= '10.4.' and | ||
718 | '-arch' in get_config_vars().get('CFLAGS', '').strip()): | ||
719 | # The universal build will build fat binaries, but not on | ||
720 | # systems before 10.4 | ||
721 | # | ||
722 | # Try to detect 4-way universal builds, those have machine-type | ||
723 | # 'universal' instead of 'fat'. | ||
724 | |||
725 | machine = 'fat' | ||
726 | cflags = get_config_vars().get('CFLAGS') | ||
727 | |||
728 | archs = re.findall(r'-arch\s+(\S+)', cflags) | ||
729 | archs = tuple(sorted(set(archs))) | ||
730 | |||
731 | if len(archs) == 1: | ||
732 | machine = archs[0] | ||
733 | elif archs == ('i386', 'ppc'): | ||
734 | machine = 'fat' | ||
735 | elif archs == ('i386', 'x86_64'): | ||
736 | machine = 'intel' | ||
737 | elif archs == ('i386', 'ppc', 'x86_64'): | ||
738 | machine = 'fat3' | ||
739 | elif archs == ('ppc64', 'x86_64'): | ||
740 | machine = 'fat64' | ||
741 | elif archs == ('i386', 'ppc', 'ppc64', 'x86_64'): | ||
742 | machine = 'universal' | ||
743 | else: | ||
744 | raise ValueError( | ||
745 | "Don't know machine value for archs=%r" % (archs,)) | ||
746 | |||
747 | elif machine == 'i386': | ||
748 | # On OSX the machine type returned by uname is always the | ||
749 | # 32-bit variant, even if the executable architecture is | ||
750 | # the 64-bit variant | ||
751 | if sys.maxsize >= 2**32: | ||
752 | machine = 'x86_64' | ||
753 | |||
754 | elif machine in ('PowerPC', 'Power_Macintosh'): | ||
755 | # Pick a sane name for the PPC architecture. | ||
756 | # See 'i386' case | ||
757 | if sys.maxsize >= 2**32: | ||
758 | machine = 'ppc64' | ||
759 | else: | ||
760 | machine = 'ppc' | ||
761 | |||
762 | return "%s-%s-%s" % (osname, release, machine) | ||
763 | |||
764 | |||
765 | def get_python_version(): | ||
766 | return _PY_VERSION_SHORT | ||
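As a hedged example of what the two functions above report (the exact strings depend on the interpreter build):

    from pip._vendor.distlib._backport import sysconfig

    # Typically something like 'linux-x86_64 3.7' on 64-bit Linux,
    # or 'win-amd64 3.7' on 64-bit Windows.
    print(sysconfig.get_platform(), sysconfig.get_python_version())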
767 | |||
768 | |||
769 | def _print_dict(title, data): | ||
770 | for index, (key, value) in enumerate(sorted(data.items())): | ||
771 | if index == 0: | ||
772 | print('%s: ' % (title)) | ||
773 | print('\t%s = "%s"' % (key, value)) | ||
774 | |||
775 | |||
776 | def _main(): | ||
777 | """Display all information sysconfig detains.""" | ||
778 | print('Platform: "%s"' % get_platform()) | ||
779 | print('Python version: "%s"' % get_python_version()) | ||
780 | print('Current installation scheme: "%s"' % _get_default_scheme()) | ||
781 | print() | ||
782 | _print_dict('Paths', get_paths()) | ||
783 | print() | ||
784 | _print_dict('Variables', get_config_vars()) | ||
785 | |||
786 | |||
787 | if __name__ == '__main__': | ||
788 | _main() | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/_backport/tarfile.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/_backport/tarfile.py new file mode 100644 index 0000000..b0599bc --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/_backport/tarfile.py | |||
@@ -0,0 +1,2607 @@ | |||
1 | #------------------------------------------------------------------- | ||
2 | # tarfile.py | ||
3 | #------------------------------------------------------------------- | ||
4 | # Copyright (C) 2002 Lars Gustaebel <lars@gustaebel.de> | ||
5 | # All rights reserved. | ||
6 | # | ||
7 | # Permission is hereby granted, free of charge, to any person | ||
8 | # obtaining a copy of this software and associated documentation | ||
9 | # files (the "Software"), to deal in the Software without | ||
10 | # restriction, including without limitation the rights to use, | ||
11 | # copy, modify, merge, publish, distribute, sublicense, and/or sell | ||
12 | # copies of the Software, and to permit persons to whom the | ||
13 | # Software is furnished to do so, subject to the following | ||
14 | # conditions: | ||
15 | # | ||
16 | # The above copyright notice and this permission notice shall be | ||
17 | # included in all copies or substantial portions of the Software. | ||
18 | # | ||
19 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, | ||
20 | # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES | ||
21 | # OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND | ||
22 | # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT | ||
23 | # HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, | ||
24 | # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING | ||
25 | # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR | ||
26 | # OTHER DEALINGS IN THE SOFTWARE. | ||
27 | # | ||
28 | from __future__ import print_function | ||
29 | |||
30 | """Read from and write to tar format archives. | ||
31 | """ | ||
32 | |||
33 | __version__ = "$Revision$" | ||
34 | |||
35 | version = "0.9.0" | ||
36 | __author__ = "Lars Gust\u00e4bel (lars@gustaebel.de)" | ||
37 | __date__ = "$Date: 2011-02-25 17:42:01 +0200 (Fri, 25 Feb 2011) $" | ||
38 | __cvsid__ = "$Id: tarfile.py 88586 2011-02-25 15:42:01Z marc-andre.lemburg $" | ||
39 | __credits__ = "Gustavo Niemeyer, Niels Gust\u00e4bel, Richard Townsend." | ||
40 | |||
41 | #--------- | ||
42 | # Imports | ||
43 | #--------- | ||
44 | import sys | ||
45 | import os | ||
46 | import stat | ||
47 | import errno | ||
48 | import time | ||
49 | import struct | ||
50 | import copy | ||
51 | import re | ||
52 | |||
53 | try: | ||
54 | import grp, pwd | ||
55 | except ImportError: | ||
56 | grp = pwd = None | ||
57 | |||
58 | # os.symlink on Windows prior to 6.0 raises NotImplementedError | ||
59 | symlink_exception = (AttributeError, NotImplementedError) | ||
60 | try: | ||
61 | # WindowsError (1314) will be raised if the caller does not hold the | ||
62 | # SeCreateSymbolicLinkPrivilege privilege | ||
63 | symlink_exception += (WindowsError,) | ||
64 | except NameError: | ||
65 | pass | ||
66 | |||
67 | # from tarfile import * | ||
68 | __all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError"] | ||
69 | |||
70 | if sys.version_info[0] < 3: | ||
71 | import __builtin__ as builtins | ||
72 | else: | ||
73 | import builtins | ||
74 | |||
75 | _open = builtins.open # Since 'open' is TarFile.open | ||
76 | |||
77 | #--------------------------------------------------------- | ||
78 | # tar constants | ||
79 | #--------------------------------------------------------- | ||
80 | NUL = b"\0" # the null character | ||
81 | BLOCKSIZE = 512 # length of processing blocks | ||
82 | RECORDSIZE = BLOCKSIZE * 20 # length of records | ||
83 | GNU_MAGIC = b"ustar  \0" # magic gnu tar string (two spaces before NUL) | ||
84 | POSIX_MAGIC = b"ustar\x0000" # magic posix tar string | ||
85 | |||
86 | LENGTH_NAME = 100 # maximum length of a filename | ||
87 | LENGTH_LINK = 100 # maximum length of a linkname | ||
88 | LENGTH_PREFIX = 155 # maximum length of the prefix field | ||
89 | |||
90 | REGTYPE = b"0" # regular file | ||
91 | AREGTYPE = b"\0" # regular file | ||
92 | LNKTYPE = b"1" # link (inside tarfile) | ||
93 | SYMTYPE = b"2" # symbolic link | ||
94 | CHRTYPE = b"3" # character special device | ||
95 | BLKTYPE = b"4" # block special device | ||
96 | DIRTYPE = b"5" # directory | ||
97 | FIFOTYPE = b"6" # fifo special device | ||
98 | CONTTYPE = b"7" # contiguous file | ||
99 | |||
100 | GNUTYPE_LONGNAME = b"L" # GNU tar longname | ||
101 | GNUTYPE_LONGLINK = b"K" # GNU tar longlink | ||
102 | GNUTYPE_SPARSE = b"S" # GNU tar sparse file | ||
103 | |||
104 | XHDTYPE = b"x" # POSIX.1-2001 extended header | ||
105 | XGLTYPE = b"g" # POSIX.1-2001 global header | ||
106 | SOLARIS_XHDTYPE = b"X" # Solaris extended header | ||
107 | |||
108 | USTAR_FORMAT = 0 # POSIX.1-1988 (ustar) format | ||
109 | GNU_FORMAT = 1 # GNU tar format | ||
110 | PAX_FORMAT = 2 # POSIX.1-2001 (pax) format | ||
111 | DEFAULT_FORMAT = GNU_FORMAT | ||
112 | |||
113 | #--------------------------------------------------------- | ||
114 | # tarfile constants | ||
115 | #--------------------------------------------------------- | ||
116 | # File types that tarfile supports: | ||
117 | SUPPORTED_TYPES = (REGTYPE, AREGTYPE, LNKTYPE, | ||
118 | SYMTYPE, DIRTYPE, FIFOTYPE, | ||
119 | CONTTYPE, CHRTYPE, BLKTYPE, | ||
120 | GNUTYPE_LONGNAME, GNUTYPE_LONGLINK, | ||
121 | GNUTYPE_SPARSE) | ||
122 | |||
123 | # File types that will be treated as a regular file. | ||
124 | REGULAR_TYPES = (REGTYPE, AREGTYPE, | ||
125 | CONTTYPE, GNUTYPE_SPARSE) | ||
126 | |||
127 | # File types that are part of the GNU tar format. | ||
128 | GNU_TYPES = (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK, | ||
129 | GNUTYPE_SPARSE) | ||
130 | |||
131 | # Fields from a pax header that override a TarInfo attribute. | ||
132 | PAX_FIELDS = ("path", "linkpath", "size", "mtime", | ||
133 | "uid", "gid", "uname", "gname") | ||
134 | |||
135 | # Fields from a pax header that are affected by hdrcharset. | ||
136 | PAX_NAME_FIELDS = set(("path", "linkpath", "uname", "gname")) | ||
137 | |||
138 | # Fields in a pax header that are numbers, all other fields | ||
139 | # are treated as strings. | ||
140 | PAX_NUMBER_FIELDS = { | ||
141 | "atime": float, | ||
142 | "ctime": float, | ||
143 | "mtime": float, | ||
144 | "uid": int, | ||
145 | "gid": int, | ||
146 | "size": int | ||
147 | } | ||
148 | |||
149 | #--------------------------------------------------------- | ||
150 | # Bits used in the mode field, values in octal. | ||
151 | #--------------------------------------------------------- | ||
152 | S_IFLNK = 0o120000 # symbolic link | ||
153 | S_IFREG = 0o100000 # regular file | ||
154 | S_IFBLK = 0o060000 # block device | ||
155 | S_IFDIR = 0o040000 # directory | ||
156 | S_IFCHR = 0o020000 # character device | ||
157 | S_IFIFO = 0o010000 # fifo | ||
158 | |||
159 | TSUID = 0o4000 # set UID on execution | ||
160 | TSGID = 0o2000 # set GID on execution | ||
161 | TSVTX = 0o1000 # reserved | ||
162 | |||
163 | TUREAD = 0o400 # read by owner | ||
164 | TUWRITE = 0o200 # write by owner | ||
165 | TUEXEC = 0o100 # execute/search by owner | ||
166 | TGREAD = 0o040 # read by group | ||
167 | TGWRITE = 0o020 # write by group | ||
168 | TGEXEC = 0o010 # execute/search by group | ||
169 | TOREAD = 0o004 # read by other | ||
170 | TOWRITE = 0o002 # write by other | ||
171 | TOEXEC = 0o001 # execute/search by other | ||
172 | |||
173 | #--------------------------------------------------------- | ||
174 | # initialization | ||
175 | #--------------------------------------------------------- | ||
176 | if os.name in ("nt", "ce"): | ||
177 | ENCODING = "utf-8" | ||
178 | else: | ||
179 | ENCODING = sys.getfilesystemencoding() | ||
180 | |||
181 | #--------------------------------------------------------- | ||
182 | # Some useful functions | ||
183 | #--------------------------------------------------------- | ||
184 | |||
185 | def stn(s, length, encoding, errors): | ||
186 | """Convert a string to a null-terminated bytes object. | ||
187 | """ | ||
188 | s = s.encode(encoding, errors) | ||
189 | return s[:length] + (length - len(s)) * NUL | ||
190 | |||
191 | def nts(s, encoding, errors): | ||
192 | """Convert a null-terminated bytes object to a string. | ||
193 | """ | ||
194 | p = s.find(b"\0") | ||
195 | if p != -1: | ||
196 | s = s[:p] | ||
197 | return s.decode(encoding, errors) | ||
198 | |||
199 | def nti(s): | ||
200 | """Convert a number field to a python number. | ||
201 | """ | ||
202 | # There are two possible encodings for a number field, see | ||
203 | # itn() below. | ||
204 | if s[0] != chr(0o200): | ||
205 | try: | ||
206 | n = int(nts(s, "ascii", "strict") or "0", 8) | ||
207 | except ValueError: | ||
208 | raise InvalidHeaderError("invalid header") | ||
209 | else: | ||
210 | n = 0 | ||
211 | for i in range(len(s) - 1): | ||
212 | n <<= 8 | ||
213 | n += ord(s[i + 1]) | ||
214 | return n | ||
215 | |||
216 | def itn(n, digits=8, format=DEFAULT_FORMAT): | ||
217 | """Convert a python number to a number field. | ||
218 | """ | ||
219 | # POSIX 1003.1-1988 requires numbers to be encoded as a string of | ||
220 | # octal digits followed by a null-byte, this allows values up to | ||
221 | # (8**(digits-1))-1. GNU tar allows storing numbers greater than | ||
222 | # that if necessary. A leading 0o200 byte indicates this particular | ||
223 | # encoding, the following digits-1 bytes are a big-endian | ||
224 | # representation. This allows values up to (256**(digits-1))-1. | ||
225 | if 0 <= n < 8 ** (digits - 1): | ||
226 | s = ("%0*o" % (digits - 1, n)).encode("ascii") + NUL | ||
227 | else: | ||
228 | if format != GNU_FORMAT or n >= 256 ** (digits - 1): | ||
229 | raise ValueError("overflow in number field") | ||
230 | |||
231 | if n < 0: | ||
232 | # XXX We mimic GNU tar's behaviour with negative numbers, | ||
233 | # this could raise OverflowError. | ||
234 | n = struct.unpack("L", struct.pack("l", n))[0] | ||
235 | |||
236 | s = bytearray() | ||
237 | for i in range(digits - 1): | ||
238 | s.insert(0, n & 0o377) | ||
239 | n >>= 8 | ||
240 | s.insert(0, 0o200) | ||
241 | return s | ||
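A round-trip sketch of the two number-field encodings described in the comment above, importing the module's private helpers purely for illustration:

    from pip._vendor.distlib._backport.tarfile import itn, nti, GNU_FORMAT

    # Small values fit as octal digits plus a terminating NUL...
    small = itn(0o755, digits=8)
    assert bytes(small) == b"0000755\x00" and nti(small) == 0o755

    # ...larger values switch to the GNU base-256 form: a 0o200 marker
    # byte followed by digits-1 big-endian bytes.
    big = itn(8 ** 7, digits=8, format=GNU_FORMAT)
    assert bytes(big) == b"\x80\x00\x00\x00\x00\x20\x00\x00"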
242 | |||
243 | def calc_chksums(buf): | ||
244 | """Calculate the checksum for a member's header by summing up all | ||
245 | characters except for the chksum field which is treated as if | ||
246 | it was filled with spaces. According to the GNU tar sources, | ||
247 | some tars (Sun and NeXT) calculate chksum with signed char, | ||
248 | which will be different if there are chars in the buffer with | ||
249 | the high bit set. So we calculate two checksums, unsigned and | ||
250 | signed. | ||
251 | """ | ||
252 | unsigned_chksum = 256 + sum(struct.unpack("148B", buf[:148]) + struct.unpack("356B", buf[156:512])) | ||
253 | signed_chksum = 256 + sum(struct.unpack("148b", buf[:148]) + struct.unpack("356b", buf[156:512])) | ||
254 | return unsigned_chksum, signed_chksum | ||
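The signed/unsigned distinction is easiest to see at the extremes; a small self-check (504 bytes are summed because the 8-byte chksum field at 148:156 is skipped, and the constant 256 stands in for that field read as eight spaces):

    from pip._vendor.distlib._backport.tarfile import calc_chksums, BLOCKSIZE

    assert calc_chksums(b"\x00" * BLOCKSIZE) == (256, 256)
    # A high-bit byte counts as 255 unsigned but -1 signed.
    assert calc_chksums(b"\xff" * BLOCKSIZE) == (256 + 504 * 255, 256 - 504)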
255 | |||
256 | def copyfileobj(src, dst, length=None): | ||
257 | """Copy length bytes from fileobj src to fileobj dst. | ||
258 | If length is None, copy the entire content. | ||
259 | """ | ||
260 | if length == 0: | ||
261 | return | ||
262 | if length is None: | ||
263 | while True: | ||
264 | buf = src.read(16*1024) | ||
265 | if not buf: | ||
266 | break | ||
267 | dst.write(buf) | ||
268 | return | ||
269 | |||
270 | BUFSIZE = 16 * 1024 | ||
271 | blocks, remainder = divmod(length, BUFSIZE) | ||
272 | for b in range(blocks): | ||
273 | buf = src.read(BUFSIZE) | ||
274 | if len(buf) < BUFSIZE: | ||
275 | raise IOError("end of file reached") | ||
276 | dst.write(buf) | ||
277 | |||
278 | if remainder != 0: | ||
279 | buf = src.read(remainder) | ||
280 | if len(buf) < remainder: | ||
281 | raise IOError("end of file reached") | ||
282 | dst.write(buf) | ||
283 | return | ||
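For example, with in-memory file objects (a self-contained sketch; the sizes are chosen to exercise both the full-block loop and the remainder path):

    import io
    from pip._vendor.distlib._backport.tarfile import copyfileobj

    src = io.BytesIO(b"x" * 100000)
    dst = io.BytesIO()
    copyfileobj(src, dst, length=70000)   # four 16 KiB blocks + 4464 bytes
    assert len(dst.getvalue()) == 70000

Requesting more bytes than src can supply raises IOError("end of file reached").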
284 | |||
285 | filemode_table = ( | ||
286 | ((S_IFLNK, "l"), | ||
287 | (S_IFREG, "-"), | ||
288 | (S_IFBLK, "b"), | ||
289 | (S_IFDIR, "d"), | ||
290 | (S_IFCHR, "c"), | ||
291 | (S_IFIFO, "p")), | ||
292 | |||
293 | ((TUREAD, "r"),), | ||
294 | ((TUWRITE, "w"),), | ||
295 | ((TUEXEC|TSUID, "s"), | ||
296 | (TSUID, "S"), | ||
297 | (TUEXEC, "x")), | ||
298 | |||
299 | ((TGREAD, "r"),), | ||
300 | ((TGWRITE, "w"),), | ||
301 | ((TGEXEC|TSGID, "s"), | ||
302 | (TSGID, "S"), | ||
303 | (TGEXEC, "x")), | ||
304 | |||
305 | ((TOREAD, "r"),), | ||
306 | ((TOWRITE, "w"),), | ||
307 | ((TOEXEC|TSVTX, "t"), | ||
308 | (TSVTX, "T"), | ||
309 | (TOEXEC, "x")) | ||
310 | ) | ||
311 | |||
312 | def filemode(mode): | ||
313 | """Convert a file's mode to a string of the form | ||
314 | -rwxrwxrwx. | ||
315 | Used by TarFile.list() | ||
316 | """ | ||
317 | perm = [] | ||
318 | for table in filemode_table: | ||
319 | for bit, char in table: | ||
320 | if mode & bit == bit: | ||
321 | perm.append(char) | ||
322 | break | ||
323 | else: | ||
324 | perm.append("-") | ||
325 | return "".join(perm) | ||
326 | |||
327 | class TarError(Exception): | ||
328 | """Base exception.""" | ||
329 | pass | ||
330 | class ExtractError(TarError): | ||
331 | """General exception for extract errors.""" | ||
332 | pass | ||
333 | class ReadError(TarError): | ||
334 | """Exception for unreadable tar archives.""" | ||
335 | pass | ||
336 | class CompressionError(TarError): | ||
337 | """Exception for unavailable compression methods.""" | ||
338 | pass | ||
339 | class StreamError(TarError): | ||
340 | """Exception for unsupported operations on stream-like TarFiles.""" | ||
341 | pass | ||
342 | class HeaderError(TarError): | ||
343 | """Base exception for header errors.""" | ||
344 | pass | ||
345 | class EmptyHeaderError(HeaderError): | ||
346 | """Exception for empty headers.""" | ||
347 | pass | ||
348 | class TruncatedHeaderError(HeaderError): | ||
349 | """Exception for truncated headers.""" | ||
350 | pass | ||
351 | class EOFHeaderError(HeaderError): | ||
352 | """Exception for end of file headers.""" | ||
353 | pass | ||
354 | class InvalidHeaderError(HeaderError): | ||
355 | """Exception for invalid headers.""" | ||
356 | pass | ||
357 | class SubsequentHeaderError(HeaderError): | ||
358 | """Exception for missing and invalid extended headers.""" | ||
359 | pass | ||
360 | |||
361 | #--------------------------- | ||
362 | # internal stream interface | ||
363 | #--------------------------- | ||
364 | class _LowLevelFile(object): | ||
365 | """Low-level file object. Supports reading and writing. | ||
366 | It is used instead of a regular file object for streaming | ||
367 | access. | ||
368 | """ | ||
369 | |||
370 | def __init__(self, name, mode): | ||
371 | mode = { | ||
372 | "r": os.O_RDONLY, | ||
373 | "w": os.O_WRONLY | os.O_CREAT | os.O_TRUNC, | ||
374 | }[mode] | ||
375 | if hasattr(os, "O_BINARY"): | ||
376 | mode |= os.O_BINARY | ||
377 | self.fd = os.open(name, mode, 0o666) | ||
378 | |||
379 | def close(self): | ||
380 | os.close(self.fd) | ||
381 | |||
382 | def read(self, size): | ||
383 | return os.read(self.fd, size) | ||
384 | |||
385 | def write(self, s): | ||
386 | os.write(self.fd, s) | ||
387 | |||
388 | class _Stream(object): | ||
389 | """Class that serves as an adapter between TarFile and | ||
390 | a stream-like object. The stream-like object only | ||
391 | needs to have a read() or write() method and is accessed | ||
392 | blockwise. Use of gzip or bzip2 compression is possible. | ||
393 | A stream-like object could be for example: sys.stdin, | ||
394 | sys.stdout, a socket, a tape device etc. | ||
395 | |||
396 | _Stream is intended to be used only internally. | ||
397 | """ | ||
398 | |||
399 | def __init__(self, name, mode, comptype, fileobj, bufsize): | ||
400 | """Construct a _Stream object. | ||
401 | """ | ||
402 | self._extfileobj = True | ||
403 | if fileobj is None: | ||
404 | fileobj = _LowLevelFile(name, mode) | ||
405 | self._extfileobj = False | ||
406 | |||
407 | if comptype == '*': | ||
408 | # Enable transparent compression detection for the | ||
409 | # stream interface | ||
410 | fileobj = _StreamProxy(fileobj) | ||
411 | comptype = fileobj.getcomptype() | ||
412 | |||
413 | self.name = name or "" | ||
414 | self.mode = mode | ||
415 | self.comptype = comptype | ||
416 | self.fileobj = fileobj | ||
417 | self.bufsize = bufsize | ||
418 | self.buf = b"" | ||
419 | self.pos = 0 | ||
420 | self.closed = False | ||
421 | |||
422 | try: | ||
423 | if comptype == "gz": | ||
424 | try: | ||
425 | import zlib | ||
426 | except ImportError: | ||
427 | raise CompressionError("zlib module is not available") | ||
428 | self.zlib = zlib | ||
429 | self.crc = zlib.crc32(b"") | ||
430 | if mode == "r": | ||
431 | self._init_read_gz() | ||
432 | else: | ||
433 | self._init_write_gz() | ||
434 | |||
435 | if comptype == "bz2": | ||
436 | try: | ||
437 | import bz2 | ||
438 | except ImportError: | ||
439 | raise CompressionError("bz2 module is not available") | ||
440 | if mode == "r": | ||
441 | self.dbuf = b"" | ||
442 | self.cmp = bz2.BZ2Decompressor() | ||
443 | else: | ||
444 | self.cmp = bz2.BZ2Compressor() | ||
445 | except: | ||
446 | if not self._extfileobj: | ||
447 | self.fileobj.close() | ||
448 | self.closed = True | ||
449 | raise | ||
450 | |||
451 | def __del__(self): | ||
452 | if hasattr(self, "closed") and not self.closed: | ||
453 | self.close() | ||
454 | |||
455 | def _init_write_gz(self): | ||
456 | """Initialize for writing with gzip compression. | ||
457 | """ | ||
458 | self.cmp = self.zlib.compressobj(9, self.zlib.DEFLATED, | ||
459 | -self.zlib.MAX_WBITS, | ||
460 | self.zlib.DEF_MEM_LEVEL, | ||
461 | 0) | ||
462 | timestamp = struct.pack("<L", int(time.time())) | ||
463 | self.__write(b"\037\213\010\010" + timestamp + b"\002\377") | ||
464 | if self.name.endswith(".gz"): | ||
465 | self.name = self.name[:-3] | ||
466 | # RFC1952 says we must use ISO-8859-1 for the FNAME field. | ||
467 | self.__write(self.name.encode("iso-8859-1", "replace") + NUL) | ||
468 | |||
469 | def write(self, s): | ||
470 | """Write string s to the stream. | ||
471 | """ | ||
472 | if self.comptype == "gz": | ||
473 | self.crc = self.zlib.crc32(s, self.crc) | ||
474 | self.pos += len(s) | ||
475 | if self.comptype != "tar": | ||
476 | s = self.cmp.compress(s) | ||
477 | self.__write(s) | ||
478 | |||
479 | def __write(self, s): | ||
480 | """Write string s to the stream if a whole new block | ||
481 | is ready to be written. | ||
482 | """ | ||
483 | self.buf += s | ||
484 | while len(self.buf) > self.bufsize: | ||
485 | self.fileobj.write(self.buf[:self.bufsize]) | ||
486 | self.buf = self.buf[self.bufsize:] | ||
487 | |||
488 | def close(self): | ||
489 | """Close the _Stream object. No operation should be | ||
490 | done on it afterwards. | ||
491 | """ | ||
492 | if self.closed: | ||
493 | return | ||
494 | |||
495 | if self.mode == "w" and self.comptype != "tar": | ||
496 | self.buf += self.cmp.flush() | ||
497 | |||
498 | if self.mode == "w" and self.buf: | ||
499 | self.fileobj.write(self.buf) | ||
500 | self.buf = b"" | ||
501 | if self.comptype == "gz": | ||
502 | # The native zlib crc is an unsigned 32-bit integer, but | ||
503 | # the Python wrapper implicitly casts that to a signed C | ||
504 | # long. So, on a 32-bit box self.crc may "look negative", | ||
505 | # while the same crc on a 64-bit box may "look positive". | ||
506 | # To avoid irksome warnings from the `struct` module, force | ||
507 | # it to look positive on all boxes. | ||
508 | self.fileobj.write(struct.pack("<L", self.crc & 0xffffffff)) | ||
509 | self.fileobj.write(struct.pack("<L", self.pos & 0xffffffff)) | ||
510 | |||
511 | if not self._extfileobj: | ||
512 | self.fileobj.close() | ||
513 | |||
514 | self.closed = True | ||
515 | |||
516 | def _init_read_gz(self): | ||
517 | """Initialize for reading a gzip compressed fileobj. | ||
518 | """ | ||
519 | self.cmp = self.zlib.decompressobj(-self.zlib.MAX_WBITS) | ||
520 | self.dbuf = b"" | ||
521 | |||
522 | # taken from gzip.GzipFile with some alterations | ||
523 | if self.__read(2) != b"\037\213": | ||
524 | raise ReadError("not a gzip file") | ||
525 | if self.__read(1) != b"\010": | ||
526 | raise CompressionError("unsupported compression method") | ||
527 | |||
528 | flag = ord(self.__read(1)) | ||
529 | self.__read(6) | ||
530 | |||
531 | if flag & 4: | ||
532 | xlen = ord(self.__read(1)) + 256 * ord(self.__read(1)) | ||
533 | self.read(xlen) | ||
534 | if flag & 8: | ||
535 | while True: | ||
536 | s = self.__read(1) | ||
537 | if not s or s == NUL: | ||
538 | break | ||
539 | if flag & 16: | ||
540 | while True: | ||
541 | s = self.__read(1) | ||
542 | if not s or s == NUL: | ||
543 | break | ||
544 | if flag & 2: | ||
545 | self.__read(2) | ||
546 | |||
547 | def tell(self): | ||
548 | """Return the stream's file pointer position. | ||
549 | """ | ||
550 | return self.pos | ||
551 | |||
552 | def seek(self, pos=0): | ||
553 | """Set the stream's file pointer to pos. Negative seeking | ||
554 | is forbidden. | ||
555 | """ | ||
556 | if pos - self.pos >= 0: | ||
557 | blocks, remainder = divmod(pos - self.pos, self.bufsize) | ||
558 | for i in range(blocks): | ||
559 | self.read(self.bufsize) | ||
560 | self.read(remainder) | ||
561 | else: | ||
562 | raise StreamError("seeking backwards is not allowed") | ||
563 | return self.pos | ||
564 | |||
565 | def read(self, size=None): | ||
566 | """Return the next size number of bytes from the stream. | ||
567 | If size is not defined, return all bytes of the stream | ||
568 | up to EOF. | ||
569 | """ | ||
570 | if size is None: | ||
571 | t = [] | ||
572 | while True: | ||
573 | buf = self._read(self.bufsize) | ||
574 | if not buf: | ||
575 | break | ||
576 | t.append(buf) | ||
577 | buf = "".join(t) | ||
578 | else: | ||
579 | buf = self._read(size) | ||
580 | self.pos += len(buf) | ||
581 | return buf | ||
582 | |||
583 | def _read(self, size): | ||
584 | """Return size bytes from the stream. | ||
585 | """ | ||
586 | if self.comptype == "tar": | ||
587 | return self.__read(size) | ||
588 | |||
589 | c = len(self.dbuf) | ||
590 | while c < size: | ||
591 | buf = self.__read(self.bufsize) | ||
592 | if not buf: | ||
593 | break | ||
594 | try: | ||
595 | buf = self.cmp.decompress(buf) | ||
596 | except IOError: | ||
597 | raise ReadError("invalid compressed data") | ||
598 | self.dbuf += buf | ||
599 | c += len(buf) | ||
600 | buf = self.dbuf[:size] | ||
601 | self.dbuf = self.dbuf[size:] | ||
602 | return buf | ||
603 | |||
604 | def __read(self, size): | ||
605 | """Return size bytes from stream. If internal buffer is empty, | ||
606 | read another block from the stream. | ||
607 | """ | ||
608 | c = len(self.buf) | ||
609 | while c < size: | ||
610 | buf = self.fileobj.read(self.bufsize) | ||
611 | if not buf: | ||
612 | break | ||
613 | self.buf += buf | ||
614 | c += len(buf) | ||
615 | buf = self.buf[:size] | ||
616 | self.buf = self.buf[size:] | ||
617 | return buf | ||
618 | # class _Stream | ||
619 | |||
620 | class _StreamProxy(object): | ||
621 | """Small proxy class that enables transparent compression | ||
622 | detection for the Stream interface (mode 'r|*'). | ||
623 | """ | ||
624 | |||
625 | def __init__(self, fileobj): | ||
626 | self.fileobj = fileobj | ||
627 | self.buf = self.fileobj.read(BLOCKSIZE) | ||
628 | |||
629 | def read(self, size): | ||
630 | self.read = self.fileobj.read  # subsequent reads bypass this shim | ||
631 | return self.buf  # first call replays the block buffered for sniffing | ||
632 | |||
633 | def getcomptype(self): | ||
634 | if self.buf.startswith(b"\037\213\010"): | ||
635 | return "gz" | ||
636 | if self.buf.startswith(b"BZh91"): | ||
637 | return "bz2" | ||
638 | return "tar" | ||
639 | |||
640 | def close(self): | ||
641 | self.fileobj.close() | ||
642 | # class StreamProxy | ||
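The detection works by buffering one block and sniffing magic bytes; a sketch against an in-memory stream (_StreamProxy is internal API, used here only to illustrate):

    import io
    from pip._vendor.distlib._backport.tarfile import _StreamProxy, BLOCKSIZE

    data = b"\037\213\010" + b"\x00" * (BLOCKSIZE - 3)   # gzip magic, deflate
    proxy = _StreamProxy(io.BytesIO(data))
    assert proxy.getcomptype() == "gz"
    assert proxy.read(BLOCKSIZE) == data   # the sniffed block is replayed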
643 | |||
644 | class _BZ2Proxy(object): | ||
645 | """Small proxy class that enables external file object | ||
646 | support for "r:bz2" and "w:bz2" modes. This is actually | ||
647 | a workaround for a limitation in bz2 module's BZ2File | ||
648 | class which (unlike gzip.GzipFile) has no support for | ||
649 | a file object argument. | ||
650 | """ | ||
651 | |||
652 | blocksize = 16 * 1024 | ||
653 | |||
654 | def __init__(self, fileobj, mode): | ||
655 | self.fileobj = fileobj | ||
656 | self.mode = mode | ||
657 | self.name = getattr(self.fileobj, "name", None) | ||
658 | self.init() | ||
659 | |||
660 | def init(self): | ||
661 | import bz2 | ||
662 | self.pos = 0 | ||
663 | if self.mode == "r": | ||
664 | self.bz2obj = bz2.BZ2Decompressor() | ||
665 | self.fileobj.seek(0) | ||
666 | self.buf = b"" | ||
667 | else: | ||
668 | self.bz2obj = bz2.BZ2Compressor() | ||
669 | |||
670 | def read(self, size): | ||
671 | x = len(self.buf) | ||
672 | while x < size: | ||
673 | raw = self.fileobj.read(self.blocksize) | ||
674 | if not raw: | ||
675 | break | ||
676 | data = self.bz2obj.decompress(raw) | ||
677 | self.buf += data | ||
678 | x += len(data) | ||
679 | |||
680 | buf = self.buf[:size] | ||
681 | self.buf = self.buf[size:] | ||
682 | self.pos += len(buf) | ||
683 | return buf | ||
684 | |||
685 | def seek(self, pos): | ||
686 | if pos < self.pos: | ||
687 | self.init() | ||
688 | self.read(pos - self.pos) | ||
689 | |||
690 | def tell(self): | ||
691 | return self.pos | ||
692 | |||
693 | def write(self, data): | ||
694 | self.pos += len(data) | ||
695 | raw = self.bz2obj.compress(data) | ||
696 | self.fileobj.write(raw) | ||
697 | |||
698 | def close(self): | ||
699 | if self.mode == "w": | ||
700 | raw = self.bz2obj.flush() | ||
701 | self.fileobj.write(raw) | ||
702 | # class _BZ2Proxy | ||
703 | |||
704 | #------------------------ | ||
705 | # Extraction file object | ||
706 | #------------------------ | ||
707 | class _FileInFile(object): | ||
708 | """A thin wrapper around an existing file object that | ||
709 | provides a part of its data as an individual file | ||
710 | object. | ||
711 | """ | ||
712 | |||
713 | def __init__(self, fileobj, offset, size, blockinfo=None): | ||
714 | self.fileobj = fileobj | ||
715 | self.offset = offset | ||
716 | self.size = size | ||
717 | self.position = 0 | ||
718 | |||
719 | if blockinfo is None: | ||
720 | blockinfo = [(0, size)] | ||
721 | |||
722 | # Construct a map with data and zero blocks. | ||
723 | self.map_index = 0 | ||
724 | self.map = [] | ||
725 | lastpos = 0 | ||
726 | realpos = self.offset | ||
727 | for offset, size in blockinfo: | ||
728 | if offset > lastpos: | ||
729 | self.map.append((False, lastpos, offset, None)) | ||
730 | self.map.append((True, offset, offset + size, realpos)) | ||
731 | realpos += size | ||
732 | lastpos = offset + size | ||
733 | if lastpos < self.size: | ||
734 | self.map.append((False, lastpos, self.size, None)) | ||
735 | |||
736 | def seekable(self): | ||
737 | if not hasattr(self.fileobj, "seekable"): | ||
738 | # XXX gzip.GzipFile and bz2.BZ2File | ||
739 | return True | ||
740 | return self.fileobj.seekable() | ||
741 | |||
742 | def tell(self): | ||
743 | """Return the current file position. | ||
744 | """ | ||
745 | return self.position | ||
746 | |||
747 | def seek(self, position): | ||
748 | """Seek to a position in the file. | ||
749 | """ | ||
750 | self.position = position | ||
751 | |||
752 | def read(self, size=None): | ||
753 | """Read data from the file. | ||
754 | """ | ||
755 | if size is None: | ||
756 | size = self.size - self.position | ||
757 | else: | ||
758 | size = min(size, self.size - self.position) | ||
759 | |||
760 | buf = b"" | ||
761 | while size > 0: | ||
762 | while True: | ||
763 | data, start, stop, offset = self.map[self.map_index] | ||
764 | if start <= self.position < stop: | ||
765 | break | ||
766 | else: | ||
767 | self.map_index += 1 | ||
768 | if self.map_index == len(self.map): | ||
769 | self.map_index = 0 | ||
770 | length = min(size, stop - self.position) | ||
771 | if data: | ||
772 | self.fileobj.seek(offset + (self.position - start)) | ||
773 | buf += self.fileobj.read(length) | ||
774 | else: | ||
775 | buf += NUL * length | ||
776 | size -= length | ||
777 | self.position += length | ||
778 | return buf | ||
779 | #class _FileInFile | ||
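How the block map behaves for a sparse member, as a self-contained sketch (internal API, in-memory data): four data bytes at offset 0 and four at offset 8, with the hole in between synthesized as NULs by the (False, ...) map entry.

    import io
    from pip._vendor.distlib._backport.tarfile import _FileInFile

    raw = io.BytesIO(b"AAAABBBB")
    f = _FileInFile(raw, offset=0, size=12, blockinfo=[(0, 4), (8, 4)])
    assert f.read() == b"AAAA" + b"\x00" * 4 + b"BBBB"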
780 | |||
781 | |||
782 | class ExFileObject(object): | ||
783 | """File-like object for reading an archive member. | ||
784 | Is returned by TarFile.extractfile(). | ||
785 | """ | ||
786 | blocksize = 1024 | ||
787 | |||
788 | def __init__(self, tarfile, tarinfo): | ||
789 | self.fileobj = _FileInFile(tarfile.fileobj, | ||
790 | tarinfo.offset_data, | ||
791 | tarinfo.size, | ||
792 | tarinfo.sparse) | ||
793 | self.name = tarinfo.name | ||
794 | self.mode = "r" | ||
795 | self.closed = False | ||
796 | self.size = tarinfo.size | ||
797 | |||
798 | self.position = 0 | ||
799 | self.buffer = b"" | ||
800 | |||
801 | def readable(self): | ||
802 | return True | ||
803 | |||
804 | def writable(self): | ||
805 | return False | ||
806 | |||
807 | def seekable(self): | ||
808 | return self.fileobj.seekable() | ||
809 | |||
810 | def read(self, size=None): | ||
811 | """Read at most size bytes from the file. If size is not | ||
812 | present or None, read all data until EOF is reached. | ||
813 | """ | ||
814 | if self.closed: | ||
815 | raise ValueError("I/O operation on closed file") | ||
816 | |||
817 | buf = b"" | ||
818 | if self.buffer: | ||
819 | if size is None: | ||
820 | buf = self.buffer | ||
821 | self.buffer = b"" | ||
822 | else: | ||
823 | buf = self.buffer[:size] | ||
824 | self.buffer = self.buffer[size:] | ||
825 | |||
826 | if size is None: | ||
827 | buf += self.fileobj.read() | ||
828 | else: | ||
829 | buf += self.fileobj.read(size - len(buf)) | ||
830 | |||
831 | self.position += len(buf) | ||
832 | return buf | ||
833 | |||
834 | # XXX TextIOWrapper uses the read1() method. | ||
835 | read1 = read | ||
836 | |||
837 | def readline(self, size=-1): | ||
838 | """Read one entire line from the file. If size is present | ||
839 | and non-negative, return a string with at most that | ||
840 | size, which may be an incomplete line. | ||
841 | """ | ||
842 | if self.closed: | ||
843 | raise ValueError("I/O operation on closed file") | ||
844 | |||
845 | pos = self.buffer.find(b"\n") + 1 | ||
846 | if pos == 0: | ||
847 | # no newline found. | ||
848 | while True: | ||
849 | buf = self.fileobj.read(self.blocksize) | ||
850 | self.buffer += buf | ||
851 | if not buf or b"\n" in buf: | ||
852 | pos = self.buffer.find(b"\n") + 1 | ||
853 | if pos == 0: | ||
854 | # no newline found. | ||
855 | pos = len(self.buffer) | ||
856 | break | ||
857 | |||
858 | if size != -1: | ||
859 | pos = min(size, pos) | ||
860 | |||
861 | buf = self.buffer[:pos] | ||
862 | self.buffer = self.buffer[pos:] | ||
863 | self.position += len(buf) | ||
864 | return buf | ||
865 | |||
866 | def readlines(self): | ||
867 | """Return a list with all remaining lines. | ||
868 | """ | ||
869 | result = [] | ||
870 | while True: | ||
871 | line = self.readline() | ||
872 | if not line: break | ||
873 | result.append(line) | ||
874 | return result | ||
875 | |||
876 | def tell(self): | ||
877 | """Return the current file position. | ||
878 | """ | ||
879 | if self.closed: | ||
880 | raise ValueError("I/O operation on closed file") | ||
881 | |||
882 | return self.position | ||
883 | |||
884 | def seek(self, pos, whence=os.SEEK_SET): | ||
885 | """Seek to a position in the file. | ||
886 | """ | ||
887 | if self.closed: | ||
888 | raise ValueError("I/O operation on closed file") | ||
889 | |||
890 | if whence == os.SEEK_SET: | ||
891 | self.position = min(max(pos, 0), self.size) | ||
892 | elif whence == os.SEEK_CUR: | ||
893 | if pos < 0: | ||
894 | self.position = max(self.position + pos, 0) | ||
895 | else: | ||
896 | self.position = min(self.position + pos, self.size) | ||
897 | elif whence == os.SEEK_END: | ||
898 | self.position = max(min(self.size + pos, self.size), 0) | ||
899 | else: | ||
900 | raise ValueError("Invalid argument") | ||
901 | |||
902 | self.buffer = b"" | ||
903 | self.fileobj.seek(self.position) | ||
904 | |||
905 | def close(self): | ||
906 | """Close the file object. | ||
907 | """ | ||
908 | self.closed = True | ||
909 | |||
910 | def __iter__(self): | ||
911 | """Get an iterator over the file's lines. | ||
912 | """ | ||
913 | while True: | ||
914 | line = self.readline() | ||
915 | if not line: | ||
916 | break | ||
917 | yield line | ||
918 | #class ExFileObject | ||
919 | |||
920 | #------------------ | ||
921 | # Exported Classes | ||
922 | #------------------ | ||
923 | class TarInfo(object): | ||
924 | """Informational class which holds the details about an | ||
925 | archive member given by a tar header block. | ||
926 | TarInfo objects are returned by TarFile.getmember(), | ||
927 | TarFile.getmembers() and TarFile.gettarinfo() and are | ||
928 | usually created internally. | ||
929 | """ | ||
930 | |||
931 | __slots__ = ("name", "mode", "uid", "gid", "size", "mtime", | ||
932 | "chksum", "type", "linkname", "uname", "gname", | ||
933 | "devmajor", "devminor", | ||
934 | "offset", "offset_data", "pax_headers", "sparse", | ||
935 | "tarfile", "_sparse_structs", "_link_target") | ||
936 | |||
937 | def __init__(self, name=""): | ||
938 | """Construct a TarInfo object. name is the optional name | ||
939 | of the member. | ||
940 | """ | ||
941 | self.name = name # member name | ||
942 | self.mode = 0o644 # file permissions | ||
943 | self.uid = 0 # user id | ||
944 | self.gid = 0 # group id | ||
945 | self.size = 0 # file size | ||
946 | self.mtime = 0 # modification time | ||
947 | self.chksum = 0 # header checksum | ||
948 | self.type = REGTYPE # member type | ||
949 | self.linkname = "" # link name | ||
950 | self.uname = "" # user name | ||
951 | self.gname = "" # group name | ||
952 | self.devmajor = 0 # device major number | ||
953 | self.devminor = 0 # device minor number | ||
954 | |||
955 | self.offset = 0 # the tar header starts here | ||
956 | self.offset_data = 0 # the file's data starts here | ||
957 | |||
958 | self.sparse = None # sparse member information | ||
959 | self.pax_headers = {} # pax header information | ||
960 | |||
961 | # In pax headers the "name" and "linkname" field are called | ||
962 | # "path" and "linkpath". | ||
963 | def _getpath(self): | ||
964 | return self.name | ||
965 | def _setpath(self, name): | ||
966 | self.name = name | ||
967 | path = property(_getpath, _setpath) | ||
968 | |||
969 | def _getlinkpath(self): | ||
970 | return self.linkname | ||
971 | def _setlinkpath(self, linkname): | ||
972 | self.linkname = linkname | ||
973 | linkpath = property(_getlinkpath, _setlinkpath) | ||
974 | |||
975 | def __repr__(self): | ||
976 | return "<%s %r at %#x>" % (self.__class__.__name__,self.name,id(self)) | ||
977 | |||
978 | def get_info(self): | ||
979 | """Return the TarInfo's attributes as a dictionary. | ||
980 | """ | ||
981 | info = { | ||
982 | "name": self.name, | ||
983 | "mode": self.mode & 0o7777, | ||
984 | "uid": self.uid, | ||
985 | "gid": self.gid, | ||
986 | "size": self.size, | ||
987 | "mtime": self.mtime, | ||
988 | "chksum": self.chksum, | ||
989 | "type": self.type, | ||
990 | "linkname": self.linkname, | ||
991 | "uname": self.uname, | ||
992 | "gname": self.gname, | ||
993 | "devmajor": self.devmajor, | ||
994 | "devminor": self.devminor | ||
995 | } | ||
996 | |||
997 | if info["type"] == DIRTYPE and not info["name"].endswith("/"): | ||
998 | info["name"] += "/" | ||
999 | |||
1000 | return info | ||
1001 | |||
1002 | def tobuf(self, format=DEFAULT_FORMAT, encoding=ENCODING, errors="surrogateescape"): | ||
1003 | """Return a tar header as a string of 512 byte blocks. | ||
1004 | """ | ||
1005 | info = self.get_info() | ||
1006 | |||
1007 | if format == USTAR_FORMAT: | ||
1008 | return self.create_ustar_header(info, encoding, errors) | ||
1009 | elif format == GNU_FORMAT: | ||
1010 | return self.create_gnu_header(info, encoding, errors) | ||
1011 | elif format == PAX_FORMAT: | ||
1012 | return self.create_pax_header(info, encoding) | ||
1013 | else: | ||
1014 | raise ValueError("invalid format") | ||
1015 | |||
1016 | def create_ustar_header(self, info, encoding, errors): | ||
1017 | """Return the object as a ustar header block. | ||
1018 | """ | ||
1019 | info["magic"] = POSIX_MAGIC | ||
1020 | |||
1021 | if len(info["linkname"]) > LENGTH_LINK: | ||
1022 | raise ValueError("linkname is too long") | ||
1023 | |||
1024 | if len(info["name"]) > LENGTH_NAME: | ||
1025 | info["prefix"], info["name"] = self._posix_split_name(info["name"]) | ||
1026 | |||
1027 | return self._create_header(info, USTAR_FORMAT, encoding, errors) | ||
1028 | |||
1029 | def create_gnu_header(self, info, encoding, errors): | ||
1030 | """Return the object as a GNU header block sequence. | ||
1031 | """ | ||
1032 | info["magic"] = GNU_MAGIC | ||
1033 | |||
1034 | buf = b"" | ||
1035 | if len(info["linkname"]) > LENGTH_LINK: | ||
1036 | buf += self._create_gnu_long_header(info["linkname"], GNUTYPE_LONGLINK, encoding, errors) | ||
1037 | |||
1038 | if len(info["name"]) > LENGTH_NAME: | ||
1039 | buf += self._create_gnu_long_header(info["name"], GNUTYPE_LONGNAME, encoding, errors) | ||
1040 | |||
1041 | return buf + self._create_header(info, GNU_FORMAT, encoding, errors) | ||
1042 | |||
1043 | def create_pax_header(self, info, encoding): | ||
1044 | """Return the object as a ustar header block. If it cannot be | ||
1045 | represented this way, prepend a pax extended header sequence | ||
1046 | with supplementary information. | ||
1047 | """ | ||
1048 | info["magic"] = POSIX_MAGIC | ||
1049 | pax_headers = self.pax_headers.copy() | ||
1050 | |||
1051 | # Test string fields for values that exceed the field length or cannot | ||
1052 | # be represented in ASCII encoding. | ||
1053 | for name, hname, length in ( | ||
1054 | ("name", "path", LENGTH_NAME), ("linkname", "linkpath", LENGTH_LINK), | ||
1055 | ("uname", "uname", 32), ("gname", "gname", 32)): | ||
1056 | |||
1057 | if hname in pax_headers: | ||
1058 | # The pax header has priority. | ||
1059 | continue | ||
1060 | |||
1061 | # Try to encode the string as ASCII. | ||
1062 | try: | ||
1063 | info[name].encode("ascii", "strict") | ||
1064 | except UnicodeEncodeError: | ||
1065 | pax_headers[hname] = info[name] | ||
1066 | continue | ||
1067 | |||
1068 | if len(info[name]) > length: | ||
1069 | pax_headers[hname] = info[name] | ||
1070 | |||
1071 | # Test number fields for values that exceed the field limit or values | ||
1072 | # that need to be stored as float. | ||
1073 | for name, digits in (("uid", 8), ("gid", 8), ("size", 12), ("mtime", 12)): | ||
1074 | if name in pax_headers: | ||
1075 | # The pax header has priority. Avoid overflow. | ||
1076 | info[name] = 0 | ||
1077 | continue | ||
1078 | |||
1079 | val = info[name] | ||
1080 | if not 0 <= val < 8 ** (digits - 1) or isinstance(val, float): | ||
1081 | pax_headers[name] = str(val) | ||
1082 | info[name] = 0 | ||
1083 | |||
1084 | # Create a pax extended header if necessary. | ||
1085 | if pax_headers: | ||
1086 | buf = self._create_pax_generic_header(pax_headers, XHDTYPE, encoding) | ||
1087 | else: | ||
1088 | buf = b"" | ||
1089 | |||
1090 | return buf + self._create_header(info, USTAR_FORMAT, "ascii", "replace") | ||
1091 | |||
1092 | @classmethod | ||
1093 | def create_pax_global_header(cls, pax_headers): | ||
1094 | """Return the object as a pax global header block sequence. | ||
1095 | """ | ||
1096 | return cls._create_pax_generic_header(pax_headers, XGLTYPE, "utf8") | ||
1097 | |||
1098 | def _posix_split_name(self, name): | ||
1099 | """Split a name longer than 100 chars into a prefix | ||
1100 | and a name part. | ||
1101 | """ | ||
1102 | prefix = name[:LENGTH_PREFIX + 1] | ||
1103 | while prefix and prefix[-1] != "/": | ||
1104 | prefix = prefix[:-1] | ||
1105 | |||
1106 | name = name[len(prefix):] | ||
1107 | prefix = prefix[:-1] | ||
1108 | |||
1109 | if not prefix or len(name) > LENGTH_NAME: | ||
1110 | raise ValueError("name is too long") | ||
1111 | return prefix, name | ||
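For instance, a 121-character path splits at the last '/' that keeps the prefix within the 155-byte field (a private helper, called here only for illustration):

    from pip._vendor.distlib._backport.tarfile import TarInfo

    t = TarInfo()
    prefix, name = t._posix_split_name("a" * 60 + "/" + "b" * 60)
    assert prefix == "a" * 60 and name == "b" * 60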
1112 | |||
1113 | @staticmethod | ||
1114 | def _create_header(info, format, encoding, errors): | ||
1115 | """Return a header block. info is a dictionary with file | ||
1116 | information, format must be one of the *_FORMAT constants. | ||
1117 | """ | ||
1118 | parts = [ | ||
1119 | stn(info.get("name", ""), 100, encoding, errors), | ||
1120 | itn(info.get("mode", 0) & 0o7777, 8, format), | ||
1121 | itn(info.get("uid", 0), 8, format), | ||
1122 | itn(info.get("gid", 0), 8, format), | ||
1123 | itn(info.get("size", 0), 12, format), | ||
1124 | itn(info.get("mtime", 0), 12, format), | ||
1125 | b" ", # checksum field | ||
1126 | info.get("type", REGTYPE), | ||
1127 | stn(info.get("linkname", ""), 100, encoding, errors), | ||
1128 | info.get("magic", POSIX_MAGIC), | ||
1129 | stn(info.get("uname", ""), 32, encoding, errors), | ||
1130 | stn(info.get("gname", ""), 32, encoding, errors), | ||
1131 | itn(info.get("devmajor", 0), 8, format), | ||
1132 | itn(info.get("devminor", 0), 8, format), | ||
1133 | stn(info.get("prefix", ""), 155, encoding, errors) | ||
1134 | ] | ||
1135 | |||
1136 | buf = struct.pack("%ds" % BLOCKSIZE, b"".join(parts)) | ||
1137 | chksum = calc_chksums(buf[-BLOCKSIZE:])[0] | ||
1138 | buf = buf[:-364] + ("%06o\0" % chksum).encode("ascii") + buf[-357:] | ||
1139 | return buf | ||
1140 | |||
1141 | @staticmethod | ||
1142 | def _create_payload(payload): | ||
1143 | """Return the string payload filled with zero bytes | ||
1144 | up to the next 512 byte border. | ||
1145 | """ | ||
1146 | blocks, remainder = divmod(len(payload), BLOCKSIZE) | ||
1147 | if remainder > 0: | ||
1148 | payload += (BLOCKSIZE - remainder) * NUL | ||
1149 | return payload | ||
1150 | |||
1151 | @classmethod | ||
1152 | def _create_gnu_long_header(cls, name, type, encoding, errors): | ||
1153 | """Return a GNUTYPE_LONGNAME or GNUTYPE_LONGLINK sequence | ||
1154 | for name. | ||
1155 | """ | ||
1156 | name = name.encode(encoding, errors) + NUL | ||
1157 | |||
1158 | info = {} | ||
1159 | info["name"] = "././@LongLink" | ||
1160 | info["type"] = type | ||
1161 | info["size"] = len(name) | ||
1162 | info["magic"] = GNU_MAGIC | ||
1163 | |||
1164 | # create extended header + name blocks. | ||
1165 | return cls._create_header(info, USTAR_FORMAT, encoding, errors) + \ | ||
1166 | cls._create_payload(name) | ||
1167 | |||
1168 | @classmethod | ||
1169 | def _create_pax_generic_header(cls, pax_headers, type, encoding): | ||
1170 | """Return a POSIX.1-2008 extended or global header sequence | ||
1171 | that contains a list of keyword, value pairs. The values | ||
1172 | must be strings. | ||
1173 | """ | ||
1174 | # Check if one of the fields contains surrogate characters and thereby | ||
1175 | # forces hdrcharset=BINARY, see _proc_pax() for more information. | ||
1176 | binary = False | ||
1177 | for keyword, value in pax_headers.items(): | ||
1178 | try: | ||
1179 | value.encode("utf8", "strict") | ||
1180 | except UnicodeEncodeError: | ||
1181 | binary = True | ||
1182 | break | ||
1183 | |||
1184 | records = b"" | ||
1185 | if binary: | ||
1186 | # Put the hdrcharset field at the beginning of the header. | ||
1187 | records += b"21 hdrcharset=BINARY\n" | ||
1188 | |||
1189 | for keyword, value in pax_headers.items(): | ||
1190 | keyword = keyword.encode("utf8") | ||
1191 | if binary: | ||
1192 | # Try to restore the original byte representation of `value'. | ||
1193 | # Needless to say, the encoding must match the string. | ||
1194 | value = value.encode(encoding, "surrogateescape") | ||
1195 | else: | ||
1196 | value = value.encode("utf8") | ||
1197 | |||
1198 | l = len(keyword) + len(value) + 3 # ' ' + '=' + '\n' | ||
1199 | n = p = 0 | ||
1200 | while True: | ||
1201 | n = l + len(str(p)) | ||
1202 | if n == p: | ||
1203 | break | ||
1204 | p = n | ||
1205 | records += bytes(str(p), "ascii") + b" " + keyword + b"=" + value + b"\n" | ||
1206 | |||
1207 | # We use a hardcoded "././@PaxHeader" name like star does | ||
1208 | # instead of the one that POSIX recommends. | ||
1209 | info = {} | ||
1210 | info["name"] = "././@PaxHeader" | ||
1211 | info["type"] = type | ||
1212 | info["size"] = len(records) | ||
1213 | info["magic"] = POSIX_MAGIC | ||
1214 | |||
1215 | # Create pax header + record blocks. | ||
1216 | return cls._create_header(info, USTAR_FORMAT, "ascii", "replace") + \ | ||
1217 | cls._create_payload(records) | ||
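The while loop above solves a small fixed-point problem: a pax record is '<length> <keyword>=<value>\n' where <length> includes its own digits. A standalone mirror of the computation, assuming ASCII-only keyword and value:

    def pax_record_length(keyword, value):
        # Find p such that p == base + len(str(p)), as in the loop above.
        base = len(keyword) + len(value) + 3   # ' ' + '=' + '\n'
        n = p = 0
        while True:
            n = base + len(str(p))
            if n == p:
                break
            p = n
        return p

    # "23 path=very/long/name\n" is exactly 23 bytes, length field included.
    assert pax_record_length("path", "very/long/name") == 23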
1218 | |||
1219 | @classmethod | ||
1220 | def frombuf(cls, buf, encoding, errors): | ||
1221 | """Construct a TarInfo object from a 512 byte bytes object. | ||
1222 | """ | ||
1223 | if len(buf) == 0: | ||
1224 | raise EmptyHeaderError("empty header") | ||
1225 | if len(buf) != BLOCKSIZE: | ||
1226 | raise TruncatedHeaderError("truncated header") | ||
1227 | if buf.count(NUL) == BLOCKSIZE: | ||
1228 | raise EOFHeaderError("end of file header") | ||
1229 | |||
1230 | chksum = nti(buf[148:156]) | ||
1231 | if chksum not in calc_chksums(buf): | ||
1232 | raise InvalidHeaderError("bad checksum") | ||
1233 | |||
1234 | obj = cls() | ||
1235 | obj.name = nts(buf[0:100], encoding, errors) | ||
1236 | obj.mode = nti(buf[100:108]) | ||
1237 | obj.uid = nti(buf[108:116]) | ||
1238 | obj.gid = nti(buf[116:124]) | ||
1239 | obj.size = nti(buf[124:136]) | ||
1240 | obj.mtime = nti(buf[136:148]) | ||
1241 | obj.chksum = chksum | ||
1242 | obj.type = buf[156:157] | ||
1243 | obj.linkname = nts(buf[157:257], encoding, errors) | ||
1244 | obj.uname = nts(buf[265:297], encoding, errors) | ||
1245 | obj.gname = nts(buf[297:329], encoding, errors) | ||
1246 | obj.devmajor = nti(buf[329:337]) | ||
1247 | obj.devminor = nti(buf[337:345]) | ||
1248 | prefix = nts(buf[345:500], encoding, errors) | ||
1249 | |||
1250 | # Old V7 tar format represents a directory as a regular | ||
1251 | # file with a trailing slash. | ||
1252 | if obj.type == AREGTYPE and obj.name.endswith("/"): | ||
1253 | obj.type = DIRTYPE | ||
1254 | |||
1255 | # The old GNU sparse format occupies some of the unused | ||
1256 | # space in the buffer for up to 4 sparse structures. | ||
1257 | # Save them for later processing in _proc_sparse(). | ||
1258 | if obj.type == GNUTYPE_SPARSE: | ||
1259 | pos = 386 | ||
1260 | structs = [] | ||
1261 | for i in range(4): | ||
1262 | try: | ||
1263 | offset = nti(buf[pos:pos + 12]) | ||
1264 | numbytes = nti(buf[pos + 12:pos + 24]) | ||
1265 | except ValueError: | ||
1266 | break | ||
1267 | structs.append((offset, numbytes)) | ||
1268 | pos += 24 | ||
1269 | isextended = bool(buf[482]) | ||
1270 | origsize = nti(buf[483:495]) | ||
1271 | obj._sparse_structs = (structs, isextended, origsize) | ||
1272 | |||
1273 | # Remove redundant slashes from directories. | ||
1274 | if obj.isdir(): | ||
1275 | obj.name = obj.name.rstrip("/") | ||
1276 | |||
1277 | # Reconstruct a ustar longname. | ||
1278 | if prefix and obj.type not in GNU_TYPES: | ||
1279 | obj.name = prefix + "/" + obj.name | ||
1280 | return obj | ||
1281 | |||
1282 | @classmethod | ||
1283 | def fromtarfile(cls, tarfile): | ||
1284 | """Return the next TarInfo object from TarFile object | ||
1285 | tarfile. | ||
1286 | """ | ||
1287 | buf = tarfile.fileobj.read(BLOCKSIZE) | ||
1288 | obj = cls.frombuf(buf, tarfile.encoding, tarfile.errors) | ||
1289 | obj.offset = tarfile.fileobj.tell() - BLOCKSIZE | ||
1290 | return obj._proc_member(tarfile) | ||
1291 | |||
1292 | #-------------------------------------------------------------------------- | ||
1293 | # The following are methods that are called depending on the type of a | ||
1294 | # member. The entry point is _proc_member(), which can be overridden in a | ||
1295 | # subclass to add custom _proc_*() methods. A _proc_*() method MUST | ||
1296 | # implement the following operations: | ||
1298 | # 1. Set self.offset_data to the position where the data blocks begin, | ||
1299 | # if there is data that follows. | ||
1300 | # 2. Set tarfile.offset to the position where the next member's header will | ||
1301 | # begin. | ||
1302 | # 3. Return self or another valid TarInfo object. | ||
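# A subclass hook might look like this (sketch; MyTarInfo, the
# b"X" type flag and _proc_custom() are hypothetical):
#
#   class MyTarInfo(TarInfo):
#       def _proc_member(self, tarfile):
#           if self.type == b"X":
#               return self._proc_custom(tarfile)
#           return TarInfo._proc_member(self, tarfile)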
1303 | def _proc_member(self, tarfile): | ||
1304 | """Choose the right processing method depending on | ||
1305 | the type and call it. | ||
1306 | """ | ||
1307 | if self.type in (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK): | ||
1308 | return self._proc_gnulong(tarfile) | ||
1309 | elif self.type == GNUTYPE_SPARSE: | ||
1310 | return self._proc_sparse(tarfile) | ||
1311 | elif self.type in (XHDTYPE, XGLTYPE, SOLARIS_XHDTYPE): | ||
1312 | return self._proc_pax(tarfile) | ||
1313 | else: | ||
1314 | return self._proc_builtin(tarfile) | ||
1315 | |||
1316 | def _proc_builtin(self, tarfile): | ||
1317 | """Process a builtin type or an unknown type which | ||
1318 | will be treated as a regular file. | ||
1319 | """ | ||
1320 | self.offset_data = tarfile.fileobj.tell() | ||
1321 | offset = self.offset_data | ||
1322 | if self.isreg() or self.type not in SUPPORTED_TYPES: | ||
1323 | # Skip the following data blocks. | ||
1324 | offset += self._block(self.size) | ||
1325 | tarfile.offset = offset | ||
1326 | |||
1327 | # Patch the TarInfo object with saved global | ||
1328 | # header information. | ||
1329 | self._apply_pax_info(tarfile.pax_headers, tarfile.encoding, tarfile.errors) | ||
1330 | |||
1331 | return self | ||
1332 | |||
1333 | def _proc_gnulong(self, tarfile): | ||
1334 | """Process the blocks that hold a GNU longname | ||
1335 | or longlink member. | ||
1336 | """ | ||
1337 | buf = tarfile.fileobj.read(self._block(self.size)) | ||
1338 | |||
1339 | # Fetch the next header and process it. | ||
1340 | try: | ||
1341 | next = self.fromtarfile(tarfile) | ||
1342 | except HeaderError: | ||
1343 | raise SubsequentHeaderError("missing or bad subsequent header") | ||
1344 | |||
1345 | # Patch the TarInfo object from the next header with | ||
1346 | # the longname information. | ||
1347 | next.offset = self.offset | ||
1348 | if self.type == GNUTYPE_LONGNAME: | ||
1349 | next.name = nts(buf, tarfile.encoding, tarfile.errors) | ||
1350 | elif self.type == GNUTYPE_LONGLINK: | ||
1351 | next.linkname = nts(buf, tarfile.encoding, tarfile.errors) | ||
1352 | |||
1353 | return next | ||
1354 | |||
1355 | def _proc_sparse(self, tarfile): | ||
1356 | """Process a GNU sparse header plus extra headers. | ||
1357 | """ | ||
1358 | # We already collected some sparse structures in frombuf(). | ||
1359 | structs, isextended, origsize = self._sparse_structs | ||
1360 | del self._sparse_structs | ||
1361 | |||
1362 | # Collect sparse structures from extended header blocks. | ||
1363 | while isextended: | ||
1364 | buf = tarfile.fileobj.read(BLOCKSIZE) | ||
1365 | pos = 0 | ||
1366 | for i in range(21): | ||
1367 | try: | ||
1368 | offset = nti(buf[pos:pos + 12]) | ||
1369 | numbytes = nti(buf[pos + 12:pos + 24]) | ||
1370 | except ValueError: | ||
1371 | break | ||
1372 | if offset and numbytes: | ||
1373 | structs.append((offset, numbytes)) | ||
1374 | pos += 24 | ||
1375 | isextended = bool(buf[504]) | ||
1376 | self.sparse = structs | ||
1377 | |||
1378 | self.offset_data = tarfile.fileobj.tell() | ||
1379 | tarfile.offset = self.offset_data + self._block(self.size) | ||
1380 | self.size = origsize | ||
1381 | return self | ||
1382 | |||
1383 | def _proc_pax(self, tarfile): | ||
1384 | """Process an extended or global header as described in | ||
1385 | POSIX.1-2008. | ||
1386 | """ | ||
1387 | # Read the header information. | ||
1388 | buf = tarfile.fileobj.read(self._block(self.size)) | ||
1389 | |||
1390 | # A pax header stores supplemental information for either | ||
1391 | # the following file (extended) or all following files | ||
1392 | # (global). | ||
1393 | if self.type == XGLTYPE: | ||
1394 | pax_headers = tarfile.pax_headers | ||
1395 | else: | ||
1396 | pax_headers = tarfile.pax_headers.copy() | ||
1397 | |||
1398 | # Check if the pax header contains a hdrcharset field. This tells us | ||
1399 | # the encoding of the path, linkpath, uname and gname fields. Normally | ||
1400 | # these fields are UTF-8 encoded, but since POSIX.1-2008 tar | ||
1401 | # implementations have been allowed to store them as raw binary | ||
1402 | # strings if the translation to UTF-8 fails. | ||
1403 | match = re.search(br"\d+ hdrcharset=([^\n]+)\n", buf) | ||
1404 | if match is not None: | ||
1405 | pax_headers["hdrcharset"] = match.group(1).decode("utf8") | ||
1406 | |||
1407 | # For the time being, we don't care about anything other than "BINARY". | ||
1408 | # The only other value that is currently allowed by the standard is | ||
1409 | # "ISO-IR 10646 2000 UTF-8" in other words UTF-8. | ||
1410 | hdrcharset = pax_headers.get("hdrcharset") | ||
1411 | if hdrcharset == "BINARY": | ||
1412 | encoding = tarfile.encoding | ||
1413 | else: | ||
1414 | encoding = "utf8" | ||
1415 | |||
1416 | # Parse pax header information. A record looks like this: | ||
1417 | # "%d %s=%s\n" % (length, keyword, value). length is the size | ||
1418 | # of the complete record including the length field itself and | ||
1419 | # the newline. keyword and value are both UTF-8 encoded strings. | ||
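# For example, the 21-byte record b"21 hdrcharset=BINARY\n" (the
# same record written above for binary headers) parses to the
# keyword "hdrcharset" with the value "BINARY".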
1420 | regex = re.compile(br"(\d+) ([^=]+)=") | ||
1421 | pos = 0 | ||
1422 | while True: | ||
1423 | match = regex.match(buf, pos) | ||
1424 | if not match: | ||
1425 | break | ||
1426 | |||
1427 | length, keyword = match.groups() | ||
1428 | length = int(length) | ||
1429 | value = buf[match.end(2) + 1:match.start(1) + length - 1] | ||
1430 | |||
1431 | # Normally, we could just use "utf8" as the encoding and "strict" | ||
1432 | # as the error handler, but we'd better not take the risk. For | ||
1433 | # example, GNU tar <= 1.23 is known to store filenames it cannot | ||
1434 | # translate to UTF-8 as raw strings (unfortunately without a | ||
1435 | # hdrcharset=BINARY header). | ||
1436 | # We first try the strict standard encoding, and if that fails we | ||
1437 | # fall back on the user's encoding and error handler. | ||
1438 | keyword = self._decode_pax_field(keyword, "utf8", "utf8", | ||
1439 | tarfile.errors) | ||
1440 | if keyword in PAX_NAME_FIELDS: | ||
1441 | value = self._decode_pax_field(value, encoding, tarfile.encoding, | ||
1442 | tarfile.errors) | ||
1443 | else: | ||
1444 | value = self._decode_pax_field(value, "utf8", "utf8", | ||
1445 | tarfile.errors) | ||
1446 | |||
1447 | pax_headers[keyword] = value | ||
1448 | pos += length | ||
1449 | |||
1450 | # Fetch the next header. | ||
1451 | try: | ||
1452 | next = self.fromtarfile(tarfile) | ||
1453 | except HeaderError: | ||
1454 | raise SubsequentHeaderError("missing or bad subsequent header") | ||
1455 | |||
1456 | # Process GNU sparse information. | ||
1457 | if "GNU.sparse.map" in pax_headers: | ||
1458 | # GNU extended sparse format version 0.1. | ||
1459 | self._proc_gnusparse_01(next, pax_headers) | ||
1460 | |||
1461 | elif "GNU.sparse.size" in pax_headers: | ||
1462 | # GNU extended sparse format version 0.0. | ||
1463 | self._proc_gnusparse_00(next, pax_headers, buf) | ||
1464 | |||
1465 | elif pax_headers.get("GNU.sparse.major") == "1" and pax_headers.get("GNU.sparse.minor") == "0": | ||
1466 | # GNU extended sparse format version 1.0. | ||
1467 | self._proc_gnusparse_10(next, pax_headers, tarfile) | ||
1468 | |||
1469 | if self.type in (XHDTYPE, SOLARIS_XHDTYPE): | ||
1470 | # Patch the TarInfo object with the extended header info. | ||
1471 | next._apply_pax_info(pax_headers, tarfile.encoding, tarfile.errors) | ||
1472 | next.offset = self.offset | ||
1473 | |||
1474 | if "size" in pax_headers: | ||
1475 | # If the extended header replaces the size field, | ||
1476 | # we need to recalculate the offset where the next | ||
1477 | # header starts. | ||
1478 | offset = next.offset_data | ||
1479 | if next.isreg() or next.type not in SUPPORTED_TYPES: | ||
1480 | offset += next._block(next.size) | ||
1481 | tarfile.offset = offset | ||
1482 | |||
1483 | return next | ||
1484 | |||
1485 | def _proc_gnusparse_00(self, next, pax_headers, buf): | ||
1486 | """Process a GNU tar extended sparse header, version 0.0. | ||
1487 | """ | ||
1488 | offsets = [] | ||
1489 | for match in re.finditer(br"\d+ GNU.sparse.offset=(\d+)\n", buf): | ||
1490 | offsets.append(int(match.group(1))) | ||
1491 | numbytes = [] | ||
1492 | for match in re.finditer(br"\d+ GNU.sparse.numbytes=(\d+)\n", buf): | ||
1493 | numbytes.append(int(match.group(1))) | ||
1494 | next.sparse = list(zip(offsets, numbytes)) | ||
1495 | |||
1496 | def _proc_gnusparse_01(self, next, pax_headers): | ||
1497 | """Process a GNU tar extended sparse header, version 0.1. | ||
1498 | """ | ||
1499 | sparse = [int(x) for x in pax_headers["GNU.sparse.map"].split(",")] | ||
1500 | next.sparse = list(zip(sparse[::2], sparse[1::2])) | ||
1501 | |||
1502 | def _proc_gnusparse_10(self, next, pax_headers, tarfile): | ||
1503 | """Process a GNU tar extended sparse header, version 1.0. | ||
1504 | """ | ||
1505 | fields = None | ||
1506 | sparse = [] | ||
1507 | buf = tarfile.fileobj.read(BLOCKSIZE) | ||
1508 | fields, buf = buf.split(b"\n", 1) | ||
1509 | fields = int(fields) | ||
1510 | while len(sparse) < fields * 2: | ||
1511 | if b"\n" not in buf: | ||
1512 | buf += tarfile.fileobj.read(BLOCKSIZE) | ||
1513 | number, buf = buf.split(b"\n", 1) | ||
1514 | sparse.append(int(number)) | ||
1515 | next.offset_data = tarfile.fileobj.tell() | ||
1516 | next.sparse = list(zip(sparse[::2], sparse[1::2])) | ||
1517 | |||
1518 | def _apply_pax_info(self, pax_headers, encoding, errors): | ||
1519 | """Replace fields with supplemental information from a previous | ||
1520 | pax extended or global header. | ||
1521 | """ | ||
1522 | for keyword, value in pax_headers.items(): | ||
1523 | if keyword == "GNU.sparse.name": | ||
1524 | setattr(self, "path", value) | ||
1525 | elif keyword == "GNU.sparse.size": | ||
1526 | setattr(self, "size", int(value)) | ||
1527 | elif keyword == "GNU.sparse.realsize": | ||
1528 | setattr(self, "size", int(value)) | ||
1529 | elif keyword in PAX_FIELDS: | ||
1530 | if keyword in PAX_NUMBER_FIELDS: | ||
1531 | try: | ||
1532 | value = PAX_NUMBER_FIELDS[keyword](value) | ||
1533 | except ValueError: | ||
1534 | value = 0 | ||
1535 | if keyword == "path": | ||
1536 | value = value.rstrip("/") | ||
1537 | setattr(self, keyword, value) | ||
1538 | |||
1539 | self.pax_headers = pax_headers.copy() | ||
1540 | |||
1541 | def _decode_pax_field(self, value, encoding, fallback_encoding, fallback_errors): | ||
1542 | """Decode a single field from a pax record. | ||
1543 | """ | ||
1544 | try: | ||
1545 | return value.decode(encoding, "strict") | ||
1546 | except UnicodeDecodeError: | ||
1547 | return value.decode(fallback_encoding, fallback_errors) | ||
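# E.g. with encoding "utf8", b"caf\xc3\xa9" decodes strictly to
# "café", while invalid UTF-8 such as b"caf\xe9" falls back to the
# caller-supplied fallback encoding and error handler.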
1548 | |||
1549 | def _block(self, count): | ||
1550 | """Round up a byte count by BLOCKSIZE and return it, | ||
1551 | e.g. _block(834) => 1024. | ||
1552 | """ | ||
1553 | blocks, remainder = divmod(count, BLOCKSIZE) | ||
1554 | if remainder: | ||
1555 | blocks += 1 | ||
1556 | return blocks * BLOCKSIZE | ||
1557 | |||
1558 | def isreg(self): | ||
1559 | return self.type in REGULAR_TYPES | ||
1560 | def isfile(self): | ||
1561 | return self.isreg() | ||
1562 | def isdir(self): | ||
1563 | return self.type == DIRTYPE | ||
1564 | def issym(self): | ||
1565 | return self.type == SYMTYPE | ||
1566 | def islnk(self): | ||
1567 | return self.type == LNKTYPE | ||
1568 | def ischr(self): | ||
1569 | return self.type == CHRTYPE | ||
1570 | def isblk(self): | ||
1571 | return self.type == BLKTYPE | ||
1572 | def isfifo(self): | ||
1573 | return self.type == FIFOTYPE | ||
1574 | def issparse(self): | ||
1575 | return self.sparse is not None | ||
1576 | def isdev(self): | ||
1577 | return self.type in (CHRTYPE, BLKTYPE, FIFOTYPE) | ||
1578 | # class TarInfo | ||
1579 | |||
1580 | class TarFile(object): | ||
1581 | """The TarFile Class provides an interface to tar archives. | ||
1582 | """ | ||
1583 | |||
1584 | debug = 0 # May be set from 0 (no msgs) to 3 (all msgs) | ||
1585 | |||
1586 | dereference = False # If true, add content of linked file to the | ||
1587 | # tar file, else the link. | ||
1588 | |||
1589 | ignore_zeros = False # If true, skips empty or invalid blocks and | ||
1590 | # continues processing. | ||
1591 | |||
1592 | errorlevel = 1 # If 0, fatal errors only appear in debug | ||
1593 | # messages (if debug >= 0). If > 0, errors | ||
1594 | # are passed to the caller as exceptions. | ||
1595 | |||
1596 | format = DEFAULT_FORMAT # The format to use when creating an archive. | ||
1597 | |||
1598 | encoding = ENCODING # Encoding for 8-bit character strings. | ||
1599 | |||
1600 | errors = None # Error handler for unicode conversion. | ||
1601 | |||
1602 | tarinfo = TarInfo # The default TarInfo class to use. | ||
1603 | |||
1604 | fileobject = ExFileObject # The default ExFileObject class to use. | ||
1605 | |||
1606 | def __init__(self, name=None, mode="r", fileobj=None, format=None, | ||
1607 | tarinfo=None, dereference=None, ignore_zeros=None, encoding=None, | ||
1608 | errors="surrogateescape", pax_headers=None, debug=None, errorlevel=None): | ||
1609 | """Open an (uncompressed) tar archive `name'. `mode' is either 'r' to | ||
1610 | read from an existing archive, 'a' to append data to an existing | ||
1611 | file or 'w' to create a new file overwriting an existing one. `mode' | ||
1612 | defaults to 'r'. | ||
1613 | If `fileobj' is given, it is used for reading or writing data. If it | ||
1614 | can be determined, `mode' is overridden by `fileobj's mode. | ||
1615 | `fileobj' is not closed when TarFile is closed. | ||
1616 | """ | ||
1617 | if len(mode) > 1 or mode not in "raw": | ||
1618 | raise ValueError("mode must be 'r', 'a' or 'w'") | ||
1619 | self.mode = mode | ||
1620 | self._mode = {"r": "rb", "a": "r+b", "w": "wb"}[mode] | ||
1621 | |||
1622 | if not fileobj: | ||
1623 | if self.mode == "a" and not os.path.exists(name): | ||
1624 | # Create nonexistent files in append mode. | ||
1625 | self.mode = "w" | ||
1626 | self._mode = "wb" | ||
1627 | fileobj = bltn_open(name, self._mode) | ||
1628 | self._extfileobj = False | ||
1629 | else: | ||
1630 | if name is None and hasattr(fileobj, "name"): | ||
1631 | name = fileobj.name | ||
1632 | if hasattr(fileobj, "mode"): | ||
1633 | self._mode = fileobj.mode | ||
1634 | self._extfileobj = True | ||
1635 | self.name = os.path.abspath(name) if name else None | ||
1636 | self.fileobj = fileobj | ||
1637 | |||
1638 | # Init attributes. | ||
1639 | if format is not None: | ||
1640 | self.format = format | ||
1641 | if tarinfo is not None: | ||
1642 | self.tarinfo = tarinfo | ||
1643 | if dereference is not None: | ||
1644 | self.dereference = dereference | ||
1645 | if ignore_zeros is not None: | ||
1646 | self.ignore_zeros = ignore_zeros | ||
1647 | if encoding is not None: | ||
1648 | self.encoding = encoding | ||
1649 | self.errors = errors | ||
1650 | |||
1651 | if pax_headers is not None and self.format == PAX_FORMAT: | ||
1652 | self.pax_headers = pax_headers | ||
1653 | else: | ||
1654 | self.pax_headers = {} | ||
1655 | |||
1656 | if debug is not None: | ||
1657 | self.debug = debug | ||
1658 | if errorlevel is not None: | ||
1659 | self.errorlevel = errorlevel | ||
1660 | |||
1661 | # Init datastructures. | ||
1662 | self.closed = False | ||
1663 | self.members = [] # list of members as TarInfo objects | ||
1664 | self._loaded = False # flag if all members have been read | ||
1665 | self.offset = self.fileobj.tell() | ||
1666 | # current position in the archive file | ||
1667 | self.inodes = {} # dictionary caching the inodes of | ||
1668 | # archive members already added | ||
1669 | |||
1670 | try: | ||
1671 | if self.mode == "r": | ||
1672 | self.firstmember = None | ||
1673 | self.firstmember = self.next() | ||
1674 | |||
1675 | if self.mode == "a": | ||
1676 | # Move to the end of the archive, | ||
1677 | # before the first empty block. | ||
1678 | while True: | ||
1679 | self.fileobj.seek(self.offset) | ||
1680 | try: | ||
1681 | tarinfo = self.tarinfo.fromtarfile(self) | ||
1682 | self.members.append(tarinfo) | ||
1683 | except EOFHeaderError: | ||
1684 | self.fileobj.seek(self.offset) | ||
1685 | break | ||
1686 | except HeaderError as e: | ||
1687 | raise ReadError(str(e)) | ||
1688 | |||
1689 | if self.mode in "aw": | ||
1690 | self._loaded = True | ||
1691 | |||
1692 | if self.pax_headers: | ||
1693 | buf = self.tarinfo.create_pax_global_header(self.pax_headers.copy()) | ||
1694 | self.fileobj.write(buf) | ||
1695 | self.offset += len(buf) | ||
1696 | except: | ||
1697 | if not self._extfileobj: | ||
1698 | self.fileobj.close() | ||
1699 | self.closed = True | ||
1700 | raise | ||
1701 | |||
1702 | #-------------------------------------------------------------------------- | ||
1703 | # Below are the classmethods which act as alternate constructors to the | ||
1704 | # TarFile class. The open() method is the only one that is needed for | ||
1705 | # public use; it is the "super"-constructor and is able to select an | ||
1706 | # adequate "sub"-constructor for a particular compression using the mapping | ||
1707 | # from OPEN_METH. | ||
1708 | # | ||
1709 | # This concept allows one to subclass TarFile without losing the comfort of | ||
1710 | # the super-constructor. A sub-constructor is registered and made available | ||
1711 | # by adding it to the mapping in OPEN_METH. | ||
1712 | |||
1713 | @classmethod | ||
1714 | def open(cls, name=None, mode="r", fileobj=None, bufsize=RECORDSIZE, **kwargs): | ||
1715 | """Open a tar archive for reading, writing or appending. Return | ||
1716 | an appropriate TarFile class. | ||
1717 | |||
1718 | mode: | ||
1719 | 'r' or 'r:*' open for reading with transparent compression | ||
1720 | 'r:' open for reading exclusively uncompressed | ||
1721 | 'r:gz' open for reading with gzip compression | ||
1722 | 'r:bz2' open for reading with bzip2 compression | ||
1723 | 'a' or 'a:' open for appending, creating the file if necessary | ||
1724 | 'w' or 'w:' open for writing without compression | ||
1725 | 'w:gz' open for writing with gzip compression | ||
1726 | 'w:bz2' open for writing with bzip2 compression | ||
1727 | |||
1728 | 'r|*' open a stream of tar blocks with transparent compression | ||
1729 | 'r|' open an uncompressed stream of tar blocks for reading | ||
1730 | 'r|gz' open a gzip compressed stream of tar blocks | ||
1731 | 'r|bz2' open a bzip2 compressed stream of tar blocks | ||
1732 | 'w|' open an uncompressed stream for writing | ||
1733 | 'w|gz' open a gzip compressed stream for writing | ||
1734 | 'w|bz2' open a bzip2 compressed stream for writing | ||
1735 | """ | ||
1736 | |||
1737 | if not name and not fileobj: | ||
1738 | raise ValueError("nothing to open") | ||
1739 | |||
1740 | if mode in ("r", "r:*"): | ||
1741 | # Find out which *open() is appropriate for opening the file. | ||
1742 | for comptype in cls.OPEN_METH: | ||
1743 | func = getattr(cls, cls.OPEN_METH[comptype]) | ||
1744 | if fileobj is not None: | ||
1745 | saved_pos = fileobj.tell() | ||
1746 | try: | ||
1747 | return func(name, "r", fileobj, **kwargs) | ||
1748 | except (ReadError, CompressionError) as e: | ||
1749 | if fileobj is not None: | ||
1750 | fileobj.seek(saved_pos) | ||
1751 | continue | ||
1752 | raise ReadError("file could not be opened successfully") | ||
1753 | |||
1754 | elif ":" in mode: | ||
1755 | filemode, comptype = mode.split(":", 1) | ||
1756 | filemode = filemode or "r" | ||
1757 | comptype = comptype or "tar" | ||
1758 | |||
1759 | # Select the *open() function according to | ||
1760 | # given compression. | ||
1761 | if comptype in cls.OPEN_METH: | ||
1762 | func = getattr(cls, cls.OPEN_METH[comptype]) | ||
1763 | else: | ||
1764 | raise CompressionError("unknown compression type %r" % comptype) | ||
1765 | return func(name, filemode, fileobj, **kwargs) | ||
1766 | |||
1767 | elif "|" in mode: | ||
1768 | filemode, comptype = mode.split("|", 1) | ||
1769 | filemode = filemode or "r" | ||
1770 | comptype = comptype or "tar" | ||
1771 | |||
1772 | if filemode not in "rw": | ||
1773 | raise ValueError("mode must be 'r' or 'w'") | ||
1774 | |||
1775 | stream = _Stream(name, filemode, comptype, fileobj, bufsize) | ||
1776 | try: | ||
1777 | t = cls(name, filemode, stream, **kwargs) | ||
1778 | except: | ||
1779 | stream.close() | ||
1780 | raise | ||
1781 | t._extfileobj = False | ||
1782 | return t | ||
1783 | |||
1784 | elif mode in "aw": | ||
1785 | return cls.taropen(name, mode, fileobj, **kwargs) | ||
1786 | |||
1787 | raise ValueError("undiscernible mode") | ||
1788 | |||
1789 | @classmethod | ||
1790 | def taropen(cls, name, mode="r", fileobj=None, **kwargs): | ||
1791 | """Open uncompressed tar archive name for reading or writing. | ||
1792 | """ | ||
1793 | if len(mode) > 1 or mode not in "raw": | ||
1794 | raise ValueError("mode must be 'r', 'a' or 'w'") | ||
1795 | return cls(name, mode, fileobj, **kwargs) | ||
1796 | |||
1797 | @classmethod | ||
1798 | def gzopen(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs): | ||
1799 | """Open gzip compressed tar archive name for reading or writing. | ||
1800 | Appending is not allowed. | ||
1801 | """ | ||
1802 | if len(mode) > 1 or mode not in "rw": | ||
1803 | raise ValueError("mode must be 'r' or 'w'") | ||
1804 | |||
1805 | try: | ||
1806 | import gzip | ||
1807 | gzip.GzipFile | ||
1808 | except (ImportError, AttributeError): | ||
1809 | raise CompressionError("gzip module is not available") | ||
1810 | |||
1811 | extfileobj = fileobj is not None | ||
1812 | try: | ||
1813 | fileobj = gzip.GzipFile(name, mode + "b", compresslevel, fileobj) | ||
1814 | t = cls.taropen(name, mode, fileobj, **kwargs) | ||
1815 | except IOError: | ||
1816 | if not extfileobj and fileobj is not None: | ||
1817 | fileobj.close() | ||
1818 | if fileobj is None: | ||
1819 | raise | ||
1820 | raise ReadError("not a gzip file") | ||
1821 | except: | ||
1822 | if not extfileobj and fileobj is not None: | ||
1823 | fileobj.close() | ||
1824 | raise | ||
1825 | t._extfileobj = extfileobj | ||
1826 | return t | ||
1827 | |||
1828 | @classmethod | ||
1829 | def bz2open(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs): | ||
1830 | """Open bzip2 compressed tar archive name for reading or writing. | ||
1831 | Appending is not allowed. | ||
1832 | """ | ||
1833 | if len(mode) > 1 or mode not in "rw": | ||
1834 | raise ValueError("mode must be 'r' or 'w'") | ||
1835 | |||
1836 | try: | ||
1837 | import bz2 | ||
1838 | except ImportError: | ||
1839 | raise CompressionError("bz2 module is not available") | ||
1840 | |||
1841 | if fileobj is not None: | ||
1842 | fileobj = _BZ2Proxy(fileobj, mode) | ||
1843 | else: | ||
1844 | fileobj = bz2.BZ2File(name, mode, compresslevel=compresslevel) | ||
1845 | |||
1846 | try: | ||
1847 | t = cls.taropen(name, mode, fileobj, **kwargs) | ||
1848 | except (IOError, EOFError): | ||
1849 | fileobj.close() | ||
1850 | raise ReadError("not a bzip2 file") | ||
1851 | t._extfileobj = False | ||
1852 | return t | ||
1853 | |||
1854 | # All *open() methods are registered here. | ||
1855 | OPEN_METH = { | ||
1856 | "tar": "taropen", # uncompressed tar | ||
1857 | "gz": "gzopen", # gzip compressed tar | ||
1858 | "bz2": "bz2open" # bzip2 compressed tar | ||
1859 | } | ||
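# As described earlier, a subclass can register an extra
# sub-constructor by extending this mapping (sketch; xz support is
# hypothetical for this module and needs Python 3.3+):
#
#   class XZTarFile(TarFile):
#       @classmethod
#       def xzopen(cls, name, mode="r", fileobj=None, **kwargs):
#           import lzma
#           fileobj = lzma.LZMAFile(fileobj or name, mode)
#           return cls.taropen(name, mode, fileobj, **kwargs)
#       OPEN_METH = dict(TarFile.OPEN_METH, xz="xzopen")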
1860 | |||
1861 | #-------------------------------------------------------------------------- | ||
1862 | # The public methods which TarFile provides: | ||
1863 | |||
1864 | def close(self): | ||
1865 | """Close the TarFile. In write-mode, two finishing zero blocks are | ||
1866 | appended to the archive. | ||
1867 | """ | ||
1868 | if self.closed: | ||
1869 | return | ||
1870 | |||
1871 | if self.mode in "aw": | ||
1872 | self.fileobj.write(NUL * (BLOCKSIZE * 2)) | ||
1873 | self.offset += (BLOCKSIZE * 2) | ||
1874 | # fill up the end with zero-blocks | ||
1875 | # (like option -b20 for tar does) | ||
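# E.g. an archive whose members end at offset 6656 gets the two
# zero blocks (advancing the offset to 7680) plus 2560 NUL bytes
# of padding, one full 10240-byte record (RECORDSIZE = 20 blocks).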
1876 | blocks, remainder = divmod(self.offset, RECORDSIZE) | ||
1877 | if remainder > 0: | ||
1878 | self.fileobj.write(NUL * (RECORDSIZE - remainder)) | ||
1879 | |||
1880 | if not self._extfileobj: | ||
1881 | self.fileobj.close() | ||
1882 | self.closed = True | ||
1883 | |||
1884 | def getmember(self, name): | ||
1885 | """Return a TarInfo object for member `name'. If `name' can not be | ||
1886 | found in the archive, KeyError is raised. If a member occurs more | ||
1887 | than once in the archive, its last occurrence is assumed to be the | ||
1888 | most up-to-date version. | ||
1889 | """ | ||
1890 | tarinfo = self._getmember(name) | ||
1891 | if tarinfo is None: | ||
1892 | raise KeyError("filename %r not found" % name) | ||
1893 | return tarinfo | ||
1894 | |||
1895 | def getmembers(self): | ||
1896 | """Return the members of the archive as a list of TarInfo objects. The | ||
1897 | list has the same order as the members in the archive. | ||
1898 | """ | ||
1899 | self._check() | ||
1900 | if not self._loaded: # if we want to obtain a list of | ||
1901 | self._load() # all members, we first have to | ||
1902 | # scan the whole archive. | ||
1903 | return self.members | ||
1904 | |||
1905 | def getnames(self): | ||
1906 | """Return the members of the archive as a list of their names. It has | ||
1907 | the same order as the list returned by getmembers(). | ||
1908 | """ | ||
1909 | return [tarinfo.name for tarinfo in self.getmembers()] | ||
1910 | |||
1911 | def gettarinfo(self, name=None, arcname=None, fileobj=None): | ||
1912 | """Create a TarInfo object for either the file `name' or the file | ||
1913 | object `fileobj' (using os.fstat on its file descriptor). You can | ||
1914 | modify some of the TarInfo's attributes before you add it using | ||
1915 | addfile(). If given, `arcname' specifies an alternative name for the | ||
1916 | file in the archive. | ||
1917 | """ | ||
1918 | self._check("aw") | ||
1919 | |||
1920 | # When fileobj is given, replace name by | ||
1921 | # fileobj's real name. | ||
1922 | if fileobj is not None: | ||
1923 | name = fileobj.name | ||
1924 | |||
1925 | # Build the name of the member in the archive. | ||
1926 | # Backslashes are converted to forward slashes; | ||
1927 | # absolute paths are turned into relative paths. | ||
1928 | if arcname is None: | ||
1929 | arcname = name | ||
1930 | drv, arcname = os.path.splitdrive(arcname) | ||
1931 | arcname = arcname.replace(os.sep, "/") | ||
1932 | arcname = arcname.lstrip("/") | ||
1933 | |||
1934 | # Now, fill the TarInfo object with | ||
1935 | # information specific for the file. | ||
1936 | tarinfo = self.tarinfo() | ||
1937 | tarinfo.tarfile = self | ||
1938 | |||
1939 | # Use os.stat or os.lstat, depending on the platform | ||
1940 | # and on whether symlinks should be resolved. | ||
1941 | if fileobj is None: | ||
1942 | if hasattr(os, "lstat") and not self.dereference: | ||
1943 | statres = os.lstat(name) | ||
1944 | else: | ||
1945 | statres = os.stat(name) | ||
1946 | else: | ||
1947 | statres = os.fstat(fileobj.fileno()) | ||
1948 | linkname = "" | ||
1949 | |||
1950 | stmd = statres.st_mode | ||
1951 | if stat.S_ISREG(stmd): | ||
1952 | inode = (statres.st_ino, statres.st_dev) | ||
1953 | if not self.dereference and statres.st_nlink > 1 and \ | ||
1954 | inode in self.inodes and arcname != self.inodes[inode]: | ||
1955 | # Is it a hardlink to an already | ||
1956 | # archived file? | ||
1957 | type = LNKTYPE | ||
1958 | linkname = self.inodes[inode] | ||
1959 | else: | ||
1960 | # The inode is added only if it's valid. | ||
1961 | # For win32 it is always 0. | ||
1962 | type = REGTYPE | ||
1963 | if inode[0]: | ||
1964 | self.inodes[inode] = arcname | ||
1965 | elif stat.S_ISDIR(stmd): | ||
1966 | type = DIRTYPE | ||
1967 | elif stat.S_ISFIFO(stmd): | ||
1968 | type = FIFOTYPE | ||
1969 | elif stat.S_ISLNK(stmd): | ||
1970 | type = SYMTYPE | ||
1971 | linkname = os.readlink(name) | ||
1972 | elif stat.S_ISCHR(stmd): | ||
1973 | type = CHRTYPE | ||
1974 | elif stat.S_ISBLK(stmd): | ||
1975 | type = BLKTYPE | ||
1976 | else: | ||
1977 | return None | ||
1978 | |||
1979 | # Fill the TarInfo object with all | ||
1980 | # information we can get. | ||
1981 | tarinfo.name = arcname | ||
1982 | tarinfo.mode = stmd | ||
1983 | tarinfo.uid = statres.st_uid | ||
1984 | tarinfo.gid = statres.st_gid | ||
1985 | if type == REGTYPE: | ||
1986 | tarinfo.size = statres.st_size | ||
1987 | else: | ||
1988 | tarinfo.size = 0 | ||
1989 | tarinfo.mtime = statres.st_mtime | ||
1990 | tarinfo.type = type | ||
1991 | tarinfo.linkname = linkname | ||
1992 | if pwd: | ||
1993 | try: | ||
1994 | tarinfo.uname = pwd.getpwuid(tarinfo.uid)[0] | ||
1995 | except KeyError: | ||
1996 | pass | ||
1997 | if grp: | ||
1998 | try: | ||
1999 | tarinfo.gname = grp.getgrgid(tarinfo.gid)[0] | ||
2000 | except KeyError: | ||
2001 | pass | ||
2002 | |||
2003 | if type in (CHRTYPE, BLKTYPE): | ||
2004 | if hasattr(os, "major") and hasattr(os, "minor"): | ||
2005 | tarinfo.devmajor = os.major(statres.st_rdev) | ||
2006 | tarinfo.devminor = os.minor(statres.st_rdev) | ||
2007 | return tarinfo | ||
2008 | |||
2009 | def list(self, verbose=True): | ||
2010 | """Print a table of contents to sys.stdout. If `verbose' is False, only | ||
2011 | the names of the members are printed. If it is True, an `ls -l'-like | ||
2012 | output is produced. | ||
2013 | """ | ||
2014 | self._check() | ||
2015 | |||
2016 | for tarinfo in self: | ||
2017 | if verbose: | ||
2018 | print(filemode(tarinfo.mode), end=' ') | ||
2019 | print("%s/%s" % (tarinfo.uname or tarinfo.uid, | ||
2020 | tarinfo.gname or tarinfo.gid), end=' ') | ||
2021 | if tarinfo.ischr() or tarinfo.isblk(): | ||
2022 | print("%10s" % ("%d,%d" \ | ||
2023 | % (tarinfo.devmajor, tarinfo.devminor)), end=' ') | ||
2024 | else: | ||
2025 | print("%10d" % tarinfo.size, end=' ') | ||
2026 | print("%d-%02d-%02d %02d:%02d:%02d" \ | ||
2027 | % time.localtime(tarinfo.mtime)[:6], end=' ') | ||
2028 | |||
2029 | print(tarinfo.name + ("/" if tarinfo.isdir() else ""), end=' ') | ||
2030 | |||
2031 | if verbose: | ||
2032 | if tarinfo.issym(): | ||
2033 | print("->", tarinfo.linkname, end=' ') | ||
2034 | if tarinfo.islnk(): | ||
2035 | print("link to", tarinfo.linkname, end=' ') | ||
2036 | print() | ||
2037 | |||
2038 | def add(self, name, arcname=None, recursive=True, exclude=None, filter=None): | ||
2039 | """Add the file `name' to the archive. `name' may be any type of file | ||
2040 | (directory, fifo, symbolic link, etc.). If given, `arcname' | ||
2041 | specifies an alternative name for the file in the archive. | ||
2042 | Directories are added recursively by default. This can be avoided by | ||
2043 | setting `recursive' to False. `exclude' is a function that should | ||
2044 | return True for each filename to be excluded. `filter' is a function | ||
2045 | that expects a TarInfo object argument and returns the changed | ||
2046 | TarInfo object; if it returns None, the TarInfo object will be | ||
2047 | excluded from the archive. | ||
2048 | """ | ||
2049 | self._check("aw") | ||
2050 | |||
2051 | if arcname is None: | ||
2052 | arcname = name | ||
2053 | |||
2054 | # Exclude pathnames. | ||
2055 | if exclude is not None: | ||
2056 | import warnings | ||
2057 | warnings.warn("use the filter argument instead", | ||
2058 | DeprecationWarning, 2) | ||
2059 | if exclude(name): | ||
2060 | self._dbg(2, "tarfile: Excluded %r" % name) | ||
2061 | return | ||
2062 | |||
2063 | # Skip if somebody tries to archive the archive... | ||
2064 | if self.name is not None and os.path.abspath(name) == self.name: | ||
2065 | self._dbg(2, "tarfile: Skipped %r" % name) | ||
2066 | return | ||
2067 | |||
2068 | self._dbg(1, name) | ||
2069 | |||
2070 | # Create a TarInfo object from the file. | ||
2071 | tarinfo = self.gettarinfo(name, arcname) | ||
2072 | |||
2073 | if tarinfo is None: | ||
2074 | self._dbg(1, "tarfile: Unsupported type %r" % name) | ||
2075 | return | ||
2076 | |||
2077 | # Change or exclude the TarInfo object. | ||
2078 | if filter is not None: | ||
2079 | tarinfo = filter(tarinfo) | ||
2080 | if tarinfo is None: | ||
2081 | self._dbg(2, "tarfile: Excluded %r" % name) | ||
2082 | return | ||
2083 | |||
2084 | # Append the tar header and data to the archive. | ||
2085 | if tarinfo.isreg(): | ||
2086 | f = bltn_open(name, "rb") | ||
2087 | self.addfile(tarinfo, f) | ||
2088 | f.close() | ||
2089 | |||
2090 | elif tarinfo.isdir(): | ||
2091 | self.addfile(tarinfo) | ||
2092 | if recursive: | ||
2093 | for f in os.listdir(name): | ||
2094 | self.add(os.path.join(name, f), os.path.join(arcname, f), | ||
2095 | recursive, exclude, filter=filter) | ||
2096 | |||
2097 | else: | ||
2098 | self.addfile(tarinfo) | ||
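# Example filter (illustrative; `tf' is a TarFile opened for
# writing and "src" is a hypothetical path):
#
#   def sanitize(tarinfo):
#       if tarinfo.name.endswith(".pyc"):
#           return None                  # exclude from the archive
#       tarinfo.uid = tarinfo.gid = 0    # normalize ownership
#       return tarinfo
#
#   tf.add("src", filter=sanitize)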
2099 | |||
2100 | def addfile(self, tarinfo, fileobj=None): | ||
2101 | """Add the TarInfo object `tarinfo' to the archive. If `fileobj' is | ||
2102 | given, tarinfo.size bytes are read from it and added to the archive. | ||
2103 | You can create TarInfo objects using gettarinfo(). | ||
2104 | On Windows platforms, `fileobj' should always be opened with mode | ||
2105 | 'rb' to avoid problems with the reported file size. | ||
2106 | """ | ||
2107 | self._check("aw") | ||
2108 | |||
2109 | tarinfo = copy.copy(tarinfo) | ||
2110 | |||
2111 | buf = tarinfo.tobuf(self.format, self.encoding, self.errors) | ||
2112 | self.fileobj.write(buf) | ||
2113 | self.offset += len(buf) | ||
2114 | |||
2115 | # If there's data to follow, append it. | ||
2116 | if fileobj is not None: | ||
2117 | copyfileobj(fileobj, self.fileobj, tarinfo.size) | ||
2118 | blocks, remainder = divmod(tarinfo.size, BLOCKSIZE) | ||
2119 | if remainder > 0: | ||
2120 | self.fileobj.write(NUL * (BLOCKSIZE - remainder)) | ||
2121 | blocks += 1 | ||
2122 | self.offset += blocks * BLOCKSIZE | ||
2123 | |||
2124 | self.members.append(tarinfo) | ||
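# gettarinfo() and addfile() are typically combined when the header
# needs tweaking before it is written (sketch; `tf' is a TarFile
# opened for writing, "data.bin" is a hypothetical path):
#
#   ti = tf.gettarinfo("data.bin", arcname="pkg/data.bin")
#   ti.uname = ti.gname = "root"
#   with open("data.bin", "rb") as f:
#       tf.addfile(ti, f)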
2125 | |||
2126 | def extractall(self, path=".", members=None): | ||
2127 | """Extract all members from the archive to the current working | ||
2128 | directory and set owner, modification time and permissions on | ||
2129 | directories afterwards. `path' specifies a different directory | ||
2130 | to extract to. `members' is optional and must be a subset of the | ||
2131 | list returned by getmembers(). | ||
2132 | """ | ||
2133 | directories = [] | ||
2134 | |||
2135 | if members is None: | ||
2136 | members = self | ||
2137 | |||
2138 | for tarinfo in members: | ||
2139 | if tarinfo.isdir(): | ||
2140 | # Extract directories with a safe mode. | ||
2141 | directories.append(tarinfo) | ||
2142 | tarinfo = copy.copy(tarinfo) | ||
2143 | tarinfo.mode = 0o700 | ||
2144 | # Do not set attributes on directories yet; that is done further down. | ||
2145 | self.extract(tarinfo, path, set_attrs=not tarinfo.isdir()) | ||
2146 | |||
2147 | # Reverse sort directories. | ||
2148 | directories.sort(key=lambda a: a.name) | ||
2149 | directories.reverse() | ||
2150 | |||
2151 | # Set correct owner, mtime and filemode on directories. | ||
2152 | for tarinfo in directories: | ||
2153 | dirpath = os.path.join(path, tarinfo.name) | ||
2154 | try: | ||
2155 | self.chown(tarinfo, dirpath) | ||
2156 | self.utime(tarinfo, dirpath) | ||
2157 | self.chmod(tarinfo, dirpath) | ||
2158 | except ExtractError as e: | ||
2159 | if self.errorlevel > 1: | ||
2160 | raise | ||
2161 | else: | ||
2162 | self._dbg(1, "tarfile: %s" % e) | ||
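# Illustrative: extract only the members below "docs/" (the member
# names and target path are hypothetical):
#
#   members = [m for m in tf.getmembers()
#              if m.name.startswith("docs/")]
#   tf.extractall(path="/tmp/out", members=members)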
2163 | |||
2164 | def extract(self, member, path="", set_attrs=True): | ||
2165 | """Extract a member from the archive to the current working directory, | ||
2166 | using its full name. Its file information is extracted as accurately | ||
2167 | as possible. `member' may be a filename or a TarInfo object. You can | ||
2168 | specify a different directory using `path'. File attributes (owner, | ||
2169 | mtime, mode) are set unless `set_attrs' is False. | ||
2170 | """ | ||
2171 | self._check("r") | ||
2172 | |||
2173 | if isinstance(member, str): | ||
2174 | tarinfo = self.getmember(member) | ||
2175 | else: | ||
2176 | tarinfo = member | ||
2177 | |||
2178 | # Prepare the link target for makelink(). | ||
2179 | if tarinfo.islnk(): | ||
2180 | tarinfo._link_target = os.path.join(path, tarinfo.linkname) | ||
2181 | |||
2182 | try: | ||
2183 | self._extract_member(tarinfo, os.path.join(path, tarinfo.name), | ||
2184 | set_attrs=set_attrs) | ||
2185 | except EnvironmentError as e: | ||
2186 | if self.errorlevel > 0: | ||
2187 | raise | ||
2188 | else: | ||
2189 | if e.filename is None: | ||
2190 | self._dbg(1, "tarfile: %s" % e.strerror) | ||
2191 | else: | ||
2192 | self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename)) | ||
2193 | except ExtractError as e: | ||
2194 | if self.errorlevel > 1: | ||
2195 | raise | ||
2196 | else: | ||
2197 | self._dbg(1, "tarfile: %s" % e) | ||
2198 | |||
2199 | def extractfile(self, member): | ||
2200 | """Extract a member from the archive as a file object. `member' may be | ||
2201 | a filename or a TarInfo object. If `member' is a regular file, a | ||
2202 | file-like object is returned. If `member' is a link, a file-like | ||
2203 | object is constructed from the link's target. If `member' is none of | ||
2204 | the above, None is returned. | ||
2205 | The file-like object is read-only and provides the following | ||
2206 | methods: read(), readline(), readlines(), seek() and tell() | ||
2207 | """ | ||
2208 | self._check("r") | ||
2209 | |||
2210 | if isinstance(member, str): | ||
2211 | tarinfo = self.getmember(member) | ||
2212 | else: | ||
2213 | tarinfo = member | ||
2214 | |||
2215 | if tarinfo.isreg(): | ||
2216 | return self.fileobject(self, tarinfo) | ||
2217 | |||
2218 | elif tarinfo.type not in SUPPORTED_TYPES: | ||
2219 | # If a member's type is unknown, it is treated as a | ||
2220 | # regular file. | ||
2221 | return self.fileobject(self, tarinfo) | ||
2222 | |||
2223 | elif tarinfo.islnk() or tarinfo.issym(): | ||
2224 | if isinstance(self.fileobj, _Stream): | ||
2225 | # A small but ugly workaround for the case that someone tries | ||
2226 | # to extract a (sym)link as a file-object from a non-seekable | ||
2227 | # stream of tar blocks. | ||
2228 | raise StreamError("cannot extract (sym)link as file object") | ||
2229 | else: | ||
2230 | # A (sym)link's file object is its target's file object. | ||
2231 | return self.extractfile(self._find_link_target(tarinfo)) | ||
2232 | else: | ||
2233 | # If there's no data associated with the member (directory, chrdev, | ||
2234 | # blkdev, etc.), return None instead of a file object. | ||
2235 | return None | ||
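# Reading a member without writing it to disk (illustrative;
# "docs/README" is a hypothetical member name):
#
#   f = tf.extractfile("docs/README")
#   if f is not None:           # None for directories, devices, ...
#       data = f.read()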
2236 | |||
2237 | def _extract_member(self, tarinfo, targetpath, set_attrs=True): | ||
2238 | """Extract the TarInfo object tarinfo to a physical | ||
2239 | file called targetpath. | ||
2240 | """ | ||
2241 | # Fetch the TarInfo object for the given name | ||
2242 | # and build the destination pathname, replacing | ||
2243 | # forward slashes with platform-specific separators. | ||
2244 | targetpath = targetpath.rstrip("/") | ||
2245 | targetpath = targetpath.replace("/", os.sep) | ||
2246 | |||
2247 | # Create all upper directories. | ||
2248 | upperdirs = os.path.dirname(targetpath) | ||
2249 | if upperdirs and not os.path.exists(upperdirs): | ||
2250 | # Create directories that are not part of the archive with | ||
2251 | # default permissions. | ||
2252 | os.makedirs(upperdirs) | ||
2253 | |||
2254 | if tarinfo.islnk() or tarinfo.issym(): | ||
2255 | self._dbg(1, "%s -> %s" % (tarinfo.name, tarinfo.linkname)) | ||
2256 | else: | ||
2257 | self._dbg(1, tarinfo.name) | ||
2258 | |||
2259 | if tarinfo.isreg(): | ||
2260 | self.makefile(tarinfo, targetpath) | ||
2261 | elif tarinfo.isdir(): | ||
2262 | self.makedir(tarinfo, targetpath) | ||
2263 | elif tarinfo.isfifo(): | ||
2264 | self.makefifo(tarinfo, targetpath) | ||
2265 | elif tarinfo.ischr() or tarinfo.isblk(): | ||
2266 | self.makedev(tarinfo, targetpath) | ||
2267 | elif tarinfo.islnk() or tarinfo.issym(): | ||
2268 | self.makelink(tarinfo, targetpath) | ||
2269 | elif tarinfo.type not in SUPPORTED_TYPES: | ||
2270 | self.makeunknown(tarinfo, targetpath) | ||
2271 | else: | ||
2272 | self.makefile(tarinfo, targetpath) | ||
2273 | |||
2274 | if set_attrs: | ||
2275 | self.chown(tarinfo, targetpath) | ||
2276 | if not tarinfo.issym(): | ||
2277 | self.chmod(tarinfo, targetpath) | ||
2278 | self.utime(tarinfo, targetpath) | ||
2279 | |||
2280 | #-------------------------------------------------------------------------- | ||
2281 | # Below are the different file methods. They are called via | ||
2282 | # _extract_member() when extract() is called. They can be replaced in a | ||
2283 | # subclass to implement other functionality. | ||
2284 | |||
2285 | def makedir(self, tarinfo, targetpath): | ||
2286 | """Make a directory called targetpath. | ||
2287 | """ | ||
2288 | try: | ||
2289 | # Use a safe mode for the directory, the real mode is set | ||
2290 | # later in _extract_member(). | ||
2291 | os.mkdir(targetpath, 0o700) | ||
2292 | except EnvironmentError as e: | ||
2293 | if e.errno != errno.EEXIST: | ||
2294 | raise | ||
2295 | |||
2296 | def makefile(self, tarinfo, targetpath): | ||
2297 | """Make a file called targetpath. | ||
2298 | """ | ||
2299 | source = self.fileobj | ||
2300 | source.seek(tarinfo.offset_data) | ||
2301 | target = bltn_open(targetpath, "wb") | ||
2302 | if tarinfo.sparse is not None: | ||
2303 | for offset, size in tarinfo.sparse: | ||
2304 | target.seek(offset) | ||
2305 | copyfileobj(source, target, size) | ||
2306 | else: | ||
2307 | copyfileobj(source, target, tarinfo.size) | ||
2308 | target.seek(tarinfo.size) | ||
2309 | target.truncate() | ||
2310 | target.close() | ||
2311 | |||
2312 | def makeunknown(self, tarinfo, targetpath): | ||
2313 | """Make a file from a TarInfo object with an unknown type | ||
2314 | at targetpath. | ||
2315 | """ | ||
2316 | self.makefile(tarinfo, targetpath) | ||
2317 | self._dbg(1, "tarfile: Unknown file type %r, " \ | ||
2318 | "extracted as regular file." % tarinfo.type) | ||
2319 | |||
2320 | def makefifo(self, tarinfo, targetpath): | ||
2321 | """Make a fifo called targetpath. | ||
2322 | """ | ||
2323 | if hasattr(os, "mkfifo"): | ||
2324 | os.mkfifo(targetpath) | ||
2325 | else: | ||
2326 | raise ExtractError("fifo not supported by system") | ||
2327 | |||
2328 | def makedev(self, tarinfo, targetpath): | ||
2329 | """Make a character or block device called targetpath. | ||
2330 | """ | ||
2331 | if not hasattr(os, "mknod") or not hasattr(os, "makedev"): | ||
2332 | raise ExtractError("special devices not supported by system") | ||
2333 | |||
2334 | mode = tarinfo.mode | ||
2335 | if tarinfo.isblk(): | ||
2336 | mode |= stat.S_IFBLK | ||
2337 | else: | ||
2338 | mode |= stat.S_IFCHR | ||
2339 | |||
2340 | os.mknod(targetpath, mode, | ||
2341 | os.makedev(tarinfo.devmajor, tarinfo.devminor)) | ||
2342 | |||
2343 | def makelink(self, tarinfo, targetpath): | ||
2344 | """Make a (symbolic) link called targetpath. If it cannot be created | ||
2345 | (platform limitation), we try to make a copy of the referenced file | ||
2346 | instead of a link. | ||
2347 | """ | ||
2348 | try: | ||
2349 | # For systems that support symbolic and hard links. | ||
2350 | if tarinfo.issym(): | ||
2351 | os.symlink(tarinfo.linkname, targetpath) | ||
2352 | else: | ||
2353 | # See extract(). | ||
2354 | if os.path.exists(tarinfo._link_target): | ||
2355 | os.link(tarinfo._link_target, targetpath) | ||
2356 | else: | ||
2357 | self._extract_member(self._find_link_target(tarinfo), | ||
2358 | targetpath) | ||
2359 | except symlink_exception: | ||
2360 | # The platform cannot create the (sym)link, e.g. because it | ||
2361 | # lacks symlink support or the call failed for lack of | ||
2362 | # privileges. Fall back to extracting a copy of the | ||
2363 | # referenced file in place of the link, as the docstring | ||
2364 | # above promises. | ||
2365 | try: | ||
2366 | self._extract_member(self._find_link_target(tarinfo), | ||
2367 | targetpath) | ||
2368 | except KeyError: | ||
2369 | raise ExtractError("unable to resolve link inside archive") | ||
2371 | |||
2372 | def chown(self, tarinfo, targetpath): | ||
2373 | """Set owner of targetpath according to tarinfo. | ||
2374 | """ | ||
2375 | if pwd and hasattr(os, "geteuid") and os.geteuid() == 0: | ||
2376 | # We have to be root to do so. | ||
2377 | try: | ||
2378 | g = grp.getgrnam(tarinfo.gname)[2] | ||
2379 | except KeyError: | ||
2380 | g = tarinfo.gid | ||
2381 | try: | ||
2382 | u = pwd.getpwnam(tarinfo.uname)[2] | ||
2383 | except KeyError: | ||
2384 | u = tarinfo.uid | ||
2385 | try: | ||
2386 | if tarinfo.issym() and hasattr(os, "lchown"): | ||
2387 | os.lchown(targetpath, u, g) | ||
2388 | else: | ||
2389 | if sys.platform != "os2emx": | ||
2390 | os.chown(targetpath, u, g) | ||
2391 | except EnvironmentError as e: | ||
2392 | raise ExtractError("could not change owner") | ||
2393 | |||
2394 | def chmod(self, tarinfo, targetpath): | ||
2395 | """Set file permissions of targetpath according to tarinfo. | ||
2396 | """ | ||
2397 | if hasattr(os, 'chmod'): | ||
2398 | try: | ||
2399 | os.chmod(targetpath, tarinfo.mode) | ||
2400 | except EnvironmentError as e: | ||
2401 | raise ExtractError("could not change mode") | ||
2402 | |||
2403 | def utime(self, tarinfo, targetpath): | ||
2404 | """Set modification time of targetpath according to tarinfo. | ||
2405 | """ | ||
2406 | if not hasattr(os, 'utime'): | ||
2407 | return | ||
2408 | try: | ||
2409 | os.utime(targetpath, (tarinfo.mtime, tarinfo.mtime)) | ||
2410 | except EnvironmentError as e: | ||
2411 | raise ExtractError("could not change modification time") | ||
2412 | |||
2413 | #-------------------------------------------------------------------------- | ||
2414 | def next(self): | ||
2415 | """Return the next member of the archive as a TarInfo object, when | ||
2416 | TarFile is opened for reading. Return None if there are no | ||
2417 | more members available. | ||
2418 | """ | ||
2419 | self._check("ra") | ||
2420 | if self.firstmember is not None: | ||
2421 | m = self.firstmember | ||
2422 | self.firstmember = None | ||
2423 | return m | ||
2424 | |||
2425 | # Read the next block. | ||
2426 | self.fileobj.seek(self.offset) | ||
2427 | tarinfo = None | ||
2428 | while True: | ||
2429 | try: | ||
2430 | tarinfo = self.tarinfo.fromtarfile(self) | ||
2431 | except EOFHeaderError as e: | ||
2432 | if self.ignore_zeros: | ||
2433 | self._dbg(2, "0x%X: %s" % (self.offset, e)) | ||
2434 | self.offset += BLOCKSIZE | ||
2435 | continue | ||
2436 | except InvalidHeaderError as e: | ||
2437 | if self.ignore_zeros: | ||
2438 | self._dbg(2, "0x%X: %s" % (self.offset, e)) | ||
2439 | self.offset += BLOCKSIZE | ||
2440 | continue | ||
2441 | elif self.offset == 0: | ||
2442 | raise ReadError(str(e)) | ||
2443 | except EmptyHeaderError: | ||
2444 | if self.offset == 0: | ||
2445 | raise ReadError("empty file") | ||
2446 | except TruncatedHeaderError as e: | ||
2447 | if self.offset == 0: | ||
2448 | raise ReadError(str(e)) | ||
2449 | except SubsequentHeaderError as e: | ||
2450 | raise ReadError(str(e)) | ||
2451 | break | ||
2452 | |||
2453 | if tarinfo is not None: | ||
2454 | self.members.append(tarinfo) | ||
2455 | else: | ||
2456 | self._loaded = True | ||
2457 | |||
2458 | return tarinfo | ||
2459 | |||
2460 | #-------------------------------------------------------------------------- | ||
2461 | # Little helper methods: | ||
2462 | |||
2463 | def _getmember(self, name, tarinfo=None, normalize=False): | ||
2464 | """Find an archive member by name from bottom to top. | ||
2465 | If tarinfo is given, it is used as the starting point. | ||
2466 | """ | ||
2467 | # Ensure that all members have been loaded. | ||
2468 | members = self.getmembers() | ||
2469 | |||
2470 | # Limit the member search list up to tarinfo. | ||
2471 | if tarinfo is not None: | ||
2472 | members = members[:members.index(tarinfo)] | ||
2473 | |||
2474 | if normalize: | ||
2475 | name = os.path.normpath(name) | ||
2476 | |||
2477 | for member in reversed(members): | ||
2478 | if normalize: | ||
2479 | member_name = os.path.normpath(member.name) | ||
2480 | else: | ||
2481 | member_name = member.name | ||
2482 | |||
2483 | if name == member_name: | ||
2484 | return member | ||
2485 | |||
2486 | def _load(self): | ||
2487 | """Read through the entire archive file and look for readable | ||
2488 | members. | ||
2489 | """ | ||
2490 | while True: | ||
2491 | tarinfo = self.next() | ||
2492 | if tarinfo is None: | ||
2493 | break | ||
2494 | self._loaded = True | ||
2495 | |||
2496 | def _check(self, mode=None): | ||
2497 | """Check if TarFile is still open, and if the operation's mode | ||
2498 | corresponds to TarFile's mode. | ||
2499 | """ | ||
2500 | if self.closed: | ||
2501 | raise IOError("%s is closed" % self.__class__.__name__) | ||
2502 | if mode is not None and self.mode not in mode: | ||
2503 | raise IOError("bad operation for mode %r" % self.mode) | ||
2504 | |||
2505 | def _find_link_target(self, tarinfo): | ||
2506 | """Find the target member of a symlink or hardlink member in the | ||
2507 | archive. | ||
2508 | """ | ||
2509 | if tarinfo.issym(): | ||
2510 | # Always search the entire archive. | ||
2511 | linkname = os.path.dirname(tarinfo.name) + "/" + tarinfo.linkname | ||
2512 | limit = None | ||
2513 | else: | ||
2514 | # Search the archive before the link, because a hard link is | ||
2515 | # just a reference to an already archived file. | ||
2516 | linkname = tarinfo.linkname | ||
2517 | limit = tarinfo | ||
2518 | |||
2519 | member = self._getmember(linkname, tarinfo=limit, normalize=True) | ||
2520 | if member is None: | ||
2521 | raise KeyError("linkname %r not found" % linkname) | ||
2522 | return member | ||
2523 | |||
2524 | def __iter__(self): | ||
2525 | """Provide an iterator object. | ||
2526 | """ | ||
2527 | if self._loaded: | ||
2528 | return iter(self.members) | ||
2529 | else: | ||
2530 | return TarIter(self) | ||
2531 | |||
2532 | def _dbg(self, level, msg): | ||
2533 | """Write debugging output to sys.stderr. | ||
2534 | """ | ||
2535 | if level <= self.debug: | ||
2536 | print(msg, file=sys.stderr) | ||
2537 | |||
2538 | def __enter__(self): | ||
2539 | self._check() | ||
2540 | return self | ||
2541 | |||
2542 | def __exit__(self, type, value, traceback): | ||
2543 | if type is None: | ||
2544 | self.close() | ||
2545 | else: | ||
2546 | # An exception occurred. We must not call close() because | ||
2547 | # it would try to write end-of-archive blocks and padding. | ||
2548 | if not self._extfileobj: | ||
2549 | self.fileobj.close() | ||
2550 | self.closed = True | ||
2551 | # class TarFile | ||
2552 | |||
2553 | class TarIter(object): | ||
2554 | """Iterator Class. | ||
2555 | |||
2556 | for tarinfo in TarFile(...): | ||
2557 | suite... | ||
2558 | """ | ||
2559 | |||
2560 | def __init__(self, tarfile): | ||
2561 | """Construct a TarIter object. | ||
2562 | """ | ||
2563 | self.tarfile = tarfile | ||
2564 | self.index = 0 | ||
2565 | def __iter__(self): | ||
2566 | """Return iterator object. | ||
2567 | """ | ||
2568 | return self | ||
2569 | |||
2570 | def __next__(self): | ||
2571 | """Return the next item using TarFile's next() method. | ||
2572 | When all members have been read, mark the TarFile as _loaded. | ||
2573 | """ | ||
2574 | # Fix for SF #1100429: Under rare circumstances it can | ||
2575 | # happen that getmembers() is called during iteration, | ||
2576 | # which will cause TarIter to stop prematurely. | ||
2577 | if not self.tarfile._loaded: | ||
2578 | tarinfo = self.tarfile.next() | ||
2579 | if not tarinfo: | ||
2580 | self.tarfile._loaded = True | ||
2581 | raise StopIteration | ||
2582 | else: | ||
2583 | try: | ||
2584 | tarinfo = self.tarfile.members[self.index] | ||
2585 | except IndexError: | ||
2586 | raise StopIteration | ||
2587 | self.index += 1 | ||
2588 | return tarinfo | ||
2589 | |||
2590 | next = __next__ # for Python 2.x | ||
2591 | |||
2592 | #-------------------- | ||
2593 | # exported functions | ||
2594 | #-------------------- | ||
2595 | def is_tarfile(name): | ||
2596 | """Return True if name points to a tar archive that we | ||
2597 | are able to handle, else return False. | ||
2598 | """ | ||
2599 | try: | ||
2600 | t = open(name) | ||
2601 | t.close() | ||
2602 | return True | ||
2603 | except TarError: | ||
2604 | return False | ||
2605 | |||
2606 | bltn_open = open | ||
2607 | open = TarFile.open | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/compat.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/compat.py new file mode 100644 index 0000000..09929b0 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/compat.py | |||
@@ -0,0 +1,1120 @@ | |||
1 | # -*- coding: utf-8 -*- | ||
2 | # | ||
3 | # Copyright (C) 2013-2017 Vinay Sajip. | ||
4 | # Licensed to the Python Software Foundation under a contributor agreement. | ||
5 | # See LICENSE.txt and CONTRIBUTORS.txt. | ||
6 | # | ||
7 | from __future__ import absolute_import | ||
8 | |||
9 | import os | ||
10 | import re | ||
11 | import sys | ||
12 | |||
13 | try: | ||
14 | import ssl | ||
15 | except ImportError: # pragma: no cover | ||
16 | ssl = None | ||
17 | |||
18 | if sys.version_info[0] < 3: # pragma: no cover | ||
19 | from StringIO import StringIO | ||
20 | string_types = basestring, | ||
21 | text_type = unicode | ||
22 | from types import FileType as file_type | ||
23 | import __builtin__ as builtins | ||
24 | import ConfigParser as configparser | ||
25 | from ._backport import shutil | ||
26 | from urlparse import urlparse, urlunparse, urljoin, urlsplit, urlunsplit | ||
27 | from urllib import (urlretrieve, quote as _quote, unquote, url2pathname, | ||
28 | pathname2url, ContentTooShortError, splittype) | ||
29 | |||
30 | def quote(s): | ||
31 | if isinstance(s, unicode): | ||
32 | s = s.encode('utf-8') | ||
33 | return _quote(s) | ||
34 | |||
35 | import urllib2 | ||
36 | from urllib2 import (Request, urlopen, URLError, HTTPError, | ||
37 | HTTPBasicAuthHandler, HTTPPasswordMgr, | ||
38 | HTTPHandler, HTTPRedirectHandler, | ||
39 | build_opener) | ||
40 | if ssl: | ||
41 | from urllib2 import HTTPSHandler | ||
42 | import httplib | ||
43 | import xmlrpclib | ||
44 | import Queue as queue | ||
45 | from HTMLParser import HTMLParser | ||
46 | import htmlentitydefs | ||
47 | raw_input = raw_input | ||
48 | from itertools import ifilter as filter | ||
49 | from itertools import ifilterfalse as filterfalse | ||
50 | |||
51 | _userprog = None | ||
52 | def splituser(host): | ||
53 | """splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'.""" | ||
54 | global _userprog | ||
55 | if _userprog is None: | ||
56 | _userprog = re.compile('^(.*)@(.*)$') | ||
58 | |||
59 | match = _userprog.match(host) | ||
60 | if match: return match.group(1, 2) | ||
61 | return None, host | ||
62 | |||
63 | else: # pragma: no cover | ||
64 | from io import StringIO | ||
65 | string_types = str, | ||
66 | text_type = str | ||
67 | from io import TextIOWrapper as file_type | ||
68 | import builtins | ||
69 | import configparser | ||
70 | import shutil | ||
71 | from urllib.parse import (urlparse, urlunparse, urljoin, splituser, quote, | ||
72 | unquote, urlsplit, urlunsplit, splittype) | ||
73 | from urllib.request import (urlopen, urlretrieve, Request, url2pathname, | ||
74 | pathname2url, | ||
75 | HTTPBasicAuthHandler, HTTPPasswordMgr, | ||
76 | HTTPHandler, HTTPRedirectHandler, | ||
77 | build_opener) | ||
78 | if ssl: | ||
79 | from urllib.request import HTTPSHandler | ||
80 | from urllib.error import HTTPError, URLError, ContentTooShortError | ||
81 | import http.client as httplib | ||
82 | import urllib.request as urllib2 | ||
83 | import xmlrpc.client as xmlrpclib | ||
84 | import queue | ||
85 | from html.parser import HTMLParser | ||
86 | import html.entities as htmlentitydefs | ||
87 | raw_input = input | ||
88 | from itertools import filterfalse | ||
89 | filter = filter | ||
90 | |||
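The block above gives 2.x and 3.x callers one set of names to import. A minimal sketch (not part of the vendored file) of how the aliases are typically used; normalise_url is a hypothetical helper:

    def normalise_url(url):
        # urlparse/urlunparse/quote come from the shim above, so this runs
        # unchanged on both major versions; quote() accepts text on 2.x too.
        parts = urlparse(url)
        return urlunparse(parts[:2] + (quote(parts.path),) + parts[3:])

    print(normalise_url('https://example.com/some path/file.txt'))
    # -> https://example.com/some%20path/file.txt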
91 | try: | ||
92 | from ssl import match_hostname, CertificateError | ||
93 | except ImportError: # pragma: no cover | ||
94 | class CertificateError(ValueError): | ||
95 | pass | ||
96 | |||
97 | |||
98 | def _dnsname_match(dn, hostname, max_wildcards=1): | ||
99 | """Matching according to RFC 6125, section 6.4.3 | ||
100 | |||
101 | http://tools.ietf.org/html/rfc6125#section-6.4.3 | ||
102 | """ | ||
103 | pats = [] | ||
104 | if not dn: | ||
105 | return False | ||
106 | |||
107 | parts = dn.split('.') | ||
108 | leftmost, remainder = parts[0], parts[1:] | ||
109 | |||
110 | wildcards = leftmost.count('*') | ||
111 | if wildcards > max_wildcards: | ||
112 | # Issue #17980: avoid denials of service by refusing more | ||
113 | # than one wildcard per fragment. A survey of established | ||
114 | # policy among SSL implementations showed it to be a | ||
115 | # reasonable choice. | ||
116 | raise CertificateError( | ||
117 | "too many wildcards in certificate DNS name: " + repr(dn)) | ||
118 | |||
119 | # speed up common case w/o wildcards | ||
120 | if not wildcards: | ||
121 | return dn.lower() == hostname.lower() | ||
122 | |||
123 | # RFC 6125, section 6.4.3, subitem 1. | ||
124 | # The client SHOULD NOT attempt to match a presented identifier in which | ||
125 | # the wildcard character comprises a label other than the left-most label. | ||
126 | if leftmost == '*': | ||
127 | # When '*' is a fragment by itself, it matches a non-empty dotless | ||
128 | # fragment. | ||
129 | pats.append('[^.]+') | ||
130 | elif leftmost.startswith('xn--') or hostname.startswith('xn--'): | ||
131 | # RFC 6125, section 6.4.3, subitem 3. | ||
132 | # The client SHOULD NOT attempt to match a presented identifier | ||
133 | # where the wildcard character is embedded within an A-label or | ||
134 | # U-label of an internationalized domain name. | ||
135 | pats.append(re.escape(leftmost)) | ||
136 | else: | ||
137 | # Otherwise, '*' matches any dotless string, e.g. www* | ||
138 | pats.append(re.escape(leftmost).replace(r'\*', '[^.]*')) | ||
139 | |||
140 | # add the remaining fragments, ignore any wildcards | ||
141 | for frag in remainder: | ||
142 | pats.append(re.escape(frag)) | ||
143 | |||
144 | pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE) | ||
145 | return pat.match(hostname) | ||
146 | |||
147 | |||
148 | def match_hostname(cert, hostname): | ||
149 | """Verify that *cert* (in decoded format as returned by | ||
150 | SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125 | ||
151 | rules are followed, but IP addresses are not accepted for *hostname*. | ||
152 | |||
153 | CertificateError is raised on failure. On success, the function | ||
154 | returns nothing. | ||
155 | """ | ||
156 | if not cert: | ||
157 | raise ValueError("empty or no certificate, match_hostname needs a " | ||
158 | "SSL socket or SSL context with either " | ||
159 | "CERT_OPTIONAL or CERT_REQUIRED") | ||
160 | dnsnames = [] | ||
161 | san = cert.get('subjectAltName', ()) | ||
162 | for key, value in san: | ||
163 | if key == 'DNS': | ||
164 | if _dnsname_match(value, hostname): | ||
165 | return | ||
166 | dnsnames.append(value) | ||
167 | if not dnsnames: | ||
168 | # The subject is only checked when there is no dNSName entry | ||
169 | # in subjectAltName | ||
170 | for sub in cert.get('subject', ()): | ||
171 | for key, value in sub: | ||
172 | # XXX according to RFC 2818, the most specific Common Name | ||
173 | # must be used. | ||
174 | if key == 'commonName': | ||
175 | if _dnsname_match(value, hostname): | ||
176 | return | ||
177 | dnsnames.append(value) | ||
178 | if len(dnsnames) > 1: | ||
179 | raise CertificateError("hostname %r " | ||
180 | "doesn't match either of %s" | ||
181 | % (hostname, ', '.join(map(repr, dnsnames)))) | ||
182 | elif len(dnsnames) == 1: | ||
183 | raise CertificateError("hostname %r " | ||
184 | "doesn't match %r" | ||
185 | % (hostname, dnsnames[0])) | ||
186 | else: | ||
187 | raise CertificateError("no appropriate commonName or " | ||
188 | "subjectAltName fields were found") | ||
189 | |||
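To see the matcher in action, here is an illustrative check against a hand-built dict in the shape returned by SSLSocket.getpeercert(); the hostnames are made up:

    cert = {'subjectAltName': (('DNS', '*.example.com'),
                               ('DNS', 'example.com'))}
    match_hostname(cert, 'www.example.com')   # returns None on success
    try:
        match_hostname(cert, 'www.other.org')
    except CertificateError as exc:
        print('rejected:', exc)

Note that the single left-most wildcard matches exactly one label, so '*.example.com' does not match 'a.b.example.com'.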
190 | |||
191 | try: | ||
192 | from types import SimpleNamespace as Container | ||
193 | except ImportError: # pragma: no cover | ||
194 | class Container(object): | ||
195 | """ | ||
196 | A generic container for when multiple values need to be returned | ||
197 | """ | ||
198 | def __init__(self, **kwargs): | ||
199 | self.__dict__.update(kwargs) | ||
200 | |||
201 | |||
202 | try: | ||
203 | from shutil import which | ||
204 | except ImportError: # pragma: no cover | ||
205 | # Implementation from Python 3.3 | ||
206 | def which(cmd, mode=os.F_OK | os.X_OK, path=None): | ||
207 | """Given a command, mode, and a PATH string, return the path which | ||
208 | conforms to the given mode on the PATH, or None if there is no such | ||
209 | file. | ||
210 | |||
211 | `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result | ||
212 | of os.environ.get("PATH"), or can be overridden with a custom search | ||
213 | path. | ||
214 | |||
215 | """ | ||
216 | # Check that a given file can be accessed with the correct mode. | ||
217 | # Additionally check that `file` is not a directory, as on Windows | ||
218 | # directories pass the os.access check. | ||
219 | def _access_check(fn, mode): | ||
220 | return (os.path.exists(fn) and os.access(fn, mode) | ||
221 | and not os.path.isdir(fn)) | ||
222 | |||
223 | # If we're given a path with a directory part, look it up directly rather | ||
224 | # than referring to PATH directories. This includes checking relative to the | ||
225 | # current directory, e.g. ./script | ||
226 | if os.path.dirname(cmd): | ||
227 | if _access_check(cmd, mode): | ||
228 | return cmd | ||
229 | return None | ||
230 | |||
231 | if path is None: | ||
232 | path = os.environ.get("PATH", os.defpath) | ||
233 | if not path: | ||
234 | return None | ||
235 | path = path.split(os.pathsep) | ||
236 | |||
237 | if sys.platform == "win32": | ||
238 | # The current directory takes precedence on Windows. | ||
239 | if not os.curdir in path: | ||
240 | path.insert(0, os.curdir) | ||
241 | |||
242 | # PATHEXT is necessary to check on Windows. | ||
243 | pathext = os.environ.get("PATHEXT", "").split(os.pathsep) | ||
244 | # See if the given file matches any of the expected path extensions. | ||
245 | # This will allow us to short circuit when given "python.exe". | ||
246 | # If it does match, only test that one, otherwise we have to try | ||
247 | # others. | ||
248 | if any(cmd.lower().endswith(ext.lower()) for ext in pathext): | ||
249 | files = [cmd] | ||
250 | else: | ||
251 | files = [cmd + ext for ext in pathext] | ||
252 | else: | ||
253 | # On other platforms you don't have things like PATHEXT to tell you | ||
254 | # what file suffixes are executable, so just pass on cmd as-is. | ||
255 | files = [cmd] | ||
256 | |||
257 | seen = set() | ||
258 | for dir in path: | ||
259 | normdir = os.path.normcase(dir) | ||
260 | if not normdir in seen: | ||
261 | seen.add(normdir) | ||
262 | for thefile in files: | ||
263 | name = os.path.join(dir, thefile) | ||
264 | if _access_check(name, mode): | ||
265 | return name | ||
266 | return None | ||
267 | |||
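A quick smoke test of which(), whether the stdlib version or the backport is in use; the results depend on the machine it runs on:

    print(which('python'))        # e.g. '/usr/bin/python', or None if absent
    print(which('no-such-tool'))  # None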
268 | |||
269 | # ZipFile is a context manager in 2.7, but not in 2.6 | ||
270 | |||
271 | from zipfile import ZipFile as BaseZipFile | ||
272 | |||
273 | if hasattr(BaseZipFile, '__enter__'): # pragma: no cover | ||
274 | ZipFile = BaseZipFile | ||
275 | else: # pragma: no cover | ||
276 | from zipfile import ZipExtFile as BaseZipExtFile | ||
277 | |||
278 | class ZipExtFile(BaseZipExtFile): | ||
279 | def __init__(self, base): | ||
280 | self.__dict__.update(base.__dict__) | ||
281 | |||
282 | def __enter__(self): | ||
283 | return self | ||
284 | |||
285 | def __exit__(self, *exc_info): | ||
286 | self.close() | ||
287 | # return None, so if an exception occurred, it will propagate | ||
288 | |||
289 | class ZipFile(BaseZipFile): | ||
290 | def __enter__(self): | ||
291 | return self | ||
292 | |||
293 | def __exit__(self, *exc_info): | ||
294 | self.close() | ||
295 | # return None, so if an exception occurred, it will propagate | ||
296 | |||
297 | def open(self, *args, **kwargs): | ||
298 | base = BaseZipFile.open(self, *args, **kwargs) | ||
299 | return ZipExtFile(base) | ||
300 | |||
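With the shim in place, ZipFile can be used as a context manager uniformly. A throwaway in-memory archive, for illustration:

    import io
    buf = io.BytesIO()
    with ZipFile(buf, 'w') as zf:          # archive is finalised on exit
        zf.writestr('hello.txt', 'hello')
    with ZipFile(io.BytesIO(buf.getvalue())) as zf:
        print(zf.namelist())               # ['hello.txt']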
301 | try: | ||
302 | from platform import python_implementation | ||
303 | except ImportError: # pragma: no cover | ||
304 | def python_implementation(): | ||
305 | """Return a string identifying the Python implementation.""" | ||
306 | if 'PyPy' in sys.version: | ||
307 | return 'PyPy' | ||
308 | if os.name == 'java': | ||
309 | return 'Jython' | ||
310 | if sys.version.startswith('IronPython'): | ||
311 | return 'IronPython' | ||
312 | return 'CPython' | ||
313 | |||
314 | try: | ||
315 | import sysconfig | ||
316 | except ImportError: # pragma: no cover | ||
317 | from ._backport import sysconfig | ||
318 | |||
319 | try: | ||
320 | callable = callable | ||
321 | except NameError: # pragma: no cover | ||
322 | from collections import Callable | ||
323 | |||
324 | def callable(obj): | ||
325 | return isinstance(obj, Callable) | ||
326 | |||
327 | |||
328 | try: | ||
329 | fsencode = os.fsencode | ||
330 | fsdecode = os.fsdecode | ||
331 | except AttributeError: # pragma: no cover | ||
332 | # Issue #99: on some systems (e.g. containerised), | ||
333 | # sys.getfilesystemencoding() returns None, and we need a real value, | ||
334 | # so fall back to utf-8. From the CPython 2.7 docs relating to Unix and | ||
335 | # sys.getfilesystemencoding(): the return value is "the user’s preference | ||
336 | # according to the result of nl_langinfo(CODESET), or None if the | ||
337 | # nl_langinfo(CODESET) failed." | ||
338 | _fsencoding = sys.getfilesystemencoding() or 'utf-8' | ||
339 | if _fsencoding == 'mbcs': | ||
340 | _fserrors = 'strict' | ||
341 | else: | ||
342 | _fserrors = 'surrogateescape' | ||
343 | |||
344 | def fsencode(filename): | ||
345 | if isinstance(filename, bytes): | ||
346 | return filename | ||
347 | elif isinstance(filename, text_type): | ||
348 | return filename.encode(_fsencoding, _fserrors) | ||
349 | else: | ||
350 | raise TypeError("expect bytes or str, not %s" % | ||
351 | type(filename).__name__) | ||
352 | |||
353 | def fsdecode(filename): | ||
354 | if isinstance(filename, text_type): | ||
355 | return filename | ||
356 | elif isinstance(filename, bytes): | ||
357 | return filename.decode(_fsencoding, _fserrors) | ||
358 | else: | ||
359 | raise TypeError("expect bytes or str, not %s" % | ||
360 | type(filename).__name__) | ||
361 | |||
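A round trip through the two helpers (shown under Python 3, where they are normally just os.fsencode/os.fsdecode): text is encoded with the detected filesystem encoding, bytes pass through unchanged:

    p = fsencode('café.txt')     # text -> bytes, e.g. b'caf\xc3\xa9.txt' on UTF-8
    print(fsdecode(p))           # bytes -> text: 'café.txt'
    assert fsencode(p) == p      # bytes input is returned unchanged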
362 | try: | ||
363 | from tokenize import detect_encoding | ||
364 | except ImportError: # pragma: no cover | ||
365 | from codecs import BOM_UTF8, lookup | ||
366 | import re | ||
367 | |||
368 | cookie_re = re.compile(r"coding[:=]\s*([-\w.]+)") | ||
369 | |||
370 | def _get_normal_name(orig_enc): | ||
371 | """Imitates get_normal_name in tokenizer.c.""" | ||
372 | # Only care about the first 12 characters. | ||
373 | enc = orig_enc[:12].lower().replace("_", "-") | ||
374 | if enc == "utf-8" or enc.startswith("utf-8-"): | ||
375 | return "utf-8" | ||
376 | if enc in ("latin-1", "iso-8859-1", "iso-latin-1") or \ | ||
377 | enc.startswith(("latin-1-", "iso-8859-1-", "iso-latin-1-")): | ||
378 | return "iso-8859-1" | ||
379 | return orig_enc | ||
380 | |||
381 | def detect_encoding(readline): | ||
382 | """ | ||
383 | The detect_encoding() function is used to detect the encoding that should | ||
384 | be used to decode a Python source file. It requires one argument, readline, | ||
385 | in the same way as the tokenize() generator. | ||
386 | |||
387 | It will call readline a maximum of twice, and return the encoding used | ||
388 | (as a string) and a list of any lines (left as bytes) it has read in. | ||
389 | |||
390 | It detects the encoding from the presence of a utf-8 bom or an encoding | ||
391 | cookie as specified in pep-0263. If both a bom and a cookie are present, | ||
392 | but disagree, a SyntaxError will be raised. If the encoding cookie is an | ||
393 | invalid charset, raise a SyntaxError. Note that if a utf-8 bom is found, | ||
394 | 'utf-8-sig' is returned. | ||
395 | |||
396 | If no encoding is specified, then the default of 'utf-8' will be returned. | ||
397 | """ | ||
398 | try: | ||
399 | filename = readline.__self__.name | ||
400 | except AttributeError: | ||
401 | filename = None | ||
402 | bom_found = False | ||
403 | encoding = None | ||
404 | default = 'utf-8' | ||
405 | def read_or_stop(): | ||
406 | try: | ||
407 | return readline() | ||
408 | except StopIteration: | ||
409 | return b'' | ||
410 | |||
411 | def find_cookie(line): | ||
412 | try: | ||
413 | # Decode as UTF-8. Either the line is an encoding declaration, | ||
414 | # in which case it should be pure ASCII, or it must be UTF-8 | ||
415 | # per default encoding. | ||
416 | line_string = line.decode('utf-8') | ||
417 | except UnicodeDecodeError: | ||
418 | msg = "invalid or missing encoding declaration" | ||
419 | if filename is not None: | ||
420 | msg = '{} for {!r}'.format(msg, filename) | ||
421 | raise SyntaxError(msg) | ||
422 | |||
423 | matches = cookie_re.findall(line_string) | ||
424 | if not matches: | ||
425 | return None | ||
426 | encoding = _get_normal_name(matches[0]) | ||
427 | try: | ||
428 | codec = lookup(encoding) | ||
429 | except LookupError: | ||
430 | # This behaviour mimics the Python interpreter | ||
431 | if filename is None: | ||
432 | msg = "unknown encoding: " + encoding | ||
433 | else: | ||
434 | msg = "unknown encoding for {!r}: {}".format(filename, | ||
435 | encoding) | ||
436 | raise SyntaxError(msg) | ||
437 | |||
438 | if bom_found: | ||
439 | if codec.name != 'utf-8': | ||
440 | # This behaviour mimics the Python interpreter | ||
441 | if filename is None: | ||
442 | msg = 'encoding problem: utf-8' | ||
443 | else: | ||
444 | msg = 'encoding problem for {!r}: utf-8'.format(filename) | ||
445 | raise SyntaxError(msg) | ||
446 | encoding += '-sig' | ||
447 | return encoding | ||
448 | |||
449 | first = read_or_stop() | ||
450 | if first.startswith(BOM_UTF8): | ||
451 | bom_found = True | ||
452 | first = first[3:] | ||
453 | default = 'utf-8-sig' | ||
454 | if not first: | ||
455 | return default, [] | ||
456 | |||
457 | encoding = find_cookie(first) | ||
458 | if encoding: | ||
459 | return encoding, [first] | ||
460 | |||
461 | second = read_or_stop() | ||
462 | if not second: | ||
463 | return default, [first] | ||
464 | |||
465 | encoding = find_cookie(second) | ||
466 | if encoding: | ||
467 | return encoding, [first, second] | ||
468 | |||
469 | return default, [first, second] | ||
470 | |||
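Exercising the detector (stdlib or fallback) against an in-memory source file: it reads at most two lines and reports any PEP 263 cookie it finds:

    import io
    src = b'# -*- coding: iso-8859-1 -*-\nx = 1\n'
    enc, lines = detect_encoding(io.BytesIO(src).readline)
    print(enc)     # 'iso-8859-1'
    print(lines)   # [b'# -*- coding: iso-8859-1 -*-\n']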
471 | # For converting & <-> &amp; etc. | ||
472 | try: | ||
473 | from html import escape | ||
474 | except ImportError: | ||
475 | from cgi import escape | ||
476 | if sys.version_info[:2] < (3, 4): | ||
477 | unescape = HTMLParser().unescape | ||
478 | else: | ||
479 | from html import unescape | ||
480 | |||
481 | try: | ||
482 | from collections import ChainMap | ||
483 | except ImportError: # pragma: no cover | ||
484 | from collections import MutableMapping | ||
485 | |||
486 | try: | ||
487 | from reprlib import recursive_repr as _recursive_repr | ||
488 | except ImportError: | ||
489 | def _recursive_repr(fillvalue='...'): | ||
490 | ''' | ||
491 | Decorator to make a repr function return fillvalue for a recursive | ||
492 | call | ||
493 | ''' | ||
494 | |||
495 | def decorating_function(user_function): | ||
496 | repr_running = set() | ||
497 | |||
498 | def wrapper(self): | ||
499 | key = id(self), get_ident() | ||
500 | if key in repr_running: | ||
501 | return fillvalue | ||
502 | repr_running.add(key) | ||
503 | try: | ||
504 | result = user_function(self) | ||
505 | finally: | ||
506 | repr_running.discard(key) | ||
507 | return result | ||
508 | |||
509 | # Can't use functools.wraps() here because of bootstrap issues | ||
510 | wrapper.__module__ = getattr(user_function, '__module__') | ||
511 | wrapper.__doc__ = getattr(user_function, '__doc__') | ||
512 | wrapper.__name__ = getattr(user_function, '__name__') | ||
513 | wrapper.__annotations__ = getattr(user_function, '__annotations__', {}) | ||
514 | return wrapper | ||
515 | |||
516 | return decorating_function | ||
517 | |||
518 | class ChainMap(MutableMapping): | ||
519 | ''' A ChainMap groups multiple dicts (or other mappings) together | ||
520 | to create a single, updateable view. | ||
521 | |||
522 | The underlying mappings are stored in a list. That list is public and can be | ||
523 | accessed or updated using the *maps* attribute. There is no other state. | ||
524 | |||
525 | Lookups search the underlying mappings successively until a key is found. | ||
526 | In contrast, writes, updates, and deletions only operate on the first | ||
527 | mapping. | ||
528 | |||
529 | ''' | ||
530 | |||
531 | def __init__(self, *maps): | ||
532 | '''Initialize a ChainMap by setting *maps* to the given mappings. | ||
533 | If no mappings are provided, a single empty dictionary is used. | ||
534 | |||
535 | ''' | ||
536 | self.maps = list(maps) or [{}] # always at least one map | ||
537 | |||
538 | def __missing__(self, key): | ||
539 | raise KeyError(key) | ||
540 | |||
541 | def __getitem__(self, key): | ||
542 | for mapping in self.maps: | ||
543 | try: | ||
544 | return mapping[key] # can't use 'key in mapping' with defaultdict | ||
545 | except KeyError: | ||
546 | pass | ||
547 | return self.__missing__(key) # support subclasses that define __missing__ | ||
548 | |||
549 | def get(self, key, default=None): | ||
550 | return self[key] if key in self else default | ||
551 | |||
552 | def __len__(self): | ||
553 | return len(set().union(*self.maps)) # reuses stored hash values if possible | ||
554 | |||
555 | def __iter__(self): | ||
556 | return iter(set().union(*self.maps)) | ||
557 | |||
558 | def __contains__(self, key): | ||
559 | return any(key in m for m in self.maps) | ||
560 | |||
561 | def __bool__(self): | ||
562 | return any(self.maps) | ||
563 | |||
564 | @_recursive_repr() | ||
565 | def __repr__(self): | ||
566 | return '{0.__class__.__name__}({1})'.format( | ||
567 | self, ', '.join(map(repr, self.maps))) | ||
568 | |||
569 | @classmethod | ||
570 | def fromkeys(cls, iterable, *args): | ||
571 | 'Create a ChainMap with a single dict created from the iterable.' | ||
572 | return cls(dict.fromkeys(iterable, *args)) | ||
573 | |||
574 | def copy(self): | ||
575 | 'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]' | ||
576 | return self.__class__(self.maps[0].copy(), *self.maps[1:]) | ||
577 | |||
578 | __copy__ = copy | ||
579 | |||
580 | def new_child(self): # like Django's Context.push() | ||
581 | 'New ChainMap with a new dict followed by all previous maps.' | ||
582 | return self.__class__({}, *self.maps) | ||
583 | |||
584 | @property | ||
585 | def parents(self): # like Django's Context.pop() | ||
586 | 'New ChainMap from maps[1:].' | ||
587 | return self.__class__(*self.maps[1:]) | ||
588 | |||
589 | def __setitem__(self, key, value): | ||
590 | self.maps[0][key] = value | ||
591 | |||
592 | def __delitem__(self, key): | ||
593 | try: | ||
594 | del self.maps[0][key] | ||
595 | except KeyError: | ||
596 | raise KeyError('Key not found in the first mapping: {!r}'.format(key)) | ||
597 | |||
598 | def popitem(self): | ||
599 | 'Remove and return an item pair from maps[0]. Raise KeyError if maps[0] is empty.' | ||
600 | try: | ||
601 | return self.maps[0].popitem() | ||
602 | except KeyError: | ||
603 | raise KeyError('No keys found in the first mapping.') | ||
604 | |||
605 | def pop(self, key, *args): | ||
606 | 'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].' | ||
607 | try: | ||
608 | return self.maps[0].pop(key, *args) | ||
609 | except KeyError: | ||
610 | raise KeyError('Key not found in the first mapping: {!r}'.format(key)) | ||
611 | |||
612 | def clear(self): | ||
613 | 'Clear maps[0], leaving maps[1:] intact.' | ||
614 | self.maps[0].clear() | ||
615 | |||
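The semantics worth remembering: lookups search the maps left to right, while writes and deletions touch only maps[0]. A small demonstration that holds for both the stdlib ChainMap and the backport above:

    defaults = {'colour': 'red', 'user': 'guest'}
    overrides = {'user': 'admin'}
    cm = ChainMap(overrides, defaults)
    print(cm['user'], cm['colour'])   # admin red
    cm['colour'] = 'blue'             # lands in overrides only
    print(defaults['colour'])         # still 'red'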
616 | try: | ||
617 | from importlib.util import cache_from_source # Python >= 3.4 | ||
618 | except ImportError: # pragma: no cover | ||
619 | try: | ||
620 | from imp import cache_from_source | ||
621 | except ImportError: # pragma: no cover | ||
622 | def cache_from_source(path, debug_override=None): | ||
623 | assert path.endswith('.py') | ||
624 | if debug_override is None: | ||
625 | debug_override = __debug__ | ||
626 | if debug_override: | ||
627 | suffix = 'c' | ||
628 | else: | ||
629 | suffix = 'o' | ||
630 | return path + suffix | ||
631 | |||
632 | try: | ||
633 | from collections import OrderedDict | ||
634 | except ImportError: # pragma: no cover | ||
635 | ## {{{ http://code.activestate.com/recipes/576693/ (r9) | ||
636 | # Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy. | ||
637 | # Passes Python2.7's test suite and incorporates all the latest updates. | ||
638 | try: | ||
639 | from thread import get_ident as _get_ident | ||
640 | except ImportError: | ||
641 | from dummy_thread import get_ident as _get_ident | ||
642 | |||
643 | try: | ||
644 | from _abcoll import KeysView, ValuesView, ItemsView | ||
645 | except ImportError: | ||
646 | pass | ||
647 | |||
648 | |||
649 | class OrderedDict(dict): | ||
650 | 'Dictionary that remembers insertion order' | ||
651 | # An inherited dict maps keys to values. | ||
652 | # The inherited dict provides __getitem__, __len__, __contains__, and get. | ||
653 | # The remaining methods are order-aware. | ||
654 | # Big-O running times for all methods are the same as for regular dictionaries. | ||
655 | |||
656 | # The internal self.__map dictionary maps keys to links in a doubly linked list. | ||
657 | # The circular doubly linked list starts and ends with a sentinel element. | ||
658 | # The sentinel element never gets deleted (this simplifies the algorithm). | ||
659 | # Each link is stored as a list of length three: [PREV, NEXT, KEY]. | ||
660 | |||
661 | def __init__(self, *args, **kwds): | ||
662 | '''Initialize an ordered dictionary. Signature is the same as for | ||
663 | regular dictionaries, but keyword arguments are not recommended | ||
664 | because their insertion order is arbitrary. | ||
665 | |||
666 | ''' | ||
667 | if len(args) > 1: | ||
668 | raise TypeError('expected at most 1 argument, got %d' % len(args)) | ||
669 | try: | ||
670 | self.__root | ||
671 | except AttributeError: | ||
672 | self.__root = root = [] # sentinel node | ||
673 | root[:] = [root, root, None] | ||
674 | self.__map = {} | ||
675 | self.__update(*args, **kwds) | ||
676 | |||
677 | def __setitem__(self, key, value, dict_setitem=dict.__setitem__): | ||
678 | 'od.__setitem__(i, y) <==> od[i]=y' | ||
679 | # Setting a new item creates a new link which goes at the end of the linked | ||
680 | # list, and the inherited dictionary is updated with the new key/value pair. | ||
681 | if key not in self: | ||
682 | root = self.__root | ||
683 | last = root[0] | ||
684 | last[1] = root[0] = self.__map[key] = [last, root, key] | ||
685 | dict_setitem(self, key, value) | ||
686 | |||
687 | def __delitem__(self, key, dict_delitem=dict.__delitem__): | ||
688 | 'od.__delitem__(y) <==> del od[y]' | ||
689 | # Deleting an existing item uses self.__map to find the link which is | ||
690 | # then removed by updating the links in the predecessor and successor nodes. | ||
691 | dict_delitem(self, key) | ||
692 | link_prev, link_next, key = self.__map.pop(key) | ||
693 | link_prev[1] = link_next | ||
694 | link_next[0] = link_prev | ||
695 | |||
696 | def __iter__(self): | ||
697 | 'od.__iter__() <==> iter(od)' | ||
698 | root = self.__root | ||
699 | curr = root[1] | ||
700 | while curr is not root: | ||
701 | yield curr[2] | ||
702 | curr = curr[1] | ||
703 | |||
704 | def __reversed__(self): | ||
705 | 'od.__reversed__() <==> reversed(od)' | ||
706 | root = self.__root | ||
707 | curr = root[0] | ||
708 | while curr is not root: | ||
709 | yield curr[2] | ||
710 | curr = curr[0] | ||
711 | |||
712 | def clear(self): | ||
713 | 'od.clear() -> None. Remove all items from od.' | ||
714 | try: | ||
715 | for node in self.__map.itervalues(): | ||
716 | del node[:] | ||
717 | root = self.__root | ||
718 | root[:] = [root, root, None] | ||
719 | self.__map.clear() | ||
720 | except AttributeError: | ||
721 | pass | ||
722 | dict.clear(self) | ||
723 | |||
724 | def popitem(self, last=True): | ||
725 | '''od.popitem() -> (k, v), return and remove a (key, value) pair. | ||
726 | Pairs are returned in LIFO order if last is true or FIFO order if false. | ||
727 | |||
728 | ''' | ||
729 | if not self: | ||
730 | raise KeyError('dictionary is empty') | ||
731 | root = self.__root | ||
732 | if last: | ||
733 | link = root[0] | ||
734 | link_prev = link[0] | ||
735 | link_prev[1] = root | ||
736 | root[0] = link_prev | ||
737 | else: | ||
738 | link = root[1] | ||
739 | link_next = link[1] | ||
740 | root[1] = link_next | ||
741 | link_next[0] = root | ||
742 | key = link[2] | ||
743 | del self.__map[key] | ||
744 | value = dict.pop(self, key) | ||
745 | return key, value | ||
746 | |||
747 | # -- the following methods do not depend on the internal structure -- | ||
748 | |||
749 | def keys(self): | ||
750 | 'od.keys() -> list of keys in od' | ||
751 | return list(self) | ||
752 | |||
753 | def values(self): | ||
754 | 'od.values() -> list of values in od' | ||
755 | return [self[key] for key in self] | ||
756 | |||
757 | def items(self): | ||
758 | 'od.items() -> list of (key, value) pairs in od' | ||
759 | return [(key, self[key]) for key in self] | ||
760 | |||
761 | def iterkeys(self): | ||
762 | 'od.iterkeys() -> an iterator over the keys in od' | ||
763 | return iter(self) | ||
764 | |||
765 | def itervalues(self): | ||
766 | 'od.itervalues() -> an iterator over the values in od' | ||
767 | for k in self: | ||
768 | yield self[k] | ||
769 | |||
770 | def iteritems(self): | ||
771 | 'od.iteritems() -> an iterator over the (key, value) items in od' | ||
772 | for k in self: | ||
773 | yield (k, self[k]) | ||
774 | |||
775 | def update(*args, **kwds): | ||
776 | '''od.update(E, **F) -> None. Update od from dict/iterable E and F. | ||
777 | |||
778 | If E is a dict instance, does: for k in E: od[k] = E[k] | ||
779 | If E has a .keys() method, does: for k in E.keys(): od[k] = E[k] | ||
780 | Or if E is an iterable of items, does: for k, v in E: od[k] = v | ||
781 | In either case, this is followed by: for k, v in F.items(): od[k] = v | ||
782 | |||
783 | ''' | ||
784 | if len(args) > 2: | ||
785 | raise TypeError('update() takes at most 2 positional ' | ||
786 | 'arguments (%d given)' % (len(args),)) | ||
787 | elif not args: | ||
788 | raise TypeError('update() takes at least 1 argument (0 given)') | ||
789 | self = args[0] | ||
790 | # Make progressively weaker assumptions about "other" | ||
791 | other = () | ||
792 | if len(args) == 2: | ||
793 | other = args[1] | ||
794 | if isinstance(other, dict): | ||
795 | for key in other: | ||
796 | self[key] = other[key] | ||
797 | elif hasattr(other, 'keys'): | ||
798 | for key in other.keys(): | ||
799 | self[key] = other[key] | ||
800 | else: | ||
801 | for key, value in other: | ||
802 | self[key] = value | ||
803 | for key, value in kwds.items(): | ||
804 | self[key] = value | ||
805 | |||
806 | __update = update # let subclasses override update without breaking __init__ | ||
807 | |||
808 | __marker = object() | ||
809 | |||
810 | def pop(self, key, default=__marker): | ||
811 | '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value. | ||
812 | If key is not found, d is returned if given, otherwise KeyError is raised. | ||
813 | |||
814 | ''' | ||
815 | if key in self: | ||
816 | result = self[key] | ||
817 | del self[key] | ||
818 | return result | ||
819 | if default is self.__marker: | ||
820 | raise KeyError(key) | ||
821 | return default | ||
822 | |||
823 | def setdefault(self, key, default=None): | ||
824 | 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' | ||
825 | if key in self: | ||
826 | return self[key] | ||
827 | self[key] = default | ||
828 | return default | ||
829 | |||
830 | def __repr__(self, _repr_running=None): | ||
831 | 'od.__repr__() <==> repr(od)' | ||
832 | if not _repr_running: _repr_running = {} | ||
833 | call_key = id(self), _get_ident() | ||
834 | if call_key in _repr_running: | ||
835 | return '...' | ||
836 | _repr_running[call_key] = 1 | ||
837 | try: | ||
838 | if not self: | ||
839 | return '%s()' % (self.__class__.__name__,) | ||
840 | return '%s(%r)' % (self.__class__.__name__, self.items()) | ||
841 | finally: | ||
842 | del _repr_running[call_key] | ||
843 | |||
844 | def __reduce__(self): | ||
845 | 'Return state information for pickling' | ||
846 | items = [[k, self[k]] for k in self] | ||
847 | inst_dict = vars(self).copy() | ||
848 | for k in vars(OrderedDict()): | ||
849 | inst_dict.pop(k, None) | ||
850 | if inst_dict: | ||
851 | return (self.__class__, (items,), inst_dict) | ||
852 | return self.__class__, (items,) | ||
853 | |||
854 | def copy(self): | ||
855 | 'od.copy() -> a shallow copy of od' | ||
856 | return self.__class__(self) | ||
857 | |||
858 | @classmethod | ||
859 | def fromkeys(cls, iterable, value=None): | ||
860 | '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S | ||
861 | and values equal to v (which defaults to None). | ||
862 | |||
863 | ''' | ||
864 | d = cls() | ||
865 | for key in iterable: | ||
866 | d[key] = value | ||
867 | return d | ||
868 | |||
869 | def __eq__(self, other): | ||
870 | '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive | ||
871 | while comparison to a regular mapping is order-insensitive. | ||
872 | |||
873 | ''' | ||
874 | if isinstance(other, OrderedDict): | ||
875 | return len(self)==len(other) and self.items() == other.items() | ||
876 | return dict.__eq__(self, other) | ||
877 | |||
878 | def __ne__(self, other): | ||
879 | return not self == other | ||
880 | |||
881 | # -- the following methods are only used in Python 2.7 -- | ||
882 | |||
883 | def viewkeys(self): | ||
884 | "od.viewkeys() -> a set-like object providing a view on od's keys" | ||
885 | return KeysView(self) | ||
886 | |||
887 | def viewvalues(self): | ||
888 | "od.viewvalues() -> an object providing a view on od's values" | ||
889 | return ValuesView(self) | ||
890 | |||
891 | def viewitems(self): | ||
892 | "od.viewitems() -> a set-like object providing a view on od's items" | ||
893 | return ItemsView(self) | ||
894 | |||
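Whichever implementation ends up bound to the name, the observable behaviour is the same: insertion order is preserved, and popitem() is LIFO by default. For instance:

    od = OrderedDict()
    od['one'] = 1
    od['two'] = 2
    od['three'] = 3
    print(list(od))                # ['one', 'two', 'three']
    print(od.popitem())            # ('three', 3)
    print(od.popitem(last=False))  # ('one', 1)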
895 | try: | ||
896 | from logging.config import BaseConfigurator, valid_ident | ||
897 | except ImportError: # pragma: no cover | ||
898 | IDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I) | ||
899 | |||
900 | |||
901 | def valid_ident(s): | ||
902 | m = IDENTIFIER.match(s) | ||
903 | if not m: | ||
904 | raise ValueError('Not a valid Python identifier: %r' % s) | ||
905 | return True | ||
906 | |||
907 | |||
908 | # The ConvertingXXX classes are wrappers around standard Python containers, | ||
909 | # and they serve to convert any suitable values in the container. The | ||
910 | # conversion converts base dicts, lists and tuples to their wrapped | ||
911 | # equivalents, whereas strings which match a conversion format are converted | ||
912 | # appropriately. | ||
913 | # | ||
914 | # Each wrapper should have a configurator attribute holding the actual | ||
915 | # configurator to use for conversion. | ||
916 | |||
917 | class ConvertingDict(dict): | ||
918 | """A converting dictionary wrapper.""" | ||
919 | |||
920 | def __getitem__(self, key): | ||
921 | value = dict.__getitem__(self, key) | ||
922 | result = self.configurator.convert(value) | ||
923 | #If the converted value is different, save for next time | ||
924 | if value is not result: | ||
925 | self[key] = result | ||
926 | if type(result) in (ConvertingDict, ConvertingList, | ||
927 | ConvertingTuple): | ||
928 | result.parent = self | ||
929 | result.key = key | ||
930 | return result | ||
931 | |||
932 | def get(self, key, default=None): | ||
933 | value = dict.get(self, key, default) | ||
934 | result = self.configurator.convert(value) | ||
935 | #If the converted value is different, save for next time | ||
936 | if value is not result: | ||
937 | self[key] = result | ||
938 | if type(result) in (ConvertingDict, ConvertingList, | ||
939 | ConvertingTuple): | ||
940 | result.parent = self | ||
941 | result.key = key | ||
942 | return result | ||
943 | |||
944 | def pop(self, key, default=None): | ||
945 | value = dict.pop(self, key, default) | ||
946 | result = self.configurator.convert(value) | ||
947 | if value is not result: | ||
948 | if type(result) in (ConvertingDict, ConvertingList, | ||
949 | ConvertingTuple): | ||
950 | result.parent = self | ||
951 | result.key = key | ||
952 | return result | ||
953 | |||
954 | class ConvertingList(list): | ||
955 | """A converting list wrapper.""" | ||
956 | def __getitem__(self, key): | ||
957 | value = list.__getitem__(self, key) | ||
958 | result = self.configurator.convert(value) | ||
959 | #If the converted value is different, save for next time | ||
960 | if value is not result: | ||
961 | self[key] = result | ||
962 | if type(result) in (ConvertingDict, ConvertingList, | ||
963 | ConvertingTuple): | ||
964 | result.parent = self | ||
965 | result.key = key | ||
966 | return result | ||
967 | |||
968 | def pop(self, idx=-1): | ||
969 | value = list.pop(self, idx) | ||
970 | result = self.configurator.convert(value) | ||
971 | if value is not result: | ||
972 | if type(result) in (ConvertingDict, ConvertingList, | ||
973 | ConvertingTuple): | ||
974 | result.parent = self | ||
975 | return result | ||
976 | |||
977 | class ConvertingTuple(tuple): | ||
978 | """A converting tuple wrapper.""" | ||
979 | def __getitem__(self, key): | ||
980 | value = tuple.__getitem__(self, key) | ||
981 | result = self.configurator.convert(value) | ||
982 | if value is not result: | ||
983 | if type(result) in (ConvertingDict, ConvertingList, | ||
984 | ConvertingTuple): | ||
985 | result.parent = self | ||
986 | result.key = key | ||
987 | return result | ||
988 | |||
989 | class BaseConfigurator(object): | ||
990 | """ | ||
991 | The configurator base class which defines some useful defaults. | ||
992 | """ | ||
993 | |||
994 | CONVERT_PATTERN = re.compile(r'^(?P<prefix>[a-z]+)://(?P<suffix>.*)$') | ||
995 | |||
996 | WORD_PATTERN = re.compile(r'^\s*(\w+)\s*') | ||
997 | DOT_PATTERN = re.compile(r'^\.\s*(\w+)\s*') | ||
998 | INDEX_PATTERN = re.compile(r'^\[\s*(\w+)\s*\]\s*') | ||
999 | DIGIT_PATTERN = re.compile(r'^\d+$') | ||
1000 | |||
1001 | value_converters = { | ||
1002 | 'ext' : 'ext_convert', | ||
1003 | 'cfg' : 'cfg_convert', | ||
1004 | } | ||
1005 | |||
1006 | # We might want to use a different one, e.g. importlib | ||
1007 | importer = staticmethod(__import__) | ||
1008 | |||
1009 | def __init__(self, config): | ||
1010 | self.config = ConvertingDict(config) | ||
1011 | self.config.configurator = self | ||
1012 | |||
1013 | def resolve(self, s): | ||
1014 | """ | ||
1015 | Resolve strings to objects using standard import and attribute | ||
1016 | syntax. | ||
1017 | """ | ||
1018 | name = s.split('.') | ||
1019 | used = name.pop(0) | ||
1020 | try: | ||
1021 | found = self.importer(used) | ||
1022 | for frag in name: | ||
1023 | used += '.' + frag | ||
1024 | try: | ||
1025 | found = getattr(found, frag) | ||
1026 | except AttributeError: | ||
1027 | self.importer(used) | ||
1028 | found = getattr(found, frag) | ||
1029 | return found | ||
1030 | except ImportError: | ||
1031 | e, tb = sys.exc_info()[1:] | ||
1032 | v = ValueError('Cannot resolve %r: %s' % (s, e)) | ||
1033 | v.__cause__, v.__traceback__ = e, tb | ||
1034 | raise v | ||
1035 | |||
1036 | def ext_convert(self, value): | ||
1037 | """Default converter for the ext:// protocol.""" | ||
1038 | return self.resolve(value) | ||
1039 | |||
1040 | def cfg_convert(self, value): | ||
1041 | """Default converter for the cfg:// protocol.""" | ||
1042 | rest = value | ||
1043 | m = self.WORD_PATTERN.match(rest) | ||
1044 | if m is None: | ||
1045 | raise ValueError("Unable to convert %r" % value) | ||
1046 | else: | ||
1047 | rest = rest[m.end():] | ||
1048 | d = self.config[m.groups()[0]] | ||
1049 | #print d, rest | ||
1050 | while rest: | ||
1051 | m = self.DOT_PATTERN.match(rest) | ||
1052 | if m: | ||
1053 | d = d[m.groups()[0]] | ||
1054 | else: | ||
1055 | m = self.INDEX_PATTERN.match(rest) | ||
1056 | if m: | ||
1057 | idx = m.groups()[0] | ||
1058 | if not self.DIGIT_PATTERN.match(idx): | ||
1059 | d = d[idx] | ||
1060 | else: | ||
1061 | try: | ||
1062 | n = int(idx) # try as number first (most likely) | ||
1063 | d = d[n] | ||
1064 | except TypeError: | ||
1065 | d = d[idx] | ||
1066 | if m: | ||
1067 | rest = rest[m.end():] | ||
1068 | else: | ||
1069 | raise ValueError('Unable to convert ' | ||
1070 | '%r at %r' % (value, rest)) | ||
1071 | #rest should be empty | ||
1072 | return d | ||
1073 | |||
1074 | def convert(self, value): | ||
1075 | """ | ||
1076 | Convert values to an appropriate type. dicts, lists and tuples are | ||
1077 | replaced by their converting alternatives. Strings are checked to | ||
1078 | see if they have a conversion format and are converted if they do. | ||
1079 | """ | ||
1080 | if not isinstance(value, ConvertingDict) and isinstance(value, dict): | ||
1081 | value = ConvertingDict(value) | ||
1082 | value.configurator = self | ||
1083 | elif not isinstance(value, ConvertingList) and isinstance(value, list): | ||
1084 | value = ConvertingList(value) | ||
1085 | value.configurator = self | ||
1086 | elif not isinstance(value, ConvertingTuple) and\ | ||
1087 | isinstance(value, tuple): | ||
1088 | value = ConvertingTuple(value) | ||
1089 | value.configurator = self | ||
1090 | elif isinstance(value, string_types): | ||
1091 | m = self.CONVERT_PATTERN.match(value) | ||
1092 | if m: | ||
1093 | d = m.groupdict() | ||
1094 | prefix = d['prefix'] | ||
1095 | converter = self.value_converters.get(prefix, None) | ||
1096 | if converter: | ||
1097 | suffix = d['suffix'] | ||
1098 | converter = getattr(self, converter) | ||
1099 | value = converter(suffix) | ||
1100 | return value | ||
1101 | |||
1102 | def configure_custom(self, config): | ||
1103 | """Configure an object with a user-supplied factory.""" | ||
1104 | c = config.pop('()') | ||
1105 | if not callable(c): | ||
1106 | c = self.resolve(c) | ||
1107 | props = config.pop('.', None) | ||
1108 | # Check for valid identifiers | ||
1109 | kwargs = dict([(k, config[k]) for k in config if valid_ident(k)]) | ||
1110 | result = c(**kwargs) | ||
1111 | if props: | ||
1112 | for name, value in props.items(): | ||
1113 | setattr(result, name, value) | ||
1114 | return result | ||
1115 | |||
1116 | def as_tuple(self, value): | ||
1117 | """Utility function which converts lists to tuples.""" | ||
1118 | if isinstance(value, list): | ||
1119 | value = tuple(value) | ||
1120 | return value | ||
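A sketch of the conversion machinery (the config keys here are invented for illustration): cfg:// strings index back into the configuration itself, while ext:// strings resolve to importable objects:

    cfg = BaseConfigurator({
        'handlers': {'console': {'level': 'DEBUG'}},
        'main_level': 'cfg://handlers.console.level',
        'join': 'ext://os.path.join',
    })
    print(cfg.config['main_level'])      # 'DEBUG'
    print(cfg.config['join']('a', 'b'))  # 'a/b' on POSIX (it is os.path.join)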
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/database.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/database.py new file mode 100644 index 0000000..54483e1 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/database.py | |||
@@ -0,0 +1,1336 @@ | |||
1 | # -*- coding: utf-8 -*- | ||
2 | # | ||
3 | # Copyright (C) 2012-2017 The Python Software Foundation. | ||
4 | # See LICENSE.txt and CONTRIBUTORS.txt. | ||
5 | # | ||
6 | """PEP 376 implementation.""" | ||
7 | |||
8 | from __future__ import unicode_literals | ||
9 | |||
10 | import base64 | ||
11 | import codecs | ||
12 | import contextlib | ||
13 | import hashlib | ||
14 | import logging | ||
15 | import os | ||
16 | import posixpath | ||
17 | import sys | ||
18 | import zipimport | ||
19 | |||
20 | from . import DistlibException, resources | ||
21 | from .compat import StringIO | ||
22 | from .version import get_scheme, UnsupportedVersionError | ||
23 | from .metadata import Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME | ||
24 | from .util import (parse_requirement, cached_property, parse_name_and_version, | ||
25 | read_exports, write_exports, CSVReader, CSVWriter) | ||
26 | |||
27 | |||
28 | __all__ = ['Distribution', 'BaseInstalledDistribution', | ||
29 | 'InstalledDistribution', 'EggInfoDistribution', | ||
30 | 'DistributionPath'] | ||
31 | |||
32 | |||
33 | logger = logging.getLogger(__name__) | ||
34 | |||
35 | EXPORTS_FILENAME = 'pydist-exports.json' | ||
36 | COMMANDS_FILENAME = 'pydist-commands.json' | ||
37 | |||
38 | DIST_FILES = ('INSTALLER', METADATA_FILENAME, 'RECORD', 'REQUESTED', | ||
39 | 'RESOURCES', EXPORTS_FILENAME, 'SHARED') | ||
40 | |||
41 | DISTINFO_EXT = '.dist-info' | ||
42 | |||
43 | |||
44 | class _Cache(object): | ||
45 | """ | ||
46 | A simple cache mapping names and .dist-info paths to distributions | ||
47 | """ | ||
48 | def __init__(self): | ||
49 | """ | ||
50 | Initialise an instance. There is normally one for each DistributionPath. | ||
51 | """ | ||
52 | self.name = {} | ||
53 | self.path = {} | ||
54 | self.generated = False | ||
55 | |||
56 | def clear(self): | ||
57 | """ | ||
58 | Clear the cache, setting it to its initial state. | ||
59 | """ | ||
60 | self.name.clear() | ||
61 | self.path.clear() | ||
62 | self.generated = False | ||
63 | |||
64 | def add(self, dist): | ||
65 | """ | ||
66 | Add a distribution to the cache. | ||
67 | :param dist: The distribution to add. | ||
68 | """ | ||
69 | if dist.path not in self.path: | ||
70 | self.path[dist.path] = dist | ||
71 | self.name.setdefault(dist.key, []).append(dist) | ||
72 | |||
73 | |||
74 | class DistributionPath(object): | ||
75 | """ | ||
76 | Represents a set of distributions installed on a path (typically sys.path). | ||
77 | """ | ||
78 | def __init__(self, path=None, include_egg=False): | ||
79 | """ | ||
80 | Create an instance from a path, optionally including legacy (distutils/ | ||
81 | setuptools/distribute) distributions. | ||
82 | :param path: The path to use, as a list of directories. If not specified, | ||
83 | sys.path is used. | ||
84 | :param include_egg: If True, this instance will look for and return legacy | ||
85 | distributions as well as those based on PEP 376. | ||
86 | """ | ||
87 | if path is None: | ||
88 | path = sys.path | ||
89 | self.path = path | ||
90 | self._include_dist = True | ||
91 | self._include_egg = include_egg | ||
92 | |||
93 | self._cache = _Cache() | ||
94 | self._cache_egg = _Cache() | ||
95 | self._cache_enabled = True | ||
96 | self._scheme = get_scheme('default') | ||
97 | |||
98 | def _get_cache_enabled(self): | ||
99 | return self._cache_enabled | ||
100 | |||
101 | def _set_cache_enabled(self, value): | ||
102 | self._cache_enabled = value | ||
103 | |||
104 | cache_enabled = property(_get_cache_enabled, _set_cache_enabled) | ||
105 | |||
106 | def clear_cache(self): | ||
107 | """ | ||
108 | Clears the internal cache. | ||
109 | """ | ||
110 | self._cache.clear() | ||
111 | self._cache_egg.clear() | ||
112 | |||
113 | |||
114 | def _yield_distributions(self): | ||
115 | """ | ||
116 | Yield .dist-info and/or .egg(-info) distributions. | ||
117 | """ | ||
118 | # We need to check if we've seen some resources already, because on | ||
119 | # some Linux systems (e.g. some Debian/Ubuntu variants) there are | ||
120 | # symlinks which alias other files in the environment. | ||
121 | seen = set() | ||
122 | for path in self.path: | ||
123 | finder = resources.finder_for_path(path) | ||
124 | if finder is None: | ||
125 | continue | ||
126 | r = finder.find('') | ||
127 | if not r or not r.is_container: | ||
128 | continue | ||
129 | rset = sorted(r.resources) | ||
130 | for entry in rset: | ||
131 | r = finder.find(entry) | ||
132 | if not r or r.path in seen: | ||
133 | continue | ||
134 | if self._include_dist and entry.endswith(DISTINFO_EXT): | ||
135 | possible_filenames = [METADATA_FILENAME, WHEEL_METADATA_FILENAME] | ||
136 | for metadata_filename in possible_filenames: | ||
137 | metadata_path = posixpath.join(entry, metadata_filename) | ||
138 | pydist = finder.find(metadata_path) | ||
139 | if pydist: | ||
140 | break | ||
141 | else: | ||
142 | continue | ||
143 | |||
144 | with contextlib.closing(pydist.as_stream()) as stream: | ||
145 | metadata = Metadata(fileobj=stream, scheme='legacy') | ||
146 | logger.debug('Found %s', r.path) | ||
147 | seen.add(r.path) | ||
148 | yield new_dist_class(r.path, metadata=metadata, | ||
149 | env=self) | ||
150 | elif self._include_egg and entry.endswith(('.egg-info', | ||
151 | '.egg')): | ||
152 | logger.debug('Found %s', r.path) | ||
153 | seen.add(r.path) | ||
154 | yield old_dist_class(r.path, self) | ||
155 | |||
156 | def _generate_cache(self): | ||
157 | """ | ||
158 | Scan the path for distributions and populate the cache with | ||
159 | those that are found. | ||
160 | """ | ||
161 | gen_dist = not self._cache.generated | ||
162 | gen_egg = self._include_egg and not self._cache_egg.generated | ||
163 | if gen_dist or gen_egg: | ||
164 | for dist in self._yield_distributions(): | ||
165 | if isinstance(dist, InstalledDistribution): | ||
166 | self._cache.add(dist) | ||
167 | else: | ||
168 | self._cache_egg.add(dist) | ||
169 | |||
170 | if gen_dist: | ||
171 | self._cache.generated = True | ||
172 | if gen_egg: | ||
173 | self._cache_egg.generated = True | ||
174 | |||
175 | @classmethod | ||
176 | def distinfo_dirname(cls, name, version): | ||
177 | """ | ||
178 | The *name* and *version* parameters are converted into their | ||
179 | filename-escaped form, i.e. any ``'-'`` characters are replaced | ||
180 | with ``'_'`` other than the one in ``'dist-info'`` and the one | ||
181 | separating the name from the version number. | ||
182 | |||
183 | :parameter name: is converted to a standard distribution name by replacing | ||
184 | any runs of non-alphanumeric characters with a single | ||
185 | ``'-'``. | ||
186 | :type name: string | ||
187 | :parameter version: is converted to a standard version string. Spaces | ||
188 | become dots, and all other non-alphanumeric characters | ||
189 | (except dots) become dashes, with runs of multiple | ||
190 | dashes condensed to a single dash. | ||
191 | :type version: string | ||
192 | :returns: directory name | ||
193 | :rtype: string""" | ||
194 | name = name.replace('-', '_') | ||
195 | return '-'.join([name, version]) + DISTINFO_EXT | ||
196 | |||
197 | def get_distributions(self): | ||
198 | """ | ||
199 | Provides an iterator that looks for distributions and returns | ||
200 | :class:`InstalledDistribution` or | ||
201 | :class:`EggInfoDistribution` instances for each one of them. | ||
202 | |||
203 | :rtype: iterator of :class:`InstalledDistribution` and | ||
204 | :class:`EggInfoDistribution` instances | ||
205 | """ | ||
206 | if not self._cache_enabled: | ||
207 | for dist in self._yield_distributions(): | ||
208 | yield dist | ||
209 | else: | ||
210 | self._generate_cache() | ||
211 | |||
212 | for dist in self._cache.path.values(): | ||
213 | yield dist | ||
214 | |||
215 | if self._include_egg: | ||
216 | for dist in self._cache_egg.path.values(): | ||
217 | yield dist | ||
218 | |||
219 | def get_distribution(self, name): | ||
220 | """ | ||
221 | Looks for a named distribution on the path. | ||
222 | |||
223 | This function only returns the first result found, as no more than one | ||
224 | value is expected. If nothing is found, ``None`` is returned. | ||
225 | |||
226 | :rtype: :class:`InstalledDistribution`, :class:`EggInfoDistribution` | ||
227 | or ``None`` | ||
228 | """ | ||
229 | result = None | ||
230 | name = name.lower() | ||
231 | if not self._cache_enabled: | ||
232 | for dist in self._yield_distributions(): | ||
233 | if dist.key == name: | ||
234 | result = dist | ||
235 | break | ||
236 | else: | ||
237 | self._generate_cache() | ||
238 | |||
239 | if name in self._cache.name: | ||
240 | result = self._cache.name[name][0] | ||
241 | elif self._include_egg and name in self._cache_egg.name: | ||
242 | result = self._cache_egg.name[name][0] | ||
243 | return result | ||
244 | |||
245 | def provides_distribution(self, name, version=None): | ||
246 | """ | ||
247 | Iterates over all distributions to find which distributions provide *name*. | ||
248 | If a *version* is provided, it will be used to filter the results. | ||
249 | |||
250 | This function only returns the first result found, since no more than | ||
251 | one value is expected. If the directory is not found, returns ``None``. | ||
252 | |||
253 | :parameter version: a version specifier that indicates the version | ||
254 | required, conforming to the format in ``PEP-345`` | ||
255 | |||
256 | :type name: string | ||
257 | :type version: string | ||
258 | """ | ||
259 | matcher = None | ||
260 | if version is not None: | ||
261 | try: | ||
262 | matcher = self._scheme.matcher('%s (%s)' % (name, version)) | ||
263 | except ValueError: | ||
264 | raise DistlibException('invalid name or version: %r, %r' % | ||
265 | (name, version)) | ||
266 | |||
267 | for dist in self.get_distributions(): | ||
268 | # We hit a problem on Travis where enum34 was installed and doesn't | ||
269 | # have a provides attribute ... | ||
270 | if not hasattr(dist, 'provides'): | ||
271 | logger.debug('No "provides": %s', dist) | ||
272 | else: | ||
273 | provided = dist.provides | ||
274 | |||
275 | for p in provided: | ||
276 | p_name, p_ver = parse_name_and_version(p) | ||
277 | if matcher is None: | ||
278 | if p_name == name: | ||
279 | yield dist | ||
280 | break | ||
281 | else: | ||
282 | if p_name == name and matcher.match(p_ver): | ||
283 | yield dist | ||
284 | break | ||
285 | |||
286 | def get_file_path(self, name, relative_path): | ||
287 | """ | ||
288 | Return the path to a resource file. | ||
289 | """ | ||
290 | dist = self.get_distribution(name) | ||
291 | if dist is None: | ||
292 | raise LookupError('no distribution named %r found' % name) | ||
293 | return dist.get_resource_path(relative_path) | ||
294 | |||
295 | def get_exported_entries(self, category, name=None): | ||
296 | """ | ||
297 | Return all of the exported entries in a particular category. | ||
298 | |||
299 | :param category: The category to search for entries. | ||
300 | :param name: If specified, only entries with that name are returned. | ||
301 | """ | ||
302 | for dist in self.get_distributions(): | ||
303 | r = dist.exports | ||
304 | if category in r: | ||
305 | d = r[category] | ||
306 | if name is not None: | ||
307 | if name in d: | ||
308 | yield d[name] | ||
309 | else: | ||
310 | for v in d.values(): | ||
311 | yield v | ||
312 | |||
313 | |||
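Typical use of DistributionPath: point it at sys.path (the default) and iterate; what gets printed depends entirely on the environment it runs in:

    dp = DistributionPath(include_egg=True)
    for dist in dp.get_distributions():
        print(dist.name, dist.version)
    print(dp.get_distribution('pip'))  # a distribution object, or None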
314 | class Distribution(object): | ||
315 | """ | ||
316 | A base class for distributions, whether installed or from indexes. | ||
317 | Either way, it must have some metadata, so that's all that's needed | ||
318 | for construction. | ||
319 | """ | ||
320 | |||
321 | build_time_dependency = False | ||
322 | """ | ||
323 | Set to True if it's known to be only a build-time dependency (i.e. | ||
324 | not needed after installation). | ||
325 | """ | ||
326 | |||
327 | requested = False | ||
328 | """A boolean that indicates whether the ``REQUESTED`` metadata file is | ||
329 | present (in other words, whether the package was installed by user | ||
330 | request or it was installed as a dependency).""" | ||
331 | |||
332 | def __init__(self, metadata): | ||
333 | """ | ||
334 | Initialise an instance. | ||
335 | :param metadata: The instance of :class:`Metadata` describing this | ||
336 | distribution. | ||
337 | """ | ||
338 | self.metadata = metadata | ||
339 | self.name = metadata.name | ||
340 | self.key = self.name.lower() # for case-insensitive comparisons | ||
341 | self.version = metadata.version | ||
342 | self.locator = None | ||
343 | self.digest = None | ||
344 | self.extras = None # additional features requested | ||
345 | self.context = None # environment marker overrides | ||
346 | self.download_urls = set() | ||
347 | self.digests = {} | ||
348 | |||
349 | @property | ||
350 | def source_url(self): | ||
351 | """ | ||
352 | The source archive download URL for this distribution. | ||
353 | """ | ||
354 | return self.metadata.source_url | ||
355 | |||
356 | download_url = source_url # Backward compatibility | ||
357 | |||
358 | @property | ||
359 | def name_and_version(self): | ||
360 | """ | ||
361 | A utility property which displays the name and version in parentheses. | ||
362 | """ | ||
363 | return '%s (%s)' % (self.name, self.version) | ||
364 | |||
365 | @property | ||
366 | def provides(self): | ||
367 | """ | ||
368 | A set of distribution names and versions provided by this distribution. | ||
369 | :return: A set of "name (version)" strings. | ||
370 | """ | ||
371 | plist = self.metadata.provides | ||
372 | s = '%s (%s)' % (self.name, self.version) | ||
373 | if s not in plist: | ||
374 | plist.append(s) | ||
375 | return plist | ||
376 | |||
377 | def _get_requirements(self, req_attr): | ||
378 | md = self.metadata | ||
379 | logger.debug('Getting requirements from metadata %r', md.todict()) | ||
380 | reqts = getattr(md, req_attr) | ||
381 | return set(md.get_requirements(reqts, extras=self.extras, | ||
382 | env=self.context)) | ||
383 | |||
384 | @property | ||
385 | def run_requires(self): | ||
386 | return self._get_requirements('run_requires') | ||
387 | |||
388 | @property | ||
389 | def meta_requires(self): | ||
390 | return self._get_requirements('meta_requires') | ||
391 | |||
392 | @property | ||
393 | def build_requires(self): | ||
394 | return self._get_requirements('build_requires') | ||
395 | |||
396 | @property | ||
397 | def test_requires(self): | ||
398 | return self._get_requirements('test_requires') | ||
399 | |||
400 | @property | ||
401 | def dev_requires(self): | ||
402 | return self._get_requirements('dev_requires') | ||
403 | |||
404 | def matches_requirement(self, req): | ||
405 | """ | ||
406 | Say if this instance matches (fulfills) a requirement. | ||
407 | :param req: The requirement to match. | ||
408 | :rtype req: str | ||
409 | :return: True if it matches, else False. | ||
410 | """ | ||
411 | # Requirement may contain extras - parse to lose those | ||
412 | # from what's passed to the matcher | ||
413 | r = parse_requirement(req) | ||
414 | scheme = get_scheme(self.metadata.scheme) | ||
415 | try: | ||
416 | matcher = scheme.matcher(r.requirement) | ||
417 | except UnsupportedVersionError: | ||
418 | # XXX compat mode if we cannot read the version | ||
419 | logger.warning('could not read version %r - using name only', | ||
420 | req) | ||
421 | name = req.split()[0] | ||
422 | matcher = scheme.matcher(name) | ||
423 | |||
424 | name = matcher.key # case-insensitive | ||
425 | |||
426 | result = False | ||
427 | for p in self.provides: | ||
428 | p_name, p_ver = parse_name_and_version(p) | ||
429 | if p_name != name: | ||
430 | continue | ||
431 | try: | ||
432 | result = matcher.match(p_ver) | ||
433 | break | ||
434 | except UnsupportedVersionError: | ||
435 | pass | ||
436 | return result | ||
437 | |||
438 | def __repr__(self): | ||
439 | """ | ||
440 | Return a textual representation of this instance, | ||
441 | """ | ||
442 | if self.source_url: | ||
443 | suffix = ' [%s]' % self.source_url | ||
444 | else: | ||
445 | suffix = '' | ||
446 | return '<Distribution %s (%s)%s>' % (self.name, self.version, suffix) | ||
447 | |||
448 | def __eq__(self, other): | ||
449 | """ | ||
450 | See if this distribution is the same as another. | ||
451 | :param other: The distribution to compare with. To be equal to one | ||
452 | another, distributions must have the same type, name, | ||
453 | version and source_url. | ||
454 | :return: True if it is the same, else False. | ||
455 | """ | ||
456 | if type(other) is not type(self): | ||
457 | result = False | ||
458 | else: | ||
459 | result = (self.name == other.name and | ||
460 | self.version == other.version and | ||
461 | self.source_url == other.source_url) | ||
462 | return result | ||
463 | |||
464 | def __hash__(self): | ||
465 | """ | ||
466 | Compute hash in a way which matches the equality test. | ||
467 | """ | ||
468 | return hash(self.name) + hash(self.version) + hash(self.source_url) | ||
469 | |||
470 | |||
471 | class BaseInstalledDistribution(Distribution): | ||
472 | """ | ||
473 | This is the base class for installed distributions (whether PEP 376 or | ||
474 | legacy). | ||
475 | """ | ||
476 | |||
477 | hasher = None | ||
478 | |||
479 | def __init__(self, metadata, path, env=None): | ||
480 | """ | ||
481 | Initialise an instance. | ||
482 | :param metadata: An instance of :class:`Metadata` which describes the | ||
483 | distribution. This will normally have been initialised | ||
484 | from a metadata file in the ``path``. | ||
485 | :param path: The path of the ``.dist-info`` or ``.egg-info`` | ||
486 | directory for the distribution. | ||
487 | :param env: This is normally the :class:`DistributionPath` | ||
488 | instance where this distribution was found. | ||
489 | """ | ||
490 | super(BaseInstalledDistribution, self).__init__(metadata) | ||
491 | self.path = path | ||
492 | self.dist_path = env | ||
493 | |||
494 | def get_hash(self, data, hasher=None): | ||
495 | """ | ||
496 | Get the hash of some data, using a particular hash algorithm, if | ||
497 | specified. | ||
498 | |||
499 | :param data: The data to be hashed. | ||
500 | :type data: bytes | ||
501 | :param hasher: The name of a hash implementation, supported by hashlib, | ||
502 | or ``None``. Examples of valid values are ``'sha1'``, | ||
503 | ``'sha224'``, ``'sha384'``, '``sha256'``, ``'md5'`` and | ||
504 | ``'sha512'``. If no hasher is specified, the ``hasher`` | ||
505 | attribute of the :class:`InstalledDistribution` instance | ||
506 | is used. If the hasher is determined to be ``None``, MD5 | ||
507 | is used as the hashing algorithm. | ||
508 | :returns: The hash of the data. If a hasher was explicitly specified, | ||
509 | the returned hash will be prefixed with the specified hasher | ||
510 | followed by '='. | ||
511 | :rtype: str | ||
512 | """ | ||
513 | if hasher is None: | ||
514 | hasher = self.hasher | ||
515 | if hasher is None: | ||
516 | hasher = hashlib.md5 | ||
517 | prefix = '' | ||
518 | else: | ||
519 | prefix = '%s=' % hasher # use the specified name, per the docstring | ||
520 | hasher = getattr(hashlib, hasher) | ||
521 | digest = hasher(data).digest() | ||
522 | digest = base64.urlsafe_b64encode(digest).rstrip(b'=').decode('ascii') | ||
523 | return '%s%s' % (prefix, digest) | ||
524 | |||
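A self-contained sketch of the digest encoding performed above; it reproduces the 'hasher=digest' format that ends up in RECORD files:

    import base64
    import hashlib

    digest = hashlib.sha256(b'payload').digest()
    encoded = base64.urlsafe_b64encode(digest).rstrip(b'=').decode('ascii')
    record_hash = 'sha256=%s' % encoded   # e.g. the value written into RECORD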
525 | |||
526 | class InstalledDistribution(BaseInstalledDistribution): | ||
527 | """ | ||
528 | Created with the *path* of the ``.dist-info`` directory provided to the | ||
529 | constructor. It reads the metadata contained in ``pydist.json`` when it is | ||
530 | instantiated, or uses a passed-in Metadata instance (useful for when | ||
531 | dry-run mode is being used). | ||
532 | """ | ||
533 | |||
534 | hasher = 'sha256' | ||
535 | |||
536 | def __init__(self, path, metadata=None, env=None): | ||
537 | self.modules = [] | ||
538 | self.finder = finder = resources.finder_for_path(path) | ||
539 | if finder is None: | ||
540 | raise ValueError('finder unavailable for %s' % path) | ||
541 | if env and env._cache_enabled and path in env._cache.path: | ||
542 | metadata = env._cache.path[path].metadata | ||
543 | elif metadata is None: | ||
544 | r = finder.find(METADATA_FILENAME) | ||
545 | # Temporary - for Wheel 0.23 support | ||
546 | if r is None: | ||
547 | r = finder.find(WHEEL_METADATA_FILENAME) | ||
548 | # Temporary - for legacy support | ||
549 | if r is None: | ||
550 | r = finder.find('METADATA') | ||
551 | if r is None: | ||
552 | raise ValueError('no %s found in %s' % (METADATA_FILENAME, | ||
553 | path)) | ||
554 | with contextlib.closing(r.as_stream()) as stream: | ||
555 | metadata = Metadata(fileobj=stream, scheme='legacy') | ||
556 | |||
557 | super(InstalledDistribution, self).__init__(metadata, path, env) | ||
558 | |||
559 | if env and env._cache_enabled: | ||
560 | env._cache.add(self) | ||
561 | |||
562 | r = finder.find('REQUESTED') | ||
563 | self.requested = r is not None | ||
564 | p = os.path.join(path, 'top_level.txt') | ||
565 | if os.path.exists(p): | ||
566 | with open(p, 'rb') as f: | ||
567 | data = f.read() | ||
568 | self.modules = data.splitlines() | ||
569 | |||
570 | def __repr__(self): | ||
571 | return '<InstalledDistribution %r %s at %r>' % ( | ||
572 | self.name, self.version, self.path) | ||
573 | |||
574 | def __str__(self): | ||
575 | return "%s %s" % (self.name, self.version) | ||
576 | |||
577 | def _get_records(self): | ||
578 | """ | ||
579 | Get the list of installed files for the distribution | ||
580 | :return: A list of tuples of path, hash and size. Note that hash and | ||
581 | size might be ``None`` for some entries. The path is exactly | ||
582 | as stored in the file (which is as in PEP 376). | ||
583 | """ | ||
584 | results = [] | ||
585 | r = self.get_distinfo_resource('RECORD') | ||
586 | with contextlib.closing(r.as_stream()) as stream: | ||
587 | with CSVReader(stream=stream) as record_reader: | ||
588 | # Base location is parent dir of .dist-info dir | ||
589 | #base_location = os.path.dirname(self.path) | ||
590 | #base_location = os.path.abspath(base_location) | ||
591 | for row in record_reader: | ||
592 | missing = [None for i in range(len(row), 3)] | ||
593 | path, checksum, size = row + missing | ||
594 | #if not os.path.isabs(path): | ||
595 | # path = path.replace('/', os.sep) | ||
596 | # path = os.path.join(base_location, path) | ||
597 | results.append((path, checksum, size)) | ||
598 | return results | ||
599 | |||
600 | @cached_property | ||
601 | def exports(self): | ||
602 | """ | ||
603 | Return the information exported by this distribution. | ||
604 | :return: A dictionary of exports, mapping an export category to a dict | ||
605 | of :class:`ExportEntry` instances describing the individual | ||
606 | export entries, and keyed by name. | ||
607 | """ | ||
608 | result = {} | ||
609 | r = self.get_distinfo_resource(EXPORTS_FILENAME) | ||
610 | if r: | ||
611 | result = self.read_exports() | ||
612 | return result | ||
613 | |||
614 | def read_exports(self): | ||
615 | """ | ||
616 | Read exports data from a file in .ini format. | ||
617 | |||
618 | :return: A dictionary of exports, mapping an export category to a list | ||
619 | of :class:`ExportEntry` instances describing the individual | ||
620 | export entries. | ||
621 | """ | ||
622 | result = {} | ||
623 | r = self.get_distinfo_resource(EXPORTS_FILENAME) | ||
624 | if r: | ||
625 | with contextlib.closing(r.as_stream()) as stream: | ||
626 | result = read_exports(stream) | ||
627 | return result | ||
628 | |||
629 | def write_exports(self, exports): | ||
630 | """ | ||
631 | Write a dictionary of exports to a file in .ini format. | ||
632 | :param exports: A dictionary of exports, mapping an export category to | ||
633 | a list of :class:`ExportEntry` instances describing the | ||
634 | individual export entries. | ||
635 | """ | ||
636 | rf = self.get_distinfo_file(EXPORTS_FILENAME) | ||
637 | with open(rf, 'w') as f: | ||
638 | write_exports(exports, f) | ||
639 | |||
640 | def get_resource_path(self, relative_path): | ||
641 | """ | ||
642 | NOTE: This API may change in the future. | ||
643 | |||
644 | Return the absolute path to a resource file with the given relative | ||
645 | path. | ||
646 | |||
647 | :param relative_path: The path, relative to .dist-info, of the resource | ||
648 | of interest. | ||
649 | :return: The absolute path where the resource is to be found. | ||
650 | """ | ||
651 | r = self.get_distinfo_resource('RESOURCES') | ||
652 | with contextlib.closing(r.as_stream()) as stream: | ||
653 | with CSVReader(stream=stream) as resources_reader: | ||
654 | for relative, destination in resources_reader: | ||
655 | if relative == relative_path: | ||
656 | return destination | ||
657 | raise KeyError('no resource file with relative path %r ' | ||
658 | 'is installed' % relative_path) | ||
659 | |||
660 | def list_installed_files(self): | ||
661 | """ | ||
662 | Iterates over the ``RECORD`` entries and yields a tuple | ||
663 | ``(path, hash, size)`` for each line. | ||
664 | |||
665 | :returns: iterator of (path, hash, size) | ||
666 | """ | ||
667 | for result in self._get_records(): | ||
668 | yield result | ||
669 | |||
670 | def write_installed_files(self, paths, prefix, dry_run=False): | ||
671 | """ | ||
672 | Writes the ``RECORD`` file, using the ``paths`` iterable passed in. Any | ||
673 | existing ``RECORD`` file is silently overwritten. | ||
674 | |||
675 | prefix is used to determine when to write absolute paths. | ||
676 | """ | ||
677 | prefix = os.path.join(prefix, '') | ||
678 | base = os.path.dirname(self.path) | ||
679 | base_under_prefix = base.startswith(prefix) | ||
680 | base = os.path.join(base, '') | ||
681 | record_path = self.get_distinfo_file('RECORD') | ||
682 | logger.info('creating %s', record_path) | ||
683 | if dry_run: | ||
684 | return None | ||
685 | with CSVWriter(record_path) as writer: | ||
686 | for path in paths: | ||
687 | if os.path.isdir(path) or path.endswith(('.pyc', '.pyo')): | ||
688 | # do not put size and hash, as in PEP-376 | ||
689 | hash_value = size = '' | ||
690 | else: | ||
691 | size = '%d' % os.path.getsize(path) | ||
692 | with open(path, 'rb') as fp: | ||
693 | hash_value = self.get_hash(fp.read()) | ||
694 | if path.startswith(base) or (base_under_prefix and | ||
695 | path.startswith(prefix)): | ||
696 | path = os.path.relpath(path, base) | ||
697 | writer.writerow((path, hash_value, size)) | ||
698 | |||
699 | # add the RECORD file itself | ||
700 | if record_path.startswith(base): | ||
701 | record_path = os.path.relpath(record_path, base) | ||
702 | writer.writerow((record_path, '', '')) | ||
703 | return record_path | ||
704 | |||
705 | def check_installed_files(self): | ||
706 | """ | ||
707 | Checks that the hashes and sizes of the files in ``RECORD`` are | ||
708 | matched by the files themselves. Returns a (possibly empty) list of | ||
709 | mismatches. Each entry in the mismatch list will be a tuple consisting | ||
710 | of the path, 'exists', 'size' or 'hash' according to what didn't match | ||
711 | (existence is checked first, then size, then hash), the expected | ||
712 | value and the actual value. | ||
713 | """ | ||
714 | mismatches = [] | ||
715 | base = os.path.dirname(self.path) | ||
716 | record_path = self.get_distinfo_file('RECORD') | ||
717 | for path, hash_value, size in self.list_installed_files(): | ||
718 | if not os.path.isabs(path): | ||
719 | path = os.path.join(base, path) | ||
720 | if path == record_path: | ||
721 | continue | ||
722 | if not os.path.exists(path): | ||
723 | mismatches.append((path, 'exists', True, False)) | ||
724 | elif os.path.isfile(path): | ||
725 | actual_size = str(os.path.getsize(path)) | ||
726 | if size and actual_size != size: | ||
727 | mismatches.append((path, 'size', size, actual_size)) | ||
728 | elif hash_value: | ||
729 | if '=' in hash_value: | ||
730 | hasher = hash_value.split('=', 1)[0] | ||
731 | else: | ||
732 | hasher = None | ||
733 | |||
734 | with open(path, 'rb') as f: | ||
735 | actual_hash = self.get_hash(f.read(), hasher) | ||
736 | if actual_hash != hash_value: | ||
737 | mismatches.append((path, 'hash', hash_value, actual_hash)) | ||
738 | return mismatches | ||
739 | |||
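A verification sketch using the DistributionPath class defined elsewhere in this module; the distribution name is a placeholder:

    from pip._vendor.distlib.database import DistributionPath

    dp = DistributionPath()                    # scans sys.path by default
    dist = dp.get_distribution('example-pkg')  # placeholder name
    if dist is not None:
        for path, check, expected, actual in dist.check_installed_files():
            print('%s: bad %s (expected %r, got %r)'
                  % (path, check, expected, actual))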
740 | @cached_property | ||
741 | def shared_locations(self): | ||
742 | """ | ||
743 | A dictionary of shared locations whose keys are in the set 'prefix', | ||
744 | 'purelib', 'platlib', 'scripts', 'headers', 'data' and 'namespace'. | ||
745 | The corresponding value is the absolute path of that category for | ||
746 | this distribution, and takes into account any paths selected by the | ||
747 | user at installation time (e.g. via command-line arguments). In the | ||
748 | case of the 'namespace' key, this would be a list of absolute paths | ||
749 | for the roots of namespace packages in this distribution. | ||
750 | |||
751 | The first time this property is accessed, the relevant information is | ||
752 | read from the SHARED file in the .dist-info directory. | ||
753 | """ | ||
754 | result = {} | ||
755 | shared_path = os.path.join(self.path, 'SHARED') | ||
756 | if os.path.isfile(shared_path): | ||
757 | with codecs.open(shared_path, 'r', encoding='utf-8') as f: | ||
758 | lines = f.read().splitlines() | ||
759 | for line in lines: | ||
760 | key, value = line.split('=', 1) | ||
761 | if key == 'namespace': | ||
762 | result.setdefault(key, []).append(value) | ||
763 | else: | ||
764 | result[key] = value | ||
765 | return result | ||
766 | |||
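The SHARED file parsed here is plain 'key=value' lines, with 'namespace' allowed to repeat; an illustrative example (the paths are made up):

    prefix=/usr/local
    scripts=/usr/local/bin
    namespace=/usr/local/lib/python3.7/site-packages/ns1
    namespace=/usr/local/lib/python3.7/site-packages/ns2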
767 | def write_shared_locations(self, paths, dry_run=False): | ||
768 | """ | ||
769 | Write shared location information to the SHARED file in .dist-info. | ||
770 | :param paths: A dictionary as described in the documentation for | ||
771 | :meth:`shared_locations`. | ||
772 | :param dry_run: If True, the action is logged but no file is actually | ||
773 | written. | ||
774 | :return: The path of the file written to. | ||
775 | """ | ||
776 | shared_path = os.path.join(self.path, 'SHARED') | ||
777 | logger.info('creating %s', shared_path) | ||
778 | if dry_run: | ||
779 | return None | ||
780 | lines = [] | ||
781 | for key in ('prefix', 'lib', 'headers', 'scripts', 'data'): | ||
782 | path = paths[key] | ||
783 | if os.path.isdir(path): | ||
784 | lines.append('%s=%s' % (key, path)) | ||
785 | for ns in paths.get('namespace', ()): | ||
786 | lines.append('namespace=%s' % ns) | ||
787 | |||
788 | with codecs.open(shared_path, 'w', encoding='utf-8') as f: | ||
789 | f.write('\n'.join(lines)) | ||
790 | return shared_path | ||
791 | |||
792 | def get_distinfo_resource(self, path): | ||
793 | if path not in DIST_FILES: | ||
794 | raise DistlibException('invalid path for a dist-info file: ' | ||
795 | '%r at %r' % (path, self.path)) | ||
796 | finder = resources.finder_for_path(self.path) | ||
797 | if finder is None: | ||
798 | raise DistlibException('Unable to get a finder for %s' % self.path) | ||
799 | return finder.find(path) | ||
800 | |||
801 | def get_distinfo_file(self, path): | ||
802 | """ | ||
803 | Returns a path located under the ``.dist-info`` directory. Returns a | ||
804 | string representing the path. | ||
805 | |||
806 | :parameter path: a ``'/'``-separated path relative to the | ||
807 | ``.dist-info`` directory or an absolute path; | ||
808 | If *path* is an absolute path and doesn't start | ||
809 | with the ``.dist-info`` directory path, | ||
810 | a :class:`DistlibException` is raised | ||
811 | :type path: str | ||
812 | :rtype: str | ||
813 | """ | ||
814 | # Check whether the path contains a separator # XXX use relpath, add tests | ||
815 | if path.find(os.sep) >= 0: | ||
816 | # treat it as an absolute path | ||
817 | distinfo_dirname, path = path.split(os.sep)[-2:] | ||
818 | if distinfo_dirname != self.path.split(os.sep)[-1]: | ||
819 | raise DistlibException( | ||
820 | 'dist-info file %r does not belong to the %r %s ' | ||
821 | 'distribution' % (path, self.name, self.version)) | ||
822 | |||
823 | # The file must be relative | ||
824 | if path not in DIST_FILES: | ||
825 | raise DistlibException('invalid path for a dist-info file: ' | ||
826 | '%r at %r' % (path, self.path)) | ||
827 | |||
828 | return os.path.join(self.path, path) | ||
829 | |||
830 | def list_distinfo_files(self): | ||
831 | """ | ||
832 | Iterates over the ``RECORD`` entries and returns paths for each line if | ||
833 | the path is pointing to a file located in the ``.dist-info`` directory | ||
834 | or one of its subdirectories. | ||
835 | |||
836 | :returns: iterator of paths | ||
837 | """ | ||
838 | base = os.path.dirname(self.path) | ||
839 | for path, checksum, size in self._get_records(): | ||
840 | # XXX add separator or use real relpath algo | ||
841 | if not os.path.isabs(path): | ||
842 | path = os.path.join(base, path) | ||
843 | if path.startswith(self.path): | ||
844 | yield path | ||
845 | |||
846 | def __eq__(self, other): | ||
847 | return (isinstance(other, InstalledDistribution) and | ||
848 | self.path == other.path) | ||
849 | |||
850 | # See http://docs.python.org/reference/datamodel#object.__hash__ | ||
851 | __hash__ = object.__hash__ | ||
852 | |||
853 | |||
854 | class EggInfoDistribution(BaseInstalledDistribution): | ||
855 | """Created with the *path* of the ``.egg-info`` directory or file provided | ||
856 | to the constructor. It reads the metadata contained in the file itself, or | ||
857 | if the given path happens to be a directory, the metadata is read from the | ||
858 | file ``PKG-INFO`` under that directory.""" | ||
859 | |||
860 | requested = True # as we have no way of knowing, assume it was | ||
861 | shared_locations = {} | ||
862 | |||
863 | def __init__(self, path, env=None): | ||
864 | def set_name_and_version(s, n, v): | ||
865 | s.name = n | ||
866 | s.key = n.lower() # for case-insensitive comparisons | ||
867 | s.version = v | ||
868 | |||
869 | self.path = path | ||
870 | self.dist_path = env | ||
871 | if env and env._cache_enabled and path in env._cache_egg.path: | ||
872 | metadata = env._cache_egg.path[path].metadata | ||
873 | set_name_and_version(self, metadata.name, metadata.version) | ||
874 | else: | ||
875 | metadata = self._get_metadata(path) | ||
876 | |||
877 | # Need to be set before caching | ||
878 | set_name_and_version(self, metadata.name, metadata.version) | ||
879 | |||
880 | if env and env._cache_enabled: | ||
881 | env._cache_egg.add(self) | ||
882 | super(EggInfoDistribution, self).__init__(metadata, path, env) | ||
883 | |||
884 | def _get_metadata(self, path): | ||
885 | requires = None | ||
886 | |||
887 | def parse_requires_data(data): | ||
888 | """Create a list of dependencies from a requires.txt file. | ||
889 | |||
890 | *data*: the contents of a setuptools-produced requires.txt file. | ||
891 | """ | ||
892 | reqs = [] | ||
893 | lines = data.splitlines() | ||
894 | for line in lines: | ||
895 | line = line.strip() | ||
896 | if line.startswith('['): | ||
897 | logger.warning('Unexpected line: quitting requirement scan: %r', | ||
898 | line) | ||
899 | break | ||
900 | r = parse_requirement(line) | ||
901 | if not r: | ||
902 | logger.warning('Not recognised as a requirement: %r', line) | ||
903 | continue | ||
904 | if r.extras: | ||
905 | logger.warning('extra requirements in requires.txt are ' | ||
906 | 'not supported') | ||
907 | if not r.constraints: | ||
908 | reqs.append(r.name) | ||
909 | else: | ||
910 | cons = ', '.join('%s%s' % c for c in r.constraints) | ||
911 | reqs.append('%s (%s)' % (r.name, cons)) | ||
912 | return reqs | ||
913 | |||
914 | def parse_requires_path(req_path): | ||
915 | """Create a list of dependencies from a requires.txt file. | ||
916 | |||
917 | *req_path*: the path to a setuptools-produced requires.txt file. | ||
918 | """ | ||
919 | |||
920 | reqs = [] | ||
921 | try: | ||
922 | with codecs.open(req_path, 'r', 'utf-8') as fp: | ||
923 | reqs = parse_requires_data(fp.read()) | ||
924 | except IOError: | ||
925 | pass | ||
926 | return reqs | ||
927 | |||
928 | tl_path = tl_data = None | ||
929 | if path.endswith('.egg'): | ||
930 | if os.path.isdir(path): | ||
931 | p = os.path.join(path, 'EGG-INFO') | ||
932 | meta_path = os.path.join(p, 'PKG-INFO') | ||
933 | metadata = Metadata(path=meta_path, scheme='legacy') | ||
934 | req_path = os.path.join(p, 'requires.txt') | ||
935 | tl_path = os.path.join(p, 'top_level.txt') | ||
936 | requires = parse_requires_path(req_path) | ||
937 | else: | ||
938 | # FIXME handle the case where zipfile is not available | ||
939 | zipf = zipimport.zipimporter(path) | ||
940 | fileobj = StringIO( | ||
941 | zipf.get_data('EGG-INFO/PKG-INFO').decode('utf8')) | ||
942 | metadata = Metadata(fileobj=fileobj, scheme='legacy') | ||
943 | try: | ||
944 | data = zipf.get_data('EGG-INFO/requires.txt') | ||
945 | tl_data = zipf.get_data('EGG-INFO/top_level.txt').decode('utf-8') | ||
946 | requires = parse_requires_data(data.decode('utf-8')) | ||
947 | except IOError: | ||
948 | requires = None | ||
949 | elif path.endswith('.egg-info'): | ||
950 | if os.path.isdir(path): | ||
951 | req_path = os.path.join(path, 'requires.txt') | ||
952 | requires = parse_requires_path(req_path) | ||
953 | tl_path = os.path.join(path, 'top_level.txt') # before path is rebound | ||
954 | path = os.path.join(path, 'PKG-INFO') | ||
955 | metadata = Metadata(path=path, scheme='legacy') | ||
956 | else: | ||
957 | raise DistlibException('path must end with .egg-info or .egg, ' | ||
958 | 'got %r' % path) | ||
959 | |||
960 | if requires: | ||
961 | metadata.add_requirements(requires) | ||
962 | # look for top-level modules in top_level.txt, if present | ||
963 | if tl_data is None: | ||
964 | if tl_path is not None and os.path.exists(tl_path): | ||
965 | with open(tl_path, 'rb') as f: | ||
966 | tl_data = f.read().decode('utf-8') | ||
967 | if not tl_data: | ||
968 | tl_data = [] | ||
969 | else: | ||
970 | tl_data = tl_data.splitlines() | ||
971 | self.modules = tl_data | ||
972 | return metadata | ||
973 | |||
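An illustrative input for the nested parse_requires_data helper above; the expected output follows from the '%s%s' constraint rendering:

    data = 'alpha\nbeta >= 1.0, < 2.0\n[extra]\ngamma\n'
    # Scanning stops at the '[extra]' section header, so the result
    # should be roughly: ['alpha', 'beta (>=1.0, <2.0)']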
974 | def __repr__(self): | ||
975 | return '<EggInfoDistribution %r %s at %r>' % ( | ||
976 | self.name, self.version, self.path) | ||
977 | |||
978 | def __str__(self): | ||
979 | return "%s %s" % (self.name, self.version) | ||
980 | |||
981 | def check_installed_files(self): | ||
982 | """ | ||
983 | Checks that the hashes and sizes of the files in ``RECORD`` are | ||
984 | matched by the files themselves. Returns a (possibly empty) list of | ||
985 | mismatches. Each entry in the mismatch list will be a tuple consisting | ||
986 | of the path, 'exists', 'size' or 'hash' according to what didn't match | ||
987 | (existence is checked first, then size, then hash), the expected | ||
988 | value and the actual value. | ||
989 | """ | ||
990 | mismatches = [] | ||
991 | record_path = os.path.join(self.path, 'installed-files.txt') | ||
992 | if os.path.exists(record_path): | ||
993 | for path, _, _ in self.list_installed_files(): | ||
994 | if path == record_path: | ||
995 | continue | ||
996 | if not os.path.exists(path): | ||
997 | mismatches.append((path, 'exists', True, False)) | ||
998 | return mismatches | ||
999 | |||
1000 | def list_installed_files(self): | ||
1001 | """ | ||
1002 | Iterates over the ``installed-files.txt`` entries and returns a tuple | ||
1003 | ``(path, hash, size)`` for each line. | ||
1004 | |||
1005 | :returns: a list of (path, hash, size) | ||
1006 | """ | ||
1007 | |||
1008 | def _md5(path): | ||
1009 | with open(path, 'rb') as f: | ||
1010 | content = f.read() | ||
1011 | return hashlib.md5(content).hexdigest() | ||
1015 | |||
1016 | def _size(path): | ||
1017 | return os.stat(path).st_size | ||
1018 | |||
1019 | record_path = os.path.join(self.path, 'installed-files.txt') | ||
1020 | result = [] | ||
1021 | if os.path.exists(record_path): | ||
1022 | with codecs.open(record_path, 'r', encoding='utf-8') as f: | ||
1023 | for line in f: | ||
1024 | line = line.strip() | ||
1025 | p = os.path.normpath(os.path.join(self.path, line)) | ||
1026 | # "./" is present as a marker between installed files | ||
1027 | # and installation metadata files | ||
1028 | if not os.path.exists(p): | ||
1029 | logger.warning('Non-existent file: %s', p) | ||
1030 | if p.endswith(('.pyc', '.pyo')): | ||
1031 | continue | ||
1032 | # otherwise fall through and fail | ||
1033 | if not os.path.isdir(p): | ||
1034 | result.append((p, _md5(p), _size(p))) | ||
1035 | result.append((record_path, None, None)) | ||
1036 | return result | ||
1037 | |||
1038 | def list_distinfo_files(self, absolute=False): | ||
1039 | """ | ||
1040 | Iterates over the ``installed-files.txt`` entries and returns paths for | ||
1041 | each line if the path is pointing to a file located in the | ||
1042 | ``.egg-info`` directory or one of its subdirectories. | ||
1043 | |||
1044 | :parameter absolute: If *absolute* is ``True``, each returned path is | ||
1045 | transformed into a local absolute path. Otherwise the | ||
1046 | raw value from ``installed-files.txt`` is returned. | ||
1047 | :type absolute: boolean | ||
1048 | :returns: iterator of paths | ||
1049 | """ | ||
1050 | record_path = os.path.join(self.path, 'installed-files.txt') | ||
1051 | if os.path.exists(record_path): | ||
1052 | skip = True | ||
1053 | with codecs.open(record_path, 'r', encoding='utf-8') as f: | ||
1054 | for line in f: | ||
1055 | line = line.strip() | ||
1056 | if line == './': | ||
1057 | skip = False | ||
1058 | continue | ||
1059 | if not skip: | ||
1060 | p = os.path.normpath(os.path.join(self.path, line)) | ||
1061 | if p.startswith(self.path): | ||
1062 | if absolute: | ||
1063 | yield p | ||
1064 | else: | ||
1065 | yield line | ||
1066 | |||
1067 | def __eq__(self, other): | ||
1068 | return (isinstance(other, EggInfoDistribution) and | ||
1069 | self.path == other.path) | ||
1070 | |||
1071 | # See http://docs.python.org/reference/datamodel#object.__hash__ | ||
1072 | __hash__ = object.__hash__ | ||
1073 | |||
1074 | new_dist_class = InstalledDistribution | ||
1075 | old_dist_class = EggInfoDistribution | ||
1076 | |||
1077 | |||
1078 | class DependencyGraph(object): | ||
1079 | """ | ||
1080 | Represents a dependency graph between distributions. | ||
1081 | |||
1082 | The dependency relationships are stored in an ``adjacency_list`` that maps | ||
1083 | distributions to a list of ``(other, label)`` tuples where ``other`` | ||
1084 | is a distribution and the edge is labeled with ``label`` (i.e. the version | ||
1085 | specifier, if such was provided). Also, for more efficient traversal, for | ||
1086 | every distribution ``x``, a list of predecessors is kept in | ||
1087 | ``reverse_list[x]``. An edge from distribution ``a`` to | ||
1088 | distribution ``b`` means that ``a`` depends on ``b``. If any missing | ||
1089 | dependencies are found, they are stored in ``missing``, which is a | ||
1090 | dictionary that maps distributions to a list of requirements that were not | ||
1091 | provided by any other distributions. | ||
1092 | """ | ||
1093 | |||
1094 | def __init__(self): | ||
1095 | self.adjacency_list = {} | ||
1096 | self.reverse_list = {} | ||
1097 | self.missing = {} | ||
1098 | |||
1099 | def add_distribution(self, distribution): | ||
1100 | """Add the *distribution* to the graph. | ||
1101 | |||
1102 | :type distribution: :class:`distutils2.database.InstalledDistribution` | ||
1103 | or :class:`distutils2.database.EggInfoDistribution` | ||
1104 | """ | ||
1105 | self.adjacency_list[distribution] = [] | ||
1106 | self.reverse_list[distribution] = [] | ||
1107 | #self.missing[distribution] = [] | ||
1108 | |||
1109 | def add_edge(self, x, y, label=None): | ||
1110 | """Add an edge from distribution *x* to distribution *y* with the given | ||
1111 | *label*. | ||
1112 | |||
1113 | :type x: :class:`distutils2.database.InstalledDistribution` or | ||
1114 | :class:`distutils2.database.EggInfoDistribution` | ||
1115 | :type y: :class:`distutils2.database.InstalledDistribution` or | ||
1116 | :class:`distutils2.database.EggInfoDistribution` | ||
1117 | :type label: ``str`` or ``None`` | ||
1118 | """ | ||
1119 | self.adjacency_list[x].append((y, label)) | ||
1120 | # multiple edges are allowed, so be careful | ||
1121 | if x not in self.reverse_list[y]: | ||
1122 | self.reverse_list[y].append(x) | ||
1123 | |||
1124 | def add_missing(self, distribution, requirement): | ||
1125 | """ | ||
1126 | Add a missing *requirement* for the given *distribution*. | ||
1127 | |||
1128 | :type distribution: :class:`distutils2.database.InstalledDistribution` | ||
1129 | or :class:`distutils2.database.EggInfoDistribution` | ||
1130 | :type requirement: ``str`` | ||
1131 | """ | ||
1132 | logger.debug('%s missing %r', distribution, requirement) | ||
1133 | self.missing.setdefault(distribution, []).append(requirement) | ||
1134 | |||
1135 | def _repr_dist(self, dist): | ||
1136 | return '%s %s' % (dist.name, dist.version) | ||
1137 | |||
1138 | def repr_node(self, dist, level=1): | ||
1139 | """Prints only a subgraph""" | ||
1140 | output = [self._repr_dist(dist)] | ||
1141 | for other, label in self.adjacency_list[dist]: | ||
1142 | dist = self._repr_dist(other) | ||
1143 | if label is not None: | ||
1144 | dist = '%s [%s]' % (dist, label) | ||
1145 | output.append(' ' * level + str(dist)) | ||
1146 | suboutput = self.repr_node(other, level + 1) | ||
1147 | subs = suboutput.split('\n') | ||
1148 | output.extend(subs[1:]) | ||
1149 | return '\n'.join(output) | ||
1150 | |||
1151 | def to_dot(self, f, skip_disconnected=True): | ||
1152 | """Writes a DOT output for the graph to the provided file *f*. | ||
1153 | |||
1154 | If *skip_disconnected* is set to ``True``, then all distributions | ||
1155 | that are not dependent on any other distribution are skipped. | ||
1156 | |||
1157 | :type f: an object supporting ``file``-like write operations | ||
1158 | :type skip_disconnected: ``bool`` | ||
1159 | """ | ||
1160 | disconnected = [] | ||
1161 | |||
1162 | f.write("digraph dependencies {\n") | ||
1163 | for dist, adjs in self.adjacency_list.items(): | ||
1164 | if len(adjs) == 0 and not skip_disconnected: | ||
1165 | disconnected.append(dist) | ||
1166 | for other, label in adjs: | ||
1167 | if label is not None: | ||
1168 | f.write('"%s" -> "%s" [label="%s"]\n' % | ||
1169 | (dist.name, other.name, label)) | ||
1170 | else: | ||
1171 | f.write('"%s" -> "%s"\n' % (dist.name, other.name)) | ||
1172 | if not skip_disconnected and len(disconnected) > 0: | ||
1173 | f.write('subgraph disconnected {\n') | ||
1174 | f.write('label = "Disconnected"\n') | ||
1175 | f.write('bgcolor = red\n') | ||
1176 | |||
1177 | for dist in disconnected: | ||
1178 | f.write('"%s"' % dist.name) | ||
1179 | f.write('\n') | ||
1180 | f.write('}\n') | ||
1181 | f.write('}\n') | ||
1182 | |||
1183 | def topological_sort(self): | ||
1184 | """ | ||
1185 | Perform a topological sort of the graph. | ||
1186 | :return: A tuple, the first element of which is a topologically sorted | ||
1187 | list of distributions, and the second element of which is a | ||
1188 | list of distributions that cannot be sorted because they have | ||
1189 | circular dependencies and so form a cycle. | ||
1190 | """ | ||
1191 | result = [] | ||
1192 | # Make a shallow copy of the adjacency list | ||
1193 | alist = {} | ||
1194 | for k, v in self.adjacency_list.items(): | ||
1195 | alist[k] = v[:] | ||
1196 | while True: | ||
1197 | # See what we can remove in this run | ||
1198 | to_remove = [] | ||
1199 | for k, v in list(alist.items()): | ||
1200 | if not v: | ||
1201 | to_remove.append(k) | ||
1202 | del alist[k] | ||
1203 | if not to_remove: | ||
1204 | # What's left in alist (if anything) is a cycle. | ||
1205 | break | ||
1206 | # Remove from the adjacency list of others | ||
1207 | for k, v in alist.items(): | ||
1208 | alist[k] = [(d, r) for d, r in v if d not in to_remove] | ||
1209 | logger.debug('Moving to result: %s', | ||
1210 | ['%s (%s)' % (d.name, d.version) for d in to_remove]) | ||
1211 | result.extend(to_remove) | ||
1212 | return result, list(alist.keys()) | ||
1213 | |||
1214 | def __repr__(self): | ||
1215 | """Representation of the graph""" | ||
1216 | output = [] | ||
1217 | for dist, adjs in self.adjacency_list.items(): | ||
1218 | output.append(self.repr_node(dist)) | ||
1219 | return '\n'.join(output) | ||
1220 | |||
1221 | |||
1222 | def make_graph(dists, scheme='default'): | ||
1223 | """Makes a dependency graph from the given distributions. | ||
1224 | |||
1225 | :parameter dists: a list of distributions | ||
1226 | :type dists: list of :class:`distutils2.database.InstalledDistribution` and | ||
1227 | :class:`distutils2.database.EggInfoDistribution` instances | ||
1228 | :rtype: a :class:`DependencyGraph` instance | ||
1229 | """ | ||
1230 | scheme = get_scheme(scheme) | ||
1231 | graph = DependencyGraph() | ||
1232 | provided = {} # maps names to lists of (version, dist) tuples | ||
1233 | |||
1234 | # first, build the graph and find out what's provided | ||
1235 | for dist in dists: | ||
1236 | graph.add_distribution(dist) | ||
1237 | |||
1238 | for p in dist.provides: | ||
1239 | name, version = parse_name_and_version(p) | ||
1240 | logger.debug('Add to provided: %s, %s, %s', name, version, dist) | ||
1241 | provided.setdefault(name, []).append((version, dist)) | ||
1242 | |||
1243 | # now make the edges | ||
1244 | for dist in dists: | ||
1245 | requires = (dist.run_requires | dist.meta_requires | | ||
1246 | dist.build_requires | dist.dev_requires) | ||
1247 | for req in requires: | ||
1248 | try: | ||
1249 | matcher = scheme.matcher(req) | ||
1250 | except UnsupportedVersionError: | ||
1251 | # XXX compat-mode if cannot read the version | ||
1252 | logger.warning('could not read version %r - using name only', | ||
1253 | req) | ||
1254 | name = req.split()[0] | ||
1255 | matcher = scheme.matcher(name) | ||
1256 | |||
1257 | name = matcher.key # case-insensitive | ||
1258 | |||
1259 | matched = False | ||
1260 | if name in provided: | ||
1261 | for version, provider in provided[name]: | ||
1262 | try: | ||
1263 | match = matcher.match(version) | ||
1264 | except UnsupportedVersionError: | ||
1265 | match = False | ||
1266 | |||
1267 | if match: | ||
1268 | graph.add_edge(dist, provider, req) | ||
1269 | matched = True | ||
1270 | break | ||
1271 | if not matched: | ||
1272 | graph.add_missing(dist, req) | ||
1273 | return graph | ||
1274 | |||
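An end-to-end sketch combining make_dist (defined at the end of this module), make_graph and topological_sort; the names, versions and requirement string are illustrative:

    a = make_dist('alpha', '1.0')
    b = make_dist('beta', '2.0')
    a.metadata.add_requirements(['beta (>= 1.0)'])

    graph = make_graph([a, b])
    ordered, cycles = graph.topological_sort()
    # beta should sort before alpha, since alpha depends on beta;
    # graph.missing should stay empty when every requirement is satisfied.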
1275 | |||
1276 | def get_dependent_dists(dists, dist): | ||
1277 | """Recursively generate a list of distributions from *dists* that are | ||
1278 | dependent on *dist*. | ||
1279 | |||
1280 | :param dists: a list of distributions | ||
1281 | :param dist: a distribution, member of *dists* for which we are interested | ||
1282 | """ | ||
1283 | if dist not in dists: | ||
1284 | raise DistlibException('given distribution %r is not a member ' | ||
1285 | 'of the list' % dist.name) | ||
1286 | graph = make_graph(dists) | ||
1287 | |||
1288 | dep = [dist] # dependent distributions | ||
1289 | todo = graph.reverse_list[dist] # list of nodes we should inspect | ||
1290 | |||
1291 | while todo: | ||
1292 | d = todo.pop() | ||
1293 | dep.append(d) | ||
1294 | for succ in graph.reverse_list[d]: | ||
1295 | if succ not in dep: | ||
1296 | todo.append(succ) | ||
1297 | |||
1298 | dep.pop(0) # remove dist from dep, was there to prevent infinite loops | ||
1299 | return dep | ||
1300 | |||
1301 | |||
1302 | def get_required_dists(dists, dist): | ||
1303 | """Recursively generate a list of distributions from *dists* that are | ||
1304 | required by *dist*. | ||
1305 | |||
1306 | :param dists: a list of distributions | ||
1307 | :param dist: a distribution, member of *dists* for which we are interested | ||
1308 | """ | ||
1309 | if dist not in dists: | ||
1310 | raise DistlibException('given distribution %r is not a member ' | ||
1311 | 'of the list' % dist.name) | ||
1312 | graph = make_graph(dists) | ||
1313 | |||
1314 | req = [] # required distributions | ||
1315 | todo = graph.adjacency_list[dist] # list of nodes we should inspect | ||
1316 | |||
1317 | while todo: | ||
1318 | d = todo.pop()[0] | ||
1319 | req.append(d) | ||
1320 | for pred in graph.adjacency_list[d]: | ||
1321 | if pred not in req: | ||
1322 | todo.append(pred) | ||
1323 | |||
1324 | return req | ||
1325 | |||
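Continuing the sketch above, the two traversal helpers walk the graph in opposite directions:

    get_required_dists([a, b], a)    # expected: [b] - what alpha needs
    get_dependent_dists([a, b], b)   # expected: [a] - what needs beta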
1326 | |||
1327 | def make_dist(name, version, **kwargs): | ||
1328 | """ | ||
1329 | A convenience method for making a dist given just a name and version. | ||
1330 | """ | ||
1331 | summary = kwargs.pop('summary', 'Placeholder for summary') | ||
1332 | md = Metadata(**kwargs) | ||
1333 | md.name = name | ||
1334 | md.version = version | ||
1335 | md.summary = summary or 'Placeholder for summary' | ||
1336 | return Distribution(md) | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/index.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/index.py new file mode 100644 index 0000000..7197238 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/index.py | |||
@@ -0,0 +1,516 @@ | |||
1 | # -*- coding: utf-8 -*- | ||
2 | # | ||
3 | # Copyright (C) 2013 Vinay Sajip. | ||
4 | # Licensed to the Python Software Foundation under a contributor agreement. | ||
5 | # See LICENSE.txt and CONTRIBUTORS.txt. | ||
6 | # | ||
7 | import hashlib | ||
8 | import logging | ||
9 | import os | ||
10 | import shutil | ||
11 | import subprocess | ||
12 | import tempfile | ||
13 | try: | ||
14 | from threading import Thread | ||
15 | except ImportError: | ||
16 | from dummy_threading import Thread | ||
17 | |||
18 | from . import DistlibException | ||
19 | from .compat import (HTTPBasicAuthHandler, Request, HTTPPasswordMgr, | ||
20 | urlparse, build_opener, string_types) | ||
21 | from .util import cached_property, zip_dir, ServerProxy | ||
22 | |||
23 | logger = logging.getLogger(__name__) | ||
24 | |||
25 | DEFAULT_INDEX = 'https://pypi.python.org/pypi' | ||
26 | DEFAULT_REALM = 'pypi' | ||
27 | |||
28 | class PackageIndex(object): | ||
29 | """ | ||
30 | This class represents a package index compatible with PyPI, the Python | ||
31 | Package Index. | ||
32 | """ | ||
33 | |||
34 | boundary = b'----------ThIs_Is_tHe_distlib_index_bouNdaRY_$' | ||
35 | |||
36 | def __init__(self, url=None): | ||
37 | """ | ||
38 | Initialise an instance. | ||
39 | |||
40 | :param url: The URL of the index. If not specified, the URL for PyPI is | ||
41 | used. | ||
42 | """ | ||
43 | self.url = url or DEFAULT_INDEX | ||
44 | self.read_configuration() | ||
45 | scheme, netloc, path, params, query, frag = urlparse(self.url) | ||
46 | if params or query or frag or scheme not in ('http', 'https'): | ||
47 | raise DistlibException('invalid repository: %s' % self.url) | ||
48 | self.password_handler = None | ||
49 | self.ssl_verifier = None | ||
50 | self.gpg = None | ||
51 | self.gpg_home = None | ||
52 | with open(os.devnull, 'w') as sink: | ||
53 | # Use gpg by default rather than gpg2, as gpg2 insists on | ||
54 | # prompting for passwords | ||
55 | for s in ('gpg', 'gpg2'): | ||
56 | try: | ||
57 | rc = subprocess.check_call([s, '--version'], stdout=sink, | ||
58 | stderr=sink) | ||
59 | if rc == 0: | ||
60 | self.gpg = s | ||
61 | break | ||
62 | except OSError: | ||
63 | pass | ||
64 | |||
65 | def _get_pypirc_command(self): | ||
66 | """ | ||
67 | Get the distutils command for interacting with PyPI configurations. | ||
68 | :return: the command. | ||
69 | """ | ||
70 | from distutils.core import Distribution | ||
71 | from distutils.config import PyPIRCCommand | ||
72 | d = Distribution() | ||
73 | return PyPIRCCommand(d) | ||
74 | |||
75 | def read_configuration(self): | ||
76 | """ | ||
77 | Read the PyPI access configuration as supported by distutils, getting | ||
78 | PyPI to do the actual work. This populates ``username``, ``password``, | ||
79 | ``realm`` and ``url`` attributes from the configuration. | ||
80 | """ | ||
81 | # get distutils to do the work | ||
82 | c = self._get_pypirc_command() | ||
83 | c.repository = self.url | ||
84 | cfg = c._read_pypirc() | ||
85 | self.username = cfg.get('username') | ||
86 | self.password = cfg.get('password') | ||
87 | self.realm = cfg.get('realm', 'pypi') | ||
88 | self.url = cfg.get('repository', self.url) | ||
89 | |||
90 | def save_configuration(self): | ||
91 | """ | ||
92 | Save the PyPI access configuration. You must have set ``username`` and | ||
93 | ``password`` attributes before calling this method. | ||
94 | |||
95 | Again, distutils is used to do the actual work. | ||
96 | """ | ||
97 | self.check_credentials() | ||
98 | # get distutils to do the work | ||
99 | c = self._get_pypirc_command() | ||
100 | c._store_pypirc(self.username, self.password) | ||
101 | |||
102 | def check_credentials(self): | ||
103 | """ | ||
104 | Check that ``username`` and ``password`` have been set, and raise an | ||
105 | exception if not. | ||
106 | """ | ||
107 | if self.username is None or self.password is None: | ||
108 | raise DistlibException('username and password must be set') | ||
109 | pm = HTTPPasswordMgr() | ||
110 | _, netloc, _, _, _, _ = urlparse(self.url) | ||
111 | pm.add_password(self.realm, netloc, self.username, self.password) | ||
112 | self.password_handler = HTTPBasicAuthHandler(pm) | ||
113 | |||
114 | def register(self, metadata): | ||
115 | """ | ||
116 | Register a distribution on PyPI, using the provided metadata. | ||
117 | |||
118 | :param metadata: A :class:`Metadata` instance defining at least a name | ||
119 | and version number for the distribution to be | ||
120 | registered. | ||
121 | :return: The HTTP response received from PyPI upon submission of the | ||
122 | request. | ||
123 | """ | ||
124 | self.check_credentials() | ||
125 | metadata.validate() | ||
126 | d = metadata.todict() | ||
127 | d[':action'] = 'verify' | ||
128 | request = self.encode_request(d.items(), []) | ||
129 | response = self.send_request(request) | ||
130 | d[':action'] = 'submit' | ||
131 | request = self.encode_request(d.items(), []) | ||
132 | return self.send_request(request) | ||
133 | |||
134 | def _reader(self, name, stream, outbuf): | ||
135 | """ | ||
136 | Thread runner for reading lines from a subprocess into a buffer. | ||
137 | |||
138 | :param name: The logical name of the stream (used for logging only). | ||
139 | :param stream: The stream to read from. This will typically be a pipe | ||
140 | connected to the output stream of a subprocess. | ||
141 | :param outbuf: The list to append the read lines to. | ||
142 | """ | ||
143 | while True: | ||
144 | s = stream.readline() | ||
145 | if not s: | ||
146 | break | ||
147 | s = s.decode('utf-8').rstrip() | ||
148 | outbuf.append(s) | ||
149 | logger.debug('%s: %s', name, s) | ||
150 | stream.close() | ||
151 | |||
152 | def get_sign_command(self, filename, signer, sign_password, | ||
153 | keystore=None): | ||
154 | """ | ||
155 | Return a suitable command for signing a file. | ||
156 | |||
157 | :param filename: The pathname to the file to be signed. | ||
158 | :param signer: The identifier of the signer of the file. | ||
159 | :param sign_password: The passphrase for the signer's | ||
160 | private key used for signing. | ||
161 | :param keystore: The path to a directory which contains the keys | ||
162 | used in verification. If not specified, the | ||
163 | instance's ``gpg_home`` attribute is used instead. | ||
164 | :return: The signing command as a list suitable to be | ||
165 | passed to :class:`subprocess.Popen`. | ||
166 | """ | ||
167 | cmd = [self.gpg, '--status-fd', '2', '--no-tty'] | ||
168 | if keystore is None: | ||
169 | keystore = self.gpg_home | ||
170 | if keystore: | ||
171 | cmd.extend(['--homedir', keystore]) | ||
172 | if sign_password is not None: | ||
173 | cmd.extend(['--batch', '--passphrase-fd', '0']) | ||
174 | td = tempfile.mkdtemp() | ||
175 | sf = os.path.join(td, os.path.basename(filename) + '.asc') | ||
176 | cmd.extend(['--detach-sign', '--armor', '--local-user', | ||
177 | signer, '--output', sf, filename]) | ||
178 | logger.debug('invoking: %s', ' '.join(cmd)) | ||
179 | return cmd, sf | ||
180 | |||
181 | def run_command(self, cmd, input_data=None): | ||
182 | """ | ||
183 | Run a command in a child process, passing it any input data specified. | ||
184 | |||
185 | :param cmd: The command to run. | ||
186 | :param input_data: If specified, this must be a byte string containing | ||
187 | data to be sent to the child process. | ||
188 | :return: A tuple consisting of the subprocess' exit code, a list of | ||
189 | lines read from the subprocess' ``stdout``, and a list of | ||
190 | lines read from the subprocess' ``stderr``. | ||
191 | """ | ||
192 | kwargs = { | ||
193 | 'stdout': subprocess.PIPE, | ||
194 | 'stderr': subprocess.PIPE, | ||
195 | } | ||
196 | if input_data is not None: | ||
197 | kwargs['stdin'] = subprocess.PIPE | ||
198 | stdout = [] | ||
199 | stderr = [] | ||
200 | p = subprocess.Popen(cmd, **kwargs) | ||
201 | # We don't use communicate() here because we may need to | ||
202 | # get clever with interacting with the command | ||
203 | t1 = Thread(target=self._reader, args=('stdout', p.stdout, stdout)) | ||
204 | t1.start() | ||
205 | t2 = Thread(target=self._reader, args=('stderr', p.stderr, stderr)) | ||
206 | t2.start() | ||
207 | if input_data is not None: | ||
208 | p.stdin.write(input_data) | ||
209 | p.stdin.close() | ||
210 | |||
211 | p.wait() | ||
212 | t1.join() | ||
213 | t2.join() | ||
214 | return p.returncode, stdout, stderr | ||
215 | |||
216 | def sign_file(self, filename, signer, sign_password, keystore=None): | ||
217 | """ | ||
218 | Sign a file. | ||
219 | |||
220 | :param filename: The pathname to the file to be signed. | ||
221 | :param signer: The identifier of the signer of the file. | ||
222 | :param sign_password: The passphrase for the signer's | ||
223 | private key used for signing. | ||
224 | :param keystore: The path to a directory which contains the keys | ||
225 | used in signing. If not specified, the instance's | ||
226 | ``gpg_home`` attribute is used instead. | ||
227 | :return: The absolute pathname of the file where the signature is | ||
228 | stored. | ||
229 | """ | ||
230 | cmd, sig_file = self.get_sign_command(filename, signer, sign_password, | ||
231 | keystore) | ||
232 | rc, stdout, stderr = self.run_command(cmd, | ||
233 | sign_password.encode('utf-8')) | ||
234 | if rc != 0: | ||
235 | raise DistlibException('sign command failed with error ' | ||
236 | 'code %s' % rc) | ||
237 | return sig_file | ||
238 | |||
239 | def upload_file(self, metadata, filename, signer=None, sign_password=None, | ||
240 | filetype='sdist', pyversion='source', keystore=None): | ||
241 | """ | ||
242 | Upload a release file to the index. | ||
243 | |||
244 | :param metadata: A :class:`Metadata` instance defining at least a name | ||
245 | and version number for the file to be uploaded. | ||
246 | :param filename: The pathname of the file to be uploaded. | ||
247 | :param signer: The identifier of the signer of the file. | ||
248 | :param sign_password: The passphrase for the signer's | ||
249 | private key used for signing. | ||
250 | :param filetype: The type of the file being uploaded. This is the | ||
251 | distutils command which produced that file, e.g. | ||
252 | ``sdist`` or ``bdist_wheel``. | ||
253 | :param pyversion: The version of Python which the release relates | ||
254 | to. For code compatible with any Python, this would | ||
255 | be ``source``, otherwise it would be e.g. ``3.2``. | ||
256 | :param keystore: The path to a directory which contains the keys | ||
257 | used in signing. If not specified, the instance's | ||
258 | ``gpg_home`` attribute is used instead. | ||
259 | :return: The HTTP response received from PyPI upon submission of the | ||
260 | request. | ||
261 | """ | ||
262 | self.check_credentials() | ||
263 | if not os.path.exists(filename): | ||
264 | raise DistlibException('not found: %s' % filename) | ||
265 | metadata.validate() | ||
266 | d = metadata.todict() | ||
267 | sig_file = None | ||
268 | if signer: | ||
269 | if not self.gpg: | ||
270 | logger.warning('no signing program available - not signed') | ||
271 | else: | ||
272 | sig_file = self.sign_file(filename, signer, sign_password, | ||
273 | keystore) | ||
274 | with open(filename, 'rb') as f: | ||
275 | file_data = f.read() | ||
276 | md5_digest = hashlib.md5(file_data).hexdigest() | ||
277 | sha256_digest = hashlib.sha256(file_data).hexdigest() | ||
278 | d.update({ | ||
279 | ':action': 'file_upload', | ||
280 | 'protocol_version': '1', | ||
281 | 'filetype': filetype, | ||
282 | 'pyversion': pyversion, | ||
283 | 'md5_digest': md5_digest, | ||
284 | 'sha256_digest': sha256_digest, | ||
285 | }) | ||
286 | files = [('content', os.path.basename(filename), file_data)] | ||
287 | if sig_file: | ||
288 | with open(sig_file, 'rb') as f: | ||
289 | sig_data = f.read() | ||
290 | files.append(('gpg_signature', os.path.basename(sig_file), | ||
291 | sig_data)) | ||
292 | shutil.rmtree(os.path.dirname(sig_file)) | ||
293 | request = self.encode_request(d.items(), files) | ||
294 | return self.send_request(request) | ||
295 | |||
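A hypothetical upload sketch; the URL, credentials and filename are placeholders (real credentials would normally come from ~/.pypirc via read_configuration):

    from pip._vendor.distlib.index import PackageIndex
    from pip._vendor.distlib.metadata import Metadata

    index = PackageIndex('https://test.pypi.org/legacy/')  # placeholder URL
    index.username, index.password = 'alice', 'secret'     # placeholders
    md = Metadata()
    md.name, md.version = 'example-pkg', '0.1'
    md.summary = 'Placeholder summary'
    response = index.upload_file(md, 'dist/example_pkg-0.1.tar.gz')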
296 | def upload_documentation(self, metadata, doc_dir): | ||
297 | """ | ||
298 | Upload documentation to the index. | ||
299 | |||
300 | :param metadata: A :class:`Metadata` instance defining at least a name | ||
301 | and version number for the documentation to be | ||
302 | uploaded. | ||
303 | :param doc_dir: The pathname of the directory which contains the | ||
304 | documentation. This should be the directory that | ||
305 | contains the ``index.html`` for the documentation. | ||
306 | :return: The HTTP response received from PyPI upon submission of the | ||
307 | request. | ||
308 | """ | ||
309 | self.check_credentials() | ||
310 | if not os.path.isdir(doc_dir): | ||
311 | raise DistlibException('not a directory: %r' % doc_dir) | ||
312 | fn = os.path.join(doc_dir, 'index.html') | ||
313 | if not os.path.exists(fn): | ||
314 | raise DistlibException('not found: %r' % fn) | ||
315 | metadata.validate() | ||
316 | name, version = metadata.name, metadata.version | ||
317 | zip_data = zip_dir(doc_dir).getvalue() | ||
318 | fields = [(':action', 'doc_upload'), | ||
319 | ('name', name), ('version', version)] | ||
320 | files = [('content', name, zip_data)] | ||
321 | request = self.encode_request(fields, files) | ||
322 | return self.send_request(request) | ||
323 | |||
324 | def get_verify_command(self, signature_filename, data_filename, | ||
325 | keystore=None): | ||
326 | """ | ||
327 | Return a suitable command for verifying a file. | ||
328 | |||
329 | :param signature_filename: The pathname to the file containing the | ||
330 | signature. | ||
331 | :param data_filename: The pathname to the file containing the | ||
332 | signed data. | ||
333 | :param keystore: The path to a directory which contains the keys | ||
334 | used in verification. If not specified, the | ||
335 | instance's ``gpg_home`` attribute is used instead. | ||
336 | :return: The verifying command as a list suitable to be | ||
337 | passed to :class:`subprocess.Popen`. | ||
338 | """ | ||
339 | cmd = [self.gpg, '--status-fd', '2', '--no-tty'] | ||
340 | if keystore is None: | ||
341 | keystore = self.gpg_home | ||
342 | if keystore: | ||
343 | cmd.extend(['--homedir', keystore]) | ||
344 | cmd.extend(['--verify', signature_filename, data_filename]) | ||
345 | logger.debug('invoking: %s', ' '.join(cmd)) | ||
346 | return cmd | ||
347 | |||
348 | def verify_signature(self, signature_filename, data_filename, | ||
349 | keystore=None): | ||
350 | """ | ||
351 | Verify a signature for a file. | ||
352 | |||
353 | :param signature_filename: The pathname to the file containing the | ||
354 | signature. | ||
355 | :param data_filename: The pathname to the file containing the | ||
356 | signed data. | ||
357 | :param keystore: The path to a directory which contains the keys | ||
358 | used in verification. If not specified, the | ||
359 | instance's ``gpg_home`` attribute is used instead. | ||
360 | :return: True if the signature was verified, else False. | ||
361 | """ | ||
362 | if not self.gpg: | ||
363 | raise DistlibException('verification unavailable because gpg ' | ||
364 | 'unavailable') | ||
365 | cmd = self.get_verify_command(signature_filename, data_filename, | ||
366 | keystore) | ||
367 | rc, stdout, stderr = self.run_command(cmd) | ||
368 | if rc not in (0, 1): | ||
369 | raise DistlibException('verify command failed with error ' | ||
370 | 'code %s' % rc) | ||
371 | return rc == 0 | ||
372 | |||
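A signing-and-verification sketch reusing the index instance from the upload sketch above; the key identifier, passphrase and path are placeholders, and gpg must be on PATH:

    sig = index.sign_file('dist/example_pkg-0.1.tar.gz',
                          'alice@example.com', 'secret')    # placeholders
    ok = index.verify_signature(sig, 'dist/example_pkg-0.1.tar.gz')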
373 | def download_file(self, url, destfile, digest=None, reporthook=None): | ||
374 | """ | ||
375 | This is a convenience method for downloading a file from a URL. | ||
376 | Normally, this will be a file from the index, though currently | ||
377 | no check is made for this (i.e. a file can be downloaded from | ||
378 | anywhere). | ||
379 | |||
380 | The method is just like the :func:`urlretrieve` function in the | ||
381 | standard library, except that it allows digest computation to be | ||
382 | done during download, and checks that the downloaded data | ||
383 | matches any expected value. | ||
384 | |||
385 | :param url: The URL of the file to be downloaded (assumed to be | ||
386 | available via an HTTP GET request). | ||
387 | :param destfile: The pathname where the downloaded file is to be | ||
388 | saved. | ||
389 | :param digest: If specified, this must be a (hasher, value) | ||
390 | tuple, where hasher is the algorithm used (e.g. | ||
391 | ``'md5'``) and ``value`` is the expected value. | ||
392 | :param reporthook: The same as for :func:`urlretrieve` in the | ||
393 | standard library. | ||
394 | """ | ||
395 | if digest is None: | ||
396 | digester = None | ||
397 | logger.debug('No digest specified') | ||
398 | else: | ||
399 | if isinstance(digest, (list, tuple)): | ||
400 | hasher, digest = digest | ||
401 | else: | ||
402 | hasher = 'md5' | ||
403 | digester = getattr(hashlib, hasher)() | ||
404 | logger.debug('Digest specified: %s', digest) | ||
405 | # The following code is equivalent to urlretrieve. | ||
406 | # We need to do it this way so that we can compute the | ||
407 | # digest of the file as we go. | ||
408 | with open(destfile, 'wb') as dfp: | ||
409 | # addinfourl is not a context manager on 2.x | ||
410 | # so we have to use try/finally | ||
411 | sfp = self.send_request(Request(url)) | ||
412 | try: | ||
413 | headers = sfp.info() | ||
414 | blocksize = 8192 | ||
415 | size = -1 | ||
416 | read = 0 | ||
417 | blocknum = 0 | ||
418 | if "content-length" in headers: | ||
419 | size = int(headers["Content-Length"]) | ||
420 | if reporthook: | ||
421 | reporthook(blocknum, blocksize, size) | ||
422 | while True: | ||
423 | block = sfp.read(blocksize) | ||
424 | if not block: | ||
425 | break | ||
426 | read += len(block) | ||
427 | dfp.write(block) | ||
428 | if digester: | ||
429 | digester.update(block) | ||
430 | blocknum += 1 | ||
431 | if reporthook: | ||
432 | reporthook(blocknum, blocksize, size) | ||
433 | finally: | ||
434 | sfp.close() | ||
435 | |||
436 | # check that we got the whole file, if we can | ||
437 | if size >= 0 and read < size: | ||
438 | raise DistlibException( | ||
439 | 'retrieval incomplete: got only %d out of %d bytes' | ||
440 | % (read, size)) | ||
441 | # if we have a digest, it must match. | ||
442 | if digester: | ||
443 | actual = digester.hexdigest() | ||
444 | if digest != actual: | ||
445 | raise DistlibException('%s digest mismatch for %s: expected ' | ||
446 | '%s, got %s' % (hasher, destfile, | ||
447 | digest, actual)) | ||
448 | logger.debug('Digest verified: %s', digest) | ||
449 | |||
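A download sketch showing the (hasher, value) digest form; the URL and digest value are placeholders:

    index.download_file('https://example.com/pkg-1.0.tar.gz',  # placeholder
                        'pkg-1.0.tar.gz',
                        digest=('sha256', 'expected-hex-digest'))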
450 | def send_request(self, req): | ||
451 | """ | ||
452 | Send a standard library :class:`Request` to PyPI and return its | ||
453 | response. | ||
454 | |||
455 | :param req: The request to send. | ||
456 | :return: The HTTP response from PyPI (a standard library HTTPResponse). | ||
457 | """ | ||
458 | handlers = [] | ||
459 | if self.password_handler: | ||
460 | handlers.append(self.password_handler) | ||
461 | if self.ssl_verifier: | ||
462 | handlers.append(self.ssl_verifier) | ||
463 | opener = build_opener(*handlers) | ||
464 | return opener.open(req) | ||
465 | |||
466 | def encode_request(self, fields, files): | ||
467 | """ | ||
468 | Encode fields and files for posting to an HTTP server. | ||
469 | |||
470 | :param fields: The fields to send as a list of (fieldname, value) | ||
471 | tuples. | ||
472 | :param files: The files to send as a list of (fieldname, filename, | ||
473 | file_bytes) tuples. | ||
474 | """ | ||
475 | # Adapted from packaging, which in turn was adapted from | ||
476 | # http://code.activestate.com/recipes/146306 | ||
477 | |||
478 | parts = [] | ||
479 | boundary = self.boundary | ||
480 | for k, values in fields: | ||
481 | if not isinstance(values, (list, tuple)): | ||
482 | values = [values] | ||
483 | |||
484 | for v in values: | ||
485 | parts.extend(( | ||
486 | b'--' + boundary, | ||
487 | ('Content-Disposition: form-data; name="%s"' % | ||
488 | k).encode('utf-8'), | ||
489 | b'', | ||
490 | v.encode('utf-8'))) | ||
491 | for key, filename, value in files: | ||
492 | parts.extend(( | ||
493 | b'--' + boundary, | ||
494 | ('Content-Disposition: form-data; name="%s"; filename="%s"' % | ||
495 | (key, filename)).encode('utf-8'), | ||
496 | b'', | ||
497 | value)) | ||
498 | |||
499 | parts.extend((b'--' + boundary + b'--', b'')) | ||
500 | |||
501 | body = b'\r\n'.join(parts) | ||
502 | ct = b'multipart/form-data; boundary=' + boundary | ||
503 | headers = { | ||
504 | 'Content-type': ct, | ||
505 | 'Content-length': str(len(body)) | ||
506 | } | ||
507 | return Request(self.url, body, headers) | ||
508 | |||
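A hedged usage sketch of the encoder above, assuming (as the vendored layout suggests) that these methods belong to distlib.index.PackageIndex; the field and file values are made up:

    from pip._vendor.distlib.index import PackageIndex

    index = PackageIndex()                              # defaults to the PyPI index URL
    fields = [(':action', 'submit')]                    # hypothetical form field
    files = [('content', 'foo-1.0.tar.gz', b'...')]     # hypothetical upload payload
    req = index.encode_request(fields, files)           # a urllib Request for send_request()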
509 | def search(self, terms, operator=None): | ||
510 | if isinstance(terms, string_types): | ||
511 | terms = {'name': terms} | ||
512 | rpc_proxy = ServerProxy(self.url, timeout=3.0) | ||
513 | try: | ||
514 | return rpc_proxy.search(terms, operator or 'and') | ||
515 | finally: | ||
516 | rpc_proxy('close')() | ||
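And the search method in use, hedged the same way; note that PyPI has since deprecated its XML-RPC search API, so this may fail against the live index:

    from pip._vendor.distlib.index import PackageIndex

    index = PackageIndex()
    hits = index.search('requests')                     # shorthand for {'name': 'requests'}
    hits = index.search({'name': 'requests'}, operator='or')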
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/locators.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/locators.py new file mode 100644 index 0000000..9131b77 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/locators.py | |||
@@ -0,0 +1,1292 @@ | |||
1 | # -*- coding: utf-8 -*- | ||
2 | # | ||
3 | # Copyright (C) 2012-2015 Vinay Sajip. | ||
4 | # Licensed to the Python Software Foundation under a contributor agreement. | ||
5 | # See LICENSE.txt and CONTRIBUTORS.txt. | ||
6 | # | ||
7 | |||
8 | import gzip | ||
9 | from io import BytesIO | ||
10 | import json | ||
11 | import logging | ||
12 | import os | ||
13 | import posixpath | ||
14 | import re | ||
15 | try: | ||
16 | import threading | ||
17 | except ImportError: # pragma: no cover | ||
18 | import dummy_threading as threading | ||
19 | import zlib | ||
20 | |||
21 | from . import DistlibException | ||
22 | from .compat import (urljoin, urlparse, urlunparse, url2pathname, pathname2url, | ||
23 | queue, quote, unescape, string_types, build_opener, | ||
24 | HTTPRedirectHandler as BaseRedirectHandler, text_type, | ||
25 | Request, HTTPError, URLError) | ||
26 | from .database import Distribution, DistributionPath, make_dist | ||
27 | from .metadata import Metadata, MetadataInvalidError | ||
28 | from .util import (cached_property, parse_credentials, ensure_slash, | ||
29 | split_filename, get_project_data, parse_requirement, | ||
30 | parse_name_and_version, ServerProxy, normalize_name) | ||
31 | from .version import get_scheme, UnsupportedVersionError | ||
32 | from .wheel import Wheel, is_compatible | ||
33 | |||
34 | logger = logging.getLogger(__name__) | ||
35 | |||
36 | HASHER_HASH = re.compile(r'^(\w+)=([a-f0-9]+)') | ||
37 | CHARSET = re.compile(r';\s*charset\s*=\s*(.*)\s*$', re.I) | ||
38 | HTML_CONTENT_TYPE = re.compile('text/html|application/x(ht)?ml') | ||
39 | DEFAULT_INDEX = 'https://pypi.python.org/pypi' | ||
40 | |||
41 | def get_all_distribution_names(url=None): | ||
42 | """ | ||
43 | Return all distribution names known to an index. | ||
44 | :param url: The URL of the index. | ||
45 | :return: A list of all known distribution names. | ||
46 | """ | ||
47 | if url is None: | ||
48 | url = DEFAULT_INDEX | ||
49 | client = ServerProxy(url, timeout=3.0) | ||
50 | try: | ||
51 | return client.list_packages() | ||
52 | finally: | ||
53 | client('close')() | ||
54 | |||
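A minimal usage sketch; this performs a live XML-RPC call (an API PyPI has since deprecated):

    from pip._vendor.distlib.locators import get_all_distribution_names

    names = get_all_distribution_names()    # queries DEFAULT_INDEX
    print(len(names), 'distributions known to the index')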
55 | class RedirectHandler(BaseRedirectHandler): | ||
56 | """ | ||
57 | A class to work around a bug in some Python 3.2.x releases. | ||
58 | """ | ||
59 | # There's a bug in the base version for some 3.2.x | ||
60 | # (e.g. 3.2.2 on Ubuntu Oneiric). If a Location header | ||
61 | # returns e.g. /abc, it bails because it says the scheme '' | ||
62 | # is bogus, when actually it should use the request's | ||
63 | # URL for the scheme. See Python issue #13696. | ||
64 | def http_error_302(self, req, fp, code, msg, headers): | ||
65 | # Some servers (incorrectly) return multiple Location headers | ||
66 | # (so probably same goes for URI). Use first header. | ||
67 | newurl = None | ||
68 | for key in ('location', 'uri'): | ||
69 | if key in headers: | ||
70 | newurl = headers[key] | ||
71 | break | ||
72 | if newurl is None: # pragma: no cover | ||
73 | return | ||
74 | urlparts = urlparse(newurl) | ||
75 | if urlparts.scheme == '': | ||
76 | newurl = urljoin(req.get_full_url(), newurl) | ||
77 | if hasattr(headers, 'replace_header'): | ||
78 | headers.replace_header(key, newurl) | ||
79 | else: | ||
80 | headers[key] = newurl | ||
81 | return BaseRedirectHandler.http_error_302(self, req, fp, code, msg, | ||
82 | headers) | ||
83 | |||
84 | http_error_301 = http_error_303 = http_error_307 = http_error_302 | ||
85 | |||
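The bug being worked around is easy to see with urljoin, which is what the handler effectively falls back to for scheme-less Location values:

    from pip._vendor.distlib.compat import urljoin

    # A relative redirect target has no scheme, so it must be resolved
    # against the URL of the request that triggered the redirect:
    urljoin('https://pypi.python.org/simple/foo/', '/abc')
    # -> 'https://pypi.python.org/abc'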
86 | class Locator(object): | ||
87 | """ | ||
88 | A base class for locators - things that locate distributions. | ||
89 | """ | ||
90 | source_extensions = ('.tar.gz', '.tar.bz2', '.tar', '.zip', '.tgz', '.tbz') | ||
91 | binary_extensions = ('.egg', '.exe', '.whl') | ||
92 | excluded_extensions = ('.pdf',) | ||
93 | |||
94 | # A list of tags indicating which wheels you want to match. The default | ||
95 | # value of None matches against the tags compatible with the running | ||
96 | # Python. If you want to match other values, set wheel_tags on a locator | ||
97 | # instance to a list of tuples (pyver, abi, arch) which you want to match. | ||
98 | wheel_tags = None | ||
99 | |||
100 | downloadable_extensions = source_extensions + ('.whl',) | ||
101 | |||
102 | def __init__(self, scheme='default'): | ||
103 | """ | ||
104 | Initialise an instance. | ||
105 | :param scheme: Because locators look for the most recent versions, they | ||
106 | need to know the version scheme to use. The default is | ||
107 | the current PEP-recommended scheme - use ``'legacy'`` | ||
108 | if you need to support existing distributions on PyPI. | ||
109 | """ | ||
110 | self._cache = {} | ||
111 | self.scheme = scheme | ||
112 | # Because of bugs in some of the handlers on some of the platforms, | ||
113 | # we use our own opener rather than just using urlopen. | ||
114 | self.opener = build_opener(RedirectHandler()) | ||
115 | # If get_project() is called from locate(), the matcher instance | ||
116 | # is set from the requirement passed to locate(). See issue #18 for | ||
117 | # why this can be useful to know. | ||
118 | self.matcher = None | ||
119 | self.errors = queue.Queue() | ||
120 | |||
121 | def get_errors(self): | ||
122 | """ | ||
123 | Return any errors which have occurred. | ||
124 | """ | ||
125 | result = [] | ||
126 | while not self.errors.empty(): # pragma: no cover | ||
127 | try: | ||
128 | e = self.errors.get(False) | ||
129 | result.append(e) | ||
130 | except queue.Empty: # Queue instances have no 'Empty' attribute | ||
131 | continue | ||
132 | self.errors.task_done() | ||
133 | return result | ||
134 | |||
135 | def clear_errors(self): | ||
136 | """ | ||
137 | Clear any errors which may have been recorded. | ||
138 | """ | ||
139 | # Just get the errors and throw them away | ||
140 | self.get_errors() | ||
141 | |||
142 | def clear_cache(self): | ||
143 | self._cache.clear() | ||
144 | |||
145 | def _get_scheme(self): | ||
146 | return self._scheme | ||
147 | |||
148 | def _set_scheme(self, value): | ||
149 | self._scheme = value | ||
150 | |||
151 | scheme = property(_get_scheme, _set_scheme) | ||
152 | |||
153 | def _get_project(self, name): | ||
154 | """ | ||
155 | For a given project, get a dictionary mapping available versions to Distribution | ||
156 | instances. | ||
157 | |||
158 | This should be implemented in subclasses. | ||
159 | |||
160 | If called from a locate() request, self.matcher will be set to a | ||
161 | matcher for the requirement to satisfy, otherwise it will be None. | ||
162 | """ | ||
163 | raise NotImplementedError('Please implement in the subclass') | ||
164 | |||
165 | def get_distribution_names(self): | ||
166 | """ | ||
167 | Return all the distribution names known to this locator. | ||
168 | """ | ||
169 | raise NotImplementedError('Please implement in the subclass') | ||
170 | |||
171 | def get_project(self, name): | ||
172 | """ | ||
173 | For a given project, get a dictionary mapping available versions to Distribution | ||
174 | instances. | ||
175 | |||
176 | This calls _get_project to do all the work, and just implements a caching layer on top. | ||
177 | """ | ||
178 | if self._cache is None: # pragma: no cover | ||
179 | result = self._get_project(name) | ||
180 | elif name in self._cache: | ||
181 | result = self._cache[name] | ||
182 | else: | ||
183 | self.clear_errors() | ||
184 | result = self._get_project(name) | ||
185 | self._cache[name] = result | ||
186 | return result | ||
187 | |||
188 | def score_url(self, url): | ||
189 | """ | ||
190 | Give a URL a score which can be used to choose preferred URLs | ||
191 | for a given project release. | ||
192 | """ | ||
193 | t = urlparse(url) | ||
194 | basename = posixpath.basename(t.path) | ||
195 | compatible = True | ||
196 | is_wheel = basename.endswith('.whl') | ||
197 | is_downloadable = basename.endswith(self.downloadable_extensions) | ||
198 | if is_wheel: | ||
199 | compatible = is_compatible(Wheel(basename), self.wheel_tags) | ||
200 | return (t.scheme == 'https', 'pypi.python.org' in t.netloc, | ||
201 | is_downloadable, is_wheel, compatible, basename) | ||
202 | |||
203 | def prefer_url(self, url1, url2): | ||
204 | """ | ||
205 | Choose one of two URLs where both are candidates for distribution | ||
206 | archives for the same version of a distribution (for example, | ||
207 | .tar.gz vs. zip). | ||
208 | |||
209 | The current implementation favours https:// URLs over http://, archives | ||
210 | from PyPI over those from other locations, wheel compatibility (if a | ||
211 | wheel) and then the archive name. | ||
212 | """ | ||
213 | result = url2 | ||
214 | if url1: | ||
215 | s1 = self.score_url(url1) | ||
216 | s2 = self.score_url(url2) | ||
217 | if s1 > s2: | ||
218 | result = url1 | ||
219 | if result != url2: | ||
220 | logger.debug('Not replacing %r with %r', url1, url2) | ||
221 | else: | ||
222 | logger.debug('Replacing %r with %r', url1, url2) | ||
223 | return result | ||
224 | |||
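Since score_url returns a tuple, preference falls out of ordinary tuple comparison. A small sketch with hypothetical URLs:

    from pip._vendor.distlib.locators import Locator

    loc = Locator()
    s1 = loc.score_url('https://pypi.python.org/packages/source/f/foo/foo-1.0.tar.gz')
    s2 = loc.score_url('http://example.com/dists/foo-1.0.zip')
    assert s1 > s2    # https + PyPI outranks http + third-party mirror
    best = loc.prefer_url('http://example.com/dists/foo-1.0.zip',
                          'https://pypi.python.org/packages/source/f/foo/foo-1.0.tar.gz')
    # best is the https PyPI URL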
225 | def split_filename(self, filename, project_name): | ||
226 | """ | ||
227 | Attempt to split a filename into project name, version and Python version. | ||
228 | """ | ||
229 | return split_filename(filename, project_name) | ||
230 | |||
231 | def convert_url_to_download_info(self, url, project_name): | ||
232 | """ | ||
233 | See if a URL is a candidate for a download URL for a project (the URL | ||
234 | has typically been scraped from an HTML page). | ||
235 | |||
236 | If it is, a dictionary is returned with keys "name", "version", | ||
237 | "filename" and "url"; otherwise, None is returned. | ||
238 | """ | ||
239 | def same_project(name1, name2): | ||
240 | return normalize_name(name1) == normalize_name(name2) | ||
241 | |||
242 | result = None | ||
243 | scheme, netloc, path, params, query, frag = urlparse(url) | ||
244 | if frag.lower().startswith('egg='): # pragma: no cover | ||
245 | logger.debug('%s: version hint in fragment: %r', | ||
246 | project_name, frag) | ||
247 | m = HASHER_HASH.match(frag) | ||
248 | if m: | ||
249 | algo, digest = m.groups() | ||
250 | else: | ||
251 | algo, digest = None, None | ||
252 | origpath = path | ||
253 | if path and path[-1] == '/': # pragma: no cover | ||
254 | path = path[:-1] | ||
255 | if path.endswith('.whl'): | ||
256 | try: | ||
257 | wheel = Wheel(path) | ||
258 | if is_compatible(wheel, self.wheel_tags): | ||
259 | if project_name is None: | ||
260 | include = True | ||
261 | else: | ||
262 | include = same_project(wheel.name, project_name) | ||
263 | if include: | ||
264 | result = { | ||
265 | 'name': wheel.name, | ||
266 | 'version': wheel.version, | ||
267 | 'filename': wheel.filename, | ||
268 | 'url': urlunparse((scheme, netloc, origpath, | ||
269 | params, query, '')), | ||
270 | 'python-version': ', '.join( | ||
271 | ['.'.join(list(v[2:])) for v in wheel.pyver]), | ||
272 | } | ||
273 | except Exception as e: # pragma: no cover | ||
274 | logger.warning('invalid path for wheel: %s', path) | ||
275 | elif not path.endswith(self.downloadable_extensions): # pragma: no cover | ||
276 | logger.debug('Not downloadable: %s', path) | ||
277 | else: # downloadable extension | ||
278 | path = filename = posixpath.basename(path) | ||
279 | for ext in self.downloadable_extensions: | ||
280 | if path.endswith(ext): | ||
281 | path = path[:-len(ext)] | ||
282 | t = self.split_filename(path, project_name) | ||
283 | if not t: # pragma: no cover | ||
284 | logger.debug('No match for project/version: %s', path) | ||
285 | else: | ||
286 | name, version, pyver = t | ||
287 | if not project_name or same_project(project_name, name): | ||
288 | result = { | ||
289 | 'name': name, | ||
290 | 'version': version, | ||
291 | 'filename': filename, | ||
292 | 'url': urlunparse((scheme, netloc, origpath, | ||
293 | params, query, '')), | ||
294 | #'packagetype': 'sdist', | ||
295 | } | ||
296 | if pyver: # pragma: no cover | ||
297 | result['python-version'] = pyver | ||
298 | break | ||
299 | if result and algo: | ||
300 | result['%s_digest' % algo] = digest | ||
301 | return result | ||
302 | |||
303 | def _get_digest(self, info): | ||
304 | """ | ||
305 | Get a digest from a dictionary by looking at keys of the form | ||
306 | 'algo_digest'. | ||
307 | |||
308 | Returns a 2-tuple (algo, digest) if found, else None. Currently | ||
309 | looks only for SHA256, then MD5. | ||
310 | """ | ||
311 | result = None | ||
312 | for algo in ('sha256', 'md5'): | ||
313 | key = '%s_digest' % algo | ||
314 | if key in info: | ||
315 | result = (algo, info[key]) | ||
316 | break | ||
317 | return result | ||
318 | |||
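For example (digest values truncated and hypothetical):

    info = {'url': 'https://example.com/foo-1.0.tar.gz',
            'md5_digest': 'cafe...', 'sha256_digest': 'dead...'}
    # self._get_digest(info) -> ('sha256', 'dead...'), since sha256 is preferred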
319 | def _update_version_data(self, result, info): | ||
320 | """ | ||
321 | Update a result dictionary (the final result from _get_project) with a | ||
322 | dictionary for a specific version, which typically holds information | ||
323 | gleaned from a filename or URL for an archive for the distribution. | ||
324 | """ | ||
325 | name = info.pop('name') | ||
326 | version = info.pop('version') | ||
327 | if version in result: | ||
328 | dist = result[version] | ||
329 | md = dist.metadata | ||
330 | else: | ||
331 | dist = make_dist(name, version, scheme=self.scheme) | ||
332 | md = dist.metadata | ||
333 | dist.digest = digest = self._get_digest(info) | ||
334 | url = info['url'] | ||
335 | result['digests'][url] = digest | ||
336 | if md.source_url != info['url']: | ||
337 | md.source_url = self.prefer_url(md.source_url, url) | ||
338 | result['urls'].setdefault(version, set()).add(url) | ||
339 | dist.locator = self | ||
340 | result[version] = dist | ||
341 | |||
342 | def locate(self, requirement, prereleases=False): | ||
343 | """ | ||
344 | Find the most recent distribution which matches the given | ||
345 | requirement. | ||
346 | |||
347 | :param requirement: A requirement of the form 'foo (1.0)' or perhaps | ||
348 | 'foo (>= 1.0, < 2.0, != 1.3)' | ||
349 | :param prereleases: If ``True``, allow pre-release versions | ||
350 | to be located. Otherwise, pre-release versions | ||
351 | are not returned. | ||
352 | :return: A :class:`Distribution` instance, or ``None`` if no such | ||
353 | distribution could be located. | ||
354 | """ | ||
355 | result = None | ||
356 | r = parse_requirement(requirement) | ||
357 | if r is None: # pragma: no cover | ||
358 | raise DistlibException('Not a valid requirement: %r' % requirement) | ||
359 | scheme = get_scheme(self.scheme) | ||
360 | self.matcher = matcher = scheme.matcher(r.requirement) | ||
361 | logger.debug('matcher: %s (%s)', matcher, type(matcher).__name__) | ||
362 | versions = self.get_project(r.name) | ||
363 | if len(versions) > 2: # at least one real version besides 'urls'/'digests' | ||
364 | # sometimes, versions are invalid | ||
365 | slist = [] | ||
366 | vcls = matcher.version_class | ||
367 | for k in versions: | ||
368 | if k in ('urls', 'digests'): | ||
369 | continue | ||
370 | try: | ||
371 | if not matcher.match(k): | ||
372 | logger.debug('%s did not match %r', matcher, k) | ||
373 | else: | ||
374 | if prereleases or not vcls(k).is_prerelease: | ||
375 | slist.append(k) | ||
376 | else: | ||
377 | logger.debug('skipping pre-release ' | ||
378 | 'version %s of %s', k, matcher.name) | ||
379 | except Exception: # pragma: no cover | ||
380 | logger.warning('error matching %s with %r', matcher, k) | ||
381 | pass # slist.append(k) | ||
382 | if len(slist) > 1: | ||
383 | slist = sorted(slist, key=scheme.key) | ||
384 | if slist: | ||
385 | logger.debug('sorted list: %s', slist) | ||
386 | version = slist[-1] | ||
387 | result = versions[version] | ||
388 | if result: | ||
389 | if r.extras: | ||
390 | result.extras = r.extras | ||
391 | result.download_urls = versions.get('urls', {}).get(version, set()) | ||
392 | d = {} | ||
393 | sd = versions.get('digests', {}) | ||
394 | for url in result.download_urls: | ||
395 | if url in sd: # pragma: no cover | ||
396 | d[url] = sd[url] | ||
397 | result.digests = d | ||
398 | self.matcher = None | ||
399 | return result | ||
400 | |||
401 | |||
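For orientation, the dictionary that get_project/_get_project returns has this shape (an illustrative sketch; locate relies on the 'urls' and 'digests' keys always being present, hence its len(versions) > 2 test):

    # {
    #     '1.0': <Distribution foo 1.0>,
    #     '1.1': <Distribution foo 1.1>,
    #     'urls':    {'1.0': {'https://.../foo-1.0.tar.gz'}},            # version -> set of URLs
    #     'digests': {'https://.../foo-1.0.tar.gz': ('sha256', '...')},  # URL -> (algo, digest)
    # }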
402 | class PyPIRPCLocator(Locator): | ||
403 | """ | ||
404 | This locator uses XML-RPC to locate distributions. It therefore | ||
405 | cannot be used with simple mirrors (that only mirror file content). | ||
406 | """ | ||
407 | def __init__(self, url, **kwargs): | ||
408 | """ | ||
409 | Initialise an instance. | ||
410 | |||
411 | :param url: The URL to use for XML-RPC. | ||
412 | :param kwargs: Passed to the superclass constructor. | ||
413 | """ | ||
414 | super(PyPIRPCLocator, self).__init__(**kwargs) | ||
415 | self.base_url = url | ||
416 | self.client = ServerProxy(url, timeout=3.0) | ||
417 | |||
418 | def get_distribution_names(self): | ||
419 | """ | ||
420 | Return all the distribution names known to this locator. | ||
421 | """ | ||
422 | return set(self.client.list_packages()) | ||
423 | |||
424 | def _get_project(self, name): | ||
425 | result = {'urls': {}, 'digests': {}} | ||
426 | versions = self.client.package_releases(name, True) | ||
427 | for v in versions: | ||
428 | urls = self.client.release_urls(name, v) | ||
429 | data = self.client.release_data(name, v) | ||
430 | metadata = Metadata(scheme=self.scheme) | ||
431 | metadata.name = data['name'] | ||
432 | metadata.version = data['version'] | ||
433 | metadata.license = data.get('license') | ||
434 | metadata.keywords = data.get('keywords', []) | ||
435 | metadata.summary = data.get('summary') | ||
436 | dist = Distribution(metadata) | ||
437 | if urls: | ||
438 | info = urls[0] | ||
439 | metadata.source_url = info['url'] | ||
440 | dist.digest = self._get_digest(info) | ||
441 | dist.locator = self | ||
442 | result[v] = dist | ||
443 | for info in urls: | ||
444 | url = info['url'] | ||
445 | digest = self._get_digest(info) | ||
446 | result['urls'].setdefault(v, set()).add(url) | ||
447 | result['digests'][url] = digest | ||
448 | return result | ||
449 | |||
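A hedged usage sketch (live XML-RPC calls, which PyPI has since deprecated):

    from pip._vendor.distlib.locators import PyPIRPCLocator

    locator = PyPIRPCLocator('https://pypi.python.org/pypi')
    versions = locator.get_project('requests')
    names = locator.get_distribution_names()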
450 | class PyPIJSONLocator(Locator): | ||
451 | """ | ||
452 | This locator uses PyPI's JSON interface. It's very limited in functionality | ||
453 | and probably not worth using. | ||
454 | """ | ||
455 | def __init__(self, url, **kwargs): | ||
456 | super(PyPIJSONLocator, self).__init__(**kwargs) | ||
457 | self.base_url = ensure_slash(url) | ||
458 | |||
459 | def get_distribution_names(self): | ||
460 | """ | ||
461 | Return all the distribution names known to this locator. | ||
462 | """ | ||
463 | raise NotImplementedError('Not available from this locator') | ||
464 | |||
465 | def _get_project(self, name): | ||
466 | result = {'urls': {}, 'digests': {}} | ||
467 | url = urljoin(self.base_url, '%s/json' % quote(name)) | ||
468 | try: | ||
469 | resp = self.opener.open(url) | ||
470 | data = resp.read().decode() # for now | ||
471 | d = json.loads(data) | ||
472 | md = Metadata(scheme=self.scheme) | ||
473 | data = d['info'] | ||
474 | md.name = data['name'] | ||
475 | md.version = data['version'] | ||
476 | md.license = data.get('license') | ||
477 | md.keywords = data.get('keywords', []) | ||
478 | md.summary = data.get('summary') | ||
479 | dist = Distribution(md) | ||
480 | dist.locator = self | ||
481 | urls = d['urls'] | ||
482 | result[md.version] = dist | ||
483 | for info in d['urls']: | ||
484 | url = info['url'] | ||
485 | dist.download_urls.add(url) | ||
486 | dist.digests[url] = self._get_digest(info) | ||
487 | result['urls'].setdefault(md.version, set()).add(url) | ||
488 | result['digests'][url] = self._get_digest(info) | ||
489 | # Now get other releases | ||
490 | for version, infos in d['releases'].items(): | ||
491 | if version == md.version: | ||
492 | continue # already done | ||
493 | omd = Metadata(scheme=self.scheme) | ||
494 | omd.name = md.name | ||
495 | omd.version = version | ||
496 | odist = Distribution(omd) | ||
497 | odist.locator = self | ||
498 | result[version] = odist | ||
499 | for info in infos: | ||
500 | url = info['url'] | ||
501 | odist.download_urls.add(url) | ||
502 | odist.digests[url] = self._get_digest(info) | ||
503 | result['urls'].setdefault(version, set()).add(url) | ||
504 | result['digests'][url] = self._get_digest(info) | ||
505 | # for info in urls: | ||
506 | # md.source_url = info['url'] | ||
507 | # dist.digest = self._get_digest(info) | ||
508 | # dist.locator = self | ||
509 | # for info in urls: | ||
510 | # url = info['url'] | ||
511 | # result['urls'].setdefault(md.version, set()).add(url) | ||
512 | # result['digests'][url] = self._get_digest(info) | ||
513 | except Exception as e: | ||
514 | self.errors.put(text_type(e)) | ||
515 | logger.exception('JSON fetch failed: %s', e) | ||
516 | return result | ||
517 | |||
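A hedged usage sketch; the base URL is assumed to be a JSON-API root so that '<name>/json' resolves correctly:

    from pip._vendor.distlib.locators import PyPIJSONLocator

    locator = PyPIJSONLocator('https://pypi.org/pypi/')
    versions = locator.get_project('requests')
    errors = locator.get_errors()    # fetch/parse failures are queued, not raised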
518 | |||
519 | class Page(object): | ||
520 | """ | ||
521 | This class represents a scraped HTML page. | ||
522 | """ | ||
523 | # The following slightly hairy-looking regex just looks for the contents of | ||
524 | # an anchor link, which has an attribute "href" either immediately preceded | ||
525 | # or immediately followed by a "rel" attribute. The attribute values can be | ||
526 | # declared with double quotes, single quotes or no quotes - which leads to | ||
527 | # the length of the expression. | ||
528 | _href = re.compile(""" | ||
529 | (rel\\s*=\\s*(?:"(?P<rel1>[^"]*)"|'(?P<rel2>[^']*)'|(?P<rel3>[^>\\s\n]*))\\s+)? | ||
530 | href\\s*=\\s*(?:"(?P<url1>[^"]*)"|'(?P<url2>[^']*)'|(?P<url3>[^>\\s\n]*)) | ||
531 | (\\s+rel\\s*=\\s*(?:"(?P<rel4>[^"]*)"|'(?P<rel5>[^']*)'|(?P<rel6>[^>\\s\n]*)))? | ||
532 | """, re.I | re.S | re.X) | ||
533 | _base = re.compile(r"""<base\s+href\s*=\s*['"]?([^'">]+)""", re.I | re.S) | ||
534 | |||
535 | def __init__(self, data, url): | ||
536 | """ | ||
537 | Initialise an instance with the Unicode page contents and the URL they | ||
538 | came from. | ||
539 | """ | ||
540 | self.data = data | ||
541 | self.base_url = self.url = url | ||
542 | m = self._base.search(self.data) | ||
543 | if m: | ||
544 | self.base_url = m.group(1) | ||
545 | |||
546 | _clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I) | ||
547 | |||
548 | @cached_property | ||
549 | def links(self): | ||
550 | """ | ||
551 | Return the URLs of all the links on a page together with information | ||
552 | about their "rel" attribute, for determining which ones to treat as | ||
553 | downloads and which ones to queue for further scraping. | ||
554 | """ | ||
555 | def clean(url): | ||
556 | "Tidy up an URL." | ||
557 | scheme, netloc, path, params, query, frag = urlparse(url) | ||
558 | return urlunparse((scheme, netloc, quote(path), | ||
559 | params, query, frag)) | ||
560 | |||
561 | result = set() | ||
562 | for match in self._href.finditer(self.data): | ||
563 | d = match.groupdict('') | ||
564 | rel = (d['rel1'] or d['rel2'] or d['rel3'] or | ||
565 | d['rel4'] or d['rel5'] or d['rel6']) | ||
566 | url = d['url1'] or d['url2'] or d['url3'] | ||
567 | url = urljoin(self.base_url, url) | ||
568 | url = unescape(url) | ||
569 | url = self._clean_re.sub(lambda m: '%%%2x' % ord(m.group(0)), url) | ||
570 | result.add((url, rel)) | ||
571 | # We sort the result, hoping to bring the most recent versions | ||
572 | # to the front | ||
573 | result = sorted(result, key=lambda t: t[0], reverse=True) | ||
574 | return result | ||
575 | |||
576 | |||
577 | class SimpleScrapingLocator(Locator): | ||
578 | """ | ||
579 | A locator which scrapes HTML pages to locate downloads for a distribution. | ||
580 | This runs multiple threads to do the I/O; performance is at least as good | ||
581 | as pip's PackageFinder, which works in an analogous fashion. | ||
582 | """ | ||
583 | |||
584 | # These are used to deal with various Content-Encoding schemes. | ||
585 | decoders = { | ||
586 | 'deflate': zlib.decompress, | ||
587 | 'gzip': lambda b: gzip.GzipFile(fileobj=BytesIO(b)).read(), | ||
588 | 'none': lambda b: b, | ||
589 | } | ||
590 | |||
591 | def __init__(self, url, timeout=None, num_workers=10, **kwargs): | ||
592 | """ | ||
593 | Initialise an instance. | ||
594 | :param url: The root URL to use for scraping. | ||
595 | :param timeout: The timeout, in seconds, to be applied to requests. | ||
596 | This defaults to ``None`` (no timeout specified). | ||
597 | :param num_workers: The number of worker threads to use for I/O. | ||
598 | This defaults to 10. | ||
599 | :param kwargs: Passed to the superclass. | ||
600 | """ | ||
601 | super(SimpleScrapingLocator, self).__init__(**kwargs) | ||
602 | self.base_url = ensure_slash(url) | ||
603 | self.timeout = timeout | ||
604 | self._page_cache = {} | ||
605 | self._seen = set() | ||
606 | self._to_fetch = queue.Queue() | ||
607 | self._bad_hosts = set() | ||
608 | self.skip_externals = False | ||
609 | self.num_workers = num_workers | ||
610 | self._lock = threading.RLock() | ||
611 | # See issue #45: we need to be resilient when the locator is used | ||
612 | # in a thread, e.g. with concurrent.futures. We can't use self._lock | ||
613 | # as it is for coordinating our internal threads - the ones created | ||
614 | # in _prepare_threads. | ||
615 | self._gplock = threading.RLock() | ||
616 | |||
617 | def _prepare_threads(self): | ||
618 | """ | ||
619 | Threads are created only when get_project is called, and terminate | ||
620 | before it returns. They are there primarily to parallelise I/O (i.e. | ||
621 | fetching web pages). | ||
622 | """ | ||
623 | self._threads = [] | ||
624 | for i in range(self.num_workers): | ||
625 | t = threading.Thread(target=self._fetch) | ||
626 | t.setDaemon(True) | ||
627 | t.start() | ||
628 | self._threads.append(t) | ||
629 | |||
630 | def _wait_threads(self): | ||
631 | """ | ||
632 | Tell all the threads to terminate (by sending a sentinel value) and | ||
633 | wait for them to do so. | ||
634 | """ | ||
635 | # Note that you need two loops, since you can't say which | ||
636 | # thread will get each sentinel | ||
637 | for t in self._threads: | ||
638 | self._to_fetch.put(None) # sentinel | ||
639 | for t in self._threads: | ||
640 | t.join() | ||
641 | self._threads = [] | ||
642 | |||
643 | def _get_project(self, name): | ||
644 | result = {'urls': {}, 'digests': {}} | ||
645 | with self._gplock: | ||
646 | self.result = result | ||
647 | self.project_name = name | ||
648 | url = urljoin(self.base_url, '%s/' % quote(name)) | ||
649 | self._seen.clear() | ||
650 | self._page_cache.clear() | ||
651 | self._prepare_threads() | ||
652 | try: | ||
653 | logger.debug('Queueing %s', url) | ||
654 | self._to_fetch.put(url) | ||
655 | self._to_fetch.join() | ||
656 | finally: | ||
657 | self._wait_threads() | ||
658 | del self.result | ||
659 | return result | ||
660 | |||
661 | platform_dependent = re.compile(r'\b(linux-(i\d86|x86_64|arm\w+)|' | ||
662 | r'win(32|-amd64)|macosx-?\d+)\b', re.I) | ||
663 | |||
664 | def _is_platform_dependent(self, url): | ||
665 | """ | ||
666 | Does a URL refer to a platform-specific download? | ||
667 | """ | ||
668 | return self.platform_dependent.search(url) | ||
669 | |||
670 | def _process_download(self, url): | ||
671 | """ | ||
672 | See if a URL is a suitable download for a project. | ||
673 | |||
674 | If it is, register information in the result dictionary (for | ||
675 | _get_project) about the specific version it's for. | ||
676 | |||
677 | Note that the return value isn't actually used other than as a boolean | ||
678 | value. | ||
679 | """ | ||
680 | if self._is_platform_dependent(url): | ||
681 | info = None | ||
682 | else: | ||
683 | info = self.convert_url_to_download_info(url, self.project_name) | ||
684 | logger.debug('process_download: %s -> %s', url, info) | ||
685 | if info: | ||
686 | with self._lock: # needed because self.result is shared | ||
687 | self._update_version_data(self.result, info) | ||
688 | return info | ||
689 | |||
690 | def _should_queue(self, link, referrer, rel): | ||
691 | """ | ||
692 | Determine whether a link URL from a referring page and with a | ||
693 | particular "rel" attribute should be queued for scraping. | ||
694 | """ | ||
695 | scheme, netloc, path, _, _, _ = urlparse(link) | ||
696 | if path.endswith(self.source_extensions + self.binary_extensions + | ||
697 | self.excluded_extensions): | ||
698 | result = False | ||
699 | elif self.skip_externals and not link.startswith(self.base_url): | ||
700 | result = False | ||
701 | elif not referrer.startswith(self.base_url): | ||
702 | result = False | ||
703 | elif rel not in ('homepage', 'download'): | ||
704 | result = False | ||
705 | elif scheme not in ('http', 'https', 'ftp'): | ||
706 | result = False | ||
707 | elif self._is_platform_dependent(link): | ||
708 | result = False | ||
709 | else: | ||
710 | host = netloc.split(':', 1)[0] | ||
711 | if host.lower() == 'localhost': | ||
712 | result = False | ||
713 | else: | ||
714 | result = True | ||
715 | logger.debug('should_queue: %s (%s) from %s -> %s', link, rel, | ||
716 | referrer, result) | ||
717 | return result | ||
718 | |||
719 | def _fetch(self): | ||
720 | """ | ||
721 | Get a URL to fetch from the work queue, get the HTML page, examine its | ||
722 | links for download candidates and candidates for further scraping. | ||
723 | |||
724 | This is a handy method to run in a thread. | ||
725 | """ | ||
726 | while True: | ||
727 | url = self._to_fetch.get() | ||
728 | try: | ||
729 | if url: | ||
730 | page = self.get_page(url) | ||
731 | if page is None: # e.g. after an error | ||
732 | continue | ||
733 | for link, rel in page.links: | ||
734 | if link not in self._seen: | ||
735 | try: | ||
736 | self._seen.add(link) | ||
737 | if (not self._process_download(link) and | ||
738 | self._should_queue(link, url, rel)): | ||
739 | logger.debug('Queueing %s from %s', link, url) | ||
740 | self._to_fetch.put(link) | ||
741 | except MetadataInvalidError: # e.g. invalid versions | ||
742 | pass | ||
743 | except Exception as e: # pragma: no cover | ||
744 | self.errors.put(text_type(e)) | ||
745 | finally: | ||
746 | # always do this, to avoid hangs :-) | ||
747 | self._to_fetch.task_done() | ||
748 | if not url: | ||
749 | #logger.debug('Sentinel seen, quitting.') | ||
750 | break | ||
751 | |||
752 | def get_page(self, url): | ||
753 | """ | ||
754 | Get the HTML for a URL, possibly from an in-memory cache. | ||
755 | |||
756 | XXX TODO Note: this cache is never actually cleared. It's assumed that | ||
757 | the data won't get stale over the lifetime of a locator instance (not | ||
758 | necessarily true for the default_locator). | ||
759 | """ | ||
760 | # http://peak.telecommunity.com/DevCenter/EasyInstall#package-index-api | ||
761 | scheme, netloc, path, _, _, _ = urlparse(url) | ||
762 | if scheme == 'file' and os.path.isdir(url2pathname(path)): | ||
763 | url = urljoin(ensure_slash(url), 'index.html') | ||
764 | |||
765 | if url in self._page_cache: | ||
766 | result = self._page_cache[url] | ||
767 | logger.debug('Returning %s from cache: %s', url, result) | ||
768 | else: | ||
769 | host = netloc.split(':', 1)[0] | ||
770 | result = None | ||
771 | if host in self._bad_hosts: | ||
772 | logger.debug('Skipping %s due to bad host %s', url, host) | ||
773 | else: | ||
774 | req = Request(url, headers={'Accept-encoding': 'identity'}) | ||
775 | try: | ||
776 | logger.debug('Fetching %s', url) | ||
777 | resp = self.opener.open(req, timeout=self.timeout) | ||
778 | logger.debug('Fetched %s', url) | ||
779 | headers = resp.info() | ||
780 | content_type = headers.get('Content-Type', '') | ||
781 | if HTML_CONTENT_TYPE.match(content_type): | ||
782 | final_url = resp.geturl() | ||
783 | data = resp.read() | ||
784 | encoding = headers.get('Content-Encoding') | ||
785 | if encoding: | ||
786 | decoder = self.decoders[encoding] # fail if not found | ||
787 | data = decoder(data) | ||
788 | encoding = 'utf-8' | ||
789 | m = CHARSET.search(content_type) | ||
790 | if m: | ||
791 | encoding = m.group(1) | ||
792 | try: | ||
793 | data = data.decode(encoding) | ||
794 | except UnicodeError: # pragma: no cover | ||
795 | data = data.decode('latin-1') # fallback | ||
796 | result = Page(data, final_url) | ||
797 | self._page_cache[final_url] = result | ||
798 | except HTTPError as e: | ||
799 | if e.code != 404: | ||
800 | logger.exception('Fetch failed: %s: %s', url, e) | ||
801 | except URLError as e: # pragma: no cover | ||
802 | logger.exception('Fetch failed: %s: %s', url, e) | ||
803 | with self._lock: | ||
804 | self._bad_hosts.add(host) | ||
805 | except Exception as e: # pragma: no cover | ||
806 | logger.exception('Fetch failed: %s: %s', url, e) | ||
807 | finally: | ||
808 | self._page_cache[url] = result # even if None (failure) | ||
809 | return result | ||
810 | |||
811 | _distname_re = re.compile('<a href=[^>]*>([^<]+)<') | ||
812 | |||
813 | def get_distribution_names(self): | ||
814 | """ | ||
815 | Return all the distribution names known to this locator. | ||
816 | """ | ||
817 | result = set() | ||
818 | page = self.get_page(self.base_url) | ||
819 | if not page: | ||
820 | raise DistlibException('Unable to get %s' % self.base_url) | ||
821 | for match in self._distname_re.finditer(page.data): | ||
822 | result.add(match.group(1)) | ||
823 | return result | ||
824 | |||
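A hedged usage sketch; get_project spins up the worker threads, waits for the fetch queue to drain, and then tears the threads down again:

    from pip._vendor.distlib.locators import SimpleScrapingLocator

    locator = SimpleScrapingLocator('https://pypi.python.org/simple/',
                                    timeout=3.0, num_workers=4)
    versions = locator.get_project('requests')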
825 | class DirectoryLocator(Locator): | ||
826 | """ | ||
827 | This class locates distributions in a directory tree. | ||
828 | """ | ||
829 | |||
830 | def __init__(self, path, **kwargs): | ||
831 | """ | ||
832 | Initialise an instance. | ||
833 | :param path: The root of the directory tree to search. | ||
834 | :param kwargs: Passed to the superclass constructor, | ||
835 | except for: | ||
836 | * recursive - if True (the default), subdirectories are | ||
837 | recursed into. If False, only the top-level directory | ||
838 | is searched. | ||
839 | """ | ||
840 | self.recursive = kwargs.pop('recursive', True) | ||
841 | super(DirectoryLocator, self).__init__(**kwargs) | ||
842 | path = os.path.abspath(path) | ||
843 | if not os.path.isdir(path): # pragma: no cover | ||
844 | raise DistlibException('Not a directory: %r' % path) | ||
845 | self.base_dir = path | ||
846 | |||
847 | def should_include(self, filename, parent): | ||
848 | """ | ||
849 | Should a filename be considered as a candidate for a distribution | ||
850 | archive? As well as the filename, the directory which contains it | ||
851 | is provided, though not used by the current implementation. | ||
852 | """ | ||
853 | return filename.endswith(self.downloadable_extensions) | ||
854 | |||
855 | def _get_project(self, name): | ||
856 | result = {'urls': {}, 'digests': {}} | ||
857 | for root, dirs, files in os.walk(self.base_dir): | ||
858 | for fn in files: | ||
859 | if self.should_include(fn, root): | ||
860 | fn = os.path.join(root, fn) | ||
861 | url = urlunparse(('file', '', | ||
862 | pathname2url(os.path.abspath(fn)), | ||
863 | '', '', '')) | ||
864 | info = self.convert_url_to_download_info(url, name) | ||
865 | if info: | ||
866 | self._update_version_data(result, info) | ||
867 | if not self.recursive: | ||
868 | break | ||
869 | return result | ||
870 | |||
871 | def get_distribution_names(self): | ||
872 | """ | ||
873 | Return all the distribution names known to this locator. | ||
874 | """ | ||
875 | result = set() | ||
876 | for root, dirs, files in os.walk(self.base_dir): | ||
877 | for fn in files: | ||
878 | if self.should_include(fn, root): | ||
879 | fn = os.path.join(root, fn) | ||
880 | url = urlunparse(('file', '', | ||
881 | pathname2url(os.path.abspath(fn)), | ||
882 | '', '', '')) | ||
883 | info = self.convert_url_to_download_info(url, None) | ||
884 | if info: | ||
885 | result.add(info['name']) | ||
886 | if not self.recursive: | ||
887 | break | ||
888 | return result | ||
889 | |||
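A minimal sketch, assuming /tmp/wheelhouse holds archives named like foo-1.0.tar.gz or foo-1.0-py2.py3-none-any.whl:

    from pip._vendor.distlib.locators import DirectoryLocator

    locator = DirectoryLocator('/tmp/wheelhouse', recursive=False)
    print(locator.get_distribution_names())
    versions = locator.get_project('foo')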
890 | class JSONLocator(Locator): | ||
891 | """ | ||
892 | This locator uses special extended metadata (not available on PyPI) and is | ||
893 | the basis of performant dependency resolution in distlib. Other locators | ||
894 | require archive downloads before dependencies can be determined! As you | ||
895 | might imagine, that can be slow. | ||
896 | """ | ||
897 | def get_distribution_names(self): | ||
898 | """ | ||
899 | Return all the distribution names known to this locator. | ||
900 | """ | ||
901 | raise NotImplementedError('Not available from this locator') | ||
902 | |||
903 | def _get_project(self, name): | ||
904 | result = {'urls': {}, 'digests': {}} | ||
905 | data = get_project_data(name) | ||
906 | if data: | ||
907 | for info in data.get('files', []): | ||
908 | if info['ptype'] != 'sdist' or info['pyversion'] != 'source': | ||
909 | continue | ||
910 | # We don't store summary in project metadata as it makes | ||
911 | # the data bigger for no benefit during dependency | ||
912 | # resolution | ||
913 | dist = make_dist(data['name'], info['version'], | ||
914 | summary=data.get('summary', | ||
915 | 'Placeholder for summary'), | ||
916 | scheme=self.scheme) | ||
917 | md = dist.metadata | ||
918 | md.source_url = info['url'] | ||
919 | # TODO SHA256 digest | ||
920 | if 'digest' in info and info['digest']: | ||
921 | dist.digest = ('md5', info['digest']) | ||
922 | md.dependencies = info.get('requirements', {}) | ||
923 | dist.exports = info.get('exports', {}) | ||
924 | result[dist.version] = dist | ||
925 | result['urls'].setdefault(dist.version, set()).add(info['url']) | ||
926 | return result | ||
927 | |||
928 | class DistPathLocator(Locator): | ||
929 | """ | ||
930 | This locator finds installed distributions in a path. It can be useful for | ||
931 | adding to an :class:`AggregatingLocator`. | ||
932 | """ | ||
933 | def __init__(self, distpath, **kwargs): | ||
934 | """ | ||
935 | Initialise an instance. | ||
936 | |||
937 | :param distpath: A :class:`DistributionPath` instance to search. | ||
938 | """ | ||
939 | super(DistPathLocator, self).__init__(**kwargs) | ||
940 | assert isinstance(distpath, DistributionPath) | ||
941 | self.distpath = distpath | ||
942 | |||
943 | def _get_project(self, name): | ||
944 | dist = self.distpath.get_distribution(name) | ||
945 | if dist is None: | ||
946 | result = {'urls': {}, 'digests': {}} | ||
947 | else: | ||
948 | result = { | ||
949 | dist.version: dist, | ||
950 | 'urls': {dist.version: set([dist.source_url])}, | ||
951 | 'digests': {dist.version: set([None])} | ||
952 | } | ||
953 | return result | ||
954 | |||
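A minimal sketch; DistributionPath() with no arguments searches sys.path for installed distributions:

    from pip._vendor.distlib.database import DistributionPath
    from pip._vendor.distlib.locators import DistPathLocator

    locator = DistPathLocator(DistributionPath())
    versions = locator.get_project('pip')    # empty urls/digests dicts if not installed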
955 | |||
956 | class AggregatingLocator(Locator): | ||
957 | """ | ||
958 | This class allows you to chain and/or merge a list of locators. | ||
959 | """ | ||
960 | def __init__(self, *locators, **kwargs): | ||
961 | """ | ||
962 | Initialise an instance. | ||
963 | |||
964 | :param locators: The list of locators to search. | ||
965 | :param kwargs: Passed to the superclass constructor, | ||
966 | except for: | ||
967 | * merge - if False (the default), the first successful | ||
968 | search from any of the locators is returned. If True, | ||
969 | the results from all locators are merged (this can be | ||
970 | slow). | ||
971 | """ | ||
972 | self.merge = kwargs.pop('merge', False) | ||
973 | self.locators = locators | ||
974 | super(AggregatingLocator, self).__init__(**kwargs) | ||
975 | |||
976 | def clear_cache(self): | ||
977 | super(AggregatingLocator, self).clear_cache() | ||
978 | for locator in self.locators: | ||
979 | locator.clear_cache() | ||
980 | |||
981 | def _set_scheme(self, value): | ||
982 | self._scheme = value | ||
983 | for locator in self.locators: | ||
984 | locator.scheme = value | ||
985 | |||
986 | scheme = property(Locator.scheme.fget, _set_scheme) | ||
987 | |||
988 | def _get_project(self, name): | ||
989 | result = {} | ||
990 | for locator in self.locators: | ||
991 | d = locator.get_project(name) | ||
992 | if d: | ||
993 | if self.merge: | ||
994 | files = result.get('urls', {}) | ||
995 | digests = result.get('digests', {}) | ||
996 | # next line could overwrite result['urls'], result['digests'] | ||
997 | result.update(d) | ||
998 | df = result.get('urls') | ||
999 | if files and df: | ||
1000 | for k, v in files.items(): | ||
1001 | if k in df: | ||
1002 | df[k] |= v | ||
1003 | else: | ||
1004 | df[k] = v | ||
1005 | dd = result.get('digests') | ||
1006 | if digests and dd: | ||
1007 | dd.update(digests) | ||
1008 | else: | ||
1009 | # See issue #18. If any dists are found and we're looking | ||
1010 | # for specific constraints, we only return something if | ||
1011 | # a match is found. For example, if a DirectoryLocator | ||
1012 | # returns just foo (1.0) while we're looking for | ||
1013 | # foo (>= 2.0), we'll pretend there was nothing there so | ||
1014 | # that subsequent locators can be queried. Otherwise we | ||
1015 | # would just return foo (1.0) which would then lead to a | ||
1016 | # failure to find foo (>= 2.0), because other locators | ||
1017 | # weren't searched. Note that this only matters when | ||
1018 | # merge=False. | ||
1019 | if self.matcher is None: | ||
1020 | found = True | ||
1021 | else: | ||
1022 | found = False | ||
1023 | for k in d: | ||
1024 | if self.matcher.match(k): | ||
1025 | found = True | ||
1026 | break | ||
1027 | if found: | ||
1028 | result = d | ||
1029 | break | ||
1030 | return result | ||
1031 | |||
1032 | def get_distribution_names(self): | ||
1033 | """ | ||
1034 | Return all the distribution names known to this locator. | ||
1035 | """ | ||
1036 | result = set() | ||
1037 | for locator in self.locators: | ||
1038 | try: | ||
1039 | result |= locator.get_distribution_names() | ||
1040 | except NotImplementedError: | ||
1041 | pass | ||
1042 | return result | ||
1043 | |||
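A hedged sketch of a first-match-wins chain (merge=False), checking a local wheelhouse before falling back to an index; the path and requirement are hypothetical:

    from pip._vendor.distlib.locators import (AggregatingLocator,
                                              DirectoryLocator,
                                              SimpleScrapingLocator)

    locator = AggregatingLocator(
        DirectoryLocator('/tmp/wheelhouse'),
        SimpleScrapingLocator('https://pypi.python.org/simple/', timeout=3.0),
        scheme='legacy')
    dist = locator.locate('foo (>= 1.0)')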
1044 | |||
1045 | # We use a legacy scheme simply because most of the dists on PyPI use legacy | ||
1046 | # versions which don't conform to PEP 426 / PEP 440. | ||
1047 | default_locator = AggregatingLocator( | ||
1048 | JSONLocator(), | ||
1049 | SimpleScrapingLocator('https://pypi.python.org/simple/', | ||
1050 | timeout=3.0), | ||
1051 | scheme='legacy') | ||
1052 | |||
1053 | locate = default_locator.locate | ||
1054 | |||
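The module-level convenience in use (this hits the network through default_locator):

    from pip._vendor.distlib.locators import locate

    dist = locate('requests (>= 2.0, < 3.0)')
    if dist is not None:
        print(dist.name_and_version, dist.source_url)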
1055 | NAME_VERSION_RE = re.compile(r'(?P<name>[\w-]+)\s*' | ||
1056 | r'\(\s*(==\s*)?(?P<ver>[^)]+)\)$') | ||
1057 | |||
1058 | class DependencyFinder(object): | ||
1059 | """ | ||
1060 | Locate dependencies for distributions. | ||
1061 | """ | ||
1062 | |||
1063 | def __init__(self, locator=None): | ||
1064 | """ | ||
1065 | Initialise an instance, using the specified locator | ||
1066 | to locate distributions. | ||
1067 | """ | ||
1068 | self.locator = locator or default_locator | ||
1069 | self.scheme = get_scheme(self.locator.scheme) | ||
1070 | |||
1071 | def add_distribution(self, dist): | ||
1072 | """ | ||
1073 | Add a distribution to the finder. This will update internal information | ||
1074 | about who provides what. | ||
1075 | :param dist: The distribution to add. | ||
1076 | """ | ||
1077 | logger.debug('adding distribution %s', dist) | ||
1078 | name = dist.key | ||
1079 | self.dists_by_name[name] = dist | ||
1080 | self.dists[(name, dist.version)] = dist | ||
1081 | for p in dist.provides: | ||
1082 | name, version = parse_name_and_version(p) | ||
1083 | logger.debug('Add to provided: %s, %s, %s', name, version, dist) | ||
1084 | self.provided.setdefault(name, set()).add((version, dist)) | ||
1085 | |||
1086 | def remove_distribution(self, dist): | ||
1087 | """ | ||
1088 | Remove a distribution from the finder. This will update internal | ||
1089 | information about who provides what. | ||
1090 | :param dist: The distribution to remove. | ||
1091 | """ | ||
1092 | logger.debug('removing distribution %s', dist) | ||
1093 | name = dist.key | ||
1094 | del self.dists_by_name[name] | ||
1095 | del self.dists[(name, dist.version)] | ||
1096 | for p in dist.provides: | ||
1097 | name, version = parse_name_and_version(p) | ||
1098 | logger.debug('Remove from provided: %s, %s, %s', name, version, dist) | ||
1099 | s = self.provided[name] | ||
1100 | s.remove((version, dist)) | ||
1101 | if not s: | ||
1102 | del self.provided[name] | ||
1103 | |||
1104 | def get_matcher(self, reqt): | ||
1105 | """ | ||
1106 | Get a version matcher for a requirement. | ||
1107 | :param reqt: The requirement | ||
1108 | :type reqt: str | ||
1109 | :return: A version matcher (an instance of | ||
1110 | :class:`distlib.version.Matcher`). | ||
1111 | """ | ||
1112 | try: | ||
1113 | matcher = self.scheme.matcher(reqt) | ||
1114 | except UnsupportedVersionError: # pragma: no cover | ||
1115 | # XXX compat-mode if cannot read the version | ||
1116 | name = reqt.split()[0] | ||
1117 | matcher = self.scheme.matcher(name) | ||
1118 | return matcher | ||
1119 | |||
1120 | def find_providers(self, reqt): | ||
1121 | """ | ||
1122 | Find the distributions which can fulfill a requirement. | ||
1123 | |||
1124 | :param reqt: The requirement. | ||
1125 | :type reqt: str | ||
1126 | :return: A set of distribution which can fulfill the requirement. | ||
1127 | """ | ||
1128 | matcher = self.get_matcher(reqt) | ||
1129 | name = matcher.key # case-insensitive | ||
1130 | result = set() | ||
1131 | provided = self.provided | ||
1132 | if name in provided: | ||
1133 | for version, provider in provided[name]: | ||
1134 | try: | ||
1135 | match = matcher.match(version) | ||
1136 | except UnsupportedVersionError: | ||
1137 | match = False | ||
1138 | |||
1139 | if match: | ||
1140 | result.add(provider) | ||
1141 | break | ||
1142 | return result | ||
1143 | |||
1144 | def try_to_replace(self, provider, other, problems): | ||
1145 | """ | ||
1146 | Attempt to replace one provider with another. This is typically used | ||
1147 | when resolving dependencies from multiple sources, e.g. A requires | ||
1148 | (B >= 1.0) while C requires (B >= 1.1). | ||
1149 | |||
1150 | For successful replacement, ``provider`` must meet all the requirements | ||
1151 | which ``other`` fulfills. | ||
1152 | |||
1153 | :param provider: The provider we are trying to replace with. | ||
1154 | :param other: The provider we're trying to replace. | ||
1155 | :param problems: If False is returned, this will contain what | ||
1156 | problems prevented replacement. This is currently | ||
1157 | a tuple of the literal string 'cantreplace', | ||
1158 | ``provider``, ``other`` and the set of requirements | ||
1159 | that ``provider`` couldn't fulfill. | ||
1160 | :return: True if we can replace ``other`` with ``provider``, else | ||
1161 | False. | ||
1162 | """ | ||
1163 | rlist = self.reqts[other] | ||
1164 | unmatched = set() | ||
1165 | for s in rlist: | ||
1166 | matcher = self.get_matcher(s) | ||
1167 | if not matcher.match(provider.version): | ||
1168 | unmatched.add(s) | ||
1169 | if unmatched: | ||
1170 | # can't replace other with provider | ||
1171 | problems.add(('cantreplace', provider, other, | ||
1172 | frozenset(unmatched))) | ||
1173 | result = False | ||
1174 | else: | ||
1175 | # can replace other with provider | ||
1176 | self.remove_distribution(other) | ||
1177 | del self.reqts[other] | ||
1178 | for s in rlist: | ||
1179 | self.reqts.setdefault(provider, set()).add(s) | ||
1180 | self.add_distribution(provider) | ||
1181 | result = True | ||
1182 | return result | ||
1183 | |||
1184 | def find(self, requirement, meta_extras=None, prereleases=False): | ||
1185 | """ | ||
1186 | Find a distribution and all distributions it depends on. | ||
1187 | |||
1188 | :param requirement: The requirement specifying the distribution to | ||
1189 | find, or a Distribution instance. | ||
1190 | :param meta_extras: A list of meta extras such as :test:, :build: and | ||
1191 | so on. | ||
1192 | :param prereleases: If ``True``, allow pre-release versions to be | ||
1193 | returned - otherwise, don't return prereleases | ||
1194 | unless they're all that's available. | ||
1195 | |||
1196 | Return a set of :class:`Distribution` instances and a set of | ||
1197 | problems. | ||
1198 | |||
1199 | The distributions returned should be such that they have the | ||
1200 | :attr:`required` attribute set to ``True`` if they were | ||
1201 | from the ``requirement`` passed to ``find()``, and they have the | ||
1202 | :attr:`build_time_dependency` attribute set to ``True`` unless they | ||
1203 | are post-installation dependencies of the ``requirement``. | ||
1204 | |||
1205 | The problems should be a tuple consisting of the string | ||
1206 | ``'unsatisfied'`` and the requirement which couldn't be satisfied | ||
1207 | by any distribution known to the locator. | ||
1208 | """ | ||
1209 | |||
1210 | self.provided = {} | ||
1211 | self.dists = {} | ||
1212 | self.dists_by_name = {} | ||
1213 | self.reqts = {} | ||
1214 | |||
1215 | meta_extras = set(meta_extras or []) | ||
1216 | if ':*:' in meta_extras: | ||
1217 | meta_extras.remove(':*:') | ||
1218 | # :meta: and :run: are implicitly included | ||
1219 | meta_extras |= set([':test:', ':build:', ':dev:']) | ||
1220 | |||
1221 | if isinstance(requirement, Distribution): | ||
1222 | dist = odist = requirement | ||
1223 | logger.debug('passed %s as requirement', odist) | ||
1224 | else: | ||
1225 | dist = odist = self.locator.locate(requirement, | ||
1226 | prereleases=prereleases) | ||
1227 | if dist is None: | ||
1228 | raise DistlibException('Unable to locate %r' % requirement) | ||
1229 | logger.debug('located %s', odist) | ||
1230 | dist.requested = True | ||
1231 | problems = set() | ||
1232 | todo = set([dist]) | ||
1233 | install_dists = set([odist]) | ||
1234 | while todo: | ||
1235 | dist = todo.pop() | ||
1236 | name = dist.key # case-insensitive | ||
1237 | if name not in self.dists_by_name: | ||
1238 | self.add_distribution(dist) | ||
1239 | else: | ||
1240 | #import pdb; pdb.set_trace() | ||
1241 | other = self.dists_by_name[name] | ||
1242 | if other != dist: | ||
1243 | self.try_to_replace(dist, other, problems) | ||
1244 | |||
1245 | ireqts = dist.run_requires | dist.meta_requires | ||
1246 | sreqts = dist.build_requires | ||
1247 | ereqts = set() | ||
1248 | if meta_extras and dist in install_dists: | ||
1249 | for key in ('test', 'build', 'dev'): | ||
1250 | e = ':%s:' % key | ||
1251 | if e in meta_extras: | ||
1252 | ereqts |= getattr(dist, '%s_requires' % key) | ||
1253 | all_reqts = ireqts | sreqts | ereqts | ||
1254 | for r in all_reqts: | ||
1255 | providers = self.find_providers(r) | ||
1256 | if not providers: | ||
1257 | logger.debug('No providers found for %r', r) | ||
1258 | provider = self.locator.locate(r, prereleases=prereleases) | ||
1259 | # If no provider is found and we didn't consider | ||
1260 | # prereleases, consider them now. | ||
1261 | if provider is None and not prereleases: | ||
1262 | provider = self.locator.locate(r, prereleases=True) | ||
1263 | if provider is None: | ||
1264 | logger.debug('Cannot satisfy %r', r) | ||
1265 | problems.add(('unsatisfied', r)) | ||
1266 | else: | ||
1267 | n, v = provider.key, provider.version | ||
1268 | if (n, v) not in self.dists: | ||
1269 | todo.add(provider) | ||
1270 | providers.add(provider) | ||
1271 | if r in ireqts and dist in install_dists: | ||
1272 | install_dists.add(provider) | ||
1273 | logger.debug('Adding %s to install_dists', | ||
1274 | provider.name_and_version) | ||
1275 | for p in providers: | ||
1276 | name = p.key | ||
1277 | if name not in self.dists_by_name: | ||
1278 | self.reqts.setdefault(p, set()).add(r) | ||
1279 | else: | ||
1280 | other = self.dists_by_name[name] | ||
1281 | if other != p: | ||
1282 | # see if other can be replaced by p | ||
1283 | self.try_to_replace(p, other, problems) | ||
1284 | |||
1285 | dists = set(self.dists.values()) | ||
1286 | for dist in dists: | ||
1287 | dist.build_time_dependency = dist not in install_dists | ||
1288 | if dist.build_time_dependency: | ||
1289 | logger.debug('%s is a build-time dependency only.', | ||
1290 | dist.name_and_version) | ||
1291 | logger.debug('find done for %s', odist) | ||
1292 | return dists, problems | ||
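A hedged end-to-end sketch of the finder (network access goes through default_locator; the requirement is hypothetical):

    from pip._vendor.distlib.locators import DependencyFinder

    finder = DependencyFinder()    # uses default_locator unless one is passed in
    dists, problems = finder.find('requests (>= 2.0)')
    for d in dists:
        kind = 'build-time only' if d.build_time_dependency else 'runtime'
        print(d.name_and_version, kind)
    for p in problems:
        print('problem:', p)       # e.g. ('unsatisfied', <requirement>)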
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/manifest.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/manifest.py new file mode 100644 index 0000000..92688d0 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/manifest.py | |||
@@ -0,0 +1,393 @@ | |||
1 | # -*- coding: utf-8 -*- | ||
2 | # | ||
3 | # Copyright (C) 2012-2013 Python Software Foundation. | ||
4 | # See LICENSE.txt and CONTRIBUTORS.txt. | ||
5 | # | ||
6 | """ | ||
7 | Class representing the list of files in a distribution. | ||
8 | |||
9 | Equivalent to distutils.filelist, but fixes some problems. | ||
10 | """ | ||
11 | import fnmatch | ||
12 | import logging | ||
13 | import os | ||
14 | import re | ||
15 | import sys | ||
16 | |||
17 | from . import DistlibException | ||
18 | from .compat import fsdecode | ||
19 | from .util import convert_path | ||
20 | |||
21 | |||
22 | __all__ = ['Manifest'] | ||
23 | |||
24 | logger = logging.getLogger(__name__) | ||
25 | |||
26 | # a \ followed by word characters + EOL | ||
27 | _COLLAPSE_PATTERN = re.compile('\\\\w*\n', re.M) | ||
28 | _COMMENTED_LINE = re.compile('#.*?(?=\n)|\n(?=$)', re.M | re.S) | ||
29 | |||
30 | # | ||
31 | # Due to the differing results returned by fnmatch.translate across Python | ||
32 | # versions, we need slightly different processing for Python 2.7 and 3.2; | ||
33 | # this version check became necessary again from Python 3.6 onwards. | ||
34 | # | ||
35 | _PYTHON_VERSION = sys.version_info[:2] | ||
36 | |||
37 | class Manifest(object): | ||
38 | """A list of files built by on exploring the filesystem and filtered by | ||
39 | applying various patterns to what we find there. | ||
40 | """ | ||
41 | |||
42 | def __init__(self, base=None): | ||
43 | """ | ||
44 | Initialise an instance. | ||
45 | |||
46 | :param base: The base directory to explore under. | ||
47 | """ | ||
48 | self.base = os.path.abspath(os.path.normpath(base or os.getcwd())) | ||
49 | self.prefix = self.base + os.sep | ||
50 | self.allfiles = None | ||
51 | self.files = set() | ||
52 | |||
53 | # | ||
54 | # Public API | ||
55 | # | ||
56 | |||
57 | def findall(self): | ||
58 | """Find all files under the base and set ``allfiles`` to the absolute | ||
59 | pathnames of files found. | ||
60 | """ | ||
61 | from stat import S_ISREG, S_ISDIR, S_ISLNK | ||
62 | |||
63 | self.allfiles = allfiles = [] | ||
64 | root = self.base | ||
65 | stack = [root] | ||
66 | pop = stack.pop | ||
67 | push = stack.append | ||
68 | |||
69 | while stack: | ||
70 | root = pop() | ||
71 | names = os.listdir(root) | ||
72 | |||
73 | for name in names: | ||
74 | fullname = os.path.join(root, name) | ||
75 | |||
76 | # Avoid excess stat calls -- just one will do, thank you! | ||
77 | stat = os.stat(fullname) | ||
78 | mode = stat.st_mode | ||
79 | if S_ISREG(mode): | ||
80 | allfiles.append(fsdecode(fullname)) | ||
81 | elif S_ISDIR(mode) and not S_ISLNK(mode): | ||
82 | push(fullname) | ||
83 | |||
84 | def add(self, item): | ||
85 | """ | ||
86 | Add a file to the manifest. | ||
87 | |||
88 | :param item: The pathname to add. This can be relative to the base. | ||
89 | """ | ||
90 | if not item.startswith(self.prefix): | ||
91 | item = os.path.join(self.base, item) | ||
92 | self.files.add(os.path.normpath(item)) | ||
93 | |||
94 | def add_many(self, items): | ||
95 | """ | ||
96 | Add a list of files to the manifest. | ||
97 | |||
98 | :param items: The pathnames to add. These can be relative to the base. | ||
99 | """ | ||
100 | for item in items: | ||
101 | self.add(item) | ||
102 | |||
103 | def sorted(self, wantdirs=False): | ||
104 | """ | ||
105 | Return the files, sorted in directory order. | ||
106 | """ | ||
107 | |||
108 | def add_dir(dirs, d): | ||
109 | dirs.add(d) | ||
110 | logger.debug('add_dir added %s', d) | ||
111 | if d != self.base: | ||
112 | parent, _ = os.path.split(d) | ||
113 | assert parent not in ('', '/') | ||
114 | add_dir(dirs, parent) | ||
115 | |||
116 | result = set(self.files) # make a copy! | ||
117 | if wantdirs: | ||
118 | dirs = set() | ||
119 | for f in result: | ||
120 | add_dir(dirs, os.path.dirname(f)) | ||
121 | result |= dirs | ||
122 | return [os.path.join(*path_tuple) for path_tuple in | ||
123 | sorted(os.path.split(path) for path in result)] | ||
124 | |||
125 | def clear(self): | ||
126 | """Clear all collected files.""" | ||
127 | self.files = set() | ||
128 | self.allfiles = [] | ||
129 | |||
130 | def process_directive(self, directive): | ||
131 | """ | ||
132 | Process a directive which either adds some files from ``allfiles`` to | ||
133 | ``files``, or removes some files from ``files``. | ||
134 | |||
135 | :param directive: The directive to process. This should be in a format | ||
136 | compatible with distutils ``MANIFEST.in`` files: | ||
137 | |||
138 | http://docs.python.org/distutils/sourcedist.html#commands | ||
139 | """ | ||
140 | # Parse the line: split it up, make sure the right number of words | ||
141 | # is there, and return the relevant words. 'action' is always | ||
142 | # defined: it's the first word of the line. Which of the other | ||
143 | # three are defined depends on the action; it'll be either | ||
144 | # patterns, (dir and patterns), or (dirpattern). | ||
145 | action, patterns, thedir, dirpattern = self._parse_directive(directive) | ||
146 | |||
147 | # OK, now we know that the action is valid and we have the | ||
148 | # right number of words on the line for that action -- so we | ||
149 | # can proceed with minimal error-checking. | ||
150 | if action == 'include': | ||
151 | for pattern in patterns: | ||
152 | if not self._include_pattern(pattern, anchor=True): | ||
153 | logger.warning('no files found matching %r', pattern) | ||
154 | |||
155 | elif action == 'exclude': | ||
156 | for pattern in patterns: | ||
157 | found = self._exclude_pattern(pattern, anchor=True) | ||
158 | #if not found: | ||
159 | # logger.warning('no previously-included files ' | ||
160 | # 'found matching %r', pattern) | ||
161 | |||
162 | elif action == 'global-include': | ||
163 | for pattern in patterns: | ||
164 | if not self._include_pattern(pattern, anchor=False): | ||
165 | logger.warning('no files found matching %r ' | ||
166 | 'anywhere in distribution', pattern) | ||
167 | |||
168 | elif action == 'global-exclude': | ||
169 | for pattern in patterns: | ||
170 | found = self._exclude_pattern(pattern, anchor=False) | ||
171 | #if not found: | ||
172 | # logger.warning('no previously-included files ' | ||
173 | # 'matching %r found anywhere in ' | ||
174 | # 'distribution', pattern) | ||
175 | |||
176 | elif action == 'recursive-include': | ||
177 | for pattern in patterns: | ||
178 | if not self._include_pattern(pattern, prefix=thedir): | ||
179 | logger.warning('no files found matching %r ' | ||
180 | 'under directory %r', pattern, thedir) | ||
181 | |||
182 | elif action == 'recursive-exclude': | ||
183 | for pattern in patterns: | ||
184 | found = self._exclude_pattern(pattern, prefix=thedir) | ||
185 | #if not found: | ||
186 | # logger.warning('no previously-included files ' | ||
187 | # 'matching %r found under directory %r', | ||
188 | # pattern, thedir) | ||
189 | |||
190 | elif action == 'graft': | ||
191 | if not self._include_pattern(None, prefix=dirpattern): | ||
192 | logger.warning('no directories found matching %r', | ||
193 | dirpattern) | ||
194 | |||
195 | elif action == 'prune': | ||
196 | if not self._exclude_pattern(None, prefix=dirpattern): | ||
197 | logger.warning('no previously-included directories found ' | ||
198 | 'matching %r', dirpattern) | ||
199 | else: # pragma: no cover | ||
200 | # This should never happen, as it should be caught in | ||
201 | # _parse_template_line | ||
202 | raise DistlibException( | ||
203 | 'invalid action %r' % action) | ||
204 | |||
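Taken together, a hypothetical driver for this API (the project path and import path are assumptions, not part of this module) would feed MANIFEST.in-style lines straight to ``process_directive``:

```python
from pip._vendor.distlib.manifest import Manifest   # assumed import path

m = Manifest('/path/to/project')        # assumed to be an existing checkout
for line in ('include README.rst',
             'recursive-include src *.py',
             'global-exclude *.py[co]',
             'prune build'):
    m.process_directive(line)
print(m.sorted())                       # selected files in directory order
```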
205 | # | ||
206 | # Private API | ||
207 | # | ||
208 | |||
209 | def _parse_directive(self, directive): | ||
210 | """ | ||
211 | Validate a directive. | ||
212 | :param directive: The directive to validate. | ||
213 | :return: A tuple of action, patterns, thedir, dir_patterns | ||
214 | """ | ||
215 | words = directive.split() | ||
216 | if len(words) == 1 and words[0] not in ('include', 'exclude', | ||
217 | 'global-include', | ||
218 | 'global-exclude', | ||
219 | 'recursive-include', | ||
220 | 'recursive-exclude', | ||
221 | 'graft', 'prune'): | ||
222 | # no action given, let's use the default 'include' | ||
223 | words.insert(0, 'include') | ||
224 | |||
225 | action = words[0] | ||
226 | patterns = thedir = dir_pattern = None | ||
227 | |||
228 | if action in ('include', 'exclude', | ||
229 | 'global-include', 'global-exclude'): | ||
230 | if len(words) < 2: | ||
231 | raise DistlibException( | ||
232 | '%r expects <pattern1> <pattern2> ...' % action) | ||
233 | |||
234 | patterns = [convert_path(word) for word in words[1:]] | ||
235 | |||
236 | elif action in ('recursive-include', 'recursive-exclude'): | ||
237 | if len(words) < 3: | ||
238 | raise DistlibException( | ||
239 | '%r expects <dir> <pattern1> <pattern2> ...' % action) | ||
240 | |||
241 | thedir = convert_path(words[1]) | ||
242 | patterns = [convert_path(word) for word in words[2:]] | ||
243 | |||
244 | elif action in ('graft', 'prune'): | ||
245 | if len(words) != 2: | ||
246 | raise DistlibException( | ||
247 | '%r expects a single <dir_pattern>' % action) | ||
248 | |||
249 | dir_pattern = convert_path(words[1]) | ||
250 | |||
251 | else: | ||
252 | raise DistlibException('unknown action %r' % action) | ||
253 | |||
254 | return action, patterns, thedir, dir_pattern | ||
255 | |||
256 | def _include_pattern(self, pattern, anchor=True, prefix=None, | ||
257 | is_regex=False): | ||
258 | """Select strings (presumably filenames) from 'self.files' that | ||
259 | match 'pattern', a Unix-style wildcard (glob) pattern. | ||
260 | |||
261 | Patterns are not quite the same as implemented by the 'fnmatch' | ||
262 | module: '*' and '?' match non-special characters, where "special" | ||
263 | is platform-dependent: slash on Unix; colon, slash, and backslash on | ||
264 | DOS/Windows; and colon on Mac OS. | ||
265 | |||
266 | If 'anchor' is true (the default), then the pattern match is more | ||
267 | stringent: "*.py" will match "foo.py" but not "foo/bar.py". If | ||
268 | 'anchor' is false, both of these will match. | ||
269 | |||
270 | If 'prefix' is supplied, then only filenames starting with 'prefix' | ||
271 | (itself a pattern) and ending with 'pattern', with anything in between | ||
272 | them, will match. 'anchor' is ignored in this case. | ||
273 | |||
274 | If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and | ||
275 | 'pattern' is assumed to be either a string containing a regex or a | ||
276 | regex object -- no translation is done, the regex is just compiled | ||
277 | and used as-is. | ||
278 | |||
279 | Selected strings will be added to self.files. | ||
280 | |||
281 | Return True if files are found. | ||
282 | """ | ||
283 | # XXX docstring lying about what the special chars are? | ||
284 | found = False | ||
285 | pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex) | ||
286 | |||
287 | # delayed loading of allfiles list | ||
288 | if self.allfiles is None: | ||
289 | self.findall() | ||
290 | |||
291 | for name in self.allfiles: | ||
292 | if pattern_re.search(name): | ||
293 | self.files.add(name) | ||
294 | found = True | ||
295 | return found | ||
296 | |||
297 | def _exclude_pattern(self, pattern, anchor=True, prefix=None, | ||
298 | is_regex=False): | ||
299 | """Remove strings (presumably filenames) from 'files' that match | ||
300 | 'pattern'. | ||
301 | |||
302 | Other parameters are the same as for '_include_pattern()', above. | ||
303 | The list 'self.files' is modified in place. Return True if files are | ||
304 | found. | ||
305 | |||
306 | This is used e.g. to exclude SCM subdirectories when packaging | ||
307 | source distributions. | ||
308 | """ | ||
309 | found = False | ||
310 | pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex) | ||
311 | for f in list(self.files): | ||
312 | if pattern_re.search(f): | ||
313 | self.files.remove(f) | ||
314 | found = True | ||
315 | return found | ||
316 | |||
317 | def _translate_pattern(self, pattern, anchor=True, prefix=None, | ||
318 | is_regex=False): | ||
319 | """Translate a shell-like wildcard pattern to a compiled regular | ||
320 | expression. | ||
321 | |||
322 | Return the compiled regex. If 'is_regex' true, | ||
323 | then 'pattern' is directly compiled to a regex (if it's a string) | ||
324 | or just returned as-is (assumes it's a regex object). | ||
325 | """ | ||
326 | if is_regex: | ||
327 | if isinstance(pattern, str): | ||
328 | return re.compile(pattern) | ||
329 | else: | ||
330 | return pattern | ||
331 | |||
332 | if _PYTHON_VERSION > (3, 2): | ||
333 | # ditch start and end characters | ||
334 | start, _, end = self._glob_to_re('_').partition('_') | ||
335 | |||
336 | if pattern: | ||
337 | pattern_re = self._glob_to_re(pattern) | ||
338 | if _PYTHON_VERSION > (3, 2): | ||
339 | assert pattern_re.startswith(start) and pattern_re.endswith(end) | ||
340 | else: | ||
341 | pattern_re = '' | ||
342 | |||
343 | base = re.escape(os.path.join(self.base, '')) | ||
344 | if prefix is not None: | ||
345 | # ditch end of pattern character | ||
346 | if _PYTHON_VERSION <= (3, 2): | ||
347 | empty_pattern = self._glob_to_re('') | ||
348 | prefix_re = self._glob_to_re(prefix)[:-len(empty_pattern)] | ||
349 | else: | ||
350 | prefix_re = self._glob_to_re(prefix) | ||
351 | assert prefix_re.startswith(start) and prefix_re.endswith(end) | ||
352 | prefix_re = prefix_re[len(start): len(prefix_re) - len(end)] | ||
353 | sep = os.sep | ||
354 | if os.sep == '\\': | ||
355 | sep = r'\\' | ||
356 | if _PYTHON_VERSION <= (3, 2): | ||
357 | pattern_re = '^' + base + sep.join((prefix_re, | ||
358 | '.*' + pattern_re)) | ||
359 | else: | ||
360 | pattern_re = pattern_re[len(start): len(pattern_re) - len(end)] | ||
361 | pattern_re = r'%s%s%s%s.*%s%s' % (start, base, prefix_re, sep, | ||
362 | pattern_re, end) | ||
363 | else: # no prefix -- respect anchor flag | ||
364 | if anchor: | ||
365 | if _PYTHON_VERSION <= (3, 2): | ||
366 | pattern_re = '^' + base + pattern_re | ||
367 | else: | ||
368 | pattern_re = r'%s%s%s' % (start, base, pattern_re[len(start):]) | ||
369 | |||
370 | return re.compile(pattern_re) | ||
371 | |||
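To make the anchor semantics concrete, here is an illustrative poke at the private helper (Python 3.3+ and POSIX paths assumed; no filesystem access is needed because only the regex is built):

```python
from pip._vendor.distlib.manifest import Manifest   # assumed import path

m = Manifest('/tmp/demo')
anchored = m._translate_pattern('*.py', anchor=True)    # private; illustration
loose = m._translate_pattern('*.py', anchor=False)
print(bool(anchored.search('/tmp/demo/foo.py')))        # True
print(bool(anchored.search('/tmp/demo/pkg/foo.py')))    # False: '*' stops at os.sep
print(bool(loose.search('/tmp/demo/pkg/foo.py')))       # True
```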
372 | def _glob_to_re(self, pattern): | ||
373 | """Translate a shell-like glob pattern to a regular expression. | ||
374 | |||
375 | Return a string containing the regex. Differs from | ||
376 | 'fnmatch.translate()' in that '*' does not match "special characters" | ||
377 | (which are platform-specific). | ||
378 | """ | ||
379 | pattern_re = fnmatch.translate(pattern) | ||
380 | |||
381 | # '?' and '*' in the glob pattern become '.' and '.*' in the RE, which | ||
382 | # IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix, | ||
383 | # and by extension they shouldn't match such "special characters" under | ||
384 | # any OS. So change all non-escaped dots in the RE to match any | ||
385 | # character except the special characters (currently: just os.sep). | ||
386 | sep = os.sep | ||
387 | if os.sep == '\\': | ||
388 | # we're using a regex to manipulate a regex, so we need | ||
389 | # to escape the backslash twice | ||
390 | sep = r'\\\\' | ||
391 | escaped = r'\1[^%s]' % sep | ||
392 | pattern_re = re.sub(r'((?<!\\)(\\\\)*)\.', escaped, pattern_re) | ||
393 | return pattern_re | ||
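The practical difference from plain ``fnmatch.translate`` is exactly this separator handling. A standalone demonstration of the dot rewrite (output shapes assume Python 3.6+, where ``translate`` produces the ``(?s:...)\Z`` form):

```python
import fnmatch
import re

raw = fnmatch.translate('*.py')                             # '(?s:.*\\.py)\\Z'
sep_aware = re.sub(r'((?<!\\)(\\\\)*)\.', r'\1[^/]', raw)   # POSIX sep assumed
print(bool(re.match(raw, 'pkg/mod.py')))        # True: '.*' crosses '/'
print(bool(re.match(sep_aware, 'pkg/mod.py')))  # False: '[^/]*' stops at '/'
print(bool(re.match(sep_aware, 'mod.py')))      # True
```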
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/markers.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/markers.py new file mode 100644 index 0000000..82fcfb8 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/markers.py | |||
@@ -0,0 +1,131 @@ | |||
1 | # -*- coding: utf-8 -*- | ||
2 | # | ||
3 | # Copyright (C) 2012-2017 Vinay Sajip. | ||
4 | # Licensed to the Python Software Foundation under a contributor agreement. | ||
5 | # See LICENSE.txt and CONTRIBUTORS.txt. | ||
6 | # | ||
7 | """ | ||
8 | Parser for the environment markers micro-language defined in PEP 508. | ||
9 | """ | ||
10 | |||
11 | # Note: In PEP 345, the micro-language was Python compatible, so the ast | ||
12 | # module could be used to parse it. However, PEP 508 introduced operators such | ||
13 | # as ~= and === which aren't in Python, necessitating a different approach. | ||
14 | |||
15 | import os | ||
16 | import sys | ||
17 | import platform | ||
18 | import re | ||
19 | |||
20 | from .compat import python_implementation, urlparse, string_types | ||
21 | from .util import in_venv, parse_marker | ||
22 | |||
23 | __all__ = ['interpret'] | ||
24 | |||
25 | def _is_literal(o): | ||
26 | if not isinstance(o, string_types) or not o: | ||
27 | return False | ||
28 | return o[0] in '\'"' | ||
29 | |||
30 | class Evaluator(object): | ||
31 | """ | ||
32 | This class is used to evaluate marker expressions. | ||
33 | """ | ||
34 | |||
35 | operations = { | ||
36 | '==': lambda x, y: x == y, | ||
37 | '===': lambda x, y: x == y, | ||
38 | '~=': lambda x, y: x == y or x > y, # crude approximation of PEP 440 '~=' | ||
39 | '!=': lambda x, y: x != y, | ||
40 | '<': lambda x, y: x < y, | ||
41 | '<=': lambda x, y: x == y or x < y, | ||
42 | '>': lambda x, y: x > y, | ||
43 | '>=': lambda x, y: x == y or x > y, | ||
44 | 'and': lambda x, y: x and y, | ||
45 | 'or': lambda x, y: x or y, | ||
46 | 'in': lambda x, y: x in y, | ||
47 | 'not in': lambda x, y: x not in y, | ||
48 | } | ||
49 | |||
50 | def evaluate(self, expr, context): | ||
51 | """ | ||
52 | Evaluate a marker expression returned by the :func:`parse_requirement` | ||
53 | function in the specified context. | ||
54 | """ | ||
55 | if isinstance(expr, string_types): | ||
56 | if expr[0] in '\'"': | ||
57 | result = expr[1:-1] | ||
58 | else: | ||
59 | if expr not in context: | ||
60 | raise SyntaxError('unknown variable: %s' % expr) | ||
61 | result = context[expr] | ||
62 | else: | ||
63 | assert isinstance(expr, dict) | ||
64 | op = expr['op'] | ||
65 | if op not in self.operations: | ||
66 | raise NotImplementedError('op not implemented: %s' % op) | ||
67 | elhs = expr['lhs'] | ||
68 | erhs = expr['rhs'] | ||
69 | if _is_literal(expr['lhs']) and _is_literal(expr['rhs']): | ||
70 | raise SyntaxError('invalid comparison: %s %s %s' % (elhs, op, erhs)) | ||
71 | |||
72 | lhs = self.evaluate(elhs, context) | ||
73 | rhs = self.evaluate(erhs, context) | ||
74 | result = self.operations[op](lhs, rhs) | ||
75 | return result | ||
76 | |||
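For illustration, a hand-built expression tree in the shape ``evaluate`` expects (nested ``op``/``lhs``/``rhs`` dicts, with quoted strings for literals; normally ``parse_marker`` produces this):

```python
from pip._vendor.distlib.markers import Evaluator   # assumed import path

e = Evaluator()
ctx = {'python_version': '3.6', 'os_name': 'posix'}
expr = {'op': 'and',
        'lhs': {'op': '>=', 'lhs': 'python_version', 'rhs': '"3.4"'},
        'rhs': {'op': '==', 'lhs': 'os_name', 'rhs': '"posix"'}}
print(e.evaluate(expr, ctx))   # True
```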
77 | def default_context(): | ||
78 | def format_full_version(info): | ||
79 | version = '%s.%s.%s' % (info.major, info.minor, info.micro) | ||
80 | kind = info.releaselevel | ||
81 | if kind != 'final': | ||
82 | version += kind[0] + str(info.serial) | ||
83 | return version | ||
84 | |||
85 | if hasattr(sys, 'implementation'): | ||
86 | implementation_version = format_full_version(sys.implementation.version) | ||
87 | implementation_name = sys.implementation.name | ||
88 | else: | ||
89 | implementation_version = '0' | ||
90 | implementation_name = '' | ||
91 | |||
92 | result = { | ||
93 | 'implementation_name': implementation_name, | ||
94 | 'implementation_version': implementation_version, | ||
95 | 'os_name': os.name, | ||
96 | 'platform_machine': platform.machine(), | ||
97 | 'platform_python_implementation': platform.python_implementation(), | ||
98 | 'platform_release': platform.release(), | ||
99 | 'platform_system': platform.system(), | ||
100 | 'platform_version': platform.version(), | ||
101 | 'platform_in_venv': str(in_venv()), | ||
102 | 'python_full_version': platform.python_version(), | ||
103 | 'python_version': '.'.join(platform.python_version_tuple()[:2]), # not [:3], which truncates e.g. '3.10.0' to '3.1' | ||
104 | 'sys_platform': sys.platform, | ||
105 | } | ||
106 | return result | ||
107 | |||
108 | DEFAULT_CONTEXT = default_context() | ||
109 | del default_context | ||
110 | |||
111 | evaluator = Evaluator() | ||
112 | |||
113 | def interpret(marker, execution_context=None): | ||
114 | """ | ||
115 | Interpret a marker and return a result depending on environment. | ||
116 | |||
117 | :param marker: The marker to interpret. | ||
118 | :type marker: str | ||
119 | :param execution_context: The context used for name lookup. | ||
120 | :type execution_context: mapping | ||
121 | """ | ||
122 | try: | ||
123 | expr, rest = parse_marker(marker) | ||
124 | except Exception as e: | ||
125 | raise SyntaxError('Unable to interpret marker syntax: %s: %s' % (marker, e)) | ||
126 | if rest and rest[0] != '#': | ||
127 | raise SyntaxError('unexpected trailing data in marker: %s: %s' % (marker, rest)) | ||
128 | context = dict(DEFAULT_CONTEXT) | ||
129 | if execution_context: | ||
130 | context.update(execution_context) | ||
131 | return evaluator.evaluate(expr, context) | ||
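Typical use (import path assumed):

```python
from pip._vendor.distlib.markers import interpret   # assumed import path

print(interpret('os_name == "posix"'))   # platform-dependent
# Names can be overridden for cross-environment evaluation. Note that this
# evaluator compares strings, so version comparisons are lexicographic:
print(interpret('python_version >= "3.4"', {'python_version': '3.6'}))   # True
print(interpret('python_version >= "3.4"', {'python_version': '3.10'}))  # False: '3.10' < '3.4' as strings
```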
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/metadata.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/metadata.py new file mode 100644 index 0000000..10a1fee --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/metadata.py | |||
@@ -0,0 +1,1091 @@ | |||
1 | # -*- coding: utf-8 -*- | ||
2 | # | ||
3 | # Copyright (C) 2012 The Python Software Foundation. | ||
4 | # See LICENSE.txt and CONTRIBUTORS.txt. | ||
5 | # | ||
6 | """Implementation of the Metadata for Python packages PEPs. | ||
7 | |||
8 | Supports all metadata formats (1.0, 1.1, 1.2, 1.3/2.1 and 2.0 experimental). | ||
9 | """ | ||
10 | from __future__ import unicode_literals | ||
11 | |||
12 | import codecs | ||
13 | from email import message_from_file | ||
14 | import json | ||
15 | import logging | ||
16 | import re | ||
17 | |||
18 | |||
19 | from . import DistlibException, __version__ | ||
20 | from .compat import StringIO, string_types, text_type | ||
21 | from .markers import interpret | ||
22 | from .util import extract_by_key, get_extras | ||
23 | from .version import get_scheme, PEP440_VERSION_RE | ||
24 | |||
25 | logger = logging.getLogger(__name__) | ||
26 | |||
27 | |||
28 | class MetadataMissingError(DistlibException): | ||
29 | """A required metadata is missing""" | ||
30 | |||
31 | |||
32 | class MetadataConflictError(DistlibException): | ||
33 | """Attempt to read or write metadata fields that are conflictual.""" | ||
34 | |||
35 | |||
36 | class MetadataUnrecognizedVersionError(DistlibException): | ||
37 | """Unknown metadata version number.""" | ||
38 | |||
39 | |||
40 | class MetadataInvalidError(DistlibException): | ||
41 | """A metadata value is invalid""" | ||
42 | |||
43 | # public API of this module | ||
44 | __all__ = ['Metadata', 'PKG_INFO_ENCODING', 'PKG_INFO_PREFERRED_VERSION'] | ||
45 | |||
46 | # Encoding used for the PKG-INFO files | ||
47 | PKG_INFO_ENCODING = 'utf-8' | ||
48 | |||
49 | # preferred version. Hopefully will be changed | ||
50 | # to 1.2 once PEP 345 is supported everywhere | ||
51 | PKG_INFO_PREFERRED_VERSION = '1.1' | ||
52 | |||
53 | _LINE_PREFIX_1_2 = re.compile('\n \\|') | ||
54 | _LINE_PREFIX_PRE_1_2 = re.compile('\n ') | ||
55 | _241_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', | ||
56 | 'Summary', 'Description', | ||
57 | 'Keywords', 'Home-page', 'Author', 'Author-email', | ||
58 | 'License') | ||
59 | |||
60 | _314_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', | ||
61 | 'Supported-Platform', 'Summary', 'Description', | ||
62 | 'Keywords', 'Home-page', 'Author', 'Author-email', | ||
63 | 'License', 'Classifier', 'Download-URL', 'Obsoletes', | ||
64 | 'Provides', 'Requires') | ||
65 | |||
66 | _314_MARKERS = ('Obsoletes', 'Provides', 'Requires', 'Classifier', | ||
67 | 'Download-URL') | ||
68 | |||
69 | _345_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', | ||
70 | 'Supported-Platform', 'Summary', 'Description', | ||
71 | 'Keywords', 'Home-page', 'Author', 'Author-email', | ||
72 | 'Maintainer', 'Maintainer-email', 'License', | ||
73 | 'Classifier', 'Download-URL', 'Obsoletes-Dist', | ||
74 | 'Project-URL', 'Provides-Dist', 'Requires-Dist', | ||
75 | 'Requires-Python', 'Requires-External') | ||
76 | |||
77 | _345_MARKERS = ('Provides-Dist', 'Requires-Dist', 'Requires-Python', | ||
78 | 'Obsoletes-Dist', 'Requires-External', 'Maintainer', | ||
79 | 'Maintainer-email', 'Project-URL') | ||
80 | |||
81 | _426_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', | ||
82 | 'Supported-Platform', 'Summary', 'Description', | ||
83 | 'Keywords', 'Home-page', 'Author', 'Author-email', | ||
84 | 'Maintainer', 'Maintainer-email', 'License', | ||
85 | 'Classifier', 'Download-URL', 'Obsoletes-Dist', | ||
86 | 'Project-URL', 'Provides-Dist', 'Requires-Dist', | ||
87 | 'Requires-Python', 'Requires-External', 'Private-Version', | ||
88 | 'Obsoleted-By', 'Setup-Requires-Dist', 'Extension', | ||
89 | 'Provides-Extra') | ||
90 | |||
91 | _426_MARKERS = ('Private-Version', 'Provides-Extra', 'Obsoleted-By', | ||
92 | 'Setup-Requires-Dist', 'Extension') | ||
93 | |||
94 | _566_FIELDS = _426_FIELDS + ('Description-Content-Type',) | ||
95 | |||
96 | _566_MARKERS = ('Description-Content-Type',) | ||
97 | |||
98 | _ALL_FIELDS = set() | ||
99 | _ALL_FIELDS.update(_241_FIELDS) | ||
100 | _ALL_FIELDS.update(_314_FIELDS) | ||
101 | _ALL_FIELDS.update(_345_FIELDS) | ||
102 | _ALL_FIELDS.update(_426_FIELDS) | ||
103 | _ALL_FIELDS.update(_566_FIELDS) | ||
104 | |||
105 | EXTRA_RE = re.compile(r'''extra\s*==\s*("([^"]+)"|'([^']+)')''') | ||
106 | |||
107 | |||
108 | def _version2fieldlist(version): | ||
109 | if version == '1.0': | ||
110 | return _241_FIELDS | ||
111 | elif version == '1.1': | ||
112 | return _314_FIELDS | ||
113 | elif version == '1.2': | ||
114 | return _345_FIELDS | ||
115 | elif version in ('1.3', '2.1'): | ||
116 | return _345_FIELDS + _566_FIELDS | ||
117 | elif version == '2.0': | ||
118 | return _426_FIELDS | ||
119 | raise MetadataUnrecognizedVersionError(version) | ||
120 | |||
121 | |||
122 | def _best_version(fields): | ||
123 | """Detect the best version depending on the fields used.""" | ||
124 | def _has_marker(keys, markers): | ||
125 | for marker in markers: | ||
126 | if marker in keys: | ||
127 | return True | ||
128 | return False | ||
129 | |||
130 | keys = [] | ||
131 | for key, value in fields.items(): | ||
132 | if value in ([], 'UNKNOWN', None): | ||
133 | continue | ||
134 | keys.append(key) | ||
135 | |||
136 | possible_versions = ['1.0', '1.1', '1.2', '1.3', '2.0', '2.1'] | ||
137 | |||
138 | # first let's try to see if a field is not part of one of the version | ||
139 | for key in keys: | ||
140 | if key not in _241_FIELDS and '1.0' in possible_versions: | ||
141 | possible_versions.remove('1.0') | ||
142 | logger.debug('Removed 1.0 due to %s', key) | ||
143 | if key not in _314_FIELDS and '1.1' in possible_versions: | ||
144 | possible_versions.remove('1.1') | ||
145 | logger.debug('Removed 1.1 due to %s', key) | ||
146 | if key not in _345_FIELDS and '1.2' in possible_versions: | ||
147 | possible_versions.remove('1.2') | ||
148 | logger.debug('Removed 1.2 due to %s', key) | ||
149 | if key not in _566_FIELDS and '1.3' in possible_versions: | ||
150 | possible_versions.remove('1.3') | ||
151 | logger.debug('Removed 1.3 due to %s', key) | ||
152 | if key not in _566_FIELDS and '2.1' in possible_versions: | ||
153 | if key != 'Description': # In 2.1, description allowed after headers | ||
154 | possible_versions.remove('2.1') | ||
155 | logger.debug('Removed 2.1 due to %s', key) | ||
156 | if key not in _426_FIELDS and '2.0' in possible_versions: | ||
157 | possible_versions.remove('2.0') | ||
158 | logger.debug('Removed 2.0 due to %s', key) | ||
159 | |||
160 | # possible_versions contains qualified versions | ||
161 | if len(possible_versions) == 1: | ||
162 | return possible_versions[0] # found ! | ||
163 | elif len(possible_versions) == 0: | ||
164 | logger.debug('Out of options - unknown metadata set: %s', fields) | ||
165 | raise MetadataConflictError('Unknown metadata set') | ||
166 | |||
167 | # let's see if one unique marker is found | ||
168 | is_1_1 = '1.1' in possible_versions and _has_marker(keys, _314_MARKERS) | ||
169 | is_1_2 = '1.2' in possible_versions and _has_marker(keys, _345_MARKERS) | ||
170 | is_2_1 = '2.1' in possible_versions and _has_marker(keys, _566_MARKERS) | ||
171 | is_2_0 = '2.0' in possible_versions and _has_marker(keys, _426_MARKERS) | ||
172 | if int(is_1_1) + int(is_1_2) + int(is_2_1) + int(is_2_0) > 1: | ||
173 | raise MetadataConflictError('You used incompatible 1.1/1.2/2.0/2.1 fields') | ||
174 | |||
175 | # we have the choice, 1.0, or 1.2, or 2.0 | ||
176 | # - 1.0 has a broken Summary field but works with all tools | ||
177 | # - 1.1 is to avoid | ||
178 | # - 1.2 fixes Summary but has little adoption | ||
179 | # - 2.0 adds more features and is very new | ||
180 | if not is_1_1 and not is_1_2 and not is_2_1 and not is_2_0: | ||
181 | # we couldn't find any specific marker | ||
182 | if PKG_INFO_PREFERRED_VERSION in possible_versions: | ||
183 | return PKG_INFO_PREFERRED_VERSION | ||
184 | if is_1_1: | ||
185 | return '1.1' | ||
186 | if is_1_2: | ||
187 | return '1.2' | ||
188 | if is_2_1: | ||
189 | return '2.1' | ||
190 | |||
191 | return '2.0' | ||
192 | |||
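A quick sketch of how field sets drive version detection (``_best_version`` is private; this is illustration only):

```python
from pip._vendor.distlib.metadata import _best_version   # private helper

fields = {'Name': 'demo', 'Version': '0.1', 'Summary': 'x'}
print(_best_version(fields))            # '1.1', the preferred default
fields['Requires-Dist'] = ['requests']
print(_best_version(fields))            # '1.2': Requires-Dist is a 1.2 marker
```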
193 | _ATTR2FIELD = { | ||
194 | 'metadata_version': 'Metadata-Version', | ||
195 | 'name': 'Name', | ||
196 | 'version': 'Version', | ||
197 | 'platform': 'Platform', | ||
198 | 'supported_platform': 'Supported-Platform', | ||
199 | 'summary': 'Summary', | ||
200 | 'description': 'Description', | ||
201 | 'keywords': 'Keywords', | ||
202 | 'home_page': 'Home-page', | ||
203 | 'author': 'Author', | ||
204 | 'author_email': 'Author-email', | ||
205 | 'maintainer': 'Maintainer', | ||
206 | 'maintainer_email': 'Maintainer-email', | ||
207 | 'license': 'License', | ||
208 | 'classifier': 'Classifier', | ||
209 | 'download_url': 'Download-URL', | ||
210 | 'obsoletes_dist': 'Obsoletes-Dist', | ||
211 | 'provides_dist': 'Provides-Dist', | ||
212 | 'requires_dist': 'Requires-Dist', | ||
213 | 'setup_requires_dist': 'Setup-Requires-Dist', | ||
214 | 'requires_python': 'Requires-Python', | ||
215 | 'requires_external': 'Requires-External', | ||
216 | 'requires': 'Requires', | ||
217 | 'provides': 'Provides', | ||
218 | 'obsoletes': 'Obsoletes', | ||
219 | 'project_url': 'Project-URL', | ||
220 | 'private_version': 'Private-Version', | ||
221 | 'obsoleted_by': 'Obsoleted-By', | ||
222 | 'extension': 'Extension', | ||
223 | 'provides_extra': 'Provides-Extra', | ||
224 | } | ||
225 | |||
226 | _PREDICATE_FIELDS = ('Requires-Dist', 'Obsoletes-Dist', 'Provides-Dist') | ||
227 | _VERSIONS_FIELDS = ('Requires-Python',) | ||
228 | _VERSION_FIELDS = ('Version',) | ||
229 | _LISTFIELDS = ('Platform', 'Classifier', 'Obsoletes', | ||
230 | 'Requires', 'Provides', 'Obsoletes-Dist', | ||
231 | 'Provides-Dist', 'Requires-Dist', 'Requires-External', | ||
232 | 'Project-URL', 'Supported-Platform', 'Setup-Requires-Dist', | ||
233 | 'Provides-Extra', 'Extension') | ||
234 | _LISTTUPLEFIELDS = ('Project-URL',) | ||
235 | |||
236 | _ELEMENTSFIELD = ('Keywords',) | ||
237 | |||
238 | _UNICODEFIELDS = ('Author', 'Maintainer', 'Summary', 'Description') | ||
239 | |||
240 | _MISSING = object() | ||
241 | |||
242 | _FILESAFE = re.compile('[^A-Za-z0-9.]+') | ||
243 | |||
244 | |||
245 | def _get_name_and_version(name, version, for_filename=False): | ||
246 | """Return the distribution name with version. | ||
247 | |||
248 | If for_filename is true, return a filename-escaped form.""" | ||
249 | if for_filename: | ||
250 | # For both name and version any runs of non-alphanumeric or '.' | ||
251 | # characters are replaced with a single '-'. Additionally any | ||
252 | # spaces in the version string become '.' | ||
253 | name = _FILESAFE.sub('-', name) | ||
254 | version = _FILESAFE.sub('-', version.replace(' ', '.')) | ||
255 | return '%s-%s' % (name, version) | ||
256 | |||
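For example (values invented):

```python
from pip._vendor.distlib.metadata import _get_name_and_version  # private helper

print(_get_name_and_version('my pkg', '1.0 beta'))        # 'my pkg-1.0 beta'
print(_get_name_and_version('my pkg', '1.0 beta', True))  # 'my-pkg-1.0.beta'
```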
257 | |||
258 | class LegacyMetadata(object): | ||
259 | """The legacy metadata of a release. | ||
260 | |||
261 | Supports versions 1.0, 1.1 and 1.2 (auto-detected). You can | ||
262 | instantiate the class with one of these arguments (or none): | ||
263 | - *path*, the path to a metadata file | ||
264 | - *fileobj* give a file-like object with metadata as content | ||
265 | - *mapping* is a dict-like object | ||
266 | - *scheme* is a version scheme name | ||
267 | """ | ||
268 | # TODO document the mapping API and UNKNOWN default key | ||
269 | |||
270 | def __init__(self, path=None, fileobj=None, mapping=None, | ||
271 | scheme='default'): | ||
272 | if [path, fileobj, mapping].count(None) < 2: | ||
273 | raise TypeError('path, fileobj and mapping are exclusive') | ||
274 | self._fields = {} | ||
275 | self.requires_files = [] | ||
276 | self._dependencies = None | ||
277 | self.scheme = scheme | ||
278 | if path is not None: | ||
279 | self.read(path) | ||
280 | elif fileobj is not None: | ||
281 | self.read_file(fileobj) | ||
282 | elif mapping is not None: | ||
283 | self.update(mapping) | ||
284 | self.set_metadata_version() | ||
285 | |||
286 | def set_metadata_version(self): | ||
287 | self._fields['Metadata-Version'] = _best_version(self._fields) | ||
288 | |||
289 | def _write_field(self, fileobj, name, value): | ||
290 | fileobj.write('%s: %s\n' % (name, value)) | ||
291 | |||
292 | def __getitem__(self, name): | ||
293 | return self.get(name) | ||
294 | |||
295 | def __setitem__(self, name, value): | ||
296 | return self.set(name, value) | ||
297 | |||
298 | def __delitem__(self, name): | ||
299 | field_name = self._convert_name(name) | ||
300 | try: | ||
301 | del self._fields[field_name] | ||
302 | except KeyError: | ||
303 | raise KeyError(name) | ||
304 | |||
305 | def __contains__(self, name): | ||
306 | return (name in self._fields or | ||
307 | self._convert_name(name) in self._fields) | ||
308 | |||
309 | def _convert_name(self, name): | ||
310 | if name in _ALL_FIELDS: | ||
311 | return name | ||
312 | name = name.replace('-', '_').lower() | ||
313 | return _ATTR2FIELD.get(name, name) | ||
314 | |||
315 | def _default_value(self, name): | ||
316 | if name in _LISTFIELDS or name in _ELEMENTSFIELD: | ||
317 | return [] | ||
318 | return 'UNKNOWN' | ||
319 | |||
320 | def _remove_line_prefix(self, value): | ||
321 | if self.metadata_version in ('1.0', '1.1'): | ||
322 | return _LINE_PREFIX_PRE_1_2.sub('\n', value) | ||
323 | else: | ||
324 | return _LINE_PREFIX_1_2.sub('\n', value) | ||
325 | |||
326 | def __getattr__(self, name): | ||
327 | if name in _ATTR2FIELD: | ||
328 | return self[name] | ||
329 | raise AttributeError(name) | ||
330 | |||
331 | # | ||
332 | # Public API | ||
333 | # | ||
334 | |||
335 | # dependencies = property(_get_dependencies, _set_dependencies) | ||
336 | |||
337 | def get_fullname(self, filesafe=False): | ||
338 | """Return the distribution name with version. | ||
339 | |||
340 | If filesafe is true, return a filename-escaped form.""" | ||
341 | return _get_name_and_version(self['Name'], self['Version'], filesafe) | ||
342 | |||
343 | def is_field(self, name): | ||
344 | """return True if name is a valid metadata key""" | ||
345 | name = self._convert_name(name) | ||
346 | return name in _ALL_FIELDS | ||
347 | |||
348 | def is_multi_field(self, name): | ||
349 | name = self._convert_name(name) | ||
350 | return name in _LISTFIELDS | ||
351 | |||
352 | def read(self, filepath): | ||
353 | """Read the metadata values from a file path.""" | ||
354 | fp = codecs.open(filepath, 'r', encoding='utf-8') | ||
355 | try: | ||
356 | self.read_file(fp) | ||
357 | finally: | ||
358 | fp.close() | ||
359 | |||
360 | def read_file(self, fileob): | ||
361 | """Read the metadata values from a file object.""" | ||
362 | msg = message_from_file(fileob) | ||
363 | self._fields['Metadata-Version'] = msg['metadata-version'] | ||
364 | |||
365 | # When reading, get all the fields we can | ||
366 | for field in _ALL_FIELDS: | ||
367 | if field not in msg: | ||
368 | continue | ||
369 | if field in _LISTFIELDS: | ||
370 | # we can have multiple lines | ||
371 | values = msg.get_all(field) | ||
372 | if field in _LISTTUPLEFIELDS and values is not None: | ||
373 | values = [tuple(value.split(',')) for value in values] | ||
374 | self.set(field, values) | ||
375 | else: | ||
376 | # single line | ||
377 | value = msg[field] | ||
378 | if value is not None and value != 'UNKNOWN': | ||
379 | self.set(field, value) | ||
380 | logger.debug('Attempting to set metadata for %s', self) | ||
381 | self.set_metadata_version() | ||
382 | |||
383 | def write(self, filepath, skip_unknown=False): | ||
384 | """Write the metadata fields to filepath.""" | ||
385 | fp = codecs.open(filepath, 'w', encoding='utf-8') | ||
386 | try: | ||
387 | self.write_file(fp, skip_unknown) | ||
388 | finally: | ||
389 | fp.close() | ||
390 | |||
391 | def write_file(self, fileobject, skip_unknown=False): | ||
392 | """Write the PKG-INFO format data to a file object.""" | ||
393 | self.set_metadata_version() | ||
394 | |||
395 | for field in _version2fieldlist(self['Metadata-Version']): | ||
396 | values = self.get(field) | ||
397 | if skip_unknown and values in ('UNKNOWN', [], ['UNKNOWN']): | ||
398 | continue | ||
399 | if field in _ELEMENTSFIELD: | ||
400 | self._write_field(fileobject, field, ','.join(values)) | ||
401 | continue | ||
402 | if field not in _LISTFIELDS: | ||
403 | if field == 'Description': | ||
404 | if self.metadata_version in ('1.0', '1.1'): | ||
405 | values = values.replace('\n', '\n ') | ||
406 | else: | ||
407 | values = values.replace('\n', '\n |') | ||
408 | values = [values] | ||
409 | |||
410 | if field in _LISTTUPLEFIELDS: | ||
411 | values = [','.join(value) for value in values] | ||
412 | |||
413 | for value in values: | ||
414 | self._write_field(fileobject, field, value) | ||
415 | |||
416 | def update(self, other=None, **kwargs): | ||
417 | """Set metadata values from the given iterable `other` and kwargs. | ||
418 | |||
419 | Behavior is like `dict.update`: If `other` has a ``keys`` method, | ||
420 | they are looped over and ``self[key]`` is assigned ``other[key]``. | ||
421 | Else, ``other`` is an iterable of ``(key, value)`` iterables. | ||
422 | |||
423 | Keys that don't match a metadata field or that have an empty value are | ||
424 | dropped. | ||
425 | """ | ||
426 | def _set(key, value): | ||
427 | if key in _ATTR2FIELD and value: | ||
428 | self.set(self._convert_name(key), value) | ||
429 | |||
430 | if not other: | ||
431 | # other is None or empty container | ||
432 | pass | ||
433 | elif hasattr(other, 'keys'): | ||
434 | for k in other.keys(): | ||
435 | _set(k, other[k]) | ||
436 | else: | ||
437 | for k, v in other: | ||
438 | _set(k, v) | ||
439 | |||
440 | if kwargs: | ||
441 | for k, v in kwargs.items(): | ||
442 | _set(k, v) | ||
443 | |||
444 | def set(self, name, value): | ||
445 | """Control then set a metadata field.""" | ||
446 | name = self._convert_name(name) | ||
447 | |||
448 | if ((name in _ELEMENTSFIELD or name == 'Platform') and | ||
449 | not isinstance(value, (list, tuple))): | ||
450 | if isinstance(value, string_types): | ||
451 | value = [v.strip() for v in value.split(',')] | ||
452 | else: | ||
453 | value = [] | ||
454 | elif (name in _LISTFIELDS and | ||
455 | not isinstance(value, (list, tuple))): | ||
456 | if isinstance(value, string_types): | ||
457 | value = [value] | ||
458 | else: | ||
459 | value = [] | ||
460 | |||
461 | # always fetch: the warning calls below reference project_name | ||
462 | project_name = self['Name'] | ||
463 | |||
464 | scheme = get_scheme(self.scheme) | ||
465 | if name in _PREDICATE_FIELDS and value is not None: | ||
466 | for v in value: | ||
467 | # check that the values are valid | ||
468 | if not scheme.is_valid_matcher(v.split(';')[0]): | ||
469 | logger.warning( | ||
470 | "'%s': '%s' is not valid (field '%s')", | ||
471 | project_name, v, name) | ||
472 | # FIXME this rejects UNKNOWN, is that right? | ||
473 | elif name in _VERSIONS_FIELDS and value is not None: | ||
474 | if not scheme.is_valid_constraint_list(value): | ||
475 | logger.warning("'%s': '%s' is not a valid version (field '%s')", | ||
476 | project_name, value, name) | ||
477 | elif name in _VERSION_FIELDS and value is not None: | ||
478 | if not scheme.is_valid_version(value): | ||
479 | logger.warning("'%s': '%s' is not a valid version (field '%s')", | ||
480 | project_name, value, name) | ||
481 | |||
482 | if name in _UNICODEFIELDS: | ||
483 | if name == 'Description': | ||
484 | value = self._remove_line_prefix(value) | ||
485 | |||
486 | self._fields[name] = value | ||
487 | |||
488 | def get(self, name, default=_MISSING): | ||
489 | """Get a metadata field.""" | ||
490 | name = self._convert_name(name) | ||
491 | if name not in self._fields: | ||
492 | if default is _MISSING: | ||
493 | default = self._default_value(name) | ||
494 | return default | ||
495 | if name in _UNICODEFIELDS: | ||
496 | value = self._fields[name] | ||
497 | return value | ||
498 | elif name in _LISTFIELDS: | ||
499 | value = self._fields[name] | ||
500 | if value is None: | ||
501 | return [] | ||
502 | res = [] | ||
503 | for val in value: | ||
504 | if name not in _LISTTUPLEFIELDS: | ||
505 | res.append(val) | ||
506 | else: | ||
507 | # That's for Project-URL | ||
508 | res.append((val[0], val[1])) | ||
509 | return res | ||
510 | |||
511 | elif name in _ELEMENTSFIELD: | ||
512 | value = self._fields[name] | ||
513 | if isinstance(value, string_types): | ||
514 | return value.split(',') | ||
515 | return self._fields[name] | ||
516 | |||
517 | def check(self, strict=False): | ||
518 | """Check if the metadata is compliant. If strict is True then raise if | ||
519 | no Name or Version is provided.""" | ||
520 | self.set_metadata_version() | ||
521 | |||
522 | # XXX should check the versions (if the file was loaded) | ||
523 | missing, warnings = [], [] | ||
524 | |||
525 | for attr in ('Name', 'Version'): # required by PEP 345 | ||
526 | if attr not in self: | ||
527 | missing.append(attr) | ||
528 | |||
529 | if strict and missing: | ||
530 | msg = 'missing required metadata: %s' % ', '.join(missing) | ||
531 | raise MetadataMissingError(msg) | ||
532 | |||
533 | for attr in ('Home-page', 'Author'): | ||
534 | if attr not in self: | ||
535 | missing.append(attr) | ||
536 | |||
537 | # checking metadata 1.2 (XXX needs to check 1.1, 1.0) | ||
538 | if self['Metadata-Version'] != '1.2': | ||
539 | return missing, warnings | ||
540 | |||
541 | scheme = get_scheme(self.scheme) | ||
542 | |||
543 | def are_valid_constraints(value): | ||
544 | for v in value: | ||
545 | if not scheme.is_valid_matcher(v.split(';')[0]): | ||
546 | return False | ||
547 | return True | ||
548 | |||
549 | for fields, controller in ((_PREDICATE_FIELDS, are_valid_constraints), | ||
550 | (_VERSIONS_FIELDS, | ||
551 | scheme.is_valid_constraint_list), | ||
552 | (_VERSION_FIELDS, | ||
553 | scheme.is_valid_version)): | ||
554 | for field in fields: | ||
555 | value = self.get(field, None) | ||
556 | if value is not None and not controller(value): | ||
557 | warnings.append("Wrong value for '%s': %s" % (field, value)) | ||
558 | |||
559 | return missing, warnings | ||
560 | |||
561 | def todict(self, skip_missing=False): | ||
562 | """Return fields as a dict. | ||
563 | |||
564 | Field names will be converted to use the underscore-lowercase style | ||
565 | instead of hyphen-mixed case (i.e. home_page instead of Home-page). | ||
566 | """ | ||
567 | self.set_metadata_version() | ||
568 | |||
569 | mapping_1_0 = ( | ||
570 | ('metadata_version', 'Metadata-Version'), | ||
571 | ('name', 'Name'), | ||
572 | ('version', 'Version'), | ||
573 | ('summary', 'Summary'), | ||
574 | ('home_page', 'Home-page'), | ||
575 | ('author', 'Author'), | ||
576 | ('author_email', 'Author-email'), | ||
577 | ('license', 'License'), | ||
578 | ('description', 'Description'), | ||
579 | ('keywords', 'Keywords'), | ||
580 | ('platform', 'Platform'), | ||
581 | ('classifiers', 'Classifier'), | ||
582 | ('download_url', 'Download-URL'), | ||
583 | ) | ||
584 | |||
585 | data = {} | ||
586 | for key, field_name in mapping_1_0: | ||
587 | if not skip_missing or field_name in self._fields: | ||
588 | data[key] = self[field_name] | ||
589 | |||
590 | if self['Metadata-Version'] == '1.2': | ||
591 | mapping_1_2 = ( | ||
592 | ('requires_dist', 'Requires-Dist'), | ||
593 | ('requires_python', 'Requires-Python'), | ||
594 | ('requires_external', 'Requires-External'), | ||
595 | ('provides_dist', 'Provides-Dist'), | ||
596 | ('obsoletes_dist', 'Obsoletes-Dist'), | ||
597 | ('project_url', 'Project-URL'), | ||
598 | ('maintainer', 'Maintainer'), | ||
599 | ('maintainer_email', 'Maintainer-email'), | ||
600 | ) | ||
601 | for key, field_name in mapping_1_2: | ||
602 | if not skip_missing or field_name in self._fields: | ||
603 | if key != 'project_url': | ||
604 | data[key] = self[field_name] | ||
605 | else: | ||
606 | data[key] = [','.join(u) for u in self[field_name]] | ||
607 | |||
608 | elif self['Metadata-Version'] == '1.1': | ||
609 | mapping_1_1 = ( | ||
610 | ('provides', 'Provides'), | ||
611 | ('requires', 'Requires'), | ||
612 | ('obsoletes', 'Obsoletes'), | ||
613 | ) | ||
614 | for key, field_name in mapping_1_1: | ||
615 | if not skip_missing or field_name in self._fields: | ||
616 | data[key] = self[field_name] | ||
617 | |||
618 | return data | ||
619 | |||
620 | def add_requirements(self, requirements): | ||
621 | if self['Metadata-Version'] == '1.1': | ||
622 | # we can't have 1.1 metadata *and* Setuptools requires | ||
623 | for field in ('Obsoletes', 'Requires', 'Provides'): | ||
624 | if field in self: | ||
625 | del self[field] | ||
626 | self['Requires-Dist'] += requirements | ||
627 | |||
628 | # Mapping API | ||
629 | # TODO could add iter* variants | ||
630 | |||
631 | def keys(self): | ||
632 | return list(_version2fieldlist(self['Metadata-Version'])) | ||
633 | |||
634 | def __iter__(self): | ||
635 | for key in self.keys(): | ||
636 | yield key | ||
637 | |||
638 | def values(self): | ||
639 | return [self[key] for key in self.keys()] | ||
640 | |||
641 | def items(self): | ||
642 | return [(key, self[key]) for key in self.keys()] | ||
643 | |||
644 | def __repr__(self): | ||
645 | return '<%s %s %s>' % (self.__class__.__name__, self.name, | ||
646 | self.version) | ||
647 | |||
648 | |||
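A short usage sketch for ``LegacyMetadata`` (import path assumed): build it from a mapping, check compliance, and serialise in the key/value PKG-INFO format:

```python
from io import StringIO
from pip._vendor.distlib.metadata import LegacyMetadata   # assumed import path

md = LegacyMetadata(mapping={'name': 'demo', 'version': '0.1',
                             'summary': 'A demo package'})
missing, warnings = md.check()
print(missing)             # ['Home-page', 'Author']: recommended but absent
buf = StringIO()
md.write_file(buf)         # PKG-INFO style 'Field: value' output
print(buf.getvalue().splitlines()[0])   # 'Metadata-Version: 1.1'
```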
649 | METADATA_FILENAME = 'pydist.json' | ||
650 | WHEEL_METADATA_FILENAME = 'metadata.json' | ||
651 | |||
652 | |||
653 | class Metadata(object): | ||
654 | """ | ||
655 | The metadata of a release. This implementation uses 2.0 (JSON) | ||
656 | metadata where possible. If not possible, it wraps a LegacyMetadata | ||
657 | instance which handles the key-value metadata format. | ||
658 | """ | ||
659 | |||
660 | METADATA_VERSION_MATCHER = re.compile(r'^\d+(\.\d+)*$') | ||
661 | |||
662 | NAME_MATCHER = re.compile('^[0-9A-Z]([0-9A-Z_.-]*[0-9A-Z])?$', re.I) | ||
663 | |||
664 | VERSION_MATCHER = PEP440_VERSION_RE | ||
665 | |||
666 | SUMMARY_MATCHER = re.compile('.{1,2047}') | ||
667 | |||
668 | METADATA_VERSION = '2.0' | ||
669 | |||
670 | GENERATOR = 'distlib (%s)' % __version__ | ||
671 | |||
672 | MANDATORY_KEYS = { | ||
673 | 'name': (), | ||
674 | 'version': (), | ||
675 | 'summary': ('legacy',), | ||
676 | } | ||
677 | |||
678 | INDEX_KEYS = ('name version license summary description author ' | ||
679 | 'author_email keywords platform home_page classifiers ' | ||
680 | 'download_url') | ||
681 | |||
682 | DEPENDENCY_KEYS = ('extras run_requires test_requires build_requires ' | ||
683 | 'dev_requires provides meta_requires obsoleted_by ' | ||
684 | 'supports_environments') | ||
685 | |||
686 | SYNTAX_VALIDATORS = { | ||
687 | 'metadata_version': (METADATA_VERSION_MATCHER, ()), | ||
688 | 'name': (NAME_MATCHER, ('legacy',)), | ||
689 | 'version': (VERSION_MATCHER, ('legacy',)), | ||
690 | 'summary': (SUMMARY_MATCHER, ('legacy',)), | ||
691 | } | ||
692 | |||
693 | __slots__ = ('_legacy', '_data', 'scheme') | ||
694 | |||
695 | def __init__(self, path=None, fileobj=None, mapping=None, | ||
696 | scheme='default'): | ||
697 | if [path, fileobj, mapping].count(None) < 2: | ||
698 | raise TypeError('path, fileobj and mapping are exclusive') | ||
699 | self._legacy = None | ||
700 | self._data = None | ||
701 | self.scheme = scheme | ||
702 | |||
703 | if mapping is not None: | ||
704 | try: | ||
705 | self._validate_mapping(mapping, scheme) | ||
706 | self._data = mapping | ||
707 | except MetadataUnrecognizedVersionError: | ||
708 | self._legacy = LegacyMetadata(mapping=mapping, scheme=scheme) | ||
709 | self.validate() | ||
710 | else: | ||
711 | data = None | ||
712 | if path: | ||
713 | with open(path, 'rb') as f: | ||
714 | data = f.read() | ||
715 | elif fileobj: | ||
716 | data = fileobj.read() | ||
717 | if data is None: | ||
718 | # Initialised with no args - fields are to be added later | ||
719 | self._data = { | ||
720 | 'metadata_version': self.METADATA_VERSION, | ||
721 | 'generator': self.GENERATOR, | ||
722 | } | ||
723 | else: | ||
724 | if not isinstance(data, text_type): | ||
725 | data = data.decode('utf-8') | ||
726 | try: | ||
727 | self._data = json.loads(data) | ||
728 | self._validate_mapping(self._data, scheme) | ||
729 | except ValueError: | ||
730 | # Note: MetadataUnrecognizedVersionError does not | ||
731 | # inherit from ValueError (it's a DistlibException, | ||
732 | # which should not inherit from ValueError). | ||
733 | # The ValueError comes from the json.load - if that | ||
734 | # succeeds and we get a validation error, we want | ||
735 | # that to propagate | ||
736 | self._legacy = LegacyMetadata(fileobj=StringIO(data), | ||
737 | scheme=scheme) | ||
738 | self.validate() | ||
739 | |||
740 | common_keys = set(('name', 'version', 'license', 'keywords', 'summary')) | ||
741 | |||
742 | none_list = (None, list) | ||
743 | none_dict = (None, dict) | ||
744 | |||
745 | mapped_keys = { | ||
746 | 'run_requires': ('Requires-Dist', list), | ||
747 | 'build_requires': ('Setup-Requires-Dist', list), | ||
748 | 'dev_requires': none_list, | ||
749 | 'test_requires': none_list, | ||
750 | 'meta_requires': none_list, | ||
751 | 'extras': ('Provides-Extra', list), | ||
752 | 'modules': none_list, | ||
753 | 'namespaces': none_list, | ||
754 | 'exports': none_dict, | ||
755 | 'commands': none_dict, | ||
756 | 'classifiers': ('Classifier', list), | ||
757 | 'source_url': ('Download-URL', None), | ||
758 | 'metadata_version': ('Metadata-Version', None), | ||
759 | } | ||
760 | |||
761 | del none_list, none_dict | ||
762 | |||
763 | def __getattribute__(self, key): | ||
764 | common = object.__getattribute__(self, 'common_keys') | ||
765 | mapped = object.__getattribute__(self, 'mapped_keys') | ||
766 | if key in mapped: | ||
767 | lk, maker = mapped[key] | ||
768 | if self._legacy: | ||
769 | if lk is None: | ||
770 | result = None if maker is None else maker() | ||
771 | else: | ||
772 | result = self._legacy.get(lk) | ||
773 | else: | ||
774 | value = None if maker is None else maker() | ||
775 | if key not in ('commands', 'exports', 'modules', 'namespaces', | ||
776 | 'classifiers'): | ||
777 | result = self._data.get(key, value) | ||
778 | else: | ||
779 | # special cases for PEP 459 | ||
780 | sentinel = object() | ||
781 | result = sentinel | ||
782 | d = self._data.get('extensions') | ||
783 | if d: | ||
784 | if key == 'commands': | ||
785 | result = d.get('python.commands', value) | ||
786 | elif key == 'classifiers': | ||
787 | d = d.get('python.details') | ||
788 | if d: | ||
789 | result = d.get(key, value) | ||
790 | else: | ||
791 | d = d.get('python.exports') | ||
792 | if not d: | ||
793 | d = self._data.get('python.exports') | ||
794 | if d: | ||
795 | result = d.get(key, value) | ||
796 | if result is sentinel: | ||
797 | result = value | ||
798 | elif key not in common: | ||
799 | result = object.__getattribute__(self, key) | ||
800 | elif self._legacy: | ||
801 | result = self._legacy.get(key) | ||
802 | else: | ||
803 | result = self._data.get(key) | ||
804 | return result | ||
805 | |||
806 | def _validate_value(self, key, value, scheme=None): | ||
807 | if key in self.SYNTAX_VALIDATORS: | ||
808 | pattern, exclusions = self.SYNTAX_VALIDATORS[key] | ||
809 | if (scheme or self.scheme) not in exclusions: | ||
810 | m = pattern.match(value) | ||
811 | if not m: | ||
812 | raise MetadataInvalidError("'%s' is an invalid value for " | ||
813 | "the '%s' property" % (value, | ||
814 | key)) | ||
815 | |||
816 | def __setattr__(self, key, value): | ||
817 | self._validate_value(key, value) | ||
818 | common = object.__getattribute__(self, 'common_keys') | ||
819 | mapped = object.__getattribute__(self, 'mapped_keys') | ||
820 | if key in mapped: | ||
821 | lk, _ = mapped[key] | ||
822 | if self._legacy: | ||
823 | if lk is None: | ||
824 | raise NotImplementedError | ||
825 | self._legacy[lk] = value | ||
826 | elif key not in ('commands', 'exports', 'modules', 'namespaces', | ||
827 | 'classifiers'): | ||
828 | self._data[key] = value | ||
829 | else: | ||
830 | # special cases for PEP 459 | ||
831 | d = self._data.setdefault('extensions', {}) | ||
832 | if key == 'commands': | ||
833 | d['python.commands'] = value | ||
834 | elif key == 'classifiers': | ||
835 | d = d.setdefault('python.details', {}) | ||
836 | d[key] = value | ||
837 | else: | ||
838 | d = d.setdefault('python.exports', {}) | ||
839 | d[key] = value | ||
840 | elif key not in common: | ||
841 | object.__setattr__(self, key, value) | ||
842 | else: | ||
843 | if key == 'keywords': | ||
844 | if isinstance(value, string_types): | ||
845 | value = value.strip() | ||
846 | if value: | ||
847 | value = value.split() | ||
848 | else: | ||
849 | value = [] | ||
850 | if self._legacy: | ||
851 | self._legacy[key] = value | ||
852 | else: | ||
853 | self._data[key] = value | ||
854 | |||
855 | @property | ||
856 | def name_and_version(self): | ||
857 | return _get_name_and_version(self.name, self.version, True) | ||
858 | |||
859 | @property | ||
860 | def provides(self): | ||
861 | if self._legacy: | ||
862 | result = self._legacy['Provides-Dist'] | ||
863 | else: | ||
864 | result = self._data.setdefault('provides', []) | ||
865 | s = '%s (%s)' % (self.name, self.version) | ||
866 | if s not in result: | ||
867 | result.append(s) | ||
868 | return result | ||
869 | |||
870 | @provides.setter | ||
871 | def provides(self, value): | ||
872 | if self._legacy: | ||
873 | self._legacy['Provides-Dist'] = value | ||
874 | else: | ||
875 | self._data['provides'] = value | ||
876 | |||
877 | def get_requirements(self, reqts, extras=None, env=None): | ||
878 | """ | ||
879 | Base method to get dependencies, given a set of extras | ||
880 | to satisfy and an optional environment context. | ||
881 | :param reqts: A list of sometimes-wanted dependencies, | ||
882 | perhaps dependent on extras and environment. | ||
883 | :param extras: A list of optional components being requested. | ||
884 | :param env: An optional environment for marker evaluation. | ||
885 | """ | ||
886 | if self._legacy: | ||
887 | result = reqts | ||
888 | else: | ||
889 | result = [] | ||
890 | extras = get_extras(extras or [], self.extras) | ||
891 | for d in reqts: | ||
892 | if 'extra' not in d and 'environment' not in d: | ||
893 | # unconditional | ||
894 | include = True | ||
895 | else: | ||
896 | if 'extra' not in d: | ||
897 | # Not extra-dependent - only environment-dependent | ||
898 | include = True | ||
899 | else: | ||
900 | include = d.get('extra') in extras | ||
901 | if include: | ||
902 | # Not excluded because of extras, check environment | ||
903 | marker = d.get('environment') | ||
904 | if marker: | ||
905 | include = interpret(marker, env) | ||
906 | if include: | ||
907 | result.extend(d['requires']) | ||
908 | for key in ('build', 'dev', 'test'): | ||
909 | e = ':%s:' % key | ||
910 | if e in extras: | ||
911 | extras.remove(e) | ||
912 | # A recursive call, but it should terminate since 'test' | ||
913 | # has been removed from the extras | ||
914 | reqts = self._data.get('%s_requires' % key, []) | ||
915 | result.extend(self.get_requirements(reqts, extras=extras, | ||
916 | env=env)) | ||
917 | return result | ||
918 | |||
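To see the filtering in action, a sketch with invented dependency entries (on a non-Windows machine the environment-gated entry is dropped):

```python
from pip._vendor.distlib.metadata import Metadata   # assumed import path

md = Metadata(mapping={'metadata_version': '2.0', 'name': 'demo',
                       'version': '0.1', 'summary': 'demo',
                       'extras': ['test']})
reqts = [{'requires': ['requests']},                       # unconditional
         {'extra': 'test', 'requires': ['pytest']},        # only for [test]
         {'environment': 'os_name == "nt"',
          'requires': ['pywin32']}]                        # only on Windows
print(md.get_requirements(reqts))                    # ['requests']
print(md.get_requirements(reqts, extras=['test']))   # ['requests', 'pytest']
```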
919 | @property | ||
920 | def dictionary(self): | ||
921 | if self._legacy: | ||
922 | return self._from_legacy() | ||
923 | return self._data | ||
924 | |||
925 | @property | ||
926 | def dependencies(self): | ||
927 | if self._legacy: | ||
928 | raise NotImplementedError | ||
929 | else: | ||
930 | return extract_by_key(self._data, self.DEPENDENCY_KEYS) | ||
931 | |||
932 | @dependencies.setter | ||
933 | def dependencies(self, value): | ||
934 | if self._legacy: | ||
935 | raise NotImplementedError | ||
936 | else: | ||
937 | self._data.update(value) | ||
938 | |||
939 | def _validate_mapping(self, mapping, scheme): | ||
940 | if mapping.get('metadata_version') != self.METADATA_VERSION: | ||
941 | raise MetadataUnrecognizedVersionError() | ||
942 | missing = [] | ||
943 | for key, exclusions in self.MANDATORY_KEYS.items(): | ||
944 | if key not in mapping: | ||
945 | if scheme not in exclusions: | ||
946 | missing.append(key) | ||
947 | if missing: | ||
948 | msg = 'Missing metadata items: %s' % ', '.join(missing) | ||
949 | raise MetadataMissingError(msg) | ||
950 | for k, v in mapping.items(): | ||
951 | self._validate_value(k, v, scheme) | ||
952 | |||
953 | def validate(self): | ||
954 | if self._legacy: | ||
955 | missing, warnings = self._legacy.check(True) | ||
956 | if missing or warnings: | ||
957 | logger.warning('Metadata: missing: %s, warnings: %s', | ||
958 | missing, warnings) | ||
959 | else: | ||
960 | self._validate_mapping(self._data, self.scheme) | ||
961 | |||
962 | def todict(self): | ||
963 | if self._legacy: | ||
964 | return self._legacy.todict(True) | ||
965 | else: | ||
966 | result = extract_by_key(self._data, self.INDEX_KEYS) | ||
967 | return result | ||
968 | |||
969 | def _from_legacy(self): | ||
970 | assert self._legacy and not self._data | ||
971 | result = { | ||
972 | 'metadata_version': self.METADATA_VERSION, | ||
973 | 'generator': self.GENERATOR, | ||
974 | } | ||
975 | lmd = self._legacy.todict(True) # skip missing ones | ||
976 | for k in ('name', 'version', 'license', 'summary', 'description', | ||
977 | 'classifier'): | ||
978 | if k in lmd: | ||
979 | if k == 'classifier': | ||
980 | nk = 'classifiers' | ||
981 | else: | ||
982 | nk = k | ||
983 | result[nk] = lmd[k] | ||
984 | kw = lmd.get('Keywords', []) | ||
985 | if kw == ['']: | ||
986 | kw = [] | ||
987 | result['keywords'] = kw | ||
988 | keys = (('requires_dist', 'run_requires'), | ||
989 | ('setup_requires_dist', 'build_requires')) | ||
990 | for ok, nk in keys: | ||
991 | if ok in lmd and lmd[ok]: | ||
992 | result[nk] = [{'requires': lmd[ok]}] | ||
993 | result['provides'] = self.provides | ||
994 | author = {} # placeholder: contact details are not yet converted | ||
995 | maintainer = {} # placeholder: contact details are not yet converted | ||
996 | return result | ||
997 | |||
998 | LEGACY_MAPPING = { | ||
999 | 'name': 'Name', | ||
1000 | 'version': 'Version', | ||
1001 | 'license': 'License', | ||
1002 | 'summary': 'Summary', | ||
1003 | 'description': 'Description', | ||
1004 | 'classifiers': 'Classifier', | ||
1005 | } | ||
1006 | |||
1007 | def _to_legacy(self): | ||
1008 | def process_entries(entries): | ||
1009 | reqts = set() | ||
1010 | for e in entries: | ||
1011 | extra = e.get('extra') | ||
1012 | env = e.get('environment') | ||
1013 | rlist = e['requires'] | ||
1014 | for r in rlist: | ||
1015 | if not env and not extra: | ||
1016 | reqts.add(r) | ||
1017 | else: | ||
1018 | marker = '' | ||
1019 | if extra: | ||
1020 | marker = 'extra == "%s"' % extra | ||
1021 | if env: | ||
1022 | if marker: | ||
1023 | marker = '(%s) and %s' % (env, marker) | ||
1024 | else: | ||
1025 | marker = env | ||
1026 | reqts.add(';'.join((r, marker))) | ||
1027 | return reqts | ||
1028 | |||
1029 | assert self._data and not self._legacy | ||
1030 | result = LegacyMetadata() | ||
1031 | nmd = self._data | ||
1032 | for nk, ok in self.LEGACY_MAPPING.items(): | ||
1033 | if nk in nmd: | ||
1034 | result[ok] = nmd[nk] | ||
1035 | r1 = process_entries(self.run_requires + self.meta_requires) | ||
1036 | r2 = process_entries(self.build_requires + self.dev_requires) | ||
1037 | if self.extras: | ||
1038 | result['Provides-Extra'] = sorted(self.extras) | ||
1039 | result['Requires-Dist'] = sorted(r1) | ||
1040 | result['Setup-Requires-Dist'] = sorted(r2) | ||
1041 | # TODO: other fields such as contacts | ||
1042 | return result | ||
1043 | |||
1044 | def write(self, path=None, fileobj=None, legacy=False, skip_unknown=True): | ||
1045 | if [path, fileobj].count(None) != 1: | ||
1046 | raise ValueError('Exactly one of path and fileobj is needed') | ||
1047 | self.validate() | ||
1048 | if legacy: | ||
1049 | if self._legacy: | ||
1050 | legacy_md = self._legacy | ||
1051 | else: | ||
1052 | legacy_md = self._to_legacy() | ||
1053 | if path: | ||
1054 | legacy_md.write(path, skip_unknown=skip_unknown) | ||
1055 | else: | ||
1056 | legacy_md.write_file(fileobj, skip_unknown=skip_unknown) | ||
1057 | else: | ||
1058 | if self._legacy: | ||
1059 | d = self._from_legacy() | ||
1060 | else: | ||
1061 | d = self._data | ||
1062 | if fileobj: | ||
1063 | json.dump(d, fileobj, ensure_ascii=True, indent=2, | ||
1064 | sort_keys=True) | ||
1065 | else: | ||
1066 | with codecs.open(path, 'w', 'utf-8') as f: | ||
1067 | json.dump(d, f, ensure_ascii=True, indent=2, | ||
1068 | sort_keys=True) | ||
1069 | |||
1070 | def add_requirements(self, requirements): | ||
1071 | if self._legacy: | ||
1072 | self._legacy.add_requirements(requirements) | ||
1073 | else: | ||
1074 | run_requires = self._data.setdefault('run_requires', []) | ||
1075 | always = None | ||
1076 | for entry in run_requires: | ||
1077 | if 'environment' not in entry and 'extra' not in entry: | ||
1078 | always = entry | ||
1079 | break | ||
1080 | if always is None: | ||
1081 | always = { 'requires': requirements } | ||
1082 | run_requires.insert(0, always) | ||
1083 | else: | ||
1084 | rset = set(always['requires']) | set(requirements) | ||
1085 | always['requires'] = sorted(rset) | ||
1086 | |||
1087 | def __repr__(self): | ||
1088 | name = self.name or '(no name)' | ||
1089 | version = self.version or 'no version' | ||
1090 | return '<%s %s %s (%s)>' % (self.__class__.__name__, | ||
1091 | self.metadata_version, name, version) | ||
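The add_requirements() method above maintains the 'run_requires' list of the JSON metadata: the first entry carrying neither an 'extra' nor an 'environment' key holds the unconditional requirements, and new requirements are merged into it in sorted order. A minimal sketch of the data shape involved (package names illustrative only):

    run_requires = [
        {'requires': ['requests (>=2.0)']},                             # unconditional
        {'extra': 'test', 'requires': ['pytest']},                      # only for [test]
        {'environment': 'python_version < "3"', 'requires': ['mock']},  # marker-gated
    ]

_to_legacy() then flattens such entries into Requires-Dist strings via process_entries(), joining each requirement to its marker with ';' and combining 'environment' and 'extra' conditions with 'and'.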
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/resources.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/resources.py new file mode 100644 index 0000000..cd618a6 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/resources.py | |||
@@ -0,0 +1,355 @@ | |||
1 | # -*- coding: utf-8 -*- | ||
2 | # | ||
3 | # Copyright (C) 2013-2017 Vinay Sajip. | ||
4 | # Licensed to the Python Software Foundation under a contributor agreement. | ||
5 | # See LICENSE.txt and CONTRIBUTORS.txt. | ||
6 | # | ||
7 | from __future__ import unicode_literals | ||
8 | |||
9 | import bisect | ||
10 | import io | ||
11 | import logging | ||
12 | import os | ||
13 | import pkgutil | ||
14 | import shutil | ||
15 | import sys | ||
16 | import types | ||
17 | import zipimport | ||
18 | |||
19 | from . import DistlibException | ||
20 | from .util import cached_property, get_cache_base, path_to_cache_dir, Cache | ||
21 | |||
22 | logger = logging.getLogger(__name__) | ||
23 | |||
24 | |||
25 | cache = None # created when needed | ||
26 | |||
27 | |||
28 | class ResourceCache(Cache): | ||
29 | def __init__(self, base=None): | ||
30 | if base is None: | ||
31 | # Use native string to avoid issues on 2.x: see Python #20140. | ||
32 | base = os.path.join(get_cache_base(), str('resource-cache')) | ||
33 | super(ResourceCache, self).__init__(base) | ||
34 | |||
35 | def is_stale(self, resource, path): | ||
36 | """ | ||
37 | Is the cache stale for the given resource? | ||
38 | |||
39 | :param resource: The :class:`Resource` being cached. | ||
40 | :param path: The path of the resource in the cache. | ||
41 | :return: True if the cache is stale. | ||
42 | """ | ||
43 | # Cache invalidation is a hard problem :-) | ||
44 | return True | ||
45 | |||
46 | def get(self, resource): | ||
47 | """ | ||
48 | Get a resource into the cache. | ||
49 | |||
50 | :param resource: A :class:`Resource` instance. | ||
51 | :return: The pathname of the resource in the cache. | ||
52 | """ | ||
53 | prefix, path = resource.finder.get_cache_info(resource) | ||
54 | if prefix is None: | ||
55 | result = path | ||
56 | else: | ||
57 | result = os.path.join(self.base, self.prefix_to_dir(prefix), path) | ||
58 | dirname = os.path.dirname(result) | ||
59 | if not os.path.isdir(dirname): | ||
60 | os.makedirs(dirname) | ||
61 | if not os.path.exists(result): | ||
62 | stale = True | ||
63 | else: | ||
64 | stale = self.is_stale(resource, path) | ||
65 | if stale: | ||
66 | # write the bytes of the resource to the cache location | ||
67 | with open(result, 'wb') as f: | ||
68 | f.write(resource.bytes) | ||
69 | return result | ||
70 | |||
71 | |||
72 | class ResourceBase(object): | ||
73 | def __init__(self, finder, name): | ||
74 | self.finder = finder | ||
75 | self.name = name | ||
76 | |||
77 | |||
78 | class Resource(ResourceBase): | ||
79 | """ | ||
80 | A class representing an in-package resource, such as a data file. This is | ||
81 | not normally instantiated by user code, but rather by a | ||
82 | :class:`ResourceFinder` which manages the resource. | ||
83 | """ | ||
84 | is_container = False # Backwards compatibility | ||
85 | |||
86 | def as_stream(self): | ||
87 | """ | ||
88 | Get the resource as a stream. | ||
89 | |||
90 | This is not a property to make it obvious that it returns a new stream | ||
91 | each time. | ||
92 | """ | ||
93 | return self.finder.get_stream(self) | ||
94 | |||
95 | @cached_property | ||
96 | def file_path(self): | ||
97 | global cache | ||
98 | if cache is None: | ||
99 | cache = ResourceCache() | ||
100 | return cache.get(self) | ||
101 | |||
102 | @cached_property | ||
103 | def bytes(self): | ||
104 | return self.finder.get_bytes(self) | ||
105 | |||
106 | @cached_property | ||
107 | def size(self): | ||
108 | return self.finder.get_size(self) | ||
109 | |||
110 | |||
111 | class ResourceContainer(ResourceBase): | ||
112 | is_container = True # Backwards compatibility | ||
113 | |||
114 | @cached_property | ||
115 | def resources(self): | ||
116 | return self.finder.get_resources(self) | ||
117 | |||
118 | |||
119 | class ResourceFinder(object): | ||
120 | """ | ||
121 | Resource finder for file system resources. | ||
122 | """ | ||
123 | |||
124 | if sys.platform.startswith('java'): | ||
125 | skipped_extensions = ('.pyc', '.pyo', '.class') | ||
126 | else: | ||
127 | skipped_extensions = ('.pyc', '.pyo') | ||
128 | |||
129 | def __init__(self, module): | ||
130 | self.module = module | ||
131 | self.loader = getattr(module, '__loader__', None) | ||
132 | self.base = os.path.dirname(getattr(module, '__file__', '')) | ||
133 | |||
134 | def _adjust_path(self, path): | ||
135 | return os.path.realpath(path) | ||
136 | |||
137 | def _make_path(self, resource_name): | ||
138 | # Issue #50: need to preserve type of path on Python 2.x | ||
139 | # like os.path._get_sep | ||
140 | if isinstance(resource_name, bytes): # should only happen on 2.x | ||
141 | sep = b'/' | ||
142 | else: | ||
143 | sep = '/' | ||
144 | parts = resource_name.split(sep) | ||
145 | parts.insert(0, self.base) | ||
146 | result = os.path.join(*parts) | ||
147 | return self._adjust_path(result) | ||
148 | |||
149 | def _find(self, path): | ||
150 | return os.path.exists(path) | ||
151 | |||
152 | def get_cache_info(self, resource): | ||
153 | return None, resource.path | ||
154 | |||
155 | def find(self, resource_name): | ||
156 | path = self._make_path(resource_name) | ||
157 | if not self._find(path): | ||
158 | result = None | ||
159 | else: | ||
160 | if self._is_directory(path): | ||
161 | result = ResourceContainer(self, resource_name) | ||
162 | else: | ||
163 | result = Resource(self, resource_name) | ||
164 | result.path = path | ||
165 | return result | ||
166 | |||
167 | def get_stream(self, resource): | ||
168 | return open(resource.path, 'rb') | ||
169 | |||
170 | def get_bytes(self, resource): | ||
171 | with open(resource.path, 'rb') as f: | ||
172 | return f.read() | ||
173 | |||
174 | def get_size(self, resource): | ||
175 | return os.path.getsize(resource.path) | ||
176 | |||
177 | def get_resources(self, resource): | ||
178 | def allowed(f): | ||
179 | return (f != '__pycache__' and not | ||
180 | f.endswith(self.skipped_extensions)) | ||
181 | return set([f for f in os.listdir(resource.path) if allowed(f)]) | ||
182 | |||
183 | def is_container(self, resource): | ||
184 | return self._is_directory(resource.path) | ||
185 | |||
186 | _is_directory = staticmethod(os.path.isdir) | ||
187 | |||
188 | def iterator(self, resource_name): | ||
189 | resource = self.find(resource_name) | ||
190 | if resource is not None: | ||
191 | todo = [resource] | ||
192 | while todo: | ||
193 | resource = todo.pop(0) | ||
194 | yield resource | ||
195 | if resource.is_container: | ||
196 | rname = resource.name | ||
197 | for name in resource.resources: | ||
198 | if not rname: | ||
199 | new_name = name | ||
200 | else: | ||
201 | new_name = '/'.join([rname, name]) | ||
202 | child = self.find(new_name) | ||
203 | if child.is_container: | ||
204 | todo.append(child) | ||
205 | else: | ||
206 | yield child | ||
207 | |||
208 | |||
209 | class ZipResourceFinder(ResourceFinder): | ||
210 | """ | ||
211 | Resource finder for resources in .zip files. | ||
212 | """ | ||
213 | def __init__(self, module): | ||
214 | super(ZipResourceFinder, self).__init__(module) | ||
215 | archive = self.loader.archive | ||
216 | self.prefix_len = 1 + len(archive) | ||
217 | # PyPy doesn't have a _files attr on zipimporter, and you can't set one | ||
218 | if hasattr(self.loader, '_files'): | ||
219 | self._files = self.loader._files | ||
220 | else: | ||
221 | self._files = zipimport._zip_directory_cache[archive] | ||
222 | self.index = sorted(self._files) | ||
223 | |||
224 | def _adjust_path(self, path): | ||
225 | return path | ||
226 | |||
227 | def _find(self, path): | ||
228 | path = path[self.prefix_len:] | ||
229 | if path in self._files: | ||
230 | result = True | ||
231 | else: | ||
232 | if path and path[-1] != os.sep: | ||
233 | path = path + os.sep | ||
234 | i = bisect.bisect(self.index, path) | ||
235 | try: | ||
236 | result = self.index[i].startswith(path) | ||
237 | except IndexError: | ||
238 | result = False | ||
239 | if not result: | ||
240 | logger.debug('_find failed: %r %r', path, self.loader.prefix) | ||
241 | else: | ||
242 | logger.debug('_find worked: %r %r', path, self.loader.prefix) | ||
243 | return result | ||
244 | |||
245 | def get_cache_info(self, resource): | ||
246 | prefix = self.loader.archive | ||
247 | path = resource.path[1 + len(prefix):] | ||
248 | return prefix, path | ||
249 | |||
250 | def get_bytes(self, resource): | ||
251 | return self.loader.get_data(resource.path) | ||
252 | |||
253 | def get_stream(self, resource): | ||
254 | return io.BytesIO(self.get_bytes(resource)) | ||
255 | |||
256 | def get_size(self, resource): | ||
257 | path = resource.path[self.prefix_len:] | ||
258 | return self._files[path][3] | ||
259 | |||
260 | def get_resources(self, resource): | ||
261 | path = resource.path[self.prefix_len:] | ||
262 | if path and path[-1] != os.sep: | ||
263 | path += os.sep | ||
264 | plen = len(path) | ||
265 | result = set() | ||
266 | i = bisect.bisect(self.index, path) | ||
267 | while i < len(self.index): | ||
268 | if not self.index[i].startswith(path): | ||
269 | break | ||
270 | s = self.index[i][plen:] | ||
271 | result.add(s.split(os.sep, 1)[0]) # only immediate children | ||
272 | i += 1 | ||
273 | return result | ||
274 | |||
275 | def _is_directory(self, path): | ||
276 | path = path[self.prefix_len:] | ||
277 | if path and path[-1] != os.sep: | ||
278 | path += os.sep | ||
279 | i = bisect.bisect(self.index, path) | ||
280 | try: | ||
281 | result = self.index[i].startswith(path) | ||
282 | except IndexError: | ||
283 | result = False | ||
284 | return result | ||
285 | |||
286 | _finder_registry = { | ||
287 | type(None): ResourceFinder, | ||
288 | zipimport.zipimporter: ZipResourceFinder | ||
289 | } | ||
290 | |||
291 | try: | ||
292 | # In Python 3.6, _frozen_importlib -> _frozen_importlib_external | ||
293 | try: | ||
294 | import _frozen_importlib_external as _fi | ||
295 | except ImportError: | ||
296 | import _frozen_importlib as _fi | ||
297 | _finder_registry[_fi.SourceFileLoader] = ResourceFinder | ||
298 | _finder_registry[_fi.FileFinder] = ResourceFinder | ||
299 | del _fi | ||
300 | except (ImportError, AttributeError): | ||
301 | pass | ||
302 | |||
303 | |||
304 | def register_finder(loader, finder_maker): | ||
305 | _finder_registry[type(loader)] = finder_maker | ||
306 | |||
307 | _finder_cache = {} | ||
308 | |||
309 | |||
310 | def finder(package): | ||
311 | """ | ||
312 | Return a resource finder for a package. | ||
313 | :param package: The name of the package. | ||
314 | :return: A :class:`ResourceFinder` instance for the package. | ||
315 | """ | ||
316 | if package in _finder_cache: | ||
317 | result = _finder_cache[package] | ||
318 | else: | ||
319 | if package not in sys.modules: | ||
320 | __import__(package) | ||
321 | module = sys.modules[package] | ||
322 | path = getattr(module, '__path__', None) | ||
323 | if path is None: | ||
324 | raise DistlibException('You cannot get a finder for a module, ' | ||
325 | 'only for a package') | ||
326 | loader = getattr(module, '__loader__', None) | ||
327 | finder_maker = _finder_registry.get(type(loader)) | ||
328 | if finder_maker is None: | ||
329 | raise DistlibException('Unable to locate finder for %r' % package) | ||
330 | result = finder_maker(module) | ||
331 | _finder_cache[package] = result | ||
332 | return result | ||
333 | |||
334 | |||
335 | _dummy_module = types.ModuleType(str('__dummy__')) | ||
336 | |||
337 | |||
338 | def finder_for_path(path): | ||
339 | """ | ||
340 | Return a resource finder for a path, which should represent a container. | ||
341 | |||
342 | :param path: The path. | ||
343 | :return: A :class:`ResourceFinder` instance for the path. | ||
344 | """ | ||
345 | result = None | ||
346 | # calls any path hooks, gets importer into cache | ||
347 | pkgutil.get_importer(path) | ||
348 | loader = sys.path_importer_cache.get(path) | ||
349 | finder = _finder_registry.get(type(loader)) | ||
350 | if finder: | ||
351 | module = _dummy_module | ||
352 | module.__file__ = os.path.join(path, '') | ||
353 | module.__loader__ = loader | ||
354 | result = finder(module) | ||
355 | return result | ||
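Together, finder() and the finder classes give a uniform way to read package data whether it lives on the file system or inside a zip archive. A minimal usage sketch (using this vendored package itself; any importable package with a data file would do):

    from pip._vendor.distlib.resources import finder

    f = finder('pip._vendor.distlib')   # finder type chosen by the package's loader
    r = f.find('t32.exe')               # a launcher binary shipped with distlib
    if r is not None and not r.is_container:
        size = r.size                   # fetched via the finder, cached on the Resource
        with r.as_stream() as stream:   # a fresh binary stream on each call
            header = stream.read(2)     # b'MZ' for a Windows executable

Accessing file_path instead would materialise the resource into the shared ResourceCache, which is useful when an API insists on a real filename.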
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/scripts.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/scripts.py new file mode 100644 index 0000000..440bd30 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/scripts.py | |||
@@ -0,0 +1,415 @@ | |||
1 | # -*- coding: utf-8 -*- | ||
2 | # | ||
3 | # Copyright (C) 2013-2015 Vinay Sajip. | ||
4 | # Licensed to the Python Software Foundation under a contributor agreement. | ||
5 | # See LICENSE.txt and CONTRIBUTORS.txt. | ||
6 | # | ||
7 | from io import BytesIO | ||
8 | import logging | ||
9 | import os | ||
10 | import re | ||
11 | import struct | ||
12 | import sys | ||
13 | |||
14 | from .compat import sysconfig, detect_encoding, ZipFile | ||
15 | from .resources import finder | ||
16 | from .util import (FileOperator, get_export_entry, convert_path, | ||
17 | get_executable, in_venv) | ||
18 | |||
19 | logger = logging.getLogger(__name__) | ||
20 | |||
21 | _DEFAULT_MANIFEST = ''' | ||
22 | <?xml version="1.0" encoding="UTF-8" standalone="yes"?> | ||
23 | <assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0"> | ||
24 | <assemblyIdentity version="1.0.0.0" | ||
25 | processorArchitecture="X86" | ||
26 | name="%s" | ||
27 | type="win32"/> | ||
28 | |||
29 | <!-- Identify the application security requirements. --> | ||
30 | <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3"> | ||
31 | <security> | ||
32 | <requestedPrivileges> | ||
33 | <requestedExecutionLevel level="asInvoker" uiAccess="false"/> | ||
34 | </requestedPrivileges> | ||
35 | </security> | ||
36 | </trustInfo> | ||
37 | </assembly>'''.strip() | ||
38 | |||
39 | # check if Python is called on the first line with this expression | ||
40 | FIRST_LINE_RE = re.compile(b'^#!.*pythonw?[0-9.]*([ \t].*)?$') | ||
41 | SCRIPT_TEMPLATE = r'''# -*- coding: utf-8 -*- | ||
42 | if __name__ == '__main__': | ||
43 | import sys, re | ||
44 | |||
45 | def _resolve(module, func): | ||
46 | __import__(module) | ||
47 | mod = sys.modules[module] | ||
48 | parts = func.split('.') | ||
49 | result = getattr(mod, parts.pop(0)) | ||
50 | for p in parts: | ||
51 | result = getattr(result, p) | ||
52 | return result | ||
53 | |||
54 | try: | ||
55 | sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) | ||
56 | |||
57 | func = _resolve('%(module)s', '%(func)s') | ||
58 | rc = func() # None interpreted as 0 | ||
59 | except Exception as e: # only supporting Python >= 2.6 | ||
60 | sys.stderr.write('%%s\n' %% e) | ||
61 | rc = 1 | ||
62 | sys.exit(rc) | ||
63 | ''' | ||
64 | |||
65 | |||
66 | def _enquote_executable(executable): | ||
67 | if ' ' in executable: | ||
68 | # make sure we quote only the executable in case of env | ||
69 | # for example /usr/bin/env "/dir with spaces/bin/jython" | ||
70 | # instead of "/usr/bin/env /dir with spaces/bin/jython" | ||
71 | # otherwise the whole command line would be quoted | ||
72 | if executable.startswith('/usr/bin/env '): | ||
73 | env, _executable = executable.split(' ', 1) | ||
74 | if ' ' in _executable and not _executable.startswith('"'): | ||
75 | executable = '%s "%s"' % (env, _executable) | ||
76 | else: | ||
77 | if not executable.startswith('"'): | ||
78 | executable = '"%s"' % executable | ||
79 | return executable | ||
80 | |||
81 | |||
82 | class ScriptMaker(object): | ||
83 | """ | ||
84 | A class to copy or create scripts from source scripts or callable | ||
85 | specifications. | ||
86 | """ | ||
87 | script_template = SCRIPT_TEMPLATE | ||
88 | |||
89 | executable = None # for shebangs | ||
90 | |||
91 | def __init__(self, source_dir, target_dir, add_launchers=True, | ||
92 | dry_run=False, fileop=None): | ||
93 | self.source_dir = source_dir | ||
94 | self.target_dir = target_dir | ||
95 | self.add_launchers = add_launchers | ||
96 | self.force = False | ||
97 | self.clobber = False | ||
98 | # It only makes sense to set mode bits on POSIX. | ||
99 | self.set_mode = (os.name == 'posix') or (os.name == 'java' and | ||
100 | os._name == 'posix') | ||
101 | self.variants = set(('', 'X.Y')) | ||
102 | self._fileop = fileop or FileOperator(dry_run) | ||
103 | |||
104 | self._is_nt = os.name == 'nt' or ( | ||
105 | os.name == 'java' and os._name == 'nt') | ||
106 | |||
107 | def _get_alternate_executable(self, executable, options): | ||
108 | if options.get('gui', False) and self._is_nt: # pragma: no cover | ||
109 | dn, fn = os.path.split(executable) | ||
110 | fn = fn.replace('python', 'pythonw') | ||
111 | executable = os.path.join(dn, fn) | ||
112 | return executable | ||
113 | |||
114 | if sys.platform.startswith('java'): # pragma: no cover | ||
115 | def _is_shell(self, executable): | ||
116 | """ | ||
117 | Determine if the specified executable is a script | ||
118 | (contains a #! line) | ||
119 | """ | ||
120 | try: | ||
121 | with open(executable) as fp: | ||
122 | return fp.read(2) == '#!' | ||
123 | except (OSError, IOError): | ||
124 | logger.warning('Failed to open %s', executable) | ||
125 | return False | ||
126 | |||
127 | def _fix_jython_executable(self, executable): | ||
128 | if self._is_shell(executable): | ||
129 | # The workaround for Jython is not needed on Linux systems. | ||
130 | import java | ||
131 | |||
132 | if java.lang.System.getProperty('os.name') == 'Linux': | ||
133 | return executable | ||
134 | elif executable.lower().endswith('jython.exe'): | ||
135 | # Use wrapper exe for Jython on Windows | ||
136 | return executable | ||
137 | return '/usr/bin/env %s' % executable | ||
138 | |||
139 | def _build_shebang(self, executable, post_interp): | ||
140 | """ | ||
141 | Build a shebang line. In the simple case (on Windows, or a shebang line | ||
142 | which is not too long or contains spaces) use a simple formulation for | ||
143 | the shebang. Otherwise, use /bin/sh as the executable, with a contrived | ||
144 | shebang which allows the script to run either under Python or sh, using | ||
145 | suitable quoting. Thanks to Harald Nordgren for his input. | ||
146 | |||
147 | See also: http://www.in-ulm.de/~mascheck/various/shebang/#length | ||
148 | https://hg.mozilla.org/mozilla-central/file/tip/mach | ||
149 | """ | ||
150 | if os.name != 'posix': | ||
151 | simple_shebang = True | ||
152 | else: | ||
153 | # Add 3 for '#!' prefix and newline suffix. | ||
154 | shebang_length = len(executable) + len(post_interp) + 3 | ||
155 | if sys.platform == 'darwin': | ||
156 | max_shebang_length = 512 | ||
157 | else: | ||
158 | max_shebang_length = 127 | ||
159 | simple_shebang = ((b' ' not in executable) and | ||
160 | (shebang_length <= max_shebang_length)) | ||
161 | |||
162 | if simple_shebang: | ||
163 | result = b'#!' + executable + post_interp + b'\n' | ||
164 | else: | ||
165 | result = b'#!/bin/sh\n' | ||
166 | result += b"'''exec' " + executable + post_interp + b' "$0" "$@"\n' | ||
167 | result += b"' '''" | ||
168 | return result | ||
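# For example, with an over-long interpreter path the contrived form built
# above comes out as (path hypothetical):
#
#   #!/bin/sh
#   '''exec' /very/long/path/to/python "$0" "$@"
#   ' '''
#
# /bin/sh parses the second line as `exec /very/long/path/to/python "$0" "$@"`
# (the single quotes collapse to nothing), while Python reads lines 2-3 as one
# triple-quoted string literal, so either interpreter can run the file.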
169 | |||
170 | def _get_shebang(self, encoding, post_interp=b'', options=None): | ||
171 | enquote = True | ||
172 | if self.executable: | ||
173 | executable = self.executable | ||
174 | enquote = False # assume this will be taken care of | ||
175 | elif not sysconfig.is_python_build(): | ||
176 | executable = get_executable() | ||
177 | elif in_venv(): # pragma: no cover | ||
178 | executable = os.path.join(sysconfig.get_path('scripts'), | ||
179 | 'python%s' % sysconfig.get_config_var('EXE')) | ||
180 | else: # pragma: no cover | ||
181 | executable = os.path.join( | ||
182 | sysconfig.get_config_var('BINDIR'), | ||
183 | 'python%s%s' % (sysconfig.get_config_var('VERSION'), | ||
184 | sysconfig.get_config_var('EXE'))) | ||
185 | if options: | ||
186 | executable = self._get_alternate_executable(executable, options) | ||
187 | |||
188 | if sys.platform.startswith('java'): # pragma: no cover | ||
189 | executable = self._fix_jython_executable(executable) | ||
190 | # Normalise case for Windows | ||
191 | executable = os.path.normcase(executable) | ||
192 | # If the user didn't specify an executable, it may be necessary to | ||
193 | # cater for executable paths with spaces (not uncommon on Windows) | ||
194 | if enquote: | ||
195 | executable = _enquote_executable(executable) | ||
196 | # Issue #51: don't use fsencode, since we later try to | ||
197 | # check that the shebang is decodable using utf-8. | ||
198 | executable = executable.encode('utf-8') | ||
199 | # in case of IronPython, play safe and enable frames support | ||
200 | if (sys.platform == 'cli' and b'-X:Frames' not in post_interp | ||
201 | and b'-X:FullFrames' not in post_interp): # pragma: no cover | ||
202 | post_interp += b' -X:Frames' | ||
203 | shebang = self._build_shebang(executable, post_interp) | ||
204 | # The Python parser reads a script as UTF-8 until it finds a | ||
205 | # #coding:xxx cookie. The shebang has to be the first line of | ||
206 | # the file, so it necessarily appears before any such cookie | ||
207 | # can be written - which means the shebang itself has to be | ||
208 | # decodable from UTF-8. | ||
209 | try: | ||
210 | shebang.decode('utf-8') | ||
211 | except UnicodeDecodeError: # pragma: no cover | ||
212 | raise ValueError( | ||
213 | 'The shebang (%r) is not decodable from utf-8' % shebang) | ||
214 | # If the script uses a custom encoding (declared with a | ||
215 | # #coding:xxx cookie), the shebang has to be decodable from | ||
216 | # that encoding too. | ||
217 | if encoding != 'utf-8': | ||
218 | try: | ||
219 | shebang.decode(encoding) | ||
220 | except UnicodeDecodeError: # pragma: no cover | ||
221 | raise ValueError( | ||
222 | 'The shebang (%r) is not decodable ' | ||
223 | 'from the script encoding (%r)' % (shebang, encoding)) | ||
224 | return shebang | ||
225 | |||
226 | def _get_script_text(self, entry): | ||
227 | return self.script_template % dict(module=entry.prefix, | ||
228 | func=entry.suffix) | ||
229 | |||
230 | manifest = _DEFAULT_MANIFEST | ||
231 | |||
232 | def get_manifest(self, exename): | ||
233 | base = os.path.basename(exename) | ||
234 | return self.manifest % base | ||
235 | |||
236 | def _write_script(self, names, shebang, script_bytes, filenames, ext): | ||
237 | use_launcher = self.add_launchers and self._is_nt | ||
238 | linesep = os.linesep.encode('utf-8') | ||
239 | if not use_launcher: | ||
240 | script_bytes = shebang + linesep + script_bytes | ||
241 | else: # pragma: no cover | ||
242 | if ext == 'py': | ||
243 | launcher = self._get_launcher('t') | ||
244 | else: | ||
245 | launcher = self._get_launcher('w') | ||
246 | stream = BytesIO() | ||
247 | with ZipFile(stream, 'w') as zf: | ||
248 | zf.writestr('__main__.py', script_bytes) | ||
249 | zip_data = stream.getvalue() | ||
250 | script_bytes = launcher + shebang + linesep + zip_data | ||
251 | for name in names: | ||
252 | outname = os.path.join(self.target_dir, name) | ||
253 | if use_launcher: # pragma: no cover | ||
254 | n, e = os.path.splitext(outname) | ||
255 | if e.startswith('.py'): | ||
256 | outname = n | ||
257 | outname = '%s.exe' % outname | ||
258 | try: | ||
259 | self._fileop.write_binary_file(outname, script_bytes) | ||
260 | except Exception: | ||
261 | # Failed writing an executable - it might be in use. | ||
262 | logger.warning('Failed to write executable - trying to ' | ||
263 | 'use .deleteme logic') | ||
264 | dfname = '%s.deleteme' % outname | ||
265 | if os.path.exists(dfname): | ||
266 | os.remove(dfname) # Not allowed to fail here | ||
267 | os.rename(outname, dfname) # nor here | ||
268 | self._fileop.write_binary_file(outname, script_bytes) | ||
269 | logger.debug('Able to replace executable using ' | ||
270 | '.deleteme logic') | ||
271 | try: | ||
272 | os.remove(dfname) | ||
273 | except Exception: | ||
274 | pass # still in use - ignore error | ||
275 | else: | ||
276 | if self._is_nt and not outname.endswith('.' + ext): # pragma: no cover | ||
277 | outname = '%s.%s' % (outname, ext) | ||
278 | if os.path.exists(outname) and not self.clobber: | ||
279 | logger.warning('Skipping existing file %s', outname) | ||
280 | continue | ||
281 | self._fileop.write_binary_file(outname, script_bytes) | ||
282 | if self.set_mode: | ||
283 | self._fileop.set_executable_mode([outname]) | ||
284 | filenames.append(outname) | ||
285 | |||
286 | def _make_script(self, entry, filenames, options=None): | ||
287 | post_interp = b'' | ||
288 | if options: | ||
289 | args = options.get('interpreter_args', []) | ||
290 | if args: | ||
291 | args = ' %s' % ' '.join(args) | ||
292 | post_interp = args.encode('utf-8') | ||
293 | shebang = self._get_shebang('utf-8', post_interp, options=options) | ||
294 | script = self._get_script_text(entry).encode('utf-8') | ||
295 | name = entry.name | ||
296 | scriptnames = set() | ||
297 | if '' in self.variants: | ||
298 | scriptnames.add(name) | ||
299 | if 'X' in self.variants: | ||
300 | scriptnames.add('%s%s' % (name, sys.version[0])) | ||
301 | if 'X.Y' in self.variants: | ||
302 | scriptnames.add('%s-%s' % (name, sys.version[:3])) | ||
303 | if options and options.get('gui', False): | ||
304 | ext = 'pyw' | ||
305 | else: | ||
306 | ext = 'py' | ||
307 | self._write_script(scriptnames, shebang, script, filenames, ext) | ||
308 | |||
309 | def _copy_script(self, script, filenames): | ||
310 | adjust = False | ||
311 | script = os.path.join(self.source_dir, convert_path(script)) | ||
312 | outname = os.path.join(self.target_dir, os.path.basename(script)) | ||
313 | if not self.force and not self._fileop.newer(script, outname): | ||
314 | logger.debug('not copying %s (up-to-date)', script) | ||
315 | return | ||
316 | |||
317 | # Always open the file, but ignore failures in dry-run mode -- | ||
318 | # that way, we'll get accurate feedback if we can read the | ||
319 | # script. | ||
320 | try: | ||
321 | f = open(script, 'rb') | ||
322 | except IOError: # pragma: no cover | ||
323 | if not self.dry_run: | ||
324 | raise | ||
325 | f = None | ||
326 | else: | ||
327 | first_line = f.readline() | ||
328 | if not first_line: # pragma: no cover | ||
329 | logger.warning('%s: %s is an empty file (skipping)', | ||
330 | self.get_command_name(), script) | ||
331 | return | ||
332 | |||
333 | match = FIRST_LINE_RE.match(first_line.replace(b'\r\n', b'\n')) | ||
334 | if match: | ||
335 | adjust = True | ||
336 | post_interp = match.group(1) or b'' | ||
337 | |||
338 | if not adjust: | ||
339 | if f: | ||
340 | f.close() | ||
341 | self._fileop.copy_file(script, outname) | ||
342 | if self.set_mode: | ||
343 | self._fileop.set_executable_mode([outname]) | ||
344 | filenames.append(outname) | ||
345 | else: | ||
346 | logger.info('copying and adjusting %s -> %s', script, | ||
347 | self.target_dir) | ||
348 | if not self._fileop.dry_run: | ||
349 | encoding, lines = detect_encoding(f.readline) | ||
350 | f.seek(0) | ||
351 | shebang = self._get_shebang(encoding, post_interp) | ||
352 | if b'pythonw' in first_line: # pragma: no cover | ||
353 | ext = 'pyw' | ||
354 | else: | ||
355 | ext = 'py' | ||
356 | n = os.path.basename(outname) | ||
357 | self._write_script([n], shebang, f.read(), filenames, ext) | ||
358 | if f: | ||
359 | f.close() | ||
360 | |||
361 | @property | ||
362 | def dry_run(self): | ||
363 | return self._fileop.dry_run | ||
364 | |||
365 | @dry_run.setter | ||
366 | def dry_run(self, value): | ||
367 | self._fileop.dry_run = value | ||
368 | |||
369 | if os.name == 'nt' or (os.name == 'java' and os._name == 'nt'): # pragma: no cover | ||
370 | # Executable launcher support. | ||
371 | # Launchers are from https://bitbucket.org/vinay.sajip/simple_launcher/ | ||
372 | |||
373 | def _get_launcher(self, kind): | ||
374 | if struct.calcsize('P') == 8: # 64-bit | ||
375 | bits = '64' | ||
376 | else: | ||
377 | bits = '32' | ||
378 | name = '%s%s.exe' % (kind, bits) | ||
379 | # Issue 31: don't hardcode an absolute package name, but | ||
380 | # determine it relative to the current package | ||
381 | distlib_package = __name__.rsplit('.', 1)[0] | ||
382 | result = finder(distlib_package).find(name).bytes | ||
383 | return result | ||
384 | |||
385 | # Public API follows | ||
386 | |||
387 | def make(self, specification, options=None): | ||
388 | """ | ||
389 | Make a script. | ||
390 | |||
391 | :param specification: The specification, which is either a valid export | ||
392 | entry specification (to make a script from a | ||
393 | callable) or a filename (to make a script by | ||
394 | copying from a source location). | ||
395 | :param options: A dictionary of options controlling script generation. | ||
396 | :return: A list of all absolute pathnames written to. | ||
397 | """ | ||
398 | filenames = [] | ||
399 | entry = get_export_entry(specification) | ||
400 | if entry is None: | ||
401 | self._copy_script(specification, filenames) | ||
402 | else: | ||
403 | self._make_script(entry, filenames, options=options) | ||
404 | return filenames | ||
405 | |||
406 | def make_multiple(self, specifications, options=None): | ||
407 | """ | ||
408 | Take a list of specifications and make scripts from them. | ||
409 | :param specifications: A list of specifications. | ||
410 | :return: A list of all absolute pathnames written to. | ||
411 | """ | ||
412 | filenames = [] | ||
413 | for specification in specifications: | ||
414 | filenames.extend(self.make(specification, options)) | ||
415 | return filenames | ||
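make() is the public entry point: given an export-entry specification it generates a wrapper script from SCRIPT_TEMPLATE, and given a plain filename it copies (and, if needed, re-shebangs) an existing script. A minimal sketch, with a hypothetical module 'hellomod' and target directory:

    from pip._vendor.distlib.scripts import ScriptMaker

    maker = ScriptMaker(source_dir=None, target_dir='/tmp/bin')
    maker.clobber = True                    # overwrite existing scripts
    maker.variants = {''}                   # just 'hello', no 'hello-X.Y' variant
    written = maker.make('hello = hellomod:main')   # export-entry form
    # 'written' lists the absolute paths created; on POSIX they are made executable

Passing source_dir=None is harmless here because only _copy_script() consults it.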
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/t32.exe b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/t32.exe new file mode 100755 index 0000000..a09d926 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/t32.exe | |||
Binary files differ | |||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/t64.exe b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/t64.exe new file mode 100755 index 0000000..9da9b40 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/t64.exe | |||
Binary files differ | |||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/util.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/util.py new file mode 100644 index 0000000..b1d3f90 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/util.py | |||
@@ -0,0 +1,1755 @@ | |||
1 | # | ||
2 | # Copyright (C) 2012-2017 The Python Software Foundation. | ||
3 | # See LICENSE.txt and CONTRIBUTORS.txt. | ||
4 | # | ||
5 | import codecs | ||
6 | from collections import deque | ||
7 | import contextlib | ||
8 | import csv | ||
9 | from glob import iglob as std_iglob | ||
10 | import io | ||
11 | import json | ||
12 | import logging | ||
13 | import os | ||
14 | import py_compile | ||
15 | import re | ||
16 | import socket | ||
17 | try: | ||
18 | import ssl | ||
19 | except ImportError: # pragma: no cover | ||
20 | ssl = None | ||
21 | import subprocess | ||
22 | import sys | ||
23 | import tarfile | ||
24 | import tempfile | ||
25 | import textwrap | ||
26 | |||
27 | try: | ||
28 | import threading | ||
29 | except ImportError: # pragma: no cover | ||
30 | import dummy_threading as threading | ||
31 | import time | ||
32 | |||
33 | from . import DistlibException | ||
34 | from .compat import (string_types, text_type, shutil, raw_input, StringIO, | ||
35 | cache_from_source, urlopen, urljoin, httplib, xmlrpclib, | ||
36 | splittype, HTTPHandler, BaseConfigurator, valid_ident, | ||
37 | Container, configparser, URLError, ZipFile, fsdecode, | ||
38 | unquote, urlparse) | ||
39 | |||
40 | logger = logging.getLogger(__name__) | ||
41 | |||
42 | # | ||
43 | # Requirement parsing code as per PEP 508 | ||
44 | # | ||
45 | |||
46 | IDENTIFIER = re.compile(r'^([\w\.-]+)\s*') | ||
47 | VERSION_IDENTIFIER = re.compile(r'^([\w\.*+-]+)\s*') | ||
48 | COMPARE_OP = re.compile(r'^(<=?|>=?|={2,3}|[~!]=)\s*') | ||
49 | MARKER_OP = re.compile(r'^((<=?)|(>=?)|={2,3}|[~!]=|in|not\s+in)\s*') | ||
50 | OR = re.compile(r'^or\b\s*') | ||
51 | AND = re.compile(r'^and\b\s*') | ||
52 | NON_SPACE = re.compile(r'(\S+)\s*') | ||
53 | STRING_CHUNK = re.compile(r'([\s\w\.{}()*+#:;,/?!~`@$%^&=|<>\[\]-]+)') | ||
54 | |||
55 | |||
56 | def parse_marker(marker_string): | ||
57 | """ | ||
58 | Parse a marker string and return a tuple (expression, rest). | ||
59 | |||
60 | The expression is a dictionary with keys "op", "lhs" and "rhs" for | ||
61 | non-terminals in the expression grammar, or a string; rest is any unparsed | ||
62 | remainder of the input. A string contained in quotes is to be interpreted | ||
63 | as a literal string, and an unquoted string is a variable (such as os_name). | ||
64 | """ | ||
65 | def marker_var(remaining): | ||
66 | # either identifier, or literal string | ||
67 | m = IDENTIFIER.match(remaining) | ||
68 | if m: | ||
69 | result = m.groups()[0] | ||
70 | remaining = remaining[m.end():] | ||
71 | elif not remaining: | ||
72 | raise SyntaxError('unexpected end of input') | ||
73 | else: | ||
74 | q = remaining[0] | ||
75 | if q not in '\'"': | ||
76 | raise SyntaxError('invalid expression: %s' % remaining) | ||
77 | oq = '\'"'.replace(q, '') | ||
78 | remaining = remaining[1:] | ||
79 | parts = [q] | ||
80 | while remaining: | ||
81 | # either a string chunk, or oq, or q to terminate | ||
82 | if remaining[0] == q: | ||
83 | break | ||
84 | elif remaining[0] == oq: | ||
85 | parts.append(oq) | ||
86 | remaining = remaining[1:] | ||
87 | else: | ||
88 | m = STRING_CHUNK.match(remaining) | ||
89 | if not m: | ||
90 | raise SyntaxError('error in string literal: %s' % remaining) | ||
91 | parts.append(m.groups()[0]) | ||
92 | remaining = remaining[m.end():] | ||
93 | else: | ||
94 | s = ''.join(parts) | ||
95 | raise SyntaxError('unterminated string: %s' % s) | ||
96 | parts.append(q) | ||
97 | result = ''.join(parts) | ||
98 | remaining = remaining[1:].lstrip() # skip past closing quote | ||
99 | return result, remaining | ||
100 | |||
101 | def marker_expr(remaining): | ||
102 | if remaining and remaining[0] == '(': | ||
103 | result, remaining = marker(remaining[1:].lstrip()) | ||
104 | if remaining[0] != ')': | ||
105 | raise SyntaxError('unterminated parenthesis: %s' % remaining) | ||
106 | remaining = remaining[1:].lstrip() | ||
107 | else: | ||
108 | lhs, remaining = marker_var(remaining) | ||
109 | while remaining: | ||
110 | m = MARKER_OP.match(remaining) | ||
111 | if not m: | ||
112 | break | ||
113 | op = m.groups()[0] | ||
114 | remaining = remaining[m.end():] | ||
115 | rhs, remaining = marker_var(remaining) | ||
116 | lhs = {'op': op, 'lhs': lhs, 'rhs': rhs} | ||
117 | result = lhs | ||
118 | return result, remaining | ||
119 | |||
120 | def marker_and(remaining): | ||
121 | lhs, remaining = marker_expr(remaining) | ||
122 | while remaining: | ||
123 | m = AND.match(remaining) | ||
124 | if not m: | ||
125 | break | ||
126 | remaining = remaining[m.end():] | ||
127 | rhs, remaining = marker_expr(remaining) | ||
128 | lhs = {'op': 'and', 'lhs': lhs, 'rhs': rhs} | ||
129 | return lhs, remaining | ||
130 | |||
131 | def marker(remaining): | ||
132 | lhs, remaining = marker_and(remaining) | ||
133 | while remaining: | ||
134 | m = OR.match(remaining) | ||
135 | if not m: | ||
136 | break | ||
137 | remaining = remaining[m.end():] | ||
138 | rhs, remaining = marker_and(remaining) | ||
139 | lhs = {'op': 'or', 'lhs': lhs, 'rhs': rhs} | ||
140 | return lhs, remaining | ||
141 | |||
142 | return marker(marker_string) | ||
143 | |||
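# A usage sketch; note that the function returns an (expression, rest)
# tuple, which is how parse_requirement() below consumes it:
#
#   expr, rest = parse_marker('python_version >= "3.6" and os_name == "posix"')
#   # expr == {'op': 'and',
#   #          'lhs': {'op': '>=', 'lhs': 'python_version', 'rhs': '"3.6"'},
#   #          'rhs': {'op': '==', 'lhs': 'os_name', 'rhs': '"posix"'}}
#   # rest == ''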
144 | |||
145 | def parse_requirement(req): | ||
146 | """ | ||
147 | Parse a requirement passed in as a string. Return a Container | ||
148 | whose attributes contain the various parts of the requirement. | ||
149 | """ | ||
150 | remaining = req.strip() | ||
151 | if not remaining or remaining.startswith('#'): | ||
152 | return None | ||
153 | m = IDENTIFIER.match(remaining) | ||
154 | if not m: | ||
155 | raise SyntaxError('name expected: %s' % remaining) | ||
156 | distname = m.groups()[0] | ||
157 | remaining = remaining[m.end():] | ||
158 | extras = mark_expr = versions = uri = None | ||
159 | if remaining and remaining[0] == '[': | ||
160 | i = remaining.find(']', 1) | ||
161 | if i < 0: | ||
162 | raise SyntaxError('unterminated extra: %s' % remaining) | ||
163 | s = remaining[1:i] | ||
164 | remaining = remaining[i + 1:].lstrip() | ||
165 | extras = [] | ||
166 | while s: | ||
167 | m = IDENTIFIER.match(s) | ||
168 | if not m: | ||
169 | raise SyntaxError('malformed extra: %s' % s) | ||
170 | extras.append(m.groups()[0]) | ||
171 | s = s[m.end():] | ||
172 | if not s: | ||
173 | break | ||
174 | if s[0] != ',': | ||
175 | raise SyntaxError('comma expected in extras: %s' % s) | ||
176 | s = s[1:].lstrip() | ||
177 | if not extras: | ||
178 | extras = None | ||
179 | if remaining: | ||
180 | if remaining[0] == '@': | ||
181 | # it's a URI | ||
182 | remaining = remaining[1:].lstrip() | ||
183 | m = NON_SPACE.match(remaining) | ||
184 | if not m: | ||
185 | raise SyntaxError('invalid URI: %s' % remaining) | ||
186 | uri = m.groups()[0] | ||
187 | t = urlparse(uri) | ||
188 | # Python's URL parsing is lenient, so this test is a bit | ||
189 | # crude: urlparse doesn't reliably reject malformed URLs | ||
190 | # (see bpo-20271, bpo-23505), so just check for the minimum | ||
191 | # we need - a scheme and a network location. | ||
192 | if not (t.scheme and t.netloc): | ||
193 | raise SyntaxError('Invalid URL: %s' % uri) | ||
194 | remaining = remaining[m.end():].lstrip() | ||
195 | else: | ||
196 | |||
197 | def get_versions(ver_remaining): | ||
198 | """ | ||
199 | Return a list of (operator, version) tuples if any are | ||
200 | specified, else None. | ||
201 | """ | ||
202 | m = COMPARE_OP.match(ver_remaining) | ||
203 | versions = None | ||
204 | if m: | ||
205 | versions = [] | ||
206 | while True: | ||
207 | op = m.groups()[0] | ||
208 | ver_remaining = ver_remaining[m.end():] | ||
209 | m = VERSION_IDENTIFIER.match(ver_remaining) | ||
210 | if not m: | ||
211 | raise SyntaxError('invalid version: %s' % ver_remaining) | ||
212 | v = m.groups()[0] | ||
213 | versions.append((op, v)) | ||
214 | ver_remaining = ver_remaining[m.end():] | ||
215 | if not ver_remaining or ver_remaining[0] != ',': | ||
216 | break | ||
217 | ver_remaining = ver_remaining[1:].lstrip() | ||
218 | m = COMPARE_OP.match(ver_remaining) | ||
219 | if not m: | ||
220 | raise SyntaxError('invalid constraint: %s' % ver_remaining) | ||
221 | if not versions: | ||
222 | versions = None | ||
223 | return versions, ver_remaining | ||
224 | |||
225 | if remaining[0] != '(': | ||
226 | versions, remaining = get_versions(remaining) | ||
227 | else: | ||
228 | i = remaining.find(')', 1) | ||
229 | if i < 0: | ||
230 | raise SyntaxError('unterminated parenthesis: %s' % remaining) | ||
231 | s = remaining[1:i] | ||
232 | remaining = remaining[i + 1:].lstrip() | ||
233 | # As a special deviation from PEP 508, allow a version number | ||
234 | # a.b.c in parentheses as a synonym for ~= a.b.c (because this | ||
235 | # is allowed in earlier PEPs) | ||
236 | if COMPARE_OP.match(s): | ||
237 | versions, _ = get_versions(s) | ||
238 | else: | ||
239 | m = VERSION_IDENTIFIER.match(s) | ||
240 | if not m: | ||
241 | raise SyntaxError('invalid constraint: %s' % s) | ||
242 | v = m.groups()[0] | ||
243 | s = s[m.end():].lstrip() | ||
244 | if s: | ||
245 | raise SyntaxError('invalid constraint: %s' % s) | ||
246 | versions = [('~=', v)] | ||
247 | |||
248 | if remaining: | ||
249 | if remaining[0] != ';': | ||
250 | raise SyntaxError('invalid requirement: %s' % remaining) | ||
251 | remaining = remaining[1:].lstrip() | ||
252 | |||
253 | mark_expr, remaining = parse_marker(remaining) | ||
254 | |||
255 | if remaining and remaining[0] != '#': | ||
256 | raise SyntaxError('unexpected trailing data: %s' % remaining) | ||
257 | |||
258 | if not versions: | ||
259 | rs = distname | ||
260 | else: | ||
261 | rs = '%s %s' % (distname, ', '.join(['%s %s' % con for con in versions])) | ||
262 | return Container(name=distname, extras=extras, constraints=versions, | ||
263 | marker=mark_expr, url=uri, requirement=rs) | ||
264 | |||
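# A sketch of the returned Container's attributes:
#
#   r = parse_requirement('requests[security] (>= 2.0, < 3.0); python_version >= "2.7"')
#   # r.name == 'requests'; r.extras == ['security']
#   # r.constraints == [('>=', '2.0'), ('<', '3.0')]
#   # r.requirement == 'requests >= 2.0, < 3.0'
#   # r.marker == {'op': '>=', 'lhs': 'python_version', 'rhs': '"2.7"'}
#   # r.url is None (it is only set for 'name @ URI' requirements)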
265 | |||
266 | def get_resources_dests(resources_root, rules): | ||
267 | """Find destinations for resources files""" | ||
268 | |||
269 | def get_rel_path(root, path): | ||
270 | # normalizes and returns a lstripped-/-separated path | ||
271 | root = root.replace(os.path.sep, '/') | ||
272 | path = path.replace(os.path.sep, '/') | ||
273 | assert path.startswith(root) | ||
274 | return path[len(root):].lstrip('/') | ||
275 | |||
276 | destinations = {} | ||
277 | for base, suffix, dest in rules: | ||
278 | prefix = os.path.join(resources_root, base) | ||
279 | for abs_base in iglob(prefix): | ||
280 | abs_glob = os.path.join(abs_base, suffix) | ||
281 | for abs_path in iglob(abs_glob): | ||
282 | resource_file = get_rel_path(resources_root, abs_path) | ||
283 | if dest is None: # remove the entry if it was here | ||
284 | destinations.pop(resource_file, None) | ||
285 | else: | ||
286 | rel_path = get_rel_path(abs_base, abs_path) | ||
287 | rel_dest = dest.replace(os.path.sep, '/').rstrip('/') | ||
288 | destinations[resource_file] = rel_dest + '/' + rel_path | ||
289 | return destinations | ||
290 | |||
291 | |||
292 | def in_venv(): | ||
293 | if hasattr(sys, 'real_prefix'): | ||
294 | # virtualenv venvs | ||
295 | result = True | ||
296 | else: | ||
297 | # PEP 405 venvs | ||
298 | result = sys.prefix != getattr(sys, 'base_prefix', sys.prefix) | ||
299 | return result | ||
300 | |||
301 | |||
302 | def get_executable(): | ||
303 | # The __PYVENV_LAUNCHER__ dance is apparently no longer needed, as | ||
304 | # changes to the stub launcher mean that sys.executable always points | ||
305 | # to the stub on OS X | ||
306 | # if sys.platform == 'darwin' and ('__PYVENV_LAUNCHER__' | ||
307 | # in os.environ): | ||
308 | # result = os.environ['__PYVENV_LAUNCHER__'] | ||
309 | # else: | ||
310 | # result = sys.executable | ||
311 | # return result | ||
312 | result = os.path.normcase(sys.executable) | ||
313 | if not isinstance(result, text_type): | ||
314 | result = fsdecode(result) | ||
315 | return result | ||
316 | |||
317 | |||
318 | def proceed(prompt, allowed_chars, error_prompt=None, default=None): | ||
319 | p = prompt | ||
320 | while True: | ||
321 | s = raw_input(p) | ||
322 | p = prompt | ||
323 | if not s and default: | ||
324 | s = default | ||
325 | if s: | ||
326 | c = s[0].lower() | ||
327 | if c in allowed_chars: | ||
328 | break | ||
329 | if error_prompt: | ||
330 | p = '%c: %s\n%s' % (c, error_prompt, prompt) | ||
331 | return c | ||
332 | |||
333 | |||
334 | def extract_by_key(d, keys): | ||
335 | if isinstance(keys, string_types): | ||
336 | keys = keys.split() | ||
337 | result = {} | ||
338 | for key in keys: | ||
339 | if key in d: | ||
340 | result[key] = d[key] | ||
341 | return result | ||
342 | |||
343 | def read_exports(stream): | ||
344 | if sys.version_info[0] >= 3: | ||
345 | # needs to be a text stream | ||
346 | stream = codecs.getreader('utf-8')(stream) | ||
347 | # Try to load as JSON, falling back on legacy format | ||
348 | data = stream.read() | ||
349 | stream = StringIO(data) | ||
350 | try: | ||
351 | jdata = json.load(stream) | ||
352 | result = jdata['extensions']['python.exports']['exports'] | ||
353 | for group, entries in result.items(): | ||
354 | for k, v in entries.items(): | ||
355 | s = '%s = %s' % (k, v) | ||
356 | entry = get_export_entry(s) | ||
357 | assert entry is not None | ||
358 | entries[k] = entry | ||
359 | return result | ||
360 | except Exception: | ||
361 | stream.seek(0, 0) | ||
362 | |||
363 | def read_stream(cp, stream): | ||
364 | if hasattr(cp, 'read_file'): | ||
365 | cp.read_file(stream) | ||
366 | else: | ||
367 | cp.readfp(stream) | ||
368 | |||
369 | cp = configparser.ConfigParser() | ||
370 | try: | ||
371 | read_stream(cp, stream) | ||
372 | except configparser.MissingSectionHeaderError: | ||
373 | stream.close() | ||
374 | data = textwrap.dedent(data) | ||
375 | stream = StringIO(data) | ||
376 | read_stream(cp, stream) | ||
377 | |||
378 | result = {} | ||
379 | for key in cp.sections(): | ||
380 | result[key] = entries = {} | ||
381 | for name, value in cp.items(key): | ||
382 | s = '%s = %s' % (name, value) | ||
383 | entry = get_export_entry(s) | ||
384 | assert entry is not None | ||
385 | #entry.dist = self | ||
386 | entries[name] = entry | ||
387 | return result | ||
388 | |||
389 | |||
390 | def write_exports(exports, stream): | ||
391 | if sys.version_info[0] >= 3: | ||
392 | # needs to be a text stream | ||
393 | stream = codecs.getwriter('utf-8')(stream) | ||
394 | cp = configparser.ConfigParser() | ||
395 | for k, v in exports.items(): | ||
396 | # TODO check k, v for valid values | ||
397 | cp.add_section(k) | ||
398 | for entry in v.values(): | ||
399 | if entry.suffix is None: | ||
400 | s = entry.prefix | ||
401 | else: | ||
402 | s = '%s:%s' % (entry.prefix, entry.suffix) | ||
403 | if entry.flags: | ||
404 | s = '%s [%s]' % (s, ', '.join(entry.flags)) | ||
405 | cp.set(k, entry.name, s) | ||
406 | cp.write(stream) | ||
407 | |||
408 | |||
409 | @contextlib.contextmanager | ||
410 | def tempdir(): | ||
411 | td = tempfile.mkdtemp() | ||
412 | try: | ||
413 | yield td | ||
414 | finally: | ||
415 | shutil.rmtree(td) | ||
416 | |||
417 | @contextlib.contextmanager | ||
418 | def chdir(d): | ||
419 | cwd = os.getcwd() | ||
420 | try: | ||
421 | os.chdir(d) | ||
422 | yield | ||
423 | finally: | ||
424 | os.chdir(cwd) | ||
425 | |||
426 | |||
427 | @contextlib.contextmanager | ||
428 | def socket_timeout(seconds=15): | ||
429 | cto = socket.getdefaulttimeout() | ||
430 | try: | ||
431 | socket.setdefaulttimeout(seconds) | ||
432 | yield | ||
433 | finally: | ||
434 | socket.setdefaulttimeout(cto) | ||
435 | |||
436 | |||
437 | class cached_property(object): | ||
438 | def __init__(self, func): | ||
439 | self.func = func | ||
440 | #for attr in ('__name__', '__module__', '__doc__'): | ||
441 | # setattr(self, attr, getattr(func, attr, None)) | ||
442 | |||
443 | def __get__(self, obj, cls=None): | ||
444 | if obj is None: | ||
445 | return self | ||
446 | value = self.func(obj) | ||
447 | object.__setattr__(obj, self.func.__name__, value) | ||
448 | #obj.__dict__[self.func.__name__] = value = self.func(obj) | ||
449 | return value | ||
450 | |||
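# cached_property is a non-data descriptor (it defines only __get__): the
# first access computes the value and plants it on the instance, shadowing
# the descriptor on later lookups. Sketch (the method body is hypothetical):
#
#   class Example(object):
#       @cached_property
#       def answer(self):
#           return expensive_computation()   # runs only on first access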
451 | def convert_path(pathname): | ||
452 | """Return 'pathname' as a name that will work on the native filesystem. | ||
453 | |||
454 | The path is split on '/' and put back together again using the current | ||
455 | directory separator. Needed because filenames in the setup script are | ||
456 | always supplied in Unix style, and have to be converted to the local | ||
457 | convention before we can actually use them in the filesystem. Raises | ||
458 | ValueError on non-Unix-ish systems if 'pathname' either starts or | ||
459 | ends with a slash. | ||
460 | """ | ||
461 | if os.sep == '/': | ||
462 | return pathname | ||
463 | if not pathname: | ||
464 | return pathname | ||
465 | if pathname[0] == '/': | ||
466 | raise ValueError("path '%s' cannot be absolute" % pathname) | ||
467 | if pathname[-1] == '/': | ||
468 | raise ValueError("path '%s' cannot end with '/'" % pathname) | ||
469 | |||
470 | paths = pathname.split('/') | ||
471 | while os.curdir in paths: | ||
472 | paths.remove(os.curdir) | ||
473 | if not paths: | ||
474 | return os.curdir | ||
475 | return os.path.join(*paths) | ||
476 | |||
477 | |||
478 | class FileOperator(object): | ||
479 | def __init__(self, dry_run=False): | ||
480 | self.dry_run = dry_run | ||
481 | self.ensured = set() | ||
482 | self._init_record() | ||
483 | |||
484 | def _init_record(self): | ||
485 | self.record = False | ||
486 | self.files_written = set() | ||
487 | self.dirs_created = set() | ||
488 | |||
489 | def record_as_written(self, path): | ||
490 | if self.record: | ||
491 | self.files_written.add(path) | ||
492 | |||
493 | def newer(self, source, target): | ||
494 | """Tell if the target is newer than the source. | ||
495 | |||
496 | Returns true if 'source' exists and is more recently modified than | ||
497 | 'target', or if 'source' exists and 'target' doesn't. | ||
498 | |||
499 | Returns false if both exist and 'target' is the same age or younger | ||
500 | than 'source'. Raises DistlibException if 'source' does not exist. | ||
501 | |||
502 | Note that this test is not very accurate: files created in the same | ||
503 | second will have the same "age". | ||
504 | """ | ||
505 | if not os.path.exists(source): | ||
506 | raise DistlibException("file '%r' does not exist" % | ||
507 | os.path.abspath(source)) | ||
508 | if not os.path.exists(target): | ||
509 | return True | ||
510 | |||
511 | return os.stat(source).st_mtime > os.stat(target).st_mtime | ||
512 | |||
513 | def copy_file(self, infile, outfile, check=True): | ||
514 | """Copy a file respecting dry-run and force flags. | ||
515 | """ | ||
516 | self.ensure_dir(os.path.dirname(outfile)) | ||
517 | logger.info('Copying %s to %s', infile, outfile) | ||
518 | if not self.dry_run: | ||
519 | msg = None | ||
520 | if check: | ||
521 | if os.path.islink(outfile): | ||
522 | msg = '%s is a symlink' % outfile | ||
523 | elif os.path.exists(outfile) and not os.path.isfile(outfile): | ||
524 | msg = '%s is a non-regular file' % outfile | ||
525 | if msg: | ||
526 | raise ValueError(msg + ' which would be overwritten') | ||
527 | shutil.copyfile(infile, outfile) | ||
528 | self.record_as_written(outfile) | ||
529 | |||
530 | def copy_stream(self, instream, outfile, encoding=None): | ||
531 | assert not os.path.isdir(outfile) | ||
532 | self.ensure_dir(os.path.dirname(outfile)) | ||
533 | logger.info('Copying stream %s to %s', instream, outfile) | ||
534 | if not self.dry_run: | ||
535 | if encoding is None: | ||
536 | outstream = open(outfile, 'wb') | ||
537 | else: | ||
538 | outstream = codecs.open(outfile, 'w', encoding=encoding) | ||
539 | try: | ||
540 | shutil.copyfileobj(instream, outstream) | ||
541 | finally: | ||
542 | outstream.close() | ||
543 | self.record_as_written(outfile) | ||
544 | |||
545 | def write_binary_file(self, path, data): | ||
546 | self.ensure_dir(os.path.dirname(path)) | ||
547 | if not self.dry_run: | ||
548 | with open(path, 'wb') as f: | ||
549 | f.write(data) | ||
550 | self.record_as_written(path) | ||
551 | |||
552 | def write_text_file(self, path, data, encoding): | ||
553 | self.ensure_dir(os.path.dirname(path)) | ||
554 | if not self.dry_run: | ||
555 | with open(path, 'wb') as f: | ||
556 | f.write(data.encode(encoding)) | ||
557 | self.record_as_written(path) | ||
558 | |||
559 | def set_mode(self, bits, mask, files): | ||
560 | if os.name == 'posix' or (os.name == 'java' and os._name == 'posix'): | ||
561 | # Set the executable bits (owner, group, and world) on | ||
562 | # all the files specified. | ||
563 | for f in files: | ||
564 | if self.dry_run: | ||
565 | logger.info("changing mode of %s", f) | ||
566 | else: | ||
567 | mode = (os.stat(f).st_mode | bits) & mask | ||
568 | logger.info("changing mode of %s to %o", f, mode) | ||
569 | os.chmod(f, mode) | ||
570 | |||
571 | set_executable_mode = lambda s, f: s.set_mode(0o555, 0o7777, f) | ||
572 | |||
573 | def ensure_dir(self, path): | ||
574 | path = os.path.abspath(path) | ||
575 | if path not in self.ensured and not os.path.exists(path): | ||
576 | self.ensured.add(path) | ||
577 | d, f = os.path.split(path) | ||
578 | self.ensure_dir(d) | ||
579 | logger.info('Creating %s', path) | ||
580 | if not self.dry_run: | ||
581 | os.mkdir(path) | ||
582 | if self.record: | ||
583 | self.dirs_created.add(path) | ||
584 | |||
585 | def byte_compile(self, path, optimize=False, force=False, prefix=None): | ||
586 | dpath = cache_from_source(path, not optimize) | ||
587 | logger.info('Byte-compiling %s to %s', path, dpath) | ||
588 | if not self.dry_run: | ||
589 | if force or self.newer(path, dpath): | ||
590 | if not prefix: | ||
591 | diagpath = None | ||
592 | else: | ||
593 | assert path.startswith(prefix) | ||
594 | diagpath = path[len(prefix):] | ||
595 | py_compile.compile(path, dpath, diagpath, True) # raise error | ||
596 | self.record_as_written(dpath) | ||
597 | return dpath | ||
598 | |||
599 | def ensure_removed(self, path): | ||
600 | if os.path.exists(path): | ||
601 | if os.path.isdir(path) and not os.path.islink(path): | ||
602 | logger.debug('Removing directory tree at %s', path) | ||
603 | if not self.dry_run: | ||
604 | shutil.rmtree(path) | ||
605 | if self.record: | ||
606 | if path in self.dirs_created: | ||
607 | self.dirs_created.remove(path) | ||
608 | else: | ||
609 | if os.path.islink(path): | ||
610 | s = 'link' | ||
611 | else: | ||
612 | s = 'file' | ||
613 | logger.debug('Removing %s %s', s, path) | ||
614 | if not self.dry_run: | ||
615 | os.remove(path) | ||
616 | if self.record: | ||
617 | if path in self.files_written: | ||
618 | self.files_written.remove(path) | ||
619 | |||
620 | def is_writable(self, path): | ||
621 | result = False | ||
622 | while not result: | ||
623 | if os.path.exists(path): | ||
624 | result = os.access(path, os.W_OK) | ||
625 | break | ||
626 | parent = os.path.dirname(path) | ||
627 | if parent == path: | ||
628 | break | ||
629 | path = parent | ||
630 | return result | ||
631 | |||
632 | def commit(self): | ||
633 | """ | ||
634 | Commit recorded changes, turn off recording, return | ||
635 | changes. | ||
636 | """ | ||
637 | assert self.record | ||
638 | result = self.files_written, self.dirs_created | ||
639 | self._init_record() | ||
640 | return result | ||
641 | |||
642 | def rollback(self): | ||
643 | if not self.dry_run: | ||
644 | for f in list(self.files_written): | ||
645 | if os.path.exists(f): | ||
646 | os.remove(f) | ||
647 | # dirs should all be empty now, except perhaps for | ||
648 | # __pycache__ subdirs | ||
649 | # reverse so that subdirs appear before their parents | ||
650 | dirs = sorted(self.dirs_created, reverse=True) | ||
651 | for d in dirs: | ||
652 | flist = os.listdir(d) | ||
653 | if flist: | ||
654 | assert flist == ['__pycache__'] | ||
655 | sd = os.path.join(d, flist[0]) | ||
656 | os.rmdir(sd) | ||
657 | os.rmdir(d) # should fail if non-empty | ||
658 | self._init_record() | ||
659 | |||
660 | def resolve(module_name, dotted_path): | ||
661 | if module_name in sys.modules: | ||
662 | mod = sys.modules[module_name] | ||
663 | else: | ||
664 | mod = __import__(module_name) # NB: returns the top-level package for dotted names | ||
665 | if dotted_path is None: | ||
666 | result = mod | ||
667 | else: | ||
668 | parts = dotted_path.split('.') | ||
669 | result = getattr(mod, parts.pop(0)) | ||
670 | for p in parts: | ||
671 | result = getattr(result, p) | ||
672 | return result | ||
673 | |||
674 | |||
675 | class ExportEntry(object): | ||
676 | def __init__(self, name, prefix, suffix, flags): | ||
677 | self.name = name | ||
678 | self.prefix = prefix | ||
679 | self.suffix = suffix | ||
680 | self.flags = flags | ||
681 | |||
682 | @cached_property | ||
683 | def value(self): | ||
684 | return resolve(self.prefix, self.suffix) | ||
685 | |||
686 | def __repr__(self): # pragma: no cover | ||
687 | return '<ExportEntry %s = %s:%s %s>' % (self.name, self.prefix, | ||
688 | self.suffix, self.flags) | ||
689 | |||
690 | def __eq__(self, other): | ||
691 | if not isinstance(other, ExportEntry): | ||
692 | result = False | ||
693 | else: | ||
694 | result = (self.name == other.name and | ||
695 | self.prefix == other.prefix and | ||
696 | self.suffix == other.suffix and | ||
697 | self.flags == other.flags) | ||
698 | return result | ||
699 | |||
700 | __hash__ = object.__hash__ | ||
701 | |||
702 | |||
703 | ENTRY_RE = re.compile(r'''(?P<name>(\w|[-.+])+) | ||
704 | \s*=\s*(?P<callable>(\w+)([:\.]\w+)*) | ||
705 | \s*(\[\s*(?P<flags>\w+(=\w+)?(,\s*\w+(=\w+)?)*)\s*\])? | ||
706 | ''', re.VERBOSE) | ||
707 | |||
708 | def get_export_entry(specification): | ||
709 | m = ENTRY_RE.search(specification) | ||
710 | if not m: | ||
711 | result = None | ||
712 | if '[' in specification or ']' in specification: | ||
713 | raise DistlibException("Invalid specification " | ||
714 | "'%s'" % specification) | ||
715 | else: | ||
716 | d = m.groupdict() | ||
717 | name = d['name'] | ||
718 | path = d['callable'] | ||
719 | colons = path.count(':') | ||
720 | if colons == 0: | ||
721 | prefix, suffix = path, None | ||
722 | else: | ||
723 | if colons != 1: | ||
724 | raise DistlibException("Invalid specification " | ||
725 | "'%s'" % specification) | ||
726 | prefix, suffix = path.split(':') | ||
727 | flags = d['flags'] | ||
728 | if flags is None: | ||
729 | if '[' in specification or ']' in specification: | ||
730 | raise DistlibException("Invalid specification " | ||
731 | "'%s'" % specification) | ||
732 | flags = [] | ||
733 | else: | ||
734 | flags = [f.strip() for f in flags.split(',')] | ||
735 | result = ExportEntry(name, prefix, suffix, flags) | ||
736 | return result | ||
737 | |||
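# Usage sketch (editor's illustration, not part of the original source;
# the specification string is hypothetical):
#
#   >>> entry = get_export_entry('hello = hello.main:run [color]')
#   >>> entry.name, entry.prefix, entry.suffix, entry.flags
#   ('hello', 'hello.main', 'run', ['color'])
#
# entry.value would then use resolve() to import hello.main and fetch
# its 'run' attribute.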
738 | |||
739 | def get_cache_base(suffix=None): | ||
740 | """ | ||
741 | Return the default base location for distlib caches. If the directory does | ||
742 | not exist, it is created. Use the suffix provided for the base directory, | ||
743 | and default to '.distlib' if it isn't provided. | ||
744 | |||
745 | On Windows, if LOCALAPPDATA is defined in the environment, then it is | ||
746 | assumed to be a directory, and will be the parent directory of the result. | ||
747 | On POSIX, and on Windows if LOCALAPPDATA is not defined, the user's home | ||
748 | directory - using os.path.expanduser('~') - will be the parent directory of | ||
749 | the result. | ||
750 | |||
751 | The result is just the directory '.distlib' in the parent directory as | ||
752 | determined above, or with the name specified with ``suffix``. | ||
753 | """ | ||
754 | if suffix is None: | ||
755 | suffix = '.distlib' | ||
756 | if os.name == 'nt' and 'LOCALAPPDATA' in os.environ: | ||
757 | result = os.path.expandvars('$localappdata') | ||
758 | else: | ||
759 | # Assume posix, or old Windows | ||
760 | result = os.path.expanduser('~') | ||
761 | # we use 'isdir' instead of 'exists', because we want to | ||
762 | # fail if there's a file with that name | ||
763 | if os.path.isdir(result): | ||
764 | usable = os.access(result, os.W_OK) | ||
765 | if not usable: | ||
766 | logger.warning('Directory exists but is not writable: %s', result) | ||
767 | else: | ||
768 | try: | ||
769 | os.makedirs(result) | ||
770 | usable = True | ||
771 | except OSError: | ||
772 | logger.warning('Unable to create %s', result, exc_info=True) | ||
773 | usable = False | ||
774 | if not usable: | ||
775 | result = tempfile.mkdtemp() | ||
776 | logger.warning('Default location unusable, using %s', result) | ||
777 | return os.path.join(result, suffix) | ||
778 | |||
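# Usage sketch (editor's illustration; the home directory shown is
# hypothetical and platform-dependent):
#
#   >>> get_cache_base()            # on POSIX, typically
#   '/home/user/.distlib'
#   >>> get_cache_base('.mycache')  # custom suffix
#   '/home/user/.mycache'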
779 | |||
780 | def path_to_cache_dir(path): | ||
781 | """ | ||
782 | Convert an absolute path to a directory name for use in a cache. | ||
783 | |||
784 | The algorithm used is: | ||
785 | |||
786 | #. On Windows, any ``':'`` in the drive is replaced with ``'---'``. | ||
787 | #. Any occurrence of ``os.sep`` is replaced with ``'--'``. | ||
788 | #. ``'.cache'`` is appended. | ||
789 | """ | ||
790 | d, p = os.path.splitdrive(os.path.abspath(path)) | ||
791 | if d: | ||
792 | d = d.replace(':', '---') | ||
793 | p = p.replace(os.sep, '--') | ||
794 | return d + p + '.cache' | ||
795 | |||
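# A worked example (editor's illustration) of the algorithm above, on a
# POSIX system where os.sep is '/':
#
#   >>> path_to_cache_dir('/usr/lib/python3')
#   '--usr--lib--python3.cache'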
796 | |||
797 | def ensure_slash(s): | ||
798 | if not s.endswith('/'): | ||
799 | return s + '/' | ||
800 | return s | ||
801 | |||
802 | |||
803 | def parse_credentials(netloc): | ||
804 | username = password = None | ||
805 | if '@' in netloc: | ||
806 | prefix, netloc = netloc.split('@', 1) | ||
807 | if ':' not in prefix: | ||
808 | username = prefix | ||
809 | else: | ||
810 | username, password = prefix.split(':', 1) | ||
811 | return username, password, netloc | ||
812 | |||
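# Usage sketch (editor's illustration; the credentials are hypothetical):
#
#   >>> parse_credentials('alice:secret@pypi.example.com')
#   ('alice', 'secret', 'pypi.example.com')
#   >>> parse_credentials('pypi.example.com')
#   (None, None, 'pypi.example.com')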
813 | |||
814 | def get_process_umask(): | ||
815 | result = os.umask(0o22) | ||
816 | os.umask(result) | ||
817 | return result | ||
818 | |||
819 | def is_string_sequence(seq): | ||
820 | result = True | ||
821 | i = None | ||
822 | for i, s in enumerate(seq): | ||
823 | if not isinstance(s, string_types): | ||
824 | result = False | ||
825 | break | ||
826 | assert i is not None | ||
827 | return result | ||
828 | |||
829 | PROJECT_NAME_AND_VERSION = re.compile('([a-z0-9_]+([.-][a-z_][a-z0-9_]*)*)-' | ||
830 | '([a-z0-9_.+-]+)', re.I) | ||
831 | PYTHON_VERSION = re.compile(r'-py(\d\.?\d?)') | ||
832 | |||
833 | |||
834 | def split_filename(filename, project_name=None): | ||
835 | """ | ||
836 | Extract name, version, python version from a filename (no extension) | ||
837 | |||
838 | Return name, version, pyver or None | ||
839 | """ | ||
840 | result = None | ||
841 | pyver = None | ||
842 | filename = unquote(filename).replace(' ', '-') | ||
843 | m = PYTHON_VERSION.search(filename) | ||
844 | if m: | ||
845 | pyver = m.group(1) | ||
846 | filename = filename[:m.start()] | ||
847 | if project_name and len(filename) > len(project_name) + 1: | ||
848 | m = re.match(re.escape(project_name) + r'\b', filename) | ||
849 | if m: | ||
850 | n = m.end() | ||
851 | result = filename[:n], filename[n + 1:], pyver | ||
852 | if result is None: | ||
853 | m = PROJECT_NAME_AND_VERSION.match(filename) | ||
854 | if m: | ||
855 | result = m.group(1), m.group(3), pyver | ||
856 | return result | ||
857 | |||
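# Usage sketch (editor's illustration):
#
#   >>> split_filename('Django-1.11.2-py2.7')
#   ('Django', '1.11.2', '2.7')
#   >>> split_filename('nose-1.3.7')
#   ('nose', '1.3.7', None)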
858 | # Allow spaces in name because of legacy dists like "Twisted Core" | ||
859 | NAME_VERSION_RE = re.compile(r'(?P<name>[\w .-]+)\s*' | ||
860 | r'\(\s*(?P<ver>[^\s)]+)\)$') | ||
861 | |||
862 | def parse_name_and_version(p): | ||
863 | """ | ||
864 | A utility method used to get name and version from a string. | ||
865 | |||
866 | From e.g. a Provides-Dist value. | ||
867 | |||
868 | :param p: A value in the form 'foo (1.0)' | ||
869 | :return: The name and version as a tuple. | ||
870 | """ | ||
871 | m = NAME_VERSION_RE.match(p) | ||
872 | if not m: | ||
873 | raise DistlibException('Ill-formed name/version string: \'%s\'' % p) | ||
874 | d = m.groupdict() | ||
875 | return d['name'].strip().lower(), d['ver'] | ||
876 | |||
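# Usage sketch (editor's illustration) showing the legacy space-in-name
# case mentioned above:
#
#   >>> parse_name_and_version('Twisted Core (12.0.0)')
#   ('twisted core', '12.0.0')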
877 | def get_extras(requested, available): | ||
878 | result = set() | ||
879 | requested = set(requested or []) | ||
880 | available = set(available or []) | ||
881 | if '*' in requested: | ||
882 | requested.remove('*') | ||
883 | result |= available | ||
884 | for r in requested: | ||
885 | if r == '-': | ||
886 | result.add(r) | ||
887 | elif r.startswith('-'): | ||
888 | unwanted = r[1:] | ||
889 | if unwanted not in available: | ||
890 | logger.warning('undeclared extra: %s', unwanted) | ||
891 | if unwanted in result: | ||
892 | result.remove(unwanted) | ||
893 | else: | ||
894 | if r not in available: | ||
895 | logger.warning('undeclared extra: %s', r) | ||
896 | result.add(r) | ||
897 | return result | ||
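
# Usage sketch (editor's illustration): '*' requests every declared
# extra, and a '-' prefix subtracts one again:
#
#   >>> sorted(get_extras(['*', '-tests'], ['docs', 'tests']))
#   ['docs']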
898 | # | ||
899 | # Extended metadata functionality | ||
900 | # | ||
901 | |||
902 | def _get_external_data(url): | ||
903 | result = {} | ||
904 | try: | ||
905 | # urlopen might fail if it runs into redirections, | ||
906 | # because of Python issue #13696. Fixed in locators | ||
907 | # using a custom redirect handler. | ||
908 | resp = urlopen(url) | ||
909 | headers = resp.info() | ||
910 | ct = headers.get('Content-Type') | ||
911 | if not ct.startswith('application/json'): | ||
912 | logger.debug('Unexpected response for JSON request: %s', ct) | ||
913 | else: | ||
914 | reader = codecs.getreader('utf-8')(resp) | ||
915 | #data = reader.read().decode('utf-8') | ||
916 | #result = json.loads(data) | ||
917 | result = json.load(reader) | ||
918 | except Exception as e: | ||
919 | logger.exception('Failed to get external data for %s: %s', url, e) | ||
920 | return result | ||
921 | |||
922 | _external_data_base_url = 'https://www.red-dove.com/pypi/projects/' | ||
923 | |||
924 | def get_project_data(name): | ||
925 | url = '%s/%s/project.json' % (name[0].upper(), name) | ||
926 | url = urljoin(_external_data_base_url, url) | ||
927 | result = _get_external_data(url) | ||
928 | return result | ||
929 | |||
930 | def get_package_data(name, version): | ||
931 | url = '%s/%s/package-%s.json' % (name[0].upper(), name, version) | ||
932 | url = urljoin(_external_data_base_url, url) | ||
933 | return _get_external_data(url) | ||
934 | |||
935 | |||
936 | class Cache(object): | ||
937 | """ | ||
938 | A class implementing a cache for resources that need to live in the file | ||
939 | system, e.g. shared libraries. This class was moved here from resources | ||
940 | because it could be used by other modules, e.g. the wheel module. | ||
941 | """ | ||
942 | |||
943 | def __init__(self, base): | ||
944 | """ | ||
945 | Initialise an instance. | ||
946 | |||
947 | :param base: The base directory where the cache should be located. | ||
948 | """ | ||
949 | # we use 'isdir' instead of 'exists', because we want to | ||
950 | # fail if there's a file with that name | ||
951 | if not os.path.isdir(base): # pragma: no cover | ||
952 | os.makedirs(base) | ||
953 | if (os.stat(base).st_mode & 0o77) != 0: | ||
954 | logger.warning('Directory \'%s\' is not private', base) | ||
955 | self.base = os.path.abspath(os.path.normpath(base)) | ||
956 | |||
957 | def prefix_to_dir(self, prefix): | ||
958 | """ | ||
959 | Converts a resource prefix to a directory name in the cache. | ||
960 | """ | ||
961 | return path_to_cache_dir(prefix) | ||
962 | |||
963 | def clear(self): | ||
964 | """ | ||
965 | Clear the cache. | ||
966 | """ | ||
967 | not_removed = [] | ||
968 | for fn in os.listdir(self.base): | ||
969 | fn = os.path.join(self.base, fn) | ||
970 | try: | ||
971 | if os.path.islink(fn) or os.path.isfile(fn): | ||
972 | os.remove(fn) | ||
973 | elif os.path.isdir(fn): | ||
974 | shutil.rmtree(fn) | ||
975 | except Exception: | ||
976 | not_removed.append(fn) | ||
977 | return not_removed | ||
978 | |||
979 | |||
980 | class EventMixin(object): | ||
981 | """ | ||
982 | A very simple publish/subscribe system. | ||
983 | """ | ||
984 | def __init__(self): | ||
985 | self._subscribers = {} | ||
986 | |||
987 | def add(self, event, subscriber, append=True): | ||
988 | """ | ||
989 | Add a subscriber for an event. | ||
990 | |||
991 | :param event: The name of an event. | ||
992 | :param subscriber: The subscriber to be added (and called when the | ||
993 | event is published). | ||
994 | :param append: Whether to append or prepend the subscriber to an | ||
995 | existing subscriber list for the event. | ||
996 | """ | ||
997 | subs = self._subscribers | ||
998 | if event not in subs: | ||
999 | subs[event] = deque([subscriber]) | ||
1000 | else: | ||
1001 | sq = subs[event] | ||
1002 | if append: | ||
1003 | sq.append(subscriber) | ||
1004 | else: | ||
1005 | sq.appendleft(subscriber) | ||
1006 | |||
1007 | def remove(self, event, subscriber): | ||
1008 | """ | ||
1009 | Remove a subscriber for an event. | ||
1010 | |||
1011 | :param event: The name of an event. | ||
1012 | :param subscriber: The subscriber to be removed. | ||
1013 | """ | ||
1014 | subs = self._subscribers | ||
1015 | if event not in subs: | ||
1016 | raise ValueError('No subscribers: %r' % event) | ||
1017 | subs[event].remove(subscriber) | ||
1018 | |||
1019 | def get_subscribers(self, event): | ||
1020 | """ | ||
1021 | Return an iterator for the subscribers for an event. | ||
1022 | :param event: The event to return subscribers for. | ||
1023 | """ | ||
1024 | return iter(self._subscribers.get(event, ())) | ||
1025 | |||
1026 | def publish(self, event, *args, **kwargs): | ||
1027 | """ | ||
1028 | Publish an event and return a list of values returned by its | ||
1029 | subscribers. | ||
1030 | |||
1031 | :param event: The event to publish. | ||
1032 | :param args: The positional arguments to pass to the event's | ||
1033 | subscribers. | ||
1034 | :param kwargs: The keyword arguments to pass to the event's | ||
1035 | subscribers. | ||
1036 | """ | ||
1037 | result = [] | ||
1038 | for subscriber in self.get_subscribers(event): | ||
1039 | try: | ||
1040 | value = subscriber(event, *args, **kwargs) | ||
1041 | except Exception: | ||
1042 | logger.exception('Exception during event publication') | ||
1043 | value = None | ||
1044 | result.append(value) | ||
1045 | logger.debug('publish %s: args = %s, kwargs = %s, result = %s', | ||
1046 | event, args, kwargs, result) | ||
1047 | return result | ||
1048 | |||
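# Usage sketch (editor's illustration; the event name and subscriber are
# hypothetical):
#
#   >>> class Publisher(EventMixin):
#   ...     pass
#   >>> pub = Publisher()
#   >>> pub.add('ping', lambda event, *args, **kwargs: 'pong')
#   >>> pub.publish('ping')
#   ['pong']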
1049 | # | ||
1050 | # Simple sequencing | ||
1051 | # | ||
1052 | class Sequencer(object): | ||
1053 | def __init__(self): | ||
1054 | self._preds = {} | ||
1055 | self._succs = {} | ||
1056 | self._nodes = set() # nodes with no preds/succs | ||
1057 | |||
1058 | def add_node(self, node): | ||
1059 | self._nodes.add(node) | ||
1060 | |||
1061 | def remove_node(self, node, edges=False): | ||
1062 | if node in self._nodes: | ||
1063 | self._nodes.remove(node) | ||
1064 | if edges: | ||
1065 | for p in set(self._preds.get(node, ())): | ||
1066 | self.remove(p, node) | ||
1067 | for s in set(self._succs.get(node, ())): | ||
1068 | self.remove(node, s) | ||
1069 | # Remove empties | ||
1070 | for k, v in list(self._preds.items()): | ||
1071 | if not v: | ||
1072 | del self._preds[k] | ||
1073 | for k, v in list(self._succs.items()): | ||
1074 | if not v: | ||
1075 | del self._succs[k] | ||
1076 | |||
1077 | def add(self, pred, succ): | ||
1078 | assert pred != succ | ||
1079 | self._preds.setdefault(succ, set()).add(pred) | ||
1080 | self._succs.setdefault(pred, set()).add(succ) | ||
1081 | |||
1082 | def remove(self, pred, succ): | ||
1083 | assert pred != succ | ||
1084 | try: | ||
1085 | preds = self._preds[succ] | ||
1086 | succs = self._succs[pred] | ||
1087 | except KeyError: # pragma: no cover | ||
1088 | raise ValueError('%r not a successor of anything' % succ) | ||
1089 | try: | ||
1090 | preds.remove(pred) | ||
1091 | succs.remove(succ) | ||
1092 | except KeyError: # pragma: no cover | ||
1093 | raise ValueError('%r not a successor of %r' % (succ, pred)) | ||
1094 | |||
1095 | def is_step(self, step): | ||
1096 | return (step in self._preds or step in self._succs or | ||
1097 | step in self._nodes) | ||
1098 | |||
1099 | def get_steps(self, final): | ||
1100 | if not self.is_step(final): | ||
1101 | raise ValueError('Unknown: %r' % final) | ||
1102 | result = [] | ||
1103 | todo = [] | ||
1104 | seen = set() | ||
1105 | todo.append(final) | ||
1106 | while todo: | ||
1107 | step = todo.pop(0) | ||
1108 | if step in seen: | ||
1109 | # if a step was already seen, | ||
1110 | # move it to the end (so it will appear earlier | ||
1111 | # when reversed on return) ... but not for the | ||
1112 | # final step, as that would be confusing for | ||
1113 | # users | ||
1114 | if step != final: | ||
1115 | result.remove(step) | ||
1116 | result.append(step) | ||
1117 | else: | ||
1118 | seen.add(step) | ||
1119 | result.append(step) | ||
1120 | preds = self._preds.get(step, ()) | ||
1121 | todo.extend(preds) | ||
1122 | return reversed(result) | ||
1123 | |||
1124 | @property | ||
1125 | def strong_connections(self): | ||
1126 | #http://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm | ||
1127 | index_counter = [0] | ||
1128 | stack = [] | ||
1129 | lowlinks = {} | ||
1130 | index = {} | ||
1131 | result = [] | ||
1132 | |||
1133 | graph = self._succs | ||
1134 | |||
1135 | def strongconnect(node): | ||
1136 | # set the depth index for this node to the smallest unused index | ||
1137 | index[node] = index_counter[0] | ||
1138 | lowlinks[node] = index_counter[0] | ||
1139 | index_counter[0] += 1 | ||
1140 | stack.append(node) | ||
1141 | |||
1142 | # Consider successors | ||
1143 | try: | ||
1144 | successors = graph[node] | ||
1145 | except Exception: | ||
1146 | successors = [] | ||
1147 | for successor in successors: | ||
1148 | if successor not in lowlinks: | ||
1149 | # Successor has not yet been visited | ||
1150 | strongconnect(successor) | ||
1151 | lowlinks[node] = min(lowlinks[node], lowlinks[successor]) | ||
1152 | elif successor in stack: | ||
1153 | # the successor is in the stack and hence in the current | ||
1154 | # strongly connected component (SCC) | ||
1155 | lowlinks[node] = min(lowlinks[node], index[successor]) | ||
1156 | |||
1157 | # If `node` is a root node, pop the stack and generate an SCC | ||
1158 | if lowlinks[node] == index[node]: | ||
1159 | connected_component = [] | ||
1160 | |||
1161 | while True: | ||
1162 | successor = stack.pop() | ||
1163 | connected_component.append(successor) | ||
1164 | if successor == node: break | ||
1165 | component = tuple(connected_component) | ||
1166 | # storing the result | ||
1167 | result.append(component) | ||
1168 | |||
1169 | for node in graph: | ||
1170 | if node not in lowlinks: | ||
1171 | strongconnect(node) | ||
1172 | |||
1173 | return result | ||
1174 | |||
1175 | @property | ||
1176 | def dot(self): | ||
1177 | result = ['digraph G {'] | ||
1178 | for succ in self._preds: | ||
1179 | preds = self._preds[succ] | ||
1180 | for pred in preds: | ||
1181 | result.append(' %s -> %s;' % (pred, succ)) | ||
1182 | for node in self._nodes: | ||
1183 | result.append(' %s;' % node) | ||
1184 | result.append('}') | ||
1185 | return '\n'.join(result) | ||
1186 | |||
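# Usage sketch (editor's illustration): add(pred, succ) records that pred
# must run before succ, and get_steps() returns a compatible ordering:
#
#   >>> seq = Sequencer()
#   >>> seq.add('compile', 'test')
#   >>> seq.add('test', 'release')
#   >>> list(seq.get_steps('release'))
#   ['compile', 'test', 'release']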
1187 | # | ||
1188 | # Unarchiving functionality for zip, tar, tgz, tbz, whl | ||
1189 | # | ||
1190 | |||
1191 | ARCHIVE_EXTENSIONS = ('.tar.gz', '.tar.bz2', '.tar', '.zip', | ||
1192 | '.tgz', '.tbz', '.whl') | ||
1193 | |||
1194 | def unarchive(archive_filename, dest_dir, format=None, check=True): | ||
1195 | |||
1196 | def check_path(path): | ||
1197 | if not isinstance(path, text_type): | ||
1198 | path = path.decode('utf-8') | ||
1199 | p = os.path.abspath(os.path.join(dest_dir, path)) | ||
1200 | if not p.startswith(dest_dir) or p[plen] != os.sep: | ||
1201 | raise ValueError('path outside destination: %r' % p) | ||
1202 | |||
1203 | dest_dir = os.path.abspath(dest_dir) | ||
1204 | plen = len(dest_dir) | ||
1205 | archive = None | ||
1206 | if format is None: | ||
1207 | if archive_filename.endswith(('.zip', '.whl')): | ||
1208 | format = 'zip' | ||
1209 | elif archive_filename.endswith(('.tar.gz', '.tgz')): | ||
1210 | format = 'tgz' | ||
1211 | mode = 'r:gz' | ||
1212 | elif archive_filename.endswith(('.tar.bz2', '.tbz')): | ||
1213 | format = 'tbz' | ||
1214 | mode = 'r:bz2' | ||
1215 | elif archive_filename.endswith('.tar'): | ||
1216 | format = 'tar' | ||
1217 | mode = 'r' | ||
1218 | else: # pragma: no cover | ||
1219 | raise ValueError('Unknown format for %r' % archive_filename) | ||
1220 | try: | ||
1221 | if format == 'zip': | ||
1222 | archive = ZipFile(archive_filename, 'r') | ||
1223 | if check: | ||
1224 | names = archive.namelist() | ||
1225 | for name in names: | ||
1226 | check_path(name) | ||
1227 | else: | ||
1228 | archive = tarfile.open(archive_filename, mode) | ||
1229 | if check: | ||
1230 | names = archive.getnames() | ||
1231 | for name in names: | ||
1232 | check_path(name) | ||
1233 | if format != 'zip' and sys.version_info[0] < 3: | ||
1234 | # See Python issue 17153. If the dest path contains Unicode, | ||
1235 | # tarfile extraction fails on Python 2.x if a member path name | ||
1236 | # contains non-ASCII characters - it leads to an implicit | ||
1237 | # bytes -> unicode conversion using ASCII to decode. | ||
1238 | for tarinfo in archive.getmembers(): | ||
1239 | if not isinstance(tarinfo.name, text_type): | ||
1240 | tarinfo.name = tarinfo.name.decode('utf-8') | ||
1241 | archive.extractall(dest_dir) | ||
1242 | |||
1243 | finally: | ||
1244 | if archive: | ||
1245 | archive.close() | ||
1246 | |||
1247 | |||
1248 | def zip_dir(directory): | ||
1249 | """zip a directory tree into a BytesIO object""" | ||
1250 | result = io.BytesIO() | ||
1251 | dlen = len(directory) | ||
1252 | with ZipFile(result, "w") as zf: | ||
1253 | for root, dirs, files in os.walk(directory): | ||
1254 | for name in files: | ||
1255 | full = os.path.join(root, name) | ||
1256 | rel = root[dlen:] | ||
1257 | dest = os.path.join(rel, name) | ||
1258 | zf.write(full, dest) | ||
1259 | return result | ||
1260 | |||
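# Usage sketch (editor's illustration; the paths are hypothetical):
#
#   >>> unarchive('downloads/foo-1.0.tar.gz', '/tmp/foo')  # format inferred
#   >>> buf = zip_dir('/tmp/foo')  # a BytesIO holding a zip of the tree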
1261 | # | ||
1262 | # Simple progress bar | ||
1263 | # | ||
1264 | |||
1265 | UNITS = ('', 'K', 'M', 'G','T','P') | ||
1266 | |||
1267 | |||
1268 | class Progress(object): | ||
1269 | unknown = 'UNKNOWN' | ||
1270 | |||
1271 | def __init__(self, minval=0, maxval=100): | ||
1272 | assert maxval is None or maxval >= minval | ||
1273 | self.min = self.cur = minval | ||
1274 | self.max = maxval | ||
1275 | self.started = None | ||
1276 | self.elapsed = 0 | ||
1277 | self.done = False | ||
1278 | |||
1279 | def update(self, curval): | ||
1280 | assert self.min <= curval | ||
1281 | assert self.max is None or curval <= self.max | ||
1282 | self.cur = curval | ||
1283 | now = time.time() | ||
1284 | if self.started is None: | ||
1285 | self.started = now | ||
1286 | else: | ||
1287 | self.elapsed = now - self.started | ||
1288 | |||
1289 | def increment(self, incr): | ||
1290 | assert incr >= 0 | ||
1291 | self.update(self.cur + incr) | ||
1292 | |||
1293 | def start(self): | ||
1294 | self.update(self.min) | ||
1295 | return self | ||
1296 | |||
1297 | def stop(self): | ||
1298 | if self.max is not None: | ||
1299 | self.update(self.max) | ||
1300 | self.done = True | ||
1301 | |||
1302 | @property | ||
1303 | def maximum(self): | ||
1304 | return self.unknown if self.max is None else self.max | ||
1305 | |||
1306 | @property | ||
1307 | def percentage(self): | ||
1308 | if self.done: | ||
1309 | result = '100 %' | ||
1310 | elif self.max is None: | ||
1311 | result = ' ?? %' | ||
1312 | else: | ||
1313 | v = 100.0 * (self.cur - self.min) / (self.max - self.min) | ||
1314 | result = '%3d %%' % v | ||
1315 | return result | ||
1316 | |||
1317 | def format_duration(self, duration): | ||
1318 | if ((duration <= 0) and self.max is None) or self.cur == self.min: | ||
1319 | result = '??:??:??' | ||
1320 | #elif duration < 1: | ||
1321 | # result = '--:--:--' | ||
1322 | else: | ||
1323 | result = time.strftime('%H:%M:%S', time.gmtime(duration)) | ||
1324 | return result | ||
1325 | |||
1326 | @property | ||
1327 | def ETA(self): | ||
1328 | if self.done: | ||
1329 | prefix = 'Done' | ||
1330 | t = self.elapsed | ||
1332 | else: | ||
1333 | prefix = 'ETA ' | ||
1334 | if self.max is None: | ||
1335 | t = -1 | ||
1336 | elif self.elapsed == 0 or (self.cur == self.min): | ||
1337 | t = 0 | ||
1338 | else: | ||
1340 | t = float(self.max - self.min) | ||
1341 | t /= self.cur - self.min | ||
1342 | t = (t - 1) * self.elapsed | ||
1343 | return '%s: %s' % (prefix, self.format_duration(t)) | ||
1344 | |||
1345 | @property | ||
1346 | def speed(self): | ||
1347 | if self.elapsed == 0: | ||
1348 | result = 0.0 | ||
1349 | else: | ||
1350 | result = (self.cur - self.min) / self.elapsed | ||
1351 | for unit in UNITS: | ||
1352 | if result < 1000: | ||
1353 | break | ||
1354 | result /= 1000.0 | ||
1355 | return '%d %sB/s' % (result, unit) | ||
1356 | |||
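# Usage sketch (editor's illustration):
#
#   >>> p = Progress(maxval=200).start()
#   >>> p.increment(50)
#   >>> p.percentage
#   ' 25 %'
#   >>> p.stop(); p.percentage
#   '100 %'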
1357 | # | ||
1358 | # Glob functionality | ||
1359 | # | ||
1360 | |||
1361 | RICH_GLOB = re.compile(r'\{([^}]*)\}') | ||
1362 | _CHECK_RECURSIVE_GLOB = re.compile(r'[^/\\,{]\*\*|\*\*[^/\\,}]') | ||
1363 | _CHECK_MISMATCH_SET = re.compile(r'^[^{]*\}|\{[^}]*$') | ||
1364 | |||
1365 | |||
1366 | def iglob(path_glob): | ||
1367 | """Extended globbing function that supports ** and {opt1,opt2,opt3}.""" | ||
1368 | if _CHECK_RECURSIVE_GLOB.search(path_glob): | ||
1369 | msg = """invalid glob %r: recursive glob "**" must be used alone""" | ||
1370 | raise ValueError(msg % path_glob) | ||
1371 | if _CHECK_MISMATCH_SET.search(path_glob): | ||
1372 | msg = """invalid glob %r: mismatching set marker '{' or '}'""" | ||
1373 | raise ValueError(msg % path_glob) | ||
1374 | return _iglob(path_glob) | ||
1375 | |||
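# Usage sketch (editor's illustration; the patterns are hypothetical):
#
#   iglob('src/**/*.py')             # every .py file anywhere under src/
#   iglob('docs/{index,intro}.rst')  # docs/index.rst and docs/intro.rst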
1376 | |||
1377 | def _iglob(path_glob): | ||
1378 | rich_path_glob = RICH_GLOB.split(path_glob, 1) | ||
1379 | if len(rich_path_glob) > 1: | ||
1380 | assert len(rich_path_glob) == 3, rich_path_glob | ||
1381 | prefix, group, suffix = rich_path_glob | ||
1382 | for item in group.split(','): | ||
1383 | for path in _iglob(''.join((prefix, item, suffix))): | ||
1384 | yield path | ||
1385 | else: | ||
1386 | if '**' not in path_glob: | ||
1387 | for item in std_iglob(path_glob): | ||
1388 | yield item | ||
1389 | else: | ||
1390 | prefix, radical = path_glob.split('**', 1) | ||
1391 | if prefix == '': | ||
1392 | prefix = '.' | ||
1393 | if radical == '': | ||
1394 | radical = '*' | ||
1395 | else: | ||
1396 | # we support both | ||
1397 | radical = radical.lstrip('/') | ||
1398 | radical = radical.lstrip('\\') | ||
1399 | for path, dirs, files in os.walk(prefix): | ||
1400 | path = os.path.normpath(path) | ||
1401 | for fn in _iglob(os.path.join(path, radical)): | ||
1402 | yield fn | ||
1403 | |||
1404 | if ssl: | ||
1405 | from .compat import (HTTPSHandler as BaseHTTPSHandler, match_hostname, | ||
1406 | CertificateError) | ||
1407 | |||
1408 | |||
1409 | # | ||
1410 | # HTTPSConnection which verifies certificates/matches domains | ||
1411 | # | ||
1412 | |||
1413 | class HTTPSConnection(httplib.HTTPSConnection): | ||
1414 | ca_certs = None # set this to the path to the certs file (.pem) | ||
1415 | check_domain = True # only used if ca_certs is not None | ||
1416 | |||
1417 | # noinspection PyPropertyAccess | ||
1418 | def connect(self): | ||
1419 | sock = socket.create_connection((self.host, self.port), self.timeout) | ||
1420 | if getattr(self, '_tunnel_host', False): | ||
1421 | self.sock = sock | ||
1422 | self._tunnel() | ||
1423 | |||
1424 | if not hasattr(ssl, 'SSLContext'): | ||
1425 | # For 2.x | ||
1426 | if self.ca_certs: | ||
1427 | cert_reqs = ssl.CERT_REQUIRED | ||
1428 | else: | ||
1429 | cert_reqs = ssl.CERT_NONE | ||
1430 | self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file, | ||
1431 | cert_reqs=cert_reqs, | ||
1432 | ssl_version=ssl.PROTOCOL_SSLv23, | ||
1433 | ca_certs=self.ca_certs) | ||
1434 | else: # pragma: no cover | ||
1435 | context = ssl.SSLContext(ssl.PROTOCOL_SSLv23) | ||
1436 | context.options |= ssl.OP_NO_SSLv2 | ||
1437 | if self.cert_file: | ||
1438 | context.load_cert_chain(self.cert_file, self.key_file) | ||
1439 | kwargs = {} | ||
1440 | if self.ca_certs: | ||
1441 | context.verify_mode = ssl.CERT_REQUIRED | ||
1442 | context.load_verify_locations(cafile=self.ca_certs) | ||
1443 | if getattr(ssl, 'HAS_SNI', False): | ||
1444 | kwargs['server_hostname'] = self.host | ||
1445 | self.sock = context.wrap_socket(sock, **kwargs) | ||
1446 | if self.ca_certs and self.check_domain: | ||
1447 | try: | ||
1448 | match_hostname(self.sock.getpeercert(), self.host) | ||
1449 | logger.debug('Host verified: %s', self.host) | ||
1450 | except CertificateError: # pragma: no cover | ||
1451 | self.sock.shutdown(socket.SHUT_RDWR) | ||
1452 | self.sock.close() | ||
1453 | raise | ||
1454 | |||
1455 | class HTTPSHandler(BaseHTTPSHandler): | ||
1456 | def __init__(self, ca_certs, check_domain=True): | ||
1457 | BaseHTTPSHandler.__init__(self) | ||
1458 | self.ca_certs = ca_certs | ||
1459 | self.check_domain = check_domain | ||
1460 | |||
1461 | def _conn_maker(self, *args, **kwargs): | ||
1462 | """ | ||
1463 | This is called to create a connection instance. Normally you'd | ||
1464 | pass a connection class to do_open, but it doesn't actually check for | ||
1465 | a class, and just expects a callable. As long as we behave just as a | ||
1466 | constructor would have, we should be OK. If it ever changes so that | ||
1467 | we *must* pass a class, we'll create an UnsafeHTTPSConnection class | ||
1468 | which just sets check_domain to False in the class definition, and | ||
1469 | choose which one to pass to do_open. | ||
1470 | """ | ||
1471 | result = HTTPSConnection(*args, **kwargs) | ||
1472 | if self.ca_certs: | ||
1473 | result.ca_certs = self.ca_certs | ||
1474 | result.check_domain = self.check_domain | ||
1475 | return result | ||
1476 | |||
1477 | def https_open(self, req): | ||
1478 | try: | ||
1479 | return self.do_open(self._conn_maker, req) | ||
1480 | except URLError as e: | ||
1481 | if 'certificate verify failed' in str(e.reason): | ||
1482 | raise CertificateError('Unable to verify server certificate ' | ||
1483 | 'for %s' % req.host) | ||
1484 | else: | ||
1485 | raise | ||
1486 | |||
1487 | # | ||
1488 | # To guard against mixing HTTP traffic with HTTPS (examples: a Man-In-The- | ||
1489 | # Middle proxy using HTTP listens on port 443, or an index mistakenly serves | ||
1490 | # HTML containing a http://xyz link when it should be https://xyz), | ||
1491 | # you can use the following handler class, which does not allow HTTP traffic. | ||
1492 | # | ||
1493 | # It works by inheriting from HTTPHandler - so build_opener won't add a | ||
1494 | # handler for HTTP itself. | ||
1495 | # | ||
1496 | class HTTPSOnlyHandler(HTTPSHandler, HTTPHandler): | ||
1497 | def http_open(self, req): | ||
1498 | raise URLError('Unexpected HTTP request on what should be a secure ' | ||
1499 | 'connection: %s' % req) | ||
1500 | |||
1501 | # | ||
1502 | # XML-RPC with timeouts | ||
1503 | # | ||
1504 | |||
1505 | _ver_info = sys.version_info[:2] | ||
1506 | |||
1507 | if _ver_info == (2, 6): | ||
1508 | class HTTP(httplib.HTTP): | ||
1509 | def __init__(self, host='', port=None, **kwargs): | ||
1510 | if port == 0: # 0 means use port 0, not the default port | ||
1511 | port = None | ||
1512 | self._setup(self._connection_class(host, port, **kwargs)) | ||
1513 | |||
1514 | |||
1515 | if ssl: | ||
1516 | class HTTPS(httplib.HTTPS): | ||
1517 | def __init__(self, host='', port=None, **kwargs): | ||
1518 | if port == 0: # 0 means use port 0, not the default port | ||
1519 | port = None | ||
1520 | self._setup(self._connection_class(host, port, **kwargs)) | ||
1521 | |||
1522 | |||
1523 | class Transport(xmlrpclib.Transport): | ||
1524 | def __init__(self, timeout, use_datetime=0): | ||
1525 | self.timeout = timeout | ||
1526 | xmlrpclib.Transport.__init__(self, use_datetime) | ||
1527 | |||
1528 | def make_connection(self, host): | ||
1529 | h, eh, x509 = self.get_host_info(host) | ||
1530 | if _ver_info == (2, 6): | ||
1531 | result = HTTP(h, timeout=self.timeout) | ||
1532 | else: | ||
1533 | if not self._connection or host != self._connection[0]: | ||
1534 | self._extra_headers = eh | ||
1535 | self._connection = host, httplib.HTTPConnection(h) | ||
1536 | result = self._connection[1] | ||
1537 | return result | ||
1538 | |||
1539 | if ssl: | ||
1540 | class SafeTransport(xmlrpclib.SafeTransport): | ||
1541 | def __init__(self, timeout, use_datetime=0): | ||
1542 | self.timeout = timeout | ||
1543 | xmlrpclib.SafeTransport.__init__(self, use_datetime) | ||
1544 | |||
1545 | def make_connection(self, host): | ||
1546 | h, eh, kwargs = self.get_host_info(host) | ||
1547 | if not kwargs: | ||
1548 | kwargs = {} | ||
1549 | kwargs['timeout'] = self.timeout | ||
1550 | if _ver_info == (2, 6): | ||
1551 | result = HTTPS(host, None, **kwargs) | ||
1552 | else: | ||
1553 | if not self._connection or host != self._connection[0]: | ||
1554 | self._extra_headers = eh | ||
1555 | self._connection = host, httplib.HTTPSConnection(h, None, | ||
1556 | **kwargs) | ||
1557 | result = self._connection[1] | ||
1558 | return result | ||
1559 | |||
1560 | |||
1561 | class ServerProxy(xmlrpclib.ServerProxy): | ||
1562 | def __init__(self, uri, **kwargs): | ||
1563 | self.timeout = timeout = kwargs.pop('timeout', None) | ||
1564 | # The above classes only come into play if a timeout | ||
1565 | # is specified | ||
1566 | if timeout is not None: | ||
1567 | scheme, _ = splittype(uri) | ||
1568 | use_datetime = kwargs.get('use_datetime', 0) | ||
1569 | if scheme == 'https': | ||
1570 | tcls = SafeTransport | ||
1571 | else: | ||
1572 | tcls = Transport | ||
1573 | kwargs['transport'] = t = tcls(timeout, use_datetime=use_datetime) | ||
1574 | self.transport = t | ||
1575 | xmlrpclib.ServerProxy.__init__(self, uri, **kwargs) | ||
1576 | |||
1577 | # | ||
1578 | # CSV functionality. This is provided because on 2.x, the csv module can't | ||
1579 | # handle Unicode. However, we need to deal with Unicode in e.g. RECORD files. | ||
1580 | # | ||
1581 | |||
1582 | def _csv_open(fn, mode, **kwargs): | ||
1583 | if sys.version_info[0] < 3: | ||
1584 | mode += 'b' | ||
1585 | else: | ||
1586 | kwargs['newline'] = '' | ||
1587 | # Python 3 determines encoding from locale. Force 'utf-8' | ||
1588 | # file encoding to match other forced utf-8 encoding | ||
1589 | kwargs['encoding'] = 'utf-8' | ||
1590 | return open(fn, mode, **kwargs) | ||
1591 | |||
1592 | |||
1593 | class CSVBase(object): | ||
1594 | defaults = { | ||
1595 | 'delimiter': str(','), # The strs are used because we need native | ||
1596 | 'quotechar': str('"'), # str in the csv API (2.x won't take | ||
1597 | 'lineterminator': str('\n') # Unicode) | ||
1598 | } | ||
1599 | |||
1600 | def __enter__(self): | ||
1601 | return self | ||
1602 | |||
1603 | def __exit__(self, *exc_info): | ||
1604 | self.stream.close() | ||
1605 | |||
1606 | |||
1607 | class CSVReader(CSVBase): | ||
1608 | def __init__(self, **kwargs): | ||
1609 | if 'stream' in kwargs: | ||
1610 | stream = kwargs['stream'] | ||
1611 | if sys.version_info[0] >= 3: | ||
1612 | # needs to be a text stream | ||
1613 | stream = codecs.getreader('utf-8')(stream) | ||
1614 | self.stream = stream | ||
1615 | else: | ||
1616 | self.stream = _csv_open(kwargs['path'], 'r') | ||
1617 | self.reader = csv.reader(self.stream, **self.defaults) | ||
1618 | |||
1619 | def __iter__(self): | ||
1620 | return self | ||
1621 | |||
1622 | def next(self): | ||
1623 | result = next(self.reader) | ||
1624 | if sys.version_info[0] < 3: | ||
1625 | for i, item in enumerate(result): | ||
1626 | if not isinstance(item, text_type): | ||
1627 | result[i] = item.decode('utf-8') | ||
1628 | return result | ||
1629 | |||
1630 | __next__ = next | ||
1631 | |||
1632 | class CSVWriter(CSVBase): | ||
1633 | def __init__(self, fn, **kwargs): | ||
1634 | self.stream = _csv_open(fn, 'w') | ||
1635 | self.writer = csv.writer(self.stream, **self.defaults) | ||
1636 | |||
1637 | def writerow(self, row): | ||
1638 | if sys.version_info[0] < 3: | ||
1639 | r = [] | ||
1640 | for item in row: | ||
1641 | if isinstance(item, text_type): | ||
1642 | item = item.encode('utf-8') | ||
1643 | r.append(item) | ||
1644 | row = r | ||
1645 | self.writer.writerow(row) | ||
1646 | |||
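# Usage sketch (editor's illustration; the file name and row values are
# hypothetical):
#
#   >>> with CSVWriter('RECORD') as writer:
#   ...     writer.writerow(('pkg/__init__.py', 'sha256=abc123', '1024'))
#   >>> with CSVReader(path='RECORD') as reader:
#   ...     rows = list(reader)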
1647 | # | ||
1648 | # Configurator functionality | ||
1649 | # | ||
1650 | |||
1651 | class Configurator(BaseConfigurator): | ||
1652 | |||
1653 | value_converters = dict(BaseConfigurator.value_converters) | ||
1654 | value_converters['inc'] = 'inc_convert' | ||
1655 | |||
1656 | def __init__(self, config, base=None): | ||
1657 | super(Configurator, self).__init__(config) | ||
1658 | self.base = base or os.getcwd() | ||
1659 | |||
1660 | def configure_custom(self, config): | ||
1661 | def convert(o): | ||
1662 | if isinstance(o, (list, tuple)): | ||
1663 | result = type(o)([convert(i) for i in o]) | ||
1664 | elif isinstance(o, dict): | ||
1665 | if '()' in o: | ||
1666 | result = self.configure_custom(o) | ||
1667 | else: | ||
1668 | result = {} | ||
1669 | for k in o: | ||
1670 | result[k] = convert(o[k]) | ||
1671 | else: | ||
1672 | result = self.convert(o) | ||
1673 | return result | ||
1674 | |||
1675 | c = config.pop('()') | ||
1676 | if not callable(c): | ||
1677 | c = self.resolve(c) | ||
1678 | props = config.pop('.', None) | ||
1679 | # Check for valid identifiers | ||
1680 | args = config.pop('[]', ()) | ||
1681 | if args: | ||
1682 | args = tuple([convert(o) for o in args]) | ||
1683 | items = [(k, convert(config[k])) for k in config if valid_ident(k)] | ||
1684 | kwargs = dict(items) | ||
1685 | result = c(*args, **kwargs) | ||
1686 | if props: | ||
1687 | for n, v in props.items(): | ||
1688 | setattr(result, n, convert(v)) | ||
1689 | return result | ||
1690 | |||
1691 | def __getitem__(self, key): | ||
1692 | result = self.config[key] | ||
1693 | if isinstance(result, dict) and '()' in result: | ||
1694 | self.config[key] = result = self.configure_custom(result) | ||
1695 | return result | ||
1696 | |||
1697 | def inc_convert(self, value): | ||
1698 | """Default converter for the inc:// protocol.""" | ||
1699 | if not os.path.isabs(value): | ||
1700 | value = os.path.join(self.base, value) | ||
1701 | with codecs.open(value, 'r', encoding='utf-8') as f: | ||
1702 | result = json.load(f) | ||
1703 | return result | ||
1704 | |||
1705 | |||
1706 | class SubprocessMixin(object): | ||
1707 | """ | ||
1708 | Mixin for running subprocesses and capturing their output | ||
1709 | """ | ||
1710 | def __init__(self, verbose=False, progress=None): | ||
1711 | self.verbose = verbose | ||
1712 | self.progress = progress | ||
1713 | |||
1714 | def reader(self, stream, context): | ||
1715 | """ | ||
1716 | Read lines from a subprocess's output stream and either pass them to a | ||
1717 | progress callable (if specified) or write progress information to sys.stderr. | ||
1718 | """ | ||
1719 | progress = self.progress | ||
1720 | verbose = self.verbose | ||
1721 | while True: | ||
1722 | s = stream.readline() | ||
1723 | if not s: | ||
1724 | break | ||
1725 | if progress is not None: | ||
1726 | progress(s, context) | ||
1727 | else: | ||
1728 | if not verbose: | ||
1729 | sys.stderr.write('.') | ||
1730 | else: | ||
1731 | sys.stderr.write(s.decode('utf-8')) | ||
1732 | sys.stderr.flush() | ||
1733 | stream.close() | ||
1734 | |||
1735 | def run_command(self, cmd, **kwargs): | ||
1736 | p = subprocess.Popen(cmd, stdout=subprocess.PIPE, | ||
1737 | stderr=subprocess.PIPE, **kwargs) | ||
1738 | t1 = threading.Thread(target=self.reader, args=(p.stdout, 'stdout')) | ||
1739 | t1.start() | ||
1740 | t2 = threading.Thread(target=self.reader, args=(p.stderr, 'stderr')) | ||
1741 | t2.start() | ||
1742 | p.wait() | ||
1743 | t1.join() | ||
1744 | t2.join() | ||
1745 | if self.progress is not None: | ||
1746 | self.progress('done.', 'main') | ||
1747 | elif self.verbose: | ||
1748 | sys.stderr.write('done.\n') | ||
1749 | return p | ||
1750 | |||
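# Usage sketch (editor's illustration):
#
#   >>> import sys
#   >>> class Runner(SubprocessMixin):
#   ...     pass
#   >>> proc = Runner(verbose=True).run_command([sys.executable, '--version'])
#   >>> proc.returncode
#   0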
1751 | |||
1752 | def normalize_name(name): | ||
1753 | """Normalize a python package name a la PEP 503""" | ||
1754 | # https://www.python.org/dev/peps/pep-0503/#normalized-names | ||
1755 | return re.sub('[-_.]+', '-', name).lower() | ||
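
# Usage sketch (editor's illustration):
#
#   >>> normalize_name('Friendly.Bard_Tools')
#   'friendly-bard-tools'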
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/version.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/version.py new file mode 100644 index 0000000..959f153 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/version.py | |||
@@ -0,0 +1,736 @@ | |||
1 | # -*- coding: utf-8 -*- | ||
2 | # | ||
3 | # Copyright (C) 2012-2017 The Python Software Foundation. | ||
4 | # See LICENSE.txt and CONTRIBUTORS.txt. | ||
5 | # | ||
6 | """ | ||
7 | Implementation of a flexible versioning scheme providing support for PEP-440, | ||
8 | setuptools-compatible and semantic versioning. | ||
9 | """ | ||
10 | |||
11 | import logging | ||
12 | import re | ||
13 | |||
14 | from .compat import string_types | ||
15 | from .util import parse_requirement | ||
16 | |||
17 | __all__ = ['NormalizedVersion', 'NormalizedMatcher', | ||
18 | 'LegacyVersion', 'LegacyMatcher', | ||
19 | 'SemanticVersion', 'SemanticMatcher', | ||
20 | 'UnsupportedVersionError', 'get_scheme'] | ||
21 | |||
22 | logger = logging.getLogger(__name__) | ||
23 | |||
24 | |||
25 | class UnsupportedVersionError(ValueError): | ||
26 | """This is an unsupported version.""" | ||
27 | pass | ||
28 | |||
29 | |||
30 | class Version(object): | ||
31 | def __init__(self, s): | ||
32 | self._string = s = s.strip() | ||
33 | self._parts = parts = self.parse(s) | ||
34 | assert isinstance(parts, tuple) | ||
35 | assert len(parts) > 0 | ||
36 | |||
37 | def parse(self, s): | ||
38 | raise NotImplementedError('please implement in a subclass') | ||
39 | |||
40 | def _check_compatible(self, other): | ||
41 | if type(self) != type(other): | ||
42 | raise TypeError('cannot compare %r and %r' % (self, other)) | ||
43 | |||
44 | def __eq__(self, other): | ||
45 | self._check_compatible(other) | ||
46 | return self._parts == other._parts | ||
47 | |||
48 | def __ne__(self, other): | ||
49 | return not self.__eq__(other) | ||
50 | |||
51 | def __lt__(self, other): | ||
52 | self._check_compatible(other) | ||
53 | return self._parts < other._parts | ||
54 | |||
55 | def __gt__(self, other): | ||
56 | return not (self.__lt__(other) or self.__eq__(other)) | ||
57 | |||
58 | def __le__(self, other): | ||
59 | return self.__lt__(other) or self.__eq__(other) | ||
60 | |||
61 | def __ge__(self, other): | ||
62 | return self.__gt__(other) or self.__eq__(other) | ||
63 | |||
64 | # See http://docs.python.org/reference/datamodel#object.__hash__ | ||
65 | def __hash__(self): | ||
66 | return hash(self._parts) | ||
67 | |||
68 | def __repr__(self): | ||
69 | return "%s('%s')" % (self.__class__.__name__, self._string) | ||
70 | |||
71 | def __str__(self): | ||
72 | return self._string | ||
73 | |||
74 | @property | ||
75 | def is_prerelease(self): | ||
76 | raise NotImplementedError('Please implement in subclasses.') | ||
77 | |||
78 | |||
79 | class Matcher(object): | ||
80 | version_class = None | ||
81 | |||
82 | # value is either a callable or the name of a method | ||
83 | _operators = { | ||
84 | '<': lambda v, c, p: v < c, | ||
85 | '>': lambda v, c, p: v > c, | ||
86 | '<=': lambda v, c, p: v == c or v < c, | ||
87 | '>=': lambda v, c, p: v == c or v > c, | ||
88 | '==': lambda v, c, p: v == c, | ||
89 | '===': lambda v, c, p: v == c, | ||
90 | # by default, compatible => >=. | ||
91 | '~=': lambda v, c, p: v == c or v > c, | ||
92 | '!=': lambda v, c, p: v != c, | ||
93 | } | ||
94 | |||
95 | # this is a method only to support alternative implementations | ||
96 | # via overriding | ||
97 | def parse_requirement(self, s): | ||
98 | return parse_requirement(s) | ||
99 | |||
100 | def __init__(self, s): | ||
101 | if self.version_class is None: | ||
102 | raise ValueError('Please specify a version class') | ||
103 | self._string = s = s.strip() | ||
104 | r = self.parse_requirement(s) | ||
105 | if not r: | ||
106 | raise ValueError('Not valid: %r' % s) | ||
107 | self.name = r.name | ||
108 | self.key = self.name.lower() # for case-insensitive comparisons | ||
109 | clist = [] | ||
110 | if r.constraints: | ||
112 | for op, s in r.constraints: | ||
113 | if s.endswith('.*'): | ||
114 | if op not in ('==', '!='): | ||
115 | raise ValueError('\'.*\' not allowed for ' | ||
116 | '%r constraints' % op) | ||
117 | # Could be a partial version (e.g. for '2.*') which | ||
118 | # won't parse as a version, so keep it as a string | ||
119 | vn, prefix = s[:-2], True | ||
120 | # Just to check that vn is a valid version | ||
121 | self.version_class(vn) | ||
122 | else: | ||
123 | # Should parse as a version, so we can create an | ||
124 | # instance for the comparison | ||
125 | vn, prefix = self.version_class(s), False | ||
126 | clist.append((op, vn, prefix)) | ||
127 | self._parts = tuple(clist) | ||
128 | |||
129 | def match(self, version): | ||
130 | """ | ||
131 | Check if the provided version matches the constraints. | ||
132 | |||
133 | :param version: The version to match against this instance. | ||
134 | :type version: String or :class:`Version` instance. | ||
135 | """ | ||
136 | if isinstance(version, string_types): | ||
137 | version = self.version_class(version) | ||
138 | for operator, constraint, prefix in self._parts: | ||
139 | f = self._operators.get(operator) | ||
140 | if isinstance(f, string_types): | ||
141 | f = getattr(self, f) | ||
142 | if not f: | ||
143 | msg = ('%r not implemented ' | ||
144 | 'for %s' % (operator, self.__class__.__name__)) | ||
145 | raise NotImplementedError(msg) | ||
146 | if not f(version, constraint, prefix): | ||
147 | return False | ||
148 | return True | ||
149 | |||
150 | @property | ||
151 | def exact_version(self): | ||
152 | result = None | ||
153 | if len(self._parts) == 1 and self._parts[0][0] in ('==', '==='): | ||
154 | result = self._parts[0][1] | ||
155 | return result | ||
156 | |||
157 | def _check_compatible(self, other): | ||
158 | if type(self) != type(other) or self.name != other.name: | ||
159 | raise TypeError('cannot compare %s and %s' % (self, other)) | ||
160 | |||
161 | def __eq__(self, other): | ||
162 | self._check_compatible(other) | ||
163 | return self.key == other.key and self._parts == other._parts | ||
164 | |||
165 | def __ne__(self, other): | ||
166 | return not self.__eq__(other) | ||
167 | |||
168 | # See http://docs.python.org/reference/datamodel#object.__hash__ | ||
169 | def __hash__(self): | ||
170 | return hash(self.key) + hash(self._parts) | ||
171 | |||
172 | def __repr__(self): | ||
173 | return "%s(%r)" % (self.__class__.__name__, self._string) | ||
174 | |||
175 | def __str__(self): | ||
176 | return self._string | ||
177 | |||
178 | |||
179 | PEP440_VERSION_RE = re.compile(r'^v?(\d+!)?(\d+(\.\d+)*)((a|b|c|rc)(\d+))?' | ||
180 | r'(\.(post)(\d+))?(\.(dev)(\d+))?' | ||
181 | r'(\+([a-zA-Z\d]+(\.[a-zA-Z\d]+)?))?$') | ||
182 | |||
183 | |||
184 | def _pep_440_key(s): | ||
185 | s = s.strip() | ||
186 | m = PEP440_VERSION_RE.match(s) | ||
187 | if not m: | ||
188 | raise UnsupportedVersionError('Not a valid version: %s' % s) | ||
189 | groups = m.groups() | ||
190 | nums = tuple(int(v) for v in groups[1].split('.')) | ||
191 | while len(nums) > 1 and nums[-1] == 0: | ||
192 | nums = nums[:-1] | ||
193 | |||
194 | if not groups[0]: | ||
195 | epoch = 0 | ||
196 | else: | ||
197 | epoch = int(groups[0]) | ||
198 | pre = groups[4:6] | ||
199 | post = groups[7:9] | ||
200 | dev = groups[10:12] | ||
201 | local = groups[13] | ||
202 | if pre == (None, None): | ||
203 | pre = () | ||
204 | else: | ||
205 | pre = pre[0], int(pre[1]) | ||
206 | if post == (None, None): | ||
207 | post = () | ||
208 | else: | ||
209 | post = post[0], int(post[1]) | ||
210 | if dev == (None, None): | ||
211 | dev = () | ||
212 | else: | ||
213 | dev = dev[0], int(dev[1]) | ||
214 | if local is None: | ||
215 | local = () | ||
216 | else: | ||
217 | parts = [] | ||
218 | for part in local.split('.'): | ||
219 | # to ensure that numeric compares as > lexicographic, avoid | ||
220 | # comparing them directly, but encode a tuple which ensures | ||
221 | # correct sorting | ||
222 | if part.isdigit(): | ||
223 | part = (1, int(part)) | ||
224 | else: | ||
225 | part = (0, part) | ||
226 | parts.append(part) | ||
227 | local = tuple(parts) | ||
228 | if not pre: | ||
229 | # either before pre-release, or final release and after | ||
230 | if not post and dev: | ||
231 | # before pre-release | ||
232 | pre = ('a', -1) # to sort before a0 | ||
233 | else: | ||
234 | pre = ('z',) # to sort after all pre-releases | ||
235 | # now look at the state of post and dev. | ||
236 | if not post: | ||
237 | post = ('_',) # sort before 'a' | ||
238 | if not dev: | ||
239 | dev = ('final',) | ||
240 | |||
242 | return epoch, nums, pre, post, dev, local | ||
243 | |||
244 | |||
245 | _normalized_key = _pep_440_key | ||
246 | |||
247 | |||
248 | class NormalizedVersion(Version): | ||
249 | """A rational version. | ||
250 | |||
251 | Good: | ||
252 | 1.2 # equivalent to "1.2.0" | ||
253 | 1.2.0 | ||
254 | 1.2a1 | ||
255 | 1.2.3a2 | ||
256 | 1.2.3b1 | ||
257 | 1.2.3c1 | ||
258 | 1.2.3.4 | ||
259 | TODO: fill this out | ||
260 | |||
261 | Bad: | ||
262 | 1 # minimum two numbers | ||
263 | 1.2a # release level must have a release serial | ||
264 | 1.2.3b | ||
265 | """ | ||
266 | def parse(self, s): | ||
267 | result = _normalized_key(s) | ||
268 | # _normalized_key loses trailing zeroes in the release | ||
269 | # clause, since that's needed to ensure that X.Y == X.Y.0 == X.Y.0.0 | ||
270 | # However, PEP 440 prefix matching needs it: for example, | ||
271 | # (~= 1.4.5.0) matches differently to (~= 1.4.5.0.0). | ||
272 | m = PEP440_VERSION_RE.match(s) # must succeed | ||
273 | groups = m.groups() | ||
274 | self._release_clause = tuple(int(v) for v in groups[1].split('.')) | ||
275 | return result | ||
276 | |||
277 | PREREL_TAGS = set(['a', 'b', 'c', 'rc', 'dev']) | ||
278 | |||
279 | @property | ||
280 | def is_prerelease(self): | ||
281 | return any(t[0] in self.PREREL_TAGS for t in self._parts if t) | ||
282 | |||
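# Ordering sketch (editor's illustration): dev releases sort before
# pre-releases, which sort before the final release:
#
#   >>> (NormalizedVersion('1.0.dev1') < NormalizedVersion('1.0a1')
#   ...  < NormalizedVersion('1.0b2') < NormalizedVersion('1.0'))
#   True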
283 | |||
284 | def _match_prefix(x, y): | ||
285 | x = str(x) | ||
286 | y = str(y) | ||
287 | if x == y: | ||
288 | return True | ||
289 | if not x.startswith(y): | ||
290 | return False | ||
291 | n = len(y) | ||
292 | return x[n] == '.' | ||
293 | |||
294 | |||
295 | class NormalizedMatcher(Matcher): | ||
296 | version_class = NormalizedVersion | ||
297 | |||
298 | # value is either a callable or the name of a method | ||
299 | _operators = { | ||
300 | '~=': '_match_compatible', | ||
301 | '<': '_match_lt', | ||
302 | '>': '_match_gt', | ||
303 | '<=': '_match_le', | ||
304 | '>=': '_match_ge', | ||
305 | '==': '_match_eq', | ||
306 | '===': '_match_arbitrary', | ||
307 | '!=': '_match_ne', | ||
308 | } | ||
309 | |||
310 | def _adjust_local(self, version, constraint, prefix): | ||
311 | if prefix: | ||
312 | strip_local = '+' not in constraint and version._parts[-1] | ||
313 | else: | ||
314 | # both constraint and version are | ||
315 | # NormalizedVersion instances. | ||
316 | # If constraint does not have a local component, | ||
317 | # ensure the version doesn't, either. | ||
318 | strip_local = not constraint._parts[-1] and version._parts[-1] | ||
319 | if strip_local: | ||
320 | s = version._string.split('+', 1)[0] | ||
321 | version = self.version_class(s) | ||
322 | return version, constraint | ||
323 | |||
324 | def _match_lt(self, version, constraint, prefix): | ||
325 | version, constraint = self._adjust_local(version, constraint, prefix) | ||
326 | if version >= constraint: | ||
327 | return False | ||
328 | release_clause = constraint._release_clause | ||
329 | pfx = '.'.join([str(i) for i in release_clause]) | ||
330 | return not _match_prefix(version, pfx) | ||
331 | |||
332 | def _match_gt(self, version, constraint, prefix): | ||
333 | version, constraint = self._adjust_local(version, constraint, prefix) | ||
334 | if version <= constraint: | ||
335 | return False | ||
336 | release_clause = constraint._release_clause | ||
337 | pfx = '.'.join([str(i) for i in release_clause]) | ||
338 | return not _match_prefix(version, pfx) | ||
339 | |||
340 | def _match_le(self, version, constraint, prefix): | ||
341 | version, constraint = self._adjust_local(version, constraint, prefix) | ||
342 | return version <= constraint | ||
343 | |||
344 | def _match_ge(self, version, constraint, prefix): | ||
345 | version, constraint = self._adjust_local(version, constraint, prefix) | ||
346 | return version >= constraint | ||
347 | |||
348 | def _match_eq(self, version, constraint, prefix): | ||
349 | version, constraint = self._adjust_local(version, constraint, prefix) | ||
350 | if not prefix: | ||
351 | result = (version == constraint) | ||
352 | else: | ||
353 | result = _match_prefix(version, constraint) | ||
354 | return result | ||
355 | |||
356 | def _match_arbitrary(self, version, constraint, prefix): | ||
357 | return str(version) == str(constraint) | ||
358 | |||
359 | def _match_ne(self, version, constraint, prefix): | ||
360 | version, constraint = self._adjust_local(version, constraint, prefix) | ||
361 | if not prefix: | ||
362 | result = (version != constraint) | ||
363 | else: | ||
364 | result = not _match_prefix(version, constraint) | ||
365 | return result | ||
366 | |||
367 | def _match_compatible(self, version, constraint, prefix): | ||
368 | version, constraint = self._adjust_local(version, constraint, prefix) | ||
369 | if version == constraint: | ||
370 | return True | ||
371 | if version < constraint: | ||
372 | return False | ||
373 | # if not prefix: | ||
374 | # return True | ||
375 | release_clause = constraint._release_clause | ||
376 | if len(release_clause) > 1: | ||
377 | release_clause = release_clause[:-1] | ||
378 | pfx = '.'.join([str(i) for i in release_clause]) | ||
379 | return _match_prefix(version, pfx) | ||
380 | |||
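# Usage sketch (editor's illustration; assumes parse_requirement accepts
# the parenthesized constraint form used in distlib requirements):
#
#   >>> m = NormalizedMatcher('foo (== 1.4.*)')
#   >>> m.match('1.4.5'), m.match('1.5.0')
#   (True, False)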
381 | _REPLACEMENTS = ( | ||
382 | (re.compile('[.+-]$'), ''), # remove trailing puncts | ||
383 | (re.compile(r'^[.](\d)'), r'0.\1'), # .N -> 0.N at start | ||
384 | (re.compile('^[.-]'), ''), # remove leading puncts | ||
385 | (re.compile(r'^\((.*)\)$'), r'\1'), # remove parentheses | ||
386 | (re.compile(r'^v(ersion)?\s*(\d+)'), r'\2'), # remove leading v(ersion) | ||
387 | (re.compile(r'^r(ev)?\s*(\d+)'), r'\2'), # remove leading r(ev) | ||
388 | (re.compile('[.]{2,}'), '.'), # multiple runs of '.' | ||
389 | (re.compile(r'\b(alfa|apha)\b'), 'alpha'), # misspelt alpha | ||
390 | (re.compile(r'\b(pre-alpha|prealpha)\b'), | ||
391 | 'pre.alpha'), # standardise | ||
392 | (re.compile(r'\(beta\)$'), 'beta'), # remove parentheses | ||
393 | ) | ||
394 | |||
395 | _SUFFIX_REPLACEMENTS = ( | ||
396 | (re.compile('^[:~._+-]+'), ''), # remove leading puncts | ||
397 | (re.compile('[,*")([\\]]'), ''), # remove unwanted chars | ||
398 | (re.compile('[~:+_ -]'), '.'), # replace illegal chars | ||
399 | (re.compile('[.]{2,}'), '.'), # multiple runs of '.' | ||
400 | (re.compile(r'\.$'), ''), # trailing '.' | ||
401 | ) | ||
402 | |||
403 | _NUMERIC_PREFIX = re.compile(r'(\d+(\.\d+)*)') | ||
404 | |||
405 | |||
406 | def _suggest_semantic_version(s): | ||
407 | """ | ||
408 | Try to suggest a semantic form for a version for which | ||
409 | _suggest_normalized_version couldn't come up with anything. | ||
410 | """ | ||
411 | result = s.strip().lower() | ||
412 | for pat, repl in _REPLACEMENTS: | ||
413 | result = pat.sub(repl, result) | ||
414 | if not result: | ||
415 | result = '0.0.0' | ||
416 | |||
417 | # Now look for numeric prefix, and separate it out from | ||
418 | # the rest. | ||
420 | m = _NUMERIC_PREFIX.match(result) | ||
421 | if not m: | ||
422 | prefix = '0.0.0' | ||
423 | suffix = result | ||
424 | else: | ||
425 | prefix = m.groups()[0].split('.') | ||
426 | prefix = [int(i) for i in prefix] | ||
427 | while len(prefix) < 3: | ||
428 | prefix.append(0) | ||
429 | if len(prefix) == 3: | ||
430 | suffix = result[m.end():] | ||
431 | else: | ||
432 | suffix = '.'.join([str(i) for i in prefix[3:]]) + result[m.end():] | ||
433 | prefix = prefix[:3] | ||
434 | prefix = '.'.join([str(i) for i in prefix]) | ||
435 | suffix = suffix.strip() | ||
436 | if suffix: | ||
438 | # massage the suffix. | ||
439 | for pat, repl in _SUFFIX_REPLACEMENTS: | ||
440 | suffix = pat.sub(repl, suffix) | ||
441 | |||
442 | if not suffix: | ||
443 | result = prefix | ||
444 | else: | ||
445 | sep = '-' if 'dev' in suffix else '+' | ||
446 | result = prefix + sep + suffix | ||
447 | if not is_semver(result): | ||
448 | result = None | ||
449 | return result | ||
450 | |||
451 | |||
452 | def _suggest_normalized_version(s): | ||
453 | """Suggest a normalized version close to the given version string. | ||
454 | |||
455 | If you have a version string that isn't rational (i.e. NormalizedVersion | ||
456 | doesn't like it) then you might be able to get an equivalent (or close) | ||
457 | rational version from this function. | ||
458 | |||
459 | This does a number of simple normalizations to the given string, based | ||
460 | on observation of versions currently in use on PyPI. Given a dump of | ||
461 | those versions during PyCon 2009, 4287 of them: | ||
462 | - 2312 (53.93%) match NormalizedVersion without change | ||
463 | - 3474 (81.04%) match when using this suggestion method | ||
465 | |||
466 | @param s {str} An irrational version string. | ||
467 | @returns A rational version string, or None if one couldn't be determined. | ||
468 | """ | ||
469 | try: | ||
470 | _normalized_key(s) | ||
471 | return s # already rational | ||
472 | except UnsupportedVersionError: | ||
473 | pass | ||
474 | |||
475 | rs = s.lower() | ||
476 | |||
477 | # part of this could use maketrans | ||
478 | for orig, repl in (('-alpha', 'a'), ('-beta', 'b'), ('alpha', 'a'), | ||
479 | ('beta', 'b'), ('rc', 'c'), ('-final', ''), | ||
480 | ('-pre', 'c'), | ||
481 | ('-release', ''), ('.release', ''), ('-stable', ''), | ||
482 | ('+', '.'), ('_', '.'), (' ', ''), ('.final', ''), | ||
483 | ('final', '')): | ||
484 | rs = rs.replace(orig, repl) | ||
485 | |||
486 | # if something ends with dev or pre, we add a 0 | ||
487 | rs = re.sub(r"pre$", r"pre0", rs) | ||
488 | rs = re.sub(r"dev$", r"dev0", rs) | ||
489 | |||
490 | # if we have something like "b-2" or "a.2" at the end of the | ||
491 | # version, that is probably beta, alpha, etc | ||
492 | # let's remove the dash or dot | ||
493 | rs = re.sub(r"([abc]|rc)[\-\.](\d+)$", r"\1\2", rs) | ||
494 | |||
495 | # 1.0-dev-r371 -> 1.0.dev371 | ||
496 | # 0.1-dev-r79 -> 0.1.dev79 | ||
497 | rs = re.sub(r"[\-\.](dev)[\-\.]?r?(\d+)$", r".\1\2", rs) | ||
498 | |||
499 | # Clean: 2.0.a.3, 2.0.b1, 0.9.0~c1 | ||
500 | rs = re.sub(r"[.~]?([abc])\.?", r"\1", rs) | ||
501 | |||
502 | # Clean: v0.3, v1.0 | ||
503 | if rs.startswith('v'): | ||
504 | rs = rs[1:] | ||
505 | |||
506 | # Clean leading '0's on numbers. | ||
507 | #TODO: unintended side-effect on, e.g., "2003.05.09" | ||
508 | # PyPI stats: 77 (~2%) better | ||
509 | rs = re.sub(r"\b0+(\d+)(?!\d)", r"\1", rs) | ||
510 | |||
511 | # Clean a/b/c with no version. E.g. "1.0a" -> "1.0a0". Setuptools infers | ||
512 | # zero. | ||
513 | # PyPI stats: 245 (7.56%) better | ||
514 | rs = re.sub(r"(\d+[abc])$", r"\g<1>0", rs) | ||
515 | |||
516 | # the 'dev-rNNN' tag is a dev tag | ||
517 | rs = re.sub(r"\.?(dev-r|dev\.r)\.?(\d+)$", r".dev\2", rs) | ||
518 | |||
519 | # clean the - when used as a pre delimiter | ||
520 | rs = re.sub(r"-(a|b|c)(\d+)$", r"\1\2", rs) | ||
521 | |||
522 | # a terminal "dev" or "devel" can be changed into ".dev0" | ||
523 | rs = re.sub(r"[\.\-](dev|devel)$", r".dev0", rs) | ||
524 | |||
525 | # a bare terminal "dev" (not preceded by '.' or '-') also becomes ".dev0" | ||
526 | rs = re.sub(r"(?<![\.\-])dev$", r".dev0", rs) | ||
527 | |||
528 | # a terminal "final" or "stable" can be removed | ||
529 | rs = re.sub(r"(final|stable)$", "", rs) | ||
530 | |||
531 | # The 'r' and the '-' tags are post release tags | ||
532 | # 0.4a1.r10 -> 0.4a1.post10 | ||
533 | # 0.9.33-17222 -> 0.9.33.post17222 | ||
534 | # 0.9.33-r17222 -> 0.9.33.post17222 | ||
535 | rs = re.sub(r"\.?(r|-|-r)\.?(\d+)$", r".post\2", rs) | ||
536 | |||
537 | # Clean 'r' instead of 'dev' usage: | ||
538 | # 0.9.33+r17222 -> 0.9.33.dev17222 | ||
539 | # 1.0dev123 -> 1.0.dev123 | ||
540 | # 1.0.git123 -> 1.0.dev123 | ||
541 | # 1.0.bzr123 -> 1.0.dev123 | ||
542 | # 0.1a0dev.123 -> 0.1a0.dev123 | ||
543 | # PyPI stats: ~150 (~4%) better | ||
544 | rs = re.sub(r"\.?(dev|git|bzr)\.?(\d+)$", r".dev\2", rs) | ||
545 | |||
546 | # Clean '.pre' (normalized from '-pre' above) instead of 'c' usage: | ||
547 | # 0.2.pre1 -> 0.2c1 | ||
548 | # 0.2-c1 -> 0.2c1 | ||
549 | # 1.0preview123 -> 1.0c123 | ||
550 | # PyPI stats: ~21 (0.62%) better | ||
551 | rs = re.sub(r"\.?(pre|preview|-c)(\d+)$", r"c\g<2>", rs) | ||
552 | |||
553 | # Tcl/Tk uses "px" for their post release markers | ||
554 | rs = re.sub(r"p(\d+)$", r".post\1", rs) | ||
555 | |||
556 | try: | ||
557 | _normalized_key(rs) | ||
558 | except UnsupportedVersionError: | ||
559 | rs = None | ||
560 | return rs | ||
561 | |||
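For illustration, a few inputs and the outputs the rules above yield, assuming the module is importable as distlib.version:

    # Hypothetical driver for the private helper _suggest_normalized_version.
    from distlib.version import _suggest_normalized_version

    assert _suggest_normalized_version('1.0') == '1.0'            # already rational
    assert _suggest_normalized_version('1.0-alpha-2') == '1.0a2'  # '-alpha' -> 'a'
    assert _suggest_normalized_version('1.0-dev-r371') == '1.0.dev371'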
562 | # | ||
563 | # Legacy version processing (distribute-compatible) | ||
564 | # | ||
565 | |||
566 | _VERSION_PART = re.compile(r'([a-z]+|\d+|[\.-])', re.I) | ||
567 | _VERSION_REPLACE = { | ||
568 | 'pre': 'c', | ||
569 | 'preview': 'c', | ||
570 | '-': 'final-', | ||
571 | 'rc': 'c', | ||
572 | 'dev': '@', | ||
573 | '': None, | ||
574 | '.': None, | ||
575 | } | ||
576 | |||
577 | |||
578 | def _legacy_key(s): | ||
579 | def get_parts(s): | ||
580 | result = [] | ||
581 | for p in _VERSION_PART.split(s.lower()): | ||
582 | p = _VERSION_REPLACE.get(p, p) | ||
583 | if p: | ||
584 | if '0' <= p[:1] <= '9': | ||
585 | p = p.zfill(8) | ||
586 | else: | ||
587 | p = '*' + p | ||
588 | result.append(p) | ||
589 | result.append('*final') | ||
590 | return result | ||
591 | |||
592 | result = [] | ||
593 | for p in get_parts(s): | ||
594 | if p.startswith('*'): | ||
595 | if p < '*final': | ||
596 | while result and result[-1] == '*final-': | ||
597 | result.pop() | ||
598 | while result and result[-1] == '00000000': | ||
599 | result.pop() | ||
600 | result.append(p) | ||
601 | return tuple(result) | ||
602 | |||
603 | |||
604 | class LegacyVersion(Version): | ||
605 | def parse(self, s): | ||
606 | return _legacy_key(s) | ||
607 | |||
608 | @property | ||
609 | def is_prerelease(self): | ||
610 | result = False | ||
611 | for x in self._parts: | ||
612 | if (isinstance(x, string_types) and x.startswith('*') and | ||
613 | x < '*final'): | ||
614 | result = True | ||
615 | break | ||
616 | return result | ||
617 | |||
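A short sketch of the ordering the legacy key produces: alphabetic parts get a '*' prefix, which sorts below zero-padded numerals, so pre-release segments compare lower than the bare release (assuming distlib.version is importable):

    from distlib.version import LegacyVersion

    assert LegacyVersion('1.0a1') < LegacyVersion('1.0')   # '*a' < '00000000'
    assert LegacyVersion('1.0a1').is_prerelease
    assert not LegacyVersion('1.0').is_prerelease          # only '*final' remains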
618 | |||
619 | class LegacyMatcher(Matcher): | ||
620 | version_class = LegacyVersion | ||
621 | |||
622 | _operators = dict(Matcher._operators) | ||
623 | _operators['~='] = '_match_compatible' | ||
624 | |||
625 | numeric_re = re.compile(r'^(\d+(\.\d+)*)') | ||
626 | |||
627 | def _match_compatible(self, version, constraint, prefix): | ||
628 | if version < constraint: | ||
629 | return False | ||
630 | m = self.numeric_re.match(str(constraint)) | ||
631 | if not m: | ||
632 | logger.warning('Cannot compute compatible match for version %s ' | ||
633 | 'and constraint %s', version, constraint) | ||
634 | return True | ||
635 | s = m.groups()[0] | ||
636 | if '.' in s: | ||
637 | s = s.rsplit('.', 1)[0] | ||
638 | return _match_prefix(version, s) | ||
639 | |||
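A sketch of '~=' under the legacy scheme: the candidate must be at least the constraint and share its release prefix (the constraint minus its last numeric component). This assumes the 'name (constraints)' requirement form used elsewhere in this module:

    from distlib.version import LegacyMatcher

    m = LegacyMatcher('foo (~= 1.4)')
    assert m.match('1.4.2')       # >= 1.4 and within the '1.' prefix
    assert not m.match('1.3')     # below the constraint
    assert not m.match('2.0')     # outside the '1.' prefix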
640 | # | ||
641 | # Semantic versioning | ||
642 | # | ||
643 | |||
644 | _SEMVER_RE = re.compile(r'^(\d+)\.(\d+)\.(\d+)' | ||
645 | r'(-[a-z0-9]+(\.[a-z0-9-]+)*)?' | ||
646 | r'(\+[a-z0-9]+(\.[a-z0-9-]+)*)?$', re.I) | ||
647 | |||
648 | |||
649 | def is_semver(s): | ||
650 | return _SEMVER_RE.match(s) | ||
651 | |||
652 | |||
653 | def _semantic_key(s): | ||
654 | def make_tuple(s, absent): | ||
655 | if s is None: | ||
656 | result = (absent,) | ||
657 | else: | ||
658 | parts = s[1:].split('.') | ||
659 | # We can't compare ints and strings on Python 3, so fudge it | ||
660 | # by zero-filling numeric values to simulate a numeric comparison | ||
661 | result = tuple([p.zfill(8) if p.isdigit() else p for p in parts]) | ||
662 | return result | ||
663 | |||
664 | m = is_semver(s) | ||
665 | if not m: | ||
666 | raise UnsupportedVersionError(s) | ||
667 | groups = m.groups() | ||
668 | major, minor, patch = [int(i) for i in groups[:3]] | ||
669 | # choose the '|' and '*' so that versions sort correctly | ||
670 | pre, build = make_tuple(groups[3], '|'), make_tuple(groups[5], '*') | ||
671 | return (major, minor, patch), pre, build | ||
672 | |||
673 | |||
674 | class SemanticVersion(Version): | ||
675 | def parse(self, s): | ||
676 | return _semantic_key(s) | ||
677 | |||
678 | @property | ||
679 | def is_prerelease(self): | ||
680 | return self._parts[1][0] != '|' | ||
681 | |||
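A sketch of semantic ordering: an absent pre-release is encoded as '|', which sorts above any lowercase identifier, so releases outrank their pre-releases (assuming distlib.version is importable):

    from distlib.version import SemanticVersion

    assert SemanticVersion('1.0.0-alpha') < SemanticVersion('1.0.0')
    assert SemanticVersion('1.0.0-alpha').is_prerelease
    assert not SemanticVersion('1.0.0+build.1').is_prerelease   # build metadata only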
682 | |||
683 | class SemanticMatcher(Matcher): | ||
684 | version_class = SemanticVersion | ||
685 | |||
686 | |||
687 | class VersionScheme(object): | ||
688 | def __init__(self, key, matcher, suggester=None): | ||
689 | self.key = key | ||
690 | self.matcher = matcher | ||
691 | self.suggester = suggester | ||
692 | |||
693 | def is_valid_version(self, s): | ||
694 | try: | ||
695 | self.matcher.version_class(s) | ||
696 | result = True | ||
697 | except UnsupportedVersionError: | ||
698 | result = False | ||
699 | return result | ||
700 | |||
701 | def is_valid_matcher(self, s): | ||
702 | try: | ||
703 | self.matcher(s) | ||
704 | result = True | ||
705 | except UnsupportedVersionError: | ||
706 | result = False | ||
707 | return result | ||
708 | |||
709 | def is_valid_constraint_list(self, s): | ||
710 | """ | ||
711 | Used for processing some metadata fields | ||
712 | """ | ||
713 | return self.is_valid_matcher('dummy_name (%s)' % s) | ||
714 | |||
715 | def suggest(self, s): | ||
716 | if self.suggester is None: | ||
717 | result = None | ||
718 | else: | ||
719 | result = self.suggester(s) | ||
720 | return result | ||
721 | |||
722 | _SCHEMES = { | ||
723 | 'normalized': VersionScheme(_normalized_key, NormalizedMatcher, | ||
724 | _suggest_normalized_version), | ||
725 | 'legacy': VersionScheme(_legacy_key, LegacyMatcher, lambda s: s), | ||
726 | 'semantic': VersionScheme(_semantic_key, SemanticMatcher, | ||
727 | _suggest_semantic_version), | ||
728 | } | ||
729 | |||
730 | _SCHEMES['default'] = _SCHEMES['normalized'] | ||
731 | |||
732 | |||
733 | def get_scheme(name): | ||
734 | if name not in _SCHEMES: | ||
735 | raise ValueError('unknown scheme name: %r' % name) | ||
736 | return _SCHEMES[name] | ||
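Usage sketch for the registry; the expected results assume the normalized (PEP 440 style) scheme behaves as described above:

    from distlib.version import get_scheme

    scheme = get_scheme('default')               # alias for 'normalized'
    assert scheme.is_valid_version('1.0.post1')
    assert not scheme.is_valid_version('1.0 beta')
    assert scheme.suggest('1.0-alpha-2') == '1.0a2'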
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/w32.exe b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/w32.exe new file mode 100755 index 0000000..732215a --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/w32.exe | |||
Binary files differ | |||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/w64.exe b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/w64.exe new file mode 100755 index 0000000..c41bd0a --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/w64.exe | |||
Binary files differ | |||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/wheel.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/wheel.py new file mode 100644 index 0000000..3693410 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/wheel.py | |||
@@ -0,0 +1,984 @@ | |||
1 | # -*- coding: utf-8 -*- | ||
2 | # | ||
3 | # Copyright (C) 2013-2017 Vinay Sajip. | ||
4 | # Licensed to the Python Software Foundation under a contributor agreement. | ||
5 | # See LICENSE.txt and CONTRIBUTORS.txt. | ||
6 | # | ||
7 | from __future__ import unicode_literals | ||
8 | |||
9 | import base64 | ||
10 | import codecs | ||
11 | import datetime | ||
12 | import distutils.util | ||
13 | from email import message_from_file | ||
14 | import hashlib | ||
15 | import imp | ||
16 | import json | ||
17 | import logging | ||
18 | import os | ||
19 | import posixpath | ||
20 | import re | ||
21 | import shutil | ||
22 | import sys | ||
23 | import tempfile | ||
24 | import zipfile | ||
25 | |||
26 | from . import __version__, DistlibException | ||
27 | from .compat import sysconfig, ZipFile, fsdecode, text_type, filter | ||
28 | from .database import InstalledDistribution | ||
29 | from .metadata import Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME | ||
30 | from .util import (FileOperator, convert_path, CSVReader, CSVWriter, Cache, | ||
31 | cached_property, get_cache_base, read_exports, tempdir) | ||
32 | from .version import NormalizedVersion, UnsupportedVersionError | ||
33 | |||
34 | logger = logging.getLogger(__name__) | ||
35 | |||
36 | cache = None # created when needed | ||
37 | |||
38 | if hasattr(sys, 'pypy_version_info'): # pragma: no cover | ||
39 | IMP_PREFIX = 'pp' | ||
40 | elif sys.platform.startswith('java'): # pragma: no cover | ||
41 | IMP_PREFIX = 'jy' | ||
42 | elif sys.platform == 'cli': # pragma: no cover | ||
43 | IMP_PREFIX = 'ip' | ||
44 | else: | ||
45 | IMP_PREFIX = 'cp' | ||
46 | |||
47 | VER_SUFFIX = sysconfig.get_config_var('py_version_nodot') | ||
48 | if not VER_SUFFIX: # pragma: no cover | ||
49 | VER_SUFFIX = '%s%s' % sys.version_info[:2] | ||
50 | PYVER = 'py' + VER_SUFFIX | ||
51 | IMPVER = IMP_PREFIX + VER_SUFFIX | ||
52 | |||
53 | ARCH = distutils.util.get_platform().replace('-', '_').replace('.', '_') | ||
54 | |||
55 | ABI = sysconfig.get_config_var('SOABI') | ||
56 | if ABI and ABI.startswith('cpython-'): | ||
57 | ABI = ABI.replace('cpython-', 'cp') | ||
58 | else: | ||
59 | def _derive_abi(): | ||
60 | parts = ['cp', VER_SUFFIX] | ||
61 | if sysconfig.get_config_var('Py_DEBUG'): | ||
62 | parts.append('d') | ||
63 | if sysconfig.get_config_var('WITH_PYMALLOC'): | ||
64 | parts.append('m') | ||
65 | if sysconfig.get_config_var('Py_UNICODE_SIZE') == 4: | ||
66 | parts.append('u') | ||
67 | return ''.join(parts) | ||
68 | ABI = _derive_abi() | ||
69 | del _derive_abi | ||
70 | |||
71 | FILENAME_RE = re.compile(r''' | ||
72 | (?P<nm>[^-]+) | ||
73 | -(?P<vn>\d+[^-]*) | ||
74 | (-(?P<bn>\d+[^-]*))? | ||
75 | -(?P<py>\w+\d+(\.\w+\d+)*) | ||
76 | -(?P<bi>\w+) | ||
77 | -(?P<ar>\w+(\.\w+)*) | ||
78 | \.whl$ | ||
79 | ''', re.IGNORECASE | re.VERBOSE) | ||
80 | |||
81 | NAME_VERSION_RE = re.compile(r''' | ||
82 | (?P<nm>[^-]+) | ||
83 | -(?P<vn>\d+[^-]*) | ||
84 | (-(?P<bn>\d+[^-]*))?$ | ||
85 | ''', re.IGNORECASE | re.VERBOSE) | ||
86 | |||
87 | SHEBANG_RE = re.compile(br'\s*#![^\r\n]*') | ||
88 | SHEBANG_DETAIL_RE = re.compile(br'^(\s*#!("[^"]+"|\S+))\s+(.*)$') | ||
89 | SHEBANG_PYTHON = b'#!python' | ||
90 | SHEBANG_PYTHONW = b'#!pythonw' | ||
91 | |||
92 | if os.sep == '/': | ||
93 | to_posix = lambda o: o | ||
94 | else: | ||
95 | to_posix = lambda o: o.replace(os.sep, '/') | ||
96 | |||
97 | |||
98 | class Mounter(object): | ||
99 | def __init__(self): | ||
100 | self.impure_wheels = {} | ||
101 | self.libs = {} | ||
102 | |||
103 | def add(self, pathname, extensions): | ||
104 | self.impure_wheels[pathname] = extensions | ||
105 | self.libs.update(extensions) | ||
106 | |||
107 | def remove(self, pathname): | ||
108 | extensions = self.impure_wheels.pop(pathname) | ||
109 | for k, v in extensions: | ||
110 | if k in self.libs: | ||
111 | del self.libs[k] | ||
112 | |||
113 | def find_module(self, fullname, path=None): | ||
114 | if fullname in self.libs: | ||
115 | result = self | ||
116 | else: | ||
117 | result = None | ||
118 | return result | ||
119 | |||
120 | def load_module(self, fullname): | ||
121 | if fullname in sys.modules: | ||
122 | result = sys.modules[fullname] | ||
123 | else: | ||
124 | if fullname not in self.libs: | ||
125 | raise ImportError('unable to find extension for %s' % fullname) | ||
126 | result = imp.load_dynamic(fullname, self.libs[fullname]) | ||
127 | result.__loader__ = self | ||
128 | parts = fullname.rsplit('.', 1) | ||
129 | if len(parts) > 1: | ||
130 | result.__package__ = parts[0] | ||
131 | return result | ||
132 | |||
133 | _hook = Mounter() | ||
134 | |||
135 | |||
136 | class Wheel(object): | ||
137 | """ | ||
138 | Class to build and install from Wheel files (PEP 427). | ||
139 | """ | ||
140 | |||
141 | wheel_version = (1, 1) | ||
142 | hash_kind = 'sha256' | ||
143 | |||
144 | def __init__(self, filename=None, sign=False, verify=False): | ||
145 | """ | ||
146 | Initialise an instance using a (valid) filename. | ||
147 | """ | ||
148 | self.sign = sign | ||
149 | self.should_verify = verify | ||
150 | self.buildver = '' | ||
151 | self.pyver = [PYVER] | ||
152 | self.abi = ['none'] | ||
153 | self.arch = ['any'] | ||
154 | self.dirname = os.getcwd() | ||
155 | if filename is None: | ||
156 | self.name = 'dummy' | ||
157 | self.version = '0.1' | ||
158 | self._filename = self.filename | ||
159 | else: | ||
160 | m = NAME_VERSION_RE.match(filename) | ||
161 | if m: | ||
162 | info = m.groupdict('') | ||
163 | self.name = info['nm'] | ||
164 | # Reinstate the local version separator | ||
165 | self.version = info['vn'].replace('_', '-') | ||
166 | self.buildver = info['bn'] | ||
167 | self._filename = self.filename | ||
168 | else: | ||
169 | dirname, filename = os.path.split(filename) | ||
170 | m = FILENAME_RE.match(filename) | ||
171 | if not m: | ||
172 | raise DistlibException('Invalid name or ' | ||
173 | 'filename: %r' % filename) | ||
174 | if dirname: | ||
175 | self.dirname = os.path.abspath(dirname) | ||
176 | self._filename = filename | ||
177 | info = m.groupdict('') | ||
178 | self.name = info['nm'] | ||
179 | self.version = info['vn'] | ||
180 | self.buildver = info['bn'] | ||
181 | self.pyver = info['py'].split('.') | ||
182 | self.abi = info['bi'].split('.') | ||
183 | self.arch = info['ar'].split('.') | ||
184 | |||
185 | @property | ||
186 | def filename(self): | ||
187 | """ | ||
188 | Build and return a filename from the various components. | ||
189 | """ | ||
190 | if self.buildver: | ||
191 | buildver = '-' + self.buildver | ||
192 | else: | ||
193 | buildver = '' | ||
194 | pyver = '.'.join(self.pyver) | ||
195 | abi = '.'.join(self.abi) | ||
196 | arch = '.'.join(self.arch) | ||
197 | # replace - with _ as a local version separator | ||
198 | version = self.version.replace('-', '_') | ||
199 | return '%s-%s%s-%s-%s-%s.whl' % (self.name, version, buildver, | ||
200 | pyver, abi, arch) | ||
201 | |||
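A sketch of filename parsing and re-assembly; the wheel named here is hypothetical and need not exist on disk, since the constructor only parses the name:

    from distlib.wheel import Wheel

    w = Wheel('foo-1.0-py3-none-any.whl')
    assert (w.name, w.version) == ('foo', '1.0')
    assert (w.pyver, w.abi, w.arch) == (['py3'], ['none'], ['any'])
    assert w.filename == 'foo-1.0-py3-none-any.whl'   # round-trips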
202 | @property | ||
203 | def exists(self): | ||
204 | path = os.path.join(self.dirname, self.filename) | ||
205 | return os.path.isfile(path) | ||
206 | |||
207 | @property | ||
208 | def tags(self): | ||
209 | for pyver in self.pyver: | ||
210 | for abi in self.abi: | ||
211 | for arch in self.arch: | ||
212 | yield pyver, abi, arch | ||
213 | |||
214 | @cached_property | ||
215 | def metadata(self): | ||
216 | pathname = os.path.join(self.dirname, self.filename) | ||
217 | name_ver = '%s-%s' % (self.name, self.version) | ||
218 | info_dir = '%s.dist-info' % name_ver | ||
219 | wrapper = codecs.getreader('utf-8') | ||
220 | with ZipFile(pathname, 'r') as zf: | ||
221 | wheel_metadata = self.get_wheel_metadata(zf) | ||
222 | wv = wheel_metadata['Wheel-Version'].split('.', 1) | ||
223 | file_version = tuple([int(i) for i in wv]) | ||
224 | if file_version < (1, 1): | ||
225 | fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME, 'METADATA'] | ||
226 | else: | ||
227 | fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME] | ||
228 | result = None | ||
229 | for fn in fns: | ||
230 | try: | ||
231 | metadata_filename = posixpath.join(info_dir, fn) | ||
232 | with zf.open(metadata_filename) as bf: | ||
233 | wf = wrapper(bf) | ||
234 | result = Metadata(fileobj=wf) | ||
235 | if result: | ||
236 | break | ||
237 | except KeyError: | ||
238 | pass | ||
239 | if not result: | ||
240 | raise ValueError('Invalid wheel, because metadata is ' | ||
241 | 'missing: looked in %s' % ', '.join(fns)) | ||
242 | return result | ||
243 | |||
244 | def get_wheel_metadata(self, zf): | ||
245 | name_ver = '%s-%s' % (self.name, self.version) | ||
246 | info_dir = '%s.dist-info' % name_ver | ||
247 | metadata_filename = posixpath.join(info_dir, 'WHEEL') | ||
248 | with zf.open(metadata_filename) as bf: | ||
249 | wf = codecs.getreader('utf-8')(bf) | ||
250 | message = message_from_file(wf) | ||
251 | return dict(message) | ||
252 | |||
253 | @cached_property | ||
254 | def info(self): | ||
255 | pathname = os.path.join(self.dirname, self.filename) | ||
256 | with ZipFile(pathname, 'r') as zf: | ||
257 | result = self.get_wheel_metadata(zf) | ||
258 | return result | ||
259 | |||
260 | def process_shebang(self, data): | ||
261 | m = SHEBANG_RE.match(data) | ||
262 | if m: | ||
263 | end = m.end() | ||
264 | shebang, data_after_shebang = data[:end], data[end:] | ||
265 | # Preserve any arguments after the interpreter | ||
266 | if b'pythonw' in shebang.lower(): | ||
267 | shebang_python = SHEBANG_PYTHONW | ||
268 | else: | ||
269 | shebang_python = SHEBANG_PYTHON | ||
270 | m = SHEBANG_DETAIL_RE.match(shebang) | ||
271 | if m: | ||
272 | args = b' ' + m.groups()[-1] | ||
273 | else: | ||
274 | args = b'' | ||
275 | shebang = shebang_python + args | ||
276 | data = shebang + data_after_shebang | ||
277 | else: | ||
278 | cr = data.find(b'\r') | ||
279 | lf = data.find(b'\n') | ||
280 | if cr < 0 or cr > lf: | ||
281 | term = b'\n' | ||
282 | else: | ||
283 | if data[cr:cr + 2] == b'\r\n': | ||
284 | term = b'\r\n' | ||
285 | else: | ||
286 | term = b'\r' | ||
287 | data = SHEBANG_PYTHON + term + data | ||
288 | return data | ||
289 | |||
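A sketch of the shebang rewrite; '#!python' is a placeholder that a script maker is expected to replace with the target interpreter at install time. The wheel name is hypothetical (the constructor only parses it):

    from distlib.wheel import Wheel

    w = Wheel('foo-1.0-py3-none-any.whl')
    data = b'#!/usr/bin/python\nprint("hi")\n'
    assert w.process_shebang(data) == b'#!python\nprint("hi")\n'
    # a script with no shebang gets one prepended, preserving line endings
    assert w.process_shebang(b'print("hi")\n') == b'#!python\nprint("hi")\n'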
290 | def get_hash(self, data, hash_kind=None): | ||
291 | if hash_kind is None: | ||
292 | hash_kind = self.hash_kind | ||
293 | try: | ||
294 | hasher = getattr(hashlib, hash_kind) | ||
295 | except AttributeError: | ||
296 | raise DistlibException('Unsupported hash algorithm: %r' % hash_kind) | ||
297 | result = hasher(data).digest() | ||
298 | result = base64.urlsafe_b64encode(result).rstrip(b'=').decode('ascii') | ||
299 | return hash_kind, result | ||
300 | |||
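The digests written to RECORD are unpadded urlsafe base64; a self-checking sketch (hypothetical wheel name, used only to get an instance):

    import base64, hashlib
    from distlib.wheel import Wheel

    w = Wheel('foo-1.0-py3-none-any.whl')
    kind, digest = w.get_hash(b'hello')
    assert kind == 'sha256'                      # the class default hash_kind
    expected = base64.urlsafe_b64encode(
        hashlib.sha256(b'hello').digest()).rstrip(b'=').decode('ascii')
    assert digest == expected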
301 | def write_record(self, records, record_path, base): | ||
302 | records = list(records) # make a copy for sorting | ||
303 | p = to_posix(os.path.relpath(record_path, base)) | ||
304 | records.append((p, '', '')) | ||
305 | records.sort() | ||
306 | with CSVWriter(record_path) as writer: | ||
307 | for row in records: | ||
308 | writer.writerow(row) | ||
309 | |||
310 | def write_records(self, info, libdir, archive_paths): | ||
311 | records = [] | ||
312 | distinfo, info_dir = info | ||
313 | hasher = getattr(hashlib, self.hash_kind) | ||
314 | for ap, p in archive_paths: | ||
315 | with open(p, 'rb') as f: | ||
316 | data = f.read() | ||
317 | digest = '%s=%s' % self.get_hash(data) | ||
318 | size = os.path.getsize(p) | ||
319 | records.append((ap, digest, size)) | ||
320 | |||
321 | p = os.path.join(distinfo, 'RECORD') | ||
322 | self.write_record(records, p, libdir) | ||
323 | ap = to_posix(os.path.join(info_dir, 'RECORD')) | ||
324 | archive_paths.append((ap, p)) | ||
325 | |||
326 | def build_zip(self, pathname, archive_paths): | ||
327 | with ZipFile(pathname, 'w', zipfile.ZIP_DEFLATED) as zf: | ||
328 | for ap, p in archive_paths: | ||
329 | logger.debug('Wrote %s to %s in wheel', p, ap) | ||
330 | zf.write(p, ap) | ||
331 | |||
332 | def build(self, paths, tags=None, wheel_version=None): | ||
333 | """ | ||
334 | Build a wheel from files in specified paths, and use any specified tags | ||
335 | when determining the name of the wheel. | ||
336 | """ | ||
337 | if tags is None: | ||
338 | tags = {} | ||
339 | |||
340 | libkey = list(filter(lambda o: o in paths, ('purelib', 'platlib')))[0] | ||
341 | if libkey == 'platlib': | ||
342 | is_pure = 'false' | ||
343 | default_pyver = [IMPVER] | ||
344 | default_abi = [ABI] | ||
345 | default_arch = [ARCH] | ||
346 | else: | ||
347 | is_pure = 'true' | ||
348 | default_pyver = [PYVER] | ||
349 | default_abi = ['none'] | ||
350 | default_arch = ['any'] | ||
351 | |||
352 | self.pyver = tags.get('pyver', default_pyver) | ||
353 | self.abi = tags.get('abi', default_abi) | ||
354 | self.arch = tags.get('arch', default_arch) | ||
355 | |||
356 | libdir = paths[libkey] | ||
357 | |||
358 | name_ver = '%s-%s' % (self.name, self.version) | ||
359 | data_dir = '%s.data' % name_ver | ||
360 | info_dir = '%s.dist-info' % name_ver | ||
361 | |||
362 | archive_paths = [] | ||
363 | |||
364 | # First, stuff which is not in site-packages | ||
365 | for key in ('data', 'headers', 'scripts'): | ||
366 | if key not in paths: | ||
367 | continue | ||
368 | path = paths[key] | ||
369 | if os.path.isdir(path): | ||
370 | for root, dirs, files in os.walk(path): | ||
371 | for fn in files: | ||
372 | p = fsdecode(os.path.join(root, fn)) | ||
373 | rp = os.path.relpath(p, path) | ||
374 | ap = to_posix(os.path.join(data_dir, key, rp)) | ||
375 | archive_paths.append((ap, p)) | ||
376 | if key == 'scripts' and not p.endswith('.exe'): | ||
377 | with open(p, 'rb') as f: | ||
378 | data = f.read() | ||
379 | data = self.process_shebang(data) | ||
380 | with open(p, 'wb') as f: | ||
381 | f.write(data) | ||
382 | |||
383 | # Now, stuff which is in site-packages, other than the | ||
384 | # distinfo stuff. | ||
385 | path = libdir | ||
386 | distinfo = None | ||
387 | for root, dirs, files in os.walk(path): | ||
388 | if root == path: | ||
389 | # At the top level only, save distinfo for later | ||
390 | # and skip it for now | ||
391 | for i, dn in enumerate(dirs): | ||
392 | dn = fsdecode(dn) | ||
393 | if dn.endswith('.dist-info'): | ||
394 | distinfo = os.path.join(root, dn) | ||
395 | del dirs[i] | ||
396 | break | ||
397 | assert distinfo, '.dist-info directory expected, not found' | ||
398 | |||
399 | for fn in files: | ||
400 | # comment out next suite to leave .pyc files in | ||
401 | if fsdecode(fn).endswith(('.pyc', '.pyo')): | ||
402 | continue | ||
403 | p = os.path.join(root, fn) | ||
404 | rp = to_posix(os.path.relpath(p, path)) | ||
405 | archive_paths.append((rp, p)) | ||
406 | |||
407 | # Now distinfo. Assumed to be flat, i.e. os.listdir is enough. | ||
408 | files = os.listdir(distinfo) | ||
409 | for fn in files: | ||
410 | if fn not in ('RECORD', 'INSTALLER', 'SHARED', 'WHEEL'): | ||
411 | p = fsdecode(os.path.join(distinfo, fn)) | ||
412 | ap = to_posix(os.path.join(info_dir, fn)) | ||
413 | archive_paths.append((ap, p)) | ||
414 | |||
415 | wheel_metadata = [ | ||
416 | 'Wheel-Version: %d.%d' % (wheel_version or self.wheel_version), | ||
417 | 'Generator: distlib %s' % __version__, | ||
418 | 'Root-Is-Purelib: %s' % is_pure, | ||
419 | ] | ||
420 | for pyver, abi, arch in self.tags: | ||
421 | wheel_metadata.append('Tag: %s-%s-%s' % (pyver, abi, arch)) | ||
422 | p = os.path.join(distinfo, 'WHEEL') | ||
423 | with open(p, 'w') as f: | ||
424 | f.write('\n'.join(wheel_metadata)) | ||
425 | ap = to_posix(os.path.join(info_dir, 'WHEEL')) | ||
426 | archive_paths.append((ap, p)) | ||
427 | |||
428 | # Now, at last, RECORD. | ||
429 | # Paths in here are archive paths - nothing else makes sense. | ||
430 | self.write_records((distinfo, info_dir), libdir, archive_paths) | ||
431 | # Now, ready to build the zip file | ||
432 | pathname = os.path.join(self.dirname, self.filename) | ||
433 | self.build_zip(pathname, archive_paths) | ||
434 | return pathname | ||
435 | |||
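A hedged sketch of driving build(); 'mypkg' and the directory layout are assumptions. Note the assert above: paths['purelib'] (or 'platlib') must already contain a mypkg-0.1.dist-info directory, so the final call is shown commented out:

    from distlib.wheel import Wheel

    w = Wheel()
    w.name = 'mypkg'
    w.version = '0.1'
    w.dirname = '/tmp/dist'                 # where the .whl will be written
    paths = {'purelib': '/tmp/build/lib'}   # pure: py tag, abi none, arch any
    # whl = w.build(paths)                  # -> /tmp/dist/mypkg-0.1-pyXY-none-any.whl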
436 | def install(self, paths, maker, **kwargs): | ||
437 | """ | ||
438 | Install a wheel to the specified paths. If kwarg ``warner`` is | ||
439 | specified, it should be a callable, which will be called with two | ||
440 | tuples indicating the wheel version of this software and the wheel | ||
441 | version in the file, if there is a discrepancy in the versions. | ||
442 | This can be used to issue any warnings or raise any exceptions. | ||
443 | If kwarg ``lib_only`` is True, only the purelib/platlib files are | ||
444 | installed, and the headers, scripts, data and dist-info metadata are | ||
445 | not written. | ||
446 | |||
447 | The return value is a :class:`InstalledDistribution` instance unless | ||
448 | ``lib_only`` is True, in which case the return value is ``None``. | ||
449 | """ | ||
450 | |||
451 | dry_run = maker.dry_run | ||
452 | warner = kwargs.get('warner') | ||
453 | lib_only = kwargs.get('lib_only', False) | ||
454 | |||
455 | pathname = os.path.join(self.dirname, self.filename) | ||
456 | name_ver = '%s-%s' % (self.name, self.version) | ||
457 | data_dir = '%s.data' % name_ver | ||
458 | info_dir = '%s.dist-info' % name_ver | ||
459 | |||
460 | metadata_name = posixpath.join(info_dir, METADATA_FILENAME) | ||
461 | wheel_metadata_name = posixpath.join(info_dir, 'WHEEL') | ||
462 | record_name = posixpath.join(info_dir, 'RECORD') | ||
463 | |||
464 | wrapper = codecs.getreader('utf-8') | ||
465 | |||
466 | with ZipFile(pathname, 'r') as zf: | ||
467 | with zf.open(wheel_metadata_name) as bwf: | ||
468 | wf = wrapper(bwf) | ||
469 | message = message_from_file(wf) | ||
470 | wv = message['Wheel-Version'].split('.', 1) | ||
471 | file_version = tuple([int(i) for i in wv]) | ||
472 | if (file_version != self.wheel_version) and warner: | ||
473 | warner(self.wheel_version, file_version) | ||
474 | |||
475 | if message['Root-Is-Purelib'] == 'true': | ||
476 | libdir = paths['purelib'] | ||
477 | else: | ||
478 | libdir = paths['platlib'] | ||
479 | |||
480 | records = {} | ||
481 | with zf.open(record_name) as bf: | ||
482 | with CSVReader(stream=bf) as reader: | ||
483 | for row in reader: | ||
484 | p = row[0] | ||
485 | records[p] = row | ||
486 | |||
487 | data_pfx = posixpath.join(data_dir, '') | ||
488 | info_pfx = posixpath.join(info_dir, '') | ||
489 | script_pfx = posixpath.join(data_dir, 'scripts', '') | ||
490 | |||
491 | # make a new instance rather than a copy of maker's, | ||
492 | # as we mutate it | ||
493 | fileop = FileOperator(dry_run=dry_run) | ||
494 | fileop.record = True # so we can rollback if needed | ||
495 | |||
496 | bc = not sys.dont_write_bytecode # Double negatives. Lovely! | ||
497 | |||
498 | outfiles = [] # for RECORD writing | ||
499 | |||
500 | # for script copying/shebang processing | ||
501 | workdir = tempfile.mkdtemp() | ||
502 | # set target dir later | ||
503 | # we default add_launchers to False, as the | ||
504 | # Python Launcher should be used instead | ||
505 | maker.source_dir = workdir | ||
506 | maker.target_dir = None | ||
507 | try: | ||
508 | for zinfo in zf.infolist(): | ||
509 | arcname = zinfo.filename | ||
510 | if isinstance(arcname, text_type): | ||
511 | u_arcname = arcname | ||
512 | else: | ||
513 | u_arcname = arcname.decode('utf-8') | ||
514 | # The signature file won't be in RECORD, | ||
515 | # and we don't currently do anything with it | ||
516 | if u_arcname.endswith('/RECORD.jws'): | ||
517 | continue | ||
518 | row = records[u_arcname] | ||
519 | if row[2] and str(zinfo.file_size) != row[2]: | ||
520 | raise DistlibException('size mismatch for ' | ||
521 | '%s' % u_arcname) | ||
522 | if row[1]: | ||
523 | kind, value = row[1].split('=', 1) | ||
524 | with zf.open(arcname) as bf: | ||
525 | data = bf.read() | ||
526 | _, digest = self.get_hash(data, kind) | ||
527 | if digest != value: | ||
528 | raise DistlibException('digest mismatch for ' | ||
529 | '%s' % arcname) | ||
530 | |||
531 | if lib_only and u_arcname.startswith((info_pfx, data_pfx)): | ||
532 | logger.debug('lib_only: skipping %s', u_arcname) | ||
533 | continue | ||
534 | is_script = (u_arcname.startswith(script_pfx) | ||
535 | and not u_arcname.endswith('.exe')) | ||
536 | |||
537 | if u_arcname.startswith(data_pfx): | ||
538 | _, where, rp = u_arcname.split('/', 2) | ||
539 | outfile = os.path.join(paths[where], convert_path(rp)) | ||
540 | else: | ||
541 | # meant for site-packages. | ||
542 | if u_arcname in (wheel_metadata_name, record_name): | ||
543 | continue | ||
544 | outfile = os.path.join(libdir, convert_path(u_arcname)) | ||
545 | if not is_script: | ||
546 | with zf.open(arcname) as bf: | ||
547 | fileop.copy_stream(bf, outfile) | ||
548 | outfiles.append(outfile) | ||
549 | # Double check the digest of the written file | ||
550 | if not dry_run and row[1]: | ||
551 | with open(outfile, 'rb') as bf: | ||
552 | data = bf.read() | ||
553 | _, newdigest = self.get_hash(data, kind) | ||
554 | if newdigest != digest: | ||
555 | raise DistlibException('digest mismatch ' | ||
556 | 'on write for ' | ||
557 | '%s' % outfile) | ||
558 | if bc and outfile.endswith('.py'): | ||
559 | try: | ||
560 | pyc = fileop.byte_compile(outfile) | ||
561 | outfiles.append(pyc) | ||
562 | except Exception: | ||
563 | # Don't give up if byte-compilation fails, | ||
564 | # but log it and perhaps warn the user | ||
565 | logger.warning('Byte-compilation failed', | ||
566 | exc_info=True) | ||
567 | else: | ||
568 | fn = os.path.basename(convert_path(arcname)) | ||
569 | workname = os.path.join(workdir, fn) | ||
570 | with zf.open(arcname) as bf: | ||
571 | fileop.copy_stream(bf, workname) | ||
572 | |||
573 | dn, fn = os.path.split(outfile) | ||
574 | maker.target_dir = dn | ||
575 | filenames = maker.make(fn) | ||
576 | fileop.set_executable_mode(filenames) | ||
577 | outfiles.extend(filenames) | ||
578 | |||
579 | if lib_only: | ||
580 | logger.debug('lib_only: returning None') | ||
581 | dist = None | ||
582 | else: | ||
583 | # Generate scripts | ||
584 | |||
585 | # Try to get pydist.json so we can see if there are | ||
586 | # any commands to generate. If this fails (e.g. because | ||
587 | # of a legacy wheel), log a warning but don't give up. | ||
588 | commands = None | ||
589 | file_version = self.info['Wheel-Version'] | ||
590 | if file_version == '1.0': | ||
591 | # Use legacy info | ||
592 | ep = posixpath.join(info_dir, 'entry_points.txt') | ||
593 | try: | ||
594 | with zf.open(ep) as bwf: | ||
595 | epdata = read_exports(bwf) | ||
596 | commands = {} | ||
597 | for key in ('console', 'gui'): | ||
598 | k = '%s_scripts' % key | ||
599 | if k in epdata: | ||
600 | commands['wrap_%s' % key] = d = {} | ||
601 | for v in epdata[k].values(): | ||
602 | s = '%s:%s' % (v.prefix, v.suffix) | ||
603 | if v.flags: | ||
604 | s += ' %s' % v.flags | ||
605 | d[v.name] = s | ||
606 | except Exception: | ||
607 | logger.warning('Unable to read legacy script ' | ||
608 | 'metadata, so cannot generate ' | ||
609 | 'scripts') | ||
610 | else: | ||
611 | try: | ||
612 | with zf.open(metadata_name) as bwf: | ||
613 | wf = wrapper(bwf) | ||
614 | commands = json.load(wf).get('extensions') | ||
615 | if commands: | ||
616 | commands = commands.get('python.commands') | ||
617 | except Exception: | ||
618 | logger.warning('Unable to read JSON metadata, so ' | ||
619 | 'cannot generate scripts') | ||
620 | if commands: | ||
621 | console_scripts = commands.get('wrap_console', {}) | ||
622 | gui_scripts = commands.get('wrap_gui', {}) | ||
623 | if console_scripts or gui_scripts: | ||
624 | script_dir = paths.get('scripts', '') | ||
625 | if not os.path.isdir(script_dir): | ||
626 | raise ValueError('Valid script path not ' | ||
627 | 'specified') | ||
628 | maker.target_dir = script_dir | ||
629 | for k, v in console_scripts.items(): | ||
630 | script = '%s = %s' % (k, v) | ||
631 | filenames = maker.make(script) | ||
632 | fileop.set_executable_mode(filenames) | ||
633 | |||
634 | if gui_scripts: | ||
635 | options = {'gui': True} | ||
636 | for k, v in gui_scripts.items(): | ||
637 | script = '%s = %s' % (k, v) | ||
638 | filenames = maker.make(script, options) | ||
639 | fileop.set_executable_mode(filenames) | ||
640 | |||
641 | p = os.path.join(libdir, info_dir) | ||
642 | dist = InstalledDistribution(p) | ||
643 | |||
644 | # Write SHARED | ||
645 | paths = dict(paths) # don't change passed in dict | ||
646 | del paths['purelib'] | ||
647 | del paths['platlib'] | ||
648 | paths['lib'] = libdir | ||
649 | p = dist.write_shared_locations(paths, dry_run) | ||
650 | if p: | ||
651 | outfiles.append(p) | ||
652 | |||
653 | # Write RECORD | ||
654 | dist.write_installed_files(outfiles, paths['prefix'], | ||
655 | dry_run) | ||
656 | return dist | ||
657 | except Exception: # pragma: no cover | ||
658 | logger.exception('installation failed.') | ||
659 | fileop.rollback() | ||
660 | raise | ||
661 | finally: | ||
662 | shutil.rmtree(workdir) | ||
663 | |||
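A hedged sketch of an install driver. The paths are illustrative; ScriptMaker comes from distlib.scripts (install() overwrites its source/target dirs), and the final call is commented out because it needs a real wheel on disk:

    from distlib.scripts import ScriptMaker
    from distlib.wheel import Wheel

    w = Wheel('/tmp/dist/mypkg-0.1-py3-none-any.whl')   # hypothetical path
    paths = {'prefix':  '/tmp/target',
             'purelib': '/tmp/target/lib',
             'platlib': '/tmp/target/lib',
             'scripts': '/tmp/target/bin',
             'headers': '/tmp/target/include',
             'data':    '/tmp/target/data'}
    maker = ScriptMaker(None, None)
    # dist = w.install(paths, maker)   # InstalledDistribution, or None if lib_only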
664 | def _get_dylib_cache(self): | ||
665 | global cache | ||
666 | if cache is None: | ||
667 | # Use native string to avoid issues on 2.x: see Python #20140. | ||
668 | base = os.path.join(get_cache_base(), str('dylib-cache'), | ||
669 | sys.version[:3]) | ||
670 | cache = Cache(base) | ||
671 | return cache | ||
672 | |||
673 | def _get_extensions(self): | ||
674 | pathname = os.path.join(self.dirname, self.filename) | ||
675 | name_ver = '%s-%s' % (self.name, self.version) | ||
676 | info_dir = '%s.dist-info' % name_ver | ||
677 | arcname = posixpath.join(info_dir, 'EXTENSIONS') | ||
678 | wrapper = codecs.getreader('utf-8') | ||
679 | result = [] | ||
680 | with ZipFile(pathname, 'r') as zf: | ||
681 | try: | ||
682 | with zf.open(arcname) as bf: | ||
683 | wf = wrapper(bf) | ||
684 | extensions = json.load(wf) | ||
685 | cache = self._get_dylib_cache() | ||
686 | prefix = cache.prefix_to_dir(pathname) | ||
687 | cache_base = os.path.join(cache.base, prefix) | ||
688 | if not os.path.isdir(cache_base): | ||
689 | os.makedirs(cache_base) | ||
690 | for name, relpath in extensions.items(): | ||
691 | dest = os.path.join(cache_base, convert_path(relpath)) | ||
692 | if not os.path.exists(dest): | ||
693 | extract = True | ||
694 | else: | ||
695 | file_time = os.stat(dest).st_mtime | ||
696 | file_time = datetime.datetime.fromtimestamp(file_time) | ||
697 | info = zf.getinfo(relpath) | ||
698 | wheel_time = datetime.datetime(*info.date_time) | ||
699 | extract = wheel_time > file_time | ||
700 | if extract: | ||
701 | zf.extract(relpath, cache_base) | ||
702 | result.append((name, dest)) | ||
703 | except KeyError: | ||
704 | pass | ||
705 | return result | ||
706 | |||
707 | def is_compatible(self): | ||
708 | """ | ||
709 | Determine if a wheel is compatible with the running system. | ||
710 | """ | ||
711 | return is_compatible(self) | ||
712 | |||
713 | def is_mountable(self): | ||
714 | """ | ||
715 | Determine if a wheel is asserted as mountable by its metadata. | ||
716 | """ | ||
717 | return True # for now - metadata details TBD | ||
718 | |||
719 | def mount(self, append=False): | ||
720 | pathname = os.path.abspath(os.path.join(self.dirname, self.filename)) | ||
721 | if not self.is_compatible(): | ||
722 | msg = 'Wheel %s not compatible with this Python.' % pathname | ||
723 | raise DistlibException(msg) | ||
724 | if not self.is_mountable(): | ||
725 | msg = 'Wheel %s is marked as not mountable.' % pathname | ||
726 | raise DistlibException(msg) | ||
727 | if pathname in sys.path: | ||
728 | logger.debug('%s already in path', pathname) | ||
729 | else: | ||
730 | if append: | ||
731 | sys.path.append(pathname) | ||
732 | else: | ||
733 | sys.path.insert(0, pathname) | ||
734 | extensions = self._get_extensions() | ||
735 | if extensions: | ||
736 | if _hook not in sys.meta_path: | ||
737 | sys.meta_path.append(_hook) | ||
738 | _hook.add(pathname, extensions) | ||
739 | |||
740 | def unmount(self): | ||
741 | pathname = os.path.abspath(os.path.join(self.dirname, self.filename)) | ||
742 | if pathname not in sys.path: | ||
743 | logger.debug('%s not in path', pathname) | ||
744 | else: | ||
745 | sys.path.remove(pathname) | ||
746 | if pathname in _hook.impure_wheels: | ||
747 | _hook.remove(pathname) | ||
748 | if not _hook.impure_wheels: | ||
749 | if _hook in sys.meta_path: | ||
750 | sys.meta_path.remove(_hook) | ||
751 | |||
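A sketch of the mount/unmount round trip (calls commented out, as they mutate sys.path and need a real, compatible wheel):

    from distlib.wheel import Wheel

    w = Wheel('/tmp/dist/mypkg-0.1-py3-none-any.whl')   # hypothetical
    # w.mount()        # prepends to sys.path; append=True appends instead
    # import mypkg     # importable while mounted; C extensions via the hook
    # w.unmount()      # removes the path entry and the hook if now unused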
752 | def verify(self): | ||
753 | pathname = os.path.join(self.dirname, self.filename) | ||
754 | name_ver = '%s-%s' % (self.name, self.version) | ||
755 | data_dir = '%s.data' % name_ver | ||
756 | info_dir = '%s.dist-info' % name_ver | ||
757 | |||
758 | metadata_name = posixpath.join(info_dir, METADATA_FILENAME) | ||
759 | wheel_metadata_name = posixpath.join(info_dir, 'WHEEL') | ||
760 | record_name = posixpath.join(info_dir, 'RECORD') | ||
761 | |||
762 | wrapper = codecs.getreader('utf-8') | ||
763 | |||
764 | with ZipFile(pathname, 'r') as zf: | ||
765 | with zf.open(wheel_metadata_name) as bwf: | ||
766 | wf = wrapper(bwf) | ||
767 | message = message_from_file(wf) | ||
768 | wv = message['Wheel-Version'].split('.', 1) | ||
769 | file_version = tuple([int(i) for i in wv]) | ||
770 | # TODO version verification | ||
771 | |||
772 | records = {} | ||
773 | with zf.open(record_name) as bf: | ||
774 | with CSVReader(stream=bf) as reader: | ||
775 | for row in reader: | ||
776 | p = row[0] | ||
777 | records[p] = row | ||
778 | |||
779 | for zinfo in zf.infolist(): | ||
780 | arcname = zinfo.filename | ||
781 | if isinstance(arcname, text_type): | ||
782 | u_arcname = arcname | ||
783 | else: | ||
784 | u_arcname = arcname.decode('utf-8') | ||
785 | if '..' in u_arcname: | ||
786 | raise DistlibException('invalid entry in ' | ||
787 | 'wheel: %r' % u_arcname) | ||
788 | |||
789 | # The signature file won't be in RECORD, | ||
790 | # and we don't currently do anything with it | ||
791 | if u_arcname.endswith('/RECORD.jws'): | ||
792 | continue | ||
793 | row = records[u_arcname] | ||
794 | if row[2] and str(zinfo.file_size) != row[2]: | ||
795 | raise DistlibException('size mismatch for ' | ||
796 | '%s' % u_arcname) | ||
797 | if row[1]: | ||
798 | kind, value = row[1].split('=', 1) | ||
799 | with zf.open(arcname) as bf: | ||
800 | data = bf.read() | ||
801 | _, digest = self.get_hash(data, kind) | ||
802 | if digest != value: | ||
803 | raise DistlibException('digest mismatch for ' | ||
804 | '%s' % arcname) | ||
805 | |||
806 | def update(self, modifier, dest_dir=None, **kwargs): | ||
807 | """ | ||
808 | Update the contents of a wheel in a generic way. The modifier should | ||
809 | be a callable which expects a dictionary argument: its keys are | ||
810 | archive-entry paths, and its values are absolute filesystem paths | ||
811 | where the contents of the corresponding archive entries can be found. The | ||
812 | modifier is free to change the contents of the files pointed to, add | ||
813 | new entries and remove entries, before returning. This method will | ||
814 | extract the entire contents of the wheel to a temporary location, call | ||
815 | the modifier, and then use the passed (and possibly updated) | ||
816 | dictionary to write a new wheel. If ``dest_dir`` is specified, the new | ||
817 | wheel is written there -- otherwise, the original wheel is overwritten. | ||
818 | |||
819 | The modifier should return True if it updated the wheel, else False. | ||
820 | This method returns the same value the modifier returns. | ||
821 | """ | ||
822 | |||
823 | def get_version(path_map, info_dir): | ||
824 | version = path = None | ||
825 | key = '%s/%s' % (info_dir, METADATA_FILENAME) | ||
826 | if key not in path_map: | ||
827 | key = '%s/PKG-INFO' % info_dir | ||
828 | if key in path_map: | ||
829 | path = path_map[key] | ||
830 | version = Metadata(path=path).version | ||
831 | return version, path | ||
832 | |||
833 | def update_version(version, path): | ||
834 | updated = None | ||
835 | try: | ||
836 | v = NormalizedVersion(version) | ||
837 | i = version.find('-') | ||
838 | if i < 0: | ||
839 | updated = '%s+1' % version | ||
840 | else: | ||
841 | parts = [int(s) for s in version[i + 1:].split('.')] | ||
842 | parts[-1] += 1 | ||
843 | updated = '%s+%s' % (version[:i], | ||
844 | '.'.join(str(i) for i in parts)) | ||
845 | except UnsupportedVersionError: | ||
846 | logger.debug('Cannot update non-compliant (PEP-440) ' | ||
847 | 'version %r', version) | ||
848 | if updated: | ||
849 | md = Metadata(path=path) | ||
850 | md.version = updated | ||
851 | legacy = not path.endswith(METADATA_FILENAME) | ||
852 | md.write(path=path, legacy=legacy) | ||
853 | logger.debug('Version updated from %r to %r', version, | ||
854 | updated) | ||
855 | |||
856 | pathname = os.path.join(self.dirname, self.filename) | ||
857 | name_ver = '%s-%s' % (self.name, self.version) | ||
858 | info_dir = '%s.dist-info' % name_ver | ||
859 | record_name = posixpath.join(info_dir, 'RECORD') | ||
860 | with tempdir() as workdir: | ||
861 | with ZipFile(pathname, 'r') as zf: | ||
862 | path_map = {} | ||
863 | for zinfo in zf.infolist(): | ||
864 | arcname = zinfo.filename | ||
865 | if isinstance(arcname, text_type): | ||
866 | u_arcname = arcname | ||
867 | else: | ||
868 | u_arcname = arcname.decode('utf-8') | ||
869 | if u_arcname == record_name: | ||
870 | continue | ||
871 | if '..' in u_arcname: | ||
872 | raise DistlibException('invalid entry in ' | ||
873 | 'wheel: %r' % u_arcname) | ||
874 | zf.extract(zinfo, workdir) | ||
875 | path = os.path.join(workdir, convert_path(u_arcname)) | ||
876 | path_map[u_arcname] = path | ||
877 | |||
878 | # Remember the version. | ||
879 | original_version, _ = get_version(path_map, info_dir) | ||
880 | # Files extracted. Call the modifier. | ||
881 | modified = modifier(path_map, **kwargs) | ||
882 | if modified: | ||
883 | # Something changed - need to build a new wheel. | ||
884 | current_version, path = get_version(path_map, info_dir) | ||
885 | if current_version and (current_version == original_version): | ||
886 | # Add or update local version to signify changes. | ||
887 | update_version(current_version, path) | ||
888 | # Decide where the new wheel goes. | ||
889 | if dest_dir is None: | ||
890 | fd, newpath = tempfile.mkstemp(suffix='.whl', | ||
891 | prefix='wheel-update-', | ||
892 | dir=workdir) | ||
893 | os.close(fd) | ||
894 | else: | ||
895 | if not os.path.isdir(dest_dir): | ||
896 | raise DistlibException('Not a directory: %r' % dest_dir) | ||
897 | newpath = os.path.join(dest_dir, self.filename) | ||
898 | archive_paths = list(path_map.items()) | ||
899 | distinfo = os.path.join(workdir, info_dir) | ||
900 | info = distinfo, info_dir | ||
901 | self.write_records(info, workdir, archive_paths) | ||
902 | self.build_zip(newpath, archive_paths) | ||
903 | if dest_dir is None: | ||
904 | shutil.copyfile(newpath, pathname) | ||
905 | return modified | ||
906 | |||
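A hedged sketch of an update() modifier; every name below is an assumption for illustration. The modifier mutates extracted files in place and returns whether anything changed:

    from distlib.wheel import Wheel

    def add_patch_marker(path_map, **kwargs):
        # path_map maps archive entry -> extracted filesystem path
        target = 'mypkg/defaults.cfg'          # hypothetical archive entry
        if target not in path_map:
            return False                       # nothing changed, no rebuild
        with open(path_map[target], 'a') as f:
            f.write('\npatched = true\n')
        return True                            # triggers rebuild + version bump

    # w = Wheel('/tmp/dist/mypkg-0.1-py3-none-any.whl')
    # w.update(add_patch_marker, dest_dir='/tmp/patched')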
907 | def compatible_tags(): | ||
908 | """ | ||
909 | Return (pyver, abi, arch) tuples compatible with this Python. | ||
910 | """ | ||
911 | versions = [VER_SUFFIX] | ||
912 | major = VER_SUFFIX[0] | ||
913 | for minor in range(sys.version_info[1] - 1, -1, -1): | ||
914 | versions.append(''.join([major, str(minor)])) | ||
915 | |||
916 | abis = [] | ||
917 | for suffix, _, _ in imp.get_suffixes(): | ||
918 | if suffix.startswith('.abi'): | ||
919 | abis.append(suffix.split('.', 2)[1]) | ||
920 | abis.sort() | ||
921 | if ABI != 'none': | ||
922 | abis.insert(0, ABI) | ||
923 | abis.append('none') | ||
924 | result = [] | ||
925 | |||
926 | arches = [ARCH] | ||
927 | if sys.platform == 'darwin': | ||
928 | m = re.match(r'(\w+)_(\d+)_(\d+)_(\w+)$', ARCH) | ||
929 | if m: | ||
930 | name, major, minor, arch = m.groups() | ||
931 | minor = int(minor) | ||
932 | matches = [arch] | ||
933 | if arch in ('i386', 'ppc'): | ||
934 | matches.append('fat') | ||
935 | if arch in ('i386', 'ppc', 'x86_64'): | ||
936 | matches.append('fat3') | ||
937 | if arch in ('ppc64', 'x86_64'): | ||
938 | matches.append('fat64') | ||
939 | if arch in ('i386', 'x86_64'): | ||
940 | matches.append('intel') | ||
941 | if arch in ('i386', 'x86_64', 'intel', 'ppc', 'ppc64'): | ||
942 | matches.append('universal') | ||
943 | while minor >= 0: | ||
944 | for match in matches: | ||
945 | s = '%s_%s_%s_%s' % (name, major, minor, match) | ||
946 | if s != ARCH: # already there | ||
947 | arches.append(s) | ||
948 | minor -= 1 | ||
949 | |||
950 | # Most specific - our Python version, ABI and arch | ||
951 | for abi in abis: | ||
952 | for arch in arches: | ||
953 | result.append((''.join((IMP_PREFIX, versions[0])), abi, arch)) | ||
954 | |||
955 | # where no ABI / arch dependency, but IMP_PREFIX dependency | ||
956 | for i, version in enumerate(versions): | ||
957 | result.append((''.join((IMP_PREFIX, version)), 'none', 'any')) | ||
958 | if i == 0: | ||
959 | result.append((''.join((IMP_PREFIX, version[0])), 'none', 'any')) | ||
960 | |||
961 | # no IMP_PREFIX, ABI or arch dependency | ||
962 | for i, version in enumerate(versions): | ||
963 | result.append((''.join(('py', version)), 'none', 'any')) | ||
964 | if i == 0: | ||
965 | result.append((''.join(('py', version[0])), 'none', 'any')) | ||
966 | return set(result) | ||
967 | |||
968 | |||
969 | COMPATIBLE_TAGS = compatible_tags() | ||
970 | |||
971 | del compatible_tags | ||
972 | |||
973 | |||
974 | def is_compatible(wheel, tags=None): | ||
975 | if not isinstance(wheel, Wheel): | ||
976 | wheel = Wheel(wheel) # assume it's a filename | ||
977 | result = False | ||
978 | if tags is None: | ||
979 | tags = COMPATIBLE_TAGS | ||
980 | for ver, abi, arch in tags: | ||
981 | if ver in wheel.pyver and abi in wheel.abi and arch in wheel.arch: | ||
982 | result = True | ||
983 | break | ||
984 | return result | ||
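A sketch of the compatibility check; the expected results assume a CPython 3 interpreter, where COMPATIBLE_TAGS contains ('py3', 'none', 'any') but nothing with a cp27 version tag:

    from distlib.wheel import is_compatible

    assert is_compatible('foo-1.0-py3-none-any.whl')            # pure, any arch
    assert not is_compatible('foo-1.0-cp27-cp27m-linux_x86_64.whl')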