diff options
author | Shubham Saini <shubham6405@gmail.com> | 2019-08-05 08:32:33 +0000 |
---|---|---|
committer | Shubham Saini <shubham6405@gmail.com> | 2019-08-05 08:32:33 +0000 |
commit | 227b2d30a8675b44918f9d9ca89b24144a938215 (patch) | |
tree | 9f8e6a28724514b6fdf463a9ab2067a7ef309b72 /venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/_backport/shutil.py | |
parent | 842a8cfbbbdb1f92889d892e4859dbd5d40c5be8 (diff) |
removing venv files
Diffstat (limited to 'venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/_backport/shutil.py')
-rw-r--r-- | venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/_backport/shutil.py | 761 |
1 files changed, 0 insertions, 761 deletions
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/_backport/shutil.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/_backport/shutil.py deleted file mode 100644 index becbfd7..0000000 --- a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/distlib/_backport/shutil.py +++ /dev/null | |||
@@ -1,761 +0,0 @@ | |||
1 | # -*- coding: utf-8 -*- | ||
2 | # | ||
3 | # Copyright (C) 2012 The Python Software Foundation. | ||
4 | # See LICENSE.txt and CONTRIBUTORS.txt. | ||
5 | # | ||
6 | """Utility functions for copying and archiving files and directory trees. | ||
7 | |||
8 | XXX The functions here don't copy the resource fork or other metadata on Mac. | ||
9 | |||
10 | """ | ||
11 | |||
12 | import os | ||
13 | import sys | ||
14 | import stat | ||
15 | from os.path import abspath | ||
16 | import fnmatch | ||
17 | import collections | ||
18 | import errno | ||
19 | from . import tarfile | ||
20 | |||
21 | try: | ||
22 | import bz2 | ||
23 | _BZ2_SUPPORTED = True | ||
24 | except ImportError: | ||
25 | _BZ2_SUPPORTED = False | ||
26 | |||
27 | try: | ||
28 | from pwd import getpwnam | ||
29 | except ImportError: | ||
30 | getpwnam = None | ||
31 | |||
32 | try: | ||
33 | from grp import getgrnam | ||
34 | except ImportError: | ||
35 | getgrnam = None | ||
36 | |||
# Public API of this backported shutil module; mirrors the stdlib names.
__all__ = ["copyfileobj", "copyfile", "copymode", "copystat", "copy", "copy2",
           "copytree", "move", "rmtree", "Error", "SpecialFileError",
           "ExecError", "make_archive", "get_archive_formats",
           "register_archive_format", "unregister_archive_format",
           "get_unpack_formats", "register_unpack_format",
           "unregister_unpack_format", "unpack_archive", "ignore_patterns"]
43 | |||
class Error(EnvironmentError):
    """Base exception for errors raised by this module's operations."""
    pass
46 | |||
class SpecialFileError(EnvironmentError):
    """Raised when trying to do a kind of operation (e.g. copying) which is
    not supported on a special file (e.g. a named pipe)."""
50 | |||
class ExecError(EnvironmentError):
    """Raised when a command could not be executed."""
53 | |||
class ReadError(EnvironmentError):
    """Raised when an archive cannot be read."""
56 | |||
class RegistryError(Exception):
    """Raised when a registry operation with the archiving
    and unpacking registries fails."""
60 | |||
61 | |||
# On non-Windows interpreters the WindowsError builtin does not exist;
# define it as None so later `isinstance(err, WindowsError)` checks can
# be guarded with `WindowsError is not None`.
try:
    WindowsError
except NameError:
    WindowsError = None
66 | |||
def copyfileobj(fsrc, fdst, length=16*1024):
    """Copy data from file-like object fsrc to file-like object fdst.

    Reads and writes in chunks of ``length`` bytes until fsrc is
    exhausted.
    """
    while True:
        chunk = fsrc.read(length)
        if not chunk:
            return
        fdst.write(chunk)
74 | |||
75 | def _samefile(src, dst): | ||
76 | # Macintosh, Unix. | ||
77 | if hasattr(os.path, 'samefile'): | ||
78 | try: | ||
79 | return os.path.samefile(src, dst) | ||
80 | except OSError: | ||
81 | return False | ||
82 | |||
83 | # All other platforms: check for same pathname. | ||
84 | return (os.path.normcase(os.path.abspath(src)) == | ||
85 | os.path.normcase(os.path.abspath(dst))) | ||
86 | |||
def copyfile(src, dst):
    """Copy the contents of the file src to the file dst.

    Raises Error if both names refer to the same file, and
    SpecialFileError if either path is a named pipe.
    """
    if _samefile(src, dst):
        raise Error("`%s` and `%s` are the same file" % (src, dst))

    for path in (src, dst):
        try:
            st = os.stat(path)
        except OSError:
            # File most likely does not exist yet; nothing to check.
            continue
        # XXX What about other special files? (sockets, devices...)
        if stat.S_ISFIFO(st.st_mode):
            raise SpecialFileError("`%s` is a named pipe" % path)

    with open(src, 'rb') as fsrc, open(dst, 'wb') as fdst:
        copyfileobj(fsrc, fdst)
106 | |||
def copymode(src, dst):
    """Copy the permission bits from src to dst.

    A no-op on platforms without os.chmod.
    """
    if not hasattr(os, 'chmod'):
        return
    mode = stat.S_IMODE(os.stat(src).st_mode)
    os.chmod(dst, mode)
113 | |||
def copystat(src, dst):
    """Copy all stat info (mode bits, atime, mtime, flags) from src to dst."""
    st = os.stat(src)
    mode = stat.S_IMODE(st.st_mode)
    if hasattr(os, 'utime'):
        os.utime(dst, (st.st_atime, st.st_mtime))
    if hasattr(os, 'chmod'):
        os.chmod(dst, mode)
    if hasattr(os, 'chflags') and hasattr(st, 'st_flags'):
        try:
            os.chflags(dst, st.st_flags)
        except OSError as why:
            # Some filesystems do not support flags; ignore only that case.
            if hasattr(errno, 'EOPNOTSUPP') and why.errno == errno.EOPNOTSUPP:
                pass
            else:
                raise
129 | |||
def copy(src, dst):
    """Copy data and mode bits ("cp src dst").

    The destination may be a directory, in which case the file is copied
    into it under its original basename.
    """
    target = dst
    if os.path.isdir(target):
        target = os.path.join(target, os.path.basename(src))
    copyfile(src, target)
    copymode(src, target)
140 | |||
def copy2(src, dst):
    """Copy data and all stat info ("cp -p src dst").

    The destination may be a directory, in which case the file is copied
    into it under its original basename.
    """
    target = dst
    if os.path.isdir(target):
        target = os.path.join(target, os.path.basename(src))
    copyfile(src, target)
    copystat(src, target)
151 | |||
def ignore_patterns(*patterns):
    """Return a callable usable as the copytree() ``ignore`` argument.

    The returned callable ignores every name that matches any of the
    given glob-style *patterns*.
    """
    def _ignore_patterns(path, names):
        matches = set()
        for pattern in patterns:
            matches.update(fnmatch.filter(names, pattern))
        return matches
    return _ignore_patterns
163 | |||
def copytree(src, dst, symlinks=False, ignore=None, copy_function=copy2,
             ignore_dangling_symlinks=False):
    """Recursively copy a directory tree.

    The destination directory must not already exist.
    If exception(s) occur, an Error is raised with a list of reasons.

    If the optional symlinks flag is true, symbolic links in the
    source tree result in symbolic links in the destination tree; if
    it is false, the contents of the files pointed to by symbolic
    links are copied. If the file pointed by the symlink doesn't
    exist, an exception will be added in the list of errors raised in
    an Error exception at the end of the copy process.

    You can set the optional ignore_dangling_symlinks flag to true if you
    want to silence this exception. Notice that this has no effect on
    platforms that don't support os.symlink.

    The optional ignore argument is a callable. If given, it
    is called with the `src` parameter, which is the directory
    being visited by copytree(), and `names` which is the list of
    `src` contents, as returned by os.listdir():

        callable(src, names) -> ignored_names

    Since copytree() is called recursively, the callable will be
    called once for each directory that is copied. It returns a
    list of names relative to the `src` directory that should
    not be copied.

    The optional copy_function argument is a callable that will be used
    to copy each file. It will be called with the source path and the
    destination path as arguments. By default, copy2() is used, but any
    function that supports the same signature (like copy()) can be used.
    """
    names = os.listdir(src)
    if ignore is not None:
        ignored_names = ignore(src, names)
    else:
        ignored_names = set()

    os.makedirs(dst)
    errors = []
    for name in names:
        if name in ignored_names:
            continue
        srcname = os.path.join(src, name)
        dstname = os.path.join(dst, name)
        try:
            if os.path.islink(srcname):
                linkto = os.readlink(srcname)
                if symlinks:
                    os.symlink(linkto, dstname)
                else:
                    # ignore dangling symlink if the flag is on
                    if not os.path.exists(linkto) and ignore_dangling_symlinks:
                        continue
                    # otherwise let the copy occur; copy2 will raise an error
                    copy_function(srcname, dstname)
            elif os.path.isdir(srcname):
                copytree(srcname, dstname, symlinks, ignore, copy_function)
            else:
                # Will raise a SpecialFileError for unsupported file types
                copy_function(srcname, dstname)
        # catch the Error from the recursive copytree so that we can
        # continue with other files
        except Error as err:
            errors.extend(err.args[0])
        except EnvironmentError as why:
            errors.append((srcname, dstname, str(why)))
    try:
        copystat(src, dst)
    except OSError as why:
        if WindowsError is not None and isinstance(why, WindowsError):
            # Copying file access times may fail on Windows
            pass
        else:
            # BUG FIX: this was `errors.extend((src, dst, str(why)))`,
            # which flattened the tuple into three separate error
            # entries; append keeps one (src, dst, message) record.
            errors.append((src, dst, str(why)))
    if errors:
        raise Error(errors)
245 | |||
def rmtree(path, ignore_errors=False, onerror=None):
    """Recursively delete a directory tree.

    If ignore_errors is set, errors are ignored; otherwise, if onerror
    is set, it is called to handle the error with arguments (func,
    path, exc_info) where func is os.listdir, os.remove, or os.rmdir;
    path is the argument to that function that caused it to fail; and
    exc_info is a tuple returned by sys.exc_info().  If ignore_errors
    is false and onerror is None, an exception is raised.
    """
    if ignore_errors:
        def onerror(*args):
            pass
    elif onerror is None:
        def onerror(*args):
            raise
    try:
        if os.path.islink(path):
            # symlinks to directories are forbidden, see bug #1669
            raise OSError("Cannot call rmtree on a symbolic link")
    except OSError:
        onerror(os.path.islink, path, sys.exc_info())
        # can't continue even if onerror hook returns
        return
    entries = []
    try:
        entries = os.listdir(path)
    except OSError:
        onerror(os.listdir, path, sys.exc_info())
    for entry in entries:
        fullname = os.path.join(path, entry)
        try:
            mode = os.lstat(fullname).st_mode
        except OSError:
            mode = 0
        if stat.S_ISDIR(mode):
            rmtree(fullname, ignore_errors, onerror)
        else:
            try:
                os.remove(fullname)
            except OSError:
                onerror(os.remove, fullname, sys.exc_info())
    try:
        os.rmdir(path)
    except OSError:
        onerror(os.rmdir, path, sys.exc_info())
293 | |||
294 | |||
295 | def _basename(path): | ||
296 | # A basename() variant which first strips the trailing slash, if present. | ||
297 | # Thus we always get the last component of the path, even for directories. | ||
298 | return os.path.basename(path.rstrip(os.path.sep)) | ||
299 | |||
def move(src, dst):
    """Recursively move a file or directory to another location. This is
    similar to the Unix "mv" command.

    If the destination is a directory or a symlink to a directory, the source
    is moved inside the directory. The destination path must not already
    exist.

    If the destination already exists but is not a directory, it may be
    overwritten depending on os.rename() semantics.

    If the destination is on our current filesystem, then rename() is used.
    Otherwise, src is copied to the destination and then removed.
    """
    real_dst = dst
    if os.path.isdir(dst):
        if _samefile(src, dst):
            # Case-insensitive filesystem: same file, but perform the
            # rename anyway so the on-disk casing changes.
            os.rename(src, dst)
            return

        real_dst = os.path.join(dst, _basename(src))
        if os.path.exists(real_dst):
            raise Error("Destination path '%s' already exists" % real_dst)
    try:
        os.rename(src, real_dst)
    except OSError:
        # Cross-device move: fall back to copy-then-delete.
        if not os.path.isdir(src):
            copy2(src, real_dst)
            os.unlink(src)
        else:
            if _destinsrc(src, dst):
                raise Error("Cannot move a directory '%s' into itself '%s'." % (src, dst))
            copytree(src, real_dst, symlinks=True)
            rmtree(src)
339 | |||
340 | def _destinsrc(src, dst): | ||
341 | src = abspath(src) | ||
342 | dst = abspath(dst) | ||
343 | if not src.endswith(os.path.sep): | ||
344 | src += os.path.sep | ||
345 | if not dst.endswith(os.path.sep): | ||
346 | dst += os.path.sep | ||
347 | return dst.startswith(src) | ||
348 | |||
349 | def _get_gid(name): | ||
350 | """Returns a gid, given a group name.""" | ||
351 | if getgrnam is None or name is None: | ||
352 | return None | ||
353 | try: | ||
354 | result = getgrnam(name) | ||
355 | except KeyError: | ||
356 | result = None | ||
357 | if result is not None: | ||
358 | return result[2] | ||
359 | return None | ||
360 | |||
361 | def _get_uid(name): | ||
362 | """Returns an uid, given a user name.""" | ||
363 | if getpwnam is None or name is None: | ||
364 | return None | ||
365 | try: | ||
366 | result = getpwnam(name) | ||
367 | except KeyError: | ||
368 | result = None | ||
369 | if result is not None: | ||
370 | return result[2] | ||
371 | return None | ||
372 | |||
def _make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0,
                  owner=None, group=None, logger=None):
    """Create a (possibly compressed) tar file from all the files under
    'base_dir'.

    'compress' must be "gzip" (the default), "bzip2", or None.

    'owner' and 'group' can be used to define an owner and a group for the
    archive that is being built. If not provided, the current owner and group
    will be used.

    The output tar file will be named 'base_name' + ".tar", possibly plus
    the appropriate compression extension (".gz", or ".bz2").

    Returns the output filename.
    """
    tar_compression = {'gzip': 'gz', None: ''}
    compress_ext = {'gzip': '.gz'}
    if _BZ2_SUPPORTED:
        tar_compression['bzip2'] = 'bz2'
        compress_ext['bzip2'] = '.bz2'

    # flags for compression program, each element of list will be an argument
    if compress is not None and compress not in compress_ext:
        raise ValueError("bad value for 'compress', or compression format not "
                         "supported : {0}".format(compress))

    archive_name = base_name + '.tar' + compress_ext.get(compress, '')
    archive_dir = os.path.dirname(archive_name)

    if not os.path.exists(archive_dir):
        if logger is not None:
            logger.info("creating %s", archive_dir)
        if not dry_run:
            os.makedirs(archive_dir)

    # creating the tarball
    if logger is not None:
        logger.info('Creating tar archive')

    uid = _get_uid(owner)
    gid = _get_gid(group)

    def _set_uid_gid(tarinfo):
        # Stamp the requested ownership onto each archive member.
        if gid is not None:
            tarinfo.gid = gid
            tarinfo.gname = group
        if uid is not None:
            tarinfo.uid = uid
            tarinfo.uname = owner
        return tarinfo

    if dry_run:
        return archive_name

    tar = tarfile.open(archive_name, 'w|%s' % tar_compression[compress])
    try:
        tar.add(base_dir, filter=_set_uid_gid)
    finally:
        tar.close()

    return archive_name
434 | |||
def _call_external_zip(base_dir, zip_filename, verbose=False, dry_run=False):
    """Create *zip_filename* from *base_dir* by spawning an external
    ``zip`` utility.

    Raises ExecError when the utility cannot be found or fails.
    """
    # XXX see if we want to keep an external call here
    if verbose:
        zipoptions = "-r"
    else:
        zipoptions = "-rq"
    from distutils.errors import DistutilsExecError
    from distutils.spawn import spawn
    try:
        spawn(["zip", zipoptions, zip_filename, base_dir], dry_run=dry_run)
    except DistutilsExecError:
        # XXX really should distinguish between "couldn't find
        # external 'zip' command" and "zip failed".
        # BUG FIX: the '%' operator was previously applied to the
        # ExecError *instance* (`raise ExecError(...) % zip_filename`),
        # which raised TypeError instead of the intended message; it
        # must format the message string.
        raise ExecError("unable to create zip file '%s': "
                        "could neither import the 'zipfile' module nor "
                        "find a standalone zip utility" % zip_filename)
451 | |||
452 | def _make_zipfile(base_name, base_dir, verbose=0, dry_run=0, logger=None): | ||
453 | """Create a zip file from all the files under 'base_dir'. | ||
454 | |||
455 | The output zip file will be named 'base_name' + ".zip". Uses either the | ||
456 | "zipfile" Python module (if available) or the InfoZIP "zip" utility | ||
457 | (if installed and found on the default search path). If neither tool is | ||
458 | available, raises ExecError. Returns the name of the output zip | ||
459 | file. | ||
460 | """ | ||
461 | zip_filename = base_name + ".zip" | ||
462 | archive_dir = os.path.dirname(base_name) | ||
463 | |||
464 | if not os.path.exists(archive_dir): | ||
465 | if logger is not None: | ||
466 | logger.info("creating %s", archive_dir) | ||
467 | if not dry_run: | ||
468 | os.makedirs(archive_dir) | ||
469 | |||
470 | # If zipfile module is not available, try spawning an external 'zip' | ||
471 | # command. | ||
472 | try: | ||
473 | import zipfile | ||
474 | except ImportError: | ||
475 | zipfile = None | ||
476 | |||
477 | if zipfile is None: | ||
478 | _call_external_zip(base_dir, zip_filename, verbose, dry_run) | ||
479 | else: | ||
480 | if logger is not None: | ||
481 | logger.info("creating '%s' and adding '%s' to it", | ||
482 | zip_filename, base_dir) | ||
483 | |||
484 | if not dry_run: | ||
485 | zip = zipfile.ZipFile(zip_filename, "w", | ||
486 | compression=zipfile.ZIP_DEFLATED) | ||
487 | |||
488 | for dirpath, dirnames, filenames in os.walk(base_dir): | ||
489 | for name in filenames: | ||
490 | path = os.path.normpath(os.path.join(dirpath, name)) | ||
491 | if os.path.isfile(path): | ||
492 | zip.write(path, path) | ||
493 | if logger is not None: | ||
494 | logger.info("adding '%s'", path) | ||
495 | zip.close() | ||
496 | |||
497 | return zip_filename | ||
498 | |||
# Mapping: format name -> (creator function, extra (arg, value) pairs,
# human-readable description).
_ARCHIVE_FORMATS = {
    'gztar': (_make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"),
    'tar': (_make_tarball, [('compress', None)], "uncompressed tar file"),
    'zip': (_make_zipfile, [], "ZIP file"),
}

# BUG FIX: 'bztar' used to appear unconditionally in the base mapping as
# well, advertising bzip2 support even when the bz2 module failed to
# import; register it only when bz2 is actually available.
if _BZ2_SUPPORTED:
    _ARCHIVE_FORMATS['bztar'] = (_make_tarball, [('compress', 'bzip2')],
                                 "bzip2'ed tar-file")
509 | |||
def get_archive_formats():
    """Returns a list of supported formats for archiving and unarchiving.

    Each element of the returned sequence is a tuple (name, description).
    """
    return sorted((name, registry[2])
                  for name, registry in _ARCHIVE_FORMATS.items())
519 | |||
def register_archive_format(name, function, extra_args=None, description=''):
    """Registers an archive format.

    name is the name of the format. function is the callable that will be
    used to create archives. If provided, extra_args is a sequence of
    (name, value) tuples that will be passed as arguments to the callable.
    description can be provided to describe the format, and will be returned
    by the get_archive_formats() function.

    Raises TypeError when function is not callable or extra_args is
    malformed.
    """
    if extra_args is None:
        extra_args = []
    # BUG FIX: this used isinstance(function, collections.Callable),
    # which breaks on Python 3.10+ where the ABC aliases were removed
    # from `collections`; the builtin callable() works everywhere.
    if not callable(function):
        raise TypeError('The %s object is not callable' % function)
    if not isinstance(extra_args, (tuple, list)):
        raise TypeError('extra_args needs to be a sequence')
    for element in extra_args:
        if not isinstance(element, (tuple, list)) or len(element) != 2:
            raise TypeError('extra_args elements are : (arg_name, value)')

    _ARCHIVE_FORMATS[name] = (function, extra_args, description)
540 | |||
def unregister_archive_format(name):
    """Remove the archive format *name* from the registry.

    Raises KeyError when the format is not registered.
    """
    del _ARCHIVE_FORMATS[name]
543 | |||
def make_archive(base_name, format, root_dir=None, base_dir=None, verbose=0,
                 dry_run=0, owner=None, group=None, logger=None):
    """Create an archive file (eg. zip or tar).

    'base_name' is the name of the file to create, minus any format-specific
    extension; 'format' is the archive format: one of "zip", "tar", "bztar"
    or "gztar".

    'root_dir' is a directory that will be the root directory of the
    archive; ie. we typically chdir into 'root_dir' before creating the
    archive. 'base_dir' is the directory where we start archiving from;
    ie. 'base_dir' will be the common prefix of all files and
    directories in the archive. 'root_dir' and 'base_dir' both default
    to the current directory. Returns the name of the archive file.

    'owner' and 'group' are used when creating a tar archive. By default,
    uses the current owner and group.

    Raises ValueError for an unknown format.
    """
    # Remember the cwd: we may chdir into root_dir below and must
    # restore it afterwards.
    save_cwd = os.getcwd()
    if root_dir is not None:
        if logger is not None:
            logger.debug("changing into '%s'", root_dir)
        # Resolve base_name *before* the chdir so the archive is written
        # where the caller expects, relative to the original cwd.
        base_name = os.path.abspath(base_name)
        if not dry_run:
            os.chdir(root_dir)

    if base_dir is None:
        base_dir = os.curdir

    kwargs = {'dry_run': dry_run, 'logger': logger}

    try:
        format_info = _ARCHIVE_FORMATS[format]
    except KeyError:
        raise ValueError("unknown archive format '%s'" % format)

    func = format_info[0]
    # Apply the format's fixed extra arguments (e.g. compression mode).
    for arg, val in format_info[1]:
        kwargs[arg] = val

    # Ownership options only apply to tar-based formats.
    if format != 'zip':
        kwargs['owner'] = owner
        kwargs['group'] = group

    try:
        filename = func(base_name, base_dir, **kwargs)
    finally:
        # Always restore the original working directory, even on error.
        if root_dir is not None:
            if logger is not None:
                logger.debug("changing back to '%s'", save_cwd)
            os.chdir(save_cwd)

    return filename
597 | |||
598 | |||
def get_unpack_formats():
    """Returns a list of supported formats for unpacking.

    Each element of the returned sequence is a tuple
    (name, extensions, description).
    """
    return sorted((name, info[0], info[3])
                  for name, info in _UNPACK_FORMATS.items())
609 | |||
def _check_unpack_options(extensions, function, extra_args):
    """Checks what gets registered as an unpacker.

    Raises RegistryError when an extension is already claimed by another
    format, and TypeError when function is not callable.
    """
    # first make sure no other unpacker is registered for this extension
    existing_extensions = {}
    for name, info in _UNPACK_FORMATS.items():
        for ext in info[0]:
            existing_extensions[ext] = name

    for extension in extensions:
        if extension in existing_extensions:
            msg = '%s is already registered for "%s"'
            raise RegistryError(msg % (extension,
                                       existing_extensions[extension]))

    # BUG FIX: isinstance(function, collections.Callable) fails on
    # Python 3.10+, where the ABC aliases were removed from
    # `collections`; the builtin callable() is correct on all versions.
    if not callable(function):
        raise TypeError('The registered function must be a callable')
626 | |||
627 | |||
def register_unpack_format(name, extensions, function, extra_args=None,
                           description=''):
    """Registers an unpack format.

    `name` is the name of the format and `extensions` the list of file
    extensions that identify it.

    `function` is the callable used to unpack archives; it receives the
    archive to unpack and must raise a ReadError when it cannot handle
    the archive.

    If provided, `extra_args` is a sequence of (name, value) tuples
    passed as arguments to the callable.  description can be provided to
    describe the format, and will be returned by the
    get_unpack_formats() function.
    """
    extra_args = [] if extra_args is None else extra_args
    _check_unpack_options(extensions, function, extra_args)
    _UNPACK_FORMATS[name] = (extensions, function, extra_args, description)
649 | |||
def unregister_unpack_format(name):
    """Removes the unpack format from the registry.

    Raises KeyError when the format is not registered.
    """
    del _UNPACK_FORMATS[name]
653 | |||
654 | def _ensure_directory(path): | ||
655 | """Ensure that the parent directory of `path` exists""" | ||
656 | dirname = os.path.dirname(path) | ||
657 | if not os.path.isdir(dirname): | ||
658 | os.makedirs(dirname) | ||
659 | |||
def _unpack_zipfile(filename, extract_dir):
    """Unpack zip `filename` to `extract_dir`."""
    try:
        import zipfile
    except ImportError:
        raise ReadError('zlib not supported, cannot unpack this archive.')

    if not zipfile.is_zipfile(filename):
        raise ReadError("%s is not a zip file" % filename)

    zf = zipfile.ZipFile(filename)
    try:
        for info in zf.infolist():
            name = info.filename

            # don't extract absolute paths or ones with .. in them
            if name.startswith('/') or '..' in name:
                continue

            target = os.path.join(extract_dir, *name.split('/'))
            if not target:
                continue

            _ensure_directory(target)
            if name.endswith('/'):
                # directory entry: nothing to write
                continue
            data = zf.read(info.filename)
            out = open(target, 'wb')
            try:
                out.write(data)
            finally:
                out.close()
            del data
    finally:
        zf.close()
696 | |||
697 | def _unpack_tarfile(filename, extract_dir): | ||
698 | """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir` | ||
699 | """ | ||
700 | try: | ||
701 | tarobj = tarfile.open(filename) | ||
702 | except tarfile.TarError: | ||
703 | raise ReadError( | ||
704 | "%s is not a compressed or uncompressed tar file" % filename) | ||
705 | try: | ||
706 | tarobj.extractall(extract_dir) | ||
707 | finally: | ||
708 | tarobj.close() | ||
709 | |||
# Mapping: format name -> (extensions, unpack function, extra args,
# human-readable description).
_UNPACK_FORMATS = {
    'gztar': (['.tar.gz', '.tgz'], _unpack_tarfile, [], "gzip'ed tar-file"),
    'tar': (['.tar'], _unpack_tarfile, [], "uncompressed tar file"),
    'zip': (['.zip'], _unpack_zipfile, [], "ZIP file")
}

# bzip2 support is optional; only advertise it when the bz2 module
# imported successfully at the top of this file.
if _BZ2_SUPPORTED:
    _UNPACK_FORMATS['bztar'] = (['.bz2'], _unpack_tarfile, [],
                                "bzip2'ed tar-file")
719 | |||
def _find_unpack_format(filename):
    """Return the name of the registered format whose extension matches
    *filename*, or None when no unpacker claims it."""
    for name, info in _UNPACK_FORMATS.items():
        if filename.endswith(tuple(info[0])):
            return name
    return None
726 | |||
def unpack_archive(filename, extract_dir=None, format=None):
    """Unpack an archive.

    `filename` is the name of the archive.

    `extract_dir` is the name of the target directory, where the archive
    is unpacked. If not provided, the current working directory is used.

    `format` is the archive format: one of "zip", "tar", or "gztar". Or any
    other registered format. If not provided, unpack_archive will use the
    filename extension and see if an unpacker was registered for that
    extension.

    In case none is found, a ValueError is raised.
    """
    if extract_dir is None:
        extract_dir = os.getcwd()

    if format is None:
        # we need to look at the registered unpackers supported extensions
        format = _find_unpack_format(filename)
        if format is None:
            raise ReadError("Unknown archive format '{0}'".format(filename))

        func = _UNPACK_FORMATS[format][1]
        kwargs = dict(_UNPACK_FORMATS[format][2])
        func(filename, extract_dir, **kwargs)
    else:
        try:
            format_info = _UNPACK_FORMATS[format]
        except KeyError:
            raise ValueError("Unknown unpack format '{0}'".format(format))

        func = format_info[1]
        func(filename, extract_dir, **dict(format_info[2]))