summaryrefslogtreecommitdiff
path: root/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/pkg_resources/__init__.py
diff options
context:
space:
mode:
authorShubham Saini <shubham6405@gmail.com>2018-12-11 10:01:23 +0000
committerShubham Saini <shubham6405@gmail.com>2018-12-11 10:01:23 +0000
commit68df54d6629ec019142eb149dd037774f2d11e7c (patch)
tree345bc22d46b4e01a4ba8303b94278952a4ed2b9e /venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/pkg_resources/__init__.py
First commit
Diffstat (limited to 'venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/pkg_resources/__init__.py')
-rw-r--r--venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/pkg_resources/__init__.py3125
1 files changed, 3125 insertions, 0 deletions
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/pkg_resources/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/pkg_resources/__init__.py
new file mode 100644
index 0000000..6e1fb52
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/pkg_resources/__init__.py
@@ -0,0 +1,3125 @@
1# coding: utf-8
2"""
3Package resource API
4--------------------
5
6A resource is a logical file contained within a package, or a logical
7subdirectory thereof. The package resource API expects resource names
8to have their path parts separated with ``/``, *not* whatever the local
9path separator is. Do not use os.path operations to manipulate resource
10names being passed into the API.
11
12The package resource API is designed to work with normal filesystem packages,
13.egg files, and unpacked .egg files. It can also work in a limited way with
14.zip files and with custom PEP 302 loaders that support the ``get_data()``
15method.
16"""
17
18from __future__ import absolute_import
19
20import sys
21import os
22import io
23import time
24import re
25import types
26import zipfile
27import zipimport
28import warnings
29import stat
30import functools
31import pkgutil
32import operator
33import platform
34import collections
35import plistlib
36import email.parser
37import errno
38import tempfile
39import textwrap
40import itertools
41import inspect
42from pkgutil import get_importer
43
44try:
45 import _imp
46except ImportError:
47 # Python 3.2 compatibility
48 import imp as _imp
49
50from pip._vendor import six
51from pip._vendor.six.moves import urllib, map, filter
52
53# capture these to bypass sandboxing
54from os import utime
55try:
56 from os import mkdir, rename, unlink
57 WRITE_SUPPORT = True
58except ImportError:
59 # no write support, probably under GAE
60 WRITE_SUPPORT = False
61
62from os import open as os_open
63from os.path import isdir, split
64
65try:
66 import importlib.machinery as importlib_machinery
67 # access attribute to force import under delayed import mechanisms.
68 importlib_machinery.__name__
69except ImportError:
70 importlib_machinery = None
71
72from . import py31compat
73from pip._vendor import appdirs
74from pip._vendor import packaging
75__import__('pip._vendor.packaging.version')
76__import__('pip._vendor.packaging.specifiers')
77__import__('pip._vendor.packaging.requirements')
78__import__('pip._vendor.packaging.markers')
79
80
# Python 3.0-3.2 are explicitly unsupported; 2.x falls outside this range.
if (3, 0) < sys.version_info < (3, 3):
    raise RuntimeError("Python 3.3 or later is required")

if six.PY2:
    # Those builtin exceptions are only defined in Python 3
    PermissionError = None
    NotADirectoryError = None

# declare some globals that will be defined later to
# satisfy the linters.
require = None
working_set = None
add_activation_listener = None
resources_stream = None
cleanup_resources = None
resource_dir = None
resource_stream = None
set_extraction_path = None
resource_isdir = None
resource_string = None
iter_entry_points = None
resource_listdir = None
resource_filename = None
resource_exists = None
_distribution_finders = None
_namespace_handlers = None
_namespace_packages = None
109
class PEP440Warning(RuntimeWarning):
    """
    Used when there is an issue with a version or specifier not complying with
    PEP 440.  (See PEP 440 for the version identification specification.)
    """
116
def parse_version(v):
    """Parse `v` into a comparable version object.

    Tries a strict PEP 440 ``Version`` first; on ``InvalidVersion``,
    falls back to ``LegacyVersion`` so arbitrary strings still compare.
    """
    try:
        return packaging.version.Version(v)
    except packaging.version.InvalidVersion:
        return packaging.version.LegacyVersion(v)
122
123
# variable name -> type tag (e.g. 'dict', 'object') for module-level state
# registered via _declare_state; drives the _sget_*/_sset_* dispatch in
# __getstate__/__setstate__ below.
_state_vars = {}
125
126
def _declare_state(vartype, **kw):
    """Register module-level state variables.

    Each keyword argument becomes a module global, and its name is
    recorded in ``_state_vars`` under the given `vartype` tag so that
    ``__getstate__``/``__setstate__`` can snapshot and restore it.
    """
    globals().update(kw)
    _state_vars.update(dict.fromkeys(kw, vartype))
130
131
def __getstate__():
    """Snapshot all registered module state into a dict.

    Dispatches each variable to the ``_sget_<vartype>`` helper that
    copies its value.
    """
    state = {}
    g = globals()
    for k, v in _state_vars.items():
        state[k] = g['_sget_' + v](g[k])
    return state
138
139
def __setstate__(state):
    """Restore module state from a snapshot produced by ``__getstate__``.

    Dispatches each variable to the ``_sset_<vartype>`` helper, passing
    the name, the current value, and the saved value.
    """
    g = globals()
    for k, v in state.items():
        g['_sset_' + _state_vars[k]](k, g[k], v)
    return state
145
146
147def _sget_dict(val):
148 return val.copy()
149
150
151def _sset_dict(key, ob, state):
152 ob.clear()
153 ob.update(state)
154
155
156def _sget_object(val):
157 return val.__getstate__()
158
159
160def _sset_object(key, ob, state):
161 ob.__setstate__(state)
162
163
164_sget_none = _sset_none = lambda *args: None
165
166
def get_supported_platform():
    """Return this platform's maximum compatible version.

    distutils.util.get_platform() normally reports the minimum version
    of Mac OS X that would be required to *use* extensions produced by
    distutils. But what we want when checking compatibility is to know the
    version of Mac OS X that we are *running*. To allow usage of packages that
    explicitly require a newer version of Mac OS X, we must also know the
    current version of the OS.

    If this condition occurs for any other platform with a version in its
    platform strings, this function should be extended accordingly.
    """
    plat = get_build_platform()
    m = macosVersionString.match(plat)
    if m is not None and sys.platform == "darwin":
        try:
            # substitute the running OS version, keeping the machine
            # architecture part (regex group 3) from the build platform
            plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3))
        except ValueError:
            # not Mac OS X
            pass
    return plat
189
190
# Explicit public API of this module, grouped by purpose.
__all__ = [
    # Basic resource access and distribution/entry point discovery
    'require', 'run_script', 'get_provider', 'get_distribution',
    'load_entry_point', 'get_entry_map', 'get_entry_info',
    'iter_entry_points',
    'resource_string', 'resource_stream', 'resource_filename',
    'resource_listdir', 'resource_exists', 'resource_isdir',

    # Environmental control
    'declare_namespace', 'working_set', 'add_activation_listener',
    'find_distributions', 'set_extraction_path', 'cleanup_resources',
    'get_default_cache',

    # Primary implementation classes
    'Environment', 'WorkingSet', 'ResourceManager',
    'Distribution', 'Requirement', 'EntryPoint',

    # Exceptions
    'ResolutionError', 'VersionConflict', 'DistributionNotFound',
    'UnknownExtra', 'ExtractionError',

    # Warnings
    'PEP440Warning',

    # Parsing functions and string utilities
    'parse_requirements', 'parse_version', 'safe_name', 'safe_version',
    'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections',
    'safe_extra', 'to_filename', 'invalid_marker', 'evaluate_marker',

    # filesystem utilities
    'ensure_directory', 'normalize_path',

    # Distribution "precedence" constants
    'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST',

    # "Provider" interfaces, implementations, and registration/lookup APIs
    'IMetadataProvider', 'IResourceProvider', 'FileMetadata',
    'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider',
    'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider',
    'register_finder', 'register_namespace_handler', 'register_loader_type',
    'fixup_namespace_packages', 'get_importer',

    # Deprecated/backward compatibility only
    'run_main', 'AvailableDistributions',
]
236
237
class ResolutionError(Exception):
    """Abstract base for dependency resolution errors"""

    def __repr__(self):
        # e.g. DistributionNotFound('some-req', None)
        return '%s%r' % (self.__class__.__name__, self.args)
243
244
class VersionConflict(ResolutionError):
    """
    An already-installed version conflicts with the requested version.

    Should be initialized with the installed Distribution and the requested
    Requirement.
    """

    _template = "{self.dist} is installed but {self.req} is required"

    @property
    def dist(self):
        """The installed Distribution (first positional argument)."""
        return self.args[0]

    @property
    def req(self):
        """The conflicting Requirement (second positional argument)."""
        return self.args[1]

    def report(self):
        """Render the human-readable conflict message."""
        return self._template.format(self=self)

    def with_context(self, required_by):
        """
        If required_by is non-empty, return a version of self that is a
        ContextualVersionConflict.
        """
        if required_by:
            return ContextualVersionConflict(*(self.args + (required_by,)))
        return self
275
276
class ContextualVersionConflict(VersionConflict):
    """
    A VersionConflict that accepts a third parameter, the set of the
    requirements that required the installed Distribution.
    """

    # extends the parent message with the requiring-requirements context
    _template = VersionConflict._template + ' by {self.required_by}'

    @property
    def required_by(self):
        # the set of requirements that pulled in the conflicting dist
        return self.args[2]
288
289
class DistributionNotFound(ResolutionError):
    """A requested distribution was not found"""

    _template = ("The '{self.req}' distribution was not found "
                 "and is required by {self.requirers_str}")

    @property
    def req(self):
        """The unmet Requirement (first positional argument)."""
        return self.args[0]

    @property
    def requirers(self):
        """Names of the requirers, or a false value if unknown."""
        return self.args[1]

    @property
    def requirers_str(self):
        """Comma-joined requirer names, defaulting to 'the application'."""
        if self.requirers:
            return ', '.join(self.requirers)
        return 'the application'

    def report(self):
        """Render the human-readable not-found message."""
        return self._template.format(self=self)

    def __str__(self):
        return self.report()
315
316
class UnknownExtra(ResolutionError):
    """Distribution doesn't have an "extra feature" of the given name."""
319
320
321_provider_factories = {}
322
323PY_MAJOR = sys.version[:3]
324EGG_DIST = 3
325BINARY_DIST = 2
326SOURCE_DIST = 1
327CHECKOUT_DIST = 0
328DEVELOP_DIST = -1
329
330
def register_loader_type(loader_type, provider_factory):
    """Register `provider_factory` to make providers for `loader_type`

    `loader_type` is the type or class of a PEP 302 ``module.__loader__``,
    and `provider_factory` is a function that, passed a *module* object,
    returns an ``IResourceProvider`` for that module.
    """
    # a later registration for the same loader type replaces the earlier one
    _provider_factories[loader_type] = provider_factory
339
340
def get_provider(moduleOrReq):
    """Return an IResourceProvider for the named module or requirement"""
    if isinstance(moduleOrReq, Requirement):
        # resolve the requirement to an active (or newly activated) dist
        return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
    try:
        module = sys.modules[moduleOrReq]
    except KeyError:
        # not yet imported; import it so __loader__ is populated
        __import__(moduleOrReq)
        module = sys.modules[moduleOrReq]
    loader = getattr(module, '__loader__', None)
    # dispatch to the provider factory registered for this loader type
    return _find_adapter(_provider_factories, loader)(module)
352
353
354def _macosx_vers(_cache=[]):
355 if not _cache:
356 version = platform.mac_ver()[0]
357 # fallback for MacPorts
358 if version == '':
359 plist = '/System/Library/CoreServices/SystemVersion.plist'
360 if os.path.exists(plist):
361 if hasattr(plistlib, 'readPlist'):
362 plist_content = plistlib.readPlist(plist)
363 if 'ProductVersion' in plist_content:
364 version = plist_content['ProductVersion']
365
366 _cache.append(version.split('.'))
367 return _cache[0]
368
369
370def _macosx_arch(machine):
371 return {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}.get(machine, machine)
372
373
def get_build_platform():
    """Return this platform's string for platform-specific distributions

    XXX Currently this is the same as ``distutils.util.get_platform()``, but it
    needs some hacks for Linux and Mac OS X.
    """
    try:
        # Python 2.7 or >=3.2
        from sysconfig import get_platform
    except ImportError:
        from distutils.util import get_platform

    plat = get_platform()
    if sys.platform != "darwin" or plat.startswith('macosx-'):
        return plat
    # darwin without a macosx- prefix: rebuild the string from the
    # running OS version and machine type
    try:
        version = _macosx_vers()
        machine = os.uname()[4].replace(" ", "_")
        return "macosx-%d.%d-%s" % (
            int(version[0]), int(version[1]),
            _macosx_arch(machine),
        )
    except ValueError:
        # non-Mac darwin system: fall through to the default string
        return plat
400
401
# Patterns for platform strings such as 'macosx-10.6-x86_64' and
# legacy 'darwin-8.11.1-i386' (version number groups, then machine).
macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
# XXX backward compat
get_platform = get_build_platform
406
407
def compatible_platforms(provided, required):
    """Can code for the `provided` platform run on the `required` platform?

    Returns true if either platform is ``None``, or the platforms are equal.

    XXX Needs compatibility checks for Linux and other unixy OSes.
    """
    # trivially compatible: unspecified on either side, or identical
    if provided is None or required is None or provided == required:
        return True

    req_mac = macosVersionString.match(required)
    if not req_mac:
        # XXX Linux and other platforms' special cases should go here
        return False

    prov_mac = macosVersionString.match(provided)
    if not prov_mac:
        # backwards compatibility for packages built before setuptools 0.6,
        # which used the darwin-* designation instead of macosx-*
        prov_darwin = darwinVersionString.match(provided)
        if prov_darwin:
            dversion = int(prov_darwin.group(1))
            macosversion = "%s.%s" % (req_mac.group(1), req_mac.group(2))
            if dversion == 7 and macosversion >= "10.3" or \
                    dversion == 8 and macosversion >= "10.4":
                return True
        # egg isn't macosx or legacy darwin
        return False

    # must match major version and machine type exactly
    if prov_mac.group(1) != req_mac.group(1) or \
            prov_mac.group(3) != req_mac.group(3):
        return False

    # the required OS minor update must be >= the provided one
    return int(prov_mac.group(2)) <= int(req_mac.group(2))
452
453
def run_script(dist_spec, script_name):
    """Locate distribution `dist_spec` and run its `script_name` script"""
    # run the script in the *caller's* global namespace, emptied of
    # everything except __name__, so it executes like a main program
    ns = sys._getframe(1).f_globals
    name = ns['__name__']
    ns.clear()
    ns['__name__'] = name
    require(dist_spec)[0].run_script(script_name, ns)
461
462
# backward compatibility (listed under "Deprecated/backward compatibility
# only" in __all__)
run_main = run_script
465
466
def get_distribution(dist):
    """Return a current distribution object for a Requirement or string"""
    if isinstance(dist, six.string_types):
        # coerce a plain string spec into a Requirement first
        dist = Requirement.parse(dist)
    if isinstance(dist, Requirement):
        # resolve the requirement to its active distribution
        dist = get_provider(dist)
    if not isinstance(dist, Distribution):
        raise TypeError("Expected string, Requirement, or Distribution", dist)
    return dist
476
477
def load_entry_point(dist, group, name):
    """Return `name` entry point of `group` for `dist` or raise ImportError"""
    # convenience wrapper: delegates to Distribution.load_entry_point
    return get_distribution(dist).load_entry_point(group, name)


def get_entry_map(dist, group=None):
    """Return the entry point map for `group`, or the full entry map"""
    # convenience wrapper: delegates to Distribution.get_entry_map
    return get_distribution(dist).get_entry_map(group)


def get_entry_info(dist, group, name):
    """Return the EntryPoint object for `group`+`name`, or ``None``"""
    # convenience wrapper: delegates to Distribution.get_entry_info
    return get_distribution(dist).get_entry_info(group, name)
491
492
class IMetadataProvider:
    """Interface specification for objects providing distribution metadata.

    NOTE: the methods below are interface declarations only -- they
    deliberately omit ``self`` and have no implementation bodies.
    """

    def has_metadata(name):
        """Does the package's distribution contain the named metadata?"""

    def get_metadata(name):
        """The named metadata resource as a string"""

    def get_metadata_lines(name):
        """Yield named metadata resource as list of non-blank non-comment lines

        Leading and trailing whitespace is stripped from each line, and lines
        with ``#`` as the first non-blank character are omitted."""

    def metadata_isdir(name):
        """Is the named metadata a directory? (like ``os.path.isdir()``)"""

    def metadata_listdir(name):
        """List of metadata names in the directory (like ``os.listdir()``)"""

    def run_script(script_name, namespace):
        """Execute the named script in the supplied namespace dictionary"""
514
515
class IResourceProvider(IMetadataProvider):
    """An object that provides access to package resources

    NOTE: like ``IMetadataProvider``, this is an interface specification;
    the methods omit ``self`` and have no implementation bodies.
    """

    def get_resource_filename(manager, resource_name):
        """Return a true filesystem path for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_stream(manager, resource_name):
        """Return a readable file-like object for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_string(manager, resource_name):
        """Return a string containing the contents of `resource_name`

        `manager` must be an ``IResourceManager``"""

    def has_resource(resource_name):
        """Does the package contain the named resource?"""

    def resource_isdir(resource_name):
        """Is the named resource a directory? (like ``os.path.isdir()``)"""

    def resource_listdir(resource_name):
        """List of resource names in the directory (like ``os.listdir()``)"""
542
543
class WorkingSet(object):
    """A collection of active distributions on sys.path (or a similar list)"""

    def __init__(self, entries=None):
        """Create working set from list of path entries (default=sys.path)"""
        # path entries in order (mirrors sys.path; duplicates allowed)
        self.entries = []
        # path entry -> list of distribution keys found on that entry
        self.entry_keys = {}
        # distribution key -> the active Distribution for that project
        self.by_key = {}
        # callables fired (via _added_new) for each newly added dist
        self.callbacks = []

        if entries is None:
            entries = sys.path

        for entry in entries:
            self.add_entry(entry)

    @classmethod
    def _build_master(cls):
        """
        Prepare the master working set.
        """
        ws = cls()
        try:
            from __main__ import __requires__
        except ImportError:
            # The main program does not list any requirements
            return ws

        # ensure the requirements are met
        try:
            ws.require(__requires__)
        except VersionConflict:
            # sys.path as-is cannot satisfy __requires__; rebuild from them
            return cls._build_from_requirements(__requires__)

        return ws

    @classmethod
    def _build_from_requirements(cls, req_spec):
        """
        Build a working set from a requirement spec. Rewrites sys.path.
        """
        # try it without defaults already on sys.path
        # by starting with an empty path
        ws = cls([])
        reqs = parse_requirements(req_spec)
        dists = ws.resolve(reqs, Environment())
        for dist in dists:
            ws.add(dist)

        # add any missing entries from sys.path
        for entry in sys.path:
            if entry not in ws.entries:
                ws.add_entry(entry)

        # then copy back to sys.path
        sys.path[:] = ws.entries
        return ws

    def add_entry(self, entry):
        """Add a path item to ``.entries``, finding any distributions on it

        ``find_distributions(entry, True)`` is used to find distributions
        corresponding to the path entry, and they are added. `entry` is
        always appended to ``.entries``, even if it is already present.
        (This is because ``sys.path`` can contain the same value more than
        once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
        equal ``sys.path``.)
        """
        self.entry_keys.setdefault(entry, [])
        self.entries.append(entry)
        for dist in find_distributions(entry, True):
            self.add(dist, entry, False)

    def __contains__(self, dist):
        """True if `dist` is the active distribution for its project"""
        return self.by_key.get(dist.key) == dist

    def find(self, req):
        """Find a distribution matching requirement `req`

        If there is an active distribution for the requested project, this
        returns it as long as it meets the version requirement specified by
        `req`. But, if there is an active distribution for the project and it
        does *not* meet the `req` requirement, ``VersionConflict`` is raised.
        If there is no active distribution for the requested project, ``None``
        is returned.
        """
        dist = self.by_key.get(req.key)
        if dist is not None and dist not in req:
            # XXX add more info
            raise VersionConflict(dist, req)
        return dist

    def iter_entry_points(self, group, name=None):
        """Yield entry point objects from `group` matching `name`

        If `name` is None, yields all entry points in `group` from all
        distributions in the working set, otherwise only ones matching
        both `group` and `name` are yielded (in distribution order).
        """
        for dist in self:
            entries = dist.get_entry_map(group)
            if name is None:
                for ep in entries.values():
                    yield ep
            elif name in entries:
                yield entries[name]

    def run_script(self, requires, script_name):
        """Locate distribution for `requires` and run `script_name` script"""
        # run in the caller's global namespace, emptied except for __name__,
        # so the script executes as if it were the main program
        ns = sys._getframe(1).f_globals
        name = ns['__name__']
        ns.clear()
        ns['__name__'] = name
        self.require(requires)[0].run_script(script_name, ns)

    def __iter__(self):
        """Yield distributions for non-duplicate projects in the working set

        The yield order is the order in which the items' path entries were
        added to the working set.
        """
        seen = {}
        for item in self.entries:
            if item not in self.entry_keys:
                # workaround a cache issue
                continue

            for key in self.entry_keys[item]:
                if key not in seen:
                    seen[key] = 1
                    yield self.by_key[key]

    def add(self, dist, entry=None, insert=True, replace=False):
        """Add `dist` to working set, associated with `entry`

        If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
        On exit from this routine, `entry` is added to the end of the working
        set's ``.entries`` (if it wasn't already present).

        `dist` is only added to the working set if it's for a project that
        doesn't already have a distribution in the set, unless `replace=True`.
        If it's added, any callbacks registered with the ``subscribe()`` method
        will be called.
        """
        if insert:
            dist.insert_on(self.entries, entry, replace=replace)

        if entry is None:
            entry = dist.location
        keys = self.entry_keys.setdefault(entry, [])
        # index the dist's key under its own location as well as `entry`
        keys2 = self.entry_keys.setdefault(dist.location, [])
        if not replace and dist.key in self.by_key:
            # ignore hidden distros
            return

        self.by_key[dist.key] = dist
        if dist.key not in keys:
            keys.append(dist.key)
        if dist.key not in keys2:
            keys2.append(dist.key)
        self._added_new(dist)

    def resolve(self, requirements, env=None, installer=None,
                replace_conflicting=False, extras=None):
        """List all distributions needed to (recursively) meet `requirements`

        `requirements` must be a sequence of ``Requirement`` objects. `env`,
        if supplied, should be an ``Environment`` instance. If
        not supplied, it defaults to all distributions available within any
        entry or distribution in the working set. `installer`, if supplied,
        will be invoked with each requirement that cannot be met by an
        already-installed distribution; it should return a ``Distribution`` or
        ``None``.

        Unless `replace_conflicting=True`, raises a VersionConflict exception
        if any requirements are found on the path that have the correct name
        but the wrong version. Otherwise, if an `installer` is supplied it
        will be invoked to obtain the correct version of the requirement and
        activate it.

        `extras` is a list of the extras to be used with these requirements.
        This is important because extra requirements may look like `my_req;
        extra = "my_extra"`, which would otherwise be interpreted as a purely
        optional requirement. Instead, we want to be able to assert that these
        requirements are truly required.
        """

        # set up the stack
        requirements = list(requirements)[::-1]
        # set of processed requirements
        processed = {}
        # key -> dist
        best = {}
        to_activate = []

        req_extras = _ReqExtras()

        # Mapping of requirement to set of distributions that required it;
        # useful for reporting info about conflicts.
        required_by = collections.defaultdict(set)

        while requirements:
            # process dependencies breadth-first
            req = requirements.pop(0)
            if req in processed:
                # Ignore cyclic or redundant dependencies
                continue

            if not req_extras.markers_pass(req, extras):
                continue

            dist = best.get(req.key)
            if dist is None:
                # Find the best distribution and add it to the map
                dist = self.by_key.get(req.key)
                if dist is None or (dist not in req and replace_conflicting):
                    ws = self
                    if env is None:
                        if dist is None:
                            env = Environment(self.entries)
                        else:
                            # Use an empty environment and workingset to avoid
                            # any further conflicts with the conflicting
                            # distribution
                            env = Environment([])
                            ws = WorkingSet([])
                    dist = best[req.key] = env.best_match(
                        req, ws, installer,
                        replace_conflicting=replace_conflicting
                    )
                    if dist is None:
                        requirers = required_by.get(req, None)
                        raise DistributionNotFound(req, requirers)
                to_activate.append(dist)
            if dist not in req:
                # Oops, the "best" so far conflicts with a dependency
                dependent_req = required_by[req]
                raise VersionConflict(dist, req).with_context(dependent_req)

            # push the new requirements onto the stack
            new_requirements = dist.requires(req.extras)[::-1]
            requirements.extend(new_requirements)

            # Register the new requirements needed by req
            for new_requirement in new_requirements:
                required_by[new_requirement].add(req.project_name)
                req_extras[new_requirement] = req.extras

            processed[req] = True

        # return list of distros to activate
        return to_activate

    def find_plugins(
            self, plugin_env, full_env=None, installer=None, fallback=True):
        """Find all activatable distributions in `plugin_env`

        Example usage::

            distributions, errors = working_set.find_plugins(
                Environment(plugin_dirlist)
            )
            # add plugins+libs to sys.path
            map(working_set.add, distributions)
            # display errors
            print('Could not load', errors)

        The `plugin_env` should be an ``Environment`` instance that contains
        only distributions that are in the project's "plugin directory" or
        directories. The `full_env`, if supplied, should be an ``Environment``
        contains all currently-available distributions. If `full_env` is not
        supplied, one is created automatically from the ``WorkingSet`` this
        method is called on, which will typically mean that every directory on
        ``sys.path`` will be scanned for distributions.

        `installer` is a standard installer callback as used by the
        ``resolve()`` method. The `fallback` flag indicates whether we should
        attempt to resolve older versions of a plugin if the newest version
        cannot be resolved.

        This method returns a 2-tuple: (`distributions`, `error_info`), where
        `distributions` is a list of the distributions found in `plugin_env`
        that were loadable, along with any other distributions that are needed
        to resolve their dependencies. `error_info` is a dictionary mapping
        unloadable plugin distributions to an exception instance describing the
        error that occurred. Usually this will be a ``DistributionNotFound`` or
        ``VersionConflict`` instance.
        """

        plugin_projects = list(plugin_env)
        # scan project names in alphabetic order
        plugin_projects.sort()

        error_info = {}
        distributions = {}

        if full_env is None:
            env = Environment(self.entries)
            env += plugin_env
        else:
            env = full_env + plugin_env

        shadow_set = self.__class__([])
        # put all our entries in shadow_set
        list(map(shadow_set.add, self))

        for project_name in plugin_projects:

            for dist in plugin_env[project_name]:

                req = [dist.as_requirement()]

                try:
                    resolvees = shadow_set.resolve(req, env, installer)

                except ResolutionError as v:
                    # save error info
                    error_info[dist] = v
                    if fallback:
                        # try the next older version of project
                        continue
                    else:
                        # give up on this project, keep going
                        break

                else:
                    list(map(shadow_set.add, resolvees))
                    distributions.update(dict.fromkeys(resolvees))

                    # success, no need to try any more versions of this project
                    break

        distributions = list(distributions)
        distributions.sort()

        return distributions, error_info

    def require(self, *requirements):
        """Ensure that distributions matching `requirements` are activated

        `requirements` must be a string or a (possibly-nested) sequence
        thereof, specifying the distributions and versions required. The
        return value is a sequence of the distributions that needed to be
        activated to fulfill the requirements; all relevant distributions are
        included, even if they were already activated in this working set.
        """
        needed = self.resolve(parse_requirements(requirements))

        for dist in needed:
            self.add(dist)

        return needed

    def subscribe(self, callback, existing=True):
        """Invoke `callback` for all distributions

        If `existing=True` (default),
        call on all existing ones, as well.
        """
        if callback in self.callbacks:
            return
        self.callbacks.append(callback)
        if not existing:
            return
        for dist in self:
            callback(dist)

    def _added_new(self, dist):
        # notify all subscribed callbacks about the newly added dist
        for callback in self.callbacks:
            callback(dist)

    def __getstate__(self):
        # shallow copies, so later mutation of the set doesn't alter the
        # snapshot (and vice versa)
        return (
            self.entries[:], self.entry_keys.copy(), self.by_key.copy(),
            self.callbacks[:]
        )

    def __setstate__(self, e_k_b_c):
        entries, keys, by_key, callbacks = e_k_b_c
        self.entries = entries[:]
        self.entry_keys = keys.copy()
        self.by_key = by_key.copy()
        self.callbacks = callbacks[:]
929
930
931class _ReqExtras(dict):
932 """
933 Map each requirement to the extras that demanded it.
934 """
935
936 def markers_pass(self, req, extras=None):
937 """
938 Evaluate markers for req against each extra that
939 demanded it.
940
941 Return False if the req has a marker and fails
942 evaluation. Otherwise, return True.
943 """
944 extra_evals = (
945 req.marker.evaluate({'extra': extra})
946 for extra in self.get(req, ()) + (extras or (None,))
947 )
948 return not req.marker or any(extra_evals)
949
950
class Environment(object):
    """Searchable snapshot of distributions on a search path"""

    def __init__(
            self, search_path=None, platform=get_supported_platform(),
            python=PY_MAJOR):
        """Snapshot distributions available on a search path

        Any distributions found on `search_path` are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items. If not
        supplied, ``sys.path`` is used.

        `platform` is an optional string specifying the name of the platform
        that platform-specific distributions must be compatible with. If
        unspecified, it defaults to the current platform. `python` is an
        optional string naming the desired version of Python (e.g. ``'3.3'``);
        it defaults to the current version.

        You may explicitly set `platform` (and/or `python`) to ``None`` if you
        wish to map *all* distributions, not just those compatible with the
        running platform or Python version.
        """
        # NOTE: the `platform`/`python` defaults are evaluated once, at class
        # definition time, not per call -- long-standing intentional behavior.
        # Maps lowercased project name -> list of Distributions, kept
        # newest-to-oldest by add().
        self._distmap = {}
        self.platform = platform
        self.python = python
        self.scan(search_path)

    def can_add(self, dist):
        """Is distribution `dist` acceptable for this environment?

        The distribution must match the platform and python version
        requirements specified when this environment was created, or False
        is returned.
        """
        # A dist with no recorded py_version is treated as version-agnostic.
        py_compat = (
            self.python is None
            or dist.py_version is None
            or dist.py_version == self.python
        )
        return py_compat and compatible_platforms(dist.platform, self.platform)

    def remove(self, dist):
        """Remove `dist` from the environment"""
        # Raises KeyError/ValueError if `dist` was never added.
        self._distmap[dist.key].remove(dist)

    def scan(self, search_path=None):
        """Scan `search_path` for distributions usable in this environment

        Any distributions found are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items. If not
        supplied, ``sys.path`` is used. Only distributions conforming to
        the platform/python version defined at initialization are added.
        """
        if search_path is None:
            search_path = sys.path

        for item in search_path:
            for dist in find_distributions(item):
                self.add(dist)

    def __getitem__(self, project_name):
        """Return a newest-to-oldest list of distributions for `project_name`

        Uses case-insensitive `project_name` comparison, assuming all the
        project's distributions use their project's name converted to all
        lowercase as their key.

        """
        distribution_key = project_name.lower()
        return self._distmap.get(distribution_key, [])

    def add(self, dist):
        """Add `dist` if we ``can_add()`` it and it has not already been added
        """
        if self.can_add(dist) and dist.has_version():
            dists = self._distmap.setdefault(dist.key, [])
            if dist not in dists:
                dists.append(dist)
                # Keep each project's list newest-first so __getitem__ and
                # best_match() see the preferred candidate first.
                dists.sort(key=operator.attrgetter('hashcmp'), reverse=True)

    def best_match(
            self, req, working_set, installer=None, replace_conflicting=False):
        """Find distribution best matching `req` and usable on `working_set`

        This calls the ``find(req)`` method of the `working_set` to see if a
        suitable distribution is already active. (This may raise
        ``VersionConflict`` if an unsuitable version of the project is already
        active in the specified `working_set`.) If a suitable distribution
        isn't active, this method returns the newest distribution in the
        environment that meets the ``Requirement`` in `req`. If no suitable
        distribution is found, and `installer` is supplied, then the result of
        calling the environment's ``obtain(req, installer)`` method will be
        returned.
        """
        try:
            dist = working_set.find(req)
        except VersionConflict:
            if not replace_conflicting:
                raise
            # Caller asked us to ignore the active-but-conflicting dist and
            # look for a replacement in this environment instead.
            dist = None
        if dist is not None:
            return dist
        # self[req.key] is sorted newest-first, so the first match wins.
        for dist in self[req.key]:
            if dist in req:
                return dist
        # try to download/install
        return self.obtain(req, installer)

    def obtain(self, requirement, installer=None):
        """Obtain a distribution matching `requirement` (e.g. via download)

        Obtain a distro that matches requirement (e.g. via download). In the
        base ``Environment`` class, this routine just returns
        ``installer(requirement)``, unless `installer` is None, in which case
        None is returned instead. This method is a hook that allows subclasses
        to attempt other ways of obtaining a distribution before falling back
        to the `installer` argument."""
        if installer is not None:
            return installer(requirement)

    def __iter__(self):
        """Yield the unique project names of the available distributions"""
        for key in self._distmap.keys():
            # Skip keys whose distribution lists were emptied by remove().
            if self[key]:
                yield key

    def __iadd__(self, other):
        """In-place addition of a distribution or environment"""
        if isinstance(other, Distribution):
            self.add(other)
        elif isinstance(other, Environment):
            for project in other:
                for dist in other[project]:
                    self.add(dist)
        else:
            raise TypeError("Can't add %r to environment" % (other,))
        return self

    def __add__(self, other):
        """Add an environment or distribution to an environment"""
        # platform=None/python=None so the union accepts every dist from
        # both operands, regardless of this environment's own filters.
        new = self.__class__([], platform=None, python=None)
        for env in self, other:
            new += env
        return new
1095
1096
# Backward-compatibility alias: older releases exposed this class as
# AvailableDistributions before it was renamed to Environment.
AvailableDistributions = Environment
1099
1100
class ExtractionError(RuntimeError):
    """Raised when a resource cannot be extracted to the egg cache.

    Instances carry extra context attached by the raising resource manager:

    manager
        The resource manager that raised this exception

    cache_path
        The base directory for resource extraction

    original_error
        The exception instance that caused extraction to fail
    """
1115
1116
class ResourceManager:
    """Manage resource extraction and packages"""
    # Base directory for extraction; None means "use get_default_cache()".
    extraction_path = None

    def __init__(self):
        # Paths handed out by get_cache_path(), tracked so cleanup can find
        # them later.
        self.cached_files = {}

    def resource_exists(self, package_or_requirement, resource_name):
        """Does the named resource exist?"""
        return get_provider(package_or_requirement).has_resource(resource_name)

    def resource_isdir(self, package_or_requirement, resource_name):
        """Is the named resource an existing directory?"""
        return get_provider(package_or_requirement).resource_isdir(
            resource_name
        )

    def resource_filename(self, package_or_requirement, resource_name):
        """Return a true filesystem path for specified resource"""
        return get_provider(package_or_requirement).get_resource_filename(
            self, resource_name
        )

    def resource_stream(self, package_or_requirement, resource_name):
        """Return a readable file-like object for specified resource"""
        return get_provider(package_or_requirement).get_resource_stream(
            self, resource_name
        )

    def resource_string(self, package_or_requirement, resource_name):
        """Return specified resource as a string"""
        return get_provider(package_or_requirement).get_resource_string(
            self, resource_name
        )

    def resource_listdir(self, package_or_requirement, resource_name):
        """List the contents of the named resource directory"""
        return get_provider(package_or_requirement).resource_listdir(
            resource_name
        )

    def extraction_error(self):
        """Give an error message for problems extracting file(s)"""

        # Wrap whatever exception is currently being handled in an
        # ExtractionError carrying a user-actionable message.
        old_exc = sys.exc_info()[1]
        cache_path = self.extraction_path or get_default_cache()

        tmpl = textwrap.dedent("""
            Can't extract file(s) to egg cache

            The following error occurred while trying to extract file(s)
            to the Python egg cache:

              {old_exc}

            The Python egg cache directory is currently set to:

              {cache_path}

            Perhaps your account does not have write access to this directory?
            You can change the cache directory by setting the PYTHON_EGG_CACHE
            environment variable to point to an accessible directory.
            """).lstrip()
        err = ExtractionError(tmpl.format(**locals()))
        err.manager = self
        err.cache_path = cache_path
        err.original_error = old_exc
        raise err

    def get_cache_path(self, archive_name, names=()):
        """Return absolute location in cache for `archive_name` and `names`

        The parent directory of the resulting path will be created if it does
        not already exist. `archive_name` should be the base filename of the
        enclosing egg (which may not be the name of the enclosing zipfile!),
        including its ".egg" extension. `names`, if provided, should be a
        sequence of path name parts "under" the egg's extraction location.

        This method should only be called by resource providers that need to
        obtain an extraction location, and only for names they intend to
        extract, as it tracks the generated names for possible cleanup later.
        """
        extract_path = self.extraction_path or get_default_cache()
        target_path = os.path.join(extract_path, archive_name + '-tmp', *names)
        try:
            _bypass_ensure_directory(target_path)
        except Exception:
            # Translate any directory-creation failure into ExtractionError.
            self.extraction_error()

        self._warn_unsafe_extraction_path(extract_path)

        # Record the path so later cleanup knows what was handed out.
        self.cached_files[target_path] = 1
        return target_path

    @staticmethod
    def _warn_unsafe_extraction_path(path):
        """
        If the default extraction path is overridden and set to an insecure
        location, such as /tmp, it opens up an opportunity for an attacker to
        replace an extracted file with an unauthorized payload. Warn the user
        if a known insecure location is used.

        See Distribute #375 for more details.
        """
        if os.name == 'nt' and not path.startswith(os.environ['windir']):
            # On Windows, permissions are generally restrictive by default
            # and temp directories are not writable by other users, so
            # bypass the warning.
            return
        mode = os.stat(path).st_mode
        # Writable by group or others -> another user could swap payloads.
        if mode & stat.S_IWOTH or mode & stat.S_IWGRP:
            msg = (
                "%s is writable by group/others and vulnerable to attack "
                "when "
                "used with get_resource_filename. Consider a more secure "
                "location (set with .set_extraction_path or the "
                "PYTHON_EGG_CACHE environment variable)." % path
            )
            warnings.warn(msg, UserWarning)

    def postprocess(self, tempname, filename):
        """Perform any platform-specific postprocessing of `tempname`

        This is where Mac header rewrites should be done; other platforms don't
        have anything special they should do.

        Resource providers should call this method ONLY after successfully
        extracting a compressed resource. They must NOT call it on resources
        that are already in the filesystem.

        `tempname` is the current (temporary) name of the file, and `filename`
        is the name it will be renamed to by the caller after this routine
        returns.
        """

        if os.name == 'posix':
            # Make the resource executable
            mode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777
            os.chmod(tempname, mode)

    def set_extraction_path(self, path):
        """Set the base path where resources will be extracted to, if needed.

        If you do not call this routine before any extractions take place, the
        path defaults to the return value of ``get_default_cache()``. (Which
        is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
        platform-specific fallbacks. See that routine's documentation for more
        details.)

        Resources are extracted to subdirectories of this path based upon
        information given by the ``IResourceProvider``. You may set this to a
        temporary directory, but then you must call ``cleanup_resources()`` to
        delete the extracted files when done. There is no guarantee that
        ``cleanup_resources()`` will be able to remove all extracted files.

        (Note: you may not change the extraction path for a given resource
        manager once resources have been extracted, unless you first call
        ``cleanup_resources()``.)
        """
        if self.cached_files:
            raise ValueError(
                "Can't change extraction path, files already extracted"
            )

        self.extraction_path = path

    def cleanup_resources(self, force=False):
        """
        Delete all extracted resource files and directories, returning a list
        of the file and directory names that could not be successfully removed.
        This function does not have any concurrency protection, so it should
        generally only be called when the extraction path is a temporary
        directory exclusive to a single process. This method is not
        automatically called; you must call it explicitly or register it as an
        ``atexit`` function if you wish to ensure cleanup of a temporary
        directory used for extractions.
        """
        # XXX not implemented: despite the docstring, nothing is removed and
        # the method implicitly returns None.
1295
1296
def get_default_cache():
    """
    Locate the directory used for caching extracted eggs.

    Prefer the ``PYTHON_EGG_CACHE`` environment variable when set (and
    non-empty); otherwise fall back to the platform-appropriate per-user
    cache directory for an application named "Python-Eggs".
    """
    env_cache = os.environ.get('PYTHON_EGG_CACHE')
    if env_cache:
        return env_cache
    return appdirs.user_cache_dir(appname='Python-Eggs')
1307
1308
def safe_name(name):
    """Convert an arbitrary string to a standard distribution name

    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
    """
    forbidden = re.compile('[^A-Za-z0-9.]+')
    return forbidden.sub('-', name)
1315
1316
def safe_version(version):
    """
    Convert an arbitrary string to a standard version string
    """
    try:
        # normalize the version
        return str(packaging.version.Version(version))
    except packaging.version.InvalidVersion:
        pass
    # Legacy fallback: spaces become dots, remaining junk becomes dashes.
    fallback = version.replace(' ', '.')
    return re.sub('[^A-Za-z0-9.]+', '-', fallback)
1327
1328
def safe_extra(extra):
    """Convert an arbitrary string to a standard 'extra' name

    Any runs of non-alphanumeric characters are replaced with a single '_',
    and the result is always lowercased.
    """
    sanitized = re.sub('[^A-Za-z0-9.-]+', '_', extra)
    return sanitized.lower()
1336
1337
def to_filename(name):
    """Convert a project or version name to its filename-escaped form

    Any '-' characters are currently replaced with '_'.
    """
    return '_'.join(name.split('-'))
1344
1345
def invalid_marker(text):
    """
    Validate text as a PEP 508 environment marker; return an exception
    if invalid or False otherwise.
    """
    try:
        evaluate_marker(text)
    except SyntaxError as exc:
        # Strip location attributes; they refer to no real source file.
        exc.filename = None
        exc.lineno = None
        return exc
    else:
        return False
1358
1359
def evaluate_marker(text, extra=None):
    """
    Evaluate a PEP 508 environment marker.
    Return a boolean indicating the marker result in this environment.
    Raise SyntaxError if marker is invalid.

    This implementation uses the 'pyparsing' module.
    """
    try:
        return packaging.markers.Marker(text).evaluate()
    except packaging.markers.InvalidMarker as err:
        raise SyntaxError(err)
1373
1374
class NullProvider:
    """Try to implement resources and metadata for arbitrary PEP 302 loaders"""

    # Filled in by subclasses (e.g. EggProvider) when metadata is located.
    egg_name = None
    egg_info = None
    loader = None

    def __init__(self, module):
        self.loader = getattr(module, '__loader__', None)
        self.module_path = os.path.dirname(getattr(module, '__file__', ''))

    def get_resource_filename(self, manager, resource_name):
        """Map a resource name onto a path under module_path."""
        return self._fn(self.module_path, resource_name)

    def get_resource_stream(self, manager, resource_name):
        """Return a binary stream over the resource's full contents."""
        return io.BytesIO(self.get_resource_string(manager, resource_name))

    def get_resource_string(self, manager, resource_name):
        """Return the raw bytes of the named resource."""
        return self._get(self._fn(self.module_path, resource_name))

    def has_resource(self, resource_name):
        return self._has(self._fn(self.module_path, resource_name))

    def has_metadata(self, name):
        # Falsy (None) when no egg_info directory was located.
        return self.egg_info and self._has(self._fn(self.egg_info, name))

    def get_metadata(self, name):
        """Return the named metadata file's contents as text ('' if none)."""
        if not self.egg_info:
            return ""
        value = self._get(self._fn(self.egg_info, name))
        # _get returns bytes; decode on Python 3 only.
        return value.decode('utf-8') if six.PY3 else value

    def get_metadata_lines(self, name):
        return yield_lines(self.get_metadata(name))

    def resource_isdir(self, resource_name):
        return self._isdir(self._fn(self.module_path, resource_name))

    def metadata_isdir(self, name):
        return self.egg_info and self._isdir(self._fn(self.egg_info, name))

    def resource_listdir(self, resource_name):
        return self._listdir(self._fn(self.module_path, resource_name))

    def metadata_listdir(self, name):
        if self.egg_info:
            return self._listdir(self._fn(self.egg_info, name))
        return []

    def run_script(self, script_name, namespace):
        """Execute the named metadata script in `namespace`.

        Raises ResolutionError if the script is not present in metadata.
        """
        script = 'scripts/' + script_name
        if not self.has_metadata(script):
            raise ResolutionError(
                "Script {script!r} not found in metadata at {self.egg_info!r}"
                .format(**locals()),
            )
        # Normalize all line endings to '\n' before compiling.
        script_text = self.get_metadata(script).replace('\r\n', '\n')
        script_text = script_text.replace('\r', '\n')
        script_filename = self._fn(self.egg_info, script)
        namespace['__file__'] = script_filename
        if os.path.exists(script_filename):
            # Script exists on the real filesystem: run it from there so
            # tracebacks and debuggers see the actual file.
            source = open(script_filename).read()
            code = compile(source, script_filename, 'exec')
            exec(code, namespace, namespace)
        else:
            # No real file (e.g. zipped egg): seed linecache so tracebacks
            # can still display the script's source lines.
            from linecache import cache
            cache[script_filename] = (
                len(script_text), 0, script_text.split('\n'), script_filename
            )
            script_code = compile(script_text, script_filename, 'exec')
            exec(script_code, namespace, namespace)

    # The hooks below must be overridden by loader-specific subclasses.
    def _has(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _isdir(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _listdir(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _fn(self, base, resource_name):
        # Resource names use '/' separators per the package resource API;
        # translate them to the local filesystem convention here.
        if resource_name:
            return os.path.join(base, *resource_name.split('/'))
        return base

    def _get(self, path):
        if hasattr(self.loader, 'get_data'):
            return self.loader.get_data(path)
        raise NotImplementedError(
            "Can't perform this operation for loaders without 'get_data()'"
        )
1473
1474
# Default fallback: loaders with no more specific registration get
# NullProvider, whose filesystem hooks raise NotImplementedError.
register_loader_type(object, NullProvider)
1476
1477
class EggProvider(NullProvider):
    """Provider based on a virtual filesystem"""

    def __init__(self, module):
        NullProvider.__init__(self, module)
        self._setup_prefix()

    def _setup_prefix(self):
        # Metadata may live in an egg nested inside a "basket" directory of
        # several eggs, so walk upward from module_path rather than trusting
        # the loader's .archive attribute.
        path = self.module_path
        previous = None
        while path != previous:
            if _is_egg_path(path):
                self.egg_name = os.path.basename(path)
                self.egg_info = os.path.join(path, 'EGG-INFO')
                self.egg_root = path
                break
            previous = path
            path, _ = os.path.split(path)
1498
1499
class DefaultProvider(EggProvider):
    """Provides access to package resources in the filesystem"""

    def _has(self, path):
        return os.path.exists(path)

    def _isdir(self, path):
        return os.path.isdir(path)

    def _listdir(self, path):
        return os.listdir(path)

    def get_resource_stream(self, manager, resource_name):
        # Stream straight from disk instead of buffering through BytesIO
        # as the NullProvider base does.
        return open(self._fn(self.module_path, resource_name), 'rb')

    def _get(self, path):
        with open(path, 'rb') as stream:
            return stream.read()

    @classmethod
    def _register(cls):
        # Register against importlib's SourceFileLoader; type(None) is a
        # harmless registration target when that loader class is absent.
        loader_cls = getattr(
            importlib_machinery,
            'SourceFileLoader',
            type(None),
        )
        register_loader_type(loader_cls, cls)
1527
1528
# Hook DefaultProvider up to the standard filesystem import machinery.
DefaultProvider._register()
1530
1531
class EmptyProvider(NullProvider):
    """Provider that returns nothing for all requests"""

    module_path = None

    def _has(self, path):
        return False

    def _isdir(self, path):
        return False

    def _get(self, path):
        return ''

    def _listdir(self, path):
        return []

    def __init__(self):
        # Deliberately skip NullProvider.__init__: there is no module.
        pass
1547
1548
# Shared instance of the do-nothing provider.
empty_provider = EmptyProvider()
1550
1551
class ZipManifests(dict):
    """
    zip manifest builder
    """

    @classmethod
    def build(cls, path):
        """
        Build a dictionary similar to the zipimport directory
        caches, except instead of tuples, store ZipInfo objects.

        Use a platform-specific path separator (os.sep) for the path keys
        for compatibility with pypy on Windows.
        """
        with zipfile.ZipFile(path) as archive:
            return {
                name.replace('/', os.sep): archive.getinfo(name)
                for name in archive.namelist()
            }

    load = build
1577
1578
class MemoizedZipManifests(ZipManifests):
    """
    Memoized zipfile manifests.
    """
    manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime')

    def load(self, path):
        """
        Load a manifest at path or return a suitable manifest already loaded.
        """
        key = os.path.normpath(path)
        current_mtime = os.stat(key).st_mtime

        # Rebuild only when unseen or when the archive changed on disk.
        cached = self.get(key)
        if cached is None or cached.mtime != current_mtime:
            cached = self.manifest_mod(self.build(key), current_mtime)
            self[key] = cached

        return cached.manifest
1597
1598
class ZipProvider(EggProvider):
    """Resource support for zips and eggs"""

    # Cached contents of native_libs.txt / eager_resources.txt; see
    # _get_eager_resources().
    eagers = None
    # Shared across instances: manifests are keyed/memoized per archive path.
    _zip_manifests = MemoizedZipManifests()

    def __init__(self, module):
        EggProvider.__init__(self, module)
        self.zip_pre = self.loader.archive + os.sep

    def _zipinfo_name(self, fspath):
        # Convert a virtual filename (full path to file) into a zipfile subpath
        # usable with the zipimport directory cache for our target archive
        fspath = fspath.rstrip(os.sep)
        if fspath == self.loader.archive:
            return ''
        if fspath.startswith(self.zip_pre):
            return fspath[len(self.zip_pre):]
        raise AssertionError(
            "%s is not a subpath of %s" % (fspath, self.zip_pre)
        )

    def _parts(self, zip_path):
        # Convert a zipfile subpath into an egg-relative path part list.
        # pseudo-fs path
        fspath = self.zip_pre + zip_path
        if fspath.startswith(self.egg_root + os.sep):
            return fspath[len(self.egg_root) + 1:].split(os.sep)
        raise AssertionError(
            "%s is not a subpath of %s" % (fspath, self.egg_root)
        )

    @property
    def zipinfo(self):
        # Manifest of this archive: zip subpath -> ZipInfo (memoized).
        return self._zip_manifests.load(self.loader.archive)

    def get_resource_filename(self, manager, resource_name):
        """Extract the resource to the egg cache and return its real path."""
        if not self.egg_name:
            raise NotImplementedError(
                "resource_filename() only supported for .egg, not .zip"
            )
        # no need to lock for extraction, since we use temp names
        zip_path = self._resource_to_zip(resource_name)
        eagers = self._get_eager_resources()
        # If this resource is listed as eager, extract ALL eager resources
        # together (e.g. native libraries that must coexist on disk).
        if '/'.join(self._parts(zip_path)) in eagers:
            for name in eagers:
                self._extract_resource(manager, self._eager_to_zip(name))
        return self._extract_resource(manager, zip_path)

    @staticmethod
    def _get_date_and_size(zip_stat):
        size = zip_stat.file_size
        # ymdhms+wday, yday, dst
        date_time = zip_stat.date_time + (0, 0, -1)
        # 1980 offset already done
        timestamp = time.mktime(date_time)
        return timestamp, size

    def _extract_resource(self, manager, zip_path):
        # Directories extract recursively, one child at a time.
        if zip_path in self._index():
            for name in self._index()[zip_path]:
                last = self._extract_resource(
                    manager, os.path.join(zip_path, name)
                )
            # return the extracted directory name
            return os.path.dirname(last)

        timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])

        if not WRITE_SUPPORT:
            raise IOError('"os.rename" and "os.unlink" are not supported '
                          'on this platform')
        try:

            real_path = manager.get_cache_path(
                self.egg_name, self._parts(zip_path)
            )

            # Skip extraction if a byte-identical, same-mtime copy exists.
            if self._is_current(real_path, zip_path):
                return real_path

            # Write to a unique temp name, then atomically rename into
            # place, so concurrent extractors never see partial files.
            outf, tmpnam = _mkstemp(
                ".$extract",
                dir=os.path.dirname(real_path),
            )
            os.write(outf, self.loader.get_data(zip_path))
            os.close(outf)
            utime(tmpnam, (timestamp, timestamp))
            manager.postprocess(tmpnam, real_path)

            try:
                rename(tmpnam, real_path)

            except os.error:
                if os.path.isfile(real_path):
                    if self._is_current(real_path, zip_path):
                        # the file became current since it was checked above,
                        # so proceed.
                        return real_path
                    # Windows, del old file and retry
                    elif os.name == 'nt':
                        unlink(real_path)
                        rename(tmpnam, real_path)
                        return real_path
                raise

        except os.error:
            # report a user-friendly error
            manager.extraction_error()

        return real_path

    def _is_current(self, file_path, zip_path):
        """
        Return True if the file_path is current for this zip_path
        """
        timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
        if not os.path.isfile(file_path):
            return False
        stat = os.stat(file_path)
        # Cheap size/mtime screen before the full byte comparison below.
        if stat.st_size != size or stat.st_mtime != timestamp:
            return False
        # check that the contents match
        zip_contents = self.loader.get_data(zip_path)
        with open(file_path, 'rb') as f:
            file_contents = f.read()
        return zip_contents == file_contents

    def _get_eager_resources(self):
        # Lazily read and cache the metadata lists of eager resources.
        if self.eagers is None:
            eagers = []
            for name in ('native_libs.txt', 'eager_resources.txt'):
                if self.has_metadata(name):
                    eagers.extend(self.get_metadata_lines(name))
            self.eagers = eagers
        return self.eagers

    def _index(self):
        # Lazily build and cache a directory index: zip subpath of each
        # directory -> list of its immediate child names.
        try:
            return self._dirindex
        except AttributeError:
            ind = {}
            for path in self.zipinfo:
                parts = path.split(os.sep)
                # Walk up the ancestry, stopping at the first parent that
                # is already indexed (its own ancestors are indexed too).
                while parts:
                    parent = os.sep.join(parts[:-1])
                    if parent in ind:
                        ind[parent].append(parts[-1])
                        break
                    else:
                        ind[parent] = [parts.pop()]
            self._dirindex = ind
            return ind

    def _has(self, fspath):
        zip_path = self._zipinfo_name(fspath)
        # Present either as a file entry or as an implied directory.
        return zip_path in self.zipinfo or zip_path in self._index()

    def _isdir(self, fspath):
        return self._zipinfo_name(fspath) in self._index()

    def _listdir(self, fspath):
        return list(self._index().get(self._zipinfo_name(fspath), ()))

    def _eager_to_zip(self, resource_name):
        return self._zipinfo_name(self._fn(self.egg_root, resource_name))

    def _resource_to_zip(self, resource_name):
        return self._zipinfo_name(self._fn(self.module_path, resource_name))
1769
1770
# Modules imported from zip archives get resource support via ZipProvider.
register_loader_type(zipimport.zipimporter, ZipProvider)
1772
1773
class FileMetadata(EmptyProvider):
    """Metadata handler for standalone PKG-INFO files

    Usage::

        metadata = FileMetadata("/path/to/PKG-INFO")

    This provider rejects all data and metadata requests except for PKG-INFO,
    which is treated as existing, and will be the contents of the file at
    the provided location.
    """

    def __init__(self, path):
        self.path = path

    def has_metadata(self, name):
        # Only PKG-INFO exists, and only if the backing file is present.
        return name == 'PKG-INFO' and os.path.isfile(self.path)

    def get_metadata(self, name):
        if name != 'PKG-INFO':
            raise KeyError("No metadata except PKG-INFO is available")

        with io.open(self.path, encoding='utf-8', errors="replace") as fp:
            metadata = fp.read()
        self._warn_on_replacement(metadata)
        return metadata

    def _warn_on_replacement(self, metadata):
        # Python 2.7 compat for: replacement_char = '�'
        replacement_char = b'\xef\xbf\xbd'.decode('utf-8')
        if replacement_char not in metadata:
            return
        # The replace error handler substituted characters, so the file
        # was not valid UTF-8 -- tell the user.
        tmpl = "{self.path} could not be properly decoded in UTF-8"
        msg = tmpl.format(**locals())
        warnings.warn(msg)

    def get_metadata_lines(self, name):
        return yield_lines(self.get_metadata(name))
1811
1812
class PathMetadata(DefaultProvider):
    """Metadata provider for egg directories

    Usage::

        # Development eggs:

        egg_info = "/path/to/PackageName.egg-info"
        base_dir = os.path.dirname(egg_info)
        metadata = PathMetadata(base_dir, egg_info)
        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
        dist = Distribution(basedir, project_name=dist_name, metadata=metadata)

        # Unpacked egg directories:

        egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
        metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO'))
        dist = Distribution.from_filename(egg_path, metadata=metadata)
    """

    def __init__(self, path, egg_info):
        # `path` is the directory holding the distribution's code/resources;
        # `egg_info` is the directory holding its metadata files.
        self.module_path = path
        self.egg_info = egg_info
1836
1837
class EggMetadata(ZipProvider):
    """Metadata provider for .egg files"""

    def __init__(self, importer):
        """Create a metadata provider from a zipimporter"""
        # Bypass ZipProvider.__init__ (which expects a module object) and
        # wire the provider state straight from the zipimporter instead.
        self.zip_pre = importer.archive + os.sep
        self.loader = importer
        self.module_path = (
            os.path.join(importer.archive, importer.prefix)
            if importer.prefix
            else importer.archive
        )
        self._setup_prefix()
1851
1852
# Registry of importer type -> distribution-finder callable, populated via
# register_finder(); declared through _declare_state so pkg_resources can
# track it as module-level state.
_declare_state('dict', _distribution_finders={})
1854
1855
def register_finder(importer_type, distribution_finder):
    """Register `distribution_finder` to find distributions in sys.path items

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    handler), and `distribution_finder` is a callable that, passed a path
    item and the importer instance, yields ``Distribution`` instances found on
    that path item. See ``pkg_resources.find_on_path`` for an example."""
    # A later registration for the same importer type replaces the earlier one.
    _distribution_finders[importer_type] = distribution_finder
1864
1865
def find_distributions(path_item, only=False):
    """Yield distributions accessible via `path_item`"""
    importer = get_importer(path_item)
    # Pick the finder registered (via register_finder) for this importer type.
    finder = _find_adapter(_distribution_finders, importer)
    return finder(importer, path_item, only)
1871
1872
def find_eggs_in_zip(importer, path_item, only=False):
    """
    Find eggs in zip files; possibly multiple nested eggs.
    """
    if importer.archive.endswith('.whl'):
        # wheels are not supported with this finder
        # they don't have PKG-INFO metadata, and won't ever contain eggs
        return
    metadata = EggMetadata(importer)
    if metadata.has_metadata('PKG-INFO'):
        yield Distribution.from_filename(path_item, metadata=metadata)
    if only:
        # don't yield nested distros
        return
    for subitem in metadata.resource_listdir('/'):
        subpath = os.path.join(path_item, subitem)
        if _is_egg_path(subitem):
            # Recurse into a nested egg inside this archive.
            nested = zipimport.zipimporter(subpath)
            for dist in find_eggs_in_zip(nested, subpath):
                yield dist
        elif subitem.lower().endswith('.dist-info'):
            submeta = EggMetadata(zipimport.zipimporter(subpath))
            submeta.egg_info = subpath
            yield Distribution.from_location(path_item, subitem, submeta)
1898
1899
# zipimporter path entries are scanned for eggs with find_eggs_in_zip.
register_finder(zipimport.zipimporter, find_eggs_in_zip)
1901
1902
def find_nothing(importer, path_item, only=False):
    """Finder that yields no distributions, whatever the path item is."""
    return ()
1905
1906
# Fallback: unrecognized importer types yield no distributions.
register_finder(object, find_nothing)
1908
1909
def _by_version_descending(names):
    """
    Given a list of filenames, return them in descending order
    by version number.

    >>> names = 'bar', 'foo', 'Python-2.7.10.egg', 'Python-2.7.2.egg'
    >>> _by_version_descending(names)
    ['Python-2.7.10.egg', 'Python-2.7.2.egg', 'foo', 'bar']
    >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.egg'
    >>> _by_version_descending(names)
    ['Setuptools-1.2.3.egg', 'Setuptools-1.2.3b1.egg']
    >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.post1.egg'
    >>> _by_version_descending(names)
    ['Setuptools-1.2.3.post1.egg', 'Setuptools-1.2.3b1.egg']
    """
    def _by_version(name):
        """Parse each dash-separated component (and the extension) as a
        version so the sort compares numerically, not lexically."""
        stem, ext = os.path.splitext(name)
        components = itertools.chain(stem.split('-'), [ext])
        return [packaging.version.parse(part) for part in components]

    return sorted(names, key=_by_version, reverse=True)
1934
1935
def find_on_path(importer, path_item, only=False):
    """Yield distributions accessible on a sys.path directory"""
    path_item = _normalize_cached(path_item)

    if _is_unpacked_egg(path_item):
        # The path item itself is an unpacked egg: yield it and stop.
        egg_metadata = PathMetadata(
            path_item, os.path.join(path_item, 'EGG-INFO')
        )
        yield Distribution.from_filename(path_item, metadata=egg_metadata)
        return

    entries = safe_listdir(path_item)

    # for performance, before sorting by version,
    # screen entries for only those that will yield
    # distributions
    filtered = (
        entry
        for entry in entries
        if dist_factory(path_item, entry, only)
    )

    # scan for .egg and .egg-info in directory
    for entry in _by_version_descending(filtered):
        fullpath = os.path.join(path_item, entry)
        factory = dist_factory(path_item, entry, only)
        for dist in factory(fullpath):
            yield dist
1966
1967
def dist_factory(path_item, entry, only):
    """
    Return a dist_factory for a path_item and entry
    """
    lower = entry.lower()
    # Metadata directories/files always get a factory, even when `only`.
    if lower.endswith(('.egg-info', '.dist-info')):
        return distributions_from_metadata
    if not only and _is_egg_path(entry):
        return find_distributions
    if not only and lower.endswith('.egg-link'):
        return resolve_egg_link
    # Falsy sentinel: callable, but yields nothing.
    return NoDists()
1983
1984
class NoDists:
    """
    Falsey, callable sentinel used by ``dist_factory`` for entries that
    cannot yield distributions: false in boolean context (so entries can
    be screened out) and yields nothing when called.

    >>> bool(NoDists())
    False

    >>> list(NoDists()('anything'))
    []
    """
    def __bool__(self):
        return False
    if six.PY2:
        # Python 2 spells the boolean protocol __nonzero__.
        __nonzero__ = __bool__

    def __call__(self, fullpath):
        # Act as a distribution factory that produces no distributions.
        return iter(())
2000
2001
def safe_listdir(path):
    """
    Attempt to list contents of path, but suppress some exceptions.

    Returns an empty tuple when the path is missing, is not a
    directory, or is unreadable; any other OSError is re-raised.
    """
    _ignorable_errnos = (errno.ENOTDIR, errno.EACCES, errno.ENOENT)
    try:
        return os.listdir(path)
    except (PermissionError, NotADirectoryError):
        return ()
    except OSError as e:
        # winerror 267 is ERROR_DIRECTORY; Python 2 on Windows raises it
        # as a plain OSError rather than NotADirectoryError.
        if e.errno in _ignorable_errnos or getattr(e, "winerror", None) == 267:
            return ()
        raise
2021
2022
def distributions_from_metadata(path):
    """Yield the develop-precedence distribution described by a metadata
    file or directory at `path` (an .egg-info / .dist-info entry)."""
    root = os.path.dirname(path)
    if os.path.isdir(path):
        if not os.listdir(path):
            # empty metadata dir; skip
            return
        metadata = PathMetadata(root, path)
    else:
        metadata = FileMetadata(path)
    entry = os.path.basename(path)
    yield Distribution.from_location(
        root, entry, metadata, precedence=DEVELOP_DIST,
    )
2036
2037
def non_empty_lines(path):
    """
    Yield non-empty lines from file at path, stripped of surrounding
    whitespace.
    """
    with open(path) as f:
        for raw in f:
            stripped = raw.strip()
            if stripped:
                yield stripped
2047
2048
def resolve_egg_link(path):
    """
    Given a path to an .egg-link, resolve distributions
    present in the referenced path.

    Returns the first non-empty group of distributions found, or an
    empty tuple if the link references nothing resolvable.
    """
    base = os.path.dirname(path)
    resolved_paths = (
        os.path.join(base, ref)
        for ref in non_empty_lines(path)
    )
    dist_groups = map(find_distributions, resolved_paths)
    return next(dist_groups, ())
2061
2062
# Wire find_on_path up as the distribution finder for ordinary
# filesystem sys.path entries (legacy and importlib-based importers).
register_finder(pkgutil.ImpImporter, find_on_path)

if hasattr(importlib_machinery, 'FileFinder'):
    register_finder(importlib_machinery.FileFinder, find_on_path)

# Registries of namespace-package handlers and declared namespace
# packages. NOTE(review): _declare_state presumably registers these with
# the module's save/restore state machinery — confirm at its definition.
_declare_state('dict', _namespace_handlers={})
_declare_state('dict', _namespace_packages={})
2070
2071
def register_namespace_handler(importer_type, namespace_handler):
    """Register `namespace_handler` to declare namespace packages

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    handler), and `namespace_handler` is a callable like this::

        def namespace_handler(importer, path_entry, moduleName, module):
            # return a path_entry to use for child packages

    Namespace handlers are only called if the importer object has already
    agreed that it can handle the relevant path item, and they should only
    return a subpath if the module __path__ does not already contain an
    equivalent subpath. For an example namespace handler, see
    ``pkg_resources.file_ns_handler``.
    """
    # A later registration for the same importer type replaces the earlier one.
    _namespace_handlers[importer_type] = namespace_handler
2088
2089
def _handle_ns(packageName, path_item):
    """Ensure that named package includes a subpath of path_item (if needed)

    Returns the subpath appended to the package's __path__, or None when
    the path item does not provide the package.
    """

    importer = get_importer(path_item)
    if importer is None:
        return None
    # NOTE(review): find_module/load_module are the legacy PEP 302 loader
    # API (deprecated in modern Pythons); importers reaching here are
    # assumed to provide them.
    loader = importer.find_module(packageName)
    if loader is None:
        return None
    module = sys.modules.get(packageName)
    if module is None:
        # Create a placeholder package so the namespace can accumulate
        # path entries before (or without) a real import.
        module = sys.modules[packageName] = types.ModuleType(packageName)
        module.__path__ = []
        _set_parent_ns(packageName)
    elif not hasattr(module, '__path__'):
        raise TypeError("Not a package:", packageName)
    # Dispatch to the handler registered for this importer's type.
    handler = _find_adapter(_namespace_handlers, importer)
    subpath = handler(importer, path_item, packageName, module)
    if subpath is not None:
        # Record the new entry, execute the package's __init__, then
        # re-sort __path__ to match sys.path ordering.
        path = module.__path__
        path.append(subpath)
        loader.load_module(packageName)
        _rebuild_mod_path(path, packageName, module)
    return subpath
2114
2115
def _rebuild_mod_path(orig_path, package_name, module):
    """
    Rebuild module.__path__ ensuring that all entries are ordered
    corresponding to their sys.path order
    """
    normalized_sys_path = [_normalize_cached(p) for p in sys.path]

    def _index_or_inf(entry):
        """
        Workaround for #520 and #513: entries missing from sys.path
        sort after all present entries instead of raising.
        """
        try:
            return normalized_sys_path.index(entry)
        except ValueError:
            return float('inf')

    def _sys_path_position(path):
        """
        Return the ordinal of the path based on its position in sys.path,
        judged by the sys.path entry the package was loaded from.
        """
        segments = path.split(os.sep)
        depth = package_name.count('.') + 1
        parent = os.sep.join(segments[:-depth])
        return _index_or_inf(_normalize_cached(parent))

    if not isinstance(orig_path, list):
        # Is this behavior useful when module.__path__ is not a list?
        return

    orig_path.sort(key=_sys_path_position)
    module.__path__[:] = [_normalize_cached(p) for p in orig_path]
2147
2148
def declare_namespace(packageName):
    """Declare that package 'packageName' is a namespace package"""

    # Serialize against concurrent imports while mutating shared state.
    _imp.acquire_lock()
    try:
        if packageName in _namespace_packages:
            return

        path, parent = sys.path, None
        if '.' in packageName:
            # Recursively declare (and if necessary import) the parent
            # namespace first, then search the parent's __path__ rather
            # than sys.path for this package's entries.
            parent = '.'.join(packageName.split('.')[:-1])
            declare_namespace(parent)
            if parent not in _namespace_packages:
                __import__(parent)
            try:
                path = sys.modules[parent].__path__
            except AttributeError:
                raise TypeError("Not a package:", parent)

        # Track what packages are namespaces, so when new path items are added,
        # they can be updated
        _namespace_packages.setdefault(parent, []).append(packageName)
        _namespace_packages.setdefault(packageName, [])

        for path_item in path:
            # Ensure all the parent's path items are reflected in the child,
            # if they apply
            _handle_ns(packageName, path_item)

    finally:
        _imp.release_lock()
2180
2181
def fixup_namespace_packages(path_item, parent=None):
    """Ensure that previously-declared namespace packages include path_item"""
    _imp.acquire_lock()
    try:
        # Walk every namespace package declared under `parent`, giving
        # each the chance to claim a subpath of the new path item, then
        # recurse into its children.
        for child in _namespace_packages.get(parent, ()):
            added = _handle_ns(child, path_item)
            if added:
                fixup_namespace_packages(added, child)
    finally:
        _imp.release_lock()
2192
2193
def file_ns_handler(importer, path_item, packageName, module):
    """Compute an ns-package subpath for a filesystem or zipfile importer"""

    subpath = os.path.join(path_item, packageName.split('.')[-1])
    normalized = _normalize_cached(subpath)
    already_present = any(
        _normalize_cached(item) == normalized
        for item in module.__path__
    )
    # Only return the path if it's not already there
    if not already_present:
        return subpath
2205
2206
# Use the filesystem handler for the legacy filesystem importer, zip
# imports, and (when available) the importlib FileFinder.
register_namespace_handler(pkgutil.ImpImporter, file_ns_handler)
register_namespace_handler(zipimport.zipimporter, file_ns_handler)

if hasattr(importlib_machinery, 'FileFinder'):
    register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler)
2212
2213
def null_ns_handler(importer, path_item, packageName, module):
    """Fallback namespace handler: contributes no subpath for any importer."""
    return None
2216
2217
2218register_namespace_handler(object, null_ns_handler)
2219
2220
def normalize_path(filename):
    """Normalize a file/dir name for comparison purposes"""
    # Resolve symlinks first, then apply the platform's case convention.
    resolved = os.path.realpath(filename)
    return os.path.normcase(resolved)
2224
2225
def _normalize_cached(filename, _cache={}):
    # The mutable default is deliberate: it acts as a process-wide memo
    # of already-normalized paths, so realpath() runs once per filename.
    if filename not in _cache:
        _cache[filename] = normalize_path(filename)
    return _cache[filename]
2232
2233
def _is_egg_path(path):
    """
    Determine if given path appears to be an egg.

    Matches any path ending in ``.egg`` (case-insensitive), whether a
    zipped egg file or an unpacked egg directory.
    """
    return path.lower().endswith('.egg')
2239
2240
def _is_unpacked_egg(path):
    """
    Determine if given path appears to be an unpacked egg: a .egg-named
    directory containing an EGG-INFO/PKG-INFO metadata file.
    """
    pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
    return _is_egg_path(path) and os.path.isfile(pkg_info)
2249
2250
def _set_parent_ns(packageName):
    # Bind the (already-imported) child module as an attribute on its
    # parent package, if the name is dotted.
    parent, _, child = packageName.rpartition('.')
    if parent:
        setattr(sys.modules[parent], child, sys.modules[packageName])
2257
2258
def yield_lines(strs):
    """Yield non-empty/non-comment lines of a string or sequence"""
    if isinstance(strs, six.string_types):
        for raw in strs.splitlines():
            line = raw.strip()
            # skip blank lines/comments
            if line and not line.startswith('#'):
                yield line
    else:
        # Recurse into nested iterables of strings.
        for item in strs:
            for line in yield_lines(item):
                yield line
2271
2272
# Matches a dotted module path, e.g. "pkg.sub.mod".
MODULE = re.compile(r"\w+(\.\w+)*$").match
# Parses an egg file's stem (extension already removed) of the form
#   name[-version[-pyX.Y[-platform]]]
# where every part after the name is optional.
EGG_NAME = re.compile(
    r"""
    (?P<name>[^-]+) (
        -(?P<ver>[^-]+) (
            -py(?P<pyver>[^-]+) (
                -(?P<plat>.+)
            )?
        )?
    )?
    """,
    re.VERBOSE | re.IGNORECASE,
).match
2286
2287
class EntryPoint(object):
    """Object representing an advertised importable object"""

    def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
        if not MODULE(module_name):
            raise ValueError("Invalid module name", module_name)
        self.name = name
        self.module_name = module_name
        self.attrs = tuple(attrs)
        self.extras = tuple(extras)
        self.dist = dist

    def __str__(self):
        # Render back to the "name = module:attrs [extras]" source syntax.
        s = "%s = %s" % (self.name, self.module_name)
        if self.attrs:
            s += ':' + '.'.join(self.attrs)
        if self.extras:
            s += ' [%s]' % ','.join(self.extras)
        return s

    def __repr__(self):
        return "EntryPoint.parse(%r)" % str(self)

    def load(self, require=True, *args, **kwargs):
        """
        Require packages for this EntryPoint, then resolve it.

        Calling with any arguments (or require=False) is deprecated; use
        .resolve() and .require() separately instead.
        """
        if not require or args or kwargs:
            warnings.warn(
                "Parameters to load are deprecated. Call .resolve and "
                ".require separately.",
                DeprecationWarning,
                stacklevel=2,
            )
        if require:
            self.require(*args, **kwargs)
        return self.resolve()

    def resolve(self):
        """
        Resolve the entry point from its module and attrs.
        """
        module = __import__(self.module_name, fromlist=['__name__'], level=0)
        try:
            # Walk the dotted attribute chain, e.g. "SomeClass.method".
            return functools.reduce(getattr, self.attrs, module)
        except AttributeError as exc:
            raise ImportError(str(exc))

    def require(self, env=None, installer=None):
        if self.extras and not self.dist:
            raise UnknownExtra("Can't require() without a distribution", self)

        # Get the requirements for this entry point with all its extras and
        # then resolve them. We have to pass `extras` along when resolving so
        # that the working set knows what extras we want. Otherwise, for
        # dist-info distributions, the working set will assume that the
        # requirements for that extra are purely optional and skip over them.
        reqs = self.dist.requires(self.extras)
        items = working_set.resolve(reqs, env, installer, extras=self.extras)
        list(map(working_set.add, items))

    # Grammar for a single entry-point line:
    #   name = module[:attrs] [extras]
    pattern = re.compile(
        r'\s*'
        r'(?P<name>.+?)\s*'
        r'=\s*'
        r'(?P<module>[\w.]+)\s*'
        r'(:\s*(?P<attr>[\w.]+))?\s*'
        r'(?P<extras>\[.*\])?\s*$'
    )

    @classmethod
    def parse(cls, src, dist=None):
        """Parse a single entry point from string `src`

        Entry point syntax follows the form::

            name = some.module:some.attr [extra1, extra2]

        The entry name and module name are required, but the ``:attrs`` and
        ``[extras]`` parts are optional
        """
        m = cls.pattern.match(src)
        if not m:
            msg = "EntryPoint must be in 'name=module:attrs [extras]' format"
            raise ValueError(msg, src)
        res = m.groupdict()
        extras = cls._parse_extras(res['extras'])
        attrs = res['attr'].split('.') if res['attr'] else ()
        return cls(res['name'], res['module'], attrs, extras, dist)

    @classmethod
    def _parse_extras(cls, extras_spec):
        if not extras_spec:
            return ()
        # Reuse the requirement parser by prefixing a dummy project name;
        # a version specifier inside the brackets is invalid here.
        req = Requirement.parse('x' + extras_spec)
        if req.specs:
            raise ValueError()
        return req.extras

    @classmethod
    def parse_group(cls, group, lines, dist=None):
        """Parse an entry point group"""
        if not MODULE(group):
            raise ValueError("Invalid group name", group)
        this = {}
        for line in yield_lines(lines):
            ep = cls.parse(line, dist)
            if ep.name in this:
                raise ValueError("Duplicate entry point", group, ep.name)
            this[ep.name] = ep
        return this

    @classmethod
    def parse_map(cls, data, dist=None):
        """Parse a map of entry point groups"""
        # Accepts either a dict of group -> lines, or raw INI-style text.
        if isinstance(data, dict):
            data = data.items()
        else:
            data = split_sections(data)
        maps = {}
        for group, lines in data:
            if group is None:
                if not lines:
                    continue
                raise ValueError("Entry points must be listed in groups")
            group = group.strip()
            if group in maps:
                raise ValueError("Duplicate group name", group)
            maps[group] = cls.parse_group(group, lines, dist)
        return maps
2418
2419
def _remove_md5_fragment(location):
    """Strip a trailing '#md5=...' fragment from a URL, if present."""
    if not location:
        return ''
    parsed = urllib.parse.urlparse(location)
    fragment = parsed[-1]
    if not fragment.startswith('md5='):
        return location
    return urllib.parse.urlunparse(parsed[:-1] + ('',))
2427
2428
def _version_from_file(lines):
    """
    Given an iterable of lines from a Metadata file, return
    the value of the Version field, if present, or None otherwise.
    """
    version_lines = (
        line for line in lines
        if line.lower().startswith('version:')
    )
    line = next(version_lines, '')
    _, _, value = line.partition(':')
    return safe_version(value.strip()) or None
2440
2441
class Distribution(object):
    """Wrap an actual or potential sys.path entry w/metadata"""
    # Name of the PKG-INFO-style metadata file; subclasses override
    # (DistInfoDistribution uses 'METADATA').
    PKG_INFO = 'PKG-INFO'

    def __init__(
            self, location=None, metadata=None, project_name=None,
            version=None, py_version=PY_MAJOR, platform=None,
            precedence=EGG_DIST):
        self.project_name = safe_name(project_name or 'Unknown')
        if version is not None:
            # When omitted, the `version` property lazily reads PKG-INFO.
            self._version = safe_version(version)
        self.py_version = py_version
        self.platform = platform
        self.location = location
        self.precedence = precedence
        self._provider = metadata or empty_provider

    @classmethod
    def from_location(cls, location, basename, metadata=None, **kw):
        project_name, version, py_version, platform = [None] * 4
        basename, ext = os.path.splitext(basename)
        # Pick the subclass matching the extension
        # (.egg / .egg-info / .dist-info) and parse the egg-style name.
        if ext.lower() in _distributionImpl:
            cls = _distributionImpl[ext.lower()]

            match = EGG_NAME(basename)
            if match:
                project_name, version, py_version, platform = match.group(
                    'name', 'ver', 'pyver', 'plat'
                )
        return cls(
            location, metadata, project_name=project_name, version=version,
            py_version=py_version, platform=platform, **kw
        )._reload_version()

    def _reload_version(self):
        # Hook for subclasses to re-derive the version after construction.
        return self

    @property
    def hashcmp(self):
        # The tuple that defines equality, hashing, and sort order
        # between distributions.
        return (
            self.parsed_version,
            self.precedence,
            self.key,
            _remove_md5_fragment(self.location),
            self.py_version or '',
            self.platform or '',
        )

    def __hash__(self):
        return hash(self.hashcmp)

    def __lt__(self, other):
        return self.hashcmp < other.hashcmp

    def __le__(self, other):
        return self.hashcmp <= other.hashcmp

    def __gt__(self, other):
        return self.hashcmp > other.hashcmp

    def __ge__(self, other):
        return self.hashcmp >= other.hashcmp

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            # It's not a Distribution, so they are not equal
            return False
        return self.hashcmp == other.hashcmp

    def __ne__(self, other):
        return not self == other

    # These properties have to be lazy so that we don't have to load any
    # metadata until/unless it's actually needed. (i.e., some distributions
    # may not know their name or version without loading PKG-INFO)

    @property
    def key(self):
        try:
            return self._key
        except AttributeError:
            self._key = key = self.project_name.lower()
            return key

    @property
    def parsed_version(self):
        if not hasattr(self, "_parsed_version"):
            self._parsed_version = parse_version(self.version)

        return self._parsed_version

    def _warn_legacy_version(self):
        LV = packaging.version.LegacyVersion
        is_legacy = isinstance(self._parsed_version, LV)
        if not is_legacy:
            return

        # While an empty version is technically a legacy version and
        # is not a valid PEP 440 version, it's also unlikely to
        # actually come from someone and instead it is more likely that
        # it comes from setuptools attempting to parse a filename and
        # including it in the list. So for that we'll gate this warning
        # on if the version is anything at all or not.
        if not self.version:
            return

        tmpl = textwrap.dedent("""
            '{project_name} ({version})' is being parsed as a legacy,
            non PEP 440,
            version. You may find odd behavior and sort order.
            In particular it will be sorted as less than 0.0. It
            is recommended to migrate to PEP 440 compatible
            versions.
            """).strip().replace('\n', ' ')

        warnings.warn(tmpl.format(**vars(self)), PEP440Warning)

    @property
    def version(self):
        try:
            return self._version
        except AttributeError:
            # Fall back to the Version: header in the metadata file.
            version = _version_from_file(self._get_metadata(self.PKG_INFO))
            if version is None:
                tmpl = "Missing 'Version:' header and/or %s file"
                raise ValueError(tmpl % self.PKG_INFO, self)
            return version

    @property
    def _dep_map(self):
        """
        A map of extra to its list of (direct) requirements
        for this distribution, including the null extra.
        """
        # Name-mangled attribute, so this cache is private to this class.
        try:
            return self.__dep_map
        except AttributeError:
            self.__dep_map = self._filter_extras(self._build_dep_map())
        return self.__dep_map

    @staticmethod
    def _filter_extras(dm):
        """
        Given a mapping of extras to dependencies, strip off
        environment markers and filter out any dependencies
        not matching the markers.
        """
        for extra in list(filter(None, dm)):
            new_extra = extra
            reqs = dm.pop(extra)
            # An extra key may embed a marker, e.g. "sec:python_version<'3'".
            new_extra, _, marker = extra.partition(':')
            fails_marker = marker and (
                invalid_marker(marker)
                or not evaluate_marker(marker)
            )
            if fails_marker:
                reqs = []
            new_extra = safe_extra(new_extra) or None

            dm.setdefault(new_extra, []).extend(reqs)
        return dm

    def _build_dep_map(self):
        dm = {}
        for name in 'requires.txt', 'depends.txt':
            for extra, reqs in split_sections(self._get_metadata(name)):
                dm.setdefault(extra, []).extend(parse_requirements(reqs))
        return dm

    def requires(self, extras=()):
        """List of Requirements needed for this distro if `extras` are used"""
        dm = self._dep_map
        deps = []
        deps.extend(dm.get(None, ()))
        for ext in extras:
            try:
                deps.extend(dm[safe_extra(ext)])
            except KeyError:
                raise UnknownExtra(
                    "%s has no such extra feature %r" % (self, ext)
                )
        return deps

    def _get_metadata(self, name):
        # Yields nothing when the metadata file is absent.
        if self.has_metadata(name):
            for line in self.get_metadata_lines(name):
                yield line

    def activate(self, path=None, replace=False):
        """Ensure distribution is importable on `path` (default=sys.path)"""
        if path is None:
            path = sys.path
        self.insert_on(path, replace=replace)
        if path is sys.path:
            fixup_namespace_packages(self.location)
            for pkg in self._get_metadata('namespace_packages.txt'):
                if pkg in sys.modules:
                    declare_namespace(pkg)

    def egg_name(self):
        """Return what this distribution's standard .egg filename should be"""
        filename = "%s-%s-py%s" % (
            to_filename(self.project_name), to_filename(self.version),
            self.py_version or PY_MAJOR
        )

        if self.platform:
            filename += '-' + self.platform
        return filename

    def __repr__(self):
        if self.location:
            return "%s (%s)" % (self, self.location)
        else:
            return str(self)

    def __str__(self):
        try:
            # `version` may raise ValueError when metadata is missing.
            version = getattr(self, 'version', None)
        except ValueError:
            version = None
        version = version or "[unknown version]"
        return "%s %s" % (self.project_name, version)

    def __getattr__(self, attr):
        """Delegate all unrecognized public attributes to .metadata provider"""
        if attr.startswith('_'):
            raise AttributeError(attr)
        return getattr(self._provider, attr)

    @classmethod
    def from_filename(cls, filename, metadata=None, **kw):
        return cls.from_location(
            _normalize_cached(filename), os.path.basename(filename), metadata,
            **kw
        )

    def as_requirement(self):
        """Return a ``Requirement`` that matches this distribution exactly"""
        if isinstance(self.parsed_version, packaging.version.Version):
            spec = "%s==%s" % (self.project_name, self.parsed_version)
        else:
            # Legacy (non PEP 440) versions need arbitrary equality (===)
            # to round-trip through the requirement parser.
            spec = "%s===%s" % (self.project_name, self.parsed_version)

        return Requirement.parse(spec)

    def load_entry_point(self, group, name):
        """Return the `name` entry point of `group` or raise ImportError"""
        ep = self.get_entry_info(group, name)
        if ep is None:
            raise ImportError("Entry point %r not found" % ((group, name),))
        return ep.load()

    def get_entry_map(self, group=None):
        """Return the entry point map for `group`, or the full entry map"""
        try:
            ep_map = self._ep_map
        except AttributeError:
            # Parse and cache entry_points.txt on first access.
            ep_map = self._ep_map = EntryPoint.parse_map(
                self._get_metadata('entry_points.txt'), self
            )
        if group is not None:
            return ep_map.get(group, {})
        return ep_map

    def get_entry_info(self, group, name):
        """Return the EntryPoint object for `group`+`name`, or ``None``"""
        return self.get_entry_map(group).get(name)

    def insert_on(self, path, loc=None, replace=False):
        """Ensure self.location is on path

        If replace=False (default):
            - If location is already in path anywhere, do nothing.
            - Else:
              - If it's an egg and its parent directory is on path,
                insert just ahead of the parent.
              - Else: add to the end of path.
        If replace=True:
            - If location is already on path anywhere (not eggs)
              or higher priority than its parent (eggs)
              do nothing.
            - Else:
              - If it's an egg and its parent directory is on path,
                insert just ahead of the parent,
                removing any lower-priority entries.
              - Else: add it to the front of path.
        """

        loc = loc or self.location
        if not loc:
            return

        nloc = _normalize_cached(loc)
        bdir = os.path.dirname(nloc)
        # Normalized copy of path, kept index-aligned with `path` itself.
        npath = [(p and _normalize_cached(p) or p) for p in path]

        for p, item in enumerate(npath):
            if item == nloc:
                if replace:
                    break
                else:
                    # don't modify path (even removing duplicates) if
                    # found and not replace
                    return
            elif item == bdir and self.precedence == EGG_DIST:
                # if it's an .egg, give it precedence over its directory
                # UNLESS it's already been added to sys.path and replace=False
                if (not replace) and nloc in npath[p:]:
                    return
                if path is sys.path:
                    self.check_version_conflict()
                path.insert(p, loc)
                npath.insert(p, nloc)
                break
        else:
            if path is sys.path:
                self.check_version_conflict()
            if replace:
                path.insert(0, loc)
            else:
                path.append(loc)
            return

        # p is the spot where we found or inserted loc; now remove duplicates
        while True:
            try:
                np = npath.index(nloc, p + 1)
            except ValueError:
                break
            else:
                del npath[np], path[np]
                # ha!
                p = np

        return

    def check_version_conflict(self):
        # Warn when a top-level module provided by this distribution was
        # already imported from somewhere else.
        if self.key == 'setuptools':
            # ignore the inevitable setuptools self-conflicts :(
            return

        nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))
        loc = normalize_path(self.location)
        for modname in self._get_metadata('top_level.txt'):
            if (modname not in sys.modules or modname in nsp
                    or modname in _namespace_packages):
                continue
            if modname in ('pkg_resources', 'setuptools', 'site'):
                continue
            fn = getattr(sys.modules[modname], '__file__', None)
            if fn and (normalize_path(fn).startswith(loc) or
                       fn.startswith(self.location)):
                continue
            issue_warning(
                "Module %s was already imported from %s, but %s is being added"
                " to sys.path" % (modname, fn, self.location),
            )

    def has_version(self):
        try:
            self.version
        except ValueError:
            issue_warning("Unbuilt egg for " + repr(self))
            return False
        return True

    def clone(self, **kw):
        """Copy this distribution, substituting in any changed keyword args"""
        names = 'project_name version py_version platform location precedence'
        for attr in names.split():
            kw.setdefault(attr, getattr(self, attr, None))
        kw.setdefault('metadata', self._provider)
        return self.__class__(**kw)

    @property
    def extras(self):
        # Names of the declared extras (the None/base entry excluded).
        return [dep for dep in self._dep_map if dep]
2820
2821
class EggInfoDistribution(Distribution):
    def _reload_version(self):
        """
        Prefer the version recorded in the metadata file over the one
        parsed from the filename.

        Packages installed by distutils (e.g. numpy or scipy) use an old
        safe_version, so their version numbers can get mangled when
        converted to filenames (e.g. 1.11.0.dev0+2329eae becomes
        1.11.0.dev0_2329eae) and would not be parsed properly downstream
        by Distribution and safe_version. Reading the version from the
        metadata file itself avoids that.
        """
        metadata_version = _version_from_file(
            self._get_metadata(self.PKG_INFO))
        if metadata_version:
            self._version = metadata_version
        return self
2839
2840
class DistInfoDistribution(Distribution):
    """
    Wrap an actual or potential sys.path entry
    w/metadata, .dist-info style.
    """
    PKG_INFO = 'METADATA'
    # NOTE(review): EQEQ appears unused within this class — confirm
    # against the rest of the module before removing.
    EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])")

    @property
    def _parsed_pkg_info(self):
        """Parse and cache metadata"""
        try:
            return self._pkg_info
        except AttributeError:
            # METADATA is RFC 822-style; parse it as an email message.
            metadata = self.get_metadata(self.PKG_INFO)
            self._pkg_info = email.parser.Parser().parsestr(metadata)
            return self._pkg_info

    @property
    def _dep_map(self):
        # Name-mangled per class, so this cache is independent of the
        # base class's __dep_map.
        try:
            return self.__dep_map
        except AttributeError:
            self.__dep_map = self._compute_dependencies()
            return self.__dep_map

    def _compute_dependencies(self):
        """Recompute this distribution's dependencies."""
        dm = self.__dep_map = {None: []}

        reqs = []
        # Including any condition expressions
        for req in self._parsed_pkg_info.get_all('Requires-Dist') or []:
            reqs.extend(parse_requirements(req))

        def reqs_for_extra(extra):
            # Requirements whose marker is absent or satisfied for `extra`.
            for req in reqs:
                if not req.marker or req.marker.evaluate({'extra': extra}):
                    yield req

        common = frozenset(reqs_for_extra(None))
        dm[None].extend(common)

        for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []:
            s_extra = safe_extra(extra.strip())
            # Per-extra deps exclude those already required unconditionally.
            dm[s_extra] = list(frozenset(reqs_for_extra(extra)) - common)

        return dm
2889
2890
# Maps metadata extension to the Distribution subclass that
# Distribution.from_location instantiates for it.
_distributionImpl = {
    '.egg': Distribution,
    '.egg-info': EggInfoDistribution,
    '.dist-info': DistInfoDistribution,
}
2896
2897
def issue_warning(*args, **kw):
    """Issue a warning attributed to the first caller outside this module."""
    module_globals = globals()
    depth = 1
    try:
        # Walk up the stack past every frame belonging to this module so
        # the warning points at external caller code.
        while sys._getframe(depth).f_globals is module_globals:
            depth += 1
    except ValueError:
        # Ran out of frames; fall back to the deepest one reached.
        pass
    warnings.warn(stacklevel=depth + 1, *args, **kw)
2909
2910
class RequirementParseError(ValueError):
    """Raised when a requirement string cannot be parsed.

    The exception arguments are joined with spaces to form the message.
    """

    def __str__(self):
        return ' '.join(self.args)
2914
2915
def parse_requirements(strs):
    """Yield ``Requirement`` objects for each specification in `strs`

    `strs` must be a string, or a (possibly-nested) iterable thereof.

    Raises RequirementParseError (via Requirement) on invalid lines.
    """
    # create a steppable iterator, so we can handle \-continuations
    lines = iter(yield_lines(strs))

    for line in lines:
        # Drop comments -- a hash without a space may be in a URL.
        if ' #' in line:
            line = line[:line.find(' #')]
        # If there is a line continuation, drop it, and append the next line.
        if line.endswith('\\'):
            # NOTE(review): this removes the final *two* characters — the
            # backslash plus the one before it; it assumes whitespace
            # precedes the continuation backslash. Long-standing upstream
            # behavior; confirm before changing.
            line = line[:-2].strip()
            try:
                line += next(lines)
            except StopIteration:
                return
        yield Requirement(line)
2936
2937
class Requirement(packaging.requirements.Requirement):
    def __init__(self, requirement_string):
        """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
        try:
            super(Requirement, self).__init__(requirement_string)
        except packaging.requirements.InvalidRequirement as e:
            # Re-raise under this module's ValueError-based type.
            raise RequirementParseError(str(e))
        self.unsafe_name = self.name
        project_name = safe_name(self.name)
        self.project_name, self.key = project_name, project_name.lower()
        # Legacy-compatible view of the specifier set as (op, version) pairs.
        self.specs = [
            (spec.operator, spec.version) for spec in self.specifier]
        self.extras = tuple(map(safe_extra, self.extras))
        # Everything identity-relevant, precomputed for __eq__/__hash__.
        self.hashCmp = (
            self.key,
            self.specifier,
            frozenset(self.extras),
            str(self.marker) if self.marker else None,
        )
        self.__hash = hash(self.hashCmp)

    def __eq__(self, other):
        return (
            isinstance(other, Requirement) and
            self.hashCmp == other.hashCmp
        )

    def __ne__(self, other):
        return not self == other

    def __contains__(self, item):
        # Accepts a Distribution (matched by key, then version) or a
        # bare version string/object.
        if isinstance(item, Distribution):
            if item.key != self.key:
                return False

            item = item.version

        # Allow prereleases always in order to match the previous behavior of
        # this method. In the future this should be smarter and follow PEP 440
        # more accurately.
        return self.specifier.contains(item, prereleases=True)

    def __hash__(self):
        return self.__hash

    def __repr__(self):
        return "Requirement.parse(%r)" % str(self)

    @staticmethod
    def parse(s):
        # Tuple-unpacking asserts exactly one requirement was parsed.
        req, = parse_requirements(s)
        return req
2990
2991
def _always_object(classes):
    """
    Ensure object appears in the mro even
    for old-style classes.
    """
    if object in classes:
        return classes
    return classes + (object,)
3000
3001
def _find_adapter(registry, ob):
    """Return an adapter factory for `ob` from `registry`"""
    # Search the object's MRO (padded with `object` for old-style
    # classes) for the most specific registered type.
    mro = _always_object(inspect.getmro(getattr(ob, '__class__', type(ob))))
    for candidate in mro:
        if candidate in registry:
            return registry[candidate]
3008
3009
def ensure_directory(path):
    """Ensure that the parent directory of `path` exists"""
    py31compat.makedirs(os.path.dirname(path), exist_ok=True)
3014
3015
def _bypass_ensure_directory(path):
    """Sandbox-bypassing version of ensure_directory()

    Recursively creates missing parent directories of `path` using the
    bare split/isdir/mkdir names bound at module level — presumably the
    unsandboxed os functions captured at import time (confirm at the
    file's imports).
    """
    if not WRITE_SUPPORT:
        raise IOError('"os.mkdir" not supported on this platform.')
    dirname, filename = split(path)
    if dirname and filename and not isdir(dirname):
        # Create ancestors first, then this directory (mode 0o755).
        _bypass_ensure_directory(dirname)
        mkdir(dirname, 0o755)
3024
3025
def split_sections(s):
    """Split a string or iterable thereof into (section, content) pairs

    Each ``section`` is a stripped version of the section header ("[section]")
    and each ``content`` is a list of stripped lines excluding blank lines and
    comment-only lines. If there are any such lines before the first section
    header, they're returned in a first ``section`` of ``None``.
    """
    section = None
    content = []
    for line in yield_lines(s):
        if not line.startswith("["):
            content.append(line)
            continue
        if not line.endswith("]"):
            raise ValueError("Invalid section heading", line)
        # New header: flush the previous segment (if non-trivial) first.
        if section or content:
            yield section, content
        section = line[1:-1].strip()
        content = []

    # wrap up last segment
    yield section, content
3050
3051
def _mkstemp(*args, **kw):
    """Create a temp file via ``tempfile.mkstemp`` while the sandbox's
    ``os.open`` patch is temporarily bypassed."""
    saved_open = os.open
    os.open = os_open  # temporarily bypass sandboxing
    try:
        return tempfile.mkstemp(*args, **kw)
    finally:
        os.open = saved_open  # and then put it back
3061
3062
# Silence the PEP440Warning by default, so that end users don't get hit by it
# randomly just because they use pkg_resources. We want to append the rule
# because we want earlier uses of filterwarnings to take precedence over this
# one (append=True puts this filter last, so it only applies when no earlier
# filter already matched).
warnings.filterwarnings("ignore", category=PEP440Warning, append=True)
3068
3069
# from jaraco.functools 1.3
def _call_aside(f, *args, **kwargs):
    # Invoke `f` immediately with the given arguments (return value
    # discarded), then hand `f` back unchanged. Used below as a decorator
    # to run module-initialization functions once at import time.
    f(*args, **kwargs)
    return f
3074
3075
@_call_aside
def _initialize(g=globals()):
    "Set up global resource manager (deliberately not state-saved)"
    # NOTE: the mutable default `g=globals()` is intentional — it binds the
    # module namespace at definition time so _call_aside can populate it.
    manager = ResourceManager()
    g['_manager'] = manager
    # Re-export every public ResourceManager attribute at module level.
    for public_name in dir(manager):
        if not public_name.startswith('_'):
            g[public_name] = getattr(manager, public_name)
3086
3087
@_call_aside
def _initialize_master_working_set():
    """
    Prepare the master working set and make the ``require()``
    API available.

    This function has explicit effects on the global state
    of pkg_resources. It is intended to be invoked once at
    the initialization of this module.

    Invocation by other packages is unsupported and done
    at their own risk.
    """
    working_set = WorkingSet._build_master()
    _declare_state('object', working_set=working_set)

    # NOTE: the local names below become module-level globals through the
    # ``globals().update(locals())`` at the end of this function, so they
    # must not be renamed — they are the module's public API.
    require = working_set.require
    iter_entry_points = working_set.iter_entry_points
    add_activation_listener = working_set.subscribe
    run_script = working_set.run_script
    # backward compatibility
    run_main = run_script
    # Activate all distributions already on sys.path with replace=False and
    # ensure that all distributions added to the working set in the future
    # (e.g. by calling ``require()``) will get activated as well,
    # with higher priority (replace=True).
    # tuple() is used purely to consume the generator eagerly for its
    # activation side effects; the resulting tuple is discarded.
    tuple(
        dist.activate(replace=False)
        for dist in working_set
    )
    add_activation_listener(
        lambda dist: dist.activate(replace=True),
        existing=False,
    )
    working_set.entries = []
    # match order
    # (reset entries, then re-add every sys.path entry in sys.path order)
    list(map(working_set.add_entry, sys.path))
    globals().update(locals())