diff options
Diffstat (limited to 'venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/resolve.py')
| -rw-r--r-- | venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/resolve.py | 354 |
1 file changed, 0 insertions(+), 354 deletions(-)
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/resolve.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/resolve.py deleted file mode 100644 index 189827e..0000000 --- a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/resolve.py +++ /dev/null | |||
| @@ -1,354 +0,0 @@ | |||
| 1 | """Dependency Resolution | ||
| 2 | |||
| 3 | The dependency resolution in pip is performed as follows: | ||
| 4 | |||
| 5 | for top-level requirements: | ||
| 6 | a. only one spec allowed per project, regardless of conflicts or not. | ||
| 7 | otherwise a "double requirement" exception is raised | ||
| 8 | b. they override sub-dependency requirements. | ||
| 9 | for sub-dependencies | ||
| 10 | a. "first found, wins" (where the order is breadth first) | ||
| 11 | """ | ||
| 12 | |||
| 13 | import logging | ||
| 14 | from collections import defaultdict | ||
| 15 | from itertools import chain | ||
| 16 | |||
| 17 | from pip._internal.exceptions import ( | ||
| 18 | BestVersionAlreadyInstalled, DistributionNotFound, HashError, HashErrors, | ||
| 19 | UnsupportedPythonVersion, | ||
| 20 | ) | ||
| 21 | |||
| 22 | from pip._internal.req.req_install import InstallRequirement | ||
| 23 | from pip._internal.utils.logging import indent_log | ||
| 24 | from pip._internal.utils.misc import dist_in_usersite, ensure_dir | ||
| 25 | from pip._internal.utils.packaging import check_dist_requires_python | ||
| 26 | |||
| 27 | logger = logging.getLogger(__name__) | ||
| 28 | |||
| 29 | |||
class Resolver(object):
    """Resolves which packages need to be installed/uninstalled to perform \
    the requested operation without breaking the requirements of any package.
    """

    # The only valid values for ``upgrade_strategy``; enforced by an assert
    # in ``__init__``.
    _allowed_strategies = {"eager", "only-if-needed", "to-satisfy-only"}

    def __init__(self, preparer, session, finder, wheel_cache, use_user_site,
                 ignore_dependencies, ignore_installed, ignore_requires_python,
                 force_reinstall, isolated, upgrade_strategy):
        super(Resolver, self).__init__()
        assert upgrade_strategy in self._allowed_strategies

        self.preparer = preparer
        self.finder = finder
        self.session = session

        # NOTE: This would eventually be replaced with a cache that can give
        # information about both sdist and wheels transparently.
        self.wheel_cache = wheel_cache

        self.require_hashes = None  # This is set in resolve

        self.upgrade_strategy = upgrade_strategy
        self.force_reinstall = force_reinstall
        self.isolated = isolated
        self.ignore_dependencies = ignore_dependencies
        self.ignore_installed = ignore_installed
        self.ignore_requires_python = ignore_requires_python
        self.use_user_site = use_user_site

        # Maps a parent requirement's name to the InstallRequirements that
        # were discovered as its dependencies; consumed by
        # ``get_installation_order`` to build a topological ordering.
        self._discovered_dependencies = defaultdict(list)

    def resolve(self, requirement_set):
        """Resolve what operations need to be done

        As a side-effect of this method, the packages (and their dependencies)
        are downloaded, unpacked and prepared for installation. This
        preparation is done by ``pip.operations.prepare``.

        Once PyPI has static dependency metadata available, it would be
        possible to move the preparation to become a step separated from
        dependency resolution.
        """
        # make the wheelhouse
        if self.preparer.wheel_download_dir:
            ensure_dir(self.preparer.wheel_download_dir)

        # If any top-level requirement has a hash specified, enter
        # hash-checking mode, which requires hashes from all.
        root_reqs = (
            requirement_set.unnamed_requirements +
            list(requirement_set.requirements.values())
        )
        self.require_hashes = (
            requirement_set.require_hashes or
            any(req.has_hash_options for req in root_reqs)
        )

        # Display where finder is looking for packages
        locations = self.finder.get_formatted_locations()
        if locations:
            logger.info(locations)

        # Actually prepare the files, and collect any exceptions. Most hash
        # exceptions cannot be checked ahead of time, because
        # req.populate_link() needs to be called before we can make decisions
        # based on link type.
        #
        # ``discovered_reqs`` is intentionally extended while being iterated
        # through ``chain``: newly found sub-dependencies are appended to the
        # end of the walk, which yields the breadth-first order described in
        # the module docstring.
        discovered_reqs = []
        hash_errors = HashErrors()
        for req in chain(root_reqs, discovered_reqs):
            try:
                discovered_reqs.extend(
                    self._resolve_one(requirement_set, req)
                )
            except HashError as exc:
                exc.req = req
                hash_errors.append(exc)

        # Hash errors are accumulated so the user sees all of them at once
        # instead of fixing them one at a time.
        if hash_errors:
            raise hash_errors

    def _is_upgrade_allowed(self, req):
        """Return True if *req* may be upgraded under the configured
        ``upgrade_strategy``.

        "to-satisfy-only" never upgrades, "eager" always does, and
        "only-if-needed" upgrades only requirements the user named directly
        (``req.is_direct``).
        """
        if self.upgrade_strategy == "to-satisfy-only":
            return False
        elif self.upgrade_strategy == "eager":
            return True
        else:
            assert self.upgrade_strategy == "only-if-needed"
            return req.is_direct

    def _set_req_to_reinstall(self, req):
        """
        Set a requirement to be installed.
        """
        # Don't uninstall the conflict if doing a user install and the
        # conflict is not a user install.
        if not self.use_user_site or dist_in_usersite(req.satisfied_by):
            # Moving the dist from ``satisfied_by`` to ``conflicts_with``
            # marks it for uninstallation; clearing ``satisfied_by`` makes
            # the requirement look unmet so it gets (re)installed.
            req.conflicts_with = req.satisfied_by
            req.satisfied_by = None

    # XXX: Stop passing requirement_set for options
    def _check_skip_installed(self, req_to_install):
        """Check if req_to_install should be skipped.

        This will check if the req is installed, and whether we should upgrade
        or reinstall it, taking into account all the relevant user options.

        After calling this req_to_install will only have satisfied_by set to
        None if the req_to_install is to be upgraded/reinstalled etc. Any
        other value will be a dist recording the current thing installed that
        satisfies the requirement.

        Note that for vcs urls and the like we can't assess skipping in this
        routine - we simply identify that we need to pull the thing down,
        then later on it is pulled down and introspected to assess upgrade/
        reinstalls etc.

        :return: A text reason for why it was skipped, or None.
        """
        if self.ignore_installed:
            return None

        # Populates req_to_install.satisfied_by as a side effect.
        req_to_install.check_if_exists(self.use_user_site)
        if not req_to_install.satisfied_by:
            return None

        if self.force_reinstall:
            self._set_req_to_reinstall(req_to_install)
            return None

        if not self._is_upgrade_allowed(req_to_install):
            if self.upgrade_strategy == "only-if-needed":
                return 'not upgraded as not directly required'
            return 'already satisfied'

        # Check for the possibility of an upgrade.  For link-based
        # requirements we have to pull the tree down and inspect to assess
        # the version #, so it's handled way down.
        if not req_to_install.link:
            try:
                self.finder.find_requirement(req_to_install, upgrade=True)
            except BestVersionAlreadyInstalled:
                # Then the best version is installed.
                return 'already up-to-date'
            except DistributionNotFound:
                # No distribution found, so we squash the error.  It will
                # be raised later when we re-try later to do the install.
                # Why don't we just raise here?
                pass

        self._set_req_to_reinstall(req_to_install)
        return None

    def _get_abstract_dist_for(self, req):
        """Takes a InstallRequirement and returns a single AbstractDist \
        representing a prepared variant of the same.
        """
        assert self.require_hashes is not None, (
            "require_hashes should have been set in Resolver.resolve()"
        )

        if req.editable:
            return self.preparer.prepare_editable_requirement(
                req, self.require_hashes, self.use_user_site, self.finder,
            )

        # satisfied_by is only evaluated by calling _check_skip_installed,
        # so it must be None here.
        assert req.satisfied_by is None
        skip_reason = self._check_skip_installed(req)

        if req.satisfied_by:
            return self.preparer.prepare_installed_requirement(
                req, self.require_hashes, skip_reason
            )

        upgrade_allowed = self._is_upgrade_allowed(req)
        abstract_dist = self.preparer.prepare_linked_requirement(
            req, self.session, self.finder, upgrade_allowed,
            self.require_hashes
        )

        # NOTE
        # The following portion is for determining if a certain package is
        # going to be re-installed/upgraded or not and reporting to the user.
        # This should probably get cleaned up in a future refactor.

        # req.req is only avail after unpack for URL
        # pkgs repeat check_if_exists to uninstall-on-upgrade
        # (#14)
        if not self.ignore_installed:
            req.check_if_exists(self.use_user_site)

        if req.satisfied_by:
            should_modify = (
                self.upgrade_strategy != "to-satisfy-only" or
                self.force_reinstall or
                self.ignore_installed or
                req.link.scheme == 'file'
            )
            if should_modify:
                self._set_req_to_reinstall(req)
            else:
                logger.info(
                    'Requirement already satisfied (use --upgrade to upgrade):'
                    ' %s', req,
                )

        return abstract_dist

    def _resolve_one(self, requirement_set, req_to_install):
        """Prepare a single requirements file.

        :return: A list of additional InstallRequirements to also install.
        """
        # Tell user what we are doing for this requirement:
        # obtain (editable), skipping, processing (local url), collecting
        # (remote url or package name)
        #
        # Constraints and already-prepared requirements are no-ops; the
        # ``prepared`` flag guards against processing a req twice when it is
        # reached through multiple dependency paths.
        if req_to_install.constraint or req_to_install.prepared:
            return []

        req_to_install.prepared = True

        # register tmp src for cleanup in case something goes wrong
        requirement_set.reqs_to_cleanup.append(req_to_install)

        abstract_dist = self._get_abstract_dist_for(req_to_install)

        # Parse and return dependencies
        dist = abstract_dist.dist(self.finder)
        try:
            check_dist_requires_python(dist)
        except UnsupportedPythonVersion as err:
            # With --ignore-requires-python the incompatibility is only
            # logged; otherwise it aborts resolution.
            if self.ignore_requires_python:
                logger.warning(err.args[0])
            else:
                raise

        more_reqs = []

        def add_req(subreq, extras_requested):
            # Wrap the dependency spec in an InstallRequirement and register
            # it with the requirement set; anything that needs scanning is
            # accumulated into ``more_reqs`` for the caller.
            sub_install_req = InstallRequirement.from_req(
                str(subreq),
                req_to_install,
                isolated=self.isolated,
                wheel_cache=self.wheel_cache,
            )
            parent_req_name = req_to_install.name
            to_scan_again, add_to_parent = requirement_set.add_requirement(
                sub_install_req,
                parent_req_name=parent_req_name,
                extras_requested=extras_requested,
            )
            if parent_req_name and add_to_parent:
                # Remember the parent -> dependency edge for
                # get_installation_order().
                self._discovered_dependencies[parent_req_name].append(
                    add_to_parent
                )
            more_reqs.extend(to_scan_again)

        with indent_log():
            # We add req_to_install before its dependencies, so that we
            # can refer to it when adding dependencies.
            if not requirement_set.has_requirement(req_to_install.name):
                # 'unnamed' requirements will get added here
                req_to_install.is_direct = True
                requirement_set.add_requirement(
                    req_to_install, parent_req_name=None,
                )

            if not self.ignore_dependencies:
                if req_to_install.extras:
                    logger.debug(
                        "Installing extra requirements: %r",
                        ','.join(req_to_install.extras),
                    )
                # Warn about extras the user asked for that the dist does
                # not define; only the intersection is actually resolved.
                missing_requested = sorted(
                    set(req_to_install.extras) - set(dist.extras)
                )
                for missing in missing_requested:
                    logger.warning(
                        '%s does not provide the extra \'%s\'',
                        dist, missing
                    )

                available_requested = sorted(
                    set(dist.extras) & set(req_to_install.extras)
                )
                for subreq in dist.requires(available_requested):
                    add_req(subreq, extras_requested=available_requested)

            if not req_to_install.editable and not req_to_install.satisfied_by:
                # XXX: --no-install leads this to report 'Successfully
                # downloaded' for only non-editable reqs, even though we took
                # action on them.
                requirement_set.successfully_downloaded.append(req_to_install)

        return more_reqs

    def get_installation_order(self, req_set):
        """Create the installation order.

        The installation order is topological - requirements are installed
        before the requiring thing. We break cycles at an arbitrary point,
        and make no other guarantees.
        """
        # The current implementation, which we may change at any point
        # installs the user specified things in the order given, except when
        # dependencies must come earlier to achieve topological order.
        order = []
        ordered_reqs = set()

        def schedule(req):
            # Depth-first post-order walk: a req is appended to ``order``
            # only after all of its discovered dependencies. Adding ``req``
            # to ``ordered_reqs`` BEFORE recursing is what breaks cycles.
            if req.satisfied_by or req in ordered_reqs:
                return
            if req.constraint:
                return
            ordered_reqs.add(req)
            for dep in self._discovered_dependencies[req.name]:
                schedule(dep)
            order.append(req)

        for install_req in req_set.requirements.values():
            schedule(install_req)
        return order
