author     Shubham Saini <shubham6405@gmail.com>   2019-08-05 08:32:33 +0000
committer  Shubham Saini <shubham6405@gmail.com>   2019-08-05 08:32:33 +0000
commit     227b2d30a8675b44918f9d9ca89b24144a938215 (patch)
tree       9f8e6a28724514b6fdf463a9ab2067a7ef309b72 /venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations
parent     842a8cfbbbdb1f92889d892e4859dbd5d40c5be8 (diff)
removing venv files
Diffstat (limited to 'venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations')
-rw-r--r--  venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/__init__.py    0
-rw-r--r--  venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/check.py     106
-rw-r--r--  venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/freeze.py    252
-rw-r--r--  venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/prepare.py   380
4 files changed, 0 insertions, 738 deletions
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/__init__.py
+++ /dev/null
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/check.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/check.py
deleted file mode 100644
index bab6b9f..0000000
--- a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/check.py
+++ /dev/null
@@ -1,106 +0,0 @@
1"""Validation of dependencies of packages
2"""
3
4from collections import namedtuple
5
6from pip._vendor.packaging.utils import canonicalize_name
7
8from pip._internal.operations.prepare import make_abstract_dist
9
10from pip._internal.utils.misc import get_installed_distributions
11from pip._internal.utils.typing import MYPY_CHECK_RUNNING
12
13if MYPY_CHECK_RUNNING:
14 from pip._internal.req.req_install import InstallRequirement
15 from typing import Any, Dict, Iterator, Set, Tuple, List
16
17 # Shorthands
18 PackageSet = Dict[str, 'PackageDetails']
19 Missing = Tuple[str, Any]
20 Conflicting = Tuple[str, str, Any]
21
22 MissingDict = Dict[str, List[Missing]]
23 ConflictingDict = Dict[str, List[Conflicting]]
24 CheckResult = Tuple[MissingDict, ConflictingDict]
25
26PackageDetails = namedtuple('PackageDetails', ['version', 'requires'])
27
28
29def create_package_set_from_installed(**kwargs):
30 # type: (**Any) -> PackageSet
31 """Converts a list of distributions into a PackageSet.
32 """
33 # Default to using all packages installed on the system
34 if kwargs == {}:
35 kwargs = {"local_only": False, "skip": ()}
36 retval = {}
37 for dist in get_installed_distributions(**kwargs):
38 name = canonicalize_name(dist.project_name)
39 retval[name] = PackageDetails(dist.version, dist.requires())
40 return retval
41
42
43def check_package_set(package_set):
44 # type: (PackageSet) -> CheckResult
45 """Check if a package set is consistent
46 """
47 missing = dict()
48 conflicting = dict()
49
50 for package_name in package_set:
51 # Info about dependencies of package_name
52 missing_deps = set() # type: Set[Missing]
53 conflicting_deps = set() # type: Set[Conflicting]
54
55 for req in package_set[package_name].requires:
56 name = canonicalize_name(req.project_name) # type: str
57
58 # Check if it's missing
59 if name not in package_set:
60 missed = True
61 if req.marker is not None:
62 missed = req.marker.evaluate()
63 if missed:
64 missing_deps.add((name, req))
65 continue
66
67 # Check if there's a conflict
68 version = package_set[name].version # type: str
69 if not req.specifier.contains(version, prereleases=True):
70 conflicting_deps.add((name, version, req))
71
72 def str_key(x):
73 return str(x)
74
75 if missing_deps:
76 missing[package_name] = sorted(missing_deps, key=str_key)
77 if conflicting_deps:
78 conflicting[package_name] = sorted(conflicting_deps, key=str_key)
79
80 return missing, conflicting
81
82
83def check_install_conflicts(to_install):
84 # type: (List[InstallRequirement]) -> Tuple[PackageSet, CheckResult]
85 """For checking if the dependency graph would be consistent after \
86 installing given requirements
87 """
88 # Start from the current state
89 state = create_package_set_from_installed()
90 _simulate_installation_of(to_install, state)
91 return state, check_package_set(state)
92
93
94# NOTE from @pradyunsg
95# This required a minor update in dependency link handling logic over at
96# operations.prepare.IsSDist.dist() to get it working
97def _simulate_installation_of(to_install, state):
98 # type: (List[InstallRequirement], PackageSet) -> None
99 """Computes the version of packages after installing to_install.
100 """
101
102 # Modify it as installing requirement_set would (assuming no errors)
103 for inst_req in to_install:
104 dist = make_abstract_dist(inst_req).dist(finder=None)
105 name = canonicalize_name(dist.key)
106 state[name] = PackageDetails(dist.version, dist.requires())
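
The check.py module deleted above is the machinery behind `pip check`: it snapshots the installed distributions and reports missing and conflicting dependencies. A minimal sketch of composing its two public helpers follows; the import path assumes the vendored pip 10.0.1 tree shown in this diff is importable, and the report wording is illustrative, not pip's own output.

# Hedged sketch: exercising the deleted check.py API (pip 10.0.1).
from pip._internal.operations.check import (
    create_package_set_from_installed,
    check_package_set,
)

package_set = create_package_set_from_installed()       # name -> PackageDetails
missing, conflicting = check_package_set(package_set)   # the CheckResult tuple

for project, deps in missing.items():
    for dep_name, req in deps:
        print("%s is missing dependency %s (%s)" % (project, dep_name, req))
for project, deps in conflicting.items():
    for dep_name, installed_version, req in deps:
        print("%s conflict: %s %s does not satisfy %s"
              % (project, dep_name, installed_version, req))
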
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/freeze.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/freeze.py
deleted file mode 100644
index 000102d..0000000
--- a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/freeze.py
+++ /dev/null
@@ -1,252 +0,0 @@
1from __future__ import absolute_import
2
3import collections
4import logging
5import os
6import re
7import warnings
8
9from pip._vendor import pkg_resources, six
10from pip._vendor.packaging.utils import canonicalize_name
11from pip._vendor.pkg_resources import RequirementParseError
12
13from pip._internal.exceptions import InstallationError
14from pip._internal.req import InstallRequirement
15from pip._internal.req.req_file import COMMENT_RE
16from pip._internal.utils.deprecation import RemovedInPip11Warning
17from pip._internal.utils.misc import (
18 dist_is_editable, get_installed_distributions,
19)
20
21logger = logging.getLogger(__name__)
22
23
24def freeze(
25 requirement=None,
26 find_links=None, local_only=None, user_only=None, skip_regex=None,
27 isolated=False,
28 wheel_cache=None,
29 exclude_editable=False,
30 skip=()):
31 find_links = find_links or []
32 skip_match = None
33
34 if skip_regex:
35 skip_match = re.compile(skip_regex).search
36
37 dependency_links = []
38
39 for dist in pkg_resources.working_set:
40 if dist.has_metadata('dependency_links.txt'):
41 dependency_links.extend(
42 dist.get_metadata_lines('dependency_links.txt')
43 )
44 for link in find_links:
45 if '#egg=' in link:
46 dependency_links.append(link)
47 for link in find_links:
48 yield '-f %s' % link
49 installations = {}
50 for dist in get_installed_distributions(local_only=local_only,
51 skip=(),
52 user_only=user_only):
53 try:
54 req = FrozenRequirement.from_dist(
55 dist,
56 dependency_links
57 )
58 except RequirementParseError:
59 logger.warning(
60 "Could not parse requirement: %s",
61 dist.project_name
62 )
63 continue
64 if exclude_editable and req.editable:
65 continue
66 installations[req.name] = req
67
68 if requirement:
69 # the options that don't get turned into an InstallRequirement
70 # should only be emitted once, even if the same option is in multiple
71 # requirements files, so we need to keep track of what has been emitted
72 # so that we don't emit it again if it's seen again
73 emitted_options = set()
74 # keep track of which files a requirement is in so that we can
75 # give an accurate warning if a requirement appears multiple times.
76 req_files = collections.defaultdict(list)
77 for req_file_path in requirement:
78 with open(req_file_path) as req_file:
79 for line in req_file:
80 if (not line.strip() or
81 line.strip().startswith('#') or
82 (skip_match and skip_match(line)) or
83 line.startswith((
84 '-r', '--requirement',
85 '-Z', '--always-unzip',
86 '-f', '--find-links',
87 '-i', '--index-url',
88 '--pre',
89 '--trusted-host',
90 '--process-dependency-links',
91 '--extra-index-url'))):
92 line = line.rstrip()
93 if line not in emitted_options:
94 emitted_options.add(line)
95 yield line
96 continue
97
98 if line.startswith('-e') or line.startswith('--editable'):
99 if line.startswith('-e'):
100 line = line[2:].strip()
101 else:
102 line = line[len('--editable'):].strip().lstrip('=')
103 line_req = InstallRequirement.from_editable(
104 line,
105 isolated=isolated,
106 wheel_cache=wheel_cache,
107 )
108 else:
109 line_req = InstallRequirement.from_line(
110 COMMENT_RE.sub('', line).strip(),
111 isolated=isolated,
112 wheel_cache=wheel_cache,
113 )
114
115 if not line_req.name:
116 logger.info(
117 "Skipping line in requirement file [%s] because "
118 "it's not clear what it would install: %s",
119 req_file_path, line.strip(),
120 )
121 logger.info(
122 " (add #egg=PackageName to the URL to avoid"
123 " this warning)"
124 )
125 elif line_req.name not in installations:
126 # either it's not installed, or it is installed
127 # but has been processed already
128 if not req_files[line_req.name]:
129 logger.warning(
130 "Requirement file [%s] contains %s, but that "
131 "package is not installed",
132 req_file_path,
133 COMMENT_RE.sub('', line).strip(),
134 )
135 else:
136 req_files[line_req.name].append(req_file_path)
137 else:
138 yield str(installations[line_req.name]).rstrip()
139 del installations[line_req.name]
140 req_files[line_req.name].append(req_file_path)
141
142 # Warn about requirements that were included multiple times (in a
143 # single requirements file or in different requirements files).
144 for name, files in six.iteritems(req_files):
145 if len(files) > 1:
146 logger.warning("Requirement %s included multiple times [%s]",
147 name, ', '.join(sorted(set(files))))
148
149 yield(
150 '## The following requirements were added by '
151 'pip freeze:'
152 )
153 for installation in sorted(
154 installations.values(), key=lambda x: x.name.lower()):
155 if canonicalize_name(installation.name) not in skip:
156 yield str(installation).rstrip()
157
158
159class FrozenRequirement(object):
160 def __init__(self, name, req, editable, comments=()):
161 self.name = name
162 self.req = req
163 self.editable = editable
164 self.comments = comments
165
166 _rev_re = re.compile(r'-r(\d+)$')
167 _date_re = re.compile(r'-(20\d\d\d\d\d\d)$')
168
169 @classmethod
170 def from_dist(cls, dist, dependency_links):
171 location = os.path.normcase(os.path.abspath(dist.location))
172 comments = []
173 from pip._internal.vcs import vcs, get_src_requirement
174 if dist_is_editable(dist) and vcs.get_backend_name(location):
175 editable = True
176 try:
177 req = get_src_requirement(dist, location)
178 except InstallationError as exc:
179 logger.warning(
180 "Error when trying to get requirement for VCS system %s, "
181 "falling back to uneditable format", exc
182 )
183 req = None
184 if req is None:
185 logger.warning(
186 'Could not determine repository location of %s', location
187 )
188 comments.append(
189 '## !! Could not determine repository location'
190 )
191 req = dist.as_requirement()
192 editable = False
193 else:
194 editable = False
195 req = dist.as_requirement()
196 specs = req.specs
197 assert len(specs) == 1 and specs[0][0] in ["==", "==="], \
198 'Expected 1 spec with == or ===; specs = %r; dist = %r' % \
199 (specs, dist)
200 version = specs[0][1]
201 ver_match = cls._rev_re.search(version)
202 date_match = cls._date_re.search(version)
203 if ver_match or date_match:
204 svn_backend = vcs.get_backend('svn')
205 if svn_backend:
206 svn_location = svn_backend().get_location(
207 dist,
208 dependency_links,
209 )
210 if not svn_location:
211 logger.warning(
212 'Warning: cannot find svn location for %s', req,
213 )
214 comments.append(
215 '## FIXME: could not find svn URL in dependency_links '
216 'for this package:'
217 )
218 else:
219 warnings.warn(
220 "SVN editable detection based on dependency links "
221 "will be dropped in the future.",
222 RemovedInPip11Warning,
223 )
224 comments.append(
225 '# Installing as editable to satisfy requirement %s:' %
226 req
227 )
228 if ver_match:
229 rev = ver_match.group(1)
230 else:
231 rev = '{%s}' % date_match.group(1)
232 editable = True
233 req = '%s@%s#egg=%s' % (
234 svn_location,
235 rev,
236 cls.egg_name(dist)
237 )
238 return cls(dist.project_name, req, editable, comments)
239
240 @staticmethod
241 def egg_name(dist):
242 name = dist.egg_name()
243 match = re.search(r'-py\d\.\d$', name)
244 if match:
245 name = name[:match.start()]
246 return name
247
248 def __str__(self):
249 req = self.req
250 if self.editable:
251 req = '-e %s' % req
252 return '\n'.join(list(self.comments) + [str(req)]) + '\n'
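
The freeze.py module deleted above is the generator behind `pip freeze`: it yields one requirement line per installed distribution (plus any `-f` links and requirements-file options). A minimal sketch of driving it directly follows; the keyword values are illustrative, not pip's command-line defaults.

# Hedged sketch: driving the deleted freeze() generator (pip 10.0.1).
from pip._internal.operations.freeze import freeze

for line in freeze(local_only=True, skip=('pip', 'setuptools', 'wheel')):
    print(line)
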
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/prepare.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/prepare.py
deleted file mode 100644
index c1e8158..0000000
--- a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/prepare.py
+++ /dev/null
@@ -1,380 +0,0 @@
1"""Prepares a distribution for installation
2"""
3
4import itertools
5import logging
6import os
7import sys
8from copy import copy
9
10from pip._vendor import pkg_resources, requests
11
12from pip._internal.build_env import NoOpBuildEnvironment
13from pip._internal.compat import expanduser
14from pip._internal.download import (
15 is_dir_url, is_file_url, is_vcs_url, unpack_url, url_to_path,
16)
17from pip._internal.exceptions import (
18 DirectoryUrlHashUnsupported, HashUnpinned, InstallationError,
19 PreviousBuildDirError, VcsHashUnsupported,
20)
21from pip._internal.index import FormatControl
22from pip._internal.req.req_install import InstallRequirement
23from pip._internal.utils.hashes import MissingHashes
24from pip._internal.utils.logging import indent_log
25from pip._internal.utils.misc import (
26 call_subprocess, display_path, normalize_path,
27)
28from pip._internal.utils.ui import open_spinner
29from pip._internal.vcs import vcs
30
31logger = logging.getLogger(__name__)
32
33
34def make_abstract_dist(req):
35 """Factory to make an abstract dist object.
36
37 Preconditions: Either an editable req with a source_dir, or satisfied_by or
38 a wheel link, or a non-editable req with a source_dir.
39
40 :return: A concrete DistAbstraction.
41 """
42 if req.editable:
43 return IsSDist(req)
44 elif req.link and req.link.is_wheel:
45 return IsWheel(req)
46 else:
47 return IsSDist(req)
48
49
50def _install_build_reqs(finder, prefix, build_requirements):
51 # NOTE: What follows is not a very good thing.
52 # Eventually, this should move into the BuildEnvironment class and
53 # that should handle all the isolation and sub-process invocation.
54 finder = copy(finder)
55 finder.format_control = FormatControl(set(), set([":all:"]))
56 urls = [
57 finder.find_requirement(
58 InstallRequirement.from_line(r), upgrade=False).url
59 for r in build_requirements
60 ]
61 args = [
62 sys.executable, '-m', 'pip', 'install', '--ignore-installed',
63 '--no-user', '--prefix', prefix,
64 ] + list(urls)
65
66 with open_spinner("Installing build dependencies") as spinner:
67 call_subprocess(args, show_stdout=False, spinner=spinner)
68
69
70class DistAbstraction(object):
71 """Abstracts out the wheel vs non-wheel Resolver.resolve() logic.
72
73 The requirements for anything installable are as follows:
74 - we must be able to determine the requirement name
75 (or we can't correctly handle the non-upgrade case).
76 - we must be able to generate a list of run-time dependencies
77 without installing any additional packages (or we would
78 have to either burn time by doing temporary isolated installs
79 or alternatively violate pips 'don't start installing unless
80 all requirements are available' rule - neither of which are
81 desirable).
82 - for packages with setup requirements, we must also be able
83 to determine their requirements without installing additional
84 packages (for the same reason as run-time dependencies)
85 - we must be able to create a Distribution object exposing the
86 above metadata.
87 """
88
89 def __init__(self, req):
90 self.req = req
91
92 def dist(self, finder):
93 """Return a setuptools Dist object."""
94 raise NotImplementedError(self.dist)
95
96 def prep_for_dist(self, finder):
97 """Ensure that we can get a Dist for this requirement."""
98 raise NotImplementedError(self.dist)
99
100
101class IsWheel(DistAbstraction):
102
103 def dist(self, finder):
104 return list(pkg_resources.find_distributions(
105 self.req.source_dir))[0]
106
107 def prep_for_dist(self, finder, build_isolation):
108 # FIXME:https://github.com/pypa/pip/issues/1112
109 pass
110
111
112class IsSDist(DistAbstraction):
113
114 def dist(self, finder):
115 dist = self.req.get_dist()
116 # FIXME: shouldn't be globally added.
117 if finder and dist.has_metadata('dependency_links.txt'):
118 finder.add_dependency_links(
119 dist.get_metadata_lines('dependency_links.txt')
120 )
121 return dist
122
123 def prep_for_dist(self, finder, build_isolation):
124 # Before calling "setup.py egg_info", we need to set-up the build
125 # environment.
126 build_requirements, isolate = self.req.get_pep_518_info()
127 should_isolate = build_isolation and isolate
128
129 minimum_requirements = ('setuptools', 'wheel')
130 missing_requirements = set(minimum_requirements) - set(
131 pkg_resources.Requirement(r).key
132 for r in build_requirements
133 )
134 if missing_requirements:
135 def format_reqs(rs):
136 return ' and '.join(map(repr, sorted(rs)))
137 logger.warning(
138 "Missing build time requirements in pyproject.toml for %s: "
139 "%s.", self.req, format_reqs(missing_requirements)
140 )
141 logger.warning(
142 "This version of pip does not implement PEP 517 so it cannot "
143 "build a wheel without %s.", format_reqs(minimum_requirements)
144 )
145
146 if should_isolate:
147 with self.req.build_env:
148 pass
149 _install_build_reqs(finder, self.req.build_env.path,
150 build_requirements)
151 else:
152 self.req.build_env = NoOpBuildEnvironment(no_clean=False)
153
154 self.req.run_egg_info()
155 self.req.assert_source_matches_version()
156
157
158class Installed(DistAbstraction):
159
160 def dist(self, finder):
161 return self.req.satisfied_by
162
163 def prep_for_dist(self, finder):
164 pass
165
166
167class RequirementPreparer(object):
168 """Prepares a Requirement
169 """
170
171 def __init__(self, build_dir, download_dir, src_dir, wheel_download_dir,
172 progress_bar, build_isolation):
173 super(RequirementPreparer, self).__init__()
174
175 self.src_dir = src_dir
176 self.build_dir = build_dir
177
178 # Where still packed archives should be written to. If None, they are
179 # not saved, and are deleted immediately after unpacking.
180 self.download_dir = download_dir
181
182 # Where still-packed .whl files should be written to. If None, they are
183 # written to the download_dir parameter. Separate to download_dir to
184 # permit only keeping wheel archives for pip wheel.
185 if wheel_download_dir:
186 wheel_download_dir = normalize_path(wheel_download_dir)
187 self.wheel_download_dir = wheel_download_dir
188
189 # NOTE
190 # download_dir and wheel_download_dir overlap semantically and may
191 # be combined if we're willing to have non-wheel archives present in
192 # the wheelhouse output by 'pip wheel'.
193
194 self.progress_bar = progress_bar
195
196 # Is build isolation allowed?
197 self.build_isolation = build_isolation
198
199 @property
200 def _download_should_save(self):
201 # TODO: Modify to reduce indentation needed
202 if self.download_dir:
203 self.download_dir = expanduser(self.download_dir)
204 if os.path.exists(self.download_dir):
205 return True
206 else:
207 logger.critical('Could not find download directory')
208 raise InstallationError(
209 "Could not find or access download directory '%s'"
210 % display_path(self.download_dir))
211 return False
212
213 def prepare_linked_requirement(self, req, session, finder,
214 upgrade_allowed, require_hashes):
215 """Prepare a requirement that would be obtained from req.link
216 """
217 # TODO: Breakup into smaller functions
218 if req.link and req.link.scheme == 'file':
219 path = url_to_path(req.link.url)
220 logger.info('Processing %s', display_path(path))
221 else:
222 logger.info('Collecting %s', req)
223
224 with indent_log():
225 # @@ if filesystem packages are not marked
226 # editable in a req, a non deterministic error
227 # occurs when the script attempts to unpack the
228 # build directory
229 req.ensure_has_source_dir(self.build_dir)
230 # If a checkout exists, it's unwise to keep going. version
231 # inconsistencies are logged later, but do not fail the
232 # installation.
233 # FIXME: this won't upgrade when there's an existing
234 # package unpacked in `req.source_dir`
235 # package unpacked in `req.source_dir`
236 if os.path.exists(os.path.join(req.source_dir, 'setup.py')):
237 raise PreviousBuildDirError(
238 "pip can't proceed with requirements '%s' due to a"
239 " pre-existing build directory (%s). This is "
240 "likely due to a previous installation that failed"
241 ". pip is being responsible and not assuming it "
242 "can delete this. Please delete it and try again."
243 % (req, req.source_dir)
244 )
245 req.populate_link(finder, upgrade_allowed, require_hashes)
246
247 # We can't hit this spot and have populate_link return None.
248 # req.satisfied_by is None here (because we're
249 # guarded) and upgrade has no impact except when satisfied_by
250 # is not None.
251 # Then inside find_requirement existing_applicable -> False
252 # If no new versions are found, DistributionNotFound is raised,
253 # otherwise a result is guaranteed.
254 assert req.link
255 link = req.link
256
257 # Now that we have the real link, we can tell what kind of
258 # requirements we have and raise some more informative errors
259 # than otherwise. (For example, we can raise VcsHashUnsupported
260 # for a VCS URL rather than HashMissing.)
261 if require_hashes:
262 # We could check these first 2 conditions inside
263 # unpack_url and save repetition of conditions, but then
264 # we would report less-useful error messages for
265 # unhashable requirements, complaining that there's no
266 # hash provided.
267 if is_vcs_url(link):
268 raise VcsHashUnsupported()
269 elif is_file_url(link) and is_dir_url(link):
270 raise DirectoryUrlHashUnsupported()
271 if not req.original_link and not req.is_pinned:
272 # Unpinned packages are asking for trouble when a new
273 # version is uploaded. This isn't a security check, but
274 # it saves users a surprising hash mismatch in the
275 # future.
276 #
277 # file:/// URLs aren't pinnable, so don't complain
278 # about them not being pinned.
279 raise HashUnpinned()
280
281 hashes = req.hashes(trust_internet=not require_hashes)
282 if require_hashes and not hashes:
283 # Known-good hashes are missing for this requirement, so
284 # shim it with a facade object that will provoke hash
285 # computation and then raise a HashMissing exception
286 # showing the user what the hash should be.
287 hashes = MissingHashes()
288
289 try:
290 download_dir = self.download_dir
291 # We always delete unpacked sdists after pip ran.
292 autodelete_unpacked = True
293 if req.link.is_wheel and self.wheel_download_dir:
294 # when doing 'pip wheel` we download wheels to a
295 # dedicated dir.
296 download_dir = self.wheel_download_dir
297 if req.link.is_wheel:
298 if download_dir:
299 # When downloading, we only unpack wheels to get
300 # metadata.
301 autodelete_unpacked = True
302 else:
303 # When installing a wheel, we use the unpacked
304 # wheel.
305 autodelete_unpacked = False
306 unpack_url(
307 req.link, req.source_dir,
308 download_dir, autodelete_unpacked,
309 session=session, hashes=hashes,
310 progress_bar=self.progress_bar
311 )
312 except requests.HTTPError as exc:
313 logger.critical(
314 'Could not install requirement %s because of error %s',
315 req,
316 exc,
317 )
318 raise InstallationError(
319 'Could not install requirement %s because of HTTP '
320 'error %s for URL %s' %
321 (req, exc, req.link)
322 )
323 abstract_dist = make_abstract_dist(req)
324 abstract_dist.prep_for_dist(finder, self.build_isolation)
325 if self._download_should_save:
326 # Make a .zip of the source_dir we already created.
327 if req.link.scheme in vcs.all_schemes:
328 req.archive(self.download_dir)
329 return abstract_dist
330
331 def prepare_editable_requirement(self, req, require_hashes, use_user_site,
332 finder):
333 """Prepare an editable requirement
334 """
335 assert req.editable, "cannot prepare a non-editable req as editable"
336
337 logger.info('Obtaining %s', req)
338
339 with indent_log():
340 if require_hashes:
341 raise InstallationError(
342 'The editable requirement %s cannot be installed when '
343 'requiring hashes, because there is no single file to '
344 'hash.' % req
345 )
346 req.ensure_has_source_dir(self.src_dir)
347 req.update_editable(not self._download_should_save)
348
349 abstract_dist = make_abstract_dist(req)
350 abstract_dist.prep_for_dist(finder, self.build_isolation)
351
352 if self._download_should_save:
353 req.archive(self.download_dir)
354 req.check_if_exists(use_user_site)
355
356 return abstract_dist
357
358 def prepare_installed_requirement(self, req, require_hashes, skip_reason):
359 """Prepare an already-installed requirement
360 """
361 assert req.satisfied_by, "req should have been satisfied but isn't"
362 assert skip_reason is not None, (
363 "did not get skip reason skipped but req.satisfied_by "
364 "is set to %r" % (req.satisfied_by,)
365 )
366 logger.info(
367 'Requirement %s: %s (%s)',
368 skip_reason, req, req.satisfied_by.version
369 )
370 with indent_log():
371 if require_hashes:
372 logger.debug(
373 'Since it is already installed, we are trusting this '
374 'package without checking its hash. To ensure a '
375 'completely repeatable environment, install into an '
376 'empty virtualenv.'
377 )
378 abstract_dist = Installed(req)
379
380 return abstract_dist
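
prepare.py ties the deleted module together: pip's resolver calls make_abstract_dist() on each InstallRequirement, then prep_for_dist() and dist() on the returned abstraction. The rough sketch below mirrors how RequirementPreparer uses these calls; the InstallRequirement and PackageFinder are assumed to come from pip's resolver machinery and are not constructed here, and the helper name is hypothetical.

# Hedged sketch of the DistAbstraction flow implemented above (pip 10.0.1).
from pip._internal.operations.prepare import make_abstract_dist


def describe_prepared(req, finder, build_isolation=True):
    """Prepare `req` the way RequirementPreparer does and return its metadata.

    Note: Installed.prep_for_dist() takes only the finder, so this mirrors the
    IsSDist/IsWheel code path shown in the diff.
    """
    abstract_dist = make_abstract_dist(req)               # IsWheel, IsSDist or Installed
    abstract_dist.prep_for_dist(finder, build_isolation)  # IsSDist runs setup.py egg_info here
    dist = abstract_dist.dist(finder)                     # a pkg_resources Distribution
    return dist.project_name, dist.version, [str(r) for r in dist.requires()]
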