diff options
author | Shubham Saini <shubham6405@gmail.com> | 2019-08-05 08:32:33 +0000 |
---|---|---|
committer | Shubham Saini <shubham6405@gmail.com> | 2019-08-05 08:32:33 +0000 |
commit | 227b2d30a8675b44918f9d9ca89b24144a938215 (patch) | |
tree | 9f8e6a28724514b6fdf463a9ab2067a7ef309b72 /venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/prepare.py | |
parent | 842a8cfbbbdb1f92889d892e4859dbd5d40c5be8 (diff) |
removing venv files
Diffstat (limited to 'venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/prepare.py')
-rw-r--r-- | venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/prepare.py | 380 |
1 file changed, 0 insertions, 380 deletions
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/prepare.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/prepare.py deleted file mode 100644 index c1e8158..0000000 --- a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/operations/prepare.py +++ /dev/null | |||
@@ -1,380 +0,0 @@ | |||
1 | """Prepares a distribution for installation | ||
2 | """ | ||
3 | |||
4 | import itertools | ||
5 | import logging | ||
6 | import os | ||
7 | import sys | ||
8 | from copy import copy | ||
9 | |||
10 | from pip._vendor import pkg_resources, requests | ||
11 | |||
12 | from pip._internal.build_env import NoOpBuildEnvironment | ||
13 | from pip._internal.compat import expanduser | ||
14 | from pip._internal.download import ( | ||
15 | is_dir_url, is_file_url, is_vcs_url, unpack_url, url_to_path, | ||
16 | ) | ||
17 | from pip._internal.exceptions import ( | ||
18 | DirectoryUrlHashUnsupported, HashUnpinned, InstallationError, | ||
19 | PreviousBuildDirError, VcsHashUnsupported, | ||
20 | ) | ||
21 | from pip._internal.index import FormatControl | ||
22 | from pip._internal.req.req_install import InstallRequirement | ||
23 | from pip._internal.utils.hashes import MissingHashes | ||
24 | from pip._internal.utils.logging import indent_log | ||
25 | from pip._internal.utils.misc import ( | ||
26 | call_subprocess, display_path, normalize_path, | ||
27 | ) | ||
28 | from pip._internal.utils.ui import open_spinner | ||
29 | from pip._internal.vcs import vcs | ||
30 | |||
31 | logger = logging.getLogger(__name__) | ||
32 | |||
33 | |||
def make_abstract_dist(req):
    """Factory to make an abstract dist object.

    Preconditions: Either an editable req with a source_dir, or satisfied_by or
    a wheel link, or a non-editable req with a source_dir.

    :return: A concrete DistAbstraction.
    """
    # Editable requirements are always handled as sdists; otherwise the
    # resolved link (when present) decides between wheel and sdist handling.
    if not req.editable and req.link and req.link.is_wheel:
        return IsWheel(req)
    return IsSDist(req)
48 | |||
49 | |||
def _install_build_reqs(finder, prefix, build_requirements):
    """Install *build_requirements* into *prefix* via a pip subprocess.

    Resolves each requirement to a URL with a source-only format control,
    then shells out to ``pip install --prefix`` to install them.
    """
    # NOTE: What follows is not a very good thing.
    # Eventually, this should move into the BuildEnvironment class and
    # that should handle all the isolation and sub-process invocation.
    # Copy the finder so the caller's format control is left untouched;
    # the copy is restricted to source distributions only.
    finder = copy(finder)
    finder.format_control = FormatControl(set(), set([":all:"]))

    resolved_urls = []
    for requirement in build_requirements:
        found = finder.find_requirement(
            InstallRequirement.from_line(requirement), upgrade=False)
        resolved_urls.append(found.url)

    command = [
        sys.executable, '-m', 'pip', 'install', '--ignore-installed',
        '--no-user', '--prefix', prefix,
    ]
    command.extend(resolved_urls)

    with open_spinner("Installing build dependencies") as spinner:
        call_subprocess(command, show_stdout=False, spinner=spinner)
68 | |||
69 | |||
class DistAbstraction(object):
    """Abstracts out the wheel vs non-wheel Resolver.resolve() logic.

    The requirements for anything installable are as follows:
     - we must be able to determine the requirement name
       (or we can't correctly handle the non-upgrade case).
     - we must be able to generate a list of run-time dependencies
       without installing any additional packages (or we would
       have to either burn time by doing temporary isolated installs
       or alternatively violate pips 'don't start installing unless
       all requirements are available' rule - neither of which are
       desirable).
     - for packages with setup requirements, we must also be able
       to determine their requirements without installing additional
       packages (for the same reason as run-time dependencies)
     - we must be able to create a Distribution object exposing the
       above metadata.
    """

    def __init__(self, req):
        # The InstallRequirement this abstraction wraps.
        self.req = req

    def dist(self, finder):
        """Return a setuptools Dist object."""
        raise NotImplementedError(self.dist)

    def prep_for_dist(self, finder):
        """Ensure that we can get a Dist for this requirement."""
        # Bug fix: previously raised NotImplementedError(self.dist), which
        # made the error message name the wrong method.
        raise NotImplementedError(self.prep_for_dist)
99 | |||
100 | |||
class IsWheel(DistAbstraction):
    """Dist abstraction for a requirement backed by a wheel archive."""

    def dist(self, finder):
        """Return the first distribution found in the unpacked wheel dir."""
        found = list(pkg_resources.find_distributions(self.req.source_dir))
        return found[0]

    def prep_for_dist(self, finder, build_isolation):
        """No build step is needed: wheel metadata is already available."""
        # FIXME:https://github.com/pypa/pip/issues/1112
        pass
110 | |||
111 | |||
class IsSDist(DistAbstraction):
    """Dist abstraction for source distributions (and editable reqs):
    metadata only becomes available after running "setup.py egg_info".
    """

    def dist(self, finder):
        """Return the distribution produced by the egg_info step."""
        dist = self.req.get_dist()
        # FIXME: shouldn't be globally added.
        if finder and dist.has_metadata('dependency_links.txt'):
            finder.add_dependency_links(
                dist.get_metadata_lines('dependency_links.txt')
            )
        return dist

    def prep_for_dist(self, finder, build_isolation):
        """Run egg_info, inside a PEP 518 build env when isolation applies.

        :param finder: used to resolve the PEP 518 build requirements.
        :param build_isolation: whether isolation is allowed at all; the
            requirement's own pyproject.toml decides whether it is wanted.
        """
        # Before calling "setup.py egg_info", we need to set-up the build
        # environment.
        build_requirements, isolate = self.req.get_pep_518_info()
        should_isolate = build_isolation and isolate

        # This pip does not implement PEP 517, so setuptools and wheel must
        # appear among the declared build requirements; warn when missing.
        minimum_requirements = ('setuptools', 'wheel')
        missing_requirements = set(minimum_requirements) - set(
            pkg_resources.Requirement(r).key
            for r in build_requirements
        )
        if missing_requirements:
            def format_reqs(rs):
                # e.g. "'setuptools' and 'wheel'"
                return ' and '.join(map(repr, sorted(rs)))
            logger.warning(
                "Missing build time requirements in pyproject.toml for %s: "
                "%s.", self.req, format_reqs(missing_requirements)
            )
            logger.warning(
                "This version of pip does not implement PEP 517 so it cannot "
                "build a wheel without %s.", format_reqs(minimum_requirements)
            )

        if should_isolate:
            # NOTE(review): entering and immediately leaving the context
            # presumably creates the build environment's directories before
            # installing into it — confirm against BuildEnvironment.__enter__.
            with self.req.build_env:
                pass
            _install_build_reqs(finder, self.req.build_env.path,
                                build_requirements)
        else:
            # No isolation: replace the build env with a do-nothing stand-in.
            self.req.build_env = NoOpBuildEnvironment(no_clean=False)

        self.req.run_egg_info()
        self.req.assert_source_matches_version()
156 | |||
157 | |||
class Installed(DistAbstraction):
    """Dist abstraction for a requirement that is already installed.

    No preparation is needed; the installed distribution is returned as-is.
    """

    def dist(self, finder):
        """Return the already-installed distribution satisfying the req."""
        return self.req.satisfied_by

    def prep_for_dist(self, finder, build_isolation=None):
        """No-op: an installed distribution needs no preparation.

        ``build_isolation`` is accepted (and ignored) so the signature is
        call-compatible with the ``IsWheel``/``IsSDist`` siblings, which
        ``RequirementPreparer`` invokes as ``prep_for_dist(finder,
        build_isolation)``. The single-argument form still works.
        """
        pass
165 | |||
166 | |||
class RequirementPreparer(object):
    """Prepares a Requirement

    Fetches and unpacks each requirement's source (or wheel) and runs the
    preparation step needed before a Distribution can be obtained from it.
    """

    def __init__(self, build_dir, download_dir, src_dir, wheel_download_dir,
                 progress_bar, build_isolation):
        """
        :param build_dir: where non-editable requirements are unpacked.
        :param download_dir: where still-packed archives are kept, or None.
        :param src_dir: where editable requirements are checked out.
        :param wheel_download_dir: where still-packed .whl files are kept,
            or None.
        :param progress_bar: progress bar style forwarded to unpack_url.
        :param build_isolation: whether PEP 518 build isolation is allowed.
        """
        super(RequirementPreparer, self).__init__()

        self.src_dir = src_dir
        self.build_dir = build_dir

        # Where still packed archives should be written to. If None, they are
        # not saved, and are deleted immediately after unpacking.
        self.download_dir = download_dir

        # Where still-packed .whl files should be written to. If None, they are
        # written to the download_dir parameter. Separate to download_dir to
        # permit only keeping wheel archives for pip wheel.
        if wheel_download_dir:
            wheel_download_dir = normalize_path(wheel_download_dir)
        self.wheel_download_dir = wheel_download_dir

        # NOTE
        # download_dir and wheel_download_dir overlap semantically and may
        # be combined if we're willing to have non-wheel archives present in
        # the wheelhouse output by 'pip wheel'.

        self.progress_bar = progress_bar

        # Is build isolation allowed?
        self.build_isolation = build_isolation

    @property
    def _download_should_save(self):
        """True iff downloads should be saved (download_dir is set).

        Side effects: expands '~' in self.download_dir in place, and raises
        InstallationError when the directory does not exist.
        """
        # TODO: Modify to reduce indentation needed
        if self.download_dir:
            self.download_dir = expanduser(self.download_dir)
            if os.path.exists(self.download_dir):
                return True
            else:
                logger.critical('Could not find download directory')
                raise InstallationError(
                    "Could not find or access download directory '%s'"
                    % display_path(self.download_dir))
        return False

    def prepare_linked_requirement(self, req, session, finder,
                                   upgrade_allowed, require_hashes):
        """Prepare a requirement that would be obtained from req.link

        Resolves the link, enforces hash-checking rules, downloads and
        unpacks the archive, runs prep_for_dist, and returns the resulting
        DistAbstraction.
        """
        # TODO: Breakup into smaller functions
        if req.link and req.link.scheme == 'file':
            path = url_to_path(req.link.url)
            logger.info('Processing %s', display_path(path))
        else:
            logger.info('Collecting %s', req)

        with indent_log():
            # @@ if filesystem packages are not marked
            # editable in a req, a non deterministic error
            # occurs when the script attempts to unpack the
            # build directory
            req.ensure_has_source_dir(self.build_dir)
            # If a checkout exists, it's unwise to keep going. version
            # inconsistencies are logged later, but do not fail the
            # installation.
            # FIXME: this won't upgrade when there's an existing
            # package unpacked in `req.source_dir`
            if os.path.exists(os.path.join(req.source_dir, 'setup.py')):
                raise PreviousBuildDirError(
                    "pip can't proceed with requirements '%s' due to a"
                    " pre-existing build directory (%s). This is "
                    "likely due to a previous installation that failed"
                    ". pip is being responsible and not assuming it "
                    "can delete this. Please delete it and try again."
                    % (req, req.source_dir)
                )
            req.populate_link(finder, upgrade_allowed, require_hashes)

            # We can't hit this spot and have populate_link return None.
            # req.satisfied_by is None here (because we're
            # guarded) and upgrade has no impact except when satisfied_by
            # is not None.
            # Then inside find_requirement existing_applicable -> False
            # If no new versions are found, DistributionNotFound is raised,
            # otherwise a result is guaranteed.
            assert req.link
            link = req.link

            # Now that we have the real link, we can tell what kind of
            # requirements we have and raise some more informative errors
            # than otherwise. (For example, we can raise VcsHashUnsupported
            # for a VCS URL rather than HashMissing.)
            if require_hashes:
                # We could check these first 2 conditions inside
                # unpack_url and save repetition of conditions, but then
                # we would report less-useful error messages for
                # unhashable requirements, complaining that there's no
                # hash provided.
                if is_vcs_url(link):
                    raise VcsHashUnsupported()
                elif is_file_url(link) and is_dir_url(link):
                    raise DirectoryUrlHashUnsupported()
                if not req.original_link and not req.is_pinned:
                    # Unpinned packages are asking for trouble when a new
                    # version is uploaded. This isn't a security check, but
                    # it saves users a surprising hash mismatch in the
                    # future.
                    #
                    # file:/// URLs aren't pinnable, so don't complain
                    # about them not being pinned.
                    raise HashUnpinned()

            hashes = req.hashes(trust_internet=not require_hashes)
            if require_hashes and not hashes:
                # Known-good hashes are missing for this requirement, so
                # shim it with a facade object that will provoke hash
                # computation and then raise a HashMissing exception
                # showing the user what the hash should be.
                hashes = MissingHashes()

            try:
                download_dir = self.download_dir
                # We always delete unpacked sdists after pip ran.
                autodelete_unpacked = True
                if req.link.is_wheel and self.wheel_download_dir:
                    # when doing 'pip wheel` we download wheels to a
                    # dedicated dir.
                    download_dir = self.wheel_download_dir
                if req.link.is_wheel:
                    if download_dir:
                        # When downloading, we only unpack wheels to get
                        # metadata.
                        autodelete_unpacked = True
                    else:
                        # When installing a wheel, we use the unpacked
                        # wheel.
                        autodelete_unpacked = False
                unpack_url(
                    req.link, req.source_dir,
                    download_dir, autodelete_unpacked,
                    session=session, hashes=hashes,
                    progress_bar=self.progress_bar
                )
            except requests.HTTPError as exc:
                logger.critical(
                    'Could not install requirement %s because of error %s',
                    req,
                    exc,
                )
                raise InstallationError(
                    'Could not install requirement %s because of HTTP '
                    'error %s for URL %s' %
                    (req, exc, req.link)
                )
            abstract_dist = make_abstract_dist(req)
            abstract_dist.prep_for_dist(finder, self.build_isolation)
            if self._download_should_save:
                # Make a .zip of the source_dir we already created.
                if req.link.scheme in vcs.all_schemes:
                    req.archive(self.download_dir)
            return abstract_dist

    def prepare_editable_requirement(self, req, require_hashes, use_user_site,
                                     finder):
        """Prepare an editable requirement

        Checks out / updates the source in self.src_dir, runs prep_for_dist,
        and returns the resulting DistAbstraction. Raises InstallationError
        when hash-checking is required, since editables cannot be hashed.
        """
        assert req.editable, "cannot prepare a non-editable req as editable"

        logger.info('Obtaining %s', req)

        with indent_log():
            if require_hashes:
                raise InstallationError(
                    'The editable requirement %s cannot be installed when '
                    'requiring hashes, because there is no single file to '
                    'hash.' % req
                )
            req.ensure_has_source_dir(self.src_dir)
            req.update_editable(not self._download_should_save)

            abstract_dist = make_abstract_dist(req)
            abstract_dist.prep_for_dist(finder, self.build_isolation)

            if self._download_should_save:
                req.archive(self.download_dir)
            req.check_if_exists(use_user_site)

        return abstract_dist

    def prepare_installed_requirement(self, req, require_hashes, skip_reason):
        """Prepare an already-installed requirement

        Logs why the requirement is being skipped and returns an Installed
        abstraction wrapping it; no download or build work is performed.
        """
        assert req.satisfied_by, "req should have been satisfied but isn't"
        assert skip_reason is not None, (
            "did not get skip reason skipped but req.satisfied_by "
            "is set to %r" % (req.satisfied_by,)
        )
        logger.info(
            'Requirement %s: %s (%s)',
            skip_reason, req, req.satisfied_by.version
        )
        with indent_log():
            if require_hashes:
                logger.debug(
                    'Since it is already installed, we are trusting this '
                    'package without checking its hash. To ensure a '
                    'completely repeatable environment, install into an '
                    'empty virtualenv.'
                )
            abstract_dist = Installed(req)

        return abstract_dist