diff options
author | Shubham Saini <shubham6405@gmail.com> | 2018-12-11 10:01:23 +0000 |
---|---|---|
committer | Shubham Saini <shubham6405@gmail.com> | 2018-12-11 10:01:23 +0000 |
commit | 68df54d6629ec019142eb149dd037774f2d11e7c (patch) | |
tree | 345bc22d46b4e01a4ba8303b94278952a4ed2b9e /venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils |
First commit
Diffstat (limited to 'venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils')
15 files changed, 2330 insertions, 0 deletions
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/__init__.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/__init__.py | |||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/appdirs.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/appdirs.py new file mode 100644 index 0000000..0eb87ca --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/appdirs.py | |||
@@ -0,0 +1,258 @@ | |||
1 | """ | ||
2 | This code was taken from https://github.com/ActiveState/appdirs and modified | ||
3 | to suit our purposes. | ||
4 | """ | ||
5 | from __future__ import absolute_import | ||
6 | |||
7 | import os | ||
8 | import sys | ||
9 | |||
10 | from pip._vendor.six import PY2, text_type | ||
11 | |||
12 | from pip._internal.compat import WINDOWS, expanduser | ||
13 | |||
14 | |||
def user_cache_dir(appname):
    r"""
    Return full path to the user-specific cache dir for this application.

    "appname" is the name of application.

    Typical user cache directories are:
        macOS:      ~/Library/Caches/<AppName>
        Unix:       ~/.cache/<AppName> (XDG default)
        Windows:    C:\Users\<username>\AppData\Local\<AppName>\Cache

    On Windows the only suggestion in the MSDN docs is that local settings
    go in the `CSIDL_LOCAL_APPDATA` directory, which is identical to the
    non-roaming app data dir (the default returned by `user_data_dir`).
    Apps typically put cache data somewhere *under* that dir, so this
    function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value.
    """
    if WINDOWS:
        base = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA"))
        # When using Python 2, return paths as bytes on Windows like we do
        # on other operating systems (see helper function docs).
        if PY2 and isinstance(base, text_type):
            base = _win_path_to_bytes(base)
        # <base>\<AppName>\Cache
        return os.path.join(base, appname, "Cache")
    if sys.platform == "darwin":
        return os.path.join(expanduser("~/Library/Caches"), appname)
    # XDG spec: honour $XDG_CACHE_HOME, falling back to ~/.cache.
    base = os.getenv("XDG_CACHE_HOME", expanduser("~/.cache"))
    return os.path.join(base, appname)
61 | |||
62 | |||
def user_data_dir(appname, roaming=False):
    r"""
    Return full path to the user-specific data dir for this application.

    "appname" is the name of application.
        If None, just the system directory is returned.
    "roaming" (boolean, default False) can be set True to use the Windows
        roaming appdata directory. That means that for users on a Windows
        network setup for roaming profiles, this user data will be
        sync'd on login. See
        <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
        for a discussion of issues.

    Typical user data directories are:
        macOS:                  ~/Library/Application Support/<AppName>
                                if it exists, else ~/.config/<AppName>
        Unix:                   ~/.local/share/<AppName>    # or in
                                $XDG_DATA_HOME, if defined
        Win XP (not roaming):   C:\Documents and Settings\<username>\ ...
                                ...Application Data\<AppName>
        Win XP (roaming):       C:\Documents and Settings\<username>\Local ...
                                ...Settings\Application Data\<AppName>
        Win 7  (not roaming):   C:\\Users\<username>\AppData\Local\<AppName>
        Win 7  (roaming):       C:\\Users\<username>\AppData\Roaming\<AppName>

    For Unix, we follow the XDG spec and support $XDG_DATA_HOME.
    That means, by default "~/.local/share/<AppName>".
    """
    if WINDOWS:
        # Prefer a conditional expression over the fragile `and/or` hack.
        const = "CSIDL_APPDATA" if roaming else "CSIDL_LOCAL_APPDATA"
        path = os.path.join(os.path.normpath(_get_win_folder(const)), appname)
    elif sys.platform == "darwin":
        # Compute the candidate path once instead of building the same
        # join expression multiple times inside one conditional.
        darwin_path = os.path.join(
            expanduser('~/Library/Application Support/'),
            appname,
        )
        if os.path.isdir(darwin_path):
            path = darwin_path
        else:
            path = os.path.join(expanduser('~/.config/'), appname)
    else:
        path = os.path.join(
            os.getenv('XDG_DATA_HOME', expanduser("~/.local/share")),
            appname,
        )

    return path
113 | |||
114 | |||
def user_config_dir(appname, roaming=True):
    """Return full path to the user-specific config dir for this application.

    "appname" is the name of application.
        If None, just the system directory is returned.
    "roaming" (boolean, default True) can be set False to not use the
        Windows roaming appdata directory. That means that for users on a
        Windows network setup for roaming profiles, this user data will be
        sync'd on login. See
        <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
        for a discussion of issues.

    Typical user config directories are:
        macOS:      same as user_data_dir
        Unix:       ~/.config/<AppName>
        Win *:      same as user_data_dir

    For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME.
    That means, by default "~/.config/<AppName>".
    """
    if WINDOWS:
        return user_data_dir(appname, roaming=roaming)
    if sys.platform == "darwin":
        return user_data_dir(appname)
    # XDG spec: honour $XDG_CONFIG_HOME, falling back to ~/.config.
    base = os.getenv('XDG_CONFIG_HOME', expanduser("~/.config"))
    return os.path.join(base, appname)
144 | |||
145 | |||
# for the discussion regarding site_config_dirs locations
# see <https://github.com/pypa/pip/issues/1733>
def site_config_dirs(appname):
    r"""Return a list of potential user-shared config dirs for this application.

    "appname" is the name of application.

    Typical user config directories are:
        macOS:      /Library/Application Support/<AppName>/
        Unix:       /etc or $XDG_CONFIG_DIRS[i]/<AppName>/ for each value in
                    $XDG_CONFIG_DIRS
        Win XP:     C:\Documents and Settings\All Users\Application ...
                    ...Data\<AppName>\
        Vista:      (Fail! "C:\ProgramData" is a hidden *system* directory
                    on Vista.)
        Win 7:      Hidden, but writeable on Win 7:
                    C:\ProgramData\<AppName>\
    """
    if WINDOWS:
        base = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA"))
        return [os.path.join(base, appname)]
    if sys.platform == 'darwin':
        return [os.path.join('/Library/Application Support', appname)]

    # Unix: $XDG_CONFIG_DIRS is a path-separated list, defaulting to
    # /etc/xdg; guard against the variable being set but empty.
    xdg_dirs = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg')
    if xdg_dirs:
        dirs = [
            os.path.join(expanduser(d), appname)
            for d in xdg_dirs.split(os.pathsep)
        ]
    else:
        dirs = []
    # Always look in /etc directly as well.
    dirs.append('/etc')
    return dirs
184 | |||
185 | |||
186 | # -- Windows support functions -- | ||
187 | |||
def _get_win_folder_from_registry(csidl_name):
    """
    Look up a Windows shell folder path in the per-user registry.

    This is a fallback technique at best: it is not certain that using
    the registry gives the correct answer for every CSIDL_* name.
    """
    import _winreg

    # Map the CSIDL constant onto its "Shell Folders" registry value name.
    value_name = {
        "CSIDL_APPDATA": "AppData",
        "CSIDL_COMMON_APPDATA": "Common AppData",
        "CSIDL_LOCAL_APPDATA": "Local AppData",
    }[csidl_name]

    key = _winreg.OpenKey(
        _winreg.HKEY_CURRENT_USER,
        r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders"
    )
    directory, _value_type = _winreg.QueryValueEx(key, value_name)
    return directory
208 | |||
209 | |||
def _get_win_folder_with_ctypes(csidl_name):
    """Resolve a CSIDL_* folder through SHGetFolderPathW via ctypes."""
    csidl_const = {
        "CSIDL_APPDATA": 26,
        "CSIDL_COMMON_APPDATA": 35,
        "CSIDL_LOCAL_APPDATA": 28,
    }[csidl_name]

    buf = ctypes.create_unicode_buffer(1024)
    ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)

    # Downgrade to the short (8.3) path name if the result contains
    # high-bit characters. See
    # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
    if any(ord(ch) > 255 for ch in buf):
        short_buf = ctypes.create_unicode_buffer(1024)
        if ctypes.windll.kernel32.GetShortPathNameW(buf.value, short_buf, 1024):
            buf = short_buf

    return buf.value
233 | |||
234 | |||
# Bind the best available Windows folder-resolution strategy at import
# time: prefer the ctypes/SHGetFolderPathW implementation, and fall back
# to the registry lookup when ctypes is unavailable. `_get_win_folder`
# is only defined (and only needed) on Windows.
if WINDOWS:
    try:
        import ctypes
        _get_win_folder = _get_win_folder_with_ctypes
    except ImportError:
        _get_win_folder = _get_win_folder_from_registry
241 | |||
242 | |||
243 | def _win_path_to_bytes(path): | ||
244 | """Encode Windows paths to bytes. Only used on Python 2. | ||
245 | |||
246 | Motivation is to be consistent with other operating systems where paths | ||
247 | are also returned as bytes. This avoids problems mixing bytes and Unicode | ||
248 | elsewhere in the codebase. For more details and discussion see | ||
249 | <https://github.com/pypa/pip/issues/3463>. | ||
250 | |||
251 | If encoding using ASCII and MBCS fails, return the original Unicode path. | ||
252 | """ | ||
253 | for encoding in ('ASCII', 'MBCS'): | ||
254 | try: | ||
255 | return path.encode(encoding) | ||
256 | except (UnicodeEncodeError, LookupError): | ||
257 | pass | ||
258 | return path | ||
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/deprecation.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/deprecation.py new file mode 100644 index 0000000..c0e3884 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/deprecation.py | |||
@@ -0,0 +1,77 @@ | |||
1 | """ | ||
2 | A module that implements tooling to enable easy warnings about deprecations. | ||
3 | """ | ||
4 | from __future__ import absolute_import | ||
5 | |||
6 | import logging | ||
7 | import warnings | ||
8 | |||
9 | from pip._internal.utils.typing import MYPY_CHECK_RUNNING | ||
10 | |||
11 | if MYPY_CHECK_RUNNING: | ||
12 | from typing import Any | ||
13 | |||
14 | |||
class PipDeprecationWarning(Warning):
    """Base class for all of pip's own deprecation warnings."""
    pass


class Pending(object):
    """Mixin marking a deprecation that is not yet imminent (the
    behaviour survives for at least two more releases)."""
    pass


class RemovedInPip11Warning(PipDeprecationWarning):
    """The deprecated behaviour will be removed in pip 11."""
    pass


class RemovedInPip12Warning(PipDeprecationWarning, Pending):
    """The deprecated behaviour will be removed in pip 12 (still pending)."""
    pass
29 | |||
30 | |||
31 | # Warnings <-> Logging Integration | ||
32 | |||
33 | |||
34 | _warnings_showwarning = None # type: Any | ||
35 | |||
36 | |||
def _showwarning(message, category, filename, lineno, file=None, line=None):
    """warnings.showwarning replacement that routes pip's own deprecation
    warnings through the logging system, delegating everything else to
    the original implementation saved in ``_warnings_showwarning``."""
    if file is not None:
        # An explicit target file was given: never log, just delegate.
        if _warnings_showwarning is not None:
            _warnings_showwarning(
                message, category, filename, lineno, file, line,
            )
        return

    if not issubclass(category, PipDeprecationWarning):
        # Not one of ours -- hand it back to the stock implementation.
        _warnings_showwarning(
            message, category, filename, lineno, file, line,
        )
        return

    # A specially named logger handles all of pip's deprecation messages.
    logger = logging.getLogger("pip._internal.deprecations")

    # Interpolate with % here (instead of letting the logging module do
    # it) so the text reads as if someone typed the entire message out.
    log_message = "DEPRECATION: %s" % message

    # Pending deprecations still have at least 2 versions to go, so a
    # plain warning suffices. Otherwise the behaviour disappears in the
    # very next version of pip, which deserves the louder ERROR level.
    if issubclass(category, Pending):
        logger.warning(log_message)
    else:
        logger.error(log_message)
67 | |||
68 | |||
def install_warning_logger():
    """Install the logging-based handler for pip's deprecation warnings.

    Safe to call more than once: the hook is only installed the first time.
    """
    # Make sure our deprecation warnings are displayed by default.
    warnings.simplefilter("default", PipDeprecationWarning, append=True)

    global _warnings_showwarning
    if _warnings_showwarning is None:
        # Remember the stock implementation so _showwarning can delegate
        # non-pip warnings to it, then swap in our hook.
        _warnings_showwarning = warnings.showwarning
        warnings.showwarning = _showwarning
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/encoding.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/encoding.py new file mode 100644 index 0000000..831f3f6 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/encoding.py | |||
@@ -0,0 +1,33 @@ | |||
1 | import codecs | ||
2 | import locale | ||
3 | import re | ||
4 | import sys | ||
5 | |||
# (BOM bytes, codec name) pairs checked, in order, by auto_decode().
BOMS = [
    (codecs.BOM_UTF8, 'utf8'),
    (codecs.BOM_UTF16, 'utf16'),
    (codecs.BOM_UTF16_BE, 'utf16-be'),
    (codecs.BOM_UTF16_LE, 'utf16-le'),
    (codecs.BOM_UTF32, 'utf32'),
    (codecs.BOM_UTF32_BE, 'utf32-be'),
    (codecs.BOM_UTF32_LE, 'utf32-le'),
]

# PEP 263 style declaration, e.g. "# -*- coding: utf-8 -*-".
ENCODING_RE = re.compile(br'coding[:=]\s*([-\w.]+)')


def auto_decode(data):
    """Decode a bytes string, detecting the encoding.

    Checks for a BOM first, then for a PEP 263 coding declaration in the
    first two lines; falls back to locale.getpreferredencoding(False)
    like open() does on Python 3.

    :param data: the bytes to decode.
    :returns: the decoded text.
    """
    for bom, encoding in BOMS:
        if data.startswith(bom):
            return data[len(bom):].decode(encoding)
    # Check the first two lines for a coding declaration (PEP 263).
    for line in data.split(b'\n')[:2]:
        if line[0:1] == b'#':
            # Run the regex once and reuse the match (the original code
            # performed the same search twice per candidate line).
            match = ENCODING_RE.search(line)
            if match:
                encoding = match.groups()[0].decode('ascii')
                return data.decode(encoding)
    return data.decode(
        locale.getpreferredencoding(False) or sys.getdefaultencoding(),
    )
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/filesystem.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/filesystem.py new file mode 100644 index 0000000..94fa2c6 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/filesystem.py | |||
@@ -0,0 +1,28 @@ | |||
1 | import os | ||
2 | import os.path | ||
3 | |||
4 | from pip._internal.compat import get_path_uid | ||
5 | |||
6 | |||
def check_path_owner(path):
    """Return whether the current user may write within *path*.

    Walks upward from *path* to the first existing ancestor and checks
    writability there. When running as root (euid 0) we instead require
    the path to be owned by root, so that ``sudo`` used without ``-H``
    (i.e. with a mismatched $HOME) is handled properly.
    """
    # If we don't have a way to check the effective uid of this process,
    # then we'll just assume that we own the directory.
    if not hasattr(os, "geteuid"):
        return True

    previous = None
    while path != previous:
        if os.path.lexists(path):
            if os.geteuid() == 0:
                try:
                    path_uid = get_path_uid(path)
                except OSError:
                    return False
                return path_uid == 0
            # Non-root: a plain writability check suffices.
            return os.access(path, os.W_OK)
        previous, path = path, os.path.dirname(path)
    # No existing ancestor was found (e.g. an empty path). Return an
    # explicit False instead of implicitly falling off the end with None,
    # which is falsy but surprising for a boolean predicate.
    return False
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/glibc.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/glibc.py new file mode 100644 index 0000000..5900a10 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/glibc.py | |||
@@ -0,0 +1,84 @@ | |||
1 | from __future__ import absolute_import | ||
2 | |||
3 | import ctypes | ||
4 | import re | ||
5 | import warnings | ||
6 | |||
7 | |||
def glibc_version_string():
    """Return the glibc version string, or None if not running on glibc."""
    # ctypes.CDLL(None) internally calls dlopen(NULL); per the dlopen
    # manpage, "If filename is NULL, then the returned handle is for the
    # main program", so the dynamic linker tells us which libc this
    # process is actually using.
    process_namespace = ctypes.CDLL(None)
    try:
        gnu_get_libc_version = process_namespace.gnu_get_libc_version
    except AttributeError:
        # The symbol is absent, therefore we are not linked to glibc.
        return None

    # gnu_get_libc_version() returns a C string like "2.5".
    gnu_get_libc_version.restype = ctypes.c_char_p
    version = gnu_get_libc_version()
    # py2/py3 compatibility: normalise bytes results to str.
    if not isinstance(version, str):
        version = version.decode("ascii")

    return version
31 | |||
32 | |||
# Separated out from have_compatible_glibc for easier unit testing.
def check_glibc_version(version_str, required_major, minimum_minor):
    """Return True if *version_str* names glibc ``required_major.x`` with
    ``x >= minimum_minor``.

    A regexp is used instead of str.split so that any random junk after
    the minor version is discarded -- patched/forked glibcs can use
    version strings like "2.20-2014.11" (Linaro). See gh-3588.
    """
    match = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
    if match is None:
        warnings.warn("Expected glibc version with 2 components major.minor,"
                      " got: %s" % version_str, RuntimeWarning)
        return False
    major = int(match.group("major"))
    minor = int(match.group("minor"))
    return major == required_major and minor >= minimum_minor
48 | |||
49 | |||
def have_compatible_glibc(required_major, minimum_minor):
    """True when the running libc is glibc >= required_major.minimum_minor."""
    version = glibc_version_string()
    # Non-glibc systems (version is None) are never compatible.
    return version is not None and check_glibc_version(
        version, required_major, minimum_minor,
    )
55 | |||
56 | |||
# platform.libc_ver regularly returns completely nonsensical glibc
# versions. E.g. on one machine:
#
#     ~$ python2.7 -c 'import platform; print(platform.libc_ver())'
#     ('glibc', '2.7')
#     ~$ python3.5 -c 'import platform; print(platform.libc_ver())'
#     ('glibc', '2.9')
#
# while the truth is:
#
#     ~$ ldd --version
#     ldd (Debian GLIBC 2.22-11) 2.22
#
# This is unfortunate: it makes the linehaul libc data generated by
# pip 8.1.2 and earlier useless and misleading. Solution: instead of
# platform, use our code that actually works.
def libc_ver():
    """Try to determine the glibc version.

    Returns a tuple of strings (lib, version) which default to empty
    strings in case the lookup fails.
    """
    version = glibc_version_string()
    if version is None:
        return ("", "")
    return ("glibc", version)
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/hashes.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/hashes.py new file mode 100644 index 0000000..8cf6367 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/hashes.py | |||
@@ -0,0 +1,94 @@ | |||
1 | from __future__ import absolute_import | ||
2 | |||
3 | import hashlib | ||
4 | |||
5 | from pip._vendor.six import iteritems, iterkeys, itervalues | ||
6 | |||
7 | from pip._internal.exceptions import ( | ||
8 | HashMismatch, HashMissing, InstallationError, | ||
9 | ) | ||
10 | from pip._internal.utils.misc import read_chunks | ||
11 | |||
12 | # The recommended hash algo of the moment. Change this whenever the state of | ||
13 | # the art changes; it won't hurt backward compatibility. | ||
14 | FAVORITE_HASH = 'sha256' | ||
15 | |||
16 | |||
17 | # Names of hashlib algorithms allowed by the --hash option and ``pip hash`` | ||
18 | # Currently, those are the ones at least as collision-resistant as sha256. | ||
19 | STRONG_HASHES = ['sha256', 'sha384', 'sha512'] | ||
20 | |||
21 | |||
class Hashes(object):
    """A wrapper that builds multiple hashes at once and checks them
    against known-good values.
    """
    def __init__(self, hashes=None):
        """
        :param hashes: A dict of algorithm names pointing to lists of
            allowed hex digests
        """
        self._allowed = {} if hashes is None else hashes

    def check_against_chunks(self, chunks):
        """Check good hashes against ones built from iterable of chunks of
        data.

        Raise HashMismatch if none match.

        :raises InstallationError: if an allowed algorithm name is not
            recognised by hashlib.
        """
        gots = {}
        # Plain dict iteration replaces the six.iterkeys/itervalues/
        # iteritems indirection -- behaviour is identical on both
        # Python 2 and Python 3, and the intent is clearer.
        for hash_name in self._allowed:
            try:
                gots[hash_name] = hashlib.new(hash_name)
            except (ValueError, TypeError):
                raise InstallationError('Unknown hash name: %s' % hash_name)

        for chunk in chunks:
            for hasher in gots.values():
                hasher.update(chunk)

        # Success as soon as any computed digest matches its allow-list.
        for hash_name, got in gots.items():
            if got.hexdigest() in self._allowed[hash_name]:
                return
        self._raise(gots)

    def _raise(self, gots):
        # Overridden by MissingHashes to raise HashMissing instead.
        raise HashMismatch(self._allowed, gots)

    def check_against_file(self, file):
        """Check good hashes against a file-like object.

        Raise HashMismatch if none match.
        """
        return self.check_against_chunks(read_chunks(file))

    def check_against_path(self, path):
        """Check good hashes against the file at *path*."""
        with open(path, 'rb') as file:
            return self.check_against_file(file)

    def __nonzero__(self):
        """Return whether I know any known-good hashes."""
        return bool(self._allowed)

    def __bool__(self):
        # Python 3 spelling of __nonzero__.
        return self.__nonzero__()
78 | |||
79 | |||
class MissingHashes(Hashes):
    """A workalike for Hashes used when we're missing a hash for a requirement

    It computes the actual hash of the requirement and raises a HashMissing
    exception showing it to the user.

    """
    def __init__(self):
        """Don't offer the ``hashes`` kwarg."""
        # Pass our favorite hash in to generate a "gotten hash". With the
        # empty allow-list, no digest can ever match, so _raise() always
        # fires with the freshly computed hash to show the user.
        super(MissingHashes, self).__init__(hashes={FAVORITE_HASH: []})

    def _raise(self, gots):
        # Report the hash we actually computed instead of a mismatch.
        raise HashMissing(gots[FAVORITE_HASH].hexdigest())
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/logging.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/logging.py new file mode 100644 index 0000000..1fb3e8a --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/logging.py | |||
@@ -0,0 +1,132 @@ | |||
1 | from __future__ import absolute_import | ||
2 | |||
3 | import contextlib | ||
4 | import logging | ||
5 | import logging.handlers | ||
6 | import os | ||
7 | |||
8 | from pip._internal.compat import WINDOWS | ||
9 | from pip._internal.utils.misc import ensure_dir | ||
10 | |||
11 | try: | ||
12 | import threading | ||
13 | except ImportError: | ||
14 | import dummy_threading as threading # type: ignore | ||
15 | |||
16 | |||
17 | try: | ||
18 | from pip._vendor import colorama | ||
19 | # Lots of different errors can come from this, including SystemError and | ||
20 | # ImportError. | ||
21 | except Exception: | ||
22 | colorama = None | ||
23 | |||
24 | |||
25 | _log_state = threading.local() | ||
26 | _log_state.indentation = 0 | ||
27 | |||
28 | |||
29 | @contextlib.contextmanager | ||
30 | def indent_log(num=2): | ||
31 | """ | ||
32 | A context manager which will cause the log output to be indented for any | ||
33 | log messages emitted inside it. | ||
34 | """ | ||
35 | _log_state.indentation += num | ||
36 | try: | ||
37 | yield | ||
38 | finally: | ||
39 | _log_state.indentation -= num | ||
40 | |||
41 | |||
def get_indentation():
    """Return the current log indentation level (0 if never set)."""
    try:
        return _log_state.indentation
    except AttributeError:
        # A thread that never entered indent_log() has no attribute yet.
        return 0
44 | |||
45 | |||
class IndentingFormatter(logging.Formatter):
    """Formatter that prefixes every output line with the current
    indentation level maintained by indent_log()."""

    def format(self, record):
        """
        Calls the standard formatter, but will indent all of the log messages
        by our current indentation level.
        """
        prefix = " " * get_indentation()
        formatted = logging.Formatter.format(self, record)
        # splitlines(True) keeps the newlines so multi-line messages are
        # reassembled intact, each line carrying the prefix.
        return "".join(
            prefix + line for line in formatted.splitlines(True)
        )
59 | |||
60 | |||
def _color_wrap(*colors):
    """Return a function wrapping its input in *colors* plus a style reset."""
    def wrapped(inp):
        parts = list(colors)
        parts.append(inp)
        parts.append(colorama.Style.RESET_ALL)
        return "".join(parts)
    return wrapped
65 | |||
66 | |||
class ColorizedStreamHandler(logging.StreamHandler):
    """StreamHandler that colorizes WARNING/ERROR output via colorama when
    the target stream supports it (tty or TERM=ANSI) and coloring has not
    been disabled."""

    # Don't build up a list of colors if we don't have colorama
    if colorama:
        COLORS = [
            # This needs to be in order from highest logging level to lowest.
            (logging.ERROR, _color_wrap(colorama.Fore.RED)),
            (logging.WARNING, _color_wrap(colorama.Fore.YELLOW)),
        ]
    else:
        COLORS = []

    def __init__(self, stream=None, no_color=None):
        # no_color: truthy to force-disable coloring regardless of stream.
        logging.StreamHandler.__init__(self, stream)
        self._no_color = no_color

        if WINDOWS and colorama:
            # Wrap the stream so ANSI escape sequences are translated to
            # Win32 console calls.
            self.stream = colorama.AnsiToWin32(self.stream)

    def should_color(self):
        # Don't colorize things if we do not have colorama or if told not to
        if not colorama or self._no_color:
            return False

        # Unwrap AnsiToWin32 to inspect the underlying stream.
        real_stream = (
            self.stream if not isinstance(self.stream, colorama.AnsiToWin32)
            else self.stream.wrapped
        )

        # If the stream is a tty we should color it
        if hasattr(real_stream, "isatty") and real_stream.isatty():
            return True

        # If we have an ANSI term we should color it
        if os.environ.get("TERM") == "ANSI":
            return True

        # If anything else we should not color it
        return False

    def format(self, record):
        msg = logging.StreamHandler.format(self, record)

        if self.should_color():
            # Apply the color of the first (highest) level threshold met.
            for level, color in self.COLORS:
                if record.levelno >= level:
                    msg = color(msg)
                    break

        return msg
117 | |||
118 | |||
class BetterRotatingFileHandler(logging.handlers.RotatingFileHandler):
    """RotatingFileHandler that creates the log directory on demand."""

    def _open(self):
        # Make sure the parent directory exists before opening the file.
        log_dir = os.path.dirname(self.baseFilename)
        ensure_dir(log_dir)
        return logging.handlers.RotatingFileHandler._open(self)
124 | |||
125 | |||
class MaxLevelFilter(logging.Filter):
    """Filter that only passes records strictly below a given level.

    Useful for splitting output across handlers, e.g. INFO-and-below to
    stdout while a second handler takes WARNING-and-above.
    """

    def __init__(self, level):
        # Records with levelno >= level are rejected by filter().
        self.level = level

    def filter(self, record):
        """Return a truthy value to keep records below the threshold."""
        return record.levelno < self.level
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/misc.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/misc.py new file mode 100644 index 0000000..db84a7c --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/misc.py | |||
@@ -0,0 +1,851 @@ | |||
1 | from __future__ import absolute_import | ||
2 | |||
3 | import contextlib | ||
4 | import errno | ||
5 | import io | ||
6 | import locale | ||
7 | # we have a submodule named 'logging' which would shadow this if we used the | ||
8 | # regular name: | ||
9 | import logging as std_logging | ||
10 | import os | ||
11 | import posixpath | ||
12 | import re | ||
13 | import shutil | ||
14 | import stat | ||
15 | import subprocess | ||
16 | import sys | ||
17 | import tarfile | ||
18 | import zipfile | ||
19 | from collections import deque | ||
20 | |||
21 | from pip._vendor import pkg_resources | ||
22 | # NOTE: retrying is not annotated in typeshed as on 2017-07-17, which is | ||
23 | # why we ignore the type on this import. | ||
24 | from pip._vendor.retrying import retry # type: ignore | ||
25 | from pip._vendor.six import PY2 | ||
26 | from pip._vendor.six.moves import input | ||
27 | |||
28 | from pip._internal.compat import console_to_str, expanduser, stdlib_pkgs | ||
29 | from pip._internal.exceptions import InstallationError | ||
30 | from pip._internal.locations import ( | ||
31 | running_under_virtualenv, site_packages, user_site, virtualenv_no_global, | ||
32 | write_delete_marker_file, | ||
33 | ) | ||
34 | |||
35 | if PY2: | ||
36 | from io import BytesIO as StringIO | ||
37 | else: | ||
38 | from io import StringIO | ||
39 | |||
# Names re-exported as the stable public surface of this module.
__all__ = ['rmtree', 'display_path', 'backup_dir',
           'ask', 'splitext',
           'format_size', 'is_installable_dir',
           'is_svn_page', 'file_contents',
           'split_leading_dir', 'has_leading_dir',
           'normalize_path',
           'renames', 'get_prog',
           'unzip_file', 'untar_file', 'unpack_file', 'call_subprocess',
           'captured_stdout', 'ensure_dir',
           'ARCHIVE_EXTENSIONS', 'SUPPORTED_EXTENSIONS',
           'get_installed_version']


logger = std_logging.getLogger(__name__)

# Archive filename extensions, grouped by container/compression format.
BZ2_EXTENSIONS = ('.tar.bz2', '.tbz')
XZ_EXTENSIONS = ('.tar.xz', '.txz', '.tlz', '.tar.lz', '.tar.lzma')
ZIP_EXTENSIONS = ('.zip', '.whl')
TAR_EXTENSIONS = ('.tar.gz', '.tgz', '.tar')
ARCHIVE_EXTENSIONS = (
    ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS)
# Formats this interpreter can actually unpack; bz2/xz are appended below
# only when the corresponding stdlib module is importable, since some
# Python builds ship without them.
SUPPORTED_EXTENSIONS = ZIP_EXTENSIONS + TAR_EXTENSIONS
try:
    import bz2  # noqa
    SUPPORTED_EXTENSIONS += BZ2_EXTENSIONS
except ImportError:
    logger.debug('bz2 module is not available')

try:
    # Only for Python 3.3+
    import lzma  # noqa
    SUPPORTED_EXTENSIONS += XZ_EXTENSIONS
except ImportError:
    logger.debug('lzma module is not available')
74 | |||
75 | |||
def import_or_raise(pkg_or_module_string, ExceptionType, *args, **kwargs):
    """Import and return the named module, or raise ExceptionType.

    Extra positional/keyword arguments are forwarded to the ExceptionType
    constructor when the import fails.
    """
    try:
        module = __import__(pkg_or_module_string)
    except ImportError:
        raise ExceptionType(*args, **kwargs)
    return module
81 | |||
82 | |||
def ensure_dir(path):
    """Create directory ``path`` (with parents), tolerating its existence.

    Like os.makedirs(), except an already-existing path is not an error;
    any other OSError propagates.
    """
    try:
        os.makedirs(path)
    except OSError as err:
        # EEXIST means somebody (possibly an earlier call) created it.
        if err.errno != errno.EEXIST:
            raise
90 | |||
91 | |||
def get_prog():
    """Best-effort program name of the running pip for usage messages.

    When invoked via ``python -m pip`` (argv[0] is ``__main__.py`` or
    ``-c``), report that form instead of the bare script name; fall back
    to the literal 'pip' when argv is unusable.
    """
    try:
        basename = os.path.basename(sys.argv[0])
        if basename in ('__main__.py', '-c'):
            return "%s -m pip" % sys.executable
        return basename
    except (AttributeError, TypeError, IndexError):
        # sys.argv may be missing or empty in embedded interpreters.
        return 'pip'
102 | |||
103 | |||
# Retry every half second for up to 3 seconds
@retry(stop_max_delay=3000, wait_fixed=500)
def rmtree(dir, ignore_errors=False):
    # Retrying works around transient deletion failures (e.g. antivirus or
    # indexing services briefly holding files open, common on Windows).
    shutil.rmtree(dir, ignore_errors=ignore_errors,
                  onerror=rmtree_errorhandler)
109 | |||
110 | |||
def rmtree_errorhandler(func, path, exc_info):
    """shutil.rmtree onerror callback: clear a read-only bit and retry.

    On Windows the files in .svn are read-only, which makes removal raise.
    If that is the case, make the path writable and repeat the failed
    operation once; otherwise re-raise the original exception.
    """
    if not (os.stat(path).st_mode & stat.S_IREAD):
        # Not a read-only problem: we are running inside exception handling,
        # so a bare raise re-raises the original error.
        raise
    # Grant write permission, then retry the operation that failed.
    os.chmod(path, stat.S_IWRITE)
    func(path)
124 | |||
125 | |||
def display_path(path):
    """Return ``path`` for display, shortened relative to cwd when inside it."""
    path = os.path.normcase(os.path.abspath(path))
    if sys.version_info[0] == 2:
        # On Python 2 the path may be bytes in the filesystem encoding;
        # round-trip it so printing cannot raise UnicodeErrors.
        path = path.decode(sys.getfilesystemencoding(), 'replace')
        path = path.encode(sys.getdefaultencoding(), 'replace')
    cwd_prefix = os.getcwd() + os.path.sep
    if path.startswith(cwd_prefix):
        path = '.' + path[len(os.getcwd()):]
    return path
136 | |||
137 | |||
def backup_dir(dir, ext='.bak'):
    """Return a not-yet-existing name for backing up ``dir``.

    Tries dir + '.bak', then '.bak2', '.bak3', ... until one is free.
    """
    attempt = 1
    candidate = dir + ext
    while os.path.exists(candidate):
        attempt += 1
        candidate = dir + ext + str(attempt)
    return candidate
147 | |||
148 | |||
def ask_path_exists(message, options):
    """Like ask(), but honour a pre-chosen answer from $PIP_EXISTS_ACTION."""
    preset = os.environ.get('PIP_EXISTS_ACTION', '')
    for action in preset.split():
        if action in options:
            return action
    # No usable preset: fall back to interactive prompting.
    return ask(message, options)
154 | |||
155 | |||
def ask(message, options):
    """Prompt the user until one of ``options`` is entered, and return it.

    Raises when $PIP_NO_INPUT is set, since prompting would hang
    non-interactive runs.
    """
    while True:
        if os.environ.get('PIP_NO_INPUT'):
            raise Exception(
                'No input was expected ($PIP_NO_INPUT set); question: %s' %
                message
            )
        answer = input(message).strip().lower()
        if answer in options:
            return answer
        print(
            'Your response (%r) was not one of the expected responses: '
            '%s' % (answer, ', '.join(options))
        )
173 | |||
174 | |||
def format_size(bytes):
    """Render a byte count as a short human-readable string (decimal units)."""
    if bytes > 1000 * 1000:
        return '%.1fMB' % (bytes / 1000.0 / 1000)
    if bytes > 10 * 1000:
        return '%ikB' % (bytes / 1000)
    if bytes > 1000:
        return '%.1fkB' % (bytes / 1000.0)
    return '%ibytes' % bytes
184 | |||
185 | |||
def is_installable_dir(path):
    """Return True if `path` is a directory containing a setup.py file."""
    return (
        os.path.isdir(path) and
        os.path.isfile(os.path.join(path, 'setup.py'))
    )
194 | |||
195 | |||
def is_svn_page(html):
    """Heuristically detect the index page of a Subversion repository."""
    has_revision_title = re.search(r'<title>[^<]*Revision \d+:', html)
    powered_by_svn = re.search(
        r'Powered by (?:<a[^>]*?>)?Subversion', html, re.I)
    # Both markers must be present; returns a falsy None otherwise.
    return has_revision_title and powered_by_svn
202 | |||
203 | |||
def file_contents(filename):
    """Read ``filename`` and return its contents decoded as UTF-8."""
    with open(filename, 'rb') as fp:
        raw = fp.read()
    return raw.decode('utf-8')
207 | |||
208 | |||
def read_chunks(file, size=io.DEFAULT_BUFFER_SIZE):
    """Yield pieces of data from a file-like object until EOF."""
    chunk = file.read(size)
    while chunk:
        yield chunk
        chunk = file.read(size)
216 | |||
217 | |||
def split_leading_dir(path):
    """Split off the first path component: (leading, remainder).

    Handles both '/' and '\\' separators, whichever comes first.
    """
    path = path.lstrip('/').lstrip('\\')
    slash = path.find('/')
    backslash = path.find('\\')
    if slash != -1 and (backslash == -1 or slash < backslash):
        # '/' is the first separator present.
        return path.split('/', 1)
    if backslash != -1:
        return path.split('\\', 1)
    # No separator at all: the whole path is the leading component.
    return path, ''
227 | |||
228 | |||
def has_leading_dir(paths):
    """Return True if every path shares the same single leading directory.

    Used to decide whether an archive's contents live entirely inside one
    top-level folder (and can therefore be flattened on extraction).
    """
    seen = None
    for path in paths:
        prefix, _rest = split_leading_dir(path)
        if not prefix:
            # A path with no leading component: not everything is nested.
            return False
        if seen is None:
            seen = prefix
        elif prefix != seen:
            return False
    return True
242 | |||
243 | |||
def normalize_path(path, resolve_symlinks=True):
    """Return the canonical, case-normalized, absolute form of ``path``."""
    expanded = expanduser(path)
    if resolve_symlinks:
        expanded = os.path.realpath(expanded)
    else:
        expanded = os.path.abspath(expanded)
    return os.path.normcase(expanded)
255 | |||
256 | |||
def splitext(path):
    """Like os.path.splitext, but treats '.tar.<ext>' as one extension."""
    base, ext = posixpath.splitext(path)
    if base.lower().endswith('.tar'):
        # Fold '.tar' back into the extension so 'x.tar.gz' splits
        # into ('x', '.tar.gz').
        base, ext = base[:-4], base[-4:] + ext
    return base, ext
264 | |||
265 | |||
def renames(old, new):
    """Like os.renames(), but also works across filesystems/devices."""
    # Implementation borrowed from os.renames(): first create any missing
    # directories leading up to the destination.
    dest_dir, dest_name = os.path.split(new)
    if dest_dir and dest_name and not os.path.exists(dest_dir):
        os.makedirs(dest_dir)

    # shutil.move copies+deletes when a plain rename would cross devices.
    shutil.move(old, new)

    # Prune any now-empty directories left behind at the source.
    src_dir, src_name = os.path.split(old)
    if src_dir and src_name:
        try:
            os.removedirs(src_dir)
        except OSError:
            # Source directory not empty (or already gone) - leave it.
            pass
281 | |||
282 | |||
def is_local(path):
    """True if ``path`` is inside sys.prefix, when running in a virtualenv.

    Outside a virtualenv every path counts as "local".
    """
    if not running_under_virtualenv():
        return True
    prefix = normalize_path(sys.prefix)
    return normalize_path(path).startswith(prefix)
293 | |||
294 | |||
def dist_is_local(dist):
    """True if the Distribution is installed locally (in this virtualenv).

    Always True when not running in a virtualenv.
    """
    location = dist_location(dist)
    return is_local(location)
304 | |||
305 | |||
def dist_in_usersite(dist):
    """Return True if given Distribution is installed in user site."""
    location = normalize_path(dist_location(dist))
    return location.startswith(normalize_path(user_site))
312 | |||
313 | |||
def dist_in_site_packages(dist):
    """True if the Distribution lives in sysconfig.get_python_lib()."""
    location = normalize_path(dist_location(dist))
    return location.startswith(normalize_path(site_packages))
322 | |||
323 | |||
def dist_is_editable(dist):
    """True if an .egg-link for ``dist`` exists on any sys.path entry."""
    link_name = dist.project_name + '.egg-link'
    return any(
        os.path.isfile(os.path.join(entry, link_name))
        for entry in sys.path
    )
331 | |||
332 | |||
def get_installed_distributions(local_only=True,
                                skip=stdlib_pkgs,
                                include_editables=True,
                                editables_only=False,
                                user_only=False):
    """Return a list of installed Distribution objects.

    If ``local_only`` is True (default), only return installations
    local to the current virtualenv, if in a virtualenv.

    ``skip`` is an iterable of lower-case project names to ignore;
    defaults to stdlib_pkgs.

    If ``include_editables`` is False, editable installs are dropped.

    If ``editables_only`` is True, only editable installs are kept.

    If ``user_only`` is True, only installations in the user site
    directory are kept.
    """
    def _accept_all(d):
        return True

    # Build one predicate per filter option; each defaults to a pass-through.
    local_test = dist_is_local if local_only else _accept_all
    if include_editables:
        editable_test = _accept_all
    else:
        def editable_test(d):
            return not dist_is_editable(d)
    editables_only_test = dist_is_editable if editables_only else _accept_all
    user_test = dist_in_usersite if user_only else _accept_all

    return [
        dist for dist in pkg_resources.working_set
        if dist.key not in skip and
        local_test(dist) and
        editable_test(dist) and
        editables_only_test(dist) and
        user_test(dist)
    ]
388 | |||
389 | |||
def egg_link_path(dist):
    """Return the path of the .egg-link file for ``dist``, or None.

    The searched locations depend on the environment:
      1) outside any virtualenv: site.USER_SITE first, then site-packages;
      2) in a no-global virtualenv: site-packages only;
      3) in a yes-global virtualenv: site-packages first, then
         site.USER_SITE (the global location is never searched).

    If egg-link files exist in more than one searched location, the first
    one found wins.
    """
    if running_under_virtualenv():
        candidates = [site_packages]
        if not virtualenv_no_global() and user_site:
            candidates.append(user_site)
    else:
        candidates = []
        if user_site:
            candidates.append(user_site)
        candidates.append(site_packages)

    link_name = dist.project_name + '.egg-link'
    for directory in candidates:
        link = os.path.join(directory, link_name)
        if os.path.isfile(link):
            return link
425 | |||
426 | |||
def dist_location(dist):
    """Return where ``dist`` is registered in site-packages.

    Generally this is dist.location, but for develop-installed packages
    that points at the source checkout, so the .egg-link file's path is
    preferred when one exists.
    """
    return egg_link_path(dist) or dist.location
439 | |||
440 | |||
def current_umask():
    """Return the process umask (os.umask can only be read by writing it)."""
    # Temporarily set the umask to 0 to learn the old value, then restore.
    previous = os.umask(0)
    os.umask(previous)
    return previous
446 | |||
447 | |||
def unzip_file(filename, location, flatten=True):
    """
    Unzip the file (with path `filename`) to the destination `location`. All
    files are written based on system defaults and umask (i.e. permissions are
    not preserved), except that regular file members with any execute
    permissions (user, group, or world) have "chmod +x" applied after being
    written. Note that for windows, any execute changes using os.chmod are
    no-ops per the python docs.
    """
    ensure_dir(location)
    # Use context managers so both the archive object and each extracted
    # member file are closed even when extraction fails partway; this also
    # avoids shadowing the builtin ``zip``.
    with open(filename, 'rb') as zipfp:
        with zipfile.ZipFile(zipfp, allowZip64=True) as archive:
            # Drop a single shared top-level directory, unless disabled
            # (wheels must keep their internal layout).
            leading = has_leading_dir(archive.namelist()) and flatten
            for info in archive.infolist():
                name = info.filename
                data = archive.read(name)
                fn = name
                if leading:
                    fn = split_leading_dir(name)[1]
                fn = os.path.join(location, fn)
                dir = os.path.dirname(fn)
                if fn.endswith('/') or fn.endswith('\\'):
                    # A directory entry: just make sure it exists.
                    ensure_dir(fn)
                else:
                    ensure_dir(dir)
                    with open(fn, 'wb') as fp:
                        fp.write(data)
                    # The upper 16 bits of external_attr hold the Unix mode.
                    mode = info.external_attr >> 16
                    # if mode and regular file and any execute permissions for
                    # user/group/world?
                    if mode and stat.S_ISREG(mode) and mode & 0o111:
                        # make dest file have execute for user/group/world
                        # (chmod +x) no-op on windows per python docs
                        os.chmod(fn, (0o777 - current_umask() | 0o111))
489 | |||
490 | |||
def untar_file(filename, location):
    """
    Untar the file (with path `filename`) to the destination `location`.
    All files are written based on system defaults and umask (i.e. permissions
    are not preserved), except that regular file members with any execute
    permissions (user, group, or world) have "chmod +x" applied after being
    written. Note that for windows, any execute changes using os.chmod are
    no-ops per the python docs.
    """
    ensure_dir(location)
    # Pick the decompression mode from the filename; fall back to tarfile's
    # transparent autodetection ('r:*') when the extension is unknown.
    if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'):
        mode = 'r:gz'
    elif filename.lower().endswith(BZ2_EXTENSIONS):
        mode = 'r:bz2'
    elif filename.lower().endswith(XZ_EXTENSIONS):
        mode = 'r:xz'
    elif filename.lower().endswith('.tar'):
        mode = 'r'
    else:
        logger.warning(
            'Cannot determine compression type for file %s', filename,
        )
        mode = 'r:*'
    tar = tarfile.open(filename, mode)
    try:
        # note: python<=2.5 doesn't seem to know about pax headers, filter them
        leading = has_leading_dir([
            member.name for member in tar.getmembers()
            if member.name != 'pax_global_header'
        ])
        for member in tar.getmembers():
            fn = member.name
            if fn == 'pax_global_header':
                continue
            if leading:
                # Strip the single shared top-level directory.
                fn = split_leading_dir(fn)[1]
            path = os.path.join(location, fn)
            if member.isdir():
                ensure_dir(path)
            elif member.issym():
                try:
                    # NOTE: relies on tarfile's private helper to materialize
                    # the symlink at the (possibly flattened) target path.
                    tar._extract_member(member, path)
                except Exception as exc:
                    # Some corrupt tar files seem to produce this
                    # (specifically bad symlinks)
                    logger.warning(
                        'In the tar file %s the member %s is invalid: %s',
                        filename, member.name, exc,
                    )
                    continue
            else:
                try:
                    fp = tar.extractfile(member)
                except (KeyError, AttributeError) as exc:
                    # Some corrupt tar files seem to produce this
                    # (specifically bad symlinks)
                    logger.warning(
                        'In the tar file %s the member %s is invalid: %s',
                        filename, member.name, exc,
                    )
                    continue
                ensure_dir(os.path.dirname(path))
                with open(path, 'wb') as destfp:
                    shutil.copyfileobj(fp, destfp)
                fp.close()
                # Update the timestamp (useful for cython compiled files)
                tar.utime(member, path)
                # member have any execute permissions for user/group/world?
                if member.mode & 0o111:
                    # make dest file have execute for user/group/world
                    # no-op on windows per python docs
                    os.chmod(path, (0o777 - current_umask() | 0o111))
    finally:
        tar.close()
565 | |||
566 | |||
def unpack_file(filename, location, content_type, link):
    """Dispatch ``filename`` to the appropriate unpacker.

    Tries, in order: zip archives (wheels are not flattened), tarballs of
    any supported compression, and svn repository index pages; anything
    else is an InstallationError.
    """
    filename = os.path.realpath(filename)
    looks_like_zip = (
        content_type == 'application/zip' or
        filename.lower().endswith(ZIP_EXTENSIONS) or
        zipfile.is_zipfile(filename)
    )
    if looks_like_zip:
        # Wheels must keep their internal directory layout intact.
        unzip_file(
            filename,
            location,
            flatten=not filename.endswith('.whl')
        )
        return
    looks_like_tar = (
        content_type == 'application/x-gzip' or
        tarfile.is_tarfile(filename) or
        filename.lower().endswith(
            TAR_EXTENSIONS + BZ2_EXTENSIONS + XZ_EXTENSIONS)
    )
    if looks_like_tar:
        untar_file(filename, location)
        return
    if (content_type and content_type.startswith('text/html') and
            is_svn_page(file_contents(filename))):
        # We don't really care about this
        from pip._internal.vcs.subversion import Subversion
        Subversion('svn+' + link.url).unpack(location)
        return
    # FIXME: handle?
    # FIXME: magic signatures?
    logger.critical(
        'Cannot unpack file %s (downloaded from %s, content-type: %s); '
        'cannot detect archive format',
        filename, location, content_type,
    )
    raise InstallationError(
        'Cannot determine archive format of %s' % location
    )
598 | |||
599 | |||
def call_subprocess(cmd, show_stdout=True, cwd=None,
                    on_returncode='raise',
                    command_desc=None,
                    extra_environ=None, unset_environ=None, spinner=None):
    """
    Run ``cmd`` and return its combined stdout+stderr output (if captured).

    Args:
        cmd: the command to run, as a list of arguments.
        show_stdout: if True, let the child write directly to our stdout
            (output is then not captured and None is returned).
        cwd: working directory for the child process.
        on_returncode: 'raise' (default), 'warn' or 'ignore' -- what to do
            when the child exits non-zero.
        command_desc: human-readable command description for log messages;
            derived from ``cmd`` when not given.
        extra_environ: mapping merged over os.environ for the child.
        unset_environ: an iterable of environment variable names to unset
            prior to calling subprocess.Popen().
        spinner: optional progress spinner updated while output is read.
    """
    if unset_environ is None:
        unset_environ = []
    # This function's handling of subprocess output is confusing and I
    # previously broke it terribly, so as penance I will write a long comment
    # explaining things.
    #
    # The obvious thing that affects output is the show_stdout=
    # kwarg. show_stdout=True means, let the subprocess write directly to our
    # stdout. Even though it is nominally the default, it is almost never used
    # inside pip (and should not be used in new code without a very good
    # reason); as of 2016-02-22 it is only used in a few places inside the VCS
    # wrapper code. Ideally we should get rid of it entirely, because it
    # creates a lot of complexity here for a rarely used feature.
    #
    # Most places in pip set show_stdout=False. What this means is:
    # - We connect the child stdout to a pipe, which we read.
    # - By default, we hide the output but show a spinner -- unless the
    #   subprocess exits with an error, in which case we show the output.
    # - If the --verbose option was passed (= loglevel is DEBUG), then we show
    #   the output unconditionally. (But in this case we don't want to show
    #   the output a second time if it turns out that there was an error.)
    #
    # stderr is always merged with stdout (even if show_stdout=True).
    if show_stdout:
        stdout = None
    else:
        stdout = subprocess.PIPE
    if command_desc is None:
        # Quote arguments containing whitespace or quotes for readability.
        cmd_parts = []
        for part in cmd:
            if ' ' in part or '\n' in part or '"' in part or "'" in part:
                part = '"%s"' % part.replace('"', '\\"')
            cmd_parts.append(part)
        command_desc = ' '.join(cmd_parts)
    logger.debug("Running command %s", command_desc)
    env = os.environ.copy()
    if extra_environ:
        env.update(extra_environ)
    for name in unset_environ:
        env.pop(name, None)
    try:
        proc = subprocess.Popen(
            cmd, stderr=subprocess.STDOUT, stdin=subprocess.PIPE,
            stdout=stdout, cwd=cwd, env=env,
        )
        # Close stdin immediately: the child must not wait for input.
        proc.stdin.close()
    except Exception as exc:
        logger.critical(
            "Error %s while executing command %s", exc, command_desc,
        )
        raise
    all_output = []
    if stdout is not None:
        # Read line by line so the spinner can advance while we wait.
        while True:
            line = console_to_str(proc.stdout.readline())
            if not line:
                break
            line = line.rstrip()
            all_output.append(line + '\n')
            if logger.getEffectiveLevel() <= std_logging.DEBUG:
                # Show the line immediately
                logger.debug(line)
            else:
                # Update the spinner
                if spinner is not None:
                    spinner.spin()
    try:
        proc.wait()
    finally:
        if proc.stdout:
            proc.stdout.close()
    if spinner is not None:
        if proc.returncode:
            spinner.finish("error")
        else:
            spinner.finish("done")
    if proc.returncode:
        if on_returncode == 'raise':
            if (logger.getEffectiveLevel() > std_logging.DEBUG and
                    not show_stdout):
                # Output was hidden so far; dump it now to aid debugging.
                logger.info(
                    'Complete output from command %s:', command_desc,
                )
                logger.info(
                    ''.join(all_output) +
                    '\n----------------------------------------'
                )
            raise InstallationError(
                'Command "%s" failed with error code %s in %s'
                % (command_desc, proc.returncode, cwd))
        elif on_returncode == 'warn':
            logger.warning(
                'Command "%s" had error code %s in %s',
                command_desc, proc.returncode, cwd,
            )
        elif on_returncode == 'ignore':
            pass
        else:
            raise ValueError('Invalid value: on_returncode=%s' %
                             repr(on_returncode))
    if not show_stdout:
        return ''.join(all_output)
711 | |||
712 | |||
def read_text_file(filename):
    """Return the contents of *filename*.

    Try to decode the file contents with utf-8, the preferred system encoding
    (e.g., cp1252 on some Windows machines), and latin1, in that order.
    Decoding a byte string with latin1 will never raise an error. In the worst
    case, the returned string will contain some garbage characters.

    """
    with open(filename, 'rb') as fp:
        raw = fp.read()

    for encoding in ['utf-8', locale.getpreferredencoding(False), 'latin1']:
        try:
            decoded = raw.decode(encoding)
        except UnicodeDecodeError:
            continue
        return decoded

    # Unreachable: latin1 accepts any byte sequence.
    assert False
735 | |||
736 | |||
def _make_build_dir(build_dir):
    # Create the build directory and drop pip's delete marker in it so the
    # directory can later be recognized (and safely removed) as pip-created.
    os.makedirs(build_dir)
    write_delete_marker_file(build_dir)
740 | |||
741 | |||
class FakeFile(object):
    """Wrap a list of lines in an object with readline() to make
    ConfigParser happy."""

    def __init__(self, lines):
        # Consume lazily: huge inputs are never copied.
        self._gen = (line for line in lines)

    def readline(self):
        """Return the next line, or '' once the lines are exhausted."""
        try:
            try:
                return next(self._gen)
            except NameError:
                # Python 2 fallback where next() may be unavailable.
                return self._gen.next()
        except StopIteration:
            return ''

    def __iter__(self):
        return self._gen
759 | |||
760 | |||
class StreamWrapper(StringIO):
    # In-memory stand-in for a std stream, used by captured_output().

    @classmethod
    def from_stream(cls, orig_stream):
        # NOTE: the original stream is stored on the *class*, so it is shared
        # by all instances; acceptable for the short-lived, one-at-a-time use
        # in captured_output().
        cls.orig_stream = orig_stream
        return cls()

    # compileall.compile_dir() needs stdout.encoding to print to stdout
    @property
    def encoding(self):
        # Delegate to the wrapped stream's encoding.
        return self.orig_stream.encoding
772 | |||
773 | |||
@contextlib.contextmanager
def captured_output(stream_name):
    """Temporarily replace sys.<stream_name> with a StringIO and yield it.

    Used by captured_stdout/stdin/stderr; taken from Lib/support/__init__.py
    in the CPython repo.
    """
    original = getattr(sys, stream_name)
    setattr(sys, stream_name, StreamWrapper.from_stream(original))
    try:
        yield getattr(sys, stream_name)
    finally:
        # Always restore the real stream, even if the body raised.
        setattr(sys, stream_name, original)
787 | |||
788 | |||
def captured_stdout():
    """Capture the output of sys.stdout:

       with captured_stdout() as stdout:
           print('hello')
       self.assertEqual(stdout.getvalue(), 'hello\n')

    Taken from Lib/support/__init__.py in the CPython repo.
    """
    # Thin convenience wrapper around captured_output for the common case.
    return captured_output('stdout')
799 | |||
800 | |||
class cached_property(object):
    """Non-data descriptor that caches the wrapped method's result.

    The first access computes the value and stores it in the instance's
    __dict__ under the same name, so later lookups bypass the descriptor
    entirely.  Deleting the attribute resets the cache.

    Source: https://github.com/bottlepy/bottle/blob/0.11.5/bottle.py#L175
    """

    def __init__(self, func):
        self.__doc__ = getattr(func, '__doc__')
        self.func = func

    def __get__(self, obj, cls):
        if obj is None:
            # Accessed on the class itself: expose the descriptor object.
            return self
        result = self.func(obj)
        # Shadow the descriptor with the computed value on the instance.
        obj.__dict__[self.func.__name__] = result
        return result
819 | |||
820 | |||
def get_installed_version(dist_name, lookup_dirs=None):
    """Get the installed version of dist_name avoiding pkg_resources cache"""
    # Parse the name into a requirement we can search for.
    req = pkg_resources.Requirement.parse(dist_name)

    # Build a fresh WorkingSet on every call so stale cached state inside
    # pkg_resources cannot yield an outdated answer.
    if lookup_dirs is None:
        working_set = pkg_resources.WorkingSet()
    else:
        working_set = pkg_resources.WorkingSet(lookup_dirs)

    # Return the distribution's version, or None when it is not installed.
    dist = working_set.find(req)
    return dist.version if dist else None
839 | |||
840 | |||
def consume(iterator):
    """Consume an iterable at C speed."""
    # A zero-length deque discards every item without storing any of them.
    deque(iterator, maxlen=0)
844 | |||
845 | |||
def enum(*sequential, **named):
    """Build a simple enum-like class (pre-enum-module style).

    Positional names get the values 0..n-1; keyword arguments supply
    explicit values.  The generated type also carries a ``reverse_mapping``
    dict from value back to name.
    """
    members = dict(zip(sequential, range(len(sequential))), **named)
    members['reverse_mapping'] = {
        value: key for key, value in members.items()
    }
    return type('Enum', (), members)
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/outdated.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/outdated.py new file mode 100644 index 0000000..f4572ab --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/outdated.py | |||
@@ -0,0 +1,163 @@ | |||
1 | from __future__ import absolute_import | ||
2 | |||
3 | import datetime | ||
4 | import json | ||
5 | import logging | ||
6 | import os.path | ||
7 | import sys | ||
8 | |||
9 | from pip._vendor import lockfile | ||
10 | from pip._vendor.packaging import version as packaging_version | ||
11 | |||
12 | from pip._internal.compat import WINDOWS | ||
13 | from pip._internal.index import PackageFinder | ||
14 | from pip._internal.locations import USER_CACHE_DIR, running_under_virtualenv | ||
15 | from pip._internal.utils.filesystem import check_path_owner | ||
16 | from pip._internal.utils.misc import ensure_dir, get_installed_version | ||
17 | |||
18 | SELFCHECK_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ" | ||
19 | |||
20 | |||
21 | logger = logging.getLogger(__name__) | ||
22 | |||
23 | |||
class VirtualenvSelfCheckState(object):
    """Persist pip's self-version-check state inside the active virtualenv.

    The state lives in ``<sys.prefix>/pip-selfcheck.json`` so that each
    environment tracks its own last-check timestamp independently.
    """

    def __init__(self):
        self.statefile_path = os.path.join(sys.prefix, "pip-selfcheck.json")

        # A missing or unparsable state file simply means "never checked".
        try:
            with open(self.statefile_path) as statefile:
                self.state = json.load(statefile)
        except (IOError, ValueError):
            self.state = {}

    def save(self, pypi_version, current_time):
        """Record *pypi_version* and *current_time* in the state file."""
        payload = {
            "last_check": current_time.strftime(SELFCHECK_DATE_FMT),
            "pypi_version": pypi_version,
        }
        # Compact separators keep the file small.
        with open(self.statefile_path, "w") as statefile:
            json.dump(payload, statefile, sort_keys=True,
                      separators=(",", ":"))
47 | |||
48 | |||
class GlobalSelfCheckState(object):
    """Persist pip's self-version-check state in the user-wide cache dir.

    A single ``selfcheck.json`` holds one entry per environment, keyed by
    ``sys.prefix``, so different interpreters/virtualenvs sharing the cache
    do not clobber each other's timestamps.
    """

    def __init__(self):
        self.statefile_path = os.path.join(USER_CACHE_DIR, "selfcheck.json")

        # Load the existing state
        try:
            with open(self.statefile_path) as statefile:
                # Only this environment's entry; KeyError => never checked.
                self.state = json.load(statefile)[sys.prefix]
        except (IOError, ValueError, KeyError):
            self.state = {}

    def save(self, pypi_version, current_time):
        """Record *pypi_version*/*current_time* under this sys.prefix.

        Silently does nothing when the cache directory is not owned by the
        current user (e.g. after a sudo pip run).
        """
        # Check to make sure that we own the directory
        if not check_path_owner(os.path.dirname(self.statefile_path)):
            return

        # Now that we've ensured the directory is owned by this user, we'll go
        # ahead and make sure that all our directories are created.
        ensure_dir(os.path.dirname(self.statefile_path))

        # Attempt to write out our version check file.  Re-read the file
        # inside the lock so concurrent pip processes don't lose each
        # other's entries between our read and our write.
        with lockfile.LockFile(self.statefile_path):
            if os.path.exists(self.statefile_path):
                with open(self.statefile_path) as statefile:
                    state = json.load(statefile)
            else:
                state = {}

            state[sys.prefix] = {
                "last_check": current_time.strftime(SELFCHECK_DATE_FMT),
                "pypi_version": pypi_version,
            }

            with open(self.statefile_path, "w") as statefile:
                json.dump(state, statefile, sort_keys=True,
                          separators=(",", ":"))
85 | |||
86 | |||
def load_selfcheck_statefile():
    """Return the self-check state store appropriate for this environment."""
    if running_under_virtualenv():
        return VirtualenvSelfCheckState()
    return GlobalSelfCheckState()
92 | |||
93 | |||
def pip_version_check(session, options):
    """Check for an update for pip.

    Limit the frequency of checks to once per week. State is stored either in
    the active virtualenv or in the user's USER_CACHE_DIR keyed off the prefix
    of the pip script path.

    :param session: a PipSession used to query the package index.
    :param options: parsed command-line options supplying index URLs and
        related finder settings.
    """
    installed_version = get_installed_version("pip")
    if not installed_version:
        return

    pip_version = packaging_version.parse(installed_version)
    pypi_version = None

    # This check must never break the actual pip command, hence the broad
    # try/except around the entire body (errors are only logged at DEBUG).
    try:
        state = load_selfcheck_statefile()

        current_time = datetime.datetime.utcnow()
        # Determine if we need to refresh the state
        if "last_check" in state.state and "pypi_version" in state.state:
            last_check = datetime.datetime.strptime(
                state.state["last_check"],
                SELFCHECK_DATE_FMT
            )
            # Reuse the cached answer while it is less than one week old.
            if (current_time - last_check).total_seconds() < 7 * 24 * 60 * 60:
                pypi_version = state.state["pypi_version"]

        # Refresh the version if we need to or just see if we need to warn
        if pypi_version is None:
            # Lets use PackageFinder to see what the latest pip version is
            finder = PackageFinder(
                find_links=options.find_links,
                index_urls=[options.index_url] + options.extra_index_urls,
                allow_all_prereleases=False,  # Explicitly set to False
                trusted_hosts=options.trusted_hosts,
                process_dependency_links=options.process_dependency_links,
                session=session,
            )
            all_candidates = finder.find_all_candidates("pip")
            if not all_candidates:
                return
            pypi_version = str(
                max(all_candidates, key=lambda c: c.version).version
            )

            # save that we've performed a check
            state.save(pypi_version, current_time)

        remote_version = packaging_version.parse(pypi_version)

        # Determine if our pypi_version is older.
        # NOTE(review): the base_version comparison appears intended to skip
        # the warning when only the pre/dev segment differs -- confirm intent.
        if (pip_version < remote_version and
                pip_version.base_version != remote_version.base_version):
            # Advise "python -m pip" on Windows to avoid issues
            # with overwriting pip.exe.
            if WINDOWS:
                pip_cmd = "python -m pip"
            else:
                pip_cmd = "pip"
            logger.warning(
                "You are using pip version %s, however version %s is "
                "available.\nYou should consider upgrading via the "
                "'%s install --upgrade pip' command.",
                pip_version, pypi_version, pip_cmd
            )
    except Exception:
        logger.debug(
            "There was an error checking the latest version of pip",
            exc_info=True,
        )
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/packaging.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/packaging.py new file mode 100644 index 0000000..d523953 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/packaging.py | |||
@@ -0,0 +1,70 @@ | |||
1 | from __future__ import absolute_import | ||
2 | |||
3 | import logging | ||
4 | import sys | ||
5 | from email.parser import FeedParser # type: ignore | ||
6 | |||
7 | from pip._vendor import pkg_resources | ||
8 | from pip._vendor.packaging import specifiers, version | ||
9 | |||
10 | from pip._internal import exceptions | ||
11 | |||
12 | logger = logging.getLogger(__name__) | ||
13 | |||
14 | |||
def check_requires_python(requires_python):
    """Return True when the running interpreter satisfies *requires_python*.

    A None specifier is treated as "any Python".  Raises
    ``specifiers.InvalidSpecifier`` when *requires_python* is malformed.
    """
    if requires_python is None:
        # The package declares no constraint at all.
        return True

    # Compare only major.minor.micro of the running interpreter.
    interpreter = version.parse('.'.join(map(str, sys.version_info[:3])))
    return interpreter in specifiers.SpecifierSet(requires_python)
33 | |||
34 | |||
def get_metadata(dist):
    """Return the raw metadata text for *dist*, or None when absent.

    dist-info distributions carry it as METADATA; legacy metadata lives
    in PKG-INFO.
    """
    is_dist_info = isinstance(dist, pkg_resources.DistInfoDistribution)
    if is_dist_info and dist.has_metadata('METADATA'):
        return dist.get_metadata('METADATA')
    if dist.has_metadata('PKG-INFO'):
        return dist.get_metadata('PKG-INFO')
    return None
41 | |||
42 | |||
def check_dist_requires_python(dist):
    """Validate *dist*'s Requires-Python metadata against this interpreter.

    Raises ``exceptions.UnsupportedPythonVersion`` when the running
    interpreter does not satisfy the distribution's declared
    Requires-Python specifier.  An invalid specifier is logged and
    otherwise ignored.
    """
    metadata = get_metadata(dist)
    if metadata is None:
        # Bug fix: get_metadata() returns None when the distribution ships
        # neither METADATA nor PKG-INFO; feeding None to FeedParser.feed()
        # would raise a confusing TypeError.  Treat it as "no constraint".
        logger.warning(
            "Could not find metadata for %s; skipping Requires-Python check",
            dist.project_name,
        )
        return
    feed_parser = FeedParser()
    feed_parser.feed(metadata)
    pkg_info_dict = feed_parser.close()
    requires_python = pkg_info_dict.get('Requires-Python')
    try:
        if not check_requires_python(requires_python):
            raise exceptions.UnsupportedPythonVersion(
                "%s requires Python '%s' but the running Python is %s" % (
                    dist.project_name,
                    requires_python,
                    '.'.join(map(str, sys.version_info[:3])),)
            )
    except specifiers.InvalidSpecifier as e:
        # A broken specifier should not abort the install; warn and move on.
        logger.warning(
            "Package %s has an invalid Requires-Python entry %s - %s",
            dist.project_name, requires_python, e,
        )
        return
63 | |||
64 | |||
def get_installer(dist):
    """Return the name of the tool recorded in *dist*'s INSTALLER metadata.

    Returns the first non-blank line (stripped), or '' when the INSTALLER
    file is missing or contains only whitespace.
    """
    if not dist.has_metadata('INSTALLER'):
        return ''
    for raw_line in dist.get_metadata_lines('INSTALLER'):
        name = raw_line.strip()
        if name:
            return name
    return ''
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/setuptools_build.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/setuptools_build.py new file mode 100644 index 0000000..9d32174 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/setuptools_build.py | |||
@@ -0,0 +1,8 @@ | |||
# Shim to wrap setup.py invocation with setuptools.
# - importing setuptools first ensures distutils-only setup.py files are
#   processed with setuptools' enhanced commands;
# - tokenize.open (Python 3) honours PEP 263 encoding declarations, with a
#   fallback to plain open() where it is unavailable;
# - __file__ is rebound because the code is run via exec(), not as a script;
# - Windows line endings are normalised since compile() wants '\n' newlines.
# The %r placeholder is filled with the path to the setup.py being run.
SETUPTOOLS_SHIM = (
    "import setuptools, tokenize;__file__=%r;"
    "f=getattr(tokenize, 'open', open)(__file__);"
    "code=f.read().replace('\\r\\n', '\\n');"
    "f.close();"
    "exec(compile(code, __file__, 'exec'))"
)
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/temp_dir.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/temp_dir.py new file mode 100644 index 0000000..25bc0d9 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/temp_dir.py | |||
@@ -0,0 +1,82 @@ | |||
1 | from __future__ import absolute_import | ||
2 | |||
3 | import logging | ||
4 | import os.path | ||
5 | import tempfile | ||
6 | |||
7 | from pip._internal.utils.misc import rmtree | ||
8 | |||
9 | logger = logging.getLogger(__name__) | ||
10 | |||
11 | |||
class TempDirectory(object):
    """Owns a temporary directory and (optionally) removes it afterwards.

    Usable directly or as a context manager:

        with TempDirectory(kind="build") as tmp:
            ...  # use tmp.path

    Attributes:
        path: location of the created directory, or None before create() /
            after cleanup().
        delete: whether __exit__ removes the directory.
        kind: short tag embedded in the generated directory name.
    """

    def __init__(self, path=None, delete=None, kind="temp"):
        super(TempDirectory, self).__init__()

        # No explicit path and no explicit delete preference: default to
        # cleaning up after ourselves.
        if path is None and delete is None:
            delete = True

        self.path = path
        self.delete = delete
        self.kind = kind

    def __repr__(self):
        return "<{} {!r}>".format(self.__class__.__name__, self.path)

    def __enter__(self):
        self.create()
        return self

    def __exit__(self, exc, value, tb):
        if self.delete:
            self.cleanup()

    def create(self):
        """Create the temporary directory and record it in self.path."""
        if self.path is not None:
            logger.debug(
                "Skipped creation of temporary directory: {}".format(self.path)
            )
            return
        # realpath() because the default tmpdir may be a symlink on some
        # platforms, which tends to confuse build scripts.
        created = tempfile.mkdtemp(prefix="pip-{}-".format(self.kind))
        self.path = os.path.realpath(created)
        logger.debug("Created temporary directory: {}".format(self.path))

    def cleanup(self):
        """Delete the directory (if it still exists) and reset self.path."""
        if self.path is not None and os.path.exists(self.path):
            rmtree(self.path)
        self.path = None
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/typing.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/typing.py new file mode 100644 index 0000000..4e25ae6 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/typing.py | |||
@@ -0,0 +1,29 @@ | |||
1 | """For neatly implementing static typing in pip. | ||
2 | |||
3 | `mypy` - the static type analysis tool we use - uses the `typing` module, which | ||
4 | provides core functionality fundamental to mypy's functioning. | ||
5 | |||
6 | Generally, `typing` would be imported at runtime and used in that fashion - | ||
7 | it acts as a no-op at runtime and does not have any run-time overhead by | ||
8 | design. | ||
9 | |||
10 | As it turns out, `typing` is not vendorable - it uses separate sources for | ||
11 | Python 2/Python 3. Thus, this codebase can not expect it to be present. | ||
12 | To work around this, mypy allows the typing import to be behind a False-y | ||
13 | optional to prevent it from running at runtime and type-comments can be used | ||
14 | to remove the need for the types to be accessible directly during runtime. | ||
15 | |||
16 | This module provides the False-y guard in a nicely named fashion so that a | ||
17 | curious maintainer can reach here to read this. | ||
18 | |||
19 | In pip, all static-typing related imports should be guarded as follows: | ||
20 | |||
21 | from pip.utils.typing import MYPY_CHECK_RUNNING | ||
22 | |||
23 | if MYPY_CHECK_RUNNING: | ||
24 | from typing import ... | ||
25 | |||
26 | Ref: https://github.com/python/mypy/issues/3216 | ||
27 | """ | ||
28 | |||
# Always False at runtime; used to guard typing-only imports as described
# in the module docstring above.
MYPY_CHECK_RUNNING = False
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/ui.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/ui.py new file mode 100644 index 0000000..d97ea36 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/ui.py | |||
@@ -0,0 +1,421 @@ | |||
1 | from __future__ import absolute_import, division | ||
2 | |||
3 | import contextlib | ||
4 | import itertools | ||
5 | import logging | ||
6 | import sys | ||
7 | import time | ||
8 | from signal import SIGINT, default_int_handler, signal | ||
9 | |||
10 | from pip._vendor import six | ||
11 | from pip._vendor.progress.bar import ( | ||
12 | Bar, ChargingBar, FillingCirclesBar, FillingSquaresBar, IncrementalBar, | ||
13 | ShadyBar, | ||
14 | ) | ||
15 | from pip._vendor.progress.helpers import HIDE_CURSOR, SHOW_CURSOR, WritelnMixin | ||
16 | from pip._vendor.progress.spinner import Spinner | ||
17 | |||
18 | from pip._internal.compat import WINDOWS | ||
19 | from pip._internal.utils.logging import get_indentation | ||
20 | from pip._internal.utils.misc import format_size | ||
21 | from pip._internal.utils.typing import MYPY_CHECK_RUNNING | ||
22 | |||
23 | if MYPY_CHECK_RUNNING: | ||
24 | from typing import Any | ||
25 | |||
26 | try: | ||
27 | from pip._vendor import colorama | ||
28 | # Lots of different errors can come from this, including SystemError and | ||
29 | # ImportError. | ||
30 | except Exception: | ||
31 | colorama = None | ||
32 | |||
33 | logger = logging.getLogger(__name__) | ||
34 | |||
35 | |||
def _select_progress_class(preferred, fallback):
    """Return *preferred* if its output file can encode the bar's glyphs,
    otherwise *fallback* (assumed to be a plain-ASCII bar).
    """
    encoding = getattr(preferred.file, "encoding", None)

    # An unknown encoding means we cannot prove unicode support; play safe.
    if not encoding:
        return fallback

    # Gather every character the fancy bar might emit...
    glyphs = [
        getattr(preferred, "empty_fill", six.text_type()),
        getattr(preferred, "fill", six.text_type()),
    ]
    glyphs.extend(getattr(preferred, "phases", []))

    # ...and check they all survive encoding to the file's charset.
    try:
        six.text_type().join(glyphs).encode(encoding)
    except UnicodeEncodeError:
        return fallback
    return preferred
61 | |||
62 | |||
# Pick the fanciest bar stdout's encoding can render, else the plain Bar.
_BaseBar = _select_progress_class(IncrementalBar, Bar)  # type: Any
64 | |||
65 | |||
class InterruptibleMixin(object):
    """Restore the previous SIGINT handler once the progress display ends.

    Guarantees that finish() runs on Ctrl-C, so no terminal state (such as
    a hidden cursor) is left behind.  Unlike the progress library's own
    SigIntMixin (as of 1.2) this does not call sys.exit(), does not throw
    away the prior handler, and uninstalls itself after an uninterrupted
    finish() -- so a later, unrelated Ctrl-C behaves normally.
    """

    def __init__(self, *args, **kwargs):
        """Install our SIGINT handler, remembering the one it replaces."""
        super(InterruptibleMixin, self).__init__(*args, **kwargs)

        previous = signal(SIGINT, self.handle_sigint)
        if previous is None:
            # signal() reports None when the prior handler was not installed
            # from Python; we cannot restore that, so fall back to Python's
            # default handler, which simply raises KeyboardInterrupt.
            previous = default_int_handler
        self.original_handler = previous

    def finish(self):
        """Put the original SIGINT handler back after finishing.

        Runs whether the display finishes normally or is interrupted.
        """
        super(InterruptibleMixin, self).finish()
        signal(SIGINT, self.original_handler)

    def handle_sigint(self, signum, frame):
        """Tidy up via finish(), then defer to the original handler.

        Only installed while the progress display is active.
        """
        self.finish()
        self.original_handler(signum, frame)
119 | |||
120 | |||
class SilentBar(Bar):
    """A progress bar that renders nothing (the "off" style in BAR_TYPES)."""

    def update(self):
        # Suppress all drawing.
        pass
125 | |||
126 | |||
class BlueEmojiBar(IncrementalBar):
    """Bar drawn with blue emoji shapes (the "emoji" style in BAR_TYPES)."""

    suffix = "%(percent)d%%"
    bar_prefix = " "
    bar_suffix = " "
    # Small blue diamond, large blue diamond, blue circle.
    phases = (u"\U0001F539", u"\U0001F537", u"\U0001F535")  # type: Any
133 | |||
134 | |||
class DownloadProgressMixin(object):
    """Add download-oriented fields (size, speed, ETA) to a progress bar."""

    def __init__(self, *args, **kwargs):
        super(DownloadProgressMixin, self).__init__(*args, **kwargs)
        # Indent the message so it lines up with pip's log output.
        self.message = (" " * (get_indentation() + 2)) + self.message

    @property
    def downloaded(self):
        """Human-readable count of bytes received so far."""
        return format_size(self.index)

    @property
    def download_speed(self):
        """Human-readable bytes/second, or "..." until we have an average."""
        if self.avg == 0.0:
            # No timing samples yet; avoid dividing by zero.
            return "..."
        return format_size(1 / self.avg) + "/s"

    @property
    def pretty_eta(self):
        """Formatted time remaining, or the empty string when unknown."""
        return "eta %s" % self.eta_td if self.eta else ""

    def iter(self, it, n=1):
        """Yield items from *it*, advancing by *n* each; finish at the end."""
        for item in it:
            yield item
            self.next(n)
        self.finish()
163 | |||
164 | |||
class WindowsMixin(object):
    """Adapt progress rendering to the Windows console (via colorama)."""

    def __init__(self, *args, **kwargs):
        # The Windows terminal does not support the hide/show cursor ANSI codes
        # even with colorama. So we'll ensure that hide_cursor is False on
        # Windows.
        # This call needs to go before the super() call, so that hide_cursor
        # is set in time. The base progress bar class writes the "hide cursor"
        # code to the terminal in its init, so if we don't set this soon
        # enough, we get a "hide" with no corresponding "show"...
        if WINDOWS and self.hide_cursor:
            self.hide_cursor = False

        super(WindowsMixin, self).__init__(*args, **kwargs)

        # Check if we are running on Windows and we have the colorama module,
        # if we do then wrap our file with it.
        if WINDOWS and colorama:
            self.file = colorama.AnsiToWin32(self.file)
            # The progress code expects to be able to call self.file.isatty()
            # but the colorama.AnsiToWin32() object doesn't have that, so we'll
            # add it.
            self.file.isatty = lambda: self.file.wrapped.isatty()
            # The progress code expects to be able to call self.file.flush()
            # but the colorama.AnsiToWin32() object doesn't have that, so we'll
            # add it.
            self.file.flush = lambda: self.file.wrapped.flush()
192 | |||
193 | |||
class BaseDownloadProgressBar(WindowsMixin, InterruptibleMixin,
                              DownloadProgressMixin):
    """Shared output stream and format strings for all download bars."""

    file = sys.stdout
    message = "%(percent)d%%"
    suffix = "%(downloaded)s %(download_speed)s %(pretty_eta)s"
200 | |||
# NOTE: The "type: ignore" comments on the following classes are there to
# work around https://github.com/python/typing/issues/241

# One concrete bar class per rendering base; BAR_TYPES below maps the
# user-selectable style names onto a subset of these.


class DefaultDownloadProgressBar(BaseDownloadProgressBar,
                                 _BaseBar):  # type: ignore
    """The "on" style: the fanciest bar the terminal encoding supports."""
    pass


class DownloadSilentBar(BaseDownloadProgressBar, SilentBar):  # type: ignore
    """The "off" style: renders nothing."""
    pass


class DownloadIncrementalBar(BaseDownloadProgressBar,  # type: ignore
                             IncrementalBar):
    """The "ascii" style."""
    pass


class DownloadChargingBar(BaseDownloadProgressBar,  # type: ignore
                          ChargingBar):
    """Charging-style bar (not mapped in BAR_TYPES)."""
    pass


class DownloadShadyBar(BaseDownloadProgressBar, ShadyBar):  # type: ignore
    """Shady-style bar (not mapped in BAR_TYPES)."""
    pass


class DownloadFillingSquaresBar(BaseDownloadProgressBar,  # type: ignore
                                FillingSquaresBar):
    """Filling-squares bar (not mapped in BAR_TYPES)."""
    pass


class DownloadFillingCirclesBar(BaseDownloadProgressBar,  # type: ignore
                                FillingCirclesBar):
    """The "pretty" style."""
    pass


class DownloadBlueEmojiProgressBar(BaseDownloadProgressBar,  # type: ignore
                                   BlueEmojiBar):
    """The "emoji" style."""
    pass
241 | |||
242 | |||
class DownloadProgressSpinner(WindowsMixin, InterruptibleMixin,
                              DownloadProgressMixin, WritelnMixin, Spinner):
    """Spinner used when the total download size is unknown."""

    file = sys.stdout
    suffix = "%(downloaded)s %(download_speed)s"

    def next_phase(self):
        """Return the next spinner glyph, cycling forever."""
        phaser = getattr(self, "_phaser", None)
        if phaser is None:
            # Lazily created so class-level state stays untouched.
            phaser = self._phaser = itertools.cycle(self.phases)
        return next(phaser)

    def update(self):
        """Redraw the line: message, then spinner glyph, then suffix."""
        message = self.message % self
        glyph = self.next_phase()
        suffix = self.suffix % self
        pieces = [message, " " if message else "", glyph]
        pieces += [" " if suffix else "", suffix]
        self.writeln(''.join(pieces))
267 | |||
268 | |||
# Maps the progress_bar style name to a pair of classes:
# (bar for known total size, spinner for unknown total size).
BAR_TYPES = {
    "off": (DownloadSilentBar, DownloadSilentBar),
    "on": (DefaultDownloadProgressBar, DownloadProgressSpinner),
    "ascii": (DownloadIncrementalBar, DownloadProgressSpinner),
    "pretty": (DownloadFillingCirclesBar, DownloadProgressSpinner),
    "emoji": (DownloadBlueEmojiProgressBar, DownloadProgressSpinner)
}
276 | |||
277 | |||
def DownloadProgressProvider(progress_bar, max=None):
    """Return a bound ``.iter`` callable for the requested progress style.

    An unknown or zero *max* selects the spinner variant; otherwise a bar
    sized to *max* is used.
    """
    bar_cls, spinner_cls = BAR_TYPES[progress_bar]
    if max is None or max == 0:
        return spinner_cls().iter
    return bar_cls(max=max).iter
283 | |||
284 | |||
285 | ################################################################ | ||
286 | # Generic "something is happening" spinners | ||
287 | # | ||
288 | # We don't even try using progress.spinner.Spinner here because it's actually | ||
289 | # simpler to reimplement from scratch than to coerce their code into doing | ||
290 | # what we need. | ||
291 | ################################################################ | ||
292 | |||
@contextlib.contextmanager
def hidden_cursor(file):
    """Hide the terminal cursor while the context is active.

    Becomes a no-op on Windows (no ANSI cursor support even via colorama),
    for non-tty output, and when logging is quieter than INFO -- see
    https://github.com/pypa/pip/issues/3418 for the latter two.
    """
    # Short-circuit order matters: never poke at the file on Windows.
    if (WINDOWS or not file.isatty() or
            logger.getEffectiveLevel() > logging.INFO):
        yield
    else:
        file.write(HIDE_CURSOR)
        try:
            yield
        finally:
            # Always restore the cursor, even if the body raised.
            file.write(SHOW_CURSOR)
310 | |||
311 | |||
class RateLimiter(object):
    """Simple wall-clock throttle: ready() is True once per interval."""

    def __init__(self, min_update_interval_seconds):
        self._interval = min_update_interval_seconds
        # Epoch 0 means "never updated", so the first ready() is True.
        self._marked_at = 0

    def ready(self):
        """Return True when at least the configured interval has elapsed."""
        return time.time() - self._marked_at >= self._interval

    def reset(self):
        """Restart the interval from the current time."""
        self._marked_at = time.time()
324 | |||
325 | |||
class InteractiveSpinner(object):
    """Animated text spinner for interactive (tty) sessions."""

    def __init__(self, message, file=None, spin_chars="-\\|/",
                 # Empirically, 8 updates/second looks nice
                 min_update_interval_seconds=0.125):
        self._message = message
        self._file = sys.stdout if file is None else file
        self._rate_limiter = RateLimiter(min_update_interval_seconds)
        self._finished = False
        self._spin_cycle = itertools.cycle(spin_chars)
        # Print the prefix once; only the trailing status gets redrawn.
        self._file.write(" " * get_indentation() + self._message + " ... ")
        self._width = 0

    def _write(self, status):
        assert not self._finished
        # Backspace over the previous status, blank it out, backspace again,
        # then lay down the new status text.
        rubout = "\b" * self._width
        self._file.write(rubout + " " * self._width + rubout + status)
        self._width = len(status)
        self._file.flush()
        self._rate_limiter.reset()

    def spin(self):
        """Advance the animation, rate-limited; no-op once finished."""
        if self._finished or not self._rate_limiter.ready():
            return
        self._write(next(self._spin_cycle))

    def finish(self, final_status):
        """Replace the spinner with *final_status* and end the line."""
        if self._finished:
            return
        self._write(final_status)
        self._file.write("\n")
        self._file.flush()
        self._finished = True
367 | self._finished = True | ||
368 | |||
369 | |||
# Used for dumb terminals, non-interactive installs (no tty), etc.
class NonInteractiveSpinner(object):
    """Keep-alive status lines for non-interactive output.

    Instead of animating, logs a status line at most once per interval
    (60 seconds by default) so that systems like Travis-CI, which treat
    lack of output as a frozen task, still see activity.
    """

    def __init__(self, message, min_update_interval_seconds=60):
        self._message = message
        self._finished = False
        self._rate_limiter = RateLimiter(min_update_interval_seconds)
        self._update("started")

    def _update(self, status):
        assert not self._finished
        self._rate_limiter.reset()
        logger.info("%s: %s", self._message, status)

    def spin(self):
        """Emit a keep-alive line, rate-limited; no-op once finished."""
        if self._finished or not self._rate_limiter.ready():
            return
        self._update("still running...")

    def finish(self, final_status):
        """Log the final status and stop emitting further updates."""
        if self._finished:
            return
        self._update("finished with status '%s'" % (final_status,))
        self._finished = True
398 | |||
399 | |||
@contextlib.contextmanager
def open_spinner(message):
    """Yield a running spinner for *message*, finishing it on exit.

    An InteractiveSpinner writes directly to sys.stdout but behaves as if
    it had level INFO (shown only when logging is at INFO or better); the
    NonInteractiveSpinner goes through the logging system and is therefore
    always consistent with the logging configuration.
    """
    interactive = (sys.stdout.isatty() and
                   logger.getEffectiveLevel() <= logging.INFO)
    if interactive:
        spinner = InteractiveSpinner(message)
    else:
        spinner = NonInteractiveSpinner(message)
    try:
        with hidden_cursor(sys.stdout):
            yield spinner
    except KeyboardInterrupt:
        spinner.finish("canceled")
        raise
    except Exception:
        spinner.finish("error")
        raise
    else:
        spinner.finish("done")