diff options
author | Shubham Saini <shubham6405@gmail.com> | 2018-12-11 10:01:23 +0000 |
---|---|---|
committer | Shubham Saini <shubham6405@gmail.com> | 2018-12-11 10:01:23 +0000 |
commit | 68df54d6629ec019142eb149dd037774f2d11e7c (patch) | |
tree | 345bc22d46b4e01a4ba8303b94278952a4ed2b9e /venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests |
First commit
Diffstat (limited to 'venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests')
18 files changed, 4859 insertions, 0 deletions
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/__init__.py new file mode 100644 index 0000000..f9565cb --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/__init__.py | |||
@@ -0,0 +1,123 @@ | |||
1 | # -*- coding: utf-8 -*- | ||
2 | |||
3 | # __ | ||
4 | # /__) _ _ _ _ _/ _ | ||
5 | # / ( (- (/ (/ (- _) / _) | ||
6 | # / | ||
7 | |||
8 | """ | ||
9 | Requests HTTP Library | ||
10 | ~~~~~~~~~~~~~~~~~~~~~ | ||
11 | |||
12 | Requests is an HTTP library, written in Python, for human beings. Basic GET | ||
13 | usage: | ||
14 | |||
15 | >>> import requests | ||
16 | >>> r = requests.get('https://www.python.org') | ||
17 | >>> r.status_code | ||
18 | 200 | ||
19 | >>> 'Python is a programming language' in r.content | ||
20 | True | ||
21 | |||
22 | ... or POST: | ||
23 | |||
24 | >>> payload = dict(key1='value1', key2='value2') | ||
25 | >>> r = requests.post('http://httpbin.org/post', data=payload) | ||
26 | >>> print(r.text) | ||
27 | { | ||
28 | ... | ||
29 | "form": { | ||
30 | "key2": "value2", | ||
31 | "key1": "value1" | ||
32 | }, | ||
33 | ... | ||
34 | } | ||
35 | |||
36 | The other HTTP methods are supported - see `requests.api`. Full documentation | ||
37 | is at <http://python-requests.org>. | ||
38 | |||
39 | :copyright: (c) 2017 by Kenneth Reitz. | ||
40 | :license: Apache 2.0, see LICENSE for more details. | ||
41 | """ | ||
42 | |||
43 | from pip._vendor import urllib3 | ||
44 | from pip._vendor import chardet | ||
45 | import warnings | ||
46 | from .exceptions import RequestsDependencyWarning | ||
47 | |||
48 | |||
def check_compatibility(urllib3_version, chardet_version):
    """Sanity-check the vendored urllib3 and chardet versions.

    Raises ``AssertionError`` (or ``ValueError`` from ``int()``) when a
    version string falls outside the supported range; the caller catches
    both and downgrades the failure to a warning.

    :param urllib3_version: urllib3's ``__version__`` string.
    :param chardet_version: chardet's ``__version__`` string.
    """
    pieces = urllib3_version.split('.')
    # A bare 'dev' version means urllib3 was installed from git.
    assert pieces != ['dev']

    # Sometimes, urllib3 only reports its version as 16.1 -- pad a patch level.
    if len(pieces) == 2:
        pieces.append('0')

    # Check urllib3 for compatibility: >= 1.21.1, <= 1.22.
    major, minor, patch = (int(piece) for piece in pieces)  # noqa: F811
    assert major == 1
    assert 21 <= minor <= 22

    # Check chardet for compatibility: >= 3.0.2, < 3.1.0.
    major, minor, patch = (int(piece) for piece in chardet_version.split('.')[:3])
    assert major == 3
    assert minor < 1
    assert patch >= 2
72 | |||
73 | |||
# Check imported dependencies for compatibility.
try:
    check_compatibility(urllib3.__version__, chardet.__version__)
except (AssertionError, ValueError):
    # ValueError covers version strings with non-integer components
    # (e.g. '1.21.dev0'); either way this is non-fatal -- just warn.
    warnings.warn("urllib3 ({0}) or chardet ({1}) doesn't match a supported "
                  "version!".format(urllib3.__version__, chardet.__version__),
                  RequestsDependencyWarning)
81 | |||
# Attempt to enable urllib3's SNI support, if possible.
# On Windows the pyopenssl shim is skipped entirely (pip-specific change).
from pip._internal.compat import WINDOWS
if not WINDOWS:
    try:
        from pip._vendor.urllib3.contrib import pyopenssl
        pyopenssl.inject_into_urllib3()
    except ImportError:
        # pyOpenSSL (or one of its dependencies) is unavailable; fall back
        # to the standard library's ssl support.
        pass

# urllib3's DependencyWarnings should be silenced.
from pip._vendor.urllib3.exceptions import DependencyWarning
warnings.simplefilter('ignore', DependencyWarning)
94 | |||
95 | from .__version__ import __title__, __description__, __url__, __version__ | ||
96 | from .__version__ import __build__, __author__, __author_email__, __license__ | ||
97 | from .__version__ import __copyright__, __cake__ | ||
98 | |||
99 | from . import utils | ||
100 | from . import packages | ||
101 | from .models import Request, Response, PreparedRequest | ||
102 | from .api import request, get, head, post, patch, put, delete, options | ||
103 | from .sessions import session, Session | ||
104 | from .status_codes import codes | ||
105 | from .exceptions import ( | ||
106 | RequestException, Timeout, URLRequired, | ||
107 | TooManyRedirects, HTTPError, ConnectionError, | ||
108 | FileModeWarning, ConnectTimeout, ReadTimeout | ||
109 | ) | ||
110 | |||
# Set default logging handler to avoid "No handler found" warnings.
import logging
try:  # Python 2.7+
    from logging import NullHandler
except ImportError:
    # Python 2.6 has no logging.NullHandler; provide a no-op equivalent.
    class NullHandler(logging.Handler):
        def emit(self, record):
            pass

logging.getLogger(__name__).addHandler(NullHandler())

# FileModeWarnings go off per the default.
warnings.simplefilter('default', FileModeWarning, append=True)
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/__version__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/__version__.py new file mode 100644 index 0000000..d380286 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/__version__.py | |||
@@ -0,0 +1,14 @@ | |||
# .-. .-. .-. . . .-. .-. .-. .-.
# |(  |-  |.| | | |-  `-.  |  `-.
# ' ' `-' `-`.`-' `-' `-'  '  `-'

# Package metadata for the vendored requests distribution.  These names are
# re-exported from requests/__init__.py and consumed by packaging tools.
__title__ = 'requests'
__description__ = 'Python HTTP for Humans.'
__url__ = 'http://python-requests.org'
__version__ = '2.18.4'
# Hex-encoded build number: 0xMMmmpp mirrors __version__ (2.18.4 -> 0x021804).
__build__ = 0x021804
__author__ = 'Kenneth Reitz'
__author_email__ = 'me@kennethreitz.org'
__license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2017 Kenneth Reitz'
# Sparkles-cake-sparkles easter egg (requests tradition).
__cake__ = u'\u2728 \U0001f370 \u2728'
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/_internal_utils.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/_internal_utils.py new file mode 100644 index 0000000..405b025 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/_internal_utils.py | |||
@@ -0,0 +1,42 @@ | |||
1 | # -*- coding: utf-8 -*- | ||
2 | |||
3 | """ | ||
4 | requests._internal_utils | ||
5 | ~~~~~~~~~~~~~~ | ||
6 | |||
7 | Provides utility functions that are consumed internally by Requests | ||
8 | which depend on extremely few external helpers (such as compat) | ||
9 | """ | ||
10 | |||
11 | from .compat import is_py2, builtin_str, str | ||
12 | |||
13 | |||
def to_native_string(string, encoding='ascii'):
    """Given a string object, regardless of type, returns a representation of
    that string in the native string type, encoding and decoding where
    necessary. This assumes ASCII unless told otherwise.
    """
    if isinstance(string, builtin_str):
        return string

    # Not the native type: on Python 2 the native type is bytes, so encode;
    # on Python 3 it is text, so decode.
    if is_py2:
        return string.encode(encoding)
    return string.decode(encoding)
28 | |||
29 | |||
def unicode_is_ascii(u_string):
    """Determine if unicode string only contains ASCII characters.

    :param str u_string: unicode string to check. Must be unicode
        and not Python 2 `str`.
    :rtype: bool
    """
    assert isinstance(u_string, str)
    try:
        u_string.encode('ascii')
    except UnicodeEncodeError:
        return False
    return True
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/adapters.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/adapters.py new file mode 100644 index 0000000..c50585c --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/adapters.py | |||
@@ -0,0 +1,525 @@ | |||
1 | # -*- coding: utf-8 -*- | ||
2 | |||
3 | """ | ||
4 | requests.adapters | ||
5 | ~~~~~~~~~~~~~~~~~ | ||
6 | |||
7 | This module contains the transport adapters that Requests uses to define | ||
8 | and maintain connections. | ||
9 | """ | ||
10 | |||
11 | import os.path | ||
12 | import socket | ||
13 | |||
14 | from pip._vendor.urllib3.poolmanager import PoolManager, proxy_from_url | ||
15 | from pip._vendor.urllib3.response import HTTPResponse | ||
16 | from pip._vendor.urllib3.util import Timeout as TimeoutSauce | ||
17 | from pip._vendor.urllib3.util.retry import Retry | ||
18 | from pip._vendor.urllib3.exceptions import ClosedPoolError | ||
19 | from pip._vendor.urllib3.exceptions import ConnectTimeoutError | ||
20 | from pip._vendor.urllib3.exceptions import HTTPError as _HTTPError | ||
21 | from pip._vendor.urllib3.exceptions import MaxRetryError | ||
22 | from pip._vendor.urllib3.exceptions import NewConnectionError | ||
23 | from pip._vendor.urllib3.exceptions import ProxyError as _ProxyError | ||
24 | from pip._vendor.urllib3.exceptions import ProtocolError | ||
25 | from pip._vendor.urllib3.exceptions import ReadTimeoutError | ||
26 | from pip._vendor.urllib3.exceptions import SSLError as _SSLError | ||
27 | from pip._vendor.urllib3.exceptions import ResponseError | ||
28 | |||
29 | from .models import Response | ||
30 | from .compat import urlparse, basestring | ||
31 | from .utils import (DEFAULT_CA_BUNDLE_PATH, get_encoding_from_headers, | ||
32 | prepend_scheme_if_needed, get_auth_from_url, urldefragauth, | ||
33 | select_proxy) | ||
34 | from .structures import CaseInsensitiveDict | ||
35 | from .cookies import extract_cookies_to_jar | ||
36 | from .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError, | ||
37 | ProxyError, RetryError, InvalidSchema) | ||
38 | from .auth import _basic_auth_str | ||
39 | |||
40 | try: | ||
41 | from pip._vendor.urllib3.contrib.socks import SOCKSProxyManager | ||
42 | except ImportError: | ||
43 | def SOCKSProxyManager(*args, **kwargs): | ||
44 | raise InvalidSchema("Missing dependencies for SOCKS support.") | ||
45 | |||
46 | DEFAULT_POOLBLOCK = False | ||
47 | DEFAULT_POOLSIZE = 10 | ||
48 | DEFAULT_RETRIES = 0 | ||
49 | DEFAULT_POOL_TIMEOUT = None | ||
50 | |||
51 | |||
class BaseAdapter(object):
    """The Base Transport Adapter"""

    def __init__(self):
        super(BaseAdapter, self).__init__()

    def send(self, request, stream=False, timeout=None, verify=True,
             cert=None, proxies=None):
        """Sends PreparedRequest object. Returns Response object.

        Subclasses must override this; the base implementation only
        defines the transport-adapter interface.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) <timeouts>` tuple.
        :type timeout: float or tuple
        :param verify: (optional) Either a boolean, in which case it controls whether we verify
            the server's TLS certificate, or a string, in which case it must be a path
            to a CA bundle to use
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        """
        raise NotImplementedError

    def close(self):
        """Cleans up adapter specific items."""
        raise NotImplementedError
79 | |||
80 | |||
class HTTPAdapter(BaseAdapter):
    """The built-in HTTP Adapter for urllib3.

    Provides a general-case interface for Requests sessions to contact HTTP and
    HTTPS urls by implementing the Transport Adapter interface. This class will
    usually be created by the :class:`Session <Session>` class under the
    covers.

    :param pool_connections: The number of urllib3 connection pools to cache.
    :param pool_maxsize: The maximum number of connections to save in the pool.
    :param max_retries: The maximum number of retries each connection
        should attempt. Note, this applies only to failed DNS lookups, socket
        connections and connection timeouts, never to requests where data has
        made it to the server. By default, Requests does not retry failed
        connections. If you need granular control over the conditions under
        which we retry a request, import urllib3's ``Retry`` class and pass
        that instead.
    :param pool_block: Whether the connection pool should block for connections.

    Usage::

      >>> import requests
      >>> s = requests.Session()
      >>> a = requests.adapters.HTTPAdapter(max_retries=3)
      >>> s.mount('http://', a)
    """
    # Attributes captured by __getstate__ / restored by __setstate__.
    __attrs__ = ['max_retries', 'config', '_pool_connections', '_pool_maxsize',
                 '_pool_block']

    def __init__(self, pool_connections=DEFAULT_POOLSIZE,
                 pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES,
                 pool_block=DEFAULT_POOLBLOCK):
        if max_retries == DEFAULT_RETRIES:
            # Default policy: zero retries, and never retry after a read has
            # started (the request may already have reached the server).
            self.max_retries = Retry(0, read=False)
        else:
            self.max_retries = Retry.from_int(max_retries)
        self.config = {}
        # Cache of per-proxy-URL managers built lazily by proxy_manager_for().
        self.proxy_manager = {}

        super(HTTPAdapter, self).__init__()

        self._pool_connections = pool_connections
        self._pool_maxsize = pool_maxsize
        self._pool_block = pool_block

        self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)

    def __getstate__(self):
        # Pickle only the attributes named in __attrs__; the pool manager
        # itself is not picklable and is rebuilt in __setstate__.
        return dict((attr, getattr(self, attr, None)) for attr in
                    self.__attrs__)

    def __setstate__(self, state):
        # Can't handle by adding 'proxy_manager' to self.__attrs__ because
        # self.poolmanager uses a lambda function, which isn't pickleable.
        self.proxy_manager = {}
        self.config = {}

        for attr, value in state.items():
            setattr(self, attr, value)

        self.init_poolmanager(self._pool_connections, self._pool_maxsize,
                              block=self._pool_block)

    def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs):
        """Initializes a urllib3 PoolManager.

        This method should not be called from user code, and is only
        exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param connections: The number of urllib3 connection pools to cache.
        :param maxsize: The maximum number of connections to save in the pool.
        :param block: Block when no free connections are available.
        :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.
        """
        # save these values for pickling
        self._pool_connections = connections
        self._pool_maxsize = maxsize
        self._pool_block = block

        self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize,
                                       block=block, strict=True, **pool_kwargs)

    def proxy_manager_for(self, proxy, **proxy_kwargs):
        """Return urllib3 ProxyManager for the given proxy.

        This method should not be called from user code, and is only
        exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param proxy: The proxy to return a urllib3 ProxyManager for.
        :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.
        :returns: ProxyManager
        :rtype: urllib3.ProxyManager
        """
        if proxy in self.proxy_manager:
            # Reuse the cached manager for this proxy URL.
            manager = self.proxy_manager[proxy]
        elif proxy.lower().startswith('socks'):
            username, password = get_auth_from_url(proxy)
            manager = self.proxy_manager[proxy] = SOCKSProxyManager(
                proxy,
                username=username,
                password=password,
                num_pools=self._pool_connections,
                maxsize=self._pool_maxsize,
                block=self._pool_block,
                **proxy_kwargs
            )
        else:
            proxy_headers = self.proxy_headers(proxy)
            manager = self.proxy_manager[proxy] = proxy_from_url(
                proxy,
                proxy_headers=proxy_headers,
                num_pools=self._pool_connections,
                maxsize=self._pool_maxsize,
                block=self._pool_block,
                **proxy_kwargs)

        return manager

    def cert_verify(self, conn, url, verify, cert):
        """Verify a SSL certificate. This method should not be called from user
        code, and is only exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param conn: The urllib3 connection object associated with the cert.
        :param url: The requested URL.
        :param verify: Either a boolean, in which case it controls whether we verify
            the server's TLS certificate, or a string, in which case it must be a path
            to a CA bundle to use
        :param cert: The SSL certificate to verify.
        """
        if url.lower().startswith('https') and verify:

            cert_loc = None

            # Allow self-specified cert location.
            if verify is not True:
                cert_loc = verify

            if not cert_loc:
                cert_loc = DEFAULT_CA_BUNDLE_PATH

            if not cert_loc or not os.path.exists(cert_loc):
                raise IOError("Could not find a suitable TLS CA certificate bundle, "
                              "invalid path: {0}".format(cert_loc))

            conn.cert_reqs = 'CERT_REQUIRED'

            # A directory of certs is configured differently from a bundle file.
            if not os.path.isdir(cert_loc):
                conn.ca_certs = cert_loc
            else:
                conn.ca_cert_dir = cert_loc
        else:
            # Verification disabled (or plain HTTP): clear any CA settings.
            conn.cert_reqs = 'CERT_NONE'
            conn.ca_certs = None
            conn.ca_cert_dir = None

        if cert:
            # cert may be a (cert_file, key_file) pair or a single path string.
            if not isinstance(cert, basestring):
                conn.cert_file = cert[0]
                conn.key_file = cert[1]
            else:
                conn.cert_file = cert
                conn.key_file = None
            if conn.cert_file and not os.path.exists(conn.cert_file):
                raise IOError("Could not find the TLS certificate file, "
                              "invalid path: {0}".format(conn.cert_file))
            if conn.key_file and not os.path.exists(conn.key_file):
                raise IOError("Could not find the TLS key file, "
                              "invalid path: {0}".format(conn.key_file))

    def build_response(self, req, resp):
        """Builds a :class:`Response <requests.Response>` object from a urllib3
        response. This should not be called from user code, and is only exposed
        for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`

        :param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response.
        :param resp: The urllib3 response object.
        :rtype: requests.Response
        """
        response = Response()

        # Fallback to None if there's no status_code, for whatever reason.
        response.status_code = getattr(resp, 'status', None)

        # Make headers case-insensitive.
        response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))

        # Set encoding.
        response.encoding = get_encoding_from_headers(response.headers)
        response.raw = resp
        response.reason = response.raw.reason

        if isinstance(req.url, bytes):
            response.url = req.url.decode('utf-8')
        else:
            response.url = req.url

        # Add new cookies from the server.
        extract_cookies_to_jar(response.cookies, req, resp)

        # Give the Response some context.
        response.request = req
        response.connection = self

        return response

    def get_connection(self, url, proxies=None):
        """Returns a urllib3 connection for the given URL. This should not be
        called from user code, and is only exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param url: The URL to connect to.
        :param proxies: (optional) A Requests-style dictionary of proxies used on this request.
        :rtype: urllib3.ConnectionPool
        """
        proxy = select_proxy(url, proxies)

        if proxy:
            proxy = prepend_scheme_if_needed(proxy, 'http')
            proxy_manager = self.proxy_manager_for(proxy)
            conn = proxy_manager.connection_from_url(url)
        else:
            # Only scheme should be lower case
            parsed = urlparse(url)
            url = parsed.geturl()
            conn = self.poolmanager.connection_from_url(url)

        return conn

    def close(self):
        """Disposes of any internal state.

        Currently, this closes the PoolManager and any active ProxyManager,
        which closes any pooled connections.
        """
        self.poolmanager.clear()
        for proxy in self.proxy_manager.values():
            proxy.clear()

    def request_url(self, request, proxies):
        """Obtain the url to use when making the final request.

        If the message is being sent through a HTTP proxy, the full URL has to
        be used. Otherwise, we should only use the path portion of the URL.

        This should not be called from user code, and is only exposed for use
        when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs.
        :rtype: str
        """
        proxy = select_proxy(request.url, proxies)
        scheme = urlparse(request.url).scheme

        is_proxied_http_request = (proxy and scheme != 'https')
        using_socks_proxy = False
        if proxy:
            proxy_scheme = urlparse(proxy).scheme.lower()
            using_socks_proxy = proxy_scheme.startswith('socks')

        url = request.path_url
        # SOCKS proxies tunnel at the transport layer, so they still take the
        # path-only form; plain HTTP proxies need the absolute URL.
        if is_proxied_http_request and not using_socks_proxy:
            url = urldefragauth(request.url)

        return url

    def add_headers(self, request, **kwargs):
        """Add any headers needed by the connection. As of v2.0 this does
        nothing by default, but is left for overriding by users that subclass
        the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        This should not be called from user code, and is only exposed for use
        when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param request: The :class:`PreparedRequest <PreparedRequest>` to add headers to.
        :param kwargs: The keyword arguments from the call to send().
        """
        pass

    def proxy_headers(self, proxy):
        """Returns a dictionary of the headers to add to any request sent
        through a proxy. This works with urllib3 magic to ensure that they are
        correctly sent to the proxy, rather than in a tunnelled request if
        CONNECT is being used.

        This should not be called from user code, and is only exposed for use
        when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param proxy: The url of the proxy being used for this request.
        :rtype: dict
        """
        headers = {}
        username, password = get_auth_from_url(proxy)

        if username:
            headers['Proxy-Authorization'] = _basic_auth_str(username,
                                                             password)

        return headers

    def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
        """Sends PreparedRequest object. Returns Response object.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) <timeouts>` tuple.
        :type timeout: float or tuple or urllib3 Timeout object
        :param verify: (optional) Either a boolean, in which case it controls whether
            we verify the server's TLS certificate, or a string, in which case it
            must be a path to a CA bundle to use
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        :rtype: requests.Response
        """

        conn = self.get_connection(request.url, proxies)

        self.cert_verify(conn, request.url, verify, cert)
        url = self.request_url(request, proxies)
        self.add_headers(request)

        # A body with no Content-Length header is sent chunked.
        chunked = not (request.body is None or 'Content-Length' in request.headers)

        # Normalize timeout into a urllib3 Timeout object.
        if isinstance(timeout, tuple):
            try:
                connect, read = timeout
                timeout = TimeoutSauce(connect=connect, read=read)
            except ValueError as e:
                # this may raise a string formatting error.
                err = ("Invalid timeout {0}. Pass a (connect, read) "
                       "timeout tuple, or a single float to set "
                       "both timeouts to the same value".format(timeout))
                raise ValueError(err)
        elif isinstance(timeout, TimeoutSauce):
            pass
        else:
            timeout = TimeoutSauce(connect=timeout, read=timeout)

        try:
            if not chunked:
                resp = conn.urlopen(
                    method=request.method,
                    url=url,
                    body=request.body,
                    headers=request.headers,
                    redirect=False,
                    assert_same_host=False,
                    preload_content=False,
                    decode_content=False,
                    retries=self.max_retries,
                    timeout=timeout
                )

            # Send the request.
            else:
                # Chunked path: drive the low-level httplib connection by hand
                # so each body chunk is framed per the chunked encoding spec.
                if hasattr(conn, 'proxy_pool'):
                    conn = conn.proxy_pool

                low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT)

                try:
                    low_conn.putrequest(request.method,
                                        url,
                                        skip_accept_encoding=True)

                    for header, value in request.headers.items():
                        low_conn.putheader(header, value)

                    low_conn.endheaders()

                    # Each iteration of request.body yields one chunk:
                    # hex length, CRLF, data, CRLF; then the zero terminator.
                    for i in request.body:
                        low_conn.send(hex(len(i))[2:].encode('utf-8'))
                        low_conn.send(b'\r\n')
                        low_conn.send(i)
                        low_conn.send(b'\r\n')
                    low_conn.send(b'0\r\n\r\n')

                    # Receive the response from the server
                    try:
                        # For Python 2.7+ versions, use buffering of HTTP
                        # responses
                        r = low_conn.getresponse(buffering=True)
                    except TypeError:
                        # For compatibility with Python 2.6 versions and back
                        r = low_conn.getresponse()

                    resp = HTTPResponse.from_httplib(
                        r,
                        pool=conn,
                        connection=low_conn,
                        preload_content=False,
                        decode_content=False
                    )
                except:
                    # If we hit any problems here, clean up the connection.
                    # Then, reraise so that we can handle the actual exception.
                    low_conn.close()
                    raise

        except (ProtocolError, socket.error) as err:
            raise ConnectionError(err, request=request)

        except MaxRetryError as e:
            # Map urllib3's retry failure onto the requests exception taxonomy
            # by inspecting the underlying reason.
            if isinstance(e.reason, ConnectTimeoutError):
                # TODO: Remove this in 3.0.0: see #2811
                if not isinstance(e.reason, NewConnectionError):
                    raise ConnectTimeout(e, request=request)

            if isinstance(e.reason, ResponseError):
                raise RetryError(e, request=request)

            if isinstance(e.reason, _ProxyError):
                raise ProxyError(e, request=request)

            if isinstance(e.reason, _SSLError):
                # This branch is for urllib3 v1.22 and later.
                raise SSLError(e, request=request)

            raise ConnectionError(e, request=request)

        except ClosedPoolError as e:
            raise ConnectionError(e, request=request)

        except _ProxyError as e:
            raise ProxyError(e)

        except (_SSLError, _HTTPError) as e:
            if isinstance(e, _SSLError):
                # This branch is for urllib3 versions earlier than v1.22
                raise SSLError(e, request=request)
            elif isinstance(e, ReadTimeoutError):
                raise ReadTimeout(e, request=request)
            else:
                raise

        return self.build_response(request, resp)
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/api.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/api.py new file mode 100644 index 0000000..f9ffabf --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/api.py | |||
@@ -0,0 +1,152 @@ | |||
1 | # -*- coding: utf-8 -*- | ||
2 | |||
3 | """ | ||
4 | requests.api | ||
5 | ~~~~~~~~~~~~ | ||
6 | |||
7 | This module implements the Requests API. | ||
8 | |||
9 | :copyright: (c) 2012 by Kenneth Reitz. | ||
10 | :license: Apache2, see LICENSE for more details. | ||
11 | """ | ||
12 | |||
13 | from . import sessions | ||
14 | |||
15 | |||
def request(method, url, **kwargs):
    """Constructs and sends a :class:`Request <Request>`.

    :param method: method for the new :class:`Request` object.
    :param url: URL for the new :class:`Request` object.
    :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`.
    :param data: (optional) Dictionary or list of tuples ``[(key, value)]`` (will be form-encoded), bytes, or file-like object to send in the body of the :class:`Request`.
    :param json: (optional) json data to send in the body of the :class:`Request`.
    :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`.
    :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.
    :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload.
        ``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')``
        or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content-type'`` is a string
        defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers
        to add for the file.
    :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth.
    :param timeout: (optional) How many seconds to wait for the server to send data
        before giving up, as a float, or a :ref:`(connect timeout, read
        timeout) <timeouts>` tuple.
    :type timeout: float or tuple
    :param allow_redirects: (optional) Boolean. Enable/disable GET/OPTIONS/POST/PUT/PATCH/DELETE/HEAD redirection. Defaults to ``True``.
    :type allow_redirects: bool
    :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.
    :param verify: (optional) Either a boolean, in which case it controls whether we verify
        the server's TLS certificate, or a string, in which case it must be a path
        to a CA bundle to use. Defaults to ``True``.
    :param stream: (optional) if ``False``, the response content will be immediately downloaded.
    :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response

    Usage::

      >>> import requests
      >>> req = requests.request('GET', 'http://httpbin.org/get')
      <Response [200]>
    """
    # A throwaway Session is created per call and always closed on the way
    # out, so no sockets linger (which would trigger ResourceWarnings and
    # look like memory leaks).  Equivalent to the ``with`` form.
    session = sessions.Session()
    try:
        return session.request(method=method, url=url, **kwargs)
    finally:
        session.close()
59 | |||
60 | |||
def get(url, params=None, **kwargs):
    r"""Sends a GET request.

    :param url: URL for the new :class:`Request` object.
    :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    # GET follows redirects unless the caller opted out explicitly.
    if 'allow_redirects' not in kwargs:
        kwargs['allow_redirects'] = True
    return request('get', url, params=params, **kwargs)
73 | |||
74 | |||
def options(url, **kwargs):
    r"""Sends an OPTIONS request.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    # OPTIONS follows redirects unless the caller opted out explicitly.
    if 'allow_redirects' not in kwargs:
        kwargs['allow_redirects'] = True
    return request('options', url, **kwargs)
86 | |||
87 | |||
def head(url, **kwargs):
    r"""Sends a HEAD request.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    # Unlike the other verbs, HEAD does not follow redirects by default.
    if 'allow_redirects' not in kwargs:
        kwargs['allow_redirects'] = False
    return request('head', url, **kwargs)
99 | |||
100 | |||
def post(url, data=None, json=None, **kwargs):
    r"""Sends a POST request.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary (will be form-encoded), bytes, or file-like object to send in the body of the :class:`Request`.
    :param json: (optional) json data to send in the body of the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    response = request('post', url, data=data, json=json, **kwargs)
    return response
113 | |||
114 | |||
def put(url, data=None, **kwargs):
    r"""Sends a PUT request.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary (will be form-encoded), bytes, or file-like object to send in the body of the :class:`Request`.
    :param json: (optional) json data to send in the body of the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    response = request('put', url, data=data, **kwargs)
    return response
127 | |||
128 | |||
def patch(url, data=None, **kwargs):
    r"""Sends a PATCH request.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary (will be form-encoded), bytes, or file-like object to send in the body of the :class:`Request`.
    :param json: (optional) json data to send in the body of the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    response = request('patch', url, data=data, **kwargs)
    return response
141 | |||
142 | |||
def delete(url, **kwargs):
    r"""Sends a DELETE request.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    response = request('delete', url, **kwargs)
    return response
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/auth.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/auth.py new file mode 100644 index 0000000..73e4534 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/auth.py | |||
@@ -0,0 +1,293 @@ | |||
1 | # -*- coding: utf-8 -*- | ||
2 | |||
3 | """ | ||
4 | requests.auth | ||
5 | ~~~~~~~~~~~~~ | ||
6 | |||
7 | This module contains the authentication handlers for Requests. | ||
8 | """ | ||
9 | |||
10 | import os | ||
11 | import re | ||
12 | import time | ||
13 | import hashlib | ||
14 | import threading | ||
15 | import warnings | ||
16 | |||
17 | from base64 import b64encode | ||
18 | |||
19 | from .compat import urlparse, str, basestring | ||
20 | from .cookies import extract_cookies_to_jar | ||
21 | from ._internal_utils import to_native_string | ||
22 | from .utils import parse_dict_header | ||
23 | |||
24 | CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded' | ||
25 | CONTENT_TYPE_MULTI_PART = 'multipart/form-data' | ||
26 | |||
27 | |||
def _basic_auth_str(username, password):
    """Build the value for a ``Basic`` Authorization header.

    :rtype: str
    """
    # Backwards-compatibility shim: non-string credentials (e.g. ints) are
    # coerced to str with a DeprecationWarning.  Deliberately preserved
    # behaviour; slated for removal in 3.0.0.
    if not isinstance(username, basestring):
        warnings.warn(
            "Non-string usernames will no longer be supported in Requests "
            "3.0.0. Please convert the object you've passed in ({0!r}) to "
            "a string or bytes object in the near future to avoid "
            "problems.".format(username),
            category=DeprecationWarning,
        )
        username = str(username)

    if not isinstance(password, basestring):
        warnings.warn(
            "Non-string passwords will no longer be supported in Requests "
            "3.0.0. Please convert the object you've passed in ({0!r}) to "
            "a string or bytes object in the near future to avoid "
            "problems.".format(password),
            category=DeprecationWarning,
        )
        password = str(password)
    # -- End Removal --

    # RFC 7617 historically implies latin-1 for userids/passwords.
    if isinstance(username, str):
        username = username.encode('latin1')
    if isinstance(password, str):
        password = password.encode('latin1')

    credentials = b64encode(username + b':' + password).strip()
    return 'Basic ' + to_native_string(credentials)
70 | |||
71 | |||
class AuthBase(object):
    """Base class that all auth implementations derive from"""

    def __call__(self, r):
        # Subclasses attach credentials to ``r`` here and return it.
        raise NotImplementedError('Auth hooks must be callable.')
77 | |||
78 | |||
class HTTPBasicAuth(AuthBase):
    """Attaches HTTP Basic Authentication to the given Request object."""

    def __init__(self, username, password):
        self.username = username
        self.password = password

    def __eq__(self, other):
        # Compare by credentials; objects without them compare unequal.
        same_user = self.username == getattr(other, 'username', None)
        same_pass = self.password == getattr(other, 'password', None)
        return same_user and same_pass

    def __ne__(self, other):
        return not self == other

    def __call__(self, r):
        r.headers['Authorization'] = _basic_auth_str(self.username, self.password)
        return r
98 | |||
99 | |||
class HTTPProxyAuth(HTTPBasicAuth):
    """Attaches HTTP Proxy Authentication to a given Request object."""

    def __call__(self, r):
        # Same credential encoding as basic auth, but aimed at the proxy.
        header_value = _basic_auth_str(self.username, self.password)
        r.headers['Proxy-Authorization'] = header_value
        return r
106 | |||
107 | |||
class HTTPDigestAuth(AuthBase):
    """Attaches HTTP Digest Authentication to the given Request object.

    Implements the challenge/response handshake: the first request is sent
    unauthenticated, the server's 401 challenge is parsed in ``handle_401``,
    and the request is retried with a ``Digest`` Authorization header.
    """

    def __init__(self, username, password):
        self.username = username
        self.password = password
        # Keep state in per-thread local storage
        self._thread_local = threading.local()

    def init_per_thread_state(self):
        # Ensure state is initialized just once per-thread
        if not hasattr(self._thread_local, 'init'):
            self._thread_local.init = True
            self._thread_local.last_nonce = ''
            self._thread_local.nonce_count = 0
            self._thread_local.chal = {}
            self._thread_local.pos = None
            self._thread_local.num_401_calls = None

    def build_digest_header(self, method, url):
        """Build the Digest Authorization header value from the stored
        challenge (``self._thread_local.chal``).

        Returns ``None`` when the challenge uses an unsupported algorithm
        or an unsupported qop (only ``auth`` is handled, not ``auth-int``).

        :rtype: str
        """

        realm = self._thread_local.chal['realm']
        nonce = self._thread_local.chal['nonce']
        qop = self._thread_local.chal.get('qop')
        algorithm = self._thread_local.chal.get('algorithm')
        opaque = self._thread_local.chal.get('opaque')
        hash_utf8 = None

        if algorithm is None:
            _algorithm = 'MD5'
        else:
            _algorithm = algorithm.upper()
        # lambdas assume digest modules are imported at the top level
        if _algorithm == 'MD5' or _algorithm == 'MD5-SESS':
            def md5_utf8(x):
                if isinstance(x, str):
                    x = x.encode('utf-8')
                return hashlib.md5(x).hexdigest()
            hash_utf8 = md5_utf8
        elif _algorithm == 'SHA':
            def sha_utf8(x):
                if isinstance(x, str):
                    x = x.encode('utf-8')
                return hashlib.sha1(x).hexdigest()
            hash_utf8 = sha_utf8

        # KD ("keyed digest") as defined by RFC 2617.
        KD = lambda s, d: hash_utf8("%s:%s" % (s, d))

        if hash_utf8 is None:
            # Unknown algorithm: we cannot answer the challenge.
            return None

        # XXX not implemented yet
        entdig = None
        p_parsed = urlparse(url)
        #: path is request-uri defined in RFC 2616 which should not be empty
        path = p_parsed.path or "/"
        if p_parsed.query:
            path += '?' + p_parsed.query

        A1 = '%s:%s:%s' % (self.username, realm, self.password)
        A2 = '%s:%s' % (method, path)

        HA1 = hash_utf8(A1)
        HA2 = hash_utf8(A2)

        # nc (nonce count) must increase for reuses of the same nonce.
        if nonce == self._thread_local.last_nonce:
            self._thread_local.nonce_count += 1
        else:
            self._thread_local.nonce_count = 1
        ncvalue = '%08x' % self._thread_local.nonce_count
        # Client nonce: hash of counter, server nonce, time and random bytes.
        s = str(self._thread_local.nonce_count).encode('utf-8')
        s += nonce.encode('utf-8')
        s += time.ctime().encode('utf-8')
        s += os.urandom(8)

        cnonce = (hashlib.sha1(s).hexdigest()[:16])
        if _algorithm == 'MD5-SESS':
            HA1 = hash_utf8('%s:%s:%s' % (HA1, nonce, cnonce))

        if not qop:
            respdig = KD(HA1, "%s:%s" % (nonce, HA2))
        elif qop == 'auth' or 'auth' in qop.split(','):
            noncebit = "%s:%s:%s:%s:%s" % (
                nonce, ncvalue, cnonce, 'auth', HA2
            )
            respdig = KD(HA1, noncebit)
        else:
            # XXX handle auth-int.
            return None

        self._thread_local.last_nonce = nonce

        # XXX should the partial digests be encoded too?
        base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
               'response="%s"' % (self.username, realm, nonce, path, respdig)
        if opaque:
            base += ', opaque="%s"' % opaque
        if algorithm:
            base += ', algorithm="%s"' % algorithm
        if entdig:
            base += ', digest="%s"' % entdig
        if qop:
            base += ', qop="auth", nc=%s, cnonce="%s"' % (ncvalue, cnonce)

        return 'Digest %s' % (base)

    def handle_redirect(self, r, **kwargs):
        """Reset num_401_calls counter on redirects."""
        if r.is_redirect:
            self._thread_local.num_401_calls = 1

    def handle_401(self, r, **kwargs):
        """
        Takes the given response and tries digest-auth, if needed.

        :rtype: requests.Response
        """

        # If response is not 4xx, do not auth
        # See https://github.com/requests/requests/issues/3772
        if not 400 <= r.status_code < 500:
            self._thread_local.num_401_calls = 1
            return r

        if self._thread_local.pos is not None:
            # Rewind the file position indicator of the body to where
            # it was to resend the request.
            r.request.body.seek(self._thread_local.pos)
        s_auth = r.headers.get('www-authenticate', '')

        # num_401_calls < 2 guards against retrying forever when the
        # server rejects our digest answer with another 401.
        if 'digest' in s_auth.lower() and self._thread_local.num_401_calls < 2:

            self._thread_local.num_401_calls += 1
            pat = re.compile(r'digest ', flags=re.IGNORECASE)
            self._thread_local.chal = parse_dict_header(pat.sub('', s_auth, count=1))

            # Consume content and release the original connection
            # to allow our new request to reuse the same one.
            r.content
            r.close()
            prep = r.request.copy()
            extract_cookies_to_jar(prep._cookies, r.request, r.raw)
            prep.prepare_cookies(prep._cookies)

            prep.headers['Authorization'] = self.build_digest_header(
                prep.method, prep.url)
            _r = r.connection.send(prep, **kwargs)
            _r.history.append(r)
            _r.request = prep

            return _r

        self._thread_local.num_401_calls = 1
        return r

    def __call__(self, r):
        # Initialize per-thread state, if needed
        self.init_per_thread_state()
        # If we have a saved nonce, skip the 401
        if self._thread_local.last_nonce:
            r.headers['Authorization'] = self.build_digest_header(r.method, r.url)
        try:
            # Remember the body position so handle_401 can rewind and resend.
            self._thread_local.pos = r.body.tell()
        except AttributeError:
            # In the case of HTTPDigestAuth being reused and the body of
            # the previous request was a file-like object, pos has the
            # file position of the previous body. Ensure it's set to
            # None.
            self._thread_local.pos = None
        r.register_hook('response', self.handle_401)
        r.register_hook('response', self.handle_redirect)
        self._thread_local.num_401_calls = 1

        return r

    def __eq__(self, other):
        # Equality is by credentials only, mirroring HTTPBasicAuth.
        return all([
            self.username == getattr(other, 'username', None),
            self.password == getattr(other, 'password', None)
        ])

    def __ne__(self, other):
        return not self == other
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/certs.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/certs.py new file mode 100644 index 0000000..9742f6e --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/certs.py | |||
@@ -0,0 +1,18 @@ | |||
1 | #!/usr/bin/env python | ||
2 | # -*- coding: utf-8 -*- | ||
3 | |||
4 | """ | ||
5 | requests.certs | ||
6 | ~~~~~~~~~~~~~~ | ||
7 | |||
8 | This module returns the preferred default CA certificate bundle. There is | ||
9 | only one — the one from the certifi package. | ||
10 | |||
11 | If you are packaging Requests, e.g., for a Linux distribution or a managed | ||
12 | environment, you can change the definition of where() to return a separately | ||
13 | packaged CA bundle. | ||
14 | """ | ||
from pip._vendor.certifi import where

if __name__ == '__main__':
    # Run as a script: print the path of the CA bundle certifi resolved.
    print(where())
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/compat.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/compat.py new file mode 100644 index 0000000..4cea25e --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/compat.py | |||
@@ -0,0 +1,73 @@ | |||
1 | # -*- coding: utf-8 -*- | ||
2 | |||
3 | """ | ||
4 | requests.compat | ||
5 | ~~~~~~~~~~~~~~~ | ||
6 | |||
7 | This module handles import compatibility issues between Python 2 and | ||
8 | Python 3. | ||
9 | """ | ||
10 | |||
from pip._vendor import chardet

import sys

# -------
# Pythons
# -------

# Syntax sugar.
_ver = sys.version_info

#: Python 2.x?
is_py2 = (_ver[0] == 2)

#: Python 3.x?
is_py3 = (_ver[0] == 3)

# Note: We've patched out simplejson support in pip because it prevents
#       upgrading simplejson on Windows.
# try:
#     import simplejson as json
# except (ImportError, SyntaxError):
#     # simplejson does not support Python 3.2, it throws a SyntaxError
#     # because of u'...' Unicode literals.
import json

# ---------
# Specifics
# ---------

# Each branch rebinds the same set of names so the rest of the package can
# import text/bytes aliases and urllib helpers from one place, regardless
# of interpreter version.
if is_py2:
    from urllib import (
        quote, unquote, quote_plus, unquote_plus, urlencode, getproxies,
        proxy_bypass, proxy_bypass_environment, getproxies_environment)
    from urlparse import urlparse, urlunparse, urljoin, urlsplit, urldefrag
    from urllib2 import parse_http_list
    import cookielib
    from Cookie import Morsel
    from StringIO import StringIO

    from pip._vendor.urllib3.packages.ordered_dict import OrderedDict

    # On py2, ``str`` is rebound to the text type (unicode) and ``bytes``
    # to the byte type (str); order of these assignments matters.
    builtin_str = str
    bytes = str
    str = unicode
    basestring = basestring
    numeric_types = (int, long, float)
    integer_types = (int, long)

elif is_py3:
    from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag
    from urllib.request import parse_http_list, getproxies, proxy_bypass, proxy_bypass_environment, getproxies_environment
    from http import cookiejar as cookielib
    from http.cookies import Morsel
    from io import StringIO
    from collections import OrderedDict

    # On py3 the builtin names already have the right meaning; basestring
    # becomes a tuple usable with isinstance().
    builtin_str = str
    str = str
    bytes = bytes
    basestring = (str, bytes)
    numeric_types = (int, float)
    integer_types = (int,)
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/cookies.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/cookies.py new file mode 100644 index 0000000..e69d22e --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/cookies.py | |||
@@ -0,0 +1,542 @@ | |||
1 | # -*- coding: utf-8 -*- | ||
2 | |||
3 | """ | ||
4 | requests.cookies | ||
5 | ~~~~~~~~~~~~~~~~ | ||
6 | |||
7 | Compatibility code to be able to use `cookielib.CookieJar` with requests. | ||
8 | |||
9 | requests.utils imports from here, so be careful with imports. | ||
10 | """ | ||
11 | |||
12 | import copy | ||
13 | import time | ||
14 | import calendar | ||
15 | import collections | ||
16 | |||
17 | from ._internal_utils import to_native_string | ||
18 | from .compat import cookielib, urlparse, urlunparse, Morsel | ||
19 | |||
try:
    import threading
except ImportError:
    # Interpreter built without thread support: fall back to the no-op shim.
    import dummy_threading as threading
24 | |||
25 | |||
class MockRequest(object):
    """Wraps a `requests.Request` to mimic a `urllib2.Request`.

    `cookielib.CookieJar` expects this interface in order to apply cookie
    policies, i.e. to decide whether a cookie may be set given the domains
    of the request and the cookie.

    The wrapped request is treated as read-only; headers that cookielib
    adds are collected separately and exposed through `get_new_headers()`
    (`get_cookie_header`, defined below, is the usual consumer).
    """

    def __init__(self, request):
        self._r = request
        self._new_headers = {}
        self.type = urlparse(self._r.url).scheme

    def get_type(self):
        return self.type

    def get_host(self):
        return urlparse(self._r.url).netloc

    def get_origin_req_host(self):
        return self.get_host()

    def get_full_url(self):
        # Without an explicit Host header, the request URL is authoritative.
        host_header = self._r.headers.get('Host')
        if not host_header:
            return self._r.url
        # A user-supplied Host header overrides the URL's netloc, so
        # rebuild the URL around it.
        host = to_native_string(self._r.headers['Host'], encoding='utf-8')
        parsed = urlparse(self._r.url)
        return urlunparse([
            parsed.scheme, host, parsed.path, parsed.params, parsed.query,
            parsed.fragment
        ])

    def is_unverifiable(self):
        return True

    def has_header(self, name):
        return name in self._r.headers or name in self._new_headers

    def get_header(self, name, default=None):
        return self._r.headers.get(name, self._new_headers.get(name, default))

    def add_header(self, key, val):
        """cookielib has no legitimate use for this method; add it back if you find one."""
        raise NotImplementedError("Cookie headers should be added with add_unredirected_header()")

    def add_unredirected_header(self, name, value):
        self._new_headers[name] = value

    def get_new_headers(self):
        return self._new_headers

    @property
    def unverifiable(self):
        return self.is_unverifiable()

    @property
    def origin_req_host(self):
        return self.get_origin_req_host()

    @property
    def host(self):
        return self.get_host()
96 | |||
97 | |||
class MockResponse(object):
    """Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`.

    Exposes the parsed HTTP headers from the server response the way
    `cookielib` expects to see them.
    """

    def __init__(self, headers):
        """Make a MockResponse for `cookielib` to read.

        :param headers: a httplib.HTTPMessage or analogous carrying the headers
        """
        self._headers = headers

    def info(self):
        # cookielib calls info() to obtain the header container.
        return self._headers

    def getheaders(self, name):
        # Bug fix: the original computed the header values and silently
        # discarded them (no ``return``), always yielding None.
        return self._headers.getheaders(name)
117 | |||
118 | |||
def extract_cookies_to_jar(jar, request, response):
    """Extract the cookies from the response into a CookieJar.

    :param jar: cookielib.CookieJar (not necessarily a RequestsCookieJar)
    :param request: our own requests.Request object
    :param response: urllib3.HTTPResponse object
    """
    # Nothing to do unless urllib3 kept the wrapped httplib.HTTPResponse
    # around (the _original_response attribute).
    original = getattr(response, '_original_response', None)
    if not original:
        return
    req = MockRequest(request)
    # Pull out the HTTPMessage with the headers and put it in the mock.
    res = MockResponse(original.msg)
    jar.extract_cookies(res, req)
134 | |||
135 | |||
def get_cookie_header(jar, request):
    """
    Produce an appropriate Cookie header string to be sent with `request`, or None.

    :rtype: str
    """
    # Let the jar's policy machinery decide which cookies apply, then read
    # back the header it produced (if any).
    mock = MockRequest(request)
    jar.add_cookie_header(mock)
    return mock.get_new_headers().get('Cookie')
145 | |||
146 | |||
def remove_cookie_by_name(cookiejar, name, domain=None, path=None):
    """Unsets a cookie by name, by default over all domains and paths.

    Wraps CookieJar.clear(), is O(n).
    """
    # Collect the matches first: CookieJar.clear() mutates the jar, so we
    # must not clear while iterating it.
    clearables = [
        (cookie.domain, cookie.path, cookie.name)
        for cookie in cookiejar
        if cookie.name == name
        and (domain is None or domain == cookie.domain)
        and (path is None or path == cookie.path)
    ]

    for domain, path, name in clearables:
        cookiejar.clear(domain, path, name)
164 | |||
165 | |||
class CookieConflictError(RuntimeError):
    """Raised when more than one cookie in the jar matches the lookup criteria.

    Disambiguate by calling .get/.set with explicit ``domain`` and ``path``
    arguments.
    """
170 | |||
171 | |||
172 | class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping): | ||
173 | """Compatibility class; is a cookielib.CookieJar, but exposes a dict | ||
174 | interface. | ||
175 | |||
176 | This is the CookieJar we create by default for requests and sessions that | ||
177 | don't specify one, since some clients may expect response.cookies and | ||
178 | session.cookies to support dict operations. | ||
179 | |||
180 | Requests does not use the dict interface internally; it's just for | ||
181 | compatibility with external client code. All requests code should work | ||
182 | out of the box with externally provided instances of ``CookieJar``, e.g. | ||
183 | ``LWPCookieJar`` and ``FileCookieJar``. | ||
184 | |||
185 | Unlike a regular CookieJar, this class is pickleable. | ||
186 | |||
187 | .. warning:: dictionary operations that are normally O(1) may be O(n). | ||
188 | """ | ||
189 | |||
190 | def get(self, name, default=None, domain=None, path=None): | ||
191 | """Dict-like get() that also supports optional domain and path args in | ||
192 | order to resolve naming collisions from using one cookie jar over | ||
193 | multiple domains. | ||
194 | |||
195 | .. warning:: operation is O(n), not O(1). | ||
196 | """ | ||
197 | try: | ||
198 | return self._find_no_duplicates(name, domain, path) | ||
199 | except KeyError: | ||
200 | return default | ||
201 | |||
202 | def set(self, name, value, **kwargs): | ||
203 | """Dict-like set() that also supports optional domain and path args in | ||
204 | order to resolve naming collisions from using one cookie jar over | ||
205 | multiple domains. | ||
206 | """ | ||
207 | # support client code that unsets cookies by assignment of a None value: | ||
208 | if value is None: | ||
209 | remove_cookie_by_name(self, name, domain=kwargs.get('domain'), path=kwargs.get('path')) | ||
210 | return | ||
211 | |||
212 | if isinstance(value, Morsel): | ||
213 | c = morsel_to_cookie(value) | ||
214 | else: | ||
215 | c = create_cookie(name, value, **kwargs) | ||
216 | self.set_cookie(c) | ||
217 | return c | ||
218 | |||
219 | def iterkeys(self): | ||
220 | """Dict-like iterkeys() that returns an iterator of names of cookies | ||
221 | from the jar. | ||
222 | |||
223 | .. seealso:: itervalues() and iteritems(). | ||
224 | """ | ||
225 | for cookie in iter(self): | ||
226 | yield cookie.name | ||
227 | |||
228 | def keys(self): | ||
229 | """Dict-like keys() that returns a list of names of cookies from the | ||
230 | jar. | ||
231 | |||
232 | .. seealso:: values() and items(). | ||
233 | """ | ||
234 | return list(self.iterkeys()) | ||
235 | |||
236 | def itervalues(self): | ||
237 | """Dict-like itervalues() that returns an iterator of values of cookies | ||
238 | from the jar. | ||
239 | |||
240 | .. seealso:: iterkeys() and iteritems(). | ||
241 | """ | ||
242 | for cookie in iter(self): | ||
243 | yield cookie.value | ||
244 | |||
245 | def values(self): | ||
246 | """Dict-like values() that returns a list of values of cookies from the | ||
247 | jar. | ||
248 | |||
249 | .. seealso:: keys() and items(). | ||
250 | """ | ||
251 | return list(self.itervalues()) | ||
252 | |||
253 | def iteritems(self): | ||
254 | """Dict-like iteritems() that returns an iterator of name-value tuples | ||
255 | from the jar. | ||
256 | |||
257 | .. seealso:: iterkeys() and itervalues(). | ||
258 | """ | ||
259 | for cookie in iter(self): | ||
260 | yield cookie.name, cookie.value | ||
261 | |||
262 | def items(self): | ||
263 | """Dict-like items() that returns a list of name-value tuples from the | ||
264 | jar. Allows client-code to call ``dict(RequestsCookieJar)`` and get a | ||
265 | vanilla python dict of key value pairs. | ||
266 | |||
267 | .. seealso:: keys() and values(). | ||
268 | """ | ||
269 | return list(self.iteritems()) | ||
270 | |||
271 | def list_domains(self): | ||
272 | """Utility method to list all the domains in the jar.""" | ||
273 | domains = [] | ||
274 | for cookie in iter(self): | ||
275 | if cookie.domain not in domains: | ||
276 | domains.append(cookie.domain) | ||
277 | return domains | ||
278 | |||
279 | def list_paths(self): | ||
280 | """Utility method to list all the paths in the jar.""" | ||
281 | paths = [] | ||
282 | for cookie in iter(self): | ||
283 | if cookie.path not in paths: | ||
284 | paths.append(cookie.path) | ||
285 | return paths | ||
286 | |||
287 | def multiple_domains(self): | ||
288 | """Returns True if there are multiple domains in the jar. | ||
289 | Returns False otherwise. | ||
290 | |||
291 | :rtype: bool | ||
292 | """ | ||
293 | domains = [] | ||
294 | for cookie in iter(self): | ||
295 | if cookie.domain is not None and cookie.domain in domains: | ||
296 | return True | ||
297 | domains.append(cookie.domain) | ||
298 | return False # there is only one domain in jar | ||
299 | |||
300 | def get_dict(self, domain=None, path=None): | ||
301 | """Takes as an argument an optional domain and path and returns a plain | ||
302 | old Python dict of name-value pairs of cookies that meet the | ||
303 | requirements. | ||
304 | |||
305 | :rtype: dict | ||
306 | """ | ||
307 | dictionary = {} | ||
308 | for cookie in iter(self): | ||
309 | if ( | ||
310 | (domain is None or cookie.domain == domain) and | ||
311 | (path is None or cookie.path == path) | ||
312 | ): | ||
313 | dictionary[cookie.name] = cookie.value | ||
314 | return dictionary | ||
315 | |||
    def __contains__(self, name):
        # Membership is tested by cookie *name* via the parent class.  If the
        # parent raises CookieConflictError, several cookies share this name,
        # which still means the name is present in the jar.
        try:
            return super(RequestsCookieJar, self).__contains__(name)
        except CookieConflictError:
            return True
321 | |||
    def __getitem__(self, name):
        """Dict-like __getitem__() for compatibility with client code. Throws
        exception if there are more than one cookie with name. In that case,
        use the more explicit get() method instead.

        .. warning:: operation is O(n), not O(1).

        :raises KeyError: if no cookie with that name exists.
        :raises CookieConflictError: if multiple cookies share the name.
        """
        return self._find_no_duplicates(name)
330 | |||
    def __setitem__(self, name, value):
        """Dict-like __setitem__ for compatibility with client code. Throws
        exception if there is already a cookie of that name in the jar. In that
        case, use the more explicit set() method instead.
        """
        # Delegates to set(); a None value therefore deletes the cookie.
        self.set(name, value)
337 | |||
    def __delitem__(self, name):
        """Deletes a cookie given a name. Wraps ``cookielib.CookieJar``'s
        ``remove_cookie_by_name()``.
        """
        # Module-level helper; removes every cookie matching this name.
        remove_cookie_by_name(self, name)
343 | |||
344 | def set_cookie(self, cookie, *args, **kwargs): | ||
345 | if hasattr(cookie.value, 'startswith') and cookie.value.startswith('"') and cookie.value.endswith('"'): | ||
346 | cookie.value = cookie.value.replace('\\"', '') | ||
347 | return super(RequestsCookieJar, self).set_cookie(cookie, *args, **kwargs) | ||
348 | |||
349 | def update(self, other): | ||
350 | """Updates this jar with cookies from another CookieJar or dict-like""" | ||
351 | if isinstance(other, cookielib.CookieJar): | ||
352 | for cookie in other: | ||
353 | self.set_cookie(copy.copy(cookie)) | ||
354 | else: | ||
355 | super(RequestsCookieJar, self).update(other) | ||
356 | |||
357 | def _find(self, name, domain=None, path=None): | ||
358 | """Requests uses this method internally to get cookie values. | ||
359 | |||
360 | If there are conflicting cookies, _find arbitrarily chooses one. | ||
361 | See _find_no_duplicates if you want an exception thrown if there are | ||
362 | conflicting cookies. | ||
363 | |||
364 | :param name: a string containing name of cookie | ||
365 | :param domain: (optional) string containing domain of cookie | ||
366 | :param path: (optional) string containing path of cookie | ||
367 | :return: cookie.value | ||
368 | """ | ||
369 | for cookie in iter(self): | ||
370 | if cookie.name == name: | ||
371 | if domain is None or cookie.domain == domain: | ||
372 | if path is None or cookie.path == path: | ||
373 | return cookie.value | ||
374 | |||
375 | raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path)) | ||
376 | |||
377 | def _find_no_duplicates(self, name, domain=None, path=None): | ||
378 | """Both ``__get_item__`` and ``get`` call this function: it's never | ||
379 | used elsewhere in Requests. | ||
380 | |||
381 | :param name: a string containing name of cookie | ||
382 | :param domain: (optional) string containing domain of cookie | ||
383 | :param path: (optional) string containing path of cookie | ||
384 | :raises KeyError: if cookie is not found | ||
385 | :raises CookieConflictError: if there are multiple cookies | ||
386 | that match name and optionally domain and path | ||
387 | :return: cookie.value | ||
388 | """ | ||
389 | toReturn = None | ||
390 | for cookie in iter(self): | ||
391 | if cookie.name == name: | ||
392 | if domain is None or cookie.domain == domain: | ||
393 | if path is None or cookie.path == path: | ||
394 | if toReturn is not None: # if there are multiple cookies that meet passed in criteria | ||
395 | raise CookieConflictError('There are multiple cookies with name, %r' % (name)) | ||
396 | toReturn = cookie.value # we will eventually return this as long as no cookie conflict | ||
397 | |||
398 | if toReturn: | ||
399 | return toReturn | ||
400 | raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path)) | ||
401 | |||
    def __getstate__(self):
        """Unlike a normal CookieJar, this class is pickleable."""
        state = self.__dict__.copy()
        # remove the unpickleable RLock object; __setstate__ recreates it
        state.pop('_cookies_lock')
        return state
408 | |||
    def __setstate__(self, state):
        """Unlike a normal CookieJar, this class is pickleable."""
        self.__dict__.update(state)
        # __getstate__ dropped the RLock; restore a fresh one if missing.
        if '_cookies_lock' not in self.__dict__:
            self._cookies_lock = threading.RLock()
414 | |||
415 | def copy(self): | ||
416 | """Return a copy of this RequestsCookieJar.""" | ||
417 | new_cj = RequestsCookieJar() | ||
418 | new_cj.update(self) | ||
419 | return new_cj | ||
420 | |||
421 | |||
def _copy_cookie_jar(jar):
    """Return a copy of *jar*, or None if *jar* is None."""
    if jar is None:
        return None

    if hasattr(jar, 'copy'):
        # RequestsCookieJar instances know how to copy themselves.
        return jar.copy()

    # Generic cookielib.CookieJar: clone the jar shell, empty it, then refill
    # it with a copy of every cookie so the two jars share no state.
    duplicate = copy.copy(jar)
    duplicate.clear()
    for cookie in jar:
        duplicate.set_cookie(copy.copy(cookie))
    return duplicate
435 | |||
436 | |||
def create_cookie(name, value, **kwargs):
    """Make a cookie from underspecified parameters.

    By default, the pair of `name` and `value` will be set for the domain ''
    and sent on every request (this is sometimes called a "supercookie").
    """
    result = {
        'version': 0,
        'name': name,
        'value': value,
        'port': None,
        'domain': '',
        'path': '/',
        'secure': False,
        'expires': None,
        'discard': True,
        'comment': None,
        'comment_url': None,
        'rest': {'HttpOnly': None},
        'rfc2109': False,
    }

    unexpected = set(kwargs) - set(result)
    if unexpected:
        err = 'create_cookie() got unexpected keyword arguments: %s'
        raise TypeError(err % list(unexpected))

    result.update(kwargs)
    # cookielib.Cookie requires the *_specified flags to be consistent with
    # the actual values, so derive them after merging caller overrides.
    result['port_specified'] = bool(result['port'])
    result['domain_specified'] = bool(result['domain'])
    result['domain_initial_dot'] = result['domain'].startswith('.')
    result['path_specified'] = bool(result['path'])

    return cookielib.Cookie(**result)
470 | |||
471 | |||
def morsel_to_cookie(morsel):
    """Convert a Morsel object into a Cookie containing the one k/v pair."""

    # Derive an absolute expiry: max-age (relative seconds) wins over the
    # absolute 'expires' date string.
    expires = None
    max_age = morsel['max-age']
    if max_age:
        try:
            expires = int(time.time() + int(max_age))
        except ValueError:
            raise TypeError('max-age: %s must be integer' % morsel['max-age'])
    elif morsel['expires']:
        time_template = '%a, %d-%b-%Y %H:%M:%S GMT'
        expires = calendar.timegm(time.strptime(morsel['expires'], time_template))

    return create_cookie(
        comment=morsel['comment'],
        comment_url=bool(morsel['comment']),
        discard=False,
        domain=morsel['domain'],
        expires=expires,
        name=morsel.key,
        path=morsel['path'],
        port=None,
        rest={'HttpOnly': morsel['httponly']},
        rfc2109=False,
        secure=bool(morsel['secure']),
        value=morsel.value,
        version=morsel['version'] or 0,
    )
501 | |||
502 | |||
def cookiejar_from_dict(cookie_dict, cookiejar=None, overwrite=True):
    """Returns a CookieJar from a key/value dictionary.

    :param cookie_dict: Dict of key/values to insert into CookieJar.
    :param cookiejar: (optional) A cookiejar to add the cookies to.
    :param overwrite: (optional) If False, will not replace cookies
        already in the jar with new ones.
    """
    if cookiejar is None:
        cookiejar = RequestsCookieJar()

    if cookie_dict is None:
        return cookiejar

    # Snapshot existing names once so the overwrite check is O(1) per key.
    existing = {cookie.name for cookie in cookiejar}
    for name in cookie_dict:
        if overwrite or name not in existing:
            cookiejar.set_cookie(create_cookie(name, cookie_dict[name]))

    return cookiejar
521 | |||
522 | |||
def merge_cookies(cookiejar, cookies):
    """Add cookies to cookiejar and returns a merged CookieJar.

    :param cookiejar: CookieJar object to add the cookies to.
    :param cookies: Dictionary or CookieJar object to be added.
    """
    if not isinstance(cookiejar, cookielib.CookieJar):
        raise ValueError('You can only merge into CookieJar')

    if isinstance(cookies, dict):
        return cookiejar_from_dict(cookies, cookiejar=cookiejar, overwrite=False)

    if isinstance(cookies, cookielib.CookieJar):
        try:
            cookiejar.update(cookies)
        except AttributeError:
            # Plain cookielib jars have no update(); copy cookie by cookie.
            for cookie_in_jar in cookies:
                cookiejar.set_cookie(cookie_in_jar)

    return cookiejar
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/exceptions.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/exceptions.py new file mode 100644 index 0000000..377c4c2 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/exceptions.py | |||
@@ -0,0 +1,122 @@ | |||
1 | # -*- coding: utf-8 -*- | ||
2 | |||
3 | """ | ||
4 | requests.exceptions | ||
5 | ~~~~~~~~~~~~~~~~~~~ | ||
6 | |||
7 | This module contains the set of Requests' exceptions. | ||
8 | """ | ||
9 | from pip._vendor.urllib3.exceptions import HTTPError as BaseHTTPError | ||
10 | |||
11 | |||
class RequestException(IOError):
    """There was an ambiguous exception that occurred while handling your
    request.
    """

    def __init__(self, *args, **kwargs):
        """Initialize RequestException with `request` and `response` objects."""
        self.response = kwargs.pop('response', None)
        self.request = kwargs.pop('request', None)
        # Borrow the request from the response when the caller supplied a
        # response but no explicit request.
        if (self.response is not None and not self.request
                and hasattr(self.response, 'request')):
            self.request = self.response.request
        super(RequestException, self).__init__(*args, **kwargs)
26 | |||
27 | |||
class HTTPError(RequestException):
    """An HTTP error occurred.

    Subclasses :class:`RequestException`, so generic request-error handlers
    also catch it.
    """
30 | |||
31 | |||
class ConnectionError(RequestException):
    """A Connection error occurred.

    Base class for the more specific :class:`ProxyError` and
    :class:`SSLError` below.
    """
34 | |||
35 | |||
class ProxyError(ConnectionError):
    """A proxy error occurred.

    Inherits :class:`ConnectionError`, so connection-failure handlers also
    catch proxy failures.
    """
38 | |||
39 | |||
class SSLError(ConnectionError):
    """An SSL error occurred.

    Inherits :class:`ConnectionError`, so connection-failure handlers also
    catch TLS/SSL failures.
    """
42 | |||
43 | |||
class Timeout(RequestException):
    """The request timed out.

    Catching this error will catch both
    :exc:`~requests.exceptions.ConnectTimeout` and
    :exc:`~requests.exceptions.ReadTimeout` errors.
    """
51 | |||
52 | |||
class ConnectTimeout(ConnectionError, Timeout):
    """The request timed out while trying to connect to the remote server.

    Requests that produced this error are safe to retry.

    Inherits both :class:`ConnectionError` and :class:`Timeout`, so handlers
    for either category will catch it.
    """
58 | |||
59 | |||
class ReadTimeout(Timeout):
    """The server did not send any data in the allotted amount of time.

    A :class:`Timeout` subclass raised after the connection succeeded.
    """
62 | |||
63 | |||
class URLRequired(RequestException):
    """A valid URL is required to make a request."""
66 | |||
67 | |||
class TooManyRedirects(RequestException):
    """Too many redirects.

    Raised when the number of followed redirects exceeds the configured
    limit (see ``DEFAULT_REDIRECT_LIMIT`` in models.py).
    """
70 | |||
71 | |||
class MissingSchema(RequestException, ValueError):
    """The URL schema (e.g. http or https) is missing.

    Also a :class:`ValueError`, so callers catching ``ValueError`` see it too.
    """
74 | |||
75 | |||
class InvalidSchema(RequestException, ValueError):
    """See defaults.py for valid schemas.

    Also a :class:`ValueError`, so callers catching ``ValueError`` see it too.
    """
78 | |||
79 | |||
class InvalidURL(RequestException, ValueError):
    """The URL provided was somehow invalid.

    Also a :class:`ValueError`, so callers catching ``ValueError`` see it too.
    """
82 | |||
83 | |||
class InvalidHeader(RequestException, ValueError):
    """The header value provided was somehow invalid.

    Also a :class:`ValueError`, so callers catching ``ValueError`` see it too.
    """
86 | |||
87 | |||
class ChunkedEncodingError(RequestException):
    """The server declared chunked encoding but sent an invalid chunk."""
90 | |||
91 | |||
class ContentDecodingError(RequestException, BaseHTTPError):
    """Failed to decode response content.

    Also subclasses urllib3's ``HTTPError`` (imported as ``BaseHTTPError``)
    so urllib3-level handlers catch it as well.
    """
94 | |||
95 | |||
class StreamConsumedError(RequestException, TypeError):
    """The content for this response was already consumed.

    Also a :class:`TypeError`, so callers catching ``TypeError`` see it too.
    """
98 | |||
99 | |||
class RetryError(RequestException):
    """Custom retries logic failed"""
102 | |||
103 | |||
class UnrewindableBodyError(RequestException):
    """Requests encountered an error when trying to rewind a body"""
106 | |||
107 | # Warnings | ||
108 | |||
109 | |||
class RequestsWarning(Warning):
    """Base warning category for Requests."""
113 | |||
114 | |||
class FileModeWarning(RequestsWarning, DeprecationWarning):
    """A file was opened in text mode, but Requests determined its binary length."""
118 | |||
119 | |||
class RequestsDependencyWarning(RequestsWarning):
    """An imported dependency doesn't match the expected version range."""
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/help.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/help.py new file mode 100644 index 0000000..28385f8 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/help.py | |||
@@ -0,0 +1,120 @@ | |||
1 | """Module containing bug report helper(s).""" | ||
2 | from __future__ import print_function | ||
3 | |||
4 | import json | ||
5 | import platform | ||
6 | import sys | ||
7 | import ssl | ||
8 | |||
9 | from pip._vendor import idna | ||
10 | from pip._vendor import urllib3 | ||
11 | from pip._vendor import chardet | ||
12 | |||
13 | from . import __version__ as requests_version | ||
14 | |||
15 | try: | ||
16 | from .packages.urllib3.contrib import pyopenssl | ||
17 | except ImportError: | ||
18 | pyopenssl = None | ||
19 | OpenSSL = None | ||
20 | cryptography = None | ||
21 | else: | ||
22 | import OpenSSL | ||
23 | import cryptography | ||
24 | |||
25 | |||
def _implementation():
    """Return a dict with the Python implementation and version.

    Provide both the name and the version of the Python implementation
    currently running. For example, on CPython 2.7.5 it will return
    {'name': 'CPython', 'version': '2.7.5'}.

    This function works best on CPython and PyPy: in particular, it probably
    doesn't work for Jython or IronPython. Future investigation should be done
    to work out the correct shape of the code for those platforms.
    """
    implementation = platform.python_implementation()

    if implementation == 'PyPy':
        pypy = sys.pypy_version_info
        implementation_version = '%s.%s.%s' % (pypy.major, pypy.minor, pypy.micro)
        # Append e.g. 'beta0' for pre-release PyPy builds.
        if pypy.releaselevel != 'final':
            implementation_version += pypy.releaselevel
    elif implementation == 'CPython':
        implementation_version = platform.python_version()
    elif implementation in ('Jython', 'IronPython'):
        implementation_version = platform.python_version()  # Complete Guess
    else:
        implementation_version = 'Unknown'

    return {'name': implementation, 'version': implementation_version}
57 | |||
58 | |||
def info():
    """Generate information for a bug report."""
    try:
        platform_info = {
            'system': platform.system(),
            'release': platform.release(),
        }
    except IOError:
        # platform calls can fail in restricted environments.
        platform_info = {'system': 'Unknown', 'release': 'Unknown'}

    pyopenssl_info = {'version': None, 'openssl_version': ''}
    if OpenSSL:
        pyopenssl_info = {
            'version': OpenSSL.__version__,
            'openssl_version': '%x' % OpenSSL.SSL.OPENSSL_VERSION_NUMBER,
        }

    # OPENSSL_VERSION_NUMBER doesn't exist in the Python 2.6 ssl module.
    system_ssl = getattr(ssl, 'OPENSSL_VERSION_NUMBER', None)
    system_ssl_info = {
        'version': '%x' % system_ssl if system_ssl is not None else ''
    }

    return {
        'platform': platform_info,
        'implementation': _implementation(),
        'system_ssl': system_ssl_info,
        'using_pyopenssl': pyopenssl is not None,
        'pyOpenSSL': pyopenssl_info,
        'urllib3': {'version': urllib3.__version__},
        'chardet': {'version': chardet.__version__},
        'cryptography': {'version': getattr(cryptography, '__version__', '')},
        'idna': {'version': getattr(idna, '__version__', '')},
        'requests': {
            'version': requests_version,
        },
    }
112 | |||
113 | |||
def main():
    """Pretty-print the bug information as JSON."""
    report = info()
    print(json.dumps(report, indent=2, sort_keys=True))
117 | |||
118 | |||
# Allow running this module directly to dump the bug-report JSON.
if __name__ == '__main__':
    main()
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/hooks.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/hooks.py new file mode 100644 index 0000000..14db0c8 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/hooks.py | |||
@@ -0,0 +1,34 @@ | |||
1 | # -*- coding: utf-8 -*- | ||
2 | |||
3 | """ | ||
4 | requests.hooks | ||
5 | ~~~~~~~~~~~~~~ | ||
6 | |||
7 | This module provides the capabilities for the Requests hooks system. | ||
8 | |||
9 | Available hooks: | ||
10 | |||
11 | ``response``: | ||
12 | The response generated from a Request. | ||
13 | """ | ||
# TODO: 'response' is currently the only hook event.
HOOKS = ['response']


def default_hooks():
    """Return a fresh mapping of hook event name -> empty callback list."""
    return {event: [] for event in HOOKS}
21 | |||
22 | |||
def dispatch_hook(key, hooks, hook_data, **kwargs):
    """Dispatches a hook dictionary on a given piece of data."""
    registered = (hooks or {}).get(key)
    if not registered:
        return hook_data
    # A single callable is treated as a one-element hook list.
    if hasattr(registered, '__call__'):
        registered = [registered]
    for hook in registered:
        updated = hook(hook_data, **kwargs)
        # A hook returning None leaves the data unchanged.
        if updated is not None:
            hook_data = updated
    return hook_data
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/models.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/models.py new file mode 100644 index 0000000..6f5b0fb --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/models.py | |||
@@ -0,0 +1,948 @@ | |||
1 | # -*- coding: utf-8 -*- | ||
2 | |||
3 | """ | ||
4 | requests.models | ||
5 | ~~~~~~~~~~~~~~~ | ||
6 | |||
7 | This module contains the primary objects that power Requests. | ||
8 | """ | ||
9 | |||
10 | import collections | ||
11 | import datetime | ||
12 | import sys | ||
13 | |||
14 | # Import encoding now, to avoid implicit import later. | ||
15 | # Implicit import within threads may cause LookupError when standard library is in a ZIP, | ||
16 | # such as in Embedded Python. See https://github.com/requests/requests/issues/3578. | ||
17 | import encodings.idna | ||
18 | |||
19 | from pip._vendor.urllib3.fields import RequestField | ||
20 | from pip._vendor.urllib3.filepost import encode_multipart_formdata | ||
21 | from pip._vendor.urllib3.util import parse_url | ||
22 | from pip._vendor.urllib3.exceptions import ( | ||
23 | DecodeError, ReadTimeoutError, ProtocolError, LocationParseError) | ||
24 | |||
25 | from io import UnsupportedOperation | ||
26 | from .hooks import default_hooks | ||
27 | from .structures import CaseInsensitiveDict | ||
28 | |||
29 | from .auth import HTTPBasicAuth | ||
30 | from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar | ||
31 | from .exceptions import ( | ||
32 | HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError, | ||
33 | ContentDecodingError, ConnectionError, StreamConsumedError) | ||
34 | from ._internal_utils import to_native_string, unicode_is_ascii | ||
35 | from .utils import ( | ||
36 | guess_filename, get_auth_from_url, requote_uri, | ||
37 | stream_decode_response_unicode, to_key_val_list, parse_header_links, | ||
38 | iter_slices, guess_json_utf, super_len, check_header_validity) | ||
39 | from .compat import ( | ||
40 | cookielib, urlunparse, urlsplit, urlencode, str, bytes, | ||
41 | is_py2, chardet, builtin_str, basestring) | ||
42 | from .compat import json as complexjson | ||
43 | from .status_codes import codes | ||
44 | |||
#: The set of HTTP status codes that indicate an automatically
#: processable redirect.
REDIRECT_STATI = (
    codes.moved,              # 301
    codes.found,              # 302
    codes.other,              # 303
    codes.temporary_redirect, # 307
    codes.permanent_redirect, # 308
)

# Cap on redirects followed per request — presumably enforced by the Session
# redirect-resolution code, which is not visible in this chunk.
DEFAULT_REDIRECT_LIMIT = 30
# Default chunk sizes (bytes) for Response.iter_content / iter_lines.
CONTENT_CHUNK_SIZE = 10 * 1024
ITER_CHUNK_SIZE = 512
58 | |||
59 | |||
class RequestEncodingMixin(object):
    """Mixin providing URL and body encoding helpers for request objects.

    Expects the host class to supply a ``url`` attribute.
    """

    @property
    def path_url(self):
        """Build the path URL to use."""

        url = []

        p = urlsplit(self.url)

        # An empty path still needs a leading '/' in the request line.
        path = p.path
        if not path:
            path = '/'

        url.append(path)

        query = p.query
        if query:
            url.append('?')
            url.append(query)

        return ''.join(url)

    @staticmethod
    def _encode_params(data):
        """Encode parameters in a piece of data.

        Will successfully encode parameters when passed as a dict or a list of
        2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
        if parameters are supplied as a dict.
        """

        # Strings/bytes and file-like objects are passed through untouched.
        if isinstance(data, (str, bytes)):
            return data
        elif hasattr(data, 'read'):
            return data
        elif hasattr(data, '__iter__'):
            result = []
            for k, vs in to_key_val_list(data):
                # A scalar value is promoted to a one-element list so a key
                # may map to either one value or a sequence of values.
                if isinstance(vs, basestring) or not hasattr(vs, '__iter__'):
                    vs = [vs]
                for v in vs:
                    if v is not None:
                        result.append(
                            (k.encode('utf-8') if isinstance(k, str) else k,
                             v.encode('utf-8') if isinstance(v, str) else v))
            return urlencode(result, doseq=True)
        else:
            return data

    @staticmethod
    def _encode_files(files, data):
        """Build the body for a multipart/form-data request.

        Will successfully encode files when passed as a dict or a list of
        tuples. Order is retained if data is a list of tuples but arbitrary
        if parameters are supplied as a dict.
        The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype)
        or 4-tuples (filename, fileobj, contentype, custom_headers).

        :returns: (body, content_type) suitable for an HTTP request.
        :raises ValueError: if *files* is empty or *data* is a string.
        """
        if (not files):
            raise ValueError("Files must be provided.")
        elif isinstance(data, basestring):
            raise ValueError("Data must not be a string.")

        new_fields = []
        fields = to_key_val_list(data or {})
        files = to_key_val_list(files or {})

        # Regular form fields go in first, normalized to (str-key, bytes-value).
        for field, val in fields:
            if isinstance(val, basestring) or not hasattr(val, '__iter__'):
                val = [val]
            for v in val:
                if v is not None:
                    # Don't call str() on bytestrings: in Py3 it all goes wrong.
                    if not isinstance(v, bytes):
                        v = str(v)

                    new_fields.append(
                        (field.decode('utf-8') if isinstance(field, bytes) else field,
                         v.encode('utf-8') if isinstance(v, str) else v))

        for (k, v) in files:
            # support for explicit filename: unpack the 2/3/4-tuple forms,
            # otherwise treat the value itself as the file object.
            ft = None
            fh = None
            if isinstance(v, (tuple, list)):
                if len(v) == 2:
                    fn, fp = v
                elif len(v) == 3:
                    fn, fp, ft = v
                else:
                    fn, fp, ft, fh = v
            else:
                fn = guess_filename(v) or k
                fp = v

            # Literal string/bytes payloads are used as-is; anything else is
            # assumed file-like and read fully into memory.
            if isinstance(fp, (str, bytes, bytearray)):
                fdata = fp
            else:
                fdata = fp.read()

            rf = RequestField(name=k, data=fdata, filename=fn, headers=fh)
            rf.make_multipart(content_type=ft)
            new_fields.append(rf)

        body, content_type = encode_multipart_formdata(new_fields)

        return body, content_type
168 | |||
169 | |||
class RequestHooksMixin(object):
    """Mixin providing hook registration for request objects.

    Expects the host class to supply a ``hooks`` dict mapping event names to
    lists of callables (see ``hooks.default_hooks()``).
    """

    def register_hook(self, event, hook):
        """Properly register a hook.

        :param event: name of an event already present in ``self.hooks``.
        :param hook: a callable, or an iterable of callables (non-callables
            in the iterable are silently skipped).
        :raises ValueError: if *event* is not a known hook event.
        """

        if event not in self.hooks:
            raise ValueError('Unsupported event specified, with event name "%s"' % (event))

        # Bug fix: `collections.Callable` is deprecated since Python 3.3 and
        # removed in 3.10 (moved to collections.abc); the builtin callable()
        # performs the same check portably.
        if callable(hook):
            self.hooks[event].append(hook)
        elif hasattr(hook, '__iter__'):
            self.hooks[event].extend(h for h in hook if callable(h))

    def deregister_hook(self, event, hook):
        """Deregister a previously registered hook.
        Returns True if the hook existed, False if not.
        """

        try:
            self.hooks[event].remove(hook)
            return True
        except ValueError:
            return False
192 | |||
193 | |||
class Request(RequestHooksMixin):
    """A user-created :class:`Request <Request>` object.

    Used to prepare a :class:`PreparedRequest <PreparedRequest>`, which is sent to the server.

    :param method: HTTP method to use.
    :param url: URL to send.
    :param headers: dictionary of headers to send.
    :param files: dictionary of {filename: fileobject} files to multipart upload.
    :param data: the body to attach to the request. If a dictionary is provided, form-encoding will take place.
    :param json: json for the body to attach to the request (if files or data is not specified).
    :param params: dictionary of URL parameters to append to the URL.
    :param auth: Auth handler or (user, pass) tuple.
    :param cookies: dictionary or CookieJar of cookies to attach to this request.
    :param hooks: dictionary of callback hooks, for internal usage.

    Usage::

      >>> import requests
      >>> req = requests.Request('GET', 'http://httpbin.org/get')
      >>> req.prepare()
      <PreparedRequest [GET]>
    """

    def __init__(self,
            method=None, url=None, headers=None, files=None, data=None,
            params=None, auth=None, cookies=None, hooks=None, json=None):

        # Register caller-supplied hooks on top of the default (empty) set.
        self.hooks = default_hooks()
        for event, hook in list((hooks or {}).items()):
            self.register_hook(event=event, hook=hook)

        self.method = method
        self.url = url
        # Fall back to empty containers so downstream code can iterate freely.
        self.headers = headers if headers is not None else {}
        self.files = files if files is not None else []
        self.data = data if data is not None else []
        self.json = json
        self.params = params if params is not None else {}
        self.auth = auth
        self.cookies = cookies

    def __repr__(self):
        return '<Request [{0}]>'.format(self.method)

    def prepare(self):
        """Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it."""
        prepared = PreparedRequest()
        prepared.prepare(
            method=self.method,
            url=self.url,
            headers=self.headers,
            files=self.files,
            data=self.data,
            json=self.json,
            params=self.params,
            auth=self.auth,
            cookies=self.cookies,
            hooks=self.hooks,
        )
        return prepared
262 | |||
263 | |||
class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
    """The fully mutable :class:`PreparedRequest <PreparedRequest>` object,
    containing the exact bytes that will be sent to the server.

    Generated from either a :class:`Request <Request>` object or manually.

    Usage::

      >>> import requests
      >>> req = requests.Request('GET', 'http://httpbin.org/get')
      >>> r = req.prepare()
      <PreparedRequest [GET]>

      >>> s = requests.Session()
      >>> s.send(r)
      <Response [200]>
    """

    def __init__(self):
        #: HTTP verb to send to the server.
        self.method = None
        #: HTTP URL to send the request to.
        self.url = None
        #: dictionary of HTTP headers.
        self.headers = None
        # The `CookieJar` used to create the Cookie header will be stored here
        # after prepare_cookies is called
        self._cookies = None
        #: request body to send to the server.
        self.body = None
        #: dictionary of callback hooks, for internal usage.
        self.hooks = default_hooks()
        #: integer denoting starting position of a readable file-like body.
        self._body_position = None

    def prepare(self,
            method=None, url=None, headers=None, files=None, data=None,
            params=None, auth=None, cookies=None, hooks=None, json=None):
        """Prepares the entire request with the given parameters.

        The prepare_* steps below are order-sensitive: headers must exist
        before cookies/body can add to them, and auth runs over the fully
        prepared request (it may rewrite any of it).
        """

        self.prepare_method(method)
        self.prepare_url(url, params)
        self.prepare_headers(headers)
        self.prepare_cookies(cookies)
        self.prepare_body(data, files, json)
        self.prepare_auth(auth, url)

        # Note that prepare_auth must be last to enable authentication schemes
        # such as OAuth to work on a fully prepared request.

        # This MUST go after prepare_auth. Authenticators could add a hook
        self.prepare_hooks(hooks)

    def __repr__(self):
        return '<PreparedRequest [%s]>' % (self.method)

    def copy(self):
        """Return a shallow-ish copy of this prepared request (used when
        following redirects)."""
        p = PreparedRequest()
        p.method = self.method
        p.url = self.url
        p.headers = self.headers.copy() if self.headers is not None else None
        p._cookies = _copy_cookie_jar(self._cookies)
        p.body = self.body
        # NOTE(review): the hooks dict is shared with the original, not
        # copied — presumably intentional; confirm callers never mutate
        # hooks on a per-copy basis.
        p.hooks = self.hooks
        p._body_position = self._body_position
        return p

    def prepare_method(self, method):
        """Prepares the given HTTP method."""
        self.method = method
        if self.method is not None:
            # HTTP methods are case-sensitive per spec; requests uppercases.
            self.method = to_native_string(self.method.upper())

    @staticmethod
    def _get_idna_encoded_host(host):
        """IDNA-encode a hostname, raising UnicodeError on failure
        (the caller, prepare_url, catches UnicodeError)."""
        from pip._vendor import idna

        try:
            host = idna.encode(host, uts46=True).decode('utf-8')
        except idna.IDNAError:
            raise UnicodeError
        return host

    def prepare_url(self, url, params):
        """Prepares the given HTTP URL."""
        #: Accept objects that have string representations.
        #: We're unable to blindly call unicode/str functions
        #: as this will include the bytestring indicator (b'')
        #: on python 3.x.
        #: https://github.com/requests/requests/pull/2238
        if isinstance(url, bytes):
            url = url.decode('utf8')
        else:
            url = unicode(url) if is_py2 else str(url)

        # Remove leading whitespaces from url
        url = url.lstrip()

        # Don't do any URL preparation for non-HTTP schemes like `mailto`,
        # `data` etc to work around exceptions from `url_parse`, which
        # handles RFC 3986 only.
        if ':' in url and not url.lower().startswith('http'):
            self.url = url
            return

        # Support for unicode domain names and paths.
        try:
            scheme, auth, host, port, path, query, fragment = parse_url(url)
        except LocationParseError as e:
            raise InvalidURL(*e.args)

        if not scheme:
            error = ("Invalid URL {0!r}: No schema supplied. Perhaps you meant http://{0}?")
            error = error.format(to_native_string(url, 'utf8'))

            raise MissingSchema(error)

        if not host:
            raise InvalidURL("Invalid URL %r: No host supplied" % url)

        # In general, we want to try IDNA encoding the hostname if the string contains
        # non-ASCII characters. This allows users to automatically get the correct IDNA
        # behaviour. For strings containing only ASCII characters, we need to also verify
        # it doesn't start with a wildcard (*), before allowing the unencoded hostname.
        if not unicode_is_ascii(host):
            try:
                host = self._get_idna_encoded_host(host)
            except UnicodeError:
                raise InvalidURL('URL has an invalid label.')
        elif host.startswith(u'*'):
            raise InvalidURL('URL has an invalid label.')

        # Carefully reconstruct the network location
        netloc = auth or ''
        if netloc:
            netloc += '@'
        netloc += host
        if port:
            netloc += ':' + str(port)

        # Bare domains aren't valid URLs.
        if not path:
            path = '/'

        # On py2, urlunparse needs all components in the same (byte) type.
        if is_py2:
            if isinstance(scheme, str):
                scheme = scheme.encode('utf-8')
            if isinstance(netloc, str):
                netloc = netloc.encode('utf-8')
            if isinstance(path, str):
                path = path.encode('utf-8')
            if isinstance(query, str):
                query = query.encode('utf-8')
            if isinstance(fragment, str):
                fragment = fragment.encode('utf-8')

        if isinstance(params, (str, bytes)):
            params = to_native_string(params)

        # Caller-supplied params are appended to any query already in the URL.
        enc_params = self._encode_params(params)
        if enc_params:
            if query:
                query = '%s&%s' % (query, enc_params)
            else:
                query = enc_params

        url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment]))
        self.url = url

    def prepare_headers(self, headers):
        """Prepares the given HTTP headers."""

        self.headers = CaseInsensitiveDict()
        if headers:
            for header in headers.items():
                # Raise exception on invalid header value.
                check_header_validity(header)
                name, value = header
                self.headers[to_native_string(name)] = value

    def prepare_body(self, data, files, json=None):
        """Prepares the given HTTP body data.

        Precedence: a non-empty ``data`` wins over ``json``; ``files``
        trigger multipart encoding; a non-container iterable ``data`` is
        streamed as-is.
        """

        # Check if file, fo, generator, iterator.
        # If not, run through normal process.

        # Nottin' on you.
        body = None
        content_type = None

        if not data and json is not None:
            # urllib3 requires a bytes-like body. Python 2's json.dumps
            # provides this natively, but Python 3 gives a Unicode string.
            content_type = 'application/json'
            body = complexjson.dumps(json)
            if not isinstance(body, bytes):
                body = body.encode('utf-8')

        # NOTE(review): collections.Mapping is the py2-era ABC spelling;
        # on Python 3.3+ the canonical name is collections.abc.Mapping
        # (the old alias was removed in 3.10).
        is_stream = all([
            hasattr(data, '__iter__'),
            not isinstance(data, (basestring, list, tuple, collections.Mapping))
        ])

        try:
            length = super_len(data)
        except (TypeError, AttributeError, UnsupportedOperation):
            length = None

        if is_stream:
            body = data

            if getattr(body, 'tell', None) is not None:
                # Record the current file position before reading.
                # This will allow us to rewind a file in the event
                # of a redirect.
                try:
                    self._body_position = body.tell()
                except (IOError, OSError):
                    # This differentiates from None, allowing us to catch
                    # a failed `tell()` later when trying to rewind the body
                    self._body_position = object()

            if files:
                raise NotImplementedError('Streamed bodies and files are mutually exclusive.')

            if length:
                self.headers['Content-Length'] = builtin_str(length)
            else:
                self.headers['Transfer-Encoding'] = 'chunked'
        else:
            # Multi-part file uploads.
            if files:
                (body, content_type) = self._encode_files(files, data)
            else:
                if data:
                    body = self._encode_params(data)
                    if isinstance(data, basestring) or hasattr(data, 'read'):
                        content_type = None
                    else:
                        content_type = 'application/x-www-form-urlencoded'

            self.prepare_content_length(body)

            # Add content-type if it wasn't explicitly provided.
            if content_type and ('content-type' not in self.headers):
                self.headers['Content-Type'] = content_type

        self.body = body

    def prepare_content_length(self, body):
        """Prepare Content-Length header based on request method and body"""
        if body is not None:
            length = super_len(body)
            if length:
                # If length exists, set it. Otherwise, we fallback
                # to Transfer-Encoding: chunked.
                self.headers['Content-Length'] = builtin_str(length)
        elif self.method not in ('GET', 'HEAD') and self.headers.get('Content-Length') is None:
            # Set Content-Length to 0 for methods that can have a body
            # but don't provide one. (i.e. not GET or HEAD)
            self.headers['Content-Length'] = '0'

    def prepare_auth(self, auth, url=''):
        """Prepares the given HTTP auth data."""

        # If no Auth is explicitly provided, extract it from the URL first.
        if auth is None:
            url_auth = get_auth_from_url(self.url)
            auth = url_auth if any(url_auth) else None

        if auth:
            if isinstance(auth, tuple) and len(auth) == 2:
                # special-case basic HTTP auth
                auth = HTTPBasicAuth(*auth)

            # Allow auth to make its changes.
            r = auth(self)

            # Update self to reflect the auth changes.
            self.__dict__.update(r.__dict__)

            # Recompute Content-Length
            self.prepare_content_length(self.body)

    def prepare_cookies(self, cookies):
        """Prepares the given HTTP cookie data.

        This function eventually generates a ``Cookie`` header from the
        given cookies using cookielib. Due to cookielib's design, the header
        will not be regenerated if it already exists, meaning this function
        can only be called once for the life of the
        :class:`PreparedRequest <PreparedRequest>` object. Any subsequent calls
        to ``prepare_cookies`` will have no actual effect, unless the "Cookie"
        header is removed beforehand.
        """
        if isinstance(cookies, cookielib.CookieJar):
            self._cookies = cookies
        else:
            self._cookies = cookiejar_from_dict(cookies)

        cookie_header = get_cookie_header(self._cookies, self)
        if cookie_header is not None:
            self.headers['Cookie'] = cookie_header

    def prepare_hooks(self, hooks):
        """Prepares the given hooks."""
        # hooks can be passed as None to the prepare method and to this
        # method. To prevent iterating over None, simply use an empty list
        # if hooks is False-y
        hooks = hooks or []
        for event in hooks:
            self.register_hook(event, hooks[event])
576 | |||
577 | |||
class Response(object):
    """The :class:`Response <Response>` object, which contains a
    server's response to an HTTP request.
    """

    # Attributes preserved by pickling (see __getstate__/__setstate__).
    __attrs__ = [
        '_content', 'status_code', 'headers', 'url', 'history',
        'encoding', 'reason', 'cookies', 'elapsed', 'request'
    ]

    def __init__(self):
        # Cached body bytes; ``False`` is a sentinel meaning "not read yet"
        # (the ``content`` property checks identity against False).
        self._content = False
        # True once the underlying raw stream has been fully consumed.
        self._content_consumed = False
        # The next PreparedRequest in a redirect chain (exposed via ``next``).
        self._next = None

        #: Integer Code of responded HTTP Status, e.g. 404 or 200.
        self.status_code = None

        #: Case-insensitive Dictionary of Response Headers.
        #: For example, ``headers['content-encoding']`` will return the
        #: value of a ``'Content-Encoding'`` response header.
        self.headers = CaseInsensitiveDict()

        #: File-like object representation of response (for advanced usage).
        #: Use of ``raw`` requires that ``stream=True`` be set on the request.
        # This requirement does not apply for use internally to Requests.
        self.raw = None

        #: Final URL location of Response.
        self.url = None

        #: Encoding to decode with when accessing r.text.
        self.encoding = None

        #: A list of :class:`Response <Response>` objects from
        #: the history of the Request. Any redirect responses will end
        #: up here. The list is sorted from the oldest to the most recent request.
        self.history = []

        #: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK".
        self.reason = None

        #: A CookieJar of Cookies the server sent back.
        self.cookies = cookiejar_from_dict({})

        #: The amount of time elapsed between sending the request
        #: and the arrival of the response (as a timedelta).
        #: This property specifically measures the time taken between sending
        #: the first byte of the request and finishing parsing the headers. It
        #: is therefore unaffected by consuming the response content or the
        #: value of the ``stream`` keyword argument.
        self.elapsed = datetime.timedelta(0)

        #: The :class:`PreparedRequest <PreparedRequest>` object to which this
        #: is a response.
        self.request = None

    def __enter__(self):
        return self

    def __exit__(self, *args):
        # Release the connection when leaving a ``with`` block.
        self.close()

    def __getstate__(self):
        # Consume everything; accessing the content attribute makes
        # sure the content has been fully read.
        if not self._content_consumed:
            self.content

        return dict(
            (attr, getattr(self, attr, None))
            for attr in self.__attrs__
        )

    def __setstate__(self, state):
        for name, value in state.items():
            setattr(self, name, value)

        # pickled objects do not have .raw
        setattr(self, '_content_consumed', True)
        setattr(self, 'raw', None)

    def __repr__(self):
        return '<Response [%s]>' % (self.status_code)

    def __bool__(self):
        """Returns True if :attr:`status_code` is less than 400.

        This attribute checks if the status code of the response is between
        400 and 600 to see if there was a client error or a server error. If
        the status code, is between 200 and 400, this will return True. This
        is **not** a check to see if the response code is ``200 OK``.
        """
        return self.ok

    def __nonzero__(self):
        """Returns True if :attr:`status_code` is less than 400.

        This attribute checks if the status code of the response is between
        400 and 600 to see if there was a client error or a server error. If
        the status code, is between 200 and 400, this will return True. This
        is **not** a check to see if the response code is ``200 OK``.
        """
        # Python 2 truth-value hook; delegates to the same check as __bool__.
        return self.ok

    def __iter__(self):
        """Allows you to use a response as an iterator."""
        return self.iter_content(128)

    @property
    def ok(self):
        """Returns True if :attr:`status_code` is less than 400.

        This attribute checks if the status code of the response is between
        400 and 600 to see if there was a client error or a server error. If
        the status code, is between 200 and 400, this will return True. This
        is **not** a check to see if the response code is ``200 OK``.
        """
        try:
            self.raise_for_status()
        except HTTPError:
            return False
        return True

    @property
    def is_redirect(self):
        """True if this Response is a well-formed HTTP redirect that could have
        been processed automatically (by :meth:`Session.resolve_redirects`).
        """
        return ('location' in self.headers and self.status_code in REDIRECT_STATI)

    @property
    def is_permanent_redirect(self):
        """True if this Response one of the permanent versions of redirect."""
        return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect))

    @property
    def next(self):
        """Returns a PreparedRequest for the next request in a redirect chain, if there is one."""
        return self._next

    @property
    def apparent_encoding(self):
        """The apparent encoding, provided by the chardet library."""
        return chardet.detect(self.content)['encoding']

    def iter_content(self, chunk_size=1, decode_unicode=False):
        """Iterates over the response data.  When stream=True is set on the
        request, this avoids reading the content at once into memory for
        large responses.  The chunk size is the number of bytes it should
        read into memory.  This is not necessarily the length of each item
        returned as decoding can take place.

        chunk_size must be of type int or None. A value of None will
        function differently depending on the value of `stream`.
        stream=True will read data as it arrives in whatever size the
        chunks are received. If stream=False, data is returned as
        a single chunk.

        If decode_unicode is True, content will be decoded using the best
        available encoding based on the response.
        """

        def generate():
            # Special case for urllib3.
            if hasattr(self.raw, 'stream'):
                try:
                    for chunk in self.raw.stream(chunk_size, decode_content=True):
                        yield chunk
                except ProtocolError as e:
                    raise ChunkedEncodingError(e)
                except DecodeError as e:
                    raise ContentDecodingError(e)
                except ReadTimeoutError as e:
                    raise ConnectionError(e)
            else:
                # Standard file-like object.
                while True:
                    chunk = self.raw.read(chunk_size)
                    if not chunk:
                        break
                    yield chunk

            self._content_consumed = True

        # A bool _content with the stream consumed means the body was
        # streamed away and never cached: nothing left to iterate.
        if self._content_consumed and isinstance(self._content, bool):
            raise StreamConsumedError()
        elif chunk_size is not None and not isinstance(chunk_size, int):
            raise TypeError("chunk_size must be an int, it is instead a %s." % type(chunk_size))
        # simulate reading small chunks of the content
        reused_chunks = iter_slices(self._content, chunk_size)

        stream_chunks = generate()

        chunks = reused_chunks if self._content_consumed else stream_chunks

        if decode_unicode:
            chunks = stream_decode_response_unicode(chunks, self)

        return chunks

    def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=None, delimiter=None):
        """Iterates over the response data, one line at a time.  When
        stream=True is set on the request, this avoids reading the
        content at once into memory for large responses.

        .. note:: This method is not reentrant safe.
        """

        # Holds a trailing partial line carried over to the next chunk.
        pending = None

        for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode):

            if pending is not None:
                chunk = pending + chunk

            if delimiter:
                lines = chunk.split(delimiter)
            else:
                lines = chunk.splitlines()

            # If the chunk did not end exactly on a line boundary, keep the
            # last piece back until more data arrives.
            if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]:
                pending = lines.pop()
            else:
                pending = None

            for line in lines:
                yield line

        if pending is not None:
            yield pending

    @property
    def content(self):
        """Content of the response, in bytes."""

        if self._content is False:
            # Read the contents.
            if self._content_consumed:
                raise RuntimeError(
                    'The content for this response was already consumed')

            if self.status_code == 0 or self.raw is None:
                self._content = None
            else:
                self._content = bytes().join(self.iter_content(CONTENT_CHUNK_SIZE)) or bytes()

        self._content_consumed = True
        # don't need to release the connection; that's been handled by urllib3
        # since we exhausted the data.
        return self._content

    @property
    def text(self):
        """Content of the response, in unicode.

        If Response.encoding is None, encoding will be guessed using
        ``chardet``.

        The encoding of the response content is determined based solely on HTTP
        headers, following RFC 2616 to the letter. If you can take advantage of
        non-HTTP knowledge to make a better guess at the encoding, you should
        set ``r.encoding`` appropriately before accessing this property.
        """

        # Try charset from content-type
        content = None
        encoding = self.encoding

        if not self.content:
            return str('')

        # Fallback to auto-detected encoding.
        if self.encoding is None:
            encoding = self.apparent_encoding

        # Decode unicode from given encoding.
        try:
            content = str(self.content, encoding, errors='replace')
        except (LookupError, TypeError):
            # A LookupError is raised if the encoding was not found which could
            # indicate a misspelling or similar mistake.
            #
            # A TypeError can be raised if encoding is None
            #
            # So we try blindly encoding.
            content = str(self.content, errors='replace')

        return content

    def json(self, **kwargs):
        r"""Returns the json-encoded content of a response, if any.

        :param \*\*kwargs: Optional arguments that ``json.loads`` takes.
        :raises ValueError: If the response body does not contain valid json.
        """

        if not self.encoding and self.content and len(self.content) > 3:
            # No encoding set. JSON RFC 4627 section 3 states we should expect
            # UTF-8, -16 or -32. Detect which one to use; If the detection or
            # decoding fails, fall back to `self.text` (using chardet to make
            # a best guess).
            encoding = guess_json_utf(self.content)
            if encoding is not None:
                try:
                    return complexjson.loads(
                        self.content.decode(encoding), **kwargs
                    )
                except UnicodeDecodeError:
                    # Wrong UTF codec detected; usually because it's not UTF-8
                    # but some other 8-bit codec.  This is an RFC violation,
                    # and the server didn't bother to tell us what codec *was*
                    # used.
                    pass
        return complexjson.loads(self.text, **kwargs)

    @property
    def links(self):
        """Returns the parsed header links of the response, if any."""

        header = self.headers.get('link')

        # l = MultiDict()
        l = {}

        if header:
            links = parse_header_links(header)

            # Keyed by rel (falling back to url); a later link with the
            # same key overwrites an earlier one.
            for link in links:
                key = link.get('rel') or link.get('url')
                l[key] = link

        return l

    def raise_for_status(self):
        """Raises stored :class:`HTTPError`, if one occurred."""

        http_error_msg = ''
        if isinstance(self.reason, bytes):
            # We attempt to decode utf-8 first because some servers
            # choose to localize their reason strings. If the string
            # isn't utf-8, we fall back to iso-8859-1 for all other
            # encodings. (See PR #3538)
            try:
                reason = self.reason.decode('utf-8')
            except UnicodeDecodeError:
                reason = self.reason.decode('iso-8859-1')
        else:
            reason = self.reason

        if 400 <= self.status_code < 500:
            http_error_msg = u'%s Client Error: %s for url: %s' % (self.status_code, reason, self.url)

        elif 500 <= self.status_code < 600:
            http_error_msg = u'%s Server Error: %s for url: %s' % (self.status_code, reason, self.url)

        if http_error_msg:
            raise HTTPError(http_error_msg, response=self)

    def close(self):
        """Releases the connection back to the pool. Once this method has been
        called the underlying ``raw`` object must not be accessed again.

        *Note: Should not normally need to be called explicitly.*
        """
        if not self._content_consumed:
            self.raw.close()

        release_conn = getattr(self.raw, 'release_conn', None)
        if release_conn is not None:
            release_conn()
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/packages.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/packages.py new file mode 100644 index 0000000..c91d9c7 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/packages.py | |||
@@ -0,0 +1,16 @@ | |||
import sys

# This code exists for backwards compatibility reasons.
# I don't like it either. Just look the other way. :)

# Expose the vendored copies of urllib3/idna/chardet under the legacy
# ``requests.packages.*`` module names so old import paths keep resolving
# to the exact same module objects.
for package in ('urllib3', 'idna', 'chardet'):
    vendored_package = "pip._vendor." + package
    # NOTE(review): __import__ on a dotted name returns the *top-level*
    # package (``pip``), so this local binding is the ``pip`` module; the
    # useful work appears to be the sys.modules side effects below — confirm.
    locals()[package] = __import__(vendored_package)
    # This traversal is apparently necessary such that the identities are
    # preserved (requests.packages.urllib3.* is urllib3.*)
    for mod in list(sys.modules):
        if mod == vendored_package or mod.startswith(vendored_package + '.'):
            unprefixed_mod = mod[len("pip._vendor."):]
            sys.modules['pip._vendor.requests.packages.' + unprefixed_mod] = sys.modules[mod]

# Kinda cool, though, right?
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/sessions.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/sessions.py new file mode 100644 index 0000000..d8eafa8 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/sessions.py | |||
@@ -0,0 +1,737 @@ | |||
1 | # -*- coding: utf-8 -*- | ||
2 | |||
3 | """ | ||
4 | requests.session | ||
5 | ~~~~~~~~~~~~~~~~ | ||
6 | |||
7 | This module provides a Session object to manage and persist settings across | ||
8 | requests (cookies, auth, proxies). | ||
9 | """ | ||
import os
import platform
import time

# ``Mapping`` has lived in ``collections.abc`` since Python 3.3 and the
# compatibility alias in ``collections`` was removed in Python 3.10, so
# prefer the new location and fall back only on very old interpreters.
try:
    from collections.abc import Mapping
except ImportError:  # Python 2
    from collections import Mapping
from datetime import timedelta

from .auth import _basic_auth_str
from .compat import cookielib, is_py3, OrderedDict, urljoin, urlparse
from .cookies import (
    cookiejar_from_dict, extract_cookies_to_jar, RequestsCookieJar, merge_cookies)
from .models import Request, PreparedRequest, DEFAULT_REDIRECT_LIMIT
from .hooks import default_hooks, dispatch_hook
from ._internal_utils import to_native_string
from .utils import to_key_val_list, default_headers
from .exceptions import (
    TooManyRedirects, InvalidSchema, ChunkedEncodingError, ContentDecodingError)

from .structures import CaseInsensitiveDict
from .adapters import HTTPAdapter

from .utils import (
    requote_uri, get_environ_proxies, get_netrc_auth, should_bypass_proxies,
    get_auth_from_url, rewind_body
)

from .status_codes import codes

# formerly defined here, reexposed here for backward compatibility
from .models import REDIRECT_STATI
39 | |||
# Preferred clock, based on which one is more accurate on a given system.
if platform.system() != 'Windows':
    preferred_clock = time.time
else:
    # ``time.perf_counter`` is available on Python 3.3+ and has much
    # better resolution than ``time.time`` on Windows.
    try:
        preferred_clock = time.perf_counter
    except AttributeError:  # interpreters older than Python 3.3
        preferred_clock = time.clock
48 | |||
49 | |||
def merge_setting(request_setting, session_setting, dict_class=OrderedDict):
    """Determines appropriate setting for a given request, taking into account
    the explicit setting on that request, and the setting in the session. If a
    setting is a dictionary, they will be merged together using `dict_class`
    """
    if session_setting is None:
        return request_setting
    if request_setting is None:
        return session_setting

    # Non-mapping settings (e.g. ``verify``) are not merged: the
    # request-level value simply wins.
    both_are_mappings = (
        isinstance(session_setting, Mapping) and
        isinstance(request_setting, Mapping)
    )
    if not both_are_mappings:
        return request_setting

    # Session values form the base; request values override them.
    merged = dict_class(to_key_val_list(session_setting))
    merged.update(to_key_val_list(request_setting))

    # A key explicitly set to None on the request removes the session's
    # value entirely. Collect keys first to avoid mutating while iterating.
    for key in [k for (k, v) in merged.items() if v is None]:
        del merged[key]

    return merged
79 | |||
80 | |||
def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict):
    """Properly merges both requests and session hooks.

    This is necessary because when request_hooks == {'response': []}, the
    merge breaks Session hooks entirely.
    """
    def _is_empty(hooks):
        # Treat both "no hooks" and an empty 'response' hook list as absent.
        return hooks is None or hooks.get('response') == []

    if _is_empty(session_hooks):
        return request_hooks
    if _is_empty(request_hooks):
        return session_hooks

    return merge_setting(request_hooks, session_hooks, dict_class)
94 | |||
95 | |||
class SessionRedirectMixin(object):
    """Redirect-resolution behaviour shared by :class:`Session`."""

    def get_redirect_target(self, resp):
        """Receives a Response. Returns a redirect URI or ``None``"""
        # Due to the nature of how requests processes redirects this method will
        # be called at least once upon the original response and at least twice
        # on each subsequent redirect response (if any).
        # If a custom mixin is used to handle this logic, it may be advantageous
        # to cache the redirect location onto the response object as a private
        # attribute.
        if resp.is_redirect:
            location = resp.headers['location']
            # Currently the underlying http module on py3 decode headers
            # in latin1, but empirical evidence suggests that latin1 is very
            # rarely used with non-ASCII characters in HTTP headers.
            # It is more likely to get UTF8 header rather than latin1.
            # This causes incorrect handling of UTF8 encoded location headers.
            # To solve this, we re-encode the location in latin1.
            if is_py3:
                location = location.encode('latin1')
            return to_native_string(location, 'utf8')
        return None

    def should_strip_auth(self, old_url, new_url):
        """Decide whether the Authorization header should be removed when
        following a redirect from ``old_url`` to ``new_url``.

        Credentials are stripped whenever the redirect leaves the original
        origin (host, scheme or port change), except for the common
        http -> https upgrade on standard ports. This closes the credential
        leak of CVE-2018-18074 (auth forwarded on https -> http downgrade).

        :rtype: bool
        """
        old_parsed = urlparse(old_url)
        new_parsed = urlparse(new_url)
        if old_parsed.hostname != new_parsed.hostname:
            return True
        # Special case: allow http -> https redirect when using the standard
        # ports. This isn't specified by RFC 7235, but is kept to avoid
        # breaking backwards compatibility with older versions of requests
        # that allowed any redirects on the same host.
        if (old_parsed.scheme == 'http' and old_parsed.port in (80, None)
                and new_parsed.scheme == 'https' and new_parsed.port in (443, None)):
            return False
        # Standard case: root URI must match
        return old_parsed.port != new_parsed.port or old_parsed.scheme != new_parsed.scheme

    def resolve_redirects(self, resp, req, stream=False, timeout=None,
                          verify=True, cert=None, proxies=None, yield_requests=False, **adapter_kwargs):
        """Receives a Response. Returns a generator of Responses or Requests."""

        hist = []  # keep track of history

        url = self.get_redirect_target(resp)
        while url:
            prepared_request = req.copy()

            # Update history and keep track of redirects.
            # resp.history must ignore the original request in this loop
            hist.append(resp)
            resp.history = hist[1:]

            try:
                resp.content  # Consume socket so it can be released
            except (ChunkedEncodingError, ContentDecodingError, RuntimeError):
                resp.raw.read(decode_content=False)

            if len(resp.history) >= self.max_redirects:
                raise TooManyRedirects('Exceeded %s redirects.' % self.max_redirects, response=resp)

            # Release the connection back into the pool.
            resp.close()

            # Handle redirection without scheme (see: RFC 1808 Section 4)
            if url.startswith('//'):
                parsed_rurl = urlparse(resp.url)
                url = '%s:%s' % (to_native_string(parsed_rurl.scheme), url)

            # The scheme should be lower case...
            parsed = urlparse(url)
            url = parsed.geturl()

            # Facilitate relative 'location' headers, as allowed by RFC 7231.
            # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource')
            # Compliant with RFC3986, we percent encode the url.
            if not parsed.netloc:
                url = urljoin(resp.url, requote_uri(url))
            else:
                url = requote_uri(url)

            prepared_request.url = to_native_string(url)

            self.rebuild_method(prepared_request, resp)

            # https://github.com/requests/requests/issues/1084
            if resp.status_code not in (codes.temporary_redirect, codes.permanent_redirect):
                # https://github.com/requests/requests/issues/3490
                purged_headers = ('Content-Length', 'Content-Type', 'Transfer-Encoding')
                for header in purged_headers:
                    prepared_request.headers.pop(header, None)
                prepared_request.body = None

            headers = prepared_request.headers
            try:
                del headers['Cookie']
            except KeyError:
                pass

            # Extract any cookies sent on the response to the cookiejar
            # in the new request. Because we've mutated our copied prepared
            # request, use the old one that we haven't yet touched.
            extract_cookies_to_jar(prepared_request._cookies, req, resp.raw)
            merge_cookies(prepared_request._cookies, self.cookies)
            prepared_request.prepare_cookies(prepared_request._cookies)

            # Rebuild auth and proxy information.
            proxies = self.rebuild_proxies(prepared_request, proxies)
            self.rebuild_auth(prepared_request, resp)

            # A failed tell() sets `_body_position` to `object()`. This non-None
            # value ensures `rewindable` will be True, allowing us to raise an
            # UnrewindableBodyError, instead of hanging the connection.
            rewindable = (
                prepared_request._body_position is not None and
                ('Content-Length' in headers or 'Transfer-Encoding' in headers)
            )

            # Attempt to rewind consumed file-like object.
            if rewindable:
                rewind_body(prepared_request)

            # Override the original request.
            req = prepared_request

            if yield_requests:
                yield req
            else:

                resp = self.send(
                    req,
                    stream=stream,
                    timeout=timeout,
                    verify=verify,
                    cert=cert,
                    proxies=proxies,
                    allow_redirects=False,
                    **adapter_kwargs
                )

                extract_cookies_to_jar(self.cookies, prepared_request, resp.raw)

                # extract redirect url, if any, for the next loop
                url = self.get_redirect_target(resp)
                yield resp

    def rebuild_auth(self, prepared_request, response):
        """When being redirected we may want to strip authentication from the
        request to avoid leaking credentials. This method intelligently removes
        and reapplies authentication where possible to avoid credential loss.
        """
        headers = prepared_request.headers
        url = prepared_request.url

        # Strip credentials whenever the redirect crosses an origin boundary
        # (host, scheme or non-standard-port change), not just on a hostname
        # change — otherwise an https -> http redirect on the same host would
        # forward the Authorization header in cleartext (CVE-2018-18074).
        if 'Authorization' in headers and self.should_strip_auth(response.request.url, url):
            del headers['Authorization']

        # .netrc might have more auth for us on our new host.
        new_auth = get_netrc_auth(url) if self.trust_env else None
        if new_auth is not None:
            prepared_request.prepare_auth(new_auth)

        return

    def rebuild_proxies(self, prepared_request, proxies):
        """This method re-evaluates the proxy configuration by considering the
        environment variables. If we are redirected to a URL covered by
        NO_PROXY, we strip the proxy configuration. Otherwise, we set missing
        proxy keys for this URL (in case they were stripped by a previous
        redirect).

        This method also replaces the Proxy-Authorization header where
        necessary.

        :rtype: dict
        """
        proxies = proxies if proxies is not None else {}
        headers = prepared_request.headers
        url = prepared_request.url
        scheme = urlparse(url).scheme
        new_proxies = proxies.copy()
        no_proxy = proxies.get('no_proxy')

        bypass_proxy = should_bypass_proxies(url, no_proxy=no_proxy)
        if self.trust_env and not bypass_proxy:
            environ_proxies = get_environ_proxies(url, no_proxy=no_proxy)

            proxy = environ_proxies.get(scheme, environ_proxies.get('all'))

            if proxy:
                new_proxies.setdefault(scheme, proxy)

        if 'Proxy-Authorization' in headers:
            del headers['Proxy-Authorization']

        try:
            username, password = get_auth_from_url(new_proxies[scheme])
        except KeyError:
            username, password = None, None

        if username and password:
            headers['Proxy-Authorization'] = _basic_auth_str(username, password)

        return new_proxies

    def rebuild_method(self, prepared_request, response):
        """When being redirected we may want to change the method of the request
        based on certain specs or browser behavior.
        """
        method = prepared_request.method

        # http://tools.ietf.org/html/rfc7231#section-6.4.4
        if response.status_code == codes.see_other and method != 'HEAD':
            method = 'GET'

        # Do what the browsers do, despite standards...
        # First, turn 302s into GETs.
        if response.status_code == codes.found and method != 'HEAD':
            method = 'GET'

        # Second, if a POST is responded to with a 301, turn it into a GET.
        # This bizarre behaviour is explained in Issue 1704.
        if response.status_code == codes.moved and method == 'POST':
            method = 'GET'

        prepared_request.method = method
313 | |||
314 | |||
class Session(SessionRedirectMixin):
    """A Requests session.

    Provides cookie persistence, connection-pooling, and configuration.

    Basic Usage::

      >>> import requests
      >>> s = requests.Session()
      >>> s.get('http://httpbin.org/get')
      <Response [200]>

    Or as a context manager::

      >>> with requests.Session() as s:
      >>>     s.get('http://httpbin.org/get')
      <Response [200]>
    """

    # Attributes captured by __getstate__/__setstate__ for pickling.
    __attrs__ = [
        'headers', 'cookies', 'auth', 'proxies', 'hooks', 'params', 'verify',
        'cert', 'prefetch', 'adapters', 'stream', 'trust_env',
        'max_redirects',
    ]

    def __init__(self):

        #: A case-insensitive dictionary of headers to be sent on each
        #: :class:`Request <Request>` sent from this
        #: :class:`Session <Session>`.
        self.headers = default_headers()

        #: Default Authentication tuple or object to attach to
        #: :class:`Request <Request>`.
        self.auth = None

        #: Dictionary mapping protocol or protocol and host to the URL of the proxy
        #: (e.g. {'http': 'foo.bar:3128', 'http://host.name': 'foo.bar:4012'}) to
        #: be used on each :class:`Request <Request>`.
        self.proxies = {}

        #: Event-handling hooks.
        self.hooks = default_hooks()

        #: Dictionary of querystring data to attach to each
        #: :class:`Request <Request>`. The dictionary values may be lists for
        #: representing multivalued query parameters.
        self.params = {}

        #: Stream response content default.
        self.stream = False

        #: SSL Verification default.
        self.verify = True

        #: SSL client certificate default, if String, path to ssl client
        #: cert file (.pem). If Tuple, ('cert', 'key') pair.
        self.cert = None

        #: Maximum number of redirects allowed. If the request exceeds this
        #: limit, a :class:`TooManyRedirects` exception is raised.
        #: This defaults to requests.models.DEFAULT_REDIRECT_LIMIT, which is
        #: 30.
        self.max_redirects = DEFAULT_REDIRECT_LIMIT

        #: Trust environment settings for proxy configuration, default
        #: authentication and similar.
        self.trust_env = True

        #: A CookieJar containing all currently outstanding cookies set on this
        #: session. By default it is a
        #: :class:`RequestsCookieJar <requests.cookies.RequestsCookieJar>`, but
        #: may be any other ``cookielib.CookieJar`` compatible object.
        self.cookies = cookiejar_from_dict({})

        # Default connection adapters.
        self.adapters = OrderedDict()
        self.mount('https://', HTTPAdapter())
        self.mount('http://', HTTPAdapter())

    def __enter__(self):
        return self

    def __exit__(self, *args):
        self.close()

    def prepare_request(self, request):
        """Constructs a :class:`PreparedRequest <PreparedRequest>` for
        transmission and returns it. The :class:`PreparedRequest` has settings
        merged from the :class:`Request <Request>` instance and those of the
        :class:`Session`.

        :param request: :class:`Request` instance to prepare with this
            session's settings.
        :rtype: requests.PreparedRequest
        """
        cookies = request.cookies or {}

        # Bootstrap CookieJar.
        if not isinstance(cookies, cookielib.CookieJar):
            cookies = cookiejar_from_dict(cookies)

        # Merge with session cookies
        merged_cookies = merge_cookies(
            merge_cookies(RequestsCookieJar(), self.cookies), cookies)

        # Set environment's basic authentication if not explicitly set.
        auth = request.auth
        if self.trust_env and not auth and not self.auth:
            auth = get_netrc_auth(request.url)

        p = PreparedRequest()
        p.prepare(
            method=request.method.upper(),
            url=request.url,
            files=request.files,
            data=request.data,
            json=request.json,
            headers=merge_setting(request.headers, self.headers, dict_class=CaseInsensitiveDict),
            params=merge_setting(request.params, self.params),
            auth=merge_setting(auth, self.auth),
            cookies=merged_cookies,
            hooks=merge_hooks(request.hooks, self.hooks),
        )
        return p

    def request(self, method, url,
            params=None, data=None, headers=None, cookies=None, files=None,
            auth=None, timeout=None, allow_redirects=True, proxies=None,
            hooks=None, stream=None, verify=None, cert=None, json=None):
        """Constructs a :class:`Request <Request>`, prepares it and sends it.
        Returns :class:`Response <Response>` object.

        :param method: method for the new :class:`Request` object.
        :param url: URL for the new :class:`Request` object.
        :param params: (optional) Dictionary or bytes to be sent in the query
            string for the :class:`Request`.
        :param data: (optional) Dictionary, bytes, or file-like object to send
            in the body of the :class:`Request`.
        :param json: (optional) json to send in the body of the
            :class:`Request`.
        :param headers: (optional) Dictionary of HTTP Headers to send with the
            :class:`Request`.
        :param cookies: (optional) Dict or CookieJar object to send with the
            :class:`Request`.
        :param files: (optional) Dictionary of ``'filename': file-like-objects``
            for multipart encoding upload.
        :param auth: (optional) Auth tuple or callable to enable
            Basic/Digest/Custom HTTP Auth.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) <timeouts>` tuple.
        :type timeout: float or tuple
        :param allow_redirects: (optional) Set to True by default.
        :type allow_redirects: bool
        :param proxies: (optional) Dictionary mapping protocol or protocol and
            hostname to the URL of the proxy.
        :param stream: (optional) whether to immediately download the response
            content. Defaults to ``False``.
        :param verify: (optional) Either a boolean, in which case it controls whether we verify
            the server's TLS certificate, or a string, in which case it must be a path
            to a CA bundle to use. Defaults to ``True``.
        :param cert: (optional) if String, path to ssl client cert file (.pem).
            If Tuple, ('cert', 'key') pair.
        :rtype: requests.Response
        """
        # Create the Request.
        req = Request(
            method=method.upper(),
            url=url,
            headers=headers,
            files=files,
            data=data or {},
            json=json,
            params=params or {},
            auth=auth,
            cookies=cookies,
            hooks=hooks,
        )
        prep = self.prepare_request(req)

        proxies = proxies or {}

        settings = self.merge_environment_settings(
            prep.url, proxies, stream, verify, cert
        )

        # Send the request.
        send_kwargs = {
            'timeout': timeout,
            'allow_redirects': allow_redirects,
        }
        send_kwargs.update(settings)
        resp = self.send(prep, **send_kwargs)

        return resp

    def get(self, url, **kwargs):
        r"""Sends a GET request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param \*\*kwargs: Optional arguments that ``request`` takes.
        :rtype: requests.Response
        """

        kwargs.setdefault('allow_redirects', True)
        return self.request('GET', url, **kwargs)

    def options(self, url, **kwargs):
        r"""Sends a OPTIONS request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param \*\*kwargs: Optional arguments that ``request`` takes.
        :rtype: requests.Response
        """

        kwargs.setdefault('allow_redirects', True)
        return self.request('OPTIONS', url, **kwargs)

    def head(self, url, **kwargs):
        r"""Sends a HEAD request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param \*\*kwargs: Optional arguments that ``request`` takes.
        :rtype: requests.Response
        """

        kwargs.setdefault('allow_redirects', False)
        return self.request('HEAD', url, **kwargs)

    def post(self, url, data=None, json=None, **kwargs):
        r"""Sends a POST request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
        :param json: (optional) json to send in the body of the :class:`Request`.
        :param \*\*kwargs: Optional arguments that ``request`` takes.
        :rtype: requests.Response
        """

        return self.request('POST', url, data=data, json=json, **kwargs)

    def put(self, url, data=None, **kwargs):
        r"""Sends a PUT request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
        :param \*\*kwargs: Optional arguments that ``request`` takes.
        :rtype: requests.Response
        """

        return self.request('PUT', url, data=data, **kwargs)

    def patch(self, url, data=None, **kwargs):
        r"""Sends a PATCH request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
        :param \*\*kwargs: Optional arguments that ``request`` takes.
        :rtype: requests.Response
        """

        return self.request('PATCH', url, data=data, **kwargs)

    def delete(self, url, **kwargs):
        r"""Sends a DELETE request. Returns :class:`Response` object.

        :param url: URL for the new :class:`Request` object.
        :param \*\*kwargs: Optional arguments that ``request`` takes.
        :rtype: requests.Response
        """

        return self.request('DELETE', url, **kwargs)

    def send(self, request, **kwargs):
        """Send a given PreparedRequest.

        :rtype: requests.Response
        """
        # Set defaults that the hooks can utilize to ensure they always have
        # the correct parameters to reproduce the previous request.
        kwargs.setdefault('stream', self.stream)
        kwargs.setdefault('verify', self.verify)
        kwargs.setdefault('cert', self.cert)
        kwargs.setdefault('proxies', self.proxies)

        # It's possible that users might accidentally send a Request object.
        # Guard against that specific failure case.
        if isinstance(request, Request):
            raise ValueError('You can only send PreparedRequests.')

        # Set up variables needed for resolve_redirects and dispatching of hooks
        allow_redirects = kwargs.pop('allow_redirects', True)
        stream = kwargs.get('stream')
        hooks = request.hooks

        # Get the appropriate adapter to use
        adapter = self.get_adapter(url=request.url)

        # Start time (approximately) of the request
        start = preferred_clock()

        # Send the request
        r = adapter.send(request, **kwargs)

        # Total elapsed time of the request (approximately)
        elapsed = preferred_clock() - start
        r.elapsed = timedelta(seconds=elapsed)

        # Response manipulation hooks
        r = dispatch_hook('response', hooks, r, **kwargs)

        # Persist cookies
        if r.history:

            # If the hooks create history then we want those cookies too
            for resp in r.history:
                extract_cookies_to_jar(self.cookies, resp.request, resp.raw)

        extract_cookies_to_jar(self.cookies, request, r.raw)

        # Redirect resolving generator.
        gen = self.resolve_redirects(r, request, **kwargs)

        # Resolve redirects if allowed.
        history = [resp for resp in gen] if allow_redirects else []

        # Shuffle things around if there's history.
        if history:
            # Insert the first (original) request at the start
            history.insert(0, r)
            # Get the last request made
            r = history.pop()
            r.history = history

        # If redirects aren't being followed, store the response on the Request for Response.next().
        if not allow_redirects:
            try:
                r._next = next(self.resolve_redirects(r, request, yield_requests=True, **kwargs))
            except StopIteration:
                pass

        if not stream:
            r.content

        return r

    def merge_environment_settings(self, url, proxies, stream, verify, cert):
        """
        Check the environment and merge it with some settings.

        :rtype: dict
        """
        # Gather clues from the surrounding environment.
        if self.trust_env:
            # Set environment's proxies.
            no_proxy = proxies.get('no_proxy') if proxies is not None else None
            env_proxies = get_environ_proxies(url, no_proxy=no_proxy)
            for (k, v) in env_proxies.items():
                proxies.setdefault(k, v)

            # Look for requests environment configuration and be compatible
            # with cURL.
            if verify is True or verify is None:
                verify = (os.environ.get('REQUESTS_CA_BUNDLE') or
                          os.environ.get('CURL_CA_BUNDLE'))

        # Merge all the kwargs.
        proxies = merge_setting(proxies, self.proxies)
        stream = merge_setting(stream, self.stream)
        verify = merge_setting(verify, self.verify)
        cert = merge_setting(cert, self.cert)

        return {'verify': verify, 'proxies': proxies, 'stream': stream,
                'cert': cert}

    def get_adapter(self, url):
        """
        Returns the appropriate connection adapter for the given URL.

        :rtype: requests.adapters.BaseAdapter
        """
        for (prefix, adapter) in self.adapters.items():

            # Lower-case both sides so a prefix mounted with any upper-case
            # characters (e.g. 'HTTP://') can still match; previously only
            # the URL was lower-cased, so such adapters were unreachable.
            if url.lower().startswith(prefix.lower()):
                return adapter

        # Nothing matches :-/
        raise InvalidSchema("No connection adapters were found for '%s'" % url)

    def close(self):
        """Closes all adapters and as such the session"""
        for v in self.adapters.values():
            v.close()

    def mount(self, prefix, adapter):
        """Registers a connection adapter to a prefix.

        Adapters are sorted in descending order by prefix length.
        """
        self.adapters[prefix] = adapter
        keys_to_move = [k for k in self.adapters if len(k) < len(prefix)]

        # Re-insert shorter prefixes after the new one so that iteration in
        # get_adapter visits the most specific (longest) prefixes first.
        for key in keys_to_move:
            self.adapters[key] = self.adapters.pop(key)

    def __getstate__(self):
        state = dict((attr, getattr(self, attr, None)) for attr in self.__attrs__)
        return state

    def __setstate__(self, state):
        for attr, value in state.items():
            setattr(self, attr, value)
728 | |||
729 | |||
def session():
    """
    Returns a :class:`Session` for context-management.

    Equivalent to instantiating :class:`Session` directly; provided as a
    module-level convenience constructor.

    :rtype: Session
    """

    return Session()
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/status_codes.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/status_codes.py new file mode 100644 index 0000000..85d9bbc --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/status_codes.py | |||
@@ -0,0 +1,91 @@ | |||
1 | # -*- coding: utf-8 -*- | ||
2 | |||
3 | from .structures import LookupDict | ||
4 | |||
# Mapping of HTTP status code -> tuple of attribute-name aliases. Every
# alias (plus an upper-case variant, where meaningful) is attached to the
# ``codes`` lookup object below, e.g. ``codes.ok == 200``.
_codes = {

    # Informational.
    100: ('continue',),
    101: ('switching_protocols',),
    102: ('processing',),
    103: ('checkpoint',),
    122: ('uri_too_long', 'request_uri_too_long'),
    200: ('ok', 'okay', 'all_ok', 'all_okay', 'all_good', '\\o/', '✓'),
    201: ('created',),
    202: ('accepted',),
    203: ('non_authoritative_info', 'non_authoritative_information'),
    204: ('no_content',),
    205: ('reset_content', 'reset'),
    206: ('partial_content', 'partial'),
    207: ('multi_status', 'multiple_status', 'multi_stati', 'multiple_stati'),
    208: ('already_reported',),
    226: ('im_used',),

    # Redirection.
    300: ('multiple_choices',),
    301: ('moved_permanently', 'moved', '\\o-'),
    302: ('found',),
    303: ('see_other', 'other'),
    304: ('not_modified',),
    305: ('use_proxy',),
    306: ('switch_proxy',),
    307: ('temporary_redirect', 'temporary_moved', 'temporary'),
    308: ('permanent_redirect',
          'resume_incomplete', 'resume',),  # These 2 to be removed in 3.0

    # Client Error.
    400: ('bad_request', 'bad'),
    401: ('unauthorized',),
    402: ('payment_required', 'payment'),
    403: ('forbidden',),
    404: ('not_found', '-o-'),
    405: ('method_not_allowed', 'not_allowed'),
    406: ('not_acceptable',),
    407: ('proxy_authentication_required', 'proxy_auth', 'proxy_authentication'),
    408: ('request_timeout', 'timeout'),
    409: ('conflict',),
    410: ('gone',),
    411: ('length_required',),
    412: ('precondition_failed', 'precondition'),
    413: ('request_entity_too_large',),
    414: ('request_uri_too_large',),
    415: ('unsupported_media_type', 'unsupported_media', 'media_type'),
    416: ('requested_range_not_satisfiable', 'requested_range', 'range_not_satisfiable'),
    417: ('expectation_failed',),
    418: ('im_a_teapot', 'teapot', 'i_am_a_teapot'),
    421: ('misdirected_request',),
    422: ('unprocessable_entity', 'unprocessable'),
    423: ('locked',),
    424: ('failed_dependency', 'dependency'),
    425: ('unordered_collection', 'unordered'),
    426: ('upgrade_required', 'upgrade'),
    # NOTE(review): 'precondition' is also an alias of 412 above; because
    # the loop below visits 428 later, ``codes.precondition`` ends up as 428.
    428: ('precondition_required', 'precondition'),
    429: ('too_many_requests', 'too_many'),
    431: ('header_fields_too_large', 'fields_too_large'),
    444: ('no_response', 'none'),
    449: ('retry_with', 'retry'),
    450: ('blocked_by_windows_parental_controls', 'parental_controls'),
    451: ('unavailable_for_legal_reasons', 'legal_reasons'),
    499: ('client_closed_request',),

    # Server Error.
    500: ('internal_server_error', 'server_error', '/o\\', '✗'),
    501: ('not_implemented',),
    502: ('bad_gateway',),
    503: ('service_unavailable', 'unavailable'),
    504: ('gateway_timeout',),
    505: ('http_version_not_supported', 'http_version'),
    506: ('variant_also_negotiates',),
    507: ('insufficient_storage',),
    509: ('bandwidth_limit_exceeded', 'bandwidth'),
    510: ('not_extended',),
    511: ('network_authentication_required', 'network_auth', 'network_authentication'),
}

# Lookup object: each alias above becomes an attribute whose value is the code.
codes = LookupDict(name='status_codes')

for code, titles in _codes.items():
    for title in titles:
        setattr(codes, title, code)
        # ASCII-art aliases like '\\o/' or '/o\\' have no useful upper-case
        # form, so only identifier-style aliases get an upper-case twin.
        if not title.startswith(('\\', '/')):
            setattr(codes, title.upper(), code)
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/structures.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/structures.py new file mode 100644 index 0000000..ce775ba --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/structures.py | |||
@@ -0,0 +1,105 @@ | |||
1 | # -*- coding: utf-8 -*- | ||
2 | |||
3 | """ | ||
4 | requests.structures | ||
5 | ~~~~~~~~~~~~~~~~~~~ | ||
6 | |||
7 | Data structures that power Requests. | ||
8 | """ | ||
9 | |||
import collections

try:
    # collections.Mapping / MutableMapping were removed from the top-level
    # ``collections`` namespace in Python 3.10.
    from collections.abc import Mapping, MutableMapping
except ImportError:  # Python 2
    from collections import Mapping, MutableMapping

from .compat import OrderedDict
13 | |||
14 | |||
class CaseInsensitiveDict(MutableMapping):
    """A case-insensitive ``dict``-like object.

    Implements all methods and operations of
    ``MutableMapping`` as well as dict's ``copy``. Also
    provides ``lower_items``.

    All keys are expected to be strings. The structure remembers the
    case of the last key to be set, and ``iter(instance)``,
    ``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()``
    will contain case-sensitive keys. However, querying and contains
    testing is case insensitive::

        cid = CaseInsensitiveDict()
        cid['Accept'] = 'application/json'
        cid['aCCEPT'] == 'application/json'  # True
        list(cid) == ['Accept']  # True

    For example, ``headers['content-encoding']`` will return the
    value of a ``'Content-Encoding'`` response header, regardless
    of how the header name was originally stored.

    If the constructor, ``.update``, or equality comparison
    operations are given keys that have equal ``.lower()``s, the
    behavior is undefined.
    """

    def __init__(self, data=None, **kwargs):
        # Internal store maps lowercased key -> (original key, value).
        self._store = OrderedDict()
        if data is None:
            data = {}
        self.update(data, **kwargs)

    def __setitem__(self, key, value):
        # Use the lowercased key for lookups, but store the actual
        # key alongside the value.
        self._store[key.lower()] = (key, value)

    def __getitem__(self, key):
        return self._store[key.lower()][1]

    def __delitem__(self, key):
        del self._store[key.lower()]

    def __iter__(self):
        # Yield the original-cased keys, in insertion order.
        return (casedkey for casedkey, mappedvalue in self._store.values())

    def __len__(self):
        return len(self._store)

    def lower_items(self):
        """Like iteritems(), but with all lowercase keys."""
        return (
            (lowerkey, keyval[1])
            for (lowerkey, keyval)
            in self._store.items()
        )

    def __eq__(self, other):
        if isinstance(other, Mapping):
            other = CaseInsensitiveDict(other)
        else:
            return NotImplemented
        # Compare insensitively
        return dict(self.lower_items()) == dict(other.lower_items())

    # Copy is required
    def copy(self):
        return CaseInsensitiveDict(self._store.values())

    def __repr__(self):
        return str(dict(self.items()))
87 | |||
88 | |||
class LookupDict(dict):
    """Dictionary lookup object.

    Subclasses ``dict`` but resolves item access through the instance
    ``__dict__``: attributes set on the object are what ``[]`` and
    ``get()`` return, and missing keys yield ``None`` instead of raising.
    """

    def __init__(self, name=None):
        super(LookupDict, self).__init__()
        self.name = name

    def __repr__(self):
        return '<lookup \'%s\'>' % (self.name)

    def __getitem__(self, key):
        # We allow fall-through here, so values default to None
        return self.__dict__.get(key, None)

    def get(self, key, default=None):
        return self.__dict__.get(key, default)
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/utils.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/utils.py new file mode 100644 index 0000000..fc4f894 --- /dev/null +++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/requests/utils.py | |||
@@ -0,0 +1,904 @@ | |||
1 | # -*- coding: utf-8 -*- | ||
2 | |||
3 | """ | ||
4 | requests.utils | ||
5 | ~~~~~~~~~~~~~~ | ||
6 | |||
7 | This module provides utility functions that are used within Requests | ||
8 | that are also useful for external consumption. | ||
9 | """ | ||
10 | |||
11 | import cgi | ||
12 | import codecs | ||
13 | import collections | ||
14 | import contextlib | ||
15 | import io | ||
16 | import os | ||
17 | import platform | ||
18 | import re | ||
19 | import socket | ||
20 | import struct | ||
21 | import warnings | ||
22 | |||
23 | from .__version__ import __version__ | ||
24 | from . import certs | ||
25 | # to_native_string is unused here, but imported here for backwards compatibility | ||
26 | from ._internal_utils import to_native_string | ||
27 | from .compat import parse_http_list as _parse_list_header | ||
28 | from .compat import ( | ||
29 | quote, urlparse, bytes, str, OrderedDict, unquote, getproxies, | ||
30 | proxy_bypass, urlunparse, basestring, integer_types, is_py3, | ||
31 | proxy_bypass_environment, getproxies_environment) | ||
32 | from .cookies import cookiejar_from_dict | ||
33 | from .structures import CaseInsensitiveDict | ||
34 | from .exceptions import ( | ||
35 | InvalidURL, InvalidHeader, FileModeWarning, UnrewindableBodyError) | ||
36 | |||
37 | NETRC_FILES = ('.netrc', '_netrc') | ||
38 | |||
39 | DEFAULT_CA_BUNDLE_PATH = certs.where() | ||
40 | |||
41 | |||
if platform.system() == 'Windows':
    # provide a proxy_bypass version on Windows without DNS lookups

    def proxy_bypass_registry(host):
        """Check the Windows registry's Internet Settings to decide whether
        ``host`` should bypass the proxy.

        Returns False when the registry cannot be read or proxying is
        disabled; otherwise matches ``host`` against the (glob-style)
        ProxyOverride entries.
        """
        if is_py3:
            import winreg
        else:
            import _winreg as winreg
        try:
            internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER,
                r'Software\Microsoft\Windows\CurrentVersion\Internet Settings')
            proxyEnable = winreg.QueryValueEx(internetSettings,
                                              'ProxyEnable')[0]
            proxyOverride = winreg.QueryValueEx(internetSettings,
                                                'ProxyOverride')[0]
        except OSError:
            return False
        if not proxyEnable or not proxyOverride:
            return False

        # make a check value list from the registry entry: replace the
        # '<local>' string by the localhost entry and the corresponding
        # canonical entry.
        proxyOverride = proxyOverride.split(';')
        # now check if we match one of the registry values.
        for test in proxyOverride:
            if test == '<local>':
                # '<local>' means "bypass for plain host names" (no dots).
                if '.' not in host:
                    return True
            # Translate the glob-style override into a regular expression.
            test = test.replace(".", r"\.")     # mask dots
            test = test.replace("*", r".*")     # change glob sequence
            test = test.replace("?", r".")      # change glob char
            if re.match(test, host, re.I):
                return True
        return False

    def proxy_bypass(host):  # noqa
        """Return True, if the host should be bypassed.

        Checks proxy settings gathered from the environment, if specified,
        or the registry.
        """
        if getproxies_environment():
            return proxy_bypass_environment(host)
        else:
            return proxy_bypass_registry(host)
88 | |||
89 | |||
def dict_to_sequence(d):
    """Return ``d`` as a sequence of pairs (its ``items()`` when it is a
    mapping, otherwise ``d`` unchanged)."""
    return d.items() if hasattr(d, 'items') else d
97 | |||
98 | |||
def super_len(o):
    """Best-effort count of the bytes *remaining* to be read from ``o``.

    Tries, in order: ``len(o)``, an explicit ``o.len`` attribute, ``fstat``
    on ``o.fileno()``, and finally seeking to the end. The current read
    position (``o.tell()``) is subtracted so partially-consumed file-like
    objects report only what is left. Returns 0 when no strategy works.
    """
    total_length = None
    current_position = 0

    if hasattr(o, '__len__'):
        total_length = len(o)

    elif hasattr(o, 'len'):
        # Objects that expose an explicit length attribute instead of __len__.
        total_length = o.len

    elif hasattr(o, 'fileno'):
        try:
            fileno = o.fileno()
        except io.UnsupportedOperation:
            # File-like object without a real OS descriptor (e.g. BytesIO).
            pass
        else:
            total_length = os.fstat(fileno).st_size

            # Having used fstat to determine the file length, we need to
            # confirm that this file was opened up in binary mode.
            if 'b' not in o.mode:
                warnings.warn((
                    "Requests has determined the content-length for this "
                    "request using the binary size of the file: however, the "
                    "file has been opened in text mode (i.e. without the 'b' "
                    "flag in the mode). This may lead to an incorrect "
                    "content-length. In Requests 3.0, support will be removed "
                    "for files in text mode."),
                    FileModeWarning
                )

    if hasattr(o, 'tell'):
        try:
            current_position = o.tell()
        except (OSError, IOError):
            # This can happen in some weird situations, such as when the file
            # is actually a special file descriptor like stdin. In this
            # instance, we don't know what the length is, so set it to zero and
            # let requests chunk it instead.
            if total_length is not None:
                current_position = total_length
        else:
            if hasattr(o, 'seek') and total_length is None:
                # StringIO and BytesIO have seek but no useable fileno
                try:
                    # seek to end of file
                    o.seek(0, 2)
                    total_length = o.tell()

                    # seek back to current position to support
                    # partially read file-like objects
                    o.seek(current_position or 0)
                except (OSError, IOError):
                    total_length = 0

    if total_length is None:
        total_length = 0

    # Never report a negative remainder (position may sit past a truncated file).
    return max(0, total_length - current_position)
158 | |||
159 | |||
def get_netrc_auth(url, raise_errors=False):
    """Returns the Requests tuple auth for a given url from netrc.

    :param url: URL whose host is looked up in the user's netrc file.
    :param raise_errors: when True, re-raise netrc parse/permission errors
        instead of silently skipping netrc authentication.
    :rtype: tuple of (login, password), or None when nothing applies.
    """
    try:
        from netrc import netrc, NetrcParseError

        netrc_path = None

        # Look for either ~/.netrc or ~/_netrc (see NETRC_FILES).
        for f in NETRC_FILES:
            try:
                loc = os.path.expanduser('~/{0}'.format(f))
            except KeyError:
                # os.path.expanduser can fail when $HOME is undefined and
                # getpwuid fails. See http://bugs.python.org/issue20164 &
                # https://github.com/requests/requests/issues/1846
                return

            if os.path.exists(loc):
                netrc_path = loc
                break

        # Abort early if there isn't one.
        if netrc_path is None:
            return

        ri = urlparse(url)

        # Strip port numbers from netloc. This weird `if...encode`` dance is
        # used for Python 3.2, which doesn't support unicode literals.
        splitstr = b':'
        if isinstance(url, str):
            splitstr = splitstr.decode('ascii')
        host = ri.netloc.split(splitstr)[0]

        try:
            _netrc = netrc(netrc_path).authenticators(host)
            if _netrc:
                # Return with login / password
                login_i = (0 if _netrc[0] else 1)
                return (_netrc[login_i], _netrc[2])
        except (NetrcParseError, IOError):
            # If there was a parsing error or a permissions issue reading the file,
            # we'll just skip netrc auth unless explicitly asked to raise errors.
            if raise_errors:
                raise

    # AppEngine hackiness.
    except (ImportError, AttributeError):
        pass
209 | |||
210 | |||
def guess_filename(obj):
    """Return the basename of ``obj.name`` when it looks like a real path.

    Pseudo-files (e.g. names like ``'<stdin>'``) and non-string names
    yield ``None``.
    """
    name = getattr(obj, 'name', None)
    if not name or not isinstance(name, basestring):
        return None
    if name.startswith('<') or name.endswith('>'):
        return None
    return os.path.basename(name)
217 | |||
218 | |||
def from_key_val_list(value):
    """Coerce ``value`` into an :class:`~collections.OrderedDict`.

    ``None`` passes through unchanged, scalars are rejected, and anything
    else (a mapping or an iterable of 2-tuples) is handed to ``OrderedDict``::

        >>> from_key_val_list([('key', 'val')])
        OrderedDict([('key', 'val')])
        >>> from_key_val_list('string')
        ValueError: need more than 1 value to unpack
        >>> from_key_val_list({'key': 'val'})
        OrderedDict([('key', 'val')])

    :rtype: OrderedDict
    """
    if value is None:
        return None

    # Plain scalars carry no key/value structure.
    if isinstance(value, (str, bytes, bool, int)):
        raise ValueError('cannot encode objects that are not 2-tuples')

    return OrderedDict(value)
242 | |||
243 | |||
def to_key_val_list(value):
    """Take an object and test to see if it can be represented as a
    dictionary. If it can be, return a list of tuples, e.g.,

    ::

        >>> to_key_val_list([('key', 'val')])
        [('key', 'val')]
        >>> to_key_val_list({'key': 'val'})
        [('key', 'val')]
        >>> to_key_val_list('string')
        ValueError: cannot encode objects that are not 2-tuples.

    :rtype: list
    """
    if value is None:
        return None

    if isinstance(value, (str, bytes, bool, int)):
        raise ValueError('cannot encode objects that are not 2-tuples')

    # ``collections.Mapping`` moved to ``collections.abc`` and the old alias
    # was removed in Python 3.10, so resolve it compatibly here.
    try:
        from collections.abc import Mapping
    except ImportError:  # Python 2
        from collections import Mapping

    if isinstance(value, Mapping):
        value = value.items()

    return list(value)
269 | |||
270 | |||
271 | # From mitsuhiko/werkzeug (used with permission). | ||
def parse_list_header(value):
    """Parse lists as described by RFC 2068 Section 2.

    In particular, parse comma-separated lists where the elements of
    the list may include quoted-strings. A quoted-string could
    contain a comma. A non-quoted string could have quotes in the
    middle. Quotes are removed automatically after parsing.

    The return value is a standard :class:`list`:

    >>> parse_list_header('token, "quoted value"')
    ['token', 'quoted value']

    :param value: a string with a list header.
    :return: :class:`list`
    :rtype: list
    """
    # Unwrap quoted items; everything else passes through untouched.
    return [
        unquote_header_value(item[1:-1]) if item[:1] == item[-1:] == '"' else item
        for item in _parse_list_header(value)
    ]
301 | |||
302 | |||
303 | # From mitsuhiko/werkzeug (used with permission). | ||
def parse_dict_header(value):
    """Parse lists of key, value pairs as described by RFC 2068 Section 2 and
    convert them into a python dict:

    >>> d = parse_dict_header('foo="is a fish", bar="as well"')
    >>> type(d) is dict
    True
    >>> sorted(d.items())
    [('bar', 'as well'), ('foo', 'is a fish')]

    If there is no value for a key it will be `None`:

    >>> parse_dict_header('key_without_value')
    {'key_without_value': None}

    :param value: a string with a dict header.
    :return: :class:`dict`
    :rtype: dict
    """
    result = {}
    for item in _parse_list_header(value):
        name, sep, raw = item.partition('=')
        if not sep:
            # Bare token: present, but valueless.
            result[item] = None
            continue
        if raw[:1] == raw[-1:] == '"':
            raw = unquote_header_value(raw[1:-1])
        result[name] = raw
    return result
336 | |||
337 | |||
338 | # From mitsuhiko/werkzeug (used with permission). | ||
def unquote_header_value(value, is_filename=False):
    r"""Unquotes a header value. (Reversal of :func:`quote_header_value`).
    This does not use the real unquoting but what browsers are actually
    using for quoting (IE, for instance, uploads files with
    ``"C:\foo\bar.txt"`` as the filename).

    :param value: the header value to unquote.
    :rtype: str
    """
    if not (value and value[0] == value[-1] == '"'):
        return value

    # Strip the surrounding quotes.
    value = value[1:-1]

    # A UNC path keeps its leading double slash: collapsing it via the
    # replace() below would corrupt the path. See #458.
    if is_filename and value[:2] == '\\\\':
        return value

    return value.replace('\\\\', '\\').replace('\\"', '"')
362 | |||
363 | |||
def dict_from_cookiejar(cj):
    """Returns a key/value dictionary from a CookieJar.

    :param cj: CookieJar object to extract cookies from.
    :rtype: dict
    """
    return {cookie.name: cookie.value for cookie in cj}
377 | |||
378 | |||
def add_dict_to_cookiejar(cj, cookie_dict):
    """Returns a CookieJar from a key/value dictionary.

    :param cj: CookieJar to insert cookies into.
    :param cookie_dict: Dict of key/values to insert into CookieJar.
    :rtype: CookieJar
    """

    # NOTE(review): delegates to cookiejar_from_dict, which appears to merge
    # the dict into ``cj`` and return it — confirm against cookies.py.
    return cookiejar_from_dict(cookie_dict, cj)
388 | |||
389 | |||
def get_encodings_from_content(content):
    """Returns encodings from given content string.

    Scans for ``<meta charset=...>``, ``<meta ... content=...charset=...>``
    pragma, and XML-declaration encodings, in that order.

    :param content: bytestring to extract encodings from.
    """
    warnings.warn((
        'In requests 3.0, get_encodings_from_content will be removed. For '
        'more information, please see the discussion on issue #2266. (This'
        ' warning should only appear once.)'),
        DeprecationWarning)

    patterns = (
        re.compile(r'<meta.*?charset=["\']*(.+?)["\'>]', flags=re.I),
        re.compile(r'<meta.*?content=["\']*;?charset=(.+?)["\'>]', flags=re.I),
        re.compile(r'^<\?xml.*?encoding=["\']*(.+?)["\'>]'),
    )

    found = []
    for pattern in patterns:
        found.extend(pattern.findall(content))
    return found
408 | |||
409 | |||
def _parse_content_type_header(header):
    """Split a Content-Type header into ``(content_type, params_dict)``.

    Drop-in replacement for ``cgi.parse_header`` — the ``cgi`` module is
    deprecated (PEP 594) and removed in Python 3.13. Parameter names are
    lowercased; surrounding quotes/spaces are stripped from values.
    """
    tokens = header.split(';')
    content_type, params = tokens[0].strip(), tokens[1:]
    params_dict = {}
    items_to_strip = "\"' "

    for param in params:
        param = param.strip()
        if param:
            key, value = param, True
            index_of_equals = param.find("=")
            if index_of_equals != -1:
                key = param[:index_of_equals].strip(items_to_strip)
                value = param[index_of_equals + 1:].strip(items_to_strip)
            params_dict[key.lower()] = value
    return content_type, params_dict


def get_encoding_from_headers(headers):
    """Returns encodings from given HTTP Header Dict.

    :param headers: dictionary to extract encoding from.
    :rtype: str
    """

    content_type = headers.get('content-type')

    if not content_type:
        return None

    content_type, params = _parse_content_type_header(content_type)

    if 'charset' in params:
        return params['charset'].strip("'\"")

    # RFC 2616 default for text/* when no charset is declared.
    if 'text' in content_type:
        return 'ISO-8859-1'
429 | |||
430 | |||
def stream_decode_response_unicode(iterator, r):
    """Incrementally decode byte chunks from ``iterator`` using
    ``r.encoding``; chunks pass through untouched when no encoding is set."""

    if r.encoding is None:
        for chunk in iterator:
            yield chunk
        return

    decoder = codecs.getincrementaldecoder(r.encoding)(errors='replace')
    for chunk in iterator:
        decoded = decoder.decode(chunk)
        if decoded:
            yield decoded

    # Flush whatever the decoder is still buffering (e.g. a split
    # multi-byte sequence at the very end of the stream).
    tail = decoder.decode(b'', final=True)
    if tail:
        yield tail
447 | |||
448 | |||
def iter_slices(string, slice_length):
    """Iterate over slices of a string.

    A ``None`` or non-positive ``slice_length`` yields the whole string
    in a single slice.
    """
    if not string:
        return
    step = slice_length if slice_length and slice_length > 0 else len(string)
    for start in range(0, len(string), step):
        yield string[start:start + step]
457 | |||
458 | |||
def get_unicode_from_response(r):
    """Returns the requested content back in unicode.

    :param r: Response object to get unicode content from.

    Tried:

    1. charset from content-type
    2. fall back and replace all unicode characters

    :rtype: str
    """
    warnings.warn((
        'In requests 3.0, get_unicode_from_response will be removed. For '
        'more information, please see the discussion on issue #2266. (This'
        ' warning should only appear once.)'),
        DeprecationWarning)

    # Try the charset advertised in the Content-Type header first.
    encoding = get_encoding_from_headers(r.headers)

    if encoding:
        try:
            return str(r.content, encoding)
        except UnicodeError:
            # Declared charset did not fit the payload; fall through to the
            # lenient decode below. (A dead ``tried_encodings`` accumulator
            # that was written but never read has been removed.)
            pass

    # Fall back: decode with replacement characters. When ``encoding`` is
    # None, str() raises TypeError and the raw bytes are returned unchanged.
    try:
        return str(r.content, encoding, errors='replace')
    except TypeError:
        return r.content
493 | |||
494 | |||
# The unreserved URI characters (RFC 3986)
UNRESERVED_SET = frozenset(
    "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" + "0123456789-._~")


def unquote_unreserved(uri):
    """Un-escape any percent-escape sequences in a URI that are unreserved
    characters. This leaves all reserved, illegal and non-ASCII bytes encoded.

    :rtype: str
    """
    parts = uri.split('%')
    for i, part in enumerate(parts[1:], 1):
        candidate = part[0:2]

        # Anything that is not two alphanumeric chars is not an escape;
        # restore the '%' verbatim.
        if len(candidate) != 2 or not candidate.isalnum():
            parts[i] = '%' + part
            continue

        try:
            decoded = chr(int(candidate, 16))
        except ValueError:
            raise InvalidURL("Invalid percent-escape sequence: '%s'" % candidate)

        if decoded in UNRESERVED_SET:
            parts[i] = decoded + part[2:]
        else:
            parts[i] = '%' + part
    return ''.join(parts)
522 | |||
523 | |||
def requote_uri(uri):
    """Re-quote the given URI.

    This function passes the given URI through an unquote/quote cycle to
    ensure that it is fully and consistently quoted.

    :rtype: str
    """
    safe_with_percent = "!#$%&'()*+,/:;=?@[]~"
    safe_without_percent = "!#$&'()*+,/:;=?@[]~"
    try:
        # Unquote only the unreserved characters
        # Then quote only illegal characters (do not quote reserved,
        # unreserved, or '%')
        return quote(unquote_unreserved(uri), safe=safe_with_percent)
    except InvalidURL:
        # We couldn't unquote the given URI, so let's try quoting it, but
        # there may be unquoted '%'s in the URI. We need to make sure they're
        # properly quoted so they do not cause issues elsewhere.
        # (Note '%' is absent from safe_without_percent for that reason.)
        return quote(uri, safe=safe_without_percent)
544 | |||
545 | |||
def address_in_network(ip, net):
    """This function allows you to check if an IP belongs to a network subnet

    Example: returns True if ip = 192.168.1.1 and net = 192.168.1.0/24
             returns False if ip = 192.168.1.1 and net = 192.168.100.0/24

    :rtype: bool
    """
    def _as_long(dotted):
        # Packed address interpreted in native byte order, matching the
        # original '=L' unpacking.
        return struct.unpack('=L', socket.inet_aton(dotted))[0]

    netaddr, bits = net.split('/')
    netmask = _as_long(dotted_netmask(int(bits)))
    network = _as_long(netaddr) & netmask
    return (_as_long(ip) & netmask) == (network & netmask)
559 | |||
560 | |||
def dotted_netmask(mask):
    """Converts mask from /xx format to xxx.xxx.xxx.xxx

    Example: if mask is 24 function returns 255.255.255.0

    :rtype: str
    """
    host_bits = 32 - mask
    # Set the top ``mask`` bits by clearing the low ``host_bits`` ones.
    bits = 0xffffffff ^ ((1 << host_bits) - 1)
    return socket.inet_ntoa(struct.pack('>I', bits))
570 | |||
571 | |||
def is_ipv4_address(string_ip):
    """Return True when ``string_ip`` parses as an IPv4 address.

    :rtype: bool
    """
    try:
        socket.inet_aton(string_ip)
        return True
    except socket.error:
        return False
581 | |||
582 | |||
def is_valid_cidr(string_network):
    """
    Very simple check of the cidr format in no_proxy variable.

    :rtype: bool
    """
    # Exactly one '/' separates address from prefix length.
    if string_network.count('/') != 1:
        return False

    address, _, mask_text = string_network.partition('/')

    try:
        mask = int(mask_text)
    except ValueError:
        return False
    if not 1 <= mask <= 32:
        return False

    try:
        socket.inet_aton(address)
    except socket.error:
        return False

    return True
605 | |||
606 | |||
@contextlib.contextmanager
def set_environ(env_name, value):
    """Temporarily set environment variable ``env_name`` to ``value``.

    The previous value (or absence) is restored on exit. When ``value``
    is None the environment is left completely untouched.
    """
    if value is None:
        yield
        return

    old_value = os.environ.get(env_name)
    os.environ[env_name] = value
    try:
        yield
    finally:
        if old_value is None:
            del os.environ[env_name]
        else:
            os.environ[env_name] = old_value
627 | |||
628 | |||
def should_bypass_proxies(url, no_proxy):
    """
    Returns whether we should bypass proxies or not.

    :param url: the URL being requested.
    :param no_proxy: an explicit comma-separated no_proxy list that, when
        given, overrides the ``no_proxy``/``NO_PROXY`` environment variables.
    :rtype: bool
    """
    # Environment variables may be spelled lower- or upper-case.
    get_proxy = lambda k: os.environ.get(k) or os.environ.get(k.upper())

    # First check whether no_proxy is defined. If it is, check that the URL
    # we're getting isn't in the no_proxy list.
    no_proxy_arg = no_proxy
    if no_proxy is None:
        no_proxy = get_proxy('no_proxy')
    netloc = urlparse(url).netloc

    if no_proxy:
        # We need to check whether we match here. We need to see if we match
        # the end of the netloc, both with and without the port.
        no_proxy = (
            host for host in no_proxy.replace(' ', '').split(',') if host
        )

        # Strip any port so CIDR/IP comparisons see a bare address.
        ip = netloc.split(':')[0]
        if is_ipv4_address(ip):
            for proxy_ip in no_proxy:
                if is_valid_cidr(proxy_ip):
                    if address_in_network(ip, proxy_ip):
                        return True
                elif ip == proxy_ip:
                    # If no_proxy ip was defined in plain IP notation instead of cidr notation &
                    # matches the IP of the index
                    return True
        else:
            for host in no_proxy:
                if netloc.endswith(host) or netloc.split(':')[0].endswith(host):
                    # The URL does match something in no_proxy, so we don't want
                    # to apply the proxies on this URL.
                    return True

    # If the system proxy settings indicate that this URL should be bypassed,
    # don't proxy.
    # The proxy_bypass function is incredibly buggy on OS X in early versions
    # of Python 2.6, so allow this call to fail. Only catch the specific
    # exceptions we've seen, though: this call failing in other ways can reveal
    # legitimate problems.
    # Mirror the caller-supplied no_proxy into the environment temporarily so
    # platform proxy_bypass implementations honour it.
    with set_environ('no_proxy', no_proxy_arg):
        try:
            bypass = proxy_bypass(netloc)
        except (TypeError, socket.gaierror):
            bypass = False

    if bypass:
        return True

    return False
684 | |||
685 | |||
def get_environ_proxies(url, no_proxy=None):
    """
    Return a dict of environment proxies.

    :rtype: dict
    """
    # An empty mapping means "connect directly" for this URL.
    if should_bypass_proxies(url, no_proxy=no_proxy):
        return {}
    return getproxies()
696 | |||
697 | |||
def select_proxy(url, proxies):
    """Select a proxy for the url, if applicable.

    :param url: The url being for the request
    :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs
    """
    proxies = proxies or {}
    parts = urlparse(url)

    # Without a hostname we can only match on scheme (or the 'all' fallback).
    if parts.hostname is None:
        return proxies.get(parts.scheme, proxies.get('all'))

    # Most-specific key wins: scheme+host, then scheme, then all+host, then all.
    candidates = (
        parts.scheme + '://' + parts.hostname,
        parts.scheme,
        'all://' + parts.hostname,
        'all',
    )
    for candidate in candidates:
        if candidate in proxies:
            return proxies[candidate]
    return None
722 | |||
723 | |||
def default_user_agent(name="python-requests"):
    """
    Return a string representing the default user agent.

    :rtype: str
    """
    # e.g. "python-requests/2.18.4"
    return '{0}/{1}'.format(name, __version__)
731 | |||
732 | |||
def default_headers():
    """Build the default header set sent with every request.

    :rtype: requests.structures.CaseInsensitiveDict
    """
    headers = CaseInsensitiveDict()
    headers['User-Agent'] = default_user_agent()
    headers['Accept-Encoding'] = ', '.join(('gzip', 'deflate'))
    headers['Accept'] = '*/*'
    headers['Connection'] = 'keep-alive'
    return headers
743 | |||
744 | |||
def parse_header_links(value):
    """Parse an HTTP Link header into a list of link dicts.

    i.e. Link: <http:/.../front.jpeg>; rel=front; type="image/jpeg",<http://.../back.jpeg>; rel=back;type="image/jpeg"

    :param value: the Link header value (str).
    :rtype: list
    """

    links = []

    replace_chars = ' \'"'

    # Bug fix: an empty (or quotes/whitespace-only) header used to fall
    # through the split and return [{'url': ''}]; return [] instead.
    value = value.strip(replace_chars)
    if not value:
        return links

    for val in re.split(', *<', value):
        try:
            url, params = val.split(';', 1)
        except ValueError:
            # No parameters attached to this link.
            url, params = val, ''

        link = {'url': url.strip('<> \'"')}

        for param in params.split(';'):
            try:
                # Renamed from `value` to avoid shadowing the parameter.
                key, param_value = param.split('=')
            except ValueError:
                break

            link[key.strip(replace_chars)] = param_value.strip(replace_chars)

        links.append(link)

    return links
776 | |||
777 | |||
778 | # Null bytes; no need to recreate these on each call to guess_json_utf | ||
779 | _null = '\x00'.encode('ascii') # encoding to ASCII for Python 3 | ||
780 | _null2 = _null * 2 | ||
781 | _null3 = _null * 3 | ||
782 | |||
783 | |||
def guess_json_utf(data):
    """Guess the Unicode encoding of a JSON byte string.

    Inspects an explicit BOM first, then falls back to counting the null
    bytes among the first four bytes: JSON always starts with two
    ASCII characters, so null placement pins down the encoding.

    :rtype: str or None
    """
    head = data[:4]

    # An explicit byte-order mark wins outright.
    if head in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE):
        return 'utf-32'  # BOM included
    if head[:3] == codecs.BOM_UTF8:
        return 'utf-8-sig'  # BOM included, MS style (discouraged)
    if head[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE):
        return 'utf-16'  # BOM included

    nulls = head.count(_null)
    if nulls == 0:
        return 'utf-8'
    if nulls == 2:
        # Two nulls means UTF-16 without a BOM; position gives endianness.
        if head[::2] == _null2:  # 1st and 3rd are null
            return 'utf-16-be'
        if head[1::2] == _null2:  # 2nd and 4th are null
            return 'utf-16-le'
        # Did not detect 2 valid UTF-16 ascii-range characters
    elif nulls == 3:
        # Three nulls means UTF-32 without a BOM.
        if head[:3] == _null3:
            return 'utf-32-be'
        if head[1:] == _null3:
            return 'utf-32-le'
        # Did not detect a valid UTF-32 ascii-range character
    return None
814 | |||
815 | |||
def prepend_scheme_if_needed(url, new_scheme):
    """Given a URL that may or may not have a scheme, prepend the given scheme.
    Does not replace a present scheme with the one provided as an argument.

    :rtype: str
    """
    parsed = urlparse(url, new_scheme)
    scheme, netloc, path, params, query, fragment = parsed

    # urlparse is a finicky beast, and sometimes decides that there isn't a
    # netloc present. Assume that it's being over-cautious, and switch netloc
    # and path if urlparse decided there was no netloc.
    if not netloc:
        netloc, path = path, netloc

    return urlunparse((scheme, netloc, path, params, query, fragment))
831 | |||
832 | |||
def get_auth_from_url(url):
    """Given a url with authentication components, extract them into a tuple of
    username,password.

    :rtype: (str,str)
    """
    parsed = urlparse(url)
    username, password = parsed.username, parsed.password

    # A URL missing either component yields empty credentials.
    if username is None or password is None:
        return ('', '')
    return (unquote(username), unquote(password))
847 | |||
848 | |||
# Moved outside of function to avoid recompile every call
_CLEAN_HEADER_REGEX_BYTE = re.compile(br'^\S[^\r\n]*$|^$')
_CLEAN_HEADER_REGEX_STR = re.compile(r'^\S[^\r\n]*$|^$')


def check_header_validity(header):
    """Verifies that header value is a string which doesn't contain
    leading whitespace or return characters. This prevents unintended
    header injection.

    :param header: tuple, in the format (name, value).
    """
    name, value = header

    # Bytes and text use structurally identical patterns compiled above.
    pat = _CLEAN_HEADER_REGEX_BYTE if isinstance(value, bytes) else _CLEAN_HEADER_REGEX_STR
    try:
        if not pat.match(value):
            raise InvalidHeader("Invalid return character or leading space in header: %s" % name)
    except TypeError:
        # pat.match rejects non-str/bytes values with a TypeError.
        raise InvalidHeader("Value for header {%s: %s} must be of type str or "
                            "bytes, not %s" % (name, value, type(value)))
873 | |||
874 | |||
def urldefragauth(url):
    """
    Given a url remove the fragment and the authentication part.

    :rtype: str
    """
    parts = urlparse(url)
    netloc, path = parts.netloc, parts.path

    # see func:`prepend_scheme_if_needed`
    if not netloc:
        netloc, path = path, netloc

    # Drop everything up to and including the last '@' (the credentials).
    netloc = netloc.rsplit('@', 1)[-1]

    return urlunparse((parts.scheme, netloc, path, parts.params, parts.query, ''))
890 | |||
891 | |||
def rewind_body(prepared_request):
    """Move file pointer back to its recorded starting position
    so it can be read again on redirect.
    """
    seek = getattr(prepared_request.body, 'seek', None)

    # Without a seek() and a recorded integer position, rewinding is impossible.
    if seek is None or not isinstance(prepared_request._body_position, integer_types):
        raise UnrewindableBodyError("Unable to rewind request body for redirect.")

    try:
        seek(prepared_request._body_position)
    except (IOError, OSError):
        raise UnrewindableBodyError("An error occurred when rewinding request "
                                    "body for redirect.")