Diffstat (limited to 'venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/connectionpool.py')
-rw-r--r--  venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/connectionpool.py  905
1 files changed, 905 insertions, 0 deletions
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/connectionpool.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/connectionpool.py
new file mode 100644
index 0000000..b099ca8
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/urllib3/connectionpool.py
@@ -0,0 +1,905 @@
1from __future__ import absolute_import
2import errno
3import logging
4import sys
5import warnings
6
7from socket import error as SocketError, timeout as SocketTimeout
8import socket
9
10
11from .exceptions import (
12 ClosedPoolError,
13 ProtocolError,
14 EmptyPoolError,
15 HeaderParsingError,
16 HostChangedError,
17 LocationValueError,
18 MaxRetryError,
19 ProxyError,
20 ReadTimeoutError,
21 SSLError,
22 TimeoutError,
23 InsecureRequestWarning,
24 NewConnectionError,
25)
26from .packages.ssl_match_hostname import CertificateError
27from .packages import six
28from .packages.six.moves import queue
29from .connection import (
30 port_by_scheme,
31 DummyConnection,
32 HTTPConnection, HTTPSConnection, VerifiedHTTPSConnection,
33 HTTPException, BaseSSLError,
34)
35from .request import RequestMethods
36from .response import HTTPResponse
37
38from .util.connection import is_connection_dropped
39from .util.request import set_file_position
40from .util.response import assert_header_parsing
41from .util.retry import Retry
42from .util.timeout import Timeout
43from .util.url import get_host, Url
44
45
46if six.PY2:
47 # Queue is imported for side effects on MS Windows
48 import Queue as _unused_module_Queue # noqa: F401
49
50xrange = six.moves.xrange
51
52log = logging.getLogger(__name__)
53
54_Default = object()
55
56
57# Pool objects
58class ConnectionPool(object):
59 """
60 Base class for all connection pools, such as
61 :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`.
62 """
63
64 scheme = None
65 QueueCls = queue.LifoQueue
66
67 def __init__(self, host, port=None):
68 if not host:
69 raise LocationValueError("No host specified.")
70
71 self.host = _ipv6_host(host).lower()
72 self._proxy_host = host.lower()
73 self.port = port
74
75 def __str__(self):
76 return '%s(host=%r, port=%r)' % (type(self).__name__,
77 self.host, self.port)
78
79 def __enter__(self):
80 return self
81
82 def __exit__(self, exc_type, exc_val, exc_tb):
83 self.close()
84 # Return False to re-raise any potential exceptions
85 return False
86
87 def close(self):
88 """
89 Close all pooled connections and disable the pool.
90 """
91 pass
92
93
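# A minimal usage sketch: because ConnectionPool defines __enter__/__exit__,
# a concrete subclass such as HTTPConnectionPool below can be used as a
# context manager so close() runs on exit (the host is a placeholder):
#
#     with HTTPConnectionPool('example.org', port=80, maxsize=2) as pool:
#         r = pool.request('GET', '/')
#         print(r.status)
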
94# This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252
95_blocking_errnos = set([errno.EAGAIN, errno.EWOULDBLOCK])
96
97
98class HTTPConnectionPool(ConnectionPool, RequestMethods):
99 """
100 Thread-safe connection pool for one host.
101
102 :param host:
103 Host used for this HTTP Connection (e.g. "localhost"), passed into
104 :class:`httplib.HTTPConnection`.
105
106 :param port:
107 Port used for this HTTP Connection (None is equivalent to 80), passed
108 into :class:`httplib.HTTPConnection`.
109
110 :param strict:
111 Causes BadStatusLine to be raised if the status line can't be parsed
112 as a valid HTTP/1.0 or 1.1 status line, passed into
113 :class:`httplib.HTTPConnection`.
114
115 .. note::
116 Only works in Python 2. This parameter is ignored in Python 3.
117
118 :param timeout:
119 Socket timeout in seconds for each individual connection. This can
120 be a float or integer, which sets the timeout for the HTTP request,
121 or an instance of :class:`urllib3.util.Timeout` which gives you more
122 fine-grained control over request timeouts. After the constructor has
123 run, this is always a `urllib3.util.Timeout` object.
124
125 :param maxsize:
126 Number of connections to save that can be reused. More than 1 is useful
127 in multithreaded situations. If ``block`` is set to False, more
128 connections will be created but they will not be saved once they've
129 been used.
130
131 :param block:
132 If set to True, no more than ``maxsize`` connections will be used at
133 a time. When no free connections are available, the call will block
134 until a connection has been released. This is a useful side effect for
135 particular multithreaded situations where one does not want to use more
136 than maxsize connections per host to prevent flooding.
137
138 :param headers:
139 Headers to include with all requests, unless other headers are given
140 explicitly.
141
142 :param retries:
143 Retry configuration to use by default with requests in this pool.
144
145 :param _proxy:
146 Parsed proxy URL; should not be used directly. Instead, see
147 :class:`urllib3.connectionpool.ProxyManager`.
148
149 :param _proxy_headers:
150 A dictionary with proxy headers; should not be used directly.
151 Instead, see :class:`urllib3.connectionpool.ProxyManager`.
152
153 :param \\**conn_kw:
154 Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`,
155 :class:`urllib3.connection.HTTPSConnection` instances.
156 """
157
158 scheme = 'http'
159 ConnectionCls = HTTPConnection
160 ResponseCls = HTTPResponse
161
162 def __init__(self, host, port=None, strict=False,
163 timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1, block=False,
164 headers=None, retries=None,
165 _proxy=None, _proxy_headers=None,
166 **conn_kw):
167 ConnectionPool.__init__(self, host, port)
168 RequestMethods.__init__(self, headers)
169
170 self.strict = strict
171
172 if not isinstance(timeout, Timeout):
173 timeout = Timeout.from_float(timeout)
174
175 if retries is None:
176 retries = Retry.DEFAULT
177
178 self.timeout = timeout
179 self.retries = retries
180
181 self.pool = self.QueueCls(maxsize)
182 self.block = block
183
184 self.proxy = _proxy
185 self.proxy_headers = _proxy_headers or {}
186
187 # Fill the queue up so that doing get() on it will block properly
188 for _ in xrange(maxsize):
189 self.pool.put(None)
190
191 # These are mostly for testing and debugging purposes.
192 self.num_connections = 0
193 self.num_requests = 0
194 self.conn_kw = conn_kw
195
196 if self.proxy:
197 # Enable Nagle's algorithm for proxies, to avoid packet fragmentation.
198 # We cannot know if the user has added default socket options, so we cannot replace the
199 # list.
200 self.conn_kw.setdefault('socket_options', [])
201
202 def _new_conn(self):
203 """
204 Return a fresh :class:`HTTPConnection`.
205 """
206 self.num_connections += 1
207 log.debug("Starting new HTTP connection (%d): %s",
208 self.num_connections, self.host)
209
210 conn = self.ConnectionCls(host=self.host, port=self.port,
211 timeout=self.timeout.connect_timeout,
212 strict=self.strict, **self.conn_kw)
213 return conn
214
215 def _get_conn(self, timeout=None):
216 """
217 Get a connection. Will return a pooled connection if one is available.
218
219 If no connections are available and :prop:`.block` is ``False``, then a
220 fresh connection is returned.
221
222 :param timeout:
223 Seconds to wait before giving up and raising
224 :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and
225 :prop:`.block` is ``True``.
226 """
227 conn = None
228 try:
229 conn = self.pool.get(block=self.block, timeout=timeout)
230
231 except AttributeError: # self.pool is None
232 raise ClosedPoolError(self, "Pool is closed.")
233
234 except queue.Empty:
235 if self.block:
236 raise EmptyPoolError(self,
237 "Pool reached maximum size and no more "
238 "connections are allowed.")
239 pass # Oh well, we'll create a new connection then
240
241 # If this is a persistent connection, check if it got disconnected
242 if conn and is_connection_dropped(conn):
243 log.debug("Resetting dropped connection: %s", self.host)
244 conn.close()
245 if getattr(conn, 'auto_open', 1) == 0:
246 # This is a proxied connection that has been mutated by
247 # httplib._tunnel() and cannot be reused (since it would
248 # attempt to bypass the proxy)
249 conn = None
250
251 return conn or self._new_conn()
252
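# A sketch of the checkout/checkin pairing implied by _get_conn/_put_conn
# (normally done inside urlopen(); shown only to illustrate the contract,
# with placeholder values):
#
#     conn = pool._get_conn(timeout=1.0)  # may block when pool.block is True
#     try:
#         conn.request('GET', '/')
#         resp = conn.getresponse()
#     finally:
#         pool._put_conn(conn)  # reused later, or closed if the pool is full
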
253 def _put_conn(self, conn):
254 """
255 Put a connection back into the pool.
256
257 :param conn:
258 Connection object for the current host and port as returned by
259 :meth:`._new_conn` or :meth:`._get_conn`.
260
261 If the pool is already full, the connection is closed and discarded
262 because we exceeded maxsize. If connections are discarded frequently,
263 then maxsize should be increased.
264
265 If the pool is closed, then the connection will be closed and discarded.
266 """
267 try:
268 self.pool.put(conn, block=False)
269 return # Everything is dandy, done.
270 except AttributeError:
271 # self.pool is None.
272 pass
273 except queue.Full:
274 # This should never happen if self.block == True
275 log.warning(
276 "Connection pool is full, discarding connection: %s",
277 self.host)
278
279 # Connection never got put back into the pool, close it.
280 if conn:
281 conn.close()
282
283 def _validate_conn(self, conn):
284 """
285 Called right before a request is made, after the socket is created.
286 """
287 pass
288
289 def _prepare_proxy(self, conn):
290 # Nothing to do for HTTP connections.
291 pass
292
293 def _get_timeout(self, timeout):
294 """ Helper that always returns a :class:`urllib3.util.Timeout` """
295 if timeout is _Default:
296 return self.timeout.clone()
297
298 if isinstance(timeout, Timeout):
299 return timeout.clone()
300 else:
301 # User passed us an int/float. This is for backwards compatibility,
302 # can be removed later
303 return Timeout.from_float(timeout)
304
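# A small sketch of the normalization above (illustrative values):
#
#     pool._get_timeout(3.0)            # -> Timeout(connect=3.0, read=3.0)
#     pool._get_timeout(Timeout(5.0))   # -> an independent clone of that Timeout
#     pool._get_timeout(_Default)       # -> a clone of pool.timeout
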
305 def _raise_timeout(self, err, url, timeout_value):
306 """Is the error actually a timeout? Will raise a ReadTimeout or pass"""
307
308 if isinstance(err, SocketTimeout):
309 raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)
310
311 # See the comment in _make_request() about EAGAIN in Python 3. In Python 2
312 # we have to specifically catch it and throw the timeout error.
313 if hasattr(err, 'errno') and err.errno in _blocking_errnos:
314 raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)
315
316 # Catch possible read timeouts thrown as SSL errors. If not the
317 # case, rethrow the original. We need to do this because of:
318 # http://bugs.python.org/issue10272
319 if 'timed out' in str(err) or 'did not complete (read)' in str(err): # Python 2.6
320 raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)
321
322 def _make_request(self, conn, method, url, timeout=_Default, chunked=False,
323 **httplib_request_kw):
324 """
325 Perform a request on a given urllib connection object taken from our
326 pool.
327
328 :param conn:
329 a connection from one of our connection pools
330
331 :param timeout:
332 Socket timeout in seconds for the request. This can be a
333 float or integer, which will set the same timeout value for
334 the socket connect and the socket read, or an instance of
335 :class:`urllib3.util.Timeout`, which gives you more fine-grained
336 control over your timeouts.
337 """
338 self.num_requests += 1
339
340 timeout_obj = self._get_timeout(timeout)
341 timeout_obj.start_connect()
342 conn.timeout = timeout_obj.connect_timeout
343
344 # Trigger any extra validation we need to do.
345 try:
346 self._validate_conn(conn)
347 except (SocketTimeout, BaseSSLError) as e:
348 # Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout.
349 self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
350 raise
351
352 # conn.request() calls httplib.*.request, not the method in
353 # urllib3.request. It also calls makefile (recv) on the socket.
354 if chunked:
355 conn.request_chunked(method, url, **httplib_request_kw)
356 else:
357 conn.request(method, url, **httplib_request_kw)
358
359 # Reset the timeout for the recv() on the socket
360 read_timeout = timeout_obj.read_timeout
361
362 # App Engine doesn't have a sock attr
363 if getattr(conn, 'sock', None):
364 # In Python 3 socket.py will catch EAGAIN and return None when you
365 # try and read into the file pointer created by http.client, which
366 # instead raises a BadStatusLine exception. Instead of catching
367 # the exception and assuming all BadStatusLine exceptions are read
368 # timeouts, check for a zero timeout before making the request.
369 if read_timeout == 0:
370 raise ReadTimeoutError(
371 self, url, "Read timed out. (read timeout=%s)" % read_timeout)
372 if read_timeout is Timeout.DEFAULT_TIMEOUT:
373 conn.sock.settimeout(socket.getdefaulttimeout())
374 else: # None or a value
375 conn.sock.settimeout(read_timeout)
376
377 # Receive the response from the server
378 try:
379 try: # Python 2.7, use buffering of HTTP responses
380 httplib_response = conn.getresponse(buffering=True)
381 except TypeError: # Python 2.6 and older, Python 3
382 try:
383 httplib_response = conn.getresponse()
384 except Exception as e:
385 # Remove the TypeError from the exception chain in Python 3;
386 # otherwise it looks like a programming error was the cause.
387 six.raise_from(e, None)
388 except (SocketTimeout, BaseSSLError, SocketError) as e:
389 self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
390 raise
391
392 # AppEngine doesn't have a version attr.
393 http_version = getattr(conn, '_http_vsn_str', 'HTTP/?')
394 log.debug("%s://%s:%s \"%s %s %s\" %s %s", self.scheme, self.host, self.port,
395 method, url, http_version, httplib_response.status,
396 httplib_response.length)
397
398 try:
399 assert_header_parsing(httplib_response.msg)
400 except (HeaderParsingError, TypeError) as hpe: # Platform-specific: Python 3
401 log.warning(
402 'Failed to parse headers (url=%s): %s',
403 self._absolute_url(url), hpe, exc_info=True)
404
405 return httplib_response
406
407 def _absolute_url(self, path):
408 return Url(scheme=self.scheme, host=self.host, port=self.port, path=path).url
409
410 def close(self):
411 """
412 Close all pooled connections and disable the pool.
413 """
414 # Disable access to the pool
415 old_pool, self.pool = self.pool, None
416
417 try:
418 while True:
419 conn = old_pool.get(block=False)
420 if conn:
421 conn.close()
422
423 except queue.Empty:
424 pass # Done.
425
426 def is_same_host(self, url):
427 """
428 Check if the given ``url`` is a member of the same host as this
429 connection pool.
430 """
431 if url.startswith('/'):
432 return True
433
434 # TODO: Add optional support for socket.gethostbyname checking.
435 scheme, host, port = get_host(url)
436
437 host = _ipv6_host(host).lower()
438
439 # Use explicit default port for comparison when none is given
440 if self.port and not port:
441 port = port_by_scheme.get(scheme)
442 elif not self.port and port == port_by_scheme.get(scheme):
443 port = None
444
445 return (scheme, host, port) == (self.scheme, self.host, self.port)
446
447 def urlopen(self, method, url, body=None, headers=None, retries=None,
448 redirect=True, assert_same_host=True, timeout=_Default,
449 pool_timeout=None, release_conn=None, chunked=False,
450 body_pos=None, **response_kw):
451 """
452 Get a connection from the pool and perform an HTTP request. This is the
453 lowest level call for making a request, so you'll need to specify all
454 the raw details.
455
456 .. note::
457
458 More commonly, it's appropriate to use a convenience method provided
459 by :class:`.RequestMethods`, such as :meth:`request`.
460
461 .. note::
462
463 `release_conn` will only behave as expected if
464 `preload_content=False` because we want to make
465 `preload_content=False` the default behaviour someday soon without
466 breaking backwards compatibility.
467
468 :param method:
469 HTTP request method (such as GET, POST, PUT, etc.)
470
471 :param body:
472 Data to send in the request body (useful for creating
473 POST requests; for a more convenient interface, see
474 :meth:`.RequestMethods.request`).
475
476 :param headers:
477 Dictionary of custom headers to send, such as User-Agent,
478 If-None-Match, etc. If None, pool headers are used. If provided,
479 these headers completely replace any pool-specific headers.
480
481 :param retries:
482 Configure the number of retries to allow before raising a
483 :class:`~urllib3.exceptions.MaxRetryError` exception.
484
485 Pass ``None`` to retry until you receive a response. Pass a
486 :class:`~urllib3.util.retry.Retry` object for fine-grained control
487 over different types of retries.
488 Pass an integer number to retry connection errors that many times,
489 but no other types of errors. Pass zero to never retry.
490
491 If ``False``, then retries are disabled and any exception is raised
492 immediately. Also, instead of raising a MaxRetryError on redirects,
493 the redirect response will be returned.
494
495 :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
496
497 :param redirect:
498 If True, automatically handle redirects (status codes 301, 302,
499 303, 307, 308). Each redirect counts as a retry. Disabling retries
500 will disable redirect, too.
501
502 :param assert_same_host:
503 If ``True``, will make sure that the host of the pool requests is
504 consistent, else will raise HostChangedError. When False, you can
505 use the pool on an HTTP proxy and request foreign hosts.
506
507 :param timeout:
508 If specified, overrides the default timeout for this one
509 request. It may be a float (in seconds) or an instance of
510 :class:`urllib3.util.Timeout`.
511
512 :param pool_timeout:
513 If set and the pool is set to block=True, then this method will
514 block for ``pool_timeout`` seconds and raise EmptyPoolError if no
515 connection is available within the time period.
516
517 :param release_conn:
518 If False, then the urlopen call will not release the connection
519 back into the pool once a response is received (but will release if
520 you read the entire contents of the response such as when
521 `preload_content=True`). This is useful if you're not preloading
522 the response's content immediately. You will need to call
523 ``r.release_conn()`` on the response ``r`` to return the connection
524 back into the pool. If None, it takes the value of
525 ``response_kw.get('preload_content', True)``.
526
527 :param chunked:
528 If True, urllib3 will send the body using chunked transfer
529 encoding. Otherwise, urllib3 will send the body using the standard
530 content-length form. Defaults to False.
531
532 :param int body_pos:
533 Position to seek to in file-like body in the event of a retry or
534 redirect. Typically this won't need to be set because urllib3 will
535 auto-populate the value when needed.
536
537 :param \\**response_kw:
538 Additional parameters are passed to
539 :meth:`urllib3.response.HTTPResponse.from_httplib`
540 """
541 if headers is None:
542 headers = self.headers
543
544 if not isinstance(retries, Retry):
545 retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
546
547 if release_conn is None:
548 release_conn = response_kw.get('preload_content', True)
549
550 # Check host
551 if assert_same_host and not self.is_same_host(url):
552 raise HostChangedError(self, url, retries)
553
554 conn = None
555
556 # Track whether `conn` needs to be released before
557 # returning/raising/recursing. Update this variable if necessary, and
558 # leave `release_conn` constant throughout the function. That way, if
559 # the function recurses, the original value of `release_conn` will be
560 # passed down into the recursive call, and its value will be respected.
561 #
562 # See issue #651 [1] for details.
563 #
564 # [1] <https://github.com/shazow/urllib3/issues/651>
565 release_this_conn = release_conn
566
567 # Merge the proxy headers. Only do this in HTTP. We have to copy the
568 # headers dict so we can safely change it without those changes being
569 # reflected in anyone else's copy.
570 if self.scheme == 'http':
571 headers = headers.copy()
572 headers.update(self.proxy_headers)
573
574 # Must keep the exception bound to a separate variable or else Python 3
575 # complains about UnboundLocalError.
576 err = None
577
578 # Keep track of whether we cleanly exited the except block. This
579 # ensures we do proper cleanup in finally.
580 clean_exit = False
581
582 # Rewind body position, if needed. Record current position
583 # for future rewinds in the event of a redirect/retry.
584 body_pos = set_file_position(body, body_pos)
585
586 try:
587 # Request a connection from the queue.
588 timeout_obj = self._get_timeout(timeout)
589 conn = self._get_conn(timeout=pool_timeout)
590
591 conn.timeout = timeout_obj.connect_timeout
592
593 is_new_proxy_conn = self.proxy is not None and not getattr(conn, 'sock', None)
594 if is_new_proxy_conn:
595 self._prepare_proxy(conn)
596
597 # Make the request on the httplib connection object.
598 httplib_response = self._make_request(conn, method, url,
599 timeout=timeout_obj,
600 body=body, headers=headers,
601 chunked=chunked)
602
603 # If we're going to release the connection in ``finally:``, then
604 # the response doesn't need to know about the connection. Otherwise
605 # it will also try to release it and we'll have a double-release
606 # mess.
607 response_conn = conn if not release_conn else None
608
609 # Pass method to Response for length checking
610 response_kw['request_method'] = method
611
612 # Import httplib's response into our own wrapper object
613 response = self.ResponseCls.from_httplib(httplib_response,
614 pool=self,
615 connection=response_conn,
616 retries=retries,
617 **response_kw)
618
619 # Everything went great!
620 clean_exit = True
621
622 except queue.Empty:
623 # Timed out by queue.
624 raise EmptyPoolError(self, "No pool connections are available.")
625
626 except (TimeoutError, HTTPException, SocketError, ProtocolError,
627 BaseSSLError, SSLError, CertificateError) as e:
628 # Discard the connection for these exceptions. It will be
629 # replaced during the next _get_conn() call.
630 clean_exit = False
631 if isinstance(e, (BaseSSLError, CertificateError)):
632 e = SSLError(e)
633 elif isinstance(e, (SocketError, NewConnectionError)) and self.proxy:
634 e = ProxyError('Cannot connect to proxy.', e)
635 elif isinstance(e, (SocketError, HTTPException)):
636 e = ProtocolError('Connection aborted.', e)
637
638 retries = retries.increment(method, url, error=e, _pool=self,
639 _stacktrace=sys.exc_info()[2])
640 retries.sleep()
641
642 # Keep track of the error for the retry warning.
643 err = e
644
645 finally:
646 if not clean_exit:
647 # We hit some kind of exception, handled or otherwise. We need
648 # to throw the connection away unless explicitly told not to.
649 # Close the connection, set the variable to None, and make sure
650 # we put the None back in the pool to avoid leaking it.
651 conn = conn and conn.close()
652 release_this_conn = True
653
654 if release_this_conn:
655 # Put the connection back to be reused. If the connection is
656 # expired then it will be None, which will get replaced with a
657 # fresh connection during _get_conn.
658 self._put_conn(conn)
659
660 if not conn:
661 # Try again
662 log.warning("Retrying (%r) after connection "
663 "broken by '%r': %s", retries, err, url)
664 return self.urlopen(method, url, body, headers, retries,
665 redirect, assert_same_host,
666 timeout=timeout, pool_timeout=pool_timeout,
667 release_conn=release_conn, body_pos=body_pos,
668 **response_kw)
669
670 def drain_and_release_conn(response):
671 try:
672 # discard any remaining response body, the connection will be
673 # released back to the pool once the entire response is read
674 response.read()
675 except (TimeoutError, HTTPException, SocketError, ProtocolError,
676 BaseSSLError, SSLError) as e:
677 pass
678
679 # Handle redirect?
680 redirect_location = redirect and response.get_redirect_location()
681 if redirect_location:
682 if response.status == 303:
683 method = 'GET'
684
685 try:
686 retries = retries.increment(method, url, response=response, _pool=self)
687 except MaxRetryError:
688 if retries.raise_on_redirect:
689 # Drain and release the connection for this response, since
690 # we're not returning it to be released manually.
691 drain_and_release_conn(response)
692 raise
693 return response
694
695 # drain and return the connection to the pool before recursing
696 drain_and_release_conn(response)
697
698 retries.sleep_for_retry(response)
699 log.debug("Redirecting %s -> %s", url, redirect_location)
700 return self.urlopen(
701 method, redirect_location, body, headers,
702 retries=retries, redirect=redirect,
703 assert_same_host=assert_same_host,
704 timeout=timeout, pool_timeout=pool_timeout,
705 release_conn=release_conn, body_pos=body_pos,
706 **response_kw)
707
708 # Check if we should retry the HTTP response.
709 has_retry_after = bool(response.getheader('Retry-After'))
710 if retries.is_retry(method, response.status, has_retry_after):
711 try:
712 retries = retries.increment(method, url, response=response, _pool=self)
713 except MaxRetryError:
714 if retries.raise_on_status:
715 # Drain and release the connection for this response, since
716 # we're not returning it to be released manually.
717 drain_and_release_conn(response)
718 raise
719 return response
720
721 # drain and return the connection to the pool before recursing
722 drain_and_release_conn(response)
723
724 retries.sleep(response)
725 log.debug("Retry: %s", url)
726 return self.urlopen(
727 method, url, body, headers,
728 retries=retries, redirect=redirect,
729 assert_same_host=assert_same_host,
730 timeout=timeout, pool_timeout=pool_timeout,
731 release_conn=release_conn,
732 body_pos=body_pos, **response_kw)
733
734 return response
735
736
737class HTTPSConnectionPool(HTTPConnectionPool):
738 """
739 Same as :class:`.HTTPConnectionPool`, but HTTPS.
740
741 When Python is compiled with the :mod:`ssl` module, then
742 :class:`.VerifiedHTTPSConnection` is used, which *can* verify certificates,
743 instead of :class:`.HTTPSConnection`.
744
745 :class:`.VerifiedHTTPSConnection` uses one of ``assert_fingerprint``,
746 ``assert_hostname`` and ``host`` in this order to verify connections.
747 If ``assert_hostname`` is False, no verification is done.
748
749 The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``,
750 ``ca_cert_dir``, and ``ssl_version`` are only used if :mod:`ssl` is
751 available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade
752 the connection socket into an SSL socket.
753 """
754
755 scheme = 'https'
756 ConnectionCls = HTTPSConnection
757
758 def __init__(self, host, port=None,
759 strict=False, timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1,
760 block=False, headers=None, retries=None,
761 _proxy=None, _proxy_headers=None,
762 key_file=None, cert_file=None, cert_reqs=None,
763 ca_certs=None, ssl_version=None,
764 assert_hostname=None, assert_fingerprint=None,
765 ca_cert_dir=None, **conn_kw):
766
767 HTTPConnectionPool.__init__(self, host, port, strict, timeout, maxsize,
768 block, headers, retries, _proxy, _proxy_headers,
769 **conn_kw)
770
771 if ca_certs and cert_reqs is None:
772 cert_reqs = 'CERT_REQUIRED'
773
774 self.key_file = key_file
775 self.cert_file = cert_file
776 self.cert_reqs = cert_reqs
777 self.ca_certs = ca_certs
778 self.ca_cert_dir = ca_cert_dir
779 self.ssl_version = ssl_version
780 self.assert_hostname = assert_hostname
781 self.assert_fingerprint = assert_fingerprint
782
783 def _prepare_conn(self, conn):
784 """
785 Prepare the ``connection`` for :meth:`urllib3.util.ssl_wrap_socket`
786 and establish the tunnel if proxy is used.
787 """
788
789 if isinstance(conn, VerifiedHTTPSConnection):
790 conn.set_cert(key_file=self.key_file,
791 cert_file=self.cert_file,
792 cert_reqs=self.cert_reqs,
793 ca_certs=self.ca_certs,
794 ca_cert_dir=self.ca_cert_dir,
795 assert_hostname=self.assert_hostname,
796 assert_fingerprint=self.assert_fingerprint)
797 conn.ssl_version = self.ssl_version
798 return conn
799
800 def _prepare_proxy(self, conn):
801 """
802 Establish tunnel connection early, because otherwise httplib
803 would improperly set the Host: header to the proxy's IP:port.
804 """
805 # Python 2.7+
806 try:
807 set_tunnel = conn.set_tunnel
808 except AttributeError: # Platform-specific: Python 2.6
809 set_tunnel = conn._set_tunnel
810
811 if sys.version_info <= (2, 6, 4) and not self.proxy_headers: # Python 2.6.4 and older
812 set_tunnel(self._proxy_host, self.port)
813 else:
814 set_tunnel(self._proxy_host, self.port, self.proxy_headers)
815
816 conn.connect()
817
818 def _new_conn(self):
819 """
820 Return a fresh :class:`httplib.HTTPSConnection`.
821 """
822 self.num_connections += 1
823 log.debug("Starting new HTTPS connection (%d): %s",
824 self.num_connections, self.host)
825
826 if not self.ConnectionCls or self.ConnectionCls is DummyConnection:
827 raise SSLError("Can't connect to HTTPS URL because the SSL "
828 "module is not available.")
829
830 actual_host = self.host
831 actual_port = self.port
832 if self.proxy is not None:
833 actual_host = self.proxy.host
834 actual_port = self.proxy.port
835
836 conn = self.ConnectionCls(host=actual_host, port=actual_port,
837 timeout=self.timeout.connect_timeout,
838 strict=self.strict, **self.conn_kw)
839
840 return self._prepare_conn(conn)
841
842 def _validate_conn(self, conn):
843 """
844 Called right before a request is made, after the socket is created.
845 """
846 super(HTTPSConnectionPool, self)._validate_conn(conn)
847
848 # Force connect early to allow us to validate the connection.
849 if not getattr(conn, 'sock', None): # AppEngine might not have `.sock`
850 conn.connect()
851
852 if not conn.is_verified:
853 warnings.warn((
854 'Unverified HTTPS request is being made. '
855 'Adding certificate verification is strongly advised. See: '
856 'https://urllib3.readthedocs.io/en/latest/advanced-usage.html'
857 '#ssl-warnings'),
858 InsecureRequestWarning)
859
860
861def connection_from_url(url, **kw):
862 """
863 Given a URL, return a :class:`.ConnectionPool` instance for its host.
864
865 This is a shortcut for not having to parse out the scheme, host, and port
866 of the URL before creating a :class:`.ConnectionPool` instance.
867
868 :param url:
869 Absolute URL string that must include the scheme. Port is optional.
870
871 :param \\**kw:
872 Passes additional parameters to the constructor of the appropriate
873 :class:`.ConnectionPool`. Useful for specifying things like
874 timeout, maxsize, headers, etc.
875
876 Example::
877
878 >>> conn = connection_from_url('http://google.com/')
879 >>> r = conn.request('GET', '/')
880 """
881 scheme, host, port = get_host(url)
882 port = port or port_by_scheme.get(scheme, 80)
883 if scheme == 'https':
884 return HTTPSConnectionPool(host, port=port, **kw)
885 else:
886 return HTTPConnectionPool(host, port=port, **kw)
887
888
889def _ipv6_host(host):
890 """
891 Process IPv6 address literals
892 """
893
894 # httplib doesn't like it when we include brackets in IPv6 addresses
895 # Specifically, if we include brackets but also pass the port then
896 # httplib crazily doubles up the square brackets on the Host header.
897 # Instead, we need to make sure we never pass ``None`` as the port.
898 # However, for backward compatibility reasons we can't actually
899 # *assert* that. See http://bugs.python.org/issue28539
900 #
901 # Also if an IPv6 address literal has a zone identifier, the
902 # percent sign might be URI-encoded; convert it back into ASCII.
903 if host.startswith('[') and host.endswith(']'):
904 host = host.replace('%25', '%').strip('[]')
905 return host
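

# A small sketch of the literal handling above (illustrative addresses):
#
#     _ipv6_host('[2001:db8::1]')      # -> '2001:db8::1'
#     _ipv6_host('[fe80::1%25eth0]')   # -> 'fe80::1%eth0'
#     _ipv6_host('example.org')        # -> 'example.org' (unchanged)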