Diffstat (limited to 'venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol')
13 files changed, 1261 insertions, 0 deletions
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/__init__.py
new file mode 100644
index 0000000..ced6d94
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/__init__.py
@@ -0,0 +1,11 @@
+"""CacheControl import Interface.
+
+Make it easy to import from cachecontrol without long namespaces.
+"""
+__author__ = 'Eric Larson'
+__email__ = 'eric@ionrock.org'
+__version__ = '0.12.4'
+
+from .wrapper import CacheControl
+from .adapter import CacheControlAdapter
+from .controller import CacheController
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/_cmd.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/_cmd.py
new file mode 100644
index 0000000..10bc01e
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/_cmd.py
@@ -0,0 +1,60 @@
+import logging
+
+from pip._vendor import requests
+
+from pip._vendor.cachecontrol.adapter import CacheControlAdapter
+from pip._vendor.cachecontrol.cache import DictCache
+from pip._vendor.cachecontrol.controller import logger
+
+from argparse import ArgumentParser
+
+
+def setup_logging():
+    logger.setLevel(logging.DEBUG)
+    handler = logging.StreamHandler()
+    logger.addHandler(handler)
+
+
+def get_session():
+    adapter = CacheControlAdapter(
+        DictCache(),
+        cache_etags=True,
+        serializer=None,
+        heuristic=None,
+    )
+    sess = requests.Session()
+    sess.mount('http://', adapter)
+    sess.mount('https://', adapter)
+
+    sess.cache_controller = adapter.controller
+    return sess
+
+
+def get_args():
+    parser = ArgumentParser()
+    parser.add_argument('url', help='The URL to try and cache')
+    return parser.parse_args()
+
+
+def main(args=None):
+    args = get_args()
+    sess = get_session()
+
+    # Make a request to get a response
+    resp = sess.get(args.url)
+
+    # Turn on logging
+    setup_logging()
+
+    # try setting the cache
+    sess.cache_controller.cache_response(resp.request, resp.raw)
+
+    # Now try to get it
+    if sess.cache_controller.cached_request(resp.request):
+        print('Cached!')
+    else:
+        print('Not cached :(')
+
+
+if __name__ == '__main__':
+    main()
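Because main() is guarded behind the __name__ check above, the module can be exercised directly with -m. A hypothetical invocation against this vendored copy (example.com stands in for any reachable URL):

    python -m pip._vendor.cachecontrol._cmd https://example.com

Since setup_logging() runs after the first request, the controller's debug output covers only the explicit cache_response/cached_request calls before the final "Cached!" or "Not cached :(" verdict.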
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/adapter.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/adapter.py
new file mode 100644
index 0000000..03c95c9
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/adapter.py
@@ -0,0 +1,134 @@
+import types
+import functools
+import zlib
+
+from pip._vendor.requests.adapters import HTTPAdapter
+
+from .controller import CacheController
+from .cache import DictCache
+from .filewrapper import CallbackFileWrapper
+
+
+class CacheControlAdapter(HTTPAdapter):
+    invalidating_methods = set(['PUT', 'DELETE'])
+
+    def __init__(self, cache=None,
+                 cache_etags=True,
+                 controller_class=None,
+                 serializer=None,
+                 heuristic=None,
+                 cacheable_methods=None,
+                 *args, **kw):
+        super(CacheControlAdapter, self).__init__(*args, **kw)
+        self.cache = cache or DictCache()
+        self.heuristic = heuristic
+        self.cacheable_methods = cacheable_methods or ('GET',)
+
+        controller_factory = controller_class or CacheController
+        self.controller = controller_factory(
+            self.cache,
+            cache_etags=cache_etags,
+            serializer=serializer,
+        )
+
+    def send(self, request, cacheable_methods=None, **kw):
+        """
+        Send a request. Use the request information to see if it
+        exists in the cache and cache the response if we need to and can.
+        """
+        cacheable = cacheable_methods or self.cacheable_methods
+        if request.method in cacheable:
+            try:
+                cached_response = self.controller.cached_request(request)
+            except zlib.error:
+                cached_response = None
+            if cached_response:
+                return self.build_response(request, cached_response,
+                                           from_cache=True)
+
+            # check for etags and add headers if appropriate
+            request.headers.update(
+                self.controller.conditional_headers(request)
+            )
+
+        resp = super(CacheControlAdapter, self).send(request, **kw)
+
+        return resp
+
+    def build_response(self, request, response, from_cache=False,
+                       cacheable_methods=None):
+        """
+        Build a response by making a request or using the cache.
+
+        This will end up calling send and returning a potentially
+        cached response
+        """
+        cacheable = cacheable_methods or self.cacheable_methods
+        if not from_cache and request.method in cacheable:
+            # Check for any heuristics that might update headers
+            # before trying to cache.
+            if self.heuristic:
+                response = self.heuristic.apply(response)
+
+            # apply any expiration heuristics
+            if response.status == 304:
+                # We must have sent an ETag request. This could mean
+                # that we've been expired already or that we simply
+                # have an etag. In either case, we want to try and
+                # update the cache.
+                cached_response = self.controller.update_cached_response(
+                    request, response
+                )
+
+                if cached_response is not response:
+                    from_cache = True
+
+                # We are done with the server response, read a
+                # possible response body (compliant servers will
+                # not return one, but we cannot be 100% sure) and
+                # release the connection back to the pool.
+                response.read(decode_content=False)
+                response.release_conn()
+
+                response = cached_response
+
+            # We always cache the 301 responses
+            elif response.status == 301:
+                self.controller.cache_response(request, response)
+            else:
+                # Wrap the response file with a wrapper that will cache the
+                # response when the stream has been consumed.
+                response._fp = CallbackFileWrapper(
+                    response._fp,
+                    functools.partial(
+                        self.controller.cache_response,
+                        request,
+                        response,
+                    )
+                )
+                if response.chunked:
+                    super_update_chunk_length = response._update_chunk_length
+
+                    def _update_chunk_length(self):
+                        super_update_chunk_length()
+                        if self.chunk_left == 0:
+                            self._fp._close()
+                    response._update_chunk_length = types.MethodType(_update_chunk_length, response)
+
+        resp = super(CacheControlAdapter, self).build_response(
+            request, response
+        )
+
+        # See if we should invalidate the cache.
+        if request.method in self.invalidating_methods and resp.ok:
+            cache_url = self.controller.cache_url(request.url)
+            self.cache.delete(cache_url)
+
+        # Give the request a from_cache attr to let people use it
+        resp.from_cache = from_cache
+
+        return resp
+
+    def close(self):
+        self.cache.close()
+        super(CacheControlAdapter, self).close()
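A minimal sketch of using the adapter directly, assuming this vendored copy is importable (the standalone cachecontrol package exposes the same names): mount it on a requests Session so GET responses flow through the cache, then inspect the from_cache attribute that build_response() sets.

    from pip._vendor import requests
    from pip._vendor.cachecontrol.adapter import CacheControlAdapter

    sess = requests.Session()
    adapter = CacheControlAdapter()        # in-memory DictCache by default
    sess.mount('http://', adapter)
    sess.mount('https://', adapter)

    resp = sess.get('https://example.com')
    print(resp.from_cache)                 # False on the first fetch
    resp = sess.get('https://example.com')
    print(resp.from_cache)                 # True if the response was cacheable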
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/cache.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/cache.py
new file mode 100644
index 0000000..04d1488
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/cache.py
@@ -0,0 +1,39 @@
+"""
+The cache object API for implementing caches. The default is a thread
+safe in-memory dictionary.
+"""
+from threading import Lock
+
+
+class BaseCache(object):
+
+    def get(self, key):
+        raise NotImplementedError()
+
+    def set(self, key, value):
+        raise NotImplementedError()
+
+    def delete(self, key):
+        raise NotImplementedError()
+
+    def close(self):
+        pass
+
+
+class DictCache(BaseCache):
+
+    def __init__(self, init_dict=None):
+        self.lock = Lock()
+        self.data = init_dict or {}
+
+    def get(self, key):
+        return self.data.get(key, None)
+
+    def set(self, key, value):
+        with self.lock:
+            self.data.update({key: value})
+
+    def delete(self, key):
+        with self.lock:
+            if key in self.data:
+                self.data.pop(key)
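Since BaseCache only requires get/set/delete (close is optional), custom backends stay small. A hypothetical bounded variant, sketched to show the contract rather than taken from the package:

    from collections import OrderedDict

    from pip._vendor.cachecontrol.cache import BaseCache


    class LRUCache(BaseCache):
        """Dict cache that evicts the least recently written entry."""

        def __init__(self, maxsize=128):
            self.maxsize = maxsize
            self.data = OrderedDict()

        def get(self, key):
            return self.data.get(key, None)

        def set(self, key, value):
            self.data[key] = value
            self.data.move_to_end(key)
            if len(self.data) > self.maxsize:
                self.data.popitem(last=False)   # drop the oldest entry

        def delete(self, key):
            self.data.pop(key, None)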
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/caches/__init__.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/caches/__init__.py
new file mode 100644
index 0000000..1193f26
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/caches/__init__.py
@@ -0,0 +1,2 @@
+from .file_cache import FileCache  # noqa
+from .redis_cache import RedisCache  # noqa
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/caches/file_cache.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/caches/file_cache.py
new file mode 100644
index 0000000..f7eb890
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/caches/file_cache.py
@@ -0,0 +1,133 @@
+import hashlib
+import os
+from textwrap import dedent
+
+from ..cache import BaseCache
+from ..controller import CacheController
+
+try:
+    FileNotFoundError
+except NameError:
+    # py2.X
+    FileNotFoundError = OSError
+
+
+def _secure_open_write(filename, fmode):
+    # We only want to write to this file, so open it in write only mode
+    flags = os.O_WRONLY
+
+    # os.O_CREAT | os.O_EXCL will fail if the file already exists, so we will
+    # only open *new* files.
+    # We specify this because we want to ensure that the mode we pass is the
+    # mode of the file.
+    flags |= os.O_CREAT | os.O_EXCL
+
+    # Do not follow symlinks to prevent someone from making a symlink that
+    # we follow and insecurely open a cache file.
+    if hasattr(os, "O_NOFOLLOW"):
+        flags |= os.O_NOFOLLOW
+
+    # On Windows we'll mark this file as binary
+    if hasattr(os, "O_BINARY"):
+        flags |= os.O_BINARY
+
+    # Before we open our file, we want to delete any existing file that is
+    # there
+    try:
+        os.remove(filename)
+    except (IOError, OSError):
+        # The file must not exist already, so we can just skip ahead to opening
+        pass
+
+    # Open our file, the use of os.O_CREAT | os.O_EXCL will ensure that if a
+    # race condition happens between the os.remove and this line, that an
+    # error will be raised. Because we utilize a lockfile this should only
+    # happen if someone is attempting to attack us.
+    fd = os.open(filename, flags, fmode)
+    try:
+        return os.fdopen(fd, "wb")
+    except:
+        # An error occurred wrapping our FD in a file object
+        os.close(fd)
+        raise
+
+
+class FileCache(BaseCache):
+    def __init__(self, directory, forever=False, filemode=0o0600,
+                 dirmode=0o0700, use_dir_lock=None, lock_class=None):
+
+        if use_dir_lock is not None and lock_class is not None:
+            raise ValueError("Cannot use use_dir_lock and lock_class together")
+
+        try:
+            from pip._vendor.lockfile import LockFile
+            from pip._vendor.lockfile.mkdirlockfile import MkdirLockFile
+        except ImportError:
+            notice = dedent("""
+            NOTE: In order to use the FileCache you must have
+            lockfile installed. You can install it via pip:
+              pip install lockfile
+            """)
+            raise ImportError(notice)
+        else:
+            if use_dir_lock:
+                lock_class = MkdirLockFile
+
+            elif lock_class is None:
+                lock_class = LockFile
+
+        self.directory = directory
+        self.forever = forever
+        self.filemode = filemode
+        self.dirmode = dirmode
+        self.lock_class = lock_class
+
+    @staticmethod
+    def encode(x):
+        return hashlib.sha224(x.encode()).hexdigest()
+
+    def _fn(self, name):
+        # NOTE: This method should not change as some may depend on it.
+        #       See: https://github.com/ionrock/cachecontrol/issues/63
+        hashed = self.encode(name)
+        parts = list(hashed[:5]) + [hashed]
+        return os.path.join(self.directory, *parts)
+
+    def get(self, key):
+        name = self._fn(key)
+        if not os.path.exists(name):
+            return None
+
+        with open(name, 'rb') as fh:
+            return fh.read()
+
+    def set(self, key, value):
+        name = self._fn(key)
+
+        # Make sure the directory exists
+        try:
+            os.makedirs(os.path.dirname(name), self.dirmode)
+        except (IOError, OSError):
+            pass
+
+        with self.lock_class(name) as lock:
+            # Write our actual file
+            with _secure_open_write(lock.path, self.filemode) as fh:
+                fh.write(value)
+
+    def delete(self, key):
+        name = self._fn(key)
+        if not self.forever:
+            try:
+                os.remove(name)
+            except FileNotFoundError:
+                pass
+
+
+def url_to_file_path(url, filecache):
+    """Return the file cache path based on the URL.
+
+    This does not ensure the file exists!
+    """
+    key = CacheController.cache_url(url)
+    return filecache._fn(key)
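A sketch of FileCache in use (it depends on lockfile, which pip 10 also vendors; '.webcache' is an arbitrary example directory). url_to_file_path() exposes the sharding scheme from _fn(): five single-character directories from the hash prefix, then the full SHA-224 digest.

    from pip._vendor import requests
    from pip._vendor.cachecontrol import CacheControl
    from pip._vendor.cachecontrol.caches import FileCache
    from pip._vendor.cachecontrol.caches.file_cache import url_to_file_path

    fc = FileCache('.webcache')
    sess = CacheControl(requests.Session(), cache=fc)
    resp = sess.get('https://example.com')

    # Where the entry lives on disk (computed, not checked for existence):
    print(url_to_file_path('https://example.com', fc))
    # -> .webcache/<5 nested hash-prefix dirs>/<full sha224 hex digest>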
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/caches/redis_cache.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/caches/redis_cache.py
new file mode 100644
index 0000000..db1e09d
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/caches/redis_cache.py
@@ -0,0 +1,43 @@
+from __future__ import division
+
+from datetime import datetime
+from pip._vendor.cachecontrol.cache import BaseCache
+
+
+def total_seconds(td):
+    """Python 2.6 compatibility"""
+    if hasattr(td, 'total_seconds'):
+        return int(td.total_seconds())
+
+    ms = td.microseconds
+    secs = (td.seconds + td.days * 24 * 3600)
+    return int((ms + secs * 10**6) / 10**6)
+
+
+class RedisCache(BaseCache):
+
+    def __init__(self, conn):
+        self.conn = conn
+
+    def get(self, key):
+        return self.conn.get(key)
+
+    def set(self, key, value, expires=None):
+        if not expires:
+            self.conn.set(key, value)
+        else:
+            expires = expires - datetime.utcnow()
+            self.conn.setex(key, total_seconds(expires), value)
+
+    def delete(self, key):
+        self.conn.delete(key)
+
+    def clear(self):
+        """Helper for clearing all the keys in a database. Use with
+        caution!"""
+        for key in self.conn.keys():
+            self.conn.delete(key)
+
+    def close(self):
+        """Redis uses connection pooling, no need to close the connection."""
+        pass
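Hypothetical wiring for RedisCache, assuming a reachable Redis server and the third-party redis client (pip does not vendor it):

    import redis

    from pip._vendor import requests
    from pip._vendor.cachecontrol import CacheControl
    from pip._vendor.cachecontrol.caches import RedisCache

    pool = redis.ConnectionPool(host='localhost', port=6379, db=0)
    sess = CacheControl(requests.Session(),
                        cache=RedisCache(redis.Redis(connection_pool=pool)))

The optional expires parameter on set() maps to Redis SETEX, so when a caller supplies an absolute datetime, Redis itself evicts the entry once it goes stale.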
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/compat.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/compat.py
new file mode 100644
index 0000000..e3f3243
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/compat.py
@@ -0,0 +1,29 @@
+try:
+    from urllib.parse import urljoin
+except ImportError:
+    from urlparse import urljoin
+
+
+try:
+    import cPickle as pickle
+except ImportError:
+    import pickle
+
+
+# Handle the case where the requests module has been patched to not have
+# urllib3 bundled as part of its source.
+try:
+    from pip._vendor.requests.packages.urllib3.response import HTTPResponse
+except ImportError:
+    from pip._vendor.urllib3.response import HTTPResponse
+
+try:
+    from pip._vendor.requests.packages.urllib3.util import is_fp_closed
+except ImportError:
+    from pip._vendor.urllib3.util import is_fp_closed
+
+# Replicate some six behaviour
+try:
+    text_type = unicode
+except NameError:
+    text_type = str
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/controller.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/controller.py
new file mode 100644
index 0000000..bf4cc7f
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/controller.py
@@ -0,0 +1,373 @@
+"""
+The httplib2 algorithms ported for use with requests.
+"""
+import logging
+import re
+import calendar
+import time
+from email.utils import parsedate_tz
+
+from pip._vendor.requests.structures import CaseInsensitiveDict
+
+from .cache import DictCache
+from .serialize import Serializer
+
+
+logger = logging.getLogger(__name__)
+
+URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?")
+
+
+def parse_uri(uri):
+    """Parses a URI using the regex given in Appendix B of RFC 3986.
+
+        (scheme, authority, path, query, fragment) = parse_uri(uri)
+    """
+    groups = URI.match(uri).groups()
+    return (groups[1], groups[3], groups[4], groups[6], groups[8])
+
+
+class CacheController(object):
+    """An interface to see if a request should be cached or not.
+    """
+    def __init__(self, cache=None, cache_etags=True, serializer=None,
+                 status_codes=None):
+        self.cache = cache or DictCache()
+        self.cache_etags = cache_etags
+        self.serializer = serializer or Serializer()
+        self.cacheable_status_codes = status_codes or (200, 203, 300, 301)
+
+    @classmethod
+    def _urlnorm(cls, uri):
+        """Normalize the URL to create a safe key for the cache"""
+        (scheme, authority, path, query, fragment) = parse_uri(uri)
+        if not scheme or not authority:
+            raise Exception("Only absolute URIs are allowed. uri = %s" % uri)
+
+        scheme = scheme.lower()
+        authority = authority.lower()
+
+        if not path:
+            path = "/"
+
+        # Could do syntax based normalization of the URI before
+        # computing the digest. See Section 6.2.2 of Std 66.
+        request_uri = query and "?".join([path, query]) or path
+        defrag_uri = scheme + "://" + authority + request_uri
+
+        return defrag_uri
+
+    @classmethod
+    def cache_url(cls, uri):
+        return cls._urlnorm(uri)
+
+    def parse_cache_control(self, headers):
+        known_directives = {
+            # https://tools.ietf.org/html/rfc7234#section-5.2
+            'max-age': (int, True,),
+            'max-stale': (int, False,),
+            'min-fresh': (int, True,),
+            'no-cache': (None, False,),
+            'no-store': (None, False,),
+            'no-transform': (None, False,),
+            'only-if-cached': (None, False,),
+            'must-revalidate': (None, False,),
+            'public': (None, False,),
+            'private': (None, False,),
+            'proxy-revalidate': (None, False,),
+            's-maxage': (int, True,)
+        }
+
+        cc_headers = headers.get('cache-control',
+                                 headers.get('Cache-Control', ''))
+
+        retval = {}
+
+        for cc_directive in cc_headers.split(','):
+            parts = cc_directive.split('=', 1)
+            directive = parts[0].strip()
+
+            try:
+                typ, required = known_directives[directive]
+            except KeyError:
+                logger.debug('Ignoring unknown cache-control directive: %s',
+                             directive)
+                continue
+
+            if not typ or not required:
+                retval[directive] = None
+            if typ:
+                try:
+                    retval[directive] = typ(parts[1].strip())
+                except IndexError:
+                    if required:
+                        logger.debug('Missing value for cache-control '
+                                     'directive: %s', directive)
+                except ValueError:
+                    logger.debug('Invalid value for cache-control directive '
+                                 '%s, must be %s', directive, typ.__name__)
+
+        return retval
+
+    def cached_request(self, request):
+        """
+        Return a cached response if it exists in the cache, otherwise
+        return False.
+        """
+        cache_url = self.cache_url(request.url)
+        logger.debug('Looking up "%s" in the cache', cache_url)
+        cc = self.parse_cache_control(request.headers)
+
+        # Bail out if the request insists on fresh data
+        if 'no-cache' in cc:
+            logger.debug('Request header has "no-cache", cache bypassed')
+            return False
+
+        if 'max-age' in cc and cc['max-age'] == 0:
+            logger.debug('Request header has "max-age" as 0, cache bypassed')
+            return False
+
+        # Request allows serving from the cache, let's see if we find something
+        cache_data = self.cache.get(cache_url)
+        if cache_data is None:
+            logger.debug('No cache entry available')
+            return False
+
+        # Check whether it can be deserialized
+        resp = self.serializer.loads(request, cache_data)
+        if not resp:
+            logger.warning('Cache entry deserialization failed, entry ignored')
+            return False
+
+        # If we have a cached 301, return it immediately. We don't
+        # need to test our response for other headers b/c it is
+        # intrinsically "cacheable" as it is Permanent.
+        # See:
+        #   https://tools.ietf.org/html/rfc7231#section-6.4.2
+        #
+        # Client can try to refresh the value by repeating the request
+        # with cache busting headers as usual (ie no-cache).
+        if resp.status == 301:
+            msg = ('Returning cached "301 Moved Permanently" response '
+                   '(ignoring date and etag information)')
+            logger.debug(msg)
+            return resp
+
+        headers = CaseInsensitiveDict(resp.headers)
+        if not headers or 'date' not in headers:
+            if 'etag' not in headers:
+                # Without date or etag, the cached response can never be used
+                # and should be deleted.
+                logger.debug('Purging cached response: no date or etag')
+                self.cache.delete(cache_url)
+            logger.debug('Ignoring cached response: no date')
+            return False
+
+        now = time.time()
+        date = calendar.timegm(
+            parsedate_tz(headers['date'])
+        )
+        current_age = max(0, now - date)
+        logger.debug('Current age based on date: %i', current_age)
+
+        # TODO: There is an assumption that the result will be a
+        #       urllib3 response object. This may not be best since we
+        #       could probably avoid instantiating or constructing the
+        #       response until we know we need it.
+        resp_cc = self.parse_cache_control(headers)
+
+        # determine freshness
+        freshness_lifetime = 0
+
+        # Check the max-age pragma in the cache control header
+        if 'max-age' in resp_cc:
+            freshness_lifetime = resp_cc['max-age']
+            logger.debug('Freshness lifetime from max-age: %i',
+                         freshness_lifetime)
+
+        # If there isn't a max-age, check for an expires header
+        elif 'expires' in headers:
+            expires = parsedate_tz(headers['expires'])
+            if expires is not None:
+                expire_time = calendar.timegm(expires) - date
+                freshness_lifetime = max(0, expire_time)
+                logger.debug("Freshness lifetime from expires: %i",
+                             freshness_lifetime)
+
+        # Determine if we are setting freshness limit in the
+        # request. Note, this overrides what was in the response.
+        if 'max-age' in cc:
+            freshness_lifetime = cc['max-age']
+            logger.debug('Freshness lifetime from request max-age: %i',
+                         freshness_lifetime)
+
+        if 'min-fresh' in cc:
+            min_fresh = cc['min-fresh']
+            # adjust our current age by our min fresh
+            current_age += min_fresh
+            logger.debug('Adjusted current age from min-fresh: %i',
+                         current_age)
+
+        # Return entry if it is fresh enough
+        if freshness_lifetime > current_age:
+            logger.debug('The response is "fresh", returning cached response')
+            logger.debug('%i > %i', freshness_lifetime, current_age)
+            return resp
+
+        # we're not fresh. If we don't have an Etag, clear it out
+        if 'etag' not in headers:
+            logger.debug(
+                'The cached response is "stale" with no etag, purging'
+            )
+            self.cache.delete(cache_url)
+
+        # return the original handler
+        return False
+
+    def conditional_headers(self, request):
+        cache_url = self.cache_url(request.url)
+        resp = self.serializer.loads(request, self.cache.get(cache_url))
+        new_headers = {}
+
+        if resp:
+            headers = CaseInsensitiveDict(resp.headers)
+
+            if 'etag' in headers:
+                new_headers['If-None-Match'] = headers['ETag']
+
+            if 'last-modified' in headers:
+                new_headers['If-Modified-Since'] = headers['Last-Modified']
+
+        return new_headers
+
+    def cache_response(self, request, response, body=None,
+                       status_codes=None):
+        """
+        Algorithm for caching requests.
+
+        This assumes a requests Response object.
+        """
+        # From httplib2: Don't cache 206's since we aren't going to
+        # handle byte range requests
+        cacheable_status_codes = status_codes or self.cacheable_status_codes
+        if response.status not in cacheable_status_codes:
+            logger.debug(
+                'Status code %s not in %s',
+                response.status,
+                cacheable_status_codes
+            )
+            return
+
+        response_headers = CaseInsensitiveDict(response.headers)
+
+        # If we've been given a body, our response has a Content-Length, and
+        # that Content-Length is valid, then we can check to see if the body
+        # we've been given matches the expected size, and if it doesn't we'll
+        # just skip trying to cache it.
+        if (body is not None and
+                "content-length" in response_headers and
+                response_headers["content-length"].isdigit() and
+                int(response_headers["content-length"]) != len(body)):
+            return
+
+        cc_req = self.parse_cache_control(request.headers)
+        cc = self.parse_cache_control(response_headers)
+
+        cache_url = self.cache_url(request.url)
+        logger.debug('Updating cache with response from "%s"', cache_url)
+
+        # Delete it from the cache if we happen to have it stored there
+        no_store = False
+        if 'no-store' in cc:
+            no_store = True
+            logger.debug('Response header has "no-store"')
+        if 'no-store' in cc_req:
+            no_store = True
+            logger.debug('Request header has "no-store"')
+        if no_store and self.cache.get(cache_url):
+            logger.debug('Purging existing cache entry to honor "no-store"')
+            self.cache.delete(cache_url)
+
+        # If we've been given an etag, then keep the response
+        if self.cache_etags and 'etag' in response_headers:
+            logger.debug('Caching due to etag')
+            self.cache.set(
+                cache_url,
+                self.serializer.dumps(request, response, body=body),
+            )
+
+        # Add to the cache any 301s. We do this before looking at the
+        # Date headers.
+        elif response.status == 301:
+            logger.debug('Caching permanent redirect')
+            self.cache.set(
+                cache_url,
+                self.serializer.dumps(request, response)
+            )
+
+        # Add to the cache if the response headers demand it. If there
+        # is no date header then we can't do anything about expiring
+        # the cache.
+        elif 'date' in response_headers:
+            # cache when there is a max-age > 0
+            if 'max-age' in cc and cc['max-age'] > 0:
+                logger.debug('Caching b/c date exists and max-age > 0')
+                self.cache.set(
+                    cache_url,
+                    self.serializer.dumps(request, response, body=body),
+                )
+
+            # If the request can expire, it means we should cache it
+            # in the meantime.
+            elif 'expires' in response_headers:
+                if response_headers['expires']:
+                    logger.debug('Caching b/c of expires header')
+                    self.cache.set(
+                        cache_url,
+                        self.serializer.dumps(request, response, body=body),
+                    )
+
+    def update_cached_response(self, request, response):
+        """On a 304 we will get a new set of headers that we want to
+        update our cached value with, assuming we have one.
+
+        This should only ever be called when we've sent an ETag and
+        gotten a 304 as the response.
+        """
+        cache_url = self.cache_url(request.url)
+
+        cached_response = self.serializer.loads(
+            request,
+            self.cache.get(cache_url)
+        )
+
+        if not cached_response:
+            # we didn't have a cached response
+            return response
+
+        # Let's update our headers with the headers from the new request:
+        # http://tools.ietf.org/html/draft-ietf-httpbis-p4-conditional-26#section-4.1
+        #
+        # The server isn't supposed to send headers that would make
+        # the cached body invalid. But... just in case, we'll be sure
+        # to strip out ones we know that might be problematic due to
+        # typical assumptions.
+        excluded_headers = [
+            "content-length",
+        ]
+
+        cached_response.headers.update(
+            dict((k, v) for k, v in response.headers.items()
+                 if k.lower() not in excluded_headers)
+        )
+
+        # we want a 200 b/c we have content via the cache
+        cached_response.status = 200
+
+        # update our cache
+        self.cache.set(
+            cache_url,
+            self.serializer.dumps(request, cached_response),
+        )
+
+        return cached_response
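The normalization in _urlnorm() is easy to check in isolation, since cache_url() is a classmethod; a quick sketch (no instance or network needed):

    from pip._vendor.cachecontrol.controller import CacheController

    print(CacheController.cache_url('HTTP://Example.COM'))
    # -> http://example.com/   (scheme/host lowercased, empty path becomes "/")
    print(CacheController.cache_url('http://example.com/a?b=1#frag'))
    # -> http://example.com/a?b=1   (query kept, fragment dropped)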
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/filewrapper.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/filewrapper.py
new file mode 100644
index 0000000..83ce912
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/filewrapper.py
@@ -0,0 +1,78 @@
+from io import BytesIO
+
+
+class CallbackFileWrapper(object):
+    """
+    Small wrapper around a fp object which will tee everything read into a
+    buffer, and when that file is closed it will execute a callback with the
+    contents of that buffer.
+
+    All attributes are proxied to the underlying file object.
+
+    This class uses members with a double underscore (__) leading prefix so as
+    not to accidentally shadow an attribute.
+    """
+
+    def __init__(self, fp, callback):
+        self.__buf = BytesIO()
+        self.__fp = fp
+        self.__callback = callback
+
+    def __getattr__(self, name):
+        # The vagaries of garbage collection mean that self.__fp is
+        # not always set. Using __getattribute__ with the private
+        # name[0] allows looking up the attribute value and raising an
+        # AttributeError when it doesn't exist. This stops things from
+        # infinitely recursing calls to getattr in the case where
+        # self.__fp hasn't been set.
+        #
+        # [0] https://docs.python.org/2/reference/expressions.html#atom-identifiers
+        fp = self.__getattribute__('_CallbackFileWrapper__fp')
+        return getattr(fp, name)
+
+    def __is_fp_closed(self):
+        try:
+            return self.__fp.fp is None
+        except AttributeError:
+            pass
+
+        try:
+            return self.__fp.closed
+        except AttributeError:
+            pass
+
+        # We just don't cache it then.
+        # TODO: Add some logging here...
+        return False
+
+    def _close(self):
+        if self.__callback:
+            self.__callback(self.__buf.getvalue())
+
+        # We assign this to None here, because otherwise we can get into
+        # really tricky problems where the CPython interpreter deadlocks
+        # because the callback is holding a reference to something which
+        # has a __del__ method. Setting this to None breaks the cycle
+        # and allows the garbage collector to do its thing normally.
+        self.__callback = None
+
+    def read(self, amt=None):
+        data = self.__fp.read(amt)
+        self.__buf.write(data)
+        if self.__is_fp_closed():
+            self._close()
+
+        return data
+
+    def _safe_read(self, amt):
+        data = self.__fp._safe_read(amt)
+        if amt == 2 and data == b'\r\n':
+            # urllib executes this read to toss the CRLF at the end
+            # of the chunk.
+            return data
+
+        self.__buf.write(data)
+        if self.__is_fp_closed():
+            self._close()
+
+        return data
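A small sketch of the tee behaviour above. __is_fp_closed() first looks for an fp attribute that goes None at EOF (the httplib convention), so the stand-in below is hypothetical, built only to trigger that branch:

    import io

    from pip._vendor.cachecontrol.filewrapper import CallbackFileWrapper


    class FakeFP(object):
        """Mimics httplib: .fp becomes None once the body is exhausted."""

        def __init__(self, data):
            self._buf = io.BytesIO(data)
            self.fp = self              # non-None while data remains

        def read(self, amt=None):
            data = self._buf.read(amt)
            if not data:
                self.fp = None          # EOF signal checked by the wrapper
            return data


    raw = FakeFP(b'hello world')
    wrapped = CallbackFileWrapper(raw, lambda body: print('cached:', body))
    while wrapped.read(4):
        pass
    # The final read hits EOF and fires the callback: cached: b'hello world'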
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/heuristics.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/heuristics.py
new file mode 100644
index 0000000..aad333d
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/heuristics.py
@@ -0,0 +1,138 @@
+import calendar
+import time
+
+from email.utils import formatdate, parsedate, parsedate_tz
+
+from datetime import datetime, timedelta
+
+TIME_FMT = "%a, %d %b %Y %H:%M:%S GMT"
+
+
+def expire_after(delta, date=None):
+    date = date or datetime.utcnow()
+    return date + delta
+
+
+def datetime_to_header(dt):
+    return formatdate(calendar.timegm(dt.timetuple()))
+
+
+class BaseHeuristic(object):
+
+    def warning(self, response):
+        """
+        Return a valid 1xx warning header value describing the cache
+        adjustments.
+
+        The response is provided to allow warnings like 113
+        http://tools.ietf.org/html/rfc7234#section-5.5.4 where we need
+        to explicitly say a response is over 24 hours old.
+        """
+        return '110 - "Response is Stale"'
+
+    def update_headers(self, response):
+        """Update the response headers with any new headers.
+
+        NOTE: This SHOULD always include some Warning header to
+              signify that the response was cached by the client, not
+              by way of the provided headers.
+        """
+        return {}
+
+    def apply(self, response):
+        updated_headers = self.update_headers(response)
+
+        if updated_headers:
+            response.headers.update(updated_headers)
+            warning_header_value = self.warning(response)
+            if warning_header_value is not None:
+                response.headers.update({'Warning': warning_header_value})
+
+        return response
+
+
+class OneDayCache(BaseHeuristic):
+    """
+    Cache the response by providing an expires header 1 day in the
+    future.
+    """
+    def update_headers(self, response):
+        headers = {}
+
+        if 'expires' not in response.headers:
+            date = parsedate(response.headers['date'])
+            expires = expire_after(timedelta(days=1),
+                                   date=datetime(*date[:6]))
+            headers['expires'] = datetime_to_header(expires)
+            headers['cache-control'] = 'public'
+        return headers
+
+
+class ExpiresAfter(BaseHeuristic):
+    """
+    Cache **all** requests for a defined time period.
+    """
+
+    def __init__(self, **kw):
+        self.delta = timedelta(**kw)
+
+    def update_headers(self, response):
+        expires = expire_after(self.delta)
+        return {
+            'expires': datetime_to_header(expires),
+            'cache-control': 'public',
+        }
+
+    def warning(self, response):
+        tmpl = '110 - Automatically cached for %s. Response might be stale'
+        return tmpl % self.delta
+
+
+class LastModified(BaseHeuristic):
+    """
+    If there is no Expires header already, fall back on Last-Modified
+    using the heuristic from
+    http://tools.ietf.org/html/rfc7234#section-4.2.2
+    to calculate a reasonable value.
+
+    Firefox also does something like this per
+    https://developer.mozilla.org/en-US/docs/Web/HTTP/Caching_FAQ
+    http://lxr.mozilla.org/mozilla-release/source/netwerk/protocol/http/nsHttpResponseHead.cpp#397
+    Unlike Mozilla, we limit this to 24 hours.
+    """
+    cacheable_by_default_statuses = set([
+        200, 203, 204, 206, 300, 301, 404, 405, 410, 414, 501
+    ])
+
+    def update_headers(self, resp):
+        headers = resp.headers
+
+        if 'expires' in headers:
+            return {}
+
+        if 'cache-control' in headers and headers['cache-control'] != 'public':
+            return {}
+
+        if resp.status not in self.cacheable_by_default_statuses:
+            return {}
+
+        if 'date' not in headers or 'last-modified' not in headers:
+            return {}
+
+        date = calendar.timegm(parsedate_tz(headers['date']))
+        last_modified = parsedate(headers['last-modified'])
+        if date is None or last_modified is None:
+            return {}
+
+        now = time.time()
+        current_age = max(0, now - date)
+        delta = date - calendar.timegm(last_modified)
+        freshness_lifetime = max(0, min(delta / 10, 24 * 3600))
+        if freshness_lifetime <= current_age:
+            return {}
+
+        expires = date + freshness_lifetime
+        return {'expires': time.strftime(TIME_FMT, time.gmtime(expires))}
+
+    def warning(self, resp):
+        return None
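Heuristics plug in through the adapter's heuristic argument; apply() then rewrites headers before the controller decides cacheability. A hypothetical one-liner using ExpiresAfter to treat every response as fresh for an hour:

    from pip._vendor import requests
    from pip._vendor.cachecontrol import CacheControl
    from pip._vendor.cachecontrol.heuristics import ExpiresAfter

    sess = CacheControl(requests.Session(), heuristic=ExpiresAfter(hours=1))

ExpiresAfter also emits a Warning header via warning(), so downstream consumers can tell the freshness was injected by the client rather than asserted by the server.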
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/serialize.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/serialize.py
new file mode 100644
index 0000000..cd21cae
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/serialize.py
@@ -0,0 +1,194 @@
+import base64
+import io
+import json
+import zlib
+
+from pip._vendor import msgpack
+from pip._vendor.requests.structures import CaseInsensitiveDict
+
+from .compat import HTTPResponse, pickle, text_type
+
+
+def _b64_decode_bytes(b):
+    return base64.b64decode(b.encode("ascii"))
+
+
+def _b64_decode_str(s):
+    return _b64_decode_bytes(s).decode("utf8")
+
+
+class Serializer(object):
+
+    def dumps(self, request, response, body=None):
+        response_headers = CaseInsensitiveDict(response.headers)
+
+        if body is None:
+            body = response.read(decode_content=False)
+
+            # NOTE: 99% sure this is dead code. I'm only leaving it
+            #       here b/c I don't have a test yet to prove
+            #       it. Basically, before using
+            #       `cachecontrol.filewrapper.CallbackFileWrapper`,
+            #       this made an effort to reset the file handle. The
+            #       `CallbackFileWrapper` short circuits this code by
+            #       setting the body as the content is consumed, the
+            #       result being a `body` argument is *always* passed
+            #       into cache_response, and in turn,
+            #       `Serializer.dump`.
+            response._fp = io.BytesIO(body)
+
+        # NOTE: This is all a bit weird, but it's really important that on
+        #       Python 2.x these objects are unicode and not str, even when
+        #       they contain only ascii. The problem here is that msgpack
+        #       understands the difference between unicode and bytes and we
+        #       have it set to differentiate between them, however Python 2
+        #       doesn't know the difference. Forcing these to unicode will be
+        #       enough to have msgpack know the difference.
+        data = {
+            u"response": {
+                u"body": body,
+                u"headers": dict(
+                    (text_type(k), text_type(v))
+                    for k, v in response.headers.items()
+                ),
+                u"status": response.status,
+                u"version": response.version,
+                u"reason": text_type(response.reason),
+                u"strict": response.strict,
+                u"decode_content": response.decode_content,
+            },
+        }
+
+        # Construct our vary headers
+        data[u"vary"] = {}
+        if u"vary" in response_headers:
+            varied_headers = response_headers[u'vary'].split(',')
+            for header in varied_headers:
+                header = header.strip()
+                header_value = request.headers.get(header, None)
+                if header_value is not None:
+                    header_value = text_type(header_value)
+                data[u"vary"][header] = header_value
+
+        return b",".join([b"cc=4", msgpack.dumps(data, use_bin_type=True)])
+
+    def loads(self, request, data):
+        # Short circuit if we've been given an empty set of data
+        if not data:
+            return
+
+        # Determine what version of the serializer the data was serialized
+        # with
+        try:
+            ver, data = data.split(b",", 1)
+        except ValueError:
+            ver = b"cc=0"
+
+        # Make sure that our "ver" is actually a version and isn't a false
+        # positive from a , being in the data stream.
+        if ver[:3] != b"cc=":
+            data = ver + data
+            ver = b"cc=0"
+
+        # Get the version number out of the cc=N
+        ver = ver.split(b"=", 1)[-1].decode("ascii")
+
+        # Dispatch to the actual load method for the given version
+        try:
+            return getattr(self, "_loads_v{0}".format(ver))(request, data)
+        except AttributeError:
+            # This is a version we don't have a loads function for, so we'll
+            # just treat it as a miss and return None
+            return
+
+    def prepare_response(self, request, cached):
+        """Verify our vary headers match and construct a real urllib3
+        HTTPResponse object.
+        """
+        # Special case the '*' Vary value as it means we cannot actually
+        # determine if the cached response is suitable for this request.
+        if "*" in cached.get("vary", {}):
+            return
+
+        # Ensure that the Vary headers for the cached response match our
+        # request
+        for header, value in cached.get("vary", {}).items():
+            if request.headers.get(header, None) != value:
+                return
+
+        body_raw = cached["response"].pop("body")
+
+        headers = CaseInsensitiveDict(data=cached['response']['headers'])
+        if headers.get('transfer-encoding', '') == 'chunked':
+            headers.pop('transfer-encoding')
+
+        cached['response']['headers'] = headers
+
+        try:
+            body = io.BytesIO(body_raw)
+        except TypeError:
+            # This can happen if cachecontrol serialized to v1 format (pickle)
+            # using Python 2. A Python 2 str(byte string) will be unpickled as
+            # a Python 3 str (unicode string), which will cause the above to
+            # fail with:
+            #
+            #     TypeError: 'str' does not support the buffer interface
+            body = io.BytesIO(body_raw.encode('utf8'))
+
+        return HTTPResponse(
+            body=body,
+            preload_content=False,
+            **cached["response"]
+        )
+
+    def _loads_v0(self, request, data):
+        # The original legacy cache data. This doesn't contain enough
+        # information to construct everything we need, so we'll treat this as
+        # a miss.
+        return
+
+    def _loads_v1(self, request, data):
+        try:
+            cached = pickle.loads(data)
+        except ValueError:
+            return
+
+        return self.prepare_response(request, cached)
+
+    def _loads_v2(self, request, data):
+        try:
+            cached = json.loads(zlib.decompress(data).decode("utf8"))
+        except (ValueError, zlib.error):
+            return
+
+        # We need to decode the items that we've base64 encoded
+        cached["response"]["body"] = _b64_decode_bytes(
+            cached["response"]["body"]
+        )
+        cached["response"]["headers"] = dict(
+            (_b64_decode_str(k), _b64_decode_str(v))
+            for k, v in cached["response"]["headers"].items()
+        )
+        cached["response"]["reason"] = _b64_decode_str(
+            cached["response"]["reason"],
+        )
+        cached["vary"] = dict(
+            (_b64_decode_str(k), _b64_decode_str(v) if v is not None else v)
+            for k, v in cached["vary"].items()
+        )
+
+        return self.prepare_response(request, cached)
+
+    def _loads_v3(self, request, data):
+        # Due to Python 2 encoding issues, it's impossible to know for sure
+        # exactly how to load v3 entries, thus we'll treat these as a miss so
+        # that they get rewritten out as v4 entries.
+        return
+
+    def _loads_v4(self, request, data):
+        try:
+            cached = msgpack.loads(data, encoding='utf-8')
+        except ValueError:
+            return
+
+        return self.prepare_response(request, cached)
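The "cc=N" prefix is plain bytes in front of the msgpack payload, and loads() dispatches on it by attribute name, so unknown or missing versions degrade to a cache miss. A quick sketch of that behaviour (the request argument is unused on these paths, so None suffices):

    from pip._vendor.cachecontrol.serialize import Serializer

    s = Serializer()
    print(s.loads(None, b''))           # empty data   -> None (miss)
    print(s.loads(None, b'cc=9,xx'))    # no _loads_v9 -> None (miss)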
diff --git a/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/wrapper.py b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/wrapper.py
new file mode 100644
index 0000000..2ceac99
--- /dev/null
+++ b/venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/cachecontrol/wrapper.py
@@ -0,0 +1,27 @@
+from .adapter import CacheControlAdapter
+from .cache import DictCache
+
+
+def CacheControl(sess,
+                 cache=None,
+                 cache_etags=True,
+                 serializer=None,
+                 heuristic=None,
+                 controller_class=None,
+                 adapter_class=None,
+                 cacheable_methods=None):
+
+    cache = cache or DictCache()
+    adapter_class = adapter_class or CacheControlAdapter
+    adapter = adapter_class(
+        cache,
+        cache_etags=cache_etags,
+        serializer=serializer,
+        heuristic=heuristic,
+        controller_class=controller_class,
+        cacheable_methods=cacheable_methods
+    )
+    sess.mount('http://', adapter)
+    sess.mount('https://', adapter)
+
+    return sess
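This wrapper is the usual entry point. A sketch of the default configuration, which pairs an in-memory DictCache with CacheControlAdapter on both schemes:

    from pip._vendor import requests
    from pip._vendor.cachecontrol import CacheControl

    sess = CacheControl(requests.Session())
    resp = sess.get('https://example.com')

Every keyword (cache, cache_etags, serializer, heuristic, controller_class, cacheable_methods) is threaded straight through to the adapter, so the function is purely a convenience over mounting the adapter yourself.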
