caches/__init__.py000064400000000561147205457050010116 0ustar00from textwrap import dedent try: from .file_cache import FileCache except ImportError: notice = dedent(''' NOTE: In order to use the FileCache you must have lockfile installed. You can install it via pip: pip install lockfile ''') print(notice) try: import redis from .redis_cache import RedisCache except ImportError: pass caches/__init__.pyc000064400000001146147205457050010261 0ustar00 abc@sddlmZyddlmZWn"ek rHedZeGHnXy ddlZddlmZWnek r|nXdS(i(tdedenti(t FileCaches NOTE: In order to use the FileCache you must have lockfile installed. You can install it via pip: pip install lockfile N(t RedisCache( ttextwrapRt file_cacheRt ImportErrortnoticetredist redis_cacheR(((sL/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/__init__.pyts     caches/__init__.pyo000064400000001146147205457050010275 0ustar00 abc@sddlmZyddlmZWn"ek rHedZeGHnXy ddlZddlmZWnek r|nXdS(i(tdedenti(t FileCaches NOTE: In order to use the FileCache you must have lockfile installed. You can install it via pip: pip install lockfile N(t RedisCache( ttextwrapRt file_cacheRt ImportErrortnoticetredist redis_cacheR(((sL/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/__init__.pyts     caches/file_cache.py000064400000006714147205457050010427 0ustar00import hashlib import os from pip._vendor.lockfile import LockFile from pip._vendor.lockfile.mkdirlockfile import MkdirLockFile from ..cache import BaseCache from ..controller import CacheController def _secure_open_write(filename, fmode): # We only want to write to this file, so open it in write only mode flags = os.O_WRONLY # os.O_CREAT | os.O_EXCL will fail if the file already exists, so we only # will open *new* files. # We specify this because we want to ensure that the mode we pass is the # mode of the file. flags |= os.O_CREAT | os.O_EXCL # Do not follow symlinks to prevent someone from making a symlink that # we follow and insecurely open a cache file. 
if hasattr(os, "O_NOFOLLOW"): flags |= os.O_NOFOLLOW # On Windows we'll mark this file as binary if hasattr(os, "O_BINARY"): flags |= os.O_BINARY # Before we open our file, we want to delete any existing file that is # there try: os.remove(filename) except (IOError, OSError): # The file must not exist already, so we can just skip ahead to opening pass # Open our file, the use of os.O_CREAT | os.O_EXCL will ensure that if a # race condition happens between the os.remove and this line, that an # error will be raised. Because we utilize a lockfile this should only # happen if someone is attempting to attack us. fd = os.open(filename, flags, fmode) try: return os.fdopen(fd, "wb") except: # An error occurred wrapping our FD in a file object os.close(fd) raise class FileCache(BaseCache): def __init__(self, directory, forever=False, filemode=0o0600, dirmode=0o0700, use_dir_lock=None, lock_class=None): if use_dir_lock is not None and lock_class is not None: raise ValueError("Cannot use use_dir_lock and lock_class together") if use_dir_lock: lock_class = MkdirLockFile if lock_class is None: lock_class = LockFile self.directory = directory self.forever = forever self.filemode = filemode self.dirmode = dirmode self.lock_class = lock_class @staticmethod def encode(x): return hashlib.sha224(x.encode()).hexdigest() def _fn(self, name): # NOTE: This method should not change as some may depend on it. 
# See: https://github.com/ionrock/cachecontrol/issues/63 hashed = self.encode(name) parts = list(hashed[:5]) + [hashed] return os.path.join(self.directory, *parts) def get(self, key): name = self._fn(key) if not os.path.exists(name): return None with open(name, 'rb') as fh: return fh.read() def set(self, key, value): name = self._fn(key) # Make sure the directory exists try: os.makedirs(os.path.dirname(name), self.dirmode) except (IOError, OSError): pass with self.lock_class(name) as lock: # Write our actual file with _secure_open_write(lock.path, self.filemode) as fh: fh.write(value) def delete(self, key): name = self._fn(key) if not self.forever: os.remove(name) def url_to_file_path(url, filecache): """Return the file cache path based on the URL. This does not ensure the file exists! """ key = CacheController.cache_url(url) return filecache._fn(key) caches/file_cache.pyc000064400000007377147205457050010600 0ustar00 abc@sddlZddlZddlmZddlmZddlmZddlm Z dZ defd YZ d Z dS( iN(tLockFile(t MkdirLockFilei(t BaseCache(tCacheControllercCstj}|tjtjBO}ttdr<|tjO}nttdr[|tjO}nytj|Wntt fk rnXtj |||}ytj |dSWntj |nXdS(Nt O_NOFOLLOWtO_BINARYtwb( tostO_WRONLYtO_CREATtO_EXCLthasattrRRtremovetIOErrortOSErrortopentfdopentclose(tfilenametfmodetflagstfd((sN/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/file_cache.pyt_secure_open_write s   t FileCachecBsSeZedddddZedZdZdZdZ dZ RS( iicCs||dk r'|dk r'tdn|r6t}n|dkrKt}n||_||_||_||_||_dS(Ns/Cannot use use_dir_lock and lock_class together( tNonet ValueErrorRRt directorytforevertfilemodetdirmodet lock_class(tselfRRRRt use_dir_lockR((sN/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/file_cache.pyt__init__4s       cCstj|jjS(N(thashlibtsha224tencodet hexdigest(tx((sN/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/file_cache.pyR$GscCs<|j|}t|d 
|g}tjj|j|S(Ni(R$tlistRtpathtjoinR(Rtnamethashedtparts((sN/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/file_cache.pyt_fnKscCsK|j|}tjj|s%dSt|d}|jSWdQXdS(Ntrb(R-RR(texistsRRtread(RtkeyR*tfh((sN/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/file_cache.pytgetRs c Cs|j|}y#tjtjj||jWnttfk rKnX|j|2}t |j|j }|j |WdQXWdQXdS(N( R-RtmakedirsR(tdirnameRR RRRRtwrite(RR1tvalueR*tlockR2((sN/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/file_cache.pytsetZs#cCs,|j|}|js(tj|ndS(N(R-RRR (RR1R*((sN/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/file_cache.pytdeletehs N( t__name__t __module__tFalseRR!t staticmethodR$R-R3R9R:(((sN/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/file_cache.pyR3s   cCstj|}|j|S(s\Return the file cache path based on the URL. This does not ensure the file exists! (Rt cache_urlR-(turlt filecacheR1((sN/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/file_cache.pyturl_to_file_pathns( R"Rtpip._vendor.lockfileRt"pip._vendor.lockfile.mkdirlockfileRtcacheRt controllerRRRRB(((sN/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/file_cache.pyts   (;caches/file_cache.pyo000064400000007377147205457050010614 0ustar00 abc@sddlZddlZddlmZddlmZddlmZddlm Z dZ defd YZ d Z dS( iN(tLockFile(t MkdirLockFilei(t BaseCache(tCacheControllercCstj}|tjtjBO}ttdr<|tjO}nttdr[|tjO}nytj|Wntt fk rnXtj |||}ytj |dSWntj |nXdS(Nt O_NOFOLLOWtO_BINARYtwb( tostO_WRONLYtO_CREATtO_EXCLthasattrRRtremovetIOErrortOSErrortopentfdopentclose(tfilenametfmodetflagstfd((sN/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/file_cache.pyt_secure_open_write s   t FileCachecBsSeZedddddZedZdZdZdZ dZ RS( iicCs||dk r'|dk r'tdn|r6t}n|dkrKt}n||_||_||_||_||_dS(Ns/Cannot use use_dir_lock and lock_class together( tNonet ValueErrorRRt directorytforevertfilemodetdirmodet lock_class(tselfRRRRt 
use_dir_lockR((sN/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/file_cache.pyt__init__4s       cCstj|jjS(N(thashlibtsha224tencodet hexdigest(tx((sN/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/file_cache.pyR$GscCs<|j|}t|d |g}tjj|j|S(Ni(R$tlistRtpathtjoinR(Rtnamethashedtparts((sN/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/file_cache.pyt_fnKscCsK|j|}tjj|s%dSt|d}|jSWdQXdS(Ntrb(R-RR(texistsRRtread(RtkeyR*tfh((sN/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/file_cache.pytgetRs c Cs|j|}y#tjtjj||jWnttfk rKnX|j|2}t |j|j }|j |WdQXWdQXdS(N( R-RtmakedirsR(tdirnameRR RRRRtwrite(RR1tvalueR*tlockR2((sN/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/file_cache.pytsetZs#cCs,|j|}|js(tj|ndS(N(R-RRR (RR1R*((sN/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/file_cache.pytdeletehs N( t__name__t __module__tFalseRR!t staticmethodR$R-R3R9R:(((sN/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/file_cache.pyR3s   cCstj|}|j|S(s\Return the file cache path based on the URL. This does not ensure the file exists! 
(Rt cache_urlR-(turlt filecacheR1((sN/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/file_cache.pyturl_to_file_pathns( R"Rtpip._vendor.lockfileRt"pip._vendor.lockfile.mkdirlockfileRtcacheRt controllerRRRRB(((sN/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/file_cache.pyts   (;caches/redis_cache.py000064400000001715147205457050010612 0ustar00from __future__ import division from datetime import datetime def total_seconds(td): """Python 2.6 compatability""" if hasattr(td, 'total_seconds'): return td.total_seconds() ms = td.microseconds secs = (td.seconds + td.days * 24 * 3600) return (ms + secs * 10**6) / 10**6 class RedisCache(object): def __init__(self, conn): self.conn = conn def get(self, key): return self.conn.get(key) def set(self, key, value, expires=None): if not expires: self.conn.set(key, value) else: expires = expires - datetime.now() self.conn.setex(key, total_seconds(expires), value) def delete(self, key): self.conn.delete(key) def clear(self): """Helper for clearing all the keys in a database. 
Use with caution!""" for key in self.conn.keys(): self.conn.delete(key) def close(self): self.conn.disconnect() caches/redis_cache.pyc000064400000004531147205457050010754 0ustar00 abc@ sCddlmZddlmZdZdefdYZdS(i(tdivision(tdatetimecC sJt|dr|jS|j}|j|jdd}||ddS(sPython 2.6 compatabilityt total_secondsiii ii@Bi@B(thasattrRt microsecondstsecondstdays(ttdtmstsecs((sO/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.pyRs   t RedisCachecB sAeZdZdZddZdZdZdZRS(cC s ||_dS(N(tconn(tselfR ((sO/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.pyt__init__scC s|jj|S(N(R tget(R tkey((sO/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.pyRscC sL|s|jj||n,|tj}|jj|t||dS(N(R tsetRtnowtsetexR(R Rtvaluetexpires((sO/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.pyRscC s|jj|dS(N(R tdelete(R R((sO/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.pyRscC s.x'|jjD]}|jj|qWdS(sIHelper for clearing all the keys in a database. 
Use with caution!N(R tkeysR(R R((sO/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.pytclear"scC s|jjdS(N(R t disconnect(R ((sO/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.pytclose(sN( t__name__t __module__R RtNoneRRRR(((sO/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.pyR s      N(t __future__RRRtobjectR (((sO/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.pyts caches/redis_cache.pyo000064400000004531147205457050010770 0ustar00 abc@ sCddlmZddlmZdZdefdYZdS(i(tdivision(tdatetimecC sJt|dr|jS|j}|j|jdd}||ddS(sPython 2.6 compatabilityt total_secondsiii ii@Bi@B(thasattrRt microsecondstsecondstdays(ttdtmstsecs((sO/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.pyRs   t RedisCachecB sAeZdZdZddZdZdZdZRS(cC s ||_dS(N(tconn(tselfR ((sO/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.pyt__init__scC s|jj|S(N(R tget(R tkey((sO/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.pyRscC sL|s|jj||n,|tj}|jj|t||dS(N(R tsetRtnowtsetexR(R Rtvaluetexpires((sO/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.pyRscC s|jj|dS(N(R tdelete(R R((sO/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.pyRscC s.x'|jjD]}|jj|qWdS(sIHelper for clearing all the keys in a database. Use with caution!N(R tkeysR(R R((sO/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.pytclear"scC s|jjdS(N(R t disconnect(R ((sO/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.pytclose(sN( t__name__t __module__R RtNoneRRRR(((sO/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.pyR s      N(t __future__RRRtobjectR (((sO/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.pyts __init__.py000064400000000456147205457050006673 0ustar00"""CacheControl import Interface. 
Make it easy to import from cachecontrol without long namespaces. """ __author__ = 'Eric Larson' __email__ = 'eric@ionrock.org' __version__ = '0.11.7' from .wrapper import CacheControl from .adapter import CacheControlAdapter from .controller import CacheController __init__.pyc000064400000001107147205457050007030 0ustar00 abc@sLdZdZdZdZddlmZddlmZddlm Z dS( sbCacheControl import Interface. Make it easy to import from cachecontrol without long namespaces. s Eric Larsonseric@ionrock.orgs0.11.7i(t CacheControl(tCacheControlAdapter(tCacheControllerN( t__doc__t __author__t __email__t __version__twrapperRtadapterRt controllerR(((sE/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/__init__.pyts __init__.pyo000064400000001107147205457050007044 0ustar00 abc@sLdZdZdZdZddlmZddlmZddlm Z dS( sbCacheControl import Interface. Make it easy to import from cachecontrol without long namespaces. s Eric Larsonseric@ionrock.orgs0.11.7i(t CacheControl(tCacheControlAdapter(tCacheControllerN( t__doc__t __author__t __email__t __version__twrapperRtadapterRt controllerR(((sE/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/__init__.pyts _cmd.py000064400000002450147205457050006032 0ustar00import logging from pip._vendor import requests from pip._vendor.cachecontrol.adapter import CacheControlAdapter from pip._vendor.cachecontrol.cache import DictCache from pip._vendor.cachecontrol.controller import logger from argparse import ArgumentParser def setup_logging(): logger.setLevel(logging.DEBUG) handler = logging.StreamHandler() logger.addHandler(handler) def get_session(): adapter = CacheControlAdapter( DictCache(), cache_etags=True, serializer=None, heuristic=None, ) sess = requests.Session() sess.mount('http://', adapter) sess.mount('https://', adapter) sess.cache_controller = adapter.controller return sess def get_args(): parser = ArgumentParser() parser.add_argument('url', help='The URL to try and cache') return parser.parse_args() def main(args=None): args = get_args() sess = 
get_session() # Make a request to get a response resp = sess.get(args.url) # Turn on logging setup_logging() # try setting the cache sess.cache_controller.cache_response(resp.request, resp.raw) # Now try to get it if sess.cache_controller.cached_request(resp.request): print('Cached!') else: print('Not cached :(') if __name__ == '__main__': main() _cmd.pyc000064400000004013147205457050006172 0ustar00 abc@sddlZddlmZddlmZddlmZddlmZddl m Z dZ dZ d Z dd Zed krendS( iN(trequests(tCacheControlAdapter(t DictCache(tlogger(tArgumentParsercCs-tjtjtj}tj|dS(N(RtsetLeveltloggingtDEBUGt StreamHandlert addHandler(thandler((sA/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/_cmd.pyt setup_logging s cCs]ttdtdddd}tj}|jd||jd||j|_|S(Nt cache_etagst serializert heuristicshttp://shttps://( RRtTruetNoneRtSessiontmountt controllertcache_controller(tadaptertsess((sA/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/_cmd.pyt get_sessions    cCs&t}|jddd|jS(NturlthelpsThe URL to try and cache(Rt add_argumentt parse_args(tparser((sA/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/_cmd.pytget_args!s cCsjt}t}|j|j}t|jj|j|j|jj |jradGHndGHdS(NsCached!s Not cached :(( RRtgetRR Rtcache_responsetrequesttrawtcached_request(targsRtresp((sA/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/_cmd.pytmain's  t__main__(Rt pip._vendorRt pip._vendor.cachecontrol.adapterRtpip._vendor.cachecontrol.cacheRt#pip._vendor.cachecontrol.controllerRtargparseRR RRRR%t__name__(((sA/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/_cmd.pyts      _cmd.pyo000064400000004013147205457050006206 0ustar00 abc@sddlZddlmZddlmZddlmZddlmZddl m Z dZ dZ d Z dd Zed krendS( iN(trequests(tCacheControlAdapter(t DictCache(tlogger(tArgumentParsercCs-tjtjtj}tj|dS(N(RtsetLeveltloggingtDEBUGt StreamHandlert addHandler(thandler((sA/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/_cmd.pyt setup_logging s cCs]ttdtdddd}tj}|jd||jd||j|_|S(Nt cache_etagst serializert heuristicshttp://shttps://( 
RRtTruetNoneRtSessiontmountt controllertcache_controller(tadaptertsess((sA/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/_cmd.pyt get_sessions    cCs&t}|jddd|jS(NturlthelpsThe URL to try and cache(Rt add_argumentt parse_args(tparser((sA/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/_cmd.pytget_args!s cCsjt}t}|j|j}t|jj|j|j|jj |jradGHndGHdS(NsCached!s Not cached :(( RRtgetRR Rtcache_responsetrequesttrawtcached_request(targsRtresp((sA/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/_cmd.pytmain's  t__main__(Rt pip._vendorRt pip._vendor.cachecontrol.adapterRtpip._vendor.cachecontrol.cacheRt#pip._vendor.cachecontrol.controllerRtargparseRR RRRR%t__name__(((sA/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/_cmd.pyts      adapter.py000064400000011000147205457050006537 0ustar00import types import functools from pip._vendor.requests.adapters import HTTPAdapter from .controller import CacheController from .cache import DictCache from .filewrapper import CallbackFileWrapper class CacheControlAdapter(HTTPAdapter): invalidating_methods = set(['PUT', 'DELETE']) def __init__(self, cache=None, cache_etags=True, controller_class=None, serializer=None, heuristic=None, *args, **kw): super(CacheControlAdapter, self).__init__(*args, **kw) self.cache = cache or DictCache() self.heuristic = heuristic controller_factory = controller_class or CacheController self.controller = controller_factory( self.cache, cache_etags=cache_etags, serializer=serializer, ) def send(self, request, **kw): """ Send a request. Use the request information to see if it exists in the cache and cache the response if we need to and can. 
""" if request.method == 'GET': cached_response = self.controller.cached_request(request) if cached_response: return self.build_response(request, cached_response, from_cache=True) # check for etags and add headers if appropriate request.headers.update( self.controller.conditional_headers(request) ) resp = super(CacheControlAdapter, self).send(request, **kw) return resp def build_response(self, request, response, from_cache=False): """ Build a response by making a request or using the cache. This will end up calling send and returning a potentially cached response """ if not from_cache and request.method == 'GET': # Check for any heuristics that might update headers # before trying to cache. if self.heuristic: response = self.heuristic.apply(response) # apply any expiration heuristics if response.status == 304: # We must have sent an ETag request. This could mean # that we've been expired already or that we simply # have an etag. In either case, we want to try and # update the cache if that is the case. cached_response = self.controller.update_cached_response( request, response ) if cached_response is not response: from_cache = True # We are done with the server response, read a # possible response body (compliant servers will # not return one, but we cannot be 100% sure) and # release the connection back to the pool. response.read(decode_content=False) response.release_conn() response = cached_response # We always cache the 301 responses elif response.status == 301: self.controller.cache_response(request, response) else: # Wrap the response file with a wrapper that will cache the # response when the stream has been consumed. 
response._fp = CallbackFileWrapper( response._fp, functools.partial( self.controller.cache_response, request, response, ) ) if response.chunked: super_update_chunk_length = response._update_chunk_length def _update_chunk_length(self): super_update_chunk_length() if self.chunk_left == 0: self._fp._close() response._update_chunk_length = types.MethodType(_update_chunk_length, response) resp = super(CacheControlAdapter, self).build_response( request, response ) # See if we should invalidate the cache. if request.method in self.invalidating_methods and resp.ok: cache_url = self.controller.cache_url(request.url) self.cache.delete(cache_url) # Give the request a from_cache attr to let people use it resp.from_cache = from_cache return resp def close(self): self.cache.close() super(CacheControlAdapter, self).close() adapter.pyc000064400000006703147205457050006720 0ustar00 abc@srddlZddlZddlmZddlmZddlmZddlm Z defdYZ dS( iN(t HTTPAdapteri(tCacheController(t DictCache(tCallbackFileWrappertCacheControlAdaptercBsPeZeddgZdeddddZdZedZ dZ RS(tPUTtDELETEc Osbtt|j|||p%t|_||_|p=t}||jd|d||_dS(Nt cache_etagst serializer(tsuperRt__init__Rtcachet heuristicRt controller( tselfR Rtcontroller_classRR targstkwtcontroller_factory((sD/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/adapter.pyR s   cKs{|jdkr\|jj|}|r=|j||dtS|jj|jj|ntt |j ||}|S(s Send a request. Use the request information to see if it exists in the cache and cache the response if we need to and can. tGETt from_cache( tmethodR tcached_requesttbuild_responsetTruetheaderstupdatetconditional_headersR Rtsend(RtrequestRtcached_responsetresp((sD/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/adapter.pyRs cs| r|jdkr|jr4|jj|}n|jdkr|jj||}||k rmt}n|jdt|j |}q|jdkr|jj ||qt |j t j|jj |||_ |jr|jfd}tj|||_qntt|j||}|j|jkry|jry|jj|j}|jj|n||_|S(s Build a response by making a request or using the cache. 
This will end up calling send and returning a potentially cached response Ri0tdecode_contenti-cs*|jdkr&|jjndS(Ni(t chunk_leftt_fpt_close(R(tsuper_update_chunk_length(sD/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/adapter.pyt_update_chunk_lengthgs(RR tapplytstatusR tupdate_cached_responseRtreadtFalset release_conntcache_responseRR"t functoolstpartialtchunkedR%ttypest MethodTypeR RRtinvalidating_methodstokt cache_urlturlR tdeleteR(RRtresponseRRR%RR4((R$sD/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/adapter.pyR3s<            cCs$|jjtt|jdS(N(R tcloseR R(R((sD/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/adapter.pyR8{s N( t__name__t __module__tsetR2tNoneRR RR*RR8(((sD/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/adapter.pyR s  H( R0R-tpip._vendor.requests.adaptersRR RR Rt filewrapperRR(((sD/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/adapter.pyts  adapter.pyo000064400000006703147205457050006734 0ustar00 abc@srddlZddlZddlmZddlmZddlmZddlm Z defdYZ dS( iN(t HTTPAdapteri(tCacheController(t DictCache(tCallbackFileWrappertCacheControlAdaptercBsPeZeddgZdeddddZdZedZ dZ RS(tPUTtDELETEc Osbtt|j|||p%t|_||_|p=t}||jd|d||_dS(Nt cache_etagst serializer(tsuperRt__init__Rtcachet heuristicRt controller( tselfR Rtcontroller_classRR targstkwtcontroller_factory((sD/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/adapter.pyR s   cKs{|jdkr\|jj|}|r=|j||dtS|jj|jj|ntt |j ||}|S(s Send a request. Use the request information to see if it exists in the cache and cache the response if we need to and can. tGETt from_cache( tmethodR tcached_requesttbuild_responsetTruetheaderstupdatetconditional_headersR Rtsend(RtrequestRtcached_responsetresp((sD/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/adapter.pyRs cs| r|jdkr|jr4|jj|}n|jdkr|jj||}||k rmt}n|jdt|j |}q|jdkr|jj ||qt |j t j|jj |||_ |jr|jfd}tj|||_qntt|j||}|j|jkry|jry|jj|j}|jj|n||_|S(s Build a response by making a request or using the cache. 
This will end up calling send and returning a potentially cached response Ri0tdecode_contenti-cs*|jdkr&|jjndS(Ni(t chunk_leftt_fpt_close(R(tsuper_update_chunk_length(sD/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/adapter.pyt_update_chunk_lengthgs(RR tapplytstatusR tupdate_cached_responseRtreadtFalset release_conntcache_responseRR"t functoolstpartialtchunkedR%ttypest MethodTypeR RRtinvalidating_methodstokt cache_urlturlR tdeleteR(RRtresponseRRR%RR4((R$sD/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/adapter.pyR3s<            cCs$|jjtt|jdS(N(R tcloseR R(R((sD/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/adapter.pyR8{s N( t__name__t __module__tsetR2tNoneRR RR*RR8(((sD/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/adapter.pyR s  H( R0R-tpip._vendor.requests.adaptersRR RR Rt filewrapperRR(((sD/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/adapter.pyts  cache.py000064400000001426147205457050006175 0ustar00""" The cache object API for implementing caches. The default is a thread safe in-memory dictionary. """ from threading import Lock class BaseCache(object): def get(self, key): raise NotImplemented() def set(self, key, value): raise NotImplemented() def delete(self, key): raise NotImplemented() def close(self): pass class DictCache(BaseCache): def __init__(self, init_dict=None): self.lock = Lock() self.data = init_dict or {} def get(self, key): return self.data.get(key, None) def set(self, key, value): with self.lock: self.data.update({key: value}) def delete(self, key): with self.lock: if key in self.data: self.data.pop(key) cache.pyc000064400000004541147205457050006341 0ustar00 abc@sFdZddlmZdefdYZdefdYZdS(sb The cache object API for implementing caches. The default is a thread safe in-memory dictionary. 
i(tLockt BaseCachecBs,eZdZdZdZdZRS(cCs tdS(N(tNotImplemented(tselftkey((sB/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/cache.pytget scCs tdS(N(R(RRtvalue((sB/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/cache.pytset scCs tdS(N(R(RR((sB/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/cache.pytdeletescCsdS(N((R((sB/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/cache.pytcloses(t__name__t __module__RRRR (((sB/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/cache.pyRs   t DictCachecBs/eZddZdZdZdZRS(cCst|_|pi|_dS(N(Rtlocktdata(Rt init_dict((sB/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/cache.pyt__init__s cCs|jj|dS(N(RRtNone(RR((sB/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/cache.pyRscCs+|j|jji||6WdQXdS(N(R Rtupdate(RRR((sB/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/cache.pyR s cCs6|j'||jkr,|jj|nWdQXdS(N(R Rtpop(RR((sB/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/cache.pyR$s N(R R RRRRR(((sB/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/cache.pyR s   N(t__doc__t threadingRtobjectRR (((sB/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/cache.pytscache.pyo000064400000004541147205457050006355 0ustar00 abc@sFdZddlmZdefdYZdefdYZdS(sb The cache object API for implementing caches. The default is a thread safe in-memory dictionary. 
i(tLockt BaseCachecBs,eZdZdZdZdZRS(cCs tdS(N(tNotImplemented(tselftkey((sB/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/cache.pytget scCs tdS(N(R(RRtvalue((sB/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/cache.pytset scCs tdS(N(R(RR((sB/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/cache.pytdeletescCsdS(N((R((sB/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/cache.pytcloses(t__name__t __module__RRRR (((sB/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/cache.pyRs   t DictCachecBs/eZddZdZdZdZRS(cCst|_|pi|_dS(N(Rtlocktdata(Rt init_dict((sB/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/cache.pyt__init__s cCs|jj|dS(N(RRtNone(RR((sB/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/cache.pyRscCs+|j|jji||6WdQXdS(N(R Rtupdate(RRR((sB/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/cache.pyR s cCs6|j'||jkr,|jj|nWdQXdS(N(R Rtpop(RR((sB/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/cache.pyR$s N(R R RRRRR(((sB/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/cache.pyR s   N(t__doc__t threadingRtobjectRR (((sB/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/cache.pytscompat.py000064400000000574147205457050006420 0ustar00try: from urllib.parse import urljoin except ImportError: from urlparse import urljoin try: import cPickle as pickle except ImportError: import pickle from pip._vendor.urllib3.response import HTTPResponse from pip._vendor.urllib3.util import is_fp_closed # Replicate some six behaviour try: text_type = (unicode,) except NameError: text_type = (str,) compat.pyc000064400000001152147205457050006554 0ustar00 abc@syddlmZWn!ek r7ddlmZnXyddlZWnek rgddlZnXddlmZddlm Z y e fZ Wne k re fZ nXdS(i(turljoinN(t HTTPResponse(t is_fp_closed(t urllib.parseRt ImportErrorturlparsetcPickletpickletpip._vendor.urllib3.responseRtpip._vendor.urllib3.utilRtunicodet text_typet NameErrortstr(((sC/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/compat.pyts    
compat.pyo000064400000001152147205457050006570 0ustar00 abc@syddlmZWn!ek r7ddlmZnXyddlZWnek rgddlZnXddlmZddlm Z y e fZ Wne k re fZ nXdS(i(turljoinN(t HTTPResponse(t is_fp_closed(t urllib.parseRt ImportErrorturlparsetcPickletpickletpip._vendor.urllib3.responseRtpip._vendor.urllib3.utilRtunicodet text_typet NameErrortstr(((sC/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/compat.pyts    controller.py000064400000031340147205457050007313 0ustar00""" The httplib2 algorithms ported for use with requests. """ import logging import re import calendar import time from email.utils import parsedate_tz from pip._vendor.requests.structures import CaseInsensitiveDict from .cache import DictCache from .serialize import Serializer logger = logging.getLogger(__name__) URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?") def parse_uri(uri): """Parses a URI using the regex given in Appendix B of RFC 3986. (scheme, authority, path, query, fragment) = parse_uri(uri) """ groups = URI.match(uri).groups() return (groups[1], groups[3], groups[4], groups[6], groups[8]) class CacheController(object): """An interface to see if request should cached or not. """ def __init__(self, cache=None, cache_etags=True, serializer=None): self.cache = cache or DictCache() self.cache_etags = cache_etags self.serializer = serializer or Serializer() @classmethod def _urlnorm(cls, uri): """Normalize the URL to create a safe key for the cache""" (scheme, authority, path, query, fragment) = parse_uri(uri) if not scheme or not authority: raise Exception("Only absolute URIs are allowed. uri = %s" % uri) scheme = scheme.lower() authority = authority.lower() if not path: path = "/" # Could do syntax based normalization of the URI before # computing the digest. See Section 6.2.2 of Std 66. 
request_uri = query and "?".join([path, query]) or path defrag_uri = scheme + "://" + authority + request_uri return defrag_uri @classmethod def cache_url(cls, uri): return cls._urlnorm(uri) def parse_cache_control(self, headers): """ Parse the cache control headers returning a dictionary with values for the different directives. """ retval = {} cc_header = 'cache-control' if 'Cache-Control' in headers: cc_header = 'Cache-Control' if cc_header in headers: parts = headers[cc_header].split(',') parts_with_args = [ tuple([x.strip().lower() for x in part.split("=", 1)]) for part in parts if -1 != part.find("=") ] parts_wo_args = [ (name.strip().lower(), 1) for name in parts if -1 == name.find("=") ] retval = dict(parts_with_args + parts_wo_args) return retval def cached_request(self, request): """ Return a cached response if it exists in the cache, otherwise return False. """ cache_url = self.cache_url(request.url) logger.debug('Looking up "%s" in the cache', cache_url) cc = self.parse_cache_control(request.headers) # Bail out if the request insists on fresh data if 'no-cache' in cc: logger.debug('Request header has "no-cache", cache bypassed') return False if 'max-age' in cc and cc['max-age'] == 0: logger.debug('Request header has "max_age" as 0, cache bypassed') return False # Request allows serving from the cache, let's see if we find something cache_data = self.cache.get(cache_url) if cache_data is None: logger.debug('No cache entry available') return False # Check whether it can be deserialized resp = self.serializer.loads(request, cache_data) if not resp: logger.warning('Cache entry deserialization failed, entry ignored') return False # If we have a cached 301, return it immediately. We don't # need to test our response for other headers b/c it is # intrinsically "cacheable" as it is Permanent. 
# See: # https://tools.ietf.org/html/rfc7231#section-6.4.2 # # Client can try to refresh the value by repeating the request # with cache busting headers as usual (ie no-cache). if resp.status == 301: msg = ('Returning cached "301 Moved Permanently" response ' '(ignoring date and etag information)') logger.debug(msg) return resp headers = CaseInsensitiveDict(resp.headers) if not headers or 'date' not in headers: if 'etag' not in headers: # Without date or etag, the cached response can never be used # and should be deleted. logger.debug('Purging cached response: no date or etag') self.cache.delete(cache_url) logger.debug('Ignoring cached response: no date') return False now = time.time() date = calendar.timegm( parsedate_tz(headers['date']) ) current_age = max(0, now - date) logger.debug('Current age based on date: %i', current_age) # TODO: There is an assumption that the result will be a # urllib3 response object. This may not be best since we # could probably avoid instantiating or constructing the # response until we know we need it. resp_cc = self.parse_cache_control(headers) # determine freshness freshness_lifetime = 0 # Check the max-age pragma in the cache control header if 'max-age' in resp_cc and resp_cc['max-age'].isdigit(): freshness_lifetime = int(resp_cc['max-age']) logger.debug('Freshness lifetime from max-age: %i', freshness_lifetime) # If there isn't a max-age, check for an expires header elif 'expires' in headers: expires = parsedate_tz(headers['expires']) if expires is not None: expire_time = calendar.timegm(expires) - date freshness_lifetime = max(0, expire_time) logger.debug("Freshness lifetime from expires: %i", freshness_lifetime) # Determine if we are setting freshness limit in the # request. Note, this overrides what was in the response. 
if 'max-age' in cc: try: freshness_lifetime = int(cc['max-age']) logger.debug('Freshness lifetime from request max-age: %i', freshness_lifetime) except ValueError: freshness_lifetime = 0 if 'min-fresh' in cc: try: min_fresh = int(cc['min-fresh']) except ValueError: min_fresh = 0 # adjust our current age by our min fresh current_age += min_fresh logger.debug('Adjusted current age from min-fresh: %i', current_age) # Return entry if it is fresh enough if freshness_lifetime > current_age: logger.debug('The response is "fresh", returning cached response') logger.debug('%i > %i', freshness_lifetime, current_age) return resp # we're not fresh. If we don't have an Etag, clear it out if 'etag' not in headers: logger.debug( 'The cached response is "stale" with no etag, purging' ) self.cache.delete(cache_url) # return the original handler return False def conditional_headers(self, request): cache_url = self.cache_url(request.url) resp = self.serializer.loads(request, self.cache.get(cache_url)) new_headers = {} if resp: headers = CaseInsensitiveDict(resp.headers) if 'etag' in headers: new_headers['If-None-Match'] = headers['ETag'] if 'last-modified' in headers: new_headers['If-Modified-Since'] = headers['Last-Modified'] return new_headers def cache_response(self, request, response, body=None): """ Algorithm for caching requests. This assumes a requests Response object. """ # From httplib2: Don't cache 206's since we aren't going to # handle byte range requests cacheable_status_codes = [200, 203, 300, 301] if response.status not in cacheable_status_codes: logger.debug( 'Status code %s not in %s', response.status, cacheable_status_codes ) return response_headers = CaseInsensitiveDict(response.headers) # If we've been given a body, our response has a Content-Length, that # Content-Length is valid then we can check to see if the body we've # been given matches the expected size, and if it doesn't we'll just # skip trying to cache it. 
if (body is not None and "content-length" in response_headers and response_headers["content-length"].isdigit() and int(response_headers["content-length"]) != len(body)): return cc_req = self.parse_cache_control(request.headers) cc = self.parse_cache_control(response_headers) cache_url = self.cache_url(request.url) logger.debug('Updating cache with response from "%s"', cache_url) # Delete it from the cache if we happen to have it stored there no_store = False if cc.get('no-store'): no_store = True logger.debug('Response header has "no-store"') if cc_req.get('no-store'): no_store = True logger.debug('Request header has "no-store"') if no_store and self.cache.get(cache_url): logger.debug('Purging existing cache entry to honor "no-store"') self.cache.delete(cache_url) # If we've been given an etag, then keep the response if self.cache_etags and 'etag' in response_headers: logger.debug('Caching due to etag') self.cache.set( cache_url, self.serializer.dumps(request, response, body=body), ) # Add to the cache any 301s. We do this before looking that # the Date headers. elif response.status == 301: logger.debug('Caching permanant redirect') self.cache.set( cache_url, self.serializer.dumps(request, response) ) # Add to the cache if the response headers demand it. If there # is no date header then we can't do anything about expiring # the cache. elif 'date' in response_headers: # cache when there is a max-age > 0 if cc and cc.get('max-age'): if cc['max-age'].isdigit() and int(cc['max-age']) > 0: logger.debug('Caching b/c date exists and max-age > 0') self.cache.set( cache_url, self.serializer.dumps(request, response, body=body), ) # If the request can expire, it means we should cache it # in the meantime. 
elif 'expires' in response_headers: if response_headers['expires']: logger.debug('Caching b/c of expires header') self.cache.set( cache_url, self.serializer.dumps(request, response, body=body), ) def update_cached_response(self, request, response): """On a 304 we will get a new set of headers that we want to update our cached value with, assuming we have one. This should only ever be called when we've sent an ETag and gotten a 304 as the response. """ cache_url = self.cache_url(request.url) cached_response = self.serializer.loads( request, self.cache.get(cache_url) ) if not cached_response: # we didn't have a cached response return response # Lets update our headers with the headers from the new request: # http://tools.ietf.org/html/draft-ietf-httpbis-p4-conditional-26#section-4.1 # # The server isn't supposed to send headers that would make # the cached body invalid. But... just in case, we'll be sure # to strip out ones we know that might be problmatic due to # typical assumptions. excluded_headers = [ "content-length", ] cached_response.headers.update( dict((k, v) for k, v in response.headers.items() if k.lower() not in excluded_headers) ) # we want a 200 b/c we have content via the cache cached_response.status = 200 # update our cache self.cache.set( cache_url, self.serializer.dumps(request, cached_response), ) return cached_response controller.pyc000064400000021566147205457050007467 0ustar00 abc@sdZddlZddlZddlZddlZddlmZddlmZddl m Z ddl m Z ej eZejdZd Zd efd YZdS( s7 The httplib2 algorithms ported for use with requests. iN(t parsedate_tz(tCaseInsensitiveDicti(t DictCache(t Serializers9^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?cCs<tj|j}|d|d|d|d|dfS(sParses a URI using the regex given in Appendix B of RFC 3986. 
(scheme, authority, path, query, fragment) = parse_uri(uri) iiiii(tURItmatchtgroups(turiR((sG/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/controller.pyt parse_uristCacheControllercBsneZdZd ed dZedZedZdZ dZ dZ d dZ dZ RS( s9An interface to see if request should cached or not. cCs1|p t|_||_|p't|_dS(N(Rtcachet cache_etagsRt serializer(tselfR R R ((sG/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/controller.pyt__init__!s c Cst|\}}}}}| s)| r<td|n|j}|j}|scd}n|r~dj||gp|}|d||}|S(s4Normalize the URL to create a safe key for the caches(Only absolute URIs are allowed. uri = %st/t?s://(Rt Exceptiontlowertjoin( tclsRtschemet authoritytpathtquerytfragmentt request_urit defrag_uri((sG/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/controller.pyt_urlnorm&s   !cCs |j|S(N(R(RR((sG/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/controller.pyt cache_url:sc Csi}d}d|kr!d}n||kr||jd}g|D]R}d|jdkrGtg|jddD]}|jj^qx^qG}g|D]3}d|jdkr|jjdf^q} t|| }n|S(sz Parse the cache control headers returning a dictionary with values for the different directives. s cache-controls Cache-Controlt,it=i(tsplittfindttupletstripRtdict( R theaderstretvalt cc_headertpartstparttxtparts_with_argstnamet parts_wo_args((sG/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/controller.pytparse_cache_control>s   \=cCs^|j|j}tjd||j|j}d|krQtjdtSd|kr~|ddkr~tjdtS|jj|}|dkrtjdtS|j j ||}|stj dtS|j d krd }tj||St|j}| s!d |kr^d |krMtjd |jj|ntjdtStj}tjt|d } td|| } tjd| |j|} d} d| kr| djrt| d} tjd| n`d|krZt|d} | dk rZtj| | }td|} tjd| qZnd|kry$t|d} tjd| Wqtk rd} qXnd|kryt|d}Wntk rd}nX| |7} tjd| n| | kr.tjdtjd| | |Sd |krZtjd|jj|ntS(se Return a cached response if it exists in the cache, otherwise return False. 
sLooking up "%s" in the cachesno-caches-Request header has "no-cache", cache bypassedsmax-ageis1Request header has "max_age" as 0, cache bypassedsNo cache entry availables1Cache entry deserialization failed, entry ignoredi-sVReturning cached "301 Moved Permanently" response (ignoring date and etag information)tdatetetags(Purging cached response: no date or etags!Ignoring cached response: no datesCurrent age based on date: %is#Freshness lifetime from max-age: %itexpiress#Freshness lifetime from expires: %is+Freshness lifetime from request max-age: %is min-freshs'Adjusted current age from min-fresh: %is2The response is "fresh", returning cached responses%i > %is4The cached response is "stale" with no etag, purgingN(RturltloggertdebugR.R%tFalseR tgettNoneR tloadstwarningtstatusRtdeletettimetcalendarttimegmRtmaxtisdigittintt ValueError(R trequestRtcct cache_datatresptmsgR%tnowR/t current_agetresp_cctfreshness_lifetimeR1t expire_timet min_fresh((sG/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/controller.pytcached_requestVs                                cCs|j|j}|jj||jj|}i}|rt|j}d|krk|d|d 0R1sCaching b/c of expires header(R:R3R4RR%R7R@RAtlenR.RR2R5R6tTrueR R;R tsetR tdumps( R RCtresponseRRtcacheable_status_codestresponse_headerstcc_reqRDRtno_store((sG/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/controller.pytcache_responsesd        &  "    cs|j|j}|jj||jj|}|s=|Sdg|jjtfd|jj Dd|_ |jj ||jj |||S(sOn a 304 we will get a new set of headers that we want to update our cached value with, assuming we have one. This should only ever be called when we've sent an ETag and gotten a 304 as the response. 
scontent-lengthc3s3|])\}}|jkr||fVqdS(N(R(t.0tktv(texcluded_headers(sG/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/controller.pys Ts i( RR2R R8R R6R%tupdateR$titemsR:RURV(R RCRWRtcached_response((R`sG/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/controller.pytupdate_cached_response6s   &  N(t__name__t __module__t__doc__R7RTRt classmethodRRR.RNRQR\Rd(((sG/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/controller.pyR s  y  W(RgtloggingtreR=R<t email.utilsRtpip._vendor.requests.structuresRR Rt serializeRt getLoggerReR3tcompileRRtobjectR (((sG/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/controller.pyts     controller.pyo000064400000021566147205457050007503 0ustar00 abc@sdZddlZddlZddlZddlZddlmZddlmZddl m Z ddl m Z ej eZejdZd Zd efd YZdS( s7 The httplib2 algorithms ported for use with requests. iN(t parsedate_tz(tCaseInsensitiveDicti(t DictCache(t Serializers9^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?cCs<tj|j}|d|d|d|d|dfS(sParses a URI using the regex given in Appendix B of RFC 3986. (scheme, authority, path, query, fragment) = parse_uri(uri) iiiii(tURItmatchtgroups(turiR((sG/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/controller.pyt parse_uristCacheControllercBsneZdZd ed dZedZedZdZ dZ dZ d dZ dZ RS( s9An interface to see if request should cached or not. cCs1|p t|_||_|p't|_dS(N(Rtcachet cache_etagsRt serializer(tselfR R R ((sG/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/controller.pyt__init__!s c Cst|\}}}}}| s)| r<td|n|j}|j}|scd}n|r~dj||gp|}|d||}|S(s4Normalize the URL to create a safe key for the caches(Only absolute URIs are allowed. 
uri = %st/t?s://(Rt Exceptiontlowertjoin( tclsRtschemet authoritytpathtquerytfragmentt request_urit defrag_uri((sG/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/controller.pyt_urlnorm&s   !cCs |j|S(N(R(RR((sG/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/controller.pyt cache_url:sc Csi}d}d|kr!d}n||kr||jd}g|D]R}d|jdkrGtg|jddD]}|jj^qx^qG}g|D]3}d|jdkr|jjdf^q} t|| }n|S(sz Parse the cache control headers returning a dictionary with values for the different directives. s cache-controls Cache-Controlt,it=i(tsplittfindttupletstripRtdict( R theaderstretvalt cc_headertpartstparttxtparts_with_argstnamet parts_wo_args((sG/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/controller.pytparse_cache_control>s   \=cCs^|j|j}tjd||j|j}d|krQtjdtSd|kr~|ddkr~tjdtS|jj|}|dkrtjdtS|j j ||}|stj dtS|j d krd }tj||St|j}| s!d |kr^d |krMtjd |jj|ntjdtStj}tjt|d } td|| } tjd| |j|} d} d| kr| djrt| d} tjd| n`d|krZt|d} | dk rZtj| | }td|} tjd| qZnd|kry$t|d} tjd| Wqtk rd} qXnd|kryt|d}Wntk rd}nX| |7} tjd| n| | kr.tjdtjd| | |Sd |krZtjd|jj|ntS(se Return a cached response if it exists in the cache, otherwise return False. 
sLooking up "%s" in the cachesno-caches-Request header has "no-cache", cache bypassedsmax-ageis1Request header has "max_age" as 0, cache bypassedsNo cache entry availables1Cache entry deserialization failed, entry ignoredi-sVReturning cached "301 Moved Permanently" response (ignoring date and etag information)tdatetetags(Purging cached response: no date or etags!Ignoring cached response: no datesCurrent age based on date: %is#Freshness lifetime from max-age: %itexpiress#Freshness lifetime from expires: %is+Freshness lifetime from request max-age: %is min-freshs'Adjusted current age from min-fresh: %is2The response is "fresh", returning cached responses%i > %is4The cached response is "stale" with no etag, purgingN(RturltloggertdebugR.R%tFalseR tgettNoneR tloadstwarningtstatusRtdeletettimetcalendarttimegmRtmaxtisdigittintt ValueError(R trequestRtcct cache_datatresptmsgR%tnowR/t current_agetresp_cctfreshness_lifetimeR1t expire_timet min_fresh((sG/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/controller.pytcached_requestVs                                cCs|j|j}|jj||jj|}i}|rt|j}d|krk|d|d 0R1sCaching b/c of expires header(R:R3R4RR%R7R@RAtlenR.RR2R5R6tTrueR R;R tsetR tdumps( R RCtresponseRRtcacheable_status_codestresponse_headerstcc_reqRDRtno_store((sG/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/controller.pytcache_responsesd        &  "    cs|j|j}|jj||jj|}|s=|Sdg|jjtfd|jj Dd|_ |jj ||jj |||S(sOn a 304 we will get a new set of headers that we want to update our cached value with, assuming we have one. This should only ever be called when we've sent an ETag and gotten a 304 as the response. 
scontent-lengthc3s3|])\}}|jkr||fVqdS(N(R(t.0tktv(texcluded_headers(sG/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/controller.pys Ts i( RR2R R8R R6R%tupdateR$titemsR:RURV(R RCRWRtcached_response((R`sG/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/controller.pytupdate_cached_response6s   &  N(t__name__t __module__t__doc__R7RTRt classmethodRRR.RNRQR\Rd(((sG/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/controller.pyR s  y  W(RgtloggingtreR=R<t email.utilsRtpip._vendor.requests.structuresRR Rt serializeRt getLoggerReR3tcompileRRtobjectR (((sG/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/controller.pyts     filewrapper.py000064400000004743147205457050007457 0ustar00from io import BytesIO class CallbackFileWrapper(object): """ Small wrapper around a fp object which will tee everything read into a buffer, and when that file is closed it will execute a callback with the contents of that buffer. All attributes are proxied to the underlying file object. This class uses members with a double underscore (__) leading prefix so as not to accidentally shadow an attribute. """ def __init__(self, fp, callback): self.__buf = BytesIO() self.__fp = fp self.__callback = callback def __getattr__(self, name): # The vaguaries of garbage collection means that self.__fp is # not always set. By using __getattribute__ and the private # name[0] allows looking up the attribute value and raising an # AttributeError when it doesn't exist. This stop thigns from # infinitely recursing calls to getattr in the case where # self.__fp hasn't been set. # # [0] https://docs.python.org/2/reference/expressions.html#atom-identifiers fp = self.__getattribute__('_CallbackFileWrapper__fp') return getattr(fp, name) def __is_fp_closed(self): try: return self.__fp.fp is None except AttributeError: pass try: return self.__fp.closed except AttributeError: pass # We just don't cache it then. # TODO: Add some logging here... 
return False def _close(self): if self.__callback: self.__callback(self.__buf.getvalue()) # We assign this to None here, because otherwise we can get into # really tricky problems where the CPython interpreter dead locks # because the callback is holding a reference to something which # has a __del__ method. Setting this to None breaks the cycle # and allows the garbage collector to do it's thing normally. self.__callback = None def read(self, amt=None): data = self.__fp.read(amt) self.__buf.write(data) if self.__is_fp_closed(): self._close() return data def _safe_read(self, amt): data = self.__fp._safe_read(amt) if amt == 2 and data == b'\r\n': # urllib executes this read to toss the CRLF at the end # of the chunk. return data self.__buf.write(data) if self.__is_fp_closed(): self._close() return data filewrapper.pyc000064400000005153147205457050007616 0ustar00 abc@s*ddlmZdefdYZdS(i(tBytesIOtCallbackFileWrappercBsGeZdZdZdZdZdZddZdZ RS(sv Small wrapper around a fp object which will tee everything read into a buffer, and when that file is closed it will execute a callback with the contents of that buffer. All attributes are proxied to the underlying file object. This class uses members with a double underscore (__) leading prefix so as not to accidentally shadow an attribute. 
cCs"t|_||_||_dS(N(Rt_CallbackFileWrapper__buft_CallbackFileWrapper__fpt_CallbackFileWrapper__callback(tselftfptcallback((sH/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/filewrapper.pyt__init__s  cCs|jd}t||S(NR(t__getattribute__tgetattr(RtnameR((sH/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/filewrapper.pyt __getattr__s cCsNy|jjdkSWntk r'nXy|jjSWntk rInXtS(N(RRtNonetAttributeErrortclosedtFalse(R((sH/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/filewrapper.pyt__is_fp_closed!s  cCs/|jr"|j|jjnd|_dS(N(RRtgetvalueR (R((sH/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/filewrapper.pyt_close0s cCs?|jj|}|jj||jr;|jn|S(N(RtreadRtwritet"_CallbackFileWrapper__is_fp_closedR(Rtamttdata((sH/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/filewrapper.pyR;s   cCs[|jj|}|dkr.|dkr.|S|jj||jrW|jn|S(Nis (Rt _safe_readRRRR(RRR((sH/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/filewrapper.pyRCs  N( t__name__t __module__t__doc__RR RRR RR(((sH/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/filewrapper.pyRs    N(tioRtobjectR(((sH/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/filewrapper.pytsfilewrapper.pyo000064400000005153147205457050007632 0ustar00 abc@s*ddlmZdefdYZdS(i(tBytesIOtCallbackFileWrappercBsGeZdZdZdZdZdZddZdZ RS(sv Small wrapper around a fp object which will tee everything read into a buffer, and when that file is closed it will execute a callback with the contents of that buffer. All attributes are proxied to the underlying file object. This class uses members with a double underscore (__) leading prefix so as not to accidentally shadow an attribute. 
cCs"t|_||_||_dS(N(Rt_CallbackFileWrapper__buft_CallbackFileWrapper__fpt_CallbackFileWrapper__callback(tselftfptcallback((sH/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/filewrapper.pyt__init__s  cCs|jd}t||S(NR(t__getattribute__tgetattr(RtnameR((sH/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/filewrapper.pyt __getattr__s cCsNy|jjdkSWntk r'nXy|jjSWntk rInXtS(N(RRtNonetAttributeErrortclosedtFalse(R((sH/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/filewrapper.pyt__is_fp_closed!s  cCs/|jr"|j|jjnd|_dS(N(RRtgetvalueR (R((sH/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/filewrapper.pyt_close0s cCs?|jj|}|jj||jr;|jn|S(N(RtreadRtwritet"_CallbackFileWrapper__is_fp_closedR(Rtamttdata((sH/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/filewrapper.pyR;s   cCs[|jj|}|dkr.|dkr.|S|jj||jrW|jn|S(Nis (Rt _safe_readRRRR(RRR((sH/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/filewrapper.pyRCs  N( t__name__t __module__t__doc__RR RRR RR(((sH/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/filewrapper.pyRs    N(tioRtobjectR(((sH/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/filewrapper.pytsheuristics.py000064400000010055147205457050007312 0ustar00import calendar import time from email.utils import formatdate, parsedate, parsedate_tz from datetime import datetime, timedelta TIME_FMT = "%a, %d %b %Y %H:%M:%S GMT" def expire_after(delta, date=None): date = date or datetime.now() return date + delta def datetime_to_header(dt): return formatdate(calendar.timegm(dt.timetuple())) class BaseHeuristic(object): def warning(self, response): """ Return a valid 1xx warning header value describing the cache adjustments. The response is provided too allow warnings like 113 http://tools.ietf.org/html/rfc7234#section-5.5.4 where we need to explicitly say response is over 24 hours old. """ return '110 - "Response is Stale"' def update_headers(self, response): """Update the response headers with any new headers. 
NOTE: This SHOULD always include some Warning header to signify that the response was cached by the client, not by way of the provided headers. """ return {} def apply(self, response): updated_headers = self.update_headers(response) if updated_headers: response.headers.update(updated_headers) warning_header_value = self.warning(response) if warning_header_value is not None: response.headers.update({'Warning': warning_header_value}) return response class OneDayCache(BaseHeuristic): """ Cache the response by providing an expires 1 day in the future. """ def update_headers(self, response): headers = {} if 'expires' not in response.headers: date = parsedate(response.headers['date']) expires = expire_after(timedelta(days=1), date=datetime(*date[:6])) headers['expires'] = datetime_to_header(expires) headers['cache-control'] = 'public' return headers class ExpiresAfter(BaseHeuristic): """ Cache **all** requests for a defined time period. """ def __init__(self, **kw): self.delta = timedelta(**kw) def update_headers(self, response): expires = expire_after(self.delta) return { 'expires': datetime_to_header(expires), 'cache-control': 'public', } def warning(self, response): tmpl = '110 - Automatically cached for %s. Response might be stale' return tmpl % self.delta class LastModified(BaseHeuristic): """ If there is no Expires header already, fall back on Last-Modified using the heuristic from http://tools.ietf.org/html/rfc7234#section-4.2.2 to calculate a reasonable value. Firefox also does something like this per https://developer.mozilla.org/en-US/docs/Web/HTTP/Caching_FAQ http://lxr.mozilla.org/mozilla-release/source/netwerk/protocol/http/nsHttpResponseHead.cpp#397 Unlike mozilla we limit this to 24-hr. 
""" cacheable_by_default_statuses = set([ 200, 203, 204, 206, 300, 301, 404, 405, 410, 414, 501 ]) def update_headers(self, resp): headers = resp.headers if 'expires' in headers: return {} if 'cache-control' in headers and headers['cache-control'] != 'public': return {} if resp.status not in self.cacheable_by_default_statuses: return {} if 'date' not in headers or 'last-modified' not in headers: return {} date = calendar.timegm(parsedate_tz(headers['date'])) last_modified = parsedate(headers['last-modified']) if date is None or last_modified is None: return {} now = time.time() current_age = max(0, now - date) delta = date - calendar.timegm(last_modified) freshness_lifetime = max(0, min(delta / 10, 24 * 3600)) if freshness_lifetime <= current_age: return {} expires = date + freshness_lifetime return {'expires': time.strftime(TIME_FMT, time.gmtime(expires))} def warning(self, resp): return None heuristics.pyc000064400000013461147205457050007461 0ustar00 abc@sddlZddlZddlmZmZmZddlmZmZdZddZ dZ de fdYZ d e fd YZd e fd YZd e fdYZdS(iN(t formatdatet parsedatet parsedate_tz(tdatetimet timedeltas%a, %d %b %Y %H:%M:%S GMTcCs|ptj}||S(N(Rtnow(tdeltatdate((sG/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/heuristics.pyt expire_after scCsttj|jS(N(Rtcalendarttimegmt timetuple(tdt((sG/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/heuristics.pytdatetime_to_headerst BaseHeuristiccBs#eZdZdZdZRS(cCsdS(s! Return a valid 1xx warning header value describing the cache adjustments. The response is provided too allow warnings like 113 http://tools.ietf.org/html/rfc7234#section-5.5.4 where we need to explicitly say response is over 24 hours old. s110 - "Response is Stale"((tselftresponse((sG/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/heuristics.pytwarnings cCsiS(sUpdate the response headers with any new headers. NOTE: This SHOULD always include some Warning header to signify that the response was cached by the client, not by way of the provided headers. 
((RR((sG/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/heuristics.pytupdate_headers!scCsa|j|}|r]|jj||j|}|dk r]|jji|d6q]n|S(NtWarning(RtheaderstupdateRtNone(RRtupdated_headerstwarning_header_value((sG/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/heuristics.pytapply*s (t__name__t __module__RRR(((sG/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/heuristics.pyRs t OneDayCachecBseZdZdZRS(sM Cache the response by providing an expires 1 day in the future. cCsni}d|jkrjt|jd}ttdddt|d }t||ds    "heuristics.pyo000064400000013461147205457050007475 0ustar00 abc@sddlZddlZddlmZmZmZddlmZmZdZddZ dZ de fdYZ d e fd YZd e fd YZd e fdYZdS(iN(t formatdatet parsedatet parsedate_tz(tdatetimet timedeltas%a, %d %b %Y %H:%M:%S GMTcCs|ptj}||S(N(Rtnow(tdeltatdate((sG/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/heuristics.pyt expire_after scCsttj|jS(N(Rtcalendarttimegmt timetuple(tdt((sG/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/heuristics.pytdatetime_to_headerst BaseHeuristiccBs#eZdZdZdZRS(cCsdS(s! Return a valid 1xx warning header value describing the cache adjustments. The response is provided too allow warnings like 113 http://tools.ietf.org/html/rfc7234#section-5.5.4 where we need to explicitly say response is over 24 hours old. s110 - "Response is Stale"((tselftresponse((sG/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/heuristics.pytwarnings cCsiS(sUpdate the response headers with any new headers. NOTE: This SHOULD always include some Warning header to signify that the response was cached by the client, not by way of the provided headers. 
((RR((sG/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/heuristics.pytupdate_headers!scCsa|j|}|r]|jj||j|}|dk r]|jji|d6q]n|S(NtWarning(RtheaderstupdateRtNone(RRtupdated_headerstwarning_header_value((sG/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/heuristics.pytapply*s (t__name__t __module__RRR(((sG/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/heuristics.pyRs t OneDayCachecBseZdZdZRS(sM Cache the response by providing an expires 1 day in the future. cCsni}d|jkrjt|jd}ttdddt|d }t||ds    "serialize.py000064400000014610147205457050007120 0ustar00import base64 import io import json import zlib from pip._vendor.requests.structures import CaseInsensitiveDict from .compat import HTTPResponse, pickle, text_type def _b64_encode_bytes(b): return base64.b64encode(b).decode("ascii") def _b64_encode_str(s): return _b64_encode_bytes(s.encode("utf8")) def _b64_encode(s): if isinstance(s, text_type): return _b64_encode_str(s) return _b64_encode_bytes(s) def _b64_decode_bytes(b): return base64.b64decode(b.encode("ascii")) def _b64_decode_str(s): return _b64_decode_bytes(s).decode("utf8") class Serializer(object): def dumps(self, request, response, body=None): response_headers = CaseInsensitiveDict(response.headers) if body is None: body = response.read(decode_content=False) # NOTE: 99% sure this is dead code. I'm only leaving it # here b/c I don't have a test yet to prove # it. Basically, before using # `cachecontrol.filewrapper.CallbackFileWrapper`, # this made an effort to reset the file handle. The # `CallbackFileWrapper` short circuits this code by # setting the body as the content is consumed, the # result being a `body` argument is *always* passed # into cache_response, and in turn, # `Serializer.dump`. 
response._fp = io.BytesIO(body) data = { "response": { "body": _b64_encode_bytes(body), "headers": dict( (_b64_encode(k), _b64_encode(v)) for k, v in response.headers.items() ), "status": response.status, "version": response.version, "reason": _b64_encode_str(response.reason), "strict": response.strict, "decode_content": response.decode_content, }, } # Construct our vary headers data["vary"] = {} if "vary" in response_headers: varied_headers = response_headers['vary'].split(',') for header in varied_headers: header = header.strip() data["vary"][header] = request.headers.get(header, None) # Encode our Vary headers to ensure they can be serialized as JSON data["vary"] = dict( (_b64_encode(k), _b64_encode(v) if v is not None else v) for k, v in data["vary"].items() ) return b",".join([ b"cc=2", zlib.compress( json.dumps( data, separators=(",", ":"), sort_keys=True, ).encode("utf8"), ), ]) def loads(self, request, data): # Short circuit if we've been given an empty set of data if not data: return # Determine what version of the serializer the data was serialized # with try: ver, data = data.split(b",", 1) except ValueError: ver = b"cc=0" # Make sure that our "ver" is actually a version and isn't a false # positive from a , being in the data stream. if ver[:3] != b"cc=": data = ver + data ver = b"cc=0" # Get the version number out of the cc=N ver = ver.split(b"=", 1)[-1].decode("ascii") # Dispatch to the actual load method for the given version try: return getattr(self, "_loads_v{0}".format(ver))(request, data) except AttributeError: # This is a version we don't have a loads function for, so we'll # just treat it as a miss and return None return def prepare_response(self, request, cached): """Verify our vary headers match and construct a real urllib3 HTTPResponse object. """ # Special case the '*' Vary value as it means we cannot actually # determine if the cached response is suitable for this request. 
if "*" in cached.get("vary", {}): return # Ensure that the Vary headers for the cached response match our # request for header, value in cached.get("vary", {}).items(): if request.headers.get(header, None) != value: return body_raw = cached["response"].pop("body") headers = CaseInsensitiveDict(data=cached['response']['headers']) if headers.get('transfer-encoding', '') == 'chunked': headers.pop('transfer-encoding') cached['response']['headers'] = headers try: body = io.BytesIO(body_raw) except TypeError: # This can happen if cachecontrol serialized to v1 format (pickle) # using Python 2. A Python 2 str(byte string) will be unpickled as # a Python 3 str (unicode string), which will cause the above to # fail with: # # TypeError: 'str' does not support the buffer interface body = io.BytesIO(body_raw.encode('utf8')) return HTTPResponse( body=body, preload_content=False, **cached["response"] ) def _loads_v0(self, request, data): # The original legacy cache data. This doesn't contain enough # information to construct everything we need, so we'll treat this as # a miss. 
return def _loads_v1(self, request, data): try: cached = pickle.loads(data) except ValueError: return return self.prepare_response(request, cached) def _loads_v2(self, request, data): try: cached = json.loads(zlib.decompress(data).decode("utf8")) except ValueError: return # We need to decode the items that we've base64 encoded cached["response"]["body"] = _b64_decode_bytes( cached["response"]["body"] ) cached["response"]["headers"] = dict( (_b64_decode_str(k), _b64_decode_str(v)) for k, v in cached["response"]["headers"].items() ) cached["response"]["reason"] = _b64_decode_str( cached["response"]["reason"], ) cached["vary"] = dict( (_b64_decode_str(k), _b64_decode_str(v) if v is not None else v) for k, v in cached["vary"].items() ) return self.prepare_response(request, cached) serialize.pyc000064400000013720147205457050007264 0ustar00 abc@sddlZddlZddlZddlZddlmZddlmZmZm Z dZ dZ dZ dZ d Zd efd YZdS( iN(tCaseInsensitiveDicti(t HTTPResponsetpicklet text_typecCstj|jdS(Ntascii(tbase64t b64encodetdecode(tb((sF/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/serialize.pyt_b64_encode_bytes scCst|jdS(Ntutf8(R tencode(ts((sF/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/serialize.pyt_b64_encode_strscCs#t|trt|St|S(N(t isinstanceRR R (R ((sF/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/serialize.pyt _b64_encodes cCstj|jdS(NR(Rt b64decodeR (R((sF/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/serialize.pyt_b64_decode_bytesscCst|jdS(NR (RR(R ((sF/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/serialize.pyt_b64_decode_strst SerializercBsAeZddZdZdZdZdZdZRS(c Csxt|j}|dkrB|jdt}tj||_niit|d6t d|jj Dd6|j d6|j d6t |jd6|jd6|jd6d 6}i|d 9stheaderststatustversiontreasontstricttresponsetvaryt,css?|]5\}}t||dk r0t|n|fVqdS(N(RtNone(RRR((sF/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/serialize.pys Nsscc=2t separatorst:t sort_keysR (R R#(RRR!treadtFalsetiotBytesIOt_fpR tdicttitemsRRR 
RRRtsplittstriptgettjointzlibtcompresstjsontdumpstTrueR (tselftrequestRRtresponse_headerstdatatvaried_headerstheader((sF/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/serialize.pyR3#s<          $cCs|s dSy|jdd\}}Wntk r?d}nX|d dkrc||}d}n|jdddjd}y#t|d j|||SWntk rdSXdS( NR iscc=0iscc=t=iRs _loads_v{0}(R,t ValueErrorRtgetattrtformattAttributeError(R5R6R8tver((sF/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/serialize.pytloads[s    # cCsd|jdikrdSxE|jdijD]+\}}|jj|d|kr5dSq5W|djd}td|dd}|jdd d kr|jdn||ddsRRcss?|]5\}}t||dk r0t|n|fVqdS(N(RR!(RRR((sF/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/serialize.pys sR( R2RAR0t decompressRR<RR*R+RRK(R5R6R8RH((sF/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/serialize.pyt _loads_v2s% #N( t__name__t __module__R!R3RARKRLRMRO(((sF/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/serialize.pyR!s  8  (  (RR'R2R0tpip._vendor.requests.structuresRtcompatRRRR R RRRtobjectR(((sF/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/serialize.pyts         serialize.pyo000064400000013720147205457050007300 0ustar00 abc@sddlZddlZddlZddlZddlmZddlmZmZm Z dZ dZ dZ dZ d Zd efd YZdS( iN(tCaseInsensitiveDicti(t HTTPResponsetpicklet text_typecCstj|jdS(Ntascii(tbase64t b64encodetdecode(tb((sF/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/serialize.pyt_b64_encode_bytes scCst|jdS(Ntutf8(R tencode(ts((sF/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/serialize.pyt_b64_encode_strscCs#t|trt|St|S(N(t isinstanceRR R (R ((sF/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/serialize.pyt _b64_encodes cCstj|jdS(NR(Rt b64decodeR (R((sF/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/serialize.pyt_b64_decode_bytesscCst|jdS(NR (RR(R ((sF/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/serialize.pyt_b64_decode_strst SerializercBsAeZddZdZdZdZdZdZRS(c Csxt|j}|dkrB|jdt}tj||_niit|d6t d|jj Dd6|j d6|j d6t |jd6|jd6|jd6d 6}i|d 
def CacheControl(sess, cache=None, cache_etags=True, serializer=None,
                 heuristic=None):
    """Attach caching behavior to a requests ``Session``.

    Mounts a :class:`CacheControlAdapter` for both the ``http://`` and
    ``https://`` prefixes on *sess* and returns the session.

    :param sess: the requests Session to wrap.
    :param cache: cache backend; a fresh :class:`DictCache` when falsy.
    :param cache_etags: forwarded to the adapter.
    :param serializer: forwarded to the adapter.
    :param heuristic: forwarded to the adapter.
    """
    if not cache:
        cache = DictCache()

    adapter = CacheControlAdapter(
        cache,
        cache_etags=cache_etags,
        serializer=serializer,
        heuristic=heuristic,
    )

    # One shared adapter handles both schemes.
    for prefix in ('http://', 'https://'):
        sess.mount(prefix, adapter)

    return sess
heuristicshttp://shttps://(RRtmount(tsesstcacheRRRtadapter((sD/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/wrapper.pyt CacheControls N(RRRRtNonetTrueR (((sD/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/wrapper.pyts wrapper.pyo000064400000001270147205457050006766 0ustar00 abc@s9ddlmZddlmZdedddZdS(i(tCacheControlAdapter(t DictCachecCsQ|p t}t|d|d|d|}|jd||jd||S(Nt cache_etagst serializert heuristicshttp://shttps://(RRtmount(tsesstcacheRRRtadapter((sD/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/wrapper.pyt CacheControls N(RRRRtNonetTrueR (((sD/usr/lib/python2.7/site-packages/pip/_vendor/cachecontrol/wrapper.pyts