12 changed files with 292 additions and 174 deletions
@ -1,74 +1,75 @@ |
|||
from requests.adapters import HTTPAdapter |
|||
|
|||
from cachecontrol.controller import CacheController |
|||
from cachecontrol.cache import DictCache |
|||
from .controller import CacheController |
|||
from .cache import DictCache |
|||
|
|||
class CacheControlAdapter(HTTPAdapter):
    """A ``requests`` transport adapter that adds HTTP caching.

    GET requests are answered from the cache when possible; fresh responses
    are stored via a ``CacheController``, and successful PUT/DELETE requests
    invalidate the cached entry for their URL.
    """

    # Methods whose successful completion invalidates the cached entry.
    invalidating_methods = set(['PUT', 'DELETE'])

    def __init__(self, cache=None, cache_etags=True, controller_class=None,
                 serializer=None, *args, **kw):
        """
        :param cache: cache backend; defaults to an in-memory ``DictCache``.
        :param cache_etags: when True, responses carrying ETags are cached
            and revalidated with conditional requests.
        :param controller_class: alternative factory used in place of
            ``CacheController`` (must accept the same arguments).
        :param serializer: serializer passed through to the controller for
            storing responses in the cache.
        """
        super(CacheControlAdapter, self).__init__(*args, **kw)
        self.cache = cache or DictCache()

        controller_factory = controller_class or CacheController
        self.controller = controller_factory(
            self.cache,
            cache_etags=cache_etags,
            serializer=serializer,
        )

    def send(self, request, **kw):
        """
        Send a request. Use the request information to see if it
        exists in the cache and cache the response if we need to and can.
        """
        if request.method == 'GET':
            cached_response = self.controller.cached_request(request)
            if cached_response:
                # Serve straight from the cache; build_response marks it.
                return self.build_response(request, cached_response,
                                           from_cache=True)

            # check for etags and add headers if appropriate
            request.headers.update(self.controller.conditional_headers(request))

        resp = super(CacheControlAdapter, self).send(request, **kw)

        return resp

    def build_response(self, request, response, from_cache=False):
        """
        Build a response by making a request or using the cache.

        This will end up calling send and returning a potentially
        cached response.

        :param from_cache: True when ``response`` came from the cache, in
            which case caching logic is skipped and the result is only
            flagged as cached.
        """
        if not from_cache and request.method == 'GET':
            if response.status == 304:
                # We must have sent an ETag request. This could mean
                # that we've been expired already or that we simply
                # have an etag. In either case, we want to try and
                # update the cache if that is the case.
                cached_response = self.controller.update_cached_response(
                    request, response
                )

                # The controller hands back the stored response when the
                # 304 successfully refreshed a cache entry.
                if cached_response is not response:
                    from_cache = True

                response = cached_response
            else:
                # try to cache the response
                self.controller.cache_response(request, response)

        resp = super(CacheControlAdapter, self).build_response(
            request, response
        )

        # See if we should invalidate the cache.
        if request.method in self.invalidating_methods and resp.ok:
            cache_url = self.controller.cache_url(request.url)
            self.cache.delete(cache_url)

        # Give the response a from_cache attr to let people use it
        # rather than testing for hasattr.
        resp.from_cache = from_cache

        return resp
|||
|
@ -0,0 +1,97 @@ |
|||
import io |
|||
|
|||
from requests.structures import CaseInsensitiveDict |
|||
|
|||
from .compat import HTTPResponse, pickle |
|||
|
|||
|
|||
class Serializer(object):
    """Serialize and deserialize cached request/response pairs.

    The wire format is a ``b"cc=N,"`` version prefix followed by a pickled
    payload. ``loads`` dispatches on N so that entries written by an older
    (or newer) serializer degrade to a cache miss instead of an error.

    NOTE(review): ``_loads_v1`` unpickles the payload; the cache store must
    therefore never contain attacker-controlled data, since unpickling
    untrusted bytes can execute arbitrary code.
    """

    def dumps(self, request, response, body=None):
        """Serialize ``response`` (produced by ``request``) to bytes.

        :param request: the request whose headers are captured so Vary
            matching can be performed on later lookups.
        :param response: an urllib3 ``HTTPResponse``-like object.
        :param body: optional pre-read body bytes. When ``None``, the body
            is read from ``response`` and the stream is rewound so the
            caller can still consume it.
        :return: versioned byte string (``b"cc=1," + pickle``).
        """
        response_headers = CaseInsensitiveDict(response.headers)

        if body is None:
            # TODO: Figure out a way to handle this which doesn't break
            # streaming
            body = response.read(decode_content=False)
            # Rewind the response so the caller can still read the body.
            response._fp = io.BytesIO(body)

        data = {
            "response": {
                "body": body,
                "headers": response.headers,
                "status": response.status,
                "version": response.version,
                "reason": response.reason,
                "strict": response.strict,
                "decode_content": response.decode_content,
            },
        }

        # Construct our vary headers
        data["vary"] = {}
        if "vary" in response_headers:
            varied_headers = response_headers['vary'].split(',')
            for header in varied_headers:
                header = header.strip()
                data["vary"][header] = request.headers.get(header, None)

        return b"cc=1," + pickle.dumps(data, pickle.HIGHEST_PROTOCOL)

    def loads(self, request, data):
        """Deserialize ``data`` for ``request``.

        :return: a response object, or ``None`` on any kind of miss
            (empty data, unknown version, corrupt payload, Vary mismatch).
        """
        # Short circuit if we've been given an empty set of data
        if not data:
            return

        # Determine what version of the serializer the data was serialized
        # with
        try:
            ver, data = data.split(b",", 1)
        except ValueError:
            ver = b"cc=0"

        # Make sure that our "ver" is actually a version and isn't a false
        # positive from a , being in the data stream.
        if ver[:3] != b"cc=":
            data = ver + data
            ver = b"cc=0"

        # Get the version number out of the cc=N
        ver = ver.split(b"=", 1)[-1].decode("ascii")

        # Dispatch to the actual load method for the given version
        try:
            return getattr(self, "_loads_v{0}".format(ver))(request, data)
        except AttributeError:
            # This is a version we don't have a loads function for, so we'll
            # just treat it as a miss and return None
            return

    def _loads_v0(self, request, data):
        # The original legacy cache data. This doesn't contain enough
        # information to construct everything we need, so we'll treat this as
        # a miss.
        return

    def _loads_v1(self, request, data):
        try:
            cached = pickle.loads(data)
        except (ValueError, EOFError, pickle.UnpicklingError):
            # Corrupt or truncated cache data is a miss, not an error.
            # (Catching only ValueError would let UnpicklingError/EOFError
            # from a mangled entry escape out of a cache lookup.)
            return

        # Special case the '*' Vary value as it means we cannot actually
        # determine if the cached response is suitable for this request.
        if "*" in cached.get("vary", {}):
            return

        # Ensure that the Vary headers for the cached response match our
        # request
        for header, value in cached.get("vary", {}).items():
            if request.headers.get(header, None) != value:
                return

        body = io.BytesIO(cached["response"].pop("body"))
        return HTTPResponse(
            body=body,
            preload_content=False,
            **cached["response"]
        )
@ -1,11 +1,16 @@ |
|||
from cachecontrol.adapter import CacheControlAdapter |
|||
from cachecontrol.cache import DictCache |
|||
from cachecontrol.session import CacheControlSession |
|||
from .adapter import CacheControlAdapter |
|||
from .cache import DictCache |
|||
from .session import CacheControlSession |
|||
|
|||
def CacheControl(sess=None, cache=None, cache_etags=True, serializer=None):
    """Wrap a requests session with HTTP caching support.

    :param sess: an existing session to wrap; a ``CacheControlSession`` is
        created when omitted.
    :param cache: cache backend; defaults to an in-memory ``DictCache``.
    :param cache_etags: when True, responses carrying ETags are cached and
        revalidated with conditional requests.
    :param serializer: optional serializer handed to the adapter for
        storing responses in the cache.
    :return: the session, with the caching adapter mounted for both
        ``http://`` and ``https://`` URLs.
    """
    sess = sess or CacheControlSession()
    cache = cache or DictCache()
    adapter = CacheControlAdapter(
        cache,
        cache_etags=cache_etags,
        serializer=serializer,
    )
    sess.mount('http://', adapter)
    sess.mount('https://', adapter)

    return sess
|||
|
Loading…
Reference in new issue