mirror of https://github.com/moparisthebest/SickRage synced 2024-08-13 16:53:54 -04:00

Updated cacheHandler to allow forcing responses into the cache and setting a max-age on the cached content, vastly improving performance with the indexers.

echel0n 2014-03-14 15:35:05 -07:00
parent 05410e2aa0
commit 40c69d6a0f
5 changed files with 36 additions and 29 deletions
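For context, a minimal usage sketch of the new cache_force flag, mirroring how the tvdb_api/tvrage_api changes below enable it. The import paths assume the upstream cachecontrol package layout and the cache directory is illustrative:

import requests
from cachecontrol import CacheControl          # wrapper updated in this commit
from cachecontrol.caches import FileCache      # import path assumed from upstream cachecontrol

# Wrap a session so responses are cached on disk; cache_force=True stamps a
# max-age onto responses that arrive without any caching headers.
sess = CacheControl(requests.Session(),
                    cache_force=True,
                    cache=FileCache('/tmp/indexer_cache'))  # illustrative location

resp = sess.get('http://thetvdb.com/api/GetSeries.php',
                params={'seriesname': 'Firefly', 'language': 'en'})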

View File

@@ -7,10 +7,10 @@ from cachecontrol.cache import DictCache
 class CacheControlAdapter(HTTPAdapter):
     invalidating_methods = set(['PUT', 'DELETE'])
 
-    def __init__(self, cache=None, cache_etags=True, *args, **kw):
+    def __init__(self, cache=None, cache_etags=True, cache_force=False, *args, **kw):
         super(CacheControlAdapter, self).__init__(*args, **kw)
         self.cache = cache or DictCache()
-        self.controller = CacheController(self.cache, cache_etags=cache_etags)
+        self.controller = CacheController(self.cache, cache_etags=cache_etags, cache_force=cache_force)
 
     def send(self, request, **kw):
         """Send a request. Use the request information to see if it

View File

@@ -4,6 +4,7 @@ The httplib2 algorithms ported for use with requests.
 import re
 import calendar
 import time
+import os
 
 from cachecontrol.cache import DictCache
 from cachecontrol.compat import parsedate_tz
@@ -24,9 +25,10 @@ def parse_uri(uri):
 class CacheController(object):
     """An interface to see if request should cached or not.
     """
-    def __init__(self, cache=None, cache_etags=True):
+    def __init__(self, cache=None, cache_etags=True, cache_force=False):
         self.cache = cache or DictCache()
         self.cache_etags = cache_etags
+        self.cache_force = cache_force
 
     def _urlnorm(self, uri):
         """Normalize the URL to create a safe key for the cache"""
@@ -166,7 +168,7 @@ class CacheController(object):
         # return the original handler
         return False
 
-    def add_headers(self, url):
+    def add_headers(self, url, resp=None):
         resp = self.cache.get(url)
         if resp and 'etag' in resp.headers:
             return {'If-None-Match': resp.headers['etag']}
@@ -212,6 +214,11 @@ class CacheController(object):
             if resp.headers['expires']:
                 self.cache.set(cache_url, resp)
 
+        # If the request is for our local cache, it means we should cache it
+        elif self.cache_force:
+            resp.headers.update({'cache-control': 'max-age=21600, private'})
+            self.cache.set(cache_url, resp)
+
     def update_cached_response(self, request, response):
         """On a 304 we will get a new set of headers that we want to
         update our cached value with, assuming we have one.
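The behavioural core of the commit is the new elif branch above: when a response carries no usable freshness information, forced mode stamps a six-hour lifetime (max-age=21600) onto it before storing. A simplified, standalone sketch of that decision, not the library's exact code path:

# Simplified sketch of the forced-caching logic added above (names are
# illustrative, not the library's actual method).
def store_response(cache, cache_url, resp, cache_force=False):
    cc = resp.headers.get('cache-control', '')
    if 'max-age' in cc or resp.headers.get('expires'):
        # Server supplied freshness info: cache normally.
        cache.set(cache_url, resp)
    elif cache_force:
        # No freshness info: force a 6 hour lifetime (21600 seconds, the
        # value used in this commit) so indexer responses are reused
        # instead of being re-fetched on every lookup.
        resp.headers['cache-control'] = 'max-age=21600, private'
        cache.set(cache_url, resp)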

View File

@@ -2,9 +2,9 @@ from cachecontrol.adapter import CacheControlAdapter
 from cachecontrol.cache import DictCache
 
 
-def CacheControl(sess, cache=None, cache_etags=True):
+def CacheControl(sess, cache=None, cache_etags=True, cache_force=False):
     cache = cache or DictCache()
-    adapter = CacheControlAdapter(cache, cache_etags=cache_etags)
+    adapter = CacheControlAdapter(cache, cache_etags=cache_etags, cache_force=cache_force)
     sess.mount('http://', adapter)
 
     return sess

View File

@@ -494,7 +494,7 @@ class Tvdb:
             self.config['params_getSeries'] = {"seriesname": "", "language": "all"}
         else:
             self.config['url_getSeries'] = u"%(base_url)s/api/GetSeries.php" % self.config
-            self.config['params_getSeries'] = {"seriesname": "", "language": ""}
+            self.config['params_getSeries'] = {"seriesname": "", "language": self.config['language']}
 
         self.config['url_epInfo'] = u"%(base_url)s/api/%(apikey)s/series/%%s/all/%%s.xml" % self.config
         self.config['url_epInfo_zip'] = u"%(base_url)s/api/%(apikey)s/series/%%s/all/%%s.zip" % self.config
@@ -527,7 +527,7 @@ class Tvdb:
 
         # cacheControl
         if self.config['cache_enabled']:
-            sess = CacheControl(requests.Session(), cache=FileCache(self.config['cache_location']))
+            sess = CacheControl(requests.Session(), cache_force=True, cache=FileCache(self.config['cache_location']))
         else:
            sess = requests.Session()
@@ -538,26 +538,26 @@ class Tvdb:
            lastTimeout = datetime.datetime.now()
            raise tvdb_error("Could not connect to server: %s" % (e))
 
-        # handle gzipped content,
-        # http://dbr.lighthouseapp.com/projects/13342/tickets/72-gzipped-data-patch
-        if 'gzip' in resp.headers.get("Content-Encoding", ''):
-            if gzip:
-                stream = StringIO.StringIO(resp.content)
-                gz = gzip.GzipFile(fileobj=stream)
-                return gz.read()
-
-            raise tvdb_error("Received gzip data from thetvdb.com, but could not correctly handle it")
-
-        if 'application/zip' in resp.headers.get("Content-Type", ''):
-            try:
-                # TODO: The zip contains actors.xml and banners.xml, which are currently ignored [GH-20]
-                log().debug("We recived a zip file unpacking now ...")
-                zipdata = StringIO.StringIO()
-                zipdata.write(resp.content)
-                myzipfile = zipfile.ZipFile(zipdata)
-                return myzipfile.read('%s.xml' % language)
-            except zipfile.BadZipfile:
-                raise tvdb_error("Bad zip file received from thetvdb.com, could not read it")
+        ## handle gzipped content,
+        ## http://dbr.lighthouseapp.com/projects/13342/tickets/72-gzipped-data-patch
+        #if 'gzip' in resp.headers.get("Content-Encoding", ''):
+        #    if gzip:
+        #        stream = StringIO.StringIO(resp.content)
+        #        gz = gzip.GzipFile(fileobj=stream)
+        #        return gz.read()
+        #
+        #    raise tvdb_error("Received gzip data from thetvdb.com, but could not correctly handle it")
+        #
+        #if 'application/zip' in resp.headers.get("Content-Type", ''):
+        #    try:
+        #        # TODO: The zip contains actors.xml and banners.xml, which are currently ignored [GH-20]
+        #        log().debug("We recived a zip file unpacking now ...")
+        #        zipdata = StringIO.StringIO()
+        #        zipdata.write(resp.content)
+        #        myzipfile = zipfile.ZipFile(zipdata)
+        #        return myzipfile.read('%s.xml' % language)
+        #    except zipfile.BadZipfile:
+        #        raise tvdb_error("Bad zip file received from thetvdb.com, could not read it")
 
         return resp.content
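Side note: the gzip branch disabled above appears redundant because requests itself transparently decompresses gzip/deflate Content-Encoding before exposing resp.content; the application/zip branch is a separate feature that is simply switched off here. A quick illustration (URL is only an example):

# requests already decodes gzip/deflate content encodings, so resp.content
# arrives decompressed even when the server sends gzipped data.
import requests

resp = requests.get('http://thetvdb.com/api/GetSeries.php',   # example URL
                    params={'seriesname': 'Firefly'},
                    headers={'Accept-Encoding': 'gzip'})
print(resp.headers.get('Content-Encoding'))   # may report 'gzip'
print(len(resp.content))                      # already-decoded body bytes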

View File

@@ -358,7 +358,7 @@ class TVRage:
 
        # cacheControl
        if self.config['cache_enabled']:
-           sess = CacheControl(requests.Session(), cache=FileCache(self.config['cache_location']))
+           sess = CacheControl(requests.Session(), cache_force=True, cache=FileCache(self.config['cache_location']))
        else:
           sess = requests.Session()