Improvements made to the TV cache code for providers

This commit is contained in:
echel0n 2014-12-07 09:16:41 -08:00
parent 0365cc201b
commit 589b7167c1
33 changed files with 2362 additions and 2467 deletions


@@ -18,48 +18,53 @@
import os
import traceback
import re
import sickbeard
import six
import chardet
import unicodedata
from string import ascii_letters, digits
from sickbeard import logger
# This module tries to deal with the apparently random behavior of python when dealing with unicode <-> utf-8
# encodings. It tries to just use unicode, but if that fails then it tries forcing it to utf-8. Any functions
# which return something should always return unicode.
def toSafeString(original):
valid_chars = "-_.() %s%s" % (ascii_letters, digits)
cleaned_filename = unicodedata.normalize('NFKD', _toUnicode(original)).encode('ASCII', 'ignore')
valid_string = ''.join(c for c in cleaned_filename if c in valid_chars)
return ' '.join(valid_string.split())
def simplifyString(original):
string = stripAccents(original.lower())
string = toSafeString(' '.join(re.split('\W+', string)))
split = re.split('\W+|_', string.lower())
return _toUnicode(' '.join(split))
def _toUnicode(x):
    try:
        if isinstance(x, unicode):
            return x
        else:
            try:
                return six.text_type(x)
            except:
                try:
                    if chardet.detect(x).get('encoding') == 'utf-8':
                        return x.decode('utf-8')
                    if isinstance(x, str):
                        try:
                            return x.decode(sickbeard.SYS_ENCODING)
                        except UnicodeDecodeError:
                            raise
                    return x
                except:
                    raise
    except:
        logger.log('Unable to decode value "%s..." : %s ' % (repr(x)[:20], traceback.format_exc()), logger.WARNING)
        ascii_text = str(x).encode('string_escape')
        return _toUnicode(ascii_text)
    return x
def ss(x):
    u_x = _toUnicode(x)
    try:
        return u_x.encode(sickbeard.SYS_ENCODING)
    except Exception as e:
        logger.log('Failed ss encoding char, force UTF8: %s' % e, logger.WARNING)
        try:
            return u_x.encode(sickbeard.SYS_ENCODING, 'replace')
        except:
@@ -84,3 +89,6 @@ def ek(func, *args, **kwargs):
return _toUnicode(result)
else:
return result
+ def stripAccents(s):
+ return ''.join((c for c in unicodedata.normalize('NFD', _toUnicode(s)) if unicodedata.category(c) != 'Mn'))
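The intent of the pair above: _toUnicode() always returns unicode, trying the cheap conversion first and only then sniffing bytes, while ss() converts back to a byte string for the OS. A standalone, simplified sketch of the decode cascade (Python 2; to_unicode is a stand-in for illustration, not the function above):

    def to_unicode(x):
        # try the ASCII-only fast path first, then fall back to utf-8,
        # which is roughly what the chardet branch above amounts to
        if isinstance(x, unicode):
            return x
        try:
            return unicode(x)
        except UnicodeDecodeError:
            return x.decode('utf-8')

    print repr(to_unicode('Caf\xc3\xa9'))  # -> u'Caf\xe9'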


@@ -1431,3 +1431,5 @@ def get_size(start_path='.'):
total_size += ek.ek(os.path.getsize, fp)
return total_size
+ def md5(text):
+ return hashlib.md5(ek.ss(text)).hexdigest()
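md5() exists so callers can hash show titles for cache lookups without tripping over unicode: ek.ss() coerces to bytes first, since hashlib.md5() raises UnicodeEncodeError for non-ASCII unicode on Python 2. An equivalent standalone sketch with the ek dependency swapped for a plain UTF-8 encode:

    import hashlib

    def md5_text(text):
        # encode before hashing; hashlib rejects non-ASCII unicode
        if isinstance(text, unicode):
            text = text.encode('utf-8', 'replace')
        return hashlib.md5(text).hexdigest()

    print md5_text(u'Caf\xe9')  # stable hex digest, usable as a cache key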


@@ -79,7 +79,7 @@ class Animezb(generic.NZBProvider):
logger.log(u"Search url: " + search_url, logger.DEBUG)
results = []
- for curItem in self.cache.getRSSFeed(search_url):
+ for curItem in self.cache.getRSSFeed(search_url, items=['entries']) or []:
(title, url) = self._get_title_and_url(curItem)
if title and url:
@@ -134,6 +134,6 @@ class AnimezbCache(tvcache.TVCache):
logger.log(self.provider.name + u" cache update URL: " + rss_url, logger.DEBUG)
- return self.getRSSFeed(rss_url)
+ return self.getRSSFeed(rss_url, items=['entries', 'feed'])
provider = Animezb()
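This pair of changes repeats across the providers below: getRSSFeed() now takes an items list naming which keys of the parsed feed the caller wants back ('entries' for search results, 'entries' plus 'feed' metadata for cache updates and auth checks). A rough sketch of the calling contract with a stand-in fetcher (get_rss and its canned data are hypothetical; the real key-extraction loop appears in rssfeeds.py further down):

    def get_rss(url, items=[]):
        # stand-in for TVCache.getRSSFeed(): return only the requested keys
        parsed = {'entries': [{'title': 'Show.S01E01', 'link': 'http://example.com/nzb/1'}],
                  'feed': {'title': 'Example feed'}}
        return dict((k, parsed.get(k)) for k in items)

    data = get_rss('http://example.com/rss', items=['entries', 'feed'])
    for entry in data['entries'] or []:
        print entry['title']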


@@ -157,7 +157,7 @@ class BitSoupProvider(generic.TorrentProvider):
items = {'Season': [], 'Episode': [], 'RSS': []}
if not self._doLogin():
- return []
+ return results
for mode in search_params.keys():
for search_string in search_params[mode]:
@@ -273,7 +273,7 @@ class BitSoupCache(tvcache.TVCache):
def _getRSSData(self):
search_params = {'RSS': ['']}
- return self.provider._doSearch(search_params)
+ return {'entries': self.provider._doSearch(search_params)}
provider = BitSoupProvider()
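The other recurring one-liner: each cache's _getRSSData() now wraps its raw search results in a dict under 'entries', matching the shape getRSSFeed() produces, so the shared TVCache.updateCache() (refactored near the end of this diff) can always iterate data['entries']. A sketch of the contract with a dummy provider cache:

    class DummyCache(object):
        # minimal stand-in for a provider cache under the new contract
        def _doSearch(self, search_params):
            return [('Show.S01E01.720p', 'http://example.com/1.torrent')]

        def _getRSSData(self):
            search_params = {'RSS': ['']}
            # wrap the list so updateCache() can index data['entries']
            return {'entries': self._doSearch(search_params)}

    print DummyCache()._getRSSData()['entries'][0][0]  # Show.S01E01.720p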


@@ -92,7 +92,7 @@ class BTNProvider(generic.TorrentProvider):
parsedJSON = self._api_call(apikey, params)
if not parsedJSON:
logger.log(u"No data returned from " + self.name, logger.ERROR)
- return []
+ return results
if self._checkAuthFromData(parsedJSON):
@@ -311,7 +311,7 @@ class BTNCache(tvcache.TVCache):
logger.WARNING)
seconds_since_last_update = 86400
- return self.provider._doSearch(search_params=None, age=seconds_since_last_update)
+ return {'entries': self.provider._doSearch(search_params=None, age=seconds_since_last_update)}
provider = BTNProvider()


@@ -123,7 +123,7 @@ class EZRSSProvider(generic.TorrentProvider):
logger.log(u"Search string: " + search_url, logger.DEBUG)
results = []
- for curItem in self.cache.getRSSFeed(search_url):
+ for curItem in self.cache.getRSSFeed(search_url, items=['entries']) or []:
(title, url) = self._get_title_and_url(curItem)
@@ -172,6 +172,6 @@ class EZRSSCache(tvcache.TVCache):
rss_url = self.provider.url + 'feed/'
logger.log(self.provider.name + " cache update URL: " + rss_url, logger.DEBUG)
- return self.getRSSFeed(rss_url)
+ return self.getRSSFeed(rss_url, items=['entries', 'feed'])
provider = EZRSSProvider()


@@ -74,7 +74,7 @@ class Fanzub(generic.NZBProvider):
logger.log(u"Search url: " + search_url, logger.DEBUG)
results = []
- for curItem in self.cache.getRSSFeed(search_url):
+ for curItem in self.cache.getRSSFeed(search_url, items=['entries']) or []:
(title, url) = self._get_title_and_url(curItem)
if title and url:
@@ -129,6 +129,6 @@ class FanzubCache(tvcache.TVCache):
logger.log(self.provider.name + u" cache update URL: " + rss_url, logger.DEBUG)
- return self.getRSSFeed(rss_url)
+ return self.getRSSFeed(rss_url, items=['entries', 'feed'])
provider = Fanzub()


@@ -309,6 +309,6 @@ class FreshOnTVCache(tvcache.TVCache):
def _getRSSData(self):
search_params = {'RSS': ['']}
- return self.provider._doSearch(search_params)
+ return {'entries': self.provider._doSearch(search_params)}
provider = FreshOnTVProvider()


@@ -210,11 +210,12 @@ class HDBitsCache(tvcache.TVCache):
try:
parsedJSON = self.provider.getURL(self.provider.rss_url, post_data=self.provider._make_post_data_JSON(),
json=True)
if self.provider._checkAuthFromData(parsedJSON):
results = parsedJSON['data']
except:
pass
- return results
+ return {'entries': results}
provider = HDBitsProvider()


@@ -342,7 +342,7 @@ class HDTorrentsCache(tvcache.TVCache):
def _getRSSData(self):
search_params = {'RSS': []}
- return self.provider._doSearch(search_params)
+ return {'entries': self.provider._doSearch(search_params)}
provider = HDTorrentsProvider()


@@ -279,7 +279,7 @@ class IPTorrentsCache(tvcache.TVCache):
def _getRSSData(self):
search_params = {'RSS': ['']}
- return self.provider._doSearch(search_params)
+ return {'entries': self.provider._doSearch(search_params)}
provider = IPTorrentsProvider()


@@ -357,6 +357,6 @@ class KATCache(tvcache.TVCache):
def _getRSSData(self):
search_params = {'RSS': ['rss']}
- return self.provider._doSearch(search_params)
+ return {'entries': self.provider._doSearch(search_params)}
provider = KATProvider()


@@ -238,11 +238,10 @@ class NewznabProvider(generic.NZBProvider):
def _checkAuthFromData(self, data):
- if not data:
+ if not (data.entries and data.feed):
return self._checkAuth()
if data.feed.get('error', None):
code = data.feed.error.get('code', None)
if code == '100':
@@ -297,12 +296,12 @@ class NewznabProvider(generic.NZBProvider):
while (total >= offset) and (offset < 1000):
search_url = self.url + 'api?' + urllib.urlencode(params)
logger.log(u"Search url: " + search_url, logger.DEBUG)
- data = self.cache.getRSSFeed(search_url)
- if not data or not self._checkAuthFromData(data):
+ data = self.cache.getRSSFeed(search_url, items=['entries', 'feed'])
+ if not self._checkAuthFromData(data):
break
- for item in data.entries:
+ for item in data.entries or []:
(title, url) = self._get_title_and_url(item)
@@ -422,56 +421,13 @@ class NewznabCache(tvcache.TVCache):
logger.log(self.provider.name + " cache update URL: " + rss_url, logger.DEBUG)
- return self.getRSSFeed(rss_url)
+ return self.getRSSFeed(rss_url, items=['entries', 'feed'])
def _checkAuth(self, data):
return self.provider._checkAuthFromData(data)
- def updateCache(self):
- if not self.shouldUpdate():
- return
- try:
- if self._checkAuth(None):
- data = self._getRSSData()
- if not data or not len(data) > 0:
- return
- # clear cache
- self._clearCache()
- # set updated
- self.setLastUpdate()
- try:
- items = data.get('entries', [])
- except:
- items = data
- if self._checkAuth(items):
- cl = []
- for item in items:
- ci = self._parseItem(item)
- if ci is not None:
- cl.append(ci)
- if len(cl) > 0:
- myDB = self._getDB()
- myDB.mass_action(cl)
- else:
- raise AuthException(
- u"Your authentication credentials for " + self.provider.name + " are incorrect, check your config")
- except AuthException, e:
- logger.log(u"Authentication error: " + ex(e), logger.ERROR)
- except Exception, e:
- logger.log(u"Error while searching " + self.provider.name + ", skipping: " + ex(e), logger.ERROR)
- logger.log(traceback.format_exc(), logger.DEBUG)
# overwrite method with that parses the rageid from the newznab feed
def _parseItem(self, item):
- title = item.title
- url = item.link
+ (title, url) = self._get_title_and_url(item)
attrs = item.newznab_attr
if not isinstance(attrs, list):
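The reworked newznab auth check inspects the parsed feed rather than bare truthiness: no entries or missing feed metadata falls back to the plain credential check, and error code 100 (invalid API key in the newznab spec) raises. A condensed sketch of the flow using plain dicts (check_auth_from_data is a hypothetical stand-in, not the method's exact body):

    def check_auth_from_data(data):
        # data: the dict built by getRSSFeed(..., items=['entries', 'feed'])
        if not (data.get('entries') and data.get('feed')):
            return False  # nothing usable; recheck credentials instead
        error = data['feed'].get('error')
        if error and error.get('code') == '100':
            raise ValueError('Invalid API key')  # stands in for AuthException
        return True

    print check_auth_from_data({'entries': [1], 'feed': {}})  # True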


@@ -320,7 +320,7 @@ class NextGenCache(tvcache.TVCache):
def _getRSSData(self):
search_params = {'RSS': ['']}
- return self.provider._doSearch(search_params)
+ return {'entries': self.provider._doSearch(search_params)}
provider = NextGenProvider()


@@ -80,8 +80,7 @@ class NyaaProvider(generic.TorrentProvider):
logger.log(u"Search string: " + searchURL, logger.DEBUG)
results = []
- for curItem in self.cache.getRSSFeed(searchURL):
+ for curItem in self.cache.getRSSFeed(searchURL, items=['entries']) or []:
(title, url) = self._get_title_and_url(curItem)
if title and url:
@@ -126,6 +125,6 @@ class NyaaCache(tvcache.TVCache):
logger.log(u"NyaaTorrents cache update URL: " + url, logger.DEBUG)
- return self.getRSSFeed(url)
+ return self.getRSSFeed(url, items=['entries', 'feed'])
provider = NyaaProvider()


@@ -184,6 +184,6 @@ class OmgwtfnzbsCache(tvcache.TVCache):
logger.log(self.provider.name + u" cache update URL: " + rss_url, logger.DEBUG)
- return self.getRSSFeed(rss_url)
+ return self.getRSSFeed(rss_url, items=['entries', 'feed'])
provider = OmgwtfnzbsProvider()


@@ -107,11 +107,11 @@ class TorrentRssProvider(generic.TorrentProvider):
if not cookie_validator.match(self.cookies):
return (False, 'Cookie is not correctly formatted: ' + self.cookies)
- data = self.cache._getRSSData()
- if not data or not len(data) > 0:
+ data = self.cache._getRSSData()['entries']
+ if not data:
return (False, 'No items found in the RSS feed ' + self.url)
- (title, url) = self._get_title_and_url(data.entries[0])
+ (title, url) = self._get_title_and_url(data[0])
if not title:
return (False, 'Unable to get title from first item')
@@ -168,4 +168,4 @@ class TorrentRssCache(tvcache.TVCache):
if self.provider.cookies:
request_headers = {'Cookie': self.provider.cookies}
- return self.getRSSFeed(self.provider.url, request_headers=request_headers)
+ return self.getRSSFeed(self.provider.url, request_headers=request_headers, items=['entries', 'feed'])
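The surrounding feed-validation method now pulls just the 'entries' slice out of _getRSSData() and inspects the first item directly instead of probing a feedparser object. A sketch of that validation step under the new shape (validate_entries is hypothetical):

    def validate_entries(data):
        # data: the dict returned by TorrentRssCache._getRSSData()
        entries = data.get('entries')
        if not entries:
            return (False, 'No items found in the RSS feed')
        title = entries[0].get('title')
        if not title:
            return (False, 'Unable to get title from first item')
        return (True, 'First item looks valid: ' + title)

    print validate_entries({'entries': [{'title': 'Show.S01E01'}]})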


@@ -305,6 +305,6 @@ class SCCCache(tvcache.TVCache):
def _getRSSData(self):
search_params = {'RSS': ['']}
- return self.provider._doSearch(search_params)
+ return {'entries': self.provider._doSearch(search_params)}
provider = SCCProvider()


@@ -254,7 +254,7 @@ class SpeedCDCache(tvcache.TVCache):
def _getRSSData(self):
search_params = {'RSS': ['']}
- return self.provider._doSearch(search_params)
+ return {'entries': self.provider._doSearch(search_params)}
provider = SpeedCDProvider()


@@ -260,7 +260,7 @@ class T411Cache(tvcache.TVCache):
def _getRSSData(self):
search_params = {'RSS': ['']}
- return self.provider._doSearch(search_params)
+ return {'entries': self.provider._doSearch(search_params)}
provider = T411Provider()


@@ -340,7 +340,7 @@ class ThePirateBayCache(tvcache.TVCache):
def _getRSSData(self):
search_params = {'RSS': ['rss']}
- return self.provider._doSearch(search_params)
+ return {'entries': self.provider._doSearch(search_params)}
class ThePirateBayWebproxy:
def __init__(self):


@@ -164,7 +164,7 @@ class TokyoToshokanCache(tvcache.TVCache):
logger.log(u"TokyoToshokan cache update URL: " + url, logger.DEBUG)
- return self.getRSSFeed(url)
+ return self.getRSSFeed(url, items=['entries', 'feed'])
provider = TokyoToshokanProvider()


@@ -276,7 +276,7 @@ class TorrentBytesCache(tvcache.TVCache):
def _getRSSData(self):
search_params = {'RSS': ['']}
- return self.provider._doSearch(search_params)
+ return {'entries': self.provider._doSearch(search_params)}
provider = TorrentBytesProvider()


@@ -282,8 +282,6 @@ class TorrentDayCache(tvcache.TVCache):
def _getRSSData(self):
search_params = {'RSS': ['']}
- return self.provider._doSearch(search_params)
+ return {'entries': self.provider._doSearch(search_params)}
provider = TorrentDayProvider()


@@ -277,7 +277,7 @@ class TorrentLeechCache(tvcache.TVCache):
def _getRSSData(self):
search_params = {'RSS': ['']}
- return self.provider._doSearch(search_params)
+ return {'entries': self.provider._doSearch(search_params)}
provider = TorrentLeechProvider()


@@ -60,11 +60,12 @@ class TvTorrentsProvider(generic.TorrentProvider):
return True
def _checkAuthFromData(self, data):
- if not data:
+ if not (data.entries and data.feed):
return self._checkAuth()
- if "User can't be found" in data.feed.get('title', None) or "Invalid Hash" in data.feed.get('title', None):
- logger.log(u"Incorrect authentication credentials for " + self.name + " : " + str(data.feed.title),
+ title = data.feed.get('title', None)
+ if "User can't be found" in title or "Invalid Hash" in title:
+ logger.log(u"Incorrect authentication credentials for " + self.name + " : " + str(title),
logger.DEBUG)
raise AuthException(
u"Your authentication credentials for " + self.name + " are incorrect, check your config")
@@ -89,7 +90,7 @@ class TvTorrentsCache(tvcache.TVCache):
rss_url = self.provider.url + 'RssServlet?digest=' + provider.digest + '&hash=' + provider.hash + '&fname=true&exclude=(' + ignore_regex + ')'
logger.log(self.provider.name + u" cache update URL: " + rss_url, logger.DEBUG)
- return self.getRSSFeed(rss_url)
+ return self.getRSSFeed(rss_url, items=['entries', 'feed'])
def _checkAuth(self, data):
return self.provider._checkAuthFromData(data)


@@ -43,30 +43,31 @@ class WombleCache(tvcache.TVCache):
self.minTime = 15
def updateCache(self):
- # delete anything older then 7 days
- self._clearCache()
# check if we should update
if not self.shouldUpdate():
return
+ # clear cache
+ self._clearCache()
+ # set updated
+ self.setLastUpdate()
cl = []
for url in [self.provider.url + 'rss/?sec=tv-sd&fr=false', self.provider.url + 'rss/?sec=tv-hd&fr=false']:
logger.log(u"Womble's Index cache update URL: " + url, logger.DEBUG)
- # By now we know we've got data and no auth errors, all we need to do is put it in the database
- for item in self.getRSSFeed(url).get('entries', []):
- ci = self._parseItem(item.title, item.url)
+ for item in self.getRSSFeed(url, items=['entries', 'feed'])['entries'] or []:
+ ci = self._parseItem(item)
if ci is not None:
cl.append(ci)
if len(cl) > 0:
myDB = self._getDB()
myDB.mass_action(cl)
- self.setLastUpdate()
def _checkAuth(self, data):
- return data.feed.get('title', None) != 'Invalid Link'
+ return data if data.feed.title != 'Invalid Link' else None


@@ -33,7 +33,7 @@ class RSSFeeds:
finally:
self.rssDB.close()
- def getFeed(self, url, post_data=None, request_headers=None):
+ def getFeed(self, url, post_data=None, request_headers=None, items=[]):
parsed = list(urlparse.urlparse(url))
parsed[2] = re.sub("/{2,}", "/", parsed[2])  # replace two or more / with one
@@ -42,8 +42,15 @@ class RSSFeeds:
try:
fc = Cache(self.rssDB)
- feed = fc.fetch(url, False, False, request_headers)
+ resp = fc.fetch(url, False, False, request_headers)
- return feed
+ data = {}
+ for item in items:
+ try:
+ data[item] = resp[item]
+ except:
+ data[item] = None
+ return data
finally:
self.rssDB.close()
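getFeed() is where the items contract lives: whatever feedcache's Cache.fetch() parses, only the requested keys are copied out, and a missing key degrades to None instead of raising. A standalone sketch of the same idea calling feedparser directly (the feedcache layer and the SQLite-backed rssDB are elided; assumes the bundled feedparser package):

    import feedparser

    def get_feed(url, items=[]):
        resp = feedparser.parse(url)  # fetch() wraps this with caching
        data = {}
        for item in items:
            data[item] = resp.get(item)  # absent keys become None
        return data

    feed = get_feed('http://example.com/rss.xml', items=['entries', 'feed'])
    print len(feed['entries'] or [])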


@@ -375,7 +375,6 @@ def searchForNeededEpisodes():
providers = [x for x in sickbeard.providers.sortedProviderList(sickbeard.RANDOMIZE_PROVIDERS) if x.isActive() and x.enable_daily]
for curProvider in providers:
# spawn separate threads for each provider so we don't need to wait for providers with slow network operation
threads += [threading.Thread(target=curProvider.cache.updateCache, name=origThreadName + " :: [" + curProvider.name + "]")]
# start the thread we just created
@@ -393,13 +392,6 @@ def searchForNeededEpisodes():
# pick a single result for each episode, respecting existing results
for curEp in curFoundResults:
- if curEp.show.paused:
- logger.log(
- u"Show " + curEp.show.name + " is paused, ignoring all RSS items for " + curEp.prettyName(),
- logger.DEBUG)
- continue
# find the best result for the current episode
bestResult = None
for curResult in curFoundResults[curEp]:
@@ -442,6 +434,7 @@ def searchProviders(show, episodes, manualSearch=False):
finalResults = []
didSearch = False
+ threads = []
# build name cache for show
sickbeard.name_cache.buildNameCache(show)
@@ -449,6 +442,18 @@ def searchProviders(show, episodes, manualSearch=False):
origThreadName = threading.currentThread().name
providers = [x for x in sickbeard.providers.sortedProviderList(sickbeard.RANDOMIZE_PROVIDERS) if x.isActive() and x.enable_backlog]
+ for curProvider in providers:
+ threads += [threading.Thread(target=curProvider.cache.updateCache,
+ name=origThreadName + " :: [" + curProvider.name + "]")]
+ # start the thread we just created
+ for t in threads:
+ t.start()
+ # wait for all threads to finish
+ for t in threads:
+ t.join()
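Cache refreshes for the backlog providers now happen up front in worker threads, replacing the serial updateCache() call removed from the per-provider loop in the next hunk. The pattern is a plain thread fan-out/join; a self-contained sketch (update_cache and the provider names are stand-ins):

    import threading

    def update_cache(name):
        print 'updating cache for %s' % name  # stands in for cache.updateCache()

    providers = ['btn', 'kat', 'nyaa']
    threads = [threading.Thread(target=update_cache, args=(p,),
                                name='SEARCHQUEUE :: [%s]' % p) for p in providers]
    for t in threads:
        t.start()
    for t in threads:
        t.join()  # searching starts only once every cache is fresh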
for providerNum, curProvider in enumerate(providers):
if curProvider.anime_only and not show.is_anime:
logger.log(u"" + str(show.name) + " is not an anime, skiping", logger.DEBUG)
@@ -470,7 +475,6 @@ def searchProviders(show, episodes, manualSearch=False):
logger.log(u"Performing season pack search for " + show.name)
try:
- curProvider.cache.updateCache()
searchResults = curProvider.findSearchResults(show, episodes, search_mode, manualSearch)
except exceptions.AuthException, e:
logger.log(u"Authentication error: " + ex(e), logger.ERROR)


@@ -96,12 +96,10 @@ class TVCache():
myDB.action("DELETE FROM [" + self.providerID + "] WHERE 1")
def _get_title_and_url(self, item):
- # override this in the provider if daily search has a different data layout to backlog searches
return self.provider._get_title_and_url(item)
def _getRSSData(self):
- data = None
- return data
+ return None
def _checkAuth(self, data):
return True
@@ -110,49 +108,37 @@ class TVCache():
return True
def updateCache(self):
# check if we should update
if not self.shouldUpdate():
return
- try:
- if self._checkAuth(None):
- data = self._getRSSData()
- if not data or not len(data) > 0:
- return
+ data = self._getRSSData()
+ if self._checkAuth(data):
# clear cache
self._clearCache()
# set updated
self.setLastUpdate()
- try:
- items = data.get('entries', [])
- except:
- items = data
+ cl = []
+ for item in data['entries']:
+ ci = self._parseItem(item)
+ if ci is not None:
+ cl.append(ci)
- if self._checkAuth(items):
- cl = []
- for item in items:
- title, url = self._get_title_and_url(item)
- ci = self._parseItem(title, url)
- if ci is not None:
- cl.append(ci)
+ if len(cl) > 0:
+ myDB = self._getDB()
+ myDB.mass_action(cl)
- if len(cl) > 0:
- myDB = self._getDB()
- myDB.mass_action(cl)
- else:
- raise AuthException(
- u"Your authentication credentials for " + self.provider.name + " are incorrect, check your config")
- except AuthException, e:
- logger.log(u"Authentication error: " + ex(e), logger.ERROR)
- except Exception, e:
- logger.log(u"Error while searching " + self.provider.name + ", skipping: " + ex(e), logger.ERROR)
- logger.log(traceback.format_exc(), logger.DEBUG)
- def getRSSFeed(self, url, post_data=None, request_headers=None):
- return RSSFeeds(self.providerID).getFeed(url, post_data, request_headers)
+ def getRSSFeed(self, url, post_data=None, request_headers=None, items=[]):
+ return RSSFeeds(self.providerID).getFeed(url, post_data, request_headers, items)
def _translateTitle(self, title):
return u'' + title.replace(' ', '.')
@@ -160,7 +146,8 @@ class TVCache():
def _translateLinkURL(self, url):
return url.replace('&amp;', '&')
- def _parseItem(self, title, url):
+ def _parseItem(self, item):
+ title, url = self._get_title_and_url(item)
self._checkItemAuth(title, url)
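Net effect of the TVCache refactor: one canonical update path, where subclasses override _getRSSData() (returning {'entries': [...]}) and optionally _checkAuth(), and _parseItem() takes the raw feed item and derives title/url itself. A skeleton of the resulting contract (simplified; database writes and error handling omitted):

    class TVCacheSketch(object):
        def _getRSSData(self):
            return {'entries': []}  # provider caches override this

        def _checkAuth(self, data):
            return True  # providers that need auth override this

        def _get_title_and_url(self, item):
            return item.get('title'), item.get('link')

        def _parseItem(self, item):
            title, url = self._get_title_and_url(item)
            return (title, url) if title and url else None

        def updateCache(self):
            data = self._getRSSData()
            if self._checkAuth(data):
                return [ci for ci in map(self._parseItem, data['entries']) if ci]

    print TVCacheSketch().updateCache()  # -> []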

File diff suppressed because it is too large.


@@ -12,7 +12,7 @@ from sickbeard.helpers import create_https_certificates
from tornado.web import Application, StaticFileHandler, RedirectHandler, HTTPError
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
+ from tornado.routes import route
class MultiStaticFileHandler(StaticFileHandler):
def initialize(self, paths, default_filename=None):
@@ -62,8 +62,8 @@ class SRWebServer(threading.Thread):
self.video_root = None
# web root
- self.options['web_root'] = ('/' + self.options['web_root'].lstrip('/')) if self.options[
- 'web_root'] else ''
+ self.options['web_root'] = ('/' + self.options['web_root'].lstrip('/')) if self.options['web_root'] else '/'
+ sickbeard.WEB_ROOT = self.options['web_root'].strip('/')
# tornado setup
self.enable_https = self.options['enable_https']
@@ -90,33 +90,31 @@ class SRWebServer(threading.Thread):
autoreload=False,
gzip=True,
xheaders=sickbeard.HANDLE_REVERSE_PROXY,
- cookie_secret='61oETzKXQAGaYdkL5gEmGeJJFuYh7EQnp2XdTP1o/Vo='
+ cookie_secret='61oETzKXQAGaYdkL5gEmGeJJFuYh7EQnp2XdTP1o/Vo=',
+ username=self.options['username'],
+ password=self.options['password'],
)
- # Main Handler
- self.app.add_handlers(".*$", [
- (r'%s/api/(.*)(/?)' % self.options['web_root'], webapi.Api),
- (r'%s/(.*)(/?)' % self.options['web_root'], webserve.MainHandler),
- (r'(.*)', webserve.MainHandler)
- ])
+ # Main Handlers
+ self.app.add_handlers(".*$", [] + route.get_routes())
- # Static Path Handler
+ # Static Path Handlers
self.app.add_handlers(".*$", [
- (r'%s/(favicon\.ico)' % self.options['web_root'], MultiStaticFileHandler,
+ (r'%s(favicon\.ico)' % self.options['web_root'], MultiStaticFileHandler,
{'paths': [os.path.join(self.options['data_root'], 'images/ico/favicon.ico')]}),
- (r'%s/%s/(.*)(/?)' % (self.options['web_root'], 'images'), MultiStaticFileHandler,
+ (r'%s%s/(.*)(/?)' % (self.options['web_root'], 'images'), MultiStaticFileHandler,
{'paths': [os.path.join(self.options['data_root'], 'images'),
os.path.join(sickbeard.CACHE_DIR, 'images')]}),
- (r'%s/%s/(.*)(/?)' % (self.options['web_root'], 'css'), MultiStaticFileHandler,
+ (r'%s%s/(.*)(/?)' % (self.options['web_root'], 'css'), MultiStaticFileHandler,
{'paths': [os.path.join(self.options['data_root'], 'css')]}),
- (r'%s/%s/(.*)(/?)' % (self.options['web_root'], 'js'), MultiStaticFileHandler,
+ (r'%s%s/(.*)(/?)' % (self.options['web_root'], 'js'), MultiStaticFileHandler,
{'paths': [os.path.join(self.options['data_root'], 'js')]}),
])
# Static Videos Path
if self.video_root:
self.app.add_handlers(".*$", [
- (r'%s/%s/(.*)' % (self.options['web_root'], 'videos'), MultiStaticFileHandler,
+ (r'%s%s/(.*)' % (self.options['web_root'], 'videos'), MultiStaticFileHandler,
{'paths': [self.video_root]}),
])
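With web_root now defaulting to '/' rather than '', the static-route patterns drop their hard-coded leading slash and let web_root supply it. A quick check of how the patterns compose under the new default (illustrative values only):

    web_root = '/'  # the new default when no web_root is configured
    print r'%s(favicon\.ico)' % web_root           # /(favicon\.ico)
    print r'%s%s/(.*)(/?)' % (web_root, 'images')  # /images/(.*)(/?)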

tornado/routes.py (new file, 22 lines)

@@ -0,0 +1,22 @@
import tornado.web
class route(object):
_routes = []
def __init__(self, uri, name=None):
self._uri = uri
self.name = name
def __call__(self, _handler):
"""gets called when we class decorate"""
name = self.name and self.name or _handler.__name__
self._routes.append(tornado.web.url(self._uri, _handler, name=name))
return _handler
@classmethod
def get_routes(self):
self._routes.reverse()
return self._routes
def route_redirect(from_, to, name=None):
route._routes.append(tornado.web.url(from_, tornado.web.RedirectHandler, dict(url=to), name=name))