mirror of https://github.com/moparisthebest/SickRage, synced 2025-01-06 03:18:01 -05:00
Improved RSS feed cache parser code for provider searches and caching of their RSS feeds.
This commit is contained in:
commit fa02ff40e7 (parent f6370c6756)
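This commit normalizes what the feed helpers return: RSSFeeds.getFeed (rssfeeds.py hunks below) now always hands back a dict with 'entries' and 'feed' keys, instead of sometimes a raw feedparser result and sometimes None, and the per-provider cache classes simply forward that value. A minimal sketch of the shape callers can rely on after this change (the sample data is invented):

# Illustrative only: the dict shape RSSFeeds.getFeed() returns after this commit.
feed_result = {
    'entries': [{'title': 'Show.S01E01.720p', 'link': 'http://example.invalid/nzb/1'}],
    'feed': {'title': 'Example Feed'},
}

# Either key may be None if the fetch or parse failed, so callers guard with .get():
for item in feed_result.get('entries', None) or []:
    print(item['title'])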
.gitignore (vendored): 12 changed lines
@@ -1,4 +1,4 @@
-# SB User Related #
+# SR User Related #
 ######################
 cache/*
 cache.db*
@@ -11,11 +11,13 @@ server.crt
 server.key
 restore/
 
-# SB Test Related #
+# SR Test Related #
 ######################
 tests/Logs/*
-tests/sickbeard.*
-tests/cache.db
+tests/cache/*
+tests/sickbeard.db*
+tests/cache.db*
+tests/failed.db
 
 # Compiled source #
 ######################
@@ -46,4 +48,4 @@ Thumbs.db
 .directory
 *~
 /.idea/
-*.torrent
+*.torrent
@@ -78,28 +78,18 @@ class Animezb(generic.NZBProvider):
 
         logger.log(u"Search url: " + search_url, logger.DEBUG)
 
-        data = self.cache.getRSSFeed(search_url)
-        if not data:
-            return []
-
-        if 'entries' in data:
-
-            items = data.entries
-            results = []
-
-            for curItem in items:
-                (title, url) = self._get_title_and_url(curItem)
-
-                if title and url:
-                    results.append(curItem)
-                else:
-                    logger.log(
-                        u"The data returned from the " + self.name + " is incomplete, this result is unusable",
-                        logger.DEBUG)
-
-            return results
-
-        return []
+        results = []
+        for curItem in self.cache.getRSSFeed(search_url):
+            (title, url) = self._get_title_and_url(curItem)
+
+            if title and url:
+                results.append(curItem)
+            else:
+                logger.log(
+                    u"The data returned from the " + self.name + " is incomplete, this result is unusable",
+                    logger.DEBUG)
+
+        return results
 
     def findPropers(self, date=None):
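The Animezb hunk above is the template for the Fanzub and NyaaTorrents changes further down: the explicit data/'entries' guards are dropped, and the search method just filters whatever the cache's feed lookup yields by whether a title and URL can be extracted. A self-contained sketch of that filter, with invented entries and a stand-in for _get_title_and_url:

def filter_usable(entries, get_title_and_url):
    # Keep only feed entries that yield both a title and a URL.
    results = []
    for item in entries:
        title, url = get_title_and_url(item)
        if title and url:
            results.append(item)
    return results

entries = [{'title': 'Show.S01E01', 'link': 'http://example.invalid/1'},
           {'title': None, 'link': None}]
print(filter_usable(entries, lambda e: (e['title'], e['link'])))  # keeps only the first entry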
@@ -144,12 +134,6 @@ class AnimezbCache(tvcache.TVCache):
 
         logger.log(self.provider.name + u" cache update URL: " + rss_url, logger.DEBUG)
 
-        data = self.getRSSFeed(rss_url)
-
-        if data and 'entries' in data:
-            return data.entries
-        else:
-            return []
+        return self.getRSSFeed(rss_url)
 
 provider = Animezb()
@@ -122,15 +122,8 @@ class EZRSSProvider(generic.TorrentProvider):
 
         logger.log(u"Search string: " + search_url, logger.DEBUG)
 
-        data = self.cache.getRSSFeed(search_url)
-
-        if not data:
-            return []
-
-        items = data.entries
-
         results = []
-        for curItem in items:
+        for curItem in self.cache.getRSSFeed(search_url):
 
             (title, url) = self._get_title_and_url(curItem)
 
@@ -179,8 +172,6 @@ class EZRSSCache(tvcache.TVCache):
         rss_url = self.provider.url + 'feed/'
         logger.log(self.provider.name + " cache update URL: " + rss_url, logger.DEBUG)
 
-        data = self.getRSSFeed(rss_url)
-
-        return data.entries or []
+        return self.getRSSFeed(rss_url)
 
 provider = EZRSSProvider()
@@ -73,28 +73,18 @@ class Fanzub(generic.NZBProvider):
 
         logger.log(u"Search url: " + search_url, logger.DEBUG)
 
-        data = self.cache.getRSSFeed(search_url)
-        if not data:
-            return []
-
-        if 'entries' in data:
-
-            items = data.entries
-            results = []
-
-            for curItem in items:
-                (title, url) = self._get_title_and_url(curItem)
-
-                if title and url:
-                    results.append(curItem)
-                else:
-                    logger.log(
-                        u"The data returned from the " + self.name + " is incomplete, this result is unusable",
-                        logger.DEBUG)
-
-            return results
-
-        return []
+        results = []
+        for curItem in self.cache.getRSSFeed(search_url):
+            (title, url) = self._get_title_and_url(curItem)
+
+            if title and url:
+                results.append(curItem)
+            else:
+                logger.log(
+                    u"The data returned from the " + self.name + " is incomplete, this result is unusable",
+                    logger.DEBUG)
+
+        return results
 
     def findPropers(self, date=None):
@@ -139,12 +129,6 @@ class FanzubCache(tvcache.TVCache):
 
         logger.log(self.provider.name + u" cache update URL: " + rss_url, logger.DEBUG)
 
-        data = self.getRSSFeed(rss_url)
-
-        if data and 'entries' in data:
-            return data.entries
-        else:
-            return []
+        return self.getRSSFeed(rss_url)
 
 provider = Fanzub()
@@ -237,11 +237,12 @@ class NewznabProvider(generic.NZBProvider):
 
     def _checkAuthFromData(self, data):
 
-        if data is None:
+        if not data.get('entries', None):
             return self._checkAuth()
 
-        if 'error' in data.feed:
-            code = data.feed['error']['code']
+        if data.feed.get('error', None):
+
+            code = data.feed.error.get('code', None)
 
             if code == '100':
                 raise AuthException("Your API key for " + self.name + " is incorrect, check your config.")
@@ -251,7 +252,7 @@ class NewznabProvider(generic.NZBProvider):
                 raise AuthException(
                     "Your account isn't allowed to use the API on " + self.name + ", contact the administrator")
             else:
-                logger.log(u"Unknown error given from " + self.name + ": " + data.feed['error']['description'],
+                logger.log(u"Unknown error given from " + self.name + ": " + data.feed.error.description,
                            logger.ERROR)
                 return False
 
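The newznab hunks swap dict-style lookups on the parsed feed for feedparser-style .get() and attribute access. Assuming a feedparser FeedParserDict (which supports both access styles), the new error check can be restated defensively like this; newznab_error_code and parsed are illustrative names, not from the repo:

def newznab_error_code(parsed):
    # A Newznab error response carries an 'error' element with a code attribute;
    # missing keys fall through to None instead of raising KeyError.
    error = parsed.feed.get('error', None)
    if error:
        return error.get('code', None)
    return None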
@@ -79,32 +79,21 @@ class NyaaProvider(generic.TorrentProvider):
 
         logger.log(u"Search string: " + searchURL, logger.DEBUG)
 
-        data = self.cache.getRSSFeed(searchURL)
-        if not data:
-            return []
-
-        if 'entries' in data:
-            items = data.entries
-
-            results = []
-
-            for curItem in items:
-
-                (title, url) = self._get_title_and_url(curItem)
-
-                if title and url:
-                    results.append(curItem)
-                else:
-                    logger.log(
-                        u"The data returned from the " + self.name + " is incomplete, this result is unusable",
-                        logger.DEBUG)
-
-            return results
-
-        return []
+        results = []
+        for curItem in self.cache.getRSSFeed(searchURL):
+
+            (title, url) = self._get_title_and_url(curItem)
+
+            if title and url:
+                results.append(curItem)
+            else:
+                logger.log(
+                    u"The data returned from the " + self.name + " is incomplete, this result is unusable",
+                    logger.DEBUG)
+
+        return results
 
     def _get_title_and_url(self, item):
 
         return generic.TorrentProvider._get_title_and_url(self, item)
 
     def _extract_name_from_filename(self, filename):
@@ -137,12 +126,6 @@ class NyaaCache(tvcache.TVCache):
 
         logger.log(u"NyaaTorrents cache update URL: " + url, logger.DEBUG)
 
-        data = self.getRSSFeed(url)
-
-        if data and 'entries' in data:
-            return data.entries
-        else:
-            return []
+        return self.getRSSFeed(url)
 
 provider = NyaaProvider()
@@ -184,11 +184,6 @@ class OmgwtfnzbsCache(tvcache.TVCache):
 
         logger.log(self.provider.name + u" cache update URL: " + rss_url, logger.DEBUG)
 
-        data = self.getRSSFeed(rss_url)
-
-        if data and 'entries' in data:
-            return data.entries
-        else:
-            return []
+        return self.getRSSFeed(rss_url)
 
 provider = OmgwtfnzbsProvider()
@@ -169,9 +169,4 @@ class TorrentRssCache(tvcache.TVCache):
         if self.provider.cookies:
             request_headers = {'Cookie': self.provider.cookies}
 
-        data = self.getRSSFeed(self.provider.url, request_headers=request_headers)
-
-        if data and 'entries' in data:
-            return data.entries
-        else:
-            return []
+        return self.getRSSFeed(self.provider.url, request_headers=request_headers)
@@ -164,12 +164,7 @@ class TokyoToshokanCache(tvcache.TVCache):
 
         logger.log(u"TokyoToshokan cache update URL: " + url, logger.DEBUG)
 
-        data = self.getRSSFeed(url)
-
-        if data and 'entries' in data:
-            return data.entries
-        else:
-            return []
+        return self.getRSSFeed(url)
 
 
 provider = TokyoToshokanProvider()
@@ -54,14 +54,16 @@ class TvTorrentsProvider(generic.TorrentProvider):
         return 'tvtorrents.png'
 
     def _checkAuth(self):
 
        if not self.digest or not self.hash:
            raise AuthException("Your authentication credentials for " + self.name + " are missing, check your config.")
 
        return True
 
    def _checkAuthFromData(self, data):
-        if "User can't be found" in data.feed.title or "Invalid Hash" in data.feed.title:
+        if not data.get('entries', None):
+            return self._checkAuth()
+
+        if "User can't be found" in data.feed.get('title', None) or "Invalid Hash" in data.feed.get('title', None):
            logger.log(u"Incorrect authentication credentials for " + self.name + " : " + str(data.feed.title),
                       logger.DEBUG)
            raise AuthException(
@@ -87,16 +89,9 @@ class TvTorrentsCache(tvcache.TVCache):
         rss_url = self.provider.url + 'RssServlet?digest=' + provider.digest + '&hash=' + provider.hash + '&fname=true&exclude=(' + ignore_regex + ')'
         logger.log(self.provider.name + u" cache update URL: " + rss_url, logger.DEBUG)
 
-        data = self.getRSSFeed(rss_url)
-
-        if not self.provider._checkAuthFromData(data):
-            return []
-
-        if data and 'entries' in data:
-            return data['entries']
-        else:
-            return []
+        return self.getRSSFeed(rss_url)
 
+    def _checkAuth(self, data):
+        return self.provider._checkAuthFromData(data)
 
 provider = TvTorrentsProvider()
@@ -47,38 +47,26 @@ class WombleCache(tvcache.TVCache):
         # delete anything older then 7 days
         self._clearCache()
 
-        data = None
-
         if not self.shouldUpdate():
             return
 
         cl = []
         for url in [self.provider.url + 'rss/?sec=tv-sd&fr=false', self.provider.url + 'rss/?sec=tv-hd&fr=false']:
             logger.log(u"Womble's Index cache update URL: " + url, logger.DEBUG)
-            data = self.getRSSFeed(url)
-
-            # As long as we got something from the provider we count it as an update
-            if not data:
-                return []
-
-            # By now we know we've got data and no auth errors, all we need to do is put it in the database
-            for item in data.entries:
-                ci = self._parseItem(item)
+            for item in self.getRSSFeed(url).get('entries', []):
+                ci = self._parseItem(item.title, item.url)
                 if ci is not None:
                     cl.append(ci)
 
         if len(cl) > 0:
             myDB = self._getDB()
             myDB.mass_action(cl)
 
         # set last updated
-        if data:
-            self.setLastUpdate()
+        self.setLastUpdate()
 
     def _checkAuth(self, data):
-        return data != 'Invalid Link'
+        return data.feed.get('title', None) != 'Invalid Link'
 
 provider = WombleProvider()
@@ -4,6 +4,7 @@ import os
 import urllib
 import urlparse
 import re
+import collections
 
 import sickbeard
 
@@ -11,28 +12,30 @@ from sickbeard import logger
 from sickbeard import encodingKludge as ek
 from sickbeard.exceptions import ex
 
-from contextlib import closing
-from lib.feedcache import cache
+from feedcache.cache import Cache
 from sqliteshelf import SQLiteShelf
 
 class RSSFeeds:
     def __init__(self, db_name):
-        db_name = ek.ek(os.path.join, sickbeard.CACHE_DIR, 'rss', db_name) + '.db'
-        if not os.path.exists(os.path.dirname(db_name)):
-            sickbeard.helpers.makeDir(os.path.dirname(db_name))
+        self.rssItems = {'entries': None,
+                         'feed': None
+        }
 
         try:
+            db_name = ek.ek(os.path.join, sickbeard.CACHE_DIR, 'rss', db_name) + '.db'
+            if not os.path.exists(os.path.dirname(db_name)):
+                sickbeard.helpers.makeDir(os.path.dirname(db_name))
+
             self.rssDB = SQLiteShelf(db_name)
         except Exception as e:
             logger.log(u"RSS error: " + ex(e), logger.DEBUG)
 
     def clearCache(self, age=None):
         try:
-            with closing(self.rssDB) as fs:
-                fc = cache.Cache(fs)
-                fc.purge(age)
-        except Exception as e:
-            logger.log(u"RSS error clearing cache: " + ex(e), logger.DEBUG)
+            fc = Cache(self.rssDB)
+            fc.purge(age)
+        finally:
+            self.rssDB.close()
 
     def getFeed(self, url, post_data=None, request_headers=None):
         parsed = list(urlparse.urlparse(url))
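The rewritten RSSFeeds builds the feedcache Cache directly on the SQLiteShelf store and closes the shelf itself instead of going through contextlib.closing. A minimal sketch of that pairing under the same imports the diff uses (the path and URL are placeholders):

from sqliteshelf import SQLiteShelf
from feedcache.cache import Cache

storage = SQLiteShelf('/tmp/rss-example.db')   # dict-like persistent store
try:
    fc = Cache(storage)                        # caches parsed feeds in the shelf
    # Positional args mirror the call in this diff: (url, force_update, offline, request_headers)
    feed = fc.fetch('http://example.invalid/rss', False, False, None)
finally:
    storage.close()                            # the commit closes the shelf explicitly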
@@ -42,21 +45,12 @@ class RSSFeeds:
             url += urllib.urlencode(post_data)
 
         try:
-            with closing(self.rssDB) as fs:
-                fc = cache.Cache(fs)
-                feed = fc.fetch(url, False, False, request_headers)
-
-                if feed:
-                    if 'entries' in feed:
-                        return feed
-                    elif 'error' in feed.feed:
-                        err_code = feed.feed['error']['code']
-                        err_desc = feed.feed['error']['description']
-
-                        logger.log(
-                            u"RSS ERROR:[%s] CODE:[%s]" % (err_desc, err_code), logger.DEBUG)
-                else:
-                    logger.log(u"RSS error loading url: " + url, logger.DEBUG)
-
-        except Exception as e:
-            logger.log(u"RSS error: " + ex(e), logger.DEBUG)
+            fc = Cache(self.rssDB)
+            feed = fc.fetch(url, False, False, request_headers)
+
+            self.rssItems['entries'] = feed.get('entries', None)
+            self.rssItems['feed'] = feed.get('feed', None)
+        finally:
+            self.rssDB.close()
+
+        return self.rssItems
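After this hunk, getFeed never returns early and never swallows the result into a log line: it fetches under try/finally, copies 'entries' and 'feed' out of the parsed feed, closes the shelf, and always returns the rssItems dict. From the caller's side that contract looks like this (rss_feeds stands in for an RSSFeeds instance):

# The return value is always a dict, so .get() replaces None checks.
result = rss_feeds.getFeed('http://example.invalid/rss')
for item in result.get('entries', None) or []:
    title = item.get('title', None)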
@@ -102,36 +102,41 @@ class TVCache():
             data = None
 
         return data
 
-    def _checkAuth(self):
-        return self.provider._checkAuth()
+    def _checkAuth(self, data):
+        return True
 
     def _checkItemAuth(self, title, url):
         return True
 
     def updateCache(self):
-        if self.shouldUpdate() and self._checkAuth():
-            # as long as the http request worked we count this as an update
+        if not self.shouldUpdate():
+            return
+
+        if self._checkAuth(None):
             data = self._getRSSData()
-            if not data:
-                return []
+            if data.get('entries', None):
 
-            # clear cache
-            self._clearCache()
+                # clear cache
+                self._clearCache()
 
-            # set updated
-            self.setLastUpdate()
+                # set updated
+                self.setLastUpdate()
 
-            # parse data
-            cl = []
-            for item in data:
-                title, url = self._get_title_and_url(item)
-                ci = self._parseItem(title, url)
-                if ci is not None:
-                    cl.append(ci)
+                if self._checkAuth(data):
+
+                    cl = []
+                    for item in data.get('entries', []):
+                        title, url = self._get_title_and_url(item)
+                        ci = self._parseItem(title, url)
+                        if ci is not None:
+                            cl.append(ci)
 
-            if len(cl) > 0:
-                myDB = self._getDB()
-                myDB.mass_action(cl)
+                    if len(cl) > 0:
+                        myDB = self._getDB()
+                        myDB.mass_action(cl)
+
+                else:
+                    raise AuthException(
+                        u"Your authentication credentials for " + self.provider.name + " are incorrect, check your config")
 
         return []
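The base TVCache._checkAuth now takes the parsed data and defaults to True, which turns it into a hook: provider caches that need payload validation override it, as TvTorrentsCache does above by delegating to the provider's _checkAuthFromData. A tiny sketch of that delegation pattern (class names here are illustrative, not from the repo):

class BaseCache(object):
    def _checkAuth(self, data):
        # Default: accept any payload; subclasses inspect the parsed feed.
        return True

class DelegatingCache(BaseCache):
    def __init__(self, provider):
        self.provider = provider

    def _checkAuth(self, data):
        # Mirror of the TvTorrentsCache override in this commit.
        return self.provider._checkAuthFromData(data)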
@@ -159,8 +164,6 @@ class TVCache():
             logger.log(
                 u"The data returned from the " + self.provider.name + " feed is incomplete, this result is unusable",
                 logger.DEBUG)
         return None
 
-
-
     def _getLastUpdate(self):
         myDB = self._getDB()