From 4b3a680bb3592ebc8379437da158a50cce8897d0 Mon Sep 17 00:00:00 2001
From: Adam
Date: Wed, 6 Aug 2014 00:19:54 +0800
Subject: [PATCH] Tidy provider code

---
 sickbeard/providers/animezb.py      |  4 +--
 sickbeard/providers/bitsoup.py      | 41 ++---------------------
 sickbeard/providers/btn.py          | 50 +----------------------------
 sickbeard/providers/dtt.py          |  9 ++----
 sickbeard/providers/ezrss.py        | 19 ++---------
 sickbeard/providers/fanzub.py       |  4 +--
 sickbeard/providers/freshontv.py    | 42 ++----------------------
 sickbeard/providers/hdbits.py       | 12 -------
 sickbeard/providers/hdtorrents.py   | 42 ++----------------------
 sickbeard/providers/iptorrents.py   | 42 ++----------------------
 sickbeard/providers/kat.py          | 38 ++--------------------
 sickbeard/providers/newzbin.py      |  4 +--
 sickbeard/providers/nextgen.py      | 42 ++----------------------
 sickbeard/providers/nyaatorrents.py |  4 +--
 sickbeard/providers/nzbsrus.py      |  4 +--
 sickbeard/providers/omgwtfnzbs.py   |  4 +--
 sickbeard/providers/publichd.py     | 42 ++----------------------
 sickbeard/providers/rsstorrent.py   | 14 ++------
 sickbeard/providers/scc.py          | 41 ++---------------------
 sickbeard/providers/speedcd.py      | 41 ++---------------------
 sickbeard/providers/thepiratebay.py | 42 ++----------------------
 sickbeard/providers/torrentbytes.py | 40 ++---------------------
 sickbeard/providers/torrentday.py   | 41 ++---------------------
 sickbeard/providers/torrentleech.py | 42 ++----------------------
 sickbeard/providers/tvtorrents.py   |  2 +-
 sickbeard/tvcache.py                | 16 +++++----
 26 files changed, 55 insertions(+), 627 deletions(-)

diff --git a/sickbeard/providers/animezb.py b/sickbeard/providers/animezb.py
index f1bc5bd3..5ad05b7c 100644
--- a/sickbeard/providers/animezb.py
+++ b/sickbeard/providers/animezb.py
@@ -136,7 +136,7 @@ class AnimezbCache(tvcache.TVCache):
         # only poll Animezb every 20 minutes max
         self.minTime = 20
 
-    def _getRSSData(self):
+    def _getDailyData(self):
 
         params = {
             "cat": "anime".encode('utf-8'),
@@ -147,7 +147,7 @@ class AnimezbCache(tvcache.TVCache):
 
         logger.log(self.provider.name + u" cache update URL: " + rss_url, logger.DEBUG)
 
-        return self.getRSSFeed(rss_url)
+        return self.getRSSFeed(rss_url).entries
 
 
 provider = Animezb()
diff --git a/sickbeard/providers/bitsoup.py b/sickbeard/providers/bitsoup.py
index 240f1a09..6b3db525 100644
--- a/sickbeard/providers/bitsoup.py
+++ b/sickbeard/providers/bitsoup.py
@@ -268,46 +268,9 @@ class BitSoupCache(tvcache.TVCache):
         # only poll TorrentBytes every 20 minutes max
         self.minTime = 20
 
-    def updateCache(self):
-
-        # delete anything older then 7 days
-        logger.log(u"Clearing " + self.provider.name + " cache")
-        self._clearCache()
-
-        if not self.shouldUpdate():
-            return
-
+    def _getDailyData(self):
         search_params = {'RSS': ['']}
-        rss_results = self.provider._doSearch(search_params)
-
-        if rss_results:
-            self.setLastUpdate()
-        else:
-            return []
-
-        cl = []
-        for result in rss_results:
-
-            item = (result[0], result[1])
-            ci = self._parseItem(item)
-            if ci is not None:
-                cl.append(ci)
-
-        if len(cl) > 0:
-            myDB = self._getDB()
-            myDB.mass_action(cl)
-
-
-    def _parseItem(self, item):
-
-        (title, url) = item
-
-        if not title or not url:
-            return None
-
-        logger.log(u"Attempting to cache item:[" + title + "]", logger.DEBUG)
-
-        return self._addCacheEntry(title, url)
+        return self.provider._doSearch(search_params)
 
 
 provider = BitSoupProvider()
diff --git a/sickbeard/providers/btn.py b/sickbeard/providers/btn.py
index 405f0811..eaa03ee6 100644
--- a/sickbeard/providers/btn.py
+++ b/sickbeard/providers/btn.py
@@ -297,44 +297,7 @@ class BTNCache(tvcache.TVCache):
         # At least 15 minutes between queries
         self.minTime = 15
 
-    def updateCache(self):
-
-        # delete anything older then 7 days
-        self._clearCache()
-
-        if not self.shouldUpdate():
-            return
-
-        if self._checkAuth(None):
-
-            data = self._getRSSData()
-
-            # As long as we got something from the provider we count it as an update
-            if data:
-                self.setLastUpdate()
-            else:
-                return []
-
-            if self._checkAuth(data):
-                # By now we know we've got data and no auth errors, all we need to do is put it in the database
-                cl = []
-                for item in data:
-
-                    ci = self._parseItem(item)
-                    if ci is not None:
-                        cl.append(ci)
-
-                if len(cl) > 0:
-                    myDB = self._getDB()
-                    myDB.mass_action(cl)
-
-            else:
-                raise AuthException(
-                    "Your authentication info for " + self.provider.name + " is incorrect, check your config")
-        else:
-            return []
-
-    def _getRSSData(self):
+    def _getDailyData(self):
         # Get the torrents uploaded since last check.
         seconds_since_last_update = math.ceil(time.time() - time.mktime(self._getLastUpdate().timetuple()))
 
@@ -352,17 +315,6 @@ class BTNCache(tvcache.TVCache):
 
         return self.provider._doSearch(search_params=None, age=seconds_since_last_update)
 
-    def _parseItem(self, item):
-        (title, url) = self.provider._get_title_and_url(item)
-
-        if title and url:
-            logger.log(u"Adding item to results: " + title, logger.DEBUG)
-            return self._addCacheEntry(title, url)
-        else:
-            logger.log(u"The data returned from the " + self.provider.name + " is incomplete, this result is unusable",
-                       logger.ERROR)
-            return None
-
     def _checkAuth(self, data):
         return self.provider._checkAuthFromData(data)
 
diff --git a/sickbeard/providers/dtt.py b/sickbeard/providers/dtt.py
index 5224f78a..bfe881e7 100644
--- a/sickbeard/providers/dtt.py
+++ b/sickbeard/providers/dtt.py
@@ -123,7 +123,7 @@ class DTTCache(tvcache.TVCache):
         # only poll DTT every 30 minutes max
         self.minTime = 30
 
-    def _getRSSData(self):
+    def _getDailyData(self):
 
         params = {"items": "all"}
 
@@ -135,12 +135,7 @@ class DTTCache(tvcache.TVCache):
         url = self.provider.url + 'rss/allshows?' + urllib.urlencode(params)
         logger.log(u"DTT cache update URL: " + url, logger.DEBUG)
 
-        return self.getRSSFeed(url)
-
-    def _parseItem(self, item):
-        title, url = self.provider._get_title_and_url(item)
-        logger.log(u"RSS Feed provider: [" + self.provider.name + "] Attempting to add item to cache: " + title, logger.DEBUG)
-        return self._addCacheEntry(title, url)
+        return self.getRSSFeed(url).entries
 
 
 provider = DTTProvider()
\ No newline at end of file
diff --git a/sickbeard/providers/ezrss.py b/sickbeard/providers/ezrss.py
index 48ec3a4c..233a53e6 100644
--- a/sickbeard/providers/ezrss.py
+++ b/sickbeard/providers/ezrss.py
@@ -174,27 +174,12 @@ class EZRSSCache(tvcache.TVCache):
         # only poll EZRSS every 15 minutes max
         self.minTime = 15
 
-    def _getRSSData(self):
+    def _getDailyData(self):
 
         rss_url = self.provider.url + 'feed/'
 
         logger.log(self.provider.name + " cache update URL: " + rss_url, logger.DEBUG)
-
-        return self.getRSSFeed(rss_url)
-
-    def _parseItem(self, item):
-
-        (title, url) = self.provider._get_title_and_url(item)
-
-        if title and url:
-            logger.log(u"RSS Feed provider: [" + self.provider.name + "] Attempting to add item to cache: " + title, logger.DEBUG)
-            url = self._translateLinkURL(url)
-            return self._addCacheEntry(title, url)
-
-        else:
-            logger.log(
-                u"The XML returned from the " + self.provider.name + " feed is incomplete, this result is unusable",
-                logger.ERROR)
-            return None
+
+        return self.getRSSFeed(rss_url).entries
 
 
 provider = EZRSSProvider()
diff --git a/sickbeard/providers/fanzub.py b/sickbeard/providers/fanzub.py
index 0d0ac1e4..ebcd2431 100644
--- a/sickbeard/providers/fanzub.py
+++ b/sickbeard/providers/fanzub.py
@@ -128,7 +128,7 @@ class FanzubCache(tvcache.TVCache):
         # only poll Fanzub every 20 minutes max
         self.minTime = 20
 
-    def _getRSSData(self):
+    def _getDailyData(self):
 
         params = {
             "cat": "anime".encode('utf-8'),
@@ -139,7 +139,7 @@ class FanzubCache(tvcache.TVCache):
 
         logger.log(self.provider.name + u" cache update URL: " + rss_url, logger.DEBUG)
 
-        return self.getRSSFeed(rss_url)
+        return self.getRSSFeed(rss_url).entries
 
 
 provider = Fanzub()
diff --git a/sickbeard/providers/freshontv.py b/sickbeard/providers/freshontv.py
index 45581e4c..5e7f0871 100755
--- a/sickbeard/providers/freshontv.py
+++ b/sickbeard/providers/freshontv.py
@@ -299,46 +299,8 @@ class FreshOnTVCache(tvcache.TVCache):
         # poll delay in minutes
         self.minTime = 20
 
-    def updateCache(self):
-
-        # delete anything older then 7 days
-        logger.log(u"Clearing " + self.provider.name + " cache")
-        self._clearCache()
-
-        if not self.shouldUpdate():
-            return
-
+    def _getDailyData(self):
         search_params = {'RSS': ['']}
-        rss_results = self.provider._doSearch(search_params)
-
-        if rss_results:
-            self.setLastUpdate()
-        else:
-            return []
-
-        cl = []
-        for result in rss_results:
-
-            item = (result[0], result[1])
-            ci = self._parseItem(item)
-            if ci is not None:
-                cl.append(ci)
-
-        if len(cl) > 0:
-            myDB = self._getDB()
-            myDB.mass_action(cl)
-
-
-    def _parseItem(self, item):
-
-        (title, url) = item
-
-        if not title or not url:
-            return None
-
-        logger.log(u"Attempting to cache item:[" + title +"]", logger.DEBUG)
-
-        return self._addCacheEntry(title, url)
-
+        return self.provider._doSearch(search_params).entries
 
 provider = FreshOnTVProvider()
\ No newline at end of file
diff --git a/sickbeard/providers/hdbits.py b/sickbeard/providers/hdbits.py
index ce86e810..faaa7b4c 100644
--- a/sickbeard/providers/hdbits.py
+++ b/sickbeard/providers/hdbits.py
@@ -251,18 +251,6 @@ class HDBitsCache(tvcache.TVCache):
     def _getRSSData(self):
         return self.provider.getURL(self.provider.rss_url, post_data=self.provider._make_post_data_JSON(), json=True)
 
-    def _parseItem(self, item):
-
-        (title, url) = self.provider._get_title_and_url(item)
-
-        if title and url:
-            logger.log(u"Adding item to results: " + title, logger.DEBUG)
-            return self._addCacheEntry(title, url)
-        else:
-            logger.log(u"The data returned from the " + self.provider.name + " is incomplete, this result is unusable",
-                       logger.ERROR)
-            return None
-
     def _checkAuth(self, data):
         return self.provider._checkAuthFromData(data)
 
diff --git a/sickbeard/providers/hdtorrents.py b/sickbeard/providers/hdtorrents.py
index 445cd714..8b02af0b 100644
--- a/sickbeard/providers/hdtorrents.py
+++ b/sickbeard/providers/hdtorrents.py
@@ -329,47 +329,9 @@ class HDTorrentsCache(tvcache.TVCache):
         # only poll HDTorrents every 10 minutes max
         self.minTime = 20
 
-    def updateCache(self):
-
-        # delete anything older then 7 days
-        self._clearCache()
-
-        if not self.shouldUpdate():
-            return
-
+    def _getDailyData(self):
         search_params = {'RSS': []}
-        rss_results = self.provider._doSearch(search_params)
-
-        if rss_results:
-            self.setLastUpdate()
-        else:
-            return []
-
-        cl = []
-        for result in rss_results:
-
-            item = (result[0], result[1])
-            ci = self._parseItem(item)
-            if ci is not None:
-                cl.append(ci)
-
-
-
-        if len(cl) > 0:
-            myDB = self._getDB()
-            myDB.mass_action(cl)
-
-
-    def _parseItem(self, item):
-
-        (title, url) = item
-
-        if not title or not url:
-            return None
-
-        logger.log(u"Attempting to cache item:[" + title +"]", logger.DEBUG)
-
-        return self._addCacheEntry(title, url)
+        return self.provider._doSearch(search_params)
 
 
 provider = HDTorrentsProvider()
diff --git a/sickbeard/providers/iptorrents.py b/sickbeard/providers/iptorrents.py
index c3de8a9d..19ae37fd 100644
--- a/sickbeard/providers/iptorrents.py
+++ b/sickbeard/providers/iptorrents.py
@@ -269,47 +269,9 @@ class IPTorrentsCache(tvcache.TVCache):
         # Only poll IPTorrents every 10 minutes max
         self.minTime = 10
 
-    def updateCache(self):
-
-        # delete anything older then 7 days
-        self._clearCache()
-
-        if not self.shouldUpdate():
-            return
-
+    def _getDailyData(self):
         search_params = {'RSS': ['']}
-        rss_results = self.provider._doSearch(search_params)
-
-        if rss_results:
-            self.setLastUpdate()
-        else:
-            return []
-
-        cl = []
-        for result in rss_results:
-
-            item = (result[0], result[1])
-            ci = self._parseItem(item)
-            if ci is not None:
-                cl.append(ci)
-
-
-
-        if len(cl) > 0:
-            myDB = self._getDB()
-            myDB.mass_action(cl)
-
-
-    def _parseItem(self, item):
-
-        (title, url) = item
-
-        if not title or not url:
-            return None
-
-        logger.log(u"Attempting to cache item:[" + title +"]", logger.DEBUG)
-
-        return self._addCacheEntry(title, url)
+        return self.provider._doSearch(search_params)
 
 
 provider = IPTorrentsProvider()
diff --git a/sickbeard/providers/kat.py b/sickbeard/providers/kat.py
index e2e88924..1f3273cc 100644
--- a/sickbeard/providers/kat.py
+++ b/sickbeard/providers/kat.py
@@ -355,43 +355,9 @@ class KATCache(tvcache.TVCache):
         # only poll ThePirateBay every 10 minutes max
         self.minTime = 20
 
-    def updateCache(self):
-
-        # delete anything older then 7 days
-        self._clearCache()
-
-        if not self.shouldUpdate():
-            return
-
+    def _getDailyData(self):
         search_params = {'RSS': ['rss']}
-        rss_results = self.provider._doSearch(search_params)
-
-        if rss_results:
-            self.setLastUpdate()
-        else:
-            return []
-
-        cl = []
-        for result in rss_results:
-            item = (result[0], result[1])
-            ci = self._parseItem(item)
-            if ci is not None:
-                cl.append(ci)
-
-        if len(cl) > 0:
-            myDB = self._getDB()
-            myDB.mass_action(cl)
-
-    def _parseItem(self, item):
-
-        (title, url) = item
-
-        if not title or not url:
-            return None
-
-        logger.log(u"Attempting to cache item:[" + title + "]", logger.DEBUG)
-
-        return self._addCacheEntry(title, url)
+        return self.provider._doSearch(search_params)
 
 
 provider = KATProvider()
diff --git a/sickbeard/providers/newzbin.py b/sickbeard/providers/newzbin.py
index 6da3747e..b0211355 100644
--- a/sickbeard/providers/newzbin.py
+++ b/sickbeard/providers/newzbin.py
@@ -314,9 +314,9 @@ class NewzbinCache(tvcache.TVCache):
         # only poll Newzbin every 10 mins max
         self.minTime = 1
 
-    def _getRSSData(self):
+    def _getDailyData(self):
 
-        return self.provider._getRSSData()
+        return self.provider._getRSSData().entries
 
     def _parseItem(self, item):
 
diff --git a/sickbeard/providers/nextgen.py b/sickbeard/providers/nextgen.py
index f0e0fb4d..09cca9b9 100644
--- a/sickbeard/providers/nextgen.py
+++ b/sickbeard/providers/nextgen.py
@@ -318,47 +318,9 @@ class NextGenCache(tvcache.TVCache):
         # Only poll NextGen every 10 minutes max
         self.minTime = 10
 
-    def updateCache(self):
-
-        # delete anything older then 7 days
-        self._clearCache()
-
-        if not self.shouldUpdate():
-            return
-
+    def _getDailyData(self):
         search_params = {'RSS': ['']}
-        rss_results = self.provider._doSearch(search_params)
-
-        if rss_results:
-            self.setLastUpdate()
-        else:
-            return []
-
-        cl = []
-        for result in rss_results:
-
-            item = (result[0], result[1])
-            ci = self._parseItem(item)
-            if ci is not None:
-                cl.append(ci)
-
-
-
-        if len(cl) > 0:
-            myDB = self._getDB()
-            myDB.mass_action(cl)
-
-
-    def _parseItem(self, item):
-
-        (title, url) = item
-
-        if not title or not url:
-            return None
-
-        logger.log(u"Attempting to cache item:[" + title +"]", logger.DEBUG)
-
-        return self._addCacheEntry(title, url)
+        return self.provider._doSearch(search_params)
 
 
 provider = NextGenProvider()
diff --git a/sickbeard/providers/nyaatorrents.py b/sickbeard/providers/nyaatorrents.py
index 10797de0..f2aed6c3 100644
--- a/sickbeard/providers/nyaatorrents.py
+++ b/sickbeard/providers/nyaatorrents.py
@@ -126,7 +126,7 @@ class NyaaCache(tvcache.TVCache):
         # only poll NyaaTorrents every 15 minutes max
         self.minTime = 15
 
-    def _getRSSData(self):
+    def _getDailyData(self):
         params = {
             "page": 'rss',  # Use RSS page
             "order": '1',  # Sort Descending By Date
@@ -137,7 +137,7 @@ class NyaaCache(tvcache.TVCache):
 
         logger.log(u"NyaaTorrents cache update URL: " + url, logger.DEBUG)
 
-        return self.getRSSFeed(url)
+        return self.getRSSFeed(url).entries
 
 
 provider = NyaaProvider()
diff --git a/sickbeard/providers/nzbsrus.py b/sickbeard/providers/nzbsrus.py
index bcfe428a..570b24ed 100644
--- a/sickbeard/providers/nzbsrus.py
+++ b/sickbeard/providers/nzbsrus.py
@@ -98,7 +98,7 @@ class NZBsRUSCache(tvcache.TVCache):
         # only poll NZBs'R'US every 15 minutes max
         self.minTime = 15
 
-    def _getRSSData(self):
+    def _getDailyData(self):
         url = self.provider.url + 'rssfeed.php?'
         urlArgs = {'cat': '91,75,104',  # HD,XviD,SD-x264
                    'i': sickbeard.NZBSRUS_UID,
@@ -107,7 +107,7 @@ class NZBsRUSCache(tvcache.TVCache):
         url += urllib.urlencode(urlArgs)
         logger.log(u"NZBs'R'US cache update URL: " + url, logger.DEBUG)
 
-        return self.getRSSFeed(url)
+        return self.getRSSFeed(url).entries
 
     def _checkAuth(self, data):
         return data != 'Invalid Link'
diff --git a/sickbeard/providers/omgwtfnzbs.py b/sickbeard/providers/omgwtfnzbs.py
index dc9aa050..946a3f41 100644
--- a/sickbeard/providers/omgwtfnzbs.py
+++ b/sickbeard/providers/omgwtfnzbs.py
@@ -154,7 +154,7 @@ class OmgwtfnzbsCache(tvcache.TVCache):
         tvcache.TVCache.__init__(self, provider)
         self.minTime = 20
 
-    def _getRSSData(self):
+    def _getDailyData(self):
         params = {'user': provider.username,
                   'api': provider.api_key,
                   'eng': 1,
@@ -164,7 +164,7 @@ class OmgwtfnzbsCache(tvcache.TVCache):
 
         logger.log(self.provider.name + u" cache update URL: " + rss_url, logger.DEBUG)
 
-        return self.getRSSFeed(rss_url)
+        return self.getRSSFeed(rss_url).entries
 
     def _checkAuth(self, data):
         return self.provider._checkAuthFromData(data)
diff --git a/sickbeard/providers/publichd.py b/sickbeard/providers/publichd.py
index fcb44a16..ff719c01 100644
--- a/sickbeard/providers/publichd.py
+++ b/sickbeard/providers/publichd.py
@@ -245,47 +245,9 @@ class PublicHDCache(tvcache.TVCache):
         # only poll ThePirateBay every 10 minutes max
         self.minTime = 20
 
-    def updateCache(self):
-
-        # delete anything older then 7 days
-        self._clearCache()
-
-        if not self.shouldUpdate():
-            return
-
+    def _getDailyData(self):
         search_params = {'RSS': ['rss']}
-        rss_results = self.provider._doSearch(search_params)
-
-        if rss_results:
-            self.setLastUpdate()
-        else:
-            return []
-
-        cl = []
-        for result in rss_results:
-
-            item = (result[0], result[1])
-            ci = self._parseItem(item)
-            if ci is not None:
-                cl.append(ci)
-
-
-
-        if len(cl) > 0:
-            myDB = self._getDB()
-            myDB.mass_action(cl)
-
-
-    def _parseItem(self, item):
-
-        (title, url) = item
-
-        if not title or not url:
-            return None
-
-        logger.log(u"Attempting to cache item:[" + title +"]", logger.DEBUG)
-
-        return self._addCacheEntry(title, url)
+        return self.provider._doSearch(search_params)
 
 
 provider = PublicHDProvider()
diff --git a/sickbeard/providers/rsstorrent.py b/sickbeard/providers/rsstorrent.py
index f46c7b30..2f21058e 100644
--- a/sickbeard/providers/rsstorrent.py
+++ b/sickbeard/providers/rsstorrent.py
@@ -152,21 +152,11 @@ class TorrentRssCache(tvcache.TVCache):
         tvcache.TVCache.__init__(self, provider)
         self.minTime = 15
 
-    def _getRSSData(self):
+    def _getDailyData(self):
         logger.log(u"TorrentRssCache cache update URL: " + self.provider.url, logger.DEBUG)
 
         request_headers = None
         if self.provider.cookies:
             request_headers = { 'Cookie': self.provider.cookies }
 
-        return self.getRSSFeed(self.provider.url, request_headers=request_headers)
-
-    def _parseItem(self, item):
-
-        (title, url) = self.provider._get_title_and_url(item)
-        if not title or not url:
-            logger.log(u"The XML returned from the RSS feed is incomplete, this result is unusable", logger.ERROR)
-            return None
-
-        logger.log(u"Attempting to add item to cache: " + title, logger.DEBUG)
-        return self._addCacheEntry(title, url)
+        return self.getRSSFeed(self.provider.url, request_headers=request_headers).entries
diff --git a/sickbeard/providers/scc.py b/sickbeard/providers/scc.py
index 5eb6b25c..2dc0c31f 100644
--- a/sickbeard/providers/scc.py
+++ b/sickbeard/providers/scc.py
@@ -312,47 +312,10 @@ class SCCCache(tvcache.TVCache):
         # only poll SCC every 10 minutes max
         self.minTime = 20
 
-    def updateCache(self):
-
-        # delete anything older then 7 days
-        self._clearCache()
-
-        if not self.shouldUpdate():
-            return
-
+    def _getDailyData(self):
         search_params = {'RSS': ['']}
-        rss_results = self.provider._doSearch(search_params)
+        return self.provider._doSearch(search_params)
 
-        if rss_results:
-            self.setLastUpdate()
-        else:
-            return []
-
-        cl = []
-        for result in rss_results:
-
-            item = (result[0], result[1])
-            ci = self._parseItem(item)
-            if ci is not None:
-                cl.append(ci)
-
-
-
-        if len(cl) > 0:
-            myDB = self._getDB()
-            myDB.mass_action(cl)
-
-
-    def _parseItem(self, item):
-
-        (title, url) = item
-
-        if not title or not url:
-            return None
-
-        logger.log(u"Attempting to cache item:[" + title +"]", logger.DEBUG)
-
-        return self._addCacheEntry(title, url)
 
 
 provider = SCCProvider()
diff --git a/sickbeard/providers/speedcd.py b/sickbeard/providers/speedcd.py
index 49e3a68f..a8ad5dd5 100644
--- a/sickbeard/providers/speedcd.py
+++ b/sickbeard/providers/speedcd.py
@@ -252,47 +252,10 @@ class SpeedCDCache(tvcache.TVCache):
         # only poll Speedcd every 20 minutes max
         self.minTime = 20
 
-    def updateCache(self):
-
-        # delete anything older then 7 days
-        self._clearCache()
-
-        if not self.shouldUpdate():
-            return
-
+    def _getDailyData(self):
         search_params = {'RSS': ['']}
-        rss_results = self.provider._doSearch(search_params)
+        return self.provider._doSearch(search_params)
 
-        if rss_results:
-            self.setLastUpdate()
-        else:
-            return []
-
-        cl = []
-        for result in rss_results:
-
-            item = (result[0], result[1])
-            ci = self._parseItem(item)
-            if ci is not None:
-                cl.append(ci)
-
-
-
-        if len(cl) > 0:
-            myDB = self._getDB()
-            myDB.mass_action(cl)
-
-
-    def _parseItem(self, item):
-
-        (title, url) = item
-
-        if not title or not url:
-            return None
-
-        logger.log(u"Attempting to cache item:[" + title + "]", logger.DEBUG)
-
-        return self._addCacheEntry(title, url)
 
 
 provider = SpeedCDProvider()
diff --git a/sickbeard/providers/thepiratebay.py b/sickbeard/providers/thepiratebay.py
index 40ac3a9a..5c69e5cb 100644
--- a/sickbeard/providers/thepiratebay.py
+++ b/sickbeard/providers/thepiratebay.py
@@ -338,47 +338,9 @@ class ThePirateBayCache(tvcache.TVCache):
         # only poll ThePirateBay every 10 minutes max
         self.minTime = 20
 
-    def updateCache(self):
-
-        # delete anything older then 7 days
-        self._clearCache()
-
-        if not self.shouldUpdate():
-            return
-
+    def _getDailyData(self):
         search_params = {'RSS': ['rss']}
-        rss_results = self.provider._doSearch(search_params)
-
-        if rss_results:
-            self.setLastUpdate()
-        else:
-            return []
-
-        cl = []
-        for result in rss_results:
-
-            item = (result[0], result[1])
-            ci = self._parseItem(item)
-            if ci is not None:
-                cl.append(ci)
-
-
-
-        if len(cl) > 0:
-            myDB = self._getDB()
-            myDB.mass_action(cl)
-
-
-    def _parseItem(self, item):
-
-        (title, url) = item
-
-        if not title or not url:
-            return None
-
-        logger.log(u"Attempting to cache item:[" + title +"]", logger.DEBUG)
-
-        return self._addCacheEntry(title, url)
+        return self.provider._doSearch(search_params)
 
 
 class ThePirateBayWebproxy:
diff --git a/sickbeard/providers/torrentbytes.py b/sickbeard/providers/torrentbytes.py
index e5f12a98..e0013808 100644
--- a/sickbeard/providers/torrentbytes.py
+++ b/sickbeard/providers/torrentbytes.py
@@ -274,45 +274,9 @@ class TorrentBytesCache(tvcache.TVCache):
         # only poll TorrentBytes every 20 minutes max
         self.minTime = 20
 
-    def updateCache(self):
-
-        # delete anything older then 7 days
-        self._clearCache()
-
-        if not self.shouldUpdate():
-            return
-
+    def _getDailyData(self):
         search_params = {'RSS': ['']}
-        rss_results = self.provider._doSearch(search_params)
-
-        if rss_results:
-            self.setLastUpdate()
-        else:
-            return []
-
-        cl = []
-        for result in rss_results:
-
-            item = (result[0], result[1])
-            ci = self._parseItem(item)
-            if ci is not None:
-                cl.append(ci)
-
-        if len(cl) > 0:
-            myDB = self._getDB()
-            myDB.mass_action(cl)
-
-
-    def _parseItem(self, item):
-
-        (title, url) = item
-
-        if not title or not url:
-            return None
-
-        logger.log(u"Attempting to cache item:[" + title + "]", logger.DEBUG)
-
-        return self._addCacheEntry(title, url)
+        return self.provider._doSearch(search_params)
 
 
 provider = TorrentBytesProvider()
diff --git a/sickbeard/providers/torrentday.py b/sickbeard/providers/torrentday.py
index 9d213d14..e1d78d8d 100644
--- a/sickbeard/providers/torrentday.py
+++ b/sickbeard/providers/torrentday.py
@@ -277,47 +277,10 @@ class TorrentDayCache(tvcache.TVCache):
         # Only poll IPTorrents every 10 minutes max
         self.minTime = 10
 
-    def updateCache(self):
-
-        # delete anything older then 7 days
-        self._clearCache()
-
-        if not self.shouldUpdate():
-            return
-
+    def _getDailyData(self):
         search_params = {'RSS': ['']}
-        rss_results = self.provider._doSearch(search_params)
+        return self.provider._doSearch(search_params)
 
-        if rss_results:
-            self.setLastUpdate()
-        else:
-            return []
-
-        cl = []
-        for result in rss_results:
-
-            item = (result[0], result[1])
-            ci = self._parseItem(item)
-            if ci is not None:
-                cl.append(ci)
-
-
-
-        if len(cl) > 0:
-            myDB = self._getDB()
-            myDB.mass_action(cl)
-
-
-    def _parseItem(self, item):
-
-        (title, url) = item
-
-        if not title or not url:
-            return None
-
-        logger.log(u"Attempting to cache item:[" + title +"]", logger.DEBUG)
-
-        return self._addCacheEntry(title, url)
 
 
 provider = TorrentDayProvider()
diff --git a/sickbeard/providers/torrentleech.py b/sickbeard/providers/torrentleech.py
index b56cf3f0..22009b2d 100644
--- a/sickbeard/providers/torrentleech.py
+++ b/sickbeard/providers/torrentleech.py
@@ -271,47 +271,9 @@ class TorrentLeechCache(tvcache.TVCache):
         # only poll TorrentLeech every 20 minutes max
         self.minTime = 20
 
-    def updateCache(self):
-
-        # delete anything older then 7 days
-        self._clearCache()
-
-        if not self.shouldUpdate():
-            return
-
+    def _getDailyData(self):
         search_params = {'RSS': ['']}
-        rss_results = self.provider._doSearch(search_params)
-
-        if rss_results:
-            self.setLastUpdate()
-        else:
-            return []
-
-        cl = []
-        for result in rss_results:
-
-            item = (result[0], result[1])
-            ci = self._parseItem(item)
-            if ci is not None:
-                cl.append(ci)
-
-
-
-        if len(cl) > 0:
-            myDB = self._getDB()
-            myDB.mass_action(cl)
-
-
-    def _parseItem(self, item):
-
-        (title, url) = item
-
-        if not title or not url:
-            return None
-
-        logger.log(u"Attempting to cache item:[" + title +"]", logger.DEBUG)
-
-        return self._addCacheEntry(title, url)
+        return self.provider._doSearch(search_params)
 
 
 provider = TorrentLeechProvider()
diff --git a/sickbeard/providers/tvtorrents.py b/sickbeard/providers/tvtorrents.py
index 3c4da633..427ca5b8 100644
--- a/sickbeard/providers/tvtorrents.py
+++ b/sickbeard/providers/tvtorrents.py
@@ -86,7 +86,7 @@ class TvTorrentsCache(tvcache.TVCache):
         # only poll TvTorrents every 15 minutes max
         self.minTime = 15
 
-    def _getRSSData(self):
+    def _getDailyData(self):
         # These will be ignored on the serverside.
         ignore_regex = "all.month|month.of|season[\s\d]*complete"
 
diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py
index 49d61c0d..97134ea7 100644
--- a/sickbeard/tvcache.py
+++ b/sickbeard/tvcache.py
@@ -102,6 +102,9 @@ class TVCache():
 
         return data
 
+    def _getDailyData(self):
+        return None
+
     def _checkAuth(self, data):
         return True
 
@@ -112,7 +115,7 @@ class TVCache():
 
         if self.shouldUpdate() and self._checkAuth(None):
             # as long as the http request worked we count this as an update
-            data = self._getRSSData()
+            data = self._getDailyData()
             if not data:
                 return []
 
@@ -125,8 +128,9 @@ class TVCache():
             # parse data
             if self._checkAuth(data):
                 cl = []
-                for item in data.entries:
-                    ci = self._parseItem(item)
+                for item in data:
+                    title, url = self.provider._get_title_and_url(item)
+                    ci = self._parseItem(title, url)
                     if ci is not None:
                         cl.append(ci)
 
@@ -148,9 +152,7 @@ class TVCache():
     def _translateLinkURL(self, url):
         return url.replace('&amp;', '&')
 
-    def _parseItem(self, item):
-        title = item.title
-        url = item.link
+    def _parseItem(self, title, url):
 
         self._checkItemAuth(title, url)
 
@@ -158,7 +160,7 @@ class TVCache():
             title = self._translateTitle(title)
             url = self._translateLinkURL(url)
 
-            logger.log(u"Checking if item from RSS feed is in the cache: " + title, logger.DEBUG)
+            logger.log(u"Attempting to add item to cache: " + title, logger.DEBUG)
             return self._addCacheEntry(title, url)
 
         else: