diff --git a/gui/slick/interfaces/default/displayShow.tmpl b/gui/slick/interfaces/default/displayShow.tmpl index e161ae36..53099a08 100644 --- a/gui/slick/interfaces/default/displayShow.tmpl +++ b/gui/slick/interfaces/default/displayShow.tmpl @@ -101,7 +101,7 @@ #end if $sickbeard.indexerApi($show.indexer).name #if $xem_numbering or $xem_absolute_numbering: - [xem] + [xem] #end if diff --git a/sickbeard/helpers.py b/sickbeard/helpers.py index 378b000d..5305f6f5 100644 --- a/sickbeard/helpers.py +++ b/sickbeard/helpers.py @@ -136,6 +136,15 @@ def replaceExtension(filename, newExt): return sepFile[0] + "." + newExt +def isBtsyncFile(filename): + sepFile = filename.rpartition(".") + + if sepFile[2].lower() == '!sync': + return True + else: + return False + + def isMediaFile(filename): # ignore samples if re.search('(^|[\W_])(sample\d*)[\W_]', filename, re.I): diff --git a/sickbeard/name_parser/regexes.py b/sickbeard/name_parser/regexes.py index fdfae0a3..0efcc562 100644 --- a/sickbeard/name_parser/regexes.py +++ b/sickbeard/name_parser/regexes.py @@ -166,12 +166,13 @@ normal_regexes = [ # 01 - Ep Name # 01 - Ep Name ''' - ^((?P.+?)(?:[. _-]{2,}|[. _]))? # Show_Name and separator - (?P\d{1,2}) # 02 - (?:-(?P\d{1,2}))* # 02 - [. _-]+((?P.+?) # Source_Quality_Etc- - ((?[^- ]+([. _-]\[.*\])?))?)?$ # Group + ^((?P.+?)(?:[. _-]{2,}|[. _]))? # Show_Name and separator + (?P\d{1,3}) # 02 + (?:-(?P\d{1,3}))* # -03-04-05 etc + \s?of?\s?\d{1,3}? # of joiner (with or without spaces) and series total ep + [. _-]+((?P.+?) # Source_Quality_Etc- + ((?[^- ]+([. _-]\[.*\])?))?)?$ # Group ''' ), @@ -390,4 +391,4 @@ anime_regexes = [ (v(?P[0-9]))? # v2 .*? # Separator and EOL ''') -] \ No newline at end of file +] diff --git a/sickbeard/notifiers/xbmc.py b/sickbeard/notifiers/xbmc.py index 7ac364db..f1873f29 100644 --- a/sickbeard/notifiers/xbmc.py +++ b/sickbeard/notifiers/xbmc.py @@ -143,7 +143,7 @@ class XBMCNotifier: command = '{"jsonrpc":"2.0","method":"GUI.ShowNotification","params":{"title":"%s","message":"%s", "image": "%s"},"id":1}' % ( title.encode("utf-8"), message.encode("utf-8"), self.sb_logo_url) notifyResult = self._send_to_xbmc_json(command, curHost, username, password) - if getattr(notifyResult, 'result', None): + if notifyResult.get('result'): result += curHost + ':' + notifyResult["result"].decode(sickbeard.SYS_ENCODING) else: if sickbeard.XBMC_ALWAYS_ON or force: diff --git a/sickbeard/processTV.py b/sickbeard/processTV.py index 4beefa71..a02c09f2 100644 --- a/sickbeard/processTV.py +++ b/sickbeard/processTV.py @@ -138,6 +138,13 @@ def processDir(dirName, nzbName=None, process_method=None, force=False, is_prior path, dirs, files = get_path_dir_files(dirName, nzbName, type) + btsyncFiles = filter(helpers.isBtsyncFile, files) + + # Don't post process if files are still being synced from btsync + if btsyncFiles: + returnStr += logHelper(u"Found .!sync files, skipping post processing", logger.ERROR) + return returnStr + returnStr += logHelper(u"PostProcessing Path: " + path, logger.DEBUG) returnStr += logHelper(u"PostProcessing Dirs: " + str(dirs), logger.DEBUG) @@ -179,6 +186,13 @@ def processDir(dirName, nzbName=None, process_method=None, force=False, is_prior for processPath, processDir, fileList in ek.ek(os.walk, ek.ek(os.path.join, path, dir), topdown=False): + btsyncFiles = filter(helpers.isBtsyncFile, fileList) + + # Don't post process if files are still being synced from btsync + if btsyncFiles: + returnStr += logHelper(u"Found .!sync files, skipping post processing", logger.ERROR) + 
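The new helpers.isBtsyncFile() and the matching guards in processDir() make post-processing bail out while BitTorrent Sync is still writing its ".!sync" placeholder files, instead of half-processing a partially transferred download. A minimal standalone sketch of the same check, assuming only that in-flight files keep the ".!sync" suffix (the function names here are illustrative, not part of the module):

```python
import os

def is_btsync_file(filename):
    # BitTorrent Sync renames files to "<name>.!sync" while they are still transferring
    return filename.rpartition(".")[2].lower() == "!sync"

def ready_for_postprocessing(directory):
    # Refuse to post-process a directory that still contains partially synced files
    return not any(is_btsync_file(name) for name in os.listdir(directory))
```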
return returnStr + rarFiles = filter(helpers.isRarFile, fileList) rarContent = unRAR(processPath, rarFiles, force) fileList = set(fileList + rarContent) diff --git a/sickbeard/properFinder.py b/sickbeard/properFinder.py index ec071b1a..ce33da66 100644 --- a/sickbeard/properFinder.py +++ b/sickbeard/properFinder.py @@ -136,13 +136,10 @@ class ProperFinder(): curProper.indexer = parse_result.show.indexer # populate our Proper instance - if parse_result.is_anime: - logger.log(u"I am sorry '" + curProper.name + "' seams to be an anime proper seach is not yet suported", - logger.DEBUG) - continue - else: - curProper.season = parse_result.season_number if parse_result.season_number != None else 1 - curProper.episode = parse_result.episode_numbers[0] + curProper.season = parse_result.season_number if parse_result.season_number != None else 1 + curProper.episode = parse_result.episode_numbers[0] + + # only get anime proper if it has release group and version if parse_result.is_anime: if parse_result.release_group and parse_result.version: curProper.release_group = parse_result.release_group @@ -153,7 +150,7 @@ class ProperFinder(): curProper.quality = Quality.nameQuality(curProper.name, parse_result.is_anime) if not show_name_helpers.filterBadReleases(curProper.name): - logger.log(u"Proper " + curProper.name + " isn't a valid scene release that we want, igoring it", + logger.log(u"Proper " + curProper.name + " isn't a valid scene release that we want, ignoring it", logger.DEBUG) continue diff --git a/sickbeard/providers/animezb.py b/sickbeard/providers/animezb.py index 077e85ec..5ad05b7c 100644 --- a/sickbeard/providers/animezb.py +++ b/sickbeard/providers/animezb.py @@ -29,6 +29,7 @@ from sickbeard.common import * from sickbeard import tvcache from lib.dateutil.parser import parse as parseDate + class Animezb(generic.NZBProvider): def __init__(self): @@ -60,7 +61,8 @@ class Animezb(generic.NZBProvider): def _get_episode_search_strings(self, ep_obj, add_string=''): search_string = [] for show_name in set(show_name_helpers.allPossibleShowNames(self.show)): - ep_string = '+'.join([helpers.sanitizeSceneName(show_name).replace('.', '+'), str(ep_obj.scene_absolute_number).zfill(2)]) + ep_string = '+'.join( + [helpers.sanitizeSceneName(show_name).replace('.', '+'), str(ep_obj.scene_absolute_number).zfill(2)]) search_string.append(ep_string) return search_string @@ -106,25 +108,25 @@ class Animezb(generic.NZBProvider): results = [] - for i in [2, 3, 4]: # we will look for a version 2, 3 and 4 - for item in self._doSearch("v" + str(i)): + for item in self._doSearch("v2 OR v3 OR v4 OR v5"): - (title, url) = self._get_title_and_url(item) + (title, url) = self._get_title_and_url(item) - if item.has_key('published_parsed') and item['published_parsed']: - result_date = item.published_parsed - if result_date: - result_date = datetime.datetime(*result_date[0:6]) - else: - logger.log(u"Unable to figure out the date for entry " + title + ", skipping it") - continue + if item.has_key('published_parsed') and item['published_parsed']: + result_date = item.published_parsed + if result_date: + result_date = datetime.datetime(*result_date[0:6]) + else: + logger.log(u"Unable to figure out the date for entry " + title + ", skipping it") + continue - if not date or result_date > date: - search_result = classes.Proper(title, url, result_date, self.show) - results.append(search_result) + if not date or result_date > date: + search_result = classes.Proper(title, url, result_date, self.show) + results.append(search_result) 
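The reworked findPropers() drops the per-version loop (v2, v3, v4) in favour of a single combined "v2 OR v3 OR v4 OR v5" query, then keeps only results published after the supplied date by turning feedparser's published_parsed struct_time into a datetime. A rough standalone equivalent of that date handling, assuming feedparser-style entry dicts (the helper name and return shape are illustrative):

```python
import datetime

def propers_newer_than(entries, cutoff=None):
    # Keep (title, url, published) for entries that carry a usable date newer than cutoff
    keep = []
    for entry in entries:
        parsed = entry.get("published_parsed")
        if not parsed:
            continue  # entry has no date; mirrors the "skipping it" log-and-continue branch
        published = datetime.datetime(*parsed[:6])
        if cutoff is None or published > cutoff:
            keep.append((entry.get("title"), entry.get("link"), published))
    return keep
```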
return results + class AnimezbCache(tvcache.TVCache): def __init__(self, provider): @@ -132,22 +134,20 @@ class AnimezbCache(tvcache.TVCache): tvcache.TVCache.__init__(self, provider) # only poll Animezb every 20 minutes max - # we get 100 post each call ! self.minTime = 20 - def _getRSSData(self): + def _getDailyData(self): - params = {"cat": "anime".encode('utf-8'), - "max": "100".encode('utf-8') + params = { + "cat": "anime".encode('utf-8'), + "max": "100".encode('utf-8') } rss_url = self.provider.url + 'rss?' + urllib.urlencode(params) logger.log(self.provider.name + u" cache update URL: " + rss_url, logger.DEBUG) - return self.getRSSFeed(rss_url) + return self.getRSSFeed(rss_url).entries - def _checkItemAuth(self, title, url): - return True provider = Animezb() diff --git a/sickbeard/providers/bitsoup.py b/sickbeard/providers/bitsoup.py index 240f1a09..6b3db525 100644 --- a/sickbeard/providers/bitsoup.py +++ b/sickbeard/providers/bitsoup.py @@ -268,46 +268,9 @@ class BitSoupCache(tvcache.TVCache): # only poll TorrentBytes every 20 minutes max self.minTime = 20 - def updateCache(self): - - # delete anything older then 7 days - logger.log(u"Clearing " + self.provider.name + " cache") - self._clearCache() - - if not self.shouldUpdate(): - return - + def _getDailyData(self): search_params = {'RSS': ['']} - rss_results = self.provider._doSearch(search_params) - - if rss_results: - self.setLastUpdate() - else: - return [] - - cl = [] - for result in rss_results: - - item = (result[0], result[1]) - ci = self._parseItem(item) - if ci is not None: - cl.append(ci) - - if len(cl) > 0: - myDB = self._getDB() - myDB.mass_action(cl) - - - def _parseItem(self, item): - - (title, url) = item - - if not title or not url: - return None - - logger.log(u"Attempting to cache item:[" + title + "]", logger.DEBUG) - - return self._addCacheEntry(title, url) + return self.provider._doSearch(search_params) provider = BitSoupProvider() diff --git a/sickbeard/providers/btn.py b/sickbeard/providers/btn.py index 405f0811..eaa03ee6 100644 --- a/sickbeard/providers/btn.py +++ b/sickbeard/providers/btn.py @@ -297,44 +297,7 @@ class BTNCache(tvcache.TVCache): # At least 15 minutes between queries self.minTime = 15 - def updateCache(self): - - # delete anything older then 7 days - self._clearCache() - - if not self.shouldUpdate(): - return - - if self._checkAuth(None): - - data = self._getRSSData() - - # As long as we got something from the provider we count it as an update - if data: - self.setLastUpdate() - else: - return [] - - if self._checkAuth(data): - # By now we know we've got data and no auth errors, all we need to do is put it in the database - cl = [] - for item in data: - - ci = self._parseItem(item) - if ci is not None: - cl.append(ci) - - if len(cl) > 0: - myDB = self._getDB() - myDB.mass_action(cl) - - else: - raise AuthException( - "Your authentication info for " + self.provider.name + " is incorrect, check your config") - else: - return [] - - def _getRSSData(self): + def _getDailyData(self): # Get the torrents uploaded since last check. 
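This is the recurring change across the provider caches: the hand-rolled updateCache() loops are deleted and each cache only implements _getDailyData(), returning either the feed's .entries or the provider's RSS-mode _doSearch() results, while the shared tvcache.TVCache.updateCache() drives parsing and storage. A hedged sketch of what the provider-side override reduces to (the base class here is a stand-in for tvcache.TVCache, not the real one):

```python
class ExampleTorrentCache(object):
    # Stand-in base; only the provider-specific piece of the refactor is shown
    minTime = 20  # poll interval in minutes

    def __init__(self, provider):
        self.provider = provider

    def _getDailyData(self):
        # The whole per-provider cache now reduces to "ask the provider for its RSS view"
        return self.provider._doSearch({"RSS": [""]})
```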
seconds_since_last_update = math.ceil(time.time() - time.mktime(self._getLastUpdate().timetuple())) @@ -352,17 +315,6 @@ class BTNCache(tvcache.TVCache): return self.provider._doSearch(search_params=None, age=seconds_since_last_update) - def _parseItem(self, item): - (title, url) = self.provider._get_title_and_url(item) - - if title and url: - logger.log(u"Adding item to results: " + title, logger.DEBUG) - return self._addCacheEntry(title, url) - else: - logger.log(u"The data returned from the " + self.provider.name + " is incomplete, this result is unusable", - logger.ERROR) - return None - def _checkAuth(self, data): return self.provider._checkAuthFromData(data) diff --git a/sickbeard/providers/dtt.py b/sickbeard/providers/dtt.py index 5224f78a..bfe881e7 100644 --- a/sickbeard/providers/dtt.py +++ b/sickbeard/providers/dtt.py @@ -123,7 +123,7 @@ class DTTCache(tvcache.TVCache): # only poll DTT every 30 minutes max self.minTime = 30 - def _getRSSData(self): + def _getDailyData(self): params = {"items": "all"} @@ -135,12 +135,7 @@ class DTTCache(tvcache.TVCache): url = self.provider.url + 'rss/allshows?' + urllib.urlencode(params) logger.log(u"DTT cache update URL: " + url, logger.DEBUG) - return self.getRSSFeed(url) - - def _parseItem(self, item): - title, url = self.provider._get_title_and_url(item) - logger.log(u"RSS Feed provider: [" + self.provider.name + "] Attempting to add item to cache: " + title, logger.DEBUG) - return self._addCacheEntry(title, url) + return self.getRSSFeed(url).entries provider = DTTProvider() \ No newline at end of file diff --git a/sickbeard/providers/ezrss.py b/sickbeard/providers/ezrss.py index 48ec3a4c..233a53e6 100644 --- a/sickbeard/providers/ezrss.py +++ b/sickbeard/providers/ezrss.py @@ -174,27 +174,12 @@ class EZRSSCache(tvcache.TVCache): # only poll EZRSS every 15 minutes max self.minTime = 15 - def _getRSSData(self): + def _getDailyData(self): rss_url = self.provider.url + 'feed/' logger.log(self.provider.name + " cache update URL: " + rss_url, logger.DEBUG) - return self.getRSSFeed(rss_url) - - def _parseItem(self, item): - - (title, url) = self.provider._get_title_and_url(item) - - if title and url: - logger.log(u"RSS Feed provider: [" + self.provider.name + "] Attempting to add item to cache: " + title, logger.DEBUG) - url = self._translateLinkURL(url) - return self._addCacheEntry(title, url) - - else: - logger.log( - u"The XML returned from the " + self.provider.name + " feed is incomplete, this result is unusable", - logger.ERROR) - return None + return self.getRSSFeed(rss_url).entries provider = EZRSSProvider() diff --git a/sickbeard/providers/fanzub.py b/sickbeard/providers/fanzub.py index f92ec7b9..ebcd2431 100644 --- a/sickbeard/providers/fanzub.py +++ b/sickbeard/providers/fanzub.py @@ -29,6 +29,7 @@ from sickbeard.common import * from sickbeard import tvcache from lib.dateutil.parser import parse as parseDate + class Fanzub(generic.NZBProvider): def __init__(self): @@ -51,9 +52,6 @@ class Fanzub(generic.NZBProvider): def imageName(self): return 'fanzub.gif' - def _checkAuth(self): - return True - def _get_season_search_strings(self, ep_obj): return [x for x in show_name_helpers.makeSceneSeasonSearchString(self.show, ep_obj)] @@ -102,25 +100,25 @@ class Fanzub(generic.NZBProvider): results = [] - for i in [2, 3, 4]: # we will look for a version 2, 3 and 4 - for item in self._doSearch("v" + str(i)): + for item in self._doSearch("v2|v3|v4|v5"): - (title, url) = self._get_title_and_url(item) + (title, url) = self._get_title_and_url(item) - if 
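BTN's _getDailyData() above only asks the API for torrents uploaded since the previous cache run, so it converts the stored last-update timestamp into an age in seconds and hands that to the search as its age parameter. A standalone sketch of that conversion, assuming the last update is kept as a naive local datetime (the helper name is illustrative):

```python
import math
import time

def seconds_since(last_update):
    # Age of the cache in whole seconds, from a naive local datetime of the last update
    return int(math.ceil(time.time() - time.mktime(last_update.timetuple())))

# The age is then passed along so the provider can limit its query, e.g.:
# provider._doSearch(search_params=None, age=seconds_since(last_cache_update))
```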
item.has_key('published_parsed') and item['published_parsed']: - result_date = item.published_parsed - if result_date: - result_date = datetime.datetime(*result_date[0:6]) - else: - logger.log(u"Unable to figure out the date for entry " + title + ", skipping it") - continue + if item.has_key('published_parsed') and item['published_parsed']: + result_date = item.published_parsed + if result_date: + result_date = datetime.datetime(*result_date[0:6]) + else: + logger.log(u"Unable to figure out the date for entry " + title + ", skipping it") + continue - if not date or result_date > date: - search_result = classes.Proper(title, url, result_date, self.show) - results.append(search_result) + if not date or result_date > date: + search_result = classes.Proper(title, url, result_date, self.show) + results.append(search_result) return results + class FanzubCache(tvcache.TVCache): def __init__(self, provider): @@ -128,22 +126,20 @@ class FanzubCache(tvcache.TVCache): tvcache.TVCache.__init__(self, provider) # only poll Fanzub every 20 minutes max - # we get 100 post each call ! self.minTime = 20 - def _getRSSData(self): + def _getDailyData(self): - params = {"cat": "anime".encode('utf-8'), - "max": "100".encode('utf-8') + params = { + "cat": "anime".encode('utf-8'), + "max": "100".encode('utf-8') } rss_url = self.provider.url + 'rss?' + urllib.urlencode(params) logger.log(self.provider.name + u" cache update URL: " + rss_url, logger.DEBUG) - return self.getRSSFeed(rss_url) + return self.getRSSFeed(rss_url).entries - def _checkItemAuth(self, title, url): - return True provider = Fanzub() diff --git a/sickbeard/providers/freshontv.py b/sickbeard/providers/freshontv.py index 45581e4c..5e7f0871 100755 --- a/sickbeard/providers/freshontv.py +++ b/sickbeard/providers/freshontv.py @@ -299,46 +299,8 @@ class FreshOnTVCache(tvcache.TVCache): # poll delay in minutes self.minTime = 20 - def updateCache(self): - - # delete anything older then 7 days - logger.log(u"Clearing " + self.provider.name + " cache") - self._clearCache() - - if not self.shouldUpdate(): - return - + def _getDailyData(self): search_params = {'RSS': ['']} - rss_results = self.provider._doSearch(search_params) - - if rss_results: - self.setLastUpdate() - else: - return [] - - cl = [] - for result in rss_results: - - item = (result[0], result[1]) - ci = self._parseItem(item) - if ci is not None: - cl.append(ci) - - if len(cl) > 0: - myDB = self._getDB() - myDB.mass_action(cl) - - - def _parseItem(self, item): - - (title, url) = item - - if not title or not url: - return None - - logger.log(u"Attempting to cache item:[" + title +"]", logger.DEBUG) - - return self._addCacheEntry(title, url) - + return self.provider._doSearch(search_params).entries provider = FreshOnTVProvider() \ No newline at end of file diff --git a/sickbeard/providers/hdbits.py b/sickbeard/providers/hdbits.py index ce86e810..38a20ec2 100644 --- a/sickbeard/providers/hdbits.py +++ b/sickbeard/providers/hdbits.py @@ -177,17 +177,17 @@ class HDBitsProvider(generic.TorrentProvider): if show.air_by_date or show.sports: post_data['tvdb'] = { 'id': show.indexerid, - 'season': str(episode.airdate)[:7], + 'season': str(season.airdate)[:7], } elif show.anime: post_data['tvdb'] = { 'id': show.indexerid, - 'season': "%d" % episode.scene_absolute_number, + 'season': "%d" % season.scene_absolute_number, } else: post_data['tvdb'] = { 'id': show.indexerid, - 'season': episode.scene_season, + 'season': season.scene_season, } if search_term: @@ -251,18 +251,6 @@ class 
HDBitsCache(tvcache.TVCache): def _getRSSData(self): return self.provider.getURL(self.provider.rss_url, post_data=self.provider._make_post_data_JSON(), json=True) - def _parseItem(self, item): - - (title, url) = self.provider._get_title_and_url(item) - - if title and url: - logger.log(u"Adding item to results: " + title, logger.DEBUG) - return self._addCacheEntry(title, url) - else: - logger.log(u"The data returned from the " + self.provider.name + " is incomplete, this result is unusable", - logger.ERROR) - return None - def _checkAuth(self, data): return self.provider._checkAuthFromData(data) diff --git a/sickbeard/providers/hdtorrents.py b/sickbeard/providers/hdtorrents.py index 445cd714..8b02af0b 100644 --- a/sickbeard/providers/hdtorrents.py +++ b/sickbeard/providers/hdtorrents.py @@ -329,47 +329,9 @@ class HDTorrentsCache(tvcache.TVCache): # only poll HDTorrents every 10 minutes max self.minTime = 20 - def updateCache(self): - - # delete anything older then 7 days - self._clearCache() - - if not self.shouldUpdate(): - return - + def _getDailyData(self): search_params = {'RSS': []} - rss_results = self.provider._doSearch(search_params) - - if rss_results: - self.setLastUpdate() - else: - return [] - - cl = [] - for result in rss_results: - - item = (result[0], result[1]) - ci = self._parseItem(item) - if ci is not None: - cl.append(ci) - - - - if len(cl) > 0: - myDB = self._getDB() - myDB.mass_action(cl) - - - def _parseItem(self, item): - - (title, url) = item - - if not title or not url: - return None - - logger.log(u"Attempting to cache item:[" + title +"]", logger.DEBUG) - - return self._addCacheEntry(title, url) + return self.provider._doSearch(search_params) provider = HDTorrentsProvider() diff --git a/sickbeard/providers/iptorrents.py b/sickbeard/providers/iptorrents.py index c3de8a9d..19ae37fd 100644 --- a/sickbeard/providers/iptorrents.py +++ b/sickbeard/providers/iptorrents.py @@ -269,47 +269,9 @@ class IPTorrentsCache(tvcache.TVCache): # Only poll IPTorrents every 10 minutes max self.minTime = 10 - def updateCache(self): - - # delete anything older then 7 days - self._clearCache() - - if not self.shouldUpdate(): - return - + def _getDailyData(self): search_params = {'RSS': ['']} - rss_results = self.provider._doSearch(search_params) - - if rss_results: - self.setLastUpdate() - else: - return [] - - cl = [] - for result in rss_results: - - item = (result[0], result[1]) - ci = self._parseItem(item) - if ci is not None: - cl.append(ci) - - - - if len(cl) > 0: - myDB = self._getDB() - myDB.mass_action(cl) - - - def _parseItem(self, item): - - (title, url) = item - - if not title or not url: - return None - - logger.log(u"Attempting to cache item:[" + title +"]", logger.DEBUG) - - return self._addCacheEntry(title, url) + return self.provider._doSearch(search_params) provider = IPTorrentsProvider() diff --git a/sickbeard/providers/kat.py b/sickbeard/providers/kat.py index e2e88924..1f3273cc 100644 --- a/sickbeard/providers/kat.py +++ b/sickbeard/providers/kat.py @@ -355,43 +355,9 @@ class KATCache(tvcache.TVCache): # only poll ThePirateBay every 10 minutes max self.minTime = 20 - def updateCache(self): - - # delete anything older then 7 days - self._clearCache() - - if not self.shouldUpdate(): - return - + def _getDailyData(self): search_params = {'RSS': ['rss']} - rss_results = self.provider._doSearch(search_params) - - if rss_results: - self.setLastUpdate() - else: - return [] - - cl = [] - for result in rss_results: - item = (result[0], result[1]) - ci = self._parseItem(item) 
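The updateCache()/_parseItem() bodies removed from HDTorrents, IPTorrents and KAT all did the same thing: run the provider's RSS search, drop results missing a title or URL, and batch the surviving cache inserts into a single mass_action() call. That work now lives once in the base cache. A sketch of the validate-and-batch shape, with add_cache_entry standing in for the cache's _addCacheEntry() (its return value being an SQL action is this sketch's assumption):

```python
def build_cache_actions(results, add_cache_entry):
    # Filter out incomplete (title, url) pairs and batch the rest for one DB write
    actions = []
    for title, url in results:
        if not title or not url:
            continue  # incomplete result, nothing worth caching
        action = add_cache_entry(title, url)  # may return None for rejected items
        if action is not None:
            actions.append(action)
    return actions
```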
- if ci is not None: - cl.append(ci) - - if len(cl) > 0: - myDB = self._getDB() - myDB.mass_action(cl) - - def _parseItem(self, item): - - (title, url) = item - - if not title or not url: - return None - - logger.log(u"Attempting to cache item:[" + title + "]", logger.DEBUG) - - return self._addCacheEntry(title, url) + return self.provider._doSearch(search_params) provider = KATProvider() diff --git a/sickbeard/providers/newzbin.py b/sickbeard/providers/newzbin.py index 6da3747e..b0211355 100644 --- a/sickbeard/providers/newzbin.py +++ b/sickbeard/providers/newzbin.py @@ -314,9 +314,9 @@ class NewzbinCache(tvcache.TVCache): # only poll Newzbin every 10 mins max self.minTime = 1 - def _getRSSData(self): + def _getDailyData(self): - return self.provider._getRSSData() + return self.provider._getRSSData().entries def _parseItem(self, item): diff --git a/sickbeard/providers/newznab.py b/sickbeard/providers/newznab.py index 0a0494c1..d2dcccbe 100755 --- a/sickbeard/providers/newznab.py +++ b/sickbeard/providers/newznab.py @@ -74,8 +74,7 @@ class NewznabProvider(generic.NZBProvider): int(self.enabled)) + '|' + self.search_mode + '|' + str(int(self.search_fallback)) def imageName(self): - if ek.ek(os.path.isfile, - ek.ek(os.path.join, sickbeard.PROG_DIR, 'gui', 'slick', 'images', 'providers', self.getID() + '.png')): + if ek.ek(os.path.isfile, ek.ek(os.path.join, sickbeard.PROG_DIR, 'gui', sickbeard.GUI_NAME, 'images', 'providers', self.getID() + '.png')): return self.getID() + '.png' return 'newznab.png' diff --git a/sickbeard/providers/nextgen.py b/sickbeard/providers/nextgen.py index f0e0fb4d..09cca9b9 100644 --- a/sickbeard/providers/nextgen.py +++ b/sickbeard/providers/nextgen.py @@ -318,47 +318,9 @@ class NextGenCache(tvcache.TVCache): # Only poll NextGen every 10 minutes max self.minTime = 10 - def updateCache(self): - - # delete anything older then 7 days - self._clearCache() - - if not self.shouldUpdate(): - return - + def _getDailyData(self): search_params = {'RSS': ['']} - rss_results = self.provider._doSearch(search_params) - - if rss_results: - self.setLastUpdate() - else: - return [] - - cl = [] - for result in rss_results: - - item = (result[0], result[1]) - ci = self._parseItem(item) - if ci is not None: - cl.append(ci) - - - - if len(cl) > 0: - myDB = self._getDB() - myDB.mass_action(cl) - - - def _parseItem(self, item): - - (title, url) = item - - if not title or not url: - return None - - logger.log(u"Attempting to cache item:[" + title +"]", logger.DEBUG) - - return self._addCacheEntry(title, url) + return self.provider._doSearch(search_params) provider = NextGenProvider() diff --git a/sickbeard/providers/nyaatorrents.py b/sickbeard/providers/nyaatorrents.py index d9612e45..f2aed6c3 100644 --- a/sickbeard/providers/nyaatorrents.py +++ b/sickbeard/providers/nyaatorrents.py @@ -11,7 +11,7 @@ # SickRage is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. +# GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with SickRage. If not, see . 
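imageName() now resolves provider icons from the active GUI's image tree (sickbeard.GUI_NAME) rather than a hard-coded gui/slick path, falling back to the generic icon when no per-provider image exists. A minimal sketch of that lookup (the function signature is illustrative; the fallback name follows the newznab variant):

```python
import os

def provider_image_name(prog_dir, gui_name, provider_id, fallback="newznab.png"):
    # Prefer <prog_dir>/gui/<gui_name>/images/providers/<provider_id>.png if it exists
    candidate = os.path.join(prog_dir, "gui", gui_name, "images", "providers", provider_id + ".png")
    return provider_id + ".png" if os.path.isfile(candidate) else fallback
```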
@@ -28,7 +28,6 @@ from sickbeard.common import Quality from sickbeard import tvcache from sickbeard import show_name_helpers -REMOTE_DBG = False class NyaaProvider(generic.TorrentProvider): def __init__(self): @@ -70,9 +69,10 @@ class NyaaProvider(generic.TorrentProvider): logger.log(u"" + str(self.show.name) + " is not an anime skiping " + str(self.name)) return [] - params = {"term": search_string.encode('utf-8'), - "cats": '1_37', #Limit to English-translated Anime (for now) - "sort": '2', #Sort Descending By Seeders + params = { + "term": search_string.encode('utf-8'), + "cats": '1_37', # Limit to English-translated Anime (for now) + "sort": '2', # Sort Descending By Seeders } searchURL = self.url + '?page=rss&' + urllib.urlencode(params) @@ -118,6 +118,7 @@ class NyaaProvider(generic.TorrentProvider): def seedRatio(self): return self.ratio + class NyaaCache(tvcache.TVCache): def __init__(self, provider): tvcache.TVCache.__init__(self, provider) @@ -125,29 +126,18 @@ class NyaaCache(tvcache.TVCache): # only poll NyaaTorrents every 15 minutes max self.minTime = 15 - def _getRSSData(self): + def _getDailyData(self): params = { - "page": 'rss', # Use RSS page - "order": '1' #Sort Descending By Date + "page": 'rss', # Use RSS page + "order": '1', # Sort Descending By Date + "cats": '1_37', # Limit to English-translated Anime (for now) } url = self.provider.url + '?' + urllib.urlencode(params) logger.log(u"NyaaTorrents cache update URL: " + url, logger.DEBUG) - return self.getRSSFeed(url) - - def _parseItem(self, item): - (title, url) = self.provider._get_title_and_url(item) - - if not title or not url: - logger.log(u"The XML returned from the NyaaTorrents RSS feed is incomplete, this result is unusable", - logger.ERROR) - return None - - logger.log(u"RSS Feed provider: [" + self.provider.name + "] Attempting to add item to cache: " + title, logger.DEBUG) - - return self._addCacheEntry(title, url) + return self.getRSSFeed(url).entries provider = NyaaProvider() diff --git a/sickbeard/providers/nzbsrus.py b/sickbeard/providers/nzbsrus.py index bcfe428a..570b24ed 100644 --- a/sickbeard/providers/nzbsrus.py +++ b/sickbeard/providers/nzbsrus.py @@ -98,7 +98,7 @@ class NZBsRUSCache(tvcache.TVCache): # only poll NZBs'R'US every 15 minutes max self.minTime = 15 - def _getRSSData(self): + def _getDailyData(self): url = self.provider.url + 'rssfeed.php?' 
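NyaaCache._getDailyData() builds its daily RSS URL with urlencode, ordering newest-first and now also constraining to the English-translated anime category (1_37) that the search path already used. A standalone sketch of the same URL construction (written against Python 3's urllib.parse; the original code uses Python 2's urllib.urlencode):

```python
from urllib.parse import urlencode

def nyaa_daily_rss_url(base_url):
    # Newest-first RSS view, limited to English-translated anime (category 1_37)
    params = {"page": "rss", "order": "1", "cats": "1_37"}
    return base_url + "?" + urlencode(params)

# nyaa_daily_rss_url("https://tracker.example/") -> "https://tracker.example/?page=rss&order=1&cats=1_37"
```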
urlArgs = {'cat': '91,75,104', # HD,XviD,SD-x264 'i': sickbeard.NZBSRUS_UID, @@ -107,7 +107,7 @@ class NZBsRUSCache(tvcache.TVCache): url += urllib.urlencode(urlArgs) logger.log(u"NZBs'R'US cache update URL: " + url, logger.DEBUG) - return self.getRSSFeed(url) + return self.getRSSFeed(url).entries def _checkAuth(self, data): return data != 'Invalid Link' diff --git a/sickbeard/providers/omgwtfnzbs.py b/sickbeard/providers/omgwtfnzbs.py index dc9aa050..946a3f41 100644 --- a/sickbeard/providers/omgwtfnzbs.py +++ b/sickbeard/providers/omgwtfnzbs.py @@ -154,7 +154,7 @@ class OmgwtfnzbsCache(tvcache.TVCache): tvcache.TVCache.__init__(self, provider) self.minTime = 20 - def _getRSSData(self): + def _getDailyData(self): params = {'user': provider.username, 'api': provider.api_key, 'eng': 1, @@ -164,7 +164,7 @@ class OmgwtfnzbsCache(tvcache.TVCache): logger.log(self.provider.name + u" cache update URL: " + rss_url, logger.DEBUG) - return self.getRSSFeed(rss_url) + return self.getRSSFeed(rss_url).entries def _checkAuth(self, data): return self.provider._checkAuthFromData(data) diff --git a/sickbeard/providers/publichd.py b/sickbeard/providers/publichd.py index fcb44a16..ff719c01 100644 --- a/sickbeard/providers/publichd.py +++ b/sickbeard/providers/publichd.py @@ -245,47 +245,9 @@ class PublicHDCache(tvcache.TVCache): # only poll ThePirateBay every 10 minutes max self.minTime = 20 - def updateCache(self): - - # delete anything older then 7 days - self._clearCache() - - if not self.shouldUpdate(): - return - + def _getDailyData(self): search_params = {'RSS': ['rss']} - rss_results = self.provider._doSearch(search_params) - - if rss_results: - self.setLastUpdate() - else: - return [] - - cl = [] - for result in rss_results: - - item = (result[0], result[1]) - ci = self._parseItem(item) - if ci is not None: - cl.append(ci) - - - - if len(cl) > 0: - myDB = self._getDB() - myDB.mass_action(cl) - - - def _parseItem(self, item): - - (title, url) = item - - if not title or not url: - return None - - logger.log(u"Attempting to cache item:[" + title +"]", logger.DEBUG) - - return self._addCacheEntry(title, url) + return self.provider._doSearch(search_params) provider = PublicHDProvider() diff --git a/sickbeard/providers/rsstorrent.py b/sickbeard/providers/rsstorrent.py index f46c7b30..fc7f118b 100644 --- a/sickbeard/providers/rsstorrent.py +++ b/sickbeard/providers/rsstorrent.py @@ -53,7 +53,7 @@ class TorrentRssProvider(generic.TorrentProvider): return self.name + '|' + self.url + '|' + self.cookies + '|' + str(int(self.enabled)) + '|' + self.search_mode + '|' + str(int(self.search_fallback)) + '|' + str(int(self.backlog_only)) def imageName(self): - if ek.ek(os.path.isfile, ek.ek(os.path.join, sickbeard.PROG_DIR, 'data', 'images', 'providers', self.getID() + '.png')): + if ek.ek(os.path.isfile, ek.ek(os.path.join, sickbeard.PROG_DIR, 'gui', sickbeard.GUI_NAME, 'images', 'providers', self.getID() + '.png')): return self.getID() + '.png' return 'torrentrss.png' @@ -152,21 +152,11 @@ class TorrentRssCache(tvcache.TVCache): tvcache.TVCache.__init__(self, provider) self.minTime = 15 - def _getRSSData(self): + def _getDailyData(self): logger.log(u"TorrentRssCache cache update URL: " + self.provider.url, logger.DEBUG) request_headers = None if self.provider.cookies: request_headers = { 'Cookie': self.provider.cookies } - return self.getRSSFeed(self.provider.url, request_headers=request_headers) - - def _parseItem(self, item): - - (title, url) = self.provider._get_title_and_url(item) - if not title or not url: - 
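TorrentRssCache._getDailyData() forwards any configured provider cookies as a raw Cookie request header when it fetches the feed, so private RSS feeds keep authenticating. A rough standalone equivalent, using requests instead of SickRage's getRSSFeed() helper (that substitution is this sketch's assumption):

```python
import requests

def fetch_private_feed(url, cookies=None, timeout=30):
    # Private RSS feeds often need the user's session cookie string sent verbatim
    headers = {"Cookie": cookies} if cookies else {}
    response = requests.get(url, headers=headers, timeout=timeout)
    response.raise_for_status()
    return response.text
```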
logger.log(u"The XML returned from the RSS feed is incomplete, this result is unusable", logger.ERROR) - return None - - logger.log(u"Attempting to add item to cache: " + title, logger.DEBUG) - return self._addCacheEntry(title, url) + return self.getRSSFeed(self.provider.url, request_headers=request_headers).entries diff --git a/sickbeard/providers/scc.py b/sickbeard/providers/scc.py index 5eb6b25c..2dc0c31f 100644 --- a/sickbeard/providers/scc.py +++ b/sickbeard/providers/scc.py @@ -312,47 +312,10 @@ class SCCCache(tvcache.TVCache): # only poll SCC every 10 minutes max self.minTime = 20 - def updateCache(self): - - # delete anything older then 7 days - self._clearCache() - - if not self.shouldUpdate(): - return - + def _getDailyData(self): search_params = {'RSS': ['']} - rss_results = self.provider._doSearch(search_params) + return self.provider._doSearch(search_params) - if rss_results: - self.setLastUpdate() - else: - return [] - - cl = [] - for result in rss_results: - - item = (result[0], result[1]) - ci = self._parseItem(item) - if ci is not None: - cl.append(ci) - - - - if len(cl) > 0: - myDB = self._getDB() - myDB.mass_action(cl) - - - def _parseItem(self, item): - - (title, url) = item - - if not title or not url: - return None - - logger.log(u"Attempting to cache item:[" + title +"]", logger.DEBUG) - - return self._addCacheEntry(title, url) provider = SCCProvider() diff --git a/sickbeard/providers/speedcd.py b/sickbeard/providers/speedcd.py index 49e3a68f..a8ad5dd5 100644 --- a/sickbeard/providers/speedcd.py +++ b/sickbeard/providers/speedcd.py @@ -252,47 +252,10 @@ class SpeedCDCache(tvcache.TVCache): # only poll Speedcd every 20 minutes max self.minTime = 20 - def updateCache(self): - - # delete anything older then 7 days - self._clearCache() - - if not self.shouldUpdate(): - return - + def _getDailyData(self): search_params = {'RSS': ['']} - rss_results = self.provider._doSearch(search_params) + return self.provider._doSearch(search_params) - if rss_results: - self.setLastUpdate() - else: - return [] - - cl = [] - for result in rss_results: - - item = (result[0], result[1]) - ci = self._parseItem(item) - if ci is not None: - cl.append(ci) - - - - if len(cl) > 0: - myDB = self._getDB() - myDB.mass_action(cl) - - - def _parseItem(self, item): - - (title, url) = item - - if not title or not url: - return None - - logger.log(u"Attempting to cache item:[" + title + "]", logger.DEBUG) - - return self._addCacheEntry(title, url) provider = SpeedCDProvider() diff --git a/sickbeard/providers/thepiratebay.py b/sickbeard/providers/thepiratebay.py index 40ac3a9a..5c69e5cb 100644 --- a/sickbeard/providers/thepiratebay.py +++ b/sickbeard/providers/thepiratebay.py @@ -338,47 +338,9 @@ class ThePirateBayCache(tvcache.TVCache): # only poll ThePirateBay every 10 minutes max self.minTime = 20 - def updateCache(self): - - # delete anything older then 7 days - self._clearCache() - - if not self.shouldUpdate(): - return - + def _getDailyData(self): search_params = {'RSS': ['rss']} - rss_results = self.provider._doSearch(search_params) - - if rss_results: - self.setLastUpdate() - else: - return [] - - cl = [] - for result in rss_results: - - item = (result[0], result[1]) - ci = self._parseItem(item) - if ci is not None: - cl.append(ci) - - - - if len(cl) > 0: - myDB = self._getDB() - myDB.mass_action(cl) - - - def _parseItem(self, item): - - (title, url) = item - - if not title or not url: - return None - - logger.log(u"Attempting to cache item:[" + title +"]", logger.DEBUG) - - return 
self._addCacheEntry(title, url) + return self.provider._doSearch(search_params) class ThePirateBayWebproxy: diff --git a/sickbeard/providers/torrentbytes.py b/sickbeard/providers/torrentbytes.py index e5f12a98..e0013808 100644 --- a/sickbeard/providers/torrentbytes.py +++ b/sickbeard/providers/torrentbytes.py @@ -274,45 +274,9 @@ class TorrentBytesCache(tvcache.TVCache): # only poll TorrentBytes every 20 minutes max self.minTime = 20 - def updateCache(self): - - # delete anything older then 7 days - self._clearCache() - - if not self.shouldUpdate(): - return - + def _getDailyData(self): search_params = {'RSS': ['']} - rss_results = self.provider._doSearch(search_params) - - if rss_results: - self.setLastUpdate() - else: - return [] - - cl = [] - for result in rss_results: - - item = (result[0], result[1]) - ci = self._parseItem(item) - if ci is not None: - cl.append(ci) - - if len(cl) > 0: - myDB = self._getDB() - myDB.mass_action(cl) - - - def _parseItem(self, item): - - (title, url) = item - - if not title or not url: - return None - - logger.log(u"Attempting to cache item:[" + title + "]", logger.DEBUG) - - return self._addCacheEntry(title, url) + return self.provider._doSearch(search_params) provider = TorrentBytesProvider() diff --git a/sickbeard/providers/torrentday.py b/sickbeard/providers/torrentday.py index 9d213d14..e1d78d8d 100644 --- a/sickbeard/providers/torrentday.py +++ b/sickbeard/providers/torrentday.py @@ -277,47 +277,10 @@ class TorrentDayCache(tvcache.TVCache): # Only poll IPTorrents every 10 minutes max self.minTime = 10 - def updateCache(self): - - # delete anything older then 7 days - self._clearCache() - - if not self.shouldUpdate(): - return - + def _getDailyData(self): search_params = {'RSS': ['']} - rss_results = self.provider._doSearch(search_params) + return self.provider._doSearch(search_params) - if rss_results: - self.setLastUpdate() - else: - return [] - - cl = [] - for result in rss_results: - - item = (result[0], result[1]) - ci = self._parseItem(item) - if ci is not None: - cl.append(ci) - - - - if len(cl) > 0: - myDB = self._getDB() - myDB.mass_action(cl) - - - def _parseItem(self, item): - - (title, url) = item - - if not title or not url: - return None - - logger.log(u"Attempting to cache item:[" + title +"]", logger.DEBUG) - - return self._addCacheEntry(title, url) provider = TorrentDayProvider() diff --git a/sickbeard/providers/torrentleech.py b/sickbeard/providers/torrentleech.py index b56cf3f0..22009b2d 100644 --- a/sickbeard/providers/torrentleech.py +++ b/sickbeard/providers/torrentleech.py @@ -271,47 +271,9 @@ class TorrentLeechCache(tvcache.TVCache): # only poll TorrentLeech every 20 minutes max self.minTime = 20 - def updateCache(self): - - # delete anything older then 7 days - self._clearCache() - - if not self.shouldUpdate(): - return - + def _getDailyData(self): search_params = {'RSS': ['']} - rss_results = self.provider._doSearch(search_params) - - if rss_results: - self.setLastUpdate() - else: - return [] - - cl = [] - for result in rss_results: - - item = (result[0], result[1]) - ci = self._parseItem(item) - if ci is not None: - cl.append(ci) - - - - if len(cl) > 0: - myDB = self._getDB() - myDB.mass_action(cl) - - - def _parseItem(self, item): - - (title, url) = item - - if not title or not url: - return None - - logger.log(u"Attempting to cache item:[" + title +"]", logger.DEBUG) - - return self._addCacheEntry(title, url) + return self.provider._doSearch(search_params) provider = TorrentLeechProvider() diff --git 
a/sickbeard/providers/tvtorrents.py b/sickbeard/providers/tvtorrents.py index 3c4da633..427ca5b8 100644 --- a/sickbeard/providers/tvtorrents.py +++ b/sickbeard/providers/tvtorrents.py @@ -86,7 +86,7 @@ class TvTorrentsCache(tvcache.TVCache): # only poll TvTorrents every 15 minutes max self.minTime = 15 - def _getRSSData(self): + def _getDailyData(self): # These will be ignored on the serverside. ignore_regex = "all.month|month.of|season[\s\d]*complete" diff --git a/sickbeard/traktChecker.py b/sickbeard/traktChecker.py index c9a138d3..0e1fd0b6 100644 --- a/sickbeard/traktChecker.py +++ b/sickbeard/traktChecker.py @@ -148,13 +148,16 @@ class TraktChecker(): self.addDefaultShow(indexer, indexer_id, show["title"], SKIPPED) newShow = helpers.findCertainShow(sickbeard.showList, indexer_id) - if newShow and int(newShow['indexer']) == indexer: - for episode in show["episodes"]: - if newShow is not None: - self.setEpisodeToWanted(newShow, episode["season"], episode["number"]) - else: - self.todoWanted.append((indexer_id, episode["season"], episode["number"])) - self.startBacklog(newShow) + try: + if newShow and int(newShow['indexer']) == indexer: + for episode in show["episodes"]: + if newShow is not None: + self.setEpisodeToWanted(newShow, episode["season"], episode["number"]) + else: + self.todoWanted.append((indexer_id, episode["season"], episode["number"])) + self.startBacklog(newShow) + except TypeError: + logger.log(u"Could not parse the output from trakt for " + show["title"], logger.DEBUG) def addDefaultShow(self, indexer, indexer_id, name, status): """ @@ -233,4 +236,4 @@ class TraktChecker(): for season in segment[1]: logger.log(u"Starting backlog for " + show.name + " season " + str( season) + " because some eps were set to wanted") - self.todoBacklog.remove(segment) \ No newline at end of file + self.todoBacklog.remove(segment) diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py index 49d61c0d..97134ea7 100644 --- a/sickbeard/tvcache.py +++ b/sickbeard/tvcache.py @@ -102,6 +102,9 @@ class TVCache(): return data + def _getDailyData(self): + return None + def _checkAuth(self, data): return True @@ -112,7 +115,7 @@ class TVCache(): if self.shouldUpdate() and self._checkAuth(None): # as long as the http request worked we count this as an update - data = self._getRSSData() + data = self._getDailyData() if not data: return [] @@ -125,8 +128,9 @@ class TVCache(): # parse data if self._checkAuth(data): cl = [] - for item in data.entries: - ci = self._parseItem(item) + for item in data: + title, url = self.provider._get_title_and_url(item) + ci = self._parseItem(title, url) if ci is not None: cl.append(ci) @@ -148,9 +152,7 @@ class TVCache(): def _translateLinkURL(self, url): return url.replace('&', '&') - def _parseItem(self, item): - title = item.title - url = item.link + def _parseItem(self, title, url): self._checkItemAuth(title, url) @@ -158,7 +160,7 @@ class TVCache(): title = self._translateTitle(title) url = self._translateLinkURL(url) - logger.log(u"Checking if item from RSS feed is in the cache: " + title, logger.DEBUG) + logger.log(u"Attempting to add item to cache: " + title, logger.DEBUG) return self._addCacheEntry(title, url) else:
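The tvcache.py hunks are the hub of this refactor: _getDailyData() becomes the overridable hook, updateCache() consumes whatever list it returns, and _parseItem() now receives (title, url) extracted by the caller via provider._get_title_and_url() instead of reading item.title/item.link itself. A condensed sketch of the resulting flow (auth checks, cache clearing and the actual DB writes from tvcache.py are omitted; the class here is a stand-in, not the real TVCache):

```python
class TVCacheSketch(object):
    def __init__(self, provider):
        self.provider = provider

    def _getDailyData(self):
        # Hook each provider cache overrides: feed entries or RSS-mode search results
        return None

    def _parseItem(self, title, url):
        # Title/URL are passed in now; the old version pulled item.title / item.link itself
        if not title or not url:
            return None
        return (title.strip(), url.replace("&amp;", "&"))

    def updateCache(self):
        data = self._getDailyData() or []
        parsed = []
        for item in data:
            title, url = self.provider._get_title_and_url(item)
            entry = self._parseItem(title, url)
            if entry is not None:
                parsed.append(entry)
        return parsed
```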