diff --git a/sickbeard/providers/generic.py b/sickbeard/providers/generic.py
index 44f2795d..6be32027 100644
--- a/sickbeard/providers/generic.py
+++ b/sickbeard/providers/generic.py
@@ -281,6 +281,7 @@ class GenericProvider:
         itemList += itemsUnknown if itemsUnknown else []

         # filter results
+        cl = []
         for item in itemList:
             (title, url) = self._get_title_and_url(item)

@@ -299,53 +300,61 @@ class GenericProvider:
             quality = parse_result.quality
             release_group = parse_result.release_group

-            actual_season = None
-            actual_episodes = None
-
+            addCacheEntry = False
             if not (showObj.air_by_date or showObj.sports):
                 if search_mode == 'sponly' and len(parse_result.episode_numbers):
                     logger.log(
                         u"This is supposed to be a season pack search but the result " + title + " is not a valid season pack, skipping it",
                         logger.DEBUG)
-                    continue
+                    addCacheEntry = True
+                else:
+                    if not len(parse_result.episode_numbers) and (
+                            parse_result.season_number and parse_result.season_number != season) or (
+                            not parse_result.season_number and season != 1):
+                        logger.log(u"The result " + title + " doesn't seem to be a valid season that we are trying to snatch, ignoring",
+                                   logger.DEBUG)
+                        addCacheEntry = True
+                    elif len(parse_result.episode_numbers) and (
+                            parse_result.season_number != season or not [ep for ep in episodes if
+                                                                         ep.scene_episode in parse_result.episode_numbers]):
+                        logger.log(u"The result " + title + " doesn't seem to be a valid episode that we are trying to snatch, ignoring",
+                                   logger.DEBUG)
+                        addCacheEntry = True

-                if not len(parse_result.episode_numbers) and (
-                        parse_result.season_number and parse_result.season_number != season) or (
-                        not parse_result.season_number and season != 1):
-                    logger.log(u"The result " + title + " doesn't seem to be a valid season that we want, ignoring",
-                               logger.DEBUG)
-                    continue
-                elif len(parse_result.episode_numbers) and (
-                        parse_result.season_number != season or not [ep for ep in episodes if
-                                                                     ep.scene_episode in parse_result.episode_numbers]):
-                    logger.log(u"The result " + title + " doesn't seem to be a valid episode that we want, ignoring",
-                               logger.DEBUG)
-                    continue
-
-                # we just use the existing info for normal searches
-                actual_season = season
-                actual_episodes = parse_result.episode_numbers
+                if not addCacheEntry:
+                    # we just use the existing info for normal searches
+                    actual_season = season
+                    actual_episodes = parse_result.episode_numbers
             else:
                 if not (parse_result.is_air_by_date or parse_result.is_sports):
                     logger.log(
                         u"This is supposed to be a date search but the result " + title + " didn't parse as one, skipping it",
                         logger.DEBUG)
-                    continue
+                    addCacheEntry = True
+                else:
+                    airdate = parse_result.air_date.toordinal() if parse_result.air_date else parse_result.sports_air_date.toordinal()
+                    myDB = db.DBConnection()
+                    sql_results = myDB.select(
+                        "SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
+                        [showObj.indexerid, airdate])

-                airdate = parse_result.air_date.toordinal() if parse_result.air_date else parse_result.sports_air_date.toordinal()
-                myDB = db.DBConnection()
-                sql_results = myDB.select(
-                    "SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
-                    [showObj.indexerid, airdate])
+                    if len(sql_results) != 1:
+                        logger.log(
+                            u"Tried to look up the date for the episode " + title + " but the database didn't give proper results, skipping it",
+                            logger.WARNING)
+                        addCacheEntry = True

-                if len(sql_results) != 1:
-                    logger.log(
-                        u"Tried to look up the date for the episode " + title + " but the database didn't give proper results, skipping it",
-                        logger.WARNING)
-                    continue
+                if not addCacheEntry:
+                    actual_season = int(sql_results[0]["season"])
+                    actual_episodes = [int(sql_results[0]["episode"])]

-                actual_season = int(sql_results[0]["season"])
-                actual_episodes = [int(sql_results[0]["episode"])]
+            # add parsed result to cache for usage later on
+            if addCacheEntry:
+                logger.log(u"Adding item from search to cache: " + title, logger.DEBUG)
+                ci = self.cache._addCacheEntry(title, url, parse_result=parse_result)
+                if ci is not None:
+                    cl.append(ci)
+                continue

             # make sure we want the episode
             wantEp = True
@@ -396,6 +405,11 @@ class GenericProvider:
             else:
                 results[epNum].append(result)

+        # check if we have items to add to cache
+        if len(cl) > 0:
+            myDB = self.cache._getDB()
+            myDB.mass_action(cl)
+
         return results

     def findPropers(self, search_date=None):
diff --git a/sickbeard/providers/newzbin.py b/sickbeard/providers/newzbin.py
index bdf4b31e..7b04b00f 100644
--- a/sickbeard/providers/newzbin.py
+++ b/sickbeard/providers/newzbin.py
@@ -22,7 +22,6 @@ import sys
 import time
 import urllib, urlparse

-from xml.dom.minidom import parseString
 from datetime import datetime, timedelta

 import sickbeard
@@ -352,13 +351,9 @@ class NewzbinCache(tvcache.TVCache):
                        logger.ERROR)
             return

-        quality = self.provider.getQuality(item)
-
-        logger.log("Found quality " + str(quality), logger.DEBUG)
-
         logger.log(u"RSS Feed provider: [" + self.provider.name + "] Attempting to add item to cache: " + title, logger.DEBUG)

-        self._addCacheEntry(title, url, quality=quality)
+        self._addCacheEntry(title, url)


 provider = NewzbinProvider()
diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py
index 59ee5679..e1a5ebae 100644
--- a/sickbeard/tvcache.py
+++ b/sickbeard/tvcache.py
@@ -140,11 +140,9 @@ class TVCache():
     def _translateTitle(self, title):
         return u'' + title.replace(' ', '.')

-
     def _translateLinkURL(self, url):
         return url.replace('&amp;', '&')

-
     def _parseItem(self, item):
         title = item.title
         url = item.link
@@ -231,26 +229,30 @@ class TVCache():

         return True

-    def _addCacheEntry(self, name, url, indexer_id=0, quality=None):
+    def _addCacheEntry(self, name, url, parse_result=None, indexer_id=0):

-        # create showObj from indexer_id if available
-        showObj=None
-        if indexer_id:
-            showObj = helpers.findCertainShow(sickbeard.showList, indexer_id)
+        # check if we passed in a parsed result or should we try and create one
+        if not parse_result:

-        try:
-            myParser = NameParser(showObj=showObj, convert=True)
-            parse_result = myParser.parse(name)
-        except InvalidNameException:
-            logger.log(u"Unable to parse the filename " + name + " into a valid episode", logger.DEBUG)
-            return None
-        except InvalidShowException:
-            logger.log(u"Unable to parse the filename " + name + " into a valid show", logger.DEBUG)
-            return None
+            # create showObj from indexer_id if available
+            showObj=None
+            if indexer_id:
+                showObj = helpers.findCertainShow(sickbeard.showList, indexer_id)

-        if not parse_result or not parse_result.series_name:
-            return None
+            try:
+                myParser = NameParser(showObj=showObj, convert=True)
+                parse_result = myParser.parse(name)
+            except InvalidNameException:
logger.log(u"Unable to parse the filename " + name + " into a valid episode", logger.DEBUG) + return None + except InvalidShowException: + logger.log(u"Unable to parse the filename " + name + " into a valid show", logger.DEBUG) + return None + if not parse_result or not parse_result.series_name: + return None + + # if we made it this far then lets add the parsed result to cache for usager later on season = episodes = None if parse_result.is_air_by_date or parse_result.is_sports: airdate = parse_result.air_date.toordinal() if parse_result.air_date else parse_result.sports_air_date.toordinal() @@ -263,7 +265,7 @@ class TVCache(): season = int(sql_results[0]["season"]) episodes = [int(sql_results[0]["episode"])] else: - season = parse_result.season_number if parse_result.season_number != None else 1 + season = parse_result.season_number if parse_result.season_number else 1 episodes = parse_result.episode_numbers if season and episodes: @@ -274,8 +276,7 @@ class TVCache(): curTimestamp = int(time.mktime(datetime.datetime.today().timetuple())) # get quality of release - if quality is None: - quality = Quality.sceneQuality(name, parse_result.is_anime) + quality = parse_result.quality if not isinstance(name, unicode): name = unicode(name, 'utf-8', 'replace')