From c350c0fdf94bde366484aa7d7c0b14ecabb4017c Mon Sep 17 00:00:00 2001
From: echel0n
Date: Sun, 18 May 2014 08:33:31 -0700
Subject: [PATCH] Fixed dupe issues with backlog searches. Set cache to store
 results for 1 week and remove anything older than 1 week only if the daily
 search has searched the results beforehand.

---
 sickbeard/dailysearcher.py          | 13 ++++++++++
 sickbeard/providers/btn.py          |  7 ++---
 sickbeard/providers/generic.py      | 12 ++++-----
 sickbeard/providers/hdbits.py       |  7 ++---
 sickbeard/providers/hdtorrents.py   |  7 ++---
 sickbeard/providers/iptorrents.py   |  7 ++---
 sickbeard/providers/kat.py          |  7 ++---
 sickbeard/providers/newznab.py      |  9 ++++---
 sickbeard/providers/nextgen.py      |  7 ++---
 sickbeard/providers/publichd.py     |  7 ++---
 sickbeard/providers/scc.py          |  7 ++---
 sickbeard/providers/speedcd.py      |  7 ++---
 sickbeard/providers/thepiratebay.py |  7 ++---
 sickbeard/providers/torrentday.py   |  7 ++---
 sickbeard/providers/torrentleech.py |  7 ++---
 sickbeard/search.py                 | 39 ++++++++++++++--------------
 sickbeard/search_queue.py           | 40 ++++++++++++++++++-----------
 sickbeard/tvcache.py                | 12 ++++-----
 18 files changed, 123 insertions(+), 86 deletions(-)

diff --git a/sickbeard/dailysearcher.py b/sickbeard/dailysearcher.py
index dadf036b..e750a374 100644
--- a/sickbeard/dailysearcher.py
+++ b/sickbeard/dailysearcher.py
@@ -39,9 +39,19 @@ class DailySearcher():
         self.amActive = False
 
     def run(self):
+        # remove names from cache that link back to active shows that we watch
         sickbeard.name_cache.syncNameCache()
 
+        logger.log(u"Updating RSS cache ...")
+
+        origThreadName = threading.currentThread().name
+        providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]
+        for curProviderCount, curProvider in enumerate(providers):
+            threading.currentThread().name = origThreadName + " :: [" + curProvider.name + "]"
+
+            curProvider.cache.updateCache()
+
         logger.log(u"Checking to see if any shows have wanted episodes available for the last week ...")
 
         curDate = datetime.date.today() - datetime.timedelta(weeks=1)
 
@@ -80,6 +90,9 @@ class DailySearcher():
             else:
                 todaysEps[show].append(ep)
 
+        # reset thread name back to original
+        threading.currentThread().name = origThreadName
+
         if len(todaysEps):
             for show in todaysEps:
                 segment = todaysEps[show]
diff --git a/sickbeard/providers/btn.py b/sickbeard/providers/btn.py
index fe04693b..5f62b9eb 100644
--- a/sickbeard/providers/btn.py
+++ b/sickbeard/providers/btn.py
@@ -312,6 +312,10 @@ class BTNCache(tvcache.TVCache):
 
     def updateCache(self):
 
+        # delete anything older than 7 days
+        logger.log(u"Clearing " + self.provider.name + " cache")
+        self._clearCache()
+
         if not self.shouldUpdate():
             return
 
@@ -325,9 +329,6 @@ class BTNCache(tvcache.TVCache):
         else:
             return []
 
-        logger.log(u"Clearing " + self.provider.name + " cache and updating with new information")
-        self._clearCache()
-
         if self._checkAuth(data):
             # By now we know we've got data and no auth errors, all we need to do is put it in the database
             cl = []
diff --git a/sickbeard/providers/generic.py b/sickbeard/providers/generic.py
index a6dfd5cc..a52534f6 100644
--- a/sickbeard/providers/generic.py
+++ b/sickbeard/providers/generic.py
@@ -252,10 +252,10 @@ class GenericProvider:
                     u"Incomplete Indexer <-> Scene mapping detected for " + epObj.prettyName() + ", skipping search!")
                 continue
 
-            cacheResult = self.cache.searchCache([epObj], manualSearch)
-            if len(cacheResult):
-                results.update({epObj.episode:cacheResult[epObj]})
-                continue
+            #cacheResult = self.cache.searchCache([epObj], manualSearch)
+            #if len(cacheResult):
+            #    results.update({epObj.episode:cacheResult[epObj]})
+            #    continue
 
             if search_mode == 'sponly':
                 for curString in self._get_season_search_strings(epObj):
@@ -273,8 +273,8 @@ class GenericProvider:
             searchItems[epObj] = itemList
 
         # if we have cached results return them.
-        if len(results):
-            return results
+        #if len(results):
+        #    return results
 
         for ep_obj in searchItems:
             for item in searchItems[ep_obj]:
diff --git a/sickbeard/providers/hdbits.py b/sickbeard/providers/hdbits.py
index 501c622a..536436f9 100644
--- a/sickbeard/providers/hdbits.py
+++ b/sickbeard/providers/hdbits.py
@@ -184,6 +184,10 @@ class HDBitsCache(tvcache.TVCache):
 
     def updateCache(self):
 
+        # delete anything older than 7 days
+        logger.log(u"Clearing " + self.provider.name + " cache")
+        self._clearCache()
+
         if not self.shouldUpdate():
             return
 
@@ -197,9 +201,6 @@ class HDBitsCache(tvcache.TVCache):
         else:
             return []
 
-        logger.log(u"Clearing " + self.provider.name + " cache and updating with new information")
-        self._clearCache()
-
         parsedJSON = helpers.parse_json(data)
 
         if parsedJSON is None:
diff --git a/sickbeard/providers/hdtorrents.py b/sickbeard/providers/hdtorrents.py
index 0efdf15c..b9d55ce1 100644
--- a/sickbeard/providers/hdtorrents.py
+++ b/sickbeard/providers/hdtorrents.py
@@ -341,6 +341,10 @@ class HDTorrentsCache(tvcache.TVCache):
 
     def updateCache(self):
 
+        # delete anything older than 7 days
+        logger.log(u"Clearing " + self.provider.name + " cache")
+        self._clearCache()
+
         if not self.shouldUpdate():
             return
 
@@ -352,9 +356,6 @@ class HDTorrentsCache(tvcache.TVCache):
         else:
             return []
 
-        logger.log(u"Clearing " + self.provider.name + " cache and updating with new information")
-        self._clearCache()
-
         cl = []
 
         for result in rss_results:
diff --git a/sickbeard/providers/iptorrents.py b/sickbeard/providers/iptorrents.py
index 78ea53ed..30079bb5 100644
--- a/sickbeard/providers/iptorrents.py
+++ b/sickbeard/providers/iptorrents.py
@@ -284,6 +284,10 @@ class IPTorrentsCache(tvcache.TVCache):
 
     def updateCache(self):
 
+        # delete anything older than 7 days
+        logger.log(u"Clearing " + self.provider.name + " cache")
+        self._clearCache()
+
         if not self.shouldUpdate():
             return
 
@@ -295,9 +299,6 @@ class IPTorrentsCache(tvcache.TVCache):
         else:
             return []
 
-        logger.log(u"Clearing " + self.provider.name + " cache and updating with new information")
-        self._clearCache()
-
         cl = []
 
         for result in rss_results:
diff --git a/sickbeard/providers/kat.py b/sickbeard/providers/kat.py
index df751db5..25342f94 100644
--- a/sickbeard/providers/kat.py
+++ b/sickbeard/providers/kat.py
@@ -415,6 +415,10 @@ class KATCache(tvcache.TVCache):
 
     def updateCache(self):
 
+        # delete anything older than 7 days
+        logger.log(u"Clearing " + self.provider.name + " cache")
+        self._clearCache()
+
         if not self.shouldUpdate():
             return
 
@@ -426,9 +430,6 @@ class KATCache(tvcache.TVCache):
         else:
             return []
 
-        logger.log(u"Clearing " + self.provider.name + " cache and updating with new information")
-        self._clearCache()
-
         cl = []
         for result in rss_results:
             item = (result[0], result[1])
diff --git a/sickbeard/providers/newznab.py b/sickbeard/providers/newznab.py
index c68199aa..65dc129a 100644
--- a/sickbeard/providers/newznab.py
+++ b/sickbeard/providers/newznab.py
@@ -273,6 +273,11 @@ class NewznabCache(tvcache.TVCache):
         return self.provider._checkAuthFromData(data)
 
     def updateCache(self):
+
+        # delete anything older than 7 days
+        logger.log(u"Clearing " + self.provider.name + " cache")
+        self._clearCache()
+
         if not self.shouldUpdate():
             return
 
@@ -285,10 +290,6 @@ class NewznabCache(tvcache.TVCache):
         else:
             return []
 
-        # now that we've loaded the current RSS feed lets delete the old cache
-        logger.log(u"Clearing " + self.provider.name + " cache and updating with new information")
-        self._clearCache()
-
         if self._checkAuth(data):
             items = data.entries
             ql = []
diff --git a/sickbeard/providers/nextgen.py b/sickbeard/providers/nextgen.py
index 80abe64b..83e561fa 100644
--- a/sickbeard/providers/nextgen.py
+++ b/sickbeard/providers/nextgen.py
@@ -333,6 +333,10 @@ class NextGenCache(tvcache.TVCache):
 
     def updateCache(self):
 
+        # delete anything older than 7 days
+        logger.log(u"Clearing " + self.provider.name + " cache")
+        self._clearCache()
+
         if not self.shouldUpdate():
             return
 
@@ -344,9 +348,6 @@ class NextGenCache(tvcache.TVCache):
         else:
             return []
 
-        logger.log(u"Clearing " + self.provider.name + " cache and updating with new information")
-        self._clearCache()
-
         cl = []
 
         for result in rss_results:
diff --git a/sickbeard/providers/publichd.py b/sickbeard/providers/publichd.py
index 32f5fc57..9d6aba21 100644
--- a/sickbeard/providers/publichd.py
+++ b/sickbeard/providers/publichd.py
@@ -306,6 +306,10 @@ class PublicHDCache(tvcache.TVCache):
 
     def updateCache(self):
 
+        # delete anything older than 7 days
+        logger.log(u"Clearing " + self.provider.name + " cache")
+        self._clearCache()
+
         if not self.shouldUpdate():
             return
 
@@ -317,9 +321,6 @@ class PublicHDCache(tvcache.TVCache):
         else:
             return []
 
-        logger.log(u"Clearing " + self.provider.name + " cache and updating with new information")
-        self._clearCache()
-
         ql = []
 
         for result in rss_results:
diff --git a/sickbeard/providers/scc.py b/sickbeard/providers/scc.py
index ac6379bb..34f14493 100644
--- a/sickbeard/providers/scc.py
+++ b/sickbeard/providers/scc.py
@@ -326,6 +326,10 @@ class SCCCache(tvcache.TVCache):
 
     def updateCache(self):
 
+        # delete anything older than 7 days
+        logger.log(u"Clearing " + self.provider.name + " cache")
+        self._clearCache()
+
         if not self.shouldUpdate():
             return
 
@@ -337,9 +341,6 @@ class SCCCache(tvcache.TVCache):
         else:
             return []
 
-        logger.log(u"Clearing " + self.provider.name + " cache and updating with new information")
-        self._clearCache()
-
         cl = []
 
         for result in rss_results:
diff --git a/sickbeard/providers/speedcd.py b/sickbeard/providers/speedcd.py
index 583ae36e..f609c67a 100644
--- a/sickbeard/providers/speedcd.py
+++ b/sickbeard/providers/speedcd.py
@@ -262,6 +262,10 @@ class SpeedCDCache(tvcache.TVCache):
 
     def updateCache(self):
 
+        # delete anything older than 7 days
+        logger.log(u"Clearing " + self.provider.name + " cache")
+        self._clearCache()
+
         if not self.shouldUpdate():
             return
 
@@ -273,9 +277,6 @@ class SpeedCDCache(tvcache.TVCache):
         else:
             return []
 
-        logger.log(u"Clearing " + self.provider.name + " cache and updating with new information")
-        self._clearCache()
-
         ql = []
 
         for result in rss_results:
diff --git a/sickbeard/providers/thepiratebay.py b/sickbeard/providers/thepiratebay.py
index 045f8908..70c5e2c8 100644
--- a/sickbeard/providers/thepiratebay.py
+++ b/sickbeard/providers/thepiratebay.py
@@ -395,6 +395,10 @@ class ThePirateBayCache(tvcache.TVCache):
 
     def updateCache(self):
 
+        # delete anything older than 7 days
+        logger.log(u"Clearing " + self.provider.name + " cache")
+        self._clearCache()
+
         if not self.shouldUpdate():
             return
 
@@ -406,9 +410,6 @@ class ThePirateBayCache(tvcache.TVCache):
         else:
             return []
 
-        logger.log(u"Clearing " + self.provider.name + " cache and updating with new information")
-        self._clearCache()
-
         cl = []
 
         for result in rss_results:
diff --git a/sickbeard/providers/torrentday.py b/sickbeard/providers/torrentday.py
index 3ebd8d56..8ac801d5 100644
--- a/sickbeard/providers/torrentday.py
+++ b/sickbeard/providers/torrentday.py
@@ -289,6 +289,10 @@ class TorrentDayCache(tvcache.TVCache):
 
     def updateCache(self):
 
+        # delete anything older than 7 days
+        logger.log(u"Clearing " + self.provider.name + " cache")
+        self._clearCache()
+
         if not self.shouldUpdate():
             return
 
@@ -300,9 +304,6 @@ class TorrentDayCache(tvcache.TVCache):
         else:
             return []
 
-        logger.log(u"Clearing " + self.provider.name + " cache and updating with new information")
-        self._clearCache()
-
         cl = []
 
         for result in rss_results:
diff --git a/sickbeard/providers/torrentleech.py b/sickbeard/providers/torrentleech.py
index 7e3c1856..f807e6b3 100644
--- a/sickbeard/providers/torrentleech.py
+++ b/sickbeard/providers/torrentleech.py
@@ -285,6 +285,10 @@ class TorrentLeechCache(tvcache.TVCache):
 
     def updateCache(self):
 
+        # delete anything older than 7 days
+        logger.log(u"Clearing " + self.provider.name + " cache")
+        self._clearCache()
+
         if not self.shouldUpdate():
             return
 
@@ -296,9 +300,6 @@ class TorrentLeechCache(tvcache.TVCache):
         else:
             return []
 
-        logger.log(u"Clearing " + self.provider.name + " cache and updating with new information")
-        self._clearCache()
-
         cl = []
 
         for result in rss_results:
diff --git a/sickbeard/search.py b/sickbeard/search.py
index a969d9ad..dd90f412 100644
--- a/sickbeard/search.py
+++ b/sickbeard/search.py
@@ -316,22 +316,20 @@ def filterSearchResults(show, results):
 
     return foundResults
 
-def searchForNeededEpisodes(queueItem):
+def searchForNeededEpisodes(episodes):
     foundResults = {}
 
     didSearch = False
 
     # ask all providers for any episodes it finds
+    origThreadName = threading.currentThread().name
     providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]
     for curProviderCount, curProvider in enumerate(providers):
-        threading.currentThread().name = queueItem.thread_name + "[" + curProvider.name + "]"
+        threading.currentThread().name = origThreadName + " :: [" + curProvider.name + "]"
 
         try:
-            logger.log(u"Updating RSS cache ...")
-            curProvider.cache.updateCache()
-
             logger.log(u"Searching RSS cache ...")
-            curFoundResults = curProvider.searchRSS(queueItem.segment)
+            curFoundResults = curProvider.searchRSS(episodes)
         except exceptions.AuthException, e:
             logger.log(u"Authentication error: " + ex(e), logger.ERROR)
             if curProviderCount != len(providers):
@@ -382,7 +380,10 @@ def searchForNeededEpisodes(queueItem):
 
     return foundResults.values() if len(foundResults) else {}
 
-def searchProviders(queueItem, show, season, episodes, manualSearch=False):
+def searchProviders(show, season, episodes, manualSearch=False):
+    foundResults = {}
+    finalResults = []
+
     # check if we want to search for season packs instead of just season/episode
     seasonSearch = False
     seasonEps = show.getAllEpisodes(season)
@@ -394,17 +395,17 @@
     if not len(providers):
         logger.log(u"No NZB/Torrent providers found or enabled in the sickrage config. Please check your settings.", logger.ERROR)
-        return queueItem
+        return
 
-    foundResults = {}
+    origThreadName = threading.currentThread().name
 
     for providerNum, provider in enumerate(providers):
-        threading.currentThread().name = queueItem.thread_name + ":[" + provider.name + "]"
+        threading.currentThread().name = origThreadName + " :: [" + provider.name + "]"
         foundResults.setdefault(provider.name, {})
 
         searchCount = 0
         search_mode = 'eponly'
         if seasonSearch and provider.search_mode == 'sponly':
-                search_mode = provider.search_mode
+            search_mode = provider.search_mode
 
         while(True):
             searchCount += 1
@@ -492,8 +493,8 @@
             for curEpNum in allEps:
                 epObjs.append(show.getEpisode(season, curEpNum))
             bestSeasonNZB.episodes = epObjs
-            queueItem.results = [bestSeasonNZB]
-            return queueItem
+
+            return [bestSeasonNZB]
 
         elif not anyWanted:
             logger.log(
@@ -612,7 +613,7 @@
                 del foundResults[provider.name][epNum]
 
         # of all the single ep results narrow it down to the best one for each episode
-        queueItem.results += set(multiResults.values())
+        finalResults += set(multiResults.values())
         for curEp in foundResults[provider.name]:
             if curEp in (MULTI_EP_RESULT, SEASON_RESULT):
                 continue
@@ -628,20 +629,20 @@
 
             # add result if its not a duplicate and
             found = False
-            for i, result in enumerate(queueItem.results):
+            for i, result in enumerate(finalResults):
                 for bestResultEp in bestResult.episodes:
                     if bestResultEp in result.episodes:
                         if result.quality < bestResult.quality:
-                            queueItem.results.pop(i)
+                            finalResults.pop(i)
                         else:
                             found = True
            if not found:
-                queueItem.results += [bestResult]
+                finalResults += [bestResult]
 
         # check that we got all the episodes we wanted first before doing a match and snatch
         wantedEpCount = 0
         for wantedEp in episodes:
-            for result in queueItem.results:
+            for result in finalResults:
                 if wantedEp in result.episodes and isFinalResult(result):
                     wantedEpCount += 1
 
@@ -649,4 +650,4 @@
         if providerNum == len(providers) or wantedEpCount == len(episodes):
             break
 
-    return queueItem
\ No newline at end of file
+    return finalResults
\ No newline at end of file
diff --git a/sickbeard/search_queue.py b/sickbeard/search_queue.py
index 13bb330b..3d8ffafd 100644
--- a/sickbeard/search_queue.py
+++ b/sickbeard/search_queue.py
@@ -81,29 +81,33 @@ class SearchQueue(generic_queue.GenericQueue):
             logger.log(u"Not adding item, it's already in the queue", logger.DEBUG)
 
     def snatch_item(self, item):
-        for result in item.results:
+        for result in item:
             # just use the first result for now
             logger.log(u"Downloading " + result.name + " from " + result.provider.name)
             item.success = search.snatchEpisode(result)
+            # give the CPU a break
             time.sleep(2)
+        # return results of snatch
         return item
 
 class DailySearchQueueItem(generic_queue.QueueItem):
     def __init__(self, show, segment):
         generic_queue.QueueItem.__init__(self, 'Daily Search', DAILY_SEARCH)
         self.priority = generic_queue.QueuePriorities.HIGH
-        self.thread_name = 'DAILYSEARCH-' + str(show.indexerid) + '-'
+        self.thread_name = 'DAILYSEARCH-' + str(show.indexerid)
         self.show = show
         self.segment = segment
-        self.results = []
 
     def execute(self):
         generic_queue.QueueItem.execute(self)
 
         logger.log("Beginning daily search for [" + self.show.name + "]")
-        foundResults = search.searchForNeededEpisodes(self)
+        foundResults = search.searchForNeededEpisodes(self.segment)
+
+        # reset thread back to original name
+        threading.currentThread().name = self.thread_name
 
         if not len(foundResults):
             logger.log(u"No needed episodes found during daily search for [" + self.show.name + "]")
@@ -117,11 +121,10 @@ class ManualSearchQueueItem(generic_queue.QueueItem):
     def __init__(self, show, segment):
         generic_queue.QueueItem.__init__(self, 'Manual Search', MANUAL_SEARCH)
         self.priority = generic_queue.QueuePriorities.HIGH
-        self.thread_name = 'MANUAL-' + str(show.indexerid) + '-'
+        self.thread_name = 'MANUAL-' + str(show.indexerid)
         self.success = None
         self.show = show
         self.segment = segment
-        self.results = []
 
     def execute(self):
         generic_queue.QueueItem.execute(self)
@@ -130,10 +133,13 @@
 
         try:
             logger.log("Beginning manual search for [" + self.segment.prettyName() + "]")
-            searchResult = search.searchProviders(queueItem, self.show, self.segment.season, [self.segment], True)
+            searchResult = search.searchProviders(self.show, self.segment.season, [self.segment], True)
+
+            # reset thread back to original name
+            threading.currentThread().name = self.thread_name
 
             if searchResult:
-                queueItem = SearchQueue().snatch_item(searchResult)
+                self.success = SearchQueue().snatch_item(searchResult)
             else:
                 ui.notifications.message('No downloads were found',
                                          "Couldn't find a download for %s" % self.segment.prettyName())
@@ -143,17 +149,16 @@
         except Exception:
             logger.log(traceback.format_exc(), logger.DEBUG)
 
-        generic_queue.QueueItem.finish(queueItem)
+        generic_queue.QueueItem.finish(self)
 
 class BacklogQueueItem(generic_queue.QueueItem):
     def __init__(self, show, segment):
         generic_queue.QueueItem.__init__(self, 'Backlog', BACKLOG_SEARCH)
         self.priority = generic_queue.QueuePriorities.LOW
-        self.thread_name = 'BACKLOG-' + str(show.indexerid) + '-'
+        self.thread_name = 'BACKLOG-' + str(show.indexerid)
         self.success = None
         self.show = show
         self.segment = segment
-        self.results = []
 
     def execute(self):
         generic_queue.QueueItem.execute(self)
@@ -165,7 +170,10 @@
 
         try:
             logger.log("Beginning backlog search for [" + self.show.name + "]")
-            searchResult = search.searchProviders(self, self.show, season, wantedEps, False)
+            searchResult = search.searchProviders(self.show, season, wantedEps, False)
+
+            # reset thread back to original name
+            threading.currentThread().name = self.thread_name
 
             if searchResult:
                 SearchQueue().snatch_item(searchResult)
@@ -181,11 +189,10 @@ class FailedQueueItem(generic_queue.QueueItem):
     def __init__(self, show, segment):
         generic_queue.QueueItem.__init__(self, 'Retry', FAILED_SEARCH)
         self.priority = generic_queue.QueuePriorities.HIGH
-        self.thread_name = 'RETRY-' + str(show.indexerid) + '-'
+        self.thread_name = 'RETRY-' + str(show.indexerid)
         self.show = show
         self.segment = segment
         self.success = None
-        self.results = []
 
     def execute(self):
         generic_queue.QueueItem.execute(self)
@@ -208,7 +215,10 @@
 
         if len(failed_episodes):
             try:
-                searchResult = search.searchProviders(self, self.show, failed_episodes[0].season, failed_episodes, True)
+                searchResult = search.searchProviders(self.show, failed_episodes[0].season, failed_episodes, True)
+
+                # reset thread back to original name
+                threading.currentThread().name = self.thread_name
 
                 if searchResult:
                     SearchQueue().snatch_item(searchResult)
diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py
index 2447dba5..efc1e302 100644
--- a/sickbeard/tvcache.py
+++ b/sickbeard/tvcache.py
@@ -87,7 +87,7 @@ class TVCache():
 
         curDate = datetime.date.today() - datetime.timedelta(weeks=1)
 
-        myDB.action("DELETE FROM [" + self.providerID + "] WHERE time < ?", [curDate.toordinal()])
+        myDB.action("DELETE FROM [" + self.providerID + "] WHERE time < ?", [int(time.mktime(curDate.timetuple()))])
 
 
     def _getRSSData(self):
@@ -102,6 +102,11 @@
 
         return True
 
     def updateCache(self):
+
+        # delete anything older than 7 days
+        logger.log(u"Clearing " + self.provider.name + " cache")
+        self._clearCache()
+
         if not self.shouldUpdate():
             return
 
@@ -114,10 +119,6 @@
         else:
             return []
 
-        # now that we've loaded the current RSS feed lets delete the old cache
-        logger.log(u"Clearing " + self.provider.name + " cache and updating with new information")
-        self._clearCache()
-
         if self._checkAuth(data):
             items = data.entries
             cl = []
@@ -239,7 +240,6 @@
                            {'time': int(time.mktime(toDate.timetuple()))},
                            {'provider': self.providerID})
 
-
     lastUpdate = property(_getLastUpdate)
     lastSearch = property(_getLastSearch)
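
Note on the resulting flow: every updateCache() above now follows the same clear-then-fetch order, with _clearCache() running before the shouldUpdate() throttle. That ordering is what the commit message describes: cached results live for one week, and pruning happens even on runs where the provider is not re-fetched. A minimal sketch of the pattern (simplified; update_cache, should_update, fetch_rss and add_rows are hypothetical stand-ins for TVCache.updateCache(), shouldUpdate(), _getRSSData() and the per-provider insert loops):

    import datetime
    import time

    def update_cache(db, provider_id, should_update, fetch_rss, add_rows):
        # 1) prune first: drop rows older than one week, comparing epoch
        #    seconds against epoch seconds (see the tvcache.py hunk above)
        cutoff = datetime.date.today() - datetime.timedelta(weeks=1)
        db.action("DELETE FROM [" + provider_id + "] WHERE time < ?",
                  [int(time.mktime(cutoff.timetuple()))])

        # 2) only then consult the update throttle; stale rows are gone
        #    even when this early return fires
        if not should_update():
            return

        # 3) finally fetch the provider's RSS feed and store the results
        add_rows(fetch_rss())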
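
On the one-line tvcache.py change from [curDate.toordinal()] to [int(time.mktime(curDate.timetuple()))]: cache rows appear to be stamped with epoch seconds (the same int(time.mktime(...)) expression used for lastUpdate in the final hunk), while toordinal() returns a day count. A 2014 ordinal is roughly 735,000 against epoch values around 1.4 billion, so "time < ?" with an ordinal cutoff could never match and old rows were never pruned. A quick illustration, assuming a 2014 date (exact epoch values depend on the local timezone):

    import datetime
    import time

    cutoff_date = datetime.date(2014, 5, 11)  # one week before this commit

    ordinal_cutoff = cutoff_date.toordinal()                  # 735364 (days)
    epoch_cutoff = int(time.mktime(cutoff_date.timetuple()))  # ~1399790000 (seconds)

    # a months-old cache row, stamped the way tvcache stamps rows
    row_time = int(time.mktime(datetime.date(2014, 1, 1).timetuple()))

    print(row_time < ordinal_cutoff)  # False -- the old comparison never pruned
    print(row_time < epoch_cutoff)    # True  -- the patched comparison does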