Fixed dupe issues with backlog searches.

Set cache to store results for 1 week, and remove anything older than 1 week only if the daily search has searched those results beforehand.
This commit is contained in:
echel0n 2014-05-18 08:33:31 -07:00
parent f8a8f4c5b4
commit c350c0fdf9
18 changed files with 123 additions and 86 deletions

View File

@ -39,9 +39,19 @@ class DailySearcher():
self.amActive = False
def run(self):
# remove names from cache that link back to active shows that we watch
sickbeard.name_cache.syncNameCache()
logger.log(u"Updating RSS cache ...")
origThreadName = threading.currentThread().name
providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]
for curProviderCount, curProvider in enumerate(providers):
threading.currentThread().name = origThreadName + " :: [" + curProvider.name + "]"
curProvider.cache.updateCache()
logger.log(u"Checking to see if any shows have wanted episodes available for the last week ...")
curDate = datetime.date.today() - datetime.timedelta(weeks=1)
@ -80,6 +90,9 @@ class DailySearcher():
else:
todaysEps[show].append(ep)
# reset thread name back to original
threading.currentThread().name = origThreadName
if len(todaysEps):
for show in todaysEps:
segment = todaysEps[show]

View File

@ -312,6 +312,10 @@ class BTNCache(tvcache.TVCache):
def updateCache(self):
# delete anything older then 7 days
logger.log(u"Clearing " + self.provider.name + " cache")
self._clearCache()
if not self.shouldUpdate():
return
@ -325,9 +329,6 @@ class BTNCache(tvcache.TVCache):
else:
return []
logger.log(u"Clearing " + self.provider.name + " cache and updating with new information")
self._clearCache()
if self._checkAuth(data):
# By now we know we've got data and no auth errors, all we need to do is put it in the database
cl = []

View File

@ -252,10 +252,10 @@ class GenericProvider:
u"Incomplete Indexer <-> Scene mapping detected for " + epObj.prettyName() + ", skipping search!")
continue
cacheResult = self.cache.searchCache([epObj], manualSearch)
if len(cacheResult):
results.update({epObj.episode:cacheResult[epObj]})
continue
#cacheResult = self.cache.searchCache([epObj], manualSearch)
#if len(cacheResult):
# results.update({epObj.episode:cacheResult[epObj]})
# continue
if search_mode == 'sponly':
for curString in self._get_season_search_strings(epObj):
@ -273,8 +273,8 @@ class GenericProvider:
searchItems[epObj] = itemList
# if we have cached results return them.
if len(results):
return results
#if len(results):
# return results
for ep_obj in searchItems:
for item in searchItems[ep_obj]:

View File

@ -184,6 +184,10 @@ class HDBitsCache(tvcache.TVCache):
def updateCache(self):
# delete anything older then 7 days
logger.log(u"Clearing " + self.provider.name + " cache")
self._clearCache()
if not self.shouldUpdate():
return
@ -197,9 +201,6 @@ class HDBitsCache(tvcache.TVCache):
else:
return []
logger.log(u"Clearing " + self.provider.name + " cache and updating with new information")
self._clearCache()
parsedJSON = helpers.parse_json(data)
if parsedJSON is None:

View File

@ -341,6 +341,10 @@ class HDTorrentsCache(tvcache.TVCache):
def updateCache(self):
# delete anything older then 7 days
logger.log(u"Clearing " + self.provider.name + " cache")
self._clearCache()
if not self.shouldUpdate():
return
@ -352,9 +356,6 @@ class HDTorrentsCache(tvcache.TVCache):
else:
return []
logger.log(u"Clearing " + self.provider.name + " cache and updating with new information")
self._clearCache()
cl = []
for result in rss_results:

View File

@ -284,6 +284,10 @@ class IPTorrentsCache(tvcache.TVCache):
def updateCache(self):
# delete anything older then 7 days
logger.log(u"Clearing " + self.provider.name + " cache")
self._clearCache()
if not self.shouldUpdate():
return
@ -295,9 +299,6 @@ class IPTorrentsCache(tvcache.TVCache):
else:
return []
logger.log(u"Clearing " + self.provider.name + " cache and updating with new information")
self._clearCache()
cl = []
for result in rss_results:

View File

@ -415,6 +415,10 @@ class KATCache(tvcache.TVCache):
def updateCache(self):
# delete anything older then 7 days
logger.log(u"Clearing " + self.provider.name + " cache")
self._clearCache()
if not self.shouldUpdate():
return
@ -426,9 +430,6 @@ class KATCache(tvcache.TVCache):
else:
return []
logger.log(u"Clearing " + self.provider.name + " cache and updating with new information")
self._clearCache()
cl = []
for result in rss_results:
item = (result[0], result[1])

View File

@ -273,6 +273,11 @@ class NewznabCache(tvcache.TVCache):
return self.provider._checkAuthFromData(data)
def updateCache(self):
# delete anything older then 7 days
logger.log(u"Clearing " + self.provider.name + " cache")
self._clearCache()
if not self.shouldUpdate():
return
@ -285,10 +290,6 @@ class NewznabCache(tvcache.TVCache):
else:
return []
# now that we've loaded the current RSS feed lets delete the old cache
logger.log(u"Clearing " + self.provider.name + " cache and updating with new information")
self._clearCache()
if self._checkAuth(data):
items = data.entries
ql = []

View File

@ -333,6 +333,10 @@ class NextGenCache(tvcache.TVCache):
def updateCache(self):
# delete anything older then 7 days
logger.log(u"Clearing " + self.provider.name + " cache")
self._clearCache()
if not self.shouldUpdate():
return
@ -344,9 +348,6 @@ class NextGenCache(tvcache.TVCache):
else:
return []
logger.log(u"Clearing " + self.provider.name + " cache and updating with new information")
self._clearCache()
cl = []
for result in rss_results:

View File

@ -306,6 +306,10 @@ class PublicHDCache(tvcache.TVCache):
def updateCache(self):
# delete anything older then 7 days
logger.log(u"Clearing " + self.provider.name + " cache")
self._clearCache()
if not self.shouldUpdate():
return
@ -317,9 +321,6 @@ class PublicHDCache(tvcache.TVCache):
else:
return []
logger.log(u"Clearing " + self.provider.name + " cache and updating with new information")
self._clearCache()
ql = []
for result in rss_results:

View File

@ -326,6 +326,10 @@ class SCCCache(tvcache.TVCache):
def updateCache(self):
# delete anything older then 7 days
logger.log(u"Clearing " + self.provider.name + " cache")
self._clearCache()
if not self.shouldUpdate():
return
@ -337,9 +341,6 @@ class SCCCache(tvcache.TVCache):
else:
return []
logger.log(u"Clearing " + self.provider.name + " cache and updating with new information")
self._clearCache()
cl = []
for result in rss_results:

View File

@ -262,6 +262,10 @@ class SpeedCDCache(tvcache.TVCache):
def updateCache(self):
# delete anything older then 7 days
logger.log(u"Clearing " + self.provider.name + " cache")
self._clearCache()
if not self.shouldUpdate():
return
@ -273,9 +277,6 @@ class SpeedCDCache(tvcache.TVCache):
else:
return []
logger.log(u"Clearing " + self.provider.name + " cache and updating with new information")
self._clearCache()
ql = []
for result in rss_results:

View File

@ -395,6 +395,10 @@ class ThePirateBayCache(tvcache.TVCache):
def updateCache(self):
# delete anything older then 7 days
logger.log(u"Clearing " + self.provider.name + " cache")
self._clearCache()
if not self.shouldUpdate():
return
@ -406,9 +410,6 @@ class ThePirateBayCache(tvcache.TVCache):
else:
return []
logger.log(u"Clearing " + self.provider.name + " cache and updating with new information")
self._clearCache()
cl = []
for result in rss_results:

View File

@ -289,6 +289,10 @@ class TorrentDayCache(tvcache.TVCache):
def updateCache(self):
# delete anything older then 7 days
logger.log(u"Clearing " + self.provider.name + " cache")
self._clearCache()
if not self.shouldUpdate():
return
@ -300,9 +304,6 @@ class TorrentDayCache(tvcache.TVCache):
else:
return []
logger.log(u"Clearing " + self.provider.name + " cache and updating with new information")
self._clearCache()
cl = []
for result in rss_results:

View File

@ -285,6 +285,10 @@ class TorrentLeechCache(tvcache.TVCache):
def updateCache(self):
# delete anything older then 7 days
logger.log(u"Clearing " + self.provider.name + " cache")
self._clearCache()
if not self.shouldUpdate():
return
@ -296,9 +300,6 @@ class TorrentLeechCache(tvcache.TVCache):
else:
return []
logger.log(u"Clearing " + self.provider.name + " cache and updating with new information")
self._clearCache()
cl = []
for result in rss_results:

View File

@ -316,22 +316,20 @@ def filterSearchResults(show, results):
return foundResults
def searchForNeededEpisodes(queueItem):
def searchForNeededEpisodes(episodes):
foundResults = {}
didSearch = False
# ask all providers for any episodes it finds
origThreadName = threading.currentThread().name
providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]
for curProviderCount, curProvider in enumerate(providers):
threading.currentThread().name = queueItem.thread_name + "[" + curProvider.name + "]"
threading.currentThread().name = origThreadName + " :: [" + curProvider.name + "]"
try:
logger.log(u"Updating RSS cache ...")
curProvider.cache.updateCache()
logger.log(u"Searching RSS cache ...")
curFoundResults = curProvider.searchRSS(queueItem.segment)
curFoundResults = curProvider.searchRSS(episodes)
except exceptions.AuthException, e:
logger.log(u"Authentication error: " + ex(e), logger.ERROR)
if curProviderCount != len(providers):
@ -382,7 +380,10 @@ def searchForNeededEpisodes(queueItem):
return foundResults.values() if len(foundResults) else {}
def searchProviders(queueItem, show, season, episodes, manualSearch=False):
def searchProviders(show, season, episodes, manualSearch=False):
foundResults = {}
finalResults = []
# check if we want to search for season packs instead of just season/episode
seasonSearch = False
seasonEps = show.getAllEpisodes(season)
@ -394,17 +395,17 @@ def searchProviders(queueItem, show, season, episodes, manualSearch=False):
if not len(providers):
logger.log(u"No NZB/Torrent providers found or enabled in the sickrage config. Please check your settings.",
logger.ERROR)
return queueItem
return
foundResults = {}
origThreadName = threading.currentThread().name
for providerNum, provider in enumerate(providers):
threading.currentThread().name = queueItem.thread_name + ":[" + provider.name + "]"
threading.currentThread().name = origThreadName + " :: [" + provider.name + "]"
foundResults.setdefault(provider.name, {})
searchCount = 0
search_mode = 'eponly'
if seasonSearch and provider.search_mode == 'sponly':
search_mode = provider.search_mode
search_mode = provider.search_mode
while(True):
searchCount += 1
@ -492,8 +493,8 @@ def searchProviders(queueItem, show, season, episodes, manualSearch=False):
for curEpNum in allEps:
epObjs.append(show.getEpisode(season, curEpNum))
bestSeasonNZB.episodes = epObjs
queueItem.results = [bestSeasonNZB]
return queueItem
return [bestSeasonNZB]
elif not anyWanted:
logger.log(
@ -612,7 +613,7 @@ def searchProviders(queueItem, show, season, episodes, manualSearch=False):
del foundResults[provider.name][epNum]
# of all the single ep results narrow it down to the best one for each episode
queueItem.results += set(multiResults.values())
finalResults += set(multiResults.values())
for curEp in foundResults[provider.name]:
if curEp in (MULTI_EP_RESULT, SEASON_RESULT):
continue
@ -628,20 +629,20 @@ def searchProviders(queueItem, show, season, episodes, manualSearch=False):
# add result if its not a duplicate and
found = False
for i, result in enumerate(queueItem.results):
for i, result in enumerate(finalResults):
for bestResultEp in bestResult.episodes:
if bestResultEp in result.episodes:
if result.quality < bestResult.quality:
queueItem.results.pop(i)
finalResults.pop(i)
else:
found = True
if not found:
queueItem.results += [bestResult]
finalResults += [bestResult]
# check that we got all the episodes we wanted first before doing a match and snatch
wantedEpCount = 0
for wantedEp in episodes:
for result in queueItem.results:
for result in finalResults:
if wantedEp in result.episodes and isFinalResult(result):
wantedEpCount += 1
@ -649,4 +650,4 @@ def searchProviders(queueItem, show, season, episodes, manualSearch=False):
if providerNum == len(providers) or wantedEpCount == len(episodes):
break
return queueItem
return finalResults

View File

@ -81,29 +81,33 @@ class SearchQueue(generic_queue.GenericQueue):
logger.log(u"Not adding item, it's already in the queue", logger.DEBUG)
def snatch_item(self, item):
for result in item.results:
for result in item:
# just use the first result for now
logger.log(u"Downloading " + result.name + " from " + result.provider.name)
item.success = search.snatchEpisode(result)
# give the CPU a break
time.sleep(2)
# return results of snatch
return item
class DailySearchQueueItem(generic_queue.QueueItem):
def __init__(self, show, segment):
generic_queue.QueueItem.__init__(self, 'Daily Search', DAILY_SEARCH)
self.priority = generic_queue.QueuePriorities.HIGH
self.thread_name = 'DAILYSEARCH-' + str(show.indexerid) + '-'
self.thread_name = 'DAILYSEARCH-' + str(show.indexerid)
self.show = show
self.segment = segment
self.results = []
def execute(self):
generic_queue.QueueItem.execute(self)
logger.log("Beginning daily search for [" + self.show.name + "]")
foundResults = search.searchForNeededEpisodes(self)
foundResults = search.searchForNeededEpisodes(self.segment)
# reset thread back to original name
threading.currentThread().name = self.thread_name
if not len(foundResults):
logger.log(u"No needed episodes found during daily search for [" + self.show.name + "]")
@ -117,11 +121,10 @@ class ManualSearchQueueItem(generic_queue.QueueItem):
def __init__(self, show, segment):
generic_queue.QueueItem.__init__(self, 'Manual Search', MANUAL_SEARCH)
self.priority = generic_queue.QueuePriorities.HIGH
self.thread_name = 'MANUAL-' + str(show.indexerid) + '-'
self.thread_name = 'MANUAL-' + str(show.indexerid)
self.success = None
self.show = show
self.segment = segment
self.results = []
def execute(self):
generic_queue.QueueItem.execute(self)
@ -130,10 +133,13 @@ class ManualSearchQueueItem(generic_queue.QueueItem):
try:
logger.log("Beginning manual search for [" + self.segment.prettyName() + "]")
searchResult = search.searchProviders(queueItem, self.show, self.segment.season, [self.segment], True)
searchResult = search.searchProviders(self.show, self.segment.season, [self.segment], True)
# reset thread back to original name
threading.currentThread().name = self.thread_name
if searchResult:
queueItem = SearchQueue().snatch_item(searchResult)
self.success = SearchQueue().snatch_item(searchResult)
else:
ui.notifications.message('No downloads were found',
"Couldn't find a download for <i>%s</i>" % self.segment.prettyName())
@ -143,17 +149,16 @@ class ManualSearchQueueItem(generic_queue.QueueItem):
except Exception:
logger.log(traceback.format_exc(), logger.DEBUG)
generic_queue.QueueItem.finish(queueItem)
generic_queue.QueueItem.finish(self)
class BacklogQueueItem(generic_queue.QueueItem):
def __init__(self, show, segment):
generic_queue.QueueItem.__init__(self, 'Backlog', BACKLOG_SEARCH)
self.priority = generic_queue.QueuePriorities.LOW
self.thread_name = 'BACKLOG-' + str(show.indexerid) + '-'
self.thread_name = 'BACKLOG-' + str(show.indexerid)
self.success = None
self.show = show
self.segment = segment
self.results = []
def execute(self):
generic_queue.QueueItem.execute(self)
@ -165,7 +170,10 @@ class BacklogQueueItem(generic_queue.QueueItem):
try:
logger.log("Beginning backlog search for [" + self.show.name + "]")
searchResult = search.searchProviders(self, self.show, season, wantedEps, False)
searchResult = search.searchProviders(self.show, season, wantedEps, False)
# reset thread back to original name
threading.currentThread().name = self.thread_name
if searchResult:
SearchQueue().snatch_item(searchResult)
@ -181,11 +189,10 @@ class FailedQueueItem(generic_queue.QueueItem):
def __init__(self, show, segment):
generic_queue.QueueItem.__init__(self, 'Retry', FAILED_SEARCH)
self.priority = generic_queue.QueuePriorities.HIGH
self.thread_name = 'RETRY-' + str(show.indexerid) + '-'
self.thread_name = 'RETRY-' + str(show.indexerid)
self.show = show
self.segment = segment
self.success = None
self.results = []
def execute(self):
generic_queue.QueueItem.execute(self)
@ -208,7 +215,10 @@ class FailedQueueItem(generic_queue.QueueItem):
if len(failed_episodes):
try:
searchResult = search.searchProviders(self, self.show, failed_episodes[0].season, failed_episodes, True)
searchResult = search.searchProviders(self.show, failed_episodes[0].season, failed_episodes, True)
# reset thread back to original name
threading.currentThread().name = self.thread_name
if searchResult:
SearchQueue().snatch_item(searchResult)

View File

@ -87,7 +87,7 @@ class TVCache():
curDate = datetime.date.today() - datetime.timedelta(weeks=1)
myDB.action("DELETE FROM [" + self.providerID + "] WHERE time < ?", [curDate.toordinal()])
myDB.action("DELETE FROM [" + self.providerID + "] WHERE time < ?", [int(time.mktime(curDate.timetuple()))])
def _getRSSData(self):
@ -102,6 +102,11 @@ class TVCache():
return True
def updateCache(self):
# delete anything older then 7 days
logger.log(u"Clearing " + self.provider.name + " cache")
self._clearCache()
if not self.shouldUpdate():
return
@ -114,10 +119,6 @@ class TVCache():
else:
return []
# now that we've loaded the current RSS feed lets delete the old cache
logger.log(u"Clearing " + self.provider.name + " cache and updating with new information")
self._clearCache()
if self._checkAuth(data):
items = data.entries
cl = []
@ -239,7 +240,6 @@ class TVCache():
{'time': int(time.mktime(toDate.timetuple()))},
{'provider': self.providerID})
lastUpdate = property(_getLastUpdate)
lastSearch = property(_getLastSearch)