Threads the dailysearcher process for each provider. This allows dailysearcher to continue other operations without having to wait for providers with slow network operations.

This commit is contained in:
Adam 2014-09-22 13:41:29 +08:00
parent b0d550b3fb
commit 0aedf52997
1 changed file with 15 additions and 12 deletions
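In essence, the daily search now refreshes every provider cache concurrently and only then runs the per-provider RSS searches. A minimal sketch of that pattern, reusing the names from the diff below (provider objects exposing cache.updateCache and name, plus the saved origThreadName) and omitting the rest of the SickRage plumbing:

    import threading

    def update_provider_caches(providers, origThreadName):
        # spawn a separate thread per provider so one provider's slow network
        # I/O does not hold up the cache updates of the others
        threads = []
        for curProvider in providers:
            threads.append(threading.Thread(target=curProvider.cache.updateCache,
                                            name=origThreadName + " :: [" + curProvider.name + "]"))
            threads[-1].start()

        # wait for every cache update to finish before the RSS searches run
        for t in threads:
            t.join()

The join() calls keep the subsequent search loop simple: by the time searchRSS is called for a provider, its cache has already been refreshed.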


@@ -363,6 +363,7 @@ def searchForNeededEpisodes():
    didSearch = False
    origThreadName = threading.currentThread().name
    threads = []
    show_list = sickbeard.showList
    fromDate = datetime.date.fromordinal(1)
@@ -377,20 +378,20 @@ def searchForNeededEpisodes():
    providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive() and x.enable_daily]
    for curProvider in providers:
        # spawn separate threads for each provider so we don't need to wait for providers with slow network operation
        threads.append(threading.Thread(target=curProvider.cache.updateCache, name=origThreadName +
                                        " :: [" + curProvider.name + "]"))
        # start the thread we just created
        threads[-1].start()
    # wait for all threads to finish
    for t in threads:
        t.join()
    for curProvider in providers:
        threading.currentThread().name = origThreadName + " :: [" + curProvider.name + "]"
        try:
            curProvider.cache.updateCache()
            curFoundResults = curProvider.searchRSS(episodes)
        except exceptions.AuthException, e:
            logger.log(u"Authentication error: " + ex(e), logger.ERROR)
            continue
        except Exception, e:
            logger.log(u"Error while searching " + curProvider.name + ", skipping: " + ex(e), logger.ERROR)
            logger.log(traceback.format_exc(), logger.DEBUG)
            continue
        finally:
            threading.currentThread().name = origThreadName
        curFoundResults = curProvider.searchRSS(episodes)
        didSearch = True
@@ -430,6 +431,8 @@ def searchForNeededEpisodes():
            foundResults[curEp] = bestResult
    threading.currentThread().name = origThreadName
    if not didSearch:
        logger.log(
            u"No NZB/Torrent providers found or enabled in the sickrage config for daily searches. Please check your settings.",