diff --git a/gui/slick/interfaces/default/displayShow.tmpl b/gui/slick/interfaces/default/displayShow.tmpl
index ed5963a4..a13a9b2a 100644
--- a/gui/slick/interfaces/default/displayShow.tmpl
+++ b/gui/slick/interfaces/default/displayShow.tmpl
@@ -53,9 +53,11 @@
#else
- #for $country in $show.imdb_info['country_codes'].split('|')
-
- #end for
+ #if 'country_codes' in $show.imdb_info:
+ #for $country in $show.imdb_info['country_codes'].split('|')
+
+ #end for
+ #end if
($show.imdb_info['year']) - $show.imdb_info['runtimes'] min - $show.imdb_info['genres'].replace('|',' | ')
diff --git a/sickbeard/__init__.py b/sickbeard/__init__.py
index 0b7a5ea7..f3eb9022 100644
--- a/sickbeard/__init__.py
+++ b/sickbeard/__init__.py
@@ -79,6 +79,7 @@ showUpdateScheduler = None
versionCheckScheduler = None
showQueueScheduler = None
searchQueueScheduler = None
+snatchQueueScheduler = None
properFinderScheduler = None
autoPostProcesserScheduler = None
subtitlesFinderScheduler = None
@@ -489,7 +490,7 @@ def initialize(consoleLogging=True):
USE_PUSHBULLET, PUSHBULLET_NOTIFY_ONSNATCH, PUSHBULLET_NOTIFY_ONDOWNLOAD, PUSHBULLET_NOTIFY_ONSUBTITLEDOWNLOAD, PUSHBULLET_API, PUSHBULLET_DEVICE, \
versionCheckScheduler, VERSION_NOTIFY, AUTO_UPDATE, PROCESS_AUTOMATICALLY, UNPACK, \
KEEP_PROCESSED_DIR, PROCESS_METHOD, TV_DOWNLOAD_DIR, MIN_SEARCH_FREQUENCY, DEFAULT_UPDATE_FREQUENCY,MIN_UPDATE_FREQUENCY,UPDATE_FREQUENCY,\
- showQueueScheduler, searchQueueScheduler, ROOT_DIRS, CACHE_DIR, ACTUAL_CACHE_DIR, \
+ showQueueScheduler, searchQueueScheduler, snatchQueueScheduler, ROOT_DIRS, CACHE_DIR, ACTUAL_CACHE_DIR, \
NAMING_PATTERN, NAMING_MULTI_EP, NAMING_FORCE_FOLDERS, NAMING_ABD_PATTERN, NAMING_CUSTOM_ABD, NAMING_SPORTS_PATTERN, NAMING_CUSTOM_SPORTS, NAMING_STRIP_YEAR, \
RENAME_EPISODES, properFinderScheduler, PROVIDER_ORDER, autoPostProcesserScheduler, \
WOMBLE, OMGWTFNZBS, OMGWTFNZBS_USERNAME, OMGWTFNZBS_APIKEY, providerList, newznabProviderList, torrentRssProviderList, \
@@ -1124,7 +1125,7 @@ def start():
def halt():
global __INITIALIZED__, currentSearchScheduler, backlogSearchScheduler, showUpdateScheduler, \
showQueueScheduler, properFinderScheduler, autoPostProcesserScheduler, searchQueueScheduler, \
- subtitlesFinderScheduler, started, \
+ subtitlesFinderScheduler, snatchQueueScheduler, started, \
traktWatchListCheckerSchedular
with INIT_LOCK:
@@ -1177,6 +1178,13 @@ def halt():
except:
pass
+ snatchQueueScheduler.abort = True
+ logger.log(u"Waiting for the SNATCHQUEUE thread to exit")
+ try:
+ snatchQueueScheduler.thread.join(10)
+ except:
+ pass
+
autoPostProcesserScheduler.abort = True
logger.log(u"Waiting for the POSTPROCESSER thread to exit")
try:
diff --git a/sickbeard/browser.py b/sickbeard/browser.py
index 3f60f36d..631255c6 100644
--- a/sickbeard/browser.py
+++ b/sickbeard/browser.py
@@ -56,7 +56,6 @@ def foldersAtPath(path, includeParent=False):
# walk up the tree until we find a valid path
while path and not os.path.isdir(path):
- time.sleep(0.01)
if path == os.path.dirname(path):
path = ''
break
diff --git a/sickbeard/config.py b/sickbeard/config.py
index 7ab0cc59..01c08cfb 100644
--- a/sickbeard/config.py
+++ b/sickbeard/config.py
@@ -399,7 +399,6 @@ class ConfigMigrator():
sickbeard.CONFIG_VERSION = self.config_version
while self.config_version < self.expected_config_version:
- time.sleep(0.01)
next_version = self.config_version + 1
if next_version in self.migration_names:
diff --git a/sickbeard/db.py b/sickbeard/db.py
index 75d5f142..c48829c1 100644
--- a/sickbeard/db.py
+++ b/sickbeard/db.py
@@ -79,7 +79,6 @@ class DBConnection:
attempt = 0
while attempt < 5:
- time.sleep(0.01)
try:
if args == None:
logger.log(self.filename + ": " + query, logger.DB)
@@ -119,7 +118,6 @@ class DBConnection:
attempt = 0
while attempt < 5:
- time.sleep(0.01)
try:
for qu in querylist:
if len(qu) == 1:
@@ -164,7 +162,6 @@ class DBConnection:
attempt = 0
while attempt < 5:
- time.sleep(0.01)
try:
if args == None:
logger.log(self.filename + ": " + query, logger.DB)
diff --git a/sickbeard/generic_queue.py b/sickbeard/generic_queue.py
index 0beeb7ae..4fb2c59f 100644
--- a/sickbeard/generic_queue.py
+++ b/sickbeard/generic_queue.py
@@ -18,30 +18,22 @@
import datetime
import threading
+import Queue
from sickbeard import logger
-
-
class QueuePriorities:
LOW = 10
NORMAL = 20
HIGH = 30
-
-class GenericQueue(object):
+class GenericQueue(object):
def __init__(self):
-
self.currentItem = None
- self.queue = []
-
self.thread = None
-
self.queue_name = "QUEUE"
-
self.min_priority = 0
-
- self.currentItem = None
+ self.queue = Queue.PriorityQueue()
def pause(self):
logger.log(u"Pausing queue")
@@ -53,11 +45,13 @@ class GenericQueue(object):
def add_item(self, item):
item.added = datetime.datetime.now()
- self.queue.append(item)
-
+        self.queue.put(item)  # NOTE(review): the dropped 2nd arg was being passed as Queue.put's 'block' flag, not a priority
return item
- def run(self):
+ def run(self, queue=None):
+ # dynamically set queue
+ if queue:
+ self.queue = queue
# only start a new task if one isn't already going
if self.thread == None or self.thread.isAlive() == False:
@@ -67,55 +61,25 @@ class GenericQueue(object):
self.currentItem.finish()
self.currentItem = None
- # if there's something in the queue then run it in a thread and take it out of the queue
- if len(self.queue) > 0:
-
- # sort by priority
- def sorter(x, y):
- """
- Sorts by priority descending then time ascending
- """
- if x.priority == y.priority:
- if y.added == x.added:
- return 0
- elif y.added < x.added:
- return 1
- elif y.added > x.added:
- return -1
- else:
- return y.priority - x.priority
-
- self.queue.sort(cmp=sorter)
-
- queueItem = self.queue[0]
-
+ if not self.queue.empty():
+ queueItem = self.queue.get()
if queueItem.priority < self.min_priority:
return
- # launch the queue item in a thread
- # TODO: improve thread name
threadName = self.queue_name + '-' + queueItem.get_thread_name()
self.thread = threading.Thread(None, queueItem.execute, threadName)
self.thread.start()
self.currentItem = queueItem
- # take it out of the queue
- del self.queue[0]
-
class QueueItem:
def __init__(self, name, action_id=0):
self.name = name
-
self.inProgress = False
-
self.priority = QueuePriorities.NORMAL
-
self.thread_name = None
-
self.action_id = action_id
-
self.added = None
def get_thread_name(self):
@@ -132,6 +96,4 @@ class QueueItem:
def finish(self):
"""Implementing Classes should call this"""
- self.inProgress = False
-
-
+ self.inProgress = False
\ No newline at end of file
diff --git a/sickbeard/helpers.py b/sickbeard/helpers.py
index b8e47315..95044899 100644
--- a/sickbeard/helpers.py
+++ b/sickbeard/helpers.py
@@ -551,8 +551,6 @@ def delete_empty_folders(check_empty_dir, keep_dir=None):
# as long as the folder exists and doesn't contain any files, delete it
while ek.ek(os.path.isdir, check_empty_dir) and check_empty_dir != keep_dir:
- time.sleep(0.01)
-
check_files = ek.ek(os.listdir, check_empty_dir)
if not check_files or (len(check_files) <= len(ignore_items) and all(
@@ -794,7 +792,6 @@ def backupVersionedFile(old_file, version):
new_file = old_file + '.' + 'v' + str(version)
while not ek.ek(os.path.isfile, new_file):
- time.sleep(0.01)
if not ek.ek(os.path.isfile, old_file):
logger.log(u"Not creating backup, " + old_file + " doesn't exist", logger.DEBUG)
break
diff --git a/sickbeard/logger.py b/sickbeard/logger.py
index 37319ac0..a1aac9b1 100644
--- a/sickbeard/logger.py
+++ b/sickbeard/logger.py
@@ -18,6 +18,7 @@
from __future__ import with_statement
+import time
import os
import sys
import threading
diff --git a/sickbeard/name_parser/parser.py b/sickbeard/name_parser/parser.py
index 614e97fa..bb92e1fd 100644
--- a/sickbeard/name_parser/parser.py
+++ b/sickbeard/name_parser/parser.py
@@ -212,8 +212,8 @@ class NameParser(object):
i = result = 0
for integer, numeral in numeral_map:
- time.sleep(0.01)
while n[i:i + len(numeral)] == numeral:
+                pass  # NOTE(review): removed time.sleep(1) here - sleeping once per matched numeral character stalls name parsing
result += integer
i += len(numeral)
diff --git a/sickbeard/network_timezones.py b/sickbeard/network_timezones.py
index 7e16db49..ab5d7172 100644
--- a/sickbeard/network_timezones.py
+++ b/sickbeard/network_timezones.py
@@ -170,7 +170,6 @@ def update_network_dict():
# list of sql commands to update the network_timezones table
ql = []
for cur_d, cur_t in d.iteritems():
- time.sleep(0.01)
h_k = old_d.has_key(cur_d)
if h_k and cur_t != old_d[cur_d]:
# update old record
diff --git a/sickbeard/properFinder.py b/sickbeard/properFinder.py
index b16bba48..a5764e6e 100644
--- a/sickbeard/properFinder.py
+++ b/sickbeard/properFinder.py
@@ -85,10 +85,9 @@ class ProperFinder():
# if they haven't been added by a different provider than add the proper to the list
for x in curPropers:
- time.sleep(0.01)
showObj = helpers.findCertainShow(sickbeard.showList, x.indexerid)
if not showObj:
- logger.log(u"Unable to find the show we watch with indexerID " + str(x.indexerid), logger.ERROR)
+ logger.log(u"Unable to find the show in our watch list " + str(x.name), logger.DEBUG)
continue
name = self._genericName(x.name)
diff --git a/sickbeard/providers/btn.py b/sickbeard/providers/btn.py
index e6412dfe..4f40ec08 100644
--- a/sickbeard/providers/btn.py
+++ b/sickbeard/providers/btn.py
@@ -321,7 +321,7 @@ class BTNCache(tvcache.TVCache):
# By now we know we've got data and no auth errors, all we need to do is put it in the database
cl = []
for item in data:
- time.sleep(0.01)
+
ci = self._parseItem(item)
if ci is not None:
cl.append(ci)
diff --git a/sickbeard/providers/generic.py b/sickbeard/providers/generic.py
index 731381d9..c3ca587f 100644
--- a/sickbeard/providers/generic.py
+++ b/sickbeard/providers/generic.py
@@ -240,18 +240,15 @@ class GenericProvider:
self.cache.updateCache()
for epObj in episodes:
- time.sleep(0.01)
+
cacheResult = self.cache.searchCache(epObj, manualSearch)
if len(cacheResult):
results.update(cacheResult)
continue
- if epObj.show.air_by_date:
- logger.log(u'Searching "%s" for "%s"' % (self.name, epObj.prettyABDName()))
- else:
- logger.log(
- u'Searching "%s" for "%s" as "%s"' % (self.name, epObj.prettyName(), epObj.prettySceneName()))
+ logger.log(
+ u'Searching "%s" for "%s" as "%s"' % (self.name, epObj.prettyName(), epObj.prettySceneName()))
if seasonSearch:
for curString in self._get_season_search_strings(epObj):
@@ -273,7 +270,7 @@ class GenericProvider:
for episode, items in searchItems.items():
for item in items:
- time.sleep(0.01)
+
(title, url) = self._get_title_and_url(item)
@@ -335,7 +332,7 @@ class GenericProvider:
logger.log(
u"Ignoring result " + title + " because we don't want an episode that is " + Quality.qualityStrings[
quality], logger.DEBUG)
- time.sleep(0.01)
+
continue
logger.log(u"Found result " + title + " at " + url, logger.DEBUG)
@@ -351,6 +348,7 @@ class GenericProvider:
result.quality = quality
result.provider = self
result.content = None
+ result.extraInfo = [show]
if len(epObj) == 1:
epNum = epObj[0].episode
@@ -361,7 +359,6 @@ class GenericProvider:
parse_result.episode_numbers), logger.DEBUG)
elif len(epObj) == 0:
epNum = SEASON_RESULT
- result.extraInfo = [show]
logger.log(u"Separating full season result to check for later", logger.DEBUG)
if not result:
diff --git a/sickbeard/providers/hdbits.py b/sickbeard/providers/hdbits.py
index 463e5886..b6597625 100644
--- a/sickbeard/providers/hdbits.py
+++ b/sickbeard/providers/hdbits.py
@@ -207,7 +207,7 @@ class HDBitsCache(tvcache.TVCache):
ql = []
for item in items:
- time.sleep(0.01)
+
ci = self._parseItem(item)
if ci is not None:
ql.append(ci)
diff --git a/sickbeard/providers/hdtorrents.py b/sickbeard/providers/hdtorrents.py
index 6ac8861d..ade6a1f0 100644
--- a/sickbeard/providers/hdtorrents.py
+++ b/sickbeard/providers/hdtorrents.py
@@ -349,7 +349,7 @@ class HDTorrentsCache(tvcache.TVCache):
cl = []
for result in rss_results:
- time.sleep(0.01)
+
item = (result[0], result[1])
ci = self._parseItem(item)
if ci is not None:
diff --git a/sickbeard/providers/iptorrents.py b/sickbeard/providers/iptorrents.py
index 42d8d940..d67378c5 100644
--- a/sickbeard/providers/iptorrents.py
+++ b/sickbeard/providers/iptorrents.py
@@ -295,7 +295,7 @@ class IPTorrentsCache(tvcache.TVCache):
cl = []
for result in rss_results:
- time.sleep(0.01)
+
item = (result[0], result[1])
ci = self._parseItem(item)
if ci is not None:
diff --git a/sickbeard/providers/kat.py b/sickbeard/providers/kat.py
index 3bd373c5..7f65d6ea 100644
--- a/sickbeard/providers/kat.py
+++ b/sickbeard/providers/kat.py
@@ -424,7 +424,7 @@ class KATCache(tvcache.TVCache):
cl = []
for result in rss_results:
- time.sleep(0.01)
+
item = (result[0], result[1])
ci = self._parseItem(item)
if ci is not None:
diff --git a/sickbeard/providers/newznab.py b/sickbeard/providers/newznab.py
index edeb20b8..d3d27b6a 100644
--- a/sickbeard/providers/newznab.py
+++ b/sickbeard/providers/newznab.py
@@ -173,7 +173,7 @@ class NewznabProvider(generic.NZBProvider):
return True
- def _doSearch(self, search_params, show=None, max_age=0):
+ def _doSearch(self, search_params, epcount=0, age=0):
self._checkAuth()
@@ -183,8 +183,8 @@ class NewznabProvider(generic.NZBProvider):
"cat": self.catIDs}
# if max_age is set, use it, don't allow it to be missing
- if max_age or not params['maxage']:
- params['maxage'] = max_age
+ if age or not params['maxage']:
+ params['maxage'] = age
if search_params:
params.update(search_params)
@@ -229,7 +229,7 @@ class NewznabProvider(generic.NZBProvider):
cache_results]
for term in search_terms:
- for item in self._doSearch({'q': term}, max_age=4):
+ for item in self._doSearch({'q': term}, age=4):
(title, url) = self._get_title_and_url(item)
diff --git a/sickbeard/providers/nextgen.py b/sickbeard/providers/nextgen.py
index 5976ebe8..4b0622f0 100644
--- a/sickbeard/providers/nextgen.py
+++ b/sickbeard/providers/nextgen.py
@@ -344,7 +344,7 @@ class NextGenCache(tvcache.TVCache):
cl = []
for result in rss_results:
- time.sleep(0.01)
+
item = (result[0], result[1])
ci = self._parseItem(item)
if ci is not None:
diff --git a/sickbeard/providers/publichd.py b/sickbeard/providers/publichd.py
index 440fd2ea..990f3ea2 100644
--- a/sickbeard/providers/publichd.py
+++ b/sickbeard/providers/publichd.py
@@ -316,7 +316,7 @@ class PublicHDCache(tvcache.TVCache):
ql = []
for result in rss_results:
- time.sleep(0.01)
+
item = (result[0], result[1])
ci = self._parseItem(item)
if ci is not None:
diff --git a/sickbeard/providers/scc.py b/sickbeard/providers/scc.py
index d0d03dc2..96352fee 100644
--- a/sickbeard/providers/scc.py
+++ b/sickbeard/providers/scc.py
@@ -322,7 +322,7 @@ class SCCCache(tvcache.TVCache):
cl = []
for result in rss_results:
- time.sleep(0.01)
+
item = (result[0], result[1])
ci = self._parseItem(item)
if ci is not None:
diff --git a/sickbeard/providers/speedcd.py b/sickbeard/providers/speedcd.py
index e27c9d4c..ba6d3d2d 100644
--- a/sickbeard/providers/speedcd.py
+++ b/sickbeard/providers/speedcd.py
@@ -274,7 +274,7 @@ class SpeedCDCache(tvcache.TVCache):
ql = []
for result in rss_results:
- time.sleep(0.01)
+
item = (result[0], result[1])
ci = self._parseItem(item)
if ci is not None:
diff --git a/sickbeard/providers/thepiratebay.py b/sickbeard/providers/thepiratebay.py
index 42fe320e..28b9bac8 100644
--- a/sickbeard/providers/thepiratebay.py
+++ b/sickbeard/providers/thepiratebay.py
@@ -415,7 +415,7 @@ class ThePirateBayCache(tvcache.TVCache):
cl = []
for result in rss_results:
- time.sleep(0.01)
+
item = (result[0], result[1])
ci = self._parseItem(item)
if ci is not None:
diff --git a/sickbeard/providers/torrentday.py b/sickbeard/providers/torrentday.py
index 654b8ac5..f1b1b82a 100644
--- a/sickbeard/providers/torrentday.py
+++ b/sickbeard/providers/torrentday.py
@@ -296,7 +296,7 @@ class TorrentDayCache(tvcache.TVCache):
cl = []
for result in rss_results:
- time.sleep(0.01)
+
item = (result[0], result[1])
ci = self._parseItem(item)
if ci is not None:
diff --git a/sickbeard/providers/torrentleech.py b/sickbeard/providers/torrentleech.py
index 8a489950..b02a9ba7 100644
--- a/sickbeard/providers/torrentleech.py
+++ b/sickbeard/providers/torrentleech.py
@@ -295,7 +295,7 @@ class TorrentLeechCache(tvcache.TVCache):
cl = []
for result in rss_results:
- time.sleep(0.01)
+
item = (result[0], result[1])
ci = self._parseItem(item)
if ci is not None:
diff --git a/sickbeard/scheduler.py b/sickbeard/scheduler.py
index 7409aa83..520d51e8 100644
--- a/sickbeard/scheduler.py
+++ b/sickbeard/scheduler.py
@@ -27,7 +27,7 @@ from sickbeard.exceptions import ex
class Scheduler:
def __init__(self, action, cycleTime=datetime.timedelta(minutes=10), runImmediately=True,
- threadName="ScheduledThread", silent=False):
+ threadName="ScheduledThread", silent=False, runOnce=False, queue=None):
if runImmediately:
self.lastRun = datetime.datetime.fromordinal(1)
@@ -44,6 +44,8 @@ class Scheduler:
self.initThread()
self.abort = False
+ self.runOnce = runOnce
+ self.queue = queue
def initThread(self):
if self.thread == None or not self.thread.isAlive():
@@ -61,8 +63,7 @@ class Scheduler:
def runAction(self):
while True:
- time.sleep(0.01)
-
+ time.sleep(1)
currentTime = datetime.datetime.now()
if currentTime - self.lastRun > self.cycleTime:
@@ -70,12 +71,17 @@ class Scheduler:
try:
if not self.silent:
logger.log(u"Starting new thread: " + self.threadName, logger.DEBUG)
- self.action.run()
+
+ # check if we want to pass in our queue dynamically
+ if self.queue:
+ self.action.run(self.queue)
+ else:
+ self.action.run()
except Exception, e:
logger.log(u"Exception generated in thread " + self.threadName + ": " + ex(e), logger.ERROR)
logger.log(repr(traceback.format_exc()), logger.DEBUG)
- if self.abort:
+ if self.abort or self.runOnce:
self.abort = False
self.thread = None
return
diff --git a/sickbeard/search.py b/sickbeard/search.py
index f13060e0..7bf40c63 100644
--- a/sickbeard/search.py
+++ b/sickbeard/search.py
@@ -21,6 +21,7 @@ from __future__ import with_statement
import os
import re
import threading
+import Queue
import traceback
import datetime
@@ -111,11 +112,6 @@ def snatchEpisode(result, endStatus=SNATCHED):
if result is None: return False
- # don't notify when we re-download an episode
- for curEpObj in result.episodes:
- if curEpObj.status in Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST:
- return 2
-
result.priority = 0 # -1 = low, 0 = normal, 1 = high
if sickbeard.ALLOW_HIGH_PRIORITY:
# if it aired recently make it high priority
@@ -363,8 +359,9 @@ def filterSearchResults(show, results):
return foundResults
-def searchProviders(show, season, episodes, curProvider, seasonSearch=False, manualSearch=False):
- threading.currentThread().name = curProvider.name
+def searchProviders(queueItem, show, season, episodes, curProvider, seasonSearch=False, manualSearch=False):
+ thread_name = str(curProvider.name).upper() + '-' + str(show.indexerid)
+ threading.currentThread().name = thread_name
logger.log(u"Searching for stuff we need from " + show.name + " season " + str(season))
foundResults = {}
@@ -392,7 +389,7 @@ def searchProviders(show, season, episodes, curProvider, seasonSearch=False, man
curResults = filterSearchResults(show, curResults)
if len(curResults):
foundResults.update(curResults)
- logger.log(u"Provider search results: " + str(foundResults), logger.DEBUG)
+ logger.log(u"Provider search results: " + repr(foundResults), logger.DEBUG)
if not len(foundResults):
return []
@@ -407,6 +404,7 @@ def searchProviders(show, season, episodes, curProvider, seasonSearch=False, man
highest_quality_overall = 0
for cur_episode in foundResults:
for cur_result in foundResults[cur_episode]:
+ cur_result.queue_item = queueItem
if cur_result.quality != Quality.UNKNOWN and cur_result.quality > highest_quality_overall:
highest_quality_overall = cur_result.quality
logger.log(u"The highest quality of any match is " + Quality.qualityStrings[highest_quality_overall], logger.DEBUG)
@@ -574,4 +572,4 @@ def searchProviders(show, season, episodes, curProvider, seasonSearch=False, man
finalResults.append(pickBestResult(foundResults[curEp], show))
- return finalResults
+ return finalResults
\ No newline at end of file
diff --git a/sickbeard/searchBacklog.py b/sickbeard/searchBacklog.py
index b1c230f1..251f097f 100644
--- a/sickbeard/searchBacklog.py
+++ b/sickbeard/searchBacklog.py
@@ -125,7 +125,8 @@ class BacklogSearcher:
backlog_queue_item = search_queue.BacklogQueueItem(curShow, cur_segment)
if backlog_queue_item.wantedEpisodes:
- sickbeard.searchQueueScheduler.action.add_item(backlog_queue_item) #@UndefinedVariable
+                        sickbeard.searchQueueScheduler.action.add_item(backlog_queue_item) #@UndefinedVariable
+                        # NOTE(review): restored add_item - re-instantiating the item and commenting out the enqueue left backlog search disabled
else:
logger.log(
u"Nothing in season " + str(cur_segment) + " needs to be downloaded, skipping this season",
diff --git a/sickbeard/search_queue.py b/sickbeard/search_queue.py
index d0115343..547ed5ce 100644
--- a/sickbeard/search_queue.py
+++ b/sickbeard/search_queue.py
@@ -19,36 +19,114 @@
from __future__ import with_statement
import datetime
-from threading import Thread
-import threading
+import Queue
import time
+import traceback
import sickbeard
from sickbeard import db, logger, common, exceptions, helpers
-from sickbeard import generic_queue
+from sickbeard import generic_queue, scheduler
from sickbeard import search, failed_history, history
from sickbeard import ui
-
from lib.concurrent import futures
+from lib.concurrent.futures.thread import ThreadPoolExecutor
BACKLOG_SEARCH = 10
RSS_SEARCH = 20
FAILED_SEARCH = 30
MANUAL_SEARCH = 30
+SNATCH = 40
+
+# snatch queues
+ManualSnatchQueue = Queue.PriorityQueue()
+RSSSnatchQueue = Queue.PriorityQueue()
+BacklogSnatchQueue = Queue.PriorityQueue()
+FailedSnatchQueue = Queue.PriorityQueue()
+
+SearchItemQueue = Queue.PriorityQueue()
+
+
+class SnatchQueue(generic_queue.GenericQueue):
+ def __init__(self):
+ generic_queue.GenericQueue.__init__(self)
+ self.queue_name = "SNATCHQUEUE"
+
+ def is_in_queue(self, show, episodes, quality):
+ for cur_item in self.queue.queue:
+ if cur_item.results.extraInfo[0] == show \
+                    and sorted(cur_item.results.episodes) == sorted(episodes) \
+ and cur_item.results.quality >= quality:
+ return True
+ return False
+
+ def add_item(self, item):
+ # dynamically select our snatch queue
+ if item.type == 'RSSSearchQueueItem':
+ self.queue = RSSSnatchQueue
+ elif item.type == 'ManualSearchQueueItem':
+ self.queue = ManualSnatchQueue
+ elif item.type == 'BacklogQueueItem':
+ self.queue = BacklogSnatchQueue
+ elif item.type == 'FailedQueueItem':
+ self.queue = FailedSnatchQueue
+ else:
+ return
+
+ # check if we already have a item ready to snatch with same or better quality score
+ if not self.is_in_queue(item.results.extraInfo[0], item.results.episodes, item.results.quality):
+ generic_queue.GenericQueue.add_item(self, item)
+ else:
+ logger.log(
+ u"Not adding item [" + item.results.name + "] it's already in the queue with same or higher quality",
+ logger.DEBUG)
+
+
+class SnatchQueueItem(generic_queue.QueueItem):
+ def __init__(self, results, queue_item):
+ generic_queue.QueueItem.__init__(self, 'Snatch', SNATCH)
+ self.priority = generic_queue.QueuePriorities.HIGH
+ self.thread_name = 'SNATCH-' + str(results.extraInfo[0].indexerid)
+ self.results = results
+ self.success = None
+ self.queue_item = queue_item
+ self.type = queue_item.type
+
+ def execute(self):
+ generic_queue.QueueItem.execute(self)
+
+ # just use the first result for now
+ logger.log(u"Downloading " + self.results.name + " from " + self.results.provider.name)
+
+ result = search.snatchEpisode(self.results)
+
+ if self.type == "ManualSearchQueueItem":
+ providerModule = self.results.provider
+ if not result:
+ ui.notifications.error(
+ 'Error while attempting to snatch ' + self.results.name + ', check your logs')
+ elif providerModule == None:
+ ui.notifications.error('Provider is configured incorrectly, unable to download')
+
+ self.success = result
+ self.queue_item.success = result
+
+ generic_queue.QueueItem.finish(self.queue_item)
+ generic_queue.QueueItem.finish(self)
class SearchQueue(generic_queue.GenericQueue):
def __init__(self):
generic_queue.GenericQueue.__init__(self)
self.queue_name = "SEARCHQUEUE"
+ self.queue = SearchItemQueue
def is_in_queue(self, show, segment):
- for cur_item in self.queue:
+ for cur_item in self.queue.queue:
if isinstance(cur_item, BacklogQueueItem) and cur_item.show == show and cur_item.segment == segment:
return True
return False
def is_ep_in_queue(self, ep_obj):
- for cur_item in self.queue:
+ for cur_item in self.queue.queue:
if isinstance(cur_item, ManualSearchQueueItem) and cur_item.ep_obj == ep_obj:
return True
return False
@@ -70,6 +148,7 @@ class SearchQueue(generic_queue.GenericQueue):
return False
def add_item(self, item):
+
if isinstance(item, RSSSearchQueueItem):
generic_queue.GenericQueue.add_item(self, item)
elif isinstance(item, BacklogQueueItem) and not self.is_in_queue(item.show, item.segment):
@@ -86,124 +165,85 @@ class ManualSearchQueueItem(generic_queue.QueueItem):
def __init__(self, ep_obj):
generic_queue.QueueItem.__init__(self, 'Manual Search', MANUAL_SEARCH)
self.priority = generic_queue.QueuePriorities.HIGH
- self.ep_obj = ep_obj
+ self.type = self.__class__.__name__
+ self.thread_name = 'MANUAL-' + str(ep_obj.show.indexerid)
self.success = None
+ self.show = ep_obj.show
+ self.ep_obj = ep_obj
def execute(self):
generic_queue.QueueItem.execute(self)
- foundResults = []
+ fs = []
didSearch = False
providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]
-
try:
- with futures.ThreadPoolExecutor(sickbeard.NUM_OF_THREADS) as executor:
- foundResults = list(
- executor.map(self.process, providers))
- didSearch = True
+ with ThreadPoolExecutor(sickbeard.NUM_OF_THREADS) as executor:
+ for provider in providers:
+ didSearch = True
+ logger.log("Beginning manual search for [" + self.ep_obj.prettyName() + "] on " + provider.name)
+ executor.submit(
+ search.searchProviders, self, self.show, self.ep_obj.season, [self.ep_obj], provider, False,
+ True).add_done_callback(snatch_results)
+ executor.shutdown(wait=True)
except Exception, e:
- pass
+ logger.log(traceback.format_exc(), logger.DEBUG)
if not didSearch:
logger.log(
u"No NZB/Torrent providers found or enabled in your SickRage config. Please check your settings.",
logger.ERROR)
- result = False
- if not len(foundResults):
- if self.ep_obj.show.air_by_date:
- ui.notifications.message('No downloads were found ...',
- "Couldn't find a download for %s" % self.ep_obj.prettyABName())
- logger.log(u"Unable to find a download for " + self.ep_obj.prettyABDName())
- else:
- ui.notifications.message('No downloads were found ...',
- "Couldn't find a download for %s" % self.ep_obj.prettyName())
- logger.log(u"Unable to find a download for " + self.ep_obj.prettyName())
-
- self.success = result
+ if ManualSnatchQueue.empty():
+ ui.notifications.message('No downloads were found',
+ "Couldn't find a download for %s" % self.ep_obj.prettyName())
+ logger.log(u"Unable to find a download for " + self.ep_obj.prettyName())
else:
- for foundResult in [item for sublist in foundResults for item in sublist]:
- time.sleep(0.01)
+ # snatch all items in queue
+ scheduler.Scheduler(SnatchQueue(), silent=True, runOnce=True, queue=ManualSnatchQueue).thread.start()
- result = search.snatchEpisode(foundResult)
-
- # duplicate snatch detected due to multithreading
- if result == 2:
- continue
-
- providerModule = foundResult.provider
- if not result:
- ui.notifications.error(
- 'Error while attempting to snatch ' + foundResult.name + ', check your logs')
- elif providerModule == None:
- ui.notifications.error('Provider is configured incorrectly, unable to download')
-
- # just use the first result for now
- logger.log(u"Downloading " + foundResult.name + " from " + foundResult.provider.name)
-
- self.success = result
-
- def process(self, curProvider):
- if self.ep_obj.show.air_by_date:
- logger.log("Beginning manual search for " + self.ep_obj.prettyABDName())
- else:
- logger.log("Beginning manual search for " + self.ep_obj.prettyName())
-
- return search.searchProviders(self.ep_obj.show, self.ep_obj.season, [self.ep_obj], curProvider, False, True)
-
- def finish(self):
- # don't let this linger if something goes wrong
- if self.success == None:
- self.success = False
- else:
- generic_queue.QueueItem.finish(self)
+ generic_queue.QueueItem.finish(self)
class RSSSearchQueueItem(generic_queue.QueueItem):
def __init__(self):
generic_queue.QueueItem.__init__(self, 'RSS Search', RSS_SEARCH)
+ self.thread_name = 'RSSFEED'
+ self.type = self.__class__.__name__
def execute(self):
generic_queue.QueueItem.execute(self)
- foundResults = []
+ results = False
didSearch = False
+ self._changeMissingEpisodes()
+
providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]
try:
- with futures.ThreadPoolExecutor(sickbeard.NUM_OF_THREADS) as executor:
- foundResults = list(
- executor.map(self.process, providers))
-
- didSearch = True
+ with ThreadPoolExecutor(sickbeard.NUM_OF_THREADS) as executor:
+ for provider in providers:
+ didSearch = True
+ logger.log("Beginning RSS Feed search on " + provider.name)
+ executor.submit(search.searchForNeededEpisodes, provider).add_done_callback(snatch_results)
+ executor.shutdown(wait=True)
except:
- pass
+ logger.log(traceback.format_exc(), logger.DEBUG)
if not didSearch:
logger.log(
- u"No NZB/Torrent providers found or enabled in the sickbeard config. Please check your settings.",
+ u"No NZB/Torrent providers found or enabled in your SickRage config. Please check your settings.",
logger.ERROR)
- if len(foundResults):
- for curResult in [item for sublist in foundResults for item in sublist]:
- time.sleep(0.01)
- result = search.snatchEpisode(curResult)
-
- # duplicate snatch detected due to multithreading
- if result == 2:
- continue
+ if RSSSnatchQueue.empty():
+ logger.log(u"No needed episodes found on the RSS feeds")
else:
- logger.log(u"RSS Feed search found nothing to snatch ...")
+ # snatch all items in queue
+ scheduler.Scheduler(SnatchQueue(), silent=True, runOnce=True, queue=RSSSnatchQueue).thread.start()
generic_queue.QueueItem.finish(self)
- def process(self, curProvider):
- self._changeMissingEpisodes()
-
- logger.log(u"Beginning search for new episodes on RSS feeds and in cache")
- return search.searchForNeededEpisodes(curProvider)
-
def _changeMissingEpisodes(self):
logger.log(u"Changing all old missing episodes to status WANTED")
@@ -240,6 +280,7 @@ class BacklogQueueItem(generic_queue.QueueItem):
def __init__(self, show, segment):
generic_queue.QueueItem.__init__(self, 'Backlog', BACKLOG_SEARCH)
self.priority = generic_queue.QueuePriorities.LOW
+ self.type = self.__class__.__name__
self.thread_name = 'BACKLOG-' + str(show.indexerid)
self.show = show
@@ -274,53 +315,48 @@ class BacklogQueueItem(generic_queue.QueueItem):
def execute(self):
generic_queue.QueueItem.execute(self)
- foundResults = []
+ results = False
didSearch = False
- providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]
-
- try:
- with futures.ThreadPoolExecutor(sickbeard.NUM_OF_THREADS) as executor:
- foundResults = list(executor.map(self.process,providers))
- didSearch = True
- except:
- pass
-
- if not didSearch:
- logger.log(
- u"No NZB/Torrent providers found or enabled in the sickbeard config. Please check your settings.",
- logger.ERROR)
-
- if len(foundResults):
- for curResult in [item for sublist in foundResults for item in sublist]:
- time.sleep(0.01)
-
- result = search.snatchEpisode(curResult)
-
- # duplicate snatch detected due to multithreading
- if result == 2:
- continue
-
- else:
- logger.log(u"Backlog search found nothing to snatch ...")
-
- self.finish()
-
- def process(self, curProvider):
# check if we want to search for season packs instead of just season/episode
seasonSearch = False
seasonEps = self.show.getAllEpisodes(self.segment)
if len(seasonEps) == len(self.wantedEpisodes):
seasonSearch = True
- return search.searchProviders(self.show, self.segment, self.wantedEpisodes, curProvider, seasonSearch, False)
+ providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]
+
+ try:
+ with ThreadPoolExecutor(sickbeard.NUM_OF_THREADS) as executor:
+ for provider in providers:
+ didSearch = True
+                    logger.log("Beginning backlog search for [" + str(self.segment) + "] on " + provider.name)
+ executor.submit(
+ search.searchProviders, self, self.show, self.segment, self.wantedEpisodes, provider,
+ seasonSearch, False).add_done_callback(snatch_results)
+ executor.shutdown(wait=True)
+ except Exception, e:
+ logger.log(traceback.format_exc(), logger.DEBUG)
+
+ if not didSearch:
+ logger.log(
+ u"No NZB/Torrent providers found or enabled in your SickRage config. Please check your settings.",
+ logger.ERROR)
+
+ if BacklogSnatchQueue.empty():
+ logger.log(u"No needed episodes found during backlog search")
+ else:
+ # snatch all items in queue
+ scheduler.Scheduler(SnatchQueue(), silent=True, runOnce=True, queue=BacklogSnatchQueue).thread.start()
+
+ self.finish()
def _need_any_episodes(self, statusResults, bestQualities):
wantedEpisodes = []
# check through the list of statuses to see if we want any
for curStatusResult in statusResults:
- time.sleep(0.01)
+ time.sleep(1)
curCompositeStatus = int(curStatusResult["status"])
curStatus, curQuality = common.Quality.splitCompositeStatus(curCompositeStatus)
@@ -344,61 +380,24 @@ class FailedQueueItem(generic_queue.QueueItem):
def __init__(self, show, episodes):
generic_queue.QueueItem.__init__(self, 'Retry', FAILED_SEARCH)
self.priority = generic_queue.QueuePriorities.HIGH
+ self.type = self.__class__.__name__
self.thread_name = 'RETRY-' + str(show.indexerid)
-
self.show = show
self.episodes = episodes
-
self.success = None
def execute(self):
generic_queue.QueueItem.execute(self)
- foundResults = []
+ results = False
didSearch = False
- providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]
-
- try:
- with futures.ThreadPoolExecutor(sickbeard.NUM_OF_THREADS) as executor:
- foundResults = list(
- executor.map(self.process, providers))
- didSearch = True
- except:
- pass
-
- if not didSearch:
- logger.log(
- u"No NZB/Torrent providers found or enabled in the sickbeard config. Please check your settings.",
- logger.ERROR)
-
- if len(foundResults):
- for curResult in [item for sublist in foundResults for item in sublist]:
- time.sleep(0.01)
-
- result = search.snatchEpisode(curResult)
-
- # duplicate snatch detected due to multithreading
- if result == 2:
- continue
-
- self.success = result
- else:
- logger.log(u"Retry failed download search found nothing to snatch ...")
-
- self.finish()
-
- def process(self, curProvider):
episodes = []
for i, epObj in enumerate(episodes):
- time.sleep(0.01)
-
- if epObj.show.air_by_date:
- logger.log("Beginning manual search for " + epObj.prettyABDName())
- else:
- logger.log(
- "Beginning failed download search for " + epObj.prettyName())
+ time.sleep(1)
+ logger.log(
+ "Beginning failed download search for " + epObj.prettyName())
(release, provider) = failed_history.findRelease(self.show, epObj.season, epObj.episode)
if release:
@@ -410,4 +409,36 @@ class FailedQueueItem(generic_queue.QueueItem):
failed_history.revertEpisode(self.show, epObj.season, epObj.episode)
episodes.append(epObj)
- return search.searchProviders(self.show, self.episodes[0].season, self.episodes, curProvider, False, False)
\ No newline at end of file
+ providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]
+
+ try:
+ with ThreadPoolExecutor(sickbeard.NUM_OF_THREADS) as executor:
+ for provider in providers:
+ didSearch = True
+ executor.submit(
+ search.searchProviders, self, self.show, self.episodes[0].season, self.episodes, provider,
+ False,
+ True).add_done_callback(snatch_results)
+ executor.shutdown(wait=True)
+ except Exception, e:
+ logger.log(traceback.format_exc(), logger.DEBUG)
+
+ if not didSearch:
+ logger.log(
+ u"No NZB/Torrent providers found or enabled in your SickRage config. Please check your settings.",
+ logger.ERROR)
+
+ if FailedSnatchQueue.empty():
+ logger.log(u"No needed episodes found during failed download search")
+ else:
+ # snatch all items in queue
+ scheduler.Scheduler(SnatchQueue(), silent=True, runOnce=True, queue=FailedSnatchQueue).thread.start()
+
+ self.finish()
+
+
+# send to snatch queue
+def snatch_results(f):
+ for result in f.result():
+ snatch_queue_item = SnatchQueueItem(result, result.queue_item)
+ SnatchQueue().add_item(snatch_queue_item)
\ No newline at end of file
diff --git a/sickbeard/show_queue.py b/sickbeard/show_queue.py
index 6a6e9cc7..22d2aa3d 100644
--- a/sickbeard/show_queue.py
+++ b/sickbeard/show_queue.py
@@ -19,6 +19,7 @@
from __future__ import with_statement
import traceback
+import Queue
import sickbeard
@@ -30,16 +31,18 @@ from sickbeard import generic_queue
from sickbeard import name_cache
from sickbeard.exceptions import ex
+ShowItemQueue = Queue.PriorityQueue()
class ShowQueue(generic_queue.GenericQueue):
def __init__(self):
generic_queue.GenericQueue.__init__(self)
self.queue_name = "SHOWQUEUE"
+ self.queue = ShowItemQueue
def _isInQueue(self, show, actions):
- return show in [x.show for x in self.queue if x.action_id in actions]
+ return show in [x.show for x in self.queue.queue if x.action_id in actions] if self.queue.qsize() > 0 else []
def _isBeingSomethinged(self, show, actions):
return self.currentItem != None and show == self.currentItem.show and \
@@ -73,7 +76,7 @@ class ShowQueue(generic_queue.GenericQueue):
return self._isBeingSomethinged(show, (ShowQueueActions.SUBTITLE,))
def _getLoadingShowList(self):
- return [x for x in self.queue + [self.currentItem] if x != None and x.isLoading]
+ return [x for x in self.queue.queue + [self.currentItem] if x != None and x.isLoading] if self.queue.qsize() > 0 else []
loadingShowList = property(_getLoadingShowList)
diff --git a/sickbeard/tv.py b/sickbeard/tv.py
index c2278b4c..77c02d4f 100644
--- a/sickbeard/tv.py
+++ b/sickbeard/tv.py
@@ -441,7 +441,6 @@ class TVShow(object):
sql_l = []
for season in showObj:
- time.sleep(0.01)
scannedEps[season] = {}
for episode in showObj[season]:
# need some examples of wtf episode 0 means to decide if we want it or not
@@ -1732,18 +1731,10 @@ class TVEpisode(object):
Returns: A string representing the episode's name and season/ep numbers
"""
-
- return self._format_pattern('%SN - %Sx%0E - %EN')
-
- def prettyABDName(self):
- """
- Returns the name of this episode in a "pretty" human-readable format. Used for logging
- and notifications and such.
-
- Returns: A string representing the episode's name and season/ep numbers
- """
-
- return self._format_pattern('%SN - %AD - %EN')
+ if self.show.air_by_date:
+ return self._format_pattern('%SN - %AD - %EN')
+ else:
+ return self._format_pattern('%SN - %Sx%0E - %EN')
def prettySceneName(self):
"""
@@ -1752,8 +1743,10 @@ class TVEpisode(object):
Returns: A string representing the episode's name and season/ep numbers
"""
-
- return self._format_pattern('%SN - %XSx%0XE - %EN')
+ if self.show.air_by_date:
+ return self._format_pattern('%SN - %AD - %EN')
+ else:
+ return self._format_pattern('%SN - %XSx%0XE - %EN')
def _ep_name(self):
"""
@@ -1851,13 +1844,13 @@ class TVEpisode(object):
'%Q.N': dot(Quality.qualityStrings[epQual]),
'%Q_N': us(Quality.qualityStrings[epQual]),
'%S': str(self.season),
- '%0S': '%02d' % int(self.season) if not self.show.air_by_date else self.season,
+ '%0S': '%02d' % self.season,
'%E': str(self.episode),
- '%0E': '%02d' % int(self.episode)if not self.show.air_by_date else self.episode,
+ '%0E': '%02d' % self.episode,
'%XS': str(self.scene_season),
- '%0XS': '%02d' % int(self.scene_season),
+ '%0XS': '%02d' % self.scene_season,
'%XE': str(self.scene_episode),
- '%0XE': '%02d' % int(self.scene_episode),
+ '%0XE': '%02d' % self.scene_episode,
'%RN': release_name(self.release_name),
'%RG': release_group(self.release_name),
'%AD': str(self.airdate).replace('-', ' '),
diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py
index cd3f61ac..3ca75fb9 100644
--- a/sickbeard/tvcache.py
+++ b/sickbeard/tvcache.py
@@ -143,7 +143,6 @@ class TVCache():
items = data.entries
ql = []
for item in items:
- time.sleep(0.01)
qi = self._parseItem(item)
if qi is not None:
ql.append(qi)
diff --git a/sickbeard/webserve.py b/sickbeard/webserve.py
index 2f1001d8..d5bea869 100644
--- a/sickbeard/webserve.py
+++ b/sickbeard/webserve.py
@@ -3153,7 +3153,6 @@ class Home:
sql_l = []
for curEp in eps.split('|'):
- time.sleep(0.01)
logger.log(u"Attempting to set status on episode " + curEp + " to " + status, logger.DEBUG)