diff --git a/SickBeard.py b/SickBeard.py
index 08fe709f..c892458f 100755
--- a/SickBeard.py
+++ b/SickBeard.py
@@ -341,13 +341,12 @@ def main():
 
     # Stay alive while my threads do the work
     while (True):
+        time.sleep(1)
 
         if sickbeard.invoked_command:
            sickbeard.invoked_command()
            sickbeard.invoked_command = None
 
-        time.sleep(1)
-
     return
 
 if __name__ == "__main__":
diff --git a/sickbeard/browser.py b/sickbeard/browser.py
index 71fb65e5..3f60f36d 100644
--- a/sickbeard/browser.py
+++ b/sickbeard/browser.py
@@ -19,6 +19,7 @@
 import os
 import string
 
 import cherrypy
+import time
 
 from sickbeard import encodingKludge as ek
@@ -55,6 +56,7 @@ def foldersAtPath(path, includeParent=False):
 
     # walk up the tree until we find a valid path
     while path and not os.path.isdir(path):
+        time.sleep(0.01)
         if path == os.path.dirname(path):
             path = ''
             break
diff --git a/sickbeard/config.py b/sickbeard/config.py
index 35e9b6b8..7ab0cc59 100644
--- a/sickbeard/config.py
+++ b/sickbeard/config.py
@@ -20,7 +20,7 @@ import cherrypy
 import os.path
 import datetime
 import re
-
+import time
 from sickbeard import helpers
 from sickbeard import logger
 from sickbeard import naming
@@ -399,7 +399,7 @@ class ConfigMigrator():
         sickbeard.CONFIG_VERSION = self.config_version
 
         while self.config_version < self.expected_config_version:
-
+            time.sleep(0.01)
             next_version = self.config_version + 1
 
             if next_version in self.migration_names:
diff --git a/sickbeard/db.py b/sickbeard/db.py
index c48829c1..75d5f142 100644
--- a/sickbeard/db.py
+++ b/sickbeard/db.py
@@ -79,6 +79,7 @@ class DBConnection:
 
         attempt = 0
         while attempt < 5:
+            time.sleep(0.01)
             try:
                 if args == None:
                     logger.log(self.filename + ": " + query, logger.DB)
@@ -118,6 +119,7 @@ class DBConnection:
 
         attempt = 0
         while attempt < 5:
+            time.sleep(0.01)
             try:
                 for qu in querylist:
                     if len(qu) == 1:
@@ -162,6 +164,7 @@ class DBConnection:
 
         attempt = 0
         while attempt < 5:
+            time.sleep(0.01)
             try:
                 if args == None:
                     logger.log(self.filename + ": " + query, logger.DB)
diff --git a/sickbeard/helpers.py b/sickbeard/helpers.py
index 5a4f0230..b6b02f7e 100644
--- a/sickbeard/helpers.py
+++ b/sickbeard/helpers.py
@@ -549,6 +549,7 @@ def delete_empty_folders(check_empty_dir, keep_dir=None):
 
     # as long as the folder exists and doesn't contain any files, delete it
     while ek.ek(os.path.isdir, check_empty_dir) and check_empty_dir != keep_dir:
+        time.sleep(0.01)
 
         check_files = ek.ek(os.listdir, check_empty_dir)
 
@@ -791,6 +792,7 @@ def backupVersionedFile(old_file, version):
     new_file = old_file + '.' + 'v' + str(version)
 
     while not ek.ek(os.path.isfile, new_file):
+        time.sleep(0.01)
         if not ek.ek(os.path.isfile, old_file):
             logger.log(u"Not creating backup, " + old_file + " doesn't exist", logger.DEBUG)
             break
diff --git a/sickbeard/name_parser/parser.py b/sickbeard/name_parser/parser.py
index 996a426e..1327737d 100644
--- a/sickbeard/name_parser/parser.py
+++ b/sickbeard/name_parser/parser.py
@@ -20,6 +20,7 @@ import datetime
 import os.path
 import re
 import regexes
+import time
 
 import sickbeard
 from sickbeard import logger, helpers, scene_numbering
@@ -207,6 +208,7 @@ class NameParser(object):
 
         i = result = 0
         for integer, numeral in numeral_map:
+            time.sleep(0.01)
             while n[i:i + len(numeral)] == numeral:
                 result += integer
                 i += len(numeral)
@@ -424,6 +426,7 @@ class NameParserCache(object):
         self._previous_parsed[name] = parse_result
         self._previous_parsed_list.append(name)
         while len(self._previous_parsed_list) > self._cache_size:
+            time.sleep(0.01)
             del_me = self._previous_parsed_list.pop(0)
             self._previous_parsed.pop(del_me)
 
diff --git a/sickbeard/providers/generic.py b/sickbeard/providers/generic.py
index c0c58e30..cac7c02e 100644
--- a/sickbeard/providers/generic.py
+++ b/sickbeard/providers/generic.py
@@ -24,6 +24,7 @@ import os
 import re
 import urllib
 import urlparse
+import time
 
 import sickbeard
 
@@ -238,6 +239,8 @@ class GenericProvider:
         self.cache.updateCache()
 
         for epObj in episodes:
+            time.sleep(0.01)
+
             itemList = []
 
             cacheResult = self.cache.searchCache(epObj, manualSearch)
@@ -271,6 +274,8 @@ class GenericProvider:
 
         for episode, items in searchItems.items():
             for item in items:
+                time.sleep(0.01)
+
                 (title, url) = self._get_title_and_url(item)
 
                 quality = self.getQuality(item)
@@ -331,6 +336,7 @@ class GenericProvider:
                     logger.log(
                         u"Ignoring result " + title + " because we don't want an episode that is " + Quality.qualityStrings[
                             quality], logger.DEBUG)
+                    time.sleep(0.01)
                     continue
 
                 logger.log(u"Found result " + title + " at " + url, logger.DEBUG)
diff --git a/sickbeard/scheduler.py b/sickbeard/scheduler.py
index 29cae776..7409aa83 100644
--- a/sickbeard/scheduler.py
+++ b/sickbeard/scheduler.py
@@ -61,6 +61,7 @@ class Scheduler:
 
     def runAction(self):
         while True:
+            time.sleep(0.01)
 
             currentTime = datetime.datetime.now()
 
@@ -78,5 +79,3 @@ class Scheduler:
                 self.abort = False
                 self.thread = None
                 return
-
-            time.sleep(1)
diff --git a/sickbeard/search_queue.py b/sickbeard/search_queue.py
index 7c11399c..3bbf239c 100644
--- a/sickbeard/search_queue.py
+++ b/sickbeard/search_queue.py
@@ -111,6 +111,8 @@ class ManualSearchQueueItem(generic_queue.QueueItem):
             self.success = result
         else:
             for foundResult in foundResults:
+                time.sleep(0.01)
+
                 # just use the first result for now
                 logger.log(u"Downloading " + foundResult.name + " from " + foundResult.provider.name)
 
@@ -152,9 +154,10 @@ class RSSSearchQueueItem(generic_queue.QueueItem):
             foundResults = list(executor.map(self.process, [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]))
 
             for curResult in foundResults:
+                time.sleep(0.01)
+
                 if curResult:
                     search.snatchEpisode(curResult)
-                    #time.sleep(2)
 
         generic_queue.QueueItem.finish(self)
 
@@ -238,6 +241,8 @@ class BacklogQueueItem(generic_queue.QueueItem):
 
         for curResult in foundResults if foundResults else logger.log(
                 u"Backlog search found nothing to snatch ..."):
+            time.sleep(0.01)
+
             search.snatchEpisode(curResult)
 
         self.finish()
@@ -256,6 +261,8 @@ class BacklogQueueItem(generic_queue.QueueItem):
 
         # check through the list of statuses to see if we want any
         for curStatusResult in statusResults:
+            time.sleep(0.01)
+
             curCompositeStatus = int(curStatusResult["status"])
             curStatus, curQuality = common.Quality.splitCompositeStatus(curCompositeStatus)
             episode = int(curStatusResult["episode"])
@@ -292,8 +299,9 @@ class FailedQueueItem(generic_queue.QueueItem):
 
         # download whatever we find
         for curResult in foundResults:
+            time.sleep(0.01)
+
             self.success = search.snatchEpisode(curResult)
-            time.sleep(5)
 
         self.finish()
 
@@ -301,6 +309,8 @@ class FailedQueueItem(generic_queue.QueueItem):
         episodes = []
 
         for i, epObj in enumerate(episodes):
+            time.sleep(0.01)
+
             if epObj.show.air_by_date:
                 logger.log("Beginning manual search for " + epObj.prettyABDName())
             else:
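
The common thread in these hunks is a cooperative-yield pattern: each busy loop gets a short time.sleep() so the looping thread periodically releases the GIL and gives other threads a chance to run instead of spinning at full speed. The standalone sketch below only illustrates that general pattern; it is not code from this patch, and all names in it (polling_worker, stop_event, interval) are hypothetical.

# Illustrative sketch of the throttled-loop pattern applied by the patch (hypothetical names).
import threading
import time


def polling_worker(stop_event, interval=0.01):
    """Loop until asked to stop, sleeping briefly on every pass.

    The sleep releases the GIL so other threads get scheduled and the
    loop does not pin a CPU core while it waits for work.
    """
    while not stop_event.is_set():
        # ... check for and process pending work here ...
        time.sleep(interval)


if __name__ == "__main__":
    stop = threading.Event()
    worker = threading.Thread(target=polling_worker, args=(stop,))
    worker.start()

    time.sleep(0.1)  # let the worker run a few iterations
    stop.set()
    worker.join()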