Added sleep timers to help improve overall CPU usage

This commit is contained in:
echel0n 2014-05-06 14:35:37 -07:00
parent a0bd46c18a
commit 8a61eb1e26
9 changed files with 32 additions and 8 deletions

View File

@ -341,13 +341,12 @@ def main():
# Stay alive while my threads do the work
while (True):
time.sleep(1)
if sickbeard.invoked_command:
sickbeard.invoked_command()
sickbeard.invoked_command = None
time.sleep(1)
return
if __name__ == "__main__":

View File

@ -19,6 +19,7 @@
import os
import string
import cherrypy
import time
from sickbeard import encodingKludge as ek
@ -55,6 +56,7 @@ def foldersAtPath(path, includeParent=False):
# walk up the tree until we find a valid path
while path and not os.path.isdir(path):
time.sleep(0.01)
if path == os.path.dirname(path):
path = ''
break

View File

@ -20,7 +20,7 @@ import cherrypy
import os.path
import datetime
import re
import time
from sickbeard import helpers
from sickbeard import logger
from sickbeard import naming
@ -399,7 +399,7 @@ class ConfigMigrator():
sickbeard.CONFIG_VERSION = self.config_version
while self.config_version < self.expected_config_version:
time.sleep(0.01)
next_version = self.config_version + 1
if next_version in self.migration_names:

View File

@ -79,6 +79,7 @@ class DBConnection:
attempt = 0
while attempt < 5:
time.sleep(0.01)
try:
if args == None:
logger.log(self.filename + ": " + query, logger.DB)
@ -118,6 +119,7 @@ class DBConnection:
attempt = 0
while attempt < 5:
time.sleep(0.01)
try:
for qu in querylist:
if len(qu) == 1:
@ -162,6 +164,7 @@ class DBConnection:
attempt = 0
while attempt < 5:
time.sleep(0.01)
try:
if args == None:
logger.log(self.filename + ": " + query, logger.DB)

View File

@ -549,6 +549,7 @@ def delete_empty_folders(check_empty_dir, keep_dir=None):
# as long as the folder exists and doesn't contain any files, delete it
while ek.ek(os.path.isdir, check_empty_dir) and check_empty_dir != keep_dir:
time.sleep(0.01)
check_files = ek.ek(os.listdir, check_empty_dir)
@ -791,6 +792,7 @@ def backupVersionedFile(old_file, version):
new_file = old_file + '.' + 'v' + str(version)
while not ek.ek(os.path.isfile, new_file):
time.sleep(0.01)
if not ek.ek(os.path.isfile, old_file):
logger.log(u"Not creating backup, " + old_file + " doesn't exist", logger.DEBUG)
break

View File

@ -20,6 +20,7 @@ import datetime
import os.path
import re
import regexes
import time
import sickbeard
from sickbeard import logger, helpers, scene_numbering
@ -207,6 +208,7 @@ class NameParser(object):
i = result = 0
for integer, numeral in numeral_map:
time.sleep(0.01)
while n[i:i + len(numeral)] == numeral:
result += integer
i += len(numeral)
@ -424,6 +426,7 @@ class NameParserCache(object):
self._previous_parsed[name] = parse_result
self._previous_parsed_list.append(name)
while len(self._previous_parsed_list) > self._cache_size:
time.sleep(0.01)
del_me = self._previous_parsed_list.pop(0)
self._previous_parsed.pop(del_me)

View File

@ -24,6 +24,7 @@ import os
import re
import urllib
import urlparse
import time
import sickbeard
@ -238,6 +239,8 @@ class GenericProvider:
self.cache.updateCache()
for epObj in episodes:
time.sleep(0.01)
itemList = []
cacheResult = self.cache.searchCache(epObj, manualSearch)
@ -271,6 +274,8 @@ class GenericProvider:
for episode, items in searchItems.items():
for item in items:
time.sleep(0.01)
(title, url) = self._get_title_and_url(item)
quality = self.getQuality(item)
@ -331,6 +336,7 @@ class GenericProvider:
logger.log(
u"Ignoring result " + title + " because we don't want an episode that is " + Quality.qualityStrings[
quality], logger.DEBUG)
time.sleep(0.01)
continue
logger.log(u"Found result " + title + " at " + url, logger.DEBUG)

View File

@ -61,6 +61,7 @@ class Scheduler:
def runAction(self):
while True:
time.sleep(0.01)
currentTime = datetime.datetime.now()
@ -78,5 +79,3 @@ class Scheduler:
self.abort = False
self.thread = None
return
time.sleep(1)

View File

@ -111,6 +111,8 @@ class ManualSearchQueueItem(generic_queue.QueueItem):
self.success = result
else:
for foundResult in foundResults:
time.sleep(0.01)
# just use the first result for now
logger.log(u"Downloading " + foundResult.name + " from " + foundResult.provider.name)
@ -152,9 +154,10 @@ class RSSSearchQueueItem(generic_queue.QueueItem):
foundResults = list(executor.map(self.process, [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]))
for curResult in foundResults:
time.sleep(0.01)
if curResult:
search.snatchEpisode(curResult)
#time.sleep(2)
generic_queue.QueueItem.finish(self)
@ -238,6 +241,8 @@ class BacklogQueueItem(generic_queue.QueueItem):
for curResult in foundResults if foundResults else logger.log(
u"Backlog search found nothing to snatch ..."):
time.sleep(0.01)
search.snatchEpisode(curResult)
self.finish()
@ -256,6 +261,8 @@ class BacklogQueueItem(generic_queue.QueueItem):
# check through the list of statuses to see if we want any
for curStatusResult in statusResults:
time.sleep(0.01)
curCompositeStatus = int(curStatusResult["status"])
curStatus, curQuality = common.Quality.splitCompositeStatus(curCompositeStatus)
episode = int(curStatusResult["episode"])
@ -292,8 +299,9 @@ class FailedQueueItem(generic_queue.QueueItem):
# download whatever we find
for curResult in foundResults:
time.sleep(0.01)
self.success = search.snatchEpisode(curResult)
time.sleep(5)
self.finish()
@ -301,6 +309,8 @@ class FailedQueueItem(generic_queue.QueueItem):
episodes = []
for i, epObj in enumerate(episodes):
time.sleep(0.01)
if epObj.show.air_by_date:
logger.log("Beginning manual search for " + epObj.prettyABDName())
else: