
Added some sleep timers to help improve overall CPU usage

echel0n 2014-05-06 14:35:37 -07:00
parent a0bd46c18a
commit 8a61eb1e26
9 changed files with 32 additions and 8 deletions
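The change is the same in all nine files: tight polling or iteration loops gain a short time.sleep() call so each pass yields the CPU instead of spinning. The sketch below is illustrative only (the function name and the get_command callable are hypothetical, not taken from SickRage); it shows the general busy-wait pattern the hunks below apply.

import time

def poll_for_work(get_command):
    # Hypothetical polling loop, in the spirit of the main() loop patched below.
    # Without the sleep, the loop spins continuously and pins a CPU core even
    # when there is nothing to do; sleeping yields the core between checks.
    while True:
        time.sleep(1)

        command = get_command()
        if command is not None:
            command()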

View File

@@ -341,13 +341,12 @@ def main():
     # Stay alive while my threads do the work
     while (True):
+        time.sleep(1)
         if sickbeard.invoked_command:
             sickbeard.invoked_command()
             sickbeard.invoked_command = None
-        time.sleep(1)
     return
 if __name__ == "__main__":

View File

@@ -19,6 +19,7 @@
 import os
 import string
 import cherrypy
+import time
 from sickbeard import encodingKludge as ek
@@ -55,6 +56,7 @@ def foldersAtPath(path, includeParent=False):
     # walk up the tree until we find a valid path
     while path and not os.path.isdir(path):
+        time.sleep(0.01)
         if path == os.path.dirname(path):
             path = ''
             break

View File

@@ -20,7 +20,7 @@ import cherrypy
 import os.path
 import datetime
 import re
+import time
 from sickbeard import helpers
 from sickbeard import logger
 from sickbeard import naming
@@ -399,7 +399,7 @@ class ConfigMigrator():
         sickbeard.CONFIG_VERSION = self.config_version
         while self.config_version < self.expected_config_version:
+            time.sleep(0.01)
             next_version = self.config_version + 1
             if next_version in self.migration_names:

View File

@@ -79,6 +79,7 @@ class DBConnection:
         attempt = 0
         while attempt < 5:
+            time.sleep(0.01)
             try:
                 if args == None:
                     logger.log(self.filename + ": " + query, logger.DB)
@@ -118,6 +119,7 @@ class DBConnection:
         attempt = 0
         while attempt < 5:
+            time.sleep(0.01)
             try:
                 for qu in querylist:
                     if len(qu) == 1:
@@ -162,6 +164,7 @@ class DBConnection:
         attempt = 0
         while attempt < 5:
+            time.sleep(0.01)
             try:
                 if args == None:
                     logger.log(self.filename + ": " + query, logger.DB)
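In the DBConnection hunks above, the sleep also acts as a small delay between retries against a transiently locked SQLite database. A minimal, self-contained sketch of that retry-with-delay idea; the function name and parameters are illustrative, not the project's API:

import sqlite3
import time

def execute_with_retry(connection, query, args=None, attempts=5, delay=0.01):
    # Illustrative bounded retry loop: pause briefly before each attempt so a
    # momentarily locked database gets time to clear and the loop never
    # hammers the CPU. Mirrors the "while attempt < 5" loops patched above.
    for attempt in range(attempts):
        time.sleep(delay)
        try:
            if args is None:
                return connection.execute(query)
            return connection.execute(query, args)
        except sqlite3.OperationalError:
            if attempt == attempts - 1:
                raise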

View File

@@ -549,6 +549,7 @@ def delete_empty_folders(check_empty_dir, keep_dir=None):
     # as long as the folder exists and doesn't contain any files, delete it
     while ek.ek(os.path.isdir, check_empty_dir) and check_empty_dir != keep_dir:
+        time.sleep(0.01)
         check_files = ek.ek(os.listdir, check_empty_dir)
@@ -791,6 +792,7 @@ def backupVersionedFile(old_file, version):
     new_file = old_file + '.' + 'v' + str(version)
     while not ek.ek(os.path.isfile, new_file):
+        time.sleep(0.01)
         if not ek.ek(os.path.isfile, old_file):
             logger.log(u"Not creating backup, " + old_file + " doesn't exist", logger.DEBUG)
             break

View File

@@ -20,6 +20,7 @@ import datetime
 import os.path
 import re
 import regexes
+import time
 import sickbeard
 from sickbeard import logger, helpers, scene_numbering
@@ -207,6 +208,7 @@ class NameParser(object):
         i = result = 0
         for integer, numeral in numeral_map:
+            time.sleep(0.01)
             while n[i:i + len(numeral)] == numeral:
                 result += integer
                 i += len(numeral)
@@ -424,6 +426,7 @@ class NameParserCache(object):
         self._previous_parsed[name] = parse_result
         self._previous_parsed_list.append(name)
         while len(self._previous_parsed_list) > self._cache_size:
+            time.sleep(0.01)
             del_me = self._previous_parsed_list.pop(0)
             self._previous_parsed.pop(del_me)

View File

@@ -24,6 +24,7 @@ import os
 import re
 import urllib
 import urlparse
+import time
 import sickbeard
@@ -238,6 +239,8 @@ class GenericProvider:
         self.cache.updateCache()
         for epObj in episodes:
+            time.sleep(0.01)
             itemList = []
             cacheResult = self.cache.searchCache(epObj, manualSearch)
@@ -271,6 +274,8 @@ class GenericProvider:
         for episode, items in searchItems.items():
             for item in items:
+                time.sleep(0.01)
                 (title, url) = self._get_title_and_url(item)
                 quality = self.getQuality(item)
@@ -331,6 +336,7 @@ class GenericProvider:
                     logger.log(
                         u"Ignoring result " + title + " because we don't want an episode that is " + Quality.qualityStrings[
                             quality], logger.DEBUG)
+                    time.sleep(0.01)
                     continue
                 logger.log(u"Found result " + title + " at " + url, logger.DEBUG)

View File

@@ -61,6 +61,7 @@ class Scheduler:
     def runAction(self):
         while True:
+            time.sleep(0.01)
             currentTime = datetime.datetime.now()
@@ -78,5 +79,3 @@ class Scheduler:
                 self.abort = False
                 self.thread = None
                 return
-            time.sleep(1)

View File

@@ -111,6 +111,8 @@ class ManualSearchQueueItem(generic_queue.QueueItem):
             self.success = result
         else:
             for foundResult in foundResults:
+                time.sleep(0.01)
                 # just use the first result for now
                 logger.log(u"Downloading " + foundResult.name + " from " + foundResult.provider.name)
@@ -152,9 +154,10 @@ class RSSSearchQueueItem(generic_queue.QueueItem):
         foundResults = list(executor.map(self.process, [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]))
         for curResult in foundResults:
+            time.sleep(0.01)
             if curResult:
                 search.snatchEpisode(curResult)
-        #time.sleep(2)
         generic_queue.QueueItem.finish(self)
@@ -238,6 +241,8 @@ class BacklogQueueItem(generic_queue.QueueItem):
             for curResult in foundResults if foundResults else logger.log(
                     u"Backlog search found nothing to snatch ..."):
+                time.sleep(0.01)
                 search.snatchEpisode(curResult)
             self.finish()
@@ -256,6 +261,8 @@ class BacklogQueueItem(generic_queue.QueueItem):
             # check through the list of statuses to see if we want any
             for curStatusResult in statusResults:
+                time.sleep(0.01)
                 curCompositeStatus = int(curStatusResult["status"])
                 curStatus, curQuality = common.Quality.splitCompositeStatus(curCompositeStatus)
                 episode = int(curStatusResult["episode"])
@@ -292,8 +299,9 @@ class FailedQueueItem(generic_queue.QueueItem):
             # download whatever we find
             for curResult in foundResults:
+                time.sleep(0.01)
                 self.success = search.snatchEpisode(curResult)
-                time.sleep(5)
             self.finish()
@@ -301,6 +309,8 @@ class FailedQueueItem(generic_queue.QueueItem):
         episodes = []
         for i, epObj in enumerate(episodes):
+            time.sleep(0.01)
             if epObj.show.air_by_date:
                 logger.log("Beginning manual search for " + epObj.prettyABDName())
             else: