
Fixed app performance issues from recent upgrades.

Fixed issues with manual searches not working.

Moved misc routine tasks to a properly scheduled thread that runs every hour, updating network timezones, refreshing scene exceptions, and trimming failed history.

Forced the misc routine tasks to complete at startup before anything else runs. This is critical since they handle our scene exception updates and internal scene cache loading, which must finish before searches begin.
echel0n 2014-05-26 13:16:07 -07:00
parent de01fa1e37
commit c5f933e4c8
9 changed files with 132 additions and 105 deletions
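
A minimal, self-contained sketch of the startup/scheduling pattern described in the commit message above: a maintenance task exposing an amActive flag, driven by a scheduler thread that runs it immediately and then every hour, with startup blocking until the first pass finishes. MaintenanceTask and SimpleScheduler are illustrative stand-ins, not SickRage's actual maintenance.Maintenance or scheduler.Scheduler classes; see the diff below for the real wiring.

import datetime
import threading
import time


class MaintenanceTask(object):
    # Illustrative stand-in for sickbeard.maintenance.Maintenance: exposes an
    # amActive flag so callers can tell when a pass is still running.
    def __init__(self):
        self.amActive = False

    def run(self, force=False):
        self.amActive = True
        try:
            # In SickRage this is where scene exceptions, network timezones,
            # and failed-history trimming get refreshed; simulated here.
            time.sleep(2)
        finally:
            self.amActive = False


class SimpleScheduler(object):
    # Illustrative stand-in for sickbeard.scheduler.Scheduler: optionally runs
    # the action immediately, then repeats it every cycleTime on its own thread.
    def __init__(self, action, cycleTime, threadName, runImmediately=False):
        self.action = action
        self.cycleTime = cycleTime
        self.abort = False
        self.runImmediately = runImmediately
        self.thread = threading.Thread(target=self._loop, name=threadName)
        self.thread.daemon = True

    def _loop(self):
        if self.runImmediately:
            self.action.run()
        while not self.abort:
            time.sleep(self.cycleTime.total_seconds())
            if not self.abort:
                self.action.run()


if __name__ == "__main__":
    maintenanceScheduler = SimpleScheduler(MaintenanceTask(),
                                           cycleTime=datetime.timedelta(hours=1),
                                           threadName="MAINTENANCE",
                                           runImmediately=True)
    maintenanceScheduler.thread.start()

    # Mirrors the new start() logic: block until the initial maintenance
    # pass finishes before any search threads would be started.
    time.sleep(0.1)  # demo-only: give the thread a moment to flag the task active
    print("Performing initial maintenance tasks, please wait ...")
    while maintenanceScheduler.action.amActive:
        time.sleep(1)
    print("Initial maintenance finished; searches can start now.")
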

View File

@ -20,7 +20,7 @@ from __future__ import with_statement
import cherrypy
import webbrowser
import sqlite3
import time
import datetime
import socket
import os, sys, subprocess, re
@ -41,6 +41,7 @@ from sickbeard import helpers, db, exceptions, show_queue, search_queue, schedul
from sickbeard import logger
from sickbeard import naming
from sickbeard import dailysearcher
from sickbeard import maintenance
from sickbeard import scene_numbering, scene_exceptions, name_cache
from indexers.indexer_api import indexerApi
from indexers.indexer_exceptions import indexer_shownotfound, indexer_exception, indexer_error, indexer_episodenotfound, \
@ -77,6 +78,7 @@ PIDFILE = ''
DAEMON = None
maintenanceScheduler = None
dailySearchScheduler = None
backlogSearchScheduler = None
showUpdateScheduler = None
@ -472,7 +474,7 @@ def initialize(consoleLogging=True):
USE_FAILED_DOWNLOADS, DELETE_FAILED, ANON_REDIRECT, LOCALHOST_IP, TMDB_API_KEY, DEBUG, PROXY_SETTING, \
AUTOPOSTPROCESSER_FREQUENCY, DEFAULT_AUTOPOSTPROCESSER_FREQUENCY, MIN_AUTOPOSTPROCESSER_FREQUENCY, \
ANIME_DEFAULT, NAMING_ANIME, ANIMESUPPORT, USE_ANIDB, ANIDB_USERNAME, ANIDB_PASSWORD, ANIDB_USE_MYLIST, \
ANIME_SPLIT_HOME
ANIME_SPLIT_HOME, maintenanceScheduler
if __INITIALIZED__:
return False
@ -904,7 +906,7 @@ def initialize(consoleLogging=True):
# initialize the cache database
db.upgradeDatabase(db.DBConnection("cache.db"), cache_db.InitialSchema)
# initalize the failed downloads database
# initialize the failed downloads database
db.upgradeDatabase(db.DBConnection("failed.db"), failed_db.InitialSchema)
# fix up any db problems
@ -933,10 +935,20 @@ def initialize(consoleLogging=True):
newznabProviderList = providers.getNewznabProviderList(NEWZNAB_DATA)
providerList = providers.makeProviderList()
# the interval for this is stored inside the ShowUpdater class
showUpdaterInstance = showUpdater.ShowUpdater()
showUpdateScheduler = scheduler.Scheduler(showUpdaterInstance,
cycleTime=showUpdaterInstance.updateInterval,
maintenanceScheduler = scheduler.Scheduler(maintenance.Maintenance(),
cycleTime=datetime.timedelta(hours=1),
threadName="MAINTENANCE",
silent=True,
runImmediately=True)
dailySearchScheduler = scheduler.Scheduler(dailysearcher.DailySearcher(),
cycleTime=datetime.timedelta(minutes=DAILYSEARCH_FREQUENCY),
threadName="DAILYSEARCHER",
silent=True,
runImmediately=DAILYSEARCH_STARTUP)
showUpdateScheduler = scheduler.Scheduler(showUpdater.ShowUpdater(),
cycleTime=showUpdater.ShowUpdater().updateInterval,
threadName="SHOWUPDATER",
runImmediately=False)
@ -956,29 +968,30 @@ def initialize(consoleLogging=True):
threadName="SEARCHQUEUE",
silent=True)
properFinderInstance = properFinder.ProperFinder()
properFinderScheduler = scheduler.Scheduler(properFinderInstance,
cycleTime=properFinderInstance.updateInterval,
properFinderScheduler = scheduler.Scheduler(properFinder.ProperFinder(),
cycleTime=properFinder.ProperFinder().updateInterval,
threadName="FINDPROPERS",
silent=False if DOWNLOAD_PROPERS else True,
runImmediately=True)
if not DOWNLOAD_PROPERS:
properFinderScheduler.silent = True
autoPostProcesserScheduler = scheduler.Scheduler(autoPostProcesser.PostProcesser(),
cycleTime=datetime.timedelta(
minutes=AUTOPOSTPROCESSER_FREQUENCY),
threadName="POSTPROCESSER",
silent=False if PROCESS_AUTOMATICALLY else True,
runImmediately=True)
if not PROCESS_AUTOMATICALLY:
autoPostProcesserScheduler.silent = True
traktWatchListCheckerSchedular = scheduler.Scheduler(traktWatchListChecker.TraktChecker(),
cycleTime=datetime.timedelta(hours=1),
threadName="TRAKTWATCHLIST",
silent=False if USE_TRAKT else True,
runImmediately=True)
if not USE_TRAKT:
traktWatchListCheckerSchedular.silent = True
subtitlesFinderScheduler = scheduler.Scheduler(subtitles.SubtitlesFinder(),
cycleTime=datetime.timedelta(hours=SUBTITLES_FINDER_FREQUENCY),
threadName="FINDSUBTITLES",
silent=False if USE_SUBTITLES else True,
runImmediately=True)
backlogSearchScheduler = searchBacklog.BacklogSearchScheduler(searchBacklog.BacklogSearcher(),
cycleTime=datetime.timedelta(
@ -987,23 +1000,6 @@ def initialize(consoleLogging=True):
silent=True,
runImmediately=BACKLOG_STARTUP)
dailySearchScheduler = scheduler.Scheduler(dailysearcher.DailySearcher(),
cycleTime=datetime.timedelta(minutes=DAILYSEARCH_FREQUENCY),
threadName="DAILYSEARCHER",
silent=True,
runImmediately=DAILYSEARCH_STARTUP)
subtitlesFinderScheduler = scheduler.Scheduler(subtitles.SubtitlesFinder(),
cycleTime=datetime.timedelta(hours=SUBTITLES_FINDER_FREQUENCY),
threadName="FINDSUBTITLES",
runImmediately=True)
if not USE_SUBTITLES:
subtitlesFinderScheduler.silent = True
showList = []
loadingShowList = {}
# dynamically load provider settings
for curTorrentProvider in [curProvider for curProvider in providers.sortedProviderList() if
curProvider.providerType == GenericProvider.TORRENT]:
@ -1041,7 +1037,7 @@ def initialize(consoleLogging=True):
curTorrentProvider.getID() + '_options', '')
if hasattr(curTorrentProvider, 'ratio'):
curTorrentProvider.ratio = check_setting_str(CFG, curTorrentProvider.getID().upper(),
curTorrentProvider.getID() + '_ratio', '')
curTorrentProvider.getID() + '_ratio', '')
if hasattr(curTorrentProvider, 'minseed'):
curTorrentProvider.minseed = check_setting_int(CFG, curTorrentProvider.getID().upper(),
curTorrentProvider.getID() + '_minseed', 0)
@ -1099,23 +1095,29 @@ def initialize(consoleLogging=True):
except:
pass
showList = []
loadingShowList = {}
__INITIALIZED__ = True
return True
def start():
global __INITIALIZED__, backlogSearchScheduler, \
global __INITIALIZED__, maintenanceScheduler, backlogSearchScheduler, \
showUpdateScheduler, versionCheckScheduler, showQueueScheduler, \
properFinderScheduler, autoPostProcesserScheduler, searchQueueScheduler, \
subtitlesFinderScheduler, started, USE_SUBTITLES, \
traktWatchListCheckerSchedular, dailySearchScheduler, started
subtitlesFinderScheduler, USE_SUBTITLES,traktWatchListCheckerSchedular, \
dailySearchScheduler, started
with INIT_LOCK:
if __INITIALIZED__:
# start the version checker
versionCheckScheduler.thread.start()
# start the maintenance scheduler
maintenanceScheduler.thread.start()
logger.log(u"Performing initial maintenance tasks, please wait ...")
while maintenanceScheduler.action.amActive:
time.sleep(1)
# start the daily search scheduler
dailySearchScheduler.thread.start()
@ -1123,18 +1125,21 @@ def start():
# start the backlog scheduler
backlogSearchScheduler.thread.start()
# start the show updater
showUpdateScheduler.thread.start()
# start the version checker
versionCheckScheduler.thread.start()
# start the queue checker
showQueueScheduler.thread.start()
# start the search queue checker
searchQueueScheduler.thread.start()
# start the queue checker
properFinderScheduler.thread.start()
# start the queue checker
showQueueScheduler.thread.start()
# start the show updater
showUpdateScheduler.thread.start()
# start the proper finder
autoPostProcesserScheduler.thread.start()
@ -1149,10 +1154,11 @@ def start():
def halt():
global __INITIALIZED__, backlogSearchScheduler, showUpdateScheduler, \
showQueueScheduler, properFinderScheduler, autoPostProcesserScheduler, searchQueueScheduler, \
subtitlesFinderScheduler, dailySearchScheduler, started, \
traktWatchListCheckerSchedular
global __INITIALIZED__, maintenanceScheduler, backlogSearchScheduler, \
showUpdateScheduler, versionCheckScheduler, showQueueScheduler, \
properFinderScheduler, autoPostProcesserScheduler, searchQueueScheduler, \
subtitlesFinderScheduler, traktWatchListCheckerSchedular, \
dailySearchScheduler, started
with INIT_LOCK:
@ -1162,10 +1168,10 @@ def halt():
# abort all the threads
backlogSearchScheduler.abort = True
logger.log(u"Waiting for the BACKLOG thread to exit")
maintenanceScheduler.abort = True
logger.log(u"Waiting for the MAINTENANCE scheduler thread to exit")
try:
backlogSearchScheduler.thread.join(10)
maintenanceScheduler.thread.join(10)
except:
pass
@ -1176,6 +1182,13 @@ def halt():
except:
pass
backlogSearchScheduler.abort = True
logger.log(u"Waiting for the BACKLOG thread to exit")
try:
backlogSearchScheduler.thread.join(10)
except:
pass
showUpdateScheduler.abort = True
logger.log(u"Waiting for the SHOWUPDATER thread to exit")
try:
@ -1234,7 +1247,7 @@ def halt():
if ADBA_CONNECTION:
ADBA_CONNECTION.logout()
#ADBA_CONNECTION.stop()
# ADBA_CONNECTION.stop()
logger.log(u"Waiting for the ANIDB CONNECTION thread to exit")
try:
ADBA_CONNECTION.join(5)
@ -1336,7 +1349,7 @@ def restart(soft=True):
if soft:
halt()
saveAll()
#logger.log(u"Restarting cherrypy")
# logger.log(u"Restarting cherrypy")
#cherrypy.engine.restart()
logger.log(u"Re-initializing all data")
initialize()

View File

@ -17,10 +17,9 @@
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
from __future__ import with_statement
import time
import datetime
import threading
import traceback
import sickbeard
from sickbeard import logger
@ -28,9 +27,6 @@ from sickbeard import db
from sickbeard import common
from sickbeard import helpers
from sickbeard import exceptions
from sickbeard.exceptions import ex
from sickbeard.search import pickBestResult, snatchEpisode
from sickbeard import generic_queue
class DailySearcher():
def __init__(self):
@ -90,6 +86,6 @@ class DailySearcher():
sickbeard.name_cache.clearCache(show)
dailysearch_queue_item = sickbeard.search_queue.DailySearchQueueItem(show, segment)
sickbeard.searchQueueScheduler.action.add_item(dailysearch_queue_item) #@UndefinedVariable
sickbeard.searchQueueScheduler.action.add_item(dailysearch_queue_item)
else:
logger.log(u"Could not find any needed episodes to search for ...")

sickbeard/maintenance.py (new file)
View File

@ -0,0 +1,47 @@
# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
from __future__ import with_statement
import threading
import sickbeard
from sickbeard import scene_exceptions
from sickbeard import failed_history
from sickbeard import network_timezones
class Maintenance():
def __init__(self):
self.lock = threading.Lock()
self.amActive = False
def run(self, force=False):
self.amActive = True
# refresh scene exceptions too
scene_exceptions.retrieve_exceptions()
# refresh network timezones
network_timezones.update_network_dict()
# sure, why not?
if sickbeard.USE_FAILED_DOWNLOADS:
failed_history.trimHistory()
self.amActive = False

View File

@ -11,7 +11,7 @@
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.

View File

@ -37,6 +37,7 @@ DAILY_SEARCH = 20
FAILED_SEARCH = 30
MANUAL_SEARCH = 30
class SearchQueue(generic_queue.GenericQueue):
def __init__(self):
generic_queue.GenericQueue.__init__(self)
@ -83,6 +84,7 @@ class SearchQueue(generic_queue.GenericQueue):
else:
logger.log(u"Not adding item, it's already in the queue", logger.DEBUG)
class DailySearchQueueItem(generic_queue.QueueItem):
def __init__(self, show, segment):
generic_queue.QueueItem.__init__(self, 'Daily Search', DAILY_SEARCH)
@ -113,6 +115,7 @@ class DailySearchQueueItem(generic_queue.QueueItem):
generic_queue.QueueItem.finish(self)
class ManualSearchQueueItem(generic_queue.QueueItem):
def __init__(self, show, segment):
generic_queue.QueueItem.__init__(self, 'Manual Search', MANUAL_SEARCH)
@ -155,6 +158,7 @@ class ManualSearchQueueItem(generic_queue.QueueItem):
self.success = False
generic_queue.QueueItem.finish(self)
class BacklogQueueItem(generic_queue.QueueItem):
def __init__(self, show, segment):
generic_queue.QueueItem.__init__(self, 'Backlog', BACKLOG_SEARCH)
@ -168,7 +172,8 @@ class BacklogQueueItem(generic_queue.QueueItem):
generic_queue.QueueItem.execute(self)
for season in self.segment:
sickbeard.searchBacklog.BacklogSearcher.currentSearchInfo = {'title': self.show.name + " Season " + str(season)}
sickbeard.searchBacklog.BacklogSearcher.currentSearchInfo = {
'title': self.show.name + " Season " + str(season)}
wantedEps = self.segment[season]
@ -196,6 +201,7 @@ class BacklogQueueItem(generic_queue.QueueItem):
self.finish()
class FailedQueueItem(generic_queue.QueueItem):
def __init__(self, show, segment):
generic_queue.QueueItem.__init__(self, 'Retry', FAILED_SEARCH)

View File

@ -27,7 +27,8 @@ from sickbeard import ui
from sickbeard.exceptions import ex
from sickbeard import encodingKludge as ek
from sickbeard import db
from sickbeard import network_timezones
from sickbeard import failed_history
class ShowUpdater():
def __init__(self):

View File

@ -261,7 +261,7 @@ class TVCache():
return True
def _addCacheEntry(self, name, url, quality=None):
# if we don't have complete info then parse the filename to get it
try:
myParser = NameParser()
parse_result = myParser.parse(name).convert()
@ -269,32 +269,11 @@ class TVCache():
logger.log(u"Unable to parse the filename " + name + " into a valid episode", logger.DEBUG)
return None
if not parse_result:
logger.log(u"Giving up because I'm unable to parse this name: " + name, logger.DEBUG)
if not parse_result or not parse_result.series_name:
return None
if not parse_result.series_name:
logger.log(u"No series name retrieved from " + name + ", unable to cache it", logger.DEBUG)
return None
showObj = None
if parse_result.show:
showObj = parse_result.show
if not showObj:
showResult = helpers.searchDBForShow(parse_result.series_name)
if showResult:
showObj = helpers.findCertainShow(sickbeard.showList, int(showResult[0]))
if not showObj:
for curShow in sickbeard.showList:
if show_name_helpers.isGoodResult(name, curShow, False):
showObj = curShow
break
if not showObj:
if not parse_result.show:
logger.log(u"No match for show: [" + parse_result.series_name + "], not caching ...", logger.DEBUG)
sickbeard.name_cache.addNameToCache(parse_result.series_name, 0)
return None
season = episodes = None
@ -304,7 +283,7 @@ class TVCache():
airdate = parse_result.air_date.toordinal() or parse_result.sports_event_date.toordinal()
sql_results = myDB.select(
"SELECT season, episode FROM tv_episodes WHERE showid = ? AND indexer = ? AND airdate = ?",
[showObj.indexerid, showObj.indexer, airdate])
[parse_result.show.indexerid, parse_result.show.indexer, airdate])
if sql_results > 0:
season = int(sql_results[0]["season"])
episodes = [int(sql_results[0]["episode"])]
@ -330,7 +309,7 @@ class TVCache():
return [
"INSERT INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality) VALUES (?,?,?,?,?,?,?)",
[name, season, episodeText, showObj.indexerid, url, curTimestamp, quality]]
[name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality]]
def searchCache(self, episodes, manualSearch=False):

View File

@ -33,12 +33,8 @@ import sickbeard
from sickbeard import helpers
from sickbeard import version, ui
from sickbeard import logger
from sickbeard import scene_exceptions
from sickbeard.exceptions import ex
from sickbeard import encodingKludge as ek
from sickbeard import failed_history
from sickbeard import network_timezones
class CheckVersion():
"""
@ -69,17 +65,6 @@ class CheckVersion():
# do a soft restart
threading.Timer(2, sickbeard.invoke_restart, [False]).start()
if not updated:
# refresh scene exceptions too
scene_exceptions.retrieve_exceptions()
# refresh network timezones
network_timezones.update_network_dict()
# sure, why not?
if sickbeard.USE_FAILED_DOWNLOADS:
failed_history.trimHistory()
def find_install_type(self):
"""
Determines how this copy of SB was installed.

View File

@ -205,7 +205,7 @@ class ManageSearches:
#t.backlogPI = sickbeard.backlogSearchScheduler.action.getProgressIndicator()
t.backlogPaused = sickbeard.searchQueueScheduler.action.is_backlog_paused() # @UndefinedVariable
t.backlogRunning = sickbeard.searchQueueScheduler.action.is_backlog_in_progress() # @UndefinedVariable
t.dailySearchStatus = sickbeard.searchQueueScheduler.action.is_dailysearch_in_progress() # @UndefinedVariable
t.dailySearchStatus = sickbeard.searchQueueScheduler.action.amActive # @UndefinedVariable
t.findPropersStatus = sickbeard.properFinderScheduler.action.amActive # @UndefinedVariable
t.submenu = ManageMenu()