
Added an RSS cache updater with a user-settable interval (configured in the config section of the web interface); it refreshes the cache so that searches for snatches can complete almost instantly.

Updated the backlog search code to reset skipped or missed episodes to WANTED.
echel0n 2014-05-12 01:52:14 -07:00
parent 7673cd5cc9
commit 31297b9069
9 changed files with 97 additions and 64 deletions
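At a glance, the new updater is a small object whose run() method walks every active provider and refreshes its RSS cache. A minimal sketch of that loop, condensed from the rssupdater diff below (not a verbatim excerpt), assuming the sickbeard provider/cache interfaces shown in this commit:

```python
# Minimal sketch of the RSS cache updater's run loop, condensed from the
# rssupdater diff below.  Assumes the interfaces shown in this commit
# (sortedProviderList, isActive, cache.updateCache).
import sickbeard
from sickbeard import logger


class RSSUpdater():
    def run(self):
        # only poll providers the user has enabled
        providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]
        for provider in providers:
            logger.log(u"Updating RSS cache for provider [" + provider.name + "]")
            provider.cache.updateCache()
```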

View File

@@ -47,7 +47,7 @@
<div class="field-pair">
<label class="nocheck clearfix">
<span class="component-title">Search Frequency</span>
<span class="component-title">RSS Cache Update Frequency</span>
<input type="text" name="search_frequency" value="$sickbeard.SEARCH_FREQUENCY" size="5" />
</label>
<label class="nocheck clearfix">
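The renamed field still writes to search_frequency, and the value entered (in minutes) becomes the updater's cycle time further down in this commit. A tiny self-contained illustration of that conversion (the example value is an assumption, not from the source):

```python
# Illustration only: the minutes entered in the "RSS Cache Update Frequency"
# field become a timedelta that drives the RSSUPDATER scheduler cycle.
import datetime

search_frequency = 40                                 # example user input (assumption)
cycle = datetime.timedelta(minutes=search_frequency)
print(cycle.total_seconds())                          # 2400.0 -> wake up every 40 minutes
```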

View File

@@ -38,6 +38,7 @@ from sickbeard import searchBacklog, showUpdater, versionChecker, properFinder,
from sickbeard import helpers, db, exceptions, show_queue, search_queue, scheduler, show_name_helpers
from sickbeard import logger
from sickbeard import naming
from sickbeard import rssupdater
from sickbeard import scene_numbering, scene_exceptions, name_cache
from indexers.indexer_api import indexerApi
from indexers.indexer_exceptions import indexer_shownotfound, indexer_exception, indexer_error, indexer_episodenotfound, \
@@ -82,6 +83,7 @@ properFinderScheduler = None
autoPostProcesserScheduler = None
subtitlesFinderScheduler = None
traktWatchListCheckerSchedular = None
updateRSSScheduler = None
showList = None
loadingShowList = None
@@ -133,7 +135,6 @@ ROOT_DIRS = None
UPDATE_SHOWS_ON_START = None
SORT_ARTICLE = None
DEBUG = False
NUM_OF_THREADS = None
USE_LISTVIEW = None
METADATA_XBMC = None
@@ -528,7 +529,7 @@ def initialize(consoleLogging=True):
GUI_NAME, HOME_LAYOUT, HISTORY_LAYOUT, DISPLAY_SHOW_SPECIALS, COMING_EPS_LAYOUT, COMING_EPS_SORT, COMING_EPS_DISPLAY_PAUSED, COMING_EPS_MISSED_RANGE, DATE_PRESET, TIME_PRESET, TIME_PRESET_W_SECONDS, \
METADATA_WDTV, METADATA_TIVO, METADATA_MEDE8ER, IGNORE_WORDS, CALENDAR_UNPROTECTED, CREATE_MISSING_SHOW_DIRS, \
ADD_SHOWS_WO_DIR, USE_SUBTITLES, SUBTITLES_LANGUAGES, SUBTITLES_DIR, SUBTITLES_SERVICES_LIST, SUBTITLES_SERVICES_ENABLED, SUBTITLES_HISTORY, SUBTITLES_FINDER_FREQUENCY, subtitlesFinderScheduler, \
USE_FAILED_DOWNLOADS, DELETE_FAILED, ANON_REDIRECT, LOCALHOST_IP, TMDB_API_KEY, DEBUG, PROXY_SETTING, NUM_OF_THREADS
USE_FAILED_DOWNLOADS, DELETE_FAILED, ANON_REDIRECT, LOCALHOST_IP, TMDB_API_KEY, DEBUG, PROXY_SETTING, updateRSSScheduler
if __INITIALIZED__:
return False
@@ -597,8 +598,6 @@ def initialize(consoleLogging=True):
DEBUG = bool(check_setting_int(CFG, 'General', 'debug', 0))
NUM_OF_THREADS = check_setting_int(CFG, 'General', 'num_of_threads', 1)
ENABLE_HTTPS = bool(check_setting_int(CFG, 'General', 'enable_https', 0))
HTTPS_CERT = check_setting_str(CFG, 'General', 'https_cert', 'server.crt')
@@ -1063,6 +1062,12 @@ def initialize(consoleLogging=True):
threadName="CHECKVERSION",
runImmediately=True)
updateRSSScheduler = scheduler.Scheduler(rssupdater.RSSUpdater(),
cycleTime=datetime.timedelta(minutes=SEARCH_FREQUENCY),
threadName="RSSUPDATER",
silent=True,
runImmediately=True)
showQueueScheduler = scheduler.Scheduler(show_queue.ShowQueue(),
cycleTime=datetime.timedelta(seconds=3),
threadName="SHOWQUEUE",
@@ -1099,7 +1104,7 @@ def initialize(consoleLogging=True):
backlogSearchScheduler = searchBacklog.BacklogSearchScheduler(searchBacklog.BacklogSearcher(),
cycleTime=datetime.timedelta(
minutes=get_backlog_cycle_time()),
minutes=get_backlog_cycle_time()),
threadName="BACKLOG",
runImmediately=True)
backlogSearchScheduler.action.cycleTime = BACKLOG_SEARCH_FREQUENCY
@@ -1126,30 +1131,33 @@ def start():
showUpdateScheduler, versionCheckScheduler, showQueueScheduler, \
properFinderScheduler, autoPostProcesserScheduler, searchQueueScheduler, \
subtitlesFinderScheduler, started, USE_SUBTITLES, \
traktWatchListCheckerSchedular, started
traktWatchListCheckerSchedular, updateRSSScheduler, started
with INIT_LOCK:
if __INITIALIZED__:
# start the queue checker
showQueueScheduler.thread.start()
# start the version checker
versionCheckScheduler.thread.start()
# start the RSS cache updater
updateRSSScheduler.thread.start()
# start the backlog scheduler
backlogSearchScheduler.thread.start()
# start the show updater
showUpdateScheduler.thread.start()
# start the search queue checker
searchQueueScheduler.thread.start()
# start the queue checker
properFinderScheduler.thread.start()
# start the queue checker
showQueueScheduler.thread.start()
# start the show updater
showUpdateScheduler.thread.start()
# start the proper finder
autoPostProcesserScheduler.thread.start()
@@ -1166,7 +1174,7 @@ def start():
def halt():
global __INITIALIZED__, backlogSearchScheduler, showUpdateScheduler, \
showQueueScheduler, properFinderScheduler, autoPostProcesserScheduler, searchQueueScheduler, \
subtitlesFinderScheduler, started, \
subtitlesFinderScheduler, updateRSSScheduler, started, \
traktWatchListCheckerSchedular
with INIT_LOCK:
@@ -1240,6 +1248,13 @@ def halt():
except:
pass
updateRSSScheduler.abort = True
logger.log(u"Waiting for the RSSUPDATER thread to exit")
try:
updateRSSScheduler.thread.join(10)
except:
pass
__INITIALIZED__ = False
@@ -1355,7 +1370,6 @@ def save_config():
new_config['General']['use_api'] = int(USE_API)
new_config['General']['api_key'] = API_KEY
new_config['General']['debug'] = int(DEBUG)
new_config['General']['num_of_threads'] = int(NUM_OF_THREADS)
new_config['General']['enable_https'] = int(ENABLE_HTTPS)
new_config['General']['https_cert'] = HTTPS_CERT
new_config['General']['https_key'] = HTTPS_KEY
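Pulling the scattered hunks in this file together, the lifecycle of the new updateRSSScheduler amounts to roughly the following. This is a condensed sketch using the names and keyword arguments shown above, not a verbatim excerpt; the SEARCH_FREQUENCY value here is an example stand-in for the config setting bound in the template:

```python
# Condensed sketch of the updateRSSScheduler lifecycle in sickbeard/__init__.py,
# assembled from the initialize()/start()/halt() hunks above.
import datetime
from sickbeard import scheduler, rssupdater

SEARCH_FREQUENCY = 40  # minutes, normally read from config.ini (example value)

# initialize(): build the scheduler
updateRSSScheduler = scheduler.Scheduler(rssupdater.RSSUpdater(),
                                         cycleTime=datetime.timedelta(minutes=SEARCH_FREQUENCY),
                                         threadName="RSSUPDATER",
                                         silent=True,
                                         runImmediately=True)

# start(): launch the background thread alongside the other schedulers
updateRSSScheduler.thread.start()

# halt(): ask the thread to stop and give it up to 10 seconds to exit
updateRSSScheduler.abort = True
try:
    updateRSSScheduler.thread.join(10)
except Exception:
    pass
```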

View File

@@ -20,12 +20,8 @@ import datetime
import threading
import Queue
import sickbeard
from lib.concurrent.futures.thread import ThreadPoolExecutor
from sickbeard import logger
class QueuePriorities:
LOW = 10
NORMAL = 20
@@ -33,7 +29,6 @@ class QueuePriorities:
class GenericQueue:
def __init__(self):
#self.executor = ThreadPoolExecutor(sickbeard.NUM_OF_THREADS)
self.currentItem = None
self.thread = None
self.queue_name = "QUEUE"
@@ -72,8 +67,7 @@ class GenericQueue:
return
threadName = self.queue_name + '-' + queueItem.get_thread_name()
executor = ThreadPoolExecutor(sickbeard.NUM_OF_THREADS)
self.thread = executor.submit(queueItem.execute, name=threadName)
self.thread = threading.Thread(None, queueItem.execute, threadName)
self.currentItem = queueItem
class QueueItem:
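The queue runner now builds a plain threading.Thread per item instead of submitting to a ThreadPoolExecutor. A small self-contained sketch of that pattern (the names here are placeholders; the hunk above truncates where the thread is actually started):

```python
# Sketch of the per-item thread pattern GenericQueue switches to: one
# threading.Thread per queue item, created with the positional
# (group, target, name) arguments used above.
import threading


def execute():
    # stand-in for queueItem.execute (placeholder for the sketch)
    print("running queue item")


threadName = "QUEUE" + '-' + "SHOWQUEUE"              # queue_name + '-' + item thread name
thread = threading.Thread(None, execute, threadName)  # group, target, name
thread.start()
thread.join()
```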

View File

@@ -285,21 +285,29 @@ def makeDir(path):
def searchDBForShow(regShowName):
showNames = list(set([re.sub('[. -]', ' ', regShowName), regShowName]))
showNames = [re.sub('[. -]', ' ', regShowName)]
myDB = db.DBConnection()
yearRegex = "([^()]+?)\s*(\()?(\d{4})(?(2)\))$"
for showName in showNames:
# if we didn't get exactly one result then try again with the year stripped off if possible
match = re.match(yearRegex, showName)
if match and match.group(1):
logger.log(u"Unable to match original name but trying to manually strip and specify show year",
logger.DEBUG)
sqlResults = myDB.select(
"SELECT * FROM tv_shows WHERE (show_name LIKE ? OR show_name LIKE ?) AND startyear = ?",
[match.group(1) + '%', match.group(1) + '%', match.group(3)])
sqlResults = myDB.select("SELECT * FROM tv_shows WHERE show_name LIKE ?",
[showName])
if len(sqlResults) == 1:
return (int(sqlResults[0]["indexer_id"]), sqlResults[0]["show_name"])
else:
# if we didn't get exactly one result then try again with the year stripped off if possible
match = re.match(yearRegex, showName)
if match and match.group(1):
logger.log(u"Unable to match original name but trying to manually strip and specify show year",
logger.DEBUG)
sqlResults = myDB.select(
"SELECT * FROM tv_shows WHERE (show_name LIKE ?) AND startyear = ?",
[match.group(1) + '%', match.group(3)])
if len(sqlResults) == 0:
logger.log(u"Unable to match a record in the DB for " + showName, logger.DEBUG)
@@ -308,7 +316,7 @@ def searchDBForShow(regShowName):
logger.log(u"Multiple results for " + showName + " in the DB, unable to match show name", logger.DEBUG)
continue
else:
return int(sqlResults[0]["indexer_id"])
return (int(sqlResults[0]["indexer_id"]), sqlResults[0]["show_name"])
return
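The reworked searchDBForShow() now returns an (indexer_id, show_name) tuple and only falls back to the year-stripping regex when the plain LIKE lookup does not produce exactly one match. A self-contained illustration of what that regex extracts (the show names are just examples):

```python
# Illustration of the year-stripping regex used above: it splits a trailing
# "(YYYY)" (or bare "YYYY") off a show name so the DB can be queried with the
# base name plus startyear.
import re

yearRegex = r"([^()]+?)\s*(\()?(\d{4})(?(2)\))$"


def strip_year(show_name):
    match = re.match(yearRegex, show_name)
    if match and match.group(1):
        return match.group(1).strip(), match.group(3)
    return None, None


print(strip_year("The Americans (2013)"))  # ('The Americans', '2013')
print(strip_year("Some Show"))             # (None, None) -> no year to strip
```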

View File

@@ -180,13 +180,6 @@ class GenericProvider:
return True
def searchRSS(self):
self._checkAuth()
self.cache.updateCache()
return self.cache.findNeededEpisodes()
def getQuality(self, item):
"""
Figures out the quality of the given RSS item node
@@ -236,9 +229,6 @@ class GenericProvider:
searchItems = {}
itemList = []
#if not manualSearch:
# self.cache.updateCache()
for epObj in episodes:
cacheResult = self.cache.searchCache(epObj, manualSearch)
if len(cacheResult):
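With the cache refresh moved to the RSSUPDATER thread, the provider's RSS search path no longer updates the cache itself; it only reads from it. A stub-level sketch of the resulting behaviour (the class here is a stand-in, not the real GenericProvider):

```python
# Stand-in sketch: after this commit, searchRSS() assumes the RSSUPDATER thread
# has already refreshed provider.cache and simply reads needed episodes from it.
class ProviderSketch:
    def __init__(self, cache):
        self.cache = cache

    def _checkAuth(self):
        return True  # real providers validate credentials here

    def searchRSS(self):
        self._checkAuth()
        return self.cache.findNeededEpisodes()
```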

View File

@@ -291,7 +291,6 @@ class NewznabCache(tvcache.TVCache):
logger.log(u"Clearing " + self.provider.name + " cache and updating with new information")
self._clearCache()
if self._checkAuth(data):
items = data.entries
ql = []
@@ -325,6 +324,6 @@ class NewznabCache(tvcache.TVCache):
url = self._translateLinkURL(url)
logger.log(u"Adding item from RSS to cache: " + title, logger.DEBUG)
logger.log(u"Attempting to add item from RSS to cache: " + title, logger.DEBUG)
return self._addCacheEntry(title, url)

View File

@@ -20,17 +20,19 @@ from __future__ import with_statement
import sickbeard
from sickbeard import search_queue
from sickbeard import logger
import threading
class CurrentSearcher():
class RSSUpdater():
def __init__(self):
self.lock = threading.Lock()
self.amActive = False
def run(self):
search_queue_item = search_queue.RSSSearchQueueItem()
sickbeard.searchQueueScheduler.action.add_item(search_queue_item)
providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]
for provider in providers:
logger.log(u"Updating RSS cache for provider [" + provider.name + "]")
provider.cache.updateCache()

View File

@@ -145,6 +145,8 @@ class BacklogQueueItem(generic_queue.QueueItem):
self.segment = segment
self.wantedEpisodes = []
self._changeMissingEpisodes()
logger.log(u"Seeing if we need any episodes from " + self.show.name + " season " + str(self.segment))
myDB = db.DBConnection()
@@ -222,6 +224,37 @@ class BacklogQueueItem(generic_queue.QueueItem):
return wantedEpisodes
def _changeMissingEpisodes(self):
logger.log(u"Changing all old missing episodes to status WANTED")
curDate = datetime.date.today().toordinal()
myDB = db.DBConnection()
sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE status = ? AND airdate < ?",
[common.UNAIRED, curDate])
for sqlEp in sqlResults:
try:
show = helpers.findCertainShow(sickbeard.showList, int(sqlEp["showid"]))
except exceptions.MultipleShowObjectsException:
logger.log(u"ERROR: expected to find a single show matching " + sqlEp["showid"])
return None
if show == None:
logger.log(u"Unable to find the show with ID " + str(
sqlEp["showid"]) + " in your show list! DB value was " + str(sqlEp), logger.ERROR)
return None
ep = show.getEpisode(sqlEp["season"], sqlEp["episode"])
with ep.lock:
if ep.show.paused:
ep.status = common.SKIPPED
else:
ep.status = common.WANTED
ep.saveToDB()
class FailedQueueItem(generic_queue.QueueItem):
def __init__(self, show, episodes):
generic_queue.QueueItem.__init__(self, 'Retry', FAILED_SEARCH)
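The airdate check in _changeMissingEpisodes() compares proleptic-Gregorian ordinals, so "already aired" is a plain integer comparison. A small worked example (the airdate is an arbitrary illustration, not from the source):

```python
# Worked example of the airdate test in _changeMissingEpisodes(): episodes whose
# ordinal airdate is before today's ordinal get flipped from UNAIRED to WANTED
# (or SKIPPED when the show is paused).
import datetime

curDate = datetime.date.today().toordinal()
airdate = datetime.date(2014, 5, 1).toordinal()   # example airdate (assumption)

if airdate < curDate:
    print("episode has aired; mark it WANTED (or SKIPPED if the show is paused)")
```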

View File

@@ -251,24 +251,18 @@ class TVCache():
indexerid = int(cacheResult)
if not indexerid:
name_list = show_name_helpers.sceneToNormalShowNames(parse_result.series_name)
for cur_name in name_list:
if not indexerid:
for curShow in sickbeard.showList:
if show_name_helpers.isGoodResult(cur_name, curShow, False):
indexerid = int(curShow.indexerid)
break
showResult = helpers.searchDBForShow(parse_result.series_name)
if showResult:
indexerid = int(showResult[0])
if not indexerid:
# do a scene reverse-lookup to get a list of all possible names
scene_id = sickbeard.scene_exceptions.get_scene_exception_by_name(cur_name)
if scene_id:
indexerid = int(scene_id)
break
if not indexerid:
for curShow in sickbeard.showList:
if show_name_helpers.isGoodResult(name, curShow, False):
indexerid = int(curShow.indexerid)
break
showObj = None
if indexerid:
logger.log(u"Found Indexer ID: [" + str(indexerid) + "], for [" + str(cur_name) + "}", logger.DEBUG)
showObj = helpers.findCertainShow(sickbeard.showList, indexerid)
if not showObj:
@@ -318,7 +312,6 @@ class TVCache():
neededEps = self.findNeededEpisodes(episode, manualSearch)
return neededEps
def listPropers(self, date=None, delimiter="."):
myDB = self._getDB()
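The cache's indexer-id lookup is simplified to a three-step chain: DB name match, scene-exception reverse lookup, then a last-resort scan of the loaded show list. A condensed sketch of that order (the helper names come from the diff above; the wiring is an approximation, not a verbatim excerpt):

```python
# Condensed sketch of the new indexer-id resolution order in TVCache.  Assumes
# the sickbeard helpers shown in the hunk above (searchDBForShow,
# get_scene_exception_by_name, isGoodResult).
import sickbeard
from sickbeard import helpers, show_name_helpers


def lookup_indexerid(series_name):
    # 1) direct DB match; searchDBForShow now returns (indexer_id, show_name)
    showResult = helpers.searchDBForShow(series_name)
    if showResult:
        return int(showResult[0])

    # 2) reverse-lookup known scene exception names
    scene_id = sickbeard.scene_exceptions.get_scene_exception_by_name(series_name)
    if scene_id:
        return int(scene_id)

    # 3) last resort: scan the loaded show list for a plausible match
    for curShow in sickbeard.showList:
        if show_name_helpers.isGoodResult(series_name, curShow, False):
            return int(curShow.indexerid)

    return None
```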