Mirror of https://github.com/moparisthebest/SickRage (synced 2024-11-18 07:15:13 -05:00)

Commit bc2016a3ac: Merge remote-tracking branch 'origin/dev'
@@ -888,6 +888,54 @@ td.tvShow a {
     -o-opacity: 1 !important;
     opacity: 1 !important;
 }
+
+/* Anime section for editShow */
+#Anime {
+    clear: both;
+    display: block ;
+    overflow-x: hidden;
+    overflow-y: hidden;
+    padding-bottom: 10px;
+    padding-left: 20px;
+    padding-right: 0px;
+    padding-top: 10px;
+    font-size: 14px;
+}
+
+#Anime div.component-group-desc {
+    float: left;
+    width: 165px;
+}
+#Anime div.component-group-desc p {
+    color: #666666;
+    margin-bottom: 0.4em;
+    margin-left: 0;
+    margin-right: 0;
+    margin-top: 0.4em;
+    width: 95%;
+}
+
+div.blackwhitelist{
+    float: left;
+    text-align: center;
+}
+div.blackwhitelist input {
+    margin: 5px 0px;
+}
+div.blackwhitelist.pool select {
+    min-width: 230px;
+}
+div.blackwhitelist.white select, div.blackwhitelist.black select {
+    min-width: 150px;
+}
+div.blackwhitelist span {
+    display: block;
+    text-align: center;
+}
+div.blackwhitelist.anidb, div.blackwhitelist.manual {
+    margin: 7px 0px;
+}
+
 /* for manage_massEdit */
 .optionWrapper {
     width: 450px;
@@ -146,49 +146,7 @@ Separate words with a comma, e.g. "word1,word2,word3"
 <br /><br />

 #if $show.is_anime
-<p>
-Release Groups:
-</p>
-<input type="text" id="addToPoolText"/>
-<input type="button" value="Add to White" id="addToWhite">
-<input type="button" value="Add to Black" id="addToBlack"><br/>
-<div class="blackwhiteliste white">
-<span>White:</span>
-<select multiple id="white">
-#for $keyword in $whitelist:
-<option value="$keyword">$keyword</option>
-#end for
-</select>
-<br/>
-<input id="removeW" value="Remove >>" type="button"/>
-</div>
-
-<div class="blackwhiteliste pool">
-<span>Pool (Name|Rating|Subed Ep):</span>
-<select multiple id="pool">
-#for $group in $groups
-#if $group not in $whitelist and $group['name'] not in $blacklist:
-<option value="$group['name']">$group['name'] | $group['rating'] | $group['range']</option>
-#end if
-#end for
-</select>
-<br/>
-<input id="addW" value="<< Add" type="button"/>
-<input id="addB" value="Add >>" type="button"/>
-<input id="removeP" value="Remove" type="button"/>
-</div>
-
-<div class="blackwhiteliste black">
-<span>Black:</span>
-<select multiple id="black">
-#for $keyword in $blacklist:
-<option value="$keyword">$keyword</option>
-#end for
-</select>
-<br/>
-<input id="removeB" value="<< Remove" type="button"/>
-</div>
-<br style="clear:both;"/>
+#include $os.path.join($sickbeard.PROG_DIR, "gui/slick/interfaces/default/inc_blackwhitelist.tmpl")
 #end if
 <input type="hidden" name="whitelist" id="whitelist"/>
 <input type="hidden" name="blacklist" id="blacklist"/>

gui/slick/interfaces/default/inc_blackwhitelist.tmpl (new file, 53 lines)
@@ -0,0 +1,53 @@
+<b>Fansub Groups:</b>
+<div class="bwlWrapper" id="Anime">
+<div class="component-group-desc">
+<p>Select your preferred fansub groups from the <b>Available Groups</b> and add them to the <b>Whitelist</b>. Add groups to the <b>Blacklist</b> to ignore them.</p>
+<p>The <b>Whitelist</b> is checked <i>before</i> the <b>Blacklist</b>.</p>
+<p>Groups are shown as <b>Name</b> | <b>Rating</b> | <b>Number of subbed episodes</b>.</p>
+<p>You may also add any fansub group not listed to either list manually.</p>
+</div>
+<div class="blackwhitelist all">
+<div class="blackwhitelist anidb">
+<div class="blackwhitelist white">
+<span><h4>Whitelist</h4></span>
+<select id="white" multiple="multiple" size="12">
+#for $keyword in $whitelist:
+<option value="$keyword">$keyword</option>
+#end for
+</select>
+<br/>
+<input class="btn" style="float: left;" id="removeW" value="Remove" type="button"/>
+</div>
+<div class="blackwhitelist pool">
+<span><h4>Available Groups</h4></span>
+<select id="pool" multiple="multiple" size="12">
+#for $group in $groups
+#if $group not in $whitelist and $group['name'] not in $blacklist:
+<option value="$group['name']">$group['name'] | $group['rating'] | $group['range']</option>
+#end if
+#end for
+</select>
+<br/>
+<input class="btn" style="float: left;" id="addW" value="Add to Whitelist" type="button"/>
+<input class="btn" style="float: right;" id="addB" value="Add to Blacklist" type="button"/>
+</div>
+<div class="blackwhitelist black">
+<span><h4>Blacklist</h4></span>
+<select id="black" multiple="multiple" size="12">
+#for $keyword in $blacklist:
+<option value="$keyword">$keyword</option>
+#end for
+</select>
+<br/>
+<input class="btn" style="float: right;" id="removeB" value="Remove" type="button"/>
+</div>
+</div>
+<br style="clear:both" />
+<div class="blackwhitelist manual">
+<input type="text" id="addToPoolText" size="30" />
+<input class="btn" type="button" value="Add to Whitelist" id="addToWhite">
+<input class="btn" type="button" value="Add to Blacklist" id="addToBlack">
+</div>
+</div>
+<br style="clear:both" />
+</div>
@@ -15,7 +15,7 @@
 #set $numEpisodes = $myDB.select("SELECT COUNT(*) FROM tv_episodes WHERE season != 0 and episode != 0 AND (airdate != 1 OR status IN ("+",".join([str(x) for x in ($Quality.DOWNLOADED + $Quality.SNATCHED + $Quality.SNATCHED_PROPER) + [$ARCHIVED]])+")) AND airdate <= "+$today+" AND status != "+str($IGNORED)+"")[0][0]
 <b>$numShows shows</b> ($numGoodShows active) | <b>$numDLEpisodes/$numEpisodes</b> episodes downloaded |
 <b>Search</b>: <%=str(sickbeard.dailySearchScheduler.timeLeft()).split('.')[0]%> |
-<b>Backlog</b>: $sbdatetime.sbdatetime.sbfdate($sickbeard.backlogSearchScheduler.nextRun())
+<b>Backlog</b>: <%=str(sickbeard.backlogSearchScheduler.timeLeft()).split('.')[0]%>
 </div>
 <ul style="float:right;">
 <li><a href="$sbRoot/manage/manageSearches/forceVersionCheck"><img src="$sbRoot/images/menu/update16.png" alt="" width="16" height="16" />Force Version Check</a></li>
@@ -466,12 +466,14 @@ class Tvdb:

         if cache is True:
             self.config['cache_enabled'] = True
-            self.sess = cachecontrol.CacheControl(cache=caches.FileCache(self._getTempDir()))
+            self.config['cache_location'] = self._getTempDir()
+            self.sess = cachecontrol.CacheControl(cache=caches.FileCache(self.config['cache_location']))
         elif cache is False:
             self.config['cache_enabled'] = False
         elif isinstance(cache, basestring):
             self.config['cache_enabled'] = True
-            self.sess = cachecontrol.CacheControl(cache=caches.FileCache(cache))
+            self.config['cache_location'] = cache
+            self.sess = cachecontrol.CacheControl(cache=caches.FileCache(self.config['cache_location']))
         else:
             raise ValueError("Invalid value for Cache %r (type was %s)" % (cache, type(cache)))

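
For context, the change above keeps the cache directory in config['cache_location'] and hands it to cachecontrol's FileCache. A minimal sketch of that caching pattern using the upstream requests and cachecontrol packages (the directory and URL are illustrative; the copy bundled with SickRage may differ slightly):

import requests
from cachecontrol import CacheControl
from cachecontrol.caches import FileCache

# Wrap a plain requests session so responses are written to a file cache and
# re-served while still fresh, sparing the indexer API on repeat lookups.
cache_location = '/tmp/indexer_cache'  # illustrative path
sess = CacheControl(requests.Session(), cache=FileCache(cache_location))

resp = sess.get('http://example.com/api/series')  # a repeat call can be answered from disk
print(resp.status_code)
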
@@ -318,12 +318,14 @@ class TVRage:

         if cache is True:
             self.config['cache_enabled'] = True
-            self.sess = cachecontrol.CacheControl(cache=caches.FileCache(self._getTempDir()))
+            self.config['cache_location'] = self._getTempDir()
+            self.sess = cachecontrol.CacheControl(cache=caches.FileCache(self.config['cache_location']))
         elif cache is False:
             self.config['cache_enabled'] = False
         elif isinstance(cache, basestring):
             self.config['cache_enabled'] = True
-            self.sess = cachecontrol.CacheControl(cache=caches.FileCache(cache))
+            self.config['cache_location'] = cache
+            self.sess = cachecontrol.CacheControl(cache=caches.FileCache(self.config['cache_location']))
         else:
             raise ValueError("Invalid value for Cache %r (type was %s)" % (cache, type(cache)))

@@ -1133,9 +1133,6 @@ def start():

         # start the maintenance scheduler
         maintenanceScheduler.thread.start()
-        logger.log(u"Performing initial maintenance tasks, please wait ...")
-        while maintenanceScheduler.action.amActive:
-            time.sleep(1)

         # start the daily search scheduler
         dailySearchScheduler.thread.start()
@@ -34,7 +34,7 @@ class Maintenance():
     def run(self, force=False):
         self.amActive = True

-        # refresh scene exceptions too
+        # get and update scene exceptions lists
         scene_exceptions.retrieve_exceptions()

         # refresh network timezones
@@ -124,8 +124,8 @@ class NameParser(object):

         if not self.showObj and not self.naming_pattern:
             # Regex pattern to return the Show / Series Name regardless of the file pattern tossed at it, matched 53 show name examples from regexes.py
-            show_pattern = '''(?:(?:\[.*?\])|(?:\d{3}[\.-]))*[ _\.]?(?P<series_name>.*?(?:[ ._-](\d{4}))?)(?:(?:(?:[ ._-]+\d+)|(?:[ ._-]+s\d{2}))|(?:\W+(?:(?:S\d[\dE._ -])|(?:\d\d?x)|(?:\d{4}\W\d\d\W\d\d)|(?:(?:part|pt)[\._ -]?(?:\d|[ivx]))|Season\W+\d+\W+|E\d+\W+|(?:\d{1,3}.+\d{1,}[a-zA-Z]{2}\W+[a-zA-Z]{3,}\W+\d{4}.+))))'''
-            show_pattern_alt = '''^(?P<series_name>.*?(?:[ ._-](\d{4}))?)(?:(?:(?:[ ._-]+\d+)|(?:[ ._-]+s\d{2}))|(?:\W+(?:(?:S\d[\dE._ -])|(?:\d\d?x)|(?:\d{4}\W\d\d\W\d\d)|(?:(?:part|pt)[\._ -]?(?:\d|[ivx]))|Season\W+\d+\W+|E\d+\W+|(?:\d{1,3}.+\d{1,}[a-zA-Z]{2}\W+[a-zA-Z]{3,}\W+\d{4}.+))))'''
+            show_pattern = '''(?:(?:\[.*?\])|(?:\d{3}[\.-]))*[ _\.]?(?P<series_name>.*?(?:[ ._-]((?!\d{4}\W\d\d\W\d\d\W)\d{4}))?)(?:(?:(?:[ ._-]+\d+)|(?:[ ._-]+s\d{2}))|(?:\W+(?:(?:S\d[\dE._ -])|(?:\d\d?x)|(?:\d{4}\W\d\d\W\d\d)|(?:(?:part|pt)[\._ -]?(?:\d|[ivx]))|Season\W+\d+\W+|E\d+\W+|(?:\d{1,3}.+\d{1,}[a-zA-Z]{2}\W+[a-zA-Z]{3,}\W+\d{4}.+))))'''
+            show_pattern_alt = '''^(?P<series_name>.*?(?:[ ._-]((?!\d{4}\W\d\d\W\d\d\W)\d{4}))?)(?:(?:(?:[ ._-]+\d+)|(?:[ ._-]+s\d{2}))|(?:\W+(?:(?:S\d[\dE._ -])|(?:\d\d?x)|(?:\d{4}\W\d\d\W\d\d)|(?:(?:part|pt)[\._ -]?(?:\d|[ivx]))|Season\W+\d+\W+|E\d+\W+|(?:\d{1,3}.+\d{1,}[a-zA-Z]{2}\W+[a-zA-Z]{3,}\W+\d{4}.+))))'''

             self.showObj = self._matchShowName(name, show_pattern)
             if not self.showObj:
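
The only change to both patterns above is a negative lookahead in front of the optional year capture, so a four-digit year that actually starts an air-by-date stamp is no longer folded into the series name. A small standalone illustration of just that guard (the sample strings are made up):

import re

# Match a 4-digit year only when it is NOT the start of a date like "2014.03.15."
# that belongs to an air-by-date episode.
year_not_date = re.compile(r'(?!\d{4}\W\d\d\W\d\d\W)\d{4}')

print(bool(year_not_date.match('2014.HDTV')))    # True  -- a plain year, still captured
print(bool(year_not_date.match('2014.03.15.')))  # False -- an air-by-date stamp, left for the date matcher
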
@@ -36,7 +36,7 @@ class RSSFeeds:

         self.fc.purge(age)

-    def getRSSFeed(self, url, post_data=None):
+    def getRSSFeed(self, url, post_data=None, request_headers=None):
         if not self.fc:
             return

@@ -47,7 +47,7 @@ class RSSFeeds:
         if post_data:
             url += urllib.urlencode(post_data)

-        feed = self.fc.fetch(url)
+        feed = self.fc.fetch(url, False, False, request_headers)
         if not feed:
             logger.log(u"RSS Error loading URL: " + url, logger.ERROR)
             return
@@ -33,6 +33,23 @@ exceptionCache = {}
 exceptionSeasonCache = {}
 exceptionIndexerCache = {}


+def shouldRefresh(list):
+    myDB = db.DBConnection('cache.db')
+    rows = myDB.select("SELECT last_refreshed FROM scene_exceptions_refresh WHERE list = ?",
+                       [list])
+    if rows:
+        return time.time() > (int(rows[0]['last_refreshed']) + MAX_XEM_AGE_SECS)
+    else:
+        return True
+
+
+def setLastRefresh(list):
+    myDB = db.DBConnection('cache.db')
+    myDB.action("INSERT OR REPLACE INTO scene_exceptions_refresh (list, last_refreshed) VALUES (?,?)",
+                [list, time.time()])
+
+
 def get_scene_exceptions(indexer_id, season=-1):
     """
     Given a indexer_id, return a list of all the scene exceptions.
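
The two helpers added above centralize the "only refetch once the list has gone stale" bookkeeping that was previously inlined for each list. A self-contained sketch of the same pattern against a throwaway SQLite database (the table and column names mirror the diff; the in-memory database, age limit, and primary key are assumptions for illustration):

import sqlite3
import time

MAX_AGE_SECS = 86400  # illustrative: treat a list as stale after one day

conn = sqlite3.connect(':memory:')
conn.execute("CREATE TABLE scene_exceptions_refresh (list TEXT PRIMARY KEY, last_refreshed INTEGER)")

def shouldRefresh(name):
    # True when the list was never fetched or the last fetch is older than the limit.
    row = conn.execute("SELECT last_refreshed FROM scene_exceptions_refresh WHERE list = ?", [name]).fetchone()
    return row is None or time.time() > int(row[0]) + MAX_AGE_SECS

def setLastRefresh(name):
    conn.execute("INSERT OR REPLACE INTO scene_exceptions_refresh (list, last_refreshed) VALUES (?,?)",
                 [name, time.time()])

print(shouldRefresh('xem'))   # True  -- never refreshed
setLastRefresh('xem')
print(shouldRefresh('xem'))   # False -- refreshed just now
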
@@ -50,7 +67,7 @@ def get_scene_exceptions(indexer_id, season=-1):
         try:
             exceptionCache[indexer_id][season] = exceptionsList
         except:
-            exceptionCache[indexer_id] = {season:exceptionsList}
+            exceptionCache[indexer_id] = {season: exceptionsList}
     else:
         exceptionsList = list(set(exceptionCache[indexer_id][season]))

@@ -59,6 +76,7 @@ def get_scene_exceptions(indexer_id, season=-1):

     return exceptionsList

+
 def get_all_scene_exceptions(indexer_id):
     myDB = db.DBConnection('cache.db')
     exceptions = myDB.select("SELECT show_name,season FROM scene_exceptions WHERE indexer_id = ?", [indexer_id])
|
|||||||
|
|
||||||
return exceptionsList
|
return exceptionsList
|
||||||
|
|
||||||
|
|
||||||
def get_scene_seasons(indexer_id):
|
def get_scene_seasons(indexer_id):
|
||||||
"""
|
"""
|
||||||
return a list of season numbers that have scene exceptions
|
return a list of season numbers that have scene exceptions
|
||||||
@@ -113,7 +132,8 @@ def get_scene_exception_by_name_multiple(show_name):
         cur_season = int(cur_exception["season"])

         if show_name.lower() in (
-                cur_exception_name.lower(), sickbeard.helpers.sanitizeSceneName(cur_exception_name).lower().replace('.', ' ')):
+                cur_exception_name.lower(),
+                sickbeard.helpers.sanitizeSceneName(cur_exception_name).lower().replace('.', ' ')):
             logger.log(u"Scene exception lookup got indexer id " + str(cur_indexer_id) + u", using that", logger.DEBUG)
             out.append((cur_indexer_id, cur_season))
     if out:
@@ -135,46 +155,49 @@ def retrieve_exceptions():
     exceptionSeasonCache = {}

     # exceptions are stored on github pages
-    for indexer in sickbeard.indexerApi().indexers:
-        logger.log(u"Checking for scene exception updates for " + sickbeard.indexerApi(indexer).name + "")
-
-        url = sickbeard.indexerApi(indexer).config['scene_url']
-
-        url_data = helpers.getURL(url)
-
-        if url_data is None:
-            # When urlData is None, trouble connecting to github
-            logger.log(u"Check scene exceptions update failed. Unable to get URL: " + url, logger.ERROR)
-            continue
-
-        else:
-            # each exception is on one line with the format indexer_id: 'show name 1', 'show name 2', etc
-            for cur_line in url_data.splitlines():
-                cur_line = cur_line.decode('utf-8')
-                indexer_id, sep, aliases = cur_line.partition(':')  # @UnusedVariable
-
-                if not aliases:
-                    continue
-
-                indexer_id = int(indexer_id)
-
-                # regex out the list of shows, taking \' into account
-                # alias_list = [re.sub(r'\\(.)', r'\1', x) for x in re.findall(r"'(.*?)(?<!\\)',?", aliases)]
-                alias_list = [{re.sub(r'\\(.)', r'\1', x): -1} for x in re.findall(r"'(.*?)(?<!\\)',?", aliases)]
-
-                exception_dict[indexer_id] = alias_list
-
-        logger.log(u"Checking for XEM scene exception updates for " + sickbeard.indexerApi(indexer).name)
-        xem_exceptions = _xem_excpetions_fetcher(indexer)
-        for xem_ex in xem_exceptions:  # anidb xml anime exceptions
-            if xem_ex in exception_dict:
-                exception_dict[xem_ex] = exception_dict[xem_ex] + xem_exceptions[xem_ex]
-            else:
-                exception_dict[xem_ex] = xem_exceptions[xem_ex]
-
-    logger.log(u"Checking for scene exception updates for AniDB")
+    if setLastRefresh('normal'):
+        for indexer in sickbeard.indexerApi().indexers:
+            logger.log(u"Checking for scene exception updates for " + sickbeard.indexerApi(indexer).name + "")
+
+            url = sickbeard.indexerApi(indexer).config['scene_url']
+
+            url_data = helpers.getURL(url)
+
+            if url_data is None:
+                # When urlData is None, trouble connecting to github
+                logger.log(u"Check scene exceptions update failed. Unable to get URL: " + url, logger.ERROR)
+                continue
+
+            else:
+                setLastRefresh('normal')
+
+                # each exception is on one line with the format indexer_id: 'show name 1', 'show name 2', etc
+                for cur_line in url_data.splitlines():
+                    cur_line = cur_line.decode('utf-8')
+                    indexer_id, sep, aliases = cur_line.partition(':')  # @UnusedVariable
+
+                    if not aliases:
+                        continue
+
+                    indexer_id = int(indexer_id)
+
+                    # regex out the list of shows, taking \' into account
+                    # alias_list = [re.sub(r'\\(.)', r'\1', x) for x in re.findall(r"'(.*?)(?<!\\)',?", aliases)]
+                    alias_list = [{re.sub(r'\\(.)', r'\1', x): -1} for x in re.findall(r"'(.*?)(?<!\\)',?", aliases)]
+
+                    exception_dict[indexer_id] = alias_list
+
+    # XEM scene exceptions
+    xem_exceptions = _xem_excpetions_fetcher()
+    for xem_ex in xem_exceptions:
+        if xem_ex in exception_dict:
+            exception_dict[xem_ex] = exception_dict[xem_ex] + xem_exceptions[xem_ex]
+        else:
+            exception_dict[xem_ex] = xem_exceptions[xem_ex]
+
+    # AniDB scene exceptions
     local_exceptions = _retrieve_anidb_mainnames()
-    for local_ex in local_exceptions:  # anidb xml anime exceptions
+    for local_ex in local_exceptions:
         if local_ex in exception_dict:
             exception_dict[local_ex] = exception_dict[local_ex] + local_exceptions[local_ex]
         else:
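
Each line fetched from the exceptions page has the form indexer_id: 'name 1', 'name 2', ..., and the list comprehension above turns it into one single-key dict per alias, mapping the name to season -1 (the module's "any season" default). A standalone illustration with a made-up line that includes an escaped quote:

import re

# One exceptions line (the id and names are invented for illustration).
cur_line = "75978: 'Family Guy', 'Padre de Familia', 'It\\'s a Family Guy'"

indexer_id, sep, aliases = cur_line.partition(':')
# Split on unescaped quotes, then drop the escaping backslashes.
alias_list = [{re.sub(r'\\(.)', r'\1', x): -1} for x in re.findall(r"'(.*?)(?<!\\)',?", aliases)]

print(int(indexer_id), alias_list)
# 75978 [{'Family Guy': -1}, {'Padre de Familia': -1}, {"It's a Family Guy": -1}]
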
@@ -209,6 +232,7 @@ def retrieve_exceptions():
     # build indexer scene name cache
     buildIndexerCache()

+
 def update_scene_exceptions(indexer_id, scene_exceptions):
     """
     Given a indexer_id, and a list of all show scene exceptions, update the db.
@@ -235,15 +259,9 @@ def _retrieve_anidb_mainnames():

     anidb_mainNames = {}

-    myDB = db.DBConnection('cache.db')
-    rows = myDB.select("SELECT last_refreshed FROM scene_exceptions_refresh WHERE list = ?",
-                       ['anidb'])
-    if rows:
-        refresh = time.time() > (int(rows[0]['last_refreshed']) + MAX_ANIDB_AGE_SECS)
-    else:
-        refresh = True
-
-    if refresh:
+    if shouldRefresh('anidb'):
+        logger.log(u"Checking for scene exception updates for AniDB")
+
         for show in sickbeard.showList:
             if show.is_anime and show.indexer == 1:
                 try:
@@ -257,41 +275,40 @@ def _retrieve_anidb_mainnames():
                     anidb_mainNames[show.indexerid] = [{anime.name: -1}]

         if success:
-            myDB.action("INSERT OR REPLACE INTO scene_exceptions_refresh (list, last_refreshed) VALUES (?,?)",
-                        ['anidb', time.time()])
+            setLastRefresh('anidb')

     return anidb_mainNames


-def _xem_excpetions_fetcher(indexer):
+def _xem_excpetions_fetcher():
     global MAX_XEM_AGE_SECS

     exception_dict = {}

-    myDB = db.DBConnection('cache.db')
-    rows = myDB.select("SELECT last_refreshed FROM scene_exceptions_refresh WHERE list = ?",
-                       ['xem'])
-    if rows:
-        refresh = time.time() > (int(rows[0]['last_refreshed']) + MAX_XEM_AGE_SECS)
-    else:
-        refresh = True
-
-    if refresh:
-        url = "http://thexem.de/map/allNames?origin=%s&seasonNumbers=1" % sickbeard.indexerApi(indexer).config['xem_origin']
-
-        url_data = helpers.getURL(url, json=True)
-        if url_data is None:
-            logger.log(u"Check scene exceptions update failed. Unable to get URL: " + url, logger.ERROR)
-            return exception_dict
-
-        if url_data['result'] == 'failure':
-            return exception_dict
-
-        myDB.action("INSERT OR REPLACE INTO scene_exceptions_refresh (list, last_refreshed) VALUES (?,?)",
-                    ['xem', time.time()])
-
-        for indexerid, names in url_data['data'].items():
-            exception_dict[int(indexerid)] = names
+    if shouldRefresh('xem'):
+        success = False
+        for indexer in sickbeard.indexerApi().indexers:
+            logger.log(u"Checking for XEM scene exception updates for " + sickbeard.indexerApi(indexer).name)
+
+            url = "http://thexem.de/map/allNames?origin=%s&seasonNumbers=1" % sickbeard.indexerApi(indexer).config[
+                'xem_origin']
+
+            url_data = helpers.getURL(url, json=True)
+            if url_data is None:
+                logger.log(u"Check scene exceptions update failed for " + sickbeard.indexerApi(
+                    indexer).name + ", Unable to get URL: " + url, logger.ERROR)
+                continue
+
+            if url_data['result'] == 'failure':
+                continue
+
+            for indexerid, names in url_data['data'].items():
+                exception_dict[int(indexerid)] = names
+
+            success = True
+
+        if success:
+            setLastRefresh('xem')

     return exception_dict

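
The reworked fetcher above now loops over every indexer itself and only stamps the 'xem' list as refreshed once at least one indexer answered. The way the JSON payload is folded into exception_dict is unchanged; a tiny illustration with a made-up response:

# Rough shape of a thexem.de allNames response as consumed above (payload invented):
url_data = {
    'result': 'success',
    'data': {
        '70336': [{'The Tonight Show': -1}],
        '248742': [{'Some Anime Title': 1}],
    },
}

exception_dict = {}
for indexerid, names in url_data['data'].items():
    exception_dict[int(indexerid)] = names  # keys become ints, values stay as name-to-season dicts

print(exception_dict)
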
@@ -303,6 +320,7 @@ def getSceneSeasons(indexer_id):
     seasons = myDB.select("SELECT DISTINCT season FROM scene_exceptions WHERE indexer_id = ?", [indexer_id])
     return [cur_exception["season"] for cur_exception in seasons]

+
 def buildIndexerCache():
     logger.log(u"Updating internal scene name cache", logger.MESSAGE)
     global exceptionIndexerCache
@@ -45,7 +45,7 @@ class BacklogSearcher:
     def __init__(self):

         self._lastBacklog = self._get_lastBacklog()
-        self.cycleTime = 7
+        self.cycleTime = sickbeard.BACKLOG_FREQUENCY/60/24
         self.lock = threading.Lock()
         self.amActive = False
         self.amPaused = False
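
The backlog cycle is now derived from the configured BACKLOG_FREQUENCY (apparently in minutes) instead of the hard-coded 7. A quick arithmetic check with an illustrative value:

# Illustrative: a frequency of 10080 minutes reproduces the old 7-day cycle.
BACKLOG_FREQUENCY = 10080              # minutes (value chosen for illustration)
cycleTime = BACKLOG_FREQUENCY / 60 / 24
print(cycleTime)                       # 7.0 on Python 3; 7 on Python 2, where / truncates
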
@@ -136,9 +136,9 @@ class TVCache():

         return []

-    def getRSSFeed(self, url, post_data=None):
+    def getRSSFeed(self, url, post_data=None, request_headers=None):
         with RSSFeeds(self.providerID) as feed:
-            data = feed.getRSSFeed(url, post_data)
+            data = feed.getRSSFeed(url, post_data, request_headers)
             return data

     def _translateTitle(self, title):