Merge remote-tracking branch 'origin/dev'

echel0n 2014-06-30 00:01:18 -07:00
commit bc2016a3ac
13 changed files with 204 additions and 126 deletions


@@ -888,6 +888,54 @@ td.tvShow a {
-o-opacity: 1 !important;
opacity: 1 !important;
}
/* Anime section for editShow */
#Anime {
clear: both;
display: block;
overflow-x: hidden;
overflow-y: hidden;
padding-bottom: 10px;
padding-left: 20px;
padding-right: 0px;
padding-top: 10px;
font-size: 14px;
}
#Anime div.component-group-desc {
float: left;
width: 165px;
}
#Anime div.component-group-desc p {
color: #666666;
margin-bottom: 0.4em;
margin-left: 0;
margin-right: 0;
margin-top: 0.4em;
width: 95%;
}
div.blackwhitelist {
float: left;
text-align: center;
}
div.blackwhitelist input {
margin: 5px 0px;
}
div.blackwhitelist.pool select {
min-width: 230px;
}
div.blackwhitelist.white select, div.blackwhitelist.black select {
min-width: 150px;
}
div.blackwhitelist span {
display: block;
text-align: center;
}
div.blackwhitelist.anidb, div.blackwhitelist.manual {
margin: 7px 0px;
}
/* for manage_massEdit */
.optionWrapper {
width: 450px;


@@ -146,49 +146,7 @@ Separate words with a comma, e.g. "word1,word2,word3"
<br /><br />
#if $show.is_anime
<p>
Release Groups:
</p>
<input type="text" id="addToPoolText"/>
<input type="button" value="Add to White" id="addToWhite">
<input type="button" value="Add to Black" id="addToBlack"><br/>
<div class="blackwhiteliste white">
<span>White:</span>
<select multiple id="white">
#for $keyword in $whitelist:
<option value="$keyword">$keyword</option>
#end for
</select>
<br/>
<input id="removeW" value="Remove &gt;&gt;" type="button"/>
</div>
<div class="blackwhiteliste pool">
<span>Pool (Name|Rating|Subed Ep):</span>
<select multiple id="pool">
#for $group in $groups
#if $group not in $whitelist and $group['name'] not in $blacklist:
<option value="$group['name']">$group['name'] | $group['rating'] | $group['range']</option>
#end if
#end for
</select>
<br/>
<input id="addW" value="&lt;&lt; Add" type="button"/>
<input id="addB" value="Add &gt;&gt;" type="button"/>
<input id="removeP" value="Remove" type="button"/>
</div>
<div class="blackwhiteliste black">
<span>Black:</span>
<select multiple id="black">
#for $keyword in $blacklist:
<option value="$keyword">$keyword</option>
#end for
</select>
<br/>
<input id="removeB" value="&lt;&lt; Remove" type="button"/>
</div>
<br style="clear:both;"/>
#include $os.path.join($sickbeard.PROG_DIR, "gui/slick/interfaces/default/inc_blackwhitelist.tmpl")
#end if
<input type="hidden" name="whitelist" id="whitelist"/>
<input type="hidden" name="blacklist" id="blacklist"/>


@@ -0,0 +1,53 @@
<b>Fansub Groups:</b>
<div class="bwlWrapper" id="Anime">
<div class="component-group-desc">
<p>Select your preferred fansub groups from the <b>Available Groups</b> and add them to the <b>Whitelist</b>. Add groups to the <b>Blacklist</b> to ignore them.</p>
<p>The <b>Whitelist</b> is checked <i>before</i> the <b>Blacklist</b>.</p>
<p>Groups are shown as <b>Name</b> | <b>Rating</b> | <b>Number of subbed episodes</b>.</p>
<p>You may also manually add any fansub group that is not listed to either list.</p>
</div>
<div class="blackwhitelist all">
<div class="blackwhitelist anidb">
<div class="blackwhitelist white">
<span><h4>Whitelist</h4></span>
<select id="white" multiple="multiple" size="12">
#for $keyword in $whitelist:
<option value="$keyword">$keyword</option>
#end for
</select>
<br/>
<input class="btn" style="float: left;" id="removeW" value="Remove" type="button"/>
</div>
<div class="blackwhitelist pool">
<span><h4>Available Groups</h4></span>
<select id="pool" multiple="multiple" size="12">
#for $group in $groups
#if $group not in $whitelist and $group['name'] not in $blacklist:
<option value="$group['name']">$group['name'] | $group['rating'] | $group['range']</option>
#end if
#end for
</select>
<br/>
<input class="btn" style="float: left;" id="addW" value="Add to Whitelist" type="button"/>
<input class="btn" style="float: right;" id="addB" value="Add to Blacklist" type="button"/>
</div>
<div class="blackwhitelist black">
<span><h4>Blacklist</h4></span>
<select id="black" multiple="multiple" size="12">
#for $keyword in $blacklist:
<option value="$keyword">$keyword</option>
#end for
</select>
<br/>
<input class="btn" style="float: right;" id="removeB" value="Remove" type="button"/>
</div>
</div>
<br style="clear:both" />
<div class="blackwhitelist manual">
<input type="text" id="addToPoolText" size="30" />
<input class="btn" type="button" value="Add to Whitelist" id="addToWhite">
<input class="btn" type="button" value="Add to Blacklist" id="addToBlack">
</div>
</div>
<br style="clear:both" />
</div>
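
The help text above says the whitelist is checked before the blacklist when release groups are filtered. A minimal sketch of that precedence in plain Python; the names whitelist, blacklist and wanted_release() are hypothetical and this is not the actual SickRage filter code:

    # Hypothetical precedence sketch: a non-empty whitelist wins, otherwise the
    # blacklist is consulted. Group names are invented for illustration.
    whitelist = {'HorribleSubs'}
    blacklist = {'BadGroup'}

    def wanted_release(group):
        if whitelist:                      # whitelist is checked first
            return group in whitelist
        return group not in blacklist      # only then does the blacklist filter

    print(wanted_release('HorribleSubs'))  # True  - whitelisted
    print(wanted_release('BadGroup'))      # False - not on the whitelist
    print(wanted_release('OtherGroup'))    # False - a whitelist is set and this group is not on it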


@@ -15,7 +15,7 @@
#set $numEpisodes = $myDB.select("SELECT COUNT(*) FROM tv_episodes WHERE season != 0 and episode != 0 AND (airdate != 1 OR status IN ("+",".join([str(x) for x in ($Quality.DOWNLOADED + $Quality.SNATCHED + $Quality.SNATCHED_PROPER) + [$ARCHIVED]])+")) AND airdate <= "+$today+" AND status != "+str($IGNORED)+"")[0][0]
<b>$numShows shows</b> ($numGoodShows active) | <b>$numDLEpisodes/$numEpisodes</b> episodes downloaded |
<b>Search</b>: <%=str(sickbeard.dailySearchScheduler.timeLeft()).split('.')[0]%> |
<b>Backlog</b>: $sbdatetime.sbdatetime.sbfdate($sickbeard.backlogSearchScheduler.nextRun())
<b>Backlog</b>: <%=str(sickbeard.backlogSearchScheduler.timeLeft()).split('.')[0]%>
</div>
<ul style="float:right;">
<li><a href="$sbRoot/manage/manageSearches/forceVersionCheck"><img src="$sbRoot/images/menu/update16.png" alt="" width="16" height="16" />Force Version Check</a></li>
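
Both scheduler countdowns above are rendered with str(timeLeft()).split('.')[0]. A small illustration of that idiom, assuming timeLeft() returns a datetime.timedelta (as the existing daily search line implies):

    import datetime

    # stand-in for the value a scheduler's timeLeft() is assumed to return
    left = datetime.timedelta(hours=2, minutes=5, seconds=30, microseconds=123456)

    print(str(left))                 # 2:05:30.123456
    print(str(left).split('.')[0])   # 2:05:30 - microseconds stripped for display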


@@ -466,12 +466,14 @@ class Tvdb:
if cache is True:
self.config['cache_enabled'] = True
self.sess = cachecontrol.CacheControl(cache=caches.FileCache(self._getTempDir()))
self.config['cache_location'] = self._getTempDir()
self.sess = cachecontrol.CacheControl(cache=caches.FileCache(self.config['cache_location']))
elif cache is False:
self.config['cache_enabled'] = False
elif isinstance(cache, basestring):
self.config['cache_enabled'] = True
self.sess = cachecontrol.CacheControl(cache=caches.FileCache(cache))
self.config['cache_location'] = cache
self.sess = cachecontrol.CacheControl(cache=caches.FileCache(self.config['cache_location']))
else:
raise ValueError("Invalid value for Cache %r (type was %s)" % (cache, type(cache)))


@@ -318,12 +318,14 @@ class TVRage:
if cache is True:
self.config['cache_enabled'] = True
self.sess = cachecontrol.CacheControl(cache=caches.FileCache(self._getTempDir()))
self.config['cache_location'] = self._getTempDir()
self.sess = cachecontrol.CacheControl(cache=caches.FileCache(self.config['cache_location']))
elif cache is False:
self.config['cache_enabled'] = False
elif isinstance(cache, basestring):
self.config['cache_enabled'] = True
self.sess = cachecontrol.CacheControl(cache=caches.FileCache(cache))
self.config['cache_location'] = cache
self.sess = cachecontrol.CacheControl(cache=caches.FileCache(self.config['cache_location']))
else:
raise ValueError("Invalid value for Cache %r (type was %s)" % (cache, type(cache)))


@@ -1133,9 +1133,6 @@ def start():
# start the maintenance scheduler
maintenanceScheduler.thread.start()
logger.log(u"Performing initial maintenance tasks, please wait ...")
while maintenanceScheduler.action.amActive:
time.sleep(1)
# start the daily search scheduler
dailySearchScheduler.thread.start()


@@ -34,7 +34,7 @@ class Maintenance():
def run(self, force=False):
self.amActive = True
# refresh scene exceptions too
# get and update scene exceptions lists
scene_exceptions.retrieve_exceptions()
# refresh network timezones


@@ -124,8 +124,8 @@ class NameParser(object):
if not self.showObj and not self.naming_pattern:
# Regex pattern to return the Show / Series Name regardless of the file pattern tossed at it, matched 53 show name examples from regexes.py
show_pattern = '''(?:(?:\[.*?\])|(?:\d{3}[\.-]))*[ _\.]?(?P<series_name>.*?(?:[ ._-](\d{4}))?)(?:(?:(?:[ ._-]+\d+)|(?:[ ._-]+s\d{2}))|(?:\W+(?:(?:S\d[\dE._ -])|(?:\d\d?x)|(?:\d{4}\W\d\d\W\d\d)|(?:(?:part|pt)[\._ -]?(?:\d|[ivx]))|Season\W+\d+\W+|E\d+\W+|(?:\d{1,3}.+\d{1,}[a-zA-Z]{2}\W+[a-zA-Z]{3,}\W+\d{4}.+))))'''
show_pattern_alt = '''^(?P<series_name>.*?(?:[ ._-](\d{4}))?)(?:(?:(?:[ ._-]+\d+)|(?:[ ._-]+s\d{2}))|(?:\W+(?:(?:S\d[\dE._ -])|(?:\d\d?x)|(?:\d{4}\W\d\d\W\d\d)|(?:(?:part|pt)[\._ -]?(?:\d|[ivx]))|Season\W+\d+\W+|E\d+\W+|(?:\d{1,3}.+\d{1,}[a-zA-Z]{2}\W+[a-zA-Z]{3,}\W+\d{4}.+))))'''
show_pattern = '''(?:(?:\[.*?\])|(?:\d{3}[\.-]))*[ _\.]?(?P<series_name>.*?(?:[ ._-]((?!\d{4}\W\d\d\W\d\d\W)\d{4}))?)(?:(?:(?:[ ._-]+\d+)|(?:[ ._-]+s\d{2}))|(?:\W+(?:(?:S\d[\dE._ -])|(?:\d\d?x)|(?:\d{4}\W\d\d\W\d\d)|(?:(?:part|pt)[\._ -]?(?:\d|[ivx]))|Season\W+\d+\W+|E\d+\W+|(?:\d{1,3}.+\d{1,}[a-zA-Z]{2}\W+[a-zA-Z]{3,}\W+\d{4}.+))))'''
show_pattern_alt = '''^(?P<series_name>.*?(?:[ ._-]((?!\d{4}\W\d\d\W\d\d\W)\d{4}))?)(?:(?:(?:[ ._-]+\d+)|(?:[ ._-]+s\d{2}))|(?:\W+(?:(?:S\d[\dE._ -])|(?:\d\d?x)|(?:\d{4}\W\d\d\W\d\d)|(?:(?:part|pt)[\._ -]?(?:\d|[ivx]))|Season\W+\d+\W+|E\d+\W+|(?:\d{1,3}.+\d{1,}[a-zA-Z]{2}\W+[a-zA-Z]{3,}\W+\d{4}.+))))'''
self.showObj = self._matchShowName(name, show_pattern)
if not self.showObj:


@@ -36,7 +36,7 @@ class RSSFeeds:
self.fc.purge(age)
def getRSSFeed(self, url, post_data=None):
def getRSSFeed(self, url, post_data=None, request_headers=None):
if not self.fc:
return
@@ -47,7 +47,7 @@ class RSSFeeds:
if post_data:
url += urllib.urlencode(post_data)
feed = self.fc.fetch(url)
feed = self.fc.fetch(url, False, False, request_headers)
if not feed:
logger.log(u"RSS Error loading URL: " + url, logger.ERROR)
return
@@ -59,4 +59,4 @@ class RSSFeeds:
logger.log(u"No RSS items found using URL: " + url, logger.WARNING)
return
return feed
return feed
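
getRSSFeed() now accepts an optional request_headers dict and forwards it to the feed cache's fetch() call (mirrored by TVCache.getRSSFeed further below), so a provider can attach extra HTTP headers to its feed request. A self-contained sketch of that pass-through with a stand-in fetcher instead of the real feedcache object; the URL and header below are invented for illustration:

    # fetch_feed() stands in for self.fc.fetch(); its keyword names are illustrative.
    def fetch_feed(url, force_update=False, offline=False, request_headers=None):
        print('GET %s with headers %r' % (url, request_headers or {}))
        return {'entries': []}

    def getRSSFeed(url, post_data=None, request_headers=None):
        # the two False positional arguments mirror the fc.fetch(url, False, False, ...) call above
        return fetch_feed(url, False, False, request_headers)

    getRSSFeed('http://example.com/rss', request_headers={'Accept': 'application/rss+xml'})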


@@ -33,6 +33,23 @@ exceptionCache = {}
exceptionSeasonCache = {}
exceptionIndexerCache = {}
def shouldRefresh(list):
myDB = db.DBConnection('cache.db')
rows = myDB.select("SELECT last_refreshed FROM scene_exceptions_refresh WHERE list = ?",
[list])
if rows:
return time.time() > (int(rows[0]['last_refreshed']) + MAX_XEM_AGE_SECS)
else:
return True
def setLastRefresh(list):
myDB = db.DBConnection('cache.db')
myDB.action("INSERT OR REPLACE INTO scene_exceptions_refresh (list, last_refreshed) VALUES (?,?)",
[list, time.time()])
def get_scene_exceptions(indexer_id, season=-1):
"""
Given a indexer_id, return a list of all the scene exceptions.
@@ -50,7 +67,7 @@ def get_scene_exceptions(indexer_id, season=-1):
try:
exceptionCache[indexer_id][season] = exceptionsList
except:
exceptionCache[indexer_id] = {season:exceptionsList}
exceptionCache[indexer_id] = {season: exceptionsList}
else:
exceptionsList = list(set(exceptionCache[indexer_id][season]))
@@ -59,6 +76,7 @@ def get_scene_exceptions(indexer_id, season=-1):
return exceptionsList
def get_all_scene_exceptions(indexer_id):
myDB = db.DBConnection('cache.db')
exceptions = myDB.select("SELECT show_name,season FROM scene_exceptions WHERE indexer_id = ?", [indexer_id])
@@ -71,6 +89,7 @@ def get_all_scene_exceptions(indexer_id):
return exceptionsList
def get_scene_seasons(indexer_id):
"""
return a list of season numbers that have scene exceptions
@@ -113,7 +132,8 @@ def get_scene_exception_by_name_multiple(show_name):
cur_season = int(cur_exception["season"])
if show_name.lower() in (
cur_exception_name.lower(), sickbeard.helpers.sanitizeSceneName(cur_exception_name).lower().replace('.', ' ')):
cur_exception_name.lower(),
sickbeard.helpers.sanitizeSceneName(cur_exception_name).lower().replace('.', ' ')):
logger.log(u"Scene exception lookup got indexer id " + str(cur_indexer_id) + u", using that", logger.DEBUG)
out.append((cur_indexer_id, cur_season))
if out:
@@ -135,46 +155,49 @@ def retrieve_exceptions():
exceptionSeasonCache = {}
# exceptions are stored on github pages
for indexer in sickbeard.indexerApi().indexers:
logger.log(u"Checking for scene exception updates for " + sickbeard.indexerApi(indexer).name + "")
if shouldRefresh('normal'):
for indexer in sickbeard.indexerApi().indexers:
logger.log(u"Checking for scene exception updates for " + sickbeard.indexerApi(indexer).name + "")
url = sickbeard.indexerApi(indexer).config['scene_url']
url = sickbeard.indexerApi(indexer).config['scene_url']
url_data = helpers.getURL(url)
url_data = helpers.getURL(url)
if url_data is None:
# When urlData is None, trouble connecting to github
logger.log(u"Check scene exceptions update failed. Unable to get URL: " + url, logger.ERROR)
continue
if url_data is None:
# When urlData is None, trouble connecting to github
logger.log(u"Check scene exceptions update failed. Unable to get URL: " + url, logger.ERROR)
continue
else:
# each exception is on one line with the format indexer_id: 'show name 1', 'show name 2', etc
for cur_line in url_data.splitlines():
cur_line = cur_line.decode('utf-8')
indexer_id, sep, aliases = cur_line.partition(':') # @UnusedVariable
if not aliases:
continue
indexer_id = int(indexer_id)
# regex out the list of shows, taking \' into account
# alias_list = [re.sub(r'\\(.)', r'\1', x) for x in re.findall(r"'(.*?)(?<!\\)',?", aliases)]
alias_list = [{re.sub(r'\\(.)', r'\1', x): -1} for x in re.findall(r"'(.*?)(?<!\\)',?", aliases)]
exception_dict[indexer_id] = alias_list
logger.log(u"Checking for XEM scene exception updates for " + sickbeard.indexerApi(indexer).name)
xem_exceptions = _xem_excpetions_fetcher(indexer)
for xem_ex in xem_exceptions: # anidb xml anime exceptions
if xem_ex in exception_dict:
exception_dict[xem_ex] = exception_dict[xem_ex] + xem_exceptions[xem_ex]
else:
exception_dict[xem_ex] = xem_exceptions[xem_ex]
setLastRefresh('normal')
logger.log(u"Checking for scene exception updates for AniDB")
# each exception is on one line with the format indexer_id: 'show name 1', 'show name 2', etc
for cur_line in url_data.splitlines():
cur_line = cur_line.decode('utf-8')
indexer_id, sep, aliases = cur_line.partition(':') # @UnusedVariable
if not aliases:
continue
indexer_id = int(indexer_id)
# regex out the list of shows, taking \' into account
# alias_list = [re.sub(r'\\(.)', r'\1', x) for x in re.findall(r"'(.*?)(?<!\\)',?", aliases)]
alias_list = [{re.sub(r'\\(.)', r'\1', x): -1} for x in re.findall(r"'(.*?)(?<!\\)',?", aliases)]
exception_dict[indexer_id] = alias_list
# XEM scene exceptions
xem_exceptions = _xem_excpetions_fetcher()
for xem_ex in xem_exceptions:
if xem_ex in exception_dict:
exception_dict[xem_ex] = exception_dict[xem_ex] + xem_exceptions[xem_ex]
else:
exception_dict[xem_ex] = xem_exceptions[xem_ex]
# AniDB scene exceptions
local_exceptions = _retrieve_anidb_mainnames()
for local_ex in local_exceptions: # anidb xml anime exceptions
for local_ex in local_exceptions:
if local_ex in exception_dict:
exception_dict[local_ex] = exception_dict[local_ex] + local_exceptions[local_ex]
else:
@@ -209,6 +232,7 @@ def retrieve_exceptions():
# build indexer scene name cache
buildIndexerCache()
def update_scene_exceptions(indexer_id, scene_exceptions):
"""
Given a indexer_id, and a list of all show scene exceptions, update the db.
@@ -235,15 +259,9 @@ def _retrieve_anidb_mainnames():
anidb_mainNames = {}
myDB = db.DBConnection('cache.db')
rows = myDB.select("SELECT last_refreshed FROM scene_exceptions_refresh WHERE list = ?",
['anidb'])
if rows:
refresh = time.time() > (int(rows[0]['last_refreshed']) + MAX_ANIDB_AGE_SECS)
else:
refresh = True
if shouldRefresh('anidb'):
logger.log(u"Checking for scene exception updates for AniDB")
if refresh:
for show in sickbeard.showList:
if show.is_anime and show.indexer == 1:
try:
@@ -257,41 +275,40 @@ def _retrieve_anidb_mainnames():
anidb_mainNames[show.indexerid] = [{anime.name: -1}]
if success:
myDB.action("INSERT OR REPLACE INTO scene_exceptions_refresh (list, last_refreshed) VALUES (?,?)",
['anidb', time.time()])
setLastRefresh('anidb')
return anidb_mainNames
def _xem_excpetions_fetcher(indexer):
global MAX_XEM_AGE_SECS
def _xem_excpetions_fetcher():
global MAX_XEM_AGE_SECS
exception_dict = {}
myDB = db.DBConnection('cache.db')
rows = myDB.select("SELECT last_refreshed FROM scene_exceptions_refresh WHERE list = ?",
['xem'])
if rows:
refresh = time.time() > (int(rows[0]['last_refreshed']) + MAX_XEM_AGE_SECS)
else:
refresh = True
if shouldRefresh('xem'):
success = False
for indexer in sickbeard.indexerApi().indexers:
logger.log(u"Checking for XEM scene exception updates for " + sickbeard.indexerApi(indexer).name)
if refresh:
url = "http://thexem.de/map/allNames?origin=%s&seasonNumbers=1" % sickbeard.indexerApi(indexer).config['xem_origin']
url = "http://thexem.de/map/allNames?origin=%s&seasonNumbers=1" % sickbeard.indexerApi(indexer).config[
'xem_origin']
url_data = helpers.getURL(url, json=True)
if url_data is None:
logger.log(u"Check scene exceptions update failed. Unable to get URL: " + url, logger.ERROR)
return exception_dict
url_data = helpers.getURL(url, json=True)
if url_data is None:
logger.log(u"Check scene exceptions update failed for " + sickbeard.indexerApi(
indexer).name + ", Unable to get URL: " + url, logger.ERROR)
continue
if url_data['result'] == 'failure':
return exception_dict
if url_data['result'] == 'failure':
continue
myDB.action("INSERT OR REPLACE INTO scene_exceptions_refresh (list, last_refreshed) VALUES (?,?)",
['xem', time.time()])
for indexerid, names in url_data['data'].items():
exception_dict[int(indexerid)] = names
for indexerid, names in url_data['data'].items():
exception_dict[int(indexerid)] = names
success = True
if success:
setLastRefresh('xem')
return exception_dict
@@ -303,6 +320,7 @@ def getSceneSeasons(indexer_id):
seasons = myDB.select("SELECT DISTINCT season FROM scene_exceptions WHERE indexer_id = ?", [indexer_id])
return [cur_exception["season"] for cur_exception in seasons]
def buildIndexerCache():
logger.log(u"Updating internal scene name cache", logger.MESSAGE)
global exceptionIndexerCache
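
The refactor above centralizes the staleness check in shouldRefresh() and the timestamp write in setLastRefresh(), keyed by a list name ('normal', 'anidb', 'xem') in cache.db. A stand-alone sketch of that gating pattern using sqlite3 directly; the table layout mirrors scene_exceptions_refresh, but the code is illustrative rather than the project's db wrapper, and MAX_AGE_SECS is a hypothetical interval:

    import sqlite3
    import time

    MAX_AGE_SECS = 86400  # hypothetical; the code above uses MAX_XEM_AGE_SECS / MAX_ANIDB_AGE_SECS

    conn = sqlite3.connect(':memory:')
    conn.execute("CREATE TABLE scene_exceptions_refresh (list TEXT PRIMARY KEY, last_refreshed INT)")

    def should_refresh(name):
        row = conn.execute("SELECT last_refreshed FROM scene_exceptions_refresh WHERE list = ?",
                           [name]).fetchone()
        # refresh when the list has never been fetched, or the last fetch is too old
        return row is None or time.time() > int(row[0]) + MAX_AGE_SECS

    def set_last_refresh(name):
        conn.execute("INSERT OR REPLACE INTO scene_exceptions_refresh (list, last_refreshed) VALUES (?, ?)",
                     [name, time.time()])

    if should_refresh('xem'):
        # ... fetch and store the exceptions here ...
        set_last_refresh('xem')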


@@ -45,7 +45,7 @@ class BacklogSearcher:
def __init__(self):
self._lastBacklog = self._get_lastBacklog()
self.cycleTime = 7
self.cycleTime = sickbeard.BACKLOG_FREQUENCY/60/24
self.lock = threading.Lock()
self.amActive = False
self.amPaused = False
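
cycleTime moves from a hard-coded 7 to a value derived from BACKLOG_FREQUENCY. Assuming BACKLOG_FREQUENCY is expressed in minutes (the /60/24 conversion only makes sense that way), the arithmetic reproduces the old default when the setting is one week:

    # Illustrative arithmetic only; 10080 is a hypothetical value (one week in minutes).
    BACKLOG_FREQUENCY = 10080
    cycleTime = BACKLOG_FREQUENCY / 60 / 24   # minutes -> hours -> days
    print(cycleTime)                          # 7 days (7.0 under Python 3)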


@@ -136,9 +136,9 @@ class TVCache():
return []
def getRSSFeed(self, url, post_data=None):
def getRSSFeed(self, url, post_data=None, request_headers=None):
with RSSFeeds(self.providerID) as feed:
data = feed.getRSSFeed(url, post_data)
data = feed.getRSSFeed(url, post_data, request_headers)
return data
def _translateTitle(self, title):