diff --git a/gui/slick/css/default.css b/gui/slick/css/default.css
index 4b8a5b66..360c672e 100644
--- a/gui/slick/css/default.css
+++ b/gui/slick/css/default.css
@@ -888,6 +888,54 @@ td.tvShow a {
-o-opacity: 1 !important;
opacity: 1 !important;
}
+
+/* Anime section for editShow */
+#Anime {
+ clear: both;
+ display: block;
+ overflow-x: hidden;
+ overflow-y: hidden;
+ padding-bottom: 10px;
+ padding-left: 20px;
+ padding-right: 0px;
+ padding-top: 10px;
+ font-size: 14px;
+}
+
+#Anime div.component-group-desc {
+ float: left;
+ width: 165px;
+}
+#Anime div.component-group-desc p {
+ color: #666666;
+ margin-bottom: 0.4em;
+ margin-left: 0;
+ margin-right: 0;
+ margin-top: 0.4em;
+ width: 95%;
+}
+
+div.blackwhitelist {
+ float: left;
+ text-align: center;
+}
+div.blackwhitelist input {
+ margin: 5px 0px;
+}
+div.blackwhitelist.pool select {
+ min-width: 230px;
+}
+div.blackwhitelist.white select, div.blackwhitelist.black select {
+ min-width: 150px;
+}
+div.blackwhitelist span {
+ display: block;
+ text-align: center;
+}
+div.blackwhitelist.anidb, div.blackwhitelist.manual {
+ margin: 7px 0px;
+}
+
/* for manage_massEdit */
.optionWrapper {
width: 450px;
diff --git a/gui/slick/interfaces/default/editShow.tmpl b/gui/slick/interfaces/default/editShow.tmpl
index 95cebd0c..63c8eb1f 100644
--- a/gui/slick/interfaces/default/editShow.tmpl
+++ b/gui/slick/interfaces/default/editShow.tmpl
@@ -146,49 +146,7 @@ Separate words with a comma, e.g. "word1,word2,word3"
#if $show.is_anime
-
-Release Groups:
-
-
-
-
-
- White:
-
- #for $keyword in $whitelist:
- $keyword
- #end for
-
-
-
-
-
-
- Pool (Name|Rating|Subed Ep):
-
- #for $group in $groups
- #if $group not in $whitelist and $group['name'] not in $blacklist:
- $group['name'] | $group['rating'] | $group['range']
- #end if
- #end for
-
-
-
-
-
-
-
-
- Black:
-
- #for $keyword in $blacklist:
- $keyword
- #end for
-
-
-
-
-
+#include $os.path.join($sickbeard.PROG_DIR, "gui/slick/interfaces/default/inc_blackwhitelist.tmpl")
#end if
diff --git a/gui/slick/interfaces/default/inc_blackwhitelist.tmpl b/gui/slick/interfaces/default/inc_blackwhitelist.tmpl
new file mode 100644
index 00000000..83129d0a
--- /dev/null
+++ b/gui/slick/interfaces/default/inc_blackwhitelist.tmpl
@@ -0,0 +1,53 @@
+Fansub Groups:
+
+
+
Select your preferred fansub groups from the Available Groups and add them to the Whitelist . Add groups to the Blacklist to ignore them.
+
The Whitelist is checked before the Blacklist .
+
Groups are shown as Name | Rating | Number of subbed episodes .
+
You may also add any fansub group not listed to either list manually.
+
+
+
+
\ No newline at end of file
diff --git a/gui/slick/interfaces/default/inc_bottom.tmpl b/gui/slick/interfaces/default/inc_bottom.tmpl
index 25fccc23..51b31f3b 100644
--- a/gui/slick/interfaces/default/inc_bottom.tmpl
+++ b/gui/slick/interfaces/default/inc_bottom.tmpl
@@ -15,7 +15,7 @@
#set $numEpisodes = $myDB.select("SELECT COUNT(*) FROM tv_episodes WHERE season != 0 and episode != 0 AND (airdate != 1 OR status IN ("+",".join([str(x) for x in ($Quality.DOWNLOADED + $Quality.SNATCHED + $Quality.SNATCHED_PROPER) + [$ARCHIVED]])+")) AND airdate <= "+$today+" AND status != "+str($IGNORED)+"")[0][0]
$numShows shows ($numGoodShows active) | $numDLEpisodes/$numEpisodes episodes downloaded |
Search : <%=str(sickbeard.dailySearchScheduler.timeLeft()).split('.')[0]%> |
-Backlog : $sbdatetime.sbdatetime.sbfdate($sickbeard.backlogSearchScheduler.nextRun())
+Backlog : <%=str(sickbeard.backlogSearchScheduler.timeLeft()).split('.')[0]%>
Force Version Check
diff --git a/lib/tvdb_api/tvdb_api.py b/lib/tvdb_api/tvdb_api.py
index 30942e29..4a09261b 100644
--- a/lib/tvdb_api/tvdb_api.py
+++ b/lib/tvdb_api/tvdb_api.py
@@ -466,12 +466,14 @@ class Tvdb:
if cache is True:
self.config['cache_enabled'] = True
- self.sess = cachecontrol.CacheControl(cache=caches.FileCache(self._getTempDir()))
+ self.config['cache_location'] = self._getTempDir()
+ self.sess = cachecontrol.CacheControl(cache=caches.FileCache(self.config['cache_location']))
elif cache is False:
self.config['cache_enabled'] = False
elif isinstance(cache, basestring):
self.config['cache_enabled'] = True
- self.sess = cachecontrol.CacheControl(cache=caches.FileCache(cache))
+ self.config['cache_location'] = cache
+ self.sess = cachecontrol.CacheControl(cache=caches.FileCache(self.config['cache_location']))
else:
raise ValueError("Invalid value for Cache %r (type was %s)" % (cache, type(cache)))
diff --git a/lib/tvrage_api/tvrage_api.py b/lib/tvrage_api/tvrage_api.py
index e1661627..9aae00d2 100644
--- a/lib/tvrage_api/tvrage_api.py
+++ b/lib/tvrage_api/tvrage_api.py
@@ -318,12 +318,14 @@ class TVRage:
if cache is True:
self.config['cache_enabled'] = True
- self.sess = cachecontrol.CacheControl(cache=caches.FileCache(self._getTempDir()))
+ self.config['cache_location'] = self._getTempDir()
+ self.sess = cachecontrol.CacheControl(cache=caches.FileCache(self.config['cache_location']))
elif cache is False:
self.config['cache_enabled'] = False
elif isinstance(cache, basestring):
self.config['cache_enabled'] = True
- self.sess = cachecontrol.CacheControl(cache=caches.FileCache(cache))
+ self.config['cache_location'] = cache
+ self.sess = cachecontrol.CacheControl(cache=caches.FileCache(self.config['cache_location']))
else:
raise ValueError("Invalid value for Cache %r (type was %s)" % (cache, type(cache)))
diff --git a/sickbeard/__init__.py b/sickbeard/__init__.py
index 05fbc778..ba7828fb 100644
--- a/sickbeard/__init__.py
+++ b/sickbeard/__init__.py
@@ -1133,9 +1133,6 @@ def start():
# start the maintenance scheduler
maintenanceScheduler.thread.start()
- logger.log(u"Performing initial maintenance tasks, please wait ...")
- while maintenanceScheduler.action.amActive:
- time.sleep(1)
# start the daily search scheduler
dailySearchScheduler.thread.start()
diff --git a/sickbeard/maintenance.py b/sickbeard/maintenance.py
index dc11c712..d9a6b01f 100644
--- a/sickbeard/maintenance.py
+++ b/sickbeard/maintenance.py
@@ -34,7 +34,7 @@ class Maintenance():
def run(self, force=False):
self.amActive = True
- # refresh scene exceptions too
+ # get and update scene exceptions lists
scene_exceptions.retrieve_exceptions()
# refresh network timezones
diff --git a/sickbeard/name_parser/parser.py b/sickbeard/name_parser/parser.py
index bf57c84a..1f02a38f 100644
--- a/sickbeard/name_parser/parser.py
+++ b/sickbeard/name_parser/parser.py
@@ -124,8 +124,8 @@ class NameParser(object):
if not self.showObj and not self.naming_pattern:
# Regex pattern to return the Show / Series Name regardless of the file pattern tossed at it, matched 53 show name examples from regexes.py
- show_pattern = '''(?:(?:\[.*?\])|(?:\d{3}[\.-]))*[ _\.]?(?P.*?(?:[ ._-](\d{4}))?)(?:(?:(?:[ ._-]+\d+)|(?:[ ._-]+s\d{2}))|(?:\W+(?:(?:S\d[\dE._ -])|(?:\d\d?x)|(?:\d{4}\W\d\d\W\d\d)|(?:(?:part|pt)[\._ -]?(?:\d|[ivx]))|Season\W+\d+\W+|E\d+\W+|(?:\d{1,3}.+\d{1,}[a-zA-Z]{2}\W+[a-zA-Z]{3,}\W+\d{4}.+))))'''
- show_pattern_alt = '''^(?P.*?(?:[ ._-](\d{4}))?)(?:(?:(?:[ ._-]+\d+)|(?:[ ._-]+s\d{2}))|(?:\W+(?:(?:S\d[\dE._ -])|(?:\d\d?x)|(?:\d{4}\W\d\d\W\d\d)|(?:(?:part|pt)[\._ -]?(?:\d|[ivx]))|Season\W+\d+\W+|E\d+\W+|(?:\d{1,3}.+\d{1,}[a-zA-Z]{2}\W+[a-zA-Z]{3,}\W+\d{4}.+))))'''
+ show_pattern = '''(?:(?:\[.*?\])|(?:\d{3}[\.-]))*[ _\.]?(?P.*?(?:[ ._-]((?!\d{4}\W\d\d\W\d\d\W)\d{4}))?)(?:(?:(?:[ ._-]+\d+)|(?:[ ._-]+s\d{2}))|(?:\W+(?:(?:S\d[\dE._ -])|(?:\d\d?x)|(?:\d{4}\W\d\d\W\d\d)|(?:(?:part|pt)[\._ -]?(?:\d|[ivx]))|Season\W+\d+\W+|E\d+\W+|(?:\d{1,3}.+\d{1,}[a-zA-Z]{2}\W+[a-zA-Z]{3,}\W+\d{4}.+))))'''
+ show_pattern_alt = '''^(?P.*?(?:[ ._-]((?!\d{4}\W\d\d\W\d\d\W)\d{4}))?)(?:(?:(?:[ ._-]+\d+)|(?:[ ._-]+s\d{2}))|(?:\W+(?:(?:S\d[\dE._ -])|(?:\d\d?x)|(?:\d{4}\W\d\d\W\d\d)|(?:(?:part|pt)[\._ -]?(?:\d|[ivx]))|Season\W+\d+\W+|E\d+\W+|(?:\d{1,3}.+\d{1,}[a-zA-Z]{2}\W+[a-zA-Z]{3,}\W+\d{4}.+))))'''
self.showObj = self._matchShowName(name, show_pattern)
if not self.showObj:
diff --git a/sickbeard/rssfeeds.py b/sickbeard/rssfeeds.py
index 71c2623c..d843c551 100644
--- a/sickbeard/rssfeeds.py
+++ b/sickbeard/rssfeeds.py
@@ -36,7 +36,7 @@ class RSSFeeds:
self.fc.purge(age)
- def getRSSFeed(self, url, post_data=None):
+ def getRSSFeed(self, url, post_data=None, request_headers=None):
if not self.fc:
return
@@ -47,7 +47,7 @@ class RSSFeeds:
if post_data:
url += urllib.urlencode(post_data)
- feed = self.fc.fetch(url)
+ feed = self.fc.fetch(url, False, False, request_headers)
if not feed:
logger.log(u"RSS Error loading URL: " + url, logger.ERROR)
return
@@ -59,4 +59,4 @@ class RSSFeeds:
logger.log(u"No RSS items found using URL: " + url, logger.WARNING)
return
- return feed
\ No newline at end of file
+ return feed
diff --git a/sickbeard/scene_exceptions.py b/sickbeard/scene_exceptions.py
index 368d4feb..277d076c 100644
--- a/sickbeard/scene_exceptions.py
+++ b/sickbeard/scene_exceptions.py
@@ -33,6 +33,23 @@ exceptionCache = {}
exceptionSeasonCache = {}
exceptionIndexerCache = {}
+
+def shouldRefresh(list):
+ myDB = db.DBConnection('cache.db')
+ rows = myDB.select("SELECT last_refreshed FROM scene_exceptions_refresh WHERE list = ?",
+ [list])
+ if rows:
+ return time.time() > (int(rows[0]['last_refreshed']) + (MAX_ANIDB_AGE_SECS if list == 'anidb' else MAX_XEM_AGE_SECS))
+ else:
+ return True
+
+
+def setLastRefresh(list):
+ myDB = db.DBConnection('cache.db')
+ myDB.action("INSERT OR REPLACE INTO scene_exceptions_refresh (list, last_refreshed) VALUES (?,?)",
+ [list, time.time()])
+
+
def get_scene_exceptions(indexer_id, season=-1):
"""
Given a indexer_id, return a list of all the scene exceptions.
@@ -50,7 +67,7 @@ def get_scene_exceptions(indexer_id, season=-1):
try:
exceptionCache[indexer_id][season] = exceptionsList
except:
- exceptionCache[indexer_id] = {season:exceptionsList}
+ exceptionCache[indexer_id] = {season: exceptionsList}
else:
exceptionsList = list(set(exceptionCache[indexer_id][season]))
@@ -59,6 +76,7 @@ def get_scene_exceptions(indexer_id, season=-1):
return exceptionsList
+
def get_all_scene_exceptions(indexer_id):
myDB = db.DBConnection('cache.db')
exceptions = myDB.select("SELECT show_name,season FROM scene_exceptions WHERE indexer_id = ?", [indexer_id])
@@ -71,6 +89,7 @@ def get_all_scene_exceptions(indexer_id):
return exceptionsList
+
def get_scene_seasons(indexer_id):
"""
return a list of season numbers that have scene exceptions
@@ -113,7 +132,8 @@ def get_scene_exception_by_name_multiple(show_name):
cur_season = int(cur_exception["season"])
if show_name.lower() in (
- cur_exception_name.lower(), sickbeard.helpers.sanitizeSceneName(cur_exception_name).lower().replace('.', ' ')):
+ cur_exception_name.lower(),
+ sickbeard.helpers.sanitizeSceneName(cur_exception_name).lower().replace('.', ' ')):
logger.log(u"Scene exception lookup got indexer id " + str(cur_indexer_id) + u", using that", logger.DEBUG)
out.append((cur_indexer_id, cur_season))
if out:
@@ -135,46 +155,49 @@ def retrieve_exceptions():
exceptionSeasonCache = {}
# exceptions are stored on github pages
- for indexer in sickbeard.indexerApi().indexers:
- logger.log(u"Checking for scene exception updates for " + sickbeard.indexerApi(indexer).name + "")
+ if shouldRefresh('normal'):
+ for indexer in sickbeard.indexerApi().indexers:
+ logger.log(u"Checking for scene exception updates for " + sickbeard.indexerApi(indexer).name + "")
- url = sickbeard.indexerApi(indexer).config['scene_url']
+ url = sickbeard.indexerApi(indexer).config['scene_url']
- url_data = helpers.getURL(url)
+ url_data = helpers.getURL(url)
- if url_data is None:
- # When urlData is None, trouble connecting to github
- logger.log(u"Check scene exceptions update failed. Unable to get URL: " + url, logger.ERROR)
- continue
+ if url_data is None:
+ # When urlData is None, trouble connecting to github
+ logger.log(u"Check scene exceptions update failed. Unable to get URL: " + url, logger.ERROR)
+ continue
- else:
- # each exception is on one line with the format indexer_id: 'show name 1', 'show name 2', etc
- for cur_line in url_data.splitlines():
- cur_line = cur_line.decode('utf-8')
- indexer_id, sep, aliases = cur_line.partition(':') # @UnusedVariable
-
- if not aliases:
- continue
-
- indexer_id = int(indexer_id)
-
- # regex out the list of shows, taking \' into account
- # alias_list = [re.sub(r'\\(.)', r'\1', x) for x in re.findall(r"'(.*?)(? (int(rows[0]['last_refreshed']) + MAX_ANIDB_AGE_SECS)
- else:
- refresh = True
+ if shouldRefresh('anidb'):
+ logger.log(u"Checking for scene exception updates for AniDB")
- if refresh:
for show in sickbeard.showList:
if show.is_anime and show.indexer == 1:
try:
@@ -257,41 +275,40 @@ def _retrieve_anidb_mainnames():
anidb_mainNames[show.indexerid] = [{anime.name: -1}]
if success:
- myDB.action("INSERT OR REPLACE INTO scene_exceptions_refresh (list, last_refreshed) VALUES (?,?)",
- ['anidb', time.time()])
+ setLastRefresh('anidb')
return anidb_mainNames
-def _xem_excpetions_fetcher(indexer):
- global MAX_XEM_AGE_SECS
+def _xem_excpetions_fetcher():
+ global MAX_XEM_AGE_SECS
exception_dict = {}
- myDB = db.DBConnection('cache.db')
- rows = myDB.select("SELECT last_refreshed FROM scene_exceptions_refresh WHERE list = ?",
- ['xem'])
- if rows:
- refresh = time.time() > (int(rows[0]['last_refreshed']) + MAX_XEM_AGE_SECS)
- else:
- refresh = True
+ if shouldRefresh('xem'):
+ success = False
+ for indexer in sickbeard.indexerApi().indexers:
+ logger.log(u"Checking for XEM scene exception updates for " + sickbeard.indexerApi(indexer).name)
- if refresh:
- url = "http://thexem.de/map/allNames?origin=%s&seasonNumbers=1" % sickbeard.indexerApi(indexer).config['xem_origin']
+ url = "http://thexem.de/map/allNames?origin=%s&seasonNumbers=1" % sickbeard.indexerApi(indexer).config[
+ 'xem_origin']
- url_data = helpers.getURL(url, json=True)
- if url_data is None:
- logger.log(u"Check scene exceptions update failed. Unable to get URL: " + url, logger.ERROR)
- return exception_dict
+ url_data = helpers.getURL(url, json=True)
+ if url_data is None:
+ logger.log(u"Check scene exceptions update failed for " + sickbeard.indexerApi(
+ indexer).name + ", Unable to get URL: " + url, logger.ERROR)
+ continue
- if url_data['result'] == 'failure':
- return exception_dict
+ if url_data['result'] == 'failure':
+ continue
- myDB.action("INSERT OR REPLACE INTO scene_exceptions_refresh (list, last_refreshed) VALUES (?,?)",
- ['xem', time.time()])
+ for indexerid, names in url_data['data'].items():
+ exception_dict[int(indexerid)] = names
- for indexerid, names in url_data['data'].items():
- exception_dict[int(indexerid)] = names
+ success = True
+
+ if success:
+ setLastRefresh('xem')
return exception_dict
@@ -303,6 +320,7 @@ def getSceneSeasons(indexer_id):
seasons = myDB.select("SELECT DISTINCT season FROM scene_exceptions WHERE indexer_id = ?", [indexer_id])
return [cur_exception["season"] for cur_exception in seasons]
+
def buildIndexerCache():
logger.log(u"Updating internal scene name cache", logger.MESSAGE)
global exceptionIndexerCache
diff --git a/sickbeard/searchBacklog.py b/sickbeard/searchBacklog.py
index 2a6eaf90..e162d390 100644
--- a/sickbeard/searchBacklog.py
+++ b/sickbeard/searchBacklog.py
@@ -45,7 +45,7 @@ class BacklogSearcher:
def __init__(self):
self._lastBacklog = self._get_lastBacklog()
- self.cycleTime = 7
+ self.cycleTime = sickbeard.BACKLOG_FREQUENCY/60.0/24
self.lock = threading.Lock()
self.amActive = False
self.amPaused = False
diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py
index 16879349..ba6a59a7 100644
--- a/sickbeard/tvcache.py
+++ b/sickbeard/tvcache.py
@@ -136,9 +136,9 @@ class TVCache():
return []
- def getRSSFeed(self, url, post_data=None):
+ def getRSSFeed(self, url, post_data=None, request_headers=None):
with RSSFeeds(self.providerID) as feed:
- data = feed.getRSSFeed(url, post_data)
+ data = feed.getRSSFeed(url, post_data, request_headers)
return data
def _translateTitle(self, title):