# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.

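"""
Scene exception handling: maps alternate "scene" release names for a show to its
indexer id, persisting them in the scene_exceptions table of cache.db and in a few
module-level caches.

Illustrative usage (the indexer id and show name below are made up):

    from sickbeard import scene_exceptions

    scene_exceptions.retrieve_exceptions()                 # refresh from github/XEM/AniDB
    names = scene_exceptions.get_scene_exceptions(12345)   # show-wide exception names
    indexer_id, season = scene_exceptions.get_scene_exception_by_name("Some Show")
"""
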
import re
import time

import sickbeard

from lib import adba

from sickbeard import helpers
from sickbeard import name_cache
from sickbeard import logger
from sickbeard import db

MAX_XEM_AGE_SECS = 86400  # 1 day
MAX_ANIDB_AGE_SECS = 86400  # 1 day

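# In-memory caches, populated lazily from cache.db:
#   exceptionCache        - indexer_id -> {season: [exception names]},
#                           e.g. {12345: {-1: ['Show US', 'Show 2014']}} (illustrative)
#   exceptionSeasonCache  - indexer_id -> [season numbers that have exceptions]
#   exceptionIndexerCache - sanitized exception name -> indexer_id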
exceptionCache = {}
exceptionSeasonCache = {}
exceptionIndexerCache = {}


def get_scene_exceptions(indexer_id, season=-1):
    """
    Given an indexer_id, return a list of all the scene exceptions.
    """

    global exceptionCache

    if indexer_id not in exceptionCache or season not in exceptionCache[indexer_id]:
        myDB = db.DBConnection("cache.db")
        exceptions = myDB.select("SELECT show_name FROM scene_exceptions WHERE indexer_id = ? and season = ?",
                                 [indexer_id, season])
        exceptionsList = list(set([cur_exception["show_name"] for cur_exception in exceptions]))

        if len(exceptionsList):
            try:
                exceptionCache[indexer_id][season] = exceptionsList
            except KeyError:
                # no cache entry for this indexer_id yet
                exceptionCache[indexer_id] = {season: exceptionsList}
    else:
        exceptionsList = list(set(exceptionCache[indexer_id][season]))

    if season == 1:  # if we were looking for season 1 we can add generic names
        exceptionsList += get_scene_exceptions(indexer_id, season=-1)

    return exceptionsList


def get_all_scene_exceptions(indexer_id):
    """
    Given an indexer_id, return a dict mapping season -> [exception names] for that show.
    """
    myDB = db.DBConnection("cache.db")
    exceptions = myDB.select("SELECT show_name,season FROM scene_exceptions WHERE indexer_id = ?", [indexer_id])

    exceptionsList = {}
    for cur_exception in exceptions:
        if cur_exception["season"] not in exceptionsList:
            exceptionsList[cur_exception["season"]] = []
        exceptionsList[cur_exception["season"]].append(cur_exception["show_name"])

    return exceptionsList


def get_scene_seasons(indexer_id):
    """
    Return a list of season numbers that have scene exceptions.
    """
    global exceptionSeasonCache

    if indexer_id not in exceptionSeasonCache:
        myDB = db.DBConnection("cache.db")
        sqlResults = myDB.select("SELECT DISTINCT(season) as season FROM scene_exceptions WHERE indexer_id = ?",
                                 [indexer_id])
        exceptionSeasonCache[indexer_id] = [int(x["season"]) for x in sqlResults]

    return exceptionSeasonCache[indexer_id]


def get_scene_exception_by_name(show_name):
    """
    Return the first (indexer_id, season) match for the given show name.
    """
    return get_scene_exception_by_name_multiple(show_name)[0]


def get_scene_exception_by_name_multiple(show_name):
    """
    Given a show name, return a list of (indexer_id, season) tuples for every matching
    scene exception, or [(None, None)] if no exception is present.
    """
    myDB = db.DBConnection("cache.db")

    # try the obvious case first
    exception_result = myDB.select(
        "SELECT indexer_id, season FROM scene_exceptions WHERE LOWER(show_name) = ? ORDER BY season ASC",
        [show_name.lower()])
    if exception_result:
        return [(int(x["indexer_id"]), int(x["season"])) for x in exception_result]

    out = []
    all_exception_results = myDB.select("SELECT show_name, indexer_id, season FROM scene_exceptions")
    for cur_exception in all_exception_results:

        cur_exception_name = cur_exception["show_name"]
        cur_indexer_id = int(cur_exception["indexer_id"])
        cur_season = int(cur_exception["season"])

        # fall back to comparing against the sanitized version of each exception name
        if show_name.lower() in (
                cur_exception_name.lower(),
                sickbeard.helpers.sanitizeSceneName(cur_exception_name).lower().replace('.', ' ')):
            logger.log(u"Scene exception lookup got indexer id " + str(cur_indexer_id) + u", using that", logger.DEBUG)
            out.append((cur_indexer_id, cur_season))

    if out:
        return out
    else:
        return [(None, None)]


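# retrieve_exceptions() merges exception names from three sources -- the per-indexer
# lists hosted on github (scene_url), thexem.de, and AniDB -- into exception_dict
# ({indexer_id: [{show_name: season}, ...]}) and inserts anything new into the
# scene_exceptions table in cache.db.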
def retrieve_exceptions():
    """
    Looks up the exceptions on github, parses them into a dict, and inserts them into the
    scene_exceptions table in cache.db. Also clears the scene name cache.
    """
    global exceptionCache, exceptionSeasonCache

    exception_dict = {}
    exceptionCache = {}
    exceptionSeasonCache = {}

    # exceptions are stored on github pages
    for indexer in sickbeard.indexerApi().indexers:
        logger.log(u"Checking for scene exception updates for " + sickbeard.indexerApi(indexer).name)

        url = sickbeard.indexerApi(indexer).config['scene_url']

        url_data = helpers.getURL(url)

        if url_data is None:
            # when url_data is None there was trouble connecting to github
            logger.log(u"Check scene exceptions update failed. Unable to get URL: " + url, logger.ERROR)
            continue

        else:
            # each exception is on one line with the format indexer_id: 'show name 1', 'show name 2', etc
            for cur_line in url_data.splitlines():
                cur_line = cur_line.decode('utf-8')
                indexer_id, sep, aliases = cur_line.partition(':')  # @UnusedVariable

                if not aliases:
                    continue

                indexer_id = int(indexer_id)

                # regex out the list of shows, taking \' into account
                # each alias is stored as {show_name: season}, where season -1 covers the whole show
                alias_list = [{re.sub(r'\\(.)', r'\1', x): -1} for x in re.findall(r"'(.*?)(?<!\\)',?", aliases)]

                exception_dict[indexer_id] = alias_list

        # XEM scene exceptions
        logger.log(u"Checking for XEM scene exception updates for " + sickbeard.indexerApi(indexer).name)
        xem_exceptions = _xem_exceptions_fetcher(indexer)
        for xem_ex in xem_exceptions:
            if xem_ex in exception_dict:
                exception_dict[xem_ex] = exception_dict[xem_ex] + xem_exceptions[xem_ex]
            else:
                exception_dict[xem_ex] = xem_exceptions[xem_ex]

    # AniDB scene exceptions
    logger.log(u"Checking for scene exception updates for AniDB")
    local_exceptions = _retrieve_anidb_mainnames()
    for local_ex in local_exceptions:
        if local_ex in exception_dict:
            exception_dict[local_ex] = exception_dict[local_ex] + local_exceptions[local_ex]
        else:
            exception_dict[local_ex] = local_exceptions[local_ex]

    myDB = db.DBConnection("cache.db")

    changed_exceptions = False

    # write all the exceptions we got off the net into the database
    for cur_indexer_id in exception_dict:

        # get a list of the existing exceptions for this ID
        existing_exceptions = [x["show_name"] for x in
                               myDB.select("SELECT * FROM scene_exceptions WHERE indexer_id = ?", [cur_indexer_id])]

        for cur_exception_dict in exception_dict[cur_indexer_id]:
            cur_exception, curSeason = cur_exception_dict.items()[0]

            # if this exception isn't already in the DB then add it
            if cur_exception not in existing_exceptions:
                myDB.action("INSERT INTO scene_exceptions (indexer_id, show_name, season) VALUES (?,?,?)",
                            [cur_indexer_id, cur_exception, curSeason])
                changed_exceptions = True

    # since this could invalidate the results of the cache we clear it out after updating
    if changed_exceptions:
        logger.log(u"Updated scene exceptions")
        name_cache.clearCache()
    else:
        logger.log(u"No scene exceptions update needed")

    # build indexer scene name cache
    buildIndexerCache()


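# Custom (user-edited) exceptions are flagged with custom=1 in the scene_exceptions
# table so they can be wiped and re-inserted independently of the downloaded ones.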
def update_scene_exceptions(indexer_id, scene_exceptions):
    """
    Given an indexer_id and a list of all show scene exceptions, update the db.
    """
    global exceptionIndexerCache

    myDB = db.DBConnection("cache.db")

    myDB.action('DELETE FROM scene_exceptions WHERE indexer_id=? and custom=1', [indexer_id])

    logger.log(u"Updating internal scene name cache", logger.MESSAGE)
    for cur_season in [-1] + sickbeard.scene_exceptions.get_scene_seasons(indexer_id):
        for cur_exception in scene_exceptions:
            exceptionIndexerCache[helpers.full_sanitizeSceneName(cur_exception)] = indexer_id
            myDB.action("INSERT INTO scene_exceptions (indexer_id, show_name, season, custom) VALUES (?,?,?,?)",
                        [indexer_id, cur_exception, cur_season, 1])

    name_cache.clearCache()


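# AniDB main-title lookups go through lib.adba and are throttled: the
# scene_exceptions_refresh table records the last run, and the list is only
# rebuilt once per MAX_ANIDB_AGE_SECS.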
def _retrieve_anidb_mainnames():
    global MAX_ANIDB_AGE_SECS

    success = False

    anidb_mainNames = {}

    cacheDB = db.DBConnection('cache.db')

    rows = cacheDB.select("SELECT last_refreshed FROM scene_exceptions_refresh WHERE list = ?",
                          ['anidb'])
    if rows:
        refresh = time.time() > (int(rows[0]['last_refreshed']) + MAX_ANIDB_AGE_SECS)
    else:
        refresh = True

    if refresh:
        for show in sickbeard.showList:
            if show.is_anime and show.indexer == 1:  # indexer 1 is TheTVDB; adba looks shows up by tvdbid
                try:
                    anime = adba.Anime(None, name=show.name, tvdbid=show.indexerid, autoCorrectName=True)
                except Exception:
                    continue
                else:
                    success = True

                if anime.name and anime.name != show.name:
                    anidb_mainNames[show.indexerid] = [{anime.name: -1}]

    if success:
        cacheDB.action("INSERT OR REPLACE INTO scene_exceptions_refresh (list, last_refreshed) VALUES (?,?)",
                       ['anidb', time.time()])

    return anidb_mainNames


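# XEM exceptions come from thexem.de's allNames endpoint and are refreshed at most
# once per MAX_XEM_AGE_SECS, tracked via the same scene_exceptions_refresh table.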
def _xem_exceptions_fetcher(indexer):
    global MAX_XEM_AGE_SECS

    exception_dict = {}

    cacheDB = db.DBConnection('cache.db')

    rows = cacheDB.select("SELECT last_refreshed FROM scene_exceptions_refresh WHERE list = ?",
                          ['xem'])
    if rows:
        refresh = time.time() > (int(rows[0]['last_refreshed']) + MAX_XEM_AGE_SECS)
    else:
        refresh = True

    if refresh:
        url = "http://thexem.de/map/allNames?origin=%s&seasonNumbers=1" % sickbeard.indexerApi(indexer).config['xem_origin']

        url_data = helpers.getURL(url, json=True)
        if url_data is None:
            logger.log(u"Check scene exceptions update failed. Unable to get URL: " + url, logger.ERROR)
            return exception_dict

        if url_data['result'] == 'failure':
            return exception_dict

        cacheDB.action("INSERT OR REPLACE INTO scene_exceptions_refresh (list, last_refreshed) VALUES (?,?)",
                       ['xem', time.time()])

        for indexerid, names in url_data['data'].items():
            exception_dict[int(indexerid)] = names

    return exception_dict


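# Note: unlike get_scene_seasons() above, this helper always queries cache.db
# directly and bypasses exceptionSeasonCache.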
def getSceneSeasons(indexer_id):
    """
    Get a list of season numbers that have scene exceptions.
    """
    myDB = db.DBConnection("cache.db")
    seasons = myDB.select("SELECT DISTINCT season FROM scene_exceptions WHERE indexer_id = ?", [indexer_id])
    return [cur_exception["season"] for cur_exception in seasons]


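# Rebuilds exceptionIndexerCache from the current show list plus each show's scene
# exceptions, mapping sanitized names back to indexer ids for quick lookups.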
def buildIndexerCache():
    global exceptionIndexerCache

    logger.log(u"Updating internal scene name cache", logger.MESSAGE)
    exceptionIndexerCache = {}

    for show in sickbeard.showList:
        for curSeason in [-1] + sickbeard.scene_exceptions.get_scene_seasons(show.indexerid):
            exceptionIndexerCache[helpers.full_sanitizeSceneName(show.name)] = show.indexerid
            for name in get_scene_exceptions(show.indexerid, season=curSeason):
                exceptionIndexerCache[name] = show.indexerid
                exceptionIndexerCache[helpers.full_sanitizeSceneName(name)] = show.indexerid

    logger.log(u"Updated internal scene name cache", logger.MESSAGE)
    logger.log(u"Internal scene name cache set to: " + str(exceptionIndexerCache), logger.DEBUG)