mirror of https://github.com/moparisthebest/SickRage synced 2024-12-12 11:02:21 -05:00

Fixed anidb scene exceptions so they are looked up only for shows that are marked as anime.

Cleanup of the global exception dicts is now performed after scene exception retrieval.

Fixed DB type errors for the name parser cache and RSS feed cache.
This commit is contained in:
echel0n 2014-07-14 21:10:57 -07:00
parent d02c0bd6eb
commit 79f923dc9c
4 changed files with 36 additions and 31 deletions
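The "DB type" fixes change how the shelve-backed caches name their files. A minimal sketch of the idea, assuming only the standard library (the cache directory is a made-up stand-in for sickbeard.CACHE_DIR): hard-coding a '.db' suffix can confuse the dbm type detection on a later open (presumably the "db type could not be determined" failure), while a bare base name lets the backend choose its own format and extension, as the NameParserCache and RSSFeeds hunks below now do.

import os
import shelve

# Sketch only: cache_dir stands in for sickbeard.CACHE_DIR.
cache_dir = '/tmp/sickrage-cache'
if not os.path.isdir(cache_dir):
    os.makedirs(cache_dir)

# Bare base name, as in the hunks below: the dbm backend appends whatever
# extension it needs, so the file can be re-identified on the next open.
npc = shelve.open(os.path.join(cache_dir, 'name_parser_cache'))
npc['example'] = 'cached value'
npc.close()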


@@ -111,7 +111,7 @@ class QueueItem(threading.Thread):
self.added = None
self.alive = True
self.stop = threading.Event()
def run(self):
"""Implementing classes should call this"""


@@ -583,23 +583,24 @@ class NameParserCache:
self.npc_cache_size = 200
try:
self.npc = shelve.open(ek.ek(os.path.join, sickbeard.CACHE_DIR, 'npc.db'))
self.npc = shelve.open(ek.ek(os.path.join, sickbeard.CACHE_DIR, 'name_parser_cache'))
except Exception as e:
logger.log(u"NameParser Cache error: " + ex(e), logger.ERROR)
raise
def __del__(self):
self.npc.close()
if getattr(self, "npc", None) is not None:
self.npc.close()
def add(self, name, parse_result):
name = name.encode('utf-8', 'replace')
name = name.encode('utf-8', 'ignore')
self.npc[str(name)] = parse_result
while len(self.npc.items()) > self.npc_cache_size:
del self.npc.keys()[0]
def get(self, name):
name = name.encode('utf-8', 'replace')
name = name.encode('utf-8', 'ignore')
parse_result = self.npc.get(str(name), None)
if parse_result:

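The __del__ change above guards against a half-constructed cache: if shelve.open raises inside __init__, the object is still finalized, and closing an attribute that was never set would turn the original error into an AttributeError. A minimal sketch of the pattern, assuming nothing beyond the standard library:

import shelve

class TinyShelfCache(object):
    # Hypothetical stand-alone version of the guarded-close pattern above.
    def __init__(self, path):
        self.npc = shelve.open(path)  # may raise, leaving self.npc unset

    def __del__(self):
        # Only close the shelf if __init__ actually got far enough to open it.
        if getattr(self, "npc", None) is not None:
            self.npc.close()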

@@ -13,18 +13,18 @@ from lib.feedcache import cache
class RSSFeeds:
def __init__(self, db_name):
try:
self.fs = shelve.open(ek.ek(os.path.join, sickbeard.CACHE_DIR, db_name + '.db'))
self.fs = shelve.open(ek.ek(os.path.join, sickbeard.CACHE_DIR, db_name))
self.fc = cache.Cache(self.fs)
except Exception, e:
logger.log(u"RSS error: " + ex(e), logger.ERROR)
raise
def __del__(self):
self.fs.close()
if getattr(self, "fs", None) is not None:
self.fs.close()
def clearCache(self, age=None):
self.fc.purge(age)
self.fs.close()
def getFeed(self, url, post_data=None, request_headers=None):
parsed = list(urlparse.urlparse(url))
@@ -34,7 +34,6 @@ class RSSFeeds:
url += urllib.urlencode(post_data)
feed = self.fc.fetch(url, False, False, request_headers)
self.fs.close()
if not feed:
logger.log(u"RSS Error loading URL: " + url, logger.ERROR)

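The RSSFeeds hunk follows the same shelve pattern and wraps the store in feedcache. A rough usage sketch mirroring the calls shown above; it assumes SickRage's bundled lib.feedcache is importable, the URL is a placeholder, and the four-argument fetch() signature is taken from this diff rather than the upstream feedcache library:

import os
import shelve
from lib.feedcache import cache  # SickRage's bundled copy of feedcache

cache_dir = '/tmp/sickrage-cache'  # stand-in for sickbeard.CACHE_DIR
fs = shelve.open(os.path.join(cache_dir, 'rss_example'))  # no forced '.db' suffix
fc = cache.Cache(fs)

# Fetch (or reuse a cached copy of) a feed, then close the backing store,
# matching the fetch()/close() sequence in the hunk above.
feed = fc.fetch('http://example.com/rss.xml', False, False, None)
fs.close()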

@@ -27,6 +27,10 @@ from sickbeard import name_cache
from sickbeard import logger
from sickbeard import db
exception_dict = {}
anidb_exception_dict = {}
xem_exception_dict = {}
exceptionsCache = {}
exceptionsSeasonCache = {}
@@ -157,9 +161,7 @@ def retrieve_exceptions():
Looks up the exceptions on github, parses them into a dict, and inserts them into the
scene_exceptions table in cache.db. Also clears the scene name cache.
"""
global exceptionsCache, exceptionsSeasonCache
exception_dict = {}
global exception_dict, anidb_exception_dict, xem_exception_dict
# exceptions are stored on github pages
for indexer in sickbeard.indexerApi().indexers:
@@ -192,22 +194,24 @@ def retrieve_exceptions():
# alias_list = [re.sub(r'\\(.)', r'\1', x) for x in re.findall(r"'(.*?)(?<!\\)',?", aliases)]
alias_list = [{re.sub(r'\\(.)', r'\1', x): -1} for x in re.findall(r"'(.*?)(?<!\\)',?", aliases)]
exception_dict[indexer_id] = alias_list
del alias_list
del url_data
# XEM scene exceptions
xem_exceptions = _xem_exceptions_fetcher()
for xem_ex in xem_exceptions:
_xem_exceptions_fetcher()
for xem_ex in xem_exception_dict:
if xem_ex in exception_dict:
exception_dict[xem_ex] = exception_dict[xem_ex] + xem_exceptions[xem_ex]
exception_dict[xem_ex] = exception_dict[xem_ex] + xem_exception_dict[xem_ex]
else:
exception_dict[xem_ex] = xem_exceptions[xem_ex]
exception_dict[xem_ex] = xem_exception_dict[xem_ex]
# AniDB scene exceptions
anidb_exceptions = _anidb_exceptions_fetcher()
for anidb_ex in anidb_exceptions:
_anidb_exceptions_fetcher()
for anidb_ex in anidb_exception_dict:
if anidb_ex in exception_dict:
exception_dict[anidb_ex] = exception_dict[anidb_ex] + anidb_exceptions[anidb_ex]
exception_dict[anidb_ex] = exception_dict[anidb_ex] + anidb_exception_dict[anidb_ex]
else:
exception_dict[anidb_ex] = anidb_exceptions[anidb_ex]
exception_dict[anidb_ex] = anidb_exception_dict[anidb_ex]
changed_exceptions = False
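The two loops above fold the XEM and AniDB results into exception_dict, whose values are lists of {exception_name: season} mappings, so a collision on an indexer id concatenates the lists rather than overwriting them. A toy illustration with made-up ids and names (the -1 season value mirrors the hunks above):

# Made-up data; structure matches the alias lists built earlier in this function.
exception_dict = {1001: [{'Show A': -1}]}
xem_exception_dict = {1001: [{'Show A (2009)': -1}], 2002: [{'Show B': -1}]}

for xem_ex in xem_exception_dict:
    if xem_ex in exception_dict:
        exception_dict[xem_ex] = exception_dict[xem_ex] + xem_exception_dict[xem_ex]
    else:
        exception_dict[xem_ex] = xem_exception_dict[xem_ex]

# exception_dict is now:
# {1001: [{'Show A': -1}, {'Show A (2009)': -1}], 2002: [{'Show B': -1}]}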
@@ -224,6 +228,7 @@ def retrieve_exceptions():
# if this exception isn't already in the DB then add it
if cur_exception not in existing_exceptions:
if not isinstance(cur_exception, unicode):
cur_exception = unicode(cur_exception, 'utf-8', 'replace')
@@ -237,9 +242,11 @@ def retrieve_exceptions():
else:
logger.log(u"No scene exceptions update needed")
# cleanup
del exception_dict
# cleanup
exception_dict.clear()
anidb_exception_dict.clear()
xem_exception_dict.clear()
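The cleanup block now calls .clear() on the module-level dicts instead of deleting a local copy. Clearing empties the shared object in place while keeping the global name bound, so the next retrieve_exceptions() run can repopulate it; deleting the global name would remove the binding entirely. A small sketch of the difference, with toy contents:

anidb_exception_dict = {12345: [{'Some Anime': -1}]}  # toy contents

other_ref = anidb_exception_dict  # e.g. a reference held elsewhere in the module
anidb_exception_dict.clear()      # same object, now empty; the global name survives
print(other_ref)                  # -> {}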
def update_scene_exceptions(indexer_id, scene_exceptions):
"""
@@ -259,29 +266,27 @@ def update_scene_exceptions(indexer_id, scene_exceptions):
myDB.action("INSERT INTO scene_exceptions (indexer_id, show_name, season, custom) VALUES (?,?,?,?)",
[indexer_id, cur_exception, cur_season, 1])
def _anidb_exceptions_fetcher():
exception_dict = {}
global anidb_exception_dict
if shouldRefresh('anidb'):
logger.log(u"Checking for scene exception updates for AniDB")
for show in sickbeard.showList:
if show.indexer == 1:
if show.is_anime and show.indexer == 1:
try:
anime = adba.Anime(None, name=show.name, tvdbid=show.indexerid, autoCorrectName=True)
except:
continue
else:
if anime.name and anime.name != show.name:
exception_dict[show.indexerid] = [{anime.name: -1}]
anidb_exception_dict[show.indexerid] = [{anime.name: -1}]
setLastRefresh('anidb')
return exception_dict
return anidb_exception_dict
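The guard above is the headline fix: only shows flagged as anime (and on indexer 1, presumably the TVDB indexer id) are ever passed to adba. A tiny sketch of the filter, with a hypothetical show object standing in for SickRage's show class:

class FakeShow(object):
    # Hypothetical stand-in for a SickRage show entry.
    def __init__(self, name, is_anime, indexer):
        self.name = name
        self.is_anime = is_anime
        self.indexer = indexer

shows = [FakeShow('Regular Drama', False, 1), FakeShow('Some Anime', True, 1)]
for show in shows:
    if show.is_anime and show.indexer == 1:  # same guard as in the hunk above
        print('would query AniDB for: ' + show.name)  # only 'Some Anime'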
def _xem_exceptions_fetcher():
exception_dict = {}
global xem_exception_dict
if shouldRefresh('xem'):
for indexer in sickbeard.indexerApi().indexers:
@@ -300,11 +305,11 @@ def _xem_exceptions_fetcher():
continue
for indexerid, names in url_data['data'].items():
exception_dict[int(indexerid)] = names
xem_exception_dict[int(indexerid)] = names
setLastRefresh('xem')
return exception_dict
return xem_exception_dict
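The XEM fetcher's final step copies the parsed payload's 'data' mapping into the module-level dict, coercing the string keys of the response into ints. A toy version, where url_data is a fabricated, already-parsed response (the actual request and error handling are elided in the hunk above):

xem_exception_dict = {}
url_data = {'result': 'success',
            'data': {'12345': [{'Example Show Name': -1}]}}  # fabricated sample

for indexerid, names in url_data['data'].items():
    xem_exception_dict[int(indexerid)] = names

# xem_exception_dict == {12345: [{'Example Show Name': -1}]}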
def getSceneSeasons(indexer_id):