Added TVRage support to our scene numbering lookups.

Tweaked our indexer APIs' HTTP handlers for better performance.

Fixed a bug in the metadata parser that was preventing TVRage shows from getting images.
echel0n 2014-03-19 22:33:34 -07:00
parent f4a9987933
commit 36643acf5e
10 changed files with 206 additions and 135 deletions
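The recurring HTTP-handler tweak in the hunks below is replacing the per-call requests.Session (built, mounted with the caching adapter, and closed inside every lookup) with a single module-level cached session, falling back to a plain requests.get when caching is disabled. A minimal sketch of that pattern, assuming only that lib.httpcache.CachingHTTPAdapter is a normal requests transport adapter (load_url here is an illustrative stand-in for the _loadUrl/getURL helpers):

import requests
from lib.httpcache import CachingHTTPAdapter

# Built once at import time and reused by every lookup.
s = requests.Session()
s.mount('http://', CachingHTTPAdapter())  # cache plain-HTTP traffic only

def load_url(url, params=None, cache_enabled=True):
    # Shared caching session when the cache is on, one-off request otherwise.
    if cache_enabled:
        resp = s.get(url, params=params)
    else:
        resp = requests.get(url, params=params)
    return resp.content if resp.ok else None

Creating the session once keeps the connection pool and HTTP cache warm across calls instead of rebuilding them per request, which is where the performance gain in the commit message comes from.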

View File

@ -44,6 +44,11 @@ from tvdb_ui import BaseUI, ConsoleUI
from tvdb_exceptions import (tvdb_error, tvdb_userabort, tvdb_shownotfound,
tvdb_seasonnotfound, tvdb_episodenotfound, tvdb_attributenotfound)
# Cached Session Handler
from lib.httpcache import CachingHTTPAdapter
s = requests.Session()
s.mount('http://', CachingHTTPAdapter())
def log():
return logging.getLogger("tvdb_api")
@ -511,17 +516,12 @@ class Tvdb:
try:
log().debug("Retrieving URL %s" % url)
# cacheControl
if self.config['cache_enabled']:
from lib.httpcache import CachingHTTPAdapter
sess = requests.Session()
sess.mount('http://', CachingHTTPAdapter())
else:
sess = requests.Session()
# get response from TVDB
resp = sess.get(url, params=params)
sess.close()
if self.config['cache_enabled']:
resp = s.get(url, params=params)
else:
resp = requests.get(url, params=params)
except requests.HTTPError, e:
raise tvdb_error("HTTP error " + str(e.errno) + " while loading URL " + str(url))
@ -534,21 +534,18 @@ class Tvdb:
except Exception, e:
raise tvdb_error("Unknown exception occured: " + str(e.message) + " while loading URL " + str(url))
if resp.ok and resp.content:
if 'application/zip' in resp.headers.get("Content-Type", ''):
try:
# TODO: The zip contains actors.xml and banners.xml, which are currently ignored [GH-20]
log().debug("We recived a zip file unpacking now ...")
zipdata = StringIO.StringIO()
zipdata.write(resp.content)
myzipfile = zipfile.ZipFile(zipdata)
return myzipfile.read('%s.xml' % language)
except zipfile.BadZipfile:
raise tvdb_error("Bad zip file received from thetvdb.com, could not read it")
if 'application/zip' in resp.headers.get("Content-Type", '') and resp.ok:
try:
# TODO: The zip contains actors.xml and banners.xml, which are currently ignored [GH-20]
log().debug("We recived a zip file unpacking now ...")
zipdata = StringIO.StringIO()
zipdata.write(resp.content)
myzipfile = zipfile.ZipFile(zipdata)
return myzipfile.read('%s.xml' % language)
except zipfile.BadZipfile:
raise tvdb_error("Bad zip file received from thetvdb.com, could not read it")
return resp.content
return None
return resp.content if resp.ok else None
def _getetsrc(self, url, params=None, language=None):
"""Loads a URL using caching, returns an ElementTree of the source

View File

@ -36,6 +36,11 @@ from tvrage_ui import BaseUI
from tvrage_exceptions import (tvrage_error, tvrage_userabort, tvrage_shownotfound,
tvrage_seasonnotfound, tvrage_episodenotfound, tvrage_attributenotfound)
# Cached Session Handler
from lib.httpcache import CachingHTTPAdapter
s = requests.Session()
s.mount('http://', CachingHTTPAdapter())
def log():
return logging.getLogger("tvrage_api")
@ -252,6 +257,7 @@ class TVRage:
self.shows = ShowContainer() # Holds all Show classes
self.corrections = {} # Holds show-name to show_id mapping
self.sess = requests.session() # HTTP Session
self.config = {}
@ -266,15 +272,10 @@ class TVRage:
if cache is True:
self.config['cache_enabled'] = True
self.config['cache_location'] = self._getTempDir()
elif cache is False:
self.config['cache_enabled'] = False
elif isinstance(cache, basestring):
self.config['cache_enabled'] = True
self.config['cache_location'] = cache
else:
raise ValueError("Invalid value for Cache %r (type was %s)" % (cache, type(cache)))
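For callers, the block above means the cache keyword accepts three shapes: True (cache in a temporary directory), False (no caching), or a directory path. A small stand-alone mirror of that validation, with tempfile standing in for the class's _getTempDir() helper:

import os
import tempfile

def resolve_cache_config(cache):
    config = {}
    if cache is True:
        config['cache_enabled'] = True
        config['cache_location'] = os.path.join(tempfile.gettempdir(), 'tvrage_api')
    elif cache is False:
        config['cache_enabled'] = False
    elif isinstance(cache, str):  # basestring in the Python 2 original
        config['cache_enabled'] = True
        config['cache_location'] = cache
    else:
        raise ValueError("Invalid value for Cache %r (type was %s)" % (cache, type(cache)))
    return config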
@ -345,17 +346,12 @@ class TVRage:
try:
log().debug("Retrieving URL %s" % url)
# cacheControl
if self.config['cache_enabled']:
from lib.httpcache import CachingHTTPAdapter
sess = requests.Session()
sess.mount('http://', CachingHTTPAdapter())
else:
sess = requests.Session()
# get response from TVRage
resp = sess.get(url, params=params)
sess.close()
if self.config['cache_enabled']:
resp = s.get(url, params=params)
else:
resp = requests.get(url, params=params)
except requests.HTTPError, e:
raise tvrage_error("HTTP error " + str(e.errno) + " while loading URL " + str(url))
@ -365,7 +361,7 @@ class TVRage:
except requests.Timeout, e:
raise tvrage_error("Connection timed out " + str(e.message) + " while loading URL " + str(url))
return resp.content if resp.ok and resp.content else None
return resp.content if resp.ok else None
def _getetsrc(self, url, params=None):
"""Loads a URL using caching, returns an ElementTree of the source

View File

@ -37,6 +37,7 @@ from sickbeard import searchCurrent, searchBacklog, showUpdater, versionChecker,
from sickbeard import helpers, db, exceptions, show_queue, search_queue, scheduler, show_name_helpers
from sickbeard import logger
from sickbeard import naming
from sickbeard import scene_numbering
from common import SD, SKIPPED, NAMING_REPEAT

View File

@ -57,7 +57,21 @@ class AddNetworkTimezones(AddSceneNameCache):
def execute(self):
self.connection.action("CREATE TABLE network_timezones (network_name TEXT PRIMARY KEY, timezone TEXT)")
class ConverSceneExceptionsToIndexerID(AddNetworkTimezones):
class AddXemNumbering(AddNetworkTimezones):
def test(self):
return self.hasTable("xem_numbering")
def execute(self):
self.connection.action("CREATE TABLE xem_numbering (indexer TEXT, indexer_id INTEGER, season INTEGER, episode INTEGER, scene_season INTEGER, scene_episode INTEGER, PRIMARY KEY (indexer_id, season, episode))")
class AddXemRefresh(AddXemNumbering):
def test(self):
return self.hasTable("xem_refresh")
def execute(self):
self.connection.action("CREATE TABLE xem_refresh (indexer TEXT, indexer_id INTEGER PRIMARY KEY, last_refreshed INTEGER)")
class ConvertSceneExceptionsToIndexerID(AddXemRefresh):
def test(self):
return self.hasColumn("scene_exceptions", "indexer_id")
@ -67,7 +81,7 @@ class ConverSceneExceptionsToIndexerID(AddNetworkTimezones):
self.connection.action("INSERT INTO scene_exceptions(exception_id, indexer_id, show_name) SELECT exception_id, tvdb_id, show_name FROM tmp_scene_exceptions")
self.connection.action("DROP TABLE tmp_scene_exceptions")
class ConverSceneNamesToIndexerID(ConverSceneExceptionsToIndexerID):
class ConvertSceneNamesToIndexerID(ConvertSceneExceptionsToIndexerID):
def test(self):
return self.hasColumn("scene_names", "indexer_id")
@ -76,3 +90,4 @@ class ConverSceneNamesToIndexerID(ConverSceneExceptionsToIndexerID):
self.connection.action("CREATE TABLE scene_names (indexer_id INTEGER, name TEXT)")
self.connection.action("INSERT INTO scene_names(indexer_id, name) SELECT tvdb_id, name FROM tmp_scene_names")
self.connection.action("DROP TABLE tmp_scene_names")

View File

@ -27,7 +27,7 @@ from sickbeard import encodingKludge as ek
from sickbeard.name_parser.parser import NameParser, InvalidNameException
MIN_DB_VERSION = 9 # oldest db version we support migrating from
MAX_DB_VERSION = 26
MAX_DB_VERSION = 27
class MainSanityCheck(db.DBSanityCheck):
@ -570,4 +570,18 @@ class AddArchiveFirstMatchOption(ConvertInfoToIndexerScheme):
self.connection.action("ALTER TABLE tv_shows ADD archive_firstmatch NUMERIC")
self.incDBVersion()
class AddSceneNumbering(AddArchiveFirstMatchOption):
def test(self):
return self.checkDBVersion() >= 27
def execute(self):
backupDatabase(27)
if self.hasTable("scene_numbering"):
self.connection.action("DROP TABLE scene_numbering")
self.connection.action("CREATE TABLE scene_numbering (indexer TEXT, indexer_id INTEGER, season INTEGER, episode INTEGER, scene_season INTEGER, scene_episode INTEGER, PRIMARY KEY (indexer_id, season, episode))")
self.incDBVersion()
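Dropping an existing scene_numbering table before recreating it matters here: older installs may already have one that was created on the fly by the _check_for_schema() helper removed from scene_numbering.py below, and that version lacks the new indexer column. A sqlite3 sketch of the recreate step, outside Sick Beard's db wrapper:

import sqlite3

conn = sqlite3.connect(":memory:")

# Simulate the pre-27 state: an ad-hoc scene_numbering table without the indexer column.
conn.execute("CREATE TABLE scene_numbering (indexer_id INTEGER, season INTEGER, "
             "episode INTEGER, scene_season INTEGER, scene_episode INTEGER, "
             "PRIMARY KEY (indexer_id, season, episode))")

def has_table(conn, name):
    return conn.execute("SELECT 1 FROM sqlite_master WHERE type='table' AND name=?",
                        (name,)).fetchone() is not None

# The migration drops the old shape and recreates it with the indexer column added.
if has_table(conn, "scene_numbering"):
    conn.execute("DROP TABLE scene_numbering")
conn.execute("CREATE TABLE scene_numbering (indexer TEXT, indexer_id INTEGER, season INTEGER, "
             "episode INTEGER, scene_season INTEGER, scene_episode INTEGER, "
             "PRIMARY KEY (indexer_id, season, episode))")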

View File

@ -33,9 +33,8 @@ import uuid
import base64
from lib import requests
from httplib import BadStatusLine
from itertools import izip, cycle
from lib.httpcache import CachingHTTPAdapter
from contextlib import closing
try:
import json
@ -170,10 +169,6 @@ def getURL(url, post_data=None, headers=None, params=None, timeout=None):
Returns a byte-string retrieved from the url provider.
"""
# Cache Handler
sess = requests.Session()
sess.mount('http://', CachingHTTPAdapter())
req_headers = ['User-Agent', USER_AGENT, 'Accept-Encoding', 'gzip,deflate']
if headers:
for cur_header in headers:
@ -186,8 +181,7 @@ Returns a byte-string retrieved from the url provider.
url = urlparse.urlunparse(parsed)
it = iter(req_headers)
resp = sess.get(url, params=params, data=post_data, headers=dict(zip(it, it)))
sess.close()
resp = requests.get(url, params=params, data=post_data, headers=dict(zip(it, it)))
except requests.HTTPError, e:
logger.log(u"HTTP error " + str(e.errno) + " while loading URL " + url, logger.WARNING)
return None
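The it = iter(req_headers) / dict(zip(it, it)) idiom above turns the flat name, value, name, value list into a headers dict by consuming one iterator two items at a time. A tiny illustration (the header values are placeholders):

req_headers = ['User-Agent', 'Example-UA/1.0', 'Accept-Encoding', 'gzip,deflate']
req_headers += ['X-Example', 'demo']   # extra headers get appended as name, value pairs

it = iter(req_headers)
headers = dict(zip(it, it))            # pairs consecutive items: (name, value)
# headers == {'User-Agent': 'Example-UA/1.0',
#             'Accept-Encoding': 'gzip,deflate',
#             'X-Example': 'demo'}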
@ -200,7 +194,7 @@ Returns a byte-string retrieved from the url provider.
logger.log(u"Connection timed out " + str(e.message) + " while loading URL " + url, logger.WARNING)
return None
return resp.content if resp.ok and resp.content else None
return resp.content if resp.ok else None
def _remove_file_failed(file):
try:
@ -210,19 +204,13 @@ def _remove_file_failed(file):
def download_file(url, filename):
try:
# cache handler
sess = requests.Session()
sess.mount('http://', CachingHTTPAdapter())
req = sess.get(url, stream=True)
#CHUNK = 16 * 1024
with open(filename, 'wb') as fp:
for chunk in req.iter_content(chunk_size=(16 *1024)):
if chunk:
fp.write(chunk)
fp.flush()
fp.close()
sess.close()
with closing(requests.get(url, stream=True)) as r:
with open(filename, 'wb') as fp:
for chunk in r.iter_content(chunk_size=(16 *1024)):
if chunk:
fp.write(chunk)
fp.flush()
fp.close()
except requests.HTTPError, e:
_remove_file_failed(filename)
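The rewritten download_file streams the response in 16 KB chunks and wraps it in contextlib.closing so the connection is released even if writing fails. A condensed sketch of the same pattern (error handling trimmed; the URL below is a placeholder):

from contextlib import closing
import requests

def download_to(url, filename):
    # Stream the body instead of buffering it, and close the response on exit.
    with closing(requests.get(url, stream=True)) as r:
        with open(filename, 'wb') as fp:
            for chunk in r.iter_content(chunk_size=16 * 1024):
                if chunk:  # skip keep-alive chunks
                    fp.write(chunk)

# download_to('http://example.com/banner.jpg', '/tmp/banner.jpg')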
@ -264,7 +252,7 @@ def makeDir(path):
return True
def searchDBForShow(regShowName):
def searchDBForShow(regShowName, indexer_id=None):
showNames = [re.sub('[. -]', ' ', regShowName),regShowName]
@ -302,7 +290,7 @@ def searchDBForShow(regShowName):
return None
def searchIndexersForShow(regShowName):
def searchIndexersForShow(regShowName, indexer_id = None):
showNames = [re.sub('[. -]', ' ', regShowName),regShowName]

View File

@ -949,8 +949,7 @@ class GenericMetadata():
result = tmdb.TV(id)
except:
return None
return None
pass
if result is None:
return None

View File

@ -676,7 +676,7 @@ class PostProcessor(object):
root_ep = None
for cur_episode in episodes:
# convert episode from scene numbering to TVDB numbering
# convert episode from scene numbering to Indexer numbering
(s, e) = sickbeard.scene_numbering.get_indexer_numbering(indexer_id, season, int(cur_episode))
self._log(u"Retrieving episode object for " + str(s) + "x" + str(e), logger.DEBUG)

View File

@ -27,26 +27,16 @@ try:
except ImportError:
from lib import simplejson as json
import sickbeard
from sickbeard import logger
from sickbeard import db
from sickbeard.helpers import getURL
from sickbeard import helpers
from sickbeard.exceptions import ex
from lib import requests
MAX_XEM_AGE_SECS = 86400 # 1 day
_schema_created = False
def _check_for_schema():
global _schema_created
if not _schema_created:
myDB = db.DBConnection()
cacheDB = db.DBConnection('cache.db')
myDB.action('CREATE TABLE if not exists scene_numbering (indexer_id INTEGER, season INTEGER, episode INTEGER, scene_season INTEGER, scene_episode INTEGER, PRIMARY KEY (indexer_id, season, episode))')
cacheDB.action('CREATE TABLE if not exists xem_numbering (indexer_id INTEGER, season INTEGER, episode INTEGER, scene_season INTEGER, scene_episode INTEGER, PRIMARY KEY (indexer_id, season, episode))')
cacheDB.action('CREATE TABLE if not exists xem_refresh (indexer_id INTEGER PRIMARY KEY, last_refreshed INTEGER)')
_schema_created = True
def get_scene_numbering(indexer_id, season, episode, fallback_to_xem=True):
"""
Returns a tuple, (season, episode), with the scene numbering (if there is one),
@ -62,7 +52,7 @@ def get_scene_numbering(indexer_id, season, episode, fallback_to_xem=True):
"""
if indexer_id is None or season is None or episode is None:
return (season, episode)
result = find_scene_numbering(indexer_id, season, episode)
if result:
return result
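The fallback chain here is: the show's own scene_numbering rows first, then (when fallback_to_xem is true) the cached XEM mapping, and finally the indexer numbering unchanged. A schematic, self-contained sketch of that order with stand-in lookup functions:

def get_scene_numbering_sketch(indexer_id, season, episode, fallback_to_xem=True):
    # Same decision order as above, but with fake lookups so it runs standalone.
    if indexer_id is None or season is None or episode is None:
        return (season, episode)
    result = find_custom_mapping(indexer_id, season, episode)   # scene_numbering table
    if result:
        return result
    if fallback_to_xem:
        result = find_xem_mapping(indexer_id, season, episode)  # cached xem_numbering
        if result:
            return result
    return (season, episode)                                    # no mapping: pass through

def find_custom_mapping(indexer_id, season, episode):
    return None

def find_xem_mapping(indexer_id, season, episode):
    return (1, 2) if (season, episode) == (1, 1) else None

print(get_scene_numbering_sketch(12345, 1, 1))   # -> (1, 2), via the XEM fallback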
@ -79,11 +69,14 @@ def find_scene_numbering(indexer_id, season, episode):
"""
if indexer_id is None or season is None or episode is None:
return (season, episode)
_check_for_schema()
showObj = helpers.findCertainShow(sickbeard.showList, indexer_id)
if showObj is None: return (season, episode)
indexer = showObj.indexer
myDB = db.DBConnection()
rows = myDB.select("SELECT scene_season, scene_episode FROM scene_numbering WHERE indexer_id = ? and season = ? and episode = ?", [indexer_id, season, episode])
rows = myDB.select("SELECT scene_season, scene_episode FROM scene_numbering WHERE indexer = ? and indexer_id = ? and season = ? and episode = ?", [indexer, indexer_id, season, episode])
if rows:
return (int(rows[0]["scene_season"]), int(rows[0]["scene_episode"]))
else:
@ -96,11 +89,14 @@ def get_indexer_numbering(indexer_id, sceneSeason, sceneEpisode, fallback_to_xem
"""
if indexer_id is None or sceneSeason is None or sceneEpisode is None:
return (sceneSeason, sceneEpisode)
_check_for_schema()
showObj = helpers.findCertainShow(sickbeard.showList, indexer_id)
if showObj is None: return (sceneSeason, sceneEpisode)
indexer = showObj.indexer
myDB = db.DBConnection()
rows = myDB.select("SELECT season, episode FROM scene_numbering WHERE indexer_id = ? and scene_season = ? and scene_episode = ?", [indexer_id, sceneSeason, sceneEpisode])
rows = myDB.select("SELECT season, episode FROM scene_numbering WHERE indexer = ? and indexer_id = ? and scene_season = ? and scene_episode = ?", [indexer, indexer_id, sceneSeason, sceneEpisode])
if rows:
return (int(rows[0]["season"]), int(rows[0]["episode"]))
else:
@ -116,13 +112,16 @@ def get_scene_numbering_for_show(indexer_id):
"""
if indexer_id is None:
return {}
_check_for_schema()
showObj = helpers.findCertainShow(sickbeard.showList, indexer_id)
if showObj is None: return {}
indexer = showObj.indexer
myDB = db.DBConnection()
rows = myDB.select('''SELECT season, episode, scene_season, scene_episode
FROM scene_numbering WHERE indexer_id = ?
ORDER BY season, episode''', [indexer_id])
FROM scene_numbering WHERE indexer = ? and indexer_id = ?
ORDER BY season, episode''', [indexer, indexer_id])
result = {}
for row in rows:
result[(int(row['season']), int(row['episode']))] = (int(row['scene_season']), int(row['scene_episode']))
@ -137,8 +136,11 @@ def set_scene_numbering(indexer_id, season, episode, sceneSeason=None, sceneEpis
"""
if indexer_id is None or season is None or episode is None:
return
_check_for_schema()
showObj = helpers.findCertainShow(sickbeard.showList, indexer_id)
if showObj is None: return
indexer = showObj.indexer
myDB = db.DBConnection()
# sanity
@ -146,11 +148,11 @@ def set_scene_numbering(indexer_id, season, episode, sceneSeason=None, sceneEpis
#if sceneEpisode == None: sceneEpisode = episode
# delete any existing record first
myDB.action('DELETE FROM scene_numbering where indexer_id = ? and season = ? and episode = ?', [indexer_id, season, episode])
myDB.action('DELETE FROM scene_numbering where indexer = ? and indexer_id = ? and season = ? and episode = ?', [indexer, indexer_id, season, episode])
# now, if the new numbering is not the default, we save a new record
if sceneSeason is not None and sceneEpisode is not None:
myDB.action("INSERT INTO scene_numbering (indexer_id, season, episode, scene_season, scene_episode) VALUES (?,?,?,?,?)", [indexer_id, season, episode, sceneSeason, sceneEpisode])
myDB.action("INSERT INTO scene_numbering (indexer, indexer_id, season, episode, scene_season, scene_episode) VALUES (?,?,?,?,?,?)", [indexer, indexer_id, season, episode, sceneSeason, sceneEpisode])
def find_xem_numbering(indexer_id, season, episode):
@ -165,13 +167,16 @@ def find_xem_numbering(indexer_id, season, episode):
"""
if indexer_id is None or season is None or episode is None:
return None
_check_for_schema()
showObj = helpers.findCertainShow(sickbeard.showList, indexer_id)
if showObj is None: return None
indexer = showObj.indexer
if _xem_refresh_needed(indexer_id):
_xem_refresh(indexer_id)
cacheDB = db.DBConnection('cache.db')
rows = cacheDB.select("SELECT scene_season, scene_episode FROM xem_numbering WHERE indexer_id = ? and season = ? and episode = ?", [indexer_id, season, episode])
rows = cacheDB.select("SELECT scene_season, scene_episode FROM xem_numbering WHERE indexer = ? and indexer_id = ? and season = ? and episode = ?", [indexer, indexer_id, season, episode])
if rows:
return (int(rows[0]["scene_season"]), int(rows[0]["scene_episode"]))
else:
@ -188,12 +193,15 @@ def get_indexer_numbering_for_xem(indexer_id, sceneSeason, sceneEpisode):
"""
if indexer_id is None or sceneSeason is None or sceneEpisode is None:
return None
_check_for_schema()
showObj = helpers.findCertainShow(sickbeard.showList, indexer_id)
if showObj is None: return None
indexer = showObj.indexer
if _xem_refresh_needed(indexer_id):
_xem_refresh(indexer_id)
cacheDB = db.DBConnection('cache.db')
rows = cacheDB.select("SELECT season, episode FROM xem_numbering WHERE indexer_id = ? and scene_season = ? and scene_episode = ?", [indexer_id, sceneSeason, sceneEpisode])
rows = cacheDB.select("SELECT season, episode FROM xem_numbering WHERE indexer = ? and indexer_id = ? and scene_season = ? and scene_episode = ?", [indexer, indexer_id, sceneSeason, sceneEpisode])
if rows:
return (int(rows[0]["season"]), int(rows[0]["episode"]))
else:
@ -208,10 +216,13 @@ def _xem_refresh_needed(indexer_id):
"""
if indexer_id is None:
return False
_check_for_schema()
showObj = helpers.findCertainShow(sickbeard.showList, indexer_id)
if showObj is None: return False
indexer = showObj.indexer
cacheDB = db.DBConnection('cache.db')
rows = cacheDB.select("SELECT last_refreshed FROM xem_refresh WHERE indexer_id = ?", [indexer_id])
rows = cacheDB.select("SELECT last_refreshed FROM xem_refresh WHERE indexer = ? and indexer_id = ?", [indexer, indexer_id])
if rows:
return time.time() > (int(rows[0]['last_refreshed']) + MAX_XEM_AGE_SECS)
else:
@ -225,41 +236,58 @@ def _xem_refresh(indexer_id):
"""
if indexer_id is None:
return
showObj = helpers.findCertainShow(sickbeard.showList, indexer_id)
if showObj is None: return
indexer = showObj.indexer
try:
logger.log(u'Looking up XEM scene mapping for show %s' % (indexer_id,), logger.DEBUG)
logger.log(u'Looking up XEM scene mapping for show %s on %s' % (indexer_id, indexer,), logger.DEBUG)
#data = getURL('http://thexem.de/map/all?id=%s&origin=tvdb&destination=scene' % (indexer_id,))
data = requests.get('http://thexem.de/map/all?id=%s&origin=tvdb&destination=scene' % (indexer_id,)).json()
# http://thexem.de/map/all?id=1640 91&origin=tvdb&destination=scene
data = None
if 'Tvdb' in indexer:
data = requests.get('http://thexem.de/map/all?id=%s&origin=tvdb&destination=scene' % (indexer_id,)).json()
elif 'TVRage' in indexer:
data = requests.get('http://thexem.de/map/all?id=%s&origin=rage&destination=scene' % (indexer_id,)).json()
if data is None or data == '':
logger.log(u'No XEN data for show "%s", trying TVTumbler' % (indexer_id,), logger.MESSAGE)
#data = getURL('http://show-api.tvtumbler.com/api/thexem/all?id=%s&origin=tvdb&destination=scene' % (indexer_id,))
data = requests.get('http://show-api.tvtumbler.com/api/thexem/all?id=%s&origin=tvdb&destination=scene' % (indexer_id,)).json()
logger.log(u'No XEN data for show "%s on %s", trying TVTumbler' % (indexer_id, indexer,), logger.MESSAGE)
if 'Tvdb' in indexer:
data = requests.get('http://show-api.tvtumbler.com/api/thexem/all?id=%s&origin=tvdb&destination=scene' % (indexer_id,)).json()
elif 'TVRage' in indexer:
data = requests.get('http://show-api.tvtumbler.com/api/thexem/all?id=%s&origin=rage&destination=scene' % (indexer_id,)).json()
if data is None or data == '':
logger.log(u'TVTumbler also failed for show "%s". giving up.' % (indexer_id,), logger.MESSAGE)
logger.log(u'TVTumbler also failed for show "%s on %s". giving up.' % (indexer_id, indexer,), logger.MESSAGE)
return None
result = data
if result:
_check_for_schema()
cacheDB = db.DBConnection('cache.db')
cacheDB.action("INSERT OR REPLACE INTO xem_refresh (indexer_id, last_refreshed) VALUES (?,?)", [indexer_id, time.time()])
cacheDB.action("INSERT OR REPLACE INTO xem_refresh (indexer, indexer_id, last_refreshed) VALUES (?,?,?)", [indexer, indexer_id, time.time()])
if 'success' in result['result']:
cacheDB.action("DELETE FROM xem_numbering where indexer_id = ?", [indexer_id])
cacheDB.action("DELETE FROM xem_numbering where indexer = ? and indexer_id = ?", [indexer, indexer_id])
for entry in result['data']:
if 'scene' in entry:
cacheDB.action("INSERT INTO xem_numbering (indexer_id, season, episode, scene_season, scene_episode) VALUES (?,?,?,?,?)",
[indexer_id, entry['tvdb']['season'], entry['tvdb']['episode'], entry['scene']['season'], entry['scene']['episode'] ])
if 'Tvdb' in indexer:
cacheDB.action("INSERT INTO xem_numbering (indexer, indexer_id, season, episode, scene_season, scene_episode) VALUES (?,?,?,?,?,?)",
[indexer, indexer_id, entry['tvdb']['season'], entry['tvdb']['episode'], entry['scene']['season'], entry['scene']['episode'] ])
elif 'TVRage' in indexer:
cacheDB.action("INSERT INTO xem_numbering (indexer, indexer_id, season, episode, scene_season, scene_episode) VALUES (?,?,?,?,?,?)",
[indexer, indexer_id, entry['rage']['season'], entry['rage']['episode'], entry['scene']['season'], entry['scene']['episode'] ])
if 'scene_2' in entry: # for doubles
cacheDB.action("INSERT INTO xem_numbering (indexer_id, season, episode, scene_season, scene_episode) VALUES (?,?,?,?,?)",
[indexer_id, entry['tvdb']['season'], entry['tvdb']['episode'], entry['scene_2']['season'], entry['scene_2']['episode'] ])
#logger.log(u'Found XEM scene data for show %s' % (indexer_id), logger.MESSAGE)
if 'Tvdb' in indexer:
cacheDB.action("INSERT INTO xem_numbering (indexer, indexer_id, season, episode, scene_season, scene_episode) VALUES (?,?,?,?,?,?)",
[indexer, indexer_id, entry['tvdb']['season'], entry['tvdb']['episode'], entry['scene_2']['season'], entry['scene_2']['episode'] ])
elif 'TVRage' in indexer:
cacheDB.action("INSERT INTO xem_numbering (indexer, indexer_id, season, episode, scene_season, scene_episode) VALUES (?,?,?,?,?,?)",
[indexer, indexer_id, entry['rage']['season'], entry['rage']['episode'], entry['scene_2']['season'], entry['scene_2']['episode'] ])
else:
logger.log(u'Failed to get XEM scene data for show %s because "%s"' % (indexer_id, result['message']), logger.MESSAGE)
logger.log(u'Failed to get XEM scene data for show %s from %s because "%s"' % (indexer_id, indexer, result['message']), logger.MESSAGE)
else:
logger.log(u"Empty lookup result - no XEM data for show %s" % (indexer_id,), logger.MESSAGE)
logger.log(u"Empty lookup result - no XEM data for show %s on %s" % (indexer_id, indexer,), logger.MESSAGE)
except Exception, e:
logger.log(u"Exception while refreshing XEM data for show " + str(indexer_id) + ": " + ex(e), logger.WARNING)
logger.log(u"Exception while refreshing XEM data for show " + str(indexer_id) + " on " + indexer + ": " + ex(e), logger.WARNING)
logger.log(traceback.format_exc(), logger.DEBUG)
return None
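The refresh above now picks the XEM origin from the show's indexer: origin=tvdb for TheTVDB shows, origin=rage for TVRage shows, with TVTumbler's mirror of the same endpoint as a fallback. A small helper sketch of just the URL selection (the helper name is illustrative, not the project's):

def xem_map_url(indexer, indexer_id, use_tvtumbler=False):
    # Same origins as the lookups above; only the URL building is shown here.
    if 'Tvdb' in indexer:
        origin = 'tvdb'
    elif 'TVRage' in indexer:
        origin = 'rage'
    else:
        raise ValueError('Unknown indexer: %r' % indexer)
    base = ('http://show-api.tvtumbler.com/api/thexem/all' if use_tvtumbler
            else 'http://thexem.de/map/all')
    return '%s?id=%s&origin=%s&destination=scene' % (base, indexer_id, origin)

# xem_map_url('TVRage', 12345)
# -> 'http://thexem.de/map/all?id=12345&origin=rage&destination=scene'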
@ -271,15 +299,18 @@ def get_xem_numbering_for_show(indexer_id):
"""
if indexer_id is None:
return {}
_check_for_schema()
showObj = helpers.findCertainShow(sickbeard.showList, indexer_id)
if showObj is None: return {}
indexer = showObj.indexer
if _xem_refresh_needed(indexer_id):
_xem_refresh(indexer_id)
cacheDB = db.DBConnection('cache.db')
rows = cacheDB.select('''SELECT season, episode, scene_season, scene_episode
FROM xem_numbering WHERE indexer_id = ?
ORDER BY season, episode''', [indexer_id])
FROM xem_numbering WHERE indexer = ? and indexer_id = ?
ORDER BY season, episode''', [indexer, indexer_id])
result = {}
for row in rows:
result[(int(row['season']), int(row['episode']))] = (int(row['scene_season']), int(row['scene_episode']))
@ -295,15 +326,18 @@ def get_xem_numbering_for_season(indexer_id, season):
if indexer_id is None or season is None:
return {}
_check_for_schema()
showObj = helpers.findCertainShow(sickbeard.showList, indexer_id)
if showObj is None: return {}
indexer = showObj.indexer
if _xem_refresh_needed(indexer_id):
_xem_refresh(indexer_id)
cacheDB = db.DBConnection('cache.db')
rows = cacheDB.select('''SELECT season, scene_season
FROM xem_numbering WHERE indexer_id = ? AND season = ?
ORDER BY season''', [indexer_id, season])
FROM xem_numbering WHERE indexer = ? and indexer_id = ? AND season = ?
ORDER BY season''', [indexer, indexer_id, season])
result = {}
if rows:
for row in rows:

View File

@ -3340,6 +3340,12 @@ class Home:
if isinstance(ep_obj, str):
return json.dumps({'result': 'failure'})
# figure out what segment the episode is in and remember it so we can backlog it
if ep_obj.show.air_by_date:
segment = str(ep_obj.airdate)[:7]
else:
segment = ep_obj.season
# make a queue item for it and put it on the queue
ep_queue_item = search_queue.FailedQueueItem(ep_obj.show, {ep_obj.season: ep_obj.episode})
sickbeard.searchQueueScheduler.action.add_item(ep_queue_item) # @UndefinedVariable
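The segment above groups the failed episode for backlog searching: air-by-date shows are keyed by the year-month prefix of the air date, everything else by season number. A minimal illustration of the two cases:

import datetime

def backlog_segment(air_by_date, airdate, season):
    # 'YYYY-MM' slice of the air date for air-by-date shows, season number otherwise.
    if air_by_date:
        return str(airdate)[:7]
    return season

print(backlog_segment(True, datetime.date(2014, 3, 19), 5))    # '2014-03'
print(backlog_segment(False, datetime.date(2014, 3, 19), 5))   # 5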
@ -3364,6 +3370,27 @@ class Home:
return json.dumps({'result': 'failure'})
# try:
#
#
# ui.notifications.message('Info', pp.log)
# except exceptions.FailedHistoryNotFoundException:
# ui.notifications.error('Not Found Error', 'Couldn\'t find release in history. (Has it been over 30 days?)\n'
# 'Can\'t mark it as bad.')
# return json.dumps({'result': 'failure'})
# except exceptions.FailedHistoryMultiSnatchException:
# ui.notifications.error('Multi-Snatch Error', 'The same episode was snatched again before the first one was done.\n'
# 'Please cancel any downloads of this episode and then set it back to wanted.\n Can\'t continue.')
# return json.dumps({'result': 'failure'})
# except exceptions.FailedProcessingFailed:
# ui.notifications.error('Processing Failed', pp.log)
# return json.dumps({'result': 'failure'})
# except Exception as e:
# ui.notifications.error('Unknown Error', 'Unknown exception: ' + str(e))
# return json.dumps({'result': 'failure'})
#
# return json.dumps({'result': 'success'})
class UI:
@cherrypy.expose