Mirror of https://github.com/moparisthebest/SickRage (synced 2024-10-31 15:35:01 -04:00)
Improved the TVRage API code to include show/episode summaries and fanart.
Bugfix in the metadata code for XBMC 12+ so it writes proper episodeguide URLs. Misc tweaks here and there.
parent 01eb8c0129
commit 0fcd780c2f
@@ -26,8 +26,9 @@ class TVCheck(unittest.TestCase):
         id = 1396
         name = 'UFC'
         tmdb = TMDB(TMDB_API_KEY)
-        tv = tmdb.TV(id)
-        response = tv.images()
+        find = tmdb.Find(23281)
+        response = find.info({'external_source': 'tvrage_id'})
+        self.assertTrue(hasattr(response, name))
 
     def testTVSearch(self):
         id = 1396
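The updated test resolves the show through TMDB's external-ID lookup rather than a direct TV query. A minimal sketch of that flow, reusing the calls exactly as the test makes them (TMDB and TMDB_API_KEY are assumed to come from the test module's own imports; 23281 is the TVRage id used in the test):

# Sketch only; TMDB and TMDB_API_KEY come from the test module's imports.
tmdb = TMDB(TMDB_API_KEY)
find = tmdb.Find(23281)                                  # 23281 is an external (TVRage) id, not a TMDB id
response = find.info({'external_source': 'tvrage_id'})   # tells TMDB how to interpret that id
# the test then asserts that the matched show's attributes are present on 'response'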
@@ -13,6 +13,7 @@ __author__ = "echel0n"
 __version__ = "1.0"
 
 import os
+import re
 import time
 import urllib
 import urllib2
@@ -261,6 +262,11 @@ class TVRage:
 
         self.config = {}
 
+        if apikey is not None:
+            self.config['apikey'] = apikey
+        else:
+            self.config['apikey'] = "Uhewg1Rr0o62fvZvUIZt" # tvdb_api's API key
+
         self.config['debug_enabled'] = debug # show debugging messages
 
         self.config['custom_ui'] = custom_ui
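With this change the wrapper always ends up with an API key in its config: a caller-supplied key wins, otherwise the bundled tvdb_api key is used. A hedged sketch of the resulting call pattern; the constructor's full signature is not shown in this hunk, so the usage below is an assumption:

# Assumed usage; only the apikey handling is taken from the hunk above.
t = TVRage()                      # config['apikey'] falls back to "Uhewg1Rr0o62fvZvUIZt"
t = TVRage(apikey="YOUR_KEY")     # placeholder key; overrides the bundled default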
@@ -330,11 +336,11 @@ class TVRage:
         # http://tvrage.com/wiki/index.php/Programmers_API
         self.config['base_url'] = "http://services.tvrage.com"
 
-        self.config['url_getSeries'] = u"%(base_url)s/feeds/full_search.php?show=%%s" % self.config
+        self.config['url_getSeries'] = u"%(base_url)s/myfeeds/search.php?key=%(apikey)s&show=%%s" % self.config
 
-        self.config['url_epInfo'] = u"%(base_url)s/feeds/episode_list.php?sid=%%s" % self.config
+        self.config['url_epInfo'] = u"%(base_url)s/myfeeds/episode_list.php?key=%(apikey)s&sid=%%s" % self.config
 
-        self.config['url_seriesInfo'] = u"%(base_url)s/feeds/full_show_info.php?sid=%%s" % self.config
+        self.config['url_seriesInfo'] = u"%(base_url)s/myfeeds/showinfo.php?key=%(apikey)s&sid=%%s" % self.config
 
     def _getTempDir(self):
         """Returns the [system temp dir]/tvrage_api-u501 (or
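These URL templates are formatted in two stages: %(base_url)s and %(apikey)s are filled in immediately from self.config, while %%s survives that first pass as a literal %s so the show name or series id can be substituted at request time. A small standalone illustration of the same trick (values are placeholders):

# Two-stage %-formatting as used for the config URLs above (placeholder values).
config = {'base_url': 'http://services.tvrage.com', 'apikey': 'SOMEKEY'}

# Pass 1: dict interpolation; '%%s' collapses to a literal '%s'.
url_template = u"%(base_url)s/myfeeds/search.php?key=%(apikey)s&show=%%s" % config
# -> u'http://services.tvrage.com/myfeeds/search.php?key=SOMEKEY&show=%s'

# Pass 2: positional interpolation with the quoted show name at request time.
url = url_template % ('Doctor%20Who',)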
@@ -422,15 +428,40 @@ class TVRage:
     def _getetsrc(self, url):
         """Loads a URL using caching, returns an ElementTree of the source
         """
+        reDict = {
+            'showid': 'id',
+            'showname': 'seriesname',
+            'summary': 'overview',
+            'startdate': 'firstaired',
+            'genres': 'genre',
+            'airtime': 'airs_time',
+            'airday': 'airs_dayofweek',
+            'image': 'fanart',
+            'epnum': 'id',
+            'title': 'episodename',
+            'airdate': 'firstaired',
+            'screencap': 'filename',
+            'seasonnum': 'episodenumber',
+        }
+
+        robj = re.compile('|'.join(reDict.keys()))
         src = self._loadUrl(url)
         try:
             # TVRAGE doesn't sanitize \r (CR) from user input in some fields,
             # remove it to avoid errors. Change from SickBeard, from will14m
-            return ElementTree.fromstring(src.rstrip("\r"))
+            xml = ElementTree.fromstring(src.rstrip("\r"))
+            tree = ElementTree.ElementTree(xml)
+            for elm in tree.iter():
+                elm.tag = robj.sub(lambda m: reDict[m.group(0)], elm.tag)
+            return ElementTree.fromstring(ElementTree.tostring(xml))
         except SyntaxError:
             src = self._loadUrl(url, recache=True)
             try:
-                return ElementTree.fromstring(src.rstrip("\r"))
+                xml = ElementTree.fromstring(src.rstrip("\r"))
+                tree = ElementTree.ElementTree(xml)
+                for elm in tree.iter():
+                    elm.tag = robj.sub(lambda m: reDict[m.group(0)], elm.tag)
+                return ElementTree.fromstring(ElementTree.tostring(xml))
             except SyntaxError, exceptionmsg:
                 errormsg = "There was an error with the XML retrieved from tvrage.com:\n%s" % (
                     exceptionmsg
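The rewritten _getetsrc normalizes TVRage's element names to the tvdb-style names the rest of the code expects, which is why the per-method remap_keys tables are dropped in the hunks below. The core of it is one compiled regex alternation over the dict keys, applied to every tag in the tree. A self-contained sketch of that renaming pass:

# Standalone illustration of the tag-renaming pass added to _getetsrc above.
import re
import xml.etree.ElementTree as ElementTree

reDict = {'showname': 'seriesname', 'summary': 'overview', 'airdate': 'firstaired'}
robj = re.compile('|'.join(reDict.keys()))

src = "<show><showname>UFC</showname><summary>Fights.</summary></show>"
xml = ElementTree.fromstring(src)
for elm in ElementTree.ElementTree(xml).iter():
    # replace any matching key in the tag name with its tvdb-style equivalent
    elm.tag = robj.sub(lambda m: reDict[m.group(0)], elm.tag)

print ElementTree.tostring(xml)
# <show><seriesname>UFC</seriesname><overview>Fights.</overview></show>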
@@ -489,21 +520,6 @@ class TVRage:
         """This searches tvrage.com for the series name
         and returns the result list
         """
-
-        remap_keys = {
-            'showid': 'id',
-            'epnum': 'id',
-            'started': 'firstaired',
-            'airdate': 'firstaired',
-            'genres': 'genre',
-            'airtime': 'airs_time',
-            'name': 'seriesname',
-            'image': 'image_type',
-            'airday': 'airs_dayofweek',
-            'title': 'episodename',
-            'seasonnum': 'episodenumber'
-        }
-
         series = urllib.quote(series.encode("utf-8"))
         log().debug("Searching for show %s" % series)
         seriesEt = self._getetsrc(self.config['url_getSeries'] % (series))
@@ -511,10 +527,7 @@ class TVRage:
         seriesResult = {}
         for series in seriesEt:
             for k in series.getchildren():
-                if k.tag.lower() in remap_keys:
-                    seriesResult.setdefault(remap_keys[k.tag.lower()], k.text)
-                else:
-                    seriesResult.setdefault(k.tag.lower(), k.text)
+                seriesResult.setdefault(k.tag.lower(), k.text)
 
         seriesResult['id'] = int(seriesResult['id'])
         log().debug('Found series %s' % seriesResult['seriesname'])
@@ -549,20 +562,6 @@ class TVRage:
         shows[series_id][season_number][episode_number]
         """
-
-        remap_keys = {
-            'showid': 'id',
-            'epnum': 'id',
-            'started': 'firstaired',
-            'airdate': 'firstaired',
-            'genres': 'genre',
-            'airtime': 'airs_time',
-            'name': 'seriesname',
-            'image': 'image_type',
-            'airday': 'airs_dayofweek',
-            'title': 'episodename',
-            'seasonnum': 'episodenumber'
-        }
 
         # Parse show information
         log().debug('Getting all series data for %s' % (sid))
         seriesInfoEt = self._getetsrc(
@@ -570,12 +569,9 @@ class TVRage:
         )
 
         for curInfo in seriesInfoEt:
-            if curInfo.tag.lower() in remap_keys:
-                tag = remap_keys[curInfo.tag.lower()]
-            else:
-                tag = curInfo.tag.lower()
+            tag = curInfo.tag.lower()
 
-            if curInfo.tag.lower() in ('started', 'ended') and curInfo.text is not None:
+            if tag in 'firstaired':
                 try:
                     fixDate = dt.datetime.strptime(curInfo.text,"%b/%d/%Y")
                     value = fixDate.strftime("%Y-%m-%d")
@@ -600,10 +596,7 @@ class TVRage:
         # Parse genre data
         log().debug('Getting genres of %s' % (sid))
         for genre in seriesInfoEt.find('genres'):
-            if genre.tag in remap_keys:
-                tag = remap_keys[genre.tag.lower()]
-            else:
-                tag = genre.tag.lower()
+            tag = genre.tag.lower()
 
             value = genre.text
             if value is not None:
@@ -623,13 +616,10 @@ class TVRage:
             try:
                 seas_no = int(cur_seas.attrib['no'])
                 for cur_ep in cur_seas:
-                    ep_no = int(cur_ep.find('seasonnum').text)
+                    ep_no = int(cur_ep.find('episodenumber').text)
                     self._setItem(sid, seas_no, ep_no, 'seasonnumber', seas_no)
                     for cur_item in cur_ep:
-                        if cur_item.tag.lower() in remap_keys:
-                            tag = remap_keys[cur_item.tag.lower()]
-                        else:
-                            tag = cur_item.tag.lower()
+                        tag = cur_item.tag.lower()
 
                         value = cur_item.text
                         if value is not None:
@@ -37,16 +37,10 @@ class indexerApi(generic.GenericIndexer):
             self.config['api_parms']['cache'] = os.path.join(sickbeard.CACHE_DIR, indexer)
 
         # wrap the indexer API object and return it back
-        self._wrapped = eval(indexer)(forceConnect=True, *args, **self.config['api_parms'])
+        self._wrapped = eval(indexer)(*args, **self.config['api_parms'])
 
     def __getattr__(self, attr):
-        try:
-            return getattr(self._wrapped, attr)
-        except KeyError:
-            raise indexer_attributenotfound
+        return getattr(self._wrapped, attr)
 
     def __getitem__(self, attr):
-        try:
-            return self._wrapped.__getitem__(attr)
-        except KeyError:
-            raise indexer_attributenotfound
+        return self._wrapped.__getitem__(attr)
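The simplified indexerApi wrapper now forwards attribute and item access straight to the wrapped indexer object instead of converting KeyError into indexer_attributenotfound. A minimal, self-contained sketch of that delegation pattern:

# Generic delegation wrapper, mirroring the simplified __getattr__/__getitem__ above.
class Wrapper(object):
    def __init__(self, wrapped):
        self._wrapped = wrapped

    def __getattr__(self, attr):
        # only called for attributes not found on Wrapper itself
        return getattr(self._wrapped, attr)

    def __getitem__(self, key):
        return self._wrapped[key]

w = Wrapper({'id': 258171})
print w['id']        # 258171, via __getitem__
print w.keys()       # dict method reached through __getattr__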
@@ -1,29 +0,0 @@
-import sys
-import logging
-import traceback
-
-from indexer_api import indexerApi
-from indexer_exceptions import indexer_exception
-
-# Set our common indexer_api options here
-INDEXER_API_PARMS = {'apikey': '9DAF49C96CBF8DAC',
-                     'language': 'en',
-                     'useZip': True}
-
-
-INDEXER_API_PARMS['indexer'] = 'Tvdb'
-lindexer_api_parms = INDEXER_API_PARMS.copy()
-
-try:
-    imdbid = " "
-
-    t = indexerApi().config['valid_languages']
-    t = indexerApi(**lindexer_api_parms)
-    myEp = t[258171]
-
-    if getattr(myEp, 'seriesname', None) is not None:
-        print "FOUND"
-
-except indexer_exception as e:
-    print e
-    pass
sickbeard/indexers/test/__init__.py (new file, +1)
@@ -0,0 +1 @@
+__author__ = 'Justin'
sickbeard/indexers/test/test.py (new file, +51)
@@ -0,0 +1,51 @@
+from __future__ import with_statement
+
+import unittest
+
+import sqlite3
+
+import sys
+import os.path
+sys.path.append(os.path.abspath('..'))
+sys.path.append(os.path.abspath('../lib'))
+
+import sickbeard
+import shutil
+
+from sickbeard import encodingKludge as ek, providers, tvcache
+from sickbeard import db
+from sickbeard.databases import mainDB
+from sickbeard.databases import cache_db
+
+
+from indexer_api import indexerApi
+from indexer_exceptions import indexer_exception
+
+class APICheck(unittest.TestCase):
+    indexer_id = 258171
+    indexer = 'Tvdb'
+    # Set our common indexer_api options here
+    INDEXER_API_PARMS = {'apikey': '9DAF49C96CBF8DAC',
+                         'language': 'en',
+                         'useZip': True}
+
+
+    INDEXER_API_PARMS['indexer'] = indexer
+    lindexer_api_parms = INDEXER_API_PARMS.copy()
+
+    try:
+        imdbid = " "
+        showurl = indexerApi(**lindexer_api_parms).config['base_url'] + indexer_id + '/all/en.zip'
+        t = indexerApi().config['valid_languages']
+        t = indexerApi(**lindexer_api_parms)
+        myEp = t[258171]
+
+        if getattr(myEp, 'seriesname', None) is not None:
+            print "FOUND"
+
+    except indexer_exception as e:
+        print e
+        pass
+
+if __name__ == "__main__":
+    unittest.main()
sickbeard/indexers/test/test_lib.py (new file, +227)
@@ -0,0 +1,227 @@
+# coding=UTF-8
+# Author: Dennis Lutter <lad1337@gmail.com>
+# URL: http://code.google.com/p/sickbeard/
+#
+# This file is part of Sick Beard.
+#
+# Sick Beard is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Sick Beard is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
+
+from __future__ import with_statement
+
+import unittest
+
+import sqlite3
+
+import sys
+import os.path
+sys.path.append(os.path.abspath('..'))
+sys.path.append(os.path.abspath('../lib'))
+
+import sickbeard
+import shutil
+
+from sickbeard import encodingKludge as ek, providers, tvcache
+from sickbeard import db
+from sickbeard.databases import mainDB
+from sickbeard.databases import cache_db
+
+#=================
+# test globals
+#=================
+TESTDIR = os.path.abspath('.')
+TESTDBNAME = "sickbeard.db"
+TESTCACHEDBNAME = "cache.db"
+
+
+SHOWNAME = u"show name"
+SEASON = 4
+EPISODE = 2
+FILENAME = u"show name - s0" + str(SEASON) + "e0" + str(EPISODE) + ".mkv"
+FILEDIR = os.path.join(TESTDIR, SHOWNAME)
+FILEPATH = os.path.join(FILEDIR, FILENAME)
+
+SHOWDIR = os.path.join(TESTDIR, SHOWNAME + " final")
+
+#sickbeard.logger.sb_log_instance = sickbeard.logger.SBRotatingLogHandler(os.path.join(TESTDIR, 'sickbeard.log'), sickbeard.logger.NUM_LOGS, sickbeard.logger.LOG_SIZE)
+sickbeard.logger.SBRotatingLogHandler.log_file = os.path.join(os.path.join(TESTDIR, 'Logs'), 'test_sickbeard.log')
+
+
+#=================
+# prepare env functions
+#=================
+def createTestLogFolder():
+    if not os.path.isdir(sickbeard.LOG_DIR):
+        os.mkdir(sickbeard.LOG_DIR)
+
+# call env functions at appropriate time during sickbeard var setup
+
+#=================
+# sickbeard globals
+#=================
+sickbeard.SYS_ENCODING = 'UTF-8'
+sickbeard.showList = []
+sickbeard.QUALITY_DEFAULT = 4 # hdtv
+sickbeard.FLATTEN_FOLDERS_DEFAULT = 0
+
+sickbeard.NAMING_PATTERN = ''
+sickbeard.NAMING_ABD_PATTERN = ''
+sickbeard.NAMING_MULTI_EP = 1
+
+
+sickbeard.PROVIDER_ORDER = ["sick_beard_index"]
+sickbeard.newznabProviderList = providers.getNewznabProviderList("Sick Beard Index|http://lolo.sickbeard.com/|0|5030,5040|0!!!NZBs.org|http://nzbs.org/||5030,5040,5070,5090|0!!!Usenet-Crawler|http://www.usenet-crawler.com/||5030,5040|0")
+sickbeard.providerList = providers.makeProviderList()
+
+sickbeard.PROG_DIR = os.path.abspath('..')
+sickbeard.DATA_DIR = sickbeard.PROG_DIR
+sickbeard.LOG_DIR = os.path.join(TESTDIR, 'Logs')
+createTestLogFolder()
+sickbeard.logger.sb_log_instance.initLogging(False)
+
+
+#=================
+# dummy functions
+#=================
+def _dummy_saveConfig():
+    return True
+# this overrides the sickbeard save_config which gets called during a db upgrade
+# this might be considered a hack
+mainDB.sickbeard.save_config = _dummy_saveConfig
+
+
+# the real one tries to contact tvdb just stop it from getting more info on the ep
+def _fake_specifyEP(self, season, episode):
+    pass
+
+sickbeard.tv.TVEpisode.specifyEpisode = _fake_specifyEP
+
+
+#=================
+# test classes
+#=================
+class SickbeardTestDBCase(unittest.TestCase):
+    def setUp(self):
+        sickbeard.showList = []
+        setUp_test_db()
+        setUp_test_episode_file()
+        setUp_test_show_dir()
+
+    def tearDown(self):
+        sickbeard.showList = []
+        tearDown_test_db()
+        tearDown_test_episode_file()
+        tearDown_test_show_dir()
+
+
+class TestDBConnection(db.DBConnection, object):
+
+    def __init__(self, dbFileName=TESTDBNAME):
+        dbFileName = os.path.join(TESTDIR, dbFileName)
+        super(TestDBConnection, self).__init__(dbFileName)
+
+
+class TestCacheDBConnection(TestDBConnection, object):
+
+    def __init__(self, providerName):
+        db.DBConnection.__init__(self, os.path.join(TESTDIR, TESTCACHEDBNAME))
+
+        # Create the table if it's not already there
+        try:
+            sql = "CREATE TABLE " + providerName + " (name TEXT, season NUMERIC, episodes TEXT, indexerid NUMERIC, url TEXT, time NUMERIC, quality TEXT);"
+            self.connection.execute(sql)
+            self.connection.commit()
+        except sqlite3.OperationalError, e:
+            if str(e) != "table " + providerName + " already exists":
+                raise
+
+        # Create the table if it's not already there
+        try:
+            sql = "CREATE TABLE lastUpdate (provider TEXT, time NUMERIC);"
+            self.connection.execute(sql)
+            self.connection.commit()
+        except sqlite3.OperationalError, e:
+            if str(e) != "table lastUpdate already exists":
+                raise
+
+# this will override the normal db connection
+sickbeard.db.DBConnection = TestDBConnection
+sickbeard.tvcache.CacheDBConnection = TestCacheDBConnection
+
+
+#=================
+# test functions
+#=================
+def setUp_test_db():
+    """upgrades the db to the latest version
+    """
+    # upgrading the db
+    db.upgradeDatabase(db.DBConnection(), mainDB.InitialSchema)
+    # fix up any db problems
+    db.sanityCheckDatabase(db.DBConnection(), mainDB.MainSanityCheck)
+
+    #and for cache.b too
+    db.upgradeDatabase(db.DBConnection("cache.db"), cache_db.InitialSchema)
+
+
+def tearDown_test_db():
+    """Deletes the test db
+    although this seams not to work on my system it leaves me with an zero kb file
+    """
+    # uncomment next line so leave the db intact between test and at the end
+    #return False
+    if os.path.exists(os.path.join(TESTDIR, TESTDBNAME)):
+        os.remove(os.path.join(TESTDIR, TESTDBNAME))
+    if os.path.exists(os.path.join(TESTDIR, TESTCACHEDBNAME)):
+        os.remove(os.path.join(TESTDIR, TESTCACHEDBNAME))
+
+
+def setUp_test_episode_file():
+    if not os.path.exists(FILEDIR):
+        os.makedirs(FILEDIR)
+
+    try:
+        with open(FILEPATH, 'w') as f:
+            f.write("foo bar")
+    except EnvironmentError:
+        print "Unable to set up test episode"
+        raise
+
+
+def tearDown_test_episode_file():
+    shutil.rmtree(FILEDIR)
+
+
+def setUp_test_show_dir():
+    if not os.path.exists(SHOWDIR):
+        os.makedirs(SHOWDIR)
+
+
+def tearDown_test_show_dir():
+    shutil.rmtree(SHOWDIR)
+
+tearDown_test_db()
+
+if __name__ == '__main__':
+    print "=================="
+    print "Dont call this directly"
+    print "=================="
+    print "you might want to call"
+
+    dirList = os.listdir(TESTDIR)
+    for fname in dirList:
+        if (fname.find("_test") > 0) and (fname.find("pyc") < 0):
+            print "- " + fname
+
+    print "=================="
+    print "or just call all_tests.py"
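The new test_lib.py swaps SickBeard's database classes for throwaway SQLite files in the test directory and provides SickbeardTestDBCase, which builds and tears down the test db, a fake episode file, and a show dir around every test. A hedged sketch of how a test module would sit on top of it (the class and test names below are illustrative, not part of the commit):

# Sketch of a test module built on the harness above.
import unittest

import test_lib as test
import sickbeard


class ExampleDBTests(test.SickbeardTestDBCase):
    # setUp/tearDown from SickbeardTestDBCase create and remove the throwaway
    # sickbeard.db / cache.db files and the fake episode file around each test.
    def test_db_is_swapped(self):
        # test_lib monkeypatches the connection class, so db access hits the test db
        self.assertTrue(issubclass(sickbeard.db.DBConnection, test.TestDBConnection))


if __name__ == '__main__':
    unittest.main()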
@@ -746,7 +746,7 @@ class GenericMetadata():
             if getattr(indexer_show_obj, 'banner', None) is not None:
                 image_url = re.sub('graphical', '_cache/graphical', indexer_show_obj['banner'])
         else:
-            if getattr(indexer_show_obj, 'banner', None) is not None:
+            if getattr(indexer_show_obj, image_type, None) is not None:
                 image_url = indexer_show_obj[image_type]
 
         # Try and get posters and fanart from TMDB
@@ -153,7 +153,7 @@ class XBMC_12PlusMetadata(generic.GenericMetadata):
         episodeguideurl = etree.SubElement(episodeguide, "url")
         episodeguideurl2 = etree.SubElement(tv_node, "episodeguideurl")
         if getattr(myShow, 'id', None) is not None:
-            showurl = indexer_api.indexerApi(show_obj.indexer).config['base_url'] + myShow["id"] + '/all/en.zip'
+            showurl = t.config['base_url'] + myShow["id"] + '/all/en.zip'
             episodeguideurl.text = showurl
             episodeguideurl2.text = showurl
 
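For context on the XBMC 12+ fix above: the NFO writer emits both the nested <episodeguide><url> element that XBMC 12+ reads and a flat <episodeguideurl> fallback, and the URL now comes from the already-built indexer API object (t) rather than being reconstructed. A rough, self-contained sketch of the fragment being produced; the base URL and series id below are placeholders, not the real indexer values:

# Illustrative only: builds the same two elements the metadata writer creates above.
import xml.etree.ElementTree as etree

tv_node = etree.Element("tvshow")
episodeguide = etree.SubElement(tv_node, "episodeguide")
episodeguideurl = etree.SubElement(episodeguide, "url")
episodeguideurl2 = etree.SubElement(tv_node, "episodeguideurl")

# placeholder base_url and series id; the real code uses t.config['base_url'] + myShow["id"]
showurl = "http://indexer.example/api/APIKEY/series/" + "12345" + '/all/en.zip'
episodeguideurl.text = showurl
episodeguideurl2.text = showurl

print etree.tostring(tv_node)
# <tvshow><episodeguide><url>...zip</url></episodeguide><episodeguideurl>...zip</episodeguideurl></tvshow>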
@@ -347,7 +347,7 @@ class QueueItemAdd(ShowQueueItem):
             sickbeard.backlogSearchScheduler.action.searchBacklog([self.show]) #@UndefinedVariable
 
         self.show.writeMetadata(force=self.refresh)
         self.show.populateCache()
 
         self.show.flushEpisodes()
 
@@ -2266,7 +2266,7 @@ class NewHomeAddShows:
 
             if indexer is not None and indexer_id is not None:
                 # add the show
-                sickbeard.showQueueScheduler.action.addShow(indexer, indexer_id, show_dir, SKIPPED, sickbeard.QUALITY_DEFAULT, sickbeard.FLATTEN_FOLDERS_DEFAULT, sickbeard.SUBTITLES_DEFAULT, refresh=True) # @UndefinedVariable
+                sickbeard.showQueueScheduler.action.addShow(indexer, indexer_id, show_dir, SKIPPED, sickbeard.QUALITY_DEFAULT, sickbeard.FLATTEN_FOLDERS_DEFAULT, sickbeard.SUBTITLES_DEFAULT, refresh=True)
                 num_added += 1
 
         if num_added:
@@ -23,7 +23,6 @@ import test_lib as test
 import sickbeard
 from sickbeard.tv import TVEpisode, TVShow
 
-
 class TVShowTests(test.SickbeardTestDBCase):
 
     def setUp(self):