Fixed AttributeError: 'NoneType' object has no attribute 'is_scene' in scene_numbering code.

Show root dirs can now be set from the general config menu.
Mass editing of shows now has the ability to delete root dirs as well as edit them.
Daily search is no longer restricted to just one week's worth of results, which allows lower-quality downloads to be replaced with higher-quality ones when available.
The RSS cache is now updated on demand for each provider when performing manual, failed, backlog, or daily searches; a sketch of the resulting provider-cache pattern is shown below.
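To make the cache changes concrete, here is a minimal sketch of the pattern this commit applies across the providers; ExampleCache is a hypothetical stand-in for the per-provider cache classes in the diffs below, while _getRSSData, updateCache, minTime, and _doSearch come from the actual code:

# Minimal sketch of the per-provider cache pattern in this commit.
# ExampleCache is hypothetical; the real classes are in the diffs below.
from sickbeard import tvcache


class ExampleCache(tvcache.TVCache):
    def __init__(self, provider):
        tvcache.TVCache.__init__(self, provider)

        # only poll this provider every 20 minutes max
        self.minTime = 20

    def _getRSSData(self):
        # renamed from _getDailyData(); TVCache.updateCache() calls this
        # to fetch the provider's RSS feed when a search asks for it
        search_params = {'RSS': ['']}
        return self.provider._doSearch(search_params)


# Searches now refresh each provider's cache on demand rather than on a
# fixed schedule, e.g. (simplified from search.py below):
#
#   for curProvider in providers:
#       curProvider.cache.updateCache()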
echel0n 2014-08-30 01:47:00 -07:00
parent 8e37afbcc5
commit ee458bd211
33 changed files with 181 additions and 189 deletions

View File

@ -26,6 +26,7 @@
 #end if

 <script type="text/javascript" src="$sbRoot/js/config.js?$sbPID"></script>
+<script type="text/javascript" src="$sbRoot/js/rootDirs.js?$sbPID"></script>

 <div id="config">
     <div id="config-content">
@ -167,6 +168,14 @@
                 </label>
             </div>

+            <div class="field-pair">
+                <label class="clearfix">
+                    <span class="component-title">Show Root Directories</span>
+                    <span class="component-desc">Set root directories for where you want your shows to be.</span>
+                    #include $os.path.join($sickbeard.PROG_DIR, "gui/slick/interfaces/default/inc_rootDirs.tmpl")
+                </label>
+            </div>
+
             <input type="submit" class="btn config_submitter" value="Save Changes" />
         </fieldset>
     </div><!-- /component-group1 //-->
<input type="submit" class="btn config_submitter" value="Save Changes" />
</fieldset>
</div><!-- /component-group1 //-->

View File

@ -27,6 +27,7 @@
 #set $cur_index = $root_dir_list.index($cur_dir)
 <div style="padding: 6px 0 3px 25px;">
     <input class="btn edit_root_dir" type="button" id="edit_root_dir_$cur_index" value="Edit" />
+    <input class="btn delete_root_dir" type="button" id="delete_root_dir_$cur_index" value="Delete" />
     $cur_dir => <span id="display_new_root_dir_$cur_index">$cur_dir</span>
 </div>
 <input type="hidden" name="orig_root_dir_$cur_index" value="$cur_dir" />

View File

@ -22,4 +22,10 @@ $(document).ready(function(){
     });

+    $('.delete_root_dir').click(function(){
+        var cur_id = find_dir_index($(this).attr('id'));
+
+        $('#new_root_dir_'+cur_id).val(null);
+        $('#display_new_root_dir_'+cur_id).html('<b>DELETED</b>');
+    });
 });

View File

@ -40,67 +40,37 @@ class DailySearcher():
         self.amActive = True

-        didSearch = False
-
-        logger.log(u"Searching for coming episodes and 1 weeks worth of previously WANTED episodes ...")
-
-        providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive() and x.enable_daily]
-        for curProviderCount, curProvider in enumerate(providers):
-            logger.log(u"Updating [" + curProvider.name + "] RSS cache ...")
+        curDate = datetime.date.today().toordinal()
+
+        myDB = db.DBConnection()
+        sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE status = ? AND season > 0 AND airdate <= ?",
+                                 [common.UNAIRED, curDate])
+
+        sql_l = []
+        for sqlEp in sqlResults:
             try:
-                curProvider.cache.updateCache()
-            except exceptions.AuthException, e:
-                logger.log(u"Authentication error: " + ex(e), logger.ERROR)
-                continue
-            except Exception, e:
-                logger.log(u"Error while updating cache for " + curProvider.name + ", skipping: " + ex(e), logger.ERROR)
-                logger.log(traceback.format_exc(), logger.DEBUG)
+                show = helpers.findCertainShow(sickbeard.showList, int(sqlEp["showid"]))
+            except exceptions.MultipleShowObjectsException:
+                logger.log(u"ERROR: expected to find a single show matching " + sqlEp["showid"])
                 continue
-            didSearch = True
-
-        if didSearch:
-            logger.log(u"Searching for coming episodes and 1 weeks worth of previously WANTED episodes ...")
-
-            fromDate = datetime.date.today() - datetime.timedelta(weeks=1)
-            curDate = datetime.date.today()
-
-            myDB = db.DBConnection()
-            sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE status in (?,?) AND airdate >= ? AND airdate <= ?",
-                                     [common.UNAIRED, common.WANTED, fromDate.toordinal(), curDate.toordinal()])
-
-            sql_l = []
-            for sqlEp in sqlResults:
-                try:
-                    show = helpers.findCertainShow(sickbeard.showList, int(sqlEp["showid"]))
-                except exceptions.MultipleShowObjectsException:
-                    logger.log(u"ERROR: expected to find a single show matching " + sqlEp["showid"])
-                    continue
-
-                ep = show.getEpisode(int(sqlEp["season"]), int(sqlEp["episode"]))
-                with ep.lock:
-                    if ep.show.paused:
-                        ep.status = common.SKIPPED
-
-                    if ep.status == common.UNAIRED:
-                        logger.log(u"New episode " + ep.prettyName() + " airs today, setting status to WANTED")
-                        ep.status = common.WANTED
-
-                    sql_l.append(ep.get_sql())
-
-                if ep.status == common.WANTED:
-                    dailysearch_queue_item = sickbeard.search_queue.DailySearchQueueItem(show, [ep])
-                    sickbeard.searchQueueScheduler.action.add_item(dailysearch_queue_item)
-                else:
-                    logger.log(u"Could not find any wanted episodes for the last 7 days to search for")
-
-            if len(sql_l) > 0:
-                myDB = db.DBConnection()
-                myDB.mass_action(sql_l)
-        else:
-            logger.log(
-                u"No NZB/Torrent providers found or enabled in the sickrage config. Please check your settings.",
-                logger.ERROR)
+
+            ep = show.getEpisode(int(sqlEp["season"]), int(sqlEp["episode"]))
+            with ep.lock:
+                if ep.show.paused:
+                    ep.status = common.SKIPPED
+                else:
+                    ep.status = common.WANTED
+
+                sql_l.append(ep.get_sql())
+
+        if len(sql_l) > 0:
+            myDB = db.DBConnection()
+            myDB.mass_action(sql_l)
+
+        # queue episode for daily search
+        dailysearch_queue_item = sickbeard.search_queue.DailySearchQueueItem()
+        sickbeard.searchQueueScheduler.action.add_item(dailysearch_queue_item)

         self.amActive = False

View File

@ -210,21 +210,17 @@ def _remove_file_failed(file):
     except:
         pass


 def findCertainShow(showList, indexerid):
-    if not showList:
-        return None
-
     results = []
-    if indexerid:
+
+    if showList and indexerid:
         results = filter(lambda x: int(x.indexerid) == int(indexerid), showList)

-    if len(results):
+    if len(results) == 1:
         return results[0]
+    elif len(results) > 1:
+        raise MultipleShowObjectsException()


 def makeDir(path):
     if not ek.ek(os.path.isdir, path):
         try:

View File

@ -133,7 +133,7 @@ class AnimezbCache(tvcache.TVCache):
         # only poll Animezb every 20 minutes max
         self.minTime = 20

-    def _getDailyData(self):
+    def _getRSSData(self):
         params = {
             "cat": "anime".encode('utf-8'),

View File

@ -274,7 +274,7 @@ class BitSoupCache(tvcache.TVCache):
         # only poll BitSoup every 20 minutes max
         self.minTime = 20

-    def _getDailyData(self):
+    def _getRSSData(self):
         search_params = {'RSS': ['']}
         return self.provider._doSearch(search_params)

View File

@ -297,7 +297,7 @@ class BTNCache(tvcache.TVCache):
         # At least 15 minutes between queries
         self.minTime = 15

-    def _getDailyData(self):
+    def _getRSSData(self):
         # Get the torrents uploaded since last check.
         seconds_since_last_update = math.ceil(time.time() - time.mktime(self._getLastUpdate().timetuple()))

View File

@ -174,7 +174,7 @@ class EZRSSCache(tvcache.TVCache):
         # only poll EZRSS every 15 minutes max
         self.minTime = 15

-    def _getDailyData(self):
+    def _getRSSData(self):
         rss_url = self.provider.url + 'feed/'
         logger.log(self.provider.name + " cache update URL: " + rss_url, logger.DEBUG)

View File

@ -128,7 +128,7 @@ class FanzubCache(tvcache.TVCache):
         # only poll Fanzub every 20 minutes max
         self.minTime = 20

-    def _getDailyData(self):
+    def _getRSSData(self):
         params = {
             "cat": "anime".encode('utf-8'),

View File

@ -306,7 +306,7 @@ class FreshOnTVCache(tvcache.TVCache):
         # poll delay in minutes
         self.minTime = 20

-    def _getDailyData(self):
+    def _getRSSData(self):
         search_params = {'RSS': ['']}
         return self.provider._doSearch(search_params)

View File

@ -195,8 +195,8 @@ class GenericProvider:
         return True

-    def searchRSS(self, episodes):
-        return self.cache.findNeededEpisodes(episodes)
+    def searchRSS(self):
+        return self.cache.findNeededEpisodes()

     def getQuality(self, item, anime=False):
         """
@ -255,10 +255,15 @@ class GenericProvider:
         searched_scene_season = None
         for epObj in episodes:
-            # check cache for results
-            cacheResult = self.cache.searchCache([epObj], manualSearch)
-            if len(cacheResult):
-                results.update({epObj.episode: cacheResult[epObj]})
+            # search cache for episode result
+            cacheResult = self.cache.searchCache(epObj, manualSearch)
+            if cacheResult:
+                if epObj not in results:
+                    results = [cacheResult]
+                else:
+                    results.append(cacheResult)
+
+                # found result, search next episode
+                continue

             # skip if season already searched

View File

@ -204,7 +204,7 @@ class HDBitsCache(tvcache.TVCache):
         # only poll HDBits every 15 minutes max
         self.minTime = 15

-    def _getDailyData(self):
+    def _getRSSData(self):
         parsedJSON = self.provider.getURL(self.provider.rss_url, post_data=self.provider._make_post_data_JSON(), json=True)

         if not self.provider._checkAuthFromData(parsedJSON):

View File

@ -336,7 +336,7 @@ class HDTorrentsCache(tvcache.TVCache):
         # only poll HDTorrents every 20 minutes max
         self.minTime = 20

-    def _getDailyData(self):
+    def _getRSSData(self):
         search_params = {'RSS': []}
         return self.provider._doSearch(search_params)

View File

@ -276,7 +276,7 @@ class IPTorrentsCache(tvcache.TVCache):
         # Only poll IPTorrents every 10 minutes max
         self.minTime = 10

-    def _getDailyData(self):
+    def _getRSSData(self):
         search_params = {'RSS': ['']}
         return self.provider._doSearch(search_params)

View File

@ -355,7 +355,7 @@ class KATCache(tvcache.TVCache):
         # only poll KAT every 20 minutes max
         self.minTime = 20

-    def _getDailyData(self):
+    def _getRSSData(self):
         search_params = {'RSS': ['rss']}
         return self.provider._doSearch(search_params)

View File

@ -337,21 +337,18 @@ class NewznabCache(tvcache.TVCache):
     def updateCache(self):
-        # delete anything older then 7 days
-        self._clearCache()
-
-        if not self.shouldUpdate():
-            return
-
-        if self._checkAuth(None):
+        if self.shouldUpdate() and self._checkAuth(None):
             data = self._getRSSData()

-            # as long as the http request worked we count this as an update
-            if data:
-                self.setLastUpdate()
-            else:
+            if not data:
                 return []

+            self.setLastUpdate()
+
+            # clear cache
+            self._clearCache()
+
             if self._checkAuth(data):
                 items = data.entries
                 cl = []
@ -370,7 +367,6 @@ class NewznabCache(tvcache.TVCache):
             return []

     # overridden method that parses the rageid from the newznab feed
     def _parseItem(self, item):
         title = item.title

View File

@ -318,7 +318,7 @@ class NextGenCache(tvcache.TVCache):
         # Only poll NextGen every 10 minutes max
         self.minTime = 10

-    def _getDailyData(self):
+    def _getRSSData(self):
         search_params = {'RSS': ['']}
         return self.provider._doSearch(search_params)

View File

@ -126,7 +126,7 @@ class NyaaCache(tvcache.TVCache):
         # only poll NyaaTorrents every 15 minutes max
         self.minTime = 15

-    def _getDailyData(self):
+    def _getRSSData(self):
         params = {
             "page": 'rss',   # Use RSS page
             "order": '1',    # Sort Descending By Date

View File

@ -174,7 +174,7 @@ class OmgwtfnzbsCache(tvcache.TVCache):
         return (title, url)

-    def _getDailyData(self):
+    def _getRSSData(self):
         params = {'user': provider.username,
                   'api': provider.api_key,
                   'eng': 1,

View File

@ -102,7 +102,7 @@ class TorrentRssProvider(generic.TorrentProvider):
             if not cookie_validator.match(self.cookies):
                 return (False, 'Cookie is not correctly formatted: ' + self.cookies)

-        items = self.cache._getDailyData()
+        items = self.cache._getRSSData()

         if not len(items) > 0:
             return (False, 'No items found in the RSS feed ' + self.url)
@ -157,7 +157,7 @@ class TorrentRssCache(tvcache.TVCache):
         tvcache.TVCache.__init__(self, provider)

         self.minTime = 15

-    def _getDailyData(self):
+    def _getRSSData(self):
         logger.log(u"TorrentRssCache cache update URL: " + self.provider.url, logger.DEBUG)

         request_headers = None

View File

@ -312,7 +312,7 @@ class SCCCache(tvcache.TVCache):
         # only poll SCC every 20 minutes max
         self.minTime = 20

-    def _getDailyData(self):
+    def _getRSSData(self):
         search_params = {'RSS': ['']}
         return self.provider._doSearch(search_params)

View File

@ -252,7 +252,7 @@ class SpeedCDCache(tvcache.TVCache):
         # only poll Speedcd every 20 minutes max
         self.minTime = 20

-    def _getDailyData(self):
+    def _getRSSData(self):
         search_params = {'RSS': ['']}
         return self.provider._doSearch(search_params)

View File

@ -338,7 +338,7 @@ class ThePirateBayCache(tvcache.TVCache):
         # only poll ThePirateBay every 20 minutes max
         self.minTime = 20

-    def _getDailyData(self):
+    def _getRSSData(self):
         search_params = {'RSS': ['rss']}
         return self.provider._doSearch(search_params)

View File

@ -274,7 +274,7 @@ class TorrentBytesCache(tvcache.TVCache):
         # only poll TorrentBytes every 20 minutes max
         self.minTime = 20

-    def _getDailyData(self):
+    def _getRSSData(self):
         search_params = {'RSS': ['']}
         return self.provider._doSearch(search_params)

View File

@ -277,7 +277,7 @@ class TorrentDayCache(tvcache.TVCache):
         # Only poll TorrentDay every 10 minutes max
         self.minTime = 10

-    def _getDailyData(self):
+    def _getRSSData(self):
         search_params = {'RSS': ['']}
         return self.provider._doSearch(search_params)

View File

@ -275,7 +275,7 @@ class TorrentLeechCache(tvcache.TVCache):
         # only poll TorrentLeech every 20 minutes max
         self.minTime = 20

-    def _getDailyData(self):
+    def _getRSSData(self):
         search_params = {'RSS': ['']}
         return self.provider._doSearch(search_params)

View File

@ -83,7 +83,7 @@ class TvTorrentsCache(tvcache.TVCache):
         # only poll TvTorrents every 15 minutes max
         self.minTime = 15

-    def _getDailyData(self):
+    def _getRSSData(self):
         # These will be ignored on the serverside.
         ignore_regex = "all.month|month.of|season[\s\d]*complete"

View File

@ -53,7 +53,7 @@ def get_scene_numbering(indexer_id, indexer, season, episode, fallback_to_xem=Tr
         return (season, episode)

     showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(indexer_id))
-    if not showObj.is_scene:
+    if showObj and not showObj.is_scene:
         return (season, episode)

     result = find_scene_numbering(int(indexer_id), int(indexer), season, episode)
@ -105,7 +105,7 @@ def get_scene_absolute_numbering(indexer_id, indexer, absolute_number, fallback_
     indexer = int(indexer)

     showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, indexer_id)
-    if not showObj.is_scene:
+    if showObj and not showObj.is_scene:
         return absolute_number

     result = find_scene_absolute_numbering(indexer_id, indexer, absolute_number)

View File

@ -336,7 +336,7 @@ def filterSearchResults(show, season, results):
     return foundResults


-def searchForNeededEpisodes(show, episodes):
+def searchForNeededEpisodes():
     foundResults = {}

     didSearch = False
@ -344,15 +344,13 @@ def searchForNeededEpisodes(show, episodes):
     origThreadName = threading.currentThread().name

     providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive() and x.enable_daily]
-    for curProviderCount, curProvider in enumerate(providers):
-        if curProvider.anime_only and not show.is_anime:
-            logger.log(u"" + str(show.name) + " is not an anime, skiping", logger.DEBUG)
-            continue
-
+    for curProvider in providers:
         threading.currentThread().name = origThreadName + " :: [" + curProvider.name + "]"

         try:
-            curFoundResults = curProvider.searchRSS(episodes)
+            curProvider.cache.updateCache()
+            curFoundResults = curProvider.searchRSS()
         except exceptions.AuthException, e:
             logger.log(u"Authentication error: " + ex(e), logger.ERROR)
             continue
@ -374,6 +372,12 @@ def searchForNeededEpisodes(show, episodes):
                        logger.DEBUG)
             continue

         # find the best result for the current episode
-        bestResult = None
-        for curResult in curFoundResults[curEp]:
-            if not bestResult or bestResult.quality < curResult.quality:
-                bestResult = curResult
+        bestResult = pickBestResult(curFoundResults[curEp], curEp.show)

         # if all results were rejected move on to the next episode
@ -400,7 +404,7 @@ def searchForNeededEpisodes(show, episodes):
u"No NZB/Torrent providers found or enabled in the sickrage config for daily searches. Please check your settings.",
logger.ERROR)
return foundResults.values() if len(foundResults) else {}
return foundResults.values()
def searchProviders(show, season, episodes, manualSearch=False):
@ -409,6 +413,9 @@ def searchProviders(show, season, episodes, manualSearch=False):
     didSearch = False

+    # build name cache for show
+    sickbeard.name_cache.buildNameCache(show)
+
     # check if we want to search for season packs instead of just season/episode
     seasonSearch = False
     if not manualSearch:
@ -442,6 +449,7 @@ def searchProviders(show, season, episodes, manualSearch=False):
logger.log(u"Searching for episodes we need from " + show.name + " Season " + str(season))
try:
curProvider.cache.updateCache()
searchResults = curProvider.findSearchResults(show, season, episodes, search_mode, manualSearch)
except exceptions.AuthException, e:
logger.log(u"Authentication error: " + ex(e), logger.ERROR)

View File

@ -29,6 +29,7 @@ from sickbeard import logger
 from sickbeard import ui
 from sickbeard import common


 class BacklogSearchScheduler(scheduler.Scheduler):
     def forceSearch(self):
         self.action._set_lastBacklog(1)
@ -40,11 +41,12 @@ class BacklogSearchScheduler(scheduler.Scheduler):
         else:
             return datetime.date.fromordinal(self.action._lastBacklog + self.action.cycleTime)


 class BacklogSearcher:
     def __init__(self):
         self._lastBacklog = self._get_lastBacklog()
-        self.cycleTime = sickbeard.BACKLOG_FREQUENCY/60/24
+        self.cycleTime = sickbeard.BACKLOG_FREQUENCY / 60 / 24
         self.lock = threading.Lock()
         self.amActive = False
         self.amPaused = False
@ -99,7 +101,7 @@ class BacklogSearcher:
                 if len(segments):
                     backlog_queue_item = search_queue.BacklogQueueItem(curShow, segments)
-                    sickbeard.searchQueueScheduler.action.add_item(backlog_queue_item)  #@UndefinedVariable
+                    sickbeard.searchQueueScheduler.action.add_item(backlog_queue_item)  # @UndefinedVariable
                 else:
                     logger.log(u"Nothing needs to be downloaded for " + str(curShow.name) + ", skipping this season",
                                logger.DEBUG)
@ -132,14 +134,14 @@ class BacklogSearcher:
         return self._lastBacklog

     def _get_segments(self, show, fromDate):
-        anyQualities, bestQualities = common.Quality.splitQuality(show.quality)  #@UnusedVariable
+        anyQualities, bestQualities = common.Quality.splitQuality(show.quality)  # @UnusedVariable

         logger.log(u"Seeing if we need anything from " + show.name)

         myDB = db.DBConnection()
         if show.air_by_date:
             sqlResults = myDB.select(
-                "SELECT ep.status, ep.season, ep.episode FROM tv_episodes ep, tv_shows show WHERE season != 0 AND ep.showid = show.indexer_id AND show.paused = 0 ANd ep.airdate > ? AND ep.showid = ? AND show.air_by_date = 1",
+                "SELECT ep.status, ep.season, ep.episode FROM tv_episodes ep, tv_shows show WHERE season != 0 AND ep.showid = show.indexer_id AND show.paused = 0 AND ep.airdate > ? AND ep.showid = ? AND show.air_by_date = 1",
                 [fromDate.toordinal(), show.indexerid])
         else:
             sqlResults = myDB.select(

View File

@ -72,34 +72,27 @@ class SearchQueue(generic_queue.GenericQueue):
             return False

     def add_item(self, item):
-        if isinstance(item, (DailySearchQueueItem, BacklogQueueItem, ManualSearchQueueItem, FailedQueueItem)) \
-                and not self.is_in_queue(item.show, item.segment):
-            sickbeard.name_cache.buildNameCache(item.show)
+        if isinstance(item, DailySearchQueueItem) or (
+                isinstance(item, (BacklogQueueItem, ManualSearchQueueItem, FailedQueueItem)) and not self.is_in_queue(
+                item.show, item.segment)):
             generic_queue.GenericQueue.add_item(self, item)
         else:
             logger.log(u"Not adding item, it's already in the queue", logger.DEBUG)


 class DailySearchQueueItem(generic_queue.QueueItem):
-    def __init__(self, show, segment):
+    def __init__(self):
         generic_queue.QueueItem.__init__(self, 'Daily Search', DAILY_SEARCH)
         self.priority = generic_queue.QueuePriorities.HIGH
-        self.name = 'DAILYSEARCH-' + str(show.indexerid)
-        self.show = show
-        self.segment = segment

     def run(self):
         generic_queue.QueueItem.run(self)

         try:
-            logger.log("Beginning daily search for [" + self.show.name + "]")
-            foundResults = search.searchForNeededEpisodes(self.show, self.segment)
+            logger.log("Beginning daily search for new episodes")
+            foundResults = search.searchForNeededEpisodes()

             if not len(foundResults):
-                logger.log(u"No needed episodes found during daily search for [" + self.show.name + "]")
+                logger.log(u"No needed episodes found")
             else:
                 for result in foundResults:
                     # just use the first result for now
@ -115,6 +108,7 @@ class DailySearchQueueItem(generic_queue.QueueItem):
             self.finish()


 class ManualSearchQueueItem(generic_queue.QueueItem):
     def __init__(self, show, segment):
         generic_queue.QueueItem.__init__(self, 'Manual Search', MANUAL_SEARCH)
@ -169,7 +163,7 @@ class BacklogQueueItem(generic_queue.QueueItem):
         try:
             for season in self.segment:
                 sickbeard.searchBacklog.BacklogSearcher.currentSearchInfo = {
-                        'title': self.show.name + " Season " + str(season)}
+                    'title': self.show.name + " Season " + str(season)}

                 wantedEps = self.segment[season]

View File

@ -89,26 +89,17 @@ class TVCache():
     def _clearCache(self):
         if self.shouldClearCache():
-            logger.log(u"Clearing items older than 1 week from " + self.provider.name + " cache")
-
-            curDate = datetime.date.today() - datetime.timedelta(weeks=1)
-
             myDB = self._getDB()
-            myDB.action("DELETE FROM [" + self.providerID + "] WHERE time < ?", [int(time.mktime(curDate.timetuple()))])
+            myDB.action("DELETE FROM [" + self.providerID + "] WHERE 1")

     def _get_title_and_url(self, item):
         # override this in the provider if daily search has a different data layout to backlog searches
         return self.provider._get_title_and_url(item)

     def _getRSSData(self):
         data = None
         return data

-    def _getDailyData(self):
-        return None
-
     def _checkAuth(self):
         return self.provider._checkAuth()
@ -116,10 +107,9 @@ class TVCache():
         return True

     def updateCache(self):
         if self.shouldUpdate() and self._checkAuth():
             # as long as the http request worked we count this as an update
-            data = self._getDailyData()
+            data = self._getRSSData()

             if not data:
                 return []
@ -289,9 +279,9 @@ class TVCache():
             [name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality, release_group, version]]

-    def searchCache(self, episodes, manualSearch=False):
-        neededEps = self.findNeededEpisodes(episodes, manualSearch)
-        return neededEps
+    def searchCache(self, episode, manualSearch=False):
+        neededEps = self.findNeededEpisodes(episode, manualSearch)
+        return neededEps[episode]

     def listPropers(self, date=None, delimiter="."):
         myDB = self._getDB()
@ -303,69 +293,84 @@ class TVCache():
         return filter(lambda x: x['indexerid'] != 0, myDB.select(sql))

-    def findNeededEpisodes(self, episodes, manualSearch=False):
+    def findNeededEpisodes(self, episode=None, manualSearch=False):
         neededEps = {}

-        for epObj in episodes:
-            myDB = self._getDB()
+        if episode:
+            neededEps[episode] = []
+
+        myDB = self._getDB()
+        if not episode:
+            sqlResults = myDB.select("SELECT * FROM [" + self.providerID + "]")
+        else:
             sqlResults = myDB.select(
                 "SELECT * FROM [" + self.providerID + "] WHERE indexerid = ? AND season = ? AND episodes LIKE ?",
-                [epObj.show.indexerid, epObj.season, "%|" + str(epObj.episode) + "|%"])
+                [episode.show.indexerid, episode.season, "%|" + str(episode.episode) + "|%"])

-            # for each cache entry
-            for curResult in sqlResults:
+        # for each cache entry
+        for curResult in sqlResults:

-                # skip non-tv crap (but allow them for Newzbin cause we assume it's filtered well)
-                if self.providerID != 'newzbin' and not show_name_helpers.filterBadReleases(curResult["name"]):
-                    continue
+            # skip non-tv crap
+            if not show_name_helpers.filterBadReleases(curResult["name"]):
+                continue

-                # get the show object, or if it's not one of our shows then ignore it
-                try:
-                    showObj = helpers.findCertainShow(sickbeard.showList, int(curResult["indexerid"]))
-                except MultipleShowObjectsException:
-                    showObj = None
+            # get the show object, or if it's not one of our shows then ignore it
+            showObj = helpers.findCertainShow(sickbeard.showList, int(curResult["indexerid"]))

-                if not showObj:
-                    continue
+            if not showObj:
+                continue

+            # skip if provider is anime only and show is not anime
+            if self.provider.anime_only and not showObj.is_anime:
+                logger.log(u"" + str(showObj.name) + " is not an anime, skipping", logger.DEBUG)
+                continue

-                # get season and ep data (ignoring multi-eps for now)
-                curSeason = int(curResult["season"])
-                if curSeason == -1:
-                    continue
-                curEp = curResult["episodes"].split("|")[1]
-                if not curEp:
-                    continue
-                curEp = int(curEp)
-
-                curQuality = int(curResult["quality"])
-                curReleaseGroup = curResult["release_group"]
-                curVersion = curResult["version"]
+            # get season and ep data (ignoring multi-eps for now)
+            curSeason = int(curResult["season"])
+            if curSeason == -1:
+                continue
+            curEp = curResult["episodes"].split("|")[1]
+            if not curEp:
+                continue
+            curEp = int(curEp)

-                # if the show says we want that episode then add it to the list
-                if not showObj.wantEpisode(curSeason, curEp, curQuality, manualSearch):
-                    logger.log(u"Skipping " + curResult["name"] + " because we don't want an episode that's " +
-                               Quality.qualityStrings[curQuality], logger.DEBUG)
-                    continue
+            curQuality = int(curResult["quality"])
+            curReleaseGroup = curResult["release_group"]
+            curVersion = curResult["version"]

-                # build a result object
-                title = curResult["name"]
-                url = curResult["url"]
+            # if the show says we want that episode then add it to the list
+            if not showObj.wantEpisode(curSeason, curEp, curQuality, manualSearch):
+                logger.log(u"Skipping " + curResult["name"] + " because we don't want an episode that's " +
+                           Quality.qualityStrings[curQuality], logger.DEBUG)
+                continue

-                logger.log(u"Found result " + title + " at " + url)
+            # build name cache for show
+            sickbeard.name_cache.buildNameCache(showObj)

-                result = self.provider.getResult([epObj])
-                result.show = showObj
-                result.url = url
-                result.name = title
-                result.quality = curQuality
-                result.release_group = curReleaseGroup
-                result.version = curVersion
-                result.content = None
+            if episode:
+                epObj = episode
+            else:
+                epObj = showObj.getEpisode(curSeason, curEp)

-                # add it to the list
-                if epObj not in neededEps:
-                    neededEps[epObj] = [result]
-                else:
-                    neededEps[epObj].append(result)
+            # build a result object
+            title = curResult["name"]
+            url = curResult["url"]
+
+            logger.log(u"Found result " + title + " at " + url)
+
+            result = self.provider.getResult([epObj])
+            result.show = showObj
+            result.url = url
+            result.name = title
+            result.quality = curQuality
+            result.release_group = curReleaseGroup
+            result.version = curVersion
+            result.content = None
+
+            # add it to the list
+            if epObj not in neededEps:
+                neededEps[epObj] = [result]
+            else:
+                neededEps[epObj].append(result)

         # datetime stamp this search so cache gets cleared
         self.setLastSearch()
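
For reference, here is a hypothetical call site for the reshaped cache API above; the names provider, epObj, and manualSearch are stand-ins, while searchCache and findNeededEpisodes are the real methods:

# Hypothetical usage sketch, not part of the commit: searchCache() now
# takes a single episode object and returns only that episode's cached
# results, while findNeededEpisodes() with no argument scans the whole
# cache (this is how the daily search path uses it).
cacheResults = provider.cache.searchCache(epObj, manualSearch=False)

neededEps = provider.cache.findNeededEpisodes()  # {episode: [results]}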