# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of Sick Beard.
#
# Sick Beard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Sick Beard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.

from __future__ import with_statement

import datetime
from threading import Thread
import threading
import time

import sickbeard
from sickbeard import db, logger, common, exceptions, helpers
from sickbeard import generic_queue
from sickbeard import search, failed_history, history
from sickbeard import ui

from lib.concurrent import futures

BACKLOG_SEARCH = 10
RSS_SEARCH = 20
FAILED_SEARCH = 30
MANUAL_SEARCH = 30
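# note: FAILED_SEARCH and MANUAL_SEARCH currently share the same action id (30)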


class SearchQueue(generic_queue.GenericQueue):
    def __init__(self):
        generic_queue.GenericQueue.__init__(self)
        self.queue_name = "SEARCHQUEUE"

    def is_in_queue(self, show, segment):
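        # return True if a backlog search for this show and season segment is already queued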
        for cur_item in self.queue:
            if isinstance(cur_item, BacklogQueueItem) and cur_item.show == show and cur_item.segment == segment:
                return True
        return False

    def is_ep_in_queue(self, ep_obj):
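        # return True if a manual search for this episode is already queued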
        for cur_item in self.queue:
            if isinstance(cur_item, ManualSearchQueueItem) and cur_item.ep_obj == ep_obj:
                return True
        return False

    def pause_backlog(self):
        self.min_priority = generic_queue.QueuePriorities.HIGH

    def unpause_backlog(self):
        self.min_priority = 0

    def is_backlog_paused(self):
        # backlog priorities are NORMAL, this should be done properly somewhere
        return self.min_priority >= generic_queue.QueuePriorities.NORMAL

    def is_backlog_in_progress(self):
        for cur_item in self.queue + [self.currentItem]:
            if isinstance(cur_item, BacklogQueueItem):
                return True
        return False

    def add_item(self, item):
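        # queue RSS items unconditionally; skip backlog/manual/failed items that are already queued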
        if isinstance(item, RSSSearchQueueItem):
            generic_queue.GenericQueue.add_item(self, item)
        elif isinstance(item, BacklogQueueItem) and not self.is_in_queue(item.show, item.segment):
            generic_queue.GenericQueue.add_item(self, item)
        elif isinstance(item, ManualSearchQueueItem) and not self.is_ep_in_queue(item.ep_obj):
            generic_queue.GenericQueue.add_item(self, item)
        elif isinstance(item, FailedQueueItem) and not self.is_in_queue(item.show, item.episodes):
            generic_queue.GenericQueue.add_item(self, item)
        else:
            logger.log(u"Not adding item, it's already in the queue", logger.DEBUG)


class ManualSearchQueueItem(generic_queue.QueueItem):
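    # user-triggered search for a single episode across all active providers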
    def __init__(self, ep_obj):
        generic_queue.QueueItem.__init__(self, 'Manual Search', MANUAL_SEARCH)
        self.priority = generic_queue.QueuePriorities.HIGH
        self.ep_obj = ep_obj
        self.success = None

    def execute(self):
        generic_queue.QueueItem.execute(self)
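
        # fan the search out across all active providers; each worker thread runs self.process(provider)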
        foundResults = []
        didSearch = False

        providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]

        try:
            with futures.ThreadPoolExecutor(sickbeard.NUM_OF_THREADS) as executor:
                foundResults = list(executor.map(self.process, providers))
            didSearch = True
        except Exception, e:
            pass

        if not didSearch:
            logger.log(
                u"No NZB/Torrent providers found or enabled in your SickRage config. Please check your settings.",
                logger.ERROR)

        result = False
        if not len(foundResults):
            if self.ep_obj.show.air_by_date:
                ui.notifications.message('No downloads were found ...',
                                         "Couldn't find a download for <i>%s</i>" % self.ep_obj.prettyABDName())
                logger.log(u"Unable to find a download for " + self.ep_obj.prettyABDName())
            else:
                ui.notifications.message('No downloads were found ...',
                                         "Couldn't find a download for <i>%s</i>" % self.ep_obj.prettyName())
                logger.log(u"Unable to find a download for " + self.ep_obj.prettyName())

            self.success = result
        else:
            for foundResult in [item for sublist in foundResults for item in sublist]:
                time.sleep(0.01)

                result = search.snatchEpisode(foundResult)

                # duplicate snatch detected due to multithreading
                if result == 2:
                    continue

                providerModule = foundResult.provider
                if not result:
                    ui.notifications.error(
                        'Error while attempting to snatch ' + foundResult.name + ', check your logs')
                elif providerModule == None:
                    ui.notifications.error('Provider is configured incorrectly, unable to download')

                # just use the first result for now
                logger.log(u"Downloading " + foundResult.name + " from " + foundResult.provider.name)

                self.success = result

    def process(self, curProvider):
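        # called once per provider by the thread pool in execute(); returns this provider's search results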
        if self.ep_obj.show.air_by_date:
            logger.log("Beginning manual search for " + self.ep_obj.prettyABDName())
        else:
            logger.log("Beginning manual search for " + self.ep_obj.prettyName())

        return search.searchProviders(self.ep_obj.show, self.ep_obj.season, [self.ep_obj], curProvider, False, True)

    def finish(self):
        # don't let this linger if something goes wrong
        if self.success == None:
            self.success = False
        else:
            generic_queue.QueueItem.finish(self)


class RSSSearchQueueItem(generic_queue.QueueItem):
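    # search provider RSS feeds and caches for newly aired episodes that are wanted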
    def __init__(self):
        generic_queue.QueueItem.__init__(self, 'RSS Search', RSS_SEARCH)

    def execute(self):
        generic_queue.QueueItem.execute(self)

        foundResults = []
        didSearch = False

        providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]

        try:
            with futures.ThreadPoolExecutor(sickbeard.NUM_OF_THREADS) as executor:
                foundResults = list(executor.map(self.process, providers))
            didSearch = True
        except:
            pass

        if not didSearch:
            logger.log(
                u"No NZB/Torrent providers found or enabled in the sickbeard config. Please check your settings.",
                logger.ERROR)

        if len(foundResults):
            for curResult in [item for sublist in foundResults for item in sublist]:
                time.sleep(0.01)
                result = search.snatchEpisode(curResult)

                # duplicate snatch detected due to multithreading
                if result == 2:
                    continue
        else:
            logger.log(u"RSS Feed search found nothing to snatch ...")

        generic_queue.QueueItem.finish(self)

    def process(self, curProvider):
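        # per-provider worker: flag aired-but-missing episodes as WANTED, then scan this provider's RSS feed and cache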
        self._changeMissingEpisodes()

        logger.log(u"Beginning search for new episodes on RSS feeds and in cache")
        return search.searchForNeededEpisodes(curProvider)

    def _changeMissingEpisodes(self):
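        # flip episodes that have already aired but are still UNAIRED to WANTED (SKIPPED if the show is paused)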

        logger.log(u"Changing all old missing episodes to status WANTED")

        curDate = datetime.date.today().toordinal()

        myDB = db.DBConnection()
        sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE status = ? AND airdate < ?",
                                 [common.UNAIRED, curDate])

        for sqlEp in sqlResults:

            try:
                show = helpers.findCertainShow(sickbeard.showList, int(sqlEp["showid"]))
            except exceptions.MultipleShowObjectsException:
                logger.log(u"ERROR: expected to find a single show matching " + str(sqlEp["showid"]))
                return None

            if show == None:
                logger.log(u"Unable to find the show with ID " + str(
                    sqlEp["showid"]) + " in your show list! DB value was " + str(sqlEp), logger.ERROR)
                return None

            ep = show.getEpisode(sqlEp["season"], sqlEp["episode"])
            with ep.lock:
                if ep.show.paused:
                    ep.status = common.SKIPPED
                else:
                    ep.status = common.WANTED
                ep.saveToDB()


class BacklogQueueItem(generic_queue.QueueItem):
    def __init__(self, show, segment):
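        # segment is a season number, or a 'YYYY-MM' string for air-by-date shows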
        generic_queue.QueueItem.__init__(self, 'Backlog', BACKLOG_SEARCH)
        self.priority = generic_queue.QueuePriorities.LOW
        self.thread_name = 'BACKLOG-' + str(show.indexerid)

        self.show = show
        self.segment = segment
        self.wantedEpisodes = []

        logger.log(u"Seeing if we need any episodes from " + self.show.name + " season " + str(self.segment))

        myDB = db.DBConnection()

        # see if there is anything in this season worth searching for
        if not self.show.air_by_date:
            statusResults = myDB.select("SELECT status, episode FROM tv_episodes WHERE showid = ? AND season = ?",
                                        [self.show.indexerid, self.segment])
        else:
            season_year, season_month = map(int, self.segment.split('-'))
            min_date = datetime.date(season_year, season_month, 1)

            # it's easier to just hard code this than to worry about rolling the year over or making a month length map
            if season_month == 12:
                max_date = datetime.date(season_year, 12, 31)
            else:
                max_date = datetime.date(season_year, season_month + 1, 1) - datetime.timedelta(days=1)

            statusResults = myDB.select(
                "SELECT status, episode FROM tv_episodes WHERE showid = ? AND airdate >= ? AND airdate <= ?",
                [self.show.indexerid, min_date.toordinal(), max_date.toordinal()])

        anyQualities, bestQualities = common.Quality.splitQuality(self.show.quality)  #@UnusedVariable
        self.wantedEpisodes = self._need_any_episodes(statusResults, bestQualities)

    def execute(self):
        generic_queue.QueueItem.execute(self)

        foundResults = []
        didSearch = False

        providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]

        try:
            with futures.ThreadPoolExecutor(sickbeard.NUM_OF_THREADS) as executor:
                foundResults = list(executor.map(self.process, providers))
            didSearch = True
        except:
            pass

        if not didSearch:
            logger.log(
                u"No NZB/Torrent providers found or enabled in the sickbeard config. Please check your settings.",
                logger.ERROR)

        if len(foundResults):
            for curResult in [item for sublist in foundResults for item in sublist]:
                time.sleep(0.01)

                result = search.snatchEpisode(curResult)

                # duplicate snatch detected due to multithreading
                if result == 2:
                    continue
        else:
            logger.log(u"Backlog search found nothing to snatch ...")

        self.finish()

    def process(self, curProvider):
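        # per-provider worker: search this segment, using a season-pack search when every episode in it is wanted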
        # check if we want to search for season packs instead of just season/episode
        seasonSearch = False
        seasonEps = self.show.getAllEpisodes(self.segment)
        if len(seasonEps) == len(self.wantedEpisodes):
            seasonSearch = True

        return search.searchProviders(self.show, self.segment, self.wantedEpisodes, curProvider, seasonSearch, False)

    def _need_any_episodes(self, statusResults, bestQualities):
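        # build the list of episode objects in this segment that are WANTED or eligible for a quality upgrade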
        wantedEpisodes = []

        # check through the list of statuses to see if we want any
        for curStatusResult in statusResults:
            time.sleep(0.01)

            curCompositeStatus = int(curStatusResult["status"])
            curStatus, curQuality = common.Quality.splitCompositeStatus(curCompositeStatus)
            episode = int(curStatusResult["episode"])

            if bestQualities:
                highestBestQuality = max(bestQualities)
            else:
                highestBestQuality = 0

            # if we need a better one then say yes
            if (curStatus in (common.DOWNLOADED, common.SNATCHED, common.SNATCHED_PROPER,
                              common.SNATCHED_BEST) and curQuality < highestBestQuality) or curStatus == common.WANTED:
                epObj = self.show.getEpisode(self.segment, episode)
                wantedEpisodes.append(epObj)

        return wantedEpisodes


class FailedQueueItem(generic_queue.QueueItem):
    def __init__(self, show, episodes):
        generic_queue.QueueItem.__init__(self, 'Retry', FAILED_SEARCH)
        self.priority = generic_queue.QueuePriorities.HIGH
        self.thread_name = 'RETRY-' + str(show.indexerid)

        self.show = show
        self.episodes = episodes

        self.success = None

    def execute(self):
        generic_queue.QueueItem.execute(self)

        foundResults = []
        didSearch = False

        providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]

        try:
            with futures.ThreadPoolExecutor(sickbeard.NUM_OF_THREADS) as executor:
                foundResults = list(executor.map(self.process, providers))
            didSearch = True
        except:
            pass

        if not didSearch:
            logger.log(
                u"No NZB/Torrent providers found or enabled in the sickbeard config. Please check your settings.",
                logger.ERROR)

        if len(foundResults):
            for curResult in [item for sublist in foundResults for item in sublist]:
                time.sleep(0.01)

                result = search.snatchEpisode(curResult)

                # duplicate snatch detected due to multithreading
                if result == 2:
                    continue

                self.success = result
        else:
            logger.log(u"Retry failed download search found nothing to snatch ...")

        self.finish()

    def process(self, curProvider):
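        # per-provider worker: mark each episode's previous release as failed, revert its status, then search again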
        episodes = []

        for i, epObj in enumerate(self.episodes):
            time.sleep(0.01)

            if epObj.show.air_by_date:
                logger.log("Beginning failed download search for " + epObj.prettyABDName())
            else:
                logger.log("Beginning failed download search for " + epObj.prettyName())

            (release, provider) = failed_history.findRelease(self.show, epObj.season, epObj.episode)
            if release:
                logger.log(u"Marking release as bad: " + release)
                failed_history.markFailed(self.show, epObj.season, epObj.episode)
                failed_history.logFailed(release)
                history.logFailed(self.show.indexerid, epObj.season, epObj.episode, epObj.status, release, provider)

            failed_history.revertEpisode(self.show, epObj.season, epObj.episode)
            episodes.append(epObj)

        return search.searchProviders(self.show, self.episodes[0].season, self.episodes, curProvider, False, False)