mirror of https://github.com/moparisthebest/SickRage synced 2024-08-13 16:53:54 -04:00

Fixed issues with post-processing.

Fixed an issue with priority post-processing: the bug allowed a 2nd attempt to go through even when priority settings were not selected.

Fixed issues with cache retrieval and storage of provider results; results are now stored with season and episode info converted to indexer numbering.

Fixed a bug that cached unconverted scene-numbered files for post-processing, which caused us to skip converting them when needed.
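
The recurring change in this commit is to call .convert() on the ParseResult immediately after parsing (parser.parse(name).convert()), so scene season/episode numbers are translated to indexer numbering before results are cached, matched, or post-processed. Below is a minimal, self-contained Python sketch of that idea; the ParseResult class and SCENE_TO_INDEXER table are hypothetical stand-ins for illustration only, not SickRage's actual implementation (which computes the mapping via sickbeard.scene_numbering.get_indexer_numbering() keyed by the show's indexerid and indexer).

# Illustrative sketch only: scene -> indexer numbering conversion.
# SCENE_TO_INDEXER is a made-up lookup table; in SickRage the real mapping
# comes from sickbeard.scene_numbering.get_indexer_numbering().
SCENE_TO_INDEXER = {("Some Show", 1, 14): (2, 1)}  # (name, scene season, scene ep) -> (indexer season, indexer ep)

class ParseResult(object):
    def __init__(self, series_name, season_number, episode_numbers):
        self.series_name = series_name
        self.season_number = season_number
        self.episode_numbers = episode_numbers

    def convert(self):
        # Translate every scene-numbered episode to indexer numbering,
        # falling back to the original numbers when no mapping exists.
        new_season = self.season_number
        new_episodes = []
        for ep in self.episode_numbers:
            s, e = SCENE_TO_INDEXER.get(
                (self.series_name, self.season_number, ep),
                (self.season_number, ep))
            new_season = s
            new_episodes.append(e)
        return ParseResult(self.series_name, new_season, new_episodes)

parsed = ParseResult("Some Show", 1, [14]).convert()
print(parsed.season_number, parsed.episode_numbers)  # indexer numbering: 2 [1]

Converting right after parse() is what keeps the provider cache and the post-processor working in indexer numbering, which is the point of the last two fixes above.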
echel0n 2014-05-26 03:42:34 -07:00
parent 7e711c0665
commit f7b11e1e98
8 changed files with 96 additions and 135 deletions

View File

@@ -55,7 +55,7 @@ class FailedProcessor(object):
parser = NameParser(False)
try:
parsed = parser.parse(releaseName)
parsed = parser.parse(releaseName).convert()
except InvalidNameException:
self._log(u"Error: release name is invalid: " + releaseName, logger.WARNING)
raise exceptions.FailedProcessingFailed()
@@ -69,22 +69,18 @@ class FailedProcessor(object):
logger.log(u" - " + str(parsed.air_date), logger.DEBUG)
logger.log(u" - " + str(parsed.sports_event_date), logger.DEBUG)
self._show_obj = sickbeard.helpers.get_show_by_name(parsed.series_name)
if self._show_obj is None:
if parsed.show is None:
self._log(
u"Could not create show object. Either the show hasn't been added to SickRage, or it's still loading (if SB was restarted recently)",
logger.WARNING)
raise exceptions.FailedProcessingFailed()
# scene -> indexer numbering
parsed = parsed.convert(self._show_obj)
segment = {parsed.season_number:[]}
for episode in parsed.episode_numbers:
epObj = self._show_obj.getEpisode(parsed.season_number, episode)
epObj = parsed.show.getEpisode(parsed.season_number, episode)
segment[parsed.season_number].append(epObj)
cur_failed_queue_item = search_queue.FailedQueueItem(self._show_obj, segment)
cur_failed_queue_item = search_queue.FailedQueueItem(parsed.show, segment)
sickbeard.searchQueueScheduler.action.add_item(cur_failed_queue_item)
return True

View File

@@ -1009,24 +1009,34 @@ def _check_against_names(nameInQuestion, show, season=-1):
def get_show_by_name(name, useIndexer=False):
name = full_sanitizeSceneName(name)
showObj = sickbeard.name_cache.retrieveShowFromCache(name)
if not showObj and sickbeard.showList:
showNames = list(set(sickbeard.show_name_helpers.sceneToNormalShowNames(name)))
for showName in showNames:
if showName in sickbeard.scene_exceptions.exceptionIndexerCache:
showObj = findCertainShow(sickbeard.showList, int(sickbeard.scene_exceptions.exceptionIndexerCache[showName]))
if showObj:
break
try:
# check cache for show
showObj = sickbeard.name_cache.retrieveShowFromCache(name)
if showObj:
return showObj
if useIndexer and not showObj:
(sn, idx, id) = searchIndexerForShowID(showName, ui=classes.ShowListUI)
if id:
showObj = findCertainShow(sickbeard.showList, int(id))
if not showObj and sickbeard.showList:
showNames = list(set(sickbeard.show_name_helpers.sceneToNormalShowNames(name)))
for showName in showNames:
if showName in sickbeard.scene_exceptions.exceptionIndexerCache:
showObj = findCertainShow(sickbeard.showList, int(sickbeard.scene_exceptions.exceptionIndexerCache[showName]))
if showObj:
break
return showObj
if useIndexer and not showObj:
(sn, idx, id) = searchIndexerForShowID(showName, ui=classes.ShowListUI)
if id:
showObj = findCertainShow(sickbeard.showList, int(id))
if showObj:
break
# add show to cache
if showObj:
sickbeard.name_cache.addNameToCache(name, showObj.indexerid)
except:
showObj = None
return showObj
def is_hidden_folder(folder):
"""

View File

@@ -202,6 +202,7 @@ class NameParser(object):
result.show = show
return result
elif cur_regex_type == 'normal':
result.show = show if show else None
return result
return None
@@ -254,7 +255,7 @@ class NameParser(object):
return result
def parse(self, name):
def parse(self, name, cache_result=True):
name = self._unicodify(name)
cached = name_parser_cache.get(name)
@@ -329,7 +330,9 @@ class NameParser(object):
if final_result.season_number == None and not final_result.episode_numbers and final_result.air_date == None and not final_result.series_name:
raise InvalidNameException("Unable to parse " + name.encode(sickbeard.SYS_ENCODING, 'xmlcharrefreplace'))
name_parser_cache.add(name, final_result)
if cache_result:
name_parser_cache.add(name, final_result)
return final_result
@@ -436,8 +439,8 @@ class ParseResult(object):
return to_return.encode('utf-8')
def convert(self, show):
if not show: return self # need show object
def convert(self):
if not self.show: return self # need show object
if not self.season_number: return self # can't work without a season
if not len(self.episode_numbers): return self # need at least one episode
if self.air_by_date or self.sports: return self # scene numbering does not apply to air-by-date
@@ -451,7 +454,7 @@ class ParseResult(object):
if len(self.ab_episode_numbers):
abNo = self.ab_episode_numbers[i]
(s, e, a) = scene_numbering.get_indexer_numbering(show.indexerid, show.indexer, self.season_number,
(s, e, a) = scene_numbering.get_indexer_numbering(self.show.indexerid, self.show.indexer, self.season_number,
epNo, abNo)
new_episode_numbers.append(e)
new_season_numbers.append(s)
@@ -491,7 +494,8 @@ class ParseResult(object):
def _is_anime(self):
if self.ab_episode_numbers:
return True
if self.show and self.show.is_anime:
return True
return False
is_anime = property(_is_anime)

View File

@@ -389,7 +389,7 @@ class PostProcessor(object):
Returns a (indexer_id, season, []) tuple. The first two may be None if none were found.
"""
to_return = (None, None, None, [], None)
to_return = (None, None, [], None)
# if we don't have either of these then there's nothing to use to search the history for anyway
if not self.nzb_name and not self.folder_name:
@@ -415,7 +415,7 @@ class PostProcessor(object):
if len(sql_results) == 0:
continue
indexer_id = int(sql_results[0]["showid"])
show = helpers.findCertainShow(sickbeard.showList, int(sql_results[0]["showid"]))
season = int(sql_results[0]["season"])
quality = int(sql_results[0]["quality"])
@@ -423,7 +423,7 @@ class PostProcessor(object):
quality = None
self.in_history = True
to_return = (indexer_id, None, season, [], quality)
to_return = (show, season, [], quality)
self._log("Found result in history: " + str(to_return), logger.DEBUG)
return to_return
@@ -436,8 +436,7 @@ class PostProcessor(object):
# remember whether it's a proper
if parse_result.extra_info:
self.is_proper = re.search('(^|[\. _-])(proper|repack)([\. _-]|$)', parse_result.extra_info,
re.I) != None
self.is_proper = re.search('(^|[\. _-])(proper|repack)([\. _-]|$)', parse_result.extra_info,re.I) != None
# if the result is complete then remember that for later
if parse_result.series_name and parse_result.season_number != None and parse_result.episode_numbers and parse_result.release_group:
@@ -471,17 +470,14 @@ class PostProcessor(object):
logger.log(u"Analyzing name " + repr(name))
indexer_id = None
indexer = None
to_return = (indexer_id, indexer, None, [], None)
to_return = (None, None, [], None)
if not name:
return to_return
# parse the name to break it into show name, season, and episode
np = NameParser(file, useIndexers=True)
parse_result = np.parse(name)
parse_result = np.parse(name).convert()
self._log(u"Parsed " + name + " into " + str(parse_result).decode('utf-8', 'xmlcharrefreplace'), logger.DEBUG)
@@ -495,22 +491,17 @@ class PostProcessor(object):
season = parse_result.season_number
episodes = parse_result.episode_numbers
showObj = helpers.get_show_by_name(parse_result.series_name)
if showObj:
indexer_id = showObj.indexerid
indexer = showObj.indexer
to_return = (indexer_id, indexer, season, episodes, None)
to_return = (parse_result.show, season, episodes, None)
self._finalize(parse_result)
return to_return
def _analyze_anidb(self, filePath):
# TODO: rewrite this
return (None, None, None)
return (None, None, None, None)
if not helpers.set_up_anidb_connection():
return (None, None, None)
return (None, None, None, None)
ep = self._build_anidb_episode(sickbeard.ADBA_CONNECTION, filePath)
try:
@@ -545,7 +536,7 @@ class PostProcessor(object):
else:
if len(episodes):
self._log(u"Lookup successful from anidb. ", logger.DEBUG)
return (indexer_id, season, episodes)
return (show, season, episodes, None)
if ep.anidb_file_name:
self._log(u"Lookup successful, using anidb filename " + str(ep.anidb_file_name), logger.DEBUG)
@@ -576,7 +567,7 @@ class PostProcessor(object):
For a given file try to find the showid, season, and episode.
"""
indexer_id = indexer = season = quality = None
show = season = quality = None
episodes = []
# try to look up the nzb in history
@@ -595,23 +586,23 @@ class PostProcessor(object):
lambda: self._analyze_name(self.file_path),
# try to analyze the dir + file name together as one name
lambda: self._analyze_name(self.folder_name + u' ' + self.file_name)
lambda: self._analyze_name(self.folder_name + u' ' + self.file_name),
# try to analyze the file path with the help of aniDB
lambda: self._analyze_anidb(self.file_path)
]
# attempt every possible method to get our info
for cur_attempt in attempt_list:
try:
(cur_indexer_id, cur_indexer, cur_season, cur_episodes, cur_quality) = cur_attempt()
(cur_show, cur_season, cur_episodes, cur_quality) = cur_attempt()
except InvalidNameException, e:
logger.log(u"Unable to parse, skipping: " + ex(e), logger.DEBUG)
continue
# check and confirm first that the indexer_id exists in our shows list before setting it
if cur_indexer_id != indexer_id and cur_indexer:
indexer_id = cur_indexer_id
indexer = cur_indexer
if cur_show:
show = cur_show
if cur_quality and not (self.in_history and quality):
quality = cur_quality
@@ -622,46 +613,46 @@ class PostProcessor(object):
episodes = cur_episodes
# for air-by-date shows we need to look up the season/episode from database
if season == -1 and indexer_id and indexer and episodes:
if season == -1 and show and episodes:
self._log(u"Looks like this is an air-by-date or sports show, attempting to convert the date to season/episode",
logger.DEBUG)
airdate = episodes[0].toordinal()
myDB = db.DBConnection()
sql_result = myDB.select("SELECT season, episode FROM tv_episodes WHERE showid = ? and indexer = ? and airdate = ?",
[indexer_id, indexer, airdate])
[show.indexerid, show.indexer, airdate])
if sql_result:
season = int(sql_result[0][0])
episodes = [int(sql_result[0][1])]
else:
self._log(u"Unable to find episode with date " + str(episodes[0]) + u" for show " + str(
indexer_id) + u", skipping", logger.DEBUG)
show.indexerid) + u", skipping", logger.DEBUG)
# we don't want to leave dates in the episode list if we couldn't convert them to real episode numbers
episodes = []
continue
# if there's no season then we can hopefully just use 1 automatically
elif season == None and indexer_id and indexer:
elif season == None and show:
myDB = db.DBConnection()
numseasonsSQlResult = myDB.select(
"SELECT COUNT(DISTINCT season) as numseasons FROM tv_episodes WHERE showid = ? and indexer = ? and season != 0",
[indexer_id, indexer])
[show.indexerid, show.indexer])
if int(numseasonsSQlResult[0][0]) == 1 and season == None:
self._log(
u"Don't have a season number, but this show appears to only have 1 season, setting seasonnumber to 1...",
u"Don't have a season number, but this show appears to only have 1 season, setting season number to 1...",
logger.DEBUG)
season = 1
if indexer_id and indexer and season and episodes:
return (indexer_id, indexer, season, episodes, quality)
if show and season and episodes:
return (show, season, episodes, quality)
return (indexer_id, indexer, season, episodes, quality)
return (show, season, episodes, quality)
def _get_ep_obj(self, indexer_id, indexer, season, episodes):
def _get_ep_obj(self, show, season, episodes):
"""
Retrieve the TVEpisode object requested.
indexer_id: The indexerid of the show (int)
show: The show object belonging to the show we want to process
season: The season of the episode (int)
episodes: A list of episodes to find (list of ints)
@@ -669,36 +660,17 @@ class PostProcessor(object):
be instantiated and returned. If the episode can't be found then None will be returned.
"""
self._log(u"Loading show object with Indexer ID:[" + str(indexer_id) + "] for Indexer:[" + str(sickbeard.indexerApi(indexer).name) + "]", logger.DEBUG)
# find the show in the showlist
try:
show_obj = helpers.findCertainShow(sickbeard.showList, indexer_id)
except exceptions.MultipleShowObjectsException:
raise #TODO: later I'll just log this, for now I want to know about it ASAP
# if we can't find the show then there's nothing we can really do
if not show_obj:
self._log(u"This show isn't in your list, you need to add it to SB before post-processing an episode",
logger.ERROR)
raise exceptions.PostProcessingFailed()
root_ep = None
for cur_episode in episodes:
self._log(u"Retrieving episode object for " + str(season) + "x" + str(cur_episode), logger.DEBUG)
# detect and convert scene numbered releases
season, cur_episode = sickbeard.scene_numbering.get_indexer_numbering(indexer_id,indexer,season,cur_episode)
# now that we've figured out which episode this file is just load it manually
try:
curEp = show_obj.getEpisode(season, cur_episode)
curEp = show.getEpisode(season, cur_episode)
except exceptions.EpisodeNotFoundException, e:
self._log(u"Unable to create episode: " + ex(e), logger.DEBUG)
raise exceptions.PostProcessingFailed()
self._log(u"Episode object has been converted from Scene numbering " + str(curEp.scene_season) + "x" + str(
curEp.scene_episode) + " to Indexer numbering" + str(curEp.season) + "x" + str(curEp.episode))
# associate all the episodes together under a single root episode
if root_ep == None:
root_ep = curEp
@@ -856,14 +828,18 @@ class PostProcessor(object):
self.in_history = False
# try to find the file info
(indexer_id, indexer, season, episodes, quality) = self._find_info()
if not indexer_id or not indexer or season == None or not episodes:
(show, season, episodes, quality) = self._find_info()
if not show:
self._log(u"This show isn't in your list, you need to add it to SB before post-processing an episode",
logger.ERROR)
raise exceptions.PostProcessingFailed()
elif season == None or not episodes:
self._log(u"Not enough information to determine what episode this is", logger.DEBUG)
self._log(u"Quitting post-processing", logger.DEBUG)
return False
# retrieve/create the corresponding TVEpisode objects
ep_obj = self._get_ep_obj(indexer_id, indexer, season, episodes)
ep_obj = self._get_ep_obj(show, season, episodes)
# get the quality of the episode we're processing
if quality:
@@ -878,10 +854,6 @@ class PostProcessor(object):
priority_download = self._is_priority(ep_obj, new_ep_quality)
self._log(u"Is ep a priority download: " + str(priority_download), logger.DEBUG)
# set the status of the episodes
for curEp in [ep_obj] + ep_obj.relatedEps:
curEp.status = common.Quality.compositeStatus(common.SNATCHED, new_ep_quality)
# check for an existing file
existing_file_status = self._checkForExistingFile(ep_obj.location)
@@ -907,6 +879,10 @@ class PostProcessor(object):
u"This download is marked a priority download so I'm going to replace an existing file if I find one",
logger.DEBUG)
# set the status of the episodes
#for curEp in [ep_obj] + ep_obj.relatedEps:
# curEp.status = common.Quality.compositeStatus(common.SNATCHED, new_ep_quality)
# delete the existing file (and company)
for cur_ep in [ep_obj] + ep_obj.relatedEps:
try:

View File

@@ -208,14 +208,14 @@ def validateDir(path, dirName, nzbNameOriginal, failed):
#check if the dir have at least one tv video file
for video in videoFiles:
try:
NameParser().parse(video)
NameParser().parse(video, cache_result=False)
return True
except InvalidNameException:
pass
for dir in allDirs:
try:
NameParser().parse(dir)
NameParser().parse(dir, cache_result=False)
return True
except InvalidNameException:
pass
@@ -226,7 +226,7 @@ def validateDir(path, dirName, nzbNameOriginal, failed):
for packed in packedFiles:
try:
NameParser().parse(packed)
NameParser().parse(packed, cache_result=False)
return True
except InvalidNameException:
pass

View File

@@ -146,19 +146,12 @@ class ProperFinder():
curProper.indexerid = curShow.indexerid
break
showObj = None
if curProper.indexerid:
showObj = helpers.findCertainShow(sickbeard.showList, curProper.indexerid)
if not showObj:
if not parse_result.show:
sickbeard.name_cache.addNameToCache(parse_result.series_name, 0)
continue
if not in_cache:
sickbeard.name_cache.addNameToCache(parse_result.series_name, curProper.indexerid)
# scene numbering -> indexer numbering
parse_result = parse_result.convert(showObj)
sickbeard.name_cache.addNameToCache(parse_result.series_name, parse_result.show.indexerid)
if not parse_result.episode_numbers:
logger.log(

View File

@@ -286,16 +286,13 @@ class GenericProvider:
# parse the file name
try:
myParser = NameParser(False, show=show, useIndexers=manualSearch)
parse_result = myParser.parse(title)
parse_result = myParser.parse(title).convert()
except InvalidNameException:
logger.log(u"Unable to parse the filename " + title + " into a valid episode", logger.WARNING)
continue
quality = self.getQuality(item, parse_result.is_anime)
# scene -> indexer numbering
parse_result = parse_result.convert(self.show)
if not (self.show.air_by_date or self.show.sports):
if search_mode == 'sponly' and len(parse_result.episode_numbers):
logger.log(

View File

@@ -11,7 +11,7 @@
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
@@ -45,6 +45,7 @@ from name_parser.parser import NameParser, InvalidNameException
cache_lock = threading.Lock()
class CacheDBConnection(db.DBConnection):
def __init__(self, providerName):
db.DBConnection.__init__(self, "cache.db")
@@ -260,13 +261,10 @@ class TVCache():
return True
def _addCacheEntry(self, name, url, quality=None):
indexerid = None
in_cache = False
# if we don't have complete info then parse the filename to get it
try:
myParser = NameParser()
parse_result = myParser.parse(name)
parse_result = myParser.parse(name).convert()
except InvalidNameException:
logger.log(u"Unable to parse the filename " + name + " into a valid episode", logger.DEBUG)
return None
@@ -279,36 +277,26 @@ class TVCache():
logger.log(u"No series name retrieved from " + name + ", unable to cache it", logger.DEBUG)
return None
cacheResult = sickbeard.name_cache.retrieveNameFromCache(parse_result.series_name)
if cacheResult:
in_cache = True
indexerid = int(cacheResult)
elif cacheResult == 0:
return None
showObj = None
if parse_result.show:
showObj = parse_result.show
if not indexerid:
if not showObj:
showResult = helpers.searchDBForShow(parse_result.series_name)
if showResult:
indexerid = int(showResult[0])
showObj = helpers.findCertainShow(sickbeard.showList, int(showResult[0]))
if not indexerid:
if not showObj:
for curShow in sickbeard.showList:
if show_name_helpers.isGoodResult(name, curShow, False):
indexerid = curShow.indexerid
showObj = curShow
break
showObj = None
if indexerid:
showObj = helpers.findCertainShow(sickbeard.showList, indexerid)
if not showObj:
logger.log(u"No match for show: [" + parse_result.series_name + "], not caching ...", logger.DEBUG)
sickbeard.name_cache.addNameToCache(parse_result.series_name, 0)
return None
# scene -> indexer numbering
parse_result = parse_result.convert(showObj)
season = episodes = None
if parse_result.air_by_date or parse_result.sports:
myDB = db.DBConnection()
@@ -316,7 +304,7 @@ class TVCache():
airdate = parse_result.air_date.toordinal() or parse_result.sports_event_date.toordinal()
sql_results = myDB.select(
"SELECT season, episode FROM tv_episodes WHERE showid = ? AND indexer = ? AND airdate = ?",
[indexerid, showObj.indexer, airdate])
[showObj.indexerid, showObj.indexer, airdate])
if sql_results > 0:
season = int(sql_results[0]["season"])
episodes = [int(sql_results[0]["episode"])]
@@ -340,12 +328,9 @@ class TVCache():
logger.log(u"Added RSS item: [" + name + "] to cache: [" + self.providerID + "]", logger.DEBUG)
if not in_cache:
sickbeard.name_cache.addNameToCache(parse_result.series_name, indexerid)
return [
"INSERT INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality) VALUES (?,?,?,?,?,?,?)",
[name, season, episodeText, indexerid, url, curTimestamp, quality]]
[name, season, episodeText, showObj.indexerid, url, curTimestamp, quality]]
def searchCache(self, episodes, manualSearch=False):
@@ -420,7 +405,7 @@ class TVCache():
result.quality = curQuality
result.content = self.provider.getURL(url) \
if self.provider.providerType == sickbeard.providers.generic.GenericProvider.TORRENT \
and not url.startswith('magnet') else None
and not url.startswith('magnet') else None
# add it to the list
if epObj not in neededEps: