"Borrowed" code from SB's post-processing routine.

commit 24cd235988
parent 1b84c6d281
@@ -11,7 +11,7 @@
 # SickRage is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
+# GNU General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License
 # along with SickRage. If not, see <http://www.gnu.org/licenses/>.
@@ -44,6 +44,7 @@ from sickbeard.name_parser.parser import NameParser, InvalidNameException, Inval

+from lib import adba


 class PostProcessor(object):
     """
     A class which will process a media file according to the post processing settings in the config.
@@ -56,10 +57,6 @@ class PostProcessor(object):

     IGNORED_FILESTRINGS = ["/.AppleDouble/", ".DS_Store"]

-    NZB_NAME = 1
-    FOLDER_NAME = 2
-    FILE_NAME = 3
-
     def __init__(self, file_path, nzb_name=None, process_method=None, is_priority=None):
         """
         Creates a new post processor with the given file path and optionally an NZB name.
@@ -85,19 +82,17 @@ class PostProcessor(object):
         self.process_method = process_method if process_method else sickbeard.PROCESS_METHOD

         self.in_history = False

         self.release_group = None

+        self.release_name = None
+
         self.is_proper = False

         self.is_priority = is_priority

-        self.good_results = {self.NZB_NAME: False,
-                             self.FOLDER_NAME: False,
-                             self.FILE_NAME: False}
-
         self.log = ''

     def __del__(self):
         pass

     def _log(self, message, level=logger.MESSAGE):
         """
         A wrapper for the internal logger which also keeps track of messages and saves them to a string for later.
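For context on the attribute change above: rather than scoring the NZB name, folder name and file name separately, the processor keeps one best-guess release name and falls back to an empty string when nothing usable was parsed. A minimal standalone sketch of that pattern; the class and method names here are illustrative, not taken from the repository:

# Illustrative sketch only, not repository code.
class TinyProcessor(object):
    def __init__(self, nzb_name=None):
        self.nzb_name = nzb_name
        self.release_name = None   # single source of truth instead of per-source flags

    def remember_release_name(self, parsed_original_name):
        # keep the first usable name that a complete parse result produced
        if not self.release_name and parsed_original_name:
            self.release_name = parsed_original_name

    def apply_to_episode(self, episode):
        # fall back to an empty string so the stored value is never None
        episode.release_name = self.release_name if self.release_name else ""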
@@ -183,7 +178,7 @@ class PostProcessor(object):
             if subtitles_only and not associated_file_path[len(associated_file_path) - 3:] in common.subtitleExtensions:
                 continue

-            #Exclude .rar files from associated list
+            # Exclude .rar files from associated list
             if re.search('(^.+\.(rar|r\d+)$)', associated_file_path):
                 continue

@@ -216,7 +211,7 @@ class PostProcessor(object):
         for cur_file in file_list:
             if ek.ek(os.path.isfile, cur_file):
                 self._log(u"Deleting file " + cur_file, logger.DEBUG)
-                #check first the read-only attribute
+                # check first the read-only attribute
                 file_attribute = ek.ek(os.stat, cur_file)[0]
                 if (not file_attribute & stat.S_IWRITE):
                     # File is read-only, so make it writeable
@@ -439,28 +434,24 @@ class PostProcessor(object):

         # remember whether it's a proper
         if parse_result.extra_info:
-            self.is_proper = re.search('(^|[\. _-])(proper|repack)([\. _-]|$)', parse_result.extra_info,re.I) != None
+            self.is_proper = re.search('(^|[\. _-])(proper|repack)([\. _-]|$)', parse_result.extra_info, re.I) != None

-        # if the result is complete then remember that for later
-        if parse_result.series_name and parse_result.season_number != None and parse_result.episode_numbers and parse_result.release_group:
-            test_name = helpers.remove_extension(ek.ek(os.path.basename, parse_result.original_name))
+        # if the result is complete then set release name
+        if parse_result.series_name and ((parse_result.season_number is not None and parse_result.episode_numbers)
+                                         or parse_result.air_date) and parse_result.release_group:
+
+            if not self.release_name:
+                self.release_name = helpers.remove_extension(ek.ek(os.path.basename, parse_result.original_name))

-            if test_name == self.nzb_name:
-                self.good_results[self.NZB_NAME] = True
-            elif test_name == self.folder_name:
-                self.good_results[self.FOLDER_NAME] = True
-            elif test_name == self.file_name:
-                self.good_results[self.FILE_NAME] = True
-            else:
-                logger.log(u"Nothing was good, found " + repr(test_name) + " and wanted either " + repr(
-                    self.nzb_name) + ", " + repr(self.folder_name) + ", or " + repr(self.file_name))
         else:
-            logger.log(u"Parse result not sufficient(all following have to be set). Will not save release name",
+            logger.log(u"Parse result not sufficient (all following have to be set). will not save release name",
                        logger.DEBUG)
-            logger.log("Parse result(series_name): " + str(parse_result.series_name), logger.DEBUG)
-            logger.log("Parse result(season_number): " + str(parse_result.season_number), logger.DEBUG)
-            logger.log("Parse result(episode_numbers): " + str(parse_result.episode_numbers), logger.DEBUG)
-            logger.log("Parse result(release_group): " + str(parse_result.release_group), logger.DEBUG)
+            logger.log(u"Parse result(series_name): " + str(parse_result.series_name), logger.DEBUG)
+            logger.log(u"Parse result(season_number): " + str(parse_result.season_number), logger.DEBUG)
+            logger.log(u"Parse result(episode_numbers): " + str(parse_result.episode_numbers), logger.DEBUG)
+            logger.log(u" or Parse result(air_date): " + str(parse_result.air_date), logger.DEBUG)
+            logger.log(u"Parse result(release_group): " + str(parse_result.release_group), logger.DEBUG)

     def _analyze_name(self, name, file=True):
         """
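The condition above decides when a parse result is complete enough to save a release name: it needs a series name, a release group, and either numbered episodes (a season plus an episode list) or an air date. A small self-contained sketch of that predicate, using an illustrative ParseResult stand-in rather than the real sickbeard.name_parser object:

# Illustrative sketch; ParseResult here is a stand-in, not the real parser class.
from collections import namedtuple

ParseResult = namedtuple(
    "ParseResult",
    ["series_name", "season_number", "episode_numbers", "air_date", "release_group"])

def is_complete(result):
    has_numbered_eps = result.season_number is not None and bool(result.episode_numbers)
    return bool(result.series_name
                and (has_numbered_eps or result.air_date)
                and result.release_group)

print(is_complete(ParseResult("Show", 3, [7], None, "GROUP")))            # True: numbered episode
print(is_complete(ParseResult("Show", None, [], "2014-05-20", "GROUP")))  # True: air-by-date
print(is_complete(ParseResult("Show", 3, [7], None, None)))               # False: no release group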
@@ -520,7 +511,7 @@ class PostProcessor(object):
         else:
             self.anidbEpisode = ep

-        #TODO: clean code. it looks like it's from hell
+        # TODO: clean code. it looks like it's from hell
         for name in ep.allNames:

             indexer_id = name_cache.retrieveNameFromCache(name)
@@ -621,12 +612,14 @@ class PostProcessor(object):

         # for air-by-date shows we need to look up the season/episode from database
         if season == -1 and show and episodes:
-            self._log(u"Looks like this is an air-by-date or sports show, attempting to convert the date to season/episode",
-                      logger.DEBUG)
+            self._log(
+                u"Looks like this is an air-by-date or sports show, attempting to convert the date to season/episode",
+                logger.DEBUG)
             airdate = episodes[0].toordinal()
             myDB = db.DBConnection()
-            sql_result = myDB.select("SELECT season, episode FROM tv_episodes WHERE showid = ? and indexer = ? and airdate = ?",
-                                     [show.indexerid, show.indexer, airdate])
+            sql_result = myDB.select(
+                "SELECT season, episode FROM tv_episodes WHERE showid = ? and indexer = ? and airdate = ?",
+                [show.indexerid, show.indexer, airdate])

             if sql_result:
                 season = int(sql_result[0][0])
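The query above recovers season and episode numbers for air-by-date and sports shows by matching the parsed air date (stored as a proleptic ordinal) against the tv_episodes table. A simplified, self-contained sketch of that lookup; only the table and column names follow the query in the diff, while the sqlite3 connection handling and the example database path are assumptions:

# Illustrative sketch of the airdate-to-season/episode lookup.
import datetime
import sqlite3

def season_episode_for_airdate(db_path, indexer_id, indexer, air_date):
    airdate_ordinal = air_date.toordinal()  # dates are stored as proleptic ordinals
    conn = sqlite3.connect(db_path)
    try:
        row = conn.execute(
            "SELECT season, episode FROM tv_episodes "
            "WHERE showid = ? and indexer = ? and airdate = ?",
            (indexer_id, indexer, airdate_ordinal)).fetchone()
    finally:
        conn.close()
    # returns (season, episode) or None when the date is unknown for this show
    return (int(row[0]), int(row[1])) if row else None

# example call (database path and ids are assumptions):
# season_episode_for_airdate("sickbeard.db", 12345, 1, datetime.date(2014, 5, 20))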
@@ -652,7 +645,7 @@ class PostProcessor(object):

         if show and season and episodes:
             return (show, season, episodes, quality)


         return (show, season, episodes, quality)

     def _get_ep_obj(self, show, season, episodes):
@@ -701,7 +694,7 @@ class PostProcessor(object):

         # if there is a quality available in the status then we don't need to bother guessing from the filename
         if ep_obj.status in common.Quality.SNATCHED + common.Quality.SNATCHED_PROPER + common.Quality.SNATCHED_BEST:
-            oldStatus, ep_quality = common.Quality.splitCompositeStatus(ep_obj.status) #@UnusedVariable
+            oldStatus, ep_quality = common.Quality.splitCompositeStatus(ep_obj.status)  # @UnusedVariable
             if ep_quality != common.Quality.UNKNOWN:
                 self._log(
                     u"The old status had a quality in it, using that: " + common.Quality.qualityStrings[ep_quality],
@@ -733,7 +726,9 @@ class PostProcessor(object):
         if ep_obj.status in common.Quality.SNATCHED + common.Quality.SNATCHED_PROPER:
             oldStatus, ep_quality = common.Quality.splitCompositeStatus(ep_obj.status)  # @UnusedVariable
             if ep_quality != common.Quality.UNKNOWN:
-                self._log(u"The old status had a quality in it, using that: " + common.Quality.qualityStrings[ep_quality], logger.DEBUG)
+                self._log(
+                    u"The old status had a quality in it, using that: " + common.Quality.qualityStrings[ep_quality],
+                    logger.DEBUG)
                 return ep_quality

         # Try guessing quality from the file name
@@ -853,7 +848,8 @@ class PostProcessor(object):

         # get the quality of the episode we're processing
         if quality:
-            self._log(u"Snatch history had a quality in it, using that: " + common.Quality.qualityStrings[quality], logger.DEBUG)
+            self._log(u"Snatch history had a quality in it, using that: " + common.Quality.qualityStrings[quality],
+                      logger.DEBUG)
             new_ep_quality = quality
         else:
             new_ep_quality = self._get_quality(ep_obj)
@@ -873,7 +869,9 @@ class PostProcessor(object):
         # if there's an existing file that we don't want to replace stop here
         if existing_file_status == PostProcessor.EXISTS_LARGER:
             if self.is_proper:
-                self._log(u"File exists and new file is smaller, new file is a proper/repack, marking it safe to replace", logger.DEBUG)
+                self._log(
+                    u"File exists and new file is smaller, new file is a proper/repack, marking it safe to replace",
+                    logger.DEBUG)
                 return True

             else:
@@ -891,7 +889,7 @@ class PostProcessor(object):
                       logger.DEBUG)

         # set the status of the episodes
-        #for curEp in [ep_obj] + ep_obj.relatedEps:
+        # for curEp in [ep_obj] + ep_obj.relatedEps:
         #    curEp.status = common.Quality.compositeStatus(common.SNATCHED, new_ep_quality)

         # delete the existing file (and company)
@@ -922,21 +920,12 @@ class PostProcessor(object):
         sql_l = []
         for cur_ep in [ep_obj] + ep_obj.relatedEps:
             with cur_ep.lock:
-                cur_release_name = None
-
-                # use the best possible representation of the release name
-                if self.good_results[self.NZB_NAME]:
-                    cur_release_name = self.nzb_name
-                elif self.good_results[self.FOLDER_NAME]:
-                    cur_release_name = self.folder_name
-                elif self.good_results[self.FILE_NAME]:
-                    cur_release_name = self.file_name
-
-                if cur_release_name:
-                    self._log("Found release name " + cur_release_name, logger.DEBUG)
-                    cur_ep.release_name = cur_release_name
+                if self.release_name:
+                    self._log("Found release name " + self.release_name, logger.DEBUG)
+                    cur_ep.release_name = self.release_name
                 else:
-                    logger.log("good results: " + repr(self.good_results), logger.DEBUG)
                     cur_ep.release_name = ""

                 if ep_obj.status in common.Quality.SNATCHED_BEST:
                     cur_ep.status = common.Quality.compositeStatus(common.ARCHIVED, new_ep_quality)