mirror of https://github.com/moparisthebest/SickRage synced 2024-12-12 11:02:21 -05:00

Anime propers

Adam 2014-07-22 12:53:32 +08:00 committed by adam
parent f2a1331110
commit fd9be53fcb
8 changed files with 138 additions and 28 deletions

View File: sickbeard/databases/mainDB.py

@ -27,7 +27,7 @@ from sickbeard import encodingKludge as ek
from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException
MIN_DB_VERSION = 9 # oldest db version we support migrating from
MAX_DB_VERSION = 39
MAX_DB_VERSION = 40
class MainSanityCheck(db.DBSanityCheck):
def check(self):
@ -901,3 +901,17 @@ class AddIndexerMapping(AddSceneToTvShows):
"CREATE TABLE indexer_mapping (indexer_id INTEGER, indexer NUMERIC, mindexer_id INTEGER, mindexer NUMERIC, PRIMARY KEY (indexer_id, indexer))")
self.incDBVersion()
class AddVersionToTvEpisodes(AddIndexerMapping):
def test(self):
return self.checkDBVersion() >= 40
def execute(self):
backupDatabase(40)
logger.log(u"Adding column version to tv_episodes and history")
self.addColumn("tv_episodes", "version", "NUMERIC", "-1")
self.addColumn("tv_episodes", "release_group", "TEXT", "")
self.addColumn("history", "version", "NUMERIC", "-1")
self.incDBVersion()
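The new step follows the migration chain's usual shape: test() gates on the stored schema version, execute() backs the database up, adds the columns with a default, and bumps the version to 40. As a rough standalone sketch of the same idea, here is the equivalent done with plain sqlite3 instead of SickRage's db.DBConnection/backupDatabase/addColumn helpers, assuming the schema version lives in a db_version bookkeeping table:

import sqlite3

def add_version_to_tv_episodes(db_path):
    con = sqlite3.connect(db_path)
    # test(): nothing to do if the schema is already at or past version 40
    (db_version,) = con.execute("SELECT db_version FROM db_version").fetchone()
    if db_version >= 40:
        con.close()
        return
    # execute(): same columns and defaults as the migration above
    con.execute("ALTER TABLE tv_episodes ADD COLUMN version NUMERIC DEFAULT -1")
    con.execute("ALTER TABLE tv_episodes ADD COLUMN release_group TEXT DEFAULT ''")
    con.execute("ALTER TABLE history ADD COLUMN version NUMERIC DEFAULT -1")
    con.execute("UPDATE db_version SET db_version = 40")
    con.commit()
    con.close()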

View File: sickbeard/history.py

@ -25,7 +25,7 @@ from sickbeard.common import SNATCHED, SUBTITLED, FAILED, Quality
dateFormat = "%Y%m%d%H%M%S"
def _logHistoryItem(action, showid, season, episode, quality, resource, provider):
def _logHistoryItem(action, showid, season, episode, quality, resource, provider, version=-1):
logDate = datetime.datetime.today().strftime(dateFormat)
if not isinstance(resource, unicode):
@ -33,8 +33,8 @@ def _logHistoryItem(action, showid, season, episode, quality, resource, provider
myDB = db.DBConnection()
myDB.action(
"INSERT INTO history (action, date, showid, season, episode, quality, resource, provider) VALUES (?,?,?,?,?,?,?,?)",
[action, logDate, showid, season, episode, quality, resource, provider])
"INSERT INTO history (action, date, showid, season, episode, quality, resource, provider, version) VALUES (?,?,?,?,?,?,?,?,?)",
[action, logDate, showid, season, episode, quality, resource, provider, version])
def logSnatch(searchResult):
@ -44,6 +44,7 @@ def logSnatch(searchResult):
season = int(curEpObj.season)
episode = int(curEpObj.episode)
quality = searchResult.quality
version = searchResult.version
providerClass = searchResult.provider
if providerClass != None:
@ -55,10 +56,10 @@ def logSnatch(searchResult):
resource = searchResult.name
_logHistoryItem(action, showid, season, episode, quality, resource, provider)
_logHistoryItem(action, showid, season, episode, quality, resource, provider, version)
def logDownload(episode, filename, new_ep_quality, release_group=None):
def logDownload(episode, filename, new_ep_quality, release_group=None, version=-1):
showid = int(episode.show.indexerid)
season = int(episode.season)
epNum = int(episode.episode)
@ -73,7 +74,7 @@ def logDownload(episode, filename, new_ep_quality, release_group=None):
action = episode.status
_logHistoryItem(action, showid, season, epNum, quality, filename, provider)
_logHistoryItem(action, showid, season, epNum, quality, filename, provider, version)
def logSubtitle(showid, season, episode, status, subtitleResult):
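The new version parameter defaults to -1, so existing non-anime call sites keep working unchanged while anime snatches and downloads can record the release version alongside the rest of the history row. A self-contained sketch of the widened INSERT, using a plain sqlite3 connection in place of db.DBConnection (table and column names as in the diff):

import datetime
import sqlite3

dateFormat = "%Y%m%d%H%M%S"

def log_history_item(con, action, showid, season, episode, quality,
                     resource, provider, version=-1):
    # version stays -1 for non-anime entries, mirroring the default above
    logDate = datetime.datetime.today().strftime(dateFormat)
    con.execute(
        "INSERT INTO history (action, date, showid, season, episode, quality,"
        " resource, provider, version) VALUES (?,?,?,?,?,?,?,?,?)",
        [action, logDate, showid, season, episode, quality, resource, provider, version])
    con.commit()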

View File: sickbeard/name_parser/parser.py

@ -201,6 +201,16 @@ class NameParser(object):
result.release_group = match.group('release_group')
result.score += 1
if 'version' in named_groups:
# assign a version to anime files matched by an anime regex; non-anime matches get -1
version = match.group('version')
if version:
result.version = version
else:
result.version = 1
else:
result.version = -1
matches.append(result)
@ -438,6 +448,7 @@ class NameParser(object):
final_result.series_name = self._combine_results(dir_name_result, file_name_result, 'series_name')
final_result.extra_info = self._combine_results(dir_name_result, file_name_result, 'extra_info')
final_result.release_group = self._combine_results(dir_name_result, file_name_result, 'release_group')
final_result.version = self._combine_results(dir_name_result, file_name_result, 'version')
final_result.which_regex = []
if final_result == file_name_result:
@ -483,7 +494,8 @@ class ParseResult(object):
ab_episode_numbers=None,
show=None,
score=None,
quality=None
quality=None,
version=None
):
self.original_name = original_name
@ -518,6 +530,8 @@ class ParseResult(object):
self.show = show
self.score = score
self.version = version
def __eq__(self, other):
if not other:
return False
@ -548,6 +562,8 @@ class ParseResult(object):
return False
if self.quality != other.quality:
return False
if self.version != other.version:
return False
return True
@ -569,7 +585,10 @@ class ParseResult(object):
to_return += str(self.sports_event_id)
to_return += str(self.sports_air_date)
if self.ab_episode_numbers:
to_return += ' [Absolute Nums: ' + str(self.ab_episode_numbers) + ']'
to_return += ' [ABS: ' + str(self.ab_episode_numbers) + ']'
if self.version:
to_return += ' [ANIME VER: ' + str(self.version) + ']'
if self.release_group:
to_return += ' [GROUP: ' + self.release_group + ']'
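The regexes themselves live in name_parser/regexes.py and are not part of this diff; the snippet below is only a hypothetical anime pattern with a named version group, showing how the branch above maps a v2/v3 marker onto result.version (an explicit marker gives that number, an anime match without a marker gives 1, non-anime regexes give -1):

import re

# illustrative pattern, not one of SickRage's real ones
anime_pattern = re.compile(
    r"^\[(?P<release_group>.+?)\]\s*"     # [SomeGroup]
    r"(?P<series_name>.+?)\s*-\s*"        # Show Name -
    r"(?P<ep_ab_num>\d{1,3})"             # absolute episode number
    r"(?:v(?P<version>\d))?",             # optional v2/v3 marker
    re.IGNORECASE)

match = anime_pattern.match("[SomeGroup] Some Show - 05v2 [720p].mkv")
version = match.group('version')
result_version = int(version) if version else 1   # here: 2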

View File: sickbeard/postProcessor.py

@ -93,6 +93,8 @@ class PostProcessor(object):
self.log = ''
self.version = None
def _log(self, message, level=logger.MESSAGE):
"""
A wrapper for the internal logger which also keeps track of messages and saves them to a string for later.
@ -382,10 +384,10 @@ class PostProcessor(object):
"""
Look up the NZB name in the history and see if it contains a record for self.nzb_name
Returns a (indexer_id, season, []) tuple. The first two may be None if none were found.
Returns a (indexer_id, season, [], quality, version) tuple. The first two may be None if none were found.
"""
to_return = (None, None, [], None)
to_return = (None, None, [], None, None)
# if we don't have either of these then there's nothing to use to search the history for anyway
if not self.nzb_name and not self.folder_name:
@ -413,6 +415,7 @@ class PostProcessor(object):
indexer_id = int(sql_results[0]["showid"])
season = int(sql_results[0]["season"])
quality = int(sql_results[0]["quality"])
version = int(sql_results[0]["version"])
if quality == common.Quality.UNKNOWN:
quality = None
@ -420,7 +423,8 @@ class PostProcessor(object):
show = helpers.findCertainShow(sickbeard.showList, indexer_id)
self.in_history = True
to_return = (show, season, [], quality)
self.version = version
to_return = (show, season, [], quality, version)
self._log("Found result in history: " + str(to_return), logger.DEBUG)
return to_return
@ -452,6 +456,7 @@ class PostProcessor(object):
logger.log(u" or Parse result(air_date): " + str(parse_result.air_date), logger.DEBUG)
logger.log(u"Parse result(release_group): " + str(parse_result.release_group), logger.DEBUG)
def _analyze_name(self, name, file=True):
"""
Takes a name and tries to figure out a show, season, and episode from it.
@ -464,7 +469,7 @@ class PostProcessor(object):
logger.log(u"Analyzing name " + repr(name))
to_return = (None, None, [], None)
to_return = (None, None, [], None, None)
if not name:
return to_return
@ -488,7 +493,7 @@ class PostProcessor(object):
season = parse_result.season_number
episodes = parse_result.episode_numbers
to_return = (show, season, episodes, parse_result.quality)
to_return = (show, season, episodes, parse_result.quality, None)
self._finalize(parse_result)
return to_return
@ -516,7 +521,7 @@ class PostProcessor(object):
For a given file try to find the showid, season, and episode.
"""
show = season = quality = None
show = season = quality = version = None
episodes = []
# try to look up the nzb in history
@ -542,7 +547,7 @@ class PostProcessor(object):
for cur_attempt in attempt_list:
try:
(cur_show, cur_season, cur_episodes, cur_quality) = cur_attempt()
(cur_show, cur_season, cur_episodes, cur_quality, cur_version) = cur_attempt()
except (InvalidNameException, InvalidShowException), e:
logger.log(u"Unable to parse, skipping: " + ex(e), logger.DEBUG)
continue
@ -555,6 +560,10 @@ class PostProcessor(object):
if cur_quality and not (self.in_history and quality):
quality = cur_quality
# only take the current version for anime from history, to prevent issues with old database entries
if cur_version is not None:
version = cur_version
if cur_season != None:
season = cur_season
if cur_episodes:
@ -594,9 +603,9 @@ class PostProcessor(object):
season = 1
if show and season and episodes:
return (show, season, episodes, quality)
return (show, season, episodes, quality, version)
return (show, season, episodes, quality)
return (show, season, episodes, quality, version)
def _get_ep_obj(self, show, season, episodes):
"""
@ -783,7 +792,7 @@ class PostProcessor(object):
self.anidbEpisode = None
# try to find the file info
(show, season, episodes, quality) = self._find_info()
(show, season, episodes, quality, version) = self._find_info()
if not show:
self._log(u"This show isn't in your list, you need to add it to SB before post-processing an episode",
logger.ERROR)
@ -810,6 +819,14 @@ class PostProcessor(object):
priority_download = self._is_priority(ep_obj, new_ep_quality)
self._log(u"Is ep a priority download: " + str(priority_download), logger.DEBUG)
# get the version of the episode we're processing
if version:
self._log(u"Snatch history had a version in it, using that: v" + str(version),
logger.DEBUG)
new_ep_version = version
else:
new_ep_version = -1
# check for an existing file
existing_file_status = self._checkForExistingFile(ep_obj.location)
@ -890,6 +907,13 @@ class PostProcessor(object):
cur_ep.is_proper = self.is_proper
cur_ep.version = new_ep_version
if self.release_group:
cur_ep.release_group = self.release_group
else:
cur_ep.release_group = ""
sql_l.append(cur_ep.get_sql())
if len(sql_l) > 0:
@ -981,7 +1005,7 @@ class PostProcessor(object):
ep_obj.createMetaFiles()
# log it to history
history.logDownload(ep_obj, self.file_path, new_ep_quality, self.release_group)
history.logDownload(ep_obj, self.file_path, new_ep_quality, self.release_group, new_ep_version)
# send notifications
notifiers.notify_download(ep_obj._format_pattern('%SN - %Sx%0E - %EN - %QN'))
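Taken together, the changes to _find_info and its helpers mean that version is only ever filled in by the history lookup; names parsed from disk return None for it, so stale rows or plain filenames cannot override what was recorded at snatch time. A rough standalone restatement of the merge loop (attempt and variable names are illustrative, and the real loop also protects a quality that came from history, which is omitted here):

def merge_attempts(attempts):
    # attempts: callables, each returning (show, season, episodes, quality, version)
    show = season = quality = version = None
    episodes = []
    for attempt in attempts:
        cur_show, cur_season, cur_episodes, cur_quality, cur_version = attempt()
        if cur_show:
            show = cur_show
        if cur_quality:
            quality = cur_quality
        # only the history lookup returns a non-None version
        if cur_version is not None:
            version = cur_version
        if cur_season is not None:
            season = cur_season
        if cur_episodes:
            episodes = cur_episodes
    return (show, season, episodes, quality, version)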

View File: sickbeard/properFinder.py

@ -141,6 +141,12 @@ class ProperFinder():
else:
curProper.season = parse_result.season_number if parse_result.season_number != None else 1
curProper.episode = parse_result.episode_numbers[0]
if parse_result.is_anime:
if parse_result.release_group and parse_result.version:
curProper.release_group = parse_result.release_group
curProper.version = parse_result.version
else:
continue
curProper.quality = Quality.nameQuality(curProper.name, parse_result.is_anime)
@ -165,6 +171,25 @@ class ProperFinder():
if oldStatus not in (DOWNLOADED, SNATCHED) or oldQuality != curProper.quality:
continue
# check if we actually want this proper (if it's the right release group and a higher version)
if parse_result.is_anime:
myDB = db.DBConnection()
sqlResults = myDB.select(
"SELECT release_group, version FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?",
[curProper.indexerid, curProper.season, curProper.episode])
oldVersion = int(sqlResults[0]["version"])
oldRelease_group = (sqlResults[0]["release_group"])
if oldVersion > -1 and oldVersion < curProper.version:
logger.log("Found new anime v" + str(curProper.version) + " to replace existing v" + str(oldVersion))
else:
continue
if oldRelease_group != curProper.release_group:
logger.log("Skipping proper from release group: " + curProper.release_group + ", does not match existing release group: " + oldRelease_group)
continue
# if the show is in our list and no proper has already been added for that particular episode, add it to our list of propers
if curProper.indexerid != -1 and (curProper.indexerid, curProper.season, curProper.episode) not in map(
operator.attrgetter('indexerid', 'season', 'episode'), finalPropers):
@ -221,6 +246,7 @@ class ProperFinder():
result.url = curProper.url
result.name = curProper.name
result.quality = curProper.quality
result.version = curProper.version
# snatch it
search.snatchEpisode(result, SNATCHED_PROPER)
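For anime, a proper is accepted only when it is a strictly higher version of a file we already have from the same release group. No such helper exists in the codebase, but the two checks above can be restated as a small predicate:

def want_anime_proper(old_release_group, old_version, new_release_group, new_version):
    # we must already track a version (> -1) and the candidate must be newer
    if old_version <= -1 or new_version <= old_version:
        return False
    # a v2 from a different group is not a proper of the file on disk
    if old_release_group != new_release_group:
        return False
    return True

# e.g. [GroupA] v1 on disk, [GroupA] v2 turns up in a provider feed
assert want_anime_proper("GroupA", 1, "GroupA", 2) is True
assert want_anime_proper("GroupA", 1, "GroupB", 2) is False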

View File: sickbeard/providers/generic.py

@ -306,6 +306,7 @@ class GenericProvider:
showObj = parse_result.show
quality = parse_result.quality
release_group = parse_result.release_group
version = parse_result.version
addCacheEntry = False
if not (showObj.air_by_date or showObj.sports):
@ -394,6 +395,7 @@ class GenericProvider:
result.quality = quality
result.release_group = release_group
result.content = None
result.version = version
if len(epObj) == 1:
epNum = epObj[0].episode
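Copying the parsed version onto the search result lets it travel with the snatch into history (history.py above) and into the provider cache (tvcache.py below). As a hypothetical minimal shape of such a result object, carrying only the fields touched here (the real class lives elsewhere in sickbeard and carries more):

class SearchResult(object):
    # illustrative stand-in, not SickRage's actual search result class
    def __init__(self, episodes):
        self.episodes = episodes
        self.url = ''
        self.name = ''
        self.quality = None
        self.release_group = ''
        self.version = -1    # -1 = not an anime release / version unknown
        self.content = None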

View File: sickbeard/tv.py

@ -1274,6 +1274,8 @@ class TVEpisode(object):
self._file_size = 0
self._release_name = ''
self._is_proper = False
self._version = 0
self._release_group = ''
# setting any of the above sets the dirty flag
self.dirty = True
@ -1317,6 +1319,8 @@ class TVEpisode(object):
file_size = property(lambda self: self._file_size, dirty_setter("_file_size"))
release_name = property(lambda self: self._release_name, dirty_setter("_release_name"))
is_proper = property(lambda self: self._is_proper, dirty_setter("_is_proper"))
version = property(lambda self: self._version, dirty_setter("_version"))
release_group = property(lambda self: self._release_group, dirty_setter("_release_group"))
def _set_location(self, new_location):
logger.log(u"Setter sets location to " + new_location, logger.DEBUG)
@ -1523,6 +1527,12 @@ class TVEpisode(object):
if sqlResults[0]["is_proper"]:
self.is_proper = int(sqlResults[0]["is_proper"])
if sqlResults[0]["version"]:
self.version = int(sqlResults[0]["version"])
if sqlResults[0]["release_group"] is not None:
self.release_group = sqlResults[0]["release_group"]
self.dirty = False
return True
@ -1849,23 +1859,26 @@ class TVEpisode(object):
"UPDATE tv_episodes SET indexerid = ?, indexer = ?, name = ?, description = ?, subtitles = ?, "
"subtitles_searchcount = ?, subtitles_lastsearch = ?, airdate = ?, hasnfo = ?, hastbn = ?, status = ?, "
"location = ?, file_size = ?, release_name = ?, is_proper = ?, showid = ?, season = ?, episode = ?, "
"absolute_number = ? WHERE episode_id = ?",
"absolute_number = ?, version = ?, release_group = ? WHERE episode_id = ?",
[self.indexerid, self.indexer, self.name, self.description, ",".join([sub for sub in self.subtitles]),
self.subtitles_searchcount, self.subtitles_lastsearch, self.airdate.toordinal(), self.hasnfo,
self.hastbn,
self.status, self.location, self.file_size, self.release_name, self.is_proper, self.show.indexerid,
self.season, self.episode, self.absolute_number, epID]]
self.season, self.episode, self.absolute_number, self.version, self.release_group, epID]]
else:
# use a custom insert method to get the data into the DB.
return [
"INSERT OR IGNORE INTO tv_episodes (episode_id, indexerid, indexer, name, description, subtitles, subtitles_searchcount, subtitles_lastsearch, airdate, hasnfo, hastbn, status, location, file_size, release_name, is_proper, showid, season, episode, absolute_number) VALUES "
"((SELECT episode_id FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?),?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?);",
"INSERT OR IGNORE INTO tv_episodes (episode_id, indexerid, indexer, name, description, subtitles, "
"subtitles_searchcount, subtitles_lastsearch, airdate, hasnfo, hastbn, status, location, file_size, "
"release_name, is_proper, showid, season, episode, absolute_number, version, release_group) VALUES "
"((SELECT episode_id FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?)"
",?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?);",
[self.show.indexerid, self.season, self.episode, self.indexerid, self.indexer, self.name,
self.description,
",".join([sub for sub in self.subtitles]), self.subtitles_searchcount, self.subtitles_lastsearch,
self.airdate.toordinal(), self.hasnfo, self.hastbn, self.status, self.location, self.file_size,
self.release_name, self.is_proper, self.show.indexerid, self.season, self.episode,
self.absolute_number]]
self.absolute_number, self.version, self.release_group]]
def saveToDB(self, forceSave=False):
"""
@ -1898,7 +1911,9 @@ class TVEpisode(object):
"file_size": self.file_size,
"release_name": self.release_name,
"is_proper": self.is_proper,
"absolute_number": self.absolute_number
"absolute_number": self.absolute_number,
"version": self.version,
"release_group": self.release_group
}
controlValueDict = {"showid": self.show.indexerid,
"season": self.season,

View File: sickbeard/tvcache.py

@ -55,6 +55,10 @@ class CacheDBConnection(db.DBConnection):
if not self.hasColumn(providerName, 'release_group'):
self.addColumn(providerName, 'release_group', "TEXT", "")
# add version column to table if missing
if not self.hasColumn(providerName, 'version'):
self.addColumn(providerName, 'version', "NUMERIC", "-1")
except Exception, e:
if str(e) != "table [" + providerName + "] already exists":
raise
@ -272,11 +276,14 @@ class TVCache():
# get release group
release_group = parse_result.release_group
# get version
version = parse_result.version
logger.log(u"Added RSS item: [" + name + "] to cache: [" + self.providerID + "]", logger.DEBUG)
return [
"INSERT OR IGNORE INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality, release_group) VALUES (?,?,?,?,?,?,?,?)",
[name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality, release_group]]
"INSERT OR IGNORE INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality, release_group, version) VALUES (?,?,?,?,?,?,?,?,?)",
[name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality, release_group, version]]
def searchCache(self, episodes, manualSearch=False):
@ -328,6 +335,7 @@ class TVCache():
curEp = int(curEp)
curQuality = int(curResult["quality"])
curReleaseGroup = curResult["release_group"]
curVersion = curResult["version"]
# if the show says we want that episode then add it to the list
if not showObj.wantEpisode(curSeason, curEp, curQuality, manualSearch):
@ -347,6 +355,7 @@ class TVCache():
result.name = title
result.quality = curQuality
result.release_group = curReleaseGroup
result.version = curVersion
result.content = self.provider.getURL(url) \
if self.provider.providerType == sickbeard.providers.generic.GenericProvider.TORRENT \
and not url.startswith('magnet') else None
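Because each provider's cache table is created lazily, the version column is added at connection time when it is missing rather than through a numbered migration like mainDB's. A standalone sketch of that check with plain sqlite3 (hasColumn/addColumn are SickRage helpers; the equivalent below is illustrative):

import sqlite3

def ensure_version_column(con, provider_name):
    cols = [row[1] for row in con.execute("PRAGMA table_info([%s])" % provider_name)]
    if 'version' not in cols:
        con.execute("ALTER TABLE [%s] ADD COLUMN version NUMERIC DEFAULT -1" % provider_name)
        con.commit()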