# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
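
"""Release name parsing for SickRage.

NameParser matches release and file names against the NORMAL, SPORTS and ANIME
regex sets from the regexes module and returns a ParseResult describing the
show, season/episode numbers, air date, sports event, release group and
quality it found. NameParserCache keeps recently parsed results on disk so
repeated lookups are cheap.
"""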

from __future__ import with_statement

import os
import time
import re
import datetime
import os.path
import regexes
import sickbeard

from sickbeard import logger, helpers, scene_numbering, common, exceptions, scene_exceptions, encodingKludge as ek
from sickbeard.exceptions import ex
from contextlib import closing
from dateutil import parser
from shove import Shove


class NameParser(object):
    NORMAL_REGEX = 0
    SPORTS_REGEX = 1
    ANIME_REGEX = 2

    def __init__(self, file_name=True, showObj=None, useIndexers=False, convert=False,
                 naming_pattern=False):

        self.file_name = file_name
        self.showObj = showObj
        self.useIndexers = useIndexers
        self.convert = convert
        self.naming_pattern = naming_pattern

        self.regexModes = [self.NORMAL_REGEX, self.SPORTS_REGEX, self.ANIME_REGEX]
        if self.showObj and not (self.showObj.is_anime or self.showObj.is_sports):
            self.regexModes = [self.NORMAL_REGEX]
        elif self.showObj and self.showObj.is_anime:
            self.regexModes = [self.ANIME_REGEX]
        elif self.showObj and self.showObj.is_sports:
            self.regexModes = [self.SPORTS_REGEX]

    def clean_series_name(self, series_name):
        """Clean up a series name by replacing any . and _
        characters with spaces and stripping any trailing hyphens.

        Is basically equivalent to replacing all _ and . with a
        space, but handles decimal numbers in the string, for example:

        >>> clean_series_name("an.example.1.0.test")
        'an example 1.0 test'
        >>> clean_series_name("an_example_1.0_test")
        'an example 1.0 test'

        Stolen from dbr's tvnamer
        """

        series_name = re.sub(r"(\D)\.(?!\s)(\D)", "\\1 \\2", series_name)
        series_name = re.sub(r"(\d)\.(\d{4})", "\\1 \\2", series_name)  # if it ends in a year then don't keep the dot
        series_name = re.sub(r"(\D)\.(?!\s)", "\\1 ", series_name)
        series_name = re.sub(r"\.(?!\s)(\D)", " \\1", series_name)
        series_name = series_name.replace("_", " ")
        series_name = re.sub(r"-$", "", series_name)
        series_name = re.sub(r"^\[.*\]", "", series_name)
        return series_name.strip()

    def _compile_regexes(self, regexMode):
        if regexMode == self.SPORTS_REGEX:
            logger.log(u"Using SPORTS regexes", logger.DEBUG)
            uncompiled_regex = [regexes.sports_regexs]
        elif regexMode == self.ANIME_REGEX:
            logger.log(u"Using ANIME regexes", logger.DEBUG)
            uncompiled_regex = [regexes.anime_regexes]
        else:
            logger.log(u"Using NORMAL regexes", logger.DEBUG)
            uncompiled_regex = [regexes.normal_regexes]

        self.compiled_regexes = []
        for regexItem in uncompiled_regex:
            for i, (cur_pattern_name, cur_pattern) in enumerate(regexItem):
                try:
                    cur_regex = re.compile(cur_pattern, re.VERBOSE | re.IGNORECASE)
                except re.error as errormsg:
                    logger.log(u"WARNING: Invalid episode_pattern, %s. %s" % (errormsg, cur_pattern))
                else:
                    cur_pattern_name = str(i) + "_" + cur_pattern_name
                    self.compiled_regexes.append((regexMode, cur_pattern_name, cur_regex))

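    # Each self.compiled_regexes entry is a
    # (regexMode, "<index>_<pattern name>", <compiled regex>) tuple;
    # _parse_string() below relies on that numeric prefix when scoring matches.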
    def _parse_string(self, name):
        if not name:
            return

        matches = []
        doneSearch = False
        bestResult = None

        for regexMode in self.regexModes:
            if doneSearch:
                break

            self._compile_regexes(regexMode)
            for (cur_regexMode, cur_regex_name, cur_regex) in self.compiled_regexes:
                if doneSearch:
                    break

                match = cur_regex.match(name)

                if not match:
                    continue

                regex_num = int(re.match(r'^\d{1,2}', cur_regex_name).group(0))
                result = ParseResult(name)
                result.which_regex = [cur_regex_name]
                result.score = 0 - regex_num

                named_groups = match.groupdict().keys()

                if 'series_name' in named_groups:
                    result.series_name = match.group('series_name')
                    if result.series_name:
                        result.series_name = self.clean_series_name(result.series_name)

                        if self.showObj and self.showObj.name.lower() == result.series_name.lower():
                            result.show = self.showObj
                        else:
                            if not self.naming_pattern:
                                result.show = helpers.get_show_by_name(result.series_name, useIndexer=self.useIndexers)

                        if not result.show:
                            if len(self.regexModes) > 1:
                                break
                            else:
                                continue

                        result.score += 1

                if 'season_num' in named_groups:
                    tmp_season = int(match.group('season_num'))
                    if not (cur_regex_name == 'bare' and tmp_season in (19, 20)):
                        result.season_number = tmp_season
                        result.score += 1

                if 'ep_num' in named_groups:
                    ep_num = self._convert_number(match.group('ep_num'))
                    if 'extra_ep_num' in named_groups and match.group('extra_ep_num'):
                        result.episode_numbers = range(ep_num, self._convert_number(match.group('extra_ep_num')) + 1)
                        result.score += 1
                    else:
                        result.episode_numbers = [ep_num]
                        result.score += 1

                if 'ep_ab_num' in named_groups:
                    ep_ab_num = self._convert_number(match.group('ep_ab_num'))
                    if 'extra_ab_ep_num' in named_groups and match.group('extra_ab_ep_num'):
                        result.ab_episode_numbers = range(ep_ab_num,
                                                          self._convert_number(match.group('extra_ab_ep_num')) + 1)
                        result.score += 1
                    else:
                        result.ab_episode_numbers = [ep_ab_num]
                        result.score += 1

                if 'sports_event_id' in named_groups:
                    sports_event_id = match.group('sports_event_id')
                    if sports_event_id:
                        result.sports_event_id = int(match.group('sports_event_id'))
                        result.score += 1

                if 'sports_event_name' in named_groups:
                    result.sports_event_name = match.group('sports_event_name')
                    if result.sports_event_name:
                        result.sports_event_name = self.clean_series_name(result.sports_event_name)
                        result.score += 1

                if 'sports_air_date' in named_groups:
                    sports_air_date = match.group('sports_air_date')
                    if result.show and result.show.is_sports:
                        try:
                            result.sports_air_date = parser.parse(sports_air_date, fuzzy=True).date()
                            result.score += 1
                        except:
                            pass

                if 'air_year' in named_groups and 'air_month' in named_groups and 'air_day' in named_groups:
                    if result.show and result.show.air_by_date:
                        year = int(match.group('air_year'))
                        month = int(match.group('air_month'))
                        day = int(match.group('air_day'))

                        try:
                            dtStr = '%s-%s-%s' % (year, month, day)
                            result.air_date = datetime.datetime.strptime(dtStr, "%Y-%m-%d").date()
                            result.score += 1
                        except:
                            pass

                if 'extra_info' in named_groups:
                    tmp_extra_info = match.group('extra_info')

                    # Show.S04.Special or Show.S05.Part.2.Extras is almost certainly not every episode in the season
                    if not (tmp_extra_info and 'season_only' in cur_regex_name and re.search(
                            r'([. _-]|^)(special|extra)s?\w*([. _-]|$)', tmp_extra_info, re.I)):
                        result.extra_info = tmp_extra_info
                        result.score += 1

                if 'release_group' in named_groups:
                    result.release_group = match.group('release_group')
                    result.score += 1

                doneSearch = True if result.show else False

                matches.append(result)

                time.sleep(0.05)

        if len(matches):
            # pick best match with highest score based on placement
            bestResult = max(sorted(matches, reverse=True, key=lambda x: x.which_regex), key=lambda x: x.score)

            # get quality
            bestResult.quality = common.Quality.nameQuality(name,
                                                            bestResult.show.is_anime if bestResult.show else False)

            # scene convert result
            bestResult = bestResult.convert() if self.convert and not self.naming_pattern else bestResult

            if not self.naming_pattern:
                if bestResult.show and bestResult.show.is_anime and len(bestResult.ab_episode_numbers):
                    new_episode_numbers = []
                    new_season_numbers = []

                    for epAbsNo in bestResult.ab_episode_numbers:
                        try:
                            (s, e) = helpers.get_all_episodes_from_absolute_number(bestResult.show, None, [epAbsNo])
                        except exceptions.EpisodeNotFoundByAbsoluteNumberException:
                            pass
                        else:
                            new_episode_numbers.extend(e)
                            new_season_numbers.append(s)

                    if len(new_season_numbers) and len(new_episode_numbers):
                        bestResult.episode_numbers = new_episode_numbers
                        bestResult.season_number = new_season_numbers[0]

        return bestResult

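    # _combine_results(first, second, attr) prefers first's value for attr and
    # falls back to second's; parse() below uses it so that, for example, the
    # file name result wins over the directory name result for 'air_date'.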
    def _combine_results(self, first, second, attr):
        # if the first doesn't exist then return the second or nothing
        if not first:
            if not second:
                return None
            else:
                return getattr(second, attr)

        # if the second doesn't exist then return the first
        if not second:
            return getattr(first, attr)

        a = getattr(first, attr)
        b = getattr(second, attr)

        # if a is good use it
        if a != None or (type(a) == list and len(a)):
            return a
        # if not use b (if b isn't set it'll just be default)
        else:
            return b

    def _unicodify(self, obj, encoding="utf-8"):
        if isinstance(obj, basestring):
            if not isinstance(obj, unicode):
                obj = unicode(obj, encoding, 'replace')
        return obj

    def _convert_number(self, org_number):
        """
        Convert org_number into an integer.

        org_number: an integer, or a string/unicode representation of a number

        Tries forcing to int first; on error, tries converting from Roman numerals.

        Returns an integer (0 if org_number is empty or cannot be converted).
        """

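        # e.g. "12" -> 12, "XII" -> 12 (via the Roman numeral fallback below),
        # "" or None -> 0.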
        try:
            # try forcing to int
            if org_number:
                number = int(org_number)
            else:
                number = 0

        except:
            # on error try converting from Roman numerals
            roman_to_int_map = (('M', 1000), ('CM', 900), ('D', 500), ('CD', 400), ('C', 100),
                                ('XC', 90), ('L', 50), ('XL', 40), ('X', 10),
                                ('IX', 9), ('V', 5), ('IV', 4), ('I', 1)
                                )

            roman_numeral = str(org_number).upper()
            number = 0
            index = 0

            for numeral, integer in roman_to_int_map:
                while roman_numeral[index:index + len(numeral)] == numeral:
                    number += integer
                    index += len(numeral)

        return number

    def parse(self, name, cache_result=True):
        name = self._unicodify(name)

        if self.naming_pattern:
            cache_result = False

        cached = NameParserCache().get(name)
        if cached:
            return cached

        # break it into parts if there are any (dirname, file name, extension)
        dir_name, file_name = os.path.split(name)
        ext_match = re.match(r'(.*)\.\w{3,4}$', file_name)
        if ext_match and self.file_name:
            base_file_name = ext_match.group(1)
        else:
            base_file_name = file_name

        # set up a result to use
        final_result = ParseResult(name)

        # try parsing the file name
        file_name_result = self._parse_string(base_file_name)

        # use only the direct parent dir
        dir_name = os.path.basename(dir_name)

        # parse the dirname for extra info if needed
        dir_name_result = self._parse_string(dir_name)

        # build the ParseResult object
        final_result.air_date = self._combine_results(file_name_result, dir_name_result, 'air_date')

        # anime absolute numbers
        final_result.ab_episode_numbers = self._combine_results(file_name_result, dir_name_result, 'ab_episode_numbers')

        # sports
        final_result.sports_event_id = self._combine_results(file_name_result, dir_name_result, 'sports_event_id')
        final_result.sports_event_name = self._combine_results(file_name_result, dir_name_result, 'sports_event_name')
        final_result.sports_air_date = self._combine_results(file_name_result, dir_name_result, 'sports_air_date')

        if not final_result.air_date and not final_result.sports_air_date:
            final_result.season_number = self._combine_results(file_name_result, dir_name_result, 'season_number')
            final_result.episode_numbers = self._combine_results(file_name_result, dir_name_result, 'episode_numbers')

        # if the dirname has a release group/show name I believe it over the filename
        final_result.series_name = self._combine_results(dir_name_result, file_name_result, 'series_name')
        final_result.extra_info = self._combine_results(dir_name_result, file_name_result, 'extra_info')
        final_result.release_group = self._combine_results(dir_name_result, file_name_result, 'release_group')

        final_result.which_regex = []
        if final_result == file_name_result:
            final_result.which_regex = file_name_result.which_regex
        elif final_result == dir_name_result:
            final_result.which_regex = dir_name_result.which_regex
        else:
            if file_name_result:
                final_result.which_regex += file_name_result.which_regex
            if dir_name_result:
                final_result.which_regex += dir_name_result.which_regex

        final_result.show = self._combine_results(file_name_result, dir_name_result, 'show')
        final_result.quality = self._combine_results(file_name_result, dir_name_result, 'quality')

        if not final_result.show:
            raise InvalidShowException(
                "Unable to parse " + name.encode(sickbeard.SYS_ENCODING, 'xmlcharrefreplace'))

        # if there's no useful info in it then raise an exception
        if final_result.season_number == None and not final_result.episode_numbers \
                and final_result.air_date == None and final_result.sports_air_date == None \
                and not final_result.ab_episode_numbers and not final_result.series_name:
            raise InvalidNameException("Unable to parse " + name.encode(sickbeard.SYS_ENCODING, 'xmlcharrefreplace'))

        if cache_result:
            NameParserCache().add(name, final_result)

        logger.log(u"Parsed " + name + " into " + str(final_result).decode('utf-8', 'xmlcharrefreplace'), logger.DEBUG)
        return final_result


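# Illustrative use of NameParser (a sketch; assumes the sickbeard show list and
# cache directory are already initialised):
#
#     np = NameParser(file_name=True)
#     try:
#         result = np.parse("Show.Name.S01E02.720p.HDTV.x264-GROUP.mkv")
#     except (InvalidNameException, InvalidShowException):
#         result = None

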
class ParseResult(object):
    def __init__(self,
                 original_name,
                 series_name=None,
                 sports_event_id=None,
                 sports_event_name=None,
                 sports_air_date=None,
                 season_number=None,
                 episode_numbers=None,
                 extra_info=None,
                 release_group=None,
                 air_date=None,
                 ab_episode_numbers=None,
                 show=None,
                 score=None,
                 quality=None
                 ):

        self.original_name = original_name

        self.series_name = series_name
        self.season_number = season_number
        if not episode_numbers:
            self.episode_numbers = []
        else:
            self.episode_numbers = episode_numbers

        if not ab_episode_numbers:
            self.ab_episode_numbers = []
        else:
            self.ab_episode_numbers = ab_episode_numbers

        if not quality:
            self.quality = common.Quality.UNKNOWN
        else:
            self.quality = quality

        self.extra_info = extra_info
        self.release_group = release_group

        self.air_date = air_date

        self.sports_event_id = sports_event_id
        self.sports_event_name = sports_event_name
        self.sports_air_date = sports_air_date

        self.which_regex = []
        self.show = show
        self.score = score

    def __eq__(self, other):
        if not other:
            return False

        if self.series_name != other.series_name:
            return False
        if self.season_number != other.season_number:
            return False
        if self.episode_numbers != other.episode_numbers:
            return False
        if self.extra_info != other.extra_info:
            return False
        if self.release_group != other.release_group:
            return False
        if self.air_date != other.air_date:
            return False
        if self.sports_event_id != other.sports_event_id:
            return False
        if self.sports_event_name != other.sports_event_name:
            return False
        if self.sports_air_date != other.sports_air_date:
            return False
        if self.ab_episode_numbers != other.ab_episode_numbers:
            return False
        if self.show != other.show:
            return False
        if self.score != other.score:
            return False
        if self.quality != other.quality:
            return False

        return True

    def __str__(self):
        if self.series_name != None:
            to_return = self.series_name + u' - '
        else:
            to_return = u''
        if self.season_number != None:
            to_return += 'S' + str(self.season_number)
        if self.episode_numbers and len(self.episode_numbers):
            for e in self.episode_numbers:
                to_return += 'E' + str(e)

        if self.is_air_by_date:
            to_return += str(self.air_date)
        if self.is_sports:
            to_return += str(self.sports_event_name)
            to_return += str(self.sports_event_id)
            to_return += str(self.sports_air_date)
        if self.ab_episode_numbers:
            to_return += ' absolute_numbers: ' + str(self.ab_episode_numbers)

        if self.extra_info:
            to_return += ' - ' + self.extra_info
        if self.release_group:
            to_return += ' (' + self.release_group + ')'

        to_return += ' [ABD: ' + str(self.is_air_by_date) + ']'
        to_return += ' [SPORTS: ' + str(self.is_sports) + ']'
        to_return += ' [ANIME: ' + str(self.is_anime) + ']'
        to_return += ' [whichReg: ' + str(self.which_regex) + ']'

        return to_return.encode('utf-8')

    def convert(self):
        if not self.show:
            return self  # can't convert without a show object

        if self.is_air_by_date or self.is_sports:  # scene numbering does not apply to air-by-date or sports shows
            return self

        new_episode_numbers = []
        new_season_numbers = []
        new_absolute_numbers = []

        if self.show.is_anime and len(self.ab_episode_numbers):
            scene_season = scene_exceptions.get_scene_exception_by_name(self.series_name)[1]
            for epAbsNo in self.ab_episode_numbers:
                ab = scene_numbering.get_indexer_absolute_numbering(self.show.indexerid, self.show.indexer, epAbsNo,
                                                                    True, scene_season)
                if ab:
                    try:
                        (s, e) = helpers.get_all_episodes_from_absolute_number(self.show, None, [ab])
                    except exceptions.EpisodeNotFoundByAbsoluteNumberException:
                        logger.log(str(self.show.indexerid) + ": Indexer object absolute number " + str(
                            ab) + " is incomplete, skipping this episode")
                        return self
                    else:
                        new_absolute_numbers.append(ab)
                        new_episode_numbers.extend(e)
                        new_season_numbers.append(s)

        elif self.season_number and len(self.episode_numbers):
            for epNo in self.episode_numbers:
                (s, e) = scene_numbering.get_indexer_numbering(self.show.indexerid, self.show.indexer,
                                                               self.season_number,
                                                               epNo)
                if self.show.is_anime:
                    a = helpers.get_absolute_number_from_season_and_episode(self.show, s, e)
                    if a:
                        new_absolute_numbers.append(a)

                new_episode_numbers.append(e)
                new_season_numbers.append(s)

        # need to do a quick sanity check here. It's possible that we now have episodes
        # from more than one season (by tvdb numbering), and this is just too much
        # for sickbeard, so we'd need to flag it.
        new_season_numbers = list(set(new_season_numbers))  # remove duplicates
        if len(new_season_numbers) > 1:
            raise InvalidNameException("Scene numbering results in episodes from "
                                       "seasons %s (i.e. more than one), and "
                                       "SickRage does not support this. "
                                       "Sorry." % (str(new_season_numbers)))

        # it's possible that we'd have duplicate episodes too, so eliminate them
        new_episode_numbers = list(set(new_episode_numbers))
        new_episode_numbers.sort()

        # there may be duplicate absolute numbers as well, so deduplicate those too
        new_absolute_numbers = list(set(new_absolute_numbers))
        new_absolute_numbers.sort()

        if len(new_absolute_numbers):
            self.ab_episode_numbers = new_absolute_numbers

        if len(new_season_numbers) and len(new_episode_numbers):
            self.episode_numbers = new_episode_numbers
            self.season_number = new_season_numbers[0]

        logger.log(u"Converted parsed result " + self.original_name + " into " + str(self).decode('utf-8',
                                                                                                  'xmlcharrefreplace'),
                   logger.DEBUG)

        return self

    @property
    def is_air_by_date(self):
        if self.season_number == None and len(self.episode_numbers) == 0 and self.air_date:
            return True
        return False

    @property
    def is_sports(self):
        if self.season_number == None and len(self.episode_numbers) == 0 and self.sports_air_date:
            return True
        return False

    @property
    def is_anime(self):
        if len(self.ab_episode_numbers):
            return True
        return False


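# NameParserCache persists recent parse results in a Shove store backed by a
# sqlite file under sickbeard.CACHE_DIR, keyed by the parsed name and trimmed
# back to npc_cache_size entries.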
class NameParserCache(object):
    def __init__(self):
        self.db_name = ek.ek(os.path.join, sickbeard.CACHE_DIR, 'name_parser_cache.db')
        self.npc_cache_size = 200

    def add(self, name, parse_result):
        if not isinstance(name, unicode):
            name = unicode(name, 'utf-8', 'replace')

        try:
            with closing(Shove('sqlite:///' + self.db_name, compress=True)) as npc:
                npc[str(name)] = parse_result

                # trim the cache back down to npc_cache_size entries
                while len(npc.items()) > self.npc_cache_size:
                    del npc[npc.keys()[0]]
        except:
            os.remove(self.db_name)
            try:
                with closing(Shove('sqlite:///' + self.db_name, compress=True)) as npc:
                    npc[str(name)] = parse_result

                    # trim the cache back down to npc_cache_size entries
                    while len(npc.items()) > self.npc_cache_size:
                        del npc[npc.keys()[0]]
            except Exception as e:
                logger.log(u"NameParser cache error: " + ex(e), logger.ERROR)

    def get(self, name):
        if not isinstance(name, unicode):
            name = unicode(name, 'utf-8', 'replace')

        try:
            with closing(Shove('sqlite:///' + self.db_name, compress=True)) as npc:
                parse_result = npc.get(str(name), None)
        except:
            os.remove(self.db_name)
            try:
                with closing(Shove('sqlite:///' + self.db_name, compress=True)) as npc:
                    parse_result = npc.get(str(name), None)
            except Exception as e:
                logger.log(u"NameParser cache error: " + ex(e), logger.ERROR)
                parse_result = None

        if parse_result:
            logger.log("Using cached parse result for: " + name, logger.DEBUG)

        return parse_result


class InvalidNameException(Exception):
    """The given release name is not valid"""


class InvalidShowException(Exception):
    """The given show name is not valid"""