Mirror of https://github.com/moparisthebest/SickRage (synced 2024-12-13 19:42:20 -05:00)

Commit 0b5c9e9c38: Merge branch 'origin/dev'
@@ -269,10 +269,8 @@
 </div>
 #end for

 <div class="clearfix" style="clear:left;"></div><br/>
-<input type="submit" class="btn config_submitter" value="Save Changes" /><br/>
+<input type="submit" class="btn config_submitter" value="Save Changes" /><br/>

 </fieldset>
 </div><!-- /component-group2 //-->

@@ -292,7 +290,7 @@
 <select id="name_presets">
 #set is_custom = True
 #for $cur_preset in $naming.name_presets:
-#set $tmp = $naming.test_name($cur_preset)
+#set $tmp = $naming.test_name($cur_preset, anime_type=3)
 #if $cur_preset == $sickbeard.NAMING_PATTERN:
 #set is_custom = False
 #end if
@@ -452,7 +450,7 @@
 </div>

 <div id="naming_example_div">
-<h2>Sample:</h2>
+<h2>Single-EP Sample:</h2>
 <div class="example">
 <span class="jumbo" id="naming_example"> </span>
 </div>
@@ -467,8 +465,24 @@
 <br/>
 </div>

+<div id="naming_example_anime_div">
+<h2>Single-EP Anime Sample:</h2>
+<div class="example">
+<span class="jumbo" id="naming_example_anime"> </span>
+</div>
+<br/>
+</div>
+
+<div id="naming_example_multi_anime_div">
+<h2>Multi-EP Anime sample:</h2>
+<div class="example">
+<span class="jumbo" id="naming_example_multi_anime"> </span>
+</div>
+<br/>
+</div>
+
 <div class="field-pair">
-<input type="radio" name="naming_anime" id="naming_anime" value="1" #if $sickbeard.NAMING_ANIME == 1then "checked=\"checked\"" else ""#/>
+<input type="radio" name="naming_anime" id="naming_anime" value="1" #if $sickbeard.NAMING_ANIME == 1 then "checked=\"checked\"" else ""#/>
 <label class="clearfix" for="naming_anime">
 <span class="component-title">Add Absolute Number</span>
 <span class="component-desc">Add the absolute number to the season/episode format?</span>
@@ -10,11 +10,11 @@
 #set $myDB = $db.DBConnection()
 #set $today = str($datetime.date.today().toordinal())
 #set $numShows = len($sickbeard.showList)
-#set $numGoodShows = len([x for x in $sickbeard.showList if x.paused == 0 and x.status != "Ended"])
+#set $numGoodShows = len([x for x in $sickbeard.showList if x.paused == 0 and "Ended" not in x.status])
 #set $numDLEpisodes = $myDB.select("SELECT COUNT(*) FROM tv_episodes WHERE status IN ("+",".join([str(x) for x in $Quality.DOWNLOADED + [$ARCHIVED]])+") AND season != 0 and episode != 0 AND airdate <= "+$today+"")[0][0]
 #set $numEpisodes = $myDB.select("SELECT COUNT(*) FROM tv_episodes WHERE season != 0 and episode != 0 AND (airdate != 1 OR status IN ("+",".join([str(x) for x in ($Quality.DOWNLOADED + $Quality.SNATCHED + $Quality.SNATCHED_PROPER) + [$ARCHIVED]])+")) AND airdate <= "+$today+" AND status != "+str($IGNORED)+"")[0][0]
 <b>$numShows shows</b> ($numGoodShows active) | <b>$numDLEpisodes/$numEpisodes</b> episodes downloaded |
-<b>Daily Search</b>: <%=str(sickbeard.dailySearchScheduler.timeLeft()).split('.')[0]%> |
+<b>Search</b>: <%=str(sickbeard.dailySearchScheduler.timeLeft()).split('.')[0]%> |
 <b>Backlog</b>: $sbdatetime.sbdatetime.sbfdate($sickbeard.backlogSearchScheduler.nextRun())
 </div>
 <ul style="float:right;">
@@ -29,8 +29,9 @@ $(document).ready(function () {
 function fill_examples() {
 var pattern = $('#naming_pattern').val();
 var multi = $('#naming_multi_ep :selected').val();
+var anime_type = $('input[name="naming_anime"]:checked').val();

-$.get(sbRoot + '/config/postProcessing/testNaming', {pattern: pattern},
+$.get(sbRoot + '/config/postProcessing/testNaming', {pattern: pattern, anime_type: 3},
 function (data) {
 if (data) {
 $('#naming_example').text(data + '.ext');
@@ -40,7 +41,7 @@ $(document).ready(function () {
 }
 });

-$.get(sbRoot + '/config/postProcessing/testNaming', {pattern: pattern, multi: multi},
+$.get(sbRoot + '/config/postProcessing/testNaming', {pattern: pattern, multi: multi, anime_type: 3},
 function (data) {
 if (data) {
 $('#naming_example_multi').text(data + '.ext');
@@ -50,7 +51,27 @@ $(document).ready(function () {
 }
 });

-$.get(sbRoot + '/config/postProcessing/isNamingValid', {pattern: pattern, multi: multi},
+$.get(sbRoot + '/config/postProcessing/testNaming', {pattern: pattern, anime_type: anime_type},
+function (data) {
+if (data) {
+$('#naming_example_anime').text(data + '.ext');
+$('#naming_example_anime_div').show();
+} else {
+$('#naming_example_anime_div').hide();
+}
+});
+
+$.get(sbRoot + '/config/postProcessing/testNaming', {pattern: pattern, multi: multi, anime_type: anime_type},
+function (data) {
+if (data) {
+$('#naming_example_multi_anime').text(data + '.ext');
+$('#naming_example_multi_anime_div').show();
+} else {
+$('#naming_example_multi_anime_div').hide();
+}
+});
+
+$.get(sbRoot + '/config/postProcessing/isNamingValid', {pattern: pattern, multi: multi, anime_type: anime_type},
 function (data) {
 if (data == "invalid") {
 $('#naming_pattern').qtip('option', {
@@ -221,6 +242,10 @@ $(document).ready(function () {
 setup_sports_naming();
 });

+$('input[name="naming_anime"]').click(function(){
+setup_naming();
+});
+
 $('#naming_multi_ep').change(fill_examples);
 $('#naming_pattern').focusout(fill_examples);
 $('#naming_pattern').keyup(function () {
@@ -29,7 +29,7 @@

 [imdbpy]
 ## Default.
-accessSystem = http
+accessSystem = httpThin

 ## Optional (options common to every data access system):
 # Activate adult searches (on, by default).
@@ -69,7 +69,7 @@ accessSystem = http
 ## Set the threshold for logging messages.
 # Can be one of "debug", "info", "warning", "error", "critical" (default:
 # "warning").
-#loggingLevel = debug
+loggingLevel = debug

 ## Path to a configuration file for the logging facility;
 # see: http://docs.python.org/library/logging.html#configuring-logging
@@ -598,11 +598,11 @@ class Tvdb:
 zipdata = StringIO.StringIO()
 zipdata.write(resp.content)
 myzipfile = zipfile.ZipFile(zipdata)
-return xmltodict.parse(myzipfile.read('%s.xml' % language).strip(), postprocessor=process)
+return xmltodict.parse(myzipfile.read('%s.xml' % language), postprocessor=process)
 except zipfile.BadZipfile:
 raise tvdb_error("Bad zip file received from thetvdb.com, could not read it")
 else:
-return xmltodict.parse(resp.content.strip(), postprocessor=process)
+return xmltodict.parse(resp.content.strip().encode('utf-8'), postprocessor=process)

 def _getetsrc(self, url, params=None, language=None):
 """Loads a URL using caching, returns an ElementTree of the source
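Both this hunk and the TVRage hunk below re-encode the response body to UTF-8 bytes before handing it to xmltodict, presumably to avoid implicit ASCII conversion of non-ASCII payloads under Python 2. A minimal sketch of the same encode-before-parse step, assuming only the bundled xmltodict; the XML payload and show name here are made up:

    # -*- coding: utf-8 -*-
    import xmltodict

    # Under Python 2 an upstream-decoded response can arrive as a unicode string.
    body = u'<?xml version="1.0" encoding="UTF-8"?><Data><Series><SeriesName>Ütopya</SeriesName></Series></Data>'

    # Encode to UTF-8 bytes up front so the parser never has to guess an
    # encoding for a unicode string containing non-ASCII characters.
    data = xmltodict.parse(body.strip().encode('utf-8'))
    print(data['Data']['Series']['SeriesName'])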
@@ -462,7 +462,7 @@ class TVRage:
 return (key, value)

 if resp.ok:
-return xmltodict.parse(resp.content.strip(), postprocessor=remap_keys)
+return xmltodict.parse(resp.content.strip().encode('utf-8'), postprocessor=remap_keys)

 def _getetsrc(self, url, params=None):
 """Loads a URL using caching, returns an ElementTree of the source
@@ -527,6 +527,7 @@ class TVRage:
 if not isinstance(data, dict or list):
 data = data.replace(u"&amp;", u"&")
+data = data.strip()

 return data

 def search(self, series):
@@ -597,7 +598,7 @@ class TVRage:
 self.config['params_epInfo']['sid'] = sid
 epsEt = self._getetsrc(self.config['url_epInfo'], self.config['params_epInfo'])

-for season in epsEt['episodelist']['season']:
+for season in epsEt['episodelist'].values():
 episodes = season['episode']
 if not isinstance(episodes, list):
 episodes = [episodes]

setup.py
@@ -179,7 +179,7 @@ for curFile in auto_process_files:
 setup(
 options = {'py2exe': {'bundle_files': 1}},
 zipfile = None,
-console = ['updater.py'],
+console = ['updater.py'], requires=['Cheetah']
 )

 if 'test' in oldArgs:
@@ -597,13 +597,12 @@ def initialize(consoleLogging=True):
 NAMING_PATTERN = check_setting_str(CFG, 'General', 'naming_pattern', 'Season %0S/%SN - S%0SE%0E - %EN')
 NAMING_ABD_PATTERN = check_setting_str(CFG, 'General', 'naming_abd_pattern', '%Y/%0M/%SN - %A.D - %EN')
 NAMING_CUSTOM_ABD = check_setting_int(CFG, 'General', 'naming_custom_abd', 0)
-NAMING_SPORTS_PATTERN = check_setting_str(CFG, 'General', 'naming_sports_pattern',
+NAMING_SPORTS_PATTERN = check_setting_str(CFG, 'General', 'naming_sports_pattern', '%Y/%0M/%SN - %A.D - %EN')
-'Season %0S/%SN - S%0SE%0E - %EN')
+NAMING_ANIME = check_setting_int(CFG, 'General', 'naming_anime', 3)
 NAMING_CUSTOM_SPORTS = check_setting_int(CFG, 'General', 'naming_custom_sports', 0)
 NAMING_MULTI_EP = check_setting_int(CFG, 'General', 'naming_multi_ep', 1)
 NAMING_FORCE_FOLDERS = naming.check_force_season_folders()
 NAMING_STRIP_YEAR = bool(check_setting_int(CFG, 'General', 'naming_strip_year', 0))
-NAMING_ANIME = check_setting_int(CFG, 'General', 'naming_anime', 3)

 USE_NZBS = bool(check_setting_int(CFG, 'General', 'use_nzbs', 0))
 USE_TORRENTS = bool(check_setting_int(CFG, 'General', 'use_torrents', 1))
@@ -163,9 +163,9 @@ class AllShowsListUI:
 continue

 if 'seriesname' in curShow:
-seriesnames.append(str(curShow['seriesname']))
+seriesnames.append(curShow['seriesname'])
 if 'aliasnames' in curShow:
-seriesnames.extend(str(curShow['aliasnames']).split('|'))
+seriesnames.extend(curShow['aliasnames'].split('|'))

 for name in seriesnames:
 if searchterm.lower() in name.lower():
@@ -29,7 +29,7 @@ class rTorrentAPI(GenericClient):
 super(rTorrentAPI, self).__init__('rTorrent', host, username, password)

 def _get_auth(self):
-auth = None
+self.auth = None

 if self.auth is not None:
 return self.auth
@@ -437,7 +437,7 @@ class ConfigMigrator():
 else:
 logger.log(u"Proceeding with upgrade")

 # do the migration, expect a method named _migrate_v<num>
 logger.log(u"Migrating config up to version " + str(next_version) + migration_name)
 getattr(self, '_migrate_v' + str(next_version))()
 self.config_version = next_version
@@ -50,7 +50,7 @@ class DBConnection:
 def __init__(self, filename="sickbeard.db", suffix=None, row_type=None):

 self.filename = filename
-self.connection = sqlite3.connect(dbFilename(filename), 20)
+self.connection = sqlite3.connect(dbFilename(filename, suffix), 20)
 if row_type == "dict":
 self.connection.row_factory = self._dict_factory
 else:
@@ -23,7 +23,7 @@ import threading
 import regexes
 import sickbeard

-from sickbeard import logger, helpers, scene_numbering
+from sickbeard import logger, helpers, scene_numbering, common
 from dateutil import parser

 nameparser_lock = threading.Lock()
@@ -136,11 +136,9 @@ class NameParser(object):

 if 'season_num' in named_groups:
 tmp_season = int(match.group('season_num'))
-if cur_regex_name == 'bare' and tmp_season in (19, 20):
+if not (cur_regex_name == 'bare' and tmp_season in (19, 20)):
-continue
+result.season_number = tmp_season
+result.score += 1
-result.season_number = tmp_season
-result.score += 1

 if 'ep_num' in named_groups:
 ep_num = self._convert_number(match.group('ep_num'))
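The season_num change above inverts the guard: the old code hit `continue` and threw the whole match away when the 'bare' regex produced a season of 19 or 20, whereas the new code only skips recording the season and keeps scoring the remaining named groups. A standalone sketch of that control-flow difference, with made-up values standing in for the parser state:

    def old_style(cur_regex_name, tmp_season, result):
        if cur_regex_name == 'bare' and tmp_season in (19, 20):
            return 'match discarded'              # the old `continue`
        result['season_number'] = tmp_season
        result['score'] = result.get('score', 0) + 1
        return 'season recorded'

    def new_style(cur_regex_name, tmp_season, result):
        if not (cur_regex_name == 'bare' and tmp_season in (19, 20)):
            result['season_number'] = tmp_season  # record and score only when the guard passes
            result['score'] = result.get('score', 0) + 1
        return 'later groups still examined'      # no early exit either way

    print(old_style('bare', 19, {}))   # match discarded
    print(new_style('bare', 19, {}))   # later groups still examined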
@@ -198,12 +196,10 @@ class NameParser(object):
 tmp_extra_info = match.group('extra_info')

 # Show.S04.Special or Show.S05.Part.2.Extras is almost certainly not every episode in the season
-if tmp_extra_info and cur_regex_name == 'season_only' and re.search(
+if not (tmp_extra_info and cur_regex_name == 'season_only' and re.search(
-r'([. _-]|^)(special|extra)s?\w*([. _-]|$)', tmp_extra_info, re.I):
+r'([. _-]|^)(special|extra)s?\w*([. _-]|$)', tmp_extra_info, re.I)):
-continue
+result.extra_info = tmp_extra_info
+result.score += 1
-result.extra_info = tmp_extra_info
-result.score += 1

 if 'release_group' in named_groups:
 result.release_group = match.group('release_group')
@@ -211,6 +207,14 @@ class NameParser(object):

 cur_show = helpers.get_show_by_name(result.series_name, useIndexer=self.useIndexers)
 if not cur_show:
+if self.showObj:
+if self.showObj.air_by_date and result.air_date:
+result.score += 1
+elif self.showObj.sports and result.sports_event_date:
+result.score += 1
+elif self.showObj.anime and len(result.ab_episode_numbers):
+result.score += 1
+
 matches.append(result)
 continue

@@ -231,6 +235,10 @@ class NameParser(object):
 if len(matches):
 result = max(matches, key=lambda x: x.score)

+# get quality
+if result.show:
+result.quality = common.Quality.nameQuality(name, bool(result.show and result.show.is_anime))
+
 return result

 def _combine_results(self, first, second, attr):
@@ -352,6 +360,7 @@ class NameParser(object):
 final_result.which_regex += dir_name_result.which_regex

 final_result.show = self._combine_results(file_name_result, dir_name_result, 'show')
+final_result.quality = self._combine_results(file_name_result, dir_name_result, 'quality')

 # if there's no useful info in it then raise an exception
 if final_result.season_number == None and not final_result.episode_numbers and final_result.air_date == None and not final_result.series_name:
@@ -377,7 +386,8 @@ class ParseResult(object):
 air_date=None,
 ab_episode_numbers=None,
 show=None,
-score=None
+score=None,
+quality=None
 ):

 self.original_name = original_name
@@ -394,6 +404,11 @@ class ParseResult(object):
 else:
 self.ab_episode_numbers = ab_episode_numbers

+if not quality:
+self.quality = common.Quality.UNKNOWN
+else:
+self.quality = quality
+
 self.extra_info = extra_info
 self.release_group = release_group

@@ -435,6 +450,8 @@ class ParseResult(object):
 return False
 if self.score != other.score:
 return False
+if self.quality != other.quality:
+return False

 return True

@@ -59,7 +59,7 @@ normal_regexes = {'normal':[
 s(?P<season_num>\d+)[. _-]* # S01 and optional separator
 e(?P<ep_num>\d+) # E02 and separator
 (([. _-]*e|-) # linking e/- char
-(?P<extra_ep_num>(?!(1080|720)[pi])\d+))* # additional E03/etc
+(?P<extra_ep_num>(?!(1080|720|480)[pi])\d+))* # additional E03/etc
 [. _-]*((?P<extra_info>.+?) # Source_Quality_Etc-
 ((?<![. _-])(?<!WEB) # Make sure this is really the release group
 -(?P<release_group>[^- ]+))?)?$ # Group
@@ -76,7 +76,7 @@ normal_regexes = {'normal':[
 (?P<ep_num>\d+) # 02 and separator
 (([. _-]*x|-) # linking x/- char
 (?P<extra_ep_num>
-(?!(1080|720)[pi])(?!(?<=x)264) # ignore obviously wrong multi-eps
+(?!(1080|720|480)[pi])(?!(?<=x)264) # ignore obviously wrong multi-eps
 \d+))* # additional x03/etc
 [\]. _-]*((?P<extra_info>.+?) # Source_Quality_Etc-
 ((?<![. _-])(?<!WEB) # Make sure this is really the release group
@@ -136,7 +136,7 @@ normal_regexes = {'normal':[
 (e(p(isode)?)?|part|pt)[. _-]? # e, ep, episode, or part
 (?P<ep_num>(\d+|[ivx]+)) # first ep num
 ((([. _-]+(and|&|to)[. _-]+)|-) # and/&/to joiner
-(?P<extra_ep_num>(?!(1080|720)[pi])(\d+|[ivx]+))[. _-]) # second ep num
+(?P<extra_ep_num>(?!(1080|720|480)[pi])(\d+|[ivx]+))[. _-]) # second ep num
 ([. _-]*(?P<extra_info>.+?) # Source_Quality_Etc-
 ((?<![. _-])(?<!WEB) # Make sure this is really the release group
 -(?P<release_group>[^- ]+))?)?$ # Group
@@ -153,7 +153,7 @@ normal_regexes = {'normal':[
 (?P<ep_num>(\d+|([ivx]+(?=[. _-])))) # first ep num
 ([. _-]+((and|&|to)[. _-]+)? # and/&/to joiner
 ((e(p(isode)?)?|part|pt)[. _-]?) # e, ep, episode, or part
-(?P<extra_ep_num>(?!(1080|720)[pi])
+(?P<extra_ep_num>(?!(1080|720|480)[pi])
 (\d+|([ivx]+(?=[. _-]))))[. _-])* # second ep num
 ([. _-]*(?P<extra_info>.+?) # Source_Quality_Etc-
 ((?<![. _-])(?<!WEB) # Make sure this is really the release group
@@ -200,6 +200,20 @@ sports_regexs = {'sports':[
 -(?P<release_group>[^- ]+))?)?$
 '''
 ),

+('sports_bare',
+# Sports.Name.2010.11.23.Source.Quality.Etc-Group
+# Sports.Name.23rd.Nov.2010.Source.Quality.Etc-Group
+'''
+^(?P<series_name>.+?)[. _-]+
+((?P<sports_event_id>\d{3})[. _-]+)?
+((?P<sports_event_name>\.+)[. _-]+)?
+(?P<sports_event_date>(\d{4}[. _-]+\d{1,2}[. _-]+\d{1,2})|(\d{1,2}\w{2}[. _-]+\w+[. _-]+\d{4}))
+[. _-]*((?P<extra_info>.+?)((?<![. _-])(?<!WEB)
+-(?P<release_group>[^- ]+))?)?$
+'''
+),
+
 ]}

 anime_regexes = {'anime':[
@@ -208,8 +222,8 @@ anime_regexes = {'anime':[
 """
 ^(?:\[(?P<release_group>.+?)\][ ._-]*)
 (?P<series_name>.+?)[ ._-]+
-(?P<ep_ab_num>\d{1,3})
+(?P<ep_ab_num>(?!(1080|720|480)[pi])\d+)
-(-(?P<extra_ab_ep_num>\d{1,3}))?[ ._-]+?
+(-(?P<extra_ab_ep_num>(?!(1080|720|480)[pi])\d+))?[ ._-]+?
 (?:v(?P<version>[0-9]))?
 (?:[\w\.]*)
 (?:(?:(?:[\[\(])(?P<extra_info>\d{3,4}[xp]?\d{0,4}[\.\w\s-]*)(?:[\]\)]))|(?:\d{3,4}[xp]))
@@ -227,8 +241,8 @@ anime_regexes = {'anime':[
 '''
 ^(\[(?P<release_group>.+?)\][ ._-]*)? # Release Group and separator
 (?P<series_name>.+?)[ ._-]+ # Show_Name and separator
-(?P<ep_ab_num>\d{1,3}) # E01
+(?P<ep_ab_num>(?!(1080|720|480)[pi])\d+) # E01
-(-(?P<extra_ab_ep_num>\d{1,3}))? # E02
+(-(?P<extra_ab_ep_num>(?!(1080|720|480)[pi])\d+))? # E02
 (v(?P<version>[0-9]))? # version
 [ ._-]+\[(?P<extra_info>\d{3,4}[xp]?\d{0,4}[\.\w\s-]*)\] # Source_Quality_Etc-
 (\[(?P<crc>\w{8})\])? # CRC
@@ -242,8 +256,8 @@ anime_regexes = {'anime':[
 '''
 ^(\[(?P<release_group>.+?)\][ ._-]*)? # Release Group and separator
 (?P<series_name>.+?)[ ._-]+ # Show_Name and separator
-(?P<ep_ab_num>\d{1,3}) # E01
+(?P<ep_ab_num>(?!(1080|720|480)[pi])\d+) # E01
-(-(?P<extra_ab_ep_num>\d{1,3}))? # E02
+(-(?P<extra_ab_ep_num>(?!(1080|720|480)[pi])\d+))? # E02
 (v(?P<version>[0-9]))? # version
 [ ._-]+\((?P<extra_info>(CX[ ._-]?)?\d{3,4}[xp]?\d{0,4}[\.\w\s-]*)\) # Source_Quality_Etc-
 (\[(?P<crc>\w{8})\])? # CRC
@@ -255,8 +269,8 @@ anime_regexes = {'anime':[
 '''
 ^(\[(?P<release_group>.+?)\][ ._-]*)? # Release Group and separator
 (?P<series_name>.+?)[ ._-]+ # Show_Name and separator
-(?P<ep_ab_num>\d{1,3}) # E01
+(?P<ep_ab_num>(?!(1080|720|480)[pi])\d+) # E01
-(-(?P<extra_ab_ep_num>\d{1,3}))? # E02
+(-(?P<extra_ab_ep_num>(?!(1080|720|480)[pi])\d+))? # E02
 (v(?P<version>[0-9]))? # version
 [ ._-]+\[(?P<extra_info>\d{3,4}p) # Source_Quality_Etc-
 (\[(?P<crc>\w{8})\])? # CRC
@@ -271,8 +285,8 @@ anime_regexes = {'anime':[
 ^(\[(?P<release_group>.+?)\][ ._-]*)? # Release Group and separator
 (?P<series_name>.+?)[ ._]* # Show_Name and separator
 ([ ._-]+-[ ._-]+[A-Z]+[ ._-]+)?[ ._-]+ # funny stuff, this is sooo nuts ! this will kick me in the butt one day
-(?P<ep_ab_num>\d{1,3}) # E01
+(?P<ep_ab_num>(?!(1080|720|480)[pi])\d+) # E01
-(-(?P<extra_ab_ep_num>\d{1,3}))? # E02
+(-(?P<extra_ab_ep_num>(?!(1080|720|480)[pi])\d+))? # E02
 (v(?P<version>[0-9]))? # version
 ([ ._-](\[\w{1,2}\])?\[[a-z][.]?\w{2,4}\])? #codec
 [ ._-]*\[(?P<extra_info>(\d{3,4}[xp]?\d{0,4})?[\.\w\s-]*)\] # Source_Quality_Etc-
@@ -301,13 +315,14 @@ anime_regexes = {'anime':[
 (([. _-]*e|-) # linking e/- char
 (?P<extra_ep_num>\d+))* # additional E03/etc
 ([ ._-]{2,}|[ ._]+) # if "-" is used to separate at least something else has to be there(->{2,}) "s16e03-04-313-314" would make sens any way
-(?P<ep_ab_num>\d{1,3}) # absolute number
+(?P<ep_ab_num>(?!(1080|720|480)[pi])\d+) # absolute number
-(-(?P<extra_ab_ep_num>\d{1,3}))? # "-" as separator and anditional absolute number, all optinal
+(-(?P<extra_ab_ep_num>(?!(1080|720|480)[pi])\d+))? # "-" as separator and anditional absolute number, all optinal
 (v(?P<version>[0-9]))? # the version e.g. "v2"
 .*?
 '''

 ),

 ('anime_and_normal_x',
 # Bleach - s16e03-04 - 313-314
 # Bleach.s16e03-04.313-314
@@ -319,8 +334,8 @@ anime_regexes = {'anime':[
 (([. _-]*e|-) # linking e/- char
 (?P<extra_ep_num>\d+))* # additional E03/etc
 ([ ._-]{2,}|[ ._]+) # if "-" is used to separate at least something else has to be there(->{2,}) "s16e03-04-313-314" would make sens any way
-(?P<ep_ab_num>\d{1,3}) # absolute number
+(?P<ep_ab_num>(?!(1080|720|480)[pi])\d+) # absolute number
-(-(?P<extra_ab_ep_num>\d{1,3}))? # "-" as separator and anditional absolute number, all optinal
+(-(?P<extra_ab_ep_num>(?!(1080|720|480)[pi])\d+))? # "-" as separator and anditional absolute number, all optinal
 (v(?P<version>[0-9]))? # the version e.g. "v2"
 .*?
 '''
@@ -331,8 +346,8 @@ anime_regexes = {'anime':[
 # Bleach - 313-314 - s16e03-04
 '''
 ^(?P<series_name>.+?)[ ._-]+ # start of string and series name and non optinal separator
-(?P<ep_ab_num>\d{1,3}) # absolute number
+(?P<ep_ab_num>(?!(1080|720|480)[pi])\d+) # absolute number
-(-(?P<extra_ab_ep_num>\d{1,3}))? # "-" as separator and anditional absolute number, all optinal
+(-(?P<extra_ab_ep_num>(?!(1080|720|480)[pi])\d+))? # "-" as separator and anditional absolute number, all optinal
 (v(?P<version>[0-9]))? # the version e.g. "v2"
 ([ ._-]{2,}|[ ._]+) # if "-" is used to separate at least something else has to be there(->{2,}) "s16e03-04-313-314" would make sens any way
 [sS](?P<season_num>\d+)[. _-]* # S01 and optional separator
@@ -346,8 +361,8 @@ anime_regexes = {'anime':[
 ('anime_and_normal_front',
 # 165.Naruto Shippuuden.s08e014
 '''
-^(?P<ep_ab_num>\d{1,3}) # start of string and absolute number
+^(?P<ep_ab_num>(?!(1080|720|480)[pi])\d+) # start of string and absolute number
-(-(?P<extra_ab_ep_num>\d{1,3}))? # "-" as separator and anditional absolute number, all optinal
+(-(?P<extra_ab_ep_num>(?!(1080|720|480)[pi])\d+))? # "-" as separator and anditional absolute number, all optinal
 (v(?P<version>[0-9]))?[ ._-]+ # the version e.g. "v2"
 (?P<series_name>.+?)[ ._-]+
 [sS](?P<season_num>\d+)[. _-]* # S01 and optional separator
@@ -357,18 +372,19 @@ anime_regexes = {'anime':[
 .*?
 '''
 ),

 ('anime_ep_name',
-"""
+'''
 ^(?:\[(?P<release_group>.+?)\][ ._-]*)
 (?P<series_name>.+?)[ ._-]+
-(?P<ep_ab_num>\d{1,3})
+(?P<ep_ab_num>(?!(1080|720|480)[pi])\d+)
-(-(?P<extra_ab_ep_num>\d{1,3}))?[ ._-]*?
+(-(?P<extra_ab_ep_num>(?!(1080|720|480)[pi])\d+))?[ ._-]*?
 (?:v(?P<version>[0-9])[ ._-]+?)?
 (?:.+?[ ._-]+?)?
 \[(?P<extra_info>\w+)\][ ._-]?
 (?:\[(?P<crc>\w{8})\])?
 .*?
-"""
+'''
 ),

 ('anime_bare',
@@ -377,8 +393,8 @@ anime_regexes = {'anime':[
 '''
 ^(\[(?P<release_group>.+?)\][ ._-]*)?
 (?P<series_name>.+?)[ ._-]+ # Show_Name and separator
-(?P<ep_ab_num>\d{3}) # E01
+(?P<ep_ab_num>(?!(1080|720|480)[pi])\d+) # E01
-(-(?P<extra_ab_ep_num>\d{3}))? # E02
+(-(?P<extra_ab_ep_num>(?!(1080|720|480)[pi])\d+))? # E02
 (v(?P<version>[0-9]))? # v2
 .*? # Separator and EOL
 ''')
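Every `\d{1,3}` absolute-number group above becomes `(?!(1080|720|480)[pi])\d+`: a negative lookahead that refuses to start an episode number where a resolution token such as 720p or 1080i begins. A small sketch of the effect in isolation (a cut-down pattern and invented release names, not the full anime regex):

    import re

    old_pat = re.compile(r'[ ._-](?P<ep_ab_num>\d{1,3})')
    new_pat = re.compile(r'[ ._-](?P<ep_ab_num>(?!(1080|720|480)[pi])\d+)')

    release = '[Group] Show Name - 720p'                   # resolution only, no episode number
    print(old_pat.search(release).group('ep_ab_num'))      # '720' -> resolution misread as an episode
    print(new_pat.search(release))                          # None  -> lookahead rejects 720p

    release = '[Group] Show Name - 124 [720p]'
    print(new_pat.search(release).group('ep_ab_num'))       # '124' -> a real absolute number still matches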
@@ -40,11 +40,9 @@ name_abd_presets = ('%SN - %A-D - %EN',
 '%Y/%0M/%S.N.%A.D.%E.N-%RG'
 )

-name_sports_presets = ('%SN - %Sx%0E - %EN',
+name_sports_presets = ('%SN - %A-D - %EN',
-'%S.N.S%0SE%0E.%E.N',
+'%S.N.%A.D.%E.N.%Q.N',
-'%Sx%0E - %EN',
+'%Y/%0M/%S.N.%A.D.%E.N-%RG'
-'S%0SE%0E - %EN',
-'Season %0S/%S.N.S%0SE%0E.%Q.N-%RG'
 )

 class TVShow():
@@ -98,7 +96,7 @@ class TVEpisode(tv.TVEpisode):
 self._is_proper = True


-def check_force_season_folders(pattern=None, multi=None):
+def check_force_season_folders(pattern=None, multi=None, anime_type=None):
 """
 Checks if the name can still be parsed if you strip off the folders to determine if we need to force season folders
 to be enabled or not.
@@ -108,15 +106,18 @@ def check_force_season_folders(pattern=None, multi=None):
 if pattern == None:
 pattern = sickbeard.NAMING_PATTERN

-valid = not validate_name(pattern, None, file_only=True)
+if anime_type == None:
+anime_type = sickbeard.NAMING_ANIME
+
+valid = not validate_name(pattern, None, anime_type, file_only=True)

 if multi != None:
-valid = valid or not validate_name(pattern, multi, file_only=True)
+valid = valid or not validate_name(pattern, multi, anime_type, file_only=True)

 return valid


-def check_valid_naming(pattern=None, multi=None):
+def check_valid_naming(pattern=None, multi=None, anime_type=None):
 """
 Checks if the name is can be parsed back to its original form for both single and multi episodes.

@@ -125,12 +126,15 @@ def check_valid_naming(pattern=None, multi=None):
 if pattern == None:
 pattern = sickbeard.NAMING_PATTERN

+if anime_type == None:
+anime_type = sickbeard.NAMING_ANIME
+
 logger.log(u"Checking whether the pattern " + pattern + " is valid for a single episode", logger.DEBUG)
-valid = validate_name(pattern, None)
+valid = validate_name(pattern, None, anime_type)

 if multi != None:
 logger.log(u"Checking whether the pattern " + pattern + " is valid for a multi episode", logger.DEBUG)
-valid = valid and validate_name(pattern, multi)
+valid = valid and validate_name(pattern, multi, anime_type)

 return valid

@@ -163,10 +167,10 @@ def check_valid_sports_naming(pattern=None):

 return valid

-def validate_name(pattern, multi=None, file_only=False, abd=False, sports=False):
+def validate_name(pattern, multi=None, anime_type=None, file_only=False, abd=False, sports=False):
-ep = generate_sample_ep(multi, abd, sports)
+ep = generate_sample_ep(multi, abd, sports, anime_type)

-new_name = ep.formatted_filename(pattern, multi) + '.ext'
+new_name = ep.formatted_filename(pattern, multi, anime_type) + '.ext'
 new_path = ep.formatted_dir(pattern, multi)
 if not file_only:
 new_name = ek.ek(os.path.join, new_path, new_name)
@@ -177,7 +181,7 @@ def validate_name(pattern, multi=None, file_only=False, abd=False, sports=False)

 logger.log(u"Trying to parse " + new_name, logger.DEBUG)

-parser = NameParser(True)
+parser = NameParser(True, showObj=ep.show)

 try:
 result = parser.parse(new_name)
@@ -191,18 +195,26 @@ def validate_name(pattern, multi=None, file_only=False, abd=False, sports=False)
 if result.air_date != ep.airdate:
 logger.log(u"Air date incorrect in parsed episode, pattern isn't valid", logger.DEBUG)
 return False
+elif sports:
+if result.sports_event_date != ep.airdate:
+logger.log(u"Sports event date incorrect in parsed episode, pattern isn't valid", logger.DEBUG)
+return False
+elif anime_type != 3:
+if len(result.ab_episode_numbers) and result.ab_episode_numbers != [x.absolute_number for x in [ep] + ep.relatedEps]:
+logger.log(u"Absolute numbering incorrect in parsed episode, pattern isn't valid", logger.DEBUG)
+return False
 else:
 if result.season_number != ep.season:
-logger.log(u"Season incorrect in parsed episode, pattern isn't valid", logger.DEBUG)
+logger.log(u"Season number incorrect in parsed episode, pattern isn't valid", logger.DEBUG)
 return False
 if result.episode_numbers != [x.episode for x in [ep] + ep.relatedEps]:
-logger.log(u"Episode incorrect in parsed episode, pattern isn't valid", logger.DEBUG)
+logger.log(u"Episode numbering incorrect in parsed episode, pattern isn't valid", logger.DEBUG)
 return False

 return True


-def generate_sample_ep(multi=None, abd=False, sports=False, anime=False):
+def generate_sample_ep(multi=None, abd=False, sports=False, anime_type=None):
 # make a fake episode object
 ep = TVEpisode(2, 3, 3, "Ep Name")

@@ -215,31 +227,44 @@ def generate_sample_ep(multi=None, abd=False, sports=False, anime=False):
 elif sports:
 ep._release_name = 'Show.Name.100.Fighter.vs.Fighter.HDTV.XviD-RLSGROUP'
 ep.show.sports = 1
-elif anime:
-ep._release_name = 'Show.Name.S02E03.HDTV.XviD-RLSGROUP'
-ep.show.anime = 1
 else:
-ep._release_name = 'Show.Name.S02E03.HDTV.XviD-RLSGROUP'
+if anime_type != 3:
+ep.show.anime = 1
+ep._release_name = 'Show.Name.003.HDTV.XviD-RLSGROUP'
+else:
+ep._release_name = 'Show.Name.S02E03.HDTV.XviD-RLSGROUP'

 if multi != None:
 ep._name = "Ep Name (1)"
-ep._release_name = 'Show.Name.S02E03E04E05.HDTV.XviD-RLSGROUP'

-secondEp = TVEpisode(2, 4, 4, "Ep Name (2)")
+if anime_type != 3:
-secondEp._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV)
+ep.show.anime = 1
-secondEp._release_name = ep._release_name

-thirdEp = TVEpisode(2, 5, 5, "Ep Name (3)")
+ep._release_name = 'Show.Name.003-004.HDTV.XviD-RLSGROUP'
-thirdEp._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV)
-thirdEp._release_name = ep._release_name

-ep.relatedEps.append(secondEp)
+secondEp = TVEpisode(2, 4, 4, "Ep Name (2)")
-ep.relatedEps.append(thirdEp)
+secondEp._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV)
+secondEp._release_name = ep._release_name
+
+ep.relatedEps.append(secondEp)
+else:
+ep._release_name = 'Show.Name.S02E03E04E05.HDTV.XviD-RLSGROUP'
+
+secondEp = TVEpisode(2, 4, 4, "Ep Name (2)")
+secondEp._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV)
+secondEp._release_name = ep._release_name
+
+thirdEp = TVEpisode(2, 5, 5, "Ep Name (3)")
+thirdEp._status = Quality.compositeStatus(DOWNLOADED, Quality.HDTV)
+thirdEp._release_name = ep._release_name
+
+ep.relatedEps.append(secondEp)
+ep.relatedEps.append(thirdEp)

 return ep


-def test_name(pattern, multi=None, abd=False, sports=False, anime=False):
+def test_name(pattern, multi=None, abd=False, sports=False, anime_type=None):
-ep = generate_sample_ep(multi, abd, sports, anime)
+ep = generate_sample_ep(multi, abd, sports, anime_type)

-return {'name': ep.formatted_filename(pattern, multi), 'dir': ep.formatted_dir(pattern, multi)}
+return {'name': ep.formatted_filename(pattern, multi, anime_type), 'dir': ep.formatted_dir(pattern, multi)}
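With the changes above, validate_name decides what to compare based on the sample type: air-by-date and sports episodes are checked by date, anime patterns (anime_type other than 3) by the parsed absolute numbers, and everything else by season/episode. A compressed sketch of that decision order, using plain dicts as illustrative stand-ins for the real ParseResult and TVEpisode objects:

    def pattern_round_trips(result, ep, abd=False, sports=False, anime_type=3):
        # result/ep are made-up stand-ins, not SickRage's actual classes.
        if abd:
            return result['air_date'] == ep['airdate']
        if sports:
            return result['sports_event_date'] == ep['airdate']
        if anime_type != 3:
            wanted = [e['absolute_number'] for e in [ep] + ep['related']]
            return not result['ab_episode_numbers'] or result['ab_episode_numbers'] == wanted
        wanted = [e['episode'] for e in [ep] + ep['related']]
        return result['season_number'] == ep['season'] and result['episode_numbers'] == wanted

    ep = {'season': 2, 'episode': 3, 'absolute_number': 3, 'airdate': None, 'related': []}
    parsed = {'ab_episode_numbers': [3], 'season_number': None, 'episode_numbers': [],
              'air_date': None, 'sports_event_date': None}
    print(pattern_round_trips(parsed, ep, anime_type=1))   # True  - absolute number round-trips
    print(pattern_round_trips(parsed, ep, anime_type=3))   # False - season/episode did not round-trip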
@@ -498,7 +498,7 @@ class PostProcessor(object):
 season = parse_result.season_number
 episodes = parse_result.episode_numbers

-to_return = (parse_result.show, season, episodes, None)
+to_return = (parse_result.show, season, episodes, parse_result.quality)

 self._finalize(parse_result)
 return to_return
@@ -284,7 +284,7 @@ class GenericProvider:
 logger.log(u"Unable to parse the filename " + title + " into a valid episode", logger.WARNING)
 continue

-quality = self.getQuality(item, parse_result.is_anime)
+quality = parse_result.quality

 if not (self.show.air_by_date or self.show.sports):
 if search_mode == 'sponly' and len(parse_result.episode_numbers):
@@ -99,8 +99,7 @@ def get_scene_absolute_numbering(indexer_id, indexer, absolute_number, fallback_
 (so the return values will always be set)

 @param indexer_id: int
-@param season: int
+@param absolute_number: int
-@param episode: int
 @param fallback_to_xem: bool If set (the default), check xem for matches if there is no local scene numbering
 @return: (int, int) a tuple with (season, episode)
 """
@@ -258,9 +257,8 @@ def find_xem_absolute_numbering(indexer_id, indexer, absolute_number):
 Refreshes/Loads as needed.

 @param indexer_id: int
-@param season: int
+@param absolute_number: int
-@param episode: int
+@return: int
-@return: (int, int) a tuple of scene_season, scene_episode, or None if there is no special mapping.
 """
 if indexer_id is None or absolute_number is None:
 return absolute_number
@@ -313,9 +311,8 @@ def get_indexer_absolute_numbering_for_xem(indexer_id, indexer, sceneAbsoluteNum
 Reverse of find_xem_numbering: lookup a tvdb season and episode using scene numbering

 @param indexer_id: int
-@param sceneSeason: int
+@param sceneAbsoluteNumber: int
-@param sceneEpisode: int
+@return: int
-@return: (int, int) a tuple of (season, episode)
 """
 if indexer_id is None or sceneAbsoluteNumber is None:
 return sceneAbsoluteNumber

sickbeard/tv.py
@@ -52,6 +52,7 @@ from common import DOWNLOADED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, ARCHIVE
 from common import NAMING_DUPLICATE, NAMING_EXTEND, NAMING_LIMITED_EXTEND, NAMING_SEPARATED_REPEAT, \
 NAMING_LIMITED_EXTEND_E_PREFIXED


 class TVShow(object):
 def __init__(self, indexer, indexerid, lang=""):

@@ -200,7 +201,7 @@ class TVShow(object):
 ep = None

 # if we get an anime get the real season and episode
-if self.is_anime and not self.is_scene and absolute_number and not season and not episode:
+if self.is_anime and absolute_number and not season and not episode:
 myDB = db.DBConnection()
 sql = "SELECT * FROM tv_episodes WHERE showid = ? and absolute_number = ? and season != 0"
 sqlResults = myDB.select(sql, [self.indexerid, absolute_number])
||||||
@ -238,13 +239,13 @@ class TVShow(object):
|
|||||||
|
|
||||||
# get scene absolute numbering
|
# get scene absolute numbering
|
||||||
ep.scene_absolute_number = sickbeard.scene_numbering.get_scene_absolute_numbering(self.indexerid,
|
ep.scene_absolute_number = sickbeard.scene_numbering.get_scene_absolute_numbering(self.indexerid,
|
||||||
self.indexer,
|
self.indexer,
|
||||||
ep.absolute_number)
|
ep.absolute_number)
|
||||||
|
|
||||||
# get scene season and episode numbering
|
# get scene season and episode numbering
|
||||||
ep.scene_season, ep.scene_episode = sickbeard.scene_numbering.get_scene_numbering(self.indexerid,
|
ep.scene_season, ep.scene_episode = sickbeard.scene_numbering.get_scene_numbering(self.indexerid,
|
||||||
self.indexer,
|
self.indexer,
|
||||||
season, episode)
|
season, episode)
|
||||||
|
|
||||||
if ep != None:
|
if ep != None:
|
||||||
self.episodes[season][episode] = ep
|
self.episodes[season][episode] = ep
|
||||||
@@ -891,23 +892,13 @@ class TVShow(object):
 imdb_info[key] = imdbTv.get(key.replace('_', ' '))

 # Filter only the value
-if imdb_info['runtimes']:
+imdb_info['runtimes'] = re.search('\d+', imdb_info['runtimes']).group(0) or self.runtime
-imdb_info['runtimes'] = re.search('\d+', imdb_info['runtimes']).group(0)
+imdb_info['akas'] = '|'.join(imdb_info['akas']) or ''
-else:
-imdb_info['runtimes'] = self.runtime

-if imdb_info['akas']:
+# Join all genres in a string
-imdb_info['akas'] = '|'.join(imdb_info['akas'])
+imdb_info['genres'] = '|'.join(imdb_info['genres']) or ''
-else:
-imdb_info['akas'] = ''

-# Join all genres in a string
+# Get only the production country certificate if any
-if imdb_info['genres']:
-imdb_info['genres'] = '|'.join(imdb_info['genres'])
-else:
-imdb_info['genres'] = ''

-# Get only the production country certificate if any
 if imdb_info['certificates'] and imdb_info['countries']:
 dct = {}
 try:
@@ -921,11 +912,7 @@ class TVShow(object):
 else:
 imdb_info['certificates'] = ''

-if imdb_info['country_codes']:
+imdb_info['country_codes'] = '|'.join(imdb_info['country_codes']) or ''
-imdb_info['country_codes'] = '|'.join(imdb_info['country_codes'])
-else:
-imdb_info['country_codes'] = ''

 imdb_info['last_update'] = datetime.date.today().toordinal()

 # Rename dict keys without spaces for DB upsert
@@ -1513,13 +1500,13 @@ class TVEpisode(object):

 # does one now a better way to test for NULL in the db field ?
 if sqlResults[0]["scene_season"]:
-self.scene_season = int(sqlResults[0]["scene_season"])
+self.scene_season = int(sqlResults[0]["scene_season"] or 0)

 if sqlResults[0]["scene_episode"]:
-self.scene_episode = int(sqlResults[0]["scene_episode"])
+self.scene_episode = int(sqlResults[0]["scene_episode"] or 0)

 if sqlResults[0]["scene_absolute_number"]:
-self.scene_absolute_number = int(sqlResults[0]["scene_absolute_number"])
+self.scene_absolute_number = int(sqlResults[0]["scene_absolute_number"] or 0)

 if sqlResults[0]["release_name"] is not None:
 self.release_name = sqlResults[0]["release_name"]
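The `or 0` added to the three scene-number conversions makes the int() call tolerate a NULL (None) or otherwise empty column value instead of raising. In isolation:

    row = {'scene_season': None, 'scene_episode': 4}    # made-up row standing in for sqlResults[0]
    # int(row['scene_season'])              # TypeError on a NULL column
    print(int(row['scene_season'] or 0))    # 0
    print(int(row['scene_episode'] or 0))   # 4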
@ -1855,18 +1842,21 @@ class TVEpisode(object):
                "location = ?, file_size = ?, release_name = ?, is_proper = ?, showid = ?, season = ?, episode = ?, "
                "absolute_number = ? WHERE episode_id = ?",
                [self.indexerid, self.indexer, self.name, self.description, ",".join([sub for sub in self.subtitles]),
-                self.subtitles_searchcount, self.subtitles_lastsearch, self.airdate.toordinal(), self.hasnfo, self.hastbn,
-                self.status, self.location, self.file_size,self.release_name, self.is_proper, self.show.indexerid,
+                self.subtitles_searchcount, self.subtitles_lastsearch, self.airdate.toordinal(), self.hasnfo,
+                self.hastbn,
+                self.status, self.location, self.file_size, self.release_name, self.is_proper, self.show.indexerid,
                 self.season, self.episode, self.absolute_number, epID]]
        else:
            # use a custom insert method to get the data into the DB.
            return [
                "INSERT OR IGNORE INTO tv_episodes (episode_id, indexerid, indexer, name, description, subtitles, subtitles_searchcount, subtitles_lastsearch, airdate, hasnfo, hastbn, status, location, file_size, release_name, is_proper, showid, season, episode, absolute_number) VALUES "
                "((SELECT episode_id FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?),?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?);",
-               [self.show.indexerid, self.season, self.episode, self.indexerid, self.indexer, self.name, self.description,
+               [self.show.indexerid, self.season, self.episode, self.indexerid, self.indexer, self.name,
+                self.description,
                 ",".join([sub for sub in self.subtitles]), self.subtitles_searchcount, self.subtitles_lastsearch,
                 self.airdate.toordinal(), self.hasnfo, self.hastbn, self.status, self.location, self.file_size,
-                self.release_name, self.is_proper, self.show.indexerid, self.season, self.episode, self.absolute_number]]
+                self.release_name, self.is_proper, self.show.indexerid, self.season, self.episode,
+                self.absolute_number]]

    def saveToDB(self, forceSave=False):
        """
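The INSERT OR IGNORE statement above is the insert half of an update-then-insert upsert: the subquery resolves the existing episode_id, so a row that already exists collides with the primary key and the insert is silently dropped, while a genuinely new episode gets a fresh rowid. A self-contained sketch of the same idea against a cut-down schema (illustrative only, not the real tv_episodes table):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE tv_episodes (episode_id INTEGER PRIMARY KEY, showid INTEGER, "
             "season INTEGER, episode INTEGER, name TEXT)")

def upsert_episode(showid, season, episode, name):
    # UPDATE first; if the row already exists this is the write that sticks.
    conn.execute("UPDATE tv_episodes SET name = ? WHERE showid = ? AND season = ? AND episode = ?",
                 [name, showid, season, episode])
    # INSERT OR IGNORE second; the subquery resolves the existing episode_id,
    # so an existing row hits the PRIMARY KEY and the insert is ignored.
    conn.execute("INSERT OR IGNORE INTO tv_episodes (episode_id, showid, season, episode, name) VALUES "
                 "((SELECT episode_id FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?), ?, ?, ?, ?)",
                 [showid, season, episode, showid, season, episode, name])

upsert_episode(1, 1, 1, "Pilot")
upsert_episode(1, 1, 1, "Pilot (renamed)")   # second call updates, does not duplicate
print(conn.execute("SELECT COUNT(*) FROM tv_episodes").fetchone()[0])  # -> 1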
@ -2054,7 +2044,7 @@ class TVEpisode(object):
            '%XE': str(self.scene_episode),
            '%0XE': '%02d' % self.scene_episode,
            '%AB': '%(#)03d' % {'#': self.absolute_number},
-           '%XA': '%(#)03d' % {'#': self.scene_absolute_number},
+           '%XAB': '%(#)03d' % {'#': self.scene_absolute_number},
            '%RN': release_name(self.release_name),
            '%RG': release_group(self.release_name),
            '%AD': str(self.airdate).replace('-', ' '),
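The replace map pairs naming tokens with pre-formatted values; %AB and the renamed %XAB carry the (scene) absolute number zero-padded to three digits via the %(#)03d dictionary format. A tiny sketch of the substitution step, with hypothetical values standing in for a real TVEpisode; replacing longer tokens first is just one simple way to keep %AB from matching inside %XAB:

# Hypothetical values; only the token names are taken from the diff above.
replace_map = {
    '%AB': '%(#)03d' % {'#': 42},   # absolute number, zero-padded to 3 digits -> '042'
    '%XAB': '%(#)03d' % {'#': 40},  # scene absolute number -> '040'
    '%S.N': 'Show.Name',
    '%E.N': 'Episode.Name',
}

pattern = '%S.N.%AB.%E.N'
for token in sorted(replace_map, key=len, reverse=True):  # longest tokens first so %XAB wins over %AB
    pattern = pattern.replace(token, replace_map[token])

print(pattern)  # -> 'Show.Name.042.Episode.Name'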
@ -2084,7 +2074,7 @@ class TVEpisode(object):

        return result_name

-   def _format_pattern(self, pattern=None, multi=None):
+   def _format_pattern(self, pattern=None, multi=None, anime_type=None):
        """
        Manipulates an episode naming pattern and then fills the template in
        """
@ -2095,6 +2085,9 @@ class TVEpisode(object):
        if multi == None:
            multi = sickbeard.NAMING_MULTI_EP

+       if anime_type == None:
+           anime_type = sickbeard.NAMING_ANIME
+
        replace_map = self._replace_map()

        result_name = pattern
@ -2104,9 +2097,9 @@ class TVEpisode(object):
        if self.show.air_by_date or self.show.sports:
            result_name = result_name.replace('%RN', '%S.N.%A.D.%E.N-SiCKRAGE')
            result_name = result_name.replace('%rn', '%s.n.%A.D.%e.n-sickrage')
-       elif self.show.is_anime:
-           result_name = result_name.replace('%RN', '%S.N.%AN.%E.N-SiCKRAGE')
-           result_name = result_name.replace('%rn', '%s.n.%an.%e.n-sickrage')
+       elif self.show.anime:
+           result_name = result_name.replace('%RN', '%S.N.%AB.%E.N-SiCKRAGE')
+           result_name = result_name.replace('%rn', '%s.n.%ab.%e.n-sickrage')
        else:
            result_name = result_name.replace('%RN', '%S.N.S%0SE%0E.%E.N-SiCKRAGE')
            result_name = result_name.replace('%rn', '%s.n.s%0se%0e.%e.n-sickrage')
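These branches synthesize a stand-in release name when %RN has nothing real to expand to (the guard sits outside this hunk): air-by-date shows fall back to the air date, anime shows to the absolute number token %AB, and everything else to S%0SE%0E. A hedged sketch of that dispatch, using a stand-in show object rather than the real TVShow class:

# Illustrative only: a stand-in show object, not the real TVShow class.
class Show(object):
    def __init__(self, air_by_date=False, sports=False, anime=False):
        self.air_by_date, self.sports, self.anime = air_by_date, sports, anime

def fallback_release_token(show):
    # Mirrors the branch above: pick the synthetic %RN shape per show type.
    if show.air_by_date or show.sports:
        return '%S.N.%A.D.%E.N-SiCKRAGE'
    elif show.anime:
        return '%S.N.%AB.%E.N-SiCKRAGE'
    else:
        return '%S.N.S%0SE%0E.%E.N-SiCKRAGE'

print(fallback_release_token(Show(anime=True)))   # -> %S.N.%AB.%E.N-SiCKRAGE
print(fallback_release_token(Show()))             # -> %S.N.S%0SE%0E.%E.N-SiCKRAGE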
@ -2195,16 +2188,39 @@ class TVEpisode(object):

                    ep_string += other_ep._format_string(ep_format.upper(), other_ep._replace_map())

-               if season_ep_match:
+               if self.show.anime and anime_type != 3:
+                   if self.absolute_number == 0:
+                       curAbsolute_number = self.episode
+                   else:
+                       curAbsolute_number = self.absolute_number
+
+                   if self.season != 0:  # dont set absolute numbers if we are on specials !
+                       if anime_type == 1:  # this crazy person wants both ! (note: +=)
+                           ep_string += sep + "%(#)03d" % {
+                               "#": curAbsolute_number}
+                       elif anime_type == 2:  # total anime freak only need the absolute number ! (note: =)
+                           ep_string = "%(#)03d" % {"#": curAbsolute_number}
+
+                       for relEp in self.relatedEps:
+                           if relEp.absolute_number != 0:
+                               ep_string += '-' + "%(#)03d" % {"#": relEp.absolute_number}
+                           else:
+                               ep_string += '-' + "%(#)03d" % {"#": relEp.episode}
+
+               regex_replacement = None
+               if anime_type == 2:
+                   regex_replacement = r'\g<pre_sep>' + ep_string + r'\g<post_sep>'
+               elif season_ep_match:
                    regex_replacement = r'\g<pre_sep>\g<2>\g<3>' + ep_string + r'\g<post_sep>'
                elif ep_only_match:
                    regex_replacement = ep_string

-               # fill out the template for this piece and then insert this piece into the actual pattern
-               cur_name_group_result = re.sub('(?i)(?x)' + regex_used, regex_replacement, cur_name_group)
-               # cur_name_group_result = cur_name_group.replace(ep_format, ep_string)
-               # logger.log(u"found "+ep_format+" as the ep pattern using "+regex_used+" and replaced it with "+regex_replacement+" to result in "+cur_name_group_result+" from "+cur_name_group, logger.DEBUG)
-               result_name = result_name.replace(cur_name_group, cur_name_group_result)
+               if regex_replacement:
+                   # fill out the template for this piece and then insert this piece into the actual pattern
+                   cur_name_group_result = re.sub('(?i)(?x)' + regex_used, regex_replacement, cur_name_group)
+                   # cur_name_group_result = cur_name_group.replace(ep_format, ep_string)
+                   # logger.log(u"found "+ep_format+" as the ep pattern using "+regex_used+" and replaced it with "+regex_replacement+" to result in "+cur_name_group_result+" from "+cur_name_group, logger.DEBUG)
+                   result_name = result_name.replace(cur_name_group, cur_name_group_result)

        result_name = self._format_string(result_name, replace_map)
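This new block is the core of the anime naming modes: anime_type 1 appends the absolute number to the season/episode string, anime_type 2 replaces it outright, anime_type 3 (and season-0 specials) leave the normal SxxEyy form alone, and related episodes in a multi-episode file are chained on with dashes. A standalone sketch of just that numbering decision, using plain integers instead of TVEpisode objects:

def build_ep_string(season, episode, absolute_number, related_absolutes, anime_type, sep='-'):
    # Start from the normal season/episode form, e.g. S01E05.
    ep_string = 'S%02dE%02d' % (season, episode)

    if anime_type != 3 and season != 0:             # type 3 / specials: keep SxxEyy only
        cur_absolute = absolute_number or episode   # fall back to the episode number
        if anime_type == 1:                         # both forms: append the absolute number
            ep_string += sep + '%(#)03d' % {'#': cur_absolute}
        elif anime_type == 2:                       # absolute only: replace the string outright
            ep_string = '%(#)03d' % {'#': cur_absolute}
        for rel in related_absolutes:               # chain multi-episode files with dashes
            ep_string += '-' + '%(#)03d' % {'#': rel}
    return ep_string

print(build_ep_string(1, 5, 17, [], 1))      # -> 'S01E05-017'
print(build_ep_string(1, 5, 17, [18], 2))    # -> '017-018'
print(build_ep_string(1, 5, 17, [], 3))      # -> 'S01E05'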
@ -2251,7 +2267,7 @@ class TVEpisode(object):
        else:
            return self._format_pattern(os.sep.join(name_groups[:-1]), multi)

-   def formatted_filename(self, pattern=None, multi=None):
+   def formatted_filename(self, pattern=None, multi=None, anime_type=None):
        """
        Just the filename of the episode, formatted based on the naming settings
        """
@ -2268,7 +2284,7 @@ class TVEpisode(object):
        # split off the dirs only, if they exist
        name_groups = re.split(r'[\\/]', pattern)

-       return self._format_pattern(name_groups[-1], multi)
+       return self._format_pattern(name_groups[-1], multi, anime_type)

    def rename(self):
        """
@ -28,6 +28,7 @@ import datetime
import random

from Cheetah.Template import Template
+from cherrypy.lib.static import serve_fileobj
import cherrypy
import cherrypy.lib
import cherrypy.lib.cptools
@ -82,8 +83,10 @@ def _handle_reverse_proxy():
    if sickbeard.HANDLE_REVERSE_PROXY:
        cherrypy.lib.cptools.proxy()

+
cherrypy.tools.handle_reverse_proxy = cherrypy.Tool('before_handler', _handle_reverse_proxy)

+
class PageTemplate(Template):
    def __init__(self, *args, **KWs):
        KWs['file'] = os.path.join(sickbeard.PROG_DIR, "gui/" + sickbeard.GUI_NAME + "/interfaces/default/",
@ -205,7 +208,7 @@ class ManageSearches:
    @cherrypy.expose
    def index(self):
        t = PageTemplate(file="manage_manageSearches.tmpl")
-       #t.backlogPI = sickbeard.backlogSearchScheduler.action.getProgressIndicator()
+       # t.backlogPI = sickbeard.backlogSearchScheduler.action.getProgressIndicator()
        t.backlogPaused = sickbeard.searchQueueScheduler.action.is_backlog_paused()  # @UndefinedVariable
        t.backlogRunning = sickbeard.searchQueueScheduler.action.is_backlog_in_progress()  # @UndefinedVariable
        t.dailySearchStatus = sickbeard.dailySearchScheduler.action.amActive  # @UndefinedVariable
@ -638,7 +641,8 @@ class Manage:
        return _munge(t)

    @cherrypy.expose
-   def massEditSubmit(self, paused=None, anime=None, scene=None, flatten_folders=None, quality_preset=False, subtitles=None,
+   def massEditSubmit(self, paused=None, anime=None, scene=None, flatten_folders=None, quality_preset=False,
+                      subtitles=None,
                       anyQualities=[], bestQualities=[], toEdit=None, *args, **kwargs):

        dir_map = {}
@ -884,7 +888,7 @@ class History:

        myDB = db.DBConnection()

-       #sqlResults = myDB.select("SELECT h.*, show_name, name FROM history h, tv_shows s, tv_episodes e WHERE h.showid=s.indexer_id AND h.showid=e.showid AND h.season=e.season AND h.episode=e.episode ORDER BY date DESC LIMIT "+str(numPerPage*(p-1))+", "+str(numPerPage))
+       # sqlResults = myDB.select("SELECT h.*, show_name, name FROM history h, tv_shows s, tv_episodes e WHERE h.showid=s.indexer_id AND h.showid=e.showid AND h.season=e.season AND h.episode=e.episode ORDER BY date DESC LIMIT "+str(numPerPage*(p-1))+", "+str(numPerPage))
        if limit == "0":
            sqlResults = myDB.select(
                "SELECT h.*, show_name FROM history h, tv_shows s WHERE h.showid=s.indexer_id ORDER BY date DESC")
@ -926,9 +930,9 @@ class History:
            else:
                index = [i for i, dict in enumerate(compact) \
                         if dict['show_id'] == sql_result['showid'] \
                         and dict['season'] == sql_result['season'] \
                         and dict['episode'] == sql_result['episode']
                         and dict['quality'] == sql_result['quality']][0]

                action = {}
                history = compact[index]
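The comprehension above is a manual group-by: each raw history row is matched against an existing compact entry with the same show, season, episode and quality, and either merged into it or appended as a new one. A short sketch of the same grouping with a dictionary keyed on that tuple (hypothetical field values, same idea):

# Hypothetical rows shaped like the history query results.
rows = [
    {'showid': 1, 'season': 1, 'episode': 2, 'quality': 4, 'action': 'snatched'},
    {'showid': 1, 'season': 1, 'episode': 2, 'quality': 4, 'action': 'downloaded'},
    {'showid': 1, 'season': 1, 'episode': 3, 'quality': 4, 'action': 'snatched'},
]

compact = {}
for row in rows:
    key = (row['showid'], row['season'], row['episode'], row['quality'])
    # One compact entry per (show, season, episode, quality); actions pile up underneath it.
    compact.setdefault(key, {'key': key, 'actions': []})['actions'].append(row['action'])

for entry in compact.values():
    print(entry['key'], entry['actions'])
# e.g. (1, 1, 2, 4) ['snatched', 'downloaded'] and (1, 1, 3, 4) ['snatched']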
@ -1267,7 +1271,6 @@ class ConfigPostProcessing:
        sickbeard.NAMING_CUSTOM_ABD = config.checkbox_to_value(naming_custom_abd)
        sickbeard.NAMING_CUSTOM_SPORTS = config.checkbox_to_value(naming_custom_sports)
        sickbeard.NAMING_STRIP_YEAR = config.checkbox_to_value(naming_strip_year)
-       sickbeard.NAMING_ANIME = config.checkbox_to_value(naming_anime)
        sickbeard.USE_FAILED_DOWNLOADS = config.checkbox_to_value(use_failed_downloads)
        sickbeard.DELETE_FAILED = config.checkbox_to_value(delete_failed)
        sickbeard.SKIP_REMOVED_FILES = config.checkbox_to_value(skip_removed_files)
@ -1288,12 +1291,16 @@ class ConfigPostProcessing:
        sickbeard.metadata_provider_dict['TIVO'].set_config(sickbeard.METADATA_TIVO)
        sickbeard.metadata_provider_dict['Mede8er'].set_config(sickbeard.METADATA_MEDE8ER)

-       if self.isNamingValid(naming_pattern, naming_multi_ep) != "invalid":
+       if self.isNamingValid(naming_pattern, naming_multi_ep, anime_type=naming_anime) != "invalid":
            sickbeard.NAMING_PATTERN = naming_pattern
            sickbeard.NAMING_MULTI_EP = int(naming_multi_ep)
+           sickbeard.NAMING_ANIME = int(naming_anime)
            sickbeard.NAMING_FORCE_FOLDERS = naming.check_force_season_folders()
        else:
-           results.append("You tried saving an invalid naming config, not saving your naming settings")
+           if int(naming_anime) in [1, 2]:
+               results.append("You tried saving an invalid anime naming config, not saving your naming settings")
+           else:
+               results.append("You tried saving an invalid naming config, not saving your naming settings")

        if self.isNamingValid(naming_abd_pattern, None, abd=True) != "invalid":
            sickbeard.NAMING_ABD_PATTERN = naming_abd_pattern
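The save path is validate-then-commit: NAMING_PATTERN, NAMING_MULTI_EP and the new NAMING_ANIME are only written once isNamingValid accepts the pattern for the selected modes, and a rejected anime pattern now gets its own error message. A compact sketch of that flow with a stand-in validator (hypothetical names, not the real naming module):

# Stand-in validator; the real check lives in sickbeard's naming helpers.
def is_naming_valid(pattern, multi=None, anime_type=None):
    return "valid" if "%" in pattern else "invalid"

def save_naming(settings, pattern, multi_ep, anime_type, results):
    # Only commit the new values once the pattern has passed validation.
    if is_naming_valid(pattern, multi_ep, anime_type) != "invalid":
        settings['NAMING_PATTERN'] = pattern
        settings['NAMING_MULTI_EP'] = int(multi_ep)
        settings['NAMING_ANIME'] = int(anime_type)
    else:
        if int(anime_type) in [1, 2]:
            results.append("You tried saving an invalid anime naming config, not saving your naming settings")
        else:
            results.append("You tried saving an invalid naming config, not saving your naming settings")

settings, errors = {}, []
save_naming(settings, '%SN - %Sx%0E - %EN', '1', '3', errors)
print(settings, errors)  # pattern accepted, no errors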
@ -1320,22 +1327,31 @@ class ConfigPostProcessing:
        redirect("/config/postProcessing/")

    @cherrypy.expose
-   def testNaming(self, pattern=None, multi=None, abd=False, sports=False, anime=None):
+   def testNaming(self, pattern=None, multi=None, abd=False, sports=False, anime_type=None):

        if multi is not None:
            multi = int(multi)

-       result = naming.test_name(pattern, multi, abd, sports, anime)
+       if anime_type is not None:
+           anime_type = int(anime_type)
+
+       result = naming.test_name(pattern, multi, abd, sports, anime_type)

        result = ek.ek(os.path.join, result['dir'], result['name'])

        return result

    @cherrypy.expose
-   def isNamingValid(self, pattern=None, multi=None, abd=False, sports=False):
+   def isNamingValid(self, pattern=None, multi=None, abd=False, sports=False, anime_type=None):
        if pattern is None:
            return "invalid"

+       if multi is not None:
+           multi = int(multi)
+
+       if anime_type is not None:
+           anime_type = int(anime_type)
+
        # air by date shows just need one check, we don't need to worry about season folders
        if abd:
            is_valid = naming.check_valid_abd_naming(pattern)
@ -1348,10 +1364,10 @@ class ConfigPostProcessing:

        else:
            # check validity of single and multi ep cases for the whole path
-           is_valid = naming.check_valid_naming(pattern, multi)
+           is_valid = naming.check_valid_naming(pattern, multi, anime_type)

            # check validity of single and multi ep cases for only the file name
-           require_season_folders = naming.check_force_season_folders(pattern, multi)
+           require_season_folders = naming.check_force_season_folders(pattern, multi, anime_type)

        if is_valid and not require_season_folders:
            return "valid"
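Both endpoints receive their arguments as strings from the query string, so multi and anime_type are coerced to ints before the naming helpers see them; the verdict then folds two checks (pattern valid, season folders forced) into the strings the page expects. A simplified sketch with stand-in checks in place of naming.check_valid_naming and naming.check_force_season_folders (the real endpoint also distinguishes the forced-season-folders case):

# Stand-in checks; the real ones live in sickbeard's naming module.
def check_valid_naming(pattern, multi=None, anime_type=None):
    return '%' in pattern

def check_force_season_folders(pattern, multi=None, anime_type=None):
    return '%S' not in pattern  # pretend rule: no season token means folders are forced

def is_naming_valid(pattern, multi=None, anime_type=None):
    if pattern is None:
        return "invalid"
    if multi is not None:
        multi = int(multi)            # query-string values arrive as strings
    if anime_type is not None:
        anime_type = int(anime_type)

    is_valid = check_valid_naming(pattern, multi, anime_type)
    require_season_folders = check_force_season_folders(pattern, multi, anime_type)

    if is_valid and not require_season_folders:
        return "valid"
    return "invalid"

print(is_naming_valid('%SN - %Sx%0E - %EN', multi='1', anime_type='3'))  # -> valid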
@ -1584,7 +1600,8 @@ class ConfigProviders:
            curProvider, curEnabled = curProviderStr.split(':')
            curEnabled = config.to_int(curEnabled)

-           curProvObj = [x for x in sickbeard.providers.sortedProviderList() if x.getID() == curProvider and hasattr(x, 'enabled')]
+           curProvObj = [x for x in sickbeard.providers.sortedProviderList() if
+                         x.getID() == curProvider and hasattr(x, 'enabled')]
            if curProvObj:
                curProvObj[0].enabled = bool(curEnabled)

@ -2013,8 +2030,8 @@ class ConfigSubtitles:

        redirect("/config/subtitles/")

-class ConfigAnime:
+
+class ConfigAnime:
    @cherrypy.expose
    def index(self):

@ -2023,7 +2040,8 @@ class ConfigAnime:
        return _munge(t)

    @cherrypy.expose
-   def saveAnime(self, use_anidb=None, anidb_username=None, anidb_password=None, anidb_use_mylist=None, split_home=None):
+   def saveAnime(self, use_anidb=None, anidb_username=None, anidb_password=None, anidb_use_mylist=None,
+                 split_home=None):

        results = []

@ -2054,12 +2072,13 @@ class ConfigAnime:
            for x in results:
                logger.log(x, logger.ERROR)
            ui.notifications.error('Error(s) Saving Configuration',
                                   '<br />\n'.join(results))
        else:
-           ui.notifications.message('Configuration Saved', ek.ek(os.path.join, sickbeard.CONFIG_FILE) )
+           ui.notifications.message('Configuration Saved', ek.ek(os.path.join, sickbeard.CONFIG_FILE))

        redirect("/config/anime/")


class Config:
    @cherrypy.expose
    def index(self):
@ -2082,6 +2101,7 @@ class Config:

    anime = ConfigAnime()


def haveXBMC():
    return sickbeard.USE_XBMC and sickbeard.XBMC_UPDATE_LIBRARY
@ -2179,6 +2199,8 @@ class NewHomeAddShows:
        if not lang or lang == 'null':
            lang = "en"

+       search_term = search_term.encode('utf-8')
+
        results = {}
        final_results = []

@ -2190,7 +2212,7 @@ class NewHomeAddShows:
            t = sickbeard.indexerApi(indexer).indexer(**lINDEXER_API_PARMS)

            logger.log("Searching for Show with searchterm: %s on Indexer: %s" % (
                search_term, sickbeard.indexerApi(indexer).name), logger.DEBUG)
            try:
                # add search results
                results.setdefault(indexer, []).extend(t[search_term])
@ -2199,8 +2221,7 @@ class NewHomeAddShows:

        map(final_results.extend,
            ([[sickbeard.indexerApi(id).name, id, sickbeard.indexerApi(id).config["show_url"], int(show['id']),
-              show['seriesname'], show['firstaired']] for show in shows] for id, shows in
-             results.items()))
+              show['seriesname'], show['firstaired']] for show in shows] for id, shows in results.items()))

        lang_id = sickbeard.indexerApi().config['langabbv_to_id'][lang]
        return json.dumps({'results': final_results, 'langid': lang_id})
@ -2440,7 +2461,8 @@ class NewHomeAddShows:

        # add the show
        sickbeard.showQueueScheduler.action.addShow(indexer, indexer_id, show_dir, int(defaultStatus), newQuality,
-                                                   flatten_folders, subtitles, indexerLang, anime, scene)  # @UndefinedVariable
+                                                   flatten_folders, subtitles, indexerLang, anime,
+                                                   scene)  # @UndefinedVariable
        ui.notifications.message('Show added', 'Adding the specified show into ' + show_dir)

        return finishAddShow()
@ -2530,7 +2552,7 @@ class NewHomeAddShows:

ErrorLogsMenu = [
    {'title': 'Clear Errors', 'path': 'errorlogs/clearerrors/'},
-   #{ 'title': 'View Log', 'path': 'errorlogs/viewlog' },
+   # { 'title': 'View Log', 'path': 'errorlogs/viewlog' },
]

@ -2625,7 +2647,6 @@ class Home:
    def index(self):

        t = PageTemplate(file="home.tmpl")
-
        if sickbeard.ANIME_SPLIT_HOME:
            shows = []
            anime = []
@ -2634,10 +2655,10 @@ class Home:
                    anime.append(show)
                else:
                    shows.append(show)
-           t.showlists = [["Shows",shows],
-                          ["Anime",anime]]
+           t.showlists = [["Shows", shows],
+                          ["Anime", anime]]
        else:
-           t.showlists = [["Shows",sickbeard.showList]]
+           t.showlists = [["Shows", sickbeard.showList]]

        t.submenu = HomeMenu()
        return _munge(t)
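ANIME_SPLIT_HOME simply partitions the show list into two named groups before the template renders them, so anime gets its own table on the home page. A minimal sketch of the partitioning with stand-in show records instead of TVShow instances:

# Stand-in show records; the real objects are sickbeard TVShow instances.
class Show(object):
    def __init__(self, name, is_anime=False):
        self.name, self.is_anime = name, is_anime

show_list = [Show("Breaking Bad"), Show("One Piece", is_anime=True), Show("Fargo")]
anime_split_home = True

if anime_split_home:
    shows, anime = [], []
    for show in show_list:
        (anime if show.is_anime else shows).append(show)
    showlists = [["Shows", shows], ["Anime", anime]]
else:
    showlists = [["Shows", show_list]]

for title, group in showlists:
    print(title, [s.name for s in group])
# Shows ['Breaking Bad', 'Fargo']
# Anime ['One Piece']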
@ -2652,7 +2673,7 @@ class Home:

        connection, accesMsg = sab.getSabAccesMethod(host, username, password, apikey)
        if connection:
-           authed, authMsg = sab.testAuthentication(host, username, password, apikey) #@UnusedVariable
+           authed, authMsg = sab.testAuthentication(host, username, password, apikey)  # @UnusedVariable
            if authed:
                return "Success. Connected and authenticated"
            else:
@ -3071,10 +3092,11 @@ class Home:
                    anime.append(show)
                else:
                    shows.append(show)
-           t.sortedShowLists = [["Shows",sorted(shows, lambda x, y: cmp(titler(x.name), titler(y.name)))],
-                                ["Anime",sorted(anime, lambda x, y: cmp(titler(x.name), titler(y.name)))]]
+           t.sortedShowLists = [["Shows", sorted(shows, lambda x, y: cmp(titler(x.name), titler(y.name)))],
+                                ["Anime", sorted(anime, lambda x, y: cmp(titler(x.name), titler(y.name)))]]
        else:
-           t.sortedShowLists = [["Shows",sorted(sickbeard.showList, lambda x, y: cmp(titler(x.name), titler(y.name)))]]
+           t.sortedShowLists = [
+               ["Shows", sorted(sickbeard.showList, lambda x, y: cmp(titler(x.name), titler(y.name)))]]

        t.bwl = BlackAndWhiteList(showObj.indexerid)

@ -3209,7 +3231,7 @@ class Home:
        if type(exceptions_list) != list:
            exceptions_list = [exceptions_list]

-       #If directCall from mass_edit_update no scene exceptions handling
+       # If directCall from mass_edit_update no scene exceptions handling
        if directCall:
            do_update_exceptions = False
        else:
@ -3329,7 +3351,7 @@ class Home:
            except exceptions.CantRefreshException, e:
                errors.append("Unable to refresh this show:" + ex(e))
            # grab updated info from TVDB
-           #showObj.loadEpisodesFromIndexer()
+           # showObj.loadEpisodesFromIndexer()
            # rescan the episodes in the new folder
        except exceptions.NoNFOException:
            errors.append(
@ -3604,7 +3626,7 @@ class Home:
            return _genericMessage("Error", "Show not in show list")

        try:
-           show_loc = showObj.location #@UnusedVariable
+           show_loc = showObj.location  # @UnusedVariable
        except exceptions.ShowDirNotFoundException:
            return _genericMessage("Error", "Can't rename episodes when the show dir is missing.")

@ -3652,7 +3674,7 @@ class Home:
            return _genericMessage("Error", errMsg)

        try:
-           show_loc = show_obj.location #@UnusedVariable
+           show_loc = show_obj.location  # @UnusedVariable
        except exceptions.ShowDirNotFoundException:
            return _genericMessage("Error", "Can't rename episodes when the show dir is missing.")

@ -3705,7 +3727,7 @@ class Home:

        # return the correct json value
        if ep_queue_item.success:
-           #Find the quality class for the episode
+           # Find the quality class for the episode
            quality_class = Quality.qualityStrings[Quality.UNKNOWN]
            ep_status, ep_quality = Quality.splitCompositeStatus(ep_obj.status)
            for x in (SD, HD720p, HD1080p):
@ -3746,7 +3768,8 @@ class Home:
        return json.dumps({'result': status, 'subtitles': ','.join([x for x in ep_obj.subtitles])})

    @cherrypy.expose
-   def setSceneNumbering(self, show, indexer, forSeason=None, forEpisode=None, forAbsolute=None, sceneSeason=None, sceneEpisode=None, sceneAbsolute=None):
+   def setSceneNumbering(self, show, indexer, forSeason=None, forEpisode=None, forAbsolute=None, sceneSeason=None,
+                         sceneEpisode=None, sceneAbsolute=None):

        # sanitize:
        if forSeason in ['null', '']: forSeason = None
@ -3800,7 +3823,8 @@ class Home:
        if sceneSeason is not None: sceneSeason = int(sceneSeason)
        if sceneEpisode is not None: sceneEpisode = int(sceneEpisode)

-       set_scene_numbering(show, indexer, season=forSeason, episode=forEpisode, sceneSeason=sceneSeason, sceneEpisode=sceneEpisode)
+       set_scene_numbering(show, indexer, season=forSeason, episode=forEpisode, sceneSeason=sceneSeason,
+                           sceneEpisode=sceneEpisode)

        if showObj.is_anime:
            sn = get_scene_absolute_numbering(show, indexer, forAbsolute)
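setSceneNumbering receives everything as strings from the AJAX call, so the first job is normalising the JavaScript artefacts: 'null' and '' become None, and surviving values are cast to int before they reach set_scene_numbering. A small sketch of that sanitising step (hypothetical helper, same rules):

def sanitize_numbering_params(**params):
    """Turn AJAX string parameters into ints or None ('null'/'' -> None)."""
    cleaned = {}
    for name, value in params.items():
        if value in ('null', '', None):
            cleaned[name] = None
        else:
            cleaned[name] = int(value)
    return cleaned

cleaned = sanitize_numbering_params(forSeason='1', forEpisode='5', sceneSeason='null', sceneEpisode='')
print(cleaned['forSeason'], cleaned['sceneSeason'])  # -> 1 None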
@ -3826,7 +3850,7 @@ class Home:
            return json.dumps({'result': 'failure'})

        # create failed segment
-       segment = {season:[ep_obj]}
+       segment = {season: [ep_obj]}

        # make a queue item for it and put it on the queue
        ep_queue_item = search_queue.FailedQueueItem(ep_obj.show, segment)
@ -3838,7 +3862,7 @@ class Home:

        # return the correct json value
        if ep_queue_item.success:
-           #Find the quality class for the episode
+           # Find the quality class for the episode
            quality_class = Quality.qualityStrings[Quality.UNKNOWN]
            ep_status, ep_quality = Quality.splitCompositeStatus(ep_obj.status)
            for x in (SD, HD720p, HD1080p):
@ -3889,7 +3913,7 @@ class WebInterface:
    @cherrypy.expose
    def showPoster(self, show=None, which=None):

-       #Redirect initial poster/banner thumb to default images
+       # Redirect initial poster/banner thumb to default images
        if which[0:6] == 'poster':
            default_image_name = 'poster.png'
        else:
@ -4023,8 +4047,8 @@ class WebInterface:
        sql_results.sort(sorts[sickbeard.COMING_EPS_SORT])

        t = PageTemplate(file="comingEpisodes.tmpl")
        # paused_item = { 'title': '', 'path': 'toggleComingEpsDisplayPaused' }
        # paused_item['title'] = 'Hide Paused' if sickbeard.COMING_EPS_DISPLAY_PAUSED else 'Show Paused'
        paused_item = {'title': 'View Paused:', 'path': {'': ''}}
        paused_item['path'] = {'Hide': 'toggleComingEpsDisplayPaused'} if sickbeard.COMING_EPS_DISPLAY_PAUSED else {
            'Show': 'toggleComingEpsDisplayPaused'}