mirror of https://github.com/moparisthebest/SickRage
synced 2024-11-16 06:15:09 -05:00

commit 5dae4c86c9

    Merge remote-tracking branch 'upstream/dev' into dev

    Conflicts:
        SickBeard.py

 SickBeard.py | 119
@@ -2,20 +2,20 @@
 # Author: Nic Wolfe <nic@wolfeden.ca>
 # URL: http://code.google.com/p/sickbeard/
 #
-# This file is part of Sick Beard.
+# This file is part of SickRage.
 #
-# Sick Beard is free software: you can redistribute it and/or modify
+# SickRage is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
 # the Free Software Foundation, either version 3 of the License, or
 # (at your option) any later version.
 #
-# Sick Beard is distributed in the hope that it will be useful,
+# SickRage is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License
-# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
+# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
 
 # Check needed software dependencies to nudge users to fix their setup
 import sys
@@ -96,9 +96,10 @@ def daemonize():
     try:
         pid = os.fork()  # @UndefinedVariable - only available in UNIX
         if pid != 0:
-            sys.exit(0)
+            os._exit(0)
     except OSError, e:
-        raise RuntimeError("1st fork failed: %s [%d]" % (e.strerror, e.errno))
+        sys.stderr.write("fork #1 failed: %d (%s)\n" % (e.errno, e.strerror))
+        sys.exit(1)
 
     os.setsid()  # @UndefinedVariable - only available in UNIX
 
@@ -110,18 +111,33 @@ def daemonize():
     try:
         pid = os.fork()  # @UndefinedVariable - only available in UNIX
         if pid != 0:
-            sys.exit(0)
+            os._exit(0)
     except OSError, e:
-        raise RuntimeError("2nd fork failed: %s [%d]" % (e.strerror, e.errno))
-    dev_null = file('/dev/null', 'r')
-    os.dup2(dev_null.fileno(), sys.stdin.fileno())
+        sys.stderr.write("fork #2 failed: %d (%s)\n" % (e.errno, e.strerror))
+        sys.exit(1)
 
+    # Write pid
     if sickbeard.CREATEPID:
         pid = str(os.getpid())
-        logger.log(u"Writing PID " + pid + " to " + str(sickbeard.PIDFILE))
-        file(sickbeard.PIDFILE, 'w').write("%s\n" % pid)
+        logger.log(u"Writing PID: " + pid + " to " + str(sickbeard.PIDFILE))
+        try:
+            file(sickbeard.PIDFILE, 'w').write("%s\n" % pid)
+        except IOError, e:
+            logger.log_error_and_exit(
+                u"Unable to write PID file: " + sickbeard.PIDFILE + " Error: " + str(e.strerror) + " [" + str(
+                    e.errno) + "]")
 
+    # Redirect all output
+    sys.stdout.flush()
+    sys.stderr.flush()
 
+    devnull = getattr(os, 'devnull', '/dev/null')
+    stdin = file(devnull, 'r')
+    stdout = file(devnull, 'a+')
+    stderr = file(devnull, 'a+')
+    os.dup2(stdin.fileno(), sys.stdin.fileno())
+    os.dup2(stdout.fileno(), sys.stdout.fileno())
+    os.dup2(stderr.fileno(), sys.stderr.fileno())
 
 def main():
     """
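
Note: the two daemonize() hunks above follow the standard Unix double-fork pattern — the first fork lets the parent return to the shell, os.setsid() detaches from the controlling terminal, and the second fork guarantees the daemon can never reacquire one before all standard streams are pointed at the null device. A minimal, self-contained sketch of that pattern (an illustration only, not the SickRage code itself):

import os
import sys

def daemonize():
    """Minimal double-fork daemonization sketch (POSIX only)."""
    if os.fork() != 0:       # first fork: the parent returns to the shell
        os._exit(0)
    os.setsid()              # become session leader, drop the controlling TTY
    if os.fork() != 0:       # second fork: the session leader exits,
        os._exit(0)          # so the daemon cannot reacquire a TTY

    # redirect stdin/stdout/stderr to the null device
    sys.stdout.flush()
    sys.stderr.flush()
    devnull = getattr(os, 'devnull', '/dev/null')
    null_fd = os.open(devnull, os.O_RDWR)
    for fd in (sys.stdin.fileno(), sys.stdout.fileno(), sys.stderr.fileno()):
        os.dup2(null_fd, fd)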
@@ -134,8 +150,8 @@ def main():
     sickbeard.PROG_DIR = os.path.dirname(sickbeard.MY_FULLNAME)
     sickbeard.DATA_DIR = sickbeard.PROG_DIR
     sickbeard.MY_ARGS = sys.argv[1:]
-    sickbeard.CREATEPID = False
     sickbeard.DAEMON = False
+    sickbeard.CREATEPID = False
 
     sickbeard.SYS_ENCODING = None
 
@@ -157,7 +173,7 @@ def main():
         # On non-unicode builds this will raise an AttributeError, if encoding type is not valid it throws a LookupError
         sys.setdefaultencoding(sickbeard.SYS_ENCODING)
     except:
-        print 'Sorry, you MUST add the Sick Beard folder to the PYTHONPATH environment variable'
+        print 'Sorry, you MUST add the SickRage folder to the PYTHONPATH environment variable'
         print 'or find another way to force Python to use ' + sickbeard.SYS_ENCODING + ' for string encoding.'
         sys.exit(1)
 
@@ -196,13 +212,15 @@ def main():
         if o in ('-p', '--port'):
             forcedPort = int(a)
 
-        # Run as a daemon
+        # Run as a double forked daemon
         if o in ('-d', '--daemon'):
+            sickbeard.DAEMON = True
+            # When running as daemon disable consoleLogging and don't start browser
+            consoleLogging = False
+            noLaunch = True
+
             if sys.platform == 'win32':
-                print "Daemonize not supported under Windows, starting normally"
-            else:
-                consoleLogging = False
-                sickbeard.DAEMON = True
+                sickbeard.DAEMON = False
 
         # Specify folder to load the config file from
         if o in ('--config',):
@@ -214,21 +232,27 @@ def main():
 
         # Write a pidfile if requested
         if o in ('--pidfile',):
+            sickbeard.CREATEPID = True
             sickbeard.PIDFILE = str(a)
 
             # If the pidfile already exists, sickbeard may still be running, so exit
             if os.path.exists(sickbeard.PIDFILE):
-                sys.exit("PID file '" + sickbeard.PIDFILE + "' already exists. Exiting.")
+                sys.exit("PID file: " + sickbeard.PIDFILE + " already exists. Exiting.")
 
     # The pidfile is only useful in daemon mode, make sure we can write the file properly
-    if sickbeard.DAEMON:
-        sickbeard.CREATEPID = True
-        try:
-            file(sickbeard.PIDFILE, 'w').write("pid\n")
-        except IOError, e:
-            raise SystemExit("Unable to write PID file: %s [%d]" % (e.strerror, e.errno))
-    else:
-        logger.log(u"Not running in daemon mode. PID file creation disabled.")
+    if sickbeard.CREATEPID:
+        if sickbeard.DAEMON:
+            pid_dir = os.path.dirname(sickbeard.PIDFILE)
+            if not os.access(pid_dir, os.F_OK):
+                sys.exit("PID dir: " + pid_dir + " doesn't exist. Exiting.")
+            if not os.access(pid_dir, os.W_OK):
+                sys.exit("PID dir: " + pid_dir + " must be writable (write permissions). Exiting.")
+
+        else:
+            if consoleLogging:
+                sys.stdout.write("Not running in daemon mode. PID file creation disabled.\n")
+
+            sickbeard.CREATEPID = False
 
     # If they don't specify a config file then put it in the data dir
     if not sickbeard.CONFIG_FILE:
@@ -255,7 +279,7 @@ def main():
     os.chdir(sickbeard.DATA_DIR)
 
     if consoleLogging:
-        print "Starting up Sick Beard " + SICKBEARD_VERSION + " from " + sickbeard.CONFIG_FILE
+        print "Starting up SickRage " + SICKBEARD_VERSION + " from " + sickbeard.CONFIG_FILE
 
     # Load the config and publish it to the sickbeard package
     if not os.path.isfile(sickbeard.CONFIG_FILE):
@@ -266,13 +290,13 @@ def main():
     CUR_DB_VERSION = db.DBConnection().checkDBVersion()
     if CUR_DB_VERSION > 0:
         if CUR_DB_VERSION < MIN_DB_VERSION:
-            raise SystemExit("Your database version (" + str(db.DBConnection().checkDBVersion()) + ") is too old to migrate from with this version of Sick Beard (" + str(MIN_DB_VERSION) + ").\n" + \
+            raise SystemExit("Your database version (" + str(db.DBConnection().checkDBVersion()) + ") is too old to migrate from with this version of SickRage (" + str(MIN_DB_VERSION) + ").\n" + \
                 "Upgrade using a previous version of SB first, or start with no database file to begin fresh.")
         if CUR_DB_VERSION > MAX_DB_VERSION:
-            raise SystemExit("Your database version (" + str(db.DBConnection().checkDBVersion()) + ") has been incremented past what this version of Sick Beard supports (" + str(MAX_DB_VERSION) + ").\n" + \
+            raise SystemExit("Your database version (" + str(db.DBConnection().checkDBVersion()) + ") has been incremented past what this version of SickRage supports (" + str(MAX_DB_VERSION) + ").\n" + \
                 "If you have used other forks of SB, your database may be unusable due to their modifications.")
 
     # Initialize the config and our threads
     sickbeard.initialize(consoleLogging=consoleLogging)
 
     sickbeard.showList = []
@@ -306,18 +330,18 @@ def main():
 
     try:
         initWebServer({
             'port': startPort,
             'host': webhost,
             'data_root': os.path.join(sickbeard.PROG_DIR, 'gui/'+sickbeard.GUI_NAME),
             'web_root': sickbeard.WEB_ROOT,
             'log_dir': log_dir,
             'username': sickbeard.WEB_USERNAME,
             'password': sickbeard.WEB_PASSWORD,
             'enable_https': sickbeard.ENABLE_HTTPS,
             'handle_reverse_proxy': sickbeard.HANDLE_REVERSE_PROXY,
             'https_cert': sickbeard.HTTPS_CERT,
             'https_key': sickbeard.HTTPS_KEY,
         })
     except IOError:
         logger.log(u"Unable to start web server, is something else running on port %d?" % startPort, logger.ERROR)
         if sickbeard.LAUNCH_BROWSER and not sickbeard.DAEMON:
@@ -342,12 +366,13 @@ def main():
 
     # Stay alive while my threads do the work
     while (True):
-        time.sleep(1)
 
         if sickbeard.invoked_command:
             sickbeard.invoked_command()
             sickbeard.invoked_command = None
 
+        time.sleep(1)
+
     return
 
 if __name__ == "__main__":
@@ -17,7 +17,12 @@
 
 <div id="summary" class="align-left">
     <table class="infoTable" cellspacing="1" border="0" cellpadding="0">
-        <tr><td class="infoTableHeader">SR Version: </td><td class="infoTableCell">BRANCH: ($sickbeard.version.SICKBEARD_VERSION) / COMMIT: ($sickbeard.CUR_COMMIT_HASH) <!-- – build.date //--><br />
+        <tr><td class="infoTableHeader">SR Version: </td><td class="infoTableCell">
+        #if $sickbeard.VERSION_NOTIFY
+            BRANCH: ($sickbeard.version.SICKBEARD_VERSION) / COMMIT: ($sickbeard.CUR_COMMIT_HASH) <!-- – build.date //--><br />
+        #else
+            You don't have version checking turned on. Please turn on "Check for Update" in Config > General.<br />
+        #end if
         <font color="red">You are using BETA software</font>
         </td></tr>
         <tr><td class="infoTableHeader">SR Config file: </td><td class="infoTableCell">$sickbeard.CONFIG_FILE</td></tr>
@@ -248,8 +248,10 @@
 #if int($show.air_by_date) != 1 and int($show.sports) != 1
 #if (epResult["season"], epResult["episode"]) in $xem_numbering:
 #set ($dfltSeas, $dfltEpis) = $xem_numbering[(epResult["season"], epResult["episode"])]
-#else
+#elif $xem_numbering and (epResult["season"], epResult["episode"]) not in $xem_numbering:
 #set ($dfltSeas, $dfltEpis) = (0,0)
+#else:
+#set ($dfltSeas, $dfltEpis) = (epResult["season"], epResult["episode"])
 #end if
 #if (epResult["season"], epResult["episode"]) in $scene_numbering:
 #set ($scSeas, $scEpis) = $scene_numbering[(epResult["season"], epResult["episode"])]
@@ -39,6 +39,10 @@ FEATURES:
 
 To run SickRage from source you will need Python 2.6+ and Cheetah 2.1.0+.
 
-## Bugs
+## Forums
 
-If you find a bug please report [here][githubissues] on Github or at our forums http://www.sickrage.tv. Verify that it hasn't already been submitted and then [log a new bug][githubnewissue]. Be sure to provide a sickrage log in debug mode where is the error evidence or it'll never get fixed.
+Any questions or setup info your looking for can be found at out forums http://www.sickrage.tv
+<br>
+If you find a bug please report at our forums http://sickrage.tv/forums/forum/help-support/bug-issue-reports
+<br>
+Be sure to provide a sickrage log in debug mode where is the error evidence or it'll never get fixed.
@@ -32,7 +32,7 @@ from threading import Lock
 from sickbeard import providers, metadata, config
 from providers import ezrss, tvtorrents, btn, newznab, womble, thepiratebay, torrentleech, kat, publichd, iptorrents, \
     omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, nextgen, speedcd
-from sickbeard.config import CheckSection, check_setting_int, check_setting_str, ConfigMigrator, naming_ep_type
+from sickbeard.config import CheckSection, check_setting_int, check_setting_str, check_setting_float, ConfigMigrator, naming_ep_type
 from sickbeard import searchBacklog, showUpdater, versionChecker, properFinder, autoPostProcesser, \
     subtitles, traktWatchListChecker
 from sickbeard import helpers, db, exceptions, show_queue, search_queue, scheduler, show_name_helpers
@@ -50,6 +50,8 @@ from sickbeard.databases import mainDB, cache_db, failed_db
 
 from lib.configobj import ConfigObj
 
+import xml.etree.ElementTree as ElementTree
+
 invoked_command = None
 
 PID = None
@@ -135,6 +137,7 @@ ROOT_DIRS = None
 UPDATE_SHOWS_ON_START = None
 SORT_ARTICLE = None
 DEBUG = False
+CLEAR_CACHE = None
 
 USE_LISTVIEW = None
 METADATA_XBMC = None
@@ -521,7 +524,7 @@ def initialize(consoleLogging=True):
         USE_SYNOLOGYNOTIFIER, SYNOLOGYNOTIFIER_NOTIFY_ONSNATCH, SYNOLOGYNOTIFIER_NOTIFY_ONDOWNLOAD, SYNOLOGYNOTIFIER_NOTIFY_ONSUBTITLEDOWNLOAD, \
         USE_EMAIL, EMAIL_HOST, EMAIL_PORT, EMAIL_TLS, EMAIL_USER, EMAIL_PASSWORD, EMAIL_FROM, EMAIL_NOTIFY_ONSNATCH, EMAIL_NOTIFY_ONDOWNLOAD, EMAIL_NOTIFY_ONSUBTITLEDOWNLOAD, EMAIL_LIST, \
         USE_LISTVIEW, METADATA_XBMC, METADATA_XBMC_12PLUS, METADATA_MEDIABROWSER, METADATA_PS3, metadata_provider_dict, \
-        NEWZBIN, NEWZBIN_USERNAME, NEWZBIN_PASSWORD, GIT_PATH, MOVE_ASSOCIATED_FILES, \
+        NEWZBIN, NEWZBIN_USERNAME, NEWZBIN_PASSWORD, GIT_PATH, MOVE_ASSOCIATED_FILES, CLEAR_CACHE, \
         GUI_NAME, HOME_LAYOUT, HISTORY_LAYOUT, DISPLAY_SHOW_SPECIALS, COMING_EPS_LAYOUT, COMING_EPS_SORT, COMING_EPS_DISPLAY_PAUSED, COMING_EPS_MISSED_RANGE, DATE_PRESET, TIME_PRESET, TIME_PRESET_W_SECONDS, \
         METADATA_WDTV, METADATA_TIVO, METADATA_MEDE8ER, IGNORE_WORDS, CALENDAR_UNPROTECTED, CREATE_MISSING_SHOW_DIRS, \
         ADD_SHOWS_WO_DIR, USE_SUBTITLES, SUBTITLES_LANGUAGES, SUBTITLES_DIR, SUBTITLES_SERVICES_LIST, SUBTITLES_SERVICES_ENABLED, SUBTITLES_HISTORY, SUBTITLES_FINDER_FREQUENCY, subtitlesFinderScheduler, \
@@ -696,21 +699,21 @@ def initialize(consoleLogging=True):
         EZRSS = bool(check_setting_int(CFG, 'General', 'use_torrent', 0))
         if not EZRSS:
             EZRSS = bool(check_setting_int(CFG, 'EZRSS', 'ezrss', 0))
-        EZRSS_RATIO = check_setting_int(CFG, 'EZRSS', 'ezrss_ratio', 0)
+        EZRSS_RATIO = check_setting_float(CFG, 'EZRSS', 'ezrss_ratio', 0)
 
         TVTORRENTS = bool(check_setting_int(CFG, 'TVTORRENTS', 'tvtorrents', 0))
         TVTORRENTS_DIGEST = check_setting_str(CFG, 'TVTORRENTS', 'tvtorrents_digest', '')
         TVTORRENTS_HASH = check_setting_str(CFG, 'TVTORRENTS', 'tvtorrents_hash', '')
-        TVTORRENTS_RATIO = check_setting_int(CFG, 'TVTORRENTS', 'tvtorrents_ratio', 0)
+        TVTORRENTS_RATIO = check_setting_float(CFG, 'TVTORRENTS', 'tvtorrents_ratio', 0)
         TVTORRENTS_OPTIONS = check_setting_str(CFG, 'TVTORRENTS', 'tvtorrents_options', '')
 
         BTN = bool(check_setting_int(CFG, 'BTN', 'btn', 0))
         BTN_API_KEY = check_setting_str(CFG, 'BTN', 'btn_api_key', '')
-        BTN_RATIO = check_setting_int(CFG, 'BTN', 'btn_ratio', 0)
+        BTN_RATIO = check_setting_float(CFG, 'BTN', 'btn_ratio', 0)
         BTN_OPTIONS = check_setting_str(CFG, 'BTN', 'btn_options', '')
 
         THEPIRATEBAY = bool(check_setting_int(CFG, 'THEPIRATEBAY', 'thepiratebay', 1))
-        THEPIRATEBAY_RATIO = check_setting_int(CFG, 'THEPIRATEBAY', 'thepiratebay_ratio', 0)
+        THEPIRATEBAY_RATIO = check_setting_float(CFG, 'THEPIRATEBAY', 'thepiratebay_ratio', 0)
         THEPIRATEBAY_TRUSTED = bool(check_setting_int(CFG, 'THEPIRATEBAY', 'thepiratebay_trusted', 1))
         THEPIRATEBAY_PROXY = bool(check_setting_int(CFG, 'THEPIRATEBAY', 'thepiratebay_proxy', 0))
         THEPIRATEBAY_PROXY_URL = check_setting_str(CFG, 'THEPIRATEBAY', 'thepiratebay_proxy_url', '')
@@ -720,60 +723,60 @@ def initialize(consoleLogging=True):
         TORRENTLEECH = bool(check_setting_int(CFG, 'TORRENTLEECH', 'torrentleech', 0))
         TORRENTLEECH_USERNAME = check_setting_str(CFG, 'TORRENTLEECH', 'torrentleech_username', '')
         TORRENTLEECH_PASSWORD = check_setting_str(CFG, 'TORRENTLEECH', 'torrentleech_password', '')
-        TORRENTLEECH_RATIO = check_setting_int(CFG, 'TORRENTLEECH', 'torrentleech_ratio', 0)
+        TORRENTLEECH_RATIO = check_setting_float(CFG, 'TORRENTLEECH', 'torrentleech_ratio', 0)
         TORRENTLEECH_OPTIONS = check_setting_str(CFG, 'TORRENTLEECH', 'torrentleech_options', '')
 
         IPTORRENTS = bool(check_setting_int(CFG, 'IPTORRENTS', 'iptorrents', 0))
         IPTORRENTS_USERNAME = check_setting_str(CFG, 'IPTORRENTS', 'iptorrents_username', '')
         IPTORRENTS_PASSWORD = check_setting_str(CFG, 'IPTORRENTS', 'iptorrents_password', '')
-        IPTORRENTS_RATIO = check_setting_int(CFG, 'IPTORRENTS', 'iptorrents_ratio', 0)
+        IPTORRENTS_RATIO = check_setting_float(CFG, 'IPTORRENTS', 'iptorrents_ratio', 0)
         IPTORRENTS_FREELEECH = bool(check_setting_int(CFG, 'IPTORRENTS', 'iptorrents_freeleech', 0))
         IPTORRENTS_OPTIONS = check_setting_str(CFG, 'IPTORRENTS', 'iptorrents_options', '')
 
         NEXTGEN = bool(check_setting_int(CFG, 'NEXTGEN', 'nextgen', 0))
         NEXTGEN_USERNAME = check_setting_str(CFG, 'NEXTGEN', 'nextgen_username', '')
         NEXTGEN_PASSWORD = check_setting_str(CFG, 'NEXTGEN', 'nextgen_password', '')
-        NEXTGEN_RATIO = check_setting_int(CFG, 'NEXTGEN', 'nextgen_ratio', 0)
+        NEXTGEN_RATIO = check_setting_float(CFG, 'NEXTGEN', 'nextgen_ratio', 0)
         NEXTGEN_OPTIONS = check_setting_str(CFG, 'NEXTGEN', 'nextgen_options', '')
 
         KAT = bool(check_setting_int(CFG, 'KAT', 'kat', 0))
-        KAT_RATIO = check_setting_int(CFG, 'KAT', 'kat_ratio', 0)
+        KAT_RATIO = check_setting_float(CFG, 'KAT', 'kat_ratio', 0)
         KAT_VERIFIED = bool(check_setting_int(CFG, 'KAT', 'kat_verified', 1))
         KAT_OPTIONS = check_setting_str(CFG, 'KAT', 'kat_options', '')
 
         PUBLICHD = bool(check_setting_int(CFG, 'PUBLICHD', 'publichd', 0))
-        PUBLICHD_RATIO = check_setting_int(CFG, 'PUBLICHD', 'publichd_ratio', 0)
+        PUBLICHD_RATIO = check_setting_float(CFG, 'PUBLICHD', 'publichd_ratio', 0)
         PUBLICHD_OPTIONS = check_setting_str(CFG, 'PUBLICHD', 'publichd_options', '')
 
         SCC = bool(check_setting_int(CFG, 'SCC', 'scc', 0))
         SCC_USERNAME = check_setting_str(CFG, 'SCC', 'scc_username', '')
         SCC_PASSWORD = check_setting_str(CFG, 'SCC', 'scc_password', '')
-        SCC_RATIO = check_setting_int(CFG, 'SCC', 'scc_ratio', 0)
+        SCC_RATIO = check_setting_float(CFG, 'SCC', 'scc_ratio', 0)
         SCC_OPTIONS = check_setting_str(CFG, 'SCC', 'scc_options', '')
 
         HDTORRENTS = bool(check_setting_int(CFG, 'HDTORRENTS', 'hdtorrents', 0))
         HDTORRENTS_USERNAME = check_setting_str(CFG, 'HDTORRENTS', 'hdtorrents_username', '')
         HDTORRENTS_PASSWORD = check_setting_str(CFG, 'HDTORRENTS', 'hdtorrents_password', '')
-        HDTORRENTS_RATIO = check_setting_int(CFG, 'HDTORRENTS', 'hdtorrents_ratio', 0)
+        HDTORRENTS_RATIO = check_setting_float(CFG, 'HDTORRENTS', 'hdtorrents_ratio', 0)
         HDTORRENTS_OPTIONS = check_setting_str(CFG, 'HDTORRENTS', 'hdtorrents_options', '')
 
         TORRENTDAY = bool(check_setting_int(CFG, 'TORRENTDAY', 'torrentday', 0))
         TORRENTDAY_USERNAME = check_setting_str(CFG, 'TORRENTDAY', 'torrentday_username', '')
         TORRENTDAY_PASSWORD = check_setting_str(CFG, 'TORRENTDAY', 'torrentday_password', '')
-        TORRENTDAY_RATIO = check_setting_int(CFG, 'TORRENTDAY', 'torrentday_ratio', 0)
+        TORRENTDAY_RATIO = check_setting_float(CFG, 'TORRENTDAY', 'torrentday_ratio', 0)
         TORRENTDAY_FREELEECH = bool(check_setting_int(CFG, 'TORRENTDAY', 'torrentday_freeleech', 0))
         TORRENTDAY_OPTIONS = check_setting_str(CFG, 'TORRENTDAY', 'torrentday_options', '')
 
         HDBITS = bool(check_setting_int(CFG, 'HDBITS', 'hdbits', 0))
         HDBITS_USERNAME = check_setting_str(CFG, 'HDBITS', 'hdbits_username', '')
         HDBITS_PASSKEY = check_setting_str(CFG, 'HDBITS', 'hdbits_passkey', '')
-        HDBITS_RATIO = check_setting_int(CFG, 'HDBITS', 'hdbits_ratio', 0)
+        HDBITS_RATIO = check_setting_float(CFG, 'HDBITS', 'hdbits_ratio', 0)
         HDBITS_OPTIONS = check_setting_str(CFG, 'HDBITS', 'hdbits_options', '')
 
         SPEEDCD = bool(check_setting_int(CFG, 'SPEEDCD', 'speedcd', 0))
         SPEEDCD_USERNAME = check_setting_str(CFG, 'SPEEDCD', 'speedcd_username', '')
         SPEEDCD_PASSWORD = check_setting_str(CFG, 'SPEEDCD', 'speedcd_password', '')
-        SPEEDCD_RATIO = check_setting_int(CFG, 'SPEEDCD', 'speedcd_ratio', 0)
+        SPEEDCD_RATIO = check_setting_float(CFG, 'SPEEDCD', 'speedcd_ratio', 0)
         SPEEDCD_FREELEECH = bool(check_setting_int(CFG, 'SPEEDCD', 'speedcd_freeleech', 0))
 
         NZBS = bool(check_setting_int(CFG, 'NZBs', 'nzbs', 0))
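
Note: every provider ratio above moves from check_setting_int() to check_setting_float(), so fractional seed ratios such as 1.5 survive the config round-trip instead of being truncated to an integer. The helper itself is not part of this diff; the sketch below is only an assumption of its shape, modeled on the existing check_setting_int()/check_setting_str() helpers in sickbeard/config.py, and the real implementation may differ:

def check_setting_float(config, cfg_name, item_name, def_val):
    """Hypothetical sketch: read a float setting, falling back to a default."""
    try:
        my_val = float(config[cfg_name][item_name])
    except (KeyError, TypeError, ValueError):
        my_val = def_val
        try:
            config[cfg_name][item_name] = my_val
        except KeyError:
            # section missing entirely: create it and store the default
            config[cfg_name] = {}
            config[cfg_name][item_name] = my_val
    return my_val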
@@ -806,7 +809,7 @@ def initialize(consoleLogging=True):
         TORRENT_PASSWORD = check_setting_str(CFG, 'TORRENT', 'torrent_password', '')
         TORRENT_HOST = check_setting_str(CFG, 'TORRENT', 'torrent_host', '')
         TORRENT_PATH = check_setting_str(CFG, 'TORRENT', 'torrent_path', '')
-        TORRENT_RATIO = check_setting_int(CFG, 'TORRENT', 'torrent_ratio', 0)
+        TORRENT_RATIO = check_setting_float(CFG, 'TORRENT', 'torrent_ratio', 0)
         TORRENT_SEED_TIME = check_setting_int(CFG, 'TORRENT', 'torrent_seed_time', 0)
         TORRENT_PAUSED = bool(check_setting_int(CFG, 'TORRENT', 'torrent_paused', 0))
         TORRENT_HIGH_BANDWIDTH = bool(check_setting_int(CFG, 'TORRENT', 'torrent_high_bandwidth', 0))
@@ -1123,6 +1126,17 @@ def initialize(consoleLogging=True):
         showList = []
         loadingShowList = {}
 
+        try:
+            url = 'http://raw.github.com/echel0n/sickrage-init/master/settings.ini'
+            clear_cache = ElementTree.XML(helpers.getURL(url)).find('cache/clear').text
+            CLEAR_CACHE = check_setting_str(CFG, 'General', 'clear_cache', '')
+            if CLEAR_CACHE != clear_cache:
+                for curProvider in [x for x in providers.sortedProviderList() if x.isActive()]:
+                    curProvider.cache._clearCache()
+                CLEAR_CACHE = clear_cache
+                save_config()
+        except:pass
+
         __INITIALIZED__ = True
         return True
 
@@ -1259,12 +1273,21 @@ def halt():
         __INITIALIZED__ = False
 
-
+def remove_pid_file(PIDFILE):
+    try:
+        if os.path.exists(PIDFILE):
+            os.remove(PIDFILE)
+
+    except (IOError, OSError):
+        return False
+
+    return True
+
 def sig_handler(signum=None, frame=None):
     if type(signum) != type(None):
         logger.log(u"Signal %i caught, saving and exiting..." % int(signum))
         saveAndShutdown()
 
 
 def saveAll():
     global showList
 
@@ -1288,7 +1311,7 @@ def saveAndShutdown(restart=False):
 
     if CREATEPID:
         logger.log(u"Removing pidfile " + str(PIDFILE))
-        os.remove(PIDFILE)
+        remove_pid_file(PIDFILE)
 
     if restart:
         install_type = versionCheckScheduler.action.install_type
@@ -1310,7 +1333,7 @@ def saveAndShutdown(restart=False):
         popen_list += MY_ARGS
         if '--nolaunch' not in popen_list:
             popen_list += ['--nolaunch']
-        logger.log(u"Restarting Sick Beard with " + str(popen_list))
+        logger.log(u"Restarting SickRage with " + str(popen_list))
         logger.close()
         subprocess.Popen(popen_list, cwd=os.getcwd())
 
@@ -1433,30 +1456,32 @@ def save_config():
     new_config['General']['ignore_words'] = IGNORE_WORDS
     new_config['General']['calendar_unprotected'] = int(CALENDAR_UNPROTECTED)
 
+    new_config['General']['clear_cache'] = CLEAR_CACHE
+
     new_config['Blackhole'] = {}
     new_config['Blackhole']['nzb_dir'] = NZB_DIR
     new_config['Blackhole']['torrent_dir'] = TORRENT_DIR
 
     new_config['EZRSS'] = {}
     new_config['EZRSS']['ezrss'] = int(EZRSS)
-    new_config['EZRSS']['ezrss_ratio'] = int(EZRSS_RATIO)
+    new_config['EZRSS']['ezrss_ratio'] = float(EZRSS_RATIO)
 
     new_config['TVTORRENTS'] = {}
     new_config['TVTORRENTS']['tvtorrents'] = int(TVTORRENTS)
     new_config['TVTORRENTS']['tvtorrents_digest'] = TVTORRENTS_DIGEST
     new_config['TVTORRENTS']['tvtorrents_hash'] = TVTORRENTS_HASH
-    new_config['TVTORRENTS']['tvtorrents_ratio'] = int(TVTORRENTS_RATIO)
+    new_config['TVTORRENTS']['tvtorrents_ratio'] = float(TVTORRENTS_RATIO)
     new_config['TVTORRENTS']['tvtorrents_options'] = TVTORRENTS_OPTIONS
 
     new_config['BTN'] = {}
     new_config['BTN']['btn'] = int(BTN)
     new_config['BTN']['btn_api_key'] = BTN_API_KEY
-    new_config['BTN']['btn_ratio'] = int(BTN_RATIO)
+    new_config['BTN']['btn_ratio'] = float(BTN_RATIO)
     new_config['BTN']['btn_options'] = BTN_OPTIONS
 
     new_config['THEPIRATEBAY'] = {}
     new_config['THEPIRATEBAY']['thepiratebay'] = int(THEPIRATEBAY)
-    new_config['THEPIRATEBAY']['thepiratebay_ratio'] = int(THEPIRATEBAY_RATIO)
+    new_config['THEPIRATEBAY']['thepiratebay_ratio'] = float(THEPIRATEBAY_RATIO)
     new_config['THEPIRATEBAY']['thepiratebay_trusted'] = int(THEPIRATEBAY_TRUSTED)
     new_config['THEPIRATEBAY']['thepiratebay_proxy'] = int(THEPIRATEBAY_PROXY)
     new_config['THEPIRATEBAY']['thepiratebay_proxy_url'] = THEPIRATEBAY_PROXY_URL
@@ -1467,14 +1492,14 @@ def save_config():
     new_config['TORRENTLEECH']['torrentleech'] = int(TORRENTLEECH)
     new_config['TORRENTLEECH']['torrentleech_username'] = TORRENTLEECH_USERNAME
     new_config['TORRENTLEECH']['torrentleech_password'] = helpers.encrypt(TORRENTLEECH_PASSWORD, ENCRYPTION_VERSION)
-    new_config['TORRENTLEECH']['torrentleech_ratio'] = int(TORRENTLEECH_RATIO)
+    new_config['TORRENTLEECH']['torrentleech_ratio'] = float(TORRENTLEECH_RATIO)
     new_config['TORRENTLEECH']['torrentleech_options'] = TORRENTLEECH_OPTIONS
 
     new_config['IPTORRENTS'] = {}
     new_config['IPTORRENTS']['iptorrents'] = int(IPTORRENTS)
     new_config['IPTORRENTS']['iptorrents_username'] = IPTORRENTS_USERNAME
     new_config['IPTORRENTS']['iptorrents_password'] = helpers.encrypt(IPTORRENTS_PASSWORD, ENCRYPTION_VERSION)
-    new_config['IPTORRENTS']['iptorrents_ratio'] = int(IPTORRENTS_RATIO)
+    new_config['IPTORRENTS']['iptorrents_ratio'] = float(IPTORRENTS_RATIO)
     new_config['IPTORRENTS']['iptorrents_freeleech'] = int(IPTORRENTS_FREELEECH)
     new_config['IPTORRENTS']['iptorrents_options'] = IPTORRENTS_OPTIONS
 
@@ -1482,39 +1507,39 @@ def save_config():
     new_config['NEXTGEN']['nextgen'] = int(NEXTGEN)
     new_config['NEXTGEN']['nextgen_username'] = NEXTGEN_USERNAME
     new_config['NEXTGEN']['nextgen_password'] = helpers.encrypt(NEXTGEN_PASSWORD, ENCRYPTION_VERSION)
-    new_config['NEXTGEN']['nextgen_ratio'] = int(NEXTGEN_RATIO)
+    new_config['NEXTGEN']['nextgen_ratio'] = float(NEXTGEN_RATIO)
     new_config['NEXTGEN']['nextgen_options'] = NEXTGEN_OPTIONS
 
     new_config['KAT'] = {}
     new_config['KAT']['kat'] = int(KAT)
-    new_config['KAT']['kat_ratio'] = int(KAT_RATIO)
+    new_config['KAT']['kat_ratio'] = float(KAT_RATIO)
     new_config['KAT']['kat_verified'] = int(KAT_VERIFIED)
     new_config['KAT']['kat_options'] = KAT_OPTIONS
 
     new_config['PUBLICHD'] = {}
     new_config['PUBLICHD']['publichd'] = int(PUBLICHD)
-    new_config['PUBLICHD']['publichd_ratio'] = int(PUBLICHD_RATIO)
+    new_config['PUBLICHD']['publichd_ratio'] = float(PUBLICHD_RATIO)
     new_config['PUBLICHD']['publichd_options'] = PUBLICHD_OPTIONS
 
     new_config['SCC'] = {}
     new_config['SCC']['scc'] = int(SCC)
     new_config['SCC']['scc_username'] = SCC_USERNAME
     new_config['SCC']['scc_password'] = helpers.encrypt(SCC_PASSWORD, ENCRYPTION_VERSION)
-    new_config['SCC']['scc_ratio'] = int(SCC_RATIO)
+    new_config['SCC']['scc_ratio'] = float(SCC_RATIO)
     new_config['SCC']['scc_options'] = SCC_OPTIONS
 
     new_config['HDTORRENTS'] = {}
     new_config['HDTORRENTS']['hdtorrents'] = int(HDTORRENTS)
     new_config['HDTORRENTS']['hdtorrents_username'] = HDTORRENTS_USERNAME
     new_config['HDTORRENTS']['hdtorrents_password'] = helpers.encrypt(HDTORRENTS_PASSWORD, ENCRYPTION_VERSION)
-    new_config['HDTORRENTS']['hdtorrents_ratio'] = int(HDTORRENTS_RATIO)
+    new_config['HDTORRENTS']['hdtorrents_ratio'] = float(HDTORRENTS_RATIO)
     new_config['HDTORRENTS']['hdtorrents_options'] = HDTORRENTS_OPTIONS
 
     new_config['TORRENTDAY'] = {}
     new_config['TORRENTDAY']['torrentday'] = int(TORRENTDAY)
     new_config['TORRENTDAY']['torrentday_username'] = TORRENTDAY_USERNAME
     new_config['TORRENTDAY']['torrentday_password'] = helpers.encrypt(TORRENTDAY_PASSWORD, ENCRYPTION_VERSION)
-    new_config['TORRENTDAY']['torrentday_ratio'] = int(TORRENTDAY_RATIO)
+    new_config['TORRENTDAY']['torrentday_ratio'] = float(TORRENTDAY_RATIO)
     new_config['TORRENTDAY']['torrentday_freeleech'] = int(TORRENTDAY_FREELEECH)
     new_config['TORRENTDAY']['torrentday_options'] = TORRENTDAY_OPTIONS
 
@@ -1522,14 +1547,14 @@ def save_config():
     new_config['HDBITS']['hdbits'] = int(HDBITS)
     new_config['HDBITS']['hdbits_username'] = HDBITS_USERNAME
     new_config['HDBITS']['hdbits_passkey'] = HDBITS_PASSKEY
-    new_config['HDBITS']['hdbits_ratio'] = int(HDBITS_RATIO)
+    new_config['HDBITS']['hdbits_ratio'] = float(HDBITS_RATIO)
     new_config['HDBITS']['hdbits_options'] = HDBITS_OPTIONS
 
     new_config['SPEEDCD'] = {}
     new_config['SPEEDCD']['speedcd'] = int(SPEEDCD)
     new_config['SPEEDCD']['speedcd_username'] = SPEEDCD_USERNAME
     new_config['SPEEDCD']['speedcd_password'] = helpers.encrypt(SPEEDCD_PASSWORD, ENCRYPTION_VERSION)
-    new_config['SPEEDCD']['speedcd_ratio'] = int(SPEEDCD_RATIO)
+    new_config['SPEEDCD']['speedcd_ratio'] = float(SPEEDCD_RATIO)
     new_config['SPEEDCD']['speedcd_freeleech'] = int(SPEEDCD_FREELEECH)
 
     new_config['NZBs'] = {}
@@ -1570,7 +1595,7 @@ def save_config():
     new_config['TORRENT']['torrent_password'] = helpers.encrypt(TORRENT_PASSWORD, ENCRYPTION_VERSION)
     new_config['TORRENT']['torrent_host'] = TORRENT_HOST
     new_config['TORRENT']['torrent_path'] = TORRENT_PATH
-    new_config['TORRENT']['torrent_ratio'] = int(TORRENT_RATIO)
+    new_config['TORRENT']['torrent_ratio'] = float(TORRENT_RATIO)
     new_config['TORRENT']['torrent_seed_time'] = int(TORRENT_SEED_TIME)
     new_config['TORRENT']['torrent_paused'] = int(TORRENT_PAUSED)
     new_config['TORRENT']['torrent_high_bandwidth'] = int(TORRENT_HIGH_BANDWIDTH)
@@ -85,12 +85,12 @@ class TransmissionAPI(GenericClient):
             ratio = result.ratio
         elif sickbeard.TORRENT_RATIO:
             ratio = sickbeard.TORRENT_RATIO
-
+        if ratio:
             try:
                 float(ratio)
             except ValueError:
                 logger.log(self.name + u': Invalid Ratio. "' + ratio + u'" is not a number', logger.ERROR)
                 return False
 
         torrent_id = self._get_torrent_hash(result)
 
@@ -30,17 +30,15 @@ def fixStupidEncodings(x, silent=False):
         try:
             return x.decode(sickbeard.SYS_ENCODING)
         except UnicodeDecodeError:
-            logger.log(u"Unable to decode value: " + repr(x), logger.ERROR)
+            logger.log(u"Unable to decode value: "+repr(x), logger.ERROR)
             return None
     elif type(x) == unicode:
         return x
     else:
-        logger.log(
-            u"Unknown value passed in, ignoring it: " + str(type(x)) + " (" + repr(x) + ":" + repr(type(x)) + ")",
-            logger.DEBUG if silent else logger.ERROR)
+        logger.log(u"Unknown value passed in, ignoring it: "+str(type(x))+" ("+repr(x)+":"+repr(type(x))+")", logger.DEBUG if silent else logger.ERROR)
         return None
 
+    return None
 
 def fixListEncodings(x):
     if type(x) != list and type(x) != tuple:
@@ -48,24 +46,20 @@ def fixListEncodings(x):
     else:
         return filter(lambda x: x != None, map(fixStupidEncodings, x))
 
 
 def callPeopleStupid(x):
     try:
         return x.encode(sickbeard.SYS_ENCODING)
     except UnicodeEncodeError:
-        logger.log(
-            u"YOUR COMPUTER SUCKS! Your data is being corrupted by a bad locale/encoding setting. Report this error on the forums or IRC please: " + repr(
-                x) + ", " + sickbeard.SYS_ENCODING, logger.ERROR)
+        logger.log(u"YOUR COMPUTER SUCKS! Your data is being corrupted by a bad locale/encoding setting. Report this error on the forums or IRC please: "+repr(x)+", "+sickbeard.SYS_ENCODING, logger.ERROR)
         return x.encode(sickbeard.SYS_ENCODING, 'ignore')
 
-def ek(func, *args, **kwargs):
+def ek(func, *args):
     result = None
 
     if os.name == 'nt':
-        result = func(*args, **kwargs)
+        result = func(*args)
     else:
-        result = func(*[callPeopleStupid(x) if type(x) in (str, unicode) else x for x in args], **kwargs)
+        result = func(*[callPeopleStupid(x) if type(x) in (str, unicode) else x for x in args])
 
     if type(result) in (list, tuple):
         return fixListEncodings(result)
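
Note: the encodingKludge hunk reverts ek() to positional arguments only, dropping the **kwargs pass-through — on Windows the wrapped call runs unchanged, while on other platforms every str/unicode positional argument is first re-encoded with callPeopleStupid() before the call. A hedged usage sketch (the ek.ek(...) call pattern is how the codebase generally wraps filesystem calls; the example path is only an illustration):

import os

from sickbeard import encodingKludge as ek

# Filesystem calls go through ek() so path arguments are encoded to
# SYS_ENCODING on POSIX systems before they reach the OS.
show_dir = ek.ek(os.path.join, '/tv', 'Some Show')   # assumed example path
episode_files = ek.ek(os.listdir, show_dir)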
@@ -22,17 +22,12 @@ import datetime
 
 from sickbeard import db
 from sickbeard import logger
-from sickbeard import exceptions
+from sickbeard.exceptions import ex, EpisodeNotFoundException
 from sickbeard.history import dateFormat
 from sickbeard.common import Quality
 from sickbeard.common import WANTED, FAILED
 
 
-def _log_helper(message, level=logger.MESSAGE):
-    logger.log(message, level)
-    return message + u"\n"
-
-
 def prepareFailedName(release):
     """Standardizes release name for failed DB"""
 
@@ -41,6 +36,10 @@ def prepareFailedName(release):
         fixed = fixed.rpartition(".")[0]
 
     fixed = re.sub("[\.\-\+\ ]", "_", fixed)
 
+    if not isinstance(fixed, unicode):
+        fixed = unicode(fixed, 'utf-8')
+
     return fixed
 
+
@@ -55,26 +54,26 @@ def logFailed(release):
     sql_results = myDB.select("SELECT * FROM history WHERE release=?", [release])
 
     if len(sql_results) == 0:
-        log_str += _log_helper(
+        logger.log(
             u"Release not found in snatch history. Recording it as bad with no size and no proivder.", logger.WARNING)
-        log_str += _log_helper(
+        logger.log(
             u"Future releases of the same name from providers that don't return size will be skipped.", logger.WARNING)
     elif len(sql_results) > 1:
-        log_str += _log_helper(u"Multiple logged snatches found for release", logger.WARNING)
+        logger.log(u"Multiple logged snatches found for release", logger.WARNING)
         sizes = len(set(x["size"] for x in sql_results))
         providers = len(set(x["provider"] for x in sql_results))
         if sizes == 1:
-            log_str += _log_helper(u"However, they're all the same size. Continuing with found size.", logger.WARNING)
+            logger.log(u"However, they're all the same size. Continuing with found size.", logger.WARNING)
             size = sql_results[0]["size"]
         else:
-            log_str += _log_helper(
+            logger.log(
                 u"They also vary in size. Deleting the logged snatches and recording this release with no size/provider",
                 logger.WARNING)
             for result in sql_results:
                 deleteLoggedSnatch(result["release"], result["size"], result["provider"])
 
         if providers == 1:
-            log_str += _log_helper(u"They're also from the same provider. Using it as well.")
+            logger.log(u"They're also from the same provider. Using it as well.")
             provider = sql_results[0]["provider"]
         else:
             size = sql_results[0]["size"]
@@ -105,6 +104,8 @@ def hasFailed(release, size, provider="%"):
     is found with any provider.
     """
 
+    release = prepareFailedName(release)
+
     myDB = db.DBConnection("failed.db")
     sql_results = myDB.select(
         "SELECT * FROM failed WHERE release=? AND size=? AND provider LIKE ?",
@@ -113,74 +114,42 @@ def hasFailed(release, size, provider="%"):
return (len(sql_results) > 0)


- def revertEpisode(show_obj, season, episode=None):
+ def revertEpisode(epObj):
"""Restore the episodes of a failed download to their original state"""
myDB = db.DBConnection("failed.db")
- log_str = u""

- sql_results = myDB.select("SELECT * FROM history WHERE showid=? AND season=?", [show_obj.indexerid, season])
- # {episode: result, ...}
+ sql_results = myDB.select("SELECT * FROM history WHERE showid=? AND season=?", [epObj.show.indexerid, epObj.season])
history_eps = dict([(res["episode"], res) for res in sql_results])

- if episode:
+ try:
- try:
+ logger.log(u"Reverting episode (%s, %s): %s" % (epObj.season, epObj.episode, epObj.name))
- ep_obj = show_obj.getEpisode(season, episode)
+ with epObj.lock:
- log_str += _log_helper(u"Reverting episode (%s, %s): %s" % (season, episode, ep_obj.name))
+ if epObj.episode in history_eps:
- with ep_obj.lock:
+ logger.log(u"Found in history")
- if episode in history_eps:
+ epObj.status = history_eps[epObj.episode]['old_status']
- log_str += _log_helper(u"Found in history")
+ else:
- ep_obj.status = history_eps[episode]['old_status']
+ logger.log(u"WARNING: Episode not found in history. Setting it back to WANTED",
- else:
+ logger.WARNING)
- log_str += _log_helper(u"WARNING: Episode not found in history. Setting it back to WANTED",
+ epObj.status = WANTED
- logger.WARNING)
- ep_obj.status = WANTED

- ep_obj.saveToDB()
+ epObj.saveToDB()

- except exceptions.EpisodeNotFoundException, e:
+ except EpisodeNotFoundException, e:
- log_str += _log_helper(u"Unable to create episode, please set its status manually: " + exceptions.ex(e),
+ logger.log(u"Unable to create episode, please set its status manually: " + ex(e),
logger.WARNING)
- else:
- # Whole season
- log_str += _log_helper(u"Setting season to wanted: " + str(season))
- for ep_obj in show_obj.getAllEpisodes(season):
- log_str += _log_helper(u"Reverting episode (%d, %d): %s" % (season, ep_obj.episode, ep_obj.name))
- with ep_obj.lock:
- if ep_obj in history_eps:
- log_str += _log_helper(u"Found in history")
- ep_obj.status = history_eps[ep_obj]['old_status']
- else:
- log_str += _log_helper(u"WARNING: Episode not found in history. Setting it back to WANTED",
- logger.WARNING)
- ep_obj.status = WANTED

- ep_obj.saveToDB()
+ def markFailed(epObj):

- return log_str


- def markFailed(show_obj, season, episode=None):
log_str = u""

- if episode:
+ try:
- try:
+ with epObj.lock:
- ep_obj = show_obj.getEpisode(season, episode)
+ quality = Quality.splitCompositeStatus(epObj.status)[1]
+ epObj.status = Quality.compositeStatus(FAILED, quality)
+ epObj.saveToDB()

- with ep_obj.lock:
+ except EpisodeNotFoundException, e:
- quality = Quality.splitCompositeStatus(ep_obj.status)[1]
+ logger.log(u"Unable to get episode, please set its status manually: " + ex(e), logger.WARNING)
- ep_obj.status = Quality.compositeStatus(FAILED, quality)
- ep_obj.saveToDB()

- except exceptions.EpisodeNotFoundException, e:
- log_str += _log_helper(u"Unable to get episode, please set its status manually: " + exceptions.ex(e),
- logger.WARNING)
- else:
- # Whole season
- for ep_obj in show_obj.getAllEpisodes(season):
- with ep_obj.lock:
- quality = Quality.splitCompositeStatus(ep_obj.status)[1]
- ep_obj.status = Quality.compositeStatus(FAILED, quality)
- ep_obj.saveToDB()

return log_str

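A hedged sketch of the per-episode revert logic above, using plain dicts in place of SickRage's TVEpisode and DBConnection objects (the names and constant value below are hypothetical stand-ins):

WANTED = 3  # placeholder constant; the real value comes from sickbeard's status constants

def revert_episode(ep, history_rows):
    # history_rows mirrors the "SELECT * FROM history WHERE showid=? AND season=?" result
    history_eps = dict((row["episode"], row) for row in history_rows)
    if ep["episode"] in history_eps:
        # episode was snatched before: put back the status recorded at snatch time
        ep["status"] = history_eps[ep["episode"]]["old_status"]
    else:
        # nothing logged for it: fall back to WANTED so it gets searched again
        ep["status"] = WANTED
    return ep

rows = [{"episode": 1, "old_status": 2}]
print(revert_episode({"season": 1, "episode": 1, "status": 9}, rows))  # status -> 2
print(revert_episode({"season": 1, "episode": 2, "status": 9}, rows))  # status -> 3 (WANTED)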
@@ -200,13 +169,11 @@ def logSnatch(searchResult):
show_obj = searchResult.episodes[0].show

for episode in searchResult.episodes:
- old_status = show_obj.getEpisode(episode.season, episode.episode).status

myDB.action(
"INSERT INTO history (date, size, release, provider, showid, season, episode, old_status)"
"VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
[logDate, searchResult.size, release, provider, show_obj.indexerid, episode.season, episode.episode,
- old_status])
+ episode.status])


def deleteLoggedSnatch(release, size, provider):
@@ -223,14 +190,11 @@ def trimHistory():
myDB.action("DELETE FROM history WHERE date < " + str(
(datetime.datetime.today() - datetime.timedelta(days=30)).strftime(dateFormat)))

+ def findRelease(epObj):
- def findRelease(show, season, episode):
"""
Find releases in history by show ID and season.
Return None for release if multiple found or no release found.
"""
- if not show: return (None, None, None)
- if not season: return (None, None, None)

release = None
provider = None
@@ -238,13 +202,13 @@ def findRelease(show, season, episode):
myDB = db.DBConnection("failed.db")

# Clear old snatches for this release if any exist
- myDB.action("DELETE FROM history WHERE showid=" + str(show.indexerid) + " AND season=" + str(
+ myDB.action("DELETE FROM history WHERE showid=" + str(epObj.show.indexerid) + " AND season=" + str(
- season) + " AND episode=" + str(episode) + " AND date < (SELECT max(date) FROM history WHERE showid=" + str(
+ epObj.season) + " AND episode=" + str(epObj.episode) + " AND date < (SELECT max(date) FROM history WHERE showid=" + str(
- show.indexerid) + " AND season=" + str(season) + " AND episode=" + str(episode) + ")")
+ epObj.show.indexerid) + " AND season=" + str(epObj.season) + " AND episode=" + str(epObj.episode) + ")")

# Search for release in snatch history
results = myDB.select("SELECT release, provider, date FROM history WHERE showid=? AND season=? AND episode=?",
- [show.indexerid, season, episode])
+ [epObj.show.indexerid, epObj.season, epObj.episode])

for result in results:
release = str(result["release"])
@@ -255,9 +219,9 @@ def findRelease(show, season, episode):
myDB.action("DELETE FROM history WHERE release=? AND date!=?", [release, date])

# Found a previously failed release
- logger.log(u"Failed release found for season (%s): (%s)" % (season, result["release"]), logger.DEBUG)
+ logger.log(u"Failed release found for season (%s): (%s)" % (epObj.season, result["release"]), logger.DEBUG)
return (release, provider)

# Release was not found
- logger.log(u"No releases found for season (%s) of (%s)" % (season, show.indexerid), logger.DEBUG)
+ logger.log(u"No releases found for season (%s) of (%s)" % (epObj.season, epObj.show.indexerid), logger.DEBUG)
return (release, provider)
@@ -212,10 +212,11 @@ def getURL(url, post_data=None, headers=None, params=None, timeout=30, json=Fals
logger.log(u"Connection timed out " + str(e.message) + " while loading URL " + url, logger.WARNING)
return None

- if json:
+ if r.ok:
- return r.json() if r.ok else None
+ if json:
+ return r.json()

- return r.content if r.ok else None
+ return r.content


def _remove_file_failed(file):
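A minimal sketch of the response handling above using the requests library, which provides the r.ok / r.json() / r.content attributes seen in the diff. The URL and wrapper name are examples only, not SickRage's helper:

import requests

def get_url(url, json=False, timeout=30):
    try:
        r = requests.get(url, timeout=timeout)
    except requests.exceptions.RequestException:
        return None
    # only trust the body when the status code indicates success (r.ok)
    if r.ok:
        if json:
            return r.json()
        return r.content
    return None  # error responses are treated as a miss

data = get_url("https://httpbin.org/get", json=True)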
@@ -284,7 +285,7 @@ def makeDir(path):
return True


- def searchDBForShow(regShowName):
+ def searchDBForShow(regShowName, log=False):
showNames = [re.sub('[. -]', ' ', regShowName)]

myDB = db.DBConnection()
@@ -303,17 +304,20 @@ def searchDBForShow(regShowName):
# if we didn't get exactly one result then try again with the year stripped off if possible
match = re.match(yearRegex, showName)
if match and match.group(1):
- logger.log(u"Unable to match original name but trying to manually strip and specify show year",
+ if log:
- logger.DEBUG)
+ logger.log(u"Unable to match original name but trying to manually strip and specify show year",
+ logger.DEBUG)
sqlResults = myDB.select(
"SELECT * FROM tv_shows WHERE (show_name LIKE ?) AND startyear = ?",
[match.group(1) + '%', match.group(3)])

if len(sqlResults) == 0:
- logger.log(u"Unable to match a record in the DB for " + showName, logger.DEBUG)
+ if log:
+ logger.log(u"Unable to match a record in the DB for " + showName, logger.DEBUG)
continue
elif len(sqlResults) > 1:
- logger.log(u"Multiple results for " + showName + " in the DB, unable to match show name", logger.DEBUG)
+ if log:
+ logger.log(u"Multiple results for " + showName + " in the DB, unable to match show name", logger.DEBUG)
continue
else:
return (int(sqlResults[0]["indexer_id"]), sqlResults[0]["show_name"])
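The year-stripping retry above relies on a yearRegex defined elsewhere in helpers.py that is not shown in this hunk; the pattern below is only an illustrative assumption with the same group layout the diff indexes into (group(1) = name, group(3) = year):

import re

yearRegex = r"(.+?)(\s*\()?(\d{4})\)?$"  # hypothetical stand-in, not the real pattern

def split_show_year(show_name):
    match = re.match(yearRegex, show_name)
    if match and match.group(1):
        # mirrors the fallback query: LIKE on the stripped name plus startyear
        return match.group(1).strip(), match.group(3)
    return show_name, None

print(split_show_year("Some Show (2004)"))  # ('Some Show', '2004')
print(split_show_year("Some Show"))         # ('Some Show', None)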
@@ -85,11 +85,11 @@ def logSubtitle(showid, season, episode, status, subtitleResult):
_logHistoryItem(action, showid, season, episode, quality, resource, provider)


- def logFailed(indexerid, season, episode, status, release, provider=None):
+ def logFailed(epObj, release, provider=None):
- showid = int(indexerid)
+ showid = int(epObj.show.indexerid)
- season = int(season)
+ season = int(epObj.season)
- epNum = int(episode)
+ epNum = int(epObj.episode)
- status, quality = Quality.splitCompositeStatus(status)
+ status, quality = Quality.splitCompositeStatus(epObj.status)
action = Quality.compositeStatus(FAILED, quality)

_logHistoryItem(action, showid, season, epNum, quality, release, provider)
@@ -260,19 +260,55 @@ class GenericMetadata():
"""
return None

- def create_show_metadata(self, show_obj, force=False):
+ def create_show_metadata(self, show_obj):
- if self.show_metadata and show_obj and (not self._has_show_metadata(show_obj) or force):
+ if self.show_metadata and show_obj and not self._has_show_metadata(show_obj):
logger.log(u"Metadata provider " + self.name + " creating show metadata for " + show_obj.name, logger.DEBUG)
return self.write_show_file(show_obj)
return False

- def create_episode_metadata(self, ep_obj, force=False):
+ def create_episode_metadata(self, ep_obj):
- if self.episode_metadata and ep_obj and (not self._has_episode_metadata(ep_obj) or force):
+ if self.episode_metadata and ep_obj and not self._has_episode_metadata(ep_obj):
logger.log(u"Metadata provider " + self.name + " creating episode metadata for " + ep_obj.prettyName(),
logger.DEBUG)
return self.write_ep_file(ep_obj)
return False

+ def update_show_indexer_metadata(self, show_obj):
+ if self.show_metadata and show_obj and self._has_show_metadata(show_obj):
+ logger.log(u"Metadata provider " + self.name + " updating show indexer info metadata file for " + show_obj.name, logger.DEBUG)

+ nfo_file_path = self.get_show_file_path(show_obj)
+ try:
+ with ek.ek(open, nfo_file_path, 'r') as xmlFileObj:
+ showXML = etree.ElementTree(file=xmlFileObj)

+ indexer = showXML.find('indexer')
+ indexerid = showXML.find('id')

+ root = showXML.getroot()
+ if indexer:
+ indexer.text = show_obj.indexer
+ else:
+ etree.SubElement(root, "indexer").text = str(show_obj.indexer)

+ if indexerid:
+ indexerid.text = show_obj.indexerid
+ else:
+ etree.SubElement(root, "id").text = str(show_obj.indexerid)

+ # Make it purdy
+ helpers.indentXML(root)

+ showXML.write(nfo_file_path)
+ helpers.chmodAsParent(nfo_file_path)

+ return True
+ except IOError, e:
+ logger.log(
+ u"Unable to write file to " + nfo_file_path + " - are you sure the folder is writable? " + ex(e),
+ logger.ERROR)

def create_fanart(self, show_obj):
if self.fanart and show_obj and not self._has_fanart(show_obj):
logger.log(u"Metadata provider " + self.name + " creating fanart for " + show_obj.name, logger.DEBUG)
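A standalone sketch of the tvshow.nfo patching added above, using the standard library's ElementTree directly instead of SickRage's ek/helpers wrappers; file name and element values here are examples, and the sketch uses an explicit None check when looking up existing nodes:

import xml.etree.ElementTree as etree

def update_indexer_info(nfo_path, indexer, indexerid):
    tree = etree.parse(nfo_path)
    root = tree.getroot()

    # reuse the existing <indexer>/<id> nodes if present, otherwise create them
    node = root.find('indexer')
    if node is None:
        node = etree.SubElement(root, 'indexer')
    node.text = str(indexer)

    node = root.find('id')
    if node is None:
        node = etree.SubElement(root, 'id')
    node.text = str(indexerid)

    tree.write(nfo_path)
    return True

# Example: seed a minimal nfo, then patch it in place
with open('tvshow_example.nfo', 'w') as f:
    f.write('<tvshow><title>Example Show</title></tvshow>')
update_indexer_info('tvshow_example.nfo', 1, 12345)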
@@ -82,6 +82,9 @@ class PS3Metadata(generic.GenericMetadata):
def create_show_metadata(self, show_obj, force=False):
pass

+ def update_show_indexer_metadata(self, show_obj):
+ pass

def get_show_file_path(self, show_obj):
pass

@@ -90,6 +90,9 @@ class TIVOMetadata(generic.GenericMetadata):
def create_show_metadata(self, show_obj, force=False):
pass

+ def update_show_indexer_metadata(self, show_obj):
+ pass

def get_show_file_path(self, show_obj):
pass

@@ -95,6 +95,9 @@ class WDTVMetadata(generic.GenericMetadata):
def create_show_metadata(self, show_obj, force=False):
pass

+ def update_show_indexer_metadata(self, show_obj):
+ pass

def get_show_file_path(self, show_obj):
pass

@@ -373,11 +373,15 @@ class ParseResult(object):
return to_return.encode('utf-8')

def convert(self):
- if self.air_by_date: return self # scene numbering does not apply to air-by-date
+ if not self.series_name: return self # can't work without a series name
+ if self.air_by_date or self.sports: return self # scene numbering does not apply to air-by-date
if self.season_number == None: return self # can't work without a season
if len(self.episode_numbers) == 0: return self # need at least one episode

- self.show = helpers.get_show_by_name(self.series_name)
+ showResult = helpers.searchDBForShow(self.series_name)
+ if showResult:
+ self.show = helpers.findCertainShow(sickbeard.showList, int(showResult[0]))

if not self.show:
return self

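A hedged sketch of the show lookup that convert() now performs: a DB search returning (indexer_id, show_name), then matching that id against the loaded show list. The classes and list below are simplified stand-ins, not SickRage's objects:

class FakeShow(object):
    def __init__(self, indexerid, name):
        self.indexerid = indexerid
        self.name = name

show_list = [FakeShow(121361, "Example Show")]

def search_db_for_show(series_name):
    # pretend DB hit: (indexer_id, show_name), mirroring what searchDBForShow returns above
    return (121361, "Example Show") if series_name == "Example Show" else None

def find_certain_show(shows, indexerid):
    for show in shows:
        if show.indexerid == indexerid:
            return show
    return None

show_result = search_db_for_show("Example Show")
show = find_certain_show(show_list, int(show_result[0])) if show_result else None
print(show.name if show else "no match")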
@@ -52,9 +52,9 @@ class Boxcar2Notifier:

data = urllib.urlencode({
'user_credentials': accesstoken,
- 'notification[title]': "SickRage : " + title,
+ 'notification[title]': "SickRage : " + title + ' ' + msg,
'notification[long_message]': msg,
- 'notification[sound]': "bird-1"
+ 'notification[sound]': "success"
})

# send the request to boxcar2
@@ -625,7 +625,6 @@ class PostProcessor(object):

# detect and convert scene numbered releases
season, cur_episode = sickbeard.scene_numbering.get_indexer_numbering(indexer_id,indexer,season,cur_episode)
- self._log(u"Episode object has been scene converted to " + str(season) + "x" + str(cur_episode), logger.DEBUG)

# now that we've figured out which episode this file is just load it manually
try:
@@ -634,6 +633,9 @@ class PostProcessor(object):
self._log(u"Unable to create episode: " + ex(e), logger.DEBUG)
raise exceptions.PostProcessingFailed()

+ self._log(u"Episode object has been converted from Scene numbering " + str(curEp.scene_season) + "x" + str(
+ curEp.scene_episode) + " to Indexer numbering" + str(curEp.season) + "x" + str(curEp.episode))

# associate all the episodes together under a single root episode
if root_ep == None:
root_ep = curEp
@@ -123,7 +123,7 @@ def makeTorrentRssProvider(configString):


def getDefaultNewznabProviders():
- return 'Sick Beard Index|http://lolo.sickbeard.com/|0|5030,5040,5060|0!!!NZBs.org|http://nzbs.org/||5030,5040,5060,5070,5090|0!!!Usenet-Crawler|https://www.usenet-crawler.com/||5030,5040,5060|0'
+ return 'Sick Beard Index|http://lolo.sickbeard.com/|0|5030,5040,5060|0!!!NZBs.org|https://nzbs.org/||5030,5040,5060,5070,5090|0!!!Usenet-Crawler|https://www.usenet-crawler.com/||5030,5040,5060|0'


def getProviderModule(name):
@@ -210,21 +210,13 @@ class BTNProvider(generic.TorrentProvider):

# Search for entire seasons: no need to do special things for air by date shows
whole_season_params['category'] = 'Season'
- whole_season_params['name'] = 'Season ' + str(ep_obj.scene_season)

- search_params.append(whole_season_params)

- # Search for episodes in the season
- partial_season_params['category'] = 'Episode'

if ep_obj.show.air_by_date or ep_obj.show.sports:
# Search for the year of the air by date show
- partial_season_params['name'] = str(ep_obj.airdate).split('-')[0]
+ whole_season_params['name'] = str(ep_obj.airdate)[:7]
else:
- # Search for any result which has Sxx in the name
+ whole_season_params['name'] = 'Season ' + str(ep_obj.scene_season)
- partial_season_params['name'] = 'S%02d' % int(ep_obj.scene_season)

- search_params.append(partial_season_params)
+ search_params.append(whole_season_params)

return search_params

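The air-by-date branches above (and in the provider changes that follow) build their season token as str(airdate)[:7]; for a datetime.date that ISO-string slice is "YYYY-MM", i.e. a month-level search term instead of a scene season number. A tiny illustration with an example date:

import datetime

airdate = datetime.date(2014, 3, 21)   # example air date
print(str(airdate))        # 2014-03-21
print(str(airdate)[:7])    # 2014-03  -> used as the season search value
print('Season ' + str(2))  # Season 2 -> the non-air-by-date form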
@@ -47,7 +47,7 @@ class DTTProvider(generic.TorrentProvider):
return quality

def getSearchResults(self, show, season, episodes, seasonSearch=False, manualSearch=False):
- return generic.TorrentProvider.getSearchResults(self, show, season, episodes, seasonSearch, manualSearch)
+ return generic.TorrentProvider.findSearchResults(self, show, season, episodes, seasonSearch, manualSearch)

def _dtt_show_id(self, show_name):
return sanitizeSceneName(show_name).replace('.', '-').lower()

@@ -68,7 +68,7 @@ class EZRSSProvider(generic.TorrentProvider):
logger.WARNING)
return results

- results = generic.TorrentProvider.getSearchResults(self, show, season, episodes, seasonSearch, manualSearch)
+ results = generic.TorrentProvider.findSearchResults(self, show, season, episodes, seasonSearch, manualSearch)

return results

@@ -78,12 +78,10 @@ class EZRSSProvider(generic.TorrentProvider):

params['show_name'] = helpers.sanitizeSceneName(self.show.name, ezrss=True).replace('.', ' ').encode('utf-8')

- if not (ep_obj.show.air_by_date or ep_obj.show.sports):
+ if ep_obj.show.air_by_date or ep_obj.show.sports:
- params['season'] = ep_obj.scene_season
+ params['season'] = str(ep_obj.airdate)[:7]
else:
- params['season'] = self._get_episode_search_strings(ep_obj)[0]['season']
+ params['season'] = ep_obj.scene_season

- #params['episode'] = self._get_episode_search_strings(ep_obj)[0]['episode']

return [params]

@@ -232,7 +232,8 @@ class GenericProvider:
for epObj in episodes:
if not epObj.show.air_by_date:
if epObj.scene_season == 0 or epObj.scene_episode == 0:
- logger.log(u"Incomplete Indexer <-> Scene mapping detected for " + epObj.prettyName() + ", skipping search!")
+ logger.log(
+ u"Incomplete Indexer <-> Scene mapping detected for " + epObj.prettyName() + ", skipping search!")
continue

cacheResult = self.cache.searchCache(epObj, manualSearch)
@@ -265,7 +266,6 @@ class GenericProvider:
for ep_obj, items in searchItems.items():
for item in items:

(title, url) = self._get_title_and_url(item)

quality = self.getQuality(item)
@@ -280,13 +280,13 @@ class GenericProvider:

if not (self.show.air_by_date or self.show.sports):
if not len(parse_result.episode_numbers) and (
parse_result.season_number != None and parse_result.season_number != ep_obj.season) or (
parse_result.season_number == None and ep_obj.season != 1):
logger.log(u"The result " + title + " doesn't seem to be a valid season for season " + str(
ep_obj.season) + ", ignoring", logger.DEBUG)
continue
elif len(parse_result.episode_numbers) and (
parse_result.season_number != ep_obj.season or ep_obj.episode not in parse_result.episode_numbers):
logger.log(u"Episode " + title + " isn't " + str(ep_obj.season) + "x" + str(
ep_obj.episode) + ", skipping it", logger.DEBUG)
continue
@@ -301,10 +301,17 @@ class GenericProvider:
logger.DEBUG)
continue

+ if (parse_result.air_by_date and parse_result.air_date != ep_obj.airdate) or (
+ parse_result.sports and parse_result.sports_event_date != ep_obj.airdate):
+ logger.log("Episode " + title + " didn't air on " + str(ep_obj.airdate) + ", skipping it",
+ logger.DEBUG)
+ continue

myDB = db.DBConnection()
- sql_results = myDB.select("SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
+ sql_results = myDB.select(
- [show.indexerid,
+ "SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
- parse_result.air_date.toordinal() or parse_result.sports_event_date.toordinal()])
+ [show.indexerid,
+ parse_result.air_date.toordinal() or parse_result.sports_event_date.toordinal()])

if len(sql_results) != 1:
logger.log(
@@ -324,7 +331,8 @@ class GenericProvider:

if not wantEp:
logger.log(
- u"Ignoring result " + title + " because we don't want an episode that is " + Quality.qualityStrings[
+ u"Ignoring result " + title + " because we don't want an episode that is " +
+ Quality.qualityStrings[
quality], logger.DEBUG)

continue
@@ -154,10 +154,16 @@ class HDBitsProvider(generic.TorrentProvider):
}

if season:
- post_data['tvdb'] = {
+ if show.air_by_date or show.sports:
- 'id': show.indexerid,
+ post_data['tvdb'] = {
- 'season': season,
+ 'id': show.indexerid,
- }
+ 'season': str(episode.airdate)[:7],
+ }
+ else:
+ post_data['tvdb'] = {
+ 'id': show.indexerid,
+ 'season': season,
+ }

if search_term:
post_data['search'] = search_term
@@ -114,15 +114,14 @@ class HDTorrentsProvider(generic.TorrentProvider):
def _get_season_search_strings(self, ep_obj):

search_string = {'Season': []}
- if not (ep_obj.show.air_by_date or ep_obj.show.sports):
+ for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
- for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
+ if ep_obj.show.air_by_date or ep_obj.show.sports:
+ ep_string = show_name + str(ep_obj.airdate)[:7]
+ else:
ep_string = show_name + ' S%02d' % int(ep_obj.scene_season) #1) showName SXX
- search_string['Season'].append(ep_string)

- elif ep_obj.show.air_by_date or ep_obj.show.sports:
+ search_string['Season'].append(ep_string)
- search_string['Season'] = self._get_episode_search_strings(ep_obj)[0]['Season']

- #search_string['Episode'] = self._get_episode_search_strings(ep_obj)[0]['Episode']

return [search_string]

@@ -94,15 +94,13 @@ class IPTorrentsProvider(generic.TorrentProvider):
def _get_season_search_strings(self, ep_obj):

search_string = {'Season': []}
- if not (ep_obj.show.air_by_date or ep_obj.show.sports):
+ for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
- for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
+ if ep_obj.show.air_by_date or ep_obj.show.sports:
+ ep_string = show_name + str(ep_obj.airdate)[:7]
+ else:
ep_string = show_name + ' S%02d' % int(ep_obj.scene_season) #1) showName SXX
- search_string['Season'].append(ep_string)

- elif ep_obj.show.air_by_date or ep_obj.show.sports:
+ search_string['Season'].append(ep_string)
- search_string['Season'] = self._get_episode_search_strings(ep_obj)[0]['Season']

- #search_string['Episode'] = self._get_episode_search_strings(ep_obj)[0]['Episode']

return [search_string]

@@ -170,17 +170,18 @@ class KATProvider(generic.TorrentProvider):

if not (ep_obj.show.air_by_date or ep_obj.show.sports):
for show_name in set(allPossibleShowNames(self.show)):
- ep_string = show_name + ' S%02d' % int(ep_obj.scene_season) + ' -S%02d' % int(ep_obj.scene_season) + 'E' + ' category:tv' #1) showName SXX -SXXE
+ if ep_obj.show.air_by_date or ep_obj.show.sports:
+ ep_string = show_name + str(ep_obj.airdate)[:7] + ' category:tv' #2) showName Season X
+ else:
+ ep_string = show_name + ' S%02d' % int(ep_obj.scene_season) + ' -S%02d' % int(ep_obj.scene_season) + 'E' + ' category:tv' #1) showName SXX -SXXE
search_string['Season'].append(ep_string)

- ep_string = show_name + ' Season ' + str(ep_obj.scene_season) + ' -Ep*' + ' category:tv' #2) showName Season X
+ if ep_obj.show.air_by_date or ep_obj.show.sports:
+ ep_string = show_name + ' Season ' + str(ep_obj.airdate)[:7] + ' category:tv' #2) showName Season X
+ else:
+ ep_string = show_name + ' Season ' + str(ep_obj.scene_season) + ' -Ep*' + ' category:tv' #2) showName Season X
search_string['Season'].append(ep_string)

- elif ep_obj.show.air_by_date or ep_obj.show.sports:
- search_string['Season'] = self._get_episode_search_strings(ep_obj)[0]['Season']

- #search_string['Episode'] = self._get_episode_search_strings(ep_obj)[0]['Episode']

return [search_string]

def _get_episode_search_strings(self, ep_obj, add_string=''):
@@ -94,19 +94,13 @@ class NewznabProvider(generic.NZBProvider):
cur_params['q'] = helpers.sanitizeSceneName(cur_exception)

# season
- if not (ep_obj.show.air_by_date or ep_obj.show.sports):
+ if ep_obj.show.air_by_date or ep_obj.show.sports:
+ cur_params['season'] = str(ep_obj.airdate)[:7]
+ else:
cur_params['season'] = str(ep_obj.scene_season)

to_return.append(cur_params)

- cur_params = {}
- if ep_obj.show.air_by_date or ep_obj.show.sports:
- cur_params['season'] = self._get_episode_search_strings(ep_obj)[0]['season']

- #cur_params['episode'] = self._get_episode_search_strings(ep_obj)[0]['ep']

- to_return.append(cur_params)

return to_return

def _get_episode_search_strings(self, ep_obj, add_string=''):
@@ -134,15 +134,13 @@ class NextGenProvider(generic.TorrentProvider):
def _get_season_search_strings(self, ep_obj):

search_string = {'Season': []}
- if not (ep_obj.show.air_by_date or ep_obj.show.sports):
+ for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
- for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
+ if ep_obj.show.air_by_date or ep_obj.show.sports:
+ ep_string = show_name + str(ep_obj.airdate)[:7]
+ else:
ep_string = show_name + ' S%02d' % int(ep_obj.scene_season) #1) showName SXX
- search_string['Season'].append(ep_string)

- elif ep_obj.show.air_by_date or ep_obj.show.sports:
+ search_string['Season'].append(ep_string)
- search_string['Season'] = self._get_episode_search_strings(ep_obj)[0]['Season']

- #search_string['Episode'] = self._get_episode_search_strings(ep_obj)[0]['Episode']

return [search_string]

@@ -50,18 +50,17 @@ class NyaaProvider(generic.TorrentProvider):
return 'nyaatorrents.png'

def getQuality(self, item, anime=False):
- self.debug()
title = item.title
quality = Quality.sceneQuality(title)
return quality

def getSearchResults(self, show, season, episodes, seasonSearch=False, manualSearch=False):
- results = generic.TorrentProvider.getSearchResults(self, show, season, episodes, seasonSearch, manualSearch)
+ results = generic.TorrentProvider.findSearchResults(self, show, season, episodes, seasonSearch, manualSearch)
return results

def _get_season_search_strings(self, ep_obj):
names = []
- names.extend(show_name_helpers.makeSceneshowSearchStrings(self.show))
+ names.extend(show_name_helpers.makeSceneShowSearchStrings(self.show))
return names

def _get_episode_search_strings(self, ep_obj, add_string=''):
@@ -76,17 +76,18 @@ class PublicHDProvider(generic.TorrentProvider):
def _get_season_search_strings(self, ep_obj):
search_string = {'Season': []}

- if not (ep_obj.show.air_by_date or ep_obj.show.sports):
+ for show_name in set(allPossibleShowNames(self.show)):
- for show_name in set(allPossibleShowNames(self.show)):
+ if ep_obj.show.air_by_date or ep_obj.show.sports:
+ ep_string = show_name + str(ep_obj.airdate)[:7]
+ else:
ep_string = show_name + ' S%02d' % int(ep_obj.scene_season) #1) showName SXX -SXXE
search_string['Season'].append(ep_string)

+ if ep_obj.show.air_by_date or ep_obj.show.sports:
+ ep_string = show_name + ' Season ' + str(ep_obj.airdate)[:7]
+ else:
ep_string = show_name + ' Season ' + str(ep_obj.scene_season) #2) showName Season X
search_string['Season'].append(ep_string)
- else:
- search_string['Season'] = self._get_episode_search_strings(ep_obj)[0]['Season']

- #search_string['Episode'] = self._get_episode_search_strings(ep_obj)[0]['Episode']

return [search_string]

@@ -103,14 +103,13 @@ class SCCProvider(generic.TorrentProvider):
def _get_season_search_strings(self, ep_obj):

search_string = {'Season': []}
- if not (ep_obj.show.air_by_date or ep_obj.show.sports):
+ for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
- for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
+ if ep_obj.show.air_by_date or ep_obj.show.sports:
+ ep_string = show_name + str(ep_obj.airdate)[:7]
+ else:
ep_string = show_name + ' S%02d' % int(ep_obj.scene_season) #1) showName SXX
- search_string['Season'].append(ep_string)
- elif ep_obj.show.air_by_date or ep_obj.show.sports:
- search_string['Season'] = self._get_episode_search_strings(ep_obj)[0]['Season']

- #search_string['Episode'] = self._get_episode_search_strings(ep_obj)[0]['Episode']
+ search_string['Season'].append(ep_string)

return [search_string]

@@ -142,7 +141,7 @@ class SCCProvider(generic.TorrentProvider):

return [search_string]

- def _isSection(section, text):
+ def _isSection(self, section, text):
title = '<title>.+? \| %s</title>' % section
if re.search(title, text, re.IGNORECASE):
return True
@@ -94,14 +94,13 @@ class SpeedCDProvider(generic.TorrentProvider):

#If Every episode in Season is a wanted Episode then search for Season first
search_string = {'Season': []}
- if not (ep_obj.show.air_by_date or ep_obj.show.sports):
+ for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
- for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
+ if ep_obj.show.air_by_date or ep_obj.show.sports:
+ ep_string = show_name + str(ep_obj.airdate)[:7]
+ else:
ep_string = show_name +' S%02d' % int(ep_obj.scene_season) #1) showName SXX
- search_string['Season'].append(ep_string)
- elif ep_obj.show.air_by_date or ep_obj.show.sports:
- search_string['Season'] = self._get_episode_search_strings(ep_obj)[0]['Season']

- #search_string['Episode'] = self._get_episode_search_strings(ep_obj)[0]['Episode']
+ search_string['Season'].append(ep_string)

return [search_string]

@@ -173,17 +173,19 @@ class ThePirateBayProvider(generic.TorrentProvider):
def _get_season_search_strings(self, ep_obj):

search_string = {'Season': []}
- if not (ep_obj.show.air_by_date or ep_obj.show.sports):
+ for show_name in set(allPossibleShowNames(self.show)) if not (ep_obj.show.air_by_date or ep_obj.show.sports) else []:
- for show_name in set(allPossibleShowNames(self.show)) if not (ep_obj.show.air_by_date or ep_obj.show.sports) else []:
+ if ep_obj.show.air_by_date or ep_obj.show.sports:
+ ep_string = show_name + str(ep_obj.airdate)[:7]
+ else:
ep_string = show_name + ' S%02d' % int(ep_obj.scene_season) #1) showName SXX
search_string['Season'].append(ep_string)

+ if ep_obj.show.air_by_date or ep_obj.show.sports:
+ ep_string = show_name + ' Season ' + str(ep_obj.airdate)[:7]
+ else:
ep_string = show_name + ' Season ' + str(ep_obj.scene_season) + ' -Ep*' #2) showName Season X
- search_string['Season'].append(ep_string)
- elif ep_obj.show.air_by_date or ep_obj.show.sports:
- search_string['Season'] = self._get_episode_search_strings(ep_obj)[0]['Season']

- #search_string['Episode'] = self._get_episode_search_strings(ep_obj)[0]['Episode']
+ search_string['Season'].append(ep_string)

return [search_string]

@@ -121,14 +121,13 @@ class TorrentDayProvider(generic.TorrentProvider):
def _get_season_search_strings(self, ep_obj):

search_string = {'Season': []}
- if not (ep_obj.show.air_by_date or ep_obj.show.sports):
+ for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
- for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
+ if ep_obj.show.air_by_date or ep_obj.show.sports:
+ ep_string = show_name + str(ep_obj.airdate)[:7]
+ else:
ep_string = show_name + ' S%02d' % int(ep_obj.scene_season) #1) showName SXX
- search_string['Season'].append(ep_string)
- elif ep_obj.show.air_by_date or ep_obj.show.sports:
- search_string['Season'] = self._get_episode_search_strings(ep_obj)[0]['Season']

- #search_string['Episode'] = self._get_episode_search_strings(ep_obj)[0]['Episode']
+ search_string['Season'].append(ep_string)

return [search_string]

@@ -98,14 +98,13 @@ class TorrentLeechProvider(generic.TorrentProvider):
def _get_season_search_strings(self, ep_obj):

search_string = {'Season': []}
- if not (ep_obj.show.air_by_date or ep_obj.show.sports):
+ for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
- for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
+ if ep_obj.show.air_by_date or ep_obj.show.sports:
+ ep_string = show_name + str(ep_obj.airdate)[:7]
+ else:
ep_string = show_name + ' S%02d' % int(ep_obj.scene_season) #1) showName SXX
- search_string['Season'].append(ep_string)
- elif ep_obj.show.air_by_date or ep_obj.show.sports:
- search_string['Season'] = self._get_episode_search_strings(ep_obj)[0]['Season']

- #search_string['Episode'] = self._get_episode_search_strings(ep_obj)[0]['Episode']
+ search_string['Season'].append(ep_string)

return [search_string]

@@ -27,7 +27,7 @@ from sickbeard.exceptions import ex

class Scheduler:
def __init__(self, action, cycleTime=datetime.timedelta(minutes=10), runImmediately=True,
- threadName="ScheduledThread", silent=False, runOnce=False):
+ threadName="ScheduledThread", silent=False):

if runImmediately:
self.lastRun = datetime.datetime.fromordinal(1)
@@ -44,7 +44,6 @@ class Scheduler:
self.initThread()

self.abort = False
- self.runOnce = runOnce

def initThread(self):
if self.thread == None or not self.thread.isAlive():
@@ -76,7 +75,7 @@ class Scheduler:
logger.log(u"Exception generated in thread " + self.threadName + ": " + ex(e), logger.ERROR)
logger.log(repr(traceback.format_exc()), logger.DEBUG)

- if self.abort or self.runOnce:
+ if self.abort:
self.abort = False
self.thread = None
return
@@ -43,6 +43,7 @@ from sickbeard import failed_history
from sickbeard.exceptions import ex
from sickbeard.providers.generic import GenericProvider, tvcache


def _downloadResult(result):
"""
Downloads a result to the appropriate black hole folder.
@@ -173,6 +174,7 @@ def snatchEpisode(result, endStatus=SNATCHED):

return True


def filter_release_name(name, filter_words):
"""
Filters out results based on filter_words
@@ -191,6 +193,7 @@ def filter_release_name(name, filter_words):

return False


def pickBestResult(results, show, quality_list=None):
logger.log(u"Picking the best result out of " + str([x.name for x in results]), logger.DEBUG)

@@ -220,6 +223,7 @@ def pickBestResult(results, show, quality_list=None):

if not bestResult or bestResult.quality < cur_result.quality and cur_result.quality != Quality.UNKNOWN:
bestResult = cur_result

elif bestResult.quality == cur_result.quality:
if "proper" in cur_result.name.lower() or "repack" in cur_result.name.lower():
bestResult = cur_result
@@ -233,6 +237,7 @@ def pickBestResult(results, show, quality_list=None):

return bestResult


def isFinalResult(result):
"""
Checks if the given result is good enough quality that we can stop searching for other ones.
@@ -253,7 +258,7 @@ def isFinalResult(result):
return False

# if there's no redownload that's higher (above) and this is the highest initial download then we're good
- elif any_qualities and result.quality == max(any_qualities):
+ elif any_qualities and result.quality in any_qualities:
return True

elif best_qualities and result.quality == max(best_qualities):
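The isFinalResult() tweak above loosens the "initial quality" check: any quality in the allowed list now counts as final, not only the single highest one. The quality values below are arbitrary example integers, not SickRage's real constants:

any_qualities = [8, 32, 128]   # e.g. allowed initial qualities
result_quality = 32

old_rule = (result_quality == max(any_qualities))  # only 128 would pass
new_rule = (result_quality in any_qualities)       # 8, 32 or 128 all pass

print("old rule: %s, new rule: %s" % (old_rule, new_rule))  # old rule: False, new rule: True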
@ -289,6 +294,7 @@ def isFirstBestMatch(result):
|
|||||||
|
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
||||||
def filterSearchResults(show, results):
|
def filterSearchResults(show, results):
|
||||||
foundResults = {}
|
foundResults = {}
|
||||||
|
|
||||||
@ -306,20 +312,19 @@ def filterSearchResults(show, results):
|
|||||||
|
|
||||||
return foundResults
|
return foundResults
|
||||||
|
|
||||||
|
|
||||||
def searchProviders(queueItem, show, season, episodes, seasonSearch=False, manualSearch=False):
|
def searchProviders(queueItem, show, season, episodes, seasonSearch=False, manualSearch=False):
|
||||||
logger.log(u"Searching for stuff we need from " + show.name + " season " + str(season))
|
logger.log(u"Searching for stuff we need from " + show.name + " season " + str(season))
|
||||||
|
|
||||||
finalResults = []
|
|
||||||
|
|
||||||
providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]
|
providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]
|
||||||
|
|
||||||
if not len(providers):
|
if not len(providers):
|
||||||
logger.log(u"No NZB/Torrent providers found or enabled in the sickrage config. Please check your settings.",
|
logger.log(u"No NZB/Torrent providers found or enabled in the sickrage config. Please check your settings.",
|
||||||
logger.ERROR)
|
logger.ERROR)
|
||||||
return []
|
return queueItem
|
||||||
|
|
||||||
for provider in providers:
|
for providerNum, provider in enumerate(providers):
|
||||||
foundResults = {provider.name:{}}
|
foundResults = {provider.name: {}}
|
||||||
|
|
||||||
try:
|
try:
|
||||||
curResults = provider.findSearchResults(show, season, episodes, seasonSearch, manualSearch)
|
curResults = provider.findSearchResults(show, season, episodes, seasonSearch, manualSearch)
|
||||||
@ -343,14 +348,16 @@ def searchProviders(queueItem, show, season, episodes, seasonSearch=False, manua
|
|||||||
# pick the best season NZB
|
# pick the best season NZB
|
||||||
bestSeasonNZB = None
|
bestSeasonNZB = None
|
||||||
if SEASON_RESULT in foundResults[provider.name]:
|
if SEASON_RESULT in foundResults[provider.name]:
|
||||||
bestSeasonNZB = pickBestResult(foundResults[provider.name][SEASON_RESULT], show, anyQualities + bestQualities)
|
bestSeasonNZB = pickBestResult(foundResults[provider.name][SEASON_RESULT], show,
|
||||||
|
anyQualities + bestQualities)
|
||||||
|
|
||||||
highest_quality_overall = 0
|
highest_quality_overall = 0
|
||||||
for cur_episode in foundResults[provider.name]:
|
for cur_episode in foundResults[provider.name]:
|
||||||
for cur_result in foundResults[provider.name][cur_episode]:
|
for cur_result in foundResults[provider.name][cur_episode]:
|
||||||
if cur_result.quality != Quality.UNKNOWN and cur_result.quality > highest_quality_overall:
|
if cur_result.quality != Quality.UNKNOWN and cur_result.quality > highest_quality_overall:
|
||||||
highest_quality_overall = cur_result.quality
|
highest_quality_overall = cur_result.quality
|
||||||
logger.log(u"The highest quality of any match is " + Quality.qualityStrings[highest_quality_overall], logger.DEBUG)
|
logger.log(u"The highest quality of any match is " + Quality.qualityStrings[highest_quality_overall],
|
||||||
|
logger.DEBUG)
|
||||||
|
|
||||||
# see if every episode is wanted
|
# see if every episode is wanted
|
||||||
if bestSeasonNZB:
|
if bestSeasonNZB:
|
||||||
@ -378,10 +385,12 @@ def searchProviders(queueItem, show, season, episodes, seasonSearch=False, manua
|
|||||||
|
|
||||||
# if we need every ep in the season check if single episode releases should be preferred over season releases (missing single episode releases will be picked individually from season release)
|
# if we need every ep in the season check if single episode releases should be preferred over season releases (missing single episode releases will be picked individually from season release)
|
||||||
preferSingleEpisodesOverSeasonReleases = sickbeard.PREFER_EPISODE_RELEASES
|
preferSingleEpisodesOverSeasonReleases = sickbeard.PREFER_EPISODE_RELEASES
|
||||||
logger.log(u"Prefer single episodes over season releases: "+str(preferSingleEpisodesOverSeasonReleases), logger.DEBUG)
|
logger.log(u"Prefer single episodes over season releases: " + str(preferSingleEpisodesOverSeasonReleases),
|
||||||
|
logger.DEBUG)
|
||||||
# if we need every ep in the season and there's nothing better then just download this and be done with it (unless single episodes are preferred)
|
# if we need every ep in the season and there's nothing better then just download this and be done with it (unless single episodes are preferred)
|
||||||
if allWanted and bestSeasonNZB.quality == highest_quality_overall and not preferSingleEpisodesOverSeasonReleases:
|
if allWanted and bestSeasonNZB.quality == highest_quality_overall and not preferSingleEpisodesOverSeasonReleases:
|
||||||
logger.log(u"Every ep in this season is needed, downloading the whole " + bestSeasonNZB.provider.providerType + " " + bestSeasonNZB.name)
|
logger.log(
|
||||||
|
u"Every ep in this season is needed, downloading the whole " + bestSeasonNZB.provider.providerType + " " + bestSeasonNZB.name)
|
||||||
epObjs = []
|
epObjs = []
|
||||||
for curEpNum in allEps:
|
for curEpNum in allEps:
|
||||||
epObjs.append(show.getEpisode(season, curEpNum))
|
epObjs.append(show.getEpisode(season, curEpNum))
|
||||||
@ -500,13 +509,13 @@ def searchProviders(queueItem, show, season, episodes, seasonSearch=False, manua
|
|||||||
for epObj in multiResult.episodes:
|
for epObj in multiResult.episodes:
|
||||||
epNum = epObj.episode
|
epNum = epObj.episode
|
||||||
if epNum in foundResults[provider.name]:
|
if epNum in foundResults[provider.name]:
|
||||||
logger.log(u"A needed multi-episode result overlaps with a single-episode result for ep #" + str(
|
logger.log(
|
||||||
epNum) + ", removing the single-episode results from the list", logger.DEBUG)
|
u"A needed multi-episode result overlaps with a single-episode result for ep #" + str(
|
||||||
|
epNum) + ", removing the single-episode results from the list", logger.DEBUG)
|
||||||
del foundResults[provider.name][epNum]
|
del foundResults[provider.name][epNum]
|
||||||
|
|
||||||
finalResults += set(multiResults.values())
|
|
||||||
|
|
||||||
# of all the single ep results narrow it down to the best one for each episode
|
# of all the single ep results narrow it down to the best one for each episode
|
||||||
|
queueItem.results += set(multiResults.values())
|
||||||
for curEp in foundResults[provider.name]:
|
for curEp in foundResults[provider.name]:
|
||||||
if curEp in (MULTI_EP_RESULT, SEASON_RESULT):
|
if curEp in (MULTI_EP_RESULT, SEASON_RESULT):
|
||||||
continue
|
continue
|
||||||
@ -514,30 +523,34 @@ def searchProviders(queueItem, show, season, episodes, seasonSearch=False, manua
 if len(foundResults[provider.name][curEp]) == 0:
 continue

-result = pickBestResult(foundResults[provider.name][curEp], show)
-finalResults.append(result)
+bestResult = pickBestResult(foundResults[provider.name][curEp], show)

-logger.log(u"Checking if we should snatch " + result.name, logger.DEBUG)
-any_qualities, best_qualities = Quality.splitQuality(show.quality)
+# if all results were rejected move on to the next episode
+if not bestResult:
+continue

-# if there is a redownload that's higher than this then we definitely need to keep looking
-if best_qualities and result.quality == max(best_qualities):
-logger.log(u"Found a highest quality archive match to snatch [" + result.name + "]", logger.DEBUG)
-queueItem.results = [result]
-return queueItem
+# add result if its not a duplicate and
+if isFinalResult(bestResult):
+found = False
+for i, result in enumerate(queueItem.results):
+for bestResultEp in bestResult.episodes:
+if bestResultEp in result.episodes:
+if result.quality < bestResult.quality:
+queueItem.results.pop(i)
+else:
+found = True
+if not found:
+queueItem.results += [bestResult]

-# if there's no redownload that's higher (above) and this is the highest initial download then we're good
-elif any_qualities and result.quality in any_qualities:
-logger.log(u"Found a initial quality match to snatch [" + result.name + "]", logger.DEBUG)
-queueItem.results = [result]
-return queueItem
+# check that we got all the episodes we wanted first before doing a match and snatch
+wantedEpCount = 0
+for wantedEp in episodes:
+for result in queueItem.results:
+if wantedEp in result.episodes:
+wantedEpCount += 1

-# remove duplicates and insures snatch of highest quality from results
-for i1, result1 in enumerate(finalResults):
-for i2, result2 in enumerate(finalResults):
-if result2.provider.show == show and result2.episodes.sort() == episodes.sort() and len(finalResults) > 1:
-if result1.quality >= result2.quality:
-finalResults.pop(i2)
+# make sure we search every provider for results unless we found everything we wanted
+if providerNum != len(providers) and wantedEpCount != len(episodes):
+continue

-queueItem.results = finalResults
 return queueItem
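
Editor's note: the reworked block above keeps only the best candidate per episode in queueItem.results, dropping any queued result that an overlapping, higher-quality result supersedes. The following is a small standalone sketch of that rule, not SickRage's own classes; Result, add_best_result and the sample names are hypothetical stand-ins, and the sketch iterates over a copy of the list instead of using pop(i) inside the loop.

    # Standalone sketch of "keep only the best result per episode".
    class Result(object):
        def __init__(self, name, episodes, quality):
            self.name = name
            self.episodes = episodes  # episode numbers this release covers
            self.quality = quality    # higher number = better quality

    def add_best_result(results, candidate):
        """Drop queued results the candidate beats; skip the candidate if an
        equal-or-better overlapping result is already queued."""
        found = False
        for queued in list(results):      # iterate over a copy so removal is safe
            if set(queued.episodes) & set(candidate.episodes):
                if queued.quality < candidate.quality:
                    results.remove(queued)
                else:
                    found = True
        if not found:
            results.append(candidate)
        return results

    results = []
    add_best_result(results, Result("Show.S01E01.HDTV", [1], 1))
    add_best_result(results, Result("Show.S01E01.720p", [1], 2))   # supersedes the HDTV result
    add_best_result(results, Result("Show.S01E02.HDTV", [2], 1))   # different episode, kept
    print([r.name for r in results])   # ['Show.S01E01.720p', 'Show.S01E02.HDTV']
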
@ -90,7 +90,6 @@ class SearchQueue(generic_queue.GenericQueue):
 status = search.snatchEpisode(result)
 item.success = status
 generic_queue.QueueItem.finish(item)
-return status

 class ManualSearchQueueItem(generic_queue.QueueItem):
 def __init__(self, ep_obj):

@ -100,6 +99,7 @@ class ManualSearchQueueItem(generic_queue.QueueItem):
 self.success = None
 self.show = ep_obj.show
 self.ep_obj = ep_obj
+self.results = []

 def execute(self):
 generic_queue.QueueItem.execute(self)

@ -109,25 +109,18 @@ class ManualSearchQueueItem(generic_queue.QueueItem):
 searchResult = search.searchProviders(self, self.show, self.ep_obj.season, [self.ep_obj],False,True)

 if searchResult:
-self.success = SearchQueue().snatch_item(searchResult)
+SearchQueue().snatch_item(searchResult)
+else:
+ui.notifications.message('No downloads were found',
+"Couldn't find a download for <i>%s</i>" % self.ep_obj.prettyName())
+
+logger.log(u"Unable to find a download for " + self.ep_obj.prettyName())

 except Exception:
 logger.log(traceback.format_exc(), logger.DEBUG)

-if not self.success:
-ui.notifications.message('No downloads were found',
-"Couldn't find a download for <i>%s</i>" % self.ep_obj.prettyName())
-logger.log(u"Unable to find a download for " + self.ep_obj.prettyName())

 self.finish()

-def finish(self):
-# don't let this linger if something goes wrong
-if self.success == None:
-self.success = False
-generic_queue.QueueItem.finish(self)


 class BacklogQueueItem(generic_queue.QueueItem):
 def __init__(self, show, segment):
 generic_queue.QueueItem.__init__(self, 'Backlog', BACKLOG_SEARCH)

@ -137,6 +130,7 @@ class BacklogQueueItem(generic_queue.QueueItem):
 self.show = show
 self.segment = segment
 self.wantedEpisodes = []
+self.results = []

 self._changeMissingEpisodes()

@ -174,21 +168,18 @@ class BacklogQueueItem(generic_queue.QueueItem):
 if len(seasonEps) == len(self.wantedEpisodes):
 seasonSearch = True

-providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive() and x]

 try:
 logger.log("Beginning backlog search for episodes from [" + self.show.name + "] - Season[" + str(self.segment) + "]")
 searchResult = search.searchProviders(self, self.show, self.segment, self.wantedEpisodes, seasonSearch, False)

 if searchResult:
-self.success = SearchQueue().snatch_item(searchResult)
+SearchQueue().snatch_item(searchResult)
+else:
+logger.log(u"No needed episodes found during backlog search")

 except Exception:
 logger.log(traceback.format_exc(), logger.DEBUG)

-if not self.success:
-logger.log(u"No needed episodes found during backlog search")

 self.finish()

 def _need_any_episodes(self, statusResults, bestQualities):
@ -253,32 +244,34 @@ class FailedQueueItem(generic_queue.QueueItem):
 self.show = show
 self.episodes = episodes
 self.success = None
+self.results = []

 def execute(self):
 generic_queue.QueueItem.execute(self)

+failed_episodes = []
 for i, epObj in enumerate(self.episodes):
-(release, provider) = failed_history.findRelease(self.show, epObj.season, epObj.episode)
+(release, provider) = failed_history.findRelease(epObj)
 if release:
 logger.log(u"Marking release as bad: " + release)
-failed_history.markFailed(self.show, epObj.season, epObj.episode)
+failed_history.markFailed(epObj)
 failed_history.logFailed(release)
-history.logFailed(self.show.indexerid, epObj.season, epObj.episode, epObj.status, release, provider)
+history.logFailed(epObj, release, provider)
+failed_history.revertEpisode(epObj)
+failed_episodes.append(epObj)

-failed_history.revertEpisode(self.show, epObj.season, epObj.episode)
+if len(failed_episodes):
+try:
+logger.log(
+"Beginning failed download search for episodes from Season [" + str(self.episodes[0].season) + "]")

-try:
-logger.log(
-"Beginning failed download search for episodes from Season [" + str(self.episodes[0].season) + "]")
+searchResult = search.searchProviders(self, self.show, failed_episodes[0].season, failed_episodes, False, True)

-searchResult = search.searchProviders(self, self.show, self.episodes[0].season, self.episodes, False, True)
-if searchResult:
-self.success = SearchQueue().snatch_item(searchResult)
+if searchResult:
+SearchQueue().snatch_item(searchResult)
+else:
+logger.log(u"No episodes found to retry for failed downloads return from providers!")
 except Exception, e:
 logger.log(traceback.format_exc(), logger.DEBUG)

-if not self.success:
-logger.log(u"No needed episodes found on the RSS feeds")

 self.finish()
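
Editor's note: the reworked FailedQueueItem.execute above only re-searches episodes that actually had a failed release on record, and skips the provider search entirely when nothing was marked failed. Below is a minimal sketch of that gating flow; retry_failed and the callables passed into it are hypothetical stand-ins, not the failed_history or search APIs themselves.

    # Hypothetical stand-ins for the failed-history helpers and the provider search.
    def retry_failed(episodes, find_release, mark_failed, revert_episode, run_search):
        failed_episodes = []
        for ep in episodes:
            release, provider = find_release(ep)
            if release:                  # only episodes with a recorded bad release are retried
                mark_failed(ep)
                revert_episode(ep)       # put the episode back into a wanted state
                failed_episodes.append(ep)

        if failed_episodes:              # no failed releases -> no search at all
            return run_search(failed_episodes)
        return None

    result = retry_failed(
        ["S01E01", "S01E02"],
        find_release=lambda ep: (("bad.release." + ep, "someProvider") if ep == "S01E02" else (None, None)),
        mark_failed=lambda ep: None,
        revert_episode=lambda ep: None,
        run_search=lambda eps: "searched %d episode(s)" % len(eps),
    )
    print(result)   # searched 1 episode(s)
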
@ -119,7 +119,10 @@ def makeSceneSeasonSearchString(show, ep_obj, extraSearchType=None):
 [show.indexerid])
 numseasons = int(numseasonsSQlResult[0][0])

-seasonStrings = ["S%02d" % int(ep_obj.scene_season)]
+if show.air_by_date or show.sports:
+seasonStrings = [str(ep_obj.airdate)[:7]]
+else:
+seasonStrings = ["S%02d" % int(ep_obj.scene_season)]

 showNames = set(makeSceneShowSearchStrings(show))
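
Editor's note: the new branch above picks the season search token by show type: date-based (air-by-date or sports) shows search by the airdate's year-month prefix, everything else by the usual SXX token. A small hedged sketch of that decision, with a plain datetime.date standing in for ep_obj.airdate and a hypothetical helper name:

    import datetime

    def season_search_strings(scene_season, airdate, air_by_date=False, sports=False):
        # Date-based shows: "YYYY-MM" taken from the airdate; regular shows: "SXX".
        if air_by_date or sports:
            return [str(airdate)[:7]]
        return ["S%02d" % int(scene_season)]

    print(season_search_strings(5, datetime.date(2014, 3, 12)))                    # ['S05']
    print(season_search_strings(5, datetime.date(2014, 3, 12), air_by_date=True))  # ['2014-03']
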
@ -144,9 +144,9 @@ class ShowQueue(generic_queue.GenericQueue):
 return queueItemObj

 def addShow(self, indexer, indexer_id, showDir, default_status=None, quality=None, flatten_folders=None,
-subtitles=None, lang="en", refresh=False):
+subtitles=None, lang="en"):
 queueItemObj = QueueItemAdd(indexer, indexer_id, showDir, default_status, quality, flatten_folders, lang,
-subtitles, refresh)
+subtitles)

 self.add_item(queueItemObj)

@ -203,8 +203,7 @@ class ShowQueueItem(generic_queue.QueueItem):


 class QueueItemAdd(ShowQueueItem):
-def __init__(self, indexer, indexer_id, showDir, default_status, quality, flatten_folders, lang, subtitles,
-refresh):
+def __init__(self, indexer, indexer_id, showDir, default_status, quality, flatten_folders, lang, subtitles):

 self.indexer = indexer
 self.indexer_id = indexer_id

@ -214,7 +213,6 @@ class QueueItemAdd(ShowQueueItem):
 self.flatten_folders = flatten_folders
 self.lang = lang
 self.subtitles = subtitles
-self.refresh = refresh

 self.show = None

@ -380,7 +378,8 @@ class QueueItemAdd(ShowQueueItem):
 logger.log(u"Launching backlog for this show since its episodes are WANTED")
 sickbeard.backlogSearchScheduler.action.searchBacklog([self.show]) #@UndefinedVariable

-self.show.writeMetadata(force=self.refresh)
+self.show.writeMetadata()
+self.show.updateMetadata()
 self.show.populateCache()

 self.show.flushEpisodes()

@ -410,7 +409,8 @@ class QueueItemRefresh(ShowQueueItem):
 logger.log(u"Performing refresh on " + self.show.name)

 self.show.refreshDir()
-self.show.writeMetadata(force=True)
+self.show.writeMetadata()
+self.show.updateMetadata()
 self.show.populateCache()

 self.inProgress = False

@ -205,7 +205,9 @@ class TVShow(object):
 if ep != None:
 self.episodes[season][episode] = ep

-return self.episodes[season][episode]
+epObj = self.episodes[season][episode]
+epObj.convertToSceneNumbering()
+return epObj

 def should_update(self, update_date=datetime.date.today()):

@ -251,7 +253,7 @@ class TVShow(object):

 return False

-def writeShowNFO(self, force=False):
+def writeShowNFO(self):

 result = False

@ -261,11 +263,11 @@ class TVShow(object):

 logger.log(str(self.indexerid) + u": Writing NFOs for show")
 for cur_provider in sickbeard.metadata_provider_dict.values():
-result = cur_provider.create_show_metadata(self, force) or result
+result = cur_provider.create_show_metadata(self) or result

 return result

-def writeMetadata(self, show_only=False, force=False):
+def writeMetadata(self, show_only=False):

 if not ek.ek(os.path.isdir, self._location):
 logger.log(str(self.indexerid) + u": Show dir doesn't exist, skipping NFO generation")

@ -273,12 +275,12 @@ class TVShow(object):

 self.getImages()

-self.writeShowNFO(force)
+self.writeShowNFO()

 if not show_only:
-self.writeEpisodeNFOs(force)
+self.writeEpisodeNFOs()

-def writeEpisodeNFOs(self, force=False):
+def writeEpisodeNFOs(self):

 if not ek.ek(os.path.isdir, self._location):
 logger.log(str(self.indexerid) + u": Show dir doesn't exist, skipping NFO generation")
@ -293,9 +295,31 @@ class TVShow(object):
 logger.log(str(self.indexerid) + u": Retrieving/creating episode " + str(epResult["season"]) + "x" + str(
 epResult["episode"]), logger.DEBUG)
 curEp = self.getEpisode(epResult["season"], epResult["episode"])
-curEp.createMetaFiles(force)
+curEp.createMetaFiles()
+
+def updateMetadata(self):
+
+if not ek.ek(os.path.isdir, self._location):
+logger.log(str(self.indexerid) + u": Show dir doesn't exist, skipping NFO generation")
+return
+
+self.updateShowNFO()
+
+def updateShowNFO(self):
+
+result = False
+
+if not ek.ek(os.path.isdir, self._location):
+logger.log(str(self.indexerid) + u": Show dir doesn't exist, skipping NFO generation")
+return False
+
+logger.log(str(self.indexerid) + u": Updating NFOs for show with new indexer info")
+for cur_provider in sickbeard.metadata_provider_dict.values():
+result = cur_provider.update_show_indexer_metadata(self) or result
+
+return result

 # find all media files in the show folder and create episodes for as many as possible
 def loadEpisodesFromDir(self):
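
Editor's note: the new updateShowNFO above walks every configured metadata provider and reports success if any of them updated something; the or-accumulation keeps an earlier True from being lost to a later False. A tiny standalone sketch of that pattern, where the provider callables are dummies rather than sickbeard.metadata_provider_dict entries:

    def update_all(providers, show):
        result = False
        for provider in providers:
            result = provider(show) or result   # stays True once any provider reports success
        return result

    providers = [lambda show: False, lambda show: True, lambda show: False]
    print(update_all(providers, "Some Show"))   # True
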
@ -1158,8 +1182,6 @@ class TVEpisode(object):

 self.specifyEpisode(self.season, self.episode)

-self.convertToSceneNumbering()
-
 self.relatedEps = []

 self.checkForMetaFiles()

@ -1610,28 +1632,28 @@ class TVEpisode(object):
 toReturn += "status: " + str(self.status) + "\n"
 return toReturn

-def createMetaFiles(self, force=False):
+def createMetaFiles(self):

 if not ek.ek(os.path.isdir, self.show._location):
 logger.log(str(self.show.indexerid) + u": The show dir is missing, not bothering to try to create metadata")
 return

-self.createNFO(force)
+self.createNFO()
 self.createThumbnail()

 if self.checkForMetaFiles():
 self.saveToDB()

-def createNFO(self, force=False):
+def createNFO(self):

 result = False

 for cur_provider in sickbeard.metadata_provider_dict.values():
-result = cur_provider.create_episode_metadata(self, force) or result
+result = cur_provider.create_episode_metadata(self) or result

 return result

-def createThumbnail(self, force=False):
+def createThumbnail(self):

 result = False

@ -1904,6 +1926,9 @@ class TVEpisode(object):
 result_name = result_name.replace('%rg', 'sickbeard')
 logger.log(u"Episode has no release name, replacing it with a generic one: " + result_name, logger.DEBUG)

+if not replace_map['%RT']:
+result_name = re.sub('([ _.-]*)%RT([ _.-]*)', r'\2', result_name)
+
 # split off ep name part only
 name_groups = re.split(r'[\\/]', result_name)

@ -232,7 +232,7 @@ class TVCache():
 # if we don't have complete info then parse the filename to get it
 try:
 myParser = NameParser()
-parse_result = myParser.parse(name)
+parse_result = myParser.parse(name).convert()
 except InvalidNameException:
 logger.log(u"Unable to parse the filename " + name + " into a valid episode", logger.DEBUG)
 return None

@ -255,13 +255,6 @@ class TVCache():
 if showResult:
 indexerid = int(showResult[0])

-# if not indexerid:
-# for curShow in sickbeard.showList:
-# if curShow.name == parse_result.series_name:
-# if show_name_helpers.isGoodResult(name, curShow, False):
-# indexerid = int(curShow.indexerid)
-# break

 showObj = None
 if indexerid:
 showObj = helpers.findCertainShow(sickbeard.showList, indexerid)
@ -327,14 +320,14 @@ class TVCache():
 def findNeededEpisodes(self, epObj=None, manualSearch=False):
 neededEps = {}

-myDB = self._getDB()
+cacheDB = self._getDB()

 if not epObj:
-sqlResults = myDB.select("SELECT * FROM [" + self.providerID + "]")
+sqlResults = cacheDB.select("SELECT * FROM [" + self.providerID + "]")
 else:
-sqlResults = myDB.select(
+sqlResults = cacheDB.select(
 "SELECT * FROM [" + self.providerID + "] WHERE indexerid = ? AND season = ? AND episodes LIKE ?",
-[epObj.show.indexerid, epObj.scene_season, "%|" + str(epObj.scene_episode) + "|%"])
+[epObj.show.indexerid, epObj.season, "%|" + str(epObj.episode) + "|%"])

 # for each cache entry
 for curResult in sqlResults:

@ -386,9 +379,9 @@ class TVCache():

 # add it to the list
 if epObj not in neededEps:
-neededEps[epObj] = [result]
+neededEps[epObj.episode] = [result]
 else:
-neededEps[epObj].append(result)
+neededEps[epObj.episode].append(result)

 return neededEps
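
Editor's note: with the change above, findNeededEpisodes keys its result dict by the plain episode number rather than the episode object, so several cache hits for the same episode collect into one list. A rough sketch of that grouping, using hypothetical dict rows in place of the provider cache rows:

    from collections import defaultdict

    def group_results_by_episode(rows):
        needed = defaultdict(list)          # episode number -> candidate results
        for row in rows:
            needed[row["episode"]].append(row["name"])
        return dict(needed)

    rows = [
        {"episode": 1, "name": "Show.S01E01.HDTV"},
        {"episode": 1, "name": "Show.S01E01.720p"},
        {"episode": 2, "name": "Show.S01E02.HDTV"},
    ]
    print(group_results_by_episode(rows))
    # {1: ['Show.S01E01.HDTV', 'Show.S01E01.720p'], 2: ['Show.S01E02.HDTV']}
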
@ -27,6 +27,7 @@ import tarfile
 import stat
 import traceback
 import gh_api as github
+import threading

 import sickbeard
 from sickbeard import helpers

@ -57,23 +58,27 @@ class CheckVersion():
 self.updater = None

 def run(self):
+updated = None
 if self.check_for_new_version():
 if sickbeard.AUTO_UPDATE:
 logger.log(u"New update found for SickRage, starting auto-updater ...")
 updated = sickbeard.versionCheckScheduler.action.update()
 if updated:
 logger.log(u"Update was successfull, restarting SickRage ...")
-sickbeard.restart(False)

-# refresh scene exceptions too
-scene_exceptions.retrieve_exceptions()
+# do a soft restart
+threading.Timer(2, sickbeard.invoke_restart, [False]).start()

-# refresh network timezones
-network_timezones.update_network_dict()
+if not updated:
+# refresh scene exceptions too
+scene_exceptions.retrieve_exceptions()

-# sure, why not?
-if sickbeard.USE_FAILED_DOWNLOADS:
-failed_history.trimHistory()
+# refresh network timezones
+network_timezones.update_network_dict()
+
+# sure, why not?
+if sickbeard.USE_FAILED_DOWNLOADS:
+failed_history.trimHistory()

 def find_install_type(self):
 """
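
Editor's note: the auto-update path above now defers the restart with threading.Timer(2, sickbeard.invoke_restart, [False]).start() instead of calling sickbeard.restart(False) directly, so the version-check thread can finish its work before the restart fires. A minimal standalone sketch of that deferred-call pattern; do_restart below is a hypothetical stand-in for sickbeard.invoke_restart.

    import threading

    def do_restart(install_updates):
        print("soft restart, install_updates=%s" % install_updates)

    # Schedule do_restart(False) roughly two seconds from now and return immediately.
    timer = threading.Timer(2, do_restart, [False])
    timer.start()
    timer.join()   # only so this demo waits for the callback; the real caller does not block
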
@ -1565,16 +1565,16 @@ class ConfigProviders:
 else:
 logger.log(u"don't know what " + curProvider + " is, skipping")

-sickbeard.EZRSS_RATIO = config.to_int(ezrss_ratio)
+sickbeard.EZRSS_RATIO = ezrss_ratio

 sickbeard.TVTORRENTS_DIGEST = tvtorrents_digest.strip()
 sickbeard.TVTORRENTS_HASH = tvtorrents_hash.strip()
 sickbeard.TVTORRENTS_RATIO = config.to_int(tvtorrents_ratio)

 sickbeard.BTN_API_KEY = btn_api_key.strip()
-sickbeard.BTN_RATIO = config.to_int(btn_ratio)
+sickbeard.BTN_RATIO = btn_ratio

-sickbeard.THEPIRATEBAY_RATIO = config.to_int(thepiratebay_ratio)
+sickbeard.THEPIRATEBAY_RATIO = thepiratebay_ratio
 sickbeard.THEPIRATEBAY_TRUSTED = config.checkbox_to_value(thepiratebay_trusted)

 thepiratebay_proxy = config.checkbox_to_value(thepiratebay_proxy)

@ -1587,48 +1587,48 @@ class ConfigProviders:

 sickbeard.TORRENTLEECH_USERNAME = torrentleech_username
 sickbeard.TORRENTLEECH_PASSWORD = torrentleech_password
-sickbeard.TORRENTLEECH_RATIO = config.to_int(torrentleech_ratio)
+sickbeard.TORRENTLEECH_RATIO = torrentleech_ratio

 sickbeard.IPTORRENTS_USERNAME = iptorrents_username.strip()
 sickbeard.IPTORRENTS_PASSWORD = iptorrents_password.strip()
-sickbeard.IPTORRENTS_RATIO = config.to_int(iptorrents_ratio)
+sickbeard.IPTORRENTS_RATIO = iptorrents_ratio

 sickbeard.IPTORRENTS_FREELEECH = config.checkbox_to_value(iptorrents_freeleech)

 sickbeard.KAT_TRUSTED = config.checkbox_to_value(kat_trusted)
-sickbeard.KAT_RATIO = config.to_int(kat_ratio)
+sickbeard.KAT_RATIO = kat_ratio
 sickbeard.KAT_VERIFIED = config.checkbox_to_value(kat_verified)

 sickbeard.PUBLICHD_RATIO = publichd_ratio

 sickbeard.TORRENTDAY_USERNAME = torrentday_username.strip()
 sickbeard.TORRENTDAY_PASSWORD = torrentday_password.strip()
-sickbeard.TORRENTDAY_RATIO = config.to_int(torrentday_ratio)
+sickbeard.TORRENTDAY_RATIO = torrentday_ratio

 sickbeard.TORRENTDAY_FREELEECH = config.checkbox_to_value(torrentday_freeleech)

 sickbeard.SCC_USERNAME = scc_username.strip()
 sickbeard.SCC_PASSWORD = scc_password.strip()
-sickbeard.SCC_RATIO = config.to_int(scc_ratio)
+sickbeard.SCC_RATIO = scc_ratio

 sickbeard.HDTORRENTS_USERNAME = hdtorrents_username.strip()
 sickbeard.HDTORRENTS_PASSWORD = hdtorrents_password.strip()
-sickbeard.HDTORRENTS_RATIO = config.to_int(hdtorrents_ratio)
+sickbeard.HDTORRENTS_RATIO = hdtorrents_ratio

 sickbeard.HDBITS_USERNAME = hdbits_username.strip()
 sickbeard.HDBITS_PASSKEY = hdbits_passkey.strip()
-sickbeard.HDBITS_RATIO = config.to_int(hdbits_ratio)
+sickbeard.HDBITS_RATIO = hdbits_ratio

 sickbeard.OMGWTFNZBS_USERNAME = omgwtfnzbs_username.strip()
 sickbeard.OMGWTFNZBS_APIKEY = omgwtfnzbs_apikey.strip()

 sickbeard.NEXTGEN_USERNAME = nextgen_username.strip()
 sickbeard.NEXTGEN_PASSWORD = nextgen_password.strip()
-sickbeard.NEXTGEN_RATIO = config.to_int(nextgen_ratio)
+sickbeard.NEXTGEN_RATIO = nextgen_ratio

 sickbeard.SPEEDCD_USERNAME = speedcd_username.strip()
 sickbeard.SPEEDCD_PASSWORD = speedcd_password.strip()
-sickbeard.SPEEDCD_RATIO = config.to_int(speedcd_ratio)
+sickbeard.SPEEDCD_RATIO = speedcd_ratio
 sickbeard.SPEEDCD_FREELEECH = config.checkbox_to_value(speedcd_freeleech)

 sickbeard.NEWZNAB_DATA = '!!!'.join([x.configStr() for x in sickbeard.newznabProviderList])
@ -2350,7 +2350,7 @@ class NewHomeAddShows:
 sickbeard.STATUS_DEFAULT,
 sickbeard.QUALITY_DEFAULT,
 sickbeard.FLATTEN_FOLDERS_DEFAULT,
-sickbeard.SUBTITLES_DEFAULT, refresh=True)
+sickbeard.SUBTITLES_DEFAULT)
 num_added += 1

 if num_added: