Mirror of https://github.com/moparisthebest/SickRage (synced 2024-12-13 11:32:20 -05:00)

Merge remote-tracking branch 'git.darksystems.lan/nightly' into dev

This commit is contained in: commit 7d4e7f04a4
@@ -56,14 +56,11 @@ import threading
import getopt

import sickbeard
from sickbeard import db
from sickbeard import db, logger, network_timezones, failed_history, name_cache, versionChecker
from sickbeard.tv import TVShow
from sickbeard import logger, network_timezones, failed_history, name_cache
from sickbeard.webserveInit import SRWebServer
from sickbeard.version import SICKBEARD_VERSION
from sickbeard.databases.mainDB import MIN_DB_VERSION, MAX_DB_VERSION
from sickbeard.event_queue import Events

from lib.configobj import ConfigObj

throwaway = datetime.datetime.strptime('20110101', '%Y%m%d')

@@ -350,7 +347,7 @@ class SickRage(object):
os._exit(1)

if self.consoleLogging:
print "Starting up SickRage " + SICKBEARD_VERSION + " from " + sickbeard.CONFIG_FILE
print "Starting up SickRage " + sickbeard.BRANCH + " from " + sickbeard.CONFIG_FILE

# Fire up all our threads
sickbeard.start()

@@ -369,6 +366,7 @@ class SickRage(object):
if self.forceUpdate or sickbeard.UPDATE_SHOWS_ON_START:
sickbeard.showUpdateScheduler.action.run(force=True)  # @UndefinedVariable

# Launch browser
if sickbeard.LAUNCH_BROWSER and not (self.noLaunch or self.runAsDaemon):
sickbeard.launchBrowser(self.startPort)
BIN  gui/slick/images/network/nine network.png  (new binary file, 3.5 KiB, not shown)
@@ -30,7 +30,7 @@
<table class="infoTable" cellspacing="1" border="0" cellpadding="0">
<tr><td class="infoTableHeader">SR Version: </td><td class="infoTableCell">
#if $sickbeard.VERSION_NOTIFY
BRANCH: ($sickbeard.version.SICKBEARD_VERSION) / COMMIT: ($sickbeard.CUR_COMMIT_HASH) <!-- – build.date //--><br />
BRANCH: ($sickbeard.BRANCH) / COMMIT: ($sickbeard.CUR_COMMIT_HASH) <!-- – build.date //--><br />
#else
You don't have version checking turned on. Please turn on "Check for Update" in Config > General.<br />
#end if

@@ -282,7 +282,7 @@
<span class="component-desc">
<select id="branchVersion" name="branchVersion">
#for $cur_branch in $sickbeard.versionCheckScheduler.action.list_remote_branches():
<option value="$cur_branch" #if $cur_branch == $sickbeard.version.SICKBEARD_VERSION then "selected=\"selected\"" else ""#>$cur_branch.capitalize()</option>
<option value="$cur_branch" #if $cur_branch == $sickbeard.BRANCH then "selected=\"selected\"" else ""#>$cur_branch.capitalize()</option>
#end for
</select>
<input class="btn" class="btn" type="button" id="branchCheckout" value="Checkout Branch">
@@ -1,4 +1,3 @@
#import sickbeard.version
#import sickbeard
#import urllib

@@ -10,7 +9,7 @@
<meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1">
<meta name="robots" content="noindex">
<meta name="apple-mobile-web-app-capable" content="yes">
<title>SickRage - BRANCH:$sickbeard.version.SICKBEARD_VERSION - $title</title>
<title>SickRage - BRANCH:[$sickbeard.BRANCH] - $title</title>
<!--[if lt IE 9]>
<script src="//html5shiv.googlecode.com/svn/trunk/html5.js"></script>
<![endif]-->
@@ -268,10 +268,12 @@ $(document).ready(function(){
var devices = jQuery.parseJSON(data).devices;
$("#pushbullet_device_list").html('');
for (var i = 0; i < devices.length; i++) {
if(current_pushbullet_device == devices[i].iden) {
$("#pushbullet_device_list").append('<option value="'+devices[i].iden+'" selected>' + devices[i].extras.nickname + '</option>')
} else {
$("#pushbullet_device_list").append('<option value="'+devices[i].iden+'">' + devices[i].extras.nickname + '</option>')
if(devices[i].active == true) {
if(current_pushbullet_device == devices[i].iden) {
$("#pushbullet_device_list").append('<option value="'+devices[i].iden+'" selected>' + devices[i].nickname + '</option>')
} else {
$("#pushbullet_device_list").append('<option value="'+devices[i].iden+'">' + devices[i].nickname + '</option>')
}
}
}
if(msg) {
@@ -471,13 +471,11 @@ class Tvdb:
if cache is True:
self.config['cache_enabled'] = True
self.config['cache_location'] = self._getTempDir()
self.sess = CacheControl(cache=caches.FileCache(self.config['cache_location']))
elif cache is False:
self.config['cache_enabled'] = False
elif isinstance(cache, basestring):
self.config['cache_enabled'] = True
self.config['cache_location'] = cache
self.sess = CacheControl(cache=caches.FileCache(self.config['cache_location']))
else:
raise ValueError("Invalid value for Cache %r (type was %s)" % (cache, type(cache)))

@@ -565,14 +563,15 @@ class Tvdb:
# get response from TVDB
if self.config['cache_enabled']:
session = CacheControl(cache=caches.FileCache(self.config['cache_location']))
if self.config['proxy']:
log().debug("Using proxy for URL: %s" % url)
self.sess.proxies = {
session.proxies = {
"http": self.config['proxy'],
"https": self.config['proxy'],
}

resp = self.sess.get(url, cache_auto=True, params=params)
resp = session.get(url, cache_auto=True, params=params)
else:
resp = requests.get(url, params=params)
except requests.exceptions.HTTPError, e:

@@ -630,7 +629,7 @@ class Tvdb:
"""
try:
src = self._loadUrl(url, params=params, language=language)
src = [src[item] for item in src][0]
src = [src[item] for item in src][0] if src else []
except:
errormsg = "There was an error with the XML retrieved from thetvdb.com:"
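Not part of the diff: the Tvdb hunks above replace the shared self.sess with a per-request CacheControl session and attach the configured proxy to it. A minimal sketch of that pattern using the standard cachecontrol API (the function name and arguments are illustrative, not SickRage code):

import requests
from cachecontrol import CacheControl, caches

def cached_get(url, cache_location, proxy_url=None, params=None):
    # wrap a plain requests session with an on-disk HTTP cache
    session = CacheControl(requests.Session(), cache=caches.FileCache(cache_location))
    if proxy_url:
        # route both schemes through the configured proxy
        session.proxies = {"http": proxy_url, "https": proxy_url}
    return session.get(url, params=params)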
@@ -305,7 +305,6 @@ class TVRage:
self.shows = ShowContainer()  # Holds all Show classes
self.corrections = {}  # Holds show-name to show_id mapping
self.sess = requests.session()  # HTTP Session

self.config = {}

@@ -323,13 +322,11 @@ class TVRage:
if cache is True:
self.config['cache_enabled'] = True
self.config['cache_location'] = self._getTempDir()
self.sess = CacheControl(cache=caches.FileCache(self.config['cache_location']))
elif cache is False:
self.config['cache_enabled'] = False
elif isinstance(cache, basestring):
self.config['cache_enabled'] = True
self.config['cache_location'] = cache
self.sess = CacheControl(cache=caches.FileCache(self.config['cache_location']))
else:
raise ValueError("Invalid value for Cache %r (type was %s)" % (cache, type(cache)))

@@ -396,6 +393,7 @@ class TVRage:
except ImportError:
return os.path.join(tempfile.gettempdir(), "tvrage_api")

return os.path.join(tempfile.gettempdir(), "tvrage_api-%s" % (uid))

#@retry(tvrage_error)

@@ -405,14 +403,15 @@ class TVRage:
# get response from TVRage
if self.config['cache_enabled']:
session = CacheControl(cache=caches.FileCache(self.config['cache_location']))
if self.config['proxy']:
log().debug("Using proxy for URL: %s" % url)
self.sess.proxies = {
session.proxies = {
"http": self.config['proxy'],
"https": self.config['proxy'],
}

resp = self.sess.get(url.strip(), cache_auto=True, params=params)
resp = session.get(url.strip(), cache_auto=True, params=params)
else:
resp = requests.get(url.strip(), params=params)

@@ -488,7 +487,7 @@ class TVRage:
try:
src = self._loadUrl(url, params)
src = [src[item] for item in src][0]
src = [src[item] for item in src][0] if src else []
except:
errormsg = "There was an error with the XML retrieved from tvrage.com"
149  setup.py
@@ -1,12 +1,30 @@
import re
import urllib, ConfigParser
from distutils.core import setup
import py2exe, sys, os, shutil, datetime, zipfile, subprocess, fnmatch
import urllib
import ConfigParser
import sys
import os
import shutil
import zipfile
import subprocess
import fnmatch
import googlecode_upload
from lib.pygithub import github

from distutils.core import setup

try:
import py2exe
except:
print "The Python module py2exe is required"
sys.exit(1)

try:
import pygithub.github
except:
print "The Python module pyGitHub is required"
sys.exit(1)

# mostly stolen from the SABnzbd package.py file
name = 'SickBeard'
name = 'SickRage'
version = '0.1'

release = name + '-' + version

@@ -14,12 +32,12 @@ release = name + '-' + version
Win32ConsoleName = 'SickBeard-console.exe'
Win32WindowName = 'SickBeard.exe'

def findLatestBuild():
regex = "http\://sickbeard\.googlecode\.com/files/SickBeard\-win32\-alpha\-build(\d+)(?:\.\d+)?\.zip"
svnFile = urllib.urlopen("http://code.google.com/p/sickbeard/downloads/list")
def findLatestBuild():
regex = "http\://sickrage\.googlecode\.com/files/SickRage\-win32\-alpha\-build(\d+)(?:\.\d+)?\.zip"
svnFile = urllib.urlopen("http://code.google.com/p/sickrage/downloads/list")

for curLine in svnFile.readlines():
match = re.search(regex, curLine)
if match:

@@ -28,39 +46,38 @@ def findLatestBuild():
return None

def recursive_find_data_files(root_dir, allowed_extensions=('*')):
to_return = {}
for (dirpath, dirnames, filenames) in os.walk(root_dir):
if not filenames:
continue

for cur_filename in filenames:

matches_pattern = False
for cur_pattern in allowed_extensions:
if fnmatch.fnmatch(cur_filename, '*.'+cur_pattern):
if fnmatch.fnmatch(cur_filename, '*.' + cur_pattern):
matches_pattern = True
if not matches_pattern:
continue

cur_filepath = os.path.join(dirpath, cur_filename)
to_return.setdefault(dirpath, []).append(cur_filepath)

return sorted(to_return.items())

def find_all_libraries(root_dirs):
libs = []

for cur_root_dir in root_dirs:
for (dirpath, dirnames, filenames) in os.walk(cur_root_dir):
if '__init__.py' not in filenames:
continue
libs.append(dirpath.replace(os.sep, '.'))
libs.append(dirpath.replace(os.sep, '.'))

return libs

@@ -71,7 +88,7 @@ def allFiles(dir):
if os.path.isdir(fullFile):
files += allFiles(fullFile)
else:
files.append(fullFile)
files.append(fullFile)

return files

@@ -94,19 +111,14 @@ if not 'nopull' in oldArgs:
# pull new source from git
print 'Updating source from git'
p = subprocess.Popen('git pull origin master', shell=True, cwd=compile_dir)
o,e = p.communicate()
o, e = p.communicate()

# figure out what build this is going to be
latestBuild = findLatestBuild()
if 'test' in oldArgs:
currentBuildNumber = str(latestBuild)+'a'
currentBuildNumber = str(latestBuild) + 'a'
else:
currentBuildNumber = latestBuild+1

# write the version file before we compile
versionFile = open("sickbeard/version.py", "w")
versionFile.write("SICKBEARD_VERSION = \"build "+str(currentBuildNumber)+"\"")
versionFile.close()
currentBuildNumber = latestBuild + 1

# set up the compilation options
data_files = recursive_find_data_files('data', ['gif', 'png', 'jpg', 'ico', 'js', 'css', 'tmpl'])

@@ -114,25 +126,25 @@ data_files = recursive_find_data_files('data', ['gif', 'png', 'jpg', 'ico', 'js'
options = dict(
name=name,
version=release,
author='Nic Wolfe',
author_email='nic@wolfeden.ca',
author='echel0n',
author_email='sickrage.tv@gmail.com',
description=name + ' ' + release,
scripts=['SickBeard.py'],
packages=find_all_libraries(['sickbeard', 'lib']),
)

# set up py2exe to generate the console app
program = [ {'script': 'SickBeard.py' } ]
program = [{'script': 'SickBeard.py'}]
options['options'] = {'py2exe':
{
'bundle_files': 3,
'packages': ['Cheetah'],
'excludes': ['Tkconstants', 'Tkinter', 'tcl'],
'optimize': 2,
'compressed': 0
}
}
options['zipfile'] = 'lib/sickbeard.zip'
{
'bundle_files': 3,
'packages': ['Cheetah'],
'excludes': ['Tkconstants', 'Tkinter', 'tcl'],
'optimize': 2,
'compressed': 0
}
}
options['zipfile'] = 'lib/sickrage.zip'
options['console'] = program
options['data_files'] = data_files

@@ -146,7 +158,7 @@ try:
os.rename("dist/%s" % Win32WindowName, "dist/%s" % Win32ConsoleName)
except:
print "Cannot create dist/%s" % Win32ConsoleName
#sys.exit(1)
# sys.exit(1)

# we don't need this stuff when we make the 2nd exe
del options['console']

@@ -158,8 +170,8 @@ setup(**options)
# compile sabToSickbeard.exe using the existing setup.py script
auto_process_dir = os.path.join(compile_dir, 'autoProcessTV')
p = subprocess.Popen([ sys.executable, os.path.join(auto_process_dir, 'setup.py') ], cwd=auto_process_dir, shell=True)
o,e = p.communicate()
p = subprocess.Popen([sys.executable, os.path.join(auto_process_dir, 'setup.py')], cwd=auto_process_dir, shell=True)
o, e = p.communicate()

# copy autoProcessTV files to the dist dir
auto_process_files = ['autoProcessTV/sabToSickBeard.py',

@@ -167,9 +179,9 @@ auto_process_files = ['autoProcessTV/sabToSickBeard.py',
'autoProcessTV/autoProcessTV.py',
'autoProcessTV/autoProcessTV.cfg.sample',
'autoProcessTV/sabToSickBeard.exe']

os.makedirs('dist/autoProcessTV')

for curFile in auto_process_files:
newFile = os.path.join('dist', curFile)
print "Copying file from", curFile, "to", newFile

@@ -177,9 +189,9 @@ for curFile in auto_process_files:
# compile updater.exe
setup(
options = {'py2exe': {'bundle_files': 1}},
zipfile = None,
console = ['updater.py'], requires=['Cheetah']
options={'py2exe': {'bundle_files': 1}},
zipfile=None,
console=['updater.py'], requires=['Cheetah']
)

if 'test' in oldArgs:

@@ -188,7 +200,7 @@ else:
# start building the CHANGELOG.txt
print 'Creating changelog'
gh = github.GitHub()

# read the old changelog and find the last commit from that build
lastCommit = ""
try:

@@ -197,25 +209,25 @@ else:
cl.close()
except:
print "I guess there's no changelog"

newestCommit = ""
changeString = ""

# cycle through all the git commits and save their commit messages
for curCommit in gh.commits.forBranch('midgetspy', 'Sick-Beard'):
for curCommit in gh.commits.forBranch('echel0n', 'SickRage'):
if curCommit.id == lastCommit:
break

if newestCommit == "":
newestCommit = curCommit.id

changeString += curCommit.message + "\n\n"

# if we didn't find any changes don't make a changelog file
if newestCommit != "":
newChangelog = open("CHANGELOG.txt", "w")
newChangelog.write(newestCommit+"\n\n")
newChangelog.write("Changelog for build "+str(currentBuildNumber)+"\n\n")
newChangelog.write(newestCommit + "\n\n")
newChangelog.write("Changelog for build " + str(currentBuildNumber) + "\n\n")
newChangelog.write(changeString)
newChangelog.close()
else:

@@ -227,7 +239,7 @@ if os.path.exists("CHANGELOG.txt"):
# figure out what we're going to call the zip file
print 'Zipping files...'
zipFilename = 'SickBeard-win32-alpha-build'+str(currentBuildNumber)
zipFilename = 'SickRage-win32-alpha-build' + str(currentBuildNumber)
if os.path.isfile(zipFilename + '.zip'):
zipNum = 2
while os.path.isfile(zipFilename + '.{0:0>2}.zip'.format(str(zipNum))):

@@ -245,12 +257,6 @@ z.close()
print "Created zip at", zipFilename

# leave version file as it is in source
print "Reverting version file to master"
versionFile = open("sickbeard/version.py", "w")
versionFile.write("SICKBEARD_VERSION = \"master\"")
versionFile.close()

# i store my google code username/pw in a config so i can have this file in public source control
config = ConfigParser.ConfigParser()
configFilename = os.path.join(compile_dir, "gc.ini")

@@ -262,12 +268,15 @@ gc_password = config.get("GC", "password")
# upload to google code unless I tell it not to
if "noup" not in oldArgs and "test" not in oldArgs:
print "Uploading zip to google code"
googlecode_upload.upload(os.path.abspath(zipFilename+".zip"), "sickbeard", gc_username, gc_password, "Win32 alpha build "+str(currentBuildNumber)+" (unstable/development release)", ["Featured","Type-Executable","OpSys-Windows"])
googlecode_upload.upload(os.path.abspath(zipFilename + ".zip"), "sickrage", gc_username, gc_password,
"Win32 alpha build " + str(currentBuildNumber) + " (unstable/development release)",
["Featured", "Type-Executable", "OpSys-Windows"])

if 'nopush' not in oldArgs and 'test' not in oldArgs:
# tag commit as a new build and push changes to github
print 'Tagging commit and pushing'
p = subprocess.Popen('git tag -a "build-'+str(currentBuildNumber)+'" -m "Windows build '+zipFilename+'"', shell=True, cwd=compile_dir)
o,e = p.communicate()
p = subprocess.Popen('git tag -a "build-' + str(currentBuildNumber) + '" -m "Windows build ' + zipFilename + '"',
shell=True, cwd=compile_dir)
o, e = p.communicate()
p = subprocess.Popen('git push --tags origin windows_binaries', shell=True, cwd=compile_dir)
o,e = p.communicate()
o, e = p.communicate()
@@ -102,6 +102,7 @@ VERSION_NOTIFY = False
AUTO_UPDATE = False
NOTIFY_ON_UPDATE = False
CUR_COMMIT_HASH = None
BRANCH = None

INIT_LOCK = Lock()
started = False

@@ -442,7 +443,7 @@ __INITIALIZED__ = False
def initialize(consoleLogging=True):
with INIT_LOCK:

global ACTUAL_LOG_DIR, LOG_DIR, WEB_PORT, WEB_LOG, ENCRYPTION_VERSION, WEB_ROOT, WEB_USERNAME, WEB_PASSWORD, WEB_HOST, WEB_IPV6, USE_API, API_KEY, ENABLE_HTTPS, HTTPS_CERT, HTTPS_KEY, \
global BRANCH, ACTUAL_LOG_DIR, LOG_DIR, WEB_PORT, WEB_LOG, ENCRYPTION_VERSION, WEB_ROOT, WEB_USERNAME, WEB_PASSWORD, WEB_HOST, WEB_IPV6, USE_API, API_KEY, ENABLE_HTTPS, HTTPS_CERT, HTTPS_KEY, \
HANDLE_REVERSE_PROXY, USE_NZBS, USE_TORRENTS, NZB_METHOD, NZB_DIR, DOWNLOAD_PROPERS, CHECK_PROPERS_INTERVAL, ALLOW_HIGH_PRIORITY, TORRENT_METHOD, \
SAB_USERNAME, SAB_PASSWORD, SAB_APIKEY, SAB_CATEGORY, SAB_HOST, \
NZBGET_USERNAME, NZBGET_PASSWORD, NZBGET_CATEGORY, NZBGET_HOST, NZBGET_USE_HTTPS, backlogSearchScheduler, \

@@ -509,6 +510,9 @@ def initialize(consoleLogging=True):
CheckSection(CFG, 'Pushbullet')
CheckSection(CFG, 'Subtitles')

# branch
BRANCH = check_setting_str(CFG, 'General', 'branch', '')

ACTUAL_CACHE_DIR = check_setting_str(CFG, 'General', 'cache_dir', 'cache')
# fix bad configs due to buggy code
if ACTUAL_CACHE_DIR == 'None':

@@ -1140,16 +1144,20 @@ def start():
searchQueueScheduler.start()

# start the queue checker
properFinderScheduler.start()
if DOWNLOAD_PROPERS:
properFinderScheduler.start()

# start the proper finder
autoPostProcesserScheduler.start()
if PROCESS_AUTOMATICALLY:
autoPostProcesserScheduler.start()

# start the subtitles finder
subtitlesFinderScheduler.start()
if USE_SUBTITLES:
subtitlesFinderScheduler.start()

# start the trakt checker
traktCheckerScheduler.start()
if USE_TRAKT:
traktCheckerScheduler.start()

started = True

@@ -1170,7 +1178,7 @@ def halt():
events.stop.set()
logger.log(u"Waiting for the EVENTS thread to exit")
try:
events.join()
events.join(10)
except:
pass

@@ -1216,33 +1224,37 @@ def halt():
except:
pass

autoPostProcesserScheduler.stop.set()
logger.log(u"Waiting for the POSTPROCESSER thread to exit")
try:
autoPostProcesserScheduler.join(10)
except:
pass
if PROCESS_AUTOMATICALLY:
autoPostProcesserScheduler.stop.set()
logger.log(u"Waiting for the POSTPROCESSER thread to exit")
try:
autoPostProcesserScheduler.join(10)
except:
pass

traktCheckerScheduler.stop.set()
logger.log(u"Waiting for the TRAKTCHECKER thread to exit")
try:
traktCheckerScheduler.join(10)
except:
pass
if USE_TRAKT:
traktCheckerScheduler.stop.set()
logger.log(u"Waiting for the TRAKTCHECKER thread to exit")
try:
traktCheckerScheduler.join(10)
except:
pass

properFinderScheduler.stop.set()
logger.log(u"Waiting for the PROPERFINDER thread to exit")
try:
properFinderScheduler.join(10)
except:
pass
if DOWNLOAD_PROPERS:
properFinderScheduler.stop.set()
logger.log(u"Waiting for the PROPERFINDER thread to exit")
try:
properFinderScheduler.join(10)
except:
pass

subtitlesFinderScheduler.stop.set()
logger.log(u"Waiting for the SUBTITLESFINDER thread to exit")
try:
subtitlesFinderScheduler.join(10)
except:
pass
if USE_SUBTITLES:
subtitlesFinderScheduler.stop.set()
logger.log(u"Waiting for the SUBTITLESFINDER thread to exit")
try:
subtitlesFinderScheduler.join(10)
except:
pass

if ADBA_CONNECTION:
ADBA_CONNECTION.logout()

@@ -1255,7 +1267,6 @@ def halt():
__INITIALIZED__ = False
started = False

def sig_handler(signum=None, frame=None):
if type(signum) != type(None):
logger.log(u"Signal %i caught, saving and exiting..." % int(signum))

@@ -1291,6 +1302,7 @@ def save_config():
# For passwords you must include the word `password` in the item_name and add `helpers.encrypt(ITEM_NAME, ENCRYPTION_VERSION)` in save_config()
new_config['General'] = {}
new_config['General']['branch'] = BRANCH
new_config['General']['config_version'] = CONFIG_VERSION
new_config['General']['encryption_version'] = int(ENCRYPTION_VERSION)
new_config['General']['log_dir'] = ACTUAL_LOG_DIR if ACTUAL_LOG_DIR else 'Logs'
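Not part of the diff: the start() and halt() hunks above gate every optional scheduler on its feature flag, so disabled features no longer spin up, or block shutdown on, idle threads. A minimal sketch of that guard pattern with generic names, not the actual SickRage scheduler API:

def start_if_enabled(enabled, scheduler):
    # only spin the thread up when the feature is switched on
    if enabled:
        scheduler.start()

def stop_if_enabled(enabled, scheduler, name, log):
    # only wait on threads that were actually started, and bound the join
    if enabled:
        scheduler.stop.set()
        log(u"Waiting for the %s thread to exit" % name)
        try:
            scheduler.join(10)
        except Exception:
            pass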
@@ -27,9 +27,6 @@ from sickbeard import processTV
class PostProcesser():
def run(self, force=False):
if not sickbeard.PROCESS_AUTOMATICALLY:
return

if not ek.ek(os.path.isdir, sickbeard.TV_DOWNLOAD_DIR):
logger.log(u"Automatic post-processing attempted but dir " + sickbeard.TV_DOWNLOAD_DIR + " doesn't exist",
logger.ERROR)

@@ -142,15 +142,18 @@ class GenericClient(object):
def _get_torrent_hash(self, result):

result.hash = None
if result.url.startswith('magnet'):
torrent_hash = re.findall('urn:btih:([\w]{32,40})', result.url)[0]
if len(torrent_hash) == 32:
torrent_hash = b16encode(b32decode(torrent_hash)).lower()
result.hash = re.findall('urn:btih:([\w]{32,40})', result.url)[0]
if len(result.hash) == 32:
result.hash = b16encode(b32decode(result.hash)).lower()
else:
info = bdecode(result.content)["info"]
torrent_hash = sha1(bencode(info)).hexdigest()
result.content = result.provider.getURL(result.url)
if result.content:
info = bdecode(result.content)["info"]
result.hash = sha1(bencode(info)).hexdigest()

return torrent_hash
return result

def sendTORRENT(self, result):

@@ -163,8 +166,8 @@ class GenericClient(object):
return r_code

try:
result.hash = self._get_torrent_hash(result)
# Sets per provider seed ratio
result.ratio = result.provider.seedRatio()

if result.url.startswith('magnet'):
r_code = self._add_torrent_uri(result)
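Not part of the diff: a stdlib-only sketch of the magnet branch of _get_torrent_hash above, which pulls the btih info-hash out of a magnet URI and normalizes a 32-character base32 hash to the usual 40-character hex form (the function name is illustrative):

import re
from base64 import b16encode, b32decode

def magnet_info_hash(magnet_url):
    # btih hashes are either 40 hex characters or 32 base32 characters
    match = re.search(r'urn:btih:([\w]{32,40})', magnet_url)
    if not match:
        return None
    info_hash = match.group(1)
    if len(info_hash) == 32:
        # convert the base32 form to lower-case hex
        info_hash = b16encode(b32decode(info_hash.upper())).decode('ascii').lower()
    return info_hash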
@@ -27,7 +27,6 @@ from sickbeard import helpers
from sickbeard import logger
from sickbeard import naming
from sickbeard import db
from sickbeard import version

naming_ep_type = ("%(seasonnumber)dx%(episodenumber)02d",
"s%(seasonnumber)02de%(episodenumber)02d",

@@ -191,10 +190,50 @@ def change_VERSION_NOTIFY(version_notify):
if oldSetting == False and version_notify == True:
sickbeard.versionCheckScheduler.action.run()  # @UndefinedVariable

def change_VERSION(version):
if sickbeard.version.SICKBEARD_VERSION != version:
def change_DOWNLOAD_PROPERS(download_propers):
if sickbeard.DOWNLOAD_PROPERS == download_propers:
return

sickbeard.versionCheckScheduler.action.run()  # @UndefinedVariable
sickbeard.DOWNLOAD_PROPERS = download_propers
if sickbeard.DOWNLOAD_PROPERS:
sickbeard.properFinderScheduler.start()
else:
sickbeard.properFinderScheduler.stop.set()
logger.log(u"Waiting for the PROPERFINDER thread to exit")
try:
sickbeard.properFinderScheduler.join(10)
except:
pass

def change_USE_TRAKT(use_trakt):
if sickbeard.USE_TRAKT == use_trakt:
return

sickbeard.USE_TRAKT = use_trakt
if sickbeard.USE_TRAKT:
sickbeard.traktCheckerScheduler.start()
else:
sickbeard.traktCheckerScheduler.stop.set()
logger.log(u"Waiting for the TRAKTCHECKER thread to exit")
try:
sickbeard.traktCheckerScheduler.join(10)
except:
pass

def change_USE_SUBTITLES(use_subtitles):
if sickbeard.USE_SUBTITLES == use_subtitles:
return

sickbeard.USE_SUBTITLES = use_subtitles
if sickbeard.USE_SUBTITLES:
sickbeard.subtitlesFinderScheduler.start()
else:
sickbeard.subtitlesFinderScheduler.stop.set()
logger.log(u"Waiting for the SUBTITLESFINDER thread to exit")
try:
sickbeard.subtitlesFinderScheduler.join(10)
except:
pass

def CheckSection(CFG, sec):
""" Check if INI section exists, if not create it """

@@ -472,7 +511,7 @@ class ConfigMigrator():
if old_season_format:
try:
new_season_format = old_season_format % 9
new_season_format = new_season_format.replace('09', '%0S')
new_season_format = str(new_season_format).replace('09', '%0S')
new_season_format = new_season_format.replace('9', '%S')

logger.log(
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
|
||||
import os
|
||||
from subprocess import check_output, PIPE, Popen
|
||||
from os.path import join, split
|
||||
|
||||
try:
|
||||
import json
|
||||
@ -30,7 +33,7 @@ class GitHub(object):
|
||||
needs it for - list of commits.
|
||||
"""
|
||||
|
||||
def __init__(self, github_repo_user, github_repo, branch='master'):
|
||||
def __init__(self, github_repo_user, github_repo, branch):
|
||||
|
||||
self.github_repo_user = github_repo_user
|
||||
self.github_repo = github_repo
|
||||
@ -92,4 +95,37 @@ class GitHub(object):
|
||||
access_API = self._access_API(
|
||||
['repos', self.github_repo_user, self.github_repo, 'branches'],
|
||||
params={'per_page': 100})
|
||||
return access_API
|
||||
return access_API
|
||||
|
||||
def installed_branch(self):
|
||||
installed_path = os.path.dirname(os.path.normpath(os.path.abspath(__file__)))
|
||||
return self.hash_dir(installed_path)
|
||||
|
||||
def _lstree(self, files, dirs):
|
||||
"""Make git ls-tree like output."""
|
||||
for f, sha1 in files:
|
||||
yield "100644 blob {}\t{}\0".format(sha1, f)
|
||||
|
||||
for d, sha1 in dirs:
|
||||
yield "040000 tree {}\t{}\0".format(sha1, d)
|
||||
|
||||
|
||||
def _mktree(self, files, dirs):
|
||||
mkt = Popen(["git", "mktree", "-z"], stdin=PIPE, stdout=PIPE)
|
||||
return mkt.communicate("".join(self._lstree(files, dirs)))[0].strip()
|
||||
|
||||
def hash_file(self, path):
|
||||
"""Write file at path to Git index, return its SHA1 as a string."""
|
||||
return check_output(["git", "hash-object", "-w", "--", path]).strip()
|
||||
|
||||
def hash_dir(self, path):
|
||||
"""Write directory at path to Git index, return its SHA1 as a string."""
|
||||
dir_hash = {}
|
||||
|
||||
for root, dirs, files in os.walk(path, topdown=False):
|
||||
f_hash = ((f, self.hash_file(join(root, f))) for f in files)
|
||||
d_hash = ((d, dir_hash[join(root, d)]) for d in dirs)
|
||||
# split+join normalizes paths on Windows (note the imports)
|
||||
dir_hash[join(*split(root))] = self._mktree(f_hash, d_hash)
|
||||
|
||||
return dir_hash[path]
|
@@ -17,12 +17,14 @@
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.

from __future__ import with_statement
import getpass

import os
import re
import shutil
import socket
import stat
import tempfile
import time
import traceback
import urllib

@@ -1198,13 +1200,29 @@ def touchFile(fname, atime=None):
return False

def _getTempDir():
"""Returns the [system temp dir]/tvdb_api-u501 (or
tvdb_api-myuser)
"""
if hasattr(os, 'getuid'):
uid = "u%d" % (os.getuid())
else:
# For Windows
try:
uid = getpass.getuser()
except ImportError:
return os.path.join(tempfile.gettempdir(), "sickrage")

return os.path.join(tempfile.gettempdir(), "sickrage-%s" % (uid))

def getURL(url, post_data=None, params=None, headers=None, timeout=30, session=None, json=False):
"""
Returns a byte-string retrieved from the url provider.
"""

# request session
session = CacheControl(sess=session, cache=caches.FileCache(os.path.join(sickbeard.CACHE_DIR, 'sessions')))
cache_dir = sickbeard.CACHE_DIR or _getTempDir()
session = CacheControl(sess=session, cache=caches.FileCache(os.path.join(cache_dir, 'sessions')))

# request session headers
req_headers = {'User-Agent': USER_AGENT, 'Accept-Encoding': 'gzip,deflate'}

@@ -1233,6 +1251,11 @@ def getURL(url, post_data=None, params=None, headers=None, timeout=30, session=N
}

resp = session.get(url, data=post_data, timeout=timeout)
if not resp.ok:
logger.log(u"Requested url " + url + " returned status code is " + str(
resp.status_code) + ': ' + clients.http_error_code[resp.status_code], logger.DEBUG)
return

except requests.exceptions.HTTPError, e:
logger.log(u"HTTP error " + str(e.errno) + " while loading URL " + url, logger.WARNING)
return

@@ -1246,20 +1269,15 @@ def getURL(url, post_data=None, params=None, headers=None, timeout=30, session=N
logger.log(u"Unknown exception while loading URL " + url + ": " + traceback.format_exc(), logger.WARNING)
return

if not resp.ok:
logger.log(u"Requested url " + url + " returned status code is " + str(
resp.status_code) + ': ' + clients.http_error_code[resp.status_code], logger.WARNING)
return

if json:
return resp.json()

return resp.content

def download_file(url, filename, session=None):
# create session
session = CacheControl(sess=session, cache=caches.FileCache(os.path.join(sickbeard.CACHE_DIR, 'sessions')))
cache_dir = sickbeard.CACHE_DIR or _getTempDir()
session = CacheControl(sess=session, cache=caches.FileCache(os.path.join(cache_dir, 'sessions')))

# request session headers
session.headers.update({'User-Agent': USER_AGENT, 'Accept-Encoding': 'gzip,deflate'})

@@ -1281,6 +1299,8 @@ def download_file(url, filename, session=None):
try:
resp = session.get(url)
if not resp.ok:
logger.log(u"Requested url " + url + " returned status code is " + str(
resp.status_code) + ': ' + clients.http_error_code[resp.status_code], logger.DEBUG)
return False

with open(filename, 'wb') as fp:

@@ -1311,14 +1331,6 @@ def download_file(url, filename, session=None):
logger.log(u"Unknown exception while loading URL " + url + ": " + traceback.format_exc(), logger.WARNING)
return False

if not resp:
logger.log(u"No data returned from " + url, logger.DEBUG)
return False
elif not resp.ok:
logger.log(u"Requested url " + url + " returned status code is " + str(
resp.status_code) + ': ' + clients.http_error_code[resp.status_code], logger.WARNING)
return False

return True
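Not part of the diff: getURL and download_file above now fall back to a temp directory when CACHE_DIR is not configured yet, so building the cached session cannot fail at startup. A small sketch of that fallback (paths and the helper name are illustrative):

import os
import tempfile

import requests
from cachecontrol import CacheControl, caches

def make_cached_session(configured_cache_dir=None):
    # use the configured cache dir when present, otherwise a per-install temp dir
    cache_dir = configured_cache_dir or os.path.join(tempfile.gettempdir(), 'sickrage')
    return CacheControl(requests.Session(),
                        cache=caches.FileCache(os.path.join(cache_dir, 'sessions')))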
@@ -69,7 +69,7 @@ class PushbulletNotifier:
if method == 'POST':
uri = '/v2/pushes'
else:
uri = '/api/devices'
uri = '/v2/devices'

logger.log(u"Pushbullet event: " + str(event), logger.DEBUG)
logger.log(u"Pushbullet message: " + str(message), logger.DEBUG)

@@ -106,6 +106,7 @@ class PushbulletNotifier:
response = http_handler.getresponse()
request_body = response.read()
request_status = response.status
logger.log(u"Pushbullet response: %s" % request_body, logger.DEBUG)

if request_status == 200:
if testMessage:
@@ -33,6 +33,7 @@ from sickbeard import encodingKludge as ek
from sickbeard.exceptions import ex
from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException
from sickbeard.common import Quality
from sickbeard import clients

from hachoir_parser import createParser

@@ -405,8 +406,13 @@ class GenericProvider:
epNum = SEASON_RESULT
logger.log(u"Separating full season result to check for later", logger.DEBUG)

if not result:
continue
# validate torrent file if not magnet link to avoid invalid torrent links
if self.providerType == self.TORRENT:
client = clients.getClientIstance(sickbeard.TORRENT_METHOD)()
result = client._get_torrent_hash(result)
if not result.hash:
logger.log(u'Unable to get torrent hash for ' + title + ', skipping it', logger.DEBUG)
continue

if epNum not in results:
results[epNum] = [result]
@@ -129,12 +129,6 @@ def snatchEpisode(result, endStatus=SNATCHED):
if sickbeard.TORRENT_METHOD == "blackhole":
dlResult = _downloadResult(result)
else:
# Sets per provider seed ratio
result.ratio = result.provider.seedRatio()

# Gets torrent file contents if not magnet link
result.content = result.provider.getURL(result.url) if not result.url.startswith('magnet') else None

# Snatches torrent with client
client = clients.getClientIstance(sickbeard.TORRENT_METHOD)()
dlResult = client.sendTORRENT(result)

@@ -333,7 +327,6 @@ def filterSearchResults(show, season, results):
return foundResults

def searchForNeededEpisodes(show, episodes):
foundResults = {}
@@ -87,9 +87,6 @@ class SubtitlesFinder():
and download subtitles. Only if the defined rule is true
"""
def run(self, force=False):
if not sickbeard.USE_SUBTITLES:
return

if len(sickbeard.subtitles.getEnabledServiceList()) < 1:
logger.log(u'Not enough services selected. At least 1 service is required to search subtitles in the background', logger.ERROR)
return
@@ -34,9 +34,6 @@ class TraktChecker():
self.todoBacklog = []

def run(self, force=False):
if not sickbeard.USE_TRAKT:
return

try:
# add shows from trakt.tv watchlist
if sickbeard.TRAKT_USE_WATCHLIST:

@@ -60,10 +57,10 @@ class TraktChecker():
logger.log(u"Could not connect to trakt service, aborting library check", logger.ERROR)
return

return filter(lambda x: int(indexerid) in [int(x.tvdb_id), int(x.tvrage_id)], library)
return filter(lambda x: int(indexerid) in [int(x['tvdb_id']) or 0, int(x['tvrage_id'])] or 0, library)

def syncLibrary(self):
logger.log(u"Syncing library to trakt.tv show library", logger.DEBUG)
logger.log(u"Syncing library to Trakt.tv show library", logger.DEBUG)
if sickbeard.showList:
for myShow in sickbeard.showList:
self.addShowToTraktLibrary(myShow)
@@ -31,6 +31,7 @@ from sickbeard.exceptions import MultipleShowObjectsException
from sickbeard.exceptions import AuthException
from name_parser.parser import NameParser, InvalidNameException, InvalidShowException
from sickbeard.rssfeeds import RSSFeeds
from sickbeard import clients

class CacheDBConnection(db.DBConnection):
def __init__(self, providerName):

@@ -356,9 +357,15 @@ class TVCache():
result.quality = curQuality
result.release_group = curReleaseGroup
result.version = curVersion
result.content = self.provider.getURL(url) \
if self.provider.providerType == sickbeard.providers.generic.GenericProvider.TORRENT \
and not url.startswith('magnet') else None
result.content = None

# validate torrent file if not magnet link to avoid invalid torrent links
if self.provider.providerType == sickbeard.providers.generic.GenericProvider.TORRENT:
client = clients.getClientIstance(sickbeard.TORRENT_METHOD)()
result = client._get_torrent_hash(result)
if not result.hash:
logger.log(u'Unable to get torrent hash for ' + title + ', skipping it', logger.DEBUG)
continue

# add it to the list
if epObj not in neededEps:
@@ -1 +1 @@
SICKBEARD_VERSION = "master"
SICKBEARD_VERSION = "nightly"
@@ -30,14 +30,14 @@ import gh_api as github

import sickbeard
from sickbeard import helpers, notifiers
from sickbeard import version, ui
from sickbeard import ui
from sickbeard import logger
from sickbeard.exceptions import ex
from sickbeard import encodingKludge as ek

class CheckVersion():
"""
Version check class meant to run as a thread object with the SB scheduler.
Version check class meant to run as a thread object with the sr scheduler.
"""

def __init__(self):

@@ -53,6 +53,9 @@ class CheckVersion():
self.updater = None

def run(self, force=False):
# set current branch version
sickbeard.BRANCH = self.get_branch()

if self.check_for_new_version(force):
if sickbeard.AUTO_UPDATE:
logger.log(u"New update found for SickRage, starting auto-updater ...")

@@ -64,7 +67,7 @@ class CheckVersion():

def find_install_type(self):
"""
Determines how this copy of SB was installed.
Determines how this copy of sr was installed.

returns: type of installation. Possible values are:
'win': any compiled windows build

@@ -73,7 +76,7 @@ class CheckVersion():
"""

# check if we're a windows build
if sickbeard.version.SICKBEARD_VERSION.startswith('build '):
if sickbeard.BRANCH.startswith('build '):
install_type = 'win'
elif os.path.isdir(ek.ek(os.path.join, sickbeard.PROG_DIR, u'.git')):
install_type = 'git'

@@ -109,15 +112,16 @@ class CheckVersion():
self.updater.set_newest_text()
return True

def update(self, branch=None):
if branch and branch != self.updater.branch:
return self.updater.update(branch)
elif self.updater.need_update():
def update(self):
if self.updater.need_update() or self.updater.branch != sickbeard.BRANCH:
return self.updater.update()

def list_remote_branches(self):
return self.updater.list_remote_branches()

def get_branch(self):
return self.updater.branch

class UpdateManager():
def get_github_repo_user(self):
return 'echel0n'

@@ -145,7 +149,7 @@ class WindowsUpdateManager(UpdateManager):
version = ''

try:
version = sickbeard.version.SICKBEARD_VERSION
version = sickbeard.BRANCH
return int(version[6:])
except ValueError:
logger.log(u"Unknown SickRage Windows binary release: " + version, logger.ERROR)

@@ -200,10 +204,7 @@ class WindowsUpdateManager(UpdateManager):
sickbeard.NEWEST_VERSION_STRING = newest_text

def update(self, branch='windows_binaries'):

# set branch version
self.branch = branch
def update(self):

zip_download_url = self._find_newest_version(True)
logger.log(u"new_link: " + repr(zip_download_url), logger.DEBUG)

@@ -214,18 +215,18 @@ class WindowsUpdateManager(UpdateManager):

try:
# prepare the update dir
sb_update_dir = ek.ek(os.path.join, sickbeard.PROG_DIR, u'sb-update')
sr_update_dir = ek.ek(os.path.join, sickbeard.PROG_DIR, u'sr-update')

if os.path.isdir(sb_update_dir):
logger.log(u"Clearing out update folder " + sb_update_dir + " before extracting")
shutil.rmtree(sb_update_dir)
if os.path.isdir(sr_update_dir):
logger.log(u"Clearing out update folder " + sr_update_dir + " before extracting")
shutil.rmtree(sr_update_dir)

logger.log(u"Creating update folder " + sb_update_dir + " before extracting")
os.makedirs(sb_update_dir)
logger.log(u"Creating update folder " + sr_update_dir + " before extracting")
os.makedirs(sr_update_dir)

# retrieve file
logger.log(u"Downloading update from " + zip_download_url)
zip_download_path = os.path.join(sb_update_dir, u'sb-update.zip')
zip_download_path = os.path.join(sr_update_dir, u'sr-update.zip')
urllib.urlretrieve(zip_download_url, zip_download_path)

if not ek.ek(os.path.isfile, zip_download_path):

@@ -236,10 +237,10 @@ class WindowsUpdateManager(UpdateManager):
logger.log(u"Retrieved version from " + zip_download_url + " is corrupt, can't update", logger.ERROR)
return False

# extract to sb-update dir
logger.log(u"Unzipping from " + str(zip_download_path) + " to " + sb_update_dir)
# extract to sr-update dir
logger.log(u"Unzipping from " + str(zip_download_path) + " to " + sr_update_dir)
update_zip = zipfile.ZipFile(zip_download_path, 'r')
update_zip.extractall(sb_update_dir)
update_zip.extractall(sr_update_dir)
update_zip.close()

# delete the zip

@@ -247,15 +248,15 @@ class WindowsUpdateManager(UpdateManager):
os.remove(zip_download_path)

# find update dir name
update_dir_contents = [x for x in os.listdir(sb_update_dir) if
os.path.isdir(os.path.join(sb_update_dir, x))]
update_dir_contents = [x for x in os.listdir(sr_update_dir) if
os.path.isdir(os.path.join(sr_update_dir, x))]

if len(update_dir_contents) != 1:
logger.log(u"Invalid update data, update failed. Maybe try deleting your sb-update folder?",
logger.log(u"Invalid update data, update failed. Maybe try deleting your sr-update folder?",
logger.ERROR)
return False

content_dir = os.path.join(sb_update_dir, update_dir_contents[0])
content_dir = os.path.join(sr_update_dir, update_dir_contents[0])
old_update_path = os.path.join(content_dir, u'updater.exe')
new_update_path = os.path.join(sickbeard.PROG_DIR, u'updater.exe')
logger.log(u"Copying new update.exe file from " + old_update_path + " to " + new_update_path)

@@ -278,7 +279,10 @@ class GitUpdateManager(UpdateManager):
self._git_path = self._find_working_git()
self.github_repo_user = self.get_github_repo_user()
self.github_repo = self.get_github_repo()
self.branch = self._find_git_branch()

self.branch = sickbeard.BRANCH
if not sickbeard.BRANCH or not sickbeard.BRANCH == '':
self.branch = self._find_installed_branch()

self._cur_commit_hash = None
self._newest_commit_hash = None

@@ -310,7 +314,7 @@ class GitUpdateManager(UpdateManager):

alternative_git = []

# osx people who start SB from launchd have a broken path, so try a hail-mary attempt for them
# osx people who start sr from launchd have a broken path, so try a hail-mary attempt for them
if platform.system().lower() == 'darwin':
alternative_git.append('/usr/local/git/bin/git')

@@ -403,13 +407,12 @@ class GitUpdateManager(UpdateManager):
else:
return False

def _find_git_branch(self):
def _find_installed_branch(self):
branch_info, err, exit_status = self._run_git(self._git_path, 'symbolic-ref -q HEAD')  # @UnusedVariable
if exit_status == 0 and branch_info:
branch = branch_info.strip().replace('refs/heads/', '', 1)
if branch:
sickbeard.version.SICKBEARD_VERSION = branch
return sickbeard.version.SICKBEARD_VERSION
return branch

def _check_github_for_update(self):
"""

@@ -505,16 +508,13 @@ class GitUpdateManager(UpdateManager):

return False

def update(self, branch=sickbeard.version.SICKBEARD_VERSION):
def update(self):
"""
Calls git pull origin <branch> in order to update SickRage. Returns a bool depending
on the call's success.
"""

# set branch version
self.branch = branch

if self.branch == sickbeard.version.SICKBEARD_VERSION:
if sickbeard.BRANCH == self._find_installed_branch():
output, err, exit_status = self._run_git(self._git_path, 'pull -f origin ' + self.branch)  # @UnusedVariable
else:
output, err, exit_status = self._run_git(self._git_path, 'checkout -f ' + self.branch)  # @UnusedVariable

@@ -537,30 +537,29 @@ class SourceUpdateManager(UpdateManager):
def __init__(self):
self.github_repo_user = self.get_github_repo_user()
self.github_repo = self.get_github_repo()
self.branch = sickbeard.version.SICKBEARD_VERSION

self.branch = sickbeard.BRANCH
if not sickbeard.BRANCH or not sickbeard.BRANCH == '':
self.branch = self._find_installed_branch()

self._cur_commit_hash = None
self._newest_commit_hash = None
self._num_commits_behind = 0

def _find_installed_version(self):

version_file = ek.ek(os.path.join, sickbeard.PROG_DIR, u'version.txt')

if not os.path.isfile(version_file):
self._cur_commit_hash = None
return

try:
with open(version_file, 'r') as fp:
self._cur_commit_hash = fp.read().strip(' \n\r')
except EnvironmentError, e:
logger.log(u"Unable to open 'version.txt': " + ex(e), logger.DEBUG)
gh = github.GitHub(self.github_repo_user, self.github_repo, self.branch)
self._cur_commit_hash = gh.installed_branch()

if not self._cur_commit_hash:
self._cur_commit_hash = None
sickbeard.CUR_COMMIT_HASH = str(self._cur_commit_hash)

def _find_installed_branch(self):
gh = github.GitHub(self.github_repo_user, self.github_repo, self.branch)
for branch in gh.branches():
if branch.commit['sha'] == self._cur_commit_hash:
sickbeard.BRANCH = branch.name

def need_update(self):

self._find_installed_version()

@@ -645,32 +644,28 @@ class SourceUpdateManager(UpdateManager):

sickbeard.NEWEST_VERSION_STRING = newest_text

def update(self, branch=sickbeard.version.SICKBEARD_VERSION):
def update(self):
"""
Downloads the latest source tarball from github and installs it over the existing version.
"""

# set branch version
self.branch = branch

base_url = 'http://github.com/' + self.github_repo_user + '/' + self.github_repo
tar_download_url = base_url + '/tarball/' + self.branch
version_path = ek.ek(os.path.join, sickbeard.PROG_DIR, u'version.txt')

try:
# prepare the update dir
sb_update_dir = ek.ek(os.path.join, sickbeard.PROG_DIR, u'sb-update')
sr_update_dir = ek.ek(os.path.join, sickbeard.PROG_DIR, u'sr-update')

if os.path.isdir(sb_update_dir):
logger.log(u"Clearing out update folder " + sb_update_dir + " before extracting")
shutil.rmtree(sb_update_dir)
if os.path.isdir(sr_update_dir):
logger.log(u"Clearing out update folder " + sr_update_dir + " before extracting")
shutil.rmtree(sr_update_dir)

logger.log(u"Creating update folder " + sb_update_dir + " before extracting")
os.makedirs(sb_update_dir)
logger.log(u"Creating update folder " + sr_update_dir + " before extracting")
os.makedirs(sr_update_dir)

# retrieve file
logger.log(u"Downloading update from " + repr(tar_download_url))
tar_download_path = os.path.join(sb_update_dir, u'sb-update.tar')
tar_download_path = os.path.join(sr_update_dir, u'sr-update.tar')
urllib.urlretrieve(tar_download_url, tar_download_path)

if not ek.ek(os.path.isfile, tar_download_path):

@@ -681,10 +676,10 @@ class SourceUpdateManager(UpdateManager):
logger.log(u"Retrieved version from " + tar_download_url + " is corrupt, can't update", logger.ERROR)
return False

# extract to sb-update dir
# extract to sr-update dir
logger.log(u"Extracting file " + tar_download_path)
tar = tarfile.open(tar_download_path)
tar.extractall(sb_update_dir)
tar.extractall(sr_update_dir)
tar.close()

# delete .tar.gz

@@ -692,12 +687,12 @@ class SourceUpdateManager(UpdateManager):
os.remove(tar_download_path)

# find update dir name
update_dir_contents = [x for x in os.listdir(sb_update_dir) if
os.path.isdir(os.path.join(sb_update_dir, x))]
update_dir_contents = [x for x in os.listdir(sr_update_dir) if
os.path.isdir(os.path.join(sr_update_dir, x))]
if len(update_dir_contents) != 1:
logger.log(u"Invalid update data, update failed: " + str(update_dir_contents), logger.ERROR)
return False
content_dir = os.path.join(sb_update_dir, update_dir_contents[0])
content_dir = os.path.join(sr_update_dir, update_dir_contents[0])

# walk temp folder and move files to main folder
logger.log(u"Moving files from " + content_dir + " to " + sickbeard.PROG_DIR)

@@ -723,15 +718,6 @@ class SourceUpdateManager(UpdateManager):
if os.path.isfile(new_path):
os.remove(new_path)
os.renames(old_path, new_path)

# update version.txt with commit hash
try:
with open(version_path, 'w') as ver_file:
ver_file.write(self._newest_commit_hash)
except EnvironmentError, e:
logger.log(u"Unable to write version file, update not complete: " + ex(e), logger.ERROR)
return False

except Exception, e:
logger.log(u"Error while trying to update: " + ex(e), logger.ERROR)
logger.log(u"Traceback: " + traceback.format_exc(), logger.DEBUG)

@@ -744,4 +730,5 @@ class SourceUpdateManager(UpdateManager):

def list_remote_branches(self):
gh = github.GitHub(self.github_repo_user, self.github_repo, self.branch)
return gh.branches()
return [x.name for x in gh.branches()]
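Not part of the diff: GitUpdateManager.update() above now pulls when the installed branch already matches sickbeard.BRANCH and otherwise checks the requested branch out. A minimal sketch of that decision using plain git commands (the repository path and branch names are illustrative):

import subprocess

def git_update(repo_dir, installed_branch, wanted_branch):
    # same branch: fast-forward it; different branch: switch to it first
    if installed_branch == wanted_branch:
        cmd = ['git', 'pull', '-f', 'origin', wanted_branch]
    else:
        cmd = ['git', 'checkout', '-f', wanted_branch]
    return subprocess.call(cmd, cwd=repo_dir) == 0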
@@ -1284,7 +1284,7 @@ class CMD_SickBeard(ApiCall):

def run(self):
""" display misc sickbeard related information """
data = {"sb_version": sickbeard.version.SICKBEARD_VERSION, "api_version": Api.version,
data = {"sb_version": sickbeard.BRANCH, "api_version": Api.version,
"api_commands": sorted(_functionMaper.keys())}
return _responds(RESULT_SUCCESS, data)
@@ -2769,7 +2769,7 @@ class NewHomeAddShows(MainHandler):
return

map(final_results.append,
([int(show['tvdb_id']), show['url'], show['title'], show['overview'],
([int(show['tvdb_id'] or 0) if sickbeard.TRAKT_DEFAULT_INDEXER == 1 else int(show['tvdb_id'] or 0), show['url'], show['title'], show['overview'],
datetime.date.fromtimestamp(int(show['first_aired']) / 1000.0).strftime('%Y%m%d')] for show in
recommendedlist if not helpers.findCertainShow(sickbeard.showList, indexerid=int(show['tvdb_id']))))

@@ -3449,12 +3449,12 @@ class Home(MainHandler):

return _munge(t)

def update(self, pid=None, branch=None):
def update(self, pid=None):

if str(pid) != str(sickbeard.PID):
redirect("/home/")

updated = sickbeard.versionCheckScheduler.action.update(branch)  # @UndefinedVariable
updated = sickbeard.versionCheckScheduler.action.update()  # @UndefinedVariable
if updated:
# do a hard restart
sickbeard.events.put(sickbeard.events.SystemEvent.RESTART)

@@ -3466,7 +3466,9 @@ class Home(MainHandler):
"Update wasn't successful, not restarting. Check your log for more information.")

def branchCheckout(self, branch):
return self.update(sickbeard.PID, branch)
sickbeard.BRANCH = branch
ui.notifications.message('Checking out branch: ', branch)
return self.update(sickbeard.PID)

def displayShow(self, show=None):