
Merge remote-tracking branch 'git.darksystems.lan/nightly' into dev

Commit 7d4e7f04a4 by echel0n, 2014-07-28 21:56:43 -07:00
25 changed files with 358 additions and 263 deletions

View File

@@ -56,14 +56,11 @@ import threading
 import getopt
 import sickbeard
-from sickbeard import db
+from sickbeard import db, logger, network_timezones, failed_history, name_cache, versionChecker
 from sickbeard.tv import TVShow
-from sickbeard import logger, network_timezones, failed_history, name_cache
 from sickbeard.webserveInit import SRWebServer
-from sickbeard.version import SICKBEARD_VERSION
 from sickbeard.databases.mainDB import MIN_DB_VERSION, MAX_DB_VERSION
 from sickbeard.event_queue import Events
 from lib.configobj import ConfigObj
 throwaway = datetime.datetime.strptime('20110101', '%Y%m%d')
@@ -350,7 +347,7 @@ class SickRage(object):
 os._exit(1)
 if self.consoleLogging:
-print "Starting up SickRage " + SICKBEARD_VERSION + " from " + sickbeard.CONFIG_FILE
+print "Starting up SickRage " + sickbeard.BRANCH + " from " + sickbeard.CONFIG_FILE
 # Fire up all our threads
 sickbeard.start()
@@ -369,6 +366,7 @@ class SickRage(object):
 if self.forceUpdate or sickbeard.UPDATE_SHOWS_ON_START:
 sickbeard.showUpdateScheduler.action.run(force=True) # @UndefinedVariable
+# Launch browser
 if sickbeard.LAUNCH_BROWSER and not (self.noLaunch or self.runAsDaemon):
 sickbeard.launchBrowser(self.startPort)

Binary file not shown (image changed; new size 3.5 KiB).

View File

@@ -30,7 +30,7 @@
 <table class="infoTable" cellspacing="1" border="0" cellpadding="0">
 <tr><td class="infoTableHeader">SR Version: </td><td class="infoTableCell">
 #if $sickbeard.VERSION_NOTIFY
-BRANCH: ($sickbeard.version.SICKBEARD_VERSION) / COMMIT: ($sickbeard.CUR_COMMIT_HASH) <!-- &ndash; build.date //--><br />
+BRANCH: ($sickbeard.BRANCH) / COMMIT: ($sickbeard.CUR_COMMIT_HASH) <!-- &ndash; build.date //--><br />
 #else
 You don't have version checking turned on. Please turn on "Check for Update" in Config > General.<br />
 #end if

View File

@@ -282,7 +282,7 @@
 <span class="component-desc">
 <select id="branchVersion" name="branchVersion">
 #for $cur_branch in $sickbeard.versionCheckScheduler.action.list_remote_branches():
-<option value="$cur_branch" #if $cur_branch == $sickbeard.version.SICKBEARD_VERSION then "selected=\"selected\"" else ""#>$cur_branch.capitalize()</option>
+<option value="$cur_branch" #if $cur_branch == $sickbeard.BRANCH then "selected=\"selected\"" else ""#>$cur_branch.capitalize()</option>
 #end for
 </select>
 <input class="btn" class="btn" type="button" id="branchCheckout" value="Checkout Branch">

View File

@@ -1,4 +1,3 @@
-#import sickbeard.version
 #import sickbeard
 #import urllib
@@ -10,7 +9,7 @@
 <meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1">
 <meta name="robots" content="noindex">
 <meta name="apple-mobile-web-app-capable" content="yes">
-<title>SickRage - BRANCH:$sickbeard.version.SICKBEARD_VERSION - $title</title>
+<title>SickRage - BRANCH:[$sickbeard.BRANCH] - $title</title>
 <!--[if lt IE 9]>
 <script src="//html5shiv.googlecode.com/svn/trunk/html5.js"></script>
 <![endif]-->

View File

@@ -268,10 +268,12 @@ $(document).ready(function(){
 var devices = jQuery.parseJSON(data).devices;
 $("#pushbullet_device_list").html('');
 for (var i = 0; i < devices.length; i++) {
+if(devices[i].active == true) {
 if(current_pushbullet_device == devices[i].iden) {
-$("#pushbullet_device_list").append('<option value="'+devices[i].iden+'" selected>' + devices[i].extras.nickname + '</option>')
+$("#pushbullet_device_list").append('<option value="'+devices[i].iden+'" selected>' + devices[i].nickname + '</option>')
 } else {
-$("#pushbullet_device_list").append('<option value="'+devices[i].iden+'">' + devices[i].extras.nickname + '</option>')
+$("#pushbullet_device_list").append('<option value="'+devices[i].iden+'">' + devices[i].nickname + '</option>')
+}
 }
 }
 if(msg) {

View File

@@ -471,13 +471,11 @@ class Tvdb:
 if cache is True:
 self.config['cache_enabled'] = True
 self.config['cache_location'] = self._getTempDir()
-self.sess = CacheControl(cache=caches.FileCache(self.config['cache_location']))
 elif cache is False:
 self.config['cache_enabled'] = False
 elif isinstance(cache, basestring):
 self.config['cache_enabled'] = True
 self.config['cache_location'] = cache
-self.sess = CacheControl(cache=caches.FileCache(self.config['cache_location']))
 else:
 raise ValueError("Invalid value for Cache %r (type was %s)" % (cache, type(cache)))
@@ -565,14 +563,15 @@ class Tvdb:
 # get response from TVDB
 if self.config['cache_enabled']:
+session = CacheControl(cache=caches.FileCache(self.config['cache_location']))
 if self.config['proxy']:
 log().debug("Using proxy for URL: %s" % url)
-self.sess.proxies = {
+session.proxies = {
 "http": self.config['proxy'],
 "https": self.config['proxy'],
 }
-resp = self.sess.get(url, cache_auto=True, params=params)
+resp = session.get(url, cache_auto=True, params=params)
 else:
 resp = requests.get(url, params=params)
 except requests.exceptions.HTTPError, e:
@@ -630,7 +629,7 @@ class Tvdb:
 """
 try:
 src = self._loadUrl(url, params=params, language=language)
-src = [src[item] for item in src][0]
+src = [src[item] for item in src][0] if src else []
 except:
 errormsg = "There was an error with the XML retrieved from thetvdb.com:"
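
The hunks above drop the long-lived self.sess object and build the cached session per request instead. A minimal standalone sketch of that pattern, assuming the cachecontrol wrapper bundled in lib/ (whose CacheControl() accepts cache= without an explicit requests session and whose get() takes cache_auto=True, exactly as used in the hunks); the helper name and import path are assumptions:

from lib.cachecontrol import CacheControl, caches  # import path assumed for the bundled wrapper

def cached_get(url, cache_location, params=None, proxy=None):
    # hypothetical helper: build a fresh cached session for this one request,
    # mirroring the change to Tvdb._loadUrl() above
    session = CacheControl(cache=caches.FileCache(cache_location))
    if proxy:
        session.proxies = {"http": proxy, "https": proxy}
    return session.get(url, cache_auto=True, params=params)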

View File

@@ -305,7 +305,6 @@ class TVRage:
 self.shows = ShowContainer() # Holds all Show classes
 self.corrections = {} # Holds show-name to show_id mapping
-self.sess = requests.session() # HTTP Session
 self.config = {}
@@ -323,13 +322,11 @@ class TVRage:
 if cache is True:
 self.config['cache_enabled'] = True
 self.config['cache_location'] = self._getTempDir()
-self.sess = CacheControl(cache=caches.FileCache(self.config['cache_location']))
 elif cache is False:
 self.config['cache_enabled'] = False
 elif isinstance(cache, basestring):
 self.config['cache_enabled'] = True
 self.config['cache_location'] = cache
-self.sess = CacheControl(cache=caches.FileCache(self.config['cache_location']))
 else:
 raise ValueError("Invalid value for Cache %r (type was %s)" % (cache, type(cache)))
@@ -396,6 +393,7 @@ class TVRage:
 except ImportError:
 return os.path.join(tempfile.gettempdir(), "tvrage_api")
 return os.path.join(tempfile.gettempdir(), "tvrage_api-%s" % (uid))
 #@retry(tvrage_error)
@@ -405,14 +403,15 @@ class TVRage:
 # get response from TVRage
 if self.config['cache_enabled']:
+session = CacheControl(cache=caches.FileCache(self.config['cache_location']))
 if self.config['proxy']:
 log().debug("Using proxy for URL: %s" % url)
-self.sess.proxies = {
+session.proxies = {
 "http": self.config['proxy'],
 "https": self.config['proxy'],
 }
-resp = self.sess.get(url.strip(), cache_auto=True, params=params)
+resp = session.get(url.strip(), cache_auto=True, params=params)
 else:
 resp = requests.get(url.strip(), params=params)
@@ -488,7 +487,7 @@ class TVRage:
 try:
 src = self._loadUrl(url, params)
-src = [src[item] for item in src][0]
+src = [src[item] for item in src][0] if src else []
 except:
 errormsg = "There was an error with the XML retrieved from tvrage.com"

View File

@@ -1,12 +1,30 @@
 import re
-import urllib, ConfigParser
-from distutils.core import setup
-import py2exe, sys, os, shutil, datetime, zipfile, subprocess, fnmatch
+import urllib
+import ConfigParser
+import sys
+import os
+import shutil
+import zipfile
+import subprocess
+import fnmatch
 import googlecode_upload
-from lib.pygithub import github
+from distutils.core import setup
+try:
+import py2exe
+except:
+print "The Python module py2exe is required"
+sys.exit(1)
+try:
+import pygithub.github
+except:
+print "The Python module pyGitHub is required"
+sys.exit(1)
 # mostly stolen from the SABnzbd package.py file
-name = 'SickBeard'
+name = 'SickRage'
 version = '0.1'
 release = name + '-' + version
@@ -14,11 +32,11 @@ release = name + '-' + version
 Win32ConsoleName = 'SickBeard-console.exe'
 Win32WindowName = 'SickBeard.exe'
 def findLatestBuild():
-regex = "http\://sickbeard\.googlecode\.com/files/SickBeard\-win32\-alpha\-build(\d+)(?:\.\d+)?\.zip"
+regex = "http\://sickrage\.googlecode\.com/files/SickRage\-win32\-alpha\-build(\d+)(?:\.\d+)?\.zip"
-svnFile = urllib.urlopen("http://code.google.com/p/sickbeard/downloads/list")
+svnFile = urllib.urlopen("http://code.google.com/p/sickrage/downloads/list")
 for curLine in svnFile.readlines():
 match = re.search(regex, curLine)
@@ -28,8 +46,8 @@ def findLatestBuild():
 return None
 def recursive_find_data_files(root_dir, allowed_extensions=('*')):
 to_return = {}
 for (dirpath, dirnames, filenames) in os.walk(root_dir):
 if not filenames:
@@ -51,7 +69,6 @@ def recursive_find_data_files(root_dir, allowed_extensions=('*')):
 def find_all_libraries(root_dirs):
 libs = []
 for cur_root_dir in root_dirs:
@@ -103,19 +120,14 @@ if 'test' in oldArgs:
 else:
 currentBuildNumber = latestBuild + 1
-# write the version file before we compile
-versionFile = open("sickbeard/version.py", "w")
-versionFile.write("SICKBEARD_VERSION = \"build "+str(currentBuildNumber)+"\"")
-versionFile.close()
 # set up the compilation options
 data_files = recursive_find_data_files('data', ['gif', 'png', 'jpg', 'ico', 'js', 'css', 'tmpl'])
 options = dict(
 name=name,
 version=release,
-author='Nic Wolfe',
+author='echel0n',
-author_email='nic@wolfeden.ca',
+author_email='sickrage.tv@gmail.com',
 description=name + ' ' + release,
 scripts=['SickBeard.py'],
 packages=find_all_libraries(['sickbeard', 'lib']),
@@ -132,7 +144,7 @@ options['options'] = {'py2exe':
 'compressed': 0
 }
 }
-options['zipfile'] = 'lib/sickbeard.zip'
+options['zipfile'] = 'lib/sickrage.zip'
 options['console'] = program
 options['data_files'] = data_files
@@ -202,7 +214,7 @@ else:
 changeString = ""
 # cycle through all the git commits and save their commit messages
-for curCommit in gh.commits.forBranch('midgetspy', 'Sick-Beard'):
+for curCommit in gh.commits.forBranch('echel0n', 'SickRage'):
 if curCommit.id == lastCommit:
 break
@@ -227,7 +239,7 @@ if os.path.exists("CHANGELOG.txt"):
 # figure out what we're going to call the zip file
 print 'Zipping files...'
-zipFilename = 'SickBeard-win32-alpha-build'+str(currentBuildNumber)
+zipFilename = 'SickRage-win32-alpha-build' + str(currentBuildNumber)
 if os.path.isfile(zipFilename + '.zip'):
 zipNum = 2
 while os.path.isfile(zipFilename + '.{0:0>2}.zip'.format(str(zipNum))):
@@ -245,12 +257,6 @@ z.close()
 print "Created zip at", zipFilename
-# leave version file as it is in source
-print "Reverting version file to master"
-versionFile = open("sickbeard/version.py", "w")
-versionFile.write("SICKBEARD_VERSION = \"master\"")
-versionFile.close()
 # i store my google code username/pw in a config so i can have this file in public source control
 config = ConfigParser.ConfigParser()
 configFilename = os.path.join(compile_dir, "gc.ini")
@@ -262,12 +268,15 @@ gc_password = config.get("GC", "password")
 # upload to google code unless I tell it not to
 if "noup" not in oldArgs and "test" not in oldArgs:
 print "Uploading zip to google code"
-googlecode_upload.upload(os.path.abspath(zipFilename+".zip"), "sickbeard", gc_username, gc_password, "Win32 alpha build "+str(currentBuildNumber)+" (unstable/development release)", ["Featured","Type-Executable","OpSys-Windows"])
+googlecode_upload.upload(os.path.abspath(zipFilename + ".zip"), "sickrage", gc_username, gc_password,
+"Win32 alpha build " + str(currentBuildNumber) + " (unstable/development release)",
+["Featured", "Type-Executable", "OpSys-Windows"])
 if 'nopush' not in oldArgs and 'test' not in oldArgs:
 # tag commit as a new build and push changes to github
 print 'Tagging commit and pushing'
-p = subprocess.Popen('git tag -a "build-'+str(currentBuildNumber)+'" -m "Windows build '+zipFilename+'"', shell=True, cwd=compile_dir)
+p = subprocess.Popen('git tag -a "build-' + str(currentBuildNumber) + '" -m "Windows build ' + zipFilename + '"',
+shell=True, cwd=compile_dir)
 o, e = p.communicate()
 p = subprocess.Popen('git push --tags origin windows_binaries', shell=True, cwd=compile_dir)
 o, e = p.communicate()

View File

@@ -102,6 +102,7 @@ VERSION_NOTIFY = False
 AUTO_UPDATE = False
 NOTIFY_ON_UPDATE = False
 CUR_COMMIT_HASH = None
+BRANCH = None
 INIT_LOCK = Lock()
 started = False
@@ -442,7 +443,7 @@ __INITIALIZED__ = False
 def initialize(consoleLogging=True):
 with INIT_LOCK:
-global ACTUAL_LOG_DIR, LOG_DIR, WEB_PORT, WEB_LOG, ENCRYPTION_VERSION, WEB_ROOT, WEB_USERNAME, WEB_PASSWORD, WEB_HOST, WEB_IPV6, USE_API, API_KEY, ENABLE_HTTPS, HTTPS_CERT, HTTPS_KEY, \
+global BRANCH, ACTUAL_LOG_DIR, LOG_DIR, WEB_PORT, WEB_LOG, ENCRYPTION_VERSION, WEB_ROOT, WEB_USERNAME, WEB_PASSWORD, WEB_HOST, WEB_IPV6, USE_API, API_KEY, ENABLE_HTTPS, HTTPS_CERT, HTTPS_KEY, \
 HANDLE_REVERSE_PROXY, USE_NZBS, USE_TORRENTS, NZB_METHOD, NZB_DIR, DOWNLOAD_PROPERS, CHECK_PROPERS_INTERVAL, ALLOW_HIGH_PRIORITY, TORRENT_METHOD, \
 SAB_USERNAME, SAB_PASSWORD, SAB_APIKEY, SAB_CATEGORY, SAB_HOST, \
 NZBGET_USERNAME, NZBGET_PASSWORD, NZBGET_CATEGORY, NZBGET_HOST, NZBGET_USE_HTTPS, backlogSearchScheduler, \
@@ -509,6 +510,9 @@ def initialize(consoleLogging=True):
 CheckSection(CFG, 'Pushbullet')
 CheckSection(CFG, 'Subtitles')
+# branch
+BRANCH = check_setting_str(CFG, 'General', 'branch', '')
 ACTUAL_CACHE_DIR = check_setting_str(CFG, 'General', 'cache_dir', 'cache')
 # fix bad configs due to buggy code
 if ACTUAL_CACHE_DIR == 'None':
@@ -1140,15 +1144,19 @@ def start():
 searchQueueScheduler.start()
 # start the queue checker
+if DOWNLOAD_PROPERS:
 properFinderScheduler.start()
 # start the proper finder
+if PROCESS_AUTOMATICALLY:
 autoPostProcesserScheduler.start()
 # start the subtitles finder
+if USE_SUBTITLES:
 subtitlesFinderScheduler.start()
 # start the trakt checker
+if USE_TRAKT:
 traktCheckerScheduler.start()
 started = True
@@ -1170,7 +1178,7 @@ def halt():
 events.stop.set()
 logger.log(u"Waiting for the EVENTS thread to exit")
 try:
-events.join()
+events.join(10)
 except:
 pass
@@ -1216,6 +1224,7 @@ def halt():
 except:
 pass
+if PROCESS_AUTOMATICALLY:
 autoPostProcesserScheduler.stop.set()
 logger.log(u"Waiting for the POSTPROCESSER thread to exit")
 try:
@@ -1223,6 +1232,7 @@ def halt():
 except:
 pass
+if USE_TRAKT:
 traktCheckerScheduler.stop.set()
 logger.log(u"Waiting for the TRAKTCHECKER thread to exit")
 try:
@@ -1230,6 +1240,7 @@ def halt():
 except:
 pass
+if DOWNLOAD_PROPERS:
 properFinderScheduler.stop.set()
 logger.log(u"Waiting for the PROPERFINDER thread to exit")
 try:
@@ -1237,6 +1248,7 @@ def halt():
 except:
 pass
+if USE_SUBTITLES:
 subtitlesFinderScheduler.stop.set()
 logger.log(u"Waiting for the SUBTITLESFINDER thread to exit")
 try:
@@ -1255,7 +1267,6 @@ def halt():
 __INITIALIZED__ = False
 started = False
 def sig_handler(signum=None, frame=None):
 if type(signum) != type(None):
 logger.log(u"Signal %i caught, saving and exiting..." % int(signum))
@@ -1291,6 +1302,7 @@ def save_config():
 # For passwords you must include the word `password` in the item_name and add `helpers.encrypt(ITEM_NAME, ENCRYPTION_VERSION)` in save_config()
 new_config['General'] = {}
+new_config['General']['branch'] = BRANCH
 new_config['General']['config_version'] = CONFIG_VERSION
 new_config['General']['encryption_version'] = int(ENCRYPTION_VERSION)
 new_config['General']['log_dir'] = ACTUAL_LOG_DIR if ACTUAL_LOG_DIR else 'Logs'
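
The new BRANCH setting above is read with check_setting_str() and written back in save_config(). A minimal sketch of that round trip with the bundled ConfigObj, assuming a config.ini in the working directory (file name and section layout taken from the hunks above):

from lib.configobj import ConfigObj  # bundled library, as imported in SickBeard.py above

cfg = ConfigObj('config.ini')

# read, mirroring BRANCH = check_setting_str(CFG, 'General', 'branch', '')
branch = cfg.get('General', {}).get('branch', '')

# write, mirroring new_config['General']['branch'] = BRANCH in save_config()
cfg.setdefault('General', {})['branch'] = branch
cfg.write()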

View File

@@ -27,9 +27,6 @@ from sickbeard import processTV
 class PostProcesser():
 def run(self, force=False):
-if not sickbeard.PROCESS_AUTOMATICALLY:
-return
 if not ek.ek(os.path.isdir, sickbeard.TV_DOWNLOAD_DIR):
 logger.log(u"Automatic post-processing attempted but dir " + sickbeard.TV_DOWNLOAD_DIR + " doesn't exist",
 logger.ERROR)

View File

@@ -142,15 +142,18 @@ class GenericClient(object):
 def _get_torrent_hash(self, result):
+result.hash = None
 if result.url.startswith('magnet'):
-torrent_hash = re.findall('urn:btih:([\w]{32,40})', result.url)[0]
+result.hash = re.findall('urn:btih:([\w]{32,40})', result.url)[0]
-if len(torrent_hash) == 32:
+if len(result.hash) == 32:
-torrent_hash = b16encode(b32decode(torrent_hash)).lower()
+result.hash = b16encode(b32decode(result.hash)).lower()
 else:
+result.content = result.provider.getURL(result.url)
+if result.content:
 info = bdecode(result.content)["info"]
-torrent_hash = sha1(bencode(info)).hexdigest()
+result.hash = sha1(bencode(info)).hexdigest()
-return torrent_hash
+return result
 def sendTORRENT(self, result):
@@ -163,8 +166,8 @@ class GenericClient(object):
 return r_code
 try:
-result.hash = self._get_torrent_hash(result)
+# Sets per provider seed ratio
+result.ratio = result.provider.seedRatio()
 if result.url.startswith('magnet'):
 r_code = self._add_torrent_uri(result)
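
For reference, the base32-to-hex conversion used by _get_torrent_hash() above can be exercised on its own. A minimal sketch using only the standard library; the helper name magnet_info_hash is hypothetical, and the uppercasing step is an addition (Python's b32decode rejects lowercase input by default):

import re
from base64 import b16encode, b32decode

def magnet_info_hash(magnet_url):
    # hypothetical helper: pull the info hash out of a magnet link and
    # normalize a 32-char base32 hash to the 40-char hex form clients expect
    info_hash = re.findall(r'urn:btih:([\w]{32,40})', magnet_url)[0]
    if len(info_hash) == 32:
        # uppercase first, since b32decode is strict about case
        info_hash = b16encode(b32decode(info_hash.upper())).decode('ascii')
    return info_hash.lower()

print(magnet_info_hash('magnet:?xt=urn:btih:' + 'A' * 32))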

View File

@@ -27,7 +27,6 @@ from sickbeard import helpers
 from sickbeard import logger
 from sickbeard import naming
 from sickbeard import db
-from sickbeard import version
 naming_ep_type = ("%(seasonnumber)dx%(episodenumber)02d",
 "s%(seasonnumber)02de%(episodenumber)02d",
@@ -191,10 +190,50 @@ def change_VERSION_NOTIFY(version_notify):
 if oldSetting == False and version_notify == True:
 sickbeard.versionCheckScheduler.action.run() # @UndefinedVariable
-def change_VERSION(version):
-if sickbeard.version.SICKBEARD_VERSION != version:
-sickbeard.versionCheckScheduler.action.run() # @UndefinedVariable
+def change_DOWNLOAD_PROPERS(download_propers):
+if sickbeard.DOWNLOAD_PROPERS == download_propers:
+return
+sickbeard.DOWNLOAD_PROPERS = download_propers
+if sickbeard.DOWNLOAD_PROPERS:
+sickbeard.properFinderScheduler.start()
+else:
+sickbeard.properFinderScheduler.stop.set()
+logger.log(u"Waiting for the PROPERFINDER thread to exit")
+try:
+sickbeard.properFinderScheduler.join(10)
+except:
+pass
+def change_USE_TRAKT(use_trakt):
+if sickbeard.USE_TRAKT == use_trakt:
+return
+sickbeard.USE_TRAKT = use_trakt
+if sickbeard.USE_TRAKT:
+sickbeard.traktCheckerScheduler.start()
+else:
+sickbeard.traktCheckerScheduler.stop.set()
+logger.log(u"Waiting for the TRAKTCHECKER thread to exit")
+try:
+sickbeard.traktCheckerScheduler.join(10)
+except:
+pass
+def change_USE_SUBTITLES(use_subtitles):
+if sickbeard.USE_SUBTITLES == use_subtitles:
+return
+sickbeard.USE_SUBTITLES = use_subtitles
+if sickbeard.USE_SUBTITLES:
+sickbeard.subtitlesFinderScheduler.start()
+else:
+sickbeard.subtitlesFinderScheduler.stop.set()
+logger.log(u"Waiting for the SUBTITLESFINDER thread to exit")
+try:
+sickbeard.subtitlesFinderScheduler.join(10)
+except:
+pass
 def CheckSection(CFG, sec):
 """ Check if INI section exists, if not create it """
@@ -472,7 +511,7 @@ class ConfigMigrator():
 if old_season_format:
 try:
 new_season_format = old_season_format % 9
-new_season_format = new_season_format.replace('09', '%0S')
+new_season_format = str(new_season_format).replace('09', '%0S')
 new_season_format = new_season_format.replace('9', '%S')
 logger.log(

View File

@@ -15,6 +15,9 @@
 #
 # You should have received a copy of the GNU General Public License
 # along with SickRage. If not, see <http://www.gnu.org/licenses/>.
+import os
+from subprocess import check_output, PIPE, Popen
+from os.path import join, split
 try:
 import json
@@ -30,7 +33,7 @@ class GitHub(object):
 needs it for - list of commits.
 """
-def __init__(self, github_repo_user, github_repo, branch='master'):
+def __init__(self, github_repo_user, github_repo, branch):
 self.github_repo_user = github_repo_user
 self.github_repo = github_repo
@@ -93,3 +96,36 @@ class GitHub(object):
 ['repos', self.github_repo_user, self.github_repo, 'branches'],
 params={'per_page': 100})
 return access_API
+def installed_branch(self):
+installed_path = os.path.dirname(os.path.normpath(os.path.abspath(__file__)))
+return self.hash_dir(installed_path)
+def _lstree(self, files, dirs):
+"""Make git ls-tree like output."""
+for f, sha1 in files:
+yield "100644 blob {}\t{}\0".format(sha1, f)
+for d, sha1 in dirs:
+yield "040000 tree {}\t{}\0".format(sha1, d)
+def _mktree(self, files, dirs):
+mkt = Popen(["git", "mktree", "-z"], stdin=PIPE, stdout=PIPE)
+return mkt.communicate("".join(self._lstree(files, dirs)))[0].strip()
+def hash_file(self, path):
+"""Write file at path to Git index, return its SHA1 as a string."""
+return check_output(["git", "hash-object", "-w", "--", path]).strip()
+def hash_dir(self, path):
+"""Write directory at path to Git index, return its SHA1 as a string."""
+dir_hash = {}
+for root, dirs, files in os.walk(path, topdown=False):
+f_hash = ((f, self.hash_file(join(root, f))) for f in files)
+d_hash = ((d, dir_hash[join(root, d)]) for d in dirs)
+# split+join normalizes paths on Windows (note the imports)
+dir_hash[join(*split(root))] = self._mktree(f_hash, d_hash)
+return dir_hash[path]
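
A short usage sketch for the helpers added above; it assumes a git binary on PATH and the gh_api module name used by versionChecker.py further down. Note that hash_dir() yields the SHA1 of a git tree object built from the files on disk, which installed_branch() returns as the "installed" identifier:

import gh_api  # imported as `import gh_api as github` in versionChecker.py below

# branch is now a required argument of GitHub.__init__(); 'master' is only an example
gh = gh_api.GitHub('echel0n', 'SickRage', 'master')
tree_sha = gh.installed_branch()  # tree SHA1 computed via `git hash-object` + `git mktree`
print(tree_sha)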

View File

@@ -17,12 +17,14 @@
 # along with SickRage. If not, see <http://www.gnu.org/licenses/>.
 from __future__ import with_statement
+import getpass
 import os
 import re
 import shutil
 import socket
 import stat
+import tempfile
 import time
 import traceback
 import urllib
@@ -1198,13 +1200,29 @@ def touchFile(fname, atime=None):
 return False
+def _getTempDir():
+"""Returns the [system temp dir]/tvdb_api-u501 (or
+tvdb_api-myuser)
+"""
+if hasattr(os, 'getuid'):
+uid = "u%d" % (os.getuid())
+else:
+# For Windows
+try:
+uid = getpass.getuser()
+except ImportError:
+return os.path.join(tempfile.gettempdir(), "sickrage")
+return os.path.join(tempfile.gettempdir(), "sickrage-%s" % (uid))
 def getURL(url, post_data=None, params=None, headers=None, timeout=30, session=None, json=False):
 """
 Returns a byte-string retrieved from the url provider.
 """
 # request session
-session = CacheControl(sess=session, cache=caches.FileCache(os.path.join(sickbeard.CACHE_DIR, 'sessions')))
+cache_dir = sickbeard.CACHE_DIR or _getTempDir()
+session = CacheControl(sess=session, cache=caches.FileCache(os.path.join(cache_dir, 'sessions')))
 # request session headers
 req_headers = {'User-Agent': USER_AGENT, 'Accept-Encoding': 'gzip,deflate'}
@@ -1233,6 +1251,11 @@ def getURL(url, post_data=None, params=None, headers=None, timeout=30, session=N
 }
 resp = session.get(url, data=post_data, timeout=timeout)
+if not resp.ok:
+logger.log(u"Requested url " + url + " returned status code is " + str(
+resp.status_code) + ': ' + clients.http_error_code[resp.status_code], logger.DEBUG)
+return
 except requests.exceptions.HTTPError, e:
 logger.log(u"HTTP error " + str(e.errno) + " while loading URL " + url, logger.WARNING)
 return
@@ -1246,20 +1269,15 @@ def getURL(url, post_data=None, params=None, headers=None, timeout=30, session=N
 logger.log(u"Unknown exception while loading URL " + url + ": " + traceback.format_exc(), logger.WARNING)
 return
-if not resp.ok:
-logger.log(u"Requested url " + url + " returned status code is " + str(
-resp.status_code) + ': ' + clients.http_error_code[resp.status_code], logger.WARNING)
-return
 if json:
 return resp.json()
 return resp.content
 def download_file(url, filename, session=None):
 # create session
-session = CacheControl(sess=session, cache=caches.FileCache(os.path.join(sickbeard.CACHE_DIR, 'sessions')))
+cache_dir = sickbeard.CACHE_DIR or _getTempDir()
+session = CacheControl(sess=session, cache=caches.FileCache(os.path.join(cache_dir, 'sessions')))
 # request session headers
 session.headers.update({'User-Agent': USER_AGENT, 'Accept-Encoding': 'gzip,deflate'})
@@ -1281,6 +1299,8 @@ def download_file(url, filename, session=None):
 try:
 resp = session.get(url)
 if not resp.ok:
+logger.log(u"Requested url " + url + " returned status code is " + str(
+resp.status_code) + ': ' + clients.http_error_code[resp.status_code], logger.DEBUG)
 return False
 with open(filename, 'wb') as fp:
@@ -1311,14 +1331,6 @@ def download_file(url, filename, session=None):
 logger.log(u"Unknown exception while loading URL " + url + ": " + traceback.format_exc(), logger.WARNING)
 return False
-if not resp:
-logger.log(u"No data returned from " + url, logger.DEBUG)
-return False
-elif not resp.ok:
-logger.log(u"Requested url " + url + " returned status code is " + str(
-resp.status_code) + ': ' + clients.http_error_code[resp.status_code], logger.WARNING)
-return False
 return True
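
The getURL()/download_file() hunks above move the resp.ok check inside the try block so a bad status is logged and handled before the exception paths run. A minimal sketch of that shape with plain requests (the function name fetch is hypothetical):

import requests

def fetch(url, timeout=30):
    # hypothetical helper showing the reshuffled error handling: check the
    # status inside the try block and bail out early on a bad response
    try:
        resp = requests.get(url, timeout=timeout)
        if not resp.ok:
            print("Requested url %s returned status code %s" % (url, resp.status_code))
            return None
    except requests.exceptions.RequestException as e:
        print("Error while loading URL %s: %s" % (url, e))
        return None
    return resp.content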

View File

@@ -69,7 +69,7 @@ class PushbulletNotifier:
 if method == 'POST':
 uri = '/v2/pushes'
 else:
-uri = '/api/devices'
+uri = '/v2/devices'
 logger.log(u"Pushbullet event: " + str(event), logger.DEBUG)
 logger.log(u"Pushbullet message: " + str(message), logger.DEBUG)
@@ -106,6 +106,7 @@ class PushbulletNotifier:
 response = http_handler.getresponse()
 request_body = response.read()
 request_status = response.status
+logger.log(u"Pushbullet response: %s" % request_body, logger.DEBUG)
 if request_status == 200:
 if testMessage:

View File

@@ -33,6 +33,7 @@ from sickbeard import encodingKludge as ek
 from sickbeard.exceptions import ex
 from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException
 from sickbeard.common import Quality
+from sickbeard import clients
 from hachoir_parser import createParser
@@ -405,7 +406,12 @@ class GenericProvider:
 epNum = SEASON_RESULT
 logger.log(u"Separating full season result to check for later", logger.DEBUG)
-if not result:
+# validate torrent file if not magnet link to avoid invalid torrent links
+if self.providerType == self.TORRENT:
+client = clients.getClientIstance(sickbeard.TORRENT_METHOD)()
+result = client._get_torrent_hash(result)
+if not result.hash:
+logger.log(u'Unable to get torrent hash for ' + title + ', skipping it', logger.DEBUG)
 continue
 if epNum not in results:

View File

@@ -129,12 +129,6 @@ def snatchEpisode(result, endStatus=SNATCHED):
 if sickbeard.TORRENT_METHOD == "blackhole":
 dlResult = _downloadResult(result)
 else:
-# Sets per provider seed ratio
-result.ratio = result.provider.seedRatio()
-# Gets torrent file contents if not magnet link
-result.content = result.provider.getURL(result.url) if not result.url.startswith('magnet') else None
 # Snatches torrent with client
 client = clients.getClientIstance(sickbeard.TORRENT_METHOD)()
 dlResult = client.sendTORRENT(result)
@@ -333,7 +327,6 @@ def filterSearchResults(show, season, results):
 return foundResults
 def searchForNeededEpisodes(show, episodes):
 foundResults = {}

View File

@@ -87,9 +87,6 @@ class SubtitlesFinder():
 and download subtitles. Only if the defined rule is true
 """
 def run(self, force=False):
-if not sickbeard.USE_SUBTITLES:
-return
 if len(sickbeard.subtitles.getEnabledServiceList()) < 1:
 logger.log(u'Not enough services selected. At least 1 service is required to search subtitles in the background', logger.ERROR)
 return

View File

@@ -34,9 +34,6 @@ class TraktChecker():
 self.todoBacklog = []
 def run(self, force=False):
-if not sickbeard.USE_TRAKT:
-return
 try:
 # add shows from trakt.tv watchlist
 if sickbeard.TRAKT_USE_WATCHLIST:
@@ -60,10 +57,10 @@ class TraktChecker():
 logger.log(u"Could not connect to trakt service, aborting library check", logger.ERROR)
 return
-return filter(lambda x: int(indexerid) in [int(x.tvdb_id), int(x.tvrage_id)], library)
+return filter(lambda x: int(indexerid) in [int(x['tvdb_id']) or 0, int(x['tvrage_id'])] or 0, library)
 def syncLibrary(self):
-logger.log(u"Syncing library to trakt.tv show library", logger.DEBUG)
+logger.log(u"Syncing library to Trakt.tv show library", logger.DEBUG)
 if sickbeard.showList:
 for myShow in sickbeard.showList:
 self.addShowToTraktLibrary(myShow)

View File

@@ -31,6 +31,7 @@ from sickbeard.exceptions import MultipleShowObjectsException
 from sickbeard.exceptions import AuthException
 from name_parser.parser import NameParser, InvalidNameException, InvalidShowException
 from sickbeard.rssfeeds import RSSFeeds
+from sickbeard import clients
 class CacheDBConnection(db.DBConnection):
 def __init__(self, providerName):
@@ -356,9 +357,15 @@ class TVCache():
 result.quality = curQuality
 result.release_group = curReleaseGroup
 result.version = curVersion
-result.content = self.provider.getURL(url) \
-if self.provider.providerType == sickbeard.providers.generic.GenericProvider.TORRENT \
-and not url.startswith('magnet') else None
+result.content = None
+# validate torrent file if not magnet link to avoid invalid torrent links
+if self.provider.providerType == sickbeard.providers.generic.GenericProvider.TORRENT:
+client = clients.getClientIstance(sickbeard.TORRENT_METHOD)()
+result = client._get_torrent_hash(result)
+if not result.hash:
+logger.log(u'Unable to get torrent hash for ' + title + ', skipping it', logger.DEBUG)
+continue
 # add it to the list
 if epObj not in neededEps:

View File

@@ -1 +1 @@
-SICKBEARD_VERSION = "master"
+SICKBEARD_VERSION = "nightly"

View File

@@ -30,14 +30,14 @@ import gh_api as github
 import sickbeard
 from sickbeard import helpers, notifiers
-from sickbeard import version, ui
+from sickbeard import ui
 from sickbeard import logger
 from sickbeard.exceptions import ex
 from sickbeard import encodingKludge as ek
 class CheckVersion():
 """
-Version check class meant to run as a thread object with the SB scheduler.
+Version check class meant to run as a thread object with the sr scheduler.
 """
 def __init__(self):
@@ -53,6 +53,9 @@ class CheckVersion():
 self.updater = None
 def run(self, force=False):
+# set current branch version
+sickbeard.BRANCH = self.get_branch()
 if self.check_for_new_version(force):
 if sickbeard.AUTO_UPDATE:
 logger.log(u"New update found for SickRage, starting auto-updater ...")
@@ -64,7 +67,7 @@ class CheckVersion():
 def find_install_type(self):
 """
-Determines how this copy of SB was installed.
+Determines how this copy of sr was installed.
 returns: type of installation. Possible values are:
 'win': any compiled windows build
@@ -73,7 +76,7 @@ class CheckVersion():
 """
 # check if we're a windows build
-if sickbeard.version.SICKBEARD_VERSION.startswith('build '):
+if sickbeard.BRANCH.startswith('build '):
 install_type = 'win'
 elif os.path.isdir(ek.ek(os.path.join, sickbeard.PROG_DIR, u'.git')):
 install_type = 'git'
@@ -109,15 +112,16 @@ class CheckVersion():
 self.updater.set_newest_text()
 return True
-def update(self, branch=None):
-if branch and branch != self.updater.branch:
-return self.updater.update(branch)
-elif self.updater.need_update():
+def update(self):
+if self.updater.need_update() or self.updater.branch != sickbeard.BRANCH:
 return self.updater.update()
 def list_remote_branches(self):
 return self.updater.list_remote_branches()
+def get_branch(self):
+return self.updater.branch
 class UpdateManager():
 def get_github_repo_user(self):
 return 'echel0n'
@@ -145,7 +149,7 @@ class WindowsUpdateManager(UpdateManager):
 version = ''
 try:
-version = sickbeard.version.SICKBEARD_VERSION
+version = sickbeard.BRANCH
 return int(version[6:])
 except ValueError:
 logger.log(u"Unknown SickRage Windows binary release: " + version, logger.ERROR)
@@ -200,10 +204,7 @@ class WindowsUpdateManager(UpdateManager):
 sickbeard.NEWEST_VERSION_STRING = newest_text
-def update(self, branch='windows_binaries'):
-# set branch version
-self.branch = branch
+def update(self):
 zip_download_url = self._find_newest_version(True)
 logger.log(u"new_link: " + repr(zip_download_url), logger.DEBUG)
@@ -214,18 +215,18 @@ class WindowsUpdateManager(UpdateManager):
 try:
 # prepare the update dir
-sb_update_dir = ek.ek(os.path.join, sickbeard.PROG_DIR, u'sb-update')
+sr_update_dir = ek.ek(os.path.join, sickbeard.PROG_DIR, u'sr-update')
-if os.path.isdir(sb_update_dir):
+if os.path.isdir(sr_update_dir):
-logger.log(u"Clearing out update folder " + sb_update_dir + " before extracting")
+logger.log(u"Clearing out update folder " + sr_update_dir + " before extracting")
-shutil.rmtree(sb_update_dir)
+shutil.rmtree(sr_update_dir)
-logger.log(u"Creating update folder " + sb_update_dir + " before extracting")
+logger.log(u"Creating update folder " + sr_update_dir + " before extracting")
-os.makedirs(sb_update_dir)
+os.makedirs(sr_update_dir)
 # retrieve file
 logger.log(u"Downloading update from " + zip_download_url)
-zip_download_path = os.path.join(sb_update_dir, u'sb-update.zip')
+zip_download_path = os.path.join(sr_update_dir, u'sr-update.zip')
 urllib.urlretrieve(zip_download_url, zip_download_path)
 if not ek.ek(os.path.isfile, zip_download_path):
@@ -236,10 +237,10 @@ class WindowsUpdateManager(UpdateManager):
 logger.log(u"Retrieved version from " + zip_download_url + " is corrupt, can't update", logger.ERROR)
 return False
-# extract to sb-update dir
+# extract to sr-update dir
-logger.log(u"Unzipping from " + str(zip_download_path) + " to " + sb_update_dir)
+logger.log(u"Unzipping from " + str(zip_download_path) + " to " + sr_update_dir)
 update_zip = zipfile.ZipFile(zip_download_path, 'r')
-update_zip.extractall(sb_update_dir)
+update_zip.extractall(sr_update_dir)
 update_zip.close()
 # delete the zip
@@ -247,15 +248,15 @@ class WindowsUpdateManager(UpdateManager):
 os.remove(zip_download_path)
 # find update dir name
-update_dir_contents = [x for x in os.listdir(sb_update_dir) if
+update_dir_contents = [x for x in os.listdir(sr_update_dir) if
-os.path.isdir(os.path.join(sb_update_dir, x))]
+os.path.isdir(os.path.join(sr_update_dir, x))]
 if len(update_dir_contents) != 1:
-logger.log(u"Invalid update data, update failed. Maybe try deleting your sb-update folder?",
+logger.log(u"Invalid update data, update failed. Maybe try deleting your sr-update folder?",
 logger.ERROR)
 return False
-content_dir = os.path.join(sb_update_dir, update_dir_contents[0])
+content_dir = os.path.join(sr_update_dir, update_dir_contents[0])
 old_update_path = os.path.join(content_dir, u'updater.exe')
 new_update_path = os.path.join(sickbeard.PROG_DIR, u'updater.exe')
 logger.log(u"Copying new update.exe file from " + old_update_path + " to " + new_update_path)
@@ -278,7 +279,10 @@ class GitUpdateManager(UpdateManager):
 self._git_path = self._find_working_git()
 self.github_repo_user = self.get_github_repo_user()
 self.github_repo = self.get_github_repo()
-self.branch = self._find_git_branch()
+self.branch = sickbeard.BRANCH
+if not sickbeard.BRANCH or not sickbeard.BRANCH == '':
+self.branch = self._find_installed_branch()
 self._cur_commit_hash = None
 self._newest_commit_hash = None
@@ -310,7 +314,7 @@ class GitUpdateManager(UpdateManager):
 alternative_git = []
-# osx people who start SB from launchd have a broken path, so try a hail-mary attempt for them
+# osx people who start sr from launchd have a broken path, so try a hail-mary attempt for them
 if platform.system().lower() == 'darwin':
 alternative_git.append('/usr/local/git/bin/git')
@@ -403,13 +407,12 @@ class GitUpdateManager(UpdateManager):
 else:
 return False
-def _find_git_branch(self):
+def _find_installed_branch(self):
 branch_info, err, exit_status = self._run_git(self._git_path, 'symbolic-ref -q HEAD') # @UnusedVariable
 if exit_status == 0 and branch_info:
 branch = branch_info.strip().replace('refs/heads/', '', 1)
 if branch:
-sickbeard.version.SICKBEARD_VERSION = branch
-return sickbeard.version.SICKBEARD_VERSION
+return branch
 def _check_github_for_update(self):
 """
@@ -505,16 +508,13 @@ class GitUpdateManager(UpdateManager):
 return False
-def update(self, branch=sickbeard.version.SICKBEARD_VERSION):
+def update(self):
 """
 Calls git pull origin <branch> in order to update SickRage. Returns a bool depending
 on the call's success.
 """
-# set branch version
-self.branch = branch
-if self.branch == sickbeard.version.SICKBEARD_VERSION:
+if sickbeard.BRANCH == self._find_installed_branch():
 output, err, exit_status = self._run_git(self._git_path, 'pull -f origin ' + self.branch) # @UnusedVariable
 else:
 output, err, exit_status = self._run_git(self._git_path, 'checkout -f ' + self.branch) # @UnusedVariable
@@ -537,30 +537,29 @@ class SourceUpdateManager(UpdateManager):
 def __init__(self):
 self.github_repo_user = self.get_github_repo_user()
 self.github_repo = self.get_github_repo()
-self.branch = sickbeard.version.SICKBEARD_VERSION
+self.branch = sickbeard.BRANCH
+if not sickbeard.BRANCH or not sickbeard.BRANCH == '':
+self.branch = self._find_installed_branch()
 self._cur_commit_hash = None
 self._newest_commit_hash = None
 self._num_commits_behind = 0
 def _find_installed_version(self):
+gh = github.GitHub(self.github_repo_user, self.github_repo, self.branch)
-version_file = ek.ek(os.path.join, sickbeard.PROG_DIR, u'version.txt')
-if not os.path.isfile(version_file):
-self._cur_commit_hash = None
-return
-try:
-with open(version_file, 'r') as fp:
-self._cur_commit_hash = fp.read().strip(' \n\r')
-except EnvironmentError, e:
-logger.log(u"Unable to open 'version.txt': " + ex(e), logger.DEBUG)
+self._cur_commit_hash = gh.installed_branch()
 if not self._cur_commit_hash:
 self._cur_commit_hash = None
 sickbeard.CUR_COMMIT_HASH = str(self._cur_commit_hash)
+def _find_installed_branch(self):
+gh = github.GitHub(self.github_repo_user, self.github_repo, self.branch)
+for branch in gh.branches():
+if branch.commit['sha'] == self._cur_commit_hash:
+sickbeard.BRANCH = branch.name
 def need_update(self):
 self._find_installed_version()
@@ -645,32 +644,28 @@ class SourceUpdateManager(UpdateManager):
 sickbeard.NEWEST_VERSION_STRING = newest_text
-def update(self, branch=sickbeard.version.SICKBEARD_VERSION):
+def update(self):
 """
 Downloads the latest source tarball from github and installs it over the existing version.
 """
-# set branch version
-self.branch = branch
 base_url = 'http://github.com/' + self.github_repo_user + '/' + self.github_repo
 tar_download_url = base_url + '/tarball/' + self.branch
-version_path = ek.ek(os.path.join, sickbeard.PROG_DIR, u'version.txt')
 try:
 # prepare the update dir
-sb_update_dir = ek.ek(os.path.join, sickbeard.PROG_DIR, u'sb-update')
+sr_update_dir = ek.ek(os.path.join, sickbeard.PROG_DIR, u'sr-update')
-if os.path.isdir(sb_update_dir):
+if os.path.isdir(sr_update_dir):
-logger.log(u"Clearing out update folder " + sb_update_dir + " before extracting")
+logger.log(u"Clearing out update folder " + sr_update_dir + " before extracting")
-shutil.rmtree(sb_update_dir)
+shutil.rmtree(sr_update_dir)
-logger.log(u"Creating update folder " + sb_update_dir + " before extracting")
+logger.log(u"Creating update folder " + sr_update_dir + " before extracting")
-os.makedirs(sb_update_dir)
+os.makedirs(sr_update_dir)
 # retrieve file
 logger.log(u"Downloading update from " + repr(tar_download_url))
-tar_download_path = os.path.join(sb_update_dir, u'sb-update.tar')
+tar_download_path = os.path.join(sr_update_dir, u'sr-update.tar')
 urllib.urlretrieve(tar_download_url, tar_download_path)
 if not ek.ek(os.path.isfile, tar_download_path):
@@ -681,10 +676,10 @@ class SourceUpdateManager(UpdateManager):
 logger.log(u"Retrieved version from " + tar_download_url + " is corrupt, can't update", logger.ERROR)
 return False
-# extract to sb-update dir
+# extract to sr-update dir
 logger.log(u"Extracting file " + tar_download_path)
 tar = tarfile.open(tar_download_path)
-tar.extractall(sb_update_dir)
+tar.extractall(sr_update_dir)
 tar.close()
 # delete .tar.gz
@@ -692,12 +687,12 @@ class SourceUpdateManager(UpdateManager):
 os.remove(tar_download_path)
 # find update dir name
-update_dir_contents = [x for x in os.listdir(sb_update_dir) if
+update_dir_contents = [x for x in os.listdir(sr_update_dir) if
-os.path.isdir(os.path.join(sb_update_dir, x))]
+os.path.isdir(os.path.join(sr_update_dir, x))]
 if len(update_dir_contents) != 1:
 logger.log(u"Invalid update data, update failed: " + str(update_dir_contents), logger.ERROR)
 return False
-content_dir = os.path.join(sb_update_dir, update_dir_contents[0])
+content_dir = os.path.join(sr_update_dir, update_dir_contents[0])
 # walk temp folder and move files to main folder
 logger.log(u"Moving files from " + content_dir + " to " + sickbeard.PROG_DIR)
@@ -723,15 +718,6 @@ class SourceUpdateManager(UpdateManager):
 if os.path.isfile(new_path):
 os.remove(new_path)
 os.renames(old_path, new_path)
-# update version.txt with commit hash
-try:
-with open(version_path, 'w') as ver_file:
-ver_file.write(self._newest_commit_hash)
-except EnvironmentError, e:
-logger.log(u"Unable to write version file, update not complete: " + ex(e), logger.ERROR)
-return False
 except Exception, e:
 logger.log(u"Error while trying to update: " + ex(e), logger.ERROR)
 logger.log(u"Traceback: " + traceback.format_exc(), logger.DEBUG)
@@ -744,4 +730,5 @@ class SourceUpdateManager(UpdateManager):
 def list_remote_branches(self):
 gh = github.GitHub(self.github_repo_user, self.github_repo, self.branch)
-return gh.branches()
+return [x.name for x in gh.branches()]
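
The GitUpdateManager.update() change above boils down to: pull when the requested branch is already checked out, otherwise force-checkout the new branch. A minimal sketch of that decision with subprocess (the helper name and arguments are illustrative, not the project's API):

import subprocess

def pull_or_checkout(requested_branch, installed_branch, cwd='.'):
    # illustrative helper mirroring GitUpdateManager.update() above
    if requested_branch == installed_branch:
        cmd = ['git', 'pull', '-f', 'origin', requested_branch]
    else:
        cmd = ['git', 'checkout', '-f', requested_branch]
    return subprocess.call(cmd, cwd=cwd) == 0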

View File

@@ -1284,7 +1284,7 @@ class CMD_SickBeard(ApiCall):
 def run(self):
 """ display misc sickbeard related information """
-data = {"sb_version": sickbeard.version.SICKBEARD_VERSION, "api_version": Api.version,
+data = {"sb_version": sickbeard.BRANCH, "api_version": Api.version,
 "api_commands": sorted(_functionMaper.keys())}
 return _responds(RESULT_SUCCESS, data)

View File

@@ -2769,7 +2769,7 @@ class NewHomeAddShows(MainHandler):
 return
 map(final_results.append,
-([int(show['tvdb_id']), show['url'], show['title'], show['overview'],
+([int(show['tvdb_id'] or 0) if sickbeard.TRAKT_DEFAULT_INDEXER == 1 else int(show['tvdb_id'] or 0), show['url'], show['title'], show['overview'],
 datetime.date.fromtimestamp(int(show['first_aired']) / 1000.0).strftime('%Y%m%d')] for show in
 recommendedlist if not helpers.findCertainShow(sickbeard.showList, indexerid=int(show['tvdb_id']))))
@@ -3449,12 +3449,12 @@ class Home(MainHandler):
 return _munge(t)
-def update(self, pid=None, branch=None):
+def update(self, pid=None):
 if str(pid) != str(sickbeard.PID):
 redirect("/home/")
-updated = sickbeard.versionCheckScheduler.action.update(branch) # @UndefinedVariable
+updated = sickbeard.versionCheckScheduler.action.update() # @UndefinedVariable
 if updated:
 # do a hard restart
 sickbeard.events.put(sickbeard.events.SystemEvent.RESTART)
@@ -3466,7 +3466,9 @@ class Home(MainHandler):
 "Update wasn't successful, not restarting. Check your log for more information.")
 def branchCheckout(self, branch):
-return self.update(sickbeard.PID, branch)
+sickbeard.BRANCH = branch
+ui.notifications.message('Checking out branch: ', branch)
+return self.update(sickbeard.PID)
 def displayShow(self, show=None):