1
0
mirror of https://github.com/moparisthebest/SickRage synced 2024-12-13 11:32:20 -05:00

Merge branch 'pulls/65'

This commit is contained in:
echel0n 2014-04-25 00:31:10 -07:00
commit dd20cf80ee
7 changed files with 55 additions and 9 deletions

View File

@@ -274,6 +274,16 @@
</label> </label>
</div> </div>
<div class="field-pair">
<label class="nocheck clearfix">
<span class="component-title">Proxy Settings</span>
<input type="text" name="proxy_setting" value="$sickbeard.PROXY_SETTING" size="40" />
</label>
<label class="nocheck clearfix">
<span class="component-title">&nbsp;</span>
<span class="component-desc">Proxy to use for connecting to providers. Leave empty to not use a proxy.</span>
</label>
</div>
<input type="submit" class="btn config_submitter" value="Save Changes" /> <input type="submit" class="btn config_submitter" value="Save Changes" />
</fieldset> </fieldset>
</div><!-- /component-group4 //--> </div><!-- /component-group4 //-->

View File

@@ -487,7 +487,8 @@ def initialize(consoleLogging=True):
GUI_NAME, HOME_LAYOUT, HISTORY_LAYOUT, DISPLAY_SHOW_SPECIALS, COMING_EPS_LAYOUT, COMING_EPS_SORT, COMING_EPS_DISPLAY_PAUSED, COMING_EPS_MISSED_RANGE, DATE_PRESET, TIME_PRESET, TIME_PRESET_W_SECONDS, \ GUI_NAME, HOME_LAYOUT, HISTORY_LAYOUT, DISPLAY_SHOW_SPECIALS, COMING_EPS_LAYOUT, COMING_EPS_SORT, COMING_EPS_DISPLAY_PAUSED, COMING_EPS_MISSED_RANGE, DATE_PRESET, TIME_PRESET, TIME_PRESET_W_SECONDS, \
METADATA_WDTV, METADATA_TIVO, IGNORE_WORDS, CALENDAR_UNPROTECTED, CREATE_MISSING_SHOW_DIRS, \ METADATA_WDTV, METADATA_TIVO, IGNORE_WORDS, CALENDAR_UNPROTECTED, CREATE_MISSING_SHOW_DIRS, \
ADD_SHOWS_WO_DIR, USE_SUBTITLES, SUBTITLES_LANGUAGES, SUBTITLES_DIR, SUBTITLES_SERVICES_LIST, SUBTITLES_SERVICES_ENABLED, SUBTITLES_HISTORY, SUBTITLES_FINDER_FREQUENCY, subtitlesFinderScheduler, \ ADD_SHOWS_WO_DIR, USE_SUBTITLES, SUBTITLES_LANGUAGES, SUBTITLES_DIR, SUBTITLES_SERVICES_LIST, SUBTITLES_SERVICES_ENABLED, SUBTITLES_HISTORY, SUBTITLES_FINDER_FREQUENCY, subtitlesFinderScheduler, \
USE_FAILED_DOWNLOADS, DELETE_FAILED, ANON_REDIRECT, LOCALHOST_IP, TMDB_API_KEY USE_FAILED_DOWNLOADS, DELETE_FAILED, ANON_REDIRECT, LOCALHOST_IP, TMDB_API_KEY, \
PROXY_SETTING
if __INITIALIZED__: if __INITIALIZED__:
return False return False
@@ -541,6 +542,7 @@ def initialize(consoleLogging=True):
LOCALHOST_IP = check_setting_str(CFG, 'General', 'localhost_ip', '') LOCALHOST_IP = check_setting_str(CFG, 'General', 'localhost_ip', '')
ANON_REDIRECT = check_setting_str(CFG, 'General', 'anon_redirect', 'http://dereferer.org/?') ANON_REDIRECT = check_setting_str(CFG, 'General', 'anon_redirect', 'http://dereferer.org/?')
PROXY_SETTING = check_setting_str(CFG, 'General', 'proxy_setting', '')
# attempt to help prevent users from breaking links by using a bad url # attempt to help prevent users from breaking links by using a bad url
if not ANON_REDIRECT.endswith('?'): if not ANON_REDIRECT.endswith('?'):
ANON_REDIRECT = '' ANON_REDIRECT = ''
@@ -1300,6 +1302,7 @@ def save_config():
new_config['General']['launch_browser'] = int(LAUNCH_BROWSER) new_config['General']['launch_browser'] = int(LAUNCH_BROWSER)
new_config['General']['update_shows_on_start'] = int(UPDATE_SHOWS_ON_START) new_config['General']['update_shows_on_start'] = int(UPDATE_SHOWS_ON_START)
new_config['General']['sort_article'] = int(SORT_ARTICLE) new_config['General']['sort_article'] = int(SORT_ARTICLE)
new_config['General']['proxy_setting'] = PROXY_SETTING if PROXY_SETTING else ''
new_config['General']['use_listview'] = int(USE_LISTVIEW) new_config['General']['use_listview'] = int(USE_LISTVIEW)
new_config['General']['metadata_xbmc'] = METADATA_XBMC new_config['General']['metadata_xbmc'] = METADATA_XBMC

View File

@@ -184,7 +184,16 @@ Returns a byte-string retrieved from the url provider.
url = urlparse.urlunparse(parsed) url = urlparse.urlunparse(parsed)
it = iter(req_headers) it = iter(req_headers)
resp = requests.get(url, params=params, data=post_data, headers=dict(zip(it, it)))
if sickbeard.PROXY_SETTING:
proxies = {
"http": sickbeard.PROXY_SETTING,
"https": sickbeard.PROXY_SETTING,
}
resp = requests.get(url, params=params, data=post_data, headers=dict(zip(it, it)), proxies=proxies)
else:
resp = requests.get(url, params=params, data=post_data, headers=dict(zip(it, it)))
except requests.HTTPError, e: except requests.HTTPError, e:
logger.log(u"HTTP error " + str(e.errno) + " while loading URL " + url, logger.WARNING) logger.log(u"HTTP error " + str(e.errno) + " while loading URL " + url, logger.WARNING)
return None return None

View File

@@ -313,7 +313,15 @@ class KATProvider(generic.TorrentProvider):
parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one
url = urlparse.urlunparse(parsed) url = urlparse.urlunparse(parsed)
r = requests.get(url) if sickbeard.PROXY_SETTING:
proxies = {
"http": sickbeard.PROXY_SETTING,
"https": sickbeard.PROXY_SETTING,
}
r = requests.get(url, proxies=proxies)
else:
r = requests.get(url)
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e: except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
logger.log(u"Error loading " + self.name + " URL: " + str(sys.exc_info()) + " - " + ex(e), logger.ERROR) logger.log(u"Error loading " + self.name + " URL: " + str(sys.exc_info()) + " - " + ex(e), logger.ERROR)
return None return None

View File

@@ -308,7 +308,15 @@ class ThePirateBayProvider(generic.TorrentProvider):
headers.update({'referer': self.proxy.getProxyURL()}) headers.update({'referer': self.proxy.getProxyURL()})
try: try:
r = requests.get(url, headers=headers) if sickbeard.PROXY_SETTING:
proxies = {
"http": sickbeard.PROXY_SETTING,
"https": sickbeard.PROXY_SETTING,
}
r = requests.get(url, headers=headers, proxies=proxies)
else:
r = requests.get(url, headers=headers)
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e: except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
logger.log(u"Error loading " + self.name + " URL: " + str(sys.exc_info()) + " - " + ex(e), logger.ERROR) logger.log(u"Error loading " + self.name + " URL: " + str(sys.exc_info()) + " - " + ex(e), logger.ERROR)
return None return None

View File

@@ -976,7 +976,7 @@ class ConfigGeneral:
def saveGeneral(self, log_dir=None, web_port=None, web_log=None, encryption_version=None, web_ipv6=None, def saveGeneral(self, log_dir=None, web_port=None, web_log=None, encryption_version=None, web_ipv6=None,
update_shows_on_start=None, launch_browser=None, web_username=None, use_api=None, api_key=None, update_shows_on_start=None, launch_browser=None, web_username=None, use_api=None, api_key=None,
web_password=None, version_notify=None, enable_https=None, https_cert=None, https_key=None, web_password=None, version_notify=None, enable_https=None, https_cert=None, https_key=None,
sort_article=None, auto_update=None, sort_article=None, auto_update=None, proxy_settings=None,
anon_redirect=None, git_path=None, calendar_unprotected=None, date_preset=None, time_preset=None): anon_redirect=None, git_path=None, calendar_unprotected=None, date_preset=None, time_preset=None):
results = [] results = []
@@ -991,6 +991,7 @@ class ConfigGeneral:
sickbeard.LAUNCH_BROWSER = config.checkbox_to_value(launch_browser) sickbeard.LAUNCH_BROWSER = config.checkbox_to_value(launch_browser)
sickbeard.SORT_ARTICLE = config.checkbox_to_value(sort_article) sickbeard.SORT_ARTICLE = config.checkbox_to_value(sort_article)
sickbeard.ANON_REDIRECT = anon_redirect sickbeard.ANON_REDIRECT = anon_redirect
sickbeard.PROXY_SETTINGS = proxy_settings
sickbeard.GIT_PATH = git_path sickbeard.GIT_PATH = git_path
sickbeard.CALENDAR_UNPROTECTED = config.checkbox_to_value(calendar_unprotected) sickbeard.CALENDAR_UNPROTECTED = config.checkbox_to_value(calendar_unprotected)
# sickbeard.LOG_DIR is set in config.change_LOG_DIR() # sickbeard.LOG_DIR is set in config.change_LOG_DIR()

View File

@@ -31,16 +31,23 @@ from sickbeard import classes
class APICheck(unittest.TestCase): class APICheck(unittest.TestCase):
lang = "en" lang = "en"
search_term = 'american' search_term = 'american dad'
results = {} results = {}
final_results = [] final_results = []
lINDEXER_API_PARMS = indexerApi(1).api_params.copy()
lINDEXER_API_PARMS['language'] = lang
lINDEXER_API_PARMS['custom_ui'] = classes.AllShowsListUI
t = indexerApi(1).indexer(**lINDEXER_API_PARMS)
test = t[search_term]
for indexer in indexerApi().indexers: for indexer in indexerApi().indexers:
lINDEXER_API_PARMS = indexerApi(indexer).api_params.copy() lINDEXER_API_PARMS = indexerApi(indexer).api_params.copy()
lINDEXER_API_PARMS['language'] = lang lINDEXER_API_PARMS['language'] = lang
lINDEXER_API_PARMS['custom_ui'] = classes.AllShowsListUI lINDEXER_API_PARMS['custom_ui'] = classes.AllShowsListUI
t = indexerApi(indexer).indexer(**lINDEXER_API_PARMS) t = indexerApi(indexer).indexer(**lINDEXER_API_PARMS)
t[search_term]
print("Searching for Show with searchterm: %s on Indexer: %s" % (search_term, indexerApi(indexer).name)) print("Searching for Show with searchterm: %s on Indexer: %s" % (search_term, indexerApi(indexer).name))
try: try: