mirror of https://github.com/moparisthebest/SickRage synced 2025-01-08 12:28:05 -05:00

Fixed a number of bugs in the tvcache module's code flow, added proper error handling for the threaded cache updates, and resolved several provider issues.
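The core of the fix is a pattern: run each provider's cache update on its own thread, and catch and log failures inside the worker itself, so one broken provider can no longer abort the whole search pass. A minimal standalone sketch of that idea, assuming hypothetical provider objects with an update_cache() method and a name attribute (not SickRage's actual API):

    import threading
    import traceback

    def update_cache_safely(provider):
        # Handle errors inside the worker so a failing provider only
        # affects its own thread, not the rest of the update pass.
        try:
            provider.update_cache()
        except Exception:
            print("Error while updating %s, skipping" % provider.name)
            traceback.print_exc()

    def update_all_caches(providers):
        threads = [threading.Thread(target=update_cache_safely, args=(p,),
                                    name="CACHEUPDATER :: [%s]" % p.name)
                   for p in providers]
        for t in threads:   # start everything first so updates run in parallel
            t.start()
        for t in threads:   # then wait for all updates to finish
            t.join()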

echel0n 2014-12-03 06:41:51 -08:00
parent fa02ff40e7
commit 4254916ae9
14 changed files with 108 additions and 88 deletions

View File

@@ -51,6 +51,15 @@
             </span>
         </label>
     </div>
+    <div class="field-pair">
+        <label for="randomize_providers">
+            <span class="component-title">Randomize Providers</span>
+            <span class="component-desc">
+                <input type="checkbox" name="randomize_providers" id="randomize_providers" class="enabler" <%= html_checked if sickbeard.RANDOMIZE_PROVIDERS == True else '' %>/>
+                <p>randomize the provider search order instead of going in order of placement</p>
+            </span>
+        </label>
+    </div>
     <div id="content_download_propers">
         <div class="field-pair">
             <label for="check_propers_interval">

View File

@@ -203,6 +203,7 @@ TORRENT_DIR = None
 DOWNLOAD_PROPERS = False
 CHECK_PROPERS_INTERVAL = None
 ALLOW_HIGH_PRIORITY = False
+RANDOMIZE_PROVIDERS = False
 
 AUTOPOSTPROCESSER_FREQUENCY = None
 DAILYSEARCH_FREQUENCY = None

@@ -480,7 +481,7 @@ def initialize(consoleLogging=True):
     with INIT_LOCK:
         global BRANCH, GIT_REMOTE, GIT_REMOTE_URL, CUR_COMMIT_HASH, CUR_COMMIT_BRANCH, ACTUAL_LOG_DIR, LOG_DIR, WEB_PORT, WEB_LOG, ENCRYPTION_VERSION, WEB_ROOT, WEB_USERNAME, WEB_PASSWORD, WEB_HOST, WEB_IPV6, USE_API, API_KEY, ENABLE_HTTPS, HTTPS_CERT, HTTPS_KEY, \
-            HANDLE_REVERSE_PROXY, USE_NZBS, USE_TORRENTS, NZB_METHOD, NZB_DIR, DOWNLOAD_PROPERS, CHECK_PROPERS_INTERVAL, ALLOW_HIGH_PRIORITY, TORRENT_METHOD, \
+            HANDLE_REVERSE_PROXY, USE_NZBS, USE_TORRENTS, NZB_METHOD, NZB_DIR, DOWNLOAD_PROPERS, RANDOMIZE_PROVIDERS, CHECK_PROPERS_INTERVAL, ALLOW_HIGH_PRIORITY, TORRENT_METHOD, \
             SAB_USERNAME, SAB_PASSWORD, SAB_APIKEY, SAB_CATEGORY, SAB_CATEGORY_ANIME, SAB_HOST, \
             NZBGET_USERNAME, NZBGET_PASSWORD, NZBGET_CATEGORY, NZBGET_CATEGORY_ANIME, NZBGET_PRIORITY, NZBGET_HOST, NZBGET_USE_HTTPS, backlogSearchScheduler, \
             TORRENT_USERNAME, TORRENT_PASSWORD, TORRENT_HOST, TORRENT_PATH, TORRENT_SEED_TIME, TORRENT_PAUSED, TORRENT_HIGH_BANDWIDTH, TORRENT_LABEL, TORRENT_LABEL_ANIME, TORRENT_VERIFY_CERT, \

@@ -696,6 +697,8 @@ def initialize(consoleLogging=True):
         if CHECK_PROPERS_INTERVAL not in ('15m', '45m', '90m', '4h', 'daily'):
             CHECK_PROPERS_INTERVAL = 'daily'
 
+        RANDOMIZE_PROVIDERS = bool(check_setting_int(CFG, 'General', 'randomize_providers', 0))
+
         ALLOW_HIGH_PRIORITY = bool(check_setting_int(CFG, 'General', 'allow_high_priority', 1))
 
         DAILYSEARCH_STARTUP = bool(check_setting_int(CFG, 'General', 'dailysearch_startup', 1))

@@ -1123,7 +1126,7 @@ def initialize(consoleLogging=True):
                          (METADATA_WDTV, metadata.wdtv),
                          (METADATA_TIVO, metadata.tivo),
                          (METADATA_MEDE8ER, metadata.mede8er),
                          ]:
         (cur_metadata_config, cur_metadata_class) = cur_metadata_tuple
         tmp_provider = cur_metadata_class.metadata_class()
         tmp_provider.set_config(cur_metadata_config)

@@ -1430,6 +1433,7 @@ def save_config():
     new_config['General']['backlog_frequency'] = int(BACKLOG_FREQUENCY)
     new_config['General']['update_frequency'] = int(UPDATE_FREQUENCY)
     new_config['General']['download_propers'] = int(DOWNLOAD_PROPERS)
+    new_config['General']['randomize_providers'] = int(RANDOMIZE_PROVIDERS)
     new_config['General']['check_propers_interval'] = CHECK_PROPERS_INTERVAL
     new_config['General']['allow_high_priority'] = int(ALLOW_HIGH_PRIORITY)
     new_config['General']['dailysearch_startup'] = int(DAILYSEARCH_STARTUP)

View File

@@ -74,7 +74,9 @@ class DailySearcher():
                 continue
 
             try:
-                end_time = network_timezones.parse_date_time(sqlEp['airdate'], show.airs, show.network) + datetime.timedelta(minutes=helpers.tryInt(show.runtime, 60))
+                end_time = network_timezones.parse_date_time(sqlEp['airdate'], show.airs,
+                                                             show.network) + datetime.timedelta(
+                    minutes=helpers.tryInt(show.runtime, 60))
 
                 # filter out any episodes that haven't aried yet
                 if end_time > curTime:
                     continue

View File

@@ -74,7 +74,7 @@ class ProperFinder():
         # for each provider get a list of the
         origThreadName = threading.currentThread().name
-        providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]
+        providers = [x for x in sickbeard.providers.sortedProviderList(sickbeard.RANDOMIZE_PROVIDERS) if x.isActive()]
 
         for curProvider in providers:
             threading.currentThread().name = origThreadName + " :: [" + curProvider.name + "]"

View File

@@ -45,9 +45,9 @@ import sickbeard
 import generic
 from sickbeard import logger
 from os import sys
+from random import shuffle
 
 
-def sortedProviderList():
+def sortedProviderList(randomize=False):
     initialList = sickbeard.providerList + sickbeard.newznabProviderList + sickbeard.torrentRssProviderList
     providerDict = dict(zip([x.getID() for x in initialList], initialList))

@@ -63,6 +63,9 @@ def sortedProviderList():
         if providerDict[curModule] not in newList:
             newList.append(providerDict[curModule])
 
+    if randomize:
+        shuffle(newList)
+
     return newList
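The new randomize flag leaves the dependency-resolved ordering intact and only shuffles the final list on request; callers opt in by passing sickbeard.RANDOMIZE_PROVIDERS, as the properFinder.py and search.py hunks show. Reduced to a standalone sketch (provider names invented for illustration):

    from random import shuffle

    def sorted_provider_list(providers, randomize=False):
        ordered = list(providers)  # configured placement order
        if randomize:
            shuffle(ordered)       # in-place shuffle of the search order
        return ordered

    # One run might yield ['torrentday', 'hdbits', 'newznab'], the next a
    # different order, spreading requests across providers instead of always
    # hitting the first-placed one.
    print(sorted_provider_list(['newznab', 'hdbits', 'torrentday'], randomize=True))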

View File

@@ -90,9 +90,7 @@ class FreshOnTVProvider(generic.TorrentProvider):
             return True
 
         if self._uid and self._hash:
             requests.utils.add_dict_to_cookiejar(self.session.cookies, self.cookies)
-
-
         else:
             login_params = {'username': self.username,
                             'password': self.password,

@@ -112,17 +110,20 @@ class FreshOnTVProvider(generic.TorrentProvider):
                 logger.log(u'Invalid username or password for ' + self.name + ' Check your settings', logger.ERROR)
                 return False
 
-            if requests.utils.dict_from_cookiejar(self.session.cookies)['uid'] and requests.utils.dict_from_cookiejar(self.session.cookies)['pass']:
-                self._uid = requests.utils.dict_from_cookiejar(self.session.cookies)['uid']
-                self._hash = requests.utils.dict_from_cookiejar(self.session.cookies)['pass']
+            try:
+                if requests.utils.dict_from_cookiejar(self.session.cookies)['uid'] and requests.utils.dict_from_cookiejar(self.session.cookies)['pass']:
+                    self._uid = requests.utils.dict_from_cookiejar(self.session.cookies)['uid']
+                    self._hash = requests.utils.dict_from_cookiejar(self.session.cookies)['pass']
 
-                self.cookies = {'uid': self._uid,
-                                'pass': self._hash
-                }
-                return True
-            else:
-                logger.log(u'Unable to obtain cookie for FreshOnTV', logger.ERROR)
-                return False
+                    self.cookies = {'uid': self._uid,
+                                    'pass': self._hash
+                                    }
+                    return True
+            except:
+                pass
+
+            logger.log(u'Unable to obtain cookie for FreshOnTV', logger.ERROR)
+            return False
 
     def _get_season_search_strings(self, ep_obj):

View File

@@ -205,16 +205,14 @@ class HDBitsCache(tvcache.TVCache):
         self.minTime = 15
 
     def _getRSSData(self):
-        parsedJSON = self.provider.getURL(self.provider.rss_url, post_data=self.provider._make_post_data_JSON(), json=True)
-
-        if not self.provider._checkAuthFromData(parsedJSON):
-            return []
-
-        if parsedJSON and 'data' in parsedJSON:
-            return parsedJSON['data']
-        else:
-            return []
+        try:
+            parsedJSON = self.provider.getURL(self.provider.rss_url, post_data=self.provider._make_post_data_JSON(),
+                                              json=True)
+            if self.provider._checkAuthFromData(parsedJSON):
+                return parsedJSON['data']
+        except:
+            pass
+
+        return []
 
 provider = HDBitsProvider()

View File

@@ -237,7 +237,7 @@ class NewznabProvider(generic.NZBProvider):
 
     def _checkAuthFromData(self, data):
 
-        if not data.get('entries', None):
+        if not data:
             return self._checkAuth()
 
         if data.feed.get('error', None):

View File

@@ -83,9 +83,7 @@ class TorrentDayProvider(generic.TorrentProvider):
             return True
 
         if self._uid and self._hash:
             requests.utils.add_dict_to_cookiejar(self.session.cookies, self.cookies)
-
-
         else:
             login_params = {'username': self.username,

@@ -94,6 +92,9 @@ class TorrentDayProvider(generic.TorrentProvider):
                             'submit.y': 0
                             }
 
+            if not self.session:
+                self.session = requests.Session()
+
             try:
                 response = self.session.post(self.urls['login'], data=login_params, timeout=30, verify=False)
             except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:

@@ -108,18 +109,20 @@ class TorrentDayProvider(generic.TorrentProvider):
                 logger.log(u'Invalid username or password for ' + self.name + ', Check your settings!', logger.ERROR)
                 return False
 
-            if requests.utils.dict_from_cookiejar(self.session.cookies)['uid'] and requests.utils.dict_from_cookiejar(self.session.cookies)['pass']:
-                self._uid = requests.utils.dict_from_cookiejar(self.session.cookies)['uid']
-                self._hash = requests.utils.dict_from_cookiejar(self.session.cookies)['pass']
+            try:
+                if requests.utils.dict_from_cookiejar(self.session.cookies)['uid'] and requests.utils.dict_from_cookiejar(self.session.cookies)['pass']:
+                    self._uid = requests.utils.dict_from_cookiejar(self.session.cookies)['uid']
+                    self._hash = requests.utils.dict_from_cookiejar(self.session.cookies)['pass']
 
-                self.cookies = {'uid': self._uid,
-                                'pass': self._hash
-                }
-                return True
-            else:
-                logger.log(u'Unable to obtain cookie for TorrentDay', logger.ERROR)
-                return False
+                    self.cookies = {'uid': self._uid,
+                                    'pass': self._hash
+                                    }
+                    return True
+            except:
+                pass
+
+            logger.log(u'Unable to obtain cookie for TorrentDay', logger.ERROR)
+            return False
 
     def _get_season_search_strings(self, ep_obj):

View File

@@ -60,7 +60,7 @@ class TvTorrentsProvider(generic.TorrentProvider):
         return True
 
     def _checkAuthFromData(self, data):
-        if not data.get('entries', None):
+        if not data:
            return self._checkAuth()
 
        if "User can't be found" in data.feed.get('title', None) or "Invalid Hash" in data.feed.get('title', None):

View File

@@ -17,10 +17,6 @@ from sqliteshelf import SQLiteShelf
 
 class RSSFeeds:
     def __init__(self, db_name):
-        self.rssItems = {'entries': None,
-                         'feed': None
-        }
-
         try:
             db_name = ek.ek(os.path.join, sickbeard.CACHE_DIR, 'rss', db_name) + '.db'
             if not os.path.exists(os.path.dirname(db_name)):

@@ -46,11 +42,6 @@ class RSSFeeds:
         try:
             fc = Cache(self.rssDB)
-            feed = fc.fetch(url, False, False, request_headers)
-
-            self.rssItems['entries'] = feed.get('entries', None)
-            self.rssItems['feed'] = feed.get('feed', None)
+            return fc.fetch(url, False, False, request_headers)
         finally:
             self.rssDB.close()
-
-        return self.rssItems

View File

@@ -370,19 +370,17 @@ def searchForNeededEpisodes():
     episodes = []
 
     for curShow in show_list:
-        if curShow.paused:
-            continue
-
-        episodes.extend(wantedEpisodes(curShow, fromDate))
+        if not curShow.paused:
+            episodes.extend(wantedEpisodes(curShow, fromDate))
 
-    providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive() and x.enable_daily]
+    providers = [x for x in sickbeard.providers.sortedProviderList(sickbeard.RANDOMIZE_PROVIDERS) if x.isActive() and x.enable_daily]
     for curProvider in providers:
         # spawn separate threads for each provider so we don't need to wait for providers with slow network operation
-        threads.append(threading.Thread(target=curProvider.cache.updateCache, name=origThreadName +
-                                        " :: [" + curProvider.name + "]"))
-
-        # start the thread we just created
-        threads[-1].start()
+        threads += [threading.Thread(target=curProvider.cache.updateCache, name=origThreadName + " :: [" + curProvider.name + "]")]
+
+    # start the thread we just created
+    for t in threads:
+        t.start()
 
     # wait for all threads to finish
     for t in threads:

@@ -390,9 +388,7 @@ def searchForNeededEpisodes():
     for curProvider in providers:
         threading.currentThread().name = origThreadName + " :: [" + curProvider.name + "]"
         curFoundResults = curProvider.searchRSS(episodes)
-
-
         didSearch = True
 
     # pick a single result for each episode, respecting existing results

@@ -452,7 +448,7 @@ def searchProviders(show, episodes, manualSearch=False):
     origThreadName = threading.currentThread().name
 
-    providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive() and x.enable_backlog]
+    providers = [x for x in sickbeard.providers.sortedProviderList(sickbeard.RANDOMIZE_PROVIDERS) if x.isActive() and x.enable_backlog]
     for providerNum, curProvider in enumerate(providers):
         if curProvider.anime_only and not show.is_anime:
             logger.log(u"" + str(show.name) + " is not an anime, skiping", logger.DEBUG)

View File

@@ -21,6 +21,7 @@ from __future__ import with_statement
 import time
 import datetime
 import itertools
+import traceback
 
 import sickbeard

@@ -28,7 +29,7 @@ from sickbeard import db
 from sickbeard import logger
 from sickbeard.common import Quality
 from sickbeard import helpers, show_name_helpers
-from sickbeard.exceptions import MultipleShowObjectsException
+from sickbeard.exceptions import MultipleShowObjectsException, ex
 from sickbeard.exceptions import AuthException
 from sickbeard.rssfeeds import RSSFeeds
 from sickbeard import clients

@@ -112,31 +113,41 @@ class TVCache():
         if not self.shouldUpdate():
             return
 
-        if self._checkAuth(None):
-            data = self._getRSSData()
-
-            if data.get('entries', None):
-                # clear cache
-                self._clearCache()
-
-                # set updated
-                self.setLastUpdate()
-
-                if self._checkAuth(data):
-                    cl = []
-                    for item in data.get('entries', []):
-                        title, url = self._get_title_and_url(item)
-                        ci = self._parseItem(title, url)
-                        if ci is not None:
-                            cl.append(ci)
-
-                    if len(cl) > 0:
-                        myDB = self._getDB()
-                        myDB.mass_action(cl)
-
-                else:
-                    raise AuthException(
-                        u"Your authentication credentials for " + self.provider.name + " are incorrect, check your config")
+        try:
+            if self._checkAuth(None):
+                data = self._getRSSData()
+
+                if len(data) > 0:
+                    # clear cache
+                    self._clearCache()
+
+                    # set updated
+                    self.setLastUpdate()
+
+                    try:
+                        items = data.get('entries', [])
+                    except:
+                        items = data
+
+                    if self._checkAuth(items):
+                        cl = []
+                        for item in items:
+                            title, url = self._get_title_and_url(item)
+                            ci = self._parseItem(title, url)
+                            if ci is not None:
+                                cl.append(ci)
+
+                        if len(cl) > 0:
+                            myDB = self._getDB()
+                            myDB.mass_action(cl)
+
+                    else:
+                        raise AuthException(
+                            u"Your authentication credentials for " + self.provider.name + " are incorrect, check your config")
+        except AuthException, e:
+            logger.log(u"Authentication error: " + ex(e), logger.ERROR)
+        except Exception, e:
+            logger.log(u"Error while searching " + self.provider.name + ", skipping: " + ex(e), logger.ERROR)
+            logger.log(traceback.format_exc(), logger.DEBUG)
 
         return []
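With this change a provider's _getRSSData() may return either a parsed feed dict (items under 'entries') or a bare list, as the HDBits hunk above now does, so updateCache normalizes the shape before iterating. That normalization, isolated as a sketch (the diff's bare except is narrowed to AttributeError here):

    def normalize_items(data):
        # Feed-style results keep their items under 'entries';
        # plain lists (e.g. a JSON 'data' array) pass through unchanged.
        try:
            return data.get('entries', [])
        except AttributeError:
            return data

    assert normalize_items({'entries': ['a', 'b']}) == ['a', 'b']
    assert normalize_items(['c', 'd']) == ['c', 'd']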

View File

@@ -1678,7 +1678,7 @@ class ConfigSearch(MainHandler):
                    nzbget_host=None, nzbget_use_https=None, backlog_days=None, backlog_frequency=None,
                    dailysearch_frequency=None, nzb_method=None, torrent_method=None, usenet_retention=None,
                    download_propers=None, check_propers_interval=None, allow_high_priority=None,
-                   backlog_startup=None, dailysearch_startup=None,
+                   backlog_startup=None, dailysearch_startup=None, randomize_providers=None,
                    torrent_dir=None, torrent_username=None, torrent_password=None, torrent_host=None,
                    torrent_label=None, torrent_label_anime=None, torrent_path=None, torrent_verify_cert=None,
                    torrent_seed_time=None, torrent_paused=None, torrent_high_bandwidth=None, ignore_words=None,

@@ -1707,6 +1707,8 @@ class ConfigSearch(MainHandler):
         sickbeard.IGNORE_WORDS = ignore_words if ignore_words else ""
         sickbeard.REQUIRE_WORDS = require_words if require_words else ""
 
+        sickbeard.RANDOMIZE_PROVIDERS = config.checkbox_to_value(randomize_providers)
+
         sickbeard.DOWNLOAD_PROPERS = config.checkbox_to_value(download_propers)
         sickbeard.CHECK_PROPERS_INTERVAL = check_propers_interval