Fixed several bugs in the tvcache module's code flow, added proper error handling for the threaded cache updates, and resolved some provider issues.
commit 4254916ae9
parent fa02ff40e7
@@ -51,6 +51,15 @@
 </span>
 </label>
 </div>
+<div class="field-pair">
+    <label for="randomize_providers">
+        <span class="component-title">Randomize Providers</span>
+        <span class="component-desc">
+            <input type="checkbox" name="randomize_providers" id="randomize_providers" class="enabler" <%= html_checked if sickbeard.RANDOMIZE_PROVIDERS == True else '' %>/>
+            <p>randomize the provider search order instead of going in order of placement</p>
+        </span>
+    </label>
+</div>
 <div id="content_download_propers">
 <div class="field-pair">
 <label for="check_propers_interval">
@@ -203,6 +203,7 @@ TORRENT_DIR = None
 DOWNLOAD_PROPERS = False
 CHECK_PROPERS_INTERVAL = None
 ALLOW_HIGH_PRIORITY = False
+RANDOMIZE_PROVIDERS = False
 
 AUTOPOSTPROCESSER_FREQUENCY = None
 DAILYSEARCH_FREQUENCY = None
@@ -480,7 +481,7 @@ def initialize(consoleLogging=True):
     with INIT_LOCK:
 
         global BRANCH, GIT_REMOTE, GIT_REMOTE_URL, CUR_COMMIT_HASH, CUR_COMMIT_BRANCH, ACTUAL_LOG_DIR, LOG_DIR, WEB_PORT, WEB_LOG, ENCRYPTION_VERSION, WEB_ROOT, WEB_USERNAME, WEB_PASSWORD, WEB_HOST, WEB_IPV6, USE_API, API_KEY, ENABLE_HTTPS, HTTPS_CERT, HTTPS_KEY, \
-            HANDLE_REVERSE_PROXY, USE_NZBS, USE_TORRENTS, NZB_METHOD, NZB_DIR, DOWNLOAD_PROPERS, CHECK_PROPERS_INTERVAL, ALLOW_HIGH_PRIORITY, TORRENT_METHOD, \
+            HANDLE_REVERSE_PROXY, USE_NZBS, USE_TORRENTS, NZB_METHOD, NZB_DIR, DOWNLOAD_PROPERS, RANDOMIZE_PROVIDERS, CHECK_PROPERS_INTERVAL, ALLOW_HIGH_PRIORITY, TORRENT_METHOD, \
             SAB_USERNAME, SAB_PASSWORD, SAB_APIKEY, SAB_CATEGORY, SAB_CATEGORY_ANIME, SAB_HOST, \
             NZBGET_USERNAME, NZBGET_PASSWORD, NZBGET_CATEGORY, NZBGET_CATEGORY_ANIME, NZBGET_PRIORITY, NZBGET_HOST, NZBGET_USE_HTTPS, backlogSearchScheduler, \
             TORRENT_USERNAME, TORRENT_PASSWORD, TORRENT_HOST, TORRENT_PATH, TORRENT_SEED_TIME, TORRENT_PAUSED, TORRENT_HIGH_BANDWIDTH, TORRENT_LABEL, TORRENT_LABEL_ANIME, TORRENT_VERIFY_CERT, \
@@ -696,6 +697,8 @@ def initialize(consoleLogging=True):
         if CHECK_PROPERS_INTERVAL not in ('15m', '45m', '90m', '4h', 'daily'):
             CHECK_PROPERS_INTERVAL = 'daily'
 
+        RANDOMIZE_PROVIDERS = bool(check_setting_int(CFG, 'General', 'randomize_providers', 0))
+
         ALLOW_HIGH_PRIORITY = bool(check_setting_int(CFG, 'General', 'allow_high_priority', 1))
 
         DAILYSEARCH_STARTUP = bool(check_setting_int(CFG, 'General', 'dailysearch_startup', 1))
@@ -1123,7 +1126,7 @@ def initialize(consoleLogging=True):
                              (METADATA_WDTV, metadata.wdtv),
                              (METADATA_TIVO, metadata.tivo),
                              (METADATA_MEDE8ER, metadata.mede8er),
-                             ]:
+                         ]:
         (cur_metadata_config, cur_metadata_class) = cur_metadata_tuple
         tmp_provider = cur_metadata_class.metadata_class()
         tmp_provider.set_config(cur_metadata_config)
@@ -1430,6 +1433,7 @@ def save_config():
     new_config['General']['backlog_frequency'] = int(BACKLOG_FREQUENCY)
     new_config['General']['update_frequency'] = int(UPDATE_FREQUENCY)
     new_config['General']['download_propers'] = int(DOWNLOAD_PROPERS)
+    new_config['General']['randomize_providers'] = int(RANDOMIZE_PROVIDERS)
     new_config['General']['check_propers_interval'] = CHECK_PROPERS_INTERVAL
     new_config['General']['allow_high_priority'] = int(ALLOW_HIGH_PRIORITY)
     new_config['General']['dailysearch_startup'] = int(DAILYSEARCH_STARTUP)
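The save and load sides of the new setting mirror each other: save_config() persists the boolean as an int, and initialize() reads it back through check_setting_int with a default of 0 (off). A minimal sketch of that round trip, using a plain nested dict in place of the real ConfigObj-backed CFG and a simplified stand-in for check_setting_int:

    # Simplified stand-in for SickRage's check_setting_int helper.
    def check_setting_int(cfg, section, key, default):
        try:
            return int(cfg[section][key])
        except (KeyError, TypeError, ValueError):
            return default

    cfg = {'General': {}}

    # save_config() side: booleans are written out as 0/1 ...
    RANDOMIZE_PROVIDERS = True
    cfg['General']['randomize_providers'] = int(RANDOMIZE_PROVIDERS)

    # ... initialize() side: and read back as bools, defaulting to off.
    assert bool(check_setting_int(cfg, 'General', 'randomize_providers', 0)) is True
    assert bool(check_setting_int(cfg, 'General', 'missing_key', 0)) is False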
@@ -74,7 +74,9 @@ class DailySearcher():
                 continue
 
             try:
-                end_time = network_timezones.parse_date_time(sqlEp['airdate'], show.airs, show.network) + datetime.timedelta(minutes=helpers.tryInt(show.runtime, 60))
+                end_time = network_timezones.parse_date_time(sqlEp['airdate'], show.airs,
+                                                             show.network) + datetime.timedelta(
+                    minutes=helpers.tryInt(show.runtime, 60))
                 # filter out any episodes that haven't aired yet
                 if end_time > curTime:
                     continue
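The reflowed expression computes when an episode should have finished airing: the parsed airdate plus the show's runtime, falling back to 60 minutes via helpers.tryInt. A simplified sketch of the filter, with naive datetimes standing in for network_timezones.parse_date_time:

    import datetime

    def should_search(airdate, runtime_minutes, now=None):
        # Episodes are skipped until their air window has ended.
        now = now or datetime.datetime.now()
        end_time = airdate + datetime.timedelta(minutes=runtime_minutes or 60)
        return end_time <= now  # mirrors "if end_time > curTime: continue"

    aired = datetime.datetime(2014, 5, 1, 21, 0)
    print(should_search(aired, 60))  # True once 22:00 on 2014-05-01 has passed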
@@ -74,7 +74,7 @@ class ProperFinder():
 
         # for each provider get a list of the
         origThreadName = threading.currentThread().name
-        providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]
+        providers = [x for x in sickbeard.providers.sortedProviderList(sickbeard.RANDOMIZE_PROVIDERS) if x.isActive()]
         for curProvider in providers:
             threading.currentThread().name = origThreadName + " :: [" + curProvider.name + "]"
 
@@ -45,9 +45,9 @@ import sickbeard
 import generic
 from sickbeard import logger
 from os import sys
+from random import shuffle
 
 
-def sortedProviderList():
+def sortedProviderList(randomize=False):
     initialList = sickbeard.providerList + sickbeard.newznabProviderList + sickbeard.torrentRssProviderList
     providerDict = dict(zip([x.getID() for x in initialList], initialList))
@@ -63,6 +63,9 @@ def sortedProviderList():
         if providerDict[curModule] not in newList:
             newList.append(providerDict[curModule])
 
+    if randomize:
+        shuffle(newList)
+
     return newList
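In isolation, the new randomize flag just shuffles the already-sorted list before returning it, so every caller that passes sickbeard.RANDOMIZE_PROVIDERS gets a fresh random order per search pass. A standalone sketch (hypothetical provider names; the real function also merges the newznab and torrent-RSS provider lists):

    from random import shuffle

    def sorted_provider_list(providers, randomize=False):
        new_list = list(providers)  # copy so the configured order is untouched
        if randomize:
            shuffle(new_list)       # in-place, per-call randomization
        return new_list

    # Consecutive calls with randomize=True may return different orders.
    print(sorted_provider_list(['hdbits', 'torrentday', 'freshontv'], randomize=True))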
@@ -90,9 +90,7 @@ class FreshOnTVProvider(generic.TorrentProvider):
             return True
 
         if self._uid and self._hash:
-
             requests.utils.add_dict_to_cookiejar(self.session.cookies, self.cookies)
-
         else:
             login_params = {'username': self.username,
                             'password': self.password,
@@ -112,17 +110,20 @@ class FreshOnTVProvider(generic.TorrentProvider):
             logger.log(u'Invalid username or password for ' + self.name + ' Check your settings', logger.ERROR)
             return False
 
-        if requests.utils.dict_from_cookiejar(self.session.cookies)['uid'] and requests.utils.dict_from_cookiejar(self.session.cookies)['pass']:
-            self._uid = requests.utils.dict_from_cookiejar(self.session.cookies)['uid']
-            self._hash = requests.utils.dict_from_cookiejar(self.session.cookies)['pass']
+        try:
+            if requests.utils.dict_from_cookiejar(self.session.cookies)['uid'] and requests.utils.dict_from_cookiejar(self.session.cookies)['pass']:
+                self._uid = requests.utils.dict_from_cookiejar(self.session.cookies)['uid']
+                self._hash = requests.utils.dict_from_cookiejar(self.session.cookies)['pass']
 
-            self.cookies = {'uid': self._uid,
-                            'pass': self._hash
-                            }
-            return True
-        else:
-            logger.log(u'Unable to obtain cookie for FreshOnTV', logger.ERROR)
-            return False
+                self.cookies = {'uid': self._uid,
+                                'pass': self._hash
+                                }
+                return True
+        except:
+            pass
+
+        logger.log(u'Unable to obtain cookie for FreshOnTV', logger.ERROR)
+        return False
 
     def _get_season_search_strings(self, ep_obj):
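Previously a login response that lacked the uid/pass cookies raised an uncaught KeyError from dict_from_cookiejar(...)['uid']; now every failure mode funnels into one logged exit. A condensed sketch of the hardened pattern (hypothetical extract_auth_cookies helper; the providers store the result on self rather than returning it):

    import requests

    def extract_auth_cookies(session):
        # Returns {'uid': ..., 'pass': ...} on success, None on any failure.
        try:
            cookies = requests.utils.dict_from_cookiejar(session.cookies)
            if cookies['uid'] and cookies['pass']:  # KeyError if either is missing
                return {'uid': cookies['uid'], 'pass': cookies['pass']}
        except Exception:
            pass                                    # fall through to the error path
        return None                                 # caller logs "Unable to obtain cookie"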
@@ -205,16 +205,14 @@ class HDBitsCache(tvcache.TVCache):
         self.minTime = 15
 
     def _getRSSData(self):
-        parsedJSON = self.provider.getURL(self.provider.rss_url, post_data=self.provider._make_post_data_JSON(), json=True)
-
-        if not self.provider._checkAuthFromData(parsedJSON):
-            return []
-
-        if parsedJSON and 'data' in parsedJSON:
-            return parsedJSON['data']
-        else:
-            return []
+        try:
+            parsedJSON = self.provider.getURL(self.provider.rss_url, post_data=self.provider._make_post_data_JSON(),
+                                              json=True)
+            if self.provider._checkAuthFromData(parsedJSON):
+                return parsedJSON['data']
+        except:
+            pass
+
+        return []
 
 provider = HDBitsProvider()
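The rewritten _getRSSData collapses three early returns into one guarded path, so callers always receive a list and never an exception; note that a response missing the 'data' key is swallowed by the bare except as well. The contract in miniature (hypothetical fetch/check_auth callables standing in for provider.getURL and _checkAuthFromData):

    def get_rss_data(fetch, check_auth):
        try:
            parsed = fetch()           # may raise, or return None / partial JSON
            if check_auth(parsed):
                return parsed['data']  # a KeyError here is also caught below
        except Exception:
            pass
        return []                      # the cache layer always gets a list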
@@ -237,7 +237,7 @@ class NewznabProvider(generic.NZBProvider):
 
     def _checkAuthFromData(self, data):
 
-        if not data.get('entries', None):
+        if not data:
             return self._checkAuth()
 
         if data.feed.get('error', None):
@@ -83,9 +83,7 @@ class TorrentDayProvider(generic.TorrentProvider):
             return True
 
         if self._uid and self._hash:
-
             requests.utils.add_dict_to_cookiejar(self.session.cookies, self.cookies)
-
         else:
 
             login_params = {'username': self.username,
@@ -94,6 +92,9 @@ class TorrentDayProvider(generic.TorrentProvider):
                         'submit.y': 0
                         }
 
+        if not self.session:
+            self.session = requests.Session()
+
         try:
             response = self.session.post(self.urls['login'], data=login_params, timeout=30, verify=False)
         except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
@@ -108,18 +109,20 @@ class TorrentDayProvider(generic.TorrentProvider):
             logger.log(u'Invalid username or password for ' + self.name + ', Check your settings!', logger.ERROR)
             return False
 
-        if requests.utils.dict_from_cookiejar(self.session.cookies)['uid'] and requests.utils.dict_from_cookiejar(self.session.cookies)['pass']:
-            self._uid = requests.utils.dict_from_cookiejar(self.session.cookies)['uid']
-            self._hash = requests.utils.dict_from_cookiejar(self.session.cookies)['pass']
+        try:
+            if requests.utils.dict_from_cookiejar(self.session.cookies)['uid'] and requests.utils.dict_from_cookiejar(self.session.cookies)['pass']:
+                self._uid = requests.utils.dict_from_cookiejar(self.session.cookies)['uid']
+                self._hash = requests.utils.dict_from_cookiejar(self.session.cookies)['pass']
 
-            self.cookies = {'uid': self._uid,
-                            'pass': self._hash
-                            }
-            return True
-
-        else:
-            logger.log(u'Unable to obtain cookie for TorrentDay', logger.ERROR)
-            return False
+                self.cookies = {'uid': self._uid,
+                                'pass': self._hash
+                                }
+                return True
+        except:
+            pass
+
+        logger.log(u'Unable to obtain cookie for TorrentDay', logger.ERROR)
+        return False
 
 
     def _get_season_search_strings(self, ep_obj):
@@ -60,7 +60,7 @@ class TvTorrentsProvider(generic.TorrentProvider):
         return True
 
     def _checkAuthFromData(self, data):
-        if not data.get('entries', None):
+        if not data:
             return self._checkAuth()
 
         if "User can't be found" in data.feed.get('title', None) or "Invalid Hash" in data.feed.get('title', None):
@@ -17,10 +17,6 @@ from sqliteshelf import SQLiteShelf
 
 class RSSFeeds:
     def __init__(self, db_name):
-        self.rssItems = {'entries': None,
-                         'feed': None
-                         }
-
         try:
             db_name = ek.ek(os.path.join, sickbeard.CACHE_DIR, 'rss', db_name) + '.db'
             if not os.path.exists(os.path.dirname(db_name)):
@@ -46,11 +42,6 @@ class RSSFeeds:
 
         try:
             fc = Cache(self.rssDB)
-            feed = fc.fetch(url, False, False, request_headers)
-
-            self.rssItems['entries'] = feed.get('entries', None)
-            self.rssItems['feed'] = feed.get('feed', None)
+            return fc.fetch(url, False, False, request_headers)
         finally:
             self.rssDB.close()
-
-        return self.rssItems
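With the rssItems dict gone, the fetch result is returned straight out of the try block, and the finally clause still closes the SQLite shelf on success and failure alike. In miniature (hypothetical fetch/db arguments):

    def get_feed(fetch, db):
        # Returning from inside try is safe: finally runs either way,
        # so the shelf is closed whether fetch() succeeds or raises.
        try:
            return fetch()
        finally:
            db.close()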
@@ -370,19 +370,17 @@ def searchForNeededEpisodes():
     episodes = []
 
     for curShow in show_list:
-        if curShow.paused:
-            continue
-
-        episodes.extend(wantedEpisodes(curShow, fromDate))
+        if not curShow.paused:
+            episodes.extend(wantedEpisodes(curShow, fromDate))
 
-    providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive() and x.enable_daily]
+    providers = [x for x in sickbeard.providers.sortedProviderList(sickbeard.RANDOMIZE_PROVIDERS) if x.isActive() and x.enable_daily]
     for curProvider in providers:
 
         # spawn separate threads for each provider so we don't need to wait for providers with slow network operation
-        threads.append(threading.Thread(target=curProvider.cache.updateCache, name=origThreadName +
-                                        " :: [" + curProvider.name + "]"))
-        # start the thread we just created
-        threads[-1].start()
+        threads += [threading.Thread(target=curProvider.cache.updateCache, name=origThreadName + " :: [" + curProvider.name + "]")]
 
+    # start the threads we just created
+    for t in threads:
+        t.start()
 
     # wait for all threads to finish
     for t in threads:
@@ -390,9 +388,7 @@ def searchForNeededEpisodes():
 
     for curProvider in providers:
         threading.currentThread().name = origThreadName + " :: [" + curProvider.name + "]"
-
         curFoundResults = curProvider.searchRSS(episodes)
-
         didSearch = True
 
     # pick a single result for each episode, respecting existing results
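Taken together, the two hunks above change the daily search from start-each-thread-as-it-is-created to a fan-out/fan-in: build all workers first, start them together, then join before any results are read. A sketch of that shape (the name prefix is illustrative; the real code derives it from the current thread's name):

    import threading

    def update_all_caches(providers, thread_name_prefix="DAILYSEARCHER"):
        # Phase 1: one worker per provider, none started yet.
        threads = [threading.Thread(target=p.cache.updateCache,
                                    name=thread_name_prefix + " :: [" + p.name + "]")
                   for p in providers]
        # Phase 2: start everything, then wait; wall time is now bounded by
        # the slowest provider instead of the sum of all providers.
        for t in threads:
            t.start()
        for t in threads:
            t.join()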
@@ -452,7 +448,7 @@ def searchProviders(show, episodes, manualSearch=False):
 
     origThreadName = threading.currentThread().name
 
-    providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive() and x.enable_backlog]
+    providers = [x for x in sickbeard.providers.sortedProviderList(sickbeard.RANDOMIZE_PROVIDERS) if x.isActive() and x.enable_backlog]
     for providerNum, curProvider in enumerate(providers):
         if curProvider.anime_only and not show.is_anime:
             logger.log(u"" + str(show.name) + " is not an anime, skipping", logger.DEBUG)
@@ -21,6 +21,7 @@ from __future__ import with_statement
 import time
 import datetime
 import itertools
+import traceback
 
 import sickbeard
 
@@ -28,7 +29,7 @@ from sickbeard import db
 from sickbeard import logger
 from sickbeard.common import Quality
 from sickbeard import helpers, show_name_helpers
-from sickbeard.exceptions import MultipleShowObjectsException
+from sickbeard.exceptions import MultipleShowObjectsException, ex
 from sickbeard.exceptions import AuthException
 from sickbeard.rssfeeds import RSSFeeds
 from sickbeard import clients
@@ -112,31 +113,41 @@ class TVCache():
         if not self.shouldUpdate():
             return
 
-        if self._checkAuth(None):
-            data = self._getRSSData()
-            if data.get('entries', None):
-                # clear cache
-                self._clearCache()
-
-                # set updated
-                self.setLastUpdate()
-
-                if self._checkAuth(data):
-                    cl = []
-                    for item in data.get('entries', []):
-                        title, url = self._get_title_and_url(item)
-                        ci = self._parseItem(title, url)
-                        if ci is not None:
-                            cl.append(ci)
-
-                    if len(cl) > 0:
-                        myDB = self._getDB()
-                        myDB.mass_action(cl)
-
-                else:
-                    raise AuthException(
-                        u"Your authentication credentials for " + self.provider.name + " are incorrect, check your config")
+        try:
+            if self._checkAuth(None):
+                data = self._getRSSData()
+                if len(data) > 0:
+                    # clear cache
+                    self._clearCache()
+
+                    # set updated
+                    self.setLastUpdate()
+
+                    try:
+                        items = data.get('entries', [])
+                    except:
+                        items = data
+
+                    if self._checkAuth(items):
+                        cl = []
+                        for item in items:
+                            title, url = self._get_title_and_url(item)
+                            ci = self._parseItem(title, url)
+                            if ci is not None:
+                                cl.append(ci)
+
+                        if len(cl) > 0:
+                            myDB = self._getDB()
+                            myDB.mass_action(cl)
+
+                    else:
+                        raise AuthException(
+                            u"Your authentication credentials for " + self.provider.name + " are incorrect, check your config")
+        except AuthException, e:
+            logger.log(u"Authentication error: " + ex(e), logger.ERROR)
+        except Exception, e:
+            logger.log(u"Error while searching " + self.provider.name + ", skipping: " + ex(e), logger.ERROR)
+            logger.log(traceback.format_exc(), logger.DEBUG)
 
         return []
@@ -1678,7 +1678,7 @@ class ConfigSearch(MainHandler):
                    nzbget_host=None, nzbget_use_https=None, backlog_days=None, backlog_frequency=None,
                    dailysearch_frequency=None, nzb_method=None, torrent_method=None, usenet_retention=None,
                    download_propers=None, check_propers_interval=None, allow_high_priority=None,
-                   backlog_startup=None, dailysearch_startup=None,
+                   randomize_providers=None, backlog_startup=None, dailysearch_startup=None,
                    torrent_dir=None, torrent_username=None, torrent_password=None, torrent_host=None,
                    torrent_label=None, torrent_label_anime=None, torrent_path=None, torrent_verify_cert=None,
                    torrent_seed_time=None, torrent_paused=None, torrent_high_bandwidth=None, ignore_words=None,
@@ -1707,6 +1707,8 @@ class ConfigSearch(MainHandler):
         sickbeard.IGNORE_WORDS = ignore_words if ignore_words else ""
         sickbeard.REQUIRE_WORDS = require_words if require_words else ""
 
+        sickbeard.RANDOMIZE_PROVIDERS = config.checkbox_to_value(randomize_providers)
+
         sickbeard.DOWNLOAD_PROPERS = config.checkbox_to_value(download_propers)
         sickbeard.CHECK_PROPERS_INTERVAL = check_propers_interval
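On save, the browser only posts a checkbox field when it is ticked, so the handler receives a string like 'on' or nothing at all; config.checkbox_to_value normalizes that into the stored setting. A rough sketch of what such a helper does (the real helper's signature and return values may differ):

    def checkbox_to_value(option, value_on=1, value_off=0):
        # HTML forms omit unchecked boxes entirely, so None means "off".
        return value_on if option in ('on', 'true', True, 1, '1') else value_off

    assert checkbox_to_value('on') == 1   # box was ticked
    assert checkbox_to_value(None) == 0   # field absent from the POST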