# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.

import urllib
import time
import datetime
import os

try:
    import xml.etree.cElementTree as etree
except ImportError:
    import elementtree.ElementTree as etree

import sickbeard
import generic

from sickbeard import classes
from sickbeard import helpers
from sickbeard import scene_exceptions
from sickbeard import encodingKludge as ek
from sickbeard import logger
from sickbeard import tvcache
from sickbeard.exceptions import ex, AuthException

from lib import requests
from lib.requests import exceptions
from lib.bencode import bdecode


class NewznabProvider(generic.NZBProvider):
    def __init__(self, name, url, key='', catIDs='5030,5040', search_mode='eponly', search_fallback=False,
                 enable_daily=False, enable_backlog=False):

        generic.NZBProvider.__init__(self, name)

        self.cache = NewznabCache(self)

        self.url = url
        self.key = key

        self.search_mode = search_mode
        self.search_fallback = search_fallback
        self.enable_daily = enable_daily
        self.enable_backlog = enable_backlog

        # a key of '0' indicates that no API key is needed
        if self.key == '0':
            self.needs_auth = False
        else:
            self.needs_auth = True

        if catIDs:
            self.catIDs = catIDs
        else:
            self.catIDs = '5030,5040'

        self.enabled = True
        self.supportsBacklog = True

        self.default = False

    def configStr(self):
        return self.name + '|' + self.url + '|' + self.key + '|' + self.catIDs + '|' + str(
            int(self.enabled)) + '|' + self.search_mode + '|' + str(int(self.search_fallback)) + '|' + str(
            int(self.enable_daily)) + '|' + str(int(self.enable_backlog))
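
    # Illustrative (hypothetical values): configStr() for a provider named
    # "MyIndexer" would serialize to
    #   MyIndexer|https://indexer.example/|abc123|5030,5040|1|eponly|0|1|1
    # i.e. name|url|key|catIDs|enabled|search_mode|search_fallback|enable_daily|enable_backlog,
    # with the booleans stored as 0/1.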

    def imageName(self):
        if ek.ek(os.path.isfile,
                 ek.ek(os.path.join, sickbeard.PROG_DIR, 'gui', sickbeard.GUI_NAME, 'images', 'providers',
                       self.getID() + '.png')):
            return self.getID() + '.png'
        return 'newznab.png'

    def isEnabled(self):
        return self.enabled

    def _getURL(self, url, post_data=None, params=None, timeout=30, json=False):
        """
        By default this is just a simple urlopen call, but this method should be
        overridden for providers with special URL requirements (like cookies).

        Largely unchanged from the superclass implementation; kept here so
        provider-specific behaviour can be added later.
        """

        # check for auth
        if not self._doLogin():
            return

        return helpers.getURL(url, post_data=post_data, params=params, headers=self.headers, timeout=timeout,
                              session=self.session, json=json)

    def get_newznab_categories(self):
        """
        Uses the newznab provider url and apikey to get the capabilities.
        Makes use of the default newznab caps param, e.g.
        http://yournewznab/api?t=caps&apikey=skdfiw7823sdkdsfjsfk

        Returns a tuple with (success or not, array with dicts [{"id": "5070", "name": "Anime"},
        {"id": "5080", "name": "Documentary"}, {"id": "5020", "name": "Foreign"}, ...etc], error message)
        """
        return_categories = []

        self._checkAuth()

        params = {"t": "caps"}
        if self.needs_auth and self.key:
            params['apikey'] = self.key

        try:
            categories = self.getURL("%s/api" % (self.url), params=params, timeout=10)
        except Exception:
            logger.log(u"Error getting html for [%s]" %
                       ("%s/api?%s" % (self.url, '&'.join("%s=%s" % (x, y) for x, y in params.items()))), logger.DEBUG)
            return (False, return_categories, "Error getting html for [%s]" %
                    ("%s/api?%s" % (self.url, '&'.join("%s=%s" % (x, y) for x, y in params.items()))))

        xml_categories = helpers.parse_xml(categories)

        if not xml_categories:
            logger.log(u"Error parsing xml for [%s]" % (self.name),
                       logger.DEBUG)
            return (False, return_categories, "Error parsing xml for [%s]" % (self.name))

        try:
            for category in xml_categories.iter('category'):
                if category.get('name') == 'TV':
                    for subcat in category.findall('subcat'):
                        return_categories.append(subcat.attrib)
        except Exception:
            logger.log(u"Error parsing result for [%s]" % (self.name),
                       logger.DEBUG)
            return (False, return_categories, "Error parsing result for [%s]" % (self.name))

        return (True, return_categories, "")
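
    # A minimal usage sketch (assuming "provider" is a configured
    # NewznabProvider instance; the names here are illustrative only):
    #
    #   success, categories, error = provider.get_newznab_categories()
    #   if success:
    #       cat_ids = ','.join(cat['id'] for cat in categories)
    #   else:
    #       logger.log(u"caps lookup failed for [%s]: %s" % (provider.name, error), logger.DEBUG)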

    def _get_season_search_strings(self, ep_obj):

        to_return = []
        cur_params = {}

        # season
        if ep_obj.show.air_by_date or ep_obj.show.sports:
            date_str = str(ep_obj.airdate).split('-')[0]
            cur_params['season'] = date_str
            cur_params['q'] = date_str.replace('-', '.')
        elif ep_obj.show.is_anime:
            cur_params['season'] = "%d" % ep_obj.scene_absolute_number
        else:
            cur_params['season'] = str(ep_obj.scene_season)

        # search by TVRage id when one is available
        rid = helpers.mapIndexersToShow(ep_obj.show)[2]
        if rid:
            cur_return = cur_params.copy()
            cur_return['rid'] = rid
            to_return.append(cur_return)

        # add new query strings for exceptions; copy the params per exception
        # so each entry gets its own dict instead of aliasing the same one
        name_exceptions = list(
            set(scene_exceptions.get_scene_exceptions(ep_obj.show.indexerid) + [ep_obj.show.name]))
        for cur_exception in name_exceptions:
            cur_return = cur_params.copy()
            if 'q' in cur_return:
                cur_return['q'] = helpers.sanitizeSceneName(cur_exception) + '.' + cur_return['q']
            to_return.append(cur_return)

        return to_return
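
    # Illustrative output (hypothetical show): for scene season 3 of a show
    # with TVRage id 12345, this might return entries like
    #   [{'season': '3', 'rid': 12345}, {'season': '3'}, ...]
    # with one entry per name exception; the 'q' key only appears for
    # air-by-date and sports shows.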

    def _get_episode_search_strings(self, ep_obj, add_string=''):
        to_return = []
        params = {}

        if not ep_obj:
            return [params]

        if ep_obj.show.air_by_date or ep_obj.show.sports:
            date_str = str(ep_obj.airdate)
            params['season'] = date_str.partition('-')[0]
            params['ep'] = date_str.partition('-')[2].replace('-', '/')
        elif ep_obj.show.anime:
            params['ep'] = "%i" % int(ep_obj.scene_absolute_number)
        else:
            params['season'] = ep_obj.scene_season
            params['ep'] = ep_obj.scene_episode

        # search by TVRage id when one is available
        rid = helpers.mapIndexersToShow(ep_obj.show)[2]
        if rid:
            cur_return = params.copy()
            cur_return['rid'] = rid
            to_return.append(cur_return)

        # add new query strings for exceptions; copy the params per exception
        # so each entry gets its own dict instead of aliasing the same one
        name_exceptions = list(
            set(scene_exceptions.get_scene_exceptions(ep_obj.show.indexerid) + [ep_obj.show.name]))
        for cur_exception in name_exceptions:
            cur_return = params.copy()
            cur_return['q'] = helpers.sanitizeSceneName(cur_exception)
            to_return.append(cur_return)

        return to_return
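
    # Illustrative output (hypothetical show): for scene episode 3x05 this
    # might return entries like
    #   [{'season': 3, 'ep': 5, 'rid': 12345},
    #    {'season': 3, 'ep': 5, 'q': <sanitized exception name>}, ...]
    # with one 'q' entry per scene-name exception.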

    def _doGeneralSearch(self, search_string):
        return self._doSearch({'q': search_string})

    def _checkAuth(self):

        if self.needs_auth and not self.key:
            logger.log(u"Incorrect authentication credentials for " + self.name + " : API key is missing",
                       logger.DEBUG)
            raise AuthException("Your authentication credentials for " + self.name + " are missing, check your config.")

        return True

    def _checkAuthFromData(self, data):

        if data is None:
            return self._checkAuth()

        if 'error' in data.feed:
            code = data.feed['error']['code']

            if code == '100':
                raise AuthException("Your API key for " + self.name + " is incorrect, check your config.")
            elif code == '101':
                raise AuthException("Your account on " + self.name + " has been suspended, contact the administrator.")
            elif code == '102':
                raise AuthException(
                    "Your account isn't allowed to use the API on " + self.name + ", contact the administrator")
            else:
                logger.log(u"Unknown error given from " + self.name + ": " + data.feed['error']['description'],
                           logger.ERROR)
                return False

        return True
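
    # For reference, a newznab error response carries the code and description
    # as attributes on an <error/> element, e.g. (illustrative):
    #   <error code="100" description="Incorrect user credentials"/>
    # which the feed parser surfaces as data.feed['error'] above.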

    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0):

        self._checkAuth()

        params = {"t": "tvsearch",
                  "maxage": sickbeard.USENET_RETENTION,
                  "limit": 100,
                  "attrs": "rageid",
                  "offset": 0}

        # category ids
        if self.show and self.show.is_sports:
            params['cat'] = self.catIDs + ',5060'
        elif self.show and self.show.is_anime:
            params['cat'] = self.catIDs + ',5070'
        else:
            params['cat'] = self.catIDs

        # if max_age is set, use it; don't allow it to be missing
        if age or not params['maxage']:
            params['maxage'] = age

        if search_params:
            params.update(search_params)

        if 'rid' not in search_params and 'q' not in search_params:
            logger.log(u"Error: no rid or search term given. Report this to the forums with a full debug log")
            return []

        if self.needs_auth and self.key:
            params['apikey'] = self.key

        results = []
        offset = total = 0

        # page through the results, with a hard cap of 1000 items per search
        while (total >= offset) and (offset < 1000):
            search_url = self.url + 'api?' + urllib.urlencode(params)
            logger.log(u"Search url: " + search_url, logger.DEBUG)
            data = self.cache.getRSSFeed(search_url)

            if not data or not self._checkAuthFromData(data):
                break

            for item in data.entries:

                (title, url) = self._get_title_and_url(item)

                if title and url:
                    results.append(item)
                else:
                    logger.log(
                        u"The data returned from " + self.name + " is incomplete, this result is unusable",
                        logger.DEBUG)

            # get the total and offset attribs from the response
            if total == 0:
                total = int(data.feed.newznab_response['total'] or 0)
            offset = int(data.feed.newznab_response['offset'] or 0)

            if offset != params['offset']:
                logger.log("Tell your newznab provider to fix their bloody newznab responses")
                break

            # if there are more items available than the amount given in one call, grab some more
            params['offset'] += params['limit']

            logger.log(str(
                total - offset) + " more items to be fetched from provider. Fetching another " + str(
                params['limit']) + " items.", logger.DEBUG)

            # be gentle with the provider between paged requests
            time.sleep(0.2)

        return results
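
    # Paging sketch: with the defaults above (limit=100, offset starting at 0),
    # a provider reporting total=250 would be fetched in three requests at
    # offsets 0, 100 and 200; the loop also hard-stops once offset reaches 1000.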

    def findPropers(self, search_date=None):

        search_terms = ['.proper.', '.repack.']

        cache_results = self.cache.listPropers(search_date)
        results = [classes.Proper(x['name'], x['url'], datetime.datetime.fromtimestamp(x['time']), self.show) for x in
                   cache_results]

        index = 0
        alt_search = ('nzbs_org' == self.getID())
        term_items_found = False
        do_search_alt = False

        while index < len(search_terms):
            search_params = {'q': search_terms[index]}
            if alt_search:

                if do_search_alt:
                    index += 1

                if term_items_found:
                    do_search_alt = True
                    term_items_found = False
                else:
                    if do_search_alt:
                        search_params['t'] = "search"

                do_search_alt = not do_search_alt

            else:
                index += 1

            for item in self._doSearch(search_params, age=4):

                (title, url) = self._get_title_and_url(item)

                if 'published_parsed' in item and item['published_parsed']:
                    result_date = item.published_parsed
                    if result_date:
                        result_date = datetime.datetime(*result_date[0:6])
                else:
                    logger.log(u"Unable to figure out the date for entry " + title + ", skipping it")
                    continue

                if not search_date or result_date > search_date:
                    search_result = classes.Proper(title, url, result_date, self.show)
                    results.append(search_result)
                    term_items_found = True
                    do_search_alt = False

            time.sleep(0.2)

        return results


class NewznabCache(tvcache.TVCache):
    def __init__(self, provider):

        tvcache.TVCache.__init__(self, provider)

        # only poll newznab providers every 15 minutes max
        self.minTime = 15

    def _getRSSData(self):

        params = {"t": "tvsearch",
                  "cat": self.provider.catIDs + ',5060,5070',
                  "attrs": "rageid"}

        if self.provider.needs_auth and self.provider.key:
            params['apikey'] = self.provider.key

        rss_url = self.provider.url + 'api?' + urllib.urlencode(params)

        logger.log(self.provider.name + " cache update URL: " + rss_url, logger.DEBUG)

        return self.getRSSFeed(rss_url)

    def _checkAuth(self, data):
        return self.provider._checkAuthFromData(data)

    def updateCache(self):

        if self.shouldUpdate() and self._checkAuth(None):
            data = self._getRSSData()

            # as long as the http request worked we count this as an update
            if not data:
                return []

            self.setLastUpdate()

            # clear cache
            self._clearCache()

            if self._checkAuth(data):
                items = data.entries
                cl = []
                for item in items:
                    ci = self._parseItem(item)
                    if ci is not None:
                        cl.append(ci)

                if len(cl) > 0:
                    myDB = self._getDB()
                    myDB.mass_action(cl)

            else:
                raise AuthException(
                    u"Your authentication credentials for " + self.provider.name + " are incorrect, check your config")

        return []

    # override the parent method to also parse the tvrage id from the newznab feed
    def _parseItem(self, item):
        title = item.title
        url = item.link

        attrs = item.newznab_attr
        if not isinstance(attrs, list):
            attrs = [item.newznab_attr]

        tvrageid = 0
        for attr in attrs:
            if attr['name'] == 'tvrageid':
                tvrageid = int(attr['value'])
                break

        self._checkItemAuth(title, url)

        if not title or not url:
            logger.log(
                u"The data returned from the " + self.provider.name + " feed is incomplete, this result is unusable",
                logger.DEBUG)
            return None

        url = self._translateLinkURL(url)

        logger.log(u"Attempting to add item from RSS to cache: " + title, logger.DEBUG)
        return self._addCacheEntry(title, url, indexer_id=tvrageid)
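
    # For reference, the tvrageid parsed above comes from newznab attributes in
    # the RSS item, e.g. (illustrative):
    #   <newznab:attr name="tvrageid" value="12345"/>
    # feedparser exposes repeated newznab:attr elements as a list of dicts,
    # which is why _parseItem() normalises a single attribute into a list.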