diff --git a/sickbeard/providers/dtt.py b/sickbeard/providers/dtt.py
index ea452715..51f07b35 100644
--- a/sickbeard/providers/dtt.py
+++ b/sickbeard/providers/dtt.py
@@ -85,14 +85,13 @@ class DTTProvider(generic.TorrentProvider):
 
         logger.log(u"Search string: " + searchURL, logger.DEBUG)
 
-        data = self.getURL(searchURL)
+        data = self.getRSSFeed(searchURL)
 
         if not data:
             return []
 
         try:
-            parsedXML = parseString(data)
-            items = parsedXML.getElementsByTagName('item')
+            items = data.entries
         except Exception, e:
             logger.log(u"Error trying to load DTT RSS feed: " + ex(e), logger.ERROR)
             logger.log(u"RSS data: " + data, logger.DEBUG)
@@ -107,10 +106,8 @@ class DTTProvider(generic.TorrentProvider):
         return results
 
     def _get_title_and_url(self, item):
-        description_node = item.getElementsByTagName('description')[0]
-
-        title = get_xml_text(description_node).replace('_', '.').split(' (')[0]
-        url = item.getElementsByTagName('enclosure')[0].getAttribute('url')
+        title = item.title
+        url = item.enclosures[0].href
 
         return (title, url)
 
@@ -134,7 +131,7 @@ class DTTCache(tvcache.TVCache):
         url = self.provider.url + 'rss/allshows?' + urllib.urlencode(params)
         logger.log(u"DTT cache update URL: " + url, logger.DEBUG)
-        data = self.provider.getURL(url)
+        data = self.provider.getRSSFeed(url)
 
         return data
 
     def _parseItem(self, item):
diff --git a/sickbeard/providers/ezrss.py b/sickbeard/providers/ezrss.py
index 02ce1f63..046e9f10 100644
--- a/sickbeard/providers/ezrss.py
+++ b/sickbeard/providers/ezrss.py
@@ -112,19 +112,13 @@ class EZRSSProvider(generic.TorrentProvider):
 
         logger.log(u"Search string: " + search_url, logger.DEBUG)
 
-        data = self.getURL(search_url)
+        data = self.getRSSFeed(search_url)
 
         if not data:
             logger.log(u"No data returned from " + search_url, logger.ERROR)
             return []
 
-        parsedXML = helpers.parse_xml(data)
-
-        if parsedXML is None:
-            logger.log(u"Error trying to load " + self.name + " RSS feed", logger.ERROR)
-            return []
-
-        items = parsedXML.findall('.//item')
+        items = data.entries
 
         results = []
 
@@ -178,7 +172,7 @@ class EZRSSCache(tvcache.TVCache):
         rss_url = self.provider.url + 'feed/'
         logger.log(self.provider.name + " cache update URL: " + rss_url, logger.DEBUG)
 
-        data = self.provider.getURL(rss_url)
+        data = self.provider.getRSSFeed(rss_url)
 
         if not data:
             logger.log(u"No data returned from " + rss_url, logger.ERROR)
diff --git a/sickbeard/providers/generic.py b/sickbeard/providers/generic.py
index d7070b66..a05f334b 100644
--- a/sickbeard/providers/generic.py
+++ b/sickbeard/providers/generic.py
@@ -23,6 +23,7 @@ import datetime
 import os
 import sys
 import re
+import urllib
 import urllib2
 import copy
 import itertools
@@ -119,18 +120,20 @@ class GenericProvider:
 
         return data
 
-    def getRSSFeed(self, url):
+    def getRSSFeed(self, url, post_data=None):
         parsed = list(urlparse.urlparse(url))
         parsed[2] = re.sub("/{2,}", "/", parsed[2])  # replace two or more / with one
 
-        f = feedparser.parse(url)
-        data = f.entries
+        if post_data:
+            url = url + 'api?' + urllib.urlencode(post_data)
 
-        if not data:
-            logger.log(u"Error loading " + self.name + " URL: " + ex(e), logger.ERROR)
+        f = feedparser.parse(url)
+
+        if not f:
+            logger.log(u"Error loading " + self.name + " URL: " + url, logger.ERROR)
             return None
 
-        return data
+        return f
 
     def downloadResult(self, result):
         """
@@ -226,11 +229,11 @@ class GenericProvider:
 
         Returns: A tuple containing two strings representing title and URL respectively
         """
-        title = helpers.get_xml_text(item.find('title'))
+        title = item.title
 
         if title:
             title = title.replace(' ', '.')
 
-        url = helpers.get_xml_text(item.find('link'))
+        url = item.link
 
         if url:
             url = url.replace('&amp;', '&')
diff --git a/sickbeard/providers/hdbits.py b/sickbeard/providers/hdbits.py
index 2a1c8353..9473fe77 100644
--- a/sickbeard/providers/hdbits.py
+++ b/sickbeard/providers/hdbits.py
@@ -231,7 +231,7 @@ class HDBitsCache(tvcache.TVCache):
             return []
 
     def _getRSSData(self):
-        return self.provider.getURL(self.provider.rss_url, post_data=self.provider._make_post_data_JSON())
+        return self.provider.getRSSFeed(self.provider.rss_url, post_data=self.provider._make_post_data_JSON())
 
     def _parseItem(self, item):
 
diff --git a/sickbeard/providers/newzbin.py b/sickbeard/providers/newzbin.py
index ce9f452b..05c84b81 100644
--- a/sickbeard/providers/newzbin.py
+++ b/sickbeard/providers/newzbin.py
@@ -285,14 +285,13 @@ class NewzbinProvider(generic.NZBProvider):
         item_list = []
 
         try:
-            parsedXML = parseString(data)
-            items = parsedXML.getElementsByTagName('item')
+            items = data.entries
         except Exception, e:
             logger.log("Error trying to load Newzbin RSS feed: " + ex(e), logger.ERROR)
             return []
 
         for cur_item in items:
-            title = helpers.get_xml_text(cur_item.getElementsByTagName('title')[0])
+            title = cur_item.title
             if title == 'Feeds Error':
                 raise exceptions.AuthException("The feed wouldn't load, probably because of invalid auth info")
             if sickbeard.USENET_RETENTION is not None:
@@ -345,7 +344,7 @@ class NewzbinProvider(generic.NZBProvider):
 
         url = self.url + "search/?%s" % urllib.urlencode(params)
 
         logger.log("Newzbin search URL: " + url, logger.DEBUG)
-        data = self.getURL(url)
+        data = self.getRSSFeed(url)
 
         return data
diff --git a/sickbeard/providers/newznab.py b/sickbeard/providers/newznab.py
index 617e12a1..faf53110 100644
--- a/sickbeard/providers/newznab.py
+++ b/sickbeard/providers/newznab.py
@@ -166,14 +166,15 @@ class NewznabProvider(generic.NZBProvider):
         if parsedXML is None:
             return self._checkAuth()
 
-        if parsedXML.tag == 'error':
-            code = parsedXML.attrib['code']
-
-            if code == '100':
+        status = parsedXML.status
+        if status:
+            if status == 200:
+                return True
+            if status == 100:
                 raise AuthException("Your API key for " + self.name + " is incorrect, check your config.")
-            elif code == '101':
+            elif status == 101:
                 raise AuthException("Your account on " + self.name + " has been suspended, contact the administrator.")
-            elif code == '102':
+            elif status == 102:
                 raise AuthException(
                     "Your account isn't allowed to use the API on " + self.name + ", contact the administrator")
             else:
@@ -181,8 +182,6 @@ class NewznabProvider(generic.NZBProvider):
                            logger.ERROR)
             return False
 
-        return True
-
     def _doSearch(self, search_params, show=None, max_age=0):
 
         self._checkAuth()
@@ -206,31 +205,15 @@ class NewznabProvider(generic.NZBProvider):
 
         logger.log(u"Search url: " + search_url, logger.DEBUG)
 
-        data = self.getURL(search_url)
+        data = self.getRSSFeed(search_url)
 
         if not data:
             logger.log(u"No data returned from " + search_url, logger.ERROR)
             return []
 
-        # hack this in until it's fixed server side
-        if not data.startswith('<?xml'):
-            data = '<?xml version="1.0" encoding="ISO-8859-1" ?>' + data
-
-        parsedXML = helpers.parse_xml(data)
-
-        if parsedXML is None:
-            logger.log(u"Error trying to load " + self.name + " XML data", logger.ERROR)
-            return []
-
-        if self._checkAuthFromData(parsedXML):
-
-            if parsedXML.tag == 'rss':
-                items = parsedXML.findall('.//item')
-
-            else:
-                logger.log(u"Resulting XML from " + self.name + " isn't RSS, not parsing it", logger.ERROR)
-                return []
+        if self._checkAuthFromData(data):
+            items = data.entries
 
         results = []
 
         for curItem in items:
@@ -307,15 +290,15 @@ class NewznabCache(tvcache.TVCache):
 
         logger.log(self.provider.name + " cache update URL: " + rss_url, logger.DEBUG)
 
-        data = self.provider.getURL(rss_url)
+        data = self.provider.getRSSFeed(rss_url)
 
         if not data:
             logger.log(u"No data returned from " + rss_url, logger.ERROR)
             return None
 
         # hack this in until it's fixed server side
-        if data and not data.startswith('<?xml'):
-            data = '<?xml version="1.0" encoding="ISO-8859-1" ?>' + data
+        #if data and not data.startswith('<?xml'):
+        #    data = '<?xml version="1.0" encoding="ISO-8859-1" ?>' + data
 
         return data
 
diff --git a/sickbeard/providers/rsstorrent.py b/sickbeard/providers/rsstorrent.py
index dcb19460..e9ec0146 100644
--- a/sickbeard/providers/rsstorrent.py
+++ b/sickbeard/providers/rsstorrent.py
@@ -86,10 +86,11 @@ class TorrentRssProvider(generic.TorrentProvider):
         if not data:
             return (False, 'No data returned from url: ' + self.url)
 
-        if not len(data) > 0:
+        items = data.entries
+        if not len(items) > 0:
             return (False, 'No items found in the RSS feed ' + self.url)
 
-        (title, url) = self._get_title_and_url(data[0])
+        (title, url) = self._get_title_and_url(items[0])
 
         if not title:
             return (False, 'Unable to get title from first item')
diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py
index 2e64bd1f..323e6f0c 100644
--- a/sickbeard/tvcache.py
+++ b/sickbeard/tvcache.py
@@ -105,8 +105,9 @@ class TVCache():
             self._clearCache()
 
             if self._checkAuth(data):
+                items = data.entries
                 cl = []
-                for item in data:
+                for item in items:
                     ci = self._parseItem(item)
                     if ci is not None:
                         cl.append(ci)
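
For reference, a minimal standalone sketch (not part of the patch) of the feedparser access pattern the providers now share: getRSSFeed() returns the parsed feed object, callers iterate data.entries, and each entry exposes title, link, and enclosures[0].href in place of the old DOM/ElementTree lookups. The feed URL below is a placeholder, and the bozo check is only an illustration of feedparser's behaviour (it returns a result object even when parsing fails, so a plain truthiness test on the result is weak), not something this patch adds.

```python
# Illustrative sketch only -- not part of the patch. Shows the feedparser
# access pattern the providers above now rely on, with a placeholder URL.
import feedparser

feed = feedparser.parse('http://example.com/rss')

# feedparser returns a FeedParserDict even on failure, so check bozo/entries
# rather than relying on the truthiness of the returned object.
if feed.bozo and not feed.entries:
    print('Could not parse feed: %r' % feed.get('bozo_exception'))
else:
    for entry in feed.entries:
        title = entry.title    # replaces getElementsByTagName('title') / get_xml_text
        link = entry.link      # replaces get_xml_text(item.find('link'))
        # torrent feeds (dtt, ezrss) publish the download link as an enclosure
        url = entry.enclosures[0].href if entry.enclosures else link
        print('%s -> %s' % (title, url))
```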