diff --git a/sickbeard/providers/animezb.py b/sickbeard/providers/animezb.py
index 5ad05b7c..69b0db91 100644
--- a/sickbeard/providers/animezb.py
+++ b/sickbeard/providers/animezb.py
@@ -52,9 +52,6 @@ class Animezb(generic.NZBProvider):
     def imageName(self):
         return 'animezb.png'
 
-    def _checkAuth(self):
-        return True
-
     def _get_season_search_strings(self, ep_obj):
         return [x for x in show_name_helpers.makeSceneSeasonSearchString(self.show, ep_obj)]
 
@@ -147,7 +144,12 @@ class AnimezbCache(tvcache.TVCache):
 
         logger.log(self.provider.name + u" cache update URL: " + rss_url, logger.DEBUG)
 
-        return self.getRSSFeed(rss_url).entries
+        data = self.getRSSFeed(rss_url)
+
+        if data and 'entries' in data:
+            return data.entries
+        else:
+            return []
 
 
 provider = Animezb()
diff --git a/sickbeard/providers/bitsoup.py b/sickbeard/providers/bitsoup.py
index 6b3db525..d4f40289 100644
--- a/sickbeard/providers/bitsoup.py
+++ b/sickbeard/providers/bitsoup.py
@@ -31,7 +31,7 @@ from sickbeard import db
 from sickbeard import classes
 from sickbeard import helpers
 from sickbeard import show_name_helpers
-from sickbeard.exceptions import ex
+from sickbeard.exceptions import ex, AuthException
 from sickbeard.helpers import sanitizeSceneName
 from sickbeard.bs4_parser import BS4Parser
 from unidecode import unidecode
@@ -75,6 +75,12 @@ class BitSoupProvider(generic.TorrentProvider):
         quality = Quality.sceneQuality(item[0], anime)
         return quality
 
+    def _checkAuth(self):
+        if not self.username or not self.password:
+            raise AuthException("Your authentication credentials for " + self.name + " are missing, check your config.")
+
+        return True
+
     def _doLogin(self):
 
         login_params = {'username': self.username,
diff --git a/sickbeard/providers/btn.py b/sickbeard/providers/btn.py
index eaa03ee6..4786ff91 100644
--- a/sickbeard/providers/btn.py
+++ b/sickbeard/providers/btn.py
@@ -315,8 +315,5 @@ class BTNCache(tvcache.TVCache):
 
         return self.provider._doSearch(search_params=None, age=seconds_since_last_update)
 
-    def _checkAuth(self, data):
-        return self.provider._checkAuthFromData(data)
-
 
 provider = BTNProvider()
diff --git a/sickbeard/providers/dtt.py b/sickbeard/providers/dtt.py
deleted file mode 100644
index bfe881e7..00000000
--- a/sickbeard/providers/dtt.py
+++ /dev/null
@@ -1,141 +0,0 @@
-# Author: Harm van Tilborg
-# URL: https://github.com/hvt/Sick-Beard/tree/dtt
-#
-# This file is part of SickRage.
-#
-# SickRage is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# SickRage is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
-
-import urllib
-import sickbeard
-import generic
-
-from sickbeard.common import Quality
-from sickbeard import logger
-from sickbeard import tvcache
-from sickbeard.helpers import sanitizeSceneName
-from sickbeard import show_name_helpers
-from sickbeard.exceptions import ex
-
-
-class DTTProvider(generic.TorrentProvider):
-    def __init__(self):
-        generic.TorrentProvider.__init__(self, "DailyTvTorrents")
-        self.supportsBacklog = True
-
-        self.enabled = False
-        self.ratio = None
-
-        self.cache = DTTCache(self)
-
-        self.url = 'http://www.dailytvtorrents.org/'
-
-    def isEnabled(self):
-        return self.enabled
-
-    def imageName(self):
-        return 'dailytvtorrents.gif'
-
-    def getQuality(self, item, anime=False):
-        url = item.enclosures[0].href
-        quality = Quality.sceneQuality(url)
-        return quality
-
-    def findSearchResults(self, show, season, episodes, search_mode, manualSearch=False):
-        return generic.TorrentProvider.findSearchResults(self, show, season, episodes, search_mode, manualSearch)
-
-    def _dtt_show_id(self, show_name):
-        return sanitizeSceneName(show_name).replace('.', '-').lower()
-
-    def _get_season_search_strings(self, ep_obj):
-        search_string = []
-
-        for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-            show_string = sanitizeSceneName(show_name).replace('.', '-').lower()
-            search_string.append(show_string)
-
-        return search_string
-
-    def _get_episode_search_strings(self, ep_obj, add_string=''):
-        return self._get_season_search_strings(ep_obj)
-
-    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0):
-
-        # show_id = self._dtt_show_id(self.show.name)
-
-        params = {"items": "all"}
-
-        if sickbeard.DTT_NORAR:
-            params.update({"norar": "yes"})
-
-        if sickbeard.DTT_SINGLE:
-            params.update({"single": "yes"})
-
-        searchURL = self.url + "rss/show/" + search_params + "?" + urllib.urlencode(params)
-
-        logger.log(u"Search string: " + searchURL, logger.DEBUG)
-
-        data = self.cache.getRSSFeed(searchURL)
-
-        if not data:
-            return []
-
-        try:
-            items = data.entries
-        except Exception, e:
-            logger.log(u"Error trying to load DTT RSS feed: " + ex(e), logger.ERROR)
-            logger.log(u"RSS data: " + data, logger.DEBUG)
-            return []
-
-        results = []
-
-        for curItem in items:
-            (title, url) = self._get_title_and_url(curItem)
-            results.append(curItem)
-
-        return results
-
-    def _get_title_and_url(self, item):
-        title = item.title
-        if title:
-            title = u'' + title
-            title = title.replace(' ', '.')
-
-        url = item.enclosures[0].href
-
-        return (title, url)
-
-
-class DTTCache(tvcache.TVCache):
-    def __init__(self, provider):
-        tvcache.TVCache.__init__(self, provider)
-
-        # only poll DTT every 30 minutes max
-        self.minTime = 30
-
-    def _getDailyData(self):
-
-        params = {"items": "all"}
-
-        if sickbeard.DTT_NORAR:
-            params.update({"norar": "yes"})
-
-        if sickbeard.DTT_SINGLE:
-            params.update({"single": "yes"})
-
-        url = self.provider.url + 'rss/allshows?' + urllib.urlencode(params)
-        logger.log(u"DTT cache update URL: " + url, logger.DEBUG)
-        return self.getRSSFeed(url).entries
-
-
-provider = DTTProvider()
\ No newline at end of file
diff --git a/sickbeard/providers/ezrss.py b/sickbeard/providers/ezrss.py
index 233a53e6..feb1765e 100644
--- a/sickbeard/providers/ezrss.py
+++ b/sickbeard/providers/ezrss.py
@@ -179,7 +179,11 @@ class EZRSSCache(tvcache.TVCache):
         rss_url = self.provider.url + 'feed/'
 
         logger.log(self.provider.name + " cache update URL: " + rss_url, logger.DEBUG)
 
-        return self.getRSSFeed(rss_url).entries
+        data = self.getRSSFeed(rss_url)
+        if data and 'entries' in data:
+            return data.entries
+        else:
+            return []
 
 
 provider = EZRSSProvider()
diff --git a/sickbeard/providers/fanzub.py b/sickbeard/providers/fanzub.py
index ebcd2431..0ad73ccb 100644
--- a/sickbeard/providers/fanzub.py
+++ b/sickbeard/providers/fanzub.py
@@ -139,7 +139,12 @@ class FanzubCache(tvcache.TVCache):
 
         logger.log(self.provider.name + u" cache update URL: " + rss_url, logger.DEBUG)
 
-        return self.getRSSFeed(rss_url).entries
+        data = self.getRSSFeed(rss_url)
+
+        if data and 'entries' in data:
+            return data.entries
+        else:
+            return []
 
 
 provider = Fanzub()
diff --git a/sickbeard/providers/freshontv.py b/sickbeard/providers/freshontv.py
index 5e7f0871..bab6ea7d 100755
--- a/sickbeard/providers/freshontv.py
+++ b/sickbeard/providers/freshontv.py
@@ -29,7 +29,7 @@ from sickbeard import db
 from sickbeard import classes
 from sickbeard import helpers
 from sickbeard import show_name_helpers
-from sickbeard.exceptions import ex
+from sickbeard.exceptions import ex, AuthException
 from sickbeard import clients
 from lib import requests
 from lib.requests import exceptions
@@ -78,6 +78,13 @@ class FreshOnTVProvider(generic.TorrentProvider):
         quality = Quality.sceneQuality(item[0], anime)
         return quality
 
+    def _checkAuth(self):
+
+        if not self.username or not self.password:
+            raise AuthException("Your authentication credentials for " + self.name + " are missing, check your config.")
+
+        return True
+
     def _doLogin(self):
         if any(requests.utils.dict_from_cookiejar(self.session.cookies).values()):
             return True
@@ -301,6 +308,6 @@ class FreshOnTVCache(tvcache.TVCache):
     def _getDailyData(self):
         search_params = {'RSS': ['']}
-        return self.provider._doSearch(search_params).entries
+        return self.provider._doSearch(search_params)
 
 provider = FreshOnTVProvider()
\ No newline at end of file
diff --git a/sickbeard/providers/generic.py b/sickbeard/providers/generic.py
index 899b93f8..5b02adbe 100644
--- a/sickbeard/providers/generic.py
+++ b/sickbeard/providers/generic.py
@@ -75,7 +75,7 @@ class GenericProvider:
         return self.getID() + '.png'
 
     def _checkAuth(self):
-        return
+        return True
 
     def _doLogin(self):
         return True
diff --git a/sickbeard/providers/hdbits.py b/sickbeard/providers/hdbits.py
index 66e379e3..85cd6017 100644
--- a/sickbeard/providers/hdbits.py
+++ b/sickbeard/providers/hdbits.py
@@ -67,9 +67,6 @@ class HDBitsProvider(generic.TorrentProvider):
 
     def _checkAuthFromData(self, parsedJSON):
 
-        if parsedJSON is None:
-            return self._checkAuth()
-
         if 'status' in parsedJSON and 'message' in parsedJSON:
             if parsedJSON.get('status') == 5:
                 logger.log(u"Incorrect authentication credentials for " + self.name + " : " + parsedJSON['message'],
@@ -209,13 +206,15 @@ class HDBitsCache(tvcache.TVCache):
     def _getDailyData(self):
         parsedJSON = self.provider.getURL(self.provider.rss_url, post_data=self.provider._make_post_data_JSON(), json=True)
+
+        if not self.provider._checkAuthFromData(parsedJSON):
+            return []
+
         if parsedJSON and 'data' in parsedJSON:
             return parsedJSON['data']
         else:
             return []
 
-    def _checkAuth(self, data):
-        return self.provider._checkAuthFromData(data)
 
 
 provider = HDBitsProvider()
diff --git a/sickbeard/providers/hdtorrents.py b/sickbeard/providers/hdtorrents.py
index 8b02af0b..a02afbad 100644
--- a/sickbeard/providers/hdtorrents.py
+++ b/sickbeard/providers/hdtorrents.py
@@ -30,7 +30,7 @@ from sickbeard import db
 from sickbeard import classes
 from sickbeard import helpers
 from sickbeard import show_name_helpers
-from sickbeard.exceptions import ex
+from sickbeard.exceptions import ex, AuthException
 from sickbeard import clients
 from lib import requests
 from lib.requests import exceptions
@@ -82,6 +82,13 @@ class HDTorrentsProvider(generic.TorrentProvider):
         quality = Quality.sceneQuality(item[0])
         return quality
 
+    def _checkAuth(self):
+
+        if not self.username or not self.password:
+            raise AuthException("Your authentication credentials for " + self.name + " are missing, check your config.")
+
+        return True
+
     def _doLogin(self):
 
         if any(requests.utils.dict_from_cookiejar(self.session.cookies).values()):
diff --git a/sickbeard/providers/iptorrents.py b/sickbeard/providers/iptorrents.py
index 19ae37fd..e24bb328 100644
--- a/sickbeard/providers/iptorrents.py
+++ b/sickbeard/providers/iptorrents.py
@@ -29,7 +29,7 @@ from sickbeard import db
 from sickbeard import classes
 from sickbeard import helpers
 from sickbeard import show_name_helpers
-from sickbeard.exceptions import ex
+from sickbeard.exceptions import ex, AuthException
 from sickbeard import clients
 from lib import requests
 from lib.requests import exceptions
@@ -74,6 +74,13 @@ class IPTorrentsProvider(generic.TorrentProvider):
         quality = Quality.sceneQuality(item[0], anime)
         return quality
 
+    def _checkAuth(self):
+
+        if not self.username or not self.password:
+            raise AuthException("Your authentication credentials for " + self.name + " are missing, check your config.")
+
+        return True
+
     def _doLogin(self):
 
         login_params = {'username': self.username,
diff --git a/sickbeard/providers/newzbin.py b/sickbeard/providers/newzbin.py
deleted file mode 100644
index b0211355..00000000
--- a/sickbeard/providers/newzbin.py
+++ /dev/null
@@ -1,340 +0,0 @@
-# Author: Nic Wolfe
-# URL: http://code.google.com/p/sickbeard/
-#
-# This file is part of SickRage.
-#
-# SickRage is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# SickRage is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
-
-import os
-import re
-import sys
-import time
-import urllib, urlparse
-
-from datetime import datetime, timedelta
-
-import sickbeard
-import generic
-
-import sickbeard.encodingKludge as ek
-from sickbeard import classes, logger, helpers, exceptions, show_name_helpers
-from sickbeard import tvcache
-from sickbeard.common import Quality
-from sickbeard.exceptions import ex
-from lib.dateutil.parser import parse as parseDate
-
-
-class NewzbinDownloader(urllib.FancyURLopener):
-    def __init__(self):
-        urllib.FancyURLopener.__init__(self)
-
-    def http_error_default(self, url, fp, errcode, errmsg, headers):
-
-        # if newzbin is throttling us, wait seconds and try again
-        if errcode == 400:
-
-            newzbinErrCode = int(headers.getheader('X-DNZB-RCode'))
-
-            if newzbinErrCode == 450:
-                rtext = str(headers.getheader('X-DNZB-RText'))
-                result = re.search("wait (\d+) seconds", rtext)
-                logger.log("Newzbin throttled our NZB downloading, pausing for " + result.group(1) + "seconds")
-                time.sleep(int(result.group(1)))
-                raise exceptions.NewzbinAPIThrottled()
-
-            elif newzbinErrCode == 401:
-                raise exceptions.AuthException("Newzbin username or password incorrect")
-
-            elif newzbinErrCode == 402:
-                raise exceptions.AuthException("Newzbin account not premium status, can't download NZBs")
-
-
-class NewzbinProvider(generic.NZBProvider):
-    def __init__(self):
-
-        generic.NZBProvider.__init__(self, "Newzbin")
-
-        self.supportsBacklog = True
-
-        self.cache = NewzbinCache(self)
-
-        self.url = 'https://www.newzbin2.es/'
-
-        self.NEWZBIN_DATE_FORMAT = '%a, %d %b %Y %H:%M:%S %Z'
-
-    def isEnabled(self):
-        return sickbeard.NEWZBIN
-
-    def getQuality(self, item, anime=False):
-        attributes = item.report[0]
-        attr_dict = {}
-
-        for attribute in attributes.getElementsByTagName('report:attribute'):
-            cur_attr = attribute.getAttribute('type')
-            cur_attr_value = helpers.get_xml_text(attribute)
-            if cur_attr not in attr_dict:
-                attr_dict[cur_attr] = [cur_attr_value]
-            else:
-                attr_dict[cur_attr].append(cur_attr_value)
-
-        logger.log("Finding quality of item based on attributes " + str(attr_dict), logger.DEBUG)
-
-        if self._is_SDTV(attr_dict):
-            quality = Quality.SDTV
-        elif self._is_SDDVD(attr_dict):
-            quality = Quality.SDDVD
-        elif self._is_HDTV(attr_dict):
-            quality = Quality.HDTV
-        elif self._is_WEBDL(attr_dict):
-            quality = Quality.HDWEBDL
-        elif self._is_720pBluRay(attr_dict):
-            quality = Quality.HDBLURAY
-        elif self._is_1080pBluRay(attr_dict):
-            quality = Quality.FULLHDBLURAY
-        else:
-            quality = Quality.UNKNOWN
-
-        logger.log("Resulting quality: " + str(quality), logger.DEBUG)
-
-        return quality
-
-    def _is_SDTV(self, attrs):
-
-        # Video Fmt: (XviD, DivX, H.264/x264), NOT 720p, NOT 1080p, NOT 1080i
-        video_fmt = 'Video Fmt' in attrs and (
-            'XviD' in attrs['Video Fmt'] or 'DivX' in attrs['Video Fmt'] or 'H.264/x264' in attrs['Video Fmt']) \
-                    and ('720p' not in attrs['Video Fmt']) \
-                    and ('1080p' not in attrs['Video Fmt']) \
-                    and ('1080i' not in attrs['Video Fmt'])
-
-        # Source: TV Cap or HDTV or (None)
-        source = 'Source' not in attrs or 'TV Cap' in attrs['Source'] or 'HDTV' in attrs['Source']
-
-        # Subtitles: (None)
-        subs = 'Subtitles' not in attrs
-
-        return video_fmt and source and subs
-
-    def _is_SDDVD(self, attrs):
-
-        # Video Fmt: (XviD, DivX, H.264/x264), NOT 720p, NOT 1080p, NOT 1080i
-        video_fmt = 'Video Fmt' in attrs and (
-            'XviD' in attrs['Video Fmt'] or 'DivX' in attrs['Video Fmt'] or 'H.264/x264' in attrs['Video Fmt']) \
-                    and ('720p' not in attrs['Video Fmt']) \
-                    and ('1080p' not in attrs['Video Fmt']) \
-                    and ('1080i' not in attrs['Video Fmt'])
-
-        # Source: DVD
-        source = 'Source' in attrs and 'DVD' in attrs['Source']
-
-        # Subtitles: (None)
-        subs = 'Subtitles' not in attrs
-
-        return video_fmt and source and subs
-
-    def _is_HDTV(self, attrs):
-        # Video Fmt: H.264/x264, 720p
-        video_fmt = 'Video Fmt' in attrs and ('H.264/x264' in attrs['Video Fmt']) \
-                    and ('720p' in attrs['Video Fmt'])
-
-        # Source: TV Cap or HDTV or (None)
-        source = 'Source' not in attrs or 'TV Cap' in attrs['Source'] or 'HDTV' in attrs['Source']
-
-        # Subtitles: (None)
-        subs = 'Subtitles' not in attrs
-
-        return video_fmt and source and subs
-
-    def _is_WEBDL(self, attrs):
-
-        # Video Fmt: H.264/x264, 720p
-        video_fmt = 'Video Fmt' in attrs and ('H.264/x264' in attrs['Video Fmt']) \
-                    and ('720p' in attrs['Video Fmt'])
-
-        # Source: WEB-DL
-        source = 'Source' in attrs and 'WEB-DL' in attrs['Source']
-
-        # Subtitles: (None)
-        subs = 'Subtitles' not in attrs
-
-        return video_fmt and source and subs
-
-    def _is_720pBluRay(self, attrs):
-
-        # Video Fmt: H.264/x264, 720p
-        video_fmt = 'Video Fmt' in attrs and ('H.264/x264' in attrs['Video Fmt']) \
-                    and ('720p' in attrs['Video Fmt'])
-
-        # Source: Blu-ray or HD-DVD
-        source = 'Source' in attrs and ('Blu-ray' in attrs['Source'] or 'HD-DVD' in attrs['Source'])
-
-        return video_fmt and source
-
-    def _is_1080pBluRay(self, attrs):
-
-        # Video Fmt: H.264/x264, 1080p
-        video_fmt = 'Video Fmt' in attrs and ('H.264/x264' in attrs['Video Fmt']) \
-                    and ('1080p' in attrs['Video Fmt'])
-
-        # Source: Blu-ray or HD-DVD
-        source = 'Source' in attrs and ('Blu-ray' in attrs['Source'] or 'HD-DVD' in attrs['Source'])
-
-        return video_fmt and source
-
-
-    def getIDFromURL(self, url):
-        id_regex = re.escape(self.url) + 'browse/post/(\d+)/'
-        id_match = re.match(id_regex, url)
-        if not id_match:
-            return None
-        else:
-            return id_match.group(1)
-
-    def downloadResult(self, nzb):
-
-        id = self.getIDFromURL(nzb.url)
-        if not id:
-            logger.log("Unable to get an ID from " + str(nzb.url) + ", can't download from Newzbin's API", logger.ERROR)
-            return False
-
-        logger.log("Downloading an NZB from newzbin with id " + id)
-
-        fileName = ek.ek(os.path.join, sickbeard.NZB_DIR, helpers.sanitizeFileName(nzb.name) + '.nzb')
-        logger.log("Saving to " + fileName)
-
-        urllib._urlopener = NewzbinDownloader()
-
-        params = urllib.urlencode(
-            {"username": sickbeard.NEWZBIN_USERNAME, "password": sickbeard.NEWZBIN_PASSWORD, "reportid": id})
-        try:
-            urllib.urlretrieve(self.url + "api/dnzb/", fileName, data=params)
-        except exceptions.NewzbinAPIThrottled:
-            logger.log("Done waiting for Newzbin API throttle limit, starting downloads again")
-            self.downloadResult(nzb)
-        except (urllib.ContentTooShortError, IOError), e:
-            logger.log("Error downloading NZB: " + str(sys.exc_info()) + " - " + ex(e), logger.ERROR)
-            return False
-
-        return True
-
-    def _get_season_search_strings(self, ep_obj):
-        return ['^' + x for x in show_name_helpers.makeSceneSeasonSearchString(self.show, ep_obj)]
-
-    def _get_episode_search_strings(self, ep_obj, add_string=''):
-        return ['^' + x for x in show_name_helpers.makeSceneSearchString(self.show, ep_obj)]
-
-    def _doSearch(self, searchStr, show=None, age=None):
-
-        data = self._getRSSData(searchStr.encode('utf-8'))
-
-        item_list = []
-
-        try:
-            items = data.entries
-        except Exception, e:
-            logger.log("Error trying to load Newzbin RSS feed: " + ex(e), logger.ERROR)
-            return []
-
-        for cur_item in items:
-            title = cur_item.title
-            if title == 'Feeds Error':
-                raise exceptions.AuthException("The feed wouldn't load, probably because of invalid auth info")
-            if sickbeard.USENET_RETENTION is not None:
-                try:
-                    dateString = helpers.get_xml_text(cur_item.getElementsByTagName('report:postdate')[0])
-                    # use the parse (imported as parseDate) function from the dateutil lib
-                    # and we have to remove the timezone info from it because the retention_date will not have one
-                    # and a comparison of them is not possible
-                    post_date = parseDate(dateString).replace(tzinfo=None)
-                    retention_date = datetime.now() - timedelta(days=sickbeard.USENET_RETENTION)
-                    if post_date < retention_date:
-                        logger.log(u"Date " + str(post_date) + " is out of retention range, skipping", logger.DEBUG)
-                        continue
-                except Exception, e:
-                    logger.log("Error parsing date from Newzbin RSS feed: " + str(e), logger.ERROR)
-                    continue
-
-            item_list.append(cur_item)
-
-        return item_list
-
-
-    def _getRSSData(self, search=None):
-
-        params = {
-            'searchaction': 'Search',
-            'fpn': 'p',
-            'category': 8,
-            'u_nfo_posts_only': 0,
-            'u_url_posts_only': 0,
-            'u_comment_posts_only': 0,
-            'u_show_passworded': 0,
-            'u_v3_retention': 0,
-            'ps_rb_video_format': 3082257,
-            'ps_rb_language': 4096,
-            'sort': 'date',
-            'order': 'desc',
-            'u_post_results_amt': 50,
-            'feed': 'rss',
-            'hauth': 1,
-        }
-
-        if search:
-            params['q'] = search + " AND "
-        else:
-            params['q'] = ''
-
-        params['q'] += 'Attr:Lang~Eng AND NOT Attr:VideoF=DVD'
-
-        url = self.url + "search/?%s" % urllib.urlencode(params)
-        logger.log("Newzbin search URL: " + url, logger.DEBUG)
-
-        return self.cache.getRSSFeed(url)
-
-    def _checkAuth(self):
-        if sickbeard.NEWZBIN_USERNAME in (None, "") or sickbeard.NEWZBIN_PASSWORD in (None, ""):
-            raise exceptions.AuthException("Newzbin authentication details are empty, check your config")
-
-
-class NewzbinCache(tvcache.TVCache):
-    def __init__(self, provider):
-
-        tvcache.TVCache.__init__(self, provider)
-
-        # only poll Newzbin every 10 mins max
-        self.minTime = 1
-
-    def _getDailyData(self):
-
-        return self.provider._getRSSData().entries
-
-    def _parseItem(self, item):
-
-        (title, url) = self.provider._get_title_and_url(item)
-
-        if title == 'Feeds Error':
-            logger.log("There's an error in the feed, probably bad auth info", logger.DEBUG)
-            raise exceptions.AuthException("Invalid Newzbin username/password")
-
-        if not title or not url:
-            logger.log(
-                "The XML returned from the " + self.provider.name + " feed is incomplete, this result is unusable",
-                logger.ERROR)
-            return
-
-        logger.log(u"RSS Feed provider: [" + self.provider.name + "] Attempting to add item to cache: " + title, logger.DEBUG)
-
-        self._addCacheEntry(title, url)
-
-
-provider = NewzbinProvider()
diff --git a/sickbeard/providers/nyaatorrents.py b/sickbeard/providers/nyaatorrents.py
index f2aed6c3..2ac991cb 100644
--- a/sickbeard/providers/nyaatorrents.py
+++ b/sickbeard/providers/nyaatorrents.py
@@ -137,7 +137,12 @@ class NyaaCache(tvcache.TVCache):
 
         logger.log(u"NyaaTorrents cache update URL: " + url, logger.DEBUG)
 
-        return self.getRSSFeed(url).entries
+        data = self.getRSSFeed(url)
+
+        if data and 'entries' in data:
+            return data.entries
+        else:
+            return []
 
 
 provider = NyaaProvider()
diff --git a/sickbeard/providers/nzbs_org_old.py b/sickbeard/providers/nzbs_org_old.py
deleted file mode 100644
index 5e30e82e..00000000
--- a/sickbeard/providers/nzbs_org_old.py
+++ /dev/null
@@ -1,162 +0,0 @@
-# Author: Nic Wolfe
-# URL: http://code.google.com/p/sickbeard/
-#
-# This file is part of SickRage.
-#
-# SickRage is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# SickRage is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
-
-
-
-import datetime
-import re
-import time
-import urllib
-
-from xml.dom.minidom import parseString
-
-import sickbeard
-import generic
-
-from sickbeard import classes, show_name_helpers, helpers
-
-from sickbeard import exceptions, logger
-from sickbeard import tvcache
-from sickbeard.exceptions import ex
-
-
-class NZBsProvider(generic.NZBProvider):
-    def __init__(self):
-
-        generic.NZBProvider.__init__(self, "NZBs.org Old")
-
-        self.supportsBacklog = True
-
-        self.cache = NZBsCache(self)
-
-        self.url = 'https://secure.nzbs.org/'
-
-    def isEnabled(self):
-        return sickbeard.NZBS
-
-    def _checkAuth(self):
-        if sickbeard.NZBS_UID in (None, "") or sickbeard.NZBS_HASH in (None, ""):
-            raise exceptions.AuthException("NZBs.org authentication details are empty, check your config")
-
-    def _get_season_search_strings(self, ep_obj):
-        return ['^' + x for x in show_name_helpers.makeSceneSeasonSearchString(self.show, ep_obj)]
-
-    def _get_episode_search_strings(self, ep_obj, add_string=''):
-        return ['^' + x for x in show_name_helpers.makeSceneSearchString(self.show, ep_obj)]
-
-    def _doSearch(self, curString, show=None, age=None):
-
-        curString = curString.replace('.', ' ')
-
-        params = {"action": "search",
-                  "q": curString.encode('utf-8'),
-                  "dl": 1,
-                  "i": sickbeard.NZBS_UID,
-                  "h": sickbeard.NZBS_HASH,
-                  "age": sickbeard.USENET_RETENTION,
-                  "num": 100,
-                  "type": 1}
-
-        searchURL = self.url + "rss.php?" + urllib.urlencode(params)
-
-        logger.log(u"Search string: " + searchURL, logger.DEBUG)
-
-        data = self.cache.getRSSFeed(searchURL)
-
-        # Pause to avoid 503's
-        time.sleep(5)
-
-        if data is None:
-            logger.log(u"Error trying to load NZBs.org RSS feed: " + searchURL, logger.ERROR)
-            return []
-
-        items = data.entries
-
-        results = []
-
-        for curItem in items:
-            (title, url) = self._get_title_and_url(curItem)
-
-            if not title or not url:
-                logger.log(
-                    u"The XML returned from the NZBs.org RSS feed is incomplete, this result is unusable: " + data,
-                    logger.ERROR)
-                continue
-
-            if "&i=" not in url and "&h=" not in url:
-                raise exceptions.AuthException(
-                    "The NZBs.org result URL has no auth info which means your UID/hash are incorrect, check your config")
-
-            results.append(curItem)
-
-        return results
-
-    def findPropers(self, date=None):
-
-        results = []
-
-        for curString in (".PROPER.", ".REPACK."):
-
-            for curResult in self._doSearch(curString):
-
-                (title, url) = self._get_title_and_url(curResult)
-
-                pubDate_node = curResult.getElementsByTagName('pubDate')[0]
-                pubDate = helpers.get_xml_text(pubDate_node)
-
-                match = re.search('(\w{3}, \d{1,2} \w{3} \d{4} \d\d:\d\d:\d\d) [\+\-]\d{4}', pubDate)
-                if not match:
-                    continue
-
-                resultDate = datetime.datetime.strptime(match.group(1), "%a, %d %b %Y %H:%M:%S")
-
-                if date is None or resultDate > date:
-                    results.append(classes.Proper(title, url, resultDate, self.show))
-
-        return results
-
-
-class NZBsCache(tvcache.TVCache):
-    def __init__(self, provider):
-        tvcache.TVCache.__init__(self, provider)
-
-        # only poll NZBs.org every 15 minutes max
-        self.minTime = 15
-
-    def _getRSSData(self):
-        url = self.provider.url + 'rss.php?'
-        urlArgs = {'type': 1,
-                   'dl': 1,
-                   'num': 100,
-                   'i': sickbeard.NZBS_UID,
-                   'h': sickbeard.NZBS_HASH,
-                   'age': sickbeard.USENET_RETENTION}
-
-        url += urllib.urlencode(urlArgs)
-
-        logger.log(u"NZBs cache update URL: " + url, logger.DEBUG)
-
-        return self.provider.getURL(url)
-
-    def _checkItemAuth(self, title, url):
-        if "&i=" not in url and "&h=" not in url:
-            raise exceptions.AuthException(
-                "The NZBs.org result URL has no auth info which means your UID/hash are incorrect, check your config")
-
-
-provider = NZBsProvider()
\ No newline at end of file
diff --git a/sickbeard/providers/nzbsrus.py b/sickbeard/providers/nzbsrus.py
deleted file mode 100644
index 570b24ed..00000000
--- a/sickbeard/providers/nzbsrus.py
+++ /dev/null
@@ -1,116 +0,0 @@
-# Author: Nic Wolfe
-# URL: http://code.google.com/p/sickbeard/
-#
-# This file is part of SickRage.
-#
-# SickRage is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# SickRage is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
-
-import urllib
-import generic
-import sickbeard
-
-try:
-    import xml.etree.cElementTree as etree
-except ImportError:
-    import xml.etree.ElementTree as etree
-
-from sickbeard import exceptions, logger
-from sickbeard import tvcache, show_name_helpers
-
-
-class NZBsRUSProvider(generic.NZBProvider):
-    def __init__(self):
-        generic.NZBProvider.__init__(self, "NZBs'R'US")
-        self.cache = NZBsRUSCache(self)
-        self.url = 'https://www.nzbsrus.com/'
-        self.supportsBacklog = True
-
-    def isEnabled(self):
-        return sickbeard.NZBSRUS
-
-    def _checkAuth(self):
-        if sickbeard.NZBSRUS_UID in (None, "") or sickbeard.NZBSRUS_HASH in (None, ""):
-            raise exceptions.AuthException("NZBs'R'US authentication details are empty, check your config")
-
-    def _get_season_search_strings(self, ep_obj):
-        return ['^' + x for x in show_name_helpers.makeSceneSeasonSearchString(self.show, ep_obj)]
-
-    def _get_episode_search_strings(self, ep_obj, add_string=''):
-        return ['^' + x for x in show_name_helpers.makeSceneSearchString(self.show, ep_obj)]
-
-    def _doSearch(self, search, show=None, age=None):
-        params = {'uid': sickbeard.NZBSRUS_UID,
-                  'key': sickbeard.NZBSRUS_HASH,
-                  'xml': 1,
-                  'age': sickbeard.USENET_RETENTION,
-                  'lang0': 1,  # English only from CouchPotato
-                  'lang1': 1,
-                  'lang3': 1,
-                  'c91': 1,  # TV:HD
-                  'c104': 1,  # TV:SD-x264
-                  'c75': 1,  # TV:XviD
-                  'searchtext': search}
-
-        if not params['age']:
-            params['age'] = 500
-
-        searchURL = self.url + 'api.php?' + urllib.urlencode(params)
-        logger.log(u"NZBS'R'US search url: " + searchURL, logger.DEBUG)
-
-        data = self.cache.getRSSFeed(searchURL)
-        if not data:
-            return []
-
-        items = data.entries
-        if not len(items) > 0:
-            logger.log(u"Error trying to parse NZBS'R'US XML data.", logger.ERROR)
-            logger.log(u"RSS data: " + data, logger.DEBUG)
-            return []
-
-        return items
-
-    def _get_title_and_url(self, item):
-        if item.title:  # RSS feed
-            title = item.title
-            url = item.link
-        else:  # API item
-            title = item.name
-            nzbID = item.id
-            key = item.key
-            url = self.url + 'nzbdownload_rss.php' + '/' + \
-                  nzbID + '/' + sickbeard.NZBSRUS_UID + '/' + key + '/'
-        return (title, url)
-
-
-class NZBsRUSCache(tvcache.TVCache):
-    def __init__(self, provider):
-        tvcache.TVCache.__init__(self, provider)
-        # only poll NZBs'R'US every 15 minutes max
-        self.minTime = 15
-
-    def _getDailyData(self):
-        url = self.provider.url + 'rssfeed.php?'
-        urlArgs = {'cat': '91,75,104',  # HD,XviD,SD-x264
-                   'i': sickbeard.NZBSRUS_UID,
-                   'h': sickbeard.NZBSRUS_HASH}
-
-        url += urllib.urlencode(urlArgs)
-        logger.log(u"NZBs'R'US cache update URL: " + url, logger.DEBUG)
-
-        return self.getRSSFeed(url).entries
-
-    def _checkAuth(self, data):
-        return data != 'Invalid Link'
-
-
-provider = NZBsRUSProvider()
diff --git a/sickbeard/providers/omgwtfnzbs.py b/sickbeard/providers/omgwtfnzbs.py
index 946a3f41..26177ba5 100644
--- a/sickbeard/providers/omgwtfnzbs.py
+++ b/sickbeard/providers/omgwtfnzbs.py
@@ -164,9 +164,11 @@ class OmgwtfnzbsCache(tvcache.TVCache):
 
         logger.log(self.provider.name + u" cache update URL: " + rss_url, logger.DEBUG)
 
-        return self.getRSSFeed(rss_url).entries
+        data = self.getRSSFeed(rss_url)
 
-    def _checkAuth(self, data):
-        return self.provider._checkAuthFromData(data)
+        if data and 'entries' in data:
+            return data.entries
+        else:
+            return []
 
 
 provider = OmgwtfnzbsProvider()
diff --git a/sickbeard/providers/publichd.py b/sickbeard/providers/publichd.py
deleted file mode 100644
index ff719c01..00000000
--- a/sickbeard/providers/publichd.py
+++ /dev/null
@@ -1,253 +0,0 @@
-# Author: Mr_Orange
-# URL: http://code.google.com/p/sickbeard/
-#
-# This file is part of SickRage.
-#
-# SickRage is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# SickRage is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
-
-from __future__ import with_statement
-
-import sys
-import os
-import traceback
-import urllib, urlparse
-import re
-import datetime
-import sickbeard
-import generic
-
-from sickbeard.common import Quality
-from sickbeard import logger
-from sickbeard import tvcache
-from sickbeard import helpers
-from sickbeard import db
-from sickbeard import classes
-from sickbeard.show_name_helpers import allPossibleShowNames, sanitizeSceneName
-from sickbeard.exceptions import ex
-from sickbeard import encodingKludge as ek
-from sickbeard import clients
-
-from lib import requests
-from lib.requests import exceptions
-from sickbeard.bs4_parser import BS4Parser
-from lib.unidecode import unidecode
-
-
-class PublicHDProvider(generic.TorrentProvider):
-    def __init__(self):
-
-        generic.TorrentProvider.__init__(self, "PublicHD")
-
-        self.supportsBacklog = True
-
-        self.enabled = False
-        self.ratio = None
-        self.minseed = None
-        self.minleech = None
-
-        self.cache = PublicHDCache(self)
-
-        self.url = 'http://phdproxy.com/'
-
-        self.searchurl = self.url + 'index.php?page=torrents&search=%s&active=0&category=%s&order=5&by=2'  #order by seed
-
-        self.categories = {'Season': ['23'], 'Episode': ['7', '14', '24'], 'RSS': ['7', '14', '23', '24']}
-
-    def isEnabled(self):
-        return self.enabled
-
-    def imageName(self):
-        return 'publichd.png'
-
-    def getQuality(self, item, anime=False):
-
-        quality = Quality.sceneQuality(item[0], anime)
-        return quality
-
-    def _get_season_search_strings(self, ep_obj):
-        search_string = {'Season': []}
-
-        for show_name in set(allPossibleShowNames(self.show)):
-            if ep_obj.show.air_by_date or ep_obj.show.sports:
-                ep_string = show_name + str(ep_obj.airdate).split('-')[0]
-            else:
-                ep_string = show_name + ' S%02d' % int(ep_obj.scene_season)  #1) showName SXX -SXXE
-            search_string['Season'].append(ep_string)
-
-            if ep_obj.show.air_by_date or ep_obj.show.sports:
-                ep_string = show_name + ' Season ' + str(ep_obj.airdate).split('-')[0]
-            else:
-                ep_string = show_name + ' Season ' + str(ep_obj.scene_season)  #2) showName Season X
-            search_string['Season'].append(ep_string)
-
-        return [search_string]
-
-    def _get_episode_search_strings(self, ep_obj, add_string=''):
-
-        search_string = {'Episode': []}
-
-        if not ep_obj:
-            return []
-
-        if self.show.air_by_date:
-            for show_name in set(allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            str(ep_obj.airdate).replace('-', '|')
-                search_string['Episode'].append(ep_string)
-        elif self.show.sports:
-            for show_name in set(allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            str(ep_obj.airdate).replace('-', '|') + '|' + \
-                            ep_obj.airdate.strftime('%b')
-                search_string['Episode'].append(ep_string)
-        else:
-            for show_name in set(allPossibleShowNames(self.show)):
-                ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season,
-                                                                  'episodenumber': ep_obj.scene_episode}
-
-                for x in add_string.split('|'):
-                    to_search = re.sub('\s+', ' ', ep_string + ' %s' % x)
-                    search_string['Episode'].append(to_search)
-
-        return [search_string]
-
-    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0):
-
-        results = []
-        items = {'Season': [], 'Episode': [], 'RSS': []}
-
-        for mode in search_params.keys():
-            for search_string in search_params[mode]:
-
-                if mode == 'RSS':
-                    searchURL = self.url + 'index.php?page=torrents&active=1&category=%s' % (
-                        ';'.join(self.categories[mode]))
-                    logger.log(u"PublicHD cache update URL: " + searchURL, logger.DEBUG)
-                else:
-                    searchURL = self.searchurl % (
-                        urllib.quote(unidecode(search_string)), ';'.join(self.categories[mode]))
-                    logger.log(u"Search string: " + searchURL, logger.DEBUG)
-
-                html = self.getURL(searchURL)
-                if not html:
-                    continue
-
-                #remove unneccecary
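
The `_getDailyData` hunks above (animezb, ezrss, fanzub, nyaatorrents, omgwtfnzbs) all replace `return self.getRSSFeed(...).entries` with the same guard, so a failed or empty feed fetch returns an empty list instead of raising AttributeError. A minimal standalone sketch of that pattern, assuming a feedparser-style result as the feed object; the function name and URL below are illustrative and not part of the patch:

    import feedparser  # assumption: getRSSFeed() yields a feedparser-style dict

    def fetch_cache_entries(rss_url):
        # feedparser.parse() does not raise on a dead URL or malformed XML;
        # it returns a dict-like result, so check for 'entries' before use.
        data = feedparser.parse(rss_url)

        if data and 'entries' in data:
            return data.entries
        return []

    # usage (illustrative URL): entries = fetch_cache_entries('http://example.invalid/rss')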
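The `_checkAuth` additions to bitsoup, freshontv, hdtorrents and iptorrents follow one pattern: fail fast with a configuration hint before any login attempt is made. A minimal sketch under the assumption of a provider object exposing `name`, `username` and `password` attributes; `AuthException` here stands in for `sickbeard.exceptions.AuthException` and the class name is hypothetical:

    class AuthException(Exception):
        """Stand-in for sickbeard.exceptions.AuthException."""

    class ExampleTorrentProvider(object):
        def __init__(self, name, username=None, password=None):
            self.name = name
            self.username = username
            self.password = password

        def _checkAuth(self):
            # Raise before _doLogin() ever runs, so the user sees a clear
            # configuration error rather than a failed HTTP login.
            if not self.username or not self.password:
                raise AuthException("Your authentication credentials for "
                                    + self.name + " are missing, check your config.")
            return True

    # usage: ExampleTorrentProvider("BitSoup")._checkAuth() raises AuthException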