1
0
mirror of https://github.com/moparisthebest/SickRage synced 2024-12-12 11:02:21 -05:00

Fixed minor issues with provider RSS caching code.

Fixed login issues with T411 provider.
Added 2 new subcategories to the T411 provider: 455 and 637.
This commit is contained in:
echel0n 2014-12-05 22:16:30 -08:00
parent 86e7912c41
commit f814de4c82
19 changed files with 138 additions and 170 deletions

View File

@ -246,10 +246,7 @@ class BitSoupProvider(generic.TorrentProvider):
' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
)
if not sqlResults:
return []
for sqlshow in sqlResults:
for sqlshow in sqlResults or []:
self.show = helpers.findCertainShow(sickbeard.showList, int(sqlshow["showid"]))
if self.show:
curEp = self.show.getEpisode(int(sqlshow["season"]), int(sqlshow["episode"]))

View File

@ -78,6 +78,7 @@ class BTNProvider(generic.TorrentProvider):
self._checkAuth()
results = []
params = {}
apikey = self.api_key
@ -120,16 +121,13 @@ class BTNProvider(generic.TorrentProvider):
if 'torrents' in parsedJSON:
found_torrents.update(parsedJSON['torrents'])
results = []
for torrentid, torrent_info in found_torrents.iteritems():
(title, url) = self._get_title_and_url(torrent_info)
if title and url:
results.append(torrent_info)
return results
return []
return results
def _api_call(self, apikey, params={}, results_per_page=1000, offset=0):

View File

@ -181,7 +181,7 @@ class FreshOnTVProvider(generic.TorrentProvider):
freeleech = '3' if self.freeleech else '0'
if not self._doLogin():
return []
return results
for mode in search_params.keys():
for search_string in search_params[mode]:

View File

@ -205,14 +205,16 @@ class HDBitsCache(tvcache.TVCache):
self.minTime = 15
def _getRSSData(self):
results = []
try:
parsedJSON = self.provider.getURL(self.provider.rss_url, post_data=self.provider._make_post_data_JSON(),
json=True)
if self.provider._checkAuthFromData(parsedJSON):
return parsedJSON['data']
results = parsedJSON['data']
except:
pass
return []
return results
provider = HDBitsProvider()

View File

@ -179,7 +179,7 @@ class HDTorrentsProvider(generic.TorrentProvider):
items = {'Season': [], 'Episode': [], 'RSS': []}
if not self._doLogin():
return []
return results
for mode in search_params.keys():
for search_string in search_params[mode]:

View File

@ -158,7 +158,7 @@ class IPTorrentsProvider(generic.TorrentProvider):
freeleech = '&free=on' if self.freeleech else ''
if not self._doLogin():
return []
return results
for mode in search_params.keys():
for search_string in search_params[mode]:

View File

@ -359,5 +359,4 @@ class KATCache(tvcache.TVCache):
search_params = {'RSS': ['rss']}
return self.provider._doSearch(search_params)
provider = KATProvider()

View File

@ -15,6 +15,7 @@
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
import traceback
import urllib
import time
@ -427,36 +428,45 @@ class NewznabCache(tvcache.TVCache):
return self.provider._checkAuthFromData(data)
def updateCache(self):
if not self.shouldUpdate():
return
if self.shouldUpdate() and self._checkAuth(None):
data = self._getRSSData()
try:
if self._checkAuth(None):
data = self._getRSSData()
if not data or not len(data) > 0:
return
# as long as the http request worked we count this as an update
if not data:
return []
# clear cache
self._clearCache()
self.setLastUpdate()
# set updated
self.setLastUpdate()
# clear cache
self._clearCache()
try:
items = data.get('entries', [])
except:
items = data
if self._checkAuth(data):
items = data.entries
cl = []
for item in items:
ci = self._parseItem(item)
if ci is not None:
cl.append(ci)
if self._checkAuth(items):
cl = []
for item in items:
ci = self._parseItem(item)
if ci is not None:
cl.append(ci)
if len(cl) > 0:
myDB = self._getDB()
myDB.mass_action(cl)
if len(cl) > 0:
myDB = self._getDB()
myDB.mass_action(cl)
else:
raise AuthException(
u"Your authentication credentials for " + self.provider.name + " are incorrect, check your config")
return []
else:
raise AuthException(
u"Your authentication credentials for " + self.provider.name + " are incorrect, check your config")
except AuthException, e:
logger.log(u"Authentication error: " + ex(e), logger.ERROR)
except Exception, e:
logger.log(u"Error while searching " + self.provider.name + ", skipping: " + ex(e), logger.ERROR)
logger.log(traceback.format_exc(), logger.DEBUG)
# overwrite method with that parses the rageid from the newznab feed
def _parseItem(self, item):

View File

@ -190,7 +190,7 @@ class NextGenProvider(generic.TorrentProvider):
items = {'Season': [], 'Episode': [], 'RSS': []}
if not self._doLogin():
return []
return results
for mode in search_params.keys():

View File

@ -107,12 +107,11 @@ class TorrentRssProvider(generic.TorrentProvider):
if not cookie_validator.match(self.cookies):
return (False, 'Cookie is not correctly formatted: ' + self.cookies)
items = self.cache._getRSSData()
if not len(items) > 0:
data = self.cache._getRSSData()
if not data or not len(data) > 0:
return (False, 'No items found in the RSS feed ' + self.url)
(title, url) = self._get_title_and_url(items[0])
(title, url) = self._get_title_and_url(data.entries[0])
if not title:
return (False, 'Unable to get title from first item')

View File

@ -164,7 +164,7 @@ class SCCProvider(generic.TorrentProvider):
items = {'Season': [], 'Episode': [], 'RSS': []}
if not self._doLogin():
return []
return results
data = []
searchURLS = []
@ -307,6 +307,4 @@ class SCCCache(tvcache.TVCache):
search_params = {'RSS': ['']}
return self.provider._doSearch(search_params)
provider = SCCProvider()

View File

@ -151,7 +151,7 @@ class SpeedCDProvider(generic.TorrentProvider):
items = {'Season': [], 'Episode': [], 'RSS': []}
if not self._doLogin():
return []
return results
for mode in search_params.keys():
for search_string in search_params[mode]:
@ -256,7 +256,5 @@ class SpeedCDCache(tvcache.TVCache):
search_params = {'RSS': ['']}
return self.provider._doSearch(search_params)
provider = SpeedCDProvider()

View File

@ -12,7 +12,7 @@
# Sick Beard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
@ -25,6 +25,8 @@ import sickbeard
import generic
from lib import requests
from lib.requests import exceptions
from sickbeard.common import USER_AGENT, Quality, cpu_presets
from sickbeard import logger
from sickbeard import tvcache
@ -34,32 +36,29 @@ from sickbeard import db
from sickbeard import helpers
from sickbeard import classes
from sickbeard.helpers import sanitizeSceneName
from sickbeard.exceptions import ex
class T411Provider(generic.TorrentProvider):
urls = {'base_url': 'http://www.t411.me/',
'search': 'http://www.t411.me/torrents/search/?name=%s&cat=210&subcat=433&search=%s&submit=Recherche',
'search': 'http://www.t411.me/torrents/search/?name=%s&cat=210&subcat=%s&search=%s&submit=Recherche',
'login_page': 'http://www.t411.me/users/login/',
'download': 'http://www.t411.me/torrents/download/?id=%s',
}
def __init__(self):
generic.TorrentProvider.__init__(self, "T411")
self.supportsBacklog = True
self.enabled = False
self.username = None
self.password = None
self.ratio = None
self.cache = T411Cache(self)
self.url = self.urls['base_url']
self.last_login_check = None
self.login_opener = None
self.subcategories = [637, 455, 433]
def isEnabled(self):
return self.enabled
@ -68,60 +67,27 @@ class T411Provider(generic.TorrentProvider):
return 't411.png'
def getQuality(self, item, anime=False):
quality = Quality.sceneQuality(item[0], anime)
return quality
def getLoginParams(self):
return {
'login': self.username,
'password': self.password,
'remember': '1',
def _doLogin(self):
login_params = {'login': self.username,
'password': self.password,
}
def loginSuccess(self, output):
if "<span>Ratio: <strong class" in output.text:
return True
else:
return False
def _doLogin(self):
now = time.time()
if self.login_opener and self.last_login_check < (now - 3600):
try:
output = self.login_opener.open(self.urls['test'])
if self.loginSuccess(output):
self.last_login_check = now
return True
else:
self.login_opener = None
except:
self.login_opener = None
if self.login_opener:
return True
self.session = requests.Session()
try:
login_params = self.getLoginParams()
self.session = requests.Session()
self.session.headers.update({'User-Agent': USER_AGENT})
data = self.session.get(self.urls['login_page'], verify=False)
output = self.session.post(self.urls['login_page'], data=login_params, verify=False)
if self.loginSuccess(output):
self.last_login_check = now
self.login_opener = self.session
return True
response = self.session.post(self.urls['login_page'], data=login_params, timeout=30, verify=False)
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR)
return False
error = 'unknown'
except:
error = traceback.format_exc()
self.login_opener = None
if not re.search('/users/logout/', response.text.lower()):
logger.log(u'Invalid username or password for ' + self.name + ' Check your settings', logger.ERROR)
return False
self.login_opener = None
logger.log(u'Failed to login:' + str(error), logger.ERROR)
return False
return True
def _get_season_search_strings(self, ep_obj):
@ -177,7 +143,7 @@ class T411Provider(generic.TorrentProvider):
items = {'Season': [], 'Episode': [], 'RSS': []}
if not self._doLogin():
return []
return results
for mode in search_params.keys():
@ -187,54 +153,55 @@ class T411Provider(generic.TorrentProvider):
search_string2 = ''
else:
search_string2 = '%40name+' + search_string + '+'
searchURL = self.urls['search'] % (search_string, search_string2)
logger.log(u"" + self.name + " search page URL: " + searchURL, logger.DEBUG)
data = self.getURL(searchURL)
for sc in self.subcategories:
searchURL = self.urls['search'] % (search_string, sc, search_string2)
logger.log(u"" + self.name + " search page URL: " + searchURL, logger.DEBUG)
if not data:
continue
data = self.getURL(searchURL)
if not data:
continue
try:
with BS4Parser(data.decode('iso-8859-1'), features=["html5lib", "permissive"]) as html:
resultsTable = html.find('table', attrs={'class': 'results'})
try:
with BS4Parser(data, features=["html5lib", "permissive"]) as html:
resultsTable = html.find('table', attrs={'class': 'results'})
if not resultsTable:
logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
logger.DEBUG)
continue
if not resultsTable:
logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
logger.DEBUG)
continue
entries = resultsTable.find("tbody").findAll("tr")
entries = resultsTable.find("tbody").findAll("tr")
if len(entries) > 0:
for result in entries:
if len(entries) > 0:
for result in entries:
try:
link = result.find('a', title=True)
torrentName = link['title']
torrent_name = str(torrentName)
torrentId = result.find_all('td')[2].find_all('a')[0]['href'][1:].replace('torrents/nfo/?id=','')
torrent_download_url = (self.urls['download'] % torrentId).encode('utf8')
except (AttributeError, TypeError):
continue
try:
link = result.find('a', title=True)
torrentName = link['title']
torrent_name = str(torrentName)
torrentId = result.find_all('td')[2].find_all('a')[0]['href'][1:].replace(
'torrents/nfo/?id=', '')
torrent_download_url = (self.urls['download'] % torrentId).encode('utf8')
except (AttributeError, TypeError):
continue
if not torrent_name or not torrent_download_url:
continue
if not torrent_name or not torrent_download_url:
continue
item = torrent_name, torrent_download_url
logger.log(u"Found result: " + torrent_name + " (" + torrent_download_url + ")", logger.DEBUG)
items[mode].append(item)
item = torrent_name, torrent_download_url
logger.log(u"Found result: " + torrent_name + " (" + torrent_download_url + ")",
logger.DEBUG)
items[mode].append(item)
else:
logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
logger.WARNING)
continue
except Exception, e:
logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(),
logger.ERROR)
else:
logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
logger.WARNING)
continue
except Exception, e:
logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(),
logger.ERROR)
results += items[mode]
return results
@ -286,13 +253,12 @@ class T411Provider(generic.TorrentProvider):
class T411Cache(tvcache.TVCache):
def __init__(self, provider):
tvcache.TVCache.__init__(self, provider)
# Only poll T411 every 10 minutes max
self.minTime = 10
def _getDailyData(self):
def _getRSSData(self):
search_params = {'RSS': ['']}
return self.provider._doSearch(search_params)

View File

@ -342,7 +342,6 @@ class ThePirateBayCache(tvcache.TVCache):
search_params = {'RSS': ['rss']}
return self.provider._doSearch(search_params)
class ThePirateBayWebproxy:
def __init__(self):
self.Type = 'GlypeProxy'

View File

@ -151,7 +151,7 @@ class TorrentBytesProvider(generic.TorrentProvider):
items = {'Season': [], 'Episode': [], 'RSS': []}
if not self._doLogin():
return []
return results
for mode in search_params.keys():
for search_string in search_params[mode]:

View File

@ -182,7 +182,7 @@ class TorrentDayProvider(generic.TorrentProvider):
freeleech = '&free=on' if self.freeleech else ''
if not self._doLogin():
return []
return results
for mode in search_params.keys():
for search_string in search_params[mode]:

View File

@ -156,7 +156,7 @@ class TorrentLeechProvider(generic.TorrentProvider):
items = {'Season': [], 'Episode': [], 'RSS': []}
if not self._doLogin():
return []
return results
for mode in search_params.keys():
for search_string in search_params[mode]:

View File

@ -42,6 +42,8 @@ class RSSFeeds:
try:
fc = Cache(self.rssDB)
return fc.fetch(url, False, False, request_headers)
feed = fc.fetch(url, False, False, request_headers)
return feed
finally:
self.rssDB.close()

View File

@ -116,41 +116,41 @@ class TVCache():
try:
if self._checkAuth(None):
data = self._getRSSData()
if len(data) > 0:
# clear cache
self._clearCache()
if not data or not len(data) > 0:
return
# set updated
self.setLastUpdate()
# clear cache
self._clearCache()
try:
items = data.get('entries', [])
except:
items = data
# set updated
self.setLastUpdate()
if self._checkAuth(items):
cl = []
for item in items:
title, url = self._get_title_and_url(item)
ci = self._parseItem(title, url)
if ci is not None:
cl.append(ci)
try:
items = data.get('entries', [])
except:
items = data
if len(cl) > 0:
myDB = self._getDB()
myDB.mass_action(cl)
if self._checkAuth(items):
cl = []
for item in items:
title, url = self._get_title_and_url(item)
ci = self._parseItem(title, url)
if ci is not None:
cl.append(ci)
else:
raise AuthException(
u"Your authentication credentials for " + self.provider.name + " are incorrect, check your config")
if len(cl) > 0:
myDB = self._getDB()
myDB.mass_action(cl)
else:
raise AuthException(
u"Your authentication credentials for " + self.provider.name + " are incorrect, check your config")
except AuthException, e:
logger.log(u"Authentication error: " + ex(e), logger.ERROR)
except Exception, e:
logger.log(u"Error while searching " + self.provider.name + ", skipping: " + ex(e), logger.ERROR)
logger.log(traceback.format_exc(), logger.DEBUG)
return []
def getRSSFeed(self, url, post_data=None, request_headers=None):
return RSSFeeds(self.providerID).getFeed(url, post_data, request_headers)