Updated KAT provider urls.
The KAT provider now uses feedcache to get its results via RSS feeds. Fixed issues with Trakt.tv notifications and with adding trending shows.
commit 02a9148025
parent 3d7e460079
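The core of the change is that KAT search results are now read from the site's RSS feeds (through the provider cache's feed parser) instead of scraping the HTML result table. A minimal sketch of the idea, assuming the feed has already been parsed into dicts carrying the torrent_* fields that appear in the diff below; entry_to_result itself is illustrative and not part of the commit:

# Illustrative only: maps one parsed RSS entry to the (title, url, id, seeders, leechers)
# tuple the provider appends to its result list. Field names come from the diff below.
def entry_to_result(item):
    try:
        return (item['title'],              # release name
                item['torrent_magnetURI'],  # magnet link used as the download URL
                item['guid'],               # unique id of the torrent
                int(item['torrent_seeds']),
                int(item['torrent_peers']))
    except (KeyError, TypeError, ValueError):
        return None                         # malformed entry; the caller skips it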
@@ -149,7 +149,7 @@
                     <i>$cur_show['ratings']['votes'] votes</i>
 
                     <div class="traktShowTitleIcons">
-                        <a href="$sbRoot/home/addTraktShow?indexer_id=${cur_show['tvdb_id']}&showName=${cur_show['title']}" class="btn btn-xs">Add Show</a>
+                        <a href="$sbRoot/home/addShows/addTraktShow?indexer_id=${cur_show['tvdb_id']}&showName=${cur_show['title']}" class="btn btn-xs">Add Show</a>
                     </div>
                 </div>
             </div>
@@ -121,12 +121,14 @@ class TraktNotifier:
         Returns: True if the request succeeded, False otherwise
         """
 
-        trakt_api = TraktAPI(sickbeard.TRAKT_API, sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_PASSWORD)
+        trakt_api = TraktAPI(api, username, password)
 
         try:
             if trakt_api.validateAccount():
-                return True
+                return "Test notice sent successfully to Trakt"
         except (traktException, traktAuthException, traktServerBusy) as e:
             logger.log(u"Could not connect to Trakt service: %s" % ex(e), logger.WARNING)
 
+            return "Test notice failed to Trakt: %s" % ex(e)
+
 notifier = TraktNotifier
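For context, the reworked test_notify returns a user-facing message in both the success and the failure path instead of a bare boolean, so the web handler (see the Home.testTrakt hunk further down) can pass the string straight through. A minimal standalone sketch of that control flow, assuming only a client object that exposes validateAccount() like the TraktAPI used above; the function name and parameters here are illustrative:

# Sketch of the message-returning flow; trakt_api is any client with validateAccount(),
# trakt_errors is the tuple of exceptions the caller wants trapped.
def test_trakt_credentials(trakt_api, trakt_errors):
    try:
        if trakt_api.validateAccount():
            return "Test notice sent successfully to Trakt"
    except trakt_errors as e:
        return "Test notice failed to Trakt: %s" % e
    return "Test notice failed to Trakt"   # validateAccount() returned a falsy value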
@@ -61,8 +61,8 @@ class KATProvider(generic.TorrentProvider):
 
         self.cache = KATCache(self)
 
-        self.urls = ['http://kickass.to/', 'http://katproxy.com/', 'http://www.kickmirror.com/']
-        self.url = 'https://kickass.to/'
+        self.urls = ['http://kickass.so/', 'http://katproxy.com/', 'http://www.kickmirror.com/']
+        self.url = 'https://kickass.so/'
 
     def isEnabled(self):
         return self.enabled
@@ -227,68 +227,50 @@ class KATProvider(generic.TorrentProvider):
         for mode in search_params.keys():
             for search_string in search_params[mode]:
 
+                entries = []
                 for url in self.urls:
-                    if mode != 'RSS':
-                        searchURL = url + 'usearch/%s/?field=seeders&sorder=desc' % (urllib.quote(unidecode(search_string)))
-                        logger.log(u"Search string: " + searchURL, logger.DEBUG)
-                    else:
-                        searchURL = url + 'tv/?field=time_add&sorder=desc'
-                        logger.log(u"KAT cache update URL: " + searchURL, logger.DEBUG)
+                    searchURL = url + 'usearch/%s/?field=seeders&sorder=desc&rss=1' % (urllib.quote(unidecode(search_string)))
+                    logger.log(u"Search string: " + searchURL, logger.DEBUG)
 
-                    html = self.getURL(searchURL)
-                    if html:
+                    entries = self.cache.getRSSFeed(url, items=['entries', 'feed'])['entries']
+                    if entries:
                         self.url = url
                         break
 
-                if not html:
-                    continue
-
                 try:
-                    with BS4Parser(html, features=["html5lib", "permissive"]) as soup:
-                        torrent_table = soup.find('table', attrs={'class': 'data'})
-                        torrent_rows = torrent_table.find_all('tr') if torrent_table else []
-
-                        #Continue only if one Release is found
-                        if len(torrent_rows) < 2:
-                            logger.log(u"The data returned from " + self.name + " does not contain any torrents",
-                                       logger.WARNING)
-
-                        for tr in torrent_rows[1:]:
-                            try:
-                                link = urlparse.urljoin(self.url,
-                                                        (tr.find('div', {'class': 'torrentname'}).find_all('a')[1])['href'])
-                                id = tr.get('id')[-7:]
-                                title = (tr.find('div', {'class': 'torrentname'}).find_all('a')[1]).text \
-                                        or (tr.find('div', {'class': 'torrentname'}).find_all('a')[2]).text
-                                url = tr.find('a', 'imagnet')['href']
-                                verified = True if tr.find('a', 'iverify') else False
-                                trusted = True if tr.find('img', {'alt': 'verified'}) else False
-                                seeders = int(tr.find_all('td')[-2].text)
-                                leechers = int(tr.find_all('td')[-1].text)
-                            except (AttributeError, TypeError):
-                                continue
-
-                            if mode != 'RSS' and (seeders < self.minseed or leechers < self.minleech):
-                                continue
-
-                            if self.confirmed and not verified:
-                                logger.log(
-                                    u"KAT Provider found result " + title + " but that doesn't seem like a verified result so I'm ignoring it",
-                                    logger.DEBUG)
-                                continue
-
-                            #Check number video files = episode in season and find the real Quality for full season torrent analyzing files in torrent
-                            if mode == 'Season' and search_mode == 'sponly':
-                                ep_number = int(epcount / len(set(allPossibleShowNames(self.show))))
-                                title = self._find_season_quality(title, link, ep_number)
-
-                            if not title or not url:
-                                continue
-
-                            item = title, url, id, seeders, leechers
-
-                            items[mode].append(item)
+                    for item in entries or []:
+                        try:
+                            link = item['link']
+                            id = item['guid']
+                            title = item['title']
+                            url = item['torrent_magnetURI']
+                            verified = bool(item.get('torrent_verified', 0))
+                            seeders = int(item['torrent_seeds'])
+                            leechers = int(item['torrent_peers'])
+                            size = int(item['torrent_contentLength'])
+                        except (AttributeError, TypeError):
+                            continue
+
+                        if seeders < self.minseed or leechers < self.minleech:
+                            continue
+
+                        if self.confirmed and not verified:
+                            logger.log(
+                                u"KAT Provider found result " + title + " but that doesn't seem like a verified result so I'm ignoring it",
+                                logger.DEBUG)
+                            continue
+
+                        #Check number video files = episode in season and find the real Quality for full season torrent analyzing files in torrent
+                        if mode == 'Season' and search_mode == 'sponly':
+                            ep_number = int(epcount / len(set(allPossibleShowNames(self.show))))
+                            title = self._find_season_quality(title, link, ep_number)
+
+                        if not title or not url:
+                            continue
+
+                        item = title, url, id, seeders, leechers
+
+                        items[mode].append(item)
 
                 except Exception, e:
                     logger.log(u"Failed to parsing " + self.name + " Traceback: " + traceback.format_exc(),
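The per-entry filtering above (minimum seeders/leechers, optional "verified only") is independent of how the entry was fetched, so it is easy to sanity-check in isolation. A small hypothetical helper, not part of the commit, that captures the same rules:

# Hypothetical helper mirroring the filter rules used in _doSearch above.
def accept_result(seeders, leechers, verified, minseed=0, minleech=0, confirmed_only=False):
    if seeders < minseed or leechers < minleech:
        return False          # too few peers to be worth grabbing
    if confirmed_only and not verified:
        return False          # provider is configured to take only verified torrents
    return True

# Example: a torrent with 5 seeders passes a minseed of 1 but fails a minseed of 10.
assert accept_result(5, 20, True, minseed=1)
assert not accept_result(5, 20, True, minseed=10)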
@@ -356,7 +338,16 @@ class KATCache(tvcache.TVCache):
         self.minTime = 20
 
     def _getRSSData(self):
-        search_params = {'RSS': ['rss']}
-        return {'entries': self.provider._doSearch(search_params)}
+        data = {'entries': None}
+
+        for url in self.provider.urls:
+            searchURL = url + 'tv/?field=time_add&sorder=desc&rss=1'
+            logger.log(u"KAT cache update URL: " + searchURL, logger.DEBUG)
+
+            data = self.getRSSFeed(url, items=['entries', 'feed'])['entries']
+            if data:
+                break
+
+        return data
 
 provider = KATProvider()
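Both _doSearch and the cache's _getRSSData now share the same failover idea: walk the mirror list and keep the first feed that actually returns entries. A minimal generic sketch of that pattern; first_working_feed and fetch_feed are placeholders for illustration, not SickRage APIs:

# Illustrative mirror-failover loop; fetch_feed stands in for whatever returns a list
# of parsed RSS entries (or an empty list / None on failure).
def first_working_feed(mirrors, path, fetch_feed):
    for base in mirrors:
        entries = fetch_feed(base + path)
        if entries:              # first mirror that yields results wins
            return base, entries
    return None, []              # every mirror failed or returned an empty feed

# Example with a stubbed fetcher:
# base, entries = first_working_feed(['http://kickass.so/', 'http://katproxy.com/'],
#                                    'tv/?field=time_add&sorder=desc&rss=1',
#                                    fetch_feed=lambda url: [])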
@@ -901,12 +901,7 @@ class Home(WebRoot):
 
     def testTrakt(self, api=None, username=None, password=None):
         self.set_header('Cache-Control', 'max-age=0,no-cache,no-store')
 
-        result = notifiers.trakt_notifier.test_notify(api, username, password)
-        if result:
-            return "Test notice sent successfully to Trakt"
-        else:
-            return "Test notice failed to Trakt"
+        return notifiers.trakt_notifier.test_notify(api, username, password)
 
 
     def loadShowNotifyLists(self, *args, **kwargs):
@@ -3148,7 +3143,7 @@ class Manage(WebRoot):
 
         if re.search('localhost', sickbeard.TORRENT_HOST):
 
-            if sickbeard.LOCALHOST_IP == '':
+            if not sickbeard.LOCALHOST_IP:
                 t.webui_url = re.sub('localhost', helpers.get_lan_ip(), sickbeard.TORRENT_HOST)
             else:
                 t.webui_url = re.sub('localhost', sickbeard.LOCALHOST_IP, sickbeard.TORRENT_HOST)
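The truthiness check above is slightly broader than the old equality test: "not LOCALHOST_IP" treats both an empty string and None (a setting that was never written) as unset, whereas "== ''" only caught the empty string. A quick illustration:

# '' and None are both falsy, so "not value" also covers a setting that was never written.
assert ('' == '') and (not '')          # old and new checks agree on the empty string
assert (None == '') is False            # the old check misses None ...
assert not None                         # ... the new check treats it as unset too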