Add support for more sections on SceneAccess
Add (hopefully fully functioning) support for the non-scene and foreign sections of the SceneAccess provider.
parent f8ec897010
commit d72966c203
@@ -44,6 +44,8 @@ class SCCProvider(generic.TorrentProvider):
                      'login': 'https://sceneaccess.eu/login',
                      'detail': 'https://www.sceneaccess.eu/details?id=%s',
                      'search': 'https://sceneaccess.eu/browse?search=%s&method=1&%s',
+                     'nonscene': 'https://sceneaccess.eu/nonscene?search=%s&method=1&c44=44&c45=44',
+                     'foreign': 'https://sceneaccess.eu/foreign?search=%s&method=1&c34=34&c33=33',
                      'archive': 'https://sceneaccess.eu/archive?search=%s&method=1&c26=26',
                      'download': 'https://www.sceneaccess.eu/%s',
                      }
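A quick illustration of how these templates get filled in (not part of the commit; the search term and category string below are made-up example values): 'nonscene' and 'foreign' take only the search term, while 'search' also takes the provider's category string, and the hard-coded c44=44&c45=44 / c34=34&c33=33 parameters appear to pin the new sections to their TV categories.

    # Sketch only: interpolating the new URL templates the same way the
    # provider interpolates the existing 'search' template.
    urls = {
        'search': 'https://sceneaccess.eu/browse?search=%s&method=1&%s',
        'nonscene': 'https://sceneaccess.eu/nonscene?search=%s&method=1&c44=44&c45=44',
        'foreign': 'https://sceneaccess.eu/foreign?search=%s&method=1&c34=34&c33=33',
    }

    search_string = 'Some.Show.S01E02'  # hypothetical example query
    categories = 'c27=27&c17=17'        # hypothetical example category string

    searchURL = urls['search'] % (search_string, categories)
    nonsceneSearchURL = urls['nonscene'] % (search_string)
    foreignSearchURL = urls['foreign'] % (search_string)

    print(searchURL)
    print(nonsceneSearchURL)
    print(foreignSearchURL)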
@@ -159,53 +161,67 @@ class SCCProvider(generic.TorrentProvider):

                 if mode == 'Season':
                     searchURL = self.urls['archive'] % (search_string)
+                    data = self.getURL(searchURL, headers=self.headers)
                 else:
                     searchURL = self.urls['search'] % (search_string, self.categories)
+                    nonsceneSearchURL = self.urls['nonscene'] % (search_string)
+                    foreignSearchURL = self.urls['foreign'] % (search_string)
+                    data = [self.getURL(searchURL, headers=self.headers),
+                            self.getURL(nonsceneSearchURL, headers=self.headers),
+                            self.getURL(foreignSearchURL, headers=self.headers)]
+                    logger.log(u"Search string: " + nonsceneSearchURL, logger.DEBUG)
+                    logger.log(u"Search string: " + foreignSearchURL, logger.DEBUG)

                 logger.log(u"Search string: " + searchURL, logger.DEBUG)

-                data = self.getURL(searchURL, headers=self.headers)
                 if not data:
                     continue

                 try:
-                    html = BeautifulSoup(data, features=["html5lib", "permissive"])
-
-                    torrent_table = html.find('table', attrs={'id': 'torrents-table'})
-                    torrent_rows = torrent_table.find_all('tr') if torrent_table else []
-
-                    #Continue only if one Release is found
-                    if len(torrent_rows) < 2:
-                        if html.title:
-                            source = self.name + " (" + html.title.string + ")"
-                        else:
-                            source = self.name
-                        logger.log(u"The Data returned from " + source + " does not contain any torrent", logger.DEBUG)
-                        continue
-
-                    for result in torrent_table.find_all('tr')[1:]:
-
-                        try:
-                            link = result.find('td', attrs={'class': 'ttr_name'}).find('a')
-                            url = result.find('td', attrs={'class': 'td_dl'}).find('a')
-                            title = link.string
-                            download_url = self.urls['download'] % url['href']
-                            id = int(link['href'].replace('details?id=', ''))
-                            seeders = int(result.find('td', attrs={'class': 'ttr_seeders'}).string)
-                            leechers = int(result.find('td', attrs={'class': 'ttr_leechers'}).string)
-                        except (AttributeError, TypeError):
-                            continue
+                    for dataItem in data:
+                        html = BeautifulSoup(dataItem, features=["html5lib", "permissive"])
+
+                        torrent_table = html.find('table', attrs={'id': 'torrents-table'})
+                        torrent_rows = torrent_table.find_all('tr') if torrent_table else []
+
+                        #Continue only if one Release is found
+                        if len(torrent_rows) < 2:
+                            if html.title:
+                                source = self.name + " (" + html.title.string + ")"
+                            else:
+                                source = self.name
+                            logger.log(u"The Data returned from " + source + " does not contain any torrent", logger.DEBUG)
+                            continue
+
+                        for result in torrent_table.find_all('tr')[1:]:
+
+                            try:
+                                link = result.find('td', attrs={'class': 'ttr_name'}).find('a')
+                                url = result.find('td', attrs={'class': 'td_dl'}).find('a')
+                                title = link.string
+                                download_url = self.urls['download'] % url['href']
+                                id = int(link['href'].replace('details?id=', ''))
+                                seeders = int(result.find('td', attrs={'class': 'ttr_seeders'}).string)
+                                leechers = int(result.find('td', attrs={'class': 'ttr_leechers'}).string)
+                            except (AttributeError, TypeError):
+                                continue

-                        if mode != 'RSS' and seeders == 0:
-                            continue
-
-                        if not title or not download_url:
-                            continue
-
-                        item = title, download_url, id, seeders, leechers
-                        logger.log(u"Found result: " + title + "(" + searchURL + ")", logger.DEBUG)
-
-                        items[mode].append(item)
+                            if mode != 'RSS' and seeders == 0:
+                                continue
+
+                            if re.search('<title>SceneAccess \| Non-Scene</title>', dataItem):
+                                logger.log(u"Found result: " + title + "(" + nonsceneSearchURL + ")", logger.DEBUG)
+                            elif re.search('<title>SceneAccess \| Foreign</title>', dataItem):
+                                logger.log(u"Found result: " + title + "(" + foreignSearchURL + ")", logger.DEBUG)
+                            else:
+                                logger.log(u"Found result: " + title + "(" + searchURL + ")", logger.DEBUG)
+
+                            if not title or not download_url:
+                                continue
+
+                            item = title, download_url, id, seeders, leechers
+
+                            items[mode].append(item)

                 except Exception, e:
                     logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)
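For readers skimming the diff, here is a minimal standalone sketch of the flow the new code introduces, assuming a hypothetical fetcher callable in place of self.getURL() and a function name (search_all_sections) invented for illustration: fetch the scene, non-scene and foreign listings for one search string, then identify which section each page came from by its <title>, which is the same check the added re.search() calls perform before logging.

    import re

    def search_all_sections(search_string, categories, fetcher):
        # Standalone sketch, not SickRage code: query the scene, non-scene and
        # foreign sections, then recognise which section a page came from by
        # its <title>. `fetcher` is any callable that takes a URL and returns
        # the page HTML as a string (hypothetical stand-in for getURL()).
        search_url = 'https://sceneaccess.eu/browse?search=%s&method=1&%s' % (search_string, categories)
        nonscene_url = 'https://sceneaccess.eu/nonscene?search=%s&method=1&c44=44&c45=44' % search_string
        foreign_url = 'https://sceneaccess.eu/foreign?search=%s&method=1&c34=34&c33=33' % search_string

        for page in [fetcher(search_url), fetcher(nonscene_url), fetcher(foreign_url)]:
            if not page:
                continue
            # Same idea as the commit's title checks: the section is
            # recognisable from the page <title>.
            if re.search(r'<title>SceneAccess \| Non-Scene</title>', page):
                section = 'non-scene'
            elif re.search(r'<title>SceneAccess \| Foreign</title>', page):
                section = 'foreign'
            else:
                section = 'scene'
            yield section, page

In the provider itself the same idea lives inside _doSearch(): data becomes a list of three responses, the new for dataItem in data: loop parses each one with BeautifulSoup, and the title check only decides which search URL to mention in the debug log.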