# coding=UTF-8
# Author: Dennis Lutter <lad1337@gmail.com>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.

from __future__ import with_statement

import unittest
import sys, os.path

sys.path.append(os.path.abspath('..'))
sys.path.append(os.path.abspath('../lib'))
import urlparse

import test_lib as test
from bs4 import BeautifulSoup
from sickbeard.helpers import getURL


class TorrentBasicTests(test.SickbeardTestDBCase):
    def test_search(self):
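        # Smoke test: fetch a Kickass Torrents search results page, parse it with
        # BeautifulSoup and print the title of every torrent row that parses cleanly.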
        self.url = 'http://kickass.to/'
        searchURL = 'http://kickass.to/usearch/American%20Dad%21%20S08%20-S08E%20category%3Atv/?field=seeders&sorder=desc'

        html = getURL(searchURL)
        if not html:
            return
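        # parse the page that came back; html5lib is a forgiving parser for real-world markup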
        soup = BeautifulSoup(html, features=["html5lib", "permissive"])

        torrent_table = soup.find('table', attrs={'class': 'data'})
        torrent_rows = torrent_table.find_all('tr') if torrent_table else []

        # cleanup memory
        soup.clear(True)

        # Continue only if at least one release is found
        if len(torrent_rows) < 2:
            print(u"The data returned does not contain any torrents")
            return
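        # the first row is the table header, so only the rows after it are torrents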
        for tr in torrent_rows[1:]:
            try:
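                # grab the detail link, row id, title, magnet url, the verified/trusted
                # flags and the seeder/leecher counts from the row's cells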
                link = urlparse.urljoin(self.url, (tr.find('div', {'class': 'torrentname'}).find_all('a')[1])['href'])
                id = tr.get('id')[-7:]
                title = (tr.find('div', {'class': 'torrentname'}).find_all('a')[1]).text \
                    or (tr.find('div', {'class': 'torrentname'}).find_all('a')[2]).text
                url = tr.find('a', 'imagnet')['href']
                verified = True if tr.find('a', 'iverify') else False
                trusted = True if tr.find('img', {'alt': 'verified'}) else False
                seeders = int(tr.find_all('td')[-2].text)
                leechers = int(tr.find_all('td')[-1].text)
            except (AttributeError, TypeError):
                continue

            print title


if __name__ == "__main__":
    print "=================="
    print "STARTING - TORRENT BASIC TESTS"
    print "=================="
    print "######################################################################"
    suite = unittest.TestLoader().loadTestsFromTestCase(TorrentBasicTests)
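    # run the loaded tests; without this the suite is built but never executed
    # (verbosity=2 is an assumed, conventional choice)
    unittest.TextTestRunner(verbosity=2).run(suite)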