# Author: Idan Gutman
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage.  If not, see <http://www.gnu.org/licenses/>.

import re
import traceback
import datetime
import urlparse

import sickbeard
import generic

from sickbeard.common import Quality, cpu_presets
from sickbeard import logger
from sickbeard import tvcache
from sickbeard import db
from sickbeard import classes
from sickbeard import helpers
from sickbeard import show_name_helpers
from sickbeard.exceptions import ex, AuthException
from sickbeard import clients
from lib import requests
from lib.requests import exceptions
from sickbeard.bs4_parser import BS4Parser
from lib.unidecode import unidecode
from sickbeard.helpers import sanitizeSceneName


class FreshOnTVProvider(generic.TorrentProvider):

    def __init__(self):

        generic.TorrentProvider.__init__(self, "FreshOnTV")

        self.supportsBacklog = True
        self.enabled = False
        self._uid = None
        self._hash = None
        self.username = None
        self.password = None
        self.ratio = None
        self.minseed = None
        self.minleech = None
        self.freeleech = False

        self.cache = FreshOnTVCache(self)

        self.urls = {'base_url': 'http://freshon.tv/',
                     'login': 'http://freshon.tv/login.php?action=makelogin',
                     'detail': 'http://freshon.tv/details.php?id=%s',
                     'search': 'http://freshon.tv/browse.php?incldead=%s&words=0&cat=0&search=%s',
                     'download': 'http://freshon.tv/download.php?id=%s&type=torrent'}

        self.url = self.urls['base_url']

        self.cookies = None

    def isEnabled(self):
        return self.enabled

    def imageName(self):
        return 'freshontv.png'

    def getQuality(self, item, anime=False):
        quality = Quality.sceneQuality(item[0], anime)
        return quality

    def _checkAuth(self):

        if not self.username or not self.password:
            raise AuthException("Your authentication credentials for " + self.name + " are missing, check your config.")

        return True

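    # Login strategy: reuse an authenticated session whenever possible. If the
    # requests cookiejar already holds values, or a uid/pass cookie pair was
    # cached from an earlier login, no new POST is made; otherwise the
    # credentials are submitted to login.php and the resulting cookies cached.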
    def _doLogin(self):

        if any(requests.utils.dict_from_cookiejar(self.session.cookies).values()):
            return True

        if self._uid and self._hash:
            # cached cookies from a previous login are still valid
            requests.utils.add_dict_to_cookiejar(self.session.cookies, self.cookies)
            return True
        else:
            login_params = {'username': self.username,
                            'password': self.password,
                            'login': 'submit'}

            if not self.session:
                self.session = requests.Session()

            try:
                response = self.session.post(self.urls['login'], data=login_params, timeout=30, verify=False)
            except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError) as e:
                logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR)
                return False

            if re.search('Username does not exist in the userbase or the account is not confirmed yet.', response.text):
                logger.log(u'Invalid username or password for ' + self.name + ', check your settings', logger.ERROR)
                return False

            try:
                if requests.utils.dict_from_cookiejar(self.session.cookies)['uid'] and requests.utils.dict_from_cookiejar(self.session.cookies)['pass']:
                    self._uid = requests.utils.dict_from_cookiejar(self.session.cookies)['uid']
                    self._hash = requests.utils.dict_from_cookiejar(self.session.cookies)['pass']

                    self.cookies = {'uid': self._uid,
                                    'pass': self._hash}
                    return True
            except Exception:
                pass

            logger.log(u'Unable to obtain cookie for FreshOnTV', logger.ERROR)
            return False

    def _get_season_search_strings(self, ep_obj):

        search_string = {'Season': []}
        for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
            if ep_obj.show.air_by_date or ep_obj.show.sports:
                ep_string = show_name + '.' + str(ep_obj.airdate).split('-')[0]
            elif ep_obj.show.anime:
                ep_string = show_name + '.' + "%d" % ep_obj.scene_absolute_number
            else:
                ep_string = show_name + '.S%02d' % int(ep_obj.scene_season)  # 1) showName.SXX

            search_string['Season'].append(ep_string)

        return [search_string]

    def _get_episode_search_strings(self, ep_obj, add_string=''):

        search_string = {'Episode': []}

        if not ep_obj:
            return []

        if self.show.air_by_date:
            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
                ep_string = sanitizeSceneName(show_name) + ' ' + \
                            str(ep_obj.airdate).replace('-', '|')
                search_string['Episode'].append(ep_string)
        elif self.show.sports:
            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
                ep_string = sanitizeSceneName(show_name) + ' ' + \
                            str(ep_obj.airdate).replace('-', '|') + '|' + \
                            ep_obj.airdate.strftime('%b')
                search_string['Episode'].append(ep_string)
        elif self.show.anime:
            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
                ep_string = sanitizeSceneName(show_name) + ' ' + \
                            "%i" % int(ep_obj.scene_absolute_number)
                search_string['Episode'].append(ep_string)
        else:
            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
                ep_string = show_name_helpers.sanitizeSceneName(show_name) + ' ' + \
                            sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season,
                                                                  'episodenumber': ep_obj.scene_episode} + ' %s' % add_string

                search_string['Episode'].append(re.sub(r'\s+', ' ', ep_string))

        return [search_string]

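    # Search strategy: each search string is substituted into the 'search' URL
    # template (the first slot is the freeleech filter, the second the query),
    # and the returned browse page is scraped row by row with BeautifulSoup.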
    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0):

        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        freeleech = '3' if self.freeleech else '0'

        if not self._doLogin():
            return results

        for mode in search_params.keys():
            for search_string in search_params[mode]:

                if isinstance(search_string, unicode):
                    search_string = unidecode(search_string)

                searchURL = self.urls['search'] % (freeleech, search_string)

                logger.log(u"Search URL: " + searchURL, logger.DEBUG)

                # returns top 15 results by default, expandable in user profile to 100
                data = self.getURL(searchURL)
                if not data:
                    continue

                try:
                    with BS4Parser(data, features=["html5lib", "permissive"]) as html:
                        torrent_table = html.find('table', attrs={'class': 'frame'})
                        torrent_rows = torrent_table.findChildren('tr') if torrent_table else []

                        # continue only if at least one release is found
                        if len(torrent_rows) < 2:
                            logger.log(u"The data returned from " + self.name + " does not contain any torrents",
                                       logger.DEBUG)
                            continue

                        # skip the column header row
                        for result in torrent_rows[1:]:
                            cells = result.findChildren('td')

                            link = cells[1].find('a', attrs={'class': 'torrent_name_link'})
                            # skip if the torrent has been nuked due to poor quality
                            if cells[1].find('img', alt='Nuked') is not None:
                                continue

                            torrent_id = link['href'].replace('/details.php?id=', '')

                            try:
                                if link.has_attr('title'):
                                    title = cells[1].find('a', {'class': 'torrent_name_link'})['title']
                                else:
                                    title = link.contents[0]
                                download_url = self.urls['download'] % (torrent_id)
                                id = int(torrent_id)

                                seeders = int(cells[8].find('a', {'class': 'link'}).span.contents[0].strip())
                                leechers = int(cells[9].find('a', {'class': 'link'}).contents[0].strip())
                            except (AttributeError, TypeError):
                                continue

                            # filter unseeded torrents
                            if mode != 'RSS' and (seeders < self.minseed or leechers < self.minleech):
                                continue

                            if not title or not download_url:
                                continue

                            item = title, download_url, id, seeders, leechers
                            logger.log(u"Found result: " + title + " (" + searchURL + ")", logger.DEBUG)

                            items[mode].append(item)

                except Exception as e:
                    logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)

            # for each search mode sort all the items by seeders
            items[mode].sort(key=lambda tup: tup[3], reverse=True)

            results += items[mode]

        return results

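    # Each parsed result is a plain tuple: (title, download_url, id, seeders, leechers).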
    def _get_title_and_url(self, item):

        title, url, id, seeders, leechers = item

        if title:
            title = u'' + title
            title = title.replace(' ', '.')

        if url:
            url = str(url).replace('&amp;', '&')

        return (title, url)

    def findPropers(self, search_date=None):

        results = []

        # default the search window at call time rather than import time
        if search_date is None:
            search_date = datetime.datetime.today()

        myDB = db.DBConnection()
        sqlResults = myDB.select(
            'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
            ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
            ' WHERE e.airdate >= ' + str(search_date.toordinal()) +
            ' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
            ' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
        )

        if not sqlResults:
            return []

        for sqlshow in sqlResults:
            self.show = helpers.findCertainShow(sickbeard.showList, int(sqlshow["showid"]))
            if self.show:
                curEp = self.show.getEpisode(int(sqlshow["season"]), int(sqlshow["episode"]))

                searchString = self._get_episode_search_strings(curEp, add_string='PROPER|REPACK')

                for item in self._doSearch(searchString[0]):
                    title, url = self._get_title_and_url(item)
                    results.append(classes.Proper(title, url, datetime.datetime.today(), self.show))

        return results

    def seedRatio(self):
        return self.ratio

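
# The cache polls the provider every minTime minutes by issuing an empty
# 'RSS'-mode search (the site's browse page with no query string).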
class FreshOnTVCache(tvcache.TVCache):

    def __init__(self, provider):

        tvcache.TVCache.__init__(self, provider)

        # poll delay in minutes
        self.minTime = 20

    def _getRSSData(self):
        search_params = {'RSS': ['']}
        return {'entries': self.provider._doSearch(search_params)}


provider = FreshOnTVProvider()
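
# A minimal usage sketch (illustrative only; assumes a configured SickRage
# runtime, and the credentials below are placeholders, not real values):
#
#     provider.username = 'example-user'
#     provider.password = 'example-pass'
#     provider.enabled = True
#     for item in provider._doSearch({'Episode': ['Show.Name S01E01']}):
#         print provider._get_title_and_url(item)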