From ad17b81560715a616dc8221e988941823b239d91 Mon Sep 17 00:00:00 2001
From: Justin Nielsen
Date: Sun, 13 Jul 2014 10:29:53 +0800
Subject: [PATCH] Added FreshOnTV Provider

Missed some files
Removed seeders==0 as per request
---
 gui/slick/images/providers/freshontv.png | Bin 0 -> 766 bytes
 sickbeard/__init__.py                    |   2 +-
 sickbeard/providers/__init__.py          |   3 +-
 sickbeard/providers/freshontv.py         | 375 +++++++++++++++++++++++
 4 files changed, 378 insertions(+), 2 deletions(-)
 create mode 100755 gui/slick/images/providers/freshontv.png
 mode change 100644 => 100755 sickbeard/__init__.py
 mode change 100644 => 100755 sickbeard/providers/__init__.py
 create mode 100755 sickbeard/providers/freshontv.py

diff --git a/gui/slick/images/providers/freshontv.png b/gui/slick/images/providers/freshontv.png
new file mode 100755
index 0000000000000000000000000000000000000000..9b15d1b73e0b8d8f628f5d363e0e2e8040969bad
GIT binary patch
literal 766
[766 bytes of base85-encoded PNG data omitted]

literal 0
HcmV?d00001

diff --git a/sickbeard/__init__.py b/sickbeard/__init__.py
old mode 100644
new mode 100755
index 914249ab..6c502296
--- a/sickbeard/__init__.py
+++ b/sickbeard/__init__.py
@@ -32,7 +32,7 @@ import sys
 from sickbeard import providers, metadata, config, webserveInit
 from sickbeard.providers.generic import GenericProvider
 from providers import ezrss, tvtorrents, btn, newznab, womble, thepiratebay, torrentleech, kat, iptorrents, \
-    omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, nextgen, speedcd, nyaatorrents, fanzub, torrentbytes, animezb
+    omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, nextgen, speedcd, nyaatorrents, fanzub, torrentbytes, animezb, freshontv
 from sickbeard.config import CheckSection, check_setting_int, check_setting_str, check_setting_float, ConfigMigrator, \
     naming_ep_type
 from sickbeard import searchBacklog, showUpdater, versionChecker, properFinder, autoPostProcesser, \
diff --git a/sickbeard/providers/__init__.py b/sickbeard/providers/__init__.py
old mode 100644
new mode 100755
index 47e52dcd..09ddb9c7
--- a/sickbeard/providers/__init__.py
+++ b/sickbeard/providers/__init__.py
@@ -34,7 +34,8 @@ __all__ = ['ezrss',
            'nyaatorrents',
            'fanzub',
            'torrentbytes',
-           'animezb'
+           'animezb',
+           'freshontv'
            ]
 
 import sickbeard
diff --git a/sickbeard/providers/freshontv.py b/sickbeard/providers/freshontv.py
new file mode 100755
index 00000000..de740e2b
--- /dev/null
+++ b/sickbeard/providers/freshontv.py
@@ -0,0 +1,375 @@
+# Author: Idan Gutman
+# URL: http://code.google.com/p/sickbeard/
+#
+# This file is part of SickRage.
+#
+# SickRage is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# SickRage is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
+
+import re
+import traceback
+import datetime
+import urlparse
+import time
+import sickbeard
+import generic
+from sickbeard.common import Quality, cpu_presets
+from sickbeard import logger
+from sickbeard import tvcache
+from sickbeard import db
+from sickbeard import classes
+from sickbeard import helpers
+from sickbeard import show_name_helpers
+from sickbeard.common import Overview
+from sickbeard.exceptions import ex
+from sickbeard import clients
+from lib import requests
+from lib.requests import exceptions
+from bs4 import BeautifulSoup
+from lib.unidecode import unidecode
+from sickbeard.helpers import sanitizeSceneName
+
+
+class FreshOnTVProvider(generic.TorrentProvider):
+
+    urls = {'base_url': 'http://freshon.tv/',
+            'login': 'http://freshon.tv/login.php?action=makelogin',
+            'detail': 'http://freshon.tv/details.php?id=%s',
+            'search': 'http://freshon.tv/browse.php?incldead=%s&words=0&cat=0&search=%s',
+            'download': 'http://freshon.tv/download.php?id=%s&type=torrent',
+            }
+
+    def __init__(self):
+
+        generic.TorrentProvider.__init__(self, "FreshOnTV")
+
+        self.supportsBacklog = True
+
+        self.enabled = False
+        self._uid = None
+        self._hash = None
+        self.username = None
+        self.password = None
+        self.ratio = None
+        self.minseed = None
+        self.minleech = None
+        self.freeleech = False
+
+        self.cache = FreshOnTVCache(self)
+
+        self.url = self.urls['base_url']
+        self.cookies = None
+
+    def isEnabled(self):
+        return self.enabled
+
+    def imageName(self):
+        return 'freshontv.png'
+
+    def getQuality(self, item, anime=False):
+
+        quality = Quality.sceneQuality(item[0], anime)
+        return quality
+
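+    # Login flow: reuse any cookies already live on the session; fall back to
+    # the 'uid'/'pass' pair cached from a previous login; otherwise POST the
+    # configured credentials and cache the cookies the site hands back.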
+    def _doLogin(self):
+
+        if not self.session:
+            self.session = requests.Session()
+
+        if any(requests.utils.dict_from_cookiejar(self.session.cookies).values()):
+            return True
+
+        if self._uid and self._hash:
+            requests.utils.add_dict_to_cookiejar(self.session.cookies, self.cookies)
+            return True
+
+        login_params = {'username': self.username,
+                        'password': self.password,
+                        'login': 'submit'
+                        }
+
+        try:
+            response = self.session.post(self.urls['login'], data=login_params, timeout=30)
+        except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
+            logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR)
+            return False
+
+        if re.search('Username does not exist in the userbase or the account is not confirmed yet.', response.text):
+            logger.log(u'Invalid username or password for ' + self.name + '. Check your settings.', logger.ERROR)
+            return False
+
+        cookie_dict = requests.utils.dict_from_cookiejar(self.session.cookies)
+        if cookie_dict.get('uid') and cookie_dict.get('pass'):
+            self._uid = cookie_dict['uid']
+            self._hash = cookie_dict['pass']
+            self.cookies = {'uid': self._uid,
+                            'pass': self._hash
+                            }
+            return True
+        else:
+            logger.log(u'Unable to obtain cookie for FreshOnTV', logger.ERROR)
+            return False
+
+    def _get_season_search_strings(self, ep_obj):
+
+        search_string = {'Season': []}
+        for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
+            if ep_obj.show.air_by_date or ep_obj.show.sports:
+                ep_string = show_name + '.' + str(ep_obj.airdate).split('-')[0]
+            elif ep_obj.show.anime:
+                ep_string = show_name + '.' + "%d" % ep_obj.scene_absolute_number
+            else:
+                ep_string = show_name + '.S%02d' % int(ep_obj.scene_season)  # 1) showName SXX
+
+            search_string['Season'].append(ep_string)
+
+        return [search_string]
+
+    def _get_episode_search_strings(self, ep_obj, add_string=''):
+
+        search_string = {'Episode': []}
+
+        if not ep_obj:
+            return []
+
+        if self.show.air_by_date:
+            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
+                ep_string = sanitizeSceneName(show_name) + ' ' + \
+                            str(ep_obj.airdate).replace('-', '|')
+                search_string['Episode'].append(ep_string)
+        elif self.show.sports:
+            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
+                ep_string = sanitizeSceneName(show_name) + ' ' + \
+                            str(ep_obj.airdate).replace('-', '|') + '|' + \
+                            ep_obj.airdate.strftime('%b')
+                search_string['Episode'].append(ep_string)
+        elif self.show.anime:
+            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
+                ep_string = sanitizeSceneName(show_name) + ' ' + \
+                            "%i" % int(ep_obj.scene_absolute_number)
+                search_string['Episode'].append(ep_string)
+        else:
+            for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
+                ep_string = sanitizeSceneName(show_name) + ' ' + \
+                            sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season,
+                                                                  'episodenumber': ep_obj.scene_episode}
+
+                # append add_string (e.g. 'PROPER|REPACK' from findPropers) so
+                # proper searches actually differ from normal episode searches
+                if add_string:
+                    ep_string += ' ' + add_string
+
+                search_string['Episode'].append(re.sub(r'\s+', ' ', ep_string))
+
+        return [search_string]
+
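+    # Search flow: one browse.php request per search string.  The first URL
+    # slot ('incldead') doubles as the freeleech filter -- the site appears to
+    # treat '3' as freeleech-only and '0' as unfiltered.  Results are parsed
+    # out of the 'frame' table, nuked and under-seeded torrents are dropped,
+    # and what remains is sorted by seeder count.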
+ "%d" % ep_obj.scene_absolute_number + else: + ep_string = show_name + '.S%02d' % int(ep_obj.scene_season) #1) showName SXX + + search_string['Season'].append(ep_string) + + return [search_string] + + def _get_episode_search_strings(self, ep_obj, add_string=''): + + search_string = {'Episode': []} + + if not ep_obj: + return [] + + if self.show.air_by_date: + for show_name in set(show_name_helpers.allPossibleShowNames(self.show)): + ep_string = sanitizeSceneName(show_name) + ' ' + \ + str(ep_obj.airdate).replace('-', '|') + search_string['Episode'].append(ep_string) + elif self.show.sports: + for show_name in set(show_name_helpers.allPossibleShowNames(self.show)): + ep_string = sanitizeSceneName(show_name) + ' ' + \ + str(ep_obj.airdate).replace('-', '|') + '|' + \ + ep_obj.airdate.strftime('%b') + search_string['Episode'].append(ep_string) + elif self.show.anime: + for show_name in set(show_name_helpers.allPossibleShowNames(self.show)): + ep_string = sanitizeSceneName(show_name) + ' ' + \ + "%i" % int(ep_obj.scene_absolute_number) + search_string['Episode'].append(ep_string) + else: + for show_name in set(show_name_helpers.allPossibleShowNames(self.show)): + ep_string = show_name_helpers.sanitizeSceneName(show_name) + ' ' + \ + sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season, + 'episodenumber': ep_obj.scene_episode} + + search_string['Episode'].append(re.sub('\s+', ' ', ep_string)) + + return [search_string] + + def _doSearch(self, search_params, epcount=0, age=0): + + results = [] + items = {'Season': [], 'Episode': [], 'RSS': []} + + freeleech = '3' if self.freeleech else '0' + + if not self._doLogin(): + return [] + + + for mode in search_params.keys(): + for search_string in search_params[mode]: + + if isinstance(search_string, unicode): + search_string = unidecode(search_string) + + + searchURL = self.urls['search'] % (freeleech, search_string) + + logger.log(u"Search string: " + searchURL, logger.DEBUG) + + # returns top 15 results by default, expandable in user profile to 100 + data = self.getURL(searchURL) + if not data: + continue + + try: + html = BeautifulSoup(data, features=["html5lib", "permissive"]) + + torrent_table = html.find('table', attrs={'class': 'frame'}) + torrent_rows = torrent_table.findChildren('tr') if torrent_table else [] + #Continue only if one Release is found + if len(torrent_rows) < 2: + logger.log(u"The Data returned from " + self.name + " do not contains any torrent", + logger.DEBUG) + continue + + # skip colheader + for result in torrent_rows[1:]: + cells = result.findChildren('td') + + link = cells[1].find('a', attrs = {'class': 'torrent_name_link'}) + #skip if torrent has been nuked due to poor quality + if cells[1].find('img', alt='Nuked') != None: + continue + + torrent_id = link['href'].replace('/details.php?id=', '') + + + try: + if link.has_key('title'): + title = cells[1].find('a', {'class': 'torrent_name_link'})['title'] + else: + title = link.contents[0] + download_url = self.urls['download'] % (torrent_id) + id = int(torrent_id) + + seeders = int(cells[8].find('a', {'class': 'link'}).span.contents[0].strip()) + leechers = int(cells[9].find('a', {'class': 'link'}).contents[0].strip()) + except (AttributeError, TypeError): + continue + + #Filter unseeded torrent + if mode != 'RSS' and (seeders < self.minseed or leechers < self.minleech): + continue + + if not title or not download_url: + continue + + item = title, download_url, id, seeders, leechers + logger.log(u"Found result: " + title + "(" + searchURL + ")", 
+class FreshOnTVCache(tvcache.TVCache):
+
+    def __init__(self, provider):
+
+        tvcache.TVCache.__init__(self, provider)
+
+        # poll delay in minutes
+        self.minTime = 20
+
+    def updateCache(self):
+
+        # delete anything older than 7 days
+        logger.log(u"Clearing " + self.provider.name + " cache")
+        self._clearCache()
+
+        if not self.shouldUpdate():
+            return
+
+        search_params = {'RSS': ['']}
+        rss_results = self.provider._doSearch(search_params)
+
+        if rss_results:
+            self.setLastUpdate()
+        else:
+            return []
+
+        cl = []
+        for result in rss_results:
+
+            item = (result[0], result[1])
+            ci = self._parseItem(item)
+            if ci is not None:
+                cl.append(ci)
+
+        if cl:
+            myDB = self._getDB()
+            myDB.mass_action(cl)
+
+    def _parseItem(self, item):
+
+        (title, url) = item
+
+        if not title or not url:
+            return None
+
+        logger.log(u"Attempting to cache item: [" + title + "]", logger.DEBUG)
+
+        return self._addCacheEntry(title, url)
+
+
+provider = FreshOnTVProvider()
\ No newline at end of file