mirror of
https://github.com/moparisthebest/SickRage
synced 2024-11-05 17:05:03 -05:00
Added TNTVillage provider
This commit is contained in:
parent
c62d8f2c79
commit
b47d3baa35
BIN
gui/slick/images/providers/tntvillage.png
Normal file
BIN
gui/slick/images/providers/tntvillage.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 1.7 KiB |
@ -530,6 +530,45 @@ var show_nzb_providers = #if $sickbeard.USE_NZBS then "true" else "false"#;
|
||||
</div>
|
||||
#end if
|
||||
|
||||
#if $hasattr($curTorrentProvider, 'page'):
|
||||
<div class="field-pair">
|
||||
<label for="${curTorrentProvider.getID()}_page">
|
||||
<span class="component-title">Page Number:</span>
|
||||
<span class="component-desc">
|
||||
<input type="number" name="${curTorrentProvider.getID()}_page" id="${curTorrentProvider.getID()}_page" value="$curTorrentProvider.page" class="form-control input-sm input75"/>
|
||||
<p>number of torrent result pages to download for each query</p>
|
||||
</span>
|
||||
</label>
|
||||
</div>
|
||||
#end if
|
||||
|
||||
#if $hasattr($curTorrentProvider, 'cat'):
|
||||
<div class="field-pair">
|
||||
<label for="${curTorrentProvider.getID()}_cat">
|
||||
<span class="component-title">Category:</span>
|
||||
<span class="component-desc">
|
||||
<select name="${curTorrentProvider.getID()}_cat" id="${curTorrentProvider.getID()}_cat" class="form-control input-sm">
|
||||
#for $i in $curTorrentProvider.category_dict.keys():
|
||||
<option value="$curTorrentProvider.category_dict[$i]" #if $curTorrentProvider.category_dict[$i] == $curTorrentProvider.cat then "selected=\"selected\"" else ""#>$i</option>
|
||||
#end for
|
||||
</select>
|
||||
</span>
|
||||
</label>
|
||||
</div>
|
||||
#end if
|
||||
|
||||
#if $hasattr($curTorrentProvider, 'subtitle'):
|
||||
<div class="field-pair">
|
||||
<label for="${curTorrentProvider.getID()}_subtitle">
|
||||
<span class="component-title">Subtitled</span>
|
||||
<span class="component-desc">
|
||||
<input type="checkbox" name="${curTorrentProvider.getID()}_subtitle" id="${curTorrentProvider.getID()}_subtitle" #if $curTorrentProvider.subtitle then "checked=\"checked\"" else ""#/>
|
||||
<p>also select torrents without Italian language</p>
|
||||
</span>
|
||||
</label>
|
||||
</div>
|
||||
#end if
|
||||
|
||||
</div>
|
||||
#end for
|
||||
|
||||
|
@ -33,7 +33,7 @@ from github import Github
|
||||
from sickbeard import providers, metadata, config, webserveInit
|
||||
from sickbeard.providers.generic import GenericProvider
|
||||
from providers import ezrss, btn, newznab, womble, thepiratebay, torrentleech, kat, iptorrents, \
|
||||
omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, hounddawgs, nextgen, speedcd, nyaatorrents, fanzub, torrentbytes, animezb, \
|
||||
omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, hounddawgs, nextgen, speedcd, nyaatorrents, fanzub, torrentbytes, animezb, tntvillage, \
|
||||
freshontv, bitsoup, t411, tokyotoshokan
|
||||
from sickbeard.config import CheckSection, check_setting_int, check_setting_str, check_setting_float, ConfigMigrator, \
|
||||
naming_ep_type
|
||||
@ -1104,6 +1104,16 @@ def initialize(consoleLogging=True):
|
||||
curTorrentProvider.getID() + '_enable_backlog',
|
||||
1))
|
||||
|
||||
if hasattr(curTorrentProvider, 'page'):
|
||||
curTorrentProvider.page = check_setting_int(CFG, curTorrentProvider.getID().upper(),
|
||||
curTorrentProvider.getID() + '_page', 1)
|
||||
if hasattr(curTorrentProvider, 'cat'):
|
||||
curTorrentProvider.cat = check_setting_int(CFG, curTorrentProvider.getID().upper(),
|
||||
curTorrentProvider.getID() + '_cat', 0)
|
||||
if hasattr(curTorrentProvider, 'subtitle'):
|
||||
curTorrentProvider.subtitle = bool(check_setting_int(CFG, curTorrentProvider.getID().upper(),
|
||||
curTorrentProvider.getID() + '_subtitle', 0))
|
||||
|
||||
for curNzbProvider in [curProvider for curProvider in providers.sortedProviderList() if
|
||||
curProvider.providerType == GenericProvider.NZB]:
|
||||
curNzbProvider.enabled = bool(
|
||||
@ -1609,6 +1619,15 @@ def save_config():
|
||||
if hasattr(curTorrentProvider, 'enable_backlog'):
|
||||
new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID() + '_enable_backlog'] = int(
|
||||
curTorrentProvider.enable_backlog)
|
||||
if hasattr(curTorrentProvider, 'page'):
|
||||
new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID() + '_page'] = int(
|
||||
curTorrentProvider.page)
|
||||
if hasattr(curTorrentProvider, 'cat'):
|
||||
new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID() + '_cat'] = int(
|
||||
curTorrentProvider.cat)
|
||||
if hasattr(curTorrentProvider, 'subtitle'):
|
||||
new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID() + '_subtitle'] = int(
|
||||
curTorrentProvider.subtitle)
|
||||
|
||||
for curNzbProvider in [curProvider for curProvider in providers.sortedProviderList() if
|
||||
curProvider.providerType == GenericProvider.NZB]:
|
||||
|
@ -39,6 +39,7 @@ __all__ = ['ezrss',
|
||||
'bitsoup',
|
||||
't411',
|
||||
'tokyotoshokan',
|
||||
'tntvillage',
|
||||
]
|
||||
|
||||
import sickbeard
|
||||
|
@ -517,4 +517,4 @@ class ProviderProxy:
|
||||
else:
|
||||
regx = re.sub('//1', '', regx)
|
||||
|
||||
return regx
|
||||
return regx
|
||||
|
480
sickbeard/providers/tntvillage.py
Normal file
480
sickbeard/providers/tntvillage.py
Normal file
@ -0,0 +1,480 @@
|
||||
# Author: Giovanni Borri
|
||||
# Modified by gborri, https://github.com/gborri for TNTVillage
|
||||
#
|
||||
# This file is part of SickRage.
|
||||
#
|
||||
# SickRage is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# SickRage is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import re
|
||||
import traceback
|
||||
import datetime
|
||||
import urlparse
|
||||
import sickbeard
|
||||
import generic
|
||||
from sickbeard.common import Quality, cpu_presets
|
||||
from sickbeard import logger
|
||||
from sickbeard import tvcache
|
||||
from sickbeard import db
|
||||
from sickbeard import classes
|
||||
from sickbeard import helpers
|
||||
from sickbeard import show_name_helpers
|
||||
from sickbeard.exceptions import ex, AuthException
|
||||
from sickbeard import clients
|
||||
from lib import requests
|
||||
from lib.requests import exceptions
|
||||
from sickbeard.bs4_parser import BS4Parser
|
||||
from lib.unidecode import unidecode
|
||||
from sickbeard.helpers import sanitizeSceneName
|
||||
|
||||
# Forum category ids that are known not to contain TV releases.
# NOTE(review): not referenced elsewhere in this module's visible code —
# presumably used by callers to exclude non-TV categories; confirm.
category_excluded = {
    'Sport': 22,
    'Teatro': 23,
    'Video Musicali': 21,
    'Film': 4,
    'Musica': 2,
    'Students Releases': 13,
    'E Books': 3,
    'Linux': 6,
    'Macintosh': 9,
    'Windows Software': 10,
    'Pc Game': 11,
    'Playstation 2': 12,
    'Wrestling': 24,
    'Varie': 25,
    'Xbox': 26,
    'Immagini sfondi': 27,
    'Altri Giochi': 28,
    'Fumetteria': 30,
    'Trash': 31,
    'PlayStation 1': 32,
    'PSP Portable': 33,
    'A Book': 34,
    'Podcast': 35,
    'Edicola': 36,
    'Mobile': 37,
}
|
||||
|
||||
class TNTVillageProvider(generic.TorrentProvider):
|
||||
def __init__(self):
    """Set up the TNTVillage provider with its default (unconfigured) state."""

    generic.TorrentProvider.__init__(self, "TNTVillage")

    self.supportsBacklog = True

    # User-configurable state; populated later from the saved config.
    self.enabled = False
    self._uid = None
    self._hash = None
    self.username = None
    self.password = None
    self.ratio = None
    self.cat = None
    self.page = None
    self.subtitle = None
    self.minseed = None
    self.minleech = None

    # Forum category ids the user can pick from in the provider settings.
    self.category_dict = {
        'Serie TV': 29,
        'Cartoni': 8,
        'Anime': 7,
        'Programmi e Film TV': 1,
        'Documentari': 14,
        'All': 0,
    }

    _base = 'http://forum.tntvillage.scambioetico.org'
    self.urls = {
        'base_url': _base,
        'login': _base + '/index.php?act=Login&CODE=01',
        'detail': _base + '/index.php?showtopic=%s',
        'search': _base + '/?act=allreleases&%s',
        'search_page': _base + '/?act=allreleases&st={0}&{1}',
        'download': _base + '/index.php?act=Attach&type=post&id=%s',
    }

    self.url = self.urls['base_url']

    self.cache = TNTVillageCache(self)

    # Default category filter; overwritten from self.cat in _doSearch.
    self.categories = "cat=29"

    self.cookies = None
|
||||
|
||||
def isEnabled(self):
    """Return whether the user has enabled this provider."""
    return self.enabled
|
||||
|
||||
def imageName(self):
    """File name of the provider icon under gui/slick/images/providers."""
    return 'tntvillage.png'
|
||||
|
||||
def getQuality(self, item, anime=False):
    """Derive the release Quality from a result tuple's title (item[0])."""
    return Quality.sceneQuality(item[0], anime)
|
||||
|
||||
def _checkAuth(self):
    """Ensure both username and password are configured.

    Returns True when credentials are present; raises AuthException otherwise.
    """
    if self.username and self.password:
        return True

    raise AuthException("Your authentication credentials for " + self.name + " are missing, check your config.")
|
||||
|
||||
def _doLogin(self):
    """Log in to the TNTVillage forum with the configured credentials.

    Returns True on success, False when the connection fails or the
    credentials are rejected.  (A large block of commented-out cookie
    caching code was removed here; it was dead code.)
    """

    login_params = {'UserName': self.username,
                    'PassWord': self.password,
                    'CookieDate': 1,
                    'submit': 'Connettiti al Forum',
                    }

    try:
        # NOTE(review): verify=False disables TLS certificate checking;
        # kept as-is since the forum URLs in self.urls are plain HTTP anyway.
        response = self.session.post(self.urls['login'], data=login_params, timeout=30, verify=False)
    except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError) as e:
        logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR)
        return False

    # The forum answers 200 with an error page on bad credentials, so the
    # response body must be inspected as well as the status code.
    if re.search('Sono stati riscontrati i seguenti errori', response.text) \
            or re.search('<title>Connettiti</title>', response.text) \
            or response.status_code == 401:
        logger.log(u'Invalid username or password for ' + self.name + ' Check your settings', logger.ERROR)
        return False

    return True
|
||||
|
||||
def _get_season_search_strings(self, ep_obj):
    """Build season-level search strings for the show of *ep_obj*.

    Returns a one-element list containing a {'Season': [...]} dict, one
    entry per possible show name.
    """

    search_string = {'Season': []}

    for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
        if ep_obj.show.air_by_date or ep_obj.show.sports:
            # Air-by-date and sports shows are searched by airdate year.
            suffix = str(ep_obj.airdate).split('-')[0]
        elif ep_obj.show.anime:
            # Anime uses the absolute episode number instead of a season.
            suffix = "%d" % ep_obj.scene_absolute_number
        else:
            suffix = 'S%02d' % int(ep_obj.scene_season)

        search_string['Season'].append(show_name + ' ' + suffix)

    return [search_string]
|
||||
|
||||
def _get_episode_search_strings(self, ep_obj, add_string=''):
    """Build episode-level search strings for *ep_obj*.

    add_string is appended to regular-episode searches (e.g. 'PROPER|REPACK').
    Returns a one-element list containing an {'Episode': [...]} dict, or []
    when no episode is given.
    """

    search_string = {'Episode': []}

    if not ep_obj:
        return []

    show_names = set(show_name_helpers.allPossibleShowNames(self.show))

    if self.show.air_by_date:
        for show_name in show_names:
            ep_string = sanitizeSceneName(show_name) + ' ' + \
                        str(ep_obj.airdate).replace('-', '|')
            search_string['Episode'].append(ep_string)
    elif self.show.sports:
        for show_name in show_names:
            ep_string = sanitizeSceneName(show_name) + ' ' + \
                        str(ep_obj.airdate).replace('-', '|') + '|' + \
                        ep_obj.airdate.strftime('%b')
            search_string['Episode'].append(ep_string)
    elif self.show.anime:
        for show_name in show_names:
            ep_string = sanitizeSceneName(show_name) + ' ' + \
                        "%i" % int(ep_obj.scene_absolute_number)
            search_string['Episode'].append(ep_string)
    else:
        # Regular episodes: SxxEyy-style naming plus the optional suffix;
        # whitespace runs are collapsed only for this branch.
        for show_name in show_names:
            ep_string = show_name_helpers.sanitizeSceneName(show_name) + ' ' + \
                        sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season,
                                                              'episodenumber': ep_obj.scene_episode} + ' %s' % add_string
            search_string['Episode'].append(re.sub('\s+', ' ', ep_string))

    return [search_string]
|
||||
|
||||
def _reverseQuality(self, quality):
    """Map a Quality constant back to a scene-style quality suffix string.

    Returns '' for unmapped qualities.  The original implementation used an
    inconsistent if/if/elif chain; a lookup table is equivalent (the Quality
    constants are distinct) and easier to extend.  SDTV mapping to
    ' HDTV x264' matches the original code — presumably how the site tags
    plain SD rips; confirm before changing.
    """

    quality_map = {
        Quality.SDTV: ' HDTV x264',
        Quality.SDDVD: ' DVDRIP',
        Quality.HDTV: ' 720p HDTV x264',
        Quality.FULLHDTV: ' 1080p HDTV x264',
        Quality.RAWHDTV: ' 1080i HDTV mpeg2',
        Quality.HDWEBDL: ' 720p WEB-DL h264',
        Quality.FULLHDWEBDL: ' 1080p WEB-DL h264',
        Quality.HDBLURAY: ' 720p Bluray x264',
        Quality.FULLHDBLURAY: ' 1080p Bluray x264',
    }

    return quality_map.get(quality, '')
|
||||
|
||||
def _episodeQuality(self,torrent_rows):
    """
    Return The quality from the scene episode HTML row.

    The site advertises quality via small tag images in the row's second
    cell; their filenames are concatenated and matched against known tags.
    The WEB-DL check runs later, against the cell's plain text, because
    file_quality is reassigned before it (see the closure note below).
    """

    file_quality=''
    # NOTE(review): 'releaser' is assigned but never used in this method.
    releaser=''

    # All quality-tag images in the second <td> of the release row.
    img_all = (torrent_rows.find_all('td'))[1].find_all('img')

    for type in img_all:
        try:

            # Strip the theme path and image extension, keeping only the tag name.
            file_quality = file_quality + " " + type['src'].replace("style_images/mkportal-636/","").replace(".gif","").replace(".png","")

        except Exception, e:
            logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)

    # checkName reads file_quality at call time (closure over the local),
    # so calls made after the reassignment below match the cell text instead
    # of the image-derived string.
    checkName = lambda list, func: func([re.search(x, file_quality, re.I) for x in list])

    dvdOptions = checkName(["dvd", "dvdrip", "dvdmux"], any)
    blueRayOptions = checkName(["BD","BDmux", "BDrip", "Bluray"], any)
    sdOptions = checkName(["h264", "divx", "XviD", "tv"], any)
    hdOptions = checkName(["720p"], any)
    fullHD = checkName(["1080p", "fullHD"], any)

    # From here on checkName matches against the row's visible text.
    file_quality = (torrent_rows.find_all('td'))[1].get_text()

    webdl = checkName(["webdl", "webmux", "webrip", "dl-webmux", "web-dlmux", "webdl-mux", "web-dl", "webdlmux", "dlmux"], any)

    logger.log(u"dvdOptions: " + str(dvdOptions) + ", blueRayOptions: " + str(blueRayOptions) + ", sdOptions: " + str(sdOptions) + ", hdOptions: " + str(hdOptions) + ", fullHD: " + str(fullHD) + ", webdl: " + str(webdl), logger.DEBUG)

    # Decision table: each combination of detected tags maps to one Quality.
    if sdOptions and not blueRayOptions and not dvdOptions and not fullHD and not hdOptions:
        return Quality.SDTV
    elif dvdOptions:
        return Quality.SDDVD
    elif hdOptions and not blueRayOptions and not fullHD and not webdl:
        return Quality.HDTV
    elif not hdOptions and not blueRayOptions and fullHD and not webdl:
        return Quality.FULLHDTV
    elif hdOptions and not blueRayOptions and not fullHD and webdl:
        return Quality.HDWEBDL
    elif not hdOptions and not blueRayOptions and fullHD and webdl:
        return Quality.FULLHDWEBDL
    elif blueRayOptions and hdOptions and not fullHD:
        return Quality.HDBLURAY
    elif blueRayOptions and fullHD and not hdOptions:
        return Quality.FULLHDBLURAY
    else:
        return Quality.UNKNOWN
|
||||
|
||||
def _is_italian(self, torrent_rows):
    """Return 1 when the release row advertises Italian audio, else 0.

    The language label lives in a <span> inside the row's second cell;
    only the part before any 'sub' marker is checked, so a release tagged
    e.g. 'sub ita' does not count as Italian audio.
    """

    span_tag = (torrent_rows.find_all('td'))[1].find('b').find('span')

    name = str(span_tag).split('sub')[0]

    if re.search("ita", name, re.I):
        logger.log(u"Found Italian Language", logger.DEBUG)
        return 1

    return 0
|
||||
|
||||
def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0):
    """Search the TNTVillage release list for every string in *search_params*.

    search_params maps a mode ('Season', 'Episode' or 'RSS') to a list of
    search strings.  Returns a flat list of result tuples
    (title, download_url, id, seeders, leechers), sorted by seeders
    (descending) within each mode.
    """

    results = []
    items = {'Season': [], 'Episode': [], 'RSS': []}

    # Category filter comes from the user-configured category id.
    self.categories = "cat=" + str(self.cat)

    if not self._doLogin():
        return results

    for mode in search_params.keys():
        for search_string in search_params[mode]:

            if isinstance(search_string, unicode):
                search_string = unidecode(search_string)

            # self.page is the maximum number of result pages per query.
            last_page=0
            y=int(self.page)

            if search_string == '':
                continue

            search_string = str(search_string).replace('.', ' ')

            for x in range(0,y):

                # The site paginates 20 releases per page; z is the offset.
                z=x*20
                if last_page:
                    break

                logger.log(u"Page: " + str(x) + " of " + str(y), logger.DEBUG)

                # 'search_page' has {0}=offset and {1}=category placeholders;
                # non-RSS searches also append the text filter as {2}.
                if mode != 'RSS':
                    searchURL = (self.urls['search_page'] + '&filter={2}').format(z,self.categories,search_string)
                else:
                    searchURL = self.urls['search_page'].format(z,self.categories)

                logger.log(u"Search string: " + searchURL, logger.DEBUG)

                data = self.getURL(searchURL)
                if not data:
                    logger.log(u"data is empty", logger.DEBUG)
                    continue

                try:
                    with BS4Parser(data, features=["html5lib", "permissive"]) as html:
                        torrent_table = html.find('table', attrs = {'class' : 'copyright'})
                        torrent_rows = torrent_table.find_all('tr') if torrent_table else []

                        #Continue only if one Release is found
                        logger.log(u"Num of Row: "+ str(len(torrent_rows)), logger.DEBUG)

                        # Fewer than 3 rows means only header/footer rows are
                        # present, i.e. no releases on this page.
                        if len(torrent_rows)<3:
                            logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
                                       logger.DEBUG)
                            last_page=1
                            continue

                        # A short page must be the last one, so stop
                        # paginating after processing it.
                        if len(torrent_rows) < 42:
                            last_page=1

                        # Skip the two header rows.
                        for result in torrent_table.find_all('tr')[2:]:

                            try:
                                link = result.find('td').find('a')
                                title = link.string
                                # Last 8 chars of the attachment href are the torrent id.
                                id = ((result.find_all('td')[8].find('a'))['href'])[-8:]
                                download_url = self.urls['download'] % (id)
                                leechers = result.find_all('td')[3].find_all('td')[1].text
                                leechers = int(leechers.strip('[]'))
                                seeders = result.find_all('td')[3].find_all('td')[2].text
                                seeders = int(seeders.strip('[]'))
                            except (AttributeError, TypeError):
                                continue

                            # Seeder/leecher thresholds only apply to manual searches.
                            if mode != 'RSS' and (seeders < self.minseed or leechers < self.minleech):
                                continue

                            if not title or not download_url:
                                continue

                            logger.log(u"name: " + title + "", logger.DEBUG)
                            # The site marks quality with images rather than in
                            # the title, so rebuild a scene-style quality suffix
                            # and substitute it for the site's own markers.
                            filename_qt = self._reverseQuality(self._episodeQuality(result))
                            title = title.replace(" Versione 720p",filename_qt).replace(" V 720p",filename_qt).replace(" V HEVC",filename_qt).replace(" Versione 1080p",filename_qt).replace(" 720p HEVC",filename_qt)

                            if Quality.nameQuality(title) == Quality.UNKNOWN:
                                title += filename_qt

                            logger.log(u"name, inserted quallity: " + title + "", logger.DEBUG)

                            item = title, download_url, id, seeders, leechers
                            logger.log(u"Found result: " + title + "(" + searchURL + ")", logger.DEBUG)

                            # Unless the user opted into subtitled releases,
                            # keep only Italian-audio releases.
                            if not self._is_italian(result) and not self.subtitle:
                                logger.log(u"Subtitled, Skipped", logger.DEBUG)
                                continue
                            else:
                                logger.log(u"Not Subtitled or Forced, Got It!", logger.DEBUG)

                            items[mode].append(item)

                except Exception, e:
                    logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)

        #For each search mode sort all the items by seeders
        items[mode].sort(key=lambda tup: tup[3], reverse=True)

        results += items[mode]

    return results
|
||||
|
||||
def _get_title_and_url(self, item):
    """Extract (title, url) from a result tuple, normalizing both.

    Spaces in the title become dots (scene convention) and HTML-escaped
    ampersands in the URL are restored.
    """

    title, url, torrent_id, seeders, leechers = item

    if title:
        title = (u'' + title).replace(' ', '.')

    if url:
        url = str(url).replace('&amp;', '&')

    return (title, url)
|
||||
|
||||
def findPropers(self, search_date=None):
    """Search recently downloaded/snatched episodes for PROPER/REPACK releases.

    search_date defaults to "now".  The original signature used
    ``search_date=datetime.datetime.today()``, which Python evaluates once
    at function-definition time, so the default grew stale for the whole
    process lifetime; a None sentinel fixes that while staying
    backward-compatible for callers that pass an explicit date.
    """

    if search_date is None:
        search_date = datetime.datetime.today()

    results = []

    myDB = db.DBConnection()
    sqlResults = myDB.select(
        'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
        ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
        ' WHERE e.airdate >= ' + str(search_date.toordinal()) +
        ' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
        ' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
    )

    if not sqlResults:
        return []

    for sqlshow in sqlResults:
        self.show = curshow = helpers.findCertainShow(sickbeard.showList, int(sqlshow["showid"]))
        if not self.show:
            continue
        curEp = curshow.getEpisode(int(sqlshow["season"]), int(sqlshow["episode"]))

        searchString = self._get_episode_search_strings(curEp, add_string='PROPER|REPACK')

        for item in self._doSearch(searchString[0]):
            title, url = self._get_title_and_url(item)
            results.append(classes.Proper(title, url, datetime.datetime.today(), self.show))

    return results
|
||||
|
||||
def seedRatio(self):
    """Return the configured seed ratio for this provider (may be None)."""
    return self.ratio
|
||||
|
||||
|
||||
class TNTVillageCache(tvcache.TVCache):
    """RSS-style result cache for the TNTVillage provider."""

    def __init__(self, provider):
        tvcache.TVCache.__init__(self, provider)
        # Poll TNTVillage at most once every 30 minutes.
        self.minTime = 30

    def _getRSSData(self):
        """Run an empty 'RSS' search against the provider and wrap the results."""
        return {'entries': self.provider._doSearch({'RSS': []})}
|
||||
|
||||
|
||||
# Module-level singleton instance, discovered by sickbeard's provider loader.
provider = TNTVillageProvider()
|
@ -4315,6 +4315,25 @@ class ConfigProviders(Config):
|
||||
except:
|
||||
curTorrentProvider.enable_backlog = 0 # these exceptions are actually catching unselected checkboxes
|
||||
|
||||
if hasattr(curTorrentProvider, 'page'):
|
||||
try:
|
||||
curTorrentProvider.page = int(str(kwargs[curTorrentProvider.getID() + '_page']).strip())
|
||||
except:
|
||||
curTorrentProvider.page = 1
|
||||
|
||||
if hasattr(curTorrentProvider, 'cat'):
|
||||
try:
|
||||
curTorrentProvider.cat = int(str(kwargs[curTorrentProvider.getID() + '_cat']).strip())
|
||||
except:
|
||||
curTorrentProvider.cat = 0
|
||||
|
||||
if hasattr(curTorrentProvider, 'subtitle'):
|
||||
try:
|
||||
curTorrentProvider.subtitle = config.checkbox_to_value(
|
||||
kwargs[curTorrentProvider.getID() + '_subtitle'])
|
||||
except:
|
||||
curTorrentProvider.subtitle = 0
|
||||
|
||||
for curNzbProvider in [curProvider for curProvider in sickbeard.providers.sortedProviderList() if
|
||||
curProvider.providerType == sickbeard.GenericProvider.NZB]:
|
||||
|
||||
|
Loading…
Reference in New Issue
Block a user