mirror of https://github.com/moparisthebest/SickRage synced 2024-12-12 19:12:26 -05:00

Merge pull request #781 from echel0n/dev

Dev
adam111316 2014-09-06 19:03:22 +08:00
commit 1ed552d0a6
14 changed files with 622 additions and 36 deletions

Binary file not shown (new image, 6.8 KiB).

View File

@ -22,7 +22,7 @@
\$(document).ready(function(){
var show_nzb_providers = #if $sickbeard.USE_NZBS then "true" else "false"#;
#for $curNewznabProvider in $sickbeard.newznabProviderList:
\$(this).addProvider('$curNewznabProvider.getID()', '$curNewznabProvider.name', '$curNewznabProvider.url', '$curNewznabProvider.key', $int($curNewznabProvider.default), show_nzb_providers);
\$(this).addProvider('$curNewznabProvider.getID()', '$curNewznabProvider.name', '$curNewznabProvider.url', '$curNewznabProvider.key', '$curNewznabProvider.catIDs', $int($curNewznabProvider.default), show_nzb_providers);
#end for
});
//-->
@ -572,6 +572,24 @@ var show_nzb_providers = #if $sickbeard.USE_NZBS then "true" else "false"#;
<span class="component-desc">(if not required, type 0)</span>
</label>
</div>
<div class="field-pair">
<label class="nocheck clearfix">
<span class="component-title">NewzNab search categories</span>
<select id="newznab_cap" multiple="multiple" style="min-width:10em;" >
</select>
<select id="newznab_cat" multiple="multiple" style="min-width:10em;" >
</select>
</label>
<label class="clearfix">
<span class="component-title">&nbsp;</span>
<span class="component-desc">(Select your Newznab categories on the left, and click the "update categories" button to use them for searching.)
Don't forget to to save the form!</span>
</label>
<input class="btn" type="button" class="newznab_cat_update" id="newznab_cat_update" value="Update Categories" />
</div>
<div id="newznab_add_div">
<input class="btn" type="button" class="newznab_save" id="newznab_add" value="Add" />
</div>

View File

@ -13,7 +13,41 @@ $(document).ready(function(){
});
}
$.fn.addProvider = function (id, name, url, key, isDefault, showProvider) {
$.fn.getCategories = function (isDefault, name, url, key) {
if (!name)
return;
if (!url)
return;
if (!key)
return;
var params = {url: url, name: name, key: key};
var returnData;
$.ajaxSetup( { "async": false } );
$.getJSON(sbRoot + '/config/providers/getNewznabCategories', params,
function(data){
if (data.error != "") {
alert(data.error);
return false;
}
if (data.success == false) {
return false;
}
console.debug(data.tv_categories);
returnData = data;
});
$.ajaxSetup( { "async": true } );
return returnData;
}
$.fn.addProvider = function (id, name, url, key, cat, isDefault, showProvider) {
url = $.trim(url);
if (!url)
@ -25,7 +59,7 @@ $(document).ready(function(){
if (url.match('/$') == null)
url = url + '/';
var newData = [isDefault, [name, url, key]];
var newData = [isDefault, [name, url, key, cat]];
newznabProviders[id] = newData;
if (!isDefault){
@ -63,10 +97,11 @@ $(document).ready(function(){
}
$.fn.updateProvider = function (id, url, key) {
$.fn.updateProvider = function (id, url, key, cat) {
newznabProviders[id][1][1] = url;
newznabProviders[id][1][2] = key;
newznabProviders[id][1][3] = cat;
$(this).populateNewznabSection();
@ -108,17 +143,49 @@ $(document).ready(function(){
var isDefault = 0;
$('#newznab_add_div').show();
$('#newznab_update_div').hide();
$('#newznab_cat').attr('disabled','disabled');
$('#newznab_cap').attr('disabled','disabled');
$("#newznab_cat option").each(function() {
$(this).remove();
return;
});
$("#newznab_cap option").each(function() {
$(this).remove();
return;
});
} else {
var data = newznabProviders[selectedProvider][1];
var isDefault = newznabProviders[selectedProvider][0];
$('#newznab_add_div').hide();
$('#newznab_update_div').show();
$('#newznab_cat').removeAttr("disabled");
$('#newznab_cap').removeAttr("disabled");
}
$('#newznab_name').val(data[0]);
$('#newznab_url').val(data[1]);
$('#newznab_key').val(data[2]);
// Split the categories into an array if they are still a comma-separated string
if (typeof data[3] === 'string') {
rrcat = data[3].split(",")
}
else {
rrcat = data[3];
}
// Update the category select box (on the right)
var newCatOptions = [];
if (rrcat) {
rrcat.forEach(function (cat) {
newCatOptions.push({text : cat, value : cat});
});
$("#newznab_cat").replaceOptions(newCatOptions);
};
if (selectedProvider == 'addNewznab') {
$('#newznab_name').removeAttr("disabled");
$('#newznab_url').removeAttr("disabled");
@ -132,11 +199,51 @@ $(document).ready(function(){
} else {
$('#newznab_url').removeAttr("disabled");
$('#newznab_delete').removeAttr("disabled");
//Get Categories Capabilities
if (data[0] && data[1] && data[2] && !ifExists($.fn.newznabProvidersCapabilities, data[0])) {
var categoryresult = $(this).getCategories(isDefault, data[0], data[1], data[2]);
if (categoryresult && categoryresult.success && categoryresult.tv_categories) {
$.fn.newznabProvidersCapabilities.push({'name' : data[0], 'categories' : categoryresult.tv_categories});
}
}
//Loop through the array and if currently selected newznab provider name matches one in the array, use it to
//update the capabilities select box (on the left).
if (data[0]) {
$.fn.newznabProvidersCapabilities.forEach(function(newzNabCap) {
if (newzNabCap.name && newzNabCap.name == data[0] && newzNabCap.categories instanceof Array) {
var newCapOptions = [];
newzNabCap.categories.forEach(function(category_set) {
if (category_set.id && category_set.name) {
newCapOptions.push({value : category_set.id, text : category_set.name + "(" + category_set.id + ")"});
};
});
$("#newznab_cap").replaceOptions(newCapOptions);
}
});
};
}
}
}
ifExists = function(loopThroughArray, searchFor) {
var found = false;
loopThroughArray.forEach(function(rootObject) {
if (rootObject.name == searchFor) {
found = true;
}
console.log(rootObject.name + " while searching for: "+ searchFor);
});
return found;
};
$.fn.makeNewznabProviderString = function() {
var provStrings = new Array();
@ -294,9 +401,10 @@ $(document).ready(function(){
provider_id = provider_id.substring(0, provider_id.length-'_hash'.length);
var url = $('#'+provider_id+'_url').val();
var cat = $('#'+provider_id+'_cat').val();
var key = $(this).val();
$(this).updateProvider(provider_id, url, key);
$(this).updateProvider(provider_id, url, key, cat);
});
@ -310,7 +418,11 @@ $(document).ready(function(){
var url = $('#newznab_url').val();
var key = $('#newznab_key').val();
$(this).updateProvider(selectedProvider, url, key);
var cat = $('#newznab_cat option').map(function(i, opt) {
return $(opt).text();
}).toArray().join(',');
$(this).updateProvider(selectedProvider, url, key, cat);
});
@ -344,6 +456,48 @@ $(document).ready(function(){
$(this).refreshProviderList();
});
$(this).on('click', '#newznab_cat_update', function(){
console.debug('Clicked Button');
//Maybe check if there is anything selected?
$("#newznab_cat option").each(function() {
$(this).remove();
return;
});
var newOptions = [];
// When the update button is clicked, loop through the capabilities list
// and copy the selected category id's to the category list on the right.
$("#newznab_cap option").each(function(){
if($(this).attr('selected') == 'selected')
{
var selected_cat = $(this).val();
console.debug(selected_cat);
newOptions.push({text: selected_cat, value: selected_cat})
};
});
$("#newznab_cat").replaceOptions(newOptions);
var selectedProvider = $('#editANewznabProvider :selected').val();
if (selectedProvider == "addNewznab")
return;
var url = $('#newznab_url').val();
var key = $('#newznab_key').val();
var cat = $('#newznab_cat option').map(function(i, opt) {
return $(opt).text();
}).toArray().join(',');
$("#newznab_cat option:not([value])").remove();
$(this).updateProvider(selectedProvider, url, key, cat);
});
$('#newznab_add').click(function(){
var selectedProvider = $('#editANewznabProvider :selected').val();
@ -351,6 +505,11 @@ $(document).ready(function(){
var name = $.trim($('#newznab_name').val());
var url = $.trim($('#newznab_url').val());
var key = $.trim($('#newznab_key').val());
//var cat = $.trim($('#newznab_cat').val());
var cat = $.trim($('#newznab_cat option').map(function(i, opt) {
return $(opt).text();}).toArray().join(','));
if (!name)
return;
@ -371,7 +530,7 @@ $(document).ready(function(){
return;
}
$(this).addProvider(data.success, name, url, key, 0);
$(this).addProvider(data.success, name, url, key, cat, 0);
});
});
@ -466,8 +625,26 @@ $(document).ready(function(){
});
$.fn.replaceOptions = function(options) {
var self, $option;
this.empty();
self = this;
$.each(options, function(index, option) {
$option = $("<option></option>")
.attr("value", option.value)
.text(option.text);
self.append($option);
});
};
// initialization stuff
$.fn.newznabProvidersCapabilities = [];
$(this).hideConfigTab();
$(this).showHideProviders();

View File

@ -32,7 +32,7 @@ from sickbeard import providers, metadata, config, webserveInit
from sickbeard.providers.generic import GenericProvider
from providers import ezrss, tvtorrents, btn, newznab, womble, thepiratebay, torrentleech, kat, iptorrents, \
omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, nextgen, speedcd, nyaatorrents, fanzub, torrentbytes, animezb, \
freshontv, bitsoup
freshontv, bitsoup, t411
from sickbeard.config import CheckSection, check_setting_int, check_setting_str, check_setting_float, ConfigMigrator, \
naming_ep_type
from sickbeard import searchBacklog, showUpdater, versionChecker, properFinder, autoPostProcesser, \

View File

@ -250,6 +250,10 @@ def checkbox_to_value(option, value_on=1, value_off=0):
Turns checkbox option 'on' or 'true' to value_on (1)
any other value returns value_off (0)
"""
if type(option) is list:
option = option[-1]
if option == 'on' or option == 'true':
return value_on
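The added list check matters because a form can post the same checkbox name more than once (for example a hidden fallback field next to the visible box), in which case the handler receives a list rather than a single string. A minimal sketch of the resulting behaviour, using a stand-in for the helper rather than importing sickbeard.config:

```python
# Stand-in mirroring sickbeard.config.checkbox_to_value as changed above.
def checkbox_to_value(option, value_on=1, value_off=0):
    if type(option) is list:
        # duplicate form fields arrive as a list; keep the last submitted value
        option = option[-1]
    if option == 'on' or option == 'true':
        return value_on
    return value_off

assert checkbox_to_value('on') == 1
assert checkbox_to_value(['0', 'on']) == 1   # hidden fallback plus a checked box
assert checkbox_to_value(None) == 0
```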

View File

@ -88,7 +88,7 @@ normal_regexes = [
# Show Name - 2010-11-23 - Ep Name
'''
^((?P<series_name>.+?)[. _-]+)? # Show_Name and separator
(?P<air_date>(\d{4}[. _-]+\d{1,2}[. _-]+\d{1,2})|(\d{1,2}\w{2}[. _-]+\w+[. _-]+\d{4})|(\w+[. _-]+\d{1,2}\w{2}[. _-]+\d{4}))
(?P<air_date>(\d+[. _-]\d+[. _-]\d+)|(\d+\w+[. _-]\w+[. _-]\d+))[. _-]+
[. _-]*((?P<extra_info>.+?) # Source_Quality_Etc-
((?<![. _-])(?<!WEB) # Make sure this is really the release group
-(?P<release_group>[^- ]+([. _-]\[.*\])?))?)?$ # Group
@ -100,8 +100,8 @@ normal_regexes = [
# Show Name - 2010-11-23 - Ep Name
'''
^(?P<series_name>.*?(UEFA|MLB|ESPN|WWE|MMA|UFC|TNA|EPL|NASCAR|NBA|NFL|NHL|NRL|PGA|SUPER LEAGUE|FORMULA|FIFA|NETBALL|MOTOGP).*?)[. _-]+
((?P<series_num>\d{1,3}).*?)?
(?P<air_date>(\d{2}[. _-]+\d{2}[. _-]+\d{2})|(\d{4}[. _-]+\d{1,2}[. _-]+\d{1,2})|(\d{1,2}\w{2}[. _-]+\w+[. _-]+\d{4})|(\w+[. _-]+\d{1,2}\w{2}[. _-]+\d{4}))[. _-]*
((?P<series_num>\d{1,3})[. _-]+)?
(?P<air_date>(\d+[. _-]\d+[. _-]\d+)|(\d+\w+[. _-]\w+[. _-]\d+))[. _-]+
((?P<extra_info>.+?)((?<![. _-])
(?<!WEB)-(?P<release_group>[^- ]+([. _-]\[.*\])?))?)?$
'''),
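The simplified air_date alternation matches both purely numeric dates and spelled-out forms such as "23rd November 2010", with looser digit counts than before. A quick, isolated check of just that sub-pattern (not the full verbose release-name regex):

```python
import re

# Only the air_date alternation from the regexes above, tested on its own.
air_date = re.compile(r'(\d+[. _-]\d+[. _-]\d+)|(\d+\w+[. _-]\w+[. _-]\d+)')

assert air_date.search('Show.Name.2010.11.23.Ep.Name')       # 2010.11.23
assert air_date.search('Show Name 23rd November 2010 HDTV')  # 23rd November 2010
assert not air_date.search('Show.Name.S01E02.Ep.Name')       # no air date here
```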

View File

@ -20,7 +20,7 @@
import base64
from httplib import HTTPSConnection, HTTPException
from urllib import urlencode
import json
from ssl import SSLError
import sickbeard
from sickbeard import logger, common
@ -97,8 +97,9 @@ class PushbulletNotifier:
'body': message.encode('utf-8'),
'device_iden': pushbullet_device,
'type': notificationType}
http_handler.request(method, uri, body=urlencode(data),
headers={'Authorization': 'Basic %s' % authString})
data = json.dumps(data)
http_handler.request(method, uri, body=data,
headers={'Content-Type': 'application/json', 'Authorization': 'Basic %s' % authString})
pass
except (SSLError, HTTPException):
return False
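Since the body is now JSON rather than form-encoded, the added Content-Type header tells Pushbullet how to parse it. A rough Python 2 sketch of the resulting request; the host, path and placeholder credentials below are assumptions for illustration, not values taken from this module:

```python
import base64
import json
from httplib import HTTPSConnection  # Python 2, matching the import above

api_key = 'YOUR_PUSHBULLET_API_KEY'            # placeholder
auth_string = base64.b64encode(api_key + ':')  # Basic auth with the key as username

body = json.dumps({'type': 'note',
                   'title': 'SickRage',
                   'body': 'Episode snatched',
                   'device_iden': 'YOUR_DEVICE_IDEN'})  # placeholder

conn = HTTPSConnection('api.pushbullet.com')            # assumed host and path
conn.request('POST', '/v2/pushes', body=body,
             headers={'Content-Type': 'application/json',
                      'Authorization': 'Basic %s' % auth_string})
print(conn.getresponse().status)
```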

View File

@ -793,7 +793,7 @@ class PostProcessor(object):
(show, season, episodes, quality, version) = self._find_info()
if not show:
self._log(u"This show isn't in your list, you need to add it to SB before post-processing an episode",
logger.ERROR)
logger.WARNING)
raise exceptions.PostProcessingFailed()
elif season == None or not episodes:
self._log(u"Not enough information to determine what episode this is", logger.DEBUG)

View File

@ -36,7 +36,8 @@ __all__ = ['ezrss',
'torrentbytes',
'animezb',
'freshontv',
'bitsoup'
'bitsoup',
't411'
]
import sickbeard
@ -186,7 +187,7 @@ def makeTorrentRssProvider(configString):
def getDefaultNewznabProviders():
return 'Sick Beard Index|http://lolo.sickbeard.com/|0|5030,5040|0|eponly|0|0|0!!!NZBs.org|https://nzbs.org/|0|5030,5040|0|eponly|0|0|0!!!Usenet-Crawler|https://www.usenet-crawler.com/|0|5030,5040|0|eponly|0|0|0'
return 'Sick Beard Index|http://lolo.sickbeard.com/|0|5030,5040|0|eponly|0|0|0!!!NZBs.org|https://nzbs.org/||5030,5040|0|eponly|0|0|0!!!Usenet-Crawler|https://www.usenet-crawler.com/||5030,5040|0|eponly|0|0|0'
def getProviderModule(name):

View File

@ -27,7 +27,7 @@ import sickbeard
import requests
from sickbeard import helpers, classes, logger, db
from sickbeard.common import MULTI_EP_RESULT, SEASON_RESULT
from sickbeard.common import MULTI_EP_RESULT, SEASON_RESULT, USER_AGENT
from sickbeard import tvcache
from sickbeard import encodingKludge as ek
from sickbeard.exceptions import ex
@ -63,7 +63,10 @@ class GenericProvider:
self.session = requests.session()
self.headers = {
'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.107 Safari/537.36'}
#Use USER_AGENT instead of a hard-coded Mozilla string so the same user agent is kept across the authentication and download phases;
#otherwise the session may break and the download fail, prompting for authentication again
#'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.107 Safari/537.36'}
'User-Agent': USER_AGENT}
def getID(self):
return GenericProvider.makeID(self.name)

View File

@ -61,7 +61,7 @@ class KATProvider(generic.TorrentProvider):
self.cache = KATCache(self)
self.urls = ['http://kickass.to/', 'http://katproxy.com/', 'http://www.kickmirror.com']
self.urls = ['http://kickass.to/', 'http://katproxy.com/', 'http://www.kickmirror.com/']
self.url = None
def isEnabled(self):

View File

@ -37,6 +37,9 @@ from sickbeard import logger
from sickbeard import tvcache
from sickbeard.exceptions import ex, AuthException
from lib import requests
from lib.requests import exceptions
from lib.bencode import bdecode
class NewznabProvider(generic.NZBProvider):
def __init__(self, name, url, key='', catIDs='5030,5040', search_mode='eponly', search_fallback=False,
@ -86,6 +89,52 @@ class NewznabProvider(generic.NZBProvider):
def isEnabled(self):
return self.enabled
def _getURL(self, url, post_data=None, params=None, timeout=30, json=False):
"""
By default this is just a simple urlopen call but this method should be overridden
for providers with special URL requirements (like cookies)
Not changed much from the superclass; kept so it can be extended in the future.
"""
# check for auth
if not self._doLogin():
return
return helpers.getURL(url, post_data=post_data, params=params, headers=self.headers, timeout=timeout,
session=self.session, json=json)
def get_newznab_categories(self):
"""
Uses the newznab provider url and apikey to get the capabilities.
Makes use of the default newznab caps param, e.g. http://yournewznab/api?t=caps&apikey=skdfiw7823sdkdsfjsfk
Returns a tuple of (success flag, array with dicts [{"id": "5070", "name": "Anime"},
{"id": "5080", "name": "Documentary"}, {"id": "5020", "name": "Foreign"}...etc}], error message)
"""
return_categories = []
self._checkAuth()
params = {"t": "caps"}
if self.needs_auth and self.key:
params['apikey'] = self.key
categories = self.getURL("%s/api" % (self.url), params=params)
xml_categories = helpers.parse_xml(categories)
if not xml_categories:
return (False, return_categories, "Error parsing xml for [%s]" % (self.name))
try:
for category in xml_categories.iter('category'):
if category.get('name') == 'TV':
for subcat in category.findall('subcat'):
return_categories.append(subcat.attrib)
except:
return (False, return_categories, "Error parsing result for [%s]" % (self.name))
return (True, return_categories, "")
def _get_season_search_strings(self, ep_obj):
to_return = []
@ -239,6 +288,18 @@ class NewznabProvider(generic.NZBProvider):
except (AttributeError, TypeError):
break
# sanity check - stop once the limit exceeds 1000 results, in case an incorrect total parameter is reported
if params['limit'] > 1000:
logger.log("Excessive results for search, ending search", logger.WARNING)
break
# sanity check - total should remain constant
if offset != 0 and total != initial_total:
logger.log("Total number of items on newznab response changed, ending search", logger.DEBUG)
break
else:
initial_total = total
# if there are more items available then the amount given in one call, grab some more
if (total - params['limit']) > offset == params['offset']:
params['offset'] += params['limit']
@ -248,9 +309,7 @@ class NewznabProvider(generic.NZBProvider):
else:
break
# sanity check - limiting at 10 at getting 1000 results in-case incorrect total parameter is reported
if params['limit'] > 1000:
break
else:
break
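For context, a newznab t=caps response nests the TV subcategories inside a category named "TV", and get_newznab_categories() above simply collects each subcat's attribute dict. A self-contained sketch with a made-up caps snippet (the real code fetches the XML through self.getURL and helpers.parse_xml):

```python
import xml.etree.ElementTree as ET

# Made-up excerpt of a newznab "t=caps" response.
caps = """
<caps>
  <categories>
    <category id="5000" name="TV">
      <subcat id="5030" name="SD"/>
      <subcat id="5040" name="HD"/>
      <subcat id="5070" name="Anime"/>
    </category>
    <category id="2000" name="Movies"/>
  </categories>
</caps>
"""

return_categories = []
for category in ET.fromstring(caps).iter('category'):
    if category.get('name') == 'TV':
        for subcat in category.findall('subcat'):
            return_categories.append(subcat.attrib)

# return_categories now holds the three TV subcat attribute dicts,
# e.g. {'id': '5070', 'name': 'Anime'}
```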

sickbeard/providers/t411.py (new file, 294 lines)
View File

@ -0,0 +1,294 @@
# -*- coding: latin-1 -*-
# Author: djoole <bobby.djoole@gmail.com>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of Sick Beard.
#
# Sick Beard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Sick Beard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
import traceback
import time
import re
import datetime
import sickbeard
import generic
from lib import requests
from sickbeard.common import USER_AGENT, Quality, cpu_presets
from sickbeard import logger
from sickbeard import tvcache
from sickbeard import show_name_helpers
from sickbeard.bs4_parser import BS4Parser
class T411Provider(generic.TorrentProvider):
urls = {'base_url': 'http://www.t411.me/',
'search': 'http://www.t411.me/torrents/search/?name=%s&cat=210&subcat=433&search=%s&submit=Recherche',
'login_page': 'http://www.t411.me/users/login/',
'download': 'http://www.t411.me/torrents/download/?id=%s',
}
def __init__(self):
generic.TorrentProvider.__init__(self, "T411")
self.supportsBacklog = True
self.enabled = False
self.username = None
self.password = None
self.ratio = None
self.cache = T411Cache(self)
self.url = self.urls['base_url']
self.last_login_check = None
self.login_opener = None
def isEnabled(self):
return self.enabled
def imageName(self):
return 't411.png'
def getQuality(self, item, anime=False):
quality = Quality.sceneQuality(item[0], anime)
return quality
def getLoginParams(self):
return {
'login': self.username,
'password': self.password,
'remember': '1',
}
def loginSuccess(self, output):
if "<span>Ratio: <strong class" in output.text:
return True
else:
return False
def _doLogin(self):
now = time.time()
if self.login_opener and self.last_login_check < (now - 3600):
try:
output = self.login_opener.open(self.urls['test'])
if self.loginSuccess(output):
self.last_login_check = now
return True
else:
self.login_opener = None
except:
self.login_opener = None
if self.login_opener:
return True
try:
login_params = self.getLoginParams()
self.session = requests.Session()
self.session.headers.update({'User-Agent': USER_AGENT})
data = self.session.get(self.urls['login_page'], verify=False)
output = self.session.post(self.urls['login_page'], data=login_params, verify=False)
if self.loginSuccess(output):
self.last_login_check = now
self.login_opener = self.session
return True
error = 'unknown'
except:
error = traceback.format_exc()
self.login_opener = None
self.login_opener = None
logger.log(u'Failed to login:' + str(error), logger.ERROR)
return False
def _get_season_search_strings(self, ep_obj):
search_string = {'Season': []}
for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
if ep_obj.show.air_by_date or ep_obj.show.sports:
ep_string = show_name + '.' + str(ep_obj.airdate).split('-')[0]
elif ep_obj.show.anime:
ep_string = show_name + '.' + "%d" % ep_obj.scene_absolute_number
else:
ep_string = show_name + '.S%02d' % int(ep_obj.scene_season) #1) showName.SXX
search_string['Season'].append(ep_string)
return [search_string]
def _get_episode_search_strings(self, ep_obj, add_string=''):
search_string = {'Episode': []}
if not ep_obj:
return []
if self.show.air_by_date:
for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
ep_string = sanitizeSceneName(show_name) + '.' + \
str(ep_obj.airdate).replace('-', '|')
search_string['Episode'].append(ep_string)
elif self.show.sports:
for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
ep_string = sanitizeSceneName(show_name) + '.' + \
str(ep_obj.airdate).replace('-', '|') + '|' + \
ep_obj.airdate.strftime('%b')
search_string['Episode'].append(ep_string)
elif self.show.anime:
for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
ep_string = sanitizeSceneName(show_name) + '.' + \
"%i" % int(ep_obj.scene_absolute_number)
search_string['Episode'].append(ep_string)
else:
for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
ep_string = show_name_helpers.sanitizeSceneName(show_name) + '.' + \
sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season,
'episodenumber': ep_obj.scene_episode}
search_string['Episode'].append(re.sub('\s+', '.', ep_string))
return [search_string]
def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0):
results = []
items = {'Season': [], 'Episode': [], 'RSS': []}
if not self._doLogin():
return []
for mode in search_params.keys():
for search_string in search_params[mode]:
if search_string == '':
search_string2 = ''
else:
search_string2 = '%40name+' + search_string + '+'
searchURL = self.urls['search'] % (search_string, search_string2)
logger.log(u"" + self.name + " search page URL: " + searchURL, logger.DEBUG)
data = self.getURL(searchURL)
if not data:
continue
try:
with BS4Parser(data.decode('iso-8859-1'), features=["html5lib", "permissive"]) as html:
resultsTable = html.find('table', attrs={'class': 'results'})
if not resultsTable:
logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
logger.DEBUG)
continue
entries = resultsTable.find("tbody").findAll("tr")
if len(entries) > 0:
for result in entries:
try:
link = result.find('a', title=True)
torrentName = link['title']
torrent_name = str(torrentName)
torrentId = result.find_all('td')[2].find_all('a')[0]['href'][1:].replace('torrents/nfo/?id=','')
torrent_download_url = (self.urls['download'] % torrentId).encode('utf8')
except (AttributeError, TypeError):
continue
if not torrent_name or not torrent_download_url:
continue
item = torrent_name, torrent_download_url
logger.log(u"Found result: " + torrent_name + " (" + torrent_download_url + ")", logger.DEBUG)
items[mode].append(item)
else:
logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
logger.WARNING)
continue
except Exception, e:
logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(),
logger.ERROR)
results += items[mode]
return results
def _get_title_and_url(self, item):
title, url = item
if title:
title = u'' + title
title = title.replace(' ', '.')
if url:
url = str(url).replace('&amp;', '&')
return title, url
def findPropers(self, search_date=datetime.datetime.today()):
results = []
myDB = db.DBConnection()
sqlResults = myDB.select(
'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
)
if not sqlResults:
return []
for sqlshow in sqlResults:
self.show = helpers.findCertainShow(sickbeard.showList, int(sqlshow["showid"]))
if self.show:
curEp = self.show.getEpisode(int(sqlshow["season"]), int(sqlshow["episode"]))
searchString = self._get_episode_search_strings(curEp, add_string='PROPER|REPACK')
for item in self._doSearch(searchString[0]):
title, url = self._get_title_and_url(item)
results.append(classes.Proper(title, url, datetime.datetime.today(), self.show))
return results
def seedRatio(self):
return self.ratio
class T411Cache(tvcache.TVCache):
def __init__(self, provider):
tvcache.TVCache.__init__(self, provider)
# Only poll T411 every 10 minutes max
self.minTime = 10
def _getDailyData(self):
search_params = {'RSS': ['']}
return self.provider._doSearch(search_params)
provider = T411Provider()

View File

@ -1909,6 +1909,34 @@ class ConfigProviders(MainHandler):
sickbeard.newznabProviderList.append(newProvider)
return newProvider.getID() + '|' + newProvider.configStr()
def getNewznabCategories(self, name, url, key):
'''
Retrieves a list of possible categories with their category ids,
using the newznab caps call, e.g.
http://yournewznaburl.com/api?t=caps&apikey=yourapikey
'''
error = ""
success = False
if not name:
error += "\nNo Provider Name specified"
if not url:
error += "\nNo Provider Url specified"
if not key:
error += "\nNo Provider Api key specified"
if error <> "":
return json.dumps({'success' : False, 'error': error})
#Get list with Newznabproviders
#providerDict = dict(zip([x.getID() for x in sickbeard.newznabProviderList], sickbeard.newznabProviderList))
#Get newznabprovider obj with provided name
tempProvider= newznab.NewznabProvider(name, url, key)
success, tv_categories, error = tempProvider.get_newznab_categories()
return json.dumps({'success' : success,'tv_categories' : tv_categories, 'error' : error})
def deleteNewznabProvider(self, nnid):
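The handler above wraps NewznabProvider.get_newznab_categories() and returns JSON in the shape the getCategories helper in the providers config script expects. A hedged sketch of hitting the endpoint directly with the requests library; the host and port are assumptions (8081 is only the usual default), while the path and parameter names come from the handler itself:

```python
import requests

# Assumed local SickRage web root; adjust host, port and web_root as needed.
resp = requests.get('http://localhost:8081/config/providers/getNewznabCategories',
                    params={'name': 'MyIndexer',
                            'url': 'https://indexer.example/',
                            'key': 'APIKEY'})
data = resp.json()
if data['success']:
    for cat in data['tv_categories']:   # e.g. {'id': '5070', 'name': 'Anime'}
        print(cat['id'], cat['name'])
else:
    print(data['error'])
```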
@ -2002,18 +2030,19 @@ class ConfigProviders(MainHandler):
if not curNewznabProviderStr:
continue
cur_name, cur_url, cur_key = curNewznabProviderStr.split('|')
cur_name, cur_url, cur_key, cur_cat = curNewznabProviderStr.split('|')
cur_url = config.clean_url(cur_url)
newProvider = newznab.NewznabProvider(cur_name, cur_url, key=cur_key)
cur_id = newProvider.getID()
# if it already exists then update it
if cur_id in newznabProviderDict:
newznabProviderDict[cur_id].name = cur_name
newznabProviderDict[cur_id].url = cur_url
newznabProviderDict[cur_id].key = cur_key
newznabProviderDict[cur_id].catIDs = cur_cat
# a 0 in the key spot indicates that no key is needed
if cur_key == '0':
newznabProviderDict[cur_id].needs_auth = False
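Each provider arrives from the config form as one chunk in a '!!!'-separated string, with '|' between the four fields unpacked above. A hedged sketch of that parsing with made-up provider entries; as the comment notes, a 0 in the key spot means the indexer needs no API key:

```python
# Hypothetical newznab_string as assembled on the providers config page:
# one provider per '!!!' chunk, four '|'-separated fields per chunk.
newznab_string = ('MyIndexer|https://indexer.example/|APIKEY|5030,5040'
                  '!!!OpenIndexer|https://open.example/|0|5030,5040')

for chunk in newznab_string.split('!!!'):
    if not chunk:
        continue
    cur_name, cur_url, cur_key, cur_cat = chunk.split('|')
    needs_auth = cur_key != '0'   # '0' in the key spot: no key needed
    print(cur_name, cur_url, cur_cat, 'auth required' if needs_auth else 'no auth')
```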
@ -2029,19 +2058,19 @@ class ConfigProviders(MainHandler):
newznabProviderDict[cur_id].search_fallback = config.checkbox_to_value(
kwargs[cur_id + '_search_fallback'])
except:
pass
newznabProviderDict[cur_id].search_fallback = 0
try:
newznabProviderDict[cur_id].enable_daily = config.checkbox_to_value(
kwargs[cur_id + '_enable_daily'])
except:
pass
newznabProviderDict[cur_id].enable_daily = 0
try:
newznabProviderDict[cur_id].enable_backlog = config.checkbox_to_value(
kwargs[cur_id + '_enable_backlog'])
except:
pass
newznabProviderDict[cur_id].enable_backlog = 0
else:
sickbeard.newznabProviderList.append(newProvider)
@ -2196,21 +2225,21 @@ class ConfigProviders(MainHandler):
curTorrentProvider.search_fallback = config.checkbox_to_value(
kwargs[curTorrentProvider.getID() + '_search_fallback'])
except:
curTorrentProvider.search_fallback = 0
curTorrentProvider.search_fallback = 0 # these exceptions are catching unselected checkboxes
if hasattr(curTorrentProvider, 'enable_daily'):
try:
curTorrentProvider.enable_daily = config.checkbox_to_value(
kwargs[curTorrentProvider.getID() + '_enable_daily'])
except:
curTorrentProvider.enable_daily = 1
curTorrentProvider.enable_daily = 0 # these exceptions are actually catching unselected checkboxes
if hasattr(curTorrentProvider, 'enable_backlog'):
try:
curTorrentProvider.enable_backlog = config.checkbox_to_value(
kwargs[curTorrentProvider.getID() + '_enable_backlog'])
except:
curTorrentProvider.enable_backlog = 1
curTorrentProvider.enable_backlog = 0 # these exceptions are actually catching unselected checkboxes
for curNzbProvider in [curProvider for curProvider in sickbeard.providers.sortedProviderList() if
curProvider.providerType == sickbeard.GenericProvider.NZB]:
@ -2238,21 +2267,21 @@ class ConfigProviders(MainHandler):
curNzbProvider.search_fallback = config.checkbox_to_value(
kwargs[curNzbProvider.getID() + '_search_fallback'])
except:
curNzbProvider.search_fallback = 0
curNzbProvider.search_fallback = 0 # these exceptions are actually catching unselected checkboxes
if hasattr(curNzbProvider, 'enable_daily'):
try:
curNzbProvider.enable_daily = config.checkbox_to_value(
kwargs[curNzbProvider.getID() + '_enable_daily'])
except:
curNzbProvider.enable_daily = 1
curNzbProvider.enable_daily = 0 # these exceptions are actually catching unselected checkboxes
if hasattr(curNzbProvider, 'enable_backlog'):
try:
curNzbProvider.enable_backlog = config.checkbox_to_value(
kwargs[curNzbProvider.getID() + '_enable_backlog'])
except:
curNzbProvider.enable_backlog = 1
curNzbProvider.enable_backlog = 0 # these exceptions are actually catching unselected checkboxes
sickbeard.NEWZNAB_DATA = '!!!'.join([x.configStr() for x in sickbeard.newznabProviderList])
sickbeard.PROVIDER_ORDER = provider_list
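The repeated try/except blocks above exist because an unchecked HTML checkbox is simply absent from the posted form, so the kwargs lookup raises an exception; defaulting to 0 records the box as off instead of keeping a stale value (previously some of these defaults were 1). A small illustration with a simplified stand-in for checkbox_to_value (the real code uses a bare except):

```python
def checkbox_to_value(option, value_on=1, value_off=0):
    # simplified stand-in for sickbeard.config.checkbox_to_value
    return value_on if option in ('on', 'true') else value_off

def save_checkbox(kwargs, field):
    try:
        # unchecked boxes never appear in the posted kwargs
        return checkbox_to_value(kwargs[field])
    except KeyError:
        return 0   # treat a missing (unchecked) box as disabled

posted = {'myprovider_enable_daily': 'on'}   # only checked boxes are submitted
assert save_checkbox(posted, 'myprovider_enable_daily') == 1
assert save_checkbox(posted, 'myprovider_enable_backlog') == 0
```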