1
0
mirror of https://github.com/moparisthebest/SickRage synced 2024-12-12 11:02:21 -05:00

Added a new per-provider web-proxy option to allow accessing content that is blocked or restricted in certain countries. This feature is in beta.

Performed miscellaneous PEP 8 code cleanups.

Fixed sickragetv/sickrage-issues#213

Fixed sickragetv/sickrage-issues#217
This commit is contained in:
echel0n 2014-12-19 21:57:11 -08:00
parent 82f58c703a
commit 4b039b0313
33 changed files with 267 additions and 271 deletions

View File

@ -98,7 +98,7 @@ class Cache:
del self.storage[url] del self.storage[url]
return return
def fetch(self, url, force_update=False, offline=False, request_headers=None): def fetch(self, url, force_update=False, offline=False, request_headers=None, referrer=None):
"""Return the feed at url. """Return the feed at url.
url - The URL of the feed. url - The URL of the feed.
@ -112,6 +112,10 @@ class Cache:
cache and never access the remote cache and never access the remote
URL. URL.
request_headers=None - Add addition request headers to request
referrer=None - Added a referrer to request
If there is data for that feed in the cache already, check If there is data for that feed in the cache already, check
the expiration date before accessing the server. If the the expiration date before accessing the server. If the
cached data has not expired, return it without accessing the cached data has not expired, return it without accessing the
@ -175,6 +179,7 @@ class Cache:
agent=self.user_agent, agent=self.user_agent,
modified=modified, modified=modified,
etag=etag, etag=etag,
referrer=referrer,
request_headers=request_headers) request_headers=request_headers)
status = parsed_result.get('status', None) status = parsed_result.get('status', None)

View File

@ -30,47 +30,56 @@ __all__ = ['Guess', 'Language',
# it will then always be available # it will then always be available
# with code from http://lucumr.pocoo.org/2011/1/22/forwards-compatible-python/ # with code from http://lucumr.pocoo.org/2011/1/22/forwards-compatible-python/
import sys import sys
if sys.version_info[0] >= 3: if sys.version_info[0] >= 3:
PY3 = True PY3 = True
unicode_text_type = str unicode_text_type = str
native_text_type = str native_text_type = str
base_text_type = str base_text_type = str
def u(x): def u(x):
return str(x) return str(x)
def s(x): def s(x):
return x return x
class UnicodeMixin(object): class UnicodeMixin(object):
__str__ = lambda x: x.__unicode__() __str__ = lambda x: x.__unicode__()
import binascii import binascii
def to_hex(x): def to_hex(x):
return binascii.hexlify(x).decode('utf-8') return binascii.hexlify(x).decode('utf-8')
else: else:
PY3 = False PY3 = False
__all__ = [ str(s) for s in __all__ ] # fix imports for python2 __all__ = [str(s) for s in __all__] # fix imports for python2
unicode_text_type = unicode unicode_text_type = unicode
native_text_type = str native_text_type = str
base_text_type = basestring base_text_type = basestring
def u(x): def u(x):
if isinstance(x, str): if isinstance(x, str):
return x.decode('utf-8') return x.decode('utf-8')
return unicode(x) return unicode(x)
def s(x): def s(x):
if isinstance(x, unicode): if isinstance(x, unicode):
return x.encode('utf-8') return x.encode('utf-8')
if isinstance(x, list): if isinstance(x, list):
return [ s(y) for y in x ] return [s(y) for y in x]
if isinstance(x, tuple): if isinstance(x, tuple):
return tuple(s(y) for y in x) return tuple(s(y) for y in x)
if isinstance(x, dict): if isinstance(x, dict):
return dict((s(key), s(value)) for key, value in x.items()) return dict((s(key), s(value)) for key, value in x.items())
return x return x
class UnicodeMixin(object): class UnicodeMixin(object):
__str__ = lambda x: unicode(x).encode('utf-8') __str__ = lambda x: unicode(x).encode('utf-8')
def to_hex(x): def to_hex(x):
return x.encode('hex') return x.encode('hex')
from guessit.guess import Guess, merge_all from guessit.guess import Guess, merge_all
from guessit.language import Language from guessit.language import Language
from guessit.matcher import IterativeMatcher from guessit.matcher import IterativeMatcher
@ -80,7 +89,6 @@ import logging
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
class NullHandler(logging.Handler): class NullHandler(logging.Handler):
def emit(self, record): def emit(self, record):
pass pass
@ -112,7 +120,6 @@ def _guess_filename(filename, filetype):
mtree = IterativeMatcher(filename, filetype=filetype, mtree = IterativeMatcher(filename, filetype=filetype,
opts=['skip_first_year']) opts=['skip_first_year'])
m = mtree.matched() m = mtree.matched()
if 'language' not in m and 'subtitleLanguage' not in m: if 'language' not in m and 'subtitleLanguage' not in m:
@ -123,7 +130,6 @@ def _guess_filename(filename, filetype):
opts=['nolanguage', 'nocountry']) opts=['nolanguage', 'nocountry'])
m2 = mtree2.matched() m2 = mtree2.matched()
if m.get('title') is None: if m.get('title') is None:
return m return m
@ -156,8 +162,8 @@ def _guess_filename(filename, filetype):
# if filetype is subtitle and the language appears last, just before # if filetype is subtitle and the language appears last, just before
# the extension, then it is likely a subtitle language # the extension, then it is likely a subtitle language
parts = clean_string(title.root.value).split() parts = clean_string(title.root.value).split()
if (m['type'] in ['moviesubtitle', 'episodesubtitle'] and if (m['type'] in ['moviesubtitle', 'episodesubtitle']):
parts.index(lang.value) == len(parts) - 2): if lang.value in parts and (parts.index(lang.value) == len(parts) - 2):
return m return m
# if the language was in the middle of the other potential title, # if the language was in the middle of the other potential title,
@ -177,7 +183,6 @@ def _guess_filename(filename, filetype):
return warning('Not sure of the title because of the language position') return warning('Not sure of the title because of the language position')
return m return m
@ -206,6 +211,7 @@ def guess_file_info(filename, filetype, info=None):
elif infotype == 'hash_mpc': elif infotype == 'hash_mpc':
from guessit.hash_mpc import hash_file from guessit.hash_mpc import hash_file
try: try:
result.append(Guess({'hash_mpc': hash_file(filename)}, result.append(Guess({'hash_mpc': hash_file(filename)},
confidence=1.0)) confidence=1.0))
@ -214,6 +220,7 @@ def guess_file_info(filename, filetype, info=None):
elif infotype == 'hash_ed2k': elif infotype == 'hash_ed2k':
from guessit.hash_ed2k import hash_file from guessit.hash_ed2k import hash_file
try: try:
result.append(Guess({'hash_ed2k': hash_file(filename)}, result.append(Guess({'hash_ed2k': hash_file(filename)},
confidence=1.0)) confidence=1.0))
@ -222,6 +229,7 @@ def guess_file_info(filename, filetype, info=None):
elif infotype.startswith('hash_'): elif infotype.startswith('hash_'):
import hashlib import hashlib
hashname = infotype[5:] hashname = infotype[5:]
try: try:
hasher = getattr(hashlib, hashname)() hasher = getattr(hashlib, hashname)()
@ -259,7 +267,6 @@ def guess_file_info(filename, filetype, info=None):
if 'series' in result and 'country' in result: if 'series' in result and 'country' in result:
result['series'] += ' (%s)' % result['country'].alpha2.upper() result['series'] += ' (%s)' % result['country'].alpha2.upper()
return result return result

View File

@ -1154,7 +1154,7 @@ def _getTempDir():
return os.path.join(tempfile.gettempdir(), "sickrage-%s" % (uid)) return os.path.join(tempfile.gettempdir(), "sickrage-%s" % (uid))
def getURL(url, post_data=None, params=None, headers=None, timeout=30, session=None, json=False): def getURL(url, post_data=None, params=None, headers={}, timeout=30, session=None, json=False):
""" """
Returns a byte-string retrieved from the url provider. Returns a byte-string retrieved from the url provider.
""" """
@ -1164,10 +1164,8 @@ def getURL(url, post_data=None, params=None, headers=None, timeout=30, session=N
session = CacheControl(sess=session, cache=caches.FileCache(os.path.join(cache_dir, 'sessions'))) session = CacheControl(sess=session, cache=caches.FileCache(os.path.join(cache_dir, 'sessions')))
# request session headers # request session headers
req_headers = {'User-Agent': USER_AGENT, 'Accept-Encoding': 'gzip,deflate'} session.headers.update({'User-Agent': USER_AGENT, 'Accept-Encoding': 'gzip,deflate'})
if headers: session.headers.update(headers)
req_headers.update(headers)
session.headers.update(req_headers)
# request session ssl verify # request session ssl verify
session.verify = False session.verify = False
@ -1176,11 +1174,6 @@ def getURL(url, post_data=None, params=None, headers=None, timeout=30, session=N
session.params = params session.params = params
try: try:
# Remove double-slashes from url
parsed = list(urlparse.urlparse(url))
parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one
url = urlparse.urlunparse(parsed)
# request session proxies # request session proxies
if sickbeard.PROXY_SETTING: if sickbeard.PROXY_SETTING:
logger.log("Using proxy for url: " + url, logger.DEBUG) logger.log("Using proxy for url: " + url, logger.DEBUG)

View File

@ -43,6 +43,7 @@ __all__ = ['ezrss',
import sickbeard import sickbeard
import generic import generic
from sickbeard import logger from sickbeard import logger
from os import sys from os import sys
from random import shuffle from random import shuffle

View File

@ -27,15 +27,15 @@ from sickbeard import classes, show_name_helpers, helpers
from sickbeard import exceptions, logger from sickbeard import exceptions, logger
from sickbeard.common import * from sickbeard.common import *
from sickbeard import tvcache from sickbeard import tvcache
from lib.dateutil.parser import parse as parseDate
class Animezb(generic.NZBProvider): class Animezb(generic.NZBProvider):
def __init__(self): def __init__(self):
generic.NZBProvider.__init__(self, "Animezb") generic.NZBProvider.__init__(self, "Animezb")
self.urls = {'base_url': 'https://animezb.com/'}
self.url = self.urls['base_url']
self.supportsBacklog = False self.supportsBacklog = False
self.supportsAbsoluteNumbering = True self.supportsAbsoluteNumbering = True
self.anime_only = True self.anime_only = True
@ -44,8 +44,6 @@ class Animezb(generic.NZBProvider):
self.cache = AnimezbCache(self) self.cache = AnimezbCache(self)
self.url = 'https://animezb.com/'
def isEnabled(self): def isEnabled(self):
return self.enabled return self.enabled

View File

@ -38,19 +38,19 @@ from unidecode import unidecode
class BitSoupProvider(generic.TorrentProvider): class BitSoupProvider(generic.TorrentProvider):
urls = {'base_url': 'https://www.bitsoup.me', def __init__(self):
generic.TorrentProvider.__init__(self, "BitSoup")
self.urls = {'base_url': 'https://www.bitsoup.me',
'login': 'https://www.bitsoup.me/takelogin.php', 'login': 'https://www.bitsoup.me/takelogin.php',
'detail': 'https://www.bitsoup.me/details.php?id=%s', 'detail': 'https://www.bitsoup.me/details.php?id=%s',
'search': 'https://www.bitsoup.me/browse.php?search=%s%s', 'search': 'https://www.bitsoup.me/browse.php?search=%s%s',
'download': 'https://bitsoup.me/%s', 'download': 'https://bitsoup.me/%s',
} }
def __init__(self): self.url = self.urls['base_url']
generic.TorrentProvider.__init__(self, "BitSoup")
self.supportsBacklog = True self.supportsBacklog = True
self.enabled = False self.enabled = False
self.username = None self.username = None
self.password = None self.password = None
@ -60,8 +60,6 @@ class BitSoupProvider(generic.TorrentProvider):
self.cache = BitSoupCache(self) self.cache = BitSoupCache(self)
self.url = self.urls['base_url']
self.categories = "&c42=1&c45=1&c49=1&c7=1" self.categories = "&c42=1&c45=1&c49=1&c7=1"
def isEnabled(self): def isEnabled(self):

View File

@ -47,7 +47,10 @@ class BTNProvider(generic.TorrentProvider):
self.cache = BTNCache(self) self.cache = BTNCache(self)
self.url = "http://api.btnapps.net" self.urls = {'base_url': "http://api.btnapps.net"}
self.url = self.urls['base_url']
def isEnabled(self): def isEnabled(self):
return self.enabled return self.enabled

View File

@ -36,17 +36,18 @@ from sickbeard import helpers
class EZRSSProvider(generic.TorrentProvider): class EZRSSProvider(generic.TorrentProvider):
def __init__(self): def __init__(self):
self.urls = {'base_url': 'https://www.ezrss.it/'}
self.url = self.urls['base_url']
generic.TorrentProvider.__init__(self, "EZRSS") generic.TorrentProvider.__init__(self, "EZRSS")
self.supportsBacklog = True self.supportsBacklog = True
self.enabled = False self.enabled = False
self.ratio = None self.ratio = None
self.cache = EZRSSCache(self) self.cache = EZRSSCache(self)
self.url = 'https://www.ezrss.it/'
def isEnabled(self): def isEnabled(self):
return self.enabled return self.enabled

View File

@ -44,7 +44,9 @@ class Fanzub(generic.NZBProvider):
self.cache = FanzubCache(self) self.cache = FanzubCache(self)
self.url = 'https://fanzub.com/' self.urls = {'base_url': 'https://fanzub.com/'}
self.url = self.urls['base_url']
def isEnabled(self): def isEnabled(self):
return self.enabled return self.enabled

View File

@ -39,12 +39,6 @@ from sickbeard.helpers import sanitizeSceneName
class FreshOnTVProvider(generic.TorrentProvider): class FreshOnTVProvider(generic.TorrentProvider):
urls = {'base_url': 'http://freshon.tv/',
'login': 'http://freshon.tv/login.php?action=makelogin',
'detail': 'http://freshon.tv/details.php?id=%s',
'search': 'http://freshon.tv/browse.php?incldead=%s&words=0&cat=0&search=%s',
'download': 'http://freshon.tv/download.php?id=%s&type=torrent',
}
def __init__(self): def __init__(self):
@ -64,7 +58,15 @@ class FreshOnTVProvider(generic.TorrentProvider):
self.cache = FreshOnTVCache(self) self.cache = FreshOnTVCache(self)
self.urls = {'base_url': 'http://freshon.tv/',
'login': 'http://freshon.tv/login.php?action=makelogin',
'detail': 'http://freshon.tv/details.php?id=%s',
'search': 'http://freshon.tv/browse.php?incldead=%s&words=0&cat=0&search=%s',
'download': 'http://freshon.tv/download.php?id=%s&type=torrent',
}
self.url = self.urls['base_url'] self.url = self.urls['base_url']
self.cookies = None self.cookies = None
def isEnabled(self): def isEnabled(self):

View File

@ -23,6 +23,7 @@ import datetime
import os import os
import re import re
import itertools import itertools
import urllib
import sickbeard import sickbeard
import requests import requests
@ -33,7 +34,6 @@ from sickbeard import encodingKludge as ek
from sickbeard.exceptions import ex from sickbeard.exceptions import ex
from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException
from sickbeard.common import Quality from sickbeard.common import Quality
from sickbeard import clients
from hachoir_parser import createParser from hachoir_parser import createParser
from base64 import b16encode, b32decode from base64 import b16encode, b32decode
@ -46,6 +46,9 @@ class GenericProvider:
# these need to be set in the subclass # these need to be set in the subclass
self.providerType = None self.providerType = None
self.name = name self.name = name
self.proxy = ProviderProxy()
self.urls = {}
self.url = '' self.url = ''
self.show = None self.show = None
@ -63,11 +66,7 @@ class GenericProvider:
self.session = requests.session() self.session = requests.session()
self.headers = { self.headers = {'User-Agent': USER_AGENT}
# Using USER_AGENT instead of Mozilla to keep same user agent along authentication and download phases,
#otherwise session might be broken and download fail, asking again for authentication
#'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.107 Safari/537.36'}
'User-Agent': USER_AGENT}
def getID(self): def getID(self):
return GenericProvider.makeID(self.name) return GenericProvider.makeID(self.name)
@ -125,7 +124,8 @@ class GenericProvider:
if not self._doLogin(): if not self._doLogin():
return return
return helpers.getURL(url, post_data=post_data, params=params, headers=self.headers, timeout=timeout, self.headers.update({'Referer': self.proxy.getProxyURL()})
return helpers.getURL(self.proxy._buildURL(url), post_data=post_data, params=params, headers=self.headers, timeout=timeout,
session=self.session, json=json) session=self.session, json=json)
def downloadResult(self, result): def downloadResult(self, result):
@ -471,3 +471,44 @@ class TorrentProvider(GenericProvider):
GenericProvider.__init__(self, name) GenericProvider.__init__(self, name)
self.providerType = GenericProvider.TORRENT self.providerType = GenericProvider.TORRENT
class ProviderProxy:
def __init__(self):
self.Type = 'GlypeProxy'
self.param = 'browse.php?u='
self.option = '&b=32&f=norefer'
self.enabled = False
self.url = None
self.urls = {
'getprivate.eu (NL)': 'http://getprivate.eu/',
'hideme.nl (NL)': 'http://hideme.nl/',
'proxite.eu (DE)': 'http://proxite.eu/',
'interproxy.net (EU)': 'http://interproxy.net/',
}
def isEnabled(self):
""" Return True if we Choose to call TPB via Proxy """
return self.enabled
def getProxyURL(self):
""" Return the Proxy URL Choosen via Provider Setting """
return str(self.url)
def _buildURL(self, url):
""" Return the Proxyfied URL of the page """
if self.isEnabled():
url = self.getProxyURL() + self.param + urllib.quote_plus(url) + self.option
logger.log(u"Proxified URL: " + url, logger.DEBUG)
return url
def _buildRE(self, regx):
""" Return the Proxyfied RE string """
if self.isEnabled():
regx = re.sub('//1', self.option, regx).replace('&', '&')
logger.log(u"Proxified REGEX: " + regx, logger.DEBUG)
else:
regx = re.sub('//1', '', regx)
return regx

View File

@ -13,23 +13,16 @@
# You should have received a copy of the GNU General Public License # You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>. # along with SickRage. If not, see <http://www.gnu.org/licenses/>.
import re
import time
import datetime import datetime
import urllib import urllib
import urlparse
import sys
import generic
import sickbeard
from lib import requests import sickbeard
from lib.requests import exceptions import generic
from sickbeard import classes from sickbeard import classes
from sickbeard import logger, tvcache, exceptions from sickbeard import logger, tvcache
from sickbeard import helpers from sickbeard.exceptions import AuthException
from sickbeard import clients
from sickbeard.common import cpu_presets
from sickbeard.exceptions import ex, AuthException
try: try:
import json import json
except ImportError: except ImportError:
@ -50,10 +43,13 @@ class HDBitsProvider(generic.TorrentProvider):
self.cache = HDBitsCache(self) self.cache = HDBitsCache(self)
self.url = 'https://hdbits.org' self.urls = {'base_url': 'https://hdbits.org',
self.search_url = 'https://hdbits.org/api/torrents' 'search': 'https://hdbits.org/api/torrents',
self.rss_url = 'https://hdbits.org/api/torrents' 'rss': 'https://hdbits.org/api/torrents',
self.download_url = 'https://hdbits.org/download.php?' 'download': 'https://hdbits.org/download.php?'
}
self.url = self.urls['base_url']
def isEnabled(self): def isEnabled(self):
return self.enabled return self.enabled
@ -91,7 +87,7 @@ class HDBitsProvider(generic.TorrentProvider):
title = u'' + title title = u'' + title
title = title.replace(' ', '.') title = title.replace(' ', '.')
url = self.download_url + urllib.urlencode({'id': item['id'], 'passkey': self.passkey}) url = self.urls['download'] + urllib.urlencode({'id': item['id'], 'passkey': self.passkey})
return (title, url) return (title, url)
@ -100,9 +96,10 @@ class HDBitsProvider(generic.TorrentProvider):
self._checkAuth() self._checkAuth()
logger.log(u"Search url: " + self.search_url + " search_params: " + search_params, logger.DEBUG) logger.log(u"Search url: " + self.urls['search'] + " search_params: " + search_params,
logger.DEBUG)
parsedJSON = self.getURL(self.search_url, post_data=search_params, json=True) parsedJSON = self.getURL(self.urls['search'], post_data=search_params, json=True)
if not parsedJSON: if not parsedJSON:
return [] return []
@ -208,8 +205,7 @@ class HDBitsCache(tvcache.TVCache):
results = [] results = []
try: try:
parsedJSON = self.provider.getURL(self.provider.rss_url, post_data=self.provider._make_post_data_JSON(), parsedJSON = self.provider.getURL(self.provider.urls['rss'], post_data=self.provider._make_post_data_JSON(), json=True)
json=True)
if self.provider._checkAuthFromData(parsedJSON): if self.provider._checkAuthFromData(parsedJSON):
results = parsedJSON['data'] results = parsedJSON['data']
@ -218,4 +214,5 @@ class HDBitsCache(tvcache.TVCache):
return {'entries': results} return {'entries': results}
provider = HDBitsProvider() provider = HDBitsProvider()

View File

@ -40,14 +40,6 @@ from sickbeard.helpers import sanitizeSceneName
class HDTorrentsProvider(generic.TorrentProvider): class HDTorrentsProvider(generic.TorrentProvider):
urls = {'base_url': 'https://hdts.ru/index.php',
'login': 'https://hdts.ru/login.php',
'detail': 'https://www.hdts.ru/details.php?id=%s',
'search': 'https://hdts.ru/torrents.php?search=%s&active=1&options=0%s',
'download': 'https://www.sceneaccess.eu/%s',
'home': 'https://www.hdts.ru/%s'
}
def __init__(self): def __init__(self):
generic.TorrentProvider.__init__(self, "HDTorrents") generic.TorrentProvider.__init__(self, "HDTorrents")
@ -63,10 +55,18 @@ class HDTorrentsProvider(generic.TorrentProvider):
self.minseed = None self.minseed = None
self.minleech = None self.minleech = None
self.cache = HDTorrentsCache(self) self.urls = {'base_url': 'https://hdts.ru/index.php',
'login': 'https://hdts.ru/login.php',
'detail': 'https://www.hdts.ru/details.php?id=%s',
'search': 'https://hdts.ru/torrents.php?search=%s&active=1&options=0%s',
'download': 'https://www.sceneaccess.eu/%s',
'home': 'https://www.hdts.ru/%s'
}
self.url = self.urls['base_url'] self.url = self.urls['base_url']
self.cache = HDTorrentsCache(self)
self.categories = "&category[]=59&category[]=60&category[]=30&category[]=38" self.categories = "&category[]=59&category[]=60&category[]=30&category[]=38"
self.cookies = None self.cookies = None

View File

@ -40,11 +40,6 @@ from sickbeard.show_name_helpers import allPossibleShowNames
class IPTorrentsProvider(generic.TorrentProvider): class IPTorrentsProvider(generic.TorrentProvider):
urls = {'base_url': 'https://www.iptorrents.com',
'login': 'https://www.iptorrents.com/torrents/',
'search': 'https://www.iptorrents.com/torrents/?%s%s&q=%s&qf=ti',
}
def __init__(self): def __init__(self):
generic.TorrentProvider.__init__(self, "IPTorrents") generic.TorrentProvider.__init__(self, "IPTorrents")
@ -59,6 +54,11 @@ class IPTorrentsProvider(generic.TorrentProvider):
self.cache = IPTorrentsCache(self) self.cache = IPTorrentsCache(self)
self.urls = {'base_url': 'https://www.iptorrents.com',
'login': 'https://www.iptorrents.com/torrents/',
'search': 'https://www.iptorrents.com/torrents/?%s%s&q=%s&qf=ti',
}
self.url = self.urls['base_url'] self.url = self.urls['base_url']
self.categorie = 'l73=1&l78=1&l66=1&l65=1&l79=1&l5=1&l4=1' self.categorie = 'l73=1&l78=1&l66=1&l65=1&l79=1&l5=1&l4=1'

View File

@ -19,16 +19,15 @@
from __future__ import with_statement from __future__ import with_statement
import sys
import os import os
import traceback import traceback
import urllib import urllib
import re import re
import datetime import datetime
import urlparse
import sickbeard import sickbeard
import generic import generic
from sickbeard.common import Quality from sickbeard.common import Quality
from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException
from sickbeard import logger from sickbeard import logger
@ -37,12 +36,7 @@ from sickbeard import helpers
from sickbeard import db from sickbeard import db
from sickbeard import classes from sickbeard import classes
from sickbeard.show_name_helpers import allPossibleShowNames, sanitizeSceneName from sickbeard.show_name_helpers import allPossibleShowNames, sanitizeSceneName
from sickbeard.exceptions import ex
from sickbeard import encodingKludge as ek
from sickbeard import clients
from sickbeard.bs4_parser import BS4Parser from sickbeard.bs4_parser import BS4Parser
from lib import requests
from lib.requests import exceptions
from lib.unidecode import unidecode from lib.unidecode import unidecode
@ -61,8 +55,9 @@ class KATProvider(generic.TorrentProvider):
self.cache = KATCache(self) self.cache = KATCache(self)
self.urls = ['http://kickass.so/', 'http://katproxy.com/', 'http://www.kickass.to/'] self.urls = {'base_url': 'http://kickass.so/'}
self.url = self.urls[0]
self.url = self.urls['base_url']
def isEnabled(self): def isEnabled(self):
return self.enabled return self.enabled
@ -229,20 +224,15 @@ class KATProvider(generic.TorrentProvider):
if isinstance(search_string, unicode): if isinstance(search_string, unicode):
search_string = unidecode(search_string) search_string = unidecode(search_string)
entries = []
for url in self.urls:
if mode != 'RSS': if mode != 'RSS':
searchURL = url + 'usearch/%s/?field=seeders&sorder=desc&rss=1' % urllib.quote(search_string) searchURL = self.url + 'usearch/%s/?field=seeders&sorder=desc&rss=1' % urllib.quote_plus(search_string)
else: else:
searchURL = url + 'tv/?field=time_add&sorder=desc&rss=1' searchURL = self.url + 'tv/?field=time_add&sorder=desc&rss=1'
logger.log(u"Search string: " + searchURL, logger.DEBUG) logger.log(u"Search string: " + searchURL, logger.DEBUG)
entries = self.cache.getRSSFeed(searchURL, items=['entries', 'feed'])['entries']
if entries:
break
try: try:
entries = self.cache.getRSSFeed(searchURL, items=['entries', 'feed'])['entries']
for item in entries or []: for item in entries or []:
try: try:
link = item['link'] link = item['link']

View File

@ -50,7 +50,9 @@ class NewznabProvider(generic.NZBProvider):
self.cache = NewznabCache(self) self.cache = NewznabCache(self)
self.url = url self.urls = {'base_url': url}
self.url = self.urls['base_url']
self.key = key self.key = key
@ -91,18 +93,7 @@ class NewznabProvider(generic.NZBProvider):
return self.enabled return self.enabled
def _getURL(self, url, post_data=None, params=None, timeout=30, json=False): def _getURL(self, url, post_data=None, params=None, timeout=30, json=False):
""" return self.getURL(url, post_data=post_data, params=params, timeout=timeout, json=json)
By default this is just a simple urlopen call but this method should be overridden
for providers with special URL requirements (like cookies)
Not really changed much from the superclass, can be used in future.
"""
# check for auth
if not self._doLogin():
return
return helpers.getURL(url, post_data=post_data, params=params, headers=self.headers, timeout=timeout,
session=self.session, json=json)
def get_newznab_categories(self): def get_newznab_categories(self):
""" """

View File

@ -42,13 +42,6 @@ from sickbeard.helpers import sanitizeSceneName
class NextGenProvider(generic.TorrentProvider): class NextGenProvider(generic.TorrentProvider):
urls = {'base_url': 'https://nxtgn.org/',
'search': 'https://nxtgn.org/browse.php?search=%s&cat=0&incldead=0&modes=%s',
'login_page': 'https://nxtgn.org/login.php',
'detail': 'https://nxtgn.org/details.php?id=%s',
'download': 'https://nxtgn.org/download.php?id=%s',
'takelogin': 'https://nxtgn.org/takelogin.php?csrf=',
}
def __init__(self): def __init__(self):
@ -63,6 +56,14 @@ class NextGenProvider(generic.TorrentProvider):
self.cache = NextGenCache(self) self.cache = NextGenCache(self)
self.urls = {'base_url': 'https://nxtgn.org/',
'search': 'https://nxtgn.org/browse.php?search=%s&cat=0&incldead=0&modes=%s',
'login_page': 'https://nxtgn.org/login.php',
'detail': 'https://nxtgn.org/details.php?id=%s',
'download': 'https://nxtgn.org/download.php?id=%s',
'takelogin': 'https://nxtgn.org/takelogin.php?csrf=',
}
self.url = self.urls['base_url'] self.url = self.urls['base_url']
self.categories = '&c7=1&c24=1&c17=1&c22=1&c42=1&c46=1&c26=1&c28=1&c43=1&c4=1&c31=1&c45=1&c33=1' self.categories = '&c7=1&c24=1&c17=1&c22=1&c42=1&c46=1&c26=1&c28=1&c43=1&c4=1&c31=1&c45=1&c33=1'

View File

@ -42,7 +42,9 @@ class NyaaProvider(generic.TorrentProvider):
self.cache = NyaaCache(self) self.cache = NyaaCache(self)
self.url = 'http://www.nyaa.se/' self.urls = {'base_url': 'http://www.nyaa.se/'}
self.url = self.urls['base_url']
def isEnabled(self): def isEnabled(self):
return self.enabled return self.enabled

View File

@ -17,8 +17,9 @@
# along with SickRage. If not, see <http://www.gnu.org/licenses/>. # along with SickRage. If not, see <http://www.gnu.org/licenses/>.
import urllib import urllib
import generic
import sickbeard import sickbeard
import generic
from sickbeard import tvcache from sickbeard import tvcache
from sickbeard import helpers from sickbeard import helpers
@ -46,7 +47,10 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
self.username = None self.username = None
self.api_key = None self.api_key = None
self.cache = OmgwtfnzbsCache(self) self.cache = OmgwtfnzbsCache(self)
self.url = 'https://omgwtfnzbs.org/'
self.urls = {'base_url': 'https://omgwtfnzbs.org/'}
self.url = self.urls['base_url']
self.supportsBacklog = True self.supportsBacklog = True
def isEnabled(self): def isEnabled(self):

View File

@ -18,7 +18,6 @@
import os import os
import re import re
import urlparse
import sickbeard import sickbeard
import generic import generic
@ -27,11 +26,9 @@ from sickbeard import helpers
from sickbeard import encodingKludge as ek from sickbeard import encodingKludge as ek
from sickbeard import logger from sickbeard import logger
from sickbeard import tvcache from sickbeard import tvcache
from sickbeard import clients
from sickbeard.exceptions import ex from sickbeard.exceptions import ex
from lib import requests from lib import requests
from lib.requests import exceptions
from lib.bencode import bdecode from lib.bencode import bdecode
@ -40,8 +37,11 @@ class TorrentRssProvider(generic.TorrentProvider):
enable_backlog=False): enable_backlog=False):
generic.TorrentProvider.__init__(self, name) generic.TorrentProvider.__init__(self, name)
self.cache = TorrentRssCache(self) self.cache = TorrentRssCache(self)
self.url = re.sub('\/$', '', url)
self.url = url self.urls = {'base_url': re.sub('\/$', '', url)}
self.url = self.urls['base_url']
self.enabled = True self.enabled = True
self.ratio = None self.ratio = None
self.supportsBacklog = False self.supportsBacklog = False
@ -162,8 +162,7 @@ class TorrentRssCache(tvcache.TVCache):
def _getRSSData(self): def _getRSSData(self):
logger.log(u"TorrentRssCache cache update URL: " + self.provider.url, logger.DEBUG) logger.log(u"TorrentRssCache cache update URL: " + self.provider.url, logger.DEBUG)
request_headers = None
if self.provider.cookies: if self.provider.cookies:
request_headers = {'Cookie': self.provider.cookies} self.provider.headers.update({'Cookie': self.provider.cookies})
return self.getRSSFeed(self.provider.url, request_headers=request_headers, items=['entries', 'feed']) return self.getRSSFeed(self.provider.url, items=['entries', 'feed'])

View File

@ -40,15 +40,6 @@ from sickbeard.helpers import sanitizeSceneName
class SCCProvider(generic.TorrentProvider): class SCCProvider(generic.TorrentProvider):
urls = {'base_url': 'https://sceneaccess.eu',
'login': 'https://sceneaccess.eu/login',
'detail': 'https://www.sceneaccess.eu/details?id=%s',
'search': 'https://sceneaccess.eu/browse?search=%s&method=1&%s',
'nonscene': 'https://sceneaccess.eu/nonscene?search=%s&method=1&c44=44&c45=44',
'foreign': 'https://sceneaccess.eu/foreign?search=%s&method=1&c34=34&c33=33',
'archive': 'https://sceneaccess.eu/archive?search=%s&method=1&c26=26',
'download': 'https://www.sceneaccess.eu/%s',
}
def __init__(self): def __init__(self):
@ -65,6 +56,16 @@ class SCCProvider(generic.TorrentProvider):
self.cache = SCCCache(self) self.cache = SCCCache(self)
self.urls = {'base_url': 'https://sceneaccess.eu',
'login': 'https://sceneaccess.eu/login',
'detail': 'https://www.sceneaccess.eu/details?id=%s',
'search': 'https://sceneaccess.eu/browse?search=%s&method=1&%s',
'nonscene': 'https://sceneaccess.eu/nonscene?search=%s&method=1&c44=44&c45=44',
'foreign': 'https://sceneaccess.eu/foreign?search=%s&method=1&c34=34&c33=33',
'archive': 'https://sceneaccess.eu/archive?search=%s&method=1&c26=26',
'download': 'https://www.sceneaccess.eu/%s',
}
self.url = self.urls['base_url'] self.url = self.urls['base_url']
self.categories = "c27=27&c17=17&c11=11" self.categories = "c27=27&c17=17&c11=11"

View File

@ -39,12 +39,6 @@ from sickbeard.helpers import sanitizeSceneName
class SpeedCDProvider(generic.TorrentProvider): class SpeedCDProvider(generic.TorrentProvider):
urls = {'base_url': 'http://speed.cd/',
'login': 'http://speed.cd/take_login.php',
'detail': 'http://speed.cd/t/%s',
'search': 'http://speed.cd/V3/API/API.php',
'download': 'http://speed.cd/download.php?torrent=%s',
}
def __init__(self): def __init__(self):
@ -62,6 +56,13 @@ class SpeedCDProvider(generic.TorrentProvider):
self.cache = SpeedCDCache(self) self.cache = SpeedCDCache(self)
self.urls = {'base_url': 'http://speed.cd/',
'login': 'http://speed.cd/take_login.php',
'detail': 'http://speed.cd/t/%s',
'search': 'http://speed.cd/V3/API/API.php',
'download': 'http://speed.cd/download.php?torrent=%s',
}
self.url = self.urls['base_url'] self.url = self.urls['base_url']
self.categories = {'Season': {'c14': 1}, 'Episode': {'c2': 1, 'c49': 1}, 'RSS': {'c14': 1, 'c2': 1, 'c49': 1}} self.categories = {'Season': {'c14': 1}, 'Episode': {'c2': 1, 'c49': 1}, 'RSS': {'c14': 1, 'c2': 1, 'c49': 1}}

View File

@ -40,11 +40,6 @@ from sickbeard.exceptions import ex
class T411Provider(generic.TorrentProvider): class T411Provider(generic.TorrentProvider):
urls = {'base_url': 'http://www.t411.me/',
'search': 'http://www.t411.me/torrents/search/?name=%s&cat=210&subcat=%s&search=%s&submit=Recherche',
'login_page': 'http://www.t411.me/users/login/',
'download': 'http://www.t411.me/torrents/download/?id=%s',
}
def __init__(self): def __init__(self):
generic.TorrentProvider.__init__(self, "T411") generic.TorrentProvider.__init__(self, "T411")
@ -56,6 +51,13 @@ class T411Provider(generic.TorrentProvider):
self.ratio = None self.ratio = None
self.cache = T411Cache(self) self.cache = T411Cache(self)
self.urls = {'base_url': 'http://www.t411.me/',
'search': 'http://www.t411.me/torrents/search/?name=%s&cat=210&subcat=%s&search=%s&submit=Recherche',
'login_page': 'http://www.t411.me/users/login/',
'download': 'http://www.t411.me/torrents/download/?id=%s',
}
self.url = self.urls['base_url'] self.url = self.urls['base_url']
self.subcategories = [637, 455, 433] self.subcategories = [637, 455, 433]

View File

@ -59,9 +59,9 @@ class ThePirateBayProvider(generic.TorrentProvider):
self.cache = ThePirateBayCache(self) self.cache = ThePirateBayCache(self)
self.proxy = ThePirateBayWebproxy() self.urls = {'base_url': 'https://oldpiratebay.org/'}
self.url = 'https://oldpiratebay.org/' self.url = self.urls['base_url']
self.searchurl = self.url + 'search.php?q=%s&Torrent_sort=seeders.desc' # order by seed self.searchurl = self.url + 'search.php?q=%s&Torrent_sort=seeders.desc' # order by seed
@ -114,11 +114,7 @@ class ThePirateBayProvider(generic.TorrentProvider):
fileName = None fileName = None
fileURL = self.proxy._buildURL(self.url + 'ajax_details_filelist.php?id=' + str(torrent_id)) fileURL = self.url + 'ajax_details_filelist.php?id=' + str(torrent_id)
if self.proxy and self.proxy.isEnabled():
self.headers.update({'referer': self.proxy.getProxyURL()})
data = self.getURL(fileURL) data = self.getURL(fileURL)
if not data: if not data:
return None return None
@ -225,18 +221,15 @@ class ThePirateBayProvider(generic.TorrentProvider):
results = [] results = []
items = {'Season': [], 'Episode': [], 'RSS': []} items = {'Season': [], 'Episode': [], 'RSS': []}
if self.proxy and self.proxy.isEnabled():
self.headers.update({'referer': self.proxy.getProxyURL()})
for mode in search_params.keys(): for mode in search_params.keys():
for search_string in search_params[mode]: for search_string in search_params[mode]:
if isinstance(search_string, unicode): if isinstance(search_string, unicode):
search_string = unidecode(search_string) search_string = unidecode(search_string)
if mode != 'RSS': if mode != 'RSS':
searchURL = self.proxy._buildURL(self.searchurl % (urllib.quote(search_string))) searchURL = self.searchurl % (urllib.quote(search_string))
else: else:
searchURL = self.proxy._buildURL(self.url + 'tv/latest/') searchURL = self.url + 'tv/latest/'
logger.log(u"Search string: " + searchURL, logger.DEBUG) logger.log(u"Search string: " + searchURL, logger.DEBUG)
@ -245,9 +238,8 @@ class ThePirateBayProvider(generic.TorrentProvider):
continue continue
re_title_url = self.proxy._buildRE(self.re_title_url) re_title_url = self.proxy._buildRE(self.re_title_url)
#Extracting torrent information from data returned by searchURL
match = re.compile(re_title_url, re.DOTALL).finditer(urllib.unquote(data)) match = re.compile(re_title_url, re.DOTALL).finditer(urllib.unquote(data))
for torrent in match: for torrent in match:
title = torrent.group('title').replace('_', title = torrent.group('title').replace('_',
@ -344,49 +336,4 @@ class ThePirateBayCache(tvcache.TVCache):
search_params = {'RSS': ['rss']} search_params = {'RSS': ['rss']}
return {'entries': self.provider._doSearch(search_params)} return {'entries': self.provider._doSearch(search_params)}
class ThePirateBayWebproxy:
def __init__(self):
self.Type = 'GlypeProxy'
self.param = 'browse.php?u='
self.option = '&b=32'
self.enabled = False
self.url = None
self.urls = {
'Getprivate.eu (NL)': 'http://getprivate.eu/',
'15bb51.info (US)': 'http://15bb51.info/',
'Hideme.nl (NL)': 'http://hideme.nl/',
'Proxite.eu (DE)': 'http://proxite.eu/',
'Webproxy.cz (CZ)': 'http://webproxy.cz/',
'2me2u (CZ)': 'http://2me2u.me/',
'Interproxy.net (EU)': 'http://interproxy.net/',
'Unblockersurf.info (DK)': 'http://unblockersurf.info/',
'Hiload.org (NL)': 'http://hiload.org/',
}
def isEnabled(self):
""" Return True if we Choose to call TPB via Proxy """
return self.enabled
def getProxyURL(self):
""" Return the Proxy URL Choosen via Provider Setting """
return str(self.url)
def _buildURL(self, url):
""" Return the Proxyfied URL of the page """
if self.isEnabled():
url = self.getProxyURL() + self.param + url + self.option
return url
def _buildRE(self, regx):
""" Return the Proxyfied RE string """
if self.isEnabled():
regx = re.sub('//1', self.option, regx).replace('&', '&amp;')
else:
regx = re.sub('//1', '', regx)
return regx
provider = ThePirateBayProvider() provider = ThePirateBayProvider()

View File

@ -44,7 +44,8 @@ class TokyoToshokanProvider(generic.TorrentProvider):
self.cache = TokyoToshokanCache(self) self.cache = TokyoToshokanCache(self)
self.url = 'http://tokyotosho.info/' self.urls = {'base_url': 'http://tokyotosho.info/'}
self.url = self.urls['base_url']
def isEnabled(self): def isEnabled(self):
return self.enabled return self.enabled

View File

@ -39,12 +39,6 @@ from sickbeard.helpers import sanitizeSceneName
class TorrentBytesProvider(generic.TorrentProvider): class TorrentBytesProvider(generic.TorrentProvider):
urls = {'base_url': 'https://www.torrentbytes.net',
'login': 'https://www.torrentbytes.net/takelogin.php',
'detail': 'https://www.torrentbytes.net/details.php?id=%s',
'search': 'https://www.torrentbytes.net/browse.php?search=%s%s',
'download': 'https://www.torrentbytes.net/download.php?id=%s&name=%s',
}
def __init__(self): def __init__(self):
@ -61,6 +55,13 @@ class TorrentBytesProvider(generic.TorrentProvider):
self.cache = TorrentBytesCache(self) self.cache = TorrentBytesCache(self)
self.urls = {'base_url': 'https://www.torrentbytes.net',
'login': 'https://www.torrentbytes.net/takelogin.php',
'detail': 'https://www.torrentbytes.net/details.php?id=%s',
'search': 'https://www.torrentbytes.net/browse.php?search=%s%s',
'download': 'https://www.torrentbytes.net/download.php?id=%s&name=%s',
}
self.url = self.urls['base_url'] self.url = self.urls['base_url']
self.categories = "&c41=1&c33=1&c38=1&c32=1&c37=1" self.categories = "&c41=1&c33=1&c38=1&c32=1&c37=1"

View File

@ -35,11 +35,6 @@ from sickbeard.helpers import sanitizeSceneName
class TorrentDayProvider(generic.TorrentProvider): class TorrentDayProvider(generic.TorrentProvider):
urls = {'base_url': 'http://www.td.af',
'login': 'http://www.td.af/torrents/',
'search': 'http://www.td.af/V3/API/API.php',
'download': 'http://www.td.af/download.php/%s/%s'
}
def __init__(self): def __init__(self):
@ -59,6 +54,12 @@ class TorrentDayProvider(generic.TorrentProvider):
self.cache = TorrentDayCache(self) self.cache = TorrentDayCache(self)
self.urls = {'base_url': 'http://www.td.af',
'login': 'http://www.td.af/torrents/',
'search': 'http://www.td.af/V3/API/API.php',
'download': 'http://www.td.af/download.php/%s/%s'
}
self.url = self.urls['base_url'] self.url = self.urls['base_url']
self.cookies = None self.cookies = None

View File

@ -20,8 +20,10 @@ import re
import traceback import traceback
import datetime import datetime
import urlparse import urlparse
import sickbeard import sickbeard
import generic import generic
from sickbeard.common import Quality, cpu_presets from sickbeard.common import Quality, cpu_presets
from sickbeard import logger from sickbeard import logger
from sickbeard import tvcache from sickbeard import tvcache
@ -40,13 +42,6 @@ from sickbeard.helpers import sanitizeSceneName
class TorrentLeechProvider(generic.TorrentProvider): class TorrentLeechProvider(generic.TorrentProvider):
urls = {'base_url': 'https://torrentleech.org/',
'login': 'https://torrentleech.org/user/account/login/',
'detail': 'https://torrentleech.org/torrent/%s',
'search': 'https://torrentleech.org/torrents/browse/index/query/%s/categories/%s',
'download': 'https://torrentleech.org%s',
'index': 'https://torrentleech.org/torrents/browse/index/categories/%s',
}
def __init__(self): def __init__(self):
@ -63,6 +58,14 @@ class TorrentLeechProvider(generic.TorrentProvider):
self.cache = TorrentLeechCache(self) self.cache = TorrentLeechCache(self)
self.urls = {'base_url': 'https://torrentleech.org/',
'login': 'https://torrentleech.org/user/account/login/',
'detail': 'https://torrentleech.org/torrent/%s',
'search': 'https://torrentleech.org/torrents/browse/index/query/%s/categories/%s',
'download': 'https://torrentleech.org%s',
'index': 'https://torrentleech.org/torrents/browse/index/categories/%s',
}
self.url = self.urls['base_url'] self.url = self.urls['base_url']
self.categories = "2,26,27,32" self.categories = "2,26,27,32"

View File

@ -45,7 +45,8 @@ class TvTorrentsProvider(generic.TorrentProvider):
self.cache = TvTorrentsCache(self) self.cache = TvTorrentsCache(self)
self.url = 'https://www.tvtorrents.com/' self.urls = {'base_url': 'https://www.tvtorrents.com/'}
self.url = self.urls['base_url']
def isEnabled(self): def isEnabled(self):
return self.enabled return self.enabled

View File

@ -30,7 +30,8 @@ class WombleProvider(generic.NZBProvider):
generic.NZBProvider.__init__(self, "Womble's Index") generic.NZBProvider.__init__(self, "Womble's Index")
self.enabled = False self.enabled = False
self.cache = WombleCache(self) self.cache = WombleCache(self)
self.url = 'https://newshost.co.za/' self.urls = {'base_url': 'https://newshost.co.za/'}
self.url = self.urls['base_url']
def isEnabled(self): def isEnabled(self):
return self.enabled return self.enabled

View File

@ -34,7 +34,7 @@ class RSSFeeds:
finally: finally:
self.rssDB.close() self.rssDB.close()
def getFeed(self, url, post_data=None, request_headers=None, items=[]): def getFeed(self, url, post_data=None, request_headers=None, referrer=None, items=[]):
parsed = list(urlparse.urlparse(url)) parsed = list(urlparse.urlparse(url))
parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one parsed[2] = re.sub("/{2,}", "/", parsed[2]) # replace two or more / with one
@ -45,7 +45,7 @@ class RSSFeeds:
try: try:
fc = Cache(self.rssDB) fc = Cache(self.rssDB)
resp = fc.fetch(url, False, False, request_headers) resp = fc.fetch(url, False, False, request_headers=request_headers, referrer=referrer)
for item in items: for item in items:
try: try:

View File

@ -79,7 +79,6 @@ class CacheDBConnection(db.DBConnection):
class TVCache(): class TVCache():
def __init__(self, provider): def __init__(self, provider):
self.provider = provider self.provider = provider
self.providerID = self.provider.getID() self.providerID = self.provider.getID()
self.providerDB = None self.providerDB = None
@ -139,8 +138,9 @@ class TVCache():
logger.log(u"Error while searching " + self.provider.name + ", skipping: " + ex(e), logger.ERROR) logger.log(u"Error while searching " + self.provider.name + ", skipping: " + ex(e), logger.ERROR)
logger.log(traceback.format_exc(), logger.DEBUG) logger.log(traceback.format_exc(), logger.DEBUG)
def getRSSFeed(self, url, post_data=None, request_headers=None, items=[]): def getRSSFeed(self, url, post_data=None, items=[]):
return RSSFeeds(self.providerID).getFeed(url, post_data, request_headers, items) referrer = self.provider.proxy.getProxyURL()
return RSSFeeds(self.providerID).getFeed(self.provider.proxy._buildURL(url), post_data, self.provider.headers, referrer, items)
def _translateTitle(self, title): def _translateTitle(self, title):
return u'' + title.replace(' ', '.') return u'' + title.replace(' ', '.')

View File

@ -77,6 +77,9 @@ class ApiHandler(RequestHandler):
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
super(ApiHandler, self).__init__(*args, **kwargs) super(ApiHandler, self).__init__(*args, **kwargs)
def set_default_headers(self):
self.set_header('Cache-Control', 'no-store, no-cache, must-revalidate, max-age=0')
def get(self, *args, **kwargs): def get(self, *args, **kwargs):
kwargs = self.request.arguments kwargs = self.request.arguments
for arg, value in kwargs.items(): for arg, value in kwargs.items():
@ -1635,7 +1638,6 @@ class CMD_SickBeardPing(ApiCall):
def run(self): def run(self):
""" check to see if sickrage is running """ """ check to see if sickrage is running """
self.set_header('Cache-Control', "max-age=0,no-cache,no-store")
if sickbeard.started: if sickbeard.started:
return _responds(RESULT_SUCCESS, {"pid": sickbeard.PID}, "Pong") return _responds(RESULT_SUCCESS, {"pid": sickbeard.PID}, "Pong")
else: else: