mirror of https://github.com/moparisthebest/SickRage synced 2024-08-13 16:53:54 -04:00

Merge pull request #693 from echel0n/dev

Merge pull requests and changes from DEV branch into MASTER
echel0n 2014-08-07 22:22:13 -07:00
commit 1c4cc6851f
34 changed files with 154 additions and 717 deletions

View File

@@ -101,7 +101,7 @@
 #end if
 <a href="$sickbeard.indexerApi($show.indexer).config["show_url"]$show.indexerid" onclick="window.open(this.href, '_blank'); return false;" title="$sickbeard.indexerApi($show.indexer).config["show_url"]$show.indexerid"><img alt="$sickbeard.indexerApi($show.indexer).name" height="16" width="16" src="$sbRoot/images/$sickbeard.indexerApi($show.indexer).config["icon"] "style="margin-top: -1px;"/></a>
 #if $xem_numbering or $xem_absolute_numbering:
-<a href="http://thexem.de/search?q=$show.name" rel="noreferrer" onclick="window.open('${sickbeard.ANON_REDIRECT}' + this.href, '_blank'); return false;" title="http://thexem.de/search?q-$show.name"><img alt="[xem]" height="16" width="16" src="$sbRoot/images/xem.png" style="margin-top: -1px;"/>
+<a href="http://thexem.de/search?q=$show.name" rel="noreferrer" onclick="window.open('${sickbeard.ANON_REDIRECT}' + this.href, '_blank'); return false;" title="http://thexem.de/search?q-$show.name"><img alt="[xem]" height="16" width="16" src="$sbRoot/images/xem.png" style="margin-top: -1px;"/></a>
 #end if
 </span>
 </span>

View File

@@ -136,6 +136,15 @@ def replaceExtension(filename, newExt):
     return sepFile[0] + "." + newExt
+def isBtsyncFile(filename):
+    sepFile = filename.rpartition(".")
+    if sepFile[2].lower() == '!sync':
+        return True
+    else:
+        return False
 def isMediaFile(filename):
     # ignore samples
     if re.search('(^|[\W_])(sample\d*)[\W_]', filename, re.I):
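The new helper feeds the post-processing changes further down, which skip a folder while BTSync is still writing into it. A minimal sketch of the behaviour (the standalone function mirrors the helper above; the file names are invented):

    # Mirrors the isBtsyncFile helper added above; sample file names are made up.
    def isBtsyncFile(filename):
        return filename.rpartition(".")[2].lower() == '!sync'

    files = ["Show.S01E01.720p.mkv", "Show.S01E02.720p.mkv.!sync"]
    syncing = [f for f in files if isBtsyncFile(f)]
    if syncing:
        print("Found .!sync files, skipping post processing")  # triggered by the second file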

View File

@@ -167,8 +167,9 @@ normal_regexes = [
 # 01 - Ep Name
 '''
 ^((?P<series_name>.+?)(?:[. _-]{2,}|[. _]))? # Show_Name and separator
-(?P<ep_num>\d{1,2}) # 02
-(?:-(?P<extra_ep_num>\d{1,2}))* # 02
+(?P<ep_num>\d{1,3}) # 02
+(?:-(?P<extra_ep_num>\d{1,3}))* # -03-04-05 etc
+\s?of?\s?\d{1,3}? # of joiner (with or without spaces) and series total ep
 [. _-]+((?P<extra_info>.+?) # Source_Quality_Etc-
 ((?<![. _-])(?<!WEB) # Make sure this is really the release group
 -(?P<release_group>[^- ]+([. _-]\[.*\])?))?)?$ # Group
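For illustration, a self-contained check of the widened numbering (this compiles only the ep_num/extra_ep_num fragment shown above, not SickRage's full pattern; the sample file name is invented):

    import re

    # Episode numbers may now be one to three digits, so three-digit numbering
    # and chained extra episodes like "103-104" still capture.
    fragment = re.compile(r'^(?P<ep_num>\d{1,3})(?:-(?P<extra_ep_num>\d{1,3}))*[. _-]+')

    match = fragment.match('103-104 - Some Episode Name.mkv')
    print(match.group('ep_num'))        # 103
    print(match.group('extra_ep_num'))  # 104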

View File

@@ -143,7 +143,7 @@ class XBMCNotifier:
         command = '{"jsonrpc":"2.0","method":"GUI.ShowNotification","params":{"title":"%s","message":"%s", "image": "%s"},"id":1}' % (
             title.encode("utf-8"), message.encode("utf-8"), self.sb_logo_url)
         notifyResult = self._send_to_xbmc_json(command, curHost, username, password)
-        if getattr(notifyResult, 'result', None):
+        if notifyResult.get('result'):
             result += curHost + ':' + notifyResult["result"].decode(sickbeard.SYS_ENCODING)
         else:
             if sickbeard.XBMC_ALWAYS_ON or force:
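The JSON-RPC helper hands back a plain dict, and getattr() on a dict returns None because dicts carry no such attribute, so the success branch never ran; the dict lookup is the check that actually works. A small illustration (the response dict is a made-up example of what the GUI.ShowNotification call can return):

    # Made-up JSON-RPC response; real ones come from _send_to_xbmc_json().
    notifyResult = {"id": 1, "jsonrpc": "2.0", "result": "OK"}

    print(getattr(notifyResult, 'result', None))  # None - a dict has no 'result' attribute
    if notifyResult.get('result'):                # the new check finds the key
        print("Notification sent: " + notifyResult["result"])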

View File

@@ -138,6 +138,13 @@ def processDir(dirName, nzbName=None, process_method=None, force=False, is_prior
     path, dirs, files = get_path_dir_files(dirName, nzbName, type)
+    btsyncFiles = filter(helpers.isBtsyncFile, files)
+    # Don't post process if files are still being synced from btsync
+    if btsyncFiles:
+        returnStr += logHelper(u"Found .!sync files, skipping post processing", logger.ERROR)
+        return returnStr
     returnStr += logHelper(u"PostProcessing Path: " + path, logger.DEBUG)
     returnStr += logHelper(u"PostProcessing Dirs: " + str(dirs), logger.DEBUG)
@@ -179,6 +186,13 @@ def processDir(dirName, nzbName=None, process_method=None, force=False, is_prior
     for processPath, processDir, fileList in ek.ek(os.walk, ek.ek(os.path.join, path, dir), topdown=False):
+        btsyncFiles = filter(helpers.isBtsyncFile, fileList)
+        # Don't post process if files are still being synced from btsync
+        if btsyncFiles:
+            returnStr += logHelper(u"Found .!sync files, skipping post processing", logger.ERROR)
+            return returnStr
         rarFiles = filter(helpers.isRarFile, fileList)
         rarContent = unRAR(processPath, rarFiles, force)
         fileList = set(fileList + rarContent)

View File

@@ -136,13 +136,10 @@ class ProperFinder():
             curProper.indexer = parse_result.show.indexer
             # populate our Proper instance
-            if parse_result.is_anime:
-                logger.log(u"I am sorry '" + curProper.name + "' seams to be an anime proper seach is not yet suported",
-                           logger.DEBUG)
-                continue
-            else:
             curProper.season = parse_result.season_number if parse_result.season_number != None else 1
             curProper.episode = parse_result.episode_numbers[0]
+            # only get anime proper if it has release group and version
             if parse_result.is_anime:
                 if parse_result.release_group and parse_result.version:
                     curProper.release_group = parse_result.release_group
@@ -153,7 +150,7 @@
             curProper.quality = Quality.nameQuality(curProper.name, parse_result.is_anime)
             if not show_name_helpers.filterBadReleases(curProper.name):
-                logger.log(u"Proper " + curProper.name + " isn't a valid scene release that we want, igoring it",
+                logger.log(u"Proper " + curProper.name + " isn't a valid scene release that we want, ignoring it",
                            logger.DEBUG)
                 continue

View File

@@ -29,6 +29,7 @@ from sickbeard.common import *
 from sickbeard import tvcache
 from lib.dateutil.parser import parse as parseDate
 class Animezb(generic.NZBProvider):
     def __init__(self):
@@ -60,7 +61,8 @@ class Animezb(generic.NZBProvider):
     def _get_episode_search_strings(self, ep_obj, add_string=''):
         search_string = []
         for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
-            ep_string = '+'.join([helpers.sanitizeSceneName(show_name).replace('.', '+'), str(ep_obj.scene_absolute_number).zfill(2)])
+            ep_string = '+'.join(
+                [helpers.sanitizeSceneName(show_name).replace('.', '+'), str(ep_obj.scene_absolute_number).zfill(2)])
             search_string.append(ep_string)
         return search_string
@@ -106,8 +108,7 @@ class Animezb(generic.NZBProvider):
         results = []
-        for i in [2, 3, 4]: # we will look for a version 2, 3 and 4
-            for item in self._doSearch("v" + str(i)):
+        for item in self._doSearch("v2 OR v3 OR v4 OR v5"):
             (title, url) = self._get_title_and_url(item)
@@ -125,6 +126,7 @@ class Animezb(generic.NZBProvider):
         return results
 class AnimezbCache(tvcache.TVCache):
     def __init__(self, provider):
@@ -132,12 +134,12 @@ class AnimezbCache(tvcache.TVCache):
         tvcache.TVCache.__init__(self, provider)
         # only poll Animezb every 20 minutes max
-        # we get 100 post each call !
         self.minTime = 20
-    def _getRSSData(self):
-        params = {"cat": "anime".encode('utf-8'),
+    def _getDailyData(self):
+        params = {
+            "cat": "anime".encode('utf-8'),
             "max": "100".encode('utf-8')
         }
@@ -145,9 +147,7 @@ class AnimezbCache(tvcache.TVCache):
         logger.log(self.provider.name + u" cache update URL: " + rss_url, logger.DEBUG)
-        return self.getRSSFeed(rss_url)
-    def _checkItemAuth(self, title, url):
-        return True
+        return self.getRSSFeed(rss_url).entries
 provider = Animezb()

View File

@@ -268,46 +268,9 @@ class BitSoupCache(tvcache.TVCache):
         # only poll TorrentBytes every 20 minutes max
         self.minTime = 20
-    def updateCache(self):
-        # delete anything older then 7 days
-        logger.log(u"Clearing " + self.provider.name + " cache")
-        self._clearCache()
-        if not self.shouldUpdate():
-            return
+    def _getDailyData(self):
         search_params = {'RSS': ['']}
-        rss_results = self.provider._doSearch(search_params)
-        if rss_results:
-            self.setLastUpdate()
-        else:
-            return []
-        cl = []
-        for result in rss_results:
-            item = (result[0], result[1])
-            ci = self._parseItem(item)
-            if ci is not None:
-                cl.append(ci)
-        if len(cl) > 0:
-            myDB = self._getDB()
-            myDB.mass_action(cl)
-    def _parseItem(self, item):
-        (title, url) = item
-        if not title or not url:
-            return None
-        logger.log(u"Attempting to cache item:[" + title + "]", logger.DEBUG)
-        return self._addCacheEntry(title, url)
+        return self.provider._doSearch(search_params)
 provider = BitSoupProvider()

View File

@@ -297,44 +297,7 @@ class BTNCache(tvcache.TVCache):
         # At least 15 minutes between queries
         self.minTime = 15
-    def updateCache(self):
-        # delete anything older then 7 days
-        self._clearCache()
-        if not self.shouldUpdate():
-            return
-        if self._checkAuth(None):
-            data = self._getRSSData()
-            # As long as we got something from the provider we count it as an update
-            if data:
-                self.setLastUpdate()
-            else:
-                return []
-            if self._checkAuth(data):
-                # By now we know we've got data and no auth errors, all we need to do is put it in the database
-                cl = []
-                for item in data:
-                    ci = self._parseItem(item)
-                    if ci is not None:
-                        cl.append(ci)
-                if len(cl) > 0:
-                    myDB = self._getDB()
-                    myDB.mass_action(cl)
-            else:
-                raise AuthException(
-                    "Your authentication info for " + self.provider.name + " is incorrect, check your config")
-        else:
-            return []
-    def _getRSSData(self):
+    def _getDailyData(self):
         # Get the torrents uploaded since last check.
         seconds_since_last_update = math.ceil(time.time() - time.mktime(self._getLastUpdate().timetuple()))
@@ -352,17 +315,6 @@
         return self.provider._doSearch(search_params=None, age=seconds_since_last_update)
-    def _parseItem(self, item):
-        (title, url) = self.provider._get_title_and_url(item)
-        if title and url:
-            logger.log(u"Adding item to results: " + title, logger.DEBUG)
-            return self._addCacheEntry(title, url)
-        else:
-            logger.log(u"The data returned from the " + self.provider.name + " is incomplete, this result is unusable",
-                       logger.ERROR)
-            return None
     def _checkAuth(self, data):
         return self.provider._checkAuthFromData(data)

View File

@@ -123,7 +123,7 @@ class DTTCache(tvcache.TVCache):
         # only poll DTT every 30 minutes max
         self.minTime = 30
-    def _getRSSData(self):
+    def _getDailyData(self):
         params = {"items": "all"}
@@ -135,12 +135,7 @@
         url = self.provider.url + 'rss/allshows?' + urllib.urlencode(params)
         logger.log(u"DTT cache update URL: " + url, logger.DEBUG)
-        return self.getRSSFeed(url)
-    def _parseItem(self, item):
-        title, url = self.provider._get_title_and_url(item)
-        logger.log(u"RSS Feed provider: [" + self.provider.name + "] Attempting to add item to cache: " + title, logger.DEBUG)
-        return self._addCacheEntry(title, url)
+        return self.getRSSFeed(url).entries
 provider = DTTProvider()

View File

@@ -174,27 +174,12 @@ class EZRSSCache(tvcache.TVCache):
         # only poll EZRSS every 15 minutes max
         self.minTime = 15
-    def _getRSSData(self):
+    def _getDailyData(self):
         rss_url = self.provider.url + 'feed/'
         logger.log(self.provider.name + " cache update URL: " + rss_url, logger.DEBUG)
-        return self.getRSSFeed(rss_url)
-    def _parseItem(self, item):
-        (title, url) = self.provider._get_title_and_url(item)
-        if title and url:
-            logger.log(u"RSS Feed provider: [" + self.provider.name + "] Attempting to add item to cache: " + title, logger.DEBUG)
-            url = self._translateLinkURL(url)
-            return self._addCacheEntry(title, url)
-        else:
-            logger.log(
-                u"The XML returned from the " + self.provider.name + " feed is incomplete, this result is unusable",
-                logger.ERROR)
-            return None
+        return self.getRSSFeed(rss_url).entries
 provider = EZRSSProvider()

View File

@@ -29,6 +29,7 @@ from sickbeard.common import *
 from sickbeard import tvcache
 from lib.dateutil.parser import parse as parseDate
 class Fanzub(generic.NZBProvider):
     def __init__(self):
@@ -51,9 +52,6 @@ class Fanzub(generic.NZBProvider):
     def imageName(self):
         return 'fanzub.gif'
-    def _checkAuth(self):
-        return True
     def _get_season_search_strings(self, ep_obj):
         return [x for x in show_name_helpers.makeSceneSeasonSearchString(self.show, ep_obj)]
@@ -102,8 +100,7 @@ class Fanzub(generic.NZBProvider):
         results = []
-        for i in [2, 3, 4]: # we will look for a version 2, 3 and 4
-            for item in self._doSearch("v" + str(i)):
+        for item in self._doSearch("v2|v3|v4|v5"):
             (title, url) = self._get_title_and_url(item)
@@ -121,6 +118,7 @@ class Fanzub(generic.NZBProvider):
         return results
 class FanzubCache(tvcache.TVCache):
     def __init__(self, provider):
@@ -128,12 +126,12 @@ class FanzubCache(tvcache.TVCache):
         tvcache.TVCache.__init__(self, provider)
         # only poll Fanzub every 20 minutes max
-        # we get 100 post each call !
         self.minTime = 20
-    def _getRSSData(self):
-        params = {"cat": "anime".encode('utf-8'),
+    def _getDailyData(self):
+        params = {
+            "cat": "anime".encode('utf-8'),
             "max": "100".encode('utf-8')
         }
@@ -141,9 +139,7 @@ class FanzubCache(tvcache.TVCache):
         logger.log(self.provider.name + u" cache update URL: " + rss_url, logger.DEBUG)
-        return self.getRSSFeed(rss_url)
-    def _checkItemAuth(self, title, url):
-        return True
+        return self.getRSSFeed(rss_url).entries
 provider = Fanzub()

View File

@@ -299,46 +299,8 @@ class FreshOnTVCache(tvcache.TVCache):
         # poll delay in minutes
         self.minTime = 20
-    def updateCache(self):
-        # delete anything older then 7 days
-        logger.log(u"Clearing " + self.provider.name + " cache")
-        self._clearCache()
-        if not self.shouldUpdate():
-            return
+    def _getDailyData(self):
         search_params = {'RSS': ['']}
-        rss_results = self.provider._doSearch(search_params)
-        if rss_results:
-            self.setLastUpdate()
-        else:
-            return []
-        cl = []
-        for result in rss_results:
-            item = (result[0], result[1])
-            ci = self._parseItem(item)
-            if ci is not None:
-                cl.append(ci)
-        if len(cl) > 0:
-            myDB = self._getDB()
-            myDB.mass_action(cl)
-    def _parseItem(self, item):
-        (title, url) = item
-        if not title or not url:
-            return None
-        logger.log(u"Attempting to cache item:[" + title +"]", logger.DEBUG)
-        return self._addCacheEntry(title, url)
+        return self.provider._doSearch(search_params).entries
 provider = FreshOnTVProvider()

View File

@@ -177,17 +177,17 @@ class HDBitsProvider(generic.TorrentProvider):
         if show.air_by_date or show.sports:
             post_data['tvdb'] = {
                 'id': show.indexerid,
-                'season': str(episode.airdate)[:7],
+                'season': str(season.airdate)[:7],
             }
         elif show.anime:
             post_data['tvdb'] = {
                 'id': show.indexerid,
-                'season': "%d" % episode.scene_absolute_number,
+                'season': "%d" % season.scene_absolute_number,
             }
         else:
             post_data['tvdb'] = {
                 'id': show.indexerid,
-                'season': episode.scene_season,
+                'season': season.scene_season,
             }
         if search_term:
@@ -251,18 +251,6 @@ class HDBitsCache(tvcache.TVCache):
     def _getRSSData(self):
         return self.provider.getURL(self.provider.rss_url, post_data=self.provider._make_post_data_JSON(), json=True)
-    def _parseItem(self, item):
-        (title, url) = self.provider._get_title_and_url(item)
-        if title and url:
-            logger.log(u"Adding item to results: " + title, logger.DEBUG)
-            return self._addCacheEntry(title, url)
-        else:
-            logger.log(u"The data returned from the " + self.provider.name + " is incomplete, this result is unusable",
-                       logger.ERROR)
-            return None
     def _checkAuth(self, data):
         return self.provider._checkAuthFromData(data)

View File

@@ -329,47 +329,9 @@ class HDTorrentsCache(tvcache.TVCache):
         # only poll HDTorrents every 10 minutes max
         self.minTime = 20
-    def updateCache(self):
-        # delete anything older then 7 days
-        self._clearCache()
-        if not self.shouldUpdate():
-            return
+    def _getDailyData(self):
         search_params = {'RSS': []}
-        rss_results = self.provider._doSearch(search_params)
-        if rss_results:
-            self.setLastUpdate()
-        else:
-            return []
-        cl = []
-        for result in rss_results:
-            item = (result[0], result[1])
-            ci = self._parseItem(item)
-            if ci is not None:
-                cl.append(ci)
-        if len(cl) > 0:
-            myDB = self._getDB()
-            myDB.mass_action(cl)
-    def _parseItem(self, item):
-        (title, url) = item
-        if not title or not url:
-            return None
-        logger.log(u"Attempting to cache item:[" + title +"]", logger.DEBUG)
-        return self._addCacheEntry(title, url)
+        return self.provider._doSearch(search_params)
 provider = HDTorrentsProvider()

View File

@@ -269,47 +269,9 @@ class IPTorrentsCache(tvcache.TVCache):
         # Only poll IPTorrents every 10 minutes max
         self.minTime = 10
-    def updateCache(self):
-        # delete anything older then 7 days
-        self._clearCache()
-        if not self.shouldUpdate():
-            return
+    def _getDailyData(self):
         search_params = {'RSS': ['']}
-        rss_results = self.provider._doSearch(search_params)
-        if rss_results:
-            self.setLastUpdate()
-        else:
-            return []
-        cl = []
-        for result in rss_results:
-            item = (result[0], result[1])
-            ci = self._parseItem(item)
-            if ci is not None:
-                cl.append(ci)
-        if len(cl) > 0:
-            myDB = self._getDB()
-            myDB.mass_action(cl)
-    def _parseItem(self, item):
-        (title, url) = item
-        if not title or not url:
-            return None
-        logger.log(u"Attempting to cache item:[" + title +"]", logger.DEBUG)
-        return self._addCacheEntry(title, url)
+        return self.provider._doSearch(search_params)
 provider = IPTorrentsProvider()

View File

@@ -355,43 +355,9 @@ class KATCache(tvcache.TVCache):
         # only poll ThePirateBay every 10 minutes max
         self.minTime = 20
-    def updateCache(self):
-        # delete anything older then 7 days
-        self._clearCache()
-        if not self.shouldUpdate():
-            return
+    def _getDailyData(self):
         search_params = {'RSS': ['rss']}
-        rss_results = self.provider._doSearch(search_params)
-        if rss_results:
-            self.setLastUpdate()
-        else:
-            return []
-        cl = []
-        for result in rss_results:
-            item = (result[0], result[1])
-            ci = self._parseItem(item)
-            if ci is not None:
-                cl.append(ci)
-        if len(cl) > 0:
-            myDB = self._getDB()
-            myDB.mass_action(cl)
-    def _parseItem(self, item):
-        (title, url) = item
-        if not title or not url:
-            return None
-        logger.log(u"Attempting to cache item:[" + title + "]", logger.DEBUG)
-        return self._addCacheEntry(title, url)
+        return self.provider._doSearch(search_params)
 provider = KATProvider()

View File

@@ -314,9 +314,9 @@ class NewzbinCache(tvcache.TVCache):
         # only poll Newzbin every 10 mins max
         self.minTime = 1
-    def _getRSSData(self):
-        return self.provider._getRSSData()
+    def _getDailyData(self):
+        return self.provider._getRSSData().entries
     def _parseItem(self, item):

View File

@@ -74,8 +74,7 @@ class NewznabProvider(generic.NZBProvider):
             int(self.enabled)) + '|' + self.search_mode + '|' + str(int(self.search_fallback))
     def imageName(self):
-        if ek.ek(os.path.isfile,
-                 ek.ek(os.path.join, sickbeard.PROG_DIR, 'gui', 'slick', 'images', 'providers', self.getID() + '.png')):
+        if ek.ek(os.path.isfile, ek.ek(os.path.join, sickbeard.PROG_DIR, 'gui', sickbeard.GUI_NAME, 'images', 'providers', self.getID() + '.png')):
             return self.getID() + '.png'
         return 'newznab.png'

View File

@@ -318,47 +318,9 @@ class NextGenCache(tvcache.TVCache):
         # Only poll NextGen every 10 minutes max
         self.minTime = 10
-    def updateCache(self):
-        # delete anything older then 7 days
-        self._clearCache()
-        if not self.shouldUpdate():
-            return
+    def _getDailyData(self):
         search_params = {'RSS': ['']}
-        rss_results = self.provider._doSearch(search_params)
-        if rss_results:
-            self.setLastUpdate()
-        else:
-            return []
-        cl = []
-        for result in rss_results:
-            item = (result[0], result[1])
-            ci = self._parseItem(item)
-            if ci is not None:
-                cl.append(ci)
-        if len(cl) > 0:
-            myDB = self._getDB()
-            myDB.mass_action(cl)
-    def _parseItem(self, item):
-        (title, url) = item
-        if not title or not url:
-            return None
-        logger.log(u"Attempting to cache item:[" + title +"]", logger.DEBUG)
-        return self._addCacheEntry(title, url)
+        return self.provider._doSearch(search_params)
 provider = NextGenProvider()

View File

@@ -28,7 +28,6 @@ from sickbeard.common import Quality
 from sickbeard import tvcache
 from sickbeard import show_name_helpers
-REMOTE_DBG = False
 class NyaaProvider(generic.TorrentProvider):
     def __init__(self):
@@ -70,7 +69,8 @@ class NyaaProvider(generic.TorrentProvider):
             logger.log(u"" + str(self.show.name) + " is not an anime skiping " + str(self.name))
             return []
-        params = {"term": search_string.encode('utf-8'),
+        params = {
+            "term": search_string.encode('utf-8'),
             "cats": '1_37', # Limit to English-translated Anime (for now)
             "sort": '2', # Sort Descending By Seeders
         }
@@ -118,6 +118,7 @@ class NyaaProvider(generic.TorrentProvider):
     def seedRatio(self):
         return self.ratio
 class NyaaCache(tvcache.TVCache):
     def __init__(self, provider):
         tvcache.TVCache.__init__(self, provider)
@@ -125,29 +126,18 @@ class NyaaCache(tvcache.TVCache):
         # only poll NyaaTorrents every 15 minutes max
         self.minTime = 15
-    def _getRSSData(self):
+    def _getDailyData(self):
         params = {
             "page": 'rss', # Use RSS page
-            "order": '1' #Sort Descending By Date
+            "order": '1', # Sort Descending By Date
+            "cats": '1_37', # Limit to English-translated Anime (for now)
         }
         url = self.provider.url + '?' + urllib.urlencode(params)
         logger.log(u"NyaaTorrents cache update URL: " + url, logger.DEBUG)
-        return self.getRSSFeed(url)
-    def _parseItem(self, item):
-        (title, url) = self.provider._get_title_and_url(item)
-        if not title or not url:
-            logger.log(u"The XML returned from the NyaaTorrents RSS feed is incomplete, this result is unusable",
-                       logger.ERROR)
-            return None
-        logger.log(u"RSS Feed provider: [" + self.provider.name + "] Attempting to add item to cache: " + title, logger.DEBUG)
-        return self._addCacheEntry(title, url)
+        return self.getRSSFeed(url).entries
 provider = NyaaProvider()

View File

@@ -98,7 +98,7 @@ class NZBsRUSCache(tvcache.TVCache):
         # only poll NZBs'R'US every 15 minutes max
         self.minTime = 15
-    def _getRSSData(self):
+    def _getDailyData(self):
         url = self.provider.url + 'rssfeed.php?'
         urlArgs = {'cat': '91,75,104', # HD,XviD,SD-x264
                    'i': sickbeard.NZBSRUS_UID,
@@ -107,7 +107,7 @@ class NZBsRUSCache(tvcache.TVCache):
         url += urllib.urlencode(urlArgs)
         logger.log(u"NZBs'R'US cache update URL: " + url, logger.DEBUG)
-        return self.getRSSFeed(url)
+        return self.getRSSFeed(url).entries
     def _checkAuth(self, data):
         return data != 'Invalid Link'

View File

@@ -154,7 +154,7 @@ class OmgwtfnzbsCache(tvcache.TVCache):
         tvcache.TVCache.__init__(self, provider)
         self.minTime = 20
-    def _getRSSData(self):
+    def _getDailyData(self):
         params = {'user': provider.username,
                   'api': provider.api_key,
                   'eng': 1,
@@ -164,7 +164,7 @@ class OmgwtfnzbsCache(tvcache.TVCache):
         logger.log(self.provider.name + u" cache update URL: " + rss_url, logger.DEBUG)
-        return self.getRSSFeed(rss_url)
+        return self.getRSSFeed(rss_url).entries
     def _checkAuth(self, data):
         return self.provider._checkAuthFromData(data)

View File

@@ -245,47 +245,9 @@ class PublicHDCache(tvcache.TVCache):
         # only poll ThePirateBay every 10 minutes max
         self.minTime = 20
-    def updateCache(self):
-        # delete anything older then 7 days
-        self._clearCache()
-        if not self.shouldUpdate():
-            return
+    def _getDailyData(self):
         search_params = {'RSS': ['rss']}
-        rss_results = self.provider._doSearch(search_params)
-        if rss_results:
-            self.setLastUpdate()
-        else:
-            return []
-        cl = []
-        for result in rss_results:
-            item = (result[0], result[1])
-            ci = self._parseItem(item)
-            if ci is not None:
-                cl.append(ci)
-        if len(cl) > 0:
-            myDB = self._getDB()
-            myDB.mass_action(cl)
-    def _parseItem(self, item):
-        (title, url) = item
-        if not title or not url:
-            return None
-        logger.log(u"Attempting to cache item:[" + title +"]", logger.DEBUG)
-        return self._addCacheEntry(title, url)
+        return self.provider._doSearch(search_params)
 provider = PublicHDProvider()

View File

@@ -53,7 +53,7 @@ class TorrentRssProvider(generic.TorrentProvider):
         return self.name + '|' + self.url + '|' + self.cookies + '|' + str(int(self.enabled)) + '|' + self.search_mode + '|' + str(int(self.search_fallback)) + '|' + str(int(self.backlog_only))
     def imageName(self):
-        if ek.ek(os.path.isfile, ek.ek(os.path.join, sickbeard.PROG_DIR, 'data', 'images', 'providers', self.getID() + '.png')):
+        if ek.ek(os.path.isfile, ek.ek(os.path.join, sickbeard.PROG_DIR, 'gui', sickbeard.GUI_NAME, 'images', 'providers', self.getID() + '.png')):
             return self.getID() + '.png'
         return 'torrentrss.png'
@@ -152,21 +152,11 @@ class TorrentRssCache(tvcache.TVCache):
         tvcache.TVCache.__init__(self, provider)
         self.minTime = 15
-    def _getRSSData(self):
+    def _getDailyData(self):
         logger.log(u"TorrentRssCache cache update URL: " + self.provider.url, logger.DEBUG)
         request_headers = None
         if self.provider.cookies:
             request_headers = { 'Cookie': self.provider.cookies }
-        return self.getRSSFeed(self.provider.url, request_headers=request_headers)
-    def _parseItem(self, item):
-        (title, url) = self.provider._get_title_and_url(item)
-        if not title or not url:
-            logger.log(u"The XML returned from the RSS feed is incomplete, this result is unusable", logger.ERROR)
-            return None
-        logger.log(u"Attempting to add item to cache: " + title, logger.DEBUG)
-        return self._addCacheEntry(title, url)
+        return self.getRSSFeed(self.provider.url, request_headers=request_headers).entries

View File

@@ -312,47 +312,10 @@ class SCCCache(tvcache.TVCache):
         # only poll SCC every 10 minutes max
         self.minTime = 20
-    def updateCache(self):
-        # delete anything older then 7 days
-        self._clearCache()
-        if not self.shouldUpdate():
-            return
+    def _getDailyData(self):
         search_params = {'RSS': ['']}
-        rss_results = self.provider._doSearch(search_params)
-        if rss_results:
-            self.setLastUpdate()
-        else:
-            return []
-        cl = []
-        for result in rss_results:
-            item = (result[0], result[1])
-            ci = self._parseItem(item)
-            if ci is not None:
-                cl.append(ci)
-        if len(cl) > 0:
-            myDB = self._getDB()
-            myDB.mass_action(cl)
-    def _parseItem(self, item):
-        (title, url) = item
-        if not title or not url:
-            return None
-        logger.log(u"Attempting to cache item:[" + title +"]", logger.DEBUG)
-        return self._addCacheEntry(title, url)
+        return self.provider._doSearch(search_params)
 provider = SCCProvider()

View File

@@ -252,47 +252,10 @@ class SpeedCDCache(tvcache.TVCache):
         # only poll Speedcd every 20 minutes max
         self.minTime = 20
-    def updateCache(self):
-        # delete anything older then 7 days
-        self._clearCache()
-        if not self.shouldUpdate():
-            return
+    def _getDailyData(self):
         search_params = {'RSS': ['']}
-        rss_results = self.provider._doSearch(search_params)
-        if rss_results:
-            self.setLastUpdate()
-        else:
-            return []
-        cl = []
-        for result in rss_results:
-            item = (result[0], result[1])
-            ci = self._parseItem(item)
-            if ci is not None:
-                cl.append(ci)
-        if len(cl) > 0:
-            myDB = self._getDB()
-            myDB.mass_action(cl)
-    def _parseItem(self, item):
-        (title, url) = item
-        if not title or not url:
-            return None
-        logger.log(u"Attempting to cache item:[" + title + "]", logger.DEBUG)
-        return self._addCacheEntry(title, url)
+        return self.provider._doSearch(search_params)
 provider = SpeedCDProvider()

View File

@@ -338,47 +338,9 @@ class ThePirateBayCache(tvcache.TVCache):
         # only poll ThePirateBay every 10 minutes max
         self.minTime = 20
-    def updateCache(self):
-        # delete anything older then 7 days
-        self._clearCache()
-        if not self.shouldUpdate():
-            return
+    def _getDailyData(self):
         search_params = {'RSS': ['rss']}
-        rss_results = self.provider._doSearch(search_params)
-        if rss_results:
-            self.setLastUpdate()
-        else:
-            return []
-        cl = []
-        for result in rss_results:
-            item = (result[0], result[1])
-            ci = self._parseItem(item)
-            if ci is not None:
-                cl.append(ci)
-        if len(cl) > 0:
-            myDB = self._getDB()
-            myDB.mass_action(cl)
-    def _parseItem(self, item):
-        (title, url) = item
-        if not title or not url:
-            return None
-        logger.log(u"Attempting to cache item:[" + title +"]", logger.DEBUG)
-        return self._addCacheEntry(title, url)
+        return self.provider._doSearch(search_params)
 class ThePirateBayWebproxy:

View File

@@ -274,45 +274,9 @@ class TorrentBytesCache(tvcache.TVCache):
         # only poll TorrentBytes every 20 minutes max
         self.minTime = 20
-    def updateCache(self):
-        # delete anything older then 7 days
-        self._clearCache()
-        if not self.shouldUpdate():
-            return
+    def _getDailyData(self):
         search_params = {'RSS': ['']}
-        rss_results = self.provider._doSearch(search_params)
-        if rss_results:
-            self.setLastUpdate()
-        else:
-            return []
-        cl = []
-        for result in rss_results:
-            item = (result[0], result[1])
-            ci = self._parseItem(item)
-            if ci is not None:
-                cl.append(ci)
-        if len(cl) > 0:
-            myDB = self._getDB()
-            myDB.mass_action(cl)
-    def _parseItem(self, item):
-        (title, url) = item
-        if not title or not url:
-            return None
-        logger.log(u"Attempting to cache item:[" + title + "]", logger.DEBUG)
-        return self._addCacheEntry(title, url)
+        return self.provider._doSearch(search_params)
 provider = TorrentBytesProvider()

View File

@@ -277,47 +277,10 @@ class TorrentDayCache(tvcache.TVCache):
         # Only poll IPTorrents every 10 minutes max
         self.minTime = 10
-    def updateCache(self):
-        # delete anything older then 7 days
-        self._clearCache()
-        if not self.shouldUpdate():
-            return
+    def _getDailyData(self):
         search_params = {'RSS': ['']}
-        rss_results = self.provider._doSearch(search_params)
-        if rss_results:
-            self.setLastUpdate()
-        else:
-            return []
-        cl = []
-        for result in rss_results:
-            item = (result[0], result[1])
-            ci = self._parseItem(item)
-            if ci is not None:
-                cl.append(ci)
-        if len(cl) > 0:
-            myDB = self._getDB()
-            myDB.mass_action(cl)
-    def _parseItem(self, item):
-        (title, url) = item
-        if not title or not url:
-            return None
-        logger.log(u"Attempting to cache item:[" + title +"]", logger.DEBUG)
-        return self._addCacheEntry(title, url)
+        return self.provider._doSearch(search_params)
 provider = TorrentDayProvider()

View File

@@ -271,47 +271,9 @@ class TorrentLeechCache(tvcache.TVCache):
         # only poll TorrentLeech every 20 minutes max
         self.minTime = 20
-    def updateCache(self):
-        # delete anything older then 7 days
-        self._clearCache()
-        if not self.shouldUpdate():
-            return
+    def _getDailyData(self):
         search_params = {'RSS': ['']}
-        rss_results = self.provider._doSearch(search_params)
-        if rss_results:
-            self.setLastUpdate()
-        else:
-            return []
-        cl = []
-        for result in rss_results:
-            item = (result[0], result[1])
-            ci = self._parseItem(item)
-            if ci is not None:
-                cl.append(ci)
-        if len(cl) > 0:
-            myDB = self._getDB()
-            myDB.mass_action(cl)
-    def _parseItem(self, item):
-        (title, url) = item
-        if not title or not url:
-            return None
-        logger.log(u"Attempting to cache item:[" + title +"]", logger.DEBUG)
-        return self._addCacheEntry(title, url)
+        return self.provider._doSearch(search_params)
 provider = TorrentLeechProvider()

View File

@@ -86,7 +86,7 @@ class TvTorrentsCache(tvcache.TVCache):
         # only poll TvTorrents every 15 minutes max
         self.minTime = 15
-    def _getRSSData(self):
+    def _getDailyData(self):
         # These will be ignored on the serverside.
         ignore_regex = "all.month|month.of|season[\s\d]*complete"

View File

@@ -148,6 +148,7 @@ class TraktChecker():
                 self.addDefaultShow(indexer, indexer_id, show["title"], SKIPPED)
             newShow = helpers.findCertainShow(sickbeard.showList, indexer_id)
+            try:
                 if newShow and int(newShow['indexer']) == indexer:
                     for episode in show["episodes"]:
                         if newShow is not None:
@@ -155,6 +156,8 @@ class TraktChecker():
                         else:
                             self.todoWanted.append((indexer_id, episode["season"], episode["number"]))
                     self.startBacklog(newShow)
+            except TypeError:
+                logger.log(u"Could not parse the output from trakt for " + show["title"], logger.DEBUG)
     def addDefaultShow(self, indexer, indexer_id, name, status):
         """

View File

@@ -102,6 +102,9 @@ class TVCache():
         return data
+    def _getDailyData(self):
+        return None
     def _checkAuth(self, data):
         return True
@@ -112,7 +115,7 @@ class TVCache():
         if self.shouldUpdate() and self._checkAuth(None):
             # as long as the http request worked we count this as an update
-            data = self._getRSSData()
+            data = self._getDailyData()
             if not data:
                 return []
@@ -125,8 +128,9 @@ class TVCache():
             # parse data
             if self._checkAuth(data):
                 cl = []
-                for item in data.entries:
-                    ci = self._parseItem(item)
+                for item in data:
+                    title, url = self.provider._get_title_and_url(item)
+                    ci = self._parseItem(title, url)
                     if ci is not None:
                         cl.append(ci)
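This hunk is the heart of the merge: provider caches no longer override updateCache() (or _getRSSData()); they only supply _getDailyData(), and the base class owns the last-update bookkeeping, title/url extraction and cache writes. A minimal sketch of a provider cache under the new hook (ExampleCache and the 'rss/' path are invented; the method names come from the diff above):

    from sickbeard import tvcache

    class ExampleCache(tvcache.TVCache):
        def __init__(self, provider):
            tvcache.TVCache.__init__(self, provider)

            # respect the provider's rate limit
            self.minTime = 20

        def _getDailyData(self):
            # return an iterable of feed items; updateCache() in the base class
            # pulls the title/url out of each one and writes the cache entries
            return self.getRSSFeed(self.provider.url + 'rss/').entries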
@@ -148,9 +152,7 @@ class TVCache():
     def _translateLinkURL(self, url):
         return url.replace('&amp;', '&')
-    def _parseItem(self, item):
-        title = item.title
-        url = item.link
+    def _parseItem(self, title, url):
         self._checkItemAuth(title, url)
@@ -158,7 +160,7 @@ class TVCache():
             title = self._translateTitle(title)
             url = self._translateLinkURL(url)
-            logger.log(u"Checking if item from RSS feed is in the cache: " + title, logger.DEBUG)
+            logger.log(u"Attempting to add item to cache: " + title, logger.DEBUG)
             return self._addCacheEntry(title, url)
         else: