
Merge pull request #759 from echel0n/dev

Dev
echel0n 2014-08-27 15:17:13 -07:00
commit afea7337dd
11 changed files with 55 additions and 36 deletions

View File

@@ -43,19 +43,22 @@
     textExtraction: {
         2: function(node) { return \$(node).find("span").text().toLowerCase(); },
         3: function(node) { return \$(node).find("img").attr("alt"); },
-        4: function(node) { return \$(node).find("img").attr("alt"); }
+        4: function(node) { return \$(node).find("img").attr("alt"); },
+        5: function(node) { return \$(node).find("img").attr("alt"); },
+        6: function(node) { return \$(node).find("img").attr("alt"); },
+        7: function(node) { return \$(node).find("img").attr("alt"); }
     },
     widgets: ['zebra'],
     headers: {
         0: { sorter: false},
         1: { sorter: 'showNames'},
         2: { sorter: 'quality'},
-        3: { sorter: 'flatfold'},
-        4: { sorter: 'paused'},
-        5: { sorter: 'status'},
-        6: { sorter: false},
-        7: { sorter: false},
-        8: { sorter: false},
+        3: { sorter: true},
+        4: { sorter: true},
+        5: { sorter: true},
+        6: { sorter: true},
+        7: { sorter: true},
+        8: { sorter: true},
         9: { sorter: false},
         10: { sorter: false},
         11: { sorter: false},

View File

@@ -195,7 +195,7 @@ DAILYSEARCH_STARTUP = False
 BACKLOG_STARTUP = False

 MIN_AUTOPOSTPROCESSER_FREQUENCY = 1
-MIN_BACKLOG_FREQUENCY = 10
+MIN_BACKLOG_FREQUENCY = 1440
 MIN_DAILYSEARCH_FREQUENCY = 10
 MIN_UPDATE_FREQUENCY = 1
 DEFAULT_AUTOPOSTPROCESSER_FREQUENCY = 10

View File

@@ -570,7 +570,7 @@ class ParseResult(object):
     @property
     def is_air_by_date(self):
-        if self.season_number == None and len(self.episode_numbers) == 0 and self.air_date:
+        if self.air_date:
             return True
         return False
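
For context, a stripped-down sketch of what the relaxed check means (hypothetical minimal class, not the full ParseResult): any parse result that carries an air date is now treated as air-by-date, even when season or episode numbers were also parsed.

class MiniParseResult(object):
    """Hypothetical stand-in for ParseResult, reduced to the relevant fields."""
    def __init__(self, air_date=None, season_number=None, episode_numbers=None):
        self.air_date = air_date
        self.season_number = season_number
        self.episode_numbers = episode_numbers or []

    @property
    def is_air_by_date(self):
        # relaxed check from the hunk above: only the air date matters now
        if self.air_date:
            return True
        return False

# Previously this would have been False because a season number is present.
print(MiniParseResult(air_date="2014-08-27", season_number=2014).is_air_by_date)  # True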

View File

@@ -95,7 +95,8 @@ class LibnotifyNotifier:
     def notify_git_update(self, new_version = "??"):
         if sickbeard.USE_LIBNOTIFY:
-            update_text=common.notifyStrings[common.NOTIFY_GIT_UPDATE_TEXT], title=common.notifyStrings[common.NOTIFY_GIT_UPDATE]
+            update_text=common.notifyStrings[common.NOTIFY_GIT_UPDATE_TEXT]
+            title=common.notifyStrings[common.NOTIFY_GIT_UPDATE]
             self._notify(title, update_text + new_version)

     def test_notify(self):
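
A small, self-contained sketch of the pitfall the split avoids (made-up dictionary, not the sickbeard.common module): the old one-liner is parsed as a chained assignment rather than two independent bindings.

strings = {"update_text": "New version available: ", "title": "SickRage Update"}  # hypothetical values

# Old form, roughly:  update_text = strings["update_text"], title = strings["title"]
# Python reads that as  update_text = (strings["update_text"], title) = strings["title"],
# i.e. it tries to unpack the title string instead of binding the two names separately.

# New form: two plain statements, each name bound to the value it should hold.
update_text = strings["update_text"]
title = strings["title"]
print(title + ": " + update_text + "v1.0")  # "v1.0" stands in for new_version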

View File

@@ -107,7 +107,7 @@ class ProperFinder():
         for curProper in sortedPropers:
             try:
-                myParser = NameParser(False, showObj=curProper.show)
+                myParser = NameParser(False)
                 parse_result = myParser.parse(curProper.name)
             except InvalidNameException:
                 logger.log(u"Unable to parse the filename " + curProper.name + " into a valid episode", logger.DEBUG)

View File

@@ -188,7 +188,7 @@ class BitSoupProvider(generic.TorrentProvider):
             cells = result.find_all('td')

             link = cells[1].find('a')
-            download_url = self.urls['download'] % cells[3].find('a')['href']
+            download_url = self.urls['download'] % cells[2].find('a')['href']
             id = link['href']
             id = id.replace('details.php?id=','')
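
A quick illustration of why the cell index matters, using hypothetical row markup parsed with BeautifulSoup (which the find_all/find calls above suggest): the download link sits in the third cell, index 2, not index 3.

from bs4 import BeautifulSoup

# Made-up table row in the general shape a tracker result page returns.
row = BeautifulSoup(
    "<tr><td>cat</td>"
    "<td><a href='details.php?id=42'>Some.Show.S01E01</a></td>"
    "<td><a href='download.php/42/Some.Show.S01E01.torrent'>get</a></td>"
    "<td>700 MB</td></tr>",
    "html.parser")

cells = row.find_all('td')
link = cells[1].find('a')
print(link['href'])                # details.php?id=42
print(cells[2].find('a')['href'])  # download.php/42/Some.Show.S01E01.torrent  <- the cell the fix points at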

View File

@@ -166,7 +166,11 @@ class GenericProvider:
             else:
                 logger.log(u"Saved result to " + filename, logger.MESSAGE)

-            return self._verify_download(filename)
+            if self._verify_download(filename):
+                return True
+
+            logger.log(u"Failed to download result", logger.ERROR)
+            return False

     def _verify_download(self, file_name=None):
         """
@@ -182,7 +186,9 @@ class GenericProvider:
                     parser.stream._input.close()
                 except:
                     pass
-                if mime_type != 'application/x-bittorrent':
+                if mime_type == 'application/x-bittorrent':
+                    return True
                     logger.log(u"Result is not a valid torrent file", logger.WARNING)
                     return False
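
A condensed, self-contained sketch of the flow the two hunks above switch to: download, then verify, and only report success when the check passes. The function names and the first-byte check here are stand-ins; the provider code inspects the saved file with a real parser (note the parser.stream cleanup in the context lines).

def _verify_download(file_name):
    # Stand-in check: real .torrent files are bencoded dictionaries, so they
    # start with b'd'; the provider does a proper parse instead of this.
    with open(file_name, 'rb') as f:
        looks_like_torrent = f.read(1) == b'd'
    if looks_like_torrent:
        return True
    print("Result is not a valid torrent file")
    return False

def download_result(data, file_name):
    # data: raw bytes already fetched from the provider (fetching omitted here)
    with open(file_name, 'wb') as f:
        f.write(data)
    print("Saved result to " + file_name)
    if _verify_download(file_name):
        return True
    print("Failed to download result")
    return False

print(download_result(b"d8:announce0:e", "example.torrent"))      # True: bencoded payload
print(download_result(b"<html>blocked</html>", "bogus.torrent"))  # False: an error page, not a torrent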

View File

@@ -61,7 +61,7 @@ class KATProvider(generic.TorrentProvider):
         self.cache = KATCache(self)

-        self.urls = ['http://kickass.to/', 'http://katproxy.com/']
+        self.urls = ['http://kickass.to/', 'http://katproxy.com/', 'http://www.kickmirror.com']
         self.url = None

    def isEnabled(self):

View File

@@ -184,7 +184,7 @@ class TorrentBytesProvider(generic.TorrentProvider):
             link = cells[1].find('a', attrs={'class': 'index'})

             full_id = link['href'].replace('details.php?id=', '')
-            torrent_id = full_id[:6]
+            torrent_id = full_id.split("&")[0]

             try:
                 if link.has_key('title'):
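
A tiny illustration, with a made-up href, of why splitting on "&" is safer than the old fixed-width slice once ids grow past six digits:

href = 'details.php?id=1234567&hit=1'          # hypothetical link target
full_id = href.replace('details.php?id=', '')
print(full_id[:6])            # '123456'  - silently truncated
print(full_id.split("&")[0])  # '1234567' - the whole id, whatever its length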

View File

@@ -129,6 +129,14 @@ def snatchEpisode(result, endStatus=SNATCHED):
         if sickbeard.TORRENT_METHOD == "blackhole":
             dlResult = _downloadResult(result)
         else:
+            # make sure we have the torrent file content
+            if not result.content:
+                if not result.url.startswith('magnet'):
+                    result.content = result.provider.getURL(result.url)
+                    if not result.content:
+                        logger.log(
+                            u"Torrent content failed to download from " + result.url, logger.ERROR
+                        )
             # Snatches torrent with client
             client = clients.getClientIstance(sickbeard.TORRENT_METHOD)()
             dlResult = client.sendTORRENT(result)
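
Roughly what the added guard does, as a self-contained sketch; the result object and fetch callable below are hypothetical stand-ins for the SickRage result and result.provider.getURL:

class FakeResult(object):
    """Hypothetical stand-in for a search result."""
    def __init__(self, url, content=None):
        self.url = url
        self.content = content

def ensure_torrent_content(result, fetch):
    # Mirror of the guard: magnet links carry no payload, anything else must
    # have its .torrent bytes fetched before a client is handed the result.
    if not result.content and not result.url.startswith('magnet'):
        result.content = fetch(result.url)
        if not result.content:
            print("Torrent content failed to download from " + result.url)  # stands in for logger.log(..., ERROR)
    return result

r = ensure_torrent_content(FakeResult("http://example.com/a.torrent"),
                           fetch=lambda url: b"d8:announce0:e")
print(bool(r.content))  # True: the client now has real torrent bytes to send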
@@ -477,9 +485,9 @@ def searchProviders(show, season, episodes, manualSearch=False):
         anyQualities, bestQualities = Quality.splitQuality(show.quality)

         # pick the best season NZB
-        bestSeasonNZB = None
+        bestSeasonResult = None
         if SEASON_RESULT in foundResults[curProvider.name]:
-            bestSeasonNZB = pickBestResult(foundResults[curProvider.name][SEASON_RESULT], show,
+            bestSeasonResult = pickBestResult(foundResults[curProvider.name][SEASON_RESULT], show,
                                            anyQualities + bestQualities)

         highest_quality_overall = 0
@@ -491,12 +499,12 @@ def searchProviders(show, season, episodes, manualSearch=False):
                     logger.DEBUG)

         # see if every episode is wanted
-        if bestSeasonNZB:
+        if bestSeasonResult:
             # get the quality of the season nzb
-            seasonQual = bestSeasonNZB.quality
+            seasonQual = bestSeasonResult.quality
             logger.log(
-                u"The quality of the season " + bestSeasonNZB.provider.providerType + " is " + Quality.qualityStrings[
+                u"The quality of the season " + bestSeasonResult.provider.providerType + " is " + Quality.qualityStrings[
                     seasonQual], logger.DEBUG)

             myDB = db.DBConnection()
@@ -514,28 +522,28 @@ def searchProviders(show, season, episodes, manualSearch=False):
                     anyWanted = True

             # if we need every ep in the season and there's nothing better then just download this and be done with it (unless single episodes are preferred)
-            if allWanted and bestSeasonNZB.quality == highest_quality_overall:
+            if allWanted and bestSeasonResult.quality == highest_quality_overall:
                 logger.log(
-                    u"Every ep in this season is needed, downloading the whole " + bestSeasonNZB.provider.providerType + " " + bestSeasonNZB.name)
+                    u"Every ep in this season is needed, downloading the whole " + bestSeasonResult.provider.providerType + " " + bestSeasonResult.name)
                 epObjs = []
                 for curEpNum in allEps:
                     epObjs.append(show.getEpisode(season, curEpNum))
-                bestSeasonNZB.episodes = epObjs
-                return [bestSeasonNZB]
+                bestSeasonResult.episodes = epObjs
+                return [bestSeasonResult]

             elif not anyWanted:
                 logger.log(
-                    u"No eps from this season are wanted at this quality, ignoring the result of " + bestSeasonNZB.name,
+                    u"No eps from this season are wanted at this quality, ignoring the result of " + bestSeasonResult.name,
                     logger.DEBUG)

             else:
-                if bestSeasonNZB.provider.providerType == GenericProvider.NZB:
+                if bestSeasonResult.provider.providerType == GenericProvider.NZB:
                     logger.log(u"Breaking apart the NZB and adding the individual ones to our results", logger.DEBUG)

                     # if not, break it apart and add them as the lowest priority results
-                    individualResults = nzbSplitter.splitResult(bestSeasonNZB)
+                    individualResults = nzbSplitter.splitResult(bestSeasonResult)

                     individualResults = filter(
                         lambda x: show_name_helpers.filterBadReleases(x.name) and x.show == show, individualResults)
@@ -560,13 +568,13 @@ def searchProviders(show, season, episodes, manualSearch=False):
                     epObjs = []
                     for curEpNum in allEps:
                         epObjs.append(show.getEpisode(season, curEpNum))
-                    bestSeasonNZB.episodes = epObjs
+                    bestSeasonResult.episodes = epObjs

                     epNum = MULTI_EP_RESULT
                     if epNum in foundResults[curProvider.name]:
-                        foundResults[curProvider.name][epNum].append(bestSeasonNZB)
+                        foundResults[curProvider.name][epNum].append(bestSeasonResult)
                     else:
-                        foundResults[curProvider.name][epNum] = [bestSeasonNZB]
+                        foundResults[curProvider.name][epNum] = [bestSeasonResult]

         # go through multi-ep results and see if we really want them or not, get rid of the rest
         multiResults = {}

View File

@@ -1750,6 +1750,7 @@ class CMD_Show(ApiCall):
         showDict["anime"] = showObj.anime
         #clean up tvdb horrible airs field
         showDict["airs"] = str(showObj.airs).replace('am', ' AM').replace('pm', ' PM').replace('  ', ' ')
+        showDict["indexerid"] = self.indexerid
         showDict["tvrage_id"] = helpers.mapIndexersToShow(showObj)[2]
         showDict["tvrage_name"] = showObj.name
         showDict["network"] = showObj.network
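
For reference, the kind of payload the show command now returns, with made-up values; the new key simply echoes the indexer id the show was requested with.

showDict = {
    "airs": "Thursday 8:00 PM",
    "indexerid": 12345,          # new: echoes the id passed to ?cmd=show (hypothetical value)
    "tvrage_id": 67890,
    "tvrage_name": "Example Show",
    "network": "Example Network",
}
print(showDict["indexerid"])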