mirror of https://github.com/moparisthebest/SickRage synced 2024-11-10 19:35:08 -05:00

Merge pull request #759 from echel0n/dev

Dev
This commit is contained in:
echel0n 2014-08-27 15:17:13 -07:00
commit afea7337dd
11 changed files with 55 additions and 36 deletions

View File

@@ -43,28 +43,31 @@
textExtraction: {
2: function(node) { return \$(node).find("span").text().toLowerCase(); },
3: function(node) { return \$(node).find("img").attr("alt"); },
4: function(node) { return \$(node).find("img").attr("alt"); }
4: function(node) { return \$(node).find("img").attr("alt"); },
5: function(node) { return \$(node).find("img").attr("alt"); },
6: function(node) { return \$(node).find("img").attr("alt"); },
7: function(node) { return \$(node).find("img").attr("alt"); }
},
widgets: ['zebra'],
headers: {
0: { sorter: false},
1: { sorter: 'showNames'},
2: { sorter: 'quality'},
3: { sorter: 'flatfold'},
4: { sorter: 'paused'},
5: { sorter: 'status'},
6: { sorter: false},
7: { sorter: false},
8: { sorter: false},
3: { sorter: true},
4: { sorter: true},
5: { sorter: true},
6: { sorter: true},
7: { sorter: true},
8: { sorter: true},
9: { sorter: false},
10: { sorter: false},
11: { sorter: false},
12: { sorter: false}
#if $sickbeard.USE_SUBTITLES
#if $sickbeard.USE_SUBTITLES
, 13: { sorter: false}
#end if
#end if
}
});
});
});
//-->
</script>
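
This hunk widens the show-list tablesorter setup: image alt text is now extracted for columns 5-7 as well, and columns 3-8 switch from custom or disabled sorters ('flatfold', 'paused', 'status', false) to the default sorter (true), so those columns become sortable again.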

View File

@@ -195,7 +195,7 @@ DAILYSEARCH_STARTUP = False
BACKLOG_STARTUP = False
MIN_AUTOPOSTPROCESSER_FREQUENCY = 1
MIN_BACKLOG_FREQUENCY = 10
MIN_BACKLOG_FREQUENCY = 1440
MIN_DAILYSEARCH_FREQUENCY = 10
MIN_UPDATE_FREQUENCY = 1
DEFAULT_AUTOPOSTPROCESSER_FREQUENCY = 10
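
The functional change here raises the minimum backlog search interval from 10 minutes to 1440 minutes (one day). A minimal sketch of how such a floor is typically enforced against a user-configured value; clamp_backlog_frequency is a hypothetical helper, not SickRage code:

MIN_BACKLOG_FREQUENCY = 1440  # minutes; raised from 10 by this commit

def clamp_backlog_frequency(configured_minutes):
    # Hypothetical helper: never let the backlog run more often than the minimum allows.
    return max(configured_minutes, MIN_BACKLOG_FREQUENCY)

print(clamp_backlog_frequency(60))    # -> 1440
print(clamp_backlog_frequency(2880))  # -> 2880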

View File

@@ -570,7 +570,7 @@ class ParseResult(object):
@property
def is_air_by_date(self):
if self.season_number == None and len(self.episode_numbers) == 0 and self.air_date:
if self.air_date:
return True
return False
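
For context, a condensed sketch of the class around the changed property (the real ParseResult has many more fields). After this change, any parse result carrying an air date counts as air-by-date, even when a season or episode number was also found:

class ParseResult(object):
    # Reduced to the attributes the old and new checks looked at.
    def __init__(self, season_number=None, episode_numbers=None, air_date=None):
        self.season_number = season_number
        self.episode_numbers = episode_numbers or []
        self.air_date = air_date

    @property
    def is_air_by_date(self):
        if self.air_date:
            return True
        return False

print(ParseResult(season_number=1, episode_numbers=[5], air_date="2014-08-27").is_air_by_date)  # True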

View File

@@ -95,7 +95,8 @@ class LibnotifyNotifier:
def notify_git_update(self, new_version = "??"):
if sickbeard.USE_LIBNOTIFY:
update_text=common.notifyStrings[common.NOTIFY_GIT_UPDATE_TEXT], title=common.notifyStrings[common.NOTIFY_GIT_UPDATE]
update_text=common.notifyStrings[common.NOTIFY_GIT_UPDATE_TEXT]
title=common.notifyStrings[common.NOTIFY_GIT_UPDATE]
self._notify(title, update_text + new_version)
def test_notify(self):
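
The removed line is a genuine bug: written as one statement, Python parses it as a chained assignment, assigns the title string to update_text, and then tries to unpack that string into two targets, which raises ValueError. A small illustration, with a plain dict and made-up keys standing in for common.notifyStrings:

notifyStrings = {'git_update_text': 'New version available: ', 'git_update': 'SickRage Update'}

# Old one-liner (kept commented out because it raises at runtime):
#   update_text=notifyStrings['git_update_text'], title=notifyStrings['git_update']
# Python reads this as
#   update_text = (notifyStrings['git_update_text'], title) = notifyStrings['git_update']
# so update_text gets the wrong string and the unpacking step fails.

# Fixed form: two independent assignments, as in the new code.
update_text = notifyStrings['git_update_text']
title = notifyStrings['git_update']
print(title + ': ' + update_text + '1.2.3')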

View File

@@ -107,7 +107,7 @@ class ProperFinder():
for curProper in sortedPropers:
try:
myParser = NameParser(False, showObj=curProper.show)
myParser = NameParser(False)
parse_result = myParser.parse(curProper.name)
except InvalidNameException:
logger.log(u"Unable to parse the filename " + curProper.name + " into a valid episode", logger.DEBUG)

View File

@@ -188,7 +188,7 @@ class BitSoupProvider(generic.TorrentProvider):
cells = result.find_all('td')
link = cells[1].find('a')
download_url = self.urls['download'] % cells[3].find('a')['href']
download_url = self.urls['download'] % cells[2].find('a')['href']
id = link['href']
id = id.replace('details.php?id=','')
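
The fix takes the download link from the third cell (index 2) instead of the fourth. A self-contained sketch of the same row parsing with BeautifulSoup; the markup and URL template below are made up for illustration, not BitSoup's real page:

from bs4 import BeautifulSoup

html = ('<tr>'
        '<td>TV</td>'
        '<td><a href="details.php?id=12345">Some.Show.S01E05.720p</a></td>'
        '<td><a href="download.php/12345/file.torrent">get</a></td>'
        '<td>2014-08-27</td>'
        '</tr>')
download_template = 'http://tracker.example/%s'  # stands in for self.urls['download']

cells = BeautifulSoup(html, 'html.parser').find_all('td')
link = cells[1].find('a')
download_url = download_template % cells[2].find('a')['href']  # index 2, as in the fix
torrent_id = link['href'].replace('details.php?id=', '')
print(download_url)
print(torrent_id)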

View File

@@ -166,7 +166,11 @@ class GenericProvider:
else:
logger.log(u"Saved result to " + filename, logger.MESSAGE)
return self._verify_download(filename)
if self._verify_download(filename):
return True
logger.log(u"Failed to download result", logger.ERROR)
return False
def _verify_download(self, file_name=None):
"""
@@ -182,9 +186,11 @@ class GenericProvider:
parser.stream._input.close()
except:
pass
if mime_type != 'application/x-bittorrent':
logger.log(u"Result is not a valid torrent file", logger.WARNING)
return False
if mime_type == 'application/x-bittorrent':
return True
logger.log(u"Result is not a valid torrent file", logger.WARNING)
return False
return True
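
After this change the download path only reports success when _verify_download() accepts the file, and the verification itself returns early on a valid torrent. A compact sketch of the same flow under simplified assumptions: it checks the bencode leading byte instead of the mime-type sniffing the real method uses, and logging is reduced to print:

def verify_download(filename):
    # Accept only files that look like a torrent (bencoded data starts with 'd').
    try:
        with open(filename, 'rb') as handle:
            first_byte = handle.read(1)
    except IOError:
        return False
    if first_byte == b'd':
        return True
    print(u"Result is not a valid torrent file")
    return False

def download_result(filename):
    print(u"Saved result to " + filename)
    if verify_download(filename):
        return True
    print(u"Failed to download result")
    return False

# usage: write a minimal bencoded dict and verify it
with open('example.torrent', 'wb') as fh:
    fh.write(b'd8:announce0:e')
print(download_result('example.torrent'))  # True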

View File

@@ -61,7 +61,7 @@ class KATProvider(generic.TorrentProvider):
self.cache = KATCache(self)
self.urls = ['http://kickass.to/', 'http://katproxy.com/']
self.urls = ['http://kickass.to/', 'http://katproxy.com/', 'http://www.kickmirror.com']
self.url = None
def isEnabled(self):
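
The provider gains a third mirror to try. Illustrative only, not SickRage's actual rotation logic: one way to pick the first reachable mirror with requests:

import requests

mirrors = ['http://kickass.to/', 'http://katproxy.com/', 'http://www.kickmirror.com']

def first_reachable(urls, timeout=10):
    for url in urls:
        try:
            if requests.head(url, timeout=timeout).status_code < 400:
                return url
        except requests.RequestException:
            continue
    return None

# usage: base_url = first_reachable(mirrors)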

View File

@@ -184,7 +184,7 @@ class TorrentBytesProvider(generic.TorrentProvider):
link = cells[1].find('a', attrs={'class': 'index'})
full_id = link['href'].replace('details.php?id=', '')
torrent_id = full_id[:6]
torrent_id = full_id.split("&")[0]
try:
if link.has_key('title'):
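
The old code truncated the id to its first six characters; the new code keeps everything before the first '&'. A worked example on a made-up href:

href = 'details.php?id=1234567&hit=1'          # hypothetical link target
full_id = href.replace('details.php?id=', '')  # '1234567&hit=1'

old_torrent_id = full_id[:6]                   # '123456'  (drops digits on longer ids)
new_torrent_id = full_id.split('&')[0]         # '1234567' (keeps the whole id)
print(old_torrent_id, new_torrent_id)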

View File

@@ -129,6 +129,14 @@ def snatchEpisode(result, endStatus=SNATCHED):
if sickbeard.TORRENT_METHOD == "blackhole":
dlResult = _downloadResult(result)
else:
# make sure we have the torrent file content
if not result.content:
if not result.url.startswith('magnet'):
result.content = result.provider.getURL(result.url)
if not result.content:
logger.log(
u"Torrent content failed to download from " + result.url, logger.ERROR
)
# Snatches torrent with client
client = clients.getClientIstance(sickbeard.TORRENT_METHOD)()
dlResult = client.sendTORRENT(result)
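
The added block fetches the .torrent payload for non-magnet results before handing them to the torrent client. A reduced sketch of that guard; SearchResult is trimmed to the two fields involved and fetch stands in for result.provider.getURL:

class SearchResult(object):
    def __init__(self, url, content=None):
        self.url = url
        self.content = content

def ensure_torrent_content(result, fetch):
    # Only non-magnet results need the raw .torrent payload downloaded up front.
    if not result.content and not result.url.startswith('magnet'):
        result.content = fetch(result.url)
        if not result.content:
            print(u"Torrent content failed to download from " + result.url)
            return False
    return True

print(ensure_torrent_content(SearchResult('magnet:?xt=urn:btih:abc'), lambda url: None))       # True
print(ensure_torrent_content(SearchResult('http://example.com/a.torrent'), lambda url: None))  # False
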
@@ -477,9 +485,9 @@ def searchProviders(show, season, episodes, manualSearch=False):
anyQualities, bestQualities = Quality.splitQuality(show.quality)
# pick the best season NZB
bestSeasonNZB = None
bestSeasonResult = None
if SEASON_RESULT in foundResults[curProvider.name]:
bestSeasonNZB = pickBestResult(foundResults[curProvider.name][SEASON_RESULT], show,
bestSeasonResult = pickBestResult(foundResults[curProvider.name][SEASON_RESULT], show,
anyQualities + bestQualities)
highest_quality_overall = 0
@@ -491,12 +499,12 @@ def searchProviders(show, season, episodes, manualSearch=False):
logger.DEBUG)
# see if every episode is wanted
if bestSeasonNZB:
if bestSeasonResult:
# get the quality of the season nzb
seasonQual = bestSeasonNZB.quality
seasonQual = bestSeasonResult.quality
logger.log(
u"The quality of the season " + bestSeasonNZB.provider.providerType + " is " + Quality.qualityStrings[
u"The quality of the season " + bestSeasonResult.provider.providerType + " is " + Quality.qualityStrings[
seasonQual], logger.DEBUG)
myDB = db.DBConnection()
@@ -514,28 +522,28 @@ def searchProviders(show, season, episodes, manualSearch=False):
anyWanted = True
# if we need every ep in the season and there's nothing better then just download this and be done with it (unless single episodes are preferred)
if allWanted and bestSeasonNZB.quality == highest_quality_overall:
if allWanted and bestSeasonResult.quality == highest_quality_overall:
logger.log(
u"Every ep in this season is needed, downloading the whole " + bestSeasonNZB.provider.providerType + " " + bestSeasonNZB.name)
u"Every ep in this season is needed, downloading the whole " + bestSeasonResult.provider.providerType + " " + bestSeasonResult.name)
epObjs = []
for curEpNum in allEps:
epObjs.append(show.getEpisode(season, curEpNum))
bestSeasonNZB.episodes = epObjs
bestSeasonResult.episodes = epObjs
return [bestSeasonNZB]
return [bestSeasonResult]
elif not anyWanted:
logger.log(
u"No eps from this season are wanted at this quality, ignoring the result of " + bestSeasonNZB.name,
u"No eps from this season are wanted at this quality, ignoring the result of " + bestSeasonResult.name,
logger.DEBUG)
else:
if bestSeasonNZB.provider.providerType == GenericProvider.NZB:
if bestSeasonResult.provider.providerType == GenericProvider.NZB:
logger.log(u"Breaking apart the NZB and adding the individual ones to our results", logger.DEBUG)
# if not, break it apart and add them as the lowest priority results
individualResults = nzbSplitter.splitResult(bestSeasonNZB)
individualResults = nzbSplitter.splitResult(bestSeasonResult)
individualResults = filter(
lambda x: show_name_helpers.filterBadReleases(x.name) and x.show == show, individualResults)
@@ -560,13 +568,13 @@ def searchProviders(show, season, episodes, manualSearch=False):
epObjs = []
for curEpNum in allEps:
epObjs.append(show.getEpisode(season, curEpNum))
bestSeasonNZB.episodes = epObjs
bestSeasonResult.episodes = epObjs
epNum = MULTI_EP_RESULT
if epNum in foundResults[curProvider.name]:
foundResults[curProvider.name][epNum].append(bestSeasonNZB)
foundResults[curProvider.name][epNum].append(bestSeasonResult)
else:
foundResults[curProvider.name][epNum] = [bestSeasonNZB]
foundResults[curProvider.name][epNum] = [bestSeasonResult]
# go through multi-ep results and see if we really want them or not, get rid of the rest
multiResults = {}
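
The remaining hunks in this file are a mechanical rename of bestSeasonNZB to bestSeasonResult throughout searchProviders(), presumably because the picked season result may come from a torrent provider as well as an NZB one.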

View File

@@ -1750,6 +1750,7 @@ class CMD_Show(ApiCall):
showDict["anime"] = showObj.anime
#clean up tvdb horrible airs field
showDict["airs"] = str(showObj.airs).replace('am', ' AM').replace('pm', ' PM').replace(' ', ' ')
showDict["indexerid"] = self.indexerid
showDict["tvrage_id"] = helpers.mapIndexersToShow(showObj)[2]
showDict["tvrage_name"] = showObj.name
showDict["network"] = showObj.network