mirror of https://github.com/moparisthebest/SickRage synced 2024-12-04 15:12:23 -05:00

sbRoot missing in some img URLs

Added queued.png image for manual searching

Fixed issue where consecutive manual searches were not queued properly
Added initial backend processing for retrieving all queued searches. This should fix the frontend blocking when doing manual searches for episodes, because the backend is freed sooner: it now only queues the search.
Created a recurring AJAX call that fetches the list of all queued and running searches on the displayShow page.

For the getManualSearchStatus() function, only use curItem from the ManualSearchQueueItem or FailedQueueItem threads.
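
For reference, the JSON that the new getManualSearchStatus call returns (and that the displayShow AJAX code below polls) is shaped roughly as follows. This is an illustrative sketch only: the field names are taken from the diff, but the concrete values are made up.

# Illustrative sketch of the getManualSearchStatus response; values are invented.
import json

example_response = {
    'show': '279530',                  # indexer id of the show being viewed
    'episodes': [
        {'episode': 6,
         'episodeindexid': 1234567,    # made-up episode indexer id
         'season': 1,
         'searchstatus': 'queued',     # 'queued', 'searching' or 'finished'
         'status': 'Wanted',           # e.g. Downloaded, Snatched, ...
         'quality': 'HD720p'},         # quality class string
    ],
}
print(json.dumps(example_response))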

Conflicts:
	sickbeard/search_queue.py
KontiSR 2014-09-15 09:23:55 +02:00
parent 627debcf88
commit 419e35f300
6 changed files with 268 additions and 39 deletions

gui/slick/images/queued.png (new binary file, 466 B)

@@ -411,9 +411,9 @@
<td class="search">
#if int($epResult["season"]) != 0:
#if ( int($epResult["status"]) in $Quality.SNATCHED or int($epResult["status"]) in $Quality.DOWNLOADED ) and $sickbeard.USE_FAILED_DOWNLOADS:
<a class="epRetry" href="retryEpisode?show=$show.indexerid&amp;season=$epResult["season"]&amp;episode=$epResult["episode"]"><img src="$sbRoot/images/search32.png" height="16" alt="retry" title="Retry Download" /></a>
<a class="epRetry" id="<%=str(epResult["season"])+'x'+str(epResult["episode"])%>" name="<%=str(epResult["season"]) +"x"+str(epResult["episode"]) %>" href="retryEpisode?show=$show.indexerid&amp;season=$epResult["season"]&amp;episode=$epResult["episode"]"><img src="$sbRoot/images/search32.png" height="16" alt="retry" title="Retry Download" /></a>
#else:
<a class="epSearch" href="searchEpisode?show=$show.indexerid&amp;season=$epResult["season"]&amp;episode=$epResult["episode"]"><img src="$sbRoot/images/search32.png" width="16" height="16" alt="search" title="Manual Search" /></a>
<a class="epSearch" id="<%=str(epResult["season"])+'x'+str(epResult["episode"])%>" name="<%=str(epResult["season"]) +"x"+str(epResult["episode"]) %>" href="searchEpisode?show=$show.indexerid&amp;season=$epResult["season"]&amp;episode=$epResult["episode"]"><img src="$sbRoot/images/search32.png" width="16" height="16" alt="search" title="Manual Search" /></a>
#end if
#end if
#if $sickbeard.USE_SUBTITLES and $show.subtitles and len(set(str($epResult["subtitles"]).split(',')).intersection(set($subtitles.wantedLanguages()))) < len($subtitles.wantedLanguages()) and $epResult["location"]


@@ -1,3 +1,112 @@
var search_status_url = sbRoot + '/getManualSearchStatus';
$.pnotify.defaults.width = "400px";
$.pnotify.defaults.styling = "jqueryui";
$.pnotify.defaults.history = false;
$.pnotify.defaults.shadow = false;
$.pnotify.defaults.delay = 4000;
$.pnotify.defaults.maxonscreen = 5;
$.fn.manualSearches = [];
function check_manual_searches() {
var poll_interval = 5000;
$.ajax({
url: search_status_url + '?show=' + $('#showID').val(),
success: function (data) {
if (data.episodes) {
poll_interval = 5000;
}
else {
poll_interval = 15000;
}
updateImages(data);
//cleanupManualSearches(data);
},
error: function () {
poll_interval = 30000;
},
type: "GET",
dataType: "json",
complete: function () {
setTimeout(check_manual_searches, poll_interval);
},
timeout: 15000 // timeout every 15 secs
});
}
function updateImages(data) {
$.each(data.episodes, function (name, ep) {
console.debug(ep.searchstatus);
// Get td element for current ep
var loadingImage = 'loading16_dddddd.gif';
var queuedImage = 'queued.png';
var searchImage = 'search32.png';
var status = null;
//Try to get the <a> Element
el=$('a[id=' + ep.season + 'x' + ep.episode+']');
img=el.children('img');
parent=el.parent();
if (el) {
if (ep.searchstatus == 'searching') {
//el=$('td#' + ep.season + 'x' + ep.episode + '.search img');
img.attr('title','Searching');
img.attr('alt','searching');
img.attr('src',sbRoot+'/images/' + loadingImage);
disableLink(el);
// Update Status and Quality
var rSearchTerm = /(\w+)\s\((.+?)\)/;
HtmlContent = ep.searchstatus;
}
else if (ep.searchstatus == 'queued') {
//el=$('td#' + ep.season + 'x' + ep.episode + '.search img');
img.attr('title','Queued');
img.attr('alt','queued');
img.attr('src',sbRoot+'/images/' + queuedImage );
disableLink(el);
HtmlContent = ep.searchstatus;
}
else if (ep.searchstatus == 'finished') {
//el=$('td#' + ep.season + 'x' + ep.episode + '.search img');
img.attr('title','Searching');
img.attr('alt','searching');
img.parent().attr('class','epRetry');
img.attr('src',sbRoot+'/images/' + searchImage);
enableLink(el);
// Update Status and Quality
var rSearchTerm = /(\w+)\s\((.+?)\)/;
HtmlContent = ep.status.replace(rSearchTerm,"$1"+' <span class="quality '+ep.quality+'">'+"$2"+'</span>');
}
// update the status column if it exists
parent.siblings('.status_column').html(HtmlContent)
}
});
}
$(document).ready(function () {
check_manual_searches();
});
function enableLink(el) {
el.on('click.disabled', false);
el.attr('enableClick', '1');
el.fadeTo("fast", 1)
}
function disableLink(el) {
el.off('click.disabled');
el.attr('enableClick', '0');
el.fadeTo("fast", .5)
}
(function(){
$.ajaxEpSearch = {
@@ -5,6 +114,7 @@
size: 16,
colorRow: false,
loadingImage: 'loading16_dddddd.gif',
queuedImage: 'queued.png',
noImage: 'no16.png',
yesImage: 'yes16.png'
}
@@ -13,22 +123,42 @@
$.fn.ajaxEpSearch = function(options){
options = $.extend({}, $.ajaxEpSearch.defaults, options);
$('.epSearch').click(function(){
var parent = $(this).parent();
$('.epSearch').click(function(event){
event.preventDefault();
// put the ajax spinner (for non white bg) placeholder while we wait
parent.empty();
parent.append($("<img/>").attr({"src": sbRoot+"/images/"+options.loadingImage, "height": options.size, "alt": "", "title": "loading"}));
// Check if we have disabled the click
if ( $(this).attr('enableClick') == '0' ) {
console.debug("Already queued, not downloading!");
return false;
}
if ( $(this).attr('class') == "epRetry" ) {
if ( !confirm("Mark download as bad and retry?") )
return false;
};
var parent = $(this).parent();
// Create var for anchor
link = $(this);
// Create var for img under anchor and set options for the loading gif
img=$(this).children('img');
img.attr('title','loading');
img.attr('alt','');
img.attr('src',sbRoot+'/images/' + options.loadingImage);
$.getJSON($(this).attr('href'), function(data){
// if they failed then just put the red X
// if they failed then just put the red X
if (data.result == 'failure') {
img_name = options.noImage;
img_result = 'failed';
// if the snatch was successful then apply the corresponding class and fill in the row appropriately
} else {
img_name = options.yesImage;
img_name = options.loadingImage;
img_result = 'success';
// color the row
if (options.colorRow)
@@ -37,16 +167,22 @@
var rSearchTerm = /(\w+)\s\((.+?)\)/;
HtmlContent = data.result.replace(rSearchTerm,"$1"+' <span class="quality '+data.quality+'">'+"$2"+'</span>');
// update the status column if it exists
parent.siblings('.status_column').html(HtmlContent)
parent.siblings('.status_column').html(HtmlContent)
// Only if the queuing was successful, disable the onClick event of the loading image
disableLink(link);
}
// put the corresponding image as the result for the the row
parent.empty();
parent.append($("<img/>").attr({"src": sbRoot+"/images/"+img_name, "height": options.size, "alt": img_result, "title": img_result}));
// put the corresponding image as the result of queuing of the manual search
img.attr('title',img_result);
img.attr('alt',img_result);
img.attr('height', options.size);
img.attr('src',sbRoot+"/images/"+img_name);
});
// fon't follow the link
//
// don't follow the link
return false;
});
}
})();


@@ -1,7 +1,7 @@
$(document).ready(function () {
$('#sbRoot').ajaxEpSearch({'colorRow': true});
$('#sbRoot').ajaxEpRetry({'colorRow': true});
//$('#sbRoot').ajaxEpRetry({'colorRow': true});
$('#sbRoot').ajaxEpSubtitlesSearch();


@@ -37,6 +37,8 @@ DAILY_SEARCH = 20
FAILED_SEARCH = 30
MANUAL_SEARCH = 40
MANUAL_SEARCH_HISTORY = []
MANUAL_SEARCH_HISTORY_SIZE = 100
class SearchQueue(generic_queue.GenericQueue):
def __init__(self):
@@ -54,7 +56,23 @@ class SearchQueue(generic_queue.GenericQueue):
if isinstance(cur_item, (ManualSearchQueueItem, FailedQueueItem)) and cur_item.segment == segment:
return True
return False
def is_show_in_queue(self, show):
for cur_item in self.queue:
if isinstance(cur_item, (ManualSearchQueueItem, FailedQueueItem)) and cur_item.show.indexerid == show:
return True
return False
def get_all_ep_from_queue(self, show):
ep_obj_list = []
for cur_item in self.queue:
if isinstance(cur_item, (ManualSearchQueueItem, FailedQueueItem)) and str(cur_item.show.indexerid) == show:
ep_obj_list.append(cur_item)
if ep_obj_list:
return ep_obj_list
return False
def pause_backlog(self):
self.min_priority = generic_queue.QueuePriorities.HIGH
@@ -65,6 +83,12 @@ class SearchQueue(generic_queue.GenericQueue):
# backlog priorities are NORMAL, this should be done properly somewhere
return self.min_priority >= generic_queue.QueuePriorities.NORMAL
def is_manualsearch_in_progress(self):
for cur_item in self.queue + [self.currentItem]:
if isinstance(cur_item, (ManualSearchQueueItem, FailedQueueItem)):
return True
return False
def is_backlog_in_progress(self):
for cur_item in self.queue + [self.currentItem]:
if isinstance(cur_item, BacklogQueueItem):
@@ -140,12 +164,15 @@ class ManualSearchQueueItem(generic_queue.QueueItem):
self.success = None
self.show = show
self.segment = segment
self.started = None
def run(self):
generic_queue.QueueItem.run(self)
try:
logger.log("Beginning manual search for: [" + self.segment.prettyName() + "]")
self.started = True
searchResult = search.searchProviders(self.show, [self.segment], True)
if searchResult:
@@ -164,7 +191,10 @@ class ManualSearchQueueItem(generic_queue.QueueItem):
except Exception:
logger.log(traceback.format_exc(), logger.DEBUG)
### Keep a list of the last 100 executed searches
fifo(MANUAL_SEARCH_HISTORY, self, MANUAL_SEARCH_HISTORY_SIZE)
if self.success is None:
self.success = False
@@ -245,4 +275,9 @@ class FailedQueueItem(generic_queue.QueueItem):
if self.success is None:
self.success = False
self.finish()
self.finish()
def fifo(myList, item, maxSize = 100):
if len(myList) >= maxSize:
myList.pop(0)
myList.append(item)
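
As a standalone illustration (not part of the commit), the bounded history that fifo() maintains for MANUAL_SEARCH_HISTORY behaves like this; the helper is copied here only so the snippet runs on its own:

# Sketch of the bounded-history behaviour of fifo(); not SickRage code.
def fifo(myList, item, maxSize=100):
    if len(myList) >= maxSize:
        myList.pop(0)        # drop the oldest entry first
    myList.append(item)      # then append the newest one

history = []
for n in range(105):
    fifo(history, n)

print(len(history))   # 100 -- the list never grows past maxSize
print(history[0])     # 5   -- the five oldest entries were dropped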


@@ -4307,10 +4307,9 @@ class Home(MainHandler):
root_ep_obj.rename()
redirect("/home/displayShow?show=" + show)
def searchEpisode(self, show=None, season=None, episode=None):
# retrieve the episode object and fail if we can't get one
ep_obj = _getEpisode(show, season, episode)
if isinstance(ep_obj, str):
@@ -4318,28 +4317,87 @@ class Home(MainHandler):
# make a queue item for it and put it on the queue
ep_queue_item = search_queue.ManualSearchQueueItem(ep_obj.show, ep_obj)
sickbeard.searchQueueScheduler.action.add_item(ep_queue_item) # @UndefinedVariable
# wait until the queue item tells us whether it worked or not
while ep_queue_item.success is None: # @UndefinedVariable
time.sleep(cpu_presets[sickbeard.CPU_PRESET])
# return the correct json value
if ep_queue_item.success:
# Find the quality class for the episode
quality_class = Quality.qualityStrings[Quality.UNKNOWN]
ep_status, ep_quality = Quality.splitCompositeStatus(ep_obj.status)
for x in (SD, HD720p, HD1080p):
if ep_quality in Quality.splitQuality(x)[0]:
quality_class = qualityPresetStrings[x]
break
return returnManualSearchResult(ep_queue_item)
if not ep_queue_item.started and ep_queue_item.success is None:
return json.dumps({'result': 'success'})  # I actually want to call it 'queued', because the search hasn't been started yet!
if ep_queue_item.started and ep_queue_item.success is None:
return json.dumps({'result': 'success'})
else:
return json.dumps({'result': 'failure'})
return json.dumps({'result': statusStrings[ep_obj.status],
'quality': quality_class
})
### Returns the current ep_queue_item status for the current viewed show.
# Possible status: Downloaded, Snatched, etc...
# Returns {'show': 279530, 'episodes': [{'episode': 6, 'season': 1, 'searchstatus': 'queued', 'status': 'running', 'quality': '4013'}]}
def getManualSearchStatus(self, show=None, season=None):
return json.dumps({'result': 'failure'})
episodes = []
currentManualSearchThreadsQueued = []
currentManualSearchThreadActive = []
finishedManualSearchThreadItems= []
# Queued Searches
currentManualSearchThreadsQueued = sickbeard.searchQueueScheduler.action.get_all_ep_from_queue(show)
# Running Searches
if (sickbeard.searchQueueScheduler.action.is_manualsearch_in_progress()):
currentManualSearchThreadActive = sickbeard.searchQueueScheduler.action.currentItem
# Finished Searches
finishedManualSearchThreadItems = sickbeard.search_queue.MANUAL_SEARCH_HISTORY
if currentManualSearchThreadsQueued:
for searchThread in currentManualSearchThreadsQueued:
searchstatus = 'queued'
episodes.append({'episode': searchThread.segment.episode,
'episodeindexid': searchThread.segment.indexerid,
'season' : searchThread.segment.season,
'searchstatus' : searchstatus,
'status' : statusStrings[searchThread.segment.status],
'quality': self.getQualityClass(searchThread.segment)})
if currentManualSearchThreadActive:
searchThread = currentManualSearchThreadActive
searchstatus = 'searching'
if searchThread.success:
searchstatus = 'finished'
episodes.append({'episode': searchThread.segment.episode,
'episodeindexid': searchThread.segment.indexerid,
'season' : searchThread.segment.season,
'searchstatus' : searchstatus,
'status' : statusStrings[searchThread.segment.status],
'quality': self.getQualityClass(searchThread.segment)})
if finishedManualSearchThreadItems:
for searchThread in finishedManualSearchThreadItems:
if str(searchThread.show.indexerid) == show and not [x for x in episodes if x['episodeindexid'] == searchThread.segment.indexerid]:
searchstatus = 'finished'
episodes.append({'episode': searchThread.segment.episode,
'episodeindexid': searchThread.segment.indexerid,
'season' : searchThread.segment.season,
'searchstatus' : searchstatus,
'status' : statusStrings[searchThread.segment.status],
'quality': self.getQualityClass(searchThread.segment)})
return json.dumps({'show': show, 'episodes' : episodes})
#return json.dumps()
def getQualityClass(self, ep_obj):
# return the correct json value
# Find the quality class for the episode
quality_class = Quality.qualityStrings[Quality.UNKNOWN]
ep_status, ep_quality = Quality.splitCompositeStatus(ep_obj.status)
for x in (SD, HD720p, HD1080p):
if ep_quality in Quality.splitQuality(x)[0]:
quality_class = qualityPresetStrings[x]
break
return quality_class
def searchEpisodeSubtitles(self, show=None, season=None, episode=None):