Merge branch 'feature/FixSearchRetrySnatched' into develop

JackDandy 2018-10-02 20:05:31 +01:00
commit a2e884561e
15 changed files with 516 additions and 370 deletions

View file

@@ -24,6 +24,10 @@
* Add search results sort by oldest aired
* Change requirements.txt Cheetah >= 3.1.0
* Add Snowfl torrent provider
* Fix manual search button on displayShow and episode view page
* Change feedback result of manual search on the clicked button image/tooltip
* Change reduce browser I/O on displayShow
* Fix displayShow bug where click-holding on a season button and then dragging away leaves it 50% white
[develop changelog]

View file

@@ -1060,6 +1060,7 @@ fieldset[disabled] .navbar-default .btn-link:focus{
outline:thin dotted #333;
outline:5px auto -webkit-focus-ring-color;
outline-offset:-2px;
background-position:0;
color:#ddd
}

View file

@@ -2130,6 +2130,19 @@ td.col-search{
width:46px
}
td.col-search{
font-size:10px
}
.ep-search,
.ep-retry,
.ep-search img[src=""],
.ep-retry img[src=""]{
display:inline-block;
width:16px;
height:16px
}
#testRenameTable tbody td.col-checkbox,
#testRenameTable tbody td.col-ep{width:1%;vertical-align:middle}
#testRenameTable tbody td.col-name{

View file

@@ -50,11 +50,11 @@
<script type="text/javascript" src="$sbRoot/js/lib/jquery.collapser.min.js?v=$sbPID"></script>
<style>
.bfr{position:absolute;left:-999px;top:-999px}.bfr img{width:16px;height:16px}.spinner{display:inline-block;width:16px;height:16px;background:url(${sbRoot}/images/loading16${theme_suffix}.gif) no-repeat 0 0}
.bfr{position:absolute;left:-999px;top:-999px}.bfr img,img.spinner,img.queued,img.search{display:inline-block;width:16px;height:16px}img.spinner{background:url(${sbRoot}/images/loading16${theme_suffix}.gif) no-repeat 0 0}img.queued{background:url(${sbRoot}/images/queued.png) no-repeat 0 0}img.search{background:url(${sbRoot}/images/search16.png) no-repeat 0 0}
.images i{margin-right:6px;margin-top:5px}.hide{display:none}
.tvshowImg {border:1px solid transparent;min-width:226px;min-height:332px}
</style>
<div class="bfr"><img src="$sbRoot/images/loading16${theme_suffix}.gif" /></div>
<div class="bfr"><img src="$sbRoot/images/loading16${theme_suffix}.gif"><img src="$sbRoot/images/queued.png"><img src="$sbRoot/images/search16.png"><img src="$sbRoot/images/no16.png"><img src="$sbRoot/images/yes16.png"></div>
<div id="background-container">
#if $has_art

View file

@@ -17,6 +17,7 @@
#set $restart = 'Restart SickGear for new features on this page'
#set $show_message = (None, $restart)[not $varExists('fanart')]
#set global $page_body_attr = 'episode-view" class="' + $css
#set theme_suffix = ('', '-dark')['dark' == $sg_str('THEME_NAME', 'dark')]
##
#import os.path
#include $os.path.join($sg_str('PROG_DIR'), 'gui/slick/interfaces/default/inc_top.tmpl')
@@ -176,7 +177,9 @@
.asc{border-top:0; border-bottom:8px solid}
.desc{border-top:8px solid; border-bottom:0}
#end if
.bfr{position:absolute;left:-999px;top:-999px}.bfr img,img.spinner,img.queued,img.search{display:inline-block;width:16px;height:16px}img.spinner{background:url(${sbRoot}/images/loading16${theme_suffix}.gif) no-repeat 0 0}img.queued{background:url(${sbRoot}/images/queued.png) no-repeat 0 0}img.search{background:url(${sbRoot}/images/search16.png) no-repeat 0 0}
</style>
<div class="bfr"><img src="$sbRoot/images/loading16${theme_suffix}.gif"><img src="$sbRoot/images/queued.png"><img src="$sbRoot/images/search16.png"><img src="$sbRoot/images/no16.png"><img src="$sbRoot/images/yes16.png"></div>
#if $show_message
<div class="alert alert-info" style="margin:-40px 0 50px">
@@ -293,9 +296,9 @@
});
$(document).ready(function(){
sortList = [[$table_sort_header_codes[$sort], 0]];
#end raw
var sortList = [[$table_sort_header_codes[$sort], 0]];
#raw
$('#showListTable:has(tbody tr)').tablesorter({
widgets: ['stickyHeaders'],
sortList: sortList,
@@ -381,9 +384,10 @@
#end if
#end if
#set $show_id = '%s_%sx%s' % (str($cur_result['showid']), str($cur_result['season']), str($cur_result['episode']))
#set $show_id = '%s_%sx%s' % ($cur_result['showid'], $cur_result['season'], $cur_result['episode'])
#set $id_sxe = '%s_%s' % ($cur_result['indexer'], $show_id)
<!-- start $cur_result['show_name'] //-->
<tr id="show-${show_id}" class="$show_div" data-rawname="$cur_result['show_name']">
<tr id="show-${show_id}" class="$show_div" data-rawname="$cur_result['show_name']" data-show-id="$id_sxe">
## forced to use a div to wrap airdate; the column sort went crazy with a span
<td align="center" class="nowrap">
<div class="${fuzzydate}">$sbdatetime.sbdatetime.sbfdatetime($cur_result['localtime']).decode($sickbeard.SYS_ENCODING)</div><span class="sort-data">$cur_result['localtime'].strftime('%Y%m%d%H%M')</span>
@@ -428,7 +432,7 @@
</td>
<td align="center">
<a href="$sbRoot/home/searchEpisode?show=${cur_result['showid']}&amp;season=$cur_result['season']&amp;episode=$cur_result['episode']" title="Manual Search" id="forceUpdate-${cur_result['showid']}" class="forceUpdate epSearch"><img alt="[search]" height="16" width="16" src="$sbRoot/images/search16.png" id="forceUpdateImage-${cur_result['showid']}" /></a>
<a class="ep-search" href="$sbRoot/home/episode_search?show=${cur_result['showid']}&amp;season=$cur_result['season']&amp;episode=$cur_result['episode']" title="Manual Search"><img title="[search]" alt="[search]" height="16" width="16" src="$sbRoot/images/search16.png" /></a>
</td>
</tr>
<!-- end $cur_result['show_name'] //-->
@@ -455,7 +459,7 @@
<!--
#raw
$(document).ready(function(){
$('#sbRoot').ajaxEpSearch({'size': 16, 'loadingImage': 'loading16' + themeSpinner + '.gif'});
$('#sbRoot').ajaxEpSearch();
$('.ep_summary').hide();
$('.ep_summaryTrigger').click(function() {
$(this).next('.ep_summary').slideToggle('normal', function() {
@@ -588,7 +592,8 @@
#end if
#slurp
<!-- start $cur_result['show_name'] //-->
<div class="$show_div" id="listing-${cur_result['showid']}">
#set $id_sxe = '%s_%s_%sx%s' % ($cur_result['indexer'], $cur_result['showid'], $cur_result['season'], $cur_result['episode'])
<div class="$show_div" id="listing-${cur_result['showid']}" data-show-id="$id_sxe">
<div class="tvshowDiv">
<table width="100%" border="0" cellpadding="0" cellspacing="0">
<tr>
@@ -614,7 +619,7 @@
<a href="<%= anon_url(cur_result['imdb_url']) %>" rel="noreferrer" onclick="window.open(this.href, '_blank'); return false" title="${cur_result['imdb_url']}"><img alt="[$sickbeard.indexerApi(INDEXER_IMDB).name]" height="16" width="16" src="$sbRoot/images/$sickbeard.indexerApi(INDEXER_IMDB).config.get('icon')" /></a>
#end if
<a href="<%= anon_url(sickbeard.indexerApi(cur_indexer).config['show_url'] % cur_result['showid']) %>" rel="noreferrer" onclick="window.open(this.href, '_blank'); return false" title="${sickbeard.indexerApi($cur_indexer).config['show_url'] % cur_result['showid']}"><img alt="$sickbeard.indexerApi($cur_indexer).name" height="16" width="16" src="$sbRoot/images/$sickbeard.indexerApi($cur_indexer).config['icon']" /></a>
<span><a href="$sbRoot/home/searchEpisode?show=${cur_result['showid']}&amp;season=$cur_result['season']&amp;episode=$cur_result['episode']" title="Manual Search" id="forceUpdate-${cur_result['showid']}" class="epSearch forceUpdate"><img alt="[search]" height="16" width="16" src="$sbRoot/images/search16.png" id="forceUpdateImage-${cur_result['showid']}" /></a></span>
<span><a class="ep-search" href="$sbRoot/home/episode_search?show=${cur_result['showid']}&amp;season=$cur_result['season']&amp;episode=$cur_result['episode']" title="Manual Search"><img title="[search]" alt="[search]" height="16" width="16" src="$sbRoot/images/search16.png" /></a></span>
</span>
</div>

View file

@@ -1,7 +1,7 @@
#import datetime
#import sickbeard
#from sickbeard import network_timezones, sbdatetime, subtitles
#from sickbeard.common import Overview, Quality, statusStrings, ARCHIVED, UNAIRED, SUBTITLED
#from sickbeard.common import Overview, Quality, statusStrings, ARCHIVED, UNAIRED, SUBTITLED, SNATCHED_ANY, DOWNLOADED
#from lib import subliminal
<% def sg_var(varname, default=False): return getattr(sickbeard, varname, default) %>#slurp#
<% def sg_str(varname, default=''): return getattr(sickbeard, varname, default) %>#slurp#
@@ -101,16 +101,17 @@
#slurp
#set $curStatus, $curQuality = $Quality.splitCompositeStatus(int($ep['status']))
#if Quality.NONE != $curQuality
<td class="col-status">#if $SUBTITLED == $curStatus#<span class="addQTip" title="$statusStrings[$curStatus]"><i class="sgicon-subtitles" style="vertical-align:middle"></i></span>#else#$statusStrings[$curStatus].replace('Downloaded', '')#end if# <span class="quality $Quality.get_quality_css($curQuality)#if $downloaded# addQTip" title="$downloaded#end if#">$Quality.get_quality_ui($curQuality)</span></td>
<td class="col-status">#if $SUBTITLED == $curStatus#<span class="addQTip" title="$statusStrings[$curStatus]"><i class="sgicon-subtitles" style="vertical-align:middle"></i></span>#else#$statusStrings[$curStatus].replace('Downloaded', '')#end if# #if 'Unknown' != $statusStrings[$curStatus]#<span class="quality $Quality.get_quality_css($curQuality)#if $downloaded# addQTip" title="$downloaded#end if#">$Quality.get_quality_ui($curQuality)</span>#end if#</td>
#else
<td class="col-status">$statusStrings[$curStatus].replace('SD DVD', 'SD DVD/BR/BD')</td>
#end if
<td class="col-search">
#if 0 != int($ep['season'])
#if (int($ep['status']) in $Quality.SNATCHED or int($ep['status']) in $Quality.DOWNLOADED) and $sg_var('USE_FAILED_DOWNLOADS')
<a class="epRetry" id="$ep_str" name="$ep_str" href="$sbRoot/home/retryEpisode?show=$show.indexerid&amp;season=$ep['season']&amp;episode=$ep['episode']"><img src="$sbRoot/images/search16.png" height="16" alt="retry" title="Retry download"></a>
#set $status = $Quality.splitCompositeStatus(int($ep['status']))[0]
#if ($status in $SNATCHED_ANY + [$DOWNLOADED, $ARCHIVED]) and $sg_var('USE_FAILED_DOWNLOADS')
<a class="ep-retry" href="$sbRoot/home/episode_retry?show=$show.indexerid&amp;season=$ep['season']&amp;episode=$ep['episode']"><img src="$sbRoot/images/search16.png" height="16" alt="retry" title="Retry download"></a>
#else
<a class="epSearch" id="$ep_str" name="$ep_str" href="$sbRoot/home/searchEpisode?show=$show.indexerid&amp;season=$ep['season']&amp;episode=$ep['episode']"><img src="$sbRoot/images/search16.png" width="16" height="16" alt="search" title="Manual search"></a>
<a class="ep-search" href="$sbRoot/home/episode_search?show=$show.indexerid&amp;season=$ep['season']&amp;episode=$ep['episode']"><img src="$sbRoot/images/search16.png" width="16" height="16" alt="search" title="Manual search"></a>
#end if
#end if
#slurp

View file

@@ -1,105 +1,162 @@
var search_status_url = sbRoot + '/home/getManualSearchStatus';
/** @namespace $.SickGear.Root */
/** @namespace data.episodes */
/** @namespace ep.showindexer */
/** @namespace ep.showindexid */
/** @namespace ep.season */
/** @namespace ep.episode */
/** @namespace ep.searchstate */
/** @namespace ep.status */
/** @namespace ep.quality */
/** @namespace ep.retrystate */
/** @namespace ep.statusoverview */
var dev = !1,
logInfo = dev ? console.info.bind(window.console) : function() {},
logErr = dev ? console.error.bind(window.console) : function() {};
PNotify.prototype.options.maxonscreen = 5;
$.fn.manualSearches = [];
function check_manual_searches() {
var poll_interval = 5000;
$.ajax({
url: search_status_url + '?show=' + $('#showID').val(),
success: function (data) {
if (data.episodes) {
poll_interval = 5000;
}
else {
poll_interval = 15000;
}
updateImages(data);
//cleanupManualSearches(data);
},
error: function () {
poll_interval = 30000;
},
type: "GET",
dataType: "json",
complete: function () {
setTimeout(check_manual_searches, poll_interval);
},
timeout: 15000 // timeout every 15 secs
});
}
function updateImages(data) {
$.each(data.episodes, function (name, ep) {
console.debug(ep.searchstatus);
// Get td element for current ep
var loadingImage = 'loading16.gif';
var queuedImage = 'queued.png';
var searchImage = 'search16.png';
var status = null;
//Try to get the <a> Element
el=$('a[id=' + ep.season + 'x' + ep.episode+']');
img=el.children('img');
parent=el.parent();
if (el) {
if (ep.searchstatus == 'searching') {
//el=$('td#' + ep.season + 'x' + ep.episode + '.search img');
img.attr('title','Searching');
img.attr('alt','searching');
img.attr('src',sbRoot+'/images/' + loadingImage);
disableLink(el);
// Update Status and Quality
var rSearchTerm = /(\w+)\s\((.+?)\)/;
HtmlContent = ep.searchstatus;
}
else if (ep.searchstatus == 'queued') {
//el=$('td#' + ep.season + 'x' + ep.episode + '.search img');
img.attr('title','Queued');
img.attr('alt','queued');
img.attr('src',sbRoot+'/images/' + queuedImage );
disableLink(el);
HtmlContent = ep.searchstatus;
}
else if (ep.searchstatus == 'finished') {
//el=$('td#' + ep.season + 'x' + ep.episode + '.search img');
img.attr('title','Searching');
img.attr('alt','searching');
if (ep.retrystatus) {img.parent().attr('class','epRetry');} else {img.parent().attr('class','epSearch');}
img.attr('src',sbRoot+'/images/' + searchImage);
enableLink(el);
// Update Status and Quality
var rSearchTerm = /(\w+)\s\((.+?)\)/;
HtmlContent = ep.status.replace(rSearchTerm,"$1"+' <span class="quality '+ep.quality+'">'+"$2"+'</span>');
}
// update the status column if it exists
parent.siblings('.col-status').html(HtmlContent)
}
});
}
$(document).ready(function () {
check_manual_searches();
ajaxConsumer.checkManualSearches();
});
function enableLink(el) {
el.on('click.disabled', false);
el.attr('enableClick', '1');
el.fadeTo("fast", 1)
var ajaxConsumer = function () {
var that = this;
that.timeoutId = 0;
that.pollInterval = 0;
logInfo('init ajaxConsumer');
return {
checkManualSearches : function () {
logInfo('ajaxConsumer.checkManualSearches()');
var showId = $('#showID').val();
$.getJSON({
url: $.SickGear.Root + '/home/search_q_progress' + (/undefined/i.test(showId) ? '' : '?show=' + showId),
timeout: 15000 // timeout request after 15 secs
})
.done(function (data) {
logInfo('search_q_progress.success(data)', data);
if (!data.episodes || 0 === data.episodes.length) {
imgRestore();
}
// using 5s as a reasonable max. when updating images from historical statuses after a page refresh
that.pollInterval = data.episodes && data.episodes.length
? (uiUpdateComplete(data) ? 5000 : 1000) : 10000; // 10000/0
})
.fail(function () {
logErr('search_q_progress.error()');
that.pollInterval = 30000;
})
.always(function (jqXHR, textStatus) {
logInfo('search_q_progress.complete(textStatus)', '\'' + textStatus + '\'.');
clearTimeout(that.timeoutId);
if (that.pollInterval)
that.timeoutId = setTimeout(ajaxConsumer.checkManualSearches, that.pollInterval);
logInfo(that.pollInterval ? '^-- ' + that.pollInterval/1000 + 's to next work' : '^-- no more work');
logInfo('====');
});
}
};
}();
function uiUpdateComplete(data) {
var isFinished = !0;
$.each(data.episodes, function (name, ep) {
var sxe = ep.season + 'x' + ep.episode,
displayShow$ = $('#' + sxe).closest('tr'),
episodeView$ = $('[data-show-id="' + ep.showindexer + '_' + ep.showindexid + '_' + sxe + '"]'),
link$ = (displayShow$.length ? displayShow$ : episodeView$).find('.ep-search, .ep-retry'),
uiOptions = $.ajaxEpSearch.defaults;
logInfo('^-- data item', name, ep.searchstate, ep.showindexid, sxe, ep.statusoverview);
if (link$.length) {
var htmlContent = '', imgTip, imgCls;
switch (ep.searchstate) {
case 'searching':
isFinished = !1;
imgUpdate(link$, 'Searching', uiOptions.loadingImage);
disableLink(link$);
htmlContent = '[' + ep.searchstate + ']';
break;
case 'queued':
isFinished = !1;
imgUpdate(link$, 'Queued', uiOptions.queuedImage);
disableLink(link$);
htmlContent = '[' + ep.searchstate + ']';
break;
case 'finished':
var attrName = !!getAttr(link$, 'href') ? 'href' : 'data-href', href = getAttr(link$, attrName);
if (ep.retrystate) {
imgTip = 'Click to retry download';
link$.attr('class', 'ep-retry').attr(attrName, href.replace('search', 'retry'));
} else {
imgTip = 'Click for manual search';
link$.attr('class', 'ep-search').attr(attrName, href.replace('retry', 'search'));
}
if (/good/i.test(ep.statusoverview)) {
imgCls = uiOptions.searchImage;
} else if (/snatched/i.test(ep.statusoverview)) {
imgCls = uiOptions.imgYes;
} else {
imgTip = 'Last manual search failed. Click to try again';
imgCls = uiOptions.imgNo;
}
imgUpdate(link$, imgTip, imgCls);
enableLink(link$);
// update row status
if (ep.statusoverview) {
link$.closest('tr')
.removeClass('skipped wanted qual good unaired snatched')
.addClass(ep.statusoverview);
}
// update quality text for status column
var rSearchTerm = /(\w+)\s\((.+?)\)/;
htmlContent = ep.status.replace(rSearchTerm,
'$1' + ' <span class="quality ' + ep.quality + '">' + '$2' + '</span>');
// remove backed-up vars
link$.removeAttr('data-status data-imgclass');
}
function disableLink(el) {
el.off('click.disabled');
el.attr('enableClick', '0');
el.fadeTo("fast", .5)
// update the status area
link$.closest('.col-search').siblings('.col-status').html(htmlContent);
}
});
return isFinished;
}
function enableLink(el$) {
el$.attr('href', el$.attr('data-href')).removeAttr('data-href').fadeTo('fast', 1);
}
function disableLink(el$) {
el$.attr('data-href', el$.attr('href')).removeAttr('href').fadeTo('fast', .7);
}
function getAttr(el$, name) {
return el$.is('[' + name + ']') ? el$.attr(name) : !1;
}
function imgUpdate(link$, tip, cls) {
link$.find('img').attr('src', '').attr('title', tip).prop('alt', '')
.removeClass('spinner queued search no yes').addClass(cls);
}
function imgRestore() {
$('a[data-status]').each(function() {
$(this).closest('.col-search').siblings('.col-status').html($(this).attr('data-status'));
imgUpdate($(this),
getAttr($(this), 'data-imgtitle'),
getAttr($(this), 'data-imgclass') || $.ajaxEpSearch.defaults.searchImage);
$(this).removeAttr('data-status data-imgclass data-imgtitle');
});
}
(function() {
@@ -107,77 +164,72 @@ function disableLink(el) {
$.ajaxEpSearch = {
defaults: {
size: 16,
colorRow: false,
loadingImage: 'loading16.gif',
queuedImage: 'queued.png',
noImage: 'no16.png',
yesImage: 'yes16.png'
colorRow: !1,
loadingImage: 'spinner',
queuedImage: 'queued',
searchImage: 'search',
imgNo: 'no',
imgYes: 'yes'
}
};
$.fn.ajaxEpSearch = function(options){
options = $.extend({}, $.ajaxEpSearch.defaults, options);
$.fn.ajaxEpSearch = function(uiOptions) {
uiOptions = $.extend( {}, $.ajaxEpSearch.defaults, uiOptions);
$('.epSearch, .epRetry').click(function(event){
$('.ep-search, .ep-retry').on('click', function(event) {
event.preventDefault();
logInfo(($(this).hasClass('ep-search') ? 'Search' : 'Retry') + ' clicked');
// Check if we have disabled the click
if ( $(this).attr('enableClick') == '0' ) {
console.debug("Already queued, not downloading!");
return false;
// check if we have disabled the click
if (!!getAttr($(this), 'data-href')) {
logInfo('Already queued, not downloading!');
return !1;
}
if ( $(this).attr('class') == "epRetry" ) {
if ( !confirm("Mark download as bad and retry?") )
return false;
if ($(this).hasClass('ep-retry')
&& !confirm('Mark download as bad and retry?')) {
return !1;
}
var link$ = $(this), img$ = link$.find('img'), img = ['Failed', uiOptions.imgNo], imgCls;
// backup ui vars
if (link$.closest('.col-search') && link$.closest('.col-search').siblings('.col-status')) {
link$.attr('data-status', link$.closest('.col-search').siblings('.col-status').html().trim());
}
link$.attr('data-imgtitle', getAttr(img$, 'title'));
if (imgCls = getAttr(img$, 'class')) {
link$.attr('data-imgclass', imgCls.trim());
}
imgUpdate(link$, 'Loading', uiOptions.loadingImage);
$.getJSON({url: $(this).attr('href'), timeout: 15000})
.done(function(data) {
logInfo('getJSON() data...', data);
// if failed, replace success/queued with initiated red X/No
if ('failure' !== data.result) {
// otherwise, queued successfully
// update ui status
link$.closest('.col-search').siblings('.col-status').html('[' + data.result + ']');
// prevent further interaction
disableLink(link$);
img = 'queueing' === data.result
? ['Queueing', uiOptions.queuedImage]
: ['Searching', uiOptions.loadingImage];
}
// update ui image
imgUpdate(link$, img[0], img[1]);
ajaxConsumer.checkManualSearches();
})
.fail(function() { imgRestore(); });
// prevent following the clicked link
return !1;
});
};
var parent = $(this).parent();
// Create var for anchor
link = $(this);
// Create var for img under anchor and set options for the loading gif
img=$(this).children('img');
img.attr('title','loading');
img.attr('alt','');
img.attr('src',sbRoot+'/images/' + options.loadingImage);
$.getJSON($(this).attr('href'), function(data){
// if they failed then just put the red X
if (data.result == 'failure') {
img_name = options.noImage;
img_result = 'failed';
// if the snatch was successful then apply the corresponding class and fill in the row appropriately
} else {
img_name = options.loadingImage;
img_result = 'success';
// color the row
if (options.colorRow)
parent.parent().removeClass('skipped wanted qual good unaired').addClass('snatched');
// applying the quality class
var rSearchTerm = /(\w+)\s\((.+?)\)/;
HtmlContent = data.result.replace(rSearchTerm,"$1"+' <span class="quality '+data.quality+'">'+"$2"+'</span>');
// update the status column if it exists
parent.siblings('.col-status').html(HtmlContent)
// Only if the queuing was successful, disable the onClick event of the loading image
disableLink(link);
}
// put the corresponding image as the result of queuing of the manual search
img.attr('title',img_result);
img.attr('alt',img_result);
img.attr('height', options.size);
img.attr('src',sbRoot+"/images/"+img_name);
});
//
// don't follow the link
return false;
});
}
})();
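The ajaxConsumer above adapts its poll interval to the queue state: 5s while searches are in flight and the UI is current, 1s while rows still need repainting, 10s when idle, and 30s after a request failure. A minimal Python sketch of that backoff policy (the function and argument names are illustrative, not part of the codebase):

# Sketch of the poll backoff used by ajaxConsumer.checkManualSearches().
def next_poll_interval(episodes, ui_update_complete, request_failed):
    """Return milliseconds to wait before the next search_q_progress poll."""
    if request_failed:
        return 30000                # back off hard after an ajax error
    if not episodes:
        return 10000                # idle: nothing queued or searching
    return 5000 if ui_update_complete else 1000  # busy: repaint fast until the UI settles

assert 10000 == next_poll_interval([], ui_update_complete=True, request_failed=False)
assert 1000 == next_poll_interval([dict(searchstate='queued')], False, False)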

View file

@@ -173,7 +173,7 @@ $(document).ready(function() {
qTips($('.addQTip'));
function table_init(table$) {
$('#sbRoot').ajaxEpSearch({'colorRow': true});
$('#sbRoot').ajaxEpSearch();
$('#sbRoot').ajaxEpSubtitlesSearch();
if ($.SickGear.config.useFuzzy) {

View file

@@ -452,3 +452,17 @@ else:
return v if v is not None else default
sickbeard.ENV = LinuxEnv(os.environ)
# backport from python 3
class SimpleNamespace:
def __init__(self, **kwargs):
self.__dict__.update(kwargs)
def __repr__(self):
keys = sorted(self.__dict__)
items = ("{}={!r}".format(k, self.__dict__[k]) for k in keys)
return "{}({})".format(type(self).__name__, ", ".join(items))
def __eq__(self, other):
return self.__dict__ == other.__dict__
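The class above mirrors Python 3's types.SimpleNamespace. A quick usage sketch (all values hypothetical):

ns = SimpleNamespace(indexer=1, indexerid=71663, season=2, episode=3)
print(ns.episode)  # -> 3, plain attribute access
print(repr(ns))    # -> SimpleNamespace(episode=3, indexer=1, indexerid=71663, season=2)
print(ns == SimpleNamespace(indexer=1, indexerid=71663, season=2, episode=3))  # -> True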

View file

@@ -18,6 +18,7 @@
import datetime
import threading
import copy
from sickbeard import logger
@@ -98,6 +99,7 @@ class GenericQueue(object):
self.currentItem.name = self.queue_name + '-' + self.currentItem.name
self.currentItem.start()
class QueueItem(threading.Thread):
def __init__(self, name, action_id=0):
super(QueueItem, self).__init__()
@@ -109,6 +111,24 @@ class QueueItem(threading.Thread):
self.stop = threading.Event()
self.added = None
def copy(self, deepcopy_obj=None):
"""
Return a shallow copy of this QueueItem, deep copying any attributes named in deepcopy_obj
:param deepcopy_obj: List of attribute names to deep copy
:type deepcopy_obj: list
:return: copy of this QueueItem
:rtype: QueueItem
"""
cls = self.__class__
result = cls.__new__(cls)
result.__dict__.update(self.__dict__)
if deepcopy_obj:
for o in deepcopy_obj:
if self.__dict__.get(o):
new_seg = copy.deepcopy(self.__dict__.get(o))
result.__dict__[o] = new_seg
return result
def run(self):
"""Implementing classes should call this"""

View file

@@ -59,7 +59,7 @@ except ImportError:
from sickbeard.exceptions import MultipleShowObjectsException, ex
from sickbeard import logger, db, notifiers, clients
from sickbeard.common import USER_AGENT, mediaExtensions, subtitleExtensions, cpu_presets, statusStrings, \
SNATCHED_ANY, DOWNLOADED, ARCHIVED, IGNORED, Quality
SNATCHED_ANY, DOWNLOADED, ARCHIVED, IGNORED, WANTED, SKIPPED, UNAIRED, UNKNOWN, SUBTITLED, FAILED, Quality, Overview
from sickbeard import encodingKludge as ek
from lib.cachecontrol import CacheControl, caches
@@ -1797,3 +1797,34 @@ def clean_data(data):
from lib.six.moves.html_parser import HTMLParser
return HTMLParser().unescape(data).strip().replace(u'&amp;', u'&')
return data
def getOverview(epStatus, show_quality, upgrade_once):
status, quality = Quality.splitCompositeStatus(epStatus)
if ARCHIVED == status:
return Overview.GOOD
if WANTED == status:
return Overview.WANTED
if status in (SKIPPED, IGNORED):
return Overview.SKIPPED
if status in (UNAIRED, UNKNOWN):
return Overview.UNAIRED
if status in [SUBTITLED] + Quality.SNATCHED_ANY + Quality.DOWNLOADED + Quality.FAILED:
if FAILED == status:
return Overview.WANTED
if status in SNATCHED_ANY:
return Overview.SNATCHED
void, best_qualities = Quality.splitQuality(show_quality)
# if re-downloads aren't wanted then mark it "good" if there is anything
if not len(best_qualities):
return Overview.GOOD
min_best, max_best = min(best_qualities), max(best_qualities)
if quality >= max_best \
or (upgrade_once and
(quality in best_qualities or (None is not min_best and quality > min_best))):
return Overview.GOOD
return Overview.QUAL
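The tail of getOverview() encodes the re-download policy. A standalone sketch with plain integers standing in for the Quality constants (names and values are illustrative only):

def redownload_overview(quality, best_qualities, upgrade_once):
    if not len(best_qualities):
        return 'good'  # no upgrade targets configured, anything counts
    min_best, max_best = min(best_qualities), max(best_qualities)
    if quality >= max_best \
            or (upgrade_once and (quality in best_qualities or quality > min_best)):
        return 'good'
    return 'qual'      # still eligible for a better quality

assert 'qual' == redownload_overview(1, [2, 4], upgrade_once=False)
assert 'good' == redownload_overview(3, [2, 4], upgrade_once=True)   # beat min best once
assert 'good' == redownload_overview(4, [2, 4], upgrade_once=False)  # already at max best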

View file

@@ -22,12 +22,13 @@ import traceback
import threading
import datetime
import re
import copy
import sickbeard
from sickbeard import db, logger, common, exceptions, helpers, network_timezones, generic_queue, search, \
failed_history, history, ui, properFinder
from sickbeard.search import wanted_episodes, get_aired_in_season, set_wanted_aired
from sickbeard.classes import Proper
from sickbeard.classes import Proper, SimpleNamespace
from sickbeard.indexers.indexer_config import INDEXER_TVDB
@@ -69,18 +70,6 @@ class SearchQueue(generic_queue.GenericQueue):
return True
return False
def get_all_ep_from_queue(self, show):
with self.lock:
ep_obj_list = []
for cur_item in self.queue:
if (isinstance(cur_item, (ManualSearchQueueItem, FailedQueueItem)) and
show == str(cur_item.show.indexerid)):
ep_obj_list.append(cur_item)
if ep_obj_list:
return ep_obj_list
return False
def pause_backlog(self):
with self.lock:
self.min_priority = generic_queue.QueuePriorities.HIGH
@@ -101,9 +90,33 @@
return True
return False
def is_manualsearch_in_progress(self):
# Only referenced in webserve.py; only the currently running manual or failed search is needed!
return self._is_in_progress((ManualSearchQueueItem, FailedQueueItem))
def get_queued_manual(self, show):
"""
Return a list of base info items for all show-related items in the manual or failed queue
:param show: show indexerid or None for all queue items
:type show: String or None
:return: List with 0 or more items
"""
ep_obj_list = []
with self.lock:
for cur_item in self.queue:
if (isinstance(cur_item, (ManualSearchQueueItem, FailedQueueItem)) and
(not show or show == str(cur_item.show.indexerid))):
ep_obj_list.append(cur_item.base_info())
return ep_obj_list
def get_current_manual_item(self, show):
"""
Returns a base info item of the currently active manual search item
:param show: show indexerid or None to match any queue item
:type show: String or None
:return: base info item of ManualSearchQueueItem or FailedQueueItem or None
"""
with self.lock:
if self.currentItem and isinstance(self.currentItem, (ManualSearchQueueItem, FailedQueueItem)) \
and (not show or show == str(self.currentItem.show.indexerid)):
return self.currentItem.base_info()
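Both accessors return plain-data snapshots built by base_info() (defined further down), so callers such as search_q_progress() can iterate them after the queue lock is released. A hypothetical usage sketch (the show id is invented):

queued = sickbeard.searchQueueScheduler.action.get_queued_manual('71663')
for item in queued:
    print(item.show.indexerid, [(s.season, s.episode) for s in item.segment])
active = sickbeard.searchQueueScheduler.action.get_current_manual_item(None)  # any show
if active:
    print('running:', active.show.indexerid)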
def is_backlog_in_progress(self):
return self._is_in_progress(BacklogQueueItem)
@@ -188,6 +201,7 @@ class RecentSearchQueueItem(generic_queue.QueueItem):
self.success = None
self.episodes = []
generic_queue.QueueItem.__init__(self, 'Recent Search', RECENT_SEARCH)
self.snatched_eps = set([])
def run(self):
generic_queue.QueueItem.run(self)
@@ -243,6 +257,9 @@
# just use the first result for now
logger.log(u'Downloading %s from %s' % (result.name, result.provider.name))
self.success = search.snatch_episode(result)
if self.success:
for ep in result.episodes:
self.snatched_eps.add((ep.show.indexer, ep.show.indexerid, ep.season, ep.episode))
helpers.cpu_sleep()
@@ -391,14 +408,50 @@ class ProperSearchQueueItem(generic_queue.QueueItem):
self.finish()
class ManualSearchQueueItem(generic_queue.QueueItem):
class BaseSearchQueueItem(generic_queue.QueueItem):
def __init__(self, show, segment, name, action_id=0):
super(BaseSearchQueueItem, self).__init__(name, action_id)
self.segment = segment
self.show = show
self.added_dt = None
self.success = None
self.snatched_eps = set([])
def base_info(self):
return SimpleNamespace(
success=self.success,
added_dt=self.added_dt,
snatched_eps=copy.deepcopy(self.snatched_eps),
show=SimpleNamespace(
indexer=self.show.indexer, indexerid=self.show.indexerid,
quality=self.show.quality, upgrade_once=self.show.upgrade_once),
segment=[SimpleNamespace(
season=s.season, episode=s.episode, status=s.status,
show=SimpleNamespace(
indexer=s.show.indexer, indexerid=s.show.indexerid,
quality=s.show.quality, upgrade_once=s.show.upgrade_once
)) for s in ([self.segment], self.segment)[isinstance(self.segment, list)]])
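base_info() flattens live TVShow/TVEpisode references into nested SimpleNamespace objects, so a queue snapshot stays valid after the item finishes and its objects move on. Roughly the shape it returns (all values hypothetical):

SimpleNamespace(
    success=None, added_dt=None, snatched_eps=set(),
    show=SimpleNamespace(indexer=1, indexerid=71663, quality=3, upgrade_once=False),
    segment=[SimpleNamespace(season=2, episode=3, status=2,
                             show=SimpleNamespace(indexer=1, indexerid=71663,
                                                  quality=3, upgrade_once=False))])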
# def copy(self, deepcopy_obj=None):
# if not isinstance(deepcopy_obj, list):
# deepcopy_obj = []
# deepcopy_obj += ['segment']
# same_show = True
# if (isinstance(self.segment, list) and getattr(self.segment[0], 'show') is not self.show) \
# or getattr(self.segment, 'show') is not self.show:
# same_show = False
# deepcopy_obj += ['show']
# n_o = super(BaseSearchQueueItem, self).copy(deepcopy_obj)
# if same_show:
# n_o.show = (getattr(n_o.segment, 'show'), getattr(n_o.segment[0], 'show'))[isinstance(n_o.segment, list)]
# return n_o
class ManualSearchQueueItem(BaseSearchQueueItem):
def __init__(self, show, segment):
generic_queue.QueueItem.__init__(self, 'Manual Search', MANUAL_SEARCH)
super(ManualSearchQueueItem, self).__init__(show, segment, 'Manual Search', MANUAL_SEARCH)
self.priority = generic_queue.QueuePriorities.HIGH
self.name = 'MANUAL-%s' % show.indexerid
self.success = None
self.show = show
self.segment = segment
self.started = None
def run(self):
@@ -417,6 +470,8 @@ class ManualSearchQueueItem(generic_queue.QueueItem):
# just use the first result for now
logger.log(u'Downloading %s from %s' % (search_result[0].name, search_result[0].provider.name))
self.success = search.snatch_episode(search_result[0])
for ep in search_result[0].episodes:
self.snatched_eps.add((ep.show.indexer, ep.show.indexerid, ep.season, ep.episode))
helpers.cpu_sleep()
@@ -430,8 +485,8 @@
logger.log(traceback.format_exc(), logger.ERROR)
finally:
# Keep a list with the 100 last executed searches
fifo(MANUAL_SEARCH_HISTORY, self, MANUAL_SEARCH_HISTORY_SIZE)
# Keep a list with the last executed searches
fifo(MANUAL_SEARCH_HISTORY, self.base_info())
if self.success is None:
self.success = False
@@ -439,14 +494,11 @@
self.finish()
class BacklogQueueItem(generic_queue.QueueItem):
class BacklogQueueItem(BaseSearchQueueItem):
def __init__(self, show, segment, standard_backlog=False, limited_backlog=False, forced=False, torrent_only=False):
generic_queue.QueueItem.__init__(self, 'Backlog', BACKLOG_SEARCH)
super(BacklogQueueItem, self).__init__(show, segment, 'Backlog', BACKLOG_SEARCH)
self.priority = generic_queue.QueuePriorities.LOW
self.name = 'BACKLOG-%s' % show.indexerid
self.success = None
self.show = show
self.segment = segment
self.standard_backlog = standard_backlog
self.limited_backlog = limited_backlog
self.forced = forced
@@ -472,7 +524,9 @@
for result in search_result:
# just use the first result for now
logger.log(u'Downloading %s from %s' % (result.name, result.provider.name))
search.snatch_episode(result)
if search.snatch_episode(result):
for ep in result.episodes:
self.snatched_eps.add((ep.show.indexer, ep.show.indexerid, ep.season, ep.episode))
helpers.cpu_sleep()
else:
@@ -486,14 +540,11 @@
self.finish()
class FailedQueueItem(generic_queue.QueueItem):
class FailedQueueItem(BaseSearchQueueItem):
def __init__(self, show, segment):
generic_queue.QueueItem.__init__(self, 'Retry', FAILED_SEARCH)
super(FailedQueueItem, self).__init__(show, segment, 'Retry', FAILED_SEARCH)
self.priority = generic_queue.QueuePriorities.HIGH
self.name = 'RETRY-%s' % show.indexerid
self.show = show
self.segment = segment
self.success = None
self.started = None
def run(self):
@@ -515,7 +566,7 @@
logger.log(u'Beginning failed download search for: [%s]' % ep_obj.prettyName())
set_wanted_aired(ep_obj, True, ep_count, ep_count_scene)
set_wanted_aired(ep_obj, True, ep_count, ep_count_scene, manual=True)
search_result = search.search_providers(self.show, self.segment, True, try_other_searches=True)
@@ -523,7 +574,9 @@
for result in search_result:
# just use the first result for now
logger.log(u'Downloading %s from %s' % (result.name, result.provider.name))
search.snatch_episode(result)
if search.snatch_episode(result):
for ep in result.episodes:
self.snatched_eps.add((ep.show.indexer, ep.show.indexerid, ep.season, ep.episode))
helpers.cpu_sleep()
else:
@@ -533,8 +586,8 @@
logger.log(traceback.format_exc(), logger.ERROR)
finally:
# Keep a list with the 100 last executed searches
fifo(MANUAL_SEARCH_HISTORY, self, MANUAL_SEARCH_HISTORY_SIZE)
# Keep a list with the last executed searches
fifo(MANUAL_SEARCH_HISTORY, self.base_info())
if self.success is None:
self.success = False
@@ -542,7 +595,18 @@
self.finish()
def fifo(my_list, item, max_size=100):
if len(my_list) >= max_size:
def fifo(my_list, item):
remove_old_fifo(my_list)
item.added_dt = datetime.datetime.now()
if len(my_list) >= MANUAL_SEARCH_HISTORY_SIZE:
my_list.pop(0)
my_list.append(item)
def remove_old_fifo(my_list, age=datetime.timedelta(minutes=30)):
try:
now = datetime.datetime.now()
my_list[:] = [i for i in my_list if not isinstance(getattr(i, 'added_dt', None), datetime.datetime)
or now - i.added_dt < age]
except (StandardError, Exception):
pass
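A toy demonstration of the fifo()/remove_old_fifo() pairing above, assuming this module's functions and the SimpleNamespace backport from classes.py are importable:

import datetime

history = []
for n in range(3):
    fifo(history, SimpleNamespace(name='search-%s' % n))  # stamps added_dt, appends

history[0].added_dt -= datetime.timedelta(minutes=31)  # age one entry past the window
remove_old_fifo(history)                               # default age is 30 minutes
assert ['search-1', 'search-2'] == [i.name for i in history]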

View file

@@ -85,6 +85,9 @@ class ShowUpdater:
logger.log('image cache cleanup error', logger.ERROR)
logger.log(traceback.format_exc(), logger.ERROR)
# cleanup manual search history
sickbeard.search_queue.remove_old_fifo(sickbeard.search_queue.MANUAL_SEARCH_HISTORY)
# add missing mapped ids
if not sickbeard.background_mapping_task.is_alive():
logger.log(u'Updating the Indexer mappings')

View file

@@ -1615,34 +1615,7 @@ class TVShow(object):
return False
def getOverview(self, epStatus):
status, quality = Quality.splitCompositeStatus(epStatus)
if ARCHIVED == status:
return Overview.GOOD
if WANTED == status:
return Overview.WANTED
if status in (SKIPPED, IGNORED):
return Overview.SKIPPED
if status in (UNAIRED, UNKNOWN):
return Overview.UNAIRED
if status in [SUBTITLED] + Quality.SNATCHED_ANY + Quality.DOWNLOADED + Quality.FAILED:
if FAILED == status:
return Overview.WANTED
if status in SNATCHED_ANY:
return Overview.SNATCHED
void, best_qualities = Quality.splitQuality(self.quality)
# if re-downloads aren't wanted then mark it "good" if there is anything
if not len(best_qualities):
return Overview.GOOD
min_best, max_best = min(best_qualities), max(best_qualities)
if quality >= max_best \
or (self.upgrade_once and
(quality in best_qualities or (None is not min_best and quality > min_best))):
return Overview.GOOD
return Overview.QUAL
return helpers.getOverview(epStatus, self.quality, self.upgrade_once)
def __getstate__(self):
d = dict(self.__dict__)

View file

@@ -2765,132 +2765,117 @@ class Home(MainHandler):
self.redirect('/home/displayShow?show=' + show)
def searchEpisode(self, show=None, season=None, episode=None):
def episode_search(self, show=None, season=None, episode=None, retry=False):
result = dict(result='failure')
# retrieve the episode object and fail if we can't get one
ep_obj = self._getEpisode(show, season, episode)
if isinstance(ep_obj, str):
return json.dumps({'result': 'failure'})
if not isinstance(ep_obj, str):
# make a queue item for it and put it on the queue
ep_queue_item = search_queue.ManualSearchQueueItem(ep_obj.show, ep_obj)
# make a queue item for the TVEpisode and put it on the queue
ep_queue_item = (search_queue.ManualSearchQueueItem(ep_obj.show, ep_obj),
search_queue.FailedQueueItem(ep_obj.show, [ep_obj]))[retry]
sickbeard.searchQueueScheduler.action.add_item(ep_queue_item) # @UndefinedVariable
sickbeard.searchQueueScheduler.action.add_item(ep_queue_item)
if ep_queue_item.success:
return returnManualSearchResult(ep_queue_item)
if not ep_queue_item.started and ep_queue_item.success is None:
return json.dumps({'result': 'success'})  # I actually want to call it queued, because the search hasn't been started yet!
if ep_queue_item.started and ep_queue_item.success is None:
return json.dumps({'result': 'success'})
else:
return json.dumps({'result': 'failure'})
if None is ep_queue_item.success: # invocation
result.update(dict(result=('success', 'queueing')[not ep_queue_item.started]))
# elif ep_queue_item.success:
# return self.search_q_progress(str(ep_obj.show.indexerid)) # page refresh
### Returns the current ep_queue_item status for the currently viewed show.
# Possible status: Downloaded, Snatched, etc...
# Returns {'show': 279530, 'episodes': [{'episode': 6, 'season': 1, 'searchstatus': 'queued', 'status': 'running', 'quality': '4013'}]}
def getManualSearchStatus(self, show=None, season=None):
return json.dumps(result)
def episode_retry(self, show, season, episode):
return self.episode_search(show, season, episode, True)
# Return progress for queued, active and finished episodes
def search_q_progress(self, show=None):
episodes = []
currentManualSearchThreadsQueued = []
currentManualSearchThreadActive = []
finishedManualSearchThreadItems = []
seen_eps = set([])
# Queued Searches
currentManualSearchThreadsQueued = sickbeard.searchQueueScheduler.action.get_all_ep_from_queue(show)
# Running Searches
if (sickbeard.searchQueueScheduler.action.is_manualsearch_in_progress()):
currentManualSearchThreadActive = sickbeard.searchQueueScheduler.action.currentItem
# Queued searches
queued = sickbeard.searchQueueScheduler.action.get_queued_manual(show)
# Finished Searches
finishedManualSearchThreadItems = sickbeard.search_queue.MANUAL_SEARCH_HISTORY
# Active search
active = sickbeard.searchQueueScheduler.action.get_current_manual_item(show)
if currentManualSearchThreadsQueued:
for searchThread in currentManualSearchThreadsQueued:
searchstatus = 'queued'
if isinstance(searchThread, sickbeard.search_queue.ManualSearchQueueItem):
episodes.append({'episode': searchThread.segment.episode,
'episodeindexid': searchThread.segment.indexerid,
'season' : searchThread.segment.season,
'searchstatus' : searchstatus,
'status' : statusStrings[searchThread.segment.status],
'quality': self.getQualityClass(searchThread.segment)})
elif hasattr(searchThread, 'segment'):
for epObj in searchThread.segment:
episodes.append({'episode': epObj.episode,
'episodeindexid': epObj.indexerid,
'season' : epObj.season,
'searchstatus' : searchstatus,
'status' : statusStrings[epObj.status],
'quality': self.getQualityClass(epObj)})
# Finished searches
sickbeard.search_queue.remove_old_fifo(sickbeard.search_queue.MANUAL_SEARCH_HISTORY)
results = sickbeard.search_queue.MANUAL_SEARCH_HISTORY
retry_statues = SNATCHED_ANY + [DOWNLOADED, ARCHIVED]
if currentManualSearchThreadActive:
searchThread = currentManualSearchThreadActive
searchstatus = 'searching'
if searchThread.success:
searchstatus = 'finished'
else:
searchstatus = 'searching'
if isinstance(searchThread, sickbeard.search_queue.ManualSearchQueueItem):
episodes.append({'episode': searchThread.segment.episode,
'episodeindexid': searchThread.segment.indexerid,
'season' : searchThread.segment.season,
'searchstatus' : searchstatus,
'retrystatus': Quality.splitCompositeStatus(searchThread.segment.status)[0] in retry_statues,
'status' : statusStrings[searchThread.segment.status],
'quality': self.getQualityClass(searchThread.segment)})
elif hasattr(searchThread, 'segment'):
for epObj in searchThread.segment:
episodes.append({'episode': epObj.episode,
'episodeindexid': epObj.indexerid,
'season' : epObj.season,
'searchstatus' : searchstatus,
'retrystatus': Quality.splitCompositeStatus(epObj.status)[0] in retry_statues,
'status' : statusStrings[epObj.status],
'quality': self.getQualityClass(epObj)})
for item in filter(lambda q: hasattr(q, 'segment'), queued):
for ep_base in item.segment:
ep, uniq_sxe = self.prepare_episode(ep_base, 'queued')
episodes.append(ep)
seen_eps.add(uniq_sxe)
if finishedManualSearchThreadItems:
for searchThread in finishedManualSearchThreadItems:
if isinstance(searchThread, sickbeard.search_queue.ManualSearchQueueItem):
if str(searchThread.show.indexerid) == show and not [x for x in episodes if x['episodeindexid'] == searchThread.segment.indexerid]:
searchstatus = 'finished'
episodes.append({'episode': searchThread.segment.episode,
'episodeindexid': searchThread.segment.indexerid,
'season' : searchThread.segment.season,
'searchstatus' : searchstatus,
'retrystatus': Quality.splitCompositeStatus(searchThread.segment.status)[0] in retry_statues,
'status' : statusStrings[searchThread.segment.status],
'quality': self.getQualityClass(searchThread.segment)})
### These are only Failed Download/Retry SearchThreadItems... let's loop through the segment/episodes
elif hasattr(searchThread, 'segment') and str(searchThread.show.indexerid) == show:
for epObj in searchThread.segment:
if not [x for x in episodes if x['episodeindexid'] == epObj.indexerid]:
searchstatus = 'finished'
episodes.append({'episode': epObj.episode,
'episodeindexid': epObj.indexerid,
'season' : epObj.season,
'searchstatus' : searchstatus,
'retrystatus': Quality.splitCompositeStatus(epObj.status)[0] in retry_statues,
'status' : statusStrings[epObj.status],
'quality': self.getQualityClass(epObj)})
if active and hasattr(active, 'segment'):
episode_params = dict(([('searchstate', 'finished'), ('statusoverview', True)],
[('searchstate', 'searching'), ('statusoverview', False)])[None is active.success],
retrystate=True)
for ep_base in active.segment:
ep, uniq_sxe = self.prepare_episode(ep_base, **episode_params)
episodes.append(ep)
seen_eps.add(uniq_sxe)
return json.dumps({'show': show, 'episodes' : episodes})
episode_params = dict(searchstate='finished', retrystate=True, statusoverview=True)
for item in filter(lambda r: hasattr(r, 'segment') and (not show or show == str(r.show.indexerid)), results):
for ep_base in filter(
lambda e: (e.show.indexer, e.show.indexerid, e.season, e.episode) not in seen_eps, item.segment):
ep, uniq_sxe = self.prepare_episode(ep_base, **episode_params)
episodes.append(ep)
seen_eps.add(uniq_sxe)
#return json.dumps()
for snatched in filter(lambda s: (s not in seen_eps), item.snatched_eps):
try:
show = helpers.find_show_by_id(sickbeard.showList, dict({snatched[0]: snatched[1]}))
ep_obj = show.getEpisode(season=snatched[2], episode=snatched[3])
except (StandardError, Exception):
continue
ep, uniq_sxe = self.prepare_episode(ep_obj, **episode_params)
episodes.append(ep)
seen_eps.add(uniq_sxe)
def getQualityClass(self, ep_obj):
# return the correct json value
return json.dumps(dict(episodes=episodes))
@staticmethod
def prepare_episode(ep, searchstate, retrystate=False, statusoverview=False):
"""
Prepare episode data and its unique id
:param ep: Episode structure containing the show that it relates to
:type ep: TVEpisode object or Episode Base Namespace
:param searchstate: Progress of search
:type searchstate: string
:param retrystate: True to add retrystate to data
:type retrystate: bool
:param statusoverview: True to add statusoverview to data
:type statusoverview: bool
:return: Episode data and its unique episode id
:rtype: tuple containing a dict and a tuple
"""
# Find the quality class for the episode
quality_class = Quality.qualityStrings[Quality.UNKNOWN]
ep_status, ep_quality = Quality.splitCompositeStatus(ep_obj.status)
ep_status, ep_quality = Quality.splitCompositeStatus(ep.status)
for x in (SD, HD720p, HD1080p, UHD2160p):
if ep_quality in Quality.splitQuality(x)[0]:
quality_class = qualityPresetStrings[x]
break
return quality_class
ep_data = dict(showindexer=ep.show.indexer, showindexid=ep.show.indexerid,
season=ep.season, episode=ep.episode, quality=quality_class,
searchstate=searchstate, status=statusStrings[ep.status])
if retrystate:
retry_statuses = SNATCHED_ANY + [DOWNLOADED, ARCHIVED]
ep_data.update(dict(retrystate=sickbeard.USE_FAILED_DOWNLOADS and ep_status in retry_statuses))
if statusoverview:
ep_data.update(dict(statusoverview=Overview.overviewStrings[
helpers.getOverview(ep.status, ep.show.quality, ep.show.upgrade_once)]))
return ep_data, (ep.show.indexer, ep.show.indexerid, ep.season, ep.episode)
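For illustration, a hypothetical (ep_data, uniq_sxe) pair returned for one snatched episode, with retrystate and statusoverview requested (all values invented):

({'showindexer': 1, 'showindexid': 71663, 'season': 2, 'episode': 3,
  'quality': 'HD720p', 'searchstate': 'finished', 'status': 'Snatched',
  'retrystate': True, 'statusoverview': 'snatched'},
 (1, 71663, 2, 3))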
def searchEpisodeSubtitles(self, show=None, season=None, episode=None):
# retrieve the episode object and fail if we can't get one
@@ -2925,26 +2910,6 @@ class Home(MainHandler):
return json.dumps(result)
def retryEpisode(self, show, season, episode):
# retrieve the episode object and fail if we can't get one
ep_obj = self._getEpisode(show, season, episode)
if isinstance(ep_obj, str):
return json.dumps({'result': 'failure'})
# make a queue item for it and put it on the queue
ep_queue_item = search_queue.FailedQueueItem(ep_obj.show, [ep_obj])
sickbeard.searchQueueScheduler.action.add_item(ep_queue_item) # @UndefinedVariable
if ep_queue_item.success:
return returnManualSearchResult(ep_queue_item)
if not ep_queue_item.started and ep_queue_item.success is None:
return json.dumps({'result': 'success'})  # I actually want to call it queued, because the search hasn't been started yet!
if ep_queue_item.started and ep_queue_item.success is None:
return json.dumps({'result': 'success'})
else:
return json.dumps({'result': 'failure'})
@staticmethod
def fetch_releasegroups(show_name):