diff --git a/gui/slick/interfaces/default/inc_displayShow.tmpl b/gui/slick/interfaces/default/inc_displayShow.tmpl
index 80e383d6..a65e1493 100644
--- a/gui/slick/interfaces/default/inc_displayShow.tmpl
+++ b/gui/slick/interfaces/default/inc_displayShow.tmpl
@@ -1,7 +1,7 @@
#import datetime
#import sickbeard
#from sickbeard import network_timezones, sbdatetime, subtitles
-#from sickbeard.common import Overview, Quality, statusStrings, ARCHIVED, UNAIRED, SUBTITLED
+#from sickbeard.common import Overview, Quality, statusStrings, ARCHIVED, UNAIRED, SUBTITLED, SNATCHED_ANY, DOWNLOADED
#from lib import subliminal
<% def sg_var(varname, default=False): return getattr(sickbeard, varname, default) %>#slurp#
<% def sg_str(varname, default=''): return getattr(sickbeard, varname, default) %>#slurp#
@@ -107,7 +107,8 @@
#end if
#if 0 != int($ep['season'])
- #if (int($ep['status']) in $Quality.SNATCHED or int($ep['status']) in $Quality.DOWNLOADED) and $sg_var('USE_FAILED_DOWNLOADS')
+ #set $status = $Quality.splitCompositeStatus(int($ep['status']))[0]
+ #if ($status in $SNATCHED_ANY + [$DOWNLOADED, $ARCHIVED]) and $sg_var('USE_FAILED_DOWNLOADS')
#else
diff --git a/gui/slick/js/ajaxEpSearch.js b/gui/slick/js/ajaxEpSearch.js
index 2c6ebea2..acfb8170 100644
--- a/gui/slick/js/ajaxEpSearch.js
+++ b/gui/slick/js/ajaxEpSearch.js
@@ -47,7 +47,7 @@ function updateImages(data) {
if (ep.searchstatus == 'searching') {
//el=$('td#' + ep.season + 'x' + ep.episode + '.search img');
img.attr('title','Searching');
- img.attr('alt','earching');
+ img.prop('alt','searching');
img.attr('src',sbRoot+'/images/' + loadingImage);
disableLink(el);
// Update Status and Quality
@@ -58,20 +58,31 @@ function updateImages(data) {
else if (ep.searchstatus == 'queued') {
//el=$('td#' + ep.season + 'x' + ep.episode + '.search img');
img.attr('title','Queued');
- img.attr('alt','queued');
+ img.prop('alt','queued');
img.attr('src',sbRoot+'/images/' + queuedImage );
disableLink(el);
HtmlContent = ep.searchstatus;
}
else if (ep.searchstatus == 'finished') {
//el=$('td#' + ep.season + 'x' + ep.episode + '.search img');
- img.attr('title','Searching');
- img.attr('alt','searching');
- if (ep.retrystatus) {img.parent().attr('class','epRetry');} else {img.parent().attr('class','epSearch');}
+ imgparent=img.parent();
+ if (ep.retrystatus) {
+ imgparent.attr('class','epRetry');
+ imgparent.attr('href', imgparent.attr('href').replace('/home/searchEpisode?', '/home/retryEpisode?'));
+ img.attr('title','Retry download');
+ img.prop('alt', 'retry download');
+ }
+ else {
+ imgparent.attr('class','epSearch');
+ imgparent.attr('href', imgparent.attr('href').replace('/home/retryEpisode?', '/home/searchEpisode?'));
+ img.attr('title','Manual search');
+ img.prop('alt', 'manual search');
+ }
img.attr('src',sbRoot+'/images/' + searchImage);
enableLink(el);
// Update Status and Quality
+ imgparent.closest('tr').removeClass('skipped wanted qual good unaired snatched').addClass(ep.statusoverview);
var rSearchTerm = /(\w+)\s\((.+?)\)/;
HtmlContent = ep.status.replace(rSearchTerm,"$1"+' '+"$2"+'');
@@ -140,7 +151,7 @@ function disableLink(el) {
// Create var for img under anchor and set options for the loading gif
img=$(this).children('img');
img.attr('title','loading');
- img.attr('alt','');
+ img.prop('alt','');
img.attr('src',sbRoot+'/images/' + options.loadingImage);
@@ -169,7 +180,7 @@ function disableLink(el) {
// put the corresponding image as the result of queuing of the manual search
img.attr('title',img_result);
- img.attr('alt',img_result);
+ img.prop('alt',img_result);
img.attr('height', options.size);
img.attr('src',sbRoot+"/images/"+img_name);
});
diff --git a/sickbeard/generic_queue.py b/sickbeard/generic_queue.py
index f5d49bd5..ec2fd103 100644
--- a/sickbeard/generic_queue.py
+++ b/sickbeard/generic_queue.py
@@ -18,6 +18,7 @@
import datetime
import threading
+import copy
from sickbeard import logger
@@ -98,6 +99,7 @@ class GenericQueue(object):
self.currentItem.name = self.queue_name + '-' + self.currentItem.name
self.currentItem.start()
+
class QueueItem(threading.Thread):
def __init__(self, name, action_id=0):
super(QueueItem, self).__init__()
@@ -109,6 +111,24 @@ class QueueItem(threading.Thread):
self.stop = threading.Event()
self.added = None
+ def copy(self, deepcopy_obj=None):
+ """
+ Returns a shallow copy of QueueItem, with optional deep copies of the attributes listed in deepcopy_obj
+ :param deepcopy_obj: List of properties to be deepcopied
+ :type deepcopy_obj: list
+ :return: return QueueItem
+ :rtype: QueueItem
+ """
+ cls = self.__class__
+ result = cls.__new__(cls)
+ result.__dict__.update(self.__dict__)
+ if deepcopy_obj:
+ for o in deepcopy_obj:
+ if self.__dict__.get(o):
+ new_seg = copy.deepcopy(self.__dict__.get(o))
+ result.__dict__[o] = new_seg
+ return result
+
def run(self):
"""Implementing classes should call this"""
diff --git a/sickbeard/search_queue.py b/sickbeard/search_queue.py
index 783f94d9..10da45e2 100644
--- a/sickbeard/search_queue.py
+++ b/sickbeard/search_queue.py
@@ -70,12 +70,18 @@ class SearchQueue(generic_queue.GenericQueue):
return False
def get_all_ep_from_queue(self, show):
+ """
+ Returns False or List of copies of all show related items in manual or failed queue
+ :param show: indexerid
+ :type show: str
+ :return: False or List of copies of all show related items in manual or failed queue
+ """
with self.lock:
ep_obj_list = []
for cur_item in self.queue:
if (isinstance(cur_item, (ManualSearchQueueItem, FailedQueueItem)) and
show == str(cur_item.show.indexerid)):
- ep_obj_list.append(cur_item)
+ ep_obj_list.append(cur_item.copy())
if ep_obj_list:
return ep_obj_list
@@ -101,6 +107,18 @@ class SearchQueue(generic_queue.GenericQueue):
return True
return False
+ def get_current_manualsearch_item(self, show):
+ """
+ Returns a static copy of the current item
+ :param show: indexerid
+ :type show: str
+ :return: copy of ManualSearchQueueItem or FailedQueueItem or None
+ """
+ with self.lock:
+ if self.currentItem and isinstance(self.currentItem, (ManualSearchQueueItem, FailedQueueItem)) \
+ and show == str(self.currentItem.show.indexerid):
+ return self.currentItem.copy()
+
def is_manualsearch_in_progress(self):
# Only referenced in webserve.py, only current running manualsearch or failedsearch is needed!!
return self._is_in_progress((ManualSearchQueueItem, FailedQueueItem))
@@ -188,6 +206,7 @@ class RecentSearchQueueItem(generic_queue.QueueItem):
self.success = None
self.episodes = []
generic_queue.QueueItem.__init__(self, 'Recent Search', RECENT_SEARCH)
+ self.snatched_eps = {}
def run(self):
generic_queue.QueueItem.run(self)
@@ -243,6 +262,10 @@ class RecentSearchQueueItem(generic_queue.QueueItem):
# just use the first result for now
logger.log(u'Downloading %s from %s' % (result.name, result.provider.name))
self.success = search.snatch_episode(result)
+ if self.success:
+ for ep in result.episodes:
+ self.snatched_eps.update({'%sx%s' % (ep.season, ep.episode):
+ {'season': ep.season, 'episode': ep.episode}})
helpers.cpu_sleep()
@@ -400,6 +423,13 @@ class ManualSearchQueueItem(generic_queue.QueueItem):
self.show = show
self.segment = segment
self.started = None
+ self.snatched_eps = {}
+
+ def copy(self, deepcopy_obj=None):
+ if not isinstance(deepcopy_obj, list):
+ deepcopy_obj = []
+ deepcopy_obj += ['segment', 'show']
+ return super(ManualSearchQueueItem, self).copy(deepcopy_obj)
def run(self):
generic_queue.QueueItem.run(self)
@@ -417,6 +447,9 @@ class ManualSearchQueueItem(generic_queue.QueueItem):
# just use the first result for now
logger.log(u'Downloading %s from %s' % (search_result[0].name, search_result[0].provider.name))
self.success = search.snatch_episode(search_result[0])
+ for ep in search_result[0].episodes:
+ self.snatched_eps.update({'%sx%s' % (ep.season, ep.episode):
+ {'season': ep.season, 'episode': ep.episode}})
helpers.cpu_sleep()
@@ -451,6 +484,13 @@ class BacklogQueueItem(generic_queue.QueueItem):
self.limited_backlog = limited_backlog
self.forced = forced
self.torrent_only = torrent_only
+ self.snatched_eps = {}
+
+ def copy(self, deepcopy_obj=None):
+ if not isinstance(deepcopy_obj, list):
+ deepcopy_obj = []
+ deepcopy_obj += ['segment', 'show']
+ return super(BacklogQueueItem, self).copy(deepcopy_obj)
def run(self):
generic_queue.QueueItem.run(self)
@@ -472,7 +512,10 @@ class BacklogQueueItem(generic_queue.QueueItem):
for result in search_result:
# just use the first result for now
logger.log(u'Downloading %s from %s' % (result.name, result.provider.name))
- search.snatch_episode(result)
+ if search.snatch_episode(result):
+ for ep in result.episodes:
+ self.snatched_eps.update({'%sx%s' % (ep.season, ep.episode):
+ {'season': ep.season, 'episode': ep.episode}})
helpers.cpu_sleep()
else:
@@ -495,6 +538,13 @@ class FailedQueueItem(generic_queue.QueueItem):
self.segment = segment
self.success = None
self.started = None
+ self.snatched_eps = {}
+
+ def copy(self, deepcopy_obj=None):
+ if not isinstance(deepcopy_obj, list):
+ deepcopy_obj = []
+ deepcopy_obj += ['segment', 'show']
+ return super(FailedQueueItem, self).copy(deepcopy_obj)
def run(self):
generic_queue.QueueItem.run(self)
@@ -515,7 +565,7 @@ class FailedQueueItem(generic_queue.QueueItem):
logger.log(u'Beginning failed download search for: [%s]' % ep_obj.prettyName())
- set_wanted_aired(ep_obj, True, ep_count, ep_count_scene)
+ set_wanted_aired(ep_obj, True, ep_count, ep_count_scene, manual=True)
search_result = search.search_providers(self.show, self.segment, True, try_other_searches=True)
@@ -523,7 +573,10 @@ class FailedQueueItem(generic_queue.QueueItem):
for result in search_result:
# just use the first result for now
logger.log(u'Downloading %s from %s' % (result.name, result.provider.name))
- search.snatch_episode(result)
+ if search.snatch_episode(result):
+ for ep in result.episodes:
+ self.snatched_eps.update({'%sx%s' % (ep.season, ep.episode):
+ {'season': ep.season, 'episode': ep.episode}})
helpers.cpu_sleep()
else:
@@ -543,6 +596,17 @@ class FailedQueueItem(generic_queue.QueueItem):
def fifo(my_list, item, max_size=100):
+ remove_old_fifo(my_list)
+ item.added_dt = datetime.datetime.now()
if len(my_list) >= max_size:
my_list.pop(0)
my_list.append(item)
+
+
+def remove_old_fifo(my_list, age=datetime.timedelta(minutes=30)):
+ try:
+ now = datetime.datetime.now()
+ my_list[:] = [i for i in my_list if not isinstance(getattr(i, 'added_dt', None), datetime.datetime)
+ or now - i.added_dt < age]
+ except (StandardError, Exception):
+ pass
diff --git a/sickbeard/show_updater.py b/sickbeard/show_updater.py
index 1d2fc27a..0058db1d 100644
--- a/sickbeard/show_updater.py
+++ b/sickbeard/show_updater.py
@@ -85,6 +85,12 @@ class ShowUpdater:
logger.log('image cache cleanup error', logger.ERROR)
logger.log(traceback.format_exc(), logger.ERROR)
+ # cleanup manual search history
+ try:
+ sickbeard.search_queue.remove_old_fifo(sickbeard.search_queue.MANUAL_SEARCH_HISTORY)
+ except (StandardError, Exception):
+ pass
+
# add missing mapped ids
if not sickbeard.background_mapping_task.is_alive():
logger.log(u'Updating the Indexer mappings')
diff --git a/sickbeard/webserve.py b/sickbeard/webserve.py
index 209d847e..e50a3dee 100644
--- a/sickbeard/webserve.py
+++ b/sickbeard/webserve.py
@@ -2777,8 +2777,8 @@ class Home(MainHandler):
sickbeard.searchQueueScheduler.action.add_item(ep_queue_item) # @UndefinedVariable
- if ep_queue_item.success:
- return returnManualSearchResult(ep_queue_item)
+ #if ep_queue_item.success:
+ # return returnManualSearchResult(ep_queue_item)
if not ep_queue_item.started and ep_queue_item.success is None:
return json.dumps({'result': 'success'}) #I Actually want to call it queued, because the search hasnt been started yet!
if ep_queue_item.started and ep_queue_item.success is None:
@@ -2792,6 +2792,7 @@ class Home(MainHandler):
def getManualSearchStatus(self, show=None, season=None):
episodes = []
+ eps_list = set()
currentManualSearchThreadsQueued = []
currentManualSearchThreadActive = []
finishedManualSearchThreadItems= []
@@ -2799,83 +2800,108 @@ class Home(MainHandler):
# Queued Searches
currentManualSearchThreadsQueued = sickbeard.searchQueueScheduler.action.get_all_ep_from_queue(show)
# Running Searches
- if (sickbeard.searchQueueScheduler.action.is_manualsearch_in_progress()):
- currentManualSearchThreadActive = sickbeard.searchQueueScheduler.action.currentItem
+ currentManualSearchThreadActive = sickbeard.searchQueueScheduler.action.get_current_manualsearch_item(show)
# Finished Searches
- finishedManualSearchThreadItems = sickbeard.search_queue.MANUAL_SEARCH_HISTORY
+ sickbeard.search_queue.remove_old_fifo(sickbeard.search_queue.MANUAL_SEARCH_HISTORY)
+ finishedManualSearchThreadItems = sickbeard.search_queue.MANUAL_SEARCH_HISTORY
if currentManualSearchThreadsQueued:
for searchThread in currentManualSearchThreadsQueued:
searchstatus = 'queued'
if isinstance(searchThread, sickbeard.search_queue.ManualSearchQueueItem):
+ eps_list.add('%sx%s' % (searchThread.segment.season, searchThread.segment.episode))
episodes.append({'episode': searchThread.segment.episode,
- 'episodeindexid': searchThread.segment.indexerid,
- 'season' : searchThread.segment.season,
- 'searchstatus' : searchstatus,
- 'status' : statusStrings[searchThread.segment.status],
+ 'showindexer': searchThread.show.indexer,
+ 'showindexid': searchThread.show.indexerid,
+ 'season': searchThread.segment.season,
+ 'searchstatus': searchstatus,
+ 'status': statusStrings[searchThread.segment.status],
'quality': self.getQualityClass(searchThread.segment)})
elif hasattr(searchThread, 'segment'):
for epObj in searchThread.segment:
+ eps_list.add('%sx%s' % (epObj.season, epObj.episode))
episodes.append({'episode': epObj.episode,
- 'episodeindexid': epObj.indexerid,
- 'season' : epObj.season,
- 'searchstatus' : searchstatus,
- 'status' : statusStrings[epObj.status],
- 'quality': self.getQualityClass(epObj)})
+ 'showindexer': epObj.show.indexer,
+ 'showindexid': epObj.show.indexerid,
+ 'season': epObj.season,
+ 'searchstatus': searchstatus,
+ 'status': statusStrings[epObj.status],
+ 'quality': self.getQualityClass(epObj)})
retry_statues = SNATCHED_ANY + [DOWNLOADED, ARCHIVED]
if currentManualSearchThreadActive:
searchThread = currentManualSearchThreadActive
- searchstatus = 'searching'
if searchThread.success:
searchstatus = 'finished'
else:
searchstatus = 'searching'
if isinstance(searchThread, sickbeard.search_queue.ManualSearchQueueItem):
+ eps_list.add('%sx%s' % (searchThread.segment.season, searchThread.segment.episode))
episodes.append({'episode': searchThread.segment.episode,
- 'episodeindexid': searchThread.segment.indexerid,
- 'season' : searchThread.segment.season,
- 'searchstatus' : searchstatus,
- 'retrystatus': Quality.splitCompositeStatus(searchThread.segment.status)[0] in retry_statues,
- 'status' : statusStrings[searchThread.segment.status],
+ 'showindexer': searchThread.show.indexer,
+ 'showindexid': searchThread.show.indexerid,
+ 'season': searchThread.segment.season,
+ 'searchstatus': searchstatus,
+ 'retrystatus': sickbeard.USE_FAILED_DOWNLOADS and Quality.splitCompositeStatus(searchThread.segment.status)[0] in retry_statues,
+ 'status': statusStrings[searchThread.segment.status],
'quality': self.getQualityClass(searchThread.segment)})
elif hasattr(searchThread, 'segment'):
for epObj in searchThread.segment:
+ eps_list.add('%sx%s' % (epObj.season, epObj.episode))
episodes.append({'episode': epObj.episode,
- 'episodeindexid': epObj.indexerid,
- 'season' : epObj.season,
- 'searchstatus' : searchstatus,
- 'retrystatus': Quality.splitCompositeStatus(epObj.status)[0] in retry_statues,
- 'status' : statusStrings[epObj.status],
+ 'showindexer': epObj.show.indexer,
+ 'showindexid': epObj.show.indexerid,
+ 'season': epObj.season,
+ 'searchstatus': searchstatus,
+ 'retrystatus': sickbeard.USE_FAILED_DOWNLOADS and Quality.splitCompositeStatus(epObj.status)[0] in retry_statues,
+ 'status': statusStrings[epObj.status],
'quality': self.getQualityClass(epObj)})
if finishedManualSearchThreadItems:
+ searchstatus = 'finished'
for searchThread in finishedManualSearchThreadItems:
if isinstance(searchThread, sickbeard.search_queue.ManualSearchQueueItem):
- if str(searchThread.show.indexerid) == show and not [x for x in episodes if x['episodeindexid'] == searchThread.segment.indexerid]:
- searchstatus = 'finished'
+ if str(searchThread.show.indexerid) == show and '%sx%s' % (searchThread.segment.season, searchThread.segment.episode) not in eps_list:
+ eps_list.add('%sx%s' % (searchThread.segment.season, searchThread.segment.episode))
episodes.append({'episode': searchThread.segment.episode,
- 'episodeindexid': searchThread.segment.indexerid,
- 'season' : searchThread.segment.season,
- 'searchstatus' : searchstatus,
- 'retrystatus': Quality.splitCompositeStatus(searchThread.segment.status)[0] in retry_statues,
- 'status' : statusStrings[searchThread.segment.status],
- 'quality': self.getQualityClass(searchThread.segment)})
+ 'showindexer': searchThread.show.indexer,
+ 'showindexid': searchThread.show.indexerid,
+ 'season': searchThread.segment.season,
+ 'searchstatus': searchstatus,
+ 'retrystatus': sickbeard.USE_FAILED_DOWNLOADS and Quality.splitCompositeStatus(searchThread.segment.status)[0] in retry_statues,
+ 'status': statusStrings[searchThread.segment.status],
+ 'quality': self.getQualityClass(searchThread.segment),
+ 'statusoverview': Overview.overviewStrings[searchThread.show.getOverview(searchThread.segment.status)]})
### These are only Failed Downloads/Retry SearchThreadItems.. lets loop through the segement/episodes
elif hasattr(searchThread, 'segment') and str(searchThread.show.indexerid) == show:
for epObj in searchThread.segment:
- if not [x for x in episodes if x['episodeindexid'] == epObj.indexerid]:
- searchstatus = 'finished'
+ if '%sx%s' % (epObj.season, epObj.episode) not in eps_list:
+ eps_list.add('%sx%s' % (epObj.season, epObj.episode))
episodes.append({'episode': epObj.episode,
- 'episodeindexid': epObj.indexerid,
- 'season' : epObj.season,
- 'searchstatus' : searchstatus,
- 'retrystatus': Quality.splitCompositeStatus(epObj.status)[0] in retry_statues,
- 'status' : statusStrings[epObj.status],
- 'quality': self.getQualityClass(epObj)})
+ 'showindexer': epObj.show.indexer,
+ 'showindexid': epObj.show.indexerid,
+ 'season': epObj.season,
+ 'searchstatus': searchstatus,
+ 'retrystatus': sickbeard.USE_FAILED_DOWNLOADS and Quality.splitCompositeStatus(epObj.status)[0] in retry_statues,
+ 'status': statusStrings[epObj.status],
+ 'quality': self.getQualityClass(epObj),
+ 'statusoverview': Overview.overviewStrings[searchThread.show.getOverview(epObj.status)]})
- return json.dumps({'show': show, 'episodes' : episodes})
+ for ep in [v for k, v in searchThread.snatched_eps.iteritems() if k not in eps_list]:
+ ep_obj = searchThread.show.getEpisode(season=ep['season'], episode=ep['episode'])
+ episodes.append({'episode': ep['episode'],
+ 'showindexer': searchThread.show.indexer,
+ 'showindexid': searchThread.show.indexerid,
+ 'season': ep['season'],
+ 'searchstatus': searchstatus,
+ 'retrystatus': sickbeard.USE_FAILED_DOWNLOADS and Quality.splitCompositeStatus(ep_obj.status)[0] in retry_statues,
+ 'status': statusStrings[ep_obj.status],
+ 'quality': self.getQualityClass(ep_obj),
+ 'statusoverview': Overview.overviewStrings[searchThread.show.getOverview(ep_obj.status)]})
+
+
+ return json.dumps({'show': show, 'episodes': episodes})
#return json.dumps()
@@ -2936,8 +2962,8 @@ class Home(MainHandler):
ep_queue_item = search_queue.FailedQueueItem(ep_obj.show, [ep_obj])
sickbeard.searchQueueScheduler.action.add_item(ep_queue_item) # @UndefinedVariable
- if ep_queue_item.success:
- return returnManualSearchResult(ep_queue_item)
+ #if ep_queue_item.success:
+ # return returnManualSearchResult(ep_queue_item)
if not ep_queue_item.started and ep_queue_item.success is None:
return json.dumps({'result': 'success'}) #I Actually want to call it queued, because the search hasnt been started yet!
if ep_queue_item.started and ep_queue_item.success is None:
|