Change code refactor.
Devel: Use camel case for JS vars. Correct typos. Define attr added_dt. Remove redundant try/except.

This commit is contained in: parent 1d00b0e4c2, commit 85a5f6ea00

5 changed files with 149 additions and 200 deletions
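The central data-structure change in this commit: each queue item's snatched_eps collection moves from a dict keyed by an 'SxE' string to a set of (indexer, indexerid, season, episode) tuples. A minimal before/after sketch (not part of the commit; the indexer values are invented for illustration):

    # before: dict keyed by season x episode, the value repeats the key data
    snatched_eps = {}
    season, episode = 1, 2
    snatched_eps.update({'%sx%s' % (season, episode): {'season': season, 'episode': episode}})

    # after: set of unique episode tuples; membership tests and
    # de-duplication work directly, with no string key to build
    snatched_eps = set([])
    snatched_eps.add((1, 101, season, episode))  # hypothetical indexer=1, indexerid=101
    print((1, 101, 1, 2) in snatched_eps)  # True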
@@ -65,16 +65,16 @@ function updateImages(data) {
     }
     else if (ep.searchstatus == 'finished') {
         //el=$('td#' + ep.season + 'x' + ep.episode + '.search img');
-        imgparent=img.parent();
+        var imgParent = img.parent();
         if (ep.retrystatus) {
-            imgparent.attr('class','epRetry');
-            imgparent.attr('href', imgparent.attr('href').replace('/home/searchEpisode?', '/home/retryEpisode?'));
+            imgParent.attr('class','epRetry');
+            imgParent.attr('href', imgParent.attr('href').replace('/home/searchEpisode?', '/home/retryEpisode?'));
             img.attr('title','Retry download');
             img.prop('alt', 'retry download');
         }
         else {
-            imgparent.attr('class','epSearch');
-            imgparent.attr('href', imgparent.attr('href').replace('/home/retryEpisode?', '/home/searchEpisode?'));
+            imgParent.attr('class','epSearch');
+            imgParent.attr('href', imgParent.attr('href').replace('/home/retryEpisode?', '/home/searchEpisode?'));
             img.attr('title','Manual search');
             img.prop('alt', 'manual search');
         }
     }
@@ -113,8 +113,8 @@ class QueueItem(threading.Thread):

     def copy(self, deepcopy_obj=None):
         """
-        Returns a shallow copy of QueueItem with optional deepcopises of in deepcopy_obj listed objects
-        :param deepcopy_obj: List of properties to be deepcopied
+        Returns a shallow copy of QueueItem with optional deepcopy of in deepcopy_obj listed objects
+        :param deepcopy_obj: List of properties to be deep copied
         :type deepcopy_obj: list
         :return: return QueueItem
         :rtype: QueueItem
@@ -69,24 +69,6 @@ class SearchQueue(generic_queue.GenericQueue):
                 return True
         return False

-    def get_all_ep_from_queue(self, show):
-        """
-        Returns False or List of copies of all show related items in manual or failed queue
-        :param show: indexerid
-        :type show: str
-        :return: False or List of copies of all show related items in manual or failed queue
-        """
-        with self.lock:
-            ep_obj_list = []
-            for cur_item in self.queue:
-                if (isinstance(cur_item, (ManualSearchQueueItem, FailedQueueItem)) and
-                        show == str(cur_item.show.indexerid)):
-                    ep_obj_list.append(cur_item.copy())
-
-            if ep_obj_list:
-                return ep_obj_list
-            return False
-
     def pause_backlog(self):
         with self.lock:
             self.min_priority = generic_queue.QueuePriorities.HIGH
@@ -107,22 +89,34 @@ class SearchQueue(generic_queue.GenericQueue):
                 return True
         return False

-    def get_current_manualsearch_item(self, show):
+    def get_queued_manual(self, show):
         """
-        Returns a static copy of the current item
-        :param show: indexerid
-        :type show: str
+        Returns None or List of copies of all show related items in manual or failed queue
+        :param show: show indexerid or None for all q items
+        :type show: String or None
+        :return: List with 0 or more items
+        """
+        ep_obj_list = []
+        with self.lock:
+            for cur_item in self.queue:
+                if (isinstance(cur_item, (ManualSearchQueueItem, FailedQueueItem)) and
+                        (not show or show == str(cur_item.show.indexerid))):
+                    ep_obj_list.append(cur_item.copy())
+
+        return ep_obj_list
+
+    def get_current_manual_item(self, show):
+        """
+        Returns a static copy of the currently active manual search item
+        :param show: show indexerid or None for all q items
+        :type show: String or None
         :return: copy of ManualSearchQueueItem or FailedQueueItem or None
         """
         with self.lock:
             if self.currentItem and isinstance(self.currentItem, (ManualSearchQueueItem, FailedQueueItem)) \
-                    and show == str(self.currentItem.show.indexerid):
+                    and (not show or show == str(self.currentItem.show.indexerid)):
                 return self.currentItem.copy()

-    def is_manualsearch_in_progress(self):
-        # Only referenced in webserve.py, only current running manualsearch or failedsearch is needed!!
-        return self._is_in_progress((ManualSearchQueueItem, FailedQueueItem))
-
     def is_backlog_in_progress(self):
         return self._is_in_progress(BacklogQueueItem)
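The two methods above replace get_all_ep_from_queue and get_current_manualsearch_item, and both now accept None to mean "all shows". A self-contained sketch of the filter idiom they share (not from the repo; names invented):

    def matches(show, indexerid):
        # (not show or show == str(...)) short-circuits when show is None
        return not show or show == str(indexerid)

    assert matches(None, 101)       # None now selects every queue item
    assert matches('101', 101)      # a str indexerid selects one show
    assert not matches('102', 101)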
@@ -206,7 +200,7 @@ class RecentSearchQueueItem(generic_queue.QueueItem):
         self.success = None
         self.episodes = []
         generic_queue.QueueItem.__init__(self, 'Recent Search', RECENT_SEARCH)
-        self.snatched_eps = {}
+        self.snatched_eps = set([])

     def run(self):
         generic_queue.QueueItem.run(self)
@@ -264,8 +258,7 @@ class RecentSearchQueueItem(generic_queue.QueueItem):
                     self.success = search.snatch_episode(result)
                     if self.success:
                         for ep in result.episodes:
-                            self.snatched_eps.update({'%sx%s' % (ep.season, ep.episode):
-                                                          {'season': ep.season, 'episode': ep.episode}})
+                            self.snatched_eps.add((ep.show.indexer, ep.show.indexerid, ep.season, ep.episode))

                     helpers.cpu_sleep()

@@ -423,7 +416,8 @@ class ManualSearchQueueItem(generic_queue.QueueItem):
         self.show = show
         self.segment = segment
         self.started = None
-        self.snatched_eps = {}
+        self.added_dt = None
+        self.snatched_eps = set([])

     def copy(self, deepcopy_obj=None):
         if not isinstance(deepcopy_obj, list):
@@ -448,8 +442,7 @@ class ManualSearchQueueItem(generic_queue.QueueItem):
                 logger.log(u'Downloading %s from %s' % (search_result[0].name, search_result[0].provider.name))
                 self.success = search.snatch_episode(search_result[0])
                 for ep in search_result[0].episodes:
-                    self.snatched_eps.update({'%sx%s' % (ep.season, ep.episode):
-                                                  {'season': ep.season, 'episode': ep.episode}})
+                    self.snatched_eps.add((ep.show.indexer, ep.show.indexerid, ep.season, ep.episode))

                 helpers.cpu_sleep()

@@ -463,8 +456,8 @@ class ManualSearchQueueItem(generic_queue.QueueItem):
             logger.log(traceback.format_exc(), logger.ERROR)

         finally:
-            # Keep a list with the 100 last executed searches
-            fifo(MANUAL_SEARCH_HISTORY, self, MANUAL_SEARCH_HISTORY_SIZE)
+            # Keep a list with the last executed searches
+            fifo(MANUAL_SEARCH_HISTORY, self)

             if self.success is None:
                 self.success = False
@@ -484,7 +477,7 @@ class BacklogQueueItem(generic_queue.QueueItem):
         self.limited_backlog = limited_backlog
         self.forced = forced
         self.torrent_only = torrent_only
-        self.snatched_eps = {}
+        self.snatched_eps = set([])

     def copy(self, deepcopy_obj=None):
         if not isinstance(deepcopy_obj, list):
@@ -514,8 +507,7 @@ class BacklogQueueItem(generic_queue.QueueItem):
                     logger.log(u'Downloading %s from %s' % (result.name, result.provider.name))
                     if search.snatch_episode(result):
                         for ep in result.episodes:
-                            self.snatched_eps.update({'%sx%s' % (ep.season, ep.episode):
-                                                          {'season': ep.season, 'episode': ep.episode}})
+                            self.snatched_eps.add((ep.show.indexer, ep.show.indexerid, ep.season, ep.episode))

                         helpers.cpu_sleep()
                     else:
@@ -538,7 +530,8 @@ class FailedQueueItem(generic_queue.QueueItem):
         self.segment = segment
         self.success = None
         self.started = None
-        self.snatched_eps = {}
+        self.added_dt = None
+        self.snatched_eps = set([])

     def copy(self, deepcopy_obj=None):
         if not isinstance(deepcopy_obj, list):
@@ -575,8 +568,7 @@ class FailedQueueItem(generic_queue.QueueItem):
                     logger.log(u'Downloading %s from %s' % (result.name, result.provider.name))
                     if search.snatch_episode(result):
                         for ep in result.episodes:
-                            self.snatched_eps.update({'%sx%s' % (ep.season, ep.episode):
-                                                          {'season': ep.season, 'episode': ep.episode}})
+                            self.snatched_eps.add((ep.show.indexer, ep.show.indexerid, ep.season, ep.episode))

                         helpers.cpu_sleep()
                     else:
@@ -586,8 +578,8 @@ class FailedQueueItem(generic_queue.QueueItem):
             logger.log(traceback.format_exc(), logger.ERROR)

         finally:
-            # Keep a list with the 100 last executed searches
-            fifo(MANUAL_SEARCH_HISTORY, self, MANUAL_SEARCH_HISTORY_SIZE)
+            # Keep a list with the last executed searches
+            fifo(MANUAL_SEARCH_HISTORY, self)

             if self.success is None:
                 self.success = False
@@ -595,10 +587,10 @@ class FailedQueueItem(generic_queue.QueueItem):
         self.finish()


-def fifo(my_list, item, max_size=100):
+def fifo(my_list, item):
     remove_old_fifo(my_list)
     item.added_dt = datetime.datetime.now()
-    if len(my_list) >= max_size:
+    if len(my_list) >= MANUAL_SEARCH_HISTORY_SIZE:
         my_list.pop(0)
     my_list.append(item)
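With the max_size parameter gone, fifo always bounds the history by the module constant. A runnable approximation of the patched helper (the remove_old_fifo call is omitted here, and the constant's value is an assumption; the diff only shows its name):

    import datetime

    MANUAL_SEARCH_HISTORY_SIZE = 100  # assumed value

    class Item(object):
        added_dt = None  # the attr this commit defines up front on queue items

    def fifo(my_list, item):
        # stamp the item, drop the oldest entry once the shared
        # limit is reached, then append
        item.added_dt = datetime.datetime.now()
        if len(my_list) >= MANUAL_SEARCH_HISTORY_SIZE:
            my_list.pop(0)
        my_list.append(item)

    history = []
    fifo(history, Item())
    print(len(history))  # 1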
@@ -86,10 +86,7 @@ class ShowUpdater:
             logger.log(traceback.format_exc(), logger.ERROR)

         # cleanup manual search history
-        try:
-            sickbeard.search_queue.remove_old_fifo(sickbeard.search_queue.MANUAL_SEARCH_HISTORY)
-        except (StandardError, Exception):
-            pass
+        sickbeard.search_queue.remove_old_fifo(sickbeard.search_queue.MANUAL_SEARCH_HISTORY)

         # add missing mapped ids
         if not sickbeard.background_mapping_task.is_alive():
@@ -2765,158 +2765,138 @@ class Home(MainHandler):

         self.redirect('/home/displayShow?show=' + show)

-    def searchEpisode(self, show=None, season=None, episode=None):
+    def episode_search(self, show=None, season=None, episode=None, retry=False):

+        result = dict(result='failure')
+
         # retrieve the episode object and fail if we can't get one
         ep_obj = self._getEpisode(show, season, episode)
-        if isinstance(ep_obj, str):
-            return json.dumps({'result': 'failure'})
+        if not isinstance(ep_obj, str):

-        # make a queue item for it and put it on the queue
-        ep_queue_item = search_queue.ManualSearchQueueItem(ep_obj.show, ep_obj)
+            # make a queue item for the TVEpisode and put it on the queue
+            ep_queue_item = (search_queue.ManualSearchQueueItem(ep_obj.show, ep_obj),
+                             search_queue.FailedQueueItem(ep_obj.show, [ep_obj]))[retry]

-        sickbeard.searchQueueScheduler.action.add_item(ep_queue_item) # @UndefinedVariable
+            sickbeard.searchQueueScheduler.action.add_item(ep_queue_item)

-        #if ep_queue_item.success:
-        # return returnManualSearchResult(ep_queue_item)
-        if not ep_queue_item.started and ep_queue_item.success is None:
-            return json.dumps({'result': 'success'}) #I Actually want to call it queued, because the search hasnt been started yet!
-        if ep_queue_item.started and ep_queue_item.success is None:
-            return json.dumps({'result': 'success'})
-        else:
-            return json.dumps({'result': 'failure'})
+            if None is ep_queue_item.success:  # invocation
+                result.update(dict(result=('success', 'queueing')[not ep_queue_item.started]))
+            # elif ep_queue_item.success:
+            #     return self.search_q_progress(str(ep_obj.show.indexerid))  # page refresh

-    ### Returns the current ep_queue_item status for the current viewed show.
-    # Possible status: Downloaded, Snatched, etc...
-    # Returns {'show': 279530, 'episodes' : ['episode' : 6, 'season' : 1, 'searchstatus' : 'queued', 'status' : 'running', 'quality': '4013']
-    def getManualSearchStatus(self, show=None, season=None):
+        return json.dumps(result)
+
+    def episode_retry(self, show, season, episode):
+
+        return self.episode_search(show, season, episode, True)
+
+    # Return progress for queued, active and finished episodes
+    def search_q_progress(self, show=None):

         episodes = []
-        eps_list = set()
-        currentManualSearchThreadsQueued = []
-        currentManualSearchThreadActive = []
-        finishedManualSearchThreadItems= []
+        seen_eps = set([])

-        # Queued Searches
-        currentManualSearchThreadsQueued = sickbeard.searchQueueScheduler.action.get_all_ep_from_queue(show)
-        # Running Searches
-        currentManualSearchThreadActive = sickbeard.searchQueueScheduler.action.get_current_manualsearch_item(show)
+        # Queued searches
+        queued_items = sickbeard.searchQueueScheduler.action.get_queued_manual(show)

-        # Finished Searches
+        # Active search
+        active_item = sickbeard.searchQueueScheduler.action.get_current_manual_item(show)
+
+        # Finished searches
         sickbeard.search_queue.remove_old_fifo(sickbeard.search_queue.MANUAL_SEARCH_HISTORY)
-        finishedManualSearchThreadItems = sickbeard.search_queue.MANUAL_SEARCH_HISTORY
+        finished_items = sickbeard.search_queue.MANUAL_SEARCH_HISTORY

-        if currentManualSearchThreadsQueued:
-            for searchThread in currentManualSearchThreadsQueued:
-                searchstatus = 'queued'
-                if isinstance(searchThread, sickbeard.search_queue.ManualSearchQueueItem):
-                    eps_list.add('%sx%s' % (searchThread.segment.season, searchThread.segment.episode))
-                    episodes.append({'episode': searchThread.segment.episode,
-                                     'showindexer': searchThread.show.indexer,
-                                     'showindexid': searchThread.show.indexerid,
-                                     'season': searchThread.segment.season,
-                                     'searchstatus': searchstatus,
-                                     'status': statusStrings[searchThread.segment.status],
-                                     'quality': self.getQualityClass(searchThread.segment)})
-                elif hasattr(searchThread, 'segment'):
-                    for epObj in searchThread.segment:
-                        eps_list.add('%sx%s' % (epObj.season, epObj.episode))
-                        episodes.append({'episode': epObj.episode,
-                                         'showindexer': epObj.show.indexer,
-                                         'showindexid': epObj.show.indexerid,
-                                         'season': epObj.season,
-                                         'searchstatus': searchstatus,
-                                         'status': statusStrings[epObj.status],
-                                         'quality': self.getQualityClass(epObj)})
+        progress = 'queued'
+        for thread in queued_items:
+            if isinstance(thread, sickbeard.search_queue.ManualSearchQueueItem):
+                ep, uniq_sxe = self.prepare_episode(thread.show, thread.segment, progress)
+                episodes.append(ep)
+                seen_eps.add(uniq_sxe)
+            elif hasattr(thread, 'segment'):
+                for ep_obj in thread.segment:
+                    ep, uniq_sxe = self.prepare_episode(ep_obj.show, ep_obj, progress)
+                    episodes.append(ep)
+                    seen_eps.add(uniq_sxe)

-        retry_statues = SNATCHED_ANY + [DOWNLOADED, ARCHIVED]
-        if currentManualSearchThreadActive:
-            searchThread = currentManualSearchThreadActive
-            if searchThread.success:
-                searchstatus = 'finished'
-            else:
-                searchstatus = 'searching'
-            if isinstance(searchThread, sickbeard.search_queue.ManualSearchQueueItem):
-                eps_list.add('%sx%s' % (searchThread.segment.season, searchThread.segment.episode))
-                episodes.append({'episode': searchThread.segment.episode,
-                                 'showindexer': searchThread.show.indexer,
-                                 'showindexid': searchThread.show.indexerid,
-                                 'season': searchThread.segment.season,
-                                 'searchstatus': searchstatus,
-                                 'retrystatus': sickbeard.USE_FAILED_DOWNLOADS and Quality.splitCompositeStatus(searchThread.segment.status)[0] in retry_statues,
-                                 'status': statusStrings[searchThread.segment.status],
-                                 'quality': self.getQualityClass(searchThread.segment)})
-            elif hasattr(searchThread, 'segment'):
-                for epObj in searchThread.segment:
-                    eps_list.add('%sx%s' % (epObj.season, epObj.episode))
-                    episodes.append({'episode': epObj.episode,
-                                     'showindexer': epObj.show.indexer,
-                                     'showindexid': epObj.show.indexerid,
-                                     'season': epObj.season,
-                                     'searchstatus': searchstatus,
-                                     'retrystatus': sickbeard.USE_FAILED_DOWNLOADS and Quality.splitCompositeStatus(epObj.status)[0] in retry_statues,
-                                     'status': statusStrings[epObj.status],
-                                     'quality': self.getQualityClass(epObj)})
+        if active_item:
+            thread = active_item
+            episode_params = dict(([('searchstate', 'finished'), ('statusoverview', True)],
+                                   [('searchstate', 'searching'), ('statusoverview', False)])[None is thread.success],
+                                  retrystate=True)
+            if isinstance(thread, sickbeard.search_queue.ManualSearchQueueItem):
+                ep, uniq_sxe = self.prepare_episode(thread.show, thread.segment, **episode_params)
+                episodes.append(ep)
+                seen_eps.add(uniq_sxe)
+            elif hasattr(thread, 'segment'):
+                for ep_obj in thread.segment:
+                    ep, uniq_sxe = self.prepare_episode(ep_obj.show, ep_obj, **episode_params)
+                    episodes.append(ep)
+                    seen_eps.add(uniq_sxe)

-        if finishedManualSearchThreadItems:
-            searchstatus = 'finished'
-            for searchThread in finishedManualSearchThreadItems:
-                if isinstance(searchThread, sickbeard.search_queue.ManualSearchQueueItem):
-                    if str(searchThread.show.indexerid) == show and '%sx%s' % (searchThread.segment.season, searchThread.segment.episode) not in eps_list:
-                        eps_list.add('%sx%s' % (searchThread.segment.season, searchThread.segment.episode))
-                        episodes.append({'episode': searchThread.segment.episode,
-                                         'showindexer': searchThread.show.indexer,
-                                         'showindexid': searchThread.show.indexerid,
-                                         'season': searchThread.segment.season,
-                                         'searchstatus': searchstatus,
-                                         'retrystatus': sickbeard.USE_FAILED_DOWNLOADS and Quality.splitCompositeStatus(searchThread.segment.status)[0] in retry_statues,
-                                         'status': statusStrings[searchThread.segment.status],
-                                         'quality': self.getQualityClass(searchThread.segment),
-                                         'statusoverview': Overview.overviewStrings[searchThread.show.getOverview(searchThread.segment.status)]})
-                ### These are only Failed Downloads/Retry SearchThreadItems.. lets loop through the segement/episodes
-                elif hasattr(searchThread, 'segment') and str(searchThread.show.indexerid) == show:
-                    for epObj in searchThread.segment:
-                        if '%sx%s' % (epObj.season, epObj.episode) not in eps_list:
-                            eps_list.add('%sx%s' % (epObj.season, epObj.episode))
-                            episodes.append({'episode': epObj.episode,
-                                             'showindexer': epObj.show.indexer,
-                                             'showindexid': epObj.show.indexerid,
-                                             'season': epObj.season,
-                                             'searchstatus': searchstatus,
-                                             'retrystatus': sickbeard.USE_FAILED_DOWNLOADS and Quality.splitCompositeStatus(epObj.status)[0] in retry_statues,
-                                             'status': statusStrings[epObj.status],
-                                             'quality': self.getQualityClass(epObj),
-                                             'statusoverview': Overview.overviewStrings[searchThread.show.getOverview(epObj.status)]})
-
-                    for ep in [v for k, v in searchThread.snatched_eps.iteritems() if k not in eps_list]:
-                        ep_obj = searchThread.show.getEpisode(season=ep['season'], episode=ep['episode'])
-                        episodes.append({'episode': ep['episode'],
-                                         'showindexer': searchThread.show.indexer,
-                                         'showindexid': searchThread.show.indexerid,
-                                         'season': ep['season'],
-                                         'searchstatus': searchstatus,
-                                         'retrystatus': sickbeard.USE_FAILED_DOWNLOADS and Quality.splitCompositeStatus(ep_obj.status)[0] in retry_statues,
-                                         'status': statusStrings[ep_obj.status],
-                                         'quality': self.getQualityClass(ep_obj),
-                                         'statusoverview': Overview.overviewStrings[searchThread.show.getOverview(ep_obj.status)]})
+        episode_params = dict(searchstate='finished', retrystate=True, statusoverview=True)
+        for thread in finished_items:
+            if isinstance(thread, sickbeard.search_queue.ManualSearchQueueItem):
+                if (not show or show == str(thread.show.indexerid)) and \
+                        (thread.show.indexer, thread.show.indexerid, thread.segment.season, thread.segment.episode) \
+                        not in seen_eps:
+                    ep, uniq_sxe = self.prepare_episode(thread.show, thread.segment, **episode_params)
+                    episodes.append(ep)
+                    seen_eps.add(uniq_sxe)

-        return json.dumps({'show': show, 'episodes': episodes})
+            # These are only Failed Downloads/Retry SearchThreadItems.. lets loop through the segment/episodes
+            elif hasattr(thread, 'segment') and show == str(thread.show.indexerid):
+                for ep_obj in thread.segment:
+                    if (ep_obj.show.indexer, ep_obj.show.indexerid, ep_obj.season, ep_obj.episode) not in seen_eps:
+                        ep, uniq_sxe = self.prepare_episode(ep_obj.show, ep_obj, **episode_params)
+                        episodes.append(ep)
+                        seen_eps.add(uniq_sxe)

-        #return json.dumps()
+                for snatched in filter(lambda v: v not in seen_eps, thread.snatched_eps):
+                    ep_obj = thread.show.getEpisode(season=snatched[2], episode=snatched[3])
+                    ep, uniq_sxe = self.prepare_episode(thread.show, ep_obj, **episode_params)
+                    episodes.append(ep)
+                    seen_eps.add(uniq_sxe)

-    def getQualityClass(self, ep_obj):
-        # return the correct json value
+        return json.dumps(dict(episodes=episodes))

+    @staticmethod
+    def prepare_episode(show, ep, searchstate, retrystate=False, statusoverview=False):
+        """
+        Prepare episode data and its unique id
+
+        :param show: Show object
+        :type show: TVShow object
+        :param ep: Episode object
+        :type ep: TVEpisode object
+        :param searchstate: Progress of search
+        :type searchstate: string
+        :param retrystate: True to add retrystate to data
+        :type retrystate: bool
+        :param statusoverview: True to add statusoverview to data
+        :type statusoverview: bool
+        :return: Episode data and its unique episode id
+        :rtype: tuple containing a dict and a tuple
+        """
         # Find the quality class for the episode
         quality_class = Quality.qualityStrings[Quality.UNKNOWN]
-        ep_status, ep_quality = Quality.splitCompositeStatus(ep_obj.status)
+        ep_status, ep_quality = Quality.splitCompositeStatus(ep.status)
         for x in (SD, HD720p, HD1080p, UHD2160p):
             if ep_quality in Quality.splitQuality(x)[0]:
                 quality_class = qualityPresetStrings[x]
                 break

-        return quality_class
+        ep_data = dict(showindexer=show.indexer, showindexid=show.indexerid,
+                       season=ep.season, episode=ep.episode, quality=quality_class,
+                       searchstate=searchstate, status=statusStrings[ep.status])
+        if retrystate:
+            retry_statuses = SNATCHED_ANY + [DOWNLOADED, ARCHIVED]
+            ep_data.update(dict(retrystate=sickbeard.USE_FAILED_DOWNLOADS and ep_status in retry_statuses))
+        if statusoverview:
+            ep_data.update(dict(statusoverview=Overview.overviewStrings[show.getOverview(ep.status)]))
+
+        return ep_data, (show.indexer, show.indexerid, ep.season, ep.episode)

     def searchEpisodeSubtitles(self, show=None, season=None, episode=None):
         # retrieve the episode object and fail if we can't get one
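prepare_episode returns the per-episode payload plus a unique (indexer, indexerid, season, episode) tuple, and search_q_progress collects those tuples in seen_eps so an episode is reported only once across the queued, active and finished passes. A stripped-down sketch of that pattern (stub only; the values are invented):

    episodes, seen_eps = [], set([])

    def prepare_episode_stub(indexer, indexerid, season, episode, searchstate):
        ep_data = dict(showindexer=indexer, showindexid=indexerid,
                       season=season, episode=episode, searchstate=searchstate)
        return ep_data, (indexer, indexerid, season, episode)

    # three passes see the same episode; only the first one reports it
    for state in ('queued', 'searching', 'finished'):
        ep, uniq_sxe = prepare_episode_stub(1, 101, 1, 2, state)
        if uniq_sxe not in seen_eps:
            episodes.append(ep)
            seen_eps.add(uniq_sxe)

    print(len(episodes))  # 1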
@@ -2951,26 +2931,6 @@ class Home(MainHandler):

         return json.dumps(result)

-    def retryEpisode(self, show, season, episode):
-
-        # retrieve the episode object and fail if we can't get one
-        ep_obj = self._getEpisode(show, season, episode)
-        if isinstance(ep_obj, str):
-            return json.dumps({'result': 'failure'})
-
-        # make a queue item for it and put it on the queue
-        ep_queue_item = search_queue.FailedQueueItem(ep_obj.show, [ep_obj])
-        sickbeard.searchQueueScheduler.action.add_item(ep_queue_item) # @UndefinedVariable
-
-        #if ep_queue_item.success:
-        # return returnManualSearchResult(ep_queue_item)
-        if not ep_queue_item.started and ep_queue_item.success is None:
-            return json.dumps({'result': 'success'}) #I Actually want to call it queued, because the search hasnt been started yet!
-        if ep_queue_item.started and ep_queue_item.success is None:
-            return json.dumps({'result': 'success'})
-        else:
-            return json.dumps({'result': 'failure'})
-
     @staticmethod
     def fetch_releasegroups(show_name):