# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.

from __future__ import with_statement

import time
import traceback
import threading
import datetime

import sickbeard
from sickbeard import db, logger, common, exceptions, helpers, network_timezones, generic_queue, search, \
    failed_history, history, ui, properFinder
from sickbeard.search import wanted_episodes


search_queue_lock = threading.Lock()
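
# identifiers for the search queue item types defined below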
BACKLOG_SEARCH = 10
RECENT_SEARCH = 20
FAILED_SEARCH = 30
MANUAL_SEARCH = 40
PROPER_SEARCH = 50
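
# rolling history of recently executed manual and failed search items (see fifo() below)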
MANUAL_SEARCH_HISTORY = []
MANUAL_SEARCH_HISTORY_SIZE = 100


class SearchQueue(generic_queue.GenericQueue):
    def __init__(self):
        generic_queue.GenericQueue.__init__(self)
        self.queue_name = 'SEARCHQUEUE'

    def is_in_queue(self, show, segment):
        with self.lock:
            for cur_item in self.queue:
                if isinstance(cur_item, BacklogQueueItem) and cur_item.show == show and cur_item.segment == segment:
                    return True
            return False

    def is_ep_in_queue(self, segment):
        with self.lock:
            for cur_item in self.queue:
                if isinstance(cur_item, (ManualSearchQueueItem, FailedQueueItem)) and cur_item.segment == segment:
                    return True
            return False

    def is_show_in_queue(self, show):
        with self.lock:
            for cur_item in self.queue:
                if isinstance(cur_item, (ManualSearchQueueItem, FailedQueueItem)) and cur_item.show.indexerid == show:
                    return True
            return False

    def get_all_ep_from_queue(self, show):
        with self.lock:
            ep_obj_list = []
            for cur_item in self.queue:
                if isinstance(cur_item, (ManualSearchQueueItem, FailedQueueItem)) and str(cur_item.show.indexerid) == show:
                    ep_obj_list.append(cur_item)

            if ep_obj_list:
                return ep_obj_list
            return False

    def pause_backlog(self):
        with self.lock:
            self.min_priority = generic_queue.QueuePriorities.HIGH

    def unpause_backlog(self):
        with self.lock:
            self.min_priority = 0

    def is_backlog_paused(self):
        # backlog priorities are NORMAL, this should be done properly somewhere
        with self.lock:
            return self.min_priority >= generic_queue.QueuePriorities.NORMAL

    def _is_in_progress(self, item_type):
        with self.lock:
            for cur_item in self.queue + [self.currentItem]:
                if isinstance(cur_item, item_type):
                    return True
            return False

    def is_manualsearch_in_progress(self):
        # only referenced in webserve.py; only the currently running manual or failed search is needed
        return self._is_in_progress((ManualSearchQueueItem, FailedQueueItem))

    def is_backlog_in_progress(self):
        return self._is_in_progress(BacklogQueueItem)

    def is_recentsearch_in_progress(self):
        return self._is_in_progress(RecentSearchQueueItem)

    def is_propersearch_in_progress(self):
        return self._is_in_progress(ProperSearchQueueItem)

    def is_standard_backlog_in_progress(self):
        with self.lock:
            for cur_item in self.queue + [self.currentItem]:
                if isinstance(cur_item, BacklogQueueItem) and cur_item.standard_backlog:
                    return True
            return False

    def type_of_backlog_in_progress(self):
        limited = full = other = False
        with self.lock:
            for cur_item in self.queue + [self.currentItem]:
                if isinstance(cur_item, BacklogQueueItem):
                    if cur_item.standard_backlog:
                        if cur_item.limited_backlog:
                            limited = True
                        else:
                            full = True
                    else:
                        other = True

        types = []
        for msg, variant in ['Limited', limited], ['Full', full], ['On Demand', other]:
            if variant:
                types.append(msg)
        message = 'None'
        if types:
            message = ', '.join(types)
        return message
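
    # summary of queued and running items by search type, e.g. for display by the web interface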
    def queue_length(self):
        length = {'backlog': [], 'recent': 0, 'manual': [], 'failed': [], 'proper': 0}
        with self.lock:
            for cur_item in [self.currentItem] + self.queue:
                if isinstance(cur_item, RecentSearchQueueItem):
                    length['recent'] += 1
                elif isinstance(cur_item, BacklogQueueItem):
                    length['backlog'].append([cur_item.show.indexerid, cur_item.show.name, cur_item.segment,
                                              cur_item.standard_backlog, cur_item.limited_backlog, cur_item.forced])
                elif isinstance(cur_item, ProperSearchQueueItem):
                    length['proper'] += 1
                elif isinstance(cur_item, ManualSearchQueueItem):
                    length['manual'].append([cur_item.show.indexerid, cur_item.show.name, cur_item.segment])
                elif isinstance(cur_item, FailedQueueItem):
                    length['failed'].append([cur_item.show.indexerid, cur_item.show.name, cur_item.segment])
            return length

    def add_item(self, item):
        if isinstance(item, (RecentSearchQueueItem, ProperSearchQueueItem)):
            # recent and proper searches
            generic_queue.GenericQueue.add_item(self, item)
        elif isinstance(item, BacklogQueueItem) and not self.is_in_queue(item.show, item.segment):
            # backlog searches
            generic_queue.GenericQueue.add_item(self, item)
        elif isinstance(item, (ManualSearchQueueItem, FailedQueueItem)) and not self.is_ep_in_queue(item.segment):
            # manual and failed searches
            generic_queue.GenericQueue.add_item(self, item)
        else:
            logger.log(u'Not adding item, it\'s already in the queue', logger.DEBUG)
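

# a recent search updates provider caches, switches newly aired episodes to
# wanted (or skipped for paused shows), then snatches any wanted episodes
# found in the provider caches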
class RecentSearchQueueItem(generic_queue.QueueItem):
    def __init__(self):
        self.success = None
        self.episodes = []
        generic_queue.QueueItem.__init__(self, 'Recent Search', RECENT_SEARCH)

    def run(self):
        generic_queue.QueueItem.run(self)

        try:
            self._change_missing_episodes()

            self.update_providers()

            show_list = sickbeard.showList
            from_date = datetime.date.fromordinal(1)
            for curShow in show_list:
                if curShow.paused:
                    continue

                self.episodes.extend(wanted_episodes(curShow, from_date))

            if not self.episodes:
                logger.log(u'No search of cache for episodes required')
                self.success = True
            else:
                num_shows = len(set([ep.show.name for ep in self.episodes]))
                logger.log(u'Found %d needed episode%s spanning %d show%s'
                           % (len(self.episodes), helpers.maybe_plural(len(self.episodes)),
                              num_shows, helpers.maybe_plural(num_shows)))

                try:
                    logger.log(u'Beginning recent search for episodes')
                    found_results = search.search_for_needed_episodes(self.episodes)

                    if not len(found_results):
                        logger.log(u'No needed episodes found')
                    else:
                        for result in found_results:
                            # just use the first result for now
                            logger.log(u'Downloading %s from %s' % (result.name, result.provider.name))
                            self.success = search.snatch_episode(result)

                            # give the CPU a break
                            time.sleep(common.cpu_presets[sickbeard.CPU_PRESET])

                except Exception:
                    logger.log(traceback.format_exc(), logger.DEBUG)

            if None is self.success:
                self.success = False

        finally:
            self.finish()

    @staticmethod
    def _change_missing_episodes():
        if not network_timezones.network_dict:
            network_timezones.update_network_dict()
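
        # with network timezone data available, include episodes airing up to a day
        # ahead; otherwise be conservative and only include episodes that aired at
        # least two days ago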
        if network_timezones.network_dict:
            cur_date = (datetime.date.today() + datetime.timedelta(days=1)).toordinal()
        else:
            cur_date = (datetime.date.today() - datetime.timedelta(days=2)).toordinal()

        cur_time = datetime.datetime.now(network_timezones.sb_timezone)

        my_db = db.DBConnection()
        sql_results = my_db.select('SELECT * FROM tv_episodes WHERE status = ? AND season > 0 AND airdate <= ?',
                                   [common.UNAIRED, cur_date])

        sql_l = []
        show = None
        wanted = False
        for sqlEp in sql_results:
            try:
                if not show or show.indexerid != int(sqlEp['showid']):
                    show = helpers.findCertainShow(sickbeard.showList, int(sqlEp['showid']))

                # for when there is an orphaned series in the database that is not loaded into our showlist
                if not show:
                    continue

            except exceptions.MultipleShowObjectsException:
                logger.log(u'ERROR: expected to find a single show matching %s' % sqlEp['showid'])
                continue

            try:
                end_time = (network_timezones.parse_date_time(sqlEp['airdate'], show.airs, show.network)
                            + datetime.timedelta(minutes=helpers.tryInt(show.runtime, 60)))
                # filter out any episodes that haven't aired yet
                if end_time > cur_time:
                    continue
            except:
                # if an error occurred, assume the episode hasn't aired yet
                continue

            ep = show.getEpisode(int(sqlEp['season']), int(sqlEp['episode']))
            with ep.lock:
                # now that it is time, change the state of the UNAIRED episode to wanted or skipped
                ep.status = (common.WANTED, common.SKIPPED)[ep.show.paused]
                result = ep.get_sql()
                if None is not result:
                    sql_l.append(result)
                    wanted |= (False, True)[common.WANTED == ep.status]
        else:
            logger.log(u'No unaired episodes marked wanted')

        if 0 < len(sql_l):
            my_db = db.DBConnection()
            my_db.mass_action(sql_l)
            if wanted:
                logger.log(u'Found new episodes marked wanted')

    @staticmethod
    def update_providers():
        orig_thread_name = threading.currentThread().name
        threads = []

        logger.log('Updating provider caches with recent upload data')
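
        # only providers that are both active and enabled for recent searches take part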
        providers = [x for x in sickbeard.providers.sortedProviderList() if x.is_active() and x.enable_recentsearch]
        for cur_provider in providers:
            # spawn a separate thread for each provider so we don't have to wait on those with slow network operations
            threads.append(threading.Thread(target=cur_provider.cache.updateCache,
                                            name='%s :: [%s]' % (orig_thread_name, cur_provider.name)))
            # start the thread we just created
            threads[-1].start()

        # wait for all threads to finish
        for t in threads:
            t.join()

        logger.log('Finished updating provider caches')


class ProperSearchQueueItem(generic_queue.QueueItem):
    def __init__(self):
        generic_queue.QueueItem.__init__(self, 'Proper Search', PROPER_SEARCH)
        self.priority = generic_queue.QueuePriorities.HIGH
        self.success = None

    def run(self):
        generic_queue.QueueItem.run(self)

        try:
            properFinder.search_propers()
        finally:
            self.finish()


class ManualSearchQueueItem(generic_queue.QueueItem):
    def __init__(self, show, segment):
        generic_queue.QueueItem.__init__(self, 'Manual Search', MANUAL_SEARCH)
        self.priority = generic_queue.QueuePriorities.HIGH
        self.name = 'MANUAL-%s' % show.indexerid
        self.success = None
        self.show = show
        self.segment = segment
        self.started = None

    def run(self):
        generic_queue.QueueItem.run(self)

        try:
            logger.log(u'Beginning manual search for: [%s]' % self.segment.prettyName())
            self.started = True

            search_result = search.search_providers(self.show, [self.segment], True)

            if search_result:
                # just use the first result for now
                logger.log(u'Downloading %s from %s' % (search_result[0].name, search_result[0].provider.name))
                self.success = search.snatch_episode(search_result[0])

                # give the CPU a break
                time.sleep(common.cpu_presets[sickbeard.CPU_PRESET])

            else:
                ui.notifications.message('No downloads found',
                                         u'Could not find a download for <i>%s</i>' % self.segment.prettyName())

                logger.log(u'Unable to find a download for: [%s]' % self.segment.prettyName())

        except Exception:
            logger.log(traceback.format_exc(), logger.DEBUG)

        finally:
            # keep a list of the last 100 executed searches
            fifo(MANUAL_SEARCH_HISTORY, self, MANUAL_SEARCH_HISTORY_SIZE)

            if self.success is None:
                self.success = False

            self.finish()


class BacklogQueueItem(generic_queue.QueueItem):
    def __init__(self, show, segment, standard_backlog=False, limited_backlog=False, forced=False):
        generic_queue.QueueItem.__init__(self, 'Backlog', BACKLOG_SEARCH)
        self.priority = generic_queue.QueuePriorities.LOW
        self.name = 'BACKLOG-%s' % show.indexerid
        self.success = None
        self.show = show
        self.segment = segment
        self.standard_backlog = standard_backlog
        self.limited_backlog = limited_backlog
        self.forced = forced

    def run(self):
        generic_queue.QueueItem.run(self)

        try:
            logger.log(u'Beginning backlog search for: [%s]' % self.show.name)
            search_result = search.search_providers(self.show, self.segment, False)

            if search_result:
                for result in search_result:
                    # just use the first result for now
                    logger.log(u'Downloading %s from %s' % (result.name, result.provider.name))
                    search.snatch_episode(result)

                    # give the CPU a break
                    time.sleep(common.cpu_presets[sickbeard.CPU_PRESET])
            else:
                logger.log(u'No needed episodes found during backlog search for: [%s]' % self.show.name)
        except Exception:
            logger.log(traceback.format_exc(), logger.DEBUG)

        finally:
            self.finish()


class FailedQueueItem(generic_queue.QueueItem):
    def __init__(self, show, segment):
        generic_queue.QueueItem.__init__(self, 'Retry', FAILED_SEARCH)
        self.priority = generic_queue.QueuePriorities.HIGH
        self.name = 'RETRY-%s' % show.indexerid
        self.show = show
        self.segment = segment
        self.success = None
        self.started = None

    def run(self):
        generic_queue.QueueItem.run(self)
        self.started = True

        try:
            for epObj in self.segment:
                logger.log(u'Marking episode as bad: [%s]' % epObj.prettyName())

                failed_history.markFailed(epObj)
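
                # record the failed release (when one can be found) and revert the
                # episode's status so it can be searched for again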
                (release, provider) = failed_history.findRelease(epObj)
                if release:
                    failed_history.logFailed(release)
                    history.logFailed(epObj, release, provider)

                failed_history.revertEpisode(epObj)
                logger.log(u'Beginning failed download search for: [%s]' % epObj.prettyName())

            search_result = search.search_providers(self.show, self.segment, True)

            if search_result:
                for result in search_result:
                    # just use the first result for now
                    logger.log(u'Downloading %s from %s' % (result.name, result.provider.name))
                    search.snatch_episode(result)

                    # give the CPU a break
                    time.sleep(common.cpu_presets[sickbeard.CPU_PRESET])
            else:
                pass
                # logger.log(u'No valid episode found to retry for: [%s]' % self.segment.prettyName())

        except Exception:
            logger.log(traceback.format_exc(), logger.DEBUG)

        finally:
            # keep a list of the last 100 executed searches
            fifo(MANUAL_SEARCH_HISTORY, self, MANUAL_SEARCH_HISTORY_SIZE)

            if self.success is None:
                self.success = False

            self.finish()
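

# bounded FIFO helper for MANUAL_SEARCH_HISTORY: drop the oldest entry once
# my_list reaches max_size, then append the new item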
def fifo(my_list, item, max_size=100):
    if len(my_list) >= max_size:
        my_list.pop(0)
    my_list.append(item)