# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.

from __future__ import with_statement

import os
import re
import threading
import datetime
import traceback

import sickbeard

from common import SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, DOWNLOADED, Quality, SEASON_RESULT, MULTI_EP_RESULT
from sickbeard import logger, db, show_name_helpers, exceptions, helpers
from sickbeard import sab
from sickbeard import nzbget
from sickbeard import clients
from sickbeard import history
from sickbeard import notifiers
from sickbeard import nzbSplitter
from sickbeard import ui
from sickbeard import encodingKludge as ek
from sickbeard import failed_history
from sickbeard.exceptions import ex
from sickbeard.providers.generic import GenericProvider
from sickbeard import common
from sickbeard.tv import TVEpisode


def _download_result(result):
    """
    Downloads a result to the appropriate black hole folder.

    Returns a bool representing success.

    result: SearchResult instance to download.
    """

    res_provider = result.provider
    if None is res_provider:
        logger.log(u'Invalid provider name - this is a coding error, report it please', logger.ERROR)
        return False

    # NZBs with a URL can just be downloaded from the provider
    if 'nzb' == result.resultType:
        new_result = res_provider.download_result(result)
    # if it's an nzb data result
    elif 'nzbdata' == result.resultType:

        # get the final file path to the nzb
        file_name = ek.ek(os.path.join, sickbeard.NZB_DIR, u'%s.nzb' % result.name)

        logger.log(u'Saving NZB to %s' % file_name)

        new_result = True

        # save the data to disk
        try:
            data = result.get_data()
            if not data:
                new_result = False
            else:
                with ek.ek(open, file_name, 'w') as file_out:
                    file_out.write(data)

                helpers.chmodAsParent(file_name)

        except EnvironmentError as e:
            logger.log(u'Error trying to save NZB to black hole: %s' % ex(e), logger.ERROR)
            new_result = False
    elif 'torrent' == res_provider.providerType:
        new_result = res_provider.download_result(result)
    else:
        logger.log(u'Invalid provider type - this is a coding error, report it please', logger.ERROR)
        new_result = False

    return new_result
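
# Illustrative sketch (not part of the original flow; `result` stands for a
# hypothetical SearchResult produced by a provider search):
#     if 'blackhole' == sickbeard.NZB_METHOD:
#         ok = _download_result(result)  # True when the .nzb was written to sickbeard.NZB_DIR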


def snatch_episode(result, end_status=SNATCHED):
    """
    Contains the internal logic necessary to actually "snatch" a result that
    has been found.

    Returns a bool representing success.

    result: SearchResult instance to be snatched.
    end_status: the episode status that should be used for the episode object once it's snatched.
    """

    if None is result:
        return False

    result.priority = 0  # -1 = low, 0 = normal, 1 = high
    if sickbeard.ALLOW_HIGH_PRIORITY:
        # if it aired recently make it high priority
        for cur_ep in result.episodes:
            if datetime.date.today() - cur_ep.airdate <= datetime.timedelta(days=7):
                result.priority = 1
    if 0 < result.properlevel:
        end_status = SNATCHED_PROPER

    # NZBs can be sent straight to SAB or saved to disk
    if result.resultType in ('nzb', 'nzbdata'):
        if 'blackhole' == sickbeard.NZB_METHOD:
            dl_result = _download_result(result)
        elif 'sabnzbd' == sickbeard.NZB_METHOD:
            dl_result = sab.send_nzb(result)
        elif 'nzbget' == sickbeard.NZB_METHOD:
            dl_result = nzbget.send_nzb(result)
        else:
            logger.log(u'Unknown NZB action specified in config: %s' % sickbeard.NZB_METHOD, logger.ERROR)
            dl_result = False

    # torrents can be sent to clients or saved to disk
    elif 'torrent' == result.resultType:
        if not result.url.startswith('magnet') and None is not result.get_data_func:
            result.url = result.get_data_func(result.url)
            result.get_data_func = None  # consume only once
        if not result.url:
            return False
        # torrents are saved to disk in blackhole mode
        if 'blackhole' == sickbeard.TORRENT_METHOD:
            dl_result = _download_result(result)
        else:
            # make sure we have the torrent file content
            if not result.content and not result.url.startswith('magnet'):
                result.content = result.provider.get_url(result.url)
                if result.provider.should_skip() or not result.content:
                    logger.log(u'Torrent content failed to download from %s' % result.url, logger.ERROR)
                    return False
            # snatch the torrent with a client
            client = clients.get_client_instance(sickbeard.TORRENT_METHOD)()
            dl_result = client.send_torrent(result)

            if getattr(result, 'cache_file', None):
                helpers.remove_file_failed(result.cache_file)
    else:
        logger.log(u'Unknown result type, unable to download it', logger.ERROR)
        dl_result = False

    if not dl_result:
        return False

    if sickbeard.USE_FAILED_DOWNLOADS:
        failed_history.add_snatched(result)

    ui.notifications.message(u'Episode snatched', result.name)

    history.log_snatch(result)

    # don't notify when we re-download an episode
    sql_l = []
    update_imdb_data = True
    for cur_ep_obj in result.episodes:
        with cur_ep_obj.lock:
            if is_first_best_match(cur_ep_obj.status, result):
                cur_ep_obj.status = Quality.compositeStatus(SNATCHED_BEST, result.quality)
            else:
                cur_ep_obj.status = Quality.compositeStatus(end_status, result.quality)

            item = cur_ep_obj.get_sql()
            if None is not item:
                sql_l.append(item)

            if cur_ep_obj.status not in Quality.DOWNLOADED:
                notifiers.notify_snatch(cur_ep_obj._format_pattern('%SN - %Sx%0E - %EN - %QN'))

            update_imdb_data = update_imdb_data and cur_ep_obj.show.load_imdb_info()

    if 0 < len(sql_l):
        my_db = db.DBConnection()
        my_db.mass_action(sql_l)

    return True
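
# Usage sketch (hedged; variable names here are hypothetical): snatch_episode
# is the single entry point that the search functions below feed into.
#     best = pick_best_result(candidates, show_obj)
#     if best and snatch_episode(best):
#         pass  # statuses, history and notifications were handled internally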


def pass_show_wordlist_checks(name, show):
    re_extras = dict(re_prefix='.*', re_suffix='.*')
    result = show_name_helpers.contains_any(name, show.rls_ignore_words, **re_extras)
    if None is not result and result:
        logger.log(u'Ignored: %s for containing ignore word' % name)
        return False

    result = show_name_helpers.contains_any(name, show.rls_require_words, **re_extras)
    if None is not result and not result:
        logger.log(u'Ignored: %s for not containing any required word match' % name)
        return False
    return True
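
# Example of the word list behaviour (hypothetical word lists): with
# show.rls_ignore_words = 'dubbed' and show.rls_require_words = 'x264',
#     pass_show_wordlist_checks('Show.S01E01.DUBBED.x264-GRP', show)  # -> False, ignore word hit
#     pass_show_wordlist_checks('Show.S01E01.720p.x264-GRP', show)    # -> True
#     pass_show_wordlist_checks('Show.S01E01.720p.XviD-GRP', show)    # -> False, required word missing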


def pick_best_result(results, show, quality_list=None, filter_rls=False):
    logger.log(u'Picking the best result out of %s' % [x.name for x in results], logger.DEBUG)

    # find the best result for the current episode
    best_result = None
    best_fallback_result = None

    scene_only = scene_or_contain = scene_loose = scene_loose_active = scene_rej_nuked = scene_nuked_active = False
    if filter_rls:
        try:
            provider = getattr(results[0], 'provider', None)
            scene_only = getattr(provider, 'scene_only', False)
            scene_or_contain = getattr(provider, 'scene_or_contain', '')
            recent_task = 'RECENT' in filter_rls
            scene_loose = getattr(provider, 'scene_loose', False) and recent_task
            scene_loose_active = getattr(provider, 'scene_loose_active', False) and not recent_task
            scene_rej_nuked = getattr(provider, 'scene_rej_nuked', False)
            scene_nuked_active = getattr(provider, 'scene_nuked_active', False) and not recent_task
        except (StandardError, Exception):
            filter_rls = False

    addendum = ''
    for cur_result in results:

        if show.is_anime and not show.release_groups.is_valid(cur_result):
            continue

        if quality_list and cur_result.quality not in quality_list:
            logger.log(u'Rejecting unwanted quality %s for [%s]' % (
                Quality.qualityStrings[cur_result.quality], cur_result.name), logger.DEBUG)
            continue

        if not pass_show_wordlist_checks(cur_result.name, show):
            continue

        cur_size = getattr(cur_result, 'size', None)
        if sickbeard.USE_FAILED_DOWNLOADS and None is not cur_size and failed_history.has_failed(
                cur_result.name, cur_size, cur_result.provider.name):
            logger.log(u'Rejecting previously failed [%s]' % cur_result.name)
            continue

        if filter_rls and any([scene_only, scene_loose, scene_loose_active, scene_rej_nuked, scene_nuked_active]):
            if show.is_anime:
                addendum = u'anime (skipping scene/nuke filter) '
            else:
                scene_contains = False
                if scene_only and scene_or_contain:
                    re_extras = dict(re_prefix='.*', re_suffix='.*')
                    r = show_name_helpers.contains_any(cur_result.name, scene_or_contain, **re_extras)
                    if None is not r and r:
                        scene_contains = True

                if scene_contains and not scene_rej_nuked:
                    logger.log(u'Considering title match to \'or contain\' [%s]' % cur_result.name, logger.DEBUG)
                    reject = False
                else:
                    reject, url = can_reject(cur_result.name)
                    if reject:
                        if isinstance(reject, basestring):
                            if scene_rej_nuked and not scene_nuked_active:
                                logger.log(u'Rejecting nuked release. Nuke reason [%s] source [%s]' % (reject, url),
                                           logger.DEBUG)
                            elif scene_nuked_active:
                                best_fallback_result = best_candidate(best_fallback_result, cur_result)
                            else:
                                logger.log(u'Considering nuked release. Nuke reason [%s] source [%s]' % (reject, url),
                                           logger.DEBUG)
                                reject = False
                        elif scene_contains or any([scene_loose, scene_loose_active]):
                            best_fallback_result = best_candidate(best_fallback_result, cur_result)
                        else:
                            logger.log(u'Rejecting as not scene release listed at any [%s]' % url, logger.DEBUG)

                if reject:
                    continue

        best_result = best_candidate(best_result, cur_result)

    if best_result and scene_only and not show.is_anime:
        addendum = u'scene release filtered '
    elif not best_result and best_fallback_result:
        addendum = u'non scene release filtered '
        best_result = best_fallback_result

    if best_result:
        logger.log(u'Picked as the best %s[%s]' % (addendum, best_result.name), logger.DEBUG)
    else:
        logger.log(u'No result picked.', logger.DEBUG)

    return best_result
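
# Sketch of a typical call (assumed values): the searching thread name is
# passed as filter_rls so the scene/nuke filters above can tell a recent
# search ('RECENT' in the name) from a backlog run.
#     best = pick_best_result(results, show_obj, quality_list=[Quality.HDTV, Quality.HDWEBDL],
#                             filter_rls='RECENT-SEARCH :: [ProviderName]')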


def best_candidate(best_result, cur_result):
    logger.log(u'Quality is %s for [%s]' % (Quality.qualityStrings[cur_result.quality], cur_result.name))

    if not best_result or best_result.quality < cur_result.quality != Quality.UNKNOWN:
        best_result = cur_result

    elif best_result.quality == cur_result.quality:
        if cur_result.properlevel > best_result.properlevel and \
                (not cur_result.is_repack or cur_result.release_group == best_result.release_group):
            best_result = cur_result
        elif cur_result.properlevel == best_result.properlevel:
            if 'xvid' in best_result.name.lower() and 'x264' in cur_result.name.lower():
                logger.log(u'Preferring (x264 over xvid) [%s]' % cur_result.name)
                best_result = cur_result
            elif 'internal' in best_result.name.lower() and 'internal' not in cur_result.name.lower():
                best_result = cur_result

    return best_result
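
# Worked comparison (illustrative, two hypothetical results of equal quality):
#     a.properlevel 0 vs b.properlevel 1           -> b wins (higher proper/repack level)
#     equal properlevel, 'xvid' in a, 'x264' in b  -> b wins (x264 preferred over xvid)
#     equal otherwise, 'internal' in a only        -> b wins (non-internal preferred)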


def is_final_result(result):
    """
    Checks if the given result is good enough quality that we can stop searching for other ones.

    If the result is the highest quality in both the any/best quality lists then this function
    returns True, if not then it's False.
    """

    logger.log(u'Checking if searching should continue after finding %s' % result.name, logger.DEBUG)

    show_obj = result.episodes[0].show

    any_qualities, best_qualities = Quality.splitQuality(show_obj.quality)

    # if there is a redownload that's higher than this then we definitely need to keep looking
    if best_qualities and max(best_qualities) > result.quality:
        return False

    # if it does not match the show's black and white list then it's no good
    elif show_obj.is_anime and not show_obj.release_groups.is_valid(result):
        return False

    # if there's no redownload that's higher (above) and this is the highest initial download then we're good
    elif any_qualities and result.quality in any_qualities:
        return True

    elif best_qualities and max(best_qualities) == result.quality:

        # if this is the best redownload but we have a higher initial download then keep looking
        if any_qualities and max(any_qualities) > result.quality:
            return False

        # if this is the best redownload and we don't have a higher initial download then we're done
        else:
            return True

    # if we got here then it's either not on the lists, they're empty, or it's lower than the highest required
    else:
        return False
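
# Example decision (assumed show quality: SDTV initial + HDWEBDL upgrade):
#     a HDWEBDL result -> True  (highest wanted upgrade, searching can stop)
#     an SDTV result   -> False (a higher upgrade is still wanted, keep looking)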


def is_first_best_match(ep_status, result):
    """
    Checks if the given result is a best quality match and if we want to archive the episode on first match.
    """

    logger.log(u'Checking if the first best quality match should be archived for episode %s' %
               result.name, logger.DEBUG)

    show_obj = result.episodes[0].show
    cur_status, cur_quality = Quality.splitCompositeStatus(ep_status)

    any_qualities, best_qualities = Quality.splitQuality(show_obj.quality)

    # if there is a redownload that's a match to one of our best qualities and
    # we want to archive the episode then we are done
    if best_qualities and show_obj.upgrade_once and \
            (result.quality in best_qualities and
             (cur_status in (SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, DOWNLOADED) or
              result.quality not in any_qualities)):
        return True

    return False
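
# Sketch (assumed): with show_obj.upgrade_once enabled, a result matching one
# of the 'best' qualities for an already snatched/downloaded episode returns
# True, and snatch_episode() above then archives it as SNATCHED_BEST.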


def set_wanted_aired(ep_obj, unaired, ep_count, ep_count_scene, manual=False):
    ep_status, ep_quality = common.Quality.splitCompositeStatus(ep_obj.status)
    ep_obj.wantedQuality = get_wanted_qualities(ep_obj, ep_status, ep_quality, unaired=unaired, manual=manual)
    ep_obj.eps_aired_in_season = ep_count.get(ep_obj.season, 0)
    ep_obj.eps_aired_in_scene_season = ep_count_scene.get(
        ep_obj.scene_season, 0) if ep_obj.scene_season else ep_obj.eps_aired_in_season


def get_wanted_qualities(ep_obj, cur_status, cur_quality, unaired=False, manual=False):
    if isinstance(ep_obj, TVEpisode):
        return sickbeard.WANTEDLIST_CACHE.get_wantedlist(ep_obj.show.quality, ep_obj.show.upgrade_once,
                                                         cur_quality, cur_status, unaired, manual)

    return []
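
# Sketch (hedged): set_wanted_aired() above is the usual caller, priming an
# episode object ahead of a search:
#     ep_count, ep_count_scene = get_aired_in_season(show_obj)
#     set_wanted_aired(ep_obj, unaired=True, ep_count=ep_count, ep_count_scene=ep_count_scene)
#     ep_obj.wantedQuality  # now lists the qualities worth searching for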


def get_aired_in_season(show, return_sql=False):
    ep_count = {}
    ep_count_scene = {}
    tomorrow = (datetime.date.today() + datetime.timedelta(days=1)).toordinal()
    my_db = db.DBConnection()

    if show.air_by_date:
        sql_string = 'SELECT ep.status, ep.season, ep.scene_season, ep.episode, ep.airdate ' + \
                     'FROM [tv_episodes] AS ep, [tv_shows] AS show ' + \
                     'WHERE season != 0 AND ep.showid = show.indexer_id AND show.paused = 0 ' + \
                     'AND ep.showid = ? AND ep.indexer = ? AND show.air_by_date = 1'
    else:
        sql_string = 'SELECT status, season, scene_season, episode, airdate ' + \
                     'FROM [tv_episodes] ' + \
                     'WHERE showid = ? AND indexer = ? AND season > 0'

    sql_results = my_db.select(sql_string, [show.indexerid, show.indexer])
    for result in sql_results:
        if 1 < helpers.tryInt(result['airdate']) <= tomorrow:
            cur_season = helpers.tryInt(result['season'])
            ep_count[cur_season] = ep_count.setdefault(cur_season, 0) + 1
            cur_scene_season = helpers.tryInt(result['scene_season'], -1)
            if -1 != cur_scene_season:
                ep_count_scene[cur_scene_season] = ep_count_scene.setdefault(cur_scene_season, 0) + 1

    if return_sql:
        return ep_count, ep_count_scene, sql_results

    return ep_count, ep_count_scene
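
# Illustrative return shape (hypothetical counts): episodes aired up to
# tomorrow, keyed by season and, separately, by scene season numbering.
#     get_aired_in_season(show_obj)                   # -> ({1: 10, 2: 4}, {1: 10, 2: 4})
#     get_aired_in_season(show_obj, return_sql=True)  # also returns the raw episode rows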


def wanted_episodes(show, from_date, make_dict=False, unaired=False):

    ep_count, ep_count_scene, sql_results_org = get_aired_in_season(show, return_sql=True)

    from_date_ord = from_date.toordinal()
    if unaired:
        sql_results = [s for s in sql_results_org if s['airdate'] > from_date_ord or s['airdate'] == 1]
    else:
        sql_results = [s for s in sql_results_org if s['airdate'] > from_date_ord]

    if make_dict:
        wanted = {}
    else:
        wanted = []

    total_wanted = total_replacing = total_unaired = 0

    if 0 < len(sql_results) and 2 < len(sql_results) - len(show.episodes):
        my_db = db.DBConnection()
        show_ep_sql = my_db.select('SELECT * FROM tv_episodes WHERE showid = ? AND indexer = ?',
                                   [show.indexerid, show.indexer])
    else:
        show_ep_sql = None

    for result in sql_results:
        ep_obj = show.getEpisode(int(result['season']), int(result['episode']), ep_sql=show_ep_sql)
        cur_status, cur_quality = common.Quality.splitCompositeStatus(ep_obj.status)
        ep_obj.wantedQuality = get_wanted_qualities(ep_obj, cur_status, cur_quality, unaired=unaired)
        if not ep_obj.wantedQuality:
            continue

        ep_obj.eps_aired_in_season = ep_count.get(helpers.tryInt(result['season']), 0)
        ep_obj.eps_aired_in_scene_season = ep_count_scene.get(
            helpers.tryInt(result['scene_season']), 0) if result['scene_season'] else ep_obj.eps_aired_in_season
        if make_dict:
            wanted.setdefault(ep_obj.scene_season if ep_obj.show.is_scene else ep_obj.season, []).append(ep_obj)
        else:
            wanted.append(ep_obj)

        if cur_status in (common.WANTED, common.FAILED):
            total_wanted += 1
        elif cur_status in (common.UNAIRED, common.SKIPPED, common.IGNORED, common.UNKNOWN):
            total_unaired += 1
        else:
            total_replacing += 1

    if 0 < total_wanted + total_replacing + total_unaired:
        actions = []
        for msg, total in ['%d episode%s', total_wanted], \
                          ['to upgrade %d episode%s', total_replacing], \
                          ['%d unaired episode%s', total_unaired]:
            if 0 < total:
                actions.append(msg % (total, helpers.maybe_plural(total)))
        logger.log(u'We want %s for %s' % (' and '.join(actions), show.name))

    return wanted
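
# Usage sketch (assumed scheduler context): recent and backlog searches build
# their work lists here, optionally keyed by (scene) season:
#     wanted = wanted_episodes(show_obj, from_date)
#     per_season = wanted_episodes(show_obj, from_date, make_dict=True)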


def search_for_needed_episodes(episodes):
    found_results = {}

    search_done = False

    orig_thread_name = threading.currentThread().name

    providers = [x for x in sickbeard.providers.sortedProviderList() if x.is_active() and x.enable_recentsearch]

    for cur_provider in providers:
        threading.currentThread().name = '%s :: [%s]' % (orig_thread_name, cur_provider.name)

        cur_found_results = cur_provider.search_rss(episodes)

        search_done = True

        # pick a single result for each episode, respecting existing results
        for cur_ep in cur_found_results:

            if cur_ep.show.paused:
                logger.log(u'Show %s is paused, ignoring all RSS items for %s' %
                           (cur_ep.show.name, cur_ep.prettyName()), logger.DEBUG)
                continue

            # find the best result for the current episode
            best_result = pick_best_result(cur_found_results[cur_ep], cur_ep.show, filter_rls=orig_thread_name)

            # if all results were rejected move on to the next episode
            if not best_result:
                logger.log(u'All found results for %s were rejected.' % cur_ep.prettyName(), logger.DEBUG)
                continue

            # if it's already in the list (from another provider) and the newly found quality is no better then skip it
            if cur_ep in found_results and best_result.quality <= found_results[cur_ep].quality:
                continue

            # filter out possible bad torrents from providers
            if 'torrent' == best_result.resultType and 'blackhole' != sickbeard.TORRENT_METHOD:
                best_result.content = None
                if not best_result.url.startswith('magnet'):
                    best_result.content = best_result.provider.get_url(best_result.url)
                    if best_result.provider.should_skip():
                        break
                    if not best_result.content:
                        continue

            found_results[cur_ep] = best_result

        try:
            cur_provider.save_list()
        except (StandardError, Exception):
            pass

    threading.currentThread().name = orig_thread_name

    if not len(providers):
        logger.log('No NZB/Torrent providers in Media Providers/Options are enabled to match recent episodes',
                   logger.WARNING)
    elif not search_done:
        logger.log('Failed recent search of %s enabled provider%s. More info in debug log.' % (
            len(providers), helpers.maybe_plural(len(providers))), logger.ERROR)

    return found_results.values()
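
# Sketch (hedged): the recent-search scheduler is the expected caller, and each
# returned result is then snatched:
#     for result in search_for_needed_episodes(wanted_eps):
#         snatch_episode(result)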


def can_reject(release_name):
    """
    Check if a release name should be rejected at external services.

    If any site reports the result as a valid scene release, then return None, None.
    If predb reports the result as nuked, then return the nuke reason and the url attempted.
    If the result is found at none of the services, return reject and url details for each site.

    :param release_name: Release title
    :type release_name: String
    :return: None, None if release has no issue otherwise True/Nuke reason, URLs that rejected
    :rtype: Tuple (None, None or True/String, String)
    """
    rej_urls = []
    srrdb_url = 'https://www.srrdb.com/api/search/r:%s/order:date-desc' % re.sub('\]\[', '', release_name)
    resp = helpers.getURL(srrdb_url, json=True)
    if not resp:
        srrdb_rej = True
        rej_urls += ['Failed contact \'%s\'' % srrdb_url]
    else:
        srrdb_rej = (not len(resp.get('results', []))
                     or release_name.lower() != resp.get('results', [{}])[0].get('release', '').lower())
        rej_urls += ([], ['\'%s\'' % srrdb_url])[srrdb_rej]

    sane_name = helpers.full_sanitizeSceneName(release_name)
    predb_url = 'https://predb.ovh/api/v1/?q=@name "%s"' % sane_name
    resp = helpers.getURL(predb_url, json=True)
    predb_rej = True
    if not resp:
        rej_urls += ['Failed contact \'%s\'' % predb_url]
    elif 'success' == resp.get('status', '').lower():
        rows = resp and (resp.get('data') or {}).get('rows') or []
        for data in rows:
            if sane_name == helpers.full_sanitizeSceneName((data.get('name', '') or '').strip()):
                nuke_type = (data.get('nuke') or {}).get('type')
                if not nuke_type:
                    predb_rej = not helpers.tryInt(data.get('preAt'))
                else:
                    predb_rej = 'un' not in nuke_type and data.get('nuke', {}).get('reason', 'Reason not set')
                break
        rej_urls += ([], ['\'%s\'' % predb_url])[bool(predb_rej)]

    pred = any([not srrdb_rej, not predb_rej])

    return pred and (None, None) or (predb_rej or True, ', '.join(rej_urls))
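
# Illustrative outcomes (hypothetical release names and URLs):
#     can_reject('Show.S01E01.720p.HDTV.x264-GRP')  # -> (None, None), listed as a scene release
#     can_reject('Show.S01E02.720p.HDTV.x264-GRP')  # -> ('bad.source', "'https://predb.ovh/...'"), nuked
#     can_reject('Home.Made.Rip.S01E01')            # -> (True, "'url', 'url'"), unknown at all services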


def search_providers(show, episodes, manual_search=False, torrent_only=False, try_other_searches=False, old_status=None, scheduled=False):
    found_results = {}
    final_results = []

    search_done = False

    orig_thread_name = threading.currentThread().name

    use_quality_list = None
    if any([episodes]):
        old_status = old_status or failed_history.find_old_status(episodes[0]) or episodes[0].status
        if old_status:
            status, quality = Quality.splitCompositeStatus(old_status)
            use_quality_list = (status not in (
                common.WANTED, common.FAILED, common.UNAIRED, common.SKIPPED, common.IGNORED, common.UNKNOWN))

    provider_list = [x for x in sickbeard.providers.sortedProviderList() if x.is_active() and x.enable_backlog and
                     (not torrent_only or x.providerType == GenericProvider.TORRENT) and
                     (not scheduled or x.enable_scheduled_backlog)]
    for cur_provider in provider_list:
        if cur_provider.anime_only and not show.is_anime:
            logger.log(u'%s is not an anime, skipping' % show.name, logger.DEBUG)
            continue

        threading.currentThread().name = '%s :: [%s]' % (orig_thread_name, cur_provider.name)
        provider_id = cur_provider.get_id()

        found_results[provider_id] = {}

        search_count = 0
        search_mode = cur_provider.search_mode

        while True:
            search_count += 1

            if 'eponly' == search_mode:
                logger.log(u'Performing episode search for %s' % show.name)
            else:
                logger.log(u'Performing season pack search for %s' % show.name)

            try:
                cur_provider.cache._clearCache()
                search_results = cur_provider.find_search_results(show, episodes, search_mode, manual_search,
                                                                  try_other_searches=try_other_searches)
                if any(search_results):
                    logger.log(', '.join(['%s %s candidate%s' % (
                        len(v), (('multiep', 'season')[SEASON_RESULT == k], 'episode')['ep' in search_mode],
                        helpers.maybe_plural(len(v))) for (k, v) in search_results.iteritems()]))
            except exceptions.AuthException as e:
                logger.log(u'Authentication error: %s' % ex(e), logger.ERROR)
                break
            except Exception as e:
                logger.log(u'Error while searching %s, skipping: %s' % (cur_provider.name, ex(e)), logger.ERROR)
                logger.log(traceback.format_exc(), logger.ERROR)
                break
            finally:
                threading.currentThread().name = orig_thread_name

            search_done = True

            if len(search_results):
                # make a list of all the results for this provider
                for cur_ep in search_results:
                    # skip non-tv crap
                    search_results[cur_ep] = filter(
                        lambda ep_item: show_name_helpers.pass_wordlist_checks(
                            ep_item.name, parse=False, indexer_lookup=False) and
                            ep_item.show == show, search_results[cur_ep])

                    if cur_ep in found_results[provider_id]:
                        found_results[provider_id][cur_ep] += search_results[cur_ep]
                    else:
                        found_results[provider_id][cur_ep] = search_results[cur_ep]

                break
            elif not cur_provider.search_fallback or search_count == 2:
                break

            search_mode = '%sonly' % ('ep', 'sp')['ep' in search_mode]
            logger.log(u'Falling back to %s search ...' % ('season pack', 'episode')['ep' in search_mode])

        # skip to next provider if we have no results to process
        if not len(found_results[provider_id]):
            continue

        any_qualities, best_qualities = Quality.splitQuality(show.quality)

        # pick the best season NZB
        best_season_result = None
        if SEASON_RESULT in found_results[provider_id]:
            best_season_result = pick_best_result(found_results[provider_id][SEASON_RESULT], show,
                                                  any_qualities + best_qualities)

        highest_quality_overall = 0
        for cur_episode in found_results[provider_id]:
            for cur_result in found_results[provider_id][cur_episode]:
                if Quality.UNKNOWN != cur_result.quality and highest_quality_overall < cur_result.quality:
                    highest_quality_overall = cur_result.quality
        logger.log(u'%s is the highest quality of any match' % Quality.qualityStrings[highest_quality_overall],
                   logger.DEBUG)

        # see if every episode is wanted
        if best_season_result:
            # get the quality of the season nzb
            season_qual = best_season_result.quality
            logger.log(u'%s is the quality of the season %s' % (Quality.qualityStrings[season_qual],
                                                                best_season_result.provider.providerType),
                       logger.DEBUG)

            my_db = db.DBConnection()
            sql = 'SELECT episode FROM tv_episodes WHERE showid = %s AND (season IN (%s))' %\
                  (show.indexerid, ','.join([str(x.season) for x in episodes]))
            ep_nums = [int(x['episode']) for x in my_db.select(sql)]

            logger.log(u'Executed query: [%s]' % sql)
            logger.log(u'Episode list: %s' % ep_nums, logger.DEBUG)

            all_wanted = True
            any_wanted = False
            for ep_num in ep_nums:
                for season in set([x.season for x in episodes]):
                    if not show.wantEpisode(season, ep_num, season_qual):
                        all_wanted = False
                    else:
                        any_wanted = True

            # if we need every ep in the season and there's nothing better then just download this and
            # be done with it (unless single episodes are preferred)
            if all_wanted and highest_quality_overall == best_season_result.quality:
                logger.log(u'Every episode in this season is needed, downloading the whole %s %s' %
                           (best_season_result.provider.providerType, best_season_result.name))
                ep_objs = []
                for ep_num in ep_nums:
                    for season in set([x.season for x in episodes]):
                        ep_objs.append(show.getEpisode(season, ep_num))
                best_season_result.episodes = ep_objs

                return [best_season_result]

            elif not any_wanted:
                logger.log(u'No episodes from this season are wanted at this quality, ignoring the result of ' +
                           best_season_result.name, logger.DEBUG)
            else:
                if GenericProvider.NZB == best_season_result.provider.providerType:
                    logger.log(u'Breaking apart the NZB and adding the individual ones to our results', logger.DEBUG)

                    # if not, break it apart and add them as the lowest priority results
                    individual_results = nzbSplitter.splitResult(best_season_result)

                    individual_results = filter(
                        lambda r: show_name_helpers.pass_wordlist_checks(
                            r.name, parse=False, indexer_lookup=False) and r.show == show, individual_results)

                    for cur_result in individual_results:
                        if 1 == len(cur_result.episodes):
                            ep_num = cur_result.episodes[0].episode
                        elif 1 < len(cur_result.episodes):
                            ep_num = MULTI_EP_RESULT

                        if ep_num in found_results[provider_id]:
                            found_results[provider_id][ep_num].append(cur_result)
                        else:
                            found_results[provider_id][ep_num] = [cur_result]

                # If this is a torrent all we can do is leech the entire torrent,
                # the user will have to select which eps not to download in their torrent client
                else:
                    # Season result from a torrent provider must be a full-season torrent, create a multi-ep result for it
                    logger.log(u'Adding multi episode result for full season torrent. In your torrent client, set ' +
                               u'the episodes that you do not want to "don\'t download"')
                    ep_objs = []
                    for ep_num in ep_nums:
                        for season in set([x.season for x in episodes]):
                            ep_objs.append(show.getEpisode(season, ep_num))
                    best_season_result.episodes = ep_objs

                    ep_num = MULTI_EP_RESULT
                    if ep_num in found_results[provider_id]:
                        found_results[provider_id][ep_num].append(best_season_result)
                    else:
                        found_results[provider_id][ep_num] = [best_season_result]

        # go through multi-ep results and see if we really want them or not, get rid of the rest
        multi_results = {}
        if MULTI_EP_RESULT in found_results[provider_id]:
            for multi_result in found_results[provider_id][MULTI_EP_RESULT]:

                logger.log(u'Checking usefulness of multi episode result [%s]' % multi_result.name, logger.DEBUG)

                if sickbeard.USE_FAILED_DOWNLOADS and failed_history.has_failed(multi_result.name, multi_result.size,
                                                                                multi_result.provider.name):
                    logger.log(u'Rejecting previously failed multi episode result [%s]' % multi_result.name)
                    continue

                # see how many of the eps that this result covers aren't covered by single results
                needed_eps = []
                not_needed_eps = []
                for ep_obj in multi_result.episodes:
                    ep_num = ep_obj.episode
                    # if we have results for the episode
                    if ep_num in found_results[provider_id] and 0 < len(found_results[provider_id][ep_num]):
                        needed_eps.append(ep_num)
                    else:
                        not_needed_eps.append(ep_num)

                logger.log(u'Single episode check result is... needed episodes: %s, not needed episodes: %s' %
                           (needed_eps, not_needed_eps), logger.DEBUG)

                if not not_needed_eps:
                    logger.log(u'All of these episodes were covered by single episode results, ' +
                               'ignoring this multi episode result', logger.DEBUG)
                    continue

                # check if these eps are already covered by another multi-result
                multi_needed_eps = []
                multi_not_needed_eps = []
                for ep_obj in multi_result.episodes:
                    ep_num = ep_obj.episode
                    if ep_num in multi_results:
                        multi_not_needed_eps.append(ep_num)
                    else:
                        multi_needed_eps.append(ep_num)

                logger.log(u'Multi episode check result is... multi needed episodes: ' +
                           '%s, multi not needed episodes: %s' % (multi_needed_eps, multi_not_needed_eps),
                           logger.DEBUG)

                if not multi_needed_eps:
                    logger.log(u'All of these episodes were covered by another multi episode nzb, ' +
                               'ignoring this multi episode result',
                               logger.DEBUG)
                    continue

                # if we're keeping this multi-result then remember it
                for ep_obj in multi_result.episodes:
                    multi_results[ep_obj.episode] = multi_result

                # don't bother with the single result if we're going to get it with a multi result
                for ep_obj in multi_result.episodes:
                    ep_num = ep_obj.episode
                    if ep_num in found_results[provider_id]:
                        logger.log(u'A needed multi episode result overlaps with a single episode result for episode ' +
                                   '#%s, removing the single episode results from the list' % ep_num, logger.DEBUG)
                        del found_results[provider_id][ep_num]

        # of all the single ep results narrow it down to the best one for each episode
        final_results += set(multi_results.values())
        quality_list = use_quality_list and (None, best_qualities)[any(best_qualities)] or None
        for cur_ep in found_results[provider_id]:
            if cur_ep in (MULTI_EP_RESULT, SEASON_RESULT):
                continue

            if 0 == len(found_results[provider_id][cur_ep]):
                continue

            best_result = pick_best_result(found_results[provider_id][cur_ep], show, quality_list,
                                           filter_rls=orig_thread_name)

            # if all results were rejected move on to the next episode
            if not best_result:
                continue

            # filter out possible bad torrents from providers
            if 'torrent' == best_result.resultType:
                if not best_result.url.startswith('magnet') and None is not best_result.get_data_func:
                    best_result.url = best_result.get_data_func(best_result.url)
                    best_result.get_data_func = None  # consume only once
                if not best_result.url:
                    continue
                if best_result.url.startswith('magnet'):
                    if 'blackhole' != sickbeard.TORRENT_METHOD:
                        best_result.content = None
                else:
                    cache_file = ek.ek(os.path.join, sickbeard.CACHE_DIR or helpers._getTempDir(),
                                       '%s.torrent' % (helpers.sanitizeFileName(best_result.name)))
                    if not helpers.download_file(best_result.url, cache_file, session=best_result.provider.session):
                        continue

                    try:
                        with open(cache_file, 'rb') as fh:
                            td = fh.read()
                        setattr(best_result, 'cache_file', cache_file)
                    except (StandardError, Exception):
                        continue

                    if getattr(best_result.provider, 'chk_td', None):
                        name = None
                        try:
                            # pull the release name out of the bencoded torrent metadata
                            hdr = re.findall('(\w+(\d+):)', td[0:6])[0]
                            x, v = len(hdr[0]), int(hdr[1])
                            while x < len(td):
                                y = x + v
                                name = 'name' == td[x: y]
                                w = re.findall('((?:i-?\d+e|e+|d|l+)*(\d+):)', td[y: y + 32])[0]
                                x, v = y + len(w[0]), int(w[1])
                                if name:
                                    name = td[x: x + v]
                                    break
                        except (StandardError, Exception):
                            continue
                        if name:
                            if not pass_show_wordlist_checks(name, show):
                                continue
                            if not show_name_helpers.pass_wordlist_checks(name, indexer_lookup=False):
                                logger.log('Ignored: %s (debug log has detail)' % name)
                                continue
                            best_result.name = name

                    if 'blackhole' != sickbeard.TORRENT_METHOD:
                        best_result.content = td

            # add result if it's not a duplicate
            found = False
            for i, result in enumerate(final_results):
                for best_result_ep in best_result.episodes:
                    if best_result_ep in result.episodes:
                        if best_result.quality > result.quality:
                            final_results.pop(i)
                        else:
                            found = True
            if not found:
                final_results += [best_result]

        # check that we got all the episodes we wanted first before doing a match and snatch
        wanted_ep_count = 0
        for wanted_ep in episodes:
            for result in final_results:
                if wanted_ep in result.episodes and is_final_result(result):
                    wanted_ep_count += 1

        # make sure we search every provider for results unless we found everything we wanted
        if len(episodes) == wanted_ep_count:
            break

    if not len(provider_list):
        logger.log('No NZB/Torrent providers in Media Providers/Options are allowed for active searching',
                   logger.WARNING)
    elif not search_done:
        logger.log('Failed active search of %s enabled provider%s. More info in debug log.' % (
            len(provider_list), helpers.maybe_plural(len(provider_list))), logger.ERROR)
    elif not any(final_results):
        logger.log('No suitable candidates')

    return final_results
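
# Usage sketch (assumed backlog context; arguments are illustrative):
#     results = search_providers(show_obj, wanted_eps, torrent_only=False, scheduled=True)
#     for result in results:
#         snatch_episode(result)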