2014-03-10 05:18:05 +00:00
|
|
|
# Author: Nic Wolfe <nic@wolfeden.ca>
|
|
|
|
# URL: http://code.google.com/p/sickbeard/
|
|
|
|
#
|
2014-05-23 12:37:22 +00:00
|
|
|
# This file is part of SickRage.
|
2014-03-10 05:18:05 +00:00
|
|
|
#
|
2014-05-23 12:37:22 +00:00
|
|
|
# SickRage is free software: you can redistribute it and/or modify
|
2014-03-10 05:18:05 +00:00
|
|
|
# it under the terms of the GNU General Public License as published by
|
|
|
|
# the Free Software Foundation, either version 3 of the License, or
|
|
|
|
# (at your option) any later version.
|
|
|
|
#
|
2014-05-23 12:37:22 +00:00
|
|
|
# SickRage is distributed in the hope that it will be useful,
|
2014-03-10 05:18:05 +00:00
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU General Public License
|
2014-05-23 12:37:22 +00:00
|
|
|
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
|
2014-03-10 05:18:05 +00:00
|
|
|
|
|
|
|
from __future__ import with_statement
|
|
|
|
|
|
|
|
import os
|
2014-04-24 05:18:16 +00:00
|
|
|
import re
|
2014-05-06 11:29:25 +00:00
|
|
|
import threading
|
2014-03-10 05:18:05 +00:00
|
|
|
import datetime
|
|
|
|
|
|
|
|
import sickbeard
|
|
|
|
|
2014-04-27 10:31:54 +00:00
|
|
|
from common import SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, Quality, SEASON_RESULT, MULTI_EP_RESULT, Overview
|
2014-03-10 05:18:05 +00:00
|
|
|
|
|
|
|
from sickbeard import logger, db, show_name_helpers, exceptions, helpers
|
|
|
|
from sickbeard import sab
|
|
|
|
from sickbeard import nzbget
|
|
|
|
from sickbeard import clients
|
|
|
|
from sickbeard import history
|
|
|
|
from sickbeard import notifiers
|
|
|
|
from sickbeard import nzbSplitter
|
|
|
|
from sickbeard import ui
|
|
|
|
from sickbeard import encodingKludge as ek
|
|
|
|
from sickbeard import providers
|
|
|
|
from sickbeard import failed_history
|
|
|
|
from sickbeard.exceptions import ex
|
2014-04-29 13:14:19 +00:00
|
|
|
from sickbeard.providers.generic import GenericProvider, tvcache
|
2014-05-27 07:44:23 +00:00
|
|
|
from sickbeard.blackandwhitelist import BlackAndWhiteList
|
2014-05-14 02:19:28 +00:00
|
|
|
|
2014-03-10 05:18:05 +00:00
|
|
|
def _downloadResult(result):
|
|
|
|
"""
|
|
|
|
Downloads a result to the appropriate black hole folder.
|
|
|
|
|
|
|
|
Returns a bool representing success.
|
|
|
|
|
|
|
|
result: SearchResult instance to download.
|
|
|
|
"""
|
|
|
|
|
|
|
|
resProvider = result.provider
|
|
|
|
|
|
|
|
newResult = False
|
|
|
|
|
2014-03-20 18:03:22 +00:00
|
|
|
if resProvider == None:
|
2014-03-10 05:18:05 +00:00
|
|
|
logger.log(u"Invalid provider name - this is a coding error, report it please", logger.ERROR)
|
|
|
|
return False
|
|
|
|
|
|
|
|
# nzbs with an URL can just be downloaded from the provider
|
|
|
|
if result.resultType == "nzb":
|
|
|
|
newResult = resProvider.downloadResult(result)
|
|
|
|
|
|
|
|
# if it's an nzb data result
|
|
|
|
elif result.resultType == "nzbdata":
|
|
|
|
|
|
|
|
# get the final file path to the nzb
|
|
|
|
fileName = ek.ek(os.path.join, sickbeard.NZB_DIR, result.name + ".nzb")
|
|
|
|
|
|
|
|
logger.log(u"Saving NZB to " + fileName)
|
|
|
|
|
|
|
|
newResult = True
|
|
|
|
|
|
|
|
# save the data to disk
|
|
|
|
try:
|
|
|
|
with ek.ek(open, fileName, 'w') as fileOut:
|
|
|
|
fileOut.write(result.extraInfo[0])
|
|
|
|
|
|
|
|
helpers.chmodAsParent(fileName)
|
|
|
|
|
|
|
|
except EnvironmentError, e:
|
|
|
|
logger.log(u"Error trying to save NZB to black hole: " + ex(e), logger.ERROR)
|
|
|
|
newResult = False
|
|
|
|
|
|
|
|
elif resProvider.providerType == "torrent":
|
|
|
|
newResult = resProvider.downloadResult(result)
|
|
|
|
|
|
|
|
else:
|
|
|
|
logger.log(u"Invalid provider type - this is a coding error, report it please", logger.ERROR)
|
|
|
|
return False
|
|
|
|
|
|
|
|
if newResult and sickbeard.USE_FAILED_DOWNLOADS:
|
2014-03-25 05:57:24 +00:00
|
|
|
ui.notifications.message('Episode snatched',
|
|
|
|
'<b>%s</b> snatched from <b>%s</b>' % (result.name, resProvider.name))
|
2014-03-10 05:18:05 +00:00
|
|
|
|
|
|
|
return newResult
|
|
|
|
|
2014-03-25 05:57:24 +00:00
|
|
|
|
2014-03-10 05:18:05 +00:00
|
|
|
def snatchEpisode(result, endStatus=SNATCHED):
    """
    Contains the internal logic necessary to actually "snatch" a result that
    has been found.

    result: SearchResult instance to be snatched.
    endStatus: the episode status that should be used for the episode object once it's snatched.

    Returns a bool representing success.
    """

    if result is None: return False

    result.priority = 0  # -1 = low, 0 = normal, 1 = high
    if sickbeard.ALLOW_HIGH_PRIORITY:
        # if it aired recently make it high priority
        for curEp in result.episodes:
            if datetime.date.today() - curEp.airdate <= datetime.timedelta(days=7):
                result.priority = 1
    # fix: use `is not None` (and a raw string for the regex) instead of `!= None`
    if re.search(r'(^|[\. _-])(proper|repack)([\. _-]|$)', result.name, re.I) is not None:
        endStatus = SNATCHED_PROPER

    # NZBs can be sent straight to SAB or saved to disk
    if result.resultType in ("nzb", "nzbdata"):
        if sickbeard.NZB_METHOD == "blackhole":
            dlResult = _downloadResult(result)
        elif sickbeard.NZB_METHOD == "sabnzbd":
            dlResult = sab.sendNZB(result)
        elif sickbeard.NZB_METHOD == "nzbget":
            # fix: the comparison already yields a bool; no ternary needed
            is_proper = endStatus == SNATCHED_PROPER
            dlResult = nzbget.sendNZB(result, is_proper)
        else:
            logger.log(u"Unknown NZB action specified in config: " + sickbeard.NZB_METHOD, logger.ERROR)
            dlResult = False

    # TORRENTs can be sent to clients or saved to disk
    elif result.resultType == "torrent":
        # torrents are saved to disk when blackhole mode
        if sickbeard.TORRENT_METHOD == "blackhole":
            dlResult = _downloadResult(result)
        else:
            # Sets per provider seed ratio
            result.ratio = result.provider.seedRatio()
            # magnet links carry no payload to fetch; everything else is downloaded up front
            result.content = result.provider.getURL(result.url) if not result.url.startswith('magnet') else None
            client = clients.getClientIstance(sickbeard.TORRENT_METHOD)()
            dlResult = client.sendTORRENT(result)
    else:
        logger.log(u"Unknown result type, unable to download it", logger.ERROR)
        dlResult = False

    if not dlResult:
        return False

    if sickbeard.USE_FAILED_DOWNLOADS:
        failed_history.logSnatch(result)

    ui.notifications.message('Episode snatched', result.name)

    history.logSnatch(result)

    # update episode statuses and queue the DB writes; don't notify when we
    # re-download an already-downloaded episode
    sql_l = []
    for curEpObj in result.episodes:
        with curEpObj.lock:
            if isFirstBestMatch(result):
                curEpObj.status = Quality.compositeStatus(SNATCHED_BEST, result.quality)
            else:
                curEpObj.status = Quality.compositeStatus(endStatus, result.quality)

            sql_l.append(curEpObj.get_sql())

            if curEpObj.status not in Quality.DOWNLOADED:
                notifiers.notify_snatch(curEpObj._format_pattern('%SN - %Sx%0E - %EN - %QN'))

    if sql_l:
        with db.DBConnection() as myDB:
            myDB.mass_action(sql_l)

    return True
|
|
|
|
|
2014-05-14 02:19:28 +00:00
|
|
|
|
2014-04-24 05:18:16 +00:00
|
|
|
def filter_release_name(name, filter_words):
    """
    Filters out results based on filter_words

    name: name to check
    filter_words : Words to filter on, separated by comma

    Returns: False if the release name is OK, True if it contains one of the filter_words
    """

    if filter_words:
        # fix: don't shadow the builtin `filter`; use a raw string for the regex.
        # Each comma-separated word must match as a whole token (non-word chars
        # or string boundaries on both sides), case-insensitively.
        filters = [re.compile(r'(^|[\W_])%s($|[\W_])' % word.strip(), re.I) for word in filter_words.split(',')]
        for regfilter in filters:
            if regfilter.search(name):
                logger.log(u"" + name + " contains pattern: " + regfilter.pattern, logger.DEBUG)
                return True

    return False
|
|
|
|
|
2014-05-14 02:19:28 +00:00
|
|
|
|
2014-04-24 05:18:16 +00:00
|
|
|
def pickBestResult(results, show, quality_list=None):
    """
    Picks the single best result from a list of candidate results.

    results: list of SearchResult candidates for one episode
    show: show object (may be None; black/white-list and word filters are then skipped)
    quality_list: optional list of acceptable quality values

    Returns the best SearchResult, or None if every candidate was rejected.
    """
    logger.log(u"Picking the best result out of " + str([x.name for x in results]), logger.DEBUG)

    # build the black And white list
    bwl = None
    if show:
        bwl = BlackAndWhiteList(show.indexerid)
    else:
        logger.log("Could not create black and white list no show was given", logger.DEBUG)

    # find the best result for the current episode
    bestResult = None
    for cur_result in results:
        logger.log("Quality of " + cur_result.name + " is " + Quality.qualityStrings[cur_result.quality])

        if bwl:
            if not bwl.is_valid(cur_result):
                logger.log(cur_result.name + " does not match the blacklist or the whitelist, rejecting it. Result: " + bwl.get_last_result_msg(), logger.MESSAGE)
                continue

        if quality_list and cur_result.quality not in quality_list:
            logger.log(cur_result.name + " is a quality we know we don't want, rejecting it", logger.DEBUG)
            continue

        # fix: guard on `show` — the original dereferenced show.rls_ignore_words
        # even on the `show is None` path and crashed with AttributeError
        if show and show.rls_ignore_words and filter_release_name(cur_result.name, show.rls_ignore_words):
            logger.log(u"Ignoring " + cur_result.name + " based on ignored words filter: " + show.rls_ignore_words,
                       logger.MESSAGE)
            continue

        if show and show.rls_require_words and not filter_release_name(cur_result.name, show.rls_require_words):
            logger.log(u"Ignoring " + cur_result.name + " based on required words filter: " + show.rls_require_words,
                       logger.MESSAGE)
            continue

        if sickbeard.USE_FAILED_DOWNLOADS and failed_history.hasFailed(cur_result.name, cur_result.size,
                                                                       cur_result.provider.name):
            logger.log(cur_result.name + u" has previously failed, rejecting it")
            continue

        # take this result when we have none yet, or when it is a known (non-UNKNOWN)
        # quality strictly better than the current best (parens added for clarity;
        # same precedence as the original `or`/`and` expression)
        if not bestResult or (bestResult.quality < cur_result.quality and cur_result.quality != Quality.UNKNOWN):
            bestResult = cur_result

        elif bestResult.quality == cur_result.quality:
            # equal quality: prefer proper/repack, then non-internal, then x264 over xvid
            if "proper" in cur_result.name.lower() or "repack" in cur_result.name.lower():
                bestResult = cur_result
            elif "internal" in bestResult.name.lower() and "internal" not in cur_result.name.lower():
                bestResult = cur_result
            elif "xvid" in bestResult.name.lower() and "x264" in cur_result.name.lower():
                logger.log(u"Preferring " + cur_result.name + " (x264 over xvid)")
                bestResult = cur_result

    if bestResult:
        logger.log(u"Picked " + bestResult.name + " as the best", logger.DEBUG)
    else:
        logger.log(u"No result picked.", logger.DEBUG)

    return bestResult
|
|
|
|
|
2014-05-14 02:19:28 +00:00
|
|
|
|
2014-03-10 05:18:05 +00:00
|
|
|
def isFinalResult(result):
    """
    Checks if the given result is good enough quality that we can stop searching for other ones.

    If the result is the highest quality in both the any/best quality lists then this function
    returns True, if not then it's False
    """

    logger.log(u"Checking if we should keep searching after we've found " + result.name, logger.DEBUG)

    show_obj = result.episodes[0].show

    bwl = BlackAndWhiteList(show_obj.indexerid)

    initial_qualities, archive_qualities = Quality.splitQuality(show_obj.quality)

    # a higher-quality redownload exists, so we definitely need to keep looking
    if archive_qualities and result.quality < max(archive_qualities):
        return False

    # a result that fails the show's black/white list is never final
    if not bwl.is_valid(result):
        return False

    # nothing better to redownload (above) and this is an acceptable initial quality
    if initial_qualities and result.quality in initial_qualities:
        return True

    if archive_qualities and result.quality == max(archive_qualities):
        # best possible redownload — final unless a higher initial download exists
        return not (initial_qualities and result.quality < max(initial_qualities))

    # not on either list, lists empty, or below the highest required quality
    return False
|
|
|
|
|
2014-03-25 05:57:24 +00:00
|
|
|
|
2014-03-19 23:33:49 +00:00
|
|
|
def isFirstBestMatch(result):
    """
    Checks if the given result is a best quality match and if we want to archive the episode on first match.
    """

    logger.log(u"Checking if we should archive our first best quality match for for episode " + result.name,
               logger.DEBUG)

    show_obj = result.episodes[0].show

    any_qualities, best_qualities = Quality.splitQuality(show_obj.quality)

    # archive on first match only when the show asks for it and this result
    # lands in one of the show's "best" (archive) qualities
    return bool(best_qualities and show_obj.archive_firstmatch and result.quality in best_qualities)
|
2014-03-10 05:18:05 +00:00
|
|
|
|
2014-05-14 02:19:28 +00:00
|
|
|
|
2014-05-26 06:29:22 +00:00
|
|
|
def filterSearchResults(show, season, results):
    """
    Drops non-TV / bad releases from a provider's results, in place, and
    returns the cleaned mapping of episode -> list of results.
    """
    filtered = {}

    # predicate: keep only names that survive the bad-release filter and
    # look like a good result for this show/season
    def _keep(item):
        return show_name_helpers.filterBadReleases(item.name) and show_name_helpers.isGoodResult(item.name, show,
                                                                                                 season=season)

    for episode in results:
        # mutate the caller's dict too, exactly as the original did
        results[episode] = filter(_keep, results[episode])

        if episode in filtered:
            filtered[episode] += results[episode]
        else:
            filtered[episode] = results[episode]

    return filtered
|
|
|
|
|
2014-05-14 02:19:28 +00:00
|
|
|
|
2014-05-27 07:44:23 +00:00
|
|
|
def searchForNeededEpisodes(show, episodes):
    """
    Run an RSS-cache search on every active, non-backlog-only provider and
    keep the single best result per episode across all providers.

    show: the show the episodes belong to
    episodes: list of episode objects to search for

    Returns foundResults.values() (a list of SearchResult objects) when
    anything was found, otherwise an empty dict — NOTE(review): the two
    return types are inconsistent; callers appear to treat both as iterable.
    """
    foundResults = {}

    didSearch = False

    # ask all providers for any episodes it finds
    origThreadName = threading.currentThread().name
    providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive() and not x.backlog_only]
    for curProviderCount, curProvider in enumerate(providers):
        # anime-only providers are skipped for non-anime shows
        if curProvider.anime_only and not show.is_anime:
            logger.log(u"" + str(show.name) + " is not an anime skiping ...")
            continue

        # tag the thread name with the provider for clearer log output
        threading.currentThread().name = origThreadName + " :: [" + curProvider.name + "]"

        try:
            logger.log(u"Updating RSS cache ...")
            curProvider.cache.updateCache()
            logger.log(u"Searching RSS cache ...")
            curFoundResults = curProvider.searchRSS(episodes)
        except exceptions.AuthException, e:
            logger.log(u"Authentication error: " + ex(e), logger.ERROR)
            # NOTE(review): curProviderCount is 0-based (enumerate), so it can
            # never equal len(providers) — the `continue` is always taken and
            # the `break` below is unreachable dead code.
            if curProviderCount != len(providers):
                continue
            break
        except Exception, e:
            logger.log(u"Error while searching " + curProvider.name + ", skipping: " + ex(e), logger.ERROR)
            # NOTE(review): same unreachable `break` as above.
            if curProviderCount != len(providers):
                continue
            break

        didSearch = True

        # pick a single result for each episode, respecting existing results
        for curEp in curFoundResults:

            # paused shows are never auto-snatched from the RSS feed
            if curEp.show.paused:
                logger.log(
                    u"Show " + curEp.show.name + " is paused, ignoring all RSS items for " + curEp.prettyName(),
                    logger.DEBUG)
                continue

            bestResult = pickBestResult(curFoundResults[curEp], curEp.show)

            # if all results were rejected move on to the next episode
            if not bestResult:
                logger.log(u"All found results for " + curEp.prettyName() + " were rejected.", logger.DEBUG)
                continue

            # if it's already in the list (from another provider) and the newly found quality is no better then skip it
            if curEp in foundResults and bestResult.quality <= foundResults[curEp].quality:
                continue

            foundResults[curEp] = bestResult

    if not didSearch:
        logger.log(
            u"No NZB/Torrent providers found or enabled in the sickrage config. Please check your settings.",
            logger.ERROR)

    return foundResults.values() if len(foundResults) else {}
|
|
|
|
|
|
|
|
|
2014-05-18 15:33:31 +00:00
|
|
|
def searchProviders(show, season, episodes, manualSearch=False):
    """
    Search all active providers for the given episodes of a season.

    For each provider this tries the provider's preferred search mode
    (season-pack 'sponly' or per-episode 'eponly'), optionally falling back
    once to the other mode, then merges season, multi-episode and
    single-episode results into a final de-duplicated list.

    show: show object to search for
    season: season number
    episodes: list of wanted episode objects
    manualSearch: passed through to the providers; also disables the
        automatic "whole season wanted -> season search" collapse

    Returns a list of SearchResult objects (possibly a single season-pack
    result), or None when no providers are enabled.
    """
    foundResults = {}
    finalResults = []

    # check if we want to search for season packs instead of just season/episode
    seasonSearch = False
    if not manualSearch:
        seasonEps = show.getAllEpisodes(season)
        if len(seasonEps) == len(episodes):
            seasonSearch = True

    providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]

    if not len(providers):
        logger.log(u"No NZB/Torrent providers found or enabled in the sickrage config. Please check your settings.",
                   logger.ERROR)
        return

    origThreadName = threading.currentThread().name
    for providerNum, provider in enumerate(providers):
        # anime-only providers are skipped for non-anime shows
        if provider.anime_only and not show.is_anime:
            logger.log(u"" + str(show.name) + " is not an anime skiping ...")
            continue

        # tag the thread name with the provider for clearer log output
        threading.currentThread().name = origThreadName + " :: [" + provider.name + "]"
        foundResults.setdefault(provider.name, {})
        searchCount = 0

        # only use season-pack mode when the whole season is wanted AND the
        # provider prefers it
        search_mode = 'eponly'
        if seasonSearch and provider.search_mode == 'sponly':
            search_mode = provider.search_mode

        # at most two passes: the preferred mode, then (if search_fallback)
        # the other mode
        while (True):
            searchCount += 1

            if search_mode == 'sponly':
                logger.log(u"Searching for " + show.name + " Season " + str(season) + " pack")
            else:
                logger.log(u"Searching for episodes we need from " + show.name + " Season " + str(season))

            try:
                searchResults = provider.findSearchResults(show, season, episodes, search_mode, manualSearch)
            except exceptions.AuthException, e:
                logger.log(u"Authentication error: " + ex(e), logger.ERROR)
                break
            except Exception, e:
                logger.log(u"Error while searching " + provider.name + ", skipping: " + ex(e), logger.ERROR)
                break

            if len(searchResults):
                # make a list of all the results for this provider
                for curEp in searchResults:
                    # skip non-tv crap
                    searchResults[curEp] = filter(
                        lambda x: show_name_helpers.filterBadReleases(x.name) and show_name_helpers.isGoodResult(x.name,
                                                                                                                 show,
                                                                                                                 season=season),
                        searchResults[curEp])

                    # NOTE(review): this membership test checks the TOP-LEVEL
                    # dict (keyed by provider name), not foundResults[provider.name]
                    # (keyed by episode) — it looks like it was meant to be
                    # `curEp in foundResults[provider.name]`; confirm before fixing.
                    if curEp in foundResults:
                        foundResults[provider.name][curEp] += searchResults[curEp]
                    else:
                        foundResults[provider.name][curEp] = searchResults[curEp]

                break
            elif not provider.search_fallback or searchCount == 2:
                break

            # flip modes for the single fallback pass
            if search_mode == 'sponly':
                logger.log(u"FALLBACK EPISODE SEARCH INITIATED ...")
                search_mode = 'eponly'
            else:
                logger.log(u"FALLBACK SEASON PACK SEARCH INITIATED ...")
                search_mode = 'sponly'

        # skip to next provider if we have no results to process
        if not len(foundResults[provider.name]):
            # NOTE(review): providerNum is 0-based, so it never equals
            # len(providers); the `break` below is unreachable dead code.
            if providerNum != len(providers):
                continue
            break

        anyQualities, bestQualities = Quality.splitQuality(show.quality)

        # pick the best season NZB
        bestSeasonNZB = None
        if SEASON_RESULT in foundResults[provider.name]:
            bestSeasonNZB = pickBestResult(foundResults[provider.name][SEASON_RESULT], show,
                                           anyQualities + bestQualities)

        # highest non-UNKNOWN quality seen among this provider's results
        highest_quality_overall = 0
        for cur_episode in foundResults[provider.name]:
            for cur_result in foundResults[provider.name][cur_episode]:
                if cur_result.quality != Quality.UNKNOWN and cur_result.quality > highest_quality_overall:
                    highest_quality_overall = cur_result.quality
        logger.log(u"The highest quality of any match is " + Quality.qualityStrings[highest_quality_overall],
                   logger.DEBUG)

        # see if every episode is wanted
        if bestSeasonNZB:
            # get the quality of the season nzb
            # NOTE(review): the sceneQuality result is immediately overwritten
            # by bestSeasonNZB.quality — the first assignment is dead code.
            seasonQual = Quality.sceneQuality(bestSeasonNZB.name)
            seasonQual = bestSeasonNZB.quality
            logger.log(
                u"The quality of the season " + bestSeasonNZB.provider.providerType + " is " + Quality.qualityStrings[
                    seasonQual], logger.DEBUG)

            # all episode numbers this show has in the DB for this season
            with db.DBConnection() as myDB:
                allEps = [int(x["episode"]) for x in
                          myDB.select("SELECT episode FROM tv_episodes WHERE showid = ? AND season = ?",
                                      [show.indexerid, season])]
            logger.log(u"Episode list: " + str(allEps), logger.DEBUG)

            allWanted = True
            anyWanted = False
            for curEpNum in allEps:
                if not show.wantEpisode(season, curEpNum, seasonQual):
                    allWanted = False
                else:
                    anyWanted = True

            # if we need every ep in the season and there's nothing better then just download this and be done with it (unless single episodes are preferred)
            if allWanted and bestSeasonNZB.quality == highest_quality_overall:
                logger.log(
                    u"Every ep in this season is needed, downloading the whole " + bestSeasonNZB.provider.providerType + " " + bestSeasonNZB.name)
                epObjs = []
                for curEpNum in allEps:
                    epObjs.append(show.getEpisode(season, curEpNum))
                bestSeasonNZB.episodes = epObjs

                # season pack wins outright — stop searching entirely
                return [bestSeasonNZB]

            elif not anyWanted:
                logger.log(
                    u"No eps from this season are wanted at this quality, ignoring the result of " + bestSeasonNZB.name,
                    logger.DEBUG)

            else:

                if bestSeasonNZB.provider.providerType == GenericProvider.NZB:
                    logger.log(u"Breaking apart the NZB and adding the individual ones to our results", logger.DEBUG)

                    # if not, break it apart and add them as the lowest priority results
                    individualResults = nzbSplitter.splitResult(bestSeasonNZB)

                    individualResults = filter(
                        lambda x: show_name_helpers.filterBadReleases(x.name) and show_name_helpers.isGoodResult(x.name,
                                                                                                                 show,
                                                                                                                 season=season),
                        individualResults)

                    for curResult in individualResults:
                        # NOTE(review): if curResult.episodes is empty, epNum is
                        # left over from a previous iteration (or unbound on the
                        # first) — presumably splitResult never yields empty
                        # results; verify.
                        if len(curResult.episodes) == 1:
                            epNum = curResult.episodes[0].episode
                        elif len(curResult.episodes) > 1:
                            epNum = MULTI_EP_RESULT

                        if epNum in foundResults[provider.name]:
                            foundResults[provider.name][epNum].append(curResult)
                        else:
                            foundResults[provider.name][epNum] = [curResult]

                # If this is a torrent all we can do is leech the entire torrent, user will have to select which eps not do download in his torrent client
                else:

                    # Season result from Torrent Provider must be a full-season torrent, creating multi-ep result for it.
                    logger.log(
                        u"Adding multi-ep result for full-season torrent. Set the episodes you don't want to 'don't download' in your torrent client if desired!")
                    epObjs = []
                    for curEpNum in allEps:
                        epObjs.append(show.getEpisode(season, curEpNum))
                    bestSeasonNZB.episodes = epObjs

                    epNum = MULTI_EP_RESULT
                    if epNum in foundResults[provider.name]:
                        foundResults[provider.name][epNum].append(bestSeasonNZB)
                    else:
                        foundResults[provider.name][epNum] = [bestSeasonNZB]

        # go through multi-ep results and see if we really want them or not, get rid of the rest
        multiResults = {}
        if MULTI_EP_RESULT in foundResults[provider.name]:
            for multiResult in foundResults[provider.name][MULTI_EP_RESULT]:

                logger.log(u"Seeing if we want to bother with multi-episode result " + multiResult.name, logger.DEBUG)

                if sickbeard.USE_FAILED_DOWNLOADS and failed_history.hasFailed(multiResult.name, multiResult.size,
                                                                               multiResult.provider.name):
                    logger.log(multiResult.name + u" has previously failed, rejecting this multi-ep result")
                    continue

                # see how many of the eps that this result covers aren't covered by single results
                neededEps = []
                notNeededEps = []
                for epObj in multiResult.episodes:
                    epNum = epObj.episode
                    # if we have results for the episode
                    if epNum in foundResults[provider.name] and len(foundResults[provider.name][epNum]) > 0:
                        # but the multi-ep is worse quality, we don't want it
                        # TODO: wtf is this False for
                        # if False and multiResult.quality <= pickBestResult(foundResults[epNum]):
                        # notNeededEps.append(epNum)
                        #else:
                        neededEps.append(epNum)
                    else:
                        neededEps.append(epNum)

                logger.log(
                    u"Single-ep check result is neededEps: " + str(neededEps) + ", notNeededEps: " + str(notNeededEps),
                    logger.DEBUG)

                if not neededEps:
                    logger.log(u"All of these episodes were covered by single nzbs, ignoring this multi-ep result",
                               logger.DEBUG)
                    continue

                # check if these eps are already covered by another multi-result
                multiNeededEps = []
                multiNotNeededEps = []
                for epObj in multiResult.episodes:
                    epNum = epObj.episode
                    if epNum in multiResults:
                        multiNotNeededEps.append(epNum)
                    else:
                        multiNeededEps.append(epNum)

                logger.log(
                    u"Multi-ep check result is multiNeededEps: " + str(multiNeededEps) + ", multiNotNeededEps: " + str(
                        multiNotNeededEps), logger.DEBUG)

                if not multiNeededEps:
                    logger.log(
                        u"All of these episodes were covered by another multi-episode nzbs, ignoring this multi-ep result",
                        logger.DEBUG)
                    continue

                # if we're keeping this multi-result then remember it
                for epObj in multiResult.episodes:
                    multiResults[epObj.episode] = multiResult

                # don't bother with the single result if we're going to get it with a multi result
                for epObj in multiResult.episodes:
                    epNum = epObj.episode
                    if epNum in foundResults[provider.name]:
                        logger.log(
                            u"A needed multi-episode result overlaps with a single-episode result for ep #" + str(
                                epNum) + ", removing the single-episode results from the list", logger.DEBUG)
                        del foundResults[provider.name][epNum]

        # of all the single ep results narrow it down to the best one for each episode
        finalResults += set(multiResults.values())
        for curEp in foundResults[provider.name]:
            if curEp in (MULTI_EP_RESULT, SEASON_RESULT):
                continue

            if len(foundResults[provider.name][curEp]) == 0:
                continue

            bestResult = pickBestResult(foundResults[provider.name][curEp], show)

            # if all results were rejected move on to the next episode
            if not bestResult:
                continue

            # add result if its not a duplicate and
            # NOTE(review): popping from finalResults while enumerating it can
            # skip elements; works in practice for small lists but is fragile.
            found = False
            for i, result in enumerate(finalResults):
                for bestResultEp in bestResult.episodes:
                    if bestResultEp in result.episodes:
                        if result.quality < bestResult.quality:
                            finalResults.pop(i)
                        else:
                            found = True
            if not found:
                finalResults += [bestResult]

        # check that we got all the episodes we wanted first before doing a match and snatch
        wantedEpCount = 0
        for wantedEp in episodes:
            for result in finalResults:
                if wantedEp in result.episodes and isFinalResult(result):
                    wantedEpCount += 1

        # make sure we search every provider for results unless we found everything we wanted
        # NOTE(review): providerNum == len(providers) is never true (0-based),
        # so only the wantedEpCount condition can trigger this early exit.
        if providerNum == len(providers) or wantedEpCount == len(episodes):
            break

    return finalResults
|