parent 5ec5dde9ba
commit aafe9ad522

5 changed files with 229 additions and 327 deletions
@@ -259,11 +259,11 @@ class GenericProvider:
             itemList = [i for n, i in enumerate(itemList) if i not in itemList[n + 1:]]

             if epObj.episode in searchItems:
-                searchItems[epObj.episode] += itemList
+                searchItems[epObj] += itemList
             else:
-                searchItems[epObj.episode] = itemList
+                searchItems[epObj] = itemList

-        for episode, items in searchItems.items():
+        for ep_obj, items in searchItems.items():
             for item in items:
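The hunk above now keys searchItems by the episode object itself (ep_obj) rather than by its episode number, and the comprehension that precedes it de-duplicates itemList. As a standalone illustration of that comprehension (plain strings stand in for provider result items), it keeps the last occurrence of each duplicate while preserving order:

# Minimal sketch of the dedup comprehension above: an element is kept only if
# it does not reappear later in the list, so the last occurrence survives.
item_list = ["a", "b", "a", "c"]
deduped = [i for n, i in enumerate(item_list) if i not in item_list[n + 1:]]
print(deduped)  # ['b', 'a', 'c']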
@@ -274,22 +274,25 @@ class GenericProvider:
                 # parse the file name
                 try:
                     myParser = NameParser(False)
-                    parse_result = myParser.parse(title).convert()
+                    if ep_obj.season == ep_obj.scene_season and ep_obj.episode == ep_obj.scene_episode:
+                        parse_result = myParser.parse(title)
+                    else:
+                        parse_result = myParser.parse(title)
                 except InvalidNameException:
                     logger.log(u"Unable to parse the filename " + title + " into a valid episode", logger.WARNING)
                     continue

                 if not (self.show.air_by_date or self.show.sports):
                     if not len(parse_result.episode_numbers) and (
-                            parse_result.season_number != None and parse_result.season_number != season) or (
-                            parse_result.season_number == None and season != 1):
+                            parse_result.season_number != None and parse_result.season_number != ep_obj.season) or (
+                            parse_result.season_number == None and ep_obj.season != 1):
                         logger.log(u"The result " + title + " doesn't seem to be a valid season for season " + str(
-                            season) + ", ignoring", logger.DEBUG)
+                            ep_obj.season) + ", ignoring", logger.DEBUG)
                         continue
                     elif len(parse_result.episode_numbers) and (
-                            parse_result.season_number != season or episode not in parse_result.episode_numbers):
-                        logger.log(u"Episode " + title + " isn't " + str(season) + "x" + str(
-                            episode) + ", skipping it", logger.DEBUG)
+                            parse_result.season_number != ep_obj.season or ep_obj.episode not in parse_result.episode_numbers):
+                        logger.log(u"Episode " + title + " isn't " + str(ep_obj.season) + "x" + str(
+                            ep_obj.episode) + ", skipping it", logger.DEBUG)
                         continue

                 # we just use the existing info for normal searches
@@ -175,8 +175,6 @@ def snatchEpisode(result, endStatus=SNATCHED):


 def searchForNeededEpisodes(curProvider):
-    threading.currentThread().name = curProvider.name
-
     logger.log(u"Searching all providers for any needed episodes")
     foundResults = {}

@@ -361,34 +359,38 @@ def filterSearchResults(show, results):

     return foundResults

-def searchProviders(queueItem, show, season, episodes, curProvider, seasonSearch=False, manualSearch=False):
+def searchProviders(queueItem, show, season, episodes, seasonSearch=False, manualSearch=False):
     logger.log(u"Searching for stuff we need from " + show.name + " season " + str(season))
-    foundResults = {}
     finalResults = []
+    didSearch = False

-    # convert indexer numbering to scene numbering for searches
-    map(lambda x: x.convertToSceneNumbering, episodes)
+    providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]

+    for provider in providers:
+        foundResults = {provider.name:{}}

         try:
-            curResults = curProvider.findSearchResults(show, season, episodes, seasonSearch, manualSearch)
+            curResults = provider.findSearchResults(show, season, episodes, seasonSearch, manualSearch)
         except exceptions.AuthException, e:
             logger.log(u"Authentication error: " + ex(e), logger.ERROR)
             return []
         except Exception, e:
-            logger.log(u"Error while searching " + curProvider.name + ", skipping: " + ex(e), logger.ERROR)
+            logger.log(u"Error while searching " + provider.name + ", skipping: " + ex(e), logger.ERROR)
             logger.log(traceback.format_exc(), logger.DEBUG)
             return []

+        didSearch = True

         if not len(curResults):
-            return []
+            continue

         curResults = filterSearchResults(show, curResults)
         if len(curResults):
-            foundResults.update(curResults)
+            foundResults[provider.name] = curResults
             logger.log(u"Provider search results: " + repr(foundResults), logger.DEBUG)

-        if not len(foundResults):
-            return []
+        if not len(foundResults[provider.name]):
+            continue

         anyQualities, bestQualities = Quality.splitQuality(show.quality)

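With the provider loop now inside searchProviders, results are collected per provider in a dict of dicts: the outer key is provider.name and the inner keys are episode numbers (or the MULTI_EP_RESULT / SEASON_RESULT markers), each mapping to a list of results. A rough sketch of that shape, using plain strings in place of real provider and result objects:

# Rough sketch of the foundResults structure built above. It is re-created as
# {provider.name: {}} for each provider, then filled with per-episode buckets.
# Provider names and results here are illustrative strings, not sickbeard objects.
for provider_name in ["ProviderA", "ProviderB"]:
    foundResults = {provider_name: {}}
    foundResults[provider_name][1] = ["result for episode 1"]
    foundResults[provider_name].setdefault(2, []).append("result for episode 2")

    # reads mirror the foundResults[provider.name][epNum] lookups in later hunks
    for ep_num, results in foundResults[provider_name].items():
        print("%s ep %s -> %d result(s)" % (provider_name, ep_num, len(results)))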
@@ -398,8 +400,8 @@ def searchProviders(queueItem, show, season, episodes, curProvider, seasonSearch
         bestSeasonNZB = pickBestResult(foundResults[SEASON_RESULT], show, anyQualities + bestQualities)

         highest_quality_overall = 0
-        for cur_episode in foundResults:
-            for cur_result in foundResults[cur_episode]:
+        for cur_episode in foundResults[provider.name]:
+            for cur_result in foundResults[provider.name][cur_episode]:
                 if cur_result.quality != Quality.UNKNOWN and cur_result.quality > highest_quality_overall:
                     highest_quality_overall = cur_result.quality
         logger.log(u"The highest quality of any match is " + Quality.qualityStrings[highest_quality_overall], logger.DEBUG)
@@ -465,10 +467,10 @@ def searchProviders(queueItem, show, season, episodes, curProvider, seasonSearch
                 elif len(curResult.episodes) > 1:
                     epNum = MULTI_EP_RESULT

-                if epNum in foundResults:
-                    foundResults[epNum].append(curResult)
+                if epNum in foundResults[provider.name]:
+                    foundResults[provider.name][epNum] += curResult
                 else:
-                    foundResults[epNum] = [curResult]
+                    foundResults[provider.name][epNum] = [curResult]

             # If this is a torrent all we can do is leech the entire torrent, user will have to select which eps not do download in his torrent client
             else:
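Note that the bucket update in this hunk changes from foundResults[epNum].append(curResult) to foundResults[provider.name][epNum] += curResult. In plain Python, += on a list is an in-place extend and expects an iterable on the right-hand side, so the two forms are only equivalent when the right-hand side is itself a list; a short illustration:

# list.append adds one object; list += extends with an iterable's elements.
bucket = ["existing result"]
bucket.append("another result")     # -> ['existing result', 'another result']
bucket += ["a list of one result"]  # -> [..., 'a list of one result']

try:
    bucket += 42                    # a bare, non-iterable object raises TypeError
except TypeError as err:
    print("list += non-iterable fails: %s" % err)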
@@ -482,15 +484,15 @@ def searchProviders(queueItem, show, season, episodes, curProvider, seasonSearch
             bestSeasonNZB.episodes = epObjs

             epNum = MULTI_EP_RESULT
-            if epNum in foundResults:
-                foundResults[epNum].append(bestSeasonNZB)
+            if epNum in foundResults[provider.name]:
+                foundResults[provider.name][epNum] += bestSeasonNZB
             else:
-                foundResults[epNum] = [bestSeasonNZB]
+                foundResults[provider.name][epNum] = [bestSeasonNZB]

         # go through multi-ep results and see if we really want them or not, get rid of the rest
         multiResults = {}
-        if MULTI_EP_RESULT in foundResults:
-            for multiResult in foundResults[MULTI_EP_RESULT]:
+        if MULTI_EP_RESULT in foundResults[provider.name]:
+            for multiResult in foundResults[provider.name][MULTI_EP_RESULT]:

                 logger.log(u"Seeing if we want to bother with multi-episode result " + multiResult.name, logger.DEBUG)

@@ -505,7 +507,7 @@ def searchProviders(queueItem, show, season, episodes, curProvider, seasonSearch
             for epObj in multiResult.episodes:
                 epNum = epObj.episode
                 # if we have results for the episode
-                if epNum in foundResults and len(foundResults[epNum]) > 0:
+                if epNum in foundResults[provider.name] and len(foundResults[provider.name][epNum]) > 0:
                     # but the multi-ep is worse quality, we don't want it
                     # TODO: wtf is this False for
                     #if False and multiResult.quality <= pickBestResult(foundResults[epNum]):
@@ -551,22 +553,36 @@ def searchProviders(queueItem, show, season, episodes, curProvider, seasonSearch
             # don't bother with the single result if we're going to get it with a multi result
             for epObj in multiResult.episodes:
                 epNum = epObj.episode
-                if epNum in foundResults:
+                if epNum in foundResults[provider.name]:
                     logger.log(u"A needed multi-episode result overlaps with a single-episode result for ep #" + str(
                         epNum) + ", removing the single-episode results from the list", logger.DEBUG)
-                    del foundResults[epNum]
+                    del foundResults[provider.name][epNum]

         finalResults += set(multiResults.values())

         # of all the single ep results narrow it down to the best one for each episode
-        for curEp in foundResults:
+        for curEp in foundResults[provider.name]:
             if curEp in (MULTI_EP_RESULT, SEASON_RESULT):
                 continue

-            if len(foundResults[curEp]) == 0:
+            if len(foundResults[provider.name][curEp]) == 0:
                 continue

-            finalResults.append(pickBestResult(foundResults[curEp], show))
+            result = pickBestResult(foundResults[provider.name][curEp], show)
+            finalResults.append(result)
+
+            logger.log(u"Checking if we should snatch " + result.name, logger.DEBUG)
+            any_qualities, best_qualities = Quality.splitQuality(show.quality)
+
+            # if there is a redownload that's higher than this then we definitely need to keep looking
+            if best_qualities and result.quality == max(best_qualities):
+                queueItem.results = [result]
+                return queueItem
+
+            # if there's no redownload that's higher (above) and this is the highest initial download then we're good
+            elif any_qualities and result.quality in any_qualities:
+                queueItem.results = [result]
+                return queueItem

     queueItem.results = finalResults
     return queueItem
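The lines added at the end of this hunk pull the old SnatchQueue.process_results decision into searchProviders itself: Quality.splitQuality divides the show's quality setting into initial and re-download tiers, and the loop returns a single-result queue item as soon as a pick hits either the top re-download quality or any acceptable initial quality. A self-contained sketch of that decision, with bare integers standing in for the project's quality constants and a local helper in place of the inline checks:

# Sketch of the snatch decision added above. should_snatch_now is a local
# stand-in for the inline checks; the integer "qualities" are illustrative,
# not the real Quality constants returned by Quality.splitQuality.
def should_snatch_now(result_quality, any_qualities, best_qualities):
    # a re-download tier exists and this result already reaches its ceiling
    if best_qualities and result_quality == max(best_qualities):
        return True
    # otherwise accept any of the initial-download qualities
    if any_qualities and result_quality in any_qualities:
        return True
    return False

any_qualities, best_qualities = [10, 20], [40]
print(should_snatch_now(40, any_qualities, best_qualities))  # True: best re-download quality
print(should_snatch_now(20, any_qualities, best_qualities))  # True: acceptable initial quality
print(should_snatch_now(30, any_qualities, best_qualities))  # False: keep looking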
@@ -29,7 +29,6 @@ from sickbeard import db, logger, common, exceptions, helpers
 from sickbeard import generic_queue, scheduler
 from sickbeard import search, failed_history, history
 from sickbeard import ui
-from sickbeard.snatch_queue import SnatchQueue

 search_queue_lock = threading.Lock()

@@ -89,6 +88,14 @@ class SearchQueue(generic_queue.GenericQueue):
         else:
             logger.log(u"Not adding item, it's already in the queue", logger.DEBUG)

+    def snatch_item(self, item):
+        for result in item.results:
+            # just use the first result for now
+            logger.log(u"Downloading " + result.name + " from " + result.provider.name)
+            status = search.snatchEpisode(result)
+            item.success = status
+            generic_queue.QueueItem.finish(item)
+            return status

 class ManualSearchQueueItem(generic_queue.QueueItem):
     def __init__(self, ep_obj):
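With snatch_queue.py removed (see the deleted file further down), snatching moves onto SearchQueue itself: searchProviders returns the queue item with its results attached, and snatch_item downloads the first result and finishes the item. A minimal mock of that flow; the stub classes and the hard-coded status below stand in for the real sickbeard result objects, search.snatchEpisode and generic_queue.QueueItem.finish:

# Minimal mock of the snatch_item flow added above, with stubs in place of the
# real sickbeard objects. Only the call order is meant to match the diff.
class FakeResult(object):
    def __init__(self, name):
        self.name = name

class FakeQueueItem(object):
    def __init__(self, results):
        self.results = results
        self.success = None
        self.finished = False

def snatch_item(item):
    for result in item.results:
        # just use the first result for now (mirrors the comment in the diff)
        print("Downloading %s" % result.name)
        status = True          # stand-in for search.snatchEpisode(result)
        item.success = status
        item.finished = True   # stand-in for generic_queue.QueueItem.finish(item)
        return status

item = FakeQueueItem([FakeResult("Show.S01E01.720p"), FakeResult("Show.S01E01.HDTV")])
print(snatch_item(item))       # True; only the first result is snatched

The queue items below are simplified to match: they call search.searchProviders(...) without a per-provider argument and hand the returned item straight to SearchQueue().snatch_item(...).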
@@ -104,31 +111,15 @@ class ManualSearchQueueItem(generic_queue.QueueItem):

         didSearch = False

-        providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]
         try:
-            for provider in providers:
-                thread_name = self.thread_name + str(provider.name).upper()
-                threading.currentThread().name = thread_name

             logger.log("Beginning manual search for [" + self.ep_obj.prettyName() + "]")
-            searchResult = search.searchProviders(self, self.show, self.ep_obj.season, [self.ep_obj], provider,
-                                                  False,
-                                                  True)
+            searchResult = search.searchProviders(self, self.show, self.ep_obj.season, [self.ep_obj],False,True)

-            didSearch = True
             if searchResult:
-                self.success = SnatchQueue().process_results(searchResult)
-                if self.success:
-                    break
+                self.success = SearchQueue().snatch_item(searchResult)

         except Exception:
             logger.log(traceback.format_exc(), logger.DEBUG)
-            stop = True

-        if not didSearch:
-            logger.log(
-                u"No NZB/Torrent providers found or enabled in your SickRage config. Please check your settings.",
-                logger.ERROR)

         if not self.success:
             ui.notifications.message('No downloads were found',
@@ -194,28 +185,15 @@ class BacklogQueueItem(generic_queue.QueueItem):
         providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive() and x]

         try:
-            for provider in providers:
-                thread_name = self.thread_name + str(provider.name).upper()
-                threading.currentThread().name = thread_name

             logger.log("Beginning backlog search for episodes from [" + self.show.name + "] - Season[" + str(self.segment) + "]")
-            searchResult = search.searchProviders(self, self.show, self.segment, self.wantedEpisodes, provider,
-                                                  seasonSearch, False)
+            searchResult = search.searchProviders(self, self.show, self.segment, self.wantedEpisodes, seasonSearch, False)

-            didSearch = True
             if searchResult:
-                self.success = SnatchQueue().process_results(searchResult)
-                if self.success:
-                    break
+                self.success = SearchQueue().snatch_item(searchResult)

         except Exception:
             logger.log(traceback.format_exc(), logger.DEBUG)

-        if not didSearch:
-            logger.log(
-                u"No NZB/Torrent providers found or enabled in your SickRage config. Please check your settings.",
-                logger.ERROR)

         if not self.success:
             logger.log(u"No needed episodes found during backlog search")

@@ -274,30 +252,16 @@ class FailedQueueItem(generic_queue.QueueItem):
         providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]

         try:
-            for provider in providers:
-                thread_name = self.thread_name + str(provider.name).upper()
-                threading.currentThread().name = thread_name

             logger.log(
                 "Beginning failed download search for episodes from Season [" + self.episodes[0].season + "]")

-            searchResult = search.searchProviders(self.show, self.episodes[0].season, self.episodes, provider,
-                                                  False, True)
+            searchResult = search.searchProviders(self.show, self.episodes[0].season, self.episodes, False, True)

-            didSearch = True
             if searchResult:
-                self.success = SnatchQueue().process_results(searchResult)
-                if self.success:
-                    break
+                self.success = SearchQueue().snatch_item(searchResult)

         except Exception, e:
             logger.log(traceback.format_exc(), logger.DEBUG)

-        if not didSearch:
-            logger.log(
-                u"No NZB/Torrent providers found or enabled in your SickRage config. Please check your settings.",
-                logger.ERROR)

         if not self.success:
             logger.log(u"No needed episodes found on the RSS feeds")

@@ -1,83 +0,0 @@
-import Queue
-import threading
-
-import sickbeard
-from sickbeard import logger, search, generic_queue, ui
-from sickbeard.common import Quality
-
-snatch_queue_lock = threading.Lock()
-
-class SnatchQueue(generic_queue.GenericQueue):
-    def __init__(self):
-        generic_queue.GenericQueue.__init__(self)
-        self.queue_name = "SNATCHQUEUE"
-
-        # snatch queues
-        self.ManualQueue = Queue.Queue()
-        self.BacklogQueue = Queue.Queue()
-        self.FailedQueue = Queue.Queue()
-
-    def is_in_queue(self, queue, show, episodes, quality):
-        for i, cur_item in enumerate(queue.queue):
-            if cur_item.results.show == show and cur_item.results.episodes.sort() == episodes.sort():
-                if cur_item.results.quality < quality:
-                    queue.queue.pop(i)
-                    return False
-                return True
-        return False
-
-    def add_item(self, item):
-        resultsKeep = []
-        for result in item.results:
-            show = result.extraInfo[0]
-            episodes = result.episodes
-            quality = result.quality
-
-            # check if we already have a item ready to snatch with same or better quality score
-            if not self.is_in_queue(self.queue, show, episodes, quality):
-                generic_queue.GenericQueue.add_item(self, item)
-                resultsKeep.append(result)
-                logger.log(
-                    u"Adding item [" + result.name + "] to snatch queue",
-                    logger.DEBUG)
-            else:
-                logger.log(
-                    u"Not adding item [" + result.name + "] it's already in the queue with same or higher quality",
-                    logger.DEBUG)
-
-        # update item with new results we want to snatch and disgard the rest
-        item.results = resultsKeep
-
-    def snatch_item(self, item):
-        for result in item.results:
-            # just use the first result for now
-            logger.log(u"Downloading " + result.name + " from " + result.provider.name)
-            status = search.snatchEpisode(result)
-            item.success = status
-            generic_queue.QueueItem.finish(item)
-            return status
-
-    def process_results(self, item):
-        # dynamically select our snatch queue
-        if isinstance(item, sickbeard.search_queue.ManualSearchQueueItem):
-            self.queue = self.ManualQueue
-        elif isinstance(item, sickbeard.search_queue.BacklogQueueItem):
-            self.queue = self.BacklogQueue
-        elif isinstance(item, sickbeard.search_queue.FailedQueueItem):
-            self.queue = self.FailedQueue
-
-        for result in item.results:
-            logger.log(u"Checking if we should snatch " + result.name, logger.DEBUG)
-            show_obj = result.episodes[0].show
-            any_qualities, best_qualities = Quality.splitQuality(show_obj.quality)
-
-            # if there is a redownload that's higher than this then we definitely need to keep looking
-            if best_qualities and result.quality == max(best_qualities):
-                return self.snatch_item(item)
-
-            # if there's no redownload that's higher (above) and this is the highest initial download then we're good
-            elif any_qualities and result.quality in any_qualities:
-                return self.snatch_item(item)
-
-        # Add item to queue if we couldn't find a match to snatch
-        self.add_item(item)
@@ -1158,6 +1158,8 @@ class TVEpisode(object):

         self.specifyEpisode(self.season, self.episode)

+        self.convertToSceneNumbering()
+
         self.relatedEps = []

         self.checkForMetaFiles()

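Calling convertToSceneNumbering in the TVEpisode constructor is what makes the scene_season / scene_episode attributes available to the provider code in the hunks near the top. A hypothetical sketch of what such a conversion amounts to; the dict-based mapping below is a stand-in for whatever lookup the real method performs, not the project's actual API:

# Hypothetical sketch only: scene numbering defaults to the indexer numbering
# and is overridden where a scene mapping exists. SCENE_MAP is a stand-in for
# the project's real scene-numbering lookup.
SCENE_MAP = {(4, 1): (3, 23)}  # (indexer_season, indexer_episode) -> (scene_season, scene_episode)

class EpisodeSketch(object):
    def __init__(self, season, episode):
        self.season, self.episode = season, episode
        self.scene_season, self.scene_episode = season, episode
        self.convertToSceneNumbering()

    def convertToSceneNumbering(self):
        key = (self.season, self.episode)
        self.scene_season, self.scene_episode = SCENE_MAP.get(key, key)

ep = EpisodeSketch(4, 1)
print((ep.scene_season, ep.scene_episode))  # (3, 23): scene numbering differs here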