Fixes for backlog search.

Fixes for downloading the same episode twice.
Fixes for caches.
echel0n 2014-05-05 06:26:02 -07:00
parent 42218d1d04
commit b33e2be047
3 changed files with 117 additions and 113 deletions


@@ -945,25 +945,16 @@ def _check_against_names(name, show):
 def get_show_by_name(name):
-    showObj = None
-
-    if not sickbeard.showList:
-        return
-
-    indexerid = sickbeard.name_cache.retrieveNameFromCache(name)
-
-    showNames = list(set(sickbeard.show_name_helpers.sceneToNormalShowNames(name)))
-    for showName in showNames if not indexerid else []:
-        sceneResults = [x for x in sickbeard.showList if _check_against_names(showName, x)]
-        showObj = sceneResults[0] if len(sceneResults) else None
-        if showObj:
-            break
-
-    if showObj or indexerid:
-        logger.log(u"Found Indexer ID:[" + repr(indexerid) + "], using that for [" + str(name) + "}",logger.DEBUG)
-
-        if not showObj:
-            showObj = findCertainShow(sickbeard.showList, int(indexerid))
+    showObj = sickbeard.name_cache.retrieveShowFromCache(name)
+
+    if not showObj:
+        showNames = list(set(sickbeard.show_name_helpers.sceneToNormalShowNames(name)))
+        for showName in showNames if sickbeard.showList else []:
+            sceneResults = [x for x in sickbeard.showList if _check_against_names(showName, x)]
+            showObj = sceneResults[0] if len(sceneResults) else None
+            if showObj:
+                break

     return showObj

 def is_hidden_folder(folder):
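The change above makes get_show_by_name() cache-first: the name cache is asked for a show object directly, and scene-name matching against the show list is only the fallback on a cache miss, instead of resolving an indexer id and re-fetching the show afterwards. A minimal sketch of that lookup pattern, with hypothetical name_to_show, show_list and scene_names() stand-ins rather than the real sickbeard.name_cache API:

# Sketch of the cache-first lookup used by get_show_by_name().
# `name_to_show`, `show_list` and `scene_names()` are hypothetical stand-ins,
# not the actual sickbeard APIs.

name_to_show = {}   # scene name -> show object (the "name cache")
show_list = []      # all known show objects


def scene_names(name):
    # Stand-in for sceneToNormalShowNames(): candidate spellings of the name.
    return {name, name.replace('.', ' ')}


def get_show_by_name(name):
    # Fast path: the cache already maps this name to a show object.
    show = name_to_show.get(name)
    if show is not None:
        return show

    # Slow path: try every candidate spelling against the show list.
    for candidate in scene_names(name):
        matches = [s for s in show_list if candidate in getattr(s, 'names', [])]
        if matches:
            return matches[0]

    return None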


@@ -215,11 +215,12 @@ class GenericProvider:
         Returns: A tuple containing two strings representing title and URL respectively
         """

-        title = item.title
+        title = item.title if item.title else None
         if title:
             title = title.replace(' ', '.')

-        url = item.link
+        url = item.link if item.link else None
         if url:
             url = url.replace('&amp;', '&')
@@ -231,35 +232,42 @@ class GenericProvider:
         self.show = show

         results = {}
-        searchStrings = []
+        searchItems = {}

+        itemList = []
         for epObj in episodes:
-            itemList = []

             cacheResult = self.cache.searchCache(epObj, manualSearch)
             if len(cacheResult):
-                return cacheResult
+                results.update(cacheResult)
+                continue

             if epObj.show.air_by_date:
                 logger.log(u'Searching "%s" for "%s"' % (self.name, epObj.prettyABDName()))
             else:
-                logger.log(u'Searching "%s" for "%s" as "%s"' % (self.name, epObj.prettyName(), epObj.prettySceneName()))
+                logger.log(
+                    u'Searching "%s" for "%s" as "%s"' % (self.name, epObj.prettyName(), epObj.prettySceneName()))

-            # get our search strings
             if seasonSearch:
-                searchStrings += self._get_season_search_strings(epObj)
-            searchStrings += self._get_episode_search_strings(epObj)
-
-            # remove duplicate search strings
-            searchStrings = [i for n, i in enumerate(searchStrings) if i not in searchStrings[n + 1:]] if len(searchStrings) else []
-
-            for curString in sorted(searchStrings):
+                for curString in self._get_season_search_strings(epObj):
+                    itemList += self._doSearch(curString)
+            for curString in self._get_episode_search_strings(epObj):
                 itemList += self._doSearch(curString)

+            # next episode if no search results
+            if not itemList:
+                continue
+
             # remove duplicate items
-            itemList = [i for n, i in enumerate(itemList) if i not in itemList[n + 1:]] if len(itemList) else []
+            itemList = [i for n, i in enumerate(itemList) if i not in itemList[n + 1:]]

-            for item in itemList:
+            if epObj.episode in searchItems:
+                searchItems[epObj.episode] += itemList
+            else:
+                searchItems[epObj.episode] = itemList
+
+        for episode, items in searchItems.items():
+            for item in items:

                 (title, url) = self._get_title_and_url(item)

                 quality = self.getQuality(item)
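The important behavioural change in the hunk above is that a cache hit no longer aborts the search: the old return cacheResult ended findSearchResults() as soon as one episode had cached results, so the remaining backlog episodes were never searched. The new code merges each episode's cached results into the aggregate results dict and continues with the next episode. A minimal sketch of that accumulation pattern, with hypothetical search_cache() and do_search() callables standing in for the cache and provider APIs:

# Per-episode accumulation instead of an early return.
# `search_cache` and `do_search` are hypothetical callables, not the real API.

def find_search_results(episodes, search_cache, do_search):
    results = {}

    for ep in episodes:
        cached = search_cache(ep)
        if cached:
            # Merge cached hits and keep going, so episodes later in the
            # backlog still get searched instead of being silently skipped.
            results.update(cached)
            continue

        items = do_search(ep)
        if items:
            results.setdefault(ep, []).extend(items)

    return results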
@@ -273,14 +281,16 @@ class GenericProvider:
                         continue

                 if not (self.show.air_by_date or self.show.sports):
-                    if not parse_result.episode_numbers and (parse_result.season_number != None and parse_result.season_number != season) or (
+                    if not len(parse_result.episode_numbers) and (
+                            parse_result.season_number != None and parse_result.season_number != season) or (
                             parse_result.season_number == None and season != 1):
                         logger.log(u"The result " + title + " doesn't seem to be a valid season for season " + str(
                             season) + ", ignoring", logger.DEBUG)
                         continue
-                    elif len(parse_result.episode_numbers) and (parse_result.season_number != season or parse_result.episode_numbers[0] not in parse_result.episode_numbers):
+                    elif len(parse_result.episode_numbers) and (
+                            parse_result.season_number != season or episode not in parse_result.episode_numbers):
                         logger.log(u"Episode " + title + " isn't " + str(season) + "x" + str(
-                            parse_result.episode_numbers[0]) + ", skipping it", logger.DEBUG)
+                            episode) + ", skipping it", logger.DEBUG)
                         continue

                 # we just use the existing info for normal searches
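The second branch above also fixes the episode check itself: the old condition parse_result.episode_numbers[0] not in parse_result.episode_numbers tests a list element against its own list and can never be true, so results for the wrong episode were never skipped. The new condition compares against episode, the episode actually being searched. A small illustration, using a simplified stand-in for the parser's result object:

# Simplified stand-in for the name parser's result object.
class ParseResult(object):
    def __init__(self, season_number, episode_numbers):
        self.season_number = season_number
        self.episode_numbers = episode_numbers


season, episode = 3, 7              # what the search is actually for
parse_result = ParseResult(3, [8])  # what the release name parsed to

# Old check: always False, so the mismatched 3x08 result slipped through.
old_skip = parse_result.episode_numbers[0] not in parse_result.episode_numbers

# New check: True here, so the 3x08 result is skipped when searching 3x07.
new_skip = (parse_result.season_number != season
            or episode not in parse_result.episode_numbers)

assert old_skip is False and new_skip is True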
@@ -295,7 +305,8 @@ class GenericProvider:
                     myDB = db.DBConnection()
                     sql_results = myDB.select("SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
-                                              [show.indexerid, parse_result.air_date.toordinal() or parse_result.sports_event_date.toordinal()])
+                                              [show.indexerid,
+                                               parse_result.air_date.toordinal() or parse_result.sports_event_date.toordinal()])

                     if len(sql_results) != 1:
                         logger.log(
@@ -314,7 +325,9 @@ class GenericProvider:
                             break

                 if not wantEp:
-                    logger.log(u"Ignoring result " + title + " because we don't want an episode that is " + Quality.qualityStrings[quality], logger.DEBUG)
+                    logger.log(
+                        u"Ignoring result " + title + " because we don't want an episode that is " + Quality.qualityStrings[
+                            quality], logger.DEBUG)
                     continue

                 logger.log(u"Found result " + title + " at " + url, logger.DEBUG)
@@ -343,14 +356,14 @@ class GenericProvider:
                     result.extraInfo = [show]
                     logger.log(u"Separating full season result to check for later", logger.DEBUG)

-                if not result:
-                    continue
-
                 if epNum in results:
                     results[epNum].append(result)
                 else:
                     results[epNum] = [result]

+                # remove duplicate results
+                results[epNum] = list(set(results[epNum]))
+
         return results

     def findPropers(self, search_date=None):
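The last hunk is the other half of the duplicate-download fix: after a result is appended, the episode's list is collapsed with list(set(results[epNum])). Worth noting that a set only removes entries the result objects themselves consider equal, so this relies on their __eq__/__hash__ behaviour; with default object identity it is a no-op. A short illustration of the idea, with strings standing in for result objects:

# Deduplicating per-episode results via a set, as the new code does.
# Strings stand in for result objects; real objects need __eq__/__hash__
# that make identical releases compare equal for this to drop anything.
results = {7: ['Show.S03E07.720p', 'Show.S03E07.720p', 'Show.S03E07.1080p']}

for ep_num in results:
    results[ep_num] = list(set(results[ep_num]))

print(results)  # {7: ['Show.S03E07.1080p', 'Show.S03E07.720p']} (order may vary)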


@@ -350,7 +350,7 @@ def filterSearchResults(show, results):
     foundResults = {}

     # make a list of all the results for this provider
-    for curEp in results:
+    for curEp in results.keys():

         # skip non-tv crap
         results[curEp] = filter(
             lambda x: show_name_helpers.filterBadReleases(x.name) and show_name_helpers.isGoodResult(x.name, show),