During backlog, manual, and failed searches we now cache discarded/ignored results, pre-parsed, for later use in case we end up setting an episode to Wanted that matches one of those results. This gives maximum performance and limits wasted resources.

echel0n 2014-07-21 23:00:58 -07:00
parent a317ff61c2
commit 3a2b67330c
3 changed files with 70 additions and 60 deletions
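In outline, the change turns "parse, reject, forget" into "parse, reject, remember". A minimal, self-contained sketch of the pattern (the names below are illustrative stand-ins, not the actual SickRage NameParser/provider API):

# Illustrative sketch only; parse_title, filter_results and the dict layout
# are stand-ins, not the real SickRage code.

def parse_title(title):
    # stand-in for NameParser: split "Show.S01E02.720p" apart exactly once
    show, _, epcode = title.partition('.S')
    season_str, _, rest = epcode.partition('E')
    return {'show': show, 'season': int(season_str), 'episode': int(rest.split('.')[0])}

def filter_results(titles, wanted_season, wanted_episodes):
    matches, cache_batch = [], []
    for title in titles:
        parsed = parse_title(title)  # each result is parsed once, up front
        if parsed['season'] != wanted_season or parsed['episode'] not in wanted_episodes:
            # discarded result: queue it, already parsed, for the cache instead of
            # dropping it, so setting a matching episode to Wanted later can reuse it
            cache_batch.append((title, parsed))
            continue
        matches.append((title, parsed))
    # the caller flushes cache_batch in one batched DB write (see mass_action below)
    return matches, cache_batch

matches, batch = filter_results(['Show.S01E02.720p', 'Show.S01E05.720p'],
                                wanted_season=1, wanted_episodes=[2])
print(matches)  # the S01E02 result is kept
print(batch)    # the S01E05 result is remembered for later instead of being wasted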

sickbeard/providers/generic.py

@@ -281,6 +281,7 @@ class GenericProvider:
             itemList += itemsUnknown if itemsUnknown else []
 
         # filter results
+        cl = []
         for item in itemList:
             (title, url) = self._get_title_and_url(item)
@@ -299,29 +300,28 @@ class GenericProvider:
             quality = parse_result.quality
             release_group = parse_result.release_group
 
+            actual_season = None
+            actual_episodes = None
+            addCacheEntry = False
             if not (showObj.air_by_date or showObj.sports):
                 if search_mode == 'sponly' and len(parse_result.episode_numbers):
                     logger.log(
                         u"This is supposed to be a season pack search but the result " + title + " is not a valid season pack, skipping it",
                         logger.DEBUG)
-                    continue
+                    addCacheEntry = True
                 else:
                     if not len(parse_result.episode_numbers) and (
                             parse_result.season_number and parse_result.season_number != season) or (
                             not parse_result.season_number and season != 1):
-                        logger.log(u"The result " + title + " doesn't seem to be a valid season that we want, ignoring",
+                        logger.log(u"The result " + title + " doesn't seem to be a valid season that we are trying to snatch, ignoring",
                                    logger.DEBUG)
-                        continue
+                        addCacheEntry = True
                     elif len(parse_result.episode_numbers) and (
                             parse_result.season_number != season or not [ep for ep in episodes if
                                                                          ep.scene_episode in parse_result.episode_numbers]):
-                        logger.log(u"The result " + title + " doesn't seem to be a valid episode that we want, ignoring",
+                        logger.log(u"The result " + title + " doesn't seem to be a valid episode that we are trying to snatch, ignoring",
                                    logger.DEBUG)
-                        continue
+                        addCacheEntry = True
 
+                if not addCacheEntry:
                     # we just use the existing info for normal searches
                     actual_season = season
                     actual_episodes = parse_result.episode_numbers
@@ -330,8 +330,8 @@ class GenericProvider:
                     logger.log(
                         u"This is supposed to be a date search but the result " + title + " didn't parse as one, skipping it",
                         logger.DEBUG)
-                    continue
+                    addCacheEntry = True
                 else:
                     airdate = parse_result.air_date.toordinal() if parse_result.air_date else parse_result.sports_air_date.toordinal()
 
                     myDB = db.DBConnection()
                     sql_results = myDB.select(
@@ -342,11 +342,20 @@ class GenericProvider:
                         logger.log(
                             u"Tried to look up the date for the episode " + title + " but the database didn't give proper results, skipping it",
                             logger.WARNING)
-                        continue
+                        addCacheEntry = True
 
+                if not addCacheEntry:
                     actual_season = int(sql_results[0]["season"])
                     actual_episodes = [int(sql_results[0]["episode"])]
 
+            # add parsed result to cache for usage later on
+            if addCacheEntry:
+                logger.log(u"Adding item from search to cache: " + title, logger.DEBUG)
+                ci = self.cache._addCacheEntry(title, url, parse_result=parse_result)
+                if ci is not None:
+                    cl.append(ci)
+                continue
+
             # make sure we want the episode
             wantEp = True
             for epNo in actual_episodes:
@@ -396,6 +405,11 @@ class GenericProvider:
                 else:
                     results[epNum].append(result)
 
+        # check if we have items to add to cache
+        if len(cl) > 0:
+            myDB = self.cache._getDB()
+            myDB.mass_action(cl)
+
         return results
 
     def findPropers(self, search_date=None):
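The `cl` batch built above is flushed through a single `mass_action` call instead of one write per discarded result. `mass_action` itself is not part of this diff; assuming it executes a list of (sql, args) pairs inside one transaction, a rough sqlite3 equivalent would be:

import sqlite3

# Rough equivalent of the batched flush, assuming mass_action runs a list of
# (sql, args) pairs inside a single transaction (the real db.DBConnection
# implementation is not shown in this diff).
def mass_action(conn, query_list):
    with conn:  # one transaction, one commit, for the whole batch
        for sql, args in query_list:
            conn.execute(sql, args)

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE results (name TEXT, url TEXT)')
cl = [('INSERT INTO results (name, url) VALUES (?, ?)', (name, url))
      for name, url in [('Show.S01E05.720p', 'http://example.invalid/1'),
                        ('Show.S01E06.720p', 'http://example.invalid/2')]]
mass_action(conn, cl)  # single commit instead of one commit per discarded result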

sickbeard/providers/newzbin.py

@@ -22,7 +22,6 @@ import sys
 import time
 import urllib, urlparse
 
 from xml.dom.minidom import parseString
 from datetime import datetime, timedelta
 
 import sickbeard
@@ -352,13 +351,9 @@ class NewzbinCache(tvcache.TVCache):
                        logger.ERROR)
             return
 
-        quality = self.provider.getQuality(item)
-
-        logger.log("Found quality " + str(quality), logger.DEBUG)
 
         logger.log(u"RSS Feed provider: [" + self.provider.name + "] Attempting to add item to cache: " + title, logger.DEBUG)
 
-        self._addCacheEntry(title, url, quality=quality)
+        self._addCacheEntry(title, url)
 
 provider = NewzbinProvider()

sickbeard/tvcache.py

@@ -140,11 +140,9 @@ class TVCache():
     def _translateTitle(self, title):
         return u'' + title.replace(' ', '.')
 
     def _translateLinkURL(self, url):
         return url.replace('&amp;', '&')
 
     def _parseItem(self, item):
         title = item.title
         url = item.link
@@ -231,7 +229,10 @@ class TVCache():
         return True
 
-    def _addCacheEntry(self, name, url, indexer_id=0, quality=None):
+    def _addCacheEntry(self, name, url, parse_result=None, indexer_id=0):
+
+        # check if we passed in a parsed result or should we try and create one
+        if not parse_result:
 
             # create showObj from indexer_id if available
             showObj = None
@@ -251,6 +252,7 @@ class TVCache():
         if not parse_result or not parse_result.series_name:
             return None
 
+        # if we made it this far then let's add the parsed result to cache for later usage
         season = episodes = None
         if parse_result.is_air_by_date or parse_result.is_sports:
             airdate = parse_result.air_date.toordinal() if parse_result.air_date else parse_result.sports_air_date.toordinal()
@@ -263,7 +265,7 @@ class TVCache():
             season = int(sql_results[0]["season"])
             episodes = [int(sql_results[0]["episode"])]
         else:
-            season = parse_result.season_number if parse_result.season_number != None else 1
+            season = parse_result.season_number if parse_result.season_number else 1
             episodes = parse_result.episode_numbers
 
         if season and episodes:
@@ -274,8 +276,7 @@ class TVCache():
             curTimestamp = int(time.mktime(datetime.datetime.today().timetuple()))
 
             # get quality of release
-            if quality is None:
-                quality = Quality.sceneQuality(name, parse_result.is_anime)
+            quality = parse_result.quality
 
             if not isinstance(name, unicode):
                 name = unicode(name, 'utf-8', 'replace')
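Taken together, the tvcache.py changes let `_addCacheEntry` accept a pre-parsed result from the search path (parsing only when none is supplied) and read quality off the parse result instead of recomputing it with `Quality.sceneQuality`. A simplified sketch of that contract, using a plain dict rather than the real NameParser result:

# Simplified sketch of the new _addCacheEntry contract; parse_result here is a
# plain dict and the SQL is illustrative, so this is not a drop-in for TVCache.
def add_cache_entry(name, url, parse_result=None, parser=None):
    if parse_result is None and parser is not None:
        parse_result = parser(name)  # RSS path: nothing was pre-parsed, parse now
    if not parse_result:
        return None  # unparseable release name: nothing to cache
    # quality is read from the parsed result rather than recomputed from the
    # name (the old Quality.sceneQuality(name, ...) fallback was dropped)
    return ('INSERT INTO results (name, url, quality) VALUES (?, ?, ?)',
            (name, url, parse_result['quality']))

# search path: the provider already parsed the name, so it hands the result over
row = add_cache_entry('Show.S01E05.720p', 'http://example.invalid/1',
                      parse_result={'quality': 'HD720p'})
print(row)  # a (sql, args) pair, ready to be queued for mass_action()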