mirror of
https://github.com/SickGear/SickGear.git
synced 2024-12-01 00:43:37 +00:00
During backlog/manual/failed searches we now cache discarded/ignored results pre-parsed for later use, in case we end up setting an episode to wanted that matches said results; this allows for maximum performance and helps limit wasted resources.
This commit is contained in:
parent
a317ff61c2
commit
3a2b67330c
3 changed files with 70 additions and 60 deletions
|
@ -281,6 +281,7 @@ class GenericProvider:
|
||||||
itemList += itemsUnknown if itemsUnknown else []
|
itemList += itemsUnknown if itemsUnknown else []
|
||||||
|
|
||||||
# filter results
|
# filter results
|
||||||
|
cl = []
|
||||||
for item in itemList:
|
for item in itemList:
|
||||||
(title, url) = self._get_title_and_url(item)
|
(title, url) = self._get_title_and_url(item)
|
||||||
|
|
||||||
|
@ -299,53 +300,61 @@ class GenericProvider:
|
||||||
quality = parse_result.quality
|
quality = parse_result.quality
|
||||||
release_group = parse_result.release_group
|
release_group = parse_result.release_group
|
||||||
|
|
||||||
actual_season = None
|
addCacheEntry = False
|
||||||
actual_episodes = None
|
|
||||||
|
|
||||||
if not (showObj.air_by_date or showObj.sports):
|
if not (showObj.air_by_date or showObj.sports):
|
||||||
if search_mode == 'sponly' and len(parse_result.episode_numbers):
|
if search_mode == 'sponly' and len(parse_result.episode_numbers):
|
||||||
logger.log(
|
logger.log(
|
||||||
u"This is supposed to be a season pack search but the result " + title + " is not a valid season pack, skipping it",
|
u"This is supposed to be a season pack search but the result " + title + " is not a valid season pack, skipping it",
|
||||||
logger.DEBUG)
|
logger.DEBUG)
|
||||||
continue
|
addCacheEntry = True
|
||||||
|
else:
|
||||||
|
if not len(parse_result.episode_numbers) and (
|
||||||
|
parse_result.season_number and parse_result.season_number != season) or (
|
||||||
|
not parse_result.season_number and season != 1):
|
||||||
|
logger.log(u"The result " + title + " doesn't seem to be a valid season that we are trying to snatch, ignoring",
|
||||||
|
logger.DEBUG)
|
||||||
|
addCacheEntry = True
|
||||||
|
elif len(parse_result.episode_numbers) and (
|
||||||
|
parse_result.season_number != season or not [ep for ep in episodes if
|
||||||
|
ep.scene_episode in parse_result.episode_numbers]):
|
||||||
|
logger.log(u"The result " + title + " doesn't seem to be a valid episode that we are trying to snatch, ignoring",
|
||||||
|
logger.DEBUG)
|
||||||
|
addCacheEntry = True
|
||||||
|
|
||||||
if not len(parse_result.episode_numbers) and (
|
if not addCacheEntry:
|
||||||
parse_result.season_number and parse_result.season_number != season) or (
|
# we just use the existing info for normal searches
|
||||||
not parse_result.season_number and season != 1):
|
actual_season = season
|
||||||
logger.log(u"The result " + title + " doesn't seem to be a valid season that we want, ignoring",
|
actual_episodes = parse_result.episode_numbers
|
||||||
logger.DEBUG)
|
|
||||||
continue
|
|
||||||
elif len(parse_result.episode_numbers) and (
|
|
||||||
parse_result.season_number != season or not [ep for ep in episodes if
|
|
||||||
ep.scene_episode in parse_result.episode_numbers]):
|
|
||||||
logger.log(u"The result " + title + " doesn't seem to be a valid episode that we want, ignoring",
|
|
||||||
logger.DEBUG)
|
|
||||||
continue
|
|
||||||
|
|
||||||
# we just use the existing info for normal searches
|
|
||||||
actual_season = season
|
|
||||||
actual_episodes = parse_result.episode_numbers
|
|
||||||
else:
|
else:
|
||||||
if not (parse_result.is_air_by_date or parse_result.is_sports):
|
if not (parse_result.is_air_by_date or parse_result.is_sports):
|
||||||
logger.log(
|
logger.log(
|
||||||
u"This is supposed to be a date search but the result " + title + " didn't parse as one, skipping it",
|
u"This is supposed to be a date search but the result " + title + " didn't parse as one, skipping it",
|
||||||
logger.DEBUG)
|
logger.DEBUG)
|
||||||
continue
|
addCacheEntry = True
|
||||||
|
else:
|
||||||
|
airdate = parse_result.air_date.toordinal() if parse_result.air_date else parse_result.sports_air_date.toordinal()
|
||||||
|
myDB = db.DBConnection()
|
||||||
|
sql_results = myDB.select(
|
||||||
|
"SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
|
||||||
|
[showObj.indexerid, airdate])
|
||||||
|
|
||||||
airdate = parse_result.air_date.toordinal() if parse_result.air_date else parse_result.sports_air_date.toordinal()
|
if len(sql_results) != 1:
|
||||||
myDB = db.DBConnection()
|
logger.log(
|
||||||
sql_results = myDB.select(
|
u"Tried to look up the date for the episode " + title + " but the database didn't give proper results, skipping it",
|
||||||
"SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
|
logger.WARNING)
|
||||||
[showObj.indexerid, airdate])
|
addCacheEntry = True
|
||||||
|
|
||||||
if len(sql_results) != 1:
|
if not addCacheEntry:
|
||||||
logger.log(
|
actual_season = int(sql_results[0]["season"])
|
||||||
u"Tried to look up the date for the episode " + title + " but the database didn't give proper results, skipping it",
|
actual_episodes = [int(sql_results[0]["episode"])]
|
||||||
logger.WARNING)
|
|
||||||
continue
|
|
||||||
|
|
||||||
actual_season = int(sql_results[0]["season"])
|
# add parsed result to cache for usage later on
|
||||||
actual_episodes = [int(sql_results[0]["episode"])]
|
if addCacheEntry:
|
||||||
|
logger.log(u"Adding item from search to cache: " + title, logger.DEBUG)
|
||||||
|
ci = self.cache._addCacheEntry(title, url, parse_result=parse_result)
|
||||||
|
if ci is not None:
|
||||||
|
cl.append(ci)
|
||||||
|
continue
|
||||||
|
|
||||||
# make sure we want the episode
|
# make sure we want the episode
|
||||||
wantEp = True
|
wantEp = True
|
||||||
|
@ -396,6 +405,11 @@ class GenericProvider:
|
||||||
else:
|
else:
|
||||||
results[epNum].append(result)
|
results[epNum].append(result)
|
||||||
|
|
||||||
|
# check if we have items to add to cache
|
||||||
|
if len(cl) > 0:
|
||||||
|
myDB = self.cache._getDB()
|
||||||
|
myDB.mass_action(cl)
|
||||||
|
|
||||||
return results
|
return results
|
||||||
|
|
||||||
def findPropers(self, search_date=None):
|
def findPropers(self, search_date=None):
|
||||||
|
|
|
@ -22,7 +22,6 @@ import sys
|
||||||
import time
|
import time
|
||||||
import urllib, urlparse
|
import urllib, urlparse
|
||||||
|
|
||||||
from xml.dom.minidom import parseString
|
|
||||||
from datetime import datetime, timedelta
|
from datetime import datetime, timedelta
|
||||||
|
|
||||||
import sickbeard
|
import sickbeard
|
||||||
|
@ -352,13 +351,9 @@ class NewzbinCache(tvcache.TVCache):
|
||||||
logger.ERROR)
|
logger.ERROR)
|
||||||
return
|
return
|
||||||
|
|
||||||
quality = self.provider.getQuality(item)
|
|
||||||
|
|
||||||
logger.log("Found quality " + str(quality), logger.DEBUG)
|
|
||||||
|
|
||||||
logger.log(u"RSS Feed provider: [" + self.provider.name + "] Attempting to add item to cache: " + title, logger.DEBUG)
|
logger.log(u"RSS Feed provider: [" + self.provider.name + "] Attempting to add item to cache: " + title, logger.DEBUG)
|
||||||
|
|
||||||
self._addCacheEntry(title, url, quality=quality)
|
self._addCacheEntry(title, url)
|
||||||
|
|
||||||
|
|
||||||
provider = NewzbinProvider()
|
provider = NewzbinProvider()
|
||||||
|
|
|
@ -140,11 +140,9 @@ class TVCache():
|
||||||
def _translateTitle(self, title):
|
def _translateTitle(self, title):
|
||||||
return u'' + title.replace(' ', '.')
|
return u'' + title.replace(' ', '.')
|
||||||
|
|
||||||
|
|
||||||
def _translateLinkURL(self, url):
|
def _translateLinkURL(self, url):
|
||||||
return url.replace('&', '&')
|
return url.replace('&', '&')
|
||||||
|
|
||||||
|
|
||||||
def _parseItem(self, item):
|
def _parseItem(self, item):
|
||||||
title = item.title
|
title = item.title
|
||||||
url = item.link
|
url = item.link
|
||||||
|
@ -231,26 +229,30 @@ class TVCache():
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def _addCacheEntry(self, name, url, indexer_id=0, quality=None):
|
def _addCacheEntry(self, name, url, parse_result=None, indexer_id=0):
|
||||||
|
|
||||||
# create showObj from indexer_id if available
|
# check if we passed in a parsed result or should we try and create one
|
||||||
showObj=None
|
if not parse_result:
|
||||||
if indexer_id:
|
|
||||||
showObj = helpers.findCertainShow(sickbeard.showList, indexer_id)
|
|
||||||
|
|
||||||
try:
|
# create showObj from indexer_id if available
|
||||||
myParser = NameParser(showObj=showObj, convert=True)
|
showObj=None
|
||||||
parse_result = myParser.parse(name)
|
if indexer_id:
|
||||||
except InvalidNameException:
|
showObj = helpers.findCertainShow(sickbeard.showList, indexer_id)
|
||||||
logger.log(u"Unable to parse the filename " + name + " into a valid episode", logger.DEBUG)
|
|
||||||
return None
|
|
||||||
except InvalidShowException:
|
|
||||||
logger.log(u"Unable to parse the filename " + name + " into a valid show", logger.DEBUG)
|
|
||||||
return None
|
|
||||||
|
|
||||||
if not parse_result or not parse_result.series_name:
|
try:
|
||||||
return None
|
myParser = NameParser(showObj=showObj, convert=True)
|
||||||
|
parse_result = myParser.parse(name)
|
||||||
|
except InvalidNameException:
|
||||||
|
logger.log(u"Unable to parse the filename " + name + " into a valid episode", logger.DEBUG)
|
||||||
|
return None
|
||||||
|
except InvalidShowException:
|
||||||
|
logger.log(u"Unable to parse the filename " + name + " into a valid show", logger.DEBUG)
|
||||||
|
return None
|
||||||
|
|
||||||
|
if not parse_result or not parse_result.series_name:
|
||||||
|
return None
|
||||||
|
|
||||||
|
# if we made it this far then lets add the parsed result to cache for usager later on
|
||||||
season = episodes = None
|
season = episodes = None
|
||||||
if parse_result.is_air_by_date or parse_result.is_sports:
|
if parse_result.is_air_by_date or parse_result.is_sports:
|
||||||
airdate = parse_result.air_date.toordinal() if parse_result.air_date else parse_result.sports_air_date.toordinal()
|
airdate = parse_result.air_date.toordinal() if parse_result.air_date else parse_result.sports_air_date.toordinal()
|
||||||
|
@ -263,7 +265,7 @@ class TVCache():
|
||||||
season = int(sql_results[0]["season"])
|
season = int(sql_results[0]["season"])
|
||||||
episodes = [int(sql_results[0]["episode"])]
|
episodes = [int(sql_results[0]["episode"])]
|
||||||
else:
|
else:
|
||||||
season = parse_result.season_number if parse_result.season_number != None else 1
|
season = parse_result.season_number if parse_result.season_number else 1
|
||||||
episodes = parse_result.episode_numbers
|
episodes = parse_result.episode_numbers
|
||||||
|
|
||||||
if season and episodes:
|
if season and episodes:
|
||||||
|
@ -274,8 +276,7 @@ class TVCache():
|
||||||
curTimestamp = int(time.mktime(datetime.datetime.today().timetuple()))
|
curTimestamp = int(time.mktime(datetime.datetime.today().timetuple()))
|
||||||
|
|
||||||
# get quality of release
|
# get quality of release
|
||||||
if quality is None:
|
quality = parse_result.quality
|
||||||
quality = Quality.sceneQuality(name, parse_result.is_anime)
|
|
||||||
|
|
||||||
if not isinstance(name, unicode):
|
if not isinstance(name, unicode):
|
||||||
name = unicode(name, 'utf-8', 'replace')
|
name = unicode(name, 'utf-8', 'replace')
|
||||||
|
|
Loading…
Reference in a new issue