Fixed issues with post-processing.

Fixed an issue with priority post-processing: the bug allowed a second attempt to go through even when priority settings were not selected.

Fixed issues with cache retrieval and storage of provider results; results are now stored with season and episode info converted to indexer numbering.

Fixed a bug that cached unconverted scene-numbered files for post-processing, causing us to skip converting them when needed.
echel0n 2014-05-26 03:42:34 -07:00
parent 7e711c0665
commit f7b11e1e98
8 changed files with 96 additions and 135 deletions
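
For context, a minimal sketch of the parsing flow this commit moves to. The NameParser/ParseResult names and the import come from the hunks below; the wrapper function and its arguments are illustrative only, not part of the commit:

# as imported inside the sickbeard package (see the tvcache.py hunk below)
from name_parser.parser import NameParser, InvalidNameException

def parse_release(name, use_cache=True):
    # parse() now takes cache_result, so throwaway checks (e.g. validateDir in
    # processTV.py) can skip the name-parser cache, and convert() no longer takes
    # a show argument: it reads the show attached to the ParseResult and maps
    # scene season/episode numbers to indexer numbering before anything is cached.
    try:
        parse_result = NameParser(False).parse(name, cache_result=use_cache).convert()
    except InvalidNameException:
        return None

    # callers now check parse_result.show instead of looking the show up again
    if parse_result.show is None:
        return None

    return (parse_result.show, parse_result.season_number, parse_result.episode_numbers)

Callers that only probe whether a name parses at all pass cache_result=False, which keeps junk names out of the parser cache; that is what the processTV.py hunks below do.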

View file

@@ -55,7 +55,7 @@ class FailedProcessor(object):
         parser = NameParser(False)
         try:
-            parsed = parser.parse(releaseName)
+            parsed = parser.parse(releaseName).convert()
         except InvalidNameException:
             self._log(u"Error: release name is invalid: " + releaseName, logger.WARNING)
             raise exceptions.FailedProcessingFailed()
@@ -69,22 +69,18 @@ class FailedProcessor(object):
         logger.log(u" - " + str(parsed.air_date), logger.DEBUG)
         logger.log(u" - " + str(parsed.sports_event_date), logger.DEBUG)

-        self._show_obj = sickbeard.helpers.get_show_by_name(parsed.series_name)
-        if self._show_obj is None:
+        if parsed.show is None:
             self._log(
                 u"Could not create show object. Either the show hasn't been added to SickRage, or it's still loading (if SB was restarted recently)",
                 logger.WARNING)
             raise exceptions.FailedProcessingFailed()

-        # scene -> indexer numbering
-        parsed = parsed.convert(self._show_obj)
-
         segment = {parsed.season_number:[]}
         for episode in parsed.episode_numbers:
-            epObj = self._show_obj.getEpisode(parsed.season_number, episode)
+            epObj = parsed.show.getEpisode(parsed.season_number, episode)
             segment[parsed.season_number].append(epObj)

-        cur_failed_queue_item = search_queue.FailedQueueItem(self._show_obj, segment)
+        cur_failed_queue_item = search_queue.FailedQueueItem(parsed.show, segment)
         sickbeard.searchQueueScheduler.action.add_item(cur_failed_queue_item)

         return True

View file

@@ -1009,24 +1009,34 @@ def _check_against_names(nameInQuestion, show, season=-1):
 def get_show_by_name(name, useIndexer=False):
     name = full_sanitizeSceneName(name)

-    showObj = sickbeard.name_cache.retrieveShowFromCache(name)
-    if not showObj and sickbeard.showList:
-        showNames = list(set(sickbeard.show_name_helpers.sceneToNormalShowNames(name)))
-        for showName in showNames:
-            if showName in sickbeard.scene_exceptions.exceptionIndexerCache:
-                showObj = findCertainShow(sickbeard.showList, int(sickbeard.scene_exceptions.exceptionIndexerCache[showName]))
-                if showObj:
-                    break
-            if useIndexer and not showObj:
-                (sn, idx, id) = searchIndexerForShowID(showName, ui=classes.ShowListUI)
-                if id:
-                    showObj = findCertainShow(sickbeard.showList, int(id))
-            if showObj:
-                break
-    return showObj
+    try:
+        # check cache for show
+        showObj = sickbeard.name_cache.retrieveShowFromCache(name)
+        if showObj:
+            return showObj
+
+        if not showObj and sickbeard.showList:
+            showNames = list(set(sickbeard.show_name_helpers.sceneToNormalShowNames(name)))
+            for showName in showNames:
+                if showName in sickbeard.scene_exceptions.exceptionIndexerCache:
+                    showObj = findCertainShow(sickbeard.showList, int(sickbeard.scene_exceptions.exceptionIndexerCache[showName]))
+                if showObj:
+                    break
+                if useIndexer and not showObj:
+                    (sn, idx, id) = searchIndexerForShowID(showName, ui=classes.ShowListUI)
+                    if id:
+                        showObj = findCertainShow(sickbeard.showList, int(id))
+                if showObj:
+                    break
+
+        # add show to cache
+        if showObj:
+            sickbeard.name_cache.addNameToCache(name, showObj.indexerid)
+    except:
+        showObj = None
+
+    return showObj

 def is_hidden_folder(folder):
     """

View file

@@ -202,6 +202,7 @@ class NameParser(object):
                 result.show = show
                 return result
             elif cur_regex_type == 'normal':
+                result.show = show if show else None
                 return result

         return None
@@ -254,7 +255,7 @@ class NameParser(object):
         return result

-    def parse(self, name):
+    def parse(self, name, cache_result=True):
         name = self._unicodify(name)

         cached = name_parser_cache.get(name)
@@ -329,7 +330,9 @@ class NameParser(object):
        if final_result.season_number == None and not final_result.episode_numbers and final_result.air_date == None and not final_result.series_name:
            raise InvalidNameException("Unable to parse " + name.encode(sickbeard.SYS_ENCODING, 'xmlcharrefreplace'))

-        name_parser_cache.add(name, final_result)
+        if cache_result:
+            name_parser_cache.add(name, final_result)

        return final_result
@@ -436,8 +439,8 @@ class ParseResult(object):
         return to_return.encode('utf-8')

-    def convert(self, show):
-        if not show: return self  # need show object
+    def convert(self):
+        if not self.show: return self  # need show object
         if not self.season_number: return self  # can't work without a season
         if not len(self.episode_numbers): return self  # need at least one episode
         if self.air_by_date or self.sports: return self  # scene numbering does not apply to air-by-date
@@ -451,7 +454,7 @@ class ParseResult(object):
             if len(self.ab_episode_numbers):
                 abNo = self.ab_episode_numbers[i]
-            (s, e, a) = scene_numbering.get_indexer_numbering(show.indexerid, show.indexer, self.season_number,
+            (s, e, a) = scene_numbering.get_indexer_numbering(self.show.indexerid, self.show.indexer, self.season_number,
                                                               epNo, abNo)
             new_episode_numbers.append(e)
             new_season_numbers.append(s)
@@ -491,7 +494,8 @@ class ParseResult(object):
     def _is_anime(self):
         if self.ab_episode_numbers:
-            return True
+            if self.show and self.show.is_anime:
+                return True
         return False

     is_anime = property(_is_anime)

View file

@@ -389,7 +389,7 @@ class PostProcessor(object):
         Returns a (indexer_id, season, []) tuple. The first two may be None if none were found.
         """

-        to_return = (None, None, None, [], None)
+        to_return = (None, None, [], None)

         # if we don't have either of these then there's nothing to use to search the history for anyway
         if not self.nzb_name and not self.folder_name:
@@ -415,7 +415,7 @@ class PostProcessor(object):
             if len(sql_results) == 0:
                 continue

-            indexer_id = int(sql_results[0]["showid"])
+            show = helpers.findCertainShow(sickbeard.showList, int(sql_results[0]["showid"]))
             season = int(sql_results[0]["season"])
             quality = int(sql_results[0]["quality"])
@@ -423,7 +423,7 @@ class PostProcessor(object):
                 quality = None

             self.in_history = True
-            to_return = (indexer_id, None, season, [], quality)
+            to_return = (show, season, [], quality)
             self._log("Found result in history: " + str(to_return), logger.DEBUG)

             return to_return
@@ -436,8 +436,7 @@ class PostProcessor(object):
         # remember whether it's a proper
         if parse_result.extra_info:
-            self.is_proper = re.search('(^|[\. _-])(proper|repack)([\. _-]|$)', parse_result.extra_info,
-                                       re.I) != None
+            self.is_proper = re.search('(^|[\. _-])(proper|repack)([\. _-]|$)', parse_result.extra_info,re.I) != None

         # if the result is complete then remember that for later
         if parse_result.series_name and parse_result.season_number != None and parse_result.episode_numbers and parse_result.release_group:
@@ -471,17 +470,14 @@ class PostProcessor(object):
         logger.log(u"Analyzing name " + repr(name))

-        indexer_id = None
-        indexer = None
-
-        to_return = (indexer_id, indexer, None, [], None)
+        to_return = (None, None, [], None)

         if not name:
             return to_return

         # parse the name to break it into show name, season, and episode
         np = NameParser(file, useIndexers=True)
-        parse_result = np.parse(name)
+        parse_result = np.parse(name).convert()

         self._log(u"Parsed " + name + " into " + str(parse_result).decode('utf-8', 'xmlcharrefreplace'), logger.DEBUG)
@@ -495,22 +491,17 @@ class PostProcessor(object):
         season = parse_result.season_number
         episodes = parse_result.episode_numbers

-        showObj = helpers.get_show_by_name(parse_result.series_name)
-        if showObj:
-            indexer_id = showObj.indexerid
-            indexer = showObj.indexer
-
-        to_return = (indexer_id, indexer, season, episodes, None)
+        to_return = (parse_result.show, season, episodes, None)

         self._finalize(parse_result)
         return to_return

     def _analyze_anidb(self, filePath):
         # TODO: rewrite this
-        return (None, None, None)
+        return (None, None, None, None)

         if not helpers.set_up_anidb_connection():
-            return (None, None, None)
+            return (None, None, None, None)

         ep = self._build_anidb_episode(sickbeard.ADBA_CONNECTION, filePath)
         try:
@@ -545,7 +536,7 @@ class PostProcessor(object):
         else:
             if len(episodes):
                 self._log(u"Lookup successful from anidb. ", logger.DEBUG)
-                return (indexer_id, season, episodes)
+                return (show, season, episodes, None)

         if ep.anidb_file_name:
             self._log(u"Lookup successful, using anidb filename " + str(ep.anidb_file_name), logger.DEBUG)
@@ -576,7 +567,7 @@ class PostProcessor(object):
         For a given file try to find the showid, season, and episode.
         """

-        indexer_id = indexer = season = quality = None
+        show = season = quality = None
         episodes = []

         # try to look up the nzb in history
@@ -595,23 +586,23 @@ class PostProcessor(object):
                         lambda: self._analyze_name(self.file_path),

                         # try to analyze the dir + file name together as one name
-                        lambda: self._analyze_name(self.folder_name + u' ' + self.file_name)
+                        lambda: self._analyze_name(self.folder_name + u' ' + self.file_name),
+                        # try to analyze the file path with the help of aniDB
+                        lambda: self._analyze_anidb(self.file_path)
                        ]

         # attempt every possible method to get our info
         for cur_attempt in attempt_list:
             try:
-                (cur_indexer_id, cur_indexer, cur_season, cur_episodes, cur_quality) = cur_attempt()
+                (cur_show, cur_season, cur_episodes, cur_quality) = cur_attempt()
             except InvalidNameException, e:
                 logger.log(u"Unable to parse, skipping: " + ex(e), logger.DEBUG)
                 continue

-            # check and confirm first that the indexer_id exists in our shows list before setting it
-            if cur_indexer_id != indexer_id and cur_indexer:
-                indexer_id = cur_indexer_id
-                indexer = cur_indexer
+            if cur_show:
+                show = cur_show

             if cur_quality and not (self.in_history and quality):
                 quality = cur_quality
@@ -622,46 +613,46 @@ class PostProcessor(object):
                 episodes = cur_episodes

            # for air-by-date shows we need to look up the season/episode from database
-            if season == -1 and indexer_id and indexer and episodes:
+            if season == -1 and show and episodes:
                self._log(u"Looks like this is an air-by-date or sports show, attempting to convert the date to season/episode",
                          logger.DEBUG)
                airdate = episodes[0].toordinal()
                myDB = db.DBConnection()
                sql_result = myDB.select("SELECT season, episode FROM tv_episodes WHERE showid = ? and indexer = ? and airdate = ?",
-                                         [indexer_id, indexer, airdate])
+                                         [show.indexerid, show.indexer, airdate])

                if sql_result:
                    season = int(sql_result[0][0])
                    episodes = [int(sql_result[0][1])]
                else:
                    self._log(u"Unable to find episode with date " + str(episodes[0]) + u" for show " + str(
-                        indexer_id) + u", skipping", logger.DEBUG)
+                        show.indexerid) + u", skipping", logger.DEBUG)
                    # we don't want to leave dates in the episode list if we couldn't convert them to real episode numbers
                    episodes = []
                    continue

            # if there's no season then we can hopefully just use 1 automatically
-            elif season == None and indexer_id and indexer:
+            elif season == None and show:
                myDB = db.DBConnection()
                numseasonsSQlResult = myDB.select(
                    "SELECT COUNT(DISTINCT season) as numseasons FROM tv_episodes WHERE showid = ? and indexer = ? and season != 0",
-                    [indexer_id, indexer])
+                    [show.indexerid, show.indexer])
                if int(numseasonsSQlResult[0][0]) == 1 and season == None:
                    self._log(
-                        u"Don't have a season number, but this show appears to only have 1 season, setting seasonnumber to 1...",
+                        u"Don't have a season number, but this show appears to only have 1 season, setting season number to 1...",
                        logger.DEBUG)
                    season = 1

-            if indexer_id and indexer and season and episodes:
-                return (indexer_id, indexer, season, episodes, quality)
+            if show and season and episodes:
+                return (show, season, episodes, quality)

-        return (indexer_id, indexer, season, episodes, quality)
+        return (show, season, episodes, quality)

-    def _get_ep_obj(self, indexer_id, indexer, season, episodes):
+    def _get_ep_obj(self, show, season, episodes):
        """
        Retrieve the TVEpisode object requested.

-        indexer_id: The indexerid of the show (int)
+        show: The show object belonging to the show we want to process
        season: The season of the episode (int)
        episodes: A list of episodes to find (list of ints)
@@ -669,36 +660,17 @@ class PostProcessor(object):
         be instantiated and returned. If the episode can't be found then None will be returned.
         """

-        self._log(u"Loading show object with Indexer ID:[" + str(indexer_id) + "] for Indexer:[" + str(sickbeard.indexerApi(indexer).name) + "]", logger.DEBUG)
-
-        # find the show in the showlist
-        try:
-            show_obj = helpers.findCertainShow(sickbeard.showList, indexer_id)
-        except exceptions.MultipleShowObjectsException:
-            raise #TODO: later I'll just log this, for now I want to know about it ASAP
-
-        # if we can't find the show then there's nothing we can really do
-        if not show_obj:
-            self._log(u"This show isn't in your list, you need to add it to SB before post-processing an episode",
-                      logger.ERROR)
-            raise exceptions.PostProcessingFailed()
-
         root_ep = None
         for cur_episode in episodes:
             self._log(u"Retrieving episode object for " + str(season) + "x" + str(cur_episode), logger.DEBUG)

-            # detect and convert scene numbered releases
-            season, cur_episode = sickbeard.scene_numbering.get_indexer_numbering(indexer_id,indexer,season,cur_episode)
-
             # now that we've figured out which episode this file is just load it manually
             try:
-                curEp = show_obj.getEpisode(season, cur_episode)
+                curEp = show.getEpisode(season, cur_episode)
             except exceptions.EpisodeNotFoundException, e:
                 self._log(u"Unable to create episode: " + ex(e), logger.DEBUG)
                 raise exceptions.PostProcessingFailed()

-            self._log(u"Episode object has been converted from Scene numbering " + str(curEp.scene_season) + "x" + str(
-                curEp.scene_episode) + " to Indexer numbering" + str(curEp.season) + "x" + str(curEp.episode))
-
             # associate all the episodes together under a single root episode
             if root_ep == None:
                 root_ep = curEp
@@ -856,14 +828,18 @@ class PostProcessor(object):
         self.in_history = False

         # try to find the file info
-        (indexer_id, indexer, season, episodes, quality) = self._find_info()
+        (show, season, episodes, quality) = self._find_info()

-        if not indexer_id or not indexer or season == None or not episodes:
+        if not show:
+            self._log(u"This show isn't in your list, you need to add it to SB before post-processing an episode",
+                      logger.ERROR)
+            raise exceptions.PostProcessingFailed()
+        elif season == None or not episodes:
             self._log(u"Not enough information to determine what episode this is", logger.DEBUG)
             self._log(u"Quitting post-processing", logger.DEBUG)
             return False

         # retrieve/create the corresponding TVEpisode objects
-        ep_obj = self._get_ep_obj(indexer_id, indexer, season, episodes)
+        ep_obj = self._get_ep_obj(show, season, episodes)

         # get the quality of the episode we're processing
         if quality:
@@ -878,10 +854,6 @@ class PostProcessor(object):
         priority_download = self._is_priority(ep_obj, new_ep_quality)
         self._log(u"Is ep a priority download: " + str(priority_download), logger.DEBUG)

-        # set the status of the episodes
-        for curEp in [ep_obj] + ep_obj.relatedEps:
-            curEp.status = common.Quality.compositeStatus(common.SNATCHED, new_ep_quality)
-
         # check for an existing file
         existing_file_status = self._checkForExistingFile(ep_obj.location)
@@ -907,6 +879,10 @@ class PostProcessor(object):
                 u"This download is marked a priority download so I'm going to replace an existing file if I find one",
                 logger.DEBUG)

+        # set the status of the episodes
+        #for curEp in [ep_obj] + ep_obj.relatedEps:
+        #    curEp.status = common.Quality.compositeStatus(common.SNATCHED, new_ep_quality)
+
         # delete the existing file (and company)
         for cur_ep in [ep_obj] + ep_obj.relatedEps:
             try:

View file

@@ -208,14 +208,14 @@ def validateDir(path, dirName, nzbNameOriginal, failed):
    #check if the dir have at least one tv video file
    for video in videoFiles:
        try:
-            NameParser().parse(video)
+            NameParser().parse(video, cache_result=False)
            return True
        except InvalidNameException:
            pass

    for dir in allDirs:
        try:
-            NameParser().parse(dir)
+            NameParser().parse(dir, cache_result=False)
            return True
        except InvalidNameException:
            pass
@@ -226,7 +226,7 @@ def validateDir(path, dirName, nzbNameOriginal, failed):
        for packed in packedFiles:
            try:
-                NameParser().parse(packed)
+                NameParser().parse(packed, cache_result=False)
                return True
            except InvalidNameException:
                pass

View file

@@ -146,19 +146,12 @@ class ProperFinder():
                    curProper.indexerid = curShow.indexerid
                    break

-            showObj = None
-            if curProper.indexerid:
-                showObj = helpers.findCertainShow(sickbeard.showList, curProper.indexerid)
-
-            if not showObj:
+            if not parse_result.show:
                sickbeard.name_cache.addNameToCache(parse_result.series_name, 0)
                continue

            if not in_cache:
-                sickbeard.name_cache.addNameToCache(parse_result.series_name, curProper.indexerid)
+                sickbeard.name_cache.addNameToCache(parse_result.series_name, parse_result.show.indexerid)

-            # scene numbering -> indexer numbering
-            parse_result = parse_result.convert(showObj)
-
            if not parse_result.episode_numbers:
                logger.log(

View file

@@ -286,16 +286,13 @@ class GenericProvider:
            # parse the file name
            try:
                myParser = NameParser(False, show=show, useIndexers=manualSearch)
-                parse_result = myParser.parse(title)
+                parse_result = myParser.parse(title).convert()
            except InvalidNameException:
                logger.log(u"Unable to parse the filename " + title + " into a valid episode", logger.WARNING)
                continue

            quality = self.getQuality(item, parse_result.is_anime)

-            # scene -> indexer numbering
-            parse_result = parse_result.convert(self.show)
-
            if not (self.show.air_by_date or self.show.sports):
                if search_mode == 'sponly' and len(parse_result.episode_numbers):
                    logger.log(

View file

@@ -11,7 +11,7 @@
 # SickRage is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License
 # along with SickRage. If not, see <http://www.gnu.org/licenses/>.
@@ -45,6 +45,7 @@ from name_parser.parser import NameParser, InvalidNameException
 cache_lock = threading.Lock()

 class CacheDBConnection(db.DBConnection):
     def __init__(self, providerName):
         db.DBConnection.__init__(self, "cache.db")
@@ -260,13 +261,10 @@ class TVCache():
             return True

     def _addCacheEntry(self, name, url, quality=None):
-        indexerid = None
-        in_cache = False

         # if we don't have complete info then parse the filename to get it
         try:
             myParser = NameParser()
-            parse_result = myParser.parse(name)
+            parse_result = myParser.parse(name).convert()
         except InvalidNameException:
             logger.log(u"Unable to parse the filename " + name + " into a valid episode", logger.DEBUG)
             return None
@@ -279,36 +277,26 @@ class TVCache():
             logger.log(u"No series name retrieved from " + name + ", unable to cache it", logger.DEBUG)
             return None

-        cacheResult = sickbeard.name_cache.retrieveNameFromCache(parse_result.series_name)
-        if cacheResult:
-            in_cache = True
-            indexerid = int(cacheResult)
-        elif cacheResult == 0:
-            return None
+        showObj = None
+        if parse_result.show:
+            showObj = parse_result.show

-        if not indexerid:
+        if not showObj:
             showResult = helpers.searchDBForShow(parse_result.series_name)
             if showResult:
-                indexerid = int(showResult[0])
+                showObj = helpers.findCertainShow(sickbeard.showList, int(showResult[0]))

-        if not indexerid:
+        if not showObj:
             for curShow in sickbeard.showList:
                 if show_name_helpers.isGoodResult(name, curShow, False):
-                    indexerid = curShow.indexerid
+                    showObj = curShow
                     break

-        showObj = None
-        if indexerid:
-            showObj = helpers.findCertainShow(sickbeard.showList, indexerid)
-
         if not showObj:
             logger.log(u"No match for show: [" + parse_result.series_name + "], not caching ...", logger.DEBUG)
             sickbeard.name_cache.addNameToCache(parse_result.series_name, 0)
             return None

-        # scene -> indexer numbering
-        parse_result = parse_result.convert(showObj)
-
         season = episodes = None
         if parse_result.air_by_date or parse_result.sports:
             myDB = db.DBConnection()
@@ -316,7 +304,7 @@ class TVCache():
             airdate = parse_result.air_date.toordinal() or parse_result.sports_event_date.toordinal()
             sql_results = myDB.select(
                 "SELECT season, episode FROM tv_episodes WHERE showid = ? AND indexer = ? AND airdate = ?",
-                [indexerid, showObj.indexer, airdate])
+                [showObj.indexerid, showObj.indexer, airdate])
             if sql_results > 0:
                 season = int(sql_results[0]["season"])
                 episodes = [int(sql_results[0]["episode"])]
@@ -340,12 +328,9 @@ class TVCache():
         logger.log(u"Added RSS item: [" + name + "] to cache: [" + self.providerID + "]", logger.DEBUG)

-        if not in_cache:
-            sickbeard.name_cache.addNameToCache(parse_result.series_name, indexerid)
-
         return [
             "INSERT INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality) VALUES (?,?,?,?,?,?,?)",
-            [name, season, episodeText, indexerid, url, curTimestamp, quality]]
+            [name, season, episodeText, showObj.indexerid, url, curTimestamp, quality]]

     def searchCache(self, episodes, manualSearch=False):
@@ -420,7 +405,7 @@ class TVCache():
             result.quality = curQuality
             result.content = self.provider.getURL(url) \
                 if self.provider.providerType == sickbeard.providers.generic.GenericProvider.TORRENT \
                 and not url.startswith('magnet') else None

             # add it to the list
             if epObj not in neededEps: