Mirror of https://github.com/SickGear/SickGear.git (synced 2025-01-05 17:43:37 +00:00)
Fixed issues with post-processing: indexer auto-detection now happens at a point that no longer requires post-processing to restart from the beginning, cutting down processing time.
Fixed more code relating to scene numbering conversion and sports event episodes/shows. Post-processing now also checks the name cache for the indexer id, in addition to the usual lookups it performed before.
This commit is contained in:
parent 7f44a2cfad
commit c330bbb386
11 changed files with 143 additions and 97 deletions
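In short, the lookup order this commit describes is: name cache first, then scene exceptions, then a live query through helpers.searchIndexerForShowID(), which (per the first hunk below) now loops over every configured indexer itself when none is passed in. A minimal sketch of that flow, assuming the sickbeard helpers named in the diff; lookup_indexer_id() and get_scene_exception_by_name() are illustrative stand-ins, not code from this commit:

# Hypothetical sketch of the lookup order this commit introduces; not the
# actual PostProcessor method. name_cache, scene_exceptions and helpers are
# the sickbeard modules touched in the diff below.
from sickbeard import helpers, name_cache, scene_exceptions


def lookup_indexer_id(series_name, name_list):
    # 1. cheap path: a previous run may already have cached the indexer id
    cache_id = name_cache.retrieveNameFromCache(series_name)
    if cache_id:
        return cache_id

    # 2. scene exception names that map straight to an indexer id
    for cur_name in name_list:
        scene_id = scene_exceptions.get_scene_exception_by_name(cur_name)  # assumed helper
        if scene_id:
            return scene_id

    # 3. last resort: ask the indexers; with no indexer argument,
    #    searchIndexerForShowID() now loops over every configured indexer
    for cur_name in name_list:
        found = helpers.searchIndexerForShowID(cur_name)
        if found:
            return found[1]  # found == [indexer, indexer_id] per the diff
    return None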
@@ -319,36 +319,38 @@ def searchDBForShow(regShowName, indexer_id=None):
     return None


-def searchIndexerForShowID(regShowName, indexer, indexer_id=None):
-    showNames = [re.sub('[. -]', ' ', regShowName), regShowName]
+def searchIndexerForShowID(regShowName, indexer=None, indexer_id=None, ui=True):
+    showNames = list(set([re.sub('[. -]', ' ', regShowName), regShowName]))

-    # Query Indexers for each search term and build the list of results
-    lINDEXER_API_PARMS = sickbeard.indexerApi(indexer).api_params.copy()
-    lINDEXER_API_PARMS['custom_ui'] = classes.ShowListUI
-    t = sickbeard.indexerApi(indexer).indexer(**lINDEXER_API_PARMS)
+    for indexer in sickbeard.indexerApi().indexers if not indexer else [int(indexer)]:
+        # Query Indexers for each search term and build the list of results
+        lINDEXER_API_PARMS = sickbeard.indexerApi(indexer).api_params.copy()
+        if ui:lINDEXER_API_PARMS['custom_ui'] = classes.ShowListUI
+        t = sickbeard.indexerApi(indexer).indexer(**lINDEXER_API_PARMS)

-    for name in showNames:
-        logger.log(u"Trying to find " + name + " on " + sickbeard.indexerApi(indexer).name, logger.DEBUG)
-        try:
-            if indexer_id:
-                search = t[indexer_id]
-            else:
-                search = t[name]
+        for name in showNames:
+            logger.log(u"Trying to find " + name + " on " + sickbeard.indexerApi(indexer).name, logger.DEBUG)
+            try:
+                if indexer_id:
+                    search = t[indexer_id]
+                else:
+                    search = t[name]

-            if isinstance(search, dict):
-                search = [search]
+                if isinstance(search, dict):
+                    search = [search]

-            # add search results
-            for i in range(len(search)):
-                part = search[i]
-                seriesname = part['seriesname'].encode('UTF-8').lower()
-                name = name.encode('UTF-8').lower()
+                # add search results
+                for i in range(len(search)):
+                    part = search[i]
+                    seriesname = part['seriesname'].encode('UTF-8').lower()
+                    name = name.encode('UTF-8').lower()

-                if (seriesname == name) or (indexer_id is not None and part['id'] == indexer_id):
-                    return [sickbeard.indexerApi(indexer).config['id'], part['id']]
+                    if (name in seriesname) or (indexer_id is not None and part['id'] == indexer_id):
+                        return [sickbeard.indexerApi(indexer).config['id'], part['id']]

-        except KeyError:break
-        except Exception:continue
+            except KeyError:break
+            except Exception:continue


 def sizeof_fmt(num):
     '''
@@ -33,9 +33,9 @@ from time import strptime


 class NameParser(object):
-    ALL_REGEX = -1
-    NORMAL_REGEX = 0
-    SPORTS_REGEX = 1
+    ALL_REGEX = 0
+    NORMAL_REGEX = 1
+    SPORTS_REGEX = 2

     def __init__(self, file_name=True, regexMode=0):
@@ -139,7 +139,7 @@ class NameParser(object):

             try:
                 if 'sports' in cur_regex_name:
-                    dtStr = '%s-%s-%s' % (day, month, year)
+                    dtStr = '%s-%s-%s' % (year, month, day)
                     result.air_date = result.sports_date = datetime.datetime.strptime(dtStr, "%Y-%b-%d").date()
                 else:
                     dtStr = '%s-%s-%s' % (year, month, day)
@@ -190,7 +190,7 @@ sports_regexs = [
     ('sports_event',
      # Show.Name.123.Event.Nov.23rd.2010.Source.Quality.Etc-Group
      '''
-     ^(?P<series_name>.*?(UEFA|MLB|ESPN|WWE|MMA|UFC|TNA|EPL|NASCAR|NBA|NFL|NHL|NRL|PGA|SUPER LEAGUE|FORMULA|FIFA|NETBALL|MOTOGP).*?)[. _-]+
+     ^(?P<series_name>.*?)[. _-]+
      (?P<parts>\d{1,3}\d{1,3}.*?)[. _-]+
      (?P<event>.*?)[. _-]+
      (?P<air_day>\d{1,2}).+
@@ -234,13 +234,4 @@ sports_regexs = [
      (?P<extra_info>.*?(?<![. _-])(?<!WEB))[. _-]+
      (?P<release_group>.*?)$
      '''),
-
-    ('sports_bare',
-     # Show.Name.Event.Nov.23rd.2010.Source.Quality.Etc-Group
-     '''
-     ^(?P<series_name>.*?)[. _-]+
-     (?P<parts>\d{1,3}\d{1,3}.*?)[. _-]+
-     (?P<extra_info>.*?(?<![. _-])(?<!WEB))[. _-]+
-     (?P<release_group>.*?)$
-     '''),
 ]
@@ -189,7 +189,7 @@ def _generate_sample_ep(multi=None, abd=False, sports=False):
         ep._release_name = 'Show.Name.2011.03.09.HDTV.XviD-RLSGROUP'
         ep.show.air_by_date = 1
     elif sports:
-        ep._release_name = 'Show.Name.2011.Mar.9th.HDTV.XviD-RLSGROUP'
+        ep._release_name = 'Show.Name.2011.Mar.09.HDTV.XviD-RLSGROUP'
         ep.show.sports = 1
     else:
         ep._release_name = 'Show.Name.S02E03.HDTV.XviD-RLSGROUP'
@@ -39,7 +39,7 @@ from sickbeard import notifiers
 from sickbeard import show_name_helpers
 from sickbeard import scene_exceptions
 from sickbeard import failed_history
-from sickbeard import scene_numbering
+from sickbeard import name_cache

 from sickbeard import encodingKludge as ek
 from sickbeard.exceptions import ex
@@ -507,6 +507,15 @@ class PostProcessor(object):
         logger.log("Parse result(episode_numbers): " + str(parse_result.episode_numbers), logger.DEBUG)
         logger.log("Parse result(release_group): " + str(parse_result.release_group), logger.DEBUG)

+        # for each possible interpretation of that scene name
+        for cur_name in name_list:
+            self._log(u"Checking cache for " + cur_name, logger.DEBUG)
+            cache_id = name_cache.retrieveNameFromCache(parse_result.series_name)
+            if cache_id:
+                self._log(u"Cache lookup got a Indexer ID " + str(cache_id) + ", using that", logger.DEBUG)
+                _finalize(parse_result)
+                return (cache_id, season, episodes)
+
         # for each possible interpretation of that scene name
         for cur_name in name_list:
             self._log(u"Checking scene exceptions for a match on " + cur_name, logger.DEBUG)
@@ -529,7 +538,7 @@ class PostProcessor(object):

         # see if we can find the name on the Indexer
         for cur_name in name_list:
-            foundInfo = helpers.searchIndexerForShowID(cur_name, self.indexer)
+            foundInfo = helpers.searchIndexerForShowID(cur_name)
             if foundInfo:
                 indexer_id = foundInfo[1]
                 self._log(
@@ -588,43 +597,55 @@ class PostProcessor(object):

         # for air-by-date shows we need to look up the season/episode from tvdb
         if season == -1 and indexer_id and episodes:
-            self._log(u"Looks like this is an air-by-date or sports show, attempting to convert the date to season/episode",
-                      logger.DEBUG)
+            self._log(
+                u"Looks like this is an air-by-date or sports show, attempting to convert the date to season/episode",
+                logger.DEBUG)

             # try to get language set for this show
             indexer_lang = None
             try:
                 showObj = helpers.findCertainShow(sickbeard.showList, indexer_id)
-                if (showObj != None):
-                    # set the language of the show
+                if showObj:
                     indexer_lang = showObj.lang
             except exceptions.MultipleShowObjectsException:
                 raise #TODO: later I'll just log this, for now I want to know about it ASAP

-            try:
-                lINDEXER_API_PARMS = sickbeard.indexerApi(self.indexer).api_params.copy()
-                if indexer_lang and not indexer_lang == 'en':
-                    lINDEXER_API_PARMS = {'language': indexer_lang}
-
-                t = sickbeard.indexerApi(self.indexer).indexer(**lINDEXER_API_PARMS)
-
-                epObj = t[indexer_id].airedOn(episodes[0])[0]
-
-                season = int(epObj["seasonnumber"])
-                episodes = [int(epObj["episodenumber"])]
-
-                self._log(u"Got season " + str(season) + " episodes " + str(episodes), logger.DEBUG)
-            except (KeyError, sickbeard.indexer_episodenotfound), e:
-                self._log(u"Unable to find episode with date " + str(episodes[0]) + u" for show " + str(
-                    indexer_id) + u", skipping", logger.DEBUG)
-                # we don't want to leave dates in the episode list if we couldn't convert them to real episode numbers
-                episodes = []
-                continue
-            except sickbeard.indexer_error, e:
-                logger.log(u"Unable to contact " + sickbeard.indexerApi(self.indexer).name + ": " + ex(e),
-                           logger.WARNING)
+            for indexer in sickbeard.indexerApi().indexers:
+                self.indexer = int(indexer)
+                self._log(
+                    u"Searching " + sickbeard.indexerApi(self.indexer).name + ", trying to auto-detect Indexer for "
+                                                                              "show")
+                try:
+                    lINDEXER_API_PARMS = sickbeard.indexerApi(self.indexer).api_params.copy()
+                    if indexer_lang and not indexer_lang == 'en':
+                        lINDEXER_API_PARMS = {'language': indexer_lang}
+
+                    t = sickbeard.indexerApi(self.indexer).indexer(**lINDEXER_API_PARMS)
+
+                    epObj = t[indexer_id].airedOn(episodes[0])[0]
+
+                    season = int(epObj["seasonnumber"])
+                    episodes = [int(epObj["episodenumber"])]
+
+                    self._log(u"Got season " + str(season) + " episodes " + str(episodes), logger.DEBUG)
+                except (KeyError, sickbeard.indexer_episodenotfound), e:
+                    self._log(u"Unable to find episode with date " + str(episodes[0]) + u" for show " + str(
+                        indexer_id) + u", skipping", logger.DEBUG)
+                    # we don't want to leave dates in the episode list if we couldn't convert them to real episode numbers
+                    continue
+                except sickbeard.indexer_error, e:
+                    logger.log(u"Unable to contact " + sickbeard.indexerApi(self.indexer).name + ": " + ex(e),
+                               logger.WARNING)
+                    continue
+
+                # try to find the file info
+                if indexer_id and season and episodes:
+                    break
+
+                episodes = []
+                continue
+                self._log(
+                    u"Can't find thhe show on " + sickbeard.indexerApi(
+                        self.indexer).name + ", trying next ""indexer", logger.WARNING)

         # if there's no season then we can hopefully just use 1 automatically
         elif season == None and indexer_id:
@@ -638,7 +659,7 @@ class PostProcessor(object):
                       logger.DEBUG)
             season = 1

-        if indexer_id and season != None and episodes:
+        if indexer_id and season and episodes:
             return (indexer_id, season, episodes)

         return (indexer_id, season, episodes)
@@ -830,22 +851,8 @@ class PostProcessor(object):
             self.in_history = False

         # try to find the file info
-        indexer_id = season = episodes = None
-        for indexer in sickbeard.indexerApi().indexers:
-            self.indexer = int(indexer)
-
-            self._log(u"Searching " + sickbeard.indexerApi(self.indexer).name + ", trying to auto-detect Indexer for "
-                                                                                "show")
-
-            # try to find the file info
-            (indexer_id, season, episodes) = self._find_info()
-            if indexer_id and season != None and episodes:
-                break
-
-            self._log(u"Can't find thhe show on " + sickbeard.indexerApi(self.indexer).name + ", trying next "
-                                                                                              "indexer", logger.WARNING)
-
-        if not indexer_id or season == None or not episodes:
+        (indexer_id, season, episodes) = self._find_info()
+        if not (indexer_id or season or episodes):
             self._log(u"Can't find thhe show on any of the Indexers, skipping",
                       logger.WARNING)
             return False
@@ -117,7 +117,7 @@ class HDBitsProvider(generic.TorrentProvider):
             # parse the file name
             try:
                 myParser = NameParser()
-                parse_result = myParser.parse(title, True)
+                parse_result = myParser.parse(title)
             except InvalidNameException:
                 logger.log(u"Unable to parse the filename " + title + " into a valid episode", logger.WARNING)
                 continue
@@ -148,7 +148,7 @@ class KATProvider(generic.TorrentProvider):

         try:
             myParser = NameParser()
-            parse_result = myParser.parse(fileName, True)
+            parse_result = myParser.parse(fileName)
         except InvalidNameException:
             return None

@@ -157,7 +157,7 @@ class ThePirateBayProvider(generic.TorrentProvider):

         try:
             myParser = NameParser()
-            parse_result = myParser.parse(fileName, True)
+            parse_result = myParser.parse(fileName)
         except InvalidNameException:
             return None

@@ -1892,7 +1892,7 @@ class TVEpisode(object):
         else:
             show_name = self.show.name

-        return {
+        normal = {
             '%SN': show_name,
             '%S.N': dot(show_name),
             '%S_N': us(show_name),
@@ -1912,10 +1912,10 @@
             '%0XME': '%02d' % self.scene_episode,
             '%RN': release_name(self.release_name),
             '%RG': release_group(self.release_name),
-            '%AD': self.airdate.strftime('%Y-%b-%d') if self.show.sports else str(self.airdate).replace('-', ' '),
-            '%A.D': self.airdate.strftime('%Y-%b-%d') if self.show.sports else str(self.airdate).replace('-', '.'),
-            '%A_D': us(self.airdate.strftime('%Y-%b-%d')) if self.show.sports else us(str(self.airdate)),
-            '%A-D': self.airdate.strftime('%Y-%b-%d') if self.show.sports else str(self.airdate),
+            '%AD': self.airdate.strftime('%d %b %Y') if self.show.sports else str(self.airdate).replace('-', ' '),
+            '%A.D': self.airdate.strftime('%d.%b.%Y') if self.show.sports else str(self.airdate).replace('-', '.'),
+            '%A_D': us(self.airdate.strftime('%d-%b-%Y')) if self.show.sports else us(str(self.airdate)),
+            '%A-D': self.airdate.strftime('%d-%b-%Y') if self.show.sports else str(self.airdate),
             '%Y': str(self.airdate.year),
             '%M': self.airdate.strftime('%b') if self.show.sports else str(self.airdate.month),
             '%D': str(self.airdate.day),
@@ -1924,6 +1924,43 @@
             '%RT': "PROPER" if self.is_proper else "",
         }

+        sports = {
+            '%SN': show_name,
+            '%S.N': dot(show_name),
+            '%S_N': us(show_name),
+            '%EN': ep_name,
+            '%E.N': dot(ep_name),
+            '%E_N': us(ep_name),
+            '%QN': Quality.qualityStrings[epQual],
+            '%Q.N': dot(Quality.qualityStrings[epQual]),
+            '%Q_N': us(Quality.qualityStrings[epQual]),
+            '%S': str(self.season),
+            '%0S': '%02d' % self.season,
+            '%E': str(self.episode),
+            '%0E': '%02d' % self.episode,
+            '%XMS': str(self.scene_season),
+            '%0XMS': '%02d' % self.scene_season,
+            '%XME': str(self.scene_episode),
+            '%0XME': '%02d' % self.scene_episode,
+            '%RN': release_name(self.release_name),
+            '%RG': release_group(self.release_name),
+            '%AD': self.airdate.strftime('%d %b %Y'),
+            '%A.D': self.airdate.strftime('%d.%b.%Y'),
+            '%A_D': us(self.airdate.strftime('%d-%b-%Y')),
+            '%A-D': self.airdate.strftime('%d-%b-%Y'),
+            '%Y': str(self.airdate.year),
+            '%M': self.airdate.strftime('%b'),
+            '%D': str(self.airdate.day),
+            '%0M': '%02d' % self.airdate.month,
+            '%0D': '%02d' % self.airdate.day,
+            '%RT': "PROPER" if self.is_proper else "",
+        }
+
+        if self.show.sports:
+            return sports
+
+        return normal
+
     def _format_string(self, pattern, replace_map):
         """
         Replaces all template strings with the correct value
@@ -192,7 +192,7 @@ class TVCache():
         # if we don't have complete info then parse the filename to get it
         for curName in [name] + extraNames:
             try:
-                myParser = NameParser(regexMode=-1)
+                myParser = NameParser()
                 parse_result = myParser.parse(curName)
             except InvalidNameException:
                 logger.log(u"Unable to parse the filename " + curName + " into a valid episode", logger.DEBUG)
@@ -270,6 +270,17 @@ class TVCache():
                 indexer_lang = curShow.lang
                 break

+        # if the database failed, try looking up the show name from scene exceptions list
+        if not indexer_id:
+            logger.log(
+                u"Checking Indexers for Indexer ID of " + parse_result.series_name,
+                logger.DEBUG)
+            indexerResult = helpers.searchIndexerForShowID(parse_result.series_name)
+            if indexerResult:
+                logger.log(
+                    u"" + str(parse_result.series_name) + " was found on " + str(sickbeard.indexerApi(indexerResult[0]).name) + " with Indexer ID: " + str(indexerResult[1]), logger.DEBUG)
+                indexer_id = indexerResult[1]
+
         # if indexer_id was anything but None (0 or a number) then
         if not from_cache:
             name_cache.addNameToCache(parse_result.series_name, indexer_id)
@@ -410,8 +421,6 @@ class TVCache():
         else:
             if episode:
                 epObj = episode
-            else:
-                epObj = showObj.getEpisode(curSeason, curEp)

             # build a result object
             title = curResult["name"]
@@ -59,7 +59,7 @@ class XEMBasicTests(test.SickbeardTestDBCase):
         pattern = u'%SN - %A-D - %EN'
         title = 'UFC.166.Velasquez.v.Dos Santos.III.19th.Oct.2013.HDTV.x264-Sir.Paul'
         try:
-            myParser = NameParser(False, -1)
+            myParser = NameParser(False, 1)
             parse_result = myParser.parse(title)
         except InvalidNameException:
             print(u"Unable to parse the filename " + ep.name + " into a valid episode")