Mirror of https://github.com/SickGear/SickGear.git, synced 2024-12-01 00:43:37 +00:00
Merge pull request #656 from JackDandy/feature/FixNameParser
Fix rare NameParser case where numeric episode name was parsed as episode number
Commit a837b8dca3
8 changed files with 75 additions and 37 deletions
@@ -38,6 +38,7 @@
* Add Emby notifier to config/Notifications
* Fix alternative unicode show names from breaking search
* Change show update, set shows with newly added airdate or existing episodes with future or never dates, to "Wanted"
* Fix rare NameParser case where numeric episode name was parsed as episode number

### 0.11.6 (2016-02-18 23:10:00 UTC)
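
The changelog entry above concerns release names whose episode title is itself a number (as in the 'Show.Name.S01E15-11001001' test case added in this PR). A minimal sketch, using a made-up regex rather than SickGear's real pattern set, of how such a title can be mistaken for the upper bound of an episode range:

import re

# A naive range-style pattern (not SickGear's actual patterns): it also matches
# 'Show.Name.S01E15-11001001', where 11001001 is the episode title, and would
# imply an eleven-million-episode range instead of just [15].
pattern = re.compile(r'S(?P<season>\d+)E(?P<ep_num>\d+)(?:-E?(?P<extra_ep_num>\d+))?', re.I)

for name in ('Show.Name.S01E15-E17', 'Show.Name.S01E15-11001001'):
    match = pattern.search(name)
    ep_num, extra = int(match.group('ep_num')), match.group('extra_ep_num')
    episodes = range(ep_num, int(extra) + 1) if extra else [ep_num]
    print(name, '->', list(episodes[:5]), 'of', len(episodes))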
@@ -32,16 +32,13 @@
#set $indexer = $curDir['existing_info'][2]
#end if

#set $indexer = $sickbeard.INDEXER_DEFAULT

#*
#set $indexer = 0
#if $curDir['existing_info'][0]
#if $curDir['existing_info'][0] and $sickbeard.indexerApi($curDir['existing_info'][2]).config.get('active')
#set $indexer = $curDir['existing_info'][2]
#elif 0 < $sickbeard.INDEXER_DEFAULT
#set $indexer = $sickbeard.INDEXER_DEFAULT
#end if
*#

<tr>
<td class="col-checkbox">
<input type="checkbox" id="$show_id" class="dirCheck" checked=checked>
@@ -49,8 +46,10 @@
<td>
<label for="$show_id">
#set $display_name = (re.sub('^((?:A(?!\s+to)n?)|The)\s(\w)', r'<span class="article">\1</span> \2', $curDir['name']), $curDir['name'])[$sickbeard.SORT_ARTICLE]
<span class="filepath#echo ('', ' red-text')[$curDir['highlight']]#">$curDir['path']</span>$display_name
#echo ('', '<br />^ <span class="red-text">... (cannot add, this location is in use)</span>')[$curDir['highlight']]#
<span class="filepath#if $curDir['highlight']# red-text#end if#">$curDir['path']</span>$display_name
#if $curDir['highlight']
<br />^ <span class="red-text">... (cannot add, this location is in use)</span>
#end if
</label>
</td>
#if $curDir['existing_info'][1] and $indexer > 0
@@ -63,9 +62,9 @@
<td align="center">
<select name="indexer">
#for $curIndexer in $sickbeard.indexerApi().indexers.items()
#if $curIndexer[0] == $sickbeard.INDEXER_DEFAULT
#if $sickbeard.indexerApi($curIndexer[0]).config.get('active')
<option value="$curIndexer[0]" #if $curIndexer[0] == $indexer then 'selected="selected"' else ''#>$curIndexer[1]</option>
#end if
#end if
#end for
</select>
</td>
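
The template changes above gate both the pre-selected indexer and the indexer dropdown on an 'active' flag in the indexer's config. A rough Python restatement of that selection logic, with the indexer table and config shape invented for illustration (they are not the real sickbeard.indexerApi objects):

# Simplified stand-ins for the indexer config consulted by the template.
INDEXER_DEFAULT = 1
indexers = {1: {'name': 'TheTVDB', 'active': True},
            2: {'name': 'TVRage', 'active': False}}

def pick_indexer(existing_info):
    """existing_info is (indexer_id, show_name, indexer), as built by the web UI."""
    indexer = 0
    # only trust previously saved info when that indexer is still active
    if existing_info[0] and indexers.get(existing_info[2], {}).get('active'):
        indexer = existing_info[2]
    elif 0 < INDEXER_DEFAULT:
        indexer = INDEXER_DEFAULT
    return indexer

# the dropdown likewise lists only active indexers
options = [(i, cfg['name']) for i, cfg in indexers.items() if cfg.get('active')]
print(pick_indexer((123, 'Some Show', 2)), options)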
@@ -210,17 +210,21 @@ def searchIndexerForShowID(regShowName, indexer=None, indexer_id=None, ui=None):
t = sickbeard.indexerApi(i).indexer(**lINDEXER_API_PARMS)

for name in showNames:
logger.log(u'Trying to find ' + name + ' on ' + sickbeard.indexerApi(i).name, logger.DEBUG)
logger.log('Trying to find %s on %s' % (name, sickbeard.indexerApi(i).name), logger.DEBUG)

try:
result = t[indexer_id] if indexer_id else t[name]
except:
continue

seriesname = series_id = False
for search in result:
seriesname = search['seriesname']
series_id = search['id']
seriesname = series_id = None
for search in result if isinstance(result, list) else [result]:
try:
seriesname = search['seriesname']
series_id = search['id']
except:
series_id = seriesname = None
continue
if seriesname and series_id:
break
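
The rewritten loop tolerates indexer responses that are either a single record or a list, and records that lack the expected keys. A standalone sketch of that pattern, with made-up data and explicit exception types in place of the bare except used in the source:

def first_series_match(result):
    # normalise a single mapping into a one-element list so both shapes iterate alike
    seriesname = series_id = None
    for search in result if isinstance(result, list) else [result]:
        try:
            seriesname = search['seriesname']
            series_id = search['id']
        except (KeyError, TypeError):
            series_id = seriesname = None
            continue
        if seriesname and series_id:
            break
    return seriesname, series_id

print(first_series_match({'seriesname': 'Some Show', 'id': 123}))
print(first_series_match([{'junk': 1}, {'seriesname': 'Other Show', 'id': 456}]))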
@@ -869,7 +873,7 @@ def full_sanitizeSceneName(name):
return re.sub('[. -]', ' ', sanitizeSceneName(name)).lower().lstrip()


def get_show(name, try_scene_exceptions=False):
def get_show(name, try_scene_exceptions=False, use_cache=True):
if not sickbeard.showList or None is name:
return

@@ -888,7 +892,7 @@ def get_show(name, try_scene_exceptions=False):
show_obj = findCertainShow(sickbeard.showList, indexer_id)

# add show to cache
if show_obj and not from_cache:
if use_cache and show_obj and not from_cache:
sickbeard.name_cache.addNameToCache(name, show_obj.indexerid)
except Exception as e:
logger.log(u'Error when attempting to find show: ' + name + ' in SickGear: ' + str(e), logger.DEBUG)
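
get_show() gains a use_cache switch so callers such as the name parser (see helpers.get_show(parse_result.series_name, True, False) further down this diff) can resolve a show without writing it into the name cache. A minimal stand-in sketch, with plain dicts in place of sickbeard.name_cache and the show list:

name_cache = {}
show_list = {'some show': 101}   # made-up show-name -> indexer id table

def get_show(name, use_cache=True):
    if not name:
        return None
    from_cache = name.lower() in name_cache
    indexer_id = name_cache.get(name.lower()) or show_list.get(name.lower())
    # add show to cache, but only when the caller allows it
    if indexer_id and use_cache and not from_cache:
        name_cache[name.lower()] = indexer_id
    return indexer_id

print(get_show('Some Show', use_cache=False), name_cache)  # lookup without caching
print(get_show('Some Show'), name_cache)                    # lookup that caches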
@@ -20,7 +20,13 @@ from __future__ import with_statement

import os.path

import xml.etree.cElementTree as etree
try:
from lxml import etree
except ImportError:
try:
import xml.etree.cElementTree as etree
except ImportError:
import xml.etree.ElementTree as etree

import re
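
The new import block prefers lxml when it is installed and falls back to the standard-library ElementTree implementations. The same block with its indentation restored, plus a tiny usage line (the tvdbid value is just an example):

try:
    from lxml import etree
except ImportError:
    try:
        import xml.etree.cElementTree as etree
    except ImportError:
        import xml.etree.ElementTree as etree

# all three implementations expose the same parse/findtext surface used here
print(etree.fromstring('<tvshow><tvdbid>73739</tvdbid></tvshow>').findtext('tvdbid'))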
@@ -910,6 +916,8 @@ class GenericMetadata():
Used only when mass adding Existing Shows, using previously generated Show metadata to reduce the need to query TVDB.
"""

from sickbeard.indexers.indexer_config import INDEXER_TVDB

empty_return = (None, None, None)

metadata_path = ek.ek(os.path.join, folder, self._show_metadata_filename)

@@ -944,8 +952,13 @@ class GenericMetadata():

if showXML.findtext('tvdbid') != None:
indexer_id = int(showXML.findtext('tvdbid'))
indexer = INDEXER_TVDB
elif showXML.findtext('id') != None:
indexer_id = int(showXML.findtext('id'))
try:
indexer = INDEXER_TVDB if [s for s in showXML.findall('.//*') if s.text and s.text.find('thetvdb.com') != -1] else indexer
except:
pass
else:
logger.log(u"Empty <id> or <tvdbid> field in NFO, unable to find a ID", logger.WARNING)
return empty_return
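
With only a generic <id> element in the NFO, the new code scans every element's text for 'thetvdb.com' before assuming the TVDB indexer. A self-contained sketch of that check; the sample NFO string and the INDEXER_TVDB value are illustrative assumptions:

import xml.etree.ElementTree as etree

INDEXER_TVDB = 1   # assumed constant for illustration

nfo = etree.fromstring(
    '<tvshow><id>73739</id>'
    '<episodeguide><url>http://www.thetvdb.com/api/...</url></episodeguide></tvshow>')

indexer = None
if nfo.findtext('tvdbid') is not None:
    indexer_id, indexer = int(nfo.findtext('tvdbid')), INDEXER_TVDB
elif nfo.findtext('id') is not None:
    indexer_id = int(nfo.findtext('id'))
    # only call it TVDB when some element's text actually mentions thetvdb.com
    if [s for s in nfo.findall('.//*') if s.text and -1 != s.text.find('thetvdb.com')]:
        indexer = INDEXER_TVDB

print(indexer_id, indexer)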
@@ -135,24 +135,40 @@ class NameParser(object):
result.season_number = tmp_season
result.score += 1

if 'ep_num' in named_groups:
ep_num = self._convert_number(match.group('ep_num'))
if 'extra_ep_num' in named_groups and match.group('extra_ep_num'):
result.episode_numbers = range(ep_num, self._convert_number(match.group('extra_ep_num')) + 1)
result.score += 1
def _process_epnum(captures, capture_names, grp_name, extra_grp_name, ep_numbers, parse_result):
ep_num = self._convert_number(captures.group(grp_name))
extra_grp_name = 'extra_%s' % extra_grp_name
ep_numbers = '%sepisode_numbers' % ep_numbers
if extra_grp_name in capture_names and captures.group(extra_grp_name):
try:
if hasattr(self.showObj, 'getEpisode'):
ep = self.showObj.getEpisode(parse_result.season_number, ep_num)
else:
tmp_show = helpers.get_show(parse_result.series_name, True, False)
if tmp_show and hasattr(tmp_show, 'getEpisode'):
ep = tmp_show.getEpisode(parse_result.season_number, ep_num)
else:
ep = None
except:
ep = None
en = ep and ep.name and re.match(r'^\W*(\d+)', ep.name) or None
es = en and en.group(1) or None

extra_ep_num = self._convert_number(captures.group(extra_grp_name))
parse_result.__dict__[ep_numbers] = range(ep_num, extra_ep_num + 1) if not (
ep and es and es != captures.group(extra_grp_name)) and (
0 < extra_ep_num - ep_num < 10) else [ep_num]
parse_result.score += 1
else:
result.episode_numbers = [ep_num]
result.score += 1
parse_result.__dict__[ep_numbers] = [ep_num]
parse_result.score += 1
return parse_result

if 'ep_num' in named_groups:
result = _process_epnum(match, named_groups, 'ep_num', 'ep_num', '', result)

if 'ep_ab_num' in named_groups:
ep_ab_num = self._convert_number(match.group('ep_ab_num'))
if 'extra_ab_ep_num' in named_groups and match.group('extra_ab_ep_num'):
result.ab_episode_numbers = range(ep_ab_num,
self._convert_number(match.group('extra_ab_ep_num')) + 1)
result.score += 1
else:
result.ab_episode_numbers = [ep_ab_num]
result.score += 1
result = _process_epnum(match, named_groups, 'ep_ab_num', 'ab_ep_num', 'ab_', result)

if 'air_year' in named_groups and 'air_month' in named_groups and 'air_day' in named_groups:
year = int(match.group('air_year'))
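
The new _process_epnum helper is the heart of the fix: before accepting a captured 'extra' number as the end of an episode range, it looks up the real episode (via the show object or helpers.get_show) and checks both the title's leading digits and the width of the implied span. A condensed, standalone sketch of that decision, with the show lookup replaced by a plain argument:

import re

def episode_numbers(ep_num, extra_raw, known_episode_name=None):
    """Return the episode list implied by 'E<ep_num>-<extra_raw>' in a release name."""
    extra_ep_num = int(extra_raw)
    # leading digits of the real episode title, when the show/episode is known
    en = known_episode_name and re.match(r'^\W*(\d+)', known_episode_name) or None
    es = en and en.group(1) or None
    # same test as the diff: accept a multi-episode range only when the known
    # title's leading digits do not contradict it and the span is small
    if not (known_episode_name and es and es != extra_raw) and 0 < extra_ep_num - ep_num < 10:
        return list(range(ep_num, extra_ep_num + 1))
    return [ep_num]

print(episode_numbers(15, '17'))                    # [15, 16, 17]
print(episode_numbers(15, '11001001', '11001001'))  # [15] - the number is the title
print(episode_numbers(15, '11001001'))              # [15] - span is implausibly wide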
@@ -227,7 +227,7 @@ def _download_propers(proper_list):
continue

# make sure that none of the existing history downloads are the same proper we're trying to download
clean_proper_name = _generic_name(helpers.remove_non_release_groups(cur_proper.name, show_obj.is_anime()))
clean_proper_name = _generic_name(helpers.remove_non_release_groups(cur_proper.name, show_obj.is_anime))
is_same = False
for result in history_results:
# if the result exists in history already we need to skip it
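
The properFinder change drops the call parentheses from show_obj.is_anime, reading it as an attribute. A tiny illustration, assuming the show object exposes is_anime as a property (which is what the corrected line implies):

class Show(object):
    def __init__(self, anime):
        self._anime = anime

    @property
    def is_anime(self):
        return bool(self._anime)

show = Show(anime=1)
print(show.is_anime)   # True -- attribute access, as in the corrected line
# show.is_anime() would raise TypeError: 'bool' object is not callable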
@@ -2358,15 +2358,18 @@ class NewHomeAddShows(Home):
(indexer_id, show_name, indexer) = cur_provider.retrieveShowMetadata(cur_path)

# default to TVDB if indexer was not detected
if show_name and not (indexer or indexer_id):
if show_name and (not indexer or not indexer_id):
(sn, idx, id) = helpers.searchIndexerForShowID(show_name, indexer, indexer_id)

# set indexer and indexer_id from found info
if not indexer and idx:
if idx and id:
indexer = idx

if not indexer_id and id:
indexer_id = id
show_name = sn

# in case we don't have both indexer + indexer_id, set both to None
if not indexer or not indexer_id:
indexer = indexer_id = None

cur_dir['existing_info'] = (indexer_id, show_name, indexer)
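
The webserve change re-queries the indexer when either piece of existing info is missing and only accepts the lookup when both an indexer and an id come back. A sketch of that flow; search_indexer_for_show_id and its lookup table are stand-ins for helpers.searchIndexerForShowID, not the real helper:

def search_indexer_for_show_id(show_name, indexer=None, indexer_id=None):
    fake_db = {'some show': ('Some Show', 1, 73739)}   # made-up lookup table
    return fake_db.get(show_name.lower(), (None, None, None))

def resolve_existing_info(show_name, indexer, indexer_id):
    # fall back to a lookup when either the indexer or the id is unknown
    if show_name and (not indexer or not indexer_id):
        sn, idx, id_ = search_indexer_for_show_id(show_name, indexer, indexer_id)
        if idx and id_:
            indexer, indexer_id, show_name = idx, id_, sn
    # in case we don't have both indexer + indexer_id, blank both
    if not indexer or not indexer_id:
        indexer = indexer_id = None
    return indexer_id, show_name, indexer

print(resolve_existing_info('Some Show', None, None))     # (73739, 'Some Show', 1)
print(resolve_existing_info('Unknown Show', None, None))  # (None, 'Unknown Show', None)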
@@ -33,6 +33,7 @@ simple_test_cases = {
'Show-Name-S06E01-1080i': parser.ParseResult(None, 'Show-Name', 6, [1], '1080i'),
'Show.Name.S06E01.Other.WEB-DL': parser.ParseResult(None, 'Show Name', 6, [1], 'Other.WEB-DL'),
'Show.Name.S06E01 Some-Stuff Here': parser.ParseResult(None, 'Show Name', 6, [1], 'Some-Stuff Here'),
'Show.Name.S01E15-11001001': parser.ParseResult(None, 'Show Name', 1, [15], None),
},

'fov': {

@@ -138,6 +139,7 @@ simple_test_cases = {
'anime_standard': {
'[Cthuko] Shirobako - 05v2 [720p H264 AAC][80C9B09B]': parser.ParseResult(None, 'Shirobako', None, [], '720p H264 AAC', 'Cthuko', None, [5]),
'[Ayako]_Minami-ke_Okaeri_-_01v2_[1024x576 H264+AAC][B1912CD8]': parser.ParseResult(None, 'Minami-ke Okaeri', None, [], '1024x576 H264+AAC', 'Ayako', None, [1]),
'Show.Name.123-11001001': parser.ParseResult(None, 'Show Name', None, [], None, None, None, [123]),
},

'anime_ep_name': {
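
The two added test entries pin down the expected behaviour: the trailing 11001001 stays part of the name rather than becoming an episode range. Restated outside the test harness, with a namedtuple whose field names are descriptive labels rather than ParseResult's real attribute names:

from collections import namedtuple

ParseResult = namedtuple(
    'ParseResult',
    'original_name series_name season_number episode_numbers extra_info '
    'release_group air_date ab_episode_numbers')

expected = {
    'Show.Name.S01E15-11001001': ParseResult(None, 'Show Name', 1, [15], None, None, None, None),
    'Show.Name.123-11001001': ParseResult(None, 'Show Name', None, [], None, None, None, [123]),
}

# in both new cases only a single episode (15) or absolute number (123) is
# expected, never a numeric range derived from the title
print(expected['Show.Name.S01E15-11001001'].episode_numbers)  # [15]
print(expected['Show.Name.123-11001001'].ab_episode_numbers)  # [123]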