Change reduce number of DB calls for extra_info_no_name.
Add parse repack, proper level to recent search flow.
parent 1a37bb1001
commit 406c9ad6c6
8 changed files with 63 additions and 49 deletions
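
The core of the change moves extra_info_no_name off NameParser and onto ParseResult as a lazily computed, memoized method: the episode-name lookups that previously ran during every parse now run only when a caller asks for the value, and at most once per result. A minimal sketch of that caching pattern, with illustrative names only (not the project's actual classes):

# Minimal sketch of the lazy-memoization pattern adopted here; the class and
# its episode_names stand-in are illustrative, not SickGear code.
import re


class ParseResultSketch(object):
    def __init__(self, extra_info, episode_names=None):
        self.extra_info = extra_info
        # stand-in for the show/episode DB lookups the real code performs
        self.episode_names = episode_names or []
        self._extra_info_no_name = None  # cache, filled on first request

    def extra_info_no_name(self):
        # compute once, then serve the cached value on every later call
        if self._extra_info_no_name is None and self.extra_info is not None:
            value = self.extra_info
            for name in self.episode_names:  # the expensive part in the real code
                value = re.sub(re.escape(name), ' ', value, flags=re.I)
            self._extra_info_no_name = ' '.join(value.split())
        return self._extra_info_no_name


pr = ParseResultSketch('The Pilot PROPER', episode_names=['The Pilot'])
print(pr.extra_info_no_name())  # 'PROPER': computed on first call, cached after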
@@ -150,6 +150,8 @@
 * Change prevent setting show/episode attr to None from indexer data
 * Fix article link color changed to blue on some pages
 * Fix error after SG is updated and where Slack notifier is not enabled
+* Change reduce number of DB calls for extra_info_no_name
+* Add parse repack, proper level to recent search flow
 
 
 ### 0.12.35 (2017-10-27 20:30:00 UTC)
@@ -155,7 +155,7 @@ def history_snatched_proper_fix():
             pr = np.parse(r['resource'])
         except (StandardError, Exception):
             continue
-        if 0 < Quality.get_proper_level(pr.extra_info_no_name, pr.version, pr.is_anime):
+        if 0 < Quality.get_proper_level(pr.extra_info_no_name(), pr.version, pr.is_anime):
             cl.append(['UPDATE history SET action = ? WHERE rowid = ?',
                        [Quality.compositeStatus(SNATCHED_PROPER, int(r['quality'])),
                         r['rowid']]])
@@ -472,32 +472,6 @@ class NameParser(object):
 
         return number
 
-    @staticmethod
-    def _replace_ep_name_helper(e_i_n_n, n):
-        ep_regex = r'\W*%s\W*' % re.sub(r' ', r'\W', re.sub(r'[^a-zA-Z0-9 ]', r'\W?',
-                                                            re.sub(r'\W+$', '', n.strip())))
-        if None is regex:
-            return re.sub(ep_regex, '', e_i_n_n, flags=re.I)
-
-        return regex.sub(r'(%s){e<=%d}' % (
-            ep_regex, trunc(len(re.findall(r'\w', ep_regex)) / 5)), '', e_i_n_n, flags=regex.I | regex.B)
-
-    def _extra_info_no_name(self, extra_info, show, season, episodes):
-        extra_info_no_name = extra_info
-        if isinstance(extra_info_no_name, basestring) and show and hasattr(show, 'indexer'):
-            for e in episodes:
-                if not hasattr(show, 'getEpisode'):
-                    continue
-                ep = show.getEpisode(season, e)
-                if ep and isinstance(getattr(ep, 'name', None), basestring) and ep.name.strip():
-                    extra_info_no_name = self._replace_ep_name_helper(extra_info_no_name, ep.name)
-            if hasattr(show, 'getAllEpisodes'):
-                for e in [ep.name for ep in show.getAllEpisodes(check_related_eps=False) if getattr(ep, 'name', None)
-                          and re.search(r'real|proper|repack', ep.name, re.I)]:
-                    extra_info_no_name = self._replace_ep_name_helper(extra_info_no_name, e)
-
-        return extra_info_no_name
-
     def parse(self, name, cache_result=True):
         name = self._unicodify(name)
 
@@ -558,10 +532,6 @@ class NameParser(object):
         final_result.show = self._combine_results(file_name_result, dir_name_result, 'show')
         final_result.quality = self._combine_results(file_name_result, dir_name_result, 'quality')
 
-        final_result.extra_info_no_name = self._extra_info_no_name(final_result.extra_info, final_result.show,
-                                                                   final_result.season_number,
-                                                                   final_result.episode_numbers)
-
         if not final_result.show:
             if self.testing:
                 pass
@@ -594,8 +564,7 @@ class ParseResult(object):
                  show=None,
                  score=None,
                  quality=None,
-                 version=None,
-                 extra_info_no_name=None):
+                 version=None):
 
         self.original_name = original_name
 
@@ -617,7 +586,7 @@ class ParseResult(object):
         self.quality = quality
 
         self.extra_info = extra_info
-        self.extra_info_no_name = extra_info_no_name
+        self._extra_info_no_name = None
         self.release_group = release_group
 
         self.air_date = air_date
@@ -677,6 +646,37 @@ class ParseResult(object):
 
         return to_return.encode('utf-8')
 
+    @staticmethod
+    def _replace_ep_name_helper(e_i_n_n, n):
+        ep_regex = r'\W*%s\W*' % re.sub(r' ', r'\W', re.sub(r'[^a-zA-Z0-9 ]', r'\W?',
+                                                            re.sub(r'\W+$', '', n.strip())))
+        if None is regex:
+            return re.sub(ep_regex, '', e_i_n_n, flags=re.I)
+
+        return regex.sub(r'(%s){e<=%d}' % (
+            ep_regex, trunc(len(re.findall(r'\w', ep_regex)) / 5)), '', e_i_n_n, flags=regex.I | regex.B)
+
+    def get_extra_info_no_name(self):
+        extra_info_no_name = self.extra_info
+        if isinstance(extra_info_no_name, basestring) and self.show and hasattr(self.show, 'indexer'):
+            for e in self.episode_numbers:
+                if not hasattr(self.show, 'getEpisode'):
+                    continue
+                ep = self.show.getEpisode(self.season_number, e)
+                if ep and isinstance(getattr(ep, 'name', None), basestring) and ep.name.strip():
+                    extra_info_no_name = self._replace_ep_name_helper(extra_info_no_name, ep.name)
+            if hasattr(self.show, 'getAllEpisodes'):
+                for e in [ep.name for ep in self.show.getAllEpisodes(check_related_eps=False) if getattr(ep, 'name', None)
+                          and re.search(r'real|proper|repack', ep.name, re.I)]:
+                    extra_info_no_name = self._replace_ep_name_helper(extra_info_no_name, e)
+
+        return extra_info_no_name
+
+    def extra_info_no_name(self):
+        if None is self._extra_info_no_name and None is not self.extra_info:
+            self._extra_info_no_name = self.get_extra_info_no_name()
+        return self._extra_info_no_name
+
     @property
     def is_air_by_date(self):
         if self.air_date:
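
The helper relocated into ParseResult above removes episode names from extra_info, using fuzzy matching when the optional third-party regex module is importable (its {e<=n} syntax tolerates up to n errors and regex.B requests the best match) and falling back to plain re otherwise. A standalone sketch of that fallback arrangement; strip_name and the simplified pattern building are assumptions for illustration:

# Sketch of the optional-regex fallback used by _replace_ep_name_helper;
# strip_name and the simplified pattern building are illustrative only.
from math import trunc
import re

try:
    import regex  # third-party module with fuzzy matching support; optional
except ImportError:
    regex = None


def strip_name(text, name):
    # join the words of the episode name with "any non-word char" separators
    ep_regex = r'\W*%s\W*' % r'\W'.join(re.escape(part) for part in name.split())
    if regex is None:
        return re.sub(ep_regex, '', text, flags=re.I)
    # with the regex module, also tolerate roughly one error per five word chars
    errors = trunc(len(re.findall(r'\w', ep_regex)) / 5)
    return regex.sub(r'(%s){e<=%d}' % (ep_regex, errors), '', text, flags=regex.I | regex.B)


print(strip_name('The.Pilot.720p.HDTV.REPACK', 'The Pilot'))  # -> 720p.HDTV.REPACK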
@@ -504,8 +504,8 @@ class PostProcessor(object):
         self.release_group = parse_result.release_group
 
         # remember whether it's a proper
-        if parse_result.extra_info_no_name:
-            self.is_proper = 0 < common.Quality.get_proper_level(parse_result.extra_info_no_name, parse_result.version,
+        if parse_result.extra_info_no_name():
+            self.is_proper = 0 < common.Quality.get_proper_level(parse_result.extra_info_no_name(), parse_result.version,
                                                                  parse_result.is_anime)
 
         # if the result is complete then set release name
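
Because the new extra_info_no_name is an ordinary method rather than a property, a call site left as bare attribute access would now test the bound method object, which is always truthy, instead of the computed value; hence the parentheses added at every call site in this commit. A short illustration of that pitfall:

# Illustration only: bare attribute access on a method yields the bound method
# object, which is always truthy, not the value the method would return.
class Demo(object):
    def value(self):
        return None


d = Demo()
print(bool(d.value))    # True: the bound method itself, a silent bug in a condition
print(bool(d.value()))  # False: the actual computed value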
@@ -780,7 +780,7 @@ class PostProcessor(object):
         cur_proper_level = 0
         try:
             pr = np.parse(ep_obj.release_name)
-            cur_proper_level = common.Quality.get_proper_level(pr.extra_info_no_name, pr.version, pr.is_anime)
+            cur_proper_level = common.Quality.get_proper_level(pr.extra_info_no_name(), pr.version, pr.is_anime)
         except (StandardError, Exception):
             pass
         new_name = (('', self.file_name)[isinstance(self.file_name, basestring)], self.nzb_name)[isinstance(
@@ -791,7 +791,7 @@ class PostProcessor(object):
         except (StandardError, Exception):
             npr = None
         if npr:
-            is_repack, new_proper_level = common.Quality.get_proper_level(npr.extra_info_no_name, npr.version,
+            is_repack, new_proper_level = common.Quality.get_proper_level(npr.extra_info_no_name(), npr.version,
                                                                           npr.is_anime, check_is_repack=True)
             if new_proper_level > cur_proper_level and \
                     (not is_repack or npr.release_group == ep_obj.release_group):
@@ -97,9 +97,9 @@ def get_old_proper_level(showObj, indexer, indexerid, season, episodes, old_stat
             p = np.parse(result[0]['resource'])
         except (StandardError, Exception):
             continue
-        level = Quality.get_proper_level(p.extra_info_no_name, p.version, showObj.is_anime)
-        is_internal = p.extra_info_no_name and re.search(r'\binternal\b', p.extra_info_no_name, flags=re.I)
-        codec = _get_codec(p.extra_info_no_name)
+        level = Quality.get_proper_level(p.extra_info_no_name(), p.version, showObj.is_anime)
+        is_internal = p.extra_info_no_name() and re.search(r'\binternal\b', p.extra_info_no_name(), flags=re.I)
+        codec = _get_codec(p.extra_info_no_name())
         break
     return level, is_internal, codec
 
@@ -158,13 +158,13 @@ def _get_proper_list(aired_since_shows, recent_shows, recent_anime):
             logger.log(u'Found new proper: ' + x.name, logger.DEBUG)
             x.show = parse_result.show.indexerid
             x.provider = cur_provider
-            x.is_repack, x.properlevel = Quality.get_proper_level(parse_result.extra_info_no_name,
+            x.is_repack, x.properlevel = Quality.get_proper_level(parse_result.extra_info_no_name(),
                                                                   parse_result.version,
                                                                   parse_result.is_anime,
                                                                   check_is_repack=True)
-            x.is_internal = parse_result.extra_info_no_name and \
-                re.search(r'\binternal\b', parse_result.extra_info_no_name, flags=re.I)
-            x.codec = _get_codec(parse_result.extra_info_no_name)
+            x.is_internal = parse_result.extra_info_no_name() and \
+                re.search(r'\binternal\b', parse_result.extra_info_no_name(), flags=re.I)
+            x.codec = _get_codec(parse_result.extra_info_no_name())
             propers[name] = x
             count += 1
         except (InvalidNameException, InvalidShowException):
@@ -237,7 +237,7 @@ def _get_proper_list(aired_since_shows, recent_shows, recent_anime):
         # only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones)
         # don't take proper of the same level we already downloaded
         old_status, old_quality = Quality.splitCompositeStatus(int(sql_results[0]['status']))
-        cur_proper.is_repack, cur_proper.proper_level = Quality.get_proper_level(cur_proper.extra_info_no_name,
+        cur_proper.is_repack, cur_proper.proper_level = Quality.get_proper_level(cur_proper.extra_info_no_name(),
                                                                                  cur_proper.version,
                                                                                  cur_proper.is_anime,
                                                                                  check_is_repack=True)
@@ -252,7 +252,7 @@ def _get_proper_list(aired_since_shows, recent_shows, recent_anime):
 
         np = NameParser(False, try_scene_exceptions=True, showObj=parse_result.show, indexer_lookup=False)
         try:
-            extra_info = np.parse(sql_results[0]['release_name']).extra_info_no_name
+            extra_info = np.parse(sql_results[0]['release_name']).extra_info_no_name()
         except (StandardError, Exception):
             extra_info = None
 
@@ -589,7 +589,7 @@ class GenericProvider:
                 result.content = None
                 result.version = version
                 result.size, result.puid = self.get_size_uid(item, **kwargs)
-                result.is_repack, result.properlevel = Quality.get_proper_level(parse_result.extra_info_no_name,
+                result.is_repack, result.properlevel = Quality.get_proper_level(parse_result.extra_info_no_name(),
                                                                                 parse_result.version, show_obj.is_anime,
                                                                                 check_is_repack=True)
 
@@ -342,6 +342,18 @@ class TVCache:
             result.release_group = curReleaseGroup
             result.version = curVersion
             result.content = None
+            np = NameParser(False, showObj=showObj)
+            try:
+                parsed_result = np.parse(title)
+                extra_info_no_name = parsed_result.extra_info_no_name()
+                version = parsed_result.version
+                is_anime = parsed_result.is_anime
+            except (StandardError, Exception):
+                extra_info_no_name = None
+                version = -1
+                is_anime = False
+            result.is_repack, result.properlevel = Quality.get_proper_level(extra_info_no_name, version, is_anime,
+                                                                            check_is_repack=True)
 
             # add it to the list
             if epObj not in neededEps:
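
This cache hunk also shows the calling convention used throughout the diff: Quality.get_proper_level appears to return a bare level by default and an (is_repack, level) pair when check_is_repack=True, with None, -1 and False as safe inputs when the cached title cannot be parsed. A stub of that shape, inferred from the call sites only; the real implementation in common.py weighs more signals:

# Stub inferred from the call sites in this diff, not copied from common.py;
# the real get_proper_level also weighs other markers and edge cases.
import re


def get_proper_level(extra_info_no_name, version, is_anime, check_is_repack=False):
    level, is_repack = 0, False
    if extra_info_no_name:
        if re.search(r'\brepack\b', extra_info_no_name, flags=re.I):
            is_repack, level = True, 1
        elif re.search(r'\b(proper|real)\b', extra_info_no_name, flags=re.I):
            level = 1
    if is_anime and isinstance(version, int) and 1 < version:
        level = max(level, version - 1)  # assumption: anime version bumps raise the level
    if check_is_repack:
        return is_repack, level
    return level


print(get_proper_level('REPACK internal', -1, False, check_is_repack=True))  # (True, 1)
print(get_proper_level(None, -1, False))                                     # 0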
@@ -19,7 +19,7 @@ class QualityTests(unittest.TestCase):
         np = NameParser(False, indexer_lookup=False, try_scene_exceptions=False, testing=True)
         for case, level in cases:
             p = np.parse(case)
-            second = common.Quality.get_proper_level(p.extra_info_no_name, p.version, is_anime)
+            second = common.Quality.get_proper_level(p.extra_info_no_name(), p.version, is_anime)
             self.assertEqual(level, second, 'fail %s != %s for case: %s' % (level, second, case))
 
         # TODO: repack / proper ? air-by-date ? season rip? multi-ep?