Merge branch 'feature/ChangeAnyGen' into dev

JackDandy 2023-03-06 23:55:14 +00:00
commit a0d379595c
10 changed files with 24 additions and 22 deletions

View file

@@ -4,6 +4,8 @@
* Change remove calls to legacy py2 fix encoding function
* Change requirements for pure py3
* Change codebase cleanups
+* Change improve perf by using generators with `any`
[develop changelog]
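
The changelog entry above describes the change repeated in every hunk below: list comprehensions passed to `any(...)` become generator expressions. A minimal sketch of why that can improve performance, using made-up names (`expensive_check`, `items`) rather than anything from the codebase: a list comprehension builds every element before `any` looks at the first one, while a generator lets `any` short-circuit at the first truthy value.

```python
# Illustrative only; the names below are not from SickGear.

def expensive_check(value):
    """Stand-in for a costly predicate (regex search, disk or network lookup, ...)."""
    print('checking %s' % value)
    return 'pro' in value

items = ['back-art', 'pro', 'ii', 'banner']

# List form: expensive_check runs for all four items before any() sees a result.
result_list = any([expensive_check(x) for x in items])

# Generator form: evaluation stops after 'pro' returns True (two calls, not four).
result_gen = any(expensive_check(x) for x in items)

assert result_list == result_gen  # same answer, fewer calls in the generator case
```

The saving matters most when each element is costly to produce, for example the compiled-regex `.search` calls in the `ShowInfoFilter` and header-parsing hunks further down.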

View file

@@ -105,7 +105,7 @@
#except
#pass
#end try
-#if not any([x in $body_attr for x in ['back-art', 'pro', 'ii']])
+#if not any(x in $body_attr for x in ['back-art', 'pro', 'ii'])
#set $parts = $body_attr.split('class="')
#set $body_attr = ('class="%s '.join($parts), $parts[0] + ' class="%s"')[1 == len($parts)] % {0: '', 1: 'pro', 2: 'pro ii'}.get(getattr($sickgear, 'DISPLAY_SHOW_VIEWMODE', 0))
#end if

View file

@@ -381,7 +381,7 @@ class Plex(object):
section_path = re.sub(r'[/\\]+', '/', section.find('Location').get('path').lower())
section_path = re.sub(r'^(.{,2})[/\\]', '', section_path)
-if not any([section_path in path for path in self.section_filter_path]):
+if not any(section_path in path for path in self.section_filter_path):
continue
if section.get('key') not in self.ignore_sections \

View file

@@ -195,7 +195,7 @@ class ShowInfoFilter(object):
return isinstance(show_info, dict) \
and 'seriesname' in show_info \
and isinstance(show_info['seriesname'], string_types) \
-and any([x.search(show_info['seriesname']) for x in self.bad_names])
+and any(x.search(show_info['seriesname']) for x in self.bad_names)
@staticmethod
def _fix_firstaired(show_info):

View file

@@ -745,9 +745,9 @@ class NeededQualities(object):
else:
if not self.need_sd and min(wanted_qualities) <= NeededQualities.max_sd:
self.need_sd = True
-if not self.need_hd and any([i in NeededQualities.hd_qualities for i in wanted_qualities]):
+if not self.need_hd and any(i in NeededQualities.hd_qualities for i in wanted_qualities):
self.need_hd = True
-if not self.need_webdl and any([i in NeededQualities.webdl_qualities for i in wanted_qualities]):
+if not self.need_webdl and any(i in NeededQualities.webdl_qualities for i in wanted_qualities):
self.need_webdl = True
if not self.need_uhd and max(wanted_qualities) > NeededQualities.max_hd:
self.need_uhd = True

View file

@@ -501,8 +501,8 @@ class GenericProvider(object):
if time_left > datetime.timedelta(seconds=0):
if log_warning:
# Ensure provider name output (e.g. when displaying config/provs) instead of e.g. thread "Tornado"
-prepend = ('[%s] :: ' % self.name, '')[any([x.name in threading.current_thread().name
-for x in sickgear.providers.sorted_sources()])]
+prepend = ('[%s] :: ' % self.name, '')[any(x.name in threading.current_thread().name
+for x in sickgear.providers.sorted_sources())]
logger.log('%sToo many requests reached at %s, waiting for %s' % (
prepend, self.fmt_delta(self.tmr_limit_time), self.fmt_delta(time_left)), logger.WARNING)
return use_tmr_limit
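
The hunk above combines the generator with a tuple indexed by a boolean: `('[%s] :: ' % self.name, '')[any(...)]` selects the empty string when the current thread is already named after a provider, and the `'[name] :: '` prefix otherwise. A hedged sketch of the same pair of idioms, with hypothetical names (`log_prefix`, `provider_names`) standing in for the real objects:

```python
# Hedged sketch of the prepend pattern in the hunk above; the names here
# are illustrative stand-ins, not SickGear objects.
import threading

def log_prefix(provider_name, provider_names):
    thread_name = threading.current_thread().name
    # Tuple indexed by a bool: False -> 0 -> '[name] :: ', True -> 1 -> ''.
    # The generator lets any() stop at the first provider name found in the
    # thread name instead of building a full list first.
    return ('[%s] :: ' % provider_name, '')[any(
        name in thread_name for name in provider_names)]

# In a plain script the current thread is 'MainThread', so the name is prepended.
print(log_prefix('NZBgeek', ['NZBgeek', 'Rarbg']))  # -> '[NZBgeek] :: '
```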
@@ -1009,9 +1009,9 @@ class GenericProvider(object):
headers = [re.sub(
r'\s+', '',
-((any([cell.get_text()]) and any([rc[x].search(cell.get_text()) for x in iterkeys(rc)]) and cell.get_text())
-or (cell.attrs.get('id') and any([rc[x].search(cell['id']) for x in iterkeys(rc)]) and cell['id'])
-or (cell.attrs.get('title') and any([rc[x].search(cell['title']) for x in iterkeys(rc)]) and cell['title'])
+((any([cell.get_text()]) and any(rc[x].search(cell.get_text()) for x in iterkeys(rc)) and cell.get_text())
+or (cell.attrs.get('id') and any(rc[x].search(cell['id']) for x in iterkeys(rc)) and cell['id'])
+or (cell.attrs.get('title') and any(rc[x].search(cell['title']) for x in iterkeys(rc)) and cell['title'])
or next(iter(set(filter(lambda rz: any([rz]), [
next(iter(set(filter(lambda ry: any([ry]), [
cell.find(tag, **p) for p in [{attr: rc[x]} for x in iterkeys(rc)]]))), {}).get(attr)
@@ -1932,7 +1932,7 @@ class TorrentProvider(GenericProvider):
url_list = list(map(lambda u: '%s/' % u.rstrip('/'), url_list))
last_url, expire = sickgear.PROVIDER_HOMES.get(self.get_id(), ('', None))
url_drop = (url_exclude or []) + getattr(self, 'url_drop', [])
-if url_drop and any([url in last_url for url in url_drop]): # deprecate url
+if url_drop and any(url in last_url for url in url_drop): # deprecate url
last_url = ''
if 'site down' == last_url:

View file

@@ -911,9 +911,9 @@ class NewznabProvider(generic.NZBProvider):
# category ids
cat = []
if 'Episode' == mode or 'Season' == mode:
-if not (any([x in params for x in
-[v for c, v in iteritems(self.caps)
-if c not in [NewznabConstants.SEARCH_EPISODE, NewznabConstants.SEARCH_SEASON]]])):
+if not (any(x in params for x in
+[v for c, v in iteritems(self.caps)
+if c not in [NewznabConstants.SEARCH_EPISODE, NewznabConstants.SEARCH_SEASON]])):
logger.log('Show is missing either an id or search term for search')
continue
@@ -938,7 +938,7 @@
request_params = base_params.copy()
# if ('Propers' == mode or 'nzbs_org' == self.get_id()) \
if 'Propers' == mode \
-and 'q' in params and not (any([x in params for x in ['season', 'ep']])):
+and 'q' in params and not (any(x in params for x in ['season', 'ep'])):
request_params['t'] = 'search'
request_params.update(params)
@@ -1048,10 +1048,10 @@
if exit_log:
self._log_search(mode, len(results), search_url)
-if not try_all_searches and any([x in request_params for x in [
+if not try_all_searches and any(x in request_params for x in [
v for c, v in iteritems(self.caps)
if c not in [NewznabConstants.SEARCH_EPISODE, NewznabConstants.SEARCH_SEASON,
-NewznabConstants.SEARCH_TEXT]]]) and len(results):
+NewznabConstants.SEARCH_TEXT]]) and len(results):
break
return results, n_spaces

View file

@@ -583,6 +583,6 @@ def has_abs_episodes(ep_obj=None, name=None):
:return:
:rtype: bool
"""
-return any([(name or ep_obj.show_obj.name or '').lower().startswith(x.lower()) for x in [
+return any((name or ep_obj.show_obj.name or '').lower().startswith(x.lower()) for x in [
'The Eighties', 'The Making of the Mob', 'The Night Of', 'Roots 2016', 'Trepalium'
-]])
+])

View file

@@ -3042,7 +3042,7 @@ class TVShow(TVShowBase):
page_url = 'https://www.imdb.com/title/{0}/'.format(imdb_id)
try:
response = requests.head(page_url, allow_redirects=True)
-if response.history and any([h for h in response.history if 301 == h.status_code]):
+if response.history and any(h for h in response.history if 301 == h.status_code):
return helpers.parse_imdb_id(response.url)
except (BaseException, Exception):
pass
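
For context on the hunk above, `requests` records each intermediate response in `response.history` when `allow_redirects=True`, so scanning that history for a 301 detects a permanently moved IMDb title page. A minimal, self-contained sketch of that check (the title id in the URL is only an example, and the function name is hypothetical):

```python
# Hedged sketch: detect a permanent redirect with requests and return the
# final URL; not the SickGear implementation.
import requests

def final_url_if_moved(page_url):
    try:
        response = requests.head(page_url, allow_redirects=True, timeout=10)
    except requests.RequestException:
        return None
    # response.history holds each intermediate response, oldest first.
    if any(301 == h.status_code for h in response.history):
        return response.url
    return None

print(final_url_if_moved('https://www.imdb.com/title/tt0944947/'))
```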

View file

@@ -600,7 +600,7 @@ class ApiCall(object):
elif isinstance(value, string_types):
if '|' in value:
li = [int(v) for v in value.split('|')]
-if any([not isinstance(v, integer_types) for v in li]):
+if any(not isinstance(v, integer_types) for v in li):
error = True
else:
value = li
@@ -610,7 +610,7 @@
error = True
else:
li = value.split('|')
-if any([sub_type is not type(v) for v in li]):
+if any(sub_type is not type(v) for v in li):
error = True
else:
value = li