diff --git a/gui/slick/interfaces/default/cast_person.tmpl b/gui/slick/interfaces/default/cast_person.tmpl
index 3ce066a3..3d9b9568 100644
--- a/gui/slick/interfaces/default/cast_person.tmpl
+++ b/gui/slick/interfaces/default/cast_person.tmpl
@@ -182,7 +182,11 @@ def param(visible=True, rid=None, cache_person=None, cache_char=None, person=Non
 #end if
 #set $section_links = False
+#set $all_sources = $TVInfoAPI().all_sources
 #for $cur_src, $cur_sid in sorted(iteritems($person.ids))
+    #if $cur_src not in $all_sources:
+        #continue
+    #end if
     #if $TVInfoAPI($cur_src).config.get('people_url')
         #if not $section_links
             #set $section_links = True
diff --git a/gui/slick/interfaces/default/config_general.tmpl b/gui/slick/interfaces/default/config_general.tmpl
index 9f604721..e776220f 100644
--- a/gui/slick/interfaces/default/config_general.tmpl
+++ b/gui/slick/interfaces/default/config_general.tmpl
@@ -13,7 +13,6 @@
 #from sickgear.sgdatetime import *
 <% def sg_var(varname, default=False): return getattr(sickgear, varname, default) %>#slurp#
 <% def sg_str(varname, default=''): return getattr(sickgear, varname, default) %>#slurp#
-#from _23 import list_keys
 ##
 #set global $title = 'Config - General'
 #set global $header = 'General Settings'
@@ -846,7 +845,7 @@
 File logging level:
 #for $cur_quality in sorted($any_quality_list):
@@ -96,7 +95,7 @@

 Upgrade to

-#set $best_quality_list = filter_list(lambda x: x > $Quality.SDTV and x < $Quality.UNKNOWN, $Quality.qualityStrings)
+#set $best_quality_list = list(filter(lambda x: x > $Quality.SDTV and x < $Quality.UNKNOWN, $Quality.qualityStrings))
 #for $curQuality in sorted($anyQualityList):
@@ -78,7 +77,7 @@

 Upgrade to

-#set $bestQualityList = filter_list(lambda x: x > $Quality.SDTV, $Quality.qualityStrings)
+#set $bestQualityList = list(filter(lambda x: x > $Quality.SDTV, $Quality.qualityStrings))
-#set $levels = $list_keys($reverseNames)
+#set $levels = $list($reverseNames)
 #set void = $levels.sort(key=lambda x: $reverseNames[$x])
 #set $level_count = len($levels)
 #for $level in $levels
diff --git a/lib/_23.py b/lib/_23.py
index ea8835d6..d8f2b0c5 100644
--- a/lib/_23.py
+++ b/lib/_23.py
@@ -19,8 +19,8 @@ import datetime
 from collections import deque
 from itertools import islice
 from sys import version_info
+from base64 import encodebytes as b64encodebytes
-from six import binary_type, moves
 # noinspection PyUnresolvedReferences
 from six.moves.urllib.parse import quote, quote_plus, unquote as six_unquote, unquote_plus as six_unquote_plus, \
     urlencode, urlsplit, urlunparse, urlunsplit
@@ -42,15 +42,11 @@ if False:
 PY38 = version_info[0:2] >= (3, 8)

-""" one off consumables (Iterators) """
-filter_iter = moves.filter  # type: Callable[[Callable, Iterable], Iterator]
-map_iter = moves.map  # type: Callable[[Callable, ...], Iterator]
-
 def map_consume(*args):
     # type: (...) -> None
     """Run a lambda over elements without returning anything"""
-    deque(moves.map(*args), maxlen=0)
+    deque(map(*args), maxlen=0)

 def consume(iterator, n=None):
@@ -76,7 +72,7 @@
 def decode_str(s, encoding='utf-8', errors=None):
     # type: (...) -> AnyStr
-    if isinstance(s, binary_type):
+    if isinstance(s, bytes):
         if None is errors:
             return s.decode(encoding)
         return s.decode(encoding, errors)
@@ -99,7 +95,7 @@ def html_unescape(s):
 def list_range(*args, **kwargs):
     # type: (...) -> List
-    return list(moves.range(*args, **kwargs))
+    return list(range(*args, **kwargs))

 def urlparse(url, scheme='', allow_fragments=True):
@@ -135,181 +131,45 @@ def b64encodestring(s, keep_eol=False):
     return data.rstrip()

-if 2 != version_info[0]:
-    # ---------
-    # Python 3+
-    # ---------
-    # noinspection PyUnresolvedReferences,PyProtectedMember
-    from base64 import decodebytes, encodebytes
-    b64decodebytes = decodebytes
-    b64encodebytes = encodebytes
-    # noinspection PyUnresolvedReferences,PyCompatibility
-    from configparser import ConfigParser
-    # noinspection PyUnresolvedReferences
-    from enum import Enum
-    # noinspection PyUnresolvedReferences
-    from os import scandir, DirEntry
-    # noinspection PyUnresolvedReferences
-    from itertools import zip_longest
-    # noinspection PyUnresolvedReferences
-    from inspect import getfullargspec as getargspec
+# noinspection PyUnresolvedReferences,PyProtectedMember
+# noinspection PyUnresolvedReferences,PyCompatibility
+from configparser import ConfigParser
+# noinspection PyUnresolvedReferences
+from enum import Enum
+# noinspection PyUnresolvedReferences
+from os import scandir, DirEntry
+# noinspection PyUnresolvedReferences
+from itertools import zip_longest
+# noinspection PyUnresolvedReferences
+from inspect import getfullargspec as getargspec

-    # noinspection PyUnresolvedReferences
-    from subprocess import Popen
+# noinspection PyUnresolvedReferences
+from subprocess import Popen

-    # noinspection PyUnresolvedReferences, PyPep8Naming
-    import xml.etree.ElementTree as etree
+# noinspection PyUnresolvedReferences, PyPep8Naming
+import xml.etree.ElementTree as etree

-    ordered_dict = dict
+native_timestamp = datetime.datetime.timestamp  # type: Callable[[datetime.datetime], float]

-    native_timestamp = datetime.datetime.timestamp  # type: Callable[[datetime.datetime], float]
-
-    def unquote(string, encoding='utf-8', errors='replace'):
-        return decode_str(six_unquote(decode_str(string, encoding, errors), encoding=encoding, errors=errors),
-                          encoding, errors)
+def unquote(string, encoding='utf-8', errors='replace'):
+    return decode_str(six_unquote(decode_str(string, encoding, errors), encoding=encoding, errors=errors),
+                      encoding, errors)

-    def unquote_plus(string, encoding='utf-8', errors='replace'):
-        return decode_str(six_unquote_plus(decode_str(string, encoding, errors), encoding=encoding, errors=errors),
-                          encoding, errors)
-
-    def decode_bytes(d, encoding='utf-8', errors='replace'):
-        if not isinstance(d, binary_type):
-            # noinspection PyArgumentList
-            return bytes(d, encoding=encoding, errors=errors)
-        return d
+def unquote_plus(string, encoding='utf-8', errors='replace'):
+    return decode_str(six_unquote_plus(decode_str(string, encoding, errors), encoding=encoding, errors=errors),
+                      encoding, errors)

-    def filter_list(*args):
-        # type: (...) -> List
-        return list(filter(*args))
-
-    def list_items(d):
-        # type: (Dict) -> List[Tuple[Any, Any]]
-        """
-        equivalent to python 2 .items()
-        """
-        return list(d.items())
+def decode_bytes(d, encoding='utf-8', errors='replace'):
+    if not isinstance(d, bytes):
+        # noinspection PyArgumentList
+        return bytes(d, encoding=encoding, errors=errors)
+    return d

-    def list_keys(d):
-        # type: (Dict) -> List
-        """
-        equivalent to python 2 .keys()
-        """
-        return list(d)
-
-    def list_values(d):
-        # type: (Dict) -> List
-        """
-        equivalent to python 2 .values()
-        """
-        return list(d.values())
+def map_none(*args):
+    # type: (...) -> List
+    return list(zip_longest(*args))

-    def map_list(*args):
-        # type: (...) -> List
-        return list(map(*args))
-
-    def map_none(*args):
-        # type: (...) -> List
-        return list(zip_longest(*args))
-
-    def unidecode(data):
-        # type: (AnyStr) -> AnyStr
-        return data
-
-else:
-    # ---------
-    # Python 2
-    # ---------
-    import time
-    from lib.unidecode import unidecode as unicode_decode
-    # noinspection PyProtectedMember,PyDeprecation
-    from base64 import decodestring, encodestring
-    # noinspection PyDeprecation
-    b64decodebytes = decodestring
-    # noinspection PyDeprecation
-    b64encodebytes = encodestring
-    # noinspection PyUnresolvedReferences
-    from lib.backports.configparser import ConfigParser
-    # noinspection PyUnresolvedReferences
-    from lib.enum34 import Enum
-    # noinspection PyProtectedMember,PyUnresolvedReferences
-    from lib.scandir.scandir import scandir, GenericDirEntry as DirEntry
-    # noinspection PyUnresolvedReferences,PyDeprecation
-    from inspect import getargspec
-
-    try:
-        # noinspection PyPep8Naming
-        import xml.etree.cElementTree as etree
-    except ImportError:
-        # noinspection PyPep8Naming
-        import xml.etree.ElementTree as etree
-
-    from collections import OrderedDict
-    ordered_dict = OrderedDict
-
-    def _totimestamp(dt=None):
-        # type: (datetime.datetime) -> float
-        """ This function should only be used in this module due to its 1970s+ limitation as that's all we need here and
-        sgdatatime can't be used at this module level
-        """
-        return time.mktime(dt.timetuple())
-
-    native_timestamp = _totimestamp  # type: Callable[[datetime.datetime], float]
-
-    from subprocess import Popen as _Popen
-
-    class Popen(_Popen):
-
-        def __enter__(self):
-            return self
-
-        def __exit__(self, *args, **kwargs):
-            for x in filter_iter(lambda y: y, [self.stdout, self.stderr, self.stdin]):
-                x.close()
-            self.wait()
-
-    def unquote(string, encoding='utf-8', errors='replace'):
-        return decode_str(six_unquote(decode_str(string, encoding, errors)),
-                          encoding, errors)
-
-    def unquote_plus(string, encoding='utf-8', errors='replace'):
-        return decode_str(six_unquote_plus(decode_str(string, encoding, errors)), encoding, errors)
-
-    # noinspection PyUnusedLocal
-    def decode_bytes(d, encoding='utf-8', errors='replace'):
-        if not isinstance(d, binary_type):
-            return bytes(d)
-        return d
-
-    def filter_list(*args):
-        # type: (...) -> List
-        # noinspection PyTypeChecker
-        return filter(*args)
-
-    def list_items(d):
-        # type: (Dict) -> List[Tuple[Any, Any]]
-        # noinspection PyTypeChecker
-        return d.items()
-
-    def list_keys(d):
-        # type: (Dict) -> List
-        # noinspection PyTypeChecker
-        return d.keys()
-
-    def list_values(d):
-        # type: (Dict) -> List
-        # noinspection PyTypeChecker
-        return d.values()
-
-    def map_list(*args):
-        # type: (...) -> List
-        # noinspection PyTypeChecker
-        return map(*args)
-
-    def map_none(*args):
-        # type: (...) -> List
-        # noinspection PyTypeChecker
-        return map(None, *args)
-
-    def unidecode(data):
-        # type: (AnyStr) -> AnyStr
-        # noinspection PyUnresolvedReferences
-        return isinstance(data, unicode) and unicode_decode(data) or data
diff --git a/lib/api_tmdb/tmdb_api.py b/lib/api_tmdb/tmdb_api.py
index c7db1dfc..2f3a8fad 100644
--- a/lib/api_tmdb/tmdb_api.py
+++ b/lib/api_tmdb/tmdb_api.py
@@ -21,7 +21,6 @@ from lib.tvinfo_base import CastList, PersonGenders, RoleTypes, \
 from json_helper import json_dumps
 from sg_helpers import clean_data, get_url, iterate_chunk, try_int
-from _23 import filter_list
 from six import iteritems

 # noinspection PyUnreachableCode
@@ -682,12 +681,12 @@
                     season_cast_obj['id'] for season_cast_obj in season_data[season_obj[0]].get('cast') or []])

-            for person_obj in sorted(filter_list(lambda a: a['id'] in main_cast_ids,
-                                                 show_data['aggregate_credits']['cast'] or [])[:50],
+            for person_obj in sorted(list(filter(lambda a: a['id'] in main_cast_ids,
+                                                 show_data['aggregate_credits']['cast'] or []))[:50],
                                      key=lambda c: (main_cast_ids.get(c['id'], 0) or 0, c['total_episode_count'],
                                                     c['order'] * -1), reverse=True):
-                for character in sorted(filter_list(lambda b: b['credit_id'] in main_cast_credit_ids,
-                                                    person_obj.get('roles', []) or []),
+                for character in sorted(list(filter(lambda b: b['credit_id'] in main_cast_credit_ids,
+                                                    person_obj.get('roles', []) or [])),
                                         key=lambda c: c['episode_count'], reverse=True):
                     character_obj = TVInfoCharacter(
                         name=clean_data(character['character']),
diff --git a/lib/api_tvdb/tvdb_api.py b/lib/api_tvdb/tvdb_api.py
index 009b91d7..dc679ad5 100644
--- a/lib/api_tvdb/tvdb_api.py
+++ b/lib/api_tvdb/tvdb_api.py
@@ -39,7 +39,6 @@ from lib.tvinfo_base import CastList, TVInfoCharacter, CrewList, TVInfoPerson, R
 from .tvdb_exceptions import TvdbError, TvdbShownotfound, TvdbTokenexpired
 from .tvdb_ui import BaseUI, ConsoleUI
-from _23 import filter_list, list_keys, list_values, map_list
 from six import integer_types, iteritems, PY2, string_types

 # noinspection PyUnreachableCode
@@ -290,7 +289,7 @@
             'nl': 'nld', 'no': 'nor', 'pl': 'pol', 'pt': 'pot', 'ru': 'rus', 'sk': 'slv', 'sv': 'swe', 'zh': 'zho',
             '_1': 'srp',
         }
-        self.config['valid_languages_3'] = list_values(self.config['langabbv_23'])
+        self.config['valid_languages_3'] = list(self.config['langabbv_23'].values())

         # TheTvdb.com should be based around numeric language codes,
         # but to link to a series like http://thetvdb.com/?tab=series&id=79349&lid=16
@@ -358,7 +357,7 @@
                 else:
                     d_m = shows
                 if d_m:
-                    results = map_list(map_data, [d_m['data']])
+
results = list(map(map_data, [d_m['data']])) if ids.get(TVINFO_TVDB_SLUG): cache_id_key = 's-id-%s-%s' % (TVINFO_TVDB, ids[TVINFO_TVDB_SLUG]) is_none, shows = self._get_cache_entry(cache_id_key) @@ -373,7 +372,7 @@ class Tvdb(TVInfoBase): if d_m: for r in d_m: if ids.get(TVINFO_TVDB_SLUG) == r['slug']: - results = map_list(map_data, [r]) + results = list(map(map_data, [r])) break if name: for n in ([name], name)[isinstance(name, list)]: @@ -390,7 +389,7 @@ class Tvdb(TVInfoBase): if r: if not isinstance(r, list): r = [r] - results.extend(map_list(map_data, r)) + results.extend(list(map(map_data, r))) seen = set() results = [seen.add(r['id']) or r for r in results if r['id'] not in seen] @@ -613,8 +612,8 @@ class Tvdb(TVInfoBase): # type: (int, Optional[str]) -> Optional[dict] results = self.search_tvs(sid, language=language) for cur_result in (isinstance(results, dict) and results.get('results') or []): - result = filter_list(lambda r: 'series' == r['type'] and sid == r['id'], - cur_result.get('nbHits') and cur_result.get('hits') or []) + result = list(filter(lambda r: 'series' == r['type'] and sid == r['id'], + cur_result.get('nbHits') and cur_result.get('hits') or [])) if 1 == len(result): result[0]['overview'] = self.clean_overview( result[0]['overviews'][self.config['langabbv_23'].get(language) or 'eng']) @@ -627,7 +626,7 @@ class Tvdb(TVInfoBase): # notify of new keys if ENV.get('SG_DEV_MODE'): - new_keys = set(list_keys(result[0])).difference({ + new_keys = set(list(result[0])).difference({ '_highlightResult', 'aliases', 'banner', 'fanart', 'firstaired', 'follower_count', 'id', 'image', 'is_tvdb_searchable', 'is_tvt_searchable', @@ -788,7 +787,7 @@ class Tvdb(TVInfoBase): series_found = self._getetsrc(self.config['url_search_series'], params=self.config['params_search_series'], language=self.config['language']) if series_found: - return list_values(series_found)[0] + return list(series_found.values())[0] except (BaseException, Exception): pass @@ -899,15 +898,15 @@ class Tvdb(TVInfoBase): try: for cur_result in (isinstance(results, dict) and results.get('results') or []): # sorts 'banners/images/missing/' to last before filter - people = filter_list( + people = list(filter( lambda r: 'person' == r['type'] and rc_clean.sub(name, '') == rc_clean.sub(r['name'], ''), cur_result.get('nbHits') and sorted(cur_result.get('hits'), - key=lambda x: len(x['image']), reverse=True) or []) + key=lambda x: len(x['image']), reverse=True) or [])) if ENV.get('SG_DEV_MODE'): for person in people: - new_keys = set(list_keys(person)).difference({ + new_keys = set(list(person)).difference({ '_highlightResult', 'banner', 'id', 'image', 'is_tvdb_searchable', 'is_tvt_searchable', 'name', 'objectID', 'people_birthdate', 'people_died', diff --git a/lib/api_tvmaze/tvmaze_api.py b/lib/api_tvmaze/tvmaze_api.py index 75400769..76cec287 100644 --- a/lib/api_tvmaze/tvmaze_api.py +++ b/lib/api_tvmaze/tvmaze_api.py @@ -27,7 +27,6 @@ from lib.tvinfo_base import TVInfoBase, TVInfoImage, TVInfoImageSize, TVInfoImag crew_type_names, TVInfoPerson, RoleTypes, TVInfoShow, TVInfoEpisode, TVInfoIDs, TVInfoNetwork, TVInfoSeason, \ PersonGenders, TVINFO_TVMAZE, TVINFO_TVDB, TVINFO_IMDB -from _23 import filter_iter from six import integer_types, iteritems, string_types # noinspection PyUnreachableCode @@ -683,7 +682,7 @@ class TvMaze(TVInfoBase): premieres = [] returning = [] rc_lang = re.compile('(?i)eng|jap') - for cur_show in filter_iter(lambda s: 1 == s.episode_number and ( + for cur_show in filter(lambda s: 1 == 
s.episode_number and ( None is s.show.language or rc_lang.search(s.show.language)), schedule): if 1 == cur_show.season_number: premieres += [cur_show] diff --git a/lib/enzyme/fourcc.py b/lib/enzyme/fourcc.py index 6f6cd324..a421443d 100644 --- a/lib/enzyme/fourcc.py +++ b/lib/enzyme/fourcc.py @@ -21,7 +21,7 @@ import string import re import struct from six import string_types, integer_types -from _23 import decode_str, list_items +from _23 import decode_str __all__ = ['resolve'] @@ -845,7 +845,7 @@ FOURCC = { } # make it fool prove -for code, value in list_items(FOURCC): +for code, value in list(FOURCC.items()): if not code.upper() in FOURCC: FOURCC[code.upper()] = value if code.endswith(' '): diff --git a/lib/rtorrent/__init__.py b/lib/rtorrent/__init__.py index 36544fec..e5c554e1 100644 --- a/lib/rtorrent/__init__.py +++ b/lib/rtorrent/__init__.py @@ -36,8 +36,6 @@ from .rpc import Method from .torrent import Torrent, methods as torrent_methods from .tracker import Tracker, methods as tracker_methods -from _23 import filter_iter, filter_list, map_list - __version__ = '0.2.10' __author__ = 'Chris Lucas' @@ -184,15 +182,16 @@ class RTorrent(object): @todo: add validity check for specified view """ self.torrents = [] - retriever_methods = filter_list(lambda m: m.is_retriever() and m.is_available(self), torrent_methods) + retriever_methods = list(filter(lambda m: m.is_retriever() and m.is_available(self), torrent_methods)) mc = rpc.Multicall(self) if self.method_exists('d.multicall2'): mc.add('d.multicall2', '', view, 'd.hash=', - *map_list(lambda m2: ((getattr(m2, 'aliases') or [''])[-1] or m2.rpc_call) + '=', retriever_methods)) + *list(map(lambda m2: ((getattr(m2, 'aliases') or [''])[-1] or m2.rpc_call) + '=', + retriever_methods))) else: mc.add('d.multicall', view, 'd.get_hash=', - *map_list(lambda m1: m1.rpc_call + '=', retriever_methods)) + *list(map(lambda m1: m1.rpc_call + '=', retriever_methods))) results = mc.call()[0] # only sent one call, only need first result @@ -240,7 +239,7 @@ class RTorrent(object): try: call, arg = x.split('=') method = rpc.find_method(call) - method_name = next(filter_iter(lambda m: self.method_exists(m), (method.rpc_call,) + method.aliases)) + method_name = next(filter(lambda m: self.method_exists(m), (method.rpc_call,) + method.aliases)) param += ['%s=%s' % (method_name, arg)] except (BaseException, Exception): pass @@ -267,7 +266,7 @@ class RTorrent(object): max_retries = 10 while max_retries: try: - t = next(filter_iter(lambda td: td.info_hash.upper() == info_hash, self.get_torrents())) + t = next(filter(lambda td: td.info_hash.upper() == info_hash, self.get_torrents())) break except (BaseException, Exception): time.sleep(self.request_interval) @@ -326,7 +325,7 @@ class RTorrent(object): if verify_load: while verify_retries: try: - t = next(filter_iter(lambda td: td.info_hash == info_hash, self.get_torrents())) + t = next(filter(lambda td: td.info_hash == info_hash, self.get_torrents())) break except (BaseException, Exception): time.sleep(self.request_interval) @@ -437,7 +436,7 @@ class RTorrent(object): method = rpc.find_method('d.get_local_id') result = True try: - func = next(filter_iter(lambda m: self.method_exists(m), (method.rpc_call,) + method.aliases)) + func = next(filter(lambda m: self.method_exists(m), (method.rpc_call,) + method.aliases)) getattr(self.get_connection(), func)(info_hash) except (BaseException, Exception): result = False @@ -466,7 +465,7 @@ class RTorrent(object): """ mc = rpc.Multicall(self) - for method in filter_iter(lambda 
m: m.is_retriever() and m.is_available(self), methods): + for method in filter(lambda m: m.is_retriever() and m.is_available(self), methods): mc.add(method) mc.call() diff --git a/lib/rtorrent/file.py b/lib/rtorrent/file.py index 6b8d38f6..a13d76ff 100644 --- a/lib/rtorrent/file.py +++ b/lib/rtorrent/file.py @@ -22,8 +22,6 @@ from . import rpc from .common import safe_repr from .rpc import Method -from _23 import filter_iter - class File(object): """Represents an individual file within a L{Torrent} instance.""" @@ -48,7 +46,7 @@ class File(object): """ mc = rpc.Multicall(self) - for method in filter_iter(lambda m: m.is_retriever() and m.is_available(self._rt_obj), methods): + for method in filter(lambda m: m.is_retriever() and m.is_available(self._rt_obj), methods): mc.add(method, self.rpc_id) mc.call() diff --git a/lib/rtorrent/group.py b/lib/rtorrent/group.py index 82c8fdc0..c64884a6 100644 --- a/lib/rtorrent/group.py +++ b/lib/rtorrent/group.py @@ -21,8 +21,6 @@ from . import rpc from .rpc import Method -from _23 import filter_iter - class Group(object): __name__ = 'Group' @@ -72,7 +70,7 @@ class Group(object): def _get_method(self, *choices): try: - return next(filter_iter(lambda method: self._rt_obj.method_exists(method), choices)) + return next(filter(lambda method: self._rt_obj.method_exists(method), choices)) except (BaseException, Exception): pass diff --git a/lib/rtorrent/rpc/__init__.py b/lib/rtorrent/rpc/__init__.py index bda54fcd..abd824c2 100644 --- a/lib/rtorrent/rpc/__init__.py +++ b/lib/rtorrent/rpc/__init__.py @@ -27,8 +27,6 @@ import re import rtorrent -from _23 import filter_iter, map_list - def get_varname(rpc_call): """Transform rpc method into variable name. @@ -94,8 +92,8 @@ class Method(object): if rt_obj.get_client_version_tuple() >= self.min_version: try: - self.varname = get_varname(next(filter_iter(lambda f: rt_obj.method_exists(f), - (self.rpc_call,) + tuple(getattr(self, 'aliases', ''))))) + self.varname = get_varname(next(filter(lambda f: rt_obj.method_exists(f), + (self.rpc_call,) + tuple(getattr(self, 'aliases', ''))))) return True except (BaseException, Exception): pass @@ -162,7 +160,7 @@ class Multicall(object): getattr(xmc, rpc_call)(*args) try: - results = tuple(next(filter_iter(lambda x: isinstance(x, list), xmc().results))) + results = tuple(next(filter(lambda x: isinstance(x, list), xmc().results))) except (BaseException, Exception): return [[]] @@ -216,8 +214,8 @@ def find_method(rpc_call): """Return L{Method} instance associated with given RPC call""" try: rpc_call = rpc_call.lower() - return next(filter_iter(lambda m: rpc_call in map_list( - lambda n: n.lower(), [m.rpc_call] + list(getattr(m, 'aliases', []))), + return next(filter(lambda m: rpc_call in list(map( + lambda n: n.lower(), [m.rpc_call] + list(getattr(m, 'aliases', [])))), rtorrent.methods + rtorrent.torrent.methods + rtorrent.file.methods + rtorrent.tracker.methods + rtorrent.peer.methods)) except (BaseException, Exception): diff --git a/lib/rtorrent/torrent.py b/lib/rtorrent/torrent.py index e5574641..9b2e6ed9 100644 --- a/lib/rtorrent/torrent.py +++ b/lib/rtorrent/torrent.py @@ -25,8 +25,6 @@ from .peer import Peer, methods as peer_methods from .rpc import Method from .tracker import Tracker, methods as tracker_methods -from _23 import filter_iter, filter_list - class Torrent(object): """Represents an individual torrent within a L{RTorrent} instance.""" @@ -70,7 +68,7 @@ class Torrent(object): @note: also assigns return value to self.peers """ self.peers = [] - retriever_methods = 
filter_list(lambda m: m.is_retriever() and m.is_available(self._rt_obj), peer_methods) + retriever_methods = list(filter(lambda m: m.is_retriever() and m.is_available(self._rt_obj), peer_methods)) mc = rpc.Multicall(self) # need to leave 2nd arg empty (dunno why) @@ -97,7 +95,7 @@ class Torrent(object): @note: also assigns return value to self.trackers """ self.trackers = [] - retriever_methods = filter_list(lambda m: m.is_retriever() and m.is_available(self._rt_obj), tracker_methods) + retriever_methods = list(filter(lambda m: m.is_retriever() and m.is_available(self._rt_obj), tracker_methods)) mc = rpc.Multicall(self) # need to leave 2nd arg empty (dunno why) @@ -125,7 +123,7 @@ class Torrent(object): """ self.files = [] - retriever_methods = filter_list(lambda m: m.is_retriever() and m.is_available(self._rt_obj), file_methods) + retriever_methods = list(filter(lambda m: m.is_retriever() and m.is_available(self._rt_obj), file_methods)) mc = rpc.Multicall(self) # 2nd arg can be anything, but it'll return all files in torrent @@ -155,7 +153,7 @@ class Torrent(object): def _get_method(self, *choices): try: - return next(filter_iter(lambda method: self._rt_obj.method_exists(method), choices)) + return next(filter(lambda method: self._rt_obj.method_exists(method), choices)) except (BaseException, Exception): pass @@ -276,7 +274,7 @@ class Torrent(object): """ mc = rpc.Multicall(self) - for method in filter_iter(lambda m: m.is_retriever() and m.is_available(self._rt_obj), methods): + for method in filter(lambda m: m.is_retriever() and m.is_available(self._rt_obj), methods): mc.add(method, self.rpc_id) mc.call() diff --git a/lib/rtorrent/tracker.py b/lib/rtorrent/tracker.py index 06904075..c1682476 100644 --- a/lib/rtorrent/tracker.py +++ b/lib/rtorrent/tracker.py @@ -22,8 +22,6 @@ from . import rpc from .common import safe_repr from .rpc import Method -from _23 import filter_iter - class Tracker(object): """Represents an individual tracker within a L{Torrent} instance.""" @@ -64,7 +62,7 @@ class Tracker(object): """ mc = rpc.Multicall(self) - for method in filter_iter(lambda fx: fx.is_retriever() and fx.is_available(self._rt_obj), methods): + for method in filter(lambda fx: fx.is_retriever() and fx.is_available(self._rt_obj), methods): mc.add(method, self.rpc_id) mc.call() diff --git a/lib/scandir/LICENSE.txt b/lib/scandir/LICENSE.txt deleted file mode 100644 index 0759f503..00000000 --- a/lib/scandir/LICENSE.txt +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2012, Ben Hoyt -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -* Redistributions of source code must retain the above copyright notice, this -list of conditions and the following disclaimer. - -* Redistributions in binary form must reproduce the above copyright notice, -this list of conditions and the following disclaimer in the documentation -and/or other materials provided with the distribution. - -* Neither the name of Ben Hoyt nor the names of its contributors may be used -to endorse or promote products derived from this software without specific -prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/lib/scandir/__init__.py b/lib/scandir/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/lib/scandir/scandir.py b/lib/scandir/scandir.py deleted file mode 100644 index 9b9d7e71..00000000 --- a/lib/scandir/scandir.py +++ /dev/null @@ -1,697 +0,0 @@ -"""scandir, a better directory iterator and faster os.walk(), now in the Python 3.5 stdlib - -scandir() is a generator version of os.listdir() that returns an -iterator over files in a directory, and also exposes the extra -information most OSes provide while iterating files in a directory -(such as type and stat information). - -This module also includes a version of os.walk() that uses scandir() -to speed it up significantly. - -See README.md or https://github.com/benhoyt/scandir for rationale and -docs, or read PEP 471 (https://www.python.org/dev/peps/pep-0471/) for -more details on its inclusion into Python 3.5 - -scandir is released under the new BSD 3-clause license. See -LICENSE.txt for the full license text. -""" - -from __future__ import division - -from errno import ENOENT -from os import listdir, lstat, stat, strerror -from os.path import join, islink -from stat import S_IFDIR, S_IFLNK, S_IFREG -import collections -import sys - -try: - import _scandir -except ImportError: - _scandir = None - -try: - import ctypes -except ImportError: - ctypes = None - - -if None is not _scandir and None is not ctypes and not getattr(_scandir, 'DirEntry', None): - import warnings - warnings.warn("scandir compiled _scandir C module is too old, using slow generic fallback") - _scandir = None -elif _scandir is None and ctypes is None: - import warnings - warnings.warn("scandir can't find the compiled _scandir C module or ctypes, using slow generic fallback") - -__version__ = '1.10.0' -__all__ = ['scandir', 'walk'] - -# Windows FILE_ATTRIBUTE constants for interpreting the -# FIND_DATA.dwFileAttributes member -FILE_ATTRIBUTE_ARCHIVE = 32 -FILE_ATTRIBUTE_COMPRESSED = 2048 -FILE_ATTRIBUTE_DEVICE = 64 -FILE_ATTRIBUTE_DIRECTORY = 16 -FILE_ATTRIBUTE_ENCRYPTED = 16384 -FILE_ATTRIBUTE_HIDDEN = 2 -FILE_ATTRIBUTE_INTEGRITY_STREAM = 32768 -FILE_ATTRIBUTE_NORMAL = 128 -FILE_ATTRIBUTE_NOT_CONTENT_INDEXED = 8192 -FILE_ATTRIBUTE_NO_SCRUB_DATA = 131072 -FILE_ATTRIBUTE_OFFLINE = 4096 -FILE_ATTRIBUTE_READONLY = 1 -FILE_ATTRIBUTE_REPARSE_POINT = 1024 -FILE_ATTRIBUTE_SPARSE_FILE = 512 -FILE_ATTRIBUTE_SYSTEM = 4 -FILE_ATTRIBUTE_TEMPORARY = 256 -FILE_ATTRIBUTE_VIRTUAL = 65536 - -IS_PY3 = sys.version_info >= (3, 0) - -if IS_PY3: - unicode = str # Because Python <= 3.2 doesn't have u'unicode' syntax - - -class GenericDirEntry(object): - __slots__ = ('name', '_stat', '_lstat', '_scandir_path', '_path') - - def __init__(self, scandir_path, name): - self._scandir_path = scandir_path - self.name = name - self._stat = None - self._lstat = None - self._path = None - - @property - def path(self): - if self._path is None: - self._path = join(self._scandir_path, self.name) - return self._path - - def stat(self, follow_symlinks=True): - if 
follow_symlinks: - if self._stat is None: - self._stat = stat(self.path) - return self._stat - else: - if self._lstat is None: - self._lstat = lstat(self.path) - return self._lstat - - # The code duplication below is intentional: this is for slightly - # better performance on systems that fall back to GenericDirEntry. - # It avoids an additional attribute lookup and method call, which - # are relatively slow on CPython. - def is_dir(self, follow_symlinks=True): - try: - st = self.stat(follow_symlinks=follow_symlinks) - except OSError as e: - if e.errno != ENOENT: - raise - return False # Path doesn't exist or is a broken symlink - return st.st_mode & 0o170000 == S_IFDIR - - def is_file(self, follow_symlinks=True): - try: - st = self.stat(follow_symlinks=follow_symlinks) - except OSError as e: - if e.errno != ENOENT: - raise - return False # Path doesn't exist or is a broken symlink - return st.st_mode & 0o170000 == S_IFREG - - def is_symlink(self): - try: - st = self.stat(follow_symlinks=False) - except OSError as e: - if e.errno != ENOENT: - raise - return False # Path doesn't exist or is a broken symlink - return st.st_mode & 0o170000 == S_IFLNK - - def inode(self): - st = self.stat(follow_symlinks=False) - return st.st_ino - - def __str__(self): - return '<{0}: {1!r}>'.format(self.__class__.__name__, self.name) - - __repr__ = __str__ - - -def _scandir_generic(path=unicode('.')): - """Like os.listdir(), but yield DirEntry objects instead of returning - a list of names. - """ - for name in listdir(path): - yield GenericDirEntry(path, name) - - -if IS_PY3 and sys.platform == 'win32': - def scandir_generic(path=unicode('.')): - if isinstance(path, bytes): - raise TypeError("os.scandir() doesn't support bytes path on Windows, use Unicode instead") - return _scandir_generic(path) - scandir_generic.__doc__ = _scandir_generic.__doc__ -else: - scandir_generic = _scandir_generic - - -scandir_c = None -scandir_python = None - - -if sys.platform == 'win32': - if ctypes is not None: - from ctypes import wintypes - - # Various constants from windows.h - INVALID_HANDLE_VALUE = ctypes.c_void_p(-1).value - ERROR_FILE_NOT_FOUND = 2 - ERROR_NO_MORE_FILES = 18 - IO_REPARSE_TAG_SYMLINK = 0xA000000C - - # Numer of seconds between 1601-01-01 and 1970-01-01 - SECONDS_BETWEEN_EPOCHS = 11644473600 - - kernel32 = ctypes.windll.kernel32 - - # ctypes wrappers for (wide string versions of) FindFirstFile, - # FindNextFile, and FindClose - FindFirstFile = kernel32.FindFirstFileW - FindFirstFile.argtypes = [ - wintypes.LPCWSTR, - ctypes.POINTER(wintypes.WIN32_FIND_DATAW), - ] - FindFirstFile.restype = wintypes.HANDLE - - FindNextFile = kernel32.FindNextFileW - FindNextFile.argtypes = [ - wintypes.HANDLE, - ctypes.POINTER(wintypes.WIN32_FIND_DATAW), - ] - FindNextFile.restype = wintypes.BOOL - - FindClose = kernel32.FindClose - FindClose.argtypes = [wintypes.HANDLE] - FindClose.restype = wintypes.BOOL - - Win32StatResult = collections.namedtuple('Win32StatResult', [ - 'st_mode', - 'st_ino', - 'st_dev', - 'st_nlink', - 'st_uid', - 'st_gid', - 'st_size', - 'st_atime', - 'st_mtime', - 'st_ctime', - 'st_atime_ns', - 'st_mtime_ns', - 'st_ctime_ns', - 'st_file_attributes', - ]) - - def filetime_to_time(filetime): - """Convert Win32 FILETIME to time since Unix epoch in seconds.""" - total = filetime.dwHighDateTime << 32 | filetime.dwLowDateTime - return total / 10000000 - SECONDS_BETWEEN_EPOCHS - - def find_data_to_stat(data): - """Convert Win32 FIND_DATA struct to stat_result.""" - # First convert Win32 dwFileAttributes to 
st_mode - attributes = data.dwFileAttributes - st_mode = 0 - if attributes & FILE_ATTRIBUTE_DIRECTORY: - st_mode |= S_IFDIR | 0o111 - else: - st_mode |= S_IFREG - if attributes & FILE_ATTRIBUTE_READONLY: - st_mode |= 0o444 - else: - st_mode |= 0o666 - if (attributes & FILE_ATTRIBUTE_REPARSE_POINT and - data.dwReserved0 == IO_REPARSE_TAG_SYMLINK): - st_mode ^= st_mode & 0o170000 - st_mode |= S_IFLNK - - st_size = data.nFileSizeHigh << 32 | data.nFileSizeLow - st_atime = filetime_to_time(data.ftLastAccessTime) - st_mtime = filetime_to_time(data.ftLastWriteTime) - st_ctime = filetime_to_time(data.ftCreationTime) - - # Some fields set to zero per CPython's posixmodule.c: st_ino, st_dev, - # st_nlink, st_uid, st_gid - return Win32StatResult(st_mode, 0, 0, 0, 0, 0, st_size, - st_atime, st_mtime, st_ctime, - int(st_atime * 1000000000), - int(st_mtime * 1000000000), - int(st_ctime * 1000000000), - attributes) - - class Win32DirEntryPython(object): - __slots__ = ('name', '_stat', '_lstat', '_find_data', '_scandir_path', '_path', '_inode') - - def __init__(self, scandir_path, name, find_data): - self._scandir_path = scandir_path - self.name = name - self._stat = None - self._lstat = None - self._find_data = find_data - self._path = None - self._inode = None - - @property - def path(self): - if self._path is None: - self._path = join(self._scandir_path, self.name) - return self._path - - def stat(self, follow_symlinks=True): - if follow_symlinks: - if self._stat is None: - if self.is_symlink(): - # It's a symlink, call link-following stat() - self._stat = stat(self.path) - else: - # Not a symlink, stat is same as lstat value - if self._lstat is None: - self._lstat = find_data_to_stat(self._find_data) - self._stat = self._lstat - return self._stat - else: - if self._lstat is None: - # Lazily convert to stat object, because it's slow - # in Python, and often we only need is_dir() etc - self._lstat = find_data_to_stat(self._find_data) - return self._lstat - - def is_dir(self, follow_symlinks=True): - is_symlink = self.is_symlink() - if follow_symlinks and is_symlink: - try: - return self.stat().st_mode & 0o170000 == S_IFDIR - except OSError as e: - if e.errno != ENOENT: - raise - return False - elif is_symlink: - return False - else: - return (self._find_data.dwFileAttributes & - FILE_ATTRIBUTE_DIRECTORY != 0) - - def is_file(self, follow_symlinks=True): - is_symlink = self.is_symlink() - if follow_symlinks and is_symlink: - try: - return self.stat().st_mode & 0o170000 == S_IFREG - except OSError as e: - if e.errno != ENOENT: - raise - return False - elif is_symlink: - return False - else: - return (self._find_data.dwFileAttributes & - FILE_ATTRIBUTE_DIRECTORY == 0) - - def is_symlink(self): - return (self._find_data.dwFileAttributes & - FILE_ATTRIBUTE_REPARSE_POINT != 0 and - self._find_data.dwReserved0 == IO_REPARSE_TAG_SYMLINK) - - def inode(self): - if self._inode is None: - self._inode = lstat(self.path).st_ino - return self._inode - - def __str__(self): - return '<{0}: {1!r}>'.format(self.__class__.__name__, self.name) - - __repr__ = __str__ - - def win_error(error, filename): - exc = WindowsError(error, ctypes.FormatError(error)) - exc.filename = filename - return exc - - def _scandir_python(path=unicode('.')): - """Like os.listdir(), but yield DirEntry objects instead of returning - a list of names. 
- """ - # Call FindFirstFile and handle errors - if isinstance(path, bytes): - is_bytes = True - filename = join(path.decode('mbcs', 'strict'), '*.*') - else: - is_bytes = False - filename = join(path, '*.*') - data = wintypes.WIN32_FIND_DATAW() - data_p = ctypes.byref(data) - handle = FindFirstFile(filename, data_p) - if handle == INVALID_HANDLE_VALUE: - error = ctypes.GetLastError() - if error == ERROR_FILE_NOT_FOUND: - # No files, don't yield anything - return - raise win_error(error, path) - - # Call FindNextFile in a loop, stopping when no more files - try: - while True: - # Skip '.' and '..' (current and parent directory), but - # otherwise yield (filename, stat_result) tuple - name = data.cFileName - if name not in ('.', '..'): - if is_bytes: - name = name.encode('mbcs', 'replace') - yield Win32DirEntryPython(path, name, data) - - data = wintypes.WIN32_FIND_DATAW() - data_p = ctypes.byref(data) - success = FindNextFile(handle, data_p) - if not success: - error = ctypes.GetLastError() - if error == ERROR_NO_MORE_FILES: - break - raise win_error(error, path) - finally: - if not FindClose(handle): - raise win_error(ctypes.GetLastError(), path) - - if IS_PY3: - def scandir_python(path=unicode('.')): - if isinstance(path, bytes): - raise TypeError("os.scandir() doesn't support bytes path on Windows, use Unicode instead") - return _scandir_python(path) - scandir_python.__doc__ = _scandir_python.__doc__ - else: - scandir_python = _scandir_python - - if _scandir is not None: - scandir_c = _scandir.scandir - DirEntry_c = _scandir.DirEntry - - if _scandir is not None: - scandir = scandir_c - DirEntry = DirEntry_c - elif ctypes is not None: - scandir = scandir_python - DirEntry = Win32DirEntryPython - else: - scandir = scandir_generic - DirEntry = GenericDirEntry - - -# Linux, OS X, and BSD implementation -elif sys.platform.startswith(('linux', 'darwin', 'sunos5')) or 'bsd' in sys.platform: - have_dirent_d_type = (sys.platform != 'sunos5') - - if ctypes is not None and have_dirent_d_type: - import ctypes.util - - DIR_p = ctypes.c_void_p - - # Rather annoying how the dirent struct is slightly different on each - # platform. The only fields we care about are d_name and d_type. 
- class Dirent(ctypes.Structure): - if sys.platform.startswith('linux'): - _fields_ = ( - ('d_ino', ctypes.c_ulong), - ('d_off', ctypes.c_long), - ('d_reclen', ctypes.c_ushort), - ('d_type', ctypes.c_byte), - ('d_name', ctypes.c_char * 256), - ) - elif 'openbsd' in sys.platform: - _fields_ = ( - ('d_ino', ctypes.c_uint64), - ('d_off', ctypes.c_uint64), - ('d_reclen', ctypes.c_uint16), - ('d_type', ctypes.c_uint8), - ('d_namlen', ctypes.c_uint8), - ('__d_padding', ctypes.c_uint8 * 4), - ('d_name', ctypes.c_char * 256), - ) - else: - _fields_ = ( - ('d_ino', ctypes.c_uint32), # must be uint32, not ulong - ('d_reclen', ctypes.c_ushort), - ('d_type', ctypes.c_byte), - ('d_namlen', ctypes.c_byte), - ('d_name', ctypes.c_char * 256), - ) - - DT_UNKNOWN = 0 - DT_DIR = 4 - DT_REG = 8 - DT_LNK = 10 - - Dirent_p = ctypes.POINTER(Dirent) - Dirent_pp = ctypes.POINTER(Dirent_p) - - libc = ctypes.CDLL(ctypes.util.find_library('c'), use_errno=True) - opendir = libc.opendir - opendir.argtypes = [ctypes.c_char_p] - opendir.restype = DIR_p - - readdir_r = libc.readdir_r - readdir_r.argtypes = [DIR_p, Dirent_p, Dirent_pp] - readdir_r.restype = ctypes.c_int - - closedir = libc.closedir - closedir.argtypes = [DIR_p] - closedir.restype = ctypes.c_int - - file_system_encoding = sys.getfilesystemencoding() - - class PosixDirEntry(object): - __slots__ = ('name', '_d_type', '_stat', '_lstat', '_scandir_path', '_path', '_inode') - - def __init__(self, scandir_path, name, d_type, inode): - self._scandir_path = scandir_path - self.name = name - self._d_type = d_type - self._inode = inode - self._stat = None - self._lstat = None - self._path = None - - @property - def path(self): - if self._path is None: - self._path = join(self._scandir_path, self.name) - return self._path - - def stat(self, follow_symlinks=True): - if follow_symlinks: - if self._stat is None: - if self.is_symlink(): - self._stat = stat(self.path) - else: - if self._lstat is None: - self._lstat = lstat(self.path) - self._stat = self._lstat - return self._stat - else: - if self._lstat is None: - self._lstat = lstat(self.path) - return self._lstat - - def is_dir(self, follow_symlinks=True): - if (self._d_type == DT_UNKNOWN or - (follow_symlinks and self.is_symlink())): - try: - st = self.stat(follow_symlinks=follow_symlinks) - except OSError as e: - if e.errno != ENOENT: - raise - return False - return st.st_mode & 0o170000 == S_IFDIR - else: - return self._d_type == DT_DIR - - def is_file(self, follow_symlinks=True): - if (self._d_type == DT_UNKNOWN or - (follow_symlinks and self.is_symlink())): - try: - st = self.stat(follow_symlinks=follow_symlinks) - except OSError as e: - if e.errno != ENOENT: - raise - return False - return st.st_mode & 0o170000 == S_IFREG - else: - return self._d_type == DT_REG - - def is_symlink(self): - if self._d_type == DT_UNKNOWN: - try: - st = self.stat(follow_symlinks=False) - except OSError as e: - if e.errno != ENOENT: - raise - return False - return st.st_mode & 0o170000 == S_IFLNK - else: - return self._d_type == DT_LNK - - def inode(self): - return self._inode - - def __str__(self): - return '<{0}: {1!r}>'.format(self.__class__.__name__, self.name) - - __repr__ = __str__ - - def posix_error(filename): - errno = ctypes.get_errno() - exc = OSError(errno, strerror(errno)) - exc.filename = filename - return exc - - def scandir_python(path=unicode('.')): - """Like os.listdir(), but yield DirEntry objects instead of returning - a list of names. 
- """ - if isinstance(path, bytes): - opendir_path = path - is_bytes = True - else: - opendir_path = path.encode(file_system_encoding) - is_bytes = False - dir_p = opendir(opendir_path) - if not dir_p: - raise posix_error(path) - try: - result = Dirent_p() - while True: - entry = Dirent() - if readdir_r(dir_p, entry, result): - raise posix_error(path) - if not result: - break - name = entry.d_name - if name not in (b'.', b'..'): - if not is_bytes: - name = name.decode(file_system_encoding) - yield PosixDirEntry(path, name, entry.d_type, entry.d_ino) - finally: - if closedir(dir_p): - raise posix_error(path) - - if _scandir is not None: - scandir_c = _scandir.scandir - DirEntry_c = _scandir.DirEntry - - if _scandir is not None: - scandir = scandir_c - DirEntry = DirEntry_c - elif ctypes is not None and have_dirent_d_type: - scandir = scandir_python - DirEntry = PosixDirEntry - else: - scandir = scandir_generic - DirEntry = GenericDirEntry - - -# Some other system -- no d_type or stat information -else: - scandir = scandir_generic - DirEntry = GenericDirEntry - - -def _walk(top, topdown=True, onerror=None, followlinks=False): - """Like Python 3.5's implementation of os.walk() -- faster than - the pre-Python 3.5 version as it uses scandir() internally. - """ - dirs = [] - nondirs = [] - - # We may not have read permission for top, in which case we can't - # get a list of the files the directory contains. os.walk - # always suppressed the exception then, rather than blow up for a - # minor reason when (say) a thousand readable directories are still - # left to visit. That logic is copied here. - try: - scandir_it = scandir(top) - except OSError as error: - if onerror is not None: - onerror(error) - return - - while True: - try: - try: - entry = next(scandir_it) - except StopIteration: - break - except OSError as error: - if onerror is not None: - onerror(error) - return - - try: - is_dir = entry.is_dir() - except OSError: - # If is_dir() raises an OSError, consider that the entry is not - # a directory, same behaviour than os.path.isdir(). - is_dir = False - - if is_dir: - dirs.append(entry.name) - else: - nondirs.append(entry.name) - - if not topdown and is_dir: - # Bottom-up: recurse into sub-directory, but exclude symlinks to - # directories if followlinks is False - if followlinks: - walk_into = True - else: - try: - is_symlink = entry.is_symlink() - except OSError: - # If is_symlink() raises an OSError, consider that the - # entry is not a symbolic link, same behaviour than - # os.path.islink(). - is_symlink = False - walk_into = not is_symlink - - if walk_into: - for entry in walk(entry.path, topdown, onerror, followlinks): - yield entry - - # Yield before recursion if going top down - if topdown: - yield top, dirs, nondirs - - # Recurse into sub-directories - for name in dirs: - new_path = join(top, name) - # Issue #23605: os.path.islink() is used instead of caching - # entry.is_symlink() result during the loop on os.scandir() because - # the caller can replace the directory entry during the "yield" - # above. 
- if followlinks or not islink(new_path): - for entry in walk(new_path, topdown, onerror, followlinks): - yield entry - else: - # Yield after recursion if going bottom up - yield top, dirs, nondirs - - -if IS_PY3 or sys.platform != 'win32': - walk = _walk -else: - # Fix for broken unicode handling on Windows on Python 2.x, see: - # https://github.com/benhoyt/scandir/issues/54 - file_system_encoding = sys.getfilesystemencoding() - - def walk(top, topdown=True, onerror=None, followlinks=False): - if isinstance(top, bytes): - top = top.decode(file_system_encoding) - return _walk(top, topdown, onerror, followlinks) diff --git a/lib/sg_futures/__init__.py b/lib/sg_futures/__init__.py index 2160ea97..76abd7b0 100644 --- a/lib/sg_futures/__init__.py +++ b/lib/sg_futures/__init__.py @@ -14,9 +14,5 @@ # # You should have received a copy of the GNU General Public License # along with SickGear. If not, see . -import sys -if 2 == sys.version_info[0]: - from .py2 import * -else: - from .py3 import * +from .py3 import * diff --git a/lib/sg_futures/base.py b/lib/sg_futures/base.py index 606bf195..e041c595 100644 --- a/lib/sg_futures/base.py +++ b/lib/sg_futures/base.py @@ -1,13 +1,8 @@ import re -import sys import threading -if 2 == sys.version_info[0]: - # noinspection PyProtectedMember - from .futures.thread import _WorkItem -else: - # noinspection PyCompatibility,PyProtectedMember - from concurrent.futures.thread import _WorkItem +# noinspection PyProtectedMember,PyUnresolvedReferences +from concurrent.futures.thread import _WorkItem class GenericWorkItem(_WorkItem): diff --git a/lib/sg_futures/futures/__init__.py b/lib/sg_futures/futures/__init__.py deleted file mode 100644 index e1c1545f..00000000 --- a/lib/sg_futures/futures/__init__.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright 2009 Brian Quinlan. All Rights Reserved. -# Licensed to PSF under a Contributor Agreement. - -"""Execute computations asynchronously using threads or processes.""" - -__author__ = 'Brian Quinlan (brian@sweetapp.com)' - -from ._base import (FIRST_COMPLETED, - FIRST_EXCEPTION, - ALL_COMPLETED, - CancelledError, - TimeoutError, - Future, - Executor, - wait, - as_completed) -from .thread import ThreadPoolExecutor - -try: - from .process import ProcessPoolExecutor -except ImportError: - # some platforms don't have multiprocessing - pass diff --git a/lib/sg_futures/futures/_base.py b/lib/sg_futures/futures/_base.py deleted file mode 100644 index f7f525f6..00000000 --- a/lib/sg_futures/futures/_base.py +++ /dev/null @@ -1,673 +0,0 @@ -# Copyright 2009 Brian Quinlan. All Rights Reserved. -# Licensed to PSF under a Contributor Agreement. - -import collections -import logging -import threading -import itertools -import time -import types - -__author__ = 'Brian Quinlan (brian@sweetapp.com)' - -FIRST_COMPLETED = 'FIRST_COMPLETED' -FIRST_EXCEPTION = 'FIRST_EXCEPTION' -ALL_COMPLETED = 'ALL_COMPLETED' -_AS_COMPLETED = '_AS_COMPLETED' - -# Possible future states (for internal use by the futures package). -PENDING = 'PENDING' -RUNNING = 'RUNNING' -# The future was cancelled by the user... -CANCELLED = 'CANCELLED' -# ...and _Waiter.add_cancelled() was called by a worker. 
-CANCELLED_AND_NOTIFIED = 'CANCELLED_AND_NOTIFIED' -FINISHED = 'FINISHED' - -_FUTURE_STATES = [ - PENDING, - RUNNING, - CANCELLED, - CANCELLED_AND_NOTIFIED, - FINISHED -] - -_STATE_TO_DESCRIPTION_MAP = { - PENDING: "pending", - RUNNING: "running", - CANCELLED: "cancelled", - CANCELLED_AND_NOTIFIED: "cancelled", - FINISHED: "finished" -} - -# Logger for internal use by the futures package. -LOGGER = logging.getLogger("concurrent.futures") - -class Error(Exception): - """Base class for all future-related exceptions.""" - pass - -class CancelledError(Error): - """The Future was cancelled.""" - pass - -class TimeoutError(Error): - """The operation exceeded the given deadline.""" - pass - -class _Waiter(object): - """Provides the event that wait() and as_completed() block on.""" - def __init__(self): - self.event = threading.Event() - self.finished_futures = [] - - def add_result(self, future): - self.finished_futures.append(future) - - def add_exception(self, future): - self.finished_futures.append(future) - - def add_cancelled(self, future): - self.finished_futures.append(future) - -class _AsCompletedWaiter(_Waiter): - """Used by as_completed().""" - - def __init__(self): - super(_AsCompletedWaiter, self).__init__() - self.lock = threading.Lock() - - def add_result(self, future): - with self.lock: - super(_AsCompletedWaiter, self).add_result(future) - self.event.set() - - def add_exception(self, future): - with self.lock: - super(_AsCompletedWaiter, self).add_exception(future) - self.event.set() - - def add_cancelled(self, future): - with self.lock: - super(_AsCompletedWaiter, self).add_cancelled(future) - self.event.set() - -class _FirstCompletedWaiter(_Waiter): - """Used by wait(return_when=FIRST_COMPLETED).""" - - def add_result(self, future): - super(_FirstCompletedWaiter, self).add_result(future) - self.event.set() - - def add_exception(self, future): - super(_FirstCompletedWaiter, self).add_exception(future) - self.event.set() - - def add_cancelled(self, future): - super(_FirstCompletedWaiter, self).add_cancelled(future) - self.event.set() - -class _AllCompletedWaiter(_Waiter): - """Used by wait(return_when=FIRST_EXCEPTION and ALL_COMPLETED).""" - - def __init__(self, num_pending_calls, stop_on_exception): - self.num_pending_calls = num_pending_calls - self.stop_on_exception = stop_on_exception - self.lock = threading.Lock() - super(_AllCompletedWaiter, self).__init__() - - def _decrement_pending_calls(self): - with self.lock: - self.num_pending_calls -= 1 - if not self.num_pending_calls: - self.event.set() - - def add_result(self, future): - super(_AllCompletedWaiter, self).add_result(future) - self._decrement_pending_calls() - - def add_exception(self, future): - super(_AllCompletedWaiter, self).add_exception(future) - if self.stop_on_exception: - self.event.set() - else: - self._decrement_pending_calls() - - def add_cancelled(self, future): - super(_AllCompletedWaiter, self).add_cancelled(future) - self._decrement_pending_calls() - -class _AcquireFutures(object): - """A context manager that does an ordered acquire of Future conditions.""" - - def __init__(self, futures): - self.futures = sorted(futures, key=id) - - def __enter__(self): - for future in self.futures: - future._condition.acquire() - - def __exit__(self, *args): - for future in self.futures: - future._condition.release() - -def _create_and_install_waiters(fs, return_when): - if return_when == _AS_COMPLETED: - waiter = _AsCompletedWaiter() - elif return_when == FIRST_COMPLETED: - waiter = _FirstCompletedWaiter() - else: - 
pending_count = sum( - f._state not in [CANCELLED_AND_NOTIFIED, FINISHED] for f in fs) - - if return_when == FIRST_EXCEPTION: - waiter = _AllCompletedWaiter(pending_count, stop_on_exception=True) - elif return_when == ALL_COMPLETED: - waiter = _AllCompletedWaiter(pending_count, stop_on_exception=False) - else: - raise ValueError("Invalid return condition: %r" % return_when) - - for f in fs: - f._waiters.append(waiter) - - return waiter - - -def _yield_finished_futures(fs, waiter, ref_collect): - """ - Iterate on the list *fs*, yielding finished futures one by one in - reverse order. - Before yielding a future, *waiter* is removed from its waiters - and the future is removed from each set in the collection of sets - *ref_collect*. - - The aim of this function is to avoid keeping stale references after - the future is yielded and before the iterator resumes. - """ - while fs: - f = fs[-1] - for futures_set in ref_collect: - futures_set.remove(f) - with f._condition: - f._waiters.remove(waiter) - del f - # Careful not to keep a reference to the popped value - yield fs.pop() - - -def as_completed(fs, timeout=None): - """An iterator over the given futures that yields each as it completes. - - Args: - fs: The sequence of Futures (possibly created by different Executors) to - iterate over. - timeout: The maximum number of seconds to wait. If None, then there - is no limit on the wait time. - - Returns: - An iterator that yields the given Futures as they complete (finished or - cancelled). If any given Futures are duplicated, they will be returned - once. - - Raises: - TimeoutError: If the entire result iterator could not be generated - before the given timeout. - """ - if timeout is not None: - end_time = timeout + time.time() - - fs = set(fs) - total_futures = len(fs) - with _AcquireFutures(fs): - finished = set( - f for f in fs - if f._state in [CANCELLED_AND_NOTIFIED, FINISHED]) - pending = fs - finished - waiter = _create_and_install_waiters(fs, _AS_COMPLETED) - finished = list(finished) - try: - for f in _yield_finished_futures(finished, waiter, - ref_collect=(fs,)): - f = [f] - yield f.pop() - - while pending: - if timeout is None: - wait_timeout = None - else: - wait_timeout = end_time - time.time() - if wait_timeout < 0: - raise TimeoutError( - '%d (of %d) futures unfinished' % ( - len(pending), total_futures)) - - waiter.event.wait(wait_timeout) - - with waiter.lock: - finished = waiter.finished_futures - waiter.finished_futures = [] - waiter.event.clear() - - # reverse to keep finishing order - finished.reverse() - for f in _yield_finished_futures(finished, waiter, - ref_collect=(fs, pending)): - f = [f] - yield f.pop() - - finally: - # Remove waiter from unfinished futures - for f in fs: - with f._condition: - f._waiters.remove(waiter) - -DoneAndNotDoneFutures = collections.namedtuple( - 'DoneAndNotDoneFutures', 'done not_done') -def wait(fs, timeout=None, return_when=ALL_COMPLETED): - """Wait for the futures in the given sequence to complete. - - Args: - fs: The sequence of Futures (possibly created by different Executors) to - wait upon. - timeout: The maximum number of seconds to wait. If None, then there - is no limit on the wait time. - return_when: Indicates when this function should return. The options - are: - - FIRST_COMPLETED - Return when any future finishes or is - cancelled. - FIRST_EXCEPTION - Return when any future finishes by raising an - exception. If no future raises an exception - then it is equivalent to ALL_COMPLETED. 
- ALL_COMPLETED - Return when all futures finish or are cancelled. - - Returns: - A named 2-tuple of sets. The first set, named 'done', contains the - futures that completed (is finished or cancelled) before the wait - completed. The second set, named 'not_done', contains uncompleted - futures. - """ - with _AcquireFutures(fs): - done = set(f for f in fs - if f._state in [CANCELLED_AND_NOTIFIED, FINISHED]) - not_done = set(fs) - done - - if (return_when == FIRST_COMPLETED) and done: - return DoneAndNotDoneFutures(done, not_done) - elif (return_when == FIRST_EXCEPTION) and done: - if any(f for f in done - if not f.cancelled() and f.exception() is not None): - return DoneAndNotDoneFutures(done, not_done) - - if len(done) == len(fs): - return DoneAndNotDoneFutures(done, not_done) - - waiter = _create_and_install_waiters(fs, return_when) - - waiter.event.wait(timeout) - for f in fs: - with f._condition: - f._waiters.remove(waiter) - - done.update(waiter.finished_futures) - return DoneAndNotDoneFutures(done, set(fs) - done) - -class Future(object): - """Represents the result of an asynchronous computation.""" - - def __init__(self): - """Initializes the future. Should not be called by clients.""" - self._condition = threading.Condition() - self._state = PENDING - self._result = None - self._exception = None - self._traceback = None - self._waiters = [] - self._done_callbacks = [] - - def _invoke_callbacks(self): - for callback in self._done_callbacks: - try: - callback(self) - except Exception: - LOGGER.exception('exception calling callback for %r', self) - except BaseException: - # Explicitly let all other new-style exceptions through so - # that we can catch all old-style exceptions with a simple - # "except:" clause below. - # - # All old-style exception objects are instances of - # types.InstanceType, but "except types.InstanceType:" does - # not catch old-style exceptions for some reason. Thus, the - # only way to catch all old-style exceptions without catching - # any new-style exceptions is to filter out the new-style - # exceptions, which all derive from BaseException. - raise - except: - # Because of the BaseException clause above, this handler only - # executes for old-style exception objects. - LOGGER.exception('exception calling callback for %r', self) - - def __repr__(self): - with self._condition: - if self._state == FINISHED: - if self._exception: - return '<%s at %#x state=%s raised %s>' % ( - self.__class__.__name__, - id(self), - _STATE_TO_DESCRIPTION_MAP[self._state], - self._exception.__class__.__name__) - else: - return '<%s at %#x state=%s returned %s>' % ( - self.__class__.__name__, - id(self), - _STATE_TO_DESCRIPTION_MAP[self._state], - self._result.__class__.__name__) - return '<%s at %#x state=%s>' % ( - self.__class__.__name__, - id(self), - _STATE_TO_DESCRIPTION_MAP[self._state]) - - def cancel(self): - """Cancel the future if possible. - - Returns True if the future was cancelled, False otherwise. A future - cannot be cancelled if it is running or has already completed. 
- """ - with self._condition: - if self._state in [RUNNING, FINISHED]: - return False - - if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]: - return True - - self._state = CANCELLED - self._condition.notify_all() - - self._invoke_callbacks() - return True - - def cancelled(self): - """Return True if the future was cancelled.""" - with self._condition: - return self._state in [CANCELLED, CANCELLED_AND_NOTIFIED] - - def running(self): - """Return True if the future is currently executing.""" - with self._condition: - return self._state == RUNNING - - def done(self): - """Return True of the future was cancelled or finished executing.""" - with self._condition: - return self._state in [CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED] - - def __get_result(self): - if self._exception: - if isinstance(self._exception, types.InstanceType): - # The exception is an instance of an old-style class, which - # means type(self._exception) returns types.ClassType instead - # of the exception's actual class type. - exception_type = self._exception.__class__ - else: - exception_type = type(self._exception) - raise exception_type, self._exception, self._traceback - else: - return self._result - - def add_done_callback(self, fn): - """Attaches a callable that will be called when the future finishes. - - Args: - fn: A callable that will be called with this future as its only - argument when the future completes or is cancelled. The callable - will always be called by a thread in the same process in which - it was added. If the future has already completed or been - cancelled then the callable will be called immediately. These - callables are called in the order that they were added. - """ - with self._condition: - if self._state not in [CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED]: - self._done_callbacks.append(fn) - return - fn(self) - - def result(self, timeout=None): - """Return the result of the call that the future represents. - - Args: - timeout: The number of seconds to wait for the result if the future - isn't done. If None, then there is no limit on the wait time. - - Returns: - The result of the call that the future represents. - - Raises: - CancelledError: If the future was cancelled. - TimeoutError: If the future didn't finish executing before the given - timeout. - Exception: If the call raised then that exception will be raised. - """ - with self._condition: - if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]: - raise CancelledError() - elif self._state == FINISHED: - return self.__get_result() - - self._condition.wait(timeout) - - if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]: - raise CancelledError() - elif self._state == FINISHED: - return self.__get_result() - else: - raise TimeoutError() - - def exception_info(self, timeout=None): - """Return a tuple of (exception, traceback) raised by the call that the - future represents. - - Args: - timeout: The number of seconds to wait for the exception if the - future isn't done. If None, then there is no limit on the wait - time. - - Returns: - The exception raised by the call that the future represents or None - if the call completed without raising. - - Raises: - CancelledError: If the future was cancelled. - TimeoutError: If the future didn't finish executing before the given - timeout. 
- """ - with self._condition: - if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]: - raise CancelledError() - elif self._state == FINISHED: - return self._exception, self._traceback - - self._condition.wait(timeout) - - if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]: - raise CancelledError() - elif self._state == FINISHED: - return self._exception, self._traceback - else: - raise TimeoutError() - - def exception(self, timeout=None): - """Return the exception raised by the call that the future represents. - - Args: - timeout: The number of seconds to wait for the exception if the - future isn't done. If None, then there is no limit on the wait - time. - - Returns: - The exception raised by the call that the future represents or None - if the call completed without raising. - - Raises: - CancelledError: If the future was cancelled. - TimeoutError: If the future didn't finish executing before the given - timeout. - """ - return self.exception_info(timeout)[0] - - # The following methods should only be used by Executors and in tests. - def set_running_or_notify_cancel(self): - """Mark the future as running or process any cancel notifications. - - Should only be used by Executor implementations and unit tests. - - If the future has been cancelled (cancel() was called and returned - True) then any threads waiting on the future completing (though calls - to as_completed() or wait()) are notified and False is returned. - - If the future was not cancelled then it is put in the running state - (future calls to running() will return True) and True is returned. - - This method should be called by Executor implementations before - executing the work associated with this future. If this method returns - False then the work should not be executed. - - Returns: - False if the Future was cancelled, True otherwise. - - Raises: - RuntimeError: if this method was already called or if set_result() - or set_exception() was called. - """ - with self._condition: - if self._state == CANCELLED: - self._state = CANCELLED_AND_NOTIFIED - for waiter in self._waiters: - waiter.add_cancelled(self) - # self._condition.notify_all() is not necessary because - # self.cancel() triggers a notification. - return False - elif self._state == PENDING: - self._state = RUNNING - return True - else: - LOGGER.critical('Future %s in unexpected state: %s', - id(self), - self._state) - raise RuntimeError('Future in unexpected state') - - def set_result(self, result): - """Sets the return value of work associated with the future. - - Should only be used by Executor implementations and unit tests. - """ - with self._condition: - self._result = result - self._state = FINISHED - for waiter in self._waiters: - waiter.add_result(self) - self._condition.notify_all() - self._invoke_callbacks() - - def set_exception_info(self, exception, traceback): - """Sets the result of the future as being the given exception - and traceback. - - Should only be used by Executor implementations and unit tests. - """ - with self._condition: - self._exception = exception - self._traceback = traceback - self._state = FINISHED - for waiter in self._waiters: - waiter.add_exception(self) - self._condition.notify_all() - self._invoke_callbacks() - - def set_exception(self, exception): - """Sets the result of the future as being the given exception. - - Should only be used by Executor implementations and unit tests. 
- """ - self.set_exception_info(exception, None) - -class Executor(object): - """This is an abstract base class for concrete asynchronous executors.""" - - def submit(self, fn, *args, **kwargs): - """Submits a callable to be executed with the given arguments. - - Schedules the callable to be executed as fn(*args, **kwargs) and returns - a Future instance representing the execution of the callable. - - Returns: - A Future representing the given call. - """ - raise NotImplementedError() - - def map(self, fn, *iterables, **kwargs): - """Returns an iterator equivalent to map(fn, iter). - - Args: - fn: A callable that will take as many arguments as there are - passed iterables. - timeout: The maximum number of seconds to wait. If None, then there - is no limit on the wait time. - - Returns: - An iterator equivalent to: map(func, *iterables) but the calls may - be evaluated out-of-order. - - Raises: - TimeoutError: If the entire result iterator could not be generated - before the given timeout. - Exception: If fn(*args) raises for any values. - """ - timeout = kwargs.get('timeout') - if timeout is not None: - end_time = timeout + time.time() - - fs = [self.submit(fn, *args) for args in itertools.izip(*iterables)] - - # Yield must be hidden in closure so that the futures are submitted - # before the first iterator value is required. - def result_iterator(): - try: - # reverse to keep finishing order - fs.reverse() - while fs: - # Careful not to keep a reference to the popped future - if timeout is None: - yield fs.pop().result() - else: - yield fs.pop().result(end_time - time.time()) - finally: - for future in fs: - future.cancel() - return result_iterator() - - def shutdown(self, wait=True): - """Clean-up the resources associated with the Executor. - - It is safe to call this method several times. Otherwise, no other - methods can be called after this one. - - Args: - wait: If True then shutdown will not return until all running - futures have finished executing and the resources used by the - executor have been reclaimed. - """ - pass - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - self.shutdown(wait=True) - return False - - -class BrokenExecutor(RuntimeError): - """ - Raised when a executor has become non-functional after a severe failure. - """ diff --git a/lib/sg_futures/futures/process.py b/lib/sg_futures/futures/process.py deleted file mode 100644 index 5ba8db85..00000000 --- a/lib/sg_futures/futures/process.py +++ /dev/null @@ -1,363 +0,0 @@ -# Copyright 2009 Brian Quinlan. All Rights Reserved. -# Licensed to PSF under a Contributor Agreement. - -"""Implements ProcessPoolExecutor. - -The follow diagram and text describe the data-flow through the system: - -|======================= In-process =====================|== Out-of-process ==| - -+----------+ +----------+ +--------+ +-----------+ +---------+ -| | => | Work Ids | => | | => | Call Q | => | | -| | +----------+ | | +-----------+ | | -| | | ... | | | | ... | | | -| | | 6 | | | | 5, call() | | | -| | | 7 | | | | ... | | | -| Process | | ... | | Local | +-----------+ | Process | -| Pool | +----------+ | Worker | | #1..n | -| Executor | | Thread | | | -| | +----------- + | | +-----------+ | | -| | <=> | Work Items | <=> | | <= | Result Q | <= | | -| | +------------+ | | +-----------+ | | -| | | 6: call() | | | | ... | | | -| | | future | | | | 4, result | | | -| | | ... 
| | | | 3, except | | | -+----------+ +------------+ +--------+ +-----------+ +---------+ - -Executor.submit() called: -- creates a uniquely numbered _WorkItem and adds it to the "Work Items" dict -- adds the id of the _WorkItem to the "Work Ids" queue - -Local worker thread: -- reads work ids from the "Work Ids" queue and looks up the corresponding - WorkItem from the "Work Items" dict: if the work item has been cancelled then - it is simply removed from the dict, otherwise it is repackaged as a - _CallItem and put in the "Call Q". New _CallItems are put in the "Call Q" - until "Call Q" is full. NOTE: the size of the "Call Q" is kept small because - calls placed in the "Call Q" can no longer be cancelled with Future.cancel(). -- reads _ResultItems from "Result Q", updates the future stored in the - "Work Items" dict and deletes the dict entry - -Process #1..n: -- reads _CallItems from "Call Q", executes the calls, and puts the resulting - _ResultItems in "Request Q" -""" - -import atexit -from . import _base -import Queue as queue -import multiprocessing -import threading -import weakref -import sys - -__author__ = 'Brian Quinlan (brian@sweetapp.com)' - -# Workers are created as daemon threads and processes. This is done to allow the -# interpreter to exit when there are still idle processes in a -# ProcessPoolExecutor's process pool (i.e. shutdown() was not called). However, -# allowing workers to die with the interpreter has two undesirable properties: -# - The workers would still be running during interpretor shutdown, -# meaning that they would fail in unpredictable ways. -# - The workers could be killed while evaluating a work item, which could -# be bad if the callable being evaluated has external side-effects e.g. -# writing to a file. -# -# To work around this problem, an exit handler is installed which tells the -# workers to exit when their work queues are empty and then waits until the -# threads/processes finish. - -_threads_queues = weakref.WeakKeyDictionary() -_shutdown = False - -def _python_exit(): - global _shutdown - _shutdown = True - items = list(_threads_queues.items()) if _threads_queues else () - for t, q in items: - q.put(None) - for t, q in items: - t.join(sys.maxint) - -# Controls how many more calls than processes will be queued in the call queue. -# A smaller number will mean that processes spend more time idle waiting for -# work while a larger number will make Future.cancel() succeed less frequently -# (Futures in the call queue cannot be cancelled). -EXTRA_QUEUED_CALLS = 1 - -class _WorkItem(object): - def __init__(self, future, fn, args, kwargs): - self.future = future - self.fn = fn - self.args = args - self.kwargs = kwargs - -class _ResultItem(object): - def __init__(self, work_id, exception=None, result=None): - self.work_id = work_id - self.exception = exception - self.result = result - -class _CallItem(object): - def __init__(self, work_id, fn, args, kwargs): - self.work_id = work_id - self.fn = fn - self.args = args - self.kwargs = kwargs - -def _process_worker(call_queue, result_queue): - """Evaluates calls from call_queue and places the results in result_queue. - - This worker is run in a separate process. - - Args: - call_queue: A multiprocessing.Queue of _CallItems that will be read and - evaluated by the worker. - result_queue: A multiprocessing.Queue of _ResultItems that will written - to by the worker. - shutdown: A multiprocessing.Event that will be set as a signal to the - worker that it should exit when call_queue is empty. 
- """ - while True: - call_item = call_queue.get(block=True) - if call_item is None: - # Wake up queue management thread - result_queue.put(None) - return - try: - r = call_item.fn(*call_item.args, **call_item.kwargs) - except: - e = sys.exc_info()[1] - result_queue.put(_ResultItem(call_item.work_id, - exception=e)) - else: - result_queue.put(_ResultItem(call_item.work_id, - result=r)) - -def _add_call_item_to_queue(pending_work_items, - work_ids, - call_queue): - """Fills call_queue with _WorkItems from pending_work_items. - - This function never blocks. - - Args: - pending_work_items: A dict mapping work ids to _WorkItems e.g. - {5: <_WorkItem...>, 6: <_WorkItem...>, ...} - work_ids: A queue.Queue of work ids e.g. Queue([5, 6, ...]). Work ids - are consumed and the corresponding _WorkItems from - pending_work_items are transformed into _CallItems and put in - call_queue. - call_queue: A multiprocessing.Queue that will be filled with _CallItems - derived from _WorkItems. - """ - while True: - if call_queue.full(): - return - try: - work_id = work_ids.get(block=False) - except queue.Empty: - return - else: - work_item = pending_work_items[work_id] - - if work_item.future.set_running_or_notify_cancel(): - call_queue.put(_CallItem(work_id, - work_item.fn, - work_item.args, - work_item.kwargs), - block=True) - else: - del pending_work_items[work_id] - continue - -def _queue_management_worker(executor_reference, - processes, - pending_work_items, - work_ids_queue, - call_queue, - result_queue): - """Manages the communication between this process and the worker processes. - - This function is run in a local thread. - - Args: - executor_reference: A weakref.ref to the ProcessPoolExecutor that owns - this thread. Used to determine if the ProcessPoolExecutor has been - garbage collected and that this function can exit. - process: A list of the multiprocessing.Process instances used as - workers. - pending_work_items: A dict mapping work ids to _WorkItems e.g. - {5: <_WorkItem...>, 6: <_WorkItem...>, ...} - work_ids_queue: A queue.Queue of work ids e.g. Queue([5, 6, ...]). - call_queue: A multiprocessing.Queue that will be filled with _CallItems - derived from _WorkItems for processing by the process workers. - result_queue: A multiprocessing.Queue of _ResultItems generated by the - process workers. - """ - nb_shutdown_processes = [0] - def shutdown_one_process(): - """Tell a worker to terminate, which will in turn wake us again""" - call_queue.put(None) - nb_shutdown_processes[0] += 1 - while True: - _add_call_item_to_queue(pending_work_items, - work_ids_queue, - call_queue) - - result_item = result_queue.get(block=True) - if result_item is not None: - work_item = pending_work_items[result_item.work_id] - del pending_work_items[result_item.work_id] - - if result_item.exception: - work_item.future.set_exception(result_item.exception) - else: - work_item.future.set_result(result_item.result) - # Delete references to object. See issue16284 - del work_item - # Check whether we should start shutting down. - executor = executor_reference() - # No more work items can be added if: - # - The interpreter is shutting down OR - # - The executor that owns this worker has been collected OR - # - The executor that owns this worker has been shutdown. - if _shutdown or executor is None or executor._shutdown_thread: - # Since no new work items can be added, it is safe to shutdown - # this thread if there are no pending work items. 
- if not pending_work_items: - while nb_shutdown_processes[0] < len(processes): - shutdown_one_process() - # If .join() is not called on the created processes then - # some multiprocessing.Queue methods may deadlock on Mac OS - # X. - for p in processes: - p.join() - call_queue.close() - return - del executor - -_system_limits_checked = False -_system_limited = None -def _check_system_limits(): - global _system_limits_checked, _system_limited - if _system_limits_checked: - if _system_limited: - raise NotImplementedError(_system_limited) - _system_limits_checked = True - try: - import os - nsems_max = os.sysconf("SC_SEM_NSEMS_MAX") - except (AttributeError, ValueError): - # sysconf not available or setting not available - return - if nsems_max == -1: - # indetermine limit, assume that limit is determined - # by available memory only - return - if nsems_max >= 256: - # minimum number of semaphores available - # according to POSIX - return - _system_limited = "system provides too few semaphores (%d available, 256 necessary)" % nsems_max - raise NotImplementedError(_system_limited) - - -class ProcessPoolExecutor(_base.Executor): - def __init__(self, max_workers=None): - """Initializes a new ProcessPoolExecutor instance. - - Args: - max_workers: The maximum number of processes that can be used to - execute the given calls. If None or not given then as many - worker processes will be created as the machine has processors. - """ - _check_system_limits() - - if max_workers is None: - self._max_workers = multiprocessing.cpu_count() - else: - if max_workers <= 0: - raise ValueError("max_workers must be greater than 0") - - self._max_workers = max_workers - - # Make the call queue slightly larger than the number of processes to - # prevent the worker processes from idling. But don't make it too big - # because futures in the call queue cannot be cancelled. - self._call_queue = multiprocessing.Queue(self._max_workers + - EXTRA_QUEUED_CALLS) - self._result_queue = multiprocessing.Queue() - self._work_ids = queue.Queue() - self._queue_management_thread = None - self._processes = set() - - # Shutdown is a two-step process. - self._shutdown_thread = False - self._shutdown_lock = threading.Lock() - self._queue_count = 0 - self._pending_work_items = {} - - def _start_queue_management_thread(self): - # When the executor gets lost, the weakref callback will wake up - # the queue management thread. 
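# A minimal, self-contained sketch (names are illustrative, not from this
# codebase) of the weakref wake-up trick the comment above relies on: when
# the executor is garbage collected, the weakref callback fires and puts a
# sentinel on the queue, unblocking the management thread's queue.get().
import queue
import weakref

class Owner(object):
    pass

wake_queue = queue.Queue()
owner = Owner()
ref = weakref.ref(owner, lambda _ref, q=wake_queue: q.put(None))
del owner                        # last strong reference dropped...
assert None is wake_queue.get()  # ...callback enqueued the sentinel
# (deterministic under CPython's reference counting)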
- def weakref_cb(_, q=self._result_queue): - q.put(None) - if self._queue_management_thread is None: - self._queue_management_thread = threading.Thread( - target=_queue_management_worker, - args=(weakref.ref(self, weakref_cb), - self._processes, - self._pending_work_items, - self._work_ids, - self._call_queue, - self._result_queue)) - self._queue_management_thread.daemon = True - self._queue_management_thread.start() - _threads_queues[self._queue_management_thread] = self._result_queue - - def _adjust_process_count(self): - for _ in range(len(self._processes), self._max_workers): - p = multiprocessing.Process( - target=_process_worker, - args=(self._call_queue, - self._result_queue)) - p.start() - self._processes.add(p) - - def submit(self, fn, *args, **kwargs): - with self._shutdown_lock: - if self._shutdown_thread: - raise RuntimeError('cannot schedule new futures after shutdown') - - f = _base.Future() - w = _WorkItem(f, fn, args, kwargs) - - self._pending_work_items[self._queue_count] = w - self._work_ids.put(self._queue_count) - self._queue_count += 1 - # Wake up queue management thread - self._result_queue.put(None) - - self._start_queue_management_thread() - self._adjust_process_count() - return f - submit.__doc__ = _base.Executor.submit.__doc__ - - def shutdown(self, wait=True): - with self._shutdown_lock: - self._shutdown_thread = True - if self._queue_management_thread: - # Wake up queue management thread - self._result_queue.put(None) - if wait: - self._queue_management_thread.join(sys.maxint) - # To reduce the risk of openning too many files, remove references to - # objects that use file descriptors. - self._queue_management_thread = None - self._call_queue = None - self._result_queue = None - self._processes = None - shutdown.__doc__ = _base.Executor.shutdown.__doc__ - -atexit.register(_python_exit) diff --git a/lib/sg_futures/futures/thread.py b/lib/sg_futures/futures/thread.py deleted file mode 100644 index f593de40..00000000 --- a/lib/sg_futures/futures/thread.py +++ /dev/null @@ -1,207 +0,0 @@ -# Copyright 2009 Brian Quinlan. All Rights Reserved. -# Licensed to PSF under a Contributor Agreement. - -"""Implements ThreadPoolExecutor.""" - -import atexit -from six import PY2 -if PY2: - from . import _base -else: - from concurrent.futures import _base -import itertools -import Queue as queue -import threading -import weakref -import sys - -try: - from multiprocessing import cpu_count -except ImportError: - # some platforms don't have multiprocessing - def cpu_count(): - return None - -__author__ = 'Brian Quinlan (brian@sweetapp.com)' - -# Workers are created as daemon threads. This is done to allow the interpreter -# to exit when there are still idle threads in a ThreadPoolExecutor's thread -# pool (i.e. shutdown() was not called). However, allowing workers to die with -# the interpreter has two undesirable properties: -# - The workers would still be running during interpretor shutdown, -# meaning that they would fail in unpredictable ways. -# - The workers could be killed while evaluating a work item, which could -# be bad if the callable being evaluated has external side-effects e.g. -# writing to a file. -# -# To work around this problem, an exit handler is installed which tells the -# workers to exit when their work queues are empty and then waits until the -# threads finish. 
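# A minimal, self-contained sketch of the shutdown handshake described in
# the comment above (the process.py header makes the same point): an
# atexit hook drops one None sentinel per worker queue and joins the
# daemon workers so they are not killed mid work item. Names here are
# illustrative, not from this codebase.
import atexit
import queue
import threading

work_queue = queue.Queue()

def worker():
    while True:
        item = work_queue.get(block=True)
        if item is None:  # sentinel: interpreter is shutting down
            return
        item()  # run the work item

t = threading.Thread(target=worker)
t.daemon = True  # daemon, so an idle worker cannot block interpreter exit
t.start()

def python_exit():
    work_queue.put(None)  # ask the worker to drain and stop...
    t.join()              # ...then wait, instead of killing it mid item

atexit.register(python_exit)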
- -_threads_queues = weakref.WeakKeyDictionary() -_shutdown = False - -def _python_exit(): - global _shutdown - _shutdown = True - items = list(_threads_queues.items()) if _threads_queues else () - for t, q in items: - q.put(None) - for t, q in items: - t.join(sys.maxint) - -atexit.register(_python_exit) - -class _WorkItem(object): - def __init__(self, future, fn, args, kwargs): - self.future = future - self.fn = fn - self.args = args - self.kwargs = kwargs - - def run(self): - if not self.future.set_running_or_notify_cancel(): - return - - try: - result = self.fn(*self.args, **self.kwargs) - except: - e, tb = sys.exc_info()[1:] - self.future.set_exception_info(e, tb) - else: - self.future.set_result(result) - -def _worker(executor_reference, work_queue, initializer, initargs): - if initializer is not None: - try: - initializer(*initargs) - except BaseException: - _base.LOGGER.critical('Exception in initializer:', exc_info=True) - executor = executor_reference() - if executor is not None: - executor._initializer_failed() - return - try: - while True: - work_item = work_queue.get(block=True) - if work_item is not None: - work_item.run() - # Delete references to object. See issue16284 - del work_item - - # attempt to increment idle count - executor = executor_reference() - if executor is not None: - executor._idle_semaphore.release() - del executor - continue - executor = executor_reference() - # Exit if: - # - The interpreter is shutting down OR - # - The executor that owns the worker has been collected OR - # - The executor that owns the worker has been shutdown. - if _shutdown or executor is None or executor._shutdown: - # Notice other workers - work_queue.put(None) - return - del executor - except: - _base.LOGGER.critical('Exception in worker', exc_info=True) - - -class BrokenThreadPool(_base.BrokenExecutor): - """ - Raised when a worker thread in a ThreadPoolExecutor failed initializing. - """ - - -class ThreadPoolExecutor(_base.Executor): - - # Used to assign unique thread names when thread_name_prefix is not supplied. - _counter = itertools.count().next - - def __init__(self, max_workers=None, thread_name_prefix='', initializer=None, initargs=()): - """Initializes a new ThreadPoolExecutor instance. - - Args: - max_workers: The maximum number of threads that can be used to - execute the given calls. - thread_name_prefix: An optional name prefix to give our threads. - """ - if max_workers is None: - # Use this number because ThreadPoolExecutor is often - # used to overlap I/O instead of CPU work. 
- max_workers = (cpu_count() or 1) * 5 - if max_workers <= 0: - raise ValueError("max_workers must be greater than 0") - - self._max_workers = max_workers - self._initializer = initializer - self._initargs = initargs - self._work_queue = queue.Queue() - self._idle_semaphore = threading.Semaphore(0) - self._threads = set() - self._broken = False - self._shutdown = False - self._shutdown_lock = threading.Lock() - self._thread_name_prefix = (thread_name_prefix or - ("ThreadPoolExecutor-%d" % self._counter())) - - def submit(self, fn, *args, **kwargs): - with self._shutdown_lock: - if self._broken: - raise BrokenThreadPool(self._broken) - if self._shutdown: - raise RuntimeError('cannot schedule new futures after shutdown') - - f = _base.Future() - w = _WorkItem(f, fn, args, kwargs) - - self._work_queue.put(w) - self._adjust_thread_count() - return f - submit.__doc__ = _base.Executor.submit.__doc__ - - def _adjust_thread_count(self): - # if idle threads are available, don't spin new threads - if self._idle_semaphore.acquire(False): - return - - # When the executor gets lost, the weakref callback will wake up - # the worker threads. - def weakref_cb(_, q=self._work_queue): - q.put(None) - - num_threads = len(self._threads) - if num_threads < self._max_workers: - thread_name = '%s_%d' % (self._thread_name_prefix or self, - num_threads) - t = threading.Thread(name=thread_name, target=_worker, - args=(weakref.ref(self, weakref_cb), - self._work_queue, self._initializer, self._initargs)) - t.daemon = True - t.start() - self._threads.add(t) - _threads_queues[t] = self._work_queue - - def _initializer_failed(self): - with self._shutdown_lock: - self._broken = ('A thread initializer failed, the thread pool ' - 'is not usable anymore') - # Drain work queue and mark pending futures failed - while True: - try: - work_item = self._work_queue.get_nowait() - except queue.Empty: - break - if work_item is not None: - work_item.future.set_exception(BrokenThreadPool(self._broken)) - - def shutdown(self, wait=True): - with self._shutdown_lock: - self._shutdown = True - self._work_queue.put(None) - if wait: - for t in self._threads: - t.join(sys.maxint) - shutdown.__doc__ = _base.Executor.shutdown.__doc__ diff --git a/lib/sg_futures/py2.py b/lib/sg_futures/py2.py deleted file mode 100644 index 45974445..00000000 --- a/lib/sg_futures/py2.py +++ /dev/null @@ -1,55 +0,0 @@ -# coding=utf-8 -# -# This file is part of SickGear. -# -# SickGear is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# SickGear is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with SickGear. If not, see . 
- -# noinspection PyUnresolvedReferences -import sys - -# noinspection PyProtectedMember -from .futures.thread import _base, BrokenThreadPool, ThreadPoolExecutor - -from .base import * - - -class SgWorkItem(GenericWorkItem): - - def run(self): - if self.future.set_running_or_notify_cancel(): - try: - self._set_thread_name() - result = self.fn(*self.args, **self.kwargs) - except (BaseException, Exception): - e, tb = sys.exc_info()[1:] - self.future.set_exception_info(e, tb) - else: - self.future.set_result(result) - - -class SgThreadPoolExecutor(ThreadPoolExecutor): - - def submit(self, fn, *args, **kwargs): - with self._shutdown_lock: - if self._broken: - raise BrokenThreadPool(self._broken) - if self._shutdown: - raise RuntimeError('cannot schedule new futures after shutdown') - - f = _base.Future() - w = SgWorkItem(f, fn, args, kwargs) - - self._work_queue.put(w) - self._adjust_thread_count() - return f diff --git a/lib/sg_helpers.py b/lib/sg_helpers.py index 4601b512..1b50aa5f 100644 --- a/lib/sg_helpers.py +++ b/lib/sg_helpers.py @@ -35,8 +35,8 @@ from send2trash import send2trash from encodingKludge import SYS_ENCODING import requests -from _23 import decode_bytes, filter_list, html_unescape, list_range, \ - ordered_dict, Popen, scandir, urlparse, urlsplit, urlunparse +from _23 import decode_bytes, html_unescape, list_range, \ + Popen, scandir, urlparse, urlsplit, urlunparse from six import integer_types, iteritems, iterkeys, itervalues, moves, PY2, string_types, text_type import zipfile @@ -810,8 +810,8 @@ def get_url(url, # type: AnyStr response_attr = ('text', 'content')[as_binary] # selectively mute some errors - mute = filter_list(lambda x: kwargs.pop(x, False), [ - 'mute_connect_err', 'mute_read_timeout', 'mute_connect_timeout', 'mute_http_error']) + mute = list(filter(lambda x: kwargs.pop(x, False), [ + 'mute_connect_err', 'mute_read_timeout', 'mute_connect_timeout', 'mute_http_error'])) # reuse or instantiate request session resp_sess = kwargs.pop('resp_sess', None) @@ -1617,12 +1617,12 @@ def ast_eval(value, default=None): return default if 'OrderedDict()' == value: - value = ordered_dict() + value = dict() elif 'OrderedDict([(' == value[0:14]: try: list_of_tuples = ast.literal_eval(value[12:-1]) - value = ordered_dict() + value = dict() for cur_tuple in list_of_tuples: value[cur_tuple[0]] = cur_tuple[1] except (BaseException, Exception): diff --git a/lib/tvinfo_base/base.py b/lib/tvinfo_base/base.py index a1d57bc4..7173aded 100644 --- a/lib/tvinfo_base/base.py +++ b/lib/tvinfo_base/base.py @@ -8,7 +8,6 @@ import time from exceptions_helper import ex from six import integer_types, iteritems, iterkeys, string_types, text_type -from _23 import list_items, list_values from lib.tvinfo_base.exceptions import * from sg_helpers import calc_age, make_path @@ -53,7 +52,7 @@ tv_src_names = { TVINFO_IMDB: 'imdb', TVINFO_TRAKT: 'trakt', TVINFO_TMDB: 'tmdb', - TVINFO_TVDB_SLUG : 'tvdb slug', + TVINFO_TVDB_SLUG: 'tvdb slug', TVINFO_TRAKT_SLUG: 'trakt slug', TVINFO_SLUG: 'generic slug', @@ -67,7 +66,7 @@ tv_src_names = { log = logging.getLogger('TVInfo') log.addHandler(logging.NullHandler()) -TVInfoShowContainer = {} # type: Dict[ShowContainer] +TVInfoShowContainer = {} # type: Dict[str, ShowContainer] class ShowContainer(dict): @@ -94,7 +93,7 @@ class ShowContainer(dict): if acquired_lock: try: current_time = time.time() - for k, v in list_items(self): + for k, v in list(self.items()): if self.max_age < current_time - v[1]: lock_acquired = self[k].lock.acquire(False) if lock_acquired: @@ 
-125,7 +124,7 @@ class TVInfoIDs(object): trakt=None, # type: integer_types rage=None, # type: integer_types ids=None # type: Dict[int, integer_types] - ): # type: (...) -> TVInfoIDs + ): ids = ids or {} self.tvdb = tvdb or ids.get(TVINFO_TVDB) self.tmdb = tmdb or ids.get(TVINFO_TMDB) @@ -156,7 +155,7 @@ class TVInfoIDs(object): class TVInfoSocialIDs(object): def __init__(self, twitter=None, instagram=None, facebook=None, wikipedia=None, ids=None): - # type: (str_int, str_int, str_int, str_int, Dict[int, str_int]) -> TVInfoSocialIDs + # type: (str_int, str_int, str_int, str_int, Dict[int, str_int]) -> None ids = ids or {} self.twitter = twitter or ids.get(TVINFO_TWITTER) self.instagram = instagram or ids.get(TVINFO_INSTAGRAM) @@ -231,7 +230,7 @@ class TVInfoImage(object): lang=None, height=None, width=None, aspect_ratio=None): self.img_id = img_id # type: Optional[integer_types] self.image_type = image_type # type: integer_types - self.sizes = sizes # type: Dict[TVInfoImageSize, AnyStr] + self.sizes = sizes # type: Dict[int, AnyStr] self.type_str = type_str # type: AnyStr self.main_image = main_image # type: bool self.rating = rating # type: Optional[Union[float, integer_types]] @@ -243,7 +242,7 @@ class TVInfoImage(object): def __str__(self): return '' % (TVInfoImageType.reverse_str.get(self.image_type, 'unknown'), - ', '.join(TVInfoImageSize.reverse_str.get(s, 'unkown') for s in self.sizes)) + ', '.join(TVInfoImageSize.reverse_str.get(s, 'unknown') for s in self.sizes)) __repr__ = __str__ @@ -409,7 +408,7 @@ class TVInfoShow(dict): match, and so on. """ results = [] - for cur_season in list_values(self): + for cur_season in self.values(): searchresult = cur_season.search(term=term, key=key) if 0 != len(searchresult): results.extend(searchresult) @@ -487,7 +486,7 @@ class TVInfoSeason(dict): instances. """ results = [] - for ep in list_values(self): + for ep in self.values(): searchresult = ep.search(term=term, key=key) if None is not searchresult: results.append(searchresult) @@ -679,7 +678,7 @@ class PersonBase(dict): ids=None, # type: Dict thumb_url=None, # type: AnyStr **kwargs # type: Dict - ): # type: (...) -> PersonBase + ): super(PersonBase, self).__init__(**kwargs) self.id = p_id # type: Optional[integer_types] self.name = name # type: Optional[AnyStr] @@ -769,7 +768,7 @@ class TVInfoPerson(PersonBase): real_name=None, # type: AnyStr akas=None, # type: Set[AnyStr] **kwargs # type: Dict - ): # type: (...) 
-> TVInfoPerson + ): super(TVInfoPerson, self).__init__( p_id=p_id, name=name, image=image, thumb_url=thumb_url, bio=bio, gender=gender, birthdate=birthdate, deathdate=deathdate, country=country, images=images, @@ -795,7 +794,7 @@ class TVInfoPerson(PersonBase): class TVInfoCharacter(PersonBase): def __init__(self, person=None, voice=None, plays_self=None, regular=None, show=None, start_year=None, end_year=None, **kwargs): - # type: (List[TVInfoPerson], bool, bool, bool, TVInfoShow, int, int, Dict) -> TVInfoCharacter + # type: (List[TVInfoPerson], bool, bool, bool, TVInfoShow, int, int, Dict) -> None super(TVInfoCharacter, self).__init__(**kwargs) self.person = person # type: List[TVInfoPerson] self.voice = voice # type: Optional[bool] diff --git a/sickgear/__init__.py b/sickgear/__init__.py index a0a0ed49..e827ddaf 100644 --- a/sickgear/__init__.py +++ b/sickgear/__init__.py @@ -55,8 +55,8 @@ from browser_ua import get_ua from configobj import ConfigObj from api_trakt import TraktAPI -from _23 import b64encodestring, decode_bytes, filter_iter, list_items, map_list, ordered_dict, scandir -from six import iteritems, PY2, string_types +from _23 import b64encodestring, decode_bytes, scandir +from six import iteritems, string_types import sg_helpers # noinspection PyUnreachableCode @@ -1353,10 +1353,10 @@ def init_stage_1(console_logging): EPISODE_VIEW_MISSED_RANGE = check_setting_int(CFG, 'GUI', 'episode_view_missed_range', 7) HISTORY_LAYOUT = check_setting_str(CFG, 'GUI', 'history_layout', 'detailed') - BROWSELIST_HIDDEN = map_list( + BROWSELIST_HIDDEN = list(map( lambda y: TVidProdid.glue in y and y or '%s%s%s' % ( (TVINFO_TVDB, TVINFO_IMDB)[bool(helpers.parse_imdb_id(y))], TVidProdid.glue, y), - [x.strip() for x in check_setting_str(CFG, 'GUI', 'browselist_hidden', '').split('|~|') if x.strip()]) + [x.strip() for x in check_setting_str(CFG, 'GUI', 'browselist_hidden', '').split('|~|') if x.strip()])) BROWSELIST_MRU = sg_helpers.ast_eval(check_setting_str(CFG, 'GUI', 'browselist_prefs', None), {}) BACKUP_DB_PATH = check_setting_str(CFG, 'Backup', 'backup_db_path', '') @@ -1450,7 +1450,7 @@ def init_stage_1(console_logging): setattr(nzb_prov, attr, check_setting_str(CFG, prov_id_uc, attr_check, default)) elif isinstance(default, int): setattr(nzb_prov, attr, check_setting_int(CFG, prov_id_uc, attr_check, default)) - for cur_provider in filter_iter(lambda p: abs(zlib.crc32(decode_bytes(p.name))) + 40000400 in ( + for cur_provider in filter(lambda p: abs(zlib.crc32(decode_bytes(p.name))) + 40000400 in ( 1449593765, 1597250020, 1524942228, 160758496, 2925374331 ) or (p.url and abs(zlib.crc32(decode_bytes(re.sub(r'[./]', '', p.url[-10:])))) + 40000400 in ( 2417143804,)), providers.sortedProviderList()): @@ -1505,24 +1505,6 @@ def init_stage_1(console_logging): pass logger.sb_log_instance.init_logging(console_logging=console_logging) - if PY2: - try: - import _scandir - except ImportError: - _scandir = None - - try: - import ctypes - except ImportError: - ctypes = None - - if None is not _scandir and None is not ctypes and not getattr(_scandir, 'DirEntry', None): - MODULE_UPDATE_STRING = \ - 'Your scandir binary module is outdated, using the slow but newer Python module.' \ - '
Upgrade the binary at a command prompt with' \ - ' # python -m pip install -U scandir' \ - '
Important: You must Shutdown SickGear before upgrading' - showList = [] showDict = {} @@ -1865,7 +1847,7 @@ def save_config(): # For passwords you must include the word `password` in the item_name and # add `helpers.encrypt(ITEM_NAME, ENCRYPTION_VERSION)` in save_config() - new_config['General'] = ordered_dict() + new_config['General'] = dict() s_z = check_setting_int(CFG, 'General', 'stack_size', 0) if s_z: new_config['General']['stack_size'] = s_z @@ -1927,7 +1909,8 @@ def save_config(): new_config['General']['flatten_folders_default'] = int(FLATTEN_FOLDERS_DEFAULT) new_config['General']['anime_default'] = int(ANIME_DEFAULT) new_config['General']['provider_order'] = ' '.join(PROVIDER_ORDER) - new_config['General']['provider_homes'] = '%s' % dict([(pid, v) for pid, v in list_items(PROVIDER_HOMES) if pid in [ + new_config['General']['provider_homes'] = '%s' % dict([(pid, v) for pid, v in list(PROVIDER_HOMES.items()) + if pid in [ p.get_id() for p in [x for x in providers.sortedProviderList() if GenericProvider.TORRENT == x.providerType]]]) new_config['General']['update_notify'] = int(UPDATE_NOTIFY) new_config['General']['update_auto'] = int(UPDATE_AUTO) @@ -2014,7 +1997,7 @@ def save_config(): new_config['Backup']['backup_db_max_count'] = BACKUP_DB_MAX_COUNT default_not_zero = ('enable_recentsearch', 'enable_backlog', 'enable_scheduled_backlog', 'use_after_get_data') - for src in filter_iter(lambda px: GenericProvider.TORRENT == px.providerType, providers.sortedProviderList()): + for src in filter(lambda px: GenericProvider.TORRENT == px.providerType, providers.sortedProviderList()): src_id = src.get_id() src_id_uc = src_id.upper() new_config[src_id_uc] = {} @@ -2052,19 +2035,19 @@ def save_config(): del new_config[src_id_uc] default_not_zero = ('enable_recentsearch', 'enable_backlog', 'enable_scheduled_backlog') - for src in filter_iter(lambda px: GenericProvider.NZB == px.providerType, providers.sortedProviderList()): + for src in filter(lambda px: GenericProvider.NZB == px.providerType, providers.sortedProviderList()): src_id = src.get_id() src_id_uc = src.get_id().upper() new_config[src_id_uc] = {} if int(src.enabled): new_config[src_id_uc][src_id] = int(src.enabled) - for attr in filter_iter(lambda _a: None is not getattr(src, _a, None), + for attr in filter(lambda _a: None is not getattr(src, _a, None), ('api_key', 'digest', 'username', 'search_mode')): if 'search_mode' != attr or 'eponly' != getattr(src, attr): new_config[src_id_uc]['%s_%s' % (src_id, attr)] = getattr(src, attr) - for attr in filter_iter(lambda _a: None is not getattr(src, _a, None), ( + for attr in filter(lambda _a: None is not getattr(src, _a, None), ( 'enable_recentsearch', 'enable_backlog', 'enable_scheduled_backlog', 'scene_only', 'scene_loose', 'scene_loose_active', 'scene_rej_nuked', 'scene_nuked_active', @@ -2280,7 +2263,7 @@ def save_config(): cfg_lc = cfg.lower() cfg_keys += [cfg] new_config[cfg] = {} - for (k, v) in filter_iter(lambda arg: any([arg[1]]) or ( + for (k, v) in filter(lambda arg: any([arg[1]]) or ( # allow saving where item value default is non-zero but 0 is a required setting value cfg_lc in ('kodi', 'xbmc', 'synoindex', 'nzbget', 'torrent', 'telegram') and arg[0] in ('always_on', 'priority', 'send_image')) @@ -2320,7 +2303,7 @@ def save_config(): new_config[notifier]['%s_notify_onsubtitledownload' % notifier.lower()] = int(onsubtitledownload) # remove empty stanzas - for k in filter_iter(lambda c: not new_config[c], cfg_keys): + for k in filter(lambda c: not new_config[c], cfg_keys): del 
new_config[k] new_config['Newznab'] = {} diff --git a/sickgear/_legacy.py b/sickgear/_legacy.py index 1727a4cb..cd6c2907 100644 --- a/sickgear/_legacy.py +++ b/sickgear/_legacy.py @@ -32,7 +32,7 @@ from tornado import gen from tornado.escape import utf8 from tornado.web import RequestHandler -from _23 import decode_str, filter_iter +from _23 import decode_str from six import iteritems from sg_futures import SgThreadPoolExecutor try: @@ -103,7 +103,7 @@ class LegacyBaseHandler(LegacyBase): def redirect_args(self, new_url, exclude=(None,), **kwargs): args = '&'.join(['%s=%s' % (k, v) for (k, v) in - filter_iter(lambda arg: arg[1] not in exclude, iteritems(kwargs))]) + filter(lambda arg: arg[1] not in exclude, iteritems(kwargs))]) self.redirect('%s%s' % (new_url, ('', '?' + args)[bool(args)]), permanent=True) """ deprecated from BaseHandler ------------------------------------------------------------------------------------ diff --git a/sickgear/classes.py b/sickgear/classes.py index 3cdecae0..5065e05a 100644 --- a/sickgear/classes.py +++ b/sickgear/classes.py @@ -25,7 +25,7 @@ import sickgear from ._legacy_classes import LegacySearchResult, LegacyProper from .common import Quality -from six import integer_types, iteritems, PY2, string_types +from six import integer_types, iteritems, string_types # noinspection PyUnreachableCode if False: @@ -359,41 +359,11 @@ class OrderedDefaultdict(OrderedDict): args = (self.default_factory,) if self.default_factory else () return self.__class__, args, None, None, iteritems(self) - if PY2: - # backport from python 3 - def move_to_end(self, key, last=True): - """Move an existing element to the end (or beginning if last==False). + def first_key(self): + return next(iter(self)) - Raises KeyError if the element does not exist. - When last=True, acts like a fast version of self[key]=self.pop(key). 
- - """ - link_prev, link_next, key = link = getattr(self, '_OrderedDict__map')[key] - link_prev[1] = link_next - link_next[0] = link_prev - root = getattr(self, '_OrderedDict__root') - if last: - last = root[0] - link[0] = last - link[1] = root - last[1] = root[0] = link - else: - first = root[1] - link[0] = root - link[1] = first - root[1] = first[0] = link - - def first_key(self): - return getattr(self, '_OrderedDict__root')[1][2] - - def last_key(self): - return getattr(self, '_OrderedDict__root')[0][2] - else: - def first_key(self): - return next(iter(self)) - - def last_key(self): - return next(reversed(self)) + def last_key(self): + return next(reversed(self)) class ImageUrlList(list): @@ -455,61 +425,14 @@ class EnvVar(object): pass def __getitem__(self, key): - return os.environ(key) + return os.environ[key] @staticmethod def get(key, default=None): return os.environ.get(key, default) -if not PY2: - sickgear.ENV = EnvVar() - -elif 'nt' == os.name: - from ctypes import windll, create_unicode_buffer - - # noinspection PyCompatibility - class WinEnvVar(EnvVar): - - @staticmethod - def get_environment_variable(name): - # noinspection PyUnresolvedReferences - name = unicode(name) # ensures string argument is unicode - n = windll.kernel32.GetEnvironmentVariableW(name, None, 0) - env_value = None - if n: - buf = create_unicode_buffer(u'\0' * n) - windll.kernel32.GetEnvironmentVariableW(name, buf, n) - env_value = buf.value - return env_value - - def __getitem__(self, key): - return self.get_environment_variable(key) - - def get(self, key, default=None): - r = self.get_environment_variable(key) - return r if None is not r else default - - sickgear.ENV = WinEnvVar() -else: - # noinspection PyCompatibility - class LinuxEnvVar(EnvVar): - # noinspection PyMissingConstructor - def __init__(self, environ): - self.environ = environ - - def __getitem__(self, key): - v = self.environ.get(key) - try: - return v if not isinstance(v, str) else v.decode(sickgear.SYS_ENCODING) - except (UnicodeDecodeError, UnicodeEncodeError): - return v - - def get(self, key, default=None): - v = self[key] - return v if None is not v else default - - sickgear.ENV = LinuxEnvVar(os.environ) +sickgear.ENV = EnvVar() # backport from python 3 diff --git a/sickgear/clients/download_station.py b/sickgear/clients/download_station.py index 3147ab58..62fc27ff 100644 --- a/sickgear/clients/download_station.py +++ b/sickgear/clients/download_station.py @@ -26,7 +26,7 @@ from .. 
import logger from ..sgdatetime import timestamp_near import sickgear -from _23 import filter_iter, filter_list, map_list, unquote_plus +from _23 import unquote_plus from six import string_types # noinspection PyUnreachableCode @@ -96,21 +96,21 @@ class DownloadStationAPI(GenericClient): id=t['id'], title=t['title'], total_size=t.get('size') or 0, added_ts=d.get('create_time'), last_completed_ts=d.get('completed_time'), last_started_ts=d.get('started_time'), seed_elapsed_secs=d.get('seedelapsed'), - wanted_size=sum(map_list(lambda tf: wanted(tf) and tf.get('size') or 0, f)) or None, - wanted_down=sum(map_list(lambda tf: wanted(tf) and downloaded(tf) or 0, f)) or None, + wanted_size=sum(list(map(lambda tf: wanted(tf) and tf.get('size') or 0, f))) or None, + wanted_down=sum(list(map(lambda tf: wanted(tf) and downloaded(tf) or 0, f))) or None, tally_down=downloaded(tx), tally_up=tx.get('size_uploaded'), - state='done' if re.search('finish', t['status']) else ('seed', 'down')[any(filter_list( - lambda tf: wanted(tf) and (downloaded(tf, -1) < tf.get('size', 0)), f))] + state='done' if re.search('finish', t['status']) else ('seed', 'down')[any(list(filter( + lambda tf: wanted(tf) and (downloaded(tf, -1) < tf.get('size', 0)), f)))] )) # only available during "download" and "seeding" file_list = (lambda t: t.get('additional', {}).get('file', {})) valid_stat = (lambda ti: not ti.get('error') and isinstance(ti.get('status'), string_types) - and sum(map_list(lambda tf: wanted(tf) and downloaded(tf) or 0, file_list(ti)))) - result = map_list(lambda t: base_state( + and sum(list(map(lambda tf: wanted(tf) and downloaded(tf) or 0, file_list(ti))))) + result = list(map(lambda t: base_state( t, t.get('additional', {}).get('detail', {}), t.get('additional', {}).get('transfer', {}), file_list(t)), - filter_list(lambda t: t['status'] in ('downloading', 'seeding', 'finished') and valid_stat(t), - tasks)) + list(filter(lambda t: t['status'] in ('downloading', 'seeding', 'finished') and valid_stat(t), + tasks)))) return result @@ -133,13 +133,13 @@ class DownloadStationAPI(GenericClient): t_params=dict(additional='detail,file,transfer'))['data']['tasks'] else: # noinspection PyUnresolvedReferences - tasks = (filter_list(lambda d: d.get('id') == rid, self._testdata), self._testdata)[not rid] + tasks = (list(filter(lambda d: d.get('id') == rid, self._testdata)), self._testdata)[not rid] result += tasks and (isinstance(tasks, list) and tasks or (isinstance(tasks, dict) and [tasks])) \ or ([], [{'error': True, 'id': rid}])[err] except (BaseException, Exception): if getinfo: result += [dict(error=True, id=rid)] - for t in filter_iter(lambda d: isinstance(d.get('title'), string_types) and d.get('title'), result): + for t in filter(lambda d: isinstance(d.get('title'), string_types) and d.get('title'), result): t['title'] = unquote_plus(t.get('title')) return result @@ -211,7 +211,7 @@ class DownloadStationAPI(GenericClient): :return: True if success, Id(s) that could not be acted upon, else Falsy if failure """ if isinstance(ids, (string_types, list)): - rids = ids if isinstance(ids, list) else map_list(lambda x: x.strip(), ids.split(',')) + rids = ids if isinstance(ids, list) else list(map(lambda x: x.strip(), ids.split(','))) result = pause_first and self._pause_torrent(rids) # get items not paused result = (isinstance(result, list) and result or []) @@ -225,7 +225,7 @@ class DownloadStationAPI(GenericClient): if isinstance(ids, (string_types, list)): item = dict(fail=[], ignore=[]) - for task in 
filter_iter(filter_func, self._tinf(ids, err=True)): + for task in filter(filter_func, self._tinf(ids, err=True)): item[('fail', 'ignore')[self._ignore_state(task)]] += [task.get('id')] # retry items not acted on @@ -237,7 +237,7 @@ class DownloadStationAPI(GenericClient): logger.log('%s: retry %s %s item(s) in %ss' % (self.name, act, len(item['fail']), i), logger.DEBUG) time.sleep(i) item['fail'] = [] - for task in filter_iter(filter_func, self._tinf(retry_ids, err=True)): + for task in filter(filter_func, self._tinf(retry_ids, err=True)): item[('fail', 'ignore')[self._ignore_state(task)]] += [task.get('id')] if not item['fail']: @@ -303,7 +303,7 @@ class DownloadStationAPI(GenericClient): # noinspection PyUnresolvedReferences if response and response.get('success'): for s in (1, 3, 5, 10, 15, 30, 60): - tasks = filter_list(lambda t: task_stamp <= t['additional']['detail']['create_time'], self._tinf()) + tasks = list(filter(lambda t: task_stamp <= t['additional']['detail']['create_time'], self._tinf())) try: return str(self._client_has(tasks, uri, files)[0].get('id')) except IndexError: @@ -324,8 +324,8 @@ class DownloadStationAPI(GenericClient): if uri or files: u = isinstance(uri, dict) and (uri.get('uri', '') or '').lower() or None f = isinstance(files, dict) and (files.get('file', [''])[0]).lower() or None - result = filter_list(lambda t: u and t['additional']['detail']['uri'].lower() == u - or f and t['additional']['detail']['uri'].lower() in f, tasks) + result = list(filter(lambda t: u and t['additional']['detail']['uri'].lower() == u + or f and t['additional']['detail']['uri'].lower() in f, tasks)) return result def _client_request(self, method, t_id=None, t_params=None, files=None): @@ -360,7 +360,7 @@ class DownloadStationAPI(GenericClient): return self._error_task(response) if None is not t_id and None is t_params and 'create' != method: - return filter_list(lambda r: r.get('error'), response.get('data', {})) or True + return list(filter(lambda r: r.get('error'), response.get('data', {}))) or True return response diff --git a/sickgear/clients/qbittorrent.py b/sickgear/clients/qbittorrent.py index f0aa5ebd..20b7690c 100644 --- a/sickgear/clients/qbittorrent.py +++ b/sickgear/clients/qbittorrent.py @@ -26,7 +26,7 @@ import sickgear from requests.exceptions import HTTPError -from _23 import filter_iter, filter_list, map_list, unquote_plus +from _23 import unquote_plus from six import string_types # noinspection PyUnreachableCode @@ -58,9 +58,9 @@ class QbittorrentAPI(GenericClient): id=t['hash'], title=t['name'], total_size=gp.get('total_size') or 0, added_ts=gp.get('addition_date'), last_completed_ts=gp.get('completion_date'), last_started_ts=None, seed_elapsed_secs=gp.get('seeding_time'), - wanted_size=sum(map_list(lambda tf: wanted(tf) and tf.get('size') or 0, f)) or None, - wanted_down=sum(map_list(lambda tf: wanted(tf) and downloaded(tf) or 0, f)) or None, - tally_down=sum(map_list(lambda tf: downloaded(tf) or 0, f)) or None, + wanted_size=sum(list(map(lambda tf: wanted(tf) and tf.get('size') or 0, f))) or None, + wanted_down=sum(list(map(lambda tf: wanted(tf) and downloaded(tf) or 0, f))) or None, + tally_down=sum(list(map(lambda tf: downloaded(tf) or 0, f))) or None, tally_up=gp.get('total_uploaded'), state='done' if 'pausedUP' == t.get('state') else ('down', 'seed')['up' in t.get('state').lower()] )) @@ -68,10 +68,10 @@ class QbittorrentAPI(GenericClient): ('torrents/files', 'query/propertiesFiles/%s' % ti['hash'])[not self.api_ns], params=({'hash': ti['hash']}, {})[not 
self.api_ns], json=True) or {}) valid_stat = (lambda ti: not self._ignore_state(ti) - and sum(map_list(lambda tf: wanted(tf) and downloaded(tf) or 0, file_list(ti)))) - result = map_list(lambda t: base_state(t, self._tinf(t['hash'])[0], file_list(t)), - filter_list(lambda t: re.search('(?i)queue|stall|(up|down)load|pausedUP', t['state']) and - valid_stat(t), self._tinf(ids, False))) + and sum(list(map(lambda tf: wanted(tf) and downloaded(tf) or 0, file_list(ti))))) + result = list(map(lambda t: base_state(t, self._tinf(t['hash'])[0], file_list(t)), + list(filter(lambda t: re.search('(?i)queue|stall|(up|down)load|pausedUP', t['state']) and + valid_stat(t), self._tinf(ids, False))))) return result @@ -109,8 +109,7 @@ class QbittorrentAPI(GenericClient): except (BaseException, Exception): if getinfo: result += [dict(error=True, id=rid)] - for t in filter_iter(lambda d: isinstance(d.get('name'), string_types) and d.get('name'), - (result, [])[getinfo]): + for t in filter(lambda d: isinstance(d.get('name'), string_types) and d.get('name'), (result, [])[getinfo]): t['name'] = unquote_plus(t.get('name')) return result @@ -290,7 +289,7 @@ class QbittorrentAPI(GenericClient): :return: True if success, Id(s) that could not be acted upon, else Falsy if failure """ if isinstance(ids, (string_types, list)): - rids = ids if isinstance(ids, list) else map_list(lambda x: x.strip(), ids.split(',')) + rids = ids if isinstance(ids, list) else list(map(lambda x: x.strip(), ids.split(','))) result = pause_first and self._pause_torrent(rids) # get items not paused result = (isinstance(result, list) and result or []) @@ -304,7 +303,7 @@ class QbittorrentAPI(GenericClient): if isinstance(ids, (string_types, list)): item = dict(fail=[], ignore=[]) - for task in filter_iter(filter_func, self._tinf(ids, use_props=False, err=True)): + for task in filter(filter_func, self._tinf(ids, use_props=False, err=True)): item[('fail', 'ignore')[self._ignore_state(task)]] += [task.get('hash')] # retry items that are not acted on @@ -316,7 +315,7 @@ class QbittorrentAPI(GenericClient): logger.log('%s: retry %s %s item(s) in %ss' % (self.name, act, len(item['fail']), i), logger.DEBUG) time.sleep(i) item['fail'] = [] - for task in filter_iter(filter_func, self._tinf(retry_ids, use_props=False, err=True)): + for task in filter(filter_func, self._tinf(retry_ids, use_props=False, err=True)): item[('fail', 'ignore')[self._ignore_state(task)]] += [task.get('hash')] if not item['fail']: @@ -378,7 +377,7 @@ class QbittorrentAPI(GenericClient): if True is response: for s in (1, 3, 5, 10, 15, 30, 60): - if filter_list(lambda t: task_stamp <= t['addition_date'], self._tinf(data.hash)): + if list(filter(lambda t: task_stamp <= t['addition_date'], self._tinf(data.hash))): return data.hash time.sleep(s) return True diff --git a/sickgear/common.py b/sickgear/common.py index 804fee6b..9ad5f3ef 100644 --- a/sickgear/common.py +++ b/sickgear/common.py @@ -25,7 +25,6 @@ import uuid import sickgear -from _23 import map_list from six import integer_types, iterkeys, string_types # noinspection PyUnresolvedReferences @@ -563,8 +562,8 @@ for (attr_name, qual_val) in [ ('SNATCHED', SNATCHED), ('SNATCHED_PROPER', SNATCHED_PROPER), ('SNATCHED_BEST', SNATCHED_BEST), ('DOWNLOADED', DOWNLOADED), ('ARCHIVED', ARCHIVED), ('FAILED', FAILED), ]: - setattr(Quality, attr_name, map_list(lambda qk: Quality.compositeStatus(qual_val, qk), - iterkeys(Quality.qualityStrings))) + setattr(Quality, attr_name, list(map(lambda qk: Quality.compositeStatus(qual_val, qk), + 
iterkeys(Quality.qualityStrings)))) Quality.SNATCHED_ANY = Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST SD = Quality.combineQualities([Quality.SDTV, Quality.SDDVD], []) diff --git a/sickgear/config.py b/sickgear/config.py index c98df792..39a497c3 100644 --- a/sickgear/config.py +++ b/sickgear/config.py @@ -23,7 +23,7 @@ import sickgear.providers from . import db, helpers, logger, naming from lib.api_trakt import TraktAPI -from _23 import filter_list, urlsplit, urlunsplit +from _23 import urlsplit, urlunsplit from six import string_types @@ -831,7 +831,7 @@ class ConfigMigrator(object): # Migration v15: Transmithe.net variables def _migrate_v15(self): try: - neb = filter_list(lambda p: 'Nebulance' in p.name, sickgear.providers.sortedProviderList())[0] + neb = list(filter(lambda p: 'Nebulance' in p.name, sickgear.providers.sortedProviderList()))[0] except (BaseException, Exception): return # get the old settings from the file and store them in the new variable names diff --git a/sickgear/db.py b/sickgear/db.py index b9ee5a4e..bce8ed81 100644 --- a/sickgear/db.py +++ b/sickgear/db.py @@ -32,7 +32,7 @@ from .sgdatetime import timestamp_near from sg_helpers import make_path, compress_file, remove_file_perm, scantree -from _23 import filter_iter, filter_list, list_values, scandir +from _23 import scandir from six import iterkeys, iteritems, itervalues # noinspection PyUnreachableCode @@ -80,12 +80,12 @@ def mass_upsert_sql(table_name, value_dict, key_dict, sanitise=True): # sanity: remove k, v pairs in keyDict from valueDict if sanitise: - value_dict = dict(filter_iter(lambda k: k[0] not in key_dict, iteritems(value_dict))) + value_dict = dict(filter(lambda k: k[0] not in key_dict, iteritems(value_dict))) # noinspection SqlResolve cl.append(['UPDATE [%s] SET %s WHERE %s' % (table_name, ', '.join(gen_params(value_dict)), ' AND '.join(gen_params(key_dict))), - list_values(value_dict) + list_values(key_dict)]) + list(value_dict.values()) + list(key_dict.values())]) # noinspection SqlResolve cl.append(['INSERT INTO [' + table_name + '] (' + @@ -304,14 +304,14 @@ class DBConnection(object): query = 'UPDATE [%s] SET %s WHERE %s' % ( table_name, ', '.join(gen_params(value_dict)), ' AND '.join(gen_params(key_dict))) - self.action(query, list_values(value_dict) + list_values(key_dict)) + self.action(query, list(value_dict.values()) + list(key_dict.values())) if self.connection.total_changes == changes_before: # noinspection SqlResolve query = 'INSERT INTO [' + table_name + ']' \ + ' (%s)' % ', '.join(itertools.chain(iterkeys(value_dict), iterkeys(key_dict))) \ + ' VALUES (%s)' % ', '.join(['?'] * (len(value_dict) + len(key_dict))) - self.action(query, list_values(value_dict) + list_values(key_dict)) + self.action(query, list(value_dict.values()) + list(key_dict.values())) def tableInfo(self, table_name): # type: (AnyStr) -> Dict[AnyStr, Dict[AnyStr, AnyStr]] @@ -544,7 +544,7 @@ class SchemaUpgrade(object): # get old table columns and store the ones we want to keep result = self.connection.select('pragma table_info([%s])' % table) columns_list = ([column], column)[isinstance(column, list)] - keptColumns = filter_list(lambda col: col['name'] not in columns_list, result) + keptColumns = list(filter(lambda col: col['name'] not in columns_list, result)) keptColumnsNames = [] final = [] @@ -759,9 +759,9 @@ def cleanup_old_db_backups(filename): d, filename = os.path.split(filename) if not d: d = sickgear.DATA_DIR - for f in filter_iter(lambda fn: fn.is_file() and filename in fn.name and - 
re.search(r'\.db(\.v\d+)?\.r\d+$', fn.name), - scandir(d)): + for f in filter(lambda fn: fn.is_file() and filename in fn.name and + re.search(r'\.db(\.v\d+)?\.r\d+$', fn.name), + scandir(d)): try: os.unlink(f.path) except (BaseException, Exception): diff --git a/sickgear/failed_history.py b/sickgear/failed_history.py index 5af91a6d..0989c0ed 100644 --- a/sickgear/failed_history.py +++ b/sickgear/failed_history.py @@ -25,7 +25,6 @@ from .history import dateFormat from exceptions_helper import EpisodeNotFoundException, ex from _23 import unquote -from six import PY2, text_type # noinspection PyUnresolvedReferences # noinspection PyUnreachableCode @@ -83,10 +82,6 @@ def prepare_failed_name(release): fixed = re.sub(r'[.\-+ ]', '_', fixed) - # noinspection PyUnresolvedReferences - if PY2 and not isinstance(fixed, unicode): - fixed = text_type(fixed, 'utf-8', 'replace') - return fixed diff --git a/sickgear/helpers.py b/sickgear/helpers.py index 58dd3562..9a8b9db6 100644 --- a/sickgear/helpers.py +++ b/sickgear/helpers.py @@ -43,8 +43,9 @@ import requests import requests.exceptions import subliminal from lxml_etree import etree, is_lxml +from base64 import decodebytes as b64decodebytes, encodebytes as b64encodebytes -from _23 import b64decodebytes, b64encodebytes, decode_bytes, decode_str, filter_iter, scandir +from _23 import decode_bytes, decode_str, scandir from six import iteritems, string_types, text_type # noinspection PyUnresolvedReferences from six.moves import zip @@ -1317,7 +1318,7 @@ def has_anime(): :rtype: bool """ # noinspection PyTypeChecker - return False if not sickgear.showList else any(filter_iter(lambda show: show.is_anime, sickgear.showList)) + return False if not sickgear.showList else any(filter(lambda show: show.is_anime, sickgear.showList)) def cpu_sleep(): @@ -1682,7 +1683,7 @@ def upgrade_new_naming(): (d_entry.path, new_dir_name, repr(e), ex(e)), logger.WARNING) if os.path.isdir(new_dir_name): try: - f_n = filter_iter(lambda fn: fn.is_file(), scandir(new_dir_name)) + f_n = filter(lambda fn: fn.is_file(), scandir(new_dir_name)) except OSError as e: logger.log('Unable to rename %s / %s' % (repr(e), ex(e)), logger.WARNING) diff --git a/sickgear/history.py b/sickgear/history.py index 844088c5..49d2f68e 100644 --- a/sickgear/history.py +++ b/sickgear/history.py @@ -22,8 +22,6 @@ from .common import FAILED, SNATCHED, SNATCHED_PROPER, SUBTITLED, Quality from .name_parser.parser import NameParser import sickgear -from six import PY2, text_type - # noinspection PyUnreachableCode if False: from typing import Any, AnyStr @@ -47,9 +45,6 @@ def _log_history_item(action, tvid, prodid, season, episode, quality, resource, """ log_date = datetime.datetime.now().strftime(dateFormat) - if PY2 and not isinstance(resource, text_type): - resource = text_type(resource, 'utf-8', 'replace') - my_db = db.DBConnection() my_db.action( 'INSERT INTO history' diff --git a/sickgear/indexermapper.py b/sickgear/indexermapper.py index b5eafcf8..26e70480 100644 --- a/sickgear/indexermapper.py +++ b/sickgear/indexermapper.py @@ -26,8 +26,7 @@ import sickgear from lib.dateutil.parser import parse -from _23 import unidecode -from six import iteritems, moves, string_types, PY2 +from six import iteritems, moves, string_types # noinspection PyUnreachableCode if False: @@ -178,9 +177,7 @@ def clean_show_name(showname): :return: :rtype: AnyStr """ - if not PY2: - return re.sub(r'[(\s]*(?:19|20)\d\d[)\s]*$', '', showname) - return re.sub(r'[(\s]*(?:19|20)\d\d[)\s]*$', '', unidecode(showname)) + return 
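
helpers.py now pulls `decodebytes`/`encodebytes` straight from the standard `base64` module under the old `_23` alias names, so existing call sites compile unchanged. A quick standalone check of the aliasing:

    from base64 import decodebytes as b64decodebytes, encodebytes as b64encodebytes

    data = b'SickGear'
    encoded = b64encodebytes(data)  # bytes, newline-terminated, like the removed shim
    assert data == b64decodebytes(encoded)
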
re.sub(r'[(\s]*(?:19|20)\d\d[)\s]*$', '', showname) def get_show_name_date(show_obj): diff --git a/sickgear/indexers/indexer_api.py b/sickgear/indexers/indexer_api.py index 530faa96..c5ee5f65 100644 --- a/sickgear/indexers/indexer_api.py +++ b/sickgear/indexers/indexer_api.py @@ -20,8 +20,6 @@ from sg_helpers import proxy_setting import sickgear from lib.tvinfo_base import TVInfoBase -from _23 import list_values - # noinspection PyUnreachableCode if False: from typing import AnyStr, Dict @@ -83,13 +81,13 @@ class TVInfoAPI(object): @property def sources(self): # type: () -> Dict[int, AnyStr] - return dict([(int(x['id']), x['name']) for x in list_values(tvinfo_config) if not x['mapped_only'] and + return dict([(int(x['id']), x['name']) for x in list(tvinfo_config.values()) if not x['mapped_only'] and True is not x.get('fallback') and True is not x.get('people_only')]) @property def search_sources(self): # type: () -> Dict[int, AnyStr] - return dict([(int(x['id']), x['name']) for x in list_values(tvinfo_config) if not x['mapped_only'] and + return dict([(int(x['id']), x['name']) for x in list(tvinfo_config.values()) if not x['mapped_only'] and x.get('active') and not x.get('defunct') and True is not x.get('fallback') and True is not x.get('people_only')]) @@ -99,7 +97,7 @@ class TVInfoAPI(object): """ :return: return all indexers including mapped only indexers excluding fallback indexers """ - return dict([(int(x['id']), x['name']) for x in list_values(tvinfo_config) if True is not x.get('fallback') + return dict([(int(x['id']), x['name']) for x in list(tvinfo_config.values()) if True is not x.get('fallback') and True is not x.get('people_only')]) @property @@ -108,9 +106,9 @@ class TVInfoAPI(object): """ :return: return all fallback indexers """ - return dict([(int(x['id']), x['name']) for x in list_values(tvinfo_config) if True is x.get('fallback')]) + return dict([(int(x['id']), x['name']) for x in list(tvinfo_config.values()) if True is x.get('fallback')]) @property def xem_supported_sources(self): # type: () -> Dict[int, AnyStr] - return dict([(int(x['id']), x['name']) for x in list_values(tvinfo_config) if x.get('xem_origin')]) + return dict([(int(x['id']), x['name']) for x in list(tvinfo_config.values()) if x.get('xem_origin')]) diff --git a/sickgear/metadata/__init__.py b/sickgear/metadata/__init__.py index 8e1a4315..95fbcf48 100644 --- a/sickgear/metadata/__init__.py +++ b/sickgear/metadata/__init__.py @@ -19,11 +19,10 @@ __all__ = ['generic', 'helpers', 'kodi', 'mede8er', 'mediabrowser', 'ps3', 'tivo import sys from . 
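
`list_values`/`list_keys` existed because Python 2 returned list copies from `dict.values()`/`dict.keys()`; on Python 3 the same result is one `list()` call away, and iterating a dict directly yields its keys. The equivalences these hunks rely on, as a standalone sketch:

    tvinfo_config = {1: {'id': 1, 'name': 'TVDB'}, 3: {'id': 3, 'name': 'TMDB'}}

    assert [1, 3] == list(tvinfo_config)  # was list_keys(); list(d) iterates keys
    assert 'TVDB' == list(tvinfo_config.values())[0]['name']  # was list_values()
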
import kodi, mede8er, mediabrowser, ps3, tivo, wdtv, xbmc, xbmc_12plus -from _23 import filter_list def available_generators(): - return filter_list(lambda x: x not in ('generic', 'helpers'), __all__) + return list(filter(lambda x: x not in ('generic', 'helpers'), __all__)) def _getMetadataModule(name): diff --git a/sickgear/metadata/generic.py b/sickgear/metadata/generic.py index 810a01f0..d80022b3 100644 --- a/sickgear/metadata/generic.py +++ b/sickgear/metadata/generic.py @@ -35,7 +35,6 @@ from lib.fanart.core import Request as fanartRequest import lib.fanart as fanart from lxml_etree import etree -from _23 import filter_iter, list_keys from six import iteritems, itervalues, string_types # noinspection PyUnreachableCode @@ -874,7 +873,7 @@ class GenericMetadata(object): tv_id).name + ", not downloading images: " + ex(e), logger.WARNING) # todo: when tmdb is added as tv source remove the hardcoded TVINFO_TMDB - for tv_src in list(OrderedDict.fromkeys([show_obj.tvid] + list_keys(sickgear.TVInfoAPI().search_sources) + + for tv_src in list(OrderedDict.fromkeys([show_obj.tvid] + list(sickgear.TVInfoAPI().search_sources) + [TVINFO_TMDB])): if tv_src != show_obj.tvid and not show_obj.ids.get(tv_src, {}).get('id'): continue @@ -1220,9 +1219,9 @@ class GenericMetadata(object): resp = request.response() itemlist = [] dedupe = [] - for art in filter_iter(lambda i: 10 < len(i.get('url', '')) and (lang == i.get('lang', '')[0:2]), - # remove "[0:2]" ... to strictly use only data where "en" is at source - resp[types[image_type]]): # type: dict + for art in filter(lambda i: 10 < len(i.get('url', '')) and (lang == i.get('lang', '')[0:2]), + # remove "[0:2]" ... to strictly use only data where "en" is at source + resp[types[image_type]]): # type: dict try: url = (art['url'], art['url'].replace('/fanart/', '/preview/'))[thumb] if url not in dedupe: diff --git a/sickgear/metadata/kodi.py b/sickgear/metadata/kodi.py index e679ebeb..8472f30c 100644 --- a/sickgear/metadata/kodi.py +++ b/sickgear/metadata/kodi.py @@ -29,7 +29,7 @@ import exceptions_helper from exceptions_helper import ex from lxml_etree import etree -from _23 import decode_str, map_iter +from _23 import decode_str from six import string_types # noinspection PyUnreachableCode @@ -157,7 +157,7 @@ class KODIMetadata(generic.GenericMetadata): has_id = False tvdb_id = None - for tvid, slug in map_iter( + for tvid, slug in map( lambda _tvid: (_tvid, sickgear.TVInfoAPI(_tvid).config.get('kodi_slug')), list(sickgear.TVInfoAPI().all_sources)): mid = slug and show_obj.ids[tvid].get('id') diff --git a/sickgear/name_parser/parser.py b/sickgear/name_parser/parser.py index 8d63bb59..a1d37109 100644 --- a/sickgear/name_parser/parser.py +++ b/sickgear/name_parser/parser.py @@ -39,8 +39,8 @@ from lib.tvinfo_base.exceptions import * from ..classes import OrderedDefaultdict from .._legacy_classes import LegacyParseResult -from _23 import decode_str, list_keys, list_range -from six import iteritems, iterkeys, itervalues, PY2, string_types, text_type +from _23 import decode_str, list_range +from six import iteritems, iterkeys, itervalues, string_types, text_type # noinspection PyUnreachableCode if False: @@ -166,7 +166,7 @@ class NameParser(object): result.which_regex = [cur_regex_name] result.score = 0 - cur_regex_num - named_groups = list_keys(match.groupdict()) + named_groups = list(match.groupdict()) if 'series_name' in named_groups: result.series_name = match.group('series_name') @@ -511,10 +511,7 @@ class NameParser(object): @staticmethod def _unicodify(obj, 
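
In the parser hunk, `list_keys(match.groupdict())` becomes `list(match.groupdict())`; iterating the mapping yields the named groups in pattern order. A self-contained check:

    import re

    match = re.match(r'(?P<series_name>.+?)\.s(?P<season_num>\d+)', 'Some.Show.s01')
    named_groups = list(match.groupdict())  # was: list_keys(match.groupdict())
    assert ['series_name', 'season_num'] == named_groups
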
encoding='utf-8'): - if PY2 and isinstance(obj, string_types): - if not isinstance(obj, text_type): - obj = text_type(obj, encoding, 'replace') - if not PY2 and isinstance(obj, text_type): + if isinstance(obj, text_type): try: return obj.encode('latin1').decode('utf8') except (BaseException, Exception): @@ -751,9 +748,7 @@ class ParseResult(LegacyParseResult): self.release_group, self.air_date, tuple(self.ab_episode_numbers))) def __str__(self): - if not PY2: - return self.__unicode__() - return self.__unicode__().encode('utf-8', errors='ignore') + return self.__unicode__() def __unicode__(self): if None is not self.series_name: diff --git a/sickgear/network_timezones.py b/sickgear/network_timezones.py index 04c70aef..961b9511 100644 --- a/sickgear/network_timezones.py +++ b/sickgear/network_timezones.py @@ -29,8 +29,7 @@ from lib.dateutil import tz, zoneinfo from lib.tzlocal import get_localzone from sg_helpers import remove_file_perm, scantree -from six import integer_types, iteritems, string_types, PY2 -from _23 import list_keys +from six import integer_types, iteritems, string_types # noinspection PyUnreachableCode if False: @@ -547,7 +546,7 @@ def _load_network_conversions(): # remove deleted records if 0 < len(conversions_db): - network_name = list_keys(conversions_db) + network_name = list(conversions_db) cl.append(['DELETE FROM network_conversions WHERE tvdb_network' ' IN (%s)' % ','.join(['?'] * len(network_name)), network_name]) @@ -632,8 +631,6 @@ def get_episode_time(d, # type: int if d and None is not ep_time and None is not tzinfo: ep_date = datetime.date.fromordinal(helpers.try_int(d)) - if PY2: - return datetime.datetime.combine(ep_date, ep_time).replace(tzinfo=tzinfo) return datetime.datetime.combine(ep_date, ep_time, tzinfo) return parse_date_time(d, t, tzinfo) diff --git a/sickgear/notifiers/__init__.py b/sickgear/notifiers/__init__.py index 1b56c4f5..b35ae421 100644 --- a/sickgear/notifiers/__init__.py +++ b/sickgear/notifiers/__init__.py @@ -25,8 +25,6 @@ from . 
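
The removed PY2 branch in `get_episode_time` existed because only Python 3.6+ `datetime.combine` accepts `tzinfo` as a third argument; the two spellings are equivalent, as this standalone check (illustrative values) shows:

    import datetime

    ep_date = datetime.date(2023, 1, 15)
    ep_time = datetime.time(21, 0)
    tzinfo = datetime.timezone.utc

    assert (datetime.datetime.combine(ep_date, ep_time, tzinfo)
            == datetime.datetime.combine(ep_date, ep_time).replace(tzinfo=tzinfo))
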
import emby, kodi, plex, xbmc, \ import sickgear -from _23 import filter_iter, list_values - class NotifierFactory(object): @@ -68,32 +66,27 @@ class NotifierFactory(object): :return: ID String :rtype: String """ - for n in filter_iter(lambda v: v.is_enabled(), - list_values(self.notifiers)): + for n in filter(lambda v: v.is_enabled(), list(self.notifiers.values())): yield n.id() @property def enabled_onsnatch(self): - for n in filter_iter(lambda v: v.is_enabled() and v.is_enabled_onsnatch(), - list_values(self.notifiers)): + for n in filter(lambda v: v.is_enabled() and v.is_enabled_onsnatch(), list(self.notifiers.values())): yield n.id() @property def enabled_ondownload(self): - for n in filter_iter(lambda v: v.is_enabled() and v.is_enabled_ondownload(), - list_values(self.notifiers)): + for n in filter(lambda v: v.is_enabled() and v.is_enabled_ondownload(), list(self.notifiers.values())): yield n.id() @property def enabled_onsubtitledownload(self): - for n in filter_iter(lambda v: v.is_enabled() and v.is_enabled_onsubtitledownload(), - list_values(self.notifiers)): + for n in filter(lambda v: v.is_enabled() and v.is_enabled_onsubtitledownload(), list(self.notifiers.values())): yield n.id() @property def enabled_library(self): - for n in filter_iter(lambda v: v.is_enabled() and v.is_enabled_library(), - list_values(self.notifiers)): + for n in filter(lambda v: v.is_enabled() and v.is_enabled_library(), list(self.notifiers.values())): yield n.id() def get(self, nid): diff --git a/sickgear/notifiers/emby.py b/sickgear/notifiers/emby.py index 249c6639..81065c00 100644 --- a/sickgear/notifiers/emby.py +++ b/sickgear/notifiers/emby.py @@ -21,7 +21,7 @@ from .generic import Notifier from json_helper import json_loads import sickgear -from _23 import decode_bytes, decode_str, map_list +from _23 import decode_bytes, decode_str class EmbyNotifier(Notifier): @@ -50,7 +50,7 @@ class EmbyNotifier(Notifier): timeout=20, hooks=dict(response=self._cb_response), json=True) return self.response and self.response.get('ok') and 200 == self.response.get('status_code') and \ - version <= map_list(lambda x: int(x), (response and response.get('Version') or '0.0.0.0').split('.')) + version <= list(map(lambda x: int(x), (response and response.get('Version') or '0.0.0.0').split('.'))) def update_library(self, show_obj=None, **kwargs): """ Update library function diff --git a/sickgear/notifiers/plex.py b/sickgear/notifiers/plex.py index 5eaf646c..b84c7d89 100644 --- a/sickgear/notifiers/plex.py +++ b/sickgear/notifiers/plex.py @@ -20,8 +20,8 @@ from .generic import Notifier import sickgear from exceptions_helper import ex -from _23 import b64encodestring, decode_str, etree, filter_iter, list_values, unquote_plus, urlencode -from six import iteritems, text_type, PY2 +from _23 import b64encodestring, decode_str, etree, unquote_plus, urlencode +from six import iteritems # noinspection PyUnresolvedReferences from six.moves import urllib @@ -49,8 +49,7 @@ class PLEXNotifier(Notifier): return False for key in command: - if not PY2 or type(command[key]) == text_type: - command[key] = command[key].encode('utf-8') + command[key] = command[key].encode('utf-8') enc_command = urlencode(command) self._log_debug(u'Encoded API command: ' + enc_command) @@ -203,7 +202,7 @@ class PLEXNotifier(Notifier): hosts_failed.append(cur_host) continue - for section in filter_iter(lambda x: 'show' == x.attrib['type'], sections): + for section in filter(lambda x: 'show' == x.attrib['type'], sections): if str(section.attrib['key']) in 
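
The Emby version gate compares lists elementwise, which is why `map_list` becomes `list(map(...))` rather than a bare `map`: on Python 3, ordering a list against a `map` object raises `TypeError`. A standalone sketch with illustrative version values:

    version = [4, 1, 0, 0]  # minimum required, illustrative
    response_version = '4.7.0.1'

    parsed = list(map(lambda x: int(x), response_version.split('.')))  # was: map_list(...)
    assert version <= parsed  # lexicographic element-by-element comparison
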
hosts_all: continue keyed_host = [(str(section.attrib['key']), cur_host)] @@ -247,18 +246,14 @@ class PLEXNotifier(Notifier): return '' hosts = [ - host.replace('http://', '') for host in filter_iter(lambda x: x.startswith('http:'), - list_values(hosts_all))] + host.replace('http://', '') for host in filter(lambda x: x.startswith('http:'), list(hosts_all.values()))] secured = [ - host.replace('https://', '') for host in filter_iter(lambda x: x.startswith('https:'), - list_values(hosts_all))] + host.replace('https://', '') for host in filter(lambda x: x.startswith('https:'), list(hosts_all.values()))] failed = ', '.join([ - host.replace('http://', '') for host in filter_iter(lambda x: x.startswith('http:'), - hosts_failed)]) - failed_secured = ', '.join(filter_iter( + host.replace('http://', '') for host in filter(lambda x: x.startswith('http:'), hosts_failed)]) + failed_secured = ', '.join(filter( lambda x: x not in hosts, - [host.replace('https://', '') for host in filter_iter(lambda x: x.startswith('https:'), - hosts_failed)])) + [host.replace('https://', '') for host in filter(lambda x: x.startswith('https:'), hosts_failed)])) return '
<br>' + '<br>
'.join([result for result in [ ('', 'Fail: username/password when fetching credentials from plex.tv')[False is token_arg], diff --git a/sickgear/notifiers/trakt.py b/sickgear/notifiers/trakt.py index dcd2a28a..cb24c4ff 100644 --- a/sickgear/notifiers/trakt.py +++ b/sickgear/notifiers/trakt.py @@ -22,7 +22,6 @@ import sickgear from lib.api_trakt import TraktAPI, exceptions from exceptions_helper import ConnectionSkipException -from _23 import list_keys from six import iteritems # noinspection PyUnreachableCode @@ -38,7 +37,7 @@ class TraktNotifier(BaseNotifier): def is_enabled_library(cls): if sickgear.TRAKT_ACCOUNTS: for tid, locations in iteritems(sickgear.TRAKT_UPDATE_COLLECTION): - if tid in list_keys(sickgear.TRAKT_ACCOUNTS): + if tid in list(sickgear.TRAKT_ACCOUNTS): return True return False @@ -89,7 +88,7 @@ class TraktNotifier(BaseNotifier): data['shows'][0]['seasons'][0]['episodes'].append({'number': cur_ep_obj.episode}) for tid, locations in iteritems(sickgear.TRAKT_UPDATE_COLLECTION): - if tid not in list_keys(sickgear.TRAKT_ACCOUNTS): + if tid not in list(sickgear.TRAKT_ACCOUNTS): continue for loc in locations: if not ep_obj.location.startswith('%s%s' % (loc.rstrip(os.path.sep), os.path.sep)): diff --git a/sickgear/notifiers/xbmc.py b/sickgear/notifiers/xbmc.py index 71b24718..67b0412e 100644 --- a/sickgear/notifiers/xbmc.py +++ b/sickgear/notifiers/xbmc.py @@ -23,7 +23,6 @@ from exceptions_helper import ex from json_helper import json_dumps, json_load from _23 import b64encodestring, decode_str, etree, quote, unquote, unquote_plus, urlencode -from six import PY2, text_type # noinspection PyUnresolvedReferences from six.moves import urllib @@ -150,8 +149,7 @@ class XBMCNotifier(Notifier): password = self._choose(password, sickgear.XBMC_PASSWORD) for key in command: - if not PY2 or type(command[key]) == text_type: - command[key] = command[key].encode('utf-8') + command[key] = command[key].encode('utf-8') enc_command = urlencode(command) self._log_debug(u'Encoded API command: ' + enc_command) diff --git a/sickgear/piper.py b/sickgear/piper.py index 65217b8d..d4c8a3a9 100644 --- a/sickgear/piper.py +++ b/sickgear/piper.py @@ -10,8 +10,7 @@ import re from json_helper import json_loads from sg_helpers import cmdline_runner, is_virtualenv -from _23 import filter_list, ordered_dict -from six import iteritems, PY2 +from six import iteritems # noinspection PyUnreachableCode if False: @@ -51,10 +50,6 @@ def run_pip(pip_cmd, suppress_stderr=False): pip_cmd += ['--progress-bar', 'off'] new_pip_arg = ['--no-python-version-warning'] - if PY2: - pip_version, _, _ = _get_pip_version() - if pip_version and 20 > int(pip_version.split('.')[0]): - new_pip_arg = [] return cmdline_runner( [sys.executable, '-m', 'pip'] + new_pip_arg + ['--disable-pip-version-check'] + pip_cmd, @@ -72,7 +67,7 @@ def initial_requirements(): from Cheetah import VersionTuple is_cheetah2 = (3, 0, 0) > VersionTuple[0:3] - is_cheetah3py3 = not PY2 and (3, 3, 0) > VersionTuple[0:3] + is_cheetah3py3 = (3, 3, 0) > VersionTuple[0:3] if not (is_cheetah2 or is_cheetah3py3): return @@ -158,13 +153,10 @@ def check_pip_env(): _, _, installed, failed_names = _check_pip_env() - py2_last = 'final py2 release' boost = 'performance boost' extra_info = dict({'Cheetah3': 'filled requirement', 'CT3': 'filled requirement', 'lxml': boost, 'python-Levenshtein': boost}) - extra_info.update((dict(cryptography=py2_last, pip=py2_last, regex=py2_last, - scandir=boost, setuptools=py2_last), - dict(regex=boost))[not PY2]) + 
extra_info.update(dict(regex=boost)) return installed, extra_info, failed_names @@ -256,9 +248,9 @@ def _check_pip_env(pip_outdated=False, reset_fails=False): names_outdated = dict({cur_item.get('name'): {k: cur_item.get(k) for k in ('version', 'latest_version', 'latest_filetype')} for cur_item in json_loads(output)}) - to_update = set(filter_list( + to_update = set(list(filter( lambda name: name in specifiers and names_outdated[name]['latest_version'] in specifiers[name], - set(names_reco).intersection(set(names_outdated)))) + set(names_reco).intersection(set(names_outdated))))) # check whether to ignore direct reference specification updates if not dev mode if not int(os.environ.get('CHK_URL_SPECIFIERS', 0)): @@ -272,7 +264,7 @@ def _check_pip_env(pip_outdated=False, reset_fails=False): except (BaseException, Exception): pass - updates_todo = ordered_dict() + updates_todo = dict() todo = to_install.union(to_update, requirement_update) for cur_name in [cur_n for cur_n in names_reco if cur_n in todo]: updates_todo[cur_name] = dict({ diff --git a/sickgear/postProcessor.py b/sickgear/postProcessor.py index 945f257b..ac08f1df 100644 --- a/sickgear/postProcessor.py +++ b/sickgear/postProcessor.py @@ -33,7 +33,7 @@ from .indexers.indexer_config import TVINFO_TVDB from .name_parser.parser import InvalidNameException, InvalidShowException, NameParser from _23 import decode_str -from six import iteritems, PY2, string_types +from six import iteritems, string_types from sg_helpers import long_path, cmdline_runner # noinspection PyUnreachableCode @@ -824,12 +824,7 @@ class PostProcessor(object): script_cmd[0] = os.path.abspath(script_cmd[0]) self._log(u'Absolute path to script: ' + script_cmd[0], logger.DEBUG) - if PY2: - script_cmd += [ep_obj.location.encode(sickgear.SYS_ENCODING), - self.file_path.encode(sickgear.SYS_ENCODING) - ] - else: - script_cmd += [ep_obj.location, self.file_path] + script_cmd += [ep_obj.location, self.file_path] script_cmd += ([], [str(ep_obj.show_obj.tvid)])[new_call] + [ str(ep_obj.show_obj.prodid), @@ -1174,9 +1169,8 @@ class PostProcessor(object): keepalive = keepalive_stop = None if self.webhandler: def keep_alive(webh, stop_event): - if not PY2: - import asyncio - asyncio.set_event_loop(asyncio.new_event_loop()) + import asyncio + asyncio.set_event_loop(asyncio.new_event_loop()) while not stop_event.is_set(): stop_event.wait(60) webh('.') diff --git a/sickgear/processTV.py b/sickgear/processTV.py index 18a7a0dc..78fff9fd 100644 --- a/sickgear/processTV.py +++ b/sickgear/processTV.py @@ -35,8 +35,7 @@ from .history import reset_status from .name_parser.parser import InvalidNameException, InvalidShowException, NameParser from .sgdatetime import timestamp_near -from _23 import filter_list, filter_iter, list_values, map_iter -from six import iteritems, iterkeys, string_types, PY2, text_type +from six import iteritems, iterkeys, string_types, text_type from sg_helpers import long_path, scantree import lib.rarfile.rarfile as rarfile @@ -281,7 +280,7 @@ class ProcessTVShow(object): build_path = (lambda old_path: '%s%s' % (helpers.real_path(old_path).rstrip(os.path.sep), os.path.sep)) process_path = build_path(path) - for parent in map_iter(lambda p: build_path(p), sickgear.ROOT_DIRS.split('|')[1:]): + for parent in map(lambda p: build_path(p), sickgear.ROOT_DIRS.split('|')[1:]): if process_path.startswith(parent): return parent.rstrip(os.path.sep) @@ -352,7 +351,7 @@ class ProcessTVShow(object): path, dirs, files = self._get_path_dir_files(dir_name, nzb_name, pp_type) - if 
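
`ordered_dict` could fall back to plain `dict()` because CPython 3.7 made insertion order a language guarantee, so `updates_todo` still reports packages in recommendation order:

    updates_todo = dict()  # was: ordered_dict()
    for cur_name in ('Cheetah3', 'lxml', 'regex'):  # illustrative package names
        updates_todo[cur_name] = {'version': '0.0'}

    assert ['Cheetah3', 'lxml', 'regex'] == list(updates_todo)  # insertion order preserved
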
sickgear.POSTPONE_IF_SYNC_FILES and any(filter_iter(helpers.is_sync_file, files)): + if sickgear.POSTPONE_IF_SYNC_FILES and any(filter(helpers.is_sync_file, files)): self._log_helper(u'Found temporary sync files, skipping post process', logger.ERROR) return self.result @@ -367,7 +366,7 @@ class ProcessTVShow(object): work_files += [joined] rar_files, rarfile_history = self.unused_archives( - path, filter_list(helpers.is_first_rar_volume, files), pp_type, process_method) + path, list(filter(helpers.is_first_rar_volume, files)), pp_type, process_method) rar_content = self._unrar(path, rar_files, force) if self.fail_detected: self._process_failed(dir_name, nzb_name, show_obj=show_obj) @@ -376,8 +375,8 @@ class ProcessTVShow(object): rar_content = [x for x in rar_content if not helpers.is_link(os.path.join(path, x))] path, dirs, files = self._get_path_dir_files(dir_name, nzb_name, pp_type) files = [x for x in files if not helpers.is_link(os.path.join(path, x))] - video_files = filter_list(helpers.has_media_ext, files) - video_in_rar = filter_list(helpers.has_media_ext, rar_content) + video_files = list(filter(helpers.has_media_ext, files)) + video_in_rar = list(filter(helpers.has_media_ext, rar_content)) work_files += [os.path.join(path, item) for item in rar_content] if 0 < len(files): @@ -438,7 +437,7 @@ class ProcessTVShow(object): for walk_path, walk_dir, files in os.walk(os.path.join(path, directory), topdown=False): - if sickgear.POSTPONE_IF_SYNC_FILES and any(filter_iter(helpers.is_sync_file, files)): + if sickgear.POSTPONE_IF_SYNC_FILES and any(filter(helpers.is_sync_file, files)): self._log_helper(u'Found temporary sync files, skipping post process', logger.ERROR) return self.result @@ -452,7 +451,7 @@ class ProcessTVShow(object): files = [x for x in files if not helpers.is_link(os.path.join(walk_path, x))] rar_files, rarfile_history = self.unused_archives( - walk_path, filter_list(helpers.is_first_rar_volume, files), pp_type, process_method, + walk_path, list(filter(helpers.is_first_rar_volume, files)), pp_type, process_method, rarfile_history) rar_content = self._unrar(walk_path, rar_files, force) work_files += [os.path.join(walk_path, item) for item in rar_content] @@ -461,8 +460,8 @@ class ProcessTVShow(object): continue rar_content = [x for x in rar_content if not helpers.is_link(os.path.join(walk_path, x))] files = list(set(files + rar_content)) - video_files = filter_list(helpers.has_media_ext, files) - video_in_rar = filter_list(helpers.has_media_ext, rar_content) + video_files = list(filter(helpers.has_media_ext, files)) + video_in_rar = list(filter(helpers.has_media_ext, rar_content)) notwanted_files = [x for x in files if x not in video_files] # Don't Link media when the media is extracted from a rar in the same path @@ -640,7 +639,7 @@ class ProcessTVShow(object): all_dirs += process_dir all_files += fileList - video_files = filter_list(helpers.has_media_ext, all_files) + video_files = list(filter(helpers.has_media_ext, all_files)) all_dirs.append(dir_name) # check if the directory have at least one tv video file @@ -660,7 +659,7 @@ class ProcessTVShow(object): if sickgear.UNPACK and process_path and all_files: # Search for packed release - packed_files = filter_list(helpers.is_first_rar_volume, all_files) + packed_files = list(filter(helpers.is_first_rar_volume, all_files)) for packed in packed_files: try: @@ -719,9 +718,8 @@ class ProcessTVShow(object): rar_content = [os.path.normpath(x.filename) for x in rar_handle.infolist() if not x.is_dir()] renamed = 
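
`any(filter(helpers.is_sync_file, files))` is lazy end to end: `filter` yields matches on demand and `any` short-circuits on the first one, so no intermediate list is built while probing for sync files. A minimal model with a stand-in predicate:

    def is_sync_file(name):  # stand-in for helpers.is_sync_file
        return name.endswith(('.!sync', '.bts'))

    files = ['show.s01e01.mkv', 'show.s01e02.!sync', 'show.s01e03.mkv']
    assert any(filter(is_sync_file, files))  # stops at the first hit
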
self.cleanup_names(path, rar_content) cur_unpacked = rar_content if not renamed else \ - (list(set(rar_content) - set(iterkeys(renamed))) + list_values(renamed)) - self._log_helper(u'Unpacked content: [u\'%s\']' % '\', u\''.join(map_iter(text_type, - cur_unpacked))) + (list(set(rar_content) - set(iterkeys(renamed))) + list(renamed.values())) + self._log_helper(u'Unpacked content: [u\'%s\']' % '\', u\''.join(map(text_type, cur_unpacked))) unpacked_files += cur_unpacked except (rarfile.PasswordRequired, rarfile.RarWrongPassword): self._log_helper(u'Failed to unpack archive PasswordRequired: %s' % archive, logger.ERROR) @@ -928,10 +926,6 @@ class ProcessTVShow(object): if force or not self.any_vid_processed: return False - # Needed for accessing DB with a unicode dir_name - if PY2 and not isinstance(dir_name, text_type): - dir_name = text_type(dir_name, 'utf_8') - parse_result = None try: parse_result = NameParser(convert=True).parse(videofile, cache_result=False) @@ -974,8 +968,6 @@ class ProcessTVShow(object): else: # This is needed for video whose name differ from dir_name - if PY2 and not isinstance(videofile, text_type): - videofile = text_type(videofile, 'utf_8') sql_result = my_db.select( 'SELECT * FROM tv_episodes WHERE release_name = ?', [videofile.rpartition('.')[0]]) diff --git a/sickgear/properFinder.py b/sickgear/properFinder.py index 9e26b98c..5c3f899a 100644 --- a/sickgear/properFinder.py +++ b/sickgear/properFinder.py @@ -32,7 +32,7 @@ from .history import dateFormat from .name_parser.parser import InvalidNameException, InvalidShowException, NameParser from .sgdatetime import timestamp_near -from _23 import filter_iter, filter_list, list_values, map_consume, map_list +from _23 import map_consume from six import string_types # noinspection PyUnreachableCode @@ -251,9 +251,9 @@ def _get_proper_list(aired_since_shows, # type: datetime.datetime # filter provider list for: # 1. from recent search: recent search enabled providers # 2. 
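
The provider predicate below uses the codebase's tuple-indexing idiom: `(a, b)[condition]` selects `b` when the condition holds, here switching between the recent-search and backlog flags. A standalone model with an illustrative provider stand-in:

    class Prov(object):
        def __init__(self, active, recent, backlog):
            self.active, self.enable_recentsearch, self.enable_backlog = active, recent, backlog

        def is_active(self):
            return self.active

    proper_dict = None  # None selects the backlog flag
    provider_list = list(filter(
        lambda p: p.is_active() and (p.enable_recentsearch, p.enable_backlog)[None is proper_dict],
        [Prov(True, False, True), Prov(True, True, False), Prov(False, True, True)]))
    assert 1 == len(provider_list)
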
native proper search: active search enabled providers - provider_list = filter_list( + provider_list = list(filter( lambda p: p.is_active() and (p.enable_recentsearch, p.enable_backlog)[None is proper_dict], - sickgear.providers.sortedProviderList()) + sickgear.providers.sortedProviderList())) search_threads = [] if None is proper_dict: @@ -487,7 +487,7 @@ def _get_proper_list(aired_since_shows, # type: datetime.datetime cur_provider.log_result('Propers', len(propers), '%s' % cur_provider.name) - return list_values(propers) + return list(propers.values()) def _download_propers(proper_list): @@ -507,24 +507,24 @@ def _download_propers(proper_list): # get verified list; sort the list of unique Propers for highest proper_level, newest first for cur_proper in sorted( - filter_iter(lambda p: p not in consumed_proper, - # allows Proper to fail or be rejected and another to be tried (with a different name) - filter_iter(lambda p: _epid(p) not in downloaded_epid, proper_list)), + filter(lambda p: p not in consumed_proper, + # allows Proper to fail or be rejected and another to be tried (with a different name) + filter(lambda p: _epid(p) not in downloaded_epid, proper_list)), key=operator.attrgetter('properlevel', 'date'), reverse=True): # type: Proper epid = _epid(cur_proper) # if the show is in our list and there hasn't been a Proper already added for that particular episode # then add it to our list of Propers - if epid not in map_list(_epid, verified_propers): + if epid not in list(map(_epid, verified_propers)): logger.log('Proper may be useful [%s]' % cur_proper.name) verified_propers.add(cur_proper) else: # use Proper with the highest level remove_propers = set() map_consume(lambda vp: remove_propers.add(vp), - filter_iter(lambda p: (epid == _epid(p) and cur_proper.proper_level > p.proper_level), - verified_propers)) + filter(lambda p: (epid == _epid(p) and cur_proper.proper_level > p.proper_level), + verified_propers)) if remove_propers: verified_propers -= remove_propers diff --git a/sickgear/providers/__init__.py b/sickgear/providers/__init__.py index 4ba6218d..5ba75287 100644 --- a/sickgear/providers/__init__.py +++ b/sickgear/providers/__init__.py @@ -22,7 +22,6 @@ from .newznab import NewznabConstants from .. import logger import sickgear -from _23 import filter_list, filter_iter from six import iteritems, itervalues # noinspection PyUnreachableCode @@ -50,7 +49,7 @@ for module in __all__: try: m = importlib.import_module('.' 
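
The nested `filter` calls in `_download_propers` compose lazily: the inner one drops already-downloaded episode ids, the outer one drops consumed Propers, and the surrounding `sorted(...)` drives the single pass. A compact standalone model using tuples in place of Proper objects:

    consumed_proper = {'p1'}
    downloaded_epid = {'e9'}
    proper_list = [('p1', 'e1', 2), ('p2', 'e9', 3), ('p3', 'e1', 1)]  # (name, epid, level)

    candidates = sorted(
        filter(lambda p: p[0] not in consumed_proper,
               filter(lambda p: p[1] not in downloaded_epid, proper_list)),
        key=lambda p: p[2], reverse=True)
    assert [('p3', 'e1', 1)] == candidates
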
+ module, 'sickgear.providers') globals().update({n: getattr(m, n) for n in m.__all__} if hasattr(m, '__all__') - else dict(filter_iter(lambda t: '_' != t[0][0], iteritems(m.__dict__)))) + else dict(filter(lambda t: '_' != t[0][0], iteritems(m.__dict__)))) except ImportError as e: if 'custom' != module[0:6]: raise e @@ -74,12 +73,12 @@ def sortedProviderList(): newList.append(providerDict[curModule]) if not sickgear.PROVIDER_ORDER: - nzb = filter_list(lambda p: p.providerType == generic.GenericProvider.NZB, itervalues(providerDict)) - tor = filter_list(lambda p: p.providerType != generic.GenericProvider.NZB, itervalues(providerDict)) - newList = sorted(filter_iter(lambda p: not p.anime_only, nzb), key=lambda v: v.get_id()) + \ - sorted(filter_iter(lambda p: not p.anime_only, tor), key=lambda v: v.get_id()) + \ - sorted(filter_iter(lambda p: p.anime_only, nzb), key=lambda v: v.get_id()) + \ - sorted(filter_iter(lambda p: p.anime_only, tor), key=lambda v: v.get_id()) + nzb = list(filter(lambda p: p.providerType == generic.GenericProvider.NZB, itervalues(providerDict))) + tor = list(filter(lambda p: p.providerType != generic.GenericProvider.NZB, itervalues(providerDict))) + newList = sorted(filter(lambda p: not p.anime_only, nzb), key=lambda v: v.get_id()) + \ + sorted(filter(lambda p: not p.anime_only, tor), key=lambda v: v.get_id()) + \ + sorted(filter(lambda p: p.anime_only, nzb), key=lambda v: v.get_id()) + \ + sorted(filter(lambda p: p.anime_only, tor), key=lambda v: v.get_id()) # add any modules that are missing from that list for curModule in providerDict: @@ -119,7 +118,7 @@ def make_unique_list(p_list, d_list=None): default_names = [d.name for d in d_list or []] - p_list = filter_iter(lambda _x: _x.get_id() not in ['sick_beard_index'], p_list) + p_list = filter(lambda _x: _x.get_id() not in ['sick_beard_index'], p_list) for cur_p in p_list: g_name = generic_provider_name(cur_p.name) g_url = generic_provider_url(cur_p.url) @@ -139,7 +138,7 @@ def make_unique_list(p_list, d_list=None): def getNewznabProviderList(data): # type: (AnyStr) -> List defaultList = [makeNewznabProvider(x) for x in getDefaultNewznabProviders().split('!!!')] - providerList = make_unique_list(filter_list(lambda _x: _x, [makeNewznabProvider(x) for x in data.split('!!!')]), + providerList = make_unique_list(list(filter(lambda _x: _x, [makeNewznabProvider(x) for x in data.split('!!!')])), defaultList) providerDict = dict(zip([x.name for x in providerList], providerList)) @@ -158,7 +157,7 @@ def getNewznabProviderList(data): 'server_type'): setattr(providerDict[curDefault.name], k, getattr(curDefault, k)) - return filter_list(lambda _x: _x, providerList) + return list(filter(lambda _x: _x, providerList)) def makeNewznabProvider(config_string): @@ -189,9 +188,9 @@ def makeNewznabProvider(config_string): def getTorrentRssProviderList(data): - providerList = filter_list(lambda _x: _x, [makeTorrentRssProvider(x) for x in data.split('!!!')]) + providerList = list(filter(lambda _x: _x, [makeTorrentRssProvider(x) for x in data.split('!!!')])) - return filter_list(lambda _x: _x, providerList) + return list(filter(lambda _x: _x, providerList)) def makeTorrentRssProvider(config_string): diff --git a/sickgear/providers/alpharatio.py b/sickgear/providers/alpharatio.py index 4b4ed911..eb4e9a2e 100644 --- a/sickgear/providers/alpharatio.py +++ b/sickgear/providers/alpharatio.py @@ -25,7 +25,6 @@ from .. 
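
`sortedProviderList` consumes each of `nzb` and `tor` twice (anime and non-anime passes), which is exactly why those two keep the `list()` wrapper while the inner `filter` calls feed straight into `sorted`. A runnable model of the four-way ordering with an illustrative stand-in class:

    class P(object):
        def __init__(self, pid, ptype, anime):
            self.pid, self.providerType, self.anime_only = pid, ptype, anime

        def get_id(self):
            return self.pid

    plist = [P('b', 'nzb', False), P('a', 'tor', True), P('c', 'nzb', True), P('d', 'tor', False)]
    nzb = list(filter(lambda p: 'nzb' == p.providerType, plist))  # materialised: iterated twice below
    tor = list(filter(lambda p: 'nzb' != p.providerType, plist))
    new_list = (sorted(filter(lambda p: not p.anime_only, nzb), key=lambda v: v.get_id())
                + sorted(filter(lambda p: not p.anime_only, tor), key=lambda v: v.get_id())
                + sorted(filter(lambda p: p.anime_only, nzb), key=lambda v: v.get_id())
                + sorted(filter(lambda p: p.anime_only, tor), key=lambda v: v.get_id()))
    assert ['b', 'd', 'c', 'a'] == [p.get_id() for p in new_list]
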
import logger from ..helpers import try_int from bs4_parser import BS4Parser -from _23 import unidecode from six import iteritems @@ -63,7 +62,6 @@ class AlphaRatioProvider(generic.TorrentProvider): rc = dict([(k, re.compile('(?i)' + v)) for (k, v) in iteritems({'info': 'view', 'get': 'download'})]) for mode in search_params: for search_string in search_params[mode]: - search_string = unidecode(search_string) search_url = self.urls['search'] % (search_string, ('&freetorrent=1', '')[not self.freeleech]) html = self.get_url(search_url) diff --git a/sickgear/providers/bithdtv.py b/sickgear/providers/bithdtv.py index 4e7b4be9..86e37964 100644 --- a/sickgear/providers/bithdtv.py +++ b/sickgear/providers/bithdtv.py @@ -23,7 +23,6 @@ from .. import logger from ..helpers import try_int from bs4_parser import BS4Parser -from _23 import unidecode from six import iteritems @@ -67,7 +66,6 @@ class BitHDTVProvider(generic.TorrentProvider): for mode in search_params: for search_string in search_params[mode]: - search_string = unidecode(search_string) search_url = self.urls['search'] % (search_string, self._categories_string(mode, '%s', ',')) html = self.get_url(search_url, timeout=90) diff --git a/sickgear/providers/blutopia.py b/sickgear/providers/blutopia.py index 0ef6bdb2..c8458a22 100644 --- a/sickgear/providers/blutopia.py +++ b/sickgear/providers/blutopia.py @@ -25,7 +25,6 @@ from .. import logger from ..helpers import try_int from bs4_parser import BS4Parser -from _23 import filter_iter, unidecode from six import iteritems @@ -107,7 +106,6 @@ class BlutopiaProvider(generic.TorrentProvider): return results for search_string in search_params[mode]: - search_string = unidecode(search_string) search_url = self.urls['search'] % ( self._token, search_string.replace('.', ' '), self._categories_string(template=''), '', '', '') @@ -136,7 +134,7 @@ class BlutopiaProvider(generic.TorrentProvider): marked = ','.join([x.attrs.get('data-original-title', '').lower() for x in tr.find_all( 'i', attrs={'class': ['text-gold', 'fa-diamond', 'fa-certificate']})]) # noinspection PyTypeChecker - munged = ''.join(filter_iter(marked.__contains__, ['free', 'double', 'feat'])) + munged = ''.join(filter(marked.__contains__, ['free', 'double', 'feat'])) # noinspection PyUnboundLocalVariable if ((non_marked and rc['filter'].search(munged)) or (not non_marked and not rc['filter'].search(munged))): diff --git a/sickgear/providers/btn.py b/sickgear/providers/btn.py index f5373228..7af84475 100644 --- a/sickgear/providers/btn.py +++ b/sickgear/providers/btn.py @@ -32,7 +32,6 @@ from bs4_parser import BS4Parser from exceptions_helper import AuthException from json_helper import json_dumps -from _23 import unidecode from six import iteritems @@ -201,7 +200,6 @@ class BTNProvider(generic.TorrentProvider): del (self.session.headers['Referer']) self.auth_html = True - search_string = unidecode(search_string) search_url = self.urls['search'] % (search_string, self._categories_string(mode, 'filter_cat[%s]=1')) html = self.get_url(search_url, use_tmr_limit=False) diff --git a/sickgear/providers/eztv.py b/sickgear/providers/eztv.py index 86bad378..5a723b1b 100644 --- a/sickgear/providers/eztv.py +++ b/sickgear/providers/eztv.py @@ -23,7 +23,7 @@ from .. 
import logger from ..helpers import try_int from bs4_parser import BS4Parser -from _23 import b64decodestring, unidecode +from _23 import b64decodestring from six import iteritems @@ -62,7 +62,6 @@ class EztvProvider(generic.TorrentProvider): for mode in search_params: for search_string in search_params[mode]: - search_string = unidecode(search_string) search_url = self.urls['browse'] % search_string if 'Cache' == mode else \ self.urls['search'] % search_string.replace('.', ' ') diff --git a/sickgear/providers/fano.py b/sickgear/providers/fano.py index 67eb8395..ebb34fc8 100644 --- a/sickgear/providers/fano.py +++ b/sickgear/providers/fano.py @@ -25,7 +25,6 @@ from .. import logger from ..helpers import try_int from bs4_parser import BS4Parser -from _23 import unidecode from six import iteritems FLTAG = r'\s+]+%s[^<]+Torrent let'})]) for mode in search_params: for search_string in search_params[mode]: - search_string = unidecode(search_string) search_url = self.urls['search'] % search_string # fetches 15 results by default, and up to 100 if allowed in user profile diff --git a/sickgear/providers/nebulance.py b/sickgear/providers/nebulance.py index 99feacd0..f8005eca 100644 --- a/sickgear/providers/nebulance.py +++ b/sickgear/providers/nebulance.py @@ -25,7 +25,7 @@ from ..helpers import try_int from bs4_parser import BS4Parser from json_helper import json_dumps -from _23 import filter_list, unidecode, unquote_plus +from _23 import unquote_plus from six import iteritems @@ -83,7 +83,6 @@ class NebulanceProvider(generic.TorrentProvider): rc = dict([(k, re.compile('(?i)' + v)) for (k, v) in iteritems({'nodots': r'[\.\s]+'})]) for mode in search_params: for search_string in search_params[mode]: - search_string = unidecode(search_string) search_url = self.urls['browse'] % (self.user_authkey, self.user_passkey) if 'Cache' != mode: @@ -164,7 +163,7 @@ class NebulanceProvider(generic.TorrentProvider): ('(?i)%s(Proper)%s' % (bl, br), r'`\1`'), (r'%s\s*%s' % (bl, br), '`')]: title = re.sub(r[0], r[1], title) - grp = filter_list(lambda rn: '.release' in rn.lower(), item['tags']) + grp = list(filter(lambda rn: '.release' in rn.lower(), item['tags'])) title = '%s%s-%s' % (('', t[0])[1 < len(t)], title, (any(grp) and grp[0] or 'nogrp').upper().replace('.RELEASE', '')) @@ -186,7 +185,7 @@ class NebulanceProvider(generic.TorrentProvider): for mode in search_params: for search_string in search_params[mode]: - search_string = unquote_plus(unidecode(search_string)) + search_string = unquote_plus(search_string) params = {'release': search_string} if 'Cache' == mode: diff --git a/sickgear/providers/nyaa.py b/sickgear/providers/nyaa.py index 8b2bd5a9..65156509 100644 --- a/sickgear/providers/nyaa.py +++ b/sickgear/providers/nyaa.py @@ -22,7 +22,6 @@ from .. import logger from ..helpers import try_int from bs4_parser import BS4Parser -from _23 import unidecode from six import iteritems @@ -51,7 +50,6 @@ class NyaaProvider(generic.TorrentProvider): rc = dict([(k, re.compile('(?i)' + v)) for (k, v) in iteritems({'info': 'view', 'get': '(?:torrent|magnet:)'})]) for mode in search_params: for search_string in search_params[mode]: - search_string = unidecode(search_string) search_url = self.urls['search'] % ((0, 2)[self.confirmed], search_string.replace('.', ' ')) html = self.get_url(search_url) diff --git a/sickgear/providers/pretome.py b/sickgear/providers/pretome.py index 87acb764..23d067dd 100644 --- a/sickgear/providers/pretome.py +++ b/sickgear/providers/pretome.py @@ -23,7 +23,6 @@ from .. 
import logger from ..helpers import try_int from bs4_parser import BS4Parser -from _23 import unidecode from six import iteritems @@ -58,7 +57,6 @@ class PreToMeProvider(generic.TorrentProvider): rc = dict([(k, re.compile('(?i)' + v)) for (k, v) in iteritems({'info': 'details', 'get': 'download'})]) for mode in search_params: for search_string in search_params[mode]: - search_string = unidecode(search_string) search_url = self.urls['search'] % search_string html = self.get_url(search_url) diff --git a/sickgear/providers/privatehd.py b/sickgear/providers/privatehd.py index 5f8fbdf0..7ba28252 100644 --- a/sickgear/providers/privatehd.py +++ b/sickgear/providers/privatehd.py @@ -25,7 +25,6 @@ from .. import logger from ..helpers import try_int from bs4_parser import BS4Parser -from _23 import filter_iter, unidecode from six import iteritems @@ -93,7 +92,6 @@ class PrivateHDProvider(generic.TorrentProvider): return results for search_string in search_params[mode]: - search_string = unidecode(search_string) search_url = self.urls['search'] % ( '+'.join(search_string.split()), self._categories_string(mode, '')) @@ -120,7 +118,7 @@ class PrivateHDProvider(generic.TorrentProvider): if any(self.filter): marked = ','.join([x.attrs.get('title', '').lower() for x in tr.find_all( 'i', attrs={'class': ['fa-star', 'fa-diamond', 'fa-star-half-o']})]) - munged = ''.join(filter_iter(marked.__contains__, ['free', 'half', 'double'])) + munged = ''.join(filter(marked.__contains__, ['free', 'half', 'double'])) # noinspection PyUnboundLocalVariable if ((non_marked and rc['filter'].search(munged)) or (not non_marked and not rc['filter'].search(munged))): diff --git a/sickgear/providers/ptf.py b/sickgear/providers/ptf.py index 3870b82f..da1c94f2 100644 --- a/sickgear/providers/ptf.py +++ b/sickgear/providers/ptf.py @@ -26,7 +26,6 @@ from .. import logger from ..helpers import anon_url, try_int from bs4_parser import BS4Parser -from _23 import unidecode from six import iteritems @@ -82,7 +81,6 @@ class PTFProvider(generic.TorrentProvider): for mode in search_params: rc['cats'] = re.compile('(?i)cat=(?:%s)' % self._categories_string(mode, template='', delimiter='|')) for search_string in search_params[mode]: - search_string = unidecode(search_string) search_url = self.urls['search'] % ('+'.join(search_string.split()), self._categories_string(mode)) html = self.get_url(search_url) diff --git a/sickgear/providers/revtt.py b/sickgear/providers/revtt.py index 0ee68d6e..50527f39 100644 --- a/sickgear/providers/revtt.py +++ b/sickgear/providers/revtt.py @@ -23,7 +23,6 @@ from .. import logger from ..helpers import try_int from bs4_parser import BS4Parser -from _23 import unidecode from six import iteritems @@ -61,7 +60,6 @@ class RevTTProvider(generic.TorrentProvider): for mode in search_params: rc['cats'] = re.compile('(?i)cat=(?:%s)' % self._categories_string(mode, template='', delimiter='|')) for search_string in search_params[mode]: - search_string = unidecode(search_string) html = self.get_url(self.urls['search'] % ('+'.join(search_string.split()), self._categories_string(mode))) diff --git a/sickgear/providers/scenehd.py b/sickgear/providers/scenehd.py index 4b982fe0..74da4457 100644 --- a/sickgear/providers/scenehd.py +++ b/sickgear/providers/scenehd.py @@ -23,7 +23,6 @@ from .. 
import logger from ..helpers import try_int from bs4_parser import BS4Parser -from _23 import unidecode from six import iteritems @@ -62,7 +61,6 @@ class SceneHDProvider(generic.TorrentProvider): 'nuked': 'nuke', 'filter': 'free'})]) for mode in search_params: for search_string in search_params[mode]: - search_string = unidecode(search_string) search_url = self.urls['search'] % (search_string, self._categories_string(mode, '%s', ',')) html = self.get_url(search_url, timeout=90) diff --git a/sickgear/providers/scenetime.py b/sickgear/providers/scenetime.py index 96d14262..f4f783fb 100644 --- a/sickgear/providers/scenetime.py +++ b/sickgear/providers/scenetime.py @@ -23,7 +23,6 @@ from .. import logger from ..helpers import anon_url, try_int from bs4_parser import BS4Parser -from _23 import unidecode from six import iteritems @@ -70,7 +69,6 @@ class SceneTimeProvider(generic.TorrentProvider): urls = [] for search_string in search_params[mode]: urls += [[]] - search_string = unidecode(search_string) search_url = self.urls['search'] % (self._categories_string(), '+'.join(search_string.replace('.', ' ').split()), ('', '&freeleech=on')[self.freeleech]) diff --git a/sickgear/providers/shazbat.py b/sickgear/providers/shazbat.py index b0187e49..3121924d 100644 --- a/sickgear/providers/shazbat.py +++ b/sickgear/providers/shazbat.py @@ -26,7 +26,7 @@ from .. import logger from ..helpers import try_int from bs4_parser import BS4Parser -from _23 import unidecode, unquote_plus +from _23 import unquote_plus from six import iteritems, text_type @@ -75,7 +75,6 @@ class ShazbatProvider(generic.TorrentProvider): if self.should_skip(): return results else: - search_string = unidecode(search_string) search_string = search_string.replace(show_detail, '').strip() search_url = self.urls['search'] % search_string html = self.get_url(search_url) diff --git a/sickgear/providers/showrss.py b/sickgear/providers/showrss.py index b630b2fb..e9356e14 100644 --- a/sickgear/providers/showrss.py +++ b/sickgear/providers/showrss.py @@ -25,7 +25,7 @@ from .. 
import logger from ..helpers import sanitize_scene_name from bs4_parser import BS4Parser -from _23 import decode_str, filter_list, html_unescape, list_keys, list_values, unidecode +from _23 import decode_str, html_unescape from six import iteritems, iterkeys @@ -51,11 +51,11 @@ class ShowRSSProvider(generic.TorrentProvider): def logged_in(self, y): if all([None is y or 'logout' in y, - bool(filter_list(lambda c: 'remember_web_' in c, iterkeys(self.session.cookies)))]): + bool(list(filter(lambda c: 'remember_web_' in c, iterkeys(self.session.cookies))))]): if None is not y: self.shows = dict(re.findall(r'', y)) for k, v in iteritems(self.shows): - self.shows[k] = sanitize_scene_name(html_unescape(unidecode(decode_str(v)))) + self.shows[k] = sanitize_scene_name(html_unescape(decode_str(v))) return True return False @@ -74,13 +74,12 @@ class ShowRSSProvider(generic.TorrentProvider): if 'Cache' == mode: search_url = self.urls['browse'] else: - search_string = unidecode(search_string) - show_name = filter_list(lambda x: x.lower() == re.sub(r'\s.*', '', search_string.lower()), - list_values(self.shows)) + show_name = list(filter(lambda x: x.lower() == re.sub(r'\s.*', '', search_string.lower()), + list(self.shows.values()))) if not show_name: continue - search_url = self.urls['search'] % list_keys(self.shows)[ - list_values(self.shows).index(show_name[0])] + search_url = self.urls['search'] % list(self.shows)[ + list(self.shows.values()).index(show_name[0])] if search_url in urls: continue diff --git a/sickgear/providers/snowfl.py b/sickgear/providers/snowfl.py index e78f1f78..eb7986cc 100644 --- a/sickgear/providers/snowfl.py +++ b/sickgear/providers/snowfl.py @@ -25,7 +25,7 @@ from .. import logger from ..helpers import try_int from json_helper import json_loads -from _23 import b64encodestring, filter_iter, map_list, quote, unidecode +from _23 import b64encodestring, quote from six import iteritems # noinspection PyUnreachableCode @@ -74,7 +74,7 @@ class SnowflProvider(generic.TorrentProvider): params = dict(token=token[0], ent=token[1]) if 'Cache' != mode: - params.update({'ss': quote_fx(unidecode(search_string))}) + params.update({'ss': quote_fx(search_string)}) data_json = None vals = [i for i in range(3, 8)] @@ -92,13 +92,13 @@ class SnowflProvider(generic.TorrentProvider): if self.should_skip(): return results - for item in filter_iter(lambda di: re.match('(?i).*?(tv|television)', - di.get('type', '') or di.get('category', '')) - and (not self.confirmed or di.get('trusted') or di.get('verified')), - data_json or {}): - seeders, leechers, size = map_list(lambda arg: try_int( + for item in filter(lambda di: re.match('(?i).*?(tv|television)', + di.get('type', '') or di.get('category', '')) + and (not self.confirmed or di.get('trusted') or di.get('verified')), + data_json or {}): + seeders, leechers, size = list(map(lambda arg: try_int( *([item.get(arg[0]) if None is not item.get(arg[0]) else item.get(arg[1])]) * 2), - (('seeder', 'seed'), ('leecher', 'leech'), ('size', 'size'))) + (('seeder', 'seed'), ('leecher', 'leech'), ('size', 'size')))) if self._reject_item(seeders, leechers): continue title = item.get('name') or item.get('title') @@ -163,7 +163,7 @@ class SnowflProvider(generic.TorrentProvider): else: from sickgear import providers if 'torlock' in url.lower(): - prov = next(filter_iter(lambda p: 'torlock' == p.name.lower(), (filter_iter( + prov = next(filter(lambda p: 'torlock' == p.name.lower(), (filter( lambda sp: sp.providerType == self.providerType, 
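
In the snowfl hunk, `next(filter(...))` replaces `next(filter_iter(...))` one-for-one: it returns the first matching provider and raises `StopIteration` on no match, so passing a default is a worthwhile safeguard where a miss is possible (the default shown here is an illustration, not part of the patch):

    class Prov(object):
        def __init__(self, name):
            self.name = name

    providers = [Prov('Rarbg'), Prov('TorLock')]
    prov = next(filter(lambda p: 'torlock' == p.name.lower(), providers), None)
    assert prov is not None and 'TorLock' == prov.name
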
providers.sortedProviderList())))) state = prov.enabled prov.enabled = True diff --git a/sickgear/providers/speedapp.py b/sickgear/providers/speedapp.py index e730e193..478e20b3 100644 --- a/sickgear/providers/speedapp.py +++ b/sickgear/providers/speedapp.py @@ -21,7 +21,6 @@ from . import generic from ..helpers import try_int from six import string_types -from _23 import filter_list, map_list, unidecode class SpeedAppProvider(generic.TorrentProvider): @@ -55,14 +54,15 @@ class SpeedAppProvider(generic.TorrentProvider): self.perms_needed = self.perms if isinstance(resp, dict) and isinstance(resp.get('scopes'), list): self._authd = True - self.perms_needed = filter_list(lambda x: True is not x, [p in resp.get('scopes') or p for p in self.perms]) + self.perms_needed = list(filter(lambda x: True is not x, + [p in resp.get('scopes') or p for p in self.perms])) if not self.perms_needed: self.categories = None resp = self.get_url(self.urls['cats'], skip_auth=True, parse_json=True, headers=self.auth_header()) if isinstance(resp, list): - categories = [category['id'] for category in filter_list( + categories = [category['id'] for category in list(filter( lambda c: isinstance(c.get('id'), int) and isinstance(c.get('name'), string_types) - and c.get('name').upper() in ('TV PACKS', 'TV HD', 'TV SD'), resp)] + and c.get('name').upper() in ('TV PACKS', 'TV HD', 'TV SD'), resp))] self.categories = {'Cache': categories, 'Episode': categories, 'Season': categories} return not any(self.perms_needed) @@ -81,7 +81,7 @@ class SpeedAppProvider(generic.TorrentProvider): for mode in search_params: for search_string in search_params[mode]: search_url = self.urls['search'] % ( - unidecode(search_string), self._categories_string(mode, template='categories[]=%s')) + search_string, self._categories_string(mode, template='categories[]=%s')) data_json = self.get_url(search_url, skip_auth=True, parse_json=True, headers=self.auth_header()) if self.should_skip(): @@ -111,10 +111,10 @@ class SpeedAppProvider(generic.TorrentProvider): ('%s_api_key_tip' % self.get_id()) == key and \ ((not_authd or self.perms_needed) and ('create token at %s site
' - 'with perms %s' % (self.url_base, self.name, map_list( + 'with perms %s' % (self.url_base, self.name, list(map( lambda p: 't.read' in p and 'Read torrents' or 't.down' in p and 'Download torrents' - or 'ch.read' in p and 'Read snatches', self.perms_needed))) + or 'ch.read' in p and 'Read snatches', self.perms_needed)))) .replace('[', '').replace(']', '') or 'token is valid and required permissions are enabled') \ or '' diff --git a/sickgear/providers/speedcd.py b/sickgear/providers/speedcd.py index 8f21401f..9964362a 100644 --- a/sickgear/providers/speedcd.py +++ b/sickgear/providers/speedcd.py @@ -25,7 +25,7 @@ from ..helpers import try_int from bs4_parser import BS4Parser from requests.cookies import cookiejar_from_dict -from _23 import filter_list, quote, unquote +from _23 import quote, unquote from six import string_types, iteritems @@ -63,12 +63,12 @@ class SpeedCDProvider(generic.TorrentProvider): self.session.cookies.clear() json = self.get_url(self.urls['login_1'], skip_auth=True, post_data={'username': self.username}, parse_json=True) - resp = filter_list(lambda l: isinstance(l, list), json.get('Fs', [])) + resp = list(filter(lambda l: isinstance(l, list), json.get('Fs', []))) def get_html(_resp): for cur_item in _resp: if isinstance(cur_item, list): - _html = filter_list(lambda s: isinstance(s, string_types) and 'password' in s, cur_item) + _html = list(filter(lambda s: isinstance(s, string_types) and 'password' in s, cur_item)) if not _html: _html = get_html(cur_item) if _html: @@ -128,13 +128,13 @@ class SpeedCDProvider(generic.TorrentProvider): cnt = len(items[mode]) try: - html = filter_list(lambda l: isinstance(l, list), data_json.get('Fs', [])) + html = list(filter(lambda l: isinstance(l, list), data_json.get('Fs', []))) while html: if html and all(isinstance(x, string_types) for x in html): str_lengths = [len(x) for x in html] html = html[str_lengths.index(max(str_lengths))] break - html = filter_list(lambda l: isinstance(l, list), html) + html = list(filter(lambda l: isinstance(l, list), html)) if html and 0 < len(html): html = html[0] diff --git a/sickgear/providers/thepiratebay.py b/sickgear/providers/thepiratebay.py index 51cbd129..bf57db9f 100644 --- a/sickgear/providers/thepiratebay.py +++ b/sickgear/providers/thepiratebay.py @@ -25,7 +25,7 @@ from .. import logger from ..helpers import try_int from bs4_parser import BS4Parser -from _23 import b64decodestring, unidecode +from _23 import b64decodestring from six import iteritems @@ -90,7 +90,6 @@ class ThePirateBayProvider(generic.TorrentProvider): for mode in search_params: for search_string in search_params[mode]: - search_string = unidecode(search_string) if 'Cache' != mode: search_url = self.urls['api'] % search_string diff --git a/sickgear/providers/tokyotoshokan.py b/sickgear/providers/tokyotoshokan.py index 39592d61..338f38f9 100644 --- a/sickgear/providers/tokyotoshokan.py +++ b/sickgear/providers/tokyotoshokan.py @@ -22,7 +22,7 @@ from .. 
import show_name_helpers, tvcache from ..helpers import try_int from bs4_parser import BS4Parser -from _23 import filter_list, map_list, urlencode +from _23 import urlencode from six import iteritems @@ -78,10 +78,10 @@ class TokyoToshokanProvider(generic.TorrentProvider): info = top.find('td', class_='desc-top') title = info and re.sub(r'[ .]{2,}', '.', info.get_text().strip()) - links = info and map_list(lambda l: l.get('href', ''), info.find_all('a')) or None + links = info and list(map(lambda l: l.get('href', ''), info.find_all('a'))) or None download_url = self._link( - (filter_list(lambda l: 'magnet:' in l, links) - or filter_list(lambda l: not re.search(r'(magnet:|\.se).+', l), links))[0]) + (list(filter(lambda l: 'magnet:' in l, links)) + or list(filter(lambda l: not re.search(r'(magnet:|\.se).+', l), links)))[0]) except (AttributeError, TypeError, ValueError, IndexError): continue diff --git a/sickgear/providers/torlock.py b/sickgear/providers/torlock.py index 52fa16b8..79374449 100644 --- a/sickgear/providers/torlock.py +++ b/sickgear/providers/torlock.py @@ -23,7 +23,7 @@ from .. import logger from ..helpers import try_int from bs4_parser import BS4Parser -from _23 import b64decodestring, quote_plus, unidecode +from _23 import b64decodestring, quote_plus from six import iteritems @@ -66,8 +66,6 @@ class TorLockProvider(generic.TorrentProvider): for mode in search_params: for search_string in search_params[mode]: - search_string = unidecode(search_string) - search_url = self.urls['browse'] if 'Cache' == mode \ else self.urls['search'] % (quote_plus(search_string).replace('+', '-')) diff --git a/sickgear/providers/torrenting.py b/sickgear/providers/torrenting.py index cf17d82e..0870d459 100644 --- a/sickgear/providers/torrenting.py +++ b/sickgear/providers/torrenting.py @@ -23,7 +23,6 @@ from .. import logger from ..helpers import try_int from bs4_parser import BS4Parser -from _23 import unidecode from six import iteritems @@ -67,7 +66,6 @@ class TorrentingProvider(generic.TorrentProvider): 'get': 'download'})]) for mode in search_params: for search_string in search_params[mode]: - search_string = unidecode(search_string) search_url = self.urls['search'] % (self._categories_string(), search_string) html = self.get_url(search_url) diff --git a/sickgear/providers/torrentleech.py b/sickgear/providers/torrentleech.py index f65a3efb..148353f9 100644 --- a/sickgear/providers/torrentleech.py +++ b/sickgear/providers/torrentleech.py @@ -21,8 +21,7 @@ import re from . 
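
The TokyoToshokan link choice is a case where the `list()` wrappers are load-bearing: an exhausted `filter` object is still truthy, so `filter(...) or fallback` would never fall through, while an empty list is falsy and the `or` works as intended. A standalone model (the fallback predicate is simplified here):

    links = ['http://tracker.example/t/123', 'magnet:?xt=urn:btih:abc']

    download_url = (list(filter(lambda l: 'magnet:' in l, links))
                    or list(filter(lambda l: 'example' not in l, links)))[0]
    assert download_url.startswith('magnet:')
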
diff --git a/sickgear/providers/torrentleech.py b/sickgear/providers/torrentleech.py
index f65a3efb..148353f9 100644
--- a/sickgear/providers/torrentleech.py
+++ b/sickgear/providers/torrentleech.py
@@ -21,8 +21,7 @@ import re
 from . import generic
 from ..helpers import anon_url, try_int

-from _23 import unidecode
-from six import iteritems, PY2
+from six import iteritems


 class TorrentLeechProvider(generic.TorrentProvider):
@@ -66,7 +65,7 @@ class TorrentLeechProvider(generic.TorrentProvider):
             for page in range((3, 5)['Cache' == mode])[1:]:
                 urls[-1] += [self.urls[('search', 'browse')['Cache' == mode]] % {
                     'cats': self._categories_string(mode, '', ','),
-                    'query': unidecode(search_string) or search_string,
+                    'query': search_string,
                     'x': '%spage/%s' % (('facets/tags:FREELEECH/', '')[not self.freeleech], page)
                 }]

         results += self._search_urls(mode, last_recent_search, urls)
@@ -125,8 +124,7 @@ class TorrentLeechProvider(generic.TorrentProvider):
                             download_url = None
                             if dl and dl_id:
                                 # noinspection PyUnresolvedReferences
-                                download_url = self._link('download/%s/%s' % (dl_id, dl),
-                                                          url_quote=PY2 and isinstance(dl, unicode) or None)
+                                download_url = self._link('download/%s/%s' % (dl_id, dl))
                         except (BaseException, Exception):
                             continue
diff --git a/sickgear/providers/tvchaosuk.py b/sickgear/providers/tvchaosuk.py
index 244759cb..8897cf92 100644
--- a/sickgear/providers/tvchaosuk.py
+++ b/sickgear/providers/tvchaosuk.py
@@ -27,7 +27,7 @@ from ..helpers import try_int
 from bs4_parser import BS4Parser
 from dateutil.parser import parse

-from _23 import unidecode, unquote_plus
+from _23 import unquote_plus
 from six import iteritems

@@ -80,7 +80,7 @@ class TVChaosUKProvider(generic.TorrentProvider):
             'info': r'/torrents?/(?P(?P\d{2,})[^"]*)', 'get': 'download'})])
         for mode in search_params:
             for search_string in search_params[mode]:
-                search_string = unidecode(unquote_plus(search_string))
+                search_string = unquote_plus(search_string)

                 vals = [i for i in range(5, 16)]
                 random.SystemRandom().shuffle(vals)
diff --git a/sickgear/providers/xspeeds.py b/sickgear/providers/xspeeds.py
index 4b11a356..e500b438 100644
--- a/sickgear/providers/xspeeds.py
+++ b/sickgear/providers/xspeeds.py
@@ -25,7 +25,6 @@ from .. import logger
 from ..helpers import has_anime, try_int
 from bs4_parser import BS4Parser

-from _23 import unidecode
 from six import iteritems

@@ -70,7 +69,6 @@ class XspeedsProvider(generic.TorrentProvider):
             for search_string in search_params[mode]:
                 search_string = search_string.replace(u'£', '%')
                 search_string = re.sub(r'[\s.]+', '%', search_string)
-                search_string = unidecode(search_string)

                 kwargs = dict(post_data={'keywords': search_string, 'do': 'quick_sort', 'page': '0',
                                          'category': '0', 'search_type': 't_name', 'sort': 'added',
diff --git a/sickgear/scene_exceptions.py b/sickgear/scene_exceptions.py
index a9fa0afa..9aa9591d 100644
--- a/sickgear/scene_exceptions.py
+++ b/sickgear/scene_exceptions.py
@@ -35,8 +35,8 @@ from .sgdatetime import timestamp_near

 import lib.rarfile.rarfile as rarfile

-from _23 import filter_iter, list_range, map_iter
-from six import iteritems, PY2, text_type
+from _23 import list_range
+from six import iteritems, text_type

 # noinspection PyUnreachableCode
 if False:
@@ -303,7 +303,7 @@ def retrieve_exceptions():
                             list(cur_tvid_prodid))]

                 # if this exception isn't already in the DB then add it
-                for cur_exception_dict in filter_iter(lambda e: e not in existing_exceptions, exception_dict[cur_tvid_prodid]):
+                for cur_exception_dict in filter(lambda e: e not in existing_exceptions, exception_dict[cur_tvid_prodid]):
                     try:
                         cur_exception, cur_season = next(iteritems(cur_exception_dict))
                     except (BaseException, Exception):
@@ -311,9 +311,6 @@ def retrieve_exceptions():
                         logger.log(traceback.format_exc(), logger.ERROR)
                         continue

-                    if PY2 and not isinstance(cur_exception, text_type):
-                        cur_exception = text_type(cur_exception, 'utf-8', 'replace')
-
                     cl.append(['INSERT INTO scene_exceptions'
                                ' (indexer, indexer_id, show_name, season) VALUES (?,?,?,?)',
                                list(cur_tvid_prodid) + [cur_exception, cur_season]])
@@ -368,9 +365,6 @@ def update_scene_exceptions(tvid, prodid, scene_exceptions):
             exceptionsCache[(tvid, prodid)][cur_season].append(cur_exception)

-            if PY2 and not isinstance(cur_exception, text_type):
-                cur_exception = text_type(cur_exception, 'utf-8', 'replace')
-
             my_db.action('INSERT INTO scene_exceptions'
                          ' (indexer, indexer_id, show_name, season) VALUES (?,?,?,?)',
                          [tvid, prodid, cur_exception, cur_season])
@@ -489,7 +483,7 @@ def _anidb_exceptions_fetcher():
     if should_refresh('anidb'):
         logger.log(u'Checking for AniDB scene exception updates')
-        for cur_show_obj in filter_iter(lambda _s: _s.is_anime and TVINFO_TVDB == _s.tvid, sickgear.showList):
+        for cur_show_obj in filter(lambda _s: _s.is_anime and TVINFO_TVDB == _s.tvid, sickgear.showList):
             try:
                 anime = create_anidb_obj(name=cur_show_obj.name, tvdbid=cur_show_obj.prodid, autoCorrectName=True)
             except (BaseException, Exception):
@@ -559,8 +553,8 @@ def _xem_get_ids(infosrc_name, xem_origin):
                        % (task.lower() % ('', 's'), infosrc_name, url), logger.ERROR)
         else:
             if 'success' == parsed_json.get('result', '') and 'data' in parsed_json:
-                xem_ids = list(set(filter_iter(lambda prodid: 0 < prodid,
-                                               map_iter(lambda pid: helpers.try_int(pid), parsed_json['data']))))
+                xem_ids = list(set(filter(lambda prodid: 0 < prodid,
+                                          map(lambda pid: helpers.try_int(pid), parsed_json['data']))))
                 if 0 == len(xem_ids):
                     logger.log(u'Failed %s %s, no data items parsed from URL: %s'
                                % (task.lower() % ('', 's'), infosrc_name, url), logger.WARNING)
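
Two replacement shapes are visible above and throughout the patch: where a result is consumed exactly once (feeding a `for` loop or `any()`), `filter_iter`/`map_iter` become bare `filter()`/`map()` and stay lazy; where it is stored, indexed or measured, `filter_list`/`map_list` become `list(filter(...))`/`list(map(...))`. The distinction matters because py3 iterators are single-use:

    evens = filter(lambda n: 0 == n % 2, range(10))  # lazy, one-shot
    print(list(evens))  # [0, 2, 4, 6, 8]
    print(list(evens))  # [] - the iterator is already exhausted

    evens = list(filter(lambda n: 0 == n % 2, range(10)))  # materialised
    print(list(evens) == list(evens))  # True - reusable
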
diff --git a/sickgear/scene_numbering.py b/sickgear/scene_numbering.py
index 8bfa2cb7..a93d4776 100644
--- a/sickgear/scene_numbering.py
+++ b/sickgear/scene_numbering.py
@@ -32,8 +32,6 @@ from .helpers import try_int
 from .scene_exceptions import xem_ids_list
 from .sgdatetime import timestamp_near

-from _23 import filter_iter, map_list
-
 # noinspection PyUnreachableCode
 if False:
     from typing import Dict, List, Optional, Tuple, Union
@@ -718,8 +716,8 @@ def _get_absolute_numbering_for_show(tbl, tvid, prodid):
             """ % (tbl, ('indexer_id', 'showid')['tv_episodes' == tbl]), [int(tvid), int(prodid)])

         for cur_row in sql_result:
-            season, episode, abs_num = map_list(lambda x: try_int(cur_row[x], None),
-                                                ('season', 'episode', 'absolute_number'))
+            season, episode, abs_num = list(map(lambda x: try_int(cur_row[x], None),
+                                                ('season', 'episode', 'absolute_number')))
             if None is season and None is episode and None is not abs_num:
                 season, episode, _ = _get_sea(tvid, prodid, absolute_number=abs_num)

@@ -815,7 +813,7 @@ def xem_refresh(tvid, prodid, force=False):
                 return

             if 'success' in parsed_json['result']:
-                cl = map_list(lambda entry: [
+                cl = list(map(lambda entry: [
                     """
                     UPDATE tv_episodes
                     SET scene_season = ?, scene_episode = ?, scene_absolute_number = ?
@@ -824,7 +822,7 @@ def xem_refresh(tvid, prodid, force=False):
                           for v in ('season', 'episode', 'absolute')]
                     + [tvid, prodid]
                     + [entry.get(xem_origin).get(v) for v in ('season', 'episode')]
-                ], filter_iter(lambda x: 'scene' in x, parsed_json['data']))
+                ], filter(lambda x: 'scene' in x, parsed_json['data'])))

                 if 0 < len(cl):
                     my_db = db.DBConnection()
diff --git a/sickgear/search.py b/sickgear/search.py
index 898f1f36..c7609512 100644
--- a/sickgear/search.py
+++ b/sickgear/search.py
@@ -34,7 +34,6 @@ from .common import DOWNLOADED, SNATCHED, SNATCHED_BEST, SNATCHED_PROPER, MULTI_
 from .providers.generic import GenericProvider
 from .tv import TVEpisode, TVShow

-from _23 import filter_list, filter_iter, list_values
 from six import iteritems, itervalues, string_types

 # noinspection PyUnreachableCode
@@ -590,7 +589,7 @@ def search_for_needed_episodes(ep_obj_list):

     orig_thread_name = threading.current_thread().name

-    providers = filter_list(lambda x: x.is_active() and x.enable_recentsearch, sickgear.providers.sortedProviderList())
+    providers = list(filter(lambda x: x.is_active() and x.enable_recentsearch, sickgear.providers.sortedProviderList()))
     for cur_provider in providers:
         threading.current_thread().name = '%s :: [%s]' % (orig_thread_name, cur_provider.name)
@@ -646,7 +645,7 @@ def search_for_needed_episodes(ep_obj_list):
         logger.log('Failed recent search of %s enabled provider%s. More info in debug log.' % (
             len(providers), helpers.maybe_plural(providers)), logger.ERROR)

-    return list_values(found_results)
+    return list(found_results.values())


 def can_reject(release_name):
@@ -738,10 +737,10 @@ def _search_provider_thread(provider, provider_results, show_obj, ep_obj_list, m
             # make a list of all the results for this provider
             for cur_search_result in search_result_list:
                 # skip non-tv crap
-                search_result_list[cur_search_result] = filter_list(
+                search_result_list[cur_search_result] = list(filter(
                     lambda ep_item: ep_item.show_obj == show_obj and show_name_helpers.pass_wordlist_checks(
                         ep_item.name, parse=False, indexer_lookup=False, show_obj=ep_item.show_obj),
-                    search_result_list[cur_search_result])
+                    search_result_list[cur_search_result]))

                 if cur_search_result in provider_results:
                     provider_results[cur_search_result] += search_result_list[cur_search_result]
@@ -941,7 +940,7 @@ def search_providers(
                 # if not, break it apart and add them as the lowest priority results
                 individual_results = nzbSplitter.splitResult(best_season_result)

-                for cur_result in filter_iter(
+                for cur_result in filter(
                         lambda r: r.show_obj == show_obj and show_name_helpers.pass_wordlist_checks(
                             r.name, parse=False, indexer_lookup=False, show_obj=r.show_obj), individual_results):
                     ep_num = None
diff --git a/sickgear/search_backlog.py b/sickgear/search_backlog.py
index 904d54a9..eb659770 100644
--- a/sickgear/search_backlog.py
+++ b/sickgear/search_backlog.py
@@ -28,7 +28,6 @@ from .search import wanted_episodes
 from .sgdatetime import SGDatetime, timestamp_near
 from .tv import TVidProdid, TVEpisode, TVShow

-from _23 import filter_list, map_iter, map_list
 from six import iteritems, itervalues, moves

 # noinspection PyUnreachableCode
@@ -212,7 +211,7 @@ class BacklogSearcher(object):
         any_torrent_enabled = continued_backlog = False
         if not force and standard_backlog and (datetime.datetime.now() - datetime.datetime.fromtimestamp(
                 self._get_last_runtime())) < datetime.timedelta(hours=23):
-            any_torrent_enabled = any(map_iter(
+            any_torrent_enabled = any(map(
                 lambda x: x.is_active() and getattr(x, 'enable_backlog', None)
                           and GenericProvider.TORRENT == x.providerType,
                 sickgear.providers.sortedProviderList()))
@@ -291,8 +290,8 @@ class BacklogSearcher(object):
                 if not runparts and parts:
                     runparts = parts[0]
-                    wanted_list = filter_list(
-                        lambda wi: wi and next(itervalues(wi))[0].show_obj.tvid_prodid in runparts, wanted_list)
+                    wanted_list = list(filter(
+                        lambda wi: wi and next(itervalues(wi))[0].show_obj.tvid_prodid in runparts, wanted_list))

             limited_wanted_list = []
             if standard_backlog and not any_torrent_enabled and runparts:
@@ -314,8 +313,8 @@ class BacklogSearcher(object):
                 for i, l in enumerate(parts):
                     if 0 == i:
                         continue
-                    cl += map_list(lambda m: ['INSERT INTO backlogparts (part, indexer, indexerid) VALUES (?,?,?)',
-                                              [i + 1] + TVidProdid(m).list], l)
+                    cl += list(map(lambda m: ['INSERT INTO backlogparts (part, indexer, indexerid) VALUES (?,?,?)',
+                                              [i + 1] + TVidProdid(m).list], l))

                 if 0 < len(cl):
                     my_db.mass_action(cl)
diff --git a/sickgear/search_queue.py b/sickgear/search_queue.py
index 36f804dd..62844ac8 100644
--- a/sickgear/search_queue.py
+++ b/sickgear/search_queue.py
@@ -34,8 +34,6 @@ from .classes import Proper, SimpleNamespace
 from .search import wanted_episodes, get_aired_in_season, set_wanted_aired
 from .tv import TVEpisode

-from _23 import filter_list
-
 # noinspection PyUnreachableCode
 if False:
     from typing import Any, AnyStr, Dict, List, Optional, Union
@@ -520,8 +518,8 @@ class RecentSearchQueueItem(generic_queue.QueueItem):
             orig_thread_name = threading.current_thread().name
             threads = []

-            providers = filter_list(lambda x: x.is_active() and x.enable_recentsearch,
-                                    sickgear.providers.sortedProviderList())
+            providers = list(filter(lambda x: x.is_active() and x.enable_recentsearch,
+                                    sickgear.providers.sortedProviderList()))
             for cur_provider in providers:
                 if not cur_provider.cache.should_update():
                     continue
diff --git a/sickgear/sgdatetime.py b/sickgear/sgdatetime.py
index 86bb84b5..043bedb0 100644
--- a/sickgear/sgdatetime.py
+++ b/sickgear/sgdatetime.py
@@ -23,7 +23,7 @@ import sys
 import sickgear
 from dateutil import tz

-from six import integer_types, PY2, string_types
+from six import integer_types, string_types

 # noinspection PyUnreachableCode
 if False:
@@ -283,21 +283,14 @@ class SGDatetime(datetime.datetime):
         return (default, timestamp)[isinstance(timestamp, (float, integer_types))]


-if PY2:
-    """
-    Use `timestamp_near` for a timezone aware UTC timestamp in the near future or recent past.
-
-    Under py3, using the faster variable assigned cpython callable, so py2 is set up to mimic the signature types.
-    Note: the py3 callable is limited to datetime.datetime and does not work with datetime.date.
-    """
-    def _py2timestamp(dt=None):
+# noinspection PyUnreachableCode
+if False:
+    # just to trick pycharm into correct type detection
+    def timestamp_near(d_t):
         # type: (datetime.datetime) -> float
-        try:
-            import time
-            return int(time.mktime(dt.timetuple()))
-        except (BaseException, Exception):
-            return 0
-    timestamp_near = _py2timestamp  # type: Callable[[datetime.datetime], float]
-else:
-    # py3 native timestamp uses milliseconds
-    timestamp_near = datetime.datetime.timestamp  # type: Callable[[datetime.datetime], float]
+        pass
+
+
+# py3 native timestamp returns fractional seconds
+# noinspection PyRedeclaration
+timestamp_near = datetime.datetime.timestamp
diff --git a/sickgear/show_name_helpers.py b/sickgear/show_name_helpers.py
index a18e5878..b0c00027 100644
--- a/sickgear/show_name_helpers.py
+++ b/sickgear/show_name_helpers.py
@@ -28,7 +28,7 @@ from .name_parser.parser import InvalidNameException, InvalidShowException, Name
 from .scene_exceptions import get_scene_exceptions
 from sg_helpers import scantree

-from _23 import map_list, quote_plus
+from _23 import quote_plus
 from six import iterkeys, itervalues

 # noinspection PyUnreachableCode
@@ -237,7 +237,7 @@ def get_show_names_all_possible(show_obj, season=-1, scenify=True, spacer='.', f
     show_names = list(set(
         all_possible_show_names(show_obj, season=season, force_anime=force_anime)))  # type: List[AnyStr]
     if scenify:
-        show_names = map_list(sanitize_scene_name, show_names)
+        show_names = list(map(sanitize_scene_name, show_names))

     return url_encode(show_names, spacer)
diff --git a/sickgear/trakt_helpers.py b/sickgear/trakt_helpers.py
index acbbb398..b1a8314f 100644
--- a/sickgear/trakt_helpers.py
+++ b/sickgear/trakt_helpers.py
@@ -5,7 +5,7 @@ import re
 import sickgear
 from .helpers import try_int

-from _23 import decode_bytes, decode_str, list_items
+from _23 import decode_bytes, decode_str
 from six import iteritems, text_type

@@ -51,7 +51,7 @@ def build_config_string(config):
     :param config: dicts of Trakt account id, parent location
     :return: string csv of parsed config kwargs for config file
     """
-    return text_type(list_items(config))
+    return text_type(list(config.items()))


 def trakt_collection_remove_account(account_id):
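
The `if False:` stub that replaces the old `PY2` branch in sgdatetime.py never executes; it exists purely so static analysers pick up a signature for `timestamp_near`, while the module-level assignment binds the C-implemented `datetime.datetime.timestamp` at import time. The pattern in isolation (a sketch mirroring the patch):

    import datetime

    # noinspection PyUnreachableCode
    if False:
        # unreachable: exists only so IDEs infer the callable's signature
        def timestamp_near(d_t):
            # type: (datetime.datetime) -> float
            pass

    # the real binding: the native method, sub-second precision included
    timestamp_near = datetime.datetime.timestamp

    print(timestamp_near(datetime.datetime.now()))
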
+++ b/sickgear/tv.py
@@ -63,8 +63,7 @@ from lib.tvinfo_base import RoleTypes, TVINFO_FACEBOOK, TVINFO_INSTAGRAM, TVINFO
 from lib.tvinfo_base.exceptions import *
 from sg_helpers import calc_age, int_to_time, remove_file_perm, time_to_int

-from _23 import filter_iter, filter_list, list_keys
-from six import integer_types, iteritems, itervalues, moves, PY2, string_types
+from six import integer_types, iteritems, itervalues, moves, string_types

 # noinspection PyUnreachableCode
 if False:
@@ -172,9 +171,9 @@ class TVidProdid(object):
                     if coreid_warnings:
                         logger.log('%s\n' % pre_msg +
                                    '|>%s^-- Note: Bootstrap & Tornado startup functions stripped from traceback log.' %
-                                   '|>'.join(filter_iter(lambda text: not re.search(r'(?i)bootstrap|traceback\.'
-                                                                                    r'format_stack|pydevd|tornado'
-                                                                                    r'|webserveinit', text),
+                                   '|>'.join(filter(lambda text: not re.search(r'(?i)bootstrap|traceback\.'
+                                                                               r'format_stack|pydevd|tornado'
+                                                                               r'|webserveinit', text),
                                              traceback.format_stack(inspect.currentframe()))))
                 except IndexError:
                     pass
@@ -379,7 +378,7 @@ class Person(Referential):
                  akas=None,  # type: Set[AnyStr]
                  character_obj=None,  # type: Character
                  tmp_character_obj=None  # type: Character
-                 ):  # type: (...) -> Person
+                 ):

         super(Person, self).__init__(sid)

@@ -789,6 +788,8 @@ class Person(Referential):
             if None is not rp:
                 if confirmed_on_src:
                     for i in (TVINFO_TRAKT, TVINFO_IMDB, TVINFO_TMDB, TVINFO_TVMAZE, TVINFO_TVDB):
+                        if not rp.ids.get(i):
+                            continue
                         # in case it's the current source, use its id and lock it from being changed
                         if cur_tv_info_src == i and rp.ids.get(i):
                             source_confirmed[i] = True
@@ -803,6 +804,8 @@ class Person(Referential):
                                 self.dirty_ids = True

                     for i in (TVINFO_INSTAGRAM, TVINFO_TWITTER, TVINFO_FACEBOOK, TVINFO_WIKIPEDIA):
+                        if not rp.social_ids.get(i):
+                            continue
                         if rp.social_ids.get(i) and not self.ids.get(i) or \
                                 (rp.social_ids.get(i) and rp.social_ids.get(i) != self.ids.get(i)):
                             self.ids[i] = rp.social_ids[i]
@@ -892,11 +895,12 @@ class Person(Referential):
             ]
             if force or self.dirty_ids:
                 for s, v in iteritems(self.ids):
-                    cl.extend([
-                        ['UPDATE person_ids SET src_id = ? WHERE person_id = ? AND src = ?', [v, self.id, s]],
-                        ["INSERT INTO person_ids (src, src_id, person_id) SELECT %s, '%s', %s WHERE changes() == 0"
-                         % (s, v, self.id)]
-                    ])
+                    if v:
+                        cl.extend([
+                            ['UPDATE person_ids SET src_id = ? WHERE person_id = ? AND src = ?', [v, self.id, s]],
+                            ["INSERT INTO person_ids (src, src_id, person_id) SELECT %s, '%s', %s WHERE changes() == 0"
+                             % (s, v, self.id)]
+                        ])
             if cl:
                 r_id = my_db.mass_action(cl)
                 if r_id and r_id[-1:][0]:
@@ -3152,9 +3156,9 @@ class TVShow(TVShowBase):
                 if isinstance(imdb_tv.get('numberOfEpisodes'), (int, string_types)):
                     imdb_info['episode_count'] = try_int(imdb_tv.get('numberOfEpisodes'), 1)
                 if isinstance(imdb_tv.get('genres'), (list, tuple)):
-                    imdb_info['genres'] = '|'.join(filter_iter(lambda _v: _v, imdb_tv.get('genres')))
+                    imdb_info['genres'] = '|'.join(filter(lambda _v: _v, imdb_tv.get('genres')))
                 if isinstance(imdb_tv.get('origins'), list):
-                    imdb_info['country_codes'] = '|'.join(filter_iter(lambda _v: _v, imdb_tv.get('origins')))
+                    imdb_info['country_codes'] = '|'.join(filter(lambda _v: _v, imdb_tv.get('origins')))

                 # certificate
                 if isinstance(imdb_certificates.get('certificates'), dict):
@@ -3256,7 +3260,7 @@ class TVShow(TVShowBase):
         action = ('delete', 'trash')[sickgear.TRASH_REMOVE_SHOW]

         # remove self from show list
-        sickgear.showList = filter_list(lambda so: so.tvid_prodid != self.tvid_prodid, sickgear.showList)
+        sickgear.showList = list(filter(lambda so: so.tvid_prodid != self.tvid_prodid, sickgear.showList))
         try:
             del sickgear.showDict[self.sid_int]
         except (BaseException, Exception):
@@ -4220,8 +4224,6 @@ class TVEpisode(TVEpisodeBase):
                 tzinfo = self._show_obj.timezone
             elif isinstance(self._show_obj.network, string_types) and self._show_obj.network:
                 tzinfo = network_timezones.get_network_timezone(self._show_obj.network)
-            if PY2:
-                return SGDatetime.combine(self.airdate, ep_time).replace(tzinfo=tzinfo).timestamp_far()
             return SGDatetime.combine(self.airdate, ep_time, tzinfo=tzinfo).timestamp_far()
         return None
@@ -4964,7 +4966,7 @@ class TVEpisode(TVEpisodeBase):
         result_name = pattern

         # do the replacements
-        for cur_replacement in sorted(list_keys(replace_map), reverse=True):
+        for cur_replacement in sorted(list(replace_map), reverse=True):
             result_name = result_name.replace(cur_replacement, helpers.sanitize_filename(replace_map[cur_replacement]))
             result_name = result_name.replace(cur_replacement.lower(),
                                               helpers.sanitize_filename(replace_map[cur_replacement].lower()))
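
The guarded `person_ids` write above pairs an UPDATE with a conditional INSERT: the INSERT's `WHERE changes() == 0` only passes when the preceding UPDATE matched no row, because SQLite's `changes()` reports the row count of the last completed statement on the connection. A self-contained sketch of the same upsert idiom (hypothetical table, not SickGear code):

    import sqlite3

    con = sqlite3.connect(':memory:')
    con.execute('CREATE TABLE ids (src INTEGER, src_id TEXT, person_id INTEGER)')

    def upsert(src, src_id, person_id):
        # try the update first ...
        con.execute('UPDATE ids SET src_id = ? WHERE person_id = ? AND src = ?',
                    (src_id, person_id, src))
        # ... and insert only when the update touched nothing
        con.execute('INSERT INTO ids (src, src_id, person_id)'
                    ' SELECT ?, ?, ? WHERE changes() = 0', (src, src_id, person_id))

    upsert(1, 'nm0000001', 42)  # inserts
    upsert(1, 'nm0000002', 42)  # updates in place
    print(con.execute('SELECT * FROM ids').fetchall())  # [(1, 'nm0000002', 42)]
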
diff --git a/sickgear/tvcache.py b/sickgear/tvcache.py
index 6450b15d..16d5d967 100644
--- a/sickgear/tvcache.py
+++ b/sickgear/tvcache.py
@@ -30,9 +30,6 @@ from .rssfeeds import RSSFeeds
 from .sgdatetime import timestamp_near
 from .tv import TVEpisode

-from _23 import filter_list, map_iter
-from six import PY2, text_type
-
 # noinspection PyUnreachableCode
 if False:
     from typing import Any, AnyStr, Dict, List, Tuple, Union
@@ -315,7 +312,7 @@ class TVCache(object):
             if season_number and episode_numbers:
                 # store episodes as a separated string
-                episode_text = '|%s|' % '|'.join(map_iter(str, episode_numbers))
+                episode_text = '|%s|' % '|'.join(map(str, episode_numbers))

                 # get the current timestamp
                 cur_timestamp = int(timestamp_near(datetime.datetime.now()))
@@ -323,9 +320,6 @@ class TVCache(object):
                 # get quality of release
                 quality = parse_result.quality

-                if PY2 and not isinstance(name, text_type):
-                    name = text_type(name, 'utf-8', 'replace')
-
                 # get release group
                 release_group = parse_result.release_group
@@ -376,7 +370,7 @@ class TVCache(object):
         if date:
             sql += ' AND time >= ' + str(int(time.mktime(date.timetuple())))

-        return filter_list(lambda x: x['indexerid'] != 0, my_db.select(sql, [self.providerID]))
+        return list(filter(lambda x: x['indexerid'] != 0, my_db.select(sql, [self.providerID])))

     def findNeededEpisodes(self, ep_obj_list, manual_search=False):
         # type: (Union[TVEpisode, List[TVEpisode]], bool) -> Dict[TVEpisode, SearchResult]
diff --git a/sickgear/version_checker.py b/sickgear/version_checker.py
index 23609e71..7da64b0e 100644
--- a/sickgear/version_checker.py
+++ b/sickgear/version_checker.py
@@ -35,7 +35,6 @@ from sg_helpers import cmdline_runner, get_url
 # noinspection PyUnresolvedReferences
 from six.moves import urllib
 from six import string_types
-from _23 import list_keys

 # noinspection PyUnreachableCode
 if False:
@@ -83,7 +82,7 @@ class PackagesUpdater(object):
                 ui.notifications.message(msg)
             return False

-        logger.log('Update(s) for %s found %s' % (self.install_type, list_keys(sickgear.UPDATES_TODO)))
+        logger.log('Update(s) for %s found %s' % (self.install_type, list(sickgear.UPDATES_TODO)))

         # save updates_todo to config to be loaded after restart
         sickgear.save_config()
diff --git a/sickgear/webapi.py b/sickgear/webapi.py
index 691f2c6c..bd0e6807 100644
--- a/sickgear/webapi.py
+++ b/sickgear/webapi.py
@@ -55,8 +55,8 @@ from .tv import TVEpisode, TVShow, TVidProdid
 from .webserve import AddShows
 import dateutil.parser

-from _23 import decode_str, list_keys, unquote_plus
-from six import integer_types, iteritems, iterkeys, PY2, string_types, text_type
+from _23 import decode_str, unquote_plus
+from six import integer_types, iteritems, iterkeys, string_types, text_type

 # noinspection PyUnreachableCode
 if False:
@@ -253,8 +253,6 @@ class Api(webserve.BaseHandler):
                 result = function(*ag)
             return result
         except Exception as e:
-            if PY2:
-                logger.log('traceback: %s' % traceback.format_exc(), logger.ERROR)
             logger.log(ex(e), logger.ERROR)
             raise e
@@ -1043,7 +1041,7 @@ class CMD_SickGearComingEpisodes(ApiCall):
                 ep['network'] and network_timezones.get_network_timezone(ep['network'], return_name=True)[1])

             # remove all fields we don't want in the api response
-            for cur_f in list_keys(ep):
+            for cur_f in list(ep):
                 if cur_f not in [  # fields to preserve
                     'absolute_number', 'air_by_date', 'airdate', 'airs', 'archive_firstmatch',
                     'classification', 'data_network', 'data_show_name',
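
`list_keys(ep)` → `list(ep)` works because `list()` over a dict yields its keys; the snapshot matters in the hunk above since the loop goes on to delete unwanted keys, and removing entries while iterating a live key view would raise `RuntimeError: dictionary changed size during iteration`. Illustration:

    ep = {'airdate': '2023-01-02', 'scene_season': 3, 'scene_episode': 4}
    keep = ('airdate',)

    for cur_f in list(ep):  # snapshot of the keys, safe while mutating ep
        if cur_f not in keep:
            del ep[cur_f]

    print(ep)  # {'airdate': '2023-01-02'}
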
diff --git a/sickgear/webserve.py b/sickgear/webserve.py
index 6439dfd5..18f14bd9 100644
--- a/sickgear/webserve.py
+++ b/sickgear/webserve.py
@@ -90,9 +90,9 @@ from lib.api_trakt.exceptions import TraktException, TraktAuthException

 import lib.rarfile.rarfile as rarfile

-from _23 import decode_bytes, decode_str, filter_list, filter_iter, getargspec, list_keys, list_values, \
-    map_consume, map_iter, map_list, map_none, ordered_dict, quote_plus, unquote_plus, urlparse
-from six import binary_type, integer_types, iteritems, iterkeys, itervalues, moves, PY2, string_types
+from _23 import decode_bytes, decode_str, getargspec, \
+    map_consume, map_none, quote_plus, unquote_plus, urlparse
+from six import binary_type, integer_types, iteritems, iterkeys, itervalues, moves, string_types

 # noinspection PyUnreachableCode
 if False:
@@ -198,9 +196,7 @@ class RouteHandler(LegacyBaseHandler):
             return [self.decode_data(d) for d in data]
         if not isinstance(data, string_types):
             return data
-        if not PY2:
-            return data.encode('latin1').decode('utf-8')
-        return data.decode('utf-8')
+        return data.encode('latin1').decode('utf-8')

     @gen.coroutine
     def route_method(self, route, use_404=False, limit_route=None, xsrf_filter=True):
@@ -240,7 +238,7 @@ class RouteHandler(LegacyBaseHandler):
             # no filtering for legacy and routes that depend on *args and **kwargs
             result = yield self.async_call(method, request_kwargs)  # method(**request_kwargs)
         else:
-            filter_kwargs = dict(filter_iter(lambda kv: kv[0] in method_args, iteritems(request_kwargs)))
+            filter_kwargs = dict(filter(lambda kv: kv[0] in method_args, iteritems(request_kwargs)))
             result = yield self.async_call(method, filter_kwargs)  # method(**filter_kwargs)
         self.finish(result)
@@ -249,8 +247,6 @@ class RouteHandler(LegacyBaseHandler):
         try:
             return function(**kw)
         except (BaseException, Exception) as e:
-            if PY2:
-                raise Exception(traceback.format_exc().replace('\n', '<br>'))
             raise e

     def page_not_found(self):
@@ -1393,7 +1389,7 @@ class Home(MainHandler):
         if data:
             my_db = db.DBConnection(row_type='dict')
-            media_paths = map_list(lambda arg: os.path.basename(arg[1]['path_file']), iteritems(data))
+            media_paths = list(map(lambda arg: os.path.basename(arg[1]['path_file']), iteritems(data)))

             def chunks(lines, n):
                 for c in range(0, len(lines), n):
@@ -1553,13 +1549,13 @@ class Home(MainHandler):
         index = 0
         if 'custom' == sickgear.SHOWLIST_TAGVIEW:
             for name in sickgear.SHOW_TAGS:
-                results = filter_list(lambda so: so.tag == name, sickgear.showList)
+                results = list(filter(lambda so: so.tag == name, sickgear.showList))
                 if results:
                     t.showlists.append(['container%s' % index, name, results])
                     index += 1
         elif 'anime' == sickgear.SHOWLIST_TAGVIEW:
-            show_results = filter_list(lambda so: not so.anime, sickgear.showList)
-            anime_results = filter_list(lambda so: so.anime, sickgear.showList)
+            show_results = list(filter(lambda so: not so.anime, sickgear.showList))
+            anime_results = list(filter(lambda so: so.anime, sickgear.showList))
             if show_results:
                 t.showlists.append(['container%s' % index, 'Show List', show_results])
                 index += 1
@@ -1904,7 +1900,7 @@ class Home(MainHandler):
                                      ' AND notify_list != ""', [TVidProdid.glue])
         notify_lists = {}
-        for r in filter_iter(lambda x: x['notify_list'].strip(), rows):
+        for r in filter(lambda x: x['notify_list'].strip(), rows):
             # noinspection PyTypeChecker
             notify_lists[r['tvid_prodid']] = r['notify_list']
@@ -2265,7 +2261,7 @@ class Home(MainHandler):
             del (ep_counts['totals'][0])

         ep_counts['eps_all'] = sum(itervalues(ep_counts['totals']))
-        ep_counts['eps_most'] = max(list_values(ep_counts['totals']) + [0])
+        ep_counts['eps_most'] = max(list(ep_counts['totals'].values()) + [0])

         all_seasons = sorted(iterkeys(ep_counts['totals']), reverse=True)
         t.lowest_season, t.highest_season = all_seasons and (all_seasons[-1], all_seasons[0]) or (0, 0)
@@ -2436,7 +2432,7 @@ class Home(MainHandler):
         if 'custom' == sickgear.SHOWLIST_TAGVIEW:
             sorted_show_lists = []
             for tag in sickgear.SHOW_TAGS:
-                results = filter_list(lambda _so: _so.tag == tag, sickgear.showList)
+                results = list(filter(lambda _so: _so.tag == tag, sickgear.showList))
                 if results:
                     sorted_show_lists.append([tag, sorted(results, key=lambda x: titler(x.unique_name))])
             # handle orphaned shows
@@ -2841,7 +2837,7 @@ class Home(MainHandler):
         errors = []
         with show_obj.lock:
-            show_obj.quality = Quality.combineQualities(map_list(int, any_qualities), map_list(int, best_qualities))
+            show_obj.quality = Quality.combineQualities(list(map(int, any_qualities)), list(map(int, best_qualities)))
             show_obj.upgrade_once = upgrade_once

             # reversed for now
@@ -3371,7 +3367,7 @@ class Home(MainHandler):
         sickgear.search_queue.remove_old_fifo(sickgear.search_queue.MANUAL_SEARCH_HISTORY)
         results = sickgear.search_queue.MANUAL_SEARCH_HISTORY

-        for item in filter_iter(lambda q: hasattr(q, 'segment_ns'), queued):
+        for item in filter(lambda q: hasattr(q, 'segment_ns'), queued):
             for ep_ns in item.segment_ns:
                 ep_data, uniq_sxe = self.prepare_episode(ep_ns, 'queued')
                 ep_data_list.append(ep_data)
@@ -3387,9 +3383,9 @@ class Home(MainHandler):
                     seen_eps.add(uniq_sxe)

         episode_params = dict(searchstate='finished', retrystate=True, statusoverview=True)
-        for item in filter_iter(lambda r: hasattr(r, 'segment_ns') and (
+        for item in filter(lambda r: hasattr(r, 'segment_ns') and (
                 not tvid_prodid or tvid_prodid == str(r.show_ns.tvid_prodid)), results):
-            for ep_ns in filter_iter(
+            for ep_ns in filter(
                     lambda e: (e.show_ns.tvid, e.show_ns.prodid, e.season, e.episode) not in seen_eps,
                     item.segment_ns):
                 ep_obj = getattr(ep_ns, 'ep_obj', None)
                 if not ep_obj:
@@ -3403,7 +3399,7 @@ class Home(MainHandler):
                 ep_data_list.append(ep_data)
                 seen_eps.add(uniq_sxe)

-        for snatched in filter_iter(lambda s: ((s.tvid, s.prodid, s.season, s.episode) not in seen_eps),
+        for snatched in filter(lambda s: ((s.tvid, s.prodid, s.season, s.episode) not in seen_eps),
                                     item.snatched_eps):
             ep_obj = getattr(snatched, 'ep_obj', None)
             if not ep_obj:
@@ -3941,12 +3937,12 @@ class AddShows(Home):
         b_term = decode_str(used_search_term).strip()
         terms = []
         try:
-            for cur_term in ([], [b_term.encode('utf-8')])[PY2] + [unidecode(b_term), b_term]:
+            for cur_term in [unidecode(b_term), b_term]:
                 if cur_term not in terms:
                     terms += [cur_term]
         except (BaseException, Exception):
             text = used_search_term.strip()
-            terms = [text if not PY2 else text.encode('utf-8')]
+            terms = [text]

         return set(s for s in set([used_search_term] + terms) if s)
@@ -4082,7 +4078,7 @@ class AddShows(Home):
                    for tvid, name in iteritems(sickgear.TVInfoAPI().all_sources)}

         if TVINFO_TRAKT in results and TVINFO_TVDB in results:
-            tvdb_ids = list_keys(results[TVINFO_TVDB])
+            tvdb_ids = list(results[TVINFO_TVDB])
             results[TVINFO_TRAKT] = {k: v for k, v in iteritems(results[TVINFO_TRAKT]) if v['ids'].tvdb not in tvdb_ids}

         def in_db(tvid, prod_id):
@@ -4397,9 +4393,9 @@ class AddShows(Home):
         t.infosrc = sickgear.TVInfoAPI().search_sources
         search_tvid = None
         if use_show_name and 1 == show_name.count(':'):  # if colon is found once
-            search_tvid = filter_list(lambda x: bool(x),
-                                      [('%s:' % sickgear.TVInfoAPI(_tvid).config['slug']) in show_name and _tvid
-                                       for _tvid, _ in iteritems(t.infosrc)])
+            search_tvid = list(filter(lambda x: bool(x),
+                                      [('%s:' % sickgear.TVInfoAPI(_tvid).config['slug']) in show_name and _tvid
+                                       for _tvid, _ in iteritems(t.infosrc)]))
             search_tvid = 1 == len(search_tvid) and search_tvid[0]
         t.provided_tvid = search_tvid or int(tvid or sickgear.TVINFO_DEFAULT)
         t.infosrc_icons = [sickgear.TVInfoAPI(cur_tvid).config.get('icon') for cur_tvid in t.infosrc]
@@ -4530,7 +4526,7 @@ class AddShows(Home):

     def info_anidb(self, ids, show_name):
-        if not filter_list(lambda tvid_prodid: helpers.find_show_by_id(tvid_prodid), ids.split(' ')):
+        if not list(filter(lambda tvid_prodid: helpers.find_show_by_id(tvid_prodid), ids.split(' '))):
             return self.new_show('|'.join(['', '', '', ' '.join([ids, show_name])]), use_show_name=True, is_anime=True)

     @staticmethod
@@ -4617,8 +4613,8 @@ class AddShows(Home):
         oldest, newest, oldest_dt, newest_dt = None, None, 9999999, 0
         show_list = (data or {}).get('list', {}).get('items', {})
-        idx_ids = dict(map_iter(lambda so: (so.imdbid, (so.tvid, so.prodid)),
-                                filter_iter(lambda _so: getattr(_so, 'imdbid', None), sickgear.showList)))
+        idx_ids = dict(map(lambda so: (so.imdbid, (so.tvid, so.prodid)),
+                           filter(lambda _so: getattr(_so, 'imdbid', None), sickgear.showList)))

         # list_id = (data or {}).get('list', {}).get('id', {})
         for row in show_list:
@@ -4753,7 +4749,7 @@ class AddShows(Home):
                     show_obj = helpers.find_show_by_id({TVINFO_IMDB: int(ids['imdb'].replace('tt', ''))},
                                                        no_mapped_ids=False)
-                    for tvid in filter_iter(lambda _tvid: _tvid == show_obj.tvid, sickgear.TVInfoAPI().search_sources):
+                    for tvid in filter(lambda _tvid: _tvid == show_obj.tvid, sickgear.TVInfoAPI().search_sources):
                         infosrc_slug, infosrc_url = (sickgear.TVInfoAPI(tvid).config[x] for x in ('slug', 'show_url'))
                         filtered[-1]['ids'][infosrc_slug] = show_obj.prodid
@@ -5114,7 +5110,7 @@ class AddShows(Home):

     def info_trakt(self, ids, show_name):
-        if not filter_list(lambda tvid_prodid: helpers.find_show_by_id(tvid_prodid), ids.split(' ')):
+        if not list(filter(lambda tvid_prodid: helpers.find_show_by_id(tvid_prodid), ids.split(' '))):
             return self.new_show('|'.join(['', '', '', ' '.join([ids, show_name])]), use_show_name=True)

     def ne_default(self):
@@ -5428,7 +5424,7 @@ class AddShows(Home):

     # noinspection PyUnusedLocal
     def info_tvmaze(self, ids, show_name):
-        if not filter_list(lambda tvid_prodid: helpers.find_show_by_id(tvid_prodid), ids.split(' ')):
+        if not list(filter(lambda tvid_prodid: helpers.find_show_by_id(tvid_prodid), ids.split(' '))):
             return self.new_show('|'.join(['', '', '', ' '.join([ids, show_name])]), use_show_name=True)

     def tvc_default(self):
@@ -5727,7 +5723,7 @@ class AddShows(Home):
                     dt_ordinal = 0
                     dt_string = ''
-                    date_tags = filter_list(lambda t: t.find('span'), row.find_all('div', class_='clamp-details'))
+                    date_tags = list(filter(lambda t: t.find('span'), row.find_all('div', class_='clamp-details')))
                     if date_tags:
                         dt = dateutil.parser.parse(date_tags[0].get_text().strip())
                         dt_ordinal = dt.toordinal()
@@ -5840,11 +5836,11 @@ class AddShows(Home):
                 tvid_prodid_list = []

                 # first, process known ids
-                for tvid, infosrc_slug in filter_iter(
+                for tvid, infosrc_slug in filter(
                         lambda tvid_slug: item['ids'].get(tvid_slug[1])
                                           and not sickgear.TVInfoAPI(tvid_slug[0]).config.get('defunct'),
-                        map_iter(lambda _tvid: (_tvid, sickgear.TVInfoAPI(_tvid).config['slug']),
-                                 iterkeys(sickgear.TVInfoAPI().all_sources))):
+                        map(lambda _tvid: (_tvid, sickgear.TVInfoAPI(_tvid).config['slug']),
+                            iterkeys(sickgear.TVInfoAPI().all_sources))):
                     try:
                         src_id = item['ids'][infosrc_slug]
                         tvid_prodid_list += ['%s:%s' % (infosrc_slug, src_id)]
@@ -5899,7 +5895,7 @@ class AddShows(Home):
                         known.append(item['show_id'])
                         t.all_shows.append(item)

-                        if any(filter_iter(lambda tp: tp in sickgear.BROWSELIST_HIDDEN, tvid_prodid_list)):
+                        if any(filter(lambda tp: tp in sickgear.BROWSELIST_HIDDEN, tvid_prodid_list)):
                             item['hide'] = True
                             t.num_hidden += 1
@@ -6035,7 +6031,7 @@ class AddShows(Home):
             any_qualities = [any_qualities]
         if type(best_qualities) != list:
             best_qualities = [best_qualities]
-        new_quality = Quality.combineQualities(map_list(int, any_qualities), map_list(int, best_qualities))
+        new_quality = Quality.combineQualities(list(map(int, any_qualities)), list(map(int, best_qualities)))

         upgrade_once = config.checkbox_to_value(upgrade_once)
         wanted_begin = config.minimax(wanted_begin, 0, -1, 10)
@@ -6403,8 +6399,7 @@ class Manage(MainHandler):
                                     ' AND season != 0'
                                     ' AND indexer = ? AND showid = ?',
                                     status_list + tvid_prodid_list)
-        what = (sql_result and '|'.join(map_iter(lambda r: '%sx%s' % (r['season'], r['episode']),
-                                                 sql_result))
+        what = (sql_result and '|'.join(map(lambda r: '%sx%s' % (r['season'], r['episode']), sql_result))
                 or None)
         to = new_status
@@ -6562,7 +6557,8 @@ class Manage(MainHandler):
                                        ' WHERE indexer = ? AND showid = ?'
                                        ' AND season != 0 AND status LIKE \'%4\'',
                                        TVidProdid(cur_tvid_prodid).list)
-            to_download[cur_tvid_prodid] = map_list(lambda x: '%sx%s' % (x['season'], x['episode']), sql_result)
+            to_download[cur_tvid_prodid] = list(map(lambda x: '%sx%s' % (x['season'], x['episode']),
+                                                    sql_result))

             for epResult in to_download[cur_tvid_prodid]:
                 season, episode = epResult.split('x')
@@ -7461,12 +7457,12 @@ class History(MainHandler):

         elif 'failures' in sickgear.HISTORY_LAYOUT:

-            t.provider_fail_stats = filter_list(lambda stat: len(stat['fails']), [
+            t.provider_fail_stats = list(filter(lambda stat: len(stat['fails']), [
                 dict(name=p.name, id=p.get_id(), active=p.is_active(), prov_img=p.image_name(),
                      prov_id=p.get_id(),  # 2020.03.17 legacy var, remove at future date
                      fails=p.fails.fails_sorted, next_try=p.get_next_try_time,
                      has_limit=getattr(p, 'has_limit', False), tmr_limit_time=p.tmr_limit_time)
-                for p in sickgear.providerList + sickgear.newznabProviderList])
+                for p in sickgear.providerList + sickgear.newznabProviderList]))

             t.provider_fail_cnt = len([p for p in t.provider_fail_stats if len(p['fails'])])
             t.provider_fails = t.provider_fail_cnt  # 2020.03.17 legacy var, remove at future date
@@ -7500,11 +7496,11 @@ class History(MainHandler):
             return result

         with sg_helpers.DOMAIN_FAILURES.lock:
-            t.domain_fail_stats = filter_list(lambda stat: len(stat['fails']), [
+            t.domain_fail_stats = list(filter(lambda stat: len(stat['fails']), [
                 dict(name=k, id=sickgear.GenericProvider.make_id(k), img=img(k), cls=img(k, True),
                      fails=v.fails_sorted, next_try=v.get_next_try_time,
                      has_limit=getattr(v, 'has_limit', False), tmr_limit_time=v.tmr_limit_time)
-                for k, v in iteritems(sg_helpers.DOMAIN_FAILURES.domain_list)])
+                for k, v in iteritems(sg_helpers.DOMAIN_FAILURES.domain_list)]))

             t.domain_fail_cnt = len([d for d in t.domain_fail_stats if len(d['fails'])])
@@ -7658,7 +7654,7 @@ class History(MainHandler):
                                    ParentId=folder_id, Filters='IsPlayed', format='json'),
                              timeout=10, parse_json=True) or {}
-            for d in filter_iter(lambda item: 'Episode' == item.get('Type', ''), items.get('Items')):
+            for d in filter(lambda item: 'Episode' == item.get('Type', ''), items.get('Items')):
                 try:
                     root_dir_found = False
                     path_file = d.get('Path')
@@ -7700,11 +7696,11 @@ class History(MainHandler):

             if states:
                 # Prune user removed items that are no longer being returned by API
-                media_paths = map_list(lambda arg: os.path.basename(arg[1]['path_file']), iteritems(states))
+                media_paths = list(map(lambda arg: os.path.basename(arg[1]['path_file']), iteritems(states)))
                 sql = 'FROM tv_episodes_watched WHERE hide=1 AND label LIKE "%%{Emby}"'
                 my_db = db.DBConnection(row_type='dict')
                 files = my_db.select('SELECT location %s' % sql)
-                for i in filter_iter(lambda f: os.path.basename(f['location']) not in media_paths, files):
+                for i in filter(lambda f: os.path.basename(f['location']) not in media_paths, files):
                     loc = i.get('location')
                     if loc:
                         my_db.select('DELETE %s AND location="%s"' % (sql, loc))
@@ -7769,11 +7765,11 @@ class History(MainHandler):

             if states:
                 # Prune user removed items that are no longer being returned by API
-                media_paths = map_list(lambda arg: os.path.basename(arg[1]['path_file']), iteritems(states))
+                media_paths = list(map(lambda arg: os.path.basename(arg[1]['path_file']), iteritems(states)))
                 sql = 'FROM tv_episodes_watched WHERE hide=1 AND label LIKE "%%{Plex}"'
                 my_db = db.DBConnection(row_type='dict')
                 files = my_db.select('SELECT location %s' % sql)
-                for i in filter_iter(lambda f: os.path.basename(f['location']) not in media_paths, files):
+                for i in filter(lambda f: os.path.basename(f['location']) not in media_paths, files):
                     loc = i.get('location')
                     if loc:
                         my_db.select('DELETE %s AND location="%s"' % (sql, loc))
@@ -7844,7 +7840,7 @@ class History(MainHandler):
         for cur_result in sql_result:
             show_obj = helpers.find_show_by_id(tvid_prodid_dict)
             ep_obj = show_obj.get_episode(cur_result['season'], cur_result['episode'])
-            for n in filter_iter(lambda x: x.name.lower() in ('emby', 'kodi', 'plex'),
+            for n in filter(lambda x: x.name.lower() in ('emby', 'kodi', 'plex'),
                                  notifiers.NotifierFactory().get_enabled()):
                 if 'PLEX' == n.name:
                     if updating:
@@ -7966,7 +7962,7 @@ class ConfigGeneral(Config):
             seasons = [-1] + seasons[0:-1]  # bubble -1

         # prepare a seasonal ordered dict for output
-        alts = ordered_dict([(season, {}) for season in seasons])
+        alts = dict([(season, {}) for season in seasons])

         # add original show name
         show_obj = sickgear.helpers.find_show_by_id(tvid_prodid, no_mapped_ids=True)
@@ -8057,8 +8053,8 @@ class ConfigGeneral(Config):
         any_qualities = ([], any_qualities.split(','))[any(any_qualities)]
         best_qualities = ([], best_qualities.split(','))[any(best_qualities)]

-        sickgear.QUALITY_DEFAULT = int(Quality.combineQualities(map_list(int, any_qualities),
-                                                                map_list(int, best_qualities)))
+        sickgear.QUALITY_DEFAULT = int(Quality.combineQualities(list(map(int, any_qualities)),
+                                                                list(map(int, best_qualities))))
         sickgear.WANTED_BEGIN_DEFAULT = config.minimax(default_wanted_begin, 0, -1, 10)
         sickgear.WANTED_LATEST_DEFAULT = config.minimax(default_wanted_latest, 0, -1, 10)
         sickgear.SHOW_TAG_DEFAULT = default_tag
@@ -8258,7 +8254,7 @@ class ConfigGeneral(Config):
         sickgear.WEB_IPV64 = config.checkbox_to_value(web_ipv64)
         sickgear.HANDLE_REVERSE_PROXY = config.checkbox_to_value(handle_reverse_proxy)
         sickgear.SEND_SECURITY_HEADERS = config.checkbox_to_value(send_security_headers)
-        hosts = ','.join(filter_iter(lambda name: not helpers.re_valid_hostname(with_allowed=False).match(name),
+        hosts = ','.join(filter(lambda name: not helpers.re_valid_hostname(with_allowed=False).match(name),
                                      config.clean_hosts(allowed_hosts).split(',')))
         if not hosts or self.request.host_name in hosts:
             sickgear.ALLOWED_HOSTS = hosts
@@ -8811,7 +8807,7 @@ class ConfigProviders(Config):
                     [k for k in nzb_src.may_filter
                      if config.checkbox_to_value(kwargs.get('%s_filter_%s' % (cur_id, k)))])

-            for attr in filter_iter(lambda a: hasattr(nzb_src, a), [
+            for attr in filter(lambda a: hasattr(nzb_src, a), [
                 'search_fallback', 'enable_recentsearch', 'enable_backlog', 'enable_scheduled_backlog',
                 'scene_only', 'scene_loose', 'scene_loose_active',
                 'scene_rej_nuked', 'scene_nuked_active'
             ]):
@@ -8925,12 +8921,12 @@ class ConfigProviders(Config):
                 elif not starify(key, True):
                     setattr(torrent_src, attr, key)

-            for attr in filter_iter(lambda a: hasattr(torrent_src, a), [
+            for attr in filter(lambda a: hasattr(torrent_src, a), [
                 'username', 'uid', '_seed_ratio', 'scene_or_contain'
             ]):
                 setattr(torrent_src, attr, str(kwargs.get(src_id_prefix + attr.replace('_seed_', ''), '')).strip())

-            for attr in filter_iter(lambda a: hasattr(torrent_src, a), [
+            for attr in filter(lambda a: hasattr(torrent_src, a), [
                 'minseed', 'minleech', 'seed_time'
             ]):
                 setattr(torrent_src, attr, config.to_int(str(kwargs.get(src_id_prefix + attr, '')).strip()))
@@ -8941,7 +8937,7 @@ class ConfigProviders(Config):
                     [k for k in getattr(torrent_src, 'may_filter', 'nop')
                      if config.checkbox_to_value(kwargs.get('%sfilter_%s' % (src_id_prefix, k)))])

-            for attr in filter_iter(lambda a: hasattr(torrent_src, a), [
+            for attr in filter(lambda a: hasattr(torrent_src, a), [
                 'confirmed', 'freeleech', 'reject_m2ts', 'use_after_get_data', 'enable_recentsearch',
                 'enable_backlog', 'search_fallback', 'enable_scheduled_backlog',
                 'scene_only', 'scene_loose', 'scene_loose_active',
@@ -8949,7 +8945,7 @@ class ConfigProviders(Config):
             ]):
                 setattr(torrent_src, attr, config.checkbox_to_value(kwargs.get(src_id_prefix + attr)))

-            for attr, default in filter_iter(lambda arg: hasattr(torrent_src, arg[0]), [
+            for attr, default in filter(lambda arg: hasattr(torrent_src, arg[0]), [
                 ('search_mode', 'eponly'),
             ]):
                 setattr(torrent_src, attr, str(kwargs.get(src_id_prefix + attr) or default).strip())
@@ -8973,10 +8969,10 @@ class ConfigProviders(Config):
                 setattr(nzb_src, attr, config.checkbox_to_value(kwargs.get(src_id_prefix + attr)) or
                         not getattr(nzb_src, 'supports_backlog', True))

-            for attr in filter_iter(lambda a: hasattr(nzb_src, a),
-                                    ['search_fallback', 'enable_backlog', 'enable_scheduled_backlog',
-                                     'scene_only', 'scene_loose', 'scene_loose_active',
-                                     'scene_rej_nuked', 'scene_nuked_active']):
+            for attr in filter(lambda a: hasattr(nzb_src, a),
+                               ['search_fallback', 'enable_backlog', 'enable_scheduled_backlog',
+                                'scene_only', 'scene_loose', 'scene_loose_active',
+                                'scene_rej_nuked', 'scene_nuked_active']):
                 setattr(nzb_src, attr, config.checkbox_to_value(kwargs.get(src_id_prefix + attr)))

             for (attr, default) in [('scene_or_contain', ''), ('search_mode', 'eponly')]:
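
The provider-settings hunks above share one idiom: a candidate attribute list is narrowed with `filter(lambda a: hasattr(obj, a), [...])` so `setattr` only touches settings a given provider class actually defines. A stripped-down sketch of the pattern (hypothetical provider object, not the real classes):

    class TorrentSrc(object):
        def __init__(self):
            self.minseed = 0
            self.minleech = 0
            # note: no 'seed_time' attribute on this provider

    torrent_src = TorrentSrc()
    kwargs = {'minseed': '2', 'minleech': '1', 'seed_time': '40'}

    # only assign settings the provider actually supports
    for attr in filter(lambda a: hasattr(torrent_src, a),
                       ['minseed', 'minleech', 'seed_time']):
        setattr(torrent_src, attr, int(kwargs.get(attr, '0')))

    print(torrent_src.minseed, torrent_src.minleech)  # 2 1
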
diff --git a/sickgear/webserveInit.py b/sickgear/webserveInit.py
index 8fd7b086..8f1e4ad5 100644
--- a/sickgear/webserveInit.py
+++ b/sickgear/webserveInit.py
@@ -14,7 +14,6 @@ from .helpers import create_https_certificates, re_valid_hostname
 import sickgear

 from _23 import PY38
-from six import PY2

 # noinspection PyUnreachableCode
 if False:
@@ -255,14 +254,13 @@ class WebServer(threading.Thread):
         logger.log(u'Starting SickGear on %s://%s:%s/' % (protocol, self.options['host'], self.options['port']))

         # python 3 needs to start event loop first
-        if not PY2:
-            import asyncio
-            if 'win32' == platform and PY38:
-                # noinspection PyUnresolvedReferences
-                asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
-            asyncio.set_event_loop(asyncio.new_event_loop())
-            from tornado.platform.asyncio import AnyThreadEventLoopPolicy
-            asyncio.set_event_loop_policy(AnyThreadEventLoopPolicy())
+        import asyncio
+        if 'win32' == platform and PY38:
+            # noinspection PyUnresolvedReferences
+            asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
+        asyncio.set_event_loop(asyncio.new_event_loop())
+        from tornado.platform.asyncio import AnyThreadEventLoopPolicy
+        asyncio.set_event_loop_policy(AnyThreadEventLoopPolicy())

         try:
             self.server = self.app.listen(self.options['port'], self.options['host'], ssl_options=ssl_options,