From e08156ce612b6967496c3807deac06cdab93c642 Mon Sep 17 00:00:00 2001
From: JackDandy
Date: Tue, 16 May 2023 14:11:34 +0100
Subject: [PATCH] Remove singledispatch.

---
 CHANGES.md                    |   1 +
 lib/singledispatch.py         | 219 ----------------------------------
 lib/singledispatch_helpers.py | 170 --------------------------
 3 files changed, 1 insertion(+), 389 deletions(-)
 delete mode 100644 lib/singledispatch.py
 delete mode 100644 lib/singledispatch_helpers.py

diff --git a/CHANGES.md b/CHANGES.md
index 141afeed..778d6bb6 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -14,6 +14,7 @@
 * Update SimpleJSON 3.18.1 (c891b95) to 3.19.1 (aeb63ee)
 * Update Tornado Web Server 6.3.0 (7186b86) to 6.3.2 (e3aa6c5)
 * Update urllib3 1.26.14 (a06c05c) to 1.26.15 (25cca389)
+* Remove singledispatch
 * Change allow rapidfuzz update from 2.x.x to 3.x.x
 * Change remove redundant py2 import futures
 * Change add jobs to centralise scheduler activities
diff --git a/lib/singledispatch.py b/lib/singledispatch.py
deleted file mode 100644
index 87603fd0..00000000
--- a/lib/singledispatch.py
+++ /dev/null
@@ -1,219 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-from __future__ import unicode_literals
-
-__all__ = ['singledispatch']
-
-from functools import update_wrapper
-from weakref import WeakKeyDictionary
-from singledispatch_helpers import MappingProxyType, get_cache_token
-
-################################################################################
-### singledispatch() - single-dispatch generic function decorator
-################################################################################
-
-def _c3_merge(sequences):
-    """Merges MROs in *sequences* to a single MRO using the C3 algorithm.
-
-    Adapted from http://www.python.org/download/releases/2.3/mro/.
-
-    """
-    result = []
-    while True:
-        sequences = [s for s in sequences if s]   # purge empty sequences
-        if not sequences:
-            return result
-        for s1 in sequences:   # find merge candidates among seq heads
-            candidate = s1[0]
-            for s2 in sequences:
-                if candidate in s2[1:]:
-                    candidate = None
-                    break      # reject the current head, it appears later
-            else:
-                break
-        if not candidate:
-            raise RuntimeError("Inconsistent hierarchy")
-        result.append(candidate)
-        # remove the chosen candidate
-        for seq in sequences:
-            if seq[0] == candidate:
-                del seq[0]
-
-def _c3_mro(cls, abcs=None):
-    """Computes the method resolution order using extended C3 linearization.
-
-    If no *abcs* are given, the algorithm works exactly like the built-in C3
-    linearization used for method resolution.
-
-    If given, *abcs* is a list of abstract base classes that should be inserted
-    into the resulting MRO. Unrelated ABCs are ignored and don't end up in the
-    result. The algorithm inserts ABCs where their functionality is introduced,
-    i.e. issubclass(cls, abc) returns True for the class itself but returns
-    False for all its direct base classes. Implicit ABCs for a given class
-    (either registered or inferred from the presence of a special method like
-    __len__) are inserted directly after the last ABC explicitly listed in the
-    MRO of said class. If two implicit ABCs end up next to each other in the
-    resulting MRO, their ordering depends on the order of types in *abcs*.
-
-    """
-    for i, base in enumerate(reversed(cls.__bases__)):
-        if hasattr(base, '__abstractmethods__'):
-            boundary = len(cls.__bases__) - i
-            break  # Bases up to the last explicit ABC are considered first.
-    else:
-        boundary = 0
-    abcs = list(abcs) if abcs else []
-    explicit_bases = list(cls.__bases__[:boundary])
-    abstract_bases = []
-    other_bases = list(cls.__bases__[boundary:])
-    for base in abcs:
-        if issubclass(cls, base) and not any(
-                issubclass(b, base) for b in cls.__bases__
-            ):
-            # If *cls* is the class that introduces behaviour described by
-            # an ABC *base*, insert said ABC to its MRO.
-            abstract_bases.append(base)
-    for base in abstract_bases:
-        abcs.remove(base)
-    explicit_c3_mros = [_c3_mro(base, abcs=abcs) for base in explicit_bases]
-    abstract_c3_mros = [_c3_mro(base, abcs=abcs) for base in abstract_bases]
-    other_c3_mros = [_c3_mro(base, abcs=abcs) for base in other_bases]
-    return _c3_merge(
-        [[cls]] +
-        explicit_c3_mros + abstract_c3_mros + other_c3_mros +
-        [explicit_bases] + [abstract_bases] + [other_bases]
-    )
-
-def _compose_mro(cls, types):
-    """Calculates the method resolution order for a given class *cls*.
-
-    Includes relevant abstract base classes (with their respective bases) from
-    the *types* iterable. Uses a modified C3 linearization algorithm.
-
-    """
-    bases = set(cls.__mro__)
-    # Remove entries which are already present in the __mro__ or unrelated.
-    def is_related(typ):
-        return (typ not in bases and hasattr(typ, '__mro__')
-                and issubclass(cls, typ))
-    types = [n for n in types if is_related(n)]
-    # Remove entries which are strict bases of other entries (they will end up
-    # in the MRO anyway.
-    def is_strict_base(typ):
-        for other in types:
-            if typ != other and typ in other.__mro__:
-                return True
-        return False
-    types = [n for n in types if not is_strict_base(n)]
-    # Subclasses of the ABCs in *types* which are also implemented by
-    # *cls* can be used to stabilize ABC ordering.
-    type_set = set(types)
-    mro = []
-    for typ in types:
-        found = []
-        for sub in typ.__subclasses__():
-            if sub not in bases and issubclass(cls, sub):
-                found.append([s for s in sub.__mro__ if s in type_set])
-        if not found:
-            mro.append(typ)
-            continue
-        # Favor subclasses with the biggest number of useful bases
-        found.sort(key=len, reverse=True)
-        for sub in found:
-            for subcls in sub:
-                if subcls not in mro:
-                    mro.append(subcls)
-    return _c3_mro(cls, abcs=mro)
-
-def _find_impl(cls, registry):
-    """Returns the best matching implementation from *registry* for type *cls*.
-
-    Where there is no registered implementation for a specific type, its method
-    resolution order is used to find a more generic implementation.
-
-    Note: if *registry* does not contain an implementation for the base
-    *object* type, this function may return None.
-
-    """
-    mro = _compose_mro(cls, registry.keys())
-    match = None
-    for t in mro:
-        if match is not None:
-            # If *match* is an implicit ABC but there is another unrelated,
-            # equally matching implicit ABC, refuse the temptation to guess.
-            if (t in registry and t not in cls.__mro__
-                    and match not in cls.__mro__
-                    and not issubclass(match, t)):
-                raise RuntimeError("Ambiguous dispatch: {0} or {1}".format(
-                    match, t))
-            break
-        if t in registry:
-            match = t
-    return registry.get(match)
-
-def singledispatch(func):
-    """Single-dispatch generic function decorator.
-
-    Transforms a function into a generic function, which can have different
-    behaviours depending upon the type of its first argument. The decorated
-    function acts as the default implementation, and additional
-    implementations can be registered using the register() attribute of the
-    generic function.
- - """ - registry = {} - dispatch_cache = WeakKeyDictionary() - def ns(): pass - ns.cache_token = None - - def dispatch(cls): - """generic_func.dispatch(cls) -> - - Runs the dispatch algorithm to return the best available implementation - for the given *cls* registered on *generic_func*. - - """ - if ns.cache_token is not None: - current_token = get_cache_token() - if ns.cache_token != current_token: - dispatch_cache.clear() - ns.cache_token = current_token - try: - impl = dispatch_cache[cls] - except KeyError: - try: - impl = registry[cls] - except KeyError: - impl = _find_impl(cls, registry) - dispatch_cache[cls] = impl - return impl - - def register(cls, func=None): - """generic_func.register(cls, func) -> func - - Registers a new implementation for the given *cls* on a *generic_func*. - - """ - if func is None: - return lambda f: register(cls, f) - registry[cls] = func - if ns.cache_token is None and hasattr(cls, '__abstractmethods__'): - ns.cache_token = get_cache_token() - dispatch_cache.clear() - return func - - def wrapper(*args, **kw): - return dispatch(args[0].__class__)(*args, **kw) - - registry[object] = func - wrapper.register = register - wrapper.dispatch = dispatch - wrapper.registry = MappingProxyType(registry) - wrapper._clear_cache = dispatch_cache.clear - update_wrapper(wrapper, func) - return wrapper - diff --git a/lib/singledispatch_helpers.py b/lib/singledispatch_helpers.py deleted file mode 100644 index 8fcdce40..00000000 --- a/lib/singledispatch_helpers.py +++ /dev/null @@ -1,170 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from abc import ABCMeta -from collections import MutableMapping -import sys -try: - from collections import UserDict -except ImportError: - from UserDict import UserDict -try: - from collections import OrderedDict -except ImportError: - from ordereddict import OrderedDict -try: - from thread import get_ident -except ImportError: - try: - from _thread import get_ident - except ImportError: - from _dummy_thread import get_ident - - -def recursive_repr(fillvalue='...'): - 'Decorator to make a repr function return fillvalue for a recursive call' - - def decorating_function(user_function): - repr_running = set() - - def wrapper(self): - key = id(self), get_ident() - if key in repr_running: - return fillvalue - repr_running.add(key) - try: - result = user_function(self) - finally: - repr_running.discard(key) - return result - - # Can't use functools.wraps() here because of bootstrap issues - wrapper.__module__ = getattr(user_function, '__module__') - wrapper.__doc__ = getattr(user_function, '__doc__') - wrapper.__name__ = getattr(user_function, '__name__') - wrapper.__annotations__ = getattr(user_function, '__annotations__', {}) - return wrapper - - return decorating_function - - -class ChainMap(MutableMapping): - ''' A ChainMap groups multiple dicts (or other mappings) together - to create a single, updateable view. - - The underlying mappings are stored in a list. That list is public and can - accessed or updated using the *maps* attribute. There is no other state. - - Lookups search the underlying mappings successively until a key is found. - In contrast, writes, updates, and deletions only operate on the first - mapping. - - ''' - - def __init__(self, *maps): - '''Initialize a ChainMap by setting *maps* to the given mappings. 
-        If no mappings are provided, a single empty dictionary is used.
-
-        '''
-        self.maps = list(maps) or [{}]          # always at least one map
-
-    def __missing__(self, key):
-        raise KeyError(key)
-
-    def __getitem__(self, key):
-        for mapping in self.maps:
-            try:
-                return mapping[key]             # can't use 'key in mapping' with defaultdict
-            except KeyError:
-                pass
-        return self.__missing__(key)            # support subclasses that define __missing__
-
-    def get(self, key, default=None):
-        return self[key] if key in self else default
-
-    def __len__(self):
-        return len(set().union(*self.maps))     # reuses stored hash values if possible
-
-    def __iter__(self):
-        return iter(set().union(*self.maps))
-
-    def __contains__(self, key):
-        return any(key in m for m in self.maps)
-
-    @recursive_repr()
-    def __repr__(self):
-        return '{0.__class__.__name__}({1})'.format(
-            self, ', '.join(map(repr, self.maps)))
-
-    @classmethod
-    def fromkeys(cls, iterable, *args):
-        'Create a ChainMap with a single dict created from the iterable.'
-        return cls(dict.fromkeys(iterable, *args))
-
-    def copy(self):
-        'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]'
-        return self.__class__(self.maps[0].copy(), *self.maps[1:])
-
-    __copy__ = copy
-
-    def new_child(self):                        # like Django's Context.push()
-        'New ChainMap with a new dict followed by all previous maps.'
-        return self.__class__({}, *self.maps)
-
-    @property
-    def parents(self):                          # like Django's Context.pop()
-        'New ChainMap from maps[1:].'
-        return self.__class__(*self.maps[1:])
-
-    def __setitem__(self, key, value):
-        self.maps[0][key] = value
-
-    def __delitem__(self, key):
-        try:
-            del self.maps[0][key]
-        except KeyError:
-            raise KeyError('Key not found in the first mapping: {!r}'.format(key))
-
-    def popitem(self):
-        'Remove and return an item pair from maps[0]. Raise KeyError is maps[0] is empty.'
-        try:
-            return self.maps[0].popitem()
-        except KeyError:
-            raise KeyError('No keys found in the first mapping.')
-
-    def pop(self, key, *args):
-        'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].'
-        try:
-            return self.maps[0].pop(key, *args)
-        except KeyError:
-            raise KeyError('Key not found in the first mapping: {!r}'.format(key))
-
-    def clear(self):
-        'Clear maps[0], leaving maps[1:] intact.'
-        self.maps[0].clear()
-
-
-class MappingProxyType(UserDict):
-    def __init__(self, data):
-        UserDict.__init__(self)
-        self.data = data
-
-
-def get_cache_token():
-    return ABCMeta._abc_invalidation_counter
-
-
-
-class Support(object):
-    def dummy(self):
-        pass
-
-    def cpython_only(self, func):
-        if 'PyPy' in sys.version:
-            return self.dummy
-        return func
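
The removed files vendored the Python 2 backport of single dispatch; on Python 3 the
same decorator ships in the standard library as functools.singledispatch (3.4+). The
sketch below (not part of this patch; describe() and its registered types are purely
illustrative) shows the stdlib equivalent that code can call instead of the deleted
lib/singledispatch.py.

# Sketch only: stdlib replacement for the removed vendored backport.
# functools.singledispatch dispatches on the type of the first positional
# argument; the undecorated function body is the default implementation.
from functools import singledispatch


@singledispatch
def describe(value):
    # Default implementation, used when no more specific type is registered.
    return 'object: {!r}'.format(value)


@describe.register(int)      # explicit-type form; works on all Python 3 versions
def _describe_int(value):
    return 'int: {:d}'.format(value)


@describe.register(list)
def _describe_list(value):
    return 'list of {:d} item(s)'.format(len(value))


if __name__ == '__main__':
    print(describe(3))          # -> int: 3
    print(describe([1, 2]))     # -> list of 2 item(s)
    print(describe('text'))     # falls back to the default -> object: 'text'

The stdlib wrapper also exposes describe.dispatch(cls) and describe.registry,
mirroring the dispatch()/registry attributes of the removed backport.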