diff --git a/CHANGES.md b/CHANGES.md index 688e6218..0c8844e3 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,4 +1,25 @@ -### 3.30.20 (2024-05-25 09:35:00 UTC) +### 3.31.0 (2024-06-05 08:00:00 UTC) + +* Update Apprise 1.3.0 (6458ab0) to 1.6.0 (0c0d5da) +* Update attr 22.2.0 (683d056) to 23.1.0 (67e4ff2) +* Update Beautiful Soup 4.12.2 to 4.12.2 (30c58a1) +* Update dateutil 2.8.2 (28da62d) to 2.8.2 (296d419) +* Update diskcache 5.6.1 (4d30686) to 5.6.3 (323787f) +* Update hachoir 3.1.2 (f739b43) to 3.2.0 (38d759f) +* Update Pytvmaze library 2.0.8 (81888a5) to 2.0.8 (b451391) +* Update pytz 2022.7.1/2022g (d38ff47) to 2023.3/2023c (488d3eb) +* Update Rarfile 4.1a1 (8a72967) to 4.1 (c9140d8) +* Update soupsieve 2.4.1 (2e66beb) to 2.5.0 (dc71495) +* Update thefuzz 0.19.0 (c2cd4f4) to 0.21.0 (0b49e4a) +* Update Tornado Web Server 6.3.3 (e4d6984) to 6.4 (b3f2a4b) +* Update urllib3 2.0.5 (d9f85a7) to 2.0.7 (56f01e0) +* Add support for Brotli compression +* Add ignore Plex extras +* Fix apply filters to multiple episode releases +* Add use multi episode result as fallback if it's better quality or has an episode that does not have a single ep result + + +### 3.30.20 (2024-05-25 09:35:00 UTC) * Fix FST provider exception raised when no title * Update UnRar x64 for Windows 7.00 to 7.0.1 diff --git a/lib/apprise/Apprise.py b/lib/apprise/Apprise.py index 19dde830..4c83c481 100644 --- a/lib/apprise/Apprise.py +++ b/lib/apprise/Apprise.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -31,8 +27,8 @@ # POSSIBILITY OF SUCH DAMAGE. import asyncio +import concurrent.futures as cf import os -from functools import partial from itertools import chain from . import common from .conversion import convert_between @@ -376,7 +372,7 @@ class Apprise: try: # Process arguments and build synchronous and asynchronous calls # (this step can throw internal errors). - sync_partials, async_cors = self._create_notify_calls( + sequential_calls, parallel_calls = self._create_notify_calls( body, title, notify_type=notify_type, body_format=body_format, tag=tag, match_always=match_always, attach=attach, @@ -387,49 +383,13 @@ class Apprise: # No notifications sent, and there was an internal error. return False - if not sync_partials and not async_cors: + if not sequential_calls and not parallel_calls: # Nothing to send return None - sync_result = Apprise._notify_all(*sync_partials) - - if async_cors: - # A single coroutine sends all asynchronous notifications in - # parallel. - all_cor = Apprise._async_notify_all(*async_cors) - - try: - # Python <3.7 automatically starts an event loop if there isn't - # already one for the main thread. - loop = asyncio.get_event_loop() - - except RuntimeError: - # Python >=3.7 raises this exception if there isn't already an - # event loop. So, we can spin up our own. - loop = asyncio.new_event_loop() - asyncio.set_event_loop(loop) - loop.set_debug(self.debug) - - # Run the coroutine and wait for the result. - async_result = loop.run_until_complete(all_cor) - - # Clean up the loop. - loop.close() - asyncio.set_event_loop(None) - - else: - old_debug = loop.get_debug() - loop.set_debug(self.debug) - - # Run the coroutine and wait for the result. 
- async_result = loop.run_until_complete(all_cor) - - loop.set_debug(old_debug) - - else: - async_result = True - - return sync_result and async_result + sequential_result = Apprise._notify_sequential(*sequential_calls) + parallel_result = Apprise._notify_parallel_threadpool(*parallel_calls) + return sequential_result and parallel_result async def async_notify(self, *args, **kwargs): """ @@ -442,41 +402,42 @@ class Apprise: try: # Process arguments and build synchronous and asynchronous calls # (this step can throw internal errors). - sync_partials, async_cors = self._create_notify_calls( + sequential_calls, parallel_calls = self._create_notify_calls( *args, **kwargs) except TypeError: # No notifications sent, and there was an internal error. return False - if not sync_partials and not async_cors: + if not sequential_calls and not parallel_calls: # Nothing to send return None - sync_result = Apprise._notify_all(*sync_partials) - async_result = await Apprise._async_notify_all(*async_cors) - return sync_result and async_result + sequential_result = Apprise._notify_sequential(*sequential_calls) + parallel_result = \ + await Apprise._notify_parallel_asyncio(*parallel_calls) + return sequential_result and parallel_result def _create_notify_calls(self, *args, **kwargs): """ Creates notifications for all the plugins loaded. - Returns a list of synchronous calls (partial functions with no - arguments required) for plugins with async disabled and a list of - asynchronous calls (coroutines) for plugins with async enabled. + Returns a list of (server, notify() kwargs) tuples for plugins with + parallelism disabled and another list for plugins with parallelism + enabled. """ all_calls = list(self._create_notify_gen(*args, **kwargs)) - # Split into synchronous partials and asynchronous coroutines. - sync_partials, async_cors = [], [] - for notify in all_calls: - if asyncio.iscoroutine(notify): - async_cors.append(notify) + # Split into sequential and parallel notify() calls. 
+ sequential, parallel = [], [] + for (server, notify_kwargs) in all_calls: + if server.asset.async_mode: + parallel.append((server, notify_kwargs)) else: - sync_partials.append(notify) + sequential.append((server, notify_kwargs)) - return sync_partials, async_cors + return sequential, parallel def _create_notify_gen(self, body, title='', notify_type=common.NotifyType.INFO, @@ -493,7 +454,7 @@ class Apprise: logger.error(msg) raise TypeError(msg) - if not (title or body): + if not (title or body or attach): msg = "No message content specified to deliver" logger.error(msg) raise TypeError(msg) @@ -533,25 +494,29 @@ class Apprise: # If our code reaches here, we either did not define a tag (it # was set to None), or we did define a tag and the logic above # determined we need to notify the service it's associated with - if server.notify_format not in conversion_body_map: - # Perform Conversion - conversion_body_map[server.notify_format] = \ - convert_between( - body_format, server.notify_format, content=body) + + # First we need to generate a key we will use to determine if we + # need to build our data out. Entries without are merged with + # the body at this stage. 
+ key = server.notify_format if server.title_maxlen > 0\ + else f'_{server.notify_format}' + + if key not in conversion_title_map: # Prepare our title - conversion_title_map[server.notify_format] = \ - '' if not title else title + conversion_title_map[key] = '' if not title else title - # Tidy Title IF required (hence it will become part of the - # body) - if server.title_maxlen <= 0 and \ - conversion_title_map[server.notify_format]: + # Conversion of title only occurs for services where the title + # is blended with the body (title_maxlen <= 0) + if conversion_title_map[key] and server.title_maxlen <= 0: + conversion_title_map[key] = convert_between( + body_format, server.notify_format, + content=conversion_title_map[key]) - conversion_title_map[server.notify_format] = \ - convert_between( - body_format, server.notify_format, - content=conversion_title_map[server.notify_format]) + # Our body is always converted no matter what + conversion_body_map[key] = \ + convert_between( + body_format, server.notify_format, content=body) if interpret_escapes: # @@ -561,13 +526,13 @@ class Apprise: try: # Added overhead required due to Python 3 Encoding Bug # identified here: https://bugs.python.org/issue21331 - conversion_body_map[server.notify_format] = \ - conversion_body_map[server.notify_format]\ + conversion_body_map[key] = \ + conversion_body_map[key]\ .encode('ascii', 'backslashreplace')\ .decode('unicode-escape') - conversion_title_map[server.notify_format] = \ - conversion_title_map[server.notify_format]\ + conversion_title_map[key] = \ + conversion_title_map[key]\ .encode('ascii', 'backslashreplace')\ .decode('unicode-escape') @@ -578,29 +543,26 @@ class Apprise: raise TypeError(msg) kwargs = dict( - body=conversion_body_map[server.notify_format], - title=conversion_title_map[server.notify_format], + body=conversion_body_map[key], + title=conversion_title_map[key], notify_type=notify_type, attach=attach, body_format=body_format ) - if server.asset.async_mode: - 
yield server.async_notify(**kwargs) - else: - yield partial(server.notify, **kwargs) + yield (server, kwargs) @staticmethod - def _notify_all(*partials): + def _notify_sequential(*servers_kwargs): """ - Process a list of synchronous notify() calls. + Process a list of notify() calls sequentially and synchronously. """ success = True - for notify in partials: + for (server, kwargs) in servers_kwargs: try: # Send notification - result = notify() + result = server.notify(**kwargs) success = success and result except TypeError: @@ -616,14 +578,71 @@ class Apprise: return success @staticmethod - async def _async_notify_all(*cors): + def _notify_parallel_threadpool(*servers_kwargs): """ - Process a list of asynchronous async_notify() calls. + Process a list of notify() calls in parallel and synchronously. """ + n_calls = len(servers_kwargs) + + # 0-length case + if n_calls == 0: + return True + + # There's no need to use a thread pool for just a single notification + if n_calls == 1: + return Apprise._notify_sequential(servers_kwargs[0]) + # Create log entry - logger.info('Notifying %d service(s) asynchronously.', len(cors)) + logger.info( + 'Notifying %d service(s) with threads.', len(servers_kwargs)) + with cf.ThreadPoolExecutor() as executor: + success = True + futures = [executor.submit(server.notify, **kwargs) + for (server, kwargs) in servers_kwargs] + + for future in cf.as_completed(futures): + try: + result = future.result() + success = success and result + + except TypeError: + # These are our internally thrown notifications. + success = False + + except Exception: + # A catch all so we don't have to abort early + # just because one of our plugins has a bug in it. + logger.exception("Unhandled Notification Exception") + success = False + + return success + + @staticmethod + async def _notify_parallel_asyncio(*servers_kwargs): + """ + Process a list of async_notify() calls in parallel and asynchronously. 
+ """ + + n_calls = len(servers_kwargs) + + # 0-length case + if n_calls == 0: + return True + + # (Unlike with the thread pool, we don't optimize for the single- + # notification case because asyncio can do useful work while waiting + # for that thread to complete) + + # Create log entry + logger.info( + 'Notifying %d service(s) asynchronously.', len(servers_kwargs)) + + async def do_call(server, kwargs): + return await server.async_notify(**kwargs) + + cors = (do_call(server, kwargs) for (server, kwargs) in servers_kwargs) results = await asyncio.gather(*cors, return_exceptions=True) if any(isinstance(status, Exception) @@ -665,6 +684,12 @@ class Apprise: 'setup_url': getattr(plugin, 'setup_url', None), # Placeholder - populated below 'details': None, + + # Let upstream service know of the plugins that support + # attachments + 'attachment_support': getattr( + plugin, 'attachment_support', False), + # Differentiat between what is a custom loaded plugin and # which is native. 'category': getattr(plugin, 'category', None) @@ -790,6 +815,36 @@ class Apprise: # If we reach here, then we indexed out of range raise IndexError('list index out of range') + def __getstate__(self): + """ + Pickle Support dumps() + """ + attributes = { + 'asset': self.asset, + # Prepare our URL list as we need to extract the associated tags + # and asset details associated with it + 'urls': [{ + 'url': server.url(privacy=False), + 'tag': server.tags if server.tags else None, + 'asset': server.asset} for server in self.servers], + 'locale': self.locale, + 'debug': self.debug, + 'location': self.location, + } + + return attributes + + def __setstate__(self, state): + """ + Pickle Support loads() + """ + self.servers = list() + self.asset = state['asset'] + self.locale = state['locale'] + self.location = state['location'] + for entry in state['urls']: + self.add(entry['url'], asset=entry['asset'], tag=entry['tag']) + def __bool__(self): """ Allows the Apprise object to be wrapped in an 'if 
statement'. diff --git a/lib/apprise/AppriseAsset.py b/lib/apprise/AppriseAsset.py index 34821e27..835c3b6a 100644 --- a/lib/apprise/AppriseAsset.py +++ b/lib/apprise/AppriseAsset.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE diff --git a/lib/apprise/AppriseAttachment.py b/lib/apprise/AppriseAttachment.py index 0a3913ed..e00645d2 100644 --- a/lib/apprise/AppriseAttachment.py +++ b/lib/apprise/AppriseAttachment.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE diff --git a/lib/apprise/AppriseConfig.py b/lib/apprise/AppriseConfig.py index 8f285777..07e7b48e 100644 --- a/lib/apprise/AppriseConfig.py +++ b/lib/apprise/AppriseConfig.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE diff --git a/lib/apprise/AppriseLocale.py b/lib/apprise/AppriseLocale.py index ce61d0c9..c80afae2 100644 --- a/lib/apprise/AppriseLocale.py +++ b/lib/apprise/AppriseLocale.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -33,14 +29,13 @@ import ctypes import locale import contextlib +import os +import re from os.path import join from os.path import dirname from os.path import abspath from .logger import logger -# Define our translation domain -DOMAIN = 'apprise' -LOCALE_DIR = abspath(join(dirname(__file__), 'i18n')) # This gets toggled to True if we succeed GETTEXT_LOADED = False @@ -49,17 +44,220 @@ try: # Initialize gettext import gettext - # install() creates a _() in our builtins - gettext.install(DOMAIN, localedir=LOCALE_DIR) - # Toggle our flag GETTEXT_LOADED = True except ImportError: - # gettext isn't available; no problem, just fall back to using - # the library features without multi-language support. - import builtins - builtins.__dict__['_'] = lambda x: x # pragma: no branch + # gettext isn't available; no problem; Use the library features without + # multi-language support. + pass + + +class AppriseLocale: + """ + A wrapper class to gettext so that we can manipulate multiple lanaguages + on the fly if required. + + """ + + # Define our translation domain + _domain = 'apprise' + + # The path to our translations + _locale_dir = abspath(join(dirname(__file__), 'i18n')) + + # Locale regular expression + _local_re = re.compile( + r'^((?PC)|(?P([a-z]{2}))([_:](?P[a-z]{2}))?)' + r'(\.(?P[a-z0-9-]+))?$', re.IGNORECASE) + + # Define our default encoding + _default_encoding = 'utf-8' + + # The function to assign `_` by default + _fn = 'gettext' + + # The language we should fall back to if all else fails + _default_language = 'en' + + def __init__(self, language=None): + """ + Initializes our object, if a language is specified, then we + initialize ourselves to that, otherwise we use whatever we detect + from the local operating system. 
If all else fails, we resort to the + defined default_language. + + """ + + # Cache previously loaded translations + self._gtobjs = {} + + # Get our language + self.lang = AppriseLocale.detect_language(language) + + # Our mapping to our _fn + self.__fn_map = None + + if GETTEXT_LOADED is False: + # We're done + return + + # Add language + self.add(self.lang) + + def add(self, lang=None, set_default=True): + """ + Add a language to our list + """ + lang = lang if lang else self._default_language + if lang not in self._gtobjs: + # Load our gettext object and install our language + try: + self._gtobjs[lang] = gettext.translation( + self._domain, localedir=self._locale_dir, languages=[lang], + fallback=False) + + # The non-intrusive method of applying the gettext change to + # the global namespace only + self.__fn_map = getattr(self._gtobjs[lang], self._fn) + + except FileNotFoundError: + # The translation directory does not exist + logger.debug( + 'Could not load translation path: %s', + join(self._locale_dir, lang)) + + # Fallback (handle case where self.lang does not exist) + if self.lang not in self._gtobjs: + self._gtobjs[self.lang] = gettext + self.__fn_map = getattr(self._gtobjs[self.lang], self._fn) + + return False + + logger.trace('Loaded language %s', lang) + + if set_default: + logger.debug('Language set to %s', lang) + self.lang = lang + + return True + + @contextlib.contextmanager + def lang_at(self, lang, mapto=_fn): + """ + The syntax works as: + with at.lang_at('fr'): + # apprise works as though the french language has been + # defined. afterwards, the language falls back to whatever + # it was. 
+ """ + + if GETTEXT_LOADED is False: + # Do nothing + yield None + + # we're done + return + + # Tidy the language + lang = AppriseLocale.detect_language(lang, detect_fallback=False) + if lang not in self._gtobjs and not self.add(lang, set_default=False): + # Do Nothing + yield getattr(self._gtobjs[self.lang], mapto) + else: + # Yield + yield getattr(self._gtobjs[lang], mapto) + + return + + @property + def gettext(self): + """ + Return the current language gettext() function + + Useful for assigning to `_` + """ + return self._gtobjs[self.lang].gettext + + @staticmethod + def detect_language(lang=None, detect_fallback=True): + """ + Returns the language (if it's retrievable) + """ + # We want to only use the 2 character version of this language + # hence en_CA becomes en, en_US becomes en. + if not isinstance(lang, str): + if detect_fallback is False: + # no detection enabled; we're done + return None + + # Posix lookup + lookup = os.environ.get + localename = None + for variable in ('LC_ALL', 'LC_CTYPE', 'LANG', 'LANGUAGE'): + localename = lookup(variable, None) + if localename: + result = AppriseLocale._local_re.match(localename) + if result and result.group('lang'): + return result.group('lang').lower() + + # Windows handling + if hasattr(ctypes, 'windll'): + windll = ctypes.windll.kernel32 + try: + lang = locale.windows_locale[ + windll.GetUserDefaultUILanguage()] + + # Our detected windows language + return lang[0:2].lower() + + except (TypeError, KeyError): + # Fallback to posix detection + pass + + # Built in locale library check + try: + # Acquire our locale + lang = locale.getlocale()[0] + + except (ValueError, TypeError) as e: + # This occurs when an invalid locale was parsed from the + # environment variable. While we still return None in this + # case, we want to better notify the end user of this. Users + # receiving this error should check their environment + # variables. 
+ logger.warning( + 'Language detection failure / {}'.format(str(e))) + return None + + return None if not lang else lang[0:2].lower() + + def __getstate__(self): + """ + Pickle Support dumps() + """ + state = self.__dict__.copy() + + # Remove the unpicklable entries. + del state['_gtobjs'] + del state['_AppriseLocale__fn_map'] + return state + + def __setstate__(self, state): + """ + Pickle Support loads() + """ + self.__dict__.update(state) + # Our mapping to our _fn + self.__fn_map = None + self._gtobjs = {} + self.add(state['lang'], set_default=True) + + +# +# Prepare our default LOCALE Singleton +# +LOCALE = AppriseLocale() class LazyTranslation: @@ -77,7 +275,7 @@ class LazyTranslation: super().__init__(*args, **kwargs) def __str__(self): - return gettext.gettext(self.text) + return LOCALE.gettext(self.text) if GETTEXT_LOADED else self.text # Lazy translation handling @@ -86,140 +284,3 @@ def gettext_lazy(text): A dummy function that can be referenced """ return LazyTranslation(text=text) - - -class AppriseLocale: - """ - A wrapper class to gettext so that we can manipulate multiple lanaguages - on the fly if required. - - """ - - def __init__(self, language=None): - """ - Initializes our object, if a language is specified, then we - initialize ourselves to that, otherwise we use whatever we detect - from the local operating system. If all else fails, we resort to the - defined default_language. 
- - """ - - # Cache previously loaded translations - self._gtobjs = {} - - # Get our language - self.lang = AppriseLocale.detect_language(language) - - if GETTEXT_LOADED is False: - # We're done - return - - if self.lang: - # Load our gettext object and install our language - try: - self._gtobjs[self.lang] = gettext.translation( - DOMAIN, localedir=LOCALE_DIR, languages=[self.lang]) - - # Install our language - self._gtobjs[self.lang].install() - - except IOError: - # This occurs if we can't access/load our translations - pass - - @contextlib.contextmanager - def lang_at(self, lang): - """ - The syntax works as: - with at.lang_at('fr'): - # apprise works as though the french language has been - # defined. afterwards, the language falls back to whatever - # it was. - """ - - if GETTEXT_LOADED is False: - # yield - yield - - # we're done - return - - # Tidy the language - lang = AppriseLocale.detect_language(lang, detect_fallback=False) - - # Now attempt to load it - try: - if lang in self._gtobjs: - if lang != self.lang: - # Install our language only if we aren't using it - # already - self._gtobjs[lang].install() - - else: - self._gtobjs[lang] = gettext.translation( - DOMAIN, localedir=LOCALE_DIR, languages=[self.lang]) - - # Install our language - self._gtobjs[lang].install() - - # Yield - yield - - except (IOError, KeyError): - # This occurs if we can't access/load our translations - # Yield reguardless - yield - - finally: - # Fall back to our previous language - if lang != self.lang and lang in self._gtobjs: - # Install our language - self._gtobjs[self.lang].install() - - return - - @staticmethod - def detect_language(lang=None, detect_fallback=True): - """ - returns the language (if it's retrievable) - """ - # We want to only use the 2 character version of this language - # hence en_CA becomes en, en_US becomes en. 
- if not isinstance(lang, str): - if detect_fallback is False: - # no detection enabled; we're done - return None - - if hasattr(ctypes, 'windll'): - windll = ctypes.windll.kernel32 - try: - lang = locale.windows_locale[ - windll.GetUserDefaultUILanguage()] - - # Our detected windows language - return lang[0:2].lower() - - except (TypeError, KeyError): - # Fallback to posix detection - pass - - try: - # Detect language - lang = locale.getdefaultlocale()[0] - - except ValueError as e: - # This occurs when an invalid locale was parsed from the - # environment variable. While we still return None in this - # case, we want to better notify the end user of this. Users - # receiving this error should check their environment - # variables. - logger.warning( - 'Language detection failure / {}'.format(str(e))) - return None - - except TypeError: - # None is returned if the default can't be determined - # we're done in this case - return None - - return None if not lang else lang[0:2].lower() diff --git a/lib/apprise/URLBase.py b/lib/apprise/URLBase.py index c0036e19..1cea66d1 100644 --- a/lib/apprise/URLBase.py +++ b/lib/apprise/URLBase.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -204,7 +200,14 @@ class URLBase: self.verify_certificate = parse_bool(kwargs.get('verify', True)) # Secure Mode - self.secure = kwargs.get('secure', False) + self.secure = kwargs.get('secure', None) + try: + if not isinstance(self.secure, bool): + # Attempt to detect + self.secure = kwargs.get('schema', '')[-1].lower() == 's' + + except (TypeError, IndexError): + self.secure = False self.host = URLBase.unquote(kwargs.get('host')) self.port = kwargs.get('port') @@ -228,6 +231,11 @@ class URLBase: # Always unquote the password if it exists self.password = URLBase.unquote(self.password) + # Store our full path consistently ensuring it ends with a `/' + self.fullpath = URLBase.unquote(kwargs.get('fullpath')) + if not isinstance(self.fullpath, str) or not self.fullpath: + self.fullpath = '/' + # Store our Timeout Variables if 'rto' in kwargs: try: @@ -307,7 +315,36 @@ class URLBase: arguments provied. 
""" - raise NotImplementedError("url() is implimented by the child class.") + + # Our default parameters + params = self.url_parameters(privacy=privacy, *args, **kwargs) + + # Determine Authentication + auth = '' + if self.user and self.password: + auth = '{user}:{password}@'.format( + user=URLBase.quote(self.user, safe=''), + password=self.pprint( + self.password, privacy, mode=PrivacyMode.Secret, safe=''), + ) + elif self.user: + auth = '{user}@'.format( + user=URLBase.quote(self.user, safe=''), + ) + + default_port = 443 if self.secure else 80 + + return '{schema}://{auth}{hostname}{port}{fullpath}?{params}'.format( + schema='https' if self.secure else 'http', + auth=auth, + # never encode hostname since we're expecting it to be a valid one + hostname=self.host, + port='' if self.port is None or self.port == default_port + else ':{}'.format(self.port), + fullpath=URLBase.quote(self.fullpath, safe='/') + if self.fullpath else '/', + params=URLBase.urlencode(params), + ) def __contains__(self, tags): """ @@ -583,6 +620,33 @@ class URLBase: """ return (self.socket_connect_timeout, self.socket_read_timeout) + @property + def request_auth(self): + """This is primarily used to fullfill the `auth` keyword argument + that is used by requests.get() and requests.put() calls. + """ + return (self.user, self.password) if self.user else None + + @property + def request_url(self): + """ + Assemble a simple URL that can be used by the requests library + + """ + + # Acquire our schema + schema = 'https' if self.secure else 'http' + + # Prepare our URL + url = '%s://%s' % (schema, self.host) + + # Apply Port information if present + if isinstance(self.port, int): + url += ':%d' % self.port + + # Append our full path + return url + self.fullpath + def url_parameters(self, *args, **kwargs): """ Provides a default set of args to work with. 
This can greatly @@ -603,7 +667,8 @@ class URLBase: } @staticmethod - def parse_url(url, verify_host=True, plus_to_space=False): + def parse_url(url, verify_host=True, plus_to_space=False, + strict_port=False): """Parses the URL and returns it broken apart into a dictionary. This is very specific and customized for Apprise. @@ -624,13 +689,13 @@ class URLBase: results = parse_url( url, default_schema='unknown', verify_host=verify_host, - plus_to_space=plus_to_space) + plus_to_space=plus_to_space, strict_port=strict_port) if not results: # We're done; we failed to parse our url return results - # if our URL ends with an 's', then assueme our secure flag is set. + # if our URL ends with an 's', then assume our secure flag is set. results['secure'] = (results['schema'][-1] == 's') # Support SSL Certificate 'verify' keyword. Default to being enabled @@ -650,6 +715,21 @@ class URLBase: if 'user' in results['qsd']: results['user'] = results['qsd']['user'] + # parse_url() always creates a 'password' and 'user' entry in the + # results returned. Entries are set to None if they weren't specified + if results['password'] is None and 'user' in results['qsd']: + # Handle cases where the user= provided in 2 locations, we want + # the original to fall back as a being a password (if one wasn't + # otherwise defined) + # e.g. + # mailtos://PASSWORD@hostname?user=admin@mail-domain.com + # - the PASSWORD gets lost in the parse url() since a user= + # over-ride is specified. + presults = parse_url(results['url']) + if presults: + # Store our Password + results['password'] = presults['user'] + # Store our socket read timeout if specified if 'rto' in results['qsd']: results['rto'] = results['qsd']['rto'] @@ -685,6 +765,15 @@ class URLBase: return response + def __len__(self): + """ + Should be over-ridden and allows the tracking of how many targets + are associated with each URLBase object. 
+ + Default is always 1 + """ + return 1 + def schemas(self): """A simple function that returns a set of all schemas associated with this object based on the object.protocol and diff --git a/lib/apprise/__init__.py b/lib/apprise/__init__.py index e67ce953..f8bb5c75 100644 --- a/lib/apprise/__init__.py +++ b/lib/apprise/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -31,7 +27,7 @@ # POSSIBILITY OF SUCH DAMAGE. __title__ = 'Apprise' -__version__ = '1.3.0' +__version__ = '1.6.0' __author__ = 'Chris Caron' __license__ = 'BSD' __copywrite__ = 'Copyright (C) 2023 Chris Caron ' diff --git a/lib/apprise/attachment/AttachBase.py b/lib/apprise/attachment/AttachBase.py index 2b05c849..c1cadbf9 100644 --- a/lib/apprise/attachment/AttachBase.py +++ b/lib/apprise/attachment/AttachBase.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. 
Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -68,7 +64,8 @@ class AttachBase(URLBase): # set to zero (0), then no check is performed # 1 MB = 1048576 bytes # 5 MB = 5242880 bytes - max_file_size = 5242880 + # 1 GB = 1048576000 bytes + max_file_size = 1048576000 # By default all attachments types are inaccessible. # Developers of items identified in the attachment plugin directory diff --git a/lib/apprise/attachment/AttachFile.py b/lib/apprise/attachment/AttachFile.py index f89b915e..d3085555 100644 --- a/lib/apprise/attachment/AttachFile.py +++ b/lib/apprise/attachment/AttachFile.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE diff --git a/lib/apprise/attachment/AttachHTTP.py b/lib/apprise/attachment/AttachHTTP.py index d8b46ff2..0c859477 100644 --- a/lib/apprise/attachment/AttachHTTP.py +++ b/lib/apprise/attachment/AttachHTTP.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE diff --git a/lib/apprise/attachment/__init__.py b/lib/apprise/attachment/__init__.py index 1b0e1bfe..ba7620a4 100644 --- a/lib/apprise/attachment/__init__.py +++ b/lib/apprise/attachment/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE diff --git a/lib/apprise/common.py b/lib/apprise/common.py index 2a5a0162..5e3a3567 100644 --- a/lib/apprise/common.py +++ b/lib/apprise/common.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE diff --git a/lib/apprise/config/ConfigBase.py b/lib/apprise/config/ConfigBase.py index 2f3e33b3..0da7a8be 100644 --- a/lib/apprise/config/ConfigBase.py +++ b/lib/apprise/config/ConfigBase.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -355,6 +351,77 @@ class ConfigBase(URLBase): # missing and/or expired. return True + @staticmethod + def __normalize_tag_groups(group_tags): + """ + Used to normalize a tag assign map which looks like: + { + 'group': set('{tag1}', '{group1}', '{tag2}'), + 'group1': set('{tag2}','{tag3}'), + } + + Then normalized it (merging groups); with respect to the above, the + output would be: + { + 'group': set('{tag1}', '{tag2}', '{tag3}), + 'group1': set('{tag2}','{tag3}'), + } + + """ + # Prepare a key set list we can use + tag_groups = set([str(x) for x in group_tags.keys()]) + + def _expand(tags, ignore=None): + """ + Expands based on tag provided and returns a set + + this also updates the group_tags while it goes + """ + + # Prepare ourselves a return set + results = set() + ignore = set() if ignore is None else ignore + + # track groups + groups = set() + + for tag in tags: + if tag in ignore: + continue + + # Track our groups + groups.add(tag) + + # Store what we know is worth keping + results |= group_tags[tag] - tag_groups + + # Get simple tag assignments + found = group_tags[tag] & tag_groups + if not found: + continue + + for gtag in found: + if gtag in ignore: + continue + + # Go deeper (recursion) + ignore.add(tag) + group_tags[gtag] = _expand(set([gtag]), ignore=ignore) + results |= group_tags[gtag] + + # Pop ignore + ignore.remove(tag) + + return results + + for tag in tag_groups: + # Get our tags + group_tags[tag] |= _expand(set([tag])) + if not group_tags[tag]: + ConfigBase.logger.warning( + 'The group {} has no tags assigned to it'.format(tag)) + del group_tags[tag] + @staticmethod def parse_url(url, verify_host=True): """Parses the URL and returns it broken apart into a dictionary. 
@@ -533,6 +600,9 @@ class ConfigBase(URLBase): # as additional configuration entries when loaded. include + # Assign tag contents to a group identifier + = + """ # A list of loaded Notification Services servers = list() @@ -541,6 +611,12 @@ class ConfigBase(URLBase): # the include keyword configs = list() + # Track all of the tags we want to assign later on + group_tags = {} + + # Track our entries to preload + preloaded = [] + # Prepare our Asset Object asset = asset if isinstance(asset, AppriseAsset) else AppriseAsset() @@ -548,7 +624,7 @@ class ConfigBase(URLBase): valid_line_re = re.compile( r'^\s*(?P([;#]+(?P.*))|' r'(\s*(?P[a-z0-9, \t_-]+)\s*=|=)?\s*' - r'(?P[a-z0-9]{2,9}://.*)|' + r'((?P[a-z0-9]{1,12}://.*)|(?P[a-z0-9, \t_-]+))|' r'include\s+(?P.+))?\s*$', re.I) try: @@ -574,8 +650,13 @@ class ConfigBase(URLBase): # otherwise. return (list(), list()) - url, config = result.group('url'), result.group('config') - if not (url or config): + # Retrieve our line + url, assign, config = \ + result.group('url'), \ + result.group('assign'), \ + result.group('config') + + if not (url or config or assign): # Comment/empty line; do nothing continue @@ -595,6 +676,33 @@ class ConfigBase(URLBase): loggable_url = url if not asset.secure_logging \ else cwe312_url(url) + if assign: + groups = set(parse_list(result.group('tags'), cast=str)) + if not groups: + # no tags were assigned + ConfigBase.logger.warning( + 'Unparseable tag assignment - no group(s) ' + 'on line {}'.format(line)) + continue + + # Get our tags + tags = set(parse_list(assign, cast=str)) + if not tags: + # no tags were assigned + ConfigBase.logger.warning( + 'Unparseable tag assignment - no tag(s) to assign ' + 'on line {}'.format(line)) + continue + + # Update our tag group map + for tag_group in groups: + if tag_group not in group_tags: + group_tags[tag_group] = set() + + # ensure our tag group is never included in the assignment + group_tags[tag_group] |= tags - set([tag_group]) + continue + # Acquire 
our url tokens results = plugins.url_to_dict( url, secure_logging=asset.secure_logging) @@ -607,25 +715,57 @@ class ConfigBase(URLBase): # Build a list of tags to associate with the newly added # notifications if any were set - results['tag'] = set(parse_list(result.group('tags'))) + results['tag'] = set(parse_list(result.group('tags'), cast=str)) # Set our Asset Object results['asset'] = asset + # Store our preloaded entries + preloaded.append({ + 'results': results, + 'line': line, + 'loggable_url': loggable_url, + }) + + # + # Normalize Tag Groups + # - Expand Groups of Groups so that they don't exist + # + ConfigBase.__normalize_tag_groups(group_tags) + + # + # URL Processing + # + for entry in preloaded: + # Point to our results entry for easier reference below + results = entry['results'] + + # + # Apply our tag groups if they're defined + # + for group, tags in group_tags.items(): + # Detect if anything assigned to this tag also maps back to a + # group. If so we want to add the group to our list + if next((True for tag in results['tag'] + if tag in tags), False): + results['tag'].add(group) + try: # Attempt to create an instance of our plugin using the # parsed URL information - plugin = common.NOTIFY_SCHEMA_MAP[results['schema']](**results) + plugin = common.NOTIFY_SCHEMA_MAP[ + results['schema']](**results) # Create log entry of loaded URL ConfigBase.logger.debug( - 'Loaded URL: %s', plugin.url(privacy=asset.secure_logging)) + 'Loaded URL: %s', plugin.url( + privacy=results['asset'].secure_logging)) except Exception as e: # the arguments are invalid or can not be used. 
ConfigBase.logger.warning( 'Could not load URL {} on line {}.'.format( - loggable_url, line)) + entry['loggable_url'], entry['line'])) ConfigBase.logger.debug('Loading Exception: %s' % str(e)) continue diff --git a/lib/apprise/config/ConfigFile.py b/lib/apprise/config/ConfigFile.py index bfb869a1..a0b9bf69 100644 --- a/lib/apprise/config/ConfigFile.py +++ b/lib/apprise/config/ConfigFile.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE diff --git a/lib/apprise/config/ConfigHTTP.py b/lib/apprise/config/ConfigHTTP.py index 244b45d3..82cb1f63 100644 --- a/lib/apprise/config/ConfigHTTP.py +++ b/lib/apprise/config/ConfigHTTP.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE diff --git a/lib/apprise/config/ConfigMemory.py b/lib/apprise/config/ConfigMemory.py index ec44e9b4..110e04a3 100644 --- a/lib/apprise/config/ConfigMemory.py +++ b/lib/apprise/config/ConfigMemory.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE diff --git a/lib/apprise/config/__init__.py b/lib/apprise/config/__init__.py index 7d03a34a..4b7e3fd7 100644 --- a/lib/apprise/config/__init__.py +++ b/lib/apprise/config/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE diff --git a/lib/apprise/conversion.py b/lib/apprise/conversion.py index dc4f1169..ffa3e3a0 100644 --- a/lib/apprise/conversion.py +++ b/lib/apprise/conversion.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE diff --git a/lib/apprise/decorators/CustomNotifyPlugin.py b/lib/apprise/decorators/CustomNotifyPlugin.py index 9c8e7cb1..5ccfded5 100644 --- a/lib/apprise/decorators/CustomNotifyPlugin.py +++ b/lib/apprise/decorators/CustomNotifyPlugin.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -28,6 +24,7 @@ # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE.USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. from ..plugins.NotifyBase import NotifyBase diff --git a/lib/apprise/decorators/__init__.py b/lib/apprise/decorators/__init__.py index 699fd0da..5b089bbf 100644 --- a/lib/apprise/decorators/__init__.py +++ b/lib/apprise/decorators/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE diff --git a/lib/apprise/decorators/notify.py b/lib/apprise/decorators/notify.py index 36842b41..07b4ceb1 100644 --- a/lib/apprise/decorators/notify.py +++ b/lib/apprise/decorators/notify.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. 
# Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE diff --git a/lib/apprise/i18n/apprise.pot b/lib/apprise/i18n/apprise.pot index 677814fe..434ce91d 100644 --- a/lib/apprise/i18n/apprise.pot +++ b/lib/apprise/i18n/apprise.pot @@ -6,16 +6,16 @@ #, fuzzy msgid "" msgstr "" -"Project-Id-Version: apprise 1.3.0\n" +"Project-Id-Version: apprise 1.6.0\n" "Report-Msgid-Bugs-To: lead2gold@gmail.com\n" -"POT-Creation-Date: 2023-02-22 17:31-0500\n" +"POT-Creation-Date: 2023-10-15 15:56-0400\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: FULL NAME \n" "Language-Team: LANGUAGE \n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=utf-8\n" "Content-Transfer-Encoding: 8bit\n" -"Generated-By: Babel 2.9.1\n" +"Generated-By: Babel 2.11.0\n" msgid "A local Gnome environment is required." 
msgstr "" @@ -164,6 +164,9 @@ msgstr "" msgid "Consumer Secret" msgstr "" +msgid "Content Placement" +msgstr "" + msgid "Country" msgstr "" @@ -209,6 +212,9 @@ msgstr "" msgid "Device Name" msgstr "" +msgid "Discord Event ID" +msgstr "" + msgid "Display Footer" msgstr "" @@ -224,12 +230,6 @@ msgstr "" msgid "Email Header" msgstr "" -msgid "Encrypted Password" -msgstr "" - -msgid "Encrypted Salt" -msgstr "" - msgid "Entity" msgstr "" @@ -272,6 +272,9 @@ msgstr "" msgid "From Name" msgstr "" +msgid "From Phone ID" +msgstr "" + msgid "From Phone No" msgstr "" @@ -317,6 +320,9 @@ msgstr "" msgid "Integration ID" msgstr "" +msgid "Integration Key" +msgstr "" + msgid "Is Ad?" msgstr "" @@ -353,6 +359,9 @@ msgstr "" msgid "Master Key" msgstr "" +msgid "Matrix API Verion" +msgstr "" + msgid "Memory" msgstr "" @@ -433,6 +442,12 @@ msgstr "" msgid "Payload Extras" msgstr "" +msgid "Ping Discord Role" +msgstr "" + +msgid "Ping Discord User" +msgstr "" + msgid "Port" msgstr "" @@ -451,9 +466,15 @@ msgstr "" msgid "Provider Key" msgstr "" +msgid "Pushkey" +msgstr "" + msgid "QOS" msgstr "" +msgid "Query Method" +msgstr "" + msgid "Region" msgstr "" @@ -475,24 +496,27 @@ msgstr "" msgid "Retry" msgstr "" -msgid "Rooms" -msgstr "" - -msgid "Route" +msgid "Room ID" msgstr "" msgid "Route Group" msgstr "" -msgid "Routing Key" -msgstr "" - msgid "SMTP Server" msgstr "" +msgid "Salt" +msgstr "" + msgid "Schema" msgstr "" +msgid "Secret" +msgstr "" + +msgid "Secret API Key" +msgstr "" + msgid "Secret Access Key" msgstr "" @@ -520,6 +544,9 @@ msgstr "" msgid "Severity" msgstr "" +msgid "Short URL" +msgstr "" + msgid "Show Status" msgstr "" @@ -559,9 +586,6 @@ msgstr "" msgid "Subtitle" msgstr "" -msgid "Syslog Mode" -msgstr "" - msgid "Tags" msgstr "" @@ -658,6 +682,15 @@ msgstr "" msgid "Template Data" msgstr "" +msgid "Template ID" +msgstr "" + +msgid "Template Mapping" +msgstr "" + +msgid "Template Name" +msgstr "" + msgid "Template Path" msgstr "" @@ -706,12 +739,18 @@ msgstr "" 
msgid "Topic" msgstr "" +msgid "Topic Thread ID" +msgstr "" + msgid "Transmitter Groups" msgstr "" msgid "URL" msgstr "" +msgid "URL Prefix" +msgstr "" + msgid "URL Title" msgstr "" @@ -796,3 +835,6 @@ msgstr "" msgid "ttl" msgstr "" +msgid "validity" +msgstr "" + diff --git a/lib/apprise/i18n/en/LC_MESSAGES/apprise.po b/lib/apprise/i18n/en/LC_MESSAGES/apprise.po index 44451262..65deb777 100644 --- a/lib/apprise/i18n/en/LC_MESSAGES/apprise.po +++ b/lib/apprise/i18n/en/LC_MESSAGES/apprise.po @@ -3,9 +3,10 @@ # This file is distributed under the same license as the apprise project. # Chris Caron , 2019. # -msgid "" +msgid "" msgstr "" -"Project-Id-Version: apprise 0.7.6\n" + +"Project-Id-Version: apprise 1.4.5\n" "Report-Msgid-Bugs-To: lead2gold@gmail.com\n" "POT-Creation-Date: 2019-05-28 16:56-0400\n" "PO-Revision-Date: 2019-05-24 20:00-0400\n" @@ -18,276 +19,272 @@ msgstr "" "Content-Transfer-Encoding: 8bit\n" "Generated-By: Babel 2.6.0\n" -msgid "API Key" -msgstr "" +msgid "API Key" +msgstr "API Key" -msgid "Access Key" -msgstr "" +msgid "Access Key" +msgstr "Access Key" -msgid "Access Key ID" -msgstr "" +msgid "Access Key ID" +msgstr "Access Key ID" -msgid "Access Secret" -msgstr "" +msgid "Access Secret" +msgstr "Access Secret" -msgid "Access Token" -msgstr "" +msgid "Access Token" +msgstr "Access Token" -msgid "Account SID" -msgstr "" +msgid "Account SID" +msgstr "Account SID" -msgid "Add Tokens" -msgstr "" +msgid "Add Tokens" +msgstr "Add Tokens" -msgid "Application Key" -msgstr "" +msgid "Application Key" +msgstr "Application Key" -msgid "Application Secret" -msgstr "" +msgid "Application Secret" +msgstr "Application Secret" -msgid "Auth Token" -msgstr "" +msgid "Auth Token" +msgstr "Auth Token" -msgid "Authorization Token" -msgstr "" +msgid "Authorization Token" +msgstr "Authorization Token" -msgid "Avatar Image" -msgstr "" +msgid "Avatar Image" +msgstr "Avatar Image" -msgid "Bot Name" -msgstr "" +msgid "Bot Name" +msgstr "Bot Name" -msgid "Bot Token" 
-msgstr "" +msgid "Bot Token" +msgstr "Bot Token" -msgid "Channels" -msgstr "" +msgid "Channels" +msgstr "Channels" -msgid "Consumer Key" -msgstr "" +msgid "Consumer Key" +msgstr "Consumer Key" -msgid "Consumer Secret" -msgstr "" +msgid "Consumer Secret" +msgstr "Consumer Secret" -msgid "Detect Bot Owner" -msgstr "" +msgid "Detect Bot Owner" +msgstr "Detect Bot Owner" -msgid "Device ID" -msgstr "" +msgid "Device ID" +msgstr "Device ID" -msgid "Display Footer" -msgstr "" +msgid "Display Footer" +msgstr "Display Footer" -msgid "Domain" -msgstr "" +msgid "Domain" +msgstr "Domain" -msgid "Duration" -msgstr "" +msgid "Duration" +msgstr "Duration" -msgid "Events" -msgstr "" +msgid "Events" +msgstr "Events" -msgid "Footer Logo" -msgstr "" +msgid "Footer Logo" +msgstr "Footer Logo" -msgid "From Email" -msgstr "" +msgid "From Email" +msgstr "From Email" -msgid "From Name" -msgstr "" +msgid "From Name" +msgstr "From Name" -msgid "From Phone No" -msgstr "" +msgid "From Phone No" +msgstr "From Phone No" -msgid "Group" -msgstr "" +msgid "Group" +msgstr "Group" -msgid "HTTP Header" -msgstr "" +msgid "HTTP Header" +msgstr "HTTP Header" -msgid "Hostname" -msgstr "" +msgid "Hostname" +msgstr "Hostname" -msgid "Include Image" -msgstr "" +msgid "Include Image" +msgstr "Include Image" -msgid "Modal" -msgstr "" +msgid "Modal" +msgstr "Modal" -msgid "Notify Format" -msgstr "" +msgid "Notify Format" +msgstr "Notify Format" -msgid "Organization" -msgstr "" +msgid "Organization" +msgstr "Organization" -msgid "Overflow Mode" -msgstr "" +msgid "Overflow Mode" +msgstr "Overflow Mode" -msgid "Password" -msgstr "" +msgid "Password" +msgstr "Password" -msgid "Port" -msgstr "" +msgid "Port" +msgstr "Port" -msgid "Priority" -msgstr "" +msgid "Priority" +msgstr "Priority" -msgid "Provider Key" -msgstr "" +msgid "Provider Key" +msgstr "Provider Key" -msgid "Region" -msgstr "" +msgid "Region" +msgstr "Region" -msgid "Region Name" -msgstr "" +msgid "Region Name" +msgstr "Region Name" -msgid "Remove 
Tokens" -msgstr "" +msgid "Remove Tokens" +msgstr "Remove Tokens" -msgid "Rooms" -msgstr "" +msgid "Rooms" +msgstr "Rooms" -msgid "SMTP Server" -msgstr "" +msgid "SMTP Server" +msgstr "SMTP Server" -msgid "Schema" -msgstr "" +msgid "Schema" +msgstr "Schema" -msgid "Secret Access Key" -msgstr "" +msgid "Secret Access Key" +msgstr "Secret Access Key" -msgid "Secret Key" -msgstr "" +msgid "Secret Key" +msgstr "Secret Key" -msgid "Secure Mode" -msgstr "" +msgid "Secure Mode" +msgstr "Secure Mode" -msgid "Server Timeout" -msgstr "" +msgid "Server Timeout" +msgstr "Server Timeout" -msgid "Sound" -msgstr "" +msgid "Sound" +msgstr "Sound" -msgid "Source JID" -msgstr "" +msgid "Source JID" +msgstr "Source JID" -msgid "Target Channel" -msgstr "" +msgid "Target Channel" +msgstr "Target Channel" -msgid "Target Chat ID" -msgstr "" +msgid "Target Chat ID" +msgstr "Target Chat ID" -msgid "Target Device" -msgstr "" +msgid "Target Device" +msgstr "Target Device" -msgid "Target Device ID" -msgstr "" +msgid "Target Device ID" +msgstr "Target Device ID" -msgid "Target Email" -msgstr "" +msgid "Target Email" +msgstr "Target Email" -msgid "Target Emails" -msgstr "" +msgid "Target Emails" +msgstr "Target Emails" -msgid "Target Encoded ID" -msgstr "" +msgid "Target Encoded ID" +msgstr "Target Encoded ID" -msgid "Target JID" -msgstr "" +msgid "Target JID" +msgstr "Target JID" -msgid "Target Phone No" -msgstr "" +msgid "Target Phone No" +msgstr "Target Phone No" -msgid "Target Room Alias" -msgstr "" +msgid "Target Room Alias" +msgstr "Target Room Alias" -msgid "Target Room ID" -msgstr "" +msgid "Target Room ID" +msgstr "Target Room ID" -msgid "Target Short Code" -msgstr "" +msgid "Target Short Code" +msgstr "Target Short Code" -msgid "Target Tag ID" -msgstr "" +msgid "Target Tag ID" +msgstr "Target Tag ID" -msgid "Target Topic" -msgstr "" +msgid "Target Topic" +msgstr "Target Topic" -msgid "Target User" -msgstr "" +msgid "Target User" +msgstr "Target User" -msgid "Targets" -msgstr "" +msgid 
"Targets" +msgstr "Targets" -msgid "Text To Speech" -msgstr "" +msgid "Text To Speech" +msgstr "Text To Speech" -msgid "To Channel ID" -msgstr "" +msgid "To Channel ID" +msgstr "To Channel ID" -msgid "To Email" -msgstr "" +msgid "To Email" +msgstr "To Email" -msgid "To User ID" -msgstr "" +msgid "To User ID" +msgstr "To User ID" -msgid "Token" -msgstr "" +msgid "Token" +msgstr "Token" -msgid "Token A" -msgstr "" +msgid "Token A" +msgstr "Token A" -msgid "Token B" -msgstr "" +msgid "Token B" +msgstr "Token B" -msgid "Token C" -msgstr "" +msgid "Token C" +msgstr "Token C" -msgid "Urgency" -msgstr "" +msgid "Urgency" +msgstr "Urgency" -msgid "Use Avatar" -msgstr "" +msgid "Use Avatar" +msgstr "Use Avatar" -msgid "User" -msgstr "" +msgid "User" +msgstr "User" -msgid "User Key" -msgstr "" +msgid "User Key" +msgstr "User Key" -msgid "User Name" -msgstr "" +msgid "User Name" +msgstr "User Name" -msgid "Username" -msgstr "" +msgid "Username" +msgstr "Username" -msgid "Verify SSL" -msgstr "" +msgid "Verify SSL" +msgstr "Verify SSL" -msgid "Version" -msgstr "" +msgid "Version" +msgstr "Version" -msgid "Webhook" -msgstr "" +msgid "Webhook" +msgstr "Webhook" -msgid "Webhook ID" -msgstr "" +msgid "Webhook ID" +msgstr "Webhook ID" -msgid "Webhook Mode" -msgstr "" +msgid "Webhook Mode" +msgstr "Webhook Mode" -msgid "Webhook Token" -msgstr "" +msgid "Webhook Token" +msgstr "Webhook Token" -msgid "X-Axis" -msgstr "" +msgid "X-Axis" +msgstr "X-Axis" -msgid "XEP" -msgstr "" - -msgid "Y-Axis" -msgstr "" - -#~ msgid "Access Key Secret" -#~ msgstr "" +msgid "XEP" +msgstr "XEP" +msgid "Y-Axis" +msgstr "Y-Axis" diff --git a/lib/apprise/logger.py b/lib/apprise/logger.py index 005a3e0d..6a594ec6 100644 --- a/lib/apprise/logger.py +++ b/lib/apprise/logger.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. 
# Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE diff --git a/lib/apprise/plugins/NotifyAppriseAPI.py b/lib/apprise/plugins/NotifyAppriseAPI.py index d2f1452a..3c85b8ac 100644 --- a/lib/apprise/plugins/NotifyAppriseAPI.py +++ b/lib/apprise/plugins/NotifyAppriseAPI.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -33,6 +29,7 @@ import re import requests from json import dumps +import base64 from .NotifyBase import NotifyBase from ..URLBase import PrivacyMode @@ -42,6 +39,20 @@ from ..utils import validate_regex from ..AppriseLocale import gettext_lazy as _ +class AppriseAPIMethod: + """ + Defines the method to post data tot he remote server + """ + JSON = 'json' + FORM = 'form' + + +APPRISE_API_METHODS = ( + AppriseAPIMethod.FORM, + AppriseAPIMethod.JSON, +) + + class NotifyAppriseAPI(NotifyBase): """ A wrapper for Apprise (Persistent) API Notifications @@ -62,9 +73,12 @@ class NotifyAppriseAPI(NotifyBase): # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_apprise_api' + # Support attachments + attachment_support = True + # Depending on the number of transactions/notifications taking place, this # could take a while. 
30 seconds should be enough to perform the task - socket_connect_timeout = 30.0 + socket_read_timeout = 30.0 # Disable throttle rate for Apprise API requests since they are normally # local anyway @@ -119,6 +133,12 @@ class NotifyAppriseAPI(NotifyBase): 'name': _('Tags'), 'type': 'string', }, + 'method': { + 'name': _('Query Method'), + 'type': 'choice:string', + 'values': APPRISE_API_METHODS, + 'default': APPRISE_API_METHODS[0], + }, 'to': { 'alias_of': 'token', }, @@ -132,7 +152,8 @@ class NotifyAppriseAPI(NotifyBase): }, } - def __init__(self, token=None, tags=None, headers=None, **kwargs): + def __init__(self, token=None, tags=None, method=None, headers=None, + **kwargs): """ Initialize Apprise API Object @@ -142,10 +163,6 @@ class NotifyAppriseAPI(NotifyBase): """ super().__init__(**kwargs) - self.fullpath = kwargs.get('fullpath') - if not isinstance(self.fullpath, str): - self.fullpath = '/' - self.token = validate_regex( token, *self.template_tokens['token']['regex']) if not self.token: @@ -154,6 +171,14 @@ class NotifyAppriseAPI(NotifyBase): self.logger.warning(msg) raise TypeError(msg) + self.method = self.template_args['method']['default'] \ + if not isinstance(method, str) else method.lower() + + if self.method not in APPRISE_API_METHODS: + msg = 'The method specified ({}) is invalid.'.format(method) + self.logger.warning(msg) + raise TypeError(msg) + # Build list of tags self.__tags = parse_list(tags) @@ -169,8 +194,13 @@ class NotifyAppriseAPI(NotifyBase): Returns the URL built dynamically based on specified arguments. 
""" - # Our URL parameters - params = self.url_parameters(privacy=privacy, *args, **kwargs) + # Define any URL parameters + params = { + 'method': self.method, + } + + # Extend our parameters + params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) # Append our headers into our parameters params.update({'+{}'.format(k): v for k, v in self.headers.items()}) @@ -209,15 +239,61 @@ class NotifyAppriseAPI(NotifyBase): token=self.pprint(self.token, privacy, safe=''), params=NotifyAppriseAPI.urlencode(params)) - def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): + def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, + **kwargs): """ Perform Apprise API Notification """ - headers = {} + # Prepare HTTP Headers + headers = { + 'User-Agent': self.app_id, + } + # Apply any/all header over-rides defined headers.update(self.headers) + attachments = [] + files = [] + if attach and self.attachment_support: + for no, attachment in enumerate(attach, start=1): + # Perform some simple error checking + if not attachment: + # We could not access the attachment + self.logger.error( + 'Could not access attachment {}.'.format( + attachment.url(privacy=True))) + return False + + try: + if self.method == AppriseAPIMethod.JSON: + with open(attachment.path, 'rb') as f: + # Output must be in a DataURL format (that's what + # PushSafer calls it): + attachments.append({ + 'filename': attachment.name, + 'base64': base64.b64encode(f.read()) + .decode('utf-8'), + 'mimetype': attachment.mimetype, + }) + + else: # AppriseAPIMethod.FORM + files.append(( + 'file{:02d}'.format(no), + ( + attachment.name, + open(attachment.path, 'rb'), + attachment.mimetype, + ) + )) + + except (OSError, IOError) as e: + self.logger.warning( + 'An I/O error occurred while reading {}.'.format( + attachment.name if attachment else 'attachment')) + self.logger.debug('I/O Exception: %s' % str(e)) + return False + # prepare Apprise API Object payload = { # Apprise API 
Payload @@ -227,6 +303,14 @@ class NotifyAppriseAPI(NotifyBase): 'format': self.notify_format, } + if self.method == AppriseAPIMethod.JSON: + headers['Content-Type'] = 'application/json' + + if attachments: + payload['attachments'] = attachments + + payload = dumps(payload) + if self.__tags: payload['tag'] = self.__tags @@ -242,13 +326,13 @@ class NotifyAppriseAPI(NotifyBase): url += ':%d' % self.port fullpath = self.fullpath.strip('/') - url += '/{}/'.format(fullpath) if fullpath else '/' - url += 'notify/{}'.format(self.token) + url += '{}'.format('/' + fullpath) if fullpath else '' + url += '/notify/{}'.format(self.token) # Some entries can not be over-ridden headers.update({ - 'User-Agent': self.app_id, - 'Content-Type': 'application/json', + # Our response to be in JSON format always + 'Accept': 'application/json', # Pass our Source UUID4 Identifier 'X-Apprise-ID': self.asset._uid, # Pass our current recursion count to our upstream server @@ -266,9 +350,10 @@ class NotifyAppriseAPI(NotifyBase): try: r = requests.post( url, - data=dumps(payload), + data=payload, headers=headers, auth=auth, + files=files if files else None, verify=self.verify_certificate, timeout=self.request_timeout, ) @@ -290,7 +375,8 @@ class NotifyAppriseAPI(NotifyBase): return False else: - self.logger.info('Sent Apprise API notification.') + self.logger.info( + 'Sent Apprise API notification; method=%s.', self.method) except requests.RequestException as e: self.logger.warning( @@ -301,6 +387,18 @@ class NotifyAppriseAPI(NotifyBase): # Return; we're done return False + except (OSError, IOError) as e: + self.logger.warning( + 'An I/O error occurred while reading one of the ' + 'attached files.') + self.logger.debug('I/O Exception: %s' % str(e)) + return False + + finally: + for file in files: + # Ensure all files are closed + file[1][1].close() + return True @staticmethod @@ -377,4 +475,9 @@ class NotifyAppriseAPI(NotifyBase): # re-assemble our full path results['fullpath'] = 
'/'.join(entries) + # Set method if specified + if 'method' in results['qsd'] and len(results['qsd']['method']): + results['method'] = \ + NotifyAppriseAPI.unquote(results['qsd']['method']) + return results diff --git a/lib/apprise/plugins/NotifyBark.py b/lib/apprise/plugins/NotifyBark.py index 923788de..edef82bd 100644 --- a/lib/apprise/plugins/NotifyBark.py +++ b/lib/apprise/plugins/NotifyBark.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -127,10 +123,10 @@ class NotifyBark(NotifyBase): # Define object templates templates = ( + '{schema}://{host}/{targets}', '{schema}://{host}:{port}/{targets}', '{schema}://{user}:{password}@{host}/{targets}', '{schema}://{user}:{password}@{host}:{port}/{targets}', - '{schema}://{user}:{password}@{host}/{targets}', ) # Define our template arguments @@ -163,6 +159,7 @@ class NotifyBark(NotifyBase): 'targets': { 'name': _('Targets'), 'type': 'list:string', + 'required': True, }, }) @@ -280,7 +277,7 @@ class NotifyBark(NotifyBase): # error tracking (used for function return) has_error = False - if not len(self.targets): + if not self.targets: # We have nothing to notify; we're done self.logger.warning('There are no Bark devices to notify') return False @@ -456,6 +453,12 @@ class NotifyBark(NotifyBase): 
params=NotifyBark.urlencode(params), ) + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + return len(self.targets) + @staticmethod def parse_url(url): """ diff --git a/lib/apprise/plugins/NotifyBase.py b/lib/apprise/plugins/NotifyBase.py index 1b07baa7..5138c15c 100644 --- a/lib/apprise/plugins/NotifyBase.py +++ b/lib/apprise/plugins/NotifyBase.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -139,6 +135,18 @@ class NotifyBase(URLBase): # Default Overflow Mode overflow_mode = OverflowMode.UPSTREAM + # Support Attachments; this defaults to being disabled. + # Since apprise allows you to send attachments without a body or title + # defined, by letting Apprise know the plugin won't support attachments + # up front, it can quickly pass over and ignore calls to these end points. + + # You must set this to true if your application can handle attachments. + # You must also consider a flow change to your notification if this is set + # to True as well as now there will be cases where both the body and title + # may not be set. There will never be a case where a body, or attachment + # isn't set in the same call to your notify() function. 
+ attachment_support = False + # Default Title HTML Tagging # When a title is specified for a notification service that doesn't accept # titles, by default apprise tries to give a plesant view and convert the @@ -316,7 +324,7 @@ class NotifyBase(URLBase): the_cors = (do_send(**kwargs2) for kwargs2 in send_calls) return all(await asyncio.gather(*the_cors)) - def _build_send_calls(self, body, title=None, + def _build_send_calls(self, body=None, title=None, notify_type=NotifyType.INFO, overflow=None, attach=None, body_format=None, **kwargs): """ @@ -339,6 +347,28 @@ class NotifyBase(URLBase): # bad attachments raise + # Handle situations where the body is None + body = '' if not body else body + + elif not (body or attach): + # If there is not an attachment at the very least, a body must be + # present + msg = "No message body or attachment was specified." + self.logger.warning(msg) + raise TypeError(msg) + + if not body and not self.attachment_support: + # If no body was specified, then we know that an attachment + # was. This is logic checked earlier in the code. + # + # Knowing this, if the plugin itself doesn't support sending + # attachments, there is nothing further to do here, just move + # along. + msg = f"{self.service_name} does not support attachments; " \ + " service skipped" + self.logger.warning(msg) + raise TypeError(msg) + # Handle situations where the title is None title = '' if not title else title diff --git a/lib/apprise/plugins/NotifyBoxcar.py b/lib/apprise/plugins/NotifyBoxcar.py index a613e46e..9d3be6ae 100644 --- a/lib/apprise/plugins/NotifyBoxcar.py +++ b/lib/apprise/plugins/NotifyBoxcar.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. 
Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -46,6 +42,7 @@ except ImportError: from .NotifyBase import NotifyBase from ..URLBase import PrivacyMode from ..utils import parse_bool +from ..utils import parse_list from ..utils import validate_regex from ..common import NotifyType from ..common import NotifyImageSize @@ -58,7 +55,7 @@ DEFAULT_TAG = '@all' # list of tagged devices that the notification need to be send to, and a # boolean operator (‘and’ / ‘or’) that defines the criteria to match devices # against those tags. -IS_TAG = re.compile(r'^[@](?P[A-Z0-9]{1,63})$', re.I) +IS_TAG = re.compile(r'^[@]?(?P[A-Z0-9]{1,63})$', re.I) # Device tokens are only referenced when developing. 
# It's not likely you'll send a message directly to a device, but if you do; @@ -150,6 +147,12 @@ class NotifyBoxcar(NotifyBase): 'to': { 'alias_of': 'targets', }, + 'access': { + 'alias_of': 'access_key', + }, + 'secret': { + 'alias_of': 'secret_key', + }, }) def __init__(self, access, secret, targets=None, include_image=True, @@ -160,7 +163,7 @@ class NotifyBoxcar(NotifyBase): super().__init__(**kwargs) # Initialize tag list - self.tags = list() + self._tags = list() # Initialize device_token list self.device_tokens = list() @@ -184,29 +187,27 @@ class NotifyBoxcar(NotifyBase): raise TypeError(msg) if not targets: - self.tags.append(DEFAULT_TAG) + self._tags.append(DEFAULT_TAG) targets = [] - elif isinstance(targets, str): - targets = [x for x in filter(bool, TAGS_LIST_DELIM.split( - targets, - ))] - # Validate targets and drop bad ones: - for target in targets: - if IS_TAG.match(target): + for target in parse_list(targets): + result = IS_TAG.match(target) + if result: # store valid tag/alias - self.tags.append(IS_TAG.match(target).group('name')) + self._tags.append(result.group('name')) + continue - elif IS_DEVICETOKEN.match(target): + result = IS_DEVICETOKEN.match(target) + if result: # store valid device self.device_tokens.append(target) + continue - else: - self.logger.warning( - 'Dropped invalid tag/alias/device_token ' - '({}) specified.'.format(target), - ) + self.logger.warning( + 'Dropped invalid tag/alias/device_token ' + '({}) specified.'.format(target), + ) # Track whether or not we want to send an image with our notification # or not. 
@@ -235,11 +236,10 @@ class NotifyBoxcar(NotifyBase): if title: payload['aps']['@title'] = title - if body: - payload['aps']['alert'] = body + payload['aps']['alert'] = body - if self.tags: - payload['tags'] = {'or': self.tags} + if self._tags: + payload['tags'] = {'or': self._tags} if self.device_tokens: payload['device_tokens'] = self.device_tokens @@ -341,10 +341,18 @@ class NotifyBoxcar(NotifyBase): self.secret, privacy, mode=PrivacyMode.Secret, safe=''), targets='/'.join([ NotifyBoxcar.quote(x, safe='') for x in chain( - self.tags, self.device_tokens) if x != DEFAULT_TAG]), + self._tags, self.device_tokens) if x != DEFAULT_TAG]), params=NotifyBoxcar.urlencode(params), ) + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + targets = len(self._tags) + len(self.device_tokens) + # DEFAULT_TAG is set if no tokens/tags are otherwise set + return targets if targets > 0 else 1 + @staticmethod def parse_url(url): """ @@ -374,6 +382,16 @@ class NotifyBoxcar(NotifyBase): results['targets'] += \ NotifyBoxcar.parse_list(results['qsd'].get('to')) + # Access + if 'access' in results['qsd'] and results['qsd']['access']: + results['access'] = NotifyBoxcar.unquote( + results['qsd']['access'].strip()) + + # Secret + if 'secret' in results['qsd'] and results['qsd']['secret']: + results['secret'] = NotifyBoxcar.unquote( + results['qsd']['secret'].strip()) + # Include images with our message results['include_image'] = \ parse_bool(results['qsd'].get('image', True)) diff --git a/lib/apprise/plugins/NotifyBulkSMS.py b/lib/apprise/plugins/NotifyBulkSMS.py index 257c1def..cf82a87a 100644 --- a/lib/apprise/plugins/NotifyBulkSMS.py +++ b/lib/apprise/plugins/NotifyBulkSMS.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. 
# Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -121,11 +117,13 @@ class NotifyBulkSMS(NotifyBase): 'user': { 'name': _('User Name'), 'type': 'string', + 'required': True, }, 'password': { 'name': _('Password'), 'type': 'string', 'private': True, + 'required': True, }, 'target_phone': { 'name': _('Target Phone No'), @@ -144,6 +142,7 @@ class NotifyBulkSMS(NotifyBase): 'targets': { 'name': _('Targets'), 'type': 'list:string', + 'required': True, }, }) @@ -414,6 +413,24 @@ class NotifyBulkSMS(NotifyBase): for x in self.groups])), params=NotifyBulkSMS.urlencode(params)) + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + + # + # Factor batch into calculation + # + # Note: Groups always require a separate request (and can not be + # included in batch calculations) + batch_size = 1 if not self.batch else self.default_batch_size + targets = len(self.targets) + if batch_size > 1: + targets = int(targets / batch_size) + \ + (1 if targets % batch_size else 0) + + return targets + len(self.groups) + @staticmethod def parse_url(url): """ diff --git a/lib/apprise/plugins/NotifyBurstSMS.py b/lib/apprise/plugins/NotifyBurstSMS.py new file mode 100644 index 00000000..59219b3d --- /dev/null +++ b/lib/apprise/plugins/NotifyBurstSMS.py @@ -0,0 +1,460 @@ +# -*- coding: utf-8 -*- +# BSD 2-Clause License +# +# Apprise - Push Notification Library. 
+# Copyright (c) 2023, Chris Caron +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# 1. Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# +# 2. Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+ +# Sign-up with https://burstsms.com/ +# +# Define your API Secret here and acquire your API Key +# - https://can.transmitsms.com/profile +# +import requests + +from .NotifyBase import NotifyBase +from ..URLBase import PrivacyMode +from ..common import NotifyType +from ..utils import is_phone_no +from ..utils import parse_phone_no +from ..utils import parse_bool +from ..utils import validate_regex +from ..AppriseLocale import gettext_lazy as _ + + +class BurstSMSCountryCode: + # Australia + AU = 'au' + # New Zeland + NZ = 'nz' + # United Kingdom + UK = 'gb' + # United States + US = 'us' + + +BURST_SMS_COUNTRY_CODES = ( + BurstSMSCountryCode.AU, + BurstSMSCountryCode.NZ, + BurstSMSCountryCode.UK, + BurstSMSCountryCode.US, +) + + +class NotifyBurstSMS(NotifyBase): + """ + A wrapper for Burst SMS Notifications + """ + + # The default descriptive name associated with the Notification + service_name = 'Burst SMS' + + # The services URL + service_url = 'https://burstsms.com/' + + # The default protocol + secure_protocol = 'burstsms' + + # The maximum amount of SMS Messages that can reside within a single + # batch transfer based on: + # https://developer.transmitsms.com/#74911cf8-dec6-4319-a499-7f535a7fd08c + default_batch_size = 500 + + # A URL that takes you to the setup/help of the specific protocol + setup_url = 'https://github.com/caronc/apprise/wiki/Notify_burst_sms' + + # Burst SMS uses the http protocol with JSON requests + notify_url = 'https://api.transmitsms.com/send-sms.json' + + # The maximum length of the body + body_maxlen = 160 + + # A title can not be used for SMS Messages. Setting this to zero will + # cause any title (if defined) to get placed into the message body. 
+ title_maxlen = 0 + + # Define object templates + templates = ( + '{schema}://{apikey}:{secret}@{sender_id}/{targets}', + ) + + # Define our template tokens + template_tokens = dict(NotifyBase.template_tokens, **{ + 'apikey': { + 'name': _('API Key'), + 'type': 'string', + 'required': True, + 'regex': (r'^[a-z0-9]+$', 'i'), + 'private': True, + }, + 'secret': { + 'name': _('API Secret'), + 'type': 'string', + 'private': True, + 'required': True, + 'regex': (r'^[a-z0-9]+$', 'i'), + }, + 'sender_id': { + 'name': _('Sender ID'), + 'type': 'string', + 'required': True, + 'map_to': 'source', + }, + 'target_phone': { + 'name': _('Target Phone No'), + 'type': 'string', + 'prefix': '+', + 'regex': (r'^[0-9\s)(+-]+$', 'i'), + 'map_to': 'targets', + }, + 'targets': { + 'name': _('Targets'), + 'type': 'list:string', + 'required': True, + }, + }) + + # Define our template arguments + template_args = dict(NotifyBase.template_args, **{ + 'to': { + 'alias_of': 'targets', + }, + 'from': { + 'alias_of': 'sender_id', + }, + 'key': { + 'alias_of': 'apikey', + }, + 'secret': { + 'alias_of': 'secret', + }, + 'country': { + 'name': _('Country'), + 'type': 'choice:string', + 'values': BURST_SMS_COUNTRY_CODES, + 'default': BurstSMSCountryCode.US, + }, + # Validity + # Expire a message send if it is undeliverable (defined in minutes) + # If set to Zero (0); this is the default and sets the max validity + # period + 'validity': { + 'name': _('validity'), + 'type': 'int', + 'default': 0 + }, + 'batch': { + 'name': _('Batch Mode'), + 'type': 'bool', + 'default': False, + }, + }) + + def __init__(self, apikey, secret, source, targets=None, country=None, + validity=None, batch=None, **kwargs): + """ + Initialize Burst SMS Object + """ + super().__init__(**kwargs) + + # API Key (associated with project) + self.apikey = validate_regex( + apikey, *self.template_tokens['apikey']['regex']) + if not self.apikey: + msg = 'An invalid Burst SMS API Key ' \ + '({}) was specified.'.format(apikey) + 
self.logger.warning(msg) + raise TypeError(msg) + + # API Secret (associated with project) + self.secret = validate_regex( + secret, *self.template_tokens['secret']['regex']) + if not self.secret: + msg = 'An invalid Burst SMS API Secret ' \ + '({}) was specified.'.format(secret) + self.logger.warning(msg) + raise TypeError(msg) + + if not country: + self.country = self.template_args['country']['default'] + + else: + self.country = country.lower().strip() + if country not in BURST_SMS_COUNTRY_CODES: + msg = 'An invalid Burst SMS country ' \ + '({}) was specified.'.format(country) + self.logger.warning(msg) + raise TypeError(msg) + + # Set our Validity + self.validity = self.template_args['validity']['default'] + if validity: + try: + self.validity = int(validity) + + except (ValueError, TypeError): + msg = 'The Burst SMS Validity specified ({}) is invalid.'\ + .format(validity) + self.logger.warning(msg) + raise TypeError(msg) + + # Prepare Batch Mode Flag + self.batch = self.template_args['batch']['default'] \ + if batch is None else batch + + # The Sender ID + self.source = validate_regex(source) + if not self.source: + msg = 'The Account Sender ID specified ' \ + '({}) is invalid.'.format(source) + self.logger.warning(msg) + raise TypeError(msg) + + # Parse our targets + self.targets = list() + + for target in parse_phone_no(targets): + # Validate targets and drop bad ones: + result = is_phone_no(target) + if not result: + self.logger.warning( + 'Dropped invalid phone # ' + '({}) specified.'.format(target), + ) + continue + + # store valid phone number + self.targets.append(result['full']) + + return + + def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): + """ + Perform Burst SMS Notification + """ + + if not self.targets: + self.logger.warning( + 'There are no valid Burst SMS targets to notify.') + return False + + # error tracking (used for function return) + has_error = False + + # Prepare our headers + headers = { + 'User-Agent': 
self.app_id, + 'Accept': 'application/json', + } + + # Prepare our authentication + auth = (self.apikey, self.secret) + + # Prepare our payload + payload = { + 'countrycode': self.country, + 'message': body, + + # Sender ID + 'from': self.source, + + # The to gets populated in the loop below + 'to': None, + } + + # Send in batches if identified to do so + batch_size = 1 if not self.batch else self.default_batch_size + + # Create a copy of the targets list + targets = list(self.targets) + + for index in range(0, len(targets), batch_size): + + # Prepare our user + payload['to'] = ','.join(self.targets[index:index + batch_size]) + + # Some Debug Logging + self.logger.debug('Burst SMS POST URL: {} (cert_verify={})'.format( + self.notify_url, self.verify_certificate)) + self.logger.debug('Burst SMS Payload: {}' .format(payload)) + + # Always call throttle before any remote server i/o is made + self.throttle() + + try: + r = requests.post( + self.notify_url, + data=payload, + headers=headers, + auth=auth, + verify=self.verify_certificate, + timeout=self.request_timeout, + ) + + if r.status_code != requests.codes.ok: + # We had a problem + status_str = \ + NotifyBurstSMS.http_response_code_lookup( + r.status_code) + + self.logger.warning( + 'Failed to send Burst SMS notification to {} ' + 'target(s): {}{}error={}.'.format( + len(self.targets[index:index + batch_size]), + status_str, + ', ' if status_str else '', + r.status_code)) + + self.logger.debug( + 'Response Details:\r\n{}'.format(r.content)) + + # Mark our failure + has_error = True + continue + + else: + self.logger.info( + 'Sent Burst SMS notification to %d target(s).' % + len(self.targets[index:index + batch_size])) + + except requests.RequestException as e: + self.logger.warning( + 'A Connection error occurred sending Burst SMS ' + 'notification to %d target(s).' 
% + len(self.targets[index:index + batch_size])) + self.logger.debug('Socket Exception: %s' % str(e)) + + # Mark our failure + has_error = True + continue + + return not has_error + + def url(self, privacy=False, *args, **kwargs): + """ + Returns the URL built dynamically based on specified arguments. + """ + + # Define any URL parameters + params = { + 'country': self.country, + 'batch': 'yes' if self.batch else 'no', + } + + if self.validity: + params['validity'] = str(self.validity) + + # Extend our parameters + params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) + + return '{schema}://{key}:{secret}@{source}/{targets}/?{params}'.format( + schema=self.secure_protocol, + key=self.pprint(self.apikey, privacy, safe=''), + secret=self.pprint( + self.secret, privacy, mode=PrivacyMode.Secret, safe=''), + source=NotifyBurstSMS.quote(self.source, safe=''), + targets='/'.join( + [NotifyBurstSMS.quote(x, safe='') for x in self.targets]), + params=NotifyBurstSMS.urlencode(params)) + + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + # + # Factor batch into calculation + # + batch_size = 1 if not self.batch else self.default_batch_size + targets = len(self.targets) + if batch_size > 1: + targets = int(targets / batch_size) + \ + (1 if targets % batch_size else 0) + + return targets if targets > 0 else 1 + + @staticmethod + def parse_url(url): + """ + Parses the URL and returns enough arguments that can allow + us to re-instantiate this object. 
+ + """ + results = NotifyBase.parse_url(url, verify_host=False) + if not results: + # We're done early as we couldn't load the results + return results + + # The hostname is our source (Sender ID) + results['source'] = NotifyBurstSMS.unquote(results['host']) + + # Get any remaining targets + results['targets'] = NotifyBurstSMS.split_path(results['fullpath']) + + # Get our account_side and auth_token from the user/pass config + results['apikey'] = NotifyBurstSMS.unquote(results['user']) + results['secret'] = NotifyBurstSMS.unquote(results['password']) + + # API Key + if 'key' in results['qsd'] and len(results['qsd']['key']): + # Extract the API Key from an argument + results['apikey'] = \ + NotifyBurstSMS.unquote(results['qsd']['key']) + + # API Secret + if 'secret' in results['qsd'] and len(results['qsd']['secret']): + # Extract the API Secret from an argument + results['secret'] = \ + NotifyBurstSMS.unquote(results['qsd']['secret']) + + # Support the 'from' and 'source' variable so that we can support + # targets this way too. 
+ # The 'from' makes it easier to use yaml configuration + if 'from' in results['qsd'] and len(results['qsd']['from']): + results['source'] = \ + NotifyBurstSMS.unquote(results['qsd']['from']) + if 'source' in results['qsd'] and len(results['qsd']['source']): + results['source'] = \ + NotifyBurstSMS.unquote(results['qsd']['source']) + + # Support country + if 'country' in results['qsd'] and len(results['qsd']['country']): + results['country'] = \ + NotifyBurstSMS.unquote(results['qsd']['country']) + + # Support validity value + if 'validity' in results['qsd'] and len(results['qsd']['validity']): + results['validity'] = \ + NotifyBurstSMS.unquote(results['qsd']['validity']) + + # Get Batch Mode Flag + if 'batch' in results['qsd'] and len(results['qsd']['batch']): + results['batch'] = parse_bool(results['qsd']['batch']) + + # Support the 'to' variable so that we can support rooms this way too + # The 'to' makes it easier to use yaml configuration + if 'to' in results['qsd'] and len(results['qsd']['to']): + results['targets'] += \ + NotifyBurstSMS.parse_phone_no(results['qsd']['to']) + + return results diff --git a/lib/apprise/plugins/NotifyClickSend.py b/lib/apprise/plugins/NotifyClickSend.py index c93201be..670e74e8 100644 --- a/lib/apprise/plugins/NotifyClickSend.py +++ b/lib/apprise/plugins/NotifyClickSend.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -288,6 +284,21 @@ class NotifyClickSend(NotifyBase): params=NotifyClickSend.urlencode(params), ) + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + # + # Factor batch into calculation + # + batch_size = 1 if not self.batch else self.default_batch_size + targets = len(self.targets) + if batch_size > 1: + targets = int(targets / batch_size) + \ + (1 if targets % batch_size else 0) + + return targets + @staticmethod def parse_url(url): """ diff --git a/lib/apprise/plugins/NotifyD7Networks.py b/lib/apprise/plugins/NotifyD7Networks.py index 7b17f848..3e7787da 100644 --- a/lib/apprise/plugins/NotifyD7Networks.py +++ b/lib/apprise/plugins/NotifyD7Networks.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -114,6 +110,7 @@ class NotifyD7Networks(NotifyBase): 'targets': { 'name': _('Targets'), 'type': 'list:string', + 'required': True, }, }) @@ -357,6 +354,15 @@ class NotifyD7Networks(NotifyBase): [NotifyD7Networks.quote(x, safe='') for x in self.targets]), params=NotifyD7Networks.urlencode(params)) + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + # + # Factor batch into calculation + # + return len(self.targets) if not self.batch else 1 + @staticmethod def parse_url(url): """ diff --git a/lib/apprise/plugins/NotifyDBus.py b/lib/apprise/plugins/NotifyDBus.py index 336dfac4..46f8b9d0 100644 --- a/lib/apprise/plugins/NotifyDBus.py +++ b/lib/apprise/plugins/NotifyDBus.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE diff --git a/lib/apprise/plugins/NotifyDapnet.py b/lib/apprise/plugins/NotifyDapnet.py index bf1ff333..5848b688 100644 --- a/lib/apprise/plugins/NotifyDapnet.py +++ b/lib/apprise/plugins/NotifyDapnet.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -350,6 +346,21 @@ class NotifyDapnet(NotifyBase): params=NotifyDapnet.urlencode(params), ) + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + # + # Factor batch into calculation + # + batch_size = 1 if not self.batch else self.default_batch_size + targets = len(self.targets) + if batch_size > 1: + targets = int(targets / batch_size) + \ + (1 if targets % batch_size else 0) + + return targets + @staticmethod def parse_url(url): """ diff --git a/lib/apprise/plugins/NotifyDingTalk.py b/lib/apprise/plugins/NotifyDingTalk.py index 474d4c88..91bfcd6f 100644 --- a/lib/apprise/plugins/NotifyDingTalk.py +++ b/lib/apprise/plugins/NotifyDingTalk.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push 
Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -103,13 +99,18 @@ class NotifyDingTalk(NotifyBase): 'regex': (r'^[a-z0-9]+$', 'i'), }, 'secret': { - 'name': _('Token'), + 'name': _('Secret'), 'type': 'string', 'private': True, 'regex': (r'^[a-z0-9]+$', 'i'), }, - 'targets': { + 'target_phone_no': { 'name': _('Target Phone No'), + 'type': 'string', + 'map_to': 'targets', + }, + 'targets': { + 'name': _('Targets'), 'type': 'list:string', }, }) @@ -309,6 +310,13 @@ class NotifyDingTalk(NotifyBase): [NotifyDingTalk.quote(x, safe='') for x in self.targets]), args=NotifyDingTalk.urlencode(args)) + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + targets = len(self.targets) + return targets if targets > 0 else 1 + @staticmethod def parse_url(url): """ diff --git a/lib/apprise/plugins/NotifyDiscord.py b/lib/apprise/plugins/NotifyDiscord.py index 78cd3265..f87b6694 100644 --- a/lib/apprise/plugins/NotifyDiscord.py +++ b/lib/apprise/plugins/NotifyDiscord.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. 
Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -50,6 +46,9 @@ import re import requests from json import dumps +from datetime import timedelta +from datetime import datetime +from datetime import timezone from .NotifyBase import NotifyBase from ..common import NotifyImageSize @@ -81,9 +80,23 @@ class NotifyDiscord(NotifyBase): # Discord Webhook notify_url = 'https://discord.com/api/webhooks' + # Support attachments + attachment_support = True + # Allows the user to specify the NotifyImageSize object image_size = NotifyImageSize.XY_256 + # Discord is kind enough to return how many more requests we're allowed to + # continue to make within it's header response as: + # X-RateLimit-Reset: The epoc time (in seconds) we can expect our + # rate-limit to be reset. + # X-RateLimit-Remaining: an integer identifying how many requests we're + # still allow to make. + request_rate_per_sec = 0 + + # Taken right from google.auth.helpers: + clock_skew = timedelta(seconds=10) + # The maximum allowable characters allowed in the body per message body_maxlen = 2000 @@ -135,6 +148,13 @@ class NotifyDiscord(NotifyBase): 'name': _('Avatar URL'), 'type': 'string', }, + 'href': { + 'name': _('URL'), + 'type': 'string', + }, + 'url': { + 'alias_of': 'href', + }, # Send a message to the specified thread within a webhook's channel. # The thread will automatically be unarchived. 
'thread': { @@ -166,7 +186,8 @@ class NotifyDiscord(NotifyBase): def __init__(self, webhook_id, webhook_token, tts=False, avatar=True, footer=False, footer_logo=True, include_image=False, - fields=True, avatar_url=None, thread=None, **kwargs): + fields=True, avatar_url=None, href=None, thread=None, + **kwargs): """ Initialize Discord Object @@ -215,6 +236,15 @@ class NotifyDiscord(NotifyBase): # dynamically generated avatar url images self.avatar_url = avatar_url + # A URL to have the title link to + self.href = href + + # For Tracking Purposes + self.ratelimit_reset = datetime.now(timezone.utc).replace(tzinfo=None) + + # Default to 1.0 + self.ratelimit_remaining = 1.0 + return def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, @@ -235,64 +265,6 @@ class NotifyDiscord(NotifyBase): # Acquire image_url image_url = self.image_url(notify_type) - # our fields variable - fields = [] - - if self.notify_format == NotifyFormat.MARKDOWN: - # Use embeds for payload - payload['embeds'] = [{ - 'author': { - 'name': self.app_id, - 'url': self.app_url, - }, - 'title': title, - 'description': body, - - # Our color associated with our notification - 'color': self.color(notify_type, int), - }] - - if self.footer: - # Acquire logo URL - logo_url = self.image_url(notify_type, logo=True) - - # Set Footer text to our app description - payload['embeds'][0]['footer'] = { - 'text': self.app_desc, - } - - if self.footer_logo and logo_url: - payload['embeds'][0]['footer']['icon_url'] = logo_url - - if self.include_image and image_url: - payload['embeds'][0]['thumbnail'] = { - 'url': image_url, - 'height': 256, - 'width': 256, - } - - if self.fields: - # Break titles out so that we can sort them in embeds - description, fields = self.extract_markdown_sections(body) - - # Swap first entry for description - payload['embeds'][0]['description'] = description - if fields: - # Apply our additional parsing for a better presentation - payload['embeds'][0]['fields'] = \ - 
fields[:self.discord_max_fields] - - # Remove entry from head of fields - fields = fields[self.discord_max_fields:] - - else: - # not markdown - payload['content'] = \ - body if not title else "{}\r\n{}".format(title, body) - - if self.thread_id: - payload['thread_id'] = self.thread_id - if self.avatar and (image_url or self.avatar_url): payload['avatar_url'] = \ self.avatar_url if self.avatar_url else image_url @@ -301,21 +273,84 @@ class NotifyDiscord(NotifyBase): # Optionally override the default username of the webhook payload['username'] = self.user - if not self._send(payload): - # We failed to post our message - return False + # Associate our thread_id with our message + params = {'thread_id': self.thread_id} if self.thread_id else None - # Process any remaining fields IF set - if fields: - payload['embeds'][0]['description'] = '' - for i in range(0, len(fields), self.discord_max_fields): - payload['embeds'][0]['fields'] = \ - fields[i:i + self.discord_max_fields] - if not self._send(payload): - # We failed to post our message - return False + if body: + # our fields variable + fields = [] - if attach: + if self.notify_format == NotifyFormat.MARKDOWN: + # Use embeds for payload + payload['embeds'] = [{ + 'author': { + 'name': self.app_id, + 'url': self.app_url, + }, + 'title': title, + 'description': body, + + # Our color associated with our notification + 'color': self.color(notify_type, int), + }] + + if self.href: + payload['embeds'][0]['url'] = self.href + + if self.footer: + # Acquire logo URL + logo_url = self.image_url(notify_type, logo=True) + + # Set Footer text to our app description + payload['embeds'][0]['footer'] = { + 'text': self.app_desc, + } + + if self.footer_logo and logo_url: + payload['embeds'][0]['footer']['icon_url'] = logo_url + + if self.include_image and image_url: + payload['embeds'][0]['thumbnail'] = { + 'url': image_url, + 'height': 256, + 'width': 256, + } + + if self.fields: + # Break titles out so that we can sort them in 
embeds + description, fields = self.extract_markdown_sections(body) + + # Swap first entry for description + payload['embeds'][0]['description'] = description + if fields: + # Apply our additional parsing for a better + # presentation + payload['embeds'][0]['fields'] = \ + fields[:self.discord_max_fields] + + # Remove entry from head of fields + fields = fields[self.discord_max_fields:] + + else: + # not markdown + payload['content'] = \ + body if not title else "{}\r\n{}".format(title, body) + + if not self._send(payload, params=params): + # We failed to post our message + return False + + # Process any remaining fields IF set + if fields: + payload['embeds'][0]['description'] = '' + for i in range(0, len(fields), self.discord_max_fields): + payload['embeds'][0]['fields'] = \ + fields[i:i + self.discord_max_fields] + if not self._send(payload): + # We failed to post our message + return False + + if attach and self.attachment_support: # Update our payload; the idea is to preserve it's other detected # and assigned values for re-use here too payload.update({ @@ -338,14 +373,15 @@ class NotifyDiscord(NotifyBase): for attachment in attach: self.logger.info( 'Posting Discord Attachment {}'.format(attachment.name)) - if not self._send(payload, attach=attachment): + if not self._send(payload, params=params, attach=attachment): # We failed to post our message return False # Otherwise return return True - def _send(self, payload, attach=None, **kwargs): + def _send(self, payload, attach=None, params=None, rate_limit=1, + **kwargs): """ Wrapper to the requests (post) object """ @@ -367,8 +403,25 @@ class NotifyDiscord(NotifyBase): )) self.logger.debug('Discord Payload: %s' % str(payload)) - # Always call throttle before any remote server i/o is made - self.throttle() + # By default set wait to None + wait = None + + if self.ratelimit_remaining <= 0.0: + # Determine how long we should wait for or if we should wait at + # all. 
This isn't fool-proof because we can't be sure the client + # time (calling this script) is completely synced up with the + # Discord server. One would hope we're on NTP and our clocks are + # the same allowing this to role smoothly: + + now = datetime.now(timezone.utc).replace(tzinfo=None) + if now < self.ratelimit_reset: + # We need to throttle for the difference in seconds + wait = abs( + (self.ratelimit_reset - now + self.clock_skew) + .total_seconds()) + + # Always call throttle before any remote server i/o is made; + self.throttle(wait=wait) # Perform some simple error checking if isinstance(attach, AttachBase): @@ -396,12 +449,29 @@ class NotifyDiscord(NotifyBase): r = requests.post( notify_url, + params=params, data=payload if files else dumps(payload), headers=headers, files=files, verify=self.verify_certificate, timeout=self.request_timeout, ) + + # Handle rate limiting (if specified) + try: + # Store our rate limiting (if provided) + self.ratelimit_remaining = \ + float(r.headers.get( + 'X-RateLimit-Remaining')) + self.ratelimit_reset = datetime.fromtimestamp( + int(r.headers.get('X-RateLimit-Reset')), + timezone.utc).replace(tzinfo=None) + + except (TypeError, ValueError): + # This is returned if we could not retrieve this + # information gracefully accept this state and move on + pass + if r.status_code not in ( requests.codes.ok, requests.codes.no_content): @@ -409,6 +479,20 @@ class NotifyDiscord(NotifyBase): status_str = \ NotifyBase.http_response_code_lookup(r.status_code) + if r.status_code == requests.codes.too_many_requests \ + and rate_limit > 0: + + # handle rate limiting + self.logger.warning( + 'Discord rate limiting in effect; ' + 'blocking for %.2f second(s)', + self.ratelimit_remaining) + + # Try one more time before failing + return self._send( + payload=payload, attach=attach, params=params, + rate_limit=rate_limit - 1, **kwargs) + self.logger.warning( 'Failed to send {}to Discord notification: ' '{}{}error={}.'.format( @@ -466,6 +550,9 
@@ class NotifyDiscord(NotifyBase): if self.avatar_url: params['avatar_url'] = self.avatar_url + if self.href: + params['href'] = self.href + if self.thread_id: params['thread'] = self.thread_id @@ -537,10 +624,23 @@ class NotifyDiscord(NotifyBase): results['avatar_url'] = \ NotifyDiscord.unquote(results['qsd']['avatar_url']) + # Extract url if it was specified + if 'href' in results['qsd']: + results['href'] = \ + NotifyDiscord.unquote(results['qsd']['href']) + + elif 'url' in results['qsd']: + results['href'] = \ + NotifyDiscord.unquote(results['qsd']['url']) + # Markdown is implied + results['format'] = NotifyFormat.MARKDOWN + # Extract thread id if it was specified if 'thread' in results['qsd']: results['thread'] = \ NotifyDiscord.unquote(results['qsd']['thread']) + # Markdown is implied + results['format'] = NotifyFormat.MARKDOWN return results diff --git a/lib/apprise/plugins/NotifyEmail.py b/lib/apprise/plugins/NotifyEmail.py index 8698c113..db70c8ef 100644 --- a/lib/apprise/plugins/NotifyEmail.py +++ b/lib/apprise/plugins/NotifyEmail.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -43,6 +39,7 @@ from email import charset from socket import error as SocketError from datetime import datetime +from datetime import timezone from .NotifyBase import NotifyBase from ..URLBase import PrivacyMode @@ -340,6 +337,9 @@ class NotifyEmail(NotifyBase): # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_email' + # Support attachments + attachment_support = True + # Default Notify Format notify_format = NotifyFormat.HTML @@ -384,8 +384,13 @@ class NotifyEmail(NotifyBase): 'min': 1, 'max': 65535, }, + 'target_email': { + 'name': _('Target Email'), + 'type': 'string', + 'map_to': 'targets', + }, 'targets': { - 'name': _('Target Emails'), + 'name': _('Targets'), 'type': 'list:string', }, }) @@ -764,7 +769,7 @@ class NotifyEmail(NotifyBase): else: base = MIMEText(body, 'plain', 'utf-8') - if attach: + if attach and self.attachment_support: mixed = MIMEMultipart("mixed") mixed.attach(base) # Now store our attachments @@ -805,7 +810,8 @@ class NotifyEmail(NotifyBase): base['To'] = formataddr((to_name, to_addr), charset='utf-8') base['Message-ID'] = make_msgid(domain=self.smtp_host) base['Date'] = \ - datetime.utcnow().strftime("%a, %d %b %Y %H:%M:%S +0000") + datetime.now(timezone.utc)\ + .strftime("%a, %d %b %Y %H:%M:%S +0000") base['X-Application'] = self.app_id if cc: @@ -999,6 +1005,13 @@ class NotifyEmail(NotifyBase): params=NotifyEmail.urlencode(params), ) + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + targets = len(self.targets) + return targets if targets > 0 else 1 + @staticmethod def parse_url(url): """ @@ -1023,6 +1036,10 @@ class NotifyEmail(NotifyBase): # add one to ourselves 
results['targets'] = NotifyEmail.split_path(results['fullpath']) + # Attempt to detect 'to' email address + if 'to' in results['qsd'] and len(results['qsd']['to']): + results['targets'].append(results['qsd']['to']) + # Attempt to detect 'from' email address if 'from' in results['qsd'] and len(results['qsd']['from']): from_addr = NotifyEmail.unquote(results['qsd']['from']) @@ -1041,10 +1058,6 @@ class NotifyEmail(NotifyBase): # Extract from name to associate with from address from_addr = NotifyEmail.unquote(results['qsd']['name']) - # Attempt to detect 'to' email address - if 'to' in results['qsd'] and len(results['qsd']['to']): - results['targets'].append(results['qsd']['to']) - # Store SMTP Host if specified if 'smtp' in results['qsd'] and len(results['qsd']['smtp']): # Extract the smtp server diff --git a/lib/apprise/plugins/NotifyEmby.py b/lib/apprise/plugins/NotifyEmby.py index 23d4c611..99f3a9ab 100644 --- a/lib/apprise/plugins/NotifyEmby.py +++ b/lib/apprise/plugins/NotifyEmby.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE diff --git a/lib/apprise/plugins/NotifyEnigma2.py b/lib/apprise/plugins/NotifyEnigma2.py index 10d58179..05472646 100644 --- a/lib/apprise/plugins/NotifyEnigma2.py +++ b/lib/apprise/plugins/NotifyEnigma2.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE diff --git a/lib/apprise/plugins/NotifyFCM/__init__.py b/lib/apprise/plugins/NotifyFCM/__init__.py index 098f9ad0..57b03499 100644 --- a/lib/apprise/plugins/NotifyFCM/__init__.py +++ b/lib/apprise/plugins/NotifyFCM/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -157,7 +153,6 @@ class NotifyFCM(NotifyBase): 'project': { 'name': _('Project ID'), 'type': 'string', - 'required': True, }, 'target_device': { 'name': _('Target Device'), @@ -173,6 +168,7 @@ class NotifyFCM(NotifyBase): 'targets': { 'name': _('Targets'), 'type': 'list:string', + 'required': True, }, }) @@ -555,6 +551,12 @@ class NotifyFCM(NotifyBase): params=NotifyFCM.urlencode(params), ) + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + return len(self.targets) + @staticmethod def parse_url(url): """ diff --git a/lib/apprise/plugins/NotifyFCM/color.py b/lib/apprise/plugins/NotifyFCM/color.py index 46d0f2a7..69474a30 100644 --- a/lib/apprise/plugins/NotifyFCM/color.py +++ b/lib/apprise/plugins/NotifyFCM/color.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE diff --git a/lib/apprise/plugins/NotifyFCM/common.py b/lib/apprise/plugins/NotifyFCM/common.py index 0ec10eec..af71f881 100644 --- a/lib/apprise/plugins/NotifyFCM/common.py +++ b/lib/apprise/plugins/NotifyFCM/common.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE diff --git a/lib/apprise/plugins/NotifyFCM/oauth.py b/lib/apprise/plugins/NotifyFCM/oauth.py index a76bc698..f0961039 100644 --- a/lib/apprise/plugins/NotifyFCM/oauth.py +++ b/lib/apprise/plugins/NotifyFCM/oauth.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -47,6 +43,7 @@ from cryptography.hazmat.primitives import asymmetric from cryptography.exceptions import UnsupportedAlgorithm from datetime import datetime from datetime import timedelta +from datetime import timezone from json.decoder import JSONDecodeError from urllib.parse import urlencode as _urlencode @@ -106,7 +103,7 @@ class GoogleOAuth: # Our keys we build using the provided content self.__refresh_token = None self.__access_token = None - self.__access_token_expiry = datetime.utcnow() + self.__access_token_expiry = datetime.now(timezone.utc) def load(self, path): """ @@ -117,7 +114,7 @@ class GoogleOAuth: self.content = None self.private_key = None self.__access_token = None - self.__access_token_expiry = datetime.utcnow() + self.__access_token_expiry = datetime.now(timezone.utc) try: with open(path, mode="r", encoding=self.encoding) as fp: @@ -199,7 +196,7 @@ class GoogleOAuth: 'token with.') return None - if self.__access_token_expiry > datetime.utcnow(): + if self.__access_token_expiry > datetime.now(timezone.utc): # Return our no-expired key return self.__access_token @@ -209,7 +206,7 @@ class GoogleOAuth: key_identifier = self.content.get('private_key_id') # Generate our Assertion - now = datetime.utcnow() + now = datetime.now(timezone.utc) expiry = now + self.access_token_lifetime_sec payload = { @@ -301,7 +298,7 @@ class GoogleOAuth: if 'expires_in' in response: delta = timedelta(seconds=int(response['expires_in'])) self.__access_token_expiry = \ - delta + datetime.utcnow() - self.clock_skew + delta + datetime.now(timezone.utc) - self.clock_skew else: # Allow some grace before we expire diff --git a/lib/apprise/plugins/NotifyFCM/priority.py b/lib/apprise/plugins/NotifyFCM/priority.py index 81976cb6..966a0e14 100644 
--- a/lib/apprise/plugins/NotifyFCM/priority.py +++ b/lib/apprise/plugins/NotifyFCM/priority.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE diff --git a/lib/apprise/plugins/NotifyFaast.py b/lib/apprise/plugins/NotifyFaast.py index 3e55e120..be3eff28 100644 --- a/lib/apprise/plugins/NotifyFaast.py +++ b/lib/apprise/plugins/NotifyFaast.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE diff --git a/lib/apprise/plugins/NotifyFlock.py b/lib/apprise/plugins/NotifyFlock.py index 4f34b662..71a15da5 100644 --- a/lib/apprise/plugins/NotifyFlock.py +++ b/lib/apprise/plugins/NotifyFlock.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -97,8 +93,8 @@ class NotifyFlock(NotifyBase): # Define object templates templates = ( '{schema}://{token}', - '{schema}://{user}@{token}', - '{schema}://{user}@{token}/{targets}', + '{schema}://{botname}@{token}', + '{schema}://{botname}@{token}/{targets}', '{schema}://{token}/{targets}', ) @@ -111,9 +107,10 @@ class NotifyFlock(NotifyBase): 'private': True, 'required': True, }, - 'user': { + 'botname': { 'name': _('Bot Name'), 'type': 'string', + 'map_to': 'user', }, 'to_user': { 'name': _('To User ID'), @@ -334,6 +331,13 @@ class NotifyFlock(NotifyBase): params=NotifyFlock.urlencode(params), ) + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + targets = len(self.targets) + return targets if targets > 0 else 1 + @staticmethod def parse_url(url): """ 
diff --git a/lib/apprise/plugins/NotifyForm.py b/lib/apprise/plugins/NotifyForm.py index b14ae5ef..066f299b 100644 --- a/lib/apprise/plugins/NotifyForm.py +++ b/lib/apprise/plugins/NotifyForm.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -40,6 +36,16 @@ from ..common import NotifyType from ..AppriseLocale import gettext_lazy as _ +class FORMPayloadField: + """ + Identifies the fields available in the FORM Payload + """ + VERSION = 'version' + TITLE = 'title' + MESSAGE = 'message' + MESSAGETYPE = 'type' + + # Defines the method to send the notification METHODS = ( 'POST', @@ -89,6 +95,9 @@ class NotifyForm(NotifyBase): # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_Custom_Form' + # Support attachments + attachment_support = True + # Allows the user to specify the NotifyImageSize object image_size = NotifyImageSize.XY_128 @@ -96,6 +105,12 @@ class NotifyForm(NotifyBase): # local anyway request_rate_per_sec = 0 + # Define the FORM version to place in all payloads + # Version: Major.Minor, Major is only updated if the entire schema is + # changed. If just adding new items (or removing old ones, only increment + # the Minor! 
+ form_version = '1.0' + # Define object templates templates = ( '{schema}://{host}', @@ -218,6 +233,18 @@ class NotifyForm(NotifyBase): self.attach_as += self.attach_as_count self.attach_multi_support = True + # A payload map allows users to over-ride the default mapping if + # they're detected with the :overide=value. Normally this would + # create a new key and assign it the value specified. However + # if the key you specify is actually an internally mapped one, + # then a re-mapping takes place using the value + self.payload_map = { + FORMPayloadField.VERSION: FORMPayloadField.VERSION, + FORMPayloadField.TITLE: FORMPayloadField.TITLE, + FORMPayloadField.MESSAGE: FORMPayloadField.MESSAGE, + FORMPayloadField.MESSAGETYPE: FORMPayloadField.MESSAGETYPE, + } + self.params = {} if params: # Store our extra headers @@ -228,10 +255,20 @@ class NotifyForm(NotifyBase): # Store our extra headers self.headers.update(headers) + self.payload_overrides = {} self.payload_extras = {} if payload: # Store our extra payload entries self.payload_extras.update(payload) + for key in list(self.payload_extras.keys()): + # Any values set in the payload to alter a system related one + # alters the system key. 
Hence :message=msg maps the 'message' + # variable that otherwise already contains the payload to be + # 'msg' instead (containing the payload) + if key in self.payload_map: + self.payload_map[key] = self.payload_extras[key] + self.payload_overrides[key] = self.payload_extras[key] + del self.payload_extras[key] return @@ -257,6 +294,8 @@ class NotifyForm(NotifyBase): # Append our payload extra's into our parameters params.update( {':{}'.format(k): v for k, v in self.payload_extras.items()}) + params.update( + {':{}'.format(k): v for k, v in self.payload_overrides.items()}) if self.attach_as != self.attach_as_default: # Provide Attach-As extension details @@ -305,7 +344,7 @@ class NotifyForm(NotifyBase): # Track our potential attachments files = [] - if attach: + if attach and self.attachment_support: for no, attachment in enumerate(attach, start=1): # Perform some simple error checking if not attachment: @@ -337,15 +376,18 @@ class NotifyForm(NotifyBase): 'form:// Multi-Attachment Support not enabled') # prepare Form Object - payload = { - # Version: Major.Minor, Major is only updated if the entire - # schema is changed. If just adding new items (or removing - # old ones, only increment the Minor! 
- 'version': '1.0', - 'title': title, - 'message': body, - 'type': notify_type, - } + payload = {} + + for key, value in ( + (FORMPayloadField.VERSION, self.form_version), + (FORMPayloadField.TITLE, title), + (FORMPayloadField.MESSAGE, body), + (FORMPayloadField.MESSAGETYPE, notify_type)): + + if not self.payload_map[key]: + # Do not store element in payload response + continue + payload[self.payload_map[key]] = value # Apply any/all payload over-rides defined payload.update(self.payload_extras) diff --git a/lib/apprise/plugins/NotifyGitter.py b/lib/apprise/plugins/NotifyGitter.py deleted file mode 100644 index 48d14c7c..00000000 --- a/lib/apprise/plugins/NotifyGitter.py +++ /dev/null @@ -1,419 +0,0 @@ -# -*- coding: utf-8 -*- -# BSD 3-Clause License -# -# Apprise - Push Notification Library. -# Copyright (c) 2023, Chris Caron -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are met: -# -# 1. Redistributions of source code must retain the above copyright notice, -# this list of conditions and the following disclaimer. -# -# 2. Redistributions in binary form must reproduce the above copyright notice, -# this list of conditions and the following disclaimer in the documentation -# and/or other materials provided with the distribution. -# -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -# ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE -# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. - -# Once you visit: https://developer.gitter.im/apps you'll get a personal -# access token that will look something like this: -# b5647881d563fm846dfbb2c27d1fe8f669b8f026 - -# Don't worry about generating an app; this token is all you need to form -# you're URL with. The syntax is as follows: -# gitter://{token}/{channel} - -# Hence a URL might look like the following: -# gitter://b5647881d563fm846dfbb2c27d1fe8f669b8f026/apprise - -# Note: You must have joined the channel to send a message to it! 
- -# Official API reference: https://developer.gitter.im/docs/user-resource - -import re -import requests -from json import loads -from json import dumps -from datetime import datetime - -from .NotifyBase import NotifyBase -from ..common import NotifyImageSize -from ..common import NotifyFormat -from ..common import NotifyType -from ..utils import parse_list -from ..utils import parse_bool -from ..utils import validate_regex -from ..AppriseLocale import gettext_lazy as _ - -# API Gitter URL -GITTER_API_URL = 'https://api.gitter.im/v1' - -# Used to break path apart into list of targets -TARGET_LIST_DELIM = re.compile(r'[ \t\r\n,\\/]+') - - -class NotifyGitter(NotifyBase): - """ - A wrapper for Gitter Notifications - """ - - # The default descriptive name associated with the Notification - service_name = 'Gitter' - - # The services URL - service_url = 'https://gitter.im/' - - # All notification requests are secure - secure_protocol = 'gitter' - - # A URL that takes you to the setup/help of the specific protocol - setup_url = 'https://github.com/caronc/apprise/wiki/Notify_gitter' - - # Allows the user to specify the NotifyImageSize object - image_size = NotifyImageSize.XY_32 - - # Gitter does not support a title - title_maxlen = 0 - - # Gitter is kind enough to return how many more requests we're allowed to - # continue to make within it's header response as: - # X-RateLimit-Reset: The epoc time (in seconds) we can expect our - # rate-limit to be reset. - # X-RateLimit-Remaining: an integer identifying how many requests we're - # still allow to make. 
- request_rate_per_sec = 0 - - # For Tracking Purposes - ratelimit_reset = datetime.utcnow() - - # Default to 1 - ratelimit_remaining = 1 - - # Default Notification Format - notify_format = NotifyFormat.MARKDOWN - - # Define object templates - templates = ( - '{schema}://{token}/{targets}/', - ) - - # Define our template tokens - template_tokens = dict(NotifyBase.template_tokens, **{ - 'token': { - 'name': _('Token'), - 'type': 'string', - 'private': True, - 'required': True, - 'regex': (r'^[a-z0-9]{40}$', 'i'), - }, - 'targets': { - 'name': _('Rooms'), - 'type': 'list:string', - }, - }) - - # Define our template arguments - template_args = dict(NotifyBase.template_args, **{ - 'image': { - 'name': _('Include Image'), - 'type': 'bool', - 'default': False, - 'map_to': 'include_image', - }, - 'to': { - 'alias_of': 'targets', - }, - }) - - def __init__(self, token, targets, include_image=False, **kwargs): - """ - Initialize Gitter Object - """ - super().__init__(**kwargs) - - # Secret Key (associated with project) - self.token = validate_regex( - token, *self.template_tokens['token']['regex']) - if not self.token: - msg = 'An invalid Gitter API Token ' \ - '({}) was specified.'.format(token) - self.logger.warning(msg) - raise TypeError(msg) - - # Parse our targets - self.targets = parse_list(targets) - if not self.targets: - msg = 'There are no valid Gitter targets to notify.' - self.logger.warning(msg) - raise TypeError(msg) - - # Used to track maping of rooms to their numeric id lookup for - # messaging - self._room_mapping = None - - # Track whether or not we want to send an image with our notification - # or not. 
- self.include_image = include_image - - return - - def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): - """ - Perform Gitter Notification - """ - - # error tracking (used for function return) - has_error = False - - # Set up our image for display if configured to do so - image_url = None if not self.include_image \ - else self.image_url(notify_type) - - if image_url: - body = '![alt]({})\n{}'.format(image_url, body) - - if self._room_mapping is None: - # Populate our room mapping - self._room_mapping = {} - postokay, response = self._fetch(url='rooms') - if not postokay: - return False - - # Response generally looks like this: - # [ - # { - # noindex: False, - # oneToOne: False, - # avatarUrl: 'https://path/to/avatar/url', - # url: '/apprise-notifications/community', - # public: True, - # tags: [], - # lurk: False, - # uri: 'apprise-notifications/community', - # lastAccessTime: '2019-03-25T00:12:28.144Z', - # topic: '', - # roomMember: True, - # groupId: '5c981cecd73408ce4fbbad2f', - # githubType: 'REPO_CHANNEL', - # unreadItems: 0, - # mentions: 0, - # security: 'PUBLIC', - # userCount: 1, - # id: '5c981cecd73408ce4fbbad31', - # name: 'apprise/community' - # } - # ] - for entry in response: - self._room_mapping[entry['name'].lower().split('/')[0]] = { - # The ID of the room - 'id': entry['id'], - - # A descriptive name (useful for logging) - 'uri': entry['uri'], - } - - # Create a copy of the targets list - targets = list(self.targets) - while len(targets): - target = targets.pop(0).lower() - - if target not in self._room_mapping: - self.logger.warning( - 'Failed to locate Gitter room {}'.format(target)) - - # Flag our error - has_error = True - continue - - # prepare our payload - payload = { - 'text': body, - } - - # Our Notification URL - notify_url = 'rooms/{}/chatMessages'.format( - self._room_mapping[target]['id']) - - # Perform our query - postokay, response = self._fetch( - notify_url, payload=dumps(payload), method='POST') - - if not 
postokay: - # Flag our error - has_error = True - - return not has_error - - def _fetch(self, url, payload=None, method='GET'): - """ - Wrapper to request object - - """ - - # Prepare our headers: - headers = { - 'User-Agent': self.app_id, - 'Accept': 'application/json', - 'Authorization': 'Bearer ' + self.token, - } - if payload: - # Only set our header payload if it's defined - headers['Content-Type'] = 'application/json' - - # Default content response object - content = {} - - # Update our URL - url = '{}/{}'.format(GITTER_API_URL, url) - - # Some Debug Logging - self.logger.debug('Gitter {} URL: {} (cert_verify={})'.format( - method, - url, self.verify_certificate)) - if payload: - self.logger.debug('Gitter Payload: {}' .format(payload)) - - # By default set wait to None - wait = None - - if self.ratelimit_remaining <= 0: - # Determine how long we should wait for or if we should wait at - # all. This isn't fool-proof because we can't be sure the client - # time (calling this script) is completely synced up with the - # Gitter server. One would hope we're on NTP and our clocks are - # the same allowing this to role smoothly: - - now = datetime.utcnow() - if now < self.ratelimit_reset: - # We need to throttle for the difference in seconds - # We add 0.5 seconds to the end just to allow a grace - # period. 
- wait = (self.ratelimit_reset - now).total_seconds() + 0.5 - - # Always call throttle before any remote server i/o is made - self.throttle(wait=wait) - - # fetch function - fn = requests.post if method == 'POST' else requests.get - try: - r = fn( - url, - data=payload, - headers=headers, - verify=self.verify_certificate, - timeout=self.request_timeout, - ) - - if r.status_code != requests.codes.ok: - # We had a problem - status_str = \ - NotifyGitter.http_response_code_lookup(r.status_code) - - self.logger.warning( - 'Failed to send Gitter {} to {}: ' - '{}error={}.'.format( - method, - url, - ', ' if status_str else '', - r.status_code)) - - self.logger.debug( - 'Response Details:\r\n{}'.format(r.content)) - - # Mark our failure - return (False, content) - - try: - content = loads(r.content) - - except (AttributeError, TypeError, ValueError): - # ValueError = r.content is Unparsable - # TypeError = r.content is None - # AttributeError = r is None - content = {} - - try: - self.ratelimit_remaining = \ - int(r.headers.get('X-RateLimit-Remaining')) - self.ratelimit_reset = datetime.utcfromtimestamp( - int(r.headers.get('X-RateLimit-Reset'))) - - except (TypeError, ValueError): - # This is returned if we could not retrieve this information - # gracefully accept this state and move on - pass - - except requests.RequestException as e: - self.logger.warning( - 'Exception received when sending Gitter {} to {}: '. - format(method, url)) - self.logger.debug('Socket Exception: %s' % str(e)) - - # Mark our failure - return (False, content) - - return (True, content) - - def url(self, privacy=False, *args, **kwargs): - """ - Returns the URL built dynamically based on specified arguments. 
- """ - - # Define any URL parameters - params = { - 'image': 'yes' if self.include_image else 'no', - } - - # Extend our parameters - params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) - - return '{schema}://{token}/{targets}/?{params}'.format( - schema=self.secure_protocol, - token=self.pprint(self.token, privacy, safe=''), - targets='/'.join( - [NotifyGitter.quote(x, safe='') for x in self.targets]), - params=NotifyGitter.urlencode(params)) - - @staticmethod - def parse_url(url): - """ - Parses the URL and returns enough arguments that can allow - us to re-instantiate this object. - - """ - results = NotifyBase.parse_url(url, verify_host=False) - if not results: - # We're done early as we couldn't load the results - return results - - results['token'] = NotifyGitter.unquote(results['host']) - - # Get our entries; split_path() looks after unquoting content for us - # by default - results['targets'] = NotifyGitter.split_path(results['fullpath']) - - # Support the 'to' variable so that we can support targets this way too - # The 'to' makes it easier to use yaml configuration - if 'to' in results['qsd'] and len(results['qsd']['to']): - results['targets'] += NotifyGitter.parse_list(results['qsd']['to']) - - # Include images with our message - results['include_image'] = \ - parse_bool(results['qsd'].get('image', False)) - - return results diff --git a/lib/apprise/plugins/NotifyGnome.py b/lib/apprise/plugins/NotifyGnome.py index dc23f736..26b616ee 100644 --- a/lib/apprise/plugins/NotifyGnome.py +++ b/lib/apprise/plugins/NotifyGnome.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. 
Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE diff --git a/lib/apprise/plugins/NotifyGoogleChat.py b/lib/apprise/plugins/NotifyGoogleChat.py index f65b6541..7119e742 100644 --- a/lib/apprise/plugins/NotifyGoogleChat.py +++ b/lib/apprise/plugins/NotifyGoogleChat.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE diff --git a/lib/apprise/plugins/NotifyGotify.py b/lib/apprise/plugins/NotifyGotify.py index 37922568..e20aa03d 100644 --- a/lib/apprise/plugins/NotifyGotify.py +++ b/lib/apprise/plugins/NotifyGotify.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. 
Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -134,7 +130,6 @@ class NotifyGotify(NotifyBase): 'type': 'string', 'map_to': 'fullpath', 'default': '/', - 'required': True, }, 'port': { 'name': _('Port'), diff --git a/lib/apprise/plugins/NotifyGrowl.py b/lib/apprise/plugins/NotifyGrowl.py index 9240d62c..790945f0 100644 --- a/lib/apprise/plugins/NotifyGrowl.py +++ b/lib/apprise/plugins/NotifyGrowl.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE diff --git a/lib/apprise/plugins/NotifyGuilded.py b/lib/apprise/plugins/NotifyGuilded.py index 8bb9aeea..066cddee 100644 --- a/lib/apprise/plugins/NotifyGuilded.py +++ b/lib/apprise/plugins/NotifyGuilded.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. 
# Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE diff --git a/lib/apprise/plugins/NotifyHomeAssistant.py b/lib/apprise/plugins/NotifyHomeAssistant.py index a403356a..25d8f5fb 100644 --- a/lib/apprise/plugins/NotifyHomeAssistant.py +++ b/lib/apprise/plugins/NotifyHomeAssistant.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE diff --git a/lib/apprise/plugins/NotifyIFTTT.py b/lib/apprise/plugins/NotifyIFTTT.py index 70d51aa6..2c386c6b 100644 --- a/lib/apprise/plugins/NotifyIFTTT.py +++ b/lib/apprise/plugins/NotifyIFTTT.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. 
# Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -30,7 +26,6 @@ # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. -# # For this plugin to work, you need to add the Maker applet to your profile # Simply visit https://ifttt.com/search and search for 'Webhooks' # Or if you're signed in, click here: https://ifttt.com/maker_webhooks @@ -312,6 +307,12 @@ class NotifyIFTTT(NotifyBase): params=NotifyIFTTT.urlencode(params), ) + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + return len(self.events) + @staticmethod def parse_url(url): """ diff --git a/lib/apprise/plugins/NotifyJSON.py b/lib/apprise/plugins/NotifyJSON.py index 509c7627..a8ab7adc 100644 --- a/lib/apprise/plugins/NotifyJSON.py +++ b/lib/apprise/plugins/NotifyJSON.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -41,6 +37,17 @@ from ..common import NotifyType from ..AppriseLocale import gettext_lazy as _ +class JSONPayloadField: + """ + Identifies the fields available in the JSON Payload + """ + VERSION = 'version' + TITLE = 'title' + MESSAGE = 'message' + ATTACHMENTS = 'attachments' + MESSAGETYPE = 'type' + + # Defines the method to send the notification METHODS = ( 'POST', @@ -69,6 +76,9 @@ class NotifyJSON(NotifyBase): # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_Custom_JSON' + # Support attachments + attachment_support = True + # Allows the user to specify the NotifyImageSize object image_size = NotifyImageSize.XY_128 @@ -76,6 +86,12 @@ class NotifyJSON(NotifyBase): # local anyway request_rate_per_sec = 0 + # Define the JSON version to place in all payloads + # Version: Major.Minor, Major is only updated if the entire schema is + # changed. If just adding new items (or removing old ones, only increment + # the Minor! + json_version = '1.0' + # Define object templates templates = ( '{schema}://{host}', @@ -246,7 +262,7 @@ class NotifyJSON(NotifyBase): # Track our potential attachments attachments = [] - if attach: + if attach and self.attachment_support: for attachment in attach: # Perform some simple error checking if not attachment: @@ -274,20 +290,30 @@ class NotifyJSON(NotifyBase): self.logger.debug('I/O Exception: %s' % str(e)) return False - # prepare JSON Object + # Prepare JSON Object payload = { - # Version: Major.Minor, Major is only updated if the entire - # schema is changed. If just adding new items (or removing - # old ones, only increment the Minor! 
- 'version': '1.0', - 'title': title, - 'message': body, - 'attachments': attachments, - 'type': notify_type, + JSONPayloadField.VERSION: self.json_version, + JSONPayloadField.TITLE: title, + JSONPayloadField.MESSAGE: body, + JSONPayloadField.ATTACHMENTS: attachments, + JSONPayloadField.MESSAGETYPE: notify_type, } - # Apply any/all payload over-rides defined - payload.update(self.payload_extras) + for key, value in self.payload_extras.items(): + + if key in payload: + if not value: + # Do not store element in payload response + del payload[key] + + else: + # Re-map + payload[value] = payload[key] + del payload[key] + + else: + # Append entry + payload[key] = value auth = None if self.user: diff --git a/lib/apprise/plugins/NotifyJoin.py b/lib/apprise/plugins/NotifyJoin.py index 91b2c86b..92af6c3f 100644 --- a/lib/apprise/plugins/NotifyJoin.py +++ b/lib/apprise/plugins/NotifyJoin.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -174,7 +170,6 @@ class NotifyJoin(NotifyBase): 'targets': { 'name': _('Targets'), 'type': 'list:string', - 'required': True, }, }) @@ -373,6 +368,12 @@ class NotifyJoin(NotifyBase): for x in self.targets]), params=NotifyJoin.urlencode(params)) + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + return len(self.targets) + @staticmethod def parse_url(url): """ diff --git a/lib/apprise/plugins/NotifyKavenegar.py b/lib/apprise/plugins/NotifyKavenegar.py index 84100b25..d1df47c9 100644 --- a/lib/apprise/plugins/NotifyKavenegar.py +++ b/lib/apprise/plugins/NotifyKavenegar.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -324,6 +320,12 @@ class NotifyKavenegar(NotifyBase): [NotifyKavenegar.quote(x, safe='') for x in self.targets]), params=NotifyKavenegar.urlencode(params)) + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + return len(self.targets) + @staticmethod def parse_url(url): """ diff --git a/lib/apprise/plugins/NotifyKumulos.py b/lib/apprise/plugins/NotifyKumulos.py index 27e0995c..6072340f 100644 --- a/lib/apprise/plugins/NotifyKumulos.py +++ b/lib/apprise/plugins/NotifyKumulos.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE diff --git a/lib/apprise/plugins/NotifyLametric.py b/lib/apprise/plugins/NotifyLametric.py index 1b98b694..516ec27c 100644 --- a/lib/apprise/plugins/NotifyLametric.py +++ b/lib/apprise/plugins/NotifyLametric.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. 
# Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -370,6 +366,7 @@ class NotifyLametric(NotifyBase): # Device Mode '{schema}://{apikey}@{host}', + '{schema}://{user}:{apikey}@{host}', '{schema}://{apikey}@{host}:{port}', '{schema}://{user}:{apikey}@{host}:{port}', ) @@ -404,7 +401,6 @@ class NotifyLametric(NotifyBase): 'host': { 'name': _('Hostname'), 'type': 'string', - 'required': True, }, 'port': { 'name': _('Port'), diff --git a/lib/apprise/plugins/NotifyLine.py b/lib/apprise/plugins/NotifyLine.py index 65cd0163..09d72fed 100644 --- a/lib/apprise/plugins/NotifyLine.py +++ b/lib/apprise/plugins/NotifyLine.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -102,6 +98,7 @@ class NotifyLine(NotifyBase): 'targets': { 'name': _('Targets'), 'type': 'list:string', + 'required': True }, }) @@ -267,6 +264,12 @@ class NotifyLine(NotifyBase): params=NotifyLine.urlencode(params), ) + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + return len(self.targets) + @staticmethod def parse_url(url): """ diff --git a/lib/apprise/plugins/NotifyMQTT.py b/lib/apprise/plugins/NotifyMQTT.py index 48094e5f..2372c8b4 100644 --- a/lib/apprise/plugins/NotifyMQTT.py +++ b/lib/apprise/plugins/NotifyMQTT.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -427,6 +423,10 @@ class NotifyMQTT(NotifyBase): self.logger.debug('Socket Exception: %s' % str(e)) return False + if not has_error: + # Verbal notice + self.logger.info('Sent MQTT notification') + return not has_error def url(self, privacy=False, *args, **kwargs): @@ -476,6 +476,12 @@ class NotifyMQTT(NotifyBase): params=NotifyMQTT.urlencode(params), ) + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + return len(self.topics) + @staticmethod def parse_url(url): """ diff --git a/lib/apprise/plugins/NotifyMSG91.py b/lib/apprise/plugins/NotifyMSG91.py index ec94ab9a..225a2d3d 100644 --- a/lib/apprise/plugins/NotifyMSG91.py +++ b/lib/apprise/plugins/NotifyMSG91.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -35,50 +31,31 @@ # Get your (authkey) from the dashboard here: # - https://world.msg91.com/user/index.php#api # +# Note: You will need to define a template for this to work +# # Get details on the API used in this plugin here: -# - https://world.msg91.com/apidoc/textsms/send-sms.php - +# - https://docs.msg91.com/reference/send-sms +import re import requests - +from json import dumps from .NotifyBase import NotifyBase from ..common import NotifyType from ..utils import is_phone_no -from ..utils import parse_phone_no +from ..utils import parse_phone_no, parse_bool from ..utils import validate_regex from ..AppriseLocale import gettext_lazy as _ -class MSG91Route: +class MSG91PayloadField: """ - Transactional SMS Routes - route=1 for promotional, route=4 for transactional SMS. 
+ Identifies the fields available in the JSON Payload """ - PROMOTIONAL = 1 - TRANSACTIONAL = 4 + BODY = 'body' + MESSAGETYPE = 'type' -# Used for verification -MSG91_ROUTES = ( - MSG91Route.PROMOTIONAL, - MSG91Route.TRANSACTIONAL, -) - - -class MSG91Country: - """ - Optional value that can be specified on the MSG91 api - """ - INTERNATIONAL = 0 - USA = 1 - INDIA = 91 - - -# Used for verification -MSG91_COUNTRIES = ( - MSG91Country.INTERNATIONAL, - MSG91Country.USA, - MSG91Country.INDIA, -) +# Add entries here that are reserved +RESERVED_KEYWORDS = ('mobiles', ) class NotifyMSG91(NotifyBase): @@ -99,7 +76,7 @@ class NotifyMSG91(NotifyBase): setup_url = 'https://github.com/caronc/apprise/wiki/Notify_msg91' # MSG91 uses the http protocol with JSON requests - notify_url = 'https://world.msg91.com/api/sendhttp.php' + notify_url = 'https://control.msg91.com/api/v5/flow/' # The maximum length of the body body_maxlen = 160 @@ -108,14 +85,24 @@ class NotifyMSG91(NotifyBase): # cause any title (if defined) to get placed into the message body. 
title_maxlen = 0 + # Our supported mappings and component keys + component_key_re = re.compile( + r'(?P((?P[a-z0-9_-])?|(?Pbody|type)))', re.IGNORECASE) + # Define object templates templates = ( - '{schema}://{authkey}/{targets}', - '{schema}://{sender}@{authkey}/{targets}', + '{schema}://{template}@{authkey}/{targets}', ) # Define our template tokens template_tokens = dict(NotifyBase.template_tokens, **{ + 'template': { + 'name': _('Template ID'), + 'type': 'string', + 'required': True, + 'private': True, + 'regex': (r'^[a-z0-9 _-]+$', 'i'), + }, 'authkey': { 'name': _('Authentication Key'), 'type': 'string', @@ -133,10 +120,7 @@ class NotifyMSG91(NotifyBase): 'targets': { 'name': _('Targets'), 'type': 'list:string', - }, - 'sender': { - 'name': _('Sender ID'), - 'type': 'string', + 'required': True, }, }) @@ -145,21 +129,23 @@ class NotifyMSG91(NotifyBase): 'to': { 'alias_of': 'targets', }, - 'route': { - 'name': _('Route'), - 'type': 'choice:int', - 'values': MSG91_ROUTES, - 'default': MSG91Route.TRANSACTIONAL, - }, - 'country': { - 'name': _('Country'), - 'type': 'choice:int', - 'values': MSG91_COUNTRIES, + 'short_url': { + 'name': _('Short URL'), + 'type': 'bool', + 'default': False, }, }) - def __init__(self, authkey, targets=None, sender=None, route=None, - country=None, **kwargs): + # Define any kwargs we're using + template_kwargs = { + 'template_mapping': { + 'name': _('Template Mapping'), + 'prefix': ':', + }, + } + + def __init__(self, template, authkey, targets=None, short_url=None, + template_mapping=None, **kwargs): """ Initialize MSG91 Object """ @@ -174,39 +160,20 @@ class NotifyMSG91(NotifyBase): self.logger.warning(msg) raise TypeError(msg) - if route is None: - self.route = self.template_args['route']['default'] + # Template ID + self.template = validate_regex( + template, *self.template_tokens['template']['regex']) + if not self.template: + msg = 'An invalid MSG91 Template ID ' \ + '({}) was specified.'.format(template) + 
self.logger.warning(msg) + raise TypeError(msg) + + if short_url is None: + self.short_url = self.template_args['short_url']['default'] else: - try: - self.route = int(route) - if self.route not in MSG91_ROUTES: - # Let outer except catch thi - raise ValueError() - - except (ValueError, TypeError): - msg = 'The MSG91 route specified ({}) is invalid.'\ - .format(route) - self.logger.warning(msg) - raise TypeError(msg) - - if country: - try: - self.country = int(country) - if self.country not in MSG91_COUNTRIES: - # Let outer except catch thi - raise ValueError() - - except (ValueError, TypeError): - msg = 'The MSG91 country specified ({}) is invalid.'\ - .format(country) - self.logger.warning(msg) - raise TypeError(msg) - else: - self.country = country - - # Store our sender - self.sender = sender + self.short_url = parse_bool(short_url) # Parse our targets self.targets = list() @@ -224,6 +191,11 @@ class NotifyMSG91(NotifyBase): # store valid phone number self.targets.append(result['full']) + self.template_mapping = {} + if template_mapping: + # Store our extra payload entries + self.template_mapping.update(template_mapping) + return def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): @@ -239,23 +211,55 @@ class NotifyMSG91(NotifyBase): # Prepare our headers headers = { 'User-Agent': self.app_id, - 'Content-Type': 'application/x-www-form-urlencoded', + 'Content-Type': 'application/json', + 'authkey': self.authkey, } + # Base + recipient_payload = { + 'mobiles': None, + # Keyword Tokens + MSG91PayloadField.BODY: body, + MSG91PayloadField.MESSAGETYPE: notify_type, + } + + # Prepare Recipient Payload Object + for key, value in self.template_mapping.items(): + + if key in RESERVED_KEYWORDS: + self.logger.warning( + 'Ignoring MSG91 custom payload entry %s', key) + continue + + if key in recipient_payload: + if not value: + # Do not store element in payload response + del recipient_payload[key] + + else: + # Re-map + recipient_payload[value] = 
recipient_payload[key] + del recipient_payload[key] + + else: + # Append entry + recipient_payload[key] = value + + # Prepare our recipients + recipients = [] + for target in self.targets: + recipient = recipient_payload.copy() + recipient['mobiles'] = target + recipients.append(recipient) + # Prepare our payload payload = { - 'sender': self.sender if self.sender else self.app_id, - 'authkey': self.authkey, - 'message': body, - 'response': 'json', + 'template_id': self.template, + 'short_url': 1 if self.short_url else 0, # target phone numbers are sent with a comma delimiter - 'mobiles': ','.join(self.targets), - 'route': str(self.route), + 'recipients': recipients, } - if self.country: - payload['country'] = str(self.country) - # Some Debug Logging self.logger.debug('MSG91 POST URL: {} (cert_verify={})'.format( self.notify_url, self.verify_certificate)) @@ -267,7 +271,7 @@ class NotifyMSG91(NotifyBase): try: r = requests.post( self.notify_url, - data=payload, + data=dumps(payload), headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, @@ -313,22 +317,32 @@ class NotifyMSG91(NotifyBase): # Define any URL parameters params = { - 'route': str(self.route), + 'short_url': str(self.short_url), } # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) - if self.country: - params['country'] = str(self.country) + # Payload body extras prefixed with a ':' sign + # Append our payload extras into our parameters + params.update( + {':{}'.format(k): v for k, v in self.template_mapping.items()}) - return '{schema}://{authkey}/{targets}/?{params}'.format( + return '{schema}://{template}@{authkey}/{targets}/?{params}'.format( schema=self.secure_protocol, + template=self.pprint(self.template, privacy, safe=''), authkey=self.pprint(self.authkey, privacy, safe=''), targets='/'.join( [NotifyMSG91.quote(x, safe='') for x in self.targets]), params=NotifyMSG91.urlencode(params)) + def __len__(self): + """ + Returns the number 
of targets associated with this notification + """ + targets = len(self.targets) + return targets if targets > 0 else 1 + @staticmethod def parse_url(url): """ @@ -349,11 +363,11 @@ class NotifyMSG91(NotifyBase): # The hostname is our authentication key results['authkey'] = NotifyMSG91.unquote(results['host']) - if 'route' in results['qsd'] and len(results['qsd']['route']): - results['route'] = results['qsd']['route'] + # The template id is kept in the user field + results['template'] = NotifyMSG91.unquote(results['user']) - if 'country' in results['qsd'] and len(results['qsd']['country']): - results['country'] = results['qsd']['country'] + if 'short_url' in results['qsd'] and len(results['qsd']['short_url']): + results['short_url'] = parse_bool(results['qsd']['short_url']) # Support the 'to' variable so that we can support targets this way too # The 'to' makes it easier to use yaml configuration @@ -361,4 +375,10 @@ class NotifyMSG91(NotifyBase): results['targets'] += \ NotifyMSG91.parse_phone_no(results['qsd']['to']) + # store any additional payload extra's defined + results['template_mapping'] = { + NotifyMSG91.unquote(x): NotifyMSG91.unquote(y) + for x, y in results['qsd:'].items() + } + return results diff --git a/lib/apprise/plugins/NotifyMSTeams.py b/lib/apprise/plugins/NotifyMSTeams.py index 19f9fe34..e82fdb8c 100644 --- a/lib/apprise/plugins/NotifyMSTeams.py +++ b/lib/apprise/plugins/NotifyMSTeams.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE diff --git a/lib/apprise/plugins/NotifyMacOSX.py b/lib/apprise/plugins/NotifyMacOSX.py index 59c0620a..ae08da11 100644 --- a/lib/apprise/plugins/NotifyMacOSX.py +++ b/lib/apprise/plugins/NotifyMacOSX.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -197,8 +193,7 @@ class NotifyMacOSX(NotifyBase): self.logger.debug('MacOSX CMD: {}'.format(' '.join(cmd))) # Send our notification - output = subprocess.Popen( - cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) + output = subprocess.Popen(cmd) # Wait for process to complete output.wait() diff --git a/lib/apprise/plugins/NotifyMailgun.py b/lib/apprise/plugins/NotifyMailgun.py index f6017c82..5afebc52 100644 --- a/lib/apprise/plugins/NotifyMailgun.py +++ b/lib/apprise/plugins/NotifyMailgun.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. 
# Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -121,6 +117,9 @@ class NotifyMailgun(NotifyBase): # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_mailgun' + # Support attachments + attachment_support = True + # Default Notify Format notify_format = NotifyFormat.HTML @@ -152,8 +151,13 @@ class NotifyMailgun(NotifyBase): 'private': True, 'required': True, }, + 'target_email': { + 'name': _('Target Email'), + 'type': 'string', + 'map_to': 'targets', + }, 'targets': { - 'name': _('Target Emails'), + 'name': _('Targets'), 'type': 'list:string', }, }) @@ -366,7 +370,7 @@ class NotifyMailgun(NotifyBase): # Track our potential files files = {} - if attach: + if attach and self.attachment_support: for idx, attachment in enumerate(attach): # Perform some simple error checking if not attachment: @@ -627,6 +631,20 @@ class NotifyMailgun(NotifyBase): safe='') for e in self.targets]), params=NotifyMailgun.urlencode(params)) + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + # + # Factor batch into calculation + # + batch_size = 1 if not self.batch else self.default_batch_size + targets = len(self.targets) + if batch_size > 1: + targets = int(targets / batch_size) + \ + (1 if targets % batch_size else 0) + return targets if targets > 0 else 1 + @staticmethod def parse_url(url): """ diff 
--git a/lib/apprise/plugins/NotifyMastodon.py b/lib/apprise/plugins/NotifyMastodon.py index cfd7ff48..90c39e14 100644 --- a/lib/apprise/plugins/NotifyMastodon.py +++ b/lib/apprise/plugins/NotifyMastodon.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -35,6 +31,7 @@ import requests from copy import deepcopy from json import dumps, loads from datetime import datetime +from datetime import timezone from .NotifyBase import NotifyBase from ..URLBase import PrivacyMode @@ -110,6 +107,10 @@ class NotifyMastodon(NotifyBase): # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_mastodon' + # Support attachments + attachment_support = True + + # Allows the user to specify the NotifyImageSize object # Allows the user to specify the NotifyImageSize object; this is supported # through the webhook image_size = NotifyImageSize.XY_128 @@ -150,7 +151,7 @@ class NotifyMastodon(NotifyBase): request_rate_per_sec = 0 # For Tracking Purposes - ratelimit_reset = datetime.utcnow() + ratelimit_reset = datetime.now(timezone.utc).replace(tzinfo=None) # Default to 1000; users can send up to 1000 DM's and 2400 toot a day # This value only get's adjusted if the server sets it that way @@ -378,6 +379,13 @@ class 
NotifyMastodon(NotifyBase): params=NotifyMastodon.urlencode(params), ) + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + targets = len(self.targets) + return targets if targets > 0 else 1 + def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, **kwargs): """ @@ -406,11 +414,10 @@ class NotifyMastodon(NotifyBase): else: targets.add(myself) - if attach: + if attach and self.attachment_support: # We need to upload our payload first so that we can source it # in remaining messages for attachment in attach: - # Perform some simple error checking if not attachment: # We could not access the attachment @@ -570,7 +577,7 @@ class NotifyMastodon(NotifyBase): _payload = deepcopy(payload) _payload['media_ids'] = media_ids - if no: + if no or not body: # strip text and replace it with the image representation _payload['status'] = \ '{:02d}/{:02d}'.format(no + 1, len(batches)) @@ -827,7 +834,7 @@ class NotifyMastodon(NotifyBase): # Mastodon server. 
One would hope we're on NTP and our clocks are # the same allowing this to role smoothly: - now = datetime.utcnow() + now = datetime.now(timezone.utc).replace(tzinfo=None) if now < self.ratelimit_reset: # We need to throttle for the difference in seconds # We add 0.5 seconds to the end just to allow a grace @@ -885,8 +892,9 @@ class NotifyMastodon(NotifyBase): # Capture rate limiting if possible self.ratelimit_remaining = \ int(r.headers.get('X-RateLimit-Remaining')) - self.ratelimit_reset = datetime.utcfromtimestamp( - int(r.headers.get('X-RateLimit-Limit'))) + self.ratelimit_reset = datetime.fromtimestamp( + int(r.headers.get('X-RateLimit-Limit')), timezone.utc + ).replace(tzinfo=None) except (TypeError, ValueError): # This is returned if we could not retrieve this information diff --git a/lib/apprise/plugins/NotifyMatrix.py b/lib/apprise/plugins/NotifyMatrix.py index ca9692aa..8f3e77ff 100644 --- a/lib/apprise/plugins/NotifyMatrix.py +++ b/lib/apprise/plugins/NotifyMatrix.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -53,8 +49,11 @@ from ..utils import validate_regex from ..AppriseLocale import gettext_lazy as _ # Define default path -MATRIX_V2_API_PATH = '/_matrix/client/r0' MATRIX_V1_WEBHOOK_PATH = '/api/v1/matrix/hook' +MATRIX_V2_API_PATH = '/_matrix/client/r0' +MATRIX_V3_API_PATH = '/_matrix/client/v3' +MATRIX_V3_MEDIA_PATH = '/_matrix/media/v3' +MATRIX_V2_MEDIA_PATH = '/_matrix/media/r0' # Extend HTTP Error Messages MATRIX_HTTP_ERROR_MAP = { @@ -88,6 +87,21 @@ MATRIX_MESSAGE_TYPES = ( ) +class MatrixVersion: + # Version 2 + V2 = "2" + + # Version 3 + V3 = "3" + + +# webhook modes are placed into this list for validation purposes +MATRIX_VERSIONS = ( + MatrixVersion.V2, + MatrixVersion.V3, +) + + class MatrixWebhookMode: # Webhook Mode is disabled DISABLED = "off" @@ -128,6 +142,9 @@ class NotifyMatrix(NotifyBase): # The default secure protocol secure_protocol = 'matrixs' + # Support Attachments + attachment_support = True + # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_matrix' @@ -147,6 +164,9 @@ class NotifyMatrix(NotifyBase): # Throttle a wee-bit to avoid thrashing request_rate_per_sec = 0.5 + # Our Matrix API Version + matrix_api_version = '3' + # How many retry attempts we'll make in the event the server asks us to # throttle back. 
default_retries = 2 @@ -175,7 +195,6 @@ class NotifyMatrix(NotifyBase): 'host': { 'name': _('Hostname'), 'type': 'string', - 'required': True, }, 'port': { 'name': _('Port'), @@ -194,6 +213,7 @@ class NotifyMatrix(NotifyBase): }, 'token': { 'name': _('Access Token'), + 'private': True, 'map_to': 'password', }, 'target_user': { @@ -234,6 +254,12 @@ class NotifyMatrix(NotifyBase): 'values': MATRIX_WEBHOOK_MODES, 'default': MatrixWebhookMode.DISABLED, }, + 'version': { + 'name': _('Matrix API Verion'), + 'type': 'choice:string', + 'values': MATRIX_VERSIONS, + 'default': MatrixVersion.V3, + }, 'msgtype': { 'name': _('Message Type'), 'type': 'choice:string', @@ -248,7 +274,7 @@ class NotifyMatrix(NotifyBase): }, }) - def __init__(self, targets=None, mode=None, msgtype=None, + def __init__(self, targets=None, mode=None, msgtype=None, version=None, include_image=False, **kwargs): """ Initialize Matrix Object @@ -282,6 +308,14 @@ class NotifyMatrix(NotifyBase): self.logger.warning(msg) raise TypeError(msg) + # Setup our version + self.version = self.template_args['version']['default'] \ + if not isinstance(version, str) else version + if self.version not in MATRIX_VERSIONS: + msg = 'The version specified ({}) is invalid.'.format(version) + self.logger.warning(msg) + raise TypeError(msg) + # Setup our message type self.msgtype = self.template_args['msgtype']['default'] \ if not isinstance(msgtype, str) else msgtype.lower() @@ -521,7 +555,8 @@ class NotifyMatrix(NotifyBase): return payload def _send_server_notification(self, body, title='', - notify_type=NotifyType.INFO, **kwargs): + notify_type=NotifyType.INFO, attach=None, + **kwargs): """ Perform Direct Matrix Server Notification (no webhook) """ @@ -548,6 +583,13 @@ class NotifyMatrix(NotifyBase): # Initiaize our error tracking has_error = False + attachments = None + if attach and self.attachment_support: + attachments = self._send_attachments(attach) + if attachments is False: + # take an early exit + return False + 
while len(rooms) > 0: # Get our room @@ -568,23 +610,47 @@ class NotifyMatrix(NotifyBase): image_url = None if not self.include_image else \ self.image_url(notify_type) - if image_url: - # Define our payload - image_payload = { - 'msgtype': 'm.image', - 'url': image_url, - 'body': '{}'.format(notify_type if not title else title), - } - # Build our path + # Build our path + if self.version == MatrixVersion.V3: + path = '/rooms/{}/send/m.room.message/0'.format( + NotifyMatrix.quote(room_id)) + + else: path = '/rooms/{}/send/m.room.message'.format( NotifyMatrix.quote(room_id)) - # Post our content - postokay, response = self._fetch(path, payload=image_payload) - if not postokay: - # Mark our failure - has_error = True - continue + if self.version == MatrixVersion.V2: + # + # Attachments don't work beyond V2 at this time + # + if image_url: + # Define our payload + image_payload = { + 'msgtype': 'm.image', + 'url': image_url, + 'body': '{}'.format( + notify_type if not title else title), + } + + # Post our content + postokay, response = self._fetch( + path, payload=image_payload) + if not postokay: + # Mark our failure + has_error = True + continue + + if attachments: + for attachment in attachments: + attachment['room_id'] = room_id + attachment['type'] = 'm.room.message' + + postokay, response = self._fetch( + path, payload=attachment) + if not postokay: + # Mark our failure + has_error = True + continue # Define our payload payload = { @@ -615,12 +681,10 @@ class NotifyMatrix(NotifyBase): ) }) - # Build our path - path = '/rooms/{}/send/m.room.message'.format( - NotifyMatrix.quote(room_id)) - # Post our content - postokay, response = self._fetch(path, payload=payload) + method = 'PUT' if self.version == MatrixVersion.V3 else 'POST' + postokay, response = self._fetch( + path, payload=payload, method=method) if not postokay: # Notify our user self.logger.warning( @@ -632,6 +696,62 @@ class NotifyMatrix(NotifyBase): return not has_error + def _send_attachments(self, 
attach): + """ + Posts all of the provided attachments + """ + + payloads = [] + if self.version != MatrixVersion.V2: + self.logger.warning( + 'Add ?v=2 to Apprise URL to support Attachments') + return next((False for a in attach if not a), []) + + for attachment in attach: + if not attachment: + # invalid attachment (bad file) + return False + + if not re.match(r'^image/', attachment.mimetype, re.I): + # unsuppored at this time + continue + + postokay, response = \ + self._fetch('/upload', attachment=attachment) + if not (postokay and isinstance(response, dict)): + # Failed to perform upload + return False + + # If we get here, we'll have a response that looks like: + # { + # "content_uri": "mxc://example.com/a-unique-key" + # } + + if self.version == MatrixVersion.V3: + # Prepare our payload + payloads.append({ + "body": attachment.name, + "info": { + "mimetype": attachment.mimetype, + "size": len(attachment), + }, + "msgtype": "m.image", + "url": response.get('content_uri'), + }) + + else: + # Prepare our payload + payloads.append({ + "info": { + "mimetype": attachment.mimetype, + }, + "msgtype": "m.image", + "body": "tta.webp", + "url": response.get('content_uri'), + }) + + return payloads + def _register(self): """ Register with the service if possible. 
@@ -695,12 +815,23 @@ class NotifyMatrix(NotifyBase): 'user/pass combo is missing.') return False - # Prepare our Registration Payload - payload = { - 'type': 'm.login.password', - 'user': self.user, - 'password': self.password, - } + # Prepare our Authentication Payload + if self.version == MatrixVersion.V3: + payload = { + 'type': 'm.login.password', + 'identifier': { + 'type': 'm.id.user', + 'user': self.user, + }, + 'password': self.password, + } + + else: + payload = { + 'type': 'm.login.password', + 'user': self.user, + 'password': self.password, + } # Build our URL postokay, response = self._fetch('/login', payload=payload) @@ -970,7 +1101,8 @@ class NotifyMatrix(NotifyBase): return None - def _fetch(self, path, payload=None, params=None, method='POST'): + def _fetch(self, path, payload=None, params=None, attachment=None, + method='POST'): """ Wrapper to request.post() to manage it's response better and make the send() function cleaner and easier to maintain. @@ -983,6 +1115,7 @@ class NotifyMatrix(NotifyBase): headers = { 'User-Agent': self.app_id, 'Content-Type': 'application/json', + 'Accept': 'application/json', } if self.access_token is not None: @@ -991,19 +1124,39 @@ class NotifyMatrix(NotifyBase): default_port = 443 if self.secure else 80 url = \ - '{schema}://{hostname}:{port}{matrix_api}{path}'.format( + '{schema}://{hostname}{port}'.format( schema='https' if self.secure else 'http', hostname=self.host, port='' if self.port is None - or self.port == default_port else self.port, - matrix_api=MATRIX_V2_API_PATH, - path=path) + or self.port == default_port else f':{self.port}') + + if path == '/upload': + if self.version == MatrixVersion.V3: + url += MATRIX_V3_MEDIA_PATH + path + + else: + url += MATRIX_V2_MEDIA_PATH + path + + params = {'filename': attachment.name} + with open(attachment.path, 'rb') as fp: + payload = fp.read() + + # Update our content type + headers['Content-Type'] = attachment.mimetype + + else: + if self.version == 
MatrixVersion.V3: + url += MATRIX_V3_API_PATH + path + + else: + url += MATRIX_V2_API_PATH + path # Our response object response = {} # fetch function - fn = requests.post if method == 'POST' else requests.get + fn = requests.post if method == 'POST' else ( + requests.put if method == 'PUT' else requests.get) # Define how many attempts we'll make if we get caught in a throttle # event @@ -1024,13 +1177,16 @@ class NotifyMatrix(NotifyBase): try: r = fn( url, - data=dumps(payload), + data=dumps(payload) if not attachment else payload, params=params, headers=headers, verify=self.verify_certificate, timeout=self.request_timeout, ) + self.logger.debug( + 'Matrix Response: code=%d, %s' % ( + r.status_code, str(r.content))) response = loads(r.content) if r.status_code == 429: @@ -1094,6 +1250,13 @@ class NotifyMatrix(NotifyBase): # Return; we're done return (False, response) + except (OSError, IOError) as e: + self.logger.warning( + 'An I/O error occurred while reading {}.'.format( + attachment.name if attachment else 'unknown file')) + self.logger.debug('I/O Exception: %s' % str(e)) + return (False, {}) + return (True, response) # If we get here, we ran out of retries @@ -1160,6 +1323,7 @@ class NotifyMatrix(NotifyBase): params = { 'image': 'yes' if self.include_image else 'no', 'mode': self.mode, + 'version': self.version, 'msgtype': self.msgtype, } @@ -1196,6 +1360,13 @@ class NotifyMatrix(NotifyBase): params=NotifyMatrix.urlencode(params), ) + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + targets = len(self.rooms) + return targets if targets > 0 else 1 + @staticmethod def parse_url(url): """ @@ -1250,6 +1421,14 @@ class NotifyMatrix(NotifyBase): if 'token' in results['qsd'] and len(results['qsd']['token']): results['password'] = NotifyMatrix.unquote(results['qsd']['token']) + # Support the use of the version= or v= keyword + if 'version' in results['qsd'] and len(results['qsd']['version']): + results['version'] = \ 
+ NotifyMatrix.unquote(results['qsd']['version']) + + elif 'v' in results['qsd'] and len(results['qsd']['v']): + results['version'] = NotifyMatrix.unquote(results['qsd']['v']) + return results @staticmethod @@ -1259,7 +1438,7 @@ class NotifyMatrix(NotifyBase): """ result = re.match( - r'^https?://webhooks\.t2bot\.io/api/v1/matrix/hook/' + r'^https?://webhooks\.t2bot\.io/api/v[0-9]+/matrix/hook/' r'(?P[A-Z0-9_-]+)/?' r'(?P\?.+)?$', url, re.I) diff --git a/lib/apprise/plugins/NotifyMatterMost.py b/lib/apprise/plugins/NotifyMatterMost.py index e62f653c..859fed31 100644 --- a/lib/apprise/plugins/NotifyMatterMost.py +++ b/lib/apprise/plugins/NotifyMatterMost.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -91,11 +87,11 @@ class NotifyMattermost(NotifyBase): # Define object templates templates = ( '{schema}://{host}/{token}', - '{schema}://{host}/{token}:{port}', + '{schema}://{host}:{port}/{token}', + '{schema}://{host}/{fullpath}/{token}', + '{schema}://{host}:{port}/{fullpath}/{token}', '{schema}://{botname}@{host}/{token}', '{schema}://{botname}@{host}:{port}/{token}', - '{schema}://{host}/{fullpath}/{token}', - '{schema}://{host}/{fullpath}{token}:{port}', '{schema}://{botname}@{host}/{fullpath}/{token}', '{schema}://{botname}@{host}:{port}/{fullpath}/{token}', ) diff --git a/lib/apprise/plugins/NotifyMessageBird.py b/lib/apprise/plugins/NotifyMessageBird.py index 72c24b6a..4cb9d7b5 100644 --- a/lib/apprise/plugins/NotifyMessageBird.py +++ b/lib/apprise/plugins/NotifyMessageBird.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -311,6 +307,13 @@ class NotifyMessageBird(NotifyBase): [NotifyMessageBird.quote(x, safe='') for x in self.targets]), params=NotifyMessageBird.urlencode(params)) + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + targets = len(self.targets) + return targets if targets > 0 else 1 + @staticmethod def parse_url(url): """ diff --git a/lib/apprise/plugins/NotifyMisskey.py b/lib/apprise/plugins/NotifyMisskey.py index 54c4e628..57633a51 100644 --- a/lib/apprise/plugins/NotifyMisskey.py +++ b/lib/apprise/plugins/NotifyMisskey.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -29,6 +25,7 @@ # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. + # 1. visit https://misskey-hub.net/ and see what it's all about if you want. 
# Choose a service you want to create an account on from here: # https://misskey-hub.net/en/instances.html diff --git a/lib/apprise/plugins/NotifyNextcloud.py b/lib/apprise/plugins/NotifyNextcloud.py index 085d02d6..b1d623d0 100644 --- a/lib/apprise/plugins/NotifyNextcloud.py +++ b/lib/apprise/plugins/NotifyNextcloud.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -67,6 +63,8 @@ class NotifyNextcloud(NotifyBase): # Define object templates templates = ( + '{schema}://{host}/{targets}', + '{schema}://{host}:{port}/{targets}', '{schema}://{user}:{password}@{host}/{targets}', '{schema}://{user}:{password}@{host}:{port}/{targets}', ) @@ -116,6 +114,10 @@ class NotifyNextcloud(NotifyBase): 'min': 1, 'default': 21, }, + 'url_prefix': { + 'name': _('URL Prefix'), + 'type': 'string', + }, 'to': { 'alias_of': 'targets', }, @@ -129,17 +131,15 @@ class NotifyNextcloud(NotifyBase): }, } - def __init__(self, targets=None, version=None, headers=None, **kwargs): + def __init__(self, targets=None, version=None, headers=None, + url_prefix=None, **kwargs): """ Initialize Nextcloud Object """ super().__init__(**kwargs) + # Store our targets self.targets = parse_list(targets) - if len(self.targets) == 0: - msg = 'At least one Nextcloud target user must be specified.' 
- self.logger.warning(msg) - raise TypeError(msg) self.version = self.template_args['version']['default'] if version is not None: @@ -155,6 +155,10 @@ class NotifyNextcloud(NotifyBase): self.logger.warning(msg) raise TypeError(msg) + # Support URL Prefix + self.url_prefix = '' if not url_prefix \ + else url_prefix.strip('/') + self.headers = {} if headers: # Store our extra headers @@ -167,6 +171,11 @@ class NotifyNextcloud(NotifyBase): Perform Nextcloud Notification """ + if len(self.targets) == 0: + # There were no services to notify + self.logger.warning('There were no Nextcloud targets to notify.') + return False + # Prepare our Header headers = { 'User-Agent': self.app_id, @@ -198,11 +207,11 @@ class NotifyNextcloud(NotifyBase): auth = (self.user, self.password) # Nextcloud URL based on version used - notify_url = '{schema}://{host}/ocs/v2.php/'\ + notify_url = '{schema}://{host}/{url_prefix}/ocs/v2.php/'\ 'apps/admin_notifications/' \ 'api/v1/notifications/{target}' \ if self.version < 21 else \ - '{schema}://{host}/ocs/v2.php/'\ + '{schema}://{host}/{url_prefix}/ocs/v2.php/'\ 'apps/notifications/'\ 'api/v2/admin_notifications/{target}' @@ -210,6 +219,7 @@ class NotifyNextcloud(NotifyBase): schema='https' if self.secure else 'http', host=self.host if not isinstance(self.port, int) else '{}:{}'.format(self.host, self.port), + url_prefix=self.url_prefix, target=target, ) @@ -279,6 +289,9 @@ class NotifyNextcloud(NotifyBase): # Set our version params['version'] = str(self.version) + if self.url_prefix: + params['url_prefix'] = self.url_prefix + # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) @@ -312,6 +325,13 @@ class NotifyNextcloud(NotifyBase): params=NotifyNextcloud.urlencode(params), ) + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + targets = len(self.targets) + return targets if targets else 1 + @staticmethod def parse_url(url): """ @@ -339,6 +359,12 @@ class 
NotifyNextcloud(NotifyBase): results['version'] = \ NotifyNextcloud.unquote(results['qsd']['version']) + # Support URL Prefixes + if 'url_prefix' in results['qsd'] \ + and len(results['qsd']['url_prefix']): + results['url_prefix'] = \ + NotifyNextcloud.unquote(results['qsd']['url_prefix']) + # Add our headers that the user can potentially over-ride if they wish # to to our returned result set and tidy entries by unquoting them results['headers'] = { diff --git a/lib/apprise/plugins/NotifyNextcloudTalk.py b/lib/apprise/plugins/NotifyNextcloudTalk.py index 18b191c2..4f6dc054 100644 --- a/lib/apprise/plugins/NotifyNextcloudTalk.py +++ b/lib/apprise/plugins/NotifyNextcloudTalk.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -96,6 +92,11 @@ class NotifyNextcloudTalk(NotifyBase): 'private': True, 'required': True, }, + 'target_room_id': { + 'name': _('Room ID'), + 'type': 'string', + 'map_to': 'targets', + }, 'targets': { 'name': _('Targets'), 'type': 'list:string', @@ -103,6 +104,14 @@ class NotifyNextcloudTalk(NotifyBase): }, }) + # Define our template arguments + template_args = dict(NotifyBase.template_args, **{ + 'url_prefix': { + 'name': _('URL Prefix'), + 'type': 'string', + }, + }) + # Define any kwargs we're using template_kwargs = { 'headers': { @@ -111,7 +120,7 @@ class NotifyNextcloudTalk(NotifyBase): }, } - def __init__(self, targets=None, headers=None, **kwargs): + def __init__(self, targets=None, headers=None, url_prefix=None, **kwargs): """ Initialize Nextcloud Talk Object """ @@ -122,11 +131,12 @@ class NotifyNextcloudTalk(NotifyBase): self.logger.warning(msg) raise TypeError(msg) + # Store our targets self.targets = parse_list(targets) - if len(self.targets) == 0: - msg = 'At least one Nextcloud Talk Room ID must be specified.' 
- self.logger.warning(msg) - raise TypeError(msg) + + # Support URL Prefix + self.url_prefix = '' if not url_prefix \ + else url_prefix.strip('/') self.headers = {} if headers: @@ -140,6 +150,12 @@ class NotifyNextcloudTalk(NotifyBase): Perform Nextcloud Talk Notification """ + if len(self.targets) == 0: + # There were no services to notify + self.logger.warning( + 'There were no Nextcloud Talk targets to notify.') + return False + # Prepare our Header headers = { 'User-Agent': self.app_id, @@ -171,13 +187,14 @@ class NotifyNextcloudTalk(NotifyBase): } # Nextcloud Talk URL - notify_url = '{schema}://{host}'\ + notify_url = '{schema}://{host}/{url_prefix}'\ '/ocs/v2.php/apps/spreed/api/v1/chat/{target}' notify_url = notify_url.format( schema='https' if self.secure else 'http', host=self.host if not isinstance(self.port, int) else '{}:{}'.format(self.host, self.port), + url_prefix=self.url_prefix, target=target, ) @@ -200,7 +217,8 @@ class NotifyNextcloudTalk(NotifyBase): verify=self.verify_certificate, timeout=self.request_timeout, ) - if r.status_code != requests.codes.created: + if r.status_code not in ( + requests.codes.created, requests.codes.ok): # We had a problem status_str = \ NotifyNextcloudTalk.http_response_code_lookup( @@ -240,6 +258,14 @@ class NotifyNextcloudTalk(NotifyBase): Returns the URL built dynamically based on specified arguments. 
""" + # Our default set of parameters + params = self.url_parameters(privacy=privacy, *args, **kwargs) + + # Append our headers into our parameters + params.update({'+{}'.format(k): v for k, v in self.headers.items()}) + if self.url_prefix: + params['url_prefix'] = self.url_prefix + # Determine Authentication auth = '{user}:{password}@'.format( user=NotifyNextcloudTalk.quote(self.user, safe=''), @@ -249,7 +275,7 @@ class NotifyNextcloudTalk(NotifyBase): default_port = 443 if self.secure else 80 - return '{schema}://{auth}{hostname}{port}/{targets}' \ + return '{schema}://{auth}{hostname}{port}/{targets}?{params}' \ .format( schema=self.secure_protocol if self.secure else self.protocol, @@ -261,8 +287,16 @@ class NotifyNextcloudTalk(NotifyBase): else ':{}'.format(self.port), targets='/'.join([NotifyNextcloudTalk.quote(x) for x in self.targets]), + params=NotifyNextcloudTalk.urlencode(params), ) + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + targets = len(self.targets) + return targets if targets else 1 + @staticmethod def parse_url(url): """ @@ -280,6 +314,12 @@ class NotifyNextcloudTalk(NotifyBase): results['targets'] = \ NotifyNextcloudTalk.split_path(results['fullpath']) + # Support URL Prefixes + if 'url_prefix' in results['qsd'] \ + and len(results['qsd']['url_prefix']): + results['url_prefix'] = \ + NotifyNextcloudTalk.unquote(results['qsd']['url_prefix']) + # Add our headers that the user can potentially over-ride if they wish # to to our returned result set and tidy entries by unquoting them results['headers'] = { diff --git a/lib/apprise/plugins/NotifyNotica.py b/lib/apprise/plugins/NotifyNotica.py index 90bf7ef1..f95baba3 100644 --- a/lib/apprise/plugins/NotifyNotica.py +++ b/lib/apprise/plugins/NotifyNotica.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. 
# Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -112,12 +108,12 @@ class NotifyNotica(NotifyBase): '{schema}://{user}:{password}@{host}:{port}/{token}', # Self-hosted notica servers (with custom path) - '{schema}://{host}{path}{token}', - '{schema}://{host}:{port}{path}{token}', - '{schema}://{user}@{host}{path}{token}', - '{schema}://{user}@{host}:{port}{path}{token}', - '{schema}://{user}:{password}@{host}{path}{token}', - '{schema}://{user}:{password}@{host}:{port}{path}{token}', + '{schema}://{host}{path}/{token}', + '{schema}://{host}:{port}/{path}/{token}', + '{schema}://{user}@{host}/{path}/{token}', + '{schema}://{user}@{host}:{port}{path}/{token}', + '{schema}://{user}:{password}@{host}{path}/{token}', + '{schema}://{user}:{password}@{host}:{port}/{path}/{token}', ) # Define our template tokens diff --git a/lib/apprise/plugins/NotifyNotifiarr.py b/lib/apprise/plugins/NotifyNotifiarr.py new file mode 100644 index 00000000..748e3b7a --- /dev/null +++ b/lib/apprise/plugins/NotifyNotifiarr.py @@ -0,0 +1,472 @@ +# -*- coding: utf-8 -*- +# BSD 2-Clause License +# +# Apprise - Push Notification Library. +# Copyright (c) 2023, Chris Caron +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# 1. 
Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# +# 2. Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+ +import re +import requests +from json import dumps +from itertools import chain + +from .NotifyBase import NotifyBase +from ..common import NotifyType +from ..AppriseLocale import gettext_lazy as _ +from ..common import NotifyImageSize +from ..utils import parse_list, parse_bool +from ..utils import validate_regex + +# Used to break path apart into list of channels +CHANNEL_LIST_DELIM = re.compile(r'[ \t\r\n,#\\/]+') + +CHANNEL_REGEX = re.compile( + r'^\s*(\#|\%35)?(?P[0-9]+)', re.I) + +# For API Details see: +# https://notifiarr.wiki/Client/Installation + +# Another good example: +# https://notifiarr.wiki/en/Website/ \ +# Integrations/Passthrough#payload-example-1 + + +class NotifyNotifiarr(NotifyBase): + """ + A wrapper for Notifiarr Notifications + """ + + # The default descriptive name associated with the Notification + service_name = 'Notifiarr' + + # The services URL + service_url = 'https://notifiarr.com/' + + # The default secure protocol + secure_protocol = 'notifiarr' + + # A URL that takes you to the setup/help of the specific protocol + setup_url = 'https://github.com/caronc/apprise/wiki/Notify_notifiarr' + + # The Notification URL + notify_url = 'https://notifiarr.com/api/v1/notification/apprise' + + # Notifiarr Throttling (knowing in advance reduces 429 responses) + # define('NOTIFICATION_LIMIT_SECOND_USER', 5); + # define('NOTIFICATION_LIMIT_SECOND_PATRON', 15); + + # Throttle requests ever so slightly + request_rate_per_sec = 0.04 + + # Allows the user to specify the NotifyImageSize object + image_size = NotifyImageSize.XY_256 + + # Define object templates + templates = ( + '{schema}://{apikey}/{targets}', + ) + + # Define our apikeys; these are the minimum apikeys required required to + # be passed into this function (as arguments). 
The syntax appends any + # previously defined in the base package and builds onto them + template_tokens = dict(NotifyBase.template_tokens, **{ + 'apikey': { + 'name': _('Token'), + 'type': 'string', + 'required': True, + 'private': True, + }, + 'target_channel': { + 'name': _('Target Channel'), + 'type': 'string', + 'prefix': '#', + 'map_to': 'targets', + }, + 'targets': { + 'name': _('Targets'), + 'type': 'list:string', + 'required': True, + }, + }) + + # Define our template arguments + template_args = dict(NotifyBase.template_args, **{ + 'key': { + 'alias_of': 'apikey', + }, + 'apikey': { + 'alias_of': 'apikey', + }, + 'discord_user': { + 'name': _('Ping Discord User'), + 'type': 'int', + }, + 'discord_role': { + 'name': _('Ping Discord Role'), + 'type': 'int', + }, + 'event': { + 'name': _('Discord Event ID'), + 'type': 'int', + }, + 'image': { + 'name': _('Include Image'), + 'type': 'bool', + 'default': False, + 'map_to': 'include_image', + }, + 'source': { + 'name': _('Source'), + 'type': 'string', + }, + 'from': { + 'alias_of': 'source' + }, + 'to': { + 'alias_of': 'targets', + }, + }) + + def __init__(self, apikey=None, include_image=None, + discord_user=None, discord_role=None, + event=None, targets=None, source=None, **kwargs): + """ + Initialize Notifiarr Object + + headers can be a dictionary of key/value pairs that you want to + additionally include as part of the server headers to post with + + """ + super().__init__(**kwargs) + + self.apikey = apikey + if not self.apikey: + msg = 'An invalid Notifiarr APIKey ' \ + '({}) was specified.'.format(apikey) + self.logger.warning(msg) + raise TypeError(msg) + + # Place a thumbnail image inline with the message body + self.include_image = include_image \ + if isinstance(include_image, bool) \ + else self.template_args['image']['default'] + + # Set up our user if specified + self.discord_user = 0 + if discord_user: + try: + self.discord_user = int(discord_user) + + except (ValueError, TypeError): + msg = 'An 
invalid Notifiarr User ID ' \ + '({}) was specified.'.format(discord_user) + self.logger.warning(msg) + raise TypeError(msg) + + # Set up our role if specified + self.discord_role = 0 + if discord_role: + try: + self.discord_role = int(discord_role) + + except (ValueError, TypeError): + msg = 'An invalid Notifiarr Role ID ' \ + '({}) was specified.'.format(discord_role) + self.logger.warning(msg) + raise TypeError(msg) + + # Prepare our source (if set) + self.source = validate_regex(source) + + self.event = 0 + if event: + try: + self.event = int(event) + + except (ValueError, TypeError): + msg = 'An invalid Notifiarr Discord Event ID ' \ + '({}) was specified.'.format(event) + self.logger.warning(msg) + raise TypeError(msg) + + # Prepare our targets + self.targets = { + 'channels': [], + 'invalid': [], + } + + for target in parse_list(targets): + result = CHANNEL_REGEX.match(target) + if result: + # Store role information + self.targets['channels'].append(int(result.group('channel'))) + continue + + self.logger.warning( + 'Dropped invalid channel ' + '({}) specified.'.format(target), + ) + self.targets['invalid'].append(target) + + return + + def url(self, privacy=False, *args, **kwargs): + """ + Returns the URL built dynamically based on specified arguments. 
+ """ + + # Define any URL parameters + params = { + 'image': 'yes' if self.include_image else 'no', + } + + if self.source: + params['source'] = self.source + + if self.discord_user: + params['discord_user'] = self.discord_user + + if self.discord_role: + params['discord_role'] = self.discord_role + + if self.event: + params['event'] = self.event + + # Extend our parameters + params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) + + return '{schema}://{apikey}' \ + '/{targets}?{params}'.format( + schema=self.secure_protocol, + apikey=self.pprint(self.apikey, privacy, safe=''), + targets='/'.join( + [NotifyNotifiarr.quote(x, safe='+#@') for x in chain( + # Channels + ['#{}'.format(x) for x in self.targets['channels']], + # Pass along the same invalid entries as were provided + self.targets['invalid'], + )]), + params=NotifyNotifiarr.urlencode(params), + ) + + def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): + """ + Perform Notifiarr Notification + """ + + if not self.targets['channels']: + # There were no services to notify + self.logger.warning( + 'There were no Notifiarr channels to notify.') + return False + + # No error to start with + has_error = False + + # Acquire image_url + image_url = self.image_url(notify_type) + + for idx, channel in enumerate(self.targets['channels']): + # prepare Notifiarr Object + payload = { + 'source': self.source if self.source else self.app_id, + 'type': notify_type, + 'notification': { + 'update': True if self.event else False, + 'name': self.app_id, + 'event': str(self.event) + if self.event else "", + }, + 'discord': { + 'color': self.color(notify_type), + 'ping': { + 'pingUser': self.discord_user + if not idx and self.discord_user else 0, + 'pingRole': self.discord_role + if not idx and self.discord_role else 0, + }, + 'text': { + 'title': title, + 'content': '', + 'description': body, + 'footer': self.app_desc, + }, + 'ids': { + 'channel': channel, + } + } + } + + if self.include_image 
and image_url: + payload['discord']['text']['icon'] = image_url + payload['discord']['images'] = { + 'thumbnail': image_url, + } + + if not self._send(payload): + has_error = True + + return not has_error + + def _send(self, payload): + """ + Send notification + """ + self.logger.debug('Notifiarr POST URL: %s (cert_verify=%r)' % ( + self.notify_url, self.verify_certificate, + )) + self.logger.debug('Notifiarr Payload: %s' % str(payload)) + + # Prepare HTTP Headers + headers = { + 'User-Agent': self.app_id, + 'Content-Type': 'application/json', + 'Accept': 'text/plain', + 'X-api-Key': self.apikey, + } + + # Always call throttle before any remote server i/o is made + self.throttle() + + try: + r = requests.post( + self.notify_url, + data=dumps(payload), + headers=headers, + verify=self.verify_certificate, + timeout=self.request_timeout, + ) + if r.status_code < 200 or r.status_code >= 300: + # We had a problem + status_str = \ + NotifyNotifiarr.http_response_code_lookup(r.status_code) + + self.logger.warning( + 'Failed to send Notifiarr %s notification: ' + '%serror=%s.', + status_str, + ', ' if status_str else '', + str(r.status_code)) + + self.logger.debug('Response Details:\r\n{}'.format(r.content)) + + # Return; we're done + return False + + else: + self.logger.info('Sent Notifiarr notification.') + + except requests.RequestException as e: + self.logger.warning( + 'A Connection error occurred sending Notifiarr ' + 'Chat notification to %s.' % self.host) + self.logger.debug('Socket Exception: %s' % str(e)) + + # Return; we're done + return False + + return True + + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + targets = len(self.targets['channels']) + len(self.targets['invalid']) + return targets if targets > 0 else 1 + + @staticmethod + def parse_url(url): + """ + Parses the URL and returns enough arguments that can allow + us to re-instantiate this object. 
+ + """ + results = NotifyBase.parse_url(url, verify_host=False) + if not results: + # We're done early as we couldn't load the results + return results + + # Get channels + results['targets'] = NotifyNotifiarr.split_path(results['fullpath']) + + if 'discord_user' in results['qsd'] and \ + len(results['qsd']['discord_user']): + results['discord_user'] = \ + NotifyNotifiarr.unquote( + results['qsd']['discord_user']) + + if 'discord_role' in results['qsd'] and \ + len(results['qsd']['discord_role']): + results['discord_role'] = \ + NotifyNotifiarr.unquote(results['qsd']['discord_role']) + + if 'event' in results['qsd'] and \ + len(results['qsd']['event']): + results['event'] = \ + NotifyNotifiarr.unquote(results['qsd']['event']) + + # Include images with our message + results['include_image'] = \ + parse_bool(results['qsd'].get('image', False)) + + # Track if we need to extract the hostname as a target + host_is_potential_target = False + + if 'source' in results['qsd'] and len(results['qsd']['source']): + results['source'] = \ + NotifyNotifiarr.unquote(results['qsd']['source']) + + elif 'from' in results['qsd'] and len(results['qsd']['from']): + results['source'] = \ + NotifyNotifiarr.unquote(results['qsd']['from']) + + # Set our apikey if found as an argument + if 'apikey' in results['qsd'] and len(results['qsd']['apikey']): + results['apikey'] = \ + NotifyNotifiarr.unquote(results['qsd']['apikey']) + + host_is_potential_target = True + + elif 'key' in results['qsd'] and len(results['qsd']['key']): + results['apikey'] = \ + NotifyNotifiarr.unquote(results['qsd']['key']) + + host_is_potential_target = True + + else: + # Pop the first element (this is the api key) + results['apikey'] = \ + NotifyNotifiarr.unquote(results['host']) + + if host_is_potential_target is True and results['host']: + results['targets'].append(NotifyNotifiarr.unquote(results['host'])) + + # Support the 'to' variable so that we can support rooms this way too + # The 'to' makes it easier to use 
yaml configuration + if 'to' in results['qsd'] and len(results['qsd']['to']): + results['targets'] += [x for x in filter( + bool, CHANNEL_LIST_DELIM.split( + NotifyNotifiarr.unquote(results['qsd']['to'])))] + + return results diff --git a/lib/apprise/plugins/NotifyNotifico.py b/lib/apprise/plugins/NotifyNotifico.py index 9b1661bf..8636e2e0 100644 --- a/lib/apprise/plugins/NotifyNotifico.py +++ b/lib/apprise/plugins/NotifyNotifico.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE diff --git a/lib/apprise/plugins/NotifyNtfy.py b/lib/apprise/plugins/NotifyNtfy.py index 7efe3487..ceab5a2a 100644 --- a/lib/apprise/plugins/NotifyNtfy.py +++ b/lib/apprise/plugins/NotifyNtfy.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -172,6 +168,9 @@ class NotifyNtfy(NotifyBase): # Default upstream/cloud host if none is defined cloud_notify_url = 'https://ntfy.sh' + # Support attachments + attachment_support = True + # Allows the user to specify the NotifyImageSize object image_size = NotifyImageSize.XY_256 @@ -405,14 +404,14 @@ class NotifyNtfy(NotifyBase): # Retrieve our topic topic = topics.pop() - if attach: + if attach and self.attachment_support: # We need to upload our payload first so that we can source it # in remaining messages for no, attachment in enumerate(attach): - # First message only includes the text - _body = body if not no else None - _title = title if not no else None + # First message only includes the text (if defined) + _body = body if not no and body else None + _title = title if not no and title else None # Perform some simple error checking if not attachment: @@ -453,10 +452,6 @@ class NotifyNtfy(NotifyBase): 'User-Agent': self.app_id, } - # Some default values for our request object to which we'll update - # depending on what our payload is - files = None - # See https://ntfy.sh/docs/publish/#publish-as-json data = {} @@ -494,11 +489,23 @@ class NotifyNtfy(NotifyBase): data['topic'] = topic virt_payload = data + if self.attach: + virt_payload['attach'] = self.attach + + if self.filename: + virt_payload['filename'] = self.filename + else: # Point our payload to our parameters virt_payload = params notify_url += '/{topic}'.format(topic=topic) + # Prepare our Header + virt_payload['filename'] = attach.name + + with open(attach.path, 'rb') as fp: + data = fp.read() + if image_url: headers['X-Icon'] = image_url @@ -523,18 +530,6 @@ class NotifyNtfy(NotifyBase): if self.__tags: headers['X-Tags'] = ",".join(self.__tags) - if 
isinstance(attach, AttachBase): - # Prepare our Header - params['filename'] = attach.name - - # prepare our files object - files = {'file': (attach.name, open(attach.path, 'rb'))} - - elif self.attach is not None: - data['attach'] = self.attach - if self.filename is not None: - data['filename'] = self.filename - self.logger.debug('ntfy POST URL: %s (cert_verify=%r)' % ( notify_url, self.verify_certificate, )) @@ -547,13 +542,15 @@ class NotifyNtfy(NotifyBase): # Default response type response = None + if not attach: + data = dumps(data) + try: r = requests.post( notify_url, params=params if params else None, - data=dumps(data) if data else None, + data=data, headers=headers, - files=files, auth=auth, verify=self.verify_certificate, timeout=self.request_timeout, @@ -608,7 +605,6 @@ class NotifyNtfy(NotifyBase): notify_url) + 'notification.' ) self.logger.debug('Socket Exception: %s' % str(e)) - return False, response except (OSError, IOError) as e: self.logger.warning( @@ -616,13 +612,8 @@ class NotifyNtfy(NotifyBase): attach.name if isinstance(attach, AttachBase) else virt_payload)) self.logger.debug('I/O Exception: %s' % str(e)) - return False, response - finally: - # Close our file (if it's open) stored in the second element - # of our files tuple (index 1) - if files: - files['file'][1].close() + return False, response def url(self, privacy=False, *args, **kwargs): """ @@ -698,6 +689,12 @@ class NotifyNtfy(NotifyBase): params=NotifyNtfy.urlencode(params) ) + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + return len(self.topics) + @staticmethod def parse_url(url): """ diff --git a/lib/apprise/plugins/NotifyOffice365.py b/lib/apprise/plugins/NotifyOffice365.py index 0778dd85..f445bc49 100644 --- a/lib/apprise/plugins/NotifyOffice365.py +++ b/lib/apprise/plugins/NotifyOffice365.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. 
# Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -148,8 +144,13 @@ class NotifyOffice365(NotifyBase): 'private': True, 'required': True, }, + 'target_email': { + 'name': _('Target Email'), + 'type': 'string', + 'map_to': 'targets', + }, 'targets': { - 'name': _('Target Emails'), + 'name': _('Targets'), 'type': 'list:string', }, }) @@ -596,6 +597,12 @@ class NotifyOffice365(NotifyBase): safe='') for e in self.targets]), params=NotifyOffice365.urlencode(params)) + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + return len(self.targets) + @staticmethod def parse_url(url): """ diff --git a/lib/apprise/plugins/NotifyOneSignal.py b/lib/apprise/plugins/NotifyOneSignal.py index 70cf0a18..39dd7f20 100644 --- a/lib/apprise/plugins/NotifyOneSignal.py +++ b/lib/apprise/plugins/NotifyOneSignal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -51,7 +47,7 @@ from ..utils import is_email from ..AppriseLocale import gettext_lazy as _ -class OneSignalCategory(NotifyBase): +class OneSignalCategory: """ We define the different category types that we can notify via OneSignal """ @@ -92,7 +88,7 @@ class NotifyOneSignal(NotifyBase): image_size = NotifyImageSize.XY_72 # The maximum allowable batch sizes per message - maximum_batch_size = 2000 + default_batch_size = 2000 # Define object templates templates = ( @@ -121,7 +117,7 @@ class NotifyOneSignal(NotifyBase): 'private': True, 'required': True, }, - 'target_device': { + 'target_player': { 'name': _('Target Player ID'), 'type': 'string', 'map_to': 'targets', @@ -146,6 +142,7 @@ class NotifyOneSignal(NotifyBase): 'targets': { 'name': _('Targets'), 'type': 'list:string', + 'required': True, }, }) @@ -204,7 +201,7 @@ class NotifyOneSignal(NotifyBase): raise TypeError(msg) # Prepare Batch Mode Flag - self.batch_size = self.maximum_batch_size if batch else 1 + self.batch_size = self.default_batch_size if batch else 1 # Place a thumbnail image inline with the message body self.include_image = include_image @@ -432,6 +429,26 @@ class NotifyOneSignal(NotifyBase): params=NotifyOneSignal.urlencode(params), ) + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + # + # Factor batch into calculation + # + if self.batch_size > 1: + # Batches can only be sent by group (you can't combine groups into + # a single batch) + total_targets = 0 + for k, m in self.targets.items(): + targets = len(m) + total_targets += int(targets / self.batch_size) + \ + (1 if targets % self.batch_size else 0) + return total_targets + + # Normal batch count; just count the targets + return sum([len(m) for _, m in 
self.targets.items()]) + @staticmethod def parse_url(url): """ diff --git a/lib/apprise/plugins/NotifyOpsgenie.py b/lib/apprise/plugins/NotifyOpsgenie.py index 858056b2..29cd0a20 100644 --- a/lib/apprise/plugins/NotifyOpsgenie.py +++ b/lib/apprise/plugins/NotifyOpsgenie.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -172,7 +168,7 @@ class NotifyOpsgenie(NotifyBase): opsgenie_default_region = OpsgenieRegion.US # The maximum allowable targets within a notification - maximum_batch_size = 50 + default_batch_size = 50 # Define object templates templates = ( @@ -308,7 +304,7 @@ class NotifyOpsgenie(NotifyBase): self.details.update(details) # Prepare Batch Mode Flag - self.batch_size = self.maximum_batch_size if batch else 1 + self.batch_size = self.default_batch_size if batch else 1 # Assign our tags (if defined) self.__tags = parse_list(tags) @@ -536,6 +532,20 @@ class NotifyOpsgenie(NotifyBase): for x in self.targets]), params=NotifyOpsgenie.urlencode(params)) + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + # + # Factor batch into calculation + # + targets = len(self.targets) + if self.batch_size > 1: + targets = int(targets / self.batch_size) + \ + (1 if targets % self.batch_size else 0) + + return targets if targets > 
0 else 1 + @staticmethod def parse_url(url): """ diff --git a/lib/apprise/plugins/NotifyPagerDuty.py b/lib/apprise/plugins/NotifyPagerDuty.py index a2417275..1592f93c 100644 --- a/lib/apprise/plugins/NotifyPagerDuty.py +++ b/lib/apprise/plugins/NotifyPagerDuty.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -142,7 +138,7 @@ class NotifyPagerDuty(NotifyBase): }, # Optional but triggers V2 API 'integrationkey': { - 'name': _('Routing Key'), + 'name': _('Integration Key'), 'type': 'string', 'private': True, 'required': True diff --git a/lib/apprise/plugins/NotifyPagerTree.py b/lib/apprise/plugins/NotifyPagerTree.py index 65a19f61..a1579c30 100644 --- a/lib/apprise/plugins/NotifyPagerTree.py +++ b/lib/apprise/plugins/NotifyPagerTree.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE diff --git a/lib/apprise/plugins/NotifyParsePlatform.py b/lib/apprise/plugins/NotifyParsePlatform.py index 69efb61c..f3d7d635 100644 --- a/lib/apprise/plugins/NotifyParsePlatform.py +++ b/lib/apprise/plugins/NotifyParsePlatform.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -30,8 +26,6 @@ # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. -# Official API reference: https://developer.gitter.im/docs/user-resource - import re import requests from json import dumps diff --git a/lib/apprise/plugins/NotifyPopcornNotify.py b/lib/apprise/plugins/NotifyPopcornNotify.py index 9ea0c77e..47a29614 100644 --- a/lib/apprise/plugins/NotifyPopcornNotify.py +++ b/lib/apprise/plugins/NotifyPopcornNotify.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. 
# Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -93,6 +89,7 @@ class NotifyPopcornNotify(NotifyBase): 'targets': { 'name': _('Targets'), 'type': 'list:string', + 'required': True, } }) @@ -265,6 +262,21 @@ class NotifyPopcornNotify(NotifyBase): [NotifyPopcornNotify.quote(x, safe='') for x in self.targets]), params=NotifyPopcornNotify.urlencode(params)) + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + # + # Factor batch into calculation + # + batch_size = 1 if not self.batch else self.default_batch_size + targets = len(self.targets) + if batch_size > 1: + targets = int(targets / batch_size) + \ + (1 if targets % batch_size else 0) + + return targets + @staticmethod def parse_url(url): """ diff --git a/lib/apprise/plugins/NotifyProwl.py b/lib/apprise/plugins/NotifyProwl.py index cebe0701..80f0aca3 100644 --- a/lib/apprise/plugins/NotifyProwl.py +++ b/lib/apprise/plugins/NotifyProwl.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. 
Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE diff --git a/lib/apprise/plugins/NotifyPushBullet.py b/lib/apprise/plugins/NotifyPushBullet.py index c37532d1..61e8db2d 100644 --- a/lib/apprise/plugins/NotifyPushBullet.py +++ b/lib/apprise/plugins/NotifyPushBullet.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -75,6 +71,9 @@ class NotifyPushBullet(NotifyBase): # PushBullet uses the http protocol with JSON requests notify_url = 'https://api.pushbullet.com/v2/{}' + # Support attachments + attachment_support = True + # Define object templates templates = ( '{schema}://{accesstoken}', @@ -150,7 +149,7 @@ class NotifyPushBullet(NotifyBase): # Build a list of our attachments attachments = [] - if attach: + if attach and self.attachment_support: # We need to upload our payload first so that we can source it # in remaining messages for attachment in attach: @@ -261,14 +260,15 @@ class NotifyPushBullet(NotifyBase): "PushBullet recipient {} parsed as a device" .format(recipient)) - okay, response = self._send( - self.notify_url.format('pushes'), payload) - if not okay: - has_error = True - continue + if body: + okay, response = self._send( + self.notify_url.format('pushes'), payload) + if not okay: + has_error = True + continue - self.logger.info( - 'Sent PushBullet notification to "%s".' % (recipient)) + self.logger.info( + 'Sent PushBullet notification to "%s".' 
% (recipient)) for attach_payload in attachments: # Send our attachments to our same user (already prepared as @@ -406,6 +406,12 @@ class NotifyPushBullet(NotifyBase): targets=targets, params=NotifyPushBullet.urlencode(params)) + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + return len(self.targets) + @staticmethod def parse_url(url): """ diff --git a/lib/apprise/plugins/NotifyPushDeer.py b/lib/apprise/plugins/NotifyPushDeer.py new file mode 100644 index 00000000..76805c34 --- /dev/null +++ b/lib/apprise/plugins/NotifyPushDeer.py @@ -0,0 +1,218 @@ +# -*- coding: utf-8 -*- +# BSD 2-Clause License +# +# Apprise - Push Notification Library. +# Copyright (c) 2023, Chris Caron +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# 1. Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# +# 2. Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +import requests + +from ..common import NotifyType +from .NotifyBase import NotifyBase +from ..utils import validate_regex +from ..AppriseLocale import gettext_lazy as _ + +# Syntax: +# schan://{key}/ + + +class NotifyPushDeer(NotifyBase): + """ + A wrapper for PushDeer Notifications + """ + + # The default descriptive name associated with the Notification + service_name = 'PushDeer' + + # The services URL + service_url = 'https://www.pushdeer.com/' + + # Insecure Protocol Access + protocol = 'pushdeer' + + # Secure Protocol + secure_protocol = 'pushdeers' + + # A URL that takes you to the setup/help of the specific protocol + setup_url = 'https://github.com/caronc/apprise/wiki/Notify_PushDeer' + + # Default hostname + default_hostname = 'api2.pushdeer.com' + + # PushDeer API + notify_url = '{schema}://{host}:{port}/message/push?pushkey={pushKey}' + + # Define object templates + templates = ( + '{schema}://{pushkey}', + '{schema}://{host}/{pushkey}', + '{schema}://{host}:{port}/{pushkey}', + ) + + # Define our template tokens + template_tokens = dict(NotifyBase.template_tokens, **{ + 'host': { + 'name': _('Hostname'), + 'type': 'string', + }, + 'port': { + 'name': _('Port'), + 'type': 'int', + 'min': 1, + 'max': 65535, + }, + 'pushkey': { + 'name': _('Pushkey'), + 'type': 'string', + 'private': True, + 'required': True, + 'regex': (r'^[a-z0-9]+$', 'i'), + }, + }) + + def __init__(self, pushkey, **kwargs): + """ + Initialize PushDeer 
Object + """ + super().__init__(**kwargs) + + # PushKey (associated with project) + self.push_key = validate_regex( + pushkey, *self.template_tokens['pushkey']['regex']) + if not self.push_key: + msg = 'An invalid PushDeer API Pushkey ' \ + '({}) was specified.'.format(pushkey) + self.logger.warning(msg) + raise TypeError(msg) + + def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): + """ + Perform PushDeer Notification + """ + + # Prepare our persistent_notification.create payload + payload = { + 'text': title if title else body, + 'type': 'text', + 'desp': body if title else '', + } + + # Set our schema + schema = 'https' if self.secure else 'http' + + # Set host + host = self.default_hostname + if self.host: + host = self.host + + # Set port + port = 443 if self.secure else 80 + if self.port: + port = self.port + + # Our Notification URL + notify_url = self.notify_url.format( + schema=schema, host=host, port=port, pushKey=self.push_key) + + # Some Debug Logging + self.logger.debug('PushDeer URL: {} (cert_verify={})'.format( + notify_url, self.verify_certificate)) + self.logger.debug('PushDeer Payload: {}'.format(payload)) + + # Always call throttle before any remote server i/o is made + self.throttle() + + try: + r = requests.post( + notify_url, + data=payload, + timeout=self.request_timeout, + ) + + if r.status_code != requests.codes.ok: + # We had a problem + status_str = \ + NotifyPushDeer.http_response_code_lookup( + r.status_code) + + self.logger.warning( + 'Failed to send PushDeer notification: ' + '{}{}error={}.'.format( + status_str, + ', ' if status_str else '', + r.status_code)) + + self.logger.debug( + 'Response Details:\r\n{}'.format(r.content)) + return False + + else: + self.logger.info('Sent PushDeer notification.') + + except requests.RequestException as e: + self.logger.warning( + 'A Connection error occured sending PushDeer ' + 'notification.' 
+ ) + self.logger.debug('Socket Exception: %s' % str(e)) + return False + + return True + + def url(self, privacy=False): + """ + Returns the URL built dynamically based on specified arguments. + """ + + if self.host: + url = '{schema}://{host}{port}/{pushkey}' + else: + url = '{schema}://{pushkey}' + + return url.format( + schema=self.secure_protocol if self.secure else self.protocol, + host=self.host, + port='' if not self.port else ':{}'.format(self.port), + pushkey=self.pprint(self.push_key, privacy, safe='')) + + @staticmethod + def parse_url(url): + """ + Parses the URL and returns enough arguments that can allow + us to substantiate this object. + """ + results = NotifyBase.parse_url(url, verify_host=False) + if not results: + # We're done early as we couldn't parse the URL + return results + + fullpaths = NotifyPushDeer.split_path(results['fullpath']) + + if len(fullpaths) == 0: + results['pushkey'] = results['host'] + results['host'] = None + else: + results['pushkey'] = fullpaths.pop() + + return results diff --git a/lib/apprise/plugins/NotifyPushMe.py b/lib/apprise/plugins/NotifyPushMe.py new file mode 100644 index 00000000..8ef3c79c --- /dev/null +++ b/lib/apprise/plugins/NotifyPushMe.py @@ -0,0 +1,221 @@ +# -*- coding: utf-8 -*- +# BSD 2-Clause License +# +# Apprise - Push Notification Library. +# Copyright (c) 2023, Chris Caron +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# 1. Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# +# 2. Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +import requests + +from .NotifyBase import NotifyBase +from ..common import NotifyType +from ..common import NotifyFormat +from ..utils import validate_regex +from ..utils import parse_bool +from ..AppriseLocale import gettext_lazy as _ + + +class NotifyPushMe(NotifyBase): + """ + A wrapper for PushMe Notifications + """ + + # The default descriptive name associated with the Notification + service_name = 'PushMe' + + # The services URL + service_url = 'https://push.i-i.me/' + + # Insecure protocol (for those self hosted requests) + protocol = 'pushme' + + # A URL that takes you to the setup/help of the specific protocol + setup_url = 'https://github.com/caronc/apprise/wiki/Notify_pushme' + + # PushMe URL + notify_url = 'https://push.i-i.me/' + + # Define object templates + templates = ( + '{schema}://{token}', + ) + + # Define our template tokens + template_tokens = dict(NotifyBase.template_tokens, **{ + 'token': { + 'name': _('Token'), + 'type': 'string', + 'private': True, + 'required': True, + }, + }) + + # Define our template arguments + template_args = dict(NotifyBase.template_args, **{ + 'token': { + 'alias_of': 'token', + }, + 'push_key': { + 'alias_of': 'token', + }, + 
'status': { + 'name': _('Show Status'), + 'type': 'bool', + 'default': True, + }, + }) + + def __init__(self, token, status=None, **kwargs): + """ + Initialize PushMe Object + """ + super().__init__(**kwargs) + + # Token (associated with project) + self.token = validate_regex(token) + if not self.token: + msg = 'An invalid PushMe Token ' \ + '({}) was specified.'.format(token) + self.logger.warning(msg) + raise TypeError(msg) + + # Set Status type + self.status = status + + return + + def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): + """ + Perform PushMe Notification + """ + + headers = { + 'User-Agent': self.app_id, + } + + # Prepare our payload + params = { + 'push_key': self.token, + 'title': title if not self.status + else '{} {}'.format(self.asset.ascii(notify_type), title), + 'content': body, + 'type': 'markdown' + if self.notify_format == NotifyFormat.MARKDOWN else 'text' + } + + self.logger.debug('PushMe POST URL: %s (cert_verify=%r)' % ( + self.notify_url, self.verify_certificate, + )) + self.logger.debug('PushMe Payload: %s' % str(params)) + + # Always call throttle before any remote server i/o is made + self.throttle() + + try: + r = requests.post( + self.notify_url, + params=params, + headers=headers, + verify=self.verify_certificate, + timeout=self.request_timeout, + ) + if r.status_code != requests.codes.ok: + # We had a problem + status_str = \ + NotifyPushMe.http_response_code_lookup(r.status_code) + + self.logger.warning( + 'Failed to send PushMe notification:' + '{}{}error={}.'.format( + status_str, + ', ' if status_str else '', + r.status_code)) + + self.logger.debug('Response Details:\r\n{}'.format(r.content)) + + # Return; we're done + return False + + else: + self.logger.info('Sent PushMe notification.') + + except requests.RequestException as e: + self.logger.warning( + 'A Connection error occurred sending PushMe notification.', + ) + self.logger.debug('Socket Exception: %s' % str(e)) + + # Return; we're done + return 
False + + return True + + def url(self, privacy=False, *args, **kwargs): + """ + Returns the URL built dynamically based on specified arguments. + """ + + # Define any URL parameters + params = { + 'status': 'yes' if self.status else 'no', + } + + # Extend our parameters + params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) + + # Official URLs are easy to assemble + return '{schema}://{token}/?{params}'.format( + schema=self.protocol, + token=self.pprint(self.token, privacy, safe=''), + params=NotifyPushMe.urlencode(params), + ) + + @staticmethod + def parse_url(url): + """ + Parses the URL and returns enough arguments that can allow + us to re-instantiate this object. + + """ + results = NotifyBase.parse_url(url, verify_host=False) + if not results: + # We're done early as we couldn't load the results + return results + + # Store our token using the host + results['token'] = NotifyPushMe.unquote(results['host']) + + # The 'token' makes it easier to use yaml configuration + if 'token' in results['qsd'] and len(results['qsd']['token']): + results['token'] = NotifyPushMe.unquote(results['qsd']['token']) + + elif 'push_key' in results['qsd'] and len(results['qsd']['push_key']): + # Support 'push_key' if specified + results['token'] = NotifyPushMe.unquote(results['qsd']['push_key']) + + # Get status switch + results['status'] = \ + parse_bool(results['qsd'].get('status', True)) + + return results diff --git a/lib/apprise/plugins/NotifyPushSafer.py b/lib/apprise/plugins/NotifyPushSafer.py index 48a0f3bb..9873bd8e 100644 --- a/lib/apprise/plugins/NotifyPushSafer.py +++ b/lib/apprise/plugins/NotifyPushSafer.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. 
Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -336,6 +332,9 @@ class NotifyPushSafer(NotifyBase): # The default secure protocol secure_protocol = 'psafers' + # Support attachments + attachment_support = True + # Number of requests to a allow per second request_rate_per_sec = 1.2 @@ -546,7 +545,7 @@ class NotifyPushSafer(NotifyBase): # Initialize our list of attachments attachments = [] - if attach: + if attach and self.attachment_support: # We need to upload our payload first so that we can source it # in remaining messages for attachment in attach: @@ -794,6 +793,12 @@ class NotifyPushSafer(NotifyBase): targets=targets, params=NotifyPushSafer.urlencode(params)) + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + return len(self.targets) + @staticmethod def parse_url(url): """ diff --git a/lib/apprise/plugins/NotifyPushed.py b/lib/apprise/plugins/NotifyPushed.py index 822ea1ad..96e2e89d 100644 --- a/lib/apprise/plugins/NotifyPushed.py +++ b/lib/apprise/plugins/NotifyPushed.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -329,6 +325,13 @@ class NotifyPushed(NotifyBase): )]), params=NotifyPushed.urlencode(params)) + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + targets = len(self.channels) + len(self.users) + return targets if targets > 0 else 1 + @staticmethod def parse_url(url): """ diff --git a/lib/apprise/plugins/NotifyPushjet.py b/lib/apprise/plugins/NotifyPushjet.py index c6e36a39..50ee16e4 100644 --- a/lib/apprise/plugins/NotifyPushjet.py +++ b/lib/apprise/plugins/NotifyPushjet.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE diff --git a/lib/apprise/plugins/NotifyPushover.py b/lib/apprise/plugins/NotifyPushover.py index 1e943db1..4a76e7d5 100644 --- a/lib/apprise/plugins/NotifyPushover.py +++ b/lib/apprise/plugins/NotifyPushover.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. 
# Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -32,6 +28,7 @@ import re import requests +from itertools import chain from .NotifyBase import NotifyBase from ..common import NotifyType @@ -46,7 +43,7 @@ from ..attachment.AttachBase import AttachBase PUSHOVER_SEND_TO_ALL = 'ALL_DEVICES' # Used to detect a Device -VALIDATE_DEVICE = re.compile(r'^[a-z0-9_]{1,25}$', re.I) +VALIDATE_DEVICE = re.compile(r'^\s*(?P[a-z0-9_-]{1,25})\s*$', re.I) # Priorities @@ -164,6 +161,9 @@ class NotifyPushover(NotifyBase): # Pushover uses the http protocol with JSON requests notify_url = 'https://api.pushover.net/1/messages.json' + # Support attachments + attachment_support = True + # The maximum allowable characters allowed in the body per message body_maxlen = 1024 @@ -201,7 +201,7 @@ class NotifyPushover(NotifyBase): 'target_device': { 'name': _('Target Device'), 'type': 'string', - 'regex': (r'^[a-z0-9_]{1,25}$', 'i'), + 'regex': (r'^[a-z0-9_-]{1,25}$', 'i'), 'map_to': 'targets', }, 'targets': { @@ -276,10 +276,30 @@ class NotifyPushover(NotifyBase): self.logger.warning(msg) raise TypeError(msg) - self.targets = parse_list(targets) - if len(self.targets) == 0: + # Track our valid devices + targets = parse_list(targets) + + # Track any invalid entries + self.invalid_targets = list() + + if len(targets) == 0: self.targets = (PUSHOVER_SEND_TO_ALL, ) + else: + self.targets = [] + for target in targets: + result = 
VALIDATE_DEVICE.match(target) + if result: + # Store device information + self.targets.append(result.group('device')) + continue + + self.logger.warning( + 'Dropped invalid Pushover device ' + '({}) specified.'.format(target), + ) + self.invalid_targets.append(target) + # Setup supplemental url self.supplemental_url = supplemental_url self.supplemental_url_title = supplemental_url_title @@ -288,9 +308,8 @@ class NotifyPushover(NotifyBase): self.sound = NotifyPushover.default_pushover_sound \ if not isinstance(sound, str) else sound.lower() if self.sound and self.sound not in PUSHOVER_SOUNDS: - msg = 'The sound specified ({}) is invalid.'.format(sound) - self.logger.warning(msg) - raise TypeError(msg) + msg = 'Using custom sound specified ({}). '.format(sound) + self.logger.debug(msg) # The Priority of the message self.priority = int( @@ -338,77 +357,67 @@ class NotifyPushover(NotifyBase): Perform Pushover Notification """ - # error tracking (used for function return) - has_error = False + if not self.targets: + # There were no services to notify + self.logger.warning( + 'There were no Pushover targets to notify.') + return False - # Create a copy of the devices list - devices = list(self.targets) - while len(devices): - device = devices.pop(0) + # prepare JSON Object + payload = { + 'token': self.token, + 'user': self.user_key, + 'priority': str(self.priority), + 'title': title if title else self.app_desc, + 'message': body, + 'device': ','.join(self.targets), + 'sound': self.sound, + } - if VALIDATE_DEVICE.match(device) is None: - self.logger.warning( - 'The device specified (%s) is invalid.' 
% device, - ) + if self.supplemental_url: + payload['url'] = self.supplemental_url - # Mark our failure - has_error = True - continue + if self.supplemental_url_title: + payload['url_title'] = self.supplemental_url_title - # prepare JSON Object - payload = { - 'token': self.token, - 'user': self.user_key, - 'priority': str(self.priority), - 'title': title if title else self.app_desc, - 'message': body, - 'device': device, - 'sound': self.sound, - } + if self.notify_format == NotifyFormat.HTML: + # https://pushover.net/api#html + payload['html'] = 1 - if self.supplemental_url: - payload['url'] = self.supplemental_url - if self.supplemental_url_title: - payload['url_title'] = self.supplemental_url_title + elif self.notify_format == NotifyFormat.MARKDOWN: + payload['message'] = convert_between( + NotifyFormat.MARKDOWN, NotifyFormat.HTML, body) + payload['html'] = 1 - if self.notify_format == NotifyFormat.HTML: - # https://pushover.net/api#html - payload['html'] = 1 - elif self.notify_format == NotifyFormat.MARKDOWN: - payload['message'] = convert_between( - NotifyFormat.MARKDOWN, NotifyFormat.HTML, body) - payload['html'] = 1 + if self.priority == PushoverPriority.EMERGENCY: + payload.update({'retry': self.retry, 'expire': self.expire}) - if self.priority == PushoverPriority.EMERGENCY: - payload.update({'retry': self.retry, 'expire': self.expire}) - - if attach: - # Create a copy of our payload - _payload = payload.copy() - - # Send with attachments - for attachment in attach: - # Simple send - if not self._send(_payload, attachment): - # Mark our failure - has_error = True - # clean exit from our attachment loop - break + if attach and self.attachment_support: + # Create a copy of our payload + _payload = payload.copy() + # Send with attachments + for no, attachment in enumerate(attach): + if no or not body: # To handle multiple attachments, clean up our message - _payload['title'] = '...' 
_payload['message'] = attachment.name - # No need to alarm for each consecutive attachment uploaded - # afterwards - _payload['sound'] = PushoverSound.NONE - else: - # Simple send - if not self._send(payload): + if not self._send(_payload, attachment): # Mark our failure - has_error = True + return False - return not has_error + # Clear our title if previously set + _payload['title'] = '' + + # No need to alarm for each consecutive attachment uploaded + # afterwards + _payload['sound'] = PushoverSound.NONE + + else: + # Simple send + return self._send(payload) + + return True def _send(self, payload, attach=None): """ @@ -562,8 +571,9 @@ class NotifyPushover(NotifyBase): params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) # Escape our devices - devices = '/'.join([NotifyPushover.quote(x, safe='') - for x in self.targets]) + devices = '/'.join( + [NotifyPushover.quote(x, safe='') + for x in chain(self.targets, self.invalid_targets)]) if devices == PUSHOVER_SEND_TO_ALL: # keyword is reserved for internal usage only; it's safe to remove diff --git a/lib/apprise/plugins/NotifyPushy.py b/lib/apprise/plugins/NotifyPushy.py new file mode 100644 index 00000000..2a8a456b --- /dev/null +++ b/lib/apprise/plugins/NotifyPushy.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# BSD 2-Clause License +# +# Apprise - Push Notification Library. +# Copyright (c) 2023, Chris Caron +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# 1. Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# +# 2. Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +# API reference: https://pushy.me/docs/api/send-notifications +import re +import requests +from itertools import chain + +from json import dumps, loads +from .NotifyBase import NotifyBase +from ..common import NotifyType +from ..utils import parse_list +from ..utils import validate_regex +from ..AppriseLocale import gettext_lazy as _ + +# Used to detect a Device and Topic +VALIDATE_DEVICE = re.compile(r'^@(?P[a-z0-9]+)$', re.I) +VALIDATE_TOPIC = re.compile(r'^[#]?(?P[a-z0-9]+)$', re.I) + +# Extend HTTP Error Messages +PUSHY_HTTP_ERROR_MAP = { + 401: 'Unauthorized - Invalid Token.', +} + + +class NotifyPushy(NotifyBase): + """ + A wrapper for Pushy Notifications + """ + + # The default descriptive name associated with the Notification + service_name = 'Pushy' + + # The services URL + service_url = 'https://pushy.me/' + + # All Pushy requests are secure + secure_protocol = 'pushy' + + # A URL that takes you to the setup/help of the specific protocol + setup_url = 'https://github.com/caronc/apprise/wiki/Notify_pushy' + + # Pushy uses the http protocol with JSON requests + notify_url = 'https://api.pushy.me/push?api_key={apikey}' + + # The maximum allowable characters allowed in the body 
per message + body_maxlen = 4096 + + # Define object templates + templates = ( + '{schema}://{apikey}/{targets}', + ) + + # Define our template tokens + template_tokens = dict(NotifyBase.template_tokens, **{ + 'apikey': { + 'name': _('Secret API Key'), + 'type': 'string', + 'private': True, + 'required': True, + }, + 'target_device': { + 'name': _('Target Device'), + 'type': 'string', + 'prefix': '@', + 'map_to': 'targets', + }, + 'target_topic': { + 'name': _('Target Topic'), + 'type': 'string', + 'prefix': '#', + 'map_to': 'targets', + }, + 'targets': { + 'name': _('Targets'), + 'type': 'list:string', + 'required': True, + }, + }) + + # Define our template arguments + template_args = dict(NotifyBase.template_args, **{ + 'sound': { + # Specify something like ping.aiff + 'name': _('Sound'), + 'type': 'string', + }, + 'badge': { + 'name': _('Badge'), + 'type': 'int', + 'min': 0, + }, + 'to': { + 'alias_of': 'targets', + }, + 'key': { + 'alias_of': 'apikey', + }, + }) + + def __init__(self, apikey, targets=None, sound=None, badge=None, **kwargs): + """ + Initialize Pushy Object + """ + super().__init__(**kwargs) + + # Access Token (associated with project) + self.apikey = validate_regex(apikey) + if not self.apikey: + msg = 'An invalid Pushy Secret API Key ' \ + '({}) was specified.'.format(apikey) + self.logger.warning(msg) + raise TypeError(msg) + + # Get our targets + self.devices = [] + self.topics = [] + + for target in parse_list(targets): + result = VALIDATE_TOPIC.match(target) + if result: + self.topics.append(result.group('topic')) + continue + + result = VALIDATE_DEVICE.match(target) + if result: + self.devices.append(result.group('device')) + continue + + self.logger.warning( + 'Dropped invalid topic/device ' + '({}) specified.'.format(target), + ) + + # Setup our sound + self.sound = sound + + # Badge + try: + # Acquire our badge count if we can: + # - We accept both the integer form as well as a string + # representation + self.badge = int(badge) + if 
self.badge < 0: + raise ValueError() + + except TypeError: + # NoneType means use Default; this is an okay exception + self.badge = None + + except ValueError: + self.badge = None + self.logger.warning( + 'The specified Pushy badge ({}) is not valid ', badge) + + return + + def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): + """ + Perform Pushy Notification + """ + + if len(self.topics) + len(self.devices) == 0: + # There were no services to notify + self.logger.warning('There were no Pushy targets to notify.') + return False + + # error tracking (used for function return) + has_error = False + + # Default Header + headers = { + 'User-Agent': self.app_id, + 'Content-Type': 'application/json', + 'Accepts': 'application/json', + } + + # Our URL + notify_url = self.notify_url.format(apikey=self.apikey) + + # Default content response object + content = {} + + # Create a copy of targets (topics and devices) + targets = list(self.topics) + list(self.devices) + while len(targets): + target = targets.pop(0) + + # prepare JSON Object + payload = { + # Mandatory fields + 'to': target, + "data": { + "message": body, + }, + "notification": { + 'body': body, + } + } + + # Optional payload items + if title: + payload['notification']['title'] = title + + if self.sound: + payload['notification']['sound'] = self.sound + + if self.badge is not None: + payload['notification']['badge'] = self.badge + + self.logger.debug('Pushy POST URL: %s (cert_verify=%r)' % ( + notify_url, self.verify_certificate, + )) + self.logger.debug('Pushy Payload: %s' % str(payload)) + + # Always call throttle before any remote server i/o is made + self.throttle() + + try: + r = requests.post( + notify_url, + data=dumps(payload), + headers=headers, + verify=self.verify_certificate, + timeout=self.request_timeout, + ) + + # Sample response + # See: https://pushy.me/docs/api/send-notifications + # { + # "success": true, + # "id": "5ea9b214b47cad768a35f13a", + # "info": { + # "devices": 1 + 
# "failed": ['abc'] + # } + # } + try: + content = loads(r.content) + + except (AttributeError, TypeError, ValueError): + # ValueError = r.content is Unparsable + # TypeError = r.content is None + # AttributeError = r is None + content = { + "success": False, + "id": '', + "info": {}, + } + + if r.status_code != requests.codes.ok \ + or not content.get('success'): + + # We had a problem + status_str = \ + NotifyPushy.http_response_code_lookup( + r.status_code, PUSHY_HTTP_ERROR_MAP) + + self.logger.warning( + 'Failed to send Pushy notification to {}: ' + '{}{}error={}.'.format( + target, + status_str, + ', ' if status_str else '', + r.status_code)) + + self.logger.debug( + 'Response Details:\r\n{}'.format(r.content)) + + has_error = True + continue + + else: + self.logger.info( + 'Sent Pushy notification to %s.' % target) + + except requests.RequestException as e: + self.logger.warning( + 'A Connection error occurred sending Pushy:%s ' + 'notification', target) + self.logger.debug('Socket Exception: %s' % str(e)) + + has_error = True + continue + + return not has_error + + def url(self, privacy=False, *args, **kwargs): + """ + Returns the URL built dynamically based on specified arguments. 
+ """ + + # Define any URL parameters + params = {} + if self.sound: + params['sound'] = self.sound + + if self.badge is not None: + params['badge'] = str(self.badge) + + # Extend our parameters + params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) + + return '{schema}://{apikey}/{targets}/?{params}'.format( + schema=self.secure_protocol, + apikey=self.pprint(self.apikey, privacy, safe=''), + targets='/'.join( + [NotifyPushy.quote(x, safe='@#') for x in chain( + # Topics are prefixed with a pound/hashtag symbol + ['#{}'.format(x) for x in self.topics], + # Devices + ['@{}'.format(x) for x in self.devices], + )]), + params=NotifyPushy.urlencode(params)) + + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + return len(self.topics) + len(self.devices) + + @staticmethod + def parse_url(url): + """ + Parses the URL and returns enough arguments that can allow + us to re-instantiate this object. + + """ + results = NotifyBase.parse_url(url, verify_host=False) + if not results: + # We're done early as we couldn't load the results + return results + + # Token + results['apikey'] = NotifyPushy.unquote(results['host']) + + # Retrieve all of our targets + results['targets'] = NotifyPushy.split_path(results['fullpath']) + + # Get the sound + if 'sound' in results['qsd'] and len(results['qsd']['sound']): + results['sound'] = \ + NotifyPushy.unquote(results['qsd']['sound']) + + # Badge + if 'badge' in results['qsd'] and results['qsd']['badge']: + results['badge'] = NotifyPushy.unquote( + results['qsd']['badge'].strip()) + + # Support key variable to store Secret API Key + if 'key' in results['qsd'] and len(results['qsd']['key']): + results['apikey'] = results['qsd']['key'] + + # The 'to' makes it easier to use yaml configuration + if 'to' in results['qsd'] and len(results['qsd']['to']): + results['targets'] += \ + NotifyPushy.parse_list(results['qsd']['to']) + + return results diff --git 
a/lib/apprise/plugins/NotifyRSyslog.py b/lib/apprise/plugins/NotifyRSyslog.py new file mode 100644 index 00000000..473e4c5c --- /dev/null +++ b/lib/apprise/plugins/NotifyRSyslog.py @@ -0,0 +1,376 @@ +# -*- coding: utf-8 -*- +# BSD 2-Clause License +# +# Apprise - Push Notification Library. +# Copyright (c) 2023, Chris Caron +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# 1. Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# +# 2. Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +import os +import socket + +from .NotifyBase import NotifyBase +from ..common import NotifyType +from ..utils import parse_bool +from ..AppriseLocale import gettext_lazy as _ + + +class syslog: + """ + Extrapoloated information from the syslog library so that this plugin + would not be dependent on it. 
+ """ + # Notification Categories + LOG_KERN = 0 + LOG_USER = 8 + LOG_MAIL = 16 + LOG_DAEMON = 24 + LOG_AUTH = 32 + LOG_SYSLOG = 40 + LOG_LPR = 48 + LOG_NEWS = 56 + LOG_UUCP = 64 + LOG_CRON = 72 + LOG_LOCAL0 = 128 + LOG_LOCAL1 = 136 + LOG_LOCAL2 = 144 + LOG_LOCAL3 = 152 + LOG_LOCAL4 = 160 + LOG_LOCAL5 = 168 + LOG_LOCAL6 = 176 + LOG_LOCAL7 = 184 + + # Notification Types + LOG_INFO = 6 + LOG_NOTICE = 5 + LOG_WARNING = 4 + LOG_CRIT = 2 + + +class SyslogFacility: + """ + All of the supported facilities + """ + KERN = 'kern' + USER = 'user' + MAIL = 'mail' + DAEMON = 'daemon' + AUTH = 'auth' + SYSLOG = 'syslog' + LPR = 'lpr' + NEWS = 'news' + UUCP = 'uucp' + CRON = 'cron' + LOCAL0 = 'local0' + LOCAL1 = 'local1' + LOCAL2 = 'local2' + LOCAL3 = 'local3' + LOCAL4 = 'local4' + LOCAL5 = 'local5' + LOCAL6 = 'local6' + LOCAL7 = 'local7' + + +SYSLOG_FACILITY_MAP = { + SyslogFacility.KERN: syslog.LOG_KERN, + SyslogFacility.USER: syslog.LOG_USER, + SyslogFacility.MAIL: syslog.LOG_MAIL, + SyslogFacility.DAEMON: syslog.LOG_DAEMON, + SyslogFacility.AUTH: syslog.LOG_AUTH, + SyslogFacility.SYSLOG: syslog.LOG_SYSLOG, + SyslogFacility.LPR: syslog.LOG_LPR, + SyslogFacility.NEWS: syslog.LOG_NEWS, + SyslogFacility.UUCP: syslog.LOG_UUCP, + SyslogFacility.CRON: syslog.LOG_CRON, + SyslogFacility.LOCAL0: syslog.LOG_LOCAL0, + SyslogFacility.LOCAL1: syslog.LOG_LOCAL1, + SyslogFacility.LOCAL2: syslog.LOG_LOCAL2, + SyslogFacility.LOCAL3: syslog.LOG_LOCAL3, + SyslogFacility.LOCAL4: syslog.LOG_LOCAL4, + SyslogFacility.LOCAL5: syslog.LOG_LOCAL5, + SyslogFacility.LOCAL6: syslog.LOG_LOCAL6, + SyslogFacility.LOCAL7: syslog.LOG_LOCAL7, +} + +SYSLOG_FACILITY_RMAP = { + syslog.LOG_KERN: SyslogFacility.KERN, + syslog.LOG_USER: SyslogFacility.USER, + syslog.LOG_MAIL: SyslogFacility.MAIL, + syslog.LOG_DAEMON: SyslogFacility.DAEMON, + syslog.LOG_AUTH: SyslogFacility.AUTH, + syslog.LOG_SYSLOG: SyslogFacility.SYSLOG, + syslog.LOG_LPR: SyslogFacility.LPR, + syslog.LOG_NEWS: SyslogFacility.NEWS, + syslog.LOG_UUCP: 
SyslogFacility.UUCP, + syslog.LOG_CRON: SyslogFacility.CRON, + syslog.LOG_LOCAL0: SyslogFacility.LOCAL0, + syslog.LOG_LOCAL1: SyslogFacility.LOCAL1, + syslog.LOG_LOCAL2: SyslogFacility.LOCAL2, + syslog.LOG_LOCAL3: SyslogFacility.LOCAL3, + syslog.LOG_LOCAL4: SyslogFacility.LOCAL4, + syslog.LOG_LOCAL5: SyslogFacility.LOCAL5, + syslog.LOG_LOCAL6: SyslogFacility.LOCAL6, + syslog.LOG_LOCAL7: SyslogFacility.LOCAL7, +} + +# Used as a lookup when handling the Apprise -> Syslog Mapping +SYSLOG_PUBLISH_MAP = { + NotifyType.INFO: syslog.LOG_INFO, + NotifyType.SUCCESS: syslog.LOG_NOTICE, + NotifyType.FAILURE: syslog.LOG_CRIT, + NotifyType.WARNING: syslog.LOG_WARNING, +} + + +class NotifyRSyslog(NotifyBase): + """ + A wrapper for Remote Syslog Notifications + """ + + # The default descriptive name associated with the Notification + service_name = 'Remote Syslog' + + # The services URL + service_url = 'https://tools.ietf.org/html/rfc5424' + + # The default protocol + protocol = 'rsyslog' + + # A URL that takes you to the setup/help of the specific protocol + setup_url = 'https://github.com/caronc/apprise/wiki/Notify_rsyslog' + + # Disable throttle rate for RSyslog requests + request_rate_per_sec = 0 + + # Define object templates + templates = ( + '{schema}://{host}', + '{schema}://{host}:{port}', + '{schema}://{host}/{facility}', + '{schema}://{host}:{port}/{facility}', + ) + + # Define our template tokens + template_tokens = dict(NotifyBase.template_tokens, **{ + 'facility': { + 'name': _('Facility'), + 'type': 'choice:string', + 'values': [k for k in SYSLOG_FACILITY_MAP.keys()], + 'default': SyslogFacility.USER, + 'required': True, + }, + 'host': { + 'name': _('Hostname'), + 'type': 'string', + 'required': True, + }, + 'port': { + 'name': _('Port'), + 'type': 'int', + 'min': 1, + 'max': 65535, + 'default': 514, + }, + }) + + # Define our template arguments + template_args = dict(NotifyBase.template_args, **{ + 'facility': { + # We map back to the same element defined in 
template_tokens + 'alias_of': 'facility', + }, + 'logpid': { + 'name': _('Log PID'), + 'type': 'bool', + 'default': True, + 'map_to': 'log_pid', + }, + }) + + def __init__(self, facility=None, log_pid=True, **kwargs): + """ + Initialize RSyslog Object + """ + super().__init__(**kwargs) + + if facility: + try: + self.facility = SYSLOG_FACILITY_MAP[facility] + + except KeyError: + msg = 'An invalid syslog facility ' \ + '({}) was specified.'.format(facility) + self.logger.warning(msg) + raise TypeError(msg) + + else: + self.facility = \ + SYSLOG_FACILITY_MAP[ + self.template_tokens['facility']['default']] + + # Include PID with each message. + self.log_pid = log_pid + + return + + def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): + """ + Perform RSyslog Notification + """ + + if title: + # Format title + body = '{}: {}'.format(title, body) + + # Always call throttle before any remote server i/o is made + self.throttle() + host = self.host + port = self.port if self.port \ + else self.template_tokens['port']['default'] + + if self.log_pid: + payload = '<%d>- %d - %s' % ( + SYSLOG_PUBLISH_MAP[notify_type] + self.facility * 8, + os.getpid(), body) + + else: + payload = '<%d>- %s' % ( + SYSLOG_PUBLISH_MAP[notify_type] + self.facility * 8, body) + + # send UDP packet to upstream server + self.logger.debug( + 'RSyslog Host: %s:%d/%s', + host, port, SYSLOG_FACILITY_RMAP[self.facility]) + self.logger.debug('RSyslog Payload: %s' % str(payload)) + + # our sent bytes + sent = 0 + + try: + sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + sock.settimeout(self.socket_connect_timeout) + sent = sock.sendto(payload.encode('utf-8'), (host, port)) + sock.close() + + except socket.gaierror as e: + self.logger.warning( + 'A connection error occurred sending RSyslog ' + 'notification to %s:%d/%s', host, port, + SYSLOG_FACILITY_RMAP[self.facility] + ) + self.logger.debug('Socket Exception: %s' % str(e)) + return False + + except socket.timeout as e: + 
self.logger.warning( + 'A connection timeout occurred sending RSyslog ' + 'notification to %s:%d/%s', host, port, + SYSLOG_FACILITY_RMAP[self.facility] + ) + self.logger.debug('Socket Exception: %s' % str(e)) + return False + + if sent < len(payload): + self.logger.warning( + 'RSyslog sent %d byte(s) but intended to send %d byte(s)', + sent, len(payload)) + return False + + self.logger.info('Sent RSyslog notification.') + + return True + + def url(self, privacy=False, *args, **kwargs): + """ + Returns the URL built dynamically based on specified arguments. + """ + + # Define any URL parameters + params = { + 'logpid': 'yes' if self.log_pid else 'no', + } + + # Extend our parameters + params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) + + return '{schema}://{hostname}{port}/{facility}/?{params}'.format( + schema=self.protocol, + hostname=NotifyRSyslog.quote(self.host, safe=''), + port='' if self.port is None + or self.port == self.template_tokens['port']['default'] + else ':{}'.format(self.port), + facility=self.template_tokens['facility']['default'] + if self.facility not in SYSLOG_FACILITY_RMAP + else SYSLOG_FACILITY_RMAP[self.facility], + params=NotifyRSyslog.urlencode(params), + ) + + @staticmethod + def parse_url(url): + """ + Parses the URL and returns enough arguments that can allow + us to re-instantiate this object. 
+ + """ + results = NotifyBase.parse_url(url, verify_host=False) + if not results: + # We're done early as we couldn't load the results + return results + + tokens = [] + + # Get our path values + tokens.extend(NotifyRSyslog.split_path(results['fullpath'])) + + # Initialization + facility = None + + if tokens: + # Store the last entry as the facility + facility = tokens[-1].lower() + + # However if specified on the URL, that will over-ride what was + # identified + if 'facility' in results['qsd'] and len(results['qsd']['facility']): + facility = results['qsd']['facility'].lower() + + if facility and facility not in SYSLOG_FACILITY_MAP: + # Find first match; if no match is found we set the result + # to the matching key. This allows us to throw a TypeError + # during the __init__() call. The benifit of doing this + # check here is if we do have a valid match, we can support + # short form matches like 'u' which will match against user + facility = next((f for f in SYSLOG_FACILITY_MAP.keys() + if f.startswith(facility)), facility) + + # Save facility if set + if facility: + results['facility'] = facility + + # Include PID as part of the message logged + results['log_pid'] = parse_bool( + results['qsd'].get( + 'logpid', + NotifyRSyslog.template_args['logpid']['default'])) + + return results diff --git a/lib/apprise/plugins/NotifyReddit.py b/lib/apprise/plugins/NotifyReddit.py index 4e54109c..b25e76d0 100644 --- a/lib/apprise/plugins/NotifyReddit.py +++ b/lib/apprise/plugins/NotifyReddit.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. 
Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -30,7 +26,6 @@ # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. -# # 1. Visit https://www.reddit.com/prefs/apps and scroll to the bottom # 2. Click on the button that reads 'are you a developer? create an app...' # 3. Set the mode to `script`, @@ -56,6 +51,7 @@ import requests from json import loads from datetime import timedelta from datetime import datetime +from datetime import timezone from .NotifyBase import NotifyBase from ..URLBase import PrivacyMode @@ -133,12 +129,6 @@ class NotifyReddit(NotifyBase): # still allow to make. 
request_rate_per_sec = 0 - # For Tracking Purposes - ratelimit_reset = datetime.utcnow() - - # Default to 1.0 - ratelimit_remaining = 1.0 - # Taken right from google.auth.helpers: clock_skew = timedelta(seconds=10) @@ -185,6 +175,7 @@ class NotifyReddit(NotifyBase): 'targets': { 'name': _('Targets'), 'type': 'list:string', + 'required': True, }, }) @@ -275,7 +266,7 @@ class NotifyReddit(NotifyBase): # Our keys we build using the provided content self.__refresh_token = None self.__access_token = None - self.__access_token_expiry = datetime.utcnow() + self.__access_token_expiry = datetime.now(timezone.utc) self.kind = kind.strip().lower() \ if isinstance(kind, str) \ @@ -324,6 +315,13 @@ class NotifyReddit(NotifyBase): if not self.subreddits: self.logger.warning( 'No subreddits were identified to be notified') + + # For Rate Limit Tracking Purposes + self.ratelimit_reset = datetime.now(timezone.utc).replace(tzinfo=None) + + # Default to 1.0 + self.ratelimit_remaining = 1.0 + return def url(self, privacy=False, *args, **kwargs): @@ -367,6 +365,12 @@ class NotifyReddit(NotifyBase): params=NotifyReddit.urlencode(params), ) + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + return len(self.subreddits) + def login(self): """ A simple wrapper to authenticate with the Reddit Server @@ -411,10 +415,10 @@ class NotifyReddit(NotifyBase): if 'expires_in' in response: delta = timedelta(seconds=int(response['expires_in'])) self.__access_token_expiry = \ - delta + datetime.utcnow() - self.clock_skew + delta + datetime.now(timezone.utc) - self.clock_skew else: self.__access_token_expiry = self.access_token_lifetime_sec + \ - datetime.utcnow() - self.clock_skew + datetime.now(timezone.utc) - self.clock_skew # The Refresh Token self.__refresh_token = response.get( @@ -538,10 +542,10 @@ class NotifyReddit(NotifyBase): # Determine how long we should wait for or if we should wait at # all. 
This isn't fool-proof because we can't be sure the client # time (calling this script) is completely synced up with the - # Gitter server. One would hope we're on NTP and our clocks are + # Reddit server. One would hope we're on NTP and our clocks are # the same allowing this to role smoothly: - now = datetime.utcnow() + now = datetime.now(timezone.utc).replace(tzinfo=None) if now < self.ratelimit_reset: # We need to throttle for the difference in seconds wait = abs( @@ -665,8 +669,9 @@ class NotifyReddit(NotifyBase): self.ratelimit_remaining = \ float(r.headers.get( 'X-RateLimit-Remaining')) - self.ratelimit_reset = datetime.utcfromtimestamp( - int(r.headers.get('X-RateLimit-Reset'))) + self.ratelimit_reset = datetime.fromtimestamp( + int(r.headers.get('X-RateLimit-Reset')), timezone.utc + ).replace(tzinfo=None) except (TypeError, ValueError): # This is returned if we could not retrieve this information diff --git a/lib/apprise/plugins/NotifyRocketChat.py b/lib/apprise/plugins/NotifyRocketChat.py index c8f5e965..6384386e 100644 --- a/lib/apprise/plugins/NotifyRocketChat.py +++ b/lib/apprise/plugins/NotifyRocketChat.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -348,6 +344,13 @@ class NotifyRocketChat(NotifyBase): params=NotifyRocketChat.urlencode(params), ) + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + targets = len(self.channels) + len(self.rooms) + len(self.users) + return targets if targets > 0 else 1 + def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ wrapper to _send since we can alert more then one channel diff --git a/lib/apprise/plugins/NotifyRyver.py b/lib/apprise/plugins/NotifyRyver.py index b8b34a3c..70f2fa43 100644 --- a/lib/apprise/plugins/NotifyRyver.py +++ b/lib/apprise/plugins/NotifyRyver.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -91,7 +87,7 @@ class NotifyRyver(NotifyBase): # Define object templates templates = ( '{schema}://{organization}/{token}', - '{schema}://{user}@{organization}/{token}', + '{schema}://{botname}@{organization}/{token}', ) # Define our template tokens @@ -109,9 +105,10 @@ class NotifyRyver(NotifyBase): 'private': True, 'regex': (r'^[A-Z0-9]{15}$', 'i'), }, - 'user': { + 'botname': { 'name': _('Bot Name'), 'type': 'string', + 'map_to': 'user', }, }) diff --git a/lib/apprise/plugins/NotifySES.py b/lib/apprise/plugins/NotifySES.py index e58893bd..37a0342a 100644 --- a/lib/apprise/plugins/NotifySES.py +++ b/lib/apprise/plugins/NotifySES.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -89,6 +85,7 @@ import base64 import requests from hashlib import sha256 from datetime import datetime +from datetime import timezone from collections import OrderedDict from xml.etree import ElementTree from email.mime.text import MIMEText @@ -135,6 +132,9 @@ class NotifySES(NotifyBase): # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_ses' + # Support attachments + attachment_support = True + # AWS is pretty good for handling data load so request limits # can occur in much shorter bursts request_rate_per_sec = 2.5 @@ -156,6 +156,7 @@ class NotifySES(NotifyBase): 'name': _('From Email'), 'type': 'string', 'map_to': 'from_addr', + 'required': True, }, 'access_key_id': { 'name': _('Access Key ID'), @@ -173,6 +174,7 @@ class NotifySES(NotifyBase): 'name': _('Region'), 'type': 'string', 'regex': (r'^[a-z]{2}-[a-z-]+?-[0-9]+$', 'i'), + 'required': True, 'map_to': 'region_name', }, 'targets': { @@ -424,7 +426,8 @@ class NotifySES(NotifyBase): content = MIMEText(body, 'plain', 'utf-8') # Create a Multipart container if there is an attachment - base = MIMEMultipart() if attach else content + base = MIMEMultipart() \ + if attach and self.attachment_support else content # TODO: Deduplicate with `NotifyEmail`? 
base['Subject'] = Header(title, 'utf-8') @@ -436,10 +439,11 @@ class NotifySES(NotifyBase): base['Reply-To'] = formataddr(reply_to, charset='utf-8') base['Cc'] = ','.join(cc) base['Date'] = \ - datetime.utcnow().strftime("%a, %d %b %Y %H:%M:%S +0000") + datetime.now( + timezone.utc).strftime("%a, %d %b %Y %H:%M:%S +0000") base['X-Application'] = self.app_id - if attach: + if attach and self.attachment_support: # First attach our body to our content as the first element base.attach(content) @@ -585,7 +589,7 @@ class NotifySES(NotifyBase): } # Get a reference time (used for header construction) - reference = datetime.utcnow() + reference = datetime.now(timezone.utc) # Provide Content-Length headers['Content-Length'] = str(len(payload)) @@ -816,6 +820,13 @@ class NotifySES(NotifyBase): params=NotifySES.urlencode(params), ) + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + targets = len(self.targets) + return targets if targets > 0 else 1 + @staticmethod def parse_url(url): """ diff --git a/lib/apprise/plugins/NotifySMSEagle.py b/lib/apprise/plugins/NotifySMSEagle.py index 50b44cf3..3db131fb 100644 --- a/lib/apprise/plugins/NotifySMSEagle.py +++ b/lib/apprise/plugins/NotifySMSEagle.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -73,6 +69,22 @@ SMSEAGLE_PRIORITY_MAP = { } +class SMSEagleCategory: + """ + We define the different category types that we can notify via SMS Eagle + """ + PHONE = 'phone' + GROUP = 'group' + CONTACT = 'contact' + + +SMSEAGLE_CATEGORIES = ( + SMSEagleCategory.PHONE, + SMSEagleCategory.GROUP, + SMSEagleCategory.CONTACT, +) + + class NotifySMSEagle(NotifyBase): """ A wrapper for SMSEagle Notifications @@ -96,6 +108,9 @@ class NotifySMSEagle(NotifyBase): # The path we send our notification to notify_path = '/jsonrpc/sms' + # Support attachments + attachment_support = True + # The maxumum length of the text message # The actual limit is 160 but SMSEagle looks after the handling # of large messages in it's upstream service @@ -129,6 +144,7 @@ class NotifySMSEagle(NotifyBase): 'token': { 'name': _('Access Token'), 'type': 'string', + 'required': True, }, 'target_phone': { 'name': _('Target Phone No'), @@ -154,6 +170,7 @@ class NotifySMSEagle(NotifyBase): 'targets': { 'name': _('Targets'), 'type': 'list:string', + 'required': True, } }) @@ -322,7 +339,7 @@ class NotifySMSEagle(NotifyBase): has_error = False attachments = [] - if attach: + if attach and self.attachment_support: for attachment in attach: # Perform some simple error checking if not attachment: @@ -403,15 +420,15 @@ class NotifySMSEagle(NotifyBase): batch_size = 1 if not self.batch else self.default_batch_size notify_by = { - 'phone': { + SMSEagleCategory.PHONE: { "method": "sms.send_sms", 'target': 'to', }, - 'group': { + SMSEagleCategory.GROUP: { "method": "sms.send_togroup", 'target': 'groupname', }, - 'contact': { + SMSEagleCategory.CONTACT: { "method": "sms.send_tocontact", 'target': 'contactname', }, @@ -420,7 +437,7 @@ class NotifySMSEagle(NotifyBase): # categories 
separated into a tuple since notify_by.keys() # returns an unpredicable list in Python 2.7 which causes # tests to fail every so often - for category in ('phone', 'group', 'contact'): + for category in SMSEAGLE_CATEGORIES: # Create a copy of our template payload = { 'method': notify_by[category]['method'], @@ -596,6 +613,28 @@ class NotifySMSEagle(NotifyBase): params=NotifySMSEagle.urlencode(params), ) + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + # + # Factor batch into calculation + # + batch_size = 1 if not self.batch else self.default_batch_size + if batch_size > 1: + # Batches can only be sent by group (you can't combine groups into + # a single batch) + total_targets = 0 + for c in SMSEAGLE_CATEGORIES: + targets = len(getattr(self, f'target_{c}s')) + total_targets += int(targets / batch_size) + \ + (1 if targets % batch_size else 0) + return total_targets + + # Normal batch count; just count the targets + return len(self.target_phones) + len(self.target_contacts) + \ + len(self.target_groups) + @staticmethod def parse_url(url): """ diff --git a/lib/apprise/plugins/NotifySMTP2Go.py b/lib/apprise/plugins/NotifySMTP2Go.py index 3c672694..45f6615c 100644 --- a/lib/apprise/plugins/NotifySMTP2Go.py +++ b/lib/apprise/plugins/NotifySMTP2Go.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -91,6 +87,9 @@ class NotifySMTP2Go(NotifyBase): # Notify URL notify_url = 'https://api.smtp2go.com/v3/email/send' + # Support attachments + attachment_support = True + # Default Notify Format notify_format = NotifyFormat.HTML @@ -294,8 +293,8 @@ class NotifySMTP2Go(NotifyBase): # Track our potential attachments attachments = [] - if attach: - for idx, attachment in enumerate(attach): + if attach and self.attachment_support: + for attachment in attach: # Perform some simple error checking if not attachment: # We could not access the attachment @@ -513,6 +512,21 @@ class NotifySMTP2Go(NotifyBase): safe='') for e in self.targets]), params=NotifySMTP2Go.urlencode(params)) + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + # + # Factor batch into calculation + # + batch_size = 1 if not self.batch else self.default_batch_size + targets = len(self.targets) + if batch_size > 1: + targets = int(targets / batch_size) + \ + (1 if targets % batch_size else 0) + + return targets if targets > 0 else 1 + @staticmethod def parse_url(url): """ diff --git a/lib/apprise/plugins/NotifySNS.py b/lib/apprise/plugins/NotifySNS.py index 28eea46b..5edac727 100644 --- a/lib/apprise/plugins/NotifySNS.py +++ b/lib/apprise/plugins/NotifySNS.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. 
Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -35,6 +31,7 @@ import hmac import requests from hashlib import sha256 from datetime import datetime +from datetime import timezone from collections import OrderedDict from xml.etree import ElementTree from itertools import chain @@ -102,7 +99,7 @@ class NotifySNS(NotifyBase): # Define object templates templates = ( - '{schema}://{access_key_id}/{secret_access_key}{region}/{targets}', + '{schema}://{access_key_id}/{secret_access_key}/{region}/{targets}', ) # Define our template tokens @@ -124,6 +121,7 @@ class NotifySNS(NotifyBase): 'type': 'string', 'required': True, 'regex': (r'^[a-z]{2}-[a-z-]+?-[0-9]+$', 'i'), + 'required': True, 'map_to': 'region_name', }, 'target_phone_no': { @@ -142,6 +140,7 @@ class NotifySNS(NotifyBase): 'targets': { 'name': _('Targets'), 'type': 'list:string', + 'required': True, }, }) @@ -396,7 +395,7 @@ class NotifySNS(NotifyBase): } # Get a reference time (used for header construction) - reference = datetime.utcnow() + reference = datetime.now(timezone.utc) # Provide Content-Length headers['Content-Length'] = str(len(payload)) @@ -600,6 +599,12 @@ class NotifySNS(NotifyBase): params=NotifySNS.urlencode(params), ) + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + return len(self.phone) + len(self.topics) + @staticmethod def parse_url(url): """ diff --git a/lib/apprise/plugins/NotifySendGrid.py b/lib/apprise/plugins/NotifySendGrid.py index d811fa1d..b7f4a8a6 100644 --- a/lib/apprise/plugins/NotifySendGrid.py +++ b/lib/apprise/plugins/NotifySendGrid.py @@ -1,5 +1,5 
@@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -30,7 +26,6 @@ # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. -# # You will need an API Key for this plugin to work. # From the Settings -> API Keys you can click "Create API Key" if you don't # have one already. The key must have at least the "Mail Send" permission @@ -287,6 +282,12 @@ class NotifySendGrid(NotifyBase): params=NotifySendGrid.urlencode(params), ) + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + return len(self.targets) + def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform SendGrid Notification diff --git a/lib/apprise/plugins/NotifyServerChan.py b/lib/apprise/plugins/NotifyServerChan.py index 6fa8c557..87a294a3 100644 --- a/lib/apprise/plugins/NotifyServerChan.py +++ b/lib/apprise/plugins/NotifyServerChan.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. 
Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -68,7 +64,7 @@ class NotifyServerChan(NotifyBase): # Define object templates templates = ( - '{schema}://{token}/', + '{schema}://{token}', ) # Define our template tokens diff --git a/lib/apprise/plugins/NotifySignalAPI.py b/lib/apprise/plugins/NotifySignalAPI.py index 46708d19..a2a31de1 100644 --- a/lib/apprise/plugins/NotifySignalAPI.py +++ b/lib/apprise/plugins/NotifySignalAPI.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -68,6 +64,9 @@ class NotifySignalAPI(NotifyBase): # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_signal' + # Support attachments + attachment_support = True + # The maximum targets to include when doing batch transfers default_batch_size = 10 @@ -234,7 +233,7 @@ class NotifySignalAPI(NotifyBase): has_error = False attachments = [] - if attach: + if attach and self.attachment_support: for attachment in attach: # Perform some simple error checking if not attachment: @@ -281,7 +280,7 @@ class NotifySignalAPI(NotifyBase): payload = { 'message': "{}{}".format( '' if not self.status else '{} '.format( - self.asset.ascii(notify_type)), body), + self.asset.ascii(notify_type)), body).rstrip(), "number": self.source, "recipients": [] } @@ -431,6 +430,21 @@ class NotifySignalAPI(NotifyBase): params=NotifySignalAPI.urlencode(params), ) + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + # + # Factor batch into calculation + # + batch_size = 1 if not self.batch else self.default_batch_size + targets = len(self.targets) + if batch_size > 1: + targets = int(targets / batch_size) + \ + (1 if targets % batch_size else 0) + + return targets + @staticmethod def parse_url(url): """ diff --git a/lib/apprise/plugins/NotifySimplePush.py b/lib/apprise/plugins/NotifySimplePush.py index 25066067..d6bd2ab6 100644 --- a/lib/apprise/plugins/NotifySimplePush.py +++ b/lib/apprise/plugins/NotifySimplePush.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. 
# Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -109,12 +105,12 @@ class NotifySimplePush(NotifyBase): # Used for encrypted logins 'password': { - 'name': _('Encrypted Password'), + 'name': _('Password'), 'type': 'string', 'private': True, }, 'salt': { - 'name': _('Encrypted Salt'), + 'name': _('Salt'), 'type': 'string', 'private': True, 'map_to': 'user', diff --git a/lib/apprise/plugins/NotifySinch.py b/lib/apprise/plugins/NotifySinch.py index c4d400b0..b2c5683f 100644 --- a/lib/apprise/plugins/NotifySinch.py +++ b/lib/apprise/plugins/NotifySinch.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -408,6 +404,13 @@ class NotifySinch(NotifyBase): [NotifySinch.quote(x, safe='') for x in self.targets]), params=NotifySinch.urlencode(params)) + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + targets = len(self.targets) + return targets if targets > 0 else 1 + @staticmethod def parse_url(url): """ diff --git a/lib/apprise/plugins/NotifySlack.py b/lib/apprise/plugins/NotifySlack.py index 1a437ffa..bbd2bf24 100644 --- a/lib/apprise/plugins/NotifySlack.py +++ b/lib/apprise/plugins/NotifySlack.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -143,6 +139,10 @@ class NotifySlack(NotifyBase): # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_slack' + # Support attachments + attachment_support = True + + # The maximum targets to include when doing batch transfers # Slack Webhook URL webhook_url = 'https://hooks.slack.com/services' @@ -165,10 +165,10 @@ class NotifySlack(NotifyBase): # Define object templates templates = ( # Webhook - '{schema}://{token_a}/{token_b}{token_c}', + '{schema}://{token_a}/{token_b}/{token_c}', '{schema}://{botname}@{token_a}/{token_b}{token_c}', - '{schema}://{token_a}/{token_b}{token_c}/{targets}', - '{schema}://{botname}@{token_a}/{token_b}{token_c}/{targets}', + '{schema}://{token_a}/{token_b}/{token_c}/{targets}', + '{schema}://{botname}@{token_a}/{token_b}/{token_c}/{targets}', # Bot '{schema}://{access_token}/', @@ -198,7 +198,6 @@ class NotifySlack(NotifyBase): 'name': _('Token A'), 'type': 'string', 'private': True, - 'required': True, 'regex': (r'^[A-Z0-9]+$', 'i'), }, # Token required as part of the Webhook request @@ -207,7 +206,6 @@ class NotifySlack(NotifyBase): 'name': _('Token B'), 'type': 'string', 'private': True, - 'required': True, 'regex': (r'^[A-Z0-9]+$', 'i'), }, # Token required as part of the Webhook request @@ -216,7 +214,6 @@ class NotifySlack(NotifyBase): 'name': _('Token C'), 'type': 'string', 'private': True, - 'required': True, 'regex': (r'^[A-Za-z0-9]+$', 'i'), }, 'target_encoded_id': { @@ -354,6 +351,13 @@ class NotifySlack(NotifyBase): r'>': '>', } + # To notify a channel, one uses + self._re_channel_support = re.compile( + r'(?P(?:<|\<)?[ \t]*' + r'!(?P[^| \n]+)' + r'(?:[ \t]*\|[ \t]*(?:(?P[^\n]+?)[ \t]*)?(?:>|\>)' + r'|(?:>|\>)))', 
re.IGNORECASE) + # The markdown in slack isn't [desc](url), it's # # To accomodate this, we need to ensure we don't escape URLs that match @@ -455,6 +459,21 @@ class NotifySlack(NotifyBase): lambda x: self._re_formatting_map[x.group()], body, ) + # Support , entries + for match in self._re_channel_support.findall(body): + # Swap back any ampersands previously updaated + channel = match[1].strip() + desc = match[2].strip() + + # Update our string + body = re.sub( + re.escape(match[0]), + ''.format( + channel=channel, desc=desc) + if desc else ''.format(channel=channel), + body, + re.IGNORECASE) + # Support , entries for match in self._re_url_support.findall(body): # Swap back any ampersands previously updaated @@ -503,7 +522,8 @@ class NotifySlack(NotifyBase): # Include the footer only if specified to do so payload['attachments'][0]['footer'] = self.app_id - if attach and self.mode is SlackMode.WEBHOOK: + if attach and self.attachment_support \ + and self.mode is SlackMode.WEBHOOK: # Be friendly; let the user know why they can't send their # attachments if using the Webhook mode self.logger.warning( @@ -581,7 +601,8 @@ class NotifySlack(NotifyBase): ' to {}'.format(channel) if channel is not None else '')) - if attach and self.mode is SlackMode.BOT and attach_channel_list: + if attach and self.attachment_support and \ + self.mode is SlackMode.BOT and attach_channel_list: # Send our attachments (can only be done in bot mode) for attachment in attach: @@ -993,6 +1014,12 @@ class NotifySlack(NotifyBase): params=NotifySlack.urlencode(params), ) + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + return len(self.channels) + @staticmethod def parse_url(url): """ diff --git a/lib/apprise/plugins/NotifySparkPost.py b/lib/apprise/plugins/NotifySparkPost.py index bf83a9f5..282f5509 100644 --- a/lib/apprise/plugins/NotifySparkPost.py +++ b/lib/apprise/plugins/NotifySparkPost.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 
3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -118,6 +114,9 @@ class NotifySparkPost(NotifyBase): # The services URL service_url = 'https://sparkpost.com/' + # Support attachments + attachment_support = True + # All notification requests are secure secure_protocol = 'sparkpost' @@ -225,7 +224,7 @@ class NotifySparkPost(NotifyBase): } def __init__(self, apikey, targets, cc=None, bcc=None, from_name=None, - region_name=None, headers=None, tokens=None, batch=False, + region_name=None, headers=None, tokens=None, batch=None, **kwargs): """ Initialize SparkPost Object @@ -296,7 +295,8 @@ class NotifySparkPost(NotifyBase): self.tokens.update(tokens) # Prepare Batch Mode Flag - self.batch = batch + self.batch = self.template_args['batch']['default'] \ + if batch is None else batch if targets: # Validate recipients (to:) and drop bad ones: @@ -542,7 +542,7 @@ class NotifySparkPost(NotifyBase): else: payload['content']['text'] = body - if attach: + if attach and self.attachment_support: # Prepare ourselves an attachment object payload['content']['attachments'] = [] @@ -722,6 +722,21 @@ class NotifySparkPost(NotifyBase): safe='') for e in self.targets]), params=NotifySparkPost.urlencode(params)) + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + # + # Factor batch into 
calculation + # + batch_size = 1 if not self.batch else self.default_batch_size + targets = len(self.targets) + if batch_size > 1: + targets = int(targets / batch_size) + \ + (1 if targets % batch_size else 0) + + return targets if targets > 0 else 1 + @staticmethod def parse_url(url): """ diff --git a/lib/apprise/plugins/NotifySpontit.py b/lib/apprise/plugins/NotifySpontit.py index 4df8b538..4705fc05 100644 --- a/lib/apprise/plugins/NotifySpontit.py +++ b/lib/apprise/plugins/NotifySpontit.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -135,7 +131,6 @@ class NotifySpontit(NotifyBase): 'targets': { 'name': _('Targets'), 'type': 'list:string', - 'required': True, }, }) @@ -350,6 +345,13 @@ class NotifySpontit(NotifyBase): [NotifySpontit.quote(x, safe='') for x in self.targets]), params=NotifySpontit.urlencode(params)) + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + targets = len(self.targets) + return targets if targets > 0 else 1 + @staticmethod def parse_url(url): """ diff --git a/lib/apprise/plugins/NotifyStreamlabs.py b/lib/apprise/plugins/NotifyStreamlabs.py index 3489519a..56b577e4 100644 --- a/lib/apprise/plugins/NotifyStreamlabs.py +++ b/lib/apprise/plugins/NotifyStreamlabs.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -277,8 +273,7 @@ class NotifyStreamlabs(NotifyBase): return - def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, - **kwargs): + def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ Perform Streamlabs notification call (either donation or alert) """ diff --git a/lib/apprise/plugins/NotifySyslog.py b/lib/apprise/plugins/NotifySyslog.py index 433aab9c..3ff1f257 100644 --- a/lib/apprise/plugins/NotifySyslog.py +++ b/lib/apprise/plugins/NotifySyslog.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -30,14 +26,11 @@ # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
-import os import syslog -import socket from .NotifyBase import NotifyBase from ..common import NotifyType from ..utils import parse_bool -from ..utils import is_hostname from ..AppriseLocale import gettext_lazy as _ @@ -107,20 +100,13 @@ SYSLOG_FACILITY_RMAP = { syslog.LOG_LOCAL7: SyslogFacility.LOCAL7, } - -class SyslogMode: - # A local query - LOCAL = "local" - - # A remote query - REMOTE = "remote" - - -# webhook modes are placed ito this list for validation purposes -SYSLOG_MODES = ( - SyslogMode.LOCAL, - SyslogMode.REMOTE, -) +# Used as a lookup when handling the Apprise -> Syslog Mapping +SYSLOG_PUBLISH_MAP = { + NotifyType.INFO: syslog.LOG_INFO, + NotifyType.SUCCESS: syslog.LOG_NOTICE, + NotifyType.FAILURE: syslog.LOG_CRIT, + NotifyType.WARNING: syslog.LOG_WARNING, +} class NotifySyslog(NotifyBase): @@ -134,8 +120,8 @@ class NotifySyslog(NotifyBase): # The services URL service_url = 'https://tools.ietf.org/html/rfc5424' - # The default secure protocol - secure_protocol = 'syslog' + # The default protocol + protocol = 'syslog' # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_syslog' @@ -148,10 +134,6 @@ class NotifySyslog(NotifyBase): templates = ( '{schema}://', '{schema}://{facility}', - '{schema}://{host}', - '{schema}://{host}:{port}', - '{schema}://{host}/{facility}', - '{schema}://{host}:{port}/{facility}', ) # Define our template tokens @@ -162,18 +144,6 @@ class NotifySyslog(NotifyBase): 'values': [k for k in SYSLOG_FACILITY_MAP.keys()], 'default': SyslogFacility.USER, }, - 'host': { - 'name': _('Hostname'), - 'type': 'string', - 'required': True, - }, - 'port': { - 'name': _('Port'), - 'type': 'int', - 'min': 1, - 'max': 65535, - 'default': 514, - }, }) # Define our template arguments @@ -182,12 +152,6 @@ class NotifySyslog(NotifyBase): # We map back to the same element defined in template_tokens 'alias_of': 'facility', }, - 'mode': { - 'name': _('Syslog Mode'), - 'type': 
'choice:string', - 'values': SYSLOG_MODES, - 'default': SyslogMode.LOCAL, - }, 'logpid': { 'name': _('Log PID'), 'type': 'bool', @@ -202,8 +166,8 @@ class NotifySyslog(NotifyBase): }, }) - def __init__(self, facility=None, mode=None, log_pid=True, - log_perror=False, **kwargs): + def __init__(self, facility=None, log_pid=True, log_perror=False, + **kwargs): """ Initialize Syslog Object """ @@ -223,14 +187,6 @@ class NotifySyslog(NotifyBase): SYSLOG_FACILITY_MAP[ self.template_tokens['facility']['default']] - self.mode = self.template_args['mode']['default'] \ - if not isinstance(mode, str) else mode.lower() - - if self.mode not in SYSLOG_MODES: - msg = 'The mode specified ({}) is invalid.'.format(mode) - self.logger.warning(msg) - raise TypeError(msg) - # Logging Options self.logoptions = 0 @@ -249,7 +205,7 @@ class NotifySyslog(NotifyBase): if log_perror: self.logoptions |= syslog.LOG_PERROR - # Initialize our loggig + # Initialize our logging syslog.openlog( self.app_id, logoption=self.logoptions, facility=self.facility) return @@ -259,7 +215,7 @@ class NotifySyslog(NotifyBase): Perform Syslog Notification """ - _pmap = { + SYSLOG_PUBLISH_MAP = { NotifyType.INFO: syslog.LOG_INFO, NotifyType.SUCCESS: syslog.LOG_NOTICE, NotifyType.FAILURE: syslog.LOG_CRIT, @@ -272,70 +228,17 @@ class NotifySyslog(NotifyBase): # Always call throttle before any remote server i/o is made self.throttle() - if self.mode == SyslogMode.LOCAL: - try: - syslog.syslog(_pmap[notify_type], body) + try: + syslog.syslog(SYSLOG_PUBLISH_MAP[notify_type], body) - except KeyError: - # An invalid notification type was specified - self.logger.warning( - 'An invalid notification type ' - '({}) was specified.'.format(notify_type)) - return False + except KeyError: + # An invalid notification type was specified + self.logger.warning( + 'An invalid notification type ' + '({}) was specified.'.format(notify_type)) + return False - else: # SyslogMode.REMOTE - - host = self.host - port = self.port if 
self.port \ - else self.template_tokens['port']['default'] - if self.log_pid: - payload = '<%d>- %d - %s' % ( - _pmap[notify_type] + self.facility * 8, os.getpid(), body) - - else: - payload = '<%d>- %s' % ( - _pmap[notify_type] + self.facility * 8, body) - - # send UDP packet to upstream server - self.logger.debug( - 'Syslog Host: %s:%d/%s', - host, port, SYSLOG_FACILITY_RMAP[self.facility]) - self.logger.debug('Syslog Payload: %s' % str(payload)) - - # our sent bytes - sent = 0 - - try: - sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) - sock.settimeout(self.socket_connect_timeout) - sent = sock.sendto(payload.encode('utf-8'), (host, port)) - sock.close() - - except socket.gaierror as e: - self.logger.warning( - 'A connection error occurred sending Syslog ' - 'notification to %s:%d/%s', host, port, - SYSLOG_FACILITY_RMAP[self.facility] - ) - self.logger.debug('Socket Exception: %s' % str(e)) - return False - - except socket.timeout as e: - self.logger.warning( - 'A connection timeout occurred sending Syslog ' - 'notification to %s:%d/%s', host, port, - SYSLOG_FACILITY_RMAP[self.facility] - ) - self.logger.debug('Socket Exception: %s' % str(e)) - return False - - if sent < len(payload): - self.logger.warning( - 'Syslog sent %d byte(s) but intended to send %d byte(s)', - sent, len(payload)) - return False - - self.logger.info('Sent Syslog (%s) notification.', self.mode) + self.logger.info('Sent Syslog notification.') return True @@ -348,31 +251,16 @@ class NotifySyslog(NotifyBase): params = { 'logperror': 'yes' if self.log_perror else 'no', 'logpid': 'yes' if self.log_pid else 'no', - 'mode': self.mode, } # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) - if self.mode == SyslogMode.LOCAL: - return '{schema}://{facility}/?{params}'.format( - facility=self.template_tokens['facility']['default'] - if self.facility not in SYSLOG_FACILITY_RMAP - else SYSLOG_FACILITY_RMAP[self.facility], - schema=self.secure_protocol, - 
params=NotifySyslog.urlencode(params), - ) - - # Remote mode: - return '{schema}://{hostname}{port}/{facility}/?{params}'.format( - schema=self.secure_protocol, - hostname=NotifySyslog.quote(self.host, safe=''), - port='' if self.port is None - or self.port == self.template_tokens['port']['default'] - else ':{}'.format(self.port), + return '{schema}://{facility}/?{params}'.format( facility=self.template_tokens['facility']['default'] if self.facility not in SYSLOG_FACILITY_RMAP else SYSLOG_FACILITY_RMAP[self.facility], + schema=self.protocol, params=NotifySyslog.urlencode(params), ) @@ -395,21 +283,12 @@ class NotifySyslog(NotifyBase): # Get our path values tokens.extend(NotifySyslog.split_path(results['fullpath'])) + # Initialization facility = None - if len(tokens) > 1 and is_hostname(tokens[0]): - # syslog://hostname/facility - results['mode'] = SyslogMode.REMOTE - # Store our facility as the first path entry - facility = tokens[-1] - - elif tokens: - # This is a bit ambigious... it could be either: - # syslog://facility -or- syslog://hostname - - # First lets test it as a facility; we'll correct this - # later on if nessisary - facility = tokens[-1] + if tokens: + # Store the last entry as the facility + facility = tokens[-1].lower() # However if specified on the URL, that will over-ride what was # identified @@ -425,20 +304,6 @@ class NotifySyslog(NotifyBase): facility = next((f for f in SYSLOG_FACILITY_MAP.keys() if f.startswith(facility)), facility) - # Attempt to solve our ambiguity - if len(tokens) == 1 and is_hostname(tokens[0]) and ( - results['port'] or facility not in SYSLOG_FACILITY_MAP): - - # facility is likely hostname; update our guessed mode - results['mode'] = SyslogMode.REMOTE - - # Reset our facility value - facility = None - - # Set mode if not otherwise set - if 'mode' in results['qsd'] and len(results['qsd']['mode']): - results['mode'] = NotifySyslog.unquote(results['qsd']['mode']) - # Save facility if set if facility: results['facility'] = 
facility diff --git a/lib/apprise/plugins/NotifyTechulusPush.py b/lib/apprise/plugins/NotifyTechulusPush.py index 0f3e79e5..3e2085c5 100644 --- a/lib/apprise/plugins/NotifyTechulusPush.py +++ b/lib/apprise/plugins/NotifyTechulusPush.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE diff --git a/lib/apprise/plugins/NotifyTelegram.py b/lib/apprise/plugins/NotifyTelegram.py index e7d75f5a..121bf82a 100644 --- a/lib/apprise/plugins/NotifyTelegram.py +++ b/lib/apprise/plugins/NotifyTelegram.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -84,6 +80,23 @@ IS_CHAT_ID_RE = re.compile( ) +class TelegramContentPlacement: + """ + The Telegram Content Placement + """ + # Before Attachments + BEFORE = "before" + # After Attachments + AFTER = "after" + + +# Identify Placement Categories +TELEGRAM_CONTENT_PLACEMENT = ( + TelegramContentPlacement.BEFORE, + TelegramContentPlacement.AFTER, +) + + class NotifyTelegram(NotifyBase): """ A wrapper for Telegram Notifications @@ -106,6 +119,9 @@ class NotifyTelegram(NotifyBase): # Telegram uses the http protocol with JSON requests notify_url = 'https://api.telegram.org/bot' + # Support attachments + attachment_support = True + # Allows the user to specify the NotifyImageSize object image_size = NotifyImageSize.XY_256 @@ -311,13 +327,24 @@ class NotifyTelegram(NotifyBase): 'type': 'bool', 'default': False, }, + 'topic': { + 'name': _('Topic Thread ID'), + 'type': 'int', + }, 'to': { 'alias_of': 'targets', }, + 'content': { + 'name': _('Content Placement'), + 'type': 'choice:string', + 'values': TELEGRAM_CONTENT_PLACEMENT, + 'default': TelegramContentPlacement.BEFORE, + }, }) def __init__(self, bot_token, targets, detect_owner=True, - include_image=False, silent=None, preview=None, **kwargs): + include_image=False, silent=None, preview=None, topic=None, + content=None, **kwargs): """ Initialize Telegram Object """ @@ -343,6 +370,29 @@ class NotifyTelegram(NotifyBase): self.preview = self.template_args['preview']['default'] \ if preview is None else bool(preview) + # Setup our content placement + self.content = self.template_args['content']['default'] \ + if not isinstance(content, str) else content.lower() + if self.content and self.content not in TELEGRAM_CONTENT_PLACEMENT: + msg = 'The content placement specified ({}) is invalid.'\ + 
.format(content) + self.logger.warning(msg) + raise TypeError(msg) + + if topic: + try: + self.topic = int(topic) + + except (TypeError, ValueError): + # Not a valid integer; ignore entry + err = 'The Telegram Topic ID specified ({}) is invalid.'\ + .format(topic) + self.logger.warning(err) + raise TypeError(err) + else: + # No Topic Thread + self.topic = None + # if detect_owner is set to True, we will attempt to determine who # the bot owner is based on the first person who messaged it. This # is not a fool proof way of doing things as over time Telegram removes @@ -419,11 +469,14 @@ class NotifyTelegram(NotifyBase): # content can arrive together. self.throttle() + payload = {'chat_id': chat_id} + if self.topic: + payload['message_thread_id'] = self.topic + try: with open(path, 'rb') as f: # Configure file payload (for upload) files = {key: (file_name, f)} - payload = {'chat_id': chat_id} self.logger.debug( 'Telegram attachment POST URL: %s (cert_verify=%r)' % ( @@ -632,6 +685,9 @@ class NotifyTelegram(NotifyBase): 'disable_web_page_preview': not self.preview, } + if self.topic: + payload['message_thread_id'] = self.topic + # Prepare Message Body if self.notify_format == NotifyFormat.MARKDOWN: payload['parse_mode'] = 'MARKDOWN' @@ -655,6 +711,10 @@ class NotifyTelegram(NotifyBase): # Prepare our payload based on HTML or TEXT payload['text'] = body + # Handle payloads without a body specified (but an attachment present) + attach_content = \ + TelegramContentPlacement.AFTER if not body else self.content + # Create a copy of the chat_ids list targets = list(self.targets) while len(targets): @@ -688,6 +748,20 @@ class NotifyTelegram(NotifyBase): 'Failed to send Telegram type image to {}.', payload['chat_id']) + if attach and self.attachment_support and \ + attach_content == TelegramContentPlacement.AFTER: + # Send our attachments now (if specified and if it exists) + if not self._send_attachments( + chat_id=payload['chat_id'], notify_type=notify_type, + 
attach=attach): + + has_error = True + continue + + if not body: + # Nothing more to do; move along to the next attachment + continue + # Always call throttle before any remote server i/o is made; # Telegram throttles to occur before sending the image so that # content can arrive together. @@ -750,19 +824,36 @@ class NotifyTelegram(NotifyBase): self.logger.info('Sent Telegram notification.') - if attach: - # Send our attachments now (if specified and if it exists) - for attachment in attach: - if not self.send_media( - payload['chat_id'], notify_type, - attach=attachment): + if attach and self.attachment_support \ + and attach_content == TelegramContentPlacement.BEFORE: + # Send our attachments now (if specified and if it exists) as + # it was identified to send the content before the attachments + # which is now done. + if not self._send_attachments( + chat_id=payload['chat_id'], + notify_type=notify_type, + attach=attach): - # We failed; don't continue - has_error = True - break + has_error = True + continue - self.logger.info( - 'Sent Telegram attachment: {}.'.format(attachment)) + return not has_error + + def _send_attachments(self, chat_id, notify_type, attach): + """ + Sends our attachments + """ + has_error = False + # Send our attachments now (if specified and if it exists) + for attachment in attach: + if not self.send_media(chat_id, notify_type, attach=attachment): + + # We failed; don't continue + has_error = True + break + + self.logger.info( + 'Sent Telegram attachment: {}.'.format(attachment)) return not has_error @@ -777,8 +868,12 @@ class NotifyTelegram(NotifyBase): 'detect': 'yes' if self.detect_owner else 'no', 'silent': 'yes' if self.silent else 'no', 'preview': 'yes' if self.preview else 'no', + 'content': self.content, } + if self.topic: + params['topic'] = self.topic + # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) @@ -791,6 +886,12 @@ class NotifyTelegram(NotifyBase): 
[NotifyTelegram.quote('@{}'.format(x)) for x in self.targets]), params=NotifyTelegram.urlencode(params)) + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + return len(self.targets) + @staticmethod def parse_url(url, **kwargs): """ @@ -851,6 +952,10 @@ class NotifyTelegram(NotifyBase): # Store our chat ids (as these are the remaining entries) results['targets'] = entries + # content to be displayed 'before' or 'after' attachments + if 'content' in results['qsd'] and len(results['qsd']['content']): + results['content'] = results['qsd']['content'] + # Support the 'to' variable so that we can support rooms this way too # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): @@ -860,6 +965,10 @@ class NotifyTelegram(NotifyBase): # Store our bot token results['bot_token'] = bot_token + # Support Thread Topic + if 'topic' in results['qsd'] and len(results['qsd']['topic']): + results['topic'] = results['qsd']['topic'] + # Silent (Sends the message Silently); users will receive # notification with no sound. results['silent'] = \ diff --git a/lib/apprise/plugins/NotifyTwilio.py b/lib/apprise/plugins/NotifyTwilio.py index 78a1ba82..ab4c88e3 100644 --- a/lib/apprise/plugins/NotifyTwilio.py +++ b/lib/apprise/plugins/NotifyTwilio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -385,6 +381,13 @@ class NotifyTwilio(NotifyBase): [NotifyTwilio.quote(x, safe='') for x in self.targets]), params=NotifyTwilio.urlencode(params)) + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + targets = len(self.targets) + return targets if targets > 0 else 1 + @staticmethod def parse_url(url): """ diff --git a/lib/apprise/plugins/NotifyTwist.py b/lib/apprise/plugins/NotifyTwist.py index a19f03a9..36a55313 100644 --- a/lib/apprise/plugins/NotifyTwist.py +++ b/lib/apprise/plugins/NotifyTwist.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -106,10 +102,12 @@ class NotifyTwist(NotifyBase): 'name': _('Password'), 'type': 'string', 'private': True, + 'required': True, }, 'email': { 'name': _('Email'), 'type': 'string', + 'required': True, }, 'target_channel': { 'name': _('Target Channel'), @@ -256,6 +254,12 @@ class NotifyTwist(NotifyBase): params=NotifyTwist.urlencode(params), ) + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + return len(self.channels) + len(self.channel_ids) + def login(self): """ A simple wrapper to authenticate with the Twist Server diff --git a/lib/apprise/plugins/NotifyTwitter.py b/lib/apprise/plugins/NotifyTwitter.py index 76c1a8e6..3647c8b3 100644 --- a/lib/apprise/plugins/NotifyTwitter.py +++ b/lib/apprise/plugins/NotifyTwitter.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -36,6 +32,7 @@ import re import requests from copy import deepcopy from datetime import datetime +from datetime import timezone from requests_oauthlib import OAuth1 from json import dumps from json import loads @@ -82,11 +79,14 @@ class NotifyTwitter(NotifyBase): service_url = 'https://twitter.com/' # The default secure protocol is twitter. - secure_protocol = ('twitter', 'tweet') + secure_protocol = ('x', 'twitter', 'tweet') # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_twitter' + # Support attachments + attachment_support = True + # Do not set body_maxlen as it is set in a property value below # since the length varies depending if we are doing a direct message # or a tweet @@ -124,13 +124,14 @@ class NotifyTwitter(NotifyBase): request_rate_per_sec = 0 # For Tracking Purposes - ratelimit_reset = datetime.utcnow() + ratelimit_reset = datetime.now(timezone.utc).replace(tzinfo=None) # Default to 1000; users can send up to 1000 DM's and 2400 tweets a day # This value only get's adjusted if the server sets it that way ratelimit_remaining = 1 templates = ( + '{schema}://{ckey}/{csecret}/{akey}/{asecret}', '{schema}://{ckey}/{csecret}/{akey}/{asecret}/{targets}', ) @@ -283,7 +284,7 @@ class NotifyTwitter(NotifyBase): # Build a list of our attachments attachments = [] - if attach: + if attach and self.attachment_support: # We need to upload our payload first so that we can source it # in remaining messages for attachment in attach: @@ -412,7 +413,7 @@ class NotifyTwitter(NotifyBase): _payload = deepcopy(payload) _payload['media_ids'] = media_ids - if no: + if no or not body: # strip text and replace it with the image representation _payload['status'] = \ 
'{:02d}/{:02d}'.format(no + 1, len(batches)) @@ -512,7 +513,7 @@ class NotifyTwitter(NotifyBase): 'additional_owners': ','.join([str(x) for x in targets.values()]) } - if no: + if no or not body: # strip text and replace it with the image representation _data['text'] = \ '{:02d}/{:02d}'.format(no + 1, len(attachments)) @@ -678,7 +679,7 @@ class NotifyTwitter(NotifyBase): # Twitter server. One would hope we're on NTP and our clocks are # the same allowing this to role smoothly: - now = datetime.utcnow() + now = datetime.now(timezone.utc).replace(tzinfo=None) if now < self.ratelimit_reset: # We need to throttle for the difference in seconds # We add 0.5 seconds to the end just to allow a grace @@ -736,8 +737,9 @@ class NotifyTwitter(NotifyBase): # Capture rate limiting if possible self.ratelimit_remaining = \ int(r.headers.get('x-rate-limit-remaining')) - self.ratelimit_reset = datetime.utcfromtimestamp( - int(r.headers.get('x-rate-limit-reset'))) + self.ratelimit_reset = datetime.fromtimestamp( + int(r.headers.get('x-rate-limit-reset')), timezone.utc + ).replace(tzinfo=None) except (TypeError, ValueError): # This is returned if we could not retrieve this information @@ -793,10 +795,6 @@ class NotifyTwitter(NotifyBase): # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) - if len(self.targets) > 0: - params['to'] = ','.join( - [NotifyTwitter.quote(x, safe='') for x in self.targets]) - return '{schema}://{ckey}/{csecret}/{akey}/{asecret}' \ '/{targets}/?{params}'.format( schema=self.secure_protocol[0], @@ -811,6 +809,13 @@ class NotifyTwitter(NotifyBase): for target in self.targets]), params=NotifyTwitter.urlencode(params)) + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + targets = len(self.targets) + return targets if targets > 0 else 1 + @staticmethod def parse_url(url): """ @@ -823,28 +828,22 @@ class NotifyTwitter(NotifyBase): # We're done early as we couldn't load the 
results return results - # The first token is stored in the hostname - consumer_key = NotifyTwitter.unquote(results['host']) - # Acquire remaining tokens tokens = NotifyTwitter.split_path(results['fullpath']) + # The consumer token is stored in the hostname + results['ckey'] = NotifyTwitter.unquote(results['host']) + + # # Now fetch the remaining tokens - try: - consumer_secret, access_token_key, access_token_secret = \ - tokens[0:3] + # - except (ValueError, AttributeError, IndexError): - # Force some bad values that will get caught - # in parsing later - consumer_secret = None - access_token_key = None - access_token_secret = None - - results['ckey'] = consumer_key - results['csecret'] = consumer_secret - results['akey'] = access_token_key - results['asecret'] = access_token_secret + # Consumer Secret + results['csecret'] = tokens.pop(0) if tokens else None + # Access Token Key + results['akey'] = tokens.pop(0) if tokens else None + # Access Token Secret + results['asecret'] = tokens.pop(0) if tokens else None # The defined twitter mode if 'mode' in results['qsd'] and len(results['qsd']['mode']): @@ -861,7 +860,7 @@ class NotifyTwitter(NotifyBase): results['targets'].append(results.get('user')) # Store any remaining items as potential targets - results['targets'].extend(tokens[3:]) + results['targets'].extend(tokens) # Get Cache Flag (reduces lookup hits) if 'cache' in results['qsd'] and len(results['qsd']['cache']): diff --git a/lib/apprise/plugins/NotifyVoipms.py b/lib/apprise/plugins/NotifyVoipms.py index 42379b6b..c39da4df 100644 --- a/lib/apprise/plugins/NotifyVoipms.py +++ b/lib/apprise/plugins/NotifyVoipms.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. 
Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -78,7 +74,6 @@ class NotifyVoipms(NotifyBase): # Define object templates templates = ( - '{schema}://{password}:{email}', '{schema}://{password}:{email}/{from_phone}/{targets}', ) @@ -111,6 +106,7 @@ class NotifyVoipms(NotifyBase): 'targets': { 'name': _('Targets'), 'type': 'list:string', + 'required': True, }, }) @@ -329,6 +325,13 @@ class NotifyVoipms(NotifyBase): ['1' + NotifyVoipms.quote(x, safe='') for x in self.targets]), params=NotifyVoipms.urlencode(params)) + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + targets = len(self.targets) + return targets if targets > 0 else 1 + @staticmethod def parse_url(url): """ diff --git a/lib/apprise/plugins/NotifyVonage.py b/lib/apprise/plugins/NotifyVonage.py index 812c3643..48d82319 100644 --- a/lib/apprise/plugins/NotifyVonage.py +++ b/lib/apprise/plugins/NotifyVonage.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -334,6 +330,13 @@ class NotifyVonage(NotifyBase): [NotifyVonage.quote(x, safe='') for x in self.targets]), params=NotifyVonage.urlencode(params)) + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + targets = len(self.targets) + return targets if targets > 0 else 1 + @staticmethod def parse_url(url): """ diff --git a/lib/apprise/plugins/NotifyWebexTeams.py b/lib/apprise/plugins/NotifyWebexTeams.py index 6b953b71..67ed4e4b 100644 --- a/lib/apprise/plugins/NotifyWebexTeams.py +++ b/lib/apprise/plugins/NotifyWebexTeams.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE diff --git a/lib/apprise/plugins/NotifyWhatsApp.py b/lib/apprise/plugins/NotifyWhatsApp.py new file mode 100644 index 00000000..efa90f89 --- /dev/null +++ b/lib/apprise/plugins/NotifyWhatsApp.py @@ -0,0 +1,559 @@ +# -*- coding: utf-8 -*- +# BSD 2-Clause License +# +# Apprise - Push Notification Library. 
+# Copyright (c) 2023, Chris Caron +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# 1. Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# +# 2. Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# +# API Source: +# https://developers.facebook.com/docs/whatsapp/cloud-api/reference/messages +# +# 1. Register a developer account with Meta: +# https://developers.facebook.com/docs/whatsapp/cloud-api/get-started +# 2. Enable 2 Factor Authentication (2FA) with your account (if not done +# already) +# 3. Create a App using WhatsApp Product. There are 2 to create an app from +# Do NOT chose the WhatsApp Webhook one (choose the other) +# +# When you click on the API Setup section of your new app you need to record +# both the access token and the From Phone Number ID. Note that this not the +# from phone number itself, but it's ID. 
It's displayed below and contains +# way more numbers then your typical phone number + +import re +import requests +from json import loads, dumps +from .NotifyBase import NotifyBase +from ..common import NotifyType +from ..utils import is_phone_no +from ..utils import parse_phone_no +from ..utils import validate_regex +from ..AppriseLocale import gettext_lazy as _ + + +class NotifyWhatsApp(NotifyBase): + """ + A wrapper for WhatsApp Notifications + """ + + # The default descriptive name associated with the Notification + service_name = 'WhatsApp' + + # The services URL + service_url = \ + 'https://developers.facebook.com/docs/whatsapp/cloud-api/get-started' + + # All notification requests are secure + secure_protocol = 'whatsapp' + + # Allow 300 requests per minute. + # 60/300 = 0.2 + request_rate_per_sec = 0.20 + + # Facebook Graph version + fb_graph_version = 'v17.0' + + # A URL that takes you to the setup/help of the specific protocol + setup_url = 'https://github.com/caronc/apprise/wiki/Notify_whatsapp' + + # WhatsApp Message Notification URL + notify_url = 'https://graph.facebook.com/{fb_ver}/{phone_id}/messages' + + # The maximum length of the body + body_maxlen = 1024 + + # A title can not be used for SMS Messages. Setting this to zero will + # cause any title (if defined) to get placed into the message body. 
+ title_maxlen = 0 + + # Define object templates + templates = ( + '{schema}://{token}@{from_phone_id}/{targets}', + '{schema}://{template}:{token}@{from_phone_id}/{targets}', + ) + + # Define our template tokens + template_tokens = dict(NotifyBase.template_tokens, **{ + 'token': { + 'name': _('Access Token'), + 'type': 'string', + 'private': True, + 'required': True, + 'regex': (r'^[a-z0-9]+$', 'i'), + }, + 'template': { + 'name': _('Template Name'), + 'type': 'string', + 'required': False, + 'regex': (r'^[^\s]+$', 'i'), + }, + 'from_phone_id': { + 'name': _('From Phone ID'), + 'type': 'string', + 'private': True, + 'required': True, + 'regex': (r'^[0-9]+$', 'i'), + }, + 'target_phone': { + 'name': _('Target Phone No'), + 'type': 'string', + 'prefix': '+', + 'regex': (r'^[0-9\s)(+-]+$', 'i'), + 'map_to': 'targets', + }, + 'targets': { + 'name': _('Targets'), + 'type': 'list:string', + }, + 'language': { + 'name': _('Language'), + 'type': 'string', + 'default': 'en_US', + 'regex': (r'^[^0-9\s]+$', 'i'), + }, + }) + + # Define our template arguments + template_args = dict(NotifyBase.template_args, **{ + 'to': { + 'alias_of': 'targets', + }, + 'from': { + 'alias_of': 'from_phone_id', + }, + 'token': { + 'alias_of': 'token', + }, + 'template': { + 'alias_of': 'template', + }, + 'lang': { + 'alias_of': 'language', + }, + }) + + # Our supported mappings and component keys + component_key_re = re.compile( + r'(?P((?P[1-9][0-9]*)|(?Pbody|type)))', re.IGNORECASE) + + # Define any kwargs we're using + template_kwargs = { + 'template_mapping': { + 'name': _('Template Mapping'), + 'prefix': ':', + }, + } + + def __init__(self, token, from_phone_id, template=None, targets=None, + language=None, template_mapping=None, **kwargs): + """ + Initialize WhatsApp Object + """ + super().__init__(**kwargs) + + # The Access Token associated with the account + self.token = validate_regex( + token, *self.template_tokens['token']['regex']) + if not self.token: + msg = 'An invalid WhatsApp 
Access Token ' \ + '({}) was specified.'.format(token) + self.logger.warning(msg) + raise TypeError(msg) + + # The From Phone ID associated with the account + self.from_phone_id = validate_regex( + from_phone_id, *self.template_tokens['from_phone_id']['regex']) + if not self.from_phone_id: + msg = 'An invalid WhatsApp From Phone ID ' \ + '({}) was specified.'.format(from_phone_id) + self.logger.warning(msg) + raise TypeError(msg) + + # The template to associate with the message + if template: + self.template = validate_regex( + template, *self.template_tokens['template']['regex']) + if not self.template: + msg = 'An invalid WhatsApp Template Name ' \ + '({}) was specified.'.format(template) + self.logger.warning(msg) + raise TypeError(msg) + + # The Template language Code to use + if language: + self.language = validate_regex( + language, *self.template_tokens['language']['regex']) + if not self.language: + msg = 'An invalid WhatsApp Template Language Code ' \ + '({}) was specified.'.format(language) + self.logger.warning(msg) + raise TypeError(msg) + else: + self.language = self.template_tokens['language']['default'] + else: + # + # Message Mode + # + self.template = None + + # Parse our targets + self.targets = list() + + for target in parse_phone_no(targets): + # Validate targets and drop bad ones: + result = is_phone_no(target) + if not result: + self.logger.warning( + 'Dropped invalid phone # ' + '({}) specified.'.format(target), + ) + continue + + # store valid phone number + self.targets.append('+{}'.format(result['full'])) + + self.template_mapping = {} + if template_mapping: + # Store our extra payload entries + self.template_mapping.update(template_mapping) + + # Validate Mapping and prepare Components + self.components = dict() + self.component_keys = list() + for key, val in self.template_mapping.items(): + matched = self.component_key_re.match(key) + if not matched: + msg = 'An invalid Template Component ID ' \ + '({}) was specified.'.format(key) + 
self.logger.warning(msg) + raise TypeError(msg) + + if matched.group('id'): + # + # Manual Component Assigment (by id) + # + index = matched.group('id') + map_to = { + "type": "text", + "text": val, + } + + else: # matched.group('map') + map_to = matched.group('map').lower() + matched = self.component_key_re.match(val) + if not (matched and matched.group('id')): + msg = 'An invalid Template Component Mapping ' \ + '(:{}={}) was specified.'.format(key, val) + self.logger.warning(msg) + raise TypeError(msg) + index = matched.group('id') + + if index in self.components: + msg = 'The Template Component index ' \ + '({}) was already assigned.'.format(key) + self.logger.warning(msg) + raise TypeError(msg) + + self.components[index] = map_to + self.component_keys = self.components.keys() + # Adjust sorting and assume that the user put the order correctly; + # if not Facebook just won't be very happy and will reject the + # message + sorted(self.component_keys) + + return + + def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): + """ + Perform WhatsApp Notification + """ + + if not self.targets: + self.logger.warning( + 'There are no valid WhatsApp targets to notify.') + return False + + # error tracking (used for function return) + has_error = False + + # Prepare our URL + url = self.notify_url.format( + fb_ver=self.fb_graph_version, + phone_id=self.from_phone_id, + ) + + # Prepare our headers + headers = { + 'User-Agent': self.app_id, + 'Accept': 'application/json', + 'Content-Type': 'application/json', + 'Authorization': f'Bearer {self.token}', + } + + payload = { + 'messaging_product': 'whatsapp', + # The To gets populated in the loop below + 'to': None, + } + + if not self.template: + # + # Send Message + # + payload.update({ + 'recipient_type': "individual", + 'type': 'text', + 'text': {"body": body}, + }) + + else: + # + # Send Template + # + payload.update({ + 'type': 'template', + "template": { + "name": self.template, + "language": {"code": 
self.language}, + }, + }) + + if self.components: + payload['template']['components'] = [ + { + "type": "body", + "parameters": [], + } + ] + for key in self.component_keys: + if isinstance(self.components[key], dict): + # Manual Assignment + payload['template']['components'][0]["parameters"]\ + .append(self.components[key]) + continue + + # Mapping of body and/or notify type + payload['template']['components'][0]["parameters"].append({ + "type": "text", + "text": body if self.components[key] == 'body' + else notify_type, + }) + + # Create a copy of the targets list + targets = list(self.targets) + + while len(targets): + # Get our target to notify + target = targets.pop(0) + + # Prepare our user + payload['to'] = target + + # Some Debug Logging + self.logger.debug('WhatsApp POST URL: {} (cert_verify={})'.format( + url, self.verify_certificate)) + self.logger.debug('WhatsApp Payload: {}' .format(payload)) + + # Always call throttle before any remote server i/o is made + self.throttle() + try: + r = requests.post( + url, + data=dumps(payload), + headers=headers, + verify=self.verify_certificate, + timeout=self.request_timeout, + ) + + if r.status_code not in ( + requests.codes.created, requests.codes.ok): + # We had a problem + status_str = \ + NotifyBase.http_response_code_lookup(r.status_code) + + # set up our status code to use + status_code = r.status_code + + try: + # Update our status response if we can + json_response = loads(r.content) + status_code = \ + json_response['error'].get('code', status_code) + status_str = \ + json_response['error'].get('message', status_str) + + except (AttributeError, TypeError, ValueError, KeyError): + # KeyError = r.content is parseable but does not + # contain 'error' + # ValueError = r.content is Unparsable + # TypeError = r.content is None + # AttributeError = r is None + + # We could not parse JSON response. + # We will just use the status we already have. 
+ pass + + self.logger.warning( + 'Failed to send WhatsApp notification to {}: ' + '{}{}error={}.'.format( + target, + status_str, + ', ' if status_str else '', + status_code)) + + self.logger.debug( + 'Response Details:\r\n{}'.format(r.content)) + + # Mark our failure + has_error = True + continue + + else: + self.logger.info( + 'Sent WhatsApp notification to {}.'.format(target)) + + except requests.RequestException as e: + self.logger.warning( + 'A Connection error occurred sending WhatsApp:%s ' % ( + target) + 'notification.' + ) + self.logger.debug('Socket Exception: %s' % str(e)) + + # Mark our failure + has_error = True + continue + + return not has_error + + def url(self, privacy=False, *args, **kwargs): + """ + Returns the URL built dynamically based on specified arguments. + """ + + # Define any URL parameters + params = {} + if self.template: + # Add language to our URL + params['lang'] = self.language + + # Extend our parameters + params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) + + # Payload body extras prefixed with a ':' sign + # Append our payload extras into our parameters + params.update( + {':{}'.format(k): v for k, v in self.template_mapping.items()}) + + return '{schema}://{template}{token}@{from_id}/{targets}/?{params}'\ + .format( + schema=self.secure_protocol, + from_id=self.pprint( + self.from_phone_id, privacy, safe=''), + token=self.pprint(self.token, privacy, safe=''), + template='' if not self.template + else '{}:'.format( + NotifyWhatsApp.quote(self.template, safe='')), + targets='/'.join( + [NotifyWhatsApp.quote(x, safe='') for x in self.targets]), + params=NotifyWhatsApp.urlencode(params)) + + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + targets = len(self.targets) + return targets if targets > 0 else 1 + + @staticmethod + def parse_url(url): + """ + Parses the URL and returns enough arguments that can allow + us to re-instantiate this object. 
+ + """ + results = NotifyBase.parse_url(url, verify_host=False) + + if not results: + # We're done early as we couldn't load the results + return results + + # Get our entries; split_path() looks after unquoting content for us + # by default + results['targets'] = NotifyWhatsApp.split_path(results['fullpath']) + + # The hostname is our From Phone ID + results['from_phone_id'] = NotifyWhatsApp.unquote(results['host']) + + # Determine if we have a Template, otherwise load our token + if results['password']: + # + # Template Mode + # + results['template'] = NotifyWhatsApp.unquote(results['user']) + results['token'] = NotifyWhatsApp.unquote(results['password']) + + else: + # + # Message Mode + # + results['token'] = NotifyWhatsApp.unquote(results['user']) + + # Access token + if 'token' in results['qsd'] and len(results['qsd']['token']): + # Extract the account sid from an argument + results['token'] = \ + NotifyWhatsApp.unquote(results['qsd']['token']) + + # Template + if 'template' in results['qsd'] and len(results['qsd']['template']): + results['template'] = results['qsd']['template'] + + # Template Language + if 'lang' in results['qsd'] and len(results['qsd']['lang']): + results['language'] = results['qsd']['lang'] + + # Support the 'from' and 'source' variable so that we can support + # targets this way too. 
+ # The 'from' makes it easier to use yaml configuration + if 'from' in results['qsd'] and len(results['qsd']['from']): + results['from_phone_id'] = \ + NotifyWhatsApp.unquote(results['qsd']['from']) + if 'source' in results['qsd'] and \ + len(results['qsd']['source']): + results['from_phone_id'] = \ + NotifyWhatsApp.unquote(results['qsd']['source']) + + # Support the 'to' variable so that we can support targets this way too + # The 'to' makes it easier to use yaml configuration + if 'to' in results['qsd'] and len(results['qsd']['to']): + results['targets'] += \ + NotifyWhatsApp.parse_phone_no(results['qsd']['to']) + + # store any additional payload extra's defined + results['template_mapping'] = { + NotifyWhatsApp.unquote(x): NotifyWhatsApp.unquote(y) + for x, y in results['qsd:'].items() + } + + return results diff --git a/lib/apprise/plugins/NotifyWindows.py b/lib/apprise/plugins/NotifyWindows.py index b05e2ebb..226cf92b 100644 --- a/lib/apprise/plugins/NotifyWindows.py +++ b/lib/apprise/plugins/NotifyWindows.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -142,7 +138,7 @@ class NotifyWindows(NotifyBase): win32gui.Shell_NotifyIcon(win32gui.NIM_DELETE, nid) win32api.PostQuitMessage(0) - return None + return 0 def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ diff --git a/lib/apprise/plugins/NotifyXBMC.py b/lib/apprise/plugins/NotifyXBMC.py index 963a74d8..a973989a 100644 --- a/lib/apprise/plugins/NotifyXBMC.py +++ b/lib/apprise/plugins/NotifyXBMC.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE diff --git a/lib/apprise/plugins/NotifyXML.py b/lib/apprise/plugins/NotifyXML.py index bbb3046a..20eeb114 100644 --- a/lib/apprise/plugins/NotifyXML.py +++ b/lib/apprise/plugins/NotifyXML.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. 
Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -41,6 +37,16 @@ from ..common import NotifyType from ..AppriseLocale import gettext_lazy as _ +class XMLPayloadField: + """ + Identifies the fields available in the JSON Payload + """ + VERSION = 'Version' + TITLE = 'Subject' + MESSAGE = 'Message' + MESSAGETYPE = 'MessageType' + + # Defines the method to send the notification METHODS = ( 'POST', @@ -69,6 +75,9 @@ class NotifyXML(NotifyBase): # A URL that takes you to the setup/help of the specific protocol setup_url = 'https://github.com/caronc/apprise/wiki/Notify_Custom_XML' + # Support attachments + attachment_support = True + # Allows the user to specify the NotifyImageSize object image_size = NotifyImageSize.XY_128 @@ -78,7 +87,8 @@ class NotifyXML(NotifyBase): # XSD Information xsd_ver = '1.1' - xsd_url = 'https://raw.githubusercontent.com/caronc/apprise/master' \ + xsd_default_url = \ + 'https://raw.githubusercontent.com/caronc/apprise/master' \ '/apprise/assets/NotifyXML-{version}.xsd' # Define object templates @@ -161,7 +171,7 @@ class NotifyXML(NotifyBase): xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"> - + {{CORE}} {{ATTACHMENTS}} @@ -180,6 +190,18 @@ class NotifyXML(NotifyBase): self.logger.warning(msg) raise TypeError(msg) + # A payload map allows users to over-ride the default mapping if + # they're detected with the :overide=value. Normally this would + # create a new key and assign it the value specified. 
However + # if the key you specify is actually an internally mapped one, + # then a re-mapping takes place using the value + self.payload_map = { + XMLPayloadField.VERSION: XMLPayloadField.VERSION, + XMLPayloadField.TITLE: XMLPayloadField.TITLE, + XMLPayloadField.MESSAGE: XMLPayloadField.MESSAGE, + XMLPayloadField.MESSAGETYPE: XMLPayloadField.MESSAGETYPE, + } + self.params = {} if params: # Store our extra headers @@ -190,6 +212,7 @@ class NotifyXML(NotifyBase): # Store our extra headers self.headers.update(headers) + self.payload_overrides = {} self.payload_extras = {} if payload: # Store our extra payload entries (but tidy them up since they will @@ -201,7 +224,21 @@ class NotifyXML(NotifyBase): 'Ignoring invalid XML Stanza element name({})' .format(k)) continue - self.payload_extras[key] = v + + # Any values set in the payload to alter a system related one + # alters the system key. Hence :message=msg maps the 'message' + # variable that otherwise already contains the payload to be + # 'msg' instead (containing the payload) + if key in self.payload_map: + self.payload_map[key] = v + self.payload_overrides[key] = v + + else: + self.payload_extras[key] = v + + # Set our xsd url + self.xsd_url = None if self.payload_overrides or self.payload_extras \ + else self.xsd_default_url.format(version=self.xsd_ver) return @@ -227,6 +264,8 @@ class NotifyXML(NotifyBase): # Append our payload extra's into our parameters params.update( {':{}'.format(k): v for k, v in self.payload_extras.items()}) + params.update( + {':{}'.format(k): v for k, v in self.payload_overrides.items()}) # Determine Authentication auth = '' @@ -273,14 +312,21 @@ class NotifyXML(NotifyBase): # Our XML Attachmement subsitution xml_attachments = '' - # Our Payload Base - payload_base = { - 'Version': self.xsd_ver, - 'Subject': NotifyXML.escape_html(title, whitespace=False), - 'MessageType': NotifyXML.escape_html( - notify_type, whitespace=False), - 'Message': NotifyXML.escape_html(body, whitespace=False), 
- } + payload_base = {} + + for key, value in ( + (XMLPayloadField.VERSION, self.xsd_ver), + (XMLPayloadField.TITLE, NotifyXML.escape_html( + title, whitespace=False)), + (XMLPayloadField.MESSAGE, NotifyXML.escape_html( + body, whitespace=False)), + (XMLPayloadField.MESSAGETYPE, NotifyXML.escape_html( + notify_type, whitespace=False))): + + if not self.payload_map[key]: + # Do not store element in payload response + continue + payload_base[self.payload_map[key]] = value # Apply our payload extras payload_base.update( @@ -292,7 +338,7 @@ class NotifyXML(NotifyBase): ['<{}>{}'.format(k, v, k) for k, v in payload_base.items()]) attachments = [] - if attach: + if attach and self.attachment_support: for attachment in attach: # Perform some simple error checking if not attachment: @@ -328,7 +374,8 @@ class NotifyXML(NotifyBase): ''.join(attachments) + '' re_map = { - '{{XSD_URL}}': self.xsd_url.format(version=self.xsd_ver), + '{{XSD_URL}}': + f' xmlns:xsi="{self.xsd_url}"' if self.xsd_url else '', '{{ATTACHMENTS}}': xml_attachments, '{{CORE}}': xml_base, } diff --git a/lib/apprise/plugins/NotifyZulip.py b/lib/apprise/plugins/NotifyZulip.py index 19f3e29e..f0d0cd8d 100644 --- a/lib/apprise/plugins/NotifyZulip.py +++ b/lib/apprise/plugins/NotifyZulip.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -131,6 +127,7 @@ class NotifyZulip(NotifyBase): 'name': _('Bot Name'), 'type': 'string', 'regex': (r'^[A-Z0-9_-]{1,32}$', 'i'), + 'required': True, }, 'organization': { 'name': _('Organization'), @@ -359,6 +356,12 @@ class NotifyZulip(NotifyBase): params=NotifyZulip.urlencode(params), ) + def __len__(self): + """ + Returns the number of targets associated with this notification + """ + return len(self.targets) + @staticmethod def parse_url(url): """ diff --git a/lib/apprise/plugins/__init__.py b/lib/apprise/plugins/__init__.py index 5560568b..27afef05 100644 --- a/lib/apprise/plugins/__init__.py +++ b/lib/apprise/plugins/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -165,6 +161,9 @@ def _sanitize_token(tokens, default_delimiter): """ + # Used for tracking groups + group_map = {} + # Iterate over our tokens for key in tokens.keys(): @@ -181,14 +180,27 @@ def _sanitize_token(tokens, default_delimiter): # Default type to key tokens[key]['map_to'] = key + # Track our map_to objects + if tokens[key]['map_to'] not in group_map: + group_map[tokens[key]['map_to']] = set() + group_map[tokens[key]['map_to']].add(key) + if 'type' not in tokens[key]: # Default type to string tokens[key]['type'] = 'string' - elif tokens[key]['type'].startswith('list') \ - and 'delim' not in tokens[key]: - # Default list delimiter (if not otherwise specified) - tokens[key]['delim'] = default_delimiter + elif tokens[key]['type'].startswith('list'): + if 'delim' not in tokens[key]: + # Default list delimiter (if not otherwise specified) + tokens[key]['delim'] = default_delimiter + + if key in group_map[tokens[key]['map_to']]: # pragma: no branch + # Remove ourselves from the list + group_map[tokens[key]['map_to']].remove(key) + + # Pointing to the set directly so we can dynamically update + # ourselves + tokens[key]['group'] = group_map[tokens[key]['map_to']] elif tokens[key]['type'].startswith('choice') \ and 'default' not in tokens[key] \ @@ -266,6 +278,13 @@ def details(plugin): # # Identifies if the entry specified is required or not # 'required': True, # + # # Identifies all tokens detected to be associated with the + # # list:string + # # This is ony present in list:string objects and is only set + # # if this element acts as an alias for several other + # # kwargs/fields. 
+ # 'group': [], + # # # Identify a default value # 'default': 'http', # diff --git a/lib/apprise/py3compat/__init__.py b/lib/apprise/py3compat/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/lib/apprise/py3compat/asyncio.py b/lib/apprise/py3compat/asyncio.py deleted file mode 100644 index a5313906..00000000 --- a/lib/apprise/py3compat/asyncio.py +++ /dev/null @@ -1,140 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright (C) 2020 Chris Caron -# All rights reserved. -# -# This code is licensed under the MIT License. -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files(the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions : -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -# THE SOFTWARE. 
- -import sys -import asyncio -from functools import partial -from ..URLBase import URLBase -from ..logger import logger - - -# A global flag that tracks if we are Python v3.7 or higher -ASYNCIO_RUN_SUPPORT = \ - sys.version_info.major > 3 or \ - (sys.version_info.major == 3 and sys.version_info.minor >= 7) - - -async def notify(coroutines): - """ - An async wrapper to the AsyncNotifyBase.async_notify() calls allowing us - to call gather() and collect the responses - """ - - # Create log entry - logger.info( - 'Notifying {} service(s) asynchronously.'.format(len(coroutines))) - - results = await asyncio.gather(*coroutines, return_exceptions=True) - - # Returns True if all notifications succeeded, otherwise False is - # returned. - failed = any(not status or isinstance(status, Exception) - for status in results) - return not failed - - -def tosync(cor, debug=False): - """ - Await a coroutine from non-async code. - """ - - if ASYNCIO_RUN_SUPPORT: - try: - loop = asyncio.get_running_loop() - - except RuntimeError: - # There is no existing event loop, so we can start our own. - return asyncio.run(cor, debug=debug) - - else: - # Enable debug mode - loop.set_debug(debug) - - # Run the coroutine and wait for the result. - task = loop.create_task(cor) - return asyncio.ensure_future(task, loop=loop) - - else: - # The Deprecated Way (<= Python v3.6) - try: - # acquire access to our event loop - loop = asyncio.get_event_loop() - - except RuntimeError: - # This happens if we're inside a thread of another application - # where there is no running event_loop(). Pythong v3.7 and - # higher automatically take care of this case for us. But for - # the lower versions we need to do the following: - loop = asyncio.new_event_loop() - asyncio.set_event_loop(loop) - - # Enable debug mode - loop.set_debug(debug) - - return loop.run_until_complete(cor) - - -async def toasyncwrapvalue(v): - """ - Create a coroutine that, when run, returns the provided value. 
- """ - - return v - - -async def toasyncwrap(fn): - """ - Create a coroutine that, when run, executes the provided function. - """ - - return fn() - - -class AsyncNotifyBase(URLBase): - """ - asyncio wrapper for the NotifyBase object - """ - - async def async_notify(self, *args, **kwargs): - """ - Async Notification Wrapper - """ - - loop = asyncio.get_event_loop() - - try: - return await loop.run_in_executor( - None, partial(self.notify, *args, **kwargs)) - - except TypeError: - # These are our internally thrown notifications - pass - - except Exception: - # A catch-all so we don't have to abort early - # just because one of our plugins has a bug in it. - logger.exception("Notification Exception") - - return False diff --git a/lib/apprise/utils.py b/lib/apprise/utils.py index 60db30b1..8d644ce9 100644 --- a/lib/apprise/utils.py +++ b/lib/apprise/utils.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# BSD 3-Clause License +# BSD 2-Clause License # # Apprise - Push Notification Library. # Copyright (c) 2023, Chris Caron @@ -14,10 +14,6 @@ # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # -# 3. Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -36,6 +32,7 @@ import json import contextlib import os import hashlib +import locale from itertools import chain from os.path import expanduser from functools import reduce @@ -63,7 +60,13 @@ def import_module(path, name): except Exception as e: # module isn't loadable - del sys.modules[name] + try: + del sys.modules[name] + + except KeyError: + # nothing to clean up + pass + module = None logger.debug( @@ -136,14 +139,14 @@ NOTIFY_CUSTOM_DEL_TOKENS = re.compile(r'^-(?P.*)\s*') NOTIFY_CUSTOM_COLON_TOKENS = re.compile(r'^:(?P.*)\s*') # Used for attempting to acquire the schema if the URL can't be parsed. -GET_SCHEMA_RE = re.compile(r'\s*(?P[a-z0-9]{2,9})://.*$', re.I) +GET_SCHEMA_RE = re.compile(r'\s*(?P[a-z0-9]{1,12})://.*$', re.I) # Used for validating that a provided entry is indeed a schema # this is slightly different then the GET_SCHEMA_RE above which # insists the schema is only valid with a :// entry. 
this one # extrapolates the individual entries URL_DETAILS_RE = re.compile( - r'\s*(?P[a-z0-9]{2,9})(://(?P.*))?$', re.I) + r'\s*(?P[a-z0-9]{1,12})(://(?P.*))?$', re.I) # Regular expression based and expanded from: # http://www.regular-expressions.info/email.html @@ -187,7 +190,7 @@ CALL_SIGN_DETECTION_RE = re.compile( # Regular expression used to destinguish between multiple URLs URL_DETECTION_RE = re.compile( - r'([a-z0-9]+?:\/\/.*?)(?=$|[\s,]+[a-z0-9]{2,9}?:\/\/)', re.I) + r'([a-z0-9]+?:\/\/.*?)(?=$|[\s,]+[a-z0-9]{1,12}?:\/\/)', re.I) EMAIL_DETECTION_RE = re.compile( r'[\s,]*([^@]+@.*?)(?=$|[\s,]+' @@ -200,6 +203,9 @@ UUID4_RE = re.compile( r'[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}', re.IGNORECASE) +# Validate if we're a loadable Python file or not +VALID_PYTHON_FILE_RE = re.compile(r'.+\.py(o|c)?$', re.IGNORECASE) + # validate_regex() utilizes this mapping to track and re-use pre-complied # regular expressions REGEX_VALIDATE_LOOKUP = {} @@ -1110,7 +1116,7 @@ def urlencode(query, doseq=False, safe='', encoding=None, errors=None): errors=errors) -def parse_list(*args): +def parse_list(*args, cast=None): """ Take a string list and break it into a delimited list of arguments. This funciton also supports @@ -1133,6 +1139,9 @@ def parse_list(*args): result = [] for arg in args: + if not isinstance(arg, (str, set, list, bool, tuple)) and arg and cast: + arg = cast(arg) + if isinstance(arg, str): result += re.split(STRING_DELIMITERS, arg) @@ -1145,7 +1154,6 @@ def parse_list(*args): # Since Python v3 returns a filter (iterator) whereas Python v2 returned # a list, we need to change it into a list object to remain compatible with # both distribution types. - # TODO: Review after dropping support for Python 2. 
return sorted([x for x in filter(bool, list(set(result)))]) @@ -1479,7 +1487,7 @@ def environ(*remove, **update): # Create a backup of our environment for restoration purposes env_orig = os.environ.copy() - + loc_orig = locale.getlocale() try: os.environ.update(update) [os.environ.pop(k, None) for k in remove] @@ -1488,6 +1496,13 @@ def environ(*remove, **update): finally: # Restore our snapshot os.environ = env_orig.copy() + try: + # Restore locale + locale.setlocale(locale.LC_ALL, loc_orig) + + except locale.Error: + # Thrown in py3.6 + pass def apply_template(template, app_mode=TemplateType.RAW, **kwargs): @@ -1565,6 +1580,11 @@ def module_detection(paths, cache=True): # Since our plugin name can conflict (as a module) with another # we want to generate random strings to avoid steping on # another's namespace + if not (path and VALID_PYTHON_FILE_RE.match(path)): + # Ignore file/module type + logger.trace('Plugin Scan: Skipping %s', path) + return None + module_name = hashlib.sha1(path.encode('utf-8')).hexdigest() module_pyname = "{prefix}.{name}".format( prefix='apprise.custom.module', name=module_name) diff --git a/lib/attr/__init__.py b/lib/attr/__init__.py index 7cfa792f..9226258a 100644 --- a/lib/attr/__init__.py +++ b/lib/attr/__init__.py @@ -9,6 +9,7 @@ from typing import Callable from . 
import converters, exceptions, filters, setters, validators from ._cmp import cmp_using +from ._compat import Protocol from ._config import get_run_validators, set_run_validators from ._funcs import asdict, assoc, astuple, evolve, has, resolve_types from ._make import ( @@ -31,7 +32,7 @@ ib = attr = attrib dataclass = partial(attrs, auto_attribs=True) # happy Easter ;) -class AttrsInstance: +class AttrsInstance(Protocol): pass @@ -90,8 +91,9 @@ def _make_getattr(mod_name: str) -> Callable: "__email__": "", "__license__": "license", } - if name not in dunder_to_metadata.keys(): - raise AttributeError(f"module {mod_name} has no attribute {name}") + if name not in dunder_to_metadata: + msg = f"module {mod_name} has no attribute {name}" + raise AttributeError(msg) import sys import warnings @@ -101,7 +103,7 @@ def _make_getattr(mod_name: str) -> Callable: else: from importlib.metadata import metadata - if name != "__version_info__": + if name not in ("__version__", "__version_info__"): warnings.warn( f"Accessing {mod_name}.{name} is deprecated and will be " "removed in a future release. 
Use importlib.metadata directly " @@ -113,15 +115,15 @@ def _make_getattr(mod_name: str) -> Callable: meta = metadata("attrs") if name == "__license__": return "MIT" - elif name == "__copyright__": + if name == "__copyright__": return "Copyright (c) 2015 Hynek Schlawack" - elif name in ("__uri__", "__url__"): + if name in ("__uri__", "__url__"): return meta["Project-URL"].split(" ", 1)[-1] - elif name == "__version_info__": + if name == "__version_info__": return VersionInfo._from_version_string(meta["version"]) - elif name == "__author__": + if name == "__author__": return meta["Author-email"].rsplit(" ", 1)[0] - elif name == "__email__": + if name == "__email__": return meta["Author-email"].rsplit("<", 1)[1][:-1] return meta[dunder_to_metadata[name]] diff --git a/lib/attr/__init__.pyi b/lib/attr/__init__.pyi index ced5a3fd..0f641501 100644 --- a/lib/attr/__init__.pyi +++ b/lib/attr/__init__.pyi @@ -33,6 +33,11 @@ if sys.version_info >= (3, 10): else: from typing_extensions import TypeGuard +if sys.version_info >= (3, 11): + from typing import dataclass_transform +else: + from typing_extensions import dataclass_transform + __version__: str __version_info__: VersionInfo __title__: str @@ -69,8 +74,7 @@ _ValidatorArgType = Union[_ValidatorType[_T], Sequence[_ValidatorType[_T]]] class AttrsInstance(AttrsInstance_, Protocol): pass -_A = TypeVar("_A", bound=AttrsInstance) -# _make -- +_A = TypeVar("_A", bound=type[AttrsInstance]) class _Nothing(enum.Enum): NOTHING = enum.auto() @@ -104,23 +108,6 @@ else: takes_self: bool = ..., ) -> _T: ... -# Static type inference support via __dataclass_transform__ implemented as per: -# https://github.com/microsoft/pyright/blob/1.1.135/specs/dataclass_transforms.md -# This annotation must be applied to all overloads of "define" and "attrs" -# -# NOTE: This is a typing construct and does not exist at runtime. 
Extensions -# wrapping attrs decorators should declare a separate __dataclass_transform__ -# signature in the extension module using the specification linked above to -# provide pyright support. -def __dataclass_transform__( - *, - eq_default: bool = True, - order_default: bool = False, - kw_only_default: bool = False, - frozen_default: bool = False, - field_descriptors: Tuple[Union[type, Callable[..., Any]], ...] = (()), -) -> Callable[[_T], _T]: ... - class Attribute(Generic[_T]): name: str default: Optional[_T] @@ -323,7 +310,7 @@ def field( type: Optional[type] = ..., ) -> Any: ... @overload -@__dataclass_transform__(order_default=True, field_descriptors=(attrib, field)) +@dataclass_transform(order_default=True, field_specifiers=(attrib, field)) def attrs( maybe_cls: _C, these: Optional[Dict[str, Any]] = ..., @@ -351,7 +338,7 @@ def attrs( unsafe_hash: Optional[bool] = ..., ) -> _C: ... @overload -@__dataclass_transform__(order_default=True, field_descriptors=(attrib, field)) +@dataclass_transform(order_default=True, field_specifiers=(attrib, field)) def attrs( maybe_cls: None = ..., these: Optional[Dict[str, Any]] = ..., @@ -379,7 +366,7 @@ def attrs( unsafe_hash: Optional[bool] = ..., ) -> Callable[[_C], _C]: ... @overload -@__dataclass_transform__(field_descriptors=(attrib, field)) +@dataclass_transform(field_specifiers=(attrib, field)) def define( maybe_cls: _C, *, @@ -405,7 +392,7 @@ def define( match_args: bool = ..., ) -> _C: ... @overload -@__dataclass_transform__(field_descriptors=(attrib, field)) +@dataclass_transform(field_specifiers=(attrib, field)) def define( maybe_cls: None = ..., *, @@ -434,9 +421,7 @@ def define( mutable = define @overload -@__dataclass_transform__( - frozen_default=True, field_descriptors=(attrib, field) -) +@dataclass_transform(frozen_default=True, field_specifiers=(attrib, field)) def frozen( maybe_cls: _C, *, @@ -462,9 +447,7 @@ def frozen( match_args: bool = ..., ) -> _C: ... 
@overload -@__dataclass_transform__( - frozen_default=True, field_descriptors=(attrib, field) -) +@dataclass_transform(frozen_default=True, field_specifiers=(attrib, field)) def frozen( maybe_cls: None = ..., *, diff --git a/lib/attr/_cmp.py b/lib/attr/_cmp.py index d9cbe22c..a4a35e08 100644 --- a/lib/attr/_cmp.py +++ b/lib/attr/_cmp.py @@ -92,10 +92,8 @@ def cmp_using( if not has_eq_function: # functools.total_ordering requires __eq__ to be defined, # so raise early error here to keep a nice stack. - raise ValueError( - "eq must be define is order to complete ordering from " - "lt, le, gt, ge." - ) + msg = "eq must be define is order to complete ordering from lt, le, gt, ge." + raise ValueError(msg) type_ = functools.total_ordering(type_) return type_ @@ -142,10 +140,7 @@ def _is_comparable_to(self, other): """ Check whether `other` is comparable to `self`. """ - for func in self._requirements: - if not func(self, other): - return False - return True + return all(func(self, other) for func in self._requirements) def _check_same_type(self, other): diff --git a/lib/attr/_compat.py b/lib/attr/_compat.py index c3bf5e33..41fcf046 100644 --- a/lib/attr/_compat.py +++ b/lib/attr/_compat.py @@ -1,6 +1,5 @@ # SPDX-License-Identifier: MIT - import inspect import platform import sys @@ -8,7 +7,7 @@ import threading import types import warnings -from collections.abc import Mapping, Sequence # noqa +from collections.abc import Mapping, Sequence # noqa: F401 from typing import _GenericAlias @@ -18,6 +17,15 @@ PY310 = sys.version_info[:2] >= (3, 10) PY_3_12_PLUS = sys.version_info[:2] >= (3, 12) +if sys.version_info < (3, 8): + try: + from typing_extensions import Protocol + except ImportError: # pragma: no cover + Protocol = object +else: + from typing import Protocol # noqa: F401 + + def just_warn(*args, **kw): warnings.warn( "Running interpreter doesn't sufficiently support code object " @@ -155,7 +163,7 @@ def make_set_closure_cell(): if cell.cell_contents != 100: raise 
AssertionError # pragma: no cover - except Exception: + except Exception: # noqa: BLE001 return just_warn else: return set_closure_cell diff --git a/lib/attr/_config.py b/lib/attr/_config.py index 96d42007..9c245b14 100644 --- a/lib/attr/_config.py +++ b/lib/attr/_config.py @@ -1,6 +1,5 @@ # SPDX-License-Identifier: MIT - __all__ = ["set_run_validators", "get_run_validators"] _run_validators = True @@ -15,7 +14,8 @@ def set_run_validators(run): instead. """ if not isinstance(run, bool): - raise TypeError("'run' must be bool.") + msg = "'run' must be bool." + raise TypeError(msg) global _run_validators _run_validators = run diff --git a/lib/attr/_funcs.py b/lib/attr/_funcs.py index 7f5d9610..a888991d 100644 --- a/lib/attr/_funcs.py +++ b/lib/attr/_funcs.py @@ -72,19 +72,25 @@ def asdict( ) elif isinstance(v, (tuple, list, set, frozenset)): cf = v.__class__ if retain_collection_types is True else list - rv[a.name] = cf( - [ - _asdict_anything( - i, - is_key=False, - filter=filter, - dict_factory=dict_factory, - retain_collection_types=retain_collection_types, - value_serializer=value_serializer, - ) - for i in v - ] - ) + items = [ + _asdict_anything( + i, + is_key=False, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ) + for i in v + ] + try: + rv[a.name] = cf(items) + except TypeError: + if not issubclass(cf, tuple): + raise + # Workaround for TypeError: cf.__new__() missing 1 required + # positional argument (which appears, for a namedturle) + rv[a.name] = cf(*items) elif isinstance(v, dict): df = dict_factory rv[a.name] = df( @@ -241,22 +247,26 @@ def astuple( ) elif isinstance(v, (tuple, list, set, frozenset)): cf = v.__class__ if retain is True else list - rv.append( - cf( - [ - astuple( - j, - recurse=True, - filter=filter, - tuple_factory=tuple_factory, - retain_collection_types=retain, - ) - if has(j.__class__) - else j - for j in v - ] + items = [ + astuple( + j, + 
recurse=True, + filter=filter, + tuple_factory=tuple_factory, + retain_collection_types=retain, ) - ) + if has(j.__class__) + else j + for j in v + ] + try: + rv.append(cf(items)) + except TypeError: + if not issubclass(cf, tuple): + raise + # Workaround for TypeError: cf.__new__() missing 1 required + # positional argument (which appears, for a namedturle) + rv.append(cf(*items)) elif isinstance(v, dict): df = v.__class__ if retain is True else dict rv.append( @@ -344,9 +354,8 @@ def assoc(inst, **changes): for k, v in changes.items(): a = getattr(attrs, k, NOTHING) if a is NOTHING: - raise AttrsAttributeNotFoundError( - f"{k} is not an attrs attribute on {new.__class__}." - ) + msg = f"{k} is not an attrs attribute on {new.__class__}." + raise AttrsAttributeNotFoundError(msg) _obj_setattr(new, k, v) return new @@ -379,17 +388,14 @@ def evolve(*args, **changes): try: (inst,) = args except ValueError: - raise TypeError( - f"evolve() takes 1 positional argument, but {len(args)} " - "were given" - ) from None + msg = f"evolve() takes 1 positional argument, but {len(args)} were given" + raise TypeError(msg) from None else: try: inst = changes.pop("inst") except KeyError: - raise TypeError( - "evolve() missing 1 required positional argument: 'inst'" - ) from None + msg = "evolve() missing 1 required positional argument: 'inst'" + raise TypeError(msg) from None import warnings diff --git a/lib/attr/_make.py b/lib/attr/_make.py index d72f738e..fd106367 100644 --- a/lib/attr/_make.py +++ b/lib/attr/_make.py @@ -1,7 +1,9 @@ # SPDX-License-Identifier: MIT +import contextlib import copy import enum +import inspect import linecache import sys import types @@ -87,7 +89,7 @@ class _CacheHashWrapper(int): See GH #613 for more details. 
""" - def __reduce__(self, _none_constructor=type(None), _args=()): + def __reduce__(self, _none_constructor=type(None), _args=()): # noqa: B008 return _none_constructor, _args @@ -248,18 +250,18 @@ def attrib( ) if hash is not None and hash is not True and hash is not False: - raise TypeError( - "Invalid value for hash. Must be True, False, or None." - ) + msg = "Invalid value for hash. Must be True, False, or None." + raise TypeError(msg) if factory is not None: if default is not NOTHING: - raise ValueError( - "The `default` and `factory` arguments are mutually " - "exclusive." + msg = ( + "The `default` and `factory` arguments are mutually exclusive." ) + raise ValueError(msg) if not callable(factory): - raise ValueError("The `factory` argument must be a callable.") + msg = "The `factory` argument must be a callable." + raise ValueError(msg) default = Factory(factory) if metadata is None: @@ -323,9 +325,9 @@ def _make_method(name, script, filename, globs): old_val = linecache.cache.setdefault(filename, linecache_tuple) if old_val == linecache_tuple: break - else: - filename = f"{base_filename[:-1]}-{count}>" - count += 1 + + filename = f"{base_filename[:-1]}-{count}>" + count += 1 _compile_and_eval(script, globs, locs, filename) @@ -430,7 +432,7 @@ def _collect_base_attrs(cls, taken_attr_names): if a.inherited or a.name in taken_attr_names: continue - a = a.evolve(inherited=True) + a = a.evolve(inherited=True) # noqa: PLW2901 base_attrs.append(a) base_attr_map[a.name] = base_cls @@ -468,7 +470,7 @@ def _collect_base_attrs_broken(cls, taken_attr_names): if a.name in taken_attr_names: continue - a = a.evolve(inherited=True) + a = a.evolve(inherited=True) # noqa: PLW2901 taken_attr_names.add(a.name) base_attrs.append(a) base_attr_map[a.name] = base_cls @@ -493,7 +495,7 @@ def _transform_attrs( anns = _get_annotations(cls) if these is not None: - ca_list = [(name, ca) for name, ca in these.items()] + ca_list = list(these.items()) elif auto_attribs is True: ca_names 
= { name @@ -509,10 +511,7 @@ def _transform_attrs( a = cd.get(attr_name, NOTHING) if not isinstance(a, _CountingAttr): - if a is NOTHING: - a = attrib() - else: - a = attrib(default=a) + a = attrib() if a is NOTHING else attrib(default=a) ca_list.append((attr_name, a)) unannotated = ca_names - annot_names @@ -563,10 +562,8 @@ def _transform_attrs( had_default = False for a in (a for a in attrs if a.init is not False and a.kw_only is False): if had_default is True and a.default is NOTHING: - raise ValueError( - "No mandatory attributes allowed after an attribute with a " - f"default value or factory. Attribute in question: {a!r}" - ) + msg = f"No mandatory attributes allowed after an attribute with a default value or factory. Attribute in question: {a!r}" + raise ValueError(msg) if had_default is False and a.default is not NOTHING: had_default = True @@ -628,6 +625,7 @@ class _ClassBuilder: "_delete_attribs", "_frozen", "_has_pre_init", + "_pre_init_has_args", "_has_post_init", "_is_exc", "_on_setattr", @@ -674,6 +672,13 @@ class _ClassBuilder: self._weakref_slot = weakref_slot self._cache_hash = cache_hash self._has_pre_init = bool(getattr(cls, "__attrs_pre_init__", False)) + self._pre_init_has_args = False + if self._has_pre_init: + # Check if the pre init method has more arguments than just `self` + # We want to pass arguments if pre init expects arguments + pre_init_func = cls.__attrs_pre_init__ + pre_init_signature = inspect.signature(pre_init_func) + self._pre_init_has_args = len(pre_init_signature.parameters) > 1 self._has_post_init = bool(getattr(cls, "__attrs_post_init__", False)) self._delete_attribs = not bool(these) self._is_exc = is_exc @@ -768,13 +773,11 @@ class _ClassBuilder: name not in base_names and getattr(cls, name, _sentinel) is not _sentinel ): - try: + # An AttributeError can happen if a base class defines a + # class variable and we want to set an attribute with the + # same name by using only a type annotation. 
+ with contextlib.suppress(AttributeError): delattr(cls, name) - except AttributeError: - # This can happen if a base class defines a class - # variable and we want to set an attribute with the - # same name by using only a type annotation. - pass # Attach our dunder methods. for name, value in self._cls_dict.items(): @@ -799,7 +802,7 @@ class _ClassBuilder: cd = { k: v for k, v in self._cls_dict.items() - if k not in tuple(self._attr_names) + ("__dict__", "__weakref__") + if k not in (*tuple(self._attr_names), "__dict__", "__weakref__") } # If our class doesn't have its own implementation of __setattr__ @@ -821,7 +824,7 @@ class _ClassBuilder: # Traverse the MRO to collect existing slots # and check for an existing __weakref__. - existing_slots = dict() + existing_slots = {} weakref_inherited = False for base_cls in self._cls.__mro__[1:-1]: if base_cls.__dict__.get("__weakref__", None) is not None: @@ -890,7 +893,8 @@ class _ClassBuilder: for cell in closure_cells: try: match = cell.cell_contents is self._cls - except ValueError: # ValueError: Cell is empty + except ValueError: # noqa: PERF203 + # ValueError: Cell is empty pass else: if match: @@ -907,9 +911,8 @@ class _ClassBuilder: def add_str(self): repr = self._cls_dict.get("__repr__") if repr is None: - raise ValueError( - "__str__ can only be generated if a __repr__ exists." - ) + msg = "__str__ can only be generated if a __repr__ exists." + raise ValueError(msg) def __str__(self): return self.__repr__() @@ -980,6 +983,7 @@ class _ClassBuilder: self._cls, self._attrs, self._has_pre_init, + self._pre_init_has_args, self._has_post_init, self._frozen, self._slots, @@ -1006,6 +1010,7 @@ class _ClassBuilder: self._cls, self._attrs, self._has_pre_init, + self._pre_init_has_args, self._has_post_init, self._frozen, self._slots, @@ -1054,9 +1059,8 @@ class _ClassBuilder: if self._has_custom_setattr: # We need to write a __setattr__ but there already is one! 
- raise ValueError( - "Can't combine custom __setattr__ with on_setattr hooks." - ) + msg = "Can't combine custom __setattr__ with on_setattr hooks." + raise ValueError(msg) # docstring comes from _add_method_dunders def __setattr__(self, name, val): @@ -1079,25 +1083,17 @@ class _ClassBuilder: """ Add __module__ and __qualname__ to a *method* if possible. """ - try: + with contextlib.suppress(AttributeError): method.__module__ = self._cls.__module__ - except AttributeError: - pass - try: - method.__qualname__ = ".".join( - (self._cls.__qualname__, method.__name__) - ) - except AttributeError: - pass + with contextlib.suppress(AttributeError): + method.__qualname__ = f"{self._cls.__qualname__}.{method.__name__}" - try: + with contextlib.suppress(AttributeError): method.__doc__ = ( "Method generated by attrs for class " f"{self._cls.__qualname__}." ) - except AttributeError: - pass return method @@ -1108,7 +1104,8 @@ def _determine_attrs_eq_order(cmp, eq, order, default_eq): values of eq and order. If *eq* is None, set it to *default_eq*. """ if cmp is not None and any((eq is not None, order is not None)): - raise ValueError("Don't mix `cmp` with `eq' and `order`.") + msg = "Don't mix `cmp` with `eq' and `order`." + raise ValueError(msg) # cmp takes precedence due to bw-compatibility. if cmp is not None: @@ -1123,7 +1120,8 @@ def _determine_attrs_eq_order(cmp, eq, order, default_eq): order = eq if eq is False and order is True: - raise ValueError("`order` can only be True if `eq` is True too.") + msg = "`order` can only be True if `eq` is True too." + raise ValueError(msg) return eq, order @@ -1134,7 +1132,8 @@ def _determine_attrib_eq_order(cmp, eq, order, default_eq): values of eq and order. If *eq* is None, set it to *default_eq*. """ if cmp is not None and any((eq is not None, order is not None)): - raise ValueError("Don't mix `cmp` with `eq' and `order`.") + msg = "Don't mix `cmp` with `eq' and `order`." 
+ raise ValueError(msg) def decide_callable_or_boolean(value): """ @@ -1164,7 +1163,8 @@ def _determine_attrib_eq_order(cmp, eq, order, default_eq): order, order_key = decide_callable_or_boolean(order) if eq is False and order is True: - raise ValueError("`order` can only be True if `eq` is True too.") + msg = "`order` can only be True if `eq` is True too." + raise ValueError(msg) return eq, eq_key, order, order_key @@ -1494,7 +1494,8 @@ def attrs( ) if has_own_setattr and is_frozen: - raise ValueError("Can't freeze a class with a custom __setattr__.") + msg = "Can't freeze a class with a custom __setattr__." + raise ValueError(msg) builder = _ClassBuilder( cls, @@ -1547,18 +1548,15 @@ def attrs( if hash is not True and hash is not False and hash is not None: # Can't use `hash in` because 1 == True for example. - raise TypeError( - "Invalid value for hash. Must be True, False, or None." - ) - elif hash is False or (hash is None and eq is False) or is_exc: + msg = "Invalid value for hash. Must be True, False, or None." + raise TypeError(msg) + + if hash is False or (hash is None and eq is False) or is_exc: # Don't do anything. Should fall back to __object__'s __hash__ # which is by id. if cache_hash: - raise TypeError( - "Invalid value for cache_hash. To use hash caching," - " hashing must be either explicitly or implicitly " - "enabled." - ) + msg = "Invalid value for cache_hash. To use hash caching, hashing must be either explicitly or implicitly enabled." + raise TypeError(msg) elif hash is True or ( hash is None and eq is True and is_frozen is True ): @@ -1567,11 +1565,8 @@ def attrs( else: # Raise TypeError on attempts to hash. if cache_hash: - raise TypeError( - "Invalid value for cache_hash. To use hash caching," - " hashing must be either explicitly or implicitly " - "enabled." - ) + msg = "Invalid value for cache_hash. To use hash caching, hashing must be either explicitly or implicitly enabled." 
+ raise TypeError(msg) builder.make_unhashable() if _determine_whether_to_implement( @@ -1581,10 +1576,8 @@ def attrs( else: builder.add_attrs_init() if cache_hash: - raise TypeError( - "Invalid value for cache_hash. To use hash caching," - " init must be True." - ) + msg = "Invalid value for cache_hash. To use hash caching, init must be True." + raise TypeError(msg) if ( PY310 @@ -1599,8 +1592,8 @@ def attrs( # if it's used as `@attrs` but ``None`` if used as `@attrs()`. if maybe_cls is None: return wrap - else: - return wrap(maybe_cls) + + return wrap(maybe_cls) _attrs = attrs @@ -1648,10 +1641,7 @@ def _make_hash(cls, attrs, frozen, cache_hash): else: hash_def += ", *" - hash_def += ( - ", _cache_wrapper=" - + "__import__('attr._make')._make._CacheHashWrapper):" - ) + hash_def += ", _cache_wrapper=__import__('attr._make')._make._CacheHashWrapper):" hash_func = "_cache_wrapper(" + hash_func closing_braces += ")" @@ -1760,7 +1750,7 @@ def _make_eq(cls, attrs): lines.append(f" self.{a.name},") others.append(f" other.{a.name},") - lines += others + [" )"] + lines += [*others, " )"] else: lines.append(" return True") @@ -1928,7 +1918,8 @@ def fields(cls): generic_base = get_generic_base(cls) if generic_base is None and not isinstance(cls, type): - raise TypeError("Passed object must be a class.") + msg = "Passed object must be a class." + raise TypeError(msg) attrs = getattr(cls, "__attrs_attrs__", None) @@ -1941,7 +1932,8 @@ def fields(cls): # efficient. cls.__attrs_attrs__ = attrs return attrs - raise NotAnAttrsClassError(f"{cls!r} is not an attrs-decorated class.") + msg = f"{cls!r} is not an attrs-decorated class." + raise NotAnAttrsClassError(msg) return attrs @@ -1962,10 +1954,12 @@ def fields_dict(cls): .. versionadded:: 18.1.0 """ if not isinstance(cls, type): - raise TypeError("Passed object must be a class.") + msg = "Passed object must be a class." 
+ raise TypeError(msg) attrs = getattr(cls, "__attrs_attrs__", None) if attrs is None: - raise NotAnAttrsClassError(f"{cls!r} is not an attrs-decorated class.") + msg = f"{cls!r} is not an attrs-decorated class." + raise NotAnAttrsClassError(msg) return {a.name: a for a in attrs} @@ -2001,6 +1995,7 @@ def _make_init( cls, attrs, pre_init, + pre_init_has_args, post_init, frozen, slots, @@ -2015,7 +2010,8 @@ def _make_init( ) if frozen and has_cls_on_setattr: - raise ValueError("Frozen classes can't use on_setattr.") + msg = "Frozen classes can't use on_setattr." + raise ValueError(msg) needs_cached_setattr = cache_hash or frozen filtered_attrs = [] @@ -2029,7 +2025,8 @@ def _make_init( if a.on_setattr is not None: if frozen is True: - raise ValueError("Frozen classes can't use on_setattr.") + msg = "Frozen classes can't use on_setattr." + raise ValueError(msg) needs_cached_setattr = True elif has_cls_on_setattr and a.on_setattr is not setters.NO_OP: @@ -2042,6 +2039,7 @@ def _make_init( frozen, slots, pre_init, + pre_init_has_args, post_init, cache_hash, base_attr_map, @@ -2122,6 +2120,7 @@ def _attrs_to_init_script( frozen, slots, pre_init, + pre_init_has_args, post_init, cache_hash, base_attr_map, @@ -2208,10 +2207,7 @@ def _attrs_to_init_script( arg_name = a.alias has_factory = isinstance(a.default, Factory) - if has_factory and a.default.takes_self: - maybe_self = "self" - else: - maybe_self = "" + maybe_self = "self" if has_factory and a.default.takes_self else "" if a.init is False: if has_factory: @@ -2235,25 +2231,24 @@ def _attrs_to_init_script( ) ) names_for_globals[init_factory_name] = a.default.factory + elif a.converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, + f"attr_dict['{attr_name}'].default", + has_on_setattr, + ) + ) + conv_name = _init_converter_pat % (a.name,) + names_for_globals[conv_name] = a.converter else: - if a.converter is not None: - lines.append( - fmt_setter_with_converter( - attr_name, - 
f"attr_dict['{attr_name}'].default", - has_on_setattr, - ) - ) - conv_name = _init_converter_pat % (a.name,) - names_for_globals[conv_name] = a.converter - else: - lines.append( - fmt_setter( - attr_name, - f"attr_dict['{attr_name}'].default", - has_on_setattr, - ) + lines.append( + fmt_setter( + attr_name, + f"attr_dict['{attr_name}'].default", + has_on_setattr, ) + ) elif a.default is not NOTHING and not has_factory: arg = f"{arg_name}=attr_dict['{attr_name}'].default" if a.kw_only: @@ -2362,7 +2357,7 @@ def _attrs_to_init_script( # hash code would result in silent bugs. if cache_hash: if frozen: - if slots: + if slots: # noqa: SIM108 # if frozen and slots, then _setattr defined above init_hash_cache = "_setattr('%s', %s)" else: @@ -2380,11 +2375,23 @@ def _attrs_to_init_script( lines.append(f"BaseException.__init__(self, {vals})") args = ", ".join(args) + pre_init_args = args if kw_only_args: args += "%s*, %s" % ( ", " if args else "", # leading comma ", ".join(kw_only_args), # kw_only args ) + pre_init_kw_only_args = ", ".join( + ["%s=%s" % (kw_arg, kw_arg) for kw_arg in kw_only_args] + ) + pre_init_args += ( + ", " if pre_init_args else "" + ) # handle only kwargs and no regular args + pre_init_args += pre_init_kw_only_args + + if pre_init and pre_init_has_args: + # If pre init method has arguments, pass same arguments as `__init__` + lines[0] = "self.__attrs_pre_init__(%s)" % pre_init_args return ( "def %s(self, %s):\n %s\n" @@ -2537,9 +2544,8 @@ class Attribute: if type is None: type = ca.type elif ca.type is not None: - raise ValueError( - "Type annotation and type argument cannot both be present" - ) + msg = "Type annotation and type argument cannot both be present" + raise ValueError(msg) inst_dict = { k: getattr(ca, k) for k in Attribute.__slots__ @@ -2663,36 +2669,37 @@ class _CountingAttr: "on_setattr", "alias", ) - __attrs_attrs__ = tuple( - Attribute( - name=name, - alias=_default_init_alias_for(name), - default=NOTHING, - validator=None, - 
repr=True, - cmp=None, - hash=True, - init=True, - kw_only=False, - eq=True, - eq_key=None, - order=False, - order_key=None, - inherited=False, - on_setattr=None, - ) - for name in ( - "counter", - "_default", - "repr", - "eq", - "order", - "hash", - "init", - "on_setattr", - "alias", - ) - ) + ( + __attrs_attrs__ = ( + *tuple( + Attribute( + name=name, + alias=_default_init_alias_for(name), + default=NOTHING, + validator=None, + repr=True, + cmp=None, + hash=True, + init=True, + kw_only=False, + eq=True, + eq_key=None, + order=False, + order_key=None, + inherited=False, + on_setattr=None, + ) + for name in ( + "counter", + "_default", + "repr", + "eq", + "order", + "hash", + "init", + "on_setattr", + "alias", + ) + ), Attribute( name="metadata", alias="metadata", @@ -2868,7 +2875,8 @@ def make_class(name, attrs, bases=(object,), **attributes_arguments): elif isinstance(attrs, (list, tuple)): cls_dict = {a: attrib() for a in attrs} else: - raise TypeError("attrs argument must be a dict or a list.") + msg = "attrs argument must be a dict or a list." + raise TypeError(msg) pre_init = cls_dict.pop("__attrs_pre_init__", None) post_init = cls_dict.pop("__attrs_post_init__", None) @@ -2888,12 +2896,10 @@ def make_class(name, attrs, bases=(object,), **attributes_arguments): # frame where the class is created. Bypass this step in environments where # sys._getframe is not defined (Jython for example) or sys._getframe is not # defined for arguments greater than 0 (IronPython). - try: + with contextlib.suppress(AttributeError, ValueError): type_.__module__ = sys._getframe(1).f_globals.get( "__name__", "__main__" ) - except (AttributeError, ValueError): - pass # We do it here for proper warnings with meaningful stacklevel. cmp = attributes_arguments.pop("cmp", None) diff --git a/lib/attr/_next_gen.py b/lib/attr/_next_gen.py index 7c4d5db0..8b44e8b3 100644 --- a/lib/attr/_next_gen.py +++ b/lib/attr/_next_gen.py @@ -59,7 +59,7 @@ def define( .. 
caution:: Usually this has only upsides and few visible effects in everyday - programming. But it *can* lead to some suprising behaviors, so please + programming. But it *can* lead to some surprising behaviors, so please make sure to read :term:`slotted classes`. - *auto_exc=True* - *auto_detect=True* @@ -131,10 +131,8 @@ def define( for base_cls in cls.__bases__: if base_cls.__setattr__ is _frozen_setattrs: if had_on_setattr: - raise ValueError( - "Frozen classes can't use on_setattr " - "(frozen-ness was inherited)." - ) + msg = "Frozen classes can't use on_setattr (frozen-ness was inherited)." + raise ValueError(msg) on_setattr = setters.NO_OP break @@ -151,8 +149,8 @@ def define( # if it's used as `@attrs` but ``None`` if used as `@attrs()`. if maybe_cls is None: return wrap - else: - return wrap(maybe_cls) + + return wrap(maybe_cls) mutable = define @@ -180,10 +178,9 @@ def field( Identical to `attr.ib`, except keyword-only and with some arguments removed. - .. versionadded:: 22.3.0 - The *type* parameter has been re-added; mostly for - {func}`attrs.make_class`. Please note that type checkers ignore this - metadata. + .. versionadded:: 23.1.0 + The *type* parameter has been re-added; mostly for `attrs.make_class`. + Please note that type checkers ignore this metadata. .. versionadded:: 20.1.0 """ return attrib( diff --git a/lib/attr/converters.py b/lib/attr/converters.py index 4cada106..2bf4c902 100644 --- a/lib/attr/converters.py +++ b/lib/attr/converters.py @@ -70,21 +70,20 @@ def default_if_none(default=NOTHING, factory=None): .. versionadded:: 18.2.0 """ if default is NOTHING and factory is None: - raise TypeError("Must pass either `default` or `factory`.") + msg = "Must pass either `default` or `factory`." + raise TypeError(msg) if default is not NOTHING and factory is not None: - raise TypeError( - "Must pass either `default` or `factory` but not both." - ) + msg = "Must pass either `default` or `factory` but not both." 
+ raise TypeError(msg) if factory is not None: default = Factory(factory) if isinstance(default, Factory): if default.takes_self: - raise ValueError( - "`takes_self` is not supported by default_if_none." - ) + msg = "`takes_self` is not supported by default_if_none." + raise ValueError(msg) def default_if_none_converter(val): if val is not None: @@ -141,4 +140,5 @@ def to_bool(val): except TypeError: # Raised when "val" is not hashable (e.g., lists) pass - raise ValueError(f"Cannot convert value to bool: {val}") + msg = f"Cannot convert value to bool: {val}" + raise ValueError(msg) diff --git a/lib/attr/exceptions.py b/lib/attr/exceptions.py index 28834930..3b7abb81 100644 --- a/lib/attr/exceptions.py +++ b/lib/attr/exceptions.py @@ -1,5 +1,9 @@ # SPDX-License-Identifier: MIT +from __future__ import annotations + +from typing import ClassVar + class FrozenError(AttributeError): """ @@ -13,7 +17,7 @@ class FrozenError(AttributeError): """ msg = "can't set attribute" - args = [msg] + args: ClassVar[tuple[str]] = [msg] class FrozenInstanceError(FrozenError): diff --git a/lib/attr/filters.py b/lib/attr/filters.py index baa25e94..a1e40c98 100644 --- a/lib/attr/filters.py +++ b/lib/attr/filters.py @@ -13,6 +13,7 @@ def _split_what(what): """ return ( frozenset(cls for cls in what if isinstance(cls, type)), + frozenset(cls for cls in what if isinstance(cls, str)), frozenset(cls for cls in what if isinstance(cls, Attribute)), ) @@ -22,14 +23,21 @@ def include(*what): Include *what*. :param what: What to include. - :type what: `list` of `type` or `attrs.Attribute`\\ s + :type what: `list` of classes `type`, field names `str` or + `attrs.Attribute`\\ s :rtype: `callable` + + .. versionchanged:: 23.1.0 Accept strings with field names. 
""" - cls, attrs = _split_what(what) + cls, names, attrs = _split_what(what) def include_(attribute, value): - return value.__class__ in cls or attribute in attrs + return ( + value.__class__ in cls + or attribute.name in names + or attribute in attrs + ) return include_ @@ -39,13 +47,20 @@ def exclude(*what): Exclude *what*. :param what: What to exclude. - :type what: `list` of classes or `attrs.Attribute`\\ s. + :type what: `list` of classes `type`, field names `str` or + `attrs.Attribute`\\ s. :rtype: `callable` + + .. versionchanged:: 23.3.0 Accept field name string as input argument """ - cls, attrs = _split_what(what) + cls, names, attrs = _split_what(what) def exclude_(attribute, value): - return value.__class__ not in cls and attribute not in attrs + return not ( + value.__class__ in cls + or attribute.name in names + or attribute in attrs + ) return exclude_ diff --git a/lib/attr/filters.pyi b/lib/attr/filters.pyi index 99386686..8a02fa0f 100644 --- a/lib/attr/filters.pyi +++ b/lib/attr/filters.pyi @@ -2,5 +2,5 @@ from typing import Any, Union from . import Attribute, _FilterType -def include(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ... -def exclude(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ... +def include(*what: Union[type, str, Attribute[Any]]) -> _FilterType[Any]: ... +def exclude(*what: Union[type, str, Attribute[Any]]) -> _FilterType[Any]: ... diff --git a/lib/attr/validators.py b/lib/attr/validators.py index 1488554f..18617fe6 100644 --- a/lib/attr/validators.py +++ b/lib/attr/validators.py @@ -97,23 +97,21 @@ class _InstanceOfValidator: We use a callable class to be able to change the ``__repr__``. 
""" if not isinstance(value, self.type): + msg = "'{name}' must be {type!r} (got {value!r} that is a {actual!r}).".format( + name=attr.name, + type=self.type, + actual=value.__class__, + value=value, + ) raise TypeError( - "'{name}' must be {type!r} (got {value!r} that is a " - "{actual!r}).".format( - name=attr.name, - type=self.type, - actual=value.__class__, - value=value, - ), + msg, attr, self.type, value, ) def __repr__(self): - return "".format( - type=self.type - ) + return f"" def instance_of(type): @@ -142,20 +140,18 @@ class _MatchesReValidator: We use a callable class to be able to change the ``__repr__``. """ if not self.match_func(value): + msg = "'{name}' must match regex {pattern!r} ({value!r} doesn't)".format( + name=attr.name, pattern=self.pattern.pattern, value=value + ) raise ValueError( - "'{name}' must match regex {pattern!r}" - " ({value!r} doesn't)".format( - name=attr.name, pattern=self.pattern.pattern, value=value - ), + msg, attr, self.pattern, value, ) def __repr__(self): - return "".format( - pattern=self.pattern - ) + return f"" def matches_re(regex, flags=0, func=None): @@ -176,22 +172,17 @@ def matches_re(regex, flags=0, func=None): """ valid_funcs = (re.fullmatch, None, re.search, re.match) if func not in valid_funcs: - raise ValueError( - "'func' must be one of {}.".format( - ", ".join( - sorted( - e and e.__name__ or "None" for e in set(valid_funcs) - ) - ) + msg = "'func' must be one of {}.".format( + ", ".join( + sorted(e and e.__name__ or "None" for e in set(valid_funcs)) ) ) + raise ValueError(msg) if isinstance(regex, Pattern): if flags: - raise TypeError( - "'flags' can only be used with a string pattern; " - "pass flags to re.compile() instead" - ) + msg = "'flags' can only be used with a string pattern; pass flags to re.compile() instead" + raise TypeError(msg) pattern = regex else: pattern = re.compile(regex, flags) @@ -215,20 +206,18 @@ class _ProvidesValidator: We use a callable class to be able to change the 
``__repr__``. """ if not self.interface.providedBy(value): + msg = "'{name}' must provide {interface!r} which {value!r} doesn't.".format( + name=attr.name, interface=self.interface, value=value + ) raise TypeError( - "'{name}' must provide {interface!r} which {value!r} " - "doesn't.".format( - name=attr.name, interface=self.interface, value=value - ), + msg, attr, self.interface, value, ) def __repr__(self): - return "".format( - interface=self.interface - ) + return f"" def provides(interface): @@ -269,9 +258,7 @@ class _OptionalValidator: self.validator(inst, attr, value) def __repr__(self): - return "".format( - what=repr(self.validator) - ) + return f"" def optional(validator): @@ -304,19 +291,16 @@ class _InValidator: in_options = False if not in_options: + msg = f"'{attr.name}' must be in {self.options!r} (got {value!r})" raise ValueError( - "'{name}' must be in {options!r} (got {value!r})".format( - name=attr.name, options=self.options, value=value - ), + msg, attr, self.options, value, ) def __repr__(self): - return "".format( - options=self.options - ) + return f"" def in_(options): @@ -402,11 +386,8 @@ class _DeepIterable: else f" {self.iterable_validator!r}" ) return ( - "" - ).format( - iterable_identifier=iterable_identifier, - member=self.member_validator, + f"" ) @@ -477,19 +458,11 @@ class _NumberValidator: We use a callable class to be able to change the ``__repr__``. """ if not self.compare_func(value, self.bound): - raise ValueError( - "'{name}' must be {op} {bound}: {value}".format( - name=attr.name, - op=self.compare_op, - bound=self.bound, - value=value, - ) - ) + msg = f"'{attr.name}' must be {self.compare_op} {self.bound}: {value}" + raise ValueError(msg) def __repr__(self): - return "".format( - op=self.compare_op, bound=self.bound - ) + return f"" def lt(val): @@ -549,11 +522,8 @@ class _MaxLengthValidator: We use a callable class to be able to change the ``__repr__``. 
""" if len(value) > self.max_length: - raise ValueError( - "Length of '{name}' must be <= {max}: {len}".format( - name=attr.name, max=self.max_length, len=len(value) - ) - ) + msg = f"Length of '{attr.name}' must be <= {self.max_length}: {len(value)}" + raise ValueError(msg) def __repr__(self): return f"" @@ -580,11 +550,8 @@ class _MinLengthValidator: We use a callable class to be able to change the ``__repr__``. """ if len(value) < self.min_length: - raise ValueError( - "Length of '{name}' must be => {min}: {len}".format( - name=attr.name, min=self.min_length, len=len(value) - ) - ) + msg = f"Length of '{attr.name}' must be => {self.min_length}: {len(value)}" + raise ValueError(msg) def __repr__(self): return f"" @@ -611,22 +578,16 @@ class _SubclassOfValidator: We use a callable class to be able to change the ``__repr__``. """ if not issubclass(value, self.type): + msg = f"'{attr.name}' must be a subclass of {self.type!r} (got {value!r})." raise TypeError( - "'{name}' must be a subclass of {type!r} " - "(got {value!r}).".format( - name=attr.name, - type=self.type, - value=value, - ), + msg, attr, self.type, value, ) def __repr__(self): - return "".format( - type=self.type - ) + return f"" def _subclass_of(type): @@ -680,7 +641,7 @@ class _NotValidator: def __repr__(self): return ( - "" + "" ).format( what=self.validator, exc_types=self.exc_types, diff --git a/lib/bs4/builder/_htmlparser.py b/lib/bs4/builder/_htmlparser.py index 4c5ced93..59911043 100644 --- a/lib/bs4/builder/_htmlparser.py +++ b/lib/bs4/builder/_htmlparser.py @@ -378,10 +378,10 @@ class HTMLParserTreeBuilder(HTMLTreeBuilder): parser.soup = self.soup try: parser.feed(markup) + parser.close() except AssertionError as e: # html.parser raises AssertionError in rare cases to # indicate a fatal problem with the markup, especially # when there's an error in the doctype declaration. 
raise ParserRejectedMarkup(e) - parser.close() parser.already_closed_empty_element = [] diff --git a/lib/bs4/element.py b/lib/bs4/element.py index 99fc8137..38ca2dc2 100644 --- a/lib/bs4/element.py +++ b/lib/bs4/element.py @@ -1356,7 +1356,7 @@ class Tag(PageElement): This is the first step in the deepcopy process. """ clone = type(self)( - None, self.builder, self.name, self.namespace, + None, None, self.name, self.namespace, self.prefix, self.attrs, is_xml=self._is_xml, sourceline=self.sourceline, sourcepos=self.sourcepos, can_be_empty_element=self.can_be_empty_element, @@ -1845,6 +1845,11 @@ class Tag(PageElement): return space_before + s + space_after def _format_tag(self, eventual_encoding, formatter, opening): + if self.hidden: + # A hidden tag is invisible, although its contents + # are visible. + return '' + # A tag starts with the < character (see below). # Then the / character, if this is a closing tag. diff --git a/lib/dateutil/parser/isoparser.py b/lib/dateutil/parser/isoparser.py index 9b42b0e2..727890d8 100644 --- a/lib/dateutil/parser/isoparser.py +++ b/lib/dateutil/parser/isoparser.py @@ -72,7 +72,7 @@ class isoparser(object): Common: - ``YYYY`` - - ``YYYY-MM`` or ``YYYYMM`` + - ``YYYY-MM`` - ``YYYY-MM-DD`` or ``YYYYMMDD`` Uncommon: diff --git a/lib/dateutil/relativedelta.py b/lib/dateutil/relativedelta.py index fa7f649c..3d9c4fdf 100644 --- a/lib/dateutil/relativedelta.py +++ b/lib/dateutil/relativedelta.py @@ -48,7 +48,7 @@ class relativedelta(object): the corresponding arithmetic operation on the original datetime value with the information in the relativedelta. - weekday: + weekday: One of the weekday instances (MO, TU, etc) available in the relativedelta module. 
These instances may receive a parameter N, specifying the Nth weekday, which could be positive or negative diff --git a/lib/dateutil/rrule.py b/lib/dateutil/rrule.py index adbb118e..a134dcd5 100644 --- a/lib/dateutil/rrule.py +++ b/lib/dateutil/rrule.py @@ -182,7 +182,7 @@ class rrulebase(object): # __len__() introduces a large performance penalty. def count(self): """ Returns the number of recurrences in this set. It will have go - trough the whole recurrence, if this hasn't been done before. """ + through the whole recurrence, if this hasn't been done before. """ if self._len is None: for x in self: pass diff --git a/lib/dateutil/tz/tz.py b/lib/dateutil/tz/tz.py index 8d2ad778..bb45251b 100644 --- a/lib/dateutil/tz/tz.py +++ b/lib/dateutil/tz/tz.py @@ -34,7 +34,7 @@ except ImportError: from warnings import warn ZERO = datetime.timedelta(0) -EPOCH = datetime.datetime.utcfromtimestamp(0) +EPOCH = datetime.datetime(1970, 1, 1, 0, 0) EPOCHORDINAL = EPOCH.toordinal() diff --git a/lib/dateutil/zoneinfo/__init__.py b/lib/dateutil/zoneinfo/__init__.py index 27cc9df4..5628bfc3 100644 --- a/lib/dateutil/zoneinfo/__init__.py +++ b/lib/dateutil/zoneinfo/__init__.py @@ -9,6 +9,8 @@ from io import BytesIO from dateutil.tz import tzfile as _tzfile +# noinspection PyPep8Naming +import encodingKludge as ek import sickgear __all__ = ["get_zonefile_instance", "gettz", "gettz_db_metadata"] @@ -25,10 +27,10 @@ class tzfile(_tzfile): def getzoneinfofile_stream(): try: # return BytesIO(get_data(__name__, ZONEFILENAME)) - zonefile = os.path.join(sickgear.ZONEINFO_DIR, ZONEFILENAME) - if not os.path.isfile(zonefile): + zonefile = ek.ek(os.path.join, sickgear.ZONEINFO_DIR, ZONEFILENAME) + if not ek.ek(os.path.isfile, zonefile): warnings.warn('Falling back to included zoneinfo file') - zonefile = os.path.join(os.path.dirname(__file__), ZONEFILENAME) + zonefile = ek.ek(os.path.join, ek.ek(os.path.dirname, __file__), ZONEFILENAME) with open(zonefile, 'rb') as f: return BytesIO(f.read()) 
except IOError as e: # TODO switch to FileNotFoundError? diff --git a/lib/diskcache/__init__.py b/lib/diskcache/__init__.py index 1931a0dd..7757d660 100644 --- a/lib/diskcache/__init__.py +++ b/lib/diskcache/__init__.py @@ -61,8 +61,8 @@ except Exception: # pylint: disable=broad-except # pragma: no cover pass __title__ = 'diskcache' -__version__ = '5.6.1' -__build__ = 0x050601 +__version__ = '5.6.3' +__build__ = 0x050603 __author__ = 'Grant Jenks' __license__ = 'Apache 2.0' __copyright__ = 'Copyright 2016-2023 Grant Jenks' diff --git a/lib/diskcache/core.py b/lib/diskcache/core.py index 85f2e72e..46f5532b 100644 --- a/lib/diskcache/core.py +++ b/lib/diskcache/core.py @@ -1705,9 +1705,6 @@ class Cache: except IOError: # Key was deleted before we could retrieve result. continue - finally: - if name is not None: - self._disk.remove(name) break if expire_time and tag: diff --git a/lib/hachoir/__init__.py b/lib/hachoir/__init__.py index d9f41b78..4e2da0af 100644 --- a/lib/hachoir/__init__.py +++ b/lib/hachoir/__init__.py @@ -1,2 +1,2 @@ -VERSION = (3, 1, 2) +VERSION = (3, 2, 0) __version__ = ".".join(map(str, VERSION)) diff --git a/lib/hachoir/core/bits.py b/lib/hachoir/core/bits.py index 3de3e202..a031b42a 100644 --- a/lib/hachoir/core/bits.py +++ b/lib/hachoir/core/bits.py @@ -4,7 +4,7 @@ string, number, hexadecimal, etc. 
""" from hachoir.core.endian import BIG_ENDIAN, LITTLE_ENDIAN, MIDDLE_ENDIAN -from struct import calcsize, unpack, error as struct_error +from struct import calcsize, error as struct_error def swap16(value): @@ -292,20 +292,11 @@ def str2long(data, endian): >>> str2long(b"\x0b\x0a\x0d\x0c", MIDDLE_ENDIAN) == 0x0a0b0c0d True """ - assert 1 <= len(data) <= 32 # arbitrary limit: 256 bits - try: - return unpack(_struct_format[endian][len(data)], data)[0] - except KeyError: - pass - - assert endian in (BIG_ENDIAN, LITTLE_ENDIAN, MIDDLE_ENDIAN) - shift = 0 - value = 0 - if endian is BIG_ENDIAN: - data = reversed(data) - elif endian is MIDDLE_ENDIAN: - data = reversed(strswapmid(data)) - for byte in data: - value += (byte << shift) - shift += 8 - return value + if endian == LITTLE_ENDIAN: + return int.from_bytes(data, "little") + elif endian == BIG_ENDIAN: + return int.from_bytes(data, "big") + elif endian == MIDDLE_ENDIAN: + return int.from_bytes(strswapmid(data), "big") + else: + raise ValueError("Invalid endian %s" % (endian,)) diff --git a/lib/hachoir/core/dict.py b/lib/hachoir/core/dict.py index 053bd4b2..c55e3f4c 100644 --- a/lib/hachoir/core/dict.py +++ b/lib/hachoir/core/dict.py @@ -168,7 +168,7 @@ class Dict(object): _index = index if index < 0: index += len(self._value_list) - if not(0 <= index <= len(self._value_list)): + if not (0 <= index <= len(self._value_list)): raise IndexError("Insert error: index '%s' is invalid" % _index) for item_key, item_index in self._index.items(): if item_index >= index: diff --git a/lib/hachoir/core/tools.py b/lib/hachoir/core/tools.py index 7655c0e8..43575f22 100644 --- a/lib/hachoir/core/tools.py +++ b/lib/hachoir/core/tools.py @@ -493,7 +493,7 @@ def timestampUNIX(value): """ if not isinstance(value, (float, int)): raise TypeError("timestampUNIX(): an integer or float is required") - if not(0 <= value <= 2147483647): + if not (0 <= value <= 2147483647): raise ValueError("timestampUNIX(): value have to be in 0..2147483647") 
return UNIX_TIMESTAMP_T0 + timedelta(seconds=value) @@ -514,7 +514,7 @@ def timestampMac32(value): """ if not isinstance(value, (float, int)): raise TypeError("an integer or float is required") - if not(0 <= value <= 4294967295): + if not (0 <= value <= 4294967295): return "invalid Mac timestamp (%s)" % value return MAC_TIMESTAMP_T0 + timedelta(seconds=value) diff --git a/lib/hachoir/field/__init__.py b/lib/hachoir/field/__init__.py index cb84a561..cbc2d937 100644 --- a/lib/hachoir/field/__init__.py +++ b/lib/hachoir/field/__init__.py @@ -4,27 +4,27 @@ from hachoir.field.bit_field import Bit, Bits, RawBits # noqa from hachoir.field.byte_field import Bytes, RawBytes # noqa from hachoir.field.sub_file import SubFile, CompressedField # noqa from hachoir.field.character import Character # noqa -from hachoir.field.integer import (Int8, Int16, Int24, Int32, Int64, # noqa - UInt8, UInt16, UInt24, UInt32, UInt64, - GenericInteger) +from hachoir.field.integer import (Int8, Int16, Int24, Int32, Int64, # noqa + UInt8, UInt16, UInt24, UInt32, UInt64, + GenericInteger) from hachoir.field.enum import Enum # noqa from hachoir.field.string_field import (GenericString, # noqa - String, CString, UnixLine, - PascalString8, PascalString16, - PascalString32) + String, CString, UnixLine, + PascalString8, PascalString16, + PascalString32) from hachoir.field.padding import (PaddingBits, PaddingBytes, # noqa - NullBits, NullBytes) + NullBits, NullBytes) # Functions from hachoir.field.helper import (isString, isInteger, # noqa - createPaddingField, createNullField, - createRawField, writeIntoFile, - createOrphanField) + createPaddingField, createNullField, + createRawField, writeIntoFile, + createOrphanField) # FieldSet classes from hachoir.field.fake_array import FakeArray # noqa from hachoir.field.basic_field_set import (BasicFieldSet, # noqa - ParserError, MatchError) + ParserError, MatchError) from hachoir.field.generic_field_set import GenericFieldSet # noqa from 
hachoir.field.seekable_field_set import SeekableFieldSet, RootSeekableFieldSet # noqa from hachoir.field.field_set import FieldSet # noqa diff --git a/lib/hachoir/field/byte_field.py b/lib/hachoir/field/byte_field.py index c372ad83..e0bdb083 100644 --- a/lib/hachoir/field/byte_field.py +++ b/lib/hachoir/field/byte_field.py @@ -20,7 +20,7 @@ class RawBytes(Field): def __init__(self, parent, name, length, description="Raw data"): assert issubclass(parent.__class__, Field) - if not(0 < length <= MAX_LENGTH): + if not (0 < length <= MAX_LENGTH): raise FieldError("Invalid RawBytes length (%s)!" % length) Field.__init__(self, parent, name, length * 8, description) self._display = None diff --git a/lib/hachoir/field/field.py b/lib/hachoir/field/field.py index fbdd4290..baa86800 100644 --- a/lib/hachoir/field/field.py +++ b/lib/hachoir/field/field.py @@ -41,7 +41,7 @@ class Field(Logger): None: field size is computed dynamically. int: field size, in bits. callable: function that receives the same arguments as the constructor, - without ``parent``. + without ``parent``. """ is_field_set = False @@ -232,7 +232,7 @@ class Field(Logger): Args: key (str): relative or absolute path for the desired field. const (bool): For field sets, whether to consume additional input to - find a matching field. + find a matching field. Returns: Field: The field matching the provided path. 
diff --git a/lib/hachoir/field/generic_field_set.py b/lib/hachoir/field/generic_field_set.py index cfc24fad..74d8898f 100644 --- a/lib/hachoir/field/generic_field_set.py +++ b/lib/hachoir/field/generic_field_set.py @@ -1,5 +1,5 @@ from hachoir.field import (MissingField, BasicFieldSet, Field, ParserError, - createRawField, createNullField, createPaddingField, FakeArray) + createRawField, createNullField, createPaddingField, FakeArray) from hachoir.core.dict import Dict, UniqKeyError from hachoir.core.tools import lowerBound, makeUnicode import hachoir.core.config as config @@ -117,7 +117,7 @@ class GenericFieldSet(BasicFieldSet): _getSize, doc="Size in bits, may create all fields to get size") def _getCurrentSize(self): - assert not(self.done) + assert not (self.done) return self._current_size current_size = property(_getCurrentSize) diff --git a/lib/hachoir/field/helper.py b/lib/hachoir/field/helper.py index 1654088f..4d7b7cad 100644 --- a/lib/hachoir/field/helper.py +++ b/lib/hachoir/field/helper.py @@ -1,8 +1,8 @@ from hachoir.field import (FieldError, - RawBits, RawBytes, - PaddingBits, PaddingBytes, - NullBits, NullBytes, - GenericString, GenericInteger) + RawBits, RawBytes, + PaddingBits, PaddingBytes, + NullBits, NullBytes, + GenericString, GenericInteger) from hachoir.stream import FileOutputStream diff --git a/lib/hachoir/field/padding.py b/lib/hachoir/field/padding.py index 80b082dc..4c7265c8 100644 --- a/lib/hachoir/field/padding.py +++ b/lib/hachoir/field/padding.py @@ -23,7 +23,7 @@ class PaddingBits(Bits): self._display_pattern = self.checkPattern() def checkPattern(self): - if not(config.check_padding_pattern): + if not (config.check_padding_pattern): return False if self.pattern != 0: return False @@ -72,7 +72,7 @@ class PaddingBytes(Bytes): self._display_pattern = self.checkPattern() def checkPattern(self): - if not(config.check_padding_pattern): + if not (config.check_padding_pattern): return False if self.pattern is None: return False diff --git 
a/lib/hachoir/field/string_field.py b/lib/hachoir/field/string_field.py index 41e47d28..742634d2 100644 --- a/lib/hachoir/field/string_field.py +++ b/lib/hachoir/field/string_field.py @@ -244,7 +244,7 @@ class GenericString(Bytes): and err.end == len(text) \ and self._charset == "UTF-16-LE": try: - text = str(text + "\0", self._charset, "strict") + text = str(text + b"\0", self._charset, "strict") self.warning( "Fix truncated %s string: add missing nul byte" % self._charset) return text diff --git a/lib/hachoir/field/timestamp.py b/lib/hachoir/field/timestamp.py index 8b4e6446..e45b8320 100644 --- a/lib/hachoir/field/timestamp.py +++ b/lib/hachoir/field/timestamp.py @@ -1,6 +1,6 @@ from hachoir.core.tools import (humanDatetime, humanDuration, - timestampUNIX, timestampMac32, timestampUUID60, - timestampWin64, durationWin64, durationMillisWin64) + timestampUNIX, timestampMac32, timestampUUID60, + timestampWin64, durationWin64, durationMillisWin64) from hachoir.field import Bits, FieldSet from datetime import datetime @@ -61,7 +61,7 @@ class TimeDateMSDOS32(FieldSet): def createValue(self): return datetime( - 1980 + self["year"].value, self["month"].value, self["day"].value, + 1980 + self["year"].value, self["month"].value or 1, self["day"].value or 1, self["hour"].value, self["minute"].value, 2 * self["second"].value) def createDisplay(self): diff --git a/lib/hachoir/field/vector.py b/lib/hachoir/field/vector.py index 8b5474e6..fabb70e1 100644 --- a/lib/hachoir/field/vector.py +++ b/lib/hachoir/field/vector.py @@ -7,7 +7,7 @@ class GenericVector(FieldSet): # Sanity checks assert issubclass(item_class, Field) assert isinstance(item_class.static_size, int) - if not(0 < nb_items): + if not (0 < nb_items): raise ParserError('Unable to create empty vector "%s" in %s' % (name, parent.path)) size = nb_items * item_class.static_size diff --git a/lib/hachoir/metadata/__init__.py b/lib/hachoir/metadata/__init__.py index e6cfa6d7..add12ac5 100644 --- 
a/lib/hachoir/metadata/__init__.py +++ b/lib/hachoir/metadata/__init__.py @@ -11,4 +11,4 @@ import hachoir.metadata.misc # noqa import hachoir.metadata.program # noqa import hachoir.metadata.riff # noqa import hachoir.metadata.video # noqa -import hachoir.metadata.cr2 # noqa +import hachoir.metadata.cr2 # noqa \ No newline at end of file diff --git a/lib/hachoir/metadata/archive.py b/lib/hachoir/metadata/archive.py index ac5ae4b5..4905f314 100644 --- a/lib/hachoir/metadata/archive.py +++ b/lib/hachoir/metadata/archive.py @@ -3,7 +3,7 @@ from hachoir.metadata.safe import fault_tolerant, getValue from hachoir.metadata.metadata import ( RootMetadata, Metadata, MultipleMetadata, registerExtractor) from hachoir.parser.archive import (Bzip2Parser, CabFile, GzipParser, - TarFile, ZipFile, MarFile) + TarFile, ZipFile, MarFile) from hachoir.core.tools import humanUnixAttributes diff --git a/lib/hachoir/metadata/audio.py b/lib/hachoir/metadata/audio.py index 15ce72f0..4bcedf8c 100644 --- a/lib/hachoir/metadata/audio.py +++ b/lib/hachoir/metadata/audio.py @@ -1,12 +1,12 @@ from hachoir.metadata.metadata import (registerExtractor, Metadata, - RootMetadata, MultipleMetadata) + RootMetadata, MultipleMetadata) from hachoir.parser.audio import (AuFile, MpegAudioFile, RealAudioFile, - AiffFile, FlacParser) + AiffFile, FlacParser) from hachoir.parser.container import OggFile, RealMediaFile from hachoir.core.tools import makePrintable, timedelta2seconds, humanBitRate from datetime import timedelta from hachoir.metadata.metadata_item import (QUALITY_FAST, QUALITY_NORMAL, - QUALITY_BEST) + QUALITY_BEST) from hachoir.metadata.safe import fault_tolerant, getValue diff --git a/lib/hachoir/metadata/image.py b/lib/hachoir/metadata/image.py index 1538b11d..17654b0e 100644 --- a/lib/hachoir/metadata/image.py +++ b/lib/hachoir/metadata/image.py @@ -1,5 +1,5 @@ from hachoir.metadata.metadata import (registerExtractor, Metadata, - RootMetadata, MultipleMetadata) + RootMetadata, MultipleMetadata) 
from hachoir.parser.image import ( BmpFile, IcoFile, PcxFile, GifFile, PngFile, TiffFile, XcfFile, TargaFile, WMF_File, PsdFile) diff --git a/lib/hachoir/metadata/main.py b/lib/hachoir/metadata/main.py index b652f9ec..7f2e9873 100644 --- a/lib/hachoir/metadata/main.py +++ b/lib/hachoir/metadata/main.py @@ -85,7 +85,7 @@ def processFile(values, filename, with parser: # Extract metadata - extract_metadata = not(values.mime or values.type) + extract_metadata = not (values.mime or values.type) if extract_metadata: try: metadata = extractMetadata(parser, values.quality) @@ -124,7 +124,7 @@ def processFile(values, filename, def processFiles(values, filenames, display=True): - human = not(values.raw) + human = not values.raw ok = True priority = int(values.level) * 100 + 99 display_filename = (1 < len(filenames)) diff --git a/lib/hachoir/metadata/riff.py b/lib/hachoir/metadata/riff.py index f96a02dc..5c4d44b1 100644 --- a/lib/hachoir/metadata/riff.py +++ b/lib/hachoir/metadata/riff.py @@ -3,7 +3,7 @@ Extract metadata from RIFF file format: AVI video and WAV sound. 
""" from hachoir.metadata.metadata import (Metadata, MultipleMetadata, - registerExtractor) + registerExtractor) from hachoir.metadata.safe import fault_tolerant, getValue from hachoir.parser.container.riff import RiffFile from hachoir.parser.video.fourcc import UNCOMPRESSED_AUDIO diff --git a/lib/hachoir/metadata/video.py b/lib/hachoir/metadata/video.py index d8b0aab2..41fd002e 100644 --- a/lib/hachoir/metadata/video.py +++ b/lib/hachoir/metadata/video.py @@ -1,6 +1,6 @@ from hachoir.field import MissingField from hachoir.metadata.metadata import (registerExtractor, - Metadata, RootMetadata, MultipleMetadata) + Metadata, RootMetadata, MultipleMetadata) from hachoir.metadata.metadata_item import QUALITY_GOOD from hachoir.metadata.safe import fault_tolerant from hachoir.parser.video import AsfFile, FlvFile diff --git a/lib/hachoir/parser/__init__.py b/lib/hachoir/parser/__init__.py index 72ee7bfb..12bbc7e6 100644 --- a/lib/hachoir/parser/__init__.py +++ b/lib/hachoir/parser/__init__.py @@ -2,5 +2,5 @@ from hachoir.parser.parser import ValidateError, HachoirParser, Parser # noqa from hachoir.parser.parser_list import ParserList, HachoirParserList # noqa from hachoir.parser.guess import QueryParser, guessParser, createParser # noqa from hachoir.parser import (archive, audio, container, # noqa - file_system, image, game, misc, network, program, - video) + file_system, image, game, misc, network, program, + video) diff --git a/lib/hachoir/parser/archive/__init__.py b/lib/hachoir/parser/archive/__init__.py index d35ea0e8..3ec59338 100644 --- a/lib/hachoir/parser/archive/__init__.py +++ b/lib/hachoir/parser/archive/__init__.py @@ -1,5 +1,6 @@ from hachoir.parser.archive.ace import AceFile # noqa from hachoir.parser.archive.ar import ArchiveFile # noqa +from hachoir.parser.archive.arj import ArjParser # noqa from hachoir.parser.archive.bomstore import BomFile # noqa from hachoir.parser.archive.bzip2_parser import Bzip2Parser # noqa from hachoir.parser.archive.cab import 
CabFile # noqa diff --git a/lib/hachoir/parser/archive/ace.py b/lib/hachoir/parser/archive/ace.py index 8a4b0590..c47bcbd0 100644 --- a/lib/hachoir/parser/archive/ace.py +++ b/lib/hachoir/parser/archive/ace.py @@ -13,10 +13,10 @@ Creation date: 19 january 2006 from hachoir.parser import Parser from hachoir.field import (StaticFieldSet, FieldSet, - Bit, Bits, NullBits, RawBytes, Enum, - UInt8, UInt16, UInt32, - PascalString8, PascalString16, String, - TimeDateMSDOS32) + Bit, Bits, NullBits, RawBytes, Enum, + UInt8, UInt16, UInt32, + PascalString8, PascalString16, String, + TimeDateMSDOS32) from hachoir.core.text_handler import textHandler, filesizeHandler, hexadecimal from hachoir.core.endian import LITTLE_ENDIAN from hachoir.parser.common.msdos import MSDOSFileAttr32 diff --git a/lib/hachoir/parser/archive/ar.py b/lib/hachoir/parser/archive/ar.py index 3f23041f..d7d92823 100644 --- a/lib/hachoir/parser/archive/ar.py +++ b/lib/hachoir/parser/archive/ar.py @@ -4,7 +4,7 @@ GNU ar archive : archive file (.a) and Debian (.deb) archive. 
from hachoir.parser import Parser from hachoir.field import (FieldSet, ParserError, - String, RawBytes, UnixLine) + String, RawBytes, UnixLine) from hachoir.core.endian import BIG_ENDIAN diff --git a/lib/hachoir/parser/archive/arj.py b/lib/hachoir/parser/archive/arj.py new file mode 100644 index 00000000..4b8a9bd2 --- /dev/null +++ b/lib/hachoir/parser/archive/arj.py @@ -0,0 +1,155 @@ +""" +ARJ archive file parser + +https://github.com/FarGroup/FarManager/blob/master/plugins/multiarc/arc.doc/arj.txt +""" + +from hachoir.core.endian import LITTLE_ENDIAN +from hachoir.field import (FieldSet, ParserError, + CString, Enum, RawBytes, + UInt8, UInt16, UInt32, + Bytes) +from hachoir.parser import Parser + +HOST_OS = { + 0: "MSDOS", + 1: "PRIMOS", + 2: "UNIX", + 3: "AMIGA", + 4: "MACDOS", + 5: "OS/2", + 6: "APPLE GS", + 7: "ATARI ST", + 8: "NEXT", + 9: "VAX VMS", + 10: "WIN95", + 11: "WIN32", +} + +FILE_TYPE = { + 0: "BINARY", + 1: "TEXT", + 2: "COMMENT", + 3: "DIRECTORY", + 4: "VOLUME", + 5: "CHAPTER", +} + +MAGIC = b"\x60\xEA" + + +class BaseBlock(FieldSet): + @property + def isEmpty(self): + return self["basic_header_size"].value == 0 + + def _header_start_fields(self): + yield Bytes(self, "magic", len(MAGIC)) + if self["magic"].value != MAGIC: + raise ParserError("Wrong header magic") + yield UInt16(self, "basic_header_size", "zero if end of archive") + if not self.isEmpty: + yield UInt8(self, "first_hdr_size") + yield UInt8(self, "archiver_version") + yield UInt8(self, "min_archiver_version") + yield Enum(UInt8(self, "host_os"), HOST_OS) + yield UInt8(self, "arj_flags") + + def _header_end_fields(self): + yield UInt8(self, "last_chapter") + fhs = self["first_hdr_size"] + name_position = fhs.address // 8 + fhs.value + current_position = self["last_chapter"].address // 8 + 1 + if name_position > current_position: + yield RawBytes(self, "reserved2", name_position - current_position) + + yield CString(self, "filename", "File name", charset="ASCII") + yield CString(self, 
"comment", "Comment", charset="ASCII") + yield UInt32(self, "crc", "Header CRC") + + i = 0 + while not self.eof: + yield UInt16(self, f"extended_header_size_{i}") + cur_size = self[f"extended_header_size_{i}"].value + if cur_size == 0: + break + yield RawBytes(self, "extended_header_data", cur_size) + yield UInt32(self, f"extended_header_crc_{i}") + i += 1 + + def validate(self): + if self.stream.readBytes(0, 2) != MAGIC: + return "Invalid magic" + return True + + +class Header(BaseBlock): + def createFields(self): + yield from self._header_start_fields() + if not self.isEmpty: + yield UInt8(self, "security_version") + yield Enum(UInt8(self, "file_type"), FILE_TYPE) + yield UInt8(self, "reserved") + yield UInt32(self, "date_time_created") + yield UInt32(self, "date_time_modified") + yield UInt32(self, "archive_size") + yield UInt32(self, "security_envelope_file_position") + yield UInt16(self, "filespec_position") + yield UInt16(self, "security_envelope_data_len") + yield UInt8(self, "encryption_version") + yield from self._header_end_fields() + + def createDescription(self): + if self.isEmpty: + return "Empty main header" + return "Main header of '%s'" % self["filename"].value + + +class Block(BaseBlock): + def createFields(self): + yield from self._header_start_fields() + if not self.isEmpty: + yield UInt8(self, "method") + yield Enum(UInt8(self, "file_type"), FILE_TYPE) + yield UInt8(self, "reserved") + yield UInt32(self, "date_time_modified") + yield UInt32(self, "compressed_size") + yield UInt32(self, "original_size") + yield UInt32(self, "original_file_crc") + yield UInt16(self, "filespec_position") + yield UInt16(self, "file_access_mode") + yield UInt8(self, "first_chapter") + yield from self._header_end_fields() + compressed_size = self["compressed_size"].value + if compressed_size > 0: + yield RawBytes(self, "compressed_data", compressed_size) + + def createDescription(self): + if self.isEmpty: + return "Empty file header" + return "File header of '%s'" % 
self["filename"].value + + +class ArjParser(Parser): + endian = LITTLE_ENDIAN + PARSER_TAGS = { + "id": "arj", + "category": "archive", + "file_ext": ("arj",), + "min_size": 4 * 8, + "description": "ARJ archive" + } + + def validate(self): + if self.stream.readBytes(0, 2) != MAGIC: + return "Invalid magic" + return True + + def createFields(self): + yield Header(self, "header") + if not self["header"].isEmpty: + while not self.eof: + block = Block(self, "file_header[]") + yield block + if block.isEmpty: + break diff --git a/lib/hachoir/parser/archive/bomstore.py b/lib/hachoir/parser/archive/bomstore.py index a0e376a5..483c58f5 100644 --- a/lib/hachoir/parser/archive/bomstore.py +++ b/lib/hachoir/parser/archive/bomstore.py @@ -11,7 +11,7 @@ Created: 2015-05-14 from hachoir.parser import HachoirParser from hachoir.field import (RootSeekableFieldSet, FieldSet, - UInt32, Bytes, NullBytes, RawBytes) + UInt32, Bytes, NullBytes, RawBytes) from hachoir.core.endian import BIG_ENDIAN diff --git a/lib/hachoir/parser/archive/bzip2_parser.py b/lib/hachoir/parser/archive/bzip2_parser.py index 176a3a7d..2e91d690 100644 --- a/lib/hachoir/parser/archive/bzip2_parser.py +++ b/lib/hachoir/parser/archive/bzip2_parser.py @@ -7,9 +7,9 @@ Author: Victor Stinner, Robert Xiao from hachoir.parser import Parser from hachoir.core.tools import paddingSize from hachoir.field import (Field, FieldSet, GenericVector, - ParserError, String, - PaddingBits, Bit, Bits, Character, - UInt32, Enum, CompressedField) + ParserError, String, + PaddingBits, Bit, Bits, Character, + UInt32, Enum, CompressedField) from hachoir.core.endian import BIG_ENDIAN from hachoir.core.text_handler import textHandler, hexadecimal from hachoir.parser.archive.zlib import build_tree, HuffmanCode @@ -218,7 +218,7 @@ class Bzip2Parser(Parser): def validate(self): if self.stream.readBytes(0, 3) != b'BZh': return "Wrong file signature" - if not("1" <= self["blocksize"].value <= "9"): + if not ("1" <= self["blocksize"].value <= 
"9"): return "Wrong blocksize" return True diff --git a/lib/hachoir/parser/archive/cab.py b/lib/hachoir/parser/archive/cab.py index dca1555e..0d4c0d54 100644 --- a/lib/hachoir/parser/archive/cab.py +++ b/lib/hachoir/parser/archive/cab.py @@ -10,9 +10,9 @@ Creation date: 31 january 2007 from hachoir.parser import Parser from hachoir.field import (FieldSet, Enum, - CString, String, - UInt8, UInt16, UInt32, Bit, Bits, PaddingBits, NullBits, - DateTimeMSDOS32, RawBytes, CustomFragment) + CString, String, + UInt8, UInt16, UInt32, Bit, Bits, PaddingBits, NullBits, + DateTimeMSDOS32, RawBytes, CustomFragment) from hachoir.core.text_handler import textHandler, hexadecimal, filesizeHandler from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.tools import paddingSize diff --git a/lib/hachoir/parser/archive/lzx.py b/lib/hachoir/parser/archive/lzx.py index b02235b8..b3217793 100644 --- a/lib/hachoir/parser/archive/lzx.py +++ b/lib/hachoir/parser/archive/lzx.py @@ -8,11 +8,12 @@ Creation date: July 18, 2007 """ from hachoir.parser import Parser from hachoir.field import (FieldSet, - UInt32, Bit, Bits, PaddingBits, - RawBytes, ParserError) + UInt32, Bit, Bits, PaddingBits, + RawBytes, ParserError) from hachoir.core.endian import MIDDLE_ENDIAN, LITTLE_ENDIAN from hachoir.core.tools import paddingSize from hachoir.parser.archive.zlib import build_tree, HuffmanCode, extend_data +import struct class LZXPreTreeEncodedTree(FieldSet): @@ -146,6 +147,8 @@ class LZXBlock(FieldSet): self.window_size = self.WINDOW_SIZE[self.compression_level] self.block_type = self["block_type"].value curlen = len(self.parent.uncompressed_data) + intel_started = False # Do we perform Intel jump fixups on this block? 
+ if self.block_type in (1, 2): # Verbatim or aligned offset block if self.block_type == 2: for i in range(8): @@ -156,6 +159,8 @@ class LZXBlock(FieldSet): yield LZXPreTreeEncodedTree(self, "main_tree_rest", self.window_size * 8) main_tree = build_tree( self["main_tree_start"].lengths + self["main_tree_rest"].lengths) + if self["main_tree_start"].lengths[0xE8]: + intel_started = True yield LZXPreTreeEncodedTree(self, "length_tree", 249) length_tree = build_tree(self["length_tree"].lengths) current_decoded_size = 0 @@ -169,7 +174,7 @@ class LZXBlock(FieldSet): field._description = "Literal value %r" % chr( field.realvalue) current_decoded_size += 1 - self.parent.uncompressed_data += chr(field.realvalue) + self.parent._lzx_window.append(field.realvalue) yield field continue position_header, length_header = divmod( @@ -243,8 +248,7 @@ class LZXBlock(FieldSet): self.parent.r2 = self.parent.r1 self.parent.r1 = self.parent.r0 self.parent.r0 = position - self.parent.uncompressed_data = extend_data( - self.parent.uncompressed_data, length, position) + extend_data(self.parent._lzx_window, length, position) current_decoded_size += length elif self.block_type == 3: # Uncompressed block padding = paddingSize(self.address + self.current_size, 16) @@ -253,6 +257,7 @@ class LZXBlock(FieldSet): else: yield PaddingBits(self, "padding[]", 16) self.endian = LITTLE_ENDIAN + intel_started = True # apparently intel fixup may be needed on uncompressed blocks? yield UInt32(self, "r[]", "New value of R0") yield UInt32(self, "r[]", "New value of R1") yield UInt32(self, "r[]", "New value of R2") @@ -260,18 +265,50 @@ class LZXBlock(FieldSet): self.parent.r1 = self["r[1]"].value self.parent.r2 = self["r[2]"].value yield RawBytes(self, "data", self.uncompressed_size) - self.parent.uncompressed_data += self["data"].value + self.parent._lzx_window += self["data"].value if self["block_size"].value % 2: yield PaddingBits(self, "padding", 8) else: raise ParserError("Unknown block type %d!" 
% self.block_type) + # Fixup Intel jumps if necessary (fixups are only applied to the final output, not to the LZX window) + self.parent.uncompressed_data += self.parent._lzx_window[-self.uncompressed_size:] + self.parent._lzx_window = self.parent._lzx_window[-(1 << self.root.compr_level):] + + if ( + intel_started + and self.parent["filesize_indicator"].value + and self.parent["filesize"].value > 0 + ): + # Note that we're decoding a block-at-a-time instead of a frame-at-a-time, + # so we need to handle the frame boundaries carefully. + filesize = self.parent["filesize"].value + start_pos = max(0, curlen - 10) # We may need to correct something from the last block + end_pos = len(self.parent.uncompressed_data) - 10 + while 1: + jmp_pos = self.parent.uncompressed_data.find(b"\xE8", start_pos, end_pos) + if jmp_pos == -1: + break + if (jmp_pos % 32768) >= (32768 - 10): + # jumps at the end of frames are not fixed up + start_pos = jmp_pos + 1 + continue + abs_off, = struct.unpack("= offset: new_data = data[-offset:] * (alignValue(length, offset) // offset) - return data + new_data[:length] + data += new_data[:length] else: - return data + data[-offset:-offset + length] + data += data[-offset:-offset + length] def build_tree(lengths): @@ -136,9 +136,9 @@ class DeflateBlock(FieldSet): CODE_LENGTH_ORDER = [16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15] - def __init__(self, parent, name, uncomp_data="", *args, **kwargs): + def __init__(self, parent, name, uncomp_data=b"", *args, **kwargs): FieldSet.__init__(self, parent, name, *args, **kwargs) - self.uncomp_data = uncomp_data + self.uncomp_data = bytearray(uncomp_data) def createFields(self): yield Bit(self, "final", "Is this the final block?") # BFINAL @@ -227,7 +227,7 @@ class DeflateBlock(FieldSet): field._description = "Literal Code %r (Huffman Code %i)" % ( chr(value), field.value) yield field - self.uncomp_data += chr(value) + self.uncomp_data.append(value) if value == 256: field._description = 
"Block Terminator Code (256) (Huffman Code %i)" % field.value yield field @@ -267,15 +267,14 @@ class DeflateBlock(FieldSet): extrafield._description = "Distance Extra Bits (%i), total length %i" % ( extrafield.value, distance) yield extrafield - self.uncomp_data = extend_data( - self.uncomp_data, length, distance) + extend_data(self.uncomp_data, length, distance) class DeflateData(GenericFieldSet): endian = LITTLE_ENDIAN def createFields(self): - uncomp_data = "" + uncomp_data = bytearray() blk = DeflateBlock(self, "compressed_block[]", uncomp_data) yield blk uncomp_data = blk.uncomp_data @@ -326,11 +325,11 @@ class ZlibData(Parser): yield textHandler(UInt32(self, "data_checksum", "ADLER32 checksum of compressed data"), hexadecimal) -def zlib_inflate(stream, wbits=None, prevdata=""): +def zlib_inflate(stream, wbits=None): if wbits is None or wbits >= 0: return ZlibData(stream)["data"].uncompressed_data else: data = DeflateData(None, "root", stream, "", stream.askSize(None)) - for unused in data: + for _ in data: pass return data.uncompressed_data diff --git a/lib/hachoir/parser/audio/aiff.py b/lib/hachoir/parser/audio/aiff.py index 1b8ae40e..8a9ddc78 100644 --- a/lib/hachoir/parser/audio/aiff.py +++ b/lib/hachoir/parser/audio/aiff.py @@ -7,9 +7,9 @@ Creation: 27 december 2006 from hachoir.parser import Parser from hachoir.field import (FieldSet, - UInt16, UInt32, Float80, TimestampMac32, - RawBytes, NullBytes, - String, Enum, PascalString32) + UInt16, UInt32, Float80, TimestampMac32, + RawBytes, NullBytes, + String, Enum, PascalString32) from hachoir.core.endian import BIG_ENDIAN from hachoir.core.text_handler import filesizeHandler from hachoir.core.tools import alignValue diff --git a/lib/hachoir/parser/audio/id3.py b/lib/hachoir/parser/audio/id3.py index 5348728d..e6f11312 100644 --- a/lib/hachoir/parser/audio/id3.py +++ b/lib/hachoir/parser/audio/id3.py @@ -7,9 +7,9 @@ Author: Victor Stinner """ from hachoir.field import (FieldSet, MatchError, ParserError, - 
Enum, UInt8, UInt24, UInt32, - CString, String, RawBytes, - Bit, Bits, NullBytes, NullBits) + Enum, UInt8, UInt24, UInt32, + CString, String, RawBytes, + Bit, Bits, NullBytes, NullBits) from hachoir.core.text_handler import textHandler from hachoir.core.tools import humanDuration from hachoir.core.endian import NETWORK_ENDIAN @@ -451,7 +451,7 @@ class ID3_Chunk(FieldSet): if size: cls = None - if not(is_compressed): + if not is_compressed: tag = self["tag"].value if tag in ID3_Chunk.handler: cls = ID3_Chunk.handler[tag] diff --git a/lib/hachoir/parser/audio/itunesdb.py b/lib/hachoir/parser/audio/itunesdb.py index ef2cd808..095679dc 100644 --- a/lib/hachoir/parser/audio/itunesdb.py +++ b/lib/hachoir/parser/audio/itunesdb.py @@ -10,8 +10,8 @@ Creation date: 19 august 2006 from hachoir.parser import Parser from hachoir.field import (FieldSet, - UInt8, UInt16, UInt32, Int32, UInt64, TimestampMac32, - String, Float32, NullBytes, Enum, RawBytes) + UInt8, UInt16, UInt32, Int32, UInt64, TimestampMac32, + String, Float32, NullBytes, Enum, RawBytes) from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.tools import humanDuration from hachoir.core.text_handler import displayHandler, filesizeHandler @@ -128,7 +128,7 @@ class DataObject(FieldSet): yield padding for i in range(self["entry_count"].value): yield UInt32(self, "index[" + str(i) + "]", "Index of the " + str(i) + "nth mhit") - elif(self["type"].value < 15) or (self["type"].value > 17) or (self["type"].value >= 200): + elif (self["type"].value < 15) or (self["type"].value > 17) or (self["type"].value >= 200): yield UInt32(self, "unknown[]") yield UInt32(self, "unknown[]") yield UInt32(self, "position", "Position") diff --git a/lib/hachoir/parser/audio/midi.py b/lib/hachoir/parser/audio/midi.py index c52ef911..b9ed1338 100644 --- a/lib/hachoir/parser/audio/midi.py +++ b/lib/hachoir/parser/audio/midi.py @@ -10,7 +10,7 @@ Creation: 27 december 2006 from hachoir.parser import Parser from hachoir.field import 
(FieldSet, Bits, ParserError, - String, UInt32, UInt24, UInt16, UInt8, Enum, RawBits, RawBytes) + String, UInt32, UInt24, UInt16, UInt8, Enum, RawBits, RawBytes) from hachoir.core.endian import BIG_ENDIAN from hachoir.core.text_handler import textHandler, hexadecimal from hachoir.core.tools import createDict, humanDurationNanosec @@ -29,7 +29,7 @@ class Integer(Bits): while True: bits = stream.readBits(addr, 8, parent.endian) value = (value << 7) + (bits & 127) - if not(bits & 128): + if not (bits & 128): break addr += 8 self._size += 8 diff --git a/lib/hachoir/parser/audio/mod.py b/lib/hachoir/parser/audio/mod.py index 483cb444..4e246c6c 100644 --- a/lib/hachoir/parser/audio/mod.py +++ b/lib/hachoir/parser/audio/mod.py @@ -20,8 +20,8 @@ Creation: 18th February 2007 from math import log10 from hachoir.parser import Parser from hachoir.field import (FieldSet, - Bits, UInt16, UInt8, - RawBytes, String, GenericVector) + Bits, UInt16, UInt8, + RawBytes, String, GenericVector) from hachoir.core.endian import BIG_ENDIAN from hachoir.core.text_handler import textHandler diff --git a/lib/hachoir/parser/audio/modplug.py b/lib/hachoir/parser/audio/modplug.py index 9cf9bf44..05b86431 100644 --- a/lib/hachoir/parser/audio/modplug.py +++ b/lib/hachoir/parser/audio/modplug.py @@ -9,8 +9,8 @@ Creation: 10th February 2007 """ from hachoir.field import (FieldSet, - UInt32, UInt16, UInt8, Int8, Float32, - RawBytes, String, GenericVector, ParserError) + UInt32, UInt16, UInt8, Int8, Float32, + RawBytes, String, GenericVector, ParserError) from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.text_handler import textHandler, hexadecimal diff --git a/lib/hachoir/parser/audio/mpeg_audio.py b/lib/hachoir/parser/audio/mpeg_audio.py index 91162e99..645daa04 100644 --- a/lib/hachoir/parser/audio/mpeg_audio.py +++ b/lib/hachoir/parser/audio/mpeg_audio.py @@ -7,10 +7,10 @@ Author: Victor Stinner from hachoir.parser import Parser from hachoir.field import (FieldSet, - MissingField, 
ParserError, createOrphanField, - Bit, Bits, Enum, - PaddingBits, PaddingBytes, - RawBytes) + MissingField, ParserError, createOrphanField, + Bit, Bits, Enum, + PaddingBits, PaddingBytes, + RawBytes) from hachoir.parser.audio.id3 import ID3v1, ID3v2 from hachoir.core.endian import BIG_ENDIAN from hachoir.core.tools import humanFrequency, humanBitSize diff --git a/lib/hachoir/parser/audio/real_audio.py b/lib/hachoir/parser/audio/real_audio.py index cd8cb9cc..daa73b6a 100644 --- a/lib/hachoir/parser/audio/real_audio.py +++ b/lib/hachoir/parser/audio/real_audio.py @@ -10,9 +10,9 @@ Samples: from hachoir.parser import Parser from hachoir.field import (FieldSet, - UInt8, UInt16, UInt32, - Bytes, RawBytes, String, - PascalString8) + UInt8, UInt16, UInt32, + Bytes, RawBytes, String, + PascalString8) from hachoir.core.tools import humanFrequency from hachoir.core.text_handler import displayHandler from hachoir.core.endian import BIG_ENDIAN diff --git a/lib/hachoir/parser/audio/s3m.py b/lib/hachoir/parser/audio/s3m.py index 11200052..b9784e6c 100644 --- a/lib/hachoir/parser/audio/s3m.py +++ b/lib/hachoir/parser/audio/s3m.py @@ -11,10 +11,10 @@ Creation: 11th February 2007 from hachoir.parser import Parser from hachoir.field import (StaticFieldSet, FieldSet, Field, - Bit, Bits, - UInt32, UInt16, UInt8, Enum, - PaddingBytes, RawBytes, NullBytes, - String, GenericVector, ParserError) + Bit, Bits, + UInt32, UInt16, UInt8, Enum, + PaddingBytes, RawBytes, NullBytes, + String, GenericVector, ParserError) from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.text_handler import textHandler, hexadecimal from hachoir.core.tools import alignValue diff --git a/lib/hachoir/parser/audio/xm.py b/lib/hachoir/parser/audio/xm.py index fbfceaa0..84c999df 100644 --- a/lib/hachoir/parser/audio/xm.py +++ b/lib/hachoir/parser/audio/xm.py @@ -15,9 +15,9 @@ Creation: 8th February 2007 from hachoir.parser import Parser from hachoir.field import (StaticFieldSet, FieldSet, - Bit, RawBits, 
Bits, - UInt32, UInt16, UInt8, Int8, Enum, - RawBytes, String, GenericVector) + Bit, RawBits, Bits, + UInt32, UInt16, UInt8, Int8, Enum, + RawBytes, String, GenericVector) from hachoir.core.endian import LITTLE_ENDIAN, BIG_ENDIAN from hachoir.core.text_handler import textHandler, filesizeHandler, hexadecimal from hachoir.parser.audio.modplug import ParseModplugMetadata diff --git a/lib/hachoir/parser/common/win32.py b/lib/hachoir/parser/common/win32.py index 35a2821f..31e6845a 100644 --- a/lib/hachoir/parser/common/win32.py +++ b/lib/hachoir/parser/common/win32.py @@ -1,5 +1,5 @@ from hachoir.field import (FieldSet, - UInt16, UInt32, Enum, String, Bytes, Bits, TimestampUUID60) + UInt16, UInt32, Enum, String, Bytes, Bits, TimestampUUID60) from hachoir.parser.video.fourcc import video_fourcc_name from hachoir.core.bits import str2hex from hachoir.core.text_handler import textHandler, hexadecimal diff --git a/lib/hachoir/parser/container/action_script.py b/lib/hachoir/parser/container/action_script.py index f28b1a5e..d0064243 100644 --- a/lib/hachoir/parser/container/action_script.py +++ b/lib/hachoir/parser/container/action_script.py @@ -13,8 +13,8 @@ Creation date: 26 April 2008 """ from hachoir.field import (FieldSet, ParserError, - Bit, Bits, UInt8, UInt32, Int16, UInt16, Float32, Float64, CString, Enum, - RawBytes, String) + Bit, Bits, UInt8, UInt32, Int16, UInt16, Float32, Float64, CString, Enum, + RawBytes, String) from hachoir.core.endian import LITTLE_ENDIAN from hachoir.field.float import FloatExponent from struct import unpack diff --git a/lib/hachoir/parser/container/asn1.py b/lib/hachoir/parser/container/asn1.py index 46e4d9ff..2d849af7 100644 --- a/lib/hachoir/parser/container/asn1.py +++ b/lib/hachoir/parser/container/asn1.py @@ -41,9 +41,9 @@ Creation date: 24 september 2006 from hachoir.parser import Parser from hachoir.field import (FieldSet, - FieldError, ParserError, - Bit, Bits, Bytes, UInt8, GenericInteger, String, - Field, Enum, RawBytes) + 
FieldError, ParserError, + Bit, Bits, Bytes, UInt8, GenericInteger, String, + Field, Enum, RawBytes) from hachoir.core.endian import BIG_ENDIAN from hachoir.core.tools import createDict, humanDatetime from hachoir.stream import InputStreamError diff --git a/lib/hachoir/parser/container/mkv.py b/lib/hachoir/parser/container/mkv.py index 966c19d8..a23f2c7c 100644 --- a/lib/hachoir/parser/container/mkv.py +++ b/lib/hachoir/parser/container/mkv.py @@ -6,11 +6,11 @@ from hachoir.parser import Parser from hachoir.field import (FieldSet, Link, - MissingField, ParserError, - Enum as _Enum, String as _String, - Float32, Float64, - NullBits, Bits, Bit, RawBytes, Bytes, - Int16, GenericInteger) + MissingField, ParserError, + Enum as _Enum, String as _String, + Float32, Float64, + NullBits, Bits, Bit, RawBytes, Bytes, + Int16, GenericInteger) from hachoir.core.endian import BIG_ENDIAN from hachoir.core.iso639 import ISO639_2 from hachoir.core.tools import humanDatetime diff --git a/lib/hachoir/parser/container/mp4.py b/lib/hachoir/parser/container/mp4.py index b6ccd1ae..3360eb66 100644 --- a/lib/hachoir/parser/container/mp4.py +++ b/lib/hachoir/parser/container/mp4.py @@ -20,10 +20,10 @@ Creation: 2 august 2006 from hachoir.parser import Parser from hachoir.parser.common.win32 import GUID from hachoir.field import (ParserError, FieldSet, MissingField, - Enum, - Bit, NullBits, Bits, UInt8, Int16, UInt16, UInt24, Int32, UInt32, Int64, UInt64, TimestampMac32, - String, PascalString8, PascalString16, CString, - RawBytes, NullBytes) + Enum, + Bit, NullBits, Bits, UInt8, Int16, UInt16, UInt24, Int32, UInt32, Int64, UInt64, TimestampMac32, + String, PascalString8, PascalString16, CString, + RawBytes, NullBytes) from hachoir.field.timestamp import timestampFactory from hachoir.core.endian import BIG_ENDIAN from hachoir.core.text_handler import textHandler @@ -1312,7 +1312,7 @@ class MP4File(Parser): if size < 8: return "Invalid first atom size" tag = self.stream.readBytes(4 * 8, 4) - 
if tag not in (b"ftyp", b"moov", b"free"): + if tag not in (b"ftyp", b"moov", b"free", b"skip"): return "Unknown MOV file type" return True diff --git a/lib/hachoir/parser/container/ogg.py b/lib/hachoir/parser/container/ogg.py index f2e527e5..46b6a4bb 100644 --- a/lib/hachoir/parser/container/ogg.py +++ b/lib/hachoir/parser/container/ogg.py @@ -6,9 +6,9 @@ from hachoir.parser import Parser from hachoir.field import (Field, FieldSet, createOrphanField, - NullBits, Bit, Bits, Enum, Fragment, MissingField, ParserError, - UInt8, UInt16, UInt24, UInt32, UInt64, - RawBytes, String, PascalString32, NullBytes) + NullBits, Bit, Bits, Enum, Fragment, MissingField, ParserError, + UInt8, UInt16, UInt24, UInt32, UInt64, + RawBytes, String, PascalString32, NullBytes) from hachoir.stream import FragmentedStream, InputStreamError from hachoir.core.endian import LITTLE_ENDIAN, BIG_ENDIAN from hachoir.core.tools import humanDurationNanosec diff --git a/lib/hachoir/parser/container/realmedia.py b/lib/hachoir/parser/container/realmedia.py index f185e5f6..f27b06a3 100644 --- a/lib/hachoir/parser/container/realmedia.py +++ b/lib/hachoir/parser/container/realmedia.py @@ -15,8 +15,8 @@ Samples: from hachoir.parser import Parser from hachoir.field import (FieldSet, - UInt16, UInt32, Bit, RawBits, - RawBytes, String, PascalString8, PascalString16) + UInt16, UInt32, Bit, RawBits, + RawBytes, String, PascalString8, PascalString16) from hachoir.core.text_handler import textHandler, hexadecimal from hachoir.core.endian import BIG_ENDIAN diff --git a/lib/hachoir/parser/container/riff.py b/lib/hachoir/parser/container/riff.py index b4caa66e..6e7956ab 100644 --- a/lib/hachoir/parser/container/riff.py +++ b/lib/hachoir/parser/container/riff.py @@ -29,10 +29,10 @@ Thanks to: from hachoir.parser import Parser from hachoir.field import (FieldSet, ParserError, - UInt8, UInt16, UInt32, Enum, - Bit, NullBits, NullBytes, - RawBytes, String, PaddingBytes, - SubFile) + UInt8, UInt16, UInt32, Enum, + Bit, 
NullBits, NullBytes, + RawBytes, String, PaddingBytes, + SubFile) from hachoir.core.tools import alignValue, humanDuration from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.text_handler import filesizeHandler, textHandler diff --git a/lib/hachoir/parser/container/swf.py b/lib/hachoir/parser/container/swf.py index 037382e9..1c038120 100644 --- a/lib/hachoir/parser/container/swf.py +++ b/lib/hachoir/parser/container/swf.py @@ -15,8 +15,8 @@ Creation date: 29 october 2006 from hachoir.parser import Parser from hachoir.field import (FieldSet, ParserError, - Bit, Bits, UInt8, UInt16, Int32, UInt32, Int64, CString, Enum, - Bytes, RawBytes, NullBits, String, SubFile) + Bit, Bits, UInt8, UInt16, Int32, UInt32, Int64, CString, Enum, + Bytes, RawBytes, NullBits, String, SubFile) from hachoir.core.endian import LITTLE_ENDIAN, BIG_ENDIAN from hachoir.core.text_handler import textHandler, filesizeHandler from hachoir.core.tools import paddingSize, humanFrequency diff --git a/lib/hachoir/parser/file_system/ext2.py b/lib/hachoir/parser/file_system/ext2.py index efed5205..bd607fb4 100644 --- a/lib/hachoir/parser/file_system/ext2.py +++ b/lib/hachoir/parser/file_system/ext2.py @@ -14,9 +14,9 @@ Sources: from hachoir.parser import HachoirParser, Parser from hachoir.field import (RootSeekableFieldSet, SeekableFieldSet, FieldSet, ParserError, - Bit, Bits, UInt8, UInt16, UInt32, - Enum, String, TimestampUnix32, RawBytes, - NullBytes, PaddingBits, PaddingBytes, FragmentGroup, CustomFragment) + Bit, Bits, UInt8, UInt16, UInt32, + Enum, String, TimestampUnix32, RawBytes, + NullBytes, PaddingBits, PaddingBytes, FragmentGroup, CustomFragment) from hachoir.core.tools import (humanDuration, humanFilesize) from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.text_handler import textHandler @@ -240,11 +240,13 @@ class Inode(FieldSet): return out def is_fast_symlink(self): - self.seekByte(4 * 15 + 4) - acl = UInt32(self, "file_acl") + acl_addr = self.absolute_address + 
self.current_size + # skip 15 blocks + version field + acl_addr += (4 * 15 + 4) * 8 + acl = self.stream.readBits(acl_addr, 32, self.endian) b = 0 - if acl.value > 0: + if acl > 0: b = (2 << self["/superblock/log_block_size"].value) return (self['blocks'].value - b == 0) @@ -747,7 +749,7 @@ class EXT2_FS(HachoirParser, RootSeekableFieldSet): def validate(self): if self.stream.readBytes((1024 + 56) * 8, 2) != b"\x53\xEF": return "Invalid magic number" - if not(0 <= self["superblock/log_block_size"].value <= 2): + if not (0 <= self["superblock/log_block_size"].value <= 2): return "Invalid (log) block size" if self["superblock/inode_size"].value not in (0, 128): return "Unsupported inode size" diff --git a/lib/hachoir/parser/file_system/fat.py b/lib/hachoir/parser/file_system/fat.py index aef0f5d6..802fab63 100644 --- a/lib/hachoir/parser/file_system/fat.py +++ b/lib/hachoir/parser/file_system/fat.py @@ -1,8 +1,8 @@ from hachoir.parser import Parser from hachoir.field import (FieldSet, StaticFieldSet, - RawBytes, PaddingBytes, createPaddingField, Link, Fragment, - Bit, Bits, UInt8, UInt16, UInt32, - String, Bytes, NullBytes) + RawBytes, PaddingBytes, createPaddingField, Link, Fragment, + Bit, Bits, UInt8, UInt16, UInt32, + String, Bytes, NullBytes) from hachoir.field.integer import GenericInteger from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.text_handler import textHandler, hexadecimal diff --git a/lib/hachoir/parser/file_system/iso9660.py b/lib/hachoir/parser/file_system/iso9660.py index cc001f61..ecac4a3e 100644 --- a/lib/hachoir/parser/file_system/iso9660.py +++ b/lib/hachoir/parser/file_system/iso9660.py @@ -11,8 +11,8 @@ Creation: 11 july 2006 from hachoir.parser import Parser from hachoir.field import (FieldSet, ParserError, - UInt8, UInt32, UInt64, Enum, - NullBytes, RawBytes, String) + UInt8, UInt32, UInt64, Enum, + NullBytes, RawBytes, String) from hachoir.core.endian import LITTLE_ENDIAN, BIG_ENDIAN diff --git 
a/lib/hachoir/parser/file_system/linux_swap.py b/lib/hachoir/parser/file_system/linux_swap.py index fd3effbf..79e1c83f 100644 --- a/lib/hachoir/parser/file_system/linux_swap.py +++ b/lib/hachoir/parser/file_system/linux_swap.py @@ -11,8 +11,8 @@ Creation date: 25 december 2006 (christmas ;-)) from hachoir.parser import Parser from hachoir.field import (ParserError, GenericVector, - UInt32, String, - Bytes, NullBytes, RawBytes) + UInt32, String, + Bytes, NullBytes, RawBytes) from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.tools import humanFilesize from hachoir.core.bits import str2hex diff --git a/lib/hachoir/parser/file_system/mbr.py b/lib/hachoir/parser/file_system/mbr.py index c8ecbb0e..362afa65 100644 --- a/lib/hachoir/parser/file_system/mbr.py +++ b/lib/hachoir/parser/file_system/mbr.py @@ -14,8 +14,8 @@ Master Boot Record. from hachoir.parser import Parser from hachoir.field import (FieldSet, - Enum, Bits, UInt8, UInt16, UInt32, - RawBytes) + Enum, Bits, UInt8, UInt16, UInt32, + RawBytes) from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.tools import humanFilesize from hachoir.core.text_handler import textHandler, hexadecimal diff --git a/lib/hachoir/parser/file_system/ntfs.py b/lib/hachoir/parser/file_system/ntfs.py index 0d3394a0..84804e27 100644 --- a/lib/hachoir/parser/file_system/ntfs.py +++ b/lib/hachoir/parser/file_system/ntfs.py @@ -13,9 +13,9 @@ Author: Victor Stinner from hachoir.parser import Parser from hachoir.field import (FieldSet, Enum, - UInt8, UInt16, UInt32, UInt64, TimestampWin64, - String, Bytes, Bit, Bits, - NullBits, NullBytes, PaddingBytes, RawBytes) + UInt8, UInt16, UInt32, UInt64, TimestampWin64, + String, Bytes, Bit, Bits, + NullBits, NullBytes, PaddingBytes, RawBytes) from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.text_handler import textHandler, hexadecimal, filesizeHandler from hachoir.core.tools import humanFilesize, createDict diff --git 
a/lib/hachoir/parser/file_system/reiser_fs.py b/lib/hachoir/parser/file_system/reiser_fs.py index 2314ae85..da6ffdea 100644 --- a/lib/hachoir/parser/file_system/reiser_fs.py +++ b/lib/hachoir/parser/file_system/reiser_fs.py @@ -22,7 +22,7 @@ Kurz. from hachoir.parser import Parser from hachoir.field import (FieldSet, Enum, - UInt16, UInt32, String, RawBytes, NullBytes, SeekableFieldSet, Bit) + UInt16, UInt32, String, RawBytes, NullBytes, SeekableFieldSet, Bit) from hachoir.core.endian import LITTLE_ENDIAN diff --git a/lib/hachoir/parser/game/laf.py b/lib/hachoir/parser/game/laf.py index 2a858abb..12edd405 100644 --- a/lib/hachoir/parser/game/laf.py +++ b/lib/hachoir/parser/game/laf.py @@ -7,7 +7,7 @@ Creation date: 1 January 2007 from hachoir.parser import Parser from hachoir.field import (FieldSet, - UInt8, UInt16, UInt32, GenericVector) + UInt8, UInt16, UInt32, GenericVector) from hachoir.core.endian import LITTLE_ENDIAN diff --git a/lib/hachoir/parser/game/zsnes.py b/lib/hachoir/parser/game/zsnes.py index 8d02c5bf..79ec662d 100644 --- a/lib/hachoir/parser/game/zsnes.py +++ b/lib/hachoir/parser/game/zsnes.py @@ -7,8 +7,8 @@ Creation date: 2006-09-15 from hachoir.parser import Parser from hachoir.field import (FieldSet, StaticFieldSet, - UInt8, UInt16, UInt32, - String, PaddingBytes, Bytes, RawBytes) + UInt8, UInt16, UInt32, + String, PaddingBytes, Bytes, RawBytes) from hachoir.core.endian import LITTLE_ENDIAN diff --git a/lib/hachoir/parser/guess.py b/lib/hachoir/parser/guess.py index ce98ea64..23fb70ba 100644 --- a/lib/hachoir/parser/guess.py +++ b/lib/hachoir/parser/guess.py @@ -139,4 +139,7 @@ def createParser(filename, real_filename=None, tags=None): if not tags: tags = [] stream = FileInputStream(filename, real_filename, tags=tags) - return guessParser(stream) + guess = guessParser(stream) + if guess is None: + stream.close() + return guess diff --git a/lib/hachoir/parser/image/bmp.py b/lib/hachoir/parser/image/bmp.py index 6d855abc..518fdc22 100644 --- 
a/lib/hachoir/parser/image/bmp.py +++ b/lib/hachoir/parser/image/bmp.py @@ -8,9 +8,9 @@ Creation: 16 december 2005 from hachoir.parser import Parser from hachoir.field import (FieldSet, - UInt8, UInt16, UInt32, Bits, - String, RawBytes, Enum, - PaddingBytes, NullBytes, createPaddingField) + UInt8, UInt16, UInt32, Bits, + String, RawBytes, Enum, + PaddingBytes, NullBytes, createPaddingField) from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.text_handler import textHandler, hexadecimal from hachoir.parser.image.common import RGB, PaletteRGBA diff --git a/lib/hachoir/parser/image/exif.py b/lib/hachoir/parser/image/exif.py index 4331048b..7efbdfbe 100644 --- a/lib/hachoir/parser/image/exif.py +++ b/lib/hachoir/parser/image/exif.py @@ -11,11 +11,11 @@ References: """ from hachoir.field import (FieldSet, SeekableFieldSet, ParserError, - UInt8, UInt16, UInt32, - Int8, Int16, Int32, - Float32, Float64, - Enum, String, Bytes, SubFile, - NullBits, NullBytes) + UInt8, UInt16, UInt32, + Int8, Int16, Int32, + Float32, Float64, + Enum, String, Bytes, SubFile, + NullBits, NullBytes) from hachoir.core.endian import LITTLE_ENDIAN, BIG_ENDIAN from hachoir.core.tools import createDict diff --git a/lib/hachoir/parser/image/gif.py b/lib/hachoir/parser/image/gif.py index c9da6520..a33b1283 100644 --- a/lib/hachoir/parser/image/gif.py +++ b/lib/hachoir/parser/image/gif.py @@ -11,11 +11,11 @@ Author: Victor Stinner, Robert Xiao from hachoir.parser import Parser from hachoir.field import (FieldSet, ParserError, - Enum, UInt8, UInt16, - Bit, Bits, NullBytes, - String, PascalString8, Character, - NullBits, RawBytes, - CustomFragment) + Enum, UInt8, UInt16, + Bit, Bits, NullBytes, + String, PascalString8, Character, + NullBits, RawBytes, + CustomFragment) from hachoir.parser.image.common import PaletteRGB from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.tools import humanDuration, paddingSize diff --git a/lib/hachoir/parser/image/ico.py 
b/lib/hachoir/parser/image/ico.py index 1ff4cc0e..63556e08 100644 --- a/lib/hachoir/parser/image/ico.py +++ b/lib/hachoir/parser/image/ico.py @@ -6,7 +6,7 @@ Author: Victor Stinner from hachoir.parser import Parser from hachoir.field import (FieldSet, ParserError, - UInt8, UInt16, UInt32, Enum, RawBytes) + UInt8, UInt16, UInt32, Enum, RawBytes) from hachoir.parser.image.common import PaletteRGBA from hachoir.core.endian import LITTLE_ENDIAN from hachoir.parser.common.win32 import BitmapInfoHeader diff --git a/lib/hachoir/parser/image/iptc.py b/lib/hachoir/parser/image/iptc.py index abe32941..1b6da5a0 100644 --- a/lib/hachoir/parser/image/iptc.py +++ b/lib/hachoir/parser/image/iptc.py @@ -12,7 +12,7 @@ Author: Victor Stinner """ from hachoir.field import (FieldSet, ParserError, - UInt8, UInt16, String, RawBytes, NullBytes) + UInt8, UInt16, String, RawBytes, NullBytes) from hachoir.core.text_handler import textHandler, hexadecimal diff --git a/lib/hachoir/parser/image/jpeg.py b/lib/hachoir/parser/image/jpeg.py index 58701ccb..419d6f48 100644 --- a/lib/hachoir/parser/image/jpeg.py +++ b/lib/hachoir/parser/image/jpeg.py @@ -16,9 +16,9 @@ Author: Victor Stinner, Robert Xiao from hachoir.parser import Parser from hachoir.field import (FieldSet, ParserError, FieldError, - UInt8, UInt16, Enum, Field, - Bit, Bits, NullBits, NullBytes, PaddingBits, - String, RawBytes) + UInt8, UInt16, Enum, Field, + Bit, Bits, NullBits, NullBytes, PaddingBits, + String, RawBytes) from hachoir.parser.image.common import PaletteRGB from hachoir.core.endian import BIG_ENDIAN from hachoir.core.text_handler import textHandler, hexadecimal @@ -205,7 +205,7 @@ class SOSComponent(FieldSet): def createFields(self): comp_id = UInt8(self, "component_id") yield comp_id - if not(1 <= comp_id.value <= self["../nr_components"].value): + if not (1 <= comp_id.value <= self["../nr_components"].value): raise ParserError("JPEG error: Invalid component-id") yield Bits(self, "dc_coding_table", 4, "DC entropy 
coding table destination selector") yield Bits(self, "ac_coding_table", 4, "AC entropy coding table destination selector") @@ -387,7 +387,10 @@ class JpegImageData(FieldSet): end = self.stream.searchBytes(b"\xff", start, MAX_FILESIZE * 8) if end is None: # this is a bad sign, since it means there is no terminator - # we ignore this; it likely means a truncated image + # this likely means a truncated image: + # set the size to the remaining length of the stream + # to avoid being forced to parse subfields to calculate size + self._size = self.stream._size - self.absolute_address break if self.stream.readBytes(end, 2) == b'\xff\x00': # padding: false alarm diff --git a/lib/hachoir/parser/image/photoshop_metadata.py b/lib/hachoir/parser/image/photoshop_metadata.py index 867b23b7..1879d37c 100644 --- a/lib/hachoir/parser/image/photoshop_metadata.py +++ b/lib/hachoir/parser/image/photoshop_metadata.py @@ -5,9 +5,9 @@ References: """ from hachoir.field import (FieldSet, ParserError, - UInt8, UInt16, UInt32, Float32, Enum, - SubFile, String, CString, PascalString8, - NullBytes, RawBytes) + UInt8, UInt16, UInt32, Float32, Enum, + SubFile, String, CString, PascalString8, + NullBytes, RawBytes) from hachoir.core.text_handler import textHandler, hexadecimal from hachoir.core.tools import alignValue, createDict from hachoir.parser.image.iptc import IPTC diff --git a/lib/hachoir/parser/image/png.py b/lib/hachoir/parser/image/png.py index 6c541953..19a99be8 100644 --- a/lib/hachoir/parser/image/png.py +++ b/lib/hachoir/parser/image/png.py @@ -10,12 +10,12 @@ Author: Victor Stinner from hachoir.parser import Parser from hachoir.field import (FieldSet, Fragment, - ParserError, MissingField, - UInt8, UInt16, UInt32, - String, CString, - Bytes, RawBytes, - Bit, NullBits, - Enum, CompressedField) + ParserError, MissingField, + UInt8, UInt16, UInt32, + String, CString, + Bytes, RawBytes, + Bit, NullBits, + Enum, CompressedField) from hachoir.parser.image.common import RGB from 
hachoir.core.text_handler import textHandler, hexadecimal from hachoir.core.endian import NETWORK_ENDIAN @@ -45,7 +45,7 @@ UNIT_NAME = {1: "Meter"} COMPRESSION_NAME = { 0: "deflate" # with 32K sliding window } -MAX_CHUNK_SIZE = 5 * 1024 * 1024 # Maximum chunk size (5 MB) +MAX_CHUNK_SIZE = 64 * 1024 * 1024 # Maximum chunk size heuristic (64 MB) def headerParse(parent): diff --git a/lib/hachoir/parser/image/psd.py b/lib/hachoir/parser/image/psd.py index 76f2123d..df59f6b9 100644 --- a/lib/hachoir/parser/image/psd.py +++ b/lib/hachoir/parser/image/psd.py @@ -7,7 +7,7 @@ Author: Victor Stinner from hachoir.parser import Parser from hachoir.field import (FieldSet, - UInt16, UInt32, String, NullBytes, Enum, RawBytes) + UInt16, UInt32, String, NullBytes, Enum, RawBytes) from hachoir.core.endian import BIG_ENDIAN from hachoir.parser.image.photoshop_metadata import Photoshop8BIM diff --git a/lib/hachoir/parser/image/wmf.py b/lib/hachoir/parser/image/wmf.py index 3fcfa9f9..2a60951a 100644 --- a/lib/hachoir/parser/image/wmf.py +++ b/lib/hachoir/parser/image/wmf.py @@ -16,8 +16,8 @@ Creation date: 26 december 2006 from hachoir.parser import Parser from hachoir.field import (FieldSet, StaticFieldSet, Enum, - MissingField, ParserError, - UInt32, Int32, UInt16, Int16, UInt8, NullBytes, RawBytes, String) + MissingField, ParserError, + UInt32, Int32, UInt16, Int16, UInt8, NullBytes, RawBytes, String) from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.text_handler import textHandler, hexadecimal from hachoir.core.tools import createDict @@ -597,7 +597,7 @@ class WMF_File(Parser): yield UInt32(self, "max_record_size", "The size of largest record in 16-bit words") yield UInt16(self, "nb_params", "Not Used (always 0)") - while not(self.eof): + while not self.eof: yield Function(self, "func[]") def isEMF(self): diff --git a/lib/hachoir/parser/image/xcf.py b/lib/hachoir/parser/image/xcf.py index 8690329f..c1f420fe 100644 --- a/lib/hachoir/parser/image/xcf.py +++ 
b/lib/hachoir/parser/image/xcf.py @@ -11,7 +11,7 @@ Author: Victor Stinner from hachoir.parser import Parser from hachoir.field import (StaticFieldSet, FieldSet, ParserError, - UInt8, UInt32, Enum, Float32, String, PascalString32, RawBytes) + UInt8, UInt32, Enum, Float32, String, PascalString32, RawBytes) from hachoir.parser.image.common import RGBA from hachoir.core.endian import NETWORK_ENDIAN diff --git a/lib/hachoir/parser/misc/__init__.py b/lib/hachoir/parser/misc/__init__.py index ccb72fb2..208ffe06 100644 --- a/lib/hachoir/parser/misc/__init__.py +++ b/lib/hachoir/parser/misc/__init__.py @@ -16,3 +16,4 @@ from hachoir.parser.misc.word_doc import WordDocumentParser # noqa from hachoir.parser.misc.word_2 import Word2DocumentParser # noqa from hachoir.parser.misc.mstask import MSTaskFile # noqa from hachoir.parser.misc.mapsforge_map import MapsforgeMapFile # noqa +from hachoir.parser.misc.fit import FITFile # noqa diff --git a/lib/hachoir/parser/misc/bplist.py b/lib/hachoir/parser/misc/bplist.py index b82b4b59..097a1978 100644 --- a/lib/hachoir/parser/misc/bplist.py +++ b/lib/hachoir/parser/misc/bplist.py @@ -17,7 +17,7 @@ Created: 2008-09-21 from hachoir.parser import HachoirParser from hachoir.field import (RootSeekableFieldSet, FieldSet, Enum, - Bits, GenericInteger, Float32, Float64, UInt8, UInt64, Bytes, NullBytes, RawBytes, String) + Bits, GenericInteger, Float32, Float64, UInt8, UInt64, Bytes, NullBytes, RawBytes, String) from hachoir.core.endian import BIG_ENDIAN from hachoir.core.text_handler import displayHandler from hachoir.core.tools import humanDatetime diff --git a/lib/hachoir/parser/misc/chm.py b/lib/hachoir/parser/misc/chm.py index 03ab1ee2..7e76965a 100644 --- a/lib/hachoir/parser/misc/chm.py +++ b/lib/hachoir/parser/misc/chm.py @@ -16,9 +16,9 @@ Creation date: 2007-03-04 """ from hachoir.field import (Field, FieldSet, ParserError, RootSeekableFieldSet, - Int32, UInt16, UInt32, UInt64, - RawBytes, PaddingBytes, - Enum, String) + Int32, UInt16, 
UInt32, UInt64, + RawBytes, PaddingBytes, + Enum, String) from hachoir.core.endian import LITTLE_ENDIAN from hachoir.parser import HachoirParser from hachoir.parser.common.win32 import GUID diff --git a/lib/hachoir/parser/misc/dsstore.py b/lib/hachoir/parser/misc/dsstore.py index f277a1ba..f4a7beca 100644 --- a/lib/hachoir/parser/misc/dsstore.py +++ b/lib/hachoir/parser/misc/dsstore.py @@ -9,10 +9,10 @@ Created: 2010-09-01 from hachoir.parser import HachoirParser from hachoir.field import (RootSeekableFieldSet, FieldSet, - NullBytes, RawBytes, PaddingBytes, Bytes, SubFile, String, PascalString8, - Bits, UInt8, UInt16, UInt32, - Link, - ParserError) + NullBytes, RawBytes, PaddingBytes, Bytes, SubFile, String, PascalString8, + Bits, UInt8, UInt16, UInt32, + Link, + ParserError) from hachoir.core.endian import BIG_ENDIAN from hachoir.core.text_handler import displayHandler from hachoir.core.tools import paddingSize diff --git a/lib/hachoir/parser/misc/file_3do.py b/lib/hachoir/parser/misc/file_3do.py index ec0d0074..e27c8edc 100644 --- a/lib/hachoir/parser/misc/file_3do.py +++ b/lib/hachoir/parser/misc/file_3do.py @@ -7,8 +7,8 @@ Creation date: 28 september 2006 from hachoir.parser import Parser from hachoir.field import (FieldSet, - UInt32, Int32, String, Float32, - RawBytes, PaddingBytes) + UInt32, Int32, String, Float32, + RawBytes, PaddingBytes) from hachoir.core.endian import LITTLE_ENDIAN, BIG_ENDIAN from hachoir.parser.misc.common import Vertex, MapUV diff --git a/lib/hachoir/parser/misc/file_3ds.py b/lib/hachoir/parser/misc/file_3ds.py index b2409048..2998e20b 100644 --- a/lib/hachoir/parser/misc/file_3ds.py +++ b/lib/hachoir/parser/misc/file_3ds.py @@ -5,7 +5,7 @@ Author: Victor Stinner from hachoir.parser import Parser from hachoir.field import (StaticFieldSet, FieldSet, - UInt16, UInt32, RawBytes, Enum, CString) + UInt16, UInt32, RawBytes, Enum, CString) from hachoir.parser.image.common import RGB from hachoir.core.endian import LITTLE_ENDIAN from 
hachoir.core.text_handler import textHandler, hexadecimal diff --git a/lib/hachoir/parser/misc/fit.py b/lib/hachoir/parser/misc/fit.py new file mode 100644 index 00000000..8e51f877 --- /dev/null +++ b/lib/hachoir/parser/misc/fit.py @@ -0,0 +1,173 @@ +""" +Garmin fit file Format parser. + +Author: Sebastien Ponce +""" + +from hachoir.parser import Parser +from hachoir.field import FieldSet, Int8, UInt8, Int16, UInt16, Int32, UInt32, Int64, UInt64, RawBytes, Bit, Bits, Bytes, String, Float32, Float64 +from hachoir.core.endian import BIG_ENDIAN, LITTLE_ENDIAN + +field_types = { + 0: UInt8, # enum + 1: Int8, # signed int of 8 bits + 2: UInt8, # unsigned int of 8 bits + 131: Int16, # signed int of 16 bits + 132: UInt16, # unsigned int of 16 bits + 133: Int32, # signed int of 32 bits + 134: UInt32, # unsigned int of 32 bits + 7: String, # string + 136: Float32, # float + 137: Float64, # double + 10: UInt8, # unsigned int of 8 bits with 0 as invalid value + 139: UInt16, # unsigned int of 16 bits with 0 as invalid value + 140: UInt32, # unsigned int of 32 bits with 0 as invalid value + 13: Bytes, # bytes + 142: Int64, # signed int of 64 bits + 143: UInt64, # unsigned int of 64 bits + 144: UInt64 # unsigned int of 64 bits with 0 as invalid value +} + + +class Header(FieldSet): + endian = LITTLE_ENDIAN + + def createFields(self): + yield UInt8(self, "size", "Header size") + yield UInt8(self, "protocol", "Protocol version") + yield UInt16(self, "profile", "Profile version") + yield UInt32(self, "datasize", "Data size") + yield RawBytes(self, "datatype", 4) + yield UInt16(self, "crc", "CRC of first 11 bytes or 0x0") + + def createDescription(self): + return "Header of fit file. 
Data size is %d" % (self["datasize"].value) + + +class NormalRecordHeader(FieldSet): + + def createFields(self): + yield Bit(self, "normal", "Normal header (0)") + yield Bit(self, "type", "Message type (0 data, 1 definition") + yield Bit(self, "typespecific", "0") + yield Bit(self, "reserved", "0") + yield Bits(self, "msgType", 4, description="Message type") + + def createDescription(self): + return "Record header, this is a %s message" % ("definition" if self["type"].value else "data") + + +class FieldDefinition(FieldSet): + + def createFields(self): + yield UInt8(self, "number", "Field definition number") + yield UInt8(self, "size", "Size in bytes") + yield UInt8(self, "type", "Base type") + + def createDescription(self): + return "Field Definition. Number %d, Size %d" % (self["number"].value, self["size"].value) + + +class DefinitionMessage(FieldSet): + + def createFields(self): + yield NormalRecordHeader(self, "RecordHeader") + yield UInt8(self, "reserved", "Reserved (0)") + yield UInt8(self, "architecture", "Architecture (0 little, 1 big endian") + self.endian = BIG_ENDIAN if self["architecture"].value else LITTLE_ENDIAN + yield UInt16(self, "msgNumber", "Message Number") + yield UInt8(self, "nbFields", "Number of fields") + for n in range(self["nbFields"].value): + yield FieldDefinition(self, "fieldDefinition[]") + + def createDescription(self): + return "Definition Message. 
Contains %d fields" % (self["nbFields"].value) + + +class DataMessage(FieldSet): + + def createFields(self): + hdr = NormalRecordHeader(self, "RecordHeader") + yield hdr + msgType = self["RecordHeader"]["msgType"].value + msgDef = self.parent.msgDefs[msgType] + for n in range(msgDef["nbFields"].value): + desc = msgDef["fieldDefinition[%d]" % n] + typ = field_types[desc["type"].value] + self.endian = BIG_ENDIAN if msgDef["architecture"].value else LITTLE_ENDIAN + if typ == String or typ == Bytes: + yield typ(self, "field%d" % n, desc["size"].value) + else: + if typ.static_size // 8 == desc["size"].value: + yield typ(self, "field%d" % n, desc["size"].value) + else: + for p in range(desc["size"].value * 8 // typ.static_size): + yield typ(self, "field%d[]" % n) + + def createDescription(self): + return "Data Message" + + +class TimeStamp(FieldSet): + + def createFields(self): + yield Bit(self, "timestamp", "TimeStamp (1)") + yield Bits(self, "msgType", 3, description="Message type") + yield Bits(self, "time", 4, description="TimeOffset") + + def createDescription(self): + return "TimeStamp" + + +class CRC(FieldSet): + + def createFields(self): + yield UInt16(self, "crc", "CRC") + + def createDescription(self): + return "CRC" + + +class FITFile(Parser): + endian = BIG_ENDIAN + PARSER_TAGS = { + "id": "fit", + "category": "misc", + "file_ext": ("fit",), + "mime": ("application/fit",), + "min_size": 14 * 8, + "description": "Garmin binary fit format" + } + + def __init__(self, *args, **kwargs): + Parser.__init__(self, *args, **kwargs) + self.msgDefs = {} + + def validate(self): + s = self.stream.readBytes(0, 12) + if s[8:12] != b'.FIT': + return "Invalid header %d %d %d %d" % tuple([int(b) for b in s[8:12]]) + return True + + def createFields(self): + yield Header(self, "header") + while self.current_size < self["header"]["datasize"].value * 8: + b = self.stream.readBits(self.absolute_address + self.current_size, 2, self.endian) + if b == 1: + defMsg = 
DefinitionMessage(self, "definition[]") + msgType = defMsg["RecordHeader"]["msgType"].value + sizes = '' + ts = 0 + for n in range(defMsg["nbFields"].value): + fname = "fieldDefinition[%d]" % n + size = defMsg[fname]["size"].value + ts += size + sizes += "%d/" % size + sizes += "%d" % ts + self.msgDefs[msgType] = defMsg + yield defMsg + elif b == 0: + yield DataMessage(self, "data[]") + else: + yield TimeStamp(self, "timestamp[]") + yield CRC(self, "crc") diff --git a/lib/hachoir/parser/misc/gnome_keyring.py b/lib/hachoir/parser/misc/gnome_keyring.py index 4fee2951..421014a7 100644 --- a/lib/hachoir/parser/misc/gnome_keyring.py +++ b/lib/hachoir/parser/misc/gnome_keyring.py @@ -12,10 +12,10 @@ Creation date: 2008-04-09 from hachoir.core.tools import paddingSize from hachoir.parser import Parser from hachoir.field import (FieldSet, - Bit, NullBits, NullBytes, - UInt8, UInt32, String, RawBytes, Enum, - TimestampUnix64, CompressedField, - SubFile) + Bit, NullBits, NullBytes, + UInt8, UInt32, String, RawBytes, Enum, + TimestampUnix64, CompressedField, + SubFile) from hachoir.core.endian import BIG_ENDIAN try: diff --git a/lib/hachoir/parser/misc/hlp.py b/lib/hachoir/parser/misc/hlp.py index 50206d72..b16792cb 100644 --- a/lib/hachoir/parser/misc/hlp.py +++ b/lib/hachoir/parser/misc/hlp.py @@ -12,11 +12,11 @@ Creation date: 2007-09-03 from hachoir.parser import Parser from hachoir.field import (FieldSet, - Bits, Int32, UInt16, UInt32, - NullBytes, RawBytes, PaddingBytes, String) + Bits, Int32, UInt16, UInt32, + NullBytes, RawBytes, PaddingBytes, String) from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.text_handler import (textHandler, hexadecimal, - displayHandler, humanFilesize) + displayHandler, humanFilesize) class FileEntry(FieldSet): diff --git a/lib/hachoir/parser/misc/lnk.py b/lib/hachoir/parser/misc/lnk.py index b736e6d0..3a6dfef2 100644 --- a/lib/hachoir/parser/misc/lnk.py +++ b/lib/hachoir/parser/misc/lnk.py @@ -24,11 +24,11 @@ Changes: from 
hachoir.parser import Parser from hachoir.field import (FieldSet, - CString, String, - UInt32, UInt16, UInt8, - Bit, Bits, PaddingBits, - TimestampWin64, DateTimeMSDOS32, - NullBytes, PaddingBytes, RawBytes, Enum) + CString, String, + UInt32, UInt16, UInt8, + Bit, Bits, PaddingBits, + TimestampWin64, DateTimeMSDOS32, + NullBytes, PaddingBytes, RawBytes, Enum) from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.text_handler import textHandler, hexadecimal from hachoir.parser.common.win32 import GUID diff --git a/lib/hachoir/parser/misc/mapsforge_map.py b/lib/hachoir/parser/misc/mapsforge_map.py index 06298ab7..a393942b 100644 --- a/lib/hachoir/parser/misc/mapsforge_map.py +++ b/lib/hachoir/parser/misc/mapsforge_map.py @@ -10,8 +10,8 @@ References: from hachoir.parser import Parser from hachoir.field import (Bit, Bits, UInt8, UInt16, UInt32, Int32, UInt64, String, - PaddingBits, - Enum, Field, FieldSet, SeekableFieldSet, RootSeekableFieldSet) + PaddingBits, + Enum, Field, FieldSet, SeekableFieldSet, RootSeekableFieldSet) from hachoir.core.endian import BIG_ENDIAN @@ -41,7 +41,7 @@ class UIntVbe(Field): size += 1 assert size < 100, "UIntVBE is too large" - if not(haveMoreData): + if not haveMoreData: break self._size = size * 8 @@ -71,7 +71,7 @@ class IntVbe(Field): size += 1 assert size < 100, "IntVBE is too large" - if not(haveMoreData): + if not haveMoreData: break if isNegative: @@ -142,7 +142,7 @@ class TileHeader(FieldSet): def createFields(self): numLevels = int(self.zoomIntervalCfg[ "max_zoom_level"].value - self.zoomIntervalCfg["min_zoom_level"].value) + 1 - assert(numLevels < 50) + assert (numLevels < 50) for i in range(numLevels): yield TileZoomTable(self, "zoom_table_entry[]") yield UIntVbe(self, "first_way_offset") diff --git a/lib/hachoir/parser/misc/msoffice.py b/lib/hachoir/parser/misc/msoffice.py index b734b658..7f84dd85 100644 --- a/lib/hachoir/parser/misc/msoffice.py +++ b/lib/hachoir/parser/misc/msoffice.py @@ -11,8 +11,8 @@ Creation: 8 
january 2005 """ from hachoir.field import (SubFile, FieldSet, - UInt8, UInt16, UInt32, Enum, String, CString, - Bits, RawBytes, CustomFragment) + UInt8, UInt16, UInt32, Enum, String, CString, + Bits, RawBytes, CustomFragment) from hachoir.core.text_handler import textHandler, hexadecimal from hachoir.parser.misc.ole2_util import OLE2FragmentParser, RawParser from hachoir.parser.misc.msoffice_summary import Summary, DocSummary, CompObj diff --git a/lib/hachoir/parser/misc/msoffice_summary.py b/lib/hachoir/parser/misc/msoffice_summary.py index 2dcf76b6..34c7a353 100644 --- a/lib/hachoir/parser/misc/msoffice_summary.py +++ b/lib/hachoir/parser/misc/msoffice_summary.py @@ -9,11 +9,11 @@ Documents """ from hachoir.core.endian import BIG_ENDIAN from hachoir.field import (FieldSet, ParserError, - SeekableFieldSet, - Bit, Bits, NullBits, - UInt8, UInt16, UInt32, TimestampWin64, TimedeltaWin64, Enum, - Bytes, RawBytes, NullBytes, PaddingBits, String, - Int8, Int32, Float32, Float64, PascalString32) + SeekableFieldSet, + Bit, Bits, NullBits, + UInt8, UInt16, UInt32, TimestampWin64, TimedeltaWin64, Enum, + Bytes, RawBytes, NullBytes, PaddingBits, String, + Int8, Int32, Float32, Float64, PascalString32) from hachoir.core.text_handler import textHandler, hexadecimal, filesizeHandler from hachoir.core.tools import createDict, paddingSize from hachoir.parser.common.win32 import GUID, PascalStringWin32, CODEPAGE_CHARSET diff --git a/lib/hachoir/parser/misc/mstask.py b/lib/hachoir/parser/misc/mstask.py index f867c737..92a582a2 100644 --- a/lib/hachoir/parser/misc/mstask.py +++ b/lib/hachoir/parser/misc/mstask.py @@ -12,8 +12,8 @@ http://technet.microsoft.com/en-us/library/bb490996.aspx from hachoir.parser import Parser from hachoir.field import (FieldSet, RootSeekableFieldSet, - UInt32, UInt16, - Bit, RawBits, RawBytes, Enum) + UInt32, UInt16, + Bit, RawBits, RawBytes, Enum) from hachoir.core.endian import LITTLE_ENDIAN from hachoir.parser.common.win32 import PascalStringWin16, 
GUID diff --git a/lib/hachoir/parser/misc/ole2.py b/lib/hachoir/parser/misc/ole2.py index 74b2168e..bfc1f7d8 100644 --- a/lib/hachoir/parser/misc/ole2.py +++ b/lib/hachoir/parser/misc/ole2.py @@ -211,7 +211,7 @@ class OLE2_File(HachoirParser, RootSeekableFieldSet): return "Unknown major version (%s)" % self["header/ver_maj"].value if self["header/endian"].value not in (b"\xFF\xFE", b"\xFE\xFF"): return "Unknown endian (%s)" % self["header/endian"].raw_display - if not(MIN_BIG_BLOCK_LOG2 <= self["header/bb_shift"].value <= MAX_BIG_BLOCK_LOG2): + if not (MIN_BIG_BLOCK_LOG2 <= self["header/bb_shift"].value <= MAX_BIG_BLOCK_LOG2): return "Invalid (log 2 of) big block size (%s)" % self["header/bb_shift"].value if self["header/bb_shift"].value < self["header/sb_shift"].value: return "Small block size (log2=%s) is bigger than big block size (log2=%s)!" \ diff --git a/lib/hachoir/parser/misc/pcf.py b/lib/hachoir/parser/misc/pcf.py index 07326e72..418cfe40 100644 --- a/lib/hachoir/parser/misc/pcf.py +++ b/lib/hachoir/parser/misc/pcf.py @@ -12,8 +12,8 @@ Creation date: 2007-03-20 from hachoir.parser import Parser from hachoir.field import (FieldSet, Enum, - UInt8, UInt32, Bytes, RawBytes, NullBytes, - Bit, Bits, PaddingBits, CString) + UInt8, UInt32, Bytes, RawBytes, NullBytes, + Bit, Bits, PaddingBits, CString) from hachoir.core.endian import LITTLE_ENDIAN, BIG_ENDIAN from hachoir.core.text_handler import textHandler, hexadecimal, filesizeHandler from hachoir.core.tools import paddingSize diff --git a/lib/hachoir/parser/misc/pdf.py b/lib/hachoir/parser/misc/pdf.py index 2fccc6a1..73c491e3 100644 --- a/lib/hachoir/parser/misc/pdf.py +++ b/lib/hachoir/parser/misc/pdf.py @@ -44,7 +44,7 @@ def getElementEnd(s, limit=b' ', offset=0): class PDFNumber(Field): - LIMITS = [b'[', b'/', b'\x0D', b']'] + LIMITS = [b'[', b'/', b'\x0A', b'\x0D', b'>', b']'] """ sprintf("%i") or sprinf("%.?f") """ @@ -81,18 +81,18 @@ class PDFString(Field): def __init__(self, parent, name, desc=None): 
Field.__init__(self, parent, name, description=desc) - val = "" + val = bytearray() count = 1 off = 1 while not parent.eof: char = parent.stream.readBytes(self.absolute_address + 8 * off, 1) # Non-ASCII - if not char.isalpha() or char == '\\': + if not char.isalpha() or char == b'\\': off += 1 continue - if char == '(': + if char == b'(': count += 1 - if char == ')': + if char == b')': count -= 1 # Parenthesis block = 0 => end of string if count == 0: @@ -101,13 +101,15 @@ class PDFString(Field): # Add it to the string val += char + off += 1 + val = bytes(val) self._size = 8 * off self.createValue = lambda: val class PDFName(Field): - LIMITS = [b'[', b'/', b'<', b']'] + LIMITS = [b'[', b'/', b'<', b'>', b']'] """ String starting with '/', where characters may be written using their ASCII code (exemple: '#20' would be ' ' @@ -145,7 +147,7 @@ class PDFID(Field): def __init__(self, parent, name, desc=None): Field.__init__(self, parent, name, description=desc) - self._size = 8 * getElementEnd(parent, '>') + self._size = 8 * getElementEnd(parent, b'>') self.createValue = lambda: parent.stream.readBytes( self.absolute_address + 8, (self._size // 8) - 1) @@ -254,7 +256,7 @@ def parsePDFType(s): else: # First parse size size = getElementEnd(s) - for limit in ['/', '>', '<']: + for limit in [b'/', b'>', b'<']: other_size = getElementEnd(s, limit) if other_size is not None: other_size -= 1 @@ -424,7 +426,7 @@ class Catalog(FieldSet): new_length = getElementEnd(self, limit) if length is None or (new_length is not None and new_length - len(limit) < length): length = new_length - len(limit) - yield String(self, "object", length, strip=' ') + yield String(self, "object", length, strip=' \n') if self.stream.readBytes(self.absolute_address + self.current_size, 2) == b'<<': yield PDFDictionary(self, "key_list") # End of catalog: this one has "endobj" @@ -441,9 +443,9 @@ class Trailer(FieldSet): yield RawBytes(self, "marker", len(self.MAGIC)) yield WhiteSpace(self, "sep[]") yield 
String(self, "start_attribute_marker", 2) + yield WhiteSpace(self, "sep[]") addr = self.absolute_address + self.current_size while self.stream.readBytes(addr, 2) != b'>>': - yield WhiteSpace(self, "sep[]") t = PDFName(self, "type[]") yield t name = t.value.decode() @@ -462,6 +464,7 @@ class Trailer(FieldSet): yield PDFDictionary(self, "decrypt") else: raise ParserError("Don't know trailer type '%s'" % name) + yield WhiteSpace(self, "sep[]") addr = self.absolute_address + self.current_size yield String(self, "end_attribute_marker", 2) yield LineEnd(self, "line_end[]") diff --git a/lib/hachoir/parser/misc/pifv.py b/lib/hachoir/parser/misc/pifv.py index 7daed795..0d9ec8f2 100644 --- a/lib/hachoir/parser/misc/pifv.py +++ b/lib/hachoir/parser/misc/pifv.py @@ -7,8 +7,8 @@ Creation date: 08 jul 2007 from hachoir.parser import Parser from hachoir.field import (FieldSet, - UInt8, UInt16, UInt24, UInt32, UInt64, Enum, - CString, String, PaddingBytes, RawBytes, NullBytes) + UInt8, UInt16, UInt24, UInt32, UInt64, Enum, + CString, String, PaddingBytes, RawBytes, NullBytes) from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.tools import paddingSize, humanFilesize from hachoir.parser.common.win32 import GUID diff --git a/lib/hachoir/parser/misc/torrent.py b/lib/hachoir/parser/misc/torrent.py index eb877fa3..a718c245 100644 --- a/lib/hachoir/parser/misc/torrent.py +++ b/lib/hachoir/parser/misc/torrent.py @@ -9,7 +9,7 @@ Author: Christophe Gisquet from hachoir.parser import Parser from hachoir.field import (FieldSet, ParserError, - String, RawBytes) + String, RawBytes) from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.tools import makePrintable, timestampUNIX, humanFilesize diff --git a/lib/hachoir/parser/misc/ttf.py b/lib/hachoir/parser/misc/ttf.py index 41682ef8..ca5e7c49 100644 --- a/lib/hachoir/parser/misc/ttf.py +++ b/lib/hachoir/parser/misc/ttf.py @@ -2,6 +2,8 @@ TrueType Font parser. 
Documents: + - "The OpenType Specification" + https://docs.microsoft.com/en-us/typography/opentype/spec/ - "An Introduction to TrueType Fonts: A look inside the TTF format" written by "NRSI: Computers & Writing Systems" http://scripts.sil.org/cms/scripts/page.php?site_id=nrsi&item_id=IWS-Chapter08 @@ -11,11 +13,26 @@ Creation date: 2007-02-08 """ from hachoir.parser import Parser -from hachoir.field import (FieldSet, ParserError, - UInt16, UInt32, Bit, Bits, - PaddingBits, NullBytes, - String, RawBytes, Bytes, Enum, - TimestampMac32) +from hachoir.field import ( + FieldSet, + ParserError, + UInt8, + UInt16, + UInt24, + UInt32, + Int16, + Bit, + Bits, + PaddingBits, + NullBytes, + String, + RawBytes, + Bytes, + Enum, + TimestampMac32, + GenericVector, + PascalString8, +) from hachoir.core.endian import BIG_ENDIAN from hachoir.core.text_handler import textHandler, hexadecimal, filesizeHandler @@ -69,11 +86,65 @@ CHARSET_MAP = { 3: {1: "UTF-16-BE"}, } +PERMISSIONS = { + 0: "Installable embedding", + 2: "Restricted License embedding", + 4: "Preview & Print embedding", + 8: "Editable embedding", +} -class TableHeader(FieldSet): +FWORD = Int16 +UFWORD = UInt16 + + +class Tag(String): + def __init__(self, parent, name, description=None): + String.__init__(self, parent, name, 4, description) + + +class Version16Dot16(FieldSet): + static_size = 32 def createFields(self): - yield String(self, "tag", 4) + yield UInt16(self, "major") + yield UInt16(self, "minor") + + def createValue(self): + return float("%u.%x" % (self["major"].value, self["minor"].value)) + + +class Fixed(FieldSet): + def createFields(self): + yield UInt16(self, "int_part") + yield UInt16(self, "float_part") + + def createValue(self): + return self["int_part"].value + float(self["float_part"].value) / 65536 + + +class Tuple(FieldSet): + def __init__(self, parent, name, axisCount): + super().__init__(parent, name, description="Tuple Record") + self.axisCount = axisCount + + def createFields(self): + for _ in 
range(self.axisCount): + yield (Fixed(self, "coordinate[]")) + + +class F2DOT14(FieldSet): + static_size = 16 + + def createFields(self): + yield Int16(self, "int_part") + + def createValue(self): + return self["int_part"].value / 16384 + + +class TableHeader(FieldSet): + def createFields(self): + yield Tag(self, "tag") yield textHandler(UInt32(self, "checksum"), hexadecimal) yield UInt32(self, "offset") yield filesizeHandler(UInt32(self, "size")) @@ -83,7 +154,6 @@ class TableHeader(FieldSet): class NameHeader(FieldSet): - def createFields(self): yield Enum(UInt16(self, "platformID"), PLATFORM_NAME) yield UInt16(self, "encodingID") @@ -135,7 +205,7 @@ def parseFontHeader(self): yield Bits(self, "adobe", 2, "(used by Adobe)") yield UInt16(self, "unit_per_em", "Units per em") - if not(16 <= self["unit_per_em"].value <= 16384): + if not (16 <= self["unit_per_em"].value <= 16384): raise ParserError("TTF: Invalid unit/em value") yield UInt32(self, "created_high") yield TimestampMac32(self, "created") @@ -162,17 +232,273 @@ def parseFontHeader(self): yield UInt16(self, "glyph_format", "(=0)") +class AxisValueMap(FieldSet): + static_size = 32 + + def createFields(self): + yield F2DOT14(self, "fromCoordinate") + yield F2DOT14(self, "toCoordinate") + + +class SegmentMaps(FieldSet): + def createFields(self): + yield UInt16( + self, "positionMapCount", "The number of correspondence pairs for this axis" + ) + for _ in range(self["positionMapCount"].value): + yield (AxisValueMap(self, "axisValueMaps[]")) + + +def parseAvar(self): + yield UInt16(self, "majorVersion", "Major version") + yield UInt16(self, "minorVersion", "Minor version") + yield PaddingBits(self, "reserved[]", 16) + yield UInt16(self, "axisCount", "The number of variation axes for this font") + for _ in range(self["axisCount"].value): + yield (SegmentMaps(self, "segmentMaps[]")) + + +class VariationAxisRecord(FieldSet): + def createFields(self): + yield Tag(self, "axisTag", "Tag identifying the design variation 
for the axis") + yield Fixed(self, "minValue", "The minimum coordinate value for the axis") + yield Fixed(self, "defaultValue", "The default coordinate value for the axis") + yield Fixed(self, "maxValue", "The maximum coordinate value for the axis") + yield PaddingBits(self, "reservedFlags", 15) + yield Bit( + self, "hidden", "The axis should not be exposed directly in user interfaces" + ) + yield UInt16( + self, + "axisNameID", + "The name ID for entries in the 'name' table that provide a display name for this axis", + ) + + +class InstanceRecord(FieldSet): + def __init__(self, parent, name, axisCount, hasPSNameID=False): + super().__init__(parent, name, description="Instance record") + self.axisCount = axisCount + self.hasPSNameID = hasPSNameID + + def createFields(self): + yield UInt16( + self, "subfamilyNameID", "Name ID for subfamily names for this instance" + ) + yield PaddingBits(self, "reservedFlags", 16) + yield Tuple(self, "coordinates", axisCount=self.axisCount) + if self.hasPSNameID: + yield UInt16( + self, + "postScriptNameID", + "Name ID for PostScript names for this instance", + ) + + +def parseFvar(self): + yield UInt16(self, "majorVersion", "Major version") + yield UInt16(self, "minorVersion", "Minor version") + yield UInt16( + self, "axisArrayOffset", "Offset to the start of the VariationAxisRecord array." 
+ ) + yield PaddingBits(self, "reserved[]", 16) + yield UInt16(self, "axisCount", "The number of variation axes for this font") + yield UInt16(self, "axisSize", "The size in bytes of each VariationAxisRecord") + yield UInt16(self, "instanceCount", "The number of named instances for this font") + yield UInt16(self, "instanceSize", "The size in bytes of each InstanceRecord") + if self["axisArrayOffset"].value > 16: + yield PaddingBits(self, "padding", 8 * (self["axisArrayOffset"].value - 16)) + for _ in range(self["axisCount"].value): + yield (VariationAxisRecord(self, "axes[]")) + for _ in range(self["instanceCount"].value): + yield ( + InstanceRecord( + self, + "instances[]", + axisCount=self["axisCount"].value, + hasPSNameID=( + self["instanceSize"].value == (2 * self["axisCount"].value + 6) + ), + ) + ) + + +class EncodingRecord(FieldSet): + static_size = 64 + + def createFields(self): + yield Enum(UInt16(self, "platformID"), PLATFORM_NAME) + yield UInt16(self, "encodingID") + self.offset = UInt32(self, "subtableOffset") + yield self.offset + + +class CmapTable0(FieldSet): + def createFields(self): + yield UInt16(self, "format", "Table format") + yield UInt16(self, "length", "Length in bytes") + yield UInt16(self, "language", "Language ID") + yield GenericVector(self, "mapping", 256, UInt8) + + +class CmapTable4(FieldSet): + def createFields(self): + yield UInt16(self, "format", "Table format") + yield UInt16(self, "length", "Length in bytes") + yield UInt16(self, "language", "Language ID") + yield UInt16(self, "segCountX2", "Twice the number of segments") + segments = self["segCountX2"].value // 2 + yield UInt16(self, "searchRange") + yield UInt16(self, "entrySelector") + yield UInt16(self, "rangeShift") + yield GenericVector(self, "endCode", segments, UInt16) + yield PaddingBits(self, "reserved[]", 16) + yield GenericVector(self, "startCode", segments, UInt16) + yield GenericVector(self, "idDelta", segments, Int16) + yield GenericVector(self, "idRangeOffsets", 
segments, UInt16) + remainder = (self["length"].value - (self.current_size / 8)) / 2 + if remainder: + yield GenericVector(self, "glyphIdArray", remainder, UInt16) + + +class CmapTable6(FieldSet): + def createFields(self): + yield UInt16(self, "format", "Table format") + yield UInt16(self, "length", "Length in bytes") + yield UInt16(self, "language", "Language ID") + yield UInt16(self, "firstCode", "First character code of subrange") + yield UInt16(self, "entryCount", "Number of character codes in subrange") + yield GenericVector(self, "glyphIdArray", self["entryCount"].value, UInt16) + + +class SequentialMapGroup(FieldSet): + def createFields(self): + yield UInt32(self, "startCharCode", "First character code in this group") + yield UInt32(self, "endCharCode", "First character code in this group") + yield UInt32( + self, + "startGlyphID", + "Glyph index corresponding to the starting character code", + ) + + +class CmapTable12(FieldSet): + def createFields(self): + yield UInt16(self, "format", "Table format") + yield PaddingBits(self, "reserved[]", 16) + yield UInt32(self, "length", "Length in bytes") + yield UInt32(self, "language", "Language ID") + yield UInt32(self, "numGroups", "Number of groupings which follow") + for i in range(self["numGroups"].value): + yield SequentialMapGroup(self, "mapgroup[]") + + +class VariationSelector(FieldSet): + def createFields(self): + yield UInt24(self, "varSelector", "Variation selector") + yield UInt32(self, "defaultUVSOffset", "Offset to default UVS table") + yield UInt32(self, "nonDefaultUVSOffset", "Offset to non-default UVS table") + + +class CmapTable14(FieldSet): + def createFields(self): + yield UInt16(self, "format", "Table format") + yield UInt32(self, "length", "Length in bytes") + yield UInt32( + self, "numVarSelectorRecords", "Number of variation selector records" + ) + for i in range(self["numVarSelectorRecords"].value): + yield VariationSelector(self, "variationSelector[]") + + +def parseCmap(self): + yield 
UInt16(self, "version") + numTables = UInt16(self, "numTables", "Number of encoding tables") + yield numTables + encodingRecords = [] + for index in range(numTables.value): + entry = EncodingRecord(self, "encodingRecords[]") + yield entry + encodingRecords.append(entry) + encodingRecords.sort(key=lambda field: field["subtableOffset"].value) + last = None + for er in encodingRecords: + offset = er["subtableOffset"].value + if last and last == offset: + continue + last = offset + + # Add padding if any + padding = self.seekByte(offset, relative=True, null=False) + if padding: + yield padding + format = UInt16(self, "format").value + if format == 0: + yield CmapTable0(self, "cmap table format 0") + elif format == 4: + yield CmapTable4(self, "cmap table format 4") + elif format == 6: + yield CmapTable6(self, "cmap table format 6") + elif format == 12: + yield CmapTable12(self, "cmap table format 12") + elif format == 14: + yield CmapTable14(self, "cmap table format 14") + + +class SignatureRecord(FieldSet): + def createFields(self): + yield UInt16(self, "format", "Table format") + yield UInt16(self, "length", "Length of signature") + yield UInt16(self, "signatureBlockOffset", "Offset to signature block") + + +class SignatureBlock(FieldSet): + def createFields(self): + yield PaddingBits(self, "reserved[]", 32) + yield UInt32( + self, + "length", + "Length (in bytes) of the PKCS#7 packet in the signature field", + ) + yield String(self, "signature", self["length"].value, "Signature block") + + +def parseDSIG(self): + yield UInt32(self, "version") + yield UInt16(self, "numSignatures", "Number of signatures in the table") + yield Bit(self, "flag", "Cannot be resigned") + yield PaddingBits(self, "reserved[]", 7) + entries = [] + for i in range(self["numSignatures"].value): + record = SignatureRecord(self, "signatureRecords[]") + entries.append(record) + yield record + entries.sort(key=lambda field: field["signatureBlockOffset"].value) + last = None + for entry in entries: + 
offset = entry["signatureBlockOffset"].value + if last and last == offset: + continue + last = offset + # Add padding if any + padding = self.seekByte(offset, relative=True, null=False) + if padding: + yield padding + + padding = (self.size - self.current_size) // 8 + if padding: + yield NullBytes(self, "padding_end", padding) + + def parseNames(self): # Read header yield UInt16(self, "format") if self["format"].value != 0: - raise ParserError("TTF (names): Invalid format (%u)" % - self["format"].value) + raise ParserError("TTF (names): Invalid format (%u)" % self["format"].value) yield UInt16(self, "count") yield UInt16(self, "offset") if MAX_NAME_COUNT < self["count"].value: - raise ParserError("Invalid number of names (%s)" - % self["count"].value) + raise ParserError("Invalid number of names (%s)" % self["count"].value) # Read name index entries = [] @@ -208,17 +534,210 @@ def parseNames(self): # Read value size = entry["length"].value if size: - yield String(self, "value[]", size, entry.description, charset=entry.getCharset()) + yield String( + self, "value[]", size, entry.description, charset=entry.getCharset() + ) padding = (self.size - self.current_size) // 8 if padding: yield NullBytes(self, "padding_end", padding) +def parseMaxp(self): + # Read header + yield Version16Dot16(self, "format", "format version") + yield UInt16(self, "numGlyphs", "Number of glyphs") + if self["format"].value >= 1: + yield UInt16(self, "maxPoints", "Maximum points in a non-composite glyph") + yield UInt16(self, "maxContours", "Maximum contours in a non-composite glyph") + yield UInt16(self, "maxCompositePoints", "Maximum points in a composite glyph") + yield UInt16( + self, "maxCompositeContours", "Maximum contours in a composite glyph" + ) + yield UInt16(self, "maxZones", "Do instructions use the twilight zone?") + yield UInt16(self, "maxTwilightPoints", "Maximum points used in Z0") + yield UInt16(self, "maxStorage", "Number of Storage Area locations") + yield UInt16(self, 
"maxFunctionDefs", "Number of function definitions") + yield UInt16(self, "maxInstructionDefs", "Number of instruction definitions") + yield UInt16(self, "maxStackElements", "Maximum stack depth") + yield UInt16( + self, "maxSizeOfInstructions", "Maximum byte count for glyph instructions" + ) + yield UInt16( + self, + "maxComponentElements", + "Maximum number of components at glyph top level", + ) + yield UInt16(self, "maxComponentDepth", "Maximum level of recursion") + + +def parseHhea(self): + yield UInt16(self, "majorVersion", "Major version") + yield UInt16(self, "minorVersion", "Minor version") + yield FWORD(self, "ascender", "Typographic ascent") + yield FWORD(self, "descender", "Typographic descent") + yield FWORD(self, "lineGap", "Typographic linegap") + yield UFWORD(self, "advanceWidthMax", "Maximum advance width") + yield FWORD(self, "minLeftSideBearing", "Minimum left sidebearing value") + yield FWORD(self, "minRightSideBearing", "Minimum right sidebearing value") + yield FWORD(self, "xMaxExtent", "Maximum X extent") + yield Int16(self, "caretSlopeRise", "Caret slope rise") + yield Int16(self, "caretSlopeRun", "Caret slope run") + yield Int16(self, "caretOffset", "Caret offset") + yield GenericVector(self, "reserved", 4, Int16) + yield Int16(self, "metricDataFormat", "Metric data format") + yield UInt16(self, "numberOfHMetrics", "Number of horizontal metrics") + + +class fsType(FieldSet): + def createFields(self): + yield Enum(Bits(self, "usage_permissions", 4), PERMISSIONS) + yield PaddingBits(self, "reserved[]", 4) + yield Bit(self, "no_subsetting", "Font may not be subsetted prior to embedding") + yield Bit( + self, + "bitmap_embedding", + "Only bitmaps contained in the font may be embedded", + ) + yield PaddingBits(self, "reserved[]", 6) + + +def parseOS2(self): + yield UInt16(self, "version", "Table version") + yield Int16(self, "xAvgCharWidth") + yield UInt16(self, "usWeightClass") + yield UInt16(self, "usWidthClass") + yield fsType(self, "fsType") 
+ yield Int16(self, "ySubscriptXSize") + yield Int16(self, "ySubscriptYSize") + yield Int16(self, "ySubscriptXOffset") + yield Int16(self, "ySubscriptYOffset") + yield Int16(self, "ySuperscriptXSize") + yield Int16(self, "ySuperscriptYSize") + yield Int16(self, "ySuperscriptXOffset") + yield Int16(self, "ySuperscriptYOffset") + yield Int16(self, "yStrikeoutSize") + yield Int16(self, "yStrikeoutPosition") + yield Int16(self, "sFamilyClass") + yield GenericVector(self, "panose", 10, UInt8) + yield UInt32(self, "ulUnicodeRange1") + yield UInt32(self, "ulUnicodeRange2") + yield UInt32(self, "ulUnicodeRange3") + yield UInt32(self, "ulUnicodeRange4") + yield Tag(self, "achVendID", "Vendor ID") + yield UInt16(self, "fsSelection") + yield UInt16(self, "usFirstCharIndex") + yield UInt16(self, "usLastCharIndex") + yield Int16(self, "sTypoAscender") + yield Int16(self, "sTypoDescender") + yield Int16(self, "sTypoLineGap") + yield UInt16(self, "usWinAscent") + yield UInt16(self, "usWinDescent") + if self["version"].value >= 1: + yield UInt32(self, "ulCodePageRange1") + yield UInt32(self, "ulCodePageRange2") + if self["version"].value >= 2: + yield Int16(self, "sxHeight") + yield Int16(self, "sCapHeight") + yield UInt16(self, "usDefaultChar") + yield UInt16(self, "usBreakChar") + yield UInt16(self, "usMaxContext") + if self["version"].value >= 5: + yield UInt16(self, "usLowerOpticalPointSize") + yield UInt16(self, "usUpperOpticalPointSize") + + +def parsePost(self): + yield Version16Dot16(self, "version", "Table version") + yield Fixed( + self, + "italicAngle", + "Italic angle in counter-clockwise degrees from the vertical.", + ) + yield FWORD(self, "underlinePosition", "Top of underline to baseline") + yield FWORD(self, "underlineThickness", "Suggested underline thickness") + yield UInt32(self, "isFixedPitch", "Is the font fixed pitch?") + yield UInt32(self, "minMemType42", "Minimum memory usage (OpenType)") + yield UInt32(self, "maxMemType42", "Maximum memory usage 
(OpenType)") + yield UInt32(self, "minMemType1", "Minimum memory usage (Type 1)") + yield UInt32(self, "maxMemType1", "Maximum memory usage (Type 1)") + if self["version"].value == 2.0: + yield UInt16(self, "numGlyphs") + indices = GenericVector( + self, + "Array of indices into the string data", + self["numGlyphs"].value, + UInt16, + "glyphNameIndex", + ) + yield indices + for gid, index in enumerate(indices): + if index.value >= 258: + yield PascalString8(self, "glyphname[%i]" % gid) + elif self["version"].value == 2.0: + yield UInt16(self, "numGlyphs") + indices = GenericVector( + self, + "Difference between graphic index and standard order of glyph", + self["numGlyphs"].value, + UInt16, + "offset", + ) + yield indices + + +# This is work-in-progress until I work out good ways to do random-access on offsets +parseScriptList = ( + parseFeatureList +) = parseLookupList = parseFeatureVariationsTable = lambda x: None + + +def parseGSUB(self): + yield UInt16(self, "majorVersion", "Major version") + yield UInt16(self, "minorVersion", "Minor version") + SUBTABLES = [ + ("script list", parseScriptList), + ("feature list", parseFeatureList), + ("lookup list", parseLookupList), + ] + offsets = [] + for description, parser in SUBTABLES: + name = description.title().replace(" ", "") + offset = UInt16( + self, name[0].lower() + name[1:], "Offset to %s table" % description + ) + yield offset + offsets.append((offset.value, parser)) + if self["min_ver"].value == 1: + offset = UInt32( + self, "featureVariationsOffset", "Offset to feature variations table" + ) + offsets.append((offset.value, parseFeatureVariationsTable)) + + offsets.sort(key=lambda field: field[0]) + padding = self.seekByte(offsets[0][0], null=True) + if padding: + yield padding + lastOffset, first_parser = offsets[0] + for offset, parser in offsets[1:]: + # yield parser(self) + yield RawBytes(self, "content", offset - lastOffset) + lastOffset = offset + + class Table(FieldSet): TAG_INFO = { + "DSIG": ("DSIG", 
"Digital Signature", parseDSIG), + "GSUB": ("GSUB", "Glyph Substitutions", parseGSUB), + "avar": ("avar", "Axis variation table", parseAvar), + "cmap": ("cmap", "Character to Glyph Index Mapping", parseCmap), + "fvar": ("fvar", "Font variations table", parseFvar), "head": ("header", "Font header", parseFontHeader), + "hhea": ("hhea", "Horizontal Header", parseHhea), + "maxp": ("maxp", "Maximum Profile", parseMaxp), "name": ("names", "Names", parseNames), + "OS/2": ("OS_2", "OS/2 and Windows Metrics", parseOS2), + "post": ("post", "PostScript", parsePost), } def __init__(self, parent, name, table, **kw): @@ -251,10 +770,15 @@ class TrueTypeFontFile(Parser): } def validate(self): - if self["maj_ver"].value != 1: - return "Invalid major version (%u)" % self["maj_ver"].value - if self["min_ver"].value != 0: - return "Invalid minor version (%u)" % self["min_ver"].value + if self["maj_ver"].value == 1 and self["min_ver"].value == 0: + pass + elif self["maj_ver"].value == 0x4F54 and self["min_ver"].value == 0x544F: + pass + else: + return "Invalid version (%u.%u)" % ( + self["maj_ver"].value, + self["min_ver"].value, + ) if not (MIN_NB_TABLE <= self["nb_table"].value <= MAX_NB_TABLE): return "Invalid number of table (%u)" % self["nb_table"].value return True diff --git a/lib/hachoir/parser/misc/word_2.py b/lib/hachoir/parser/misc/word_2.py index 6f1ea30c..5569218b 100644 --- a/lib/hachoir/parser/misc/word_2.py +++ b/lib/hachoir/parser/misc/word_2.py @@ -6,10 +6,10 @@ Documents: """ from hachoir.field import (FieldSet, Enum, - Bit, Bits, - UInt8, Int16, UInt16, UInt32, Int32, - NullBytes, Bytes, RawBytes, - DateTimeMSDOS32) + Bit, Bits, + UInt8, Int16, UInt16, UInt32, Int32, + NullBytes, Bytes, RawBytes, + DateTimeMSDOS32) from hachoir.core.endian import LITTLE_ENDIAN from hachoir.parser.misc.ole2_util import OLE2FragmentParser from hachoir.core.tools import paddingSize diff --git a/lib/hachoir/parser/misc/word_doc.py b/lib/hachoir/parser/misc/word_doc.py index 
400e24a3..eb7efcf1 100644 --- a/lib/hachoir/parser/misc/word_doc.py +++ b/lib/hachoir/parser/misc/word_doc.py @@ -12,10 +12,10 @@ Documents: """ from hachoir.field import (FieldSet, Enum, - Bit, Bits, - UInt8, Int16, UInt16, UInt32, Int32, - NullBytes, Bytes, RawBytes, PascalString8, CString, String, - TimestampMac32, TimestampWin64) + Bit, Bits, + UInt8, Int16, UInt16, UInt32, Int32, + NullBytes, Bytes, RawBytes, PascalString8, CString, String, + TimestampMac32, TimestampWin64) from hachoir.core.text_handler import displayHandler from hachoir.core.endian import LITTLE_ENDIAN from hachoir.parser import guessParser diff --git a/lib/hachoir/parser/network/tcpdump.py b/lib/hachoir/parser/network/tcpdump.py index c0d11e16..52a84b6e 100644 --- a/lib/hachoir/parser/network/tcpdump.py +++ b/lib/hachoir/parser/network/tcpdump.py @@ -14,9 +14,9 @@ Creation: 23 march 2006 from hachoir.parser import Parser from hachoir.field import (FieldSet, ParserError, - Enum, Bytes, NullBytes, RawBytes, - UInt8, UInt16, UInt32, Int32, TimestampUnix32, - Bit, Bits, NullBits) + Enum, Bytes, NullBytes, RawBytes, + UInt8, UInt16, UInt32, Int32, TimestampUnix32, + Bit, Bits, NullBits) from hachoir.core.endian import NETWORK_ENDIAN, LITTLE_ENDIAN from hachoir.core.tools import humanDuration from hachoir.core.text_handler import textHandler, hexadecimal diff --git a/lib/hachoir/parser/parser.py b/lib/hachoir/parser/parser.py index 39015b90..a4ce8576 100644 --- a/lib/hachoir/parser/parser.py +++ b/lib/hachoir/parser/parser.py @@ -13,7 +13,7 @@ class HachoirParser(object): """ A parser is the root of all other fields. It create first level of fields and have special attributes and methods: - - tags: dictionnary with keys: + - tags: dictionary with keys: - "file_ext": classical file extensions (string or tuple of strings) ; - "mime": MIME type(s) (string or tuple of strings) ; - "description": String describing the parser. 
diff --git a/lib/hachoir/parser/program/elf.py b/lib/hachoir/parser/program/elf.py index c11b3407..8a01c2d0 100644 --- a/lib/hachoir/parser/program/elf.py +++ b/lib/hachoir/parser/program/elf.py @@ -10,8 +10,8 @@ Reference: from hachoir.parser import HachoirParser from hachoir.field import (RootSeekableFieldSet, FieldSet, Bit, NullBits, RawBits, - UInt8, UInt16, UInt32, UInt64, Enum, - String, RawBytes, Bytes) + UInt8, UInt16, UInt32, UInt64, Enum, + String, RawBytes, Bytes) from hachoir.core.text_handler import textHandler, hexadecimal from hachoir.core.endian import LITTLE_ENDIAN, BIG_ENDIAN diff --git a/lib/hachoir/parser/program/exe.py b/lib/hachoir/parser/program/exe.py index d043c07b..d0f8ad01 100644 --- a/lib/hachoir/parser/program/exe.py +++ b/lib/hachoir/parser/program/exe.py @@ -12,14 +12,14 @@ Creation date: 2006-08-13 from hachoir.parser import HachoirParser from hachoir.core.endian import LITTLE_ENDIAN from hachoir.field import (FieldSet, RootSeekableFieldSet, - UInt16, UInt32, String, - RawBytes, PaddingBytes) + UInt16, UInt32, String, + RawBytes, PaddingBytes) from hachoir.core.text_handler import textHandler, hexadecimal from hachoir.parser.program.exe_ne import NE_Header from hachoir.parser.program.exe_pe import PE_Header, PE_OptHeader, SectionHeader from hachoir.parser.program.exe_res import PE_Resource, NE_VersionInfoNode -MAX_NB_SECTION = 50 +MAX_NB_SECTION = 100 class MSDosHeader(FieldSet): diff --git a/lib/hachoir/parser/program/exe_ne.py b/lib/hachoir/parser/program/exe_ne.py index 82944980..e04d7ae5 100644 --- a/lib/hachoir/parser/program/exe_ne.py +++ b/lib/hachoir/parser/program/exe_ne.py @@ -1,6 +1,6 @@ from hachoir.field import (FieldSet, - Bit, UInt8, UInt16, UInt32, Bytes, - PaddingBits, PaddingBytes, NullBits, NullBytes) + Bit, UInt8, UInt16, UInt32, Bytes, + PaddingBits, PaddingBytes, NullBits, NullBytes) from hachoir.core.text_handler import textHandler, hexadecimal, filesizeHandler diff --git a/lib/hachoir/parser/program/exe_pe.py 
b/lib/hachoir/parser/program/exe_pe.py index 24d14e42..576d1a08 100644 --- a/lib/hachoir/parser/program/exe_pe.py +++ b/lib/hachoir/parser/program/exe_pe.py @@ -1,7 +1,7 @@ from hachoir.field import (FieldSet, ParserError, - Bit, UInt8, UInt16, UInt32, TimestampUnix32, - Bytes, String, Enum, - PaddingBytes, PaddingBits, NullBytes, NullBits) + Bit, UInt8, UInt16, UInt32, UInt64, TimestampUnix32, + Bytes, String, Enum, + PaddingBytes, PaddingBits, NullBytes, NullBits) from hachoir.core.text_handler import textHandler, hexadecimal, filesizeHandler @@ -175,10 +175,13 @@ class PE_OptHeader(FieldSet): } def createFields(self): - yield UInt16(self, "signature", "PE optional header signature (0x010b)") - # TODO: Support PE32+ (signature=0x020b) - if self["signature"].value != 0x010b: + yield UInt16(self, "signature", "PE optional header signature (0x010b | 0x020b)") + + if self["signature"].value != 0x010b and self["signature"].value != 0x020b: raise ParserError("Invalid PE optional header signature") + is_pe32plus = self["signature"].value == 0x020b + VarUInt = UInt64 if is_pe32plus else UInt32 + yield UInt8(self, "maj_lnk_ver", "Major linker version") yield UInt8(self, "min_lnk_ver", "Minor linker version") yield filesizeHandler(UInt32(self, "size_code", "Size of code")) @@ -186,8 +189,9 @@ class PE_OptHeader(FieldSet): yield filesizeHandler(UInt32(self, "size_uninit_data", "Size of uninitialized data")) yield textHandler(UInt32(self, "entry_point", "Address (RVA) of the code entry point"), hexadecimal) yield textHandler(UInt32(self, "base_code", "Base (RVA) of code"), hexadecimal) - yield textHandler(UInt32(self, "base_data", "Base (RVA) of data"), hexadecimal) - yield textHandler(UInt32(self, "image_base", "Image base (RVA)"), hexadecimal) + if not is_pe32plus: + yield textHandler(UInt32(self, "base_data", "Base (RVA) of data"), hexadecimal) + yield textHandler(VarUInt(self, "image_base", "Image base (RVA)"), hexadecimal) yield filesizeHandler(UInt32(self, 
"sect_align", "Section alignment")) yield filesizeHandler(UInt32(self, "file_align", "File alignment")) yield UInt16(self, "maj_os_ver", "Major OS version") @@ -202,10 +206,10 @@ class PE_OptHeader(FieldSet): yield textHandler(UInt32(self, "checksum"), hexadecimal) yield Enum(UInt16(self, "subsystem"), self.SUBSYSTEM_NAME) yield UInt16(self, "dll_flags") - yield filesizeHandler(UInt32(self, "size_stack_reserve")) - yield filesizeHandler(UInt32(self, "size_stack_commit")) - yield filesizeHandler(UInt32(self, "size_heap_reserve")) - yield filesizeHandler(UInt32(self, "size_heap_commit")) + yield filesizeHandler(VarUInt(self, "size_stack_reserve")) + yield filesizeHandler(VarUInt(self, "size_stack_commit")) + yield filesizeHandler(VarUInt(self, "size_heap_reserve")) + yield filesizeHandler(VarUInt(self, "size_heap_commit")) yield UInt32(self, "loader_flags") yield UInt32(self, "nb_directory", "Number of RVA and sizes") for index in range(self["nb_directory"].value): diff --git a/lib/hachoir/parser/program/exe_res.py b/lib/hachoir/parser/program/exe_res.py index 6a8aab42..e943b815 100644 --- a/lib/hachoir/parser/program/exe_res.py +++ b/lib/hachoir/parser/program/exe_res.py @@ -10,10 +10,10 @@ Creation date: 2007-01-19 """ from hachoir.field import (FieldSet, ParserError, Enum, - Bit, Bits, SeekableFieldSet, - UInt16, UInt32, TimestampUnix32, - Bytes, RawBytes, PaddingBytes, NullBytes, NullBits, - CString, String) + Bit, Bits, SeekableFieldSet, + UInt16, UInt32, TimestampUnix32, + Bytes, RawBytes, PaddingBytes, NullBytes, NullBits, + CString, String) from hachoir.core.text_handler import textHandler, filesizeHandler, hexadecimal from hachoir.core.tools import createDict, paddingSize, alignValue, makePrintable from hachoir.parser.common.win32 import BitmapInfoHeader diff --git a/lib/hachoir/parser/program/java.py b/lib/hachoir/parser/program/java.py index 406dbc2b..f0238add 100644 --- a/lib/hachoir/parser/program/java.py +++ b/lib/hachoir/parser/program/java.py @@ 
-435,6 +435,19 @@ class OpcodeSpecial_invokeinterface(JavaOpcode): return "%s(%i,%i,%i)" % (self.op, self["index"].value, self["count"].value, self["zero"].value) +class OpcodeSpecial_invokedynamic(JavaOpcode): + OPSIZE = 5 + + def createFields(self): + yield UInt8(self, "opcode") + yield CPIndex(self, "index") + yield UInt8(self, "zero1", "Must be zero.") + yield UInt8(self, "zero2", "Must be zero.") + + def createDisplay(self): + return "%s(%i,%i,%i)" % (self.op, self["index"].value, self["zero1"].value, self["zero2"].value) + + class OpcodeSpecial_newarray(JavaOpcode): OPSIZE = 2 @@ -659,6 +672,7 @@ class JavaBytecode(FieldSet): 0x98: ("dcmpg", OpcodeNoArgs, "compares two doubles. Stack: value1, value2 -> result"), 0x99: ("ifeq", OpcodeShortJump, "if 'value' is 0, branch to the 16-bit instruction offset argument. Stack: value ->"), 0x9a: ("ifne", OpcodeShortJump, "if 'value' is not 0, branch to the 16-bit instruction offset argument. Stack: value ->"), + 0x9b: ("iflt", OpcodeShortJump, "if 'value' is less than 0, branch to the 16-bit instruction offset argument. Stack: value ->"), 0x9c: ("ifge", OpcodeShortJump, "if 'value' is greater than or equal to 0, branch to the 16-bit instruction offset argument. Stack: value ->"), 0x9d: ("ifgt", OpcodeShortJump, "if 'value' is greater than 0, branch to the 16-bit instruction offset argument. Stack: value ->"), 0x9e: ("ifle", OpcodeShortJump, "if 'value' is less than or equal to 0, branch to the 16-bit instruction offset argument. Stack: value ->"), @@ -689,7 +703,7 @@ class JavaBytecode(FieldSet): 0xb7: ("invokespecial", OpcodeCPIndex, "invoke instance method on object 'objectref', where the method is identified by method reference in constant pool. Stack: objectref, [arg1, arg2, ...] ->"), 0xb8: ("invokestatic", OpcodeCPIndex, "invoke a static method, where the method is identified by method reference in the constant pool. Stack: [arg1, arg2, ...] 
->"), 0xb9: ("invokeinterface", OpcodeSpecial_invokeinterface, "invokes an interface method on object 'objectref', where the interface method is identified by method reference in constant pool. Stack: objectref, [arg1, arg2, ...] ->"), - 0xba: ("xxxunusedxxx", OpcodeNoArgs, "this opcode is reserved for historical reasons. Stack: "), + 0xba: ("invokedynamic", OpcodeSpecial_invokedynamic, "invokes a dynamically-computed call site, where the bootstrap method is identified by in constant pool. Stack: [arg1, arg2, ...] -> "), 0xbb: ("new", OpcodeCPIndex, "creates new object of type identified by class reference in constant pool. Stack: -> objectref"), 0xbc: ("newarray", OpcodeSpecial_newarray, "creates new array with 'count' elements of primitive type given in the argument. Stack: count -> arrayref"), 0xbd: ("anewarray", OpcodeCPIndex, "creates a new array of references of length 'count' and component type identified by the class reference in the constant pool. Stack: count -> arrayref"), @@ -762,6 +776,33 @@ class CPInfo(FieldSet): elif self.constant_type == "NameAndType": yield CPIndex(self, "name_index", target_types="Utf8") yield CPIndex(self, "descriptor_index", target_types="Utf8") + elif self.constant_type == "MethodHandle": + refkind_map = { + 1: ("getField", "Fieldref"), + 2: ("getStatic", "Fieldref"), + 3: ("putField", "Fieldref"), + 4: ("putStatic", "Fieldref"), + 5: ("invokeVirtual", "Methodref"), + 6: ("invokeStatic", ("Methodref", "InterfaceMethodref")), + 7: ("invokeSpecial", ("Methodref", "InterfaceMethodref")), + 8: ("newInvokeSpecial", "Methodref"), + 9: ("invokeInterface", "InterfaceMethodref"), + } + yield Enum(UInt8(self, "reference_kind"), {k: v[0] for k, v in refkind_map.items()}) + target_types = refkind_map[self["reference_kind"].value][1] + yield CPIndex(self, "reference_index", target_types=target_types) + elif self.constant_type == "MethodType": + yield CPIndex(self, "descriptor_index", target_types="Utf8") + elif self.constant_type == 
"Dynamic": + yield UInt16(self, "bootstrap_method_attr_index") + yield CPIndex(self, "name_and_type_index", target_types="NameAndType") + elif self.constant_type == "InvokeDynamic": + yield UInt16(self, "bootstrap_method_attr_index") + yield CPIndex(self, "name_and_type_index", target_types="NameAndType") + elif self.constant_type == "Module": + yield CPIndex(self, "name_index", target_types="Utf8") + elif self.constant_type == "Package": + yield CPIndex(self, "name_index", target_types="Utf8") else: raise ParserError("Not a valid constant pool element type: " + self["tag"].value) @@ -785,6 +826,21 @@ class CPInfo(FieldSet): elif self.constant_type == "NameAndType": return (self["descriptor_index"].rawvalue(), self["name_index"].rawvalue()) + elif self.constant_type == "MethodHandle": + return (self["reference_kind"].display, + self["reference_index"].rawvalue()) + elif self.constant_type == "MethodType": + return self["descriptor_index"].rawvalue() + elif self.constant_type == "Dynamic": + return (self["bootstrap_method_attr_index"].value, + self["name_and_type_index"].rawvalue()) + elif self.constant_type == "InvokeDynamic": + return (self["bootstrap_method_attr_index"].value, + self["name_and_type_index"].rawvalue()) + elif self.constant_type == "Module": + return self["name_index"].rawvalue() + elif self.constant_type == "Package": + return self["name_index"].rawvalue() else: # FIXME: Return "" instead of raising an exception? 
raise ParserError("Not a valid constant pool element type: " @@ -811,6 +867,24 @@ class CPInfo(FieldSet): elif self.constant_type == "NameAndType": descriptor, name = self.rawvalue() return parse_any_descriptor(descriptor, name=name) + elif self.constant_type == "MethodHandle": + return "%s(%s)" % (self["reference_kind"].display, self["reference_index"].str()) + elif self.constant_type == "MethodType": + return self["descriptor_index"].str() + elif self.constant_type == "Dynamic": + return "%d, %s" % ( + self["bootstrap_method_attr_index"].value, + self["name_and_type_index"].str() + ) + elif self.constant_type == "InvokeDynamic": + return "%d, %s" % ( + self["bootstrap_method_attr_index"].value, + self["name_and_type_index"].str() + ) + elif self.constant_type == "Module": + return self["name_index"].str() + elif self.constant_type == "Package": + return self["name_index"].str() else: # FIXME: Return "" instead of raising an exception? raise ParserError("Not a valid constant pool element type: " @@ -1192,6 +1266,12 @@ class JavaCompiledClassFile(Parser): "50.0": "JDK 1.6", "51.0": "JDK 1.7", "52.0": "JDK 1.8", + "53.0": "JDK 9", + "54.0": "JDK 10", + "55.0": "JDK 11", + "56.0": "JDK 12", + "57.0": "JDK 13", + "58.0": "JDK 14", } # Constants go here since they will probably depend on the detected format @@ -1208,7 +1288,13 @@ class JavaCompiledClassFile(Parser): 9: "Fieldref", 10: "Methodref", 11: "InterfaceMethodref", - 12: "NameAndType" + 12: "NameAndType", + 15: "MethodHandle", + 16: "MethodType", + 17: "Dynamic", + 18: "InvokeDynamic", + 19: "Module", + 20: "Package", } def validate(self): diff --git a/lib/hachoir/parser/program/macho.py b/lib/hachoir/parser/program/macho.py index e2eeba0a..227a6882 100644 --- a/lib/hachoir/parser/program/macho.py +++ b/lib/hachoir/parser/program/macho.py @@ -14,9 +14,9 @@ Updated: January 13, 2017 from hachoir.parser import HachoirParser from hachoir.field import (RootSeekableFieldSet, FieldSet, - Bit, NullBits, String, 
CString, - RawBytes, Bytes, PaddingBytes, - Int32, UInt32, UInt64, Enum) + Bit, NullBits, String, CString, + RawBytes, Bytes, PaddingBytes, + Int32, UInt32, UInt64, Enum) from hachoir.core.text_handler import textHandler, hexadecimal from hachoir.core.endian import LITTLE_ENDIAN, BIG_ENDIAN from hachoir.core.bits import str2hex diff --git a/lib/hachoir/parser/program/nds.py b/lib/hachoir/parser/program/nds.py index 6a895602..3509d8b5 100644 --- a/lib/hachoir/parser/program/nds.py +++ b/lib/hachoir/parser/program/nds.py @@ -11,7 +11,7 @@ File format references: from hachoir.parser import Parser from hachoir.field import (UInt8, UInt16, UInt32, UInt64, String, RawBytes, SubFile, FieldSet, NullBits, Bit, Bits, Bytes, - SeekableFieldSet, RootSeekableFieldSet) + SeekableFieldSet, RootSeekableFieldSet) from hachoir.core.text_handler import textHandler, hexadecimal from hachoir.core.endian import LITTLE_ENDIAN diff --git a/lib/hachoir/parser/program/prc.py b/lib/hachoir/parser/program/prc.py index d93ca5cd..9153c646 100644 --- a/lib/hachoir/parser/program/prc.py +++ b/lib/hachoir/parser/program/prc.py @@ -7,8 +7,8 @@ Creation date: 29 october 2008 from hachoir.parser import Parser from hachoir.field import (FieldSet, - UInt16, UInt32, TimestampMac32, - String, RawBytes) + UInt16, UInt32, TimestampMac32, + String, RawBytes) from hachoir.core.endian import BIG_ENDIAN diff --git a/lib/hachoir/parser/program/python.py b/lib/hachoir/parser/program/python.py index de52c943..8a5fd79c 100644 --- a/lib/hachoir/parser/program/python.py +++ b/lib/hachoir/parser/program/python.py @@ -10,13 +10,16 @@ Creation: 25 march 2005 """ from hachoir.parser import Parser -from hachoir.field import (FieldSet, UInt8, - UInt16, Int32, UInt32, Int64, ParserError, Float64, - Character, RawBytes, PascalString8, TimestampUnix32, - Bit, String) +from hachoir.field import ( + Field, FieldSet, UInt8, + UInt16, Int32, UInt32, Int64, UInt64, + ParserError, Float64, + Character, RawBytes, PascalString8, 
TimestampUnix32, + Bit, String, NullBits) from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.bits import long2raw from hachoir.core.text_handler import textHandler, hexadecimal +from hachoir.core import config DISASSEMBLE = False @@ -51,6 +54,12 @@ def parseString(parent): disassembleBytecode(parent["text"]) +def createStringValue(parent): + if parent.name == "lnotab": + return "" + return parent["text"] + + def parseStringRef(parent): yield textHandler(UInt32(parent, "ref"), hexadecimal) @@ -58,6 +67,13 @@ def parseStringRef(parent): def createStringRefDesc(parent): return "String ref: %s" % parent["ref"].display + +def createStringRefValue(parent): + value = parent["ref"].value + if hasattr(parent.root, 'string_table') and 0 <= value < len(parent.root.string_table): + return parent.root.string_table[value] + return None + # --- Integers --- @@ -69,17 +85,37 @@ def parseInt64(parent): yield Int64(parent, "value") +def createIntValue(parent): + return parent["value"] + + def parseLong(parent): yield Int32(parent, "digit_count") for index in range(abs(parent["digit_count"].value)): yield UInt16(parent, "digit[]") +def createLongValue(parent): + is_negative = parent["digit_count"].value < 0 + count = abs(parent["digit_count"].value) + total = 0 + for index in range(count - 1, -1, -1): + total <<= 15 + total += parent["digit[%u]" % index].value + if is_negative: + total = -total + return total + + # --- Float and complex --- def parseFloat(parent): yield PascalString8(parent, "value") +def createFloatValue(parent): + return float(parent["value"].value) + + def parseBinaryFloat(parent): yield Float64(parent, "value") @@ -94,6 +130,12 @@ def parseBinaryComplex(parent): yield Float64(parent, "complex") +def createComplexValue(parent): + return complex( + float(parent["real"].value), + float(parent["complex"].value)) + + # --- Tuple and list --- def parseTuple(parent): yield UInt32(parent, "count", "Item count") @@ -119,6 +161,12 @@ def 
createTupleDesc(parent): return "%s: %s" % (parent.code_info[2], items) +def tupleValueCreator(constructor): + def createTupleValue(parent): + return constructor([v.value for v in parent.array("item")]) + return createTupleValue + + # --- Dict --- def parseDict(parent): """ @@ -139,26 +187,58 @@ def createDictDesc(parent): return "Dict: %s" % ("%s keys" % parent.count) +def createDictValue(parent): + return {k.value: v.value for k, v in zip(parent.array("key"), parent.array("value"))} + + def parseRef(parent): yield UInt32(parent, "n", "Reference") +def createRefDesc(parent): + value = parent["n"].value + if hasattr(parent.root, 'object_table') and 0 <= value < len(parent.root.object_table): + return 'Reference: %s' % parent.root.object_table[value].description + else: + return 'Reference: %d' % value + + +def createRefValue(parent): + value = parent["n"].value + if hasattr(parent.root, 'object_table') and 0 <= value < len(parent.root.object_table): + return parent.root.object_table[value] + else: + return None + + +def parseASCII(parent): + size = UInt32(parent, "len", "Number of ASCII characters") + yield size + if size.value: + yield String(parent, "text", size.value, "String content", charset="ASCII") + + def parseShortASCII(parent): size = UInt8(parent, "len", "Number of ASCII characters") yield size - yield String(parent, "text", size.value, "String content", charset="ASCII") + if size.value: + yield String(parent, "text", size.value, "String content", charset="ASCII") # --- Code --- def parseCode(parent): - if 0x3000000 <= parent.root.getVersion(): + version = parent.root.getVersion() + if 0x3000000 <= version: yield UInt32(parent, "arg_count", "Argument count") + if 0x3080000 <= version: + yield UInt32(parent, "posonlyargcount", "Positional only argument count") yield UInt32(parent, "kwonlyargcount", "Keyword only argument count") - yield UInt32(parent, "nb_locals", "Number of local variables") + if version < 0x30B0000: + yield UInt32(parent, "nb_locals", 
"Number of local variables") yield UInt32(parent, "stack_size", "Stack size") yield UInt32(parent, "flags") - elif 0x2030000 <= parent.root.getVersion(): + elif 0x2030000 <= version: yield UInt32(parent, "arg_count", "Argument count") yield UInt32(parent, "nb_locals", "Number of local variables") yield UInt32(parent, "stack_size", "Stack size") @@ -168,54 +248,70 @@ def parseCode(parent): yield UInt16(parent, "nb_locals", "Number of local variables") yield UInt16(parent, "stack_size", "Stack size") yield UInt16(parent, "flags") + yield Object(parent, "compiled_code") yield Object(parent, "consts") yield Object(parent, "names") - yield Object(parent, "varnames") - if 0x2000000 <= parent.root.getVersion(): - yield Object(parent, "freevars") - yield Object(parent, "cellvars") + if 0x30B0000 <= version: + yield Object(parent, "co_localsplusnames") + yield Object(parent, "co_localspluskinds") + else: + yield Object(parent, "varnames") + if 0x2000000 <= version: + yield Object(parent, "freevars") + yield Object(parent, "cellvars") + yield Object(parent, "filename") yield Object(parent, "name") - if 0x2030000 <= parent.root.getVersion(): + if 0x30B0000 <= version: + yield Object(parent, "qualname") + + if 0x2030000 <= version: yield UInt32(parent, "firstlineno", "First line number") else: yield UInt16(parent, "firstlineno", "First line number") - yield Object(parent, "lnotab") + if 0x30A0000 <= version: + yield Object(parent, "linetable") + if 0x30B0000 <= version: + yield Object(parent, "exceptiontable") + else: + yield Object(parent, "lnotab") class Object(FieldSet): bytecode_info = { # Don't contains any data - '0': ("null", None, "NULL", None), - 'N': ("none", None, "None", None), - 'F': ("false", None, "False", None), - 'T': ("true", None, "True", None), - 'S': ("stop_iter", None, "StopIter", None), - '.': ("ellipsis", None, "ELLIPSIS", None), - '?': ("unknown", None, "Unknown", None), + '0': ("null", None, "NULL", None, None), + 'N': ("none", None, "None", None, 
lambda parent: None), + 'F': ("false", None, "False", None, lambda parent: False), + 'T': ("true", None, "True", None, lambda parent: True), + 'S': ("stop_iter", None, "StopIter", None, None), + '.': ("ellipsis", None, "ELLIPSIS", None, lambda parent: ...), + '?': ("unknown", None, "Unknown", None, None), - 'i': ("int32", parseInt32, "Int32", None), - 'I': ("int64", parseInt64, "Int64", None), - 'f': ("float", parseFloat, "Float", None), - 'g': ("bin_float", parseBinaryFloat, "Binary float", None), - 'x': ("complex", parseComplex, "Complex", None), - 'y': ("bin_complex", parseBinaryComplex, "Binary complex", None), - 'l': ("long", parseLong, "Long", None), - 's': ("string", parseString, "String", None), - 't': ("interned", parseString, "Interned", None), - 'u': ("unicode", parseString, "Unicode", None), - 'R': ("string_ref", parseStringRef, "String ref", createStringRefDesc), - '(': ("tuple", parseTuple, "Tuple", createTupleDesc), - ')': ("small_tuple", parseSmallTuple, "Tuple", createTupleDesc), - '[': ("list", parseTuple, "List", createTupleDesc), - '<': ("set", parseTuple, "Set", createTupleDesc), - '>': ("frozenset", parseTuple, "Frozen set", createTupleDesc), - '{': ("dict", parseDict, "Dict", createDictDesc), - 'c': ("code", parseCode, "Code", None), - 'r': ("ref", parseRef, "Reference", None), - 'z': ("short_ascii", parseShortASCII, "Short ASCII", None), - 'Z': ("short_ascii_interned", parseShortASCII, "Short ASCII interned", None), + 'i': ("int32", parseInt32, "Int32", None, createIntValue), + 'I': ("int64", parseInt64, "Int64", None, createIntValue), + 'f': ("float", parseFloat, "Float", None, createFloatValue), + 'g': ("bin_float", parseBinaryFloat, "Binary float", None, createFloatValue), + 'x': ("complex", parseComplex, "Complex", None, createComplexValue), + 'y': ("bin_complex", parseBinaryComplex, "Binary complex", None, createComplexValue), + 'l': ("long", parseLong, "Long", None, createLongValue), + 's': ("string", parseString, "String", None, 
createStringValue), + 't': ("interned", parseString, "Interned", None, createStringValue), + 'u': ("unicode", parseString, "Unicode", None, createStringValue), + 'R': ("string_ref", parseStringRef, "String ref", createStringRefDesc, createStringRefValue), + '(': ("tuple", parseTuple, "Tuple", createTupleDesc, tupleValueCreator(tuple)), + ')': ("small_tuple", parseSmallTuple, "Tuple", createTupleDesc, tupleValueCreator(tuple)), + '[': ("list", parseTuple, "List", createTupleDesc, tupleValueCreator(list)), + '<': ("set", parseTuple, "Set", createTupleDesc, tupleValueCreator(set)), + '>': ("frozenset", parseTuple, "Frozen set", createTupleDesc, tupleValueCreator(frozenset)), + '{': ("dict", parseDict, "Dict", createDictDesc, createDictValue), + 'c': ("code", parseCode, "Code", None, None), + 'r': ("ref", parseRef, "Reference", createRefDesc, createRefValue), + 'a': ("ascii", parseASCII, "ASCII", None, createStringValue), + 'A': ("ascii_interned", parseASCII, "ASCII interned", None, createStringValue), + 'z': ("short_ascii", parseShortASCII, "Short ASCII", None, createStringValue), + 'Z': ("short_ascii_interned", parseShortASCII, "Short ASCII interned", None, createStringValue), } def __init__(self, parent, name, **kw): @@ -227,64 +323,40 @@ class Object(FieldSet): self.code_info = self.bytecode_info[code] if not name: self._name = self.code_info[0] - if code == "l": - self.createValue = self.createValueLong - elif code in ("i", "I", "f", "g"): - self.createValue = lambda: self["value"].value - elif code == "T": - self.createValue = lambda: True - elif code == "F": - self.createValue = lambda: False - elif code in ("x", "y"): - self.createValue = self.createValueComplex - elif code in ("s", "t", "u"): - self.createValue = self.createValueString - self.createDisplay = self.createDisplayString - if code == 't': - if not hasattr(self.root, 'string_table'): - self.root.string_table = [] - self.root.string_table.append(self) - elif code == 'R': - if hasattr(self.root, 
'string_table'): - self.createValue = self.createValueStringRef + if code in ("t", "A", "Z"): + if not hasattr(self.root, 'string_table'): + self.root.string_table = [] + self.root.string_table.append(self) - def createValueString(self): - if "text" in self: - return self["text"].value - else: - return "" + def createValue(self): + create = self.code_info[4] + if create: + res = create(self) + if isinstance(res, Field): + return res.value + else: + return res + return None - def createDisplayString(self): - if "text" in self: - return self["text"].display - else: - return "(empty)" - - def createValueLong(self): - is_negative = self["digit_count"].value < 0 - count = abs(self["digit_count"].value) - total = 0 - for index in range(count - 1, -1, -1): - total <<= 15 - total += self["digit[%u]" % index].value - if is_negative: - total = -total - return total - - def createValueStringRef(self): - return self.root.string_table[self['ref'].value].value - - def createDisplayStringRef(self): - return self.root.string_table[self['ref'].value].display - - def createValueComplex(self): - return complex( - float(self["real"].value), - float(self["complex"].value)) + def createDisplay(self): + create = self.code_info[4] + if create: + res = create(self) + if isinstance(res, Field): + return res.display + res = repr(res) + if len(res) >= config.max_string_length: + res = res[:config.max_string_length] + "..." 
+ return res + return None def createFields(self): yield BytecodeChar(self, "bytecode", "Bytecode") yield Bit(self, "flag_ref", "Is a reference?") + if self["flag_ref"].value: + if not hasattr(self.root, 'object_table'): + self.root.object_table = [] + self.root.object_table.append(self) parser = self.code_info[1] if parser: yield from parser(self) @@ -301,6 +373,16 @@ class BytecodeChar(Character): static_size = 7 +PY_RELEASE_LEVEL_ALPHA = 0xA +PY_RELEASE_LEVEL_FINAL = 0xF + + +def VERSION(major, minor, release_level=PY_RELEASE_LEVEL_FINAL, serial=0): + micro = 0 + return ((major << 24) + (minor << 16) + (micro << 8) + + (release_level << 4) + (serial << 0)) + + class PythonCompiledFile(Parser): PARSER_TAGS = { "id": "python", @@ -394,7 +476,90 @@ class PythonCompiledFile(Parser): 3377: ("Python 3.6b1 ", 0x3060000), 3378: ("Python 3.6b2 ", 0x3060000), 3379: ("Python 3.6rc1", 0x3060000), - 3390: ("Python 3.7a0 ", 0x3070000), + 3390: ("Python 3.7a1", 0x30700A1), + 3391: ("Python 3.7a2", 0x30700A2), + 3392: ("Python 3.7a4", 0x30700A4), + 3393: ("Python 3.7b1", 0x30700B1), + 3394: ("Python 3.7b5", 0x30700B5), + 3400: ("Python 3.8a1", VERSION(3, 8)), + 3401: ("Python 3.8a1", VERSION(3, 8)), + 3410: ("Python 3.8a1", VERSION(3, 8)), + 3411: ("Python 3.8b2", VERSION(3, 8)), + 3412: ("Python 3.8b2", VERSION(3, 8)), + 3413: ("Python 3.8b4", VERSION(3, 8)), + 3420: ("Python 3.9a0", VERSION(3, 9)), + 3421: ("Python 3.9a0", VERSION(3, 9)), + 3422: ("Python 3.9a0", VERSION(3, 9)), + 3423: ("Python 3.9a2", VERSION(3, 9)), + 3424: ("Python 3.9a2", VERSION(3, 9)), + 3425: ("Python 3.9a2", VERSION(3, 9)), + 3430: ("Python 3.10a1", VERSION(3, 10)), + 3431: ("Python 3.10a1", VERSION(3, 10)), + 3432: ("Python 3.10a2", VERSION(3, 10)), + 3433: ("Python 3.10a2", VERSION(3, 10)), + 3434: ("Python 3.10a6", VERSION(3, 10)), + 3435: ("Python 3.10a7", VERSION(3, 10)), + 3436: ("Python 3.10b1", VERSION(3, 10)), + 3437: ("Python 3.10b1", VERSION(3, 10)), + 3438: ("Python 3.10b1", VERSION(3, 
10)), + 3439: ("Python 3.10b1", VERSION(3, 10)), + 3450: ("Python 3.11a1", VERSION(3, 11)), + 3451: ("Python 3.11a1", VERSION(3, 11)), + 3452: ("Python 3.11a1", VERSION(3, 11)), + 3453: ("Python 3.11a1", VERSION(3, 11)), + 3454: ("Python 3.11a1", VERSION(3, 11)), + 3455: ("Python 3.11a1", VERSION(3, 11)), + 3456: ("Python 3.11a1", VERSION(3, 11)), + 3457: ("Python 3.11a1", VERSION(3, 11)), + 3458: ("Python 3.11a1", VERSION(3, 11)), + 3459: ("Python 3.11a1", VERSION(3, 11)), + 3460: ("Python 3.11a1", VERSION(3, 11)), + 3461: ("Python 3.11a1", VERSION(3, 11)), + 3462: ("Python 3.11a2", VERSION(3, 11)), + 3463: ("Python 3.11a3", VERSION(3, 11)), + 3464: ("Python 3.11a3", VERSION(3, 11)), + 3465: ("Python 3.11a3", VERSION(3, 11)), + 3466: ("Python 3.11a4", VERSION(3, 11)), + 3467: ("Python 3.11a4", VERSION(3, 11)), + 3468: ("Python 3.11a4", VERSION(3, 11)), + 3469: ("Python 3.11a4", VERSION(3, 11)), + 3470: ("Python 3.11a4", VERSION(3, 11)), + 3471: ("Python 3.11a4", VERSION(3, 11)), + 3472: ("Python 3.11a4", VERSION(3, 11)), + 3473: ("Python 3.11a4", VERSION(3, 11)), + 3474: ("Python 3.11a4", VERSION(3, 11)), + 3475: ("Python 3.11a5", VERSION(3, 11)), + 3476: ("Python 3.11a5", VERSION(3, 11)), + 3477: ("Python 3.11a5", VERSION(3, 11)), + 3478: ("Python 3.11a5", VERSION(3, 11)), + 3479: ("Python 3.11a5", VERSION(3, 11)), + 3480: ("Python 3.11a5", VERSION(3, 11)), + 3481: ("Python 3.11a5", VERSION(3, 11)), + 3482: ("Python 3.11a5", VERSION(3, 11)), + 3483: ("Python 3.11a5", VERSION(3, 11)), + 3484: ("Python 3.11a5", VERSION(3, 11)), + 3485: ("Python 3.11a5", VERSION(3, 11)), + 3486: ("Python 3.11a6", VERSION(3, 11)), + 3487: ("Python 3.11a6", VERSION(3, 11)), + 3488: ("Python 3.11a6", VERSION(3, 11)), + 3489: ("Python 3.11a6", VERSION(3, 11)), + 3490: ("Python 3.11a6", VERSION(3, 11)), + 3491: ("Python 3.11a6", VERSION(3, 11)), + 3492: ("Python 3.11a7", VERSION(3, 11)), + 3493: ("Python 3.11a7", VERSION(3, 11)), + 3494: ("Python 3.11a7", VERSION(3, 11)), + 3500: 
("Python 3.12a1", VERSION(3, 12)), + 3501: ("Python 3.12a1", VERSION(3, 12)), + 3502: ("Python 3.12a1", VERSION(3, 12)), + 3503: ("Python 3.12a1", VERSION(3, 12)), + 3504: ("Python 3.12a1", VERSION(3, 12)), + 3505: ("Python 3.12a1", VERSION(3, 12)), + 3506: ("Python 3.12a1", VERSION(3, 12)), + 3507: ("Python 3.12a1", VERSION(3, 12)), + 3508: ("Python 3.12a1", VERSION(3, 12)), + 3509: ("Python 3.12a1", VERSION(3, 12)), + 3510: ("Python 3.12a1", VERSION(3, 12)), + 3511: ("Python 3.12a1", VERSION(3, 12)), } # Dictionnary which associate the pyc signature (4-byte long string) @@ -411,13 +576,7 @@ class PythonCompiledFile(Parser): if self["magic_string"].value != "\r\n": return r"Wrong magic string (\r\n)" - version = self.getVersion() - if version >= 0x3030000 and self['magic_number'].value >= 3200: - offset = 12 - else: - offset = 8 - value = self.stream.readBits(offset * 8, 7, self.endian) - if value != ord(b'c'): + if self["content/bytecode"].value != "c": return "First object bytecode is not code" return True @@ -430,8 +589,23 @@ class PythonCompiledFile(Parser): def createFields(self): yield UInt16(self, "magic_number", "Magic number") yield String(self, "magic_string", 2, r"Magic string \r\n", charset="ASCII") - yield TimestampUnix32(self, "timestamp", "Timestamp") + version = self.getVersion() - if version >= 0x3030000 and self['magic_number'].value >= 3200: - yield UInt32(self, "filesize", "Size of the Python source file (.py) modulo 2**32") + + # PEP 552: Deterministic pycs #31650 (Python 3.7a4); magic=3392 + if version >= 0x30700A4: + yield Bit(self, "use_hash", "Is hash based?") + yield Bit(self, "checked") + yield NullBits(self, "reserved", 30) + use_hash = self['use_hash'].value + else: + use_hash = False + + if use_hash: + yield UInt64(self, "hash", "SipHash hash of the source file") + else: + yield TimestampUnix32(self, "timestamp", "Timestamp modulo 2**32") + if version >= 0x3030000 and self['magic_number'].value >= 3200: + yield UInt32(self, 
"filesize", "Size of the Python source file (.py) modulo 2**32") + yield Object(self, "content") diff --git a/lib/hachoir/parser/video/amf.py b/lib/hachoir/parser/video/amf.py index 1327cf47..d82ea31b 100644 --- a/lib/hachoir/parser/video/amf.py +++ b/lib/hachoir/parser/video/amf.py @@ -11,7 +11,7 @@ Creation date: 4 november 2006 """ from hachoir.field import (FieldSet, ParserError, - UInt8, UInt16, UInt32, PascalString16, Float64) + UInt8, UInt16, UInt32, PascalString16, Float64) from hachoir.core.tools import timestampUNIX diff --git a/lib/hachoir/parser/video/asf.py b/lib/hachoir/parser/video/asf.py index 3dc3a7a5..fc41624b 100644 --- a/lib/hachoir/parser/video/asf.py +++ b/lib/hachoir/parser/video/asf.py @@ -12,12 +12,12 @@ Creation: 5 august 2006 from hachoir.parser import Parser from hachoir.field import (FieldSet, ParserError, - UInt16, UInt32, UInt64, - TimestampWin64, TimedeltaWin64, - TimedeltaMillisWin64, - String, PascalString16, Enum, - Bit, Bits, PaddingBits, - PaddingBytes, NullBytes, RawBytes) + UInt16, UInt32, UInt64, + TimestampWin64, TimedeltaWin64, + TimedeltaMillisWin64, + String, PascalString16, Enum, + Bit, Bits, PaddingBits, + PaddingBytes, NullBytes, RawBytes) from hachoir.core.endian import LITTLE_ENDIAN from hachoir.core.text_handler import ( displayHandler, filesizeHandler) @@ -355,7 +355,7 @@ class AsfFile(Parser): if self.stream.readBytes(0, len(magic)) != magic: return "Invalid magic" header = self[0] - if not(30 <= header["size"].value <= MAX_HEADER_SIZE): + if not (30 <= header["size"].value <= MAX_HEADER_SIZE): return "Invalid header size (%u)" % header["size"].value return True diff --git a/lib/hachoir/parser/video/flv.py b/lib/hachoir/parser/video/flv.py index 33afb48d..8d49ae4f 100644 --- a/lib/hachoir/parser/video/flv.py +++ b/lib/hachoir/parser/video/flv.py @@ -14,8 +14,8 @@ Creation date: 4 november 2006 from hachoir.parser import Parser from hachoir.field import (FieldSet, - UInt8, UInt24, UInt32, NullBits, NullBytes, - 
Bit, Bits, String, RawBytes, Enum) + UInt8, UInt24, UInt32, NullBits, NullBytes, + Bit, Bits, String, RawBytes, Enum) from hachoir.core.endian import BIG_ENDIAN from hachoir.parser.audio.mpeg_audio import Frame from hachoir.parser.video.amf import AMFObject diff --git a/lib/hachoir/parser/video/mpeg_ts.py b/lib/hachoir/parser/video/mpeg_ts.py index 2753e7dc..e626e70c 100644 --- a/lib/hachoir/parser/video/mpeg_ts.py +++ b/lib/hachoir/parser/video/mpeg_ts.py @@ -11,7 +11,7 @@ Creation date: 13 january 2007 from hachoir.parser import Parser from hachoir.field import (FieldSet, ParserError, MissingField, - UInt8, Enum, Bit, Bits, RawBytes, RawBits) + UInt8, Enum, Bit, Bits, RawBytes, RawBits) from hachoir.core.endian import BIG_ENDIAN from hachoir.core.text_handler import textHandler, hexadecimal @@ -134,7 +134,7 @@ class MPEG_TS(Parser): # FIXME: detect using file content, not file name # maybe detect sync at offset+4 bytes? source = self.stream.source - if not(source and source.startswith("file:")): + if not (source and source.startswith("file:")): return True filename = source[5:].lower() return filename.endswith((".m2ts", ".mts")) diff --git a/lib/hachoir/parser/video/mpeg_video.py b/lib/hachoir/parser/video/mpeg_video.py index 8c4e3ba9..d77d758c 100644 --- a/lib/hachoir/parser/video/mpeg_video.py +++ b/lib/hachoir/parser/video/mpeg_video.py @@ -16,11 +16,11 @@ Creation date: 15 september 2006 from hachoir.parser import Parser from hachoir.parser.audio.mpeg_audio import MpegAudioFile from hachoir.field import (FieldSet, - FieldError, ParserError, - Bit, Bits, Bytes, RawBits, PaddingBits, NullBits, - UInt8, UInt16, - RawBytes, PaddingBytes, - Enum, CustomFragment) + FieldError, ParserError, + Bit, Bits, Bytes, RawBits, PaddingBits, NullBits, + UInt8, UInt16, + RawBytes, PaddingBytes, + Enum, CustomFragment) from hachoir.core.endian import BIG_ENDIAN from hachoir.core.text_handler import textHandler, hexadecimal @@ -244,7 +244,7 @@ class PacketElement(FieldSet): 
yield Bits(self, "sync[]", 4) # =2, or 3 if has_dts=True yield Timestamp(self, "pts") if self["has_dts"].value: - if not(self["has_pts"].value): + if not self["has_pts"].value: raise ParserError("Invalid PTS/DTS values") yield Bits(self, "sync[]", 4) # =1 yield Timestamp(self, "dts") diff --git a/lib/hachoir/regex/__init__.py b/lib/hachoir/regex/__init__.py index bfebb1ff..e18bc8e2 100644 --- a/lib/hachoir/regex/__init__.py +++ b/lib/hachoir/regex/__init__.py @@ -1,7 +1,7 @@ from hachoir.regex.regex import (RegexEmpty, # noqa - RegexString, createString, - RegexRangeItem, RegexRangeCharacter, RegexRange, createRange, - RegexAnd, RegexOr, RegexRepeat, - RegexDot, RegexStart, RegexEnd, RegexWord) + RegexString, createString, + RegexRangeItem, RegexRangeCharacter, RegexRange, createRange, + RegexAnd, RegexOr, RegexRepeat, + RegexDot, RegexStart, RegexEnd, RegexWord) from hachoir.regex.parser import parse # noqa from hachoir.regex.pattern import PatternMatching # noqa diff --git a/lib/hachoir/regex/parser.py b/lib/hachoir/regex/parser.py index fdd2b0d8..234c935f 100644 --- a/lib/hachoir/regex/parser.py +++ b/lib/hachoir/regex/parser.py @@ -8,8 +8,8 @@ TODO: """ from hachoir.regex import (RegexString, RegexEmpty, RegexRepeat, - RegexDot, RegexWord, RegexStart, RegexEnd, - RegexRange, RegexRangeItem, RegexRangeCharacter) + RegexDot, RegexWord, RegexStart, RegexEnd, + RegexRange, RegexRangeItem, RegexRangeCharacter) import re REGEX_COMMAND_CHARACTERS = '.^$[](){}|+?*\\' @@ -164,7 +164,7 @@ def _parse(text, start=0, until=None): if char == 'b': new_regex = RegexWord() else: - if not(char in REGEX_COMMAND_CHARACTERS or char in " '"): + if not (char in REGEX_COMMAND_CHARACTERS or char in " '"): raise SyntaxError( "Operator '\\%s' is not supported" % char) new_regex = RegexString(char) diff --git a/lib/hachoir/regex/pattern.py b/lib/hachoir/regex/pattern.py index 49624cba..e5256630 100644 --- a/lib/hachoir/regex/pattern.py +++ b/lib/hachoir/regex/pattern.py @@ -125,7 +125,7 
@@ class PatternMatching: item = RegexPattern(regex, user) if item.regex.maxLength() is None: raise ValueError( - "Regular expression with no maximum size has forbidden") + "Regular expression with no maximum size is forbidden") self.regex_patterns.append(item) self._need_commit = True diff --git a/lib/hachoir/stream/__init__.py b/lib/hachoir/stream/__init__.py index fe4b31b2..4c30071c 100644 --- a/lib/hachoir/stream/__init__.py +++ b/lib/hachoir/stream/__init__.py @@ -1,9 +1,9 @@ from hachoir.core.endian import BIG_ENDIAN, LITTLE_ENDIAN # noqa from hachoir.stream.stream import StreamError # noqa from hachoir.stream.input import (InputStreamError, # noqa - InputStream, InputIOStream, StringInputStream, - InputSubStream, InputFieldStream, - FragmentedStream, ConcatStream) + InputStream, InputIOStream, StringInputStream, + InputSubStream, InputFieldStream, + FragmentedStream, ConcatStream) from hachoir.stream.input_helper import FileInputStream, guessStreamCharset # noqa from hachoir.stream.output import (OutputStreamError, # noqa - FileOutputStream, StringOutputStream, OutputStream) + FileOutputStream, StringOutputStream, OutputStream) diff --git a/lib/hachoir/subfile/__main__.py b/lib/hachoir/subfile/__main__.py index 0777c4f9..ff501336 100644 --- a/lib/hachoir/subfile/__main__.py +++ b/lib/hachoir/subfile/__main__.py @@ -1,3 +1,3 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 from hachoir.subfile.main import main main() diff --git a/lib/hachoir/subfile/main.py b/lib/hachoir/subfile/main.py index a4c477e5..fe895819 100644 --- a/lib/hachoir/subfile/main.py +++ b/lib/hachoir/subfile/main.py @@ -85,7 +85,7 @@ def main(): stream = FileInputStream(filename) with stream: subfile = SearchSubfile(stream, values.offset, values.size) - subfile.verbose = not(values.quiet) + subfile.verbose = not values.quiet subfile.debug = values.debug if output: subfile.setOutput(output) diff --git a/lib/hachoir/subfile/search.py b/lib/hachoir/subfile/search.py index 9cb9ad98..f7ae929d 
100644 --- a/lib/hachoir/subfile/search.py +++ b/lib/hachoir/subfile/search.py @@ -95,7 +95,7 @@ class SearchSubfile: print("[!] Memory error!", file=stderr) self.mainFooter() self.stream.close() - return not(main_error) + return (not main_error) def mainHeader(self): # Fix slice size if needed @@ -149,7 +149,7 @@ class SearchSubfile: if parser.content_size is not None: text += " size=%s (%s)" % (parser.content_size // 8, humanFilesize(parser.content_size // 8)) - if not(parser.content_size) or parser.content_size // 8 < FILE_MAX_SIZE: + if not parser.content_size or parser.content_size // 8 < FILE_MAX_SIZE: text += ": " + parser.description else: text += ": " + parser.__class__.__name__ diff --git a/lib/pytvmaze/tvmaze.py b/lib/pytvmaze/tvmaze.py index 07020cd5..3e4d46dc 100644 --- a/lib/pytvmaze/tvmaze.py +++ b/lib/pytvmaze/tvmaze.py @@ -1,26 +1,23 @@ -from __future__ import unicode_literals - import re -import six from datetime import datetime import requests -from requests.packages.urllib3.util.retry import Retry +from urllib.parse import quote +from urllib3.util.retry import Retry from requests.adapters import HTTPAdapter -from . import endpoints, logger +from . 
import endpoints from .exceptions import * # noinspection PyUnreachableCode if False: - from typing import AnyStr, Dict, List, Optional - from six import integer_types + from typing import Any, AnyStr, Dict, List, Optional, Union class Show(object): def __init__(self, data): self.status = data.get('status') # type: Optional[AnyStr] - self.rating = data.get('rating') # type: Optional[integer_types] + self.rating = data.get('rating') # type: Optional[int] self.genres = data.get('genres') # type: Optional[List[AnyStr]] - self.weight = data.get('weight') # type: Optional[integer_types] + self.weight = data.get('weight') # type: Optional[int] self.updated = data.get('updated') self.name = data.get('name') # type: Optional[AnyStr] self.language = data.get('language') # type: Optional[AnyStr] @@ -36,11 +33,11 @@ class Show(object): self.web_channel = WebChannel(data.get('webChannel')) # type: Optional[WebChannel] else: self.web_channel = None - self.runtime = data.get('runtime') # type: Optional[integer_types] + self.runtime = data.get('runtime') # type: Optional[int] self.average_runtime = data.get('averageRuntime') self.type = data.get('type') # type: Optional[AnyStr] - self.id = data.get('id') # type: integer_types - self.maze_id = self.id # type: integer_types + self.id = data.get('id') # type: int + self.maze_id = self.id # type: int if data.get('network'): self.network = Network(data.get('network')) # type: Optional[Network] else: @@ -71,25 +68,13 @@ class Show(object): platform = '' network = '' - return _valid_encoding(''.format( - id=self.maze_id, - name=self.name, - year=year, - platform=platform, - network=network) - ) + return f'' def __str__(self): n = '' if self.network: n = ' (%s)' % self.network.name - return _valid_encoding('%s%s [%s]' % (self.name, n, self.premiered)) - - def __unicode__(self): - n = '' - if self.network: - n = ' (%s)' % self.network.name - return '%s%s (%s)' % (self.name, n, self.premiered) + return f'{self.name}{n} [{self.premiered}]' 
def __iter__(self): return iter(self.seasons.values()) @@ -109,7 +94,7 @@ class Show(object): try: return self.seasons[item] except KeyError: - raise SeasonNotFound('Season {0} does not exist for show {1}.'.format(item, self.name)) + raise SeasonNotFound(f'Season {item} does not exist for show {self.name}.') @property def akas(self): @@ -163,7 +148,9 @@ class Show(object): @property def seasons(self): + # type: (...) -> Dict if None is self._seasons: + # noinspection PyStatementEffect self.episodes return self._seasons @@ -217,25 +204,19 @@ class Season(object): self.links = data.get('_links') def __repr__(self): - return _valid_encoding(''.format( - id=self.id, - number=str(self.season_number).zfill(2) - )) + return f'' def _get_showname(self): sn = '' if self.show: sn = '%s - ' % self.show.name - return '%sSeason %02d (%s/%s)' % (sn, self.season_number, len(self.episodes), self.episode_order) - - def __unicode__(self): - return self._get_showname() + return f'{sn}Season {self.season_number:0>2} ({len(self.episodes)}/{self.episode_order})' def __str__(self): - return _valid_encoding(self._get_showname()) + return self._get_showname() def __iter__(self): - return iter(self.episodes.values()) + return iter(self.episodes) def __len__(self): return len(self.episodes) @@ -244,9 +225,7 @@ class Season(object): try: return self.episodes[item] except KeyError: - raise EpisodeNotFound( - 'Episode {0} does not exist for season {1} of show {2}.'.format(item, self.season_number, - self.show)) + raise EpisodeNotFound(f'Episode {item} does not exist for season {self.season_number} of show {self.show}.') # Python 3 bool evaluation def __bool__(self): @@ -299,26 +278,20 @@ class Episode(object): epnum = 'Special' else: epnum = self.episode_number - return _valid_encoding(''.format( - season=str(self.season_number).zfill(2), - number=str(epnum).zfill(2)) - ) + return f'2}, episode_number={epnum:0>2})>' def _gen_ep_name(self): - season = 'S' + str(self.season_number).zfill(2) + 
season = f'S{self.season_number:0>2}' if self.special: episode = ' Special' else: - episode = 'E' + str(self.episode_number).zfill(2) + episode = f'E{self.episode_number:0>2}' sn = '' if self.show: - sn = '%s - ' % self.show.name - return sn + season + episode + ' ' + self.title + sn = f'{self.show.name} - ' % self.show.name + return f'{sn}{season}{episode} {self.title}' def __str__(self): - return _valid_encoding(self._gen_ep_name()) - - def __unicode__(self): return self._gen_ep_name() def is_special(self): @@ -380,28 +353,22 @@ class Person(object): return self._crewcredits def __repr__(self): - return _valid_encoding(''.format( - name=self.name, - id=self.id - )) + return f'' def _gen_lifetime(self): - l = '' + life = '' if self.birthday: - l = '%s' % self.birthday + life = f'{self.birthday}' if self.death_day: - if 0 < len(l): - l += ' - ' - l = '%s' % self.death_day - if 0 < len(l): - l = ' (%s)' % l - return l + if 0 < len(life): + life += ' - ' + life = f'{self.death_day}' + if 0 < len(life): + life = f' ({life})' + return life def __str__(self): - return _valid_encoding('%s%s' % (self.name, self._gen_lifetime())) - - def __unicode__(self): - return '%s%s' % (self.name, self._gen_lifetime()) + return f'{self.name}{self._gen_lifetime()}' class Character(object): @@ -417,22 +384,16 @@ class Character(object): self.person = None # type: Optional[Person] def __repr__(self): - return _valid_encoding(''.format( - name=self.name, - id=self.id - )) + return f'' def _get_person(self): p = '' if self.person: - p = ' (%s)' % self.person.name + p = f' ({self.person.name})' return p def __str__(self): - return _valid_encoding('%s%s' % (self.name, self._get_person())) - - def __unicode__(self): - return '%s%s' % (self.name, self._get_person()) + return f'{self.name}{self._get_person()}' class Cast(object): @@ -453,10 +414,7 @@ class Cast(object): return self.__str__() def __str__(self): - return _valid_encoding('%s (%s)' % (self.characters, self.people)) - - def 
__unicode__(self): - return '%s (%s)' % (self.characters, self.people) + return f'{self.characters} ({self.people})' class CastCredit(object): @@ -480,10 +438,7 @@ class CastCredit(object): return self.__str__() def __str__(self): - return _valid_encoding('%s (%s)' % (self.character, self.show)) - - def __unicode__(self): - return '%s (%s)' % (self.character, self.show) + return f'{self.character} ({self.show})' class CrewCredit(object): @@ -502,10 +457,7 @@ class CrewCredit(object): return self.__str__() def __str__(self): - return _valid_encoding('%s (%s)' % (self.type, self.show)) - - def __unicode__(self): - return '%s (%s)' % (self.type, self.show) + return f'{self.type} ({self.show})' class Crew(object): @@ -513,45 +465,31 @@ class Crew(object): self.person = Person(data.get('person')) self.type = data.get('type') - def __unicode__(self): - return '%s: %s' % (self.type, self.person) - def __str__(self): - return _valid_encoding('%s: %s' % (self.type, self.person)) + return f'{self.type}: {self.person}' def __repr__(self): - return _valid_encoding(''.format( - name=self.person.name, - id=self.person.id, - type=self.type - )) + return f'' class Image(object): def __init__(self, data): self.main = data.get('main') # type: bool self.type = data.get('type') # type: AnyStr - self.id = data.get('id') # type: integer_types + self.id = data.get('id') # type: int self.resolutions = data.get('resolutions') # type: Dict[AnyStr, AnyStr] def _get_type_name(self): - return ('unknown', self.type)[isinstance(self.type, six.string_types)] + return ('unknown', self.type)[isinstance(self.type, str)] def _get_main_str(self): return ('Other', 'Main')[True is self.main] - def __unicode__(self): - return '%s: %s' % (self._get_type_name(), self._get_main_str()) - def __str__(self): - return _valid_encoding('%s: %s' % (self._get_type_name(), self._get_main_str())) + return f'{self._get_type_name()}: {self._get_main_str()}' def __repr__(self): - return _valid_encoding(''.format( - 
main=self.main, - id=self.id, - type=self._get_type_name() - )) + return f'' class Updates(object): @@ -567,8 +505,8 @@ class Updates(object): try: return self.updates[item] except KeyError: - logger.error('No update found for Maze id {}.'.format(item)) - raise UpdateNotFound('No update found for Maze id {}.'.format(item)) + logger.error(f'No update found for Maze id {item}.') + raise UpdateNotFound(f'No update found for Maze id {item}.') def __iter__(self): return iter(self.updates.values()) @@ -581,10 +519,7 @@ class Update(object): self.timestamp = datetime.fromtimestamp(time) def __repr__(self): - return _valid_encoding(''.format( - maze_id=self.maze_id, - time=self.seconds_since_epoch - )) + return f'' class AKA(object): @@ -593,19 +528,16 @@ class AKA(object): self.country = data.get('country') def __repr__(self): - return _valid_encoding(''.format(name=self.name, country=self.country)) + return f'' def _get_country(self): c = '' if self.country: - c = ' (%s)' % self.country.get('name') + c = f' ({self.country.get("name")})' return c - def __unicode__(self): - return '%s%s' % (self.name, self._get_country()) - def __str__(self): - return _valid_encoding('%s%s' % (self.name, self._get_country())) + return f'{self.name}{self.name}' class NetworkBase(object): @@ -624,14 +556,11 @@ class NetworkBase(object): def _get_country(self): c = '' if self.country: - c = ' (%s)' % self.country + c = f' ({self.country})' return c - def __unicode__(self): - return '%s%s' % (self.name, self._get_country()) - def __str__(self): - return _valid_encoding('%s%s' % (self.name, self._get_country())) + return f'{self.name}{self._get_country()}' class Network(NetworkBase): @@ -639,7 +568,7 @@ class Network(NetworkBase): super(Network, self).__init__(data) def __repr__(self): - return _valid_encoding(''.format(name=self.name, country=self.country)) + return f'' class WebChannel(NetworkBase): @@ -647,7 +576,7 @@ class WebChannel(NetworkBase): super(WebChannel, self).__init__(data) def 
__repr__(self): - return _valid_encoding(''.format(name=self.name, country=self.country)) + return f'' class FollowedShow(object): @@ -658,7 +587,7 @@ class FollowedShow(object): self.show = Show(data['_embedded'].get('show')) def __repr__(self): - return _valid_encoding(''.format(self.maze_id)) + return f'' class FollowedPerson(object): @@ -669,7 +598,7 @@ class FollowedPerson(object): self.person = Person(data['_embedded'].get('person')) def __repr__(self): - return _valid_encoding(''.format(id=self.person_id)) + return f'' class FollowedNetwork(object): @@ -680,7 +609,7 @@ class FollowedNetwork(object): self.network = Network(data['_embedded'].get('network')) def __repr__(self): - return _valid_encoding(''.format(id=self.network_id)) + return f'' class FollowedWebChannel(object): @@ -691,7 +620,7 @@ class FollowedWebChannel(object): self.web_channel = WebChannel(data['_embedded'].get('webchannel')) def __repr__(self): - return _valid_encoding(''.format(id=self.web_channel_id)) + return f'' class MarkedEpisode(object): @@ -703,8 +632,7 @@ class MarkedEpisode(object): self.type = types[type_] def __repr__(self): - return _valid_encoding(''.format( - id=self.episode_id, marked_at=self.marked_at, type=self.type)) + return f'' class VotedShow(object): @@ -716,9 +644,7 @@ class VotedShow(object): self.show = Show(data['_embedded'].get('show')) def __repr__(self): - return _valid_encoding(''.format(id=self.maze_id, - voted_at=self.voted_at, - vote=self.vote)) + return f'' class VotedEpisode(object): @@ -728,22 +654,11 @@ class VotedEpisode(object): self.vote = data.get('vote') def __repr__(self): - return _valid_encoding(''.format(id=self.episode_id, - voted_at=self.voted_at, - vote=self.vote)) - - -def _valid_encoding(text): - if not text: - return - if sys.version_info > (3,): - return text - else: - return unicode(text).encode('utf-8') + return f'' def _url_quote(show): - return requests.compat.quote(show.encode('UTF-8')) + return quote(show.encode('UTF-8')) def 
_remove_tags(text): @@ -755,14 +670,14 @@ def _remove_tags(text): # noinspection PyUnusedLocal def _record_hook(r, *args, **kwargs): r.hook_called = True - if 301 == r.status_code and isinstance(r.headers.get('Location'), six.string_types) \ + if 301 == r.status_code and isinstance(r.headers.get('Location'), str) \ and r.headers.get('Location').startswith('http://api.tvmaze'): r.headers['Location'] = r.headers['Location'].replace('http://', 'https://') return r def _embed_url(base_url, embed, possible_embeds, glue): - if isinstance(embed, six.string_types): + if isinstance(embed, str): embed_words = [em.strip() for em in embed.split(',')] elif None is embed: embed_words = [] @@ -787,8 +702,8 @@ class TVmaze(object): TVmaze features. Attributes: - username (str): Username for http://www.tvmaze.com - api_key (str): TVmaze api key. Find your key at http://www.tvmaze.com/dashboard + username (str): Username for https://www.tvmaze.com + api_key (str): TVmaze api key. Find your key at https://www.tvmaze.com/dashboard """ @@ -799,6 +714,7 @@ class TVmaze(object): # Query TVmaze free endpoints @staticmethod def endpoint_standard_get(url): + # type: (str) -> Any s = requests.Session() retries = Retry(total=5, backoff_factor=0.1, @@ -817,13 +733,14 @@ class TVmaze(object): return None if r.status_code == 400: - raise BadRequest('Bad Request for url {}'.format(url)) + raise BadRequest(f'Bad Request for url {url}') results = r.json() return results # Query TVmaze Premium endpoints def _endpoint_premium_get(self, url): + # type: (str) -> Any s = requests.Session() retries = Retry(total=5, backoff_factor=0.1, @@ -842,12 +759,13 @@ class TVmaze(object): return None if r.status_code == 400: - raise BadRequest('Bad Request for url {}'.format(url)) + raise BadRequest(f'Bad Request for url {url}') results = r.json() return results def _endpoint_premium_delete(self, url): + # type: (str) -> Any s = requests.Session() retries = Retry(total=5, backoff_factor=0.1, @@ -863,8 +781,8 @@ 
class TVmaze(object): s.close() if r.status_code == 400: - logger.error('Bad Request for url {}'.format(url)) - raise BadRequest('Bad Request for url {}'.format(url)) + logger.error(f'Bad Request for url {url}') + raise BadRequest(f'Bad Request for url {url}') if r.status_code == 200: return True @@ -873,6 +791,7 @@ class TVmaze(object): return None def _endpoint_premium_put(self, url, payload=None): + # type: (str, Any) -> Any s = requests.Session() retries = Retry(total=5, backoff_factor=0.1, @@ -888,7 +807,7 @@ class TVmaze(object): s.close() if r.status_code == 400: - raise BadRequest('Bad Request for url {}'.format(url)) + raise BadRequest(f'Bad Request for url {url}') if r.status_code == 200: return True @@ -919,7 +838,7 @@ class TVmaze(object): show_web_channel: Show Web Channel (like Netflix, Amazon, etc.) show_language: Show language show_country: Show country - embed: embed parameter to include additional data. Currently 'episodes', 'cast', 'episodeswithspecials' are supported + embed: embed parameter to include additional data. 
Values: 'episodes', 'cast', 'episodeswithspecials' """ errors = [] if not (maze_id or tvdb_id or tvrage_id or imdb_id or show_name): @@ -956,6 +875,7 @@ class TVmaze(object): @staticmethod def _get_show_with_qualifiers(show_name, qualifiers): + # type: (str, List) -> Show shows = get_show_list(show_name) best_match = -1 # Initialize match value score show_match = None @@ -995,6 +915,7 @@ class TVmaze(object): # Search with user-defined qualifiers, used by get_show() method def _get_show_by_search(self, show_name, show_year, show_network, show_language, show_country, show_web_channel, embed): + # type: (str, int, str, str, str, str, str) -> Show if show_year: show_year = str(show_year) qualifiers = list(filter(None, [show_year, show_network, show_language, show_country, show_web_channel])) @@ -1011,6 +932,7 @@ class TVmaze(object): # TVmaze Premium Endpoints # NOT DONE OR TESTED def get_followed_shows(self, embed=None): + # type: (str) -> List[FollowedShow] if embed not in [None, 'show']: raise InvalidEmbedValue('Value for embed must be "show" or None') url = endpoints.followed_shows.format('/') @@ -1024,26 +946,30 @@ class TVmaze(object): raise NoFollowedShows('You have not followed any shows yet') def get_followed_show(self, maze_id): - url = endpoints.followed_shows.format('/' + str(maze_id)) + # type: (int) -> FollowedShow + url = endpoints.followed_shows.format(f'/{maze_id}') q = self._endpoint_premium_get(url) if q: return FollowedShow(q) else: - raise ShowNotFollowed('Show with ID {} is not followed'.format(maze_id)) + raise ShowNotFollowed(f'Show with ID {maze_id} is not followed') def follow_show(self, maze_id): - url = endpoints.followed_shows.format('/' + str(maze_id)) + # type: (int) -> None + url = endpoints.followed_shows.format(f'/{maze_id}') q = self._endpoint_premium_put(url) if not q: - raise ShowNotFound('Show with ID {} does not exist'.format(maze_id)) + raise ShowNotFound(f'Show with ID {maze_id} does not exist') def unfollow_show(self, 
maze_id): - url = endpoints.followed_shows.format('/' + str(maze_id)) + # type: (int) -> None + url = endpoints.followed_shows.format(f'/{maze_id}') q = self._endpoint_premium_delete(url) if not q: - raise ShowNotFollowed('Show with ID {} was not followed'.format(maze_id)) + raise ShowNotFollowed(f'Show with ID {maze_id} was not followed') def get_followed_people(self, embed=None): + # type: (str) -> List[FollowedPerson] if embed not in [None, 'person']: raise InvalidEmbedValue('Value for embed must be "person" or None') url = endpoints.followed_people.format('/') @@ -1056,26 +982,30 @@ class TVmaze(object): raise NoFollowedPeople('You have not followed any people yet') def get_followed_person(self, person_id): - url = endpoints.followed_people.format('/' + str(person_id)) + # type: (int) -> FollowedPerson + url = endpoints.followed_people.format(f'/{person_id}') q = self._endpoint_premium_get(url) if q: return FollowedPerson(q) else: - raise PersonNotFound('Person with ID {} is not followed'.format(person_id)) + raise PersonNotFound(f'Person with ID {person_id} is not followed') def follow_person(self, person_id): - url = endpoints.followed_people.format('/' + str(person_id)) + # type: (int) -> None + url = endpoints.followed_people.format(f'/{person_id}') q = self._endpoint_premium_put(url) if not q: - raise PersonNotFound('Person with ID {} does not exist'.format(person_id)) + raise PersonNotFound(f'Person with ID {person_id} does not exist') def unfollow_person(self, person_id): - url = endpoints.followed_people.format('/' + str(person_id)) + # type: (int) -> None + url = endpoints.followed_people.format(f'/{person_id}') q = self._endpoint_premium_delete(url) if not q: - raise PersonNotFollowed('Person with ID {} was not followed'.format(person_id)) + raise PersonNotFollowed(f'Person with ID {person_id} was not followed') def get_followed_networks(self, embed=None): + # type: (str) -> List[FollowedNetwork] if embed not in [None, 'network']: raise 
InvalidEmbedValue('Value for embed must be "network" or None') url = endpoints.followed_networks.format('/') @@ -1088,26 +1018,30 @@ class TVmaze(object): raise NoFollowedNetworks('You have not followed any networks yet') def get_followed_network(self, network_id): - url = endpoints.followed_networks.format('/' + str(network_id)) + # type: (int) -> FollowedNetwork + url = endpoints.followed_networks.format(f'/{network_id}') q = self._endpoint_premium_get(url) if q: return FollowedNetwork(q) else: - raise NetworkNotFound('Network with ID {} is not followed'.format(network_id)) + raise NetworkNotFound(f'Network with ID {network_id} is not followed') def follow_network(self, network_id): - url = endpoints.followed_networks.format('/' + str(network_id)) + # type: (int) -> None + url = endpoints.followed_networks.format(f'/{network_id}') q = self._endpoint_premium_put(url) if not q: - raise NetworkNotFound('Network with ID {} does not exist'.format(network_id)) + raise NetworkNotFound(f'Network with ID {network_id} does not exist') def unfollow_network(self, network_id): - url = endpoints.followed_networks.format('/' + str(network_id)) + # type: (int) -> None + url = endpoints.followed_networks.format(f'/{network_id}') q = self._endpoint_premium_delete(url) if not q: - raise NetworkNotFollowed('Network with ID {} was not followed'.format(network_id)) + raise NetworkNotFollowed(f'Network with ID {network_id} was not followed') def get_followed_web_channels(self, embed=None): + # type: (str) -> List[FollowedWebChannel] if embed not in [None, 'webchannel']: raise InvalidEmbedValue('Value for embed must be "webchannel" or None') url = endpoints.followed_web_channels.format('/') @@ -1120,7 +1054,8 @@ class TVmaze(object): raise NoFollowedWebChannels('You have not followed any Web Channels yet') def get_followed_web_channel(self, webchannel_id): - url = endpoints.followed_web_channels.format('/' + str(webchannel_id)) + # type: (int) -> FollowedWebChannel + url = 
endpoints.followed_web_channels.format(f'/{webchannel_id}') q = self._endpoint_premium_get(url) if q: return FollowedWebChannel(q) @@ -1128,18 +1063,21 @@ class TVmaze(object): raise NetworkNotFound('Web Channel with ID {} is not followed'.format(webchannel_id)) def follow_web_channel(self, webchannel_id): - url = endpoints.followed_web_channels.format('/' + str(webchannel_id)) + # type: (int) -> None + url = endpoints.followed_web_channels.format(f'/{webchannel_id}') q = self._endpoint_premium_put(url) if not q: - raise WebChannelNotFound('Web Channel with ID {} does not exist'.format(webchannel_id)) + raise WebChannelNotFound(f'Web Channel with ID {webchannel_id} does not exist') def unfollow_web_channel(self, webchannel_id): - url = endpoints.followed_web_channels.format('/' + str(webchannel_id)) + # type: (int) -> None + url = endpoints.followed_web_channels.format(f'/{webchannel_id}') q = self._endpoint_premium_delete(url) if not q: - raise WebChannelNotFollowed('Web Channel with ID {} was not followed'.format(webchannel_id)) + raise WebChannelNotFollowed(f'Web Channel with ID {webchannel_id} was not followed') def get_marked_episodes(self, maze_id=None): + # type: (int) -> List[MarkedEpisode] if not maze_id: url = endpoints.marked_episodes.format('/') else: @@ -1152,35 +1090,36 @@ class TVmaze(object): raise NoMarkedEpisodes('You have not marked any episodes yet') def get_marked_episode(self, episode_id): - path = '/{}'.format(episode_id) - url = endpoints.marked_episodes.format(path) + # type: (int) -> MarkedEpisode + url = endpoints.marked_episodes.format(f'/{episode_id}') q = self._endpoint_premium_get(url) if q: return MarkedEpisode(q) else: - raise EpisodeNotMarked('Episode with ID {} is not marked'.format(episode_id)) + raise EpisodeNotMarked(f'Episode with ID {episode_id} is not marked') def mark_episode(self, episode_id, mark_type): + # type: (int, str) -> None types = {'watched': 0, 'acquired': 1, 'skipped': 2} try: status = types[mark_type] except 
IndexError: raise InvalidMarkedEpisodeType('Episode must be marked as "watched", "acquired", or "skipped"') payload = {'type': str(status)} - path = '/{}'.format(episode_id) - url = endpoints.marked_episodes.format(path) + url = endpoints.marked_episodes.format(f'/{episode_id}') q = self._endpoint_premium_put(url, payload=payload) if not q: - raise EpisodeNotFound('Episode with ID {} does not exist'.format(episode_id)) + raise EpisodeNotFound(f'Episode with ID {episode_id} does not exist') def unmark_episode(self, episode_id): - path = '/{}'.format(episode_id) - url = endpoints.marked_episodes.format(path) + # type: (int) -> None + url = endpoints.marked_episodes.format(f'/{episode_id}') q = self._endpoint_premium_delete(url) if not q: - raise EpisodeNotMarked('Episode with ID {} was not marked'.format(episode_id)) + raise EpisodeNotMarked(f'Episode with ID {episode_id} was not marked') def get_voted_shows(self, embed=None): + # type: (str) -> List[VotedShow] if embed not in [None, 'show']: raise InvalidEmbedValue('Value for embed must be "show" or None') url = endpoints.voted_shows.format('/') @@ -1193,29 +1132,33 @@ class TVmaze(object): raise NoVotedShows('You have not voted for any shows yet') def get_voted_show(self, maze_id): - url = endpoints.voted_shows.format('/' + str(maze_id)) + # type: (int) -> VotedShow + url = endpoints.voted_shows.format(f'/{maze_id}') q = self._endpoint_premium_get(url) if q: return VotedShow(q) else: - raise ShowNotVotedFor('Show with ID {} not voted for'.format(maze_id)) + raise ShowNotVotedFor(f'Show with ID {maze_id} not voted for') def remove_show_vote(self, maze_id): - url = endpoints.voted_shows.format('/' + str(maze_id)) + # type: (int) -> None + url = endpoints.voted_shows.format(f'/{maze_id}') q = self._endpoint_premium_delete(url) if not q: - raise ShowNotVotedFor('Show with ID {} was not voted for'.format(maze_id)) + raise ShowNotVotedFor(f'Show with ID {maze_id} was not voted for') def vote_show(self, maze_id, vote): + 
# type: (int, int) -> None if not 1 <= vote <= 10: raise InvalidVoteValue('Vote must be an integer between 1 and 10') payload = {'vote': int(vote)} - url = endpoints.voted_shows.format('/' + str(maze_id)) + url = endpoints.voted_shows.format(f'/{maze_id}') q = self._endpoint_premium_put(url, payload=payload) if not q: - raise ShowNotFound('Show with ID {} does not exist'.format(maze_id)) + raise ShowNotFound(f'Show with ID {maze_id} does not exist') def get_voted_episodes(self): + # type: (...) -> List[VotedEpisode] url = endpoints.voted_episodes.format('/') q = self._endpoint_premium_get(url) if q: @@ -1224,34 +1167,35 @@ class TVmaze(object): raise NoVotedEpisodes('You have not voted for any episodes yet') def get_voted_episode(self, episode_id): - path = '/{}'.format(episode_id) - url = endpoints.voted_episodes.format(path) + # type: (int) -> VotedEpisode + url = endpoints.voted_episodes.format(f'/{episode_id}') q = self._endpoint_premium_get(url) if q: return VotedEpisode(q) else: - raise EpisodeNotVotedFor('Episode with ID {} not voted for'.format(episode_id)) + raise EpisodeNotVotedFor(f'Episode with ID {episode_id} not voted for') def remove_episode_vote(self, episode_id): - path = '/{}'.format(episode_id) - url = endpoints.voted_episodes.format(path) + # type: (int) -> None + url = endpoints.voted_episodes.format(f'/{episode_id}') q = self._endpoint_premium_delete(url) if not q: - raise EpisodeNotVotedFor('Episode with ID {} was not voted for'.format(episode_id)) + raise EpisodeNotVotedFor(f'Episode with ID {episode_id} was not voted for') def vote_episode(self, episode_id, vote): + # type: (int, int) -> None if not 1 <= vote <= 10: raise InvalidVoteValue('Vote must be an integer between 1 and 10') payload = {'vote': int(vote)} - path = '/{}'.format(episode_id) - url = endpoints.voted_episodes.format(path) + url = endpoints.voted_episodes.format(f'/{episode_id}') q = self._endpoint_premium_put(url, payload=payload) if not q: - raise EpisodeNotFound('Episode 
with ID {} does not exist'.format(episode_id)) + raise EpisodeNotFound(f'Episode with ID {episode_id} does not exist') # Return list of Show objects def get_show_list(show_name): + # type: (str) -> List[Show] """ Return list of Show objects from the TVmaze "Show Search" endpoint @@ -1266,6 +1210,7 @@ def get_show_list(show_name): # Get list of Person objects def get_people(name): + # type: (str) -> List[Person] """ Return list of Person objects from the TVmaze "People Search" endpoint :param name: Name of person @@ -1277,6 +1222,7 @@ def get_people(name): def show_search(show): + # type: (str) -> List[Show] _show = _url_quote(show) url = endpoints.show_search.format(_show) q = TVmaze.endpoint_standard_get(url) @@ -1288,10 +1234,11 @@ def show_search(show): shows.append(show) return shows else: - raise ShowNotFound('Show {0} not found'.format(show)) + raise ShowNotFound(f'Show {show} not found') def show_single_search(show, embed=None): + # type: (str, str) -> Show _show = _url_quote(show) url = _embed_url(endpoints.show_single_search.format(_show), embed, [None, 'episodes', 'cast', 'previousepisode', 'nextepisode'], '&') @@ -1299,47 +1246,52 @@ def show_single_search(show, embed=None): if q: return Show(q) else: - raise ShowNotFound('show name "{0}" not found'.format(show)) + raise ShowNotFound(f'show name "{show}" not found') def lookup_tvrage(tvrage_id): + # type: (Union[int, str]) -> Show url = endpoints.lookup_tvrage.format(tvrage_id) q = TVmaze.endpoint_standard_get(url) if q: return Show(q) else: - raise IDNotFound('TVRage id {0} not found'.format(tvrage_id)) + raise IDNotFound(f'TVRage id {tvrage_id} not found') def lookup_tvdb(tvdb_id): + # type: (Union[int, str]) -> Show url = endpoints.lookup_tvdb.format(tvdb_id) q = TVmaze.endpoint_standard_get(url) if q: return Show(q) else: - raise IDNotFound('TVDB ID {0} not found'.format(tvdb_id)) + raise IDNotFound(f'TVDB ID {tvdb_id} not found') def lookup_imdb(imdb_id): + # type: (str) -> Show url = 
endpoints.lookup_imdb.format(imdb_id) q = TVmaze.endpoint_standard_get(url) if q: return Show(q) else: - raise IDNotFound('IMDB ID {0} not found'.format(imdb_id)) + raise IDNotFound(f'IMDB ID {imdb_id} not found') def get_schedule(country='US', date=str(datetime.today().date())): + # type: (str, str) -> List[Episode] url = endpoints.get_schedule.format(country, date) q = TVmaze.endpoint_standard_get(url) if q: return [Episode(episode) for episode in q] else: - raise ScheduleNotFound('Schedule for country {0} at date {1} not found'.format(country, date)) + raise ScheduleNotFound(f'Schedule for country {country} at date {date} not found') # ALL known future episodes, several MB large, cached for 24 hours def get_full_schedule(): + # type: (...) -> List[Episode] url = endpoints.get_full_schedule q = TVmaze.endpoint_standard_get(url) if q: @@ -1349,29 +1301,32 @@ def get_full_schedule(): def show_main_info(maze_id, embed=None): + # type: (int, str) -> Show url = _embed_url(endpoints.show_main_info.format(maze_id), embed, [None, 'episodes', 'cast', 'previousepisode', 'nextepisode', 'episodeswithspecials'], '?') q = TVmaze.endpoint_standard_get(url) if q: return Show(q) else: - raise IDNotFound('Maze id {0} not found'.format(maze_id)) + raise IDNotFound(f'Maze id {maze_id} not found') def episode_list(maze_id, specials=None, raise_error=True, show=None): + # type: (int, bool, bool, Show) -> List[Episode] if specials: - url = endpoints.episode_list.format(maze_id) + '?specials=1' + url = f'{endpoints.episode_list.format(maze_id)}?specials=1' else: url = endpoints.episode_list.format(maze_id) q = TVmaze.endpoint_standard_get(url) if type(q) == list: return [Episode(episode, show) for episode in q] elif raise_error: - raise IDNotFound('Maze id {0} not found'.format(maze_id)) + raise IDNotFound(f'Maze id {maze_id} not found') return [] def episode_by_number(maze_id, season_number, episode_number): + # type: (int, int, int) -> Episode url =
endpoints.episode_by_number.format(maze_id, season_number, episode_number) @@ -1379,13 +1334,11 @@ def episode_by_number(maze_id, season_number, episode_number): if q: return Episode(q) else: - raise EpisodeNotFound( - 'Couldn\'t find season {0} episode {1} for TVmaze ID {2}'.format(season_number, - episode_number, - maze_id)) + raise EpisodeNotFound(f'Couldn\'t find season {season_number} episode {episode_number} for TVmaze ID {maze_id}') def episodes_by_date(maze_id, airdate): + # type: (int, str) -> List[Episode] try: datetime.strptime(airdate, '%Y-%m-%d') except ValueError: @@ -1395,21 +1348,22 @@ def episodes_by_date(maze_id, airdate): if q: return [Episode(episode) for episode in q] else: - raise NoEpisodesForAirdate( - 'Couldn\'t find an episode airing {0} for TVmaze ID {1}'.format(airdate, maze_id)) + raise NoEpisodesForAirdate(f'Couldn\'t find an episode airing {airdate} for TVmaze ID {maze_id}') def show_cast(maze_id, raise_error=True): + # type: (int, bool) -> Cast url = endpoints.show_cast.format(maze_id) q = TVmaze.endpoint_standard_get(url) if q: return Cast(q) elif raise_error: - raise CastNotFound('Couldn\'nt find show cast for TVmaze ID {0}'.format(maze_id)) + raise CastNotFound(f'Couldn\'nt find show cast for TVmaze ID {maze_id}') return Cast({}) def show_index(page=1): + # type: (int) -> List[Show] url = endpoints.show_index.format(page) q = TVmaze.endpoint_standard_get(url) if q: @@ -1426,70 +1380,75 @@ def people_search(person): if q: return [Person(person) for person in q] else: - raise PersonNotFound('Couldn\'t find person {0}'.format(person)) + raise PersonNotFound(f'Couldn\'t find person {person}') def person_main_info(person_id, embed=None): - # type: (integer_types, AnyStr) -> Person + # type: (int, AnyStr) -> Person url = _embed_url(endpoints.person_main_info.format(person_id), embed, [None, 'castcredits', 'crewcredits'], '?') q = TVmaze.endpoint_standard_get(url) if q: return Person(q) else: - raise PersonNotFound('Couldn\'t find person 
{0}'.format(person_id)) + raise PersonNotFound(f'Couldn\'t find person {person_id}') def person_cast_credits(person_id, embed=None, raise_error=True): + # type: (int, str, bool) -> List[CastCredit] url = _embed_url(endpoints.person_cast_credits.format(person_id), embed, [None, 'show', 'character'], '?') q = TVmaze.endpoint_standard_get(url) if q: return [CastCredit(credit) for credit in q] elif raise_error: - raise CreditsNotFound('Couldn\'t find cast credits for person ID {0}'.format(person_id)) + raise CreditsNotFound(f'Couldn\'t find cast credits for person ID {person_id}') return [] def person_guestcast_credits(person_id, embed=None, raise_error=True): + # type: (int, str, bool) -> List[CastCredit] url = _embed_url(endpoints.person_guestcast_credits.format(person_id), embed, [None, 'episode', 'character'], '?') q = TVmaze.endpoint_standard_get(url) if q: return [CastCredit(credit) for credit in q] elif raise_error: - raise CreditsNotFound('Couldn\'t find cast credits for person ID {0}'.format(person_id)) + raise CreditsNotFound(f'Couldn\'t find cast credits for person ID {person_id}') return [] def person_crew_credits(person_id, embed=None, raise_error=True): + # type: (int, str, bool) -> List[CrewCredit] url = _embed_url(endpoints.person_crew_credits.format(person_id), embed, [None, 'show'], '?') q = TVmaze.endpoint_standard_get(url) if q: return [CrewCredit(credit) for credit in q] elif raise_error: - raise CreditsNotFound('Couldn\'t find crew credits for person ID {0}'.format(person_id)) + raise CreditsNotFound(f'Couldn\'t find crew credits for person ID {person_id}') return [] def get_show_crew(maze_id, raise_error=True): + # type: (int, bool) -> List[Crew] url = endpoints.show_crew.format(maze_id) q = TVmaze.endpoint_standard_get(url) if q: return [Crew(crew) for crew in (isinstance(q, list) and q[:5]) or []] elif raise_error: - raise CrewNotFound('Couldn\'t find crew for TVmaze ID {}'.format(maze_id)) + raise CrewNotFound(f'Couldn\'t find crew for TVmaze 
ID {maze_id}') return [] def get_show_images(maze_id, raise_error=True): + # type: (int, bool) -> List[Image] url = endpoints.show_images.format(maze_id) q = TVmaze.endpoint_standard_get(url) if q: return [Image(img) for img in q] elif raise_error: - raise ShowImagesNotFound('Couldn\'t find images for TVmaze ID {}'.format(maze_id)) + raise ShowImagesNotFound(f'Couldn\'t find images for TVmaze ID {maze_id}') return [] @@ -1511,16 +1470,18 @@ def show_updates(since=None): def show_akas(maze_id, raise_error=True): + # type: (int, bool) -> List[AKA] url = endpoints.show_akas.format(maze_id) q = TVmaze.endpoint_standard_get(url) if q: return [AKA(aka) for aka in q] elif raise_error: - raise AKASNotFound('Couldn\'t find AKA\'s for TVmaze ID {0}'.format(maze_id)) + raise AKASNotFound(f'Couldn\'t find AKA\'s for TVmaze ID {maze_id}') return [] def show_seasons(maze_id, raise_error=True, show=None): + # type: (int, bool, Show) -> Dict[int, Season] url = endpoints.show_seasons.format(maze_id) q = TVmaze.endpoint_standard_get(url) if q: @@ -1529,45 +1490,49 @@ def show_seasons(maze_id, raise_error=True, show=None): season_dict[season['number']] = Season(season, show=show, season_number=season['number']) return season_dict elif raise_error: - raise SeasonNotFound('Couldn\'t find Season\'s for TVmaze ID {0}'.format(maze_id)) + raise SeasonNotFound(f'Couldn\'t find Season\'s for TVmaze ID {maze_id}') return {} def season_by_id(season_id, embed=None): + # type: (int, str) -> Season url = _embed_url(endpoints.season_by_id.format(season_id), embed, [None, 'episodes'], '?') q = TVmaze.endpoint_standard_get(url) if q: return Season(q) else: - raise SeasonNotFound('Couldn\'t find Season with ID {0}'.format(season_id)) + raise SeasonNotFound(f'Couldn\'t find Season with ID {season_id}') def episode_by_id(episode_id, show=None, raise_error=True, embed=None): + # type: (int, Show, bool, str) -> Episode url = _embed_url(endpoints.episode_by_id.format(episode_id), embed, [None, 'show', 
'guestcast', 'guestcrew'], '?') q = TVmaze.endpoint_standard_get(url) if q: return Episode(q, show=show) elif raise_error: - raise EpisodeNotFound('Couldn\'t find Episode with ID {0}'.format(episode_id)) + raise EpisodeNotFound(f'Couldn\'t find Episode with ID {episode_id}') def episode_guestcast_credits(episode_id, raise_error=True): + # type: (int, bool) -> List[CastCredit] url = endpoints.episode_guestcast.format(episode_id) q = TVmaze.endpoint_standard_get(url) if q: return [CastCredit(credit) for credit in q] elif raise_error: - raise CreditsNotFound('Couldn\'t find cast credits for episode ID {0}'.format(episode_id)) + raise CreditsNotFound(f'Couldn\'t find cast credits for episode ID {episode_id}') return [] def episode_crew_credits(episode_id, raise_error=True): + # type: (int, bool) -> List[CrewCredit] url = endpoints.episode_guestcrew.format(episode_id) q = TVmaze.endpoint_standard_get(url) if q: return [CrewCredit(credit) for credit in q] elif raise_error: - raise CreditsNotFound('Couldn\'t find crew credits for episode ID {0}'.format(episode_id)) + raise CreditsNotFound(f'Couldn\'t find crew credits for episode ID {episode_id}') return [] diff --git a/lib/pytz/__init__.py b/lib/pytz/__init__.py index 11d2e2ca..48e6c369 100644 --- a/lib/pytz/__init__.py +++ b/lib/pytz/__init__.py @@ -22,8 +22,8 @@ from pytz.tzfile import build_tzinfo # The IANA (nee Olson) database is updated several times a year. -OLSON_VERSION = '2022g' -VERSION = '2022.7.1' # pip compatible version number. +OLSON_VERSION = '2023c' +VERSION = '2023.3.post1' # pip compatible version number. 
__version__ = VERSION OLSEN_VERSION = OLSON_VERSION # Old releases had this misspelling diff --git a/lib/pytz/tzinfo.py b/lib/pytz/tzinfo.py index 725978d5..49b5c3fe 100644 --- a/lib/pytz/tzinfo.py +++ b/lib/pytz/tzinfo.py @@ -24,7 +24,8 @@ def memorized_timedelta(seconds): _timedelta_cache[seconds] = delta return delta -_epoch = datetime.utcfromtimestamp(0) + +_epoch = datetime(1970, 1, 1, 0, 0) # datetime.utcfromtimestamp(0) _datetime_cache = {0: _epoch} @@ -33,12 +34,13 @@ def memorized_datetime(seconds): try: return _datetime_cache[seconds] except KeyError: - # NB. We can't just do datetime.utcfromtimestamp(seconds) as this - # fails with negative values under Windows (Bug #90096) + # NB. We can't just do datetime.fromtimestamp(seconds, tz=timezone.utc).replace(tzinfo=None) + # as this fails with negative values under Windows (Bug #90096) dt = _epoch + timedelta(seconds=seconds) _datetime_cache[seconds] = dt return dt + _ttinfo_cache = {} @@ -55,6 +57,7 @@ def memorized_ttinfo(*args): _ttinfo_cache[args] = ttinfo return ttinfo + _notime = memorized_timedelta(0) @@ -355,7 +358,7 @@ class DstTzInfo(BaseTzInfo): is_dst=False) + timedelta(hours=6) # If we get this far, we have multiple possible timezones - this - # is an ambiguous case occuring during the end-of-DST transition. + # is an ambiguous case occurring during the end-of-DST transition. 
# If told to be strict, raise an exception since we have an # ambiguous case diff --git a/lib/pytz/zoneinfo/Africa/Cairo b/lib/pytz/zoneinfo/Africa/Cairo index d3f81962..dd538c65 100644 Binary files a/lib/pytz/zoneinfo/Africa/Cairo and b/lib/pytz/zoneinfo/Africa/Cairo differ diff --git a/lib/pytz/zoneinfo/Africa/Casablanca b/lib/pytz/zoneinfo/Africa/Casablanca index 17e0d1b8..d39016b8 100644 Binary files a/lib/pytz/zoneinfo/Africa/Casablanca and b/lib/pytz/zoneinfo/Africa/Casablanca differ diff --git a/lib/pytz/zoneinfo/Africa/El_Aaiun b/lib/pytz/zoneinfo/Africa/El_Aaiun index 64f1b769..066fbed0 100644 Binary files a/lib/pytz/zoneinfo/Africa/El_Aaiun and b/lib/pytz/zoneinfo/Africa/El_Aaiun differ diff --git a/lib/pytz/zoneinfo/America/Godthab b/lib/pytz/zoneinfo/America/Godthab index 883dfa05..adb7934a 100644 Binary files a/lib/pytz/zoneinfo/America/Godthab and b/lib/pytz/zoneinfo/America/Godthab differ diff --git a/lib/pytz/zoneinfo/America/Nuuk b/lib/pytz/zoneinfo/America/Nuuk index 883dfa05..adb7934a 100644 Binary files a/lib/pytz/zoneinfo/America/Nuuk and b/lib/pytz/zoneinfo/America/Nuuk differ diff --git a/lib/pytz/zoneinfo/America/Yellowknife b/lib/pytz/zoneinfo/America/Yellowknife index efc9ab03..cd78a6f8 100644 Binary files a/lib/pytz/zoneinfo/America/Yellowknife and b/lib/pytz/zoneinfo/America/Yellowknife differ diff --git a/lib/pytz/zoneinfo/Asia/Gaza b/lib/pytz/zoneinfo/Asia/Gaza index 22c47593..c9b2ff90 100644 Binary files a/lib/pytz/zoneinfo/Asia/Gaza and b/lib/pytz/zoneinfo/Asia/Gaza differ diff --git a/lib/pytz/zoneinfo/Asia/Hebron b/lib/pytz/zoneinfo/Asia/Hebron index 0ee46480..64194fd8 100644 Binary files a/lib/pytz/zoneinfo/Asia/Hebron and b/lib/pytz/zoneinfo/Asia/Hebron differ diff --git a/lib/pytz/zoneinfo/Egypt b/lib/pytz/zoneinfo/Egypt index d3f81962..dd538c65 100644 Binary files a/lib/pytz/zoneinfo/Egypt and b/lib/pytz/zoneinfo/Egypt differ diff --git a/lib/pytz/zoneinfo/Europe/Kirov b/lib/pytz/zoneinfo/Europe/Kirov index 61e17f2b..0cfb956b 
100644 Binary files a/lib/pytz/zoneinfo/Europe/Kirov and b/lib/pytz/zoneinfo/Europe/Kirov differ diff --git a/lib/pytz/zoneinfo/Europe/Volgograd b/lib/pytz/zoneinfo/Europe/Volgograd index 5539bac6..9d51a38c 100644 Binary files a/lib/pytz/zoneinfo/Europe/Volgograd and b/lib/pytz/zoneinfo/Europe/Volgograd differ diff --git a/lib/pytz/zoneinfo/iso3166.tab b/lib/pytz/zoneinfo/iso3166.tab index 911af5e8..be3348d1 100644 --- a/lib/pytz/zoneinfo/iso3166.tab +++ b/lib/pytz/zoneinfo/iso3166.tab @@ -238,7 +238,7 @@ SY Syria SZ Eswatini (Swaziland) TC Turks & Caicos Is TD Chad -TF French Southern Territories +TF French S. Terr. TG Togo TH Thailand TJ Tajikistan diff --git a/lib/pytz/zoneinfo/leapseconds b/lib/pytz/zoneinfo/leapseconds index 6826ac4a..a6a170aa 100644 --- a/lib/pytz/zoneinfo/leapseconds +++ b/lib/pytz/zoneinfo/leapseconds @@ -72,11 +72,11 @@ Leap 2016 Dec 31 23:59:60 + S # Any additional leap seconds will come after this. # This Expires line is commented out for now, # so that pre-2020a zic implementations do not reject this file. 
-#Expires 2023 Jun 28 00:00:00 +#Expires 2023 Dec 28 00:00:00 # POSIX timestamps for the data in this file: #updated 1467936000 (2016-07-08 00:00:00 UTC) -#expires 1687910400 (2023-06-28 00:00:00 UTC) +#expires 1703721600 (2023-12-28 00:00:00 UTC) -# Updated through IERS Bulletin C64 -# File expires on: 28 June 2023 +# Updated through IERS Bulletin C65 +# File expires on: 28 December 2023 diff --git a/lib/pytz/zoneinfo/tzdata.zi b/lib/pytz/zoneinfo/tzdata.zi index 7c88530c..23d99be4 100644 --- a/lib/pytz/zoneinfo/tzdata.zi +++ b/lib/pytz/zoneinfo/tzdata.zi @@ -75,6 +75,8 @@ R K 2014 o - May 15 24 1 S R K 2014 o - Jun 26 24 0 - R K 2014 o - Jul 31 24 1 S R K 2014 o - S lastTh 24 0 - +R K 2023 ma - Ap lastF 0 1 S +R K 2023 ma - O lastTh 24 0 - Z Africa/Cairo 2:5:9 - LMT 1900 O 2 K EE%sT Z Africa/Bissau -1:2:20 - LMT 1912 Ja 1 1u @@ -172,7 +174,7 @@ R M 2021 o - May 16 2 0 - R M 2022 o - Mar 27 3 -1 - R M 2022 o - May 8 2 0 - R M 2023 o - Mar 19 3 -1 - -R M 2023 o - Ap 30 2 0 - +R M 2023 o - Ap 23 2 0 - R M 2024 o - Mar 10 3 -1 - R M 2024 o - Ap 14 2 0 - R M 2025 o - F 23 3 -1 - @@ -188,7 +190,7 @@ R M 2029 o - F 18 2 0 - R M 2029 o - D 30 3 -1 - R M 2030 o - F 10 2 0 - R M 2030 o - D 22 3 -1 - -R M 2031 o - F 2 2 0 - +R M 2031 o - Ja 26 2 0 - R M 2031 o - D 14 3 -1 - R M 2032 o - Ja 18 2 0 - R M 2032 o - N 28 3 -1 - @@ -204,7 +206,7 @@ R M 2036 o - N 23 2 0 - R M 2037 o - O 4 3 -1 - R M 2037 o - N 15 2 0 - R M 2038 o - S 26 3 -1 - -R M 2038 o - N 7 2 0 - +R M 2038 o - O 31 2 0 - R M 2039 o - S 18 3 -1 - R M 2039 o - O 23 2 0 - R M 2040 o - S 2 3 -1 - @@ -220,7 +222,7 @@ R M 2044 o - Au 28 2 0 - R M 2045 o - Jul 9 3 -1 - R M 2045 o - Au 20 2 0 - R M 2046 o - Jul 1 3 -1 - -R M 2046 o - Au 12 2 0 - +R M 2046 o - Au 5 2 0 - R M 2047 o - Jun 23 3 -1 - R M 2047 o - Jul 28 2 0 - R M 2048 o - Jun 7 3 -1 - @@ -236,7 +238,7 @@ R M 2052 o - Jun 2 2 0 - R M 2053 o - Ap 13 3 -1 - R M 2053 o - May 25 2 0 - R M 2054 o - Ap 5 3 -1 - -R M 2054 o - May 17 2 0 - +R M 2054 o - May 10 2 
0 - R M 2055 o - Mar 28 3 -1 - R M 2055 o - May 2 2 0 - R M 2056 o - Mar 12 3 -1 - @@ -252,7 +254,7 @@ R M 2060 o - Mar 7 2 0 - R M 2061 o - Ja 16 3 -1 - R M 2061 o - F 27 2 0 - R M 2062 o - Ja 8 3 -1 - -R M 2062 o - F 19 2 0 - +R M 2062 o - F 12 2 0 - R M 2062 o - D 31 3 -1 - R M 2063 o - F 4 2 0 - R M 2063 o - D 16 3 -1 - @@ -268,7 +270,7 @@ R M 2067 o - D 11 2 0 - R M 2068 o - O 21 3 -1 - R M 2068 o - D 2 2 0 - R M 2069 o - O 13 3 -1 - -R M 2069 o - N 24 2 0 - +R M 2069 o - N 17 2 0 - R M 2070 o - O 5 3 -1 - R M 2070 o - N 9 2 0 - R M 2071 o - S 20 3 -1 - @@ -284,7 +286,7 @@ R M 2075 o - S 15 2 0 - R M 2076 o - Jul 26 3 -1 - R M 2076 o - S 6 2 0 - R M 2077 o - Jul 18 3 -1 - -R M 2077 o - Au 29 2 0 - +R M 2077 o - Au 22 2 0 - R M 2078 o - Jul 10 3 -1 - R M 2078 o - Au 14 2 0 - R M 2079 o - Jun 25 3 -1 - @@ -294,13 +296,13 @@ R M 2080 o - Jul 21 2 0 - R M 2081 o - Jun 1 3 -1 - R M 2081 o - Jul 13 2 0 - R M 2082 o - May 24 3 -1 - -R M 2082 o - Jul 5 2 0 - +R M 2082 o - Jun 28 2 0 - R M 2083 o - May 16 3 -1 - R M 2083 o - Jun 20 2 0 - R M 2084 o - Ap 30 3 -1 - R M 2084 o - Jun 11 2 0 - R M 2085 o - Ap 22 3 -1 - -R M 2085 o - Jun 3 2 0 - +R M 2085 o - May 27 2 0 - R M 2086 o - Ap 14 3 -1 - R M 2086 o - May 19 2 0 - R M 2087 o - Mar 30 3 -1 - @@ -997,8 +999,86 @@ R P 2020 2021 - Mar Sa<=30 0 1 S R P 2020 o - O 24 1 0 - R P 2021 o - O 29 1 0 - R P 2022 o - Mar 27 0 1 S -R P 2022 ma - O Sa<=30 2 0 - -R P 2023 ma - Mar Sa<=30 2 1 S +R P 2022 2035 - O Sa<=30 2 0 - +R P 2023 o - Ap 29 2 1 S +R P 2024 o - Ap 13 2 1 S +R P 2025 o - Ap 5 2 1 S +R P 2026 2054 - Mar Sa<=30 2 1 S +R P 2036 o - O 18 2 0 - +R P 2037 o - O 10 2 0 - +R P 2038 o - S 25 2 0 - +R P 2039 o - S 17 2 0 - +R P 2039 o - O 22 2 1 S +R P 2039 2067 - O Sa<=30 2 0 - +R P 2040 o - S 1 2 0 - +R P 2040 o - O 13 2 1 S +R P 2041 o - Au 24 2 0 - +R P 2041 o - S 28 2 1 S +R P 2042 o - Au 16 2 0 - +R P 2042 o - S 20 2 1 S +R P 2043 o - Au 1 2 0 - +R P 2043 o - S 12 2 1 S +R P 2044 o - Jul 23 2 0 - +R P 2044 o - Au 27 2 
1 S +R P 2045 o - Jul 15 2 0 - +R P 2045 o - Au 19 2 1 S +R P 2046 o - Jun 30 2 0 - +R P 2046 o - Au 11 2 1 S +R P 2047 o - Jun 22 2 0 - +R P 2047 o - Jul 27 2 1 S +R P 2048 o - Jun 6 2 0 - +R P 2048 o - Jul 18 2 1 S +R P 2049 o - May 29 2 0 - +R P 2049 o - Jul 3 2 1 S +R P 2050 o - May 21 2 0 - +R P 2050 o - Jun 25 2 1 S +R P 2051 o - May 6 2 0 - +R P 2051 o - Jun 17 2 1 S +R P 2052 o - Ap 27 2 0 - +R P 2052 o - Jun 1 2 1 S +R P 2053 o - Ap 12 2 0 - +R P 2053 o - May 24 2 1 S +R P 2054 o - Ap 4 2 0 - +R P 2054 o - May 16 2 1 S +R P 2055 o - May 1 2 1 S +R P 2056 o - Ap 22 2 1 S +R P 2057 o - Ap 7 2 1 S +R P 2058 ma - Mar Sa<=30 2 1 S +R P 2068 o - O 20 2 0 - +R P 2069 o - O 12 2 0 - +R P 2070 o - O 4 2 0 - +R P 2071 o - S 19 2 0 - +R P 2072 o - S 10 2 0 - +R P 2072 o - O 15 2 1 S +R P 2073 o - S 2 2 0 - +R P 2073 o - O 7 2 1 S +R P 2074 o - Au 18 2 0 - +R P 2074 o - S 29 2 1 S +R P 2075 o - Au 10 2 0 - +R P 2075 o - S 14 2 1 S +R P 2075 ma - O Sa<=30 2 0 - +R P 2076 o - Jul 25 2 0 - +R P 2076 o - S 5 2 1 S +R P 2077 o - Jul 17 2 0 - +R P 2077 o - Au 28 2 1 S +R P 2078 o - Jul 9 2 0 - +R P 2078 o - Au 13 2 1 S +R P 2079 o - Jun 24 2 0 - +R P 2079 o - Au 5 2 1 S +R P 2080 o - Jun 15 2 0 - +R P 2080 o - Jul 20 2 1 S +R P 2081 o - Jun 7 2 0 - +R P 2081 o - Jul 12 2 1 S +R P 2082 o - May 23 2 0 - +R P 2082 o - Jul 4 2 1 S +R P 2083 o - May 15 2 0 - +R P 2083 o - Jun 19 2 1 S +R P 2084 o - Ap 29 2 0 - +R P 2084 o - Jun 10 2 1 S +R P 2085 o - Ap 21 2 0 - +R P 2085 o - Jun 2 2 1 S +R P 2086 o - Ap 13 2 0 - +R P 2086 o - May 18 2 1 S Z Asia/Gaza 2:17:52 - LMT 1900 O 2 Z EET/EEST 1948 May 15 2 K EE%sT 1967 Jun 5 @@ -1754,8 +1834,8 @@ Z America/Scoresbysund -1:27:52 - LMT 1916 Jul 28 -1 E -01/+00 Z America/Nuuk -3:26:56 - LMT 1916 Jul 28 -3 - -03 1980 Ap 6 2 --3 E -03/-02 2023 Mar 25 22 --2 - -02 +-3 E -03/-02 2023 O 29 1u +-2 E -02/-01 Z America/Thule -4:35:8 - LMT 1916 Jul 28 -4 Th A%sT Z Europe/Tallinn 1:39 - LMT 1880 @@ -2175,13 +2255,13 @@ Z Europe/Volgograd 2:57:40 - 
LMT 1920 Ja 3 3 - +03 1930 Jun 21 4 - +04 1961 N 11 4 R +04/+05 1988 Mar 27 2s -3 R +03/+04 1991 Mar 31 2s +3 R MSK/MSD 1991 Mar 31 2s 4 - +04 1992 Mar 29 2s -3 R +03/+04 2011 Mar 27 2s -4 - +04 2014 O 26 2s -3 - +03 2018 O 28 2s +3 R MSK/MSD 2011 Mar 27 2s +4 - MSK 2014 O 26 2s +3 - MSK 2018 O 28 2s 4 - +04 2020 D 27 2s -3 - +03 +3 - MSK Z Europe/Saratov 3:4:18 - LMT 1919 Jul 1 0u 3 - +03 1930 Jun 21 4 R +04/+05 1988 Mar 27 2s @@ -2194,11 +2274,11 @@ Z Europe/Saratov 3:4:18 - LMT 1919 Jul 1 0u Z Europe/Kirov 3:18:48 - LMT 1919 Jul 1 0u 3 - +03 1930 Jun 21 4 R +04/+05 1989 Mar 26 2s -3 R +03/+04 1991 Mar 31 2s +3 R MSK/MSD 1991 Mar 31 2s 4 - +04 1992 Mar 29 2s -3 R +03/+04 2011 Mar 27 2s -4 - +04 2014 O 26 2s -3 - +03 +3 R MSK/MSD 2011 Mar 27 2s +4 - MSK 2014 O 26 2s +3 - MSK Z Europe/Samara 3:20:20 - LMT 1919 Jul 1 0u 3 - +03 1930 Jun 21 4 - +04 1935 Ja 27 @@ -3070,9 +3150,6 @@ Z America/Cambridge_Bay 0 - -00 1920 -5 - EST 2000 N 5 -6 - CST 2001 Ap 1 3 -7 C M%sT -Z America/Yellowknife 0 - -00 1935 --7 Y M%sT 1980 --7 C M%sT Z America/Inuvik 0 - -00 1953 -8 Y P%sT 1979 Ap lastSu 2 -7 Y M%sT 1980 @@ -4171,6 +4248,7 @@ L America/Argentina/Cordoba America/Rosario L America/Tijuana America/Santa_Isabel L America/Denver America/Shiprock L America/Toronto America/Thunder_Bay +L America/Edmonton America/Yellowknife L Pacific/Auckland Antarctica/South_Pole L Asia/Shanghai Asia/Chongqing L Asia/Shanghai Asia/Harbin diff --git a/lib/pytz/zoneinfo/zone.tab b/lib/pytz/zoneinfo/zone.tab index 6e5adb9f..dbcb6179 100644 --- a/lib/pytz/zoneinfo/zone.tab +++ b/lib/pytz/zoneinfo/zone.tab @@ -121,9 +121,8 @@ CA +744144-0944945 America/Resolute Central - NU (Resolute) CA +624900-0920459 America/Rankin_Inlet Central - NU (central) CA +5024-10439 America/Regina CST - SK (most areas) CA +5017-10750 America/Swift_Current CST - SK (midwest) -CA +5333-11328 America/Edmonton Mountain - AB; BC (E); SK (W) +CA +5333-11328 America/Edmonton Mountain - AB; BC (E); NT (E); SK (W) CA 
+690650-1050310 America/Cambridge_Bay Mountain - NU (west) -CA +6227-11421 America/Yellowknife Mountain - NT (central) CA +682059-1334300 America/Inuvik Mountain - NT (west) CA +4906-11631 America/Creston MST - BC (Creston) CA +5546-12014 America/Dawson_Creek MST - BC (Dawson Cr, Ft St John) @@ -139,7 +138,7 @@ CG -0416+01517 Africa/Brazzaville CH +4723+00832 Europe/Zurich CI +0519-00402 Africa/Abidjan CK -2114-15946 Pacific/Rarotonga -CL -3327-07040 America/Santiago Chile (most areas) +CL -3327-07040 America/Santiago most of Chile CL -5309-07055 America/Punta_Arenas Region of Magallanes CL -2709-10926 Pacific/Easter Easter Island CM +0403+00942 Africa/Douala @@ -151,10 +150,10 @@ CU +2308-08222 America/Havana CV +1455-02331 Atlantic/Cape_Verde CW +1211-06900 America/Curacao CX -1025+10543 Indian/Christmas -CY +3510+03322 Asia/Nicosia Cyprus (most areas) +CY +3510+03322 Asia/Nicosia most of Cyprus CY +3507+03357 Asia/Famagusta Northern Cyprus CZ +5005+01426 Europe/Prague -DE +5230+01322 Europe/Berlin Germany (most areas) +DE +5230+01322 Europe/Berlin most of Germany DE +4742+00841 Europe/Busingen Busingen DJ +1136+04309 Africa/Djibouti DK +5540+01235 Europe/Copenhagen @@ -187,7 +186,7 @@ GF +0456-05220 America/Cayenne GG +492717-0023210 Europe/Guernsey GH +0533-00013 Africa/Accra GI +3608-00521 Europe/Gibraltar -GL +6411-05144 America/Nuuk Greenland (most areas) +GL +6411-05144 America/Nuuk most of Greenland GL +7646-01840 America/Danmarkshavn National Park (east coast) GL +7029-02158 America/Scoresbysund Scoresbysund/Ittoqqortoormiit GL +7634-06847 America/Thule Thule/Pituffik @@ -235,7 +234,7 @@ KP +3901+12545 Asia/Pyongyang KR +3733+12658 Asia/Seoul KW +2920+04759 Asia/Kuwait KY +1918-08123 America/Cayman -KZ +4315+07657 Asia/Almaty Kazakhstan (most areas) +KZ +4315+07657 Asia/Almaty most of Kazakhstan KZ +4448+06528 Asia/Qyzylorda Qyzylorda/Kyzylorda/Kzyl-Orda KZ +5312+06337 Asia/Qostanay Qostanay/Kostanay/Kustanay KZ +5017+05710 Asia/Aqtobe Aqtobe/Aktobe @@ 
-259,12 +258,12 @@ MD +4700+02850 Europe/Chisinau ME +4226+01916 Europe/Podgorica MF +1804-06305 America/Marigot MG -1855+04731 Indian/Antananarivo -MH +0709+17112 Pacific/Majuro Marshall Islands (most areas) +MH +0709+17112 Pacific/Majuro most of Marshall Islands MH +0905+16720 Pacific/Kwajalein Kwajalein MK +4159+02126 Europe/Skopje ML +1239-00800 Africa/Bamako MM +1647+09610 Asia/Yangon -MN +4755+10653 Asia/Ulaanbaatar Mongolia (most areas) +MN +4755+10653 Asia/Ulaanbaatar most of Mongolia MN +4801+09139 Asia/Hovd Bayan-Olgiy, Govi-Altai, Hovd, Uvs, Zavkhan MN +4804+11430 Asia/Choibalsan Dornod, Sukhbaatar MO +221150+1133230 Asia/Macau @@ -302,7 +301,7 @@ NO +5955+01045 Europe/Oslo NP +2743+08519 Asia/Kathmandu NR -0031+16655 Pacific/Nauru NU -1901-16955 Pacific/Niue -NZ -3652+17446 Pacific/Auckland New Zealand (most areas) +NZ -3652+17446 Pacific/Auckland most of New Zealand NZ -4357-17633 Pacific/Chatham Chatham Islands OM +2336+05835 Asia/Muscat PA +0858-07932 America/Panama @@ -310,7 +309,7 @@ PE -1203-07703 America/Lima PF -1732-14934 Pacific/Tahiti Society Islands PF -0900-13930 Pacific/Marquesas Marquesas Islands PF -2308-13457 Pacific/Gambier Gambier Islands -PG -0930+14710 Pacific/Port_Moresby Papua New Guinea (most areas) +PG -0930+14710 Pacific/Port_Moresby most of Papua New Guinea PG -0613+15534 Pacific/Bougainville Bougainville PH +1435+12100 Asia/Manila PK +2452+06703 Asia/Karachi @@ -356,7 +355,7 @@ RU +4310+13156 Asia/Vladivostok MSK+07 - Amur River RU +643337+1431336 Asia/Ust-Nera MSK+07 - Oymyakonsky RU +5934+15048 Asia/Magadan MSK+08 - Magadan RU +4658+14242 Asia/Sakhalin MSK+08 - Sakhalin Island -RU +6728+15343 Asia/Srednekolymsk MSK+08 - Sakha (E); North Kuril Is +RU +6728+15343 Asia/Srednekolymsk MSK+08 - Sakha (E); N Kuril Is RU +5301+15839 Asia/Kamchatka MSK+09 - Kamchatka RU +6445+17729 Asia/Anadyr MSK+09 - Bering Sea RW -0157+03004 Africa/Kigali @@ -397,7 +396,7 @@ TT +1039-06131 America/Port_of_Spain TV -0831+17913 Pacific/Funafuti TW 
+2503+12130 Asia/Taipei TZ -0648+03917 Africa/Dar_es_Salaam -UA +5026+03031 Europe/Kyiv Ukraine (most areas) +UA +5026+03031 Europe/Kyiv most of Ukraine UG +0019+03225 Africa/Kampala UM +2813-17722 Pacific/Midway Midway Islands UM +1917+16637 Pacific/Wake Wake Island @@ -420,7 +419,7 @@ US +465042-1012439 America/North_Dakota/New_Salem Central - ND (Morton rural) US +471551-1014640 America/North_Dakota/Beulah Central - ND (Mercer) US +394421-1045903 America/Denver Mountain (most areas) US +433649-1161209 America/Boise Mountain - ID (south); OR (east) -US +332654-1120424 America/Phoenix MST - Arizona (except Navajo) +US +332654-1120424 America/Phoenix MST - AZ (except Navajo) US +340308-1181434 America/Los_Angeles Pacific US +611305-1495401 America/Anchorage Alaska (most areas) US +581807-1342511 America/Juneau Alaska - Juneau area @@ -428,7 +427,7 @@ US +571035-1351807 America/Sitka Alaska - Sitka area US +550737-1313435 America/Metlakatla Alaska - Annette Island US +593249-1394338 America/Yakutat Alaska - Yakutat US +643004-1652423 America/Nome Alaska (west) -US +515248-1763929 America/Adak Aleutian Islands +US +515248-1763929 America/Adak Alaska - western Aleutians US +211825-1575130 Pacific/Honolulu Hawaii UY -345433-0561245 America/Montevideo UZ +3940+06648 Asia/Samarkand Uzbekistan (west) diff --git a/lib/pytz/zoneinfo/zone1970.tab b/lib/pytz/zoneinfo/zone1970.tab index a9b36d36..1f1cecb8 100644 --- a/lib/pytz/zoneinfo/zone1970.tab +++ b/lib/pytz/zoneinfo/zone1970.tab @@ -18,7 +18,10 @@ # Please see the theory.html file for how these names are chosen. # If multiple timezones overlap a country, each has a row in the # table, with each column 1 containing the country code. -# 4. Comments; present if and only if a country has multiple timezones. +# 4. Comments; present if and only if countries have multiple timezones, +# and useful only for those countries. 
For example, the comments +# for the row with countries CH,DE,LI and name Europe/Zurich +# are useful only for DE, since CH and LI have no other timezones. # # If a timezone covers multiple countries, the most-populous city is used, # and that country is listed first in column 1; any other countries @@ -34,7 +37,7 @@ #country- #codes coordinates TZ comments AD +4230+00131 Europe/Andorra -AE,OM,RE,SC,TF +2518+05518 Asia/Dubai UAE, Oman, Réunion, Seychelles, Crozet, Scattered Is +AE,OM,RE,SC,TF +2518+05518 Asia/Dubai Crozet, Scattered Is AF +3431+06912 Asia/Kabul AL +4120+01950 Europe/Tirane AM +4011+04430 Asia/Yerevan @@ -45,7 +48,7 @@ AQ -6448-06406 Antarctica/Palmer Palmer AQ -6734-06808 Antarctica/Rothera Rothera AQ -720041+0023206 Antarctica/Troll Troll AR -3436-05827 America/Argentina/Buenos_Aires Buenos Aires (BA, CF) -AR -3124-06411 America/Argentina/Cordoba Argentina (most areas: CB, CC, CN, ER, FM, MN, SE, SF) +AR -3124-06411 America/Argentina/Cordoba most areas: CB, CC, CN, ER, FM, MN, SE, SF AR -2447-06525 America/Argentina/Salta Salta (SA, LP, NQ, RN) AR -2411-06518 America/Argentina/Jujuy Jujuy (JY) AR -2649-06513 America/Argentina/Tucuman Tucumán (TM) @@ -56,7 +59,7 @@ AR -3253-06849 America/Argentina/Mendoza Mendoza (MZ) AR -3319-06621 America/Argentina/San_Luis San Luis (SL) AR -5138-06913 America/Argentina/Rio_Gallegos Santa Cruz (SC) AR -5448-06818 America/Argentina/Ushuaia Tierra del Fuego (TF) -AS,UM -1416-17042 Pacific/Pago_Pago Samoa, Midway +AS,UM -1416-17042 Pacific/Pago_Pago Midway AT +4813+01620 Europe/Vienna AU -3133+15905 Australia/Lord_Howe Lord Howe Island AU -5430+15857 Antarctica/Macquarie Macquarie Island @@ -101,26 +104,25 @@ CA +4439-06336 America/Halifax Atlantic - NS (most areas); PE CA +4612-05957 America/Glace_Bay Atlantic - NS (Cape Breton) CA +4606-06447 America/Moncton Atlantic - New Brunswick CA +5320-06025 America/Goose_Bay Atlantic - Labrador (most areas) -CA,BS +4339-07923 America/Toronto Eastern - ON, QC (most areas), 
Bahamas +CA,BS +4339-07923 America/Toronto Eastern - ON, QC (most areas) CA +6344-06828 America/Iqaluit Eastern - NU (most areas) CA +4953-09709 America/Winnipeg Central - ON (west); Manitoba CA +744144-0944945 America/Resolute Central - NU (Resolute) CA +624900-0920459 America/Rankin_Inlet Central - NU (central) CA +5024-10439 America/Regina CST - SK (most areas) CA +5017-10750 America/Swift_Current CST - SK (midwest) -CA +5333-11328 America/Edmonton Mountain - AB; BC (E); SK (W) +CA +5333-11328 America/Edmonton Mountain - AB; BC (E); NT (E); SK (W) CA +690650-1050310 America/Cambridge_Bay Mountain - NU (west) -CA +6227-11421 America/Yellowknife Mountain - NT (central) CA +682059-1334300 America/Inuvik Mountain - NT (west) CA +5546-12014 America/Dawson_Creek MST - BC (Dawson Cr, Ft St John) CA +5848-12242 America/Fort_Nelson MST - BC (Ft Nelson) CA +6043-13503 America/Whitehorse MST - Yukon (east) CA +6404-13925 America/Dawson MST - Yukon (west) CA +4916-12307 America/Vancouver Pacific - BC (most areas) -CH,DE,LI +4723+00832 Europe/Zurich Swiss time +CH,DE,LI +4723+00832 Europe/Zurich Büsingen CI,BF,GH,GM,GN,IS,ML,MR,SH,SL,SN,TG +0519-00402 Africa/Abidjan CK -2114-15946 Pacific/Rarotonga -CL -3327-07040 America/Santiago Chile (most areas) +CL -3327-07040 America/Santiago most of Chile CL -5309-07055 America/Punta_Arenas Region of Magallanes CL -2709-10926 Pacific/Easter Easter Island CN +3114+12128 Asia/Shanghai Beijing Time @@ -129,10 +131,10 @@ CO +0436-07405 America/Bogota CR +0956-08405 America/Costa_Rica CU +2308-08222 America/Havana CV +1455-02331 Atlantic/Cape_Verde -CY +3510+03322 Asia/Nicosia Cyprus (most areas) +CY +3510+03322 Asia/Nicosia most of Cyprus CY +3507+03357 Asia/Famagusta Northern Cyprus CZ,SK +5005+01426 Europe/Prague -DE,DK,NO,SE,SJ +5230+01322 Europe/Berlin Germany (most areas), Scandinavia +DE,DK,NO,SE,SJ +5230+01322 Europe/Berlin most of Germany DO +1828-06954 America/Santo_Domingo DZ +3647+00303 Africa/Algiers EC -0210-07950 
America/Guayaquil Ecuador (mainland) @@ -153,7 +155,7 @@ GB,GG,IM,JE +513030-0000731 Europe/London GE +4143+04449 Asia/Tbilisi GF +0456-05220 America/Cayenne GI +3608-00521 Europe/Gibraltar -GL +6411-05144 America/Nuuk Greenland (most areas) +GL +6411-05144 America/Nuuk most of Greenland GL +7646-01840 America/Danmarkshavn National Park (east coast) GL +7029-02158 America/Scoresbysund Scoresbysund/Ittoqqortoormiit GL +7634-06847 America/Thule Thule/Pituffik @@ -183,12 +185,12 @@ JO +3157+03556 Asia/Amman JP +353916+1394441 Asia/Tokyo KE,DJ,ER,ET,KM,MG,SO,TZ,UG,YT -0117+03649 Africa/Nairobi KG +4254+07436 Asia/Bishkek -KI,MH,TV,UM,WF +0125+17300 Pacific/Tarawa Gilberts, Marshalls, Tuvalu, Wallis & Futuna, Wake +KI,MH,TV,UM,WF +0125+17300 Pacific/Tarawa Gilberts, Marshalls, Wake KI -0247-17143 Pacific/Kanton Phoenix Islands KI +0152-15720 Pacific/Kiritimati Line Islands KP +3901+12545 Asia/Pyongyang KR +3733+12658 Asia/Seoul -KZ +4315+07657 Asia/Almaty Kazakhstan (most areas) +KZ +4315+07657 Asia/Almaty most of Kazakhstan KZ +4448+06528 Asia/Qyzylorda Qyzylorda/Kyzylorda/Kzyl-Orda KZ +5312+06337 Asia/Qostanay Qostanay/Kostanay/Kustanay KZ +5017+05710 Asia/Aqtobe Aqtöbe/Aktobe @@ -205,14 +207,14 @@ MA +3339-00735 Africa/Casablanca MD +4700+02850 Europe/Chisinau MH +0905+16720 Pacific/Kwajalein Kwajalein MM,CC +1647+09610 Asia/Yangon -MN +4755+10653 Asia/Ulaanbaatar Mongolia (most areas) +MN +4755+10653 Asia/Ulaanbaatar most of Mongolia MN +4801+09139 Asia/Hovd Bayan-Ölgii, Govi-Altai, Hovd, Uvs, Zavkhan MN +4804+11430 Asia/Choibalsan Dornod, Sükhbaatar MO +221150+1133230 Asia/Macau MQ +1436-06105 America/Martinique MT +3554+01431 Europe/Malta MU -2010+05730 Indian/Mauritius -MV,TF +0410+07330 Indian/Maldives Maldives, Kerguelen, St Paul I, Amsterdam I +MV,TF +0410+07330 Indian/Maldives Kerguelen, St Paul I, Amsterdam I MX +1924-09909 America/Mexico_City Central Mexico MX +2105-08646 America/Cancun Quintana Roo MX +2058-08937 America/Merida Campeche, Yucatán @@ -225,7 
+227,7 @@ MX +2313-10625 America/Mazatlan Baja California Sur, Nayarit (most areas), Sinal MX +2048-10515 America/Bahia_Banderas Bahía de Banderas MX +2904-11058 America/Hermosillo Sonora MX +3232-11701 America/Tijuana Baja California -MY,BN +0133+11020 Asia/Kuching Sabah, Sarawak, Brunei +MY,BN +0133+11020 Asia/Kuching Sabah, Sarawak MZ,BI,BW,CD,MW,RW,ZM,ZW -2558+03235 Africa/Maputo Central Africa Time NA -2234+01706 Africa/Windhoek NC -2216+16627 Pacific/Noumea @@ -237,7 +239,7 @@ NR -0031+16655 Pacific/Nauru NU -1901-16955 Pacific/Niue NZ,AQ -3652+17446 Pacific/Auckland New Zealand time NZ -4357-17633 Pacific/Chatham Chatham Islands -PA,CA,KY +0858-07932 America/Panama EST - Panama, Cayman, ON (Atikokan), NU (Coral H) +PA,CA,KY +0858-07932 America/Panama EST - ON (Atikokan), NU (Coral H) PE -1203-07703 America/Lima PF -1732-14934 Pacific/Tahiti Society Islands PF -0900-13930 Pacific/Marquesas Marquesas Islands @@ -285,13 +287,13 @@ RU +4310+13156 Asia/Vladivostok MSK+07 - Amur River RU +643337+1431336 Asia/Ust-Nera MSK+07 - Oymyakonsky RU +5934+15048 Asia/Magadan MSK+08 - Magadan RU +4658+14242 Asia/Sakhalin MSK+08 - Sakhalin Island -RU +6728+15343 Asia/Srednekolymsk MSK+08 - Sakha (E); North Kuril Is +RU +6728+15343 Asia/Srednekolymsk MSK+08 - Sakha (E); N Kuril Is RU +5301+15839 Asia/Kamchatka MSK+09 - Kamchatka RU +6445+17729 Asia/Anadyr MSK+09 - Bering Sea -SA,AQ,KW,YE +2438+04643 Asia/Riyadh Arabia, Syowa -SB,FM -0932+16012 Pacific/Guadalcanal Solomons, Pohnpei +SA,AQ,KW,YE +2438+04643 Asia/Riyadh Syowa +SB,FM -0932+16012 Pacific/Guadalcanal Pohnpei SD +1536+03232 Africa/Khartoum -SG,MY +0117+10351 Asia/Singapore Singapore, peninsular Malaysia +SG,MY +0117+10351 Asia/Singapore peninsular Malaysia SR +0550-05510 America/Paramaribo SS +0451+03137 Africa/Juba ST +0020+00644 Africa/Sao_Tome @@ -299,7 +301,7 @@ SV +1342-08912 America/El_Salvador SY +3330+03618 Asia/Damascus TC +2128-07108 America/Grand_Turk TD +1207+01503 Africa/Ndjamena -TH,CX,KH,LA,VN 
+1345+10031 Asia/Bangkok Indochina (most areas) +TH,CX,KH,LA,VN +1345+10031 Asia/Bangkok north Vietnam TJ +3835+06848 Asia/Dushanbe TK -0922-17114 Pacific/Fakaofo TL -0833+12535 Asia/Dili @@ -308,7 +310,7 @@ TN +3648+01011 Africa/Tunis TO -210800-1751200 Pacific/Tongatapu TR +4101+02858 Europe/Istanbul TW +2503+12130 Asia/Taipei -UA +5026+03031 Europe/Kyiv Ukraine (most areas) +UA +5026+03031 Europe/Kyiv most of Ukraine US +404251-0740023 America/New_York Eastern (most areas) US +421953-0830245 America/Detroit Eastern - MI (most areas) US +381515-0854534 America/Kentucky/Louisville Eastern - KY (Louisville area) @@ -328,7 +330,7 @@ US +465042-1012439 America/North_Dakota/New_Salem Central - ND (Morton rural) US +471551-1014640 America/North_Dakota/Beulah Central - ND (Mercer) US +394421-1045903 America/Denver Mountain (most areas) US +433649-1161209 America/Boise Mountain - ID (south); OR (east) -US,CA +332654-1120424 America/Phoenix MST - Arizona (except Navajo), Creston BC +US,CA +332654-1120424 America/Phoenix MST - AZ (most areas), Creston BC US +340308-1181434 America/Los_Angeles Pacific US +611305-1495401 America/Anchorage Alaska (most areas) US +581807-1342511 America/Juneau Alaska - Juneau area @@ -336,13 +338,13 @@ US +571035-1351807 America/Sitka Alaska - Sitka area US +550737-1313435 America/Metlakatla Alaska - Annette Island US +593249-1394338 America/Yakutat Alaska - Yakutat US +643004-1652423 America/Nome Alaska (west) -US +515248-1763929 America/Adak Aleutian Islands -US,UM +211825-1575130 Pacific/Honolulu Hawaii +US +515248-1763929 America/Adak Alaska - western Aleutians +US +211825-1575130 Pacific/Honolulu Hawaii UY -345433-0561245 America/Montevideo UZ +3940+06648 Asia/Samarkand Uzbekistan (west) UZ +4120+06918 Asia/Tashkent Uzbekistan (east) VE +1030-06656 America/Caracas -VN +1045+10640 Asia/Ho_Chi_Minh Vietnam (south) +VN +1045+10640 Asia/Ho_Chi_Minh south Vietnam VU -1740+16825 Pacific/Efate WS -1350-17144 Pacific/Apia ZA,LS,SZ -2615+02800 
Africa/Johannesburg diff --git a/lib/rarfile/rarfile.py b/lib/rarfile/rarfile.py index ba6611cb..37c64992 100644 --- a/lib/rarfile/rarfile.py +++ b/lib/rarfile/rarfile.py @@ -59,7 +59,7 @@ import sys import warnings from binascii import crc32, hexlify from datetime import datetime, timezone -from hashlib import blake2s, pbkdf2_hmac, sha1 +from hashlib import blake2s, pbkdf2_hmac, sha1, sha256 from pathlib import Path from struct import Struct, pack, unpack from subprocess import DEVNULL, PIPE, STDOUT, Popen @@ -92,7 +92,7 @@ class AES_CBC_Decrypt: self.decrypt = ciph.decryptor().update -__version__ = "4.1a1" +__version__ = "4.1" # export only interesting items __all__ = ["get_rar_version", "is_rarfile", "is_rarfile_sfx", "RarInfo", "RarFile", "RarExtFile"] @@ -110,6 +110,12 @@ UNAR_TOOL = "unar" #: executable for bsdtar tool BSDTAR_TOOL = "bsdtar" +#: executable for p7zip/7z tool +SEVENZIP_TOOL = "7z" + +#: executable for alternative 7z tool +SEVENZIP2_TOOL = "7zz" + #: default fallback charset DEFAULT_CHARSET = "windows-1252" @@ -282,6 +288,9 @@ DOS_MODE_SYSTEM = 0x04 DOS_MODE_HIDDEN = 0x02 DOS_MODE_READONLY = 0x01 +RAR5_PW_CHECK_SIZE = 8 +RAR5_PW_SUM_SIZE = 4 + ## ## internal constants ## @@ -300,6 +309,8 @@ _BAD_CHARS = r"""\x00-\x1F<>|"?*""" RC_BAD_CHARS_UNIX = re.compile(r"[%s]" % _BAD_CHARS) RC_BAD_CHARS_WIN32 = re.compile(r"[%s:^\\]" % _BAD_CHARS) +FORCE_TOOL = False + def _find_sfx_header(xfile): sig = RAR_ID[:-1] @@ -641,6 +652,27 @@ class RarInfo: class RarFile: """Parse RAR structure, provide access to files in archive. + + Parameters: + + file + archive file name or file-like object. + mode + only "r" is supported. + charset + fallback charset to use, if filenames are not already Unicode-enabled. + info_callback + debug callback, gets to see all archive entries. + crc_check + set to False to disable CRC checks + errors + Either "stop" to quietly stop parsing on errors, + or "strict" to raise errors. Default is "stop". 
+ part_only + If True, read only single file and allow it to be middle-part + of multi-volume archive. + + .. versionadded:: 4.0 """ #: File name, if available. Unicode string or None. @@ -651,27 +683,6 @@ class RarFile: def __init__(self, file, mode="r", charset=None, info_callback=None, crc_check=True, errors="stop", part_only=False): - """Open and parse a RAR archive. - - Parameters: - - file - archive file name or file-like object. - mode - only "r" is supported. - charset - fallback charset to use, if filenames are not already Unicode-enabled. - info_callback - debug callback, gets to see all archive entries. - crc_check - set to False to disable CRC checks - errors - Either "stop" to quietly stop parsing on errors, - or "strict" to raise errors. Default is "stop". - part_only - If True, read only single file and allow it to be middle-part - of multi-volume archive. - """ if is_filelike(file): self.filename = getattr(file, "name", None) else: @@ -751,6 +762,16 @@ class RarFile: """ return self._file_parser.getinfo(name) + def getinfo_orig(self, name): + """Return RarInfo for file source. + + RAR5: if name is hard-linked or copied file, + returns original entry with original filename. + + .. versionadded:: 4.1 + """ + return self._file_parser.getinfo_orig(name) + def open(self, name, mode="r", pwd=None): """Returns file-like object (:class:`RarExtFile`) from where the data can be read. 
@@ -1058,6 +1079,15 @@ class CommonParser: except KeyError: raise NoRarEntry("No such file: %s" % fname) from None + def getinfo_orig(self, member): + inf = self.getinfo(member) + if inf.file_redir: + redir_type, redir_flags, redir_name = inf.file_redir + # cannot leave to unrar as it expects copied file to exist + if redir_type in (RAR5_XREDIR_FILE_COPY, RAR5_XREDIR_HARD_LINK): + inf = self.getinfo(redir_name) + return inf + def parse(self): """Process file.""" self._fd = None @@ -1111,6 +1141,7 @@ class CommonParser: endarc = False self._vol_list.append(volfile) self._main = None + self._hdrenc_main = None continue break h.volume = volume @@ -1255,6 +1286,8 @@ class CommonParser: return self._open_unrar(self._rarfile, inf, pwd) def _open_clear(self, inf): + if FORCE_TOOL: + return self._open_unrar(self._rarfile, inf) return DirectReader(self, inf) def _open_hack_core(self, inf, pwd, prefix, suffix): @@ -1302,7 +1335,7 @@ class CommonParser: # not giving filename avoids encoding related problems fn = None if not tmpfile or force_file: - fn = inf.filename + fn = inf.filename.replace("/", os.path.sep) # read from unrar pipe cmd = setup.open_cmdline(pwd, rarfile, fn) @@ -1768,14 +1801,18 @@ class RAR5Parser(CommonParser): # AES encrypted headers _last_aes256_key = (-1, None, None) # (kdf_count, salt, key) + def _get_utf8_password(self): + pwd = self._password + if isinstance(pwd, str): + return pwd.encode("utf8") + return pwd + def _gen_key(self, kdf_count, salt): if self._last_aes256_key[:2] == (kdf_count, salt): return self._last_aes256_key[2] if kdf_count > 24: raise BadRarFile("Too large kdf_count") - pwd = self._password - if isinstance(pwd, str): - pwd = pwd.encode("utf8") + pwd = self._get_utf8_password() key = pbkdf2_hmac("sha256", pwd, salt, 1 << kdf_count) self._last_aes256_key = (kdf_count, salt, key) return key @@ -1938,15 +1975,39 @@ class RAR5Parser(CommonParser): h.flags |= RAR_ENDARC_NEXT_VOLUME return h + def _check_password(self, check_value, 
kdf_count_shift, salt): + if len(check_value) != RAR5_PW_CHECK_SIZE + RAR5_PW_SUM_SIZE: + return + + hdr_check = check_value[:RAR5_PW_CHECK_SIZE] + hdr_sum = check_value[RAR5_PW_CHECK_SIZE:] + sum_hash = sha256(hdr_check).digest() + if sum_hash[:RAR5_PW_SUM_SIZE] != hdr_sum: + return + + kdf_count = (1 << kdf_count_shift) + 32 + pwd = self._get_utf8_password() + pwd_hash = pbkdf2_hmac("sha256", pwd, salt, kdf_count) + + pwd_check = bytearray(RAR5_PW_CHECK_SIZE) + len_mask = RAR5_PW_CHECK_SIZE - 1 + for i, v in enumerate(pwd_hash): + pwd_check[i & len_mask] ^= v + + if pwd_check != hdr_check: + raise RarWrongPassword() + def _parse_encryption_block(self, h, hdata, pos): h.encryption_algo, pos = load_vint(hdata, pos) h.encryption_flags, pos = load_vint(hdata, pos) h.encryption_kdf_count, pos = load_byte(hdata, pos) h.encryption_salt, pos = load_bytes(hdata, 16, pos) if h.encryption_flags & RAR5_ENC_FLAG_HAS_CHECKVAL: - h.encryption_check_value = load_bytes(hdata, 12, pos) + h.encryption_check_value, pos = load_bytes(hdata, 12, pos) if h.encryption_algo != RAR5_XENC_CIPHER_AES256: raise BadRarFile("Unsupported header encryption cipher") + if h.encryption_check_value and self._password: + self._check_password(h.encryption_check_value, h.encryption_kdf_count, h.encryption_salt) self._hdrenc_main = h return h @@ -2183,6 +2244,7 @@ class RarExtFile(io.RawIOBase): _remain = 0 _returncode = 0 _md_context = None + _seeking = False def _open_extfile(self, parser, inf): self.name = inf.filename @@ -2191,7 +2253,10 @@ class RarExtFile(io.RawIOBase): if self._fd: self._fd.close() - md_class = self._inf._md_class or NoHashContext + if self._seeking: + md_class = NoHashContext + else: + md_class = self._inf._md_class or NoHashContext self._md_context = md_class() self._fd = None self._remain = self._inf.file_size @@ -2282,7 +2347,9 @@ class RarExtFile(io.RawIOBase): """ # disable crc check when seeking - self._md_context = NoHashContext() + if not self._seeking: + self._md_context 
= NoHashContext() + self._seeking = True fsize = self._inf.file_size cur_ofs = self.tell() @@ -2463,6 +2530,11 @@ class DirectReader(RarExtFile): def _read(self, cnt): """Read from potentially multi-volume archive.""" + pos = self._fd.tell() + need = self._cur.data_offset + self._cur.add_size - self._cur_avail + if pos != need: + self._fd.seek(need, 0) + buf = [] while cnt > 0: # next vol needed? @@ -3290,6 +3362,8 @@ class ToolSetup: def get_cmdline(self, key, pwd, nodash=False): cmdline = list(self.setup[key]) cmdline[0] = globals()[cmdline[0]] + if key == "check_cmd": + return cmdline self.add_password_arg(cmdline, pwd) if not nodash: cmdline.append("--") @@ -3352,10 +3426,30 @@ BSDTAR_CONFIG = { "errmap": [None], } +SEVENZIP_CONFIG = { + "open_cmd": ("SEVENZIP_TOOL", "e", "-so", "-bb0"), + "check_cmd": ("SEVENZIP_TOOL", "i"), + "password": "-p", + "no_password": ("-p",), + "errmap": [None, + RarWarning, RarFatalError, None, None, # 1..4 + None, None, RarUserError, RarMemoryError] # 5..8 +} + +SEVENZIP2_CONFIG = { + "open_cmd": ("SEVENZIP2_TOOL", "e", "-so", "-bb0"), + "check_cmd": ("SEVENZIP2_TOOL", "i"), + "password": "-p", + "no_password": ("-p",), + "errmap": [None, + RarWarning, RarFatalError, None, None, # 1..4 + None, None, RarUserError, RarMemoryError] # 5..8 +} + CURRENT_SETUP = None -def tool_setup(unrar=True, unar=True, bsdtar=True, force=False): +def tool_setup(unrar=True, unar=True, bsdtar=True, sevenzip=True, sevenzip2=True, force=False): """Pick a tool, return cached ToolSetup. 
""" global CURRENT_SETUP @@ -3368,6 +3462,10 @@ def tool_setup(unrar=True, unar=True, bsdtar=True, force=False): lst.append(UNRAR_CONFIG) if unar: lst.append(UNAR_CONFIG) + if sevenzip: + lst.append(SEVENZIP_CONFIG) + if sevenzip2: + lst.append(SEVENZIP2_CONFIG) if bsdtar: lst.append(BSDTAR_CONFIG) diff --git a/lib/sg_helpers.py b/lib/sg_helpers.py index 7255ec21..08c2d416 100644 --- a/lib/sg_helpers.py +++ b/lib/sg_helpers.py @@ -39,6 +39,17 @@ from _23 import decode_bytes, html_unescape, list_range, \ Popen, scandir, urlparse, urlsplit, urlunparse from six import integer_types, iteritems, iterkeys, itervalues, moves, PY2, string_types, text_type +ACCEPT_ENCODING = "gzip,deflate" +try: + try: + import brotlicffi as _unused_module_brotli # noqa: F401 + except ImportError: + import brotli as _unused_module_brotli # noqa: F401 +except ImportError: + pass +else: + ACCEPT_ENCODING += ",br" + import zipfile # py7z hardwired removed, see comment below py7zr = None @@ -863,7 +874,7 @@ def get_url(url, # type: AnyStr # session main headers req_headers = {'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', - 'Accept-Encoding': 'gzip,deflate'} + 'Accept-Encoding': ACCEPT_ENCODING} if headers: req_headers.update(headers) if hasattr(session, 'reserved') and 'headers' in session.reserved: @@ -1068,43 +1079,71 @@ def save_failure(url, domain, log_failure_url, post_data, post_json): def scantree(path, # type: AnyStr exclude=None, # type: Optional[AnyStr, List[AnyStr]] + exclude_dirs=None, # type: Optional[AnyStr, List[AnyStr]] include=None, # type: Optional[AnyStr, List[AnyStr]] follow_symlinks=False, # type: bool filter_kind=None, # type: Optional[bool] recurse=True, # type: bool - exclude_folders_with_files=None # type: Optional[List[AnyStr]] + exclude_folders_with_files=None, # type: Optional[List[AnyStr]] + internal_call=False, # type: bool + rc_exc=None, # type: List + rc_exc_dir=None, # type: List + rc_inc=None, # type: List + has_exclude=False, # 
type: bool + has_exclude_dirs=False, # type: bool + has_include=False # type: bool ): # type: (...) -> Generator[DirEntry, None, None] """Yield DirEntry objects for given path. Returns without yield if path fails sanity check :param path: Path to scan, sanity check is_dir and exists - :param exclude: Escaped regex string(s) to exclude + :param exclude: Escaped regex string(s) to exclude (files and directories) + :param exclude_dirs: Escaped regex string(s) to exclude (directories only) :param include: Escaped regex string(s) to include :param follow_symlinks: Follow symlinks :param filter_kind: None to yield everything, True yields directories, False yields files :param recurse: Recursively scan the tree :param exclude_folders_with_files: exclude folder that contain the listed file(s) + :param internal_call: internal use + :param rc_exc: internal use + :param rc_exc_dir: internal use + :param rc_inc: internal use + :param has_exclude: internal use + :param has_exclude_dirs: internal use + :param has_include: internal_use """ if isinstance(path, string_types) and path and os.path.isdir(path): - rc_exc, rc_inc = [re.compile(rx % '|'.join( - [x for x in (param, ([param], [])[None is param])[not isinstance(param, list)]])) - for rx, param in ((r'(?i)^(?:(?!%s).)*$', exclude), (r'(?i)%s', include))] - for entry in scandir(path): - is_dir = entry.is_dir(follow_symlinks=follow_symlinks) - is_file = entry.is_file(follow_symlinks=follow_symlinks) - no_filter = any([None is filter_kind, filter_kind and is_dir, not filter_kind and is_file]) - if (rc_exc.search(entry.name), True)[not exclude] and (rc_inc.search(entry.name), True)[not include] \ - and (no_filter or (not filter_kind and is_dir and recurse)): - if is_dir and exclude_folders_with_files and any(os.path.isfile(os.path.join(entry.path, e_f)) - for e_f in exclude_folders_with_files): - logger.debug(f'Ignoring Folder: "{entry.path}", because it contains a exclude file' - f' "{", ".join(exclude_folders_with_files)}"') - 
continue - if recurse and is_dir: - for subentry in scantree(entry.path, exclude, include, follow_symlinks, filter_kind, recurse): - yield subentry - if no_filter: - yield entry + if not internal_call: + rc_exc, rc_exc_dir, rc_inc = [re.compile(rx % '|'.join( + [x for x in (param, ([param], [])[None is param])[not isinstance(param, list)]])) + for rx, param in ((r'(?i)^(?:(?!%s).)*$', exclude), (r'(?i)^(?:(?!%s).)*$', exclude_dirs), + (r'(?i)%s', include))] + has_exclude, has_exclude_dirs, has_include = bool(exclude), bool(exclude_dirs), bool(include) + + with scandir(path) as s_d: + for entry in s_d: + is_dir = entry.is_dir(follow_symlinks=follow_symlinks) + is_file = entry.is_file(follow_symlinks=follow_symlinks) + no_filter = any([None is filter_kind, filter_kind and is_dir, not filter_kind and is_file]) + if ((not has_exclude or rc_exc.search(entry.name)) + and (not has_exclude_dirs or not is_dir or rc_exc_dir.search(entry.name)) + and (not has_include or rc_inc.search(entry.name)) + and (no_filter or (not filter_kind and is_dir and recurse))): + if is_dir and exclude_folders_with_files and any(os.path.isfile(os.path.join(entry.path, e_f)) + for e_f in exclude_folders_with_files): + logger.debug(f'Ignoring Folder: "{entry.path}", because it contains a exclude file' + f' "{", ".join(exclude_folders_with_files)}"') + continue + if recurse and is_dir: + for subentry in scantree( + path=entry.path, exclude=exclude, exclude_dirs=exclude_dirs, include=include, + follow_symlinks=follow_symlinks, filter_kind=filter_kind, recurse=recurse, + exclude_folders_with_files=exclude_folders_with_files, internal_call=True, + rc_exc=rc_exc, rc_exc_dir=rc_exc_dir, rc_inc=rc_inc, has_exclude=has_exclude, + has_exclude_dirs=has_exclude_dirs, has_include=has_include): + yield subentry + if no_filter: + yield entry def copy_file(src_file, dest_file): diff --git a/lib/soupsieve/__init__.py b/lib/soupsieve/__init__.py index 4c928fd5..549b4c5c 100644 --- a/lib/soupsieve/__init__.py +++ 
b/lib/soupsieve/__init__.py @@ -78,13 +78,13 @@ def purge() -> None: def closest( select: str, - tag: 'bs4.Tag', + tag: bs4.Tag, namespaces: dict[str, str] | None = None, flags: int = 0, *, custom: dict[str, str] | None = None, **kwargs: Any -) -> 'bs4.Tag': +) -> bs4.Tag: """Match closest ancestor.""" return compile(select, namespaces, flags, **kwargs).closest(tag) @@ -92,7 +92,7 @@ def closest( def match( select: str, - tag: 'bs4.Tag', + tag: bs4.Tag, namespaces: dict[str, str] | None = None, flags: int = 0, *, @@ -106,13 +106,13 @@ def match( def filter( # noqa: A001 select: str, - iterable: Iterable['bs4.Tag'], + iterable: Iterable[bs4.Tag], namespaces: dict[str, str] | None = None, flags: int = 0, *, custom: dict[str, str] | None = None, **kwargs: Any -) -> list['bs4.Tag']: +) -> list[bs4.Tag]: """Filter list of nodes.""" return compile(select, namespaces, flags, **kwargs).filter(iterable) @@ -120,13 +120,13 @@ def filter( # noqa: A001 def select_one( select: str, - tag: 'bs4.Tag', + tag: bs4.Tag, namespaces: dict[str, str] | None = None, flags: int = 0, *, custom: dict[str, str] | None = None, **kwargs: Any -) -> 'bs4.Tag': +) -> bs4.Tag: """Select a single tag.""" return compile(select, namespaces, flags, **kwargs).select_one(tag) @@ -134,14 +134,14 @@ def select_one( def select( select: str, - tag: 'bs4.Tag', + tag: bs4.Tag, namespaces: dict[str, str] | None = None, limit: int = 0, flags: int = 0, *, custom: dict[str, str] | None = None, **kwargs: Any -) -> list['bs4.Tag']: +) -> list[bs4.Tag]: """Select the specified tags.""" return compile(select, namespaces, flags, **kwargs).select(tag, limit) @@ -149,18 +149,17 @@ def select( def iselect( select: str, - tag: 'bs4.Tag', + tag: bs4.Tag, namespaces: dict[str, str] | None = None, limit: int = 0, flags: int = 0, *, custom: dict[str, str] | None = None, **kwargs: Any -) -> Iterator['bs4.Tag']: +) -> Iterator[bs4.Tag]: """Iterate the specified tags.""" - for el in compile(select, namespaces, flags, 
**kwargs).iselect(tag, limit): - yield el + yield from compile(select, namespaces, flags, **kwargs).iselect(tag, limit) def escape(ident: str) -> str: diff --git a/lib/soupsieve/__meta__.py b/lib/soupsieve/__meta__.py index 1a5cfe41..df341145 100644 --- a/lib/soupsieve/__meta__.py +++ b/lib/soupsieve/__meta__.py @@ -93,7 +93,7 @@ class Version(namedtuple("Version", ["major", "minor", "micro", "release", "pre" raise ValueError("All version parts except 'release' should be integers.") if release not in REL_MAP: - raise ValueError("'{}' is not a valid release type.".format(release)) + raise ValueError(f"'{release}' is not a valid release type.") # Ensure valid pre-release (we do not allow implicit pre-releases). if ".dev-candidate" < release < "final": @@ -118,7 +118,7 @@ class Version(namedtuple("Version", ["major", "minor", "micro", "release", "pre" elif dev: raise ValueError("Version is not a development release.") - return super(Version, cls).__new__(cls, major, minor, micro, release, pre, post, dev) + return super().__new__(cls, major, minor, micro, release, pre, post, dev) def _is_pre(self) -> bool: """Is prerelease.""" @@ -145,15 +145,15 @@ class Version(namedtuple("Version", ["major", "minor", "micro", "release", "pre" # Assemble major, minor, micro version and append `pre`, `post`, or `dev` if needed.. 
if self.micro == 0: - ver = "{}.{}".format(self.major, self.minor) + ver = f"{self.major}.{self.minor}" else: - ver = "{}.{}.{}".format(self.major, self.minor, self.micro) + ver = f"{self.major}.{self.minor}.{self.micro}" if self._is_pre(): - ver += '{}{}'.format(REL_MAP[self.release], self.pre) + ver += f'{REL_MAP[self.release]}{self.pre}' if self._is_post(): - ver += ".post{}".format(self.post) + ver += f".post{self.post}" if self._is_dev(): - ver += ".dev{}".format(self.dev) + ver += f".dev{self.dev}" return ver @@ -164,7 +164,7 @@ def parse_version(ver: str) -> Version: m = RE_VER.match(ver) if m is None: - raise ValueError("'{}' is not a valid version".format(ver)) + raise ValueError(f"'{ver}' is not a valid version") # Handle major, minor, micro major = int(m.group('major')) @@ -193,5 +193,5 @@ def parse_version(ver: str) -> Version: return Version(major, minor, micro, release, pre, post, dev) -__version_info__ = Version(2, 4, 1, "final") +__version_info__ = Version(2, 5, 0, "final") __version__ = __version_info__._get_canonical() diff --git a/lib/soupsieve/css_match.py b/lib/soupsieve/css_match.py index 9acf500e..4504e7b3 100644 --- a/lib/soupsieve/css_match.py +++ b/lib/soupsieve/css_match.py @@ -85,7 +85,7 @@ class _DocumentNav: # Fail on unexpected types. if not cls.is_tag(tag): - raise TypeError("Expected a BeautifulSoup 'Tag', but instead received type {}".format(type(tag))) + raise TypeError(f"Expected a BeautifulSoup 'Tag', but instead received type {type(tag)}") @staticmethod def is_doc(obj: bs4.Tag) -> bool: @@ -165,8 +165,7 @@ class _DocumentNav: def get_contents(self, el: bs4.Tag, no_iframe: bool = False) -> Iterator[bs4.PageElement]: """Get contents or contents in reverse.""" if not no_iframe or not self.is_iframe(el): - for content in el.contents: - yield content + yield from el.contents def get_children( self, @@ -283,7 +282,7 @@ class _DocumentNav: like we do in the case of `is_html_tag`. 
""" - ns = getattr(el, 'namespace') if el else None + ns = getattr(el, 'namespace') if el else None # noqa: B009 return bool(ns and ns == NS_XHTML) @staticmethod @@ -394,7 +393,7 @@ class Inputs: def validate_week(year: int, week: int) -> bool: """Validate week.""" - max_week = datetime.strptime("{}-{}-{}".format(12, 31, year), "%m-%d-%Y").isocalendar()[1] + max_week = datetime.strptime(f"{12}-{31}-{year}", "%m-%d-%Y").isocalendar()[1] if max_week == 1: max_week = 53 return 1 <= week <= max_week @@ -1272,11 +1271,7 @@ class CSSMatch(_DocumentNav): # Auto handling for text inputs if ((is_input and itype in ('text', 'search', 'tel', 'url', 'email')) or is_textarea) and direction == 0: if is_textarea: - temp = [] - for node in self.get_contents(el, no_iframe=True): - if self.is_content_string(node): - temp.append(node) - value = ''.join(temp) + value = ''.join(node for node in self.get_contents(el, no_iframe=True) if self.is_content_string(node)) else: value = cast(str, self.get_attribute_by_name(el, 'value', '')) if value: @@ -1571,17 +1566,14 @@ class SoupSieve(ct.Immutable): def iselect(self, tag: bs4.Tag, limit: int = 0) -> Iterator[bs4.Tag]: """Iterate the specified tags.""" - for el in CSSMatch(self.selectors, tag, self.namespaces, self.flags).select(limit): - yield el + yield from CSSMatch(self.selectors, tag, self.namespaces, self.flags).select(limit) def __repr__(self) -> str: # pragma: no cover """Representation.""" - return "SoupSieve(pattern={!r}, namespaces={!r}, custom={!r}, flags={!r})".format( - self.pattern, - self.namespaces, - self.custom, - self.flags + return ( + f"SoupSieve(pattern={self.pattern!r}, namespaces={self.namespaces!r}, " + f"custom={self.custom!r}, flags={self.flags!r})" ) __str__ = __repr__ diff --git a/lib/soupsieve/css_parser.py b/lib/soupsieve/css_parser.py index 739ab74b..4a9ea39d 100644 --- a/lib/soupsieve/css_parser.py +++ b/lib/soupsieve/css_parser.py @@ -92,94 +92,79 @@ PSEUDO_SUPPORTED = PSEUDO_SIMPLE | 
PSEUDO_SIMPLE_NO_MATCH | PSEUDO_COMPLEX | PSE # Sub-patterns parts # Whitespace NEWLINE = r'(?:\r\n|(?!\r\n)[\n\f\r])' -WS = r'(?:[ \t]|{})'.format(NEWLINE) +WS = fr'(?:[ \t]|{NEWLINE})' # Comments COMMENTS = r'(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/)' # Whitespace with comments included -WSC = r'(?:{ws}|{comments})'.format(ws=WS, comments=COMMENTS) +WSC = fr'(?:{WS}|{COMMENTS})' # CSS escapes -CSS_ESCAPES = r'(?:\\(?:[a-f0-9]{{1,6}}{ws}?|[^\r\n\f]|$))'.format(ws=WS) -CSS_STRING_ESCAPES = r'(?:\\(?:[a-f0-9]{{1,6}}{ws}?|[^\r\n\f]|$|{nl}))'.format(ws=WS, nl=NEWLINE) +CSS_ESCAPES = fr'(?:\\(?:[a-f0-9]{{1,6}}{WS}?|[^\r\n\f]|$))' +CSS_STRING_ESCAPES = fr'(?:\\(?:[a-f0-9]{{1,6}}{WS}?|[^\r\n\f]|$|{NEWLINE}))' # CSS Identifier -IDENTIFIER = r''' -(?:(?:-?(?:[^\x00-\x2f\x30-\x40\x5B-\x5E\x60\x7B-\x9f]|{esc})+|--) -(?:[^\x00-\x2c\x2e\x2f\x3A-\x40\x5B-\x5E\x60\x7B-\x9f]|{esc})*) -'''.format(esc=CSS_ESCAPES) +IDENTIFIER = fr''' +(?:(?:-?(?:[^\x00-\x2f\x30-\x40\x5B-\x5E\x60\x7B-\x9f]|{CSS_ESCAPES})+|--) +(?:[^\x00-\x2c\x2e\x2f\x3A-\x40\x5B-\x5E\x60\x7B-\x9f]|{CSS_ESCAPES})*) +''' # `nth` content -NTH = r'(?:[-+])?(?:[0-9]+n?|n)(?:(?<=n){ws}*(?:[-+]){ws}*(?:[0-9]+))?'.format(ws=WSC) +NTH = fr'(?:[-+])?(?:[0-9]+n?|n)(?:(?<=n){WSC}*(?:[-+]){WSC}*(?:[0-9]+))?' # Value: quoted string or identifier -VALUE = r''' -(?:"(?:\\(?:.|{nl})|[^\\"\r\n\f]+)*?"|'(?:\\(?:.|{nl})|[^\\'\r\n\f]+)*?'|{ident}+) -'''.format(nl=NEWLINE, ident=IDENTIFIER) +VALUE = fr'''(?:"(?:\\(?:.|{NEWLINE})|[^\\"\r\n\f]+)*?"|'(?:\\(?:.|{NEWLINE})|[^\\'\r\n\f]+)*?'|{IDENTIFIER}+)''' # Attribute value comparison. `!=` is handled special as it is non-standard. 
-ATTR = r''' -(?:{ws}*(?P[!~^|*$]?=){ws}*(?P{value})(?:{ws}*(?P[is]))?)?{ws}*\] -'''.format(ws=WSC, value=VALUE) +ATTR = fr'(?:{WSC}*(?P[!~^|*$]?=){WSC}*(?P{VALUE})(?:{WSC}*(?P[is]))?)?{WSC}*\]' # Selector patterns # IDs (`#id`) -PAT_ID = r'\#{ident}'.format(ident=IDENTIFIER) +PAT_ID = fr'\#{IDENTIFIER}' # Classes (`.class`) -PAT_CLASS = r'\.{ident}'.format(ident=IDENTIFIER) +PAT_CLASS = fr'\.{IDENTIFIER}' # Prefix:Tag (`prefix|tag`) -PAT_TAG = r'(?P(?:{ident}|\*)?\|)?(?P{ident}|\*)'.format(ident=IDENTIFIER) +PAT_TAG = fr'(?P(?:{IDENTIFIER}|\*)?\|)?(?P{IDENTIFIER}|\*)' # Attributes (`[attr]`, `[attr=value]`, etc.) -PAT_ATTR = r''' -\[{ws}*(?P(?:{ident}|\*)?\|)?(?P{ident}){attr} -'''.format(ws=WSC, ident=IDENTIFIER, attr=ATTR) +PAT_ATTR = fr'\[{WSC}*(?P(?:{IDENTIFIER}|\*)?\|)?(?P{IDENTIFIER}){ATTR}' # Pseudo class (`:pseudo-class`, `:pseudo-class(`) -PAT_PSEUDO_CLASS = r'(?P:{ident})(?P\({ws}*)?'.format(ws=WSC, ident=IDENTIFIER) +PAT_PSEUDO_CLASS = fr'(?P:{IDENTIFIER})(?P\({WSC}*)?' # Pseudo class special patterns. Matches `:pseudo-class(` for special case pseudo classes. -PAT_PSEUDO_CLASS_SPECIAL = r'(?P:{ident})(?P\({ws}*)'.format(ws=WSC, ident=IDENTIFIER) +PAT_PSEUDO_CLASS_SPECIAL = fr'(?P:{IDENTIFIER})(?P\({WSC}*)' # Custom pseudo class (`:--custom-pseudo`) -PAT_PSEUDO_CLASS_CUSTOM = r'(?P:(?=--){ident})'.format(ident=IDENTIFIER) +PAT_PSEUDO_CLASS_CUSTOM = fr'(?P:(?=--){IDENTIFIER})' # Closing pseudo group (`)`) -PAT_PSEUDO_CLOSE = r'{ws}*\)'.format(ws=WSC) +PAT_PSEUDO_CLOSE = fr'{WSC}*\)' # Pseudo element (`::pseudo-element`) -PAT_PSEUDO_ELEMENT = r':{}'.format(PAT_PSEUDO_CLASS) +PAT_PSEUDO_ELEMENT = fr':{PAT_PSEUDO_CLASS}' # At rule (`@page`, etc.) (not supported) -PAT_AT_RULE = r'@P{ident}'.format(ident=IDENTIFIER) +PAT_AT_RULE = fr'@P{IDENTIFIER}' # Pseudo class `nth-child` (`:nth-child(an+b [of S]?)`, `:first-child`, etc.) 
-PAT_PSEUDO_NTH_CHILD = r''' -(?P{name} -(?P{nth}|even|odd))(?:{wsc}*\)|(?P{comments}*{ws}{wsc}*of{comments}*{ws}{wsc}*)) -'''.format(name=PAT_PSEUDO_CLASS_SPECIAL, wsc=WSC, comments=COMMENTS, ws=WS, nth=NTH) +PAT_PSEUDO_NTH_CHILD = fr''' +(?P{PAT_PSEUDO_CLASS_SPECIAL} +(?P{NTH}|even|odd))(?:{WSC}*\)|(?P{COMMENTS}*{WS}{WSC}*of{COMMENTS}*{WS}{WSC}*)) +''' # Pseudo class `nth-of-type` (`:nth-of-type(an+b)`, `:first-of-type`, etc.) -PAT_PSEUDO_NTH_TYPE = r''' -(?P{name} -(?P{nth}|even|odd)){ws}*\) -'''.format(name=PAT_PSEUDO_CLASS_SPECIAL, ws=WSC, nth=NTH) +PAT_PSEUDO_NTH_TYPE = fr''' +(?P{PAT_PSEUDO_CLASS_SPECIAL} +(?P{NTH}|even|odd)){WSC}*\) +''' # Pseudo class language (`:lang("*-de", en)`) -PAT_PSEUDO_LANG = r'{name}(?P{value}(?:{ws}*,{ws}*{value})*){ws}*\)'.format( - name=PAT_PSEUDO_CLASS_SPECIAL, ws=WSC, value=VALUE -) +PAT_PSEUDO_LANG = fr'{PAT_PSEUDO_CLASS_SPECIAL}(?P{VALUE}(?:{WSC}*,{WSC}*{VALUE})*){WSC}*\)' # Pseudo class direction (`:dir(ltr)`) -PAT_PSEUDO_DIR = r'{name}(?Pltr|rtl){ws}*\)'.format(name=PAT_PSEUDO_CLASS_SPECIAL, ws=WSC) +PAT_PSEUDO_DIR = fr'{PAT_PSEUDO_CLASS_SPECIAL}(?Pltr|rtl){WSC}*\)' # Combining characters (`>`, `~`, ` `, `+`, `,`) -PAT_COMBINE = r'{wsc}*?(?P[,+>~]|{ws}(?![,+>~])){wsc}*'.format(ws=WS, wsc=WSC) +PAT_COMBINE = fr'{WSC}*?(?P[,+>~]|{WS}(?![,+>~])){WSC}*' # Extra: Contains (`:contains(text)`) -PAT_PSEUDO_CONTAINS = r'{name}(?P{value}(?:{ws}*,{ws}*{value})*){ws}*\)'.format( - name=PAT_PSEUDO_CLASS_SPECIAL, ws=WSC, value=VALUE -) +PAT_PSEUDO_CONTAINS = fr'{PAT_PSEUDO_CLASS_SPECIAL}(?P{VALUE}(?:{WSC}*,{WSC}*{VALUE})*){WSC}*\)' # Regular expressions # CSS escape pattern -RE_CSS_ESC = re.compile(r'(?:(\\[a-f0-9]{{1,6}}{ws}?)|(\\[^\r\n\f])|(\\$))'.format(ws=WSC), re.I) -RE_CSS_STR_ESC = re.compile( - r'(?:(\\[a-f0-9]{{1,6}}{ws}?)|(\\[^\r\n\f])|(\\$)|(\\{nl}))'.format(ws=WS, nl=NEWLINE), re.I -) +RE_CSS_ESC = re.compile(fr'(?:(\\[a-f0-9]{{1,6}}{WSC}?)|(\\[^\r\n\f])|(\\$))', re.I) +RE_CSS_STR_ESC = 
re.compile(fr'(?:(\\[a-f0-9]{{1,6}}{WS}?)|(\\[^\r\n\f])|(\\$)|(\\{NEWLINE}))', re.I) # Pattern to break up `nth` specifiers -RE_NTH = re.compile( - r'(?P[-+])?(?P[0-9]+n?|n)(?:(?<=n){ws}*(?P[-+]){ws}*(?P[0-9]+))?'.format(ws=WSC), - re.I -) +RE_NTH = re.compile(fr'(?P[-+])?(?P[0-9]+n?|n)(?:(?<=n){WSC}*(?P[-+]){WSC}*(?P[0-9]+))?', re.I) # Pattern to iterate multiple values. -RE_VALUES = re.compile(r'(?:(?P{value})|(?P{ws}*,{ws}*))'.format(ws=WSC, value=VALUE), re.X) +RE_VALUES = re.compile(fr'(?:(?P{VALUE})|(?P{WSC}*,{WSC}*))', re.X) # Whitespace checks RE_WS = re.compile(WS) -RE_WS_BEGIN = re.compile('^{}*'.format(WSC)) -RE_WS_END = re.compile('{}*$'.format(WSC)) -RE_CUSTOM = re.compile(r'^{}$'.format(PAT_PSEUDO_CLASS_CUSTOM), re.X) +RE_WS_BEGIN = re.compile(fr'^{WSC}*') +RE_WS_END = re.compile(fr'{WSC}*$') +RE_CUSTOM = re.compile(fr'^{PAT_PSEUDO_CLASS_CUSTOM}$', re.X) # Constants # List split token @@ -241,9 +226,9 @@ def process_custom(custom: ct.CustomSelectors | None) -> dict[str, str | ct.Sele for key, value in custom.items(): name = util.lower(key) if RE_CUSTOM.match(name) is None: - raise SelectorSyntaxError("The name '{}' is not a valid custom pseudo-class name".format(name)) + raise SelectorSyntaxError(f"The name '{name}' is not a valid custom pseudo-class name") if name in custom_selectors: - raise KeyError("The custom selector '{}' has already been registered".format(name)) + raise KeyError(f"The custom selector '{name}' has already been registered") custom_selectors[css_unescape(name)] = value return custom_selectors @@ -283,23 +268,23 @@ def escape(ident: str) -> str: start_dash = length > 0 and ident[0] == '-' if length == 1 and start_dash: # Need to escape identifier that is a single `-` with no other characters - string.append('\\{}'.format(ident)) + string.append(f'\\{ident}') else: for index, c in enumerate(ident): codepoint = ord(c) if codepoint == 0x00: string.append('\ufffd') elif (0x01 <= codepoint <= 0x1F) or codepoint == 0x7F: - 
string.append('\\{:x} '.format(codepoint)) + string.append(f'\\{codepoint:x} ') elif (index == 0 or (start_dash and index == 1)) and (0x30 <= codepoint <= 0x39): - string.append('\\{:x} '.format(codepoint)) + string.append(f'\\{codepoint:x} ') elif ( codepoint in (0x2D, 0x5F) or codepoint >= 0x80 or (0x30 <= codepoint <= 0x39) or (0x30 <= codepoint <= 0x39) or (0x41 <= codepoint <= 0x5A) or (0x61 <= codepoint <= 0x7A) ): string.append(c) else: - string.append('\\{}'.format(c)) + string.append(f'\\{c}') return ''.join(string) @@ -419,11 +404,10 @@ class _Selector: """String representation.""" return ( - '_Selector(tag={!r}, ids={!r}, classes={!r}, attributes={!r}, nth={!r}, selectors={!r}, ' - 'relations={!r}, rel_type={!r}, contains={!r}, lang={!r}, flags={!r}, no_match={!r})' - ).format( - self.tag, self.ids, self.classes, self.attributes, self.nth, self.selectors, - self.relations, self.rel_type, self.contains, self.lang, self.flags, self.no_match + f'_Selector(tag={self.tag!r}, ids={self.ids!r}, classes={self.classes!r}, attributes={self.attributes!r}, ' + f'nth={self.nth!r}, selectors={self.selectors!r}, relations={self.relations!r}, ' + f'rel_type={self.rel_type!r}, contains={self.contains!r}, lang={self.lang!r}, flags={self.flags!r}, ' + f'no_match={self.no_match!r})' ) __repr__ = __str__ @@ -563,7 +547,7 @@ class CSSParser: selector = self.custom.get(pseudo) if selector is None: raise SelectorSyntaxError( - "Undefined custom selector '{}' found at position {}".format(pseudo, m.end(0)), + f"Undefined custom selector '{pseudo}' found at position {m.end(0)}", self.pattern, m.end(0) ) @@ -663,13 +647,13 @@ class CSSParser: has_selector = True elif pseudo in PSEUDO_SUPPORTED: raise SelectorSyntaxError( - "Invalid syntax for pseudo class '{}'".format(pseudo), + f"Invalid syntax for pseudo class '{pseudo}'", self.pattern, m.start(0) ) else: raise NotImplementedError( - "'{}' pseudo-class is not implemented at this time".format(pseudo) + f"'{pseudo}' pseudo-class is 
not implemented at this time" ) return has_selector, is_html @@ -793,7 +777,7 @@ class CSSParser: # multiple non-whitespace combinators. So if the current combinator is not a whitespace, # then we've hit the multiple combinator case, so we should fail. raise SelectorSyntaxError( - 'The multiple combinators at position {}'.format(index), + f'The multiple combinators at position {index}', self.pattern, index ) @@ -824,7 +808,7 @@ class CSSParser: if not has_selector: if not is_forgive or combinator != COMMA_COMBINATOR: raise SelectorSyntaxError( - "The combinator '{}' at position {}, must have a selector before it".format(combinator, index), + f"The combinator '{combinator}' at position {index}, must have a selector before it", self.pattern, index ) @@ -869,7 +853,7 @@ class CSSParser: pseudo = util.lower(css_unescape(m.group('name'))) if pseudo == ":contains": - warnings.warn( + warnings.warn( # noqa: B028 "The pseudo class ':contains' is deprecated, ':-soup-contains' should be used moving forward.", FutureWarning ) @@ -982,13 +966,13 @@ class CSSParser: # Handle parts if key == "at_rule": - raise NotImplementedError("At-rules found at position {}".format(m.start(0))) + raise NotImplementedError(f"At-rules found at position {m.start(0)}") elif key == 'pseudo_class_custom': has_selector = self.parse_pseudo_class_custom(sel, m, has_selector) elif key == 'pseudo_class': has_selector, is_html = self.parse_pseudo_class(sel, m, has_selector, iselector, is_html) elif key == 'pseudo_element': - raise NotImplementedError("Pseudo-element found at position {}".format(m.start(0))) + raise NotImplementedError(f"Pseudo-element found at position {m.start(0)}") elif key == 'pseudo_contains': has_selector = self.parse_pseudo_contains(sel, m, has_selector) elif key in ('pseudo_nth_type', 'pseudo_nth_child'): @@ -1003,7 +987,7 @@ class CSSParser: if not has_selector: if not is_forgive: raise SelectorSyntaxError( - "Expected a selector at position {}".format(m.start(0)), + f"Expected a 
selector at position {m.start(0)}", self.pattern, m.start(0) ) @@ -1013,7 +997,7 @@ class CSSParser: break else: raise SelectorSyntaxError( - "Unmatched pseudo-class close at position {}".format(m.start(0)), + f"Unmatched pseudo-class close at position {m.start(0)}", self.pattern, m.start(0) ) @@ -1031,7 +1015,7 @@ class CSSParser: elif key == 'tag': if has_selector: raise SelectorSyntaxError( - "Tag name found at position {} instead of at the start".format(m.start(0)), + f"Tag name found at position {m.start(0)} instead of at the start", self.pattern, m.start(0) ) @@ -1046,7 +1030,7 @@ class CSSParser: # Handle selectors that are not closed if is_open and not closed: raise SelectorSyntaxError( - "Unclosed pseudo-class at position {}".format(index), + f"Unclosed pseudo-class at position {index}", self.pattern, index ) @@ -1076,7 +1060,7 @@ class CSSParser: # We will always need to finish a selector when `:has()` is used as it leads with combining. # May apply to others as well. raise SelectorSyntaxError( - 'Expected a selector at position {}'.format(index), + f'Expected a selector at position {index}', self.pattern, index ) @@ -1108,7 +1092,7 @@ class CSSParser: end = (m.start(0) - 1) if m else (len(pattern) - 1) if self.debug: # pragma: no cover - print('## PARSING: {!r}'.format(pattern)) + print(f'## PARSING: {pattern!r}') while index <= end: m = None for v in self.css_tokens: @@ -1116,7 +1100,7 @@ class CSSParser: if m: name = v.get_name() if self.debug: # pragma: no cover - print("TOKEN: '{}' --> {!r} at position {}".format(name, m.group(0), m.start(0))) + print(f"TOKEN: '{name}' --> {m.group(0)!r} at position {m.start(0)}") index = m.end(0) yield name, m break @@ -1126,15 +1110,15 @@ class CSSParser: # throw an exception mentioning that the known selector type is in error; # otherwise, report the invalid character. 
if c == '[': - msg = "Malformed attribute selector at position {}".format(index) + msg = f"Malformed attribute selector at position {index}" elif c == '.': - msg = "Malformed class selector at position {}".format(index) + msg = f"Malformed class selector at position {index}" elif c == '#': - msg = "Malformed id selector at position {}".format(index) + msg = f"Malformed id selector at position {index}" elif c == ':': - msg = "Malformed pseudo-class selector at position {}".format(index) + msg = f"Malformed pseudo-class selector at position {index}" else: - msg = "Invalid character {!r} position {}".format(c, index) + msg = f"Invalid character {c!r} position {index}" raise SelectorSyntaxError(msg, self.pattern, index) if self.debug: # pragma: no cover print('## END PARSING') diff --git a/lib/soupsieve/css_types.py b/lib/soupsieve/css_types.py index 90fb4134..621ec7eb 100644 --- a/lib/soupsieve/css_types.py +++ b/lib/soupsieve/css_types.py @@ -45,11 +45,11 @@ class Immutable: for k, v in kwargs.items(): temp.append(type(v)) temp.append(v) - super(Immutable, self).__setattr__(k, v) - super(Immutable, self).__setattr__('_hash', hash(tuple(temp))) + super().__setattr__(k, v) + super().__setattr__('_hash', hash(tuple(temp))) @classmethod - def __base__(cls) -> "type[Immutable]": + def __base__(cls) -> type[Immutable]: """Get base class.""" return cls @@ -59,7 +59,7 @@ class Immutable: return ( isinstance(other, self.__base__()) and - all([getattr(other, key) == getattr(self, key) for key in self.__slots__ if key != '_hash']) + all(getattr(other, key) == getattr(self, key) for key in self.__slots__ if key != '_hash') ) def __ne__(self, other: Any) -> bool: @@ -67,7 +67,7 @@ class Immutable: return ( not isinstance(other, self.__base__()) or - any([getattr(other, key) != getattr(self, key) for key in self.__slots__ if key != '_hash']) + any(getattr(other, key) != getattr(self, key) for key in self.__slots__ if key != '_hash') ) def __hash__(self) -> int: @@ -78,14 +78,13 @@ 
class Immutable: def __setattr__(self, name: str, value: Any) -> None: """Prevent mutability.""" - raise AttributeError("'{}' is immutable".format(self.__class__.__name__)) + raise AttributeError(f"'{self.__class__.__name__}' is immutable") def __repr__(self) -> str: # pragma: no cover """Representation.""" - return "{}({})".format( - self.__class__.__name__, ', '.join(["{}={!r}".format(k, getattr(self, k)) for k in self.__slots__[:-1]]) - ) + r = ', '.join([f"{k}={getattr(self, k)!r}" for k in self.__slots__[:-1]]) + return f"{self.__class__.__name__}({r})" __str__ = __repr__ @@ -112,10 +111,10 @@ class ImmutableDict(Mapping[Any, Any]): """Validate arguments.""" if isinstance(arg, dict): - if not all([isinstance(v, Hashable) for v in arg.values()]): - raise TypeError('{} values must be hashable'.format(self.__class__.__name__)) - elif not all([isinstance(k, Hashable) and isinstance(v, Hashable) for k, v in arg]): - raise TypeError('{} values must be hashable'.format(self.__class__.__name__)) + if not all(isinstance(v, Hashable) for v in arg.values()): + raise TypeError(f'{self.__class__.__name__} values must be hashable') + elif not all(isinstance(k, Hashable) and isinstance(v, Hashable) for k, v in arg): + raise TypeError(f'{self.__class__.__name__} values must be hashable') def __iter__(self) -> Iterator[Any]: """Iterator.""" @@ -140,7 +139,7 @@ class ImmutableDict(Mapping[Any, Any]): def __repr__(self) -> str: # pragma: no cover """Representation.""" - return "{!r}".format(self._d) + return f"{self._d!r}" __str__ = __repr__ @@ -157,10 +156,10 @@ class Namespaces(ImmutableDict): """Validate arguments.""" if isinstance(arg, dict): - if not all([isinstance(v, str) for v in arg.values()]): - raise TypeError('{} values must be hashable'.format(self.__class__.__name__)) - elif not all([isinstance(k, str) and isinstance(v, str) for k, v in arg]): - raise TypeError('{} keys and values must be Unicode strings'.format(self.__class__.__name__)) + if not all(isinstance(v, 
str) for v in arg.values()): + raise TypeError(f'{self.__class__.__name__} values must be hashable') + elif not all(isinstance(k, str) and isinstance(v, str) for k, v in arg): + raise TypeError(f'{self.__class__.__name__} keys and values must be Unicode strings') class CustomSelectors(ImmutableDict): @@ -175,10 +174,10 @@ class CustomSelectors(ImmutableDict): """Validate arguments.""" if isinstance(arg, dict): - if not all([isinstance(v, str) for v in arg.values()]): - raise TypeError('{} values must be hashable'.format(self.__class__.__name__)) - elif not all([isinstance(k, str) and isinstance(v, str) for k, v in arg]): - raise TypeError('{} keys and values must be Unicode strings'.format(self.__class__.__name__)) + if not all(isinstance(v, str) for v in arg.values()): + raise TypeError(f'{self.__class__.__name__} values must be hashable') + elif not all(isinstance(k, str) and isinstance(v, str) for k, v in arg): + raise TypeError(f'{self.__class__.__name__} keys and values must be Unicode strings') class Selector(Immutable): @@ -367,7 +366,7 @@ class SelectorList(Immutable): """Initialize.""" super().__init__( - selectors=tuple(selectors) if selectors is not None else tuple(), + selectors=tuple(selectors) if selectors is not None else (), is_not=is_not, is_html=is_html ) diff --git a/lib/soupsieve/pretty.py b/lib/soupsieve/pretty.py index 4c883347..810b87aa 100644 --- a/lib/soupsieve/pretty.py +++ b/lib/soupsieve/pretty.py @@ -10,7 +10,7 @@ The format and various output types is fairly known (though it hasn't been tested extensively to make sure we aren't missing corners). 
Example: - +------- ``` >>> import soupsieve as sv >>> sv.compile('this > that.class[name=value]').selectors.pretty() @@ -64,6 +64,7 @@ SelectorList( is_not=False, is_html=False) ``` + """ from __future__ import annotations import re @@ -123,16 +124,16 @@ def pretty(obj: Any) -> str: # pragma: no cover index = m.end(0) if name in ('class', 'lstrt', 'dstrt', 'tstrt'): indent += 4 - output.append('{}\n{}'.format(m.group(0), " " * indent)) + output.append(f'{m.group(0)}\n{" " * indent}') elif name in ('param', 'int', 'kword', 'sqstr', 'dqstr', 'empty'): output.append(m.group(0)) elif name in ('lend', 'dend', 'tend'): indent -= 4 output.append(m.group(0)) elif name in ('sep',): - output.append('{}\n{}'.format(m.group(1), " " * indent)) + output.append(f'{m.group(1)}\n{" " * indent}') elif name in ('dsep',): - output.append('{} '.format(m.group(1))) + output.append(f'{m.group(1)} ') break return ''.join(output) diff --git a/lib/soupsieve/util.py b/lib/soupsieve/util.py index 84821bb4..1d0505f4 100644 --- a/lib/soupsieve/util.py +++ b/lib/soupsieve/util.py @@ -37,7 +37,7 @@ class SelectorSyntaxError(Exception): if pattern is not None and index is not None: # Format pattern to show line and column position self.context, self.line, self.col = get_pattern_context(pattern, index) - msg = '{}\n line {}:\n{}'.format(msg, self.line, self.context) + msg = f'{msg}\n line {self.line}:\n{self.context}' super().__init__(msg) @@ -105,7 +105,7 @@ def get_pattern_context(pattern: str, index: int) -> tuple[str, int, int]: # we will render the output with just `\n`. We will still log the column # correctly though. 
text.append('\n') - text.append('{}{}'.format(indent, linetext)) + text.append(f'{indent}{linetext}') if offset is not None: text.append('\n') text.append(' ' * (col + offset) + '^') diff --git a/lib/thefuzz/__init__.py b/lib/thefuzz/__init__.py index 482e4a19..e4533712 100644 --- a/lib/thefuzz/__init__.py +++ b/lib/thefuzz/__init__.py @@ -1 +1 @@ -__version__ = '0.19.0' +__version__ = '0.21.0' diff --git a/lib/thefuzz/process.py b/lib/thefuzz/process.py index f6b15eaa..bdfe900e 100644 --- a/lib/thefuzz/process.py +++ b/lib/thefuzz/process.py @@ -4,6 +4,7 @@ from . import utils import logging from rapidfuzz import fuzz as rfuzz from rapidfuzz import process as rprocess +from functools import partial _logger = logging.getLogger(__name__) @@ -23,11 +24,14 @@ def _get_processor(processor, scorer): fuzz.UWRatio, fuzz.UQRatio): return processor - if not processor: - return utils.full_process + force_ascii = scorer not in [fuzz.UWRatio, fuzz.UQRatio] + pre_processor = partial(utils.full_process, force_ascii=force_ascii) + + if not processor or processor == utils.full_process: + return pre_processor def wrapper(s): - return utils.full_process(processor(s)) + return pre_processor(processor(s)) return wrapper diff --git a/lib/thefuzz/py.typed b/lib/thefuzz/py.typed new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/lib/thefuzz/py.typed @@ -0,0 +1 @@ + diff --git a/lib/tornado/__init__.py b/lib/tornado/__init__.py index c2a8f25b..a0ae714d 100644 --- a/lib/tornado/__init__.py +++ b/lib/tornado/__init__.py @@ -22,8 +22,8 @@ # is zero for an official release, positive for a development branch, # or negative for a release candidate or beta (after the base version # number has been incremented) -version = "6.3.3" -version_info = (6, 3, 3, 0) +version = "6.4" +version_info = (6, 4, 0, 0) import importlib import typing diff --git a/lib/tornado/__init__.pyi b/lib/tornado/__init__.pyi new file mode 100644 index 00000000..60c2a7e7 --- /dev/null +++ 
b/lib/tornado/__init__.pyi @@ -0,0 +1,33 @@ +import typing + +version: str +version_info: typing.Tuple[int, int, int, int] + +from . import auth +from . import autoreload +from . import concurrent +from . import curl_httpclient +from . import escape +from . import gen +from . import http1connection +from . import httpclient +from . import httpserver +from . import httputil +from . import ioloop +from . import iostream +from . import locale +from . import locks +from . import log +from . import netutil +from . import options +from . import platform +from . import process +from . import queues +from . import routing +from . import simple_httpclient +from . import tcpclient +from . import tcpserver +from . import template +from . import testing +from . import util +from . import web diff --git a/lib/tornado/auth.py b/lib/tornado/auth.py index 59501f56..d1edcc65 100644 --- a/lib/tornado/auth.py +++ b/lib/tornado/auth.py @@ -33,23 +33,39 @@ See the individual service classes below for complete documentation. Example usage for Google OAuth: +.. testsetup:: + + import urllib + .. testcode:: class GoogleOAuth2LoginHandler(tornado.web.RequestHandler, - tornado.auth.GoogleOAuth2Mixin): + tornado.auth.GoogleOAuth2Mixin): async def get(self): - if self.get_argument('code', False): - user = await self.get_authenticated_user( - redirect_uri='http://your.site.com/auth/google', - code=self.get_argument('code')) - # Save the user with e.g. set_signed_cookie - else: - self.authorize_redirect( - redirect_uri='http://your.site.com/auth/google', - client_id=self.settings['google_oauth']['key'], - scope=['profile', 'email'], - response_type='code', - extra_params={'approval_prompt': 'auto'}) + # Google requires an exact match for redirect_uri, so it's + # best to get it from your app configuration instead of from + # self.request.full_uri(). 
+ redirect_uri = urllib.parse.urljoin(self.application.settings['redirect_base_uri'], + self.reverse_url('google_oauth')) + async def get(self): + if self.get_argument('code', False): + access = await self.get_authenticated_user( + redirect_uri=redirect_uri, + code=self.get_argument('code')) + user = await self.oauth2_request( + "https://www.googleapis.com/oauth2/v1/userinfo", + access_token=access["access_token"]) + # Save the user and access token. For example: + user_cookie = dict(id=user["id"], access_token=access["access_token"]) + self.set_signed_cookie("user", json.dumps(user_cookie)) + self.redirect("/") + else: + self.authorize_redirect( + redirect_uri=redirect_uri, + client_id=self.get_google_oauth_settings()['key'], + scope=['profile', 'email'], + response_type='code', + extra_params={'approval_prompt': 'auto'}) .. testoutput:: :hide: @@ -63,6 +79,7 @@ import hmac import time import urllib.parse import uuid +import warnings from tornado import httpclient from tornado import escape @@ -571,7 +588,13 @@ class OAuth2Mixin(object): The ``callback`` argument and returned awaitable were removed; this is now an ordinary synchronous function. + + .. deprecated:: 6.4 + The ``client_secret`` argument (which has never had any effect) + is deprecated and will be removed in Tornado 7.0. """ + if client_secret is not None: + warnings.warn("client_secret argument is deprecated", DeprecationWarning) handler = cast(RequestHandler, self) args = {"response_type": response_type} if redirect_uri is not None: @@ -705,6 +728,12 @@ class TwitterMixin(OAuthMixin): includes the attributes ``username``, ``name``, ``access_token``, and all of the custom Twitter user attributes described at https://dev.twitter.com/docs/api/1.1/get/users/show + + .. deprecated:: 6.3 + This class refers to version 1.1 of the Twitter API, which has been + deprecated by Twitter. 
Since Twitter has begun to limit access to its + API, this class will no longer be updated and will be removed in the + future. """ _OAUTH_REQUEST_TOKEN_URL = "https://api.twitter.com/oauth/request_token" @@ -839,12 +868,18 @@ class GoogleOAuth2Mixin(OAuth2Mixin): * Go to the Google Dev Console at http://console.developers.google.com * Select a project, or create a new one. + * Depending on permissions required, you may need to set your app to + "testing" mode and add your account as a test user, or go through + a verification process. You may also need to use the "Enable + APIs and Services" command to enable specific services. * In the sidebar on the left, select Credentials. * Click CREATE CREDENTIALS and click OAuth client ID. * Under Application type, select Web application. * Name OAuth 2.0 client and click Create. * Copy the "Client secret" and "Client ID" to the application settings as ``{"google_oauth": {"key": CLIENT_ID, "secret": CLIENT_SECRET}}`` + * You must register the ``redirect_uri`` you plan to use with this class + on the Credentials page. .. versionadded:: 3.2 """ @@ -890,27 +925,39 @@ class GoogleOAuth2Mixin(OAuth2Mixin): Example usage: + .. testsetup:: + + import urllib + .. testcode:: class GoogleOAuth2LoginHandler(tornado.web.RequestHandler, tornado.auth.GoogleOAuth2Mixin): async def get(self): - if self.get_argument('code', False): - access = await self.get_authenticated_user( - redirect_uri='http://your.site.com/auth/google', - code=self.get_argument('code')) - user = await self.oauth2_request( - "https://www.googleapis.com/oauth2/v1/userinfo", - access_token=access["access_token"]) - # Save the user and access token with - # e.g. set_signed_cookie.
- else: - self.authorize_redirect( - redirect_uri='http://your.site.com/auth/google', - client_id=self.get_google_oauth_settings()['key'], - scope=['profile', 'email'], - response_type='code', - extra_params={'approval_prompt': 'auto'}) + # Google requires an exact match for redirect_uri, so it's + # best to get it from your app configuration instead of from + # self.request.full_uri(). + redirect_uri = urllib.parse.urljoin(self.application.settings['redirect_base_uri'], + self.reverse_url('google_oauth')) + async def get(self): + if self.get_argument('code', False): + access = await self.get_authenticated_user( + redirect_uri=redirect_uri, + code=self.get_argument('code')) + user = await self.oauth2_request( + "https://www.googleapis.com/oauth2/v1/userinfo", + access_token=access["access_token"]) + # Save the user and access token. For example: + user_cookie = dict(id=user["id"], access_token=access["access_token"]) + self.set_signed_cookie("user", json.dumps(user_cookie)) + self.redirect("/") + else: + self.authorize_redirect( + redirect_uri=redirect_uri, + client_id=self.get_google_oauth_settings()['key'], + scope=['profile', 'email'], + response_type='code', + extra_params={'approval_prompt': 'auto'}) .. testoutput:: :hide: @@ -971,18 +1018,21 @@ class FacebookGraphMixin(OAuth2Mixin): class FacebookGraphLoginHandler(tornado.web.RequestHandler, tornado.auth.FacebookGraphMixin): async def get(self): - if self.get_argument("code", False): - user = await self.get_authenticated_user( - redirect_uri='/auth/facebookgraph/', - client_id=self.settings["facebook_api_key"], - client_secret=self.settings["facebook_secret"], - code=self.get_argument("code")) - # Save the user with e.g. 
set_signed_cookie - else: - self.authorize_redirect( - redirect_uri='/auth/facebookgraph/', - client_id=self.settings["facebook_api_key"], - extra_params={"scope": "read_stream,offline_access"}) + redirect_uri = urllib.parse.urljoin( + self.application.settings['redirect_base_uri'], + self.reverse_url('facebook_oauth')) + if self.get_argument("code", False): + user = await self.get_authenticated_user( + redirect_uri=redirect_uri, + client_id=self.settings["facebook_api_key"], + client_secret=self.settings["facebook_secret"], + code=self.get_argument("code")) + # Save the user with e.g. set_signed_cookie + else: + self.authorize_redirect( + redirect_uri=redirect_uri, + client_id=self.settings["facebook_api_key"], + extra_params={"scope": "user_posts"}) .. testoutput:: :hide: diff --git a/lib/tornado/autoreload.py b/lib/tornado/autoreload.py index 0ac44966..c6a6e82d 100644 --- a/lib/tornado/autoreload.py +++ b/lib/tornado/autoreload.py @@ -60,8 +60,7 @@ import sys # may become relative in spite of the future import. # # We address the former problem by reconstructing the original command -# line (Python >= 3.4) or by setting the $PYTHONPATH environment -# variable (Python < 3.4) before re-execution so the new process will +# line before re-execution so the new process will # see the correct path. We attempt to address the latter problem when # tornado.autoreload is run as __main__. 
@@ -76,8 +75,9 @@ if __name__ == "__main__": del sys.path[0] import functools +import importlib.abc import os -import pkgutil # type: ignore +import pkgutil import sys import traceback import types @@ -87,18 +87,13 @@ import weakref from tornado import ioloop from tornado.log import gen_log from tornado import process -from tornado.util import exec_in try: import signal except ImportError: signal = None # type: ignore -import typing -from typing import Callable, Dict - -if typing.TYPE_CHECKING: - from typing import List, Optional, Union # noqa: F401 +from typing import Callable, Dict, Optional, List, Union # os.execv is broken on Windows and can't properly parse command line # arguments and executable name if they contain whitespaces. subprocess @@ -108,9 +103,11 @@ _has_execv = sys.platform != "win32" _watched_files = set() _reload_hooks = [] _reload_attempted = False -_io_loops = weakref.WeakKeyDictionary() # type: ignore +_io_loops: "weakref.WeakKeyDictionary[ioloop.IOLoop, bool]" = ( + weakref.WeakKeyDictionary() +) _autoreload_is_main = False -_original_argv = None # type: Optional[List[str]] +_original_argv: Optional[List[str]] = None _original_spec = None @@ -126,7 +123,7 @@ def start(check_time: int = 500) -> None: _io_loops[io_loop] = True if len(_io_loops) > 1: gen_log.warning("tornado.autoreload started more than once in the same process") - modify_times = {} # type: Dict[str, float] + modify_times: Dict[str, float] = {} callback = functools.partial(_reload_on_update, modify_times) scheduler = ioloop.PeriodicCallback(callback, check_time) scheduler.start() @@ -214,10 +211,7 @@ def _reload() -> None: # sys.path fixes: see comments at top of file. If __main__.__spec__ # exists, we were invoked with -m and the effective path is about to # change on re-exec. Reconstruct the original command line to - # ensure that the new process sees the same path we did. 
If - # __spec__ is not available (Python < 3.4), check instead if - # sys.path[0] is an empty string and add the current directory to - # $PYTHONPATH. + # ensure that the new process sees the same path we did. if _autoreload_is_main: assert _original_argv is not None spec = _original_spec @@ -225,43 +219,25 @@ def _reload() -> None: else: spec = getattr(sys.modules["__main__"], "__spec__", None) argv = sys.argv - if spec: + if spec and spec.name != "__main__": + # __spec__ is set in two cases: when running a module, and when running a directory. (when + # running a file, there is no spec). In the former case, we must pass -m to maintain the + # module-style behavior (setting sys.path), even though python stripped -m from its argv at + # startup. If sys.path is exactly __main__, we're running a directory and should fall + # through to the non-module behavior. + # + # Some of this, including the use of exactly __main__ as a spec for directory mode, + # is documented at https://docs.python.org/3/library/runpy.html#runpy.run_path argv = ["-m", spec.name] + argv[1:] - else: - path_prefix = "." + os.pathsep - if sys.path[0] == "" and not os.environ.get("PYTHONPATH", "").startswith( - path_prefix - ): - os.environ["PYTHONPATH"] = path_prefix + os.environ.get("PYTHONPATH", "") + if not _has_execv: subprocess.Popen([sys.executable] + argv) os._exit(0) else: - try: - os.execv(sys.executable, [sys.executable] + argv) - except OSError: - # Mac OS X versions prior to 10.6 do not support execv in - # a process that contains multiple threads. Instead of - # re-executing in the current process, start a new one - # and cause the current process to exit. This isn't - # ideal since the new process is detached from the parent - # terminal and thus cannot easily be killed with ctrl-C, - # but it's better than not being able to autoreload at - # all. 
- # Unfortunately the errno returned in this case does not - # appear to be consistent, so we can't easily check for - # this error specifically. - os.spawnv( - os.P_NOWAIT, sys.executable, [sys.executable] + argv # type: ignore - ) - # At this point the IOLoop has been closed and finally - # blocks will experience errors if we allow the stack to - # unwind, so just exit uncleanly. - os._exit(0) + os.execv(sys.executable, [sys.executable] + argv) -_USAGE = """\ -Usage: +_USAGE = """ python -m tornado.autoreload -m module.to.run [args...] python -m tornado.autoreload path/to/script.py [args...] """ @@ -283,6 +259,12 @@ def main() -> None: # Remember that we were launched with autoreload as main. # The main module can be tricky; set the variables both in our globals # (which may be __main__) and the real importable version. + # + # We use optparse instead of the newer argparse because we want to + # mimic the python command-line interface which requires stopping + # parsing at the first positional argument. optparse supports + # this but as far as I can tell argparse does not. 
+ import optparse import tornado.autoreload global _autoreload_is_main @@ -292,38 +274,43 @@ def main() -> None: tornado.autoreload._original_argv = _original_argv = original_argv original_spec = getattr(sys.modules["__main__"], "__spec__", None) tornado.autoreload._original_spec = _original_spec = original_spec - sys.argv = sys.argv[:] - if len(sys.argv) >= 3 and sys.argv[1] == "-m": - mode = "module" - module = sys.argv[2] - del sys.argv[1:3] - elif len(sys.argv) >= 2: - mode = "script" - script = sys.argv[1] - sys.argv = sys.argv[1:] + + parser = optparse.OptionParser( + prog="python -m tornado.autoreload", + usage=_USAGE, + epilog="Either -m or a path must be specified, but not both", + ) + parser.disable_interspersed_args() + parser.add_option("-m", dest="module", metavar="module", help="module to run") + parser.add_option( + "--until-success", + action="store_true", + help="stop reloading after the program exits successfully (status code 0)", + ) + opts, rest = parser.parse_args() + if opts.module is None: + if not rest: + print("Either -m or a path must be specified", file=sys.stderr) + sys.exit(1) + path = rest[0] + sys.argv = rest[:] else: - print(_USAGE, file=sys.stderr) - sys.exit(1) + path = None + sys.argv = [sys.argv[0]] + rest + # SystemExit.code is typed funny: https://github.com/python/typeshed/issues/8513 + # All we care about is truthiness + exit_status: Union[int, str, None] = 1 try: - if mode == "module": - import runpy + import runpy - runpy.run_module(module, run_name="__main__", alter_sys=True) - elif mode == "script": - with open(script) as f: - # Execute the script in our namespace instead of creating - # a new one so that something that tries to import __main__ - # (e.g. the unittest module) will see names defined in the - # script instead of just those defined in this module.
- global __package__ - del __package__ - exec_in(f.read(), globals(), globals()) + if opts.module is not None: + runpy.run_module(opts.module, run_name="__main__", alter_sys=True) + else: + assert path is not None + runpy.run_path(path, run_name="__main__") except SystemExit as e: + exit_status = e.code gen_log.info("Script exited with status %s", e.code) except Exception as e: gen_log.warning("Script exited with uncaught exception", exc_info=True) @@ -331,7 +318,7 @@ def main() -> None: # never made it into sys.modules and so we won't know to watch it. # Just to make sure we've covered everything, walk the stack trace # from the exception and watch every file. - for (filename, lineno, name, line) in traceback.extract_tb(sys.exc_info()[2]): + for filename, lineno, name, line in traceback.extract_tb(sys.exc_info()[2]): watch(filename) if isinstance(e, SyntaxError): # SyntaxErrors are special: their innermost stack frame is fake @@ -340,17 +327,20 @@ def main() -> None: if e.filename is not None: watch(e.filename) else: + exit_status = 0 gen_log.info("Script exited normally") # restore sys.argv so subsequent executions will include autoreload sys.argv = original_argv - if mode == "module": + if opts.module is not None: + assert opts.module is not None # runpy did a fake import of the module as __main__, but now it's # no longer in sys.modules. Figure out where it is and watch it. 
- loader = pkgutil.get_loader(module) - if loader is not None: - watch(loader.get_filename()) # type: ignore - + loader = pkgutil.get_loader(opts.module) + if loader is not None and isinstance(loader, importlib.abc.FileLoader): + watch(loader.get_filename()) + if opts.until_success and not exit_status: + return wait() diff --git a/lib/tornado/concurrent.py b/lib/tornado/concurrent.py index 6e05346b..86bbd703 100644 --- a/lib/tornado/concurrent.py +++ b/lib/tornado/concurrent.py @@ -54,7 +54,7 @@ def is_future(x: Any) -> bool: class DummyExecutor(futures.Executor): - def submit( + def submit( # type: ignore[override] self, fn: Callable[..., _T], *args: Any, **kwargs: Any ) -> "futures.Future[_T]": future = futures.Future() # type: futures.Future[_T] @@ -64,8 +64,15 @@ class DummyExecutor(futures.Executor): future_set_exc_info(future, sys.exc_info()) return future - def shutdown(self, wait: bool = True) -> None: - pass + if sys.version_info >= (3, 9): + + def shutdown(self, wait: bool = True, cancel_futures: bool = False) -> None: + pass + + else: + + def shutdown(self, wait: bool = True) -> None: + pass dummy_executor = DummyExecutor() @@ -150,8 +157,7 @@ def chain_future(a: "Future[_T]", b: "Future[_T]") -> None: """ - def copy(future: "Future[_T]") -> None: - assert future is a + def copy(a: "Future[_T]") -> None: if b.done(): return if hasattr(a, "exc_info") and a.exc_info() is not None: # type: ignore diff --git a/lib/tornado/escape.py b/lib/tornado/escape.py index 55354c30..84abfca6 100644 --- a/lib/tornado/escape.py +++ b/lib/tornado/escape.py @@ -17,9 +17,15 @@ Also includes a few other miscellaneous string manipulation functions that have crept in over time. + +Many functions in this module have near-equivalents in the standard library +(the differences mainly relate to handling of bytes and unicode strings, +and were more relevant in Python 2). In new code, the standard library +functions are encouraged instead of this module where applicable. 
See the +docstrings on each function for details. """ -import html.entities +import html import json import re import urllib.parse @@ -30,16 +36,6 @@ import typing from typing import Union, Any, Optional, Dict, List, Callable -_XHTML_ESCAPE_RE = re.compile("[&<>\"']") -_XHTML_ESCAPE_DICT = { - "&": "&amp;", - "<": "&lt;", - ">": "&gt;", - '"': "&quot;", - "'": "&#39;", -} - - def xhtml_escape(value: Union[str, bytes]) -> str: """Escapes a string so it is valid within HTML or XML. @@ -47,25 +43,50 @@ def xhtml_escape(value: Union[str, bytes]) -> str: When used in attribute values the escaped strings must be enclosed in quotes. + Equivalent to `html.escape` except that this function always returns + type `str` while `html.escape` returns `bytes` if its input is `bytes`. + .. versionchanged:: 3.2 Added the single quote to the list of escaped characters. + + .. versionchanged:: 6.4 + + Now simply wraps `html.escape`. This is equivalent to the old behavior + except that single quotes are now escaped as ``&#x27;`` instead of + ``&#39;`` and performance may be different. """ - return _XHTML_ESCAPE_RE.sub( - lambda match: _XHTML_ESCAPE_DICT[match.group(0)], to_basestring(value) - ) + return html.escape(to_unicode(value)) def xhtml_unescape(value: Union[str, bytes]) -> str: - """Un-escapes an XML-escaped string.""" - return re.sub(r"&(#?)(\w+?);", _convert_entity, _unicode(value)) + """Un-escapes an XML-escaped string. + + Equivalent to `html.unescape` except that this function always returns + type `str` while `html.unescape` returns `bytes` if its input is `bytes`. + + .. versionchanged:: 6.4 + + Now simply wraps `html.unescape`. This changes behavior for some inputs + as required by the HTML 5 specification + https://html.spec.whatwg.org/multipage/parsing.html#numeric-character-reference-end-state + + Some invalid inputs such as surrogates now raise an error, and numeric + references to certain ISO-8859-1 characters are now handled correctly.
+ """ + return html.unescape(to_unicode(value)) # The fact that json_encode wraps json.dumps is an implementation detail. # Please see https://github.com/tornadoweb/tornado/pull/706 # before sending a pull request that adds **kwargs to this function. def json_encode(value: Any) -> str: - """JSON-encodes the given Python object.""" + """JSON-encodes the given Python object. + + Equivalent to `json.dumps` with the additional guarantee that the output + will never contain the character sequence ```` tag. + """ # JSON permits but does not require forward slashes to be escaped. # This is useful when json data is emitted in a tags from prematurely terminating @@ -78,9 +99,9 @@ def json_encode(value: Any) -> str: def json_decode(value: Union[str, bytes]) -> Any: """Returns Python objects for the given JSON string. - Supports both `str` and `bytes` inputs. + Supports both `str` and `bytes` inputs. Equvalent to `json.loads`. """ - return json.loads(to_basestring(value)) + return json.loads(value) def squeeze(value: str) -> str: @@ -91,16 +112,20 @@ def squeeze(value: str) -> str: def url_escape(value: Union[str, bytes], plus: bool = True) -> str: """Returns a URL-encoded version of the given value. - If ``plus`` is true (the default), spaces will be represented - as "+" instead of "%20". This is appropriate for query strings - but not for the path component of a URL. Note that this default - is the reverse of Python's urllib module. + Equivalent to either `urllib.parse.quote_plus` or `urllib.parse.quote` depending on the ``plus`` + argument. + + If ``plus`` is true (the default), spaces will be represented as ``+`` and slashes will be + represented as ``%2F``. This is appropriate for query strings. If ``plus`` is false, spaces + will be represented as ``%20`` and slashes are left as-is. This is appropriate for the path + component of a URL. Note that the default of ``plus=True`` is effectively the + reverse of Python's urllib module. .. 
versionadded:: 3.1 The ``plus`` argument """ quote = urllib.parse.quote_plus if plus else urllib.parse.quote - return quote(utf8(value)) + return quote(value) @typing.overload @@ -108,28 +133,29 @@ def url_unescape(value: Union[str, bytes], encoding: None, plus: bool = True) -> pass -@typing.overload # noqa: F811 +@typing.overload def url_unescape( value: Union[str, bytes], encoding: str = "utf-8", plus: bool = True ) -> str: pass -def url_unescape( # noqa: F811 +def url_unescape( value: Union[str, bytes], encoding: Optional[str] = "utf-8", plus: bool = True ) -> Union[str, bytes]: """Decodes the given value from a URL. The argument may be either a byte or unicode string. - If encoding is None, the result will be a byte string. Otherwise, - the result is a unicode string in the specified encoding. + If encoding is None, the result will be a byte string and this function is equivalent to + `urllib.parse.unquote_to_bytes` if ``plus=False``. Otherwise, the result is a unicode string in + the specified encoding and this function is equivalent to either `urllib.parse.unquote_plus` or + `urllib.parse.unquote` except that this function also accepts `bytes` as input. - If ``plus`` is true (the default), plus signs will be interpreted - as spaces (literal plus signs must be represented as "%2B"). This - is appropriate for query strings and form-encoded values but not - for the path component of a URL. Note that this default is the - reverse of Python's urllib module. + If ``plus`` is true (the default), plus signs will be interpreted as spaces (literal plus signs + must be represented as "%2B"). This is appropriate for query strings and form-encoded values + but not for the path component of a URL. Note that this default is the reverse of Python's + urllib module. .. 
versionadded:: 3.1 The ``plus`` argument @@ -175,17 +201,17 @@ def utf8(value: bytes) -> bytes: pass -@typing.overload # noqa: F811 +@typing.overload def utf8(value: str) -> bytes: pass -@typing.overload # noqa: F811 +@typing.overload def utf8(value: None) -> None: pass -def utf8(value: Union[None, str, bytes]) -> Optional[bytes]: # noqa: F811 +def utf8(value: Union[None, str, bytes]) -> Optional[bytes]: """Converts a string argument to a byte string. If the argument is already a byte string or None, it is returned unchanged. @@ -206,17 +232,17 @@ def to_unicode(value: str) -> str: pass -@typing.overload # noqa: F811 +@typing.overload def to_unicode(value: bytes) -> str: pass -@typing.overload # noqa: F811 +@typing.overload def to_unicode(value: None) -> None: pass -def to_unicode(value: Union[None, str, bytes]) -> Optional[str]: # noqa: F811 +def to_unicode(value: Union[None, str, bytes]) -> Optional[str]: """Converts a string argument to a unicode string. If the argument is already a unicode string or None, it is returned @@ -375,28 +401,3 @@ def linkify( # that we won't pick up ", etc. text = _unicode(xhtml_escape(text)) return _URL_RE.sub(make_link, text) - - -def _convert_entity(m: typing.Match) -> str: - if m.group(1) == "#": - try: - if m.group(2)[:1].lower() == "x": - return chr(int(m.group(2)[1:], 16)) - else: - return chr(int(m.group(2))) - except ValueError: - return "&#%s;" % m.group(2) - try: - return _HTML_UNICODE_MAP[m.group(2)] - except KeyError: - return "&%s;" % m.group(2) - - -def _build_unicode_map() -> Dict[str, str]: - unicode_map = {} - for name, value in html.entities.name2codepoint.items(): - unicode_map[name] = chr(value) - return unicode_map - - -_HTML_UNICODE_MAP = _build_unicode_map() diff --git a/lib/tornado/gen.py b/lib/tornado/gen.py index 4819b857..dab4fd09 100644 --- a/lib/tornado/gen.py +++ b/lib/tornado/gen.py @@ -840,13 +840,17 @@ class Runner(object): return False -# Convert Awaitables into Futures. 
-try: - _wrap_awaitable = asyncio.ensure_future -except AttributeError: - # asyncio.ensure_future was introduced in Python 3.4.4, but - # Debian jessie still ships with 3.4.2 so try the old name. - _wrap_awaitable = getattr(asyncio, "async") +def _wrap_awaitable(awaitable: Awaitable) -> Future: + # Convert Awaitables into Futures. + # Note that we use ensure_future, which handles both awaitables + # and coroutines, rather than create_task, which only accepts + # coroutines. (ensure_future calls create_task if given a coroutine) + fut = asyncio.ensure_future(awaitable) + # See comments on IOLoop._pending_tasks. + loop = IOLoop.current() + loop._register_task(fut) + fut.add_done_callback(lambda f: loop._unregister_task(f)) + return fut def convert_yielded(yielded: _Yieldable) -> Future: diff --git a/lib/tornado/httpserver.py b/lib/tornado/httpserver.py index 77dc541e..757f711b 100644 --- a/lib/tornado/httpserver.py +++ b/lib/tornado/httpserver.py @@ -74,7 +74,7 @@ class HTTPServer(TCPServer, Configurable, httputil.HTTPServerConnectionDelegate) To make this server serve SSL traffic, send the ``ssl_options`` keyword argument with an `ssl.SSLContext` object. For compatibility with older versions of Python ``ssl_options`` may also be a dictionary of keyword - arguments for the `ssl.wrap_socket` method.:: + arguments for the `ssl.SSLContext.wrap_socket` method.:: ssl_ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) ssl_ctx.load_cert_chain(os.path.join(data_dir, "mydomain.crt"), diff --git a/lib/tornado/httputil.py b/lib/tornado/httputil.py index 9c341d47..b21d8046 100644 --- a/lib/tornado/httputil.py +++ b/lib/tornado/httputil.py @@ -856,7 +856,8 @@ def format_timestamp( The argument may be a numeric timestamp as returned by `time.time`, a time tuple as returned by `time.gmtime`, or a `datetime.datetime` - object. + object. Naive `datetime.datetime` objects are assumed to represent + UTC; aware objects are converted to UTC before formatting. 
>>> format_timestamp(1359312200) 'Sun, 27 Jan 2013 18:43:20 GMT' diff --git a/lib/tornado/ioloop.py b/lib/tornado/ioloop.py index bcdcca09..3fb1359a 100644 --- a/lib/tornado/ioloop.py +++ b/lib/tornado/ioloop.py @@ -50,7 +50,7 @@ import typing from typing import Union, Any, Type, Optional, Callable, TypeVar, Tuple, Awaitable if typing.TYPE_CHECKING: - from typing import Dict, List # noqa: F401 + from typing import Dict, List, Set # noqa: F401 from typing_extensions import Protocol else: @@ -159,6 +159,18 @@ class IOLoop(Configurable): # In Python 3, _ioloop_for_asyncio maps from asyncio loops to IOLoops. _ioloop_for_asyncio = dict() # type: Dict[asyncio.AbstractEventLoop, IOLoop] + # Maintain a set of all pending tasks to follow the warning in the docs + # of asyncio.create_tasks: + # https://docs.python.org/3.11/library/asyncio-task.html#asyncio.create_task + # This ensures that all pending tasks have a strong reference so they + # will not be garbage collected before they are finished. + # (Thus avoiding "task was destroyed but it is pending" warnings) + # An analogous change has been proposed in cpython for 3.13: + # https://github.com/python/cpython/issues/91887 + # If that change is accepted, this can eventually be removed. + # If it is not, we will consider the rationale and may remove this. + _pending_tasks = set() # type: Set[Future] + @classmethod def configure( cls, impl: "Union[None, str, Type[Configurable]]", **kwargs: Any @@ -632,9 +644,6 @@ class IOLoop(Configurable): other interaction with the `IOLoop` must be done from that `IOLoop`'s thread. `add_callback()` may be used to transfer control from other threads to the `IOLoop`'s thread. - - To add a callback from a signal handler, see - `add_callback_from_signal`. """ raise NotImplementedError() @@ -643,8 +652,13 @@ class IOLoop(Configurable): ) -> None: """Calls the given callback on the next I/O loop iteration. - Safe for use from a Python signal handler; should not be used - otherwise. 
+ Intended to be safe for use from a Python signal handler; should not be + used otherwise. + + .. deprecated:: 6.4 + Use ``asyncio.AbstractEventLoop.add_signal_handler`` instead. + This method is suspected to have been broken since Tornado 5.0 and + will be removed in version 7.0. """ raise NotImplementedError() @@ -682,22 +696,20 @@ class IOLoop(Configurable): # the error logging (i.e. it goes to tornado.log.app_log # instead of asyncio's log). future.add_done_callback( - lambda f: self._run_callback(functools.partial(callback, future)) + lambda f: self._run_callback(functools.partial(callback, f)) ) else: assert is_future(future) # For concurrent futures, we use self.add_callback, so # it's fine if future_add_done_callback inlines that call. - future_add_done_callback( - future, lambda f: self.add_callback(callback, future) - ) + future_add_done_callback(future, lambda f: self.add_callback(callback, f)) def run_in_executor( self, executor: Optional[concurrent.futures.Executor], func: Callable[..., _T], *args: Any - ) -> Awaitable[_T]: + ) -> "Future[_T]": """Runs a function in a ``concurrent.futures.Executor``. If ``executor`` is ``None``, the IO loop's default executor will be used. @@ -803,6 +815,12 @@ class IOLoop(Configurable): except OSError: pass + def _register_task(self, f: Future) -> None: + self._pending_tasks.add(f) + + def _unregister_task(self, f: Future) -> None: + self._pending_tasks.discard(f) + class _Timeout(object): """An IOLoop timeout, a UNIX timestamp and a callback""" diff --git a/lib/tornado/iostream.py b/lib/tornado/iostream.py index e7291263..6cee1fe4 100644 --- a/lib/tornado/iostream.py +++ b/lib/tornado/iostream.py @@ -1219,7 +1219,7 @@ class IOStream(BaseIOStream): The ``ssl_options`` argument may be either an `ssl.SSLContext` object or a dictionary of keyword arguments for the - `ssl.wrap_socket` function. The ``server_hostname`` argument
The ``server_hostname`` argument will be used for certificate validation unless disabled in the ``ssl_options``. @@ -1324,7 +1324,7 @@ class SSLIOStream(IOStream): If the socket passed to the constructor is already connected, it should be wrapped with:: - ssl.wrap_socket(sock, do_handshake_on_connect=False, **kwargs) + ssl.SSLContext(...).wrap_socket(sock, do_handshake_on_connect=False, **kwargs) before constructing the `SSLIOStream`. Unconnected sockets will be wrapped when `IOStream.connect` is finished. @@ -1335,7 +1335,7 @@ class SSLIOStream(IOStream): def __init__(self, *args: Any, **kwargs: Any) -> None: """The ``ssl_options`` keyword argument may either be an `ssl.SSLContext` object or a dictionary of keywords arguments - for `ssl.wrap_socket` + for `ssl.SSLContext.wrap_socket` """ self._ssl_options = kwargs.pop("ssl_options", _client_ssl_defaults) super().__init__(*args, **kwargs) @@ -1413,9 +1413,9 @@ class SSLIOStream(IOStream): return self.close(exc_info=err) else: self._ssl_accepting = False - if not self._verify_cert(self.socket.getpeercert()): - self.close() - return + # Prior to the introduction of SNI, this is where we would check + # the server's claimed hostname. + assert ssl.HAS_SNI self._finish_ssl_connect() def _finish_ssl_connect(self) -> None: @@ -1424,33 +1424,6 @@ class SSLIOStream(IOStream): self._ssl_connect_future = None future_set_result_unless_cancelled(future, self) - def _verify_cert(self, peercert: Any) -> bool: - """Returns ``True`` if peercert is valid according to the configured - validation mode and hostname. - - The ssl handshake already tested the certificate for a valid - CA signature; the only thing that remains is to check - the hostname. 
- """ - if isinstance(self._ssl_options, dict): - verify_mode = self._ssl_options.get("cert_reqs", ssl.CERT_NONE) - elif isinstance(self._ssl_options, ssl.SSLContext): - verify_mode = self._ssl_options.verify_mode - assert verify_mode in (ssl.CERT_NONE, ssl.CERT_REQUIRED, ssl.CERT_OPTIONAL) - if verify_mode == ssl.CERT_NONE or self._server_hostname is None: - return True - cert = self.socket.getpeercert() - if cert is None and verify_mode == ssl.CERT_REQUIRED: - gen_log.warning("No SSL certificate given") - return False - try: - ssl.match_hostname(peercert, self._server_hostname) - except ssl.CertificateError as e: - gen_log.warning("Invalid SSL certificate: %s" % e) - return False - else: - return True - def _handle_read(self) -> None: if self._ssl_accepting: self._do_ssl_handshake() diff --git a/lib/tornado/locale.py b/lib/tornado/locale.py index 55072af2..c5526703 100644 --- a/lib/tornado/locale.py +++ b/lib/tornado/locale.py @@ -333,7 +333,7 @@ class Locale(object): shorter: bool = False, full_format: bool = False, ) -> str: - """Formats the given date (which should be GMT). + """Formats the given date. By default, we return a relative time (e.g., "2 minutes ago"). You can return an absolute date string with ``relative=False``. @@ -343,10 +343,16 @@ class Locale(object): This method is primarily intended for dates in the past. For dates in the future, we fall back to full format. + + .. versionchanged:: 6.4 + Aware `datetime.datetime` objects are now supported (naive + datetimes are still assumed to be UTC). 
""" if isinstance(date, (int, float)): - date = datetime.datetime.utcfromtimestamp(date) - now = datetime.datetime.utcnow() + date = datetime.datetime.fromtimestamp(date, datetime.timezone.utc) + if date.tzinfo is None: + date = date.replace(tzinfo=datetime.timezone.utc) + now = datetime.datetime.now(datetime.timezone.utc) if date > now: if relative and (date - now).seconds < 60: # Due to click skew, things are some things slightly diff --git a/lib/tornado/netutil.py b/lib/tornado/netutil.py index 04db085a..18c91e67 100644 --- a/lib/tornado/netutil.py +++ b/lib/tornado/netutil.py @@ -594,7 +594,7 @@ def ssl_options_to_context( `~ssl.SSLContext` object. The ``ssl_options`` dictionary contains keywords to be passed to - `ssl.wrap_socket`. In Python 2.7.9+, `ssl.SSLContext` objects can + ``ssl.SSLContext.wrap_socket``. In Python 2.7.9+, `ssl.SSLContext` objects can be used instead. This function converts the dict form to its `~ssl.SSLContext` equivalent, and may be used when a component which accepts both forms needs to upgrade to the `~ssl.SSLContext` version @@ -652,9 +652,7 @@ def ssl_wrap_socket( ``ssl_options`` may be either an `ssl.SSLContext` object or a dictionary (as accepted by `ssl_options_to_context`). Additional - keyword arguments are passed to ``wrap_socket`` (either the - `~ssl.SSLContext` method or the `ssl` module function as - appropriate). + keyword arguments are passed to `ssl.SSLContext.wrap_socket`. .. versionchanged:: 6.2 @@ -664,14 +662,10 @@ def ssl_wrap_socket( context = ssl_options_to_context(ssl_options, server_side=server_side) if server_side is None: server_side = False - if ssl.HAS_SNI: - # In python 3.4, wrap_socket only accepts the server_hostname - # argument if HAS_SNI is true. 
- # TODO: add a unittest (python added server-side SNI support in 3.4) - # In the meantime it can be manually tested with - # python3 -m tornado.httpclient https://sni.velox.ch - return context.wrap_socket( - socket, server_hostname=server_hostname, server_side=server_side, **kwargs - ) - else: - return context.wrap_socket(socket, server_side=server_side, **kwargs) + assert ssl.HAS_SNI + # TODO: add a unittest for hostname validation (python added server-side SNI support in 3.4) + # In the meantime it can be manually tested with + # python3 -m tornado.httpclient https://sni.velox.ch + return context.wrap_socket( + socket, server_hostname=server_hostname, server_side=server_side, **kwargs + ) diff --git a/lib/tornado/platform/asyncio.py b/lib/tornado/platform/asyncio.py index a15a74df..79e60848 100644 --- a/lib/tornado/platform/asyncio.py +++ b/lib/tornado/platform/asyncio.py @@ -36,23 +36,32 @@ import warnings from tornado.gen import convert_yielded from tornado.ioloop import IOLoop, _Selectable -from typing import Any, TypeVar, Awaitable, Callable, Union, Optional, List, Dict +from typing import ( + Any, + Callable, + Dict, + List, + Optional, + Protocol, + Set, + Tuple, + TypeVar, + Union, +) -if typing.TYPE_CHECKING: - from typing import Set, Tuple # noqa: F401 - from typing_extensions import Protocol - class _HasFileno(Protocol): - def fileno(self) -> int: - pass +class _HasFileno(Protocol): + def fileno(self) -> int: + pass - _FileDescriptorLike = Union[int, _HasFileno] + +_FileDescriptorLike = Union[int, _HasFileno] _T = TypeVar("_T") # Collection of selector thread event loops to shut down on exit. -_selector_loops = set() # type: Set[AddThreadSelectorEventLoop] +_selector_loops: Set["SelectorThread"] = set() def _atexit_callback() -> None: @@ -64,11 +73,12 @@ def _atexit_callback() -> None: loop._waker_w.send(b"a") except BlockingIOError: pass - # If we don't join our (daemon) thread here, we may get a deadlock - # during interpreter shutdown. 
I don't really understand why. This - # deadlock happens every time in CI (both travis and appveyor) but - # I've never been able to reproduce locally. - loop._thread.join() + if loop._thread is not None: + # If we don't join our (daemon) thread here, we may get a deadlock + # during interpreter shutdown. I don't really understand why. This + # deadlock happens every time in CI (both travis and appveyor) but + # I've never been able to reproduce locally. + loop._thread.join() _selector_loops.clear() @@ -87,16 +97,16 @@ class BaseAsyncIOLoop(IOLoop): # as windows where the default event loop does not implement these methods. self.selector_loop = asyncio_loop if hasattr(asyncio, "ProactorEventLoop") and isinstance( - asyncio_loop, asyncio.ProactorEventLoop # type: ignore + asyncio_loop, asyncio.ProactorEventLoop ): # Ignore this line for mypy because the abstract method checker # doesn't understand dynamic proxies. self.selector_loop = AddThreadSelectorEventLoop(asyncio_loop) # type: ignore # Maps fd to (fileobj, handler function) pair (as in IOLoop.add_handler) - self.handlers = {} # type: Dict[int, Tuple[Union[int, _Selectable], Callable]] + self.handlers: Dict[int, Tuple[Union[int, _Selectable], Callable]] = {} # Set of fds listening for reads/writes - self.readers = set() # type: Set[int] - self.writers = set() # type: Set[int] + self.readers: Set[int] = set() + self.writers: Set[int] = set() self.closing = False # If an asyncio loop was closed through an asyncio interface # instead of IOLoop.close(), we'd never hear about it and may @@ -239,6 +249,7 @@ class BaseAsyncIOLoop(IOLoop): def add_callback_from_signal( self, callback: Callable, *args: Any, **kwargs: Any ) -> None: + warnings.warn("add_callback_from_signal is deprecated", DeprecationWarning) try: self.asyncio_loop.call_soon_threadsafe( self._run_callback, functools.partial(callback, *args, **kwargs) @@ -251,7 +262,7 @@ class BaseAsyncIOLoop(IOLoop): executor: Optional[concurrent.futures.Executor], func: 
Callable[..., _T], *args: Any, - ) -> Awaitable[_T]: + ) -> "asyncio.Future[_T]": return self.asyncio_loop.run_in_executor(executor, func, *args) def set_default_executor(self, executor: concurrent.futures.Executor) -> None: @@ -417,87 +428,51 @@ class AnyThreadEventLoopPolicy(_BasePolicy): # type: ignore def get_event_loop(self) -> asyncio.AbstractEventLoop: try: return super().get_event_loop() - except (RuntimeError, AssertionError): - # This was an AssertionError in Python 3.4.2 (which ships with Debian Jessie) - # and changed to a RuntimeError in 3.4.3. + except RuntimeError: # "There is no current event loop in thread %r" loop = self.new_event_loop() self.set_event_loop(loop) return loop -class AddThreadSelectorEventLoop(asyncio.AbstractEventLoop): - """Wrap an event loop to add implementations of the ``add_reader`` method family. +class SelectorThread: + """Define ``add_reader`` methods to be called in a background select thread. Instances of this class start a second thread to run a selector. - This thread is completely hidden from the user; all callbacks are - run on the wrapped event loop's thread. - - This class is used automatically by Tornado; applications should not need - to refer to it directly. - - It is safe to wrap any event loop with this class, although it only makes sense - for event loops that do not implement the ``add_reader`` family of methods - themselves (i.e. ``WindowsProactorEventLoop``) - - Closing the ``AddThreadSelectorEventLoop`` also closes the wrapped event loop. + This thread is completely hidden from the user; + all callbacks are run on the wrapped event loop's thread. + Typically used via ``AddThreadSelectorEventLoop``, + but can be attached to a running asyncio loop. """ - # This class is a __getattribute__-based proxy. All attributes other than those - # in this set are proxied through to the underlying loop. 
- MY_ATTRIBUTES = { - "_consume_waker", - "_select_cond", - "_select_args", - "_closing_selector", - "_thread", - "_handle_event", - "_readers", - "_real_loop", - "_start_select", - "_run_select", - "_handle_select", - "_wake_selector", - "_waker_r", - "_waker_w", - "_writers", - "add_reader", - "add_writer", - "close", - "remove_reader", - "remove_writer", - } - - def __getattribute__(self, name: str) -> Any: - if name in AddThreadSelectorEventLoop.MY_ATTRIBUTES: - return super().__getattribute__(name) - return getattr(self._real_loop, name) + _closed = False def __init__(self, real_loop: asyncio.AbstractEventLoop) -> None: self._real_loop = real_loop - # Create a thread to run the select system call. We manage this thread - # manually so we can trigger a clean shutdown from an atexit hook. Note - # that due to the order of operations at shutdown, only daemon threads - # can be shut down in this way (non-daemon threads would require the - # introduction of a new hook: https://bugs.python.org/issue41962) self._select_cond = threading.Condition() - self._select_args = ( - None - ) # type: Optional[Tuple[List[_FileDescriptorLike], List[_FileDescriptorLike]]] + self._select_args: Optional[ + Tuple[List[_FileDescriptorLike], List[_FileDescriptorLike]] + ] = None self._closing_selector = False - self._thread = threading.Thread( - name="Tornado selector", - daemon=True, - target=self._run_select, - ) - self._thread.start() - # Start the select loop once the loop is started. - self._real_loop.call_soon(self._start_select) + self._thread: Optional[threading.Thread] = None + self._thread_manager_handle = self._thread_manager() - self._readers = {} # type: Dict[_FileDescriptorLike, Callable] - self._writers = {} # type: Dict[_FileDescriptorLike, Callable] + async def thread_manager_anext() -> None: + # the anext builtin wasn't added until 3.10. We just need to iterate + # this generator one step. 
+ await self._thread_manager_handle.__anext__() + + # When the loop starts, start the thread. Not too soon because we can't + # clean up if we get to this point but the event loop is closed without + # starting. + self._real_loop.call_soon( + lambda: self._real_loop.create_task(thread_manager_anext()) + ) + + self._readers: Dict[_FileDescriptorLike, Callable] = {} + self._writers: Dict[_FileDescriptorLike, Callable] = {} # Writing to _waker_w will wake up the selector thread, which # watches for _waker_r to be readable. @@ -507,28 +482,49 @@ class AddThreadSelectorEventLoop(asyncio.AbstractEventLoop): _selector_loops.add(self) self.add_reader(self._waker_r, self._consume_waker) - def __del__(self) -> None: - # If the top-level application code uses asyncio interfaces to - # start and stop the event loop, no objects created in Tornado - # can get a clean shutdown notification. If we're just left to - # be GC'd, we must explicitly close our sockets to avoid - # logging warnings. - _selector_loops.discard(self) - self._waker_r.close() - self._waker_w.close() - def close(self) -> None: + if self._closed: + return with self._select_cond: self._closing_selector = True self._select_cond.notify() self._wake_selector() - self._thread.join() + if self._thread is not None: + self._thread.join() _selector_loops.discard(self) + self.remove_reader(self._waker_r) self._waker_r.close() self._waker_w.close() - self._real_loop.close() + self._closed = True + + async def _thread_manager(self) -> typing.AsyncGenerator[None, None]: + # Create a thread to run the select system call. We manage this thread + # manually so we can trigger a clean shutdown from an atexit hook. 
Note + # that due to the order of operations at shutdown, only daemon threads + # can be shut down in this way (non-daemon threads would require the + # introduction of a new hook: https://bugs.python.org/issue41962) + self._thread = threading.Thread( + name="Tornado selector", + daemon=True, + target=self._run_select, + ) + self._thread.start() + self._start_select() + try: + # The presense of this yield statement means that this coroutine + # is actually an asynchronous generator, which has a special + # shutdown protocol. We wait at this yield point until the + # event loop's shutdown_asyncgens method is called, at which point + # we will get a GeneratorExit exception and can shut down the + # selector thread. + yield + except GeneratorExit: + self.close() + raise def _wake_selector(self) -> None: + if self._closed: + return try: self._waker_w.send(b"a") except BlockingIOError: @@ -614,7 +610,7 @@ class AddThreadSelectorEventLoop(asyncio.AbstractEventLoop): pass def _handle_select( - self, rs: List["_FileDescriptorLike"], ws: List["_FileDescriptorLike"] + self, rs: List[_FileDescriptorLike], ws: List[_FileDescriptorLike] ) -> None: for r in rs: self._handle_event(r, self._readers) @@ -624,8 +620,8 @@ class AddThreadSelectorEventLoop(asyncio.AbstractEventLoop): def _handle_event( self, - fd: "_FileDescriptorLike", - cb_map: Dict["_FileDescriptorLike", Callable], + fd: _FileDescriptorLike, + cb_map: Dict[_FileDescriptorLike, Callable], ) -> None: try: callback = cb_map[fd] @@ -634,18 +630,18 @@ class AddThreadSelectorEventLoop(asyncio.AbstractEventLoop): callback() def add_reader( - self, fd: "_FileDescriptorLike", callback: Callable[..., None], *args: Any + self, fd: _FileDescriptorLike, callback: Callable[..., None], *args: Any ) -> None: self._readers[fd] = functools.partial(callback, *args) self._wake_selector() def add_writer( - self, fd: "_FileDescriptorLike", callback: Callable[..., None], *args: Any + self, fd: _FileDescriptorLike, callback: Callable[..., 
None], *args: Any ) -> None: self._writers[fd] = functools.partial(callback, *args) self._wake_selector() - def remove_reader(self, fd: "_FileDescriptorLike") -> bool: + def remove_reader(self, fd: _FileDescriptorLike) -> bool: try: del self._readers[fd] except KeyError: @@ -653,10 +649,70 @@ class AddThreadSelectorEventLoop(asyncio.AbstractEventLoop): self._wake_selector() return True - def remove_writer(self, fd: "_FileDescriptorLike") -> bool: + def remove_writer(self, fd: _FileDescriptorLike) -> bool: try: del self._writers[fd] except KeyError: return False self._wake_selector() return True + + +class AddThreadSelectorEventLoop(asyncio.AbstractEventLoop): + """Wrap an event loop to add implementations of the ``add_reader`` method family. + + Instances of this class start a second thread to run a selector. + This thread is completely hidden from the user; all callbacks are + run on the wrapped event loop's thread. + + This class is used automatically by Tornado; applications should not need + to refer to it directly. + + It is safe to wrap any event loop with this class, although it only makes sense + for event loops that do not implement the ``add_reader`` family of methods + themselves (i.e. ``WindowsProactorEventLoop``) + + Closing the ``AddThreadSelectorEventLoop`` also closes the wrapped event loop. + + """ + + # This class is a __getattribute__-based proxy. All attributes other than those + # in this set are proxied through to the underlying loop. 
+ MY_ATTRIBUTES = { + "_real_loop", + "_selector", + "add_reader", + "add_writer", + "close", + "remove_reader", + "remove_writer", + } + + def __getattribute__(self, name: str) -> Any: + if name in AddThreadSelectorEventLoop.MY_ATTRIBUTES: + return super().__getattribute__(name) + return getattr(self._real_loop, name) + + def __init__(self, real_loop: asyncio.AbstractEventLoop) -> None: + self._real_loop = real_loop + self._selector = SelectorThread(real_loop) + + def close(self) -> None: + self._selector.close() + self._real_loop.close() + + def add_reader( + self, fd: "_FileDescriptorLike", callback: Callable[..., None], *args: Any + ) -> None: + return self._selector.add_reader(fd, callback, *args) + + def add_writer( + self, fd: "_FileDescriptorLike", callback: Callable[..., None], *args: Any + ) -> None: + return self._selector.add_writer(fd, callback, *args) + + def remove_reader(self, fd: "_FileDescriptorLike") -> bool: + return self._selector.remove_reader(fd) + + def remove_writer(self, fd: "_FileDescriptorLike") -> bool: + return self._selector.remove_writer(fd) diff --git a/lib/tornado/process.py b/lib/tornado/process.py index 26428feb..12e3eb64 100644 --- a/lib/tornado/process.py +++ b/lib/tornado/process.py @@ -17,6 +17,7 @@ the server into multiple processes and managing subprocesses. 
""" +import asyncio import os import multiprocessing import signal @@ -210,7 +211,6 @@ class Subprocess(object): _initialized = False _waiting = {} # type: ignore - _old_sigchld = None def __init__(self, *args: Any, **kwargs: Any) -> None: self.io_loop = ioloop.IOLoop.current() @@ -322,11 +322,8 @@ class Subprocess(object): """ if cls._initialized: return - io_loop = ioloop.IOLoop.current() - cls._old_sigchld = signal.signal( - signal.SIGCHLD, - lambda sig, frame: io_loop.add_callback_from_signal(cls._cleanup), - ) + loop = asyncio.get_event_loop() + loop.add_signal_handler(signal.SIGCHLD, cls._cleanup) cls._initialized = True @classmethod @@ -334,7 +331,8 @@ class Subprocess(object): """Removes the ``SIGCHLD`` handler.""" if not cls._initialized: return - signal.signal(signal.SIGCHLD, cls._old_sigchld) + loop = asyncio.get_event_loop() + loop.remove_signal_handler(signal.SIGCHLD) cls._initialized = False @classmethod @@ -352,7 +350,7 @@ class Subprocess(object): return assert ret_pid == pid subproc = cls._waiting.pop(pid) - subproc.io_loop.add_callback_from_signal(subproc._set_returncode, status) + subproc.io_loop.add_callback(subproc._set_returncode, status) def _set_returncode(self, status: int) -> None: if sys.platform == "win32": diff --git a/lib/tornado/tcpserver.py b/lib/tornado/tcpserver.py index deab8f2a..02c0ca0c 100644 --- a/lib/tornado/tcpserver.py +++ b/lib/tornado/tcpserver.py @@ -61,7 +61,7 @@ class TCPServer(object): To make this server serve SSL traffic, send the ``ssl_options`` keyword argument with an `ssl.SSLContext` object. 
For compatibility with older versions of Python ``ssl_options`` may also be a dictionary of keyword - arguments for the `ssl.wrap_socket` method.:: + arguments for the `ssl.SSLContext.wrap_socket` method.:: ssl_ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) ssl_ctx.load_cert_chain(os.path.join(data_dir, "mydomain.crt"), diff --git a/lib/tornado/testing.py b/lib/tornado/testing.py index 9bfadf45..bdbff87b 100644 --- a/lib/tornado/testing.py +++ b/lib/tornado/testing.py @@ -206,10 +206,7 @@ class AsyncTestCase(unittest.TestCase): # this always happens in tests, so cancel any tasks that are # still pending by the time we get here. asyncio_loop = self.io_loop.asyncio_loop # type: ignore - if hasattr(asyncio, "all_tasks"): # py37 - tasks = asyncio.all_tasks(asyncio_loop) # type: ignore - else: - tasks = asyncio.Task.all_tasks(asyncio_loop) + tasks = asyncio.all_tasks(asyncio_loop) # Tasks that are done may still appear here and may contain # non-cancellation exceptions, so filter them out. 
tasks = [t for t in tasks if not t.done()] # type: ignore @@ -520,7 +517,9 @@ class AsyncHTTPSTestCase(AsyncHTTPTestCase): def default_ssl_options() -> Dict[str, Any]: # Testing keys were generated with: # openssl req -new -keyout tornado/test/test.key \ - # -out tornado/test/test.crt -nodes -days 3650 -x509 + # -out tornado/test/test.crt \ + # -nodes -days 3650 -x509 \ + # -subj "/CN=foo.example.com" -addext "subjectAltName = DNS:foo.example.com" module_dir = os.path.dirname(__file__) return dict( certfile=os.path.join(module_dir, "test", "test.crt"), diff --git a/lib/tornado/web.py b/lib/tornado/web.py index 56514049..03939647 100644 --- a/lib/tornado/web.py +++ b/lib/tornado/web.py @@ -647,7 +647,9 @@ class RequestHandler(object): if domain: morsel["domain"] = domain if expires_days is not None and not expires: - expires = datetime.datetime.utcnow() + datetime.timedelta(days=expires_days) + expires = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta( + days=expires_days + ) if expires: morsel["expires"] = httputil.format_timestamp(expires) if path: @@ -698,7 +700,9 @@ class RequestHandler(object): raise TypeError( f"clear_cookie() got an unexpected keyword argument '{excluded_arg}'" ) - expires = datetime.datetime.utcnow() - datetime.timedelta(days=365) + expires = datetime.datetime.now(datetime.timezone.utc) - datetime.timedelta( + days=365 + ) self.set_cookie(name, value="", expires=expires, **kwargs) def clear_all_cookies(self, **kwargs: Any) -> None: @@ -2793,7 +2797,8 @@ class StaticFileHandler(RequestHandler): if cache_time > 0: self.set_header( "Expires", - datetime.datetime.utcnow() + datetime.timedelta(seconds=cache_time), + datetime.datetime.now(datetime.timezone.utc) + + datetime.timedelta(seconds=cache_time), ) self.set_header("Cache-Control", "max-age=" + str(cache_time)) @@ -2812,12 +2817,12 @@ class StaticFileHandler(RequestHandler): # content has not been modified ims_value = self.request.headers.get("If-Modified-Since") if 
ims_value is not None: - date_tuple = email.utils.parsedate(ims_value) - if date_tuple is not None: - if_since = datetime.datetime(*date_tuple[:6]) - assert self.modified is not None - if if_since >= self.modified: - return True + if_since = email.utils.parsedate_to_datetime(ims_value) + if if_since.tzinfo is None: + if_since = if_since.replace(tzinfo=datetime.timezone.utc) + assert self.modified is not None + if if_since >= self.modified: + return True return False @@ -2981,6 +2986,10 @@ class StaticFileHandler(RequestHandler): object or None. .. versionadded:: 3.1 + + .. versionchanged:: 6.4 + Now returns an aware datetime object instead of a naive one. + Subclasses that override this method may return either kind. """ stat_result = self._stat() # NOTE: Historically, this used stat_result[stat.ST_MTIME], @@ -2991,7 +3000,9 @@ class StaticFileHandler(RequestHandler): # consistency with the past (and because we have a unit test # that relies on this), we truncate the float here, although # I'm not sure that's the right thing to do. 
- modified = datetime.datetime.utcfromtimestamp(int(stat_result.st_mtime)) + modified = datetime.datetime.fromtimestamp( + int(stat_result.st_mtime), datetime.timezone.utc + ) return modified def get_content_type(self) -> str: @@ -3125,7 +3136,7 @@ class FallbackHandler(RequestHandler): django.core.handlers.wsgi.WSGIHandler()) application = tornado.web.Application([ (r"/foo", FooHandler), - (r".*", FallbackHandler, dict(fallback=wsgi_app), + (r".*", FallbackHandler, dict(fallback=wsgi_app)), ]) """ diff --git a/lib/tornado/websocket.py b/lib/tornado/websocket.py index d0abd425..fbfd7008 100644 --- a/lib/tornado/websocket.py +++ b/lib/tornado/websocket.py @@ -20,6 +20,7 @@ import sys import struct import tornado from urllib.parse import urlparse +import warnings import zlib from tornado.concurrent import Future, future_set_result_unless_cancelled @@ -1356,7 +1357,7 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection): ping_interval: Optional[float] = None, ping_timeout: Optional[float] = None, max_message_size: int = _default_max_message_size, - subprotocols: Optional[List[str]] = [], + subprotocols: Optional[List[str]] = None, resolver: Optional[Resolver] = None, ) -> None: self.connect_future = Future() # type: Future[WebSocketClientConnection] @@ -1410,6 +1411,15 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection): 104857600, ) + def __del__(self) -> None: + if self.protocol is not None: + # Unclosed client connections can sometimes log "task was destroyed but + # was pending" warnings if shutdown strikes at the wrong time (such as + # while a ping is being processed due to ping_interval). Log our own + # warning to make it a little more deterministic (although it's still + # dependent on GC timing). + warnings.warn("Unclosed WebSocketClientConnection", ResourceWarning) + def close(self, code: Optional[int] = None, reason: Optional[str] = None) -> None: """Closes the websocket connection. 
diff --git a/lib/urllib3/_collections.py b/lib/urllib3/_collections.py index 7f9dca7f..8bdfb767 100644 --- a/lib/urllib3/_collections.py +++ b/lib/urllib3/_collections.py @@ -8,7 +8,7 @@ from threading import RLock if typing.TYPE_CHECKING: # We can only import Protocol if TYPE_CHECKING because it's a development # dependency, and is not available at runtime. - from typing_extensions import Protocol + from typing_extensions import Protocol, Self class HasGettableStringKeys(Protocol): def keys(self) -> typing.Iterator[str]: @@ -391,6 +391,24 @@ class HTTPHeaderDict(typing.MutableMapping[str, str]): # meets our external interface requirement of `Union[List[str], _DT]`. return vals[1:] + def _prepare_for_method_change(self) -> Self: + """ + Remove content-specific header fields before changing the request + method to GET or HEAD according to RFC 9110, Section 15.4. + """ + content_specific_headers = [ + "Content-Encoding", + "Content-Language", + "Content-Location", + "Content-Type", + "Content-Length", + "Digest", + "Last-Modified", + ] + for header in content_specific_headers: + self.discard(header) + return self + # Backwards compatibility for httplib getheaders = getlist getallmatchingheaders = getlist diff --git a/lib/urllib3/_version.py b/lib/urllib3/_version.py index c9641905..e2b88f1d 100644 --- a/lib/urllib3/_version.py +++ b/lib/urllib3/_version.py @@ -1,4 +1,4 @@ # This file is protected via CODEOWNERS from __future__ import annotations -__version__ = "2.0.5" +__version__ = "2.0.7" diff --git a/lib/urllib3/connectionpool.py b/lib/urllib3/connectionpool.py index 2479405b..c6ca3902 100644 --- a/lib/urllib3/connectionpool.py +++ b/lib/urllib3/connectionpool.py @@ -11,6 +11,7 @@ from socket import timeout as SocketTimeout from types import TracebackType from ._base_connection import _TYPE_BODY +from ._collections import HTTPHeaderDict from ._request_methods import RequestMethods from .connection import ( BaseSSLError, @@ -893,7 +894,11 @@ class 
HTTPConnectionPool(ConnectionPool, RequestMethods): redirect_location = redirect and response.get_redirect_location() if redirect_location: if response.status == 303: + # Change the method according to RFC 9110, Section 15.4.4. method = "GET" + # And lose the body not to transfer anything sensitive. + body = None + headers = HTTPHeaderDict(headers)._prepare_for_method_change() try: retries = retries.increment(method, url, response=response, _pool=self) diff --git a/lib/urllib3/poolmanager.py b/lib/urllib3/poolmanager.py index 02b2f622..3c92a14d 100644 --- a/lib/urllib3/poolmanager.py +++ b/lib/urllib3/poolmanager.py @@ -7,7 +7,7 @@ import warnings from types import TracebackType from urllib.parse import urljoin -from ._collections import RecentlyUsedContainer +from ._collections import HTTPHeaderDict, RecentlyUsedContainer from ._request_methods import RequestMethods from .connection import ProxyConfig from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, port_by_scheme @@ -449,9 +449,12 @@ class PoolManager(RequestMethods): # Support relative URLs for redirecting. redirect_location = urljoin(url, redirect_location) - # RFC 7231, Section 6.4.4 if response.status == 303: + # Change the method according to RFC 9110, Section 15.4.4. method = "GET" + # And lose the body not to transfer anything sensitive. + kw["body"] = None + kw["headers"] = HTTPHeaderDict(kw["headers"])._prepare_for_method_change() retries = kw.get("retries") if not isinstance(retries, Retry): diff --git a/lib/urllib3/util/retry.py b/lib/urllib3/util/retry.py index ea48afe3..7572bfd2 100644 --- a/lib/urllib3/util/retry.py +++ b/lib/urllib3/util/retry.py @@ -187,7 +187,7 @@ class Retry: RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503]) #: Default headers to be used for ``remove_headers_on_redirect`` - DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset(["Authorization"]) + DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset(["Cookie", "Authorization"]) #: Default maximum backoff time. 
DEFAULT_BACKOFF_MAX = 120 diff --git a/lib/urllib3/util/ssl_.py b/lib/urllib3/util/ssl_.py index 77628032..e35e3940 100644 --- a/lib/urllib3/util/ssl_.py +++ b/lib/urllib3/util/ssl_.py @@ -411,8 +411,10 @@ def ssl_wrap_socket( tls_in_tls: bool = False, ) -> ssl.SSLSocket | SSLTransportType: """ - All arguments except for server_hostname, ssl_context, and ca_cert_dir have - the same meaning as they do when using :func:`ssl.wrap_socket`. + All arguments except for server_hostname, ssl_context, tls_in_tls, ca_cert_data and + ca_cert_dir have the same meaning as they do when using + :func:`ssl.create_default_context`, :meth:`ssl.SSLContext.load_cert_chain`, + :meth:`ssl.SSLContext.set_ciphers` and :meth:`ssl.SSLContext.wrap_socket`. :param server_hostname: When SNI is supported, the expected hostname of the certificate diff --git a/recommended.txt b/recommended.txt index a2998cf4..abf98ed9 100644 --- a/recommended.txt +++ b/recommended.txt @@ -1,4 +1,5 @@ --extra-index-url https://gitlab+deploy-token-1599941:UNupqjtDab_zxNzvP2gA@gitlab.com/api/v4/projects/279215/packages/pypi/simple +Brotli cffi cryptography != 38.0.2; 'Windows' == platform_system cryptography != 38.0.2; 'Linux' == platform_system and ('x86_64' == platform_machine or 'aarch64' == platform_machine) diff --git a/sickgear/__init__.py b/sickgear/__init__.py index 24901649..9807f01a 100644 --- a/sickgear/__init__.py +++ b/sickgear/__init__.py @@ -1721,9 +1721,10 @@ def init_stage_2(): MEMCACHE['history_tab'] = History.menu_tab(MEMCACHE['history_tab_limit']) try: - for f in scandir(os.path.join(PROG_DIR, 'gui', GUI_NAME, 'images', 'flags')): - if f.is_file(): - MEMCACHE_FLAG_IMAGES[os.path.splitext(f.name)[0].lower()] = True + with scandir(os.path.join(PROG_DIR, 'gui', GUI_NAME, 'images', 'flags')) as s_d: + for f in s_d: + if f.is_file(): + MEMCACHE_FLAG_IMAGES[os.path.splitext(f.name)[0].lower()] = True except (BaseException, Exception): pass diff --git a/sickgear/db.py b/sickgear/db.py index 
6e414f99..755932e9 100644 --- a/sickgear/db.py +++ b/sickgear/db.py @@ -112,7 +112,7 @@ class DBConnection(object): helpers.copy_file(db_alt, db_src) self.filename = filename - self.connection = sqlite3.connect(db_src, 20) + self.connection = sqlite3.connect(db_src, timeout=20) # enable legacy double quote support if db_supports_setconfig_dqs: self.connection.setconfig(sqlite3.SQLITE_DBCONFIG_DQS_DDL, True) @@ -156,7 +156,7 @@ class DBConnection(object): try: # copy into this DB - backup_con = sqlite3.connect(target_db, 20) + backup_con = sqlite3.connect(target_db, timeout=20) with backup_con: with db_lock: self.connection.backup(backup_con, progress=progress) @@ -752,13 +752,13 @@ def cleanup_old_db_backups(filename): d, filename = os.path.split(filename) if not d: d = sickgear.DATA_DIR - for f in filter(lambda fn: fn.is_file() and filename in fn.name and - re.search(r'\.db(\.v\d+)?\.r\d+$', fn.name), - scandir(d)): - try: - os.unlink(f.path) - except (BaseException, Exception): - pass + with scandir(d) as s_d: + for f in filter(lambda fn: fn.is_file() and filename in fn.name and + re.search(r'\.db(\.v\d+)?\.r\d+$', fn.name), s_d): + try: + os.unlink(f.path) + except (BaseException, Exception): + pass except (BaseException, Exception): pass diff --git a/sickgear/helpers.py b/sickgear/helpers.py index 1643a1db..9c682585 100644 --- a/sickgear/helpers.py +++ b/sickgear/helpers.py @@ -352,8 +352,11 @@ def list_media_files(path): if [direntry for direntry in scantree(path, include=[r'\.sickgearignore'], filter_kind=False, recurse=False)]: logger.debug('Skipping folder "%s" because it contains ".sickgearignore"' % path) else: - result = [direntry.path for direntry in scantree(path, exclude=['Extras'], filter_kind=False, - exclude_folders_with_files=['.sickgearignore']) + result = [direntry.path for direntry in scantree(path, exclude_dirs=[ + '^Extras$', + '^Behind The Scenes$', '^Deleted Scenes$', '^Featurettes$', + '^Interviews$', '^Scenes$', '^Shorts$', '^Trailers$', 
'^Other$' + ], filter_kind=False, exclude_folders_with_files=['.sickgearignore']) if has_media_ext(direntry.name)] return result @@ -1013,7 +1016,7 @@ def clear_cache(force=False): dirty = None del_time = SGDatetime.timestamp_near(td=datetime.timedelta(hours=12)) direntry_args = dict(follow_symlinks=False) - for direntry in scantree(sickgear.CACHE_DIR, ['images|rss|zoneinfo'], follow_symlinks=True): + for direntry in scantree(sickgear.CACHE_DIR, exclude_dirs=['images|rss|zoneinfo'], follow_symlinks=True): if direntry.is_file(**direntry_args) and (force or del_time > direntry.stat(**direntry_args).st_mtime): dirty = dirty or False if remove_file_perm(direntry.path) else True elif direntry.is_dir(**direntry_args) and direntry.name not in ['cheetah', 'sessions', 'indexers']: @@ -1564,15 +1567,19 @@ def count_files_dirs(base_dir): """ f = d = 0 try: - files = scandir(base_dir) + with scandir(base_dir) as s_d: + try: + files = s_d + except OSError as e: + logger.warning('Unable to count files %s / %s' % (repr(e), ex(e))) + else: + for e in files: + if e.is_file(): + f += 1 + elif e.is_dir(): + d += 1 except OSError as e: logger.warning('Unable to count files %s / %s' % (repr(e), ex(e))) - else: - for e in files: - if e.is_file(): - f += 1 - elif e.is_dir(): - d += 1 return f, d @@ -1617,93 +1624,108 @@ def upgrade_new_naming(): cf = 0 p_text = 'Upgrading %s' % (d, 'banner/poster')[not d] _set_progress(p_text, 0, 0) - for entry in scandir(bd): - if entry.is_file(): - cf += 1 - _set_progress(p_text, cf, step) - b_s = bp_match.search(entry.name) - if b_s: - old_id = int(b_s.group(1)) - tvid = show_list.get(old_id) - if tvid: - nb_dir = os.path.join(sickgear.CACHE_DIR, 'images', 'shows', '%s-%s' % (tvid, old_id), d) - if not os.path.isdir(nb_dir): + with scandir(bd) as s_d: + for entry in scandir(bd): + if entry.is_file(): + cf += 1 + _set_progress(p_text, cf, step) + b_s = bp_match.search(entry.name) + if b_s: + old_id = int(b_s.group(1)) + tvid = show_list.get(old_id) + 
if tvid: + nb_dir = os.path.join(sickgear.CACHE_DIR, 'images', 'shows', '%s-%s' % (tvid, old_id), d) + if not os.path.isdir(nb_dir): + try: + os.makedirs(nb_dir) + except (BaseException, Exception): + pass + new_name = os.path.join(nb_dir, bp_match.sub(r'\2', entry.name)) try: - os.makedirs(nb_dir) + move_file(entry.path, new_name) + except (BaseException, Exception) as e: + logger.warning('Unable to rename %s to %s: %s / %s' + % (entry.path, new_name, repr(e), ex(e))) + else: + # clean up files without reference in db + try: + os.remove(entry.path) except (BaseException, Exception): pass - new_name = os.path.join(nb_dir, bp_match.sub(r'\2', entry.name)) - try: - move_file(entry.path, new_name) - except (BaseException, Exception) as e: - logger.warning('Unable to rename %s to %s: %s / %s' - % (entry.path, new_name, repr(e), ex(e))) - else: - # clean up files without reference in db - try: - os.remove(entry.path) - except (BaseException, Exception): - pass - elif entry.is_dir(): - if entry.name in ['shows', 'browse']: - continue - elif 'fanart' == entry.name: - _set_progress(p_text, 0, 1) - fc_fan, dc_fan = count_files_dirs(entry.path) - step_fan = dc_fan / float(100) - cf_fan = 0 - p_text = 'Upgrading fanart' - _set_progress(p_text, 0, 0) - try: - entries = scandir(entry.path) - except OSError as e: - logger.warning('Unable to stat dirs %s / %s' % (repr(e), ex(e))) + elif entry.is_dir(): + if entry.name in ['shows', 'browse']: continue - for d_entry in entries: - if d_entry.is_dir(): - cf_fan += 1 - _set_progress(p_text, cf_fan, step_fan) - old_id = try_int(d_entry.name) - if old_id: - new_id = show_list.get(old_id) - if new_id: - new_dir_name = os.path.join(sickgear.CACHE_DIR, 'images', 'shows', - '%s-%s' % (new_id, old_id), 'fanart') - try: - move_file(d_entry.path, new_dir_name) - except (BaseException, Exception) as e: - logger.warning(f'Unable to rename {d_entry.path} to {new_dir_name}:' - f' {repr(e)} / {ex(e)}') - if os.path.isdir(new_dir_name): - try: - f_n 
= filter(lambda fn: fn.is_file(), scandir(new_dir_name)) - except OSError as e: - logger.warning('Unable to rename %s / %s' % (repr(e), ex(e))) - else: - rename_args = [] - # noinspection PyTypeChecker - for f_entry in f_n: - rename_args += [(f_entry.path, bp_match.sub(r'\2', f_entry.path))] - - for args in rename_args: + elif 'fanart' == entry.name: + _set_progress(p_text, 0, 1) + fc_fan, dc_fan = count_files_dirs(entry.path) + step_fan = dc_fan / float(100) + cf_fan = 0 + p_text = 'Upgrading fanart' + _set_progress(p_text, 0, 0) + try: + with scandir(entry.path) as s_p: + try: + entries = s_p + except OSError as e: + logger.warning('Unable to stat dirs %s / %s' % (repr(e), ex(e))) + continue + for d_entry in entries: + if d_entry.is_dir(): + cf_fan += 1 + _set_progress(p_text, cf_fan, step_fan) + old_id = try_int(d_entry.name) + if old_id: + new_id = show_list.get(old_id) + if new_id: + new_dir_name = os.path.join(sickgear.CACHE_DIR, 'images', 'shows', + '%s-%s' % (new_id, old_id), 'fanart') try: - move_file(*args) + move_file(d_entry.path, new_dir_name) except (BaseException, Exception) as e: - logger.warning(f'Unable to rename {args[0]} to {args[1]}:' - f' {repr(e)} / {ex(e)}') - else: - try: - shutil.rmtree(d_entry.path) - except (BaseException, Exception): - pass - try: - shutil.rmtree(d_entry.path) - except (BaseException, Exception): - pass - try: - os.rmdir(entry.path) - except (BaseException, Exception): - pass + logger.warning(f'Unable to rename {d_entry.path}' + f' to {new_dir_name}: {repr(e)} / {ex(e)}') + if os.path.isdir(new_dir_name): + try: + with scandir(new_dir_name) as s_d_n: + try: + f_n = filter(lambda fn: fn.is_file(), s_d_n) + except OSError as e: + logger.warning( + f'Unable to rename {repr(e)} / {ex(d)}') + else: + rename_args = [] + # noinspection PyTypeChecker + for f_entry in f_n: + rename_args += [ + (f_entry.path, + bp_match.sub(r'\2', f_entry.path))] + + for args in rename_args: + try: + move_file(*args) + except (BaseException, 
Exception) as e: + logger.warning( + f'Unable to rename {args[0]}' + f' to {args[1]}: {repr(e)} / {ex(e)}') + except OSError as e: + logger.warning( + 'Unable to rename %s / %s' % (repr(e), ex(e))) + else: + try: + shutil.rmtree(d_entry.path) + except (BaseException, Exception): + pass + try: + shutil.rmtree(d_entry.path) + except (BaseException, Exception): + pass + except OSError as e: + logger.warning('Unable to stat dirs %s / %s' % (repr(e), ex(e))) + continue + try: + os.rmdir(entry.path) + except (BaseException, Exception): + pass if 'thumbnails' == d: try: os.rmdir(bd) diff --git a/sickgear/search.py b/sickgear/search.py index 9c80f7d8..6ed0af9e 100644 --- a/sickgear/search.py +++ b/sickgear/search.py @@ -27,20 +27,21 @@ from sg_helpers import write_file import sickgear from . import clients, common, db, failed_history, helpers, history, logger, \ notifiers, nzbget, nzbSplitter, show_name_helpers, sab, ui -from .classes import NZBDataSearchResult, NZBSearchResult, TorrentSearchResult from .common import DOWNLOADED, SNATCHED, SNATCHED_BEST, SNATCHED_PROPER, MULTI_EP_RESULT, SEASON_RESULT, Quality from .providers.generic import GenericProvider from .tv import TVEpisode, TVShow from six import iteritems, itervalues, string_types -# noinspection PyUnreachableCode +# noinspection PyUnreachableCode, PyStatementEffect if False: from typing import AnyStr, Dict, List, Optional, Tuple, Union + from .classes import NZBDataSearchResult, NZBSearchResult, SearchResult, TorrentSearchResult + search_result_type = Union[NZBDataSearchResult, NZBSearchResult, SearchResult, TorrentSearchResult] def _download_result(result): - # type: (Union[NZBDataSearchResult, NZBSearchResult, TorrentSearchResult]) -> bool + # type: (search_result_type) -> bool """ Downloads a result to the appropriate black hole folder. 
@@ -87,7 +88,7 @@ def _download_result(result): def snatch_episode(result, end_status=SNATCHED): - # type: (Union[NZBDataSearchResult, NZBSearchResult, TorrentSearchResult], int) -> bool + # type: (search_result_type, int) -> bool """ Contains the internal logic necessary to actually "snatch" a result that has been found. @@ -208,12 +209,12 @@ def pass_show_wordlist_checks(name, show_obj): def pick_best_result( - results, # type: List[Union[NZBDataSearchResult, NZBSearchResult, TorrentSearchResult]] + results, # type: List[search_result_type] show_obj, # type: TVShow quality_list=None, # type: List[int] filter_rls='' # type: AnyStr ): - # type: (...) -> sickgear.classes.SearchResult + # type: (...) -> search_result_type """ picks best result from given search result list for given show object @@ -314,7 +315,7 @@ def pick_best_result( def best_candidate(best_result, cur_result): - # type: (sickgear.classes.SearchResult, sickgear.classes.SearchResult) -> sickgear.classes.SearchResult + # type: (search_result_type, search_result_type) -> search_result_type """ compare 2 search results and return best @@ -345,7 +346,7 @@ def best_candidate(best_result, cur_result): def is_final_result(result): - # type: (sickgear.classes.SearchResult) -> bool + # type: (search_result_type) -> bool """ Checks if the given result is good enough quality that we can stop searching for other ones. @@ -386,7 +387,7 @@ def is_final_result(result): def is_first_best_match(ep_status, result): - # type: (int, sickgear.classes.SearchResult) -> bool + # type: (int, search_result_type) -> bool """ Checks if the given result is the best quality match and if we want to archive the episode on first match. 
@@ -571,7 +572,7 @@ def wanted_episodes(show_obj, # type: TVShow def search_for_needed_episodes(ep_obj_list): - # type: (List[TVEpisode]) -> List[Union[NZBDataSearchResult, NZBSearchResult, TorrentSearchResult]] + # type: (List[TVEpisode]) -> List[search_result_type] """ search for episodes in list @@ -796,7 +797,7 @@ def cache_torrent_file( if search_result.provider.get_id() in ['tvchaosuk'] \ and hasattr(search_result.provider, 'regulate_cache_torrent_file'): torrent_name = search_result.provider.regulate_cache_torrent_file(torrent_name) - if not pick_best_result([search_result], show_obj, **kwargs) or \ + if not _pick_best_result_helper([search_result], show_obj=show_obj, **kwargs) or \ not show_name_helpers.pass_wordlist_checks(torrent_name, indexer_lookup=False, show_obj=show_obj): logger.log(f'Ignored {result_name} that contains {torrent_name} (debug log has detail)') return @@ -804,6 +805,40 @@ def cache_torrent_file( return search_result +def _pick_best_result_helper( + results, # type: List[search_result_type] + show_obj, # type: TVShow + old_status=None, # type: int + use_quality_list=None, # type: List[int] + best_qualities=None, # type: List[int] + orig_thread_name='' # type: AnyStr + ): + # type: (...) 
-> search_result_type + """ + helper to apply pick_best_result with filters + + :param results: SearchResults + :param show_obj: show object + :param old_status: old status of checked episode + :param use_quality_list: default quality list + :param best_qualities: best qualites for the episode + :param orig_thread_name: original thread name + :return: best matching result + """ + internal_quality_list = None + if None is not use_quality_list and any([results[0].ep_obj_list]): + old_status = old_status or \ + failed_history.find_old_status(results[0].ep_obj_list[0]) or results[0].ep_obj_list[0].status + if old_status: + status, quality = Quality.split_composite_status(old_status) + internal_quality_list = (status not in ( + common.WANTED, common.FAILED, common.UNAIRED, common.SKIPPED, common.IGNORED, common.UNKNOWN)) + + quality_list = use_quality_list or (internal_quality_list and (None, best_qualities)[any(best_qualities)] or None) + + return pick_best_result(results=results, show_obj=show_obj, quality_list=quality_list, filter_rls=orig_thread_name) + + def search_providers( show_obj, # type: TVShow ep_obj_list, # type: List[TVEpisode] @@ -813,7 +848,7 @@ def search_providers( old_status=None, # type: int scheduled=False # type: bool ): - # type: (...) -> List[sickgear.classes.SearchResult] + # type: (...) 
-> List[search_result_type] """ search provider for given episode objects from given show object @@ -827,7 +862,7 @@ def search_providers( :return: list of search result objects """ found_results = {} - final_results = [] + final_results = [] # type: List[search_result_type] search_done = False search_threads = [] @@ -864,6 +899,10 @@ def search_providers( for s_t in search_threads: s_t.join() + any_qualities, best_qualities = Quality.split_quality(show_obj.quality) + params = dict(show_obj=show_obj, old_status=old_status, best_qualities=best_qualities, + orig_thread_name=orig_thread_name) + # now look in all the results for cur_provider in provider_list: provider_id = cur_provider.get_id() @@ -872,13 +911,12 @@ def search_providers( if provider_id not in found_results or not len(found_results[provider_id]): continue - any_qualities, best_qualities = Quality.split_quality(show_obj.quality) - # pick the best season NZB best_season_result = None if SEASON_RESULT in found_results[provider_id]: - best_season_result = pick_best_result(found_results[provider_id][SEASON_RESULT], show_obj, - any_qualities + best_qualities) + best_season_result = _pick_best_result_helper( + found_results[provider_id][SEASON_RESULT], show_obj=show_obj, + use_quality_list=any_qualities + best_qualities, orig_thread_name=orig_thread_name) highest_quality_overall = 0 for cur_episode in found_results[provider_id]: @@ -962,7 +1000,7 @@ def search_providers( if not best_season_result.url.startswith('magnet'): best_season_result = cache_torrent_file( - best_season_result, show_obj=show_obj, filter_rls=orig_thread_name) + best_season_result, show_obj=show_obj, orig_thread_name=orig_thread_name) if best_season_result: ep_num = MULTI_EP_RESULT @@ -972,7 +1010,7 @@ def search_providers( found_results[provider_id][ep_num] = [best_season_result] # go through multi-ep results and see if we really want them or not, get rid of the rest - multi_results = {} + multi_result_groups = {} # type: Dict[AnyStr, 
Union[List, search_result_type]] if MULTI_EP_RESULT in found_results[provider_id]: for multi_result in found_results[provider_id][MULTI_EP_RESULT]: @@ -983,58 +1021,18 @@ def search_providers( logger.log(f'Rejecting previously failed multi episode result [{multi_result.name}]') continue - # see how many of the eps that this result covers aren't covered by single results - needed_eps = [] - not_needed_eps = [] - for ep_obj in multi_result.ep_obj_list: - ep_num = ep_obj.episode - # if we have results for the episode - if ep_num in found_results[provider_id] and 0 < len(found_results[provider_id][ep_num]): - needed_eps.append(ep_num) - else: - not_needed_eps.append(ep_num) + group_name = '-'.join(f'{_num}' for _num in sorted(_ep.episode for _ep in multi_result.ep_obj_list)) + multi_result_groups.setdefault(group_name, []).append(multi_result) - logger.debug(f'Single episode check result is... needed episodes: {needed_eps},' - f' not needed episodes: {not_needed_eps}') - - if not not_needed_eps: - logger.debug('All of these episodes were covered by single episode results,' - ' ignoring this multi episode result') - continue - - # check if these eps are already covered by another multi-result - multi_needed_eps = [] - multi_not_needed_eps = [] - for ep_obj in multi_result.ep_obj_list: - ep_num = ep_obj.episode - if ep_num in multi_results: - multi_not_needed_eps.append(ep_num) - else: - multi_needed_eps.append(ep_num) - - logger.debug(f'Multi episode check result is...' 
- f' multi needed episodes: {multi_needed_eps},' - f' multi not needed episodes: {multi_not_needed_eps}') - - if not multi_needed_eps: - logger.debug('All of these episodes were covered by another multi episode nzb,' - ' ignoring this multi episode result') - continue - - # if we're keeping this multi-result then remember it - for ep_obj in multi_result.ep_obj_list: - multi_results[ep_obj.episode] = multi_result - - # don't bother with the single result if we're going to get it with a multi result - for ep_obj in multi_result.ep_obj_list: - ep_num = ep_obj.episode - if ep_num in found_results[provider_id]: - logger.debug(f'A needed multi episode result overlaps with a single episode result' - f' for episode #{ep_num}, removing the single episode results from the list') - del found_results[provider_id][ep_num] - - # of all the single ep results narrow it down to the best one for each episode - final_results += set(itervalues(multi_results)) + remove_list = [] + for multi_group_name, multi_group_result in multi_result_groups.items(): + best_result = _pick_best_result_helper(multi_group_result, **params) + if best_result: + multi_result_groups[multi_group_name] = best_result + else: + remove_list.append(multi_group_name) + if remove_list: + multi_result_groups = {k: v for k, v in multi_result_groups.items() if k not in remove_list} for cur_search_result in found_results[provider_id]: # type: int if cur_search_result in (MULTI_EP_RESULT, SEASON_RESULT): @@ -1043,23 +1041,7 @@ def search_providers( if 0 == len(found_results[provider_id][cur_search_result]): continue - use_quality_list = None - if 0 < len(found_results[provider_id][cur_search_result]) and \ - any([found_results[provider_id][cur_search_result][0].ep_obj_list]): - old_status = old_status or \ - failed_history.find_old_status( - found_results[provider_id][cur_search_result][0].ep_obj_list[0]) or \ - found_results[provider_id][cur_search_result][0].ep_obj_list[0].status - if old_status: - status, quality = 
Quality.split_composite_status(old_status) - use_quality_list = (status not in ( - common.WANTED, common.FAILED, common.UNAIRED, common.SKIPPED, common.IGNORED, common.UNKNOWN)) - - quality_list = use_quality_list and (None, best_qualities)[any(best_qualities)] or None - - params = dict(show_obj=show_obj, quality_list=quality_list, filter_rls=orig_thread_name) - - best_result = pick_best_result(found_results[provider_id][cur_search_result], **params) + best_result = _pick_best_result_helper(results=found_results[provider_id][cur_search_result], **params) # if all results were rejected move on to the next episode if not best_result: @@ -1099,6 +1081,18 @@ def search_providers( if not found: final_results += [best_result] + # check if we have multi episode results that should be taken + if len(multi_result_groups): + found_eps = {_ep: _res.quality for _res in final_results for _ep in _res.ep_obj_list} + for multi_group_name, multi_group_result in multi_result_groups.items(): # type: AnyStr, search_result_type + # if a episode result is only available in multi result or is better quality then single + if (any(_ep not in found_eps for _ep in multi_group_result.ep_obj_list) or + any(multi_group_result.quality > found_eps[_ep] + for _ep in multi_group_result.ep_obj_list if _ep in found_eps)): + final_results = [_res for _res in final_results + if not any(_ep in _res.ep_obj_list for _ep in multi_group_result.ep_obj_list)] + final_results += [multi_group_result] + # check that we got all the episodes we wanted first before doing a match and snatch wanted_ep_count = 0 for wanted_ep in ep_obj_list: diff --git a/sickgear/tv.py b/sickgear/tv.py index 2108073a..c3df7072 100644 --- a/sickgear/tv.py +++ b/sickgear/tv.py @@ -3768,9 +3768,8 @@ class TVShow(TVShowBase): return False cur_status, cur_quality = Quality.split_composite_status(int(sql_result[0]['status'])) - ep_status_text = statusStrings[cur_status] - logger.debug('Existing episode status: %s (%s)' % 
(statusStrings[cur_status], ep_status_text)) + logger.debug(f'Existing episode status: {statusStrings[int(sql_result[0]["status"])]}') # if we know we don't want it then just say no if cur_status in [IGNORED, ARCHIVED] + ([SKIPPED], [])[multi_ep] and not manual_search: @@ -4542,9 +4541,9 @@ class TVEpisode(TVEpisodeBase): # shouldn't get here probably else: - msg = '(2) Status changes from %s to ' % statusStrings[self._status] - self.status = UNKNOWN - logger.debug('%s%s' % (msg, statusStrings[self._status])) + logger.warning(f'Status not changed from {statusStrings[self._status]}' + f' for episode {self._season}x{self._episode} because file "{self._location}"' + f' has no media file extension') def load_from_nfo(self, location): """ diff --git a/tests/tvinfo_api_tests.py b/tests/tvinfo_api_tests.py index a008476b..4f0178eb 100644 --- a/tests/tvinfo_api_tests.py +++ b/tests/tvinfo_api_tests.py @@ -592,15 +592,16 @@ class TVInfoTests(test.SickbeardTestDBCase): @classmethod def tearDownClass(cls): super(TVInfoTests, cls).tearDownClass() - files = {_f.name for _f in os.scandir(mock_data_dir) if _f.is_file()} - unused_files = files - used_files - if delete_unused_mock_files: - for _u_f in unused_files: - full_filename = os.path.join(mock_data_dir, _u_f) - try: - os.remove(full_filename) - except (BaseException, Exception) as e: - print('errror deleting: [%s], error: %s' % (full_filename, e)) + with os.scandir(mock_data_dir) as s_d: + files = {_f.name for _f in os.scandir(mock_data_dir) if _f.is_file()} + unused_files = files - used_files + if delete_unused_mock_files: + for _u_f in unused_files: + full_filename = os.path.join(mock_data_dir, _u_f) + try: + os.remove(full_filename) + except (BaseException, Exception) as e: + print('errror deleting: [%s], error: %s' % (full_filename, e)) if unused_files: print('unused files: %s' % unused_files) print('reset mock methods')